diff --git a/docs/yaml.md b/docs/yaml.md
index 42181a0848..2b4e67e69b 100644
--- a/docs/yaml.md
+++ b/docs/yaml.md
@@ -100,7 +100,7 @@ build:
   python_version: "3.11.1"
 ```
 
-Cog supports all active branches of Python: 3.8, 3.9, 3.10, 3.11, 3.12. If you don't define a version, Cog will use the latest version of Python 3.12 or a version of Python that is compatible with the versions of PyTorch or TensorFlow you specify.
+Cog supports all active branches of Python: 3.8, 3.9, 3.10, 3.11, 3.12, 3.13. If you don't define a version, Cog will use the latest version of Python 3.12 or a version of Python that is compatible with the versions of PyTorch or TensorFlow you specify.
 
 Note that these are the versions supported **in the Docker container**, not your host machine. You can run any version(s) of Python you wish on your host machine.
diff --git a/pkg/config/torch_compatibility_matrix.json b/pkg/config/torch_compatibility_matrix.json
index 1bde2a3424..e5db5b3fc9 100644
--- a/pkg/config/torch_compatibility_matrix.json
+++ b/pkg/config/torch_compatibility_matrix.json
@@ -7,10 +7,10 @@
     "ExtraIndexURL": "https://download.pytorch.org/whl/cpu",
     "CUDA": null,
     "Pythons": [
+      "3.9",
       "3.10",
       "3.11",
-      "3.12",
-      "3.9"
+      "3.12"
     ]
   },
   {
@@ -21,10 +21,10 @@
     "ExtraIndexURL": "https://download.pytorch.org/whl/cu118",
     "CUDA": "11.8",
     "Pythons": [
+      "3.9",
       "3.10",
       "3.11",
-      "3.12",
-      "3.9"
+      "3.12"
     ]
   },
   {
@@ -35,10 +35,10 @@
     "ExtraIndexURL": "https://download.pytorch.org/whl/cu121",
     "CUDA": "12.1",
     "Pythons": [
+      "3.9",
       "3.10",
       "3.11",
-      "3.12",
-      "3.9"
+      "3.12"
     ]
   },
   {
@@ -49,10 +49,10 @@
     "ExtraIndexURL": "https://download.pytorch.org/whl/cu124",
     "CUDA": "12.4",
     "Pythons": [
+      "3.9",
       "3.10",
       "3.11",
-      "3.12",
-      "3.9"
+      "3.12"
     ]
   },
   {
@@ -63,11 +63,11 @@
     "ExtraIndexURL": "https://download.pytorch.org/whl/cpu",
     "CUDA": null,
     "Pythons": [
+      "3.9",
       "3.10",
       "3.11",
       "3.12",
-      "3.13",
-      "3.9"
+      "3.13"
     ]
   },
   {
@@ -78,11 +78,11 @@
     "ExtraIndexURL": "https://download.pytorch.org/whl/cu118",
     "CUDA": "11.8",
     "Pythons": [
+      "3.9",
       "3.10",
       "3.11",
       "3.12",
-      "3.13",
-      "3.9"
+      "3.13"
     ]
   },
   {
@@ -93,12 +93,11 @@
     "ExtraIndexURL": "https://download.pytorch.org/whl/cu121",
     "CUDA": "12.1",
     "Pythons": [
+      "3.9",
       "3.10",
       "3.11",
       "3.12",
-      "3.13",
-      "3.9",
-      "3.10"
+      "3.13"
     ]
   },
   {
@@ -109,11 +108,11 @@
     "ExtraIndexURL": "https://download.pytorch.org/whl/cu124",
     "CUDA": "12.4",
     "Pythons": [
+      "3.9",
       "3.10",
       "3.11",
       "3.12",
-      "3.13",
-      "3.9"
+      "3.13"
     ]
   },
   {
diff --git a/pkg/dockerfile/standard_generator_test.go b/pkg/dockerfile/standard_generator_test.go
index 65b660e7de..0cb01cbb24 100644
--- a/pkg/dockerfile/standard_generator_test.go
+++ b/pkg/dockerfile/standard_generator_test.go
@@ -87,6 +87,7 @@ func TestGenerateEmptyCPU(t *testing.T) {
 	conf, err := config.FromYAML([]byte(`
 build:
   gpu: false
+  python_version: "3.12"
 predict: predict.py:Predictor
 `))
 	require.NoError(t, err)
@@ -121,6 +122,7 @@ func TestGenerateEmptyGPU(t *testing.T) {
 	conf, err := config.FromYAML([]byte(`
 build:
   gpu: true
+  python_version: "3.12"
 predict: predict.py:Predictor
 `))
 	require.NoError(t, err)
@@ -155,6 +157,7 @@ func TestGenerateFullCPU(t *testing.T) {
 	conf, err := config.FromYAML([]byte(`
 build:
   gpu: false
+  python_version: "3.12"
   system_packages:
     - ffmpeg
     - cowsay
@@ -209,6 +212,7 @@ func TestGenerateFullGPU(t *testing.T) {
 	conf, err := config.FromYAML([]byte(`
 build:
   gpu: true
+  python_version: "3.12"
   system_packages:
     - ffmpeg
     - cowsay
@@ -264,6 +268,7 @@ func TestPreInstall(t *testing.T) {
 	conf, err := config.FromYAML([]byte(`
 build:
+  python_version: "3.12"
   system_packages:
     - cowsay
   pre_install:
@@ -303,6 +308,7 @@ func TestPythonRequirements(t *testing.T) {
 	require.NoError(t, err)
 	conf, err := config.FromYAML([]byte(`
 build:
+  python_version: "3.12"
   python_requirements: "my-requirements.txt"
 `))
 	require.NoError(t, err)
@@ -349,6 +355,7 @@ func TestGenerateWithLargeModels(t *testing.T) {
 	conf, err := config.FromYAML([]byte(`
 build:
   gpu: true
+  python_version: "3.12"
   system_packages:
     - ffmpeg
     - cowsay
@@ -454,6 +461,7 @@ func TestGenerateDockerfileWithoutSeparateWeights(t *testing.T) {
 	conf, err := config.FromYAML([]byte(`
 build:
   gpu: false
+  python_version: "3.12"
 predict: predict.py:Predictor
 `))
 	require.NoError(t, err)
@@ -487,6 +495,7 @@ func TestGenerateEmptyCPUWithCogBaseImage(t *testing.T) {
 	conf, err := config.FromYAML([]byte(`
 build:
   gpu: false
+  python_version: "3.12"
 predict: predict.py:Predictor
 `))
 	require.NoError(t, err)
@@ -516,10 +525,10 @@ func TestGeneratePythonCPUWithCogBaseImage(t *testing.T) {
 	conf, err := config.FromYAML([]byte(`
 build:
   gpu: false
+  python_version: "3.12"
   system_packages:
     - ffmpeg
     - cowsay
-  python_version: "3.12"
   python_packages:
     - pandas==1.2.0.12
   run:
@@ -624,6 +633,7 @@ func TestGenerateTorchWithStrippedModifiedVersion(t *testing.T) {
 build:
   gpu: true
   cuda: "11.8"
+  python_version: "3.12"
   system_packages:
     - ffmpeg
     - cowsay
@@ -675,6 +685,7 @@ func TestGenerateWithStrip(t *testing.T) {
 build:
   gpu: true
   cuda: "11.8"
+  python_version: "3.12"
   system_packages:
     - ffmpeg
     - cowsay
@@ -727,6 +738,7 @@ func TestGenerateDoesNotContainDangerousCFlags(t *testing.T) {
 build:
   gpu: true
   cuda: "11.8"
+  python_version: "3.12"
   system_packages:
     - ffmpeg
     - cowsay
@@ -758,6 +770,7 @@ func TestGenerateWithPrecompile(t *testing.T) {
 build:
   gpu: true
   cuda: "11.8"
+  python_version: "3.12"
   system_packages:
     - ffmpeg
     - cowsay
diff --git a/pyproject.toml b/pyproject.toml
index bc8a52715a..becf97a209 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,6 +16,7 @@ classifiers = [
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
 ]
 
 requires-python = ">=3.8"
diff --git a/python/cog/mimetypes_ext.py b/python/cog/mimetypes_ext.py
index d1a13f10cf..52be376f11 100644
--- a/python/cog/mimetypes_ext.py
+++ b/python/cog/mimetypes_ext.py
@@ -1,3 +1,4 @@
+import sys
 import typing
 
 if typing.TYPE_CHECKING:
@@ -18,4 +19,5 @@ def install_mime_extensions(mimetypes: IMimeTypes) -> None:
     # This could also be done by loading a mime.types file from disk using
     # mimetypes.read_mime_types().
-    mimetypes.add_type("image/webp", ".webp")
+    if sys.version_info < (3, 13):
+        mimetypes.add_type("image/webp", ".webp")
diff --git a/python/tests/server/test_helpers.py b/python/tests/server/test_helpers.py
index 871cc1af51..2e0b700b51 100644
--- a/python/tests/server/test_helpers.py
+++ b/python/tests/server/test_helpers.py
@@ -189,14 +189,14 @@ def _write_hook(stream_name, data):
         pass
 
     original_limits = resource.getrlimit(resource.RLIMIT_NOFILE)
-    resource.setrlimit(resource.RLIMIT_NOFILE, (128, original_limits[1]))
+    resource.setrlimit(resource.RLIMIT_NOFILE, (256, original_limits[1]))
     request.addfinalizer(
         lambda: resource.setrlimit(resource.RLIMIT_NOFILE, original_limits)
     )
 
     r = StreamRedirector(callback=_write_hook, streams=[stream])
-    for _ in range(10 * 128):
+    for _ in range(10 * 256):
         with r:
             stream.write("one\n")
             stream.flush()
diff --git a/python/tests/test_mimetypes_ext.py b/python/tests/test_mimetypes_ext.py
index a1e5303321..87f146de00 100644
--- a/python/tests/test_mimetypes_ext.py
+++ b/python/tests/test_mimetypes_ext.py
@@ -1,3 +1,4 @@
+import sys
 from mimetypes import MimeTypes
 
 from cog.mimetypes_ext import install_mime_extensions
@@ -6,7 +7,9 @@ def test_webp_ext_support():
     # Assert on empty database.
     mt = MimeTypes(filenames=tuple())
-    assert mt.guess_type("image.webp") == (None, None)
+    if sys.version_info < (3, 13):
+        assert mt.guess_type("image.webp") == (None, None)
+
     install_mime_extensions(mt)
     assert mt.guess_type("image.webp") == ("image/webp", None)
diff --git a/tools/compatgen/internal/torch.go b/tools/compatgen/internal/torch.go
index 6f3c4c18c0..650fa1bb8e 100644
--- a/tools/compatgen/internal/torch.go
+++ b/tools/compatgen/internal/torch.go
@@ -209,7 +209,7 @@ func parseTorchInstallString(s string, defaultVersions map[string]string, cuda *
 	torchaudio := libVersions["torchaudio"]
 
 	// TODO: this could be determined from https://download.pytorch.org/whl/torch/
-	pythons := []string{"3.8", "3.9", "3.10", "3.11", "3.12"}
+	pythons := []string{"3.8", "3.9", "3.10", "3.11", "3.12", "3.13"}
 
 	return &config.TorchCompatibility{
 		Torch: torch,