python311Packages.tensorflow-datasets: 4.9.7 -> 4.9.8 (#376040)

authored by Pol Dellaiera and committed by GitHub (6c679116, 546a7aa6)

+117 -93
+10 -17
pkgs/development/python-modules/array-record/default.nix
···
  fetchPypi,
  absl-py,
  etils,
- importlib-resources,
- typing-extensions,
- zipp,
}:

buildPythonPackage rec {
  pname = "array-record";
- version = "0.5.0";
  format = "wheel";

- # As of 2023-10-31, PyPI includes wheels for Python 3.9, 3.10, and 3.11.
- disabled = pythonOlder "3.9" || pythonAtLeast "3.12";

  src =
    let
···
      platform = "manylinux_2_17_x86_64.manylinux2014_x86_64";
      hash =
        {
-         cp39 = "sha256-BzMOVue7E1S1+5+XTcPELko81ujc9MbmqLhNsU7pqO0=";
-         cp310 = "sha256-eUD9pQu9GsbV8MPD1MiF3Ihr+zYioSOo6P15hYIwPYo=";
-         cp311 = "sha256-rAmkI3EIZPYiXrxFowfDC0Gf3kRw0uX0i6Kx6Zu+hNM=";
        }
        .${pyShortVersion} or (throw "${pname} is missing hash for ${pyShortVersion}");
    };

- propagatedBuildInputs = [
    absl-py
    etils
-   importlib-resources
-   typing-extensions
-   zipp
- ];

  pythonImportsCheck = [ "array_record" ];

- meta = with lib; {
    description = "ArrayRecord is a new file format derived from Riegeli, achieving a new frontier of IO efficiency";
    homepage = "https://github.com/google/array_record";
-   license = licenses.asl20;
-   maintainers = with maintainers; [ GaetanLepage ];
    platforms = [ "x86_64-linux" ];
  };
}
···
  fetchPypi,
  absl-py,
  etils,
}:

buildPythonPackage rec {
  pname = "array-record";
+ version = "0.7.1";
  format = "wheel";

+ disabled = pythonOlder "3.10" || pythonAtLeast "3.13";

  src =
    let
···
      platform = "manylinux_2_17_x86_64.manylinux2014_x86_64";
      hash =
        {
+         cp310 = "sha256-JDaj1iJy1BQ7fHjmCbGQkNqG5rIRuwTwENbanM9a8hg=";
+         cp311 = "sha256-QVynMK9t0BnEtgdfbJ5T3s7N02i0XD2siUSRxKtrI+M=";
+         cp312 = "sha256-xJJGm6kLQ2/TzVYTrBtQ1Hqky1odHfbhe/g+PSSYt1c=";
        }
        .${pyShortVersion} or (throw "${pname} is missing hash for ${pyShortVersion}");
    };

+ dependencies = [
    absl-py
    etils
+ ] ++ etils.optional-dependencies.epath;

  pythonImportsCheck = [ "array_record" ];

+ meta = {
    description = "ArrayRecord is a new file format derived from Riegeli, achieving a new frontier of IO efficiency";
    homepage = "https://github.com/google/array_record";
+   license = lib.licenses.asl20;
+   maintainers = with lib.maintainers; [ GaetanLepage ];
    platforms = [ "x86_64-linux" ];
  };
}
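The `let` binding that computes `pyShortVersion` is elided above (`···`). A minimal sketch of the wheel-selection idiom, assuming the tag is derived from the selected interpreter; the `python` argument and the underscore spelling of the PyPI name are assumptions, not the committed code:

  src =
    let
      # e.g. Python 3.11 -> "cp311", matching the keys of the hash set below
      pyShortVersion = "cp" + builtins.replaceStrings [ "." ] [ "" ] python.pythonVersion;
    in
    fetchPypi {
      pname = "array_record"; # assumed PyPI project name
      inherit version format;
      dist = pyShortVersion;
      python = pyShortVersion;
      abi = pyShortVersion;
      platform = "manylinux_2_17_x86_64.manylinux2014_x86_64";
      hash =
        {
          cp310 = "sha256-JDaj1iJy1BQ7fHjmCbGQkNqG5rIRuwTwENbanM9a8hg=";
          cp311 = "sha256-QVynMK9t0BnEtgdfbJ5T3s7N02i0XD2siUSRxKtrI+M=";
          cp312 = "sha256-xJJGm6kLQ2/TzVYTrBtQ1Hqky1odHfbhe/g+PSSYt1c=";
        }
        .${pyShortVersion} or (throw "${pname} is missing hash for ${pyShortVersion}");
    };

The `or (throw ...)` fallback turns an interpreter without a pinned wheel hash into an immediate, descriptive evaluation error instead of a late hash mismatch.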
+43 -20
pkgs/development/python-modules/dm-sonnet/default.nix
{
  lib,
  buildPythonPackage,
- click,
  dm-tree,
- docutils,
  etils,
- fetchFromGitHub,
- fetchpatch,
  numpy,
- pythonOlder,
  tabulate,
  tensorflow,
  tensorflow-datasets,
- wrapt,
}:

buildPythonPackage rec {
  pname = "dm-sonnet";
  version = "2.0.2";
- format = "setuptools";
-
- disabled = pythonOlder "3.6";

  src = fetchFromGitHub {
    owner = "deepmind";
    repo = "sonnet";
-   rev = "refs/tags/v${version}";
    hash = "sha256-WkloUbqSyPG3cbLG8ktsjdluACkCbUZ7t6rYWst8rs8=";
  };

- patches = [
-   (fetchpatch {
-     name = "replace-np-bool-with-np-bool_.patch";
-     url = "https://github.com/deepmind/sonnet/commit/df5d099d4557a9a81a0eb969e5a81ed917bcd612.patch";
-     hash = "sha256-s7abl83osD4wa0ZhqgDyjqQ3gagwGYCdQifwFqhNp34=";
-   })
  ];

- propagatedBuildInputs = [
    dm-tree
    etils
    numpy
···
  nativeCheckInputs = [
    click
    docutils
    tensorflow
    tensorflow-datasets
  ];

  pythonImportsCheck = [ "sonnet" ];

- meta = with lib; {
    description = "Library for building neural networks in TensorFlow";
    homepage = "https://github.com/deepmind/sonnet";
-   license = licenses.asl20;
-   maintainers = with maintainers; [ onny ];
  };
}
{
  lib,
  buildPythonPackage,
+ fetchFromGitHub,
+
+ # build-system
+ setuptools,
+
+ # dependencies
  dm-tree,
  etils,
  numpy,
  tabulate,
+ wrapt,
+
+ # tests
+ click,
+ docutils,
+ keras,
+ pytestCheckHook,
  tensorflow,
  tensorflow-datasets,
+ tf-keras,
}:

buildPythonPackage rec {
  pname = "dm-sonnet";
  version = "2.0.2";
+ pyproject = true;

  src = fetchFromGitHub {
    owner = "deepmind";
    repo = "sonnet";
+   tag = "v${version}";
    hash = "sha256-WkloUbqSyPG3cbLG8ktsjdluACkCbUZ7t6rYWst8rs8=";
  };

+ build-system = [
+   setuptools
  ];

+ dependencies = [
    dm-tree
    etils
    numpy
···
  nativeCheckInputs = [
    click
    docutils
+   keras
+   pytestCheckHook
    tensorflow
    tensorflow-datasets
+   tf-keras
+ ];
+
+ # ImportError: `keras.optimizers.legacy` is not supported in Keras 3
+ preCheck = ''
+   export TF_USE_LEGACY_KERAS=True
+ '';
+
+ disabledTests = [
+   # AssertionError: 2 != 0 : 2 doctests failed
+   "test_doctest_sonnet.functional"
+
+   # AssertionError: Not equal to tolerance
+   "testComputationAgainstNumPy1"
+
+   # tensorflow.python.framework.errors_impl.InvalidArgumentError: cannot compute MatMul as input #1(zero-based) was expected to be a float tensor but is a half tensor [Op:MatMul]
+   "testComputationAgainstNumPy0"
+   "testComputationAgainstNumPy1"
  ];

  pythonImportsCheck = [ "sonnet" ];

+ meta = {
    description = "Library for building neural networks in TensorFlow";
    homepage = "https://github.com/deepmind/sonnet";
+   changelog = "https://github.com/google-deepmind/sonnet/releases/tag/v${version}";
+   license = lib.licenses.asl20;
+   maintainers = with lib.maintainers; [ onny ];
  };
}
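The new `preCheck` works around Keras 3: recent TensorFlow resolves `tf.keras` to the legacy Keras 2 implementation only when `TF_USE_LEGACY_KERAS` is set and the `tf-keras` package is importable, which restores `keras.optimizers.legacy` for the test suite. A sketch, not part of the commit, of applying the same workaround ad hoc while iterating on the tests locally (`python3Packages` and the override shape are assumptions):

  # Hypothetical local override reusing the commit's tf-keras workaround.
  dm-sonnet.overridePythonAttrs (old: {
    nativeCheckInputs = (old.nativeCheckInputs or [ ]) ++ [ python3Packages.tf-keras ];
    preCheck = (old.preCheck or "") + ''
      export TF_USE_LEGACY_KERAS=True
    '';
  })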
-22
pkgs/development/python-modules/tensorflow-datasets/corruptions.patch
- diff --git a/tensorflow_datasets/image_classification/corruptions.py b/tensorflow_datasets/image_classification/corruptions.py
- index 066c4460..cb9a6667 100644
- --- a/tensorflow_datasets/image_classification/corruptions.py
- +++ b/tensorflow_datasets/image_classification/corruptions.py
- @@ -35,7 +35,7 @@ FROST_FILENAMES = []
-
-
-  def _imagemagick_bin():
- -   return 'imagemagick'  # pylint: disable=unreachable
- +   return 'convert'  # pylint: disable=unreachable
-
-
-  # /////////////// Corruption Helpers ///////////////
- @@ -675,7 +675,7 @@ def spatter(x, severity=1):
-    # ker = np.array([[-1,-2,-3],[-2,0,0],[-3,0,1]], dtype=np.float32)
-    # ker -= np.mean(ker)
-    ker = np.array([[-2, -1, 0], [-1, 1, 1], [0, 1, 2]])
- -   dist = cv2.filter2D(dist, cv2.CVX_8U, ker)
- +   dist = cv2.filter2D(dist, cv2.CV_8U, ker)
-    dist = cv2.blur(dist, (3, 3)).astype(np.float32)
-
-    m = cv2.cvtColor(liquid_layer * dist, cv2.COLOR_GRAY2BGRA)
+57 -32
pkgs/development/python-modules/tensorflow-datasets/default.nix
{
  apache-beam,
- array-record,
- attrs,
  beautifulsoup4,
- buildPythonPackage,
  click,
  datasets,
- dill,
- dm-tree,
- fetchFromGitHub,
  ffmpeg,
- future,
  imagemagick,
- importlib-resources,
  jax,
  jaxlib,
  jinja2,
  langdetect,
- lib,
  lxml,
  matplotlib,
  mwparserfromhell,
  mwxml,
  networkx,
  nltk,
- numpy,
  opencv4,
  pandas,
  pillow,
- promise,
- protobuf,
- psutil,
  pycocotools,
  pydub,
  pytest-xdist,
  pytestCheckHook,
- requests,
  scikit-image,
  scipy,
- six,
  tensorflow,
- tensorflow-metadata,
- termcolor,
  tifffile,
- tqdm,
  zarr,
}:

buildPythonPackage rec {
  pname = "tensorflow-datasets";
- version = "4.9.7";
- format = "setuptools";

  src = fetchFromGitHub {
    owner = "tensorflow";
    repo = "datasets";
    tag = "v${version}";
-   hash = "sha256-pnut5z3rEYIOOhsQT6uVjPdy+SqftKptSQMwxbMXoKA=";
  };

  patches = [
-   # addresses https://github.com/tensorflow/datasets/issues/3673
-   ./corruptions.patch
  ];

- propagatedBuildInputs = [
    array-record
-   attrs
    dill
    dm-tree
    future
    importlib-resources
    numpy
    promise
    protobuf
    psutil
    requests
    six
    tensorflow-metadata
    termcolor
···
    langdetect
    lxml
    matplotlib
    mwparserfromhell
    mwxml
    networkx
···
    pytestCheckHook
    scikit-image
    scipy
    tensorflow
    tifffile
    zarr
  ];

  disabledTestPaths = [
    # Sandbox violations: network access, filesystem write attempts outside of build dir, ...
    "tensorflow_datasets/core/dataset_builder_test.py"
···
    "tensorflow_datasets/import_without_tf_test.py"
    "tensorflow_datasets/proto/build_tf_proto_test.py"
    "tensorflow_datasets/scripts/cli/build_test.py"

    # Requires `pretty_midi` which is not packaged in `nixpkgs`.
    "tensorflow_datasets/audio/groove.py"
···

    # Requires `gcld3` and `pretty_midi` which are not packaged in `nixpkgs`.
    "tensorflow_datasets/core/lazy_imports_lib_test.py"

    # Requires `tensorflow_io` which is not packaged in `nixpkgs`.
    "tensorflow_datasets/core/features/audio_feature_test.py"
    "tensorflow_datasets/image/lsun_test.py"

-   # Requires `envlogger` which is not packaged in `nixpkgs`.
-   "tensorflow_datasets/rlds/locomotion/locomotion_test.py"
-   "tensorflow_datasets/rlds/robosuite_panda_pick_place_can/robosuite_panda_pick_place_can_test.py"
-
    # Fails with `TypeError: Constant constructor takes either 0 or 2 positional arguments`
    # deep in TF AutoGraph. Doesn't reproduce in Docker with Ubuntu 22.04 => might be related
    # to the differences in some of the dependencies?
···
    "tensorflow_datasets/text/c4_utils_test.py"
  ];

- meta = with lib; {
    description = "Library of datasets ready to use with TensorFlow";
    homepage = "https://www.tensorflow.org/datasets/overview";
-   license = licenses.asl20;
-   maintainers = with maintainers; [ ndl ];
  };
}
{
+ lib,
+ buildPythonPackage,
+ fetchFromGitHub,
+ fetchpatch,
+
+ # dependencies
+ array-record,
+ dill,
+ dm-tree,
+ future,
+ immutabledict,
+ importlib-resources,
+ numpy,
+ promise,
+ protobuf,
+ psutil,
+ requests,
+ simple-parsing,
+ six,
+ tensorflow-metadata,
+ termcolor,
+ tqdm,
+
+ # tests
  apache-beam,
  beautifulsoup4,
  click,
  datasets,
  ffmpeg,
  imagemagick,
  jax,
  jaxlib,
  jinja2,
  langdetect,
  lxml,
  matplotlib,
+ mlcroissant,
  mwparserfromhell,
  mwxml,
  networkx,
  nltk,
  opencv4,
  pandas,
  pillow,
  pycocotools,
  pydub,
  pytest-xdist,
  pytestCheckHook,
  scikit-image,
  scipy,
+ sortedcontainers,
  tensorflow,
  tifffile,
  zarr,
}:

buildPythonPackage rec {
  pname = "tensorflow-datasets";
+ version = "4.9.8";
+ pyproject = true;

  src = fetchFromGitHub {
    owner = "tensorflow";
    repo = "datasets";
    tag = "v${version}";
+   hash = "sha256-nqveZ+8b0f5sGIn6WufKeA37yEsZjzhCIbCfwMZ9JOM=";
  };

  patches = [
+   # mlcroissant uses encoding_formats, not encoding_format.
+   # Backport https://github.com/tensorflow/datasets/pull/11037 until released.
+   (fetchpatch {
+     url = "https://github.com/tensorflow/datasets/commit/92cbcff725a1036569a515cc3356aa8480740451.patch";
+     hash = "sha256-2hnMvQP83+eAJllce19aHujcoWQzUz3+LsasWCo4BtM=";
+   })
  ];

+ dependencies = [
    array-record
    dill
    dm-tree
    future
+   immutabledict
    importlib-resources
    numpy
    promise
    protobuf
    psutil
    requests
+   simple-parsing
    six
    tensorflow-metadata
    termcolor
···
    langdetect
    lxml
    matplotlib
+   mlcroissant
    mwparserfromhell
    mwxml
    networkx
···
    pytestCheckHook
    scikit-image
    scipy
+   sortedcontainers
    tensorflow
    tifffile
    zarr
  ];

+ pytestFlagsArray = [
+   # AttributeError: 'NoneType' object has no attribute 'Table'
+   "--deselect=tensorflow_datasets/core/file_adapters_test.py::test_read_write"
+   "--deselect=tensorflow_datasets/text/c4_wsrs/c4_wsrs_test.py::C4WSRSTest"
+ ];
+
  disabledTestPaths = [
    # Sandbox violations: network access, filesystem write attempts outside of build dir, ...
    "tensorflow_datasets/core/dataset_builder_test.py"
···
    "tensorflow_datasets/import_without_tf_test.py"
    "tensorflow_datasets/proto/build_tf_proto_test.py"
    "tensorflow_datasets/scripts/cli/build_test.py"
+   "tensorflow_datasets/datasets/imagenet2012_corrupted/imagenet2012_corrupted_dataset_builder_test.py"

    # Requires `pretty_midi` which is not packaged in `nixpkgs`.
    "tensorflow_datasets/audio/groove.py"
···

    # Requires `gcld3` and `pretty_midi` which are not packaged in `nixpkgs`.
    "tensorflow_datasets/core/lazy_imports_lib_test.py"
+
+   # AttributeError: 'NoneType' object has no attribute 'Table'
+   "tensorflow_datasets/core/dataset_builder_beam_test.py"
+   "tensorflow_datasets/core/dataset_builders/adhoc_builder_test.py"
+   "tensorflow_datasets/core/split_builder_test.py"
+   "tensorflow_datasets/core/writer_test.py"

    # Requires `tensorflow_io` which is not packaged in `nixpkgs`.
    "tensorflow_datasets/core/features/audio_feature_test.py"
    "tensorflow_datasets/image/lsun_test.py"

    # Fails with `TypeError: Constant constructor takes either 0 or 2 positional arguments`
    # deep in TF AutoGraph. Doesn't reproduce in Docker with Ubuntu 22.04 => might be related
    # to the differences in some of the dependencies?
···
    "tensorflow_datasets/text/c4_utils_test.py"
  ];

+ meta = {
    description = "Library of datasets ready to use with TensorFlow";
    homepage = "https://www.tensorflow.org/datasets/overview";
+   changelog = "https://github.com/tensorflow/datasets/releases/tag/v${version}";
+   license = lib.licenses.asl20;
+   maintainers = with lib.maintainers; [ ndl ];
  };
}
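Two skip mechanisms are combined above, and the distinction is worth noting: `disabledTestPaths` excludes whole files, while the new `pytestFlagsArray` entries use pytest's `--deselect` to drop individual test nodes so the rest of a file still runs. Schematically (the node id below is a placeholder, not from the commit):

  # Skip an entire test file:
  disabledTestPaths = [ "tensorflow_datasets/some/failing_test.py" ];

  # Skip a single test node, keeping the file's other tests (hypothetical id):
  pytestFlagsArray = [ "--deselect=tensorflow_datasets/some/failing_test.py::TestClass::test_case" ];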
+1 -1
pkgs/development/python-modules/tensorflow-metadata/build.patch
  diff --git a/setup.py b/setup.py
- index 7a09b2f..94c5aa6 100644
  --- a/setup.py
  +++ b/setup.py
  @@ -141,8 +141,4 @@ setup(
  diff --git a/setup.py b/setup.py
+ index 30ac370..a05812d 100644
  --- a/setup.py
  +++ b/setup.py
  @@ -141,8 +141,4 @@ setup(
+6 -1
pkgs/development/python-modules/tensorflow-metadata/default.nix
···
  # has no tests
  doCheck = false;

- pythonImportsCheck = [ "tensorflow_metadata" ];

  meta = {
    description = "Standard representations for metadata that are useful when training machine learning models with TensorFlow";
    homepage = "https://github.com/tensorflow/metadata";
    license = lib.licenses.asl20;
    maintainers = with lib.maintainers; [ ndl ];
  };
···
  # has no tests
  doCheck = false;

+ pythonImportsCheck = [
+   "tensorflow_metadata"
+   "tensorflow_metadata.proto.v0"
+   "google.protobuf.runtime_version"
+ ];

  meta = {
    description = "Standard representations for metadata that are useful when training machine learning models with TensorFlow";
    homepage = "https://github.com/tensorflow/metadata";
+   changelog = "https://github.com/tensorflow/metadata/releases/tag/v${version}";
    license = lib.licenses.asl20;
    maintainers = with lib.maintainers; [ ndl ];
  };
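`pythonImportsCheck` is a post-install smoke test: the hook imports each listed module from the built output. The two new entries exercise the generated protobuf stubs, which only import when the `protobuf` runtime in the closure is new enough to provide `google.protobuf.runtime_version`. Roughly what the hook executes, as a sketch (the real logic lives in nixpkgs' `pythonImportsCheckHook`):

  pythonImportsCheckPhase = ''
    for module in tensorflow_metadata tensorflow_metadata.proto.v0 google.protobuf.runtime_version; do
      python -c "import $module"
    done
  '';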