python311Packages.tensorflow-datasets: 4.9.7 -> 4.9.8 (#376040)

authored by Pol Dellaiera and committed by GitHub 6c679116 546a7aa6

+117 -93
+10 -17
pkgs/development/python-modules/array-record/default.nix
··· 7 7 fetchPypi, 8 8 absl-py, 9 9 etils, 10 - importlib-resources, 11 - typing-extensions, 12 - zipp, 13 10 }: 14 11 15 12 buildPythonPackage rec { 16 13 pname = "array-record"; 17 - version = "0.5.0"; 14 + version = "0.7.1"; 18 15 format = "wheel"; 19 16 20 - # As of 2023-10-31, PyPI includes wheels for Python 3.9, 3.10, and 3.11. 21 - disabled = pythonOlder "3.9" || pythonAtLeast "3.12"; 17 + disabled = pythonOlder "3.10" || pythonAtLeast "3.13"; 22 18 23 19 src = 24 20 let ··· 33 29 platform = "manylinux_2_17_x86_64.manylinux2014_x86_64"; 34 30 hash = 35 31 { 36 - cp39 = "sha256-BzMOVue7E1S1+5+XTcPELko81ujc9MbmqLhNsU7pqO0="; 37 - cp310 = "sha256-eUD9pQu9GsbV8MPD1MiF3Ihr+zYioSOo6P15hYIwPYo="; 38 - cp311 = "sha256-rAmkI3EIZPYiXrxFowfDC0Gf3kRw0uX0i6Kx6Zu+hNM="; 32 + cp310 = "sha256-JDaj1iJy1BQ7fHjmCbGQkNqG5rIRuwTwENbanM9a8hg="; 33 + cp311 = "sha256-QVynMK9t0BnEtgdfbJ5T3s7N02i0XD2siUSRxKtrI+M="; 34 + cp312 = "sha256-xJJGm6kLQ2/TzVYTrBtQ1Hqky1odHfbhe/g+PSSYt1c="; 39 35 } 40 36 .${pyShortVersion} or (throw "${pname} is missing hash for ${pyShortVersion}"); 41 37 }; 42 38 43 - propagatedBuildInputs = [ 39 + dependencies = [ 44 40 absl-py 45 41 etils 46 - importlib-resources 47 - typing-extensions 48 - zipp 49 - ]; 42 + ] ++ etils.optional-dependencies.epath; 50 43 51 44 pythonImportsCheck = [ "array_record" ]; 52 45 53 - meta = with lib; { 46 + meta = { 54 47 description = "ArrayRecord is a new file format derived from Riegeli, achieving a new frontier of IO efficiency"; 55 48 homepage = "https://github.com/google/array_record"; 56 - license = licenses.asl20; 57 - maintainers = with maintainers; [ GaetanLepage ]; 49 + license = lib.licenses.asl20; 50 + maintainers = with lib.maintainers; [ GaetanLepage ]; 58 51 platforms = [ "x86_64-linux" ]; 59 52 }; 60 53 }
+43 -20
pkgs/development/python-modules/dm-sonnet/default.nix
··· 1 1 { 2 2 lib, 3 3 buildPythonPackage, 4 - click, 4 + fetchFromGitHub, 5 + 6 + # build-system 7 + setuptools, 8 + 9 + # dependencies 5 10 dm-tree, 6 - docutils, 7 11 etils, 8 - fetchFromGitHub, 9 - fetchpatch, 10 12 numpy, 11 - pythonOlder, 12 13 tabulate, 14 + wrapt, 15 + 16 + # tests 17 + click, 18 + docutils, 19 + keras, 20 + pytestCheckHook, 13 21 tensorflow, 14 22 tensorflow-datasets, 15 - wrapt, 23 + tf-keras, 16 24 }: 17 25 18 26 buildPythonPackage rec { 19 27 pname = "dm-sonnet"; 20 28 version = "2.0.2"; 21 - format = "setuptools"; 22 - 23 - disabled = pythonOlder "3.6"; 29 + pyproject = true; 24 30 25 31 src = fetchFromGitHub { 26 32 owner = "deepmind"; 27 33 repo = "sonnet"; 28 - rev = "refs/tags/v${version}"; 34 + tag = "v${version}"; 29 35 hash = "sha256-WkloUbqSyPG3cbLG8ktsjdluACkCbUZ7t6rYWst8rs8="; 30 36 }; 31 37 32 - patches = [ 33 - (fetchpatch { 34 - name = "replace-np-bool-with-np-bool_.patch"; 35 - url = "https://github.com/deepmind/sonnet/commit/df5d099d4557a9a81a0eb969e5a81ed917bcd612.patch"; 36 - hash = "sha256-s7abl83osD4wa0ZhqgDyjqQ3gagwGYCdQifwFqhNp34="; 37 - }) 38 + build-system = [ 39 + setuptools 38 40 ]; 39 41 40 - propagatedBuildInputs = [ 42 + dependencies = [ 41 43 dm-tree 42 44 etils 43 45 numpy ··· 52 54 nativeCheckInputs = [ 53 55 click 54 56 docutils 57 + keras 58 + pytestCheckHook 55 59 tensorflow 56 60 tensorflow-datasets 61 + tf-keras 62 + ]; 63 + 64 + # ImportError: `keras.optimizers.legacy` is not supported in Keras 3 65 + preCheck = '' 66 + export TF_USE_LEGACY_KERAS=True 67 + ''; 68 + 69 + disabledTests = [ 70 + # AssertionError: 2 != 0 : 2 doctests failed 71 + "test_doctest_sonnet.functional" 72 + 73 + # AssertionError: Not equal to tolerance 74 + "testComputationAgainstNumPy1" 75 + 76 + # tensorflow.python.framework.errors_impl.InvalidArgumentError: cannot compute MatMul as input #1(zero-based) was expected to be a float tensor but is a half tensor [Op:MatMul] 77 + "testComputationAgainstNumPy0" 78 + 
"testComputationAgainstNumPy1" 57 79 ]; 58 80 59 81 pythonImportsCheck = [ "sonnet" ]; 60 82 61 - meta = with lib; { 83 + meta = { 62 84 description = "Library for building neural networks in TensorFlow"; 63 85 homepage = "https://github.com/deepmind/sonnet"; 64 - license = licenses.asl20; 65 - maintainers = with maintainers; [ onny ]; 86 + changelog = "https://github.com/google-deepmind/sonnet/releases/tag/v${version}"; 87 + license = lib.licenses.asl20; 88 + maintainers = with lib.maintainers; [ onny ]; 66 89 }; 67 90 }
-22
pkgs/development/python-modules/tensorflow-datasets/corruptions.patch
··· 1 - diff --git a/tensorflow_datasets/image_classification/corruptions.py b/tensorflow_datasets/image_classification/corruptions.py 2 - index 066c4460..cb9a6667 100644 3 - --- a/tensorflow_datasets/image_classification/corruptions.py 4 - +++ b/tensorflow_datasets/image_classification/corruptions.py 5 - @@ -35,7 +35,7 @@ FROST_FILENAMES = [] 6 - 7 - 8 - def _imagemagick_bin(): 9 - - return 'imagemagick' # pylint: disable=unreachable 10 - + return 'convert' # pylint: disable=unreachable 11 - 12 - 13 - # /////////////// Corruption Helpers /////////////// 14 - @@ -675,7 +675,7 @@ def spatter(x, severity=1): 15 - # ker = np.array([[-1,-2,-3],[-2,0,0],[-3,0,1]], dtype=np.float32) 16 - # ker -= np.mean(ker) 17 - ker = np.array([[-2, -1, 0], [-1, 1, 1], [0, 1, 2]]) 18 - - dist = cv2.filter2D(dist, cv2.CVX_8U, ker) 19 - + dist = cv2.filter2D(dist, cv2.CV_8U, ker) 20 - dist = cv2.blur(dist, (3, 3)).astype(np.float32) 21 - 22 - m = cv2.cvtColor(liquid_layer * dist, cv2.COLOR_GRAY2BGRA)
+57 -32
pkgs/development/python-modules/tensorflow-datasets/default.nix
··· 1 1 { 2 + lib, 3 + buildPythonPackage, 4 + fetchFromGitHub, 5 + fetchpatch, 6 + 7 + # dependencies 8 + array-record, 9 + dill, 10 + dm-tree, 11 + future, 12 + immutabledict, 13 + importlib-resources, 14 + numpy, 15 + promise, 16 + protobuf, 17 + psutil, 18 + requests, 19 + simple-parsing, 20 + six, 21 + tensorflow-metadata, 22 + termcolor, 23 + tqdm, 24 + 25 + # tests 2 26 apache-beam, 3 - array-record, 4 - attrs, 5 27 beautifulsoup4, 6 - buildPythonPackage, 7 28 click, 8 29 datasets, 9 - dill, 10 - dm-tree, 11 - fetchFromGitHub, 12 30 ffmpeg, 13 - future, 14 31 imagemagick, 15 - importlib-resources, 16 32 jax, 17 33 jaxlib, 18 34 jinja2, 19 35 langdetect, 20 - lib, 21 36 lxml, 22 37 matplotlib, 38 + mlcroissant, 23 39 mwparserfromhell, 24 40 mwxml, 25 41 networkx, 26 42 nltk, 27 - numpy, 28 43 opencv4, 29 44 pandas, 30 45 pillow, 31 - promise, 32 - protobuf, 33 - psutil, 34 46 pycocotools, 35 47 pydub, 36 48 pytest-xdist, 37 49 pytestCheckHook, 38 - requests, 39 50 scikit-image, 40 51 scipy, 41 - six, 52 + sortedcontainers, 42 53 tensorflow, 43 - tensorflow-metadata, 44 - termcolor, 45 54 tifffile, 46 - tqdm, 47 55 zarr, 48 56 }: 49 57 50 58 buildPythonPackage rec { 51 59 pname = "tensorflow-datasets"; 52 - version = "4.9.7"; 53 - format = "setuptools"; 60 + version = "4.9.8"; 61 + pyproject = true; 54 62 55 63 src = fetchFromGitHub { 56 64 owner = "tensorflow"; 57 65 repo = "datasets"; 58 66 tag = "v${version}"; 59 - hash = "sha256-pnut5z3rEYIOOhsQT6uVjPdy+SqftKptSQMwxbMXoKA="; 67 + hash = "sha256-nqveZ+8b0f5sGIn6WufKeA37yEsZjzhCIbCfwMZ9JOM="; 60 68 }; 61 69 62 70 patches = [ 63 - # addresses https://github.com/tensorflow/datasets/issues/3673 64 - ./corruptions.patch 71 + # mlcroissant uses encoding_formats, not encoding_format. 72 + # Backport https://github.com/tensorflow/datasets/pull/11037 until released. 
73 + (fetchpatch { 74 + url = "https://github.com/tensorflow/datasets/commit/92cbcff725a1036569a515cc3356aa8480740451.patch"; 75 + hash = "sha256-2hnMvQP83+eAJllce19aHujcoWQzUz3+LsasWCo4BtM="; 76 + }) 65 77 ]; 66 78 67 - propagatedBuildInputs = [ 79 + dependencies = [ 68 80 array-record 69 - attrs 70 81 dill 71 82 dm-tree 72 83 future 84 + immutabledict 73 85 importlib-resources 74 86 numpy 75 87 promise 76 88 protobuf 77 89 psutil 78 90 requests 91 + simple-parsing 79 92 six 80 93 tensorflow-metadata 81 94 termcolor ··· 97 110 langdetect 98 111 lxml 99 112 matplotlib 113 + mlcroissant 100 114 mwparserfromhell 101 115 mwxml 102 116 networkx ··· 110 124 pytestCheckHook 111 125 scikit-image 112 126 scipy 127 + sortedcontainers 113 128 tensorflow 114 129 tifffile 115 130 zarr 116 131 ]; 117 132 133 + pytestFlagsArray = [ 134 + # AttributeError: 'NoneType' object has no attribute 'Table' 135 + "--deselect=tensorflow_datasets/core/file_adapters_test.py::test_read_write" 136 + "--deselect=tensorflow_datasets/text/c4_wsrs/c4_wsrs_test.py::C4WSRSTest" 137 + ]; 138 + 118 139 disabledTestPaths = [ 119 140 # Sandbox violations: network access, filesystem write attempts outside of build dir, ... 120 141 "tensorflow_datasets/core/dataset_builder_test.py" ··· 126 147 "tensorflow_datasets/import_without_tf_test.py" 127 148 "tensorflow_datasets/proto/build_tf_proto_test.py" 128 149 "tensorflow_datasets/scripts/cli/build_test.py" 150 + "tensorflow_datasets/datasets/imagenet2012_corrupted/imagenet2012_corrupted_dataset_builder_test.py" 129 151 130 152 # Requires `pretty_midi` which is not packaged in `nixpkgs`. 131 153 "tensorflow_datasets/audio/groove.py" ··· 142 164 143 165 # Requires `gcld3` and `pretty_midi` which are not packaged in `nixpkgs`. 
144 166 "tensorflow_datasets/core/lazy_imports_lib_test.py" 167 + 168 + # AttributeError: 'NoneType' object has no attribute 'Table' 169 + "tensorflow_datasets/core/dataset_builder_beam_test.py" 170 + "tensorflow_datasets/core/dataset_builders/adhoc_builder_test.py" 171 + "tensorflow_datasets/core/split_builder_test.py" 172 + "tensorflow_datasets/core/writer_test.py" 145 173 146 174 # Requires `tensorflow_io` which is not packaged in `nixpkgs`. 147 175 "tensorflow_datasets/core/features/audio_feature_test.py" 148 176 "tensorflow_datasets/image/lsun_test.py" 149 177 150 - # Requires `envlogger` which is not packaged in `nixpkgs`. 151 - "tensorflow_datasets/rlds/locomotion/locomotion_test.py" 152 - "tensorflow_datasets/rlds/robosuite_panda_pick_place_can/robosuite_panda_pick_place_can_test.py" 153 - 154 178 # Fails with `TypeError: Constant constructor takes either 0 or 2 positional arguments` 155 179 # deep in TF AutoGraph. Doesn't reproduce in Docker with Ubuntu 22.04 => might be related 156 180 # to the differences in some of the dependencies? ··· 171 195 "tensorflow_datasets/text/c4_utils_test.py" 172 196 ]; 173 197 174 - meta = with lib; { 198 + meta = { 175 199 description = "Library of datasets ready to use with TensorFlow"; 176 200 homepage = "https://www.tensorflow.org/datasets/overview"; 177 - license = licenses.asl20; 178 - maintainers = with maintainers; [ ndl ]; 201 + changelog = "https://github.com/tensorflow/datasets/releases/tag/v${version}"; 202 + license = lib.licenses.asl20; 203 + maintainers = with lib.maintainers; [ ndl ]; 179 204 }; 180 205 }
+1 -1
pkgs/development/python-modules/tensorflow-metadata/build.patch
··· 1 1 diff --git a/setup.py b/setup.py 2 - index 7a09b2f..94c5aa6 100644 2 + index 30ac370..a05812d 100644 3 3 --- a/setup.py 4 4 +++ b/setup.py 5 5 @@ -141,8 +141,4 @@ setup(
+6 -1
pkgs/development/python-modules/tensorflow-metadata/default.nix
··· 45 45 # has no tests 46 46 doCheck = false; 47 47 48 - pythonImportsCheck = [ "tensorflow_metadata" ]; 48 + pythonImportsCheck = [ 49 + "tensorflow_metadata" 50 + "tensorflow_metadata.proto.v0" 51 + "google.protobuf.runtime_version" 52 + ]; 49 53 50 54 meta = { 51 55 description = "Standard representations for metadata that are useful when training machine learning models with TensorFlow"; 52 56 homepage = "https://github.com/tensorflow/metadata"; 57 + changelog = "https://github.com/tensorflow/metadata/releases/tag/v${version}"; 53 58 license = lib.licenses.asl20; 54 59 maintainers = with lib.maintainers; [ ndl ]; 55 60 };