diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7d881e334a4..463adb77a40 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -16,13 +16,13 @@ repos:
         files: ^xarray/
   - repo: https://github.com/charliermarsh/ruff-pre-commit
     # Ruff version.
-    rev: 'v0.0.237'
+    rev: 'v0.0.241'
     hooks:
       - id: ruff
         args: ["--fix"]
   # https://github.com/python/black#version-control-integration
   - repo: https://github.com/psf/black
-    rev: 22.12.0
+    rev: 23.1.0
     hooks:
       - id: black
       - id: black-jupyter
@@ -31,7 +31,7 @@ repos:
     hooks:
       - id: blackdoc
         exclude: "generate_aggregations.py"
-        additional_dependencies: ["black==22.12.0"]
+        additional_dependencies: ["black==23.1.0"]
       - id: blackdoc-autoupdate-black
   - repo: https://github.com/pre-commit/mirrors-mypy
     rev: v0.991
diff --git a/doc/conf.py b/doc/conf.py
index b5e08b81b46..c916fde5760 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -436,7 +436,6 @@ def update_videos(app: Sphinx):
 
     items = []
     for video in videos:
-
         authors = " | ".join(video["authors"])
         item = f"""
 .. grid-item-card:: {" ".join(video["title"].split())}
diff --git a/doc/examples/apply_ufunc_vectorize_1d.ipynb b/doc/examples/apply_ufunc_vectorize_1d.ipynb
index d1d6a52919c..68d011d0725 100644
--- a/doc/examples/apply_ufunc_vectorize_1d.ipynb
+++ b/doc/examples/apply_ufunc_vectorize_1d.ipynb
@@ -663,7 +663,6 @@
     "\n",
     "\n",
     "def xr_interp(data, dim, newdim):\n",
-    "\n",
     "    interped = xr.apply_ufunc(\n",
     "        interp1d_np_gufunc,  # first the function\n",
     "        data,  # now arguments in the order expected by 'interp1_np'\n",
diff --git a/doc/whats-new.rst b/doc/whats-new.rst
index b7667b6078e..3d1669589f2 100644
--- a/doc/whats-new.rst
+++ b/doc/whats-new.rst
@@ -20,9 +20,9 @@ What's New
 v2023.02.0 (Feb 7, 2023)
 ------------------------
 
-This release brings a major upgrade to :py:func:`xarray.concat`, bug fixes and 
-a bump in supported dependency versions. Thanks to our 9 contributors: 
-Aron Gergely, Deepak Cherian, Illviljan, James Bourbeau, Joe Hamman, 
+This release brings a major upgrade to :py:func:`xarray.concat`, bug fixes and
+a bump in supported dependency versions. Thanks to our 9 contributors:
+Aron Gergely, Deepak Cherian, Illviljan, James Bourbeau, Joe Hamman,
 Justus Magin, Kai Mühlbauer, Ken Mankoff, Spencer Clark.
 
 Breaking changes
diff --git a/xarray/backends/cfgrib_.py b/xarray/backends/cfgrib_.py
index 4ab4c73ed07..4c7d6a65e8f 100644
--- a/xarray/backends/cfgrib_.py
+++ b/xarray/backends/cfgrib_.py
@@ -119,7 +119,6 @@ def open_dataset(
         squeeze=True,
         time_dims=("time", "step"),
     ):
-
         filename_or_obj = _normalize_path(filename_or_obj)
         store = CfGribDataStore(
             filename_or_obj,
diff --git a/xarray/backends/h5netcdf_.py b/xarray/backends/h5netcdf_.py
index 51d79005b7f..c4f75672173 100644
--- a/xarray/backends/h5netcdf_.py
+++ b/xarray/backends/h5netcdf_.py
@@ -401,7 +401,6 @@ def open_dataset(
         phony_dims=None,
         decode_vlen_strings=True,
     ):
-
         filename_or_obj = _normalize_path(filename_or_obj)
         store = H5NetCDFStore.open(
             filename_or_obj,
diff --git a/xarray/backends/netCDF4_.py b/xarray/backends/netCDF4_.py
index f1ca560dee1..0c6e083158d 100644
--- a/xarray/backends/netCDF4_.py
+++ b/xarray/backends/netCDF4_.py
@@ -573,7 +573,6 @@ def open_dataset(
         lock=None,
         autoclose=False,
     ):
-
         filename_or_obj = _normalize_path(filename_or_obj)
         store = NetCDF4DataStore.open(
             filename_or_obj,
diff --git a/xarray/backends/pseudonetcdf_.py b/xarray/backends/pseudonetcdf_.py
index fc8bf2c81b3..ae8f90e3a44 100644
--- a/xarray/backends/pseudonetcdf_.py
+++ b/xarray/backends/pseudonetcdf_.py
@@ -156,7 +156,6 @@ def open_dataset(
         lock=None,
         **format_kwargs,
     ):
-
         filename_or_obj = _normalize_path(filename_or_obj)
         store = PseudoNetCDFDataStore.open(
             filename_or_obj, lock=lock, mode=mode, **format_kwargs
diff --git a/xarray/backends/pydap_.py b/xarray/backends/pydap_.py
index cba31d7e697..df26a03d790 100644
--- a/xarray/backends/pydap_.py
+++ b/xarray/backends/pydap_.py
@@ -178,7 +178,6 @@ def open_dataset(
         verify=None,
         user_charset=None,
     ):
-
         store = PydapDataStore.open(
             url=filename_or_obj,
             application=application,
diff --git a/xarray/backends/scipy_.py b/xarray/backends/scipy_.py
index 12f88b02f47..651aebce2ce 100644
--- a/xarray/backends/scipy_.py
+++ b/xarray/backends/scipy_.py
@@ -266,7 +266,6 @@ class ScipyBackendEntrypoint(BackendEntrypoint):
     url = "https://docs.xarray.dev/en/stable/generated/xarray.backends.ScipyBackendEntrypoint.html"
 
     def guess_can_open(self, filename_or_obj):
-
         magic_number = try_read_magic_number_from_file_or_path(filename_or_obj)
         if magic_number is not None and magic_number.startswith(b"\x1f\x8b"):
             with gzip.open(filename_or_obj) as f:
@@ -296,7 +295,6 @@ def open_dataset(
         mmap=None,
         lock=None,
     ):
-
         filename_or_obj = _normalize_path(filename_or_obj)
         store = ScipyDataStore(
             filename_or_obj, mode=mode, format=format, group=group, mmap=mmap, lock=lock
diff --git a/xarray/backends/zarr.py b/xarray/backends/zarr.py
index e30e7e9f4d8..428dd3818ad 100644
--- a/xarray/backends/zarr.py
+++ b/xarray/backends/zarr.py
@@ -882,7 +882,6 @@ def open_dataset(
         stacklevel=3,
         zarr_version=None,
     ):
-
         filename_or_obj = _normalize_path(filename_or_obj)
         store = ZarrStore.open_group(
             filename_or_obj,
diff --git a/xarray/convert.py b/xarray/convert.py
index 629f18ed6b9..5863352ae41 100644
--- a/xarray/convert.py
+++ b/xarray/convert.py
@@ -115,10 +115,8 @@ def set_cdms2_attrs(var, attrs):
 
     # Curvilinear and unstructured grids
     if dataarray.name not in dataarray.coords:
-
         cdms2_axes = {}
         for coord_name in set(dataarray.coords.keys()) - set(dataarray.dims):
-
             coord_array = dataarray.coords[coord_name].to_cdms2()
 
             cdms2_axis_cls = (
diff --git a/xarray/core/accessor_dt.py b/xarray/core/accessor_dt.py
index 5c67af16d99..118cbcb7ac5 100644
--- a/xarray/core/accessor_dt.py
+++ b/xarray/core/accessor_dt.py
@@ -201,7 +201,6 @@ def _strftime(values, date_format):
 
 
 class TimeAccessor(Generic[T_DataArray]):
-
     __slots__ = ("_obj",)
 
     def __init__(self, obj: T_DataArray) -> None:
diff --git a/xarray/core/alignment.py b/xarray/core/alignment.py
index 45d448dd113..05e2ca7eb8b 100644
--- a/xarray/core/alignment.py
+++ b/xarray/core/alignment.py
@@ -938,7 +938,6 @@ def reindex_like(
 
 
 def _get_broadcast_dims_map_common_coords(args, exclude):
-
     common_coords = {}
     dims_map = {}
     for arg in args:
@@ -954,7 +953,6 @@ def _get_broadcast_dims_map_common_coords(args, exclude):
 def _broadcast_helper(
     arg: T_DataWithCoords, exclude, dims_map, common_coords
 ) -> T_DataWithCoords:
-
     from xarray.core.dataarray import DataArray
     from xarray.core.dataset import Dataset
diff --git a/xarray/core/combine.py b/xarray/core/combine.py
index 790ff629b2a..946f71e5d28 100644
--- a/xarray/core/combine.py
+++ b/xarray/core/combine.py
@@ -53,7 +53,6 @@ def _infer_tile_ids_from_nested_list(entry, current_pos):
 
 
 def _ensure_same_types(series, dim):
-
     if series.dtype == object:
         types = set(series.map(type))
         if len(types) > 1:
@@ -80,17 +79,14 @@ def _ensure_same_types(series, dim):
 
 
 def _infer_concat_order_from_coords(datasets):
-
     concat_dims = []
     tile_ids = [() for ds in datasets]
 
     # All datasets have same variables because they've been grouped as such
     ds0 = datasets[0]
     for dim in ds0.dims:
-
         # Check if dim is a coordinate dimension
         if dim in ds0:
-
             # Need to read coordinate values to do ordering
             indexes = [ds._indexes.get(dim) for ds in datasets]
             if any(index is None for index in indexes):
@@ -105,7 +101,6 @@ def _infer_concat_order_from_coords(datasets):
             # If dimension coordinate values are same on every dataset then
             # should be leaving this dimension alone (it's just a "bystander")
             if not all(index.equals(indexes[0]) for index in indexes[1:]):
-
                 # Infer order datasets should be arranged in along this dim
                 concat_dims.append(dim)
@@ -261,7 +256,6 @@ def _combine_all_along_first_dim(
     join: JoinOptions = "outer",
     combine_attrs: CombineAttrsOptions = "drop",
 ):
-
     # Group into lines of datasets which must be combined along dim
     # need to sort by _new_tile_id first for groupby to work
     # TODO: is the sorted need?
@@ -345,7 +339,6 @@ def _nested_combine(
     join: JoinOptions = "outer",
     combine_attrs: CombineAttrsOptions = "drop",
 ):
-
     if len(datasets) == 0:
         return Dataset()
 
diff --git a/xarray/core/common.py b/xarray/core/common.py
index 095d15e32f1..3a73f463ea9 100644
--- a/xarray/core/common.py
+++ b/xarray/core/common.py
@@ -1747,7 +1747,6 @@ def ones_like(
 def get_chunksizes(
     variables: Iterable[Variable],
 ) -> Mapping[Any, tuple[int, ...]]:
-
     chunks: dict[Any, tuple[int, ...]] = {}
     for v in variables:
         if hasattr(v._data, "chunks"):
diff --git a/xarray/core/computation.py b/xarray/core/computation.py
index 2b3cdaeb143..d0ba083cf0c 100644
--- a/xarray/core/computation.py
+++ b/xarray/core/computation.py
@@ -156,7 +156,6 @@ def to_gufunc_string(self, exclude_dims=frozenset()):
 
         # enumerate input_core_dims contained in exclude_dims to make them unique
         if exclude_dims:
-
             exclude_dims = [self.dims_map[dim] for dim in exclude_dims]
 
             counter = Counter()
@@ -555,7 +554,6 @@ def apply_groupby_func(func, *args):
 def unified_dim_sizes(
     variables: Iterable[Variable], exclude_dims: AbstractSet = frozenset()
 ) -> dict[Hashable, int]:
-
     dim_sizes: dict[Hashable, int] = {}
 
     for var in variables:
diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py
index d82a3e06a56..f6db654bdf6 100644
--- a/xarray/core/dataarray.py
+++ b/xarray/core/dataarray.py
@@ -395,7 +395,6 @@ def __init__(
 
             # try to fill in arguments from data if they weren't supplied
             if coords is None:
-
                 if isinstance(data, DataArray):
                     coords = data.coords
                 elif isinstance(data, pd.Series):
diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py
index 7e672bbcc06..57209c61f48 100644
--- a/xarray/core/dataset.py
+++ b/xarray/core/dataset.py
@@ -6768,7 +6768,6 @@ def shift(
         fill_value: Any = xrdtypes.NA,
         **shifts_kwargs: int,
     ) -> T_Dataset:
-
         """Shift this dataset by an offset along one or more dimensions.
 
         Only data variables are moved; coordinates stay in place. This is
diff --git a/xarray/core/duck_array_ops.py b/xarray/core/duck_array_ops.py
index e09ae4c2449..96baf7f96cd 100644
--- a/xarray/core/duck_array_ops.py
+++ b/xarray/core/duck_array_ops.py
@@ -492,7 +492,6 @@ def datetime_to_numeric(array, offset=None, datetime_unit=None, dtype=float):
 
     # Convert np.NaT to np.nan
     elif array.dtype.kind in "mM":
-
         # Convert to specified timedelta units.
         if datetime_unit:
             array = array / np.timedelta64(1, datetime_unit)
diff --git a/xarray/core/groupby.py b/xarray/core/groupby.py
index 45227fd9c2b..5bfa0229af5 100644
--- a/xarray/core/groupby.py
+++ b/xarray/core/groupby.py
@@ -46,7 +46,6 @@
 
 
 def check_reduce_dims(reduce_dims, dimensions):
-
     if reduce_dims is not ...:
         if is_scalar(reduce_dims):
             reduce_dims = [reduce_dims]
@@ -1208,7 +1207,6 @@ class DataArrayGroupBy(  # type: ignore[misc]
 
 
 class DatasetGroupByBase(GroupBy["Dataset"], DatasetGroupbyArithmetic):
-
     __slots__ = ()
 
     _dims: Frozen[Hashable, int] | None
diff --git a/xarray/core/indexing.py b/xarray/core/indexing.py
index 5448c18d8e9..becf1554453 100644
--- a/xarray/core/indexing.py
+++ b/xarray/core/indexing.py
@@ -1092,7 +1092,6 @@ def _logical_any(args):
 
 
 def _masked_result_drop_slice(key, data=None):
-
     key = (k for k in key if not isinstance(k, slice))
     chunks_hint = getattr(data, "chunks", None)
@@ -1345,7 +1344,6 @@ def __init__(self, array):
         self.array = array
 
     def __getitem__(self, key):
-
         if not isinstance(key, VectorizedIndexer):
             # if possible, short-circuit when keys are effectively slice(None)
             # This preserves dask name and passes lazy array equivalence checks
diff --git a/xarray/core/merge.py b/xarray/core/merge.py
index 740e3c8c4e2..6cb758851b4 100644
--- a/xarray/core/merge.py
+++ b/xarray/core/merge.py
@@ -174,7 +174,7 @@ def _assert_prioritized_valid(
     indexes: dict[int, Index] = {}
 
     for name, elements_list in grouped.items():
-        for (_, index) in elements_list:
+        for _, index in elements_list:
             if index is not None:
                 grouped_by_index[id(index)].append(name)
                 indexes[id(index)] = index
diff --git a/xarray/core/missing.py b/xarray/core/missing.py
index 97703b576fa..1caa79a7dfd 100644
--- a/xarray/core/missing.py
+++ b/xarray/core/missing.py
@@ -80,7 +80,6 @@ class NumpyInterpolator(BaseInterpolator):
     """
 
     def __init__(self, xi, yi, method="linear", fill_value=None, period=None):
-
         if method != "linear":
             raise ValueError("only method `linear` is valid for the NumpyInterpolator")
diff --git a/xarray/core/parallel.py b/xarray/core/parallel.py
index 38ac1532f29..af3bc9340c6 100644
--- a/xarray/core/parallel.py
+++ b/xarray/core/parallel.py
@@ -33,7 +33,6 @@ def assert_chunks_compatible(a: Dataset, b: Dataset):
 def check_result_variables(
     result: DataArray | Dataset, expected: Mapping[str, Any], kind: str
 ):
-
     if kind == "coords":
         nice_str = "coordinate"
     elif kind == "data_vars":
diff --git a/xarray/core/resample.py b/xarray/core/resample.py
index 9d67fbbe9e3..f8b2a049aed 100644
--- a/xarray/core/resample.py
+++ b/xarray/core/resample.py
@@ -41,7 +41,6 @@ def __init__(
         resample_dim: Hashable | None = None,
         **kwargs,
     ) -> None:
-
         if dim == resample_dim:
             raise ValueError(
                 f"Proxy resampling dimension ('{resample_dim}') "
@@ -57,7 +56,6 @@ def _flox_reduce(
         keep_attrs: bool | None = None,
         **kwargs,
     ) -> T_Xarray:
-
         from xarray.core.dataarray import DataArray
 
         kwargs.setdefault("method", "cohorts")
diff --git a/xarray/core/rolling.py b/xarray/core/rolling.py
index e1a475b6910..8b9f31bfdfd 100644
--- a/xarray/core/rolling.py
+++ b/xarray/core/rolling.py
@@ -132,7 +132,8 @@ def _reduce_method(  # type: ignore[misc]
         name: str, fillna: Any, rolling_agg_func: Callable | None = None
     ) -> Callable[..., T_Xarray]:
         """Constructs reduction methods built on a numpy reduction function (e.g. sum),
-        a bottleneck reduction function (e.g. move_sum), or a Rolling reduction (_mean)."""
+        a bottleneck reduction function (e.g. move_sum), or a Rolling reduction (_mean).
+ """ if rolling_agg_func: array_agg_func = None else: @@ -141,7 +142,6 @@ def _reduce_method( # type: ignore[misc] bottleneck_move_func = getattr(bottleneck, "move_" + name, None) def method(self, keep_attrs=None, **kwargs): - keep_attrs = self._get_keep_attrs(keep_attrs) return self._numpy_or_bottleneck_reduce( @@ -272,7 +272,7 @@ def __iter__(self) -> Iterator[tuple[DataArray, DataArray]]: starts = stops - window0 starts[: window0 - offset] = 0 - for (label, start, stop) in zip(self.window_labels, starts, stops): + for label, start, stop in zip(self.window_labels, starts, stops): window = self.obj.isel({dim0: slice(start, stop)}) counts = window.count(dim=[dim0]) diff --git a/xarray/core/utils.py b/xarray/core/utils.py index d4a1727887b..08625fe7d95 100644 --- a/xarray/core/utils.py +++ b/xarray/core/utils.py @@ -863,7 +863,6 @@ def drop_dims_from_indexers( return indexers elif missing_dims == "warn": - # don't modify input indexers = dict(indexers) @@ -912,7 +911,6 @@ def drop_missing_dims( return supplied_dims elif missing_dims == "warn": - invalid = set(supplied_dims) - set(dims) if invalid: warnings.warn( diff --git a/xarray/core/weighted.py b/xarray/core/weighted.py index 46fd8edbe1b..904c6a4d980 100644 --- a/xarray/core/weighted.py +++ b/xarray/core/weighted.py @@ -343,7 +343,6 @@ def _weighted_quantile_1d( skipna: bool, method: QUANTILE_METHODS = "linear", ) -> np.ndarray: - # This algorithm has been adapted from: # https://aakinshin.net/posts/weighted-quantiles/#reference-implementation is_nan = np.isnan(data) @@ -444,7 +443,6 @@ def _weighted_quantile_1d( return result def _implementation(self, func, dim, **kwargs): - raise NotImplementedError("Use `Dataset.weighted` or `DataArray.weighted`") def sum_of_weights( @@ -452,7 +450,6 @@ def sum_of_weights( dim: Dims = None, keep_attrs: bool | None = None, ) -> T_Xarray: - return self._implementation( self._sum_of_weights, dim=dim, keep_attrs=keep_attrs ) @@ -463,7 +460,6 @@ def sum_of_squares( skipna: bool | None = None, keep_attrs: bool | None = None, ) -> T_Xarray: - return self._implementation( self._sum_of_squares, dim=dim, skipna=skipna, keep_attrs=keep_attrs ) @@ -474,7 +470,6 @@ def sum( skipna: bool | None = None, keep_attrs: bool | None = None, ) -> T_Xarray: - return self._implementation( self._weighted_sum, dim=dim, skipna=skipna, keep_attrs=keep_attrs ) @@ -485,7 +480,6 @@ def mean( skipna: bool | None = None, keep_attrs: bool | None = None, ) -> T_Xarray: - return self._implementation( self._weighted_mean, dim=dim, skipna=skipna, keep_attrs=keep_attrs ) @@ -496,7 +490,6 @@ def var( skipna: bool | None = None, keep_attrs: bool | None = None, ) -> T_Xarray: - return self._implementation( self._weighted_var, dim=dim, skipna=skipna, keep_attrs=keep_attrs ) @@ -507,7 +500,6 @@ def std( skipna: bool | None = None, keep_attrs: bool | None = None, ) -> T_Xarray: - return self._implementation( self._weighted_std, dim=dim, skipna=skipna, keep_attrs=keep_attrs ) @@ -520,7 +512,6 @@ def quantile( keep_attrs: bool | None = None, skipna: bool = True, ) -> T_Xarray: - return self._implementation( self._weighted_quantile, q=q, dim=dim, skipna=skipna, keep_attrs=keep_attrs ) @@ -535,7 +526,6 @@ def __repr__(self) -> str: class DataArrayWeighted(Weighted["DataArray"]): def _implementation(self, func, dim, **kwargs) -> DataArray: - self._check_dim(dim) dataset = self.obj._to_temp_dataset() @@ -545,14 +535,12 @@ def _implementation(self, func, dim, **kwargs) -> DataArray: class DatasetWeighted(Weighted["Dataset"]): def 
-
         self._check_dim(dim)
 
         return self.obj.map(func, dim=dim, **kwargs)
 
 
 def _inject_docstring(cls, cls_name):
-
     cls.sum_of_weights.__doc__ = _SUM_OF_WEIGHTS_DOCSTRING.format(cls=cls_name)
 
     cls.sum.__doc__ = _WEIGHTED_REDUCE_DOCSTRING_TEMPLATE.format(
diff --git a/xarray/plot/dataarray_plot.py b/xarray/plot/dataarray_plot.py
index f18495e5e94..73d6370a45b 100644
--- a/xarray/plot/dataarray_plot.py
+++ b/xarray/plot/dataarray_plot.py
@@ -56,7 +56,6 @@ def _infer_line_data(
     darray: DataArray, x: Hashable | None, y: Hashable | None, hue: Hashable | None
 ) -> tuple[DataArray, DataArray, DataArray | None, str]:
-
     ndims = len(darray.dims)
 
     if x is not None and y is not None:
diff --git a/xarray/plot/dataset_plot.py b/xarray/plot/dataset_plot.py
index eda37aadffe..0d9898a6e9a 100644
--- a/xarray/plot/dataset_plot.py
+++ b/xarray/plot/dataset_plot.py
@@ -192,7 +192,6 @@ def newplotfunc(
         levels: ArrayLike | None = None,
         **kwargs: Any,
     ) -> Any:
-
         if args:
             # TODO: Deprecated since 2022.10:
             msg = "Using positional arguments is deprecated for plot methods, use keyword arguments instead."
diff --git a/xarray/plot/utils.py b/xarray/plot/utils.py
index 373b303def2..7afcaed142d 100644
--- a/xarray/plot/utils.py
+++ b/xarray/plot/utils.py
@@ -492,7 +492,6 @@ def get_axis(
 
 
 def _maybe_gca(**subplot_kws: Any) -> Axes:
-
     import matplotlib.pyplot as plt
 
     # can call gcf unconditionally: either it exists or would be created by plt.axes
@@ -597,7 +596,6 @@ def _resolve_intervals_1dplot(
 
     # Is it a step plot? (see matplotlib.Axes.step)
     if kwargs.get("drawstyle", "").startswith("steps-"):
-
         remove_drawstyle = False
 
         # Convert intervals to double points
@@ -618,7 +616,6 @@ def _resolve_intervals_1dplot(
 
     # Is it another kind of plot?
     else:
-
         # Convert intervals to mid points and adjust labels
         if _valid_other_type(xval, pd.Interval):
             xval = _interval_to_mid_points(xval)
@@ -720,7 +717,6 @@ def _is_numeric(arr):
 
 
 def _add_colorbar(primitive, ax, cbar_ax, cbar_kwargs, cmap_params):
-
     cbar_kwargs.setdefault("extend", cmap_params["extend"])
     if cbar_ax is None:
         cbar_kwargs.setdefault("ax", ax)
@@ -1319,7 +1315,6 @@ def _parse_size(
     data: DataArray | None,
     norm: tuple[float | None, float | None, bool] | Normalize | None,
 ) -> None | pd.Series:
-
     import matplotlib as mpl
 
     if data is None:
@@ -1708,7 +1703,6 @@ def _add_legend(
     legend_ax,
     plotfunc: str,
 ):
-
     primitive = primitive if isinstance(primitive, list) else [primitive]
 
     handles, labels = [], []
diff --git a/xarray/tests/__init__.py b/xarray/tests/__init__.py
index 9ff8de74ba3..ea556847ed8 100644
--- a/xarray/tests/__init__.py
+++ b/xarray/tests/__init__.py
@@ -175,7 +175,6 @@ def source_ndarray(array):
 
 @contextmanager
 def assert_no_warnings():
-
     with warnings.catch_warnings(record=True) as record:
         yield record
         assert len(record) == 0, "got unexpected warning(s)"
diff --git a/xarray/tests/test_accessor_dt.py b/xarray/tests/test_accessor_dt.py
index eecacd6fd23..aabdf6a5c75 100644
--- a/xarray/tests/test_accessor_dt.py
+++ b/xarray/tests/test_accessor_dt.py
@@ -69,7 +69,6 @@ def setup(self):
         ],
     )
     def test_field_access(self, field) -> None:
-
         if field in ["week", "weekofyear"]:
             data = self.times.isocalendar()["week"]
         else:
@@ -96,7 +95,6 @@ def test_field_access(self, field) -> None:
         ],
     )
     def test_isocalendar(self, field, pandas_field) -> None:
-
         # pandas isocalendar has dtypy UInt32Dtype, convert to Int64
         expected = pd.Index(getattr(self.times.isocalendar(), pandas_field).astype(int))
         expected = xr.DataArray(
@@ -403,7 +401,6 @@ def times_3d(times):
     "field", ["year", "month", "day", "hour", "dayofyear", "dayofweek"]
 )
 def test_field_access(data, field) -> None:
-
     result = getattr(data.time.dt, field)
     expected = xr.DataArray(
         getattr(xr.coding.cftimeindex.CFTimeIndex(data.time.values), field),
@@ -458,7 +455,6 @@ def test_calendar_dask_cftime() -> None:
 
 @requires_cftime
 def test_isocalendar_cftime(data) -> None:
-
     with pytest.raises(
         AttributeError, match=r"'CFTimeIndex' object has no attribute 'isocalendar'"
     ):
@@ -467,7 +463,6 @@ def test_isocalendar_cftime(data) -> None:
 
 @requires_cftime
 def test_date_cftime(data) -> None:
-
     with pytest.raises(
         AttributeError,
         match=r"'CFTimeIndex' object has no attribute `date`. Consider using the floor method instead, for instance: `.time.dt.floor\('D'\)`.",
diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py
index b49500bd00c..cfcc3c6f057 100644
--- a/xarray/tests/test_backends.py
+++ b/xarray/tests/test_backends.py
@@ -1234,7 +1234,6 @@ def test_refresh_from_disk(self) -> None:
 
         with create_tmp_file() as example_1_path:
             with create_tmp_file() as example_1_modified_path:
-
                 with open_example_dataset("example_1.nc") as example_1:
                     self.save(example_1, example_1_path)
@@ -1749,7 +1748,6 @@ def test_write_inconsistent_chunks(self) -> None:
 
 @requires_zarr
 class ZarrBase(CFEncodedBase):
-
     DIMENSION_KEY = "_ARRAY_DIMENSIONS"
     zarr_version = 2
     version_kwargs: dict[str, Any] = {}
@@ -1801,7 +1799,6 @@ def test_roundtrip_consolidated(self, consolidated) -> None:
             assert_identical(expected, actual)
 
     def test_read_non_consolidated_warning(self) -> None:
-
         if self.zarr_version > 2:
             pytest.xfail("consolidated metadata is not supported for zarr v3 yet")
@@ -2226,7 +2223,6 @@ def test_append_string_length_mismatch_raises(self, dtype) -> None:
             )
 
     def test_check_encoding_is_consistent_after_append(self) -> None:
-
         ds, ds_to_append, _ = create_append_test_data()
 
         # check encoding consistency
@@ -2250,7 +2246,6 @@ def test_check_encoding_is_consistent_after_append(self) -> None:
             )
 
     def test_append_with_new_variable(self) -> None:
-
         ds, ds_to_append, ds_with_new_var = create_append_test_data()
 
         # check append mode for new variable
@@ -2888,7 +2883,6 @@ def test_complex(self) -> None:
 
     @pytest.mark.parametrize("invalid_netcdf", [None, False])
     def test_complex_error(self, invalid_netcdf) -> None:
-
         import h5netcdf
 
         expected = Dataset({"x": ("y", np.ones(5) + 1j * np.ones(5))})
@@ -3244,7 +3238,6 @@ def skip_if_not_engine(engine):
 def test_open_mfdataset_manyfiles(
     readengine, nfiles, parallel, chunks, file_cache_maxsize
 ):
-
     # skip certain combinations
     skip_if_not_engine(readengine)
@@ -3273,7 +3266,6 @@ def test_open_mfdataset_manyfiles(
         parallel=parallel,
         chunks=chunks if (not chunks and readengine != "zarr") else "auto",
     ) as actual:
-
         # check that using open_mfdataset returns dask arrays for variables
         assert isinstance(actual["foo"].data, dask_array_type)
@@ -3329,7 +3321,6 @@ def setup_files_and_datasets(self, fuzz=0):
 
         with create_tmp_file() as tmpfile1:
             with create_tmp_file() as tmpfile2:
-
                 # save data to the temporary files
                 ds1.to_netcdf(tmpfile1)
                 ds2.to_netcdf(tmpfile2)
@@ -3489,7 +3480,6 @@ def test_common_coord_when_datavars_all(self) -> None:
             with open_mfdataset(
                 files, data_vars=opt, combine="nested", concat_dim="t"
             ) as ds:
-
                 coord_shape = ds[self.coord_name].shape
                 coord_shape1 = ds1[self.coord_name].shape
                 coord_shape2 = ds2[self.coord_name].shape
@@ -3508,7 +3498,6 @@ def test_common_coord_when_datavars_minimal(self) -> None:
             with open_mfdataset(
                 files, data_vars=opt, combine="nested", concat_dim="t"
             ) as ds:
-
                 coord_shape = ds[self.coord_name].shape
                 coord_shape1 = ds1[self.coord_name].shape
                 coord_shape2 = ds2[self.coord_name].shape
@@ -3520,7 +3509,6 @@ def test_common_coord_when_datavars_minimal(self) -> None:
             assert coord_shape2 == coord_shape
 
     def test_invalid_data_vars_value_should_fail(self) -> None:
-
         with self.setup_files_and_datasets() as (files, _):
             with pytest.raises(ValueError):
                 with open_mfdataset(files, data_vars="minimum", combine="by_coords"):  # type: ignore[arg-type]
@@ -4661,7 +4649,6 @@ def test_indexing(self) -> None:
             with pytest.warns(DeprecationWarning), xr.open_rasterio(
                 tmp_file, cache=False
             ) as actual:
-
                 # tests
                 # assert_allclose checks all data + coordinates
                 assert_allclose(actual, expected)
@@ -4777,7 +4764,6 @@ def test_caching(self) -> None:
         ) as (tmp_file, expected):
             # Cache is the default
             with pytest.warns(DeprecationWarning), xr.open_rasterio(tmp_file) as actual:
-
                 # This should cache everything
                 assert_allclose(actual, expected)
@@ -4795,7 +4781,6 @@ def test_chunks(self) -> None:
             with pytest.warns(DeprecationWarning), xr.open_rasterio(
                 tmp_file, chunks=(1, 2, 2)
             ) as actual:
-
                 import dask.array as da
 
                 assert isinstance(actual.data, da.Array)
@@ -5416,7 +5401,6 @@ def test_encode_zarr_attr_value() -> None:
 
 @requires_zarr
 def test_extract_zarr_variable_encoding() -> None:
-
     var = xr.Variable("x", [1, 2])
     actual = backends.zarr.extract_zarr_variable_encoding(var)
     assert "chunks" in actual
diff --git a/xarray/tests/test_backends_file_manager.py b/xarray/tests/test_backends_file_manager.py
index 528ee88d29a..cede3e66fcf 100644
--- a/xarray/tests/test_backends_file_manager.py
+++ b/xarray/tests/test_backends_file_manager.py
@@ -137,7 +137,6 @@ def test_file_manager_cache_repeated_open() -> None:
 
 def test_file_manager_cache_with_pickle(tmpdir) -> None:
-
     path = str(tmpdir.join("testing.txt"))
     with open(path, "w") as f:
         f.write("data")
diff --git a/xarray/tests/test_cftimeindex.py b/xarray/tests/test_cftimeindex.py
index 2049fd3915d..a676b1f07f1 100644
--- a/xarray/tests/test_cftimeindex.py
+++ b/xarray/tests/test_cftimeindex.py
@@ -1310,7 +1310,6 @@ def test_infer_freq(freq, calendar):
 @requires_cftime
 @pytest.mark.parametrize("calendar", _CFTIME_CALENDARS)
 def test_pickle_cftimeindex(calendar):
-
     idx = xr.cftime_range("2000-01-01", periods=3, freq="D", calendar=calendar)
     idx_pkl = pickle.loads(pickle.dumps(idx))
     assert (idx == idx_pkl).all()
diff --git a/xarray/tests/test_coarsen.py b/xarray/tests/test_coarsen.py
index a46d10bd0f6..d58361afdd3 100644
--- a/xarray/tests/test_coarsen.py
+++ b/xarray/tests/test_coarsen.py
@@ -252,7 +252,6 @@ def test_coarsen_da_reduce(da, window, name) -> None:
 class TestCoarsenConstruct:
     @pytest.mark.parametrize("dask", [True, False])
     def test_coarsen_construct(self, dask: bool) -> None:
-
         ds = Dataset(
             {
                 "vart": ("time", np.arange(48), {"a": "b"}),
diff --git a/xarray/tests/test_coding_times.py b/xarray/tests/test_coding_times.py
index 79eab1ccbf1..0746a949cc8 100644
--- a/xarray/tests/test_coding_times.py
+++ b/xarray/tests/test_coding_times.py
@@ -670,7 +670,6 @@ def test_decode_cf(calendar) -> None:
 
 def test_decode_cf_time_bounds() -> None:
-
     da = DataArray(
         np.arange(6, dtype="int64").reshape((3, 2)),
         coords={"time": [1, 2, 3]},
@@ -713,7 +712,6 @@ def test_decode_cf_time_bounds() -> None:
 
 @requires_cftime
 def test_encode_time_bounds() -> None:
-
     time = pd.date_range("2000-01-16", periods=1)
     time_bounds = pd.date_range("2000-01-01", periods=2, freq="MS")
     ds = Dataset(dict(time=time, time_bounds=time_bounds))
@@ -834,7 +832,6 @@ def test_encode_cf_datetime_overflow(shape) -> None:
 
 def test_encode_expected_failures() -> None:
-
     dates = pd.date_range("2000", periods=3)
     with pytest.raises(ValueError, match="invalid time units"):
         encode_cf_datetime(dates, units="days after 2000-01-01")
diff --git a/xarray/tests/test_combine.py b/xarray/tests/test_combine.py
index 53ec8f2f66c..ea1659e4539 100644
--- a/xarray/tests/test_combine.py
+++ b/xarray/tests/test_combine.py
@@ -1152,7 +1152,6 @@ def test_combine_by_coords_raises_for_differing_calendars():
 
 def test_combine_by_coords_raises_for_differing_types():
-
     # str and byte cannot be compared
     da_1 = DataArray([0], dims=["time"], coords=[["a"]], name="a").to_dataset()
coords=[["a"]], name="a").to_dataset() da_2 = DataArray([1], dims=["time"], coords=[[b"b"]], name="a").to_dataset() diff --git a/xarray/tests/test_concat.py b/xarray/tests/test_concat.py index eb3cd7fccc8..f60308f8863 100644 --- a/xarray/tests/test_concat.py +++ b/xarray/tests/test_concat.py @@ -952,7 +952,6 @@ def test_concat_fill_value(self, fill_value) -> None: @pytest.mark.parametrize("dtype", [str, bytes]) @pytest.mark.parametrize("dim", ["x1", "x2"]) def test_concat_str_dtype(self, dtype, dim) -> None: - data = np.arange(4).reshape([2, 2]) da1 = Dataset( @@ -1125,7 +1124,6 @@ def test_concat_combine_attrs_kwarg(self) -> None: @pytest.mark.parametrize("dtype", [str, bytes]) @pytest.mark.parametrize("dim", ["x1", "x2"]) def test_concat_str_dtype(self, dtype, dim) -> None: - data = np.arange(4).reshape([2, 2]) da1 = DataArray( @@ -1143,7 +1141,6 @@ def test_concat_str_dtype(self, dtype, dim) -> None: assert np.issubdtype(actual.x2.dtype, dtype) def test_concat_coord_name(self) -> None: - da = DataArray([0], dims="a") da_concat = concat([da, da], dim=DataArray([0, 1], dims="b")) assert list(da_concat.coords) == ["b"] @@ -1155,7 +1152,6 @@ def test_concat_coord_name(self) -> None: @pytest.mark.parametrize("attr1", ({"a": {"meta": [10, 20, 30]}}, {"a": [1, 2, 3]}, {})) @pytest.mark.parametrize("attr2", ({"a": [1, 2, 3]}, {})) def test_concat_attrs_first_variable(attr1, attr2) -> None: - arrs = [ DataArray([[1], [2]], dims=["x", "y"], attrs=attr1), DataArray([[3], [4]], dims=["x", "y"], attrs=attr2), diff --git a/xarray/tests/test_dataarray.py b/xarray/tests/test_dataarray.py index b1a16eb1dfa..836d30b60b8 100644 --- a/xarray/tests/test_dataarray.py +++ b/xarray/tests/test_dataarray.py @@ -1648,7 +1648,6 @@ def test_reindex_fill_value(self, fill_value) -> None: @pytest.mark.parametrize("dtype", [str, bytes]) def test_reindex_str_dtype(self, dtype) -> None: - data = DataArray( [1, 2], dims="x", coords={"x": np.array(["a", "b"], dtype=dtype)} ) @@ -1660,7 +1659,6 @@ def test_reindex_str_dtype(self, dtype) -> None: assert actual.dtype == expected.dtype def test_rename(self) -> None: - da = xr.DataArray( [1, 2, 3], dims="dim", name="name", coords={"coord": ("dim", [5, 6, 7])} ) @@ -2166,13 +2164,11 @@ def test_math_automatic_alignment(self) -> None: assert_identical(a - b, expected) def test_non_overlapping_dataarrays_return_empty_result(self) -> None: - a = DataArray(range(5), [("x", range(5))]) result = a.isel(x=slice(2)) + a.isel(x=slice(2, None)) assert len(result["x"]) == 0 def test_empty_dataarrays_return_empty_result(self) -> None: - a = DataArray(data=[]) result = a * a assert len(result["dim_0"]) == 0 @@ -2735,7 +2731,6 @@ def test_reduce_out(self) -> None: "axis, dim", zip([None, 0, [0], [0, 1]], [None, "x", ["x"], ["x", "y"]]) ) def test_quantile(self, q, axis, dim, skipna) -> None: - va = self.va.copy(deep=True) va[0, 0] = np.NaN @@ -2764,7 +2759,6 @@ def test_quantile_method(self, method) -> None: @pytest.mark.parametrize("method", ["midpoint", "lower"]) def test_quantile_interpolation_deprecated(self, method) -> None: - da = DataArray(self.va) q = [0.25, 0.5, 0.75] @@ -3015,7 +3009,6 @@ def test_align_without_indexes_errors(self) -> None: ) def test_align_str_dtype(self) -> None: - a = DataArray([0, 1], dims=["x"], coords={"x": ["a", "b"]}) b = DataArray([1, 2], dims=["x"], coords={"x": ["b", "c"]}) @@ -3589,7 +3582,6 @@ def test_to_dataset_split(self) -> None: assert_identical(expected, actual) def test_to_dataset_retains_keys(self) -> None: - # use dates as convenient non-str 
         import datetime
@@ -3852,7 +3844,6 @@ def test_dot_align_coords(self) -> None:
         assert_equal(expected2, actual2)
 
     def test_matmul(self) -> None:
-
         # copied from above (could make a fixture)
         x = np.linspace(-3, 3, 6)
         y = np.linspace(-3, 3, 5)
@@ -4158,7 +4149,6 @@ def test_pad_linear_ramp(self, end_values) -> None:
     @pytest.mark.parametrize("mode", ("reflect", "symmetric"))
     @pytest.mark.parametrize("reflect_type", (None, "even", "odd"))
     def test_pad_reflect(self, mode, reflect_type) -> None:
-
         ar = DataArray(np.arange(3 * 4 * 5).reshape(3, 4, 5))
         actual = ar.pad(
             dim_0=(1, 3), dim_2=(2, 2), mode=mode, reflect_type=reflect_type
@@ -5693,7 +5683,6 @@ def test_argmin_dim(
         nanindices_yz: dict[str, np.ndarray],
         nanindices_xyz: dict[str, np.ndarray],
     ) -> None:
-
         ar = xr.DataArray(
             x,
             dims=["x", "y", "z"],
@@ -5921,7 +5910,6 @@ def test_argmax_dim(
         nanindices_yz: dict[str, np.ndarray],
         nanindices_xyz: dict[str, np.ndarray],
     ) -> None:
-
         ar = xr.DataArray(
             x,
             dims=["x", "y", "z"],
diff --git a/xarray/tests/test_dataset.py b/xarray/tests/test_dataset.py
index a6650bca9c5..ecadc768727 100644
--- a/xarray/tests/test_dataset.py
+++ b/xarray/tests/test_dataset.py
@@ -501,7 +501,6 @@ def test_constructor_auto_align(self) -> None:
             Dataset({"a": a, "b": b, "e": e})
 
     def test_constructor_pandas_sequence(self) -> None:
-
         ds = self.make_example_math_dataset()
         pandas_objs = {
             var_name: ds[var_name].to_pandas() for var_name in ["foo", "bar"]
@@ -518,7 +517,6 @@ def test_constructor_pandas_sequence(self) -> None:
         assert_equal(ds, ds_based_on_pandas)
 
     def test_constructor_pandas_single(self) -> None:
-
         das = [
             DataArray(np.random.rand(4), dims=["a"]),  # series
             DataArray(np.random.rand(4, 3), dims=["a", "b"]),  # df
@@ -2320,7 +2318,6 @@ def test_align_non_unique(self) -> None:
             align(x, y)
 
     def test_align_str_dtype(self) -> None:
-
         a = Dataset({"foo": ("x", [0, 1])}, coords={"x": ["a", "b"]})
         b = Dataset({"foo": ("x", [1, 2])}, coords={"x": ["b", "c"]})
@@ -3677,7 +3674,6 @@ def test_to_stacked_array_dtype_dims(self) -> None:
         assert y.dims == ("x", "features")
 
     def test_to_stacked_array_to_unstacked_dataset(self) -> None:
-
         # single dimension: regression test for GH4049
         arr = xr.DataArray(np.arange(3), coords=[("x", [0, 1, 2])])
         data = xr.Dataset({"a": arr, "b": arr})
@@ -3954,7 +3950,6 @@ def test_setitem(self) -> None:
         )
 
     def test_setitem_pandas(self) -> None:
-
         ds = self.make_example_math_dataset()
         ds["x"] = np.arange(3)
         ds_copy = ds.copy()
@@ -4058,7 +4053,6 @@ def test_setitem_align_new_indexes(self) -> None:
 
     @pytest.mark.parametrize("dtype", [str, bytes])
     def test_setitem_str_dtype(self, dtype) -> None:
-
         ds = xr.Dataset(coords={"x": np.array(["x", "y"], dtype=dtype)})
         # test Dataset update
         ds["foo"] = xr.DataArray(np.array([0, 0]), dims=["x"])
@@ -4066,7 +4060,6 @@ def test_setitem_str_dtype(self, dtype) -> None:
         assert np.issubdtype(ds.x.dtype, dtype)
 
     def test_setitem_using_list(self) -> None:
-
         # assign a list of variables
         var1 = Variable(["dim1"], np.random.randn(8))
         var2 = Variable(["dim1"], np.random.randn(8))
@@ -4813,7 +4806,6 @@ def test_fillna(self) -> None:
         "func", [lambda x: x.clip(0, 1), lambda x: np.float64(1.0) * x, np.abs, abs]
     )
     def test_propagate_attrs(self, func) -> None:
-
         da = DataArray(range(5), name="a", attrs={"attr": "da"})
         ds = Dataset({"a": da}, attrs={"attr": "ds"})
@@ -5293,7 +5285,6 @@ def test_quantile_skipna(self, skipna) -> None:
 
     @pytest.mark.parametrize("method", ["midpoint", "lower"])
     def test_quantile_method(self, method) -> None:
-
         ds = create_test_data(seed=123)
         q = [0.25, 0.5, 0.75]
@@ -5305,7 +5296,6 @@ def test_quantile_method(self, method) -> None:
 
     @pytest.mark.parametrize("method", ["midpoint", "lower"])
     def test_quantile_interpolation_deprecated(self, method) -> None:
-
         ds = create_test_data(seed=123)
         q = [0.25, 0.5, 0.75]
@@ -5597,7 +5587,6 @@ def test_dataset_ellipsis_transpose_different_ordered_vars(self) -> None:
         assert list(result["b"].dims) == list("xwzy")
 
     def test_dataset_retains_period_index_on_transpose(self) -> None:
-
         ds = create_test_data()
         ds["time"] = pd.period_range("2000-01-01", periods=20)
@@ -6356,7 +6345,6 @@ def test_isin_dataset() -> None:
 def test_dataset_constructor_aligns_to_explicit_coords(
     unaligned_coords, coords
 ) -> None:
-
     a = xr.DataArray([1, 2, 3], dims=["x"], coords=unaligned_coords)
 
     expected = xr.Dataset(coords=coords)
@@ -6374,14 +6362,12 @@ def test_error_message_on_set_supplied() -> None:
 
 @pytest.mark.parametrize("unaligned_coords", ({"y": ("b", np.asarray([2, 1, 0]))},))
 def test_constructor_raises_with_invalid_coords(unaligned_coords) -> None:
-
     with pytest.raises(ValueError, match="not a subset of the DataArray dimensions"):
         xr.DataArray([1, 2, 3], dims=["x"], coords=unaligned_coords)
 
 
 @pytest.mark.parametrize("ds", [3], indirect=True)
 def test_dir_expected_attrs(ds) -> None:
-
     some_expected_attrs = {"pipe", "mean", "isnull", "var1", "dim2", "numbers"}
     result = dir(ds)
     assert set(result) >= some_expected_attrs
diff --git a/xarray/tests/test_distributed.py b/xarray/tests/test_distributed.py
index bd970173705..e13667af73a 100644
--- a/xarray/tests/test_distributed.py
+++ b/xarray/tests/test_distributed.py
@@ -90,7 +90,6 @@ def tmp_netcdf_filename(tmpdir):
 def test_dask_distributed_netcdf_roundtrip(
     loop, tmp_netcdf_filename, engine, nc_format
 ):
-
     if engine not in ENGINES:
         pytest.skip("engine not available")
@@ -98,7 +97,6 @@ def test_dask_distributed_netcdf_roundtrip(
 
     with cluster() as (s, [a, b]):
         with Client(s["address"], loop=loop):
-
             original = create_test_data().chunk(chunks)
 
             if engine == "scipy":
@@ -122,10 +120,8 @@ def test_dask_distributed_netcdf_roundtrip(
 def test_dask_distributed_write_netcdf_with_dimensionless_variables(
     loop, tmp_netcdf_filename
 ):
-
     with cluster() as (s, [a, b]):
         with Client(s["address"], loop=loop):
-
             original = xr.Dataset({"x": da.zeros(())})
             original.to_netcdf(tmp_netcdf_filename)
@@ -153,7 +149,6 @@ def test_open_mfdataset_can_open_files_with_cftime_index(tmp_path):
 def test_dask_distributed_read_netcdf_integration_test(
     loop, tmp_netcdf_filename, engine, nc_format
 ):
-
     if engine not in ENGINES:
         pytest.skip("engine not available")
@@ -161,7 +156,6 @@ def test_dask_distributed_read_netcdf_integration_test(
 
     with cluster() as (s, [a, b]):
         with Client(s["address"], loop=loop):
-
             original = create_test_data()
             original.to_netcdf(tmp_netcdf_filename, engine=engine, format=nc_format)
@@ -284,7 +278,6 @@ def f(x, lock=None):
         CombinedLock([HDF5_LOCK]),
         CombinedLock([HDF5_LOCK, Lock("filename.nc")]),
     ]:
-
         futures = c.map(f, list(range(10)), lock=lock)
         await c.gather(futures)
diff --git a/xarray/tests/test_duck_array_ops.py b/xarray/tests/test_duck_array_ops.py
index 2943bd71671..c873c7b76d3 100644
--- a/xarray/tests/test_duck_array_ops.py
+++ b/xarray/tests/test_duck_array_ops.py
@@ -425,7 +425,6 @@ def test_empty_axis_dtype():
 @pytest.mark.parametrize("skipna", [False, True])
 @pytest.mark.parametrize("aggdim", [None, "x"])
 def test_reduce(dim_num, dtype, dask, func, skipna, aggdim):
-
     if aggdim == "y" and dim_num < 2:
         pytest.skip("dim not in this test")
test") diff --git a/xarray/tests/test_formatting.py b/xarray/tests/test_formatting.py index 48917c9c19b..3cba5b965f9 100644 --- a/xarray/tests/test_formatting.py +++ b/xarray/tests/test_formatting.py @@ -565,7 +565,6 @@ def test_short_numpy_repr() -> None: def test_large_array_repr_length() -> None: - da = xr.DataArray(np.random.randn(100, 5, 1)) result = repr(da).splitlines() @@ -614,7 +613,7 @@ def test__mapping_repr(display_max_rows, n_vars, n_attr) -> None: attrs = {k: 2 for k in b} coords = {_c: np.array([0, 1]) for _c in c} data_vars = dict() - for (v, _c) in zip(a, coords.items()): + for v, _c in zip(a, coords.items()): data_vars[v] = xr.DataArray( name=v, data=np.array([3, 4]), @@ -625,7 +624,6 @@ def test__mapping_repr(display_max_rows, n_vars, n_attr) -> None: ds.attrs = attrs with xr.set_options(display_max_rows=display_max_rows): - # Parse the data_vars print and show only data_vars rows: summary = formatting.dataset_repr(ds).split("\n") summary = [v for v in summary if long_name in v] diff --git a/xarray/tests/test_groupby.py b/xarray/tests/test_groupby.py index 3b214f43d69..cec37560d8f 100644 --- a/xarray/tests/test_groupby.py +++ b/xarray/tests/test_groupby.py @@ -42,7 +42,6 @@ def array(dataset): def test_consolidate_slices() -> None: - assert _consolidate_slices([slice(3), slice(3, 5)]) == [slice(5)] assert _consolidate_slices([slice(2, 3), slice(3, 6)]) == [slice(2, 6)] assert _consolidate_slices([slice(2, 3, 1), slice(3, 6, 1)]) == [slice(2, 6, 1)] @@ -187,7 +186,6 @@ def func(arg1, arg2, arg3=0): def test_da_groupby_empty() -> None: - empty_array = xr.DataArray([], dims="dim") with pytest.raises(ValueError): @@ -195,7 +193,6 @@ def test_da_groupby_empty() -> None: def test_da_groupby_quantile() -> None: - array = xr.DataArray( data=[1, 2, 3, 4, 5, 6], coords={"x": [1, 1, 1, 2, 2, 2]}, dims="x" ) @@ -429,7 +426,6 @@ def test_ds_groupby_quantile() -> None: @pytest.mark.parametrize("as_dataset", [False, True]) def test_groupby_quantile_interpolation_deprecated(as_dataset) -> None: - array = xr.DataArray(data=[1, 2, 3, 4], coords={"x": [1, 1, 2, 2]}, dims="x") arr: xr.DataArray | xr.Dataset @@ -652,7 +648,6 @@ def test_groupby_none_group_name() -> None: def test_groupby_getitem(dataset) -> None: - assert_identical(dataset.sel(x="a"), dataset.groupby("x")["a"]) assert_identical(dataset.sel(z=1), dataset.groupby("z")[1]) @@ -905,7 +900,6 @@ def test_groupby_dataset_order() -> None: def test_groupby_dataset_fillna(): - ds = Dataset({"a": ("x", [np.nan, 1, np.nan, 3])}, {"x": [0, 1, 2, 3]}) expected = Dataset({"a": ("x", range(4))}, {"x": [0, 1, 2, 3]}) for target in [ds, expected]: @@ -1024,12 +1018,12 @@ def test_stack_groupby_unsorted_coord(self): assert_equal(actual2, expected2) def test_groupby_iter(self): - for ((act_x, act_dv), (exp_x, exp_ds)) in zip( + for (act_x, act_dv), (exp_x, exp_ds) in zip( self.dv.groupby("y"), self.ds.groupby("y") ): assert exp_x == act_x assert_identical(exp_ds["foo"], act_dv) - for ((_, exp_dv), act_dv) in zip(self.dv.groupby("x"), self.dv): + for (_, exp_dv), act_dv in zip(self.dv.groupby("x"), self.dv): assert_identical(exp_dv, act_dv) def test_groupby_properties(self): @@ -1446,7 +1440,6 @@ def test_groupby_bins_sort(self): assert_identical(actual, expected) def test_groupby_assign_coords(self): - array = DataArray([1, 2, 3, 4], {"c": ("x", [0, 0, 1, 1])}, dims="x") actual = array.groupby("c").assign_coords(d=lambda a: a.mean()) expected = array.copy() @@ -1990,7 +1983,6 @@ def test_resample_drop_nondim_coords(self): assert "tc" not in 
 
     def test_resample_old_api(self):
-
         times = pd.date_range("2000-01-01", freq="6H", periods=10)
         ds = Dataset(
             {
diff --git a/xarray/tests/test_indexes.py b/xarray/tests/test_indexes.py
index caf0f51135e..27b5cf2119c 100644
--- a/xarray/tests/test_indexes.py
+++ b/xarray/tests/test_indexes.py
@@ -174,7 +174,6 @@ def test_concat_periods(self):
 
     @pytest.mark.parametrize("dtype", [str, bytes])
     def test_concat_str_dtype(self, dtype) -> None:
-
         a = PandasIndex(np.array(["a"], dtype=dtype), "x", coord_dtype=dtype)
         b = PandasIndex(np.array(["b"], dtype=dtype), "x", coord_dtype=dtype)
         expected = PandasIndex(
diff --git a/xarray/tests/test_indexing.py b/xarray/tests/test_indexing.py
index b28aebe6830..8229db0c132 100644
--- a/xarray/tests/test_indexing.py
+++ b/xarray/tests/test_indexing.py
@@ -233,7 +233,6 @@ def test_indexer(
         test_indexer(mdata, {"one": "a"}, expected)
 
     def test_read_only_view(self) -> None:
-
         arr = DataArray(
             np.random.rand(3, 3),
             coords={"x": np.arange(3), "y": np.arange(3)},
@@ -710,7 +709,6 @@ def nonzero(x):
         np.arange(10) < 5,
     ]
     for i, j, k in itertools.product(indexers, repeat=3):
-
         if isinstance(j, np.ndarray) and j.dtype.kind == "b":  # match size
             j = np.arange(20) < 4
         if isinstance(k, np.ndarray) and k.dtype.kind == "b":
diff --git a/xarray/tests/test_interp.py b/xarray/tests/test_interp.py
index 386b26bbc7e..e66045e978d 100644
--- a/xarray/tests/test_interp.py
+++ b/xarray/tests/test_interp.py
@@ -29,7 +29,6 @@
 
 def get_example_data(case: int) -> xr.DataArray:
-
     if case == 0:
         # 2D
         x = np.linspace(0, 1, 100)
@@ -809,7 +808,6 @@ def test_interpolate_chunk_1d(
 
     # choose the data dimensions
     for data_dims in permutations(da.dims, data_ndim):
-
         # select only data_ndim dim
         da = da.isel(  # take the middle line
             {dim: len(da.coords[dim]) // 2 for dim in da.dims if dim not in data_dims}
diff --git a/xarray/tests/test_missing.py b/xarray/tests/test_missing.py
index 6f3d7a702dd..83ed1aace2f 100644
--- a/xarray/tests/test_missing.py
+++ b/xarray/tests/test_missing.py
@@ -98,8 +98,7 @@ def test_interpolate_pd_compat():
     frac_nans = [0, 0.5, 1]
     methods = ["linear", "nearest", "zero", "slinear", "quadratic", "cubic"]
 
-    for (shape, frac_nan, method) in itertools.product(shapes, frac_nans, methods):
-
+    for shape, frac_nan, method in itertools.product(shapes, frac_nans, methods):
         da, df = make_interpolate_example_data(shape, frac_nan)
 
         for dim in ["time", "x"]:
@@ -132,8 +131,7 @@ def test_interpolate_pd_compat_non_uniform_index():
     frac_nans = [0, 0.5, 1]
     methods = ["time", "index", "values"]
 
-    for (shape, frac_nan, method) in itertools.product(shapes, frac_nans, methods):
-
+    for shape, frac_nan, method in itertools.product(shapes, frac_nans, methods):
         da, df = make_interpolate_example_data(shape, frac_nan, non_uniform=True)
         for dim in ["time", "x"]:
             if method == "time" and dim != "time":
@@ -160,8 +158,7 @@ def test_interpolate_pd_compat_polynomial():
     frac_nans = [0, 0.5, 1]
     orders = [1, 2, 3]
 
-    for (shape, frac_nan, order) in itertools.product(shapes, frac_nans, orders):
-
+    for shape, frac_nan, order in itertools.product(shapes, frac_nans, orders):
         da, df = make_interpolate_example_data(shape, frac_nan)
 
         for dim in ["time", "x"]:
@@ -247,7 +244,6 @@ def test_interpolate_keep_attrs():
 
 def test_interpolate():
-
     vals = np.array([1, 2, 3, 4, 5, 6], dtype=np.float64)
     expected = xr.DataArray(vals, dims="x")
     mvals = vals.copy()
@@ -481,7 +477,6 @@ def test_ffill_bfill_dask(method):
 
 @requires_bottleneck
 def test_ffill_bfill_nonans():
-
     vals = np.array([1, 2, 3, 4, 5, 6], dtype=np.float64)
     expected = xr.DataArray(vals, dims="x")
@@ -494,7 +489,6 @@ def test_ffill_bfill_nonans():
 
 @requires_bottleneck
 def test_ffill_bfill_allnans():
-
     vals = np.full(6, np.nan, dtype=np.float64)
     expected = xr.DataArray(vals, dims="x")
@@ -722,7 +716,6 @@ def test_interpolators_complex_out_of_bounds():
         ("linear", NumpyInterpolator),
         ("linear", ScipyInterpolator),
     ]:
-
         f = interpolator(xi, yi, method=method)
         actual = f(x)
         assert_array_equal(actual, expected)
diff --git a/xarray/tests/test_plot.py b/xarray/tests/test_plot.py
index e7f0323e3b4..b4043afdf6a 100644
--- a/xarray/tests/test_plot.py
+++ b/xarray/tests/test_plot.py
@@ -1085,7 +1085,6 @@ def test_norm_sets_vmin_vmax(self) -> None:
             ["neither", "neither", "both", "max", "min"],
             [7, None, None, None, None],
         ):
-
             test_min = vmin if norm.vmin is None else norm.vmin
             test_max = vmax if norm.vmax is None else norm.vmax
@@ -1121,7 +1120,7 @@ def test_recover_from_seaborn_jet_exception(self) -> None:
 
     @pytest.mark.slow
     def test_build_discrete_cmap(self) -> None:
-        for (cmap, levels, extend, filled) in [
+        for cmap, levels, extend, filled in [
             ("jet", [0, 1], "both", False),
             ("hot", [-4, 4], "max", True),
         ]:
@@ -1282,7 +1281,6 @@ def test_can_pass_in_axis(self) -> None:
         self.pass_in_axis(self.plotmethod)
 
     def test_xyincrease_defaults(self) -> None:
-
         # With default settings the axis must be ordered regardless
         # of the coords order.
         self.plotfunc(DataArray(easy_array((3, 2)), coords=[[1, 2, 3], [1, 2]]))
@@ -1386,7 +1384,6 @@ def test_positional_coord_string(self) -> None:
         assert "y_long_name [y_units]" == ax.get_ylabel()
 
     def test_bad_x_string_exception(self) -> None:
-
         with pytest.raises(ValueError, match=r"x and y cannot be equal."):
             self.plotmethod(x="y", y="y")
@@ -1651,7 +1648,6 @@ def test_colormap_error_norm_and_vmin_vmax(self) -> None:
 
 @pytest.mark.slow
 class TestContourf(Common2dMixin, PlotTestCase):
-
     plotfunc = staticmethod(xplt.contourf)
 
     @pytest.mark.slow
@@ -1701,7 +1697,6 @@ def test_levels(self) -> None:
 
 @pytest.mark.slow
 class TestContour(Common2dMixin, PlotTestCase):
-
     plotfunc = staticmethod(xplt.contour)
 
     # matplotlib cmap.colors gives an rgbA ndarray
@@ -1711,7 +1706,6 @@ def _color_as_tuple(c):
         return tuple(c[:3])
 
     def test_colors(self) -> None:
-
         # with single color, we don't want rgb array
         artist = self.plotmethod(colors="k")
         assert artist.cmap.colors[0] == "k"
@@ -1728,7 +1722,6 @@ def test_colors(self) -> None:
         assert self._color_as_tuple(artist.cmap._rgba_over) == (0.0, 0.0, 1.0)
 
     def test_colors_np_levels(self) -> None:
-
         # https://github.com/pydata/xarray/issues/3284
         levels = np.array([-0.5, 0.0, 0.5, 1.0])
         artist = self.darray.plot.contour(levels=levels, colors=["k", "r", "w", "b"])
@@ -1761,7 +1754,6 @@ def test_single_level(self) -> None:
 
 class TestPcolormesh(Common2dMixin, PlotTestCase):
-
     plotfunc = staticmethod(xplt.pcolormesh)
 
     def test_primitive_artist_returned(self) -> None:
@@ -1839,7 +1831,6 @@ def test_interval_breaks_logspace(self) -> None:
 
 @pytest.mark.slow
 class TestImshow(Common2dMixin, PlotTestCase):
-
     plotfunc = staticmethod(xplt.imshow)
 
     @pytest.mark.slow
@@ -1859,7 +1850,6 @@ def test_default_aspect_is_auto(self) -> None:
 
     @pytest.mark.slow
     def test_cannot_change_mpl_aspect(self) -> None:
-
         with pytest.raises(ValueError, match=r"not available in xarray"):
             self.darray.plot.imshow(aspect="equal")
@@ -1980,7 +1970,6 @@ def test_origin_overrides_xyincrease(self) -> None:
 
 class TestSurface(Common2dMixin, PlotTestCase):
-
     plotfunc = staticmethod(xplt.surface)
     subplot_kws = {"projection": "3d"}
@@ -2204,7 +2193,6 @@ def test_can_set_norm(self) -> None:
 
     @pytest.mark.slow
     def test_figure_size(self) -> None:
-
         assert_array_equal(self.g.fig.get_size_inches(), (10, 3))
 
         g = xplt.FacetGrid(self.darray, col="z", size=6)
@@ -2513,7 +2501,6 @@ def test_facetgrid(self) -> None:
         ],
     )
     def test_add_guide(self, add_guide, hue_style, legend, colorbar):
-
         meta_data = _infer_meta_data(
             self.ds,
             x="x",
@@ -2628,7 +2615,6 @@ def test_add_guide(
         legend: bool,
         colorbar: bool,
     ) -> None:
-
         meta_data = _infer_meta_data(
             self.ds,
             x="A",
@@ -3100,7 +3086,6 @@ def test_get_axis_current() -> None:
 
 @requires_matplotlib
 def test_maybe_gca() -> None:
-
     with figure_context():
         ax = _maybe_gca(aspect=1)
 
@@ -3108,7 +3093,6 @@ def test_maybe_gca() -> None:
         assert ax.get_aspect() == 1
 
     with figure_context():
-
         # create figure without axes
         plt.figure()
         ax = _maybe_gca(aspect=1)
diff --git a/xarray/tests/test_plugins.py b/xarray/tests/test_plugins.py
index 8029eb3f228..2160b8d16ed 100644
--- a/xarray/tests/test_plugins.py
+++ b/xarray/tests/test_plugins.py
@@ -64,7 +64,6 @@ def test_broken_plugin() -> None:
 
 def test_remove_duplicates_warnings(dummy_duplicated_entrypoints) -> None:
-
     with pytest.warns(RuntimeWarning) as record:
         _ = plugins.remove_duplicates(dummy_duplicated_entrypoints)
@@ -112,7 +111,6 @@ def test_set_missing_parameters() -> None:
 
 def test_set_missing_parameters_raise_error() -> None:
-
     backend = DummyBackendEntrypointKwargs()
     with pytest.raises(TypeError):
         plugins.set_missing_parameters({"engine": backend})
diff --git a/xarray/tests/test_rolling.py b/xarray/tests/test_rolling.py
index f1b2ce94376..ddc193712ae 100644
--- a/xarray/tests/test_rolling.py
+++ b/xarray/tests/test_rolling.py
@@ -55,7 +55,6 @@ def test_rolling_repr(self, da) -> None:
 
     @requires_dask
     def test_repeated_rolling_rechunks(self) -> None:
-
         # regression test for GH3277, GH2514
         dat = DataArray(np.random.rand(7653, 300), dims=("day", "item"))
         dat_chunk = dat.chunk({"item": 20})
@@ -656,7 +655,6 @@ def test_rolling_construct(self, center, window) -> None:
         "name", ("sum", "mean", "std", "var", "min", "max", "median")
     )
     def test_rolling_reduce(self, ds, center, min_periods, window, name) -> None:
-
         if min_periods is not None and window < min_periods:
             min_periods = window
@@ -761,13 +759,11 @@ def test_raise_no_warning_dask_rolling_assert_close(self, ds, name) -> None:
 class TestDatasetRollingExp:
     @pytest.mark.parametrize("backend", ["numpy"], indirect=True)
     def test_rolling_exp(self, ds) -> None:
-
         result = ds.rolling_exp(time=10, window_type="span").mean()
         assert isinstance(result, Dataset)
 
     @pytest.mark.parametrize("backend", ["numpy"], indirect=True)
     def test_rolling_exp_keep_attrs(self, ds) -> None:
-
         attrs_global = {"attrs": "global"}
         attrs_z1 = {"attr": "z1"}
diff --git a/xarray/tests/test_sparse.py b/xarray/tests/test_sparse.py
index a5faa78dd38..f64ce9338d7 100644
--- a/xarray/tests/test_sparse.py
+++ b/xarray/tests/test_sparse.py
@@ -59,7 +59,6 @@ def __init__(self, meth, *args, **kwargs):
         self.kwargs = kwargs
 
     def __call__(self, obj):
-
         # cannot pass np.sum when using pytest-xdist
         kwargs = self.kwargs.copy()
         if "func" in self.kwargs:
diff --git a/xarray/tests/test_utils.py b/xarray/tests/test_utils.py
index ef4d054e422..0f6f353faf2 100644
--- a/xarray/tests/test_utils.py
+++ b/xarray/tests/test_utils.py
@@ -26,7 +26,6 @@ def new_method():
     "a, b, expected", [["a", "b", np.array(["a", "b"])], [1, 2, pd.Index([1, 2])]]
 )
 def test_maybe_coerce_to_str(a, b, expected):
-
     a = np.array([a])
     b = np.array([b])
     index = pd.Index(a).append(pd.Index(b))
@@ -38,7 +37,6 @@ def test_maybe_coerce_to_str(a, b, expected):
 
 def test_maybe_coerce_to_str_minimal_str_dtype():
-
     a = np.array(["a", "a_long_string"])
     index = pd.Index(["a"])
@@ -215,7 +213,6 @@ def test_hidden_key_dict():
 
 def test_either_dict_or_kwargs():
-
     result = either_dict_or_kwargs(dict(a=1), None, "foo")
     expected = dict(a=1)
     assert result == expected
diff --git a/xarray/tests/test_variable.py b/xarray/tests/test_variable.py
index 8eadf7d51fd..f656818c71f 100644
--- a/xarray/tests/test_variable.py
+++ b/xarray/tests/test_variable.py
@@ -452,7 +452,6 @@ def test_encoding_preserved(self):
             expected.copy(deep=True),
             expected.copy(deep=False),
         ]:
-
             assert_identical(expected.to_base_variable(), actual.to_base_variable())
 
             assert expected.encoding == actual.encoding
@@ -1761,7 +1760,6 @@ def raise_if_called(*args, **kwargs):
         "axis, dim", zip([None, 0, [0], [0, 1]], [None, "x", ["x"], ["x", "y"]])
     )
     def test_quantile(self, q, axis, dim, skipna):
-
         d = self.d.copy()
         d[0, 0] = np.NaN
@@ -1786,7 +1784,6 @@ def test_quantile_dask(self, q, axis, dim):
         "use_dask", [pytest.param(True, marks=requires_dask), False]
     )
     def test_quantile_method(self, method, use_dask) -> None:
-
         v = Variable(["x", "y"], self.d)
         if use_dask:
             v = v.chunk({"x": 2})
@@ -1806,7 +1803,6 @@ def test_quantile_method(self, method, use_dask) -> None:
 
     @pytest.mark.parametrize("method", ["midpoint", "lower"])
     def test_quantile_interpolation_deprecation(self, method) -> None:
-
         v = Variable(["x", "y"], self.d)
         q = np.array([0.25, 0.5, 0.75])
@@ -2400,7 +2396,6 @@ def test_concat_multiindex(self):
 
     @pytest.mark.parametrize("dtype", [str, bytes])
     def test_concat_str_dtype(self, dtype):
-
         a = IndexVariable("x", np.array(["a"], dtype=dtype))
         b = IndexVariable("x", np.array(["b"], dtype=dtype))
         expected = IndexVariable("x", np.array(["a", "b"], dtype=dtype))
diff --git a/xarray/tests/test_weighted.py b/xarray/tests/test_weighted.py
index f93192de926..e2530d41fbe 100644
--- a/xarray/tests/test_weighted.py
+++ b/xarray/tests/test_weighted.py
@@ -19,7 +19,6 @@
 
 @pytest.mark.parametrize("as_dataset", (True, False))
 def test_weighted_non_DataArray_weights(as_dataset: bool) -> None:
-
     data: DataArray | Dataset = DataArray([1, 2])
     if as_dataset:
         data = data.to_dataset(name="data")
@@ -31,7 +30,6 @@ def test_weighted_non_DataArray_weights(as_dataset: bool) -> None:
 @pytest.mark.parametrize("as_dataset", (True, False))
 @pytest.mark.parametrize("weights", ([np.nan, 2], [np.nan, np.nan]))
 def test_weighted_weights_nan_raises(as_dataset: bool, weights: list[float]) -> None:
-
     data: DataArray | Dataset = DataArray([1, 2])
     if as_dataset:
         data = data.to_dataset(name="data")
@@ -44,7 +42,6 @@ def test_weighted_weights_nan_raises(as_dataset: bool, weights: list[float]) ->
 @pytest.mark.parametrize("as_dataset", (True, False))
 @pytest.mark.parametrize("weights", ([np.nan, 2], [np.nan, np.nan]))
 def test_weighted_weights_nan_raises_dask(as_dataset, weights):
-
     data = DataArray([1, 2]).chunk({"dim_0": -1})
     if as_dataset:
         data = data.to_dataset(name="data")
@@ -86,7 +83,6 @@ def mean_func(ds):
     (([1, 2], 3), ([2, 0], 2), ([0, 0], np.nan), ([-1, 1], np.nan)),
 )
 def test_weighted_sum_of_weights_no_nan(weights, expected):
-
     da = DataArray([1, 2])
     weights = DataArray(weights)
     result = da.weighted(weights).sum_of_weights()
@@ -101,7 +97,6 @@ def test_weighted_sum_of_weights_no_nan(weights, expected):
     (([1, 2], 2), ([2, 0], np.nan), ([0, 0], np.nan), ([-1, 1], 1)),
 )
 def test_weighted_sum_of_weights_nan(weights, expected):
-
     da = DataArray([np.nan, 2])
     weights = DataArray(weights)
     result = da.weighted(weights).sum_of_weights()
@@ -142,7 +137,6 @@ def test_weighted_sum_equal_weights(da, factor, skipna):
     ("weights", "expected"), (([1, 2], 5), ([0, 2], 4), ([0, 0], 0))
 )
 def test_weighted_sum_no_nan(weights, expected):
-
     da = DataArray([1, 2])
     weights = DataArray(weights)
 
@@ -157,7 +151,6 @@ def test_weighted_sum_no_nan(weights, expected):
 )
 @pytest.mark.parametrize("skipna", (True, False))
 def test_weighted_sum_nan(weights, expected, skipna):
-
     da = DataArray([np.nan, 2])
     weights = DataArray(weights)
 
@@ -193,7 +186,6 @@ def test_weighted_mean_equal_weights(da, skipna, factor):
     ("weights", "expected"), (([4, 6], 1.6), ([1, 0], 1.0), ([0, 0], np.nan))
 )
 def test_weighted_mean_no_nan(weights, expected):
-
     da = DataArray([1, 2])
     weights = DataArray(weights)
     expected = DataArray(expected)
@@ -231,7 +223,6 @@ def test_weighted_quantile_no_nan(weights, expected):
 
 
 def test_weighted_quantile_zero_weights():
-
     da = DataArray([0, 1, 2, 3])
     weights = DataArray([1, 0, 1, 0])
     q = 0.75
@@ -352,7 +343,6 @@ def test_weighted_quantile_bool():
 
 @pytest.mark.parametrize("q", (-1, 1.1, (0.5, 1.1), ((0.2, 0.4), (0.6, 0.8))))
 def test_weighted_quantile_with_invalid_q(q):
-
     da = DataArray([1, 1.9, 2.2, 3, 3.7, 4.1, 5])
     q = np.asarray(q)
     weights = xr.ones_like(da)
@@ -370,7 +360,6 @@ def test_weighted_quantile_with_invalid_q(q):
 )
 @pytest.mark.parametrize("skipna", (True, False))
 def test_weighted_mean_nan(weights, expected, skipna):
-
     da = DataArray([np.nan, 2])
     weights = DataArray(weights)
 
@@ -400,7 +389,6 @@ def test_weighted_mean_bool():
     (([1, 2], 2 / 3), ([2, 0], 0), ([0, 0], 0), ([-1, 1], 0)),
 )
 def test_weighted_sum_of_squares_no_nan(weights, expected):
-
     da = DataArray([1, 2])
     weights = DataArray(weights)
     result = da.weighted(weights).sum_of_squares()
@@ -415,7 +403,6 @@ def test_weighted_sum_of_squares_no_nan(weights, expected):
     (([1, 2], 0), ([2, 0], 0), ([0, 0], 0), ([-1, 1], 0)),
 )
 def test_weighted_sum_of_squares_nan(weights, expected):
-
     da = DataArray([np.nan, 2])
     weights = DataArray(weights)
     result = da.weighted(weights).sum_of_squares()
@@ -447,7 +434,6 @@ def test_weighted_var_equal_weights(da, skipna, factor):
     ("weights", "expected"), (([4, 6], 0.24), ([1, 0], 0.0), ([0, 0], np.nan))
 )
 def test_weighted_var_no_nan(weights, expected):
-
     da = DataArray([1, 2])
     weights = DataArray(weights)
     expected = DataArray(expected)
@@ -461,7 +447,6 @@ def test_weighted_var_no_nan(weights, expected):
     ("weights", "expected"), (([4, 6], 0), ([1, 0], np.nan), ([0, 0], np.nan))
 )
 def test_weighted_var_nan(weights, expected):
-
     da = DataArray([np.nan, 2])
     weights = DataArray(weights)
     expected = DataArray(expected)
@@ -504,7 +489,6 @@ def test_weighted_std_equal_weights(da, skipna, factor):
     ("weights", "expected"), (([4, 6], np.sqrt(0.24)), ([1, 0], 0.0), ([0, 0], np.nan))
 )
 def test_weighted_std_no_nan(weights, expected):
-
     da = DataArray([1, 2])
     weights = DataArray(weights)
     expected = DataArray(expected)
@@ -518,7 +502,6 @@ def test_weighted_std_no_nan(weights, expected):
     ("weights", "expected"), (([4, 6], 0), ([1, 0], np.nan), ([0, 0], np.nan))
 )
 def test_weighted_std_nan(weights, expected):
-
     da = DataArray([np.nan, 2])
     weights = DataArray(weights)
     expected = DataArray(expected)
@@ -579,7 +562,6 @@ def expected_weighted(da, weights, dim, skipna, operation):
 
 
 def check_weighted_operations(data, weights, dim, skipna):
-
     # check sum of weights
     result = data.weighted(weights).sum_of_weights(dim)
     expected = expected_weighted(data, weights, dim, skipna, "sum_of_weights")
@@ -616,7 +598,6 @@ def check_weighted_operations(data, weights, dim, skipna):
 @pytest.mark.parametrize("skipna", (None, True, False))
 @pytest.mark.filterwarnings("ignore:invalid value encountered in sqrt")
 def test_weighted_operations_3D(dim, add_nans, skipna):
-
     dims = ("a", "b", "c")
     coords = dict(a=[0, 1, 2, 3], b=[0, 1, 2, 3], c=[0, 1, 2, 3])
 
@@ -642,7 +623,6 @@ def test_weighted_operations_3D(dim, add_nans, skipna):
 @pytest.mark.parametrize("add_nans", (True, False))
 @pytest.mark.parametrize("skipna", (None, True, False))
 def test_weighted_quantile_3D(dim, q, add_nans, skipna):
-
     dims = ("a", "b", "c")
     coords = dict(a=[0, 1, 2], b=[0, 1, 2, 3], c=[0, 1, 2, 3, 4])
 
@@ -722,7 +702,6 @@ def test_weighted_operations_nonequal_coords(
 def test_weighted_operations_different_shapes(
     shape_data, shape_weights, add_nans, skipna
 ):
-
     weights = DataArray(np.random.randn(*shape_weights))
 
     data = np.random.randn(*shape_data)
@@ -749,7 +728,6 @@ def test_weighted_operations_different_shapes(
 @pytest.mark.parametrize("as_dataset", (True, False))
 @pytest.mark.parametrize("keep_attrs", (True, False, None))
 def test_weighted_operations_keep_attr(operation, as_dataset, keep_attrs):
-
     weights = DataArray(np.random.randn(2, 2), attrs=dict(attr="weights"))
     data = DataArray(np.random.randn(2, 2))
 
@@ -795,7 +773,6 @@ def test_weighted_operations_keep_attr_da_in_ds(operation):
 @pytest.mark.parametrize("operation", ("sum_of_weights", "sum", "mean", "quantile"))
 @pytest.mark.parametrize("as_dataset", (True, False))
 def test_weighted_bad_dim(operation, as_dataset):
-
     data = DataArray(np.random.randn(2, 2))
     weights = xr.ones_like(data)
     if as_dataset:
diff --git a/xarray/util/deprecation_helpers.py b/xarray/util/deprecation_helpers.py
index c4c58356672..e9681bdf398 100644
--- a/xarray/util/deprecation_helpers.py
+++ b/xarray/util/deprecation_helpers.py
@@ -72,7 +72,6 @@ def func(a, *, b=2):
     """
 
     def _decorator(func):
-
         signature = inspect.signature(func)
 
         pos_or_kw_args = []
@@ -89,11 +88,9 @@ def _decorator(func):
 
         @wraps(func)
        def inner(*args, **kwargs):
-
             name = func.__name__
             n_extra_args = len(args) - len(pos_or_kw_args)
             if n_extra_args > 0:
-
                 extra_args = ", ".join(kwonly_args[:n_extra_args])
 
                 warnings.warn(
diff --git a/xarray/util/generate_aggregations.py b/xarray/util/generate_aggregations.py
index 69d1185611e..19701a26bd6 100644
--- a/xarray/util/generate_aggregations.py
+++ b/xarray/util/generate_aggregations.py
@@ -252,7 +252,6 @@ def __init__(
 
 
 class AggregationGenerator:
-
     _dim_docstring = _DIM_DOCSTRING
     _template_signature = TEMPLATE_REDUCTION_SIGNATURE
 
diff --git a/xarray/util/generate_ops.py b/xarray/util/generate_ops.py
index 0ca5954b056..02a3725f475 100644
--- a/xarray/util/generate_ops.py
+++ b/xarray/util/generate_ops.py
@@ -254,7 +254,6 @@ def _render_classbody(method_blocks, is_module):
 
 
 if __name__ == "__main__":
-
     option = sys.argv[1].lower() if len(sys.argv) == 2 else None
     if option not in {"--module", "--stubs"}:
         raise SystemExit(f"Usage: {sys.argv[0]} --module | --stubs")
diff --git a/xarray/util/print_versions.py b/xarray/util/print_versions.py
index d53f1aab65e..d470d9b0633 100755
--- a/xarray/util/print_versions.py
+++ b/xarray/util/print_versions.py
@@ -136,7 +136,7 @@ def show_versions(file=sys.stdout):
     ]
 
     deps_blob = []
-    for (modname, ver_f) in deps:
+    for modname, ver_f in deps:
         try:
             if modname in sys.modules:
                 mod = sys.modules[modname]