diff --git a/doc/examples/apply_ufunc_vectorize_1d.ipynb b/doc/examples/apply_ufunc_vectorize_1d.ipynb index 68d011d0725..c2ab7271873 100644 --- a/doc/examples/apply_ufunc_vectorize_1d.ipynb +++ b/doc/examples/apply_ufunc_vectorize_1d.ipynb @@ -11,7 +11,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "This example will illustrate how to conveniently apply an unvectorized function `func` to xarray objects using `apply_ufunc`. `func` expects 1D numpy arrays and returns a 1D numpy array. Our goal is to coveniently apply this function along a dimension of xarray objects that may or may not wrap dask arrays with a signature.\n", + "This example will illustrate how to conveniently apply an unvectorized function `func` to xarray objects using `apply_ufunc`. `func` expects 1D numpy arrays and returns a 1D numpy array. Our goal is to conveniently apply this function along a dimension of xarray objects that may or may not wrap dask arrays with a signature.\n", "\n", "We will illustrate this using `np.interp`: \n", "\n", diff --git a/doc/user-guide/io.rst b/doc/user-guide/io.rst index ffded682035..9656a2ba973 100644 --- a/doc/user-guide/io.rst +++ b/doc/user-guide/io.rst @@ -819,7 +819,7 @@ with ``mode='a'`` on a Dataset containing the new variables, passing in an existing Zarr store or path to a Zarr store. To resize and then append values along an existing dimension in a store, set -``append_dim``. This is a good option if data always arives in a particular +``append_dim``. This is a good option if data always arrives in a particular order, e.g., for time-stepping a simulation: .. 
ipython:: python diff --git a/xarray/backends/common.py b/xarray/backends/common.py index 1ac988c6b4f..5b8f9a6840f 100644 --- a/xarray/backends/common.py +++ b/xarray/backends/common.py @@ -247,7 +247,7 @@ def sync(self, compute=True, chunkmanager_store_kwargs=None): chunkmanager = get_chunked_array_type(*self.sources) # TODO: consider wrapping targets with dask.delayed, if this makes - # for any discernible difference in perforance, e.g., + # for any discernible difference in performance, e.g., # targets = [dask.delayed(t) for t in self.targets] if chunkmanager_store_kwargs is None: diff --git a/xarray/core/accessor_dt.py b/xarray/core/accessor_dt.py index 8255e2a5232..0d4a402cd19 100644 --- a/xarray/core/accessor_dt.py +++ b/xarray/core/accessor_dt.py @@ -601,7 +601,7 @@ class CombinedDatetimelikeAccessor( DatetimeAccessor[T_DataArray], TimedeltaAccessor[T_DataArray] ): def __new__(cls, obj: T_DataArray) -> CombinedDatetimelikeAccessor: - # CombinedDatetimelikeAccessor isn't really instatiated. Instead + # CombinedDatetimelikeAccessor isn't really instantiated. Instead # we need to choose which parent (datetime or timedelta) is # appropriate. Since we're checking the dtypes anyway, we'll just # do all the validation here. diff --git a/xarray/core/computation.py b/xarray/core/computation.py index 9cb60e0c424..1b96043f1f5 100644 --- a/xarray/core/computation.py +++ b/xarray/core/computation.py @@ -408,7 +408,7 @@ def _unpack_dict_tuples( def _check_core_dims(signature, variable_args, name): """ - Chcek if an arg has all the core dims required by the signature. + Check if an arg has all the core dims required by the signature. Slightly awkward design, of returning the error message. 
But we want to give a detailed error message, which requires inspecting the variable in diff --git a/xarray/core/groupby.py b/xarray/core/groupby.py index 8ed7148e2a1..788e1efa80b 100644 --- a/xarray/core/groupby.py +++ b/xarray/core/groupby.py @@ -989,7 +989,7 @@ def _flox_reduce( if kwargs["func"] not in ["sum", "prod"]: raise TypeError("Received an unexpected keyword argument 'min_count'") elif kwargs["min_count"] is None: - # set explicitly to avoid unncessarily accumulating count + # set explicitly to avoid unnecessarily accumulating count kwargs["min_count"] = 0 # weird backcompat diff --git a/xarray/core/missing.py b/xarray/core/missing.py index e77ec34b307..90a9dd2e76c 100644 --- a/xarray/core/missing.py +++ b/xarray/core/missing.py @@ -678,7 +678,7 @@ def interp_func(var, x, new_x, method: InterpOptions, kwargs): Notes ----- - This requiers scipy installed. + This requires scipy installed. See Also -------- @@ -724,7 +724,7 @@ def interp_func(var, x, new_x, method: InterpOptions, kwargs): for i in range(new_x[0].ndim) } - # if useful, re-use localize for each chunk of new_x + # if useful, reuse localize for each chunk of new_x localize = (method in ["linear", "nearest"]) and new_x0_chunks_is_not_none # scipy.interpolate.interp1d always forces to float. diff --git a/xarray/core/types.py b/xarray/core/types.py index eeee7995f55..1be5b00c43f 100644 --- a/xarray/core/types.py +++ b/xarray/core/types.py @@ -62,7 +62,7 @@ _DTypeLikeNested = Any # TODO: wait for support for recursive types # Xarray requires a Mapping[Hashable, dtype] in many places which - # conflics with numpys own DTypeLike (with dtypes for fields). + # conflicts with numpy's own DTypeLike (with dtypes for fields). # https://numpy.org/devdocs/reference/typing.html#numpy.typing.DTypeLike # This is a copy of this DTypeLike that allows only non-Mapping dtypes. 
DTypeLikeSave = Union[ diff --git a/xarray/namedarray/core.py b/xarray/namedarray/core.py index eba3f2f714b..feff052101b 100644 --- a/xarray/namedarray/core.py +++ b/xarray/namedarray/core.py @@ -255,7 +255,7 @@ def __init__( def __init_subclass__(cls, **kwargs: Any) -> None: if NamedArray in cls.__bases__ and (cls._new == NamedArray._new): # Type hinting does not work for subclasses unless _new is - # overriden with the correct class. + # overridden with the correct class. raise TypeError( "Subclasses of `NamedArray` must override the `_new` method." ) diff --git a/xarray/tests/test_concat.py b/xarray/tests/test_concat.py index e443631a148..11d0d38594d 100644 --- a/xarray/tests/test_concat.py +++ b/xarray/tests/test_concat.py @@ -622,7 +622,7 @@ def test_concat_errors(self): concat([data, data], "new_dim", data_vars=["not_found"]) with pytest.raises(ValueError, match=r"global attributes not"): - # call deepcopy seperately to get unique attrs + # call deepcopy separately to get unique attrs data0 = deepcopy(split_data[0]) data1 = deepcopy(split_data[1]) data1.attrs["foo"] = "bar" diff --git a/xarray/tests/test_formatting.py b/xarray/tests/test_formatting.py index 5ca134503e8..d5c8e0c0d0a 100644 --- a/xarray/tests/test_formatting.py +++ b/xarray/tests/test_formatting.py @@ -497,7 +497,7 @@ def test_array_repr_variable(self) -> None: def test_array_repr_recursive(self) -> None: # GH:issue:7111 - # direct recurion + # direct recursion var = xr.Variable("x", [0, 1]) var.attrs["x"] = var formatting.array_repr(var) diff --git a/xarray/tests/test_plot.py b/xarray/tests/test_plot.py index 6cc061a7ee5..31c23955b02 100644 --- a/xarray/tests/test_plot.py +++ b/xarray/tests/test_plot.py @@ -2768,7 +2768,7 @@ def test_datetime_hue(self) -> None: def test_facetgrid_hue_style(self) -> None: ds2 = self.ds.copy() - # Numbers plots as continous: + # Numbers plot as continuous: g = ds2.plot.scatter(x="A", y="B", row="row", col="col", hue="hue") assert isinstance(g._mappables[-1], 
mpl.collections.PathCollection) diff --git a/xarray/util/generate_ops.py b/xarray/util/generate_ops.py index f339470884a..5859934f646 100644 --- a/xarray/util/generate_ops.py +++ b/xarray/util/generate_ops.py @@ -121,7 +121,7 @@ def {method}(self, *args: Any, **kwargs: Any) -> Self: # We need to add "# type: ignore[override]" # Keep an eye out for: # https://discuss.python.org/t/make-type-hints-for-eq-of-primitives-less-strict/34240 -# The type ignores might not be neccesary anymore at some point. +# The type ignores might not be necessary anymore at some point. # # We require a "hack" to tell type checkers that e.g. Variable + DataArray = DataArray # In reality this returns NotImplementes, but this is not a valid type in python 3.9.