STYLE: fix pylint: no-else-raise #49520

Merged · 2 commits · Nov 4, 2022

Changes from all commits
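For context: pylint's no-else-raise check (R1720) flags an else (or a chained elif) that follows a branch ending in an unconditional raise, because the raise already leaves the function and the else only adds needless nesting. A minimal before/after sketch of the pattern this PR applies throughout (illustrative names, not code from the diff):

# Before: flagged by pylint R1720 (no-else-raise)
def require_positive(value: int) -> int:
    if value <= 0:
        raise ValueError("value must be positive")
    else:
        return value

# After: the else is redundant because the raise already exits
def require_positive_fixed(value: int) -> int:
    if value <= 0:
        raise ValueError("value must be positive")
    return value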
10 changes: 7 additions & 3 deletions ci/fix_wheels.py
@@ -4,7 +4,12 @@
import zipfile

try:
- _, wheel_path, dest_dir = sys.argv
+ if len(sys.argv) != 3:
+     raise ValueError(
+         "User must pass the path to the wheel and the destination directory."
+     )
+ wheel_path = sys.argv[1]
+ dest_dir = sys.argv[2]
# Figure out whether we are building on 32 or 64 bit python
is_32 = sys.maxsize <= 2**32
PYTHON_ARCH = "x86" if is_32 else "x64"
@@ -50,5 +55,4 @@
if not success:
os.remove(repaired_wheel_path)
raise exception
- else:
-     print(f"Successfully repaired wheel was written to {repaired_wheel_path}")
+ print(f"Successfully repaired wheel was written to {repaired_wheel_path}")
3 changes: 1 addition & 2 deletions doc/make.py
@@ -259,8 +259,7 @@ def latex(self, force=False):
"You should check the file "
'"build/latex/pandas.pdf" for problems.'
)
- else:
-     self._run_os("make")
+ self._run_os("make")
return ret_code

def latex_forced(self):
5 changes: 1 addition & 4 deletions pandas/_testing/_io.py
@@ -243,10 +243,7 @@ def wrapper(*args, **kwargs):

if not isinstance(err, error_classes) or raise_on_error:
raise
- else:
-     pytest.skip(
-         f"Skipping test due to lack of connectivity and error {err}"
-     )
+ pytest.skip(f"Skipping test due to lack of connectivity and error {err}")

return wrapper

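The pandas/_testing/_io.py hunk sits inside a decorator that converts connectivity failures into skipped tests unless raise_on_error is set. A simplified sketch of that wrapper pattern (error classes and names here are illustrative, not pandas' actual helper):

import pytest


def skip_on_connection_error(error_classes=(OSError,), raise_on_error=False):
    def decorate(func):
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as err:
                if not isinstance(err, error_classes) or raise_on_error:
                    raise
                # The raise above ends this path, so no else is needed
                pytest.skip(f"Skipping test due to lack of connectivity and error {err}")

        return wrapper

    return decorate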
3 changes: 1 addition & 2 deletions pandas/compat/_optional.py
@@ -141,8 +141,7 @@ def import_optional_dependency(
except ImportError:
if errors == "raise":
raise ImportError(msg)
- else:
-     return None
+ return None

# Handle submodules: if we have submodule, grab parent module from sys.modules
parent = name.split(".")[0]
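For reference, import_optional_dependency is the internal helper behind pandas' optional imports: when the module is missing, errors="raise" (the default) raises ImportError with an install hint, while the other modes return None, which is why the early return above no longer needs an else. A hedged usage sketch (private API; the exact signature may vary between pandas versions):

from pandas.compat._optional import import_optional_dependency

# errors="ignore" returns None when the module is unavailable, so callers can branch
maybe_tabulate = import_optional_dependency("tabulate", errors="ignore")
if maybe_tabulate is None:
    print("tabulate is not installed; falling back to plain formatting")
else:
    print("tabulate is available")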
3 changes: 1 addition & 2 deletions pandas/compat/numpy/function.py
@@ -412,8 +412,7 @@ def validate_resampler_func(method: str, args, kwargs) -> None:
"numpy operations are not valid with resample. "
f"Use .resample(...).{method}() instead"
)
- else:
-     raise TypeError("too many arguments passed in")
+ raise TypeError("too many arguments passed in")


def validate_minmax_axis(axis: AxisInt | None, ndim: int = 1) -> None:
3 changes: 1 addition & 2 deletions pandas/conftest.py
@@ -1163,8 +1163,7 @@ def deco(*args):
raise ValueError(
f"Could not find file {path} and --strict-data-files is set."
)
- else:
-     pytest.skip(f"Could not find {path}.")
+ pytest.skip(f"Could not find {path}.")
return path

return deco
6 changes: 3 additions & 3 deletions pandas/core/apply.py
@@ -464,7 +464,7 @@ def apply_str(self) -> DataFrame | Series:
"axis" not in arg_names or f in ("corrwith", "skew")
):
raise ValueError(f"Operation {f} does not support axis=1")
elif "axis" in arg_names:
if "axis" in arg_names:
self.kwargs["axis"] = self.axis
return self._try_aggregate_string_function(obj, f, *self.args, **self.kwargs)

@@ -762,7 +762,7 @@ def apply_broadcast(self, target: DataFrame) -> DataFrame:
# must be a scalar or 1d
if ares > 1:
raise ValueError("too many dims to broadcast")
- elif ares == 1:
+ if ares == 1:

# must match return dim
if result_compare != len(res):
@@ -1179,7 +1179,7 @@ def reconstruct_func(
"Function names must be unique if there is no new column names "
"assigned"
)
- elif func is None:
+ if func is None:
# nicer error message
raise TypeError("Must provide 'func' or tuples of '(column, aggfunc).")

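From pandas/core/apply.py onward, most hunks apply the second flavor of the fix: pylint also flags an elif chained after a branch that ends in raise, since control only reaches the elif when the first condition was false, so a plain if behaves identically. A small sketch of the equivalence (illustrative names, not the apply.py code):

def resolve_axis_kwarg(arg_names: list[str], op: str, axis: int) -> dict:
    kwargs = {}
    # Before the fix, the second "if" below read "elif"; both spellings are
    # equivalent because the raise leaves the function before the second
    # test could ever be affected by the first condition being true.
    if axis == 1 and "axis" not in arg_names:
        raise ValueError(f"Operation {op} does not support axis=1")
    if "axis" in arg_names:
        kwargs["axis"] = axis
    return kwargs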
2 changes: 1 addition & 1 deletion pandas/core/arrays/datetimelike.py
@@ -2264,7 +2264,7 @@ def validate_inferred_freq(
"values does not conform to passed frequency "
f"{freq.freqstr}"
)
- elif freq is None:
+ if freq is None:
freq = inferred_freq
freq_infer = False

2 changes: 1 addition & 1 deletion pandas/core/arrays/datetimes.py
@@ -2368,7 +2368,7 @@ def validate_tz_from_dtype(
if dtz is not None:
if tz is not None and not timezones.tz_compare(tz, dtz):
raise ValueError("cannot supply both a tz and a dtype with a tz")
- elif explicit_tz_none:
+ if explicit_tz_none:
raise ValueError("Cannot pass both a timezone-aware dtype and tz=None")
tz = dtz

8 changes: 4 additions & 4 deletions pandas/core/arrays/interval.py
@@ -314,17 +314,17 @@ def _simple_new(
f"right [{type(right).__name__}] types"
)
raise ValueError(msg)
- elif is_categorical_dtype(left.dtype) or is_string_dtype(left.dtype):
+ if is_categorical_dtype(left.dtype) or is_string_dtype(left.dtype):
# GH 19016
msg = (
"category, object, and string subtypes are not supported "
"for IntervalArray"
)
raise TypeError(msg)
- elif isinstance(left, ABCPeriodIndex):
+ if isinstance(left, ABCPeriodIndex):
msg = "Period dtypes are not supported, use a PeriodIndex instead"
raise ValueError(msg)
- elif isinstance(left, ABCDatetimeIndex) and str(left.tz) != str(right.tz):
+ if isinstance(left, ABCDatetimeIndex) and str(left.tz) != str(right.tz):
msg = (
"left and right must have the same time zone, got "
f"'{left.tz}' and '{right.tz}'"
@@ -1321,7 +1321,7 @@ def mid(self) -> Index:
def overlaps(self, other):
if isinstance(other, (IntervalArray, ABCIntervalIndex)):
raise NotImplementedError
- elif not isinstance(other, Interval):
+ if not isinstance(other, Interval):
msg = f"`other` must be Interval-like, got {type(other).__name__}"
raise TypeError(msg)

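The IntervalArray.overlaps hunk preserves the user-facing type checks: comparing against another IntervalArray or IntervalIndex is unsupported, and anything that is not an Interval raises TypeError. A small usage sketch (results assume the default closed="right" intervals):

import pandas as pd

arr = pd.arrays.IntervalArray.from_breaks([0, 1, 2, 3])

# Element-wise overlap with a scalar Interval
print(arr.overlaps(pd.Interval(0.5, 1.5)))  # [ True  True False]

# arr.overlaps(arr)  # raises NotImplementedError
# arr.overlaps(3)    # TypeError: `other` must be Interval-like, got int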
4 changes: 2 additions & 2 deletions pandas/core/arrays/masked.py
@@ -655,7 +655,7 @@ def _arith_method(self, other, op):
raise NotImplementedError(
f"operator '{op_name}' not implemented for bool dtypes"
)
- elif op_name in {"mod", "rmod"}:
+ if op_name in {"mod", "rmod"}:
dtype = "int8"
else:
dtype = "bool"
@@ -1034,7 +1034,7 @@ def _quantile(
# I think this should be out_mask=self.isna().all(axis=1)
# but am holding off until we have tests
raise NotImplementedError
- elif self.isna().all():
+ if self.isna().all():
out_mask = np.ones(res.shape, dtype=bool)
else:
out_mask = np.zeros(res.shape, dtype=bool)
2 changes: 1 addition & 1 deletion pandas/core/arrays/period.py
@@ -1167,7 +1167,7 @@ def _make_field_arrays(*fields) -> list[np.ndarray]:
if isinstance(x, (list, np.ndarray, ABCSeries)):
if length is not None and len(x) != length:
raise ValueError("Mismatched Period array lengths")
- elif length is None:
+ if length is None:
length = len(x)

# error: Argument 2 to "repeat" has incompatible type "Optional[int]"; expected
5 changes: 2 additions & 3 deletions pandas/core/arrays/sparse/array.py
@@ -781,7 +781,7 @@ def fillna(
):
raise ValueError("Must specify one of 'method' or 'value'.")

- elif method is not None:
+ if method is not None:
msg = "fillna with 'method' requires high memory usage."
warnings.warn(
msg,
@@ -1172,8 +1172,7 @@ def _take_without_fill(self: SparseArrayT, indices) -> SparseArrayT:
if (indices.max() >= n) or (indices.min() < -n):
if n == 0:
raise IndexError("cannot do a non-empty take from an empty axes.")
- else:
-     raise IndexError("out of bounds value in 'indices'.")
+ raise IndexError("out of bounds value in 'indices'.")

if to_shift.any():
indices = indices.copy()
8 changes: 4 additions & 4 deletions pandas/core/arrays/timedeltas.py
@@ -490,7 +490,7 @@ def __truediv__(self, other):
if len(other) != len(self):
raise ValueError("Cannot divide vectors with unequal lengths")

- elif is_timedelta64_dtype(other.dtype):
+ if is_timedelta64_dtype(other.dtype):
# let numpy handle it
return self._ndarray / other

@@ -554,7 +554,7 @@ def __rtruediv__(self, other):
if len(other) != len(self):
raise ValueError("Cannot divide vectors with unequal lengths")

- elif is_timedelta64_dtype(other.dtype):
+ if is_timedelta64_dtype(other.dtype):
# let numpy handle it
return other / self._ndarray

@@ -606,7 +606,7 @@ def __floordiv__(self, other):
if len(other) != len(self):
raise ValueError("Cannot divide with unequal lengths")

- elif is_timedelta64_dtype(other.dtype):
+ if is_timedelta64_dtype(other.dtype):
other = type(self)(other)

# numpy timedelta64 does not natively support floordiv, so operate
@@ -675,7 +675,7 @@ def __rfloordiv__(self, other):
if len(other) != len(self):
raise ValueError("Cannot divide with unequal lengths")

- elif is_timedelta64_dtype(other.dtype):
+ if is_timedelta64_dtype(other.dtype):
other = type(self)(other)
# numpy timedelta64 does not natively support floordiv, so operate
# on the i8 values
2 changes: 1 addition & 1 deletion pandas/core/common.py
@@ -388,7 +388,7 @@ def standardize_mapping(into):
into = type(into)
if not issubclass(into, abc.Mapping):
raise TypeError(f"unsupported type: {into}")
- elif into == defaultdict:
+ if into == defaultdict:
raise TypeError("to_dict() only accepts initialized defaultdicts")
return into

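The standardize_mapping guard above is what surfaces through DataFrame.to_dict(into=...): the defaultdict class itself is rejected, while an initialized defaultdict (which carries a default_factory) is accepted. A short usage sketch, assuming current to_dict behavior:

from collections import defaultdict

import pandas as pd

df = pd.DataFrame({"a": [1, 2]})

# df.to_dict(into=defaultdict)  # TypeError: to_dict() only accepts initialized defaultdicts

result = df.to_dict(into=defaultdict(list))
print(type(result))  # <class 'collections.defaultdict'>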
2 changes: 1 addition & 1 deletion pandas/core/computation/eval.py
@@ -344,7 +344,7 @@ def eval(
"Multi-line expressions are only valid "
"if all expressions contain an assignment"
)
- elif inplace:
+ if inplace:
raise ValueError("Cannot operate inplace if there is no assignment")

# assign if needed
2 changes: 1 addition & 1 deletion pandas/core/construction.py
@@ -698,7 +698,7 @@ def _sanitize_ndim(
if getattr(result, "ndim", 0) == 0:
raise ValueError("result should be arraylike with ndim > 0")

- elif result.ndim == 1:
+ if result.ndim == 1:
# the result that we want
result = _maybe_repeat(result, index)

18 changes: 9 additions & 9 deletions pandas/core/dtypes/cast.py
@@ -1866,7 +1866,7 @@ def np_can_hold_element(dtype: np.dtype, element: Any) -> Any:
return element
raise LossySetitemError

- elif is_integer(element) or (is_float(element) and element.is_integer()):
+ if is_integer(element) or (is_float(element) and element.is_integer()):
# e.g. test_setitem_series_int8 if we have a python int 1
# tipo may be np.int32, despite the fact that it will fit
# in smaller int dtypes.
@@ -1893,7 +1893,7 @@ def np_can_hold_element(dtype: np.dtype, element: Any) -> Any:

# Anything other than integer we cannot hold
raise LossySetitemError
- elif (
+ if (
dtype.kind == "u"
and isinstance(element, np.ndarray)
and element.dtype.kind == "i"
@@ -1905,9 +1905,9 @@ def np_can_hold_element(dtype: np.dtype, element: Any) -> Any:
# itemsize issues there?
return casted
raise LossySetitemError
- elif dtype.itemsize < tipo.itemsize:
+ if dtype.itemsize < tipo.itemsize:
raise LossySetitemError
- elif not isinstance(tipo, np.dtype):
+ if not isinstance(tipo, np.dtype):
# i.e. nullable IntegerDtype; we can put this into an ndarray
# losslessly iff it has no NAs
if element._hasna:
@@ -1918,7 +1918,7 @@ def np_can_hold_element(dtype: np.dtype, element: Any) -> Any:

raise LossySetitemError

elif dtype.kind == "f":
if dtype.kind == "f":
if lib.is_integer(element) or lib.is_float(element):
casted = dtype.type(element)
if np.isnan(casted) or casted == element:
@@ -1931,7 +1931,7 @@ def np_can_hold_element(dtype: np.dtype, element: Any) -> Any:
if tipo.kind not in ["f", "i", "u"]:
# Anything other than float/integer we cannot hold
raise LossySetitemError
- elif not isinstance(tipo, np.dtype):
+ if not isinstance(tipo, np.dtype):
# i.e. nullable IntegerDtype or FloatingDtype;
# we can put this into an ndarray losslessly iff it has no NAs
if element._hasna:
@@ -1950,7 +1950,7 @@ def np_can_hold_element(dtype: np.dtype, element: Any) -> Any:

raise LossySetitemError

elif dtype.kind == "c":
if dtype.kind == "c":
if lib.is_integer(element) or lib.is_complex(element) or lib.is_float(element):
if np.isnan(element):
# see test_where_complex GH#6345
@@ -1968,7 +1968,7 @@ def np_can_hold_element(dtype: np.dtype, element: Any) -> Any:
raise LossySetitemError
raise LossySetitemError

elif dtype.kind == "b":
if dtype.kind == "b":
if tipo is not None:
if tipo.kind == "b":
if not isinstance(tipo, np.dtype):
@@ -1982,7 +1982,7 @@ def np_can_hold_element(dtype: np.dtype, element: Any) -> Any:
return element
raise LossySetitemError

elif dtype.kind == "S":
if dtype.kind == "S":
# TODO: test tests.frame.methods.test_replace tests get here,
# need more targeted tests. xref phofl has a PR about this
if tipo is not None:
5 changes: 2 additions & 3 deletions pandas/core/dtypes/common.py
@@ -1471,7 +1471,7 @@ def get_dtype(arr_or_dtype) -> DtypeObj:
raise TypeError("Cannot deduce dtype from null object")

# fastpath
- elif isinstance(arr_or_dtype, np.dtype):
+ if isinstance(arr_or_dtype, np.dtype):
return arr_or_dtype
elif isinstance(arr_or_dtype, type):
return np.dtype(arr_or_dtype)
@@ -1639,8 +1639,7 @@ def validate_all_hashable(*args, error_name: str | None = None) -> None:
if not all(is_hashable(arg) for arg in args):
if error_name:
raise TypeError(f"{error_name} must be a hashable type")
- else:
-     raise TypeError("All elements must be hashable")
+ raise TypeError("All elements must be hashable")


def pandas_dtype(dtype) -> DtypeObj:
2 changes: 1 addition & 1 deletion pandas/core/dtypes/dtypes.py
@@ -523,7 +523,7 @@ def validate_categories(categories, fastpath: bool = False) -> Index:
raise TypeError(
f"Parameter 'categories' must be list-like, was {repr(categories)}"
)
- elif not isinstance(categories, ABCIndex):
+ if not isinstance(categories, ABCIndex):
categories = Index._with_infer(categories, tupleize_cols=False)

if not fastpath:
(The remaining file diffs in this pull request did not load and are omitted here.)