STYLE: Resolved Inconsistent namespace #40286

Merged: 22 commits, Mar 11, 2021

Changes shown are from 8 of the 22 commits.

Commits
5d55412
STYLE: Inconsistent namespace - tools (pandas-dev#39992)
deepang17 Mar 7, 2021
c9c3eff
Pre-Commit Fixes
deepang17 Mar 7, 2021
6b4e259
Merge branch 'master' of https://github.com/pandas-dev/pandas
deepang17 Mar 7, 2021
5723060
STYLE: Resolved Inconsistent namespace except test_algos.py - (#39992)
deepang17 Mar 7, 2021
6bc371c
STYLE: Resolved Inconsistent namespace by excluding test_algos.py
deepang17 Mar 7, 2021
06387fb
test_algos.py fixes
deepang17 Mar 7, 2021
58d1017
Merge branch 'master' of https://github.com/pandas-dev/pandas
deepang17 Mar 8, 2021
9c631eb
Updated test_algos.py
deepang17 Mar 8, 2021
6af3b2d
STYLE: inconsistent-namespace-usage fixes
deepang17 Mar 8, 2021
91cba42
updated test_interval.py
deepang17 Mar 8, 2021
61108f7
FIX: Improved variable names
deepang17 Mar 8, 2021
23de5ef
test_interval.py fixes
deepang17 Mar 8, 2021
57f78a9
Merge branch 'master' of https://github.com/pandas-dev/pandas
deepang17 Mar 9, 2021
373b4e4
FIX: Resolved confusing variable names
deepang17 Mar 9, 2021
f29cce7
Merge branch 'master' of https://github.com/pandas-dev/pandas
deepang17 Mar 9, 2021
92c177b
test_string.py fixes
deepang17 Mar 9, 2021
1e1d76e
Merge branch 'master' of https://github.com/pandas-dev/pandas
deepang17 Mar 10, 2021
17a82ca
Merge branch 'master' of https://github.com/pandas-dev/pandas
deepang17 Mar 10, 2021
a3ceb65
getitem.py fixes
deepang17 Mar 10, 2021
40a35ee
rename more variables to arr
MarcoGorelli Mar 11, 2021
51700e7
Merge remote-tracking branch 'upstream/master' into pr/deepang17/40286
MarcoGorelli Mar 11, 2021
ac1ad61
Merge remote-tracking branch 'upstream/master' into pr/deepang17/40286
MarcoGorelli Mar 11, 2021
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml

@@ -95,7 +95,7 @@ repos:
      entry: python scripts/check_for_inconsistent_pandas_namespace.py
      language: python
      types: [python]
-      files: ^pandas/tests/frame/
+      files: ^pandas/tests/
    - id: incorrect-code-directives
      name: Check for incorrect code block or IPython directives
      language: pygrep
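For context, this hook reports test files that import a name from pandas directly but still reach the same name through the pd. alias elsewhere in the file. Widening files from ^pandas/tests/frame/ to ^pandas/tests/ runs the check over the entire test suite, which is what the rest of this diff cleans up. A minimal sketch of the pattern being flagged (hypothetical file, not part of the diff):

    import pandas as pd
    from pandas import DataFrame

    # Inconsistent: DataFrame is imported directly above, yet the pd. alias is used here.
    bad = pd.DataFrame({"a": [1, 2]})

    # Consistent: once a name is imported from pandas, use the bare name throughout the file.
    good = DataFrame({"a": [1, 2]})

Judging by the commit messages above, the hook id appears to be inconsistent-namespace-usage; if so, it can be run locally with pre-commit run inconsistent-namespace-usage --all-files.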
4 changes: 2 additions & 2 deletions pandas/tests/indexes/period/test_setops.py

@@ -351,10 +351,10 @@ def test_intersection_equal_duplicates(self):

    def test_union_duplicates(self):
        # GH#36289
-        idx = pd.period_range("2011-01-01", periods=2)
+        idx = period_range("2011-01-01", periods=2)
        idx_dup = idx.append(idx)

-        idx2 = pd.period_range("2011-01-02", periods=2)
+        idx2 = period_range("2011-01-02", periods=2)
        idx2_dup = idx2.append(idx2)
        result = idx_dup.union(idx2_dup)
2 changes: 1 addition & 1 deletion pandas/tests/indexing/interval/test_interval.py

@@ -124,7 +124,7 @@ def test_mi_intervalindex_slicing_with_scalar(self):
                pd.Index(
                    ["RID1", "RID1", "RID2", "RID2", "RID1", "RID1", "RID2", "RID2"]
                ),
-                pd.IntervalIndex.from_arrays(
+                IntervalIndex.from_arrays(
                    [0, 1, 10, 11, 0, 1, 10, 11], [1, 2, 11, 12, 1, 2, 11, 12]
                ),
            ]
2 changes: 1 addition & 1 deletion pandas/tests/indexing/multiindex/test_multiindex.py

@@ -75,7 +75,7 @@ def test_nested_tuples_duplicates(self):

        dti = pd.to_datetime(["20190101", "20190101", "20190102"])
        idx = Index(["a", "a", "c"])
-        mi = pd.MultiIndex.from_arrays([dti, idx], names=["index1", "index2"])
+        mi = MultiIndex.from_arrays([dti, idx], names=["index1", "index2"])

        df = DataFrame({"c1": [1, 2, 3], "c2": [np.nan, np.nan, np.nan]}, index=mi)
8 changes: 2 additions & 6 deletions pandas/tests/indexing/multiindex/test_slice.py

@@ -534,9 +534,7 @@ def test_loc_axis_single_level_multi_col_indexing_multiindex_col_df(self):
        # GH29519
        df = DataFrame(
            np.arange(27).reshape(3, 9),
-            columns=pd.MultiIndex.from_product(
-                [["a1", "a2", "a3"], ["b1", "b2", "b3"]]
-            ),
+            columns=MultiIndex.from_product([["a1", "a2", "a3"], ["b1", "b2", "b3"]]),
        )
        result = df.loc(axis=1)["a1":"a2"]
        expected = df.iloc[:, :-3]
@@ -548,9 +546,7 @@ def test_loc_axis_single_level_single_col_indexing_multiindex_col_df(self):
        # GH29519
        df = DataFrame(
            np.arange(27).reshape(3, 9),
-            columns=pd.MultiIndex.from_product(
-                [["a1", "a2", "a3"], ["b1", "b2", "b3"]]
-            ),
+            columns=MultiIndex.from_product([["a1", "a2", "a3"], ["b1", "b2", "b3"]]),
        )
        result = df.loc(axis=1)["a1"]
        expected = df.iloc[:, :3]
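The two tests above rely on the axis argument of .loc to slice MultiIndex columns by label. A standalone sketch of that API, mirroring the shapes used in the test (variable names are ours; illustrative only, not part of the diff):

    import numpy as np
    from pandas import DataFrame, MultiIndex

    # 3 x 9 frame with two-level columns: (a1, a2, a3) x (b1, b2, b3)
    df = DataFrame(
        np.arange(27).reshape(3, 9),
        columns=MultiIndex.from_product([["a1", "a2", "a3"], ["b1", "b2", "b3"]]),
    )

    # .loc(axis=1) pins the indexer to the columns, so a label slice on the first
    # level keeps every column under "a1" and "a2" (6 of the 9 columns).
    subset = df.loc(axis=1)["a1":"a2"]
    assert subset.shape == (3, 6)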
36 changes: 17 additions & 19 deletions pandas/tests/indexing/test_loc.py

@@ -525,10 +525,10 @@ def test_loc_setitem_consistency_slice_column_len(self):
Region_1,Site_2,3977723089,A,5/20/2015 8:33,5/20/2015 9:09,Yes,No"""

        df = pd.read_csv(StringIO(data), header=[0, 1], index_col=[0, 1, 2])
-        df.loc[:, ("Respondent", "StartDate")] = pd.to_datetime(
+        df.loc[:, ("Respondent", "StartDate")] = to_datetime(
            df.loc[:, ("Respondent", "StartDate")]
        )
-        df.loc[:, ("Respondent", "EndDate")] = pd.to_datetime(
+        df.loc[:, ("Respondent", "EndDate")] = to_datetime(
            df.loc[:, ("Respondent", "EndDate")]
        )
        df.loc[:, ("Respondent", "Duration")] = (
@@ -568,7 +568,7 @@ def test_loc_modify_datetime(self):
            {"date": [1485264372711, 1485265925110, 1540215845888, 1540282121025]}
        )

-        df["date_dt"] = pd.to_datetime(df["date"], unit="ms", cache=True)
+        df["date_dt"] = to_datetime(df["date"], unit="ms", cache=True)

        df.loc[:, "date_dt_cp"] = df.loc[:, "date_dt"]
        df.loc[[2, 3], "date_dt_cp"] = df.loc[[2, 3], "date_dt"]
@@ -584,7 +584,7 @@ def test_loc_modify_datetime(self):
        )

        columns = ["date_dt", "date_dt_cp"]
-        expected[columns] = expected[columns].apply(pd.to_datetime)
+        expected[columns] = expected[columns].apply(to_datetime)

        tm.assert_frame_equal(df, expected)

@@ -808,8 +808,8 @@ def test_loc_coercion(self):
    def test_setitem_new_key_tz(self, indexer_sl):
        # GH#12862 should not raise on assigning the second value
        vals = [
-            pd.to_datetime(42).tz_localize("UTC"),
-            pd.to_datetime(666).tz_localize("UTC"),
+            to_datetime(42).tz_localize("UTC"),
+            to_datetime(666).tz_localize("UTC"),
        ]
        expected = Series(vals, index=["foo", "bar"])

@@ -1458,7 +1458,7 @@ def test_loc_getitem_access_none_value_in_multiindex(self):
        # GH#34318: test that you can access a None value using .loc
        # through a Multiindex

-        ser = Series([None], pd.MultiIndex.from_arrays([["Level1"], ["Level2"]]))
+        ser = Series([None], MultiIndex.from_arrays([["Level1"], ["Level2"]]))
        result = ser.loc[("Level1", "Level2")]
        assert result is None

@@ -1474,7 +1474,7 @@ def test_loc_getitem_access_none_value_in_multiindex(self):
    def test_loc_setitem_multiindex_slice(self):
        # GH 34870

-        index = pd.MultiIndex.from_tuples(
+        index = MultiIndex.from_tuples(
            zip(
                ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"],
                ["one", "two", "one", "two", "one", "two", "one", "two"],
@@ -1629,14 +1629,14 @@ def test_loc_setitem_with_expansion_and_existing_dst(self):
        start = Timestamp("2017-10-29 00:00:00+0200", tz="Europe/Madrid")
        end = Timestamp("2017-10-29 03:00:00+0100", tz="Europe/Madrid")
        ts = Timestamp("2016-10-10 03:00:00", tz="Europe/Madrid")
-        idx = pd.date_range(start, end, closed="left", freq="H")
+        idx = date_range(start, end, closed="left", freq="H")
        assert ts not in idx  # i.e. result.loc setitem is with-expansion

        result = DataFrame(index=idx, columns=["value"])
        result.loc[ts, "value"] = 12
        expected = DataFrame(
            [np.nan] * len(idx) + [12],
-            index=idx.append(pd.DatetimeIndex([ts])),
+            index=idx.append(DatetimeIndex([ts])),
            columns=["value"],
            dtype=object,
        )
@@ -1645,7 +1645,7 @@ def test_loc_setitem_with_expansion_and_existing_dst(self):
    def test_setitem_with_expansion(self):
        # indexing - setting an element
        df = DataFrame(
-            data=pd.to_datetime(["2015-03-30 20:12:32", "2015-03-12 00:11:11"]),
+            data=to_datetime(["2015-03-30 20:12:32", "2015-03-12 00:11:11"]),
            columns=["time"],
        )
        df["new_col"] = ["new", "old"]
@@ -1660,7 +1660,7 @@ def test_setitem_with_expansion(self):
        expected = Series([v[0], df.loc[1, "time"]], name="time")
        tm.assert_series_equal(df2.time, expected)

-        v = df.loc[df.new_col == "new", "time"] + pd.Timedelta("1s")
+        v = df.loc[df.new_col == "new", "time"] + Timedelta("1s")
        df.loc[df.new_col == "new", "time"] = v
        tm.assert_series_equal(df.loc[df.new_col == "new", "time"], v)

@@ -2296,23 +2296,21 @@ def test_loc_axis_1_slice():
    df = DataFrame(
        np.ones((10, 8)),
        index=tuple("ABCDEFGHIJ"),
-        columns=pd.MultiIndex.from_tuples(cols),
+        columns=MultiIndex.from_tuples(cols),
    )
    result = df.loc(axis=1)[(2014, 9):(2015, 8)]
    expected = DataFrame(
        np.ones((10, 4)),
        index=tuple("ABCDEFGHIJ"),
-        columns=pd.MultiIndex.from_tuples(
-            [(2014, 9), (2014, 10), (2015, 7), (2015, 8)]
-        ),
+        columns=MultiIndex.from_tuples([(2014, 9), (2014, 10), (2015, 7), (2015, 8)]),
    )
    tm.assert_frame_equal(result, expected)


def test_loc_set_dataframe_multiindex():
    # GH 14592
    expected = DataFrame(
-        "a", index=range(2), columns=pd.MultiIndex.from_product([range(2), range(2)])
+        "a", index=range(2), columns=MultiIndex.from_product([range(2), range(2)])
    )
    result = expected.copy()
    result.loc[0, [(0, 1)]] = result.loc[0, [(0, 1)]]
@@ -2340,7 +2338,7 @@ def test_loc_with_positional_slice_deprecation():

def test_loc_slice_disallows_positional():
    # GH#16121, GH#24612, GH#31810
-    dti = pd.date_range("2016-01-01", periods=3)
+    dti = date_range("2016-01-01", periods=3)
    df = DataFrame(np.random.random((3, 2)), index=dti)

    ser = df[0]
@@ -2372,7 +2370,7 @@ def test_loc_datetimelike_mismatched_dtypes():
    df = DataFrame(
        np.random.randn(5, 3),
        columns=["a", "b", "c"],
-        index=pd.date_range("2012", freq="H", periods=5),
+        index=date_range("2012", freq="H", periods=5),
    )
    # create dataframe with non-unique DatetimeIndex
    df = df.iloc[[0, 2, 2, 3]].copy()
4 changes: 2 additions & 2 deletions pandas/tests/io/formats/style/test_style.py

@@ -84,13 +84,13 @@ def test_copy(self, do_changes, do_render):
        self.styler.set_table_attributes('class="foo" data-bar')
        self.styler.hidden_index = not self.styler.hidden_index
        self.styler.hide_columns("A")
-        classes = pd.DataFrame(
+        classes = DataFrame(
            [["favorite-val red", ""], [None, "blue my-val"]],
            index=self.df.index,
            columns=self.df.columns,
        )
        self.styler.set_td_classes(classes)
-        ttips = pd.DataFrame(
+        ttips = DataFrame(
            data=[["Favorite", ""], [np.nan, "my"]],
            columns=self.df.columns,
            index=self.df.index,
3 changes: 1 addition & 2 deletions pandas/tests/strings/test_cat.py

@@ -1,7 +1,6 @@
import numpy as np
import pytest

-import pandas as pd
from pandas import (
    DataFrame,
    Index,
@@ -366,7 +365,7 @@ def test_cat_on_filtered_index():
    assert str_multiple.loc[1] == "2011 2 2"


-@pytest.mark.parametrize("klass", [tuple, list, np.array, pd.Series, pd.Index])
+@pytest.mark.parametrize("klass", [tuple, list, np.array, Series, Index])
def test_cat_different_classes(klass):
    # https://github.com/pandas-dev/pandas/issues/33425
    s = Series(["a", "b", "c"])
8 changes: 4 additions & 4 deletions pandas/tests/test_algos.py

@@ -1478,7 +1478,7 @@ def test_unique_index(self):
        )

    @pytest.mark.parametrize(
-        "arr, unique",
+        "arr, uniques",
        [
            (
                [(0, 0), (0, 1), (1, 0), (1, 1), (0, 0), (0, 1), (1, 0), (1, 1)],
@@ -1491,10 +1491,10 @@ def test_unique_index(self):
            ([("a", 1), ("b", 2), ("a", 3), ("a", 1)], [("a", 1), ("b", 2), ("a", 3)]),
        ],
    )
-    def test_unique_tuples(self, arr, unique):
+    def test_unique_tuples(self, arr, uniques):
        # https://github.com/pandas-dev/pandas/issues/16519
-        expected = np.empty(len(unique), dtype=object)
-        expected[:] = unique
+        expected = np.empty(len(uniques), dtype=object)
+        expected[:] = uniques

        result = pd.unique(arr)
        tm.assert_numpy_array_equal(result, expected)
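One detail worth noting in the renamed test: the expected array is built with np.empty plus slice assignment rather than np.array, because np.array would turn a list of equal-length tuples into a 2-D array instead of a 1-D array of tuple objects. A standalone sketch of that idiom (variable names are ours; illustrative only, not part of the diff):

    import numpy as np
    import pandas as pd

    arr = [(0, 0), (0, 1), (1, 0), (1, 1), (0, 0), (0, 1)]
    uniques = [(0, 0), (0, 1), (1, 0), (1, 1)]

    # np.array(uniques) would give an integer array of shape (4, 2); the
    # two-step construction keeps each tuple as a single object element.
    expected = np.empty(len(uniques), dtype=object)
    expected[:] = uniques

    result = pd.unique(arr)  # 1-D object array of distinct tuples, in order of appearance
    assert result.shape == expected.shape == (4,)
    assert list(result) == list(expected)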