DEPR: deprecate strings T, S, L, U, and N in offsets frequencies, resolution abbreviations, _attrname_to_abbrevs #54061

Merged
Changes from 2 commits
Commits
47 commits
6e3e96e
DEPR: deprecate codes T and L in to_abbrevs/_abbrev_to_attrnames
natmokval Jul 10, 2023
fe88663
replace T/L with min/ms in _prefix, period_code_map, _attrname_to_abb…
natmokval Jul 12, 2023
1e79480
correct def get_freq for tseries, fix tests
natmokval Jul 14, 2023
f7fd2a1
replace T, L in _offset_to_period_map, is_subperiod, is_superperiod, …
natmokval Jul 14, 2023
98e8a39
correct def to_timedelta, def _round_temporally and fix tests
natmokval Jul 17, 2023
93bbd08
correct def resolution_string, get_reso_from_freqstr and fix tests
natmokval Jul 21, 2023
b0dfd2f
Merge branch 'main' into DEPR-codes-T-L-from-_attrname_to_abbrevs-_ab…
natmokval Jul 21, 2023
51c62a1
fix tests
natmokval Jul 21, 2023
898f811
correct def _maybe_coerce_freq , is_subperiod, is_superperiod, and _o…
natmokval Jul 23, 2023
1d30c07
fix a test for plotting
natmokval Jul 25, 2023
37de346
Merge branch 'main' into DEPR-codes-T-L-from-_attrname_to_abbrevs-_ab…
natmokval Jul 25, 2023
7171237
fix tests
natmokval Jul 25, 2023
29dcd8d
fix failures in asv benchmarks
natmokval Jul 25, 2023
317718a
correct docstrings
natmokval Jul 27, 2023
99a0cf9
deprecate abbrevs U, N add dict depr_abbrevs and fix tests
natmokval Jul 28, 2023
a13a041
correct get_freq and fix tests
natmokval Jul 31, 2023
a949282
Merge branch 'main' into DEPR-codes-T-L-from-_attrname_to_abbrevs-_ab…
natmokval Jul 31, 2023
7bd6188
correct is_superperiod, is_subperiod, _maybe_coerce_freq and fix tests
natmokval Jul 31, 2023
77949c4
correct __eq__ for PeriodDtype
natmokval Aug 1, 2023
a24c0ec
update docstrings
natmokval Aug 1, 2023
733d68b
correct whatsnew and user_guide
natmokval Aug 1, 2023
beeac14
correct tables of Offset/Period aliases in user_guide
natmokval Aug 1, 2023
a3e3522
correct warning message, add the warning to some tests
natmokval Aug 1, 2023
65ddf90
resolve conflicts in tests
natmokval Aug 1, 2023
c61b0fb
add the futurewarning to def asfreq, fix tests
natmokval Aug 1, 2023
c2f45ba
add the futurewarning to to_offset, correct warning message and add t…
natmokval Aug 2, 2023
73405bf
add the warning to parse_timedelta_unit, remove t, l, u, n from timed…
natmokval Aug 2, 2023
b2ab238
correct docstrings, update user_guide for timeseries and add tests
natmokval Aug 2, 2023
4775471
update whatsnew/v2.1.0.rst
natmokval Aug 2, 2023
c3ed691
remove warning from to_timedelta, correct tests
natmokval Aug 3, 2023
155b0a7
Merge branch 'main' into DEPR-codes-T-L-from-_attrname_to_abbrevs-_ab…
natmokval Aug 7, 2023
31d292c
deprecate 'S' in favour of 's', fix tests
natmokval Aug 9, 2023
d5dabd0
fix tests
natmokval Aug 9, 2023
9cf0565
correct parse_iso_format_string, fix tests
natmokval Aug 13, 2023
609646e
correct docs
natmokval Aug 17, 2023
cc04261
correct docs
natmokval Aug 17, 2023
93533d9
correct docstrings in PeriodProperties
natmokval Aug 17, 2023
9ba1734
Merge branch 'main' into DEPR-codes-T-L-from-_attrname_to_abbrevs-_ab…
natmokval Aug 17, 2023
b79e9b6
correct docs, tests, and add lines to whatsnew/v2.2.0.rst
natmokval Aug 17, 2023
3408d0b
resolve conflict
natmokval Aug 17, 2023
12888f8
correct examples in docs
natmokval Aug 17, 2023
cdd5f6b
resolve conflict
natmokval Aug 18, 2023
c7b8b24
resolve conflict
natmokval Aug 21, 2023
5bb2ca8
correct v2.2.0.rst and test_subset
natmokval Aug 22, 2023
c54e431
resolve conflict in v2.2.0.rst
natmokval Aug 22, 2023
0966d2f
resolve conflict
natmokval Aug 22, 2023
271bd6b
resolve conflict v2.2.0.rst
natmokval Aug 22, 2023
4 changes: 2 additions & 2 deletions pandas/core/arrays/arrow/array.py
@@ -2466,9 +2466,9 @@ def _round_temporally(
"W": "week",
"D": "day",
"H": "hour",
"T": "minute",
"min": "minute",
"S": "second",
"L": "millisecond",
"ms": "millisecond",
"U": "microsecond",
"N": "nanosecond",
}
4 changes: 4 additions & 0 deletions pandas/core/tools/timedeltas.py
@@ -180,6 +180,10 @@ def to_timedelta(
FutureWarning,
stacklevel=find_stack_level(),
)
if unit.lower() == "t":
unit = unit.replace(unit, "min")
else:
unit = unit.replace(unit, "ms")

if unit is not None:
unit = parse_timedelta_unit(unit)
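The hunk above rewrites deprecated unit strings inside to_timedelta at this stage of the PR (e.g. "t"/"T" becomes "min"). A hedged sketch of the equivalent call-site change, written defensively because later pandas releases may reject the old unit outright rather than warn:

```python
import pandas as pd

# Preferred lowercase unit spelling going forward.
td_new = pd.to_timedelta(90, unit="min")

# The old single-letter unit "T"; depending on the installed pandas version
# it still works, warns, or has already been removed.
try:
    td_old = pd.to_timedelta(90, unit="T")
    assert td_old == td_new  # both mean 90 minutes
except ValueError:
    pass  # releases that already removed the alias reject it
```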
4 changes: 2 additions & 2 deletions pandas/tests/copy_view/test_methods.py
@@ -1499,10 +1499,10 @@ def test_where_mask_noop_on_single_column(using_copy_on_write, dtype, val, func)
def test_asfreq_noop(using_copy_on_write):
df = DataFrame(
{"a": [0.0, None, 2.0, 3.0]},
index=date_range("1/1/2000", periods=4, freq="T"),
index=date_range("1/1/2000", periods=4, freq="min"),
)
df_orig = df.copy()
df2 = df.asfreq(freq="T")
df2 = df.asfreq(freq="min")

if using_copy_on_write:
assert np.shares_memory(get_array(df2, "a"), get_array(df, "a"))
2 changes: 1 addition & 1 deletion pandas/tests/extension/test_arrow.py
@@ -2623,7 +2623,7 @@ def test_dt_roundlike_unsupported_freq(method):
@pytest.mark.xfail(
pa_version_under7p0, reason="Methods not supported for pyarrow < 7.0"
)
@pytest.mark.parametrize("freq", ["D", "H", "T", "S", "L", "U", "N"])
@pytest.mark.parametrize("freq", ["D", "H", "min", "S", "ms", "U", "N"])
@pytest.mark.parametrize("method", ["ceil", "floor", "round"])
def test_dt_ceil_year_floor(freq, method):
ser = pd.Series(
2 changes: 1 addition & 1 deletion pandas/tests/frame/methods/test_asfreq.py
@@ -74,7 +74,7 @@ def test_tz_aware_asfreq_smoke(self, tz, frame_or_series):
obj = frame_or_series(np.random.randn(len(dr)), index=dr)

# it works!
obj.asfreq("T")
obj.asfreq("min")

def test_asfreq_normalize(self, frame_or_series):
rng = date_range("1/1/2000 09:30", periods=20)
4 changes: 2 additions & 2 deletions pandas/tests/frame/methods/test_equals.py
@@ -33,7 +33,7 @@ def test_equals(self):
index = np.random.random(10)
df1 = DataFrame(np.random.random(10), index=index, columns=["floats"])
df1["text"] = "the sky is so blue. we could use more chocolate.".split()
df1["start"] = date_range("2000-1-1", periods=10, freq="T")
df1["start"] = date_range("2000-1-1", periods=10, freq="min")
df1["end"] = date_range("2000-1-1", periods=10, freq="D")
df1["diff"] = df1["end"] - df1["start"]
# Explicitly cast to object, to avoid implicit cast when setting np.nan
@@ -64,7 +64,7 @@ def test_equals(self):
assert not df1.equals(different)

# DatetimeIndex
index = date_range("2000-1-1", periods=10, freq="T")
index = date_range("2000-1-1", periods=10, freq="min")
df1 = df1.set_index(index)
df2 = df1.copy()
assert df1.equals(df2)
4 changes: 2 additions & 2 deletions pandas/tests/frame/methods/test_join.py
@@ -553,13 +553,13 @@ def test_frame_join_tzaware(self):
test1 = DataFrame(
np.zeros((6, 3)),
index=date_range(
"2012-11-15 00:00:00", periods=6, freq="100L", tz="US/Central"
"2012-11-15 00:00:00", periods=6, freq="100ms", tz="US/Central"
),
)
test2 = DataFrame(
np.zeros((3, 3)),
index=date_range(
"2012-11-15 00:00:00", periods=3, freq="250L", tz="US/Central"
"2012-11-15 00:00:00", periods=3, freq="250ms", tz="US/Central"
),
columns=range(3, 6),
)
4 changes: 2 additions & 2 deletions pandas/tests/frame/methods/test_shift.py
@@ -73,8 +73,8 @@ def test_shift_mismatched_freq(self, frame_or_series):
np.random.randn(5), index=date_range("1/1/2000", periods=5, freq="H")
)

result = ts.shift(1, freq="5T")
exp_index = ts.index.shift(1, freq="5T")
result = ts.shift(1, freq="5min")
exp_index = ts.index.shift(1, freq="5min")
tm.assert_index_equal(result.index, exp_index)

# GH#1063, multiple of same base
6 changes: 3 additions & 3 deletions pandas/tests/frame/methods/test_to_timestamp.py
@@ -99,7 +99,7 @@ def test_to_timestamp_columns(self):
tm.assert_index_equal(result.columns, exp_index)

delta = timedelta(hours=23, minutes=59)
result = df.to_timestamp("T", "end", axis=1)
result = df.to_timestamp("min", "end", axis=1)
exp_index = _get_with_delta(delta)
exp_index = exp_index + Timedelta(1, "m") - Timedelta(1, "ns")
tm.assert_index_equal(result.columns, exp_index)
@@ -110,8 +110,8 @@ def test_to_timestamp_columns(self):
exp_index = exp_index + Timedelta(1, "s") - Timedelta(1, "ns")
tm.assert_index_equal(result.columns, exp_index)

result1 = df.to_timestamp("5t", axis=1)
result2 = df.to_timestamp("t", axis=1)
result1 = df.to_timestamp("5min", axis=1)
result2 = df.to_timestamp("min", axis=1)
expected = date_range("2001-01-01", "2009-01-01", freq="AS")
assert isinstance(result1.columns, DatetimeIndex)
assert isinstance(result2.columns, DatetimeIndex)
2 changes: 1 addition & 1 deletion pandas/tests/frame/test_constructors.py
@@ -2882,7 +2882,7 @@ def test_frame_datetime64_mixed_index_ctor_1681(self):

def test_frame_timeseries_column(self):
# GH19157
dr = date_range(start="20130101T10:00:00", periods=3, freq="T", tz="US/Eastern")
dr = date_range(start="20130101T10:00:00", periods=3, freq="min", tz="US/Eastern")
result = DataFrame(dr, columns=["timestamps"])
expected = DataFrame(
{
2 changes: 1 addition & 1 deletion pandas/tests/generic/test_frame.py
@@ -87,7 +87,7 @@ def test_metadata_propagation_indiv_resample(self):
np.random.randn(1000, 2),
index=date_range("20130101", periods=1000, freq="s"),
)
result = df.resample("1T")
result = df.resample("1min")
tm.assert_metadata_equivalent(df, result)

def test_metadata_propagation_indiv(self, monkeypatch):
6 changes: 3 additions & 3 deletions pandas/tests/generic/test_series.py
@@ -111,13 +111,13 @@ def test_metadata_propagation_indiv_resample(self):
index=date_range("20130101", periods=1000, freq="s"),
name="foo",
)
result = ts.resample("1T").mean()
result = ts.resample("1min").mean()
tm.assert_metadata_equivalent(ts, result)

result = ts.resample("1T").min()
result = ts.resample("1min").min()
tm.assert_metadata_equivalent(ts, result)

result = ts.resample("1T").apply(lambda x: x.sum())
result = ts.resample("1min").apply(lambda x: x.sum())
tm.assert_metadata_equivalent(ts, result)

def test_metadata_propagation_indiv(self, monkeypatch):
14 changes: 7 additions & 7 deletions pandas/tests/groupby/aggregate/test_aggregate.py
@@ -363,13 +363,13 @@ def test_agg_multiple_functions_same_name():
index=pd.date_range("1/1/2012", freq="S", periods=1000),
columns=["A", "B", "C"],
)
result = df.resample("3T").agg(
result = df.resample("3min").agg(
{"A": [partial(np.quantile, q=0.9999), partial(np.quantile, q=0.1111)]}
)
expected_index = pd.date_range("1/1/2012", freq="3T", periods=6)
expected_index = pd.date_range("1/1/2012", freq="3min", periods=6)
expected_columns = MultiIndex.from_tuples([("A", "quantile"), ("A", "quantile")])
expected_values = np.array(
[df.resample("3T").A.quantile(q=q).values for q in [0.9999, 0.1111]]
[df.resample("3min").A.quantile(q=q).values for q in [0.9999, 0.1111]]
).T
expected = DataFrame(
expected_values, columns=expected_columns, index=expected_index
@@ -385,10 +385,10 @@ def test_agg_multiple_functions_same_name_with_ohlc_present():
index=pd.date_range("1/1/2012", freq="S", periods=1000, name="dti"),
columns=Index(["A", "B", "C"], name="alpha"),
)
result = df.resample("3T").agg(
result = df.resample("3min").agg(
{"A": ["ohlc", partial(np.quantile, q=0.9999), partial(np.quantile, q=0.1111)]}
)
expected_index = pd.date_range("1/1/2012", freq="3T", periods=6, name="dti")
expected_index = pd.date_range("1/1/2012", freq="3min", periods=6, name="dti")
expected_columns = MultiIndex.from_tuples(
[
("A", "ohlc", "open"),
@@ -401,9 +401,9 @@ def test_agg_multiple_functions_same_name_with_ohlc_present():
names=["alpha", None, None],
)
non_ohlc_expected_values = np.array(
[df.resample("3T").A.quantile(q=q).values for q in [0.9999, 0.1111]]
[df.resample("3min").A.quantile(q=q).values for q in [0.9999, 0.1111]]
).T
expected_values = np.hstack([df.resample("3T").A.ohlc(), non_ohlc_expected_values])
expected_values = np.hstack([df.resample("3min").A.ohlc(), non_ohlc_expected_values])
expected = DataFrame(
expected_values, columns=expected_columns, index=expected_index
)
2 changes: 1 addition & 1 deletion pandas/tests/groupby/aggregate/test_cython.py
@@ -114,7 +114,7 @@ def test_cython_agg_nothing_to_agg_with_dates():
{
"a": np.random.randint(0, 5, 50),
"b": ["foo", "bar"] * 25,
"dates": pd.date_range("now", periods=50, freq="T"),
"dates": pd.date_range("now", periods=50, freq="min"),
}
)
msg = "Cannot use numeric_only=True with SeriesGroupBy.mean and non-numeric dtypes"
4 changes: 2 additions & 2 deletions pandas/tests/groupby/test_categorical.py
@@ -1137,7 +1137,7 @@ def test_groupby_multiindex_categorical_datetime():
{
"key1": Categorical(list("abcbabcba")),
"key2": Categorical(
list(pd.date_range("2018-06-01 00", freq="1T", periods=3)) * 3
list(pd.date_range("2018-06-01 00", freq="1min", periods=3)) * 3
),
"values": np.arange(9),
}
@@ -1147,7 +1147,7 @@ def test_groupby_multiindex_categorical_datetime():
idx = MultiIndex.from_product(
[
Categorical(["a", "b", "c"]),
Categorical(pd.date_range("2018-06-01 00", freq="1T", periods=3)),
Categorical(pd.date_range("2018-06-01 00", freq="1min", periods=3)),
],
names=["key1", "key2"],
)
2 changes: 1 addition & 1 deletion pandas/tests/groupby/test_counting.py
@@ -266,7 +266,7 @@ def test_groupby_timedelta_cython_count():

def test_count():
n = 1 << 15
dr = date_range("2015-08-30", periods=n // 10, freq="T")
dr = date_range("2015-08-30", periods=n // 10, freq="min")

df = DataFrame(
{
4 changes: 2 additions & 2 deletions pandas/tests/groupby/test_groupby.py
@@ -3110,12 +3110,12 @@ def test_groupby_with_Time_Grouper():

expected_output = DataFrame(
{
"time2": date_range("2016-08-31 22:08:00", periods=13, freq="1T"),
"time2": date_range("2016-08-31 22:08:00", periods=13, freq="1min"),
"quant": [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
"quant2": [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
}
)

df = test_data.groupby(Grouper(key="time2", freq="1T")).count().reset_index()
df = test_data.groupby(Grouper(key="time2", freq="1min")).count().reset_index()

tm.assert_frame_equal(df, expected_output)
2 changes: 1 addition & 1 deletion pandas/tests/groupby/test_quantile.py
@@ -420,7 +420,7 @@ def test_timestamp_groupby_quantile():
df = DataFrame(
{
"timestamp": pd.date_range(
start="2020-04-19 00:00:00", freq="1T", periods=100, tz="UTC"
start="2020-04-19 00:00:00", freq="1min", periods=100, tz="UTC"
).floor("1H"),
"category": list(range(1, 101)),
"value": list(range(101, 201)),
2 changes: 1 addition & 1 deletion pandas/tests/groupby/test_timegrouper.py
@@ -755,7 +755,7 @@ def test_timezone_info(self):

def test_datetime_count(self):
df = DataFrame(
{"a": [1, 2, 3] * 2, "dates": date_range("now", periods=6, freq="T")}
{"a": [1, 2, 3] * 2, "dates": date_range("now", periods=6, freq="min")}
)
result = df.groupby("a").dates.count()
expected = Series([2, 2, 2], index=Index([1, 2, 3], name="a"), name="dates")
2 changes: 1 addition & 1 deletion pandas/tests/indexes/conftest.py
@@ -25,7 +25,7 @@ def sort(request):
return request.param


@pytest.fixture(params=["D", "3D", "-3D", "H", "2H", "-2H", "T", "2T", "S", "-3S"])
@pytest.fixture(params=["D", "3D", "-3D", "H", "2H", "-2H", "min", "2min", "S", "-3S"])
def freq_sample(request):
"""
Valid values for 'freq' parameter used to create date_range and
1 change: 1 addition & 0 deletions pandas/tests/indexes/datetimelike_/test_drop_duplicates.py
@@ -14,6 +14,7 @@
class DropDuplicates:
def test_drop_duplicates_metadata(self, idx):
# GH#10115
print("11111111 = ", idx)
result = idx.drop_duplicates()
tm.assert_index_equal(idx, result)
assert idx.freq == result.freq
2 changes: 1 addition & 1 deletion pandas/tests/indexes/datetimes/methods/test_shift.py
@@ -96,7 +96,7 @@ def test_dti_shift_localized(self, tzstr):
dr = date_range("2011/1/1", "2012/1/1", freq="W-FRI")
dr_tz = dr.tz_localize(tzstr)

result = dr_tz.shift(1, "10T")
result = dr_tz.shift(1, "10min")
assert result.tz == dr_tz.tz

def test_dti_shift_across_dst(self):
6 changes: 3 additions & 3 deletions pandas/tests/indexes/datetimes/methods/test_to_period.py
@@ -126,10 +126,10 @@ def test_to_period_millisecond(self):

with tm.assert_produces_warning(UserWarning):
# warning that timezone info will be lost
period = index.to_period(freq="L")
period = index.to_period(freq="ms")
assert 2 == len(period)
assert period[0] == Period("2007-01-01 10:11:12.123Z", "L")
assert period[1] == Period("2007-01-01 10:11:13.789Z", "L")
assert period[0] == Period("2007-01-01 10:11:12.123Z", "ms")
assert period[1] == Period("2007-01-01 10:11:13.789Z", "ms")

def test_to_period_microsecond(self):
index = DatetimeIndex(
2 changes: 1 addition & 1 deletion pandas/tests/indexes/datetimes/test_constructors.py
@@ -1034,7 +1034,7 @@ def test_constructor_int64_nocopy(self):
assert (index.asi8[50:100] != -1).all()

@pytest.mark.parametrize(
"freq", ["M", "Q", "A", "D", "B", "BH", "T", "S", "L", "U", "H", "N", "C"]
"freq", ["M", "Q", "A", "D", "B", "BH", "min", "S", "ms", "U", "H", "N", "C"]
)
def test_from_freq_recreate_from_data(self, freq):
org = date_range(start="2001/02/01 09:00", freq=freq, periods=1)
6 changes: 3 additions & 3 deletions pandas/tests/indexes/datetimes/test_date_range.py
@@ -123,7 +123,7 @@ def test_date_range_timestamp_equiv_preserve_frequency(self):


class TestDateRanges:
@pytest.mark.parametrize("freq", ["N", "U", "L", "T", "S", "H", "D"])
@pytest.mark.parametrize("freq", ["N", "U", "ms", "min", "S", "H", "D"])
def test_date_range_edges(self, freq):
# GH#13672
td = Timedelta(f"1{freq}")
@@ -761,13 +761,13 @@ def test_freq_divides_end_in_nanos(self):
expected_1 = DatetimeIndex(
["2005-01-12 10:00:00", "2005-01-12 15:45:00"],
dtype="datetime64[ns]",
freq="345T",
freq="345min",
tz=None,
)
expected_2 = DatetimeIndex(
["2005-01-13 10:00:00", "2005-01-13 15:45:00"],
dtype="datetime64[ns]",
freq="345T",
freq="345min",
tz=None,
)
tm.assert_index_equal(result_1, expected_1)
4 changes: 2 additions & 2 deletions pandas/tests/indexes/datetimes/test_ops.py
@@ -25,9 +25,9 @@ class TestDatetimeIndexOps:
("M", "day"),
("D", "day"),
("H", "hour"),
("T", "minute"),
("min", "minute"),
("S", "second"),
("L", "millisecond"),
("ms", "millisecond"),
("U", "microsecond"),
],
)
4 changes: 2 additions & 2 deletions pandas/tests/indexes/datetimes/test_partial_slicing.py
@@ -201,7 +201,7 @@ def test_partial_slice_daily(self):
s["2004-12-31 00"]

def test_partial_slice_hourly(self):
rng = date_range(freq="T", start=datetime(2005, 1, 1, 20, 0, 0), periods=500)
rng = date_range(freq="min", start=datetime(2005, 1, 1, 20, 0, 0), periods=500)
s = Series(np.arange(len(rng)), index=rng)

result = s["2005-1-1"]
@@ -333,7 +333,7 @@ def test_partial_slicing_with_multiindex(self):
"TICKER": ["ABC", "MNP", "XYZ", "XYZ"],
"val": [1, 2, 3, 4],
},
index=date_range("2013-06-19 09:30:00", periods=4, freq="5T"),
index=date_range("2013-06-19 09:30:00", periods=4, freq="5min"),
)
df_multi = df.set_index(["ACCOUNT", "TICKER"], append=True)

8 changes: 4 additions & 4 deletions pandas/tests/indexes/datetimes/test_scalar_compat.py
@@ -175,7 +175,7 @@ def test_no_rounding_occurs(self, tz_naive_fixture):
]
)

tm.assert_index_equal(rng.round(freq="2T"), expected_rng)
tm.assert_index_equal(rng.round(freq="2min"), expected_rng)

@pytest.mark.parametrize(
"test_input, rounder, freq, expected",
@@ -196,8 +196,8 @@ def test_no_rounding_occurs(self, tz_naive_fixture):
),
(["1823-01-01 00:00:01"], "floor", "1s", ["1823-01-01 00:00:01"]),
(["1823-01-01 00:00:01"], "ceil", "1s", ["1823-01-01 00:00:01"]),
(["2018-01-01 00:15:00"], "ceil", "15T", ["2018-01-01 00:15:00"]),
(["2018-01-01 00:15:00"], "floor", "15T", ["2018-01-01 00:15:00"]),
(["2018-01-01 00:15:00"], "ceil", "15min", ["2018-01-01 00:15:00"]),
(["2018-01-01 00:15:00"], "floor", "15min", ["2018-01-01 00:15:00"]),
(["1823-01-01 03:00:00"], "ceil", "3H", ["1823-01-01 03:00:00"]),
(["1823-01-01 03:00:00"], "floor", "3H", ["1823-01-01 03:00:00"]),
(
@@ -333,7 +333,7 @@ def test_hour(self):
tm.assert_index_equal(r1, r2)

def test_minute(self):
dr = date_range(start=Timestamp("2000-02-27"), periods=5, freq="T")
dr = date_range(start=Timestamp("2000-02-27"), periods=5, freq="min")
r1 = pd.Index([x.to_julian_date() for x in dr])
r2 = dr.to_julian_date()
assert isinstance(r2, pd.Index) and r2.dtype == np.float64