Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

DEPR: rename to _consolidate and create deprecation warning #15501

Merged
merged 2 commits into from
Feb 28, 2017
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions doc/source/whatsnew/v0.20.0.txt
Original file line number Diff line number Diff line change
Expand Up @@ -484,6 +484,7 @@ Deprecations
- ``DataFrame.astype()`` has deprecated the ``raise_on_error`` parameter in favor of ``errors`` (:issue:`14878`)
- ``Series.sortlevel`` and ``DataFrame.sortlevel`` have been deprecated in favor of ``Series.sort_index`` and ``DataFrame.sort_index`` (:issue:`15099`)
- importing ``concat`` from ``pandas.tools.merge`` has been deprecated in favor of imports from the ``pandas`` namespace. This should only affect explicit imports (:issue:`15358`)
- ``Series/DataFrame/Panel.consolidate()`` has been deprecated as a public method. (:issue:`15483`)

.. _whatsnew_0200.prior_deprecations:

Expand Down
14 changes: 11 additions & 3 deletions pandas/core/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -2874,11 +2874,10 @@ def f():

self._protect_consolidate(f)

def consolidate(self, inplace=False):
def _consolidate(self, inplace=False):
"""
Compute NDFrame with "consolidated" internals (data of each dtype
grouped together in a single ndarray). Mainly an internal API function,
but available here to the savvy user
grouped together in a single ndarray).

Parameters
----------
Expand All @@ -2897,6 +2896,15 @@ def consolidate(self, inplace=False):
cons_data = self._protect_consolidate(f)
return self._constructor(cons_data).__finalize__(self)

def consolidate(self, inplace=False):
    """
    DEPRECATED: consolidate will be an internal implementation only.

    Public alias kept for backward compatibility; emits a FutureWarning
    and delegates unchanged to ``_consolidate``.

    Parameters
    ----------
    inplace : bool, default False
        Passed through to ``_consolidate`` unchanged.

    Returns
    -------
    Whatever ``_consolidate(inplace)`` returns.
    """
    # GH 15483: deprecate the public consolidate() API in favor of
    # the private _consolidate(); stacklevel=2 points the warning at
    # the caller rather than this wrapper.
    warnings.warn("consolidate is deprecated and will be removed in a "
                  "future release.", FutureWarning, stacklevel=2)
    return self._consolidate(inplace)

@property
def _is_mixed_type(self):
f = lambda: self._data.is_mixed_type
Expand Down
4 changes: 2 additions & 2 deletions pandas/core/groupby.py
Original file line number Diff line number Diff line change
Expand Up @@ -3893,7 +3893,7 @@ def _wrap_aggregated_output(self, output, names=None):
if not self.as_index:
result = DataFrame(output, columns=output_keys)
self._insert_inaxis_grouper_inplace(result)
result = result.consolidate()
result = result._consolidate()
else:
index = self.grouper.result_index
result = DataFrame(output, index=index, columns=output_keys)
Expand All @@ -3913,7 +3913,7 @@ def _wrap_agged_blocks(self, items, blocks):
result = DataFrame(mgr)

self._insert_inaxis_grouper_inplace(result)
result = result.consolidate()
result = result._consolidate()
else:
index = self.grouper.result_index
mgr = BlockManager(blocks, [items, index])
Expand Down
6 changes: 3 additions & 3 deletions pandas/io/pytables.py
Original file line number Diff line number Diff line change
Expand Up @@ -835,7 +835,7 @@ def func(_start, _stop, _where):

# concat and return
return concat(objs, axis=axis,
verify_integrity=False).consolidate()
verify_integrity=False)._consolidate()

# create the iterator
it = TableIterator(self, s, func, where=where, nrows=nrows,
Expand Down Expand Up @@ -3442,7 +3442,7 @@ def get_blk_items(mgr, blocks):
return [mgr.items.take(blk.mgr_locs) for blk in blocks]

# figure out data_columns and get out blocks
block_obj = self.get_object(obj).consolidate()
block_obj = self.get_object(obj)._consolidate()
blocks = block_obj._data.blocks
blk_items = get_blk_items(block_obj._data, blocks)
if len(self.non_index_axes):
Expand Down Expand Up @@ -3809,7 +3809,7 @@ def read(self, where=None, columns=None, **kwargs):
if len(objs) == 1:
wp = objs[0]
else:
wp = concat(objs, axis=0, verify_integrity=False).consolidate()
wp = concat(objs, axis=0, verify_integrity=False)._consolidate()

# apply the selection filters & axis orderings
wp = self.process_axes(wp, columns=columns)
Expand Down
11 changes: 8 additions & 3 deletions pandas/tests/frame/test_block_internals.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,19 +40,24 @@ def test_cast_internals(self):

def test_consolidate(self):
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

can you add a single test (test_consolidate_deprecation), which uses assert_produces_warning(FutureWarning)

self.frame['E'] = 7.
consolidated = self.frame.consolidate()
consolidated = self.frame._consolidate()
self.assertEqual(len(consolidated._data.blocks), 1)

# Ensure copy, do I want this?
recons = consolidated.consolidate()
recons = consolidated._consolidate()
self.assertIsNot(recons, consolidated)
assert_frame_equal(recons, consolidated)

self.frame['F'] = 8.
self.assertEqual(len(self.frame._data.blocks), 3)
self.frame.consolidate(inplace=True)
self.frame._consolidate(inplace=True)
self.assertEqual(len(self.frame._data.blocks), 1)

def test_consolidate_deprecation(self):
    # GH 15483: the public DataFrame.consolidate() must emit a
    # FutureWarning now that _consolidate is the internal API.
    self.frame['E'] = 7
    with tm.assert_produces_warning(FutureWarning):
        self.frame.consolidate()

def test_consolidate_inplace(self):
frame = self.frame.copy() # noqa

Expand Down
2 changes: 1 addition & 1 deletion pandas/tests/frame/test_nonunique_indexes.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ def check(result, expected=None):
check(df, expected)

# consolidate
df = df.consolidate()
df = df._consolidate()
expected = DataFrame([[1, 1, 'bah', 3], [1, 2, 'bah', 3],
[2, 3, 'bah', 3]],
columns=['foo', 'foo', 'string', 'foo2'])
Expand Down
14 changes: 7 additions & 7 deletions pandas/tests/io/test_pytables.py
Original file line number Diff line number Diff line change
Expand Up @@ -418,7 +418,7 @@ def test_repr(self):
df['datetime1'] = datetime.datetime(2001, 1, 2, 0, 0)
df['datetime2'] = datetime.datetime(2001, 1, 3, 0, 0)
df.loc[3:6, ['obj1']] = np.nan
df = df.consolidate()._convert(datetime=True)
df = df._consolidate()._convert(datetime=True)

warnings.filterwarnings('ignore', category=PerformanceWarning)
store['df'] = df
Expand Down Expand Up @@ -762,7 +762,7 @@ def test_put_mixed_type(self):
df['datetime1'] = datetime.datetime(2001, 1, 2, 0, 0)
df['datetime2'] = datetime.datetime(2001, 1, 3, 0, 0)
df.loc[3:6, ['obj1']] = np.nan
df = df.consolidate()._convert(datetime=True)
df = df._consolidate()._convert(datetime=True)

with ensure_clean_store(self.path) as store:
_maybe_remove(store, 'df')
Expand Down Expand Up @@ -2077,7 +2077,7 @@ def test_table_mixed_dtypes(self):
df['datetime1'] = datetime.datetime(2001, 1, 2, 0, 0)
df['datetime2'] = datetime.datetime(2001, 1, 3, 0, 0)
df.loc[3:6, ['obj1']] = np.nan
df = df.consolidate()._convert(datetime=True)
df = df._consolidate()._convert(datetime=True)

with ensure_clean_store(self.path) as store:
store.append('df1_mixed', df)
Expand All @@ -2091,7 +2091,7 @@ def test_table_mixed_dtypes(self):
wp['bool2'] = wp['ItemB'] > 0
wp['int1'] = 1
wp['int2'] = 2
wp = wp.consolidate()
wp = wp._consolidate()

with ensure_clean_store(self.path) as store:
store.append('p1_mixed', wp)
Expand All @@ -2106,7 +2106,7 @@ def test_table_mixed_dtypes(self):
wp['bool2'] = wp['l2'] > 0
wp['int1'] = 1
wp['int2'] = 2
wp = wp.consolidate()
wp = wp._consolidate()

with ensure_clean_store(self.path) as store:
store.append('p4d_mixed', wp)
Expand Down Expand Up @@ -2134,7 +2134,7 @@ def test_unimplemented_dtypes_table_columns(self):
df['obj1'] = 'foo'
df['obj2'] = 'bar'
df['datetime1'] = datetime.date(2001, 1, 2)
df = df.consolidate()._convert(datetime=True)
df = df._consolidate()._convert(datetime=True)

with ensure_clean_store(self.path) as store:
# this fails because we have a date in the object block......
Expand Down Expand Up @@ -2949,7 +2949,7 @@ def _make_one():
df['bool2'] = df['B'] > 0
df['int1'] = 1
df['int2'] = 2
return df.consolidate()
return df._consolidate()

df1 = _make_one()
df2 = _make_one()
Expand Down
2 changes: 1 addition & 1 deletion pandas/tests/test_generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -658,7 +658,7 @@ def test_validate_bool_args(self):
super(DataFrame, df).sort_index(inplace=value)

with self.assertRaises(ValueError):
super(DataFrame, df).consolidate(inplace=value)
super(DataFrame, df)._consolidate(inplace=value)

with self.assertRaises(ValueError):
super(DataFrame, df).fillna(value=0, inplace=value)
Expand Down
2 changes: 1 addition & 1 deletion pandas/tests/test_panel4d.py
Original file line number Diff line number Diff line change
Expand Up @@ -677,7 +677,7 @@ def test_consolidate(self):
self.panel4d['foo'] = 1.
self.assertFalse(self.panel4d._data.is_consolidated())

panel4d = self.panel4d.consolidate()
panel4d = self.panel4d._consolidate()
self.assertTrue(panel4d._data.is_consolidated())

def test_ctor_dict(self):
Expand Down
2 changes: 1 addition & 1 deletion pandas/tools/concat.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,7 +263,7 @@ def __init__(self, objs, axis=0, join='outer', join_axes=None,
raise TypeError("cannot concatenate a non-NDFrame object")

# consolidate
obj.consolidate(inplace=True)
obj._consolidate(inplace=True)
ndims.add(obj.ndim)

# get the sample
Expand Down
2 changes: 1 addition & 1 deletion pandas/tseries/resample.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,7 @@ def _convert_obj(self, obj):
-------
obj : converted object
"""
obj = obj.consolidate()
obj = obj._consolidate()
return obj

def _get_binner_for_time(self):
Expand Down