Comparing changes

This is a direct comparison between two commits made in this repository or its related repositories.

base repository: pandas-dev/pandas
base: 50baa9aebbc0d59bcc55ce2a6c28f4005ce1503e
head repository: pandas-dev/pandas
compare: 15f2946fe4be5a4615411a059fb1c81e7ef0af52
Showing with 2,562 additions and 1,615 deletions.
  1. +1 −1 asv_bench/benchmarks/reshape.py
  2. +1 −1 doc/source/io.rst
  3. +52 −7 doc/source/whatsnew/v0.24.0.txt
  4. +1 −1 pandas/_libs/algos.pyx
  5. +3 −1 pandas/_libs/groupby.pyx
  6. +4 −4 pandas/_libs/hashing.pyx
  7. +3 −3 pandas/_libs/index.pyx
  8. +3 −11 pandas/_libs/src/numpy_helper.h
  9. +2 −2 pandas/_libs/src/ujson/python/objToJSON.c
  10. +9 −22 pandas/_libs/tslib.pyx
  11. +28 −24 pandas/_libs/tslibs/conversion.pyx
  12. +2 −3 pandas/_libs/tslibs/fields.pyx
  13. +18 −19 pandas/_libs/tslibs/parsing.pyx
  14. +25 −19 pandas/_libs/tslibs/period.pyx
  15. +4 −3 pandas/_libs/tslibs/resolution.pyx
  16. +4 −0 pandas/_libs/{ → tslibs}/src/datetime/np_datetime.c
  17. +4 −0 pandas/_libs/{ → tslibs}/src/datetime/np_datetime.h
  18. +4 −0 pandas/_libs/{ → tslibs}/src/datetime/np_datetime_strings.c
  19. +4 −0 pandas/_libs/{ → tslibs}/src/datetime/np_datetime_strings.h
  20. +13 −11 pandas/_libs/{ → tslibs}/src/period_helper.c
  21. +5 −3 pandas/_libs/{ → tslibs}/src/period_helper.h
  22. +5 −5 pandas/_libs/tslibs/strptime.pyx
  23. +2 −2 pandas/_libs/tslibs/timedeltas.pxd
  24. +7 −7 pandas/_libs/tslibs/timedeltas.pyx
  25. +3 −3 pandas/_libs/tslibs/timestamps.pyx
  26. +3 −4 pandas/_libs/tslibs/timezones.pyx
  27. +146 −97 pandas/_libs/tslibs/util.pxd
  28. +80 −0 pandas/_libs/util.pxd
  29. +0 −11 pandas/core/api.py
  30. +22 −3 pandas/core/arrays/categorical.py
  31. +27 −15 pandas/core/arrays/datetimes.py
  32. +0 −4 pandas/core/arrays/interval.py
  33. +0 −55 pandas/core/datetools.py
  34. +8 −2 pandas/core/dtypes/cast.py
  35. +10 −15 pandas/core/dtypes/common.py
  36. +8 −5 pandas/core/dtypes/dtypes.py
  37. +11 −7 pandas/core/frame.py
  38. +5 −4 pandas/core/generic.py
  39. +3 −1 pandas/core/groupby/generic.py
  40. +3 −0 pandas/core/groupby/groupby.py
  41. +8 −2 pandas/core/indexes/interval.py
  42. +11 −1 pandas/core/indexes/period.py
  43. +6 −4 pandas/core/internals/blocks.py
  44. +30 −1 pandas/core/ops.py
  45. +7 −4 pandas/core/series.py
  46. +4 −4 pandas/io/clipboards.py
  47. +21 −20 pandas/io/formats/csvs.py
  48. +1 −1 pandas/io/json/json.py
  49. +26 −5 pandas/io/parsers.py
  50. +5 −1 pandas/plotting/_converter.py
  51. +1 −14 pandas/tests/api/test_api.py
  52. +22 −1 pandas/tests/arrays/categorical/test_constructors.py
  53. +17 −7 pandas/tests/dtypes/test_dtypes.py
  54. +2 −1 pandas/tests/extension/base/setitem.py
  55. +7 −3 pandas/tests/extension/integer/test_integer.py
  56. +4 −1 pandas/tests/extension/json/test_json.py
  57. +25 −25 pandas/tests/frame/test_analytics.py
  58. +8 −9 pandas/tests/frame/test_apply.py
  59. +2 −1 pandas/tests/frame/test_arithmetic.py
  60. +6 −23 pandas/tests/frame/test_axis_select_reindex.py
  61. +2 −1 pandas/tests/frame/test_duplicates.py
  62. +53 −37 pandas/tests/frame/test_operators.py
  63. +9 −1 pandas/tests/frame/test_period.py
  64. +12 −9 pandas/tests/frame/test_query_eval.py
  65. +37 −42 pandas/tests/frame/test_replace.py
  66. +14 −14 pandas/tests/frame/test_reshape.py
  67. +11 −1 pandas/tests/groupby/aggregate/test_other.py
  68. +4 −3 pandas/tests/groupby/test_apply.py
  69. +36 −26 pandas/tests/groupby/test_rank.py
  70. +117 −4 pandas/tests/indexes/datetimes/test_arithmetic.py
  71. +178 −166 pandas/tests/indexes/datetimes/test_date_range.py
  72. +2 −2 pandas/tests/indexes/interval/test_astype.py
  73. +4 −2 pandas/tests/indexes/interval/test_interval.py
  74. +1 −1 pandas/tests/indexes/multi/test_missing.py
  75. +2 −1 pandas/tests/indexes/period/test_arithmetic.py
  76. +13 −0 pandas/tests/indexes/period/test_period.py
  77. +2 −1 pandas/tests/indexes/period/test_scalar_compat.py
  78. +11 −0 pandas/tests/indexes/period/test_tools.py
  79. +5 −3 pandas/tests/indexes/test_base.py
  80. +2 −1 pandas/tests/indexes/test_numeric.py
  81. +0 −72 pandas/tests/indexes/timedeltas/test_arithmetic.py
  82. +2 −2 pandas/tests/io/conftest.py
  83. +3 −3 pandas/tests/io/formats/test_format.py
  84. +1 −1 pandas/tests/io/formats/test_to_csv.py
  85. +9 −3 pandas/tests/io/json/test_json_table_schema.py
  86. +2 −2 pandas/tests/io/parser/test_network.py
  87. +9 −3 pandas/tests/io/parser/test_parsers.py
  88. +12 −12 pandas/tests/io/parser/test_unsupported.py
  89. +53 −32 pandas/tests/io/test_common.py
  90. +99 −0 pandas/tests/io/test_compression.py
  91. +2 −1 pandas/tests/io/test_excel.py
  92. +3 −1 pandas/tests/io/test_parquet.py
  93. +11 −0 pandas/tests/plotting/test_datetimelike.py
  94. +2 −1 pandas/tests/plotting/test_frame.py
  95. +0 −1 pandas/tests/plotting/test_misc.py
  96. +11 −7 pandas/tests/reshape/test_pivot.py
  97. +2 −1 pandas/tests/scalar/period/test_asfreq.py
  98. +15 −10 pandas/tests/scalar/period/test_period.py
  99. +19 −13 pandas/tests/series/test_alter_axes.py
  100. +1 −1 pandas/tests/series/test_analytics.py
  101. +1 −348 pandas/tests/series/test_arithmetic.py
  102. +9 −10 pandas/tests/series/test_datetime_values.py
  103. +93 −69 pandas/tests/series/test_operators.py
  104. +30 −8 pandas/tests/series/test_period.py
  105. +25 −24 pandas/tests/series/test_quantile.py
  106. +2 −1 pandas/tests/series/test_rank.py
  107. +53 −45 pandas/tests/series/test_timeseries.py
  108. +4 −4 pandas/tests/sparse/frame/test_analytics.py
  109. +6 −5 pandas/tests/sparse/frame/test_frame.py
  110. +8 −8 pandas/tests/sparse/frame/test_indexing.py
  111. +4 −4 pandas/tests/sparse/series/test_indexing.py
  112. +2 −4 pandas/tests/test_algos.py
  113. +742 −11 pandas/tests/test_arithmetic.py
  114. +2 −1 pandas/tests/test_base.py
  115. +7 −62 pandas/tests/test_common.py
  116. +1 −1 pandas/tests/test_downstream.py
  117. +3 −5 pandas/tests/test_multilevel.py
  118. +6 −2 pandas/tests/test_resample.py
  119. +3 −2 pandas/tests/test_window.py
  120. +1 −1 pandas/tseries/offsets.py
  121. +3 −1 pandas/util/testing.py
  122. +40 −15 setup.py
2 changes: 1 addition & 1 deletion asv_bench/benchmarks/reshape.py
@@ -141,7 +141,7 @@ class GetDummies(object):

def setup(self):
categories = list(string.ascii_letters[:12])
s = pd.Series(np.random.choice(categories, size=1_000_000),
s = pd.Series(np.random.choice(categories, size=1000000),
dtype=pd.api.types.CategoricalDtype(categories))
self.s = s

2 changes: 1 addition & 1 deletion doc/source/io.rst
@@ -298,7 +298,7 @@ compression : {``'infer'``, ``'gzip'``, ``'bz2'``, ``'zip'``, ``'xz'``, ``None``
Set to ``None`` for no decompression.

.. versionadded:: 0.18.1 support for 'zip' and 'xz' compression.

.. versionchanged:: 0.24.0 'infer' option added and set to default.
thousands : str, default ``None``
Thousands separator.
decimal : str, default ``'.'``
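As an illustrative aside, a minimal sketch of the inference described above (the filename is hypothetical):

.. code-block:: python

    import pandas as pd

    # With compression='infer' (now the default), the codec is deduced from
    # the file extension, so '.gz' selects gzip decompression here.
    df = pd.read_csv("data.csv.gz", compression="infer")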
59 changes: 52 additions & 7 deletions doc/source/whatsnew/v0.24.0.txt
@@ -177,7 +177,8 @@ Other Enhancements
- :func:`read_html` copies cell data across ``colspan`` and ``rowspan``, and it treats all-``th`` table rows as headers if ``header`` kwarg is not given and there is no ``thead`` (:issue:`17054`)
- :meth:`Series.nlargest`, :meth:`Series.nsmallest`, :meth:`DataFrame.nlargest`, and :meth:`DataFrame.nsmallest` now accept the value ``"all"`` for the ``keep`` argument. This keeps all ties for the nth largest/smallest value (:issue:`16818`)
- :class:`IntervalIndex` has gained the :meth:`~IntervalIndex.set_closed` method to change the existing ``closed`` value (:issue:`21670`)
- :func:`~DataFrame.to_csv` and :func:`~DataFrame.to_json` now support ``compression='infer'`` to infer compression based on filename (:issue:`15008`)
- :func:`~DataFrame.to_csv`, :func:`~Series.to_csv`, :func:`~DataFrame.to_json`, and :func:`~Series.to_json` now support ``compression='infer'`` to infer compression based on filename extension (:issue:`15008`).
The default compression for ``to_csv``, ``to_json``, and ``to_pickle`` methods has been updated to ``'infer'`` (:issue:`22004`).
- :func:`to_timedelta` now supports ISO-formatted timedelta strings (:issue:`21877`)
- :class:`Series` and :class:`DataFrame` now support :class:`Iterable` in constructor (:issue:`2193`)
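A short sketch of the two enhancements noted above, with a hypothetical output path:

.. code-block:: python

    import pandas as pd

    # Writers now default to compression='infer', so the '.gz' extension
    # alone selects gzip output.
    pd.DataFrame({"a": [1, 2]}).to_csv("out.csv.gz")

    # ISO 8601 duration strings are now accepted by to_timedelta.
    pd.to_timedelta("P0DT0H1M30S")  # Timedelta('0 days 00:01:30')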

@@ -281,6 +282,43 @@ that the dates have been converted to UTC
.. ipython:: python
pd.to_datetime(["2015-11-18 15:30:00+05:30", "2015-11-18 16:30:00+06:30"], utc=True)

.. _whatsnew_0240.api_breaking.period_end_time:

Time values in ``dt.end_time`` and ``to_timestamp(how='end')``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The time values in :class:`Period` and :class:`PeriodIndex` objects are now set
to '23:59:59.999999999' when calling :attr:`Series.dt.end_time`, :attr:`Period.end_time`,
:attr:`PeriodIndex.end_time`, :func:`Period.to_timestamp()` with ``how='end'``,
or :func:`PeriodIndex.to_timestamp()` with ``how='end'`` (:issue:`17157`)

Previous Behavior:

.. code-block:: ipython

In [2]: p = pd.Period('2017-01-01', 'D')
In [3]: pi = pd.PeriodIndex([p])

In [4]: pd.Series(pi).dt.end_time[0]
Out[4]: Timestamp(2017-01-01 00:00:00)

In [5]: p.end_time
Out[5]: Timestamp(2017-01-01 23:59:59.999999999)

Current Behavior:

Calling :attr:`Series.dt.end_time` will now result in a time of '23:59:59.999999999' as
is the case with :attr:`Period.end_time`, for example

.. ipython:: python

p = pd.Period('2017-01-01', 'D')
pi = pd.PeriodIndex([p])

pd.Series(pi).dt.end_time[0]

p.end_time

.. _whatsnew_0240.api.datetimelike.normalize:

Tick DateOffset Normalize Restrictions
@@ -439,6 +477,8 @@ Deprecations
- :meth:`MultiIndex.to_hierarchical` is deprecated and will be removed in a future version (:issue:`21613`)
- :meth:`Series.ptp` is deprecated. Use ``numpy.ptp`` instead (:issue:`21614`)
- :meth:`Series.compress` is deprecated. Use ``Series[condition]`` instead (:issue:`18262`)
- :meth:`Categorical.from_codes` has deprecated providing float values for the ``codes`` argument. (:issue:`21767`)
- :func:`pandas.read_table` is deprecated. Instead, use :func:`pandas.read_csv` passing ``sep='\t'`` if necessary (:issue:`21948`)
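A minimal sketch of the ``read_table`` replacement suggested by the deprecation note above (hypothetical filename):

.. code-block:: python

    import pandas as pd

    # read_table() is deprecated; the equivalent read_csv() call simply
    # passes the tab separator explicitly.
    df = pd.read_csv("data.tsv", sep="\t")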

.. _whatsnew_0240.prior_deprecations:

@@ -447,7 +487,7 @@ Removal of prior version deprecations/changes

- The ``LongPanel`` and ``WidePanel`` classes have been removed (:issue:`10892`)
- Several private functions were removed from the (non-public) module ``pandas.core.common`` (:issue:`22001`)
-
- Removal of the previously deprecated module ``pandas.core.datetools`` (:issue:`14105`, :issue:`14094`)
-

.. _whatsnew_0240.performance:
@@ -486,16 +526,17 @@ Bug Fixes
Categorical
^^^^^^^^^^^

-
-
-
- Bug in :meth:`Categorical.from_codes` where ``NaN`` values in `codes` were silently converted to ``0`` (:issue:`21767`). In the future this will raise a ``ValueError``. Also changes the behavior of `.from_codes([1.1, 2.0])`.
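A brief sketch of the ``Categorical.from_codes`` behavior referenced above: ``-1`` is the supported missing-value code, while float and ``NaN`` codes are deprecated.

.. code-block:: python

    import pandas as pd

    # -1 marks a missing value; float codes such as [1.1, 2.0] and NaN codes
    # are deprecated and will raise in a future version.
    cat = pd.Categorical.from_codes([0, 1, -1], categories=["a", "b"])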

Datetimelike
^^^^^^^^^^^^

- Fixed bug where two :class:`DateOffset` objects with different ``normalize`` attributes could evaluate as equal (:issue:`21404`)
- Fixed bug where :meth:`Timestamp.resolution` incorrectly returned 1-microsecond ``timedelta`` instead of 1-nanosecond :class:`Timedelta` (:issue:`21336`, :issue:`21365`)
- Bug in :func:`to_datetime` that did not consistently return an :class:`Index` when ``box=True`` was specified (:issue:`21864`)
- Bug in :class:`DatetimeIndex` comparisons where string comparisons incorrectly raised ``TypeError`` (:issue:`22074`)
- Bug in :class:`DatetimeIndex` comparisons when comparing against ``timedelta64[ns]`` dtyped arrays; in some cases ``TypeError`` was incorrectly raised, in others it incorrectly failed to raise (:issue:`22074`)
- Bug in :class:`DatetimeIndex` comparisons when comparing against object-dtyped arrays (:issue:`22074`)

Timedelta
^^^^^^^^^
@@ -518,6 +559,7 @@ Timezones
- Bug in :class:`Index` with ``datetime64[ns, tz]`` dtype that did not localize integer data correctly (:issue:`20964`)
- Bug in :class:`DatetimeIndex` where constructing with an integer and tz would not localize correctly (:issue:`12619`)
- Fixed bug where :meth:`DataFrame.describe` and :meth:`Series.describe` on tz-aware datetimes did not show `first` and `last` result (:issue:`21328`)
- Bug in :class:`DatetimeIndex` comparisons failing to raise ``TypeError`` when comparing timezone-aware ``DatetimeIndex`` against ``np.datetime64`` (:issue:`22074`)
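A small sketch of the comparison behavior described in the Timezones entry above, wrapped in ``try``/``except`` since only ordering comparisons are expected to raise:

.. code-block:: python

    import numpy as np
    import pandas as pd

    dti = pd.date_range("2018-01-01", periods=3, tz="US/Eastern")

    # Comparing a tz-aware DatetimeIndex against a tz-naive np.datetime64
    # now raises TypeError instead of passing silently.
    try:
        dti > np.datetime64("2018-01-02")
    except TypeError:
        pass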

Offsets
^^^^^^^
@@ -535,6 +577,7 @@ Numeric
- Bug in :meth:`DataFrame.agg`, :meth:`DataFrame.transform` and :meth:`DataFrame.apply` where,
when supplied with a list of functions and ``axis=1`` (e.g. ``df.apply(['sum', 'mean'], axis=1)``),
a ``TypeError`` was wrongly raised. For all three methods such calculations are now done correctly. (:issue:`16679`).
- Bug in :class:`Series` comparison against datetime-like scalars and arrays (:issue:`22074`)
-

Strings
@@ -587,8 +630,8 @@ I/O
Plotting
^^^^^^^^

- Bug in :func:'DataFrame.plot.scatter' and :func:'DataFrame.plot.hexbin' caused x-axis label and ticklabels to disappear when colorbar was on in IPython inline backend (:issue:`10611`, :issue:`10678`, and :issue:`20455`)
-
- Bug in :func:`DataFrame.plot.scatter` and :func:`DataFrame.plot.hexbin` caused x-axis label and ticklabels to disappear when colorbar was on in IPython inline backend (:issue:`10611`, :issue:`10678`, and :issue:`20455`)
- Bug in plotting a Series with datetimes using :func:`matplotlib.axes.Axes.scatter` (:issue:`22039`)

Groupby/Resample/Rolling
^^^^^^^^^^^^^^^^^^^^^^^^
@@ -615,6 +658,8 @@ Reshaping
- Bug in :meth:`Series.combine_first` with ``datetime64[ns, tz]`` dtype which would return tz-naive result (:issue:`21469`)
- Bug in :meth:`Series.where` and :meth:`DataFrame.where` with ``datetime64[ns, tz]`` dtype (:issue:`21546`)
- Bug in :meth:`Series.mask` and :meth:`DataFrame.mask` with ``list`` conditionals (:issue:`21891`)
- Bug in :meth:`DataFrame.replace` raising a ``RecursionError`` when converting out-of-bounds ``datetime64[ns, tz]`` values (:issue:`20380`)
- :func:`pandas.core.groupby.GroupBy.rank` now raises a ``ValueError`` when an invalid value is passed for argument ``na_option`` (:issue:`22124`)
- Bug in :func:`get_dummies` with Unicode attributes in Python 2 (:issue:`22084`)
-

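A short sketch of the ``GroupBy.rank`` validation mentioned in the Reshaping entry above:

.. code-block:: python

    import pandas as pd

    df = pd.DataFrame({"key": ["a", "a", "b"], "val": [2.0, None, 1.0]})

    # na_option must be 'keep', 'top', or 'bottom'; any other value now
    # raises ValueError instead of being silently accepted.
    ranked = df.groupby("key")["val"].rank(na_option="keep")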
2 changes: 1 addition & 1 deletion pandas/_libs/algos.pyx
@@ -129,7 +129,7 @@ def is_lexsorted(list list_of_arrays):
for i in range(nlevels):
arr = list_of_arrays[i]
assert arr.dtype.name == 'int64'
vecs[i] = <int64_t*> arr.data
vecs[i] = <int64_t*> cnp.PyArray_DATA(arr)

# Assume uniqueness??
with nogil:
4 changes: 3 additions & 1 deletion pandas/_libs/groupby.pyx
@@ -7,10 +7,12 @@ from cython cimport Py_ssize_t
from libc.stdlib cimport malloc, free

import numpy as np
cimport numpy as cnp
from numpy cimport (ndarray,
double_t,
int8_t, int16_t, int32_t, int64_t, uint8_t, uint16_t,
uint32_t, uint64_t, float32_t, float64_t)
cnp.import_array()


from util cimport numeric, get_nat
@@ -118,7 +120,7 @@ def group_median_float64(ndarray[float64_t, ndim=2] out,
counts[:] = _counts[1:]

data = np.empty((K, N), dtype=np.float64)
ptr = <float64_t*> data.data
ptr = <float64_t*> cnp.PyArray_DATA(data)

take_2d_axis1_float64_float64(values.T, indexer, out=data)

8 changes: 4 additions & 4 deletions pandas/_libs/hashing.pyx
@@ -5,7 +5,7 @@
import cython

import numpy as np
from numpy cimport ndarray, uint8_t, uint32_t, uint64_t
from numpy cimport uint8_t, uint32_t, uint64_t

from util cimport _checknull
from cpython cimport (PyBytes_Check,
@@ -17,7 +17,7 @@ DEF dROUNDS = 4


@cython.boundscheck(False)
def hash_object_array(ndarray[object] arr, object key, object encoding='utf8'):
def hash_object_array(object[:] arr, object key, object encoding='utf8'):
"""
Parameters
----------
@@ -37,7 +37,7 @@ def hash_object_array(ndarray[object] arr, object key, object encoding='utf8'):
"""
cdef:
Py_ssize_t i, l, n
ndarray[uint64_t] result
uint64_t[:] result
bytes data, k
uint8_t *kb
uint64_t *lens
@@ -89,7 +89,7 @@ def hash_object_array(ndarray[object] arr, object key, object encoding='utf8'):

free(vecs)
free(lens)
return result
return result.base # .base to retrieve underlying np.ndarray


cdef inline uint64_t _rotl(uint64_t x, uint64_t b) nogil:
6 changes: 3 additions & 3 deletions pandas/_libs/index.pyx
@@ -37,7 +37,7 @@ cdef inline bint is_definitely_invalid_key(object val):
return True

# we have a _data, means we are a NDFrame
return (PySlice_Check(val) or cnp.PyArray_Check(val)
return (PySlice_Check(val) or util.is_array(val)
or PyList_Check(val) or hasattr(val, '_data'))


@@ -104,7 +104,7 @@ cdef class IndexEngine:
void* data_ptr

loc = self.get_loc(key)
if PySlice_Check(loc) or cnp.PyArray_Check(loc):
if PySlice_Check(loc) or util.is_array(loc):
return arr[loc]
else:
return get_value_at(arr, loc, tz=tz)
@@ -120,7 +120,7 @@ cdef class IndexEngine:
loc = self.get_loc(key)
value = convert_scalar(arr, value)

if PySlice_Check(loc) or cnp.PyArray_Check(loc):
if PySlice_Check(loc) or util.is_array(loc):
arr[loc] = value
else:
util.set_value_at(arr, loc, value)
14 changes: 3 additions & 11 deletions pandas/_libs/src/numpy_helper.h
@@ -16,8 +16,6 @@ The full license is in the LICENSE file, distributed with this software.
#include "numpy/arrayscalars.h"


PANDAS_INLINE npy_int64 get_nat(void) { return NPY_MIN_INT64; }

PANDAS_INLINE int assign_value_1d(PyArrayObject* ap, Py_ssize_t _i,
PyObject* v) {
npy_intp i = (npy_intp)_i;
@@ -40,16 +38,10 @@ PANDAS_INLINE const char* get_c_string(PyObject* obj) {
#endif
}

PANDAS_INLINE PyObject* char_to_string(const char* data) {
#if PY_VERSION_HEX >= 0x03000000
return PyUnicode_FromString(data);
#else
return PyString_FromString(data);
#endif
}

void set_array_not_contiguous(PyArrayObject* ao) {
ao->flags &= ~(NPY_ARRAY_C_CONTIGUOUS | NPY_ARRAY_F_CONTIGUOUS);
// Numpy>=1.8-compliant equivalent to:
// ao->flags &= ~(NPY_ARRAY_C_CONTIGUOUS | NPY_ARRAY_F_CONTIGUOUS);
PyArray_CLEARFLAGS(ao, (NPY_ARRAY_C_CONTIGUOUS | NPY_ARRAY_F_CONTIGUOUS));
}

#endif // PANDAS__LIBS_SRC_NUMPY_HELPER_H_
4 changes: 2 additions & 2 deletions pandas/_libs/src/ujson/python/objToJSON.c
@@ -47,8 +47,8 @@ Numeric decoder derived from from TCL library
#include <numpy/npy_math.h> // NOLINT(build/include_order)
#include <stdio.h> // NOLINT(build/include_order)
#include <ultrajson.h> // NOLINT(build/include_order)
#include <np_datetime.h> // NOLINT(build/include_order)
#include <np_datetime_strings.h> // NOLINT(build/include_order)
#include <../../../tslibs/src/datetime/np_datetime.h> // NOLINT(build/include_order)
#include <../../../tslibs/src/datetime/np_datetime_strings.h> // NOLINT(build/include_order)
#include "datetime.h"

static PyObject *type_decimal;
31 changes: 9 additions & 22 deletions pandas/_libs/tslib.pyx
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
# cython: profile=False
cimport cython
from cython cimport Py_ssize_t

from cpython cimport PyFloat_Check, PyUnicode_Check
@@ -37,8 +36,7 @@ from tslibs.np_datetime import OutOfBoundsDatetime
from tslibs.parsing import parse_datetime_string

from tslibs.timedeltas cimport cast_from_unit
from tslibs.timezones cimport (is_utc, is_tzlocal, is_fixed_offset,
treat_tz_as_pytz, get_dst_info)
from tslibs.timezones cimport is_utc, is_tzlocal, get_dst_info
from tslibs.conversion cimport (tz_convert_single, _TSObject,
convert_datetime_to_tsobject,
get_datetime64_nanos,
@@ -77,8 +75,7 @@ cdef inline object create_time_from_ts(
return time(dts.hour, dts.min, dts.sec, dts.us)


def ints_to_pydatetime(ndarray[int64_t] arr, tz=None, freq=None,
box="datetime"):
def ints_to_pydatetime(int64_t[:] arr, tz=None, freq=None, box="datetime"):
"""
Convert an i8 repr to an ndarray of datetimes, date, time or Timestamp
@@ -102,7 +99,9 @@ def ints_to_pydatetime(ndarray[int64_t] arr, tz=None, freq=None,

cdef:
Py_ssize_t i, n = len(arr)
ndarray[int64_t] trans, deltas
ndarray[int64_t] trans
int64_t[:] deltas
Py_ssize_t pos
npy_datetimestruct dts
object dt
int64_t value, delta
@@ -635,24 +634,12 @@ cpdef array_to_datetime(ndarray[object] values, errors='raise',

# If the dateutil parser returned tzinfo, capture it
# to check if all arguments have the same tzinfo
tz = py_dt.tzinfo
tz = py_dt.utcoffset()
if tz is not None:
seen_datetime_offset = 1
if tz == dateutil_utc():
# dateutil.tz.tzutc has no offset-like attribute
# Just add the 0 offset explicitly
out_tzoffset_vals.add(0)
elif tz == tzlocal():
# is comparison fails unlike other dateutil.tz
# objects. Also, dateutil.tz.tzlocal has no
# _offset attribute like tzoffset
offset_seconds = tz._dst_offset.total_seconds()
out_tzoffset_vals.add(offset_seconds)
else:
# dateutil.tz.tzoffset objects cannot be hashed
# store the total_seconds() instead
offset_seconds = tz._offset.total_seconds()
out_tzoffset_vals.add(offset_seconds)
# dateutil timezone objects cannot be hashed, so store
# the UTC offsets in seconds instead
out_tzoffset_vals.add(tz.total_seconds())
else:
# Add a marker for naive string, to track if we are
# parsing mixed naive and aware strings
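The change above replaces per-class special cases for ``dateutil`` timezones with a single ``utcoffset()`` call; a pure-Python sketch of why that works:

.. code-block:: python

    from datetime import datetime
    from dateutil import tz

    # utcoffset() returns a timedelta for any aware datetime regardless of
    # the concrete tzinfo class, and total_seconds() gives a hashable offset.
    aware = datetime(2018, 1, 1, tzinfo=tz.tzoffset(None, 19800))
    offset_seconds = aware.utcoffset().total_seconds()  # 19800.0

    # The same call covers tzutc() and tzlocal(), which previously required
    # separate branches because those tzinfo objects compare and hash awkwardly.
    utc_offset = datetime(2018, 1, 1, tzinfo=tz.tzutc()).utcoffset().total_seconds()  # 0.0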