BUG: DataFrame.append with timedelta64 #39574
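For context, here is a minimal sketch of the kind of case the diff below targets. This is an assumed illustration, not necessarily the exact reproducer from the linked issue; `DataFrame.append` routes through the same block-concatenation machinery that is changed here.

```python
import pandas as pd

# Hypothetical illustration of the case this PR targets (not the exact
# reproducer from the issue). `left` has a timedelta64[ns] column; `right`
# holds only NaT in an object-dtype block.
left = pd.DataFrame({"td": pd.to_timedelta([1, 2], unit="s")})
right = pd.DataFrame({"td": [pd.NaT]}, dtype=object)

out = pd.concat([left, right], ignore_index=True)
# With this change, the all-NaT block is recognized as NA compatible with
# timedelta64, so "td" is expected to stay timedelta64[ns] rather than
# being upcast to object.
print(out.dtypes)
```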
```diff
@@ -19,7 +19,7 @@
     is_sparse,
 )
 from pandas.core.dtypes.concat import concat_compat
-from pandas.core.dtypes.missing import isna_all
+from pandas.core.dtypes.missing import is_valid_nat_for_dtype, isna_all

 import pandas.core.algorithms as algos
 from pandas.core.arrays import DatetimeArray, ExtensionArray, TimedeltaArray
```
```diff
@@ -227,6 +227,24 @@ def dtype(self):
         else:
             return get_dtype(maybe_promote(self.block.dtype, self.block.fill_value)[0])

+    def is_valid_na_for(self, dtype: DtypeObj) -> bool:
+        """
+        Check that we are all-NA of a type/dtype that is compatible with this dtype.
+        """
+        if not self.is_na:
+            return False
+        if self.block is None:
+            return True
+
+        if self.dtype == object:
+            values = self.block.values
+            return all(
+                is_valid_nat_for_dtype(x, dtype) for x in values.ravel(order="K")
+            )
```
Review comment: This is not only required for object dtype, I think. Also, a float NaN is considered "all NaN" when it comes to ignoring the dtype in concatenating dataframes (and other dtypes as well, I think):
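The reviewer's snippet itself is not captured in this view; the following is a hedged stand-in illustrating the behaviour described, namely that an all-NaN float64 column is also treated as "all NaN" for dtype purposes when concatenated with a non-NA column:

```python
import numpy as np
import pandas as pd

# Stand-in example (not the original review snippet): the all-NaN float64
# frame is treated as "all NaN", so the concatenated column keeps the
# datetime64[ns] dtype of the non-NA frame.
a = pd.DataFrame({"x": pd.to_datetime(["2021-01-01", "2021-01-02"])})
b = pd.DataFrame({"x": [np.nan, np.nan]})  # float64, all-NaN

print(pd.concat([a, b], ignore_index=True).dtypes)
```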
Author reply: the non-object case is handled below on L245-246. Or do you have something else in mind?

Review comment: Does my snippet above work with this PR?

Author reply: Yes, it does. I think that's driven by something sketchy-looking in get_reindexed_values, will see if that can be addressed.

Author reply (after a later commit): better?
```diff
+
+        na_value = self.block.fill_value
+        return is_valid_nat_for_dtype(na_value, dtype)
+
     @cache_readonly
     def is_na(self) -> bool:
         if self.block is None:
```
```diff
@@ -257,7 +275,7 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na) -> ArrayLike:
         else:
             fill_value = upcasted_na

-            if self.is_na:
+            if self.is_valid_na_for(empty_dtype):
                 blk_dtype = getattr(self.block, "dtype", None)

                 if blk_dtype == np.dtype(object):
```
```diff
@@ -418,8 +436,12 @@ def _get_empty_dtype(join_units: Sequence[JoinUnit]) -> DtypeObj:
         return empty_dtype

     has_none_blocks = any(unit.block is None for unit in join_units)
-    dtypes = [None if unit.block is None else unit.dtype for unit in join_units]
-    dtypes = [x for x in dtypes if x is not None]
+
+    dtypes = [
+        unit.dtype for unit in join_units if unit.block is not None and not unit.is_na
+    ]
+    if not len(dtypes):
+        dtypes = [unit.dtype for unit in join_units if unit.block is not None]

     dtype = find_common_type(dtypes)
    if has_none_blocks:
```
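To make the new selection rule in _get_empty_dtype concrete, here is a small standalone sketch. The helper name is hypothetical and np.result_type stands in for pandas' find_common_type: dtypes of all-NA units are ignored, unless every unit is all-NA, in which case all available block dtypes are considered.

```python
import numpy as np

def pick_empty_dtype(unit_dtypes, unit_is_na):
    # Hypothetical standalone mirror of the rule above: prefer dtypes from
    # units that actually hold data; only if every unit is all-NA, fall
    # back to all available block dtypes.
    candidates = [dt for dt, na in zip(unit_dtypes, unit_is_na) if not na]
    if not candidates:
        candidates = list(unit_dtypes)
    return np.result_type(*candidates)  # stand-in for find_common_type

# timedelta64 unit with data + all-NA object unit -> timedelta64 wins
print(pick_empty_dtype([np.dtype("m8[ns]"), np.dtype(object)], [False, True]))
# every unit all-NA -> fall back to considering both dtypes (object here)
print(pick_empty_dtype([np.dtype("m8[ns]"), np.dtype(object)], [True, True]))
```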