Skip to content

Commit

Permalink
Potential concat fix (#45)
Browse files Browse the repository at this point in the history
* Potential concat fix

* Fix

* Moved tests, comment
  • Loading branch information
zkrolikowski-vl authored Jun 21, 2021
1 parent f5c8d61 commit 24384b5
Show file tree
Hide file tree
Showing 5 changed files with 34 additions and 26 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ def list_packages(source_path: str = src_path) -> None:
setup(
name="pandas-stubs",
package_dir={"": src_path},
version="1.1.0.10",
version="1.1.0.11",
description="Type annotations for Pandas",
long_description=(open("README.md").read()
if os.path.exists("README.md") else ""),
Expand Down
10 changes: 0 additions & 10 deletions tests/snippets/test_frame.py
Original file line number Diff line number Diff line change
Expand Up @@ -361,16 +361,6 @@ def test_types_melt() -> None:
pd.melt(df, id_vars=['col1'], value_vars=['col2'], var_name="someVariable", value_name="someValue")


def test_types_concat() -> None:
    """Exercise the DataFrame overloads of ``pd.concat`` so the stubs are type-checked."""
    frame_a = pd.DataFrame(data={'col1': [1, 2], 'col2': [3, 4]})
    frame_b = pd.DataFrame(data={'col1': [10, 20], 'col2': [30, 40]})
    frames = [frame_a, frame_b]

    # Plain row-wise concat, column-wise concat, then the keys/sort and
    # keys/names keyword combinations accepted by the stub signature.
    pd.concat(frames)
    pd.concat(frames, axis=1)
    pd.concat(frames, keys=['first', 'second'], sort=True)
    pd.concat(frames, keys=['first', 'second'], names=["source", "row"])


def test_types_pivot() -> None:
df = pd.DataFrame(data={'col1': ['first', 'second', 'third', 'fourth'],
'col2': [50, 70, 56, 111], 'col3': ['A', 'B', 'B', 'A'], 'col4': [100, 102, 500, 600]})
Expand Down
24 changes: 24 additions & 0 deletions tests/snippets/test_pandas.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
# flake8: noqa: F841
from typing import Union

import pandas as pd


Expand All @@ -10,3 +13,24 @@ def test_types_to_datetime() -> None:
pd.to_datetime(df, unit="ns", dayfirst=True, utc=None, format="%M:%D", exact=False)
pd.to_datetime([1, 2], unit="D", origin=pd.Timestamp("01/01/2000"))
pd.to_datetime([1, 2], unit="D", origin=3)


def test_types_concat() -> None:
    """Exercise ``pd.concat`` over Series, DataFrames, and mapping inputs for stub checking."""
    ser_a = pd.Series([0, 1, -10])
    ser_b = pd.Series([7, -5, 10])

    # Series overload: positional, axis, keys/sort, keys/names forms.
    pd.concat([ser_a, ser_b])
    pd.concat([ser_a, ser_b], axis=1)
    pd.concat([ser_a, ser_b], keys=['first', 'second'], sort=True)
    pd.concat([ser_a, ser_b], keys=['first', 'second'], names=["source", "row"])

    frame_a = pd.DataFrame(data={'col1': [1, 2], 'col2': [3, 4]})
    frame_b = pd.DataFrame(data={'col1': [10, 20], 'col2': [30, 40]})

    # DataFrame overload: same four call shapes as above.
    pd.concat([frame_a, frame_b])
    pd.concat([frame_a, frame_b], axis=1)
    pd.concat([frame_a, frame_b], keys=['first', 'second'], sort=True)
    pd.concat([frame_a, frame_b], keys=['first', 'second'], names=["source", "row"])

    # Mapping overloads: dict of DataFrames yields a DataFrame; dict of
    # Series may widen to DataFrame, hence the Union annotation.
    result: pd.DataFrame = pd.concat({"a": pd.DataFrame([1, 2, 3]), "b": pd.DataFrame([4, 5, 6])}, axis=1)
    result2: Union[pd.DataFrame, pd.Series] = pd.concat({"a": pd.Series([1, 2, 3]), "b": pd.Series([4, 5, 6])}, axis=1)
10 changes: 0 additions & 10 deletions tests/snippets/test_series.py
Original file line number Diff line number Diff line change
Expand Up @@ -313,16 +313,6 @@ def test_types_element_wise_arithmetic() -> None:
s.mod(s2, fill_value=0)


def test_types_concat() -> None:
    """Exercise the Series overloads of ``pd.concat`` so the stubs are type-checked."""
    left = pd.Series([0, 1, -10])
    right = pd.Series([7, -5, 10])
    pair = [left, right]

    # Row-wise concat, column-wise concat, then the keys/sort and
    # keys/names keyword combinations accepted by the stub signature.
    pd.concat(pair)
    pd.concat(pair, axis=1)
    pd.concat(pair, keys=['first', 'second'], sort=True)
    pd.concat(pair, keys=['first', 'second'], names=["source", "row"])


def test_types_groupby() -> None:
s = pd.Series([4, 2, 1, 8], index=['a', 'b', 'a', 'b'])
s.groupby(['a', 'b', 'a', 'b'])
Expand Down
14 changes: 9 additions & 5 deletions third_party/3/pandas/core/reshape/concat.pyi
Original file line number Diff line number Diff line change
@@ -1,16 +1,20 @@
from pandas import DataFrame as DataFrame, Index as Index, MultiIndex as MultiIndex, Series as Series
from pandas._typing import Axis, FrameOrSeries
from pandas._typing import Axis, FrameOrSeries, Label
from pandas.core.arrays.categorical import factorize_from_iterable as factorize_from_iterable, factorize_from_iterables as factorize_from_iterables
from pandas.core.generic import NDFrame as NDFrame
from pandas.core.indexes.api import all_indexes_same as all_indexes_same, ensure_index as ensure_index, get_consensus_names as get_consensus_names, get_objs_combined_axis as get_objs_combined_axis
from pandas.core.internals import concatenate_block_managers as concatenate_block_managers
from typing import Any, Hashable, Iterable, Mapping, Optional, Union, overload

from typing import Any, Iterable, Mapping, overload

# For some reason mypy won't read this declaration correctly if we use 2 overloads with union
@overload
def concat(objs: Iterable[DataFrame], axis: Axis = ..., join: str=..., ignore_index: bool=..., keys: Any = ..., levels: Any = ..., names: Any = ..., verify_integrity: bool=..., sort: bool=..., copy: bool=...) -> DataFrame: ...
@overload
def concat(objs: Mapping[Label, DataFrame], axis: Axis = ..., join: str=..., ignore_index: bool=..., keys: Any = ..., levels: Any = ..., names: Any = ..., verify_integrity: bool=..., sort: bool=..., copy: bool=...) -> DataFrame: ...
@overload
def concat(objs: Union[Iterable[DataFrame], Mapping[Optional[Hashable], DataFrame]], axis: Axis = ..., join: str=..., ignore_index: bool=..., keys: Any = ..., levels: Any = ..., names: Any = ..., verify_integrity: bool=..., sort: bool=..., copy: bool=...) -> DataFrame: ...
def concat(objs: Iterable[Series], axis: Axis = ..., join: str=..., ignore_index: bool=..., keys: Any = ..., levels: Any = ..., names: Any = ..., verify_integrity: bool=..., sort: bool=..., copy: bool=...) -> FrameOrSeries: ...
@overload
def concat(objs: Union[Iterable[Series], Mapping[Optional[Hashable], Series]], axis: Axis = ..., join: str=..., ignore_index: bool=..., keys: Any = ..., levels: Any = ..., names: Any = ..., verify_integrity: bool=..., sort: bool=..., copy: bool=...) -> FrameOrSeries: ...
def concat(objs: Mapping[Label, Series], axis: Axis = ..., join: str=..., ignore_index: bool=..., keys: Any = ..., levels: Any = ..., names: Any = ..., verify_integrity: bool=..., sort: bool=..., copy: bool=...) -> FrameOrSeries: ...

class _Concatenator:
intersect: bool = ...
Expand Down

0 comments on commit 24384b5

Please sign in to comment.