Commit

xfail test_read_multiple_parquet_files
AlenkaF committed Dec 21, 2023
1 parent 77b4ecb commit 4c89276
Showing 2 changed files with 4 additions and 6 deletions.
8 changes: 2 additions & 6 deletions python/pyarrow/filesystem.py
@@ -223,12 +223,8 @@ def read_parquet(self, path, columns=None, metadata=None, schema=None,
         table : pyarrow.Table
         """
         from pyarrow.parquet import ParquetDataset
-
-        if metadata is not None:
-            raise ValueError(
-                "Keyword 'metadata' is not supported with the Dataset API")
-
-        dataset = ParquetDataset(path, schema=schema, filesystem=self)
+        dataset = ParquetDataset(path, schema=schema, metadata=metadata,
+                                 filesystem=self)
         return dataset.read(columns=columns, use_threads=use_threads,
                             use_pandas_metadata=use_pandas_metadata)
 
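For context, a minimal sketch of how the restored signature is exercised through the legacy filesystem API. This assumes a PyArrow release that still ships the deprecated pyarrow.filesystem module (earlier than 15.0.0) and a hypothetical local example.parquet file; on 15.0.0+ the metadata keyword forwarded here is what produces the TypeError the test below now expects.

    # Sketch only: assumes pyarrow < 15.0.0 (legacy pyarrow.filesystem still present)
    # and a local file "example.parquet" (hypothetical path).
    from pyarrow.filesystem import LocalFileSystem

    fs = LocalFileSystem()
    # read_parquet forwards schema/metadata to ParquetDataset and returns a pyarrow.Table.
    table = fs.read_parquet("example.parquet", columns=None, use_threads=True)
    print(table.num_rows)
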
2 changes: 2 additions & 0 deletions python/pyarrow/tests/test_hdfs.py
@@ -309,6 +309,8 @@ def _write_multiple_hdfs_pq_files(self, tmpdir):
         expected = pa.concat_tables(test_data)
         return expected
 
+    @pytest.mark.xfail(reason="legacy.FileSystem.read_parquet used legacy ParquetDataset "
+                              "that has been removed in PyArrow 15.0.0.", raises=TypeError)
     @pytest.mark.pandas
     @pytest.mark.parquet
     def test_read_multiple_parquet_files(self):
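As a side note, a standalone illustration (not the HDFS test itself) of how the marker behaves: with raises=TypeError, pytest reports XFAIL only when a TypeError is actually raised; a different exception is reported as a regular failure. The test name and message below are made up for the example.

    import pytest


    @pytest.mark.xfail(reason="metadata keyword no longer accepted by ParquetDataset",
                       raises=TypeError)
    def test_metadata_keyword_rejected():
        # Stand-in for the real call path: the legacy read_parquet now passes
        # metadata= to a ParquetDataset that no longer accepts it.
        raise TypeError("__init__() got an unexpected keyword argument 'metadata'")
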
