| |
| |
| |
| |
| |
| |
"""Re-shard existing Parquet files into fewer, larger shards.

Reads the small shards from data/ and concatenates each split's shards
into a single, larger Parquet file to minimize the total number of files.

Usage:
    uv run reshard_parquet.py [--input-dir PATH] [--output-dir PATH]
"""
|
|
| import argparse |
| import os |
| from pathlib import Path |
|
|
| import pyarrow.parquet as pq |
|
|
|
|
| DEFAULT_DIR = os.path.expanduser("~/parameter-golf/data") |
|
|
|
|
def collect_shards(directory: Path, prefix: str) -> list[Path]:
    """Return every ``{prefix}-*.parquet`` file in *directory*, sorted by name."""
    matches = directory.glob(f"{prefix}-*.parquet")
    return sorted(matches)
|
|
|
|
def reshard(shards: list[Path], split: str, out_dir: Path) -> None:
    """Concatenate *shards* into a single Parquet file for *split* in *out_dir*.

    Streams one input shard at a time through a ParquetWriter so peak memory
    stays at roughly one shard's worth of rows.

    Args:
        shards: Input shard paths; must be non-empty (the first shard's
            schema defines the output schema).
        split: Split name used to build the output filename.
        out_dir: Directory the combined file is written into.
    """
    if not shards:
        # Nothing to do; avoids an IndexError on shards[0] below.
        return

    total_rows = sum(pq.read_metadata(s).num_rows for s in shards)
    print(f" {split}: {total_rows:,} rows across {len(shards)} shards -> 1 output file")

    name = f"{split}-00000-of-00001.parquet"
    out_path = out_dir / name
    schema = pq.read_schema(shards[0])
    # Use the writer as a context manager so the file handle is closed and
    # the footer flushed even if a read/write below raises. The original
    # only closed on the success path, leaking the handle (and leaving a
    # footer-less, unreadable file) on error.
    with pq.ParquetWriter(
        out_path,
        schema,
        compression="zstd",
        write_page_index=True,
        use_content_defined_chunking=True,
    ) as writer:
        rows_written = 0
        for shard_path in shards:
            table = pq.read_table(shard_path)
            writer.write_table(table, row_group_size=200_000)
            rows_written += table.num_rows
            print(f" appended {shard_path.name} ({rows_written:,}/{total_rows:,} rows)")
            # Drop the reference promptly so this shard's memory can be
            # reclaimed before the next one is loaded.
            del table

    print(f" wrote {name}")
|
|
|
|
def main() -> None:
    """Parse CLI arguments and reshard each split found in the input directory."""
    parser = argparse.ArgumentParser(description="Re-shard Parquet files into fewer, larger shards")
    parser.add_argument("--input-dir", default=DEFAULT_DIR, help="Directory with existing shards")
    parser.add_argument("--output-dir", default=None, help="Output directory (default: input-dir + '_resharded')")
    args = parser.parse_args()

    in_dir = Path(args.input_dir)
    # Derive the default output dir from the input dir's own name so it
    # matches the help text above. The old code hard-coded "data_resharded",
    # which was wrong (and collision-prone) whenever the input directory was
    # not named "data"; for the default input dir the result is identical.
    out_dir = Path(args.output_dir) if args.output_dir else in_dir.with_name(in_dir.name + "_resharded")
    out_dir.mkdir(parents=True, exist_ok=True)

    print(f"Input: {in_dir}")
    print(f"Output: {out_dir}")
    print()

    # Splits are processed in this fixed order; a split with no matching
    # shard files is silently skipped.
    for split in ("validation", "train"):
        shards = collect_shards(in_dir, split)
        if shards:
            reshard(shards, split, out_dir)

    print(f"\nDone! Output in {out_dir}")
|
|
|
|
# Run the CLI entry point only when executed as a script, not on import.
if __name__ == "__main__":
    main()
|
|