| | import os |
| | import random |
| | import shutil |
| | from pathlib import Path |
| | import json |
| |
|
| | import numpy as np |
| |
|
# Fix the RNG seed so the shuffled train/val split below is reproducible
# across runs of this preprocessing script.
SEED = 42
np.random.seed(SEED)
random.seed(SEED)
| |
|
| |
|
if __name__ == "__main__":
    # Convert the raw DigitizePID dataset into a train/val image-folder
    # layout with a HuggingFace-style metadata.jsonl per split.
    root_indir = Path("./raw-datasets/DigitizePID_Dataset")
    imgs_indir = root_indir / "image_2"

    root_outdir = Path("./processed-datasets/DigitizePID_Dataset")
    for split in ("train", "val"):
        (root_outdir / split).mkdir(parents=True, exist_ok=True)

    # os.listdir returns entries in arbitrary, platform-dependent order, so
    # sort first: otherwise the seeded shuffle would yield a different
    # train/val split on different machines/filesystems.
    # (Also: the original listed the directory twice and discarded the
    # first result.)
    imgs_in = sorted(os.listdir(imgs_indir))
    random.shuffle(imgs_in)

    # 80/20 train/val split.
    n = len(imgs_in)
    train_end = int(0.8 * n)
    splits = (
        ("train", imgs_in[:train_end]),
        ("val", imgs_in[train_end:]),
    )

    for split, files in splits:
        metadata_lines = []
        for img_fname in files:
            # Image filenames are numeric stems, e.g. "7.jpg" -> annotation
            # dir "raw-datasets/.../7/7_symbols.npy".
            idx = int(Path(img_fname).stem)
            shutil.copy(imgs_indir / img_fname, root_outdir / split / img_fname)

            # Per-image symbol annotations; pickled object array, so
            # allow_pickle is required. NOTE(review): trusted local data —
            # allow_pickle on untrusted files would be unsafe.
            symbols = np.load(
                root_indir / str(idx) / f"{idx}_symbols.npy", allow_pickle=True
            )

            # symbol[1] holds the bbox coordinates, symbol[2] the class
            # label; cast to plain ints for JSON serialization.
            # (Renamed the comprehension variable so it no longer shadows
            # the outer `n` image count.)
            metadata_lines.append({
                "file_name": img_fname,
                "symbols": {
                    "bbox": [[int(coord) for coord in symbol[1]] for symbol in symbols],
                    "labels": [int(symbol[2]) for symbol in symbols],
                },
            })

        # One JSON object per line (jsonl), as expected by image-folder
        # dataset loaders.
        with open(root_outdir / split / "metadata.jsonl", "w") as f:
            for line in metadata_lines:
                f.write(json.dumps(line) + "\n")
| |
|