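"""Bundle the VisualGenome images referenced by MetaShift scenarios into a tarball.

Reads every scenario JSON under ./scenarios, collects the image ids referenced
by each scenario's train/test splits, and packs the matching .jpg files from
the VisualGenome images directory into ./data/images.tar.gz.

Example invocation (the script name and path are illustrative):

    python save_images.py --visual_genome_images_dir /path/to/allImages/images
"""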
import argparse
import logging
import tarfile
from pathlib import Path

from pydantic import BaseModel

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def setup_parser():
    parser = argparse.ArgumentParser(description="Save images from all scenarios")
    parser.add_argument(
        "--visual_genome_images_dir",
        type=str,
        required=True,
        help="Path to VisualGenome images directory allImages/images",
    )
    return parser


class DataSplits(BaseModel):
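    """Image ids for the train and test splits, keyed by subset name."""
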
    train: dict[str, list[str]]
    test: dict[str, list[str]]


class MetashiftData(BaseModel):
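    """A single MetaShift scenario: its classes, spurious class,
    train/test contexts, and data splits."""
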
    selected_classes: list[str]
    spurious_class: str
    train_context: str
    test_context: str
    data_splits: DataSplits


class MetashiftFactory:
    visual_genome_images_dir: str

    def __init__(
        self,
        visual_genome_images_dir: str,
    ):
        """
        visual_genome_images_dir: Path to VisualGenome images directory `allImages/images`
        """
        self.visual_genome_images_dir = visual_genome_images_dir

    def _get_unique_ids_from_info(self, info: dict[str, MetashiftData]) -> set[str]:
        """Collect the unique image ids referenced by any scenario's splits."""
        unique_ids = set()
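        # A set dedupes image ids that appear in several scenarios or splits.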
        for data in info.values():
            for ids in data.data_splits.train.values():
                unique_ids.update(ids)
            for ids in data.data_splits.test.values():
                unique_ids.update(ids)
        return unique_ids

    def save_all(self):
        """Pack the images referenced by every scenario into ./data/images.tar.gz."""
        info: dict[str, MetashiftData] = dict()
        out_path = Path(".")
        data_path = out_path / "data"
        data_path.mkdir(parents=True, exist_ok=True)
        scenarios_path = out_path / "scenarios"

        for scenario_path in scenarios_path.glob("**/*.json"):
            # Key each scenario by its path relative to the scenarios
            # directory, without the .json suffix. Examples:
            #   cherrypicked/task_1_bed_dog_cat
            #   seed_42/task_1_bed_dog_cat
            dataset = str(Path(*scenario_path.parts[1:]).with_suffix(""))
            with open(scenario_path, "r") as f:
                info[dataset] = MetashiftData.model_validate_json(f.read())

        unique_image_paths = self._get_unique_ids_from_info(info)
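        # Write each referenced image into a single gzip-compressed archive.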
        with tarfile.open(data_path / "images.tar.gz", "w:gz") as tar:
            for unique_image_path in unique_image_paths:
                _id = Path(unique_image_path).stem
                # No arcname is given, so each member keeps its full source
                # path inside the archive.
                tar.add(
                    Path(self.visual_genome_images_dir) / f"{_id}.jpg",
                )
        logger.info(
            "Packed %d images into %s", len(unique_image_paths), data_path / "images.tar.gz"
        )


def main():
    parser = setup_parser()
    args = parser.parse_args()
    metashift_factory = MetashiftFactory(
        visual_genome_images_dir=args.visual_genome_images_dir,
    )
    metashift_factory.save_all()


if __name__ == "__main__":
    main()