def ready()

in source/forecast-shared/shared/DatasetGroup/dataset_group.py


    def ready(self) -> bool:
        """
        Ensure this dataset group is ready (all defined datasets are ACTIVE). Raise an exception if not
        :return: bool
        """
        datasets = self.datasets

        # ensure each dataset is active
        datasets_ready = all(dataset.get("Status") == "ACTIVE" for dataset in datasets)
        if not datasets_ready:
            msg = f"One or more of the datasets for dataset group {self._dataset_group_name} is not yet ACTIVE\n\n"
            for dataset in datasets:
                msg += f"Dataset {dataset.get('DatasetName')} had status {dataset.get('Status')}\n"
            raise DatasetsImporting(msg)

        # check there is an active import for each dataset
        msg = ""
        for dataset in datasets:
            imports = []
            paginator = self.cli.get_paginator("list_dataset_import_jobs")
            iterator = paginator.paginate(
                Filters=[
                    {
                        "Condition": "IS",
                        "Key": "DatasetArn",
                        "Value": dataset["DatasetArn"],
                    },
                    {"Condition": "IS", "Key": "Status", "Value": "ACTIVE"},
                ]
            )
            for page in iterator:
                imports.extend(page.get("DatasetImportJobs", []))
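            # most recent import job first (itemgetter comes from the standard library operator module)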
            imports = sorted(
                imports, key=itemgetter("LastModificationTime"), reverse=True
            )
            if len(imports) == 0:
                msg += f"no ACTIVE dataset import jobs for {dataset.get('DatasetArn')}\n"
            else:
                # defensive check - the Status filter above should already limit results to ACTIVE
                status = imports[0].get("Status")
                if status != "ACTIVE":
                    msg += f"no ACTIVE dataset import job for {dataset.get('DatasetArn')} - status was {status}\n"
        if msg:
            raise DatasetsImporting(msg)

        return True
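
For context, here is a minimal sketch of how a caller might poll ready() to gate further work (for example, predictor creation) until all dataset imports finish. The module path is inferred from the file location above, and wait_until_ready plus the assumption that DatasetsImporting is importable from the same module are illustrative only; nothing beyond ready() and DatasetsImporting is confirmed by the excerpt.

import time

from shared.DatasetGroup.dataset_group import DatasetGroup, DatasetsImporting  # assumed import path


def wait_until_ready(dataset_group: DatasetGroup, timeout_s: int = 1800, poll_s: int = 30) -> None:
    """Poll ready() until every dataset has an ACTIVE import job, or give up after timeout_s seconds."""
    deadline = time.monotonic() + timeout_s
    while True:
        try:
            dataset_group.ready()  # raises DatasetsImporting while imports are still in progress
            return
        except DatasetsImporting as exc:
            if time.monotonic() > deadline:
                raise TimeoutError(f"dataset group not ready after {timeout_s}s") from exc
            time.sleep(poll_s)

In the deployed solution this kind of retry loop would more likely be driven by a Step Functions wait/retry state than an in-process sleep; the sketch simply shows the exception-based contract of ready().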