def process_objects()

in pantri/scripts/lib/pantri.py [0:0]


    def process_objects(self, expanded_objects=None):
        """
        process_objects(expanded_objects)

        Given a list of objects, determines if uploadable (binary), and
        then create a dictionary of:
          sha1_hash
          sha256_hash
          modified_time
          filesize

        Sha1_hash is only determined on first upload or if modified time and
        file size changed.
        """

        if expanded_objects is None:
            expanded_objects = []

        objects_metadata = {}
        for obj in expanded_objects:
            # Process if object is uploadable
            if self.uploadable_object(obj):

                # Object name in metadata file. Replace \\ with / to remain
                # consistent across platforms.
                object_name = utils.unix_path(
                    os.path.relpath(obj, self.paths["shelves"])
                )

                # Determine the absolute path and the sibling .pitem file
                # that caches this object's metadata
                object_path = os.path.abspath(obj)
                object_metadata_file = "%s.pitem" % object_path

                # Add object to gitignore so the binary itself is not
                # committed to the repo
                self.add_object_to_gitignore(obj)

                object_mtime = utils.get_modified_time(obj)
                object_file_size = utils.get_file_size(obj)
                # Use the cached checksum, since hashing is CPU intensive
                # while file size and modified time comparisons are quick.
                # Checksums can be forced with the --checksum CLI flag.
                if not self.options["checksum"] and os.path.exists(
                    object_metadata_file
                ):

                    with open(object_metadata_file) as json_file:
                        cached_metadata = json.load(json_file)
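
                    # Expected .pitem layout (inferred from the lookups below):
                    #   {object_name: {"sha1_hash": ..., "sha256_hash": ...,
                    #                  "modified_time": ..., "file_size": ...}}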

                    # Use cached hash if filesize and mtime are the same
                    if (
                        object_file_size == cached_metadata[object_name]["file_size"]
                        and object_mtime
                        == cached_metadata[object_name]["modified_time"]
                    ):

                        object_sha1_hash = cached_metadata[object_name]["sha1_hash"]
                        # Older .pitem files may predate sha256 support, so
                        # fall back to hashing when the key is missing.
                        if "sha256_hash" in cached_metadata[object_name]:
                            object_sha256_hash = cached_metadata[object_name][
                                "sha256_hash"
                            ]
                        else:
                            object_sha256_hash = utils.get_sha256(obj)
                    else:
                        object_sha1_hash = utils.get_sha1(obj)
                        object_sha256_hash = utils.get_sha256(obj)
                else:
                    # Generate hashes if cached metadata is not present
                    object_sha1_hash = utils.get_sha1(obj)
                    object_sha256_hash = utils.get_sha256(obj)

                # Add object to metadata dictionary
                objects_metadata[object_name] = {
                    "sha1_hash": object_sha1_hash,
                    "sha256_hash": object_sha256_hash,
                    "modified_time": object_mtime,
                    "file_size": object_file_size,
                }

        return objects_metadata
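
A minimal usage sketch, assuming `pantri` is an already-initialized instance of
the class this method belongs to (with `options["checksum"]` and
`paths["shelves"]` populated); the shelf path below is hypothetical:

    # Sketch only: "shelves/tools/installer.dmg" is an illustrative path.
    objects = ["shelves/tools/installer.dmg"]
    metadata = pantri.process_objects(expanded_objects=objects)
    for name, meta in metadata.items():
        # Each entry carries both hashes plus the mtime/size pair that the
        # caching check compares against on the next run.
        print(name, meta["sha1_hash"], meta["sha256_hash"], meta["file_size"])

Re-running with the --checksum flag bypasses the cached metadata and forces
both hashes to be recomputed even when mtime and size match.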