def collect_logs_and_get_archive()

in azurelinuxagent/ga/logcollector.py


    def collect_logs_and_get_archive(self):
        """
        Public method that collects the necessary log files into a compressed zip archive.
        :return: Returns a tuple of the path of the compressed archive and the total uncompressed size of the collected files
        """
        files_to_collect = []
        total_uncompressed_size = 0

        try:
            # Clear previous run's output and create base directories if they don't exist already.
            self._create_base_dirs()
            LogCollector._reset_file(OUTPUT_RESULTS_FILE_PATH)
            start_time = datetime.utcnow()
            _LOGGER.info("Starting log collection at %s", start_time.strftime("%Y-%m-%dT%H:%M:%SZ"))
            _LOGGER.info("Using log collection mode %s", "full" if self._is_full_mode else "normal")

            files_to_collect, total_uncompressed_size = self._create_list_of_files_to_collect()
            _LOGGER.info("### Creating compressed archive ###")

            compressed_archive = None

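            # Shared handler for archive-write failures: warn and keep going until
            # max_errors failures have accumulated, then give up on the collection.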
            def handle_add_file_to_archive_error(error_count, max_errors, file_to_collect, exception):
                error_count += 1
                if error_count >= max_errors:
                    raise Exception("Too many errors, giving up. Last error: {0}".format(ustr(exception)))
                else:
                    _LOGGER.warning("Failed to add file %s to the archive: %s", file_to_collect, ustr(exception))
                return error_count

            try:
                compressed_archive = zipfile.ZipFile(COMPRESSED_ARCHIVE_PATH, "w", compression=zipfile.ZIP_DEFLATED)

                max_errors = 8
                error_count = 0

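                # Write each collected file into the archive under its flattened archive name;
                # a missing file is skipped, any other failure counts toward the error budget.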
                for file_to_collect in files_to_collect:
                    try:
                        archive_file_name = LogCollector._convert_file_name_to_archive_name(file_to_collect)
                        compressed_archive.write(file_to_collect.encode("utf-8"), arcname=archive_file_name)
                    except IOError as e:
                        if e.errno == 2:    # [Errno 2] No such file or directory
                            _LOGGER.warning("File %s does not exist, skipping collection for this file",
                                            file_to_collect)
                        else:
                            error_count = handle_add_file_to_archive_error(error_count, max_errors, file_to_collect, e)
                    except Exception as e:
                        error_count = handle_add_file_to_archive_error(error_count, max_errors, file_to_collect, e)

                compressed_archive_size = os.path.getsize(COMPRESSED_ARCHIVE_PATH)
                _LOGGER.info("Successfully compressed files. Compressed archive size is %s b", compressed_archive_size)

                end_time = datetime.utcnow()
                duration = end_time - start_time
                elapsed_ms = int(((duration.days * 24 * 60 * 60 + duration.seconds) * 1000) + (duration.microseconds / 1000.0))
                _LOGGER.info("Finishing log collection at %s", end_time.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"))
                _LOGGER.info("Elapsed time: %s ms", elapsed_ms)

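                # Add the log collection results file to the archive last, as results.txt.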
                compressed_archive.write(OUTPUT_RESULTS_FILE_PATH.encode("utf-8"), arcname="results.txt")
            finally:
                if compressed_archive is not None:
                    compressed_archive.close()

            return COMPRESSED_ARCHIVE_PATH, total_uncompressed_size
        except Exception as e:
            msg = "Failed to collect logs: {0}".format(ustr(e))
            _LOGGER.error(msg)

            raise
        finally:
            self._remove_uncollected_truncated_files(files_to_collect)
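
A minimal usage sketch for this method, assuming LogCollector's constructor accepts an
is_full_mode flag (the constructor is not shown in this excerpt, so its signature is an
assumption):

    from azurelinuxagent.ga.logcollector import LogCollector

    collector = LogCollector(is_full_mode=False)  # assumed constructor signature
    archive_path, uncompressed_size = collector.collect_logs_and_get_archive()
    print("Archive written to {0} ({1} bytes uncompressed)".format(archive_path, uncompressed_size))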