def _BotoBucketToBucket()

in gslib/boto_translation.py [0:0]


  def _BotoBucketToBucket(self, bucket, fields=None):
    """Constructs an apitools Bucket from a boto bucket.

    Each requested metadata field is retrieved with its own boto/XML API
    call, so limiting `fields` avoids unnecessary round trips. Boto errors
    are translated to CloudApi exceptions as they occur; the ACL and
    defaultObjectAcl sections deliberately swallow access-denied errors
    (leaving those lists empty) to match JSON API behavior. Fields under
    the `self.provider == 'gs'` check (storageClass, acl, cors,
    defaultObjectAcl, encryption, lifecycle, logging, website, location)
    are populated only for GS buckets.

    Args:
      bucket: Boto bucket.
      fields: If present, construct the apitools Bucket with only this set of
              metadata fields.

    Returns:
      apitools Bucket.
    """
    # Storage URI wrapper; used for the bucket-config-level calls below
    # (CORS, encryption, lifecycle, logging, website, location, tags).
    bucket_uri = self._StorageUriForBucket(bucket.name)

    cloud_api_bucket = apitools_messages.Bucket(name=bucket.name,
                                                id=bucket.name)
    # Headers sent with every per-field metadata request below.
    headers = self._CreateBaseHeaders()
    if self.provider == 'gs':
      if not fields or 'storageClass' in fields:
        # Older boto bucket objects may lack get_storage_class, hence the
        # hasattr guard; in that case storageClass is simply left unset.
        if hasattr(bucket, 'get_storage_class'):
          cloud_api_bucket.storageClass = bucket.get_storage_class()
      if not fields or 'acl' in fields:
        try:
          for acl in AclTranslation.BotoBucketAclToMessage(
              bucket.get_acl(headers=headers)):
            cloud_api_bucket.acl.append(acl)
        except TRANSLATABLE_BOTO_EXCEPTIONS as e:
          # Translate first (without raising) so we can distinguish an
          # access-denied error from other failures.
          translated_exception = self._TranslateBotoException(
              e, bucket_name=bucket.name)
          if (translated_exception and
              isinstance(translated_exception, AccessDeniedException)):
            # JSON API doesn't differentiate between a blank ACL list
            # and an access denied, so this is intentionally left blank.
            pass
          else:
            self._TranslateExceptionAndRaise(e, bucket_name=bucket.name)
      if not fields or 'cors' in fields:
        try:
          boto_cors = bucket_uri.get_cors(headers=headers)
          cloud_api_bucket.cors = CorsTranslation.BotoCorsToMessage(boto_cors)
        except TRANSLATABLE_BOTO_EXCEPTIONS as e:
          self._TranslateExceptionAndRaise(e, bucket_name=bucket.name)
      if not fields or 'defaultObjectAcl' in fields:
        # Mirrors the 'acl' handling above, but uses the object-ACL
        # translation and the bucket's default-object ACL.
        try:
          for acl in AclTranslation.BotoObjectAclToMessage(
              bucket.get_def_acl(headers=headers)):
            cloud_api_bucket.defaultObjectAcl.append(acl)
        except TRANSLATABLE_BOTO_EXCEPTIONS as e:
          translated_exception = self._TranslateBotoException(
              e, bucket_name=bucket.name)
          if (translated_exception and
              isinstance(translated_exception, AccessDeniedException)):
            # JSON API doesn't differentiate between a blank ACL list
            # and an access denied, so this is intentionally left blank.
            pass
          else:
            self._TranslateExceptionAndRaise(e, bucket_name=bucket.name)
      if not fields or 'encryption' in fields:
        try:
          keyname = (bucket_uri.get_encryption_config(
              headers=headers).default_kms_key_name)
          # Only attach an EncryptionValue when a default KMS key is set;
          # otherwise leave cloud_api_bucket.encryption unset entirely.
          if keyname:
            cloud_api_bucket.encryption = (
                apitools_messages.Bucket.EncryptionValue())
            cloud_api_bucket.encryption.defaultKmsKeyName = keyname
        except TRANSLATABLE_BOTO_EXCEPTIONS as e:
          self._TranslateExceptionAndRaise(e, bucket_name=bucket.name)
      if not fields or 'lifecycle' in fields:
        try:
          boto_lifecycle = bucket_uri.get_lifecycle_config(headers=headers)
          cloud_api_bucket.lifecycle = (
              LifecycleTranslation.BotoLifecycleToMessage(boto_lifecycle))
        except TRANSLATABLE_BOTO_EXCEPTIONS as e:
          self._TranslateExceptionAndRaise(e, bucket_name=bucket.name)
      if not fields or 'logging' in fields:
        try:
          # The XML API returns a dict-like config; only build a
          # LoggingValue if at least one of the two known keys is present.
          boto_logging = bucket_uri.get_logging_config(headers=headers)
          if boto_logging and 'Logging' in boto_logging:
            logging_config = boto_logging['Logging']
            log_object_prefix_present = 'LogObjectPrefix' in logging_config
            log_bucket_present = 'LogBucket' in logging_config
            if log_object_prefix_present or log_bucket_present:
              cloud_api_bucket.logging = apitools_messages.Bucket.LoggingValue()
              if log_object_prefix_present:
                cloud_api_bucket.logging.logObjectPrefix = (
                    logging_config['LogObjectPrefix'])
              if log_bucket_present:
                cloud_api_bucket.logging.logBucket = logging_config['LogBucket']
        except TRANSLATABLE_BOTO_EXCEPTIONS as e:
          self._TranslateExceptionAndRaise(e, bucket_name=bucket.name)
      if not fields or 'website' in fields:
        try:
          # Same pattern as logging: only build a WebsiteValue when at
          # least one recognized sub-key exists in the XML config.
          boto_website = bucket_uri.get_website_config(headers=headers)
          if boto_website and 'WebsiteConfiguration' in boto_website:
            website_config = boto_website['WebsiteConfiguration']
            main_page_suffix_present = 'MainPageSuffix' in website_config
            not_found_page_present = 'NotFoundPage' in website_config
            if main_page_suffix_present or not_found_page_present:
              cloud_api_bucket.website = apitools_messages.Bucket.WebsiteValue()
              if main_page_suffix_present:
                cloud_api_bucket.website.mainPageSuffix = (
                    website_config['MainPageSuffix'])
              if not_found_page_present:
                cloud_api_bucket.website.notFoundPage = (
                    website_config['NotFoundPage'])
        except TRANSLATABLE_BOTO_EXCEPTIONS as e:
          self._TranslateExceptionAndRaise(e, bucket_name=bucket.name)
      if not fields or 'location' in fields:
        cloud_api_bucket.location = bucket_uri.get_location(headers=headers)
      # End gs-specific field checks.
    if not fields or 'labels' in fields:
      try:
        # TODO: Define tags-related methods on storage_uri objects. In the
        # meantime, we invoke the underlying bucket's methods directly.
        try:
          boto_tags = bucket_uri.get_bucket().get_tags()
          cloud_api_bucket.labels = (
              LabelTranslation.BotoTagsToMessage(boto_tags))
        except boto.exception.StorageResponseError as e:
          # If no tagging config exists, the S3 API returns a 404 (the GS API
          # returns a 200 with an empty TagSet). If the bucket didn't exist,
          # we would have failed much earlier in this method, so we know that
          # it's the tagging config that doesn't exist.
          if not (self.provider == 's3' and e.status == 404):
            raise
      except TRANSLATABLE_BOTO_EXCEPTIONS as e:
        self._TranslateExceptionAndRaise(e, bucket_name=bucket.name)
    if not fields or 'versioning' in fields:
      versioning = bucket_uri.get_versioning_config(headers=headers)
      if versioning:
        # S3 reports versioning as a 'Versioning' status string; for GS,
        # a truthy config is treated as versioning-enabled (NOTE(review):
        # this assumes get_versioning_config is falsy for GS buckets with
        # versioning disabled — confirm against the boto implementation).
        if (self.provider == 's3' and 'Versioning' in versioning and
            versioning['Versioning'] == 'Enabled'):
          cloud_api_bucket.versioning = (
              apitools_messages.Bucket.VersioningValue(enabled=True))
        elif self.provider == 'gs':
          cloud_api_bucket.versioning = (
              apitools_messages.Bucket.VersioningValue(enabled=True))

    # For S3 long bucket listing we do not support CORS, lifecycle, website, and
    # logging translation. The individual commands can be used to get
    # the XML equivalents for S3.
    return cloud_api_bucket