in aliyun/log/logclient.py [0:0]
def update_logstore(self, project_name, logstore_name, ttl=None, enable_tracking=None, shard_count=None,
append_meta=None,
auto_split=None,
max_split_shard=None,
preserve_storage=None,
encrypt_conf=None,
hot_ttl=-1,
mode=None,
telemetry_type=None,
infrequent_access_ttl=-1
):
"""
update the logstore meta info
Unsuccessful operation will cause a LogException.
:type project_name: string
:param project_name: the Project name
:type logstore_name: string
:param logstore_name: the logstore name
:type ttl: int
:param ttl: the life cycle (retention) of logs in the logstore, in days
:type hot_ttl: int
:param hot_ttl: the life cycle of hot storage; days [0, hot_ttl] are hot storage and (hot_ttl, ttl] are warm storage; if hot_ttl is -1, the whole [0, ttl] range is hot storage
:type enable_tracking: bool
:param enable_tracking: enable web tracking
:type shard_count: int
:param shard_count: deprecated; the shard count can only be changed by shard split & merge
:type append_meta: bool
:param append_meta: whether to append meta info (server receive time and client external IP) to each received log
:type auto_split: bool
:param auto_split: whether to split shards automatically; if True, max_split_shard defaults to 64
:type max_split_shard: int
:param max_split_shard: the maximum number of shards to split into, up to 256
:type preserve_storage: bool
:param preserve_storage: whether to persist data permanently; if True, ttl is ignored and set to 3650 days
:type encrypt_conf: dict
:param encrypt_conf: encryption config; the following is a sample
    {
        "enable" : True/False,       # required
        "encrypt_type" : "default",  # required, only the default encrypt algorithm is supported currently
        "user_cmk_info" :            # optional; if 'user_cmk_info' is set, a BYOK CMK key is used, otherwise the SLS system CMK key is used
        {
            "cmk_key_id" :           # the CMK key used to generate the data encrypt key
            "arn" :                  # the ARN that grants the SLS service permission to get/generate the data encrypt key in KMS
            "region_id" :            # the region id of cmk_key_id
        }
    }
:type mode: string
:param mode: the type of the logstore, either "lite" or "standard"; defaults to "standard"
:type telemetry_type: string
:param telemetry_type: the Telemetry type
:type infrequent_access_ttl: int
:param infrequent_access_ttl: the infrequent access storage time; ignored when negative (the default -1)
:return: UpdateLogStoreResponse
:raise: LogException
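Example (a minimal sketch; ``client`` is a LogClient instance and the project/logstore names are placeholders):
::

    # extend retention to 30 days and enable web tracking
    res = client.update_logstore("my-project", "my-logstore", ttl=30, enable_tracking=True)
    res.log_print()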
"""
res = self.get_logstore(project_name, logstore_name)
shard_count = res.get_shard_count()
if enable_tracking is None:
enable_tracking = res.get_enable_tracking()
if preserve_storage is None and ttl is None:
preserve_storage = res.preserve_storage
if ttl is None:
ttl = res.get_ttl()
if auto_split is None:
auto_split = res.auto_split
if append_meta is None:
append_meta = res.append_meta
if max_split_shard is None:
max_split_shard = res.max_split_shard
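# permanent storage is expressed as the maximum ttl of 3650 days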
if preserve_storage:
ttl = 3650
headers = {"x-log-bodyrawsize": '0', "Content-Type": "application/json"}
params = {}
resource = "/logstores/" + logstore_name
body = {
"logstoreName": logstore_name, "ttl": int(ttl), "enable_tracking": enable_tracking,
"shardCount": shard_count,
"autoSplit": auto_split,
"maxSplitShard": max_split_shard,
"appendMeta": append_meta
}
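# optional fields are only added to the request body when explicitly provided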
if hot_ttl != -1:
body['hot_ttl'] = hot_ttl
if encrypt_conf is not None:
body["encrypt_conf"] = encrypt_conf
if mode is not None:
body['mode'] = mode
if telemetry_type is not None:
body["telemetryType"] = telemetry_type
if infrequent_access_ttl >= 0:
body["infrequentAccessTTL"] = infrequent_access_ttl
body_str = six.b(json.dumps(body))
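# some backends reject unknown keys ("redundant key exist in json"); fall back to the basic parameter set in that case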
try:
(resp, header) = self._send("PUT", project_name, body_str, resource, params, headers)
except LogException as ex:
if ex.get_error_code() == "LogStoreInfoInvalid" and ex.get_error_message() == "redundant key exist in json":
logger.warning("LogStoreInfoInvalid, will retry with basic parameters. detail: {0}".format(ex))
body = { "logstoreName": logstore_name, "ttl": int(ttl), "enable_tracking": enable_tracking,
"shardCount": shard_count }
body_str = six.b(json.dumps(body))
(resp, header) = self._send("PUT", project_name, body_str, resource, params, headers)
else:
raise
return UpdateLogStoreResponse(header, resp)