in aliyun/log/logclient.py [0:0]
def create_logstore(self, project_name, logstore_name,
                    ttl=30,
                    shard_count=2,
                    enable_tracking=False,
                    append_meta=False,
                    auto_split=True,
                    max_split_shard=64,
                    preserve_storage=False,
                    encrypt_conf=None,
                    telemetry_type='',
                    hot_ttl=-1,
                    mode=None,
                    infrequent_access_ttl=-1
                    ):
    """ create log store
    Unsuccessful operation will cause an LogException.

    :type project_name: string
    :param project_name: the Project name

    :type logstore_name: string
    :param logstore_name: the logstore name

    :type ttl: int
    :param ttl: the life cycle of log in the logstore in days, default 30, up to 3650

    :type shard_count: int
    :param shard_count: the shard count of the logstore to create, default 2

    :type enable_tracking: bool
    :param enable_tracking: enable web tracking, default is False

    :type append_meta: bool
    :param append_meta: allow to append meta info (server received time and IP for external IP to each received log)

    :type auto_split: bool
    :param auto_split: auto split shard, max_split_shard will be 64 by default is True

    :type max_split_shard: int
    :param max_split_shard: max shard to split, up to 256

    :type preserve_storage: bool
    :param preserve_storage: if always persist data, TTL will be ignored.

    :type encrypt_conf: dict
    :param encrypt_conf: following is a sample
        {
            "enable" : True/False,           # required
            "encrypt_type" : "default",      # required, default encrypt algorithm only currently
            "user_cmk_info" :                # optional, if set, use byok cmk key, otherwise use sls system cmk key
            {
                "cmk_key_id" : "",           # the cmk key used to generate data encrypt key
                "arn" : "",                  # arn to grant sls service to get/generate data encrypt key in kms
                "region_id" : ""             # the region id of cmk_key_id
            }
        }

    :type telemetry_type: string
    :param telemetry_type: the Telemetry type

    :type hot_ttl: int
    :param hot_ttl: the life cycle of hot storage, [0-hot_ttl] is hot storage, (hot_ttl-ttl] is warm storage; if hot_ttl=-1, [0-ttl] is all hot storage

    :type mode: string
    :param mode: type of logstore, can be chosen between lite and standard, default value standard

    :type infrequent_access_ttl: int
    :param infrequent_access_ttl: infrequent access storage time

    :return: CreateLogStoreResponse

    :raise: LogException
    """
    # preserve_storage overrides any caller-supplied TTL with the maximum.
    if preserve_storage:
        ttl = 3650

    params = {}
    resource = "/logstores"
    headers = {"x-log-bodyrawsize": '0', "Content-Type": "application/json"}
    body = {"logstoreName": logstore_name, "ttl": int(ttl), "shardCount": int(shard_count),
            "enable_tracking": enable_tracking,
            "autoSplit": auto_split,
            "maxSplitShard": max_split_shard,
            "appendMeta": append_meta,
            "telemetryType": telemetry_type
            }
    # Optional fields are omitted entirely (not sent as null/-1) so older
    # server versions that don't understand them are unaffected.
    if hot_ttl != -1:
        body['hot_ttl'] = hot_ttl
    if encrypt_conf is not None:
        body["encrypt_conf"] = encrypt_conf
    if mode is not None:
        body["mode"] = mode
    if infrequent_access_ttl >= 0:
        body["infrequentAccessTTL"] = infrequent_access_ttl
    body_str = six.b(json.dumps(body))
    try:
        (resp, header) = self._send("POST", project_name, body_str, resource, params, headers)
    except LogException as ex:
        # Older backends reject unknown JSON keys with this exact code/message;
        # retry once with only the basic parameters for backward compatibility.
        # NOTE(review): matching on the error message string is fragile — confirm
        # the server contract before changing either literal.
        if ex.get_error_code() == "LogStoreInfoInvalid" and ex.get_error_message() == "redundant key exist in json":
            logger.warning("LogStoreInfoInvalid, will retry with basic parameters. detail: {0}".format(ex))
            body = {"logstoreName": logstore_name, "ttl": int(ttl), "shardCount": int(shard_count),
                    "enable_tracking": enable_tracking}
            body_str = six.b(json.dumps(body))
            (resp, header) = self._send("POST", project_name, body_str, resource, params, headers)
        else:
            raise
    return CreateLogStoreResponse(header, resp)