def get_log()

in aliyun/log/logclient.py [0:0]


    def get_log(self, project, logstore, from_time, to_time, topic=None,
                query=None, reverse=False, offset=0, size=100, power_sql=False, scan=False, forward=True, accurate_query=True, from_time_nano_part=0, to_time_nano_part=0):
        """ Get logs from log service.
        will retry DEFAULT_QUERY_RETRY_COUNT when incomplete.
        Unsuccessful operation will cause an LogException.
        Note: for larger volume of data (e.g. > 1 million logs), use get_log_all

        :type project: string
        :param project: project name

        :type logstore: string
        :param logstore: logstore name

        :type from_time: int/string
        :param from_time: the begin timestamp, or a readable time string like "%Y-%m-%d %H:%M:%S<time_zone>" e.g. "2018-01-02 12:12:10+8:00"; also supports human-readable strings, e.g. "1 hour ago", "now", "yesterday 0:0:0", refer to https://aliyun-log-cli.readthedocs.io/en/latest/tutorials/tutorial_human_readable_datetime.html

        :type to_time: int/string
        :param to_time: the end timestamp, or a readable time string like "%Y-%m-%d %H:%M:%S<time_zone>" e.g. "2018-01-02 12:12:10+8:00"; also supports human-readable strings, e.g. "1 hour ago", "now", "yesterday 0:0:0", refer to https://aliyun-log-cli.readthedocs.io/en/latest/tutorials/tutorial_human_readable_datetime.html

        :type topic: string
        :param topic: topic name of logs, could be None

        :type query: string
        :param query: user-defined query, could be None

        :type reverse: bool
        :param reverse: if reverse is set to true, the query returns the latest logs first; default is false

        :type offset: int
        :param offset: line offset of returned logs

        :type size: int
        :param size: max number of log lines to return; -1 means get all

        :type power_sql: bool
        :param power_sql: if power_sql is set to true, the query will run in enhanced SQL mode

        :type scan: bool
        :param scan: if scan is set to true, the query will use scan mode

        :type forward: bool
        :param forward: only for scan queries; if forward is set to true, the query fetches the next page, otherwise the previous page

        :type accurate_query: bool
        :param accurate_query: if accurate_query is set to true, the query runs in accurate mode (results globally ordered by time at second precision)

        :type from_time_nano_part: int
        :param from_time_nano_part: nanosecond part of the query begin time

        :type to_time_nano_part: int
        :param to_time_nano_part: nanosecond part of the query end time

        :return: GetLogsResponse

        :raise: LogException
        """
        # delegate to query_more (paged fetching) when it's not a stats/SELECT query
        # and the requested size exceeds the single-page limit (or is -1 for all)
        size, offset = int(size), int(offset)
        if not is_stats_query(query) and (size == -1 or size > MAX_GET_LOG_PAGING_SIZE):
            return query_more(
                self.get_log,
                offset=offset,
                size=size,
                batch_size=MAX_GET_LOG_PAGING_SIZE,
                project=project,
                logstore=logstore,
                from_time=from_time,
                to_time=to_time,
                topic=topic,
                query=query,
                reverse=reverse,
                accurate_query=accurate_query,
                from_time_nano_part=from_time_nano_part,
                to_time_nano_part=to_time_nano_part
            )

        ret = None
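        # retry up to DEFAULT_QUERY_RETRY_COUNT times until the result is complete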
        for _c in xrange(DEFAULT_QUERY_RETRY_COUNT):
            headers = {}
            params = {'from': parse_timestamp(from_time),
                      'to': parse_timestamp(to_time),
                      'line': size,
                      'offset': offset,
                      'powerSql': power_sql,
                      'accurate': accurate_query,
                      'fromNs': from_time_nano_part,
                      'toNs': to_time_nano_part
                      }

            if topic:
                params['topic'] = topic
            if query:
                params['query'] = query
            if scan:
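                # scan mode is requested via the session parameter; forward selects the paging direction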
                params['session'] = 'mode=scan'
                params['forward'] = 'true' if forward else 'false'
            
            if self._get_logs_v2_enabled:
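                # GetLogs v2: POST the parameters as a JSON body to /logstores/<logstore>/logs,
                # then decompress the response according to the requested Accept-Encoding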
                resource = "/logstores/" + logstore + "/logs"
                headers["Content-Type"] = "application/json"
                params['reverse'] = reverse
                params['forward'] = forward
                body_str = six.b(json.dumps(params))
                headers["x-log-bodyrawsize"] = str(len(body_str))
                accept_encoding = str(CompressType.default_compress_type())
                headers['Accept-Encoding'] = accept_encoding

                (resp, header) = self._send("POST", project, body_str, resource, None, headers, respons_body_type=accept_encoding)

                raw_data = Compressor.decompress_response(header, resp)
                exJson = self._loadJson(200, header, raw_data, requestId=Util.h_v_td(header, 'x-log-requestid', ''))
                exJson = Util.convert_unicode_to_str(exJson)
                ret = GetLogsResponse(exJson, header)
            else:
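                # GetLogs v1: GET /logstores/<logstore> with the parameters in the query string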
                resource = "/logstores/" + logstore
                params['type'] = 'log'
                params['reverse'] = 'true' if reverse else 'false'
                (resp, header) = self._send("GET", project, None, resource, params, headers)
                ret = GetLogsResponse._from_v1_resp(resp, header)
            if ret.is_completed():
                break

            time.sleep(DEFAULT_QUERY_RETRY_INTERVAL)

        return ret
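
A minimal usage sketch (the endpoint, project, logstore, and query below are placeholders; LogClient, GetLogsResponse.get_count()/get_logs(), and QueriedLog.get_time()/get_contents() are the SDK accessors assumed here):

from aliyun.log import LogClient

# client for the region endpoint hosting the project (placeholder credentials)
client = LogClient("cn-hangzhou.log.aliyuncs.com", "<access_key_id>", "<access_key_secret>")

# fetch the last hour of logs, newest first, using human-readable times
# (placeholder project / logstore names and query)
res = client.get_log("my-project", "my-logstore",
                     from_time="1 hour ago", to_time="now",
                     query="status: 500", reverse=True, size=100)

if res.is_completed():
    print("got %d logs" % res.get_count())
    for log in res.get_logs():
        print(log.get_time(), log.get_contents())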