# downloadRDSLogs() — excerpt from container-code/src/rdslogsshipper.py


def downloadRDSLogs(region, s3Client, rdsClient, dbinst, logBucket):
    """Download new RDS log files for *dbinst* and advance the S3 pointer.

    Compares each log file's ``LastWritten`` timestamp against the timestamp
    recorded in ``LOGPOINTER.TXT`` (read via ``checkLastPointer``) and
    downloads only files written since the last run, then persists the newest
    timestamp seen so subsequent runs skip already-shipped files.

    Args:
        region: AWS region name, passed through to the helper functions.
        s3Client: boto3 S3 client used by ``writeToS3Bucket``.
        rdsClient: boto3 RDS client used to list and download log files.
        dbinst: RDS DB instance identifier.
        logBucket: S3 bucket name holding the logs and the pointer object.
    """
    print ("Function download rds Logs into S3 bucket {} for database instance {}".format(logBucket, dbinst))

    # Pointer object key: records the newest LastWritten timestamp shipped so far.
    logPointer = logBucket + '/' + dbinst + '/LOGPOINTER.TXT'

    loggedTimeStamp = checkLastPointer(logBucket, logPointer)
    print ("Logged timestamp {}".format(loggedTimeStamp))

    # Track the newest timestamp seen as an int; start from the stored pointer
    # so a run that downloads nothing does not move the pointer backwards.
    # (0 means "no pointer recorded yet".)
    pointerTimeStamp = 0 if loggedTimeStamp == 0 else int(loggedTimeStamp)

    # describe_db_log_files is paginated via 'Marker'; follow every page so
    # instances with many log files are fully covered (previously only the
    # first page was examined).
    describeKwargs = {'DBInstanceIdentifier': dbinst}
    while True:
        dbLogFilesListResp = rdsClient.describe_db_log_files(**describeKwargs)
        print (dbLogFilesListResp['DescribeDBLogFiles'])

        # Download each log file updated since the recorded pointer.
        for logFile in dbLogFilesListResp['DescribeDBLogFiles']:
            logFileName = logFile['LogFileName']

            if (loggedTimeStamp == 0) or (int(loggedTimeStamp) < logFile['LastWritten']):
                print ("Logfile {} had a last update logged at {}".format(logFile['LogFileName'], logFile['LastWritten']))
                marker = '0'
                LogFileData = downloadRDSLogPortion(region=region, rdsClient=rdsClient, dbinst=dbinst, logFileName=logFileName, marker=marker, LogFileData='')
                # NOTE(review): LogFileData is collected but never written to S3
                # in this function — confirm downloadRDSLogPortion persists it,
                # otherwise the downloaded content is discarded.
                # Use max() so an out-of-order listing cannot rewind the pointer
                # to an older file's timestamp (previously the last file in the
                # listing won unconditionally).
                pointerTimeStamp = max(pointerTimeStamp, logFile['LastWritten'])
            else:
                print ("Logfile {} already downloaded....skipping".format(logFile['LogFileName']))

        nextMarker = dbLogFilesListResp.get('Marker')
        if not nextMarker:
            break
        describeKwargs['Marker'] = nextMarker

    # Write this logfile data to S3 bucket for future runs of this program
    writeToS3Bucket(region=region, s3Client=s3Client, bucketName=logBucket, fileNameKey=logPointer, data=pointerTimeStamp)