def read_file()

in src/batch_processor.py


import csv
from operator import itemgetter

# logMessage and the LOGTYPE_* constants are assumed to be defined elsewhere
# in src/batch_processor.py.
def read_file(fileName, inputBucket, inputFile, s3, dbTableName):
    # Note: dbTableName is part of the signature but unused in this function.
    input_products = []
    logMessage(fileName, 'Reading file - ' + inputBucket + "/" + inputFile, LOGTYPE_INFO)
    productId = ""
    productName = ""
    productDescription = ""
    try:
        s3_object = s3.Object(inputBucket, inputFile).get()[u'Body'].read().decode('utf-8')
        input_lines = s3_object.splitlines()
        
        productIndex = 0
        # The header row is ProductId,ProductName,ProductDescription, so parse
        # the input as comma-separated and access each field by column name.
        for row in csv.DictReader(input_lines):
            try:
                productId = row['ProductId']
                productName = row['ProductName']
                productDescription = row['ProductDescription']
            except Exception as ex:
                # Log and skip any row that cannot be read.
                logMessage(fileName, "Error retrieving Product " + str(ex), LOGTYPE_DEBUG)
                continue

            productIndex += 1
            input_products.append({
                'productIndex': productIndex,
                'productId': productId,
                'productName': productName,
                'productDescription': productDescription,
            })

        # Rows are appended in index order, so one sort after the loop suffices.
        input_products = sorted(input_products, key=itemgetter('productIndex'))
            
    except Exception as ex:
        logMessage(fileName, "Error parsing excel " + str(ex), LOGTYPE_ERROR)

    return input_products
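
A minimal invocation sketch, assuming the S3 resource comes from boto3 and that logMessage and the LOGTYPE_* constants are available in the module; the bucket, object key, and table names below are placeholders, not values from the source:

import boto3

s3 = boto3.resource('s3')
# Placeholder bucket, object key, and table name, purely for illustration.
products = read_file('batch_processor.py', 'example-input-bucket',
                     'products.csv', s3, 'ExampleProducts')
for product in products:
    print(product['productIndex'], product['productId'])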