in connector/src/main/scala/com/microsoft/kusto/spark/datasource/KustoReader.scala [239:272]
private[kusto] def setupBlobAccess(
    request: KustoReadRequest,
    storageParameters: TransientStorageParameters): Unit = {
  // Storage settings are written into the Spark context's Hadoop configuration,
  // so the exported blobs can be read back through the Azure file system.
  val config = request.sparkSession.sparkContext.hadoopConfiguration
  val now = Instant.now(Clock.systemUTC())
  for (storage <- storageParameters.storageCredentials) {
    storage.authMethod match {
      case AuthMethod.Key =>
        // Only (re)set the account key if the setup cache shows it was not already applied.
        if (!KustoAzureFsSetupCache.updateAndGetPrevStorageAccountAccess(
            storage.storageAccountName,
            storage.storageAccountKey,
            now)) {
          config.set(
            s"fs.azure.account.key.${storage.storageAccountName}.blob.${storageParameters.endpointSuffix}",
            s"${storage.storageAccountKey}")
        }
      case AuthMethod.Sas =>
        // Same cache check for SAS tokens, keyed by container and storage account.
        if (!KustoAzureFsSetupCache.updateAndGetPrevSas(
            storage.blobContainer,
            storage.storageAccountName,
            storage.sasKey,
            now)) {
          config.set(
            s"fs.azure.sas.${storage.blobContainer}.${storage.storageAccountName}.blob.${storageParameters.endpointSuffix}",
            s"${storage.sasKey}")
        }
      case _ => // Other auth methods require no Hadoop key/SAS configuration here.
    }
  }
  // Register the NativeAzureFileSystem implementation unless the cache shows it was already set.
  if (!KustoAzureFsSetupCache.updateAndGetPrevNativeAzureFs(now)) {
    config.set("fs.azure", "org.apache.hadoop.fs.azure.NativeAzureFileSystem")
  }
}
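
For context, the keys written above follow the standard hadoop-azure (WASB) property naming scheme. The following is a minimal, self-contained sketch of the resulting configuration, using a hypothetical storage account ("myaccount"), container ("exports"), and the public core.windows.net endpoint suffix; all values are placeholders and not taken from the connector.

import org.apache.hadoop.conf.Configuration

object BlobAccessSketch {
  def main(args: Array[String]): Unit = {
    val conf = new Configuration()

    // Key-based access: one property per storage account.
    conf.set("fs.azure.account.key.myaccount.blob.core.windows.net", "<account-key>")

    // SAS-based access: one property per container/account pair.
    conf.set("fs.azure.sas.exports.myaccount.blob.core.windows.net", "<sas-token>")

    // File system implementation used for wasb:// and wasbs:// URIs.
    conf.set("fs.azure", "org.apache.hadoop.fs.azure.NativeAzureFileSystem")

    // With these properties in place, Spark can read exported blobs at paths such as:
    //   wasbs://exports@myaccount.blob.core.windows.net/<exported-blob-path>
    println(conf.get("fs.azure"))
  }
}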