in connector/src/main/scala/com/microsoft/kusto/spark/datasource/KustoReader.scala [167:198]
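// Returns true when the transient export directory already holds blobs.
// Impersonation auth checks through the Hadoop FileSystem API (wasbs://);
// SAS and account-key auth list the directory prefix via the Azure Storage SDK.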
private def dirExist(
    spark: SparkSession,
    params: TransientStorageCredentials,
    directory: String,
    endpointSuffix: String): Boolean = {
  if (params.authMethod == AuthMethod.Impersonation) {
    val url = s"wasbs://${params.blobContainer}@${params.storageAccountName}.blob.$endpointSuffix"
    val hadoopConf = spark.sparkContext.hadoopConfiguration
    val fs = FileSystem.get(new URI(url), hadoopConf)
    val path = new Path(url + s"/$directory")
    fs.exists(path)
  } else {
val endpoint = s"https://${params.storageAccountName}.blob.$endpointSuffix"
val container = params.authMethod match {
case AuthMethod.Sas =>
val sas = if (params.sasKey(0) == '?') params.sasKey else s"?${params.sasKey}"
new BlobContainerClientBuilder()
.endpoint(endpoint)
.containerName(params.blobContainer)
.credential(new AzureSasCredential(sas))
.buildClient()
      case AuthMethod.Key =>
        new BlobContainerClientBuilder()
          .endpoint(endpoint)
          .containerName(params.blobContainer)
          .credential(
            new StorageSharedKeyCredential(params.storageAccountName, params.storageAccountKey))
          .buildClient()
      case _ =>
        throw new InvalidParameterException(
          s"Unsupported authentication method for storage access: ${params.authMethod}")
    }
    // Replaces the legacy SDK check:
    // container.exists() && container.getDirectoryReference(directory).listBlobsSegmented().getLength > 0
    // A blob "directory" is a name prefix, so it exists iff listing under it yields at least one item.
    container.listBlobsByHierarchy(directory).stream().count() > 0
  }
}
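
// For context, a minimal standalone sketch of the non-impersonation (SAS) path,
// using the same Azure Storage SDK calls as the method above. The account name,
// container name, directory prefix, and environment variable are placeholders,
// not values from the connector.
import com.azure.core.credential.AzureSasCredential
import com.azure.storage.blob.BlobContainerClientBuilder

object BlobDirExistSketch {
  def main(args: Array[String]): Unit = {
    // Placeholder values; substitute a real account, container, and SAS token.
    val endpoint = "https://myaccount.blob.core.windows.net"
    val sasKey = sys.env.getOrElse("AZURE_SAS", "")
    val sas = if (sasKey.startsWith("?")) sasKey else s"?$sasKey"
    val container = new BlobContainerClientBuilder()
      .endpoint(endpoint)
      .containerName("exports")
      .credential(new AzureSasCredential(sas))
      .buildClient()
    // Blob storage has no real directories: a "directory" exists iff some
    // blob name starts with the prefix, so list one level and count.
    val exists = container.listBlobsByHierarchy("transientDir/").stream().count() > 0
    println(s"directory exists: $exists")
  }
}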