spark-connector/spark-common/build.gradle.kts
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

plugins {
  `maven-publish`
  id("java")
  id("idea")
  alias(libs.plugins.shadow)
}
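
// Note: the libs.* accessors used below resolve plugin and library versions from the Gradle
// version catalog, typically defined in gradle/libs.versions.toml (exact location is assumed).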

repositories {
  mavenCentral()
}
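
// The Scala version can be overridden via the scalaVersion project property, falling back to
// defaultScalaVersion; an illustrative invocation (module path assumed from this file's location):
//   ./gradlew :spark-connector:spark-common:build -PscalaVersion=2.13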
val scalaVersion: String = project.properties["scalaVersion"] as? String ?: extra["defaultScalaVersion"].toString()
val sparkVersion: String = libs.versions.spark33.get()
val sparkMajorVersion: String = sparkVersion.substringBeforeLast(".")
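// (For example, a sparkVersion of "3.3.4" would yield sparkMajorVersion "3.3"; the actual value
// comes from the spark33 entry in the version catalog.)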
val icebergVersion: String = libs.versions.iceberg4spark.get()
val paimonVersion: String = libs.versions.paimon.get()
// The Kyuubi Hive connector built for Spark 3.3 doesn't support Scala 2.13, so the Spark 3.4 build is used
val kyuubiVersion: String = libs.versions.kyuubi4spark34.get()
val scalaJava8CompatVersion: String = libs.versions.scala.java.compat.get()
val scalaCollectionCompatVersion: String = libs.versions.scala.collection.compat.get()
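// Note: dotted accessors such as libs.versions.scala.java.compat typically map to hyphenated
// catalog keys (e.g. "scala-java-compat"); the exact key names depend on this repo's catalog.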

dependencies {
  implementation(project(":catalogs:catalog-common"))
  implementation(libs.guava)
  implementation(libs.caffeine)

  compileOnly(project(":clients:client-java-runtime", configuration = "shadow"))
  compileOnly("org.apache.iceberg:iceberg-spark-runtime-${sparkMajorVersion}_$scalaVersion:$icebergVersion")
  compileOnly("org.apache.kyuubi:kyuubi-spark-connector-hive_$scalaVersion:$kyuubiVersion")
  compileOnly("org.apache.paimon:paimon-spark-$sparkMajorVersion:$paimonVersion") {
    exclude("org.apache.spark")
  }
  compileOnly("org.apache.spark:spark-catalyst_$scalaVersion:$sparkVersion")
  compileOnly("org.apache.spark:spark-core_$scalaVersion:$sparkVersion")
  compileOnly("org.apache.spark:spark-sql_$scalaVersion:$sparkVersion")
  compileOnly("org.scala-lang.modules:scala-java8-compat_$scalaVersion:$scalaJava8CompatVersion")

  annotationProcessor(libs.lombok)
  compileOnly(libs.lombok)
  testAnnotationProcessor(libs.lombok)
  testCompileOnly(libs.lombok)

  // Use the Log4j version bundled with Spark: Spark 3.3 ships an older Log4j, and mixing versions causes
  // java.lang.NoSuchMethodError: org.apache.logging.slf4j.Log4jLoggerFactory: method <init>()V not found
testImplementation(project(":api")) {
exclude("org.apache.logging.log4j")
}
testImplementation(project(":clients:client-java")) {
exclude("org.apache.logging.log4j")
}
testImplementation(project(":core")) {
exclude("org.apache.logging.log4j")
}
testImplementation(project(":common")) {
exclude("org.apache.logging.log4j")
}
testImplementation(project(":server")) {
exclude("org.apache.logging.log4j")
}
testImplementation(project(":server-common")) {
exclude("org.apache.logging.log4j")
}
testImplementation(project(":integration-test-common", "testArtifacts"))
  testImplementation(libs.hive2.common) {
    exclude("org.apache.curator")
    // Use the Hadoop version provided by Spark
    exclude("org.apache.hadoop")
    exclude("org.apache.logging.log4j")
    exclude("org.eclipse.jetty.aggregate", "jetty-all")
    exclude("org.eclipse.jetty.orbit", "javax.servlet")
  }
  testImplementation(libs.hive2.metastore) {
    exclude("co.cask.tephra")
    exclude("com.github.joshelser")
    exclude("com.google.code.findbugs", "jsr305")
    exclude("com.tdunning", "json")
    exclude("com.zaxxer", "HikariCP")
    exclude("io.dropwizard.metrics")
    exclude("javax.transaction", "transaction-api")
    exclude("org.apache.avro")
    exclude("org.apache.curator")
    exclude("org.apache.hbase")
    exclude("org.apache.hadoop")
    exclude("org.apache.hive", "hive-common")
    exclude("org.apache.hive", "hive-shims")
    exclude("org.apache.logging.log4j")
    exclude("org.apache.parquet", "parquet-hadoop-bundle")
    exclude("org.apache.zookeeper")
    exclude("org.eclipse.jetty.aggregate", "jetty-all")
    exclude("org.eclipse.jetty.orbit", "javax.servlet")
    exclude("org.slf4j")
  }
  testImplementation(libs.junit.jupiter.api)
  testImplementation(libs.junit.jupiter.params)
  testImplementation(libs.mysql.driver)
  testImplementation(libs.postgresql.driver)
  testImplementation(libs.testcontainers)

  testImplementation("org.apache.iceberg:iceberg-core:$icebergVersion")
  testImplementation("org.apache.iceberg:iceberg-hive-metastore:$icebergVersion")
  testImplementation("org.apache.iceberg:iceberg-spark-runtime-${sparkMajorVersion}_$scalaVersion:$icebergVersion")
  testImplementation("org.apache.paimon:paimon-spark-$sparkMajorVersion:$paimonVersion") {
    exclude("org.apache.spark")
  }
  testImplementation("org.apache.kyuubi:kyuubi-spark-connector-hive_$scalaVersion:$kyuubiVersion")
  // spark-hive transitively provides spark-sql, spark-catalyst, hive-common, and the HDFS client
testImplementation("org.apache.spark:spark-hive_$scalaVersion:$sparkVersion") {
// conflict with Gravitino server jersey
exclude("org.glassfish.jersey.core")
exclude("org.glassfish.jersey.containers")
exclude("org.glassfish.jersey.inject")
}
testImplementation("org.scala-lang.modules:scala-collection-compat_$scalaVersion:$scalaCollectionCompatVersion")
testImplementation("org.apache.spark:spark-catalyst_$scalaVersion:$sparkVersion")
testImplementation("org.apache.spark:spark-core_$scalaVersion:$sparkVersion")
testImplementation("org.apache.spark:spark-sql_$scalaVersion:$sparkVersion")
testRuntimeOnly(libs.junit.jupiter.engine)
}
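
// Integration tests can be skipped by passing the skipITs project property; an illustrative
// invocation (module path assumed from this file's location):
//   ./gradlew :spark-connector:spark-common:test -PskipITs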
tasks.test {
  val skipITs = project.hasProperty("skipITs")
  if (skipITs) {
    // Exclude integration tests
    exclude("**/integration/test/**")
  } else {
    dependsOn(tasks.jar)
  }
}

tasks.clean {
  delete("spark-warehouse")
}

sourceSets {
  named("test") {
    resources {
      exclude("**/*")
    }
  }
}

val testJar by tasks.registering(Jar::class) {
  archiveClassifier.set("tests")
  from(sourceSets["test"].output)
}

configurations {
  create("testArtifacts")
}

artifacts {
  add("testArtifacts", testJar)
}
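
// Downstream modules can consume this module's test jar through the "testArtifacts" configuration,
// mirroring the :integration-test-common usage above; an illustrative consumer declaration:
//   testImplementation(project(":spark-connector:spark-common", "testArtifacts"))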