in buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy [332:545]
private static void configureBuildTasks(Project project) {
// Target Java 1.8 compilation
project.java.sourceCompatibility = '1.8'
project.java.targetCompatibility = '1.8'
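// sourceCompatibility sets the accepted Java language level; targetCompatibility sets the emitted bytecode version.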
// TODO: Remove all root project distribution logic. It should exist in a separate dist project.
if (project != project.rootProject) {
SourceSet mainSourceSet = project.sourceSets.main
// Add java source to project's source elements and javadoc elements
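// Note: sourceElements and javadocSourceElements are assumed to be outgoing configurations
// registered elsewhere in this plugin for sharing raw sources between projects.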
FileCollection javaSourceDirs = mainSourceSet.java.sourceDirectories
javaSourceDirs.each { File srcDir ->
project.getArtifacts().add('sourceElements', srcDir)
project.getArtifacts().add('javadocSourceElements', srcDir)
}
// Add scala sources to source elements if that plugin is applied
project.getPlugins().withType(ScalaPlugin.class) {
FileCollection scalaSourceDirs = mainSourceSet.scala.sourceDirectories
scalaSourceDirs.each { File scalaSrcDir ->
project.getArtifacts().add('sourceElements', scalaSrcDir)
}
}
// Do the same for any variants
project.getPlugins().withType(SparkVariantPlugin).whenPluginAdded {
SparkVariantPluginExtension sparkVariants = project.getExtensions().getByType(SparkVariantPluginExtension.class)
sparkVariants.featureVariants { SparkVariant variant ->
SourceSet variantMainSourceSet = project.sourceSets.getByName(variant.getSourceSetName('main'))
FileCollection variantJavaSourceDirs = variantMainSourceSet.java.sourceDirectories
variantJavaSourceDirs.each { File srcDir ->
project.getArtifacts().add(variant.configuration('sourceElements'), srcDir)
project.getArtifacts().add(variant.configuration('javadocSourceElements'), srcDir)
}
FileCollection variantScalaSourceDirs = variantMainSourceSet.scala.sourceDirectories
variantScalaSourceDirs.each { File scalaSrcDir ->
project.getArtifacts().add(variant.configuration('sourceElements'), scalaSrcDir)
}
}
}
}
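// Compile with lint warnings for unchecked generic operations and for questionable
// cross-compilation options (e.g. targeting 1.8 without a matching bootclasspath).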
project.tasks.withType(JavaCompile) { JavaCompile compile ->
compile.getOptions().setCompilerArgs(['-Xlint:unchecked', '-Xlint:options'])
}
// Enable HTML test reports
project.tasks.withType(Test) { Test testTask ->
testTask.getReports().getByName('html').setRequired(true)
}
// Configure every jar task (main, javadoc, and sources) with manifest metadata and include the license and notice files.
project.tasks.withType(Jar) { Jar jar ->
Manifest manifest = jar.getManifest()
manifest.attributes["Created-By"] = "${System.getProperty("java.version")} (${System.getProperty("java.specification.vendor")})"
manifest.attributes['Implementation-Title'] = project.name
manifest.attributes['Implementation-Version'] = project.version
manifest.attributes['Implementation-URL'] = "https://github.com/elastic/elasticsearch-hadoop"
manifest.attributes['Implementation-Vendor'] = "Elastic"
manifest.attributes['Implementation-Vendor-Id'] = "org.elasticsearch.hadoop"
manifest.attributes['Repository-Revision'] = BuildParams.gitRevision
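// Record the build identifier in the manifest when present; the ESHDP.BUILD environment
// variable is assumed to be set by the release/CI automation.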
String build = System.env['ESHDP.BUILD']
if (build != null) {
manifest.attributes['Build'] = build
}
// TODO: Should the license and notice files go on just the main jar, or do they make sense on all jars (jar, javadoc, sources)?
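// expand() runs the copied files through Groovy's template engine, filling in the ${copyright}
// and ${version} placeholders inside license.txt and notice.txt.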
jar.from("${project.rootDir}/docs/src/info") { CopySpec spec ->
spec.include("license.txt")
spec.include("notice.txt")
spec.into("META-INF")
spec.expand(copyright: new Date().format('yyyy'), version: project.version)
}
}
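// Publish the assembled jars through the distElements configuration so the distribution
// packaging below can consume them.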
if (project != project.rootProject) {
project.getArtifacts().add('distElements', project.tasks.getByName('jar'))
project.getPlugins().withType(SparkVariantPlugin).whenPluginAdded {
SparkVariantPluginExtension sparkVariants = project.getExtensions().getByType(SparkVariantPluginExtension.class)
sparkVariants.featureVariants { SparkVariant variant ->
project.getArtifacts().add(variant.configuration('distElements'), project.tasks.getByName(variant.taskName('jar')))
}
}
}
// Creates jar tasks and producer configurations for javadocs and sources.
// The producer configurations (javadocElements and sourcesElements) contain the javadoc and sources
// jars, which makes them more akin to distElements than to the source code configurations
// (javadocSourceElements and sourceElements).
project.java {
withJavadocJar()
withSourcesJar()
}
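// withSourcesJar() registered the sourcesJar task above; look it up so extra inputs can be wired in below.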
Jar sourcesJar = project.tasks.getByName('sourcesJar') as Jar
sourcesJar.dependsOn(project.tasks.classes)
// TODO: Remove when root project does not handle distribution
if (project != project.rootProject) {
sourcesJar.from(project.configurations.additionalSources)
project.getArtifacts().add('distElements', sourcesJar)
}
project.getPlugins().withType(SparkVariantPlugin).whenPluginAdded {
SparkVariantPluginExtension sparkVariants = project.getExtensions().getByType(SparkVariantPluginExtension.class)
sparkVariants.featureVariants { SparkVariant variant ->
// No need to create a sources jar task here; the variant plugin already creates it
Jar variantSourcesJar = project.tasks.getByName(variant.taskName('sourcesJar')) as Jar
variantSourcesJar.dependsOn(project.tasks.getByName(variant.taskName('classes')))
variantSourcesJar.from(project.configurations.getByName(variant.configuration('additionalSources')))
project.getArtifacts().add(variant.configuration('distElements'), variantSourcesJar)
}
}
// Configure javadoc
project.tasks.withType(Javadoc) { Javadoc javadoc ->
javadoc.title = "${project.rootProject.description} ${project.version} API"
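// Exclude internal implementation packages from the published API docs.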
javadoc.excludes = [
"org/elasticsearch/hadoop/mr/compat/**",
"org/elasticsearch/hadoop/rest/**",
"org/elasticsearch/hadoop/serialization/**",
"org/elasticsearch/hadoop/util/**",
"org/apache/hadoop/hive/**"
]
// Set javadoc executable to runtime Java (1.8)
javadoc.executable = new File(project.ext.runtimeJavaHome, 'bin/javadoc')
MinimalJavadocOptions javadocOptions = javadoc.getOptions()
javadocOptions.docFilesSubDirs = true
javadocOptions.outputLevel = JavadocOutputLevel.QUIET
javadocOptions.breakIterator = true
javadocOptions.author = false
javadocOptions.header = project.name
javadocOptions.showFromProtected()
javadocOptions.addStringOption('Xdoclint:none', '-quiet')
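// Disable doclint (its strict checks are fatal on newer JDKs); passing '-quiet' as the value is
// the usual workaround because addStringOption cannot emit a valueless option.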
javadocOptions.groups = [
'Elasticsearch Map/Reduce' : ['org.elasticsearch.hadoop.mr*'],
'Elasticsearch Hive' : ['org.elasticsearch.hadoop.hive*'],
'Elasticsearch Spark' : ['org.elasticsearch.spark*']
]
javadocOptions.links = [ // External doc links
"https://docs.oracle.com/javase/8/docs/api/",
"https://commons.apache.org/proper/commons-logging/apidocs/",
"https://hadoop.apache.org/docs/stable2/api/",
"https://hive.apache.org/javadocs/r1.2.2/api/",
"https://spark.apache.org/docs/latest/api/java/"
]
}
// TODO: Remove when root project does not handle distribution
if (project != project.rootProject) {
Javadoc javadoc = project.tasks.getByName('javadoc') as Javadoc
javadoc.source(project.configurations.javadocSources)
project.getPlugins().withType(SparkVariantPlugin).whenPluginAdded {
SparkVariantPluginExtension sparkVariants = project.getExtensions().getByType(SparkVariantPluginExtension.class)
sparkVariants.featureVariants { SparkVariant variant ->
Javadoc variantJavadoc = project.tasks.getByName(variant.taskName('javadoc')) as Javadoc
variantJavadoc.source(project.configurations.getByName(variant.configuration('javadocSources')))
}
}
}
// Package up the javadocs into their own jar
Jar javadocJar = project.tasks.getByName('javadocJar') as Jar
if (project != project.rootProject) {
project.getArtifacts().add('distElements', javadocJar)
}
project.getPlugins().withType(SparkVariantPlugin).whenPluginAdded {
SparkVariantPluginExtension sparkVariants = project.getExtensions().getByType(SparkVariantPluginExtension.class)
sparkVariants.featureVariants { SparkVariant variant ->
Jar variantJavadocJar = project.tasks.getByName(variant.taskName('javadocJar')) as Jar
project.getArtifacts().add(variant.configuration('distElements'), variantJavadocJar)
}
}
// Task for creating ALL of a project's jars - like assemble, but including the sourcesJar and javadocJar.
// TODO: The withJavadocJar()/withSourcesJar() calls above already attach those jars to assemble. Is this task still required?
// Assemble was previously configured to skip the javadoc and source jars because they can be time consuming
// to generate when simply building the project, but it is probably better to just run them.
Task pack = project.tasks.create('pack')
pack.dependsOn(project.tasks.jar)
pack.dependsOn(project.tasks.javadocJar)
pack.dependsOn(project.tasks.sourcesJar)
project.getPlugins().withType(SparkVariantPlugin).whenPluginAdded {
SparkVariantPluginExtension sparkVariants = project.getExtensions().getByType(SparkVariantPluginExtension.class)
sparkVariants.featureVariants { SparkVariant variant ->
pack.dependsOn(project.tasks.getByName(variant.taskName('jar')))
pack.dependsOn(project.tasks.getByName(variant.taskName('javadocJar')))
pack.dependsOn(project.tasks.getByName(variant.taskName('sourcesJar')))
}
}
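// e.g. `./gradlew pack` builds the main jar plus the javadoc, sources, and any variant jars.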
// The distribution task is like assemble, but it packages up extra jars and performs additional
// steps that are mostly used for snapshots and releases.
Task distribution = project.tasks.create('distribution')
distribution.dependsOn(pack)
// Co-locate all build artifacts in the distributions subdirectory for easier build automation
Copy collectArtifacts = project.tasks.create('collectArtifacts', Copy)
collectArtifacts.from(project.tasks.jar)
collectArtifacts.from(project.tasks.javadocJar)
collectArtifacts.from(project.tasks.sourcesJar)
collectArtifacts.into("${project.buildDir}/distributions")
collectArtifacts.dependsOn(pack)
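// Copy.from(task) already implies dependencies on the individual jar tasks; depending on pack
// as well ensures the variant jars are built before anything is staged.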
distribution.dependsOn(collectArtifacts)
project.getPlugins().withType(SparkVariantPlugin).whenPluginAdded {
SparkVariantPluginExtension sparkVariants = project.getExtensions().getByType(SparkVariantPluginExtension.class)
sparkVariants.featureVariants { SparkVariant variant ->
Copy variantCollectArtifacts = project.tasks.create('collectArtifacts' + variant.getName(), Copy)
variantCollectArtifacts.from(project.tasks.getByName(variant.taskName('jar')))
variantCollectArtifacts.from(project.tasks.getByName(variant.taskName('javadocJar')))
variantCollectArtifacts.from(project.tasks.getByName(variant.taskName('sourcesJar')))
variantCollectArtifacts.into("${project.buildDir}/distributions")
variantCollectArtifacts.dependsOn(pack)
distribution.dependsOn(variantCollectArtifacts)
}
}
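// e.g. `./gradlew distribution` stages all of a project's jars under its build/distributions directory.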
}