in genie-web/src/integTest/java/com/netflix/genie/web/apis/rest/v3/controllers/ApplicationRestControllerIntegrationTest.java [225:417]
/**
 * Integration test for the application search endpoint.
 *
 * <p>Creates seven application fixtures (five spark versions across ACTIVE/INACTIVE/DEPRECATED
 * statuses, one pig, one hive), then exercises the search API: unfiltered listing, page-size
 * limiting, and filtering by name, user, status, tag and type. Every request is also run
 * through {@code findFilter} so Spring REST Docs captures documentation snippets for each step.
 *
 * @throws Exception if creating a fixture resource or issuing a request fails
 */
void canFindApplications() throws Exception {
    final Application spark151 = new Application.Builder("spark", "genieUser1", "1.5.1", ApplicationStatus.ACTIVE)
        .withDependencies(Sets.newHashSet("s3://mybucket/spark/spark-1.5.1.tar.gz"))
        .withSetupFile("s3://mybucket/spark/setupBase-spark.sh")
        .withConfigs(Sets.newHashSet("s3://mybucket/spark/spark-env.sh"))
        .withDescription("Spark 1.5.1 for Genie")
        .withTags(Sets.newHashSet("type:spark", "ver:1.5.1"))
        .withType("spark")
        .build();
    final Application spark150 = new Application.Builder("spark", "genieUser2", "1.5.0", ApplicationStatus.ACTIVE)
        .withDependencies(Sets.newHashSet("s3://mybucket/spark/spark-1.5.0.tar.gz"))
        .withSetupFile("s3://mybucket/spark/setupBase-spark.sh")
        .withConfigs(Sets.newHashSet("s3://mybucket/spark/spark-env.sh"))
        .withDescription("Spark 1.5.0 for Genie")
        .withTags(Sets.newHashSet("type:spark", "ver:1.5.0"))
        .withType("spark")
        .build();
    // INACTIVE on purpose: excluded from the status-filter query below.
    final Application spark141 = new Application.Builder("spark", "genieUser3", "1.4.1", ApplicationStatus.INACTIVE)
        .withDependencies(Sets.newHashSet("s3://mybucket/spark/spark-1.4.1.tar.gz"))
        .withSetupFile("s3://mybucket/spark/setupBase-spark.sh")
        .withConfigs(Sets.newHashSet("s3://mybucket/spark/spark-env.sh"))
        .withDescription("Spark 1.4.1 for Genie")
        .withTags(Sets.newHashSet("type:spark", "ver:1.4.1"))
        .withType("spark")
        .build();
    final Application spark140
        = new Application.Builder("spark", "genieUser4", "1.4.0", ApplicationStatus.DEPRECATED)
        .withDependencies(Sets.newHashSet("s3://mybucket/spark/spark-1.4.0.tar.gz"))
        .withSetupFile("s3://mybucket/spark/setupBase-spark.sh")
        .withConfigs(Sets.newHashSet("s3://mybucket/spark/spark-env.sh"))
        .withDescription("Spark 1.4.0 for Genie")
        .withTags(Sets.newHashSet("type:spark", "ver:1.4.0"))
        .withType("spark")
        .build();
    final Application spark131
        = new Application.Builder("spark", "genieUser5", "1.3.1", ApplicationStatus.DEPRECATED)
        .withDependencies(Sets.newHashSet("s3://mybucket/spark/spark-1.3.1.tar.gz"))
        .withSetupFile("s3://mybucket/spark/setupBase-spark.sh")
        .withConfigs(Sets.newHashSet("s3://mybucket/spark/spark-env.sh"))
        .withDescription("Spark 1.3.1 for Genie")
        .withTags(Sets.newHashSet("type:spark", "ver:1.3.1"))
        .withType("spark")
        .build();
    // Fixed copy-paste error: this fixture was previously named "spark" even though its
    // dependencies, configs, description, tags and type all describe pig. No search in this
    // test queries by the name "spark" or "pig", so the correction does not affect assertions.
    // NOTE(review): version "0.4.0" disagrees with the "0.15.0" used in the description, tags
    // and dependency URI — left as-is since no assertion depends on it; confirm intent.
    final Application pig = new Application.Builder("pig", "genieUser6", "0.4.0", ApplicationStatus.ACTIVE)
        .withDependencies(Sets.newHashSet("s3://mybucket/pig/pig-0.15.0.tar.gz"))
        .withSetupFile("s3://mybucket/pig/setupBase-pig.sh")
        .withConfigs(Sets.newHashSet("s3://mybucket/pig/pig.properties"))
        .withDescription("Pig 0.15.0 for Genie")
        .withTags(Sets.newHashSet("type:pig", "ver:0.15.0"))
        .withType("pig")
        .build();
    final Application hive = new Application.Builder("hive", "genieUser7", "1.0.0", ApplicationStatus.ACTIVE)
        .withDependencies(Sets.newHashSet("s3://mybucket/hive/hive-1.0.0.tar.gz"))
        .withSetupFile("s3://mybucket/hive/setupBase-hive.sh")
        .withConfigs(
            Sets.newHashSet("s3://mybucket/hive/hive-env.sh", "s3://mybucket/hive/hive-log4j.properties")
        )
        .withDescription("Hive 1.0.0 for Genie")
        .withTags(Sets.newHashSet("type:hive", "ver:1.0.0"))
        .withType("hive")
        .build();

    // Persist all fixtures; creation order matters for the ordering assertions below.
    final String spark151Id = this.createConfigResource(spark151, null);
    final String spark150Id = this.createConfigResource(spark150, null);
    final String spark141Id = this.createConfigResource(spark141, null);
    final String spark140Id = this.createConfigResource(spark140, null);
    final String spark131Id = this.createConfigResource(spark131, null);
    final String pigId = this.createConfigResource(pig, null);
    final String hiveId = this.createConfigResource(hive, null);
    final List<String> appIds = Lists.newArrayList(
        spark151Id, spark150Id, spark141Id, spark140Id, spark131Id, pigId, hiveId);

    // REST Docs filter: documents query parameters, HAL headers, result fields and links.
    final RestDocumentationFilter findFilter = RestAssuredRestDocumentation.document(
        "{class-name}/{method-name}/{step}/",
        Snippets.APPLICATION_SEARCH_QUERY_PARAMETERS, // Request query parameters
        Snippets.HAL_CONTENT_TYPE_HEADER, // Response headers
        Snippets.APPLICATION_SEARCH_RESULT_FIELDS, // Result fields
        Snippets.SEARCH_LINKS // HAL Links
    );

    // Test finding all applications
    RestAssured
        .given(this.getRequestSpecification()).filter(findFilter)
        .when()
        .port(this.port).get(APPLICATIONS_API)
        .then()
        .statusCode(Matchers.is(HttpStatus.OK.value()))
        .contentType(Matchers.containsString(MediaTypes.HAL_JSON_VALUE))
        .body(APPLICATIONS_LIST_PATH, Matchers.hasSize(7))
        .body(APPLICATIONS_ID_LIST_PATH, Matchers.hasSize(7))
        .body(
            APPLICATIONS_ID_LIST_PATH,
            Matchers.containsInAnyOrder(spark151Id, spark150Id, spark141Id, spark140Id, spark131Id, pigId, hiveId)
        )
        .body(
            APPLICATIONS_COMMANDS_LINK_PATH,
            EntitiesLinksMatcher.matchUrisAnyOrder(
                APPLICATIONS_API, COMMANDS_LINK_KEY, COMMANDS_OPTIONAL_HAL_LINK_PARAMETERS, appIds
            )
        );

    // Limit the page size
    RestAssured
        .given(this.getRequestSpecification()).filter(findFilter).param("size", 2)
        .when()
        .port(this.port)
        .get(APPLICATIONS_API)
        .then()
        .statusCode(Matchers.is(HttpStatus.OK.value()))
        .contentType(Matchers.containsString(MediaTypes.HAL_JSON_VALUE))
        .body(APPLICATIONS_LIST_PATH, Matchers.hasSize(2));

    // Query by name
    RestAssured
        .given(this.getRequestSpecification()).filter(findFilter).param("name", "hive")
        .when()
        .port(this.port)
        .get(APPLICATIONS_API)
        .then()
        .statusCode(Matchers.is(HttpStatus.OK.value()))
        .contentType(Matchers.containsString(MediaTypes.HAL_JSON_VALUE))
        .body(APPLICATIONS_LIST_PATH, Matchers.hasSize(1))
        .body(APPLICATIONS_LIST_PATH + "[0].id", Matchers.is(hiveId));

    // Query by user
    RestAssured
        .given(this.getRequestSpecification()).filter(findFilter).param("user", "genieUser3")
        .when()
        .port(this.port)
        .get(APPLICATIONS_API)
        .then()
        .statusCode(Matchers.is(HttpStatus.OK.value()))
        .contentType(Matchers.containsString(MediaTypes.HAL_JSON_VALUE))
        .body(APPLICATIONS_LIST_PATH, Matchers.hasSize(1))
        .body(APPLICATIONS_LIST_PATH + "[0].id", Matchers.is(spark141Id));

    // Query by statuses — excludes only the INACTIVE spark 1.4.1. The expected order is the
    // reverse of creation order, which appears to be the endpoint's default sort (most
    // recently updated first) — TODO confirm against the controller's default Pageable.
    RestAssured
        .given(this.getRequestSpecification()).filter(findFilter)
        .param("status", ApplicationStatus.ACTIVE.toString(), ApplicationStatus.DEPRECATED.toString())
        .when()
        .port(this.port)
        .get(APPLICATIONS_API)
        .then()
        .statusCode(Matchers.is(HttpStatus.OK.value()))
        .contentType(Matchers.containsString(MediaTypes.HAL_JSON_VALUE))
        .body(APPLICATIONS_LIST_PATH, Matchers.hasSize(6))
        .body(APPLICATIONS_LIST_PATH + "[0].id", Matchers.is(hiveId))
        .body(APPLICATIONS_LIST_PATH + "[1].id", Matchers.is(pigId))
        .body(APPLICATIONS_LIST_PATH + "[2].id", Matchers.is(spark131Id))
        .body(APPLICATIONS_LIST_PATH + "[3].id", Matchers.is(spark140Id))
        .body(APPLICATIONS_LIST_PATH + "[4].id", Matchers.is(spark150Id))
        .body(APPLICATIONS_LIST_PATH + "[5].id", Matchers.is(spark151Id));

    // Query by tags — "genie.id:<id>" is a system tag uniquely identifying one application.
    RestAssured
        .given(this.getRequestSpecification()).filter(findFilter).param("tag", "genie.id:" + spark131Id)
        .when()
        .port(this.port)
        .get(APPLICATIONS_API)
        .then()
        .statusCode(Matchers.is(HttpStatus.OK.value()))
        .contentType(Matchers.containsString(MediaTypes.HAL_JSON_VALUE))
        .body(APPLICATIONS_LIST_PATH, Matchers.hasSize(1))
        .body(APPLICATIONS_LIST_PATH + "[0].id", Matchers.is(spark131Id));

    // Query by type — matches the five spark fixtures; again in reverse creation order.
    RestAssured
        .given(this.getRequestSpecification()).filter(findFilter).param("type", "spark")
        .when()
        .port(this.port)
        .get(APPLICATIONS_API)
        .then()
        .statusCode(Matchers.is(HttpStatus.OK.value()))
        .contentType(Matchers.containsString(MediaTypes.HAL_JSON_VALUE))
        .body(APPLICATIONS_LIST_PATH, Matchers.hasSize(5))
        .body(APPLICATIONS_LIST_PATH + "[0].id", Matchers.is(spark131Id))
        .body(APPLICATIONS_LIST_PATH + "[1].id", Matchers.is(spark140Id))
        .body(APPLICATIONS_LIST_PATH + "[2].id", Matchers.is(spark141Id))
        .body(APPLICATIONS_LIST_PATH + "[3].id", Matchers.is(spark150Id))
        .body(APPLICATIONS_LIST_PATH + "[4].id", Matchers.is(spark151Id));

    //TODO: Add tests for sort, orderBy etc

    // Sanity check: exactly the seven fixtures created above exist in the repository.
    Assertions.assertThat(this.applicationRepository.count()).isEqualTo(7L);
}