public List<RepoSchema> getSchemaList(String baseDir)

in src/main/java/com/amazonaws/schemamanager/repo/FileSystemRepoClient.java [94:227]


	public List<RepoSchema> getSchemaList(String baseDir) throws IOException {
		
		ArrayList<RepoSchema> repoSchemas = new ArrayList<>();
		
		//Populate the default metadata for the given application configuration
		defaults = RepoUtils.getDefaultMetadata(baseDir);
		
		List<String> filesToProcess = getSchemaFiles(baseDir);
		Map<String, String> failedFiles = null;
		
		Integer successCount = null;
		int totalIterations = 0;
		Map<String, Schema> avroKnownTypes = new HashMap<>();
		ProtobufParser protoParser = new ProtobufParser();
		
		/*
		 * References between schemas can cause parsing failures when a referenced
		 * schema has not been parsed yet, so we collect the failures and retry them
		 * in later passes. We do not know the required order until we examine each
		 * schema and attempt to parse it, so we keep looping until a pass makes no
		 * further progress (successCount == 0).
		 */
		while (filesToProcess != null && !filesToProcess.isEmpty() && (successCount == null || successCount>0)) {
			failedFiles = new HashMap<>();
			totalIterations++;
			successCount = 0;

			for (String repoSchemaFile : filesToProcess) {
				String fileExtension =  getExtensionByStringHandling(repoSchemaFile);
				
				if(fileExtension != null){
					FILE_EXTENSIONS fileExtensionKnown = FILE_EXTENSIONS.extensionFromString(fileExtension.toUpperCase());
					
					if(fileExtensionKnown != null && fileExtensionKnown != FILE_EXTENSIONS.UNKNOWN){
						//Create the new RepoSchema and build out its members
						RepoSchema repoSchema = new RepoSchema();
						RepoSchemaMetadata repoSchemaMetadata = new RepoSchemaMetadata();
						Path path = Paths.get(repoSchemaFile);
						Path relativePath = Paths.get(baseDir).relativize(path);
						repoSchema.setPath(relativePath.toString());
						
						switch (fileExtensionKnown) {
							case AVSC:
								try (InputStream in = Files.newInputStream(path)) { // auto-close the stream
									String schemaString = convertStreamToString(in);
									AvroSchemaSM.initParser(avroKnownTypes);
									ParsedSchema parsedSchema = new AvroSchemaSM(schemaString); // RepoSchema expects a ParsedSchema rather than a raw Avro/Protobuf schema
//									ParsedSchema parsedSchema = new AvroSchema(schemaString, Collections.emptyList(), avroResolvedRefs, null); //Parsed Schema because RepoSchema expects Parsed Schema not Avro/proto etc
//									ParsedSchema parsedSchema = RepoUtils.parseAvro(schemaString); 
									parsedSchema.validate();
									repoSchema.setSchema(parsedSchema);
									repoSchemaMetadata = getAssociatedFileMetadata(path.toString());

									if (repoSchemaMetadata.getSchemaName() == null) {
										repoSchemaMetadata.setSchemaName(parsedSchema.name());
									}

									repoSchema.setMetadata(repoSchemaMetadata);

									RepoUtils.completeDefaults(repoSchema, defaults);

									avroKnownTypes.put(repoSchemaMetadata.getSchemaName(),
											(Schema) parsedSchema.rawSchema());
								} catch (Exception e) {
									failedFiles.put(repoSchemaFile, e.getMessage());
									continue;
								}
								
							break;
							case PROTO:
							try (InputStream in = Files.newInputStream(path)) { // auto-close the stream
								String schemaString =  convertStreamToString(in);
//								ParsedSchema parsedSchema = new ProtobufSchema(schemaString);
								repoSchema = protoParser.parseProtoFile(repoSchemaFile, schemaString, repoSchema);
//								ParsedSchema parsedSchema = protoParser.parse(repoSchemaFile, schemaString);
//								repoSchema.setSchema(parsedSchema);
								repoSchemaMetadata = getAssociatedFileMetadata(path.toString());
								repoSchema.getMetadata().merge(repoSchemaMetadata);
								repoSchemaMetadata = repoSchema.getMetadata();
								
								if(repoSchemaMetadata.getSchemaName() == null){
									repoSchemaMetadata.setSchemaName(RepoUtils.getFullName(repoSchema.getSchema()));
								}
								
								RepoUtils.completeDefaults(repoSchema, defaults);
								
							} catch (Exception e) {
								failedFiles.put(repoSchemaFile, e.getMessage());
								continue;
							}
							break;
							case JSON:
							try (InputStream in = Files.newInputStream(path)) { // auto-close the stream
								String schemaString =  convertStreamToString(in);
								ParsedSchema parsedSchema = new JsonSchema(schemaString);
								repoSchema.setSchema(parsedSchema);
								repoSchemaMetadata = getAssociatedFileMetadata(path.toString());
								
								if(repoSchemaMetadata.getSchemaName() == null){
									repoSchemaMetadata.setSchemaName(parsedSchema.name());
								}
								
								repoSchema.setMetadata(repoSchemaMetadata);
								RepoUtils.completeDefaults(repoSchema, defaults);
								
							} catch (IOException | SchemaParseException e) {
								failedFiles.put(repoSchemaFile, e.getMessage());
								continue;
							}
							break;
							case UNKNOWN:
							
								log.info("File: %s", String.join(" ", repoSchemaFile, "is not a parsable file type."));

								break;
							
						}
						repoSchemas.add(repoSchema);
						successCount++;
					} else {
						log.info("File: " + repoSchemaFile + " is not a Schema File type.");
					}
				}
			}
			filesToProcess = new LinkedList<>(failedFiles.keySet()); // retry only the files that failed in this pass
		}
		log.info("Iteractions: " + totalIterations);
		if (failedFiles != null && !failedFiles.isEmpty()) {
			failedFiles.forEach( (ff, err) -> {
				log.warn("Couldn't parse schema file: " + ff + ". Error: " + err);
			});
		}
		
		return repoSchemas;
	}
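
For reference, here is a minimal caller sketch. It is only an illustration: the no-argument FileSystemRepoClient constructor and the RepoSchema.getPath() accessor are assumptions not shown in this excerpt, while getSchemaList(String), getMetadata(), and getSchemaName() do appear in the method above.

	public static void main(String[] args) throws IOException {
		// Assumed no-arg constructor; the real client may require configuration.
		FileSystemRepoClient repoClient = new FileSystemRepoClient();

		// baseDir is the root directory holding the .avsc/.proto/.json schema files.
		List<RepoSchema> schemas = repoClient.getSchemaList("/path/to/schema-repo");

		for (RepoSchema schema : schemas) {
			// getMetadata()/getSchemaName() are used in the method above; getPath() is assumed.
			System.out.println(schema.getPath() + " -> " + schema.getMetadata().getSchemaName());
		}
	}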