public record ImportOpenStreetMap(Path file, String database, Integer databaseSrid)

in baremaps-core/src/main/java/org/apache/baremaps/workflow/tasks/ImportOpenStreetMap.java [44:144]


public record ImportOpenStreetMap(Path file, String database, Integer databaseSrid)
    implements Task {

  private static final Logger logger = LoggerFactory.getLogger(ImportOpenStreetMap.class);

  @Override
  public void execute(WorkflowContext context) throws Exception {
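    // Resolve the data source for the target database and the absolute path of the input file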
    var dataSource = context.getDataSource(database);
    var path = file.toAbsolutePath();

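    // Create the Postgres repositories for the OSM headers, nodes, ways, and relations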
    var headerRepository = new PostgresHeaderRepository(dataSource);
    var nodeRepository = new PostgresNodeRepository(dataSource);
    var wayRepository = new PostgresWayRepository(dataSource);
    var relationRepository = new PostgresRelationRepository(dataSource);

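    // Drop any existing tables so the import starts from a clean state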
    headerRepository.drop();
    nodeRepository.drop();
    wayRepository.drop();
    relationRepository.drop();

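    // Recreate the tables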
    headerRepository.create();
    nodeRepository.create();
    wayRepository.create();
    relationRepository.create();

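    // Create a temporary directory for the memory-mapped caches used during the import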
    var cacheDir = Files.createTempDirectory(Paths.get("."), "cache_");

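    // Cache the node coordinates on disk: for inputs larger than 1 GiB, use a memory-aligned
    // map indexed directly by node id; otherwise, use a more compact monotonic map backed by
    // an append-only buffer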
    DataMap<Coordinate> coordinateMap;
    if (Files.size(path) > 1 << 30) {
      var coordinatesFile = Files.createFile(cacheDir.resolve("coordinates"));
      coordinateMap = new MemoryAlignedDataMap<>(
          new LonLatDataType(),
          new MemoryMappedFile(coordinatesFile));
    } else {
      var coordinatesKeysFile = Files.createFile(cacheDir.resolve("coordinates_keys"));
      var coordinatesValsFile = Files.createFile(cacheDir.resolve("coordinates_vals"));
      coordinateMap =
          new MonotonicDataMap<>(
              new MemoryAlignedDataList<>(
                  new PairDataType<>(new LongDataType(), new LongDataType()),
                  new MemoryMappedFile(coordinatesKeysFile)),
              new AppendOnlyBuffer<>(
                  new LonLatDataType(),
                  new MemoryMappedFile(coordinatesValsFile)));
    }

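    // Cache the lists of node references in a monotonic map backed by memory-mapped files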
    var referencesKeysFile = Files.createFile(cacheDir.resolve("references_keys"));
    var referencesValsFile = Files.createFile(cacheDir.resolve("references_vals"));
    var referenceMap =
        new MonotonicDataMap<>(
            new MemoryAlignedDataList<>(
                new PairDataType<>(new LongDataType(), new LongDataType()),
                new MemoryMappedFile(referencesKeysFile)),
            new AppendOnlyBuffer<>(
                new LongListDataType(),
                new MemoryMappedFile(referencesValsFile)));

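    // Read the PBF file and import its entities into the database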
    execute(
        path,
        coordinateMap,
        referenceMap,
        headerRepository,
        nodeRepository,
        wayRepository,
        relationRepository,
        databaseSrid);

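    // Remove the temporary caches once the import has completed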
    FileUtils.deleteRecursively(cacheDir);
  }

  public static void execute(
      Path path,
      DataMap<Coordinate> coordinateMap,
      DataMap<List<Long>> referenceMap,
      HeaderRepository headerRepository,
      Repository<Long, Node> nodeRepository,
      Repository<Long, Way> wayRepository,
      Repository<Long, Relation> relationRepository,
      Integer databaseSrid) throws IOException {

    // Configure the block reader
    var reader = new PbfBlockReader()
        .geometries(true)
        .projection(databaseSrid)
        .coordinateMap(coordinateMap)
        .referenceMap(referenceMap);

    // Configure the block importer
    var importer = new BlockImporter(
        headerRepository,
        nodeRepository,
        wayRepository,
        relationRepository);

    // Stream and process the blocks
    try (var input = Files.newInputStream(path)) {
      StreamUtils.batch(reader.stream(input)).forEach(importer);
    }
  }
}
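
A minimal usage sketch (the file path, JDBC URL, and SRID below are hypothetical; the
WorkflowContext is assumed to be provided by the surrounding workflow engine):

    var task = new ImportOpenStreetMap(
        Paths.get("data/planet.osm.pbf"),            // hypothetical input file
        "jdbc:postgresql://localhost:5432/baremaps", // hypothetical database
        3857);                                       // hypothetical target SRID
    task.execute(context);                           // context: a WorkflowContext from the workflow engine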