Create a simple Overture example (#862)
* Create a simple overture example

* Fix OPTIONAL group cardinality

* Clean up code

* Clean up code 2

* Clean up code 3
Drabble authored Jun 4, 2024
1 parent cd2018d commit a2475fd
Showing 17 changed files with 307 additions and 36 deletions.
11 changes: 11 additions & 0 deletions .run/overture-serve.run.xml
@@ -0,0 +1,11 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="overture-serve" type="Application" factoryName="Application">
<option name="MAIN_CLASS_NAME" value="org.apache.baremaps.cli.Baremaps" />
<module name="baremaps-cli" />
<option name="PROGRAM_PARAMETERS" value="map serve --tileset tileset.json --style style.json" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/examples/overture" />
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>
11 changes: 11 additions & 0 deletions .run/overture-workflow.run.xml
@@ -0,0 +1,11 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="overture-workflow" type="Application" factoryName="Application">
<option name="MAIN_CLASS_NAME" value="org.apache.baremaps.cli.Baremaps" />
<module name="baremaps-cli" />
<option name="PROGRAM_PARAMETERS" value="workflow execute --file workflow.json" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/examples/overture" />
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>
8 changes: 8 additions & 0 deletions baremaps-cli/src/main/resources/log4j.properties
@@ -0,0 +1,8 @@
# Root logger option
log4j.rootLogger=INFO, stdout

# Direct log messages to console
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
@@ -101,9 +101,17 @@ public DataSchema schema() {
this.schema = GeoParquetTypeConversion.asSchema(path.toString(), schema);
return this.schema;
} catch (URISyntaxException e) {
throw new GeoParquetException("Fail toe get the schema.", e);
throw new GeoParquetException("Failed to get the schema.", e);
}
}
return schema;
}

public int srid(String column) {
try {
return reader().getGeoParquetMetadata().getSrid(column);
} catch (Exception e) {
throw new GeoParquetException("Fail to read the SRID from the GeoParquet metadata", e);
}
}
}
@@ -70,6 +70,10 @@ public static List<Object> asRowValues(GeoParquetGroup group) {
Schema schema = group.getSchema();
List<Field> fields = schema.fields();
for (int i = 0; i < fields.size(); i++) {
if (group.getValues(i).isEmpty()) {
values.add(null);
continue;
}
Field field = fields.get(i);
switch (field.type()) {
case BINARY -> values.add(group.getBinaryValue(i).getBytes());
@@ -92,6 +96,9 @@ public static Map<String, Object> asNested(GeoParquetGroup group) {
Schema schema = group.getSchema();
List<Field> fields = schema.fields();
for (int i = 0; i < fields.size(); i++) {
if (group.getValues(i).isEmpty()) {
continue;
}
Field field = fields.get(i);
nested.put(field.name(), switch (field.type()) {
case BINARY -> group.getBinaryValue(i).getBytes();
@@ -47,6 +47,7 @@
@Type(value = ImportDaylightFeatures.class, name = "ImportDaylightFeatures"),
@Type(value = ImportDaylightTranslations.class, name = "ImportDaylightTranslations"),
@Type(value = ImportGeoPackage.class, name = "ImportGeoPackage"),
@Type(value = ImportGeoParquet.class, name = "ImportGeoParquet"),
@Type(value = ImportOsmOsc.class, name = "ImportOsmOsc"),
@Type(value = ImportOsmPbf.class, name = "ImportOsmPbf"),
@Type(value = ImportShapefile.class, name = "ImportShapefile"),
@@ -0,0 +1,97 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.baremaps.workflow.tasks;

import java.net.URI;
import java.util.StringJoiner;
import org.apache.baremaps.data.storage.DataTableGeometryMapper;
import org.apache.baremaps.data.storage.DataTableMapper;
import org.apache.baremaps.openstreetmap.function.ProjectionTransformer;
import org.apache.baremaps.storage.geoparquet.GeoParquetDataStore;
import org.apache.baremaps.storage.geoparquet.GeoParquetDataTable;
import org.apache.baremaps.storage.postgres.PostgresDataStore;
import org.apache.baremaps.workflow.Task;
import org.apache.baremaps.workflow.WorkflowContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Import a GeoParquet into a database.
*/
public class ImportGeoParquet implements Task {

private static final Logger logger = LoggerFactory.getLogger(ImportGeoParquet.class);

private URI uri;
private String tableName;
private Object database;
private Integer databaseSrid;

/**
* Constructs an {@code ImportGeoParquet}.
*/
public ImportGeoParquet() {

}

/**
* Constructs an {@code ImportGeoParquet}.
*
* @param uri the GeoParquet uri
* @param tableName the target table name
* @param database the database
* @param databaseSrid the target SRID
*/
public ImportGeoParquet(URI uri, String tableName, Object database, Integer databaseSrid) {
this.uri = uri;
this.tableName = tableName;
this.database = database;
this.databaseSrid = databaseSrid;
}

/**
* {@inheritDoc}
*/
@Override
public void execute(WorkflowContext context) throws Exception {
var geoParquetDataStore = new GeoParquetDataStore(uri);
var dataSource = context.getDataSource(database);
var postgresDataStore = new PostgresDataStore(dataSource);
for (var name : geoParquetDataStore.list()) {
var geoParquetTable = (GeoParquetDataTable) geoParquetDataStore.get(name);
var projectionTransformer =
new ProjectionTransformer(geoParquetTable.srid("geometry"), databaseSrid);
var rowTransformer =
new DataTableGeometryMapper(geoParquetTable, projectionTransformer);
var transformedDataTable =
new DataTableMapper(geoParquetDataStore.get(name), rowTransformer);
postgresDataStore.add(tableName, transformedDataTable);
}
}

/**
* {@inheritDoc}
*/
@Override
public String toString() {
return new StringJoiner(", ", ImportGeoParquet.class.getSimpleName() + "[", "]")
.add("uri=" + uri)
.add("database=" + database)
.add("databaseSrid=" + databaseSrid)
.toString();
}
}
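For context, this task is registered under the type name "ImportGeoParquet" in the workflow task list above, so it can be declared in the workflow.json executed by the overture-workflow run configuration. The snippet below is only an illustrative sketch, not the workflow.json shipped in this commit: the steps/tasks layout is assumed to follow the usual Baremaps workflow structure, the S3 URI and table name are hypothetical placeholders, and the JDBC URL and target SRID simply echo typical values from the example's tileset configuration.

{
  "steps": [
    {
      "id": "import-overture-admins",
      "needs": [],
      "tasks": [
        {
          "type": "ImportGeoParquet",
          "uri": "s3a://overturemaps-us-west-2/release/<version>/theme=admins/type=administrative_boundary/*.parquet",
          "tableName": "overture_admins_administrative_boundary",
          "database": "jdbc:postgresql://localhost:5432/baremaps?&user=baremaps&password=baremaps",
          "databaseSrid": 3857
        }
      ]
    }
  ]
}

A later step would presumably run indexes.sql against the imported table to build the materialized view and GIST index that the tile queries in tileset.json rely on.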
@@ -36,6 +36,8 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.parquet.hadoop.ParquetFileReader;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.schema.MessageType;
@@ -91,16 +93,14 @@ public Long size() throws URISyntaxException {
return files().values().stream().map(FileInfo::recordCount).reduce(0L, Long::sum);
}

private synchronized Map<FileStatus, FileInfo> files() throws URISyntaxException {
private synchronized Map<FileStatus, FileInfo> files() {
try {
if (files == null) {
files = new HashMap<>();
Path globPath = new Path(uri.getPath());
URI rootUri = getRootUri(uri);
FileSystem fileSystem = FileSystem.get(rootUri, configuration);
FileSystem fs = FileSystem.get(uri, configuration);
FileStatus[] fileStatuses = fs.globStatus(new Path(uri));

// Iterate over all the files in the path
for (FileStatus file : fileSystem.globStatus(globPath)) {
for (FileStatus file : fileStatuses) {
files.put(file, buildFileInfo(file));
}

@@ -115,7 +115,7 @@ private synchronized Map<FileStatus, FileInfo> files() throws URISyntaxException
}
}
} catch (IOException e) {
throw new GeoParquetException("IOException while attempting to list files.", e);
throw new GeoParquetException("IOException while attempting to list files.", e);
}
return files;
}
@@ -254,31 +254,11 @@ public int characteristics() {
}

private static Configuration createConfiguration() {
Configuration configuration = new Configuration();
configuration.set("fs.s3a.aws.credentials.provider",
"org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider");
configuration.setBoolean("fs.s3a.path.style.access", true);
return configuration;
Configuration conf = new Configuration();
conf.set("fs.s3a.endpoint", "s3.us-west-2.amazonaws.com");
conf.set("fs.s3a.aws.credentials.provider", AnonymousAWSCredentialsProvider.class.getName());
conf.set("fs.s3a.impl", S3AFileSystem.class.getName());
conf.set("fs.s3a.path.style.access", "true");
return conf;
}

private static URI getRootUri(URI uri) throws URISyntaxException {
// TODO:
// This is a quick and dirty way to get the root uri of the path.
// We take everything before the first wildcard in the path.
// This is not a perfect solution, and we should probably look for a better way to do this.
String path = uri.getPath();
int index = path.indexOf("*");
if (index != -1) {
path = path.substring(0, path.lastIndexOf("/", index) + 1);
}
return new URI(
uri.getScheme(),
uri.getUserInfo(),
uri.getHost(),
uri.getPort(),
path,
null,
null);
}

}
@@ -59,6 +59,8 @@
*/
GeoParquetGroup createGroup(int fieldIndex);

List<Primitive> getValues(int fieldIndex);

Binary getBinaryValue(int fieldIndex);

List<Binary> getBinaryValues(int fieldIndex);
@@ -69,7 +69,7 @@ else if (!field.isPrimitive()) {
GeoParquetGroup.Schema geoParquetSchema = createGeoParquetSchema(groupType, metadata);
return (Field) new GeoParquetGroup.GroupField(
groupType.getName(),
GeoParquetGroup.Cardinality.REQUIRED,
cardinality,
geoParquetSchema);
}

@@ -278,7 +278,8 @@ private void appendToString(StringBuilder builder, String indent) {
}
}

private List<Primitive> getValues(int fieldIndex) {
@Override
public List<Primitive> getValues(int fieldIndex) {
return (List<Primitive>) data[fieldIndex];
}

15 changes: 15 additions & 0 deletions examples/overture/indexes.sql
@@ -0,0 +1,15 @@
-- Licensed to the Apache Software Foundation (ASF) under one or more
-- contributor license agreements. See the NOTICE file distributed with
-- this work for additional information regarding copyright ownership.
-- The ASF licenses this file to you under the Apache License, Version 2.0
-- (the "License"); you may not use this file except in compliance with
-- the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
CREATE INDEX IF NOT EXISTS overture_admins_administrative_boundary_materialized_view_gist ON overture_admins_administrative_boundary_materialized_view USING GIST(geom);
24 changes: 24 additions & 0 deletions examples/overture/style.json
@@ -0,0 +1,24 @@
{
"version" : 8,
"sources" : {
"baremaps" : {
"type" : "vector",
"url" : "http://localhost:9000/tiles.json"
}
},
"layers" : [ {
"id" : "administrative_boundary",
"type" : "line",
"source" : "baremaps",
"source-layer" : "administrative_boundary",
"layout" : {
"visibility" : "visible"
},
"paint" : {
"line-color": "black",
"line-width": 1
}
}],
"center" : [ 0, 0 ],
"zoom" : 2
}
24 changes: 24 additions & 0 deletions examples/overture/tiles.json
@@ -0,0 +1,24 @@
{
"tilejson": "2.2.0",
"tiles": [
"http://localhost:9000/tiles/{z}/{x}/{y}.mvt"
],
"minzoom": 0.0,
"maxzoom": 14.0,
"center": [0, 0],
"bounds": [-180, -85, 180, 85],
"zoom": 2,
"database": "jdbc:postgresql://localhost:5432/baremaps?&user=baremaps&password=baremaps",
"vector_layers": [
{
"id": "administrative_boundary",
"queries": [
{
"minzoom": 0,
"maxzoom": 14,
"sql": "SELECT id, tags, geom FROM overture_admins_administrative_boundary_materialized_view"
}
]
}
]
}
24 changes: 24 additions & 0 deletions examples/overture/tileset.json
@@ -0,0 +1,24 @@
{
"tilejson": "2.2.0",
"minzoom": 0,
"maxzoom": 14,
"center": [0, 0],
"bounds": [-180, -85, 180, 85],
"zoom": 2,
"tiles": [
"http://localhost:9000/tiles/{z}/{x}/{y}.mvt"
],
"database": "jdbc:postgresql://localhost:5432/baremaps?&user=baremaps&password=baremaps",
"vector_layers": [
{
"id": "administrative_boundary",
"queries": [
{
"minzoom": 0,
"maxzoom": 14,
"sql": "SELECT id, tags, geom FROM overture_admins_administrative_boundary_materialized_view"
}
]
}
]
}
