Merge pull request #2 from mjohns-databricks/jts-default
More adjustments to JTS default
mjohns-databricks authored Jun 16, 2023
2 parents d85515c + c42f263 commit a0c5a28
Showing 19 changed files with 39 additions and 39 deletions.
8 changes: 4 additions & 4 deletions R/sparkR-mosaic/enableMosaic.R
@@ -2,7 +2,7 @@
#'
#' @description enableMosaic activates the context dependent Databricks Mosaic functions, giving control over the geometry API and index system used.
#' See \url{https://databrickslabs.github.io/mosaic/} for full documentation
-#' @param geometryAPI character, default="ESRI"
+#' @param geometryAPI character, default="JTS"
#' @param indexSystem character, default="H3"
#' @param indexSystem boolean, default=F
#' @name enableMosaic
@@ -12,10 +12,10 @@
#' @examples
#' \dontrun{
#' enableMosaic()
-#' enableMosaic("ESRI", "H3")
-#' enableMosaic("ESRI", "BNG") }
+#' enableMosaic("JTS", "H3")
+#' enableMosaic("JTS", "BNG") }
enableMosaic <- function(
-geometryAPI="ESRI"
+geometryAPI="JTS"
,indexSystem="H3"
,rasterAPI="GDAL"
){
8 changes: 4 additions & 4 deletions R/sparklyr-mosaic/enableMosaic.R
@@ -3,7 +3,7 @@
#' @description enableMosaic activates the context dependent Databricks Mosaic functions, giving control over the geometry API and index system used.
#' See \url{https://databrickslabs.github.io/mosaic/} for full documentation
#' @param sc sparkContext
-#' @param geometryAPI character, default="ESRI"
+#' @param geometryAPI character, default="JTS"
#' @param indexSystem character, default="H3"
#' @name enableMosaic
#' @rdname enableMosaic
@@ -12,12 +12,12 @@
#' @examples
#' \dontrun{
#' enableMosaic()
-#' enableMosaic("ESRI", "H3")
-#' enableMosaic("ESRI", "BNG")}
+#' enableMosaic("JTS", "H3")
+#' enableMosaic("JTS", "BNG")}

enableMosaic <- function(
sc
-,geometryAPI="ESRI"
+,geometryAPI="JTS"
,indexSystem="H3"
,rasterAPI="GDAL"
){
4 changes: 2 additions & 2 deletions docs/code-example-notebooks/setup/setup-scala.scala
@@ -1,10 +1,10 @@
// Databricks notebook source
import org.apache.spark.sql.functions._
import com.databricks.labs.mosaic.functions.MosaicContext
-import com.databricks.labs.mosaic.ESRI
+import com.databricks.labs.mosaic.JTS
import com.databricks.labs.mosaic.H3

-val mosaicContext: MosaicContext = MosaicContext.build(H3, ESRI)
+val mosaicContext: MosaicContext = MosaicContext.build(H3, JTS)

// COMMAND ----------

2 changes: 1 addition & 1 deletion docs/source/api/spatial-functions.rst
@@ -949,7 +949,7 @@ st_isvalid
+---------------+

.. note:: Validity assertions will be dependent on the chosen geometry API.
-The assertions used in the ESRI geometry API (the default) follow the definitions in the
+The assertions used in the ESRI geometry API (JTS is the default) follow the definitions in the
"Simple feature access - Part 1" document (OGC 06-103r4) for each geometry type.


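For reference, calling st_isvalid under the new JTS default looks roughly like the sketch below; the SparkSession bound to spark, the sample DataFrame, and the wkt column name are illustrative assumptions rather than part of this commit.

    import org.apache.spark.sql.functions.col
    import com.databricks.labs.mosaic.functions.MosaicContext
    import com.databricks.labs.mosaic.{H3, JTS}

    // Assumes a running SparkSession bound to `spark`, as in a Databricks notebook.
    import spark.implicits._

    val mosaicContext = MosaicContext.build(H3, JTS)
    import mosaicContext.functions._

    // Hypothetical input: a DataFrame with a WKT string column named "wkt".
    val df = Seq("POINT (1 1)", "POLYGON ((0 0, 1 0, 1 1, 0 0))").toDF("wkt")

    // st_isvalid yields a boolean column; its validity semantics follow the active geometry API.
    val validated = df.select(st_isvalid(col("wkt")).alias("is_valid"))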
8 changes: 4 additions & 4 deletions docs/source/models/spatial-knn.rst
@@ -157,9 +157,9 @@ The transformer is called SpatialKNN and it is used as follows:
import com.databricks.labs.mosaic.models.knn.SpatialKNN
import com.databricks.labs.mosaic.functions.MosaicContext
import com.databricks.labs.mosaic.H3
-import com.databricks.labs.mosaic.ESRI
+import com.databricks.labs.mosaic.JTS
>>>
-val mosaicContext = MosaicContext.build(H3, ESRI)
+val mosaicContext = MosaicContext.build(H3, JTS)
import mosaicContext.functions._
mosaicContext.register(spark)
>>>
@@ -328,9 +328,9 @@ These datasets are not serialised with the model, and neither are the model outp
import com.databricks.labs.mosaic.models.knn.SpatialKNN
import com.databricks.labs.mosaic.functions.MosaicContext
import com.databricks.labs.mosaic.H3
-import com.databricks.labs.mosaic.ESRI
+import com.databricks.labs.mosaic.JTS
>>>
-val mosaicContext = MosaicContext.build(H3, ESRI)
+val mosaicContext = MosaicContext.build(H3, JTS)
import mosaicContext.functions._
mosaicContext.register(spark)
>>>
2 changes: 1 addition & 1 deletion docs/source/usage/automatic-sql-registration.rst
@@ -60,7 +60,7 @@ To install Mosaic on your Databricks cluster, take the following steps:
spark.databricks.labs.mosaic.index.system H3
# JTS or ESRI
spark.databricks.labs.mosaic.geometry.api JTS
-# MosaicSQL or MosaicSQLDefault, MosaicSQLDefault corresponds to (H3, ESRI)
+# MosaicSQL or MosaicSQLDefault, MosaicSQLDefault corresponds to (H3, JTS)
spark.sql.extensions com.databricks.labs.mosaic.sql.extensions.MosaicSQL
Testing
6 changes: 3 additions & 3 deletions docs/source/usage/grid-indexes-bng.rst
@@ -34,15 +34,15 @@ configurations. Spark provides an easy way to supply configuration parameters us
.. code-tab:: scala

import com.databricks.labs.mosaic.functions.MosaicContext
-import com.databricks.labs.mosaic.{BNG, ESRI}
+import com.databricks.labs.mosaic.{BNG, JTS}

-val mosaicContext = MosaicContext.build(BNG, ESRI)
+val mosaicContext = MosaicContext.build(BNG, JTS)
import mosaicContext.functions._

.. code-tab:: r R

library(sparkrMosaic)
-enableMosaic("ESRI", "BNG")
+enableMosaic("JTS", "BNG")

.. code-tab:: sql

8 changes: 4 additions & 4 deletions docs/source/usage/installation.rst
@@ -70,9 +70,9 @@ The mechanism for enabling the Mosaic functions varies by language:

import com.databricks.labs.mosaic.functions.MosaicContext
import com.databricks.labs.mosaic.H3
-import com.databricks.labs.mosaic.ESRI
+import com.databricks.labs.mosaic.JTS

-val mosaicContext = MosaicContext.build(H3, ESRI)
+val mosaicContext = MosaicContext.build(H3, JTS)
import mosaicContext.functions._

.. code-tab:: r R
@@ -90,8 +90,8 @@ register the Mosaic SQL functions in your SparkSession from a Scala notebook cel
import com.databricks.labs.mosaic.functions.MosaicContext
import com.databricks.labs.mosaic.H3
-import com.databricks.labs.mosaic.ESRI
+import com.databricks.labs.mosaic.JTS
-val mosaicContext = MosaicContext.build(H3, ESRI)
+val mosaicContext = MosaicContext.build(H3, JTS)
mosaicContext.register(spark)
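Taken together, the Scala enablement described on this installation page now reads roughly as follows (a minimal sketch assuming a notebook SparkSession bound to spark):

    import com.databricks.labs.mosaic.functions.MosaicContext
    import com.databricks.labs.mosaic.{H3, JTS}

    // Build the context with the H3 index system and the JTS geometry API.
    val mosaicContext = MosaicContext.build(H3, JTS)

    // Bring the DataFrame functions into scope and register the SQL functions
    // against the active session.
    import mosaicContext.functions._
    mosaicContext.register(spark)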
(next changed file; name not shown in this view)
@@ -10,7 +10,7 @@
from pyspark.sql.functions import *
import mosaic as mos

-spark.conf.set("spark.databricks.labs.mosaic.geometry.api", "ESRI")
+spark.conf.set("spark.databricks.labs.mosaic.geometry.api", "JTS")
spark.conf.set("spark.databricks.labs.mosaic.index.system", "H3")
mos.enable_mosaic(spark, dbutils)

(next changed file; name not shown in this view)
@@ -11,7 +11,7 @@
from pyspark.sql.functions import *
import mosaic as mos

-spark.conf.set("spark.databricks.labs.mosaic.geometry.api", "ESRI")
+spark.conf.set("spark.databricks.labs.mosaic.geometry.api", "JTS")
spark.conf.set("spark.databricks.labs.mosaic.index.system", "H3")
mos.enable_mosaic(spark, dbutils)

(next changed file; name not shown in this view)
@@ -12,7 +12,7 @@
from pyspark.sql.functions import *
import mosaic as mos

-spark.conf.set("spark.databricks.labs.mosaic.geometry.api", "ESRI")
+spark.conf.set("spark.databricks.labs.mosaic.geometry.api", "JTS")
spark.conf.set("spark.databricks.labs.mosaic.index.system", "H3")
mos.enable_mosaic(spark, dbutils)

(next changed file; name not shown in this view)
@@ -12,7 +12,7 @@
from pyspark.sql.functions import *
import mosaic as mos

-spark.conf.set("spark.databricks.labs.mosaic.geometry.api", "ESRI")
+spark.conf.set("spark.databricks.labs.mosaic.geometry.api", "JTS")
spark.conf.set("spark.databricks.labs.mosaic.index.system", "H3")
mos.enable_mosaic(spark, dbutils)

4 changes: 2 additions & 2 deletions notebooks/examples/scala/MosaicAndSedona.scala
@@ -23,9 +23,9 @@ SedonaSQLRegistrator.registerAll(spark)
// Import Mosaic functions
import com.databricks.labs.mosaic.functions.MosaicContext
import com.databricks.labs.mosaic.H3
-import com.databricks.labs.mosaic.ESRI
+import com.databricks.labs.mosaic.JTS

-val mosaicContext = MosaicContext.build(H3, ESRI)
+val mosaicContext = MosaicContext.build(H3, JTS)
import mosaicContext.functions._
import org.apache.spark.sql.functions._

4 changes: 2 additions & 2 deletions notebooks/examples/scala/QuickstartNotebook.scala
@@ -24,9 +24,9 @@ print(s"The raw data is stored in $raw_path")

import com.databricks.labs.mosaic.functions.MosaicContext
import com.databricks.labs.mosaic.H3
-import com.databricks.labs.mosaic.ESRI
+import com.databricks.labs.mosaic.JTS

-val mosaicContext = MosaicContext.build(H3, ESRI)
+val mosaicContext = MosaicContext.build(H3, JTS)
import mosaicContext.functions._
import org.apache.spark.sql.functions._

4 changes: 2 additions & 2 deletions notebooks/examples/sql/MosaicAndSedona.sql
@@ -34,9 +34,9 @@
-- MAGIC // Import Mosaic functions
-- MAGIC import com.databricks.labs.mosaic.functions.MosaicContext
-- MAGIC import com.databricks.labs.mosaic.H3
--- MAGIC import com.databricks.labs.mosaic.ESRI
+-- MAGIC import com.databricks.labs.mosaic.JTS
-- MAGIC
--- MAGIC val mosaicContext = MosaicContext.build(H3, ESRI)
+-- MAGIC val mosaicContext = MosaicContext.build(H3, JTS)
-- MAGIC import mosaicContext.functions._
-- MAGIC import org.apache.spark.sql.functions._

2 changes: 1 addition & 1 deletion python/mosaic/api/functions.py
@@ -487,7 +487,7 @@ def st_isvalid(geom: ColumnOrName) -> Column:
Notes
-----
Validity assertions will be dependent on the chosen geometry API.
-The assertions used in the ESRI geometry API (the default) follow the definitions in
+The assertions used in the ESRI geometry API (JTS is the default) follow the definitions in
the “Simple feature access - Part 1” document (OGC 06-103r4) for each geometry type.
"""
2 changes: 1 addition & 1 deletion python/mosaic/core/mosaic_context.py
@@ -31,7 +31,7 @@ def __init__(self, spark: SparkSession):
"spark.databricks.labs.mosaic.geometry.api"
)
except Py4JJavaError as e:
-self._geometry_api = "ESRI"
+self._geometry_api = "JTS"

try:
self._index_system = spark.conf.get(
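The Python change above only swaps the fallback value; the conf-lookup-with-fallback pattern itself is unchanged. A rough Scala equivalent of that lookup, for illustration only (not code from this commit):

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder().getOrCreate()

    // Read the configured geometry API, falling back to JTS when the key is unset.
    val geometryApi: String = spark.conf
      .getOption("spark.databricks.labs.mosaic.geometry.api")
      .getOrElse("JTS")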
(next changed file; name not shown in this view)
@@ -1,6 +1,6 @@
package com.databricks.labs.mosaic.sql.extensions

-import com.databricks.labs.mosaic.core.geometry.api.ESRI
+import com.databricks.labs.mosaic.core.geometry.api.JTS
import com.databricks.labs.mosaic.core.index.H3IndexSystem
import com.databricks.labs.mosaic.core.raster.api.RasterAPI.GDAL
import com.databricks.labs.mosaic.functions.MosaicContext
@@ -24,8 +24,8 @@ class MosaicSQLDefault extends (SparkSessionExtensions => Unit) with Logging {
*/
override def apply(ext: SparkSessionExtensions): Unit = {
ext.injectCheckRule(spark => {
-val mosaicContext = MosaicContext.build(H3IndexSystem, ESRI, GDAL)
-logInfo(s"Registering Mosaic SQL Extensions (H3, ESRI, GDAL).")
+val mosaicContext = MosaicContext.build(H3IndexSystem, JTS, GDAL)
+logInfo(s"Registering Mosaic SQL Extensions (H3, JTS, GDAL).")
mosaicContext.register(spark)
// NOP rule. This rule is specified only to respect syntax.
_ => ()
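A session that loads this default extension now ends up with (H3, JTS, GDAL). Wiring it up through the standard Spark extensions config could look like the sketch below; the fully qualified class name follows the package declared in this file, and on Databricks the same key is normally supplied as a cluster Spark config rather than in code.

    import org.apache.spark.sql.SparkSession

    // Register Mosaic's default SQL extension: H3 index system, JTS geometry API, GDAL raster API.
    val spark = SparkSession
      .builder()
      .appName("mosaic-sql-default")
      .config("spark.sql.extensions", "com.databricks.labs.mosaic.sql.extensions.MosaicSQLDefault")
      .getOrCreate()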
(next changed file; name not shown in this view)
@@ -11,7 +11,7 @@ class ConvertToCodegenMockTest extends AnyFunSuite with MockFactory {
test("ConvertTo Expression from GEOJSON to Unsupported format should throw an exception") {
val ctx = stub[CodegenContext]
val api = stub[GeometryAPI]
-api.name _ when () returns "ESRI"
+api.name _ when () returns "JTS"

assertThrows[Error] {
ConvertToCodeGen writeGeometryCode (ctx, "", "unsupported", api)
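The updated stub above keeps the same ScalaMock pattern, returning "JTS" instead of "ESRI" from the geometry API's name. A self-contained sketch of that stubbing style (the GeometryAPI import path is assumed from the package shown earlier in this commit):

    import com.databricks.labs.mosaic.core.geometry.api.GeometryAPI
    import org.scalamock.scalatest.MockFactory
    import org.scalatest.funsuite.AnyFunSuite

    class GeometryApiStubExample extends AnyFunSuite with MockFactory {
      test("a stubbed GeometryAPI reports JTS as its name") {
        // Stub the geometry API and fix the value returned by `name`.
        val api = stub[GeometryAPI]
        api.name _ when () returns "JTS"
        assert(api.name == "JTS")
      }
    }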
