Skip to content
This repository has been archived by the owner on Jun 14, 2024. It is now read-only.

Commit

Permalink
Make CoveringIndexConfig available in Python
Browse files · Browse the repository at this point in the history
  • Loading branch information
Chungmin Lee committed Jul 2, 2021
1 parent 7f10ec7 commit 27d39e9
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 13 deletions.
4 changes: 2 additions & 2 deletions python/hyperspace/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from .hyperspace import Hyperspace
from .indexconfig import IndexConfig
from .indexconfig import CoveringIndexConfig, IndexConfig

__all__ = [
'Hyperspace', 'IndexConfig'
'Hyperspace', 'CoveringIndexConfig', 'IndexConfig'
]
14 changes: 7 additions & 7 deletions python/hyperspace/hyperspace.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,13 +19,13 @@ def __init__(self, spark):
self.jvm = spark._jvm
self.hyperspace = self.jvm.com.microsoft.hyperspace.Hyperspace(spark._jsparkSession)

def _getJavaIndexConfig(self, index_config):
def _getJavaCoveringIndexConfig(self, index_config):
"""
Constructs IndexConfig Java object from python wrapper IndexConfig object.
:param index_config: IndexConfig java object
:return: IndexConfig python object
Constructs CoveringIndexConfig Java object from python wrapper CoveringIndexConfig object.
:param index_config: CoveringIndexConfig java object
:return: CoveringIndexConfig python object
>>> _getJavaIndexConfig(idx_config)
>>> _getJavaCoveringIndexConfig(idx_config)
"""
indexed_columns = self._getScalaSeqFromList(index_config.indexedColumns)
included_columns = self._getScalaSeqFromList(index_config.includedColumns)
Expand Down Expand Up @@ -67,11 +67,11 @@ def createIndex(self, dataFrame, indexConfig):
:param indexConfig: indexConfig
>>> hyperspace = Hyperspace(spark)
>>> idxConfig = IndexConfig("indexName", ["c1"], ["c2","c3"])
>>> idxConfig = CoveringIndexConfig("indexName", ["c1"], ["c2","c3"])
>>> df = spark.read.parquet("./sample.parquet").toDF("c1", "c2", "c3")
>>> hyperspace.createIndex(df, indexConfig)
"""
self.hyperspace.createIndex(dataFrame._jdf, self._getJavaIndexConfig(indexConfig))
self.hyperspace.createIndex(dataFrame._jdf, self._getJavaCoveringIndexConfig(indexConfig))

def deleteIndex(self, indexName):
"""
Expand Down
11 changes: 7 additions & 4 deletions python/hyperspace/indexconfig.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,17 @@
class CoveringIndexConfig:
    """Python-side description of a Hyperspace covering index.

    Holds the index name together with the indexed and included column
    lists; the Hyperspace client translates it into the matching JVM
    ``CoveringIndexConfig`` object when an index is created.
    """

    def __init__(self, indexName, indexedColumns, includedColumns):
        """
        Create a CoveringIndexConfig.

        :param indexName: name of the index
        :param indexedColumns: columns the index is keyed on
        :param includedColumns: extra columns materialized in the index
        >>> idxConfig = CoveringIndexConfig("indexName", ["c1"], ["c2","c3"])
        """
        self.indexName = indexName
        self.indexedColumns = indexedColumns
        self.includedColumns = includedColumns


# Backward-compatible alias for the pre-rename class name; kept as an
# identity alias so `IndexConfig is CoveringIndexConfig` and isinstance
# checks continue to hold for existing callers.
# TODO(ChungminL): Add deprecation warning
IndexConfig = CoveringIndexConfig

0 comments on commit 27d39e9

Please sign in to comment.