From a094bc1c817358e90128f097ab5099b045b73e33 Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Tue, 31 Oct 2023 05:25:31 +0000 Subject: [PATCH] run through black --- .../python/synapse/ml/cognitive/__init__.py | 2 ++ .../langchain/test_LangchainTransform.py | 1 + .../synapse/ml/core/platform/Platform.py | 1 - .../cyber/anomaly/collaborative_filtering.py | 3 --- .../ml/cyber/anomaly/complement_access.py | 1 - .../main/python/synapse/ml/cyber/dataset.py | 3 --- .../synapse/ml/cyber/feature/scalers.py | 6 ------ .../isolationforest/IsolationForestModel.py | 1 - .../synapse/ml/dl/DeepTextClassifier.py | 2 -- .../python/synapse/ml/dl/DeepTextModel.py | 1 - .../synapse/ml/dl/DeepVisionClassifier.py | 1 - .../python/synapse/ml/dl/DeepVisionModel.py | 1 - .../python/synapse/ml/dl/PredictionParams.py | 4 ---- .../test/python/synapsemltest/dl/conftest.py | 1 - .../dl/test_deep_text_classifier.py | 1 - .../dl/test_deep_vision_classifier.py | 1 - ...age - Async, Batching, and Multi-Key.ipynb | 8 ++++++-- .../Multivariate Anomaly Detection.ipynb | 8 ++++++-- .../AI Services/Overview.ipynb | 20 ++++++++++++++----- ...uickstart - Analyze Celebrity Quotes.ipynb | 8 ++++++-- .../Quickstart - Analyze Text.ipynb | 4 +++- ...tart - Creare a Visual Search Engine.ipynb | 8 ++++++-- .../Quickstart - Create Audiobooks.ipynb | 8 ++++++-- ...ent Question and Answering with PDFs.ipynb | 8 ++++++-- .../Quickstart - Flooding Risk.ipynb | 4 +++- .../Quickstart - Predictive Maintenance.ipynb | 4 +++- .../Quickstart - Isolation Forests.ipynb | 4 +++- docs/Explore Algorithms/OpenAI/OpenAI.ipynb | 4 +++- .../Quickstart - OpenAI Embedding.ipynb | 4 +++- ...kstart - Understand and Search Forms.ipynb | 16 +++++++++++---- .../Quickstart - Snow Leopard Detection.ipynb | 4 +++- .../Quickstart - Your First Models.ipynb | 4 +++- tools/esrp/prepare_jar.py | 1 - website/doctest.py | 1 - 34 files changed, 90 insertions(+), 58 deletions(-) diff --git a/cognitive/src/main/python/synapse/ml/cognitive/__init__.py b/cognitive/src/main/python/synapse/ml/cognitive/__init__.py index fe4966baf4..67e563ea95 100644 --- a/cognitive/src/main/python/synapse/ml/cognitive/__init__.py +++ b/cognitive/src/main/python/synapse/ml/cognitive/__init__.py @@ -7,10 +7,12 @@ ) import synapse.ml.services + # This function will be called when an attribute is not found in synapse.ml.cognitive def __getattr__(name): return getattr(synapse.ml.services, name) + # Set the __getattr__ function to the cognitive module sys.modules["synapse.ml.cognitive"].__getattr__ = __getattr__ diff --git a/cognitive/src/test/python/synapsemltest/services/langchain/test_LangchainTransform.py b/cognitive/src/test/python/synapsemltest/services/langchain/test_LangchainTransform.py index 2e625a00cf..3a1503832a 100644 --- a/cognitive/src/test/python/synapsemltest/services/langchain/test_LangchainTransform.py +++ b/cognitive/src/test/python/synapsemltest/services/langchain/test_LangchainTransform.py @@ -8,6 +8,7 @@ from synapse.ml.services.langchain import LangchainTransformer from synapsemltest.spark import * + ####################################################### # this part is to correct a bug in langchain, # where the llm type of AzureOpenAI was set diff --git a/core/src/main/python/synapse/ml/core/platform/Platform.py b/core/src/main/python/synapse/ml/core/platform/Platform.py index 8e47f19d2a..f33e565acb 100644 --- a/core/src/main/python/synapse/ml/core/platform/Platform.py +++ b/core/src/main/python/synapse/ml/core/platform/Platform.py @@ -47,7 +47,6 @@ def running_on_databricks(): def 
find_secret(secret_name, keyvault): - if running_on_synapse() or running_on_synapse_internal(): from notebookutils.mssparkutils.credentials import getSecret diff --git a/core/src/main/python/synapse/ml/cyber/anomaly/collaborative_filtering.py b/core/src/main/python/synapse/ml/cyber/anomaly/collaborative_filtering.py index ccf9f3448d..55950728b7 100644 --- a/core/src/main/python/synapse/ml/cyber/anomaly/collaborative_filtering.py +++ b/core/src/main/python/synapse/ml/cyber/anomaly/collaborative_filtering.py @@ -106,7 +106,6 @@ def __init__( user_feature_vector_mapping_df: DataFrame, res_feature_vector_mapping_df: DataFrame, ): - self.tenant_col = tenant_col self.user_col = user_col self.user_vec_col = user_vec_col @@ -127,7 +126,6 @@ def replace_mappings( user_feature_vector_mapping_df: Optional[DataFrame] = None, res_feature_vector_mapping_df: Optional[DataFrame] = None, ): - """ create a new model replacing the user and resource models with new ones (optional) @@ -765,7 +763,6 @@ def __init__( negScore: Optional[float] = None, historyAccessDf: Optional[DataFrame] = None, ): - super().__init__() if applyImplicitCf: diff --git a/core/src/main/python/synapse/ml/cyber/anomaly/complement_access.py b/core/src/main/python/synapse/ml/cyber/anomaly/complement_access.py index 8a14613d83..530d1bf829 100644 --- a/core/src/main/python/synapse/ml/cyber/anomaly/complement_access.py +++ b/core/src/main/python/synapse/ml/cyber/anomaly/complement_access.py @@ -41,7 +41,6 @@ def __init__( indexed_col_names_arr: List[str], complementset_factor: int, ): - super().__init__() # we assume here that all indices of the columns are continuous within their partition_key diff --git a/core/src/main/python/synapse/ml/cyber/dataset.py b/core/src/main/python/synapse/ml/cyber/dataset.py index b73f368912..02fd06db09 100644 --- a/core/src/main/python/synapse/ml/cyber/dataset.py +++ b/core/src/main/python/synapse/ml/cyber/dataset.py @@ -19,7 +19,6 @@ def __init__( num_eng_resources: int = 50, single_component: bool = True, ): - self.hr_users = ["hr_user_" + str(i) for i in range(num_hr_users)] self.hr_resources = ["hr_res_" + str(i) for i in range(num_hr_resources)] @@ -67,7 +66,6 @@ def edges_between( full_node_coverage: bool, not_set: Optional[Set[Tuple[str, str]]] = None, ) -> List[Tuple[str, str, float]]: - import itertools if len(users) == 0 or len(resources) == 0: @@ -92,7 +90,6 @@ def edges_between( and (len(seen_users) < len(users)) or (len(seen_resources) < len(resources)) ): - if cart is not None: assert len(cart) > 0, cart ii = self.rand.randint(0, len(cart) - 1) diff --git a/core/src/main/python/synapse/ml/cyber/feature/scalers.py b/core/src/main/python/synapse/ml/cyber/feature/scalers.py index dad6ae299d..1d04edf8ee 100644 --- a/core/src/main/python/synapse/ml/cyber/feature/scalers.py +++ b/core/src/main/python/synapse/ml/cyber/feature/scalers.py @@ -39,7 +39,6 @@ def __init__( per_group_stats: Union[DataFrame, Dict[str, float]], use_pandas: bool = True, ): - super().__init__() ExplainBuilder.build( self, @@ -128,7 +127,6 @@ def __init__( output_col: str, use_pandas: bool = True, ): - super().__init__() ExplainBuilder.build( self, @@ -193,7 +191,6 @@ def __init__( coefficient_factor: float = 1.0, use_pandas: bool = True, ): - super().__init__( input_col, partition_key, @@ -250,7 +247,6 @@ def __init__( coefficient_factor: float = 1.0, use_pandas: bool = True, ): - super().__init__(input_col, partition_key, output_col, use_pandas) self.coefficient_factor = coefficient_factor @@ -292,7 +288,6 @@ def __init__( 
max_required_value: float, use_pandas: bool = True, ): - super().__init__( input_col, partition_key, @@ -382,7 +377,6 @@ def __init__( max_required_value: float = 1.0, use_pandas: bool = True, ): - super().__init__(input_col, partition_key, output_col, use_pandas) self.min_required_value = min_required_value self.max_required_value = max_required_value diff --git a/core/src/main/python/synapse/ml/isolationforest/IsolationForestModel.py b/core/src/main/python/synapse/ml/isolationforest/IsolationForestModel.py index 06d95759bb..822d4d5097 100644 --- a/core/src/main/python/synapse/ml/isolationforest/IsolationForestModel.py +++ b/core/src/main/python/synapse/ml/isolationforest/IsolationForestModel.py @@ -7,7 +7,6 @@ @inherit_doc class IsolationForestModel(_IsolationForestModel): - # The generated implementation does not work. Override it to return the java object. def getInnerModel(self): return self._java_obj.getInnerModel() diff --git a/deep-learning/src/main/python/synapse/ml/dl/DeepTextClassifier.py b/deep-learning/src/main/python/synapse/ml/dl/DeepTextClassifier.py index 0702fc828b..cd41cf508d 100644 --- a/deep-learning/src/main/python/synapse/ml/dl/DeepTextClassifier.py +++ b/deep-learning/src/main/python/synapse/ml/dl/DeepTextClassifier.py @@ -25,7 +25,6 @@ class DeepTextClassifier(TorchEstimator, TextPredictionParams): - checkpoint = Param( Params._dummy(), "checkpoint", "checkpoint of the deep text classifier" ) @@ -230,7 +229,6 @@ def _get_or_create_backend(self): ) def _update_transformation_fn(self): - text_col = self.getTextCol() label_col = self.getLabelCol() max_token_len = self.getMaxTokenLen() diff --git a/deep-learning/src/main/python/synapse/ml/dl/DeepTextModel.py b/deep-learning/src/main/python/synapse/ml/dl/DeepTextModel.py index 4bdc2c8d51..1fafe1ec4f 100644 --- a/deep-learning/src/main/python/synapse/ml/dl/DeepTextModel.py +++ b/deep-learning/src/main/python/synapse/ml/dl/DeepTextModel.py @@ -11,7 +11,6 @@ class DeepTextModel(TorchModel, TextPredictionParams): - tokenizer = Param(Params._dummy(), "tokenizer", "tokenizer") checkpoint = Param( diff --git a/deep-learning/src/main/python/synapse/ml/dl/DeepVisionClassifier.py b/deep-learning/src/main/python/synapse/ml/dl/DeepVisionClassifier.py index 2968fbd7a8..069d723340 100644 --- a/deep-learning/src/main/python/synapse/ml/dl/DeepVisionClassifier.py +++ b/deep-learning/src/main/python/synapse/ml/dl/DeepVisionClassifier.py @@ -29,7 +29,6 @@ class DeepVisionClassifier(TorchEstimator, VisionPredictionParams): - backbone = Param( Params._dummy(), "backbone", "backbone of the deep vision classifier" ) diff --git a/deep-learning/src/main/python/synapse/ml/dl/DeepVisionModel.py b/deep-learning/src/main/python/synapse/ml/dl/DeepVisionModel.py index 1fa67dcb4f..d2520eeb36 100644 --- a/deep-learning/src/main/python/synapse/ml/dl/DeepVisionModel.py +++ b/deep-learning/src/main/python/synapse/ml/dl/DeepVisionModel.py @@ -14,7 +14,6 @@ class DeepVisionModel(TorchModel, VisionPredictionParams): - transform_fn = Param( Params._dummy(), "transform_fn", diff --git a/deep-learning/src/main/python/synapse/ml/dl/PredictionParams.py b/deep-learning/src/main/python/synapse/ml/dl/PredictionParams.py index c2e7782790..5c83b85037 100644 --- a/deep-learning/src/main/python/synapse/ml/dl/PredictionParams.py +++ b/deep-learning/src/main/python/synapse/ml/dl/PredictionParams.py @@ -5,7 +5,6 @@ class HasLabelColParam(Params): - label_col = Param( Params._dummy(), "label_col", @@ -31,7 +30,6 @@ def getLabelCol(self): class HasImageColParam(Params): - 
image_col = Param( Params._dummy(), "image_col", @@ -58,7 +56,6 @@ def getImageCol(self): ## TODO: Potentially generalize to support multiple text columns as input class HasTextColParam(Params): - text_col = Param( Params._dummy(), "text_col", @@ -84,7 +81,6 @@ def getTextCol(self): class HasPredictionColParam(Params): - prediction_col = Param( Params._dummy(), "prediction_col", diff --git a/deep-learning/src/test/python/synapsemltest/dl/conftest.py b/deep-learning/src/test/python/synapsemltest/dl/conftest.py index 1542168271..c469c72049 100644 --- a/deep-learning/src/test/python/synapsemltest/dl/conftest.py +++ b/deep-learning/src/test/python/synapsemltest/dl/conftest.py @@ -30,7 +30,6 @@ def num_processes(self): def _download_dataset(): - urllib.request.urlretrieve( "https://mmlspark.blob.core.windows.net/publicwasb/17flowers.tgz", dataset_dir + "17flowers.tgz", diff --git a/deep-learning/src/test/python/synapsemltest/dl/test_deep_text_classifier.py b/deep-learning/src/test/python/synapsemltest/dl/test_deep_text_classifier.py index 4cd464f34a..97c9e421e8 100644 --- a/deep-learning/src/test/python/synapsemltest/dl/test_deep_text_classifier.py +++ b/deep-learning/src/test/python/synapsemltest/dl/test_deep_text_classifier.py @@ -27,7 +27,6 @@ def test_bert_base_cased(): ] with local_store() as store: - checkpoint = "bert-base-uncased" deep_text_classifier = DeepTextClassifier( diff --git a/deep-learning/src/test/python/synapsemltest/dl/test_deep_vision_classifier.py b/deep-learning/src/test/python/synapsemltest/dl/test_deep_vision_classifier.py index 24dac4d80f..d089dc1f59 100644 --- a/deep-learning/src/test/python/synapsemltest/dl/test_deep_vision_classifier.py +++ b/deep-learning/src/test/python/synapsemltest/dl/test_deep_vision_classifier.py @@ -80,7 +80,6 @@ def test_mobilenet_v2(get_data_path): train_folder, test_folder = get_data_path with local_store() as store: - deep_vision_classifier = DeepVisionClassifier( backbone="mobilenet_v2", store=store, diff --git a/docs/Explore Algorithms/AI Services/Advanced Usage - Async, Batching, and Multi-Key.ipynb b/docs/Explore Algorithms/AI Services/Advanced Usage - Async, Batching, and Multi-Key.ipynb index 963cdf996f..59f4910d97 100644 --- a/docs/Explore Algorithms/AI Services/Advanced Usage - Async, Batching, and Multi-Key.ipynb +++ b/docs/Explore Algorithms/AI Services/Advanced Usage - Async, Batching, and Multi-Key.ipynb @@ -46,7 +46,9 @@ "source": [ "from synapse.ml.core.platform import find_secret\n", "\n", - "service_key = find_secret(secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\")\n", + "service_key = find_secret(\n", + " secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ")\n", "service_loc = \"eastus\"" ] }, @@ -358,7 +360,9 @@ "from pyspark.sql.functions import udf\n", "import random\n", "\n", - "service_key_2 = find_secret(secret_name=\"cognitive-api-key-2\", keyvault=\"mmlspark-build-keys\")\n", + "service_key_2 = find_secret(\n", + " secret_name=\"cognitive-api-key-2\", keyvault=\"mmlspark-build-keys\"\n", + ")\n", "keys = [service_key, service_key_2]\n", "\n", "\n", diff --git a/docs/Explore Algorithms/AI Services/Multivariate Anomaly Detection.ipynb b/docs/Explore Algorithms/AI Services/Multivariate Anomaly Detection.ipynb index 14c0423951..61b9fbba93 100644 --- a/docs/Explore Algorithms/AI Services/Multivariate Anomaly Detection.ipynb +++ b/docs/Explore Algorithms/AI Services/Multivariate Anomaly Detection.ipynb @@ -62,11 +62,15 @@ "from synapse.ml.core.platform import find_secret\n", 
"\n", "# An Anomaly Dectector subscription key\n", - "anomalyKey = find_secret(secret_name=\"anomaly-api-key\", keyvault=\"mmlspark-build-keys\") # use your own anomaly api key\n", + "anomalyKey = find_secret(\n", + " secret_name=\"anomaly-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # use your own anomaly api key\n", "# Your storage account name\n", "storageName = \"anomalydetectiontest\" # use your own storage account name\n", "# A connection string to your blob storage account\n", - "storageKey = find_secret(secret_name=\"madtest-storage-key\", keyvault=\"mmlspark-build-keys\") # use your own storage key\n", + "storageKey = find_secret(\n", + " secret_name=\"madtest-storage-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # use your own storage key\n", "# A place to save intermediate MVAD results\n", "intermediateSaveDir = (\n", " \"wasbs://madtest@anomalydetectiontest.blob.core.windows.net/intermediateData\"\n", diff --git a/docs/Explore Algorithms/AI Services/Overview.ipynb b/docs/Explore Algorithms/AI Services/Overview.ipynb index 871528d814..a39c672eed 100644 --- a/docs/Explore Algorithms/AI Services/Overview.ipynb +++ b/docs/Explore Algorithms/AI Services/Overview.ipynb @@ -159,22 +159,32 @@ "from synapse.ml.core.platform import *\n", "\n", "# A general AI services key for Text Analytics, Computer Vision and Form Recognizer (or use separate keys that belong to each service)\n", - "service_key = find_secret(secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\") # Replace it with your ai service key, check prerequisites for more details\n", + "service_key = find_secret(\n", + " secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # Replace it with your ai service key, check prerequisites for more details\n", "service_loc = \"eastus\"\n", "\n", "# A Bing Search v7 subscription key\n", - "bing_search_key = find_secret(secret_name=\"bing-search-key\", keyvault=\"mmlspark-build-keys\") # Replace the call to find_secret with your key as a python string.\n", + "bing_search_key = find_secret(\n", + " secret_name=\"bing-search-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # Replace the call to find_secret with your key as a python string.\n", "\n", "# An Anomaly Detector subscription key\n", - "anomaly_key = find_secret(secret_name=\"anomaly-api-key\", keyvault=\"mmlspark-build-keys\") # Replace the call to find_secret with your key as a python string. If you don't have an anomaly detection resource created before Sep 20th 2023, you won't be able to create one.\n", + "anomaly_key = find_secret(\n", + " secret_name=\"anomaly-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # Replace the call to find_secret with your key as a python string. If you don't have an anomaly detection resource created before Sep 20th 2023, you won't be able to create one.\n", "anomaly_loc = \"westus2\"\n", "\n", "# A Translator subscription key\n", - "translator_key = find_secret(secret_name=\"translator-key\", keyvault=\"mmlspark-build-keys\") # Replace the call to find_secret with your key as a python string.\n", + "translator_key = find_secret(\n", + " secret_name=\"translator-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # Replace the call to find_secret with your key as a python string.\n", "translator_loc = \"eastus\"\n", "\n", "# An Azure search key\n", - "search_key = find_secret(secret_name=\"azure-search-key\", keyvault=\"mmlspark-build-keys\") # Replace the call to find_secret with your key as a python string." 
+ "search_key = find_secret(\n", + " secret_name=\"azure-search-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # Replace the call to find_secret with your key as a python string." ] }, { diff --git a/docs/Explore Algorithms/AI Services/Quickstart - Analyze Celebrity Quotes.ipynb b/docs/Explore Algorithms/AI Services/Quickstart - Analyze Celebrity Quotes.ipynb index d42b4d209f..85e7865970 100644 --- a/docs/Explore Algorithms/AI Services/Quickstart - Analyze Celebrity Quotes.ipynb +++ b/docs/Explore Algorithms/AI Services/Quickstart - Analyze Celebrity Quotes.ipynb @@ -29,9 +29,13 @@ "from synapse.ml.core.platform import find_secret\n", "\n", "# put your service keys here\n", - "cognitive_key = find_secret(secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\")\n", + "cognitive_key = find_secret(\n", + " secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ")\n", "cognitive_location = \"eastus\"\n", - "bing_search_key = find_secret(secret_name=\"bing-search-key\", keyvault=\"mmlspark-build-keys\")" + "bing_search_key = find_secret(\n", + " secret_name=\"bing-search-key\", keyvault=\"mmlspark-build-keys\"\n", + ")" ] }, { diff --git a/docs/Explore Algorithms/AI Services/Quickstart - Analyze Text.ipynb b/docs/Explore Algorithms/AI Services/Quickstart - Analyze Text.ipynb index 1985a2a56f..0f8333652b 100644 --- a/docs/Explore Algorithms/AI Services/Quickstart - Analyze Text.ipynb +++ b/docs/Explore Algorithms/AI Services/Quickstart - Analyze Text.ipynb @@ -16,7 +16,9 @@ "source": [ "from synapse.ml.core.platform import find_secret\n", "\n", - "cognitive_key = find_secret(secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\")\n", + "cognitive_key = find_secret(\n", + " secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ")\n", "cognitive_location = \"eastus\"" ] }, diff --git a/docs/Explore Algorithms/AI Services/Quickstart - Creare a Visual Search Engine.ipynb b/docs/Explore Algorithms/AI Services/Quickstart - Creare a Visual Search Engine.ipynb index 180e8d9045..d8d176a79e 100644 --- a/docs/Explore Algorithms/AI Services/Quickstart - Creare a Visual Search Engine.ipynb +++ b/docs/Explore Algorithms/AI Services/Quickstart - Creare a Visual Search Engine.ipynb @@ -22,9 +22,13 @@ "from pyspark.sql.functions import lit, udf, col, split\n", "from synapse.ml.core.platform import *\n", "\n", - "cognitive_key = find_secret(secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\")\n", + "cognitive_key = find_secret(\n", + " secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ")\n", "cognitive_loc = \"eastus\"\n", - "azure_search_key = find_secret(secret_name=\"azure-search-key\", keyvault=\"mmlspark-build-keys\")\n", + "azure_search_key = find_secret(\n", + " secret_name=\"azure-search-key\", keyvault=\"mmlspark-build-keys\"\n", + ")\n", "search_service = \"mmlspark-azure-search\"\n", "search_index = \"test\"" ], diff --git a/docs/Explore Algorithms/AI Services/Quickstart - Create Audiobooks.ipynb b/docs/Explore Algorithms/AI Services/Quickstart - Create Audiobooks.ipynb index f4098d11f0..65cd5fc6ba 100644 --- a/docs/Explore Algorithms/AI Services/Quickstart - Create Audiobooks.ipynb +++ b/docs/Explore Algorithms/AI Services/Quickstart - Create Audiobooks.ipynb @@ -39,11 +39,15 @@ " from notebookutils import mssparkutils\n", "\n", "# Fill this in with your cognitive service information\n", - "service_key = find_secret(secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\") # Replace 
this line with a string like service_key = \"dddjnbdkw9329\"\n", + "service_key = find_secret(\n", + " secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # Replace this line with a string like service_key = \"dddjnbdkw9329\"\n", "service_loc = \"eastus\"\n", "\n", "storage_container = \"audiobooks\"\n", - "storage_key = find_secret(secret_name=\"madtest-storage-key\", keyvault=\"mmlspark-build-keys\")\n", + "storage_key = find_secret(\n", + " secret_name=\"madtest-storage-key\", keyvault=\"mmlspark-build-keys\"\n", + ")\n", "storage_account = \"anomalydetectiontest\"" ], "outputs": [], diff --git a/docs/Explore Algorithms/AI Services/Quickstart - Document Question and Answering with PDFs.ipynb b/docs/Explore Algorithms/AI Services/Quickstart - Document Question and Answering with PDFs.ipynb index 9e911f7f5b..d66135c7f7 100644 --- a/docs/Explore Algorithms/AI Services/Quickstart - Document Question and Answering with PDFs.ipynb +++ b/docs/Explore Algorithms/AI Services/Quickstart - Document Question and Answering with PDFs.ipynb @@ -141,7 +141,9 @@ "from pyspark.sql import SparkSession\n", "from synapse.ml.core.platform import find_secret\n", "\n", - "ai_services_key = find_secret(secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\")\n", + "ai_services_key = find_secret(\n", + " secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ")\n", "ai_services_location = \"eastus\"\n", "\n", "# Fill in the following lines with your Azure service information\n", @@ -155,7 +157,9 @@ "# Azure Cognitive Search\n", "cogsearch_name = \"mmlspark-azure-search\"\n", "cogsearch_index_name = \"examplevectorindex\"\n", - "cogsearch_api_key = find_secret(secret_name=\"azure-search-key\", keyvault=\"mmlspark-build-keys\")" + "cogsearch_api_key = find_secret(\n", + " secret_name=\"azure-search-key\", keyvault=\"mmlspark-build-keys\"\n", + ")" ], "id": "8fbc0743f3a0f6ab" }, diff --git a/docs/Explore Algorithms/AI Services/Quickstart - Flooding Risk.ipynb b/docs/Explore Algorithms/AI Services/Quickstart - Flooding Risk.ipynb index 0c0e8c2112..98290e4b3e 100644 --- a/docs/Explore Algorithms/AI Services/Quickstart - Flooding Risk.ipynb +++ b/docs/Explore Algorithms/AI Services/Quickstart - Flooding Risk.ipynb @@ -56,7 +56,9 @@ "from synapse.ml.core.platform import *\n", "\n", "# Azure Maps account key\n", - "maps_key = find_secret(secret_name=\"azuremaps-api-key\", keyvault=\"mmlspark-build-keys\") # Replace this with your azure maps key\n", + "maps_key = find_secret(\n", + " secret_name=\"azuremaps-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # Replace this with your azure maps key\n", "\n", "# Creator Geo prefix\n", "# for this example, assuming that the creator resource is created in `EAST US 2`.\n", diff --git a/docs/Explore Algorithms/AI Services/Quickstart - Predictive Maintenance.ipynb b/docs/Explore Algorithms/AI Services/Quickstart - Predictive Maintenance.ipynb index c6f2e389ac..151fb6b5c0 100644 --- a/docs/Explore Algorithms/AI Services/Quickstart - Predictive Maintenance.ipynb +++ b/docs/Explore Algorithms/AI Services/Quickstart - Predictive Maintenance.ipynb @@ -45,7 +45,9 @@ "import os\n", "from synapse.ml.core.platform import find_secret\n", "\n", - "service_key = find_secret(secret_name=\"anomaly-api-key\", keyvault=\"mmlspark-build-keys\") # Paste your anomaly detector key here\n", + "service_key = find_secret(\n", + " secret_name=\"anomaly-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # Paste your anomaly detector key 
here\n", "location = \"westus2\" # Paste your anomaly detector location here" ], "metadata": {}, diff --git a/docs/Explore Algorithms/Anomaly Detection/Quickstart - Isolation Forests.ipynb b/docs/Explore Algorithms/Anomaly Detection/Quickstart - Isolation Forests.ipynb index 03da21f9fa..986c72344a 100644 --- a/docs/Explore Algorithms/Anomaly Detection/Quickstart - Isolation Forests.ipynb +++ b/docs/Explore Algorithms/Anomaly Detection/Quickstart - Isolation Forests.ipynb @@ -371,7 +371,9 @@ "if running_on_synapse():\n", " from synapse.ml.core.platform import find_secret\n", "\n", - " tracking_url = find_secret(secret_name=\"aml-mlflow-tracking-url\", keyvault=\"mmlspark-build-keys\") # check link in prerequisites for more information on mlflow tracking url\n", + " tracking_url = find_secret(\n", + " secret_name=\"aml-mlflow-tracking-url\", keyvault=\"mmlspark-build-keys\"\n", + " ) # check link in prerequisites for more information on mlflow tracking url\n", " mlflow.set_tracking_uri(tracking_url)\n", " experiment_name = f\"isolation_forest_experiment\"\n", " model_name = \"isolation-forest\"" diff --git a/docs/Explore Algorithms/OpenAI/OpenAI.ipynb b/docs/Explore Algorithms/OpenAI/OpenAI.ipynb index c13e776d34..67e0f6f64f 100644 --- a/docs/Explore Algorithms/OpenAI/OpenAI.ipynb +++ b/docs/Explore Algorithms/OpenAI/OpenAI.ipynb @@ -83,7 +83,9 @@ "deployment_name = \"gpt-35-turbo\"\n", "deployment_name_embeddings = \"text-embedding-ada-002\"\n", "\n", - "key = find_secret(secret_name=\"openai-api-key\", keyvault=\"mmlspark-build-keys\") # please replace this line with your key as a string\n", + "key = find_secret(\n", + " secret_name=\"openai-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # please replace this line with your key as a string\n", "\n", "assert key is not None and service_name is not None" ] diff --git a/docs/Explore Algorithms/OpenAI/Quickstart - OpenAI Embedding.ipynb b/docs/Explore Algorithms/OpenAI/Quickstart - OpenAI Embedding.ipynb index b06ff1c84f..ca04e0e7a7 100644 --- a/docs/Explore Algorithms/OpenAI/Quickstart - OpenAI Embedding.ipynb +++ b/docs/Explore Algorithms/OpenAI/Quickstart - OpenAI Embedding.ipynb @@ -66,7 +66,9 @@ "service_name = \"synapseml-openai\"\n", "deployment_name_embeddings = \"text-embedding-ada-002\"\n", "\n", - "key = find_secret(secret_name=\"openai-api-key\", keyvault=\"mmlspark-build-keys\") # please replace this with your key as a string\n", + "key = find_secret(\n", + " secret_name=\"openai-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # please replace this with your key as a string\n", "\n", "assert key is not None and service_name is not None" ] diff --git a/docs/Explore Algorithms/OpenAI/Quickstart - Understand and Search Forms.ipynb b/docs/Explore Algorithms/OpenAI/Quickstart - Understand and Search Forms.ipynb index e18e06c15b..9b5237aae2 100644 --- a/docs/Explore Algorithms/OpenAI/Quickstart - Understand and Search Forms.ipynb +++ b/docs/Explore Algorithms/OpenAI/Quickstart - Understand and Search Forms.ipynb @@ -80,17 +80,25 @@ "source": [ "from synapse.ml.core.platform import find_secret\n", "\n", - "cognitive_key = find_secret(secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\") # Replace the call to find_secret with your key as a python string. e.g. cognitive_key=\"27snaiw...\"\n", + "cognitive_key = find_secret(\n", + " secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # Replace the call to find_secret with your key as a python string. e.g. 
cognitive_key=\"27snaiw...\"\n", "cognitive_location = \"eastus\"\n", "\n", - "translator_key = find_secret(secret_name=\"translator-api-key\", keyvault=\"mmlspark-build-keys\") # Replace the call to find_secret with your key as a python string.\n", + "translator_key = find_secret(\n", + " secret_name=\"translator-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # Replace the call to find_secret with your key as a python string.\n", "translator_location = \"eastus\"\n", "\n", - "search_key = find_secret(secret_name=\"azure-search-key\", keyvault=\"mmlspark-build-keys\") # Replace the call to find_secret with your key as a python string.\n", + "search_key = find_secret(\n", + " secret_name=\"azure-search-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # Replace the call to find_secret with your key as a python string.\n", "search_service = \"mmlspark-azure-search\"\n", "search_index = \"form-demo-index-5\"\n", "\n", - "openai_key = find_secret(secret_name=\"openai-api-key\", keyvault=\"mmlspark-build-keys\") # Replace the call to find_secret with your key as a python string.\n", + "openai_key = find_secret(\n", + " secret_name=\"openai-api-key\", keyvault=\"mmlspark-build-keys\"\n", + ") # Replace the call to find_secret with your key as a python string.\n", "openai_service_name = \"synapseml-openai\"\n", "openai_deployment_name = \"gpt-35-turbo\"\n", "openai_url = f\"https://{openai_service_name}.openai.azure.com/\"" diff --git a/docs/Explore Algorithms/Responsible AI/Quickstart - Snow Leopard Detection.ipynb b/docs/Explore Algorithms/Responsible AI/Quickstart - Snow Leopard Detection.ipynb index d79e494b3d..71af86261e 100644 --- a/docs/Explore Algorithms/Responsible AI/Quickstart - Snow Leopard Detection.ipynb +++ b/docs/Explore Algorithms/Responsible AI/Quickstart - Snow Leopard Detection.ipynb @@ -15,7 +15,9 @@ "source": [ "from synapse.ml.core.platform import *\n", "\n", - "bing_search_key = find_secret(secret_name=\"bing-search-key\", keyvault=\"mmlspark-build-keys\")\n", + "bing_search_key = find_secret(\n", + " secret_name=\"bing-search-key\", keyvault=\"mmlspark-build-keys\"\n", + ")\n", "\n", "# WARNING this notebook requires a lot of memory.\n", "# If you get a heap space error, try dropping the number of images bing returns\n", diff --git a/docs/Get Started/Quickstart - Your First Models.ipynb b/docs/Get Started/Quickstart - Your First Models.ipynb index 28df905dcd..4952e995cc 100644 --- a/docs/Get Started/Quickstart - Your First Models.ipynb +++ b/docs/Get Started/Quickstart - Your First Models.ipynb @@ -166,7 +166,9 @@ "model = TextSentiment(\n", " textCol=\"text\",\n", " outputCol=\"sentiment\",\n", - " subscriptionKey=find_secret(secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\")\n", + " subscriptionKey=find_secret(\n", + " secret_name=\"cognitive-api-key\", keyvault=\"mmlspark-build-keys\"\n", + " ),\n", ").setLocation(\"eastus\")\n", "\n", "display(model.transform(test))" diff --git a/tools/esrp/prepare_jar.py b/tools/esrp/prepare_jar.py index 26cadd0953..88e2b0feb5 100644 --- a/tools/esrp/prepare_jar.py +++ b/tools/esrp/prepare_jar.py @@ -28,7 +28,6 @@ def flatten_dir(top_dir): # Walk through all subdirectories for foldername, subfolders, filenames in os.walk(top_dir, topdown=False): - # If we are not in the top-level directory, move files to the top-level directory if foldername != top_dir: for filename in filenames: diff --git a/website/doctest.py b/website/doctest.py index 6ab47b688d..6e2fcaeebf 100644 --- a/website/doctest.py +++ 
b/website/doctest.py @@ -44,7 +44,6 @@ def getSecret(secretName): def iterate_over_documentation(folder, version): - cur_folders = [folder] while cur_folders: cur_dir = cur_folders.pop(0)
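
For readers who want to reproduce the reformatting shown in the hunks above, here is a minimal sketch using black's Python API instead of the command line. It assumes black is installed (pip install black) and uses black's default 88-character line length; the find_secret call is copied from the notebook cells this patch touches.

import black

# One of the over-long notebook lines this patch reformats.
source = (
    'service_key = find_secret(secret_name="cognitive-api-key", '
    'keyvault="mmlspark-build-keys")\n'
)

# black.format_str applies the same rules as the `black` command-line tool.
print(black.format_str(source, mode=black.FileMode()))
# Expected output, matching the hunks above:
# service_key = find_secret(
#     secret_name="cognitive-api-key", keyvault="mmlspark-build-keys"
# )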
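
The cognitive/__init__.py hunk near the top also shows the PEP 562 module-level __getattr__ pattern that forwards synapse.ml.cognitive attribute lookups to synapse.ml.services. A self-contained sketch of that pattern follows; the module and attribute names here are stand-ins for illustration, not SynapseML's real modules, and it requires Python 3.7+.

import types

# Stand-in for the real package (synapse.ml.services in the patch).
services = types.ModuleType("services_stub")
services.AnalyzeText = lambda: "AnalyzeText transformer"

# Stand-in for the alias package (synapse.ml.cognitive in the patch).
cognitive = types.ModuleType("cognitive_stub")
# Attributes missing from the alias module are looked up on the real module.
cognitive.__getattr__ = lambda name: getattr(services, name)

print(cognitive.AnalyzeText())  # -> AnalyzeText transformer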