diff --git a/multicloud/azure-cli/README.md b/multicloud/azure-cli/README.md index 37da994..4d867f1 100644 --- a/multicloud/azure-cli/README.md +++ b/multicloud/azure-cli/README.md @@ -1,7 +1,7 @@ # Oracle Database@Azure: Create an Autonomous Database There are different ways that you can deploy a new Oracle Autonomous Database: * [Using the Azure Portal](https://youtu.be/QOCvRr5CfeQ) -* [Using Terraform scripts](https://github.com/oci-landing-zones/terraform-oci-multicloud-azure/tree/main) +* [Using Terraform scripts](https://github.com/oci-landing-zones/terraform-oci-multicloud-azure) * Using the Azure CLI The steps below show how to create an Autonomous Database using the Azure CLI. @@ -29,6 +29,8 @@ You can run the scripts independently or run `create-all-resources.sh`. Simply u |[create-all-resources.sh](create-all-resources.sh)|Creates your resource group, network, ADB and VM| |[create-data-lake-storage.sh](create-data-lake-storage.sh)|Creates an Azure Data Lake Gen 2 storage account, a container and uploads sample data into that container| |[delete-all-resources.sh](delete-all-resources.sh)|Deletes your resource group, network, ADB and VM| +|[show-adb-info.sh](show-adb-info.sh)|Shows information about your ADB - including your JDBC connection details to the HIGH service| +|[show-data-lake-storage-info.sh](show-data-lake-storage-info.sh)|Shows information about your data lake storage - including the storage endpoint URL| ### Configuration file The Azure cli deployment scripts rely on settings found in the config file. These resources **will be created** by the scripts. Update the config file prior to running any of the scripts. @@ -80,6 +82,8 @@ Connect to your Autonomous Database! 
* Use these great VS Code extensions that help you develop and debug your database apps: * SQL Developer for VS Code ([Learn More](https://www.oracle.com/database/sqldeveloper/vscode/) | [Marketplace](https://marketplace.visualstudio.com/items?itemName=Oracle.sql-developer)) * Oracle Developer Tools for VS Code ([Learn More](https://docs.oracle.com/en/database/oracle/developer-tools-for-vscode/getting-started/gettingstarted.html) | [Marketplace](https://marketplace.visualstudio.com/items?itemName=Oracle.oracledevtools)) +* [Use the sample scripts](../../sql/README.md) to learn how to use different features - like Select AI, data lake integration, JSON, and more. + #### JDBC Example: JDBC is a common way to connect to Autonomous Database. For example, you can use the **Custom JDBC URL** in the VS Code SQL Developer Extension: diff --git a/multicloud/azure-cli/config b/multicloud/azure-cli/config index 1a8375e..9b51423 100644 --- a/multicloud/azure-cli/config +++ b/multicloud/azure-cli/config @@ -2,38 +2,49 @@ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ # update the values below to match your requirements -LOCATION="eastus" -RESOURCE_GROUP="development" +# Region and resource group where resources are deployed +# example: eastus +LOCATION="" +RESOURCE_GROUP="" ADB_NAME="quickstart" ## NETWORKING # database -VNET_NAME="dev-vnet" -VNET_PREFIX="19x.xxx.0.0/16" +# example: dev-vnet +VNET_NAME="" +# example: 192.168.0.0/16 +VNET_PREFIX="" # subnet for the database -SUBNET_NAME="dev-sn-db" -SUBNET_PREFIX="19x.xxx.1.0/24" +# example: dev-sn-db +SUBNET_NAME="" +# example: 192.168.1.0/24 +SUBNET_PREFIX="" # client subnet -SUBNET2_NAME="dev-sn-client" -SUBNET2_PREFIX="19x.xxx.2.0/24" +# example: dev-sn-client +SUBNET2_NAME="" +# example: 192.168.2.0/24 +SUBNET2_PREFIX="" #network security group NSG_NAME=$SUBNET2_NAME-nsg ## COMPUTE VM -VM_NAME="adb-vm-client" +# example: adb-vm-client +VM_NAME="" VM_PREFERRED_SIZES=( 
"Standard_DS3_v2" "Standard_DC1s_v2" "Standard_DC2s_v2" "Standard_DC2ads_v5" "Standard_L4s" ) VM_IMAGE="MicrosoftWindowsDesktop:Windows-11:win11-22h2-pro:latest" ## CLOUD STORAGE # Storage accounts require a unique name across azure. Enter your unique name below. -STORAGE_ACCOUNT_NAME="your-storage-account" +# example: devadbstorageacct +STORAGE_ACCOUNT_NAME="" STORAGE_CONTAINER_NAME="adb-sample" ## IDENTITIES # This identity will be used for your VM. The password will also be used for the database ADMIN user USER_NAME="adb" ---The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character +# The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character +# example: watchS0meMovies# USER_PASSWORD="" \ No newline at end of file diff --git a/multicloud/azure-cli/config.default b/multicloud/azure-cli/config.default index 1a8375e..9b51423 100644 --- a/multicloud/azure-cli/config.default +++ b/multicloud/azure-cli/config.default @@ -2,38 +2,49 @@ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ # update the values below to match your requirements -LOCATION="eastus" -RESOURCE_GROUP="development" +# Region and resource group where resources are deployed +# example: eastus +LOCATION="" +RESOURCE_GROUP="" ADB_NAME="quickstart" ## NETWORKING # database -VNET_NAME="dev-vnet" -VNET_PREFIX="19x.xxx.0.0/16" +# example: dev-vnet +VNET_NAME="" +# example: 192.168.0.0/16 +VNET_PREFIX="" # subnet for the database -SUBNET_NAME="dev-sn-db" -SUBNET_PREFIX="19x.xxx.1.0/24" +# example: dev-sn-db +SUBNET_NAME="" +# example: 192.168.1.0/24 +SUBNET_PREFIX="" # client subnet -SUBNET2_NAME="dev-sn-client" -SUBNET2_PREFIX="19x.xxx.2.0/24" +# example: dev-sn-client +SUBNET2_NAME="" +# example: 192.168.2.0/24 +SUBNET2_PREFIX="" #network security group 
NSG_NAME=$SUBNET2_NAME-nsg ## COMPUTE VM -VM_NAME="adb-vm-client" +# example: adb-vm-client +VM_NAME="" VM_PREFERRED_SIZES=( "Standard_DS3_v2" "Standard_DC1s_v2" "Standard_DC2s_v2" "Standard_DC2ads_v5" "Standard_L4s" ) VM_IMAGE="MicrosoftWindowsDesktop:Windows-11:win11-22h2-pro:latest" ## CLOUD STORAGE # Storage accounts require a unique name across azure. Enter your unique name below. -STORAGE_ACCOUNT_NAME="your-storage-account" +# example: devadbstorageacct +STORAGE_ACCOUNT_NAME="" STORAGE_CONTAINER_NAME="adb-sample" ## IDENTITIES # This identity will be used for your VM. The password will also be used for the database ADMIN user USER_NAME="adb" ---The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character +# The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character +# example: watchS0meMovies# USER_PASSWORD="" \ No newline at end of file diff --git a/multicloud/azure-cli/create-data-lake-storage.sh b/multicloud/azure-cli/create-data-lake-storage.sh index e52f214..e128417 100755 --- a/multicloud/azure-cli/create-data-lake-storage.sh +++ b/multicloud/azure-cli/create-data-lake-storage.sh @@ -99,8 +99,10 @@ az storage account keys list \ --resource-group $RESOURCE_GROUP --query "[0].value" -o tsv echo "Storage URL:" -az storage account show \ +STORAGE_URL=$(az storage account show \ --name $STORAGE_ACCOUNT_NAME \ --query primaryEndpoints.blob \ - --output tsv + --output tsv) +echo $STORAGE_URL$STORAGE_CONTAINER_NAME + echo "" diff --git a/multicloud/azure-cli/show-data-lake-storage-info.sh b/multicloud/azure-cli/show-data-lake-storage-info.sh index 7e53dfa..bef69fa 100755 --- a/multicloud/azure-cli/show-data-lake-storage-info.sh +++ b/multicloud/azure-cli/show-data-lake-storage-info.sh @@ -15,9 +15,10 @@ az storage account keys list \ --account-name $STORAGE_ACCOUNT_NAME \ 
--resource-group $RESOURCE_GROUP --query "[0].value" -o tsv echo "Storage URL:" -az storage account show \ +STORAGE_URL=$(az storage account show \ --name $STORAGE_ACCOUNT_NAME \ --query primaryEndpoints.blob \ - --output tsv + --output tsv) +echo $STORAGE_URL$STORAGE_CONTAINER_NAME echo "" diff --git a/multicloud/gcloud-cli/README.md b/multicloud/gcloud-cli/README.md index 139cab8..bfe057e 100644 --- a/multicloud/gcloud-cli/README.md +++ b/multicloud/gcloud-cli/README.md @@ -1,37 +1,44 @@ # Oracle Database@Google Cloud: Create an Autonomous Database There are different ways that you can deploy a new Oracle Autonomous Database: -* [Using the Google Cloud Portal](https://youtu.be/QOCvRr5CfeQ) -* [Using Terraform scripts](https://github.com/oci-landing-zones/terraform-oci-multicloud-azure/tree/main) -* Using the Azure CLI +* [Using the Google Cloud Portal](https://docs.oracle.com/en-us/iaas/ogadb/ogadb-provisioning-autonomous-database.html) +* [Using Terraform scripts](https://github.com/oci-landing-zones/terraform-oci-multicloud-azure) +* Using the Google Command Line Interface (gcloud CLI) -The steps below show how to create an Autonomous Database using the Azure CLI. + +The steps below show how to create an Autonomous Database using the gcloud CLI. ## Prerequisites: -* [Install the Azure CLI](https://learn.microsoft.com/en-us/cli/azure/) -* [Subscribe to Oracle Database@Azure](https://www.youtube.com/watch?v=MEB8kB_TI2I) -* Ensure you have the appropriate user groups and privileges. See [details for onboarding Autonomous Database](https://learn.microsoft.com/en-us/azure/oracle/oracle-db/onboard-oracle-database) +* [Install the gcloud CLI](https://cloud.google.com/sdk/docs/install) +* [Onboard Oracle Database@Google Cloud](https://docs.oracle.com/en-us/iaas/Content/database-at-gcp/oagcp-onboard.htm#oagcp_onboard). Onboarding includes subscribing to the service using a Pay as You Go or private offer, setting up permissions, and more. 
## Deploy your Autonomous Database and your infrastructure -Use the following scripts to deploy your infrastructure and Autonomous Database: +The gcloud CLI scripts will deploy the following infrastructure: +* A VPC Network with a client subnet +* An Oracle Autonomous Database. It is deployed to a private subnet on that VPC Network. That private subnet is managed by Oracle Database@Google Cloud. +* A Windows-based Virtual Machine is deployed to the client subnet. You can RDP to that VM to develop your apps and access Autonomous Database. +* A Cloud Storage bucket with sample data +* Sample code will use Google Gemini. Ensure the API is enabled. + + +![deployment](../images/gcloud-deployment.png) -![deployment](../images/azure-deployment.png) +**Note:** Gemini is used by the samples - but the scripts do not set up access to the resource. See the [Generative AI on Vertex Quickstart](https://cloud.google.com/vertex-ai/generative-ai/docs/start/quickstarts/quickstart-multimodal?authuser=1) -**Note:** Azure OpenAI is used by the samples - but the scripts do not deploy the resource. -f You can run the scripts independently or run `create-all-resources.sh`. Simply update the [`config`](#configuration-file) prior to running the scripts: |Script|Description| |----|---| -|[create-resource-group.sh](create-resource-group.sh)|Create a resource group| -|[create-network.sh](create-network.sh)|Create virtual cloud network.

ADB must be deployed to a delegated subnet. In addition, ADB access is thru a private endpoint. This means it must be accessed from either the same VCN or another privileged network.| +|[create-network.sh](create-network.sh)|Creates a VPC Network and subnet with required firewall rules. ADB is accessed thru a private endpoint on this network. The VM is deployed to this network and can be used to work with ADB.| |[create-adb.sh](create-adb.sh)|Create an Autonomous Database| -|[create-compute-vm.sh](create-compute-vm.sh)|Create a VM in that VCN| -|[create-all-resources.sh](create-all-resources.sh)|Creates your resource group, network, ADB and VM| -|[create-data-lake-storage.sh](create-data-lake-storage.sh)|Creates an Azure Data Lake Gen 2 storage account, a container and uploads sample data into that container| +|[create-compute-vm.sh](create-compute-vm.sh)|Create a VM in that VPC. By default, a Windows VM is created and can be accessed via RDP. After running this script, you can set up the password by running:
`source config`
`gcloud compute reset-windows-password $VM_NAME --zone=$REGION-a`| +|[create-all-resources.sh](create-all-resources.sh)|Creates your network, ADB, VM and Cloud Storage bucket.| +|[create-data-lake-storage.sh](create-data-lake-storage.sh)|Creates a bucket on Cloud Storage and uploads sample data into that bucket| |[delete-all-resources.sh](delete-all-resources.sh)|Deletes your resource group, network, ADB and VM| +|[show-adb-info.sh](show-adb-info.sh)|Shows information about your ADB - including your JDBC connection details to the HIGH service| +|[show-data-lake-storage-info.sh](show-data-lake-storage-info.sh)|Shows information about your data lake storage - including the storage endpoint URL| ### Configuration file -The Azure cli deployment scripts rely on settings found in the config file. These resources **will be created** by the scripts. Update the config file prior to running any of the scripts. +The gcloud CLI deployment scripts rely on settings found in the config file. These resources **will be created** by the scripts. Update the config file prior to running any of the scripts. >**IMPORTANT:** This file will contain a password that is used to connect to Autonomous Database and the virtual machine. Set the file's permissions so that only the file's owner can view its contents: ```bash @@ -40,30 +47,30 @@ chmod 600 config |Setting|Description|Example| |----|----|----| -|LOCATION|Region where resources will be deployed. [See documentation](https://docs.oracle.com/en-us/iaas/Content/database-at-azure/oaa_regions.htm) for regions where Oracle Database 23ai is available|"eastus"| -|RESOURCE_GROUP|Target resource group for new resources|"development"| +|REGION|Region where resources will be deployed. 
[See documentation](https://docs.oracle.com/en-us/iaas/Content/database-at-gcp/oagcp-regions.htm) for region availability|"us-east4"| +|PROJECT|Target Google Cloud project for new resources|"development"| +|USER_PASSWORD|The password for the Autonomous Database admin user|"watchS0meMovies#"| |ADB_NAME|Autonomous Database name. This name must be unique within a region location|"quickstart"| -|VNET_NAME|Virtual network|"dev-vnet"| +|SUBNET_DB_IP_RANGE|IP address range used for ADB. It can not overlap with the client subnet range. It can overlap with other ADB instances.|"192.168.11.0/24"| +|VPC_NETWORK_NAME|Name of the VPC Network|"dev-network"| |VNET_PREFIX|CIDR range for the virtual network|"192.168.0.0/16"| -|SUBNET_NAME|Delegated subnet where the database will be deployed|"dev-sn-db"| -|SUBNET_PREFIX|CIDR range for the delegated subnet|"192.168.1.0/24"| -|SUBNET2_NAME|Client subnet. The VM will be deployed to this subnet|"dev-sn-client"| -|SUBNET2_PREFIX|CIDR range for the client subnet|"192.168.2.0/24"| -|NSG_NAME|Name of the network security group used by the client subnet|$SUBNET2_NAME-nsg| -|VM_NAME|Name of the virtual machine|"adb-vm-client"| -|VM_PREFERRED_SIZES|A list of VM sizes. Change these values based on region availability. The script will attempt to create a VM based on the order listed|( "Standard_GS1" "Standard_DC1s_v2" "Standard_DC2s_v2" "Standard_DC2ads_v5" "Standard_L4s" )| -|VM_IMAGE|The image used by the VM|"MicrosoftWindowsDesktop:Windows-11:win11-22h2-pro:latest"| -|STORAGE_ACCOUNT_NAME|The name of an Azure Data Lake Storage Gen 2 account. This name must be unique across Azure. 
Sample data files will be uploaded into this storage account.|"mytenancysamplestorageaccount"| -|STORAGE_CONTAINER_NAME|The name of the container where files will be uploaded|"adb-sample"| -|USER_NAME|The name of the user for the virtual machine|"adb"| -|USER_PASSWORD|The password for both the VM and the Autonomous Database admin user|"Welcome1234#abcd"| +|SUBNET_CLIENT_NAME|Name of the client subnet where the VM is deployed|"dev-sn-client"| +|SUBNET_CLIENT_IP_RANGE|CIDR range for the client subnet|"192.168.10.0/24"| +|VM_NAME|Name of the virtual machine|"dev-vm-client"| +|VM_IMAGE_FAMILY|The image deployed to the VM |"windows-2022"| +|VM_MACHINE_TYPE|The type of VM deployed|"e2-standard-4"| +|BUCKET_NAME|The name of the cloud storage bucket where sample files will be uploaded.|"adb-sample-quickstart"| + ### Using the scripts -Log into azure: after updating the config file: +Make sure that you have enabled APIs for your project. [See the documentation](https://cloud.google.com/endpoints/docs/openapi/enable-api) for details. +Log into Google Cloud from the CLI: ```bash -az login +gcloud auth login ``` + +Update the config file Then, run your scripts. The following will deploy a complete environment, but you can also install independent components. Just make sure you install dependencies (e.g. a VCN prior to Autonomous Database): Creating all of the resources will take approximately 15-20 minutes. @@ -80,6 +87,8 @@ Connect to your Autonomous Database! 
* Use these great VS Code extensions that help you develop and debug your database apps: * SQL Developer for VS Code ([Learn More](https://www.oracle.com/database/sqldeveloper/vscode/) | [Marketplace](https://marketplace.visualstudio.com/items?itemName=Oracle.sql-developer)) * Oracle Developer Tools for VS Code ([Learn More](https://docs.oracle.com/en/database/oracle/developer-tools-for-vscode/getting-started/gettingstarted.html) | [Marketplace](https://marketplace.visualstudio.com/items?itemName=Oracle.oracledevtools)) +* [Use the sample scripts](../../sql/README.md) to learn how to use different features - like Select AI, data lake integration, JSON, and more. + #### JDBC Example: JDBC is a common way to connect to Autonomous Database. For example, you can use the **Custom JDBC URL** in the VS Code SQL Developer Extension: diff --git a/multicloud/gcloud-cli/config b/multicloud/gcloud-cli/config new file mode 100644 index 0000000..207f9bb --- /dev/null +++ b/multicloud/gcloud-cli/config @@ -0,0 +1,46 @@ +# Copyright (c) 2024 Oracle and/or its affiliates. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ + +# update the values below to match your requirements +# example: us-east4 +REGION="" +PROJECT="" + +## IDENTITIES +# The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character +# example: watchS0meMovies# +USER_PASSWORD="" + +## ADB properties +# Database name. This will be used for the display name as well. +ADB_NAME="quickstart" + +# ADB IP range. 
It can not overlap with the client subnet range +# example: 192.168.11.0/24 +SUBNET_DB_IP_RANGE="" + +## NETWORKING +# public client network +# example: dev-network +VPC_NETWORK_NAME="" + +# names firewall rules +VPC_FIREWALL_INGRESS_NAME="allow-common-ingress-ports" +VPC_FIREWALL_EGRESS_NAME="allow-client-egress-ports" + +# client subnet +# example: dev-sn-client +SUBNET_CLIENT_NAME="" +# example: 192.168.10.0/24 +SUBNET_CLIENT_IP_RANGE="" + +## COMPUTE VM +# example: dev-vm-client +VM_NAME="" +VM_IMAGE_FAMILY="windows-2022" +VM_MACHINE_TYPE="e2-standard-4" + +## CLOUD STORAGE +# Storage bucket require a unique name across google. Enter your unique name below. +# example: adb-sample-quickstart +BUCKET_NAME="" \ No newline at end of file diff --git a/multicloud/gcloud-cli/config.default b/multicloud/gcloud-cli/config.default new file mode 100644 index 0000000..4de8d30 --- /dev/null +++ b/multicloud/gcloud-cli/config.default @@ -0,0 +1,46 @@ +# Copyright (c) 2024 Oracle and/or its affiliates. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ + +# update the values below to match your requirements +# example: us-east4 +REGION="" +PROJECT="" + +## IDENTITIES +# The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character +# example: watchS0meMovies# +USER_PASSWORD="" + +## ADB properties +# Database name. This will be used for the display name as well. +ADB_NAME="quickstart" + +# ADB IP range. 
It can not overlap with the client subnet range +# example: 192.168.11.0/24 +SUBNET_DB_IP_RANGE="" + +## NETWORKING +# public client network +# example: dev-network +VPC_NETWORK_NAME="" + +# names firewall rules +VPC_FIREWALL_INGRESS_NAME="allow-common-ingress-ports" +VPC_FIREWALL_EGRESS_NAME="allow-client-egress-ports" + +# client subnet +# example: dev-sn-client +SUBNET_CLIENT_NAME="" +# example: 192.168.10.0/24 +SUBNET_CLIENT_IP_RANGE="" + +## COMPUTE VM +# example: dev-vm-client +VM_NAME="" +VM_IMAGE_FAMILY="windows-2022" +VM_MACHINE_TYPE="e2-standard-4" + +## CLOUD STORAGE +# Storage accounts require a unique name across google. Enter your unique name below. +# example: adb-sample-quickstart +BUCKET_NAME="" \ No newline at end of file diff --git a/multicloud/gcloud-cli/create-data-lake-storage.sh b/multicloud/gcloud-cli/create-data-lake-storage.sh index cd33146..ed3f821 100755 --- a/multicloud/gcloud-cli/create-data-lake-storage.sh +++ b/multicloud/gcloud-cli/create-data-lake-storage.sh @@ -26,5 +26,6 @@ gcloud storage ls --long --recursive gs://$BUCKET_NAME echo "" echo "Bucket Name: $BUCKET_NAME" +gcloud storage hmac list echo "Storage URL:" echo "https://storage.googleapis.com/$BUCKET_NAME" diff --git a/multicloud/gcloud-cli/create-network.sh b/multicloud/gcloud-cli/create-network.sh index 0c11d8f..27798cb 100755 --- a/multicloud/gcloud-cli/create-network.sh +++ b/multicloud/gcloud-cli/create-network.sh @@ -13,12 +13,6 @@ source ./config # Create a VPC gcloud compute networks create $VPC_NETWORK_NAME --subnet-mode=custom -#gcloud compute networks subnets create private-subnet \ -# --network=$VPC_NETWORK_NAME \ -# --region=$REGION \ -# --range=192.168.5.0/24 \ -# --enable-private-ip-google-access - gcloud compute networks subnets create $SUBNET_CLIENT_NAME \ --network=$VPC_NETWORK_NAME \ --region=$REGION \ diff --git a/multicloud/gcloud-cli/show-data-lake-storage-info.sh b/multicloud/gcloud-cli/show-data-lake-storage-info.sh index b2ae7b1..6eff2be 100755 --- 
a/multicloud/gcloud-cli/show-data-lake-storage-info.sh +++ b/multicloud/gcloud-cli/show-data-lake-storage-info.sh @@ -12,6 +12,7 @@ gcloud storage ls --long --recursive gs://$BUCKET_NAME echo "" echo "Bucket Name: $BUCKET_NAME" +gcloud storage hmac list echo "Storage URL:" echo "https://storage.googleapis.com/$BUCKET_NAME" echo "" diff --git a/multicloud/images/gcloud-deployment.png b/multicloud/images/gcloud-deployment.png new file mode 100644 index 0000000..35837b7 Binary files /dev/null and b/multicloud/images/gcloud-deployment.png differ diff --git a/multicloud/oci-cli/README.md b/multicloud/oci-cli/README.md index 86e06cc..57c84ea 100644 --- a/multicloud/oci-cli/README.md +++ b/multicloud/oci-cli/README.md @@ -15,11 +15,16 @@ The steps below show how to create an Autonomous Database using the OCI CLI. * [Use OCI Object Storage](https://docs.oracle.com/en-us/iaas/Content/Security/Reference/objectstorage_security.htm#iam-policies) ## Deploy your Autonomous Database -Use the following scripts to deploy your Autonomous Database and sample data on OCI. Because the sample script deploys ADB on a public endpoint, the architecture is very simple: +Autonomous Database will be deployed on a public endpoint - which will simplify the architecture. The OCI CLI will deploy: +* An Oracle Autonomous Database deployed on a public endpoint. +* An Object Storage bucket with sample data +* Sample code will use OCI GenAI (or other AI service). +* Use your computer as a client. + ![ADB on OCI](../images/oci-adb-github-samples.drawio.png) -You can run the scripts independently or run `create-all-resources.sh`. Simply update the [`config`](#configuration-file) prior to running the scripts: +You can run the OCI CLI scripts independently or run `create-all-resources.sh`. 
Simply update the [`config`](#configuration-file) prior to running the scripts: |Script|Description| |----|---| @@ -28,6 +33,8 @@ You can run the scripts independently or run `create-all-resources.sh`. Simply u |[create-all-resources.sh](create-all-resources.sh)|Creates your resource group, network, ADB and VM| |[create-data-lake-storage.sh](create-data-lake-storage.sh)|Creates an OCI Object Storage bucket and uploads sample data into that bucket| |[delete-all-resources.sh](delete-all-resources.sh)|Deletes your compartment, bucket and Autonomous Database| +|[show-adb-info.sh](show-adb-info.sh)|Shows information about your ADB - including your JDBC connection details to the HIGH service| +|[show-data-lake-storage-info.sh](show-data-lake-storage-info.sh)|Shows information about your data lake storage - including the storage endpoint URL| ### Configuration file The OCI cli deployment scripts rely on settings found in the config file. These resources **will be created** by the scripts. Update the config file prior to running any of the scripts. @@ -63,6 +70,8 @@ Connect to your Autonomous Database! * Use these great VS Code extensions that help you develop and debug your database apps: * SQL Developer for VS Code ([Learn More](https://www.oracle.com/database/sqldeveloper/vscode/) | [Marketplace](https://marketplace.visualstudio.com/items?itemName=Oracle.sql-developer)) * Oracle Developer Tools for VS Code ([Learn More](https://docs.oracle.com/en/database/oracle/developer-tools-for-vscode/getting-started/gettingstarted.html) | [Marketplace](https://marketplace.visualstudio.com/items?itemName=Oracle.oracledevtools)) +* [Use the sample scripts](../../sql/README.md) to learn how to use different features - like Select AI, data lake integration, JSON, and more. + #### JDBC Example: JDBC is a common way to connect to Autonomous Database. 
For example, you can use the **Custom JDBC URL** in the VS Code SQL Developer Extension: diff --git a/multicloud/oci-cli/config b/multicloud/oci-cli/config index ed1d017..2f11722 100644 --- a/multicloud/oci-cli/config +++ b/multicloud/oci-cli/config @@ -2,9 +2,19 @@ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ # update the values below to match your requirements. -TENANCY_OCID="ocid1.tenancy.oc1..aaaaaaaafcue47pqmrf4vigneebgbcmmoy5r7xvoypicjqqge32ewnrcyx2a" -REGION="us-ashburn-1" -COMPARTMENT_NAME="from-github" + +# your Tenancy unique identifier +# example: ocid1.tenancy.oc1..example.... +TENANCY_OCID="" +# Region where resources will be deployed +# example: us-ashburn-1 +REGION="" + +# Compartment name. Will be created if it does not exist +# example: mycompartment +COMPARTMENT_NAME="" + +# Autonomous Database name ADB_NAME="quickstart" ## CLOUD STORAGE @@ -13,4 +23,5 @@ BUCKET_NAME="adb-sample" ## IDENTITIES # The password is for the ADB ADMIN user. It must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character -USER_PASSWORD="bigdataPM2019#" \ No newline at end of file +# example: watchS0meMovies# +USER_PASSWORD="" \ No newline at end of file diff --git a/multicloud/oci-cli/config.default b/multicloud/oci-cli/config.default index 70f5351..2f11722 100644 --- a/multicloud/oci-cli/config.default +++ b/multicloud/oci-cli/config.default @@ -2,9 +2,19 @@ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ # update the values below to match your requirements. -TENANCY_OCID="your-tenancy-ocid" -REGION="us-ashburn-1" -COMPARTMENT_NAME="your-target-compartment" + +# your Tenancy unique identifier +# example: ocid1.tenancy.oc1..example.... +TENANCY_OCID="" +# Region where resources will be deployed +# example: us-ashburn-1 +REGION="" + +# Compartment name. 
Will be created if it does not exist +# example: mycompartment +COMPARTMENT_NAME="" + +# Autonomous Database name ADB_NAME="quickstart" ## CLOUD STORAGE @@ -13,4 +13,5 @@ BUCKET_NAME="adb-sample" ## IDENTITIES # The password is for the ADB ADMIN user. It must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character +# example: watchS0meMovies# USER_PASSWORD="" \ No newline at end of file diff --git a/multicloud/oci-cli/create-compartment.sh b/multicloud/oci-cli/create-compartment.sh index 65c0851..f0a3409 100755 --- a/multicloud/oci-cli/create-compartment.sh +++ b/multicloud/oci-cli/create-compartment.sh @@ -9,4 +9,11 @@ echo "" # ensure you update the config file to match your deployment prior to running the deployment source ./config -oci iam compartment create --region $REGION --compartment-id $TENANCY_OCID --name "$COMPARTMENT_NAME" --description "Created by oracle-autonomous-database-samples" \ No newline at end of file +RESULT=$(oci iam compartment create --region $REGION --compartment-id $TENANCY_OCID --name "$COMPARTMENT_NAME" --description "Created by oracle-autonomous-database-samples" 2>&1) +STATUS=$? +if [ $STATUS -eq 0 ] || echo "$RESULT" | grep -q "CompartmentAlreadyExists"; then + echo "Compartment '$COMPARTMENT_NAME' is ready. Continuing execution..." +else + echo "Error creating compartment: $RESULT" + exit 1 +fi diff --git a/sql/config.sql b/sql/config.sql index 2a5ba78..eeec41b 100644 --- a/sql/config.sql +++ b/sql/config.sql @@ -10,6 +10,7 @@ define CONN='jdbc:oracle:thin:@(description=...)' -- user name and password used for the sample data define USER_NAME='moviestream' -- # The password is for the sample user. 
It must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character +-- example: watchS0meMovies# define USER_PASSWORD='' -- @@ -21,9 +22,12 @@ define AI_PROVIDER='oci' -- The Select AI profile name that encapsulates the AI provider info + tables for NL2SQL define AI_PROFILE_NAME='genai' -- This is a database credential that captures the secret key or other connection info -define AI_CREDENTIAL_NAME='AI_cred' +define AI_CREDENTIAL_NAME='AI_CRED' --- The endpoint should be the servername only. For example, myopenai.openai.azure.com. This is not required for OCI GenAI. +-- The endpoint should be the servername only. This is not required for OCI GenAI. +-- Examples: +-- myopenai.openai.azure.com +-- us-east4-aiplatform.googleapis.com define AI_ENDPOINT='' -- API key for AI service. This is not required for OCI GenAI. define AI_KEY='' @@ -43,13 +47,21 @@ define AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME='your-openai-emedding-deployment-n -- you can get the storage info by running /multicloud/{oci|azure|gcloud}-cli/show-data-lake-storage-info.sh define STORAGE_PROVIDER='oci' -- The url is a pointer to the bucket that will be used for import/export to object storage +-- Examples: +-- google: https://storage.googleapis.com/adb-sample +-- azure : https://devadbstorageacct.blob.core.windows.net/adb-sample +-- oci : https://adwc4pm.objectstorage.us-ashburn-1.oci.customer-oci.com/n/adwc4pm/b/adb-sample/o define STORAGE_URL='' + -- A database credential encapsulates the authentication details to the object store. 
Specify a name for the credential below define STORAGE_CREDENTIAL_NAME='storage_cred' --- below required for azure -define STORAGE_KEY='' -define STORAGE_ACCOUNT_NAME='' +--Azure storage properties +define AZURE_STORAGE_ACCOUNT_NAME='' +define AZURE_STORAGE_KEY='' +-- Google storage properties +define GOOGLE_STORAGE_ACCESS_KEY='' +define GOOGLE_STORAGE_SECRET='' -- -- OCI API credentials diff --git a/sql/credential-create.sql b/sql/credential-create.sql index a596acc..d4edc51 100644 --- a/sql/credential-create.sql +++ b/sql/credential-create.sql @@ -13,6 +13,8 @@ prompt "Creating credential: &user_param" DECLARE l_exists number := 0; l_type varchar2(20) := nvl(upper('&user_param'),'ALL'); + l_username varchar2(400); + l_password varchar2(400); BEGIN -- AI provider. Note, they will have different syntax based on the provider if l_type in ('AI','ALL') then @@ -73,10 +75,14 @@ BEGIN private_key => '&OCI_PRIVATE_KEY' ); ELSE + -- Google and Azure use different settings for username and password + l_username := CASE WHEN UPPER('&STORAGE_PROVIDER') = 'AZURE' THEN '&AZURE_STORAGE_ACCOUNT_NAME' ELSE '&GOOGLE_STORAGE_ACCESS_KEY' END; + l_password := CASE WHEN UPPER('&STORAGE_PROVIDER') = 'AZURE' THEN '&AZURE_STORAGE_KEY' ELSE '&GOOGLE_STORAGE_SECRET' END; + dbms_cloud.create_credential( credential_name => '&STORAGE_CREDENTIAL_NAME', - username => '&STORAGE_ACCOUNT_NAME', - password => '&STORAGE_KEY' + username => l_username, + password => l_password ); END IF; -- OCI vs other AI services END IF; -- Storage diff --git a/sql/data-export-to-datalake.sql b/sql/data-export-to-datalake.sql index 253ea72..6227b8f 100644 --- a/sql/data-export-to-datalake.sql +++ b/sql/data-export-to-datalake.sql @@ -1,7 +1,7 @@ -- Copyright (c) 2024 Oracle and/or its affiliates. 
-- Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ -/** Export data to Azure Data Lake **/ +/** Export data to cloud storage **/ /* PREREQUISITES Install the sample schema using script