From cc60f7361df096dff3747f7247b79e2d7291b27c Mon Sep 17 00:00:00 2001
From: martygubar
Date: Fri, 15 Nov 2024 15:43:59 -0500
Subject: [PATCH] centralized credential creation and added RAG example

---
 azure-cli/config                  |   3 +-
 azure-cli/config.default          |   7 +-
 sql/README.md                     |   5 +-
 sql/config-default.sql            |   9 +-
 sql/config.sql                    |   9 +-
 sql/credential-create.sql         |  69 ++++++++++
 sql/data-export-to-datalake.sql   |  21 +--
 sql/data-import-from-datalake.sql |  15 +--
 sql/select-ai-create-profile.sql  |  26 +---
 sql/select-ai-get-started.sql     | 210 ------------------------------
 sql/select-ai-rag.sql             |  96 ++++++++++----
 11 files changed, 173 insertions(+), 297 deletions(-)
 create mode 100644 sql/credential-create.sql
 delete mode 100644 sql/select-ai-get-started.sql

diff --git a/azure-cli/config b/azure-cli/config
index 0008bd3..1a8375e 100644
--- a/azure-cli/config
+++ b/azure-cli/config
@@ -10,6 +10,7 @@ ADB_NAME="quickstart"
 # database
 VNET_NAME="dev-vnet"
 VNET_PREFIX="19x.xxx.0.0/16"
+
 # subnet for the database
 SUBNET_NAME="dev-sn-db"
 SUBNET_PREFIX="19x.xxx.1.0/24"
@@ -34,5 +35,5 @@ STORAGE_CONTAINER_NAME="adb-sample"
 ## IDENTITIES
 # This identity will be used for your VM. The password will also be used for the database ADMIN user
 USER_NAME="adb"
-# The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character.
+# The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character
 USER_PASSWORD=""
\ No newline at end of file
diff --git a/azure-cli/config.default b/azure-cli/config.default
index 5ae5540..1a8375e 100644
--- a/azure-cli/config.default
+++ b/azure-cli/config.default
@@ -9,16 +9,14 @@ ADB_NAME="quickstart"
 ## NETWORKING
 # database
 VNET_NAME="dev-vnet"
-#VNET_PREFIX="192.168.0.0/16"
 VNET_PREFIX="19x.xxx.0.0/16"
+
 # subnet for the database
 SUBNET_NAME="dev-sn-db"
-#SUBNET_PREFIX="192.168.1.0/24"
 SUBNET_PREFIX="19x.xxx.1.0/24"
 
 # client subnet
 SUBNET2_NAME="dev-sn-client"
-#SUBNET2_PREFIX="192.168.2.0/24"
 SUBNET2_PREFIX="19x.xxx.2.0/24"
 
 #network security group
@@ -37,4 +35,5 @@ STORAGE_CONTAINER_NAME="adb-sample"
 ## IDENTITIES
 # This identity will be used for your VM. The password will also be used for the database ADMIN user
 USER_NAME="adb"
-USER_PASSWORD="your-complex-password"
\ No newline at end of file
+# The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character
+USER_PASSWORD=""
\ No newline at end of file
diff --git a/sql/README.md b/sql/README.md
index 11f8b3d..a789f37 100644
--- a/sql/README.md
+++ b/sql/README.md
@@ -16,6 +16,7 @@ Try out these scripts to learn how to get started using Autonomous Database. Sim
 
 |Script|Description|
 |----|---|
+|[credential-create.sql](credential-create.sql)|Autonomous Database credentials contain the secret keys used to connect to services - like Azure OpenAI. This script creates those credentials. It's called by multiple scripts listed below|
 |[data-create-sample-schema.sql](data-create-sample-schema.sql)|Create a sample user and install sample data|
 |[data-create-synthetic-data.sql](data-create-synthetic-data.sql)|Use AI to generate sample data sets|
 |[data-import-from-datalake.sql](data-create-synthetic-data.sql)|Import sample data from Azure Data Lake. Sample data was uploaded using the [`create-all-resources.sh`](../azure-cli/create-all-resources.sh) and [`create-data-lake-storage.sh`](../azure-cli/create-data-lake-storage.sh) scripts. You can run [`show-data-lake-storage-info.sh`](../azure-cli/show-data-lake-storage-info.sh) to get connection information to the storage container.|
@@ -43,7 +44,8 @@ chmod 600 config.sql
 |**Select AI and GenAI**|
 |AZURE_OPENAI_RESOURCE_NAME|Name of the Azure OpenAI endpoint|'dev-adb-azure-openai'|
 |AZURE_OPENAI_ENDPOINT|Your Azure OpenAI endpoint (server name only)|'my-openai.openai.azure.com'|
-|AZURE_OPENAI_DEPLOYMENT_NAME|Your Azure OpenAI deployment name|'gpt-4o'|
+|AZURE_OPENAI_DEPLOYMENT_NAME|Your Azure OpenAI deployment name. This is used for NL2SQL and AI SQL functions|'gpt-4o'|
+|AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME|The Azure OpenAI deployment that uses an embedding model. This is used for creating vector embeddings.|'text-embedding-ada-002'|
 |AZURE_OPENAI_KEY|Azure OpenAI secret key|'3Cu9AB...H53'|
 |AZURE_OPENAI_PROFILE_NAME|The Select AI profile that will reference your Azure OpenAI deployment|'gpt4o'|
 |AZURE_OPENAI_CREDENTIAL_NAME|The database credential that will be used to connect to Azure OpenAI|'azure_cred4o'|
@@ -51,6 +53,7 @@
 |STORAGE_ACCOUNT_NAME|Name of your Azure Data Lake Storage Gen 2 account. You can run [`show-data-lake-storage-info.sh`](../azure-cli/show-data-lake-storage-info.sh) to get storage details|'mysamplestorage'|
 |STORAGE_URL|Azure data lake storage URL|'https://mysamplestorage.blob.core.windows.net/adb-sample'
 |STORAGE_KEY|The secret key used to connect to Azure Data Lake Storage|'dJVNxq1YTT...jp/g=='
+|STORAGE_CREDENTIAL_NAME|The name of the Autonomous Database credential that's used to connect to Azure Data Lake Storage Gen 2|'adls_cred'|
 
 You can find the Azure OpenAI settings in the Azure OpenAI Studio:
 ![Azure OpenAI settings](images/azure-openai.png)
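
For reference, a minimal sketch of running these scripts as the README describes, assuming SQLcl or SQL*Plus and a filled-in config.sql; the connect string and password here are placeholders, not values from this repo:

    connect moviestream/your-password@your-connect-string
    @data-import-from-datalake.sql
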
diff --git a/sql/config-default.sql b/sql/config-default.sql
index c9ceaca..d873fc2 100644
--- a/sql/config-default.sql
+++ b/sql/config-default.sql
@@ -9,7 +9,8 @@ define CONN='your-database-connection-string'
 -- the database user that will own the sample schema
 define USER_NAME='moviestream'
-define USER_PASSWORD='your-strong-password'
+-- The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character
+define USER_PASSWORD=''
 
 --
 -- GENAI
 --
@@ -19,6 +20,8 @@ define AZURE_OPENAI_ENDPOINT='your-azure-openai-endpoint'
 define AZURE_OPENAI_RESOURCE_NAME='your-azure-openai-resourcename'
 -- Azure OpenAI deployment Name
 define AZURE_OPENAI_DEPLOYMENT_NAME='your-azure-openai-deployment-name'
+-- Azure OpenAI Embedding deployment name. This is used for creating embeddings for RAG
+define AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME='your-azure-openai-embedding-deployment-name'
 
 -- Azure OpenAI key
 define AZURE_OPENAI_KEY='your-azure-openai-key'
@@ -32,4 +35,6 @@ define AZURE_OPENAI_CREDENTIAL_NAME='azure_cred4o'
 -- Get this information by running ../azure-cli/show-data-lake-storage-info.sh
 define STORAGE_KEY='your-azure-data-lake-storage-key'
 define STORAGE_ACCOUNT_NAME='your-azure-data-lake-storage-account-name'
-define STORAGE_URL='https://your-storage-url/adb-sample'
\ No newline at end of file
+define STORAGE_URL='https://your-storage-url'
+-- You can leave this as the default
+define STORAGE_CREDENTIAL_NAME='adls_cred'
\ No newline at end of file
diff --git a/sql/config.sql b/sql/config.sql
index e6649ea..d873fc2 100644
--- a/sql/config.sql
+++ b/sql/config.sql
@@ -9,8 +9,7 @@ define CONN='your-database-connection-string'
 -- the database user that will own the sample schema
 define USER_NAME='moviestream'
--- Password for the database user
--- The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character.
+-- The password must be between 12 and 30 characters long and must include at least one uppercase letter, one lowercase letter, and one numeric character
 define USER_PASSWORD=''
 --
 -- GENAI
 --
@@ -21,6 +20,8 @@ define AZURE_OPENAI_ENDPOINT='your-azure-openai-endpoint'
 define AZURE_OPENAI_RESOURCE_NAME='your-azure-openai-resourcename'
 -- Azure OpenAI deployment Name
 define AZURE_OPENAI_DEPLOYMENT_NAME='your-azure-openai-deployment-name'
+-- Azure OpenAI Embedding deployment name. This is used for creating embeddings for RAG
+define AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME='your-azure-openai-embedding-deployment-name'
 
 -- Azure OpenAI key
 define AZURE_OPENAI_KEY='your-azure-openai-key'
@@ -34,4 +35,6 @@ define AZURE_OPENAI_CREDENTIAL_NAME='azure_cred4o'
 -- Get this information by running ../azure-cli/show-data-lake-storage-info.sh
 define STORAGE_KEY='your-azure-data-lake-storage-key'
 define STORAGE_ACCOUNT_NAME='your-azure-data-lake-storage-account-name'
-define STORAGE_URL='https://your-storage-url/adb-sample'
\ No newline at end of file
+define STORAGE_URL='https://your-storage-url'
+-- You can leave this as the default
+define STORAGE_CREDENTIAL_NAME='adls_cred'
\ No newline at end of file
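
For context, these defines are consumed by the scripts as SQL*Plus substitution variables. For example, the storage settings above end up in calls like this one (the same pattern appears in the data scripts later in this patch):

    SELECT object_name, bytes
    FROM dbms_cloud.list_objects(
        credential_name => '&STORAGE_CREDENTIAL_NAME',
        location_uri    => '&STORAGE_URL/adb-sample'
    );
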
diff --git a/sql/credential-create.sql b/sql/credential-create.sql
new file mode 100644
index 0000000..c988419
--- /dev/null
+++ b/sql/credential-create.sql
@@ -0,0 +1,69 @@
+ARGUMENT 1 DEFAULT 'ALL'
+
+define user_param=&1
+undefine 1
+
+prompt "Creating credential: &user_param"
+
+-- Get the config information
+@config.sql
+
+-- This script will recreate credentials. You can specify a credential type (storage, OpenAI)
+
+DECLARE
+    l_exists    number := 0;
+    l_type      varchar2(20) := nvl(upper('&user_param'),'ALL');
+BEGIN
+    -- Azure OpenAI
+    if l_type in ('OPENAI','ALL') then
+        -- Create your credential. Replace it if it already exists
+        select COUNT(*)
+        into l_exists
+        from user_credentials
+        where upper(credential_name)=upper('&AZURE_OPENAI_CREDENTIAL_NAME');
+
+        IF l_exists = 1 THEN
+            dbms_cloud.drop_credential (
+                credential_name => '&AZURE_OPENAI_CREDENTIAL_NAME'
+            );
+        END IF;
+
+        dbms_cloud.create_credential (
+            credential_name => '&AZURE_OPENAI_CREDENTIAL_NAME',
+            username => 'AZURE_OPENAI',
+            password => '&AZURE_OPENAI_KEY'
+        );
+
+    END IF; -- Azure OpenAI
+
+    if l_type in ('STORAGE','ALL') then
+        -- Create your credential. Replace it if it already exists
+        select COUNT(*)
+        into l_exists
+        from user_credentials
+        where upper(credential_name)=upper('&STORAGE_CREDENTIAL_NAME');
+
+        IF l_exists = 1 THEN
+            dbms_cloud.drop_credential (
+                credential_name => '&STORAGE_CREDENTIAL_NAME'
+            );
+        END IF;
+
+        dbms_cloud.create_credential(
+            credential_name => '&STORAGE_CREDENTIAL_NAME',
+            username => '&STORAGE_ACCOUNT_NAME',
+            password => '&STORAGE_KEY'
+        );
+
+    END IF; -- Storage
+END;
+/
+
+-- Review the credentials
+COLUMN credential_name FORMAT A40
+COLUMN username FORMAT A40
+
+select
+    credential_name,
+    username
+from user_credentials;
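
A usage sketch for the new script: the argument selects which credential to recreate, and the SQLcl ARGUMENT default means calling it with no argument creates both. The lowercase values match how the other scripts in this patch invoke it:

    @credential-create.sql storage
    @credential-create.sql openai
    @credential-create.sql
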
diff --git a/sql/data-export-to-datalake.sql b/sql/data-export-to-datalake.sql
index b1bfa41..e0bfbf4 100644
--- a/sql/data-export-to-datalake.sql
+++ b/sql/data-export-to-datalake.sql
@@ -11,20 +11,13 @@
 @config.sql
 
 -- Create a credential in order to connect to data lake storage
-BEGIN
-    DBMS_CLOUD.CREATE_CREDENTIAL(
-        credential_name => 'ADLS_CRED',
-        username => '&STORAGE_ACCOUNT_NAME',
-        password => '&STORAGE_KEY'
-    );
-END;
-/
+@credential-create.sql storage
 
 -- List the files in that storage container
 SELECT object_name, bytes
 FROM dbms_cloud.list_objects(
-    credential_name => 'ADLS_CRED',
-    location_uri => '&STORAGE_URL'
+    credential_name => '&STORAGE_CREDENTIAL_NAME',
+    location_uri => '&STORAGE_URL/adb-sample'
 );
 
 --
@@ -37,8 +30,8 @@ FROM genre;
 -- Export in CSV format
 BEGIN
     DBMS_CLOUD.EXPORT_DATA(
-        credential_name => 'ADLS_CRED',
-        file_uri_list => '&STORAGE_URL/data/genre/genre',
+        credential_name => '&STORAGE_CREDENTIAL_NAME',
+        file_uri_list => '&STORAGE_URL/adb-sample/data/genre/genre',
         query => 'SELECT * FROM genre',
         format => JSON_OBJECT('type' VALUE 'csv', 'delimiter' VALUE ',')
     );
@@ -48,6 +41,6 @@ END;
 /
 
 -- List the files in that storage container. Notice the new genre data.
 SELECT object_name, bytes
 FROM dbms_cloud.list_objects(
-    credential_name => 'ADLS_CRED',
-    location_uri => '&STORAGE_URL'
+    credential_name => '&STORAGE_CREDENTIAL_NAME',
+    location_uri => '&STORAGE_URL/adb-sample'
 );
\ No newline at end of file
diff --git a/sql/data-import-from-datalake.sql b/sql/data-import-from-datalake.sql
index f90cc4d..c54c2dc 100644
--- a/sql/data-import-from-datalake.sql
+++ b/sql/data-import-from-datalake.sql
@@ -6,20 +6,13 @@
 @config.sql
 
 -- Create a credential in order to connect to data lake storage
-BEGIN
-    DBMS_CLOUD.CREATE_CREDENTIAL(
-        credential_name => 'ADLS_CRED',
-        username => '&STORAGE_ACCOUNT_NAME',
-        password => '&STORAGE_KEY'
-    );
-END;
-/
+@credential-create.sql storage
 
 -- List the files
 SELECT object_name, bytes
 FROM dbms_cloud.list_objects(
-    credential_name => 'ADLS_CRED',
-    location_uri => '&STORAGE_URL'
+    credential_name => '&STORAGE_CREDENTIAL_NAME',
+    location_uri => '&STORAGE_URL/adb-sample'
 );
 
 -- Create a table for movies that were created from books
@@ -37,7 +30,7 @@ CREATE TABLE movie_from_book
 BEGIN
     DBMS_CLOUD.COPY_DATA (
         table_name => 'MOVIE_FROM_BOOK',
-        credential_name => 'ADLS_CRED',
-        file_uri_list => 'https://storagemartygubaradb.blob.core.windows.net/adb-sample/data/movie_from_book/*.csv',
+        credential_name => '&STORAGE_CREDENTIAL_NAME',
+        file_uri_list => '&STORAGE_URL/adb-sample/data/movie_from_book/*.csv',
         field_list => 'MOVIE_NAME CHAR(4000),
            MOVIE_RELEASE_DATE CHAR date_format DATE MASK "YYYY-MM-DD",
diff --git a/sql/select-ai-create-profile.sql b/sql/select-ai-create-profile.sql
index fae551a..3cc8afe 100644
--- a/sql/select-ai-create-profile.sql
+++ b/sql/select-ai-create-profile.sql
@@ -7,33 +7,9 @@
 -- config.sql contains the endpoints, resource groups and other settings required to connect to your Azure OpenAI deployment
 @config.sql
 
-
 -- Create a credential that allows the user to access the Azure OpenAI endpoint
-DECLARE
-    l_exists number := 0;
-BEGIN
-    -- Create your credential. Replace it if already exists
-    select COUNT(*)
-    into l_exists
-    from user_credentials
-    where upper(credential_name)=upper('&AZURE_OPENAI_CREDENTIAL_NAME');
-
-    IF l_exists = 1 THEN
-        dbms_cloud.drop_credential (
-            credential_name => '&AZURE_OPENAI_CREDENTIAL_NAME'
-        );
-    END IF;
-
-
-    dbms_cloud.create_credential (
-        credential_name => '&AZURE_OPENAI_CREDENTIAL_NAME',
-        username => 'AZURE_OPENAI',
-        password => '&AZURE_OPENAI_KEY'
-    );
-
-END;
-/
+@credential-create.sql openai
 
 /*
    A Select AI profile describes the LLM you will use plus information that will be used for natural language queries. You can create as many
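
For reference, a minimal sketch of using the profile once select-ai-create-profile.sql has run: set_profile scopes it to the session, after which natural-language queries work directly. The define comes from config.sql and the question is illustrative:

    BEGIN
        dbms_cloud_ai.set_profile(
            profile_name => '&AZURE_OPENAI_PROFILE_NAME'
        );
    END;
    /

    SELECT AI what are our total streams broken out by genre;
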
diff --git a/sql/select-ai-get-started.sql b/sql/select-ai-get-started.sql
deleted file mode 100644
index 5daad59..0000000
--- a/sql/select-ai-get-started.sql
+++ /dev/null
@@ -1,210 +0,0 @@
-/* Run this script as the Autonomous Database database user that will be access Azure OpenAI */
-
--- config.sql contains the endpoints, resource groups and other settings required to connect to your Azure OpenAI deployment
-@./config.sql
-
-ACCEPT azureOpenAIKey CHAR PROMPT 'Enter your Azure OpenAI Key: ' HIDE
-
--- Create a credential that allows the user to access the Azure OpenAI endpoint
-BEGIN
-    dbms_cloud.create_credential(
-        credential_name => '&azureOpenAICredentialName',
-        username => 'AZURE_OPENAI',
-        password => '&azureOpenAIKey');
-END;
-/
-
-/*
-   A Select AI profile describes the LLM you will use plus information that will be used for natural language queries. You can create as many
-   AI profiles as you need. You may want to try different models to see their effectiveness, expose profiles to different user groups, etc.:
-   1. For Azure OpenAI, a deployment was created that is using the gpt-4o model
-   2. The object list contains the tables that will be the targets for natural language queries
-*/
-
-begin
-    -- recreate the profile
-    dbms_cloud_ai.drop_profile(
-        profile_name => '&profileName',
-        force => true
-    );
-
-    -- create an AI profile
-    dbms_cloud_ai.create_profile(
-        profile_name => '&profileName',
-        attributes =>
-            '{"provider": "azure",
-            "azure_resource_name": "&azureOpenAIResourceName",
-            "azure_deployment_name": "&azureOpenAIDeploymentName",
-            "credential_name": "&azureOpenAICredentialName",
-            "comments":"true",
-            "object_list": [
-                {"owner": "&userName", "name": "GENRE"},
-                {"owner": "&userName", "name": "CUSTOMER"},
-                {"owner": "&userName", "name": "PIZZA_SHOP"},
-                {"owner": "&userName", "name": "STREAMS"},
-                {"owner": "&userName", "name": "MOVIES"},
-                {"owner": "&userName", "name": "ACTORS"}
-            ]
-            }'
-    );
-end;
-/
-
--- Set that profile for this session
-begin
-    dbms_cloud_ai.set_profile(
-        profile_name => '&profileName'
-    );
-end;
-/
-
-/**
-Start asking questions!
-Notice how the SQL language has been extended with new AI keywords
--- 1. chat - general AI chat
--- 2. runsql - [default] ask a question and get a structured result
--- 3. narrate - ask a question and get a conversational result
--- 4. showsql - SQL used to produce the result
--- 5. explainsql - explains the query and its processing
-*/
--- simple chat
-select ai chat what happened to the new england patriots;
-
--- use your data
-select ai what are our total views;
-select ai showsql what are our total views;
-
--- more sophisticated
-select ai what are our total streams broken out by genre;
-select ai explainsql what are our total streams broken out by genre;
-
-select ai what are total streams by movie for tom hanks movies;
-
-/**
-There are also api's for using Select AI
-*/
--- Ask another simple question
-SELECT
-    DBMS_CLOUD_AI.GENERATE(
-        PROMPT => 'What is Tom Hanks best known for',
-        PROFILE_NAME => '&profileName',
-        ACTION => 'chat'
-    ) AS response
-FROM dual;
-
-/**
- what's great is you can now easily apply AI to your organization's data with a simple query
-*/
--- look at a humorous support chat
-SELECT support_chat
-FROM v_customer_support
-WHERE support_chat_id = 1;
-
-/*
-Let's summarize find out the sentiment of the support conversation.
-A JSON document is a really good way to structure the prompt; the LLM can easily interpret the
-task and data set to operate on. The following SQL query combines the task with the data set.
-*/
--- Here's the task and we'll apply it to the support chat.
-SELECT JSON_OBJECT(
-    'task' VALUE 'summarize the support chat in 3 sentences. also return the customer sentiment',
-    support_chat) AS prompt_details
-FROM v_customer_support WHERE support_chat_id = 1;
-
--- now apply GenAI in a query to get teh answer
-WITH prompt_document AS (
-    -- this json document
-    SELECT JSON_OBJECT(
-        'task' VALUE 'summarize the support chat in 3 sentences. also return the customer sentiment',
-        support_chat) AS prompt_details
-    FROM v_customer_support WHERE support_chat_id = 1
-)
-SELECT
-    DBMS_CLOUD_AI.GENERATE(
-        PROMPT => prompt_details,
-        PROFILE_NAME => '&profileName',
-        ACTION => 'chat'
-    ) AS response
-FROM prompt_document;
-
-/* Create an EMAIL promotion to a customer. Recommend movies based on
-   those they previously watched AND movies that Moviestream wants to promote.
-   This is information the LLM knows nothing about - the prompt will augment the model
-   with customer data
-*/
-WITH promoted_movie_list AS
-(
-    -- movies we want to promote
-    SELECT
-        json_arrayagg(json_object(
-            'title' value m.json_document.title ,
-            'year' value m.json_document.year)
-        ) as promoted_movie_list
-    FROM "movieCollection" m
-    WHERE m.json_document.studio like '%Amblin Entertainment%'
-),
-customer_latest_movies AS (
-    -- movies the customer watched
-    SELECT
-        s.cust_id,
-        m.title,
-        m.year,
-        max(s.day_id) as day_id
-    FROM streams s, movies m, v_target_customers c
-    WHERE m.movie_id = s.movie_id
-    and c.customer_id = 1
-    and c.cust_id = s.cust_id
-    GROUP BY s.cust_id, m.title, m.year
-    ORDER BY day_id desc
-    FETCH first 3 ROWS ONLY
-),
-customer_details AS (
-    -- attributes about the customer
-    SELECT
-        m.cust_id,
-        c.customer_id,
-        c.first_name,
-        c.last_name,
-        c.age,
-        c.gender,
-        c.has_kids,
-        c.marital_status,
-        c.dog_owner,
-        max(day_id),
-        json_arrayagg(m.title) as recently_watched_movies
-    FROM v_target_customers c, customer_latest_movies m
-    WHERE
-        c.cust_id = m.cust_id
-    GROUP BY
-        m.cust_id,
-        c.customer_id,
-        first_name,
-        last_name,
-        age,
-        gender,
-        has_kids,
-        marital_status,
-        dog_owner
-),
-dataset AS (
-    -- combine this into a json document
-    SELECT json_object(p.*, c.*) doc
-    FROM customer_details c, promoted_movie_list p
-)
-SELECT
-    -- generate the promotion!
-    DBMS_CLOUD_AI.GENERATE (
-        prompt => 'Create a promotional email with a catchy subject line and convincing email text. Follow the task rules. ' ||
-                  '1. Recommend 3 movies from the promoted movie list that are most similar to movies in the recently watched movie list. ' ||
-                  '   Do not say that we are promoting these movies. For each move, say why you will love them.' ||
-                  '2. Use lots of fun emojis in the response. ' ||
-                  '3. Finish the email thanking them for being a customer and sign it "From The MovieStream Team" \n'
-                  || doc,
-        profile_name => '&profileName',
-        action => 'chat'
-    ) AS email_promotion
-FROM dataset;
-
-
diff --git a/sql/select-ai-rag.sql b/sql/select-ai-rag.sql
index 9e86193..2eb72c6 100644
--- a/sql/select-ai-rag.sql
+++ b/sql/select-ai-rag.sql
@@ -6,38 +6,75 @@
 -- config.sql contains the endpoints, resource groups and other settings required to connect to your Azure OpenAI deployment
 @./config.sql
 
+-- Make sure credentials have been defined to enable access to Azure OpenAI and Azure Blob Storage
+@credential-create.sql ALL
+
 --
 -- Customer support site
 --
--- Start with the AI profile that uses OCI GenAI llama-3 model + a vector to support RAG
-BEGIN
-
-    DBMS_CLOUD_AI.create_profile(
-        'SUPPORT_SITE',
-        '{"provider": "oci",
-        "credential_name": "OCIAI_CRED",
-        "region":"us-chicago-1",
-        "oci_compartment_id":"ocid1.compartment.oc1..aaaaaaaaoroyej3uayfybk6cbgcjkciidbxhcinpxnptcn6ley7bqb677hpq",
-        "vector_index_name": "support"
-        }');
-end;
-/
+-- This script will create a vector index using Select AI based on web site files that were uploaded to Azure Blob Storage
+-- It's a support site that helps customers get answers to issues with Oracle MovieStream - a fictitious movie streaming service
+
+-- Review the file list
+SELECT object_name, bytes
+FROM dbms_cloud.list_objects(
+    credential_name => '&STORAGE_CREDENTIAL_NAME',
+    location_uri => '&STORAGE_URL/adb-sample/support-site'
+);
+
+-- Start with the AI profile for our support site that uses Azure OpenAI and a vector index
+-- The vector index will be used for RAG
+BEGIN
+    -- recreate the profile in case it already exists
+    dbms_cloud_ai.drop_profile (
+        profile_name => 'SUPPORT_SITE',
+        force => true
+    );
+
+    -- create an AI profile that will use the SUPPORT vector index that's created in the next step
+    dbms_cloud_ai.create_profile (
+        profile_name => 'SUPPORT_SITE',
+        attributes =>
+            '{"provider": "azure",
+            "azure_resource_name": "&AZURE_OPENAI_RESOURCE_NAME",
+            "azure_deployment_name": "&AZURE_OPENAI_DEPLOYMENT_NAME",
+            "azure_embedding_deployment_name":"&AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME",
+            "credential_name": "&AZURE_OPENAI_CREDENTIAL_NAME",
+            "vector_index_name": "SUPPORT"
+            }'
+    );
+END;
+/
 
 -- Create your vector index
-begin
-    DBMS_CLOUD_AI.create_vector_index(
-        index_name => 'support',
-        attributes => '{"vector_db_provider": "oracle",
-                        "location": "https://objectstorage.us-ashburn-1.oraclecloud.com/n/adwc4pm/b/moviestream-support/o/",
-                        "profile_name": "SUPPORT_SITE",
-                        "vector_dimension": 1024,
-                        "vector_distance_metric": "cosine",
-                        "chunk_overlap":50,
-                        "chunk_size":450
-                        }');
-end;
+BEGIN
+    -- Recreate the vector index in case it already exists
+    dbms_cloud_ai.drop_vector_index (
+        index_name => 'SUPPORT',
+        force => true
+    );
+
+    -- Create a vector index that points to the Azure storage location. This will create a pipeline that loads the index and keeps
+    -- it up to date
+    dbms_cloud_ai.create_vector_index(
+        index_name => 'SUPPORT',
+        attributes => '{"vector_db_provider": "oracle",
+                        "object_storage_credential_name": "&STORAGE_CREDENTIAL_NAME",
+                        "location": "&STORAGE_URL/adb-sample/support-site/",
+                        "profile_name": "SUPPORT_SITE",
+                        "vector_dimension": 1536,
+                        "vector_distance_metric": "cosine",
+                        "chunk_overlap":50,
+                        "chunk_size":450
+                        }'
+    );
+END;
 /
 
+-- A pipeline was created and you can see it here
+select * from user_cloud_pipelines;
+
+-- Let's ask support questions using Select AI and this new profile
 begin
     dbms_cloud_ai.set_profile(
         profile_name => 'SUPPORT_SITE'
     );
 end;
 /
 
 -- Ask your questions!
-SELECT AI NARRATE my roku is stuck on the opening scene of my movie ;
+SELECT AI NARRATE George Clooney lips are moving but I cannot hear him;
 
+-- Or use the PL/SQL API
+SELECT
+    dbms_cloud_ai.generate (
+        profile_name => 'SUPPORT_SITE',
+        action => 'narrate',
+        prompt => 'George Clooney lips are moving but I cannot hear him'
+    ) as support_question;
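
As a quick smoke test once the vector index pipeline has loaded (the pipeline view is queried in the script above), any support topic from the uploaded site files should work; this question is illustrative, not taken from the patch:

    SELECT AI NARRATE how do I troubleshoot video that keeps buffering;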