diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json index d6eb0bd..549a268 100644 --- a/.config/dotnet-tools.json +++ b/.config/dotnet-tools.json @@ -2,20 +2,14 @@ "version": 1, "isRoot": true, "tools": { - "dotnet-reportgenerator-globaltool": { - "version": "5.2.0", - "commands": [ - "reportgenerator" - ] - }, "docfx": { - "version": "2.74.1", + "version": "2.75.3", "commands": [ "docfx" ] }, "DocFxTocGenerator": { - "version": "1.17.0", + "version": "1.18.0", "commands": [ "DocFxTocGenerator" ] diff --git a/.editorconfig b/.editorconfig index c68c8b4..fcdafe0 100644 --- a/.editorconfig +++ b/.editorconfig @@ -91,6 +91,7 @@ csharp_style_conditional_delegate_call = true:suggestion csharp_style_pattern_matching_over_is_with_cast_check = true:suggestion csharp_space_around_binary_operators = before_and_after dotnet_diagnostic.SA0001.severity = none +dotnet_diagnostic.SA1201.severity = none dotnet_diagnostic.SA1649.severity = none dotnet_analyzer_diagnostic.category-StyleCop.CSharp.DocumentationRules.severity = none dotnet_analyzer_diagnostic.category-StyleCop.CSharp.LayoutRules.severity = none diff --git a/.github/workflows/publishdocs-dryrun.yml b/.github/workflows/publishdocs-dryrun.yml index d13c508..79618c9 100644 --- a/.github/workflows/publishdocs-dryrun.yml +++ b/.github/workflows/publishdocs-dryrun.yml @@ -25,8 +25,6 @@ jobs: - uses: actions/download-artifact@v3 with: name: coverage-${{ inputs.runs-on-config }}-${{ inputs.build-config }} - - name: Restore .NET Tools - run: dotnet tool restore - name: Set up Python uses: actions/setup-python@v4 with: @@ -35,22 +33,9 @@ jobs: shell: pwsh run: | ./scripts/generate-docs.ps1 -SkipPreClean - cd ./docs - zip -r ../docs.zip * - cd .. - - name: Generate Coverage - run: | - dotnet reportgenerator -reporttypes:"Html;Badges" -reports:**/coverage*.cobertura.xml -targetdir:./coverage - cd ./coverage - zip -r ../coverage.zip * - cd .. + Compress-Archive ./docs/* -Destination docs.zip - name: Upload Docs Artifact uses: actions/upload-artifact@v3 with: name: docs - path: docs.zip - - name: Upload Coverage Artifact - uses: actions/upload-artifact@v3 - with: - name: coverage - path: coverage.zip + path: docs.zip \ No newline at end of file diff --git a/.github/workflows/publishdocs.yml b/.github/workflows/publishdocs.yml index 087ca05..fb117e9 100644 --- a/.github/workflows/publishdocs.yml +++ b/.github/workflows/publishdocs.yml @@ -39,8 +39,6 @@ jobs: - uses: actions/download-artifact@v3 with: name: coverage-${{ inputs.runs-on-config }}-${{ inputs.build-config }} - - name: Restore .NET Tools - run: dotnet tool restore - name: Set up Python uses: actions/setup-python@v4 with: @@ -49,31 +47,18 @@ jobs: shell: pwsh run: | ./scripts/generate-docs.ps1 -SkipPreClean - cd ./docs - zip -r ../docs.zip * - cd .. - - name: Generate Coverage - run: | - dotnet reportgenerator -reporttypes:"Html;Badges" -reports:**/coverage*.cobertura.xml -targetdir:./coverage - cd ./coverage - zip -r ../coverage.zip * - cd .. 
+ Compress-Archive ./docs/* -Destination docs.zip - name: Upload Docs Artifact uses: actions/upload-artifact@v3 with: name: docs path: docs.zip - - name: Upload Coverage Artifact - uses: actions/upload-artifact@v3 - with: - name: coverage - path: coverage.zip - name: Setup GitHub Pages - uses: actions/configure-pages@v3.0.6 + uses: actions/configure-pages@v4.0.0 - name: Upload Github Pages - uses: actions/upload-pages-artifact@v2 + uses: actions/upload-pages-artifact@v3.0.1 with: path: './docs' - name: Deploy to GitHub Pages id: deployment - uses: actions/deploy-pages@v1.2.9 + uses: actions/deploy-pages@v4.0.4 diff --git a/.github/workflows/sdk-workflow.yml b/.github/workflows/sdk-workflow.yml index 982dd5a..283daab 100644 --- a/.github/workflows/sdk-workflow.yml +++ b/.github/workflows/sdk-workflow.yml @@ -129,7 +129,7 @@ jobs: uses: ./.github/workflows/publishdocs-dryrun.yml if: ${{ (inputs.publish-release != 'Prod' && inputs.publish-release != 'Prod-Internal' && inputs.publish-docs == false) || (github.ref != 'refs/heads/main' && !startsWith(github.ref, 'refs/heads/release/') && !startsWith(github.ref, 'refs/tags/release/')) }} with: - runs-on-config: ${{ vars.PUBLISH_OS }} + runs-on-config: ${{ vars.DOCS_OS }} build-config: ${{ vars.PUBLISH_CONFIGURATION }} python-version: ${{ vars.PYTHON_PUBLISH_DOCS_VERSION }} publish-docs: @@ -137,7 +137,7 @@ jobs: uses: ./.github/workflows/publishdocs.yml if: ${{ (inputs.publish-release == 'Prod' || inputs.publish-release == 'Prod-Internal' || inputs.publish-docs == true) && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/') || startsWith(github.ref, 'refs/tags/release/')) }} with: - runs-on-config: ${{ vars.PUBLISH_OS }} + runs-on-config: ${{ vars.DOCS_OS }} build-config: ${{ vars.PUBLISH_CONFIGURATION }} python-version: ${{ vars.PYTHON_PUBLISH_DOCS_VERSION }} create-release-from-dry-run: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index df1daec..a65eec9 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -8,6 +8,10 @@ This page lists the operational governance model of this project, as well as the The intent and goal of open sourcing this project is to increase the contributor and user base. The governance model is one where new project leads (`admins`) will be added to the project based on their contributions and efforts, a so-called "do-acracy" or "meritocracy" similar to that used by all Apache Software Foundation projects. +## Getting started + +Please join the community on {Here list Slack channels, Email lists, Glitter, Discord, etc... links}. Also please make sure to take a look at the project [roadmap](ROADMAP.md) to see where are headed. + ## Issues, requests & ideas Use GitHub Issues page to submit issues, enhancement requests and discuss ideas. 
diff --git a/Directory.Build.props b/Directory.Build.props index c2203b1..8cfe4ad 100644 --- a/Directory.Build.props +++ b/Directory.Build.props @@ -4,7 +4,7 @@ enable true true - 1.1.0 + 2.0.0 Tableau Software, LLC Tableau Software, LLC Copyright (c) 2024, Tableau Software, LLC and its licensors diff --git a/examples/Csharp.ExampleApplication/MyMigrationApplication.cs b/examples/Csharp.ExampleApplication/MyMigrationApplication.cs index dd0cb00..f337e0d 100644 --- a/examples/Csharp.ExampleApplication/MyMigrationApplication.cs +++ b/examples/Csharp.ExampleApplication/MyMigrationApplication.cs @@ -101,8 +101,8 @@ public async Task StartAsync(CancellationToken cancel) // Add post-publish hooks #region UpdatePermissionsHook-Registration - _planBuilder.Hooks.Add>(); - _planBuilder.Hooks.Add>(); + _planBuilder.Hooks.Add>(); + _planBuilder.Hooks.Add>(); #endregion #region BulkLoggingHook-Registration diff --git a/examples/Csharp.ExampleApplication/appsettings.json b/examples/Csharp.ExampleApplication/appsettings.json index 9e29154..b4eb76a 100644 --- a/examples/Csharp.ExampleApplication/appsettings.json +++ b/examples/Csharp.ExampleApplication/appsettings.json @@ -11,7 +11,29 @@ }, "tableau": { "migrationSdk": { - "batchSize": 50 + "contentTypes": [ + { + "type": "user", + "batchSize": 50, + "batchPublishingEnabled": true + }, + { + "type": "group", + "batchSize": 60 + }, + { + "type": "project", + "batchSize": 70 + }, + { + "type": "datasource", + "batchSize": 80 + }, + { + "type": "workbook", + "batchSize": 90 + } + ] }, "emailDomainMapping": { "emailDomain": "" diff --git a/examples/Python.ExampleApplication/requirements.txt b/examples/Python.ExampleApplication/requirements.txt index f13abfc..451a49b 100644 --- a/examples/Python.ExampleApplication/requirements.txt +++ b/examples/Python.ExampleApplication/requirements.txt @@ -1,7 +1,7 @@ --index-url https://artifactory.prod.tableautools.com/artifactory/api/pypi/tabpypi/simple --pre -pip==23.3.2 -setuptools==69.0.3 +pip==24.0.0 +setuptools==69.1.0 configparser==6.0.0 tableau_migration cffi==1.16.0 diff --git a/global.json b/global.json index 62b2d73..922949d 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "sdk": { - "version": "8.0.101", + "version": "8.0.201", "rollForward": "latestMajor" } } \ No newline at end of file diff --git a/src/Documentation/articles/configuration.md b/src/Documentation/articles/configuration.md index 8e27541..1c8f8ef 100644 --- a/src/Documentation/articles/configuration.md +++ b/src/Documentation/articles/configuration.md @@ -5,47 +5,58 @@ The Migration SDK uses two sources of configuration in two blocks: the [Migratio ![Configuration Blocks](../images/configuration.png) ## Migration Plan + The migration plan is a required input in the migration process. It will define the Source and Destination servers and the hooks executed during the migration. Consider the Migration Plan as the steps the Migration SDK will follow to migrate the information from a given Source Server to a given Destination Server. The [`IMigrationPlan`](xref:Tableau.Migration.IMigrationPlan) interface defines the Migration Plan structure. And the easiest path to generate a new Migration Plan is the [`IMigrationPlanBuilder`](xref:Tableau.Migration.IMigrationPlanBuilder) implementation [**`MigrationPlanBuilder`**](xref:Tableau.Migration.Engine.MigrationPlanBuilder). For that, it is needed a few steps before [building](#build) a new plan: + - [Define the required Source server](#source). 
- [Define the required Destination server](#destination). - [Define the required Migration Type](#migration-type). - [Add supplementary hooks](#add-hooks). ### Source + *Optional/Required:* **Required**. *Description:* The method [`MigrationPlanBuilder.FromSourceTableauServer`](xref:Tableau.Migration.Engine.MigrationPlanBuilder#Tableau_Migration_Engine_MigrationPlanBuilder_FromSourceTableauServer_System_Uri_System_String_System_String_System_String_System_Boolean_) will define the source server by instantiating a new [`TableauSiteConnectionConfiguration`](xref:Tableau.Migration.Api.TableauSiteConnectionConfiguration) with the following parameters: + - **serverUrl:** Required. - **siteContentUrl:** Optional. - **accessTokenName:** Required. - **accessToken:** Required. + > [!Important] -> Personal access tokens (PATs) are long-lived authentication tokens that allow you to sign in to the Tableau REST API without requiring hard-coded credentials or interactive signin. Revoke and generate a new PAT every day to keep your server secure. Access tokens should not be stored in plain text in application configuration files, and should instead use secure alternatives such as encryption or a secrets management system. If the source and destination sites are on the same server, separate PATs should be used. +> Personal access tokens (PATs) are long-lived authentication tokens that allow you to sign in to the Tableau REST API without requiring hard-coded credentials or interactive sign-in. Revoke and generate a new PAT every day to keep your server secure. Access tokens should not be stored in plain text in application configuration files, and should instead use secure alternatives such as encryption or a secrets management system. If the source and destination sites are on the same server, separate PATs should be used. ### Destination + *Optional/Required:* **Required**. *Description:* The method [`MigrationPlanBuilder.ToDestinationTableauCloud`](xref:Tableau.Migration.Engine.MigrationPlanBuilder#Tableau_Migration_Engine_MigrationPlanBuilder_ToDestinationTableauCloud_System_Uri_System_String_System_String_System_String_System_Boolean_) will define the destination server by instantiating a new [`TableauSiteConnectionConfiguration`](xref:Tableau.Migration.Api.TableauSiteConnectionConfiguration) with the following parameters: + - **podUrl:** Required. - **siteContentUrl:** Required. This is the site name on Tableau Cloud. - **accessTokenName:** Required. - **accessToken:** Required. + > [!Important] > Personal access tokens (PATs) are long-lived authentication tokens that allow you to sign in to the Tableau REST API without requiring hard-coded credentials or interactive signin. Revoke and generate a new PAT every day to keep your server secure. Access tokens should not be stored in plain text in application configuration files, and should instead use secure alternatives such as encryption or a secrets management system. If the source and destination sites are on the same server, separate PATs should be used. ### Migration Type + *Optional/Required:* **Required**. *Description:* The method [`MigrationPlanBuilder.ForServerToCloud`](xref:Tableau.Migration.Engine.MigrationPlanBuilder#Tableau_Migration_Engine_MigrationPlanBuilder_ForServerToCloud) will define the migration type and load all default hooks for a **Server to Cloud** migration. ### Add Hooks + *Optional/Required:* **Optional**. 
*Description:* The Plan Builder exposes the properties [`MigrationPlanBuilder.Hooks`](xref:Tableau.Migration.Engine.MigrationPlanBuilder#Tableau_Migration_Engine_MigrationPlanBuilder_Hooks), [`MigrationPlanBuilder.Filters`](xref:Tableau.Migration.Engine.MigrationPlanBuilder#Tableau_Migration_Engine_MigrationPlanBuilder_Filters), [`MigrationPlanBuilder.Mappings`](xref:Tableau.Migration.Engine.MigrationPlanBuilder#Tableau_Migration_Engine_MigrationPlanBuilder_Mappings), and [`MigrationPlanBuilder.Transformers`](xref:Tableau.Migration.Engine.MigrationPlanBuilder#Tableau_Migration_Engine_MigrationPlanBuilder_Transformers). With these properties, it is possible to adjust a given migration plan for specific scenarios. For more details, check the [Custom Hooks article](advanced_config/hooks/custom_hooks.md). ### Build + *Optional/Required:* **Required**. *Description:* The method [`MigrationPlanBuilder.Build`](xref:Tableau.Migration.Engine.MigrationPlanBuilder#Tableau_Migration_Engine_MigrationPlanBuilder_Build) will generate a Migration Plan ready to be used as an input to a migration process. @@ -54,23 +65,91 @@ The [`IMigrationPlan`](xref:Tableau.Migration.IMigrationPlan) interface defines [`MigrationSdkOptions`](xref:Tableau.Migration.Config.MigrationSdkOptions) is the configuration class the Migration SDK uses internally to process a migration. It contains adjustable properties that change some engine behaviors. These properties are useful tools to troubleshoot and tune a migration process. Start with this class and others in the [Config](xref:Tableau.Migration.Config) section for more details. -When writting a C# application, it is recommended that a [.NET Generic Host](https://learn.microsoft.com/en-us/dotnet/core/extensions/generic-host?tabs=appbuilder) is used to initialize the application. This will enable setting configuration values via `appsettings.json` which can be passed into `userOptions` in [`.AddTableauMigrationSdk`](xref:Tableau.Migration.IServiceCollectionExtensions.html#Tableau_Migration_IServiceCollectionExtensions_AddTableauMigrationSdk_Microsoft_Extensions_DependencyInjection_IServiceCollection_Microsoft_Extensions_Configuration_IConfiguration_). See [.NET getting started examples](..\samples\csharp.md) for more info. +When writing a C# application, it is recommended that a [.NET Generic Host](https://learn.microsoft.com/en-us/dotnet/core/extensions/generic-host?tabs=appbuilder) is used to initialize the application. This will enable setting configuration values via `appsettings.json` which can be passed into `userOptions` in [`.AddTableauMigrationSdk`](xref:Tableau.Migration.IServiceCollectionExtensions#Tableau_Migration_IServiceCollectionExtensions_AddTableauMigrationSdk_Microsoft_Extensions_DependencyInjection_IServiceCollection_Microsoft_Extensions_Configuration_IConfiguration_). See [.NET getting started examples](~/samples/csharp.md) for more info. + +When writing a python application, configuration values are set via environment variables. The `:` delimiter doesn't work with environment variable hierarchical keys on all platforms. For example, the `:` delimiter is not supported by Bash. The double underscore (`__`), which is supported on all platforms, automatically replaces any `:` delimiters in environment variables. All configuration environment variables start with `MigrationSDK__`. -When writting a python application, configuration values are set via environment variables. 
The `:` delimiter doesn't work with environment variable hierarchical keys on all platforms. For example, the `:` delimiter is not supported by Bash. The double underscore (`__`), which is supported on all platforms, automatically replaces any `:` delimiters in environment variables. All configuration environment variables start with `MigrationSDK__`.

### ContentTypes

-### BatchSize
-*Reference:* [`MigrationSdkOptions.BatchSize`](xref:Tableau.Migration.Config.MigrationSdkOptions#Tableau_Migration_Config_MigrationSdkOptions_BatchSize).
+*Reference:* [`MigrationSdkOptions.ContentTypesOptions`](xref:Tableau.Migration.Config.ContentTypesOptions).
-*Default:* [`MigrationSdkOptions.Defaults.BATCH_SIZE`](xref:Tableau.Migration.Config.MigrationSdkOptions.Defaults#Tableau_Migration_Config_MigrationSdkOptions_Defaults_BATCH_SIZE).
+This is an array of [`MigrationSdkOptions.ContentTypesOptions`](xref:Tableau.Migration.Config.ContentTypesOptions). Each array object corresponds to settings for a single content type.
-*Python Environment Variable:* `MigrationSDK__BatchSize`
+> [!IMPORTANT]
+> The [type](xref:Tableau.Migration.Config.ContentTypesOptions.Type) values are case-insensitive.
+> Duplicate [type](xref:Tableau.Migration.Config.ContentTypesOptions.Type) key values will result in an exception.
+
+In the following `json` example config file, a `BatchSize` of `201` is applied to the content type `User`. The same setting for `Project` is `203`.
+
+```JSON
+{
+  "MigrationSdkOptions": {
+    "contentTypes": [
+      {
+        "type": "User",
+        "batchSize": 201
+      },
+      {
+        "type": "Project",
+        "batchSize": 203
+      }
+    ]
+  }
+}
+```
+
+*Python Environment Variables:*
+
+- `MigrationSDK__ContentTypes__<array index>__Type`
+- `MigrationSDK__ContentTypes__<array index>__BatchSize`
+
+Here is an example of the environment variables you would set, equivalent to the previous `json` example. Note the array indexes: they tie the settings for each content type together in the Migration SDK.
+
+```bash
+MigrationSDK__ContentTypes__0__Type = User
+MigrationSDK__ContentTypes__0__BatchSize = 201
+MigrationSDK__ContentTypes__1__Type = Project
+MigrationSDK__ContentTypes__1__BatchSize = 203
+```
+
+The following sections describe each setting. Settings are always applied per content type, as described previously. If a setting below is not set for a content type, the Migration SDK falls back to the default value.
+
+#### ContentTypes.Type
+
+*Reference:* [`MigrationSdkOptions.ContentTypes.Type`](xref:Tableau.Migration.Config.ContentTypesOptions.Type).
+
+*Default:* blank string.
+
+*Reload on Edit?:* **Yes**. The update will apply next time the Migration SDK requests a list of objects.
+
+*Description:* For each array object, the [type](xref:Tableau.Migration.Config.ContentTypesOptions.Type) key determines which content type the settings apply to. Only supported content types are considered; all others are ignored. This key comes from the interface for the content type and is determined by [MigrationPipelineContentType.GetConfigKey()](xref:Tableau.Migration.Engine.Pipelines.MigrationPipelineContentType.GetConfigKey). For example, the key for [IUser](xref:Tableau.Migration.Content.IUser) is `User`. Content type [type](xref:Tableau.Migration.Config.ContentTypesOptions.Type) values are case-insensitive.
+
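+The following is a minimal C# sketch (not part of the shipped samples) of how these per-content-type settings can be read back at runtime through `IConfigReader` after registering the SDK with a .NET Generic Host. It assumes the `tableau:migrationSdk` section name used by the example `appsettings.json`; the exact options type returned by `Get<IUser>()` may differ.
+
+```csharp
+using System;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Hosting;
+using Tableau.Migration;
+using Tableau.Migration.Config;
+using Tableau.Migration.Content;
+
+// Build a host and hand the SDK the configuration section that holds the
+// "contentTypes" array shown above (section name taken from the example appsettings.json).
+var host = Host.CreateDefaultBuilder(args)
+    .ConfigureServices((ctx, services) =>
+        services.AddTableauMigrationSdk(ctx.Configuration.GetSection("tableau:migrationSdk")))
+    .Build();
+
+// Resolve the SDK's configuration reader and inspect the effective batch size for users.
+var configReader = host.Services.GetRequiredService<IConfigReader>();
+Console.WriteLine($"User batch size: {configReader.Get<IUser>().BatchSize}");
+```
+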
+#### ContentTypes.BatchSize
+
+*Reference:* [`MigrationSdkOptions.ContentTypes.BatchSize`](xref:Tableau.Migration.Config.ContentTypesOptions.BatchSize).
+
*Default:* [`MigrationSdkOptions.ContentTypes.Defaults.BATCH_SIZE`](xref:Tableau.Migration.Config.ContentTypesOptions.Defaults.BATCH_SIZE).

*Reload on Edit?:* **Yes**. The update will apply next time the Migration SDK requests a list of objects.

*Description:* The Migration SDK uses the **BatchSize** property to define the page size of each List Request. For more details, check the [Tableau REST API Paginating Results documentation](https://help.tableau.com/current/api/rest_api/en-us/REST/rest_api_concepts_paging.htm).

+#### ContentTypes.BatchPublishingEnabled
+
+*Reference:* [`MigrationSdkOptions.ContentTypes.BatchPublishingEnabled`](xref:Tableau.Migration.Config.ContentTypesOptions.BatchPublishingEnabled).
+
+*Default:* [`MigrationSdkOptions.ContentTypes.Defaults.BATCH_PUBLISHING_ENABLED`](xref:Tableau.Migration.Config.ContentTypesOptions.Defaults.BATCH_PUBLISHING_ENABLED).
+
+*Reload on Edit?:* **Yes**. The update will apply next time the Migration SDK starts migrating a given content type.
+
+*Description:* The Migration SDK uses the **BatchPublishingEnabled** property to select the publishing mode for a given content type. It is disabled by default, in which case the SDK publishes content with an individual REST API call per item. When enabled, the SDK publishes that content type in batches of items, which is supported only for some content types.
+
+Supported content types:
+
+- [User](xref:Tableau.Migration.Content.IUser), via the [Import Users to Site from CSV](https://help.tableau.com/current/api/rest_api/en-us/REST/rest_api_ref_users_and_groups.htm#import_users_to_site_from_csv) method.

### MigrationParallelism
+
*Reference:* [`MigrationSdkOptions.MigrationParallelism`](xref:Tableau.Migration.Config.MigrationSdkOptions#Tableau_Migration_Config_MigrationSdkOptions_MigrationParallelism).

*Default:* [`MigrationSdkOptions.Defaults.MIGRATION_PARALLELISM`](xref:Tableau.Migration.Config.MigrationSdkOptions.Defaults#Tableau_Migration_Config_MigrationSdkOptions_Defaults_MIGRATION_PARALLELISM).

@@ -79,12 +158,12 @@ When writting a python application, configuration values are set via environment

*Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK publishes a new batch.

-*Description:* The Migration SDK uses [two methods](advanced_config/hooks/index.md#hook-execution-flow) to publish the content to a destination server: the **bulk process**, where a single call to the API will push multiple items to the server, and the **individual process**, where it publishes a single item with a single call to the API. This configuration only applies to the **individual process**. The SDK uses the **MigrationParallelism** property to define the number of parallel tasks migrating the same type of content simultaneosly.
+*Description:* The Migration SDK uses [two methods](advanced_config/hooks/index.md#hook-execution-flow) to publish the content to a destination server: the **bulk process**, where a single call to the API will push multiple items to the server, and the **individual process**, where it publishes a single item with a single call to the API. This configuration only applies to the **individual process**. The SDK uses the **MigrationParallelism** property to define the number of parallel tasks migrating the same type of content simultaneously.
> [!WARNING] > There are [concurrency limits in REST APIs on Tableau Cloud](https://kb.tableau.com/articles/issue/concurrency-limits-in-rest-apis-on-tableau-cloud). The current default configuration is the balance between performance without blocking too many resources to the migration process. - ### Files.DisableFileEncryption + *Reference:* [`FileOptions.DisableFileEncryption`](xref:Tableau.Migration.Config.FileOptions#Tableau_Migration_Config_FileOptions_DisableFileEncryption). *Default:* [`FileOptions.Defaults.DISABLE_FILE_ENCRYPTION`](xref:Tableau.Migration.Config.FileOptions.Defaults#Tableau_Migration_Config_FileOptions_Defaults_DISABLE_FILE_ENCRYPTION). @@ -97,8 +176,8 @@ When writting a python application, configuration values are set via environment > [!CAUTION] > Do not disable file encryption when migrating production content. - ### Files.RootPath + *Reference:* [`FileOptions.RootPath`](xref:Tableau.Migration.Config.FileOptions#Tableau_Migration_Config_FileOptions_RootPath). *Default:* [`FileOptions.Defaults.ROOT_PATH`](xref:Tableau.Migration.Config.FileOptions.Defaults#Tableau_Migration_Config_FileOptions_Defaults_ROOT_PATH). @@ -109,8 +188,8 @@ When writting a python application, configuration values are set via environment *Description:* As part of the migration process, the Migration SDK has to adjust existing references for file-based content types like Workbooks and Data Sources. The SDK has to download and temporarily store the content in the migration machine to be able to read and edit these files. The Migration SDK uses the **RootPath** property to define the location where it will store the temporary files. - ### Network.FileChunkSizeKB + *Reference:* [`NetworkOptions.FileChunkSizeKB`](xref:Tableau.Migration.Config.NetworkOptions#Tableau_Migration_Config_NetworkOptions_FileChunkSizeKB). *Default:* [`NetworkOptions.Defaults.FILE_CHUNK_SIZE_KB`](xref:Tableau.Migration.Config.NetworkOptions.Defaults#Tableau_Migration_Config_NetworkOptions_Defaults_FILE_CHUNK_SIZE_KB). @@ -121,8 +200,8 @@ When writting a python application, configuration values are set via environment *Description:* As part of the migration process, the Migration SDK has to publish file-based content types like Workbooks and Data Sources. Some of these files are very large. The Migration SDK uses the **FileChunkSizeKB** property to split these files into smaller pieces, making the publishing process more reliable. For more details, check the [Tableau REST API Publishing Resources documentation](https://help.tableau.com/current/api/rest_api/en-us/REST/rest_api_concepts_publish.htm). - ### Network.HeadersLoggingEnabled + *Reference:* [`NetworkOptions.HeadersLoggingEnabled`](xref:Tableau.Migration.Config.NetworkOptions#Tableau_Migration_Config_NetworkOptions_HeadersLoggingEnabled). *Default:* [`NetworkOptions.Defaults.LOG_HEADERS_ENABLED`](xref:Tableau.Migration.Config.NetworkOptions.Defaults#Tableau_Migration_Config_NetworkOptions_Defaults_LOG_HEADERS_ENABLED). @@ -133,8 +212,8 @@ When writting a python application, configuration values are set via environment *Description:* Check the [logging article](logging.md) for more details. - ### Network.ContentLoggingEnabled + *Reference:* [`NetworkOptions.ContentLoggingEnabled`](xref:Tableau.Migration.Config.NetworkOptions#Tableau_Migration_Config_NetworkOptions_ContentLoggingEnabled). 
*Default:* [`NetworkOptions.Defaults.LOG_CONTENT_ENABLED`](xref:Tableau.Migration.Config.NetworkOptions.Defaults#Tableau_Migration_Config_NetworkOptions_Defaults_LOG_CONTENT_ENABLED). @@ -145,8 +224,8 @@ When writting a python application, configuration values are set via environment *Description:* Check the [logging article](logging.md) for more details. - ### Network.BinaryContentLoggingEnabled + *Reference:* [`NetworkOptions.BinaryContentLoggingEnabled`](xref:Tableau.Migration.Config.NetworkOptions#Tableau_Migration_Config_NetworkOptions_BinaryContentLoggingEnabled). *Default:* [`NetworkOptions.Defaults.LOG_BINARY_CONTENT_ENABLED`](xref:Tableau.Migration.Config.NetworkOptions.Defaults#Tableau_Migration_Config_NetworkOptions_Defaults_LOG_BINARY_CONTENT_ENABLED). @@ -157,8 +236,8 @@ When writting a python application, configuration values are set via environment *Description:* Check the [logging article](logging.md) for more details. - ### Network.ExceptionsLoggingEnabled + *Reference:* [`NetworkOptions.ExceptionsLoggingEnabled`](xref:Tableau.Migration.Config.NetworkOptions#Tableau_Migration_Config_NetworkOptions_ExceptionsLoggingEnabled). *Default:* [`NetworkOptions.Defaults.LOG_EXCEPTIONS_ENABLED`](xref:Tableau.Migration.Config.NetworkOptions.Defaults#Tableau_Migration_Config_NetworkOptions_Defaults_LOG_EXCEPTIONS_ENABLED). @@ -169,8 +248,8 @@ When writting a python application, configuration values are set via environment *Description:* Check the [logging article](logging.md) for more details. - ### Network.Resilience.RetryEnabled + *Reference:* [`ResilienceOptions.RetryEnabled`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_RetryEnabled). *Default:* [`ResilienceOptions.Defaults.RETRY_ENABLED`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_RETRY_ENABLED). @@ -179,10 +258,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **RetryEnabled** property to define whether it will retry failed requests. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **RetryEnabled** property to define whether it will retry failed requests. ### Network.Resilience.RetryIntervals + *Reference:* [`ResilienceOptions.RetryIntervals`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_RetryIntervals). *Default:* [`ResilienceOptions.Defaults.RETRY_INTERVALS`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_RETRY_INTERVALS). @@ -191,10 +270,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **RetryIntervals** property to define the number of retries and the interval between each retry. 
- +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **RetryIntervals** property to define the number of retries and the interval between each retry. ### Network.Resilience.RetryOverrideResponseCodes + *Reference:* [`ResilienceOptions.RetryOverrideResponseCodes`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_RetryOverrideResponseCodes). *Default:* [`ResilienceOptions.Defaults.RETRY_OVERRIDE_RESPONSE_CODES`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_RETRY_OVERRIDE_RESPONSE_CODES). @@ -203,10 +282,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **RetryOverrideResponseCodes** property to override the default list of error status codes for retries with a specific list of status codes. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **RetryOverrideResponseCodes** property to override the default list of error status codes for retries with a specific list of status codes. ### Network.Resilience.ConcurrentRequestsLimitEnabled + *Reference:* [`ResilienceOptions.ConcurrentRequestsLimitEnabled`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_ConcurrentRequestsLimitEnabled). *Default:* [`ResilienceOptions.Defaults.CONCURRENT_REQUESTS_LIMIT_ENABLED`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_CONCURRENT_REQUESTS_LIMIT_ENABLED). @@ -215,10 +294,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **ConcurrentRequestsLimitEnabled** property to define whether it will limit concurrent requests. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **ConcurrentRequestsLimitEnabled** property to define whether it will limit concurrent requests. ### Network.Resilience.MaxConcurrentRequests + *Reference:* [`ResilienceOptions.MaxConcurrentRequests`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_MaxConcurrentRequests). *Default:* [`ResilienceOptions.Defaults.MAX_CONCURRENT_REQUESTS`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_MAX_CONCURRENT_REQUESTS). @@ -227,10 +306,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. 
The SDK uses the **MaxConcurrentRequests** property to define the maximum quantity of concurrent API requests. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **MaxConcurrentRequests** property to define the maximum quantity of concurrent API requests. ### Network.Resilience.ConcurrentWaitingRequestsOnQueue + *Reference:* [`ResilienceOptions.ConcurrentWaitingRequestsOnQueue`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_ConcurrentWaitingRequestsOnQueue). *Default:* [`ResilienceOptions.Defaults.CONCURRENT_WAITING_REQUESTS_QUEUE`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_CONCURRENT_WAITING_REQUESTS_QUEUE). @@ -239,10 +318,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **ConcurrentWaitingRequestsOnQueue** property to define the quantity of concurrent API requests waiting on queue. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **ConcurrentWaitingRequestsOnQueue** property to define the quantity of concurrent API requests waiting on queue. ### Network.Resilience.ClientThrottleEnabled + *Reference:* [`ResilienceOptions.ClientThrottleEnabled`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_ClientThrottleEnabled). *Default:* [`ResilienceOptions.Defaults.CLIENT_THROTTLE_ENABLED`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_CLIENT_THROTTLE_ENABLED). @@ -251,10 +330,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **ClientThrottleEnabled** property to define whether it will limit requests to a given endpoint on the client side. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **ClientThrottleEnabled** property to define whether it will limit requests to a given endpoint on the client side. ### Network.Resilience.MaxReadRequests + *Reference:* [`ResilienceOptions.MaxReadRequests`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_MaxReadRequests). *Default:* [`ResilienceOptions.Defaults.MAX_READ_REQUESTS`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_MAX_READ_REQUESTS). @@ -263,10 +342,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. 
-*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **MaxReadRequests** property to define the maximum quantity of GET requests on the client side. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **MaxReadRequests** property to define the maximum quantity of GET requests on the client side. ### Network.Resilience.MaxReadRequestsInterval + *Reference:* [`ResilienceOptions.MaxReadRequestsInterval`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_MaxReadRequestsInterval). *Default:* [`ResilienceOptions.Defaults.MAX_READ_REQUESTS_INTERVAL`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_MAX_READ_REQUESTS_INTERVAL). @@ -275,22 +354,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **MaxReadRequestsInterval** property to define the interval for the limit of GET requests on the client side. - - -### Network.Resilience.MaxBurstReadRequests -*Reference:* [`ResilienceOptions.MaxBurstReadRequests`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_MaxBurstReadRequests). - -*Default:* [`ResilienceOptions.Defaults.MAX_BURST_READ_REQUESTS`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_MAX_BURST_READ_REQUESTS). - -*Python Environment Variable:* `MigrationSDK__Network__Resilience__MaxBurstReadRequests` - -*Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. - -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. With only the previous configuration values (`Network.Resilience.MaxReadRequests` and `Network.Resilience.MaxReadRequestsInterval`), the SDK will calculate the minimum interval to complete a single request. Any other request at the same period will be blocked. The SDK uses the **MaxBurstReadRequests** property to define the maximum quantity of GET requests on the calculated period. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **MaxReadRequestsInterval** property to define the interval for the limit of GET requests on the client side. ### Network.Resilience.MaxPublishRequests + *Reference:* [`ResilienceOptions.MaxPublishRequests`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_MaxPublishRequests). *Default:* [`ResilienceOptions.Defaults.MAX_PUBLISH_REQUESTS`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_MAX_PUBLISH_REQUESTS). @@ -299,10 +366,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. 
-*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **MaxPublishRequests** property to define the maximum quantity of non-GET requests on the client side. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **MaxPublishRequests** property to define the maximum quantity of non-GET requests on the client side. ### Network.Resilience.MaxPublishRequestsInterval + *Reference:* [`ResilienceOptions.MaxPublishRequestsInterval`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_MaxPublishRequestsInterval). *Default:* [`ResilienceOptions.Defaults.MAX_PUBLISH_REQUESTS_INTERVAL`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_MAX_PUBLISH_REQUESTS_INTERVAL). @@ -311,22 +378,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **MaxPublishRequestsInterval** property to define the interval for the limit of non-GET requests on the client side. - - -### Network.Resilience.MaxBurstPublishRequests -*Reference:* [`ResilienceOptions.MaxBurstPublishRequests`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_MaxBurstPublishRequests). - -*Default:* [`ResilienceOptions.Defaults.MAX_BURST_PUBLISH_REQUESTS`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_MAX_BURST_PUBLISH_REQUESTS). - -*Python Environment Variable:* `MigrationSDK__Network__Resilience__MaxBurstPublishRequests` - -*Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. - -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. With only the previous configuration values (`Network.Resilience.MaxPublishRequests` and `Network.Resilience.MaxPublishRequestsInterval`), the SDK will calculate the minimum interval to complete a single request. Any other request at the same period will be blocked. The SDK uses the **MaxBurstPublishRequests** property to define the maximum quantity of non-GET requests on the calculated period. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **MaxPublishRequestsInterval** property to define the interval for the limit of non-GET requests on the client side. ### Network.Resilience.ServerThrottleEnabled + *Reference:* [`ResilienceOptions.ServerThrottleEnabled`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_ServerThrottleEnabled). *Default:* [`ResilienceOptions.Defaults.SERVER_THROTTLE_ENABLED`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_SERVER_THROTTLE_ENABLED). @@ -335,10 +390,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. 
The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **ServerThrottleEnabled** property to define whether it will retry requests throttled on the server. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **ServerThrottleEnabled** property to define whether it will retry requests throttled on the server. ### Network.Resilience.ServerThrottleLimitRetries + *Reference:* [`ResilienceOptions.ServerThrottleLimitRetries`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_ServerThrottleLimitRetries). *Default:* [`ResilienceOptions.Defaults.SERVER_THROTTLE_LIMIT_RETRIES`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_SERVER_THROTTLE_LIMIT_RETRIES). @@ -347,10 +402,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **ServerThrottleLimitRetries** property to define whether it will have a limit of retries to a throttled request. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **ServerThrottleLimitRetries** property to define whether it will have a limit of retries to a throttled request. ### Network.Resilience.ServerThrottleRetryIntervals + *Reference:* [`ResilienceOptions.ServerThrottleRetryIntervals`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_ServerThrottleRetryIntervals). *Default:* [`ResilienceOptions.Defaults.SERVER_THROTTLE_RETRY_INTERVALS`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_SERVER_THROTTLE_RETRY_INTERVALS). @@ -359,10 +414,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **ServerThrottleRetryIntervals** property to define the interval between each retry for throttled requests without the 'Retry-After' header. If `ServerThrottleLimitRetries` is enabled, this configuration defines the maximum number of retries. Otherwise, the subsequent retries use the last interval value. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **ServerThrottleRetryIntervals** property to define the interval between each retry for throttled requests without the 'Retry-After' header. If `ServerThrottleLimitRetries` is enabled, this configuration defines the maximum number of retries. Otherwise, the subsequent retries use the last interval value. 
### Network.Resilience.PerRequestTimeout + *Reference:* [`ResilienceOptions.PerRequestTimeout`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_PerRequestTimeout). *Default:* [`ResilienceOptions.Defaults.REQUEST_TIMEOUT`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_REQUEST_TIMEOUT). @@ -371,10 +426,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **PerRequestTimeout** property to define the maximum duration of non-FileTransfer requests. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **PerRequestTimeout** property to define the maximum duration of non-FileTransfer requests. ### Network.Resilience.PerFileTransferRequestTimeout + *Reference:* [`ResilienceOptions.PerFileTransferRequestTimeout`](xref:Tableau.Migration.Config.ResilienceOptions#Tableau_Migration_Config_ResilienceOptions_PerFileTransferRequestTimeout). *Default:* [`ResilienceOptions.Defaults.FILE_TRANSFER_REQUEST_TIMEOUT`](xref:Tableau.Migration.Config.ResilienceOptions.Defaults#Tableau_Migration_Config_ResilienceOptions_Defaults_FILE_TRANSFER_REQUEST_TIMEOUT). @@ -383,10 +438,10 @@ When writting a python application, configuration values are set via environment *Reload on Edit?:* **Yes**. The update will apply the next time the Migration SDK makes a new HTTP request. -*Description:* The Migration SDK uses [Polly](https://github.com/App-vNext/Polly) as a resilience and transient-fault layer. The SDK uses the **PerFileTransferRequestTimeout** property to define the maximum duration of FileTransfer requests. - +*Description:* The Migration SDK uses [Microsoft.Extensions.Http.Resilience](https://learn.microsoft.com/en-us/dotnet/core/resilience) as a resilience and transient-fault layer. The SDK uses the **PerFileTransferRequestTimeout** property to define the maximum duration of FileTransfer requests. ### DefaultPermissionsContentTypes.UrlSegments + *Reference:* [`DefaultPermissionsContentTypeOptions.UrlSegments`](xref:Tableau.Migration.Config.DefaultPermissionsContentTypeOptions#Tableau_Migration_Config_DefaultPermissionsContentTypeOptions_UrlSegments). *Default:* [`DefaultPermissionsContentTypeUrlSegments`](xref:Tableau.Migration.Content.Permissions.DefaultPermissionsContentTypeUrlSegments). @@ -397,8 +452,8 @@ When writting a python application, configuration values are set via environment *Description:* The SDK uses the **UrlSegments** property as a list of types of default permissions of given project. For more details, check the [Query Default Permissions documentation](https://help.tableau.com/current/api/rest_api/en-us/REST/rest_api_ref_permissions.htm#query_default_permissions). - ### Jobs.JobPollRate + *Reference:* [`JobOptions.JobPollRate`](xref:Tableau.Migration.Config.JobOptions#Tableau_Migration_Config_JobOptions_JobPollRate). *Default:* [`JobOptions.Defaults.JOB_POLL_RATE`](xref:Tableau.Migration.Config.JobOptions.Defaults#Tableau_Migration_Config_JobOptions_Defaults_JOB_POLL_RATE). 
@@ -411,8 +466,8 @@ When writting a python application, configuration values are set via environment > [!WARNING] > There is [a limit for querying job status on Tableau Cloud](https://help.tableau.com/current/online/en-us/to_site_capacity.htm#jobs-initiated-by-command-line-and-api-calls). The current default configuration is the balance between performance without blocking too many resources to the migration process. - ### Jobs.JobTimeout + *Reference:* [`JobOptions.JobTimeout`](xref:Tableau.Migration.Config.JobOptions#Tableau_Migration_Config_JobOptions_JobTimeout). *Default:* [`JobOptions.Defaults.JOB_TIMEOUT`](xref:Tableau.Migration.Config.JobOptions.Defaults#Tableau_Migration_Config_JobOptions_Defaults_JOB_TIMEOUT). diff --git a/src/Python/pyproject.toml b/src/Python/pyproject.toml index 780f1bc..4468ecc 100644 --- a/src/Python/pyproject.toml +++ b/src/Python/pyproject.toml @@ -35,7 +35,7 @@ validate-bump = false [tool.hatch.envs.docs] dependencies = [ - "sphinx-markdown-builder==0.6.5" + "sphinx-markdown-builder==0.6.6" ] [tool.hatch.envs.docs.scripts] @@ -45,7 +45,7 @@ docs = "sphinx-build -M markdown .\\Documentation\\ ..\\Documentation\\python\\" detached = true dependencies = [ - "ruff==0.1.11" + "ruff==0.2.2" ] [tool.hatch.envs.lint.scripts] @@ -54,7 +54,7 @@ lint = "ruff ." [tool.hatch.envs.test] dev-mode = false dependencies = [ - "pytest>=7.4.4", + "pytest>=8.0.1", "pytest-cov>=4.1.0", "pytest-env>=1.1.3" ] diff --git a/src/Python/pytest.ini b/src/Python/pytest.ini index 605ca33..7463554 100644 --- a/src/Python/pytest.ini +++ b/src/Python/pytest.ini @@ -6,4 +6,5 @@ pythonpath = src env = - MigrationSDK__BatchSize = 102 \ No newline at end of file + MigrationSDK__ContentTypes__0__Type = User + MigrationSDK__ContentTypes__0__BatchSize = 102 \ No newline at end of file diff --git a/src/Python/tests/test_other.py b/src/Python/tests/test_other.py index cb20115..45d42b2 100644 --- a/src/Python/tests/test_other.py +++ b/src/Python/tests/test_other.py @@ -21,6 +21,7 @@ PyMigrationPlanBuilder) from Tableau.Migration.Config import IConfigReader +from Tableau.Migration.Content import IUser class TestEndToEnd(): def test_main(self): @@ -64,6 +65,6 @@ def test_config(self): services = tableau_migration.migration.get_service_provider() config_reader = tableau_migration.migration.get_service(services, IConfigReader) - batch_size = config_reader.Get().BatchSize + batch_size = config_reader.Get[IUser]().BatchSize assert batch_size==102 \ No newline at end of file diff --git a/src/Tableau.Migration/Api/ContentApiClientBase.cs b/src/Tableau.Migration/Api/ContentApiClientBase.cs index 4c7bf83..4ed42c5 100644 --- a/src/Tableau.Migration/Api/ContentApiClientBase.cs +++ b/src/Tableau.Migration/Api/ContentApiClientBase.cs @@ -53,32 +53,18 @@ public ContentApiClientBase( UserFinder = new(ContentFinderFactory.ForContentType); } - protected async Task FindProjectAsync( - [NotNull] IWithProjectType? response, + protected async Task FindProjectAsync( + [NotNull] T? 
response, + [DoesNotReturnIf(true)] bool throwIfNotFound, CancellationToken cancel) - { - Guard.AgainstNull(response, nameof(response)); - - var project = Guard.AgainstNull(response.Project, () => nameof(response.Project)); - var projectId = Guard.AgainstDefaultValue(project.Id, () => nameof(response.Project.Id)); - - var foundProject = await ProjectFinder.Value.FindByIdAsync(projectId, cancel).ConfigureAwait(false); - - return Guard.AgainstNull(foundProject, nameof(foundProject)); - } + where T : IWithProjectType, INamedContent + => await ContentFinderFactory.FindProjectAsync(response, Logger, SharedResourcesLocalizer, throwIfNotFound, cancel).ConfigureAwait(false); - protected async Task FindOwnerAsync( - [NotNull] IWithOwnerType? response, + protected async Task FindOwnerAsync( + [NotNull] T? response, + [DoesNotReturnIf(true)] bool throwIfNotFound, CancellationToken cancel) - { - Guard.AgainstNull(response, nameof(response)); - - var owner = Guard.AgainstNull(response.Owner, () => nameof(response.Owner)); - var ownerId = Guard.AgainstDefaultValue(owner.Id, () => nameof(response.Owner.Id)); - - var foundOwner = await UserFinder.Value.FindByIdAsync(ownerId, cancel).ConfigureAwait(false); - - return Guard.AgainstNull(foundOwner, nameof(foundOwner)); - } + where T : IWithOwnerType, INamedContent + => await ContentFinderFactory.FindOwnerAsync(response, Logger, SharedResourcesLocalizer, throwIfNotFound, cancel).ConfigureAwait(false); } } diff --git a/src/Tableau.Migration/Api/DataSourcesApiClient.cs b/src/Tableau.Migration/Api/DataSourcesApiClient.cs index 15bf556..33346b1 100644 --- a/src/Tableau.Migration/Api/DataSourcesApiClient.cs +++ b/src/Tableau.Migration/Api/DataSourcesApiClient.cs @@ -101,21 +101,24 @@ public async Task> GetAllPublishedDataSourcesAsync( .WithSorts(new Sort("size", false)) .ForGetRequest() .SendAsync(cancel) - .ToPagedResultAsync(async (r, c) => + .ToPagedResultAsync(async (response, cancel) => { // Take all items. - var results = ImmutableArray.CreateBuilder(r.Items.Length); + var results = ImmutableArray.CreateBuilder(response.Items.Length); - foreach (var item in r.Items) + foreach (var item in response.Items) { // Convert them all to type DataSource. if (item.Project is not null) // Project is null if item is in a personal space { - var project = await FindProjectAsync(item, c).ConfigureAwait(false); - var owner = await FindOwnerAsync(item, c).ConfigureAwait(false); + var project = await FindProjectAsync(item, false, cancel).ConfigureAwait(false); + var owner = await FindOwnerAsync(item, false, cancel).ConfigureAwait(false); + + if (project is null || owner is null) + continue; //Warnings will be logged by prior method calls. + results.Add(new DataSource(item, project, owner)); } - } // Produce immutable list of type IDataSource and return. 
@@ -127,11 +130,7 @@ public async Task> GetAllPublishedDataSourcesAsync( } /// - public async Task> GetDataSourceAsync( - Guid dataSourceId, - IImmutableList connections, - IContentFileHandle dataSourceFile, - CancellationToken cancel) + public async Task> GetDataSourceAsync(Guid dataSourceId, CancellationToken cancel) { var getResult = await RestRequestBuilderFactory .CreateUri($"{UrlPrefix}/{dataSourceId.ToUrlSegment()}") @@ -139,12 +138,12 @@ public async Task> GetDataSourceAsync( .SendAsync(cancel) .ToResultAsync(async (response, cancel) => { - var project = await FindProjectAsync(response.Item, cancel).ConfigureAwait(false); - var owner = await FindOwnerAsync(response.Item, cancel).ConfigureAwait(false); + var dataSource = Guard.AgainstNull(response.Item, () => response.Item); - return (IPublishableDataSource)new PublishableDataSource( - response, project, owner, - connections, dataSourceFile); + var project = await FindProjectAsync(response.Item, true, cancel).ConfigureAwait(false); + var owner = await FindOwnerAsync(response.Item, true, cancel).ConfigureAwait(false); + + return (IDataSourceDetails)new DataSourceDetails(dataSource, project, owner); }, SharedResourcesLocalizer, cancel) .ConfigureAwait(false); @@ -169,11 +168,11 @@ public async Task> DownloadDataSourceAsync( } /// - public async Task> PublishDataSourceAsync(IPublishDataSourceOptions options, CancellationToken cancel) + public async Task> PublishDataSourceAsync(IPublishDataSourceOptions options, CancellationToken cancel) => await _dataSourcePublisher.PublishAsync(options, cancel).ConfigureAwait(false); /// - public async Task> PublishAsync(IPublishableDataSource item, CancellationToken cancel) + public async Task> PublishAsync(IPublishableDataSource item, CancellationToken cancel) { var fileStream = await item.File.OpenReadAsync(cancel).ConfigureAwait(false); @@ -264,11 +263,18 @@ public async Task> PullAsync( * make sure the file is disposed. 
We clean up orphaned * files at the end of the DI scope, but we don't want to * bloat disk usage when we're processing future pages of items.*/ - var publishableDataSourceResult = await file.DisposeOnThrowOrFailureAsync( - async () => await GetDataSourceAsync(contentItem.Id, connectionsResult.Value, file, cancel).ConfigureAwait(false) + var dataSourceResult = await file.DisposeOnThrowOrFailureAsync( + async () => await GetDataSourceAsync(contentItem.Id, cancel).ConfigureAwait(false) ).ConfigureAwait(false); - return publishableDataSourceResult; + if (!dataSourceResult.Success) + { + return dataSourceResult.CastFailure(); + } + + var publishDataSource = new PublishableDataSource(dataSourceResult.Value, connectionsResult.Value, file); + + return Result.Succeeded(publishDataSource); } } diff --git a/src/Tableau.Migration/Api/GroupsApiClient.cs b/src/Tableau.Migration/Api/GroupsApiClient.cs index 3536f80..82a1998 100644 --- a/src/Tableau.Migration/Api/GroupsApiClient.cs +++ b/src/Tableau.Migration/Api/GroupsApiClient.cs @@ -299,7 +299,7 @@ public async Task> PublishAsync(IPublishableGroup item, Cancella public async Task> PullAsync(IGroup contentItem, CancellationToken cancel) { - IPager pager = new GroupUsersResponsePager(this, contentItem.Id, _configReader.Get().BatchSize); + IPager pager = new GroupUsersResponsePager(this, contentItem.Id, _configReader.Get().BatchSize); var result = await pager.GetAllPagesAsync(cancel) .ConfigureAwait(false); diff --git a/src/Tableau.Migration/Api/IContentReferenceFinderFactoryExtensions.cs b/src/Tableau.Migration/Api/IContentReferenceFinderFactoryExtensions.cs index 9957b70..fb08b93 100644 --- a/src/Tableau.Migration/Api/IContentReferenceFinderFactoryExtensions.cs +++ b/src/Tableau.Migration/Api/IContentReferenceFinderFactoryExtensions.cs @@ -14,21 +14,29 @@ // limitations under the License. // +using System; using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Tableau.Migration.Api.Rest; using Tableau.Migration.Api.Rest.Models; using Tableau.Migration.Content; using Tableau.Migration.Content.Search; +using Tableau.Migration.Resources; namespace Tableau.Migration.Api { internal static class IContentReferenceFinderFactoryExtensions { - public static async Task FindProjectAsync( + public static async Task FindProjectAsync( this IContentReferenceFinderFactory finderFactory, - [NotNull] IWithProjectType? response, + [NotNull] T? response, + ILogger logger, + ISharedResourcesLocalizer localizer, + [DoesNotReturnIf(true)] bool throwIfNotFound, CancellationToken cancel) + where T : IWithProjectType, INamedContent { Guard.AgainstNull(response, nameof(response)); @@ -39,13 +47,24 @@ public static async Task FindProjectAsync( var foundProject = await projectFinder.FindByIdAsync(projectId, cancel).ConfigureAwait(false); - return Guard.AgainstNull(foundProject, nameof(foundProject)); + if (foundProject is not null) + return foundProject; + + logger.LogWarning(localizer[SharedResourceKeys.ProjectReferenceNotFoundMessage], response.Project.Name, response.GetType().Name, response.Name); + + return throwIfNotFound + ? throw new InvalidOperationException($"The project with ID {projectId} was not found.") + : null; } - public static async Task FindOwnerAsync( + public static async Task FindOwnerAsync( this IContentReferenceFinderFactory finderFactory, - [NotNull] IWithOwnerType? response, + [NotNull] T? 
response, + ILogger logger, + ISharedResourcesLocalizer localizer, + [DoesNotReturnIf(true)] bool throwIfNotFound, CancellationToken cancel) + where T : IWithOwnerType, INamedContent { Guard.AgainstNull(response, nameof(response)); @@ -56,7 +75,14 @@ public static async Task FindOwnerAsync( var foundOwner = await userFinder.FindByIdAsync(ownerId, cancel).ConfigureAwait(false); - return Guard.AgainstNull(foundOwner, nameof(foundOwner)); + if (foundOwner is not null) + return foundOwner; + + logger.LogWarning(localizer[SharedResourceKeys.OwnerNotFoundMessage], ownerId, response.GetType().Name, response.Name); + + return throwIfNotFound + ? throw new InvalidOperationException($"The owner with ID {ownerId} was not found.") + : null; } } } diff --git a/src/Tableau.Migration/Api/IDataSourcesApiClient.cs b/src/Tableau.Migration/Api/IDataSourcesApiClient.cs index 9a75d7e..4c593a3 100644 --- a/src/Tableau.Migration/Api/IDataSourcesApiClient.cs +++ b/src/Tableau.Migration/Api/IDataSourcesApiClient.cs @@ -15,7 +15,6 @@ // using System; -using System.Collections.Immutable; using System.Threading; using System.Threading.Tasks; using Tableau.Migration.Api.Labels; @@ -23,7 +22,6 @@ using Tableau.Migration.Api.Permissions; using Tableau.Migration.Api.Tags; using Tableau.Migration.Content; -using Tableau.Migration.Content.Files; using Tableau.Migration.Paging; namespace Tableau.Migration.Api @@ -33,7 +31,7 @@ namespace Tableau.Migration.Api /// public interface IDataSourcesApiClient : IPagedListApiClient, - IPublishApiClient, + IPublishApiClient, IPullApiClient, IOwnershipApiClient, ITagsContentApiClient, @@ -58,14 +56,10 @@ Task> GetAllPublishedDataSourcesAsync( /// Gets a data source by the given ID. /// /// The ID to get the data source for. - /// The data source connection metadata. - /// The data source content file. /// A cancellation token to obey. /// The data source result. - Task> GetDataSourceAsync( + Task> GetDataSourceAsync( Guid dataSourceId, - IImmutableList connections, - IContentFileHandle dataSourceFile, CancellationToken cancel); /// @@ -86,7 +80,7 @@ Task> DownloadDataSourceAsync( /// The new data source's details. /// A cancellation token to obey. /// The published data source. - Task> PublishDataSourceAsync( + Task> PublishDataSourceAsync( IPublishDataSourceOptions options, CancellationToken cancel); diff --git a/src/Tableau.Migration/Api/IReadApiClient.cs b/src/Tableau.Migration/Api/IReadApiClient.cs new file mode 100644 index 0000000..d6231ab --- /dev/null +++ b/src/Tableau.Migration/Api/IReadApiClient.cs @@ -0,0 +1,38 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace Tableau.Migration.Api +{ + /// + /// Interface for an API client that can get a content item. + /// + /// The content type. + public interface IReadApiClient + where TContent : class + { + /// + /// Gets the content item by Id. 
+ /// + /// The content item Id to get. + /// A cancellation token to obey. + /// The result of the get operation with the content item. + Task> GetByIdAsync(Guid contentId, CancellationToken cancel); + } +} diff --git a/src/Tableau.Migration/Api/IServiceCollectionExtensions.cs b/src/Tableau.Migration/Api/IServiceCollectionExtensions.cs index 92d960d..c225716 100644 --- a/src/Tableau.Migration/Api/IServiceCollectionExtensions.cs +++ b/src/Tableau.Migration/Api/IServiceCollectionExtensions.cs @@ -47,6 +47,7 @@ internal static IServiceCollection AddMigrationApiClient(this IServiceCollection services.AddSingleton(); services.AddSingleton(); + services.AddSingleton(); //Bootstrap and scope state tracking services. services.AddScoped(); @@ -59,15 +60,15 @@ internal static IServiceCollection AddMigrationApiClient(this IServiceCollection services.AddScoped(); //Main API client. - services.AddTransient(); - services.AddTransient(); - services.AddTransient(); - services.AddTransient(); - services.AddTransient(); - services.AddTransient(); - services.AddTransient(); - services.AddTransient(); - services.AddTransient(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); //API Simulator. services.AddSingleton(); diff --git a/src/Tableau.Migration/Api/ISitesApiClient.cs b/src/Tableau.Migration/Api/ISitesApiClient.cs index d6601ac..f618a6a 100644 --- a/src/Tableau.Migration/Api/ISitesApiClient.cs +++ b/src/Tableau.Migration/Api/ISitesApiClient.cs @@ -95,6 +95,14 @@ public interface ISitesApiClient : IAsyncDisposable, IContentApiClient /// If a list API client for the given content type is not supported. IPagedListApiClient GetListApiClient(); + /// + /// Gets the for the given content type. + /// + /// The content type. + /// The read API client for the given content type, or null if the given content type is not supported. + IReadApiClient? GetReadApiClient() + where TContent : class; + /// /// Gets the for the given content and publish types. /// diff --git a/src/Tableau.Migration/Api/IUsersApiClient.cs b/src/Tableau.Migration/Api/IUsersApiClient.cs index 0f63347..d48d227 100644 --- a/src/Tableau.Migration/Api/IUsersApiClient.cs +++ b/src/Tableau.Migration/Api/IUsersApiClient.cs @@ -28,7 +28,7 @@ namespace Tableau.Migration.Api /// /// Interface for API client user operations. /// - public interface IUsersApiClient : IContentApiClient, IPagedListApiClient, IBatchPublishApiClient, IApiPageAccessor + public interface IUsersApiClient : IContentApiClient, IPagedListApiClient, IBatchPublishApiClient, IApiPageAccessor, IReadApiClient, IPublishApiClient { /// /// Gets the groups belonging to a user. 
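// Illustrative sketch (assumed usage; 'sitesApiClient', 'userId', and 'cancel' are placeholders):
// the new IReadApiClient<TContent> lets callers fetch a single item by ID when the content type
// supports it, instead of paging through the full list.
var readClient = sitesApiClient.GetReadApiClient<IUser>();
if (readClient is not null)
{
    var userResult = await readClient.GetByIdAsync(userId, cancel);
    if (userResult.Success)
    {
        // userResult.Value holds the user; UsersApiClient serves this via GET /users/{id}.
    }
}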
diff --git a/src/Tableau.Migration/Api/IWorkbooksApiClient.cs b/src/Tableau.Migration/Api/IWorkbooksApiClient.cs index de72c11..41ffeac 100644 --- a/src/Tableau.Migration/Api/IWorkbooksApiClient.cs +++ b/src/Tableau.Migration/Api/IWorkbooksApiClient.cs @@ -15,14 +15,12 @@ // using System; -using System.Collections.Immutable; using System.Threading; using System.Threading.Tasks; using Tableau.Migration.Api.Models; using Tableau.Migration.Api.Permissions; using Tableau.Migration.Api.Tags; using Tableau.Migration.Content; -using Tableau.Migration.Content.Files; using Tableau.Migration.Paging; namespace Tableau.Migration.Api @@ -32,7 +30,7 @@ namespace Tableau.Migration.Api /// public interface IWorkbooksApiClient : IPagedListApiClient, - IPublishApiClient, + IPublishApiClient, IPullApiClient, IOwnershipApiClient, ITagsContentApiClient, @@ -53,13 +51,9 @@ public interface IWorkbooksApiClient : /// Gets a workbook by the given ID. /// /// The ID to get the workbook for. - /// The workbook connection metadata. - /// The workbook file. /// A cancellation token to obey. /// The data sorce result. - Task> GetWorkbookAsync(Guid workbookId, - IImmutableList connections, - IContentFileHandle workbookFile, CancellationToken cancel); + Task> GetWorkbookAsync(Guid workbookId, CancellationToken cancel); /// /// Downloads the workbook file for the given ID. @@ -79,7 +73,7 @@ Task> DownloadWorkbookAsync( /// The new workbook's details. /// A cancellation token to obey. /// The published workbook. - Task> PublishWorkbookAsync( + Task> PublishWorkbookAsync( IPublishWorkbookOptions options, CancellationToken cancel); diff --git a/src/Tableau.Migration/Api/JobsApiClient.cs b/src/Tableau.Migration/Api/JobsApiClient.cs index 0cebb12..85ef5ab 100644 --- a/src/Tableau.Migration/Api/JobsApiClient.cs +++ b/src/Tableau.Migration/Api/JobsApiClient.cs @@ -34,6 +34,7 @@ internal class JobsApiClient : ContentApiClientBase, IJobsApiClient { private readonly ITaskDelayer _taskDelayer; private readonly IConfigReader _configReader; + private readonly TimeProvider _timeProvider; public JobsApiClient( IRestRequestBuilderFactory restRequestBuilderFactory, @@ -41,11 +42,13 @@ public JobsApiClient( ILoggerFactory loggerFactory, ITaskDelayer taskDelayer, IConfigReader configReader, - ISharedResourcesLocalizer sharedResourcesLocalizer) + ISharedResourcesLocalizer sharedResourcesLocalizer, + TimeProvider timeProvider) : base(restRequestBuilderFactory, finderFactory, loggerFactory, sharedResourcesLocalizer) { _taskDelayer = taskDelayer; _configReader = configReader; + _timeProvider = timeProvider; } public async Task> GetJobStatusAsync(Guid jobId, CancellationToken cancel) @@ -64,14 +67,14 @@ private static bool IsErrorStatus(IStatusNote statusNote) public async Task WaitForJobAsync(Guid jobId, CancellationToken cancel) { - var startTime = DateTime.UtcNow; + var startTime = _timeProvider.GetUtcNow(); IJob? 
job = null; while (true) { cancel.ThrowIfCancellationRequested(); // Check job waiting timeout - var timeSinceStart = DateTime.UtcNow - startTime; + var timeSinceStart = _timeProvider.GetUtcNow() - startTime; if(timeSinceStart > _configReader.Get().Jobs.JobTimeout) { return Result.Failed(new TimeoutJobException(job, SharedResourcesLocalizer)); diff --git a/src/Tableau.Migration/Api/Publishing/DataSourcePublisher.cs b/src/Tableau.Migration/Api/Publishing/DataSourcePublisher.cs index d63233c..108e8c5 100644 --- a/src/Tableau.Migration/Api/Publishing/DataSourcePublisher.cs +++ b/src/Tableau.Migration/Api/Publishing/DataSourcePublisher.cs @@ -17,6 +17,7 @@ using System.Net.Http; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.Logging; using Tableau.Migration.Api.Models; using Tableau.Migration.Api.Rest; using Tableau.Migration.Api.Rest.Models.Requests; @@ -29,18 +30,20 @@ namespace Tableau.Migration.Api.Publishing { - internal class DataSourcePublisher : FilePublisherBase, IDataSourcePublisher + internal class DataSourcePublisher : FilePublisherBase, IDataSourcePublisher { public DataSourcePublisher( IRestRequestBuilderFactory restRequestBuilderFactory, IContentReferenceFinderFactory finderFactory, IServerSessionProvider sessionProvider, + ILoggerFactory loggerFactory, ISharedResourcesLocalizer sharedResourcesLocalizer, IHttpStreamProcessor httpStreamProcessor) : base( restRequestBuilderFactory, finderFactory, sessionProvider, + loggerFactory, sharedResourcesLocalizer, httpStreamProcessor, RestUrlPrefixes.DataSources) @@ -49,30 +52,31 @@ public DataSourcePublisher( protected override CommitDataSourcePublishRequest BuildCommitRequest(IPublishDataSourceOptions options) => new(options); - protected override async Task> SendCommitRequestAsync( + protected override async Task> SendCommitRequestAsync( IPublishDataSourceOptions options, string uploadSessionId, MultipartContent content, CancellationToken cancel) { - var request = RestRequestBuilderFactory + var result = await RestRequestBuilderFactory .CreateUri(ContentTypeUrlPrefix) .WithQuery("uploadSessionId", uploadSessionId) .WithQuery("datasourceType", options.FileType) .WithQuery("overwrite", options.Overwrite.ToString().ToLower()) .ForPostRequest() - .WithContent(content); + .WithContent(content) + .SendAsync(cancel) + .ToResultAsync(async (response, cancel) => + { + var dataSource = Guard.AgainstNull(response.Item, () => response.Item); - var result = await request - .SendAsync(cancel) - .ToResultAsync(async (r, c) => - { - var project = await ContentFinderFactory.FindProjectAsync(r.Item, c).ConfigureAwait(false); - var owner = await ContentFinderFactory.FindOwnerAsync(r.Item, c).ConfigureAwait(false); - return new DataSource(r.Item, project, owner); - }, - SharedResourcesLocalizer, - cancel) + var project = await ContentFinderFactory.FindProjectAsync(dataSource, Logger, SharedResourcesLocalizer, true, cancel).ConfigureAwait(false); + var owner = await ContentFinderFactory.FindOwnerAsync(dataSource, Logger, SharedResourcesLocalizer, true, cancel).ConfigureAwait(false); + + return new DataSourceDetails(dataSource, project, owner); + }, + SharedResourcesLocalizer, + cancel) .ConfigureAwait(false); return result; diff --git a/src/Tableau.Migration/Api/Publishing/FilePublisherBase.cs b/src/Tableau.Migration/Api/Publishing/FilePublisherBase.cs index 071815c..d64317f 100644 --- a/src/Tableau.Migration/Api/Publishing/FilePublisherBase.cs +++ b/src/Tableau.Migration/Api/Publishing/FilePublisherBase.cs @@ -19,6 +19,7 
@@ using System.Net.Http; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.Logging; using Tableau.Migration.Api.Models; using Tableau.Migration.Api.Rest; using Tableau.Migration.Api.Rest.Models.Requests; @@ -40,6 +41,7 @@ internal abstract class FilePublisherBase /// Interface for data source publisher classes. /// - public interface IDataSourcePublisher : IFilePublisher + public interface IDataSourcePublisher : IFilePublisher { } } diff --git a/src/Tableau.Migration/Api/Publishing/IWorkbookPublisher.cs b/src/Tableau.Migration/Api/Publishing/IWorkbookPublisher.cs index 0cf1595..88e969b 100644 --- a/src/Tableau.Migration/Api/Publishing/IWorkbookPublisher.cs +++ b/src/Tableau.Migration/Api/Publishing/IWorkbookPublisher.cs @@ -22,6 +22,6 @@ namespace Tableau.Migration.Api.Publishing /// /// Interface for workbook publisher classes. /// - public interface IWorkbookPublisher : IFilePublisher + public interface IWorkbookPublisher : IFilePublisher { } } diff --git a/src/Tableau.Migration/Api/Publishing/WorkbookPublisher.cs b/src/Tableau.Migration/Api/Publishing/WorkbookPublisher.cs index 4bba2cc..23fa01a 100644 --- a/src/Tableau.Migration/Api/Publishing/WorkbookPublisher.cs +++ b/src/Tableau.Migration/Api/Publishing/WorkbookPublisher.cs @@ -14,11 +14,10 @@ // limitations under the License. // -using System.Collections.Immutable; -using System.Linq; using System.Net.Http; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.Logging; using Tableau.Migration.Api.Models; using Tableau.Migration.Api.Rest; using Tableau.Migration.Api.Rest.Models.Requests; @@ -31,18 +30,20 @@ namespace Tableau.Migration.Api.Publishing { - internal class WorkbookPublisher : FilePublisherBase, IWorkbookPublisher + internal class WorkbookPublisher : FilePublisherBase, IWorkbookPublisher { public WorkbookPublisher( IRestRequestBuilderFactory restRequestBuilderFactory, IContentReferenceFinderFactory finderFactory, IServerSessionProvider sessionProvider, + ILoggerFactory loggerFactory, ISharedResourcesLocalizer sharedResourcesLocalizer, IHttpStreamProcessor httpStreamProcessor) : base( restRequestBuilderFactory, finderFactory, sessionProvider, + loggerFactory, sharedResourcesLocalizer, httpStreamProcessor, RestUrlPrefixes.Workbooks) @@ -51,34 +52,32 @@ public WorkbookPublisher( protected override CommitWorkbookPublishRequest BuildCommitRequest(IPublishWorkbookOptions options) => new(options); - protected override async Task> SendCommitRequestAsync( + protected override async Task> SendCommitRequestAsync( IPublishWorkbookOptions options, string uploadSessionId, MultipartContent content, CancellationToken cancel) { - var request = RestRequestBuilderFactory + var result = await RestRequestBuilderFactory .CreateUri(ContentTypeUrlPrefix) .WithQuery("uploadSessionId", uploadSessionId) .WithQuery("skipConnectionCheck", options.SkipConnectionCheck.ToString().ToLower()) .WithQuery("workbookType", options.FileType) .WithQuery("overwrite", options.Overwrite.ToString().ToLower()) .ForPostRequest() - .WithContent(content); + .WithContent(content) + .SendAsync(cancel) + .ToResultAsync(async (response, cancel) => + { + var workbook = Guard.AgainstNull(response.Item, () => response.Item); - var result = await request - .SendAsync(cancel) - .ToResultAsync(async (r, c) => - { - var project = await ContentFinderFactory.FindProjectAsync(r.Item, c).ConfigureAwait(false); - var owner = await ContentFinderFactory.FindOwnerAsync(r.Item, c).ConfigureAwait(false); - var views = 
r.Item.Views.Select(v => (IView)new View(v, project, r.Item.Name)) - .ToImmutableArray(); + var project = await ContentFinderFactory.FindProjectAsync(workbook, Logger, SharedResourcesLocalizer, true, cancel).ConfigureAwait(false); + var owner = await ContentFinderFactory.FindOwnerAsync(workbook, Logger, SharedResourcesLocalizer, true, cancel).ConfigureAwait(false); - return new ResultWorkbook(r.Item, project, owner, views); - }, - SharedResourcesLocalizer, - cancel) + return new WorkbookDetails(workbook, project, owner); + }, + SharedResourcesLocalizer, + cancel) .ConfigureAwait(false); return result; diff --git a/src/Tableau.Migration/Api/Rest/Models/IDataSourceDetailsType.cs b/src/Tableau.Migration/Api/Rest/Models/IDataSourceDetailsType.cs new file mode 100644 index 0000000..4d741bd --- /dev/null +++ b/src/Tableau.Migration/Api/Rest/Models/IDataSourceDetailsType.cs @@ -0,0 +1,29 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +namespace Tableau.Migration.Api.Rest.Models +{ + /// + /// Interface for a data source response object with extended information, from a GET query for example. + /// + public interface IDataSourceDetailsType : IDataSourceType + { + /// + /// Gets the certification note for the response. + /// + string? CertificationNote { get; } + } +} diff --git a/src/Tableau.Migration/Api/Rest/Models/IWithTagTypes.cs b/src/Tableau.Migration/Api/Rest/Models/IWithTagTypes.cs index 171d405..65fdbb3 100644 --- a/src/Tableau.Migration/Api/Rest/Models/IWithTagTypes.cs +++ b/src/Tableau.Migration/Api/Rest/Models/IWithTagTypes.cs @@ -24,6 +24,6 @@ public interface IWithTagTypes /// /// Gets the tags for the response. /// - ITagType[]? Tags { get; internal set; } + ITagType[] Tags { get; internal set; } } } diff --git a/src/Tableau.Migration/Api/Rest/Models/IViewsType.cs b/src/Tableau.Migration/Api/Rest/Models/IWorkbookDetailsType.cs similarity index 75% rename from src/Tableau.Migration/Api/Rest/Models/IViewsType.cs rename to src/Tableau.Migration/Api/Rest/Models/IWorkbookDetailsType.cs index 1b94a6b..825314e 100644 --- a/src/Tableau.Migration/Api/Rest/Models/IViewsType.cs +++ b/src/Tableau.Migration/Api/Rest/Models/IWorkbookDetailsType.cs @@ -17,13 +17,13 @@ namespace Tableau.Migration.Api.Rest.Models { /// - /// Interface for a view REST response. + /// Interface for a workbook response object with extended information, from a GET query for example. /// - public interface IViewsType : IRestIdentifiable, INamedContent, IWithOwnerType, IWithWorkbookReferenceType + public interface IWorkbookDetailsType : IWorkbookType { /// - /// The content URL for the response. + /// Gets the views for the response. /// - public string? 
ContentUrl { get; } + IViewReferenceType[] Views { get; } } } diff --git a/src/Tableau.Migration/Api/Rest/Models/Requests/AddTagsRequest.cs b/src/Tableau.Migration/Api/Rest/Models/Requests/AddTagsRequest.cs index cbbb6c6..4703669 100644 --- a/src/Tableau.Migration/Api/Rest/Models/Requests/AddTagsRequest.cs +++ b/src/Tableau.Migration/Api/Rest/Models/Requests/AddTagsRequest.cs @@ -40,7 +40,7 @@ public AddTagsRequest() /// The tags to populate the request with. public AddTagsRequest(IEnumerable tags) { - Tags = tags.Select(tag => new TagType(tag)).ToArray() ?? Array.Empty(); + Tags = tags.Select(tag => new TagType(tag)).ToArray(); } /// @@ -48,7 +48,8 @@ public AddTagsRequest(IEnumerable tags) /// [XmlArray("tags")] [XmlArrayItem("tag")] - public TagType[]? Tags { get; set; } = Array.Empty(); + public TagType[] Tags { get; set; } = Array.Empty(); + /// public class TagType : ITagType diff --git a/src/Tableau.Migration/Api/Rest/Models/Responses/DataSourceResponse.cs b/src/Tableau.Migration/Api/Rest/Models/Responses/DataSourceResponse.cs index 7d442c9..471662e 100644 --- a/src/Tableau.Migration/Api/Rest/Models/Responses/DataSourceResponse.cs +++ b/src/Tableau.Migration/Api/Rest/Models/Responses/DataSourceResponse.cs @@ -37,7 +37,7 @@ public class DataSourceResponse : TableauServerResponse [XmlType("datasource")] - public class DataSourceType : IDataSourceType + public class DataSourceType : IDataSourceDetailsType { /// /// Creates a new object. @@ -74,6 +74,12 @@ internal DataSourceType(IDataSourceType response) response.Tags.Select(tag => new TagType(tag)).ToArray(); } + internal DataSourceType(IDataSourceDetailsType response) + : this((IDataSourceType)response) + { + CertificationNote = response.CertificationNote; + } + /// /// Gets or sets the ID for the response. /// @@ -167,13 +173,13 @@ internal DataSourceType(IDataSourceType response) /// [XmlArray("tags")] [XmlArrayItem("tag")] - public TagType[]? Tags { get; set; } = Array.Empty(); + public TagType[] Tags { get; set; } = Array.Empty(); /// - ITagType[]? IWithTagTypes.Tags + ITagType[] IWithTagTypes.Tags { get => Tags; - set => Tags = value?.Select(t => new TagType(t)).ToArray(); + set => Tags = value.Select(t => new TagType(t)).ToArray(); } #region - Object Specific Types - diff --git a/src/Tableau.Migration/Api/Rest/Models/Responses/DataSourcesResponse.cs b/src/Tableau.Migration/Api/Rest/Models/Responses/DataSourcesResponse.cs index 52344fe..daa3bbc 100644 --- a/src/Tableau.Migration/Api/Rest/Models/Responses/DataSourcesResponse.cs +++ b/src/Tableau.Migration/Api/Rest/Models/Responses/DataSourcesResponse.cs @@ -127,13 +127,13 @@ public class DataSourceType : IDataSourceType /// [XmlArray("tags")] [XmlArrayItem("tag")] - public TagType[]? Tags { get; set; } = Array.Empty(); + public TagType[] Tags { get; set; } = Array.Empty(); /// - ITagType[]? 
IWithTagTypes.Tags + ITagType[] IWithTagTypes.Tags { get => Tags; - set => Tags = value?.Select(t => new TagType(t)).ToArray(); + set => Tags = value.Select(t => new TagType(t)).ToArray(); } #region - Object Specific Types - diff --git a/src/Tableau.Migration/Net/Policies/IHttpPolicyBuilder.cs b/src/Tableau.Migration/Api/Rest/Models/Responses/UserResponse.cs similarity index 55% rename from src/Tableau.Migration/Net/Policies/IHttpPolicyBuilder.cs rename to src/Tableau.Migration/Api/Rest/Models/Responses/UserResponse.cs index 4b24d2d..2103357 100644 --- a/src/Tableau.Migration/Net/Policies/IHttpPolicyBuilder.cs +++ b/src/Tableau.Migration/Api/Rest/Models/Responses/UserResponse.cs @@ -14,22 +14,21 @@ // limitations under the License. // -using System.Net.Http; -using Polly; +using System.Xml.Serialization; -namespace Tableau.Migration.Net.Policies +namespace Tableau.Migration.Api.Rest.Models.Responses { /// - /// Abstraction build a policy that apply for a given http request. - /// + /// Class representing a user response. + /// See Tableau API Reference for documentation. /// - public interface IHttpPolicyBuilder + [XmlType(XmlTypeName)] + public class UserResponse : TableauServerResponse { /// - /// Build and return the policy that apply for the http request. + /// Gets or sets the user for the response. /// - /// The http request that we will request the policies - /// A async policy that apply to a given http response of a http request. - IAsyncPolicy? Build(HttpRequestMessage httpRequest); + [XmlElement("user")] + public override UsersResponse.UserType? Item { get; set; } } -} +} \ No newline at end of file diff --git a/src/Tableau.Migration/Api/Rest/Models/Responses/UsersResponse.cs b/src/Tableau.Migration/Api/Rest/Models/Responses/UsersResponse.cs index 142a0cc..48c7627 100644 --- a/src/Tableau.Migration/Api/Rest/Models/Responses/UsersResponse.cs +++ b/src/Tableau.Migration/Api/Rest/Models/Responses/UsersResponse.cs @@ -20,8 +20,8 @@ namespace Tableau.Migration.Api.Rest.Models.Responses { /// - /// Class representing a user response. - /// See Tableau API Reference for documentation. + /// Class representing a users response. + /// See Tableau API Reference for documentation. /// [XmlType(XmlTypeName)] public class UsersResponse : PagedTableauServerResponse diff --git a/src/Tableau.Migration/Api/Rest/Models/Responses/ViewResponse.cs b/src/Tableau.Migration/Api/Rest/Models/Responses/ViewResponse.cs index 4ef9624..c46a85c 100644 --- a/src/Tableau.Migration/Api/Rest/Models/Responses/ViewResponse.cs +++ b/src/Tableau.Migration/Api/Rest/Models/Responses/ViewResponse.cs @@ -78,13 +78,13 @@ public class ViewType : IViewType /// [XmlArray("tags")] [XmlArrayItem("tag")] - public TagType[]? Tags { get; set; } = Array.Empty(); + public TagType[] Tags { get; set; } = Array.Empty(); /// - ITagType[]? IWithTagTypes.Tags + ITagType[] IWithTagTypes.Tags { get => Tags; - set => Tags = value?.Select(t => new TagType(t)).ToArray(); + set => Tags = value.Select(t => new TagType(t)).ToArray(); } diff --git a/src/Tableau.Migration/Api/Rest/Models/Responses/WorkbookResponse.cs b/src/Tableau.Migration/Api/Rest/Models/Responses/WorkbookResponse.cs index 670aed8..2bbf439 100644 --- a/src/Tableau.Migration/Api/Rest/Models/Responses/WorkbookResponse.cs +++ b/src/Tableau.Migration/Api/Rest/Models/Responses/WorkbookResponse.cs @@ -35,7 +35,7 @@ public class WorkbookResponse : TableauServerResponse /// Class representing a REST API workbook response. 
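// Illustrative note (assumed usage; 'workbook' and 'ProcessTag' are placeholders): Tags is now
// declared non-nullable and defaults to an empty array, so consumers can enumerate it directly.
foreach (var tag in workbook.Tags)
{
    // No null check needed; an untagged workbook yields an empty array rather than null.
    ProcessTag(tag);
}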
/// - public class WorkbookType : IWorkbookType + public class WorkbookType : IWorkbookDetailsType { /// /// Creates a new object. @@ -67,11 +67,13 @@ internal WorkbookType(IWorkbookType response) Owner = new OwnerType(response.Owner); } - Tags = response.Tags.IsNullOrEmpty() ? - Array.Empty() : - response.Tags.Select(tag => new TagType(tag)).ToArray(); + Tags = response.Tags.Select(tag => new TagType(tag)).ToArray(); } + internal WorkbookType(IWorkbookDetailsType response) + : this((IWorkbookType)response) + { } + /// [XmlAttribute("id")] public Guid Id { get; set; } @@ -148,13 +150,13 @@ internal WorkbookType(IWorkbookType response) /// [XmlArray("tags")] [XmlArrayItem("tag")] - public TagType[]? Tags { get; set; } = Array.Empty(); + public TagType[] Tags { get; set; } = Array.Empty(); /// - ITagType[]? IWithTagTypes.Tags + ITagType[] IWithTagTypes.Tags { get => Tags; - set => Tags = value?.Select(t => new TagType(t)).ToArray(); + set => Tags = value.Select(t => new TagType(t)).ToArray(); } /// @@ -164,6 +166,9 @@ internal WorkbookType(IWorkbookType response) [XmlArrayItem("view")] public ViewReferenceType[] Views { get; set; } = Array.Empty(); + /// + IViewReferenceType[] IWorkbookDetailsType.Views => Views; + /// /// Gets or sets the data acceleration config for the response. /// diff --git a/src/Tableau.Migration/Api/Rest/Models/Responses/WorkbooksResponse.cs b/src/Tableau.Migration/Api/Rest/Models/Responses/WorkbooksResponse.cs index f52b919..8183201 100644 --- a/src/Tableau.Migration/Api/Rest/Models/Responses/WorkbooksResponse.cs +++ b/src/Tableau.Migration/Api/Rest/Models/Responses/WorkbooksResponse.cs @@ -114,13 +114,13 @@ public class WorkbookType : IWorkbookType /// [XmlArray("tags")] [XmlArrayItem("tag")] - public TagType[]? Tags { get; set; } = Array.Empty(); + public TagType[] Tags { get; set; } = Array.Empty(); /// - ITagType[]? 
IWithTagTypes.Tags + ITagType[] IWithTagTypes.Tags { get => Tags; - set => Tags = value?.Select(t => new TagType(t)).ToArray(); + set => Tags = value.Select(t => new TagType(t)).ToArray(); } #region - Object Specific Types - diff --git a/src/Tableau.Migration/Api/Rest/Models/RestProjectBuilder.cs b/src/Tableau.Migration/Api/Rest/Models/RestProjectBuilder.cs index 098278c..dedffef 100644 --- a/src/Tableau.Migration/Api/Rest/Models/RestProjectBuilder.cs +++ b/src/Tableau.Migration/Api/Rest/Models/RestProjectBuilder.cs @@ -66,18 +66,18 @@ public static async Task FindProjectOwnerAsync( var owner = Guard.AgainstNull(restProject.Owner, () => nameof(restProject.Owner)); var ownerId = Guard.AgainstDefaultValue(owner.Id, () => nameof(restProject.Owner.Id)); - var foundOwner = await userFinder.FindByIdAsync(restProject.Owner.Id, cancel).ConfigureAwait(false); + var foundOwner = await userFinder.FindByIdAsync(ownerId, cancel).ConfigureAwait(false); if (foundOwner is null) { if (restProject.Name is not null && _systemProjectNames.Contains(restProject.Name)) { - return new ContentReferenceStub(restProject.Owner.Id, string.Empty, Constants.SystemUserLocation); + return new ContentReferenceStub(ownerId, string.Empty, Constants.SystemUserLocation); } throw new ArgumentNullException( nameof(restProject), - $"The project's owner ID {restProject.Owner.Id} is not valid."); + $"The project's owner ID {ownerId} is not valid."); } return foundOwner; diff --git a/src/Tableau.Migration/Api/Search/ApiContentReferenceFinderFactory.cs b/src/Tableau.Migration/Api/Search/ApiContentReferenceFinderFactory.cs index f3c9db3..d0e5722 100644 --- a/src/Tableau.Migration/Api/Search/ApiContentReferenceFinderFactory.cs +++ b/src/Tableau.Migration/Api/Search/ApiContentReferenceFinderFactory.cs @@ -39,7 +39,7 @@ public ApiContentReferenceFinderFactory(IServiceProvider services) /// public IContentReferenceFinder ForContentType() - where TContent : IContentReference + where TContent : class, IContentReference { var cache = _services.GetRequiredService>(); diff --git a/src/Tableau.Migration/Api/Search/BulkApiContentReferenceCache.cs b/src/Tableau.Migration/Api/Search/BulkApiContentReferenceCache.cs index ef76350..c87076c 100644 --- a/src/Tableau.Migration/Api/Search/BulkApiContentReferenceCache.cs +++ b/src/Tableau.Migration/Api/Search/BulkApiContentReferenceCache.cs @@ -31,9 +31,10 @@ namespace Tableau.Migration.Api.Search /// /// The content type. public class BulkApiContentReferenceCache : ContentReferenceCacheBase - where TContent : IContentReference + where TContent : class, IContentReference { private readonly IPagedListApiClient _apiListClient; + private readonly IReadApiClient? _apiReadClient; private readonly IConfigReader _configReader; /// @@ -41,16 +42,25 @@ public class BulkApiContentReferenceCache : ContentReferenceCacheBase /// /// An API client. /// A config reader. - public BulkApiContentReferenceCache(ISitesApiClient apiClient, IConfigReader configReader) + public BulkApiContentReferenceCache(ISitesApiClient? apiClient, IConfigReader configReader) { + Guard.AgainstNull(apiClient, () => apiClient); + _apiListClient = apiClient.GetListApiClient(); + _apiReadClient = apiClient.GetReadApiClient(); _configReader = configReader; } /// /// Gets the configured batch size. /// - protected int BatchSize => _configReader.Get().BatchSize; + protected int BatchSize => _configReader.Get().BatchSize; + + /// + /// Called after an item is loaded into the cache from the store. + /// + /// The item that was loaded. 
+ protected virtual void ItemLoaded(TContent item) { } /// /// Loads all content items from the API client. @@ -66,6 +76,11 @@ protected async ValueTask> LoadAllAsync(Cancel return Enumerable.Empty(); } + foreach (var item in listResult.Value) + { + ItemLoaded(item); + } + return listResult.Value.Select(i => new ContentReferenceStub(i)); } @@ -76,5 +91,24 @@ protected override async ValueTask> SearchAsyn /// protected override async ValueTask> SearchAsync(Guid searchId, CancellationToken cancel) => await LoadAllAsync(cancel).ConfigureAwait(false); + + /// + protected override async Task IndividualSearchAsync(Guid searchId, CancellationToken cancel) + { + if (_apiReadClient is null) + { + return await base.IndividualSearchAsync(searchId, cancel).ConfigureAwait(false); + } + + var result = await _apiReadClient.GetByIdAsync(searchId, cancel).ConfigureAwait(false); + + if (result is not null && + result.Success) + { + return new ContentReferenceStub(result.Value!); + } + + return null; + } } } diff --git a/src/Tableau.Migration/Api/Simulation/Rest/Api/TagsRestApiSimulatorBase.cs b/src/Tableau.Migration/Api/Simulation/Rest/Api/TagsRestApiSimulatorBase.cs index cc905e2..e3a14ec 100644 --- a/src/Tableau.Migration/Api/Simulation/Rest/Api/TagsRestApiSimulatorBase.cs +++ b/src/Tableau.Migration/Api/Simulation/Rest/Api/TagsRestApiSimulatorBase.cs @@ -89,7 +89,7 @@ private Func BuildDeleteTagsDel var content = getContent(data).FirstOrDefault(ds => ds.Id == contentId.Value); - var existingTags = content?.Tags?.ToList(); + var existingTags = content?.Tags.ToList(); if (content == null || existingTags == null) { @@ -125,7 +125,7 @@ private Func BuildDeleteTagsDel return new List(); } - var tags = content.Tags?.ToList() ?? new List(); + var tags = content.Tags.ToList(); var requestContent = request.GetTableauServerRequest(); diff --git a/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestCommitDataSourceUploadResponseBuilder.cs b/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestCommitDataSourceUploadResponseBuilder.cs index b44747e..6c0c515 100644 --- a/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestCommitDataSourceUploadResponseBuilder.cs +++ b/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestCommitDataSourceUploadResponseBuilder.cs @@ -82,7 +82,7 @@ protected override DataSourceResponse.DataSourceType BuildContent( Id = commitDataSource.Project?.Id ?? Data.DefaultProject.Id }; - targetDataSource.Tags = null; + targetDataSource.Tags = []; return targetDataSource; } diff --git a/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestCommitWorkbookUploadResponseBuilder.cs b/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestCommitWorkbookUploadResponseBuilder.cs index c806c28..abdfc25 100644 --- a/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestCommitWorkbookUploadResponseBuilder.cs +++ b/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestCommitWorkbookUploadResponseBuilder.cs @@ -110,7 +110,7 @@ protected override WorkbookResponse.WorkbookType BuildContent( Id = commitWorkbook.Project?.Id ?? 
Data.DefaultProject.Id }; - targetWorkbook.Tags = null; + targetWorkbook.Tags = []; // Update connection data foreach (var connection in commitRequest.Workbook.Connections) diff --git a/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestUserAddResponseBuilder.cs b/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestUserAddResponseBuilder.cs index ecca63e..abcbff1 100644 --- a/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestUserAddResponseBuilder.cs +++ b/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestUserAddResponseBuilder.cs @@ -19,6 +19,7 @@ using System.Net.Http; using System.Threading; using System.Threading.Tasks; +using Tableau.Migration.Api.Rest.Models; using Tableau.Migration.Api.Rest.Models.Requests; using Tableau.Migration.Api.Rest.Models.Responses; using Tableau.Migration.Api.Simulation.Rest.Net.Requests; @@ -47,13 +48,17 @@ public RestUserAddResponseBuilder(TableauData data, IHttpContentSerializer seria $"Request must be of the type {nameof(AddUserToSiteRequest.UserType)} and not null", ""); } - + var siteRole = SiteRoleMapping.GetSiteRole( + SiteRoleMapping.GetAdministratorLevel(addUserRequest?.SiteRole), + SiteRoleMapping.GetLicenseLevel(addUserRequest?.SiteRole), + SiteRoleMapping.GetPublishingCapability(addUserRequest?.SiteRole)); var user = new UsersResponse.UserType() { Id = Guid.NewGuid(), Name = addUserRequest?.Name, AuthSetting = addUserRequest?.AuthSetting, - SiteRole = addUserRequest?.SiteRole + SiteRole = siteRole, + Domain = TableauData.GetUserDomain(addUserRequest?.Name) ?? new() { Name = Data.DefaultDomain } }; Data.AddUser(user); @@ -65,7 +70,7 @@ public RestUserAddResponseBuilder(TableauData data, IHttpContentSerializer seria Id = user.Id, AuthSetting = user.AuthSetting, Name = user.Name, - SiteRole = user.SiteRole + SiteRole = siteRole } }, HttpStatusCode.Created)); diff --git a/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestUserImportResponseBuilder.cs b/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestUserImportResponseBuilder.cs index 6f35df9..71132f6 100644 --- a/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestUserImportResponseBuilder.cs +++ b/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestUserImportResponseBuilder.cs @@ -66,12 +66,10 @@ protected static void AddUsers(TableauData data, StreamContent csvStreamContent) private static UsersResponse.UserType ParseUser(TableauData data, string[] columnData) { var username = columnData[0]; - string fullName = columnData[2]; string licenseLevel = columnData[3]; string adminLevel = columnData[4]; string publishingCapability = columnData[5]; - string email = columnData[6]; - + if (!bool.TryParse(publishingCapability, out bool canPublish)) { throw new ArgumentException( @@ -83,8 +81,6 @@ private static UsersResponse.UserType ParseUser(TableauData data, string[] colum { Id = Guid.NewGuid(), Name = username, - FullName = fullName, - Email = email, SiteRole = SiteRoleMapping.GetSiteRole(adminLevel, licenseLevel, canPublish), Domain = TableauData.GetUserDomain(username) ?? 
new() { Name = data.DefaultDomain } }; diff --git a/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestUserUpdateResponseBuilder.cs b/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestUserUpdateResponseBuilder.cs index 66ef217..451f8ff 100644 --- a/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestUserUpdateResponseBuilder.cs +++ b/src/Tableau.Migration/Api/Simulation/Rest/Net/Responses/RestUserUpdateResponseBuilder.cs @@ -21,6 +21,7 @@ using System.Net.Http; using System.Threading; using System.Threading.Tasks; +using Tableau.Migration.Api.Rest.Models; using Tableau.Migration.Api.Rest.Models.Requests; using Tableau.Migration.Api.Rest.Models.Responses; using Tableau.Migration.Api.Simulation.Rest.Net.Requests; @@ -39,9 +40,9 @@ public RestUserUpdateResponseBuilder( protected static UsersResponse.UserType? UpdateUser(HttpRequestMessage request, ICollection allUsers) { - var foundUser = allUsers.FirstOrDefault(u => u.Id == request.GetRequestIdFromUri()); + var oldUser = allUsers.FirstOrDefault(u => u.Id == request.GetRequestIdFromUri()); - if (foundUser is null) + if (oldUser is null) return null; var newUser = request.GetTableauServerRequest()?.User; @@ -49,7 +50,10 @@ public RestUserUpdateResponseBuilder( if (newUser is null) return null; - var oldUser = allUsers.First(u => u == foundUser); + var siteRole = SiteRoleMapping.GetSiteRole( + SiteRoleMapping.GetAdministratorLevel(newUser.SiteRole), + SiteRoleMapping.GetLicenseLevel(newUser.SiteRole), + SiteRoleMapping.GetPublishingCapability(newUser.SiteRole)); if (!string.IsNullOrEmpty(newUser.FullName)) oldUser.FullName = newUser.FullName; @@ -57,8 +61,8 @@ public RestUserUpdateResponseBuilder( if (!string.IsNullOrEmpty(newUser.Email)) oldUser.Email = newUser.Email; - if (!string.IsNullOrEmpty(newUser.SiteRole)) - oldUser.SiteRole = newUser.SiteRole; + if (!string.IsNullOrEmpty(siteRole)) + oldUser.SiteRole = siteRole; if (!string.IsNullOrEmpty(newUser.AuthSetting)) oldUser.AuthSetting = newUser.AuthSetting; diff --git a/src/Tableau.Migration/Api/Simulation/TableauApiSimulatorFactory.cs b/src/Tableau.Migration/Api/Simulation/TableauApiSimulatorFactory.cs index 7fef04f..a9f38a4 100644 --- a/src/Tableau.Migration/Api/Simulation/TableauApiSimulatorFactory.cs +++ b/src/Tableau.Migration/Api/Simulation/TableauApiSimulatorFactory.cs @@ -15,6 +15,8 @@ // using System; +using Tableau.Migration.Api.Rest.Models; +using Tableau.Migration.Api.Rest.Models.Responses; using Tableau.Migration.Net; namespace Tableau.Migration.Api.Simulation @@ -47,7 +49,16 @@ public TableauApiSimulator GetOrCreate(Uri serverUrl) return existing; } - var simulator = new TableauApiSimulator(serverUrl, _serializer); + var simulatorUser = new UsersResponse.UserType() + { + Id = Guid.NewGuid(), + Domain = new() { Name = "local" }, + Name = "simulatorAdmin", + FullName = "Simulator Admin", + SiteRole = SiteRoles.SiteAdministratorCreator + }; + + var simulator = new TableauApiSimulator(serverUrl, _serializer, simulatorUser); _simulators.AddOrUpdate(simulator); return simulator; } diff --git a/src/Tableau.Migration/Api/SitesApiClient.cs b/src/Tableau.Migration/Api/SitesApiClient.cs index f447619..be43527 100644 --- a/src/Tableau.Migration/Api/SitesApiClient.cs +++ b/src/Tableau.Migration/Api/SitesApiClient.cs @@ -79,10 +79,11 @@ public SitesApiClient( } .ToImmutableDictionary(InheritedTypeComparer.Instance); - private TApiClient GetApiClientFromContentType(Type contentType) + private TApiClient? 
GetApiClientFromContentType(Type contentType) + where TApiClient : class { //TODO: validate content type, this needs heavy unit testing since we do runtime casting. - return (TApiClient)_contentTypeAccessors[contentType](this); + return _contentTypeAccessors[contentType](this) as TApiClient; } #region - ISitesApiClient Implementation - @@ -108,44 +109,51 @@ private TApiClient GetApiClientFromContentType(Type contentType) /// public IViewsApiClient Views { get; } + /// + public IReadApiClient? GetReadApiClient() + where TContent : class + => GetApiClientFromContentType>(typeof(TContent)); + /// public IPagedListApiClient GetListApiClient() - => GetApiClientFromContentType>(typeof(TContent)); + => GetApiClientFromContentType>(typeof(TContent))!; /// public IPullApiClient GetPullApiClient() where TPublish : class - => GetApiClientFromContentType>(typeof(TContent)); + => GetApiClientFromContentType>(typeof(TContent))!; /// public IPublishApiClient GetPublishApiClient() where TPublishResult : class, IContentReference - => GetApiClientFromContentType>(typeof(TPublish)); //TODO: Better resolution logic based on content/publish types + => GetApiClientFromContentType>(typeof(TPublish))!; //TODO: Better resolution logic based on content/publish types /// public IBatchPublishApiClient GetBatchPublishApiClient() - => GetApiClientFromContentType>(typeof(TPublish)); //TODO: Better resolution logic based on content/publish types + => GetApiClientFromContentType>(typeof(TPublish))!; //TODO: Better resolution logic based on content/publish types /// public IPermissionsApiClient GetPermissionsApiClient() - => GetApiClientFromContentType(typeof(TContent)).Permissions; //TODO: Better resolution logic based on content/publish types + => GetApiClientFromContentType(typeof(TContent))!.Permissions; //TODO: Better resolution logic based on content/publish types + /// public IPermissionsApiClient GetPermissionsApiClient(Type type) - => GetApiClientFromContentType(type).Permissions; //TODO: Better resolution logic based on content/publish types + => GetApiClientFromContentType(type)!.Permissions; //TODO: Better resolution logic based on content/publish types /// public ITagsApiClient GetTagsApiClient() where TContent : IWithTags - => GetApiClientFromContentType(typeof(TContent)).Tags; //TODO: Better resolution logic based on content/publish types + => GetApiClientFromContentType(typeof(TContent))!.Tags; //TODO: Better resolution logic based on content/publish types /// public IOwnershipApiClient GetOwnershipApiClient() where TContent : IWithOwner - => GetApiClientFromContentType(typeof(TContent)); //TODO: Better resolution logic based on content/publish types + => GetApiClientFromContentType(typeof(TContent))!; //TODO: Better resolution logic based on content/publish types + /// public IConnectionsApiClient GetConnectionsApiClient() where TContent : IWithConnections - => GetApiClientFromContentType(typeof(TContent)); //TODO: Better resolution logic based on content/publish types + => GetApiClientFromContentType(typeof(TContent))!; //TODO: Better resolution logic based on content/publish types private async Task> GetSiteAsync(Func setKey, CancellationToken cancel) { diff --git a/src/Tableau.Migration/Api/UsersApiClient.cs b/src/Tableau.Migration/Api/UsersApiClient.cs index 5099b83..76bdad8 100644 --- a/src/Tableau.Migration/Api/UsersApiClient.cs +++ b/src/Tableau.Migration/Api/UsersApiClient.cs @@ -38,8 +38,6 @@ namespace Tableau.Migration.Api { internal sealed class UsersApiClient : ContentApiClientBase, 
IUsersApiClient { - private readonly static Encoding _dataEncoding = Encoding.UTF8; - private readonly IJobsApiClient _jobs; public UsersApiClient( @@ -53,6 +51,7 @@ public UsersApiClient( _jobs = jobs; } + /// public async Task> GetUserGroupsAsync(Guid userId, int pageNumber, int pageSize, CancellationToken cancel) { var getUserGroupsResult = await RestRequestBuilderFactory @@ -119,7 +118,7 @@ public async Task> ImportUsersAsync(IEnumerable users xmlRequest = new ImportUsersFromCsvRequest(requestUsers); } - var payloadContent = new StringContent(xmlRequest.ToXml(), _dataEncoding, MediaTypes.Xml.MediaType!); + var payloadContent = new StringContent(xmlRequest.ToXml(), Constants.DefaultEncoding, MediaTypes.Xml.MediaType!); // Create the multipart content. var csvDataStreamContent = new StreamContent(csvStream); @@ -183,6 +182,7 @@ public async Task> UpdateUserAsync(Guid id, #region - IPagedListApiClient Implementation - + /// public IPager GetPager(int pageSize) => new ApiListPager(this, pageSize); #endregion @@ -218,7 +218,7 @@ internal static Stream GenerateUserCsvStream(IEnumerable items) item.AppendCsvLine(csv); } - var csvStream = new MemoryStream(_dataEncoding.GetBytes(csv.ToString())); + var csvStream = new MemoryStream(Constants.DefaultEncoding.GetBytes(csv.ToString())); csvStream.Seek(0, SeekOrigin.Begin); @@ -229,9 +229,56 @@ internal static Stream GenerateUserCsvStream(IEnumerable items) #region - IApiPageAccessor Implementation - + /// public async Task> GetPageAsync(int pageNumber, int pageSize, CancellationToken cancel) => await GetAllUsersAsync(pageNumber, pageSize, cancel).ConfigureAwait(false); #endregion + + #region - IReadApiClient Implementation - + + /// + public async Task> GetByIdAsync(Guid contentId, CancellationToken cancel) + { + var getUserResult = await RestRequestBuilderFactory + .CreateUri($"/users/{contentId.ToUrlSegment()}") + .ForGetRequest() + .SendAsync(cancel) + .ToResultAsync(r => new User(r.Item!), SharedResourcesLocalizer) + .ConfigureAwait(false); + + return getUserResult; + } + + #endregion + + #region - IPublishApiClient Implementation - + + public async Task> PublishAsync(IUser item, CancellationToken cancel) + { + var result = await AddUserAsync( + item.Name, + item.SiteRole, + item.AuthenticationType, + cancel) + .ConfigureAwait(false); + + if (!result.Success) + { + return Result.Failed(result.Errors); + } + + return Result.Succeeded( + new User( + result.Value.Id, + null, + null, + result.Value.Name, + null, + result.Value.SiteRole, + result.Value.AuthSetting)); + } + + #endregion } } diff --git a/src/Tableau.Migration/Api/WorkbooksApiClient.cs b/src/Tableau.Migration/Api/WorkbooksApiClient.cs index 5400f81..64b35d1 100644 --- a/src/Tableau.Migration/Api/WorkbooksApiClient.cs +++ b/src/Tableau.Migration/Api/WorkbooksApiClient.cs @@ -109,12 +109,14 @@ public async Task> GetAllWorkbooksAsync( // Convert them all to type Workbook. if (item.Project is not null) // Project is null if item is in a personal space { - var project = await FindProjectAsync(item, c).ConfigureAwait(false); - var owner = await FindOwnerAsync(item, c).ConfigureAwait(false); + var project = await FindProjectAsync(item, false, c).ConfigureAwait(false); + var owner = await FindOwnerAsync(item, false, c).ConfigureAwait(false); + + if (project is null || owner is null) + continue; //Warnings will be logged by prior method calls. results.Add(new Workbook(item, project, owner)); } - } // Produce immutable list of type IWorkbook and return. 
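// Illustrative sketch (assumed usage; 'usersApiClient', 'sourceUser', and 'cancel' are placeholders):
// with IPublishApiClient implemented above, a single user can now go through the generic publish
// path in addition to the CSV batch import.
var publishResult = await usersApiClient.PublishAsync(sourceUser, cancel);
if (!publishResult.Success)
{
    // publishResult.Errors carries the failure details from the underlying AddUserAsync call.
}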
@@ -126,24 +128,20 @@ public async Task> GetAllWorkbooksAsync( } /// - public async Task> GetWorkbookAsync( - Guid workbookId, - IImmutableList connections, - IContentFileHandle workbookFile, - CancellationToken cancel) + public async Task> GetWorkbookAsync(Guid workbookId, CancellationToken cancel) { var getResult = await RestRequestBuilderFactory .CreateUri($"{UrlPrefix}/{workbookId.ToUrlSegment()}") .ForGetRequest() .SendAsync(cancel) - .ToResultAsync(async (r, c) => + .ToResultAsync(async (response, cancel) => { - var project = await FindProjectAsync(r.Item, c).ConfigureAwait(false); - var owner = await FindOwnerAsync(r.Item, c).ConfigureAwait(false); - var views = r.Item.Views.Select(v => (IView)new View(v, project, r.Item.Name)) - .ToImmutableArray(); + var workbook = Guard.AgainstNull(response.Item, () => response.Item); - return (IPublishableWorkbook)new PublishableWorkbook(r, project, owner, connections, views, workbookFile); + var project = await FindProjectAsync(workbook, true, cancel).ConfigureAwait(false); + var owner = await FindOwnerAsync(workbook, true, cancel).ConfigureAwait(false); + + return (IWorkbookDetails)new WorkbookDetails(workbook, project, owner); }, SharedResourcesLocalizer, cancel) .ConfigureAwait(false); @@ -167,13 +165,13 @@ public async Task> DownloadWorkbookAsync( } /// - public async Task> PublishWorkbookAsync( + public async Task> PublishWorkbookAsync( IPublishWorkbookOptions options, CancellationToken cancel) => await _workbookPublisher.PublishAsync(options, cancel).ConfigureAwait(false); /// - public async Task> PublishAsync(IPublishableWorkbook item, CancellationToken cancel) + public async Task> PublishAsync(IPublishableWorkbook item, CancellationToken cancel) { var fileStream = await item.File.OpenReadAsync(cancel).ConfigureAwait(false); await using (fileStream) @@ -266,11 +264,18 @@ public async Task> PullAsync( * make sure the file is disposed. We clean up orphaned * files at the end of the DI scope, but we don't want to * bloat disk usage when we're processing future pages of items.*/ - var publishableResult = await file.DisposeOnThrowOrFailureAsync( - async () => await GetWorkbookAsync(contentItem.Id, connectionsResult.Value, file, cancel).ConfigureAwait(false) + var workbookResult = await file.DisposeOnThrowOrFailureAsync( + async () => await GetWorkbookAsync(contentItem.Id, cancel).ConfigureAwait(false) ).ConfigureAwait(false); - return publishableResult; + if (!workbookResult.Success) + { + return workbookResult.CastFailure(); + } + + var publishWorkbook = new PublishableWorkbook(workbookResult.Value, connectionsResult.Value, file); + + return Result.Succeeded(publishWorkbook); } } diff --git a/src/Tableau.Migration/ComparerBase.cs b/src/Tableau.Migration/ComparerBase.cs index 92bc0c1..4e72d36 100644 --- a/src/Tableau.Migration/ComparerBase.cs +++ b/src/Tableau.Migration/ComparerBase.cs @@ -22,8 +22,9 @@ namespace Tableau.Migration { internal abstract class ComparerBase : - IComparer, IEqualityComparer, - IComparer>, IEqualityComparer> + IComparer, IEqualityComparer, + IComparer?>, IEqualityComparer?> + where T : notnull { public int Compare(IEnumerable? x, IEnumerable? y) { @@ -78,7 +79,7 @@ private static int Compare(TItem? x, TItem? y, Func co public int Compare(T? x, T? y) => Compare(x, y, CompareItems); - public abstract int CompareItems(T x, T y); + protected abstract int CompareItems(T x, T y); public bool Equals(T? x, T? 
y) => Compare(x, y) == 0; diff --git a/src/Tableau.Migration/Config/ConfigReader.cs b/src/Tableau.Migration/Config/ConfigReader.cs index b2735bc..40706c3 100644 --- a/src/Tableau.Migration/Config/ConfigReader.cs +++ b/src/Tableau.Migration/Config/ConfigReader.cs @@ -14,7 +14,10 @@ // limitations under the License. // +using System; +using System.Linq; using Microsoft.Extensions.Options; +using Tableau.Migration.Engine.Pipelines; namespace Tableau.Migration.Config { @@ -32,6 +35,8 @@ public class ConfigReader : IConfigReader public ConfigReader(IOptionsMonitor optionsMonitor) { _optionsMonitor = optionsMonitor; + ValidateOptions(Get()); + _optionsMonitor.OnChange(ValidateOptions); } /// @@ -44,5 +49,42 @@ public MigrationSdkOptions Get() { return _optionsMonitor.Get(nameof(MigrationSdkOptions)); } + + /// + public ContentTypesOptions Get() + where TContent : IContentReference + { + var contentType = ServerToCloudMigrationPipeline.ContentTypes + .FirstOrDefault(c => c.ContentType.Name == typeof(TContent).Name); + + if (contentType != null) + { + var configKey = contentType.GetConfigKey(); + var contentTypeOptions = Get() + .ContentTypes + .FirstOrDefault(o => string.Equals(o.Type, configKey, StringComparison.OrdinalIgnoreCase)); + + return contentTypeOptions ?? new ContentTypesOptions() + { + Type = configKey + }; + } + + throw new NotSupportedException( + $"Content type specific options are not supported for {typeof(TContent)} since it is not supported for migration."); + } + + internal void ValidateOptions(MigrationSdkOptions? options) + { + options ??= Get(); + + foreach (var byContentTypeName in options.ContentTypes.GroupBy(v => v.Type)) + { + if (byContentTypeName.First().IsContentTypeValid() && byContentTypeName.Count() > 1) + { + throw new InvalidOperationException($"Duplicate content type names found in {(nameof(MigrationSdkOptions.ContentTypes))} section of the configuration."); + } + } + } } } diff --git a/src/Tableau.Migration/Config/ContentTypesOptions.cs b/src/Tableau.Migration/Config/ContentTypesOptions.cs new file mode 100644 index 0000000..46f8598 --- /dev/null +++ b/src/Tableau.Migration/Config/ContentTypesOptions.cs @@ -0,0 +1,80 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Linq; +using Tableau.Migration.Engine.Pipelines; + +namespace Tableau.Migration.Config +{ + /// + /// Class for configuration settings specific to content types. + /// + public class ContentTypesOptions + { + /// + /// Defaults for migration options. + /// + public static class Defaults + { + /// + /// The default migration batch size. + /// + public const int BATCH_SIZE = 100; + + /// + /// The default migration batch publishing flag. + /// + public const bool BATCH_PUBLISHING_ENABLED = false; + } + + /// + /// The name of the content type Ex: User. + /// + public string Type { get; set; } = string.Empty; + + /// + /// Gets or sets the migration batch size. 
+ /// + public int BatchSize + { + get => _batchSize ?? Defaults.BATCH_SIZE; + set => _batchSize = value; + } + private int? _batchSize; + + /// + /// Gets or sets whether batch publishing is enabled for the supported content types. Default: disabled.
+ /// Important: This option is only available to . + /// For more details, check the configuration article. + ///
+ public bool BatchPublishingEnabled + { + get => _batchPublishingEnabled ?? Defaults.BATCH_PUBLISHING_ENABLED; + set => _batchPublishingEnabled = value; + } + private bool? _batchPublishingEnabled; + + /// + /// Checks if the content type in is valid. + /// + /// + public bool IsContentTypeValid() + => ServerToCloudMigrationPipeline + .ContentTypes + .Any(c => string.Equals(c.GetConfigKey(), Type, StringComparison.OrdinalIgnoreCase)); + } +} \ No newline at end of file diff --git a/src/Tableau.Migration/Config/IConfigReader.cs b/src/Tableau.Migration/Config/IConfigReader.cs index ab47338..1ebbb27 100644 --- a/src/Tableau.Migration/Config/IConfigReader.cs +++ b/src/Tableau.Migration/Config/IConfigReader.cs @@ -26,5 +26,12 @@ public interface IConfigReader ///
/// MigrationSdkOptions Get(); + + /// + /// Get the current for a + /// given content type . + /// + /// Options specific to the content type. + ContentTypesOptions Get() where TContent : IContentReference; } } \ No newline at end of file diff --git a/src/Tableau.Migration/Config/MigrationSDKOptions.cs b/src/Tableau.Migration/Config/MigrationSDKOptions.cs index a225b01..19fe4d0 100644 --- a/src/Tableau.Migration/Config/MigrationSDKOptions.cs +++ b/src/Tableau.Migration/Config/MigrationSDKOptions.cs @@ -14,6 +14,8 @@ // limitations under the License. // +using System.Collections.Generic; + namespace Tableau.Migration.Config { /// @@ -26,11 +28,6 @@ public class MigrationSdkOptions /// public static class Defaults { - /// - /// The default migration batch size. - /// - public const int BATCH_SIZE = 100; - /// /// The default number of items to migrate in parallel. /// @@ -38,14 +35,9 @@ public static class Defaults } /// - /// Gets or sets the migration batch size. - /// - public int BatchSize - { - get => _batchSize ?? Defaults.BATCH_SIZE; - set => _batchSize = value; - } - private int? _batchSize; + /// Get or Set content type specific options. + ///
+ public List ContentTypes { get; set; } = new(); /// /// Gets or sets the number of items to migrate in parallel. diff --git a/src/Tableau.Migration/Config/ResilienceOptions.cs b/src/Tableau.Migration/Config/ResilienceOptions.cs index 72c4333..7ae56dc 100644 --- a/src/Tableau.Migration/Config/ResilienceOptions.cs +++ b/src/Tableau.Migration/Config/ResilienceOptions.cs @@ -104,13 +104,6 @@ public static class Defaults /// public readonly static TimeSpan MAX_READ_REQUESTS_INTERVAL = TimeSpan.FromHours(1); - /// - /// The default Burst Read Requests for the Client Throttle. Default is 20. - /// Without the burst configuration, it will be allowed just one request for each 90 milliseconds (1 hour / 40000 requests). - /// This override the configuration and allow 20 requests in an interval of 90 milliseconds. - /// - public const int MAX_BURST_READ_REQUESTS = 20; - /// /// The default Maximum Publish Requests for the Client Throttle. Default is 5500. /// @@ -121,13 +114,6 @@ public static class Defaults /// public readonly static TimeSpan MAX_PUBLISH_REQUESTS_INTERVAL = TimeSpan.FromDays(1); - /// - /// The default Burst Publish Requests for the Client Throttle. Default is 20. - /// Without the burst configuration, it will be allowed just one request for each 16 seconds (1 day / 5500 requests). - /// This overrides the configuration and allow 20 requests in an interval of 16 seconds. - /// - public const int MAX_BURST_PUBLISH_REQUESTS = 20; - /// /// The default Per-Request Timeout. Default is 30 minutes. /// @@ -247,22 +233,6 @@ public TimeSpan MaxReadRequestsInterval } private TimeSpan? _maxReadRequestsInterval; - /// - /// Limits the amount of burst read requests in for the Client Throttle. - /// Without the burst configuration, it follows this formula: - /// - /// 1 request for each ( / ) - /// - /// This overrides the configuration and allow more requests for the calculated interval. - /// The default value is 20. - /// - public int MaxBurstReadRequests - { - get => _maxBurstReadRequests ?? Defaults.MAX_BURST_READ_REQUESTS; - set => _maxBurstReadRequests = value; - } - private int? _maxBurstReadRequests; - /// /// Limits the amount of push requests in for the Client Throttle. /// The default value is 5500. @@ -285,22 +255,6 @@ public TimeSpan MaxPublishRequestsInterval } private TimeSpan? _maxPushRequestsInterval; - /// - /// Limits the amount of burst push requests in for the Client Throttle. - /// Without the burst configuration, it follows this formula - /// - /// 1 request for each ( / ) - /// - /// This overrides the configuration and allow more requests for the calculated interval. - /// The default value is 20. - /// - public int MaxBurstPublishRequests - { - get => _maxBurstPushRequests ?? Defaults.MAX_BURST_PUBLISH_REQUESTS; - set => _maxBurstPushRequests = value; - } - private int? _maxBurstPushRequests; - /// /// Gets or sets whether to wait and retry on server throttle responses. /// The default value is enabled. diff --git a/src/Tableau.Migration/Constants.cs b/src/Tableau.Migration/Constants.cs index 36373d7..da556ae 100644 --- a/src/Tableau.Migration/Constants.cs +++ b/src/Tableau.Migration/Constants.cs @@ -14,6 +14,8 @@ // limitations under the License. // +using System.Text; + namespace Tableau.Migration { /// @@ -81,6 +83,11 @@ public static class Constants /// public const string AdminInsightsTableauOnlineProjectName = "Admin Insights (Tableau Online)"; + /// + /// The default text encoding. 
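For context, a minimal sketch of how the new per-content-type options behave, grounded in the defaults shown above; the literal values in the comments follow from those defaults, while the commented Get<IUser>() call assumes an IConfigReader instance (named configReader here) resolved from dependency injection.

using System;
using Tableau.Migration.Config;

// Defaults apply until a value is explicitly set.
var userOptions = new ContentTypesOptions { Type = "User" };
Console.WriteLine(userOptions.BatchSize);              // 100 (Defaults.BATCH_SIZE)
Console.WriteLine(userOptions.BatchPublishingEnabled); // False (Defaults.BATCH_PUBLISHING_ENABLED)

userOptions.BatchSize = 250;
Console.WriteLine(userOptions.BatchSize);              // 250

// True only when Type matches the config key of a content type supported by
// the server-to-cloud pipeline (compared case-insensitively).
Console.WriteLine(userOptions.IsContentTypeValid());

// Per-type options can also be read through the new IConfigReader overload,
// falling back to a default instance when no matching section is configured:
// var batchSize = configReader.Get<IUser>().BatchSize;

Note that ConfigReader.ValidateOptions, wired to the options monitor above, throws an InvalidOperationException when two configured entries share the same valid Type value.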
+ /// + public static readonly Encoding DefaultEncoding = Encoding.UTF8; + #endregion #region - Internal Constants - diff --git a/src/Tableau.Migration/Content/DataSource.cs b/src/Tableau.Migration/Content/DataSource.cs index ab6b361..fa86398 100644 --- a/src/Tableau.Migration/Content/DataSource.cs +++ b/src/Tableau.Migration/Content/DataSource.cs @@ -14,6 +14,7 @@ // limitations under the License. // +using System; using System.Collections.Generic; using Tableau.Migration.Api.Rest.Models; @@ -22,49 +23,75 @@ namespace Tableau.Migration.Content internal class DataSource : ContainerContentBase, IDataSource { public DataSource(IDataSourceType response, IContentReference project, IContentReference owner) + : this( + response.Id, + response.Name, + response.ContentUrl, + response.Description, + response.CreatedAt, + response.UpdatedAt, + response.EncryptExtracts, + response.HasExtracts, + response.IsCertified, + response.UseRemoteQueryAgent, + response.WebpageUrl, + response.Tags.ToTagList(t => new Tag(t)), + project, + owner) + { } + + public DataSource(IDataSource dataSource) + : this( + dataSource.Id, + dataSource.Name, + dataSource.ContentUrl, + dataSource.Description, + dataSource.CreatedAt, + dataSource.UpdatedAt, + dataSource.EncryptExtracts, + dataSource.HasExtracts, + dataSource.IsCertified, + dataSource.UseRemoteQueryAgent, + dataSource.WebpageUrl, + dataSource.Tags, + ((IContainerContent)dataSource).Container, + dataSource.Owner) + { } + + private DataSource( + Guid id, + string? name, + string? contentUrl, + string? description, + string? createdAt, + string? updatedAt, + bool encryptExtracts, + bool hasExtracts, + bool isCertified, + bool useRemoteQueryAgent, + string? webpageUrl, + IList tags, + IContentReference project, + IContentReference owner) : base(project) { - Id = Guard.AgainstDefaultValue(response.Id, () => response.Id); - Name = Guard.AgainstNullEmptyOrWhiteSpace(response.Name, () => response.Name); - ContentUrl = Guard.AgainstNullEmptyOrWhiteSpace(response.ContentUrl, () => response.ContentUrl); + Id = Guard.AgainstDefaultValue(id, () => id); + Name = Guard.AgainstNullEmptyOrWhiteSpace(name, () => name); + ContentUrl = Guard.AgainstNullEmptyOrWhiteSpace(contentUrl, () => contentUrl); - Description = response.Description ?? string.Empty; - CreatedAt = response.CreatedAt ?? string.Empty; - UpdatedAt = response.UpdatedAt ?? string.Empty; + Description = description ?? string.Empty; + CreatedAt = createdAt ?? string.Empty; + UpdatedAt = updatedAt ?? string.Empty; - EncryptExtracts = response.EncryptExtracts; - HasExtracts = response.HasExtracts; - IsCertified = response.IsCertified; - UseRemoteQueryAgent = response.UseRemoteQueryAgent; + EncryptExtracts = encryptExtracts; + HasExtracts = hasExtracts; + IsCertified = isCertified; + UseRemoteQueryAgent = useRemoteQueryAgent; - WebpageUrl = response.WebpageUrl ?? string.Empty; + WebpageUrl = webpageUrl ?? string.Empty; Owner = owner; - Tags = response.Tags.ToTagList(t => new Tag(t)); - - Location = project.Location.Append(Name); - } - - public DataSource(IDataSource item, IContentReference project, IContentReference owner) - : base(project) - { - Id = Guard.AgainstDefaultValue(item.Id, () => item.Id); - Name = Guard.AgainstNullEmptyOrWhiteSpace(item.Name, () => item.Name); - ContentUrl = Guard.AgainstNullEmptyOrWhiteSpace(item.ContentUrl, () => item.ContentUrl); - - Description = item.Description ?? string.Empty; - CreatedAt = item.CreatedAt ?? string.Empty; - UpdatedAt = item.UpdatedAt ?? 
string.Empty; - - EncryptExtracts = item.EncryptExtracts; - HasExtracts = item.HasExtracts; - IsCertified = item.IsCertified; - UseRemoteQueryAgent = item.UseRemoteQueryAgent; - - WebpageUrl = item.WebpageUrl ?? string.Empty; - - Owner = owner; - Tags = item.Tags.ToTagList(t => new Tag(t)); + Tags = tags; Location = project.Location.Append(Name); } diff --git a/src/Tableau.Migration/Content/DataSourceDetails.cs b/src/Tableau.Migration/Content/DataSourceDetails.cs new file mode 100644 index 0000000..d057156 --- /dev/null +++ b/src/Tableau.Migration/Content/DataSourceDetails.cs @@ -0,0 +1,38 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using Tableau.Migration.Api.Rest.Models; + +namespace Tableau.Migration.Content +{ + internal class DataSourceDetails : DataSource, IDataSourceDetails + { + public string CertificationNote { get; } + + public DataSourceDetails(IDataSourceDetailsType response, IContentReference project, IContentReference owner) + : base(response, project, owner) + { + CertificationNote = response.CertificationNote ?? String.Empty; + } + + public DataSourceDetails(IDataSourceDetails dataSource) + : base(dataSource) + { + CertificationNote = dataSource.CertificationNote; + } + } +} diff --git a/src/Tableau.Migration/Content/Files/DirectoryContentFileStore.cs b/src/Tableau.Migration/Content/Files/DirectoryContentFileStore.cs index 041094d..25bc92f 100644 --- a/src/Tableau.Migration/Content/Files/DirectoryContentFileStore.cs +++ b/src/Tableau.Migration/Content/Files/DirectoryContentFileStore.cs @@ -48,6 +48,11 @@ public class DirectoryContentFileStore : IContentFileStore /// protected IContentFilePathResolver PathResolver { get; } + /// + /// Gets the memory stream manager. + /// + protected IMemoryStreamManager MemoryStreamManager { get; } + /// /// Gets the content files being tracked by the file store. /// @@ -64,12 +69,18 @@ public class DirectoryContentFileStore : IContentFileStore /// The file system to use. /// The path resolver to use. /// A config reader to get the root path and other options from. + /// The memory stream manager to user. /// The relative directory name to use for this file store. - public DirectoryContentFileStore(IFileSystem fileSystem, IContentFilePathResolver pathResolver, - IConfigReader configReader, string storeDirectoryName) + public DirectoryContentFileStore( + IFileSystem fileSystem, + IContentFilePathResolver pathResolver, + IConfigReader configReader, + IMemoryStreamManager memoryStreamManager, + string storeDirectoryName) { FileSystem = fileSystem; PathResolver = pathResolver; + MemoryStreamManager = memoryStreamManager; TrackedFilePaths = new(); var config = configReader.Get(); @@ -135,7 +146,7 @@ public Task OpenWriteAsync(IContentFileHandle handle, Cancel public async Task GetTableauFileEditorAsync(IContentFileHandle handle, CancellationToken cancel, bool? 
zipFormatOverride = null) => await _openTableauFileEditors.GetOrAddAsync( handle.Path, - async path => (ITableauFileEditor)await TableauFileEditor.OpenAsync(handle, cancel, zipFormatOverride).ConfigureAwait(false)) + async path => (ITableauFileEditor)await TableauFileEditor.OpenAsync(handle, MemoryStreamManager, cancel, zipFormatOverride).ConfigureAwait(false)) .ConfigureAwait(false); /// diff --git a/src/Tableau.Migration/Content/Files/EncryptedFileStore.cs b/src/Tableau.Migration/Content/Files/EncryptedFileStore.cs index 8cdf993..bb3142f 100644 --- a/src/Tableau.Migration/Content/Files/EncryptedFileStore.cs +++ b/src/Tableau.Migration/Content/Files/EncryptedFileStore.cs @@ -15,7 +15,6 @@ // using System; -using System.IO; using System.Security.Cryptography; using System.Threading; using System.Threading.Tasks; @@ -84,40 +83,6 @@ public EncryptedFileStore(ISymmetricEncryptionFactory encryptionFactory, _encryptionKey = encryption.Key; } - #region - Encryption Helper Methods - - - private static async ValueTask ReadInitializationVectorAsync(Stream encryptedStream, - int ivLength, CancellationToken cancel) - { - byte[] iv = new byte[ivLength]; - int leftToRead = iv.Length; - int totalBytesRead = 0; - - while (leftToRead > 0) - { - int bytesRead = await encryptedStream.ReadAsync(iv.AsMemory(totalBytesRead, leftToRead), cancel) - .ConfigureAwait(false); - - if (bytesRead is 0) - { - break; - } - - totalBytesRead += bytesRead; - leftToRead -= bytesRead; - } - - return iv; - } - - private static async ValueTask WriteInitializationVector(Stream stream, - byte[] iv, CancellationToken cancel) - { - await stream.WriteAsync(iv, cancel).ConfigureAwait(false); - } - - #endregion - #region - IContentFileStore Implementation - /// @@ -150,11 +115,11 @@ public async Task OpenReadAsync(IContentFileHandle handle, C { var encryption = _encryptionFactory.Create(); //Disposed by file stream wrapper. - var iv = await ReadInitializationVectorAsync(stream.Content, encryption.IV.Length, cancel) + var iv = await stream.Content.ReadInitializationVectorAsync(encryption.IV.Length, cancel) .ConfigureAwait(false); var transform = encryption.CreateDecryptor(_encryptionKey, iv); //Disposed by file stream wrapper. - var cryptoStream = new CryptoStream(stream.Content, transform, CryptoStreamMode.Read, false); //Disposed by file stream wrapper. + var cryptoStream = new SeekableCryptoStream(stream.Content, transform, CryptoStreamMode.Read, false); //Disposed by file stream wrapper. stream = new EncryptedFileStream(stream, encryption, transform, cryptoStream); } @@ -171,11 +136,11 @@ public async Task OpenWriteAsync(IContentFileHandle handle, var encryption = _encryptionFactory.Create(); //Disposed by file stream wrapper. encryption.GenerateIV(); - await WriteInitializationVector(stream.Content, encryption.IV, cancel) + await stream.Content.WriteInitializationVectorAsync(encryption.IV, cancel) .ConfigureAwait(false); var transform = encryption.CreateEncryptor(_encryptionKey, encryption.IV); //Disposed by file stream wrapper. - var cryptoStream = new CryptoStream(stream.Content, transform, CryptoStreamMode.Write, false); //Disposed by file stream wrapper. + var cryptoStream = new SeekableCryptoStream(stream.Content, transform, CryptoStreamMode.Write, false); //Disposed by file stream wrapper. 
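A condensed sketch of the decrypt-on-read wiring above, assuming an AES algorithm; encryptedContent, key, and cancel are hypothetical stand-ins for the file store's state, and the stream extensions involved are internal to the SDK, so this mirrors the in-assembly flow rather than public API usage.

using System.Security.Cryptography;
using Tableau.Migration.Content.Files;

// Hypothetical inputs: encryptedContent (a seekable Stream at position 0),
// key (byte[]), and cancel (CancellationToken).
using var aes = Aes.Create();

// The IV is stored as an unencrypted prefix, so it is read first...
var iv = await encryptedContent.ReadInitializationVectorAsync(aes.IV.Length, cancel);

// ...then the remainder is wrapped in a seekable decrypting stream so that
// consumers such as ZipArchive can still seek.
using var decryptor = aes.CreateDecryptor(key, iv);
using var readable = new SeekableCryptoStream(encryptedContent, decryptor, CryptoStreamMode.Read, leaveOpen: true);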
stream = new EncryptedFileStream(stream, encryption, transform, cryptoStream); } diff --git a/src/Tableau.Migration/Content/Files/IContentFileHandleExtensions.cs b/src/Tableau.Migration/Content/Files/IContentFileHandleExtensions.cs index 82e2abb..b6f1b59 100644 --- a/src/Tableau.Migration/Content/Files/IContentFileHandleExtensions.cs +++ b/src/Tableau.Migration/Content/Files/IContentFileHandleExtensions.cs @@ -14,6 +14,7 @@ // limitations under the License. // +using System; using System.Threading; using System.Threading.Tasks; @@ -22,8 +23,30 @@ namespace Tableau.Migration.Content.Files internal static class IContentFileHandleExtensions { internal static async Task CloseTableauFileEditorAsync(this IContentFileHandle contentFileHandle, CancellationToken cancel) - { - await contentFileHandle.Store.CloseTableauFileEditorAsync(contentFileHandle, cancel).ConfigureAwait(false); + => await contentFileHandle.Store.CloseTableauFileEditorAsync(contentFileHandle, cancel).ConfigureAwait(false); + + internal static async Task IsZipAsync(this IContentFileHandle handle, CancellationToken cancel) + { + var isZipFile = IsZipFile(h => h.GetOriginalFilePath()) ?? IsZipFile(h => h.GetStoreFilePath()); + + if (isZipFile is not null) + return isZipFile.Value; + + var fileStream = await handle.OpenReadAsync(cancel).ConfigureAwait(false); + + await using (fileStream) + { + return fileStream.Content.IsZip(); + } + + bool? IsZipFile(Func getFilePath) + => getFilePath(handle).IsZipFile; } + + internal static FilePath GetOriginalFilePath(this IContentFileHandle handle) + => new(handle.OriginalFileName); + + internal static FilePath GetStoreFilePath(this IContentFileHandle handle) + => new(handle.Path); } } \ No newline at end of file diff --git a/src/Tableau.Migration/Content/Files/ITableauFileEditor.cs b/src/Tableau.Migration/Content/Files/ITableauFileEditor.cs index 8a2f9a4..1f67aa8 100644 --- a/src/Tableau.Migration/Content/Files/ITableauFileEditor.cs +++ b/src/Tableau.Migration/Content/Files/ITableauFileEditor.cs @@ -15,8 +15,8 @@ // using System; -using System.IO; using System.IO.Compression; +using Microsoft.IO; namespace Tableau.Migration.Content.Files { @@ -32,7 +32,7 @@ public interface ITableauFileEditor : IAsyncDisposable /// with unencrypted tableau file data /// to write back to the file store upon disposal. /// - MemoryStream Content { get; } + RecyclableMemoryStream Content { get; } /// /// Gets the zip archive for the file, diff --git a/src/Tableau.Migration/Content/Files/SeekableCryptoStream.cs b/src/Tableau.Migration/Content/Files/SeekableCryptoStream.cs new file mode 100644 index 0000000..4c8017d --- /dev/null +++ b/src/Tableau.Migration/Content/Files/SeekableCryptoStream.cs @@ -0,0 +1,73 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.IO; +using System.Security.Cryptography; + +namespace Tableau.Migration.Content.Files +{ + /// + /// implementation that supports seeking. 
+ /// + public sealed class SeekableCryptoStream : CryptoStream + { + private readonly Stream _innerStream; + + /// + /// Gets the read/write mode for the stream. + /// + internal readonly CryptoStreamMode Mode; + + #region - Seek Support Overrides - + + /// + public override bool CanSeek { get; } = true; + + /// + public override long Position + { + get => _innerStream.Position; + set => _innerStream.Position = value; + } + + /// + public override long Length => _innerStream.Length; + + /// + public override long Seek(long offset, SeekOrigin origin) => _innerStream.Seek(offset, origin); + + #endregion + + /// + /// Creates a new instance. + /// + /// The inner stream to encrypt or decrypt. + /// The encryption for the stream. + /// The read/write mode for the stream. + /// Whether this instance should take ownership of the inner stream. + public SeekableCryptoStream(Stream innerStream, ICryptoTransform cryptoTransform, CryptoStreamMode mode, bool leaveOpen) + : base(innerStream, cryptoTransform, mode, leaveOpen) + { + if (!innerStream.CanSeek) + throw new ArgumentException("The stream must be seekable.", nameof(innerStream)); + + Mode = mode; + + _innerStream = innerStream; + } + } +} diff --git a/src/Tableau.Migration/Content/Files/StreamExtensions.cs b/src/Tableau.Migration/Content/Files/StreamExtensions.cs new file mode 100644 index 0000000..b588c39 --- /dev/null +++ b/src/Tableau.Migration/Content/Files/StreamExtensions.cs @@ -0,0 +1,58 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
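A small self-contained illustration of what the seek-support overrides in SeekableCryptoStream add over a plain CryptoStream; the AES transform here is only for demonstration.

using System;
using System.IO;
using System.Security.Cryptography;
using Tableau.Migration.Content.Files;

using var aes = Aes.Create();
using var inner = new MemoryStream(new byte[32]);
using var transform = aes.CreateDecryptor(aes.Key, aes.IV);

// A plain CryptoStream reports CanSeek == false and throws on Seek;
// SeekableCryptoStream delegates Position, Length, and Seek to the inner stream.
using var seekable = new SeekableCryptoStream(inner, transform, CryptoStreamMode.Read, leaveOpen: true);
Console.WriteLine(seekable.CanSeek);   // True
seekable.Seek(16, SeekOrigin.Begin);
Console.WriteLine(seekable.Position);  // 16

// Passing a non-seekable inner stream (CanSeek == false) throws an
// ArgumentException from the constructor, as guarded above.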
+// + +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace Tableau.Migration.Content.Files +{ + internal static class StreamExtensions + { + public static async ValueTask WriteInitializationVectorAsync( + this Stream stream, + byte[] iv, + CancellationToken cancel) + => await stream.WriteAsync(iv, cancel).ConfigureAwait(false); + + public static async ValueTask ReadInitializationVectorAsync( + this Stream stream, + int ivLength, + CancellationToken cancel) + { + byte[] iv = new byte[ivLength]; + int leftToRead = iv.Length; + int totalBytesRead = 0; + + while (leftToRead > 0) + { + int bytesRead = await stream.ReadAsync(iv.AsMemory(totalBytesRead, leftToRead), cancel) + .ConfigureAwait(false); + + if (bytesRead is 0) + { + break; + } + + totalBytesRead += bytesRead; + leftToRead -= bytesRead; + } + + return iv; + } + } +} diff --git a/src/Tableau.Migration/Content/Files/TableauFileEditor.cs b/src/Tableau.Migration/Content/Files/TableauFileEditor.cs index e717ce0..51cb209 100644 --- a/src/Tableau.Migration/Content/Files/TableauFileEditor.cs +++ b/src/Tableau.Migration/Content/Files/TableauFileEditor.cs @@ -20,6 +20,7 @@ using System.Linq; using System.Threading; using System.Threading.Tasks; +using Microsoft.IO; namespace Tableau.Migration.Content.Files { @@ -35,7 +36,7 @@ public class TableauFileEditor : ITableauFileEditor private bool _disposed = false; /// - public MemoryStream Content { get; } + public RecyclableMemoryStream Content { get; } /// public ZipArchive? Archive { get; } @@ -55,13 +56,18 @@ public class TableauFileEditor : ITableauFileEditor /// /// The zip archive to use to manipulate the file, or null to consider the file as a single XML file. /// A cancellation token to obey, and to use when the editor is disposed. - public TableauFileEditor(IContentFileHandle fileStoreFile, MemoryStream content, - ZipArchive? archive, CancellationToken disposalCancel) + public TableauFileEditor( + IContentFileHandle fileStoreFile, + RecyclableMemoryStream content, + ZipArchive? archive, + CancellationToken disposalCancel) { _fileStoreFile = fileStoreFile; Content = content; Archive = archive; _disposalCancel = disposalCancel; + + Content.Seek(0, SeekOrigin.Begin); } internal static bool IsXmlFile(string fileName) @@ -99,34 +105,41 @@ public ITableauFileXmlStream GetXmlStream() /// /// Opens a new Tableau file editor. /// - /// The file store file to edit. - /// A cancellation token to obey, and to use when the editor is disposed. + /// The file store file to edit. + /// The memory stream manager. + /// A cancellation token to obey, and to use when the editor is disposed. /// /// True to consider the file a zip archive, /// false to consider the file an XML file, /// or null to detect whether the file is a zip archive. /// /// The newly created file editor. - public static async Task OpenAsync(IContentFileHandle fileStoreFile, CancellationToken disposalCancel, + public static async Task OpenAsync( + IContentFileHandle handle, + IMemoryStreamManager memoryStreamManager, + CancellationToken cancel, bool? zipFormatOverride = null) { - var fileContent = new MemoryStream(); //Use default ctor for resizable stream. + var fileStream = await handle.OpenReadAsync(cancel).ConfigureAwait(false); + + var outputStream = memoryStreamManager.GetStream(handle.OriginalFileName); - //Read the file into a seekable memory stream - //that the ZipArchive requires for update mode. 
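The editor now buffers file content in RecyclableMemoryStream (Microsoft.IO) rather than a plain MemoryStream, so repeated open/close cycles reuse pooled buffers instead of churning the large object heap. A minimal sketch of the underlying library, under the assumption that IMemoryStreamManager is a thin wrapper over RecyclableMemoryStreamManager:

using Microsoft.IO;

// One manager per process; it owns the shared buffer pools.
var manager = new RecyclableMemoryStreamManager();

// Streams are tagged (here with a file name) to aid pool diagnostics and
// return their buffers to the pool when disposed.
using (var stream = manager.GetStream("workbook.twbx"))
{
    stream.Write(new byte[] { 0x50, 0x4B }, 0, 2); // "PK" zip signature, for example
}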
- var fileStream = await fileStoreFile.OpenReadAsync(disposalCancel).ConfigureAwait(false); await using (fileStream) { - await fileStream.Content.CopyToAsync(fileContent, disposalCancel).ConfigureAwait(false); + //Read the file into a seekable memory stream + //that the ZipArchive requires for update mode. + await fileStream.Content.CopyToAsync(outputStream, cancel).ConfigureAwait(false); } - //Reset the memory stream so the user/zip archive can read it. - fileContent.Seek(0, SeekOrigin.Begin); + outputStream.Seek(0, SeekOrigin.Begin); + + var isZip = zipFormatOverride == true || await handle.IsZipAsync(cancel).ConfigureAwait(false); + + var archive = isZip ? new ZipArchive(outputStream, ZipArchiveMode.Update, leaveOpen: true) : null; - var isZip = zipFormatOverride ?? fileContent.IsZip(); - ZipArchive? archive = isZip ? new ZipArchive(fileContent, ZipArchiveMode.Update, leaveOpen: true) : null; + outputStream.Seek(0, SeekOrigin.Begin); - return new(fileStoreFile, fileContent, archive, disposalCancel); + return new(handle, outputStream, archive, cancel); } #region - IAsyncDisposable Implementation - diff --git a/src/Tableau.Migration/Content/Files/TemporaryDirectoryContentFileStore.cs b/src/Tableau.Migration/Content/Files/TemporaryDirectoryContentFileStore.cs index 62ca076..1e707d5 100644 --- a/src/Tableau.Migration/Content/Files/TemporaryDirectoryContentFileStore.cs +++ b/src/Tableau.Migration/Content/Files/TemporaryDirectoryContentFileStore.cs @@ -33,8 +33,13 @@ public class TemporaryDirectoryContentFileStore /// The file system to use. /// The path resolver to use. /// A config reader to get the root path and other options from. - public TemporaryDirectoryContentFileStore(IFileSystem fileSystem, IContentFilePathResolver pathResolver, IConfigReader configReader) - : base(fileSystem, pathResolver, configReader, Path.GetRandomFileName()) + /// The memory stream manager to user. + public TemporaryDirectoryContentFileStore( + IFileSystem fileSystem, + IContentFilePathResolver pathResolver, + IConfigReader configReader, + IMemoryStreamManager memoryStreamManager) + : base(fileSystem, pathResolver, configReader, memoryStreamManager, Path.GetRandomFileName()) { } } } diff --git a/src/Tableau.Migration/Content/IWithViews.cs b/src/Tableau.Migration/Content/IDataSourceDetails.cs similarity index 76% rename from src/Tableau.Migration/Content/IWithViews.cs rename to src/Tableau.Migration/Content/IDataSourceDetails.cs index db2caec..368247a 100644 --- a/src/Tableau.Migration/Content/IWithViews.cs +++ b/src/Tableau.Migration/Content/IDataSourceDetails.cs @@ -14,18 +14,16 @@ // limitations under the License. // -using System.Collections.Immutable; - namespace Tableau.Migration.Content { /// - /// Interface to be inherited by content items with tags. + /// Interface for a data source object with extended information, from a GET query for example. /// - public interface IWithViews + public interface IDataSourceDetails : IDataSource { /// - /// Gets the views for the content item. + /// Gets the certification note. 
/// - IImmutableList Views { get; } + string CertificationNote { get; } } -} \ No newline at end of file +} diff --git a/src/Tableau.Migration/Content/IPublishableDataSource.cs b/src/Tableau.Migration/Content/IPublishableDataSource.cs index b16db3d..64c870a 100644 --- a/src/Tableau.Migration/Content/IPublishableDataSource.cs +++ b/src/Tableau.Migration/Content/IPublishableDataSource.cs @@ -20,12 +20,6 @@ namespace Tableau.Migration.Content /// Interface for a that has been downloaded /// and has full information necessary for re-publishing. /// - public interface IPublishableDataSource : IDataSource, IFileContent, IConnectionsContent - { - /// - /// Gets the certification note for the data source. - /// Should be updated through a post-publish hook. - /// - string CertificationNote { get; } - } + public interface IPublishableDataSource : IDataSourceDetails, IFileContent, IConnectionsContent + { } } diff --git a/src/Tableau.Migration/Content/IPublishableWorkbook.cs b/src/Tableau.Migration/Content/IPublishableWorkbook.cs index 9360b17..42c2cd8 100644 --- a/src/Tableau.Migration/Content/IPublishableWorkbook.cs +++ b/src/Tableau.Migration/Content/IPublishableWorkbook.cs @@ -22,7 +22,7 @@ namespace Tableau.Migration.Content /// Intreface for a that has been downloaded /// and has full information necessary for re-publishing. /// - public interface IPublishableWorkbook : IWorkbook, IFileContent, IWithViews, IChildPermissionsContent, IConnectionsContent + public interface IPublishableWorkbook : IWorkbookDetails, IFileContent, IConnectionsContent { /// /// Gets the ID of the user to generate thumbnails as. diff --git a/src/Tableau.Migration/Content/IWorkbook.cs b/src/Tableau.Migration/Content/IWorkbook.cs index 7f36221..32a7866 100644 --- a/src/Tableau.Migration/Content/IWorkbook.cs +++ b/src/Tableau.Migration/Content/IWorkbook.cs @@ -36,7 +36,6 @@ public interface IWorkbook : /// bool ShowTabs { get; set; } - /// /// Gets the file size. /// diff --git a/src/Tableau.Migration/Content/ResultWorkbook.cs b/src/Tableau.Migration/Content/IWorkbookDetails.cs similarity index 70% rename from src/Tableau.Migration/Content/ResultWorkbook.cs rename to src/Tableau.Migration/Content/IWorkbookDetails.cs index c26b2b0..63b73c2 100644 --- a/src/Tableau.Migration/Content/ResultWorkbook.cs +++ b/src/Tableau.Migration/Content/IWorkbookDetails.cs @@ -15,14 +15,15 @@ // using System.Collections.Immutable; -using Tableau.Migration.Api.Rest.Models; namespace Tableau.Migration.Content { - internal class ResultWorkbook : ViewsWorkbook, IResultWorkbook + /// + /// Interface for a workbook object with extended information, from a GET query for example. 
+ /// + public interface IWorkbookDetails : IWorkbook, IChildPermissionsContent { - public ResultWorkbook(IWorkbookType response, IContentReference project, IContentReference owner, IImmutableList views) - : base(response, project, owner, views) - { } + /// + public IImmutableList Views { get; } } } diff --git a/src/Tableau.Migration/Content/Permissions/ICapabilityComparer.cs b/src/Tableau.Migration/Content/Permissions/ICapabilityComparer.cs index fa419d3..92b008a 100644 --- a/src/Tableau.Migration/Content/Permissions/ICapabilityComparer.cs +++ b/src/Tableau.Migration/Content/Permissions/ICapabilityComparer.cs @@ -23,7 +23,7 @@ internal class ICapabilityComparer : ComparerBase { public static readonly ICapabilityComparer Instance = new(); - public override int CompareItems(ICapability x, ICapability y) + protected override int CompareItems(ICapability x, ICapability y) { var nameResult = StringComparer.OrdinalIgnoreCase.Compare(x.Name, y.Name); diff --git a/src/Tableau.Migration/Content/PublishableDataSource.cs b/src/Tableau.Migration/Content/PublishableDataSource.cs index d2e48f2..4d8c7c8 100644 --- a/src/Tableau.Migration/Content/PublishableDataSource.cs +++ b/src/Tableau.Migration/Content/PublishableDataSource.cs @@ -17,31 +17,21 @@ using System; using System.Collections.Immutable; using System.Threading.Tasks; -using Tableau.Migration.Api.Rest.Models.Responses; using Tableau.Migration.Content.Files; namespace Tableau.Migration.Content { - internal sealed class PublishableDataSource : DataSource, IPublishableDataSource + internal sealed class PublishableDataSource : DataSourceDetails, IPublishableDataSource { - /// - public string CertificationNote { get; } - /// public IContentFileHandle File { get; set; } /// public IImmutableList Connections { get; } - public PublishableDataSource( - DataSourceResponse response, - IContentReference project, - IContentReference owner, - IImmutableList connections, - IContentFileHandle file) - : base(Guard.AgainstNull(response.Item, () => response.Item), project, owner) + public PublishableDataSource(IDataSourceDetails dataSource, IImmutableList connections, IContentFileHandle file) + : base(dataSource) { - CertificationNote = response.Item.CertificationNote ?? string.Empty; Connections = connections; File = file; } diff --git a/src/Tableau.Migration/Content/PublishableWorkbook.cs b/src/Tableau.Migration/Content/PublishableWorkbook.cs index aa7898d..d93e495 100644 --- a/src/Tableau.Migration/Content/PublishableWorkbook.cs +++ b/src/Tableau.Migration/Content/PublishableWorkbook.cs @@ -15,15 +15,13 @@ // using System; -using System.Collections.Generic; using System.Collections.Immutable; using System.Threading.Tasks; -using Tableau.Migration.Api.Rest.Models.Responses; using Tableau.Migration.Content.Files; namespace Tableau.Migration.Content { - internal sealed class PublishableWorkbook : ViewsWorkbook, IPublishableWorkbook + internal sealed class PublishableWorkbook : WorkbookDetails, IPublishableWorkbook { /// public Guid? 
ThumbnailsUserId { get; set; } @@ -34,8 +32,8 @@ internal sealed class PublishableWorkbook : ViewsWorkbook, IPublishableWorkbook /// public IImmutableList Connections { get; } - public PublishableWorkbook(WorkbookResponse response, IContentReference project, IContentReference owner, IImmutableList connections, IImmutableList views, IContentFileHandle file) - : base(Guard.AgainstNull(response.Item, () => response.Item), project, owner, views) + public PublishableWorkbook(IWorkbookDetails workbook, IImmutableList connections, IContentFileHandle file) + : base(workbook) { ThumbnailsUserId = null; Connections = connections; diff --git a/src/Tableau.Migration/Content/Search/ContentReferenceCacheBase.cs b/src/Tableau.Migration/Content/Search/ContentReferenceCacheBase.cs index 116f8ef..5b2e1fc 100644 --- a/src/Tableau.Migration/Content/Search/ContentReferenceCacheBase.cs +++ b/src/Tableau.Migration/Content/Search/ContentReferenceCacheBase.cs @@ -52,9 +52,28 @@ public abstract class ContentReferenceCacheBase : IContentReferenceCache /// The content references to cache. protected abstract ValueTask> SearchAsync(Guid searchId, CancellationToken cancel); + /// + /// Searches for content at the given location. + /// + /// The primary location to search for. + /// A cancellation token to obey. + /// The content reference to cache, or null. + protected virtual Task IndividualSearchAsync(ContentLocation searchLocation, CancellationToken cancel) + => Task.FromResult(null); + + /// + /// Searches for content at the given ID. + /// + /// The primary ID to search for. + /// A cancellation token to obey. + /// The content reference to cache, or null. + protected virtual Task IndividualSearchAsync(Guid searchId, CancellationToken cancel) + => Task.FromResult(null); + private async Task SearchCacheAsync( Dictionary cache, TKey search, Func>> searchAsync, + Func> individualSearchAsync, CancellationToken cancel) where TKey : notnull { @@ -67,7 +86,7 @@ public abstract class ContentReferenceCacheBase : IContentReferenceCache try { - //Retry lookup in case a semaphore wait means the populated for this attempt. + // Retry lookup in case a semaphore wait means the populated for this attempt. if (cache.TryGetValue(search, out cachedResult)) { return cachedResult; @@ -79,12 +98,27 @@ public abstract class ContentReferenceCacheBase : IContentReferenceCache _idCache[searchResult.Id] = searchResult; _locationCache[searchResult.Location] = searchResult; } + + // Retry lookup now that this attempt populated. + if (cache.TryGetValue(search, out cachedResult)) + { + return cachedResult; + } - //Retry lookup now that this attempt populated. - //Assign an explicit null if this fails to avoid repeated populations that will fail. - if (!cache.TryGetValue(search, out cachedResult)) + // No cached results. Retry individual search. + cachedResult = await individualSearchAsync(search, cancel).ConfigureAwait(false); + + // Checks the individual search result. + if (cachedResult is null) + { + // Assign an explicit null if this fails to avoid repeated populations that will fail. + cache[search] = null; + } + else { - cachedResult = cache[search] = null; + // Sets the cache with the individual search result. 
+ _idCache[cachedResult.Id] = cachedResult; + _locationCache[cachedResult.Location] = cachedResult; } return cachedResult; @@ -99,11 +133,11 @@ public abstract class ContentReferenceCacheBase : IContentReferenceCache /// public async Task ForLocationAsync(ContentLocation location, CancellationToken cancel) - => await SearchCacheAsync(_locationCache, location, SearchAsync, cancel).ConfigureAwait(false); + => await SearchCacheAsync(_locationCache, location, SearchAsync, IndividualSearchAsync, cancel).ConfigureAwait(false); /// public async Task ForIdAsync(Guid id, CancellationToken cancel) - => await SearchCacheAsync(_idCache, id, SearchAsync, cancel).ConfigureAwait(false); + => await SearchCacheAsync(_idCache, id, SearchAsync, IndividualSearchAsync, cancel).ConfigureAwait(false); #endregion } diff --git a/src/Tableau.Migration/Content/Search/IContentReferenceFinderFactory.cs b/src/Tableau.Migration/Content/Search/IContentReferenceFinderFactory.cs index d7c2cbb..44ead94 100644 --- a/src/Tableau.Migration/Content/Search/IContentReferenceFinderFactory.cs +++ b/src/Tableau.Migration/Content/Search/IContentReferenceFinderFactory.cs @@ -28,6 +28,6 @@ public interface IContentReferenceFinderFactory /// The content type. /// The content reference finder. IContentReferenceFinder ForContentType() - where TContent : IContentReference; + where TContent : class, IContentReference; } } diff --git a/src/Tableau.Migration/Content/User.cs b/src/Tableau.Migration/Content/User.cs index ef59ff6..6140d77 100644 --- a/src/Tableau.Migration/Content/User.cs +++ b/src/Tableau.Migration/Content/User.cs @@ -14,6 +14,7 @@ // limitations under the License. // +using System; using Tableau.Migration.Api.Rest.Models.Responses; namespace Tableau.Migration.Content @@ -33,17 +34,33 @@ internal sealed class User : UsernameContentBase, IUser public string? AuthenticationType { get; set; } public User(UsersResponse.UserType response) + : this( + response.Id, + Guard.AgainstNullEmptyOrWhiteSpace(Guard.AgainstNull(response.Domain, () => response.Domain).Name, () => response.Domain.Name), + response.Email, + response.Name, + response.FullName, + response.SiteRole, + response.AuthSetting) { - var domain = Guard.AgainstNull(response.Domain, () => response.Domain); - - Id = Guard.AgainstDefaultValue(response.Id, () => response.Id); + } - Email = response.Email ?? string.Empty; - Name = Guard.AgainstNullEmptyOrWhiteSpace(response.Name, () => response.Name); - FullName = Guard.AgainstNullEmptyOrWhiteSpace(response.FullName, () => response.FullName); - SiteRole = Guard.AgainstNullEmptyOrWhiteSpace(response.SiteRole, () => response.SiteRole); - AuthenticationType = response.AuthSetting; - Domain = Guard.AgainstNullEmptyOrWhiteSpace(domain.Name, () => response.Domain.Name); + public User( + Guid id, + string? userDomain, + string? email, + string? name, + string? fullName, + string? siteRole, + string? authSetting) + { + Id = Guard.AgainstDefaultValue(id, () => id); + Email = email ?? string.Empty; + Name = Guard.AgainstNullEmptyOrWhiteSpace(name, () => name); + FullName = fullName ?? string.Empty; + SiteRole = Guard.AgainstNullEmptyOrWhiteSpace(siteRole, () => siteRole); + AuthenticationType = authSetting; + Domain = userDomain ?? 
string.Empty; } } } diff --git a/src/Tableau.Migration/Content/View.cs b/src/Tableau.Migration/Content/View.cs index 49eb45d..d56befd 100644 --- a/src/Tableau.Migration/Content/View.cs +++ b/src/Tableau.Migration/Content/View.cs @@ -29,7 +29,7 @@ public View(IViewReferenceType view, IContentReference project, string? workbook Guard.AgainstNullEmptyOrWhiteSpace(workbookName, () => workbookName); Id = view.Id; - Name = Name = Guard.AgainstNullEmptyOrWhiteSpace(view.Name, () => view.Name); + Name = Guard.AgainstNullEmptyOrWhiteSpace(view.Name, () => view.Name); ContentUrl = Guard.AgainstNull(view.ContentUrl, () => view.ContentUrl); Location = project.Location.Append(workbookName).Append(Name); Tags = view.Tags.ToTagList(t => new Tag(t)); diff --git a/src/Tableau.Migration/Content/Workbook.cs b/src/Tableau.Migration/Content/Workbook.cs index 6bda1bd..9b1032c 100644 --- a/src/Tableau.Migration/Content/Workbook.cs +++ b/src/Tableau.Migration/Content/Workbook.cs @@ -14,6 +14,7 @@ // limitations under the License. // +using System; using System.Collections.Generic; using Tableau.Migration.Api.Rest.Models; @@ -22,24 +23,71 @@ namespace Tableau.Migration.Content internal class Workbook : ContainerContentBase, IWorkbook { public Workbook(IWorkbookType response, IContentReference project, IContentReference owner) + : this( + response.Id, + response.Name, + response.ContentUrl, + response.Description, + response.CreatedAt, + response.UpdatedAt, + response.EncryptExtracts, + response.ShowTabs, + response.Size, + response.WebpageUrl, + response.Tags.ToTagList(t => new Tag(t)), + project, + owner) + { } + + public Workbook(IWorkbook workbook) + : this( + workbook.Id, + workbook.Name, + workbook.ContentUrl, + workbook.Description, + workbook.CreatedAt, + workbook.UpdatedAt, + workbook.EncryptExtracts, + workbook.ShowTabs, + workbook.Size, + workbook.WebpageUrl, + workbook.Tags, + ((IContainerContent)workbook).Container, + workbook.Owner) + { } + + private Workbook( + Guid id, + string? name, + string? contentUrl, + string? description, + string? createdAt, + string? updatedAt, + bool encryptExtracts, + bool showTabs, + long size, + string? webpageUrl, + IList tags, + IContentReference project, + IContentReference owner) : base(project) { - Id = Guard.AgainstDefaultValue(response.Id, () => response.Id); - Name = Guard.AgainstNullEmptyOrWhiteSpace(response.Name, () => response.Name); - ContentUrl = Guard.AgainstNullEmptyOrWhiteSpace(response.ContentUrl, () => response.ContentUrl); + Id = Guard.AgainstDefaultValue(id, () => id); + Name = Guard.AgainstNullEmptyOrWhiteSpace(name, () => name); + ContentUrl = Guard.AgainstNullEmptyOrWhiteSpace(contentUrl, () => contentUrl); + + Description = description ?? string.Empty; + CreatedAt = createdAt ?? string.Empty; + UpdatedAt = updatedAt ?? string.Empty; - ShowTabs = response.ShowTabs; - Size = response.Size; - WebpageUrl = response.WebpageUrl; - EncryptExtracts = response.EncryptExtracts; + EncryptExtracts = encryptExtracts; + ShowTabs = showTabs; + Size = size; - Description = response.Description ?? string.Empty; - CreatedAt = response.CreatedAt ?? string.Empty; - UpdatedAt = response.UpdatedAt ?? string.Empty; - WebpageUrl = response.WebpageUrl ?? string.Empty; + WebpageUrl = webpageUrl ?? 
string.Empty; Owner = owner; - Tags = response.Tags.ToTagList(t => new Tag(t)); + Tags = tags; Location = project.Location.Append(Name); } diff --git a/src/Tableau.Migration/Content/ViewsWorkbook.cs b/src/Tableau.Migration/Content/WorkbookDetails.cs similarity index 74% rename from src/Tableau.Migration/Content/ViewsWorkbook.cs rename to src/Tableau.Migration/Content/WorkbookDetails.cs index 043e5ac..52052bf 100644 --- a/src/Tableau.Migration/Content/ViewsWorkbook.cs +++ b/src/Tableau.Migration/Content/WorkbookDetails.cs @@ -17,19 +17,25 @@ using System; using System.Collections.Generic; using System.Collections.Immutable; +using System.Linq; using Tableau.Migration.Api.Rest.Models; namespace Tableau.Migration.Content { - internal abstract class ViewsWorkbook : Workbook, IChildPermissionsContent + internal class WorkbookDetails : Workbook, IWorkbookDetails { - /// public IImmutableList Views { get; } - public ViewsWorkbook(IWorkbookType response, IContentReference project, IContentReference owner, IImmutableList views) + public WorkbookDetails(IWorkbookDetailsType response, IContentReference project, IContentReference owner) : base(response, project, owner) { - Views = views; + Views = response.Views.Select(v => new View(v, project, Name)).ToImmutableArray(); + } + + public WorkbookDetails(IWorkbookDetails workbook) + : base(workbook) + { + Views = workbook.Views; } #region - IChildPermissionsContent Implementation - diff --git a/src/Tableau.Migration/Engine/Endpoints/MigrationEndpointFactory.cs b/src/Tableau.Migration/Engine/Endpoints/MigrationEndpointFactory.cs index a3e38e2..a643645 100644 --- a/src/Tableau.Migration/Engine/Endpoints/MigrationEndpointFactory.cs +++ b/src/Tableau.Migration/Engine/Endpoints/MigrationEndpointFactory.cs @@ -18,6 +18,7 @@ using Microsoft.Extensions.DependencyInjection; using Tableau.Migration.Content.Files; using Tableau.Migration.Engine.Endpoints.Search; +using Tableau.Migration.Resources; namespace Tableau.Migration.Engine.Endpoints { @@ -30,6 +31,7 @@ public class MigrationEndpointFactory : IMigrationEndpointFactory private readonly ManifestDestinationContentReferenceFinderFactory _destinationFinderFactory; private readonly ManifestSourceContentReferenceFinderFactory _sourceFinderFactory; private readonly IContentFileStore _fileStore; + private readonly ISharedResourcesLocalizer _localizer; /// /// Creates a new object. @@ -38,15 +40,18 @@ public class MigrationEndpointFactory : IMigrationEndpointFactory /// A source content reference finder factory. /// A destination content reference finder factory. /// The file store to use. + /// A string localizer. 
public MigrationEndpointFactory(IServiceScopeFactory serviceScopeFactory, ManifestSourceContentReferenceFinderFactory sourceFinderFactory, ManifestDestinationContentReferenceFinderFactory destinationFinderFactory, - IContentFileStore fileStore) + IContentFileStore fileStore, + ISharedResourcesLocalizer localizer) { _serviceScopeFactory = serviceScopeFactory; _destinationFinderFactory = destinationFinderFactory; _sourceFinderFactory = sourceFinderFactory; _fileStore = fileStore; + _localizer = localizer; } /// @@ -54,7 +59,7 @@ public IDestinationEndpoint CreateDestination(IMigrationPlan plan) { if (plan.Destination is ITableauApiEndpointConfiguration apiConfig) { - return new TableauApiDestinationEndpoint(_serviceScopeFactory, apiConfig, _destinationFinderFactory, _fileStore); + return new TableauApiDestinationEndpoint(_serviceScopeFactory, apiConfig, _destinationFinderFactory, _fileStore, _localizer); } throw new ArgumentException($"Cannot create a destination endpoint for type {plan.Source.GetType()}"); @@ -65,7 +70,7 @@ public ISourceEndpoint CreateSource(IMigrationPlan plan) { if (plan.Source is ITableauApiEndpointConfiguration apiConfig) { - return new TableauApiSourceEndpoint(_serviceScopeFactory, apiConfig, _sourceFinderFactory, _fileStore); + return new TableauApiSourceEndpoint(_serviceScopeFactory, apiConfig, _sourceFinderFactory, _fileStore, _localizer); } throw new ArgumentException($"Cannot create a source endpoint for type {plan.Source.GetType()}"); diff --git a/src/Tableau.Migration/Engine/Endpoints/Search/BulkDestinationCache.cs b/src/Tableau.Migration/Engine/Endpoints/Search/BulkDestinationCache.cs index 05c45e4..12e31a8 100644 --- a/src/Tableau.Migration/Engine/Endpoints/Search/BulkDestinationCache.cs +++ b/src/Tableau.Migration/Engine/Endpoints/Search/BulkDestinationCache.cs @@ -16,10 +16,9 @@ using System; using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; using System.Threading; using System.Threading.Tasks; +using Tableau.Migration.Api.Search; using Tableau.Migration.Config; using Tableau.Migration.Content; using Tableau.Migration.Engine.Manifest; @@ -27,105 +26,68 @@ namespace Tableau.Migration.Engine.Endpoints.Search { /// - /// implementation + /// implementation /// that falls back to bulk API listing when destination information is not found in the manifest. /// /// The content type. - public class BulkDestinationCache : DestinationManifestCacheBase - where TContent : IContentReference + public class BulkDestinationCache : BulkApiContentReferenceCache + where TContent : class, IContentReference { - private readonly IMigrationManifestEditor _manifest; - private readonly IDestinationEndpoint _endpoint; - private readonly IConfigReader _configReader; - - private bool _loaded; + private readonly IMigrationManifestContentTypePartitionEditor _manifestEntries; /// /// Creates a new /// - /// A migration manifest. - /// A destination endpoint. + /// The destination endpoint. /// A config reader. - public BulkDestinationCache(IMigrationManifestEditor manifest, IDestinationEndpoint endpoint, IConfigReader configReader) - : base(manifest) + /// A migration manifest. 
+ public BulkDestinationCache( + IDestinationEndpoint endpoint, + IConfigReader configReader, + IMigrationManifestEditor manifest) + : base((endpoint as IDestinationApiEndpoint)?.SiteApi, configReader) { - _manifest = manifest; - _endpoint = endpoint; - _configReader = configReader; + _manifestEntries = manifest.Entries.GetOrCreatePartition(); } - /// - /// Gets the configured batch size. - /// - protected int BatchSize => _configReader.Get().BatchSize; - - /// - /// Called after an item is loaded into the cache from the store. - /// - /// The item that was loaded. - protected virtual void ItemLoaded(TContent item) { } - - /// - /// Ensures that the cache is loaded. - /// - /// A cancellation token to obey. - /// The loaded items, or an empty value if the store has already been loaded. - protected async ValueTask> LoadStoreAsync(CancellationToken cancel) + /// + protected override void ItemLoaded(TContent item) { - //Only load content a single time (unless we expire the cache) - //This is so failed lookups don't cause us to re-list - //everything just to fail the lookup again. - if (_loaded) + //Assign this info to the manifest if there's an entry with our mapped location. + //This updates any ID/other information that may have changed since last run. + if (_manifestEntries.ByMappedLocation.TryGetValue(item.Location, out var manifestEntry)) { - return Enumerable.Empty(); + manifestEntry.DestinationFound(new ContentReferenceStub(item)); } + base.ItemLoaded(item); + } - var manifestEntries = _manifest.Entries.GetOrCreatePartition(); - var pager = _endpoint.GetPager(BatchSize); - - cancel.ThrowIfCancellationRequested(); - - int loadedCount = 0; - - var page = await pager.NextPageAsync(cancel).ConfigureAwait(false); - var results = ImmutableArray.CreateBuilder(page.TotalCount); - while (!page.Value.IsNullOrEmpty()) + /// + protected override async ValueTask> SearchAsync(ContentLocation searchLocation, CancellationToken cancel) + { + if (_manifestEntries.ByMappedLocation.TryGetValue(searchLocation, out var entry)) { - foreach (var item in page.Value) + if (entry.Destination is not null) { - var destinationInfo = new ContentReferenceStub(item); - - //Assign this info to the manifest if there's an entry with our mapped location. - //This updates any ID/other information that may have changed since last run. 
- if (manifestEntries.ByMappedLocation.TryGetValue(item.Location, out var manifestEntry)) - { - manifestEntry.DestinationFound(destinationInfo); - } - - results.Add(destinationInfo); - - ItemLoaded(item); - loadedCount++; + return new[] { new ContentReferenceStub(entry.Destination) }; } - - if (loadedCount >= page.TotalCount) - break; - - cancel.ThrowIfCancellationRequested(); - - page = await pager.NextPageAsync(cancel).ConfigureAwait(false); } - _loaded = true; - return results.ToImmutable(); + return await base.SearchAsync(searchLocation, cancel).ConfigureAwait(false); } /// - protected override async ValueTask> SearchStoreAsync(ContentLocation searchLocation, CancellationToken cancel) - => await LoadStoreAsync(cancel).ConfigureAwait(false); + protected override async ValueTask> SearchAsync(Guid searchId, CancellationToken cancel) + { + if (_manifestEntries.ByDestinationId.TryGetValue(searchId, out var entry)) + { + if (entry.Destination is not null) + { + return new[] { new ContentReferenceStub(entry.Destination) }; + } + } - /// - protected override async ValueTask> SearchStoreAsync(Guid searchId, CancellationToken cancel) - => await LoadStoreAsync(cancel).ConfigureAwait(false); + return await base.SearchAsync(searchId, cancel).ConfigureAwait(false); + } } } diff --git a/src/Tableau.Migration/Engine/Endpoints/Search/BulkDestinationProjectCache.cs b/src/Tableau.Migration/Engine/Endpoints/Search/BulkDestinationProjectCache.cs index e487cf5..0cabb93 100644 --- a/src/Tableau.Migration/Engine/Endpoints/Search/BulkDestinationProjectCache.cs +++ b/src/Tableau.Migration/Engine/Endpoints/Search/BulkDestinationProjectCache.cs @@ -35,11 +35,14 @@ public class BulkDestinationProjectCache : BulkDestinationCache, ILock /// /// Creates a new object. /// - /// The migration manifest. /// The destination endpoint. /// The configuration reader. - public BulkDestinationProjectCache(IMigrationManifestEditor manifest, IDestinationEndpoint endpoint, IConfigReader configReader) - : base(manifest, endpoint, configReader) + /// The migration manifest. 
+ public BulkDestinationProjectCache( + IDestinationEndpoint endpoint, + IConfigReader configReader, + IMigrationManifestEditor manifest) + : base(endpoint, configReader, manifest) { _projectContentPermissionModeCache = new(); } @@ -48,24 +51,24 @@ public BulkDestinationProjectCache(IMigrationManifestEditor manifest, IDestinati protected override void ItemLoaded(IProject item) { base.ItemLoaded(item); - UpdateLockedProjectCache(item); + UpdateLockedProjectCache(item); } /// public async Task IsProjectLockedAsync(Guid id, CancellationToken cancel, bool includeWithoutNested = true) { - await LoadStoreAsync(cancel).ConfigureAwait(false); + await SearchAsync(id, cancel).ConfigureAwait(false); - if(!_projectContentPermissionModeCache.TryGetValue(id, out var mode)) + if (!_projectContentPermissionModeCache.TryGetValue(id, out var mode)) { return false; } - - if(ContentPermissions.IsAMatch(ContentPermissions.LockedToProject, mode)) + + if (ContentPermissions.IsAMatch(ContentPermissions.LockedToProject, mode)) { return true; } - else if(includeWithoutNested && ContentPermissions.IsAMatch(ContentPermissions.LockedToProjectWithoutNested, mode)) + else if (includeWithoutNested && ContentPermissions.IsAMatch(ContentPermissions.LockedToProjectWithoutNested, mode)) { return true; } diff --git a/src/Tableau.Migration/Engine/Endpoints/Search/BulkSourceCache.cs b/src/Tableau.Migration/Engine/Endpoints/Search/BulkSourceCache.cs new file mode 100644 index 0000000..809612a --- /dev/null +++ b/src/Tableau.Migration/Engine/Endpoints/Search/BulkSourceCache.cs @@ -0,0 +1,42 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using Tableau.Migration.Api.Search; +using Tableau.Migration.Config; + +namespace Tableau.Migration.Engine.Endpoints.Search +{ + /// + /// implementation + /// that is built from ISourceEndpoint. + /// + /// The content type. + public class BulkSourceCache : BulkApiContentReferenceCache + where TContent : class, IContentReference + { + /// + /// Creates a new + /// + /// The source endpoint. + /// A config reader. + public BulkSourceCache( + ISourceEndpoint endpoint, + IConfigReader configReader) + : base((endpoint as ISourceApiEndpoint)?.SiteApi, configReader) + { + } + } +} diff --git a/src/Tableau.Migration/Engine/Endpoints/Search/DestinationManifestCacheBase.cs b/src/Tableau.Migration/Engine/Endpoints/Search/DestinationManifestCacheBase.cs deleted file mode 100644 index c200bb5..0000000 --- a/src/Tableau.Migration/Engine/Endpoints/Search/DestinationManifestCacheBase.cs +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Tableau.Migration.Content; -using Tableau.Migration.Content.Search; -using Tableau.Migration.Engine.Manifest; - -namespace Tableau.Migration.Engine.Endpoints.Search -{ - /// - /// Abstract base class for implementations that - /// first make use of the manifest's destination information, - /// falling back to a data store lookup of some kind. - /// - /// The content type. - public abstract class DestinationManifestCacheBase : ContentReferenceCacheBase - { - private readonly IMigrationManifestEditor _manifest; - - /// - /// Creates a new object. - /// - /// The current migration manifest. - protected DestinationManifestCacheBase(IMigrationManifestEditor manifest) - { - _manifest = manifest; - } - - /// - /// Searches the data store for at least the given location, - /// possibly with more items returned for opportunistic caching. - /// - /// The primary search location to search for. - /// A cancellation token to obey. - /// The search results. - protected abstract ValueTask> SearchStoreAsync(ContentLocation searchLocation, CancellationToken cancel); - - /// - /// Searches the data store for at least the given ID, - /// possibly with more items returned for opportunistic caching. - /// - /// The primary ID to search for. - /// A cancellation token to obey. - /// The search results. - protected abstract ValueTask> SearchStoreAsync(Guid searchId, CancellationToken cancel); - - /// - protected override async ValueTask> SearchAsync(ContentLocation searchLocation, CancellationToken cancel) - { - var entries = _manifest.Entries.GetOrCreatePartition(); - if (entries.ByMappedLocation.TryGetValue(searchLocation, out var entry)) - { - if (entry.Destination is not null) - { - return new[] { new ContentReferenceStub(entry.Destination) }; - } - } - - return await SearchStoreAsync(searchLocation, cancel).ConfigureAwait(false); - } - - /// - protected override async ValueTask> SearchAsync(Guid searchId, CancellationToken cancel) - { - var entries = _manifest.Entries.GetOrCreatePartition(); - if (entries.ByDestinationId.TryGetValue(searchId, out var entry)) - { - if (entry.Destination is not null) - { - return new[] { new ContentReferenceStub(entry.Destination) }; - } - } - - return await SearchStoreAsync(searchId, cancel).ConfigureAwait(false); - } - } -} diff --git a/src/Tableau.Migration/Engine/Endpoints/Search/ManifestDestinationContentReferenceFinder.cs b/src/Tableau.Migration/Engine/Endpoints/Search/ManifestDestinationContentReferenceFinder.cs index ab0e30e..5a7a81f 100644 --- a/src/Tableau.Migration/Engine/Endpoints/Search/ManifestDestinationContentReferenceFinder.cs +++ b/src/Tableau.Migration/Engine/Endpoints/Search/ManifestDestinationContentReferenceFinder.cs @@ -31,7 +31,7 @@ namespace Tableau.Migration.Engine.Endpoints.Search /// The content type. 
public class ManifestDestinationContentReferenceFinder : IMappedContentReferenceFinder, IContentReferenceFinder - where TContent : IContentReference + where TContent : class, IContentReference { private readonly IMigrationManifestEditor _manifest; private readonly IContentReferenceCache _destinationCache; diff --git a/src/Tableau.Migration/Engine/Endpoints/Search/ManifestDestinationContentReferenceFinderFactory.cs b/src/Tableau.Migration/Engine/Endpoints/Search/ManifestDestinationContentReferenceFinderFactory.cs index a21ac28..c4e3a01 100644 --- a/src/Tableau.Migration/Engine/Endpoints/Search/ManifestDestinationContentReferenceFinderFactory.cs +++ b/src/Tableau.Migration/Engine/Endpoints/Search/ManifestDestinationContentReferenceFinderFactory.cs @@ -39,7 +39,7 @@ public ManifestDestinationContentReferenceFinderFactory(IServiceProvider service /// public IContentReferenceFinder ForContentType() - where TContent : IContentReference + where TContent : class, IContentReference => _services.GetRequiredService>(); } } diff --git a/src/Tableau.Migration/Engine/Endpoints/Search/ManifestSourceContentReferenceFinder.cs b/src/Tableau.Migration/Engine/Endpoints/Search/ManifestSourceContentReferenceFinder.cs index a28c4b4..4aff6ac 100644 --- a/src/Tableau.Migration/Engine/Endpoints/Search/ManifestSourceContentReferenceFinder.cs +++ b/src/Tableau.Migration/Engine/Endpoints/Search/ManifestSourceContentReferenceFinder.cs @@ -19,6 +19,7 @@ using System.Threading.Tasks; using Tableau.Migration.Content.Search; using Tableau.Migration.Engine.Manifest; +using Tableau.Migration.Engine.Pipelines; namespace Tableau.Migration.Engine.Endpoints.Search { @@ -28,30 +29,33 @@ namespace Tableau.Migration.Engine.Endpoints.Search /// /// The content type. public class ManifestSourceContentReferenceFinder : IContentReferenceFinder - where TContent : IContentReference + where TContent : class, IContentReference { private readonly IMigrationManifestEditor _manifest; + private readonly IContentReferenceCache _sourceCache; /// /// Creates a new object. /// /// The manifest. - public ManifestSourceContentReferenceFinder(IMigrationManifestEditor manifest) + /// The pipeline to get a source cache from. 
+ public ManifestSourceContentReferenceFinder(IMigrationManifestEditor manifest, IMigrationPipeline pipeline) { _manifest = manifest; + _sourceCache = pipeline.CreateSourceCache(); } /// - public Task FindByIdAsync(Guid id, CancellationToken cancel) + public async Task FindByIdAsync(Guid id, CancellationToken cancel) { var partition = _manifest.Entries.GetOrCreatePartition(); if (partition.BySourceId.TryGetValue(id, out var entry)) { - return Task.FromResult(entry.Source); + return entry.Source; } - return Task.FromResult(null); + return await _sourceCache.ForIdAsync(id, cancel).ConfigureAwait(false); } } } diff --git a/src/Tableau.Migration/Engine/Endpoints/Search/ManifestSourceContentReferenceFinderFactory.cs b/src/Tableau.Migration/Engine/Endpoints/Search/ManifestSourceContentReferenceFinderFactory.cs index d4f155b..8f38386 100644 --- a/src/Tableau.Migration/Engine/Endpoints/Search/ManifestSourceContentReferenceFinderFactory.cs +++ b/src/Tableau.Migration/Engine/Endpoints/Search/ManifestSourceContentReferenceFinderFactory.cs @@ -39,7 +39,7 @@ public ManifestSourceContentReferenceFinderFactory(IServiceProvider services) /// public IContentReferenceFinder ForContentType() - where TContent : IContentReference + where TContent : class, IContentReference => _services.GetRequiredService>(); } } diff --git a/src/Tableau.Migration/Engine/Endpoints/TableauApiDestinationEndpoint.cs b/src/Tableau.Migration/Engine/Endpoints/TableauApiDestinationEndpoint.cs index e3cf303..5d48c6d 100644 --- a/src/Tableau.Migration/Engine/Endpoints/TableauApiDestinationEndpoint.cs +++ b/src/Tableau.Migration/Engine/Endpoints/TableauApiDestinationEndpoint.cs @@ -16,7 +16,6 @@ using System; using System.Collections.Generic; -using System.Collections.Immutable; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; @@ -25,6 +24,7 @@ using Tableau.Migration.Content.Files; using Tableau.Migration.Content.Permissions; using Tableau.Migration.Engine.Endpoints.Search; +using Tableau.Migration.Resources; namespace Tableau.Migration.Engine.Endpoints { @@ -40,11 +40,13 @@ public class TableauApiDestinationEndpoint : TableauApiEndpointBase, IDestinatio /// The configuration options for connecting to the destination endpoint APIs. /// A destination finder factory. /// The file store to use. + /// A string localizer. 
public TableauApiDestinationEndpoint(IServiceScopeFactory serviceScopeFactory, ITableauApiEndpointConfiguration config, ManifestDestinationContentReferenceFinderFactory finderFactory, - IContentFileStore fileStore) - : base(serviceScopeFactory, config, finderFactory, fileStore) + IContentFileStore fileStore, + ISharedResourcesLocalizer localizer) + : base(serviceScopeFactory, config, finderFactory, fileStore, localizer) { } /// diff --git a/src/Tableau.Migration/Engine/Endpoints/TableauApiEndpointBase.cs b/src/Tableau.Migration/Engine/Endpoints/TableauApiEndpointBase.cs index 937cc63..696da34 100644 --- a/src/Tableau.Migration/Engine/Endpoints/TableauApiEndpointBase.cs +++ b/src/Tableau.Migration/Engine/Endpoints/TableauApiEndpointBase.cs @@ -25,6 +25,7 @@ using Tableau.Migration.Content.Permissions; using Tableau.Migration.Content.Search; using Tableau.Migration.Paging; +using Tableau.Migration.Resources; namespace Tableau.Migration.Engine.Endpoints { @@ -33,6 +34,7 @@ namespace Tableau.Migration.Engine.Endpoints /// public abstract class TableauApiEndpointBase : IMigrationApiEndpoint { + private readonly ISharedResourcesLocalizer _localizer; private IAsyncDisposableResult? _signInResult; /// @@ -55,11 +57,11 @@ public ISitesApiClient SiteApi { if (_signInResult is null) { - throw new InvalidOperationException("API endpoint is not initialized."); + throw new InvalidOperationException(_localizer[SharedResourceKeys.ApiEndpointNotInitializedError]); } else if (_signInResult.Value is null) { - throw new InvalidOperationException("API endpoint does not have a valid site API client."); + throw new InvalidOperationException(_localizer[SharedResourceKeys.ApiEndpointDoesnotHaveValidSiteError]); } return _signInResult.Value; @@ -73,16 +75,19 @@ public ISitesApiClient SiteApi /// The configuration options for connecting to the endpoint APIs. /// The content finder factory to supply to the API client. /// The file store to use. + /// A string localizer. public TableauApiEndpointBase(IServiceScopeFactory serviceScopeFactory, ITableauApiEndpointConfiguration config, IContentReferenceFinderFactory finderFactory, - IContentFileStore fileStore) + IContentFileStore fileStore, + ISharedResourcesLocalizer localizer) { EndpointScope = serviceScopeFactory.CreateAsyncScope(); var apiClientFactory = EndpointScope.ServiceProvider.GetRequiredService(); ServerApi = apiClientFactory.Initialize(config.SiteConnectionConfiguration, finderFactory, fileStore); + _localizer = localizer; } #region - IAsyncDisposable Implementation - diff --git a/src/Tableau.Migration/Engine/Endpoints/TableauApiSourceEndpoint.cs b/src/Tableau.Migration/Engine/Endpoints/TableauApiSourceEndpoint.cs index 2d7b33c..ab66df7 100644 --- a/src/Tableau.Migration/Engine/Endpoints/TableauApiSourceEndpoint.cs +++ b/src/Tableau.Migration/Engine/Endpoints/TableauApiSourceEndpoint.cs @@ -19,6 +19,7 @@ using Microsoft.Extensions.DependencyInjection; using Tableau.Migration.Content.Files; using Tableau.Migration.Engine.Endpoints.Search; +using Tableau.Migration.Resources; namespace Tableau.Migration.Engine.Endpoints { @@ -34,11 +35,13 @@ public class TableauApiSourceEndpoint : TableauApiEndpointBase, ISourceApiEndpoi /// The configuration options for connecting to the source endpoint APIs. /// A source manifest finder factory. /// The file store to use. + /// A string localizer. 
public TableauApiSourceEndpoint(IServiceScopeFactory serviceScopeFactory, ITableauApiEndpointConfiguration config, ManifestSourceContentReferenceFinderFactory finderFactory, - IContentFileStore fileStore) - : base(serviceScopeFactory, config, finderFactory, fileStore) + IContentFileStore fileStore, + ISharedResourcesLocalizer localizer) + : base(serviceScopeFactory, config, finderFactory, fileStore, localizer) { } /// diff --git a/src/Tableau.Migration/Engine/IServiceCollectionExtensions.cs b/src/Tableau.Migration/Engine/IServiceCollectionExtensions.cs index 8f216af..d3202fb 100644 --- a/src/Tableau.Migration/Engine/IServiceCollectionExtensions.cs +++ b/src/Tableau.Migration/Engine/IServiceCollectionExtensions.cs @@ -95,11 +95,12 @@ internal static IServiceCollection AddMigrationEngine(this IServiceCollection se //Caches/Content Finders //Register concrete types so that the easy way to get interface types is through IMigrationPipeline. + services.AddScoped(typeof(BulkSourceCache<>)); services.AddScoped(typeof(ManifestSourceContentReferenceFinder<>)); services.AddScoped(); services.AddScoped(typeof(BulkDestinationCache<>)); - services.AddScoped(typeof(BulkDestinationProjectCache)); + services.AddScoped(); services.AddScoped(typeof(ManifestDestinationContentReferenceFinder<>)); services.AddScoped(); diff --git a/src/Tableau.Migration/Engine/MigrationDirectoryContentFileStore.cs b/src/Tableau.Migration/Engine/MigrationDirectoryContentFileStore.cs index 3d641ed..b02c379 100644 --- a/src/Tableau.Migration/Engine/MigrationDirectoryContentFileStore.cs +++ b/src/Tableau.Migration/Engine/MigrationDirectoryContentFileStore.cs @@ -31,9 +31,15 @@ public class MigrationDirectoryContentFileStore : DirectoryContentFileStore /// The file system. /// The path resolver. /// The configuration reader. + /// The memory stream manager to user. /// The migration input to get the migration ID from. - public MigrationDirectoryContentFileStore(IFileSystem fileSystem, IContentFilePathResolver pathResolver, IConfigReader configReader, IMigrationInput migrationInput) - : base(fileSystem, pathResolver, configReader, $"migration-{migrationInput.MigrationId:N}") + public MigrationDirectoryContentFileStore( + IFileSystem fileSystem, + IContentFilePathResolver pathResolver, + IConfigReader configReader, + IMemoryStreamManager memoryStreamManager, + IMigrationInput migrationInput) + : base(fileSystem, pathResolver, configReader, memoryStreamManager, $"migration-{migrationInput.MigrationId:N}") { } } } diff --git a/src/Tableau.Migration/Engine/Migrators/ContentMigrator.cs b/src/Tableau.Migration/Engine/Migrators/ContentMigrator.cs index 9eafd56..c9f00a6 100644 --- a/src/Tableau.Migration/Engine/Migrators/ContentMigrator.cs +++ b/src/Tableau.Migration/Engine/Migrators/ContentMigrator.cs @@ -70,7 +70,7 @@ public ContentMigrator( /// /// Gets the configured batch size. /// - protected int BatchSize => _configReader.Get().BatchSize; + protected int BatchSize => _configReader.Get().BatchSize; /// /// Creates a migration item context object for a given source content item and manifest entry. 
diff --git a/src/Tableau.Migration/Engine/Pipelines/IMigrationPipeline.cs b/src/Tableau.Migration/Engine/Pipelines/IMigrationPipeline.cs index af23a69..47a0400 100644 --- a/src/Tableau.Migration/Engine/Pipelines/IMigrationPipeline.cs +++ b/src/Tableau.Migration/Engine/Pipelines/IMigrationPipeline.cs @@ -60,13 +60,21 @@ IContentItemPreparer GetItemPreparer() where TContent : class where TPublish : class; + /// + /// Gets the source cache for the given content type. + /// + /// The content type. + /// The source cache. + IContentReferenceCache CreateSourceCache() + where TContent : class, IContentReference; + /// /// Gets the destination cache for the given content type. /// /// The content type. /// The destination cache. IContentReferenceCache CreateDestinationCache() - where TContent : IContentReference; + where TContent : class, IContentReference; /// /// Gets the destination content finder for the given content type. @@ -74,7 +82,7 @@ IContentReferenceCache CreateDestinationCache() /// The content type. /// The destination content finder. IMappedContentReferenceFinder CreateDestinationFinder() - where TContent : IContentReference; + where TContent : class, IContentReference; /// /// Gets the destination locked project cache. diff --git a/src/Tableau.Migration/Engine/Pipelines/MigrationPipelineBase.cs b/src/Tableau.Migration/Engine/Pipelines/MigrationPipelineBase.cs index a57229c..f946e91 100644 --- a/src/Tableau.Migration/Engine/Pipelines/MigrationPipelineBase.cs +++ b/src/Tableau.Migration/Engine/Pipelines/MigrationPipelineBase.cs @@ -108,9 +108,14 @@ public virtual IContentItemPreparer GetItemPreparer + public virtual IContentReferenceCache CreateSourceCache() + where TContent : class, IContentReference + => Services.GetRequiredService>(); + /// public virtual IContentReferenceCache CreateDestinationCache() - where TContent : IContentReference + where TContent : class, IContentReference { switch (typeof(TContent)) { @@ -119,12 +124,11 @@ public virtual IContentReferenceCache CreateDestinationCache() default: return Services.GetRequiredService>(); } - } /// public virtual IMappedContentReferenceFinder CreateDestinationFinder() - where TContent : IContentReference + where TContent : class, IContentReference { return Services.GetRequiredService>(); } diff --git a/src/Tableau.Migration/Engine/Pipelines/MigrationPipelineContentType.cs b/src/Tableau.Migration/Engine/Pipelines/MigrationPipelineContentType.cs index 114e6fb..ec98f1c 100644 --- a/src/Tableau.Migration/Engine/Pipelines/MigrationPipelineContentType.cs +++ b/src/Tableau.Migration/Engine/Pipelines/MigrationPipelineContentType.cs @@ -15,7 +15,9 @@ // using System; +using System.Collections.Immutable; using System.Linq; +using Tableau.Migration.Content; namespace Tableau.Migration.Engine.Pipelines { @@ -24,26 +26,94 @@ namespace Tableau.Migration.Engine.Pipelines /// that a pipeline migrates. /// /// The content type. - /// The publish type. - /// The post-publish result type. - public record MigrationPipelineContentType(Type ContentType, Type PublishType, Type ResultType) + public record MigrationPipelineContentType(Type ContentType) { /// - /// Creates a new instance. + /// Gets the user . /// - /// The single content type also used for publish and result types. - public MigrationPipelineContentType(Type contentType) - : this(contentType, contentType, contentType) - { } + public static readonly MigrationPipelineContentType Users = new MigrationPipelineContentType(); /// - /// Creates a new instance. + /// Gets the groups . 
+ /// + public static readonly MigrationPipelineContentType Groups = new MigrationPipelineContentType() + .WithPublishType(); + + /// + /// Gets the projects . + /// + public static readonly MigrationPipelineContentType Projects = new MigrationPipelineContentType(); + + /// + /// Gets the data sources . + /// + public static readonly MigrationPipelineContentType DataSources = new MigrationPipelineContentType() + .WithPublishType() + .WithResultType(); + + /// + /// Gets the workbooks . + /// + public static readonly MigrationPipelineContentType Workbooks = new MigrationPipelineContentType() + .WithPublishType() + .WithResultType(); + + /// + /// Gets the views . + /// + public static readonly MigrationPipelineContentType Views = new MigrationPipelineContentType(); + + /// + /// Gets a collection of all s. + /// + public static readonly IImmutableList All = ImmutableArray.Create( + Users, + Groups, + Projects, + DataSources, + Workbooks + ); + + /// + /// Gets the publish type. + /// + public Type PublishType { get; private init; } = ContentType; + + /// + /// Gets the result type. + /// + public Type ResultType { get; private init; } = ContentType; + + /// + /// Gets the types for this instance. + /// + public IImmutableList Types => new[] { ContentType, PublishType, ResultType }.Distinct().ToImmutableArray(); + + /// + /// Creates a new instance with the specified publish type. /// - /// The shared content type also used for the result type. /// The publish type. - public MigrationPipelineContentType(Type contentType, Type publishType) - : this(contentType, publishType, contentType) - { } + public MigrationPipelineContentType WithPublishType(Type publishType) + => new(ContentType) { PublishType = publishType, ResultType = ResultType }; + + /// + /// Creates a new instance with the specified publish type. + /// + public MigrationPipelineContentType WithPublishType() + => WithPublishType(typeof(TPublish)); + + /// + /// Creates a new instance with the specified result type. + /// + /// The result type. + public MigrationPipelineContentType WithResultType(Type resultType) + => new(ContentType) { PublishType = PublishType, ResultType = resultType }; + + /// + /// Creates a new instance with the specified result type. + /// + public MigrationPipelineContentType WithResultType() + => WithResultType(typeof(TResult)); /// /// Gets the value if it implements the given interface, or null if it does not. @@ -60,12 +130,22 @@ public MigrationPipelineContentType(Type contentType, Type publishType) => HasInterface(ContentType, @interface) ? new[] { ContentType } : null; /// - /// Gets the and array if it implements the given interface, or null if it does not. + /// Gets the and array if it implements the given interface, or null if it does not. /// /// The interface to search for. public Type[]? GetPostPublishTypesForInterface(Type @interface) => HasInterface(PublishType, @interface) ? new[] { PublishType, ResultType } : null; + /// + /// Gets the config key for this content type. + /// + /// The config key string. + public string GetConfigKey() + { + var typeName = ContentType.Name; + return typeName.TrimStart('I'); + } + private static bool HasInterface(Type t, Type @interface) => t.GetInterfaces().Contains(@interface); } @@ -74,29 +154,8 @@ private static bool HasInterface(Type t, Type @interface) /// Object that represents a definition of a content type /// that a pipeline migrates. /// - /// The content and result type. - /// The publish type. - /// The result type. 
- public record MigrationPipelineContentType() - : MigrationPipelineContentType(typeof(TContent), typeof(TPublish), typeof(TResult)) - { } - - /// - /// Object that represents a definition of a content type - /// that a pipeline migrates. - /// - /// The content and result type. - /// The publish type. - public record MigrationPipelineContentType() - : MigrationPipelineContentType(typeof(TContent), typeof(TPublish), typeof(TContent)) - { } - - /// - /// Object that represents a definition of a content type - /// that a pipeline migrates. - /// - /// The content and publish type. + /// The content, publish, result, and list type. public sealed record MigrationPipelineContentType() - : MigrationPipelineContentType() + : MigrationPipelineContentType(typeof(TContent)) { } } diff --git a/src/Tableau.Migration/Engine/Pipelines/ServerToCloudMigrationPipeline.cs b/src/Tableau.Migration/Engine/Pipelines/ServerToCloudMigrationPipeline.cs index 5777a03..cff84b9 100644 --- a/src/Tableau.Migration/Engine/Pipelines/ServerToCloudMigrationPipeline.cs +++ b/src/Tableau.Migration/Engine/Pipelines/ServerToCloudMigrationPipeline.cs @@ -18,6 +18,7 @@ using System.Collections.Generic; using System.Collections.Immutable; using Microsoft.Extensions.DependencyInjection; +using Tableau.Migration.Config; using Tableau.Migration.Content; using Tableau.Migration.Engine.Actions; using Tableau.Migration.Engine.Migrators.Batch; @@ -33,22 +34,28 @@ public class ServerToCloudMigrationPipeline : MigrationPipelineBase /// Content types that are supported for migrations. /// public static readonly ImmutableArray ContentTypes = - new MigrationPipelineContentType[] - { - new MigrationPipelineContentType(), - new MigrationPipelineContentType(), - new MigrationPipelineContentType(), - new MigrationPipelineContentType(), - new MigrationPipelineContentType(), - }.ToImmutableArray(); + [ + MigrationPipelineContentType.Users, + MigrationPipelineContentType.Groups, + MigrationPipelineContentType.Projects, + MigrationPipelineContentType.DataSources, + MigrationPipelineContentType.Workbooks, + ]; + + private readonly IConfigReader _configReader; /// /// Creates a new object. /// /// - public ServerToCloudMigrationPipeline(IServiceProvider services) + /// A config reader to get the REST API configuration. 
+ public ServerToCloudMigrationPipeline(IServiceProvider services, + IConfigReader configReader) : base(services) - { } + { + _configReader = configReader; + } + /// protected override IEnumerable BuildPipeline() @@ -72,15 +79,19 @@ public override IContentBatchMigrator GetBatchMigrator() switch (typeof(TContent)) { case Type user when user == typeof(IUser): - return Services.GetRequiredService>(); + if (_configReader.Get().BatchPublishingEnabled) + { + return Services.GetRequiredService>(); + } + return Services.GetRequiredService>(); case Type group when group == typeof(IGroup): return Services.GetRequiredService>(); case Type project when project == typeof(IProject): return Services.GetRequiredService>(); case Type dataSource when dataSource == typeof(IDataSource): - return Services.GetRequiredService>(); + return Services.GetRequiredService>(); case Type worbook when worbook == typeof(IWorkbook): - return Services.GetRequiredService>(); + return Services.GetRequiredService>(); default: return base.GetBatchMigrator(); } diff --git a/src/Tableau.Migration/FilePath.cs b/src/Tableau.Migration/FilePath.cs new file mode 100644 index 0000000..2ad0970 --- /dev/null +++ b/src/Tableau.Migration/FilePath.cs @@ -0,0 +1,50 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Collections.Immutable; +using System.IO; +using Tableau.Migration.Api.Rest.Models.Types; + +namespace Tableau.Migration +{ + internal record FilePath + { + // This is not an exhaustive list, just common extensions that are "zippy". + internal static readonly IImmutableSet ZipExtensions = ImmutableHashSet.Create( + StringComparer.OrdinalIgnoreCase, + WorkbookFileTypes.Twbx, + DataSourceFileTypes.Tdsx, + "zip", + "7z", + "gz", + "rar" + ); + + public readonly string FileName; + public readonly string? Extension; + public readonly bool? IsZipFile; + + public FilePath(string filePath) + { + FileName = Path.GetFileName(filePath); + Extension = Path.GetExtension(filePath)?.TrimStart('.'); + + if (!String.IsNullOrWhiteSpace(Extension)) + IsZipFile = ZipExtensions.Contains(Extension); + } + } +} diff --git a/src/Tableau.Migration/IMemoryStreamManager.cs b/src/Tableau.Migration/IMemoryStreamManager.cs new file mode 100644 index 0000000..f4e0f36 --- /dev/null +++ b/src/Tableau.Migration/IMemoryStreamManager.cs @@ -0,0 +1,209 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using Microsoft.IO; + +namespace Tableau.Migration +{ + /// + /// Wrapper interface for . + /// + public interface IMemoryStreamManager + { + /// + /// Gets the wrapped instance. + /// + RecyclableMemoryStreamManager Inner { get; } + + /// + /// Gets the settings for configuring stream behavior. + /// + RecyclableMemoryStreamManager.Options Settings { get; } + + /// + /// Retrieve a new object with no tag and a default initial capacity. + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// A . + RecyclableMemoryStream GetStream(); + + /// + /// Retrieve a new object with no tag and a default initial capacity. + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// A unique identifier which can be used to trace usages of the stream. + /// A . + RecyclableMemoryStream GetStream(Guid id); + + /// + /// Retrieve a new object with the given tag and a default initial capacity. + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// A tag which can be used to track the source of the stream. + /// A . + RecyclableMemoryStream GetStream(string? tag); + + /// + /// Retrieve a new object with the given tag and a default initial capacity. + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// A unique identifier which can be used to trace usages of the stream. + /// A tag which can be used to track the source of the stream. + /// A . + RecyclableMemoryStream GetStream(Guid id, string? tag); + + /// + /// Retrieve a new object with the given tag and at least the given capacity. + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// A tag which can be used to track the source of the stream. + /// The minimum desired capacity for the stream. + /// A . + RecyclableMemoryStream GetStream(string? tag, long requiredSize); + + /// + /// Retrieve a new object with the given tag and at least the given capacity. + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// A unique identifier which can be used to trace usages of the stream. + /// A tag which can be used to track the source of the stream. + /// The minimum desired capacity for the stream. + /// A . + RecyclableMemoryStream GetStream(Guid id, string? tag, long requiredSize); + + /// + /// Retrieve a new object with the given tag and at least the given capacity, possibly using + /// a single contiguous underlying buffer. + /// + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// + /// Retrieving a which provides a single contiguous buffer can be useful in situations + /// where the initial size is known and it is desirable to avoid copying data between the smaller underlying + /// buffers to a single large one. This is most helpful when you know that you will always call + /// on the underlying stream. + /// + /// + /// A unique identifier which can be used to trace usages of the stream. + /// A tag which can be used to track the source of the stream. + /// The minimum desired capacity for the stream. + /// Whether to attempt to use a single contiguous buffer. + /// A . + RecyclableMemoryStream GetStream(Guid id, string? 
tag, long requiredSize, bool asContiguousBuffer); + + /// + /// Retrieve a new object with the given tag and at least the given capacity, possibly using + /// a single contiguous underlying buffer. + /// + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// + /// Retrieving a which provides a single contiguous buffer can be useful in situations + /// where the initial size is known and it is desirable to avoid copying data between the smaller underlying + /// buffers to a single large one. This is most helpful when you know that you will always call + /// on the underlying stream. + /// + /// + /// A tag which can be used to track the source of the stream. + /// The minimum desired capacity for the stream. + /// Whether to attempt to use a single contiguous buffer. + /// A . + RecyclableMemoryStream GetStream(string? tag, long requiredSize, bool asContiguousBuffer); + + /// + /// Retrieve a new object with the given tag and with contents copied from the provided + /// buffer. The provided buffer is not wrapped or used after construction. + /// + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// The new stream's position is set to the beginning of the stream when returned. + /// + /// A unique identifier which can be used to trace usages of the stream. + /// A tag which can be used to track the source of the stream. + /// The byte buffer to copy data from. + /// The offset from the start of the buffer to copy from. + /// The number of bytes to copy from the buffer. + /// A . + RecyclableMemoryStream GetStream(Guid id, string? tag, byte[] buffer, int offset, int count); + + /// + /// Retrieve a new object with the contents copied from the provided + /// buffer. The provided buffer is not wrapped or used after construction. + /// + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// The new stream's position is set to the beginning of the stream when returned. + /// + /// The byte buffer to copy data from. + /// A . + RecyclableMemoryStream GetStream(byte[] buffer); + + /// + /// Retrieve a new object with the given tag and with contents copied from the provided + /// buffer. The provided buffer is not wrapped or used after construction. + /// + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// The new stream's position is set to the beginning of the stream when returned. + /// + /// A tag which can be used to track the source of the stream. + /// The byte buffer to copy data from. + /// The offset from the start of the buffer to copy from. + /// The number of bytes to copy from the buffer. + /// A . + RecyclableMemoryStream GetStream(string? tag, byte[] buffer, int offset, int count); + + /// + /// Retrieve a new object with the given tag and with contents copied from the provided + /// buffer. The provided buffer is not wrapped or used after construction. + /// + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// The new stream's position is set to the beginning of the stream when returned. + /// + /// A unique identifier which can be used to trace usages of the stream. + /// A tag which can be used to track the source of the stream. + /// The byte buffer to copy data from. + /// A . + RecyclableMemoryStream GetStream(Guid id, string? tag, ReadOnlySpan buffer); + + /// + /// Retrieve a new object with the contents copied from the provided + /// buffer. 
The provided buffer is not wrapped or used after construction. + /// + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// The new stream's position is set to the beginning of the stream when returned. + /// + /// The byte buffer to copy data from. + /// A . + RecyclableMemoryStream GetStream(ReadOnlySpan buffer); + + /// + /// Retrieve a new object with the given tag and with contents copied from the provided + /// buffer. The provided buffer is not wrapped or used after construction. + /// + /// + /// The stream's ID and tag are used for tracking purposes and not for caching. + /// The new stream's position is set to the beginning of the stream when returned. + /// + /// A tag which can be used to track the source of the stream. + /// The byte buffer to copy data from. + /// A . + RecyclableMemoryStream GetStream(string? tag, ReadOnlySpan buffer); + } +} diff --git a/src/Tableau.Migration/MemoryStreamManager.cs b/src/Tableau.Migration/MemoryStreamManager.cs new file mode 100644 index 0000000..58e173f --- /dev/null +++ b/src/Tableau.Migration/MemoryStreamManager.cs @@ -0,0 +1,74 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using Microsoft.IO; + +namespace Tableau.Migration +{ + /// + /// Default implementation. Wrapper class for . + /// + internal class MemoryStreamManager : IMemoryStreamManager + { + public static readonly MemoryStreamManager Instance = new(); + + public RecyclableMemoryStreamManager Inner { get; } + + public MemoryStreamManager() + : this(new RecyclableMemoryStreamManager.Options()) + { } + + public MemoryStreamManager(RecyclableMemoryStreamManager.Options options) + { + Inner = new(options); + } + + #region - IMemoryManager - + + public virtual RecyclableMemoryStreamManager.Options Settings => Inner.Settings; + + public virtual RecyclableMemoryStream GetStream() => Inner.GetStream(); + + public virtual RecyclableMemoryStream GetStream(Guid id) => Inner.GetStream(id); + + public virtual RecyclableMemoryStream GetStream(string? tag) => Inner.GetStream(tag); + + public virtual RecyclableMemoryStream GetStream(Guid id, string? tag) => Inner.GetStream(id, tag); + + public virtual RecyclableMemoryStream GetStream(string? tag, long requiredSize) => Inner.GetStream(tag, requiredSize); + + public virtual RecyclableMemoryStream GetStream(Guid id, string? tag, long requiredSize) => Inner.GetStream(id, tag, requiredSize); + + public virtual RecyclableMemoryStream GetStream(Guid id, string? tag, long requiredSize, bool asContiguousBuffer) => Inner.GetStream(id, tag, requiredSize, asContiguousBuffer); + + public virtual RecyclableMemoryStream GetStream(string? tag, long requiredSize, bool asContiguousBuffer) => Inner.GetStream(tag, requiredSize, asContiguousBuffer); + + public virtual RecyclableMemoryStream GetStream(Guid id, string? 
tag, byte[] buffer, int offset, int count) => Inner.GetStream(id, tag, buffer, offset, count); + + public virtual RecyclableMemoryStream GetStream(byte[] buffer) => Inner.GetStream(buffer); + + public virtual RecyclableMemoryStream GetStream(string? tag, byte[] buffer, int offset, int count) => Inner.GetStream(tag, buffer, offset, count); + + public virtual RecyclableMemoryStream GetStream(Guid id, string? tag, ReadOnlySpan buffer) => Inner.GetStream(id, tag, buffer); + + public virtual RecyclableMemoryStream GetStream(ReadOnlySpan buffer) => Inner.GetStream(buffer); + + public virtual RecyclableMemoryStream GetStream(string? tag, ReadOnlySpan buffer) => Inner.GetStream(tag, buffer); + + #endregion + } +} diff --git a/src/Tableau.Migration/Net/DefaultHttpClient.cs b/src/Tableau.Migration/Net/DefaultHttpClient.cs index eacd5b2..022385a 100644 --- a/src/Tableau.Migration/Net/DefaultHttpClient.cs +++ b/src/Tableau.Migration/Net/DefaultHttpClient.cs @@ -35,7 +35,7 @@ public DefaultHttpClient( _innerHttpClient = httpClient; _serializer = serializer; - //Timeout is controlled through a request timeout policy instead of the HTTP client. + //Timeout is controlled through a request timeout strategy instead of the HTTP client. _innerHttpClient.Timeout = Timeout.InfiniteTimeSpan; } diff --git a/src/Tableau.Migration/Net/HttpContentExtensions.cs b/src/Tableau.Migration/Net/HttpContentExtensions.cs index 31c7cbf..b0c45d3 100644 --- a/src/Tableau.Migration/Net/HttpContentExtensions.cs +++ b/src/Tableau.Migration/Net/HttpContentExtensions.cs @@ -17,7 +17,6 @@ using System; using System.Globalization; using System.Net.Http; -using System.Text; using System.Threading; using System.Threading.Tasks; @@ -48,7 +47,7 @@ internal static async Task ReadAsEncodedStringAsync(this HttpContent con //but that we know about. var decoded = await content.ReadAsByteArrayAsync(cancel).ConfigureAwait(false); - return Encoding.UTF8.GetString(decoded); + return Constants.DefaultEncoding.GetString(decoded); } //fall back to the standard way of reading strings. diff --git a/src/Tableau.Migration/Net/HttpContentRequestBuilder.cs b/src/Tableau.Migration/Net/HttpContentRequestBuilder.cs index af13ef8..3e79637 100644 --- a/src/Tableau.Migration/Net/HttpContentRequestBuilder.cs +++ b/src/Tableau.Migration/Net/HttpContentRequestBuilder.cs @@ -17,7 +17,6 @@ using System; using System.Net.Http; using System.Net.Http.Headers; -using System.Text; namespace Tableau.Migration.Net { @@ -95,7 +94,7 @@ public virtual TBuilderInterface WithContent(string content, MediaTypeWithQualit // this can be overridden by calling Accept again with another type. 
Accept(contentType, false); - Request.Content = new StringContent(content, Encoding.UTF8, contentType.MediaType!); + Request.Content = new StringContent(content, Constants.DefaultEncoding, contentType.MediaType!); return (TBuilderImpl)this; } diff --git a/src/Tableau.Migration/Net/HttpContentSerializer.cs b/src/Tableau.Migration/Net/HttpContentSerializer.cs index 8a89ae1..d47de80 100644 --- a/src/Tableau.Migration/Net/HttpContentSerializer.cs +++ b/src/Tableau.Migration/Net/HttpContentSerializer.cs @@ -17,7 +17,6 @@ using System; using System.Net.Http; using System.Net.Http.Headers; -using System.Text; using System.Threading; using System.Threading.Tasks; using Tableau.Migration.Api.Rest.Models; @@ -93,9 +92,9 @@ public HttpContentSerializer(ITableauSerializer serializer) return null; #if NET7_0_OR_GREATER - return new StringContent(stringContent, Encoding.UTF8, contentType); + return new StringContent(stringContent, Constants.DefaultEncoding, contentType); #else - return new StringContent(stringContent, Encoding.UTF8, contentType.MediaType); + return new StringContent(stringContent, Constants.DefaultEncoding, contentType.MediaType); #endif } diff --git a/src/Tableau.Migration/Net/IServiceCollectionExtensions.cs b/src/Tableau.Migration/Net/IServiceCollectionExtensions.cs index 56651b1..a4b214d 100644 --- a/src/Tableau.Migration/Net/IServiceCollectionExtensions.cs +++ b/src/Tableau.Migration/Net/IServiceCollectionExtensions.cs @@ -24,7 +24,7 @@ using Tableau.Migration.Api; using Tableau.Migration.Config; using Tableau.Migration.Net.Handlers; -using Tableau.Migration.Net.Policies; +using Tableau.Migration.Net.Resilience; using Tableau.Migration.Net.Rest; using Tableau.Migration.Net.Simulation; @@ -45,6 +45,7 @@ internal static IServiceCollection AddHttpServices( { services.AddSharedResourcesLocalization(); + services.TryAddSingleton(TimeProvider.System); services.TryAddSingleton(TableauSerializer.Instance); services.AddSingleton(); @@ -55,18 +56,6 @@ internal static IServiceCollection AddHttpServices( .AddSingleton() .AddSingleton() .AddTransient() - .AddScoped() - .AddScoped() - // All Handlers must be transients - // Their lifetime will be managed by the IHttpClientFactory - .AddTransient(provider => - { - var policyBuilder = provider.GetRequiredService(); - - return new PolicyHttpMessageHandler( - httpRequest => policyBuilder.GetRequestPolicies( - httpRequest)); - }) .AddTransient() .AddTransient() .AddTransient() @@ -83,25 +72,14 @@ internal static IServiceCollection AddHttpServices( nameof(DefaultHttpClient)), provider.GetRequiredService()); }) - .AddScoped() - .AddScoped() - .AddScoped() - .AddScoped() - .AddScoped() - .AddScoped() - .AddScoped() - .AddScoped() - .AddScoped() - .AddScoped() - // Policies builders. The order here is important for the dependency injection - // It injects all policies as an enumerator, on the same order they are registered here. - .AddScoped() - .AddScoped() - .AddScoped() - .AddScoped() - .AddScoped(); - - services + // Resilience strategy builders - the order here is important for dependency injection. + .AddTransient() + .AddTransient() + .AddTransient() + .AddTransient() + .AddTransient(); + + var httpClientBuilder = services // https://learn.microsoft.com/en-us/aspnet/core/fundamentals/http-requests?view=aspnetcore-7.0#httpclient-and-lifetime-management // The default handler lifetime is two minutes. 
The default value can be overridden on a per named client basis .AddScopedHttpClient(nameof(DefaultHttpClient)) @@ -109,8 +87,29 @@ internal static IServiceCollection AddHttpServices( // https://learn.microsoft.com/en-us/aspnet/core/fundamentals/http-requests?view=aspnetcore-7.0#outgoing-request-middleware // Multiple handlers can be registered in the order that they should execute. // Each handler wraps the next handler until the final HttpClientHandler executes the request. - .AddHttpMessageHandler() - .AddHttpMessageHandler() + .AddHttpMessageHandler(); + + httpClientBuilder.AddResilienceHandler(Constants.USER_AGENT_PREFIX, static (pipelineBuilder, ctx) => + { + ctx.EnableReloads(nameof(MigrationSdkOptions)); + + var options = ctx.ServiceProvider.GetRequiredService().Get(); + + var builders = ctx.ServiceProvider.GetServices(); + foreach (var builder in builders) + { + Action? onPipelineDisposed = null; + + builder.Build(pipelineBuilder, options, ref onPipelineDisposed); + + if(onPipelineDisposed is not null) + { + ctx.OnPipelineDisposed(onPipelineDisposed); + } + } + }); + + httpClientBuilder .AddHttpMessageHandler() .AddHttpMessageHandler() .AddHttpMessageHandler(); //Must be last for simulation to function. diff --git a/src/Tableau.Migration/Net/ObjectExtensions.cs b/src/Tableau.Migration/Net/ObjectExtensions.cs index 535a942..1dce347 100644 --- a/src/Tableau.Migration/Net/ObjectExtensions.cs +++ b/src/Tableau.Migration/Net/ObjectExtensions.cs @@ -15,7 +15,6 @@ // using System.IO; -using System.Text; using System.Text.Json; using System.Xml; using System.Xml.Serialization; @@ -31,7 +30,7 @@ public static string ToXml(this T obj) var settings = new XmlWriterSettings() { OmitXmlDeclaration = true, - Encoding = Encoding.UTF8 + Encoding = Constants.DefaultEncoding }; var serializer = new XmlSerializer(obj?.GetType() ?? typeof(T)); diff --git a/src/Tableau.Migration/Net/Policies/ClientThrottlePolicyBuilder.cs b/src/Tableau.Migration/Net/Policies/ClientThrottlePolicyBuilder.cs deleted file mode 100644 index cb98d31..0000000 --- a/src/Tableau.Migration/Net/Policies/ClientThrottlePolicyBuilder.cs +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System; -using System.Net.Http; -using Polly; -using Tableau.Migration.Config; - -namespace Tableau.Migration.Net.Policies -{ - internal class ClientThrottlePolicyBuilder - : IHttpPolicyBuilder - { - private readonly IConfigReader _configReader; - - public ClientThrottlePolicyBuilder( - IConfigReader configReader) - { - _configReader = configReader; - } - - public IAsyncPolicy? 
Build( - HttpRequestMessage httpRequest) - { - var resilienceOptions = _configReader - .Get() - .Network - .Resilience; - - if (!resilienceOptions.ClientThrottleEnabled) - { - return null; - } - - if (httpRequest.Method == HttpMethod.Get) - { - return BuildReadRateLimitPolicy( - resilienceOptions.MaxReadRequests, - resilienceOptions.MaxReadRequestsInterval, - resilienceOptions.MaxBurstReadRequests); - } - - return BuildPublishRateLimitPolicy( - resilienceOptions.MaxPublishRequests, - resilienceOptions.MaxPublishRequestsInterval, - resilienceOptions.MaxBurstPublishRequests); - } - - private static IAsyncPolicy? BuildReadRateLimitPolicy( - int maxReadRequests, - TimeSpan maxReadRequestsInterval, - int maxBurstReadRequests) - { - return Policy.RateLimitAsync( - maxReadRequests, - maxReadRequestsInterval, - maxBurstReadRequests); - } - - private static IAsyncPolicy BuildPublishRateLimitPolicy( - int maxPublishRequests, - TimeSpan maxPublishRequestsInterval, - int maxBurstPublishRequests) - { - return Policy.RateLimitAsync( - maxPublishRequests, - maxPublishRequestsInterval, - maxBurstPublishRequests); - } - } -} diff --git a/src/Tableau.Migration/Net/Policies/HttpPolicyWrapBuilder.cs b/src/Tableau.Migration/Net/Policies/HttpPolicyWrapBuilder.cs deleted file mode 100644 index 357163a..0000000 --- a/src/Tableau.Migration/Net/Policies/HttpPolicyWrapBuilder.cs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System.Collections.Generic; -using System.Net.Http; -using Polly; - -namespace Tableau.Migration.Net.Policies -{ - internal class HttpPolicyWrapBuilder - : IHttpPolicyWrapBuilder - { - private readonly IEnumerable _httpPolicyBuilders; - - public HttpPolicyWrapBuilder( - IEnumerable httpPolicyBuilders) - { - _httpPolicyBuilders = httpPolicyBuilders; - } - - public IAsyncPolicy GetRequestPolicies( - HttpRequestMessage httpRequest) - { - // TODO: Define policies for Http Request Messages - // Default: NoOp - // W-12406164: Network Client - Client Throttling - Rate Limit - // Additional policies that could be defined later: - // Circuit-breaker - // Cache - // Fallback - var policies = new List>(); - - foreach (var policyBuilder in _httpPolicyBuilders) - { - var policy = policyBuilder.Build(httpRequest); - - if (policy is not null) - { - policies.Add(policy); - } - } - - if (policies.Count == 1) - { - return policies[0]; - } - - return Policy.WrapAsync(policies.ToArray()); - } - } -} diff --git a/src/Tableau.Migration/Net/Policies/IHttpPolicyWrapBuilder.cs b/src/Tableau.Migration/Net/Policies/IHttpPolicyWrapBuilder.cs deleted file mode 100644 index db4b8f1..0000000 --- a/src/Tableau.Migration/Net/Policies/IHttpPolicyWrapBuilder.cs +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. 
-// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System.Net.Http; -using Polly; - -namespace Tableau.Migration.Net.Policies -{ - /// - /// Abstraction responsible to return all policies that apply for a given http request, giving to it resilience and transient-fault-handling. - /// - /// - public interface IHttpPolicyWrapBuilder - { - /// - /// Get all policies that apply for the http request. In case the request apply for more than one policy, - /// they must be wrapped by a PolicyWrap - /// - /// The http request that we will request the policies - /// A async policy that apply to a given http response of a http request. - IAsyncPolicy GetRequestPolicies(HttpRequestMessage httpRequest); - } -} diff --git a/src/Tableau.Migration/Net/Policies/RetryPolicyBuilder.cs b/src/Tableau.Migration/Net/Policies/RetryPolicyBuilder.cs deleted file mode 100644 index fec49b3..0000000 --- a/src/Tableau.Migration/Net/Policies/RetryPolicyBuilder.cs +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System.Collections.Generic; -using System.Net.Http; -using Polly; -using Polly.Extensions.Http; -using Polly.RateLimit; -using Polly.Timeout; -using Tableau.Migration.Config; - -namespace Tableau.Migration.Net.Policies -{ - internal class RetryPolicyBuilder - : IHttpPolicyBuilder - { - private readonly IConfigReader _configReader; - - public RetryPolicyBuilder(IConfigReader configReader) - { - _configReader = configReader; - } - - public IAsyncPolicy? 
Build(HttpRequestMessage httpRequest) - { - var resilienceOptions = _configReader - .Get() - .Network - .Resilience; - - var retryIntervals = resilienceOptions.RetryIntervals; - - if (!resilienceOptions.RetryEnabled - || retryIntervals.Length == 0) - { - return null; - } - - var policy = HttpPolicyExtensions.HandleTransientHttpError(); - - var retryStatusCodes = resilienceOptions.RetryOverrideResponseCodes; - - if (retryStatusCodes is not null && - retryStatusCodes.Length > 0) - { - var hashStatusCodes = new HashSet(retryStatusCodes); - - policy = Policy - .Handle() - .OrResult(result => hashStatusCodes.Contains((int)result.StatusCode)); - } - - return policy - .Or() - .Or() - .WaitAndRetryAsync(retryIntervals); - } - } -} diff --git a/src/Tableau.Migration/Net/Policies/ServerThrottlePolicyBuilder.cs b/src/Tableau.Migration/Net/Policies/ServerThrottlePolicyBuilder.cs deleted file mode 100644 index 9589244..0000000 --- a/src/Tableau.Migration/Net/Policies/ServerThrottlePolicyBuilder.cs +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System; -using System.Linq; -using System.Net; -using System.Net.Http; -using System.Threading.Tasks; -using Polly; -using Polly.Retry; -using Tableau.Migration.Config; - -namespace Tableau.Migration.Net.Policies -{ - internal sealed class ServerThrottlePolicyBuilder : IHttpPolicyBuilder - { - private static readonly TimeSpan DEFAULT_RETRY_INTERVAL_FALLBACK = TimeSpan.FromMinutes(1); - - private readonly IConfigReader _configReader; - - public ServerThrottlePolicyBuilder(IConfigReader configReader) - { - _configReader = configReader; - } - - public IAsyncPolicy? Build(HttpRequestMessage httpRequest) - { - var resilienceOptions = _configReader.Get().Network.Resilience; - - if (!resilienceOptions.ServerThrottleEnabled) - { - return null; - } - - TimeSpan SleepDurationProvider(int i, DelegateResult result, Context context) - { - //Obey the server Retry-After header value. - var retryAfter = result.Result.Headers.RetryAfter; - if (retryAfter is not null) - { - if (retryAfter.Delta is not null) - { - return retryAfter.Delta.Value; - } - else if (retryAfter.Date is not null) - { - return retryAfter.Date.Value.Subtract(DateTime.UtcNow); - } - } - - //If no Retry-After header use our configured retry intervals. - //Falling back to an internal default if there are no configured intervals. 
- if (resilienceOptions.ServerThrottleRetryIntervals.Length < 1) - { - return DEFAULT_RETRY_INTERVAL_FALLBACK; - } - else if (i >= resilienceOptions.ServerThrottleRetryIntervals.Length) - { - return resilienceOptions.ServerThrottleRetryIntervals.Last(); - } - - return resilienceOptions.ServerThrottleRetryIntervals[i]; - } - - var policy = Policy - .HandleResult(r => r.StatusCode is HttpStatusCode.TooManyRequests); - - AsyncRetryPolicy result; - if (resilienceOptions.ServerThrottleLimitRetries) - { - result = policy.WaitAndRetryAsync(resilienceOptions.ServerThrottleRetryIntervals.Length, - sleepDurationProvider: SleepDurationProvider, - onRetryAsync: async (_, _, _, _) => await Task.CompletedTask.ConfigureAwait(false)); - } - else - { - result = policy.WaitAndRetryForeverAsync( - sleepDurationProvider: SleepDurationProvider, - onRetryAsync: async (_, _, _, _) => await Task.CompletedTask.ConfigureAwait(false) - ); - } - - return result; - } - } -} diff --git a/src/Tableau.Migration/Net/Policies/SimpleCachedClientThrottlePolicyBuilder.cs b/src/Tableau.Migration/Net/Policies/SimpleCachedClientThrottlePolicyBuilder.cs deleted file mode 100644 index 6c72d4b..0000000 --- a/src/Tableau.Migration/Net/Policies/SimpleCachedClientThrottlePolicyBuilder.cs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using Tableau.Migration.Config; - -namespace Tableau.Migration.Net.Policies -{ - internal class SimpleCachedClientThrottlePolicyBuilder - : SimpleCachedHttpPolicyBuilder, IHttpPolicyBuilder - { - private readonly IConfigReader _configReader; - - public SimpleCachedClientThrottlePolicyBuilder( - ClientThrottlePolicyBuilder policyBuilder, - IConfigReader configReader) - : base(policyBuilder) - { - _configReader = configReader; - } - - protected override string GetCachedConfigurationKey() - { - var resilienceOptions = _configReader - .Get() - .Network - .Resilience; - - return $"{resilienceOptions.ClientThrottleEnabled}_" + - $"{resilienceOptions.MaxReadRequests}_" + - $"{resilienceOptions.MaxReadRequestsInterval}_" + - $"{resilienceOptions.MaxBurstReadRequests}_" + - $"{resilienceOptions.MaxPublishRequests}_" + - $"{resilienceOptions.MaxPublishRequestsInterval}_" + - $"{resilienceOptions.MaxBurstPublishRequests}"; - } - } -} diff --git a/src/Tableau.Migration/Net/Policies/SimpleCachedHttpPolicyBuilder.cs b/src/Tableau.Migration/Net/Policies/SimpleCachedHttpPolicyBuilder.cs deleted file mode 100644 index 69ef262..0000000 --- a/src/Tableau.Migration/Net/Policies/SimpleCachedHttpPolicyBuilder.cs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System.Net.Http; -using Polly; - -namespace Tableau.Migration.Net.Policies -{ - internal abstract class SimpleCachedHttpPolicyBuilder - : SimpleCachedHttpPolicyContainer, IHttpPolicyBuilder - { - protected readonly IHttpPolicyBuilder _policyBuilder; - - public SimpleCachedHttpPolicyBuilder( - IHttpPolicyBuilder policyBuilder) - { - _policyBuilder = policyBuilder; - } - - public IAsyncPolicy? Build( - HttpRequestMessage httpRequest) - { - return GetCachedPolicy(httpRequest); - } - - protected override IAsyncPolicy? GetFreshPolicy( - HttpRequestMessage httpRequest) - { - return _policyBuilder.Build(httpRequest); - } - } -} diff --git a/src/Tableau.Migration/Net/Policies/SimpleCachedHttpPolicyContainer.cs b/src/Tableau.Migration/Net/Policies/SimpleCachedHttpPolicyContainer.cs deleted file mode 100644 index b1dda30..0000000 --- a/src/Tableau.Migration/Net/Policies/SimpleCachedHttpPolicyContainer.cs +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System.Collections.Concurrent; -using System.Net.Http; -using System.Threading; -using Polly; - -namespace Tableau.Migration.Net.Policies -{ - internal abstract class SimpleCachedHttpPolicyContainer - { - private string _cachedConfigurationKey = string.Empty; - private readonly ConcurrentDictionary?> _requestPolicies = new(); - private SpinLock _lock = new(); - - public IAsyncPolicy? GetCachedPolicy( - HttpRequestMessage httpRequest) - { - RefreshCachedConfiguration(); - - return GetPolicy(httpRequest); - } - - private void RefreshCachedConfiguration() - { - var configurationKey = GetCachedConfigurationKey(); - var lockTaken = false; - - while (!string.Equals(configurationKey, _cachedConfigurationKey)) - { - try - { - _lock.TryEnter(ref lockTaken); - - if (lockTaken) - { - _requestPolicies.Clear(); - - _cachedConfigurationKey = configurationKey; - } - } - finally - { - if (lockTaken) - { - _lock.Exit(); - } - } - }; - } - - private IAsyncPolicy? GetPolicy( - HttpRequestMessage httpRequest) - { - var requestKey = GetRequestKey(httpRequest); - IAsyncPolicy? 
policy; - - while (!_requestPolicies.TryGetValue( - requestKey, - out policy)) - { - policy = GetFreshPolicy(httpRequest); - - if (_requestPolicies.TryAdd( - requestKey, - policy)) - { - return policy; - } - }; - - return policy; - } - - protected virtual string GetRequestKey( - HttpRequestMessage httpRequest) - { - return httpRequest.GetPolicyRequestKey(); - } - - protected abstract string GetCachedConfigurationKey(); - - protected abstract IAsyncPolicy? GetFreshPolicy( - HttpRequestMessage httpRequest); - } -} diff --git a/src/Tableau.Migration/Net/Policies/SimpleCachedHttpPolicyWrapBuilder.cs b/src/Tableau.Migration/Net/Policies/SimpleCachedHttpPolicyWrapBuilder.cs deleted file mode 100644 index 8ab00fd..0000000 --- a/src/Tableau.Migration/Net/Policies/SimpleCachedHttpPolicyWrapBuilder.cs +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System.Net.Http; -using Polly; -using Tableau.Migration.Config; - -namespace Tableau.Migration.Net.Policies -{ - internal class SimpleCachedHttpPolicyWrapBuilder - : SimpleCachedHttpPolicyContainer, IHttpPolicyWrapBuilder - { - private readonly HttpPolicyWrapBuilder _policyBuilder; - private readonly IConfigReader _configReader; - - public SimpleCachedHttpPolicyWrapBuilder( - HttpPolicyWrapBuilder policyBuilder, - IConfigReader configReader) - { - _policyBuilder = policyBuilder; - _configReader = configReader; - } - - public IAsyncPolicy GetRequestPolicies( - HttpRequestMessage httpRequest) - { - return GetCachedPolicy(httpRequest)!; - } - - protected override string GetCachedConfigurationKey() - { - return _configReader - .Get() - .Network - .Resilience - .ToJson(); - } - - protected override IAsyncPolicy? GetFreshPolicy( - HttpRequestMessage httpRequest) - { - return _policyBuilder.GetRequestPolicies( - httpRequest); - } - } -} diff --git a/src/Tableau.Migration/Net/Policies/SimpleCachedMaxConcurrencyPolicyBuilder.cs b/src/Tableau.Migration/Net/Policies/SimpleCachedMaxConcurrencyPolicyBuilder.cs deleted file mode 100644 index 90cfc0c..0000000 --- a/src/Tableau.Migration/Net/Policies/SimpleCachedMaxConcurrencyPolicyBuilder.cs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
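The SimpleCached* builders being deleted here all followed one caching scheme: a policy was cached per request key, and the whole cache was discarded whenever a configuration-derived key changed. A hedged, simplified sketch of that pattern follows; the names are illustrative and an ordinary lock stands in for the original SpinLock.

```csharp
// Sketch of the configuration-keyed cache pattern the deleted SimpleCached*
// builders implemented. Illustrative only, not the SDK's code.
using System.Collections.Concurrent;

public abstract class CachedByConfigKey<TValue>
{
    private readonly ConcurrentDictionary<string, TValue> _cache = new();
    private readonly object _sync = new();
    private string _configKey = string.Empty;

    protected abstract string GetConfigKey();
    protected abstract TValue CreateFresh(string requestKey);

    public TValue Get(string requestKey)
    {
        var currentKey = GetConfigKey();
        if (!string.Equals(currentKey, _configKey))
        {
            lock (_sync)
            {
                if (!string.Equals(currentKey, _configKey))
                {
                    // Configuration changed: invalidate every cached value.
                    _cache.Clear();
                    _configKey = currentKey;
                }
            }
        }

        return _cache.GetOrAdd(requestKey, CreateFresh);
    }
}
```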
-// - -using System.Net.Http; -using Tableau.Migration.Config; - -namespace Tableau.Migration.Net.Policies -{ - internal class SimpleCachedMaxConcurrencyPolicyBuilder - : SimpleCachedHttpPolicyBuilder, IHttpPolicyBuilder - { - private readonly IConfigReader _configReader; - - public SimpleCachedMaxConcurrencyPolicyBuilder( - MaxConcurrencyPolicyBuilder policyBuilder, - IConfigReader configReader) - : base(policyBuilder) - { - _configReader = configReader; - } - - protected override string GetRequestKey( - HttpRequestMessage httpRequest) - { - // Single Key - Shared for every request - return string.Empty; - } - - protected override string GetCachedConfigurationKey() - { - var resilienceOptions = _configReader - .Get() - .Network - .Resilience; - - return $"{resilienceOptions.ConcurrentRequestsLimitEnabled}_" + - $"{resilienceOptions.MaxConcurrentRequests}_" + - $"{resilienceOptions.ConcurrentWaitingRequestsOnQueue}"; - } - } -} diff --git a/src/Tableau.Migration/Net/Policies/SimpleCachedRequestTimeoutPolicyBuilder.cs b/src/Tableau.Migration/Net/Policies/SimpleCachedRequestTimeoutPolicyBuilder.cs deleted file mode 100644 index f981370..0000000 --- a/src/Tableau.Migration/Net/Policies/SimpleCachedRequestTimeoutPolicyBuilder.cs +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System.Net.Http; -using Tableau.Migration.Config; - -namespace Tableau.Migration.Net.Policies -{ - internal class SimpleCachedRequestTimeoutPolicyBuilder - : SimpleCachedHttpPolicyBuilder, IHttpPolicyBuilder - { - private const string RequestKey = "request"; - private const string FileTransferRequestKey = "fileTransferRequest"; - - private readonly IConfigReader _configReader; - - public SimpleCachedRequestTimeoutPolicyBuilder( - RequestTimeoutPolicyBuilder policyBuilder, - IConfigReader configReader) - : base(policyBuilder) - { - _configReader = configReader; - } - - protected override string GetRequestKey( - HttpRequestMessage httpRequest) - { - // Double Key - Shared for every request/file-transfer request - return RequestTimeoutPolicyBuilder.IsFileTransferRequest(httpRequest) ? RequestKey : FileTransferRequestKey; - } - - protected override string GetCachedConfigurationKey() - { - var resilienceOptions = _configReader - .Get() - .Network - .Resilience; - - return $"{resilienceOptions.PerRequestTimeout}_{resilienceOptions.PerFileTransferRequestTimeout}"; - } - } -} diff --git a/src/Tableau.Migration/Net/Policies/SimpleCachedRetryPolicyBuilder.cs b/src/Tableau.Migration/Net/Policies/SimpleCachedRetryPolicyBuilder.cs deleted file mode 100644 index abc9840..0000000 --- a/src/Tableau.Migration/Net/Policies/SimpleCachedRetryPolicyBuilder.cs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System.Net.Http; -using Tableau.Migration.Config; - -namespace Tableau.Migration.Net.Policies -{ - internal class SimpleCachedRetryPolicyBuilder - : SimpleCachedHttpPolicyBuilder, IHttpPolicyBuilder - { - private readonly IConfigReader _configReader; - - public SimpleCachedRetryPolicyBuilder( - RetryPolicyBuilder policyBuilder, - IConfigReader configReader) - : base(policyBuilder) - { - _configReader = configReader; - } - - protected override string GetRequestKey( - HttpRequestMessage httpRequest) - { - // Single Key - Shared for every request - return string.Empty; - } - - protected override string GetCachedConfigurationKey() - { - var resilienceOptions = _configReader - .Get() - .Network - .Resilience; - - return $"{resilienceOptions.RetryEnabled}_" + - $"{string.Join(";", resilienceOptions.RetryIntervals)}_" + - $"{string.Join(";", resilienceOptions.RetryOverrideResponseCodes)}"; - } - } -} diff --git a/src/Tableau.Migration/Net/Policies/SimpleCachedServerThrottlePolicyBuilder.cs b/src/Tableau.Migration/Net/Policies/SimpleCachedServerThrottlePolicyBuilder.cs deleted file mode 100644 index 47b05aa..0000000 --- a/src/Tableau.Migration/Net/Policies/SimpleCachedServerThrottlePolicyBuilder.cs +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -using System.Net.Http; -using Tableau.Migration.Config; - -namespace Tableau.Migration.Net.Policies -{ - internal sealed class SimpleCachedServerThrottlePolicyBuilder - : SimpleCachedHttpPolicyBuilder, IHttpPolicyBuilder - { - private readonly IConfigReader _configReader; - - public SimpleCachedServerThrottlePolicyBuilder(ServerThrottlePolicyBuilder policyBuilder, - IConfigReader configReader) - : base(policyBuilder) - { - _configReader = configReader; - } - - protected override string GetRequestKey(HttpRequestMessage httpRequest) - { - // Single Key - Shared for every request - return string.Empty; - } - - protected override string GetCachedConfigurationKey() - { - var resilienceOptions = _configReader.Get().Network.Resilience; - - return $"{resilienceOptions.ServerThrottleEnabled}_" + - $"{resilienceOptions.ServerThrottleLimitRetries}_" + - $"{string.Join(";", resilienceOptions.ServerThrottleRetryIntervals)}_"; - } - } -} diff --git a/src/Tableau.Migration/Net/Resilience/ClientThrottleStrategyBuilder.cs b/src/Tableau.Migration/Net/Resilience/ClientThrottleStrategyBuilder.cs new file mode 100644 index 0000000..82bd433 --- /dev/null +++ b/src/Tableau.Migration/Net/Resilience/ClientThrottleStrategyBuilder.cs @@ -0,0 +1,68 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Net.Http; +using System.Threading.RateLimiting; +using Polly; +using Polly.RateLimiting; +using Tableau.Migration.Config; + +namespace Tableau.Migration.Net.Resilience +{ + internal sealed class ClientThrottleStrategyBuilder + : IResilienceStrategyBuilder + { + /// + public void Build(ResiliencePipelineBuilder pipelineBuilder, MigrationSdkOptions options, ref Action? onPipelineDisposed) + { + var resilienceOptions = options.Network.Resilience; + + if (resilienceOptions.ClientThrottleEnabled) + { + // Dynamically build the limiter so it is based on current configuration. + var limiter = PartitionedRateLimiter.Create(method => + { + return RateLimitPartition.GetSlidingWindowLimiter(method, m => + { + if(m == HttpMethod.Get) + { + return new SlidingWindowRateLimiterOptions + { + PermitLimit = resilienceOptions.MaxReadRequests, + Window = resilienceOptions.MaxReadRequestsInterval + }; + } + + return new SlidingWindowRateLimiterOptions + { + PermitLimit = resilienceOptions.MaxPublishRequests, + Window = resilienceOptions.MaxPublishRequestsInterval + }; + }); + }); + + pipelineBuilder.AddRateLimiter(new RateLimiterStrategyOptions + { + RateLimiter = args => limiter.AcquireAsync(args.Context.GetRequest().Method, cancellationToken: args.Context.CancellationToken) + }); + + // Ensure the dynamic limiter is disposed when configuration changes. 
+ onPipelineDisposed = () => limiter.Dispose(); + } + } + } +} diff --git a/src/Tableau.Migration/Net/Resilience/IResilienceStrategyBuilder.cs b/src/Tableau.Migration/Net/Resilience/IResilienceStrategyBuilder.cs new file mode 100644 index 0000000..4b97f7e --- /dev/null +++ b/src/Tableau.Migration/Net/Resilience/IResilienceStrategyBuilder.cs @@ -0,0 +1,41 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Net.Http; +using Microsoft.Extensions.Http.Resilience; +using Polly; +using Tableau.Migration.Config; + +namespace Tableau.Migration.Net.Resilience +{ + /// + /// Interface for an object that can build and add a resilience strategy to a resilience pipeline builder. + /// + public interface IResilienceStrategyBuilder + { + /// + /// Adds a resilience strategy to the pipeline builder. + /// + /// The resilience pipeline builder to add strategies to. + /// The current SDK options. + /// + /// An action to perform when the pipeline is disposed, or null. + /// Supplied as an out parameter because is not unit test-able. + /// + void Build(ResiliencePipelineBuilder pipelineBuilder, MigrationSdkOptions options, ref Action? onPipelineDisposed); + } +} diff --git a/src/Tableau.Migration/Net/Policies/MaxConcurrencyPolicyBuilder.cs b/src/Tableau.Migration/Net/Resilience/MaxConcurrencyStrategyBuilder.cs similarity index 50% rename from src/Tableau.Migration/Net/Policies/MaxConcurrencyPolicyBuilder.cs rename to src/Tableau.Migration/Net/Resilience/MaxConcurrencyStrategyBuilder.cs index ec12b66..8ebafe2 100644 --- a/src/Tableau.Migration/Net/Policies/MaxConcurrencyPolicyBuilder.cs +++ b/src/Tableau.Migration/Net/Resilience/MaxConcurrencyStrategyBuilder.cs @@ -14,34 +14,27 @@ // limitations under the License. // +using System; using System.Net.Http; using Polly; using Tableau.Migration.Config; -namespace Tableau.Migration.Net.Policies +namespace Tableau.Migration.Net.Resilience { - internal class MaxConcurrencyPolicyBuilder - : IHttpPolicyBuilder + internal sealed class MaxConcurrencyStrategyBuilder + : IResilienceStrategyBuilder { - private readonly IConfigReader _configReader; - - public MaxConcurrencyPolicyBuilder(IConfigReader configReader) + /// + public void Build(ResiliencePipelineBuilder pipelineBuilder, MigrationSdkOptions options, ref Action? onPipelineDisposed) { - _configReader = configReader; - } - - public IAsyncPolicy? Build(HttpRequestMessage httpRequest) - { - var sdkOptions = _configReader - .Get() - .Network - .Resilience; - - return sdkOptions.ConcurrentRequestsLimitEnabled - ?
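The new ClientThrottleStrategyBuilder partitions a sliding-window rate limiter by HTTP method so that reads and publishes are throttled independently. A small standalone sketch of that System.Threading.RateLimiting usage is below; the permit counts, windows, and segment count are illustrative values, not the SDK's configured defaults.

```csharp
// Illustrative sketch of a method-partitioned sliding-window limiter like the
// one built in ClientThrottleStrategyBuilder. Limits and windows are made up.
using System;
using System.Net.Http;
using System.Threading.RateLimiting;

var limiter = PartitionedRateLimiter.Create<HttpMethod, HttpMethod>(method =>
    RateLimitPartition.GetSlidingWindowLimiter(method, m => new SlidingWindowRateLimiterOptions
    {
        PermitLimit = m == HttpMethod.Get ? 40 : 5,
        Window = m == HttpMethod.Get ? TimeSpan.FromSeconds(1) : TimeSpan.FromMinutes(1),
        SegmentsPerWindow = 4 // a sliding window requires at least one segment
    }));

// Acquire a permit from the partition that matches the request's method.
using RateLimitLease lease = await limiter.AcquireAsync(HttpMethod.Get);
Console.WriteLine($"Throttled: {!lease.IsAcquired}");
```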
Policy.BulkheadAsync( - sdkOptions.MaxConcurrentRequests, - sdkOptions.ConcurrentWaitingRequestsOnQueue) - : null; + if(options.Network.Resilience.ConcurrentRequestsLimitEnabled) + { + pipelineBuilder.AddConcurrencyLimiter(new() + { + PermitLimit = options.Network.Resilience.MaxConcurrentRequests, + QueueLimit = options.Network.Resilience.ConcurrentWaitingRequestsOnQueue + }); + } } } } diff --git a/src/Tableau.Migration/Net/Policies/RequestTimeoutPolicyBuilder.cs b/src/Tableau.Migration/Net/Resilience/RequestTimeoutStrategyBuilder.cs similarity index 70% rename from src/Tableau.Migration/Net/Policies/RequestTimeoutPolicyBuilder.cs rename to src/Tableau.Migration/Net/Resilience/RequestTimeoutStrategyBuilder.cs index 2c5f97d..931f3b0 100644 --- a/src/Tableau.Migration/Net/Policies/RequestTimeoutPolicyBuilder.cs +++ b/src/Tableau.Migration/Net/Resilience/RequestTimeoutStrategyBuilder.cs @@ -14,21 +14,23 @@ // limitations under the License. // +using System; using System.Collections.Generic; using System.Linq; using System.Net.Http; using System.Text.RegularExpressions; +using System.Threading.Tasks; using Polly; using Polly.Timeout; using Tableau.Migration.Api.Rest; using Tableau.Migration.Config; -namespace Tableau.Migration.Net.Policies +namespace Tableau.Migration.Net.Resilience { - internal class RequestTimeoutPolicyBuilder - : IHttpPolicyBuilder + internal class RequestTimeoutStrategyBuilder + : IResilienceStrategyBuilder { - private static readonly HashSet<(HttpMethod, Regex)> _fileTransferRequests = new() + private static readonly HashSet<(HttpMethod Method, Regex Pattern)> _fileTransferRequests = new() { // Regex to capture the download content AbsolutePath: // - GET /api/api-version/sites/site-id/datasources/datasource-id/content?includeExtract=extract-value @@ -57,37 +59,28 @@ internal class RequestTimeoutPolicyBuilder // - "{1}": Exactly one match. (HttpMethod.Put, new Regex($@"^.+(\/({RestUrlPrefixes.FileUploads})\/.+){{1}}$", RegexOptions.Compiled | RegexOptions.Singleline | RegexOptions.IgnoreCase)), }; - private readonly IConfigReader _configReader; - public RequestTimeoutPolicyBuilder( - IConfigReader configReader) - { - _configReader = configReader; - } + private static bool IsFileTransferRequest(HttpRequestMessage httpRequest) + => httpRequest.RequestUri is not null && + _fileTransferRequests.Any(lrr => httpRequest.Method == lrr.Method && lrr.Pattern.IsMatch(httpRequest.RequestUri!.AbsolutePath)); - public IAsyncPolicy? 
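The rewritten MaxConcurrencyStrategyBuilder replaces the v7 bulkhead with Polly v8's concurrency limiter. A hedged sketch of that strategy in isolation is below; the limits are illustrative, not the SDK's configured values.

```csharp
// Illustrative use of the concurrency limiter strategy: at most PermitLimit
// executions run at once, up to QueueLimit callers wait, and anything beyond
// that is rejected with RateLimiterRejectedException.
using System.Net.Http;
using System.Threading.RateLimiting;
using Polly;

ResiliencePipeline<HttpResponseMessage> pipeline = new ResiliencePipelineBuilder<HttpResponseMessage>()
    .AddConcurrencyLimiter(new ConcurrencyLimiterOptions
    {
        PermitLimit = 10, // concurrent executions allowed
        QueueLimit = 100  // waiting executions before rejection
    })
    .Build();
```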
Build( - HttpRequestMessage httpRequest) + private static TimeSpan GetRequestTimeout(TimeoutGeneratorArguments args, MigrationSdkOptions options) { - var sdkOptions = _configReader.Get(); - - if (IsFileTransferRequest(httpRequest)) + if (IsFileTransferRequest(args.Context.GetRequest())) { - return Policy.TimeoutAsync( - sdkOptions.Network.Resilience.PerFileTransferRequestTimeout, - TimeoutStrategy.Optimistic); + return options.Network.Resilience.PerFileTransferRequestTimeout; } - // Basic Timeout Per-Request Implementation - // TODO: W-12611689 - Network Client - Timeout - return Policy.TimeoutAsync( - sdkOptions.Network.Resilience.PerRequestTimeout, - TimeoutStrategy.Optimistic); + return options.Network.Resilience.PerRequestTimeout; } - public static bool IsFileTransferRequest(HttpRequestMessage httpRequest) - => httpRequest.RequestUri is not null && - _fileTransferRequests.Any(lrr => - httpRequest.Method == lrr.Item1 && - lrr.Item2.IsMatch(httpRequest.RequestUri!.AbsolutePath)); + /// + public void Build(ResiliencePipelineBuilder pipelineBuilder, MigrationSdkOptions options, ref Action? onPipelineDisposed) + { + pipelineBuilder.AddTimeout(new TimeoutStrategyOptions() + { + TimeoutGenerator = args => ValueTask.FromResult(GetRequestTimeout(args, options)) + }); + } } } diff --git a/src/Tableau.Migration/Net/Resilience/ResilienceContextExtensions.cs b/src/Tableau.Migration/Net/Resilience/ResilienceContextExtensions.cs new file mode 100644 index 0000000..f857b29 --- /dev/null +++ b/src/Tableau.Migration/Net/Resilience/ResilienceContextExtensions.cs @@ -0,0 +1,42 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Net.Http; +using Polly; + +namespace Tableau.Migration.Net.Resilience +{ + internal static class ResilienceContextExtensions + { + /* Key string value taken from + * https://github.com/dotnet/extensions/blob/v8.1.0/src/Libraries/Microsoft.Extensions.Http.Resilience/Internal/ResilienceKeys.cs + * due to the value being internal. + */ + internal static readonly ResiliencePropertyKey REQUEST_CONTEXT_KEY + = new("Resilience.Http.RequestMessage"); + + internal static HttpRequestMessage GetRequest(this ResilienceContext ctx) + { + if (ctx.Properties.TryGetValue(REQUEST_CONTEXT_KEY, out var request)) + { + return request; + } + + throw new InvalidOperationException("No request was provided for resilience strategy context."); + } + } +} diff --git a/src/Tableau.Migration/Net/Resilience/RetryStrategyBuilder.cs b/src/Tableau.Migration/Net/Resilience/RetryStrategyBuilder.cs new file mode 100644 index 0000000..8c0b320 --- /dev/null +++ b/src/Tableau.Migration/Net/Resilience/RetryStrategyBuilder.cs @@ -0,0 +1,88 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. 
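ResilienceContextExtensions.GetRequest above recovers the outgoing HttpRequestMessage from the resilience context via the same property key the Microsoft.Extensions.Http.Resilience handler uses internally, which is how the timeout generator can choose a per-request timeout. The sketch below shows the underlying property mechanism only; in the real pipeline the HTTP resilience handler attaches the request for you, and the URL is a placeholder.

```csharp
// Illustration of the ResilienceContext property lookup that GetRequest relies
// on. The manual Set call and the URL are for demonstration only.
using System;
using System.Net.Http;
using Polly;

var requestKey = new ResiliencePropertyKey<HttpRequestMessage>("Resilience.Http.RequestMessage");

ResilienceContext context = ResilienceContextPool.Shared.Get();
try
{
    using var request = new HttpRequestMessage(HttpMethod.Put, "https://example.test/api/3.21/sites/1/fileUploads/abc");
    context.Properties.Set(requestKey, request);

    // A strategy callback (e.g. the timeout generator) can now read the request
    // and pick a timeout based on its method and path.
    if (context.Properties.TryGetValue(requestKey, out var attached))
    {
        Console.WriteLine($"{attached.Method} {attached.RequestUri!.AbsolutePath}");
    }
}
finally
{
    ResilienceContextPool.Shared.Return(context);
}
```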
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Collections.Immutable; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Threading.Tasks; +using Polly; +using Polly.RateLimiting; +using Polly.Retry; +using Polly.Timeout; +using Tableau.Migration.Config; + +namespace Tableau.Migration.Net.Resilience +{ + internal sealed class RetryStrategyBuilder + : IResilienceStrategyBuilder + { + private static readonly ImmutableArray _standardRetryExceptions = new[] + { + typeof(HttpRequestException), + typeof(TimeoutRejectedException), + typeof(RateLimiterRejectedException) + }.ToImmutableArray(); + + private static bool ShouldRetry(RetryPredicateArguments args, ResilienceOptions resilienceOptions) + { + if (args.Outcome.Exception is null) + { + var response = args.Outcome.Result; + if(response is null) + { + return false; + } + + if (resilienceOptions.RetryOverrideResponseCodes.IsNullOrEmpty()) + { + return (int)response.StatusCode >= 500 || response.StatusCode is HttpStatusCode.RequestTimeout; + } + else + { + return resilienceOptions.RetryOverrideResponseCodes.Contains((int)response.StatusCode); + } + } + + return _standardRetryExceptions.Contains(args.Outcome.Exception.GetType()); + } + + /// + public void Build(ResiliencePipelineBuilder pipelineBuilder, MigrationSdkOptions options, ref Action? onPipelineDisposed) + { + var resilienceOptions = options.Network.Resilience; + + if(!resilienceOptions.RetryEnabled || !resilienceOptions.RetryIntervals.Any()) + { + return; + } + + pipelineBuilder.AddRetry(new RetryStrategyOptions + { + MaxRetryAttempts = resilienceOptions.RetryIntervals.Length, + ShouldHandle = args => ValueTask.FromResult(ShouldRetry(args, resilienceOptions)), + DelayGenerator = args => + { + var interval = args.AttemptNumber < resilienceOptions.RetryIntervals.Length ? + resilienceOptions.RetryIntervals[args.AttemptNumber] : resilienceOptions.RetryIntervals[^1]; + + return ValueTask.FromResult(interval); + } + }); + } + } +} diff --git a/src/Tableau.Migration/Net/Resilience/ServerThrottleStrategyBuilder.cs b/src/Tableau.Migration/Net/Resilience/ServerThrottleStrategyBuilder.cs new file mode 100644 index 0000000..bfd0598 --- /dev/null +++ b/src/Tableau.Migration/Net/Resilience/ServerThrottleStrategyBuilder.cs @@ -0,0 +1,94 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
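The DelayGenerator in RetryStrategyBuilder above indexes into the configured RetryIntervals by attempt number and reuses the last interval if the attempt count ever exceeds the list. The arithmetic is small enough to show directly; the interval values below are illustrative.

```csharp
// Illustration of the delay selection used above: attempt N waits
// RetryIntervals[N] while one exists, otherwise the last configured interval.
using System;

TimeSpan[] retryIntervals =
{
    TimeSpan.FromSeconds(1),
    TimeSpan.FromSeconds(5),
    TimeSpan.FromSeconds(30),
};

static TimeSpan DelayForAttempt(int attemptNumber, TimeSpan[] intervals)
    => attemptNumber < intervals.Length ? intervals[attemptNumber] : intervals[^1];

for (var attempt = 0; attempt < 5; attempt++)
{
    Console.WriteLine($"attempt {attempt}: wait {DelayForAttempt(attempt, retryIntervals)}");
}
// Waits 00:00:01, 00:00:05, then 00:00:30 for every later attempt. Because
// MaxRetryAttempts is set to RetryIntervals.Length, the clamp to the last
// interval is defensive only.
```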
+// + +using System; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Threading.Tasks; +using Polly; +using Polly.Retry; +using Tableau.Migration.Config; + +namespace Tableau.Migration.Net.Resilience +{ + internal sealed class ServerThrottleStrategyBuilder + : IResilienceStrategyBuilder + { + private readonly TimeProvider _timeProvider; + + internal static readonly TimeSpan DEFAULT_RETRY_INTERVAL_FALLBACK = TimeSpan.FromMinutes(1); + + public ServerThrottleStrategyBuilder(TimeProvider timeProvider) + { + _timeProvider = timeProvider; + } + + private TimeSpan DelayGenerator(RetryDelayGeneratorArguments args, ResilienceOptions resilienceOptions) + { + //Obey the server Retry-After header value. + if(args.Outcome.Result is not null) + { + var retryAfter = args.Outcome.Result.Headers.RetryAfter; + if (retryAfter is not null) + { + if (retryAfter.Delta is not null) + { + return retryAfter.Delta.Value; + } + else if (retryAfter.Date is not null) + { + return retryAfter.Date.Value - _timeProvider.GetUtcNow(); + } + } + } + + //If no Retry-After header use our configured retry intervals. + //Falling back to an internal default if there are no configured intervals. + if (!resilienceOptions.ServerThrottleRetryIntervals.Any()) + { + return DEFAULT_RETRY_INTERVAL_FALLBACK; + } + else if (args.AttemptNumber >= resilienceOptions.ServerThrottleRetryIntervals.Length) + { + return resilienceOptions.ServerThrottleRetryIntervals[^1]; + } + else + { + return resilienceOptions.ServerThrottleRetryIntervals[args.AttemptNumber]; + } + } + + /// + public void Build(ResiliencePipelineBuilder pipelineBuilder, MigrationSdkOptions options, ref Action? onPipelineDisposed) + { + var resilienceOptions = options.Network.Resilience; + + if (!resilienceOptions.ServerThrottleEnabled) + { + return; + } + + pipelineBuilder.AddRetry(new RetryStrategyOptions + { + ShouldHandle = static args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.TooManyRequests), + MaxRetryAttempts = resilienceOptions.ServerThrottleLimitRetries && resilienceOptions.ServerThrottleRetryIntervals.Any() ? 
+ resilienceOptions.ServerThrottleRetryIntervals.Length : int.MaxValue, + DelayGenerator = args => ValueTask.FromResult(DelayGenerator(args, resilienceOptions)) + }); + } + } +} diff --git a/src/Tableau.Migration/Net/StringExtensions.cs b/src/Tableau.Migration/Net/StringExtensions.cs index ca5341c..f2f7194 100644 --- a/src/Tableau.Migration/Net/StringExtensions.cs +++ b/src/Tableau.Migration/Net/StringExtensions.cs @@ -46,9 +46,9 @@ internal static class StringExtensions internal static StringContent ToHttpContent(this string content, MediaTypeWithQualityHeaderValue contentType) { #if NET7_0_OR_GREATER - return new StringContent(content, Encoding.UTF8, contentType); + return new StringContent(content, Constants.DefaultEncoding, contentType); #else - return new StringContent(content, Encoding.UTF8, contentType.MediaType); + return new StringContent(content, Constants.DefaultEncoding, contentType.MediaType); #endif } diff --git a/src/Tableau.Migration/Resources/SharedResourceKeys.cs b/src/Tableau.Migration/Resources/SharedResourceKeys.cs index dac5753..96aed0b 100644 --- a/src/Tableau.Migration/Resources/SharedResourceKeys.cs +++ b/src/Tableau.Migration/Resources/SharedResourceKeys.cs @@ -89,5 +89,15 @@ internal static class SharedResourceKeys public const string SiteSettingsSkippedNoAccessLogMessage = "SiteSettingsSkippedNoAccessLogMessage"; public const string SiteSettingsExtractEncryptionDisabledLogMessage = "SiteSettingsExtractEncryptionDisabledLogMessage"; + + public const string ApiClientDoesnotImplementIReadApiClientError = "ApiClientDoesnotImplementIReadApiClientError"; + + public const string ApiEndpointNotInitializedError = "ApiEndpointNotInitializedError"; + + public const string ApiEndpointDoesnotHaveValidSiteError = "ApiEndpointDoesnotHaveValidSiteError"; + + public const string ProjectReferenceNotFoundMessage = "ProjectReferenceNotFoundMessage"; + + public const string OwnerNotFoundMessage = "OwnerNotFoundMessage"; } } diff --git a/src/Tableau.Migration/Resources/SharedResources.resx b/src/Tableau.Migration/Resources/SharedResources.resx index 075945c..e2b9106 100644 --- a/src/Tableau.Migration/Resources/SharedResources.resx +++ b/src/Tableau.Migration/Resources/SharedResources.resx @@ -117,9 +117,18 @@ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + {TypeName} doesn't implement IReadApiClient. + API client input must be initialized before it is used. + + API endpoint does not have a valid site API client. + + + API endpoint is not initialized. + An authentication type domain mapping is required for server to cloud migrations. @@ -229,4 +238,10 @@ Detail: {3} Could not transform permissions. The {granteeType} for {UserOrGroupName} was not found at the destination. + + Project {ProjectName} was not found for {ContentType} {ContentName}. + + + Owner {OwnerId} was not found for {ContentType} {ContentName}. + \ No newline at end of file diff --git a/src/Tableau.Migration/Tableau.Migration.csproj b/src/Tableau.Migration/Tableau.Migration.csproj index 8613f2b..b15e54d 100644 --- a/src/Tableau.Migration/Tableau.Migration.csproj +++ b/src/Tableau.Migration/Tableau.Migration.csproj @@ -45,15 +45,16 @@ Note: This SDK is specific for migrating from Tableau Server to Tableau Cloud. 
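Taken together, each strategy builder contributes one strategy to a shared pipeline builder. The composition code is not part of this hunk, so the sketch below is a hypothetical illustration of how a set of IResilienceStrategyBuilder implementations could be wired into one pipeline; the ApplyStrategies helper name is invented, and the HttpResponseMessage-typed pipeline builder is an assumption based on how the strategies above are written.

```csharp
// Hypothetical composition sketch (not code from this change): apply every
// strategy builder and collect the cleanup callbacks for disposal on
// configuration change.
using System;
using System.Collections.Generic;
using System.Net.Http;
using Polly;
using Tableau.Migration.Config;
using Tableau.Migration.Net.Resilience;

internal static class ResiliencePipelineComposition
{
    public static (ResiliencePipeline<HttpResponseMessage> Pipeline, Action? OnDispose) ApplyStrategies(
        IEnumerable<IResilienceStrategyBuilder> strategyBuilders,
        MigrationSdkOptions options)
    {
        var pipelineBuilder = new ResiliencePipelineBuilder<HttpResponseMessage>();
        Action? onDispose = null;

        foreach (var strategyBuilder in strategyBuilders)
        {
            Action? onStrategyDisposed = null;
            strategyBuilder.Build(pipelineBuilder, options, ref onStrategyDisposed);

            if (onStrategyDisposed is not null)
            {
                onDispose += onStrategyDisposed; // combine per-strategy cleanup
            }
        }

        return (pipelineBuilder.Build(), onDispose);
    }
}
```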
I - - + + + all runtime; build; native; contentfiles; analyzers; buildtransitive - - + + diff --git a/tests/Python.TestApplication/migration_testcomponents_mappings.py b/tests/Python.TestApplication/migration_testcomponents_mappings.py index f62e409..d29f5e5 100644 --- a/tests/Python.TestApplication/migration_testcomponents_mappings.py +++ b/tests/Python.TestApplication/migration_testcomponents_mappings.py @@ -24,11 +24,12 @@ from Tableau.Migration.Interop.Hooks.Mappings import ISyncContentMapping from Tableau.Migration.Engine.Hooks.Mappings.Default import ITableauCloudUsernameMapping -class PyTestTableauCloudUsernameMapping(ISyncContentMapping[IUser]): +class PyTestTableauCloudUsernameMapping(ITableauCloudUsernameMapping, ISyncContentMapping[IUser] +): """Mapping that takes a base email and appends the source item name to the email username.""" __namespace__ = "Python.TestApplication" - _dotnet_base = ISyncContentMapping[IUser] + _dotnet_base = ITableauCloudUsernameMapping def __init__(self): """Default init to set up logging.""" diff --git a/tests/Python.TestApplication/pyproject.toml b/tests/Python.TestApplication/pyproject.toml index 0c6a289..bdb46cb 100644 --- a/tests/Python.TestApplication/pyproject.toml +++ b/tests/Python.TestApplication/pyproject.toml @@ -30,7 +30,7 @@ ignore = ["D401", "D407", "E501", "D203", "D212"] [tool.hatch.envs.test] dev-mode = false dependencies = [ - "pytest==7.4.4" + "pytest==8.0.1" ] [tool.hatch.envs.test.scripts] diff --git a/tests/Tableau.Migration.TestApplication/appsettings.json b/tests/Tableau.Migration.TestApplication/appsettings.json index fbf6bbd..e4b8f2d 100644 --- a/tests/Tableau.Migration.TestApplication/appsettings.json +++ b/tests/Tableau.Migration.TestApplication/appsettings.json @@ -11,7 +11,29 @@ }, "tableau": { "migrationSdk": { - "batchSize": 50 + "contentTypes": [ + { + "type": "user", + "batchSize": 50, + "batchPublishingEnabled": true + }, + { + "type": "group", + "batchSize": 50 + }, + { + "type": "project", + "batchSize": 50 + }, + { + "type": "datasource", + "batchSize": 50 + }, + { + "type": "workbook", + "batchSize": 50 + } + ] }, "migrationOptions": { "baseOverrideMailAddress": "", diff --git a/tests/Tableau.Migration.TestComponents.Tests/Tableau.Migration.TestComponents.Tests.csproj b/tests/Tableau.Migration.TestComponents.Tests/Tableau.Migration.TestComponents.Tests.csproj index e251b3d..6aecc9f 100644 --- a/tests/Tableau.Migration.TestComponents.Tests/Tableau.Migration.TestComponents.Tests.csproj +++ b/tests/Tableau.Migration.TestComponents.Tests/Tableau.Migration.TestComponents.Tests.csproj @@ -12,13 +12,13 @@ - + - + - - + + runtime; build; native; contentfiles; analyzers; buildtransitive all diff --git a/tests/Tableau.Migration.Tests/AutoFixtureTestBase.cs b/tests/Tableau.Migration.Tests/AutoFixtureTestBase.cs index 22e8f9a..dd53ad6 100644 --- a/tests/Tableau.Migration.Tests/AutoFixtureTestBase.cs +++ b/tests/Tableau.Migration.Tests/AutoFixtureTestBase.cs @@ -18,11 +18,11 @@ using System.Collections.Generic; using System.IO; using System.Linq; -using System.Text; using System.Threading; using AutoFixture; using AutoFixture.AutoMoq; using AutoFixture.Kernel; +using Moq; using Tableau.Migration.Api.Rest.Models.Requests; using Tableau.Migration.Api.Rest.Models.Responses; @@ -33,7 +33,7 @@ public abstract class AutoFixtureTestBase /// /// The configured for this instance. 
/// - protected readonly IFixture AutoFixture = new Fixture().Customize(new AutoMoqCustomization { ConfigureMembers = true }); + protected readonly IFixture AutoFixture = CreateFixture(); protected readonly CancellationTokenSource CancelSource = new(); protected CancellationToken Cancel => CancelSource.Token; @@ -52,6 +52,11 @@ public AutoFixtureTestBase() } } + /// + /// Creates a new instance. + /// + protected static IFixture CreateFixture() => new Fixture().Customize(new AutoMoqCustomization { ConfigureMembers = true }); + /// /// Creates a variable of the requested type. /// @@ -64,7 +69,27 @@ public AutoFixtureTestBase() /// /// The type of object to create. /// An object of type - protected T Create() => AutoFixture.Create(); + protected T Create(Action? configure = null) + { + var obj = AutoFixture.Create(); + + configure?.Invoke(obj); + + return obj; + } + + /// + /// Creates a string variable. + /// + /// The lendth of the string. + protected string CreateString(int length) + => new(CreateMany(length).ToArray()); + + /// + /// Creates a string variable. + /// + protected string CreateString() + => new(CreateMany().ToArray()); /// /// Create variables of the requested type. @@ -101,18 +126,30 @@ protected IEnumerable CreateMany(int generatedCount, IEnumerable? first /// The value that will subsequently always be created for . protected T Freeze() => AutoFixture.Freeze(); + /// + /// Freezes the type to a single value. + /// + /// The type of object to freeze. + /// The value to freeze. + /// The value that will subsequently always be created for . + protected T Freeze(T value) => AutoFixture.Freeze(composer => composer.FromFactory(() => value)); + private void Customize() { AutoFixture.Register(() => Create().Object); + AutoFixture.Register(() => Create>().Object); + AutoFixture.Register(() => new ContentLocation(CreateMany())); AutoFixture.Register((string data) => { - var bytes = Encoding.UTF8.GetBytes(data); + var bytes = Constants.DefaultEncoding.GetBytes(data); return new MemoryStream(bytes); }); + AutoFixture.Register(() => MemoryStreamManager.Instance); + #region - JobResponse - // These properties should return DateTime strings instead of the default Guid-like ones. diff --git a/tests/Tableau.Migration.Tests/IDataSourceExtensions.cs b/tests/Tableau.Migration.Tests/IDataSourceExtensions.cs new file mode 100644 index 0000000..5c01df6 --- /dev/null +++ b/tests/Tableau.Migration.Tests/IDataSourceExtensions.cs @@ -0,0 +1,125 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Collections.Generic; +using System.Linq; +using Tableau.Migration.Api.Rest.Models; +using Tableau.Migration.Content; + +using static Xunit.Assert; + +namespace Tableau.Migration.Tests.Unit.Content +{ + public static class IDataSourceExtensions + { + public static void Assert( + this TDataSource actual, + IDataSource expected, + Action? 
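The AutoFixtureTestBase changes above add a configure callback to Create, fixed-length string creation, and a Freeze overload that pins a specific value. A hedged usage sketch from a derived test class follows; the test name and values are illustrative.

```csharp
// Illustrative test exercising the new AutoFixtureTestBase helpers.
using System;
using Tableau.Migration.Tests;
using Xunit;

public class AutoFixtureHelperExamples : AutoFixtureTestBase
{
    [Fact]
    public void Creates_fixed_length_strings_and_frozen_values()
    {
        // CreateString(8) builds a string from 8 generated characters.
        var name = CreateString(8);
        Assert.Equal(8, name.Length);

        // Freeze(value) pins the value so later Create<Guid>() calls return it.
        var frozenId = Freeze(Guid.NewGuid());
        Assert.Equal(frozenId, Create<Guid>());
    }
}
```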
extra = null) + where TDataSource : IDataSource => + actual.Assert( + expected.Id, + expected.Name, + expected.ContentUrl, + expected.Description, + expected.CreatedAt, + expected.UpdatedAt, + expected.EncryptExtracts, + expected.HasExtracts, + expected.IsCertified, + expected.UseRemoteQueryAgent, + expected.WebpageUrl, + expected.Tags, + ((IContainerContent)expected).Container, + expected.Owner, + extra + ); + + public static void Assert( + this TDataSource actual, + IDataSourceType expected, + IContentReference expectedProject, + IContentReference expectedOwner, + Action? extra = null) + where TDataSource : IDataSource => + actual.Assert( + expected.Id, + expected.Name, + expected.ContentUrl, + expected.Description, + expected.CreatedAt, + expected.UpdatedAt, + expected.EncryptExtracts, + expected.HasExtracts, + expected.IsCertified, + expected.UseRemoteQueryAgent, + expected.WebpageUrl, + expected.Tags.Select(t => new Tag(t)), + expectedProject, + expectedOwner, + extra + ); + + public static void Assert( + this TDataSource actual, + Guid expectedId, + string? expectedName, + string? expectedContentUrl, + string? expectedDescription, + string? expectedCreatedAt, + string? expectedUpdatedAt, + bool expectedEncryptExtracts, + bool expectedHasExtracts, + bool expectedIsCertified, + bool expectedUseRemoteQueryAgent, + string? expectedWebpageUrl, + IEnumerable expectedTags, + IContentReference expectedProject, + IContentReference expectedOwner, + Action? extra = null) + where TDataSource : IDataSource + { + NotNull(actual); + Equal(expectedId, actual.Id); + Equal(expectedName, actual.Name); + Equal(expectedContentUrl, actual.ContentUrl); + + Equal(expectedDescription, actual.Description); + Equal(expectedCreatedAt, actual.CreatedAt); + Equal(expectedUpdatedAt, actual.UpdatedAt); + + Equal(expectedEncryptExtracts, actual.EncryptExtracts); + Equal(expectedHasExtracts, actual.HasExtracts); + Equal(expectedIsCertified, actual.IsCertified); + Equal(expectedUseRemoteQueryAgent, actual.UseRemoteQueryAgent); + + Equal(expectedWebpageUrl, actual.WebpageUrl); + + var actualProject = ((IContainerContent)actual).Container; + NotNull(actualProject); + + Same(expectedProject, actualProject); + Equal(expectedProject.Location.Append(actual.Name), actual.Location); + + Same(expectedOwner, actual.Owner); + + expectedTags.AssertEqual(actual.Tags); + + extra?.Invoke(actual); + } + } +} diff --git a/tests/Tableau.Migration.Tests/IEnumerableExtensions.cs b/tests/Tableau.Migration.Tests/IEnumerableExtensions.cs index 5c666b9..61ccdd0 100644 --- a/tests/Tableau.Migration.Tests/IEnumerableExtensions.cs +++ b/tests/Tableau.Migration.Tests/IEnumerableExtensions.cs @@ -30,8 +30,11 @@ public static class IEnumerableExtensions /// The second collection. /// The sorting to use to normalize ordering between the two collections. /// True if the sequences are equal, false otherwise. - public static bool SequenceEqual(this IEnumerable first, IEnumerable second, Func sort) + public static bool SequenceEqual(this IEnumerable? first, IEnumerable? 
second, Func sort) { + if (first is null || second is null) + return ReferenceEquals(first, second); + var firstSorted = first.OrderBy(sort); var secondSorted = second.OrderBy(sort); diff --git a/tests/Tableau.Migration.Tests/ITagTypeComparer.cs b/tests/Tableau.Migration.Tests/ITagTypeComparer.cs index 302f28f..a2e8b8b 100644 --- a/tests/Tableau.Migration.Tests/ITagTypeComparer.cs +++ b/tests/Tableau.Migration.Tests/ITagTypeComparer.cs @@ -23,7 +23,7 @@ internal class ITagTypeComparer : ComparerBase { public static ITagTypeComparer Instance = new(); - public override int CompareItems(ITagType x, ITagType y) + protected override int CompareItems(ITagType x, ITagType y) => StringComparer.Ordinal.Compare(x.Label, y.Label); } } diff --git a/tests/Tableau.Migration.Tests/IViewReferenceTypeComparer.cs b/tests/Tableau.Migration.Tests/IViewReferenceTypeComparer.cs index 11e885e..a741414 100644 --- a/tests/Tableau.Migration.Tests/IViewReferenceTypeComparer.cs +++ b/tests/Tableau.Migration.Tests/IViewReferenceTypeComparer.cs @@ -25,7 +25,7 @@ internal class IViewReferenceTypeComparer : ComparerBase { public static IViewReferenceTypeComparer Instance = new(); - public override int CompareItems(IViewReferenceType x, IViewReferenceType y) + protected override int CompareItems(IViewReferenceType x, IViewReferenceType y) { Assert.NotNull(x.ContentUrl); Assert.NotNull(y.ContentUrl); diff --git a/tests/Tableau.Migration.Tests/IWorkbookConnectionComparer.cs b/tests/Tableau.Migration.Tests/IWorkbookConnectionComparer.cs index 8f1b367..c565bf9 100644 --- a/tests/Tableau.Migration.Tests/IWorkbookConnectionComparer.cs +++ b/tests/Tableau.Migration.Tests/IWorkbookConnectionComparer.cs @@ -23,7 +23,7 @@ internal class IWorkbookConnectionComparer : ComparerBase( + this TWorkbook actual, + IWorkbook expected, + Action? extra = null) + where TWorkbook : IWorkbook => + actual.Assert( + expected.Id, + expected.Name, + expected.ContentUrl, + expected.Description, + expected.CreatedAt, + expected.UpdatedAt, + expected.EncryptExtracts, + expected.ShowTabs, + expected.Size, + expected.WebpageUrl, + expected.Tags, + ((IContainerContent)expected).Container, + expected.Owner, + extra + ); + + public static void Assert( + this TWorkbook actual, + IWorkbook expected, + IContentReference expectedProject, + IContentReference expectedOwner, + Action? extra = null) + where TWorkbook : IWorkbook => + actual.Assert( + expected.Id, + expectedProject.Name, + expectedOwner.ContentUrl, + expected.Description, + expected.CreatedAt, + expected.UpdatedAt, + expected.EncryptExtracts, + expected.ShowTabs, + expected.Size, + expected.WebpageUrl, + expected.Tags.Select(t => new Tag(t)), + expectedProject, + expectedOwner, + extra + ); + + public static void Assert( + this TWorkbook actual, + IWorkbookType expected, + IContentReference expectedProject, + IContentReference expectedOwner, + Action? 
extra = null) + where TWorkbook : IWorkbook + { + NotNull(actual); + Equal(expected.Id, actual.Id); + Equal(expected.Name, actual.Name); + Equal(expected.ContentUrl, actual.ContentUrl); + + Equal(expected.Description, actual.Description); + Equal(expected.CreatedAt, actual.CreatedAt); + Equal(expected.UpdatedAt, actual.UpdatedAt); + + Equal(expected.EncryptExtracts, actual.EncryptExtracts); + Equal(expected.ShowTabs, actual.ShowTabs); + Equal(expected.Size, actual.Size); + + Equal(expected.WebpageUrl, actual.WebpageUrl); + + var actualProject = ((IContainerContent)actual).Container; + NotNull(actualProject); + + Same(expectedProject, actualProject); + Equal(expectedProject.Location.Append(actual.Name), actual.Location); + + Same(expectedOwner, actual.Owner); + + expected.Tags.AssertEqual(actual.Tags); + + extra?.Invoke(actual); + } + + public static void Assert( + this TWorkbook actual, + Guid expectedId, + string? expectedName, + string? expectedContentUrl, + string? expectedDescription, + string? expectedCreatedAt, + string? expectedUpdatedAt, + bool expectedEncryptExtracts, + bool expectedShowTabs, + long expectedSize, + string? expectedWebpageUrl, + IEnumerable expectedTags, + IContentReference expectedProject, + IContentReference expectedOwner, + Action? extra = null) + where TWorkbook : IWorkbook + { + NotNull(actual); + Equal(expectedId, actual.Id); + Equal(expectedName, actual.Name); + Equal(expectedContentUrl, actual.ContentUrl); + + Equal(expectedDescription, actual.Description); + Equal(expectedCreatedAt, actual.CreatedAt); + Equal(expectedUpdatedAt, actual.UpdatedAt); + + Equal(expectedEncryptExtracts, actual.EncryptExtracts); + Equal(expectedShowTabs, actual.ShowTabs); + Equal(expectedSize, actual.Size); + + Equal(expectedWebpageUrl, actual.WebpageUrl); + + var actualProject = ((IContainerContent)actual).Container; + NotNull(actualProject); + + Same(expectedProject, actualProject); + Equal(expectedProject.Location.Append(actual.Name), actual.Location); + + Same(expectedOwner, actual.Owner); + + expectedTags.AssertEqual(actual.Tags); + + extra?.Invoke(actual); + } + } +} diff --git a/tests/Tableau.Migration.Tests/MockExtensions.cs b/tests/Tableau.Migration.Tests/MockExtensions.cs index 2d7dc20..4dd6899 100644 --- a/tests/Tableau.Migration.Tests/MockExtensions.cs +++ b/tests/Tableau.Migration.Tests/MockExtensions.cs @@ -15,9 +15,12 @@ // using System; +using System.Collections.Generic; +using System.Linq; using System.Linq.Expressions; using Microsoft.Extensions.Logging; using Moq; +using Xunit; namespace Tableau.Migration.Tests { @@ -69,5 +72,36 @@ public static void VerifyLogging(this Mock mock, LogLevel logLevel, Times { mock.Verify(x => x.Log(logLevel, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>()), times); } + + public static bool IsDisposed(this Mock mock) + where T : class + { + if (!mock.Object.IsDisposable()) + throw new ArgumentException($"{typeof(T).Name} does not implement {typeof(IDisposable).Name} or {typeof(IAsyncDisposable).Name}."); + + var methodNames = new HashSet(); + + if (mock.Object is IDisposable) + { + methodNames.Add(nameof(IDisposable.Dispose)); + } + + if (mock.Object is IAsyncDisposable) + { + methodNames.Add(nameof(IAsyncDisposable.DisposeAsync)); + } + + var methods = typeof(T).IsInterface + ? 
typeof(T).GetAllInterfaceMethods().Where(m => methodNames.Contains(m.Name)) + : typeof(T).GetMethods().Where(m => methodNames.Contains(m.Name)); + + methods = methods.ToList(); + + return mock.Invocations.Any(i => methodNames.Contains(i.Method.Name)); + } + + public static void AssertDisposed(this Mock mock, bool expected = true) + where T : class + => Assert.Equal(expected, mock.IsDisposed()); } } diff --git a/src/Tableau.Migration/Content/IResultWorkbook.cs b/tests/Tableau.Migration.Tests/ObjectExtensions.cs similarity index 73% rename from src/Tableau.Migration/Content/IResultWorkbook.cs rename to tests/Tableau.Migration.Tests/ObjectExtensions.cs index 2e3a80b..d97c61c 100644 --- a/src/Tableau.Migration/Content/IResultWorkbook.cs +++ b/tests/Tableau.Migration.Tests/ObjectExtensions.cs @@ -14,11 +14,12 @@ // limitations under the License. // -namespace Tableau.Migration.Content +using System; + +namespace Tableau.Migration.Tests { - /// - /// Intreface for the publish result of an . - /// - public interface IResultWorkbook : IWorkbook, IWithViews, IChildPermissionsContent - { } + public static class ObjectExtensions + { + public static bool IsDisposable(this object obj) => obj is IDisposable || obj is IAsyncDisposable; + } } diff --git a/tests/Tableau.Migration.Tests/Reflection/ObjectReflectionWrapper.cs b/tests/Tableau.Migration.Tests/Reflection/ObjectReflectionWrapper.cs new file mode 100644 index 0000000..dd34b26 --- /dev/null +++ b/tests/Tableau.Migration.Tests/Reflection/ObjectReflectionWrapper.cs @@ -0,0 +1,88 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Collections.Concurrent; +using System.Diagnostics.CodeAnalysis; +using Moq; + +namespace Tableau.Migration.Tests.Reflection +{ + public class ObjectReflectionWrapper + where T : notnull + { + private static readonly ConcurrentDictionary> _cache = new(); + + private readonly TypeReflectionWrapper _typeWrapper = TypeReflectionWrapper.Instance; + + public readonly T Object; + + public ObjectReflectionWrapper(T @object) + { + Object = @object; + } + + public object? 
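The MockExtensions.IsDisposed and AssertDisposed helpers above let a test verify that a mocked IDisposable dependency was actually disposed by the code under test. A usage sketch follows; the IMyResource interface and test are illustrative only.

```csharp
// Illustrative test for the AssertDisposed helper added above.
using System;
using Moq;
using Tableau.Migration.Tests;
using Xunit;

public interface IMyResource : IDisposable
{
    void DoWork();
}

public class DisposalExampleTests
{
    [Fact]
    public void Disposes_resource_when_done()
    {
        var mockResource = new Mock<IMyResource>();

        using (var resource = mockResource.Object)
        {
            resource.DoWork();
        }

        // Passes because Dispose was invoked on the mock.
        mockResource.AssertDisposed();
    }
}
```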
GetFieldValue(string fieldName) => _typeWrapper.GetFieldValue(Object, fieldName); + + public TValue GetFieldValue(string fieldName) => _typeWrapper.GetFieldValue(Object, fieldName); + + public Mock GetMockFieldValue(string fieldName) + where TValue : class + => _typeWrapper.GetMockFieldValue(Object, fieldName); + + public Mock GetMockFieldValue(Func, TValue> getFieldValue) + where TValue : class + => Mock.Get(getFieldValue(this)); + + public static ObjectReflectionWrapper InstanceFor(T @object) + => _cache.GetOrAdd(@object, _ => new ObjectReflectionWrapper(@object)); + } + + public abstract class ObjectReflectionWrapper : ObjectReflectionWrapper + where TWrapper : ObjectReflectionWrapper + where TType : notnull + { + private static readonly ConcurrentDictionary _cache = new(); + + public ObjectReflectionWrapper(TType @object) + : base(@object) + { } + + public Mock? GetMockFieldValue(Func getFieldValue, [DoesNotReturnIf(false)] bool canBeNull = false) + where TValue : class + { + var value = getFieldValue((TWrapper)this); + + if (value is null) + { + if (canBeNull) + return null; + else + throw new ArgumentException($"The value returned from {nameof(getFieldValue)} is null.", nameof(getFieldValue)); + } + + return Mock.Get(value); + } + + public TMock? GetMockFieldValue(Func getFieldValue, [DoesNotReturnIf(false)] bool canBeNull = false) + where TMock : Mock + where TValue : class + => (TMock?)GetMockFieldValue(getFieldValue, canBeNull); + + new public static TWrapper InstanceFor(TType @object) + => _cache.GetOrAdd(@object, _ => (TWrapper)Activator.CreateInstance(typeof(TWrapper), @object)!); + } +} diff --git a/tests/Tableau.Migration.Tests/Reflection/SeekableCryptoStreamWrapper.cs b/tests/Tableau.Migration.Tests/Reflection/SeekableCryptoStreamWrapper.cs new file mode 100644 index 0000000..e55d0b3 --- /dev/null +++ b/tests/Tableau.Migration.Tests/Reflection/SeekableCryptoStreamWrapper.cs @@ -0,0 +1,30 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System.IO; +using Tableau.Migration.Content.Files; + +namespace Tableau.Migration.Tests.Reflection +{ + public class SeekableCryptoStreamWrapper : ObjectReflectionWrapper + { + public Stream InnerStream => GetFieldValue("_innerStream"); + + public SeekableCryptoStreamWrapper(SeekableCryptoStream @object) + : base(@object) + { } + } +} diff --git a/tests/Tableau.Migration.Tests/Reflection/TypeReflectionWrapper.cs b/tests/Tableau.Migration.Tests/Reflection/TypeReflectionWrapper.cs new file mode 100644 index 0000000..cb7463d --- /dev/null +++ b/tests/Tableau.Migration.Tests/Reflection/TypeReflectionWrapper.cs @@ -0,0 +1,72 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Collections.Immutable; +using System.Reflection; +using Moq; + +namespace Tableau.Migration.Tests.Reflection +{ + public class TypeReflectionWrapper + { + public static readonly TypeReflectionWrapper Instance = new(); + + private readonly Lazy> _fields; + + private readonly Func _fieldValueFactory; + + protected readonly Type Type; + + public TypeReflectionWrapper() + { + Type = typeof(T); + + _fields = new(GetFields); + + _fieldValueFactory = (@object, field) => field.GetValue(@object); + } + + private ImmutableSortedDictionary GetFields() + => Type.GetFields(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic) + .ToImmutableSortedDictionary(f => f.Name, f => f); + + protected object? GetFieldValue(object @object, string fieldName) + { + if (_fields.Value.TryGetValue(fieldName, out var field)) + return _fieldValueFactory(@object, field); + + var fields = String.Join(Environment.NewLine, _fields.Value.Keys); + + throw new ArgumentException($"Field \"{fieldName}\" was not found on type {Type.Name}. Available fields:{Environment.NewLine}{fields}", nameof(fieldName)); + } + + public TValue GetFieldValue(object @object, string fieldName) => (TValue)GetFieldValue(@object, fieldName)!; + + public Mock GetMockFieldValue(object @object, string fieldName) + where TValue : class + { + var value = (TValue?)GetFieldValue(@object, fieldName) ?? + throw new ArgumentException($"The value for field \"{fieldName}\" on type {Type.Name} is null.", nameof(fieldName)); + + return Mock.Get(value); + } + + public static ObjectReflectionWrapper ForObject(TType @object) + where TType : notnull + => new(@object); + } +} diff --git a/tests/Tableau.Migration.Tests/ReflectionExtensions.cs b/tests/Tableau.Migration.Tests/ReflectionExtensions.cs index 688d3d6..7004792 100644 --- a/tests/Tableau.Migration.Tests/ReflectionExtensions.cs +++ b/tests/Tableau.Migration.Tests/ReflectionExtensions.cs @@ -45,6 +45,29 @@ public static class ReflectionExtensions public static TValue? GetFieldValue(this Type type, string fieldName) => (TValue?)type.GetFieldValue(fieldName); + public static object? GetPropertyValue(this Type type, string fieldName, object obj) + { + return type.GetProperty(fieldName, BindingFlags.NonPublic | BindingFlags.Instance)!.GetValue(obj); + } + + public static object? GetPropertyValue(this object obj, string fieldName) + { + return GetPropertyValue(obj.GetType(), fieldName, obj); + } + + public static object? GetPropertyValue(this object obj, Type type, string fieldName) + { + return GetPropertyValue(type, fieldName, obj); + } + + public static object? GetPropertyValue(this Type type, string fieldName) + { + return type.GetProperty(fieldName, BindingFlags.NonPublic | BindingFlags.Static)!.GetValue(null); + } + + public static TValue? 
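The reflection wrappers above (TypeReflectionWrapper, ObjectReflectionWrapper, and the SeekableCryptoStreamWrapper built on them) give tests typed access to private fields by name. A small hedged example follows; the Counter class is illustrative, not SDK code.

```csharp
// Illustrative test reading a private field through ObjectReflectionWrapper.
using Tableau.Migration.Tests.Reflection;
using Xunit;

public class Counter
{
    private int _count;

    public void Increment() => _count++;
}

public class ObjectReflectionWrapperExampleTests
{
    [Fact]
    public void Reads_private_field_by_name()
    {
        var counter = new Counter();
        counter.Increment();

        var wrapper = new ObjectReflectionWrapper<Counter>(counter);

        Assert.Equal(1, wrapper.GetFieldValue<int>("_count"));
    }
}
```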
GetPropertyValue(this Type type, string fieldName) + => (TValue?)type.GetPropertyValue(fieldName); + public static bool HasGenericTypeDefinition(this Type type, Type genericTypeDefinition) { if (!genericTypeDefinition.IsGenericTypeDefinition) diff --git a/tests/Tableau.Migration.Tests/SimulatedConnectionComparer.cs b/tests/Tableau.Migration.Tests/SimulatedConnectionComparer.cs index 2cab06a..981dbc5 100644 --- a/tests/Tableau.Migration.Tests/SimulatedConnectionComparer.cs +++ b/tests/Tableau.Migration.Tests/SimulatedConnectionComparer.cs @@ -23,7 +23,7 @@ internal class SimulatedConnectionComparer : ComparerBase { public static SimulatedConnectionComparer Instance = new(); - public override int CompareItems(SimulatedConnection x, SimulatedConnection y) + protected override int CompareItems(SimulatedConnection x, SimulatedConnection y) { Guard.AgainstNull(x.Credentials, nameof(x.Credentials)); Guard.AgainstNull(y.Credentials, nameof(y.Credentials)); diff --git a/tests/Tableau.Migration.Tests/Simulation/ServerToCloudSimulationTestBase.cs b/tests/Tableau.Migration.Tests/Simulation/ServerToCloudSimulationTestBase.cs index c33ac0b..7efabb7 100644 --- a/tests/Tableau.Migration.Tests/Simulation/ServerToCloudSimulationTestBase.cs +++ b/tests/Tableau.Migration.Tests/Simulation/ServerToCloudSimulationTestBase.cs @@ -20,6 +20,8 @@ using System.Linq; using System.Text; using AutoFixture; +using Microsoft.Extensions.DependencyInjection; +using Moq; using Tableau.Migration.Api; using Tableau.Migration.Api.Rest.Models; using Tableau.Migration.Api.Rest.Models.Responses; @@ -64,7 +66,7 @@ UsersResponse.UserType CreateDefaultUser() .Create(); // Wrong - Work item in in backlog - defaultUser.Name = $"{defaultUser.Domain!.Name}\\{defaultUser.Name}"; ; + defaultUser.Name = $"{defaultUser.Domain!.Name}\\{defaultUser.Name}"; return defaultUser; } @@ -78,6 +80,23 @@ UsersResponse.UserType CreateDefaultUser() CloudDestinationEndpointConfig = new(CloudDestinationSiteConfig); } + protected virtual bool UsersBatchImportEnabled { get; } = true; + + protected override IServiceCollection ConfigureServices(IServiceCollection services) + { + var mockedConfigReader = Freeze>(); + mockedConfigReader.Setup(x => x.Get()) + .Returns(new ContentTypesOptions + { + BatchPublishingEnabled = UsersBatchImportEnabled + }); + mockedConfigReader.Setup(x => x.Get()) + .Returns(new MigrationSdkOptions()); + + return services.AddTableauMigrationSdk() + .AddSingleton(mockedConfigReader.Object); + } + #region - Asserts - protected static IContentReference? MapReference(IMigrationManifest manifest, Guid sourceId) @@ -127,20 +146,6 @@ protected static void AssertPermissionsMigrated(IMigrationManifest manifest, Per Assert.Equal(mappedGranteeCapabilities.ToIGranteeCapabilities(), destinationGranteeCapabilities.ToIGranteeCapabilities(), comparer); } - internal static void AssertTags( - ITagType[]? sourceTags, - ITagType[]? destinationTags) - { - if (sourceTags is null) - { - Assert.NotNull(destinationTags); - Assert.Empty(destinationTags); - return; - } - - Assert.Equal(sourceTags, destinationTags, ITagTypeComparer.Instance); - } - #endregion #region - Prepare Source Data (Users) - @@ -148,7 +153,7 @@ internal static void AssertTags( protected (List NonSupportUsers, List SupportUsers) PrepareSourceUsersData(int? count = null) { var allSiteRoles = SiteRoles.GetAll(); - var numSourceUsers = count ?? (int)Math.Ceiling(MigrationSdkOptions.Defaults.BATCH_SIZE * 2.5); + var numSourceUsers = count ?? 
(int)Math.Ceiling(ContentTypesOptions.Defaults.BATCH_SIZE * 2.5); var nonSupportUsers = new List(); var supportUsers = new List(); @@ -193,7 +198,7 @@ internal static void AssertTags( { var groups = new List(); var allSiteRoles = SiteRoles.GetAll(); - var numSourceGroups = count ?? (int)Math.Ceiling(MigrationSdkOptions.Defaults.BATCH_SIZE * 1.5); + var numSourceGroups = count ?? (int)Math.Ceiling(ContentTypesOptions.Defaults.BATCH_SIZE * 1.5); for (int i = 0; i < numSourceGroups; i++) { @@ -346,7 +351,7 @@ internal static void AssertTags( // Our data source data will just be a guid as a string, encoded to a byte array - byte[] dataSourceData = Encoding.Default.GetBytes($"{Guid.NewGuid()}"); + byte[] dataSourceData = Constants.DefaultEncoding.GetBytes($"{Guid.NewGuid()}"); SourceApi.Data.AddDataSource(dataSource, dataSourceData); dataSources.Add(dataSource); counter++; @@ -485,7 +490,7 @@ void CreateConnectionsForWorkbook(SimulatedWorkbookData workbookData, int connec CreateConnectionsForWorkbook(workbookFileData); - SourceApi.Data.AddWorkbook(workbook, Encoding.Default.GetBytes(workbookFileData.ToXml())); + SourceApi.Data.AddWorkbook(workbook, Constants.DefaultEncoding.GetBytes(workbookFileData.ToXml())); workbooks.Add(workbook); } diff --git a/tests/Tableau.Migration.Tests/Simulation/TableauDataExtensions.cs b/tests/Tableau.Migration.Tests/Simulation/TableauDataExtensions.cs index 7bd6841..1e98534 100644 --- a/tests/Tableau.Migration.Tests/Simulation/TableauDataExtensions.cs +++ b/tests/Tableau.Migration.Tests/Simulation/TableauDataExtensions.cs @@ -52,7 +52,7 @@ public static DataSourceResponse.DataSourceType CreateDataSource( }) .Create(); - fileData ??= Encoding.Default.GetBytes(new SimulatedDataSourceData().ToXml()); + fileData ??= Constants.DefaultEncoding.GetBytes(new SimulatedDataSourceData().ToXml()); data.AddDataSource(dataSource, fileData); @@ -69,7 +69,7 @@ public static DataSourceResponse.DataSourceType CreateDataSource( autoFixture, project, user, - Encoding.Default.GetBytes(simulatedData.ToXml())); + Constants.DefaultEncoding.GetBytes(simulatedData.ToXml())); public static DataSourceResponse.DataSourceType CreateDataSource(this TableauData data, IFixture autoFixture) { @@ -91,7 +91,7 @@ public static WorkbookResponse.WorkbookType CreateWorkbook( }) .Create(); - fileData ??= Encoding.Default.GetBytes(new SimulatedWorkbookData().ToXml()); + fileData ??= Constants.DefaultEncoding.GetBytes(new SimulatedWorkbookData().ToXml()); data.AddWorkbook(workbook, fileData); diff --git a/tests/Tableau.Migration.Tests/Simulation/Tests/Api/DataSourcesApiClientTests.cs b/tests/Tableau.Migration.Tests/Simulation/Tests/Api/DataSourcesApiClientTests.cs index 94cf379..24acc3a 100644 --- a/tests/Tableau.Migration.Tests/Simulation/Tests/Api/DataSourcesApiClientTests.cs +++ b/tests/Tableau.Migration.Tests/Simulation/Tests/Api/DataSourcesApiClientTests.cs @@ -105,7 +105,7 @@ public async Task Returns_success_on_success() var options = new PublishDataSourceOptions( mockPublishable.Object, - new MemoryStream(Encoding.UTF8.GetBytes(dataSourceResponse.ToXml())), + new MemoryStream(Constants.DefaultEncoding.GetBytes(dataSourceResponse.ToXml())), DataSourceFileTypes.Tdsx); var result = await sitesClient.DataSources.PublishDataSourceAsync(options, Cancel); @@ -154,12 +154,9 @@ public async Task Returns_success_on_success() await using var sitesClient = await GetSitesClientAsync(Cancel); var dataSource = Api.Data.CreateDataSource(AutoFixture); - var connections = CreateMany().ToImmutableArray(); var result = 
await sitesClient.DataSources.GetDataSourceAsync( dataSource.Id, - connections, - Create(), Cancel); Assert.Empty(result.Errors); @@ -325,7 +322,7 @@ public async Task Set_project_returns_success() Name = project.Name }; - Api.Data.AddDataSource(dataSource, fileData: Encoding.UTF8.GetBytes(dataSource.ToXml())); + Api.Data.AddDataSource(dataSource, fileData: Constants.DefaultEncoding.GetBytes(dataSource.ToXml())); // Act var result = await sitesClient.DataSources.UpdateDataSourceAsync( @@ -362,7 +359,7 @@ public async Task Set_owner_returns_success() Id = owner.Id }; - Api.Data.AddDataSource(dataSource, fileData: Encoding.UTF8.GetBytes(dataSource.ToXml())); + Api.Data.AddDataSource(dataSource, fileData: Constants.DefaultEncoding.GetBytes(dataSource.ToXml())); // Act var result = await sitesClient.DataSources.UpdateDataSourceAsync( @@ -407,12 +404,8 @@ public async Task Returns_success_on_success() Assert.Empty(result.Errors); Assert.True(result.Success); - var connections = CreateMany().ToImmutableArray(); - var getResult = await sitesClient.DataSources.GetDataSourceAsync( dataSource.Id, - connections, - Create(), Cancel); Assert.Empty(getResult.Errors); @@ -430,10 +423,10 @@ public async Task No_Duplicates_Inserted() var owner = Api.Data.CreateUser(AutoFixture); var dataSource = Api.Data.CreateDataSource(AutoFixture); - var tagsCountBefore = dataSource.Tags is null ? 0 : dataSource.Tags.Length; + var tagsCountBefore = dataSource.Tags.Length; Api.Data.AddDataSource(dataSource, null); - var testTags = dataSource.Tags?.Select(tag => new Tag(tag)).ToArray(); + var testTags = dataSource.Tags.Select(tag => new Tag(tag)).ToArray(); Assert.NotNull(testTags); @@ -442,12 +435,8 @@ public async Task No_Duplicates_Inserted() Assert.Empty(result.Errors); Assert.True(result.Success); - var connections = CreateMany().ToImmutableArray(); - var getResult = await sitesClient.DataSources.GetDataSourceAsync( dataSource.Id, - connections, - Create(), Cancel); Assert.Empty(getResult.Errors); @@ -470,9 +459,9 @@ public async Task Returns_success_on_success() var owner = Api.Data.CreateUser(AutoFixture); var dataSource = Api.Data.CreateDataSource(AutoFixture, project, owner); - var tagsCountBefore = dataSource.Tags is null ? 
0 : dataSource.Tags.Length; + var tagsCountBefore = dataSource.Tags.Length; - var tagsToRemove = dataSource.Tags?.Select(t => new Tag(t)).ToArray(); + var tagsToRemove = dataSource.Tags.Select(t => new Tag(t)).ToArray(); Assert.NotNull(tagsToRemove); var result = await sitesClient.DataSources.Tags.RemoveTagsAsync(dataSource.Id, tagsToRemove, Cancel); @@ -480,12 +469,8 @@ public async Task Returns_success_on_success() Assert.Empty(result.Errors); Assert.True(result.Success); - var connections = CreateMany().ToImmutableArray(); - var getResult = await sitesClient.DataSources.GetDataSourceAsync( dataSource.Id, - connections, - Create(), Cancel); Assert.Empty(getResult.Errors); @@ -531,7 +516,7 @@ public async Task Returns_success_on_success() var user = Create(); var fileData = Create(); - var workbook = Api.Data.CreateDataSource(AutoFixture, project, user, Encoding.Default.GetBytes(fileData.ToXml())); + var workbook = Api.Data.CreateDataSource(AutoFixture, project, user, Constants.DefaultEncoding.GetBytes(fileData.ToXml())); var result = await sitesClient.DataSources.GetConnectionsAsync(workbook.Id, Cancel); diff --git a/tests/Tableau.Migration.Tests/Simulation/Tests/Api/UsersApiClientTests.cs b/tests/Tableau.Migration.Tests/Simulation/Tests/Api/UsersApiClientTests.cs index 0b194a1..94e2b95 100644 --- a/tests/Tableau.Migration.Tests/Simulation/Tests/Api/UsersApiClientTests.cs +++ b/tests/Tableau.Migration.Tests/Simulation/Tests/Api/UsersApiClientTests.cs @@ -33,7 +33,7 @@ internal static void AddUserAssert(UsersResponse.UserType user, IResult().ToImmutableArray(); foreach (var view in workbook.Views) { @@ -103,7 +102,7 @@ public async Task Returns_success_on_success() Api.Data.CreateViewPermissions(AutoFixture, view, view.Id, view.Name); } - var result = await sitesClient.Workbooks.GetWorkbookAsync(workbook.Id, connections, Create(), Cancel); + var result = await sitesClient.Workbooks.GetWorkbookAsync(workbook.Id, Cancel); Assert.Empty(result.Errors); Assert.True(result.Success); @@ -124,7 +123,7 @@ public async Task Returns_success_on_success() var workbook = Api.Data.CreateWorkbook(AutoFixture); - Api.Data.AddWorkbook(workbook, fileData: Encoding.UTF8.GetBytes(workbook.ToXml())); + Api.Data.AddWorkbook(workbook, fileData: Constants.DefaultEncoding.GetBytes(workbook.ToXml())); var result = await sitesClient.Workbooks.DownloadWorkbookAsync(workbook.Id, true, Cancel); @@ -154,7 +153,7 @@ public async Task Returns_success_on_success() var options = new PublishWorkbookOptions( mockPublishable.Object, - new MemoryStream(Encoding.Default.GetBytes(new SimulatedWorkbookData().ToXml())), + new MemoryStream(Constants.DefaultEncoding.GetBytes(new SimulatedWorkbookData().ToXml())), WorkbookFileTypes.Twbx); var result = await sitesClient.Workbooks.PublishWorkbookAsync(options, Cancel); @@ -224,7 +223,7 @@ public async Task No_changes_returns_success() Name = project.Name }; - Api.Data.AddWorkbook(workbookResponse.Item, fileData: Encoding.UTF8.GetBytes(workbookResponse.Item.ToXml())); + Api.Data.AddWorkbook(workbookResponse.Item, fileData: Constants.DefaultEncoding.GetBytes(workbookResponse.Item.ToXml())); // Act var result = await sitesClient.Workbooks.UpdateWorkbookAsync( @@ -261,7 +260,7 @@ public async Task Set_name_returns_success() Name = project.Name }; - Api.Data.AddWorkbook(workbookResponse.Item, fileData: Encoding.UTF8.GetBytes(workbookResponse.Item.ToXml())); + Api.Data.AddWorkbook(workbookResponse.Item, fileData: Constants.DefaultEncoding.GetBytes(workbookResponse.Item.ToXml())); // Act var 
result = await sitesClient.Workbooks.UpdateWorkbookAsync( @@ -299,7 +298,7 @@ public async Task Set_show_tabs_returns_success() Name = project.Name }; - Api.Data.AddWorkbook(workbookResponse.Item, fileData: Encoding.UTF8.GetBytes(workbookResponse.Item.ToXml())); + Api.Data.AddWorkbook(workbookResponse.Item, fileData: Constants.DefaultEncoding.GetBytes(workbookResponse.Item.ToXml())); // Act var result = await sitesClient.Workbooks.UpdateWorkbookAsync( @@ -337,7 +336,7 @@ public async Task Clear_show_tabs_returns_success() Name = project.Name }; - Api.Data.AddWorkbook(workbookResponse.Item, fileData: Encoding.UTF8.GetBytes(workbookResponse.Item.ToXml())); + Api.Data.AddWorkbook(workbookResponse.Item, fileData: Constants.DefaultEncoding.GetBytes(workbookResponse.Item.ToXml())); // Act var result = await sitesClient.Workbooks.UpdateWorkbookAsync( @@ -375,7 +374,7 @@ public async Task Set_excrypt_extracts_returns_success() Name = project.Name }; - Api.Data.AddWorkbook(workbookResponse.Item, fileData: Encoding.UTF8.GetBytes(workbookResponse.Item.ToXml())); + Api.Data.AddWorkbook(workbookResponse.Item, fileData: Constants.DefaultEncoding.GetBytes(workbookResponse.Item.ToXml())); // Act var result = await sitesClient.Workbooks.UpdateWorkbookAsync( @@ -416,7 +415,7 @@ public async Task Set_project_returns_success() Name = project.Name }; - Api.Data.AddWorkbook(workbookResponse.Item, fileData: Encoding.UTF8.GetBytes(workbookResponse.Item.ToXml())); + Api.Data.AddWorkbook(workbookResponse.Item, fileData: Constants.DefaultEncoding.GetBytes(workbookResponse.Item.ToXml())); // Act var result = await sitesClient.Workbooks.UpdateWorkbookAsync( @@ -467,7 +466,7 @@ public async Task Set_owner_returns_success() Id = owner.Id }; - Api.Data.AddWorkbook(workbookResponse.Item, fileData: Encoding.UTF8.GetBytes(workbookResponse.Item.ToXml())); + Api.Data.AddWorkbook(workbookResponse.Item, fileData: Constants.DefaultEncoding.GetBytes(workbookResponse.Item.ToXml())); // Act var result = await sitesClient.Workbooks.UpdateWorkbookAsync( @@ -521,7 +520,7 @@ public async Task Returns_success_on_success() Id = owner.Id }; - var tagsCountBefore = workbook.Tags is null ? 0 : workbook.Tags.Length; + var tagsCountBefore = workbook.Tags.Length; Api.Data.AddWorkbook(workbook, null); var testTags = CreateMany().ToArray(); @@ -531,9 +530,7 @@ public async Task Returns_success_on_success() Assert.Empty(result.Errors); Assert.True(result.Success); - var connections = CreateMany().ToImmutableArray(); - - var getResult = await sitesClient.Workbooks.GetWorkbookAsync(workbook.Id, connections, Create(), Cancel); + var getResult = await sitesClient.Workbooks.GetWorkbookAsync(workbook.Id, Cancel); Assert.Empty(getResult.Errors); Assert.True(getResult.Success); Assert.NotNull(getResult.Value); @@ -569,10 +566,10 @@ public async Task No_Duplicates_Inserted() Id = owner.Id }; - var tagsCountBefore = workbook.Tags is null ? 
0 : workbook.Tags.Length; + var tagsCountBefore = workbook.Tags.Length; Api.Data.AddWorkbook(workbook, null); - var testTags = workbook.Tags?.Select(tag => new Tag(tag)).ToArray(); + var testTags = workbook.Tags.Select(tag => new Tag(tag)).ToArray(); Assert.NotNull(testTags); @@ -581,9 +578,7 @@ public async Task No_Duplicates_Inserted() Assert.Empty(result.Errors); Assert.True(result.Success); - var connections = CreateMany().ToImmutableArray(); - - var getResult = await sitesClient.Workbooks.GetWorkbookAsync(workbook.Id, connections, Create(), Cancel); + var getResult = await sitesClient.Workbooks.GetWorkbookAsync(workbook.Id, Cancel); Assert.Empty(getResult.Errors); Assert.True(getResult.Success); Assert.NotNull(getResult.Value); @@ -626,10 +621,10 @@ public async Task Returns_success_on_success() Id = owner.Id }; - var tagsCountBefore = workbook.Tags is null ? 0 : workbook.Tags.Length; + var tagsCountBefore = workbook.Tags.Length; Api.Data.AddWorkbook(workbook, null); - var tagsToRemove = workbook.Tags?.Select(t => new Tag(t)).ToArray(); + var tagsToRemove = workbook.Tags.Select(t => new Tag(t)).ToArray(); Assert.NotNull(tagsToRemove); var result = await sitesClient.Workbooks.Tags.RemoveTagsAsync(workbook.Id, tagsToRemove, Cancel); @@ -637,9 +632,7 @@ public async Task Returns_success_on_success() Assert.Empty(result.Errors); Assert.True(result.Success); - var connections = CreateMany().ToImmutableArray(); - - var getResult = await sitesClient.Workbooks.GetWorkbookAsync(workbook.Id, connections, Create(), Cancel); + var getResult = await sitesClient.Workbooks.GetWorkbookAsync(workbook.Id, Cancel); Assert.Empty(getResult.Errors); Assert.True(getResult.Success); Assert.NotNull(getResult.Value); @@ -661,7 +654,7 @@ public async Task Returns_success_on_success() var user = Create(); var fileData = Create(); - var workbook = Api.Data.CreateWorkbook(AutoFixture, project, user, Encoding.Default.GetBytes(fileData.ToXml())); + var workbook = Api.Data.CreateWorkbook(AutoFixture, project, user, Constants.DefaultEncoding.GetBytes(fileData.ToXml())); var result = await sitesClient.Workbooks.GetConnectionsAsync(workbook.Id, Cancel); @@ -683,7 +676,7 @@ public async Task Returns_success_on_success() var connectionToUpdate = fileData.Connections.First(); Assert.NotNull(connectionToUpdate); - var workbook = Api.Data.CreateWorkbook(AutoFixture, project, user, Encoding.Default.GetBytes(fileData.ToXml())); + var workbook = Api.Data.CreateWorkbook(AutoFixture, project, user, Constants.DefaultEncoding.GetBytes(fileData.ToXml())); var updateOptions = Create(); diff --git a/tests/Tableau.Migration.Tests/Simulation/Tests/DataSourceMigrationTests.cs b/tests/Tableau.Migration.Tests/Simulation/Tests/DataSourceMigrationTests.cs index 3351f2f..1b659e4 100644 --- a/tests/Tableau.Migration.Tests/Simulation/Tests/DataSourceMigrationTests.cs +++ b/tests/Tableau.Migration.Tests/Simulation/Tests/DataSourceMigrationTests.cs @@ -28,13 +28,17 @@ namespace Tableau.Migration.Tests.Simulation.Tests { public class DataSourceMigrationTests { - public class ServerToCloud : ServerToCloudSimulationTestBase + public class UsersBatch : ServerToCloud { - protected override IServiceCollection ConfigureServices(IServiceCollection services) - { - return services.AddTableauMigrationSdk(); - } + } + public class UsersIndividual : ServerToCloud + { + protected override bool UsersBatchImportEnabled => false; + } + + public abstract class ServerToCloud : ServerToCloudSimulationTestBase + { [Fact] public async Task 
MigratesAllDataSourcesToCloudAsync() { @@ -86,7 +90,7 @@ void AssertDataSourceMigrated(DataSourceResponse.DataSourceType sourceDataSource Assert.NotEqual(destinationDataSource.Owner.Id, Guid.Empty); Assert.NotEqual(destinationDataSource.Owner.Id, sourceDataSource.Owner?.Id); - AssertTags(sourceDataSource.Tags, destinationDataSource.Tags); + sourceDataSource.Tags.AssertEqual(destinationDataSource.Tags); } } diff --git a/tests/Tableau.Migration.Tests/Simulation/Tests/GroupMigrationTests.cs b/tests/Tableau.Migration.Tests/Simulation/Tests/GroupMigrationTests.cs index eb45e86..a7ff875 100644 --- a/tests/Tableau.Migration.Tests/Simulation/Tests/GroupMigrationTests.cs +++ b/tests/Tableau.Migration.Tests/Simulation/Tests/GroupMigrationTests.cs @@ -26,13 +26,17 @@ namespace Tableau.Migration.Tests.Simulation.Tests { public class GroupMigrationTests { - public class ServerToCloud : ServerToCloudSimulationTestBase + public class UsersBatch : ServerToCloud { - protected override IServiceCollection ConfigureServices(IServiceCollection services) - { - return services.AddTableauMigrationSdk(); - } + } + public class UsersIndividual : ServerToCloud + { + protected override bool UsersBatchImportEnabled => false; + } + + public abstract class ServerToCloud : ServerToCloudSimulationTestBase + { [Fact] public async Task MigratesAllGroupsToCloudAsync() { diff --git a/tests/Tableau.Migration.Tests/Simulation/Tests/IncrementalMigrationTests.cs b/tests/Tableau.Migration.Tests/Simulation/Tests/IncrementalMigrationTests.cs index f083133..2ecdcac 100644 --- a/tests/Tableau.Migration.Tests/Simulation/Tests/IncrementalMigrationTests.cs +++ b/tests/Tableau.Migration.Tests/Simulation/Tests/IncrementalMigrationTests.cs @@ -24,147 +24,155 @@ namespace Tableau.Migration.Tests.Simulation.Tests { - public class IncrementalMigrationTests : ServerToCloudSimulationTestBase + public class IncrementalMigrationTests { - protected override IServiceCollection ConfigureServices(IServiceCollection services) + public class UsersBatch : ServerToCloud { - return services.AddTableauMigrationSdk(); } - private IMigrationPlanBuilder ConfigurePlanBuilder() + public class UsersIndividual : ServerToCloud { - return ServiceProvider.GetRequiredService() - .FromSource(SourceEndpointConfig) - .ToDestination(CloudDestinationEndpointConfig) - .ForServerToCloud() - .WithTableauIdAuthenticationType() - .WithTableauCloudUsernames("test.com"); + protected override bool UsersBatchImportEnabled => false; } - [Fact] - public async Task IncrementalFilterAsync() + + public abstract class ServerToCloud : ServerToCloudSimulationTestBase { - //Scenario: We are migrating workbooks but have mistakenly - //included a filter that prevents some of the workbooks from migrating. - //We want to re-run the migration an only migrate the workbooks we missed the first run. + private IMigrationPlanBuilder ConfigurePlanBuilder() + { + return ServiceProvider.GetRequiredService() + .FromSource(SourceEndpointConfig) + .ToDestination(CloudDestinationEndpointConfig) + .ForServerToCloud() + .WithTableauIdAuthenticationType() + .WithTableauCloudUsernames("test.com"); + } + + [Fact] + public async Task IncrementalFilterAsync() + { + //Scenario: We are migrating workbooks but have mistakenly + //included a filter that prevents some of the workbooks from migrating. + //We want to re-run the migration an only migrate the workbooks we missed the first run. - //Arrange - create source content to migrate. 
- var sourceProjects = PrepareSourceProjectsData(); - var sourceWorkbooks = PrepareSourceWorkbooksData(); + //Arrange - create source content to migrate. + var sourceProjects = PrepareSourceProjectsData(); + var sourceWorkbooks = PrepareSourceWorkbooksData(); - var filteredWorkbooksIds = sourceWorkbooks - .Where((w, i) => i % 2 == 1) - .Select(w => w.Id) - .ToImmutableHashSet(); + var filteredWorkbooksIds = sourceWorkbooks + .Where((w, i) => i % 2 == 1) + .Select(w => w.Id) + .ToImmutableHashSet(); - var migrator = ServiceProvider.GetRequiredService(); + var migrator = ServiceProvider.GetRequiredService(); - //First migration, filters out some workbooks. - var planBuilder = ConfigurePlanBuilder(); + //First migration, filters out some workbooks. + var planBuilder = ConfigurePlanBuilder(); - planBuilder.Filters.Add(items => items.Where(i => !filteredWorkbooksIds.Contains(i.SourceItem.Id))); + planBuilder.Filters.Add(items => items.Where(i => !filteredWorkbooksIds.Contains(i.SourceItem.Id))); - var plan = planBuilder.Build(); - var result1 = await migrator.ExecuteAsync(plan, Cancel); + var plan = planBuilder.Build(); + var result1 = await migrator.ExecuteAsync(plan, Cancel); - //Sanity test our 'mistake' filter worked. - Assert.Equal(sourceWorkbooks.Count - filteredWorkbooksIds.Count, CloudDestinationApi.Data.Workbooks.Count); + //Sanity test our 'mistake' filter worked. + Assert.Equal(sourceWorkbooks.Count - filteredWorkbooksIds.Count, CloudDestinationApi.Data.Workbooks.Count); - Assert.All(result1.Manifest.Entries.ForContentType(), we => - { - if (filteredWorkbooksIds.Contains(we.Source.Id)) + Assert.All(result1.Manifest.Entries.ForContentType(), we => { - Assert.Equal(MigrationManifestEntryStatus.Skipped, we.Status); - Assert.False(we.HasMigrated); - } - else + if (filteredWorkbooksIds.Contains(we.Source.Id)) + { + Assert.Equal(MigrationManifestEntryStatus.Skipped, we.Status); + Assert.False(we.HasMigrated); + } + else + { + Assert.Equal(MigrationManifestEntryStatus.Migrated, we.Status); + Assert.True(we.HasMigrated); + } + }); + + //Perform second (incremental) migration + planBuilder = ConfigurePlanBuilder(); + plan = planBuilder.Build(); + + var result2 = await migrator.ExecuteAsync(plan, result1.Manifest, Cancel); + + //Assert everything moved eventually, but only the workbooks that were filtered out + //last run were migrated this run. + Assert.Equal(sourceWorkbooks.Count, CloudDestinationApi.Data.Workbooks.Count); + + Assert.All(result2.Manifest.Entries.ForContentType(), we => { - Assert.Equal(MigrationManifestEntryStatus.Migrated, we.Status); - Assert.True(we.HasMigrated); - } - }); - - //Perform second (incremental) migration - planBuilder = ConfigurePlanBuilder(); - plan = planBuilder.Build(); - - var result2 = await migrator.ExecuteAsync(plan, result1.Manifest, Cancel); - - //Assert everything moved eventually, but only the workbooks that were filtered out - //last run were migrated this run. 
- Assert.Equal(sourceWorkbooks.Count, CloudDestinationApi.Data.Workbooks.Count); - - Assert.All(result2.Manifest.Entries.ForContentType(), we => + if (filteredWorkbooksIds.Contains(we.Source.Id)) + { + Assert.Equal(MigrationManifestEntryStatus.Migrated, we.Status); + Assert.True(we.HasMigrated); + } + else + { + Assert.Equal(MigrationManifestEntryStatus.Skipped, we.Status); + Assert.True(we.HasMigrated); + } + }); + } + + [Fact] + public async Task ContentAfterStructureAsync() { - if (filteredWorkbooksIds.Contains(we.Source.Id)) - { - Assert.Equal(MigrationManifestEntryStatus.Migrated, we.Status); - Assert.True(we.HasMigrated); - } - else - { - Assert.Equal(MigrationManifestEntryStatus.Skipped, we.Status); - Assert.True(we.HasMigrated); - } - }); - } + //Scenario: A user migrates the users/groups/project structure in a first run. + //then migrates data source/workbook content in a second run. - [Fact] - public async Task ContentAfterStructureAsync() - { - //Scenario: A user migrates the users/groups/project structure in a first run. - //then migrates data source/workbook content in a second run. + //Arrange - create source content to migrate. + (var sourceUsers, var sourceSupportUsers) = PrepareSourceUsersData(); + var sourceGroups = PrepareSourceGroupsData(); + var sourceProjects = PrepareSourceProjectsData(); - //Arrange - create source content to migrate. - (var sourceUsers, var sourceSupportUsers) = PrepareSourceUsersData(); - var sourceGroups = PrepareSourceGroupsData(); - var sourceProjects = PrepareSourceProjectsData(); + var migrator = ServiceProvider.GetRequiredService(); - var migrator = ServiceProvider.GetRequiredService(); + //First migration, filters out some workbooks. + var planBuilder = ConfigurePlanBuilder(); - //First migration, filters out some workbooks. - var planBuilder = ConfigurePlanBuilder(); + var plan = planBuilder.Build(); + var result1 = await migrator.ExecuteAsync(plan, Cancel); - var plan = planBuilder.Build(); - var result1 = await migrator.ExecuteAsync(plan, Cancel); - - //Sanity test our first migration worked. - Assert.All(result1.Manifest.Entries.ForContentType(), e => - { - //Don't expect support users to migrate. - if (sourceUsers.Any(u => u.Id == e.Source.Id)) + //Sanity test our first migration worked. + Assert.All(result1.Manifest.Entries.ForContentType(), e => { - Assert.Equal(MigrationManifestEntryStatus.Migrated, e.Status); - Assert.True(e.HasMigrated); - } - }); - Assert.All(result1.Manifest.Entries.ForContentType(), e => - { - //Don't expect the all users group to migrate. - if (sourceGroups.Any(u => u.Id == e.Source.Id)) + //Don't expect support users to migrate. + if (sourceUsers.Any(u => u.Id == e.Source.Id)) + { + Assert.Equal(MigrationManifestEntryStatus.Migrated, e.Status); + Assert.True(e.HasMigrated); + } + }); + Assert.All(result1.Manifest.Entries.ForContentType(), e => { - Assert.Equal(MigrationManifestEntryStatus.Migrated, e.Status); - Assert.True(e.HasMigrated); - } - }); - Assert.All(result1.Manifest.Entries.ForContentType(), e => Assert.Equal(MigrationManifestEntryStatus.Migrated, e.Status)); - - //Perform second (incremental) migration - var sourceDataSources = PrepareSourceDataSourceData(); - var sourceWorkbooks = PrepareSourceWorkbooksData(); - - planBuilder = ConfigurePlanBuilder(); - plan = planBuilder.Build(); - - var result2 = await migrator.ExecuteAsync(plan, result1.Manifest, Cancel); - - //Assert everything moved eventually, but only the workbooks that were filtered out - //last run were migrated this run. 
- Assert.All(result2.Manifest.Entries.ForContentType(), e => Assert.Equal(MigrationManifestEntryStatus.Skipped, e.Status)); - Assert.All(result2.Manifest.Entries.ForContentType(), e => Assert.Equal(MigrationManifestEntryStatus.Skipped, e.Status)); - Assert.All(result2.Manifest.Entries.ForContentType(), e => Assert.Equal(MigrationManifestEntryStatus.Skipped, e.Status)); - Assert.All(result2.Manifest.Entries.ForContentType(), e => Assert.Equal(MigrationManifestEntryStatus.Migrated, e.Status)); - Assert.All(result2.Manifest.Entries.ForContentType(), e => Assert.Equal(MigrationManifestEntryStatus.Migrated, e.Status)); + //Don't expect the all users group to migrate. + if (sourceGroups.Any(u => u.Id == e.Source.Id)) + { + Assert.Equal(MigrationManifestEntryStatus.Migrated, e.Status); + Assert.True(e.HasMigrated); + } + }); + Assert.All(result1.Manifest.Entries.ForContentType(), e => Assert.Equal(MigrationManifestEntryStatus.Migrated, e.Status)); + + //Perform second (incremental) migration + var sourceDataSources = PrepareSourceDataSourceData(); + var sourceWorkbooks = PrepareSourceWorkbooksData(); + + planBuilder = ConfigurePlanBuilder(); + plan = planBuilder.Build(); + + var result2 = await migrator.ExecuteAsync(plan, result1.Manifest, Cancel); + + //Assert everything moved eventually, but only the workbooks that were filtered out + //last run were migrated this run. + Assert.All(result2.Manifest.Entries.ForContentType(), e => Assert.Equal(MigrationManifestEntryStatus.Skipped, e.Status)); + Assert.All(result2.Manifest.Entries.ForContentType(), e => Assert.Equal(MigrationManifestEntryStatus.Skipped, e.Status)); + Assert.All(result2.Manifest.Entries.ForContentType(), e => Assert.Equal(MigrationManifestEntryStatus.Skipped, e.Status)); + Assert.All(result2.Manifest.Entries.ForContentType(), e => Assert.Equal(MigrationManifestEntryStatus.Migrated, e.Status)); + Assert.All(result2.Manifest.Entries.ForContentType(), e => Assert.Equal(MigrationManifestEntryStatus.Migrated, e.Status)); + } } } } diff --git a/tests/Tableau.Migration.Tests/Simulation/Tests/MigratorTests.cs b/tests/Tableau.Migration.Tests/Simulation/Tests/MigratorTests.cs index 2984f71..7313428 100644 --- a/tests/Tableau.Migration.Tests/Simulation/Tests/MigratorTests.cs +++ b/tests/Tableau.Migration.Tests/Simulation/Tests/MigratorTests.cs @@ -120,11 +120,5 @@ public async Task ThrowCancelAsync() Assert.Empty(result.Manifest.Errors); Assert.Equal(MigrationCompletionStatus.Canceled, result.Status); } - - protected override IServiceCollection ConfigureServices(IServiceCollection services) - { - return base.ConfigureServices(services) - .AddTableauMigrationSdk(); - } } } diff --git a/tests/Tableau.Migration.Tests/Simulation/Tests/ProjectMigrationTests.cs b/tests/Tableau.Migration.Tests/Simulation/Tests/ProjectMigrationTests.cs index 576ea33..347474f 100644 --- a/tests/Tableau.Migration.Tests/Simulation/Tests/ProjectMigrationTests.cs +++ b/tests/Tableau.Migration.Tests/Simulation/Tests/ProjectMigrationTests.cs @@ -27,13 +27,17 @@ namespace Tableau.Migration.Tests.Simulation.Tests { public class ProjectMigrationTests { - public class ServerToCloud : ServerToCloudSimulationTestBase + public class UsersBatch : ServerToCloud { - protected override IServiceCollection ConfigureServices(IServiceCollection services) - { - return services.AddTableauMigrationSdk(); - } + } + public class UsersIndividual : ServerToCloud + { + protected override bool UsersBatchImportEnabled => false; + } + + public abstract class ServerToCloud : 
ServerToCloudSimulationTestBase
+    {
         [Fact]
         public async Task MigratesAllProjectsToCloudAsync()
         {
diff --git a/tests/Tableau.Migration.Tests/Simulation/Tests/SimulatorFactoryTests.cs b/tests/Tableau.Migration.Tests/Simulation/Tests/SimulatorFactoryTests.cs
new file mode 100644
index 0000000..b773e78
--- /dev/null
+++ b/tests/Tableau.Migration.Tests/Simulation/Tests/SimulatorFactoryTests.cs
@@ -0,0 +1,55 @@
+// Copyright (c) 2023, Salesforce, Inc.
+// SPDX-License-Identifier: Apache-2
+//
+// Licensed under the Apache License, Version 2.0 (the ""License"")
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an ""AS IS"" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+using System.Threading.Tasks;
+using Microsoft.Extensions.DependencyInjection;
+using Xunit;
+
+namespace Tableau.Migration.Tests.Simulation.Tests
+{
+    public class SimulatorFactoryTests
+    {
+        public class Authentication : SimulationTestBase
+        {
+            protected override IServiceCollection ConfigureServices(IServiceCollection services)
+            {
+                return services.AddTableauMigrationSdk();
+            }
+
+            /// <summary>
+            /// GH Issue #9
+            /// Ensure the simulator built by a plan builder can authenticate.
+            /// </summary>
+            [Fact]
+            public async Task AuthenticatesAsync()
+            {
+                var plan = ServiceProvider.GetRequiredService<IMigrationPlanBuilder>()
+                    .FromSourceTableauServer(new("http://source"), "s", "sTokenName", "sToken", createApiSimulator: true)
+                    .ToDestinationTableauCloud(new("http://destination"), "d", "dTokenName", "dToken", createApiSimulator: true)
+                    .ForServerToCloud()
+                    .Build();
+
+                var migrator = ServiceProvider.GetRequiredService<IMigrator>();
+                var result = await migrator.ExecuteAsync(plan, Cancel);
+
+                //Assert - empty migration should have succeded.
+ + Assert.Empty(result.Manifest.Errors); + Assert.Equal(MigrationCompletionStatus.Completed, result.Status); + } + } + } +} diff --git a/tests/Tableau.Migration.Tests/Simulation/Tests/UserMigrationTests.cs b/tests/Tableau.Migration.Tests/Simulation/Tests/UserMigrationTests.cs index 877516d..98ec914 100644 --- a/tests/Tableau.Migration.Tests/Simulation/Tests/UserMigrationTests.cs +++ b/tests/Tableau.Migration.Tests/Simulation/Tests/UserMigrationTests.cs @@ -27,13 +27,17 @@ namespace Tableau.Migration.Tests.Simulation.Tests { public class UserMigrationTests { - public class ServerToCloud : ServerToCloudSimulationTestBase + public class Batch : ServerToCloud { - protected override IServiceCollection ConfigureServices(IServiceCollection services) - { - return services.AddTableauMigrationSdk(); - } + } + public class Individual : ServerToCloud + { + protected override bool UsersBatchImportEnabled => false; + } + + public abstract class ServerToCloud : ServerToCloudSimulationTestBase + { [Fact] public async Task MigratesAllUsersToCloudAsync() { @@ -72,7 +76,7 @@ void AssertUserMigrated(UsersResponse.UserType sourceUser) Assert.NotEqual(sourceUser.Id, destinationUser.Id); Assert.Equal(sourceUser.Domain?.Name, destinationUser.Domain?.Name); Assert.Equal(sourceUser.Name, destinationUser.Name); - Assert.Equal(sourceUser.Email, destinationUser.Email); + Assert.Null(destinationUser.Email); if (sourceUser.SiteRole == SiteRoles.Viewer || sourceUser.SiteRole == SiteRoles.Guest || @@ -88,7 +92,7 @@ void AssertUserMigrated(UsersResponse.UserType sourceUser) { Assert.Equal(sourceUser.SiteRole, destinationUser.SiteRole); } - Assert.Equal(sourceUser.FullName, destinationUser.FullName); + Assert.Null(destinationUser.FullName); } Assert.All(SourceApi.Data.Users.Where(u => u.SiteRole != SiteRoles.SupportUser), AssertUserMigrated); diff --git a/tests/Tableau.Migration.Tests/Simulation/Tests/WorkbookMigrationTests.cs b/tests/Tableau.Migration.Tests/Simulation/Tests/WorkbookMigrationTests.cs index 74148fa..b7f894e 100644 --- a/tests/Tableau.Migration.Tests/Simulation/Tests/WorkbookMigrationTests.cs +++ b/tests/Tableau.Migration.Tests/Simulation/Tests/WorkbookMigrationTests.cs @@ -31,13 +31,17 @@ namespace Tableau.Migration.Tests.Simulation.Tests { public class WorkbookMigrationTests { - public class ServerToCloud : ServerToCloudSimulationTestBase + public class UsersBatch : ServerToCloud { - protected override IServiceCollection ConfigureServices(IServiceCollection services) - { - return services.AddTableauMigrationSdk(); - } + } + public class UsersIndividual : ServerToCloud + { + protected override bool UsersBatchImportEnabled => false; + } + + public abstract class ServerToCloud : ServerToCloudSimulationTestBase + { [Fact] public async Task MigratesAllWorkbooksToCloudAsync() { @@ -91,7 +95,7 @@ void AssertWorkbookMigrated(WorkbookResponse.WorkbookType sourceWorkbook) Assert.NotEqual(destinationWorkbook.Owner.Id, sourceWorkbook.Owner?.Id); // Assert tags - AssertTags(sourceWorkbook.Tags, destinationWorkbook.Tags); + sourceWorkbook.Tags.AssertEqual(destinationWorkbook.Tags); // Assert views Assert.All(sourceWorkbook.Views, AssertWorkbookViewMigrated); @@ -116,7 +120,7 @@ void AssertWorkbookViewMigrated(WorkbookResponse.WorkbookType.ViewReferenceType CloudDestinationApi.Data.ViewPermissions[destinationView.Id]); // Assert view tags - AssertTags(sourceView.Tags, destinationView.Tags); + sourceView.Tags.AssertEqual(destinationView.Tags); // No need to verify owner as it's not migratable. 
View owner is the same as workbook owner. } @@ -127,7 +131,7 @@ void AssertWorkbookConnectionsMigrated(WorkbookResponse.WorkbookType sourceWorkb var sourceWorkbookFile = SourceApi.Data.WorkbookFiles[sourceWorkbook!.Id]; Assert.NotNull(sourceWorkbookFile); - var sourceSimulatedWorkbook = Encoding.Default + var sourceSimulatedWorkbook = Constants.DefaultEncoding .GetString(sourceWorkbookFile) .FromXml(); @@ -137,7 +141,7 @@ void AssertWorkbookConnectionsMigrated(WorkbookResponse.WorkbookType sourceWorkb var destinationWorkbookFile = CloudDestinationApi.Data.WorkbookFiles[destinationWorkbook!.Id]; Assert.NotNull(destinationWorkbookFile); - var destinationSimulatedWorkbook = Encoding.Default + var destinationSimulatedWorkbook = Constants.DefaultEncoding .GetString(destinationWorkbookFile) .FromXml(); diff --git a/tests/Tableau.Migration.Tests/StreamFactory.cs b/tests/Tableau.Migration.Tests/StreamFactory.cs new file mode 100644 index 0000000..7d9a38e --- /dev/null +++ b/tests/Tableau.Migration.Tests/StreamFactory.cs @@ -0,0 +1,44 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.IO; + +namespace Tableau.Migration.Tests +{ + public class StreamFactory + where T : Stream + { + private readonly Func _factory; + + public StreamFactory(Func factory) + { + _factory = factory; + } + + public T Create(byte[] data, Action? configure = null) + { + var stream = _factory(data); + + configure?.Invoke(stream); + + return stream; + } + + public T Create(StreamFactoryOptions options, Action? configure = null) + => Create(options.Data, configure); + } +} diff --git a/tests/Tableau.Migration.Tests/StreamFactoryOptions.cs b/tests/Tableau.Migration.Tests/StreamFactoryOptions.cs new file mode 100644 index 0000000..3c77ff6 --- /dev/null +++ b/tests/Tableau.Migration.Tests/StreamFactoryOptions.cs @@ -0,0 +1,44 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +using System; +using System.Linq; +using AutoFixture; + +namespace Tableau.Migration.Tests +{ + public record StreamFactoryOptions + { + public readonly byte[] Data; + + private StreamFactoryOptions() + : this(Array.Empty()) + { } + + public StreamFactoryOptions(byte[] data) + { + Data = data; + } + + public StreamFactoryOptions(string content) + : this(Constants.DefaultEncoding.GetBytes(content)) + { } + + public StreamFactoryOptions(IFixture autoFixture, int length) + : this(new String(autoFixture.CreateMany(length).ToArray())) + { } + } +} diff --git a/tests/Tableau.Migration.Tests/Tableau.Migration.Tests.csproj b/tests/Tableau.Migration.Tests/Tableau.Migration.Tests.csproj index f39ee47..0db2537 100644 --- a/tests/Tableau.Migration.Tests/Tableau.Migration.Tests.csproj +++ b/tests/Tableau.Migration.Tests/Tableau.Migration.Tests.csproj @@ -21,17 +21,18 @@ - + + all runtime; build; native; contentfiles; analyzers; buildtransitive - + - - + + all runtime; build; native; contentfiles; analyzers; buildtransitive diff --git a/tests/Tableau.Migration.Tests/TagExtensions.cs b/tests/Tableau.Migration.Tests/TagExtensions.cs new file mode 100644 index 0000000..75a8b59 --- /dev/null +++ b/tests/Tableau.Migration.Tests/TagExtensions.cs @@ -0,0 +1,50 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System.Collections.Generic; +using System.Linq; +using Tableau.Migration.Api.Rest.Models; +using Tableau.Migration.Content; + +namespace Tableau.Migration.Tests +{ + public static class TagExtensions + { + public static void AssertEqual(this ITag? tag, ITag? other) + => TagLabelComparer.Instance.Equals(tag, other); + + public static void AssertEqual(this ITag? tag, ITagType? other) + => TagLabelComparer.Instance.Equals(tag, other is not null ? new Tag(other) : null); + + public static void AssertEqual(this ITagType? tag, ITagType? other) + => ITagTypeComparer.Instance.Equals(tag, other); + + public static void AssertEqual(this ITagType? tag, ITag? other) + => AssertEqual(other is not null ? new Tag(other) : null, tag); + + public static void AssertEqual(this IEnumerable? tags, IEnumerable? others) + => tags.SequenceEqual(others, t => t.Label); + + public static void AssertEqual(this IEnumerable? tags, IEnumerable? others) + => tags.AssertEqual(others?.Select(t => new Tag(t))); + + public static void AssertEqual(this IEnumerable? tags, IEnumerable? others) + => tags.SequenceEqual(others, t => t.Label); + + public static void AssertEqual(this IEnumerable? tags, IEnumerable? 
others) + => AssertEqual(others?.Select(t => new Tag(t)), tags); + } +} diff --git a/tests/Tableau.Migration.Tests/Unit/Api/DataSourcesApiClientTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/DataSourcesApiClientTests.cs index c8c09f5..3bfd172 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/DataSourcesApiClientTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/DataSourcesApiClientTests.cs @@ -19,7 +19,6 @@ using System.Linq; using System.Net; using System.Net.Http; -using System.Text; using System.Threading.Tasks; using Moq; using Tableau.Migration.Api; @@ -62,13 +61,8 @@ public async Task ErrorAsync() MockHttpClient.SetupResponse(mockResponse); var dataSourceId = Guid.NewGuid(); - var connections = CreateMany().ToImmutableArray(); - var result = await ApiClient.GetDataSourceAsync( - dataSourceId, - connections, - Create(), - Cancel); + var result = await ApiClient.GetDataSourceAsync(dataSourceId, Cancel); result.AssertFailure(); @@ -88,11 +82,7 @@ public async Task FailureResponseAsync() var dataSourceId = Guid.NewGuid(); var connections = CreateMany().ToImmutableArray(); - var result = await ApiClient.GetDataSourceAsync( - dataSourceId, - connections, - Create(), - Cancel); + var result = await ApiClient.GetDataSourceAsync(dataSourceId, Cancel); result.AssertFailure(); @@ -122,12 +112,9 @@ public async Task SuccessAsync() MockHttpClient.SetupResponse(mockResponse); var dataSourceId = Guid.NewGuid(); - var connections = CreateMany().ToImmutableArray(); var result = await ApiClient.GetDataSourceAsync( dataSourceId, - connections, - Create(), Cancel); result.AssertSuccess(); @@ -191,7 +178,7 @@ public async Task FailureResponseAsync() [Fact] public async Task SuccessAsync() { - var content = new ByteArrayContent(Encoding.UTF8.GetBytes("hi2u")); + var content = new ByteArrayContent(Constants.DefaultEncoding.GetBytes("hi2u")); var mockResponse = new MockHttpResponseMessage(content); MockHttpClient.SetupResponse(mockResponse); diff --git a/tests/Tableau.Migration.Tests/Unit/Api/GroupsApiClientTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/GroupsApiClientTests.cs index 7891039..f136887 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/GroupsApiClientTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/GroupsApiClientTests.cs @@ -27,9 +27,9 @@ using Tableau.Migration.Api.Rest.Models; using Tableau.Migration.Api.Rest.Models.Requests; using Tableau.Migration.Api.Rest.Models.Responses; +using Tableau.Migration.Config; using Tableau.Migration.Content; using Tableau.Migration.Net; -using Tableau.Migration.Tests.Unit; using Xunit; namespace Tableau.Migration.Tests.Unit.Api @@ -38,6 +38,12 @@ public class GroupsApiClientTests { public abstract class GroupsApiClientTest : ApiClientTestBase { + public GroupsApiClientTest() + { + MockConfigReader + .Setup(x => x.Get()) + .Returns(new ContentTypesOptions()); + } internal GroupsApiClient GroupsApiClient => GetApiClient(); } @@ -304,7 +310,7 @@ public async Task Succeeds_when_group_exists() AssertGetGroupRequest(MockHttpClient.SentRequests[1], existingGroup.Domain, existingGroup.Name); AssertAddUsersToGroup(MockHttpClient, existingGroup); } - + [Fact] public async Task Removes_extra_users() { @@ -376,7 +382,7 @@ public async Task Returns_failure() request.AssertRelativeUri($"/api/{TableauServerVersion.RestApiVersion}/sites/{SiteId}/groups"); } - + #region - Assert Helpers - @@ -437,7 +443,7 @@ private void AssertGetGroupRequest(HttpRequestMessage r, string domain, string n /// - During the delete loop /// - During the add loop /// - public 
class PublishAsync_Cancellation: GroupsApiClientTest + public class PublishAsync_Cancellation : GroupsApiClientTest { [Fact] public async void Publish_cancel_after_create_group() diff --git a/tests/Tableau.Migration.Tests/Unit/Api/IContentFileStoreExtensionsTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/IContentFileStoreExtensionsTests.cs index b2b5cab..925e057 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/IContentFileStoreExtensionsTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/IContentFileStoreExtensionsTests.cs @@ -15,7 +15,6 @@ // using System.IO; -using System.Text; using System.Threading; using System.Threading.Tasks; using Tableau.Migration.Api; @@ -36,14 +35,14 @@ public async Task CreatesFromFileDownloadAsync() var fs = new MemoryContentFileStore(); var fileText = "text"; - await using (var fileDownload = new FileDownload("fileName", new MemoryStream(Encoding.UTF8.GetBytes(fileText)))) + await using (var fileDownload = new FileDownload("fileName", new MemoryStream(Constants.DefaultEncoding.GetBytes(fileText)))) { var file = await fs.CreateAsync(new object(), fileDownload, cancel); Assert.Equal(fileDownload.Filename, file.OriginalFileName); await using (var readStream = await file.OpenReadAsync(cancel)) - using (var reader = new StreamReader(readStream.Content, Encoding.UTF8)) + using (var reader = new StreamReader(readStream.Content, Constants.DefaultEncoding)) { Assert.Equal(fileText, reader.ReadToEnd()); } diff --git a/tests/Tableau.Migration.Tests/Unit/Api/IContentReferenceFinderFactoryExtensionsTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/IContentReferenceFinderFactoryExtensionsTests.cs new file mode 100644 index 0000000..d06fbb1 --- /dev/null +++ b/tests/Tableau.Migration.Tests/Unit/Api/IContentReferenceFinderFactoryExtensionsTests.cs @@ -0,0 +1,250 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Moq; +using Tableau.Migration.Api; +using Tableau.Migration.Api.Rest; +using Tableau.Migration.Api.Rest.Models; +using Tableau.Migration.Content; +using Tableau.Migration.Content.Search; +using Tableau.Migration.Resources; +using Xunit; + +namespace Tableau.Migration.Tests.Unit.Api +{ + public class IContentReferenceFinderFactoryExtensionsTests + { + private class WithProjectType : IWithProjectType, INamedContent + { + public virtual string? Name { get; set; } + public virtual IProjectReferenceType? Project { get; set; } + } + + private class WithOwnerType : IWithOwnerType, INamedContent + { + public virtual string? Name { get; set; } + public virtual IOwnerType? 
Owner { get; set; } + } + + public abstract class IContentReferenceFinderFactoryExtensionsTest : AutoFixtureTestBase + { + protected readonly Mock MockFinderFactory = new(); + protected readonly Mock> MockProjectFinder = new(); + protected readonly Mock> MockUserFinder = new(); + protected readonly Mock MockLogger = new(); + protected readonly ISharedResourcesLocalizer SharedResourcesLocalizer = new TestSharedResourcesLocalizer(); + + public IContentReferenceFinderFactoryExtensionsTest() + { + MockFinderFactory.Setup(f => f.ForContentType()).Returns(MockProjectFinder.Object); + MockFinderFactory.Setup(f => f.ForContentType()).Returns(MockUserFinder.Object); + } + } + + public class FindProjectAsync : IContentReferenceFinderFactoryExtensionsTest + { + [Fact] + public async Task Throws_when_response_is_null() + { + await Assert.ThrowsAsync(() => + MockFinderFactory.Object.FindProjectAsync( + null, + MockLogger.Object, + SharedResourcesLocalizer, + true, + Cancel)); + } + + [Fact] + public async Task Throws_when_response_project_is_null() + { + await Assert.ThrowsAsync(() => + MockFinderFactory.Object.FindProjectAsync( + new WithProjectType(), + MockLogger.Object, + SharedResourcesLocalizer, + true, + Cancel)); + } + + [Fact] + public async Task Throws_when_response_project_id_is_default() + { + var mockProjectReference = new Mock(); + mockProjectReference.SetupGet(p => p.Id).Returns(Guid.Empty); + + var response = new WithProjectType { Project = mockProjectReference.Object }; + + await Assert.ThrowsAsync(() => + MockFinderFactory.Object.FindProjectAsync( + response, + MockLogger.Object, + SharedResourcesLocalizer, + true, + Cancel)); + } + + [Fact] + public async Task Returns_project_when_found() + { + var mockContentReference = new Mock(); + mockContentReference.SetupGet(p => p.Id).Returns(Guid.NewGuid()); + + var mockProjectReference = new Mock(); + mockProjectReference.SetupGet(p => p.Id).Returns(mockContentReference.Object.Id); + + MockProjectFinder.Setup(f => f.FindByIdAsync(mockContentReference.Object.Id, Cancel)).ReturnsAsync(mockContentReference.Object); + + var response = new WithProjectType { Project = mockProjectReference.Object }; + + var result = await MockFinderFactory.Object.FindProjectAsync( + response, + MockLogger.Object, + SharedResourcesLocalizer, + true, + Cancel); + + Assert.Same(mockContentReference.Object, result); + } + + [Fact] + public async Task Returns_null_when_not_found_and_throw_is_false() + { + var response = new WithProjectType { Project = Create>().Object }; + + var result = await MockFinderFactory.Object.FindProjectAsync( + response, + MockLogger.Object, + SharedResourcesLocalizer, + false, + Cancel); + + Assert.Null(result); + } + + [Fact] + public async Task Throws_when_not_found_and_throw_is_true() + { + var response = new WithProjectType { Project = Create>().Object }; + + await Assert.ThrowsAsync(() => MockFinderFactory.Object.FindProjectAsync( + response, + MockLogger.Object, + SharedResourcesLocalizer, + true, + Cancel)); + } + } + + public class FindOwnerAsync : IContentReferenceFinderFactoryExtensionsTest + { + [Fact] + public async Task Throws_when_response_is_null() + { + await Assert.ThrowsAsync(() => + MockFinderFactory.Object.FindOwnerAsync( + null, + MockLogger.Object, + SharedResourcesLocalizer, + true, + Cancel)); + } + + [Fact] + public async Task Throws_when_response_owner_is_null() + { + await Assert.ThrowsAsync(() => + MockFinderFactory.Object.FindOwnerAsync( + new WithOwnerType(), + MockLogger.Object, + SharedResourcesLocalizer, + 
true, + Cancel)); + } + + [Fact] + public async Task Throws_when_response_owner_id_is_default() + { + var mockOwnerReference = new Mock(); + mockOwnerReference.SetupGet(o => o.Id).Returns(Guid.Empty); + + var response = new WithOwnerType { Owner = mockOwnerReference.Object }; + + await Assert.ThrowsAsync(() => + MockFinderFactory.Object.FindOwnerAsync( + response, + MockLogger.Object, + SharedResourcesLocalizer, + true, + Cancel)); + } + + [Fact] + public async Task Returns_owner_when_found() + { + var mockContentReference = new Mock(); + mockContentReference.SetupGet(o => o.Id).Returns(Guid.NewGuid()); + + var mockOwnerReference = new Mock(); + mockOwnerReference.SetupGet(o => o.Id).Returns(mockContentReference.Object.Id); + + MockUserFinder.Setup(f => f.FindByIdAsync(mockContentReference.Object.Id, Cancel)).ReturnsAsync(mockContentReference.Object); + + var response = new WithOwnerType { Owner = mockOwnerReference.Object }; + + var result = await MockFinderFactory.Object.FindOwnerAsync( + response, + MockLogger.Object, + SharedResourcesLocalizer, + true, + Cancel); + + Assert.Same(mockContentReference.Object, result); + } + + [Fact] + public async Task Returns_null_when_not_found_and_throw_is_false() + { + var response = new WithOwnerType { Owner = Create>().Object }; + + var result = await MockFinderFactory.Object.FindOwnerAsync( + response, + MockLogger.Object, + SharedResourcesLocalizer, + false, + Cancel); + + Assert.Null(result); + } + + [Fact] + public async Task Throws_when_not_found_and_throw_is_true() + { + var response = new WithOwnerType { Owner = Create>().Object }; + + await Assert.ThrowsAsync(() => MockFinderFactory.Object.FindOwnerAsync( + response, + MockLogger.Object, + SharedResourcesLocalizer, + true, + Cancel)); + } + } + } +} diff --git a/tests/Tableau.Migration.Tests/Unit/Api/IHttpRequestBuilderExtensionsTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/IHttpRequestBuilderExtensionsTests.cs index ef74358..b2cf4da 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/IHttpRequestBuilderExtensionsTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/IHttpRequestBuilderExtensionsTests.cs @@ -16,7 +16,6 @@ using System.Net.Http; using System.Net.Http.Headers; -using System.Text; using System.Threading.Tasks; using Moq; using Tableau.Migration.Api; @@ -48,7 +47,7 @@ public class DownloadAsync : IHttpRequestBuilderExtensionsTest [Fact] public async Task SendsAndDownloadsAsync() { - var content = new ByteArrayContent(Encoding.UTF8.GetBytes("test content")); + var content = new ByteArrayContent(Constants.DefaultEncoding.GetBytes("test content")); MockResponse.Setup(x => x.Content).Returns(content); var result = await MockRequestBuilder.Object diff --git a/tests/Tableau.Migration.Tests/Unit/Api/IHttpResponseMessageExtensionsTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/IHttpResponseMessageExtensionsTests.cs index e524feb..26afd75 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/IHttpResponseMessageExtensionsTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/IHttpResponseMessageExtensionsTests.cs @@ -19,7 +19,6 @@ using System.IO; using System.Net.Http; using System.Net.Http.Headers; -using System.Text; using System.Threading.Tasks; using Microsoft.Extensions.Localization; using Moq; @@ -420,7 +419,7 @@ public async Task ExceptionAsync() [Fact] public async Task CreatesFileDownloadAsync() { - var content = new ByteArrayContent(Encoding.UTF8.GetBytes("test content")); + var content = new ByteArrayContent(Constants.DefaultEncoding.GetBytes("test content")); 
MockResponse.Setup(x => x.Content).Returns(content); content.Headers.TryAddWithoutValidation(RestHeaders.ContentDisposition, "FileName=test"); diff --git a/tests/Tableau.Migration.Tests/Unit/Api/IServiceCollectionExtensionsTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/IServiceCollectionExtensionsTests.cs index 473a071..f4114e9 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/IServiceCollectionExtensionsTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/IServiceCollectionExtensionsTests.cs @@ -98,19 +98,19 @@ public async Task RegistersScopedApiClientInputAndInitializer() } [Fact] - public async Task RegistersTransientApiClients() + public async Task RegistersScopedApiClients() { await using var scope = InitializeApiScope(); - AssertService(scope, ServiceLifetime.Transient); - AssertService(scope, ServiceLifetime.Transient); - AssertService(scope, ServiceLifetime.Transient); - AssertService(scope, ServiceLifetime.Transient); - AssertService(scope, ServiceLifetime.Transient); - AssertService(scope, ServiceLifetime.Transient); - AssertService(scope, ServiceLifetime.Transient); - AssertService(scope, ServiceLifetime.Transient); - AssertService(scope, ServiceLifetime.Transient); + AssertService(scope, ServiceLifetime.Scoped); + AssertService(scope, ServiceLifetime.Scoped); + AssertService(scope, ServiceLifetime.Scoped); + AssertService(scope, ServiceLifetime.Scoped); + AssertService(scope, ServiceLifetime.Scoped); + AssertService(scope, ServiceLifetime.Scoped); + AssertService(scope, ServiceLifetime.Scoped); + AssertService(scope, ServiceLifetime.Scoped); + AssertService(scope, ServiceLifetime.Scoped); } [Fact] diff --git a/tests/Tableau.Migration.Tests/Unit/Api/Publishing/DataSourcePublisherTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/Publishing/DataSourcePublisherTests.cs index 0809949..f34fd83 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/Publishing/DataSourcePublisherTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/Publishing/DataSourcePublisherTests.cs @@ -26,7 +26,7 @@ namespace Tableau.Migration.Tests.Unit.Api.Publishing { public class DataSourcePublisherTests { - public abstract class DataSourcePublisherTest : FilePublisherTestBase + public abstract class DataSourcePublisherTest : FilePublisherTestBase { internal readonly DataSourcePublisher DataSourcePublisher; diff --git a/tests/Tableau.Migration.Tests/Unit/Api/Publishing/FilePublisherBaseTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/Publishing/FilePublisherBaseTests.cs index fb1a69b..03ad652 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/Publishing/FilePublisherBaseTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/Publishing/FilePublisherBaseTests.cs @@ -19,6 +19,7 @@ using System.Threading; using System.Threading.Tasks; using System.Xml.Serialization; +using Microsoft.Extensions.Logging; using Tableau.Migration.Api; using Tableau.Migration.Api.Models; using Tableau.Migration.Api.Publishing; @@ -76,10 +77,11 @@ public TestFilePublisher( IRestRequestBuilderFactory restRequestBuilderFactory, IContentReferenceFinderFactory finderFactory, IServerSessionProvider sessionProvider, + ILoggerFactory loggerFactory, ISharedResourcesLocalizer sharedResourcesLocalizer, IHttpStreamProcessor httpStreamProcessor, string contentTypeUrlPrefix) - : base(restRequestBuilderFactory, finderFactory, sessionProvider, sharedResourcesLocalizer, httpStreamProcessor, contentTypeUrlPrefix) + : base(restRequestBuilderFactory, finderFactory, sessionProvider, loggerFactory, sharedResourcesLocalizer, httpStreamProcessor, 
contentTypeUrlPrefix) { } protected override TestPublishRequest BuildCommitRequest(ITestPublishOptions options) @@ -117,6 +119,7 @@ public FilePublisherBaseTest() RestRequestBuilderFactory, MockContentFinderFactory.Object, MockSessionProvider.Object, + MockLoggerFactory.Object, MockSharedResourcesLocalizer.Object, HttpStreamProcessor, UrlPrefix); diff --git a/tests/Tableau.Migration.Tests/Unit/Api/Publishing/WorkbookPublisherTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/Publishing/WorkbookPublisherTests.cs index 7acbfa7..502be76 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/Publishing/WorkbookPublisherTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/Publishing/WorkbookPublisherTests.cs @@ -26,7 +26,7 @@ namespace Tableau.Migration.Tests.Unit.Api.Publishing { public class WorkbookPublisherTests { - public abstract class WorkbookPublisherTest : FilePublisherTestBase + public abstract class WorkbookPublisherTest : FilePublisherTestBase { internal readonly WorkbookPublisher WorkbookPublisher; diff --git a/tests/Tableau.Migration.Tests/Unit/Api/Rest/Models/ITagTypeComparer.cs b/tests/Tableau.Migration.Tests/Unit/Api/Rest/Models/ITagTypeComparer.cs index 3d4db07..d5121ea 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/Rest/Models/ITagTypeComparer.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/Rest/Models/ITagTypeComparer.cs @@ -23,6 +23,6 @@ internal class ITagTypeComparer : ComparerBase { public static readonly ITagTypeComparer Instance = new(); - public override int CompareItems(ITagType x, ITagType y) => StringComparer.Ordinal.Compare(x.Label, y.Label); + protected override int CompareItems(ITagType x, ITagType y) => StringComparer.Ordinal.Compare(x.Label, y.Label); } } diff --git a/tests/Tableau.Migration.Tests/Unit/Api/Rest/Models/Requests/AddTagsRequestTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/Rest/Models/Requests/AddTagsRequestTests.cs index ad7b5c4..f1ca999 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/Rest/Models/Requests/AddTagsRequestTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/Rest/Models/Requests/AddTagsRequestTests.cs @@ -21,7 +21,7 @@ namespace Tableau.Migration.Tests.Unit.Api.Rest.Models.Requests { - internal class AddTagsRequestTests + public class AddTagsRequestTests { public class TagType { diff --git a/tests/Tableau.Migration.Tests/Unit/Api/Search/ApiContentReferenceFinderFactoryTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/Search/ApiContentReferenceFinderFactoryTests.cs index d4735a2..76d0bf3 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/Search/ApiContentReferenceFinderFactoryTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/Search/ApiContentReferenceFinderFactoryTests.cs @@ -16,6 +16,7 @@ using Moq; using Tableau.Migration.Api.Search; +using Tableau.Migration.Content; using Tableau.Migration.Content.Search; using Xunit; @@ -38,6 +39,20 @@ public void BuildsCachedApiFinder() Assert.IsType>(finder); mockServices.Verify(x => x.GetService(typeof(BulkApiContentReferenceCache)), Times.Once); } + + [Fact] + public void BuildsUsersCachedApiFinder() + { + var cache = Freeze>(); + var mockServices = Freeze(); + + var factory = Create(); + + var finder = factory.ForContentType(); + + Assert.IsType>(finder); + mockServices.Verify(x => x.GetService(typeof(BulkApiContentReferenceCache)), Times.Once); + } } } } diff --git a/tests/Tableau.Migration.Tests/Unit/Api/Search/BulkApiContentReferenceCacheTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/Search/BulkApiContentReferenceCacheTests.cs index f0f94e8..5488e89 100644 --- 
a/tests/Tableau.Migration.Tests/Unit/Api/Search/BulkApiContentReferenceCacheTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/Search/BulkApiContentReferenceCacheTests.cs @@ -17,6 +17,7 @@ using System; using System.Collections.Immutable; using System.Linq; +using System.Threading; using System.Threading.Tasks; using Moq; using Tableau.Migration.Api; @@ -32,35 +33,45 @@ public class BulkApiContentReferenceCacheTests { public class SearchAsync : AutoFixtureTestBase { - protected readonly Mock> MockApiClient; + protected readonly Mock> MockApiClient; + protected readonly Mock> MockReadApiClient; - protected int BatchSize { get; set; } = 10; + private static int BatchSize = 10; - protected ImmutableArray Data { get; set; } + protected ImmutableArray Data { get; set; } - protected readonly BulkApiContentReferenceCache Cache; + protected readonly BulkApiContentReferenceCache Cache; public SearchAsync() { - MockApiClient = new Mock> + MockApiClient = new Mock> { CallBase = true }; MockApiClient.Setup(x => x.GetPager(BatchSize)) - .Returns((int pageSize) => new MemoryPager(Data, pageSize)); + .Returns((int pageSize) => new MemoryPager(Data, pageSize)); + MockReadApiClient = new Mock> + { + CallBase = true + }; - Data = CreateMany() + Data = CreateMany() .ToImmutableArray(); var mockConfigReader = Freeze>(); - mockConfigReader.Setup(x => x.Get()) - .Returns(() => new MigrationSdkOptions { BatchSize = BatchSize }); + mockConfigReader.Setup(x => x.Get()) + .Returns(() => new ContentTypesOptions() { BatchSize = BatchSize }); + + mockConfigReader.Setup(x => x.Get()) + .Returns(() => new ContentTypesOptions() { BatchSize = BatchSize }); var mockSitesApi = Freeze>(); - mockSitesApi.Setup(x => x.GetListApiClient()) + mockSitesApi.Setup(x => x.GetListApiClient()) .Returns(MockApiClient.Object); + mockSitesApi.Setup(x => x.GetReadApiClient()) + .Returns(MockReadApiClient.Object); - Cache = Create>(); + Cache = Create>(); } [Fact] @@ -75,6 +86,16 @@ public async Task SuccessByLocationAsync() Assert.Equal(new ContentReferenceStub(search), resultStub); } + [Fact] + public async Task NotFoundByLocationAsync() + { + var search = Create(); + + var result = await Cache.ForLocationAsync(search.Location, Cancel); + + Assert.Null(result); + } + [Fact] public async Task SuccessByIdAsync() { @@ -87,12 +108,37 @@ public async Task SuccessByIdAsync() Assert.Equal(new ContentReferenceStub(search), resultStub); } + [Fact] + public async Task NotFoundByIdAsync() + { + var search = Create(); + + var result = await Cache.ForIdAsync(search.Id, Cancel); + + Assert.Null(result); + } + + [Fact] + public async Task NotFoundByIdWithFallbackAsync() + { + var search = Create(); + + MockReadApiClient.Setup(x => x.GetByIdAsync(search.Id, It.IsAny())) + .ReturnsAsync(Result.Succeeded(search)); + + var result = await Cache.ForIdAsync(search.Id, Cancel); + + Assert.NotNull(result); + var resultStub = Assert.IsType(result); + Assert.Equal(new ContentReferenceStub(search), resultStub); + } + [Fact] public async Task FailureByLocationReturnsEmptyAsync() { - var mockFailurePager = Create>>(); + var mockFailurePager = Create>>(); mockFailurePager.Setup(x => x.NextPageAsync(Cancel)) - .ReturnsAsync(PagedResult.Failed(new Exception())); + .ReturnsAsync(PagedResult.Failed(new Exception())); MockApiClient.Setup(x => x.GetPager(BatchSize)) .Returns(mockFailurePager.Object); @@ -105,9 +151,9 @@ public async Task FailureByLocationReturnsEmptyAsync() [Fact] public async Task FailureByIdReturnsEmptyAsync() { - var mockFailurePager = Create>>(); + 
var mockFailurePager = Create>>(); mockFailurePager.Setup(x => x.NextPageAsync(Cancel)) - .ReturnsAsync(PagedResult.Failed(new Exception())); + .ReturnsAsync(PagedResult.Failed(new Exception())); MockApiClient.Setup(x => x.GetPager(BatchSize)) .Returns(mockFailurePager.Object); diff --git a/tests/Tableau.Migration.Tests/Unit/Api/Simulation/TableauApiSimulatorFactoryTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/Simulation/TableauApiSimulatorFactoryTests.cs new file mode 100644 index 0000000..6d9575f --- /dev/null +++ b/tests/Tableau.Migration.Tests/Unit/Api/Simulation/TableauApiSimulatorFactoryTests.cs @@ -0,0 +1,81 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Collections.Generic; +using Moq; +using Tableau.Migration.Api.Simulation; +using Xunit; + +namespace Tableau.Migration.Tests.Unit.Api.Simulation +{ + public class TableauApiSimulatorFactoryTests + { + public class TableauApiSimulatorFactoryTest : AutoFixtureTestBase + { + protected Dictionary Simulators { get; } = new(); + + protected readonly Mock MockSimulatorCollection; + + public TableauApiSimulatorFactoryTest() + { + MockSimulatorCollection = Freeze>(); + MockSimulatorCollection.Setup(x => x.ForServer(It.IsAny())) + .Returns((Uri u) => + { + if (Simulators.TryGetValue(u.Host, out var result)) + return result; + + return null; + }); + MockSimulatorCollection.Setup(x => x.AddOrUpdate(It.IsAny())) + .Callback((TableauApiSimulator s) => Simulators[s.ServerUrl.Host] = s); + } + + protected TableauApiSimulatorFactory CreateFactory() + { + return Create(); + } + } + + public class GetOrCreate : TableauApiSimulatorFactoryTest + { + [Fact] + public void NoReCreation() + { + var factory = CreateFactory(); + + var simulator1 = factory.GetOrCreate(new("http://localhost")); + var simulator2 = factory.GetOrCreate(new("http://localhost")); + + Assert.Same(simulator1, simulator2); + MockSimulatorCollection.Verify(x => x.AddOrUpdate(It.IsAny()), Times.Once); + } + + [Fact] + public void CreatesAuthenticationUser() + { + var factory = CreateFactory(); + + var simulator = factory.GetOrCreate(new("http://localhost")); + + MockSimulatorCollection.Verify(x => x.AddOrUpdate(simulator), Times.Once); + + Assert.NotNull(simulator.Data.SignIn); + } + } + } +} diff --git a/tests/Tableau.Migration.Tests/Unit/Api/WorkbooksApiClientTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/WorkbooksApiClientTests.cs index 2673c04..1038a18 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/WorkbooksApiClientTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/WorkbooksApiClientTests.cs @@ -20,7 +20,6 @@ using System.Linq; using System.Net; using System.Net.Http; -using System.Text; using System.Threading; using System.Threading.Tasks; using Moq; @@ -66,9 +65,8 @@ public async Task ErrorAsync() MockHttpClient.SetupResponse(mockResponse); var workbookId = Guid.NewGuid(); - var connections = CreateMany().ToImmutableArray(); - var result = 
await ApiClient.GetWorkbookAsync(workbookId, connections, Create(), Cancel); + var result = await ApiClient.GetWorkbookAsync(workbookId, Cancel); result.AssertFailure(); @@ -86,9 +84,8 @@ public async Task FailureResponseAsync() MockHttpClient.SetupResponse(mockResponse); var workbookId = Guid.NewGuid(); - var connections = CreateMany().ToImmutableArray(); - var result = await ApiClient.GetWorkbookAsync(workbookId, connections, Create(), Cancel); + var result = await ApiClient.GetWorkbookAsync(workbookId, Cancel); result.AssertFailure(); @@ -122,9 +119,8 @@ public async Task SuccessAsync() MockHttpClient.SetupResponse(mockResponse); var workbookId = Guid.NewGuid(); - var connections = CreateMany().ToImmutableArray(); - var result = await ApiClient.GetWorkbookAsync(workbookId, connections, Create(), Cancel); + var result = await ApiClient.GetWorkbookAsync(workbookId, Cancel); result.AssertSuccess(); Assert.NotNull(result.Value); @@ -250,7 +246,7 @@ public async Task FailureResponseAsync() [Fact] public async Task SuccessAsync() { - var content = new ByteArrayContent(Encoding.UTF8.GetBytes("hi2u")); + var content = new ByteArrayContent(Constants.DefaultEncoding.GetBytes("hi2u")); var mockResponse = new MockHttpResponseMessage(content); MockHttpClient.SetupResponse(mockResponse); @@ -287,7 +283,7 @@ public async Task Succeeds() var mockPublishableWorkbook = Create>(); mockPublishableWorkbook.SetupGet(wb => wb.File).Returns(mockContentFileHandle.Object); - var publisherResult = Result.Succeeded(Create()); + var publisherResult = Result.Succeeded(Create()); MockWorkbookPublisher.Setup(p => p.PublishAsync(It.IsAny(), It.IsAny())) .ReturnsAsync(publisherResult); @@ -311,7 +307,7 @@ public async Task Succeeds() { var mockOptions = new Mock(); - var publisherResult = Result.Succeeded(Create()); + var publisherResult = Result.Succeeded(Create()); MockWorkbookPublisher.Setup(p => p.PublishAsync(It.IsAny(), It.IsAny())) .ReturnsAsync(publisherResult); diff --git a/tests/Tableau.Migration.Tests/Unit/Config/ConfigReaderTests.cs b/tests/Tableau.Migration.Tests/Unit/Config/ConfigReaderTests.cs new file mode 100644 index 0000000..6b04d76 --- /dev/null +++ b/tests/Tableau.Migration.Tests/Unit/Config/ConfigReaderTests.cs @@ -0,0 +1,117 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
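The ConfigReaderTests added below (and the updated ConfigurationTests later in this diff) move batch sizing from a single MigrationSdkOptions.BatchSize to per-content-type entries. A minimal sketch of the lookup those tests imply; the ContentTypesOptions and MigrationSdkOptions shapes mirror the test data, while ContentTypeOptionsLookup.ForType and the default value of 100 are illustrative assumptions rather than SDK API:

    using System;
    using System.Collections.Generic;
    using System.Linq;

    public class ContentTypesOptions
    {
        public static class Defaults
        {
            public const int BATCH_SIZE = 100; // assumed default; the real value lives in the SDK
        }

        public string Type { get; set; } = string.Empty;

        public int BatchSize { get; set; } = Defaults.BATCH_SIZE;
    }

    public class MigrationSdkOptions
    {
        public List<ContentTypesOptions> ContentTypes { get; set; } = new();
    }

    public static class ContentTypeOptionsLookup
    {
        // Resolve options by content type name, falling back to the defaults when no entry matches.
        public static ContentTypesOptions ForType(MigrationSdkOptions options, string contentTypeName)
            => options.ContentTypes.FirstOrDefault(
                   c => string.Equals(c.Type, contentTypeName, StringComparison.OrdinalIgnoreCase))
               ?? new ContentTypesOptions();
    }

The JSON test data further down ("contentTypes": [ { "type": "User", "batchSize": ... } ]) binds onto this shape, which is why the tests can assert a distinct batch size per content type and still fall back to ContentTypesOptions.Defaults.BATCH_SIZE when no entry is configured.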
+// + +using System.Collections.Generic; +using System.Linq; +using Microsoft.Extensions.Options; +using Moq; +using Tableau.Migration.Config; +using Tableau.Migration.Content; +using Xunit; + +namespace Tableau.Migration.Tests.Unit.Config +{ + public class ConfigReaderTests : AutoFixtureTestBase + { + + private readonly Mock> MockOptionsMonitor; + private readonly ConfigReader Reader; + + public ConfigReaderTests() + { + MockOptionsMonitor = new Mock>(); + MockOptionsMonitor.Setup(x => x.Get(nameof(MigrationSdkOptions))).Returns(new MigrationSdkOptions()); + Reader = new ConfigReader(MockOptionsMonitor.Object); + } + protected void SetupOptionsMonitor() + { + MockOptionsMonitor.Setup(x => x.Get(nameof(MigrationSdkOptions))) + .Returns( + new MigrationSdkOptions() + { + ContentTypes = GetContentTypesOptionsTestData() + }); + } + + protected static List GetContentTypesOptionsTestData() + => [ + new() + { + Type = "User", + BatchSize = 223 + }, + new() + { + Type = "Group", + BatchSize = 224 + }, + new() + { + Type = "Project", + BatchSize = 225 + }, + new() + { + Type = "Workbook", + BatchSize = 226 + }, + new() + { + Type = "DataSource", + BatchSize = 227 + } + + ]; + + public void AssertCustomResult(ContentTypesOptions expected, ContentTypesOptions actual) + { + Assert.Equal(expected.BatchSize, actual.BatchSize); + } + + public void AssertDefaultResult(ContentTypesOptions actual) + { + Assert.Equal(ContentTypesOptions.Defaults.BATCH_SIZE, actual.BatchSize); + } + + public class GetContentTypeSpecific : ConfigReaderTests + { + [Fact] + public void GetsCustomValues() + { + SetupOptionsMonitor(); + + var testData = GetContentTypesOptionsTestData(); + + AssertCustomResult(testData.First(i => i.Type == "User"), Reader.Get()); + AssertCustomResult(testData.First(i => i.Type == "Group"), Reader.Get()); + AssertCustomResult(testData.First(i => i.Type == "Project"), Reader.Get()); + AssertCustomResult(testData.First(i => i.Type == "Workbook"), Reader.Get()); + AssertCustomResult(testData.First(i => i.Type == "DataSource"), Reader.Get()); + } + + [Fact] + public void GetsDefaultValues() + { + AssertDefaultResult(Reader.Get()); + AssertDefaultResult(Reader.Get()); + AssertDefaultResult(Reader.Get()); + AssertDefaultResult(Reader.Get()); + AssertDefaultResult(Reader.Get()); + } + + } + } +} diff --git a/tests/Tableau.Migration.Tests/Unit/Config/ConfigurationTests.cs b/tests/Tableau.Migration.Tests/Unit/Config/ConfigurationTests.cs index 10dd33a..fac60b8 100644 --- a/tests/Tableau.Migration.Tests/Unit/Config/ConfigurationTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Config/ConfigurationTests.cs @@ -15,6 +15,7 @@ // using System; +using System.Collections.Generic; using System.ComponentModel; using System.IO; using System.Linq; @@ -27,7 +28,9 @@ using Newtonsoft.Json; using Newtonsoft.Json.Linq; using Tableau.Migration.Config; +using Tableau.Migration.Content; using Tableau.Migration.Content.Permissions; +using Tableau.Migration.Engine.Pipelines; using Xunit; using Xunit.Abstractions; @@ -45,22 +48,70 @@ public class ConfigurationTests private const string TEST_DATA_DIR = "TestData"; private const string TEST_DATA_FILE1 = "configuration_testdata1.json"; private static readonly string TEST_DATA_FILE1_CONTENT = -$@"{{ - ""MigrationSdkOptions"": {{ - ""Network"": {{ - ""{nameof(MigrationSdkOptions.Network.FileChunkSizeKB)}"": 2034 +$@" +{{ + ""MigrationSdkOptions"": {{ + ""contentTypes"": [ + {{ + ""type"":""User"", + ""batchSize"": {TestData.USER_BATCH_SIZE} + }}, + {{ + ""type"":""Group"", + 
""batchSize"": {TestData.GROUP_BATCH_SIZE} + }}, + {{ + ""type"":""Project"", + ""batchSize"": {TestData.PROJECT_BATCH_SIZE} + }}, + {{ + ""type"":""Workbook"", + ""batchSize"": {TestData.WORKBOOK_BATCH_SIZE} + }}, + {{ + ""type"":""DataSource"", + ""batchSize"": {TestData.DATASOURCE_BATCH_SIZE} + }} + ], + ""Network"": {{ + ""{nameof(MigrationSdkOptions.Network.FileChunkSizeKB)}"": {TestData.FILE_CHUNK_SIZE_GB} + }} }} - }} -}}"; +}} +"; private const string TEST_DATA_FILE2 = "configuration_testdata2.json"; private static readonly string TEST_DATA_FILE2_CONTENT = -$@"{{ - ""MigrationSdkOptions"": {{ - ""Network"": {{ - ""{nameof(MigrationSdkOptions.Network.FileChunkSizeKB)}"": 2034 +$@" +{{ + ""MigrationSdkOptions"": {{ + ""contentTypes"": [ + {{ + ""type"":""User"", + ""batchSize"": {TestData.USER_BATCH_SIZE} + }}, + {{ + ""type"":""Group"", + ""batchSize"": {TestData.GROUP_BATCH_SIZE} + }}, + {{ + ""type"":""Project"", + ""batchSize"": {TestData.PROJECT_BATCH_SIZE} + }}, + {{ + ""type"":""Workbook"", + ""batchSize"": {TestData.WORKBOOK_BATCH_SIZE} + }}, + {{ + ""type"":""DataSource"", + ""batchSize"": {TestData.DATASOURCE_BATCH_SIZE} + }} + ], + ""Network"": {{ + ""{nameof(MigrationSdkOptions.Network.FileChunkSizeKB)}"": {TestData.FILE_CHUNK_SIZE_GB} + }} }} - }} -}}"; +}} +"; private const string TEST_DATA_FILE3 = "configuration_testdata3.json"; private static readonly string TEST_DATA_FILE3_CONTENT = @@ -71,7 +122,16 @@ public class ConfigurationTests }} }} }}"; + protected class TestData + { + public const int USER_BATCH_SIZE = 201; + public const int GROUP_BATCH_SIZE = 202; + public const int PROJECT_BATCH_SIZE = 203; + public const int WORKBOOK_BATCH_SIZE = 204; + public const int DATASOURCE_BATCH_SIZE = 205; + public const int FILE_CHUNK_SIZE_GB = 2034; + } private readonly ITestOutputHelper _output; private readonly bool skipGithubRunnerTests; @@ -126,25 +186,40 @@ private static ServiceProvider GetServiceProvider(string? testDataFile = null) .BuildServiceProvider(); } - private static async Task EditConfigFile(string path, string configKey, int value, int waitAfterSaveMilliseconds) + private static async Task EditConfigFile(string path, Dictionary configKeys, int waitAfterSaveMilliseconds) { + JObject jObject = GetJsonObjectFromFile(path); + + foreach (var configKey in configKeys) + ReplaceValue(configKey.Key, configKey.Value, jObject); + + await SaveJsonFile(path, waitAfterSaveMilliseconds, jObject).ConfigureAwait(false); + } + + private static async Task SaveJsonFile(string path, int waitAfterSaveMilliseconds, JObject jObject) + { + // Convert the JObject back to a string and save the file. 
+ await File.WriteAllTextAsync(path, jObject.ToString()).ConfigureAwait(false); + + /// Artificially induced wait so the IOptionsMonitor.OnChange() event handler can pick up file changes + await Task.Delay(waitAfterSaveMilliseconds).ConfigureAwait(false); + } + private static JObject GetJsonObjectFromFile(string path) + { var jsonString = File.ReadAllText(path); Assert.NotNull(jsonString); - var jObject = (JObject)JsonConvert.DeserializeObject(jsonString)!; + return (JObject)JsonConvert.DeserializeObject(jsonString)!; + } + + static void ReplaceValue(string configKey, int value, JObject jObject) + { var jToken = jObject.SelectToken(configKey); Assert.NotNull(jToken); // Update the value of the property: jToken.Replace(value); - - // Convert the JObject back to a string: - string updatedJsonString = jObject.ToString(); - await File.WriteAllTextAsync(path, updatedJsonString).ConfigureAwait(false); - - /// Artificially induced wait so the IOptionsMonitor.OnChange() event handler can pick up file changes - await Task.Delay(waitAfterSaveMilliseconds).ConfigureAwait(false); } #endregion @@ -153,10 +228,19 @@ private static async Task EditConfigFile(string path, string configKey, int valu public void LoadFromInitialConfiguration() { var serviceProvider = GetServiceProvider(TEST_DATA_FILE1); - var configHelper = serviceProvider.GetRequiredService(); - var freshConfig = configHelper.Get(); + var configReader = serviceProvider.GetRequiredService(); + + + Assert.Equal(TestData.USER_BATCH_SIZE, configReader.Get().BatchSize); + Assert.Equal(TestData.GROUP_BATCH_SIZE, configReader.Get().BatchSize); + Assert.Equal(TestData.PROJECT_BATCH_SIZE, configReader.Get().BatchSize); + Assert.Equal(TestData.WORKBOOK_BATCH_SIZE, configReader.Get().BatchSize); + Assert.Equal(TestData.DATASOURCE_BATCH_SIZE, configReader.Get().BatchSize); + + + var freshConfig = configReader.Get(); Assert.NotNull(freshConfig?.Network); - Assert.Equal(2034, freshConfig.Network.FileChunkSizeKB); + Assert.Equal(TestData.FILE_CHUNK_SIZE_GB, freshConfig.Network.FileChunkSizeKB); } /// @@ -173,20 +257,30 @@ public async Task FileConfigChangeReload(int readDelay, int maxRetries) try { var serviceProvider = GetServiceProvider(TEST_DATA_FILE2); - var configHelper = serviceProvider.GetRequiredService(); + var configReader = serviceProvider.GetRequiredService(); - var oldConfig = configHelper.Get(); + var oldConfig = configReader.Get(); Assert.NotNull(oldConfig?.Network); - Assert.Equal(2034, oldConfig.Network.FileChunkSizeKB); + Assert.Equal(TestData.FILE_CHUNK_SIZE_GB, oldConfig.Network.FileChunkSizeKB); - await EditConfigFile(Path.Combine(Directory.GetCurrentDirectory(), TEST_DATA_DIR, TEST_DATA_FILE2), - $"{nameof(MigrationSdkOptions)}.Network.{nameof(MigrationSdkOptions.Network.FileChunkSizeKB)}", - 55, - readDelay); + await EditConfigFile( + Path.Combine(Directory.GetCurrentDirectory(), TEST_DATA_DIR, TEST_DATA_FILE2), + new Dictionary { + {$"{nameof(MigrationSdkOptions)}.Network.{nameof(MigrationSdkOptions.Network.FileChunkSizeKB)}",55 }, + {$"{nameof(MigrationSdkOptions)}.contentTypes[0].batchSize",102 } + }, + readDelay); - var newConfig = configHelper.Get(); - Assert.NotNull(newConfig?.Network); - Assert.Equal(55, newConfig.Network.FileChunkSizeKB); + var newConfig = configReader.Get(); + var newNetworkConfig = newConfig?.Network; + + Assert.NotNull(newNetworkConfig); + Assert.Equal(55, newNetworkConfig.FileChunkSizeKB); + + + var newUserConfig = newConfig?.ContentTypes.FirstOrDefault(i => i.Type == "User"); + 
Assert.NotNull(newUserConfig); + Assert.Equal(102, newUserConfig.BatchSize); break; } @@ -217,11 +311,14 @@ await EditConfigFile(Path.Combine(Directory.GetCurrentDirectory(), TEST_DATA_DIR public void DefaultsApplied() { var serviceProvider = GetServiceProvider(); - var configHelper = serviceProvider.GetRequiredService(); + var configReader = serviceProvider.GetRequiredService(); - var freshConfig = configHelper.Get(); + var freshConfig = configReader.Get(); Assert.NotNull(freshConfig?.Network); - Assert.Equal(65536, freshConfig.Network.FileChunkSizeKB); + Assert.Equal(NetworkOptions.Defaults.FILE_CHUNK_SIZE_KB, freshConfig.Network.FileChunkSizeKB); + + Assert.Equal(ContentTypesOptions.Defaults.BATCH_SIZE, configReader.Get().BatchSize); + Assert.Equal(ContentTypesOptions.Defaults.BATCH_SIZE, configReader.Get().BatchSize); Assert.NotNull(freshConfig?.DefaultPermissionsContentTypes); } diff --git a/tests/Tableau.Migration.Tests/Unit/Config/MigrationSdkOptionsTests.cs b/tests/Tableau.Migration.Tests/Unit/Config/MigrationSdkOptionsTests.cs index 29ae9ee..327ea3f 100644 --- a/tests/Tableau.Migration.Tests/Unit/Config/MigrationSdkOptionsTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Config/MigrationSdkOptionsTests.cs @@ -14,33 +14,14 @@ // limitations under the License. // +using System; using Tableau.Migration.Config; using Xunit; namespace Tableau.Migration.Tests.Unit.Config { public class MigrationSdkOptionsTests - { - public class BatchSize - { - [Fact] - public void FallsBackToDefault() - { - var opts = new MigrationSdkOptions(); - Assert.Equal(MigrationSdkOptions.Defaults.BATCH_SIZE, opts.BatchSize); - } - - [Fact] - public void CustomizedValue() - { - var opts = new MigrationSdkOptions - { - BatchSize = 47 - }; - Assert.Equal(47, opts.BatchSize); - } - } - + { public class MigrationParallelism { [Fact] diff --git a/tests/Tableau.Migration.Tests/Unit/Content/DataSourceDetailsTests.cs b/tests/Tableau.Migration.Tests/Unit/Content/DataSourceDetailsTests.cs new file mode 100644 index 0000000..b9f2cf9 --- /dev/null +++ b/tests/Tableau.Migration.Tests/Unit/Content/DataSourceDetailsTests.cs @@ -0,0 +1,65 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +using Tableau.Migration.Api.Rest.Models; +using Tableau.Migration.Content; +using Xunit; + +namespace Tableau.Migration.Tests.Unit.Content +{ + public class DataSourceDetailsTests + { + public abstract class DataSourceDetailsTest : AutoFixtureTestBase + { + public IDataSourceDetails CreateDataSource() => Create(); + public IDataSourceDetailsType CreateResponse() => Create(); + + public IContentReference CreateProjectReference() => Create(); + public IContentReference CreateOwnerReference() => Create(); + } + + public class Ctor + { + public class FromResponse : DataSourceDetailsTest + { + [Fact] + public void Initializes() + { + var response = CreateResponse(); + var project = CreateProjectReference(); + var owner = CreateOwnerReference(); + + var result = new DataSourceDetails(response, project, owner); + + result.Assert(response, project, owner, ds => Assert.Equal(response.CertificationNote, ds.CertificationNote)); + } + } + + public class FromDataSource : DataSourceDetailsTest + { + [Fact] + public void Initializes() + { + var dataSource = CreateDataSource(); + + var result = new DataSourceDetails(dataSource); + + result.Assert(dataSource, ds => Assert.Equal(dataSource.CertificationNote, ds.CertificationNote)); + } + } + } + } +} diff --git a/tests/Tableau.Migration.Tests/Unit/Content/DataSourceTests.cs b/tests/Tableau.Migration.Tests/Unit/Content/DataSourceTests.cs index 42c8b83..ea52bfa 100644 --- a/tests/Tableau.Migration.Tests/Unit/Content/DataSourceTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Content/DataSourceTests.cs @@ -128,7 +128,7 @@ public void InvalidContentUrl(string? testSize) public void Handles_Empty_Tags() { var response = CreateTestResponse(); - response.Tags = null; + response.Tags = []; var project = Create(); var owner = Create(); diff --git a/tests/Tableau.Migration.Tests/Unit/Content/Files/ContentFileStoreTestBase.cs b/tests/Tableau.Migration.Tests/Unit/Content/Files/ContentFileStoreTestBase.cs new file mode 100644 index 0000000..02a628f --- /dev/null +++ b/tests/Tableau.Migration.Tests/Unit/Content/Files/ContentFileStoreTestBase.cs @@ -0,0 +1,85 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +using System; +using System.IO.Abstractions; +using System.IO.Abstractions.TestingHelpers; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Moq; +using Tableau.Migration.Config; +using Tableau.Migration.Content.Files; + +namespace Tableau.Migration.Tests.Unit.Content.Files +{ + public abstract class ContentFileStoreTestBase : AutoFixtureTestBase, IAsyncDisposable + where TFileStore : IContentFileStore + { + private readonly Lazy _fileStore; + private readonly Lazy _services; + + protected IServiceProvider Services => _services.Value; + + protected readonly MockFileSystem MockFileSystem = new(); + protected readonly Mock MockPathResolver = new(); + protected readonly Mock MockConfigReader = new(); + + protected readonly MigrationSdkOptions SdkOptions; + + protected readonly string RootPath; + protected readonly string BaseRelativePath; + + protected TFileStore FileStore => _fileStore.Value; + + public ContentFileStoreTestBase() + { + RootPath = Create(); + BaseRelativePath = Create(); + + SdkOptions = Freeze(); + SdkOptions.Files.RootPath = RootPath; + + MockConfigReader.Setup(r => r.Get()).Returns(SdkOptions); + + _services = new(() => ConfigureServices().BuildServiceProvider()); + + _fileStore = new(CreateFileStore); + } + + protected virtual TFileStore CreateFileStore() => ActivatorUtilities.GetServiceOrCreateInstance(Services); + + protected virtual IServiceCollection ConfigureServices(IServiceCollection services) => services; + + private IServiceCollection ConfigureServices() + { + var services = new ServiceCollection() + .AddTableauMigrationSdk() + .Replace(MockFileSystem) + .Replace(MockPathResolver) + .Replace(MockConfigReader); + + return ConfigureServices(services); + } + + public async virtual ValueTask DisposeAsync() + { + if (_services.IsValueCreated) + await _services.Value.DisposeAsync(); + + GC.SuppressFinalize(this); + } + } +} diff --git a/tests/Tableau.Migration.Tests/Unit/Content/Files/DirectoryContentFileStoreTestBase.cs b/tests/Tableau.Migration.Tests/Unit/Content/Files/DirectoryContentFileStoreTestBase.cs new file mode 100644 index 0000000..b1459a7 --- /dev/null +++ b/tests/Tableau.Migration.Tests/Unit/Content/Files/DirectoryContentFileStoreTestBase.cs @@ -0,0 +1,44 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
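The ContentFileStoreTestBase added above builds the real SDK service collection and then swaps the file system, path resolver, and config reader for mocks via the test suite's Replace helper. A rough sketch of that substitution pattern using plain Microsoft.Extensions.DependencyInjection; ReplaceWithMock is an illustrative name, not the project's actual extension:

    using Microsoft.Extensions.DependencyInjection;
    using Microsoft.Extensions.DependencyInjection.Extensions;
    using Moq;

    public static class TestServiceCollectionExtensions
    {
        public static IServiceCollection ReplaceWithMock<TService>(
            this IServiceCollection services, Mock<TService> mock)
            where TService : class
        {
            services.RemoveAll<TService>();     // drop the SDK's own registration(s)
            services.AddSingleton(mock.Object); // resolve the mock in their place
            return services;
        }
    }

Constructing stores such as DirectoryContentFileStore through the container this way keeps their real constructor wiring while file access and configuration stay under test control.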
+// + +using System.IO; +using System.Threading.Tasks; +using Tableau.Migration.Content; +using Tableau.Migration.Content.Files; + +namespace Tableau.Migration.Tests.Unit.Content.Files +{ + public abstract class DirectoryContentFileStoreTestBase : ContentFileStoreTestBase + where TFileStore : DirectoryContentFileStore + { + protected string BaseStorePath => GetBaseStorePath(FileStore); + + protected ConcurrentSet TrackedFilePaths => GetTrackedFilePaths(FileStore); + + protected string ExpectedBasePath => Path.Combine(RootPath, BaseRelativePath); + + protected string GetBaseStorePath(TFileStore fileStore) => (fileStore.GetPropertyValue(typeof(DirectoryContentFileStore), "BaseStorePath") as string)!; + + protected ConcurrentSet GetTrackedFilePaths(TFileStore fileStore) => (fileStore.GetPropertyValue(typeof(DirectoryContentFileStore), "TrackedFilePaths") as ConcurrentSet)!; + + protected async Task CreateTestFileAsync(string relativePath, string originalFileName, string? content = null) + { + await using var fileData = new MemoryStream(Constants.DefaultEncoding.GetBytes(content ?? Create())); + + return await ((IContentFileStore)FileStore).CreateAsync(relativePath, originalFileName, fileData, Cancel); + } + } +} diff --git a/tests/Tableau.Migration.Tests/Unit/Content/Files/DirectoryContentFileStoreTests.cs b/tests/Tableau.Migration.Tests/Unit/Content/Files/DirectoryContentFileStoreTests.cs index 576fff4..bcb0c88 100644 --- a/tests/Tableau.Migration.Tests/Unit/Content/Files/DirectoryContentFileStoreTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Content/Files/DirectoryContentFileStoreTests.cs @@ -17,14 +17,10 @@ using System; using System.Collections.Immutable; using System.IO; -using System.IO.Abstractions; -using System.IO.Abstractions.TestingHelpers; using System.Linq; -using System.Text; using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; using Moq; -using Tableau.Migration.Config; -using Tableau.Migration.Content; using Tableau.Migration.Content.Files; using Xunit; @@ -34,46 +30,10 @@ public class DirectoryContentFileStoreTests { #region - Test Classes - - public class TestFileStore : DirectoryContentFileStore + public abstract class DirectoryContentFileStoreTest : DirectoryContentFileStoreTestBase { - public ConcurrentSet PublicTrackedFilePaths => TrackedFilePaths; - - public TestFileStore(IFileSystem fileSystem, IContentFilePathResolver pathResolver, IConfigReader configReader, string storeDirectoryName) - : base(fileSystem, pathResolver, configReader, storeDirectoryName) - { } - } - - public class DirectoryContentFileStoreTest : AutoFixtureTestBase - { - protected readonly MockFileSystem FileSystem; - protected readonly Mock MockPathResolver; - protected readonly string RootPath; - protected readonly string BaseRelativePath; - - protected string ExpectedBasePath => Path.Combine(RootPath, BaseRelativePath); - - protected readonly TestFileStore FileStore; - - public DirectoryContentFileStoreTest() - { - FileSystem = new MockFileSystem(); - MockPathResolver = Create>(); - RootPath = Create(); - BaseRelativePath = Create(); - - var config = Freeze(); - config.Files.RootPath = RootPath; - - FileStore = new(FileSystem, MockPathResolver.Object, Create(), BaseRelativePath); - } - - protected async Task CreateTestFileAsync(string relativePath, string originalFileName, string? content = null) - { - await using var fileData = new MemoryStream(Encoding.UTF8.GetBytes(content ?? 
Create())); - - return await ((IContentFileStore)FileStore).CreateAsync(relativePath, originalFileName, fileData, Cancel); - - } + protected override DirectoryContentFileStore CreateFileStore() + => ActivatorUtilities.CreateInstance(Services, BaseRelativePath); } #endregion @@ -94,7 +54,7 @@ public async Task InitializesAndTracksFileAsync() Assert.Equal(Path.Combine(ExpectedBasePath, relPath), file.Path); Assert.Equal(originalFileName, file.OriginalFileName); - Assert.Contains(file.Path, FileStore.PublicTrackedFilePaths); + Assert.Contains(file.Path, TrackedFilePaths); } [Fact] @@ -113,7 +73,7 @@ public async Task InitializesAndTracksFileWithContentItemAsync() Assert.Equal(Path.Combine(ExpectedBasePath, generatedPath), file.Path); Assert.Equal(originalFileName, file.OriginalFileName); - Assert.Contains(file.Path, FileStore.PublicTrackedFilePaths); + Assert.Contains(file.Path, TrackedFilePaths); MockPathResolver.Verify(x => x.ResolveRelativePath(contentItem, originalFileName), Times.Once()); } @@ -157,7 +117,7 @@ public async Task OverwritesAsync() await using (var writeStream = await FileStore.OpenWriteAsync(file, Cancel)) { - writeStream.Content.Write(Encoding.UTF8.GetBytes(expectedContent)); + writeStream.Content.Write(Constants.DefaultEncoding.GetBytes(expectedContent)); } await using var readStream = await FileStore.OpenReadAsync(file, Cancel); @@ -182,8 +142,8 @@ public async Task DeletesAndUntracksFileAsync() await FileStore.DeleteAsync(file, Cancel); - Assert.False(FileSystem.FileExists(file.Path)); - Assert.DoesNotContain(file.Path, FileStore.PublicTrackedFilePaths); + Assert.False(MockFileSystem.FileExists(file.Path)); + Assert.DoesNotContain(file.Path, TrackedFilePaths); } } @@ -255,11 +215,11 @@ public async Task CleansUpDirectoryAndAllTrackedFilesAsync() Assert.All(files, file => { - Assert.False(FileSystem.FileExists(file.Path)); + Assert.False(MockFileSystem.FileExists(file.Path)); }); - Assert.Empty(FileStore.PublicTrackedFilePaths); - Assert.False(FileSystem.Directory.Exists(ExpectedBasePath)); + Assert.Empty(TrackedFilePaths); + Assert.False(MockFileSystem.Directory.Exists(ExpectedBasePath)); Assert.False(FileStore.HasOpenTableauFileEditor); } } diff --git a/tests/Tableau.Migration.Tests/Unit/Content/Files/EncryptedFileStoreTests.cs b/tests/Tableau.Migration.Tests/Unit/Content/Files/EncryptedFileStoreTests.cs index 44f6590..f2c7a5e 100644 --- a/tests/Tableau.Migration.Tests/Unit/Content/Files/EncryptedFileStoreTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Content/Files/EncryptedFileStoreTests.cs @@ -16,9 +16,8 @@ using System.IO; using System.Security.Cryptography; -using System.Text; using System.Threading.Tasks; -using AutoFixture; +using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Moq; using Tableau.Migration.Config; @@ -31,32 +30,25 @@ public class EncryptedFileStoreTests { #region - Test Classes - - public class EncryptedFileStoreTest : AutoFixtureTestBase + public abstract class EncryptedFileStoreTest : ContentFileStoreTestBase { - protected readonly Mock MockEncryptionFactory; + protected readonly Mock MockEncryptionFactory = new(); protected readonly Mock MockInnerFileStore; - protected readonly Mock> MockLogger; + protected readonly Mock> MockLogger = new(); + protected readonly MockSharedResourcesLocalizer MockSharedResourcesLocalizer = new(); protected bool DisableFileEncryption { get; set; } - protected readonly EncryptedFileStore FileStore; - public EncryptedFileStoreTest() { - MockEncryptionFactory = Freeze>(); - 
MockEncryptionFactory.Setup(x => x.Create()) - .Returns(() => Aes.Create()); - - MockInnerFileStore = new Mock() + MockInnerFileStore = new(MemoryStreamManager.Instance) { CallBase = true }; - AutoFixture.Register(() => MockInnerFileStore.Object); - MockLogger = Freeze>>(); + MockEncryptionFactory.Setup(f => f.Create()).Returns(Aes.Create()); - var mockConfig = Freeze>(); - mockConfig.Setup(x => x.Get()) + MockConfigReader.Setup(x => x.Get()) .Returns(() => new MigrationSdkOptions { Files = new() @@ -64,9 +56,22 @@ public EncryptedFileStoreTest() DisableFileEncryption = DisableFileEncryption } }); + } - FileStore = Create(); + protected override IServiceCollection ConfigureServices(IServiceCollection services) + { + var mockLoggerFactory = new Mock(); + mockLoggerFactory.Setup(f => f.CreateLogger(It.Is(s => s.Contains(nameof(EncryptedFileStore))))) + .Returns(MockLogger.Object); + + return services + .Replace(mockLoggerFactory) + .Replace(MockSharedResourcesLocalizer) + .Replace(MockEncryptionFactory) + .AddScoped(p => new EncryptedFileStore(p, MockInnerFileStore.Object)); } + + protected override EncryptedFileStore CreateFileStore() => Services.GetRequiredService(); } #endregion @@ -80,7 +85,7 @@ public void WarningOnEncryptionDisabled() { DisableFileEncryption = true; - var store = Create(); + var store = new EncryptedFileStore(Services, MockInnerFileStore.Object); MockLogger.VerifyWarnings(Times.Once); } @@ -191,25 +196,27 @@ public async Task RoundtripEncryptionAsync() const string path = "test.txt"; await using var file = FileStore.Create(path, Create()); - - await using (var writeStream = await file.OpenWriteAsync(Cancel)) - await using (var writer = new StreamWriter(writeStream.Content, Encoding.UTF8)) { - await writer.WriteAsync(content); - } + await using (var writeStream = await file.OpenWriteAsync(Cancel)) + await using (var writer = new StreamWriter(writeStream.Content, Constants.DefaultEncoding)) + { + await writer.WriteAsync(content); + } - var encryptedValue = Encoding.UTF8.GetString(MockInnerFileStore.Object.GetFileData(path)); - Assert.NotEqual(content, encryptedValue); - Assert.NotEmpty(encryptedValue); + var encryptedValue = Constants.DefaultEncoding.GetString(MockInnerFileStore.Object.GetFileData(path)); + Assert.NotEqual(content, encryptedValue); + Assert.NotEmpty(encryptedValue); - string roundtrip; - await using (var readStream = await file.OpenReadAsync(Cancel)) - using (var reader = new StreamReader(readStream.Content)) - { - roundtrip = await reader.ReadToEndAsync(); - } + string roundtrip; + await using var readStream = await file.OpenReadAsync(Cancel); - Assert.Equal(content, roundtrip); + using (var reader = new StreamReader(readStream.Content)) + { + roundtrip = await reader.ReadToEndAsync(); + } + + Assert.Equal(content, roundtrip); + } } } diff --git a/tests/Tableau.Migration.Tests/Unit/Content/Files/IContentFileStoreTests.cs b/tests/Tableau.Migration.Tests/Unit/Content/Files/IContentFileStoreTests.cs index dac27e3..f9d781d 100644 --- a/tests/Tableau.Migration.Tests/Unit/Content/Files/IContentFileStoreTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Content/Files/IContentFileStoreTests.cs @@ -15,7 +15,6 @@ // using System.IO; -using System.Text; using System.Threading; using System.Threading.Tasks; using Moq; @@ -48,14 +47,14 @@ public async Task CreatesAndWritesAsync() mockFileStore.Setup(x => x.OpenWriteAsync(handle, cancel)) .ReturnsAsync(new ContentFileStream(writeStream)); - using var initialStream = new MemoryStream(Encoding.UTF8.GetBytes(content)); + 
using var initialStream = new MemoryStream(Constants.DefaultEncoding.GetBytes(content)); var result = await mockFileStore.Object.CreateAsync(path, originalFileName, initialStream, cancel); mockFileStore.Verify(x => x.Create(path, originalFileName), Times.Once); mockFileStore.Verify(x => x.OpenWriteAsync(handle, cancel), Times.Once); - Assert.Equal(content, Encoding.UTF8.GetString(writeStream.ToArray())); + Assert.Equal(content, Constants.DefaultEncoding.GetString(writeStream.ToArray())); } [Fact] @@ -78,14 +77,14 @@ public async Task CreatesAndWritesContentItemAsync() mockFileStore.Setup(x => x.OpenWriteAsync(handle, cancel)) .ReturnsAsync(new ContentFileStream(writeStream)); - using var initialStream = new MemoryStream(Encoding.UTF8.GetBytes(content)); + using var initialStream = new MemoryStream(Constants.DefaultEncoding.GetBytes(content)); var result = await mockFileStore.Object.CreateAsync(contentItem, originalFileName, initialStream, cancel); mockFileStore.Verify(x => x.Create(contentItem, originalFileName), Times.Once); mockFileStore.Verify(x => x.OpenWriteAsync(handle, cancel), Times.Once); - Assert.Equal(content, Encoding.UTF8.GetString(writeStream.ToArray())); + Assert.Equal(content, Constants.DefaultEncoding.GetString(writeStream.ToArray())); } } } diff --git a/tests/Tableau.Migration.Tests/Unit/Content/Files/MemoryContentFileStore.cs b/tests/Tableau.Migration.Tests/Unit/Content/Files/MemoryContentFileStore.cs index 7ea16c5..ff517b1 100644 --- a/tests/Tableau.Migration.Tests/Unit/Content/Files/MemoryContentFileStore.cs +++ b/tests/Tableau.Migration.Tests/Unit/Content/Files/MemoryContentFileStore.cs @@ -26,11 +26,22 @@ namespace Tableau.Migration.Tests.Unit.Content.Files { public class MemoryContentFileStore : IContentFileStore { + private readonly IMemoryStreamManager _memoryStreamManager; + private readonly ConcurrentDictionary _fileData = new(); private readonly ConcurrentDictionary _editors = new(); public bool HasOpenTableauFileEditor => _editors.Any(); + public MemoryContentFileStore(IMemoryStreamManager memoryStreamManager) + { + _memoryStreamManager = memoryStreamManager; + } + + internal MemoryContentFileStore() + : this(MemoryStreamManager.Instance) + { } + public byte[] GetFileData(string path) => _fileData[path]; public virtual IContentFileHandle Create(string relativePath, string originalFileName) @@ -71,13 +82,8 @@ public virtual Task OpenWriteAsync(IContentFileHandle handle public virtual async Task GetTableauFileEditorAsync(IContentFileHandle handle, CancellationToken cancel, bool? zipFormatOverride = null) - { - return await Task.Run(() => - { - return _editors.GetOrAdd(handle.Path, - (path) => TableauFileEditor.OpenAsync(handle, cancel, zipFormatOverride).GetAwaiter().GetResult()); - }); - } + => await _editors.GetOrAddAsync(handle.Path, async (path) => + await TableauFileEditor.OpenAsync(handle, _memoryStreamManager, cancel, zipFormatOverride).ConfigureAwait(false)).ConfigureAwait(false); public virtual async Task CloseTableauFileEditorAsync(IContentFileHandle handle, CancellationToken cancel) { diff --git a/tests/Tableau.Migration.Tests/Unit/Content/Files/SeekableCryptoStreamTests.cs b/tests/Tableau.Migration.Tests/Unit/Content/Files/SeekableCryptoStreamTests.cs new file mode 100644 index 0000000..762324c --- /dev/null +++ b/tests/Tableau.Migration.Tests/Unit/Content/Files/SeekableCryptoStreamTests.cs @@ -0,0 +1,243 @@ +// Copyright (c) 2023, Salesforce, Inc. 
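The SeekableCryptoStreamTests added below round-trip content by writing the AES initialization vector ahead of the ciphertext and reading it back before decrypting. A standalone sketch of that pattern using only BCL types; SeekableCryptoStream and its IV helper methods are the SDK's own types, so this shows the underlying idea rather than their implementation:

    using System.IO;
    using System.Security.Cryptography;
    using System.Text;

    public static class AesRoundtripSketch
    {
        public static string Roundtrip(string content)
        {
            using var aes = Aes.Create();
            using var buffer = new MemoryStream();

            // Write: plaintext IV first, then the encrypted payload.
            buffer.Write(aes.IV, 0, aes.IV.Length);
            using (var encryptor = aes.CreateEncryptor())
            using (var crypto = new CryptoStream(buffer, encryptor, CryptoStreamMode.Write, leaveOpen: true))
            {
                var plain = Encoding.UTF8.GetBytes(content);
                crypto.Write(plain, 0, plain.Length);
                crypto.FlushFinalBlock();
            }

            // Read: recover the IV, then decrypt the remainder of the stream.
            buffer.Position = 0;
            var iv = new byte[aes.IV.Length];
            buffer.ReadExactly(iv, 0, iv.Length);
            using var decryptor = aes.CreateDecryptor(aes.Key, iv);
            using var decrypt = new CryptoStream(buffer, decryptor, CryptoStreamMode.Read, leaveOpen: true);
            using var reader = new StreamReader(decrypt, Encoding.UTF8);
            return reader.ReadToEnd();
        }
    }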
+// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.IO; +using System.Security.Cryptography; +using System.Threading.Tasks; +using Microsoft.IO; +using Moq; +using Tableau.Migration.Content.Files; +using Tableau.Migration.Tests.Reflection; +using Xunit; + +namespace Tableau.Migration.Tests.Unit.Content.Files +{ + public class SeekableCryptoStreamTests + { + public abstract class SeekableCryptoStreamTest : EncryptedFileStoreTests.EncryptedFileStoreTest + { + protected readonly StreamFactory StreamFactory = new(MemoryStreamManager.Instance.GetStream); + + protected readonly Mock MockTransform = new(); + + public SeekableCryptoStreamTest() + { } + + protected SeekableCryptoStream CreateCryptoStream(CryptoStreamMode mode, bool leaveOpen) + => CreateCryptoStream(CreateInnerStream(), mode, leaveOpen); + + protected SeekableCryptoStream CreateCryptoStream(Stream innerStream, CryptoStreamMode mode, bool leaveOpen) + => CreateCryptoStream(innerStream, MockTransform.Object, mode, leaveOpen); + + protected SeekableCryptoStream CreateCryptoStream(Stream innerStream, ICryptoTransform transform, CryptoStreamMode mode, bool leaveOpen) + => new(innerStream, transform, mode, leaveOpen); + + protected static Stream CreateInnerStream(byte[] data) + => MemoryStreamManager.Instance.GetStream(data); + + protected static Stream CreateInnerStream(string text) + => CreateInnerStream(Constants.DefaultEncoding.GetBytes(text)); + + protected Stream CreateInnerStream(int textLength = 100) + => CreateInnerStream(CreateString(textLength)); + + protected static Mock CreateMockInnerStream(bool canRead, bool canWrite) + { + var mockStream = new Mock() + { + CallBase = true + }; + + mockStream.SetupGet(s => s.CanSeek).Returns(true); + mockStream.SetupGet(s => s.CanRead).Returns(canRead); + mockStream.SetupGet(s => s.CanWrite).Returns(canWrite); + + return mockStream; + } + + protected static Mock CreateMockInnerStream(CryptoStreamMode mode) + => CreateMockInnerStream(mode is CryptoStreamMode.Read, mode is CryptoStreamMode.Write); + + protected Mock CreateMockInnerStream() + => CreateMockInnerStream(Create()); + + protected static void AssertCryptoStream(SeekableCryptoStream stream, CryptoStreamMode mode, Action? 
assert = null) + { + Assert.True(stream.CanSeek); + + Assert.Equal(mode, stream.Mode); + + Assert.Equal(mode is CryptoStreamMode.Read, stream.CanRead); + Assert.Equal(mode is CryptoStreamMode.Write, stream.CanWrite); + + assert?.Invoke(stream); + } + + protected static void AssertDisposed(SeekableCryptoStream cryptoStream, bool expectedInnerStreamDisposed = true) + { + var wrapper = SeekableCryptoStreamWrapper.InstanceFor(cryptoStream); + + var mockInnerStream = wrapper.GetMockFieldValue(w => w.InnerStream); + + mockInnerStream.AssertDisposed(expectedInnerStreamDisposed); + } + } + + public class Ctor : SeekableCryptoStreamTest + { + [Theory] + [EnumData] + public void Initializes(CryptoStreamMode mode) + { + var innerStream = CreateMockInnerStream(mode).Object; + + using var cryptoStream = new SeekableCryptoStream(innerStream, MockTransform.Object, mode, false); + + AssertCryptoStream(cryptoStream, mode); + } + + [Theory] + [EnumData] + public async Task Disposes_inner_stream_when_leaveOpen_is_false(CryptoStreamMode mode) + { + var mockInnerStream = CreateMockInnerStream(mode); + + var cryptoStream = new SeekableCryptoStream(mockInnerStream.Object, MockTransform.Object, mode, false); + + AssertCryptoStream(cryptoStream, mode); + + await cryptoStream.DisposeAsync(); + + AssertDisposed(cryptoStream); + } + + [Theory] + [EnumData] + public async Task Does_not_dispose_inner_stream_when_leaveOpen_is_true(CryptoStreamMode mode) + { + var mockInnerStream = CreateMockInnerStream(mode); + + var cryptoStream = new SeekableCryptoStream(mockInnerStream.Object, MockTransform.Object, mode, true); + + AssertCryptoStream(cryptoStream, mode); + + await cryptoStream.DisposeAsync(); + + AssertDisposed(cryptoStream, false); + } + + [Theory] + [Values(true, false)] + public async void Respects_leaveOpen_flag(bool leaveOpen) + { + var mode = Create(); + + var mockInnerStream = CreateMockInnerStream(mode); + + var cryptoStream = new SeekableCryptoStream(mockInnerStream.Object, MockTransform.Object, mode, leaveOpen); + + AssertCryptoStream(cryptoStream, mode); + + await cryptoStream.DisposeAsync(); + + AssertDisposed(cryptoStream, !leaveOpen); + } + } + + public class Position : SeekableCryptoStreamTest + { + [Fact] + public async Task Gets() + { + var length = 25; + + var innerStream = CreateInnerStream(length); + + await using var cryptoStream = new SeekableCryptoStream(innerStream, MockTransform.Object, CryptoStreamMode.Read, false); + + for (var i = 0; i != length; i++) + { + cryptoStream.Seek(i, SeekOrigin.Begin); + + Assert.Equal(i, cryptoStream.Position); + } + } + + [Fact] + public async Task Sets() + { + var length = 25; + + var innerStream = CreateInnerStream(length); + + await using var cryptoStream = new SeekableCryptoStream(innerStream, MockTransform.Object, CryptoStreamMode.Read, false); + + for (var i = 0; i != length; i++) + { + cryptoStream.Position = i; + } + } + } + + public class Roundtrip : SeekableCryptoStreamTest + { + [Fact] + public async Task Reads_and_writes() + { + var content = CreateString(100); + + using var aes = Aes.Create(); + aes.GenerateIV(); + + var key = aes.Key; + var iv = aes.IV; + + var encryptor = aes.CreateEncryptor(aes.Key, aes.IV); + + await using var innerStream = MemoryStreamManager.Instance.GetStream(); + + await using (var writeStream = CreateCryptoStream(innerStream, encryptor, CryptoStreamMode.Write, true)) + { + await writeStream.WriteInitializationVectorAsync(iv, Cancel); + writeStream.Write(Constants.DefaultEncoding.GetBytes(content)); + } + + 
innerStream.Seek(0, SeekOrigin.Begin); + + using var encryptedReader = new StreamReader(innerStream, Constants.DefaultEncoding); + + var encrypted = await encryptedReader.ReadToEndAsync(); + + Assert.NotEqual(content, encrypted); + + innerStream.Seek(0, SeekOrigin.Begin); + + iv = await innerStream.ReadInitializationVectorAsync(iv.Length, Cancel); + + var decryptor = aes.CreateDecryptor(aes.Key, iv); + + await using var decryptedStream = CreateCryptoStream(innerStream, decryptor, CryptoStreamMode.Read, true); + + using var decryptedReader = new StreamReader(decryptedStream, Constants.DefaultEncoding); + + var decrypted = await decryptedReader.ReadToEndAsync(); + + Assert.Equal(content, decrypted); + } + } + } +} diff --git a/tests/Tableau.Migration.Tests/Unit/Content/TableauFileEditorTests.cs b/tests/Tableau.Migration.Tests/Unit/Content/Files/TableauFileEditorTests.cs similarity index 79% rename from tests/Tableau.Migration.Tests/Unit/Content/TableauFileEditorTests.cs rename to tests/Tableau.Migration.Tests/Unit/Content/Files/TableauFileEditorTests.cs index a6f4337..4957a4e 100644 --- a/tests/Tableau.Migration.Tests/Unit/Content/TableauFileEditorTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Content/Files/TableauFileEditorTests.cs @@ -17,14 +17,14 @@ using System.IO; using System.IO.Compression; using System.Linq; -using System.Text; using System.Threading; using System.Threading.Tasks; +using Microsoft.IO; using Moq; using Tableau.Migration.Content.Files; using Xunit; -namespace Tableau.Migration.Tests.Unit.Content +namespace Tableau.Migration.Tests.Unit.Content.Files { public class TableauFileEditorTests { @@ -35,14 +35,17 @@ public class TableauFileEditorTest : AutoFixtureTestBase protected readonly Mock MockFile; - protected readonly MemoryStream WrittenFileData; + protected readonly RecyclableMemoryStream WrittenFileData; protected readonly Mock MockWriteFileStream; + protected readonly IMemoryStreamManager MemoryStreamManager = Migration.MemoryStreamManager.Instance; + public TableauFileEditorTest() { + Freeze(MemoryStreamManager); MockFile = Freeze>(); - WrittenFileData = new(); //Default ctor for resizable stream. + WrittenFileData = MemoryStreamManager.GetStream(); MockWriteFileStream = CreateTestFileStream(WrittenFileData); MockFile.Setup(x => x.OpenWriteAsync(Cancel)) @@ -54,36 +57,39 @@ public TableauFileEditorTest() }); } - protected Mock CreateTestFileStream(MemoryStream content) + protected Mock CreateTestFileStream(RecyclableMemoryStream content) { var mockFileStream = Create>(); mockFileStream.SetupGet(x => x.Content).Returns(() => content); return mockFileStream; } - protected static MemoryStream CreateResizableMemoryStream(byte[] data) + protected RecyclableMemoryStream CreateMemoryStream(byte[] data) { - var stream = new MemoryStream(); //Default ctor for resizable stream. 
+ var stream = MemoryStreamManager.GetStream(); stream.Write(data); stream.Seek(0, SeekOrigin.Begin); return stream; } + protected RecyclableMemoryStream CreateMemoryStream(string data) + => CreateMemoryStream(Constants.DefaultEncoding.GetBytes(data)); + protected TableauFileEditor CreateXmlFileEditor(string xml = TEST_XML) { - var stream = TableauFileEditorTest.CreateResizableMemoryStream(Encoding.UTF8.GetBytes(xml)); + var stream = CreateMemoryStream(xml); return new(MockFile.Object, stream, null, Cancel); } - protected static byte[] BundleXmlIntoZipFile(string xml, string entryName = TEST_ENTRY_FILENAME) + protected byte[] BundleXmlIntoZipFile(string xml, string entryName = TEST_ENTRY_FILENAME) { - var stream = new MemoryStream(); + var stream = MemoryStreamManager.GetStream(); using (var createZip = new ZipArchive(stream, ZipArchiveMode.Create, leaveOpen: true)) { var entry = createZip.CreateEntry(entryName); - entry.Open().Write(Encoding.UTF8.GetBytes(xml)); + entry.Open().Write(Constants.DefaultEncoding.GetBytes(xml)); } return stream.ToArray(); @@ -91,9 +97,9 @@ protected static byte[] BundleXmlIntoZipFile(string xml, string entryName = TEST protected TableauFileEditor CreateZipArchiveEditor(string xml = TEST_XML) { - var zipData = TableauFileEditorTest.BundleXmlIntoZipFile(xml); + var zipData = BundleXmlIntoZipFile(xml); - var stream = TableauFileEditorTest.CreateResizableMemoryStream(zipData); + var stream = CreateMemoryStream(zipData); var zip = new ZipArchive(stream, ZipArchiveMode.Update, leaveOpen: true); return new(MockFile.Object, stream, zip, Cancel); @@ -107,7 +113,7 @@ public class Ctor : TableauFileEditorTest [Fact] public async Task InitializeAsync() { - var stream = new MemoryStream(); + var stream = MemoryStreamManager.GetStream(); var zipArchive = new ZipArchive(stream, ZipArchiveMode.Create, leaveOpen: true); await using var editor = new TableauFileEditor(MockFile.Object, stream, zipArchive, Cancel); @@ -119,7 +125,7 @@ public async Task InitializeAsync() [Fact] public async Task NullZipArchiveAsync() { - await using var stream = new MemoryStream(); + await using var stream = MemoryStreamManager.GetStream(); await using var editor = new TableauFileEditor(MockFile.Object, stream, null, Cancel); @@ -197,7 +203,7 @@ public class OpenAsync : TableauFileEditorTest [Fact] public async Task OpensXmlFileAsync() { - var dataStream = TableauFileEditorTest.CreateResizableMemoryStream(Encoding.UTF8.GetBytes(TEST_XML)); + var dataStream = CreateMemoryStream(TEST_XML); var mockFileStream = CreateTestFileStream(dataStream); @@ -205,23 +211,23 @@ public async Task OpensXmlFileAsync() MockFile.Setup(x => x.OpenReadAsync(Cancel)) .ReturnsAsync(mockFileStream.Object); - await using var editor = await TableauFileEditor.OpenAsync(MockFile.Object, Cancel); + await using var editor = await TableauFileEditor.OpenAsync(MockFile.Object, MemoryStreamManager, Cancel); Assert.NotSame(dataStream, editor.Content); + Assert.Equal(0, editor.Content.Position); //stream is ready to read. + Assert.Equal(dataStream.ToArray(), editor.Content.ToArray()); mockFileStream.Verify(x => x.DisposeAsync(), Times.Once); - Assert.Equal(0, editor.Content.Position); //stream is ready to read. 
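(Aside, not part of the diff: the edits above replace ad-hoc new MemoryStream() calls with pooled streams obtained from the SDK's IMemoryStreamManager. The sketch below shows the same bundle-XML-into-a-zip pattern using Microsoft.IO.RecyclableMemoryStreamManager and UTF-8 directly as stand-ins; IMemoryStreamManager, Constants.DefaultEncoding, and the helper names in the tests are the SDK's own, while every name here is local to the sketch.)

using System.IO;
using System.IO.Compression;
using System.Text;
using Microsoft.IO;

static class ZipBundleSketch
{
    private static readonly RecyclableMemoryStreamManager Manager = new();

    public static byte[] BundleXml(string xml, string entryName)
    {
        // Pooled stream instead of new MemoryStream().
        using var stream = Manager.GetStream();

        // leaveOpen: true so disposing the archive (which finalizes the zip directory)
        // does not close the backing stream before its bytes are copied out.
        using (var zip = new ZipArchive(stream, ZipArchiveMode.Create, leaveOpen: true))
        {
            var entry = zip.CreateEntry(entryName);
            using var entryStream = entry.Open();
            var bytes = Encoding.UTF8.GetBytes(xml);
            entryStream.Write(bytes, 0, bytes.Length);
        }

        return stream.ToArray();
    }
}

RecyclableMemoryStream exists to reuse buffers and avoid repeated large-object-heap allocations, which is presumably why the fixtures thread IMemoryStreamManager through instead of constructing throwaway MemoryStream instances.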
- Assert.Null(editor.Archive); } [Fact] public async Task OpensZipFileAsync() { - var data = TableauFileEditorTest.BundleXmlIntoZipFile(TEST_XML); - var dataStream = TableauFileEditorTest.CreateResizableMemoryStream(data); + var data = BundleXmlIntoZipFile(TEST_XML); + var dataStream = CreateMemoryStream(data); var mockFileStream = CreateTestFileStream(dataStream); @@ -229,7 +235,7 @@ public async Task OpensZipFileAsync() MockFile.Setup(x => x.OpenReadAsync(Cancel)) .ReturnsAsync(mockFileStream.Object); - await using var editor = await TableauFileEditor.OpenAsync(MockFile.Object, Cancel); + await using var editor = await TableauFileEditor.OpenAsync(MockFile.Object, MemoryStreamManager, Cancel); Assert.NotSame(dataStream, editor.Content); Assert.Equal(dataStream.ToArray(), editor.Content.ToArray()); @@ -251,7 +257,7 @@ public class DisposeAsync : TableauFileEditorTest public async Task PersistsXmlAsync() { const string OUTPUT_XML = "changed"; - var dataStream = TableauFileEditorTest.CreateResizableMemoryStream(Encoding.UTF8.GetBytes(TEST_XML)); + var dataStream = CreateMemoryStream(TEST_XML); var mockFileStream = CreateTestFileStream(dataStream); @@ -259,12 +265,12 @@ public async Task PersistsXmlAsync() MockFile.Setup(x => x.OpenReadAsync(Cancel)) .ReturnsAsync(mockFileStream.Object); - await using (var editor = await TableauFileEditor.OpenAsync(MockFile.Object, Cancel)) + await using (var editor = await TableauFileEditor.OpenAsync(MockFile.Object, MemoryStreamManager, Cancel)) { - await editor.GetXmlStream().XmlContent.WriteAsync(Encoding.UTF8.GetBytes(OUTPUT_XML)); + await editor.GetXmlStream().XmlContent.WriteAsync(Constants.DefaultEncoding.GetBytes(OUTPUT_XML)); } - Assert.Equal(OUTPUT_XML, Encoding.UTF8.GetString(WrittenFileData.ToArray())); + Assert.Equal(OUTPUT_XML, Constants.DefaultEncoding.GetString(WrittenFileData.ToArray())); MockFile.Verify(x => x.OpenWriteAsync(Cancel), Times.Once); } @@ -273,7 +279,7 @@ public async Task PersistsXmlAsync() public async Task PersistsXmlDisposeTwiceNoErrors() { const string OUTPUT_XML = "changed"; - var dataStream = TableauFileEditorTest.CreateResizableMemoryStream(Encoding.UTF8.GetBytes(TEST_XML)); + var dataStream = CreateMemoryStream(TEST_XML); var mockFileStream = CreateTestFileStream(dataStream); @@ -281,13 +287,13 @@ public async Task PersistsXmlDisposeTwiceNoErrors() MockFile.Setup(x => x.OpenReadAsync(Cancel)) .ReturnsAsync(mockFileStream.Object); - var editor = await TableauFileEditor.OpenAsync(MockFile.Object, Cancel); + var editor = await TableauFileEditor.OpenAsync(MockFile.Object, MemoryStreamManager, Cancel); - await editor.GetXmlStream().XmlContent.WriteAsync(Encoding.UTF8.GetBytes(OUTPUT_XML)); + await editor.GetXmlStream().XmlContent.WriteAsync(Constants.DefaultEncoding.GetBytes(OUTPUT_XML)); await editor.DisposeAsync(); await editor.DisposeAsync(); - Assert.Equal(OUTPUT_XML, Encoding.UTF8.GetString(WrittenFileData.ToArray())); + Assert.Equal(OUTPUT_XML, Constants.DefaultEncoding.GetString(WrittenFileData.ToArray())); MockFile.Verify(x => x.OpenWriteAsync(Cancel), Times.Once); } @@ -297,8 +303,8 @@ public async Task PersistsZipAsync() { const string OUTPUT_XML = "changed"; - var data = TableauFileEditorTest.BundleXmlIntoZipFile(TEST_XML); - var dataStream = TableauFileEditorTest.CreateResizableMemoryStream(data); + var data = BundleXmlIntoZipFile(TEST_XML); + var dataStream = CreateMemoryStream(data); var mockFileStream = CreateTestFileStream(dataStream); @@ -306,12 +312,12 @@ public async Task PersistsZipAsync() 
MockFile.Setup(x => x.OpenReadAsync(Cancel)) .ReturnsAsync(mockFileStream.Object); - await using (var editor = await TableauFileEditor.OpenAsync(MockFile.Object, Cancel)) + await using (var editor = await TableauFileEditor.OpenAsync(MockFile.Object, MemoryStreamManager, Cancel)) { - await editor.GetXmlStream().XmlContent.WriteAsync(Encoding.UTF8.GetBytes(OUTPUT_XML)); + await editor.GetXmlStream().XmlContent.WriteAsync(Constants.DefaultEncoding.GetBytes(OUTPUT_XML)); } - var outputZip = new ZipArchive(new MemoryStream(WrittenFileData.ToArray()), ZipArchiveMode.Read); + var outputZip = new ZipArchive(MemoryStreamManager.GetStream(WrittenFileData.ToArray()), ZipArchiveMode.Read); using (var entryStream = outputZip.Entries.Single(e => e.Name == TEST_ENTRY_FILENAME).Open()) using (var streamReader = new StreamReader(entryStream)) { @@ -326,8 +332,8 @@ public async Task PersistsZipAsyncDisposeTwiceNoErrors() { const string OUTPUT_XML = "changed"; - var data = TableauFileEditorTest.BundleXmlIntoZipFile(TEST_XML); - var dataStream = TableauFileEditorTest.CreateResizableMemoryStream(data); + var data = BundleXmlIntoZipFile(TEST_XML); + var dataStream = CreateMemoryStream(data); var mockFileStream = CreateTestFileStream(dataStream); @@ -335,12 +341,12 @@ public async Task PersistsZipAsyncDisposeTwiceNoErrors() MockFile.Setup(x => x.OpenReadAsync(Cancel)) .ReturnsAsync(mockFileStream.Object); - var editor = await TableauFileEditor.OpenAsync(MockFile.Object, Cancel); - await editor.GetXmlStream().XmlContent.WriteAsync(Encoding.UTF8.GetBytes(OUTPUT_XML)); + var editor = await TableauFileEditor.OpenAsync(MockFile.Object, MemoryStreamManager, Cancel); + await editor.GetXmlStream().XmlContent.WriteAsync(Constants.DefaultEncoding.GetBytes(OUTPUT_XML)); await editor.DisposeAsync(); await editor.DisposeAsync(); - var outputZip = new ZipArchive(new MemoryStream(WrittenFileData.ToArray()), ZipArchiveMode.Read); + var outputZip = new ZipArchive(MemoryStreamManager.GetStream(WrittenFileData.ToArray()), ZipArchiveMode.Read); using (var entryStream = outputZip.Entries.Single(e => e.Name == TEST_ENTRY_FILENAME).Open()) using (var streamReader = new StreamReader(entryStream)) { diff --git a/tests/Tableau.Migration.Tests/Unit/Content/TableauFileXmlStreamTests.cs b/tests/Tableau.Migration.Tests/Unit/Content/Files/TableauFileXmlStreamTests.cs similarity index 93% rename from tests/Tableau.Migration.Tests/Unit/Content/TableauFileXmlStreamTests.cs rename to tests/Tableau.Migration.Tests/Unit/Content/Files/TableauFileXmlStreamTests.cs index d436990..b5ad072 100644 --- a/tests/Tableau.Migration.Tests/Unit/Content/TableauFileXmlStreamTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Content/Files/TableauFileXmlStreamTests.cs @@ -16,14 +16,13 @@ using System; using System.IO; -using System.Text; using System.Threading.Tasks; using System.Xml.Linq; using Moq; using Tableau.Migration.Content.Files; using Xunit; -namespace Tableau.Migration.Tests.Unit.Content +namespace Tableau.Migration.Tests.Unit.Content.Files { public class TableauFileXmlStreamTests { @@ -55,7 +54,7 @@ public class GetXmlAsync : TableauFileXmlStreamTest public async Task GetsOrCreatesAsync() { var stream = new MemoryStream(); - stream.Write(Encoding.UTF8.GetBytes("")); + stream.Write(Constants.DefaultEncoding.GetBytes("")); stream.Seek(0, SeekOrigin.Begin); await using var xmlStream = new TableauFileXmlStream(stream, Cancel); @@ -73,7 +72,7 @@ public class DisposeAsync : TableauFileXmlStreamTest public async Task SavesXmlAsync() { var stream = new 
MemoryStream(); - stream.Write(Encoding.UTF8.GetBytes("")); + stream.Write(Constants.DefaultEncoding.GetBytes("")); stream.Seek(0, SeekOrigin.Begin); await using (var xmlStream = new TableauFileXmlStream(stream, Cancel, leaveOpen: true)) diff --git a/tests/Tableau.Migration.Tests/Unit/Content/Files/TemporaryDirectoryContentFileStoreTests.cs b/tests/Tableau.Migration.Tests/Unit/Content/Files/TemporaryDirectoryContentFileStoreTests.cs index 33a8e76..754f65d 100644 --- a/tests/Tableau.Migration.Tests/Unit/Content/Files/TemporaryDirectoryContentFileStoreTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Content/Files/TemporaryDirectoryContentFileStoreTests.cs @@ -15,8 +15,6 @@ // using System.IO; -using System.IO.Abstractions; -using Tableau.Migration.Config; using Tableau.Migration.Content.Files; using Xunit; @@ -24,28 +22,19 @@ namespace Tableau.Migration.Tests.Unit.Content.Files { public class TemporaryDirectoryContentFileStoreTests { - public class TestFileStore : TemporaryDirectoryContentFileStore - { - public string PublicBaseStorePath => BaseStorePath; - - public TestFileStore(IFileSystem fileSystem, IContentFilePathResolver pathResolver, IConfigReader configReader) - : base(fileSystem, pathResolver, configReader) - { } - } + public abstract class TemporaryDirectoryContentFileStoreTest : DirectoryContentFileStoreTestBase + { } - public class Ctor : AutoFixtureTestBase + public class Ctor : TemporaryDirectoryContentFileStoreTest { [Fact] public void MakesRandomSubDirectory() { - var rootPath = Create(); - - var config = Freeze(); - config.Files.RootPath = rootPath; + var fileStore = CreateFileStore(); - var fs = Create(); + var baseStorePath = GetBaseStorePath(fileStore); - var subDir = Path.GetRelativePath(rootPath, fs.PublicBaseStorePath); + var subDir = Path.GetRelativePath(RootPath, baseStorePath); Assert.NotEmpty(subDir); } } diff --git a/tests/Tableau.Migration.Tests/Unit/Content/Permissions/IGranteeCapabilityComparer.cs b/tests/Tableau.Migration.Tests/Unit/Content/Permissions/IGranteeCapabilityComparer.cs index 45a9e74..a7d3934 100644 --- a/tests/Tableau.Migration.Tests/Unit/Content/Permissions/IGranteeCapabilityComparer.cs +++ b/tests/Tableau.Migration.Tests/Unit/Content/Permissions/IGranteeCapabilityComparer.cs @@ -28,7 +28,7 @@ internal class IGranteeCapabilityComparer : ComparerBase public IGranteeCapabilityComparer(bool compareGranteeIds = true) => _compareGranteeIds = compareGranteeIds; - public override int CompareItems(IGranteeCapability x, IGranteeCapability y) + protected override int CompareItems(IGranteeCapability x, IGranteeCapability y) { var granteeIdResult = _compareGranteeIds ? 
x.GranteeId.CompareTo(y.GranteeId) : 0; diff --git a/tests/Tableau.Migration.Tests/Unit/Content/Permissions/IPermissionsComparer.cs b/tests/Tableau.Migration.Tests/Unit/Content/Permissions/IPermissionsComparer.cs index 0c5e395..934e41f 100644 --- a/tests/Tableau.Migration.Tests/Unit/Content/Permissions/IPermissionsComparer.cs +++ b/tests/Tableau.Migration.Tests/Unit/Content/Permissions/IPermissionsComparer.cs @@ -26,7 +26,7 @@ internal class IPermissionsComparer : ComparerBase { public static readonly IPermissionsComparer Instance = new(); - public override int CompareItems(IPermissions x, IPermissions y) + protected override int CompareItems(IPermissions x, IPermissions y) { var parentIdResult = Comparer.Default.Compare(x.ParentId, y.ParentId); diff --git a/tests/Tableau.Migration.Tests/Unit/Content/WorkbookDetailsTests.cs b/tests/Tableau.Migration.Tests/Unit/Content/WorkbookDetailsTests.cs new file mode 100644 index 0000000..8186f29 --- /dev/null +++ b/tests/Tableau.Migration.Tests/Unit/Content/WorkbookDetailsTests.cs @@ -0,0 +1,79 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Collections.Generic; +using System.Linq; +using Tableau.Migration.Api.Rest.Models; +using Tableau.Migration.Content; +using Xunit; + +namespace Tableau.Migration.Tests.Unit.Content +{ + public class WorkbookDetailsTests + { + public abstract class WorkbookDetailsTest : AutoFixtureTestBase + { + public IWorkbookDetails CreateWorkbook() => Create(); + public IWorkbookDetailsType CreateResponse() => Create(); + + public IContentReference CreateProjectReference() => Create(); + public IContentReference CreateOwnerReference() => Create(); + } + + public class Ctor + { + public class FromResponse : WorkbookDetailsTest + { + [Fact] + public void Initializes() + { + var response = CreateResponse(); + var project = CreateProjectReference(); + var owner = CreateOwnerReference(); + + var result = new WorkbookDetails(response, project, owner); + + result.Assert(response, project, owner, wb => + { + foreach (var responseView in response.Views) + { + Assert.Single(wb.Views, v => + v.Id == responseView.Id && + v.Name == responseView.Name && + v.ContentUrl == responseView.ContentUrl && + v.Location == new ContentLocation(project.Location, wb.Name).Append(responseView.Name) && + v.Tags.Select(t => t.Label).SequenceEqual(responseView.Tags.Select(t => t.Label))); + } + }); + } + } + + public class FromWorkbook : WorkbookDetailsTest + { + [Fact] + public void Initializes() + { + var workbook = CreateWorkbook(); + + var result = new WorkbookDetails(workbook); + + result.Assert(workbook, wb => Assert.Same(workbook.Views, wb.Views)); + } + } + } + } +} diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/MigrationEndpointFactoryTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/MigrationEndpointFactoryTests.cs index b07a2d5..363c259 100644 --- 
a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/MigrationEndpointFactoryTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/MigrationEndpointFactoryTests.cs @@ -22,6 +22,7 @@ using Tableau.Migration.Content.Files; using Tableau.Migration.Engine.Endpoints; using Tableau.Migration.Engine.Endpoints.Search; +using Tableau.Migration.Resources; using Xunit; namespace Tableau.Migration.Tests.Unit.Engine.Endpoints @@ -47,7 +48,8 @@ public MigrationEndpointFactoryTest() _factory = new(serviceScopeFactory, Create(), Create(), - Create() + Create(), + Create() ); } diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/BulkDestinationCacheTest.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/BulkDestinationCacheTest.cs index 3d588b1..b3a0111 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/BulkDestinationCacheTest.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/BulkDestinationCacheTest.cs @@ -15,8 +15,12 @@ // using System.Collections.Generic; +using System.Collections.Immutable; using System.Linq; +using System.Threading; +using AutoFixture; using Moq; +using Tableau.Migration.Api; using Tableau.Migration.Config; using Tableau.Migration.Content; using Tableau.Migration.Engine.Endpoints; @@ -27,15 +31,17 @@ namespace Tableau.Migration.Tests.Unit.Engine.Endpoints.Search { public abstract class BulkDestinationCacheTest : AutoFixtureTestBase where TCache : BulkDestinationCache - where TContent : IContentReference + where TContent : class, IContentReference { protected readonly Mock MockManifestEntryBuilder; protected readonly Mock MockManifestPartition; - protected readonly Mock MockDestinationEndpoint; + protected readonly Mock MockDestinationEndpoint; + protected readonly Mock MockSitesApiClient; + protected readonly Mock> MockListApiClient; protected readonly TCache Cache; - protected int BatchSize { get; set; } + protected ContentTypesOptions ContentTypesOptions { get; set; } = new ContentTypesOptions(); protected List EndpointContent { get; set; } @@ -44,7 +50,7 @@ public abstract class BulkDestinationCacheTest : AutoFixtureTe public BulkDestinationCacheTest() { EndpointContent = CreateMany().ToList(); - BatchSize = EndpointContent.Count / 2; + ContentTypesOptions.BatchSize = EndpointContent.Count / 2; MockManifestEntryBuilder = Freeze>(); @@ -61,13 +67,23 @@ public BulkDestinationCacheTest() mockManifestEditor.Setup(x => x.Entries.GetOrCreatePartition()) .Returns(MockManifestPartition.Object); - MockDestinationEndpoint = Freeze>(); - MockDestinationEndpoint.Setup(x => x.GetPager(It.IsAny())) - .Returns((int batchSize) => new MemoryPager(EndpointContent, batchSize)); + MockListApiClient = Freeze>>(); + MockListApiClient.Setup(x => x.GetAllAsync(It.IsAny(), It.IsAny())) + .ReturnsAsync(() => Result>.Succeeded(EndpointContent.ToImmutableList())); + + MockSitesApiClient = Freeze>(); + MockSitesApiClient.Setup(x => x.GetListApiClient()) + .Returns(MockListApiClient.Object); + + MockDestinationEndpoint = Freeze>(); + MockDestinationEndpoint.Setup(x => x.SiteApi) + .Returns(MockSitesApiClient.Object); + + AutoFixture.Register(() => MockDestinationEndpoint.Object); var mockConfigReader = Freeze>(); - mockConfigReader.Setup(x => x.Get()) - .Returns(() => new MigrationSdkOptions { BatchSize = BatchSize }); + mockConfigReader.Setup(x => x.Get()) + .Returns(() => ContentTypesOptions); Cache = Create(); } diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/BulkDestinationCacheTests.cs 
b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/BulkDestinationCacheTests.cs index 8cd5def..3d9cb48 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/BulkDestinationCacheTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/BulkDestinationCacheTests.cs @@ -15,6 +15,7 @@ // using System.Collections.Generic; +using System.Threading; using System.Threading.Tasks; using Moq; using Tableau.Migration.Config; @@ -32,8 +33,8 @@ public class TestBulkDestinationCache : BulkDestinationCache { public List ItemLoadedCalls { get; } = new(); - public TestBulkDestinationCache(IMigrationManifestEditor manifest, IDestinationEndpoint endpoint, IConfigReader configReader) - : base(manifest, endpoint, configReader) + public TestBulkDestinationCache(IDestinationEndpoint endpoint, IConfigReader configReader, IMigrationManifestEditor manifest) + : base(endpoint, configReader, manifest) { } protected override void ItemLoaded(TestContentType item) @@ -56,7 +57,7 @@ public async Task PopulatesAllPagesFromEndpointAsync() var resultStub = Assert.IsType(result); Assert.Equal(new ContentReferenceStub(item), resultStub); - MockDestinationEndpoint.Verify(x => x.GetPager(BatchSize), Times.Once); + MockListApiClient.Verify(x => x.GetAllAsync(ContentTypesOptions.BatchSize, It.IsAny()), Times.Once); Assert.Equal(EndpointContent.Count, Cache.Count); } @@ -87,7 +88,7 @@ public async Task LoadsOnlyOnceAsync() Assert.Equal(new ContentReferenceStub(item), resultStub); } - MockDestinationEndpoint.Verify(x => x.GetPager(BatchSize), Times.Once); + MockListApiClient.Verify(x => x.GetAllAsync(ContentTypesOptions.BatchSize, It.IsAny()), Times.Once); Assert.Equal(EndpointContent.Count, Cache.Count); } diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/BulkDestinationProjectCacheTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/BulkDestinationProjectCacheTests.cs index 725185d..03b7f57 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/BulkDestinationProjectCacheTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/BulkDestinationProjectCacheTests.cs @@ -32,8 +32,6 @@ public class LoadStoreAsync : BulkDestinationCacheTest x.GetPager(It.IsAny())) - .Returns((int batchSize) => new BreadthFirstPathHierarchyPager(new MemoryPager(EndpointContent, batchSize), batchSize)); } [Fact] @@ -52,7 +50,7 @@ public async Task PopulatesAllPagesFromProjectsAsync() mockChildProject.SetupGet(x => x.Location).Returns(new ContentLocation(mockProjects[0].Object.Name, mockChildProject.Object.Name)); EndpointContent = mockProjects.Select(m => m.Object).ToList(); - BatchSize = EndpointContent.Count; + ContentTypesOptions.BatchSize = EndpointContent.Count; var item = EndpointContent[1]; diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/DestinationManifestCacheBaseTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/DestinationManifestCacheBaseTests.cs deleted file mode 100644 index c7ee292..0000000 --- a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/DestinationManifestCacheBaseTests.cs +++ /dev/null @@ -1,213 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Moq; -using Tableau.Migration.Content; -using Tableau.Migration.Engine.Endpoints.Search; -using Tableau.Migration.Engine.Manifest; -using Xunit; - -namespace Tableau.Migration.Tests.Unit.Engine.Endpoints.Search -{ - public class DestinationManifestCacheBaseTests - { - #region - Test Classes - - - public class TestDestinationManifestCache : DestinationManifestCacheBase - { - public Func> SearchStoreByLocationAction { get; set; } - - public Func> SearchStoreByIdAction { get; set; } - - public int SearchStoreCalls { get; private set; } - - public TestDestinationManifestCache(IMigrationManifestEditor manifest) - : base(manifest) - { - SearchStoreByLocationAction = loc => Enumerable.Empty(); - SearchStoreByIdAction = id => Enumerable.Empty(); - } - - protected override ValueTask> SearchStoreAsync(ContentLocation searchLocation, CancellationToken cancel) - { - SearchStoreCalls++; - return ValueTask.FromResult(SearchStoreByLocationAction(searchLocation)); - } - - protected override ValueTask> SearchStoreAsync(Guid searchId, CancellationToken cancel) - { - SearchStoreCalls++; - return ValueTask.FromResult(SearchStoreByIdAction(searchId)); - } - } - - public class DestinationManifestCacheBaseTest : AutoFixtureTestBase - { - protected readonly Mock MockEntriesCollection; - protected readonly TestDestinationManifestCache Cache; - protected readonly List ManifestEntries; - - public DestinationManifestCacheBaseTest() - { - ManifestEntries = new(); - - var mockEntries = Create>(); - mockEntries.Setup(x => x.ByMappedLocation) - .Returns(() => ManifestEntries.ToImmutableDictionary(e => e.MappedLocation, e => (IMigrationManifestEntryEditor)e)); - mockEntries.Setup(x => x.ByDestinationId) - .Returns( - () => ManifestEntries - .Where(e => e.Destination is not null) - .ToImmutableDictionary(e => e.Destination!.Id, e => (IMigrationManifestEntryEditor)e) - ); - - MockEntriesCollection = Freeze>(); - MockEntriesCollection.Setup(x => x.GetOrCreatePartition()) - .Returns(mockEntries.Object); - - Cache = Create(); - } - } - - #endregion - - #region - SearchAsync (Location) - - - public class SearchAsyncByLocation : DestinationManifestCacheBaseTest - { - [Fact] - public async Task SearchesManifestDestinationInfoAsync() - { - var entry = Create(); - entry.DestinationFound(Create()); - ManifestEntries.Add(entry); - - var resultRef = await Cache.ForLocationAsync(entry.MappedLocation, Cancel); - - Assert.NotNull(resultRef); - Assert.Equal(new ContentReferenceStub(entry.Destination!), resultRef); - - Assert.Equal(0, Cache.SearchStoreCalls); - - MockEntriesCollection.Verify(x => x.GetOrCreatePartition(), Times.Once); - } - - [Fact] - public async Task SearchesStoreAsync() - { - var searchRef = Create(); - var searchLoc = searchRef.Location; - - var foundRefs = CreateMany() - .Append(searchRef) - .ToImmutableArray(); - - Cache.SearchStoreByLocationAction = (loc) => foundRefs; - - var resultRef = await 
Cache.ForLocationAsync(searchLoc, Cancel); - - Assert.NotNull(resultRef); - Assert.Equal(searchRef, resultRef); - - Assert.Equal(1, Cache.SearchStoreCalls); - - MockEntriesCollection.Verify(x => x.GetOrCreatePartition(), Times.Once); - } - - [Fact] - public async Task NotFoundInStoreAsync() - { - var searchRef = Create(); - var searchLoc = searchRef.Location; - - var resultRef = await Cache.ForLocationAsync(searchLoc, Cancel); - - Assert.Null(resultRef); - - Assert.Equal(1, Cache.SearchStoreCalls); - - MockEntriesCollection.Verify(x => x.GetOrCreatePartition(), Times.Once); - } - } - - #endregion - - #region - SearchAsync (Id) - - - public class SearchAsyncById : DestinationManifestCacheBaseTest - { - [Fact] - public async Task SearchesManifestDestinationInfoAsync() - { - var entry = Create(); - entry.DestinationFound(Create()); - ManifestEntries.Add(entry); - - var resultRef = await Cache.ForIdAsync(entry.Destination!.Id, Cancel); - - Assert.NotNull(resultRef); - Assert.Equal(new ContentReferenceStub(entry.Destination!), resultRef); - - Assert.Equal(0, Cache.SearchStoreCalls); - - MockEntriesCollection.Verify(x => x.GetOrCreatePartition(), Times.Once); - } - - [Fact] - public async Task SearchesStoreAsync() - { - var searchRef = Create(); - - var foundRefs = CreateMany() - .Append(searchRef) - .ToImmutableArray(); - - Cache.SearchStoreByIdAction = (id) => foundRefs; - - var resultRef = await Cache.ForIdAsync(searchRef.Id, Cancel); - - Assert.NotNull(resultRef); - Assert.Equal(searchRef, resultRef); - - Assert.Equal(1, Cache.SearchStoreCalls); - - MockEntriesCollection.Verify(x => x.GetOrCreatePartition(), Times.Once); - } - - [Fact] - public async Task NotFoundInStoreAsync() - { - var searchRef = Create(); - - var resultRef = await Cache.ForIdAsync(searchRef.Id, Cancel); - - Assert.Null(resultRef); - - Assert.Equal(1, Cache.SearchStoreCalls); - - MockEntriesCollection.Verify(x => x.GetOrCreatePartition(), Times.Once); - } - } - - #endregion - } -} diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/ManifestSourceContentReferenceFinderTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/ManifestSourceContentReferenceFinderTests.cs index e30a126..215de0d 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/ManifestSourceContentReferenceFinderTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/Search/ManifestSourceContentReferenceFinderTests.cs @@ -14,10 +14,16 @@ // limitations under the License. 
// +using System; using System.Linq; +using System.Threading; using System.Threading.Tasks; +using Castle.Components.DictionaryAdapter.Xml; +using Moq; +using Tableau.Migration.Content.Search; using Tableau.Migration.Engine.Endpoints.Search; using Tableau.Migration.Engine.Manifest; +using Tableau.Migration.Engine.Pipelines; using Xunit; namespace Tableau.Migration.Tests.Unit.Engine.Endpoints.Search @@ -27,14 +33,24 @@ public class ManifestSourceContentReferenceFinderTests public class ManifestSourceContentReferenceFinderTest : AutoFixtureTestBase { protected readonly IMigrationManifestEditor Manifest; + protected readonly Mock Pipeline; + protected readonly Mock ContentReferenceCache; protected readonly ManifestSourceContentReferenceFinder Finder; public ManifestSourceContentReferenceFinderTest() { Manifest = Create(); + Pipeline = Create>(); + ContentReferenceCache = Create>(); - Finder = new ManifestSourceContentReferenceFinder(Manifest); + ContentReferenceCache.Setup(x => x.ForIdAsync(It.IsAny(), It.IsAny())) + .Returns(Task.FromResult(null)); + + Pipeline.Setup(x => x.CreateSourceCache()) + .Returns(ContentReferenceCache.Object); + + Finder = new ManifestSourceContentReferenceFinder(Manifest, Pipeline.Object); } } @@ -52,6 +68,21 @@ public async Task FindsManifestReferenceAsync() var result = await Finder.FindByIdAsync(sourceItem.Id, Cancel); Assert.Same(entry.Source, result); + ContentReferenceCache.Verify(x => x.ForIdAsync(It.IsAny(), It.IsAny()), Times.Never); + } + + [Fact] + public async Task FindCacheReferenceAsync() + { + var sourceItem = Create(); + + ContentReferenceCache.Setup(x => x.ForIdAsync(sourceItem.Id, It.IsAny())) + .ReturnsAsync(sourceItem); + + var result = await Finder.FindByIdAsync(sourceItem.Id, Cancel); + + Assert.Same(sourceItem, result); + ContentReferenceCache.Verify(x => x.ForIdAsync(It.IsAny(), It.IsAny()), Times.Once); } [Fact] @@ -62,6 +93,7 @@ public async Task NotFoundAsync() var result = await Finder.FindByIdAsync(sourceItem.Id, Cancel); Assert.Null(result); + ContentReferenceCache.Verify(x => x.ForIdAsync(It.IsAny(), It.IsAny()), Times.Once); } } } diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/TableauApiDestinationEndpointTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/TableauApiDestinationEndpointTests.cs index 3bed22e..8d522dd 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/TableauApiDestinationEndpointTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/TableauApiDestinationEndpointTests.cs @@ -23,6 +23,7 @@ using Tableau.Migration.Content.Files; using Tableau.Migration.Engine.Endpoints; using Tableau.Migration.Engine.Endpoints.Search; +using Tableau.Migration.Resources; using Xunit; namespace Tableau.Migration.Tests.Unit.Engine.Endpoints @@ -38,7 +39,8 @@ public TableauApiDestinationEndpointTest() Endpoint = new(MigrationServices.GetRequiredService(), Create(), Create(), - Create() + Create(), + Create() ); } } diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/TableauApiEndpointBaseTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/TableauApiEndpointBaseTests.cs index 5d237dc..5503601 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/TableauApiEndpointBaseTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/TableauApiEndpointBaseTests.cs @@ -23,9 +23,9 @@ using Tableau.Migration.Content; using Tableau.Migration.Content.Files; using Tableau.Migration.Content.Search; -using Tableau.Migration.Engine; using 
Tableau.Migration.Engine.Endpoints; using Tableau.Migration.Paging; +using Tableau.Migration.Resources; using Xunit; namespace Tableau.Migration.Tests.Unit.Engine.Endpoints @@ -45,8 +45,9 @@ public class TestApiEndpoint : TableauApiEndpointBase public TestApiEndpoint(IServiceScopeFactory serviceScopeFactory, ITableauApiEndpointConfiguration config, IContentReferenceFinderFactory finderFactory, - IContentFileStore fileStore) - : base(serviceScopeFactory, config, finderFactory, fileStore) + IContentFileStore fileStore, + ISharedResourcesLocalizer localizer) + : base(serviceScopeFactory, config, finderFactory, fileStore, localizer) { } } @@ -59,7 +60,8 @@ public TableauApiEndpointBaseTest() Endpoint = new(MigrationServices.GetRequiredService(), Create(), Create(), - Create() + Create(), + Create() ); } } @@ -85,8 +87,9 @@ public void CreatesApiClient() var config = Create(); var mockFinderFactory = Create(); var mockFileStore = Create(); + var mockLocalizer = Create(); - var endpoint = new TestApiEndpoint(serviceScopeFactory, config, mockFinderFactory, mockFileStore); + var endpoint = new TestApiEndpoint(serviceScopeFactory, config, mockFinderFactory, mockFileStore, mockLocalizer); Assert.Same(apiClient, endpoint.ServerApi); } @@ -106,8 +109,9 @@ public void ApiClientScopeFileStoreMatchesMigrationFileStore() var config = Create(); var mockFinderFactory = Create(); var mockFileStore = Create(); + var mockLocalizer = Create(); - var endpoint = new TestApiEndpoint(serviceScopeFactory, config, mockFinderFactory, mockFileStore); + var endpoint = new TestApiEndpoint(serviceScopeFactory, config, mockFinderFactory, mockFileStore, mockLocalizer); Assert.Same(mockFileStore, endpoint.EndpointScope.ServiceProvider.GetService()); } diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/TableauApiSourceEndpointTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/TableauApiSourceEndpointTests.cs index 5874007..0818f2d 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/TableauApiSourceEndpointTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/Endpoints/TableauApiSourceEndpointTests.cs @@ -21,6 +21,7 @@ using Tableau.Migration.Content.Files; using Tableau.Migration.Engine.Endpoints; using Tableau.Migration.Engine.Endpoints.Search; +using Tableau.Migration.Resources; using Xunit; namespace Tableau.Migration.Tests.Unit.Engine.Endpoints @@ -36,7 +37,8 @@ public TableauApiSourceEndpointTest() Endpoint = new(MigrationServices.GetRequiredService(), Create(), Create(), - Create() + Create(), + Create() ); } } diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/IServiceCollectionExtensionsTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/IServiceCollectionExtensionsTests.cs index 2dc88bf..438f2f1 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/IServiceCollectionExtensionsTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/IServiceCollectionExtensionsTests.cs @@ -14,11 +14,15 @@ // limitations under the License. 
// +using System.Threading; using System.Threading.Tasks; using AutoFixture; using Microsoft.Extensions.DependencyInjection; +using Moq; using Tableau.Migration.Api; +using Tableau.Migration.Content; using Tableau.Migration.Content.Files; +using Tableau.Migration.Content.Search; using Tableau.Migration.Engine; using Tableau.Migration.Engine.Endpoints; using Tableau.Migration.Engine.Endpoints.Search; @@ -46,23 +50,45 @@ public AddMigrationEngine() protected override void ConfigureServices(IServiceCollection services) { + var mockSitesClient = Freeze(); + var mockApiClient = Freeze>(); + var mockScopedClientFactory = Freeze>(); + + mockApiClient.Setup(x => x.SignInAsync(It.IsAny())) + .ReturnsAsync(AsyncDisposableResult.Succeeded(mockSitesClient)); + + mockScopedClientFactory.Setup(x => x.Initialize(It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(mockApiClient.Object); + services.AddLogging() .AddLocalization() .AddSharedResourcesLocalization() .AddMigrationApiClient() - .AddMigrationEngine(); + .AddMigrationEngine() + .AddScoped((provider) => mockScopedClientFactory.Object); } - protected AsyncServiceScope InitializeMigrationScope() - => InitializeMigrationScope(Create(), Create()); + protected async Task InitializeMigrationScopeAsync() + => await InitializeMigrationScopeAsync(Freeze(), Freeze()); - protected AsyncServiceScope InitializeMigrationScope(IMigrationPlan plan, IMigrationManifest? previousManifest) + protected async Task InitializeMigrationScopeAsync(IMigrationPlan plan, IMigrationManifest? previousManifest) { + // Creates the migration scope var scope = ServiceProvider.CreateAsyncScope(); var input = scope.ServiceProvider.GetRequiredService(); input.Initialize(plan, previousManifest); + //Initialize endpoints - any failure to connect is a fatal error before the pipeline is executed. 
+ var migration = scope.ServiceProvider.GetRequiredService(); + var endpointInitTasks = new[] + { + migration.Source.InitializeAsync(Cancel), + migration.Destination.InitializeAsync(Cancel) + }; + + await Task.WhenAll(endpointInitTasks).ConfigureAwait(false); + return scope; } @@ -115,8 +141,8 @@ public async Task RegistersSingletoneManifestFactory() [Fact] public async Task RegistersScopedMigrationAndProperties() { - await using var scope1 = InitializeMigrationScope(); - await using var scope2 = InitializeMigrationScope(); + await using var scope1 = await InitializeMigrationScopeAsync(); + await using var scope2 = await InitializeMigrationScopeAsync(); var scope1Obj = scope1.ServiceProvider.GetRequiredService(); var scope1Repeat = scope1.ServiceProvider.GetRequiredService(); @@ -139,7 +165,7 @@ public async Task RegistersScopedMigrationAndProperties() [Fact] public async Task RegistersScopedPlanOptionsProvider() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService, MigrationPlanOptionsProvider>(scope, ServiceLifetime.Scoped); } @@ -147,7 +173,7 @@ public async Task RegistersScopedPlanOptionsProvider() [Fact] public async Task RegistersScopedHookRunner() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService(scope, ServiceLifetime.Scoped); } @@ -173,7 +199,7 @@ public async Task RegistersTransientHookBuilder() [Fact] public async Task RegistersScopedServerToCloudPipeline() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService(scope, ServiceLifetime.Scoped); } @@ -193,7 +219,7 @@ public async Task RegistersSingletonMigrator() [Fact] public async Task RegistersScopedContentMigratorAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService>(scope, ServiceLifetime.Scoped); } @@ -201,7 +227,7 @@ public async Task RegistersScopedContentMigratorAsync() [Fact] public async Task RegistersScopedSourcePreparerAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService>(scope, ServiceLifetime.Scoped); } @@ -209,7 +235,7 @@ public async Task RegistersScopedSourcePreparerAsync() [Fact] public async Task RegistersScopedEndpointPreparerAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService>(scope, ServiceLifetime.Scoped); } @@ -217,7 +243,7 @@ public async Task RegistersScopedEndpointPreparerAsync() [Fact] public async Task RegistersScopedItemBatchMigratorAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService>(scope, ServiceLifetime.Scoped); AssertService>(scope, ServiceLifetime.Scoped); @@ -226,7 +252,7 @@ public async Task RegistersScopedItemBatchMigratorAsync() [Fact] public async Task RegistersScopedBulkBatchMigratorAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService>(scope, ServiceLifetime.Scoped); AssertService>(scope, ServiceLifetime.Scoped); @@ -235,15 +261,15 @@ public async Task RegistersScopedBulkBatchMigratorAsync() [Fact] public async Task RegistersScopedBulkDestinationCacheAsync() { - await using var scope = 
InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); - AssertService>(scope, ServiceLifetime.Scoped); + AssertService>(scope, ServiceLifetime.Scoped); } [Fact] public async Task RegistersScopedDestinationContentFinderAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService>(scope, ServiceLifetime.Scoped); } @@ -251,15 +277,23 @@ public async Task RegistersScopedDestinationContentFinderAsync() [Fact] public async Task RegistersScopedDestinationContentFinderFactoryAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService(scope, ServiceLifetime.Scoped); } + [Fact] + public async Task RegistersScopedBulkSourceCacheAsync() + { + await using var scope = await InitializeMigrationScopeAsync(); + + AssertService>(scope, ServiceLifetime.Scoped); + } + [Fact] public async Task RegistersScopedSourceContentFinderAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService>(scope, ServiceLifetime.Scoped); } @@ -267,7 +301,7 @@ public async Task RegistersScopedSourceContentFinderAsync() [Fact] public async Task RegistersScopedSourceContentFinderFactoryAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService(scope, ServiceLifetime.Scoped); } @@ -275,7 +309,7 @@ public async Task RegistersScopedSourceContentFinderFactoryAsync() [Fact] public async Task RegistersScopedMigrationFileStoreAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService(scope, ServiceLifetime.Scoped); } @@ -283,7 +317,7 @@ public async Task RegistersScopedMigrationFileStoreAsync() [Fact] public async Task RegistersScopedFileStoreAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService(scope, ServiceLifetime.Scoped); } @@ -291,7 +325,7 @@ public async Task RegistersScopedFileStoreAsync() [Fact] public async Task RegistersScopedTableauServerConnectionUrlTransformerAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService(scope, ServiceLifetime.Scoped); } @@ -299,7 +333,7 @@ public async Task RegistersScopedTableauServerConnectionUrlTransformerAsync() [Fact] public async Task RegistersScopedProjectCacheAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService(scope, ServiceLifetime.Scoped); } @@ -307,7 +341,7 @@ public async Task RegistersScopedProjectCacheAsync() [Fact] public async Task RegistersScopedPreviouslyMigratedFilterAsync() { - await using var scope = InitializeMigrationScope(); + await using var scope = await InitializeMigrationScopeAsync(); AssertService>(scope, ServiceLifetime.Scoped); } diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/MigrationDirectoryContentFileStoreTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/MigrationDirectoryContentFileStoreTests.cs index 3449778..a2682e6 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/MigrationDirectoryContentFileStoreTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/MigrationDirectoryContentFileStoreTests.cs @@ -16,44 +16,43 @@ using System; using 
System.IO; -using System.IO.Abstractions; +using Microsoft.Extensions.DependencyInjection; using Moq; -using Tableau.Migration.Config; -using Tableau.Migration.Content.Files; using Tableau.Migration.Engine; +using Tableau.Migration.Tests.Unit.Content.Files; using Xunit; namespace Tableau.Migration.Tests.Unit.Engine { public class MigrationDirectoryContentFileStoreTests { - public class TestFileStore : MigrationDirectoryContentFileStore + public abstract class MigrationDirectoryContentFileStoreTest : DirectoryContentFileStoreTestBase { - public TestFileStore(IFileSystem fileSystem, IContentFilePathResolver pathResolver, IConfigReader configReader, IMigrationInput migrationInput) - : base(fileSystem, pathResolver, configReader, migrationInput) - { } + protected readonly Mock MockMigrationInput = new(); - public string PublicBaseStorePath => BaseStorePath; + protected readonly Guid MigrationId = Guid.NewGuid(); + public MigrationDirectoryContentFileStoreTest() + { + MockMigrationInput.SetupGet(i => i.MigrationId).Returns(MigrationId); + } + + protected override IServiceCollection ConfigureServices(IServiceCollection services) + { + return services + .Replace(MockMigrationInput); + } + + protected override MigrationDirectoryContentFileStore CreateFileStore() + => Services.GetRequiredService(); } - public class Ctor : AutoFixtureTestBase + public class Ctor : MigrationDirectoryContentFileStoreTest { [Fact] public void UsesMigrationSubDirectory() { - var rootPath = Create(); - - var config = Freeze(); - config.Files.RootPath = rootPath; - - var migrationId = Guid.NewGuid(); - var mockInput = Freeze>(); - mockInput.SetupGet(x => x.MigrationId).Returns(migrationId); - - var fs = Create(); - - Assert.Equal(Path.Combine(rootPath, $"migration-{migrationId:N}"), fs.PublicBaseStorePath); + Assert.Equal(Path.Combine(RootPath, $"migration-{MigrationId:N}"), BaseStorePath); } } } diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Migrators/ContentMigratorTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Migrators/ContentMigratorTests.cs index a79400a..0d6fa53 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/Migrators/ContentMigratorTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/Migrators/ContentMigratorTests.cs @@ -49,13 +49,7 @@ public TestContentMigrator( IMigrationHookRunner hooks, IContentMappingRunner mappings, IContentFilterRunner filters) - : base( - pipeline, - migration, - configReader, - hooks, - mappings, - filters) + : base(pipeline, migration, configReader, hooks, mappings, filters) { } new public int BatchSize => base.BatchSize; @@ -85,9 +79,9 @@ public class ContentMigratorTest : AutoFixtureTestBase public ContentMigratorTest() { BatchSize = 2; - MockConfigReader = Freeze>(); - MockConfigReader.Setup(x => x.Get()).Returns(() => new MigrationSdkOptions { BatchSize = BatchSize }); + MockConfigReader.Setup(x => x.Get()) + .Returns(() => new ContentTypesOptions() { BatchSize = BatchSize }); SourceContent = new() { @@ -179,7 +173,7 @@ public void GetsConfigBatchSize() var batchSize = Migrator.BatchSize; Assert.Equal(BatchSize, batchSize); - MockConfigReader.Verify(x => x.Get(), Times.Once); + MockConfigReader.Verify(x => x.Get(), Times.Once); } } diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineBaseTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineBaseTests.cs index 05b406c..2546116 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineBaseTests.cs +++ 
b/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineBaseTests.cs @@ -130,6 +130,22 @@ public void CreatesDefaultSourceItemPreparer() #endregion + #region - CreateSourceCache - + + public class CreateSourceCache : MigrationPipelineBaseTest + { + [Fact] + public void CreatesSourceTypedDestinationCache() + { + var cache = Pipeline.CreateSourceCache(); + + Assert.IsType>(cache); + MockServices.Verify(x => x.GetService(typeof(BulkSourceCache)), Times.Once); + } + } + + #endregion + #region - CreateDestinationCache - public class CreateDestinationCache : MigrationPipelineBaseTest @@ -137,10 +153,10 @@ public class CreateDestinationCache : MigrationPipelineBaseTest [Fact] public void CreatesDefaultTypedDestinationCache() { - var cache = Pipeline.CreateDestinationCache(); + var cache = Pipeline.CreateDestinationCache(); - Assert.IsType>(cache); - MockServices.Verify(x => x.GetService(typeof(BulkDestinationCache)), Times.Once); + Assert.IsType>(cache); + MockServices.Verify(x => x.GetService(typeof(BulkDestinationCache)), Times.Once); } [Fact] diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineContentTypeExtensionsTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineContentTypeExtensionsTests.cs index a009ef6..6620348 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineContentTypeExtensionsTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineContentTypeExtensionsTests.cs @@ -14,6 +14,9 @@ // limitations under the License. // +using System; +using System.Collections.Generic; +using AutoFixture; using Tableau.Migration.Engine.Pipelines; using Xunit; @@ -21,20 +24,33 @@ namespace Tableau.Migration.Tests.Unit.Engine.Pipelines { public class MigrationPipelineContentTypeExtensionsTests { - public class WithContentTypeInterface + public abstract class MigrationPipelineContentTypeExtensionsTest : AutoFixtureTestBase { + private static readonly IFixture _fixture = CreateFixture(); + + protected static MigrationPipelineContentType CreateContentType( + Type contentType, + Type? publishType = null, + Type? resultType = null) + => new MigrationPipelineContentType(contentType) + .WithPublishType(publishType ?? _fixture.Create()) + .WithResultType(resultType ?? 
_fixture.Create()); + } + + public class WithContentTypeInterface : MigrationPipelineContentTypeExtensionsTest + { + protected static readonly IEnumerable ContentTypes = new[] + { + CreateContentType(typeof(TestContentType)), + CreateContentType(typeof(object)) + }; + public class NonGeneric { [Fact] public void Finds_types() { - var contentTypes = new[] - { - new MigrationPipelineContentType(typeof(TestContentType), typeof(object)), - new MigrationPipelineContentType(typeof(object), typeof(TestContentType)) - }; - - var results = contentTypes.WithContentTypeInterface(typeof(IContentReference)); + var results = ContentTypes.WithContentTypeInterface(typeof(IContentReference)); var result = Assert.Single(results); @@ -47,13 +63,7 @@ public class Generic [Fact] public void Finds_types() { - var contentTypes = new[] - { - new MigrationPipelineContentType(typeof(TestContentType), typeof(object)), - new MigrationPipelineContentType(typeof(object), typeof(TestContentType)) - }; - - var results = contentTypes.WithContentTypeInterface(); + var results = ContentTypes.WithContentTypeInterface(); var result = Assert.Single(results); @@ -62,20 +72,20 @@ public void Finds_types() } } - public class WithPublishTypeInterface + public class WithPublishTypeInterface : MigrationPipelineContentTypeExtensionsTest { + protected static readonly IEnumerable ContentTypes = new[] + { + CreateContentType(typeof(TestContentType)), + CreateContentType(typeof(object), publishType: typeof(TestContentType)) + }; + public class NonGeneric { [Fact] public void Finds_types() { - var contentTypes = new[] - { - new MigrationPipelineContentType(typeof(TestContentType), typeof(object)), - new MigrationPipelineContentType(typeof(object), typeof(TestContentType)) - }; - - var results = contentTypes.WithPublishTypeInterface(typeof(IContentReference)); + var results = ContentTypes.WithPublishTypeInterface(typeof(IContentReference)); var result = Assert.Single(results); @@ -88,13 +98,7 @@ public class Generic [Fact] public void Finds_types() { - var contentTypes = new[] - { - new MigrationPipelineContentType(typeof(TestContentType), typeof(object)), - new MigrationPipelineContentType(typeof(object), typeof(TestContentType)) - }; - - var results = contentTypes.WithPublishTypeInterface(); + var results = ContentTypes.WithPublishTypeInterface(); var result = Assert.Single(results); @@ -103,20 +107,20 @@ public void Finds_types() } } - public class WithPostPublishTypeInterface + public class WithPostPublishTypeInterface : MigrationPipelineContentTypeExtensionsTest { + protected static readonly IEnumerable ContentTypes = new[] + { + CreateContentType(typeof(TestContentType)), + CreateContentType(typeof(object), publishType: typeof(TestContentType), resultType: typeof(float)) + }; + public class NonGeneric { [Fact] public void Finds_types() { - var contentTypes = new[] - { - new MigrationPipelineContentType(typeof(TestContentType), typeof(object), typeof(int)), - new MigrationPipelineContentType(typeof(object), typeof(TestContentType), typeof(float)) - }; - - var results = contentTypes.WithPostPublishTypeInterface(typeof(IContentReference)); + var results = ContentTypes.WithPostPublishTypeInterface(typeof(IContentReference)); var result = Assert.Single(results); @@ -129,13 +133,7 @@ public class Generic [Fact] public void Finds_types() { - var contentTypes = new[] - { - new MigrationPipelineContentType(typeof(TestContentType), typeof(object), typeof(int)), - new MigrationPipelineContentType(typeof(object), typeof(TestContentType), 
typeof(float)) - }; - - var results = contentTypes.WithPostPublishTypeInterface(); + var results = ContentTypes.WithPostPublishTypeInterface(); var result = Assert.Single(results); diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineContentTypeTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineContentTypeTests.cs index 359a635..f8e99b2 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineContentTypeTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineContentTypeTests.cs @@ -14,6 +14,9 @@ // limitations under the License. // +using System; +using System.Collections.Immutable; +using System.Linq; using Tableau.Migration.Engine.Pipelines; using Xunit; @@ -21,52 +24,90 @@ namespace Tableau.Migration.Tests.Unit.Engine.Pipelines { public class MigrationPipelineContentTypeTests { - public class Ctor + public abstract class MigrationPipelineContentTypeTest : AutoFixtureTestBase + { + protected Type CreateType() => Create(); + + protected IImmutableList CreateTypes(int createCount, params Type[] explicitTypes) + => explicitTypes.Concat(CreateMany(createCount)).ToImmutableArray(); + + protected MigrationPipelineContentType CreateContentType(Type? contentType = null) => new(contentType ?? CreateType()); + + protected static void AssertTypes(MigrationPipelineContentType result, Type contentType, Type publishType, Type resultType) + { + Assert.Same(contentType, result.ContentType); + Assert.Same(publishType, result.PublishType); + Assert.Same(resultType, result.ResultType); + } + } + + public class Ctor : MigrationPipelineContentTypeTest + { + [Fact] + public void Content_only() + { + var type = CreateType(); + + var t = new MigrationPipelineContentType(type); + + AssertTypes(t, type, type, type); + } + } + + public class WithPublishType : MigrationPipelineContentTypeTest { [Fact] public void Different_types() { - var type1 = typeof(object); - var type2 = typeof(string); + var contentType = CreateType(); + var publishType = CreateType(); - var t = new MigrationPipelineContentType(type1, type2); + var t = CreateContentType(contentType).WithPublishType(publishType); - Assert.Same(type1, t.ContentType); - Assert.Same(type2, t.PublishType); + AssertTypes(t, contentType, publishType, contentType); } [Fact] - public void Different_content_and_publish_types() + public void Same_types() { - var type1 = typeof(object); - var type2 = typeof(string); + var type = CreateType(); - var t = new MigrationPipelineContentType(type1, type2); + var t = CreateContentType(type).WithPublishType(type); - Assert.Same(type1, t.ContentType); - Assert.Same(type2, t.PublishType); - Assert.Same(type1, t.ResultType); + AssertTypes(t, type, type, type); } + } + public class WithResultType : MigrationPipelineContentTypeTest + { [Fact] - public void Same_content_and_publish_types() + public void Different_types() { - var type = typeof(object); + var contentType = CreateType(); + var resultType = CreateType(); - var t = new MigrationPipelineContentType(type); + var t = CreateContentType(contentType).WithResultType(resultType); + + AssertTypes(t, contentType, contentType, resultType); + } + + [Fact] + public void Same_types() + { + var type = CreateType(); + + var t = CreateContentType(type).WithResultType(type); - Assert.Same(type, t.ContentType); - Assert.Same(type, t.PublishType); - Assert.Same(type, t.ResultType); + AssertTypes(t, type, type, type); } } - public class GetContentTypeForInterface + public class 
GetContentTypeForInterface : MigrationPipelineContentTypeTest { [Fact] public void Returns_null_when_not_found() { - var t = new MigrationPipelineContentType(typeof(object), typeof(TestContentType)); + var t = new MigrationPipelineContentType(typeof(object)); Assert.Null(t.GetContentTypeForInterface(typeof(IContentReference))); } @@ -76,18 +117,18 @@ public void Returns_content_type_when_found() { var type = typeof(TestContentType); - var t = new MigrationPipelineContentType(type, typeof(object)); + var t = new MigrationPipelineContentType(type); Assert.Equal(new[] { type }, t.GetContentTypeForInterface(typeof(IContentReference))); } } - public class GetPublishTypeForInterface + public class GetPublishTypeForInterface : MigrationPipelineContentTypeTest { [Fact] public void Returns_null_when_not_found() { - var t = new MigrationPipelineContentType(typeof(TestContentType), typeof(object)); + var t = new MigrationPipelineContentType(typeof(TestContentType)).WithPublishType(typeof(object)); Assert.Null(t.GetPublishTypeForInterface(typeof(IContentReference))); } @@ -97,18 +138,18 @@ public void Returns_publish_type_when_found() { var type = typeof(TestContentType); - var t = new MigrationPipelineContentType(typeof(object), type); + var t = new MigrationPipelineContentType(typeof(object)).WithPublishType(type); Assert.Equal(new[] { type }, t.GetPublishTypeForInterface(typeof(IContentReference))); } } - public class GetPostPublishTypesForInterface + public class GetPostPublishTypesForInterface : MigrationPipelineContentTypeTest { [Fact] public void Returns_null_when_not_found() { - var t = new MigrationPipelineContentType(typeof(TestContentType), typeof(object)); + var t = new MigrationPipelineContentType(typeof(TestContentType)).WithPublishType(typeof(object)); Assert.Null(t.GetPostPublishTypesForInterface(typeof(IContentReference))); } @@ -118,10 +159,25 @@ public void Returns_publish_type_when_found() { var type = typeof(TestContentType); - var t = new MigrationPipelineContentType(typeof(object), type, typeof(int)); + var t = new MigrationPipelineContentType(typeof(object)).WithPublishType(type).WithResultType(typeof(int)); Assert.Equal(new[] { type, typeof(int) }, t.GetPostPublishTypesForInterface(typeof(IContentReference))); } } + + public class GetConfigKey + { + [Fact] + public void Returns_config_keys() + { + var pipelineContentTypes = ServerToCloudMigrationPipeline.ContentTypes; + + foreach(var pipelineContentType in pipelineContentTypes) + { + Assert.NotNull(pipelineContentType); + Assert.Equal(pipelineContentType.ContentType.Name, $"I{pipelineContentType.GetConfigKey()}"); + } + } + } } } diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineFactoryTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineFactoryTests.cs index ad5ebcf..0be6d1b 100644 --- a/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineFactoryTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineFactoryTests.cs @@ -16,6 +16,7 @@ using System; using Moq; +using Tableau.Migration.Config; using Tableau.Migration.Engine.Pipelines; using Xunit; @@ -27,19 +28,23 @@ public class Create : AutoFixtureTestBase { private readonly MockServiceProvider _mockServices; + private readonly IConfigReader _mockConfigReader; + private readonly MigrationPipelineFactory _factory; public Create() { _mockServices = Create(); + _mockConfigReader = Create(); + _factory = new(_mockServices.Object); } [Fact] public void 
CreatesServerToCloudMigration()
            {
-                var pipeline = new ServerToCloudMigrationPipeline(_mockServices.Object);
+                var pipeline = new ServerToCloudMigrationPipeline(_mockServices.Object, _mockConfigReader);

                 _mockServices.Setup(x => x.GetService(typeof(ServerToCloudMigrationPipeline))).Returns(pipeline);

                 var mockPlan = Create>();
diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineTestBase.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineTestBase.cs
index fcee367..a4a1a1d 100644
--- a/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineTestBase.cs
+++ b/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/MigrationPipelineTestBase.cs
@@ -16,8 +16,10 @@
 using System;
 using System.Threading;
+using AutoFixture;
 using Moq;
 using Tableau.Migration.Engine.Actions;
+using Tableau.Migration.Engine.Endpoints;
 using Tableau.Migration.Engine.Hooks;

 namespace Tableau.Migration.Tests.Unit.Engine.Pipelines
@@ -26,9 +28,14 @@ public class MigrationPipelineTestBase : AutoFixtureTestBase
     {
         protected readonly Mock MockServices;
         protected readonly Mock MockHookRunner;
+        protected readonly Mock MockDestinationEndpoint;
+        protected readonly Mock MockSourceEndpoint;

         protected readonly TPipeline Pipeline;

+        protected virtual TPipeline CreatePipeline()
+            => Create();
+
         public MigrationPipelineTestBase()
         {
             MockServices = Freeze();
@@ -38,7 +45,13 @@ public MigrationPipelineTestBase()
             MockHookRunner.Setup(x => x.ExecuteAsync(It.IsAny(), Cancel))
                 .ReturnsAsync((IMigrationActionResult r, CancellationToken c) => r);

-            Pipeline = Create();
+            MockDestinationEndpoint = Freeze>();
+            MockSourceEndpoint = Freeze>();
+
+            AutoFixture.Register(() => MockDestinationEndpoint.Object);
+            AutoFixture.Register(() => MockSourceEndpoint.Object);
+
+            Pipeline = CreatePipeline();
         }
     }
 }
diff --git a/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/ServerToCloudMigrationPipelineTests.cs b/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/ServerToCloudMigrationPipelineTests.cs
index 189789b..658bc72 100644
--- a/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/ServerToCloudMigrationPipelineTests.cs
+++ b/tests/Tableau.Migration.Tests/Unit/Engine/Pipelines/ServerToCloudMigrationPipelineTests.cs
@@ -19,6 +19,7 @@
 using System.Collections.Immutable;
 using System.Linq;
 using Moq;
+using Tableau.Migration.Config;
 using Tableau.Migration.Content;
 using Tableau.Migration.Engine.Actions;
 using Tableau.Migration.Engine.Migrators.Batch;
@@ -127,6 +128,33 @@ public void CreatesDefaultBatchMigrator()
             MockServices.Verify(x => x.GetService(typeof(ItemPublishContentBatchMigrator)), Times.Once);
         }

+        [Fact]
+        public void CreatesDefaultUserBatchMigrator()
+        {
+            var migrator = Pipeline.GetBatchMigrator();
+
+            Assert.IsType>(migrator);
+            MockServices.Verify(x => x.GetService(typeof(ItemPublishContentBatchMigrator)), Times.Once);
+        }
+    }
+
+    public class GetUserBatchMigrator : MigrationPipelineTestBase
+    {
+        protected override ServerToCloudMigrationPipeline CreatePipeline()
+        {
+            var config = new ContentTypesOptions
+            {
+                BatchPublishingEnabled = true
+            };
+
+            var mockConfigReader = Freeze>();
+
+            mockConfigReader.Setup(x => x.Get())
+                .Returns(config);
+
+            return base.CreatePipeline();
+        }
+
+        [Fact]
+        public void CreatesUserBatchMigrator()
+        {
diff --git a/tests/Tableau.Migration.Tests/Unit/FilePathTests.cs b/tests/Tableau.Migration.Tests/Unit/FilePathTests.cs
new file mode 100644
index 0000000..95398fd
--- /dev/null
+++ b/tests/Tableau.Migration.Tests/Unit/FilePathTests.cs
@@ -0,0 +1,87 @@
+// Copyright (c) 2023, Salesforce, Inc.
+// SPDX-License-Identifier: Apache-2
+//
+// Licensed under the Apache License, Version 2.0 (the "License")
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+using System;
+using System.Collections.Immutable;
+using System.IO;
+using Xunit;
+
+namespace Tableau.Migration.Tests.Unit
+{
+    public class FilePathTests
+    {
+        public abstract class FilePathTest : AutoFixtureTestBase
+        {
+            protected string CreateFilePath(string? extension)
+                => Path.Combine($@"{Create()}:\", CreateString(), CreateString(), CreateFileName(extension));
+
+            protected string CreateFileName(string? extension)
+                => $"{CreateString()}{(!String.IsNullOrWhiteSpace(extension) ? $".{extension}" : String.Empty)}";
+
+            public class ZipExtensionsData : ValuesAttribute
+            {
+                private static readonly IImmutableSet _zipExtensions;
+
+                static ZipExtensionsData()
+                {
+                    var zipExtensions = ImmutableSortedSet.CreateBuilder();
+
+                    foreach (var zipExtension in FilePath.ZipExtensions)
+                        zipExtensions.AddRange(new[] { zipExtension.ToLower(), zipExtension.ToUpper() });
+
+                    _zipExtensions = zipExtensions.ToImmutable();
+                }
+
+                public ZipExtensionsData()
+                    : base(_zipExtensions)
+                { }
+            }
+        }
+
+        public class IsZipFile : FilePathTest
+        {
+            [Theory]
+            [ZipExtensionsData]
+            public void True(string extension)
+            {
+                AssertIsZipFile(extension, true);
+            }
+
+            [Theory]
+            [Values("abc", "123", "twb", "tds")]
+            public void False(string extension)
+            {
+                AssertIsZipFile(extension, false);
+            }
+
+            [Theory]
+            [NullEmptyWhiteSpaceData]
+            public void Null(string? extension)
+            {
+                AssertIsZipFile(extension, null);
+            }
+
+            private void AssertIsZipFile(string? extension, bool?
expected) + { + var filePath = CreateFilePath(extension); + + var result = new FilePath(filePath); + + Assert.Equal(expected, result.IsZipFile); + } + } + } +} diff --git a/tests/Tableau.Migration.Tests/Unit/IContentReferenceFinderFactoryExtensions.cs b/tests/Tableau.Migration.Tests/Unit/IContentReferenceFinderFactoryExtensions.cs index 15f607a..ed9e4aa 100644 --- a/tests/Tableau.Migration.Tests/Unit/IContentReferenceFinderFactoryExtensions.cs +++ b/tests/Tableau.Migration.Tests/Unit/IContentReferenceFinderFactoryExtensions.cs @@ -25,7 +25,7 @@ public static class IContentReferenceFinderFactoryExtensions public static Mock> SetupMockFinder( this Mock mockFinderFactory, IFixture autoFixture) - where TContent : IContentReference + where TContent : class, IContentReference { var mockFinder = autoFixture.Create>>(); mockFinderFactory.Setup(x => x.ForContentType()).Returns(mockFinder.Object); diff --git a/tests/Tableau.Migration.Tests/Unit/Net/HttpStreamProcessorTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/HttpStreamProcessorTests.cs index 9363020..ee6e9e7 100644 --- a/tests/Tableau.Migration.Tests/Unit/Net/HttpStreamProcessorTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Net/HttpStreamProcessorTests.cs @@ -21,7 +21,6 @@ using System.Net; using System.Net.Http; using System.Runtime.InteropServices; -using System.Text; using System.Threading; using System.Threading.Tasks; using Moq; @@ -161,7 +160,7 @@ public ProcessAsync() public async Task SendsRequests() { // Arrange - using var memoryStream = new MemoryStream(Encoding.UTF8.GetBytes("Test")); + using var memoryStream = new MemoryStream(Constants.DefaultEncoding.GetBytes("Test")); OnRequestCreated += (o, r) => SetupResponse(r, new MockHttpResponseMessage(HttpStatusCode.OK, new()).Object); diff --git a/tests/Tableau.Migration.Tests/Unit/Net/IServiceCollectionExtensionsTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/IServiceCollectionExtensionsTests.cs index 987da71..f2a7653 100644 --- a/tests/Tableau.Migration.Tests/Unit/Net/IServiceCollectionExtensionsTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Net/IServiceCollectionExtensionsTests.cs @@ -18,12 +18,11 @@ using System.Net.Http; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Http; using Tableau.Migration.Api; using Tableau.Migration.Config; using Tableau.Migration.Net; using Tableau.Migration.Net.Handlers; -using Tableau.Migration.Net.Policies; +using Tableau.Migration.Net.Resilience; using Tableau.Migration.Net.Rest; using Xunit; @@ -51,34 +50,10 @@ public async Task Registers_expected_services() await AssertServiceAsync(ServiceLifetime.Singleton); await AssertServiceAsync(ServiceLifetime.Singleton); await AssertServiceAsync(ServiceLifetime.Transient); - await AssertServiceAsync(ServiceLifetime.Scoped); - await AssertServiceAsync(ServiceLifetime.Scoped); - await AssertServiceAsync(ServiceLifetime.Transient); await AssertServiceAsync(ServiceLifetime.Transient); await AssertServiceAsync(ServiceLifetime.Transient); await AssertServiceAsync(ServiceLifetime.Transient); await AssertServiceAsync(ServiceLifetime.Transient); - await AssertServiceAsync(ServiceLifetime.Scoped); - await AssertServiceAsync(ServiceLifetime.Scoped); - await AssertServiceAsync(ServiceLifetime.Scoped); - await AssertServiceAsync(ServiceLifetime.Scoped); - await AssertServiceAsync(ServiceLifetime.Scoped); - await AssertServiceAsync(ServiceLifetime.Scoped); - await AssertServiceAsync(ServiceLifetime.Scoped); - await 
AssertServiceAsync(ServiceLifetime.Scoped); - await AssertServiceAsync(ServiceLifetime.Scoped); - - await using var scope = ServiceProvider.CreateAsyncScope(); - var policies = scope.ServiceProvider.GetServices(); - - Assert.NotNull(policies); - Assert.Equal(5, policies.Count()); - Assert.Collection(policies, - policy => Assert.IsType(policy), - policy => Assert.IsType(policy), - policy => Assert.IsType(policy), - policy => Assert.IsType(policy), - policy => Assert.IsType(policy)); var defaultHttpClientFactoryType = Migration.Net.IServiceCollectionExtensions.GetDefaultHttpClientFactoryType(); await AssertServiceAsync(defaultHttpClientFactoryType, ServiceLifetime.Scoped); @@ -93,6 +68,17 @@ public async Task Registers_expected_services() await AssertServiceAsync(ServiceLifetime.Scoped); await AssertServiceAsync(ServiceLifetime.Scoped); await AssertServiceAsync(ServiceLifetime.Scoped); + + var strategyBuilders = ServiceProvider.GetServices(); + + Assert.NotNull(strategyBuilders); + Assert.Equal(5, strategyBuilders.Count()); + Assert.Collection(strategyBuilders, + b => Assert.IsType(b), + b => Assert.IsType(b), + b => Assert.IsType(b), + b => Assert.IsType(b), + b => Assert.IsType(b)); } [Fact] diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Policies/ClientThrottlePolicyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Policies/ClientThrottlePolicyBuilderTests.cs deleted file mode 100644 index 04fddca..0000000 --- a/tests/Tableau.Migration.Tests/Unit/Net/Policies/ClientThrottlePolicyBuilderTests.cs +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -using System; -using System.Net.Http; -using Moq; -using Polly.RateLimit; -using Tableau.Migration.Config; -using Tableau.Migration.Net.Policies; -using Xunit; - -namespace Tableau.Migration.Tests.Unit.Net.Policies; - -public class ClientThrottlePolicyBuilderTests -{ - private readonly Mock _mockedConfigReader; - private readonly MigrationSdkOptions _sdkOptions; - private readonly ClientThrottlePolicyBuilder _builder; - - public ClientThrottlePolicyBuilderTests() - { - _mockedConfigReader = new Mock(); - _sdkOptions = new MigrationSdkOptions(); - _mockedConfigReader - .Setup(x => x.Get()) - .Returns(_sdkOptions); - _builder = new ClientThrottlePolicyBuilder( - _mockedConfigReader.Object); - } - - [Fact] - public void BuildPolicy_ReturnsDefaultPolicy() - { - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.Null(policy); - } - - [Fact] - public void BuildPolicy_EnableClientThrottling_ReturnsDefaultPolicyForRead() - { - // Arrange - _sdkOptions.Network.Resilience.ClientThrottleEnabled = true; - - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.NotNull(policy); - Assert.IsType>(policy); - ClientThrottlePolicyBuilderTests.AssertLimiterProperties( - (AsyncRateLimitPolicy)policy, - _sdkOptions.Network.Resilience.MaxReadRequests, - _sdkOptions.Network.Resilience.MaxReadRequestsInterval, - _sdkOptions.Network.Resilience.MaxBurstReadRequests); - } - - [Fact] - public void BuildPolicy_CustomReadConfiguration_ReturnsPolicyForRead() - { - // Arrange - _sdkOptions.Network.Resilience.ClientThrottleEnabled = true; - _sdkOptions.Network.Resilience.MaxReadRequests = 100; - _sdkOptions.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromSeconds(1); - _sdkOptions.Network.Resilience.MaxBurstReadRequests = 1; - - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.NotNull(policy); - Assert.IsType>(policy); - ClientThrottlePolicyBuilderTests.AssertLimiterProperties( - (AsyncRateLimitPolicy)policy, - _sdkOptions.Network.Resilience.MaxReadRequests, - _sdkOptions.Network.Resilience.MaxReadRequestsInterval, - _sdkOptions.Network.Resilience.MaxBurstReadRequests); - } - - [Fact] - public void BuildPolicy_EnableClientThrottling_ReturnsDefaultPolicyForPublish() - { - // Arrange - _sdkOptions.Network.Resilience.ClientThrottleEnabled = true; - - // Act - var policy = _builder.Build( - new HttpRequestMessage( - HttpMethod.Put, - (Uri?)null)); - - // Assert - Assert.NotNull(policy); - Assert.IsType>(policy); - ClientThrottlePolicyBuilderTests.AssertLimiterProperties( - (AsyncRateLimitPolicy)policy, - _sdkOptions.Network.Resilience.MaxPublishRequests, - _sdkOptions.Network.Resilience.MaxPublishRequestsInterval, - _sdkOptions.Network.Resilience.MaxBurstPublishRequests); - } - - [Fact] - public void BuildPolicy_CustomPublishConfiguration_ReturnsPolicyForPublish() - { - // Arrange - _sdkOptions.Network.Resilience.ClientThrottleEnabled = true; - _sdkOptions.Network.Resilience.MaxPublishRequests = 60; - _sdkOptions.Network.Resilience.MaxPublishRequestsInterval = TimeSpan.FromMinutes(2); - _sdkOptions.Network.Resilience.MaxBurstPublishRequests = 10; - - // Act - var policy = _builder.Build( - new HttpRequestMessage( - HttpMethod.Put, - (Uri?)null)); - - // Assert - Assert.NotNull(policy); - Assert.IsType>(policy); - ClientThrottlePolicyBuilderTests.AssertLimiterProperties( - (AsyncRateLimitPolicy)policy, - _sdkOptions.Network.Resilience.MaxPublishRequests, - 
_sdkOptions.Network.Resilience.MaxPublishRequestsInterval, - _sdkOptions.Network.Resilience.MaxBurstPublishRequests); - } - - private static void AssertLimiterProperties( - AsyncRateLimitPolicy policy, - long rateLimit, - TimeSpan rateLimitInterval, - long burstRateLimit) - { - var limiter = policy.GetFieldValue("_rateLimiter"); - - Assert.NotNull(limiter); - - var addTokenTickIntervalObject = limiter.GetFieldValue("addTokenTickInterval"); - var bucketCapacityObject = limiter.GetFieldValue("bucketCapacity"); - var currentTokensObject = limiter.GetFieldValue("currentTokens"); - - Assert.NotNull(addTokenTickIntervalObject); - Assert.NotNull(bucketCapacityObject); - Assert.NotNull(currentTokensObject); - Assert.IsType(addTokenTickIntervalObject); - Assert.IsType(bucketCapacityObject); - Assert.IsType(currentTokensObject); - - Assert.Equal(burstRateLimit, (long)bucketCapacityObject); - Assert.Equal(rateLimitInterval.Ticks / rateLimit, (long)addTokenTickIntervalObject); - Assert.Equal(burstRateLimit, (long)currentTokensObject); - } -} diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Policies/HttpPolicyWrapBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Policies/HttpPolicyWrapBuilderTests.cs deleted file mode 100644 index 0048172..0000000 --- a/tests/Tableau.Migration.Tests/Unit/Net/Policies/HttpPolicyWrapBuilderTests.cs +++ /dev/null @@ -1,300 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -using System; -using System.Collections.Generic; -using System.Net.Http; -using Microsoft.Extensions.DependencyInjection; -using Moq; -using Polly; -using Polly.Bulkhead; -using Polly.RateLimit; -using Polly.Retry; -using Polly.Timeout; -using Polly.Wrap; -using Tableau.Migration.Config; -using Tableau.Migration.Net.Policies; -using Xunit; - -namespace Tableau.Migration.Tests.Unit.Net.Policies; - -public class HttpPolicyWrapBuilderTests -{ - private readonly Mock _mockedConfigReader; - private readonly MigrationSdkOptions _sdkOptions; - private readonly IServiceCollection _services; - - #region - GetPolicyWrapCases - - - public static IEnumerable GetPolicyWrapCases() - { - // GetRequestPolicies_ReturnsDefaultPolicy - yield return new object[] - { - // assertPolicy - (IAsyncPolicy policy) => - { - var wrap = Assert.IsType>(policy); - - Assert.IsType>(wrap.Outer); - - wrap = Assert.IsType>(wrap.Inner); - - Assert.IsType>(wrap.Outer); - Assert.IsType>(wrap.Inner); - } - }; - - // GetRequestPolicies_ReturnsDefaultPolicy - yield return new object[] - { - // assertPolicy - (IAsyncPolicy policy) => - { - var wrap = Assert.IsType>(policy); - - Assert.IsType>(wrap.Outer); - Assert.IsType>(wrap.Inner); - }, - // setConfiguration - (MigrationSdkOptions options) => - { - options.Network.Resilience.ServerThrottleEnabled = false; - } - }; - - // GetRequestPolicies_DisableRetry_ReturnsTimeout - yield return new object[] - { - // assertPolicy - (IAsyncPolicy policy) => - { - var wrap = Assert.IsType>(policy); - - Assert.IsType>(wrap.Outer); - Assert.IsType>(wrap.Inner); - }, - // setConfiguration - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryEnabled = false; - } - }; - - // GetRequestPolicies_EmptyRetryIntervals_ReturnsTimeout - yield return new object[] - { - // assertPolicy - (IAsyncPolicy policy) => - { - var wrap = Assert.IsType>(policy); - - Assert.IsType>(wrap.Outer); - Assert.IsType>(wrap.Inner); - }, - // setConfiguration - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = Array.Empty(); - } - }; - - // GetRequestPolicies_EnableMaxConcurrency_ReturnsBulkheadPolicy - yield return new object[] - { - // assertPolicy - (IAsyncPolicy policy) => - { - var wrap = Assert.IsType>(policy); - - Assert.IsType>(wrap.Outer); - - wrap = Assert.IsType>(wrap.Inner); - - Assert.IsType>(wrap.Outer); - Assert.IsType>(wrap.Inner); - }, - // setConfiguration - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryEnabled = false; - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - } - }; - - // GetRequestPolicies_EnableMaxConcurrencyAndRetry_ReturnsPoliciesWrapped - yield return new object[] - { - // assertPolicy - (IAsyncPolicy policy) => - { - var wrap = Assert.IsType>(policy); - - Assert.IsType>(wrap.Outer); - - wrap = Assert.IsType>(wrap.Inner); - - Assert.IsType>(wrap.Outer); - - wrap = Assert.IsType>(wrap.Inner); - - Assert.IsType>(wrap.Outer); - Assert.IsType>(wrap.Inner); - }, - // setConfiguration - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryEnabled = true; - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - } - }; - - // GetRequestPolicies_EnableClientThrottle_ReturnsRateLimitPolicy - yield return new object[] - { - // assertPolicy - (IAsyncPolicy policy) => - { - var wrap = Assert.IsType>(policy); - - Assert.IsType>(wrap.Outer); - - wrap = Assert.IsType>(wrap.Inner); - - Assert.IsType>(wrap.Outer); - Assert.IsType>(wrap.Inner); - }, - // setConfiguration - 
(MigrationSdkOptions options) => - { - options.Network.Resilience.RetryEnabled = false; - options.Network.Resilience.ClientThrottleEnabled = true; - } - }; - - // GetRequestPolicies_EnableClientThrottle_ReturnsRateLimitPolicy - yield return new object[] - { - // assertPolicy - (IAsyncPolicy policy) => - { - var wrap = Assert.IsType>(policy); - - Assert.IsType>(wrap.Outer); - - wrap = Assert.IsType>(wrap.Inner); - - Assert.IsType>(wrap.Outer); - - wrap = Assert.IsType>(wrap.Inner); - - Assert.IsType>(wrap.Outer); - Assert.IsType>(wrap.Inner); - }, - // setConfiguration - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryEnabled = true; - options.Network.Resilience.ClientThrottleEnabled = true; - } - }; - - // GetRequestPolicies_EnableAll_ReturnsPoliciesWrapped - yield return new object[] - { - // assertPolicy - (IAsyncPolicy policy) => - { - var wrap = Assert.IsType>(policy); - - Assert.IsType>(wrap.Outer); - - wrap = Assert.IsType>(wrap.Inner); - - Assert.IsType>(wrap.Outer); - - wrap = Assert.IsType>(wrap.Inner); - - Assert.IsType>(wrap.Outer); - - wrap = Assert.IsType>(wrap.Inner); - - Assert.IsType>(wrap.Outer); - Assert.IsType>(wrap.Inner); - }, - // setConfiguration - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryEnabled = true; - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.ClientThrottleEnabled = true; - } - }; - } - - #endregion - - public HttpPolicyWrapBuilderTests() - { - _mockedConfigReader = new Mock(); - _sdkOptions = new MigrationSdkOptions(); - _mockedConfigReader - .Setup(x => x.Get()) - .Returns(_sdkOptions); - - _services = new ServiceCollection() - .AddTableauMigrationSdk() - .AddSingleton(_mockedConfigReader.Object); - } - - [Fact] - public void GetRequestPoliciesTwoTimes_DifferentObjects() - { - // Arrange - var request = new HttpRequestMessage(); - using var serviceProvider = _services.BuildServiceProvider(); - var selector = serviceProvider.GetRequiredService(); - - // Act - var policy1 = selector.GetRequestPolicies(request); - var policy2 = selector.GetRequestPolicies(request); - - // Assert - Assert.NotNull(policy1); - Assert.NotNull(policy2); - Assert.NotSame(policy1, policy2); - } - - [Theory] - [MemberData(nameof(GetPolicyWrapCases))] - public void GetRequestPoliciesCases( - Action> assertPolicy, - Action? setConfiguration = null) - { - // Arrange - setConfiguration?.Invoke(_sdkOptions); - - using var serviceProvider = _services.BuildServiceProvider(); - var selector = serviceProvider.GetRequiredService(); - - // Act - var policy = selector.GetRequestPolicies(new HttpRequestMessage()); - - // Assert - Assert.NotNull(policy); - assertPolicy(policy); - } -} diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Policies/MaxConcurrencyPolicyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Policies/MaxConcurrencyPolicyBuilderTests.cs deleted file mode 100644 index 7b490f2..0000000 --- a/tests/Tableau.Migration.Tests/Unit/Net/Policies/MaxConcurrencyPolicyBuilderTests.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System.Net.Http; -using Moq; -using Polly.Bulkhead; -using Tableau.Migration.Config; -using Tableau.Migration.Net.Policies; -using Xunit; - -namespace Tableau.Migration.Tests.Unit.Net.Policies -{ - public class MaxConcurrencyPolicyBuilderTests - { - private readonly Mock _mockedConfigReader; - private readonly MigrationSdkOptions _sdkOptions; - private readonly MaxConcurrencyPolicyBuilder _builder; - - public MaxConcurrencyPolicyBuilderTests() - { - _mockedConfigReader = new Mock(); - _sdkOptions = new MigrationSdkOptions(); - _mockedConfigReader - .Setup(x => x.Get()) - .Returns(_sdkOptions); - _builder = new MaxConcurrencyPolicyBuilder( - _mockedConfigReader.Object); - } - - [Fact] - public void BuildPolicy_ReturnsDefaultPolicy() - { - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.Null(policy); - } - - [Fact] - public void BuildPolicy_EnableRequestsLimit_ReturnsDefaultPolicy() - { - // Arrange - _sdkOptions.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.NotNull(policy); - Assert.IsType>(policy); - var bulkheadPolicy = (AsyncBulkheadPolicy)policy; - Assert.Equal(_sdkOptions.Network.Resilience.MaxConcurrentRequests, bulkheadPolicy.BulkheadAvailableCount); - Assert.Equal(_sdkOptions.Network.Resilience.ConcurrentWaitingRequestsOnQueue, bulkheadPolicy.QueueAvailableCount); - } - - [Fact] - public void BuildPolicy_CustomRequestsLimit_ReturnsPolicy() - { - // Arrange - _sdkOptions.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - _sdkOptions.Network.Resilience.MaxConcurrentRequests = 2; - _sdkOptions.Network.Resilience.ConcurrentWaitingRequestsOnQueue = 3; - - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.NotNull(policy); - Assert.IsType>(policy); - var bulkheadPolicy = (AsyncBulkheadPolicy)policy; - Assert.Equal(_sdkOptions.Network.Resilience.MaxConcurrentRequests, bulkheadPolicy.BulkheadAvailableCount); - Assert.Equal(_sdkOptions.Network.Resilience.ConcurrentWaitingRequestsOnQueue, bulkheadPolicy.QueueAvailableCount); - } - } -} diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Policies/RequestTimeoutPolicyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Policies/RequestTimeoutPolicyBuilderTests.cs deleted file mode 100644 index ee1c069..0000000 --- a/tests/Tableau.Migration.Tests/Unit/Net/Policies/RequestTimeoutPolicyBuilderTests.cs +++ /dev/null @@ -1,140 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System; -using System.Collections.Generic; -using System.Net.Http; -using Moq; -using Polly.Timeout; -using Tableau.Migration.Config; -using Tableau.Migration.Net.Policies; -using Xunit; - -namespace Tableau.Migration.Tests.Unit.Net.Policies -{ - public class RequestTimeoutPolicyBuilderTests - { - private readonly Mock _mockedConfigReader; - private readonly MigrationSdkOptions _sdkOptions; - private readonly RequestTimeoutPolicyBuilder _builder; - - public RequestTimeoutPolicyBuilderTests() - { - _mockedConfigReader = new Mock(); - _sdkOptions = new MigrationSdkOptions(); - _mockedConfigReader - .Setup(x => x.Get()) - .Returns(_sdkOptions); - _builder = new RequestTimeoutPolicyBuilder( - _mockedConfigReader.Object); - } - - [Fact] - public void BuildPolicy_ReturnsDefaultPolicy() - { - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.NotNull(policy); - Assert.IsType>(policy); - var timeoutPolicy = (AsyncTimeoutPolicy)policy; - var timeoutProviderObject = timeoutPolicy.GetFieldValue("_timeoutProvider"); - Assert.NotNull(timeoutProviderObject); - Assert.IsType>(timeoutProviderObject); - var timeoutProvider = (Func)timeoutProviderObject; - Assert.Equal(_sdkOptions.Network.Resilience.PerRequestTimeout, timeoutProvider(new Polly.Context())); - } - - public static IEnumerable GetFileTransferRequests() - { - yield return new object[] - { - HttpMethod.Get, - new Uri($"http://localhost/api/3.21/sites/{Guid.NewGuid()}/datasources/{Guid.NewGuid()}/content?includeExtract=true") - }; - yield return new object[] - { - HttpMethod.Get, - new Uri($"https://localhost/api/3.21/sites/{Guid.NewGuid()}/workbooks/{Guid.NewGuid()}/content?includeExtract=true") - }; - yield return new object[] - { - HttpMethod.Put, - new Uri($"https://localhost/api/3.21/sites/{Guid.NewGuid()}/fileUploads/{Guid.NewGuid()}") - }; - } - - [Theory] - [MemberData(nameof(GetFileTransferRequests))] - public void BuildPolicyForFileTransfer_ReturnsDefaultPolicy(HttpMethod method, Uri uri) - { - // Act - var policy = _builder.Build(new HttpRequestMessage(method, uri)); - - // Assert - Assert.NotNull(policy); - Assert.IsType>(policy); - var timeoutPolicy = (AsyncTimeoutPolicy)policy; - var timeoutProviderObject = timeoutPolicy.GetFieldValue("_timeoutProvider"); - Assert.NotNull(timeoutProviderObject); - Assert.IsType>(timeoutProviderObject); - var timeoutProvider = (Func)timeoutProviderObject; - Assert.Equal(_sdkOptions.Network.Resilience.PerFileTransferRequestTimeout, timeoutProvider(new Polly.Context())); - } - - [Fact] - public void BuildPolicy_CustomTimeoutLimit_ReturnsPolicy() - { - // Arrange - _sdkOptions.Network.Resilience.PerRequestTimeout = TimeSpan.FromSeconds(15); - - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.NotNull(policy); - Assert.IsType>(policy); - var timeoutPolicy = (AsyncTimeoutPolicy)policy; - var timeoutProviderObject = timeoutPolicy.GetFieldValue("_timeoutProvider"); - Assert.NotNull(timeoutProviderObject); - Assert.IsType>(timeoutProviderObject); - var timeoutProvider = (Func)timeoutProviderObject; - Assert.Equal(_sdkOptions.Network.Resilience.PerRequestTimeout, timeoutProvider(new Polly.Context())); - } - - [Theory] - [MemberData(nameof(GetFileTransferRequests))] - public void BuildPolicyForFileTransfer_CustomTimeoutLimit_ReturnsPolicy(HttpMethod method, Uri uri) - { - // Arrange - 
_sdkOptions.Network.Resilience.PerFileTransferRequestTimeout = TimeSpan.FromDays(1); - - // Act - var policy = _builder.Build(new HttpRequestMessage(method, uri)); - - // Assert - Assert.NotNull(policy); - Assert.IsType>(policy); - var timeoutPolicy = (AsyncTimeoutPolicy)policy; - var timeoutProviderObject = timeoutPolicy.GetFieldValue("_timeoutProvider"); - Assert.NotNull(timeoutProviderObject); - Assert.IsType>(timeoutProviderObject); - var timeoutProvider = (Func)timeoutProviderObject; - Assert.Equal(_sdkOptions.Network.Resilience.PerFileTransferRequestTimeout, timeoutProvider(new Polly.Context())); - } - } -} diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Policies/RetryPolicyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Policies/RetryPolicyBuilderTests.cs deleted file mode 100644 index ceddedf..0000000 --- a/tests/Tableau.Migration.Tests/Unit/Net/Policies/RetryPolicyBuilderTests.cs +++ /dev/null @@ -1,294 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System; -using System.Collections.Generic; -using System.Net; -using System.Net.Http; -using System.Threading.Tasks; -using Moq; -using Polly; -using Polly.Retry; -using Polly.Timeout; -using Tableau.Migration.Config; -using Tableau.Migration.Net.Policies; -using Xunit; - -namespace Tableau.Migration.Tests.Unit.Net.Policies -{ - public class RetryPolicyBuilderTests - { - private readonly Mock _mockedConfigReader; - private readonly MigrationSdkOptions _sdkOptions; - private readonly RetryPolicyBuilder _builder; - - #region GetRetryCasesForResponseStatusCodes - - public static IEnumerable GetRetryCasesForResponseStatusCodes() - { - yield return new object[] - { - // returnedStatusCode - HttpStatusCode.RequestTimeout - }; - - // Test default configuration for response status codes 5XX - for (var statusCode = (int)HttpStatusCode.InternalServerError; statusCode <= (int)HttpStatusCode.NetworkAuthenticationRequired; statusCode++) - { - yield return new object[] - { - // returnedStatusCode - (HttpStatusCode)statusCode - }; - } - - // OK Status - yield return new object[] - { - // returnedStatusCode - HttpStatusCode.OK, - // expectRetry - false - }; - - // OK Status and with Override Status Code configuration - yield return new object[] - { - // returnedStatusCode - HttpStatusCode.OK, - // expectRetry - false, - // overrideStatusCodes - new int[] - { - (int)HttpStatusCode.ServiceUnavailable - } - }; - - // With Override Status Code configuration - yield return new object[] - { - // returnedStatusCode - HttpStatusCode.Conflict, - // expectRetry - true, - // overrideStatusCodes - new int[] - { - (int)HttpStatusCode.Conflict - } - }; - - // Too Many Requests not retried - yield return new object[] - { - // returnedStatusCode - HttpStatusCode.TooManyRequests, - // expectRetry - false - }; - } - - #endregion GetRetryCasesForResponseStatusCodes - - #region GetRetryCasesForExceptions - - public static IEnumerable 
GetRetryCasesForExceptions() - { - yield return new object[] - { - // exceptionType - typeof(HttpRequestException), - // expectedException - new HttpRequestException() - }; - - yield return new object[] - { - // exceptionType - typeof(TimeoutRejectedException), - // expectedException - new TimeoutRejectedException() - }; - - yield return new object[] - { - // exceptionType - typeof(Exception), - // expectedException - new Exception(), - // expectRetry - false - }; - - yield return new object[] - { - // exceptionType - typeof(TaskCanceledException), - // expectedException - new TaskCanceledException(), - // expectRetry - false - }; - } - - #endregion GetRetryCasesForExceptions - - public RetryPolicyBuilderTests() - { - _mockedConfigReader = new Mock(); - _sdkOptions = new MigrationSdkOptions(); - _mockedConfigReader - .Setup(x => x.Get()) - .Returns(_sdkOptions); - _builder = new RetryPolicyBuilder( - _mockedConfigReader.Object); - } - - [Fact] - public void BuildPolicy_ReturnsDefaultPolicy() - { - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.NotNull(policy); - Assert.IsType>(policy); - var retryPolicy = (AsyncRetryPolicy)policy; - - var retriesObject = retryPolicy.GetFieldValue("_sleepDurationsEnumerable"); - Assert.NotNull(retriesObject); - Assert.IsAssignableFrom>(retriesObject); - Assert.Equal( - (IEnumerable)retriesObject, - _sdkOptions.Network.Resilience.RetryIntervals); - } - - [Fact] - public void BuildPolicy_DisableRetry_ReturnsNull() - { - // Arrange - _sdkOptions.Network.Resilience.RetryEnabled = false; - - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.Null(policy); - } - - [Fact] - public void BuildPolicy_EmptyRetryIntervals_ReturnsNull() - { - // Arrange - _sdkOptions.Network.Resilience.RetryEnabled = true; - _sdkOptions.Network.Resilience.RetryIntervals = Array.Empty(); - - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.Null(policy); - } - - [Theory] - [MemberData(nameof(GetRetryCasesForResponseStatusCodes))] - public async Task ExecuteBuiltPolicy_TestRetryCasesForResponseStatusCodes( - HttpStatusCode returnedStatusCode, - bool expectRetry = true, - int[]? overrideStatusCodes = null) - { - // Arrange - if (overrideStatusCodes is not null) - { - _sdkOptions.Network.Resilience.RetryOverrideResponseCodes = overrideStatusCodes; - } - _sdkOptions.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(5) - }; - var expectedTries = expectRetry ? 2 : 1; - var retryKey = "retryCount"; - var context = new Context - { - { retryKey, 0 } - }; - var policy = _builder.Build(new HttpRequestMessage()); - var retryPolicy = (AsyncRetryPolicy)policy!; - - // Act - var response = await retryPolicy.ExecuteAsync( - (ctx) => - { - var tries = (int)ctx[retryKey]; - ctx[retryKey] = ++tries; - return Task.FromResult( - new HttpResponseMessage( - returnedStatusCode)); - }, - context); - - // Assert - Assert.Equal( - expectedTries, - (int)context[retryKey]); - - Assert.Equal( - returnedStatusCode, - response.StatusCode); - } - - [Theory] - [MemberData(nameof(GetRetryCasesForExceptions))] - public async Task ExecuteBuiltPolicy_TestRetryCasesForExceptions( - Type exceptionType, - Exception expectedException, - bool expectRetry = true) - { - // Arrange - _sdkOptions.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(5) - }; - var expectedTries = expectRetry ? 
2 : 1; - var retryKey = "retryCount"; - var context = new Context - { - { retryKey, 0 } - }; - var policy = _builder.Build(new HttpRequestMessage()); - var retryPolicy = (AsyncRetryPolicy)policy!; - - // Act - _ = await Assert.ThrowsAsync( - exceptionType, - async () => - await retryPolicy.ExecuteAsync( - (ctx) => - { - var tries = (int)ctx[retryKey]; - ctx[retryKey] = ++tries; - throw expectedException; - }, - context)); - - // Assert - Assert.Equal( - expectedTries, - (int)context[retryKey]); - } - } -} diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Policies/ServerThrottlePolicyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Policies/ServerThrottlePolicyBuilderTests.cs deleted file mode 100644 index c3ddd30..0000000 --- a/tests/Tableau.Migration.Tests/Unit/Net/Policies/ServerThrottlePolicyBuilderTests.cs +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System.Net.Http; -using Moq; -using Polly.Retry; -using Tableau.Migration.Config; -using Tableau.Migration.Net.Policies; -using Xunit; - -namespace Tableau.Migration.Tests.Unit.Net.Policies -{ - public class ServerThrottlePolicyBuilderTests - { - private readonly Mock _mockedConfigReader; - private readonly MigrationSdkOptions _sdkOptions; - private readonly ServerThrottlePolicyBuilder _builder; - - public ServerThrottlePolicyBuilderTests() - { - _mockedConfigReader = new Mock(); - _sdkOptions = new MigrationSdkOptions(); - _mockedConfigReader - .Setup(x => x.Get()) - .Returns(_sdkOptions); - _builder = new(_mockedConfigReader.Object); - } - - [Fact] - public void PolicyDisabled() - { - // Arrange - _sdkOptions.Network.Resilience.ServerThrottleEnabled = false; - - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.Null(policy); - } - - [Fact] - public void PolicyEnabled() - { - // Arrange - _sdkOptions.Network.Resilience.ClientThrottleEnabled = true; - - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.NotNull(policy); - Assert.IsType>(policy); - } - - [Fact] - public void RetriesLimited() - { - // Arrange - _sdkOptions.Network.Resilience.ServerThrottleLimitRetries = true; - - // Act - var policy = _builder.Build(new HttpRequestMessage()); - - // Assert - Assert.NotNull(policy); - Assert.IsType>(policy); - } - } -} diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedClientThrottlePolicyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedClientThrottlePolicyBuilderTests.cs deleted file mode 100644 index 0fd6a55..0000000 --- a/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedClientThrottlePolicyBuilderTests.cs +++ /dev/null @@ -1,359 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System; -using System.Collections.Generic; -using System.Net.Http; -using Microsoft.Extensions.DependencyInjection; -using Moq; -using Polly; -using Tableau.Migration.Config; -using Tableau.Migration.Net.Policies; -using Xunit; - -namespace Tableau.Migration.Tests.Unit.Net.Policies -{ - public class SimpleCachedClientThrottlePolicyBuilderTests - { - private readonly Mock _mockedConfigReader; - private readonly MigrationSdkOptions _sdkOptions; - private readonly IServiceCollection _services; - - private static readonly Action[] _setOtherConfigurationList = new Action[] - { - // Default ConcurrentRequests Config - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.MaxConcurrentRequests = 13; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.ConcurrentWaitingRequestsOnQueue = 100; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.MaxConcurrentRequests = 4; - options.Network.Resilience.ConcurrentWaitingRequestsOnQueue = 50; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(250) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(10), - TimeSpan.FromMilliseconds(20), - TimeSpan.FromMilliseconds(30), - TimeSpan.FromMilliseconds(40), - TimeSpan.FromMilliseconds(50) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500 - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500, - 200, - 208 - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 408 - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(10), - TimeSpan.FromMilliseconds(20), - TimeSpan.FromMilliseconds(30), - TimeSpan.FromMilliseconds(40), - TimeSpan.FromMilliseconds(50) - }; - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500, - 200, - 208 - }; - }, - // Custom Timeout config - (MigrationSdkOptions options) => - { - options.Network.Resilience.PerRequestTimeout = TimeSpan.FromSeconds(105); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.MaxConcurrentRequests = 4; - 
options.Network.Resilience.ConcurrentWaitingRequestsOnQueue = 50; - options.Network.Resilience.PerRequestTimeout = TimeSpan.FromSeconds(105); - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(10), - TimeSpan.FromMilliseconds(20), - TimeSpan.FromMilliseconds(30), - TimeSpan.FromMilliseconds(40), - TimeSpan.FromMilliseconds(50) - }; - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500, - 200, - 208 - }; - } - }; - - private static readonly Action[] _setThrottleConfigurationList = new Action[] - { - (MigrationSdkOptions options) => - { - options.Network.Resilience.MaxReadRequests = 111; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.MaxBurstReadRequests = 54; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.MaxReadRequests = 111; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - options.Network.Resilience.MaxBurstReadRequests = 54; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.MaxPublishRequests = 112; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.MaxPublishRequestsInterval = TimeSpan.FromHours(4); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.MaxBurstPublishRequests = 55; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.MaxReadRequests = 112; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(4); - options.Network.Resilience.MaxBurstReadRequests = 55; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.MaxReadRequests = 111; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - options.Network.Resilience.MaxBurstReadRequests = 54; - options.Network.Resilience.MaxReadRequests = 112; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(4); - options.Network.Resilience.MaxBurstReadRequests = 55; - } - }; - - #region GetCachedPolicyCases - - public static IEnumerable GetCachedPolicyCases() - { - // Default Config - No Changes - Same Request - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - } - }; - - // Default Config - No Changes - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.NotSame(policy1, policy2); - } - }; - - foreach (var setConfigAction in _setOtherConfigurationList) - { - // Custom configuration - No Changes - Same Request - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration - setConfigAction - }; - } - - foreach (var setConfigAction in _setOtherConfigurationList) - { - // Default configuration - With Changes - Same Request - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - 
{ - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - - foreach (var setConfigAction in _setThrottleConfigurationList) - { - // Custom retry configuration - No Changes - Same Request - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration - setConfigAction - }; - } - - foreach (var setConfigAction in _setThrottleConfigurationList) - { - // Default configuration - With Retry Changes - Same Request - New Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.NotSame(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - } - - #endregion GetCachedPolicyCases - - public SimpleCachedClientThrottlePolicyBuilderTests() - { - _mockedConfigReader = new Mock(); - _sdkOptions = new MigrationSdkOptions(); - _sdkOptions.Network.Resilience.ClientThrottleEnabled = true; - _mockedConfigReader - .Setup(x => x.Get()) - .Returns(_sdkOptions); - - _services = new ServiceCollection() - .AddTableauMigrationSdk() - .AddSingleton(_mockedConfigReader.Object); - } - - [Theory] - [MemberData(nameof(GetCachedPolicyCases))] - public void BuildCachedPolicy( - HttpRequestMessage request1, - HttpRequestMessage request2, - Action, IAsyncPolicy> assertPolicies, - Action? setInitialConfiguration = null, - Action? changeConfiguration = null) - { - // Arrange - setInitialConfiguration?.Invoke(_sdkOptions); - var request = new HttpRequestMessage(); - using var serviceProvider = _services.BuildServiceProvider(); - var selector = serviceProvider.GetRequiredService(); - - // Act - var policy1 = selector.Build(request1); - changeConfiguration?.Invoke(_sdkOptions); - var policy2 = selector.Build(request2); - - // Assert - Assert.NotNull(policy1); - Assert.NotNull(policy2); - assertPolicies(policy1, policy2); - } - } -} \ No newline at end of file diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedHttpPolicyWrapBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedHttpPolicyWrapBuilderTests.cs deleted file mode 100644 index 8296407..0000000 --- a/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedHttpPolicyWrapBuilderTests.cs +++ /dev/null @@ -1,187 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -using System; -using System.Collections.Generic; -using System.Net.Http; -using Microsoft.Extensions.DependencyInjection; -using Moq; -using Polly; -using Tableau.Migration.Config; -using Tableau.Migration.Net.Policies; -using Xunit; - -namespace Tableau.Migration.Tests.Unit.Net.Policies -{ - public class SimpleCachedHttpPolicyWrapBuilderTests - { - private readonly Mock _mockedConfigReader; - private readonly MigrationSdkOptions _sdkOptions; - private readonly IServiceCollection _services; - - private static readonly Action[] _setConfigurationList = new Action[] - { - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryEnabled = false; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = Array.Empty(); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryEnabled = false; - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryEnabled = true; - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryEnabled = false; - options.Network.Resilience.ClientThrottleEnabled = true; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryEnabled = true; - options.Network.Resilience.ClientThrottleEnabled = true; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryEnabled = true; - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.ClientThrottleEnabled = true; - } - }; - - #region GetPolicyWrapCases - - public static IEnumerable GetCachedPolicyWrapCases() - { - // Default Config - No Changes - Same Request - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - } - }; - - // Default Config - No Changes - Different Requests - Different Policies - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.NotSame(policy1, policy2); - } - }; - - foreach (var setConfigAction in _setConfigurationList) - { - // Custom configuration - No Changes - Same Request - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration - setConfigAction - }; - } - - foreach (var setConfigAction in _setConfigurationList) - { - // Default configuration - With Changes - Same Request - New Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.NotSame(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - } - - #endregion - - public SimpleCachedHttpPolicyWrapBuilderTests() - { - _mockedConfigReader = new Mock(); - _sdkOptions = new MigrationSdkOptions(); - _mockedConfigReader - .Setup(x => x.Get()) - .Returns(_sdkOptions); - - _services = new ServiceCollection() - .AddTableauMigrationSdk() - 
.AddSingleton(_mockedConfigReader.Object); - } - - [Theory] - [MemberData(nameof(GetCachedPolicyWrapCases))] - public void GetCachedPolicyWrap( - HttpRequestMessage request1, - HttpRequestMessage request2, - Action, IAsyncPolicy> assertPolicies, - Action? setInitialConfiguration = null, - Action? changeConfiguration = null) - { - // Arrange - setInitialConfiguration?.Invoke(_sdkOptions); - var request = new HttpRequestMessage(); - using var serviceProvider = _services.BuildServiceProvider(); - var selector = serviceProvider.GetRequiredService(); - - // Act - var policy1 = selector.GetRequestPolicies(request1); - changeConfiguration?.Invoke(_sdkOptions); - var policy2 = selector.GetRequestPolicies(request2); - - // Assert - Assert.NotNull(policy1); - Assert.NotNull(policy2); - assertPolicies(policy1, policy2); - } - } -} \ No newline at end of file diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedMaxConcurrencyPolicyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedMaxConcurrencyPolicyBuilderTests.cs deleted file mode 100644 index d1bb790..0000000 --- a/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedMaxConcurrencyPolicyBuilderTests.cs +++ /dev/null @@ -1,369 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -using System; -using System.Collections.Generic; -using System.Net.Http; -using Microsoft.Extensions.DependencyInjection; -using Moq; -using Polly; -using Tableau.Migration.Config; -using Tableau.Migration.Net.Policies; -using Xunit; - -namespace Tableau.Migration.Tests.Unit.Net.Policies -{ - public class SimpleCachedMaxConcurrencyPolicyBuilderTests - { - private readonly Mock _mockedConfigReader; - private readonly MigrationSdkOptions _sdkOptions; - private readonly IServiceCollection _services; - - private static readonly Action[] _setOtherConfigurationList = new Action[] - { - // Default ClientThrottle Config - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 111; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxBurstReadRequests = 54; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 111; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - options.Network.Resilience.MaxBurstReadRequests = 54; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxPublishRequests = 112; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxPublishRequestsInterval = TimeSpan.FromHours(4); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxBurstPublishRequests = 55; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 112; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(4); - options.Network.Resilience.MaxBurstReadRequests = 55; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 111; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - options.Network.Resilience.MaxBurstReadRequests = 54; - options.Network.Resilience.MaxReadRequests = 112; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(4); - options.Network.Resilience.MaxBurstReadRequests = 55; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(250) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(10), - TimeSpan.FromMilliseconds(20), - TimeSpan.FromMilliseconds(30), - TimeSpan.FromMilliseconds(40), - TimeSpan.FromMilliseconds(50) - }; - }, - (MigrationSdkOptions options) => - { - 
options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500 - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500, - 200, - 208 - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 408 - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(10), - TimeSpan.FromMilliseconds(20), - TimeSpan.FromMilliseconds(30), - TimeSpan.FromMilliseconds(40), - TimeSpan.FromMilliseconds(50) - }; - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500, - 200, - 208 - }; - }, - // Custom Timeout config - (MigrationSdkOptions options) => - { - options.Network.Resilience.PerRequestTimeout = TimeSpan.FromSeconds(105); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 111; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - options.Network.Resilience.MaxBurstReadRequests = 54; - options.Network.Resilience.MaxReadRequests = 112; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(4); - options.Network.Resilience.MaxBurstReadRequests = 55; - options.Network.Resilience.PerRequestTimeout = TimeSpan.FromSeconds(105); - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(10), - TimeSpan.FromMilliseconds(20), - TimeSpan.FromMilliseconds(30), - TimeSpan.FromMilliseconds(40), - TimeSpan.FromMilliseconds(50) - }; - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500, - 200, - 208 - }; - } - }; - - private static readonly Action[] _setRetryConfigurationList = new Action[] - { - (MigrationSdkOptions options) => - { - options.Network.Resilience.MaxConcurrentRequests = 13; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentWaitingRequestsOnQueue = 100; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.MaxConcurrentRequests = 4; - options.Network.Resilience.ConcurrentWaitingRequestsOnQueue = 50; - } - }; - - #region GetCachedPolicyCases - - public static IEnumerable GetCachedPolicyCases() - { - // Default Config - No Changes - Same Request - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - } - }; - - // Default Config - No Changes - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - } - }; - - foreach (var setConfigAction in _setOtherConfigurationList) - { - // Custom configuration - No Changes - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration - setConfigAction - }; - } - - foreach (var setConfigAction in _setOtherConfigurationList) - { - // Default configuration - With Changes - Different Requests - New Policy - yield return 
new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - - foreach (var setConfigAction in _setRetryConfigurationList) - { - // Custom retry configuration - No Changes - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration - setConfigAction - }; - } - - foreach (var setConfigAction in _setRetryConfigurationList) - { - // Default configuration - With Retry Changes - Same Request - New Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.NotSame(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - } - - #endregion GetCachedPolicyCases - - public SimpleCachedMaxConcurrencyPolicyBuilderTests() - { - _mockedConfigReader = new Mock(); - _sdkOptions = new MigrationSdkOptions(); - _sdkOptions.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - _mockedConfigReader - .Setup(x => x.Get()) - .Returns(_sdkOptions); - - _services = new ServiceCollection() - .AddTableauMigrationSdk() - .AddSingleton(_mockedConfigReader.Object); - } - - [Theory] - [MemberData(nameof(GetCachedPolicyCases))] - public void BuildCachedPolicy( - HttpRequestMessage request1, - HttpRequestMessage request2, - Action, IAsyncPolicy> assertPolicies, - Action? setInitialConfiguration = null, - Action? changeConfiguration = null) - { - // Arrange - setInitialConfiguration?.Invoke(_sdkOptions); - var request = new HttpRequestMessage(); - using var serviceProvider = _services.BuildServiceProvider(); - var selector = serviceProvider.GetRequiredService(); - - // Act - var policy1 = selector.Build(request1); - changeConfiguration?.Invoke(_sdkOptions); - var policy2 = selector.Build(request2); - - // Assert - Assert.NotNull(policy1); - Assert.NotNull(policy2); - assertPolicies(policy1, policy2); - } - } -} \ No newline at end of file diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedRequestTimeoutPolicyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedRequestTimeoutPolicyBuilderTests.cs deleted file mode 100644 index c249424..0000000 --- a/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedRequestTimeoutPolicyBuilderTests.cs +++ /dev/null @@ -1,511 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System; -using System.Collections.Generic; -using System.Net.Http; -using Microsoft.Extensions.DependencyInjection; -using Moq; -using Polly; -using Tableau.Migration.Config; -using Tableau.Migration.Net.Policies; -using Xunit; - -namespace Tableau.Migration.Tests.Unit.Net.Policies -{ - public class SimpleCachedRequestTimeoutPolicyBuilderTests - { - private readonly Mock _mockedConfigReader; - private readonly MigrationSdkOptions _sdkOptions; - private readonly IServiceCollection _services; - - private static readonly Action[] _setOtherConfigurationList = new Action[] - { - // Default ConcurrentRequests Config - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.MaxConcurrentRequests = 13; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.ConcurrentWaitingRequestsOnQueue = 100; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.MaxConcurrentRequests = 4; - options.Network.Resilience.ConcurrentWaitingRequestsOnQueue = 50; - }, - // Default ClientThrottle Config - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 111; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxBurstReadRequests = 54; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 111; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - options.Network.Resilience.MaxBurstReadRequests = 54; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxPublishRequests = 112; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxPublishRequestsInterval = TimeSpan.FromHours(4); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxBurstPublishRequests = 55; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 112; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(4); - options.Network.Resilience.MaxBurstReadRequests = 55; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 111; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - options.Network.Resilience.MaxBurstReadRequests = 54; - options.Network.Resilience.MaxReadRequests = 112; - options.Network.Resilience.MaxReadRequestsInterval = 
TimeSpan.FromHours(4); - options.Network.Resilience.MaxBurstReadRequests = 55; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(250) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(10), - TimeSpan.FromMilliseconds(20), - TimeSpan.FromMilliseconds(30), - TimeSpan.FromMilliseconds(40), - TimeSpan.FromMilliseconds(50) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500 - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500, - 200, - 208 - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 408 - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(10), - TimeSpan.FromMilliseconds(20), - TimeSpan.FromMilliseconds(30), - TimeSpan.FromMilliseconds(40), - TimeSpan.FromMilliseconds(50) - }; - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500, - 200, - 208 - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.MaxConcurrentRequests = 4; - options.Network.Resilience.ConcurrentWaitingRequestsOnQueue = 50; - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 111; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - options.Network.Resilience.MaxBurstReadRequests = 54; - options.Network.Resilience.MaxReadRequests = 112; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(4); - options.Network.Resilience.MaxBurstReadRequests = 55; - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(10), - TimeSpan.FromMilliseconds(20), - TimeSpan.FromMilliseconds(30), - TimeSpan.FromMilliseconds(40), - TimeSpan.FromMilliseconds(50) - }; - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500, - 200, - 208 - }; - } - }; - - private static readonly Action[] _setTimeoutConfigurationList = new Action[] - { - (MigrationSdkOptions options) => - { - options.Network.Resilience.PerRequestTimeout = TimeSpan.FromSeconds(105); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.PerRequestTimeout = TimeSpan.FromDays(1); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.PerFileTransferRequestTimeout = TimeSpan.FromHours(2); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.PerFileTransferRequestTimeout = TimeSpan.FromDays(7); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.PerRequestTimeout = TimeSpan.FromSeconds(105); - options.Network.Resilience.PerFileTransferRequestTimeout = TimeSpan.FromHours(1); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.PerRequestTimeout = TimeSpan.FromMinutes(1); - options.Network.Resilience.PerFileTransferRequestTimeout = TimeSpan.FromMinutes(15); - } - }; - - 
#region GetCachedPolicyCases - - public static IEnumerable GetCachedPolicyCases() - { - // Case #1: Default Config - No Changes - Same Request - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request2 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - } - }; - - // Case #2: Default Config - No Changes - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request2 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - } - }; - - // Case #3: Default Config - No Changes - Different Requests (one file transfer) - New Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request2 - new HttpRequestMessage(HttpMethod.Put, new Uri($"https://localhost/api/3.21/sites/{Guid.NewGuid()}/fileUploads/{Guid.NewGuid()}")), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.NotSame(policy1, policy2); - } - }; - - // Case #4: Default Config - No Changes - Different Requests (two file transfer) - Same Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(HttpMethod.Get, new Uri($"http://localhost/api/3.21/sites/{Guid.NewGuid()}/datasources/{Guid.NewGuid()}/content?includeExtract=true")), - // request2 - new HttpRequestMessage(HttpMethod.Put, new Uri($"https://localhost/api/3.21/sites/{Guid.NewGuid()}/fileUploads/{Guid.NewGuid()}")), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - } - }; - - // Case #5: Default Config - No Changes - Same Request (file transfer) - Same Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(HttpMethod.Get, new Uri($"http://localhost/api/3.21/sites/{Guid.NewGuid()}/datasources/{Guid.NewGuid()}/content?includeExtract=true")), - // request2 - new HttpRequestMessage(HttpMethod.Get, new Uri($"http://localhost/api/3.21/sites/{Guid.NewGuid()}/datasources/{Guid.NewGuid()}/content?includeExtract=true")), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - } - }; - - // Case #6: Custom Other Network Config - foreach (var setConfigAction in _setOtherConfigurationList) - { - // Custom configuration - No Changes on Timeout Config - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request2 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration - setConfigAction - }; - } - - // Case #7: Changes on other Network Config - foreach (var setConfigAction in _setOtherConfigurationList) - { - // Default configuration - No Changes on Timeout Config - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request2 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - - // Case #8: Custom timeout configuration - foreach (var setConfigAction in _setTimeoutConfigurationList) - { - // Custom timeout 
configuration - No Changes - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request2 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration - setConfigAction - }; - } - - // Case #9: Validate Case #1 - foreach (var setConfigAction in _setTimeoutConfigurationList) - { - // Default configuration - With Timeout Changes - Same Request - New Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request2 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.NotSame(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - - // Case #10: Validate Case #2 - foreach (var setConfigAction in _setTimeoutConfigurationList) - { - // Default configuration - With Timeout Changes - Different Requests - New Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request2 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.NotSame(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - - // Case #11: Validate Case #4 - foreach (var setConfigAction in _setTimeoutConfigurationList) - { - // Default configuration - With Timeout Changes - Different Requests (one file transfer) - New Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(HttpMethod.Get, new Uri($"http://localhost/api/3.21/sites/{Guid.NewGuid()}/datasources/{Guid.NewGuid()}/content?includeExtract=true")), - // request2 - new HttpRequestMessage(HttpMethod.Get, new Uri($"http://localhost/api/3.21/sites/{Guid.NewGuid()}/datasources/{Guid.NewGuid()}/content?includeExtract=true")), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.NotSame(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - - // Case #12: Validate Case #5 - foreach (var setConfigAction in _setTimeoutConfigurationList) - { - // Default configuration - With Timeout Changes - Different Requests (one file transfer) - New Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(HttpMethod.Get, new Uri($"http://localhost/api/3.21/sites/{Guid.NewGuid()}/datasources/{Guid.NewGuid()}/content?includeExtract=true")), - // request2 - new HttpRequestMessage(HttpMethod.Put, new Uri($"https://localhost/api/3.21/sites/{Guid.NewGuid()}/fileUploads/{Guid.NewGuid()}")), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.NotSame(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - } - - #endregion GetCachedPolicyCases - - public SimpleCachedRequestTimeoutPolicyBuilderTests() - { - _mockedConfigReader = new Mock(); - _sdkOptions = new MigrationSdkOptions(); - _mockedConfigReader - .Setup(x => x.Get()) - .Returns(_sdkOptions); - - _services = new ServiceCollection() - .AddTableauMigrationSdk() - .AddSingleton(_mockedConfigReader.Object); - } - - [Theory] - [MemberData(nameof(GetCachedPolicyCases))] - public void BuildCachedPolicy( - 
HttpRequestMessage request1, - HttpRequestMessage request2, - Action, IAsyncPolicy> assertPolicies, - Action? setInitialConfiguration = null, - Action? changeConfiguration = null) - { - // Arrange - setInitialConfiguration?.Invoke(_sdkOptions); - var request = new HttpRequestMessage(); - using var serviceProvider = _services.BuildServiceProvider(); - var selector = serviceProvider.GetRequiredService(); - - // Act - var policy1 = selector.Build(request1); - changeConfiguration?.Invoke(_sdkOptions); - var policy2 = selector.Build(request2); - - // Assert - Assert.NotNull(policy1); - Assert.NotNull(policy2); - assertPolicies(policy1, policy2); - } - } -} \ No newline at end of file diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedRetryPolicyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedRetryPolicyBuilderTests.cs deleted file mode 100644 index 632ecd6..0000000 --- a/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedRetryPolicyBuilderTests.cs +++ /dev/null @@ -1,365 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -using System; -using System.Collections.Generic; -using System.Net.Http; -using Microsoft.Extensions.DependencyInjection; -using Moq; -using Polly; -using Tableau.Migration.Config; -using Tableau.Migration.Net.Policies; -using Xunit; - -namespace Tableau.Migration.Tests.Unit.Net.Policies -{ - public class SimpleCachedRetryPolicyBuilderTests - { - private readonly Mock _mockedConfigReader; - private readonly MigrationSdkOptions _sdkOptions; - private readonly IServiceCollection _services; - - private static readonly Action[] _setOtherConfigurationList = new Action[] - { - // Default ConcurrentRequests Config - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.MaxConcurrentRequests = 13; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.ConcurrentWaitingRequestsOnQueue = 100; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.MaxConcurrentRequests = 4; - options.Network.Resilience.ConcurrentWaitingRequestsOnQueue = 50; - }, - // Default ClientThrottle Config - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 111; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - }, - (MigrationSdkOptions options) => - { - 
options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxBurstReadRequests = 54; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 111; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - options.Network.Resilience.MaxBurstReadRequests = 54; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxPublishRequests = 112; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxPublishRequestsInterval = TimeSpan.FromHours(4); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxBurstPublishRequests = 55; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 112; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(4); - options.Network.Resilience.MaxBurstReadRequests = 55; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 111; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - options.Network.Resilience.MaxBurstReadRequests = 54; - options.Network.Resilience.MaxReadRequests = 112; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(4); - options.Network.Resilience.MaxBurstReadRequests = 55; - }, - // Custom Timeout config - (MigrationSdkOptions options) => - { - options.Network.Resilience.PerRequestTimeout = TimeSpan.FromSeconds(105); - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.MaxConcurrentRequests = 4; - options.Network.Resilience.ConcurrentWaitingRequestsOnQueue = 50; - options.Network.Resilience.ClientThrottleEnabled = true; - options.Network.Resilience.MaxReadRequests = 111; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(3); - options.Network.Resilience.MaxBurstReadRequests = 54; - options.Network.Resilience.MaxReadRequests = 112; - options.Network.Resilience.MaxReadRequestsInterval = TimeSpan.FromHours(4); - options.Network.Resilience.MaxBurstReadRequests = 55; - options.Network.Resilience.PerRequestTimeout = TimeSpan.FromSeconds(105); - } - }; - - private static readonly Action[] _setRetryConfigurationList = new Action[] - { - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(250) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(10), - TimeSpan.FromMilliseconds(20), - TimeSpan.FromMilliseconds(30), - TimeSpan.FromMilliseconds(40), - TimeSpan.FromMilliseconds(50) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500 - }; - }, - (MigrationSdkOptions options) => - { - 
options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500, - 200, - 208 - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 408 - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.RetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(10), - TimeSpan.FromMilliseconds(20), - TimeSpan.FromMilliseconds(30), - TimeSpan.FromMilliseconds(40), - TimeSpan.FromMilliseconds(50) - }; - options.Network.Resilience.RetryOverrideResponseCodes = new int[] - { - 500, - 200, - 208 - }; - } - }; - - #region GetCachedPolicyCases - - public static IEnumerable GetCachedPolicyCases() - { - // Default Config - No Changes - Same Request - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - } - }; - - // Default Config - No Changes - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - } - }; - - foreach (var setConfigAction in _setOtherConfigurationList) - { - // Custom configuration - No Changes - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration - setConfigAction - }; - } - - foreach (var setConfigAction in _setOtherConfigurationList) - { - // Default configuration - With Changes - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - - foreach (var setConfigAction in _setRetryConfigurationList) - { - // Custom retry configuration - No Changes - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration - setConfigAction - }; - } - - foreach (var setConfigAction in _setRetryConfigurationList) - { - // Default configuration - With Retry Changes - Same Request - New Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.NotSame(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - } - - #endregion GetCachedPolicyCases - - public SimpleCachedRetryPolicyBuilderTests() - { - _mockedConfigReader = new Mock(); - _sdkOptions = new MigrationSdkOptions(); - _mockedConfigReader - .Setup(x => x.Get()) - .Returns(_sdkOptions); - - _services = new ServiceCollection() - .AddTableauMigrationSdk() - 
.AddSingleton(_mockedConfigReader.Object); - } - - [Theory] - [MemberData(nameof(GetCachedPolicyCases))] - public void BuildCachedPolicy( - HttpRequestMessage request1, - HttpRequestMessage request2, - Action, IAsyncPolicy> assertPolicies, - Action? setInitialConfiguration = null, - Action? changeConfiguration = null) - { - // Arrange - setInitialConfiguration?.Invoke(_sdkOptions); - var request = new HttpRequestMessage(); - using var serviceProvider = _services.BuildServiceProvider(); - var selector = serviceProvider.GetRequiredService(); - - // Act - var policy1 = selector.Build(request1); - changeConfiguration?.Invoke(_sdkOptions); - var policy2 = selector.Build(request2); - - // Assert - Assert.NotNull(policy1); - Assert.NotNull(policy2); - assertPolicies(policy1, policy2); - } - } -} \ No newline at end of file diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedServerThrottlePolicyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedServerThrottlePolicyBuilderTests.cs deleted file mode 100644 index 2397e3f..0000000 --- a/tests/Tableau.Migration.Tests/Unit/Net/Policies/SimpleCachedServerThrottlePolicyBuilderTests.cs +++ /dev/null @@ -1,252 +0,0 @@ -// Copyright (c) 2023, Salesforce, Inc. -// SPDX-License-Identifier: Apache-2 -// -// Licensed under the Apache License, Version 2.0 (the ""License"") -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an ""AS IS"" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -using System; -using System.Collections.Generic; -using System.Net.Http; -using Microsoft.Extensions.DependencyInjection; -using Moq; -using Polly; -using Tableau.Migration.Config; -using Tableau.Migration.Net.Policies; -using Xunit; - -namespace Tableau.Migration.Tests.Unit.Net.Policies -{ - public class SimpleCachedServerThrottlePolicyBuilderTests - { - private readonly Mock _mockedConfigReader; - private readonly MigrationSdkOptions _sdkOptions; - private readonly IServiceCollection _services; - - private static readonly Action[] _setOtherConfigurationList = new Action[] - { - // Default ConcurrentRequests Config - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ConcurrentRequestsLimitEnabled = true; - options.Network.Resilience.MaxConcurrentRequests = 13; - } - }; - - private static readonly Action[] _setServerThrottleConfigurationList = new Action[] - { - (MigrationSdkOptions options) => - { - options.Network.Resilience.ServerThrottleRetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(250) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ServerThrottleRetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50), - TimeSpan.FromMilliseconds(50) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ServerThrottleRetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(10), - TimeSpan.FromMilliseconds(20), - TimeSpan.FromMilliseconds(30), - TimeSpan.FromMilliseconds(40), - TimeSpan.FromMilliseconds(50) - }; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ServerThrottleLimitRetries = true; - }, - (MigrationSdkOptions options) => - { - options.Network.Resilience.ServerThrottleLimitRetries = true; - options.Network.Resilience.ServerThrottleRetryIntervals = new TimeSpan[] - { - TimeSpan.FromMilliseconds(10), - TimeSpan.FromMilliseconds(20), - TimeSpan.FromMilliseconds(30), - TimeSpan.FromMilliseconds(40), - TimeSpan.FromMilliseconds(50) - }; - } - }; - - #region - GetCachedPolicyCases - - - public static IEnumerable GetCachedPolicyCases() - { - // Default Config - No Changes - Same Request - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - } - }; - - // Default Config - No Changes - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - } - }; - - foreach (var setConfigAction in _setOtherConfigurationList) - { - // Custom configuration - No Changes - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration - setConfigAction - }; - } - - foreach (var setConfigAction in _setOtherConfigurationList) - { - // Default configuration - With Changes - Different Requests - Cached Policy - yield return new 
object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - - foreach (var setConfigAction in _setServerThrottleConfigurationList) - { - // Custom server throttle configuration - No Changes - Different Requests - Cached Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(HttpMethod.Put, (Uri?)null), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.Same(policy1, policy2); - }, - // setInitialConfiguration - setConfigAction - }; - } - - foreach (var setConfigAction in _setServerThrottleConfigurationList) - { - // Default configuration - With server throttle Changes - Same Request - New Policy - yield return new object[] - { - // request1 - new HttpRequestMessage(), - // request1 - new HttpRequestMessage(), - // assertPolicy - (IAsyncPolicy policy1, IAsyncPolicy policy2) => - { - Assert.NotSame(policy1, policy2); - }, - // setInitialConfiguration, - (MigrationSdkOptions options)=>{ }, - // changeConfiguration - setConfigAction - }; - } - } - - #endregion - - public SimpleCachedServerThrottlePolicyBuilderTests() - { - _mockedConfigReader = new Mock(); - _sdkOptions = new MigrationSdkOptions(); - _mockedConfigReader - .Setup(x => x.Get()) - .Returns(_sdkOptions); - - _services = new ServiceCollection() - .AddTableauMigrationSdk() - .AddSingleton(_mockedConfigReader.Object); - } - - [Theory] - [MemberData(nameof(GetCachedPolicyCases))] - public void BuildCachedPolicy( - HttpRequestMessage request1, - HttpRequestMessage request2, - Action, IAsyncPolicy> assertPolicies, - Action? setInitialConfiguration = null, - Action? changeConfiguration = null) - { - // Arrange - setInitialConfiguration?.Invoke(_sdkOptions); - var request = new HttpRequestMessage(); - using var serviceProvider = _services.BuildServiceProvider(); - var selector = serviceProvider.GetRequiredService(); - - // Act - var policy1 = selector.Build(request1); - changeConfiguration?.Invoke(_sdkOptions); - var policy2 = selector.Build(request2); - - // Assert - Assert.NotNull(policy1); - Assert.NotNull(policy2); - assertPolicies(policy1, policy2); - } - } -} diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Resilience/ClientThrottleStrategyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/ClientThrottleStrategyBuilderTests.cs new file mode 100644 index 0000000..7877214 --- /dev/null +++ b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/ClientThrottleStrategyBuilderTests.cs @@ -0,0 +1,59 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +using Polly.RateLimiting; +using Tableau.Migration.Net.Resilience; +using Xunit; + +namespace Tableau.Migration.Tests.Unit.Net.Resilience +{ + public class ClientThrottleStrategyBuilderTests + { + public class Build : ResilienceStrategyTestBase + { + protected override IResilienceStrategyBuilder GetBuilder() + => Create(); + + [Fact] + public void StrategyDisabledByDefault() + { + // Act + var (pipeline, onDispose) = Build(); + + // Assert + Assert.Empty(pipeline.Strategies); + + Assert.Null(onDispose); + } + + [Fact] + public void StrategyEnabled() + { + // Arrange + Options.Network.Resilience.ClientThrottleEnabled = true; + + // Act + var (pipeline, onDispose) = Build(); + + // Assert + var strategy = Assert.Single(pipeline.Strategies); + Assert.IsType(strategy.Options); + + Assert.NotNull(onDispose); + } + } + } +} diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Resilience/MaxConcurrentStrategyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/MaxConcurrentStrategyBuilderTests.cs new file mode 100644 index 0000000..cf9dd19 --- /dev/null +++ b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/MaxConcurrentStrategyBuilderTests.cs @@ -0,0 +1,63 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using Polly.RateLimiting; +using Tableau.Migration.Net.Resilience; +using Xunit; + +namespace Tableau.Migration.Tests.Unit.Net.Resilience +{ + public class MaxConcurrentStrategyBuilderTests + { + public class Build : ResilienceStrategyTestBase + { + protected override IResilienceStrategyBuilder GetBuilder() + => Create(); + + [Fact] + public void StrategyDisabledByDefault() + { + // Act + var (pipeline, onDispose) = Build(); + + // Assert + Assert.Empty(pipeline.Strategies); + + Assert.Null(onDispose); + } + + [Fact] + public void StrategyEnabled() + { + // Arrange + var resilienceOptions = Options.Network.Resilience; + resilienceOptions.ConcurrentRequestsLimitEnabled = true; + + // Act + var (pipeline, onDispose) = Build(); + + // Assert + var strategy = Assert.Single(pipeline.Strategies); + var options = Assert.IsType(strategy.Options); + + Assert.Equal(resilienceOptions.MaxConcurrentRequests, options.DefaultRateLimiterOptions.PermitLimit); + Assert.Equal(resilienceOptions.ConcurrentWaitingRequestsOnQueue, options.DefaultRateLimiterOptions.QueueLimit); + + Assert.Null(onDispose); + } + } + } +} diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Resilience/RequestTimeoutStrategyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/RequestTimeoutStrategyBuilderTests.cs new file mode 100644 index 0000000..e513cf8 --- /dev/null +++ b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/RequestTimeoutStrategyBuilderTests.cs @@ -0,0 +1,87 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Net.Http; +using System.Threading.Tasks; +using Polly; +using Polly.Timeout; +using Tableau.Migration.Net.Resilience; +using Xunit; + +namespace Tableau.Migration.Tests.Unit.Net.Resilience +{ + public class RequestTimeoutStrategyBuilderTests + { + public class Build : ResilienceStrategyTestBase + { + protected override IResilienceStrategyBuilder GetBuilder() + => Create(); + + private async ValueTask GetTimeoutAsync(TimeoutStrategyOptions strategyOptions, HttpMethod method, string? url) + { + Assert.NotNull(strategyOptions.TimeoutGenerator); + + var ctx = ResilienceContextPool.Shared.Get(); + try + { + var request = new HttpRequestMessage(method, url); + ctx.SetRequest(request); + + var args = new TimeoutGeneratorArguments(ctx); + + return await strategyOptions.TimeoutGenerator(args); + } + finally + { + ResilienceContextPool.Shared.Return(ctx); + } + } + + private async ValueTask AssertTimeoutsAsync(TimeoutStrategyOptions strategyOptions) + { + var timeout = Options.Network.Resilience.PerRequestTimeout; + var fileTimeout = Options.Network.Resilience.PerFileTransferRequestTimeout; + + // Fall back to default timeout when no method/URL match. + Assert.Equal(timeout, await GetTimeoutAsync(strategyOptions, HttpMethod.Get, null)); + Assert.Equal(timeout, await GetTimeoutAsync(strategyOptions, HttpMethod.Get, "http://localhost/datasources")); + Assert.Equal(timeout, await GetTimeoutAsync(strategyOptions, HttpMethod.Post, "http://localhost/datasources/id/content")); + + Assert.Equal(fileTimeout, await GetTimeoutAsync(strategyOptions, HttpMethod.Get, "http://localhost/api/datasources/id/content")); + Assert.Equal(fileTimeout, await GetTimeoutAsync(strategyOptions, HttpMethod.Get, "http://localhost/api/workbooks/id/content")); + Assert.Equal(fileTimeout, await GetTimeoutAsync(strategyOptions, HttpMethod.Put, "http://localhost/api/fileUploads/session")); + } + + [Fact] + public async Task TimeoutGeneratorConfiguredAsync() + { + // Arrange + + // Act + var (pipeline, onDispose) = Build(); + + // Assert + var strategy = Assert.Single(pipeline.Strategies); + var options = Assert.IsType(strategy.Options); + + await AssertTimeoutsAsync(options); + + Assert.Null(onDispose); + } + } + } +} diff --git a/src/Tableau.Migration/Net/HttpRequestMessageExtensions.cs b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/ResilienceContextExtensions.cs similarity index 68% rename from src/Tableau.Migration/Net/HttpRequestMessageExtensions.cs rename to tests/Tableau.Migration.Tests/Unit/Net/Resilience/ResilienceContextExtensions.cs index bed5a35..1027116 100644 --- a/src/Tableau.Migration/Net/HttpRequestMessageExtensions.cs +++ b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/ResilienceContextExtensions.cs @@ -15,15 +15,15 @@ // using System.Net.Http; +using Polly; -namespace Tableau.Migration.Net +namespace Tableau.Migration.Tests.Unit.Net.Resilience { - internal static class HttpRequestMessageExtensions + internal static class ResilienceContextExtensions { - internal static string GetPolicyRequestKey( - this HttpRequestMessage httpRequest) + internal static 
void SetRequest(this ResilienceContext ctx, HttpRequestMessage request) { - return $"{httpRequest.RequestUri?.AbsolutePath}_{httpRequest.Method}"; + ctx.Properties.Set(Migration.Net.Resilience.ResilienceContextExtensions.REQUEST_CONTEXT_KEY, request); } } } diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Resilience/ResilienceStrategyTestBase.cs b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/ResilienceStrategyTestBase.cs new file mode 100644 index 0000000..076cdc2 --- /dev/null +++ b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/ResilienceStrategyTestBase.cs @@ -0,0 +1,56 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +using System; +using System.Net.Http; +using Moq; +using Polly; +using Polly.Testing; +using Tableau.Migration.Config; +using Tableau.Migration.Net.Resilience; + +namespace Tableau.Migration.Tests.Unit.Net.Resilience +{ + public abstract class ResilienceStrategyTestBase : AutoFixtureTestBase + { + protected ResiliencePipelineBuilder PipelineBuilder { get; } + + protected MigrationSdkOptions Options { get; } + + protected DateTimeOffset UtcNow { get; set; } = DateTimeOffset.UtcNow - TimeSpan.FromMinutes(5); + + protected ResilienceStrategyTestBase() + { + PipelineBuilder = new(); + Options = new(); + + var mockTimeProvider = Freeze>(); + mockTimeProvider.Setup(x => x.GetUtcNow()).Returns(() => UtcNow); + } + + protected abstract IResilienceStrategyBuilder GetBuilder(); + + protected (ResiliencePipelineDescriptor, Action?) Build() + { + var builder = GetBuilder(); + + Action? onPipelineDisposed = null; + builder.Build(PipelineBuilder, Options, ref onPipelineDisposed); + + return (PipelineBuilder.Build().GetPipelineDescriptor(), onPipelineDisposed); + } + } +} diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Resilience/RetryStrategyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/RetryStrategyBuilderTests.cs new file mode 100644 index 0000000..28cc83d --- /dev/null +++ b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/RetryStrategyBuilderTests.cs @@ -0,0 +1,187 @@ +// Copyright (c) 2023, Salesforce, Inc. +// SPDX-License-Identifier: Apache-2 +// +// Licensed under the Apache License, Version 2.0 (the ""License"") +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an ""AS IS"" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +using System; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Threading.Tasks; +using Polly; +using Polly.RateLimiting; +using Polly.Retry; +using Polly.Timeout; +using Tableau.Migration.Net.Resilience; +using Xunit; + +namespace Tableau.Migration.Tests.Unit.Net.Resilience +{ + public class RetryStrategyBuilderTests + { + public class Build : ResilienceStrategyTestBase + { + protected override IResilienceStrategyBuilder GetBuilder() + => Create(); + + private async ValueTask GetPredicateResultAsync(RetryStrategyOptions strategyOptions, HttpStatusCode? resultCode = null, Exception? ex = null) + { + var ctx = ResilienceContextPool.Shared.Get(); + try + { + Outcome outcome = ex is not null ? + Outcome.FromException(ex) : + Outcome.FromResult(new HttpResponseMessage(resultCode ?? HttpStatusCode.OK)); + + var args = new RetryPredicateArguments(ctx, outcome, 1); + return await strategyOptions.ShouldHandle(args); + } + finally + { + ResilienceContextPool.Shared.Return(ctx); + } + } + + private async ValueTask AssertPredicateAsync(RetryStrategyOptions strategyOptions) + { + Assert.True(await GetPredicateResultAsync(strategyOptions, ex: new HttpRequestException())); + Assert.True(await GetPredicateResultAsync(strategyOptions, ex: new TimeoutRejectedException())); + Assert.True(await GetPredicateResultAsync(strategyOptions, ex: new RateLimiterRejectedException())); + + //Non-transient exceptions + Assert.False(await GetPredicateResultAsync(strategyOptions, ex: new Exception())); + Assert.False(await GetPredicateResultAsync(strategyOptions, ex: new OperationCanceledException())); + + var resilienceOptions = Options.Network.Resilience; + if(resilienceOptions.RetryOverrideResponseCodes.IsNullOrEmpty()) + { + foreach(var retryCode in resilienceOptions.RetryOverrideResponseCodes) + { + Assert.True(await GetPredicateResultAsync(strategyOptions, (HttpStatusCode)retryCode)); + } + } + else + { + Assert.True(await GetPredicateResultAsync(strategyOptions, HttpStatusCode.InternalServerError)); + Assert.True(await GetPredicateResultAsync(strategyOptions, HttpStatusCode.ServiceUnavailable)); + Assert.True(await GetPredicateResultAsync(strategyOptions, HttpStatusCode.RequestTimeout)); + + Assert.False(await GetPredicateResultAsync(strategyOptions, HttpStatusCode.NotFound)); + } + } + + private async ValueTask GetDelayAsync(RetryStrategyOptions strategyOptions, int attemptNumber) + { + Assert.NotNull(strategyOptions.DelayGenerator); + + var ctx = ResilienceContextPool.Shared.Get(); + try + { + var args = new RetryDelayGeneratorArguments(ctx, Outcome.FromResult(new HttpResponseMessage()), attemptNumber); + return await strategyOptions.DelayGenerator(args); + } + finally + { + ResilienceContextPool.Shared.Return(ctx); + } + } + + private async ValueTask AssertDelayGeneratorAsync(RetryStrategyOptions strategyOptions) + { + var resilienceOptions = Options.Network.Resilience; + if(!resilienceOptions.RetryIntervals.Any()) + { + return; + } + + for(int i = 0; i < resilienceOptions.RetryIntervals.Length; i++) + { + var interval = resilienceOptions.RetryIntervals[i]; + Assert.Equal(interval, await GetDelayAsync(strategyOptions, i)); + } + + Assert.Equal(resilienceOptions.RetryIntervals[^1], await GetDelayAsync(strategyOptions, resilienceOptions.RetryIntervals.Length + 1)); + } + + private async ValueTask AssertStrategyOptionsAsync(RetryStrategyOptions strategyOptions) + { + await AssertPredicateAsync(strategyOptions); + await AssertDelayGeneratorAsync(strategyOptions); + } + + [Fact] + 
+            public void StrategyDisabled()
+            {
+                // Arrange
+                Options.Network.Resilience.RetryEnabled = false;
+
+                // Act
+                var (pipeline, onDispose) = Build();
+
+                // Assert
+                Assert.Empty(pipeline.Strategies);
+
+                Assert.Null(onDispose);
+            }
+
+            [Fact]
+            public void NoRetryIntervals()
+            {
+                // Arrange
+                Options.Network.Resilience.RetryIntervals = Array.Empty<TimeSpan>();
+
+                // Act
+                var (pipeline, onDispose) = Build();
+
+                // Assert
+                Assert.Empty(pipeline.Strategies);
+
+                Assert.Null(onDispose);
+            }
+
+            [Fact]
+            public async Task StrategyEnabledWithDefaultsAsync()
+            {
+                // Act
+                var (pipeline, onDispose) = Build();
+
+                // Assert
+                var strategy = Assert.Single(pipeline.Strategies);
+                var strategyOptions = Assert.IsType<RetryStrategyOptions<HttpResponseMessage>>(strategy.Options);
+
+                await AssertStrategyOptionsAsync(strategyOptions);
+
+                Assert.Null(onDispose);
+            }
+
+            [Fact]
+            public async Task CustomIntervalsAsync()
+            {
+                // Arrange
+                Options.Network.Resilience.RetryIntervals = new[] { TimeSpan.FromSeconds(3), TimeSpan.FromSeconds(1) };
+
+                // Act
+                var (pipeline, onDispose) = Build();
+
+                // Assert
+                var strategy = Assert.Single(pipeline.Strategies);
+                var strategyOptions = Assert.IsType<RetryStrategyOptions<HttpResponseMessage>>(strategy.Options);
+
+                await AssertStrategyOptionsAsync(strategyOptions);
+
+                Assert.Null(onDispose);
+            }
+        }
+    }
+}
diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Resilience/ServerThrottleStrategyBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/ServerThrottleStrategyBuilderTests.cs
new file mode 100644
index 0000000..49fc7bf
--- /dev/null
+++ b/tests/Tableau.Migration.Tests/Unit/Net/Resilience/ServerThrottleStrategyBuilderTests.cs
@@ -0,0 +1,186 @@
+// Copyright (c) 2023, Salesforce, Inc.
+// SPDX-License-Identifier: Apache-2.0
+//
+// Licensed under the Apache License, Version 2.0 (the "License")
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+using System;
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using System.Threading.Tasks;
+using Polly;
+using Polly.Retry;
+using Tableau.Migration.Net.Resilience;
+using Xunit;
+
+namespace Tableau.Migration.Tests.Unit.Net.Resilience
+{
+    public class ServerThrottleStrategyBuilderTests
+    {
+        public class Build : ResilienceStrategyTestBase
+        {
+            protected override IResilienceStrategyBuilder GetBuilder()
+                => Create<ServerThrottleStrategyBuilder>();
+
+            private async ValueTask<bool> GetHandleResponseAsync(RetryStrategyOptions<HttpResponseMessage> strategyOptions, HttpStatusCode statusCode)
+            {
+                var ctx = ResilienceContextPool.Shared.Get();
+                try
+                {
+                    var outcome = Outcome.FromResult(new HttpResponseMessage(statusCode));
+                    var args = new RetryPredicateArguments<HttpResponseMessage>(ctx, outcome, 1);
+
+                    return await strategyOptions.ShouldHandle(args);
+                }
+                finally
+                {
+                    ResilienceContextPool.Shared.Return(ctx);
+                }
+            }
+
+            private async ValueTask AssertHandleResponseAsync(RetryStrategyOptions<HttpResponseMessage> strategyOptions)
+            {
+                Assert.True(await GetHandleResponseAsync(strategyOptions, HttpStatusCode.TooManyRequests));
+                Assert.False(await GetHandleResponseAsync(strategyOptions, HttpStatusCode.OK));
+            }
+
+            private async ValueTask<TimeSpan?> GetDelayAsync(RetryStrategyOptions<HttpResponseMessage> strategyOptions,
+                RetryConditionHeaderValue? retryHeader = null, int attemptNumber = 1)
+            {
+                Assert.NotNull(strategyOptions.DelayGenerator);
+
+                var ctx = ResilienceContextPool.Shared.Get();
+                try
+                {
+                    var response = new HttpResponseMessage(HttpStatusCode.TooManyRequests);
+                    if(retryHeader is not null)
+                    {
+                        response.Headers.RetryAfter = retryHeader;
+                    }
+
+                    var outcome = Outcome.FromResult(response);
+                    var args = new RetryDelayGeneratorArguments<HttpResponseMessage>(ctx, outcome, attemptNumber);
+
+                    return await strategyOptions.DelayGenerator(args);
+                }
+                finally
+                {
+                    ResilienceContextPool.Shared.Return(ctx);
+                }
+            }
+
+            private async ValueTask AssertDelayGeneratorAsync(RetryStrategyOptions<HttpResponseMessage> strategyOptions)
+            {
+                var delta = TimeSpan.FromMinutes(47);
+                Assert.Equal(delta, await GetDelayAsync(strategyOptions, new(delta)));
+
+                var targetDate = DateTimeOffset.UtcNow.AddMinutes(12);
+                Assert.Equal(targetDate - UtcNow, await GetDelayAsync(strategyOptions, new(targetDate)));
+
+                var resilienceOptions = Options.Network.Resilience;
+                if(!resilienceOptions.ServerThrottleRetryIntervals.Any())
+                {
+                    Assert.Equal(ServerThrottleStrategyBuilder.DEFAULT_RETRY_INTERVAL_FALLBACK, await GetDelayAsync(strategyOptions));
+                }
+                else
+                {
+                    for(var i = 0; i < resilienceOptions.ServerThrottleRetryIntervals.Length; i++)
+                    {
+                        var interval = resilienceOptions.ServerThrottleRetryIntervals[i];
+                        Assert.Equal(interval, await GetDelayAsync(strategyOptions, attemptNumber: i));
+                    }
+
+                    Assert.Equal(resilienceOptions.ServerThrottleRetryIntervals[^1], await GetDelayAsync(strategyOptions, attemptNumber: resilienceOptions.ServerThrottleRetryIntervals.Length));
+                }
+            }
+
+            private async ValueTask AssertStrategyOptionsAsync(RetryStrategyOptions<HttpResponseMessage> strategyOptions)
+            {
+                await AssertHandleResponseAsync(strategyOptions);
+                await AssertDelayGeneratorAsync(strategyOptions);
+            }
+
+            [Fact]
+            public async Task StrategyEnabledWithDefaultsAsync()
+            {
+                // Act
+                var (pipeline, onDispose) = Build();
+
+                // Assert
+                var strategy = Assert.Single(pipeline.Strategies);
+                var options = Assert.IsType<RetryStrategyOptions<HttpResponseMessage>>(strategy.Options);
+
+                await AssertStrategyOptionsAsync(options);
+
+                Assert.Null(onDispose);
+            }
+
+            [Fact]
+            public async Task NoConfiguredRetryIntervalsAsync()
+            {
+                // Arrange
+                Options.Network.Resilience.ServerThrottleRetryIntervals = Array.Empty<TimeSpan>();
+
+                // Act
+                var (pipeline, onDispose) = Build();
+
+                // Assert
+                var strategy = Assert.Single(pipeline.Strategies);
+                var options = Assert.IsType<RetryStrategyOptions<HttpResponseMessage>>(strategy.Options);
+
+                await AssertStrategyOptionsAsync(options);
+
+                Assert.Equal(int.MaxValue, options.MaxRetryAttempts);
+
+                Assert.Null(onDispose);
+            }
+
+            [Fact]
+            public async Task RetryLimitDisabledAsync()
+            {
+                // Arrange
+                Options.Network.Resilience.ServerThrottleLimitRetries = false;
+
+                // Act
+                var (pipeline, onDispose) = Build();
+
+                // Assert
+                var strategy = Assert.Single(pipeline.Strategies);
+                var options = Assert.IsType<RetryStrategyOptions<HttpResponseMessage>>(strategy.Options);
+
+                await AssertStrategyOptionsAsync(options);
+
+                Assert.Equal(int.MaxValue, options.MaxRetryAttempts);
+
+                Assert.Null(onDispose);
+            }
+
+            [Fact]
+            public void StrategyDisabled()
+            {
+                // Arrange
+                Options.Network.Resilience.ServerThrottleEnabled = false;
+
+                // Act
+                var (pipeline, onDispose) = Build();
+
+                // Assert
+                Assert.Empty(pipeline.Strategies);
+
+                Assert.Null(onDispose);
+            }
+        }
+    }
+}
diff --git a/tests/Tableau.Migration.Tests/Unit/Net/Rest/Sorting/SortBuilderTests.cs b/tests/Tableau.Migration.Tests/Unit/Net/Rest/Sorting/SortBuilderTests.cs
index a5f7008..b0e03b8 100644
--- a/tests/Tableau.Migration.Tests/Unit/Net/Rest/Sorting/SortBuilderTests.cs
+++ b/tests/Tableau.Migration.Tests/Unit/Net/Rest/Sorting/SortBuilderTests.cs
@@ -22,7 +22,7 @@
 
 namespace Tableau.Migration.Tests.Unit.Net.Rest.Sorting
 {
-    internal class SortBuilderTests
+    public class SortBuilderTests
     {
         public abstract class SortBuilderTest : AutoFixtureTestBase
         {
diff --git a/tests/Tableau.Migration.Tests/ValuesAttribute.cs b/tests/Tableau.Migration.Tests/ValuesAttribute.cs
index 5cc609b..350fedd 100644
--- a/tests/Tableau.Migration.Tests/ValuesAttribute.cs
+++ b/tests/Tableau.Migration.Tests/ValuesAttribute.cs
@@ -46,4 +46,15 @@ public ValuesAttribute(params T?[] values)
 
         protected virtual object?[] CreateArguments(T? value) => new object?[] { value };
     }
+
+    public class ValuesAttribute : ValuesAttribute<object?>
+    {
+        public ValuesAttribute(IEnumerable<object?> values)
+            : base(values)
+        { }
+
+        public ValuesAttribute(params object?[] values)
+            : base(values)
+        { }
+    }
 }