diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json index 549a268..278b6c7 100644 --- a/.config/dotnet-tools.json +++ b/.config/dotnet-tools.json @@ -7,12 +7,6 @@ "commands": [ "docfx" ] - }, - "DocFxTocGenerator": { - "version": "1.18.0", - "commands": [ - "DocFxTocGenerator" - ] } } } \ No newline at end of file diff --git a/.github/actions/setup-dotnet/action.yml b/.github/actions/setup-dotnet/action.yml index d1fe213..f859d30 100644 --- a/.github/actions/setup-dotnet/action.yml +++ b/.github/actions/setup-dotnet/action.yml @@ -5,21 +5,21 @@ runs: using: "composite" steps: - name: nuget Cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/.nuget/packages key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }} restore-keys: | ${{ runner.os }}-nuget- - name: Setup .NET 6.0 - uses: actions/setup-dotnet@v3 + uses: actions/setup-dotnet@v4 with: dotnet-version: 6.0.x - name: Setup .NET 7.0 - uses: actions/setup-dotnet@v3 + uses: actions/setup-dotnet@v4 with: dotnet-version: 7.0.x - name: Setup .NET 8.0 - uses: actions/setup-dotnet@v3 + uses: actions/setup-dotnet@v4 with: dotnet-version: 8.0.x \ No newline at end of file diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index ce148e4..68dd39a 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -21,7 +21,7 @@ jobs: steps: - name: Create Release id: create-release - uses: actions/github-script@v6 + uses: actions/github-script@v7 env: RELEASE_VERSION: ${{ inputs.release-version }} IS_PRERELEASE: ${{ inputs.is-pre-release }} @@ -48,13 +48,13 @@ jobs: }) return createReleaseResponse.data.id - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: docs - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: nuget-package - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: pypi-package - name: Generate Bundle @@ -62,7 +62,7 @@ jobs: zip -r bundle.zip ./Tableau.Migration.${{ inputs.release-version }}.nupkg docs.zip tableau_migration-pypi.zip - name: Upload Release Assets id: upload-docs - uses: actions/github-script@v6 + uses: actions/github-script@v7 env: RELEASE_VERSION: ${{ inputs.release-version }} RELEASE_ID: ${{ steps.create-release.outputs.result }} diff --git a/.github/workflows/dotnet-build.yml b/.github/workflows/dotnet-build.yml index 9e0db8d..3818c61 100644 --- a/.github/workflows/dotnet-build.yml +++ b/.github/workflows/dotnet-build.yml @@ -23,11 +23,11 @@ jobs: runs-on: ${{ matrix.os }} name: .Net Build ${{ matrix.os }}, ${{ matrix.config }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: ./.github/actions/setup-dotnet - name: Set Replaced Version Windows if: ${{ runner.os == 'Windows' && inputs.beta-version != '' }} - run: echo "VERSION_REPLACE_ARGS=-p:Version='${{ inputs.beta-version }}'" | Out-File -FilePath $env:GITHUB_ENV -Append # no need for -Encoding utf8- uses: actions/checkout@v3 + run: echo "VERSION_REPLACE_ARGS=-p:Version='${{ inputs.beta-version }}'" | Out-File -FilePath $env:GITHUB_ENV -Append # no need for -Encoding utf8 - name: Set Replaced Version Not Windows if: ${{ runner.os != 'Windows' && inputs.beta-version != '' }} run: echo "VERSION_REPLACE_ARGS=-p:Version='${{ inputs.beta-version }}'" >> $GITHUB_ENV @@ -43,25 +43,25 @@ jobs: if: ${{ matrix.os == vars.PUBLISH_OS && matrix.config == 'Release' }} run: dotnet publish --no-build -p:DebugType=None 
-p:DebugSymbols=false -c ${{ matrix.config }} -f ${{ vars.PYTHON_NETPACKAGE_FRAMEWORK }} -o './dist/testcomponents/' './tests/Tableau.Migration.TestComponents/Tableau.Migration.TestComponents.csproj' - name: Upload Published Artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: ${{ matrix.os == vars.PUBLISH_OS && matrix.config == 'Release' }} with: name: published-${{ matrix.config }} path: './src/Python/src/tableau_migration/bin/**' - name: Upload Tests Artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: ${{ matrix.os == vars.PUBLISH_OS && matrix.config == 'Release' }} with: name: tests-published-${{ matrix.config }} path: './dist/tests/**' - name: Upload TestComponents Artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: ${{ matrix.os == vars.PUBLISH_OS && matrix.config == 'Release' }} with: name: testcomponents-published-${{ matrix.config }} path: './dist/testcomponents/**' - name: Upload Nupkg Artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: ${{ matrix.os == vars.PUBLISH_OS && matrix.config == 'Release' }} with: name: nuget-package diff --git a/.github/workflows/dotnet-package.yml b/.github/workflows/dotnet-package.yml index eb9c39f..e51cae2 100644 --- a/.github/workflows/dotnet-package.yml +++ b/.github/workflows/dotnet-package.yml @@ -34,11 +34,11 @@ jobs: runs-on: ${{ inputs.runs-on-config }} name: Publish Package from ${{ inputs.published-os }} with ${{ inputs.build-config }} configuration steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 if: ${{ env.PUBLISH_PACKAGE_KEY != '' }} - uses: ./.github/actions/setup-dotnet if: ${{ env.PUBLISH_PACKAGE_KEY != '' && inputs.runs-on-config != 'self-hosted' }} - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 if: ${{ env.PUBLISH_PACKAGE_KEY != '' }} with: name: nuget-package diff --git a/.github/workflows/dotnet-test.yml b/.github/workflows/dotnet-test.yml index a991aa0..6840db6 100644 --- a/.github/workflows/dotnet-test.yml +++ b/.github/workflows/dotnet-test.yml @@ -20,22 +20,18 @@ jobs: runs-on: ${{ matrix.os }} name: .Net Test ${{ matrix.os }}, ${{ matrix.config }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: ./.github/actions/setup-dotnet - - name: Test ${{ matrix.config }} + - name: Build solution with ${{ matrix.config }} configuration + run: dotnet build '${{ vars.BUILD_SOLUTION }}' -c ${{ matrix.config }} + - name: Test solution with ${{ matrix.config }} configuration run: | - dotnet test '${{ vars.BUILD_SOLUTION }}' -c ${{ matrix.config }} -p:CollectCoverage=true -p:CoverletOutputFormat=cobertura -p:CoverletOutput=./artifacts/ --verbosity normal --logger trx --results-directory "TestResults-${{ matrix.os }}-${{ matrix.config }}" + dotnet test '${{ vars.BUILD_SOLUTION }}' --no-build -c ${{ matrix.config }} --verbosity normal --logger trx --results-directory "TestResults-${{ matrix.os }}-${{ matrix.config }}" -- RunConfiguration.TestSessionTimeout=${{ vars.MIGRATIONSDK_TEST_CANCELLATION_TIMEOUT_MILLISECONDS }} - name: Upload test results # Use always() to always run this step to publish test results when there are test failures if: ${{ always() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: dotnet-results-${{ matrix.os }}-${{ matrix.config }} path: TestResults-${{ matrix.os }}-${{ matrix.config }} if-no-files-found: error - - name: Upload Code Coverage - uses: actions/upload-artifact@v3 - with: - 
name: coverage-${{ matrix.os }}-${{ matrix.config }} - path: '**/artifacts/coverage*.cobertura.xml' - if-no-files-found: error diff --git a/.github/workflows/publishdocs-dryrun.yml b/.github/workflows/publishdocs-dryrun.yml index 79618c9..773bb81 100644 --- a/.github/workflows/publishdocs-dryrun.yml +++ b/.github/workflows/publishdocs-dryrun.yml @@ -20,11 +20,8 @@ jobs: runs-on: ${{ inputs.runs-on-config }} name: Publish docs - Dry-Run steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: ./.github/actions/setup-dotnet - - uses: actions/download-artifact@v3 - with: - name: coverage-${{ inputs.runs-on-config }}-${{ inputs.build-config }} - name: Set up Python uses: actions/setup-python@v4 with: @@ -35,7 +32,7 @@ jobs: ./scripts/generate-docs.ps1 -SkipPreClean Compress-Archive ./docs/* -Destination docs.zip - name: Upload Docs Artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: docs path: docs.zip \ No newline at end of file diff --git a/.github/workflows/publishdocs.yml b/.github/workflows/publishdocs.yml index fb117e9..3ba9c20 100644 --- a/.github/workflows/publishdocs.yml +++ b/.github/workflows/publishdocs.yml @@ -34,11 +34,8 @@ jobs: runs-on: ${{ inputs.runs-on-config }} name: Publish docs steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: ./.github/actions/setup-dotnet - - uses: actions/download-artifact@v3 - with: - name: coverage-${{ inputs.runs-on-config }}-${{ inputs.build-config }} - name: Set up Python uses: actions/setup-python@v4 with: @@ -49,7 +46,7 @@ jobs: ./scripts/generate-docs.ps1 -SkipPreClean Compress-Archive ./docs/* -Destination docs.zip - name: Upload Docs Artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: docs path: docs.zip @@ -61,4 +58,4 @@ jobs: path: './docs' - name: Deploy to GitHub Pages id: deployment - uses: actions/deploy-pages@v4.0.4 + uses: actions/deploy-pages@v4.0.5 diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 4085414..12b6f93 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -38,7 +38,7 @@ jobs: runs-on: ${{ inputs.runs-on-config }} name: Publish Package with ${{ inputs.build-config }} configuration steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 if: ${{ env.PUBLISH_PACKAGE_KEY != '' }} - uses: ./.github/actions/setup-dotnet if: ${{ env.PUBLISH_PACKAGE_KEY != '' && inputs.runs-on-config != 'self-hosted' }} @@ -59,7 +59,7 @@ jobs: run: | # default set of ruff rules with GitHub Annotations python -m hatch run lint:lint - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 if: ${{ env.PUBLISH_PACKAGE_KEY != '' }} with: name: published-${{ inputs.build-config }} @@ -83,7 +83,7 @@ jobs: python -m twine upload --repository-url ${{ vars.PYPI_PACKAGE_REPOSITORY_URL }} dist/* Compress-Archive -Path .\dist\* -DestinationPath .\tableau_migration-pypi.zip - name: Upload Pypi Artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: ${{ env.PUBLISH_PACKAGE_KEY != '' && inputs.publish-artifact }} with: name: pypi-package diff --git a/.github/workflows/python-test.yml b/.github/workflows/python-test.yml index 59731ee..c2d0f41 100644 --- a/.github/workflows/python-test.yml +++ b/.github/workflows/python-test.yml @@ -19,7 +19,7 @@ jobs: runs-on: ${{ matrix.os }} name: Test on ${{ matrix.os }}, ${{ matrix.config }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - 
uses: ./.github/actions/setup-dotnet - name: Set up Python uses: actions/setup-python@v4 @@ -30,11 +30,11 @@ jobs: run: | python -m pip install --upgrade pip python -m pip install hatch - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: tests-published-${{ matrix.config }} path: ./src/Python/src/tableau_migration/bin/ - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: testcomponents-published-${{ matrix.config }} path: ./tests/Python.TestApplication/bin/ diff --git a/.github/workflows/sdk-workflow.yml b/.github/workflows/sdk-workflow.yml index 283daab..db47ec8 100644 --- a/.github/workflows/sdk-workflow.yml +++ b/.github/workflows/sdk-workflow.yml @@ -14,9 +14,6 @@ on: - '**/.gitignore' - 'CODEOWNERS' pull_request: - branches: - - main - - 'release/**' # The default trigger for Pull Requests are: # - opened # - synchronize @@ -56,7 +53,7 @@ jobs: code-version: ${{ steps.get-version.outputs.codeversion }} if: github.event.pull_request.draft == false steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set Beta Version id: set-beta-version if: ${{ inputs.publish-release != 'Prod' && inputs.publish-release != 'Prod-Internal' }} diff --git a/.gitignore b/.gitignore index d9a177b..3e6a492 100644 --- a/.gitignore +++ b/.gitignore @@ -173,6 +173,7 @@ cython_debug/ # Other stuff UpgradeLog.htm appsettings.Development.json +clean-server-settings.dev.json launchSettings.json UpgradeLog.htm *.DEV.ini diff --git a/Directory.Build.props b/Directory.Build.props index 8cfe4ad..32a8051 100644 --- a/Directory.Build.props +++ b/Directory.Build.props @@ -4,7 +4,7 @@ enable true true - 2.0.0 + 2.1.0 Tableau Software, LLC Tableau Software, LLC Copyright (c) 2024, Tableau Software, LLC and its licensors diff --git a/examples/Python.ExampleApplication/Python.ExampleApplication.py b/examples/Python.ExampleApplication/Python.ExampleApplication.py index 4619711..01f820c 100644 --- a/examples/Python.ExampleApplication/Python.ExampleApplication.py +++ b/examples/Python.ExampleApplication/Python.ExampleApplication.py @@ -1,57 +1,64 @@ -# This application is meant to mimic what an actual user would write -# This application assumes you have already installed the Tableau Migration SDK Python package. +# This application performs a basic migration using the Tableau Migration SDK. +# By default all supported content will be migrated, but can be modified to your specific needs. +# The application assumes you have already installed the Tableau Migration SDK Python package. 
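+
+# For illustration only: the config.ini this example reads is assumed to look roughly like the
+# following (the section and key names match the config[...] lookups below; all values are
+# placeholders, not real servers or tokens):
+#
+#   [SOURCE]
+#   URL = https://my-tableau-server.example.com
+#   SITE_CONTENT_URL = my-source-site
+#   ACCESS_TOKEN_NAME = my-source-token-name
+#   ACCESS_TOKEN = my-source-token-value
+#
+#   [DESTINATION]
+#   URL = https://my-pod.online.tableau.com
+#   SITE_CONTENT_URL = my-destination-site
+#   ACCESS_TOKEN_NAME = my-destination-token-name
+#   ACCESS_TOKEN = my-destination-token-value
+#
+#   [USERS]
+#   EMAIL_DOMAIN = example.com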
+import configparser # configuration parser +import os # environment variables +import sys # system utility +import tableau_migration # Tableau Migration SDK -import configparser # configuration parser -import os # environment variables -import tableau_migration -from threading import Thread - -from tableau_migration.migration_engine import PyMigrationPlanBuilder -from tableau_migration.migration_engine_migrators import PyMigrator +from threading import Thread # threading def migrate(): - """This function does the actual migration""" + """Performs a migration using the Tableau Migration SDK.""" - planBuilder = PyMigrationPlanBuilder() - migration = PyMigrator() + planBuilder = tableau_migration.MigrationPlanBuilder() + migration = tableau_migration.Migrator() config = configparser.ConfigParser() - config.read('config.DEV.ini') + config.read('config.ini') - # Build the plan - planBuilder = planBuilder \ - .from_source_tableau_server(config['SOURCE']['URL'], config['SOURCE']['SITE_CONTENT_URL'], config['SOURCE']['ACCESS_TOKEN_NAME'], os.environ['TABLEAU_MIGRATION_SOURCE_TOKEN']) \ - .to_destination_tableau_cloud(config['DESTINATION']['URL'], config['DESTINATION']['SITE_CONTENT_URL'], config['DESTINATION']['ACCESS_TOKEN_NAME'], os.environ['TABLEAU_MIGRATION_DESTINATION_TOKEN']) \ - .for_server_to_cloud() \ + # Build the plan. + planBuilder = planBuilder \ + .from_source_tableau_server( + server_url = config['SOURCE']['URL'], + site_content_url = config['SOURCE']['SITE_CONTENT_URL'], + access_token_name = config['SOURCE']['ACCESS_TOKEN_NAME'], + access_token = os.environ.get('TABLEAU_MIGRATION_SOURCE_TOKEN', config['SOURCE']['ACCESS_TOKEN'])) \ + .to_destination_tableau_cloud( + pod_url = config['DESTINATION']['URL'], + site_content_url = config['DESTINATION']['SITE_CONTENT_URL'], + access_token_name = config['DESTINATION']['ACCESS_TOKEN_NAME'], + access_token = os.environ.get('TABLEAU_MIGRATION_DESTINATION_TOKEN', config['DESTINATION']['ACCESS_TOKEN'])) \ + .for_server_to_cloud() \ .with_tableau_id_authentication_type() \ .with_tableau_cloud_usernames(config['USERS']['EMAIL_DOMAIN']) - # You can Add filters, mappings, transformers, etc. here + # TODO: add filters, mappings, transformers, etc. here. - # Validate the migration plan - validation_result=planBuilder.validate() + # Validate the migration plan. + validation_result = planBuilder.validate() - #You can log errors here if the validation fails + # TODO: Handle errors if the validation fails here. plan = planBuilder.build() - # Run the migration + # Run the migration. results = migration.execute(plan) - # You can handle results here + # TODO: Handle results here. - print("All done") + print("All done.") if __name__ == '__main__': - # Create a thread that will run the migration and start it + # Create a thread that will run the migration and start it. migration_thread = Thread(target = migrate) migration_thread.start(); done = False - # Create a busy-wait look to continue checking if Ctrl+C was pressed to cancel the migration + # Create a busy-wait loop to continue checking if Ctrl+C was pressed to cancel the migration. while not done: try: migration_thread.join(1) @@ -60,10 +67,10 @@ def migrate(): # Ctrl+C was caught, request migration to cancel. print("Caught Ctrl+C, shutting down...") - # This will cause the migration-sdk to cleanup and finish - # which will cause the thread to finish + # This will cause the Migration SDK to cleanup and finish, + # which will cause the thread to finish.
tableau_migration.cancellation_token_source.Cancel() - # Wait for the migration thread to finish and then quit the app + # Wait for the migration thread to finish and then quit the application. migration_thread.join() done = True diff --git a/examples/Python.ExampleApplication/hooks.py b/examples/Python.ExampleApplication/hooks.py index 9943458..0de1a7d 100644 --- a/examples/Python.ExampleApplication/hooks.py +++ b/examples/Python.ExampleApplication/hooks.py @@ -1,14 +1,12 @@ -from Tableau.Migration.Interop.Hooks import ISyncMigrationHook -from Tableau.Migration.Engine.Hooks import IMigrationActionCompletedHook +from System.Collections.Generic import List +from Tableau.Migration.Engine import ContentMigrationItem from Tableau.Migration.Engine.Actions import IMigrationActionResult +from Tableau.Migration.Engine.Hooks import IMigrationActionCompletedHook +from Tableau.Migration.Interop.Hooks import ISyncMigrationHook from Tableau.Migration.Interop.Hooks.Filters import ISyncContentFilter from Tableau.Migration.Interop.Hooks.Mappings import ISyncContentMapping from Tableau.Migration.Interop.Hooks.Transformers import ISyncContentTransformer -from Tableau.Migration.Content import( - IGroup, - IUser, - IProject) -from Tableau.Migration.Engine import ContentMigrationItem +from Tableau.Migration.Content import IGroup, IProject, IUser class PyLogActionHook(ISyncMigrationHook[IMigrationActionResult], IMigrationActionCompletedHook): __namespace__ = "MyNamespace" diff --git a/examples/Python.ExampleApplication/hooks_callback.py b/examples/Python.ExampleApplication/hooks_callback.py index 320bb79..11e9043 100644 --- a/examples/Python.ExampleApplication/hooks_callback.py +++ b/examples/Python.ExampleApplication/hooks_callback.py @@ -1,5 +1,6 @@ from Tableau.Migration.Content import IGroup from Tableau.Migration.Engine import ContentMigrationItem +from System.Collections.Generic import List def log_callback(ctx): print("ACTION COMPLETED") diff --git a/examples/Python.ExampleApplication/hooks_factory.py b/examples/Python.ExampleApplication/hooks_factory.py index 8c4b961..5731307 100644 --- a/examples/Python.ExampleApplication/hooks_factory.py +++ b/examples/Python.ExampleApplication/hooks_factory.py @@ -1,15 +1,12 @@ -from Tableau.Migration.Interop.Hooks import ISyncMigrationHook +from System.Collections.Generic import List +from Tableau.Migration.Content import IGroup, IProject, IUser +from Tableau.Migration.Engine import ContentMigrationItem from Tableau.Migration.Engine.Actions import IMigrationActionResult from Tableau.Migration.Engine.Hooks import IMigrationActionCompletedHook +from Tableau.Migration.Interop.Hooks import ISyncMigrationHook from Tableau.Migration.Interop.Hooks.Filters import ISyncContentFilter from Tableau.Migration.Interop.Hooks.Mappings import ISyncContentMapping from Tableau.Migration.Interop.Hooks.Transformers import ISyncContentTransformer -from Tableau.Migration.Content import ( - IGroup, - IUser, - IProject) -from Tableau.Migration.Engine import ContentMigrationItem -from System.Collections.Generic import List class PyLogActionHook(ISyncMigrationHook[IMigrationActionResult], IMigrationActionCompletedHook): __namespace__ = "MyNamespace" diff --git a/scripts/Clean-Server.dib b/scripts/Clean-Server.dib new file mode 100644 index 0000000..c938451 --- /dev/null +++ b/scripts/Clean-Server.dib @@ -0,0 +1,205 @@ +#!meta + +{"kernelInfo":{"defaultKernelName":"csharp","items":[{"aliases":[],"languageName":"csharp","name":"csharp"}]}} + +#!markdown + +# Clean Server +This notebook will 
delete all projects, groups, and users. + +#!markdown + +## Readme + + + + +This is a [polyglot notebook](https://code.visualstudio.com/docs/languages/polyglot). It's the VS Code version of a Jupyter notebook. + +The [Polyglot Notebooks extension](https://marketplace.visualstudio.com/items?itemName=ms-dotnettools.dotnet-interactive-vscode) VS Code extension needs to be installed. + +Once that's done, just open it in VS Code and it turns into a wiki with executable cells. If you run them all in order, then all the projects, groups, and users on the configured server will be deleted to get it ready for the next E2E tests. + +If you only want to delete certain content types, run all the cells through "Sign in" manually; after that you can run the remaining cells in any order. + +#!markdown + +## Setup + +Because of how C# Interactive and notebooks work, the best way to get the current directory is by calling C# from pwsh. + +Then we `dotnet pack` the solution, which creates the nuget package that is loaded in the next cell. + +The version needs to be ever-increasing; otherwise a cached nuget package will be used. + +**Note:** +* You may see some red text about missing Python targets. This can be ignored. +* For some reason it doesn't like the major.minor.build.revision version format. It still works, so ignore the warnings. + +#!pwsh + +#Register-PackageSource -provider NuGet -Location "C:\Users\sfroehlich\Code\migration-sdk\src\Tableau.Migration\bin\Release\" -Name local +Install-Package "Tableau.Migration" -Scope CurrentUser -ProviderName local + +#!pwsh + +# Setup well known directory paths +$currentDir = [System.IO.Directory]::GetCurrentDirectory() +$baseDir = (Get-Item $currentDir).Parent +$releaseDir = Join-Path $baseDir "src/Tableau.Migration/bin/Release" +$nugetDir = "C:\temp\migration-sdk" + +# Create nuget package version +[xml]$buildProps = Get-Content -Path (Join-Path $baseDir "Directory.Build.props") +$packageSuffix = Get-Date -UFormat %s +$packageVersion = $buildProps.Project.PropertyGroup.Version + ".${packageSuffix}" + + +if (-not (Test-Path -Path $nugetDir)) { + New-Item -Path $nugetDir -ItemType Directory +} + + +# Delete previous packages +Remove-Item $nugetDir -Include *.nupkg,*.snupkg + +# Build new packages +cd $baseDir +#dotnet build -c Release +dotnet pack -c Release -p:Version=$packageVersion --version-suffix $packageSuffix --output "C:\temp\migration-sdk" + +#!markdown + +Loads the Tableau.Migration nuget package that was built in the previous step. This also installs dependent nuget packages only used in this notebook. + +#!csharp + +// This will load the required nuget packages. +// If you already ran this once, you must reload the kernel of the notebook, else it will use the already loaded version. + +#i "nuget:C:\Temp\migration-sdk" +#r "nuget:Tableau.Migration," +#r "nuget:Microsoft.Extensions.Logging.Console" +#r "nuget:Microsoft.Extensions.Configuration.Json" + +#!markdown + +## Delete projects, groups, users + +#!markdown + +### Setup and configuration + +This section defines all the namespaces the main script needs and sets up the configuration values. + +You must copy the `clean-server-settings.json` to `clean-server-settings.dev.json` and fill it in.
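+
+For example, the copied `clean-server-settings.dev.json` might look like the following (the shape mirrors the `scripts/clean-server-settings.json` template added in this change; all values are placeholders):
+
+```json
+{
+  "ConnectionConfig": {
+    "ServerUrl": "http://my-test-server",
+    "SiteContentUrl": "",
+    "AccessTokenName": "my server token name",
+    "AccessToken": "my-actual-token-value"
+  }
+}
+```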
+ +#!csharp + +using System.Collections.Concurrent; +using System.Threading; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Tableau.Migration; +using Tableau.Migration.Api; +using Tableau.Migration.Content; + +CancellationToken cancel = default; + +var config = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary + { + { "Files:RootPath", @"C:\Temp\filestore"}, + { "Network:HeadersLoggingEnabled", "true" }, + { "Network:ContentLoggingEnabled", "true" }, + { "Network:BinaryContentLoggingEnabled", "true" } + }) + // Copy the clean-server-settings.json to clean-server-settings.dev.json and fill in. + .AddJsonFile(System.IO.Path.Combine(System.IO.Directory.GetCurrentDirectory(), "clean-server-settings.dev.json")) + .Build(); + +#!markdown + +### Sign into server + +This must be run before any of the following cells. + +Once signed in, any of the cells below can be run in any order. +Note that the order project, groups, users is the fastest. + +#!csharp + +var serviceCollection = new ServiceCollection() + //.AddLogging(b => b.AddConsole()) + .AddTableauMigrationSdk(config); + +var services = serviceCollection.BuildServiceProvider(); + +var connectionConfig = config.GetSection("ConnectionConfig").Get(); + +var apiClient = services.GetRequiredService() + .Initialize(connectionConfig); + +var signIn = await apiClient.SignInAsync(cancel); + +if(!signIn.Success) +{ + foreach(var e in signIn.Errors) + Console.WriteLine(e); + + // If the above error isn't enough, uncomment the .AddLogging on line 2 +} + + var siteClient = signIn.Value!; + +#!markdown + +Delete all projects + +#!csharp + +var projects = await siteClient.Projects.GetAllAsync(100, cancel); + +foreach(var proj in projects.Value) +{ + Console.WriteLine($"About to delete project: {proj.Name}"); + await siteClient.Projects.DeleteProjectAsync(proj.Id, cancel); +} + +#!markdown + +Delete all groups + +#!csharp + +var groups = await siteClient.Groups.GetAllAsync(100, cancel); + +foreach(var group in groups.Value) +{ + Console.WriteLine($"About to delete group: {group.Name}"); + await siteClient.Groups.DeleteGroupAsync(group.Id, cancel); +} + +#!markdown + +Delete all non-admin Users + +#!csharp + +var users = await siteClient.Users.GetAllAsync(1000, cancel); + +var parallelUsers = new ConcurrentBag(users.Value); + +ParallelOptions parallelOptions = new() +{ + MaxDegreeOfParallelism = 10 +}; + +await Parallel.ForEachAsync(parallelUsers, parallelOptions, async (user, cancel) => { + if(user.AdministratorLevel.Contains("None")) + { + Console.WriteLine($"About to delete user: {user.Name}"); + await siteClient.Users.DeleteUserAsync(user.Id, cancel); + } +}); diff --git a/scripts/clean-server-settings.json b/scripts/clean-server-settings.json new file mode 100644 index 0000000..087e739 --- /dev/null +++ b/scripts/clean-server-settings.json @@ -0,0 +1,8 @@ +{ + "ConnectionConfig": { + "ServerUrl": "http://server", + "SiteContentUrl": "", + "AccessTokenName": "my server token name", + "AccessToken": "my server token" + } +} \ No newline at end of file diff --git a/scripts/generate-docs.ps1 b/scripts/generate-docs.ps1 index 1e92b0d..a636285 100644 --- a/scripts/generate-docs.ps1 +++ b/scripts/generate-docs.ps1 @@ -44,14 +44,23 @@ $main_docs_dir = Join-Path $root_dir "src/Documentation" $sphinx_output_dir = Join-Path $python_dir "Documentation/generated" function Run-Command { - param([string] $cmd) - Write-Host(" ") - Write-Host($cmd) - 
Invoke-Expression $cmd | Format-Console - if ($LASTEXITCODE -ne 0) { - Throw "Error: $cmd failed" + param([string]$Cmd) + Write-Host "Executing command"; + Write-Host "$Cmd"; + + try { + $startTime = Get-Date; + Invoke-Expression $Cmd -OutVariable output | Format-Console; + $endTime = Get-Date; + $executionTime = $endTime - $startTime; + + Write-Host "Done. $($executionTime.Hours*60 + $executionTime.Minutes)m$($executionTime.Seconds)s$($executionTime.Milliseconds)ms." -ForegroundColor Green; + + return $output; } - Write-Host(" ") + catch { + Write-Host-With-Timestamp "Function: $((Get-Variable MyInvocation -Scope 1).Value.MyCommand.Name). Command: $Cmd. Error: $_" "ERROR"; + } } function Restore-Tools { @@ -59,7 +68,7 @@ function Restore-Tools { .SYNOPSIS Restore required .NET tools. #> - Write-Host("Restoring dotnet tools with versions from global.config.") + Write-Host-With-Timestamp "Restoring dotnet tools with versions from global.config."; & dotnet tool restore -v q } @@ -72,25 +81,43 @@ function Clear-Directory { Path to the directory. #> if ($SkipPreClean) { - Write-Host("Skipping directory clean since SkipPreClean flag is on: " + $path) + Write-Host-With-Timestamp "Skipping directory clean since SkipPreClean flag is on: " + $path; } elseif (-Not (Test-Path $path)) { - Write-Host("Skipping directory clean since the folder does not exist: " + $path) + Write-Host-With-Timestamp "Skipping directory clean since the folder does not exist: " + $path; } else { - Write-Host("Cleaning directory: " + $path) - - Get-ChildItem -Path ($path + "/") * -Exclude ".gitignore" -File -Recurse | ForEach-Object { $_.Delete() } + Get-ChildItem -Path ($path + "/") * -Exclude ".gitignore" -Directory -Recurse | ForEach-Object { Remove-Item $_ -Recurse }; + Get-ChildItem -Path ($path + "/") * -Exclude ".gitignore" -File -Recurse | ForEach-Object { Remove-Item $_ }; if ($LASTEXITCODE -ne 0) { - Throw "Failed: Cleaning directory" + $path - } - else { - Write-Host("Finished: Cleaning directory: " + $path) + Throw "Failed: Cleaning directory $path"; } } } +function Write-Sdk-Version { + <# +.SYNOPSIS + Generate a Migration SDK version metadata file for docfx to use. +#> + $versionSourceFileName = "Directory.Build.props"; + $docfxMetadataFileName = "migration_sdk_metadata.json"; + + Write-Host-With-Timestamp("Writing Tableau Migration SDK from $versionSourceFileName for docfx."); + + $buildPropsXml = (Run-Command ("[Xml] (Get-Content (Join-Path $root_dir $versionSourceFileName))")); + + $sdkVersion = $buildPropsXml.Project.PropertyGroup.Version; + + Write-Host("Tableau Migration SDK version is $sdkVersion"); + + $fileContent = "{""_comment"": ""This is an auto-generated file. 
Do not modify."", ""_migrationSdkVersion"": ""$sdkVersion""}"; + $filePath = Join-Path $main_docs_dir $docfxMetadataFileName; + + Run-Command ("Out-File -FilePath '$filePath' -InputObject '$($fileContent | Out-String)'"); +} + function Write-Python-docs { <# .SYNOPSIS @@ -100,17 +127,17 @@ function Write-Python-docs { # Sphinx related paths $sphinx_build_dir = Join-Path $python_dir "Documentation" - Write-Host("Generating python docs.") - & Run-Command ("Push-Location $python_dir") - & Run-Command ("python -m pip install -q --upgrade pip") - & Run-Command ("python -m pip install -q hatch") - & Run-Command ("Clear-Directory -Path $sphinx_output_dir") - & Run-Command ("python -m hatch run docs:sphinx-build -M markdown $sphinx_build_dir $sphinx_output_dir") - & Run-Command ("Pop-Location") - & Write-Host("Finished: Generating python docs.") + Write-Host-With-Timestamp "Generating python docs."; + Run-Command ("Push-Location $python_dir"); + Run-Command ("python -m pip install -q --upgrade pip"); + Run-Command ("python -m pip install -q hatch"); + Run-Command ("Clear-Directory -Path $sphinx_output_dir"); + Run-Command ("python -m hatch run docs:sphinx-build -M markdown $sphinx_build_dir $sphinx_output_dir -q"); + Run-Command ("Pop-Location"); + Write-Host-With-Timestamp "Finished: Generating python docs."; if ($LASTEXITCODE -ne 0) { - Throw "Failed: Generating python docs." + Throw "Failed: Generating python docs."; } } @@ -121,16 +148,16 @@ function Copy-Python-Docs { Sphinx fails to generate documentation for some of our Python files. #> # Directory to which sphinx write generated markdown files. - $sphinx_generated_files_dir = Join-Path $sphinx_output_dir "markdown/generated/*" + $sphinx_generated_files_dir = Join-Path $sphinx_output_dir "markdown/generated/*"; # Directory where DocFX looks for markdown files to render into our 'Python Wrapper' section. - $python_md_destination = Join-Path $main_docs_dir "python_wrapper" + $python_md_destination = Join-Path $main_docs_dir "python_wrapper"; - Write-Host("Copying python docs to final destination.") - & Run-Command ("Clear-Directory -Path $python_md_destination") - & Run-Command ("Copy-Item -Force -Recurse $sphinx_generated_files_dir -Destination $python_md_destination") + Write-Host-With-Timestamp "Copying python docs to final destination."; + Run-Command ("Clear-Directory -Path $python_md_destination"); + Run-Command ("Copy-Item -Force -Recurse $sphinx_generated_files_dir -Destination $python_md_destination"); if ($LASTEXITCODE -ne 0) { - Throw "Failed: Copying python docs to final destination." + Throw "Failed: Copying python docs to final destination."; } } @@ -139,19 +166,104 @@ function Write-Python-Docs-Toc { .SYNOPSIS Generate a toc.yml file (table of contents for DocFX) from the python doc markdown files. 
#> - $python_md_destination = Join-Path $main_docs_dir "python_wrapper" - Write-Host("Generating toc.yml for Python auto-generated doc files ($python_md_destination).") - & Run-Command ("dotnet DocFxTocGenerator --docfolder $python_md_destination") + $python_md_destination = Join-Path $main_docs_dir "python_wrapper"; + Write-Host-With-Timestamp "Generating toc.yml for Python auto-generated doc files ($python_md_destination)."; + + class DocFileInfo { + [string]$Package; + [string]$Module; + [string]$Member; + [string]$Category; + [string]$FileName; + } + + $docFiles = New-Object Collections.Generic.List[DocFileInfo]; + $files = Get-ChildItem $python_md_destination -include *.md -Recurse -File | Select-Object BaseName, NameString | Sort-Object -Property BaseName.Length; + + foreach ($file in $files) { + + #file names are in the package.module.Member format. + # Hence we split them into their individual components are put them into + # a DocFileInfo object + $splitName = $file.BaseName.Split("."); + + $newDocFile = New-Object -TypeName DocFileInfo; + $newDocFile.FileName = $file.NameString; + + if ($splitName.Length -gt 2) { + $newDocFile.Member = $splitName[2]; + $newDocFile.Category = "Member"; + } + if ($splitName.Length -gt 1) { + $newDocFile.Module = $splitName[1]; + if ($null -eq $newDocFile.Category) { + $newDocFile.Category = "Module"; + } + } + if ($splitName.Length -gt 0) { + $newDocFile.Package = $splitName[0]; + if ($null -eq $newDocFile.Category) { + $newDocFile.Category = "Package"; + } + } + + $docFiles.Add($newDocFile); + } + + # Build the yaml file from DocFileInfo list + $fileContent = New-Object Collections.Generic.List[string]; + $fileContent.Add("items:"); + $packages = $docFiles | Where-Object { $_.Category -eq "Package" } + if ($packages.Length -eq 0) { + return; + } + foreach ($package in $packages) { + $fileContent.Add("- name: $($package.Package)"); + $fileContent.Add(" href: $($package.FileName)"); + $moduleGroups = $docFiles | Where-Object { $_.Category -ne "Package" -and $_.Package -eq $package.Package } | Group-Object -Property Package, Module; + if ($moduleGroups.Length -eq 0) { + continue; + } + + $fileContent.Add(" items:"); + foreach ($moduleGroup in $moduleGroups) { + $module = $moduleGroup.Group | Where-Object { $_.Category -eq "Module" }; + $fileContent.Add(" - name: $($module.Module)"); + $fileContent.Add(" href: $($module.FileName)"); + + $members = $moduleGroup.Group | Where-Object { $_.Category -eq "Member" }; + if ($members.Length -eq 0) { + continue; + } + + $fileContent.Add(" items:"); + $functions = $members | Where-Object { $_.Member -match "[a-zA-Z]+(_[a-zA-Z]+)" }; + $classes = $members | Where-Object { $_.Member -notmatch "[a-zA-Z]+(_[a-zA-Z]+)" }; + + foreach ($member in $functions) { + $fileContent.Add(" - name: $($member.Member)"); + $fileContent.Add(" href: $($member.FileName)"); + } + foreach ($member in $classes) { + $fileContent.Add(" - name: $($member.Member)"); + $fileContent.Add(" href: $($member.FileName)"); + } + + } + } + + $tocPath = Join-Path -Path $python_md_destination -ChildPath toc.yml; + + Run-Command ("Out-File -FilePath '$tocPath' -InputObject '$($fileContent | Out-String)'"); if ($LASTEXITCODE -ne 0) { Throw "Failed: Generating toc.yml for Python auto-generated doc files." } else { - Write-Host("Finished: Generating toc.yml for Python auto-generated doc files.") + Write-Host-With-Timestamp "Finished: Generating toc.yml for Python auto-generated doc files." 
} } - function Write-Final-Docs { <# .SYNOPSIS @@ -160,25 +272,25 @@ function Write-Final-Docs { #> # Docfx related paths - $docfx_config_path = Join-Path $main_docs_dir "docfx.json" - $docfx_cmd = "dotnet docfx $docfx_config_path -t statictoc,templates\tableau --logLevel warning" + $docfx_config_path = Join-Path $main_docs_dir "docfx.json"; + $docfx_cmd = "dotnet docfx $docfx_config_path -t statictoc,templates\tableau --logLevel warning"; # Run the docfx command to generate the final output if ($Serve) { - Write-Host("Generating final documentation output and hosting it locally.") - & Run-Command ($docfx_cmd + " --serve") - & Write-Host-With-Timestamp("Finished: Documentation generated and hosted.") + Write-Host-With-Timestamp "Generating final documentation output and hosting it locally."; + Run-Command ($docfx_cmd + " --serve"); + Write-Host-With-Timestamp "Finished: Documentation generated and hosted."; } else { - $docs_output_dir = Join-Path (Split-Path $PSScriptRoot -Parent) "docs" + $docs_output_dir = Join-Path (Split-Path $PSScriptRoot -Parent) "docs"; - Write-Host("Generating final documentation output.") - & Run-Command ("Clear-Directory -Path $docs_output_dir") - & Run-Command ($docfx_cmd) - & Write-Host-With-Timestamp("Finished: API Reference documentation has been generated to: " + $docs_output_dir) + Write-Host-With-Timestamp "Generating final documentation output."; + Run-Command ("Clear-Directory -Path $docs_output_dir"); + Run-Command ($docfx_cmd); + Write-Host-With-Timestamp "Finished: API Reference documentation has been generated to: $docs_output_dir"; } if ($LASTEXITCODE -ne 0) { - Throw "Failed: Generating final documentation output." + Throw "Failed: Generating final documentation output."; } } @@ -186,15 +298,29 @@ function Format-Console { [CmdletBinding()] param([Parameter(ValueFromPipeline = $True)][string[]]$inputObject) PROCESS { - Write-Host " $inputObject" + Write-Host " $inputObject"; } } function Write-Host-With-Timestamp { - param([string]$message) + param( + [string]$message, + [string]$level = "INFO") $timestamp = Get-Date -Format "MM/dd/yyyy HH:mm:ss" - Write-Host "${timestamp}: ${message}" + + if ($level -eq "ERROR") { + Write-Host "${timestamp}: $level :" -ForegroundColor Red; + Write-Error -Message "${message}" -ErrorAction Stop; + } + else { + Write-Host "${timestamp}: $level : ${message}"; + } } -Restore-Tools && Write-Python-docs && Copy-Python-Docs && Write-Python-Docs-Toc && Write-Final-Docs \ No newline at end of file +Restore-Tools; +Write-Python-docs; +Copy-Python-Docs; +Write-Python-Docs-Toc; +Write-Sdk-Version; +Write-Final-Docs; \ No newline at end of file diff --git a/src/Documentation/.gitignore b/src/Documentation/.gitignore index 4378419..d264e89 100644 --- a/src/Documentation/.gitignore +++ b/src/Documentation/.gitignore @@ -7,3 +7,4 @@ /**/bin/ /**/obj/ _site +migration_sdk_metadata.json diff --git a/src/Documentation/articles/plan_validation.md b/src/Documentation/articles/plan_validation.md new file mode 100644 index 0000000..c2937a5 --- /dev/null +++ b/src/Documentation/articles/plan_validation.md @@ -0,0 +1,30 @@ +# Plan Validation + + +## Plan Builder + +The main input for a migration is through a migration plan. A plan builder is provided by the SDK to build a valid migration plan through a [fluent interface](https://en.wikipedia.org/wiki/Fluent_interface). The plan builder is able to find validation errors before the migration plan is built and executed. 
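+
+As an illustration, a plan builder might be configured like this before validation (a minimal sketch: the C# method names are assumed to mirror the Python wrapper calls used elsewhere in this change, and all URLs, site names, token names, and the email domain are placeholders):
+
+```C#
+// Assumed fluent configuration; adjust names and values to your environment.
+_planBuilder
+    .FromSourceTableauServer(
+        new Uri("https://my-tableau-server.example.com"),   // placeholder server URL
+        "my-source-site",                                   // placeholder site content URL
+        "MySourceTokenName",                                // placeholder token name
+        Environment.GetEnvironmentVariable("TABLEAU_MIGRATION_SOURCE_TOKEN") ?? "")
+    .ToDestinationTableauCloud(
+        new Uri("https://my-pod.online.tableau.com"),       // placeholder pod URL
+        "my-destination-site",                              // placeholder site content URL
+        "MyDestinationTokenName",                           // placeholder token name
+        Environment.GetEnvironmentVariable("TABLEAU_MIGRATION_DESTINATION_TOKEN") ?? "")
+    .ForServerToCloud()
+    .WithTableauIdAuthenticationType()
+    .WithTableauCloudUsernames("example.com");              // placeholder email domain
+```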
+ +### Validation + +The migration engine does not enforce plan validation, but users are highly encouraged to validate migration plans and abort execution if validation errors are returned to prevent errors during migration. The **Validate** plan builder method is used to perform plan builder validation: + +```C# + var validationResult = _planBuilder.Validate(); +``` + + +### Handling Validation Errors +If a validation error is detected, we recommend aborting the migration in an application appropriate manner. The following example checks for validation errors and logs them to the console: + +```C# +if (!validationResult.Success) + { + _logger.LogError($"Migration plan validation failed.", validationResult.Errors); + Console.WriteLine("Press any key to exit"); + Console.ReadKey(); + _appLifetime.StopApplication(); + } +``` + +Each validation error provides information on how to fix the error detected in the migration plan. Review the validation errors, adjust the plan builder as necessary, and re-run the migration. \ No newline at end of file diff --git a/src/Documentation/articles/toc.yml b/src/Documentation/articles/toc.yml index dbdf2fa..0321042 100644 --- a/src/Documentation/articles/toc.yml +++ b/src/Documentation/articles/toc.yml @@ -2,6 +2,8 @@ href: intro.md - name: Configuration href: configuration.md +- name: Plan Validation + href: plan_validation.md - name: Advanced Configuration items: - name: Hooks diff --git a/src/Documentation/docfx.json b/src/Documentation/docfx.json index cb9dcf3..71ef959 100644 --- a/src/Documentation/docfx.json +++ b/src/Documentation/docfx.json @@ -65,7 +65,9 @@ } ], "dest": "../../docs", - "globalMetadataFiles": [], + "globalMetadataFiles": [ + "migration_sdk_metadata.json" + ], "globalMetadata": { "_appTitle": "Tableau Migration SDK", "_appLogoPath": "tableau.svg", @@ -73,6 +75,7 @@ "_disableContribution": true, "_disableFooter": true, "_enableNewTab": true, + "_googleAnalyticsTagId": "UA-625217-51", "_gitContribute": { "repo": "https://github.com/tableau/tableau-migration-sdk", "branch": "main" diff --git a/src/Documentation/index.md b/src/Documentation/index.md index 80b4057..3f69438 100644 --- a/src/Documentation/index.md +++ b/src/Documentation/index.md @@ -1,9 +1,17 @@ -# Tableau Migration SDK +# Contents -[API Reference](~/api/index.md) +## [API Reference](~/api/index.md) -[Python Wrapper](~/python_wrapper/tableau_migration.md) +The full C# API Reference. -[Code Samples](~/samples/intro.md) +## [Python Wrapper](~/python_wrapper/tableau_migration.md) -[Articles](~/articles/intro.md) +The Python Wrapper Reference. + +## [Code Samples](~/samples/intro.md) + +Code samples to get you started. + +## [Articles](~/articles/intro.md) + +Articles describing various concepts including how to customize the Migration SDK. diff --git a/src/Documentation/templates/tableau/partials/head.tmpl.partial b/src/Documentation/templates/tableau/partials/head.tmpl.partial index 5920032..68b2ea8 100644 --- a/src/Documentation/templates/tableau/partials/head.tmpl.partial +++ b/src/Documentation/templates/tableau/partials/head.tmpl.partial @@ -1,8 +1,16 @@ -{{!Copyright (c) Microsoft. All rights reserved. Licensed under the MIT license. See LICENSE file in the project root for full license information.}} +{{!Licensed to the .NET Foundation under one or more agreements. 
The .NET Foundation licenses this file to you under the MIT license.}} - + {{#_googleAnalyticsTagId}} + + + {{/_googleAnalyticsTagId}} {{#redirect_url}} {{/redirect_url}} @@ -12,10 +20,11 @@ {{#_description}}{{/_description}} + {{#description}}{{/description}} - + {{#_noindex}}{{/_noindex}} {{#_enableSearch}}{{/_enableSearch}} {{#_enableNewTab}}{{/_enableNewTab}} diff --git a/src/Documentation/templates/tableau/partials/navbar.tmpl.partial b/src/Documentation/templates/tableau/partials/navbar.tmpl.partial index f1f2721..6c0b4d3 100644 --- a/src/Documentation/templates/tableau/partials/navbar.tmpl.partial +++ b/src/Documentation/templates/tableau/partials/navbar.tmpl.partial @@ -9,7 +9,14 @@ - {{>partials/logo}} + + + {{{_appTitle}}} + + + {{{_migrationSdkVersion}}} + + diff --git a/src/Documentation/templates/tableau/styles/main.css b/src/Documentation/templates/tableau/styles/main.css index eedf788..30e6aa9 100644 --- a/src/Documentation/templates/tableau/styles/main.css +++ b/src/Documentation/templates/tableau/styles/main.css @@ -55,8 +55,13 @@ body .toc { font-size: 14px; } +.expand-stub::before { + color: #337ab7; +} + +/*Navbar*/ .navbar-inverse { - background-color: #ffffff; + background-color: #f8f8f8; border-color: #e7e7e7; } @@ -82,12 +87,26 @@ body .toc { background-color: transparent; } +.subnav { + background-color: #fff; + border-bottom: 1px solid #f7f7f7; +} + +.breadcrumb>li+li:before { + content: "\00a0>"; + color: #337ab7; +} + +.breadcrumb { + padding-left: 15px; +} + /*Setting widths to make more sense given namespace lengths*/ .container { width: 100%; } -.sidefilter { +.sidefilter { width: 20%; } @@ -95,6 +114,48 @@ body .toc { width: 20%; } -.article.grid-right { +.article.grid-right { margin-left: 21%; +} + +/*Title and log styles*/ + +.tableau-navbar-title { + float: left; + height: 50px; + padding-top: 13px; + padding-left: 15px; + font-size: 18px; + line-height: 20px; + color: #337ab7 +} + +.tableau-navbar-version { + font-size: 15px; + color: #337ab7 +} + +.tableau-navbar-logo { + float: right !important; + margin: 6px -15px 1px 0px; + height: 40px; +} + +.tableau-navbar-logo>img { + max-width: 100%; + max-height: 100%; +} + +/* Alerts */ +.alert { + padding: 12px; + border-radius: 8px; + border-width: 0px; + box-shadow: 0px 2px 2px 0px #c7c7c766; +} + +/* Code blocks*/ +pre { + border-radius: 10px; + box-shadow: 0px 2px 2px 0px #c7c7c766; } \ No newline at end of file diff --git a/src/Python/Documentation/_templates/custom-class-template.rst b/src/Python/Documentation/_templates/custom-class-template.rst index a7f6464..d2ef366 100644 --- a/src/Python/Documentation/_templates/custom-class-template.rst +++ b/src/Python/Documentation/_templates/custom-class-template.rst @@ -1,4 +1,4 @@ -{{ fullname | escape | underline}} +{{ objname | escape | underline}} .. currentmodule:: {{ module }} @@ -12,7 +12,9 @@ .. automethod:: __init__ {% if methods %} - .. rubric:: {{ _('Methods') }} + ================= + Methods + ================= .. autosummary:: {% for item in methods %} @@ -23,7 +25,9 @@ {% block attributes %} {% if attributes|reject("in","Equals,Finalize,GetHashCode,GetType,MemberwiseClone,Overloads,ReferenceEquals,ToString,BeginScope")|list %} - .. rubric:: {{ _('Attributes') }} + ================= + Attributes + ================= .. 
autosummary:: {% for item in attributes|reject("in","Equals,Finalize,GetHashCode,GetType,MemberwiseClone,Overloads,ReferenceEquals,ToString,BeginScope")|list %} diff --git a/src/Python/Documentation/_templates/custom-module-template.rst b/src/Python/Documentation/_templates/custom-module-template.rst index caf8c38..1b6845a 100644 --- a/src/Python/Documentation/_templates/custom-module-template.rst +++ b/src/Python/Documentation/_templates/custom-module-template.rst @@ -4,7 +4,9 @@ {% block attributes %} {% if attributes %} - .. rubric:: Module Attributes + ================= + Module Attributes + ================= .. autosummary:: :toctree: @@ -16,7 +18,9 @@ {% block functions %} {% if functions %} - .. rubric:: {{ _('Functions') }} + ================= + Functions + ================= .. autosummary:: :toctree: @@ -28,7 +32,9 @@ {% block classes %} {% if classes %} - .. rubric:: {{ _('Classes') }} + ================= + Classes + ================= .. autosummary:: :toctree: @@ -41,7 +47,9 @@ {% block exceptions %} {% if exceptions %} - .. rubric:: {{ _('Exceptions') }} + ================= + Exceptions + ================= .. autosummary:: :toctree: @@ -53,7 +61,9 @@ {% block modules %} {% if modules %} -.. rubric:: Modules +================= +Modules +================= .. autosummary:: :toctree: diff --git a/src/Python/Documentation/conf.py b/src/Python/Documentation/conf.py index 20745fb..580cd02 100644 --- a/src/Python/Documentation/conf.py +++ b/src/Python/Documentation/conf.py @@ -27,4 +27,5 @@ autodoc_typehints='description' markdown_anchor_sections=True markdown_anchor_signatures=True +add_module_names=False print("..done.") \ No newline at end of file diff --git a/src/Tableau.Migration/Api/GroupsApiClient.cs b/src/Tableau.Migration/Api/GroupsApiClient.cs index 82a1998..803faff 100644 --- a/src/Tableau.Migration/Api/GroupsApiClient.cs +++ b/src/Tableau.Migration/Api/GroupsApiClient.cs @@ -168,6 +168,19 @@ public async Task RemoveUserFromGroupAsync(Guid groupId, Guid userId, C return result; } + /// + public async Task DeleteGroupAsync(Guid groupId, CancellationToken cancel) + { + var result = await RestRequestBuilderFactory + .CreateUri($"/groups/{groupId.ToUrlSegment()}") + .ForDeleteRequest() + .SendAsync(cancel) + .ToResultAsync(_serializer, SharedResourcesLocalizer, cancel) + .ConfigureAwait(false); + + return result; + } + #region - IPagedListApiClient Implementation - public IPager GetPager(int pageSize) => new ApiListPager(this, pageSize); diff --git a/src/Tableau.Migration/Api/IGroupsApiClient.cs b/src/Tableau.Migration/Api/IGroupsApiClient.cs index 4c9acfb..1bfb2c8 100644 --- a/src/Tableau.Migration/Api/IGroupsApiClient.cs +++ b/src/Tableau.Migration/Api/IGroupsApiClient.cs @@ -106,5 +106,13 @@ Task> ImportGroupFromActiveDirectoryBackgroundProcessAsync( /// The cancellation token. /// The operation result. Task RemoveUserFromGroupAsync(Guid groupId, Guid userId, CancellationToken cancel); + + /// + /// Deletes a group. + /// + /// The id of the group to delete. + /// The cancellation token. 
+ /// + Task DeleteGroupAsync(Guid groupId, CancellationToken cancel); } } diff --git a/src/Tableau.Migration/Api/IHttpResponseMessageExtensions.cs b/src/Tableau.Migration/Api/IHttpResponseMessageExtensions.cs index 3c87aaf..3df8d0f 100644 --- a/src/Tableau.Migration/Api/IHttpResponseMessageExtensions.cs +++ b/src/Tableau.Migration/Api/IHttpResponseMessageExtensions.cs @@ -204,7 +204,7 @@ public static IPagedResult ToPagedResult(this IHttpRe var model = createModel(content); - return PagedResult.Succeeded(model, content.PageNumber, content.PageSize, content.TotalCount); + return PagedResult.Succeeded(model, content.PageNumber, content.PageSize, content.TotalCount, content.FetchedAllPages); } catch (Exception ex) { @@ -252,7 +252,7 @@ public static async Task> ToPagedResultAsync.Succeeded(model, content.PageNumber, content.PageSize, content.TotalCount); + return PagedResult.Succeeded(model, content.PageNumber, content.PageSize, content.TotalCount, content.FetchedAllPages); } catch (Exception ex) { diff --git a/src/Tableau.Migration/Api/IProjectsApiClient.cs b/src/Tableau.Migration/Api/IProjectsApiClient.cs index e77350d..72148a7 100644 --- a/src/Tableau.Migration/Api/IProjectsApiClient.cs +++ b/src/Tableau.Migration/Api/IProjectsApiClient.cs @@ -90,5 +90,12 @@ public Task> UpdateProjectAsync( string? newContentPermissions = null, Guid? newControllingPermissionsProjectId = null, Guid? newOwnerId = null); + + /// + /// Deletes a project. + /// + /// The ID for the project to delete. + /// A cancellation token to obey. + public Task DeleteProjectAsync(Guid projectId, CancellationToken cancel); } } diff --git a/src/Tableau.Migration/Api/IUsersApiClient.cs b/src/Tableau.Migration/Api/IUsersApiClient.cs index d48d227..3d9d524 100644 --- a/src/Tableau.Migration/Api/IUsersApiClient.cs +++ b/src/Tableau.Migration/Api/IUsersApiClient.cs @@ -87,5 +87,13 @@ Task> UpdateUserAsync(Guid id, string? newEmail = null, string? newPassword = null, string? newAuthSetting = null); + + /// + /// Deletes a user. + /// + /// The user's ID. + /// The cancellation token. + /// + Task DeleteUserAsync(Guid userId, CancellationToken cancel); } } diff --git a/src/Tableau.Migration/Api/IWorkbooksApiClient.cs b/src/Tableau.Migration/Api/IWorkbooksApiClient.cs index 41ffeac..1bb4f0c 100644 --- a/src/Tableau.Migration/Api/IWorkbooksApiClient.cs +++ b/src/Tableau.Migration/Api/IWorkbooksApiClient.cs @@ -39,7 +39,7 @@ public interface IWorkbooksApiClient : IConnectionsApiClient { /// - /// Gets all workbook in the current site. + /// Gets all workbooks in the current site except the ones in the Personal Space. /// /// The 1-indexed page number. /// The size of the page. diff --git a/src/Tableau.Migration/Api/Models/IPublishWorkbookOptions.cs b/src/Tableau.Migration/Api/Models/IPublishWorkbookOptions.cs index 6fa72b3..5cd1df5 100644 --- a/src/Tableau.Migration/Api/Models/IPublishWorkbookOptions.cs +++ b/src/Tableau.Migration/Api/Models/IPublishWorkbookOptions.cs @@ -15,6 +15,7 @@ // using System; +using System.Collections.Generic; namespace Tableau.Migration.Api.Models { @@ -62,5 +63,10 @@ public interface IPublishWorkbookOptions : IPublishFileOptions /// Gets the ID of the project to publish to. /// Guid ProjectId { get; } + + /// + /// Gets the names of the views that should be hidden. 
+ /// + IEnumerable HiddenViewNames { get; } } } diff --git a/src/Tableau.Migration/Api/Models/PublishWorkbookOptions.cs b/src/Tableau.Migration/Api/Models/PublishWorkbookOptions.cs index 9780551..21615bd 100644 --- a/src/Tableau.Migration/Api/Models/PublishWorkbookOptions.cs +++ b/src/Tableau.Migration/Api/Models/PublishWorkbookOptions.cs @@ -15,6 +15,7 @@ // using System; +using System.Collections.Generic; using System.IO; using Tableau.Migration.Api.Rest.Models.Types; using Tableau.Migration.Content; @@ -59,6 +60,9 @@ public class PublishWorkbookOptions : IPublishWorkbookOptions /// public string FileType { get; } + /// + public IEnumerable HiddenViewNames { get; } + /// /// Creates a new instance. /// @@ -76,6 +80,7 @@ public PublishWorkbookOptions(IPublishableWorkbook workbook, Stream file, string File = file; FileName = workbook.File.OriginalFileName; FileType = fileType; + HiddenViewNames = workbook.HiddenViewNames; } } } diff --git a/src/Tableau.Migration/Api/Permissions/DefaultPermissionsApiClient.cs b/src/Tableau.Migration/Api/Permissions/DefaultPermissionsApiClient.cs index d891bee..ec88242 100644 --- a/src/Tableau.Migration/Api/Permissions/DefaultPermissionsApiClient.cs +++ b/src/Tableau.Migration/Api/Permissions/DefaultPermissionsApiClient.cs @@ -21,9 +21,11 @@ using System.Linq; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.Logging; using Tableau.Migration.Api.Rest; using Tableau.Migration.Config; using Tableau.Migration.Content.Permissions; +using Tableau.Migration.Resources; namespace Tableau.Migration.Api.Permissions { @@ -32,13 +34,18 @@ internal class DefaultPermissionsApiClient : IDefaultPermissionsApiClient private readonly ConcurrentDictionary _contentTypeClients = new(StringComparer.OrdinalIgnoreCase); private readonly IPermissionsApiClientFactory _permissionsClientFactory; + private readonly ILogger _logger; + private readonly ISharedResourcesLocalizer _localizer; public DefaultPermissionsApiClient( IPermissionsApiClientFactory permissionsClientFactory, - DefaultPermissionsContentTypeOptions options) + DefaultPermissionsContentTypeOptions options, + ILoggerFactory loggerFactory, + ISharedResourcesLocalizer localizer) { _permissionsClientFactory = permissionsClientFactory; - + _logger = loggerFactory.CreateLogger(); + _localizer = localizer; foreach (var contentTypeUrlSegment in options.UrlSegments) EnsurePermissionsClient(contentTypeUrlSegment); } @@ -116,18 +123,27 @@ public async Task>> GetAllPer var getPermissionsResult = getPermissionsTask.Value.Result; if (!getPermissionsResult.Success) + { resultBuilder.Add(getPermissionsResult); - else - defaultPermissions.Add(getPermissionsTask.Key, getPermissionsResult.Value); + _logger.LogWarning( + new AggregateException(getPermissionsResult.Errors), + _localizer[SharedResourceKeys.FailedToGetDefaultPermissionsMessage], + getPermissionsTask.Key, + projectId); + continue; + } + + defaultPermissions.Add(getPermissionsTask.Key, getPermissionsResult.Value); } } - var result = resultBuilder.Build(); - - if (!result.Success) - return Result>.Failed(result.Errors); + if (defaultPermissions.Any()) + { + return Result>.Succeeded(defaultPermissions.ToImmutable()); + } - return Result>.Succeeded(defaultPermissions.ToImmutable()); + return Result>.Failed( + resultBuilder.Build().Errors); } public async Task>> UpdateAllPermissionsAsync( diff --git a/src/Tableau.Migration/Api/Permissions/PermissionsApiClientFactory.cs b/src/Tableau.Migration/Api/Permissions/PermissionsApiClientFactory.cs index 
47266fc..490199e 100644 --- a/src/Tableau.Migration/Api/Permissions/PermissionsApiClientFactory.cs +++ b/src/Tableau.Migration/Api/Permissions/PermissionsApiClientFactory.cs @@ -14,6 +14,7 @@ // limitations under the License. // +using Microsoft.Extensions.Logging; using Tableau.Migration.Api.Rest; using Tableau.Migration.Config; using Tableau.Migration.Net; @@ -28,17 +29,20 @@ internal sealed class PermissionsApiClientFactory : IPermissionsApiClientFactory private readonly IHttpContentSerializer _serializer; private readonly ISharedResourcesLocalizer _sharedResourcesLocalizer; private readonly IConfigReader _configReader; + private readonly ILoggerFactory _loggerFactory; public PermissionsApiClientFactory( IRestRequestBuilderFactory restRequestBuilderFactory, IHttpContentSerializer serializer, ISharedResourcesLocalizer sharedResourcesLocalizer, - IConfigReader configReader) + IConfigReader configReader, + ILoggerFactory loggerFactory) { _restRequestBuilderFactory = restRequestBuilderFactory; _serializer = serializer; _sharedResourcesLocalizer = sharedResourcesLocalizer; _configReader = configReader; + _loggerFactory = loggerFactory; } /// @@ -51,6 +55,6 @@ public IPermissionsApiClient Create(IPermissionsUriBuilder uriBuilder) /// public IDefaultPermissionsApiClient CreateDefaultPermissionsClient() - => new DefaultPermissionsApiClient(this, _configReader.Get().DefaultPermissionsContentTypes); + => new DefaultPermissionsApiClient(this, _configReader.Get().DefaultPermissionsContentTypes, _loggerFactory, _sharedResourcesLocalizer); } } diff --git a/src/Tableau.Migration/Api/ProjectsApiClient.cs b/src/Tableau.Migration/Api/ProjectsApiClient.cs index 5af17cb..e347586 100644 --- a/src/Tableau.Migration/Api/ProjectsApiClient.cs +++ b/src/Tableau.Migration/Api/ProjectsApiClient.cs @@ -30,6 +30,7 @@ using Tableau.Migration.Content; using Tableau.Migration.Content.Permissions; using Tableau.Migration.Content.Search; +using Tableau.Migration.Net; using Tableau.Migration.Net.Rest; using Tableau.Migration.Net.Rest.Filtering; using Tableau.Migration.Paging; @@ -42,6 +43,7 @@ internal sealed class ProjectsApiClient : { internal const string PROJECT_NAME_CONFLICT_ERROR_CODE = "409006"; + private readonly IHttpContentSerializer _serializer; private readonly IDefaultPermissionsApiClient _defaultPermissionsClient; public ProjectsApiClient( @@ -49,10 +51,13 @@ public ProjectsApiClient( IPermissionsApiClientFactory permissionsClientFactory, IContentReferenceFinderFactory finderFactory, ILoggerFactory loggerFactory, + IHttpContentSerializer serializer, ISharedResourcesLocalizer sharedResourcesLocalizer) : base(restRequestBuilderFactory, finderFactory, loggerFactory, sharedResourcesLocalizer) { _defaultPermissionsClient = permissionsClientFactory.CreateDefaultPermissionsClient(); + _serializer = serializer; + Permissions = permissionsClientFactory.Create(this); } @@ -110,6 +115,19 @@ public async Task> UpdateProjectAsync( return updateResult; } + // + public async Task DeleteProjectAsync(Guid projectId, CancellationToken cancel) + { + var result = await RestRequestBuilderFactory + .CreateUri($"{UrlPrefix}/{projectId.ToUrlSegment()}") + .ForDeleteRequest() + .SendAsync(cancel) + .ToResultAsync(_serializer, SharedResourcesLocalizer, cancel) + .ConfigureAwait(false); + + return result; + } + #region - IPermissionsContentApiClientImplementation - /// diff --git a/src/Tableau.Migration/Api/Rest/Models/Requests/CommitWorkbookPublishRequest.cs 
b/src/Tableau.Migration/Api/Rest/Models/Requests/CommitWorkbookPublishRequest.cs index 881b09c..11b2302 100644 --- a/src/Tableau.Migration/Api/Rest/Models/Requests/CommitWorkbookPublishRequest.cs +++ b/src/Tableau.Migration/Api/Rest/Models/Requests/CommitWorkbookPublishRequest.cs @@ -15,8 +15,10 @@ // using System; +using System.Linq; using System.Xml.Serialization; using Tableau.Migration.Api.Models; +using Tableau.Migration.Content; namespace Tableau.Migration.Api.Rest.Models.Requests { @@ -53,7 +55,14 @@ public CommitWorkbookPublishRequest(IPublishWorkbookOptions options) Project = new WorkbookType.ProjectType { Id = options.ProjectId - } + }, + // We're only setting the hidden view names here because any others will be not hidden by default. + Views = options.HiddenViewNames.Distinct(View.NameComparer).Select(v => new WorkbookType.ViewType + { + Name = v, + Hidden = true + }) + .ToArray() }; } diff --git a/src/Tableau.Migration/Api/Rest/Models/Responses/PagedTableauServerResponse.cs b/src/Tableau.Migration/Api/Rest/Models/Responses/PagedTableauServerResponse.cs index 8181211..2df8a9f 100644 --- a/src/Tableau.Migration/Api/Rest/Models/Responses/PagedTableauServerResponse.cs +++ b/src/Tableau.Migration/Api/Rest/Models/Responses/PagedTableauServerResponse.cs @@ -35,5 +35,19 @@ public abstract class PagedTableauServerResponse : TableauServerListRespo int IPageInfo.PageSize => Pagination.PageSize; int IPageInfo.TotalCount => Pagination.TotalAvailable; + + bool IPageInfo.FetchedAllPages + { + get + { + if (Pagination.TotalAvailable == 0) + { + return true; + } + var pagesAvailable = (Pagination.TotalAvailable / Pagination.PageSize) + (Pagination.TotalAvailable % Pagination.PageSize > 0 ? 1 : 0); + + return Pagination.PageNumber >= pagesAvailable; + } + } } } diff --git a/src/Tableau.Migration/Api/Rest/Models/RestProjectBuilderPager.cs b/src/Tableau.Migration/Api/Rest/Models/RestProjectBuilderPager.cs index f63455c..4920416 100644 --- a/src/Tableau.Migration/Api/Rest/Models/RestProjectBuilderPager.cs +++ b/src/Tableau.Migration/Api/Rest/Models/RestProjectBuilderPager.cs @@ -68,7 +68,7 @@ protected override async Task> GetPageAsync(int pageNumbe projects.Add(newProject); } - return PagedResult.Succeeded(projects.ToImmutable(), pageNumber, pageSize, projectBuilder.Count); + return PagedResult.Succeeded(projects.ToImmutable(), pageNumber, pageSize, projectBuilder.Count, !restProjects.Any()); } } } diff --git a/src/Tableau.Migration/Api/UsersApiClient.cs b/src/Tableau.Migration/Api/UsersApiClient.cs index 76bdad8..88d4cb8 100644 --- a/src/Tableau.Migration/Api/UsersApiClient.cs +++ b/src/Tableau.Migration/Api/UsersApiClient.cs @@ -21,6 +21,7 @@ using System.Linq; using System.Net.Http; using System.Text; +using System.Text.RegularExpressions; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Logging; @@ -39,16 +40,19 @@ namespace Tableau.Migration.Api internal sealed class UsersApiClient : ContentApiClientBase, IUsersApiClient { private readonly IJobsApiClient _jobs; + private readonly IHttpContentSerializer _serializer; public UsersApiClient( IJobsApiClient jobs, IRestRequestBuilderFactory restRequestBuilderFactory, IContentReferenceFinderFactory finderFactory, ILoggerFactory loggerFactory, + IHttpContentSerializer serializer, ISharedResourcesLocalizer sharedResourcesLocalizer) : base(restRequestBuilderFactory, finderFactory, loggerFactory, sharedResourcesLocalizer) { _jobs = jobs; + _serializer = serializer; } /// @@ -180,6 +184,19 @@ public async Task> 
UpdateUserAsync(Guid id, return userResult; } + /// + public async Task DeleteUserAsync(Guid userId, CancellationToken cancel) + { + var result = await RestRequestBuilderFactory + .CreateUri($"/users/{userId.ToUrlSegment()}") + .ForDeleteRequest() + .SendAsync(cancel) + .ToResultAsync(_serializer, SharedResourcesLocalizer, cancel) + .ConfigureAwait(false); + + return result; + } + #region - IPagedListApiClient Implementation - /// diff --git a/src/Tableau.Migration/Api/WorkbooksApiClient.cs b/src/Tableau.Migration/Api/WorkbooksApiClient.cs index 64b35d1..e5c2bcf 100644 --- a/src/Tableau.Migration/Api/WorkbooksApiClient.cs +++ b/src/Tableau.Migration/Api/WorkbooksApiClient.cs @@ -16,7 +16,6 @@ using System; using System.Collections.Immutable; -using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Logging; diff --git a/src/Tableau.Migration/Content/IPublishableWorkbook.cs b/src/Tableau.Migration/Content/IPublishableWorkbook.cs index 42c2cd8..297a7f0 100644 --- a/src/Tableau.Migration/Content/IPublishableWorkbook.cs +++ b/src/Tableau.Migration/Content/IPublishableWorkbook.cs @@ -15,11 +15,12 @@ // using System; +using System.Collections.Generic; namespace Tableau.Migration.Content { /// - /// Intreface for a that has been downloaded + /// Interface for an that has been downloaded /// and has full information necessary for re-publishing. /// public interface IPublishableWorkbook : IWorkbookDetails, IFileContent, IConnectionsContent @@ -28,5 +29,10 @@ public interface IPublishableWorkbook : IWorkbookDetails, IFileContent, IConnect /// Gets the ID of the user to generate thumbnails as. /// Guid? ThumbnailsUserId { get; set; } + + /// + /// Gets the names of the views that should be hidden. + /// + ISet HiddenViewNames { get; } } } diff --git a/src/Tableau.Migration/Content/PublishableWorkbook.cs b/src/Tableau.Migration/Content/PublishableWorkbook.cs index d93e495..2c955f6 100644 --- a/src/Tableau.Migration/Content/PublishableWorkbook.cs +++ b/src/Tableau.Migration/Content/PublishableWorkbook.cs @@ -15,6 +15,7 @@ // using System; +using System.Collections.Generic; using System.Collections.Immutable; using System.Threading.Tasks; using Tableau.Migration.Content.Files; @@ -32,6 +33,9 @@ internal sealed class PublishableWorkbook : WorkbookDetails, IPublishableWorkboo /// public IImmutableList Connections { get; } + /// + public ISet HiddenViewNames { get; } = new HashSet(View.NameComparer); + public PublishableWorkbook(IWorkbookDetails workbook, IImmutableList connections, IContentFileHandle file) : base(workbook) { diff --git a/src/Tableau.Migration/Engine/Migrators/ContentMigrator.cs b/src/Tableau.Migration/Engine/Migrators/ContentMigrator.cs index c9f00a6..e929ef1 100644 --- a/src/Tableau.Migration/Engine/Migrators/ContentMigrator.cs +++ b/src/Tableau.Migration/Engine/Migrators/ContentMigrator.cs @@ -93,7 +93,6 @@ public async Task MigrateAsync(CancellationToken cancel) var sourcePage = await sourcePager.NextPageAsync(cancel).ConfigureAwait(false); resultBuilder.Add(sourcePage); - var processedItems = 0; var manifestEntryBuilder = manifestPartition.GetEntryBuilder(sourcePage.TotalCount); while (!sourcePage.Value.IsNullOrEmpty()) { @@ -125,9 +124,8 @@ public async Task MigrateAsync(CancellationToken cancel) //We only bubble up batch-level errors to the action, and not item-level errors. //This means a batch can succeed even if some of the items fail. 
- processedItems += sourcePage.Value.Count; resultBuilder.Add(batchResult); - if (!batchResult.PerformNextBatch || processedItems >= sourcePage.TotalCount) + if (!batchResult.PerformNextBatch || sourcePage.FetchedAllPages) { break; } diff --git a/src/Tableau.Migration/Paging/BreadthFirstPathHierarchyPager.cs b/src/Tableau.Migration/Paging/BreadthFirstPathHierarchyPager.cs index 015491d..d2a86a9 100644 --- a/src/Tableau.Migration/Paging/BreadthFirstPathHierarchyPager.cs +++ b/src/Tableau.Migration/Paging/BreadthFirstPathHierarchyPager.cs @@ -114,17 +114,17 @@ public async Task> NextPageAsync(CancellationToken cancel //Handle empty set. if (!_itemsByHierarchyLevel.Any()) { - return PagedResult.Succeeded(ImmutableArray.Empty, _currentPage, _pageSize, _totalCount); + return PagedResult.Succeeded(ImmutableArray.Empty, _currentPage, _pageSize, _totalCount, true); } //See if there is any items left in our current level. var level = _itemsByHierarchyLevel[_currentLevel]; var levelPage = level.Skip((_currentLevelPage - 1) * _pageSize).Take(_pageSize).ToImmutableArray(); - var result = PagedResult.Succeeded(levelPage, _currentPage++, _pageSize, _totalCount); _currentLevelPage++; var peekNextPage = level.Skip((_currentLevelPage - 1) * _pageSize).Take(_pageSize).ToImmutableArray(); + var lastPage = false; if (peekNextPage.IsEmpty) { @@ -134,9 +134,18 @@ public async Task> NextPageAsync(CancellationToken cancel _currentLevel = nextLevels.First(); _currentLevelPage = DEFAULT_PAGE; } + else + { + lastPage = true; + } } - return result; + return PagedResult.Succeeded( + levelPage, + _currentPage++, + _pageSize, + _totalCount, + lastPage); } } } diff --git a/src/Tableau.Migration/Paging/IPageInfo.cs b/src/Tableau.Migration/Paging/IPageInfo.cs index 205ac17..24b3395 100644 --- a/src/Tableau.Migration/Paging/IPageInfo.cs +++ b/src/Tableau.Migration/Paging/IPageInfo.cs @@ -35,5 +35,10 @@ public interface IPageInfo /// Gets the total unpaged item count. /// int TotalCount { get; } + + /// + /// Indicates whether the SDK has already fetched all pages or not. + /// + bool FetchedAllPages { get; } } } diff --git a/src/Tableau.Migration/Paging/IPager.cs b/src/Tableau.Migration/Paging/IPager.cs index 8e990f9..08b3992 100644 --- a/src/Tableau.Migration/Paging/IPager.cs +++ b/src/Tableau.Migration/Paging/IPager.cs @@ -50,21 +50,26 @@ public async Task GetAllPagesAsync(Action initCapacity, Action results) + { + if (!results.Value.IsNullOrEmpty()) + { + pageAction(results.Value); + } + } + return resultBuilder.Build(); } @@ -79,7 +84,7 @@ public async Task>> GetAllPagesAsync(Cancellati var result = await GetAllPagesAsync( capacity => resultItems.Capacity = capacity, - page => resultItems.AddRange(page), + resultItems.AddRange, cancel).ConfigureAwait(false); return Result>.Create(result, resultItems.ToImmutable()); diff --git a/src/Tableau.Migration/Paging/PagedResult.cs b/src/Tableau.Migration/Paging/PagedResult.cs index a62ef90..5b92ae7 100644 --- a/src/Tableau.Migration/Paging/PagedResult.cs +++ b/src/Tableau.Migration/Paging/PagedResult.cs @@ -31,13 +31,15 @@ internal record PagedResult : Result>, IPagedResult /// The current 1-indexed page number. /// The page size. /// The total unpaged available item count. + /// Whether the SDK has already fetched all pages or not. /// The errors encountered during the operation, if any. - protected PagedResult(bool success, IImmutableList? value, int pageNumber, int pageSize, int totalCount, params Exception[] errors) + protected PagedResult(bool success, IImmutableList? 
value, int pageNumber, int pageSize, int totalCount, bool fetchedAllPages, params Exception[] errors) : base(success, value, errors) { PageNumber = pageNumber; PageSize = pageSize; TotalCount = totalCount; + FetchedAllPages = fetchedAllPages; } /// @@ -49,6 +51,9 @@ protected PagedResult(bool success, IImmutableList? value, int pageNumber /// public int TotalCount { get; } + /// + public bool FetchedAllPages { get; } + /// /// Creates a new instance for successful paged operations. /// @@ -56,9 +61,10 @@ protected PagedResult(bool success, IImmutableList? value, int pageNumber /// The current 1-indexed page number. /// The page size. /// The total unpaged available item count. + /// Whether the SDK has already fetched all pages or not. /// A new instance. - public static PagedResult Succeeded(IImmutableList value, int pageNumber, int pageSize, int totalCount) - => new(true, value, pageNumber, pageSize, totalCount); + public static PagedResult Succeeded(IImmutableList value, int pageNumber, int pageSize, int totalCount, bool fetchedAllPages) + => new(true, value, pageNumber, pageSize, totalCount, fetchedAllPages); /// /// Creates a new instance for failed operations. @@ -72,6 +78,6 @@ public static PagedResult Succeeded(IImmutableList value, int page /// /// The errors encountered during the operation. /// A new instance. - public static new PagedResult Failed(IEnumerable errors) => new(false, null, 0, 0, 0, errors.ToArray()); + public static new PagedResult Failed(IEnumerable errors) => new(false, null, 0, 0, 0, true, errors.ToArray()); } } diff --git a/src/Tableau.Migration/Resources/SharedResourceKeys.cs b/src/Tableau.Migration/Resources/SharedResourceKeys.cs index 96aed0b..95b9d99 100644 --- a/src/Tableau.Migration/Resources/SharedResourceKeys.cs +++ b/src/Tableau.Migration/Resources/SharedResourceKeys.cs @@ -99,5 +99,7 @@ internal static class SharedResourceKeys public const string ProjectReferenceNotFoundMessage = "ProjectReferenceNotFoundMessage"; public const string OwnerNotFoundMessage = "OwnerNotFoundMessage"; + + public const string FailedToGetDefaultPermissionsMessage = "FailedToGetDefaultPermissionsMessage"; } } diff --git a/src/Tableau.Migration/Resources/SharedResources.resx b/src/Tableau.Migration/Resources/SharedResources.resx index e2b9106..7302962 100644 --- a/src/Tableau.Migration/Resources/SharedResources.resx +++ b/src/Tableau.Migration/Resources/SharedResources.resx @@ -244,4 +244,7 @@ Detail: {3} Owner {OwnerId} was not found for {ContentType} {ContentName}. + + Failed to get Default Permissions {UrlSegment} for {ProjectId}. + \ No newline at end of file diff --git a/src/Tableau.Migration/Tableau.Migration.csproj b/src/Tableau.Migration/Tableau.Migration.csproj index b15e54d..9e052f4 100644 --- a/src/Tableau.Migration/Tableau.Migration.csproj +++ b/src/Tableau.Migration/Tableau.Migration.csproj @@ -41,6 +41,7 @@ Note: This SDK is specific for migrating from Tableau Server to Tableau Cloud. 
I + diff --git a/tests/Python.TestApplication/Python.TestApplication.pyproj b/tests/Python.TestApplication/Python.TestApplication.pyproj index 036b888..b795ab9 100644 --- a/tests/Python.TestApplication/Python.TestApplication.pyproj +++ b/tests/Python.TestApplication/Python.TestApplication.pyproj @@ -36,8 +36,7 @@ - - + diff --git a/tests/Tableau.Migration.Tests/MemoryPager.cs b/tests/Tableau.Migration.Tests/MemoryPager.cs index 46b3e87..f6bb3c5 100644 --- a/tests/Tableau.Migration.Tests/MemoryPager.cs +++ b/tests/Tableau.Migration.Tests/MemoryPager.cs @@ -46,7 +46,7 @@ public MemoryPager(IReadOnlyCollection items, int pageSize) public Task> NextPageAsync(CancellationToken cancel) { var pageItems = _items.Skip(_offset).Take(_pageSize).ToImmutableArray(); - var result = PagedResult.Succeeded(pageItems, _pageNumber, _pageSize, _items.Count); + var result = PagedResult.Succeeded(pageItems, _pageNumber, _pageSize, _items.Count, !pageItems.Any()); _offset += _pageSize; _pageNumber++; diff --git a/tests/Tableau.Migration.Tests/Unit/Api/GroupsApiClientTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/GroupsApiClientTests.cs index f136887..1157196 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/GroupsApiClientTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/GroupsApiClientTests.cs @@ -221,6 +221,51 @@ public async Task Failure() } } + public class DeleteGroupAsync : GroupsApiClientTest + { + [Fact] + public async Task Success() + { + //Setup + var groupId = Guid.NewGuid(); + + MockHttpClient.SetupResponse(new MockHttpResponseMessage(HttpStatusCode.NoContent)); + + //Act + var result = await GroupsApiClient.DeleteGroupAsync(groupId, Cancel); + + //Test + result.AssertSuccess(); + + MockHttpClient.AssertSingleRequest(r => + { + r.AssertHttpMethod(HttpMethod.Delete); + r.AssertRelativeUri($"/api/{TableauServerVersion.RestApiVersion}/sites/{SiteId}/groups/{groupId}"); + }); + } + + [Fact] + public async Task Failure() + { + //Setup + var groupId = Guid.NewGuid(); + + MockHttpClient.SetupResponse(new MockHttpResponseMessage(HttpStatusCode.InternalServerError)); + + //Act + var result = await GroupsApiClient.DeleteGroupAsync(groupId, Cancel); + + //Test + result.AssertFailure(); + + MockHttpClient.AssertSingleRequest(r => + { + r.AssertHttpMethod(HttpMethod.Delete); + r.AssertRelativeUri($"/api/{TableauServerVersion.RestApiVersion}/sites/{SiteId}/groups/{groupId}"); + }); + } + } + public class PublishAsync : GroupsApiClientTest { private IPublishableGroup CreateGroup(Action>? 
configure = null) diff --git a/tests/Tableau.Migration.Tests/Unit/Api/Permissions/DefaultPermissionsApiClientTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/Permissions/DefaultPermissionsApiClientTests.cs index 57d17e6..f246c96 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/Permissions/DefaultPermissionsApiClientTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/Permissions/DefaultPermissionsApiClientTests.cs @@ -19,11 +19,13 @@ using System.Linq; using System.Reflection; using System.Threading.Tasks; +using Microsoft.Extensions.Logging; using Moq; using Tableau.Migration.Api.Permissions; using Tableau.Migration.Api.Rest; using Tableau.Migration.Config; using Tableau.Migration.Content.Permissions; +using Tableau.Migration.Resources; using Xunit; using Xunit.Sdk; @@ -41,6 +43,10 @@ public abstract class DefaultPermissionsApiClientTest : AutoFixtureTestBase protected readonly Dictionary> MockPermissionsClients; + protected readonly Mock MockLoggerFactory; + + protected readonly Mock MockLocalizer; + internal readonly DefaultPermissionsApiClient DefaultPermissionsClient; public DefaultPermissionsApiClientTest() @@ -55,9 +61,14 @@ public DefaultPermissionsApiClientTest() .Returns(b => MockPermissionsClients[b.Suffix.Split('/').Last()].Object); + MockLoggerFactory = Create>(); + MockLocalizer = Create>(); + DefaultPermissionsClient = new DefaultPermissionsApiClient( MockPermissionsClientFactory.Object, - Options); + Options, + MockLoggerFactory.Object, + MockLocalizer.Object); } } @@ -138,37 +149,101 @@ public async Task Calls_inner_client(string contentTypeUrlSegment) public class GetAllPermissionsAsync : DefaultPermissionsApiClientTest { [Fact] - public async Task Calls_inner_clients() + public async Task Calls_inner_clients_successfully() { var projectId = Create(); - var resultsByContentType = new Dictionary>(); + var permissionsByContentType = new Dictionary(); foreach (var contentTypeUrlSegment in Options.UrlSegments) { - var contentTypeResult = Result.Succeeded(Create()); + var permissions = Create(); + permissionsByContentType.Add(contentTypeUrlSegment, permissions); MockPermissionsClients[contentTypeUrlSegment] .Setup(c => c.GetPermissionsAsync(projectId, Cancel)) - .ReturnsAsync(contentTypeResult); + .ReturnsAsync(Result.Succeeded(permissions)); } var result = await DefaultPermissionsClient.GetAllPermissionsAsync(projectId, Cancel); Assert.True(result.Success); - var granteeCapabilityResults = result.Value.Values.SelectMany(v => v.GranteeCapabilities); + foreach (var resultByContentType in result.Value) + { + Assert.True(permissionsByContentType.ContainsKey(resultByContentType.Key)); + Assert.Equal(permissionsByContentType[resultByContentType.Key], resultByContentType.Value); + } - foreach (var resultByContentType in resultsByContentType.Select(r => r.Value)) + foreach (var mockClient in MockPermissionsClients.Values) { - Assert.True(resultByContentType.Success); + mockClient.VerifyAll(); + mockClient.VerifyNoOtherCalls(); + } + } + + [Fact] + public async Task Calls_inner_clients_some_failures() + { + var projectId = Create(); + + var permissionsByContentType = new Dictionary(); - foreach (var granteeCapability in resultByContentType.Value.GranteeCapabilities) + foreach (var contentTypeUrlSegment in Options.UrlSegments) + { + if (contentTypeUrlSegment == DefaultPermissionsContentTypeUrlSegments.Databases || + contentTypeUrlSegment == DefaultPermissionsContentTypeUrlSegments.Tables) { - Assert.Contains(granteeCapability, granteeCapabilityResults); + 
MockPermissionsClients[contentTypeUrlSegment] + .Setup(c => c.GetPermissionsAsync(projectId, Cancel)) + .ReturnsAsync(Result.Failed(new InvalidOperationException())); + continue; } + var permissions = Create(); + permissionsByContentType.Add(contentTypeUrlSegment, permissions); + + MockPermissionsClients[contentTypeUrlSegment] + .Setup(c => c.GetPermissionsAsync(projectId, Cancel)) + .ReturnsAsync(Result.Succeeded(permissions)); } + var result = await DefaultPermissionsClient.GetAllPermissionsAsync(projectId, Cancel); + + Assert.True(result.Success); + Assert.Empty(result.Errors); + Assert.False(result.Value.ContainsKey(DefaultPermissionsContentTypeUrlSegments.Databases)); + Assert.False(result.Value.ContainsKey(DefaultPermissionsContentTypeUrlSegments.Tables)); + + foreach (var resultByContentType in result.Value) + { + Assert.True(permissionsByContentType.ContainsKey(resultByContentType.Key)); + Assert.Equal(permissionsByContentType[resultByContentType.Key], resultByContentType.Value); + } + + foreach (var mockClient in MockPermissionsClients.Values) + { + mockClient.VerifyAll(); + mockClient.VerifyNoOtherCalls(); + } + } + + [Fact] + public async Task Calls_inner_clients_failed() + { + var projectId = Create(); + + foreach (var contentTypeUrlSegment in Options.UrlSegments) + { + MockPermissionsClients[contentTypeUrlSegment] + .Setup(c => c.GetPermissionsAsync(projectId, Cancel)) + .ReturnsAsync(Result.Failed(new InvalidOperationException())); + } + + var result = await DefaultPermissionsClient.GetAllPermissionsAsync(projectId, Cancel); + + Assert.False(result.Success); + Assert.Equal(Options.UrlSegments.Count, result.Errors.Count); + foreach (var mockClient in MockPermissionsClients.Values) { mockClient.VerifyAll(); diff --git a/tests/Tableau.Migration.Tests/Unit/Api/Permissions/PermissionsApiClientFactoryTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/Permissions/PermissionsApiClientFactoryTests.cs index aff33b2..ca63f61 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/Permissions/PermissionsApiClientFactoryTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/Permissions/PermissionsApiClientFactoryTests.cs @@ -15,6 +15,7 @@ // using System.Collections.Concurrent; +using Microsoft.Extensions.Logging; using Moq; using Tableau.Migration.Api.Permissions; using Tableau.Migration.Api.Rest; @@ -34,6 +35,7 @@ public abstract class PermissionsApiClientFactoryTest : AutoFixtureTestBase protected readonly Mock MockSerializer = new(); protected readonly Mock MockSharedResourcesLocalizer = new(); protected readonly Mock MockConfigReader = new(); + protected readonly Mock MockLoggerFactory = new(); protected readonly Mock MockUriBuilder = new(); @@ -45,7 +47,8 @@ public PermissionsApiClientFactoryTest() MockRestRequestBuilderFactory.Object, MockSerializer.Object, MockSharedResourcesLocalizer.Object, - MockConfigReader.Object); + MockConfigReader.Object, + MockLoggerFactory.Object); } } diff --git a/tests/Tableau.Migration.Tests/Unit/Api/ProjectsApiClientTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/ProjectsApiClientTests.cs index fa0fb36..f18d6bc 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/ProjectsApiClientTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/ProjectsApiClientTests.cs @@ -17,6 +17,7 @@ using System; using System.Net; using System.Net.Http; +using System.Text.RegularExpressions; using System.Threading.Tasks; using Moq; using Tableau.Migration.Api; @@ -408,5 +409,49 @@ public async Task UpdatesProjectAsync() } #endregion + + #region - DeleteProjectAsync - + + public 
class DeleteProjectAsync : ProjectsApiClientTest + { + + [Fact] + public async Task Returns_success() + { + var projectId = Guid.NewGuid(); + + MockHttpClient.SetupResponse(new MockHttpResponseMessage(HttpStatusCode.NoContent)); + + var result = await ProjectsApiClient.DeleteProjectAsync(projectId, Cancel); + + result.AssertSuccess(); + + MockHttpClient.AssertSingleRequest(r => + { + r.AssertHttpMethod(HttpMethod.Delete); + r.AssertRelativeUri($"/api/{TableauServerVersion.RestApiVersion}/sites/{SiteId}/projects/{projectId}"); + }); + } + + [Fact] + public async Task Returns_failure() + { + var projectId = Guid.NewGuid(); + + MockHttpClient.SetupResponse(new MockHttpResponseMessage(HttpStatusCode.InternalServerError)); + + var result = await ProjectsApiClient.DeleteProjectAsync(projectId, Cancel); + + result.AssertFailure(); + + MockHttpClient.AssertSingleRequest(r => + { + r.AssertHttpMethod(HttpMethod.Delete); + r.AssertRelativeUri($"/api/{TableauServerVersion.RestApiVersion}/sites/{SiteId}/projects/{projectId}"); + }); + } + } + + #endregion } } diff --git a/tests/Tableau.Migration.Tests/Unit/Api/Rest/Models/CommitWorkbookPublishRequestTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/Rest/Models/CommitWorkbookPublishRequestTests.cs index afe2eee..cccb3ed 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/Rest/Models/CommitWorkbookPublishRequestTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/Rest/Models/CommitWorkbookPublishRequestTests.cs @@ -14,6 +14,7 @@ // limitations under the License. // +using System.Linq; using Tableau.Migration.Api.Models; using Tableau.Migration.Api.Rest.Models.Requests; using Xunit; @@ -45,6 +46,8 @@ public void Initializes() Assert.NotNull(request.Workbook.Project); Assert.Equal(options.ProjectId, request.Workbook.Project.Id); + + Assert.All(options.HiddenViewNames, v => Assert.Single(request.Workbook.Views.Where(wbv => wbv.Name == v && wbv.Hidden))); } } } diff --git a/tests/Tableau.Migration.Tests/Unit/Api/UsersApiClientTests.cs b/tests/Tableau.Migration.Tests/Unit/Api/UsersApiClientTests.cs index c648a9b..e79dc67 100644 --- a/tests/Tableau.Migration.Tests/Unit/Api/UsersApiClientTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Api/UsersApiClientTests.cs @@ -356,5 +356,51 @@ public async Task Failure() Assert.Null(result.Value); } } + + public class DeleteUserAsync : UsersApiClientTest + { + [Fact] + public async Task Success() + { + //Setup + var userId = Guid.NewGuid(); + + MockHttpClient.SetupResponse(new MockHttpResponseMessage(HttpStatusCode.NoContent)); + + //Act + var result = await UsersApiClient.DeleteUserAsync(userId, Cancel); + + //Test + result.AssertSuccess(); + + MockHttpClient.AssertSingleRequest(r => + { + r.AssertHttpMethod(HttpMethod.Delete); + r.AssertRelativeUri($"/api/{TableauServerVersion.RestApiVersion}/sites/{SiteId}/users/{userId}"); + }); + } + + [Fact] + public async Task Failure() + { + //Setup + var userId = Guid.NewGuid(); + + MockHttpClient.SetupResponse(new MockHttpResponseMessage(HttpStatusCode.InternalServerError)); + + //Act + var result = await UsersApiClient.DeleteUserAsync(userId, Cancel); + + //Test + result.AssertFailure(); + + MockHttpClient.AssertSingleRequest(r => + { + r.AssertHttpMethod(HttpMethod.Delete); + r.AssertRelativeUri($"/api/{TableauServerVersion.RestApiVersion}/sites/{SiteId}/users/{userId}"); + }); + } + } + } } diff --git a/tests/Tableau.Migration.Tests/Unit/Paging/BreadthFirstPathHierarchyPagerTests.cs b/tests/Tableau.Migration.Tests/Unit/Paging/BreadthFirstPathHierarchyPagerTests.cs index 
c137008..0a6a12d 100644 --- a/tests/Tableau.Migration.Tests/Unit/Paging/BreadthFirstPathHierarchyPagerTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Paging/BreadthFirstPathHierarchyPagerTests.cs @@ -77,6 +77,7 @@ public async Task EmptyInnerPagerAsync() Assert.Equal(1, result.PageNumber); Assert.Equal(2, result.PageSize); Assert.Equal(0, result.TotalCount); + Assert.True(result.FetchedAllPages); Assert.Empty(result.Value); } @@ -116,6 +117,7 @@ public async Task PagesByOrderedHierarchyAsync() Assert.Equal(2, pageResult.Value.Count); Assert.Contains(top1, pageResult.Value); Assert.Contains(top2, pageResult.Value); + Assert.False(pageResult.FetchedAllPages); //Second page should be the last top item, but not go further to children. pageResult = await pager.NextPageAsync(_cancel); @@ -127,6 +129,7 @@ public async Task PagesByOrderedHierarchyAsync() Assert.Equal(hierarchyList.Length, pageResult.TotalCount); Assert.Single(pageResult.Value); Assert.Contains(top3, pageResult.Value); + Assert.False(pageResult.FetchedAllPages); //Third page should be filled with ordered child items. pageResult = await pager.NextPageAsync(_cancel); @@ -139,6 +142,7 @@ public async Task PagesByOrderedHierarchyAsync() Assert.Equal(2, pageResult.Value.Count); Assert.Contains(child1, pageResult.Value); Assert.Contains(child2, pageResult.Value); + Assert.False(pageResult.FetchedAllPages); //Fourth page should be the last child item but not go further to grandchildren. pageResult = await pager.NextPageAsync(_cancel); @@ -150,6 +154,7 @@ public async Task PagesByOrderedHierarchyAsync() Assert.Equal(hierarchyList.Length, pageResult.TotalCount); Assert.Single(pageResult.Value); Assert.Contains(child3, pageResult.Value); + Assert.False(pageResult.FetchedAllPages); //Fifth page should be the grandchildren. pageResult = await pager.NextPageAsync(_cancel); @@ -161,6 +166,7 @@ public async Task PagesByOrderedHierarchyAsync() Assert.Equal(hierarchyList.Length, pageResult.TotalCount); Assert.Single(pageResult.Value); Assert.Contains(grandchild, pageResult.Value); + Assert.True(pageResult.FetchedAllPages); //Sixth page should be the empty - we went past our data. 
pageResult = await pager.NextPageAsync(_cancel); @@ -171,6 +177,7 @@ public async Task PagesByOrderedHierarchyAsync() Assert.Equal(2, pageResult.PageSize); Assert.Equal(hierarchyList.Length, pageResult.TotalCount); Assert.Empty(pageResult.Value); + Assert.True(pageResult.FetchedAllPages); } } } diff --git a/tests/Tableau.Migration.Tests/Unit/Paging/IPagerTests.cs b/tests/Tableau.Migration.Tests/Unit/Paging/IPagerTests.cs index ab1a011..d4f38b8 100644 --- a/tests/Tableau.Migration.Tests/Unit/Paging/IPagerTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Paging/IPagerTests.cs @@ -43,19 +43,19 @@ public GetAllPagesAsync() private void EnqueuePage(IPagedResult result) => _pages.Enqueue(result); - private void EnqueuePage(int pageSize, int totalCount, IEnumerable items) - => EnqueuePage(PagedResult.Succeeded(items.ToImmutableArray(), _pages.Count, pageSize, totalCount)); + private void EnqueuePage(int pageSize, int totalCount, IEnumerable items, bool lastPage) + => EnqueuePage(PagedResult.Succeeded(items.ToImmutableArray(), _pages.Count + 1, pageSize, totalCount, lastPage)); [Fact] public async Task GetsAllItemsUntilTotalCountAsync() { var items = CreateMany(10); - EnqueuePage(2, 10, items.Skip(0).Take(2)); - EnqueuePage(2, 10, items.Skip(2).Take(2)); - EnqueuePage(2, 10, items.Skip(4).Take(2)); - EnqueuePage(2, 10, items.Skip(6).Take(2)); - EnqueuePage(2, 10, items.Skip(8).Take(2)); + EnqueuePage(2, 10, items.Skip(0).Take(2), false); + EnqueuePage(2, 10, items.Skip(2).Take(2), false); + EnqueuePage(2, 10, items.Skip(4).Take(2), false); + EnqueuePage(2, 10, items.Skip(6).Take(2), false); + EnqueuePage(2, 10, items.Skip(8).Take(2), true); var results = await MockPager.Object.GetAllPagesAsync(Cancel); @@ -67,23 +67,23 @@ public async Task GetsAllItemsUntilTotalCountAsync() } [Fact] - public async Task GetsAllItemsUntilUnexpectedEmptyPageAsync() + public async Task GetsAllItemsWithUnexpectedEmptyPageAsync() { var items = CreateMany(10); - EnqueuePage(2, 10, items.Skip(0).Take(2)); - EnqueuePage(2, 10, items.Skip(2).Take(2)); - EnqueuePage(2, 10, items.Skip(4).Take(2)); - EnqueuePage(2, 10, Enumerable.Empty()); - EnqueuePage(2, 10, items.Skip(8).Take(2)); + EnqueuePage(2, 10, items.Skip(0).Take(2), false); + EnqueuePage(2, 10, items.Skip(2).Take(2), false); + EnqueuePage(2, 10, items.Skip(4).Take(2), false); + EnqueuePage(2, 10, Enumerable.Empty(), false); + EnqueuePage(2, 10, items.Skip(6).Take(2), true); var results = await MockPager.Object.GetAllPagesAsync(Cancel); results.AssertSuccess(); Assert.NotNull(results.Value); - Assert.Equal(items.Take(6), results.Value!); + Assert.Equal(items.Take(8), results.Value!); - MockPager.Verify(x => x.NextPageAsync(Cancel), Times.Exactly(4)); + MockPager.Verify(x => x.NextPageAsync(Cancel), Times.Exactly(5)); } [Fact] @@ -91,11 +91,11 @@ public async Task GetsAllItemsUntilFailureAsync() { var items = CreateMany(10); - EnqueuePage(2, 10, items.Skip(0).Take(2)); - EnqueuePage(2, 10, items.Skip(2).Take(2)); - EnqueuePage(2, 10, items.Skip(4).Take(2)); + EnqueuePage(2, 10, items.Skip(0).Take(2), false); + EnqueuePage(2, 10, items.Skip(2).Take(2), false); + EnqueuePage(2, 10, items.Skip(4).Take(2), false); EnqueuePage(PagedResult.Failed(new Exception())); - EnqueuePage(2, 10, items.Skip(8).Take(2)); + EnqueuePage(2, 10, items.Skip(8).Take(2), false); var results = await MockPager.Object.GetAllPagesAsync(Cancel); @@ -105,6 +105,48 @@ public async Task GetsAllItemsUntilFailureAsync() MockPager.Verify(x => x.NextPageAsync(Cancel), Times.Exactly(4)); } + + [Fact] + 
public async Task GetsAllItemsLastPageIncomplete() + { + var items = CreateMany(10); + + EnqueuePage(2, 10, items.Skip(0).Take(2), false); + EnqueuePage(2, 10, items.Skip(2).Take(2), false); + EnqueuePage(2, 10, items.Skip(4).Take(2), false); + EnqueuePage(2, 10, items.Skip(6).Take(2), false); + EnqueuePage(2, 10, items.Skip(8).Take(1), true); + EnqueuePage(PagedResult.Failed(new Exception())); + + var results = await MockPager.Object.GetAllPagesAsync(Cancel); + + results.AssertSuccess(); + Assert.NotNull(results.Value); + Assert.Equal(items.Take(9), results.Value!); + + MockPager.Verify(x => x.NextPageAsync(Cancel), Times.Exactly(5)); + } + + [Fact] + public async Task GetsAllItemsTwoPagesIncomplete() + { + var items = CreateMany(10); + + EnqueuePage(2, 10, items.Skip(0).Take(2), false); + EnqueuePage(2, 10, items.Skip(2).Take(2), false); + EnqueuePage(2, 10, items.Skip(4).Take(1), false); + EnqueuePage(2, 10, items.Skip(5).Take(2), false); + EnqueuePage(2, 10, items.Skip(7).Take(1), true); + EnqueuePage(PagedResult.Failed(new Exception())); + + var results = await MockPager.Object.GetAllPagesAsync(Cancel); + + results.AssertSuccess(); + Assert.NotNull(results.Value); + Assert.Equal(items.Take(8), results.Value!); + + MockPager.Verify(x => x.NextPageAsync(Cancel), Times.Exactly(5)); + } } } } diff --git a/tests/Tableau.Migration.Tests/Unit/Paging/IndexedPagerBaseTests.cs b/tests/Tableau.Migration.Tests/Unit/Paging/IndexedPagerBaseTests.cs index 7de3857..61d4910 100644 --- a/tests/Tableau.Migration.Tests/Unit/Paging/IndexedPagerBaseTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Paging/IndexedPagerBaseTests.cs @@ -37,7 +37,7 @@ protected override Task> GetPageAsync(int pageNumb { CalledPageNumbers.Add(pageNumber); - var result = PagedResult.Succeeded(ImmutableArray.Empty, pageNumber, pageSize, 2 * pageSize); + var result = PagedResult.Succeeded(ImmutableArray.Empty, pageNumber, pageSize, 2 * pageSize, true); return Task.FromResult((IPagedResult)result); } } diff --git a/tests/Tableau.Migration.Tests/Unit/Paging/PagedResultTests.cs b/tests/Tableau.Migration.Tests/Unit/Paging/PagedResultTests.cs index 3cf56ae..78f8a3b 100644 --- a/tests/Tableau.Migration.Tests/Unit/Paging/PagedResultTests.cs +++ b/tests/Tableau.Migration.Tests/Unit/Paging/PagedResultTests.cs @@ -30,7 +30,7 @@ public void Initializes() { var items = CreateMany().ToImmutableList(); - var r = PagedResult.Succeeded(items, 1, 2, 3); + var r = PagedResult.Succeeded(items, 1, 2, 3, true); r.AssertSuccess(); @@ -38,6 +38,7 @@ public void Initializes() Assert.Equal(1, r.PageNumber); Assert.Equal(2, r.PageSize); Assert.Equal(3, r.TotalCount); + Assert.True(r.FetchedAllPages); } } @@ -58,6 +59,7 @@ public void Initializes() Assert.Equal(0, r.PageNumber); Assert.Equal(0, r.PageSize); Assert.Equal(0, r.TotalCount); + Assert.True(r.FetchedAllPages); } } @@ -76,6 +78,7 @@ public void Initializes() Assert.Equal(0, r.PageNumber); Assert.Equal(0, r.PageSize); Assert.Equal(0, r.TotalCount); + Assert.True(r.FetchedAllPages); } } }
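The paging changes above add a FetchedAllPages flag to IPageInfo and PagedResult, compute it in PagedTableauServerResponse by ceiling-dividing TotalAvailable by PageSize, and have callers such as ContentMigrator and GetAllPagesAsync stop on that flag instead of counting processed items against TotalCount. The following is a minimal, self-contained C# sketch of that idea only; the Page record and GetAll helper are hypothetical illustrations and are not the SDK's IPageInfo, IPagedResult, or IPager types.

using System;
using System.Collections.Generic;
using System.Linq;

// Hypothetical stand-in for a paged result; the shape is illustrative, not the SDK's.
public sealed record Page<T>(IReadOnlyList<T> Items, int PageNumber, int PageSize, int TotalCount)
{
    // Mirrors the ceiling-division check used in PagedTableauServerResponse above:
    // pagesAvailable = TotalCount / PageSize, plus one more page when there is a remainder.
    public bool FetchedAllPages =>
        TotalCount == 0 ||
        PageNumber >= (TotalCount / PageSize) + (TotalCount % PageSize > 0 ? 1 : 0);
}

public static class PagingDemo
{
    // Pages through an in-memory list and stops when FetchedAllPages is true,
    // rather than comparing a running item count against TotalCount.
    public static IEnumerable<T> GetAll<T>(IReadOnlyList<T> source, int pageSize)
    {
        var pageNumber = 1;
        while (true)
        {
            var items = source.Skip((pageNumber - 1) * pageSize).Take(pageSize).ToList();
            var page = new Page<T>(items, pageNumber, pageSize, source.Count);

            foreach (var item in items)
                yield return item;

            if (page.FetchedAllPages)
                yield break;

            pageNumber++;
        }
    }

    public static void Main()
    {
        // 10 items with a page size of 3 -> 4 pages (3 + 3 + 3 + 1); the short final
        // page still reports FetchedAllPages == true, so iteration ends cleanly.
        var all = GetAll(Enumerable.Range(1, 10).ToList(), pageSize: 3).ToList();
        Console.WriteLine(all.Count); // 10
    }
}

Under this check, a short or unexpectedly empty intermediate page no longer ends iteration early, which is the behavior the IPagerTests cases above (GetsAllItemsWithUnexpectedEmptyPageAsync, GetsAllItemsLastPageIncomplete, GetsAllItemsTwoPagesIncomplete) exercise.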