diff --git a/.github/workflows/enforce-PR-labelling.yml b/.github/workflows/enforce-PR-labelling.yml new file mode 100644 index 000000000..e9d7f9ae7 --- /dev/null +++ b/.github/workflows/enforce-PR-labelling.yml @@ -0,0 +1,56 @@ +name: PR Validation + +on: + pull_request: + # one limitation here is that there's no trigger to re-run any time we "connect" or "disconnect" an issue + types: [opened, edited, labeled, unlabeled, synchronize] + workflow_dispatch: + +jobs: + validate-pr: + runs-on: ubuntu-latest + steps: + - name: Check out the repository + uses: actions/checkout@v2 + + - name: Validate PR has labels + id: check_labels + run: | + PR_LABELS=$(jq -r '.pull_request.labels | length' $GITHUB_EVENT_PATH) + if [ "$PR_LABELS" -eq "0" ]; then + echo "No labels found on the pull request." + exit 1 + fi + + - name: Validate PR is linked to an issue + id: check_linked_issues + run: | + PR_NUMBER=$(jq -r '.pull_request.number' $GITHUB_EVENT_PATH) + REPO_OWNER=$(jq -r '.repository.owner.login' $GITHUB_EVENT_PATH) + REPO_NAME=$(jq -r '.repository.name' $GITHUB_EVENT_PATH) + TIMELINE_JSON=$(curl -s "https://api.github.com/repos/$REPO_OWNER/$REPO_NAME/issues/$PR_NUMBER/timeline") + + # Count the number of times the timeline sees a "connected" event and subract the number of "disconnected" events + # We might also consider using the "cross-referenced" event in the future if actual connecting/disconnecting is too heavy-handed + LINKED_ISSUES=$(echo "$TIMELINE_JSON" | jq ' + reduce .[] as $event ( + 0; + if $event.event == "connected" then + . + 1 + elif $event.event == "disconnected" then + . - 1 + else + . + end + )') + + # If the sum is 0, then no linked issues were found + if [ "$LINKED_ISSUES" -eq "0" ]; then + echo "āŒ No linked issues found in the pull request." + exit 1 + elif [ "$LINKED_ISSUES" -lt "0" ]; then + echo "Error: More disconnected events than connected events. 
This shouldn't be possible and likely indicates a big ol' šŸŖ²" + exit 1 + else + echo "Linked issues found: $LINKED_ISSUES" + fi diff --git a/.github/workflows/enforce-issue-labelling.yml b/.github/workflows/enforce-issue-labelling.yml new file mode 100644 index 000000000..113fccd28 --- /dev/null +++ b/.github/workflows/enforce-issue-labelling.yml @@ -0,0 +1,32 @@ +name: Issue Validation + +on: + issues: + types: [closed] + +jobs: + validate-issue: + runs-on: ubuntu-latest + steps: + - name: Check out the repository + uses: actions/checkout@v2 + + - name: Validate issue has labels + id: check_labels + run: | + ISSUE_LABELS=$(jq -r '.issue.labels | length' $GITHUB_EVENT_PATH) + if [ "$ISSUE_LABELS" -eq "0" ]; then + echo "No labels found on the issue." + # Re-open the issue + ISSUE_NUMBER=$(jq -r '.issue.number' $GITHUB_EVENT_PATH) + REPO_OWNER=$(jq -r '.repository.owner.login' $GITHUB_EVENT_PATH) + REPO_NAME=$(jq -r '.repository.name' $GITHUB_EVENT_PATH) + curl -L \ + -X PATCH \ + -H "Accept: application/vnd.github.v3+json" \ + -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/$REPO_OWNER/$REPO_NAME/issues/$ISSUE_NUMBER \ + -d '{"state":"open"}' + exit 1 + fi diff --git a/.github/workflows/post-release.yaml b/.github/workflows/post-release.yaml new file mode 100644 index 000000000..33aa2d6ec --- /dev/null +++ b/.github/workflows/post-release.yaml @@ -0,0 +1,15 @@ +# Toggle webhook to pull latest release onto pelicanplatform.org and update the download offerings there +# Post release this will result in the new release being available and the Major/Minor pointers being moved/created accordingly +name: post-release + +on: + release: + types: [published] + +jobs: + toggle-webhook: + runs-on: ubuntu-latest + steps: + - name: Toggle Webhook + run: | + curl -X POST https://dl.pelicanplatform.org/api/api/hooks/release-download-toggle diff --git a/.github/workflows/test-template.yml 
b/.github/workflows/test-template.yml new file mode 100644 index 000000000..12e09f25f --- /dev/null +++ b/.github/workflows/test-template.yml @@ -0,0 +1,99 @@ +name: Test Template + +on: + workflow_call: + inputs: + tags: + required: true + type: string + coverprofile: + required: true + type: string + binary_name: + required: true + type: string + +jobs: + test: + runs-on: ubuntu-latest + container: + image: hub.opensciencegrid.org/pelican_platform/pelican-dev:latest-itb + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: actions/setup-node@v4 + with: + node-version: 20 + - name: Fetch tags + run: | + git config --global --add safe.directory /__w/pelican/pelican + git fetch --force --tags + - name: Cache Next.js + uses: actions/cache@v4 + with: + path: | + ~/.npm + ${{ github.workspace }}/.next/cache + key: ${{ runner.os }}-nextjs-${{ hashFiles('**/package-lock.json') }}-${{ hashFiles('**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx') }} + restore-keys: | + ${{ runner.os }}-nextjs-${{ hashFiles('**/package-lock.json') }}- + - name: Test + run: | + make web-build + go test -tags=${{ inputs.tags }} -timeout 15m -coverpkg=./... -coverprofile=${{ inputs.coverprofile }} -covermode=count ./... 
+ - name: Get total code coverage + if: github.event_name == 'pull_request' + id: cc + run: | + set -x + cc_total=`go tool cover -func=${{ inputs.coverprofile }} | grep total | grep -Eo '[0-9]+\.[0-9]+'` + echo "cc_total=$cc_total" >> $GITHUB_OUTPUT + - name: Restore base test coverage + id: base-coverage + if: github.event.pull_request.base.sha != '' + uses: actions/cache@v4 + with: + path: | + unit-base.txt + key: ${{ runner.os }}-unit-test-coverage-${{ (github.event.pull_request.base.sha != github.event.after) && github.event.pull_request.base.sha || github.event.after }} + - name: Run test for base code + if: steps.base-coverage.outputs.cache-hit != 'true' && github.event.pull_request.base.sha != '' + run: | + git config --global --add safe.directory "$GITHUB_WORKSPACE" + git fetch origin main ${{ github.event.pull_request.base.sha }} + HEAD=$(git rev-parse HEAD) + git reset --hard ${{ github.event.pull_request.base.sha }} + make web-build + go generate ./... + go test -tags=${{ inputs.tags }} -timeout 15m -coverpkg=./... -coverprofile=base_coverage.out -covermode=count ./... 
+ go tool cover -func=base_coverage.out > unit-base.txt + git reset --hard $HEAD + - name: Get base code coverage value + if: github.event_name == 'pull_request' + id: cc_b + run: | + set -x + cc_base_total=`grep total ./unit-base.txt | grep -Eo '[0-9]+\.[0-9]+'` + echo "cc_base_total=$cc_base_total" >> $GITHUB_OUTPUT + - name: Add coverage information to action summary + if: github.event_name == 'pull_request' + run: echo 'Code coverage ' ${{steps.cc.outputs.cc_total}}'% Prev ' ${{steps.cc_b.outputs.cc_base_total}}'%' >> $GITHUB_STEP_SUMMARY + - name: Run GoReleaser for Ubuntu + uses: goreleaser/goreleaser-action@v5 + with: + distribution: goreleaser + version: latest + args: build --single-target --clean --snapshot + - name: Copy files (Ubuntu) + run: | + cp dist/${{ inputs.binary_name }}_linux_amd64_v1/${{ inputs.binary_name }} ./pelican + - name: Run Integration Tests + run: ./github_scripts/citests.sh + - name: Run End-to-End Test for Object get/put + run: ./github_scripts/get_put_test.sh + - name: Run End-to-End Test for Director stat + run: ./github_scripts/stat_test.sh + - name: Run End-to-End Test for --version flag + run: ./github_scripts/version_test.sh diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 7d0e56fc3..0804df90f 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,8 +13,8 @@ jobs: - name: Checkout code uses: actions/checkout@v4 with: - # Do fetch depth 0 here because otherwise goreleaser might not work properly: - # https://goreleaser.com/ci/actions/?h=tag#workflow + # Do fetch depth 0 here because otherwise goreleaser might not work properly: + # https://goreleaser.com/ci/actions/?h=tag#workflow fetch-depth: 0 - uses: actions/setup-node@v4 with: @@ -55,94 +55,14 @@ jobs: version: latest args: build --single-target --clean --snapshot test-ubuntu: - runs-on: ubuntu-latest - container: - image: hub.opensciencegrid.org/pelican_platform/pelican-dev:latest-itb - steps: - - name: Checkout code - 
uses: actions/checkout@v4 - with: - # See above for why fetch depth is 0 here - fetch-depth: 0 - - uses: actions/setup-node@v4 - with: - node-version: 20 - # Fetch the tags is essential so that goreleaser can build the correct version. Workaround found here: - # https://github.com/actions/checkout/issues/290 - - name: Fetch tags - run: | - git config --global --add safe.directory /__w/pelican/pelican - git fetch --force --tags - - name: Cache Next.js - uses: actions/cache@v4 - with: - path: | - ~/.npm - ${{ github.workspace }}/.next/cache - # Generate a new cache whenever packages or source files change. - key: ${{ runner.os }}-nextjs-${{ hashFiles('**/package-lock.json') }}-${{ hashFiles('**/*.js', '**/*.jsx', '**/*.ts', '**/*.tsx') }} - # If source files changed but packages didn't, rebuild from a prior cache. - restore-keys: | - ${{ runner.os }}-nextjs-${{ hashFiles('**/package-lock.json') }}- - - name: Test - run: | - make web-build - go test -timeout 15m -coverpkg=./... -coverprofile=coverage.out -covermode=count ./... - - name: Get total code coverage - if: github.event_name == 'pull_request' - id: cc - run: | - set -x - cc_total=`go tool cover -func=coverage.out | grep total | grep -Eo '[0-9]+\.[0-9]+'` - echo "cc_total=$cc_total" >> $GITHUB_OUTPUT - - name: Restore base test coverage - id: base-coverage - if: github.event.pull_request.base.sha != '' - uses: actions/cache@v4 - with: - path: | - unit-base.txt - # Use base sha for PR or new commit hash for master/main push in test result key. 
- key: ${{ runner.os }}-unit-test-coverage-${{ (github.event.pull_request.base.sha != github.event.after) && github.event.pull_request.base.sha || github.event.after }} - - name: Run test for base code - if: steps.base-coverage.outputs.cache-hit != 'true' && github.event.pull_request.base.sha != '' - run: | - git config --global --add safe.directory "$GITHUB_WORKSPACE" - git fetch origin main ${{ github.event.pull_request.base.sha }} - HEAD=$(git rev-parse HEAD) - git reset --hard ${{ github.event.pull_request.base.sha }} - make web-build - go generate ./... - go test -timeout 15m -coverpkg=./... -coverprofile=base_coverage.out -covermode=count ./... - go tool cover -func=base_coverage.out > unit-base.txt - git reset --hard $HEAD - - name: Get base code coverage value - if: github.event_name == 'pull_request' - id: cc_b - run: | - set -x - cc_base_total=`grep total ./unit-base.txt | grep -Eo '[0-9]+\.[0-9]+'` - echo "cc_base_total=$cc_base_total" >> $GITHUB_OUTPUT - - name: Add coverage information to action summary - if: github.event_name == 'pull_request' - run: echo 'Code coverage ' ${{steps.cc.outputs.cc_total}}'% Prev ' ${{steps.cc_b.outputs.cc_base_total}}'%' >> $GITHUB_STEP_SUMMARY - - name: Run GoReleaser for Ubuntu - uses: goreleaser/goreleaser-action@v5 - with: - # either 'goreleaser' (default) or 'goreleaser-pro' - distribution: goreleaser - version: latest - args: --clean --snapshot - - name: Copy files (Ubuntu) - run: | - cp dist/pelican_linux_amd64_v1/pelican ./ - - name: Run Integration Tests - run: ./github_scripts/citests.sh - - name: Run End-to-End Test for Object get/put - run: ./github_scripts/get_put_test.sh - - name: Run End-to-End Test for Director stat - run: ./github_scripts/stat_test.sh - - name: Run End-to-End Test of x509 access - run: ./github_scripts/x509_test.sh - - name: Run End-to-End Test for --version flag - run: ./github_scripts/version_test.sh + uses: ./.github/workflows/test-template.yml + with: + tags: "" + coverprofile: 
"coverage.out" + binary_name: "pelican" + test-ubuntu-server: + uses: ./.github/workflows/test-template.yml + with: + tags: "lotman" + coverprofile: "coverage-server.out" + binary_name: "pelican-server" diff --git a/.goreleaser.yml b/.goreleaser.yml index dc184d93a..5ae8b446f 100644 --- a/.goreleaser.yml +++ b/.goreleaser.yml @@ -50,9 +50,30 @@ builds: goarch: ppc64le - goos: darwin goarch: ppc64le - + # Set things up to build a second server binary that enables Lotman. Eventually + # we'll also use this to filter which modules are built into the binary. + - env: + - CGO_ENABLED=0 + goos: + - linux + goarch: + - "amd64" + - "arm64" + id: "pelican-server" + dir: ./cmd + binary: pelican-server + tags: + - forceposix + - lotman + ldflags: + - -s -w -X github.com/pelicanplatform/pelican/config.commit={{.Commit}} -X github.com/pelicanplatform/pelican/config.date={{.Date}} -X github.com/pelicanplatform/pelican/config.builtBy=goreleaser -X github.com/pelicanplatform/pelican/config.version={{.Version}} +# Goreleaser complains if there's a different number of binaries built for different architectures +# in the same archive. Instead of plopping pelican-server in the same archive as pelican, split the +# builds into separate archives. 
archives: - id: pelican + builds: + - pelican name_template: >- {{ .ProjectName }}_ {{- title .Os }}_ @@ -62,6 +83,15 @@ archives: - goos: windows format: zip wrap_in_directory: '{{ .ProjectName }}-{{ trimsuffix .Version "-next" }}' + - id: pelican-server + builds: + - pelican-server + name_template: >- + {{ .ProjectName }}-server_ + {{- title .Os }}_ + {{- if eq .Arch "amd64" }}x86_64 + {{- else }}{{ .Arch }}{{ end }} + wrap_in_directory: '{{ .ProjectName }}-server-{{ trimsuffix .Version "-next" }}' checksum: name_template: 'checksums.txt' snapshot: diff --git a/cache/cache_api.go b/cache/cache_api.go index ae4f38823..a47f18ce7 100644 --- a/cache/cache_api.go +++ b/cache/cache_api.go @@ -37,7 +37,7 @@ import ( func CheckCacheSentinelLocation() error { if param.Cache_SentinelLocation.IsSet() { sentinelPath := param.Cache_SentinelLocation.GetString() - dataLoc := param.Cache_LocalRoot.GetString() + dataLoc := param.Cache_StorageLocation.GetString() sentinelPath = path.Clean(sentinelPath) if path.Base(sentinelPath) != sentinelPath { return errors.Errorf("invalid Cache.SentinelLocation path. File must not contain a directory. 
Got %s", sentinelPath) @@ -51,17 +51,17 @@ func CheckCacheSentinelLocation() error { return nil } -// Periodically scan the //pelican/monitoring directory to clean up test files +// Periodically scan the ${Cache.StorageLocation}/pelican/monitoring directory to clean up test files // TODO: Director test files should be under /pelican/monitoring/directorTest and the file names // should have director-test- as the prefix func LaunchDirectorTestFileCleanup(ctx context.Context) { server_utils.LaunchWatcherMaintenance(ctx, - []string{filepath.Join(param.Cache_LocalRoot.GetString(), "pelican", "monitoring")}, + []string{filepath.Join(param.Cache_StorageLocation.GetString(), "pelican", "monitoring")}, "cache director-based health test clean up", time.Minute, func(notifyEvent bool) error { // We run this function regardless of notifyEvent to do the cleanup - dirPath := filepath.Join(param.Cache_LocalRoot.GetString(), "pelican", "monitoring") + dirPath := filepath.Join(param.Cache_StorageLocation.GetString(), "pelican", "monitoring") dirInfo, err := os.Stat(dirPath) if err != nil { return err diff --git a/cache/cache_api_test.go b/cache/cache_api_test.go index 22694622f..a9ce4c318 100644 --- a/cache/cache_api_test.go +++ b/cache/cache_api_test.go @@ -50,7 +50,7 @@ func TestCheckCacheSentinelLocation(t *testing.T) { tmpDir := t.TempDir() server_utils.ResetTestState() viper.Set(param.Cache_SentinelLocation.GetName(), "test.txt") - viper.Set(param.Cache_LocalRoot.GetName(), tmpDir) + viper.Set(param.Cache_StorageLocation.GetName(), tmpDir) err := CheckCacheSentinelLocation() require.Error(t, err) assert.Contains(t, err.Error(), "failed to open Cache.SentinelLocation") @@ -61,7 +61,7 @@ func TestCheckCacheSentinelLocation(t *testing.T) { server_utils.ResetTestState() viper.Set(param.Cache_SentinelLocation.GetName(), "test.txt") - viper.Set(param.Cache_LocalRoot.GetName(), tmpDir) + viper.Set(param.Cache_StorageLocation.GetName(), tmpDir) file, err := 
os.Create(filepath.Join(tmpDir, "test.txt")) require.NoError(t, err) diff --git a/cache/self_monitor.go b/cache/self_monitor.go index 0bd94706d..44a801dd5 100644 --- a/cache/self_monitor.go +++ b/cache/self_monitor.go @@ -59,7 +59,7 @@ func InitSelfTestDir() error { return err } - basePath := param.Cache_LocalRoot.GetString() + basePath := param.Cache_StorageLocation.GetString() pelicanMonPath := filepath.Join(basePath, "/pelican") monitoringPath := filepath.Join(pelicanMonPath, "/monitoring") selfTestPath := filepath.Join(monitoringPath, "/selfTest") @@ -80,9 +80,9 @@ func InitSelfTestDir() error { } func generateTestFile() (string, error) { - basePath := param.Cache_LocalRoot.GetString() + basePath := param.Cache_StorageLocation.GetString() if basePath == "" { - return "", errors.New("failed to generate self-test file for cache: Cache.LocalRoot is not set.") + return "", errors.New("failed to generate self-test file for cache: Cache.StorageLocation is not set.") } selfTestPath := filepath.Join(basePath, selfTestDir) _, err := os.Stat(selfTestPath) @@ -225,7 +225,7 @@ func downloadTestFile(ctx context.Context, fileUrl string) error { } func deleteTestFile(fileUrlStr string) error { - basePath := param.Cache_LocalRoot.GetString() + basePath := param.Cache_StorageLocation.GetString() fileUrl, err := url.Parse(fileUrlStr) if err != nil { return errors.Wrap(err, "invalid file url to remove the test file") diff --git a/client/fed_test.go b/client/fed_test.go index a066177fc..17b6cfb5f 100644 --- a/client/fed_test.go +++ b/client/fed_test.go @@ -623,7 +623,7 @@ func TestDirectReads(t *testing.T) { assert.Equal(t, transferResults[0].TransferredBytes, int64(17)) // Assert that the file was not cached - cacheDataLocation := param.Cache_LocalRoot.GetString() + export.FederationPrefix + cacheDataLocation := param.Cache_StorageLocation.GetString() + export.FederationPrefix filepath := filepath.Join(cacheDataLocation, filepath.Base(tempFile.Name())) _, err = os.Stat(filepath) 
assert.True(t, os.IsNotExist(err)) diff --git a/cmd/fed_serve_cache_test.go b/cmd/fed_serve_cache_test.go index 22a7b29c5..359c3b70d 100644 --- a/cmd/fed_serve_cache_test.go +++ b/cmd/fed_serve_cache_test.go @@ -77,7 +77,7 @@ func TestFedServeCache(t *testing.T) { viper.Set("ConfigDir", tmpPath) viper.Set("Origin.RunLocation", filepath.Join(tmpPath, "xOrigin")) viper.Set("Cache.RunLocation", filepath.Join(tmpPath, "xCache")) - viper.Set("Cache.LocalRoot", filepath.Join(tmpPath, "data")) + viper.Set("Cache.StorageLocation", filepath.Join(tmpPath, "data")) viper.Set("Origin.StoragePrefix", filepath.Join(origPath, "ns")) viper.Set("Origin.FederationPrefix", "/test") testFilePath := filepath.Join(origPath, "ns", "test-file.txt") diff --git a/cmd/main.go b/cmd/main.go index b6d4f0cea..295915339 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -45,7 +45,7 @@ func handleCLI(args []string) error { // Being case-insensitive execName = strings.ToLower(execName) - if strings.HasPrefix(execName, "stash_plugin") || strings.HasPrefix(execName, "osdf_plugin") || strings.HasPrefix(execName, "pelican_xfer_plugin") { + if strings.HasPrefix(execName, "stash_plugin") || strings.HasPrefix(execName, "osdf_plugin") || strings.HasPrefix(execName, "pelican_xfer_plugin") || strings.HasPrefix(execName, "pelican_plugin") { stashPluginMain(args[1:]) } else if strings.HasPrefix(execName, "stashcp") { err := copyCmd.Execute() diff --git a/config/config.go b/config/config.go index 0762d9abf..a5866e698 100644 --- a/config/config.go +++ b/config/config.go @@ -32,6 +32,7 @@ import ( "net/url" "os" "path/filepath" + "reflect" "slices" "sort" "strconv" @@ -50,6 +51,7 @@ import ( "github.com/spf13/viper" "golang.org/x/sync/errgroup" + "github.com/pelicanplatform/pelican/docs" "github.com/pelicanplatform/pelican/param" "github.com/pelicanplatform/pelican/pelican_url" "github.com/pelicanplatform/pelican/server_structs" @@ -171,7 +173,8 @@ var ( "": true, } - clientInitialized = false + clientInitialized = 
false + printClientConfigOnce sync.Once ) func init() { @@ -922,6 +925,99 @@ func PrintConfig() error { return nil } +func contains(slice []string, item string) bool { + for _, s := range slice { + if s == item { + return true + } + } + return false +} + +// GetComponentConfig filters the full config and returns only the config parameters related to the given component. +// The filtering is based on whether the given component is part of the components in docs.parameters.yaml. +func GetComponentConfig(component string) (map[string]interface{}, error) { + rawConfig, err := param.UnmarshalConfig(viper.GetViper()) + if err != nil { + return nil, err + } + value, hasValue := filterConfigRecursive(reflect.ValueOf(rawConfig), "", component) + if hasValue { + return (*value).(map[string]interface{}), nil + } + return nil, nil +} + +// filterConfigRecursive is a helper function for GetComponentConfig. +// It recursively creates a nested config map of the parameters that relate to the given component. +func filterConfigRecursive(v reflect.Value, currentPath string, component string) (*interface{}, bool) { + if v.Kind() == reflect.Ptr { + v = v.Elem() + } + + switch v.Kind() { + case reflect.Struct: + t := v.Type() + result := make(map[string]interface{}) + hasField := false + for i := 0; i < v.NumField(); i++ { + field := v.Field(i) + fieldType := t.Field(i) + if !fieldType.IsExported() { + continue + } + + fieldName := strings.ToLower(fieldType.Name) + + var newPath string + if currentPath == "" { + newPath = fieldName + } else { + newPath = currentPath + "." 
+ fieldName + } + + fieldValue, fieldHasValue := filterConfigRecursive(field, newPath, component) + if fieldHasValue && fieldValue != nil { + result[fieldName] = *fieldValue + hasField = true + } + } + if hasField { + resultInterface := interface{}(result) + return &resultInterface, true + } + return nil, false + default: + lowerPath := strings.ToLower(currentPath) + paramDoc, exists := docs.ParsedParameters[lowerPath] + if exists && contains(paramDoc.Components, component) { + resultValue := v.Interface() + resultInterface := interface{}(resultValue) + return &resultInterface, true + } + return nil, false + } +} + +// PrintClientConfig prints the client config in JSON format to stderr. +func PrintClientConfig() error { + clientConfig, err := GetComponentConfig("client") + if err != nil { + return err + } + + bytes, err := json.MarshalIndent(clientConfig, "", " ") + if err != nil { + return err + } + fmt.Fprintln(os.Stderr, + "================ Pelican Client Configuration ================\n", + string(bytes), + "\n", + "============= End of Pelican Client Configuration ============") + return nil +} + func SetServerDefaults(v *viper.Viper) error { configDir := v.GetString("ConfigDir") v.SetConfigType("yaml") @@ -953,18 +1049,16 @@ func SetServerDefaults(v *viper.Viper) error { if IsRootExecution() { v.SetDefault(param.Origin_RunLocation.GetName(), filepath.Join("/run", "pelican", "xrootd", "origin")) v.SetDefault(param.Cache_RunLocation.GetName(), filepath.Join("/run", "pelican", "xrootd", "cache")) - // To ensure Cache.DataLocation still works, we default Cache.LocalRoot to Cache.DataLocation - // The logic is extracted from handleDeprecatedConfig as we manually set the default value here - v.SetDefault(param.Cache_DataLocation.GetName(), "/run/pelican/cache") - v.SetDefault(param.Cache_LocalRoot.GetName(), v.GetString(param.Cache_DataLocation.GetName())) - - if v.IsSet(param.Cache_DataLocation.GetName()) { - v.SetDefault(param.Cache_DataLocations.GetName(), 
[]string{filepath.Join(v.GetString(param.Cache_DataLocation.GetName()), "data")}) - v.SetDefault(param.Cache_MetaLocations.GetName(), []string{filepath.Join(v.GetString(param.Cache_DataLocation.GetName()), "meta")}) - } else { - v.SetDefault(param.Cache_DataLocations.GetName(), []string{"/run/pelican/cache/data"}) - v.SetDefault(param.Cache_MetaLocations.GetName(), []string{"/run/pelican/cache/meta"}) + + // Several deprecated keys point to Cache.StorageLocation, and by the time we reach this section of code, we should + // have already mapped those keys in handleDeprecatedConfig(). To prevent overriding potentially-mapped deprecated keys, + // we only re-set the default here if this key is not set. + if !v.IsSet(param.Cache_StorageLocation.GetName()) { + v.SetDefault(param.Cache_StorageLocation.GetName(), filepath.Join("/run", "pelican", "cache")) } + v.SetDefault(param.Cache_NamespaceLocation.GetName(), filepath.Join(param.Cache_StorageLocation.GetString(), "namespace")) + v.SetDefault(param.Cache_DataLocations.GetName(), []string{filepath.Join(param.Cache_StorageLocation.GetString(), "data")}) + v.SetDefault(param.Cache_MetaLocations.GetName(), []string{filepath.Join(param.Cache_StorageLocation.GetString(), "meta")}) v.SetDefault(param.LocalCache_RunLocation.GetName(), filepath.Join("/run", "pelican", "localcache")) v.SetDefault(param.Origin_Multiuser.GetName(), true) @@ -1012,18 +1106,17 @@ } v.SetDefault(param.Origin_GlobusConfigLocation.GetName(), filepath.Join(runtimeDir, "xrootd", "origin", "globus")) - // To ensure Cache.DataLocation still works, we default Cache.LocalRoot to Cache.DataLocation - // The logic is extracted from handleDeprecatedConfig as we manually set the default value here - v.SetDefault(param.Cache_DataLocation.GetName(), filepath.Join(runtimeDir, "cache")) - v.SetDefault(param.Cache_LocalRoot.GetName(), v.GetString(param.Cache_DataLocation.GetName())) - - if 
v.IsSet(param.Cache_DataLocation.GetName()) { - v.SetDefault(param.Cache_DataLocations.GetName(), []string{filepath.Join(v.GetString(param.Cache_DataLocation.GetName()), "data")}) - v.SetDefault(param.Cache_MetaLocations.GetName(), []string{filepath.Join(v.GetString(param.Cache_DataLocation.GetName()), "meta")}) - } else { - v.SetDefault(param.Cache_DataLocations.GetName(), []string{filepath.Join(runtimeDir, "pelican/cache/data")}) - v.SetDefault(param.Cache_MetaLocations.GetName(), []string{filepath.Join(runtimeDir, "pelican/cache/meta")}) + + // Several deprecated keys point to Cache.StorageLocation, and by the time we reach this section of code, we should + // have already mapped those keys in handleDeprecatedConfig(). To prevent overriding potentially-mapped deprecated keys, + // we only re-set the default here if this key is not set. + // NOTE(review): use the function's `v *viper.Viper` receiver parameter here, not the package-global + // `viper` instance — the root-execution branch above does so, and using the global would silently + // ignore any non-global Viper passed in (e.g. by tests). + if !v.IsSet(param.Cache_StorageLocation.GetName()) { + v.SetDefault(param.Cache_StorageLocation.GetName(), filepath.Join(runtimeDir, "cache")) } + v.SetDefault(param.Cache_NamespaceLocation.GetName(), filepath.Join(param.Cache_StorageLocation.GetString(), "namespace")) + v.SetDefault(param.Cache_DataLocations.GetName(), []string{filepath.Join(param.Cache_StorageLocation.GetString(), "data")}) + v.SetDefault(param.Cache_MetaLocations.GetName(), []string{filepath.Join(param.Cache_StorageLocation.GetString(), "meta")}) + v.SetDefault(param.LocalCache_RunLocation.GetName(), filepath.Join(runtimeDir, "cache")) v.SetDefault(param.Origin_Multiuser.GetName(), false) } @@ -1155,11 +1248,6 @@ func InitServer(ctx context.Context, currentServers server_structs.ServerType) e } } - if param.Cache_DataLocation.IsSet() { - log.Warningf("Deprecated configuration key %s is set. 
Please migrate to use %s instead", param.Cache_DataLocation.GetName(), param.Cache_LocalRoot.GetName()) - log.Warningf("Will attempt to use the value of %s as default for %s", param.Cache_DataLocation.GetName(), param.Cache_LocalRoot.GetName()) - } - if err := SetServerDefaults(viper.GetViper()); err != nil { return err } @@ -1526,6 +1614,18 @@ func InitClient() error { clientInitialized = true + var printClientConfigErr error + printClientConfigOnce.Do(func() { + if log.GetLevel() == log.DebugLevel { + printClientConfigErr = PrintClientConfig() + } + }) + + // Return any error encountered during PrintClientConfig + if printClientConfigErr != nil { + return printClientConfigErr + } + return nil } diff --git a/config/resources/defaults.yaml b/config/resources/defaults.yaml index 8f4d49653..2e5216874 100644 --- a/config/resources/defaults.yaml +++ b/config/resources/defaults.yaml @@ -60,6 +60,7 @@ Director: CachePresenceTTL: 1m CachePresenceCapacity: 10000 Cache: + DefaultCacheTimeout: "9.5s" Port: 8442 SelfTest: true SelfTestInterval: 15s @@ -100,6 +101,7 @@ Shoveler: PortHigher: 9999 AMQPExchange: shoveled-xrd Xrootd: + MaxStartupWait: "10s" Mount: "" ManagerPort: 1213 DetailedMonitoringPort: 9930 diff --git a/director/cache_ads.go b/director/cache_ads.go index 9b19cc686..2dea53d93 100644 --- a/director/cache_ads.go +++ b/director/cache_ads.go @@ -33,6 +33,7 @@ import ( log "github.com/sirupsen/logrus" "golang.org/x/sync/errgroup" + "github.com/pelicanplatform/pelican/metrics" "github.com/pelicanplatform/pelican/param" "github.com/pelicanplatform/pelican/server_structs" "github.com/pelicanplatform/pelican/utils" @@ -81,7 +82,11 @@ func (f filterType) String() string { // 4. Set up utilities for collecting origin/health server file transfer test status // 5. Return the updated ServerAd. 
The ServerAd passed in will not be modified func recordAd(ctx context.Context, sAd server_structs.ServerAd, namespaceAds *[]server_structs.NamespaceAdV2) (updatedAd server_structs.ServerAd) { - if err := updateLatLong(ctx, &sAd); err != nil { + if err := updateLatLong(&sAd); err != nil { + if geoIPError, ok := err.(geoIPError); ok { + labels := geoIPError.labels + metrics.PelicanDirectorGeoIPErrors.With(labels).Inc() + } log.Debugln("Failed to lookup GeoIP coordinates for host", sAd.URL.Host) } @@ -239,7 +244,7 @@ func recordAd(ctx context.Context, sAd server_structs.ServerAd, namespaceAds *[] return sAd } -func updateLatLong(ctx context.Context, ad *server_structs.ServerAd) error { +func updateLatLong(ad *server_structs.ServerAd) error { if ad == nil { return errors.New("Cannot provide a nil ad to UpdateLatLong") } @@ -257,7 +262,7 @@ func updateLatLong(ctx context.Context, ad *server_structs.ServerAd) error { } // NOTE: If GeoIP resolution of this address fails, lat/long are set to 0.0 (the null lat/long) // This causes the server to be sorted to the end of the list whenever the Director requires distance-aware sorting. 
- lat, long, err := getLatLong(ctx, addr) + lat, long, err := getLatLong(addr) if err != nil { return err } diff --git a/director/cache_ads_test.go b/director/cache_ads_test.go index 15699834c..34944efdb 100644 --- a/director/cache_ads_test.go +++ b/director/cache_ads_test.go @@ -441,11 +441,11 @@ func TestRecordAd(t *testing.T) { statUtils = make(map[string]serverStatUtil) serverAds.DeleteAll() - geoIPOverrides = nil + geoNetOverrides = nil }) server_utils.ResetTestState() func() { - geoIPOverrides = nil + geoNetOverrides = nil healthTestUtilsMutex.Lock() statUtilsMutex.Lock() diff --git a/director/director.go b/director/director.go index 07dd24cb9..f8f8f6344 100644 --- a/director/director.go +++ b/director/director.go @@ -344,7 +344,6 @@ func redirectToCache(ginCtx *gin.Context) { collectDirectorRedirectionMetric(ginCtx, "cache") } }() - defer collectDirectorRedirectionMetric(ginCtx, "cache") err := versionCompatCheck(reqVer, service) if err != nil { log.Warningf("A version incompatibility was encountered while redirecting to a cache and no response was served: %v", err) diff --git a/director/sort.go b/director/sort.go index 6a11e3361..b0450f355 100644 --- a/director/sort.go +++ b/director/sort.go @@ -21,12 +21,12 @@ package director import ( "cmp" "context" + "fmt" "math/rand" "net" "net/netip" "slices" "sort" - "strings" "time" "github.com/jellydator/ttlcache/v3" @@ -59,11 +59,25 @@ type ( IP string `mapstructure:"IP"` Coordinate Coordinate `mapstructure:"Coordinate"` } + + GeoNetOverride struct { + IPNet net.IPNet + Coordinate Coordinate + } + + geoIPError struct { + labels prometheus.Labels + errorMsg string + } ) +func (e geoIPError) Error() string { + return e.errorMsg +} + var ( - invalidOverrideLogOnce = map[string]bool{} - geoIPOverrides []GeoIPOverride + // Stores the unmarshalled GeoIP override config in a form that's efficient to test + geoNetOverrides []GeoNetOverride // Stores a mapping of client IPs that have been randomly assigned a coordinate 
clientIpCache = ttlcache.New(ttlcache.WithTTL[netip.Addr, Coordinate](20 * time.Minute)) @@ -98,57 +112,73 @@ func (me SwapMaps) Swap(left, right int) { me[left], me[right] = me[right], me[left] } +// Unmarshal any configured GeoIP overrides. +// Malformed IPs and CIDRs are logged but not returned as errors. +func unmarshalOverrides() error { + var geoIPOverrides []GeoIPOverride + + // Ensure that we're starting with an empty slice. + geoNetOverrides = nil + + if err := param.GeoIPOverrides.Unmarshal(&geoIPOverrides); err != nil { + return err + } + + for _, override := range geoIPOverrides { + var ipNet *net.IPNet + + if _, parsedNet, err := net.ParseCIDR(override.IP); err == nil { + ipNet = parsedNet + } else if ip := net.ParseIP(override.IP); ip != nil { + if ip4 := ip.To4(); ip4 != nil { + ipNet = &net.IPNet{IP: ip4, Mask: net.CIDRMask(32, 32)} + } else if ip16 := ip.To16(); ip16 != nil { + ipNet = &net.IPNet{IP: ip16, Mask: net.CIDRMask(128, 128)} + } + } + + if ipNet == nil { + // Log the error, and continue looking for good configuration. + log.Warningf("Failed to parse configured GeoIPOverride address (%s). Unable to use for GeoIP resolution!", override.IP) + continue + } + geoNetOverrides = append(geoNetOverrides, GeoNetOverride{IPNet: *ipNet, Coordinate: override.Coordinate}) + } + return nil +} + // Check for any pre-configured IP-to-lat/long overrides. If the passed address // matches an override IP (either directly or via CIDR masking), then we use the // configured lat/long from the override instead of relying on MaxMind. // NOTE: We don't return an error because if checkOverrides encounters an issue, // we still have GeoIP to fall back on. func checkOverrides(addr net.IP) (coordinate *Coordinate) { - // Unmarshal the values, but only the first time we run through this block - if geoIPOverrides == nil { - err := param.GeoIPOverrides.Unmarshal(&geoIPOverrides) + // Unmarshal the GeoIP override config if we haven't already done so. 
+ if geoNetOverrides == nil { + err := unmarshalOverrides() if err != nil { - log.Warningf("Error while unmarshaling GeoIP Overrides: %v", err) + log.Warningf("Error while unmarshalling GeoIP overrides: %v", err) + return nil } } - - for _, geoIPOverride := range geoIPOverrides { - // Check for regular IP addresses before CIDR - overrideIP := net.ParseIP(geoIPOverride.IP) - if overrideIP == nil { - // The IP is malformed - if !invalidOverrideLogOnce[geoIPOverride.IP] && !strings.Contains(geoIPOverride.IP, "/") { - // Don't return here, because we have more to check. - // Do provide a notice to the user, however. - log.Warningf("Failed to parse configured GeoIPOverride address (%s). Unable to use for GeoIP resolution!", geoIPOverride.IP) - invalidOverrideLogOnce[geoIPOverride.IP] = true - } - } - if overrideIP.Equal(addr) { - return &geoIPOverride.Coordinate - } - - // Alternatively, we can match by CIDR blocks - if strings.Contains(geoIPOverride.IP, "/") { - _, ipNet, err := net.ParseCIDR(geoIPOverride.IP) - if err != nil { - if !invalidOverrideLogOnce[geoIPOverride.IP] { - // Same reason as above for not returning. - log.Warningf("Failed to parse configured GeoIPOverride CIDR address (%s): %v. 
Unable to use for GeoIP resolution!", geoIPOverride.IP, err) - invalidOverrideLogOnce[geoIPOverride.IP] = true - } - continue - } - if ipNet.Contains(addr) { - return &geoIPOverride.Coordinate - } + for _, override := range geoNetOverrides { + if override.IPNet.Contains(addr) { + return &override.Coordinate } } - return nil } -func getLatLong(ctx context.Context, addr netip.Addr) (lat float64, long float64, err error) { +func setProjectLabel(ctx context.Context, labels *prometheus.Labels) { + project, ok := ctx.Value(ProjectContextKey{}).(string) + if !ok || project == "" { + (*labels)["proj"] = "unknown" + } else { + (*labels)["proj"] = project + } +} + +func getLatLong(addr netip.Addr) (lat float64, long float64, err error) { ip := net.IP(addr.AsSlice()) override := checkOverrides(ip) if override != nil { @@ -159,7 +189,7 @@ func getLatLong(ctx context.Context, addr netip.Addr) (lat float64, long float64 labels := prometheus.Labels{ "network": "", "source": "", - "proj": "", + "proj": "", // this will be set in the setProjectLabel function } network, ok := utils.ApplyIPMask(addr.String()) @@ -170,26 +200,16 @@ func getLatLong(ctx context.Context, addr netip.Addr) (lat float64, long float64 labels["network"] = network } - project, ok := ctx.Value(ProjectContextKey{}).(string) - if !ok || project == "" { - log.Warningf("Failed to get project from context") - labels["proj"] = "unknown" - labels["source"] = "server" - } else { - labels["proj"] = project - } - reader := maxMindReader.Load() if reader == nil { - err = errors.New("No GeoIP database is available") labels["source"] = "server" - metrics.PelicanDirectorGeoIPErrors.With(labels).Inc() + err = geoIPError{labels: labels, errorMsg: "No GeoIP database is available"} return } record, err := reader.City(ip) if err != nil { labels["source"] = "server" - metrics.PelicanDirectorGeoIPErrors.With(labels).Inc() + err = geoIPError{labels: labels, errorMsg: err.Error()} return } lat = record.Location.Latitude @@ -199,9 
+219,10 @@ func getLatLong(ctx context.Context, addr netip.Addr) (lat float64, long float64 // There's likely a problem with the GeoIP database or the IP address. Usually this just means the IP address // comes from a private range. if lat == 0 && long == 0 { - log.Warningf("GeoIP Resolution of the address %s resulted in the null lat/long. This will result in random server sorting.", ip.String()) + errMsg := fmt.Sprintf("GeoIP Resolution of the address %s resulted in the null lat/long. This will result in random server sorting.", ip.String()) + log.Warning(errMsg) labels["source"] = "client" - metrics.PelicanDirectorGeoIPErrors.With(labels).Inc() + err = geoIPError{labels: labels, errorMsg: errMsg} } // MaxMind provides an accuracy radius in kilometers. When it actually has no clue how to resolve a valid, public @@ -209,12 +230,13 @@ func getLatLong(ctx context.Context, addr netip.Addr) (lat float64, long float64 // should be very suspicious of the data, and mark it as appearing at the null lat/long (and provide a warning in // the Director), which also triggers random weighting in our sort algorithms. if record.Location.AccuracyRadius >= 900 { - log.Warningf("GeoIP resolution of the address %s resulted in a suspiciously large accuracy radius of %d km. "+ + errMsg := fmt.Sprintf("GeoIP resolution of the address %s resulted in a suspiciously large accuracy radius of %d km. "+ "This will be treated as GeoIP resolution failure and result in random server sorting. Setting lat/long to null.", ip.String(), record.Location.AccuracyRadius) + log.Warning(errMsg) lat = 0 long = 0 labels["source"] = "client" - metrics.PelicanDirectorGeoIPErrors.With(labels).Inc() + err = geoIPError{labels: labels, errorMsg: errMsg} } return @@ -229,8 +251,7 @@ func assignRandBoundedCoord(minLat, maxLat, minLong, maxLong float64) (lat, long // Given a client address, attempt to get the lat/long of the client. 
If the address is invalid or // the lat/long is not resolvable, assign a random location in the contiguous US. -func getClientLatLong(ctx context.Context, addr netip.Addr) (coord Coordinate) { - var err error +func getClientLatLong(addr netip.Addr) (coord Coordinate, err error) { if !addr.IsValid() { log.Warningf("Unable to sort servers based on client-server distance. Invalid client IP address: %s", addr.String()) coord.Lat, coord.Long = assignRandBoundedCoord(usLatMin, usLatMax, usLongMin, usLongMax) @@ -244,7 +265,7 @@ func getClientLatLong(ctx context.Context, addr netip.Addr) (coord Coordinate) { return } - coord.Lat, coord.Long, err = getLatLong(ctx, addr) + coord.Lat, coord.Long, err = getLatLong(addr) if err != nil || (coord.Lat == 0 && coord.Long == 0) { if err != nil { log.Warningf("Error while getting the client IP address: %v", err) @@ -286,7 +307,17 @@ func sortServerAds(ctx context.Context, clientAddr netip.Addr, ads []server_stru weights := make(SwapMaps, len(ads)) sortMethod := param.Director_CacheSortMethod.GetString() // This will handle the case where the client address is invalid or the lat/long is not resolvable. - clientCoord := getClientLatLong(ctx, clientAddr) + clientCoord, err := getClientLatLong(clientAddr) + if err != nil { + // If it is a geoIP error, then we get the labels and increment the error counter + // Otherwise we log the error and continue + if geoIPError, ok := err.(geoIPError); ok { + labels := geoIPError.labels + setProjectLabel(ctx, &labels) + metrics.PelicanDirectorGeoIPErrors.With(labels).Inc() + } + log.Warningf("Error while getting the client IP address: %v", err) + } // For each ad, we apply the configured sort method to determine a priority weight. 
for idx, ad := range ads { diff --git a/director/sort_test.go b/director/sort_test.go index ae1b64fbc..d20b6deb0 100644 --- a/director/sort_test.go +++ b/director/sort_test.go @@ -49,7 +49,7 @@ func TestCheckOverrides(t *testing.T) { server_utils.ResetTestState() t.Cleanup(func() { server_utils.ResetTestState() - geoIPOverrides = nil + geoNetOverrides = nil }) // We'll also check that our logging feature responsibly reports @@ -80,11 +80,9 @@ func TestCheckOverrides(t *testing.T) { t.Run("test-log-output", func(t *testing.T) { // Check that the log caught our malformed IP and CIDR. We only need to test this once, because it is only logged the very first time. require.Contains(t, logOutput.String(), "Failed to parse configured GeoIPOverride address (192.168.0). Unable to use for GeoIP resolution!") - require.Contains(t, logOutput.String(), "Failed to parse configured GeoIPOverride CIDR address (10.0.0./24): invalid CIDR address: 10.0.0./24."+ - " Unable to use for GeoIP resolution!") + require.Contains(t, logOutput.String(), "Failed to parse configured GeoIPOverride address (10.0.0./24). Unable to use for GeoIP resolution!") require.Contains(t, logOutput.String(), "Failed to parse configured GeoIPOverride address (FD00::000G). Unable to use for GeoIP resolution!") - require.Contains(t, logOutput.String(), "Failed to parse configured GeoIPOverride CIDR address (FD00::000F/11S): invalid CIDR address: FD00::000F/11S."+ - " Unable to use for GeoIP resolution!") + require.Contains(t, logOutput.String(), "Failed to parse configured GeoIPOverride address (FD00::000F/11S). 
Unable to use for GeoIP resolution!") }) t.Run("test-ipv4-match", func(t *testing.T) { @@ -196,7 +194,7 @@ func TestSortServerAds(t *testing.T) { server_utils.ResetTestState() t.Cleanup(func() { server_utils.ResetTestState() - geoIPOverrides = nil + geoNetOverrides = nil }) // A random IP that should geo-resolve to roughly the same location as the Madison server @@ -470,9 +468,7 @@ func TestGetClientLatLong(t *testing.T) { clientIp := netip.Addr{} assert.False(t, clientIpCache.Has(clientIp)) - ctx := context.Background() - ctx = context.WithValue(ctx, ProjectContextKey{}, "pelican-client/1.0.0 project/test") - coord1 := getClientLatLong(ctx, clientIp) + coord1, _ := getClientLatLong(clientIp) assert.True(t, coord1.Lat <= usLatMax && coord1.Lat >= usLatMin) assert.True(t, coord1.Long <= usLongMax && coord1.Long >= usLongMin) @@ -480,9 +476,7 @@ func TestGetClientLatLong(t *testing.T) { assert.NotContains(t, logOutput.String(), "Retrieving pre-assigned lat/long") // Get it again to make sure it's coming from the cache - ctx = context.Background() - ctx = context.WithValue(ctx, ProjectContextKey{}, "pelican-client/1.0.0 project/test") - coord2 := getClientLatLong(ctx, clientIp) + coord2, _ := getClientLatLong(clientIp) assert.Equal(t, coord1.Lat, coord2.Lat) assert.Equal(t, coord1.Long, coord2.Long) assert.Contains(t, logOutput.String(), "Retrieving pre-assigned lat/long for unresolved client IP") @@ -496,9 +490,7 @@ func TestGetClientLatLong(t *testing.T) { clientIp := netip.MustParseAddr("192.168.0.1") assert.False(t, clientIpCache.Has(clientIp)) - ctx := context.Background() - ctx = context.WithValue(ctx, ProjectContextKey{}, "pelican-client/1.0.0 project/test") - coord1 := getClientLatLong(ctx, clientIp) + coord1, _ := getClientLatLong(clientIp) assert.True(t, coord1.Lat <= usLatMax && coord1.Lat >= usLatMin) assert.True(t, coord1.Long <= usLongMax && coord1.Long >= usLongMin) @@ -506,9 +498,7 @@ func TestGetClientLatLong(t *testing.T) { assert.NotContains(t, 
logOutput.String(), "Retrieving pre-assigned lat/long") // Get it again to make sure it's coming from the cache - ctx = context.Background() - ctx = context.WithValue(ctx, ProjectContextKey{}, "pelican-client/1.0.0 project/test") - coord2 := getClientLatLong(ctx, clientIp) + coord2, _ := getClientLatLong(clientIp) assert.Equal(t, coord1.Lat, coord2.Lat) assert.Equal(t, coord1.Long, coord2.Long) assert.Contains(t, logOutput.String(), "Retrieving pre-assigned lat/long for client IP") diff --git a/docs/pages/install.mdx b/docs/pages/install.mdx index 15b826f25..e0e221240 100644 --- a/docs/pages/install.mdx +++ b/docs/pages/install.mdx @@ -1,9 +1,14 @@ import DownloadsComponent from "/components/DownloadsComponent"; +import { Callout } from 'nextra/components' # Install Pelican This document lists Pelican's operating system requirements and explains how you can download and install the correct Pelican executable. + + Know what you want? Visit our [Downloads Repository](https://dl.pelicanplatform.org). + + ## Before Starting Pelican executables can run as a **client** or a **server**, and both are packaged in the same executable. However, if you intend to run a server, some non-RPM installations may require additional package dependencies. 
diff --git a/docs/pages/install/macos.mdx b/docs/pages/install/macos.mdx index 23e51d5c9..b6f7e5185 100644 --- a/docs/pages/install/macos.mdx +++ b/docs/pages/install/macos.mdx @@ -20,7 +20,7 @@ Pelican provides a binary executable file instead of a `DMG` installer for MacOS Example to install Pelican executable for an Apple Silicon Mac: ```bash - curl -LO https://github.com/PelicanPlatform/pelican/releases/download/v7.10.5/pelican_Darwin_x86_64.tar.gz + curl -LO https://github.com/PelicanPlatform/pelican/releases/download/v7.10.5/pelican_Darwin_arm64.tar.gz tar -zxvf pelican_Darwin_arm64.tar.gz ``` diff --git a/docs/parameters.yaml b/docs/parameters.yaml index 6e3bd1742..efa0023cb 100644 --- a/docs/parameters.yaml +++ b/docs/parameters.yaml @@ -44,7 +44,7 @@ description: |+ Subdirectories of the provided directories are not read. Only the root config file's `ConfigLocations` key is used, and any redefinitions are ignored. type: stringSlice -default: none +default: [] components: ["*"] --- name: Debug @@ -224,9 +224,9 @@ components: ["origin"] --- name: Logging.Origin.Http description: |+ - Logging level for the HTTP component of the origin. Increasing to debug will cause the Xrootd daemon to log. + Logging level for the HTTP component of the origin. Increasing to debug + will cause the Xrootd daemon to log all headers and requests. Accepted values: `trace`, `debug`, `info`, `warn`, `error`, `fatal`, `panic` - all headers and requests. type: string default: error components: ["origin"] @@ -579,7 +579,7 @@ description: |+ This configuration is meant mostly to be used by passing the -v flag from the command line. Paths exported with this configuration will inherit the origin's abilities, so individual export configurations are not possible. 
type: stringSlice -default: none +default: [] components: ["origin"] --- name: Origin.EnablePublicReads @@ -776,35 +776,65 @@ components: ["origin"] --- name: Origin.ScitokensRestrictedPaths description: |+ - Enable the built-in issuer daemon for the origin. + This parameter is used to configure + [XRootD's SciTokens authorization plugin](https://github.com/xrootd/xrootd/tree/master/src/XrdSciTokens). + + Any restrictions on the paths that the issuer can authorize inside their + namespace. This is meant to be a mechanism to help with transitions, where + the underlying storage is setup such that an issuer's namespace contains + directories that should not be managed by the issuer. type: stringSlice -default: none +default: [] components: ["origin"] --- name: Origin.ScitokensMapSubject description: |+ - Enable the built-in issuer daemon for the origin. + This parameter is used to configure + [XRootD's SciTokens authorization plugin](https://github.com/xrootd/xrootd/tree/master/src/XrdSciTokens). + + If set to `true`, the contents of the token's `sub` claim will be copied + into the XRootD username. When `Origin.Multiuser` is also set to `true`, + this will allow XRootD to read and write files using the Unix username + specified in the token. type: bool default: false components: ["origin"] --- name: Origin.ScitokensDefaultUser description: |+ - Enable the built-in issuer daemon for the origin. + This parameter is used to configure + [XRootD's SciTokens authorization plugin](https://github.com/xrootd/xrootd/tree/master/src/XrdSciTokens). + + If set, then all authorized operations will be performed under the + provided username when interacting with the file system. This is useful + when all files owned by an issuer should be mapped to a particular Unix + user account. type: string default: none components: ["origin"] --- name: Origin.ScitokensUsernameClaim description: |+ - Enable the built-in issuer daemon for the origin. 
+ This parameter is used to configure + [XRootD's SciTokens authorization plugin](https://github.com/xrootd/xrootd/tree/master/src/XrdSciTokens). + + If set, then the provided claim will be used to determine the XRootD + username, and it will override the + `Origin.ScitokensMapSubject` and `Origin.ScitokensDefaultUser` parameters. type: string default: none components: ["origin"] --- name: Origin.ScitokensNameMapFile description: |+ - Enable the built-in issuer daemon for the origin. + This parameter is used to configure + [XRootD's SciTokens authorization plugin](https://github.com/xrootd/xrootd/tree/master/src/XrdSciTokens). + + If set, then the referenced file is parsed as a JSON object and the + specified mappings are applied to the username inside the XRootD + framework. See the + [XrdSciTokens documentation](https://github.com/xrootd/xrootd/tree/master/src/XrdSciTokens#mapfile-format) + for more information on the mapfile's format. type: string default: none components: ["origin"] @@ -1079,48 +1109,125 @@ components: ["localcache"] ############################ # Cache-level configs # ############################ -name: Cache.DataLocation +name: Cache.StorageLocation description: |+ - [Deprecated] Cache.DataLocation is being deprecated and will be removed in a future release. It is replaced by a combination of Cache.DataLocations and Cache.MetaLocations -type: string -root_default: /run/pelican/xcache -default: $XDG_RUNTIME_DIR/pelican/xcache -deprecated: true -replacedby: Cache.LocalRoot -components: ["cache"] ---- -name: Cache.LocalRoot -description: |+ - The location where the filesystem namespace is actually rooted + An absolute path to the directory where xrootd will create its default `namespace`, `meta`, and `data` directories. For example, + setting `Cache.StorageLocation=/run/pelican/cache` without specifying further `Cache.DataLocations` or `Cache.MetaLocations` + values will result in the cache creating a directory structure like: + ``` + . 
+ ā””ā”€ā”€ /run/pelican/cache/ + ā”œā”€ā”€ data/ + ā”‚ ā”œā”€ā”€ 00 # hexadecimal name values + ā”‚ ā”œā”€ā”€ 01 + ā”‚ ā”œā”€ā”€ ... + ā”‚ ā””ā”€ā”€ FF + ā”œā”€ā”€ meta/ + ā”‚ ā”œā”€ā”€ 00 # hexadecimal name values + ā”‚ ā”œā”€ā”€ 01 + ā”‚ ā”œā”€ā”€ ... + ā”‚ ā””ā”€ā”€ FF + ā””ā”€ā”€ namespace/ + ā”œā”€ā”€ namespace1/ + ā”‚ ā”œā”€ā”€ foo1.txt --> /run/pelican/cache/data/00 + ā”‚ ā””ā”€ā”€ foo2.txt --> /run/pelican/cache/data/01 + ā””ā”€ā”€ namespace2/ + ā””ā”€ā”€ bar.txt --> /run/pelican/cache/data/FF + ``` + In this setup, actual data files live at `/run/pelican/cache/data` and are given hexadecimal names, while + references (symbolic links) to those files are stored in `/run/pelican/cache/namespace`. The `meta` directory + is used for object metadata. Object requests to XRootD will be served from the namespace directories, and + resolve the underlying object through these symbolic links. + + We recommend tying the `Cache.StorageLocation` to a fast storage device, such as an SSD, to ensure optimal cache performance. + If this directory does not already exist, it will be created by Pelican. + + WARNING: The default value of /var/run/pelican should _never_ be used for production caches, as this directory is typically + cleared on system restarts, and may interfere with system services if it becomes full. Running a cache with the default value + set will generate a warning at cache startup. type: string root_default: /run/pelican/cache default: $XDG_RUNTIME_DIR/pelican/cache components: ["cache"] --- -name: Cache.ExportLocation +name: Cache.NamespaceLocation description: |+ - The location of the export directory. Everything under this directory will be exposed as part of the cache. This is - relative to the mount location. + A cache's namespace directory is used to duplicate/recreate the federation's namespace structure, and stores symbolic links from + object names to the actual data files (see `Cache.StorageLocation` for extra information). 
For example, requesting `/foo/bar.txt` from a + cache will check for the existence of a symbolic link at `${Cache.NamespaceLocation}/foo/bar.txt`, and if it exists, the cache will serve + the data file at the location the symbolic link points to. + + If this directory does not already exist, it will be created by Pelican. + + WARNING: It's important that any values for `Cache.DataLocations` and `Cache.MetaLocations` are NOT subdirectories of `Cache.NamespaceLocation`, + as this will make the raw data/meta files accessible through the cache's namespace structure, which is undefined behavior. type: string -default: / +default: ${Cache.StorageLocation}/namespace + components: ["cache"] --- name: Cache.DataLocations description: |+ - A list of directories for the locations of the cache data files - this is where the actual data in the cache is stored. - These paths should *not* be in same path as XrootD.Mount or else it will expose the data files as part of the files within the cache. + A list of absolute filesystem paths/directories where the cache's object data will be stored. This list of directories can be used to string together + multiple storage devices to increase the cache's storage capacity, as long as each of the directories is accessible by the cache service. + For example, setting `Cache.DataLocations=["/mnt/cache1", "/mnt/cache2"]` will result in splitting cache data between two mounted drives, + `/mnt/cache1` and `/mnt/cache2`. As such, these drives should be fast storage devices, such as SSDs. + + For more information, see the [xrootd oss documentation](https://xrootd.slac.stanford.edu/doc/dev56/ofs_config.pdf) for the `oss.space` directive + as well as the [xrootd pfc documentation](https://xrootd.slac.stanford.edu/doc/dev56/pss_config.pdf) for the `pfc.spaces` directive. + + If this directory does not already exist, it will be created by Pelican. 
+ + WARNING: It's important that any values for `Cache.DataLocations` are NOT subdirectories of `Cache.NamespaceLocation`, + as this will make the raw data files accessible through the cache's namespace structure, which is undefined behavior. type: stringSlice -root_default: ["/run/pelican/cache/data"] -default: ["$XDG_RUNTIME_DIR/pelican/cache/data"] +default: ["${Cache.StorageLocation}/data"] components: ["cache"] --- name: Cache.MetaLocations description: |+ - A list of directories for the locations of the cache metadata. These paths should *not* be in the same path as XrootD.Mount or else it - will expose the metadata files as part of the files within the cache + A list of absolute filesystem paths/directories where the cache's object metadata will be stored. Values in this list may point to separate drives as long + as they're accessible by the cache service. For example, setting `Cache.MetaLocations=["/mnt/meta1", "/mnt/meta2"]` will result in + splitting cache metadata between two the mounted drives. As such, these drives should be fast storage devices, such as SSDs. + + For more information, see the [xrootd oss documentation](https://xrootd.slac.stanford.edu/doc/dev56/ofs_config.pdf) for the `oss.space` directive + as well as the [xrootd pfc documentation](https://xrootd.slac.stanford.edu/doc/dev56/pss_config.pdf) for the `pfc.spaces` directive. + + If this directory does not already exist, it will be created by Pelican. + + WARNING: It's important that any values for `Cache.MetaLocations` are NOT subdirectories of `Cache.NamespaceLocation`, + as this will make the raw metadata files accessible through the cache's namespace structure, which is undefined behavior. 
type: stringSlice -root_default: ["/run/pelican/cache/meta"] -default: ["$XDG_RUNTIME_DIR/pelican/cache/meta"] +default: ["${Cache.StorageLocation}/meta"] +components: ["cache"] +--- +name: Cache.LocalRoot +description: |+ + [Deprecated] Cache.LocalRoot is deprecated and replaced by Cache.StorageLocation. +type: string +root_default: /run/pelican/cache +default: $XDG_RUNTIME_DIR/pelican/cache +deprecated: true +replacedby: "Cache.StorageLocation" +components: ["cache"] +--- +name: Cache.DataLocation +description: |+ + [Deprecated] Cache.DataLocation is being deprecated and will be removed in a future release. It is replaced by Cache.StorageLocation +type: string +root_default: /run/pelican/cache +default: $XDG_RUNTIME_DIR/pelican/cache +deprecated: true +replacedby: Cache.StorageLocation +components: ["cache"] +--- +name: Cache.ExportLocation +description: |+ + A path that's relative to the `Cache.NamespaceLocation` where the cache will expose its contents. This path can be used to + control which namespaces are available through the cache. For example, setting `Cache.ExportLocation: /foo` will only expose + the `/foo` namespace to clients. +type: string +default: / components: ["cache"] --- name: Cache.RunLocation @@ -1223,7 +1330,7 @@ description: |+ the cache is allowed to access any namespace that's advertised to the director. Otherwise, it will only be allowed to access the listed namespaces. type: stringSlice -default: none +default: [] components: ["cache"] --- name: Cache.SelfTest @@ -1262,6 +1369,16 @@ type: int default: 0 components: ["cache"] --- +name: Cache.DefaultCacheTimeout +description: |+ + The default value of the cache operation timeout if one is not specified by the client. + + Newer clients should always specify a timeout; changing this default is rarely necessary. 
+type: duration +default: 9.5s +hidden: true +components: ["cache"] +--- ############################ # Director-level configs # ############################ @@ -1298,7 +1415,7 @@ description: |+ If present, the hostname is taken from the X-Forwarded-Host header in the request. Otherwise, Host is used. type: stringSlice -default: none +default: [] components: ["director"] --- name: Director.CacheSortMethod @@ -1327,7 +1444,7 @@ description: |+ If present, the hostname is taken from the X-Forwarded-Host header in the request. Otherwise, Host is used. type: stringSlice -default: none +default: [] components: ["director"] --- name: Director.MaxMindKeyFile @@ -1460,7 +1577,7 @@ description: |+ A list of server resource names that the Director should consider in downtime, preventing the Director from issuing redirects to them. Additional downtimes are aggregated from Topology (when the Director is served in OSDF mode), and the Web UI. type: stringSlice -default: none +default: [] components: ["director"] --- name: Director.SupportContactEmail @@ -1502,7 +1619,7 @@ description: |+ This setting allows for compatibility with specific legacy OSDF origins and is not needed for new origins. 
type: stringSlice -default: none +default: [] components: ["director"] hidden: true --- @@ -1865,7 +1982,7 @@ description: |+ The "subject" claim should be the "CILogon User Identifier" from CILogon user page: https://cilogon.org/ type: stringSlice -default: none +default: [] components: ["registry","origin","cache"] --- name: Server.StartupTimeout @@ -2291,6 +2408,15 @@ type: string default: none components: ["origin"] --- +name: Xrootd.MaxStartupWait +description: |+ + The maximum amount of time pelican will wait for the xrootd daemons to + successfully start +type: duration +default: 10s +hidden: true +components: ["origin", "cache"] +--- ############################ # Monitoring-level configs # ############################ @@ -2490,7 +2616,7 @@ name: Shoveler.OutputDestinations description: |+ A list of destinations to forward XRootD monitoring packet to. type: stringSlice -default: none +default: [] components: ["origin", "cache"] --- name: Shoveler.VerifyHeader diff --git a/fed_test_utils/fed.go b/fed_test_utils/fed.go index 300ffe27d..15973fab2 100644 --- a/fed_test_utils/fed.go +++ b/fed_test_utils/fed.go @@ -142,7 +142,7 @@ func NewFedTest(t *testing.T, originConfig string) (ft *FedTest) { viper.Set("Server.WebPort", 0) viper.Set("Origin.RunLocation", filepath.Join(tmpPath, "origin")) viper.Set("Cache.RunLocation", filepath.Join(tmpPath, "cache")) - viper.Set("Cache.LocalRoot", filepath.Join(tmpPath, "xcache-data")) + viper.Set("Cache.StorageLocation", filepath.Join(tmpPath, "xcache-data")) viper.Set("LocalCache.RunLocation", filepath.Join(tmpPath, "local-cache")) viper.Set("Registry.RequireOriginApproval", false) viper.Set("Registry.RequireCacheApproval", false) diff --git a/github_scripts/osx_install.sh b/github_scripts/osx_install.sh index ac9eff625..c702b414c 100755 --- a/github_scripts/osx_install.sh +++ b/github_scripts/osx_install.sh @@ -43,8 +43,9 @@ popd # Build XRootD from source # Add patches to xrootd source code if needed -git clone --depth=1 
https://github.com/xrootd/xrootd.git +git clone https://github.com/xrootd/xrootd.git pushd xrootd +git checkout tags/v5.7.1 patch -p1 < $scriptdir/pelican_protocol.patch mkdir xrootd_build cd xrootd_build diff --git a/images/Dockerfile b/images/Dockerfile index 4689158fd..98d8573af 100644 --- a/images/Dockerfile +++ b/images/Dockerfile @@ -195,10 +195,10 @@ COPY --from=xrootd-plugin-builder /usr/lib64/libnlohmann_json_schema_validator.a COPY images/entrypoint.sh /entrypoint.sh # Copy here to reduce dependency on the pelican-build stage in the final-stage and x-base stage -COPY --from=pelican-build /pelican/dist/pelican_linux_amd64_v1/pelican /pelican/pelican -COPY --from=pelican-build /pelican/dist/pelican_linux_amd64_v1/pelican /pelican/osdf -RUN chmod +x /pelican/pelican \ - && chmod +x /pelican/osdf \ +COPY --from=pelican-build /pelican/dist/pelican_linux_amd64_v1/pelican /usr/local/bin/pelican +COPY --from=pelican-build /pelican/dist/pelican_linux_amd64_v1/pelican /usr/local/bin/osdf +RUN chmod +x /usr/local/bin/pelican \ + && chmod +x /usr/local/bin/osdf \ && chmod +x /entrypoint.sh ###################### @@ -206,22 +206,25 @@ RUN chmod +x /pelican/pelican \ ###################### FROM final-stage AS pelican-base -RUN rm -rf /pelican/osdf +RUN rm -f /usr/local/bin/osdf ###################### # OSDF base stage # ###################### FROM final-stage AS osdf-base -RUN rm -rf /pelican/pelican +RUN rm -f /usr/local/bin/pelican #################### # pelican/cache # #################### FROM pelican-base AS cache - -ENTRYPOINT [ "/entrypoint.sh", "pelican", "cache"] +RUN rm -f /usr/local/bin/pelican +COPY --from=pelican-build /pelican/dist/pelican-server_linux_amd64_v1/pelican-server /usr/local/sbin/pelican-server +RUN chmod +x /usr/local/sbin/pelican-server +# For now, we're only using pelican-server in the cache, but eventually we'll use it in all servers +ENTRYPOINT [ "/entrypoint.sh", "pelican-server", "cache"] CMD [ "serve" ] #################### @@ -260,8 
+263,10 @@ CMD [ "serve" ] #################### FROM osdf-base AS osdf-cache - -ENTRYPOINT [ "/entrypoint.sh" ,"osdf", "cache"] +RUN rm -rf /pelican/osdf +COPY --from=pelican-build /pelican/dist/pelican-server_linux_amd64_v1/pelican-server /usr/local/sbin/osdf-server +RUN chmod +x /usr/local/sbin/osdf-server +ENTRYPOINT [ "/entrypoint.sh" ,"osdf-server", "cache"] CMD [ "serve" ] #################### diff --git a/images/dev.Dockerfile b/images/dev.Dockerfile index d048f6efb..7ca7cf8e1 100644 --- a/images/dev.Dockerfile +++ b/images/dev.Dockerfile @@ -56,6 +56,7 @@ RUN yum install -y --enablerepo=osg-testing goreleaser npm xrootd-devel xrootd-s xrdcl-http jq procps docker make curl-devel java-17-openjdk-headless git cmake3 gcc-c++ openssl-devel sqlite-devel libcap-devel sssd-client \ xrootd-multiuser \ zlib-devel \ + vim valgrind gdb gtest-devel \ && yum clean all # The ADD command with a api.github.com URL in the next couple of sections diff --git a/images/entrypoint.sh b/images/entrypoint.sh old mode 100644 new mode 100755 index d7f5943de..27763c1e4 --- a/images/entrypoint.sh +++ b/images/entrypoint.sh @@ -114,7 +114,16 @@ if [ $# -ne 0 ]; then pelican) # Run pelican with the rest of the arguments echo "Running pelican with arguments: $@" - exec tini -- /pelican/pelican "$@" + exec tini -- /usr/local/bin/pelican "$@" + # we shouldn't get here + echo >&2 "Exec of tini failed!" + exit 1 + ;; + pelican-server) + # Our server-specific binary which may come with additional + # features/system requirements (like Lotman) + echo "Running pelican-server with arguments: $@" + exec tini -- /usr/local/sbin/pelican-server "$@" # we shouldn't get here echo >&2 "Exec of tini failed!" exit 1 @@ -122,7 +131,14 @@ if [ $# -ne 0 ]; then osdf) # Run osdf with the rest of the arguments echo "Running osdf with arguments: $@" - exec tini -- /pelican/osdf "$@" + exec tini -- /usr/local/bin/osdf "$@" + # we shouldn't get here + echo >&2 "Exec of tini failed!" 
+ exit 1 + ;; + osdf-server) + echo "Running osdf-server with arguments: $@" + exec tini -- /usr/local/sbin/osdf-server "$@" # we shouldn't get here echo >&2 "Exec of tini failed!" exit 1 diff --git a/launcher_utils/advertise.go b/launcher_utils/advertise.go index 6daddad55..4ca803ccd 100644 --- a/launcher_utils/advertise.go +++ b/launcher_utils/advertise.go @@ -68,9 +68,8 @@ func LaunchPeriodicAdvertise(ctx context.Context, egrp *errgroup.Group, servers doAdvertise(ctx, servers) ticker := time.NewTicker(1 * time.Minute) - defer ticker.Stop() egrp.Go(func() error { - + defer ticker.Stop() for { select { case <-ticker.C: diff --git a/launchers/launcher.go b/launchers/launcher.go index c8868b498..3eef5342d 100644 --- a/launchers/launcher.go +++ b/launchers/launcher.go @@ -60,13 +60,6 @@ func LaunchModules(ctx context.Context, modules server_structs.ServerType) (serv config.LogPelicanVersion() - // Print Pelican config at server start if it's in debug or info level - if log.GetLevel() >= log.InfoLevel { - if err = config.PrintConfig(); err != nil { - return - } - } - egrp.Go(func() error { _ = config.RestartFlag log.Debug("Will shutdown process on signal") @@ -97,6 +90,13 @@ func LaunchModules(ctx context.Context, modules server_structs.ServerType) (serv return } + // Print Pelican config at server start if it's in debug or info level + if log.GetLevel() >= log.InfoLevel { + if err = config.PrintConfig(); err != nil { + return + } + } + // Set up necessary APIs to support Web UI, including auth and metrics if err = web_ui.ConfigureServerWebAPI(ctx, engine, egrp); err != nil { return @@ -156,6 +156,13 @@ func LaunchModules(ctx context.Context, modules server_structs.ServerType) (serv if modules.IsEnabled(server_structs.OriginType) { + var server server_structs.XRootDServer + server, err = OriginServe(ctx, engine, egrp, modules) + if err != nil { + return + } + servers = append(servers, server) + var originExports []server_utils.OriginExport originExports, err = 
server_utils.GetOriginExports() if err != nil { @@ -167,13 +174,6 @@ func LaunchModules(ctx context.Context, modules server_structs.ServerType) (serv return } - var server server_structs.XRootDServer - server, err = OriginServe(ctx, engine, egrp, modules) - if err != nil { - return - } - servers = append(servers, server) - // Ordering: `LaunchBrokerListener` depends on the "right" value of Origin.FederationPrefix // which is possibly not set until `OriginServe` is called. // NOTE: Until the Broker supports multi-export origins, we've made the assumption that there diff --git a/local_cache/cache_test.go b/local_cache/cache_test.go index f57582ce4..5560f6d5a 100644 --- a/local_cache/cache_test.go +++ b/local_cache/cache_test.go @@ -330,7 +330,7 @@ func TestClient(t *testing.T) { _, err = client.DoGet(ctx, "pelican://"+param.Server_Hostname.GetString()+":"+strconv.Itoa(param.Server_WebPort.GetInt())+"/test/hello_world.txt.1", filepath.Join(tmpDir, "hello_world.txt.1"), false, client.WithToken(token), client.WithCaches(cacheUrl), client.WithAcquireToken(false)) - assert.Error(t, err) + require.Error(t, err) assert.Equal(t, "failed download from local-cache: server returned 404 Not Found", err.Error()) }) t.Cleanup(func() { @@ -373,7 +373,12 @@ func TestLargeFile(t *testing.T) { tmpDir := t.TempDir() server_utils.ResetTestState() - viper.Set("Client.MaximumDownloadSpeed", 40*1024*1024) + + clientConfig := map[string]interface{}{ + "Client.MaximumDownloadSpeed": 40 * 1024 * 1024, + "Transport.ResponseHeaderTimeout": "60s", + } + test_utils.InitClient(t, clientConfig) ft := fed_test_utils.NewFedTest(t, pubOriginCfg) ctx, cancel, egrp := test_utils.TestContext(context.Background(), t) @@ -408,7 +413,6 @@ func TestLargeFile(t *testing.T) { // Throw in a config.Reset for good measure. Keeps our env squeaky clean! server_utils.ResetTestState() }) - } // Create a federation then SIGSTOP the origin to prevent it from responding. 
diff --git a/local_cache/local_cache.go b/local_cache/local_cache.go index 71df19cdf..1cfd77dfb 100644 --- a/local_cache/local_cache.go +++ b/local_cache/local_cache.go @@ -718,6 +718,9 @@ func (lc *LocalCache) Stat(path, token string) (uint64, error) { dUrl.Path = path dUrl.Scheme = "pelican" statInfo, err := client.DoStat(context.Background(), dUrl.String(), client.WithToken(token)) + if err != nil { + return 0, err + } return uint64(statInfo.Size), err } diff --git a/lotman/lotman.go b/lotman/lotman.go index ee3544ffd..69ac4d302 100644 --- a/lotman/lotman.go +++ b/lotman/lotman.go @@ -1,4 +1,4 @@ -//go:build windows || darwin || linux +//go:build !lotman || (lotman && linux && ppc64le) || !linux // For now we're shutting off LotMan due to weirdness with purego. When we return to this, remember // that purego doesn't support (linux && ppc64le), so we'll need to add that back here. diff --git a/lotman/lotman_linux.go b/lotman/lotman_linux.go index 910859f90..1719911e3 100644 --- a/lotman/lotman_linux.go +++ b/lotman/lotman_linux.go @@ -1,5 +1,4 @@ -//go:build false -// For now we're shutting off LotMan due to weirdness with purego +//go:build lotman && linux && !ppc64le /*************************************************************** * @@ -25,6 +24,7 @@ package lotman import ( "bytes" + "context" "encoding/json" "fmt" "os" @@ -36,6 +36,7 @@ import ( "github.com/pkg/errors" log "github.com/sirupsen/logrus" + "github.com/pelicanplatform/pelican/config" "github.com/pelicanplatform/pelican/param" ) @@ -58,6 +59,7 @@ var ( // Auxilliary functions LotmanLotExists func(lotName string, errMsg *[]byte) int32 LotmanSetContextStr func(contextKey string, contextValue string, errMsg *[]byte) int32 + LotmanGetContextStr func(key string, output *[]byte, errMsg *[]byte) int32 // Functions that would normally take a char *** as an argument take an *unsafe.Pointer instead because // these functions are responsible for allocating and deallocating the memory for the char ***. 
The Go // runtime will handle the memory management for the *unsafe.Pointer. @@ -299,13 +301,17 @@ func GetAuthorizedCallers(lotName string) (*[]string, error) { // 1. The federation's discovery url // 2. The federation's director url // TODO: Consider what happens to the lot if either of these values change in the future after the lot is created? -func getFederationIssuer() string { - federationIssuer := param.Federation_DiscoveryUrl.GetString() +func getFederationIssuer() (string, error) { + fedInfo, err := config.GetFederation(context.Background()) + if err != nil { + return "", err + } + federationIssuer := fedInfo.DiscoveryEndpoint if federationIssuer == "" { - federationIssuer = param.Federation_DirectorUrl.GetString() + federationIssuer = fedInfo.DirectorEndpoint } - return federationIssuer + return federationIssuer, nil } // Initialize the LotMan library and bind its functions to the global vars @@ -334,6 +340,7 @@ func InitLotman() bool { // Auxilliary functions purego.RegisterLibFunc(&LotmanLotExists, lotmanLib, "lotman_lot_exists") purego.RegisterLibFunc(&LotmanSetContextStr, lotmanLib, "lotman_set_context_str") + purego.RegisterLibFunc(&LotmanGetContextStr, lotmanLib, "lotman_get_context_str") purego.RegisterLibFunc(&LotmanGetLotOwners, lotmanLib, "lotman_get_owners") purego.RegisterLibFunc(&LotmanGetLotParents, lotmanLib, "lotman_get_parent_names") purego.RegisterLibFunc(&LotmanGetLotsFromDir, lotmanLib, "lotman_get_lots_from_dir") @@ -359,14 +366,22 @@ func InitLotman() bool { log.Warningf("Error while unmarshaling Lots from config: %v", err) } - federationIssuer := getFederationIssuer() + federationIssuer, err := getFederationIssuer() + if err != nil { + log.Errorf("Error getting federation issuer: %v", err) + return false + } + if federationIssuer == "" { + log.Errorln("Unable to determine federation issuer which is needed by Lotman to determine lot ownership") + return false + } callerMutex.Lock() defer callerMutex.Unlock() ret = 
LotmanSetContextStr("caller", federationIssuer, &errMsg) if ret != 0 { trimBuf(&errMsg) - log.Errorf("Error setting context for default lot: %s", string(errMsg)) + log.Errorf("Error setting caller context to %s for default lot: %s", federationIssuer, string(errMsg)) return false } @@ -527,6 +542,7 @@ func InitLotman() bool { if ret != 0 { trimBuf(&errMsg) log.Errorf("Error creating lot %s: %s", lot.LotName, string(errMsg)) + log.Infoln("Full lot JSON passed to Lotman for lot creation:", string(lotJSON)) return false } } diff --git a/lotman/lotman_test.go b/lotman/lotman_test.go index 61975f6b1..b5a12c504 100644 --- a/lotman/lotman_test.go +++ b/lotman/lotman_test.go @@ -1,6 +1,5 @@ -//go:build false +//go:build lotman && linux && !ppc64le -//linux && !ppc64le /*************************************************************** * * Copyright (C) 2024, Pelican Project, Morgridge Institute for Research @@ -26,6 +25,8 @@ import ( _ "embed" "encoding/json" "fmt" + "net/http" + "net/http/httptest" "os" "strings" "testing" @@ -37,19 +38,33 @@ import ( ) //go:embed resources/lots-config.yaml - var yamlMockup string -func setupLotmanFromConf(t *testing.T, readConfig bool, name string) (bool, func()) { - // Load in our config - if readConfig { - viper.Set("Federation.DiscoveryUrl", "https://fake-federation.com") - viper.SetConfigType("yaml") - err := viper.ReadConfig(strings.NewReader(yamlMockup)) - if err != nil { - t.Fatalf("Error reading config: %v", err) - } - } +// Initialize Lotman +// If we read from the embedded yaml, we need to override the SHOULD_OVERRIDE keys with the discUrl +// so that underlying metadata discovery can happen against the mock discovery host +func setupLotmanFromConf(t *testing.T, readConfig bool, name string, discUrl string) (bool, func()) { + // Load in our config and handle overriding the SHOULD_OVERRIDE keys with the discUrl + // Load in our config + if readConfig { + viper.SetConfigType("yaml") + err := 
viper.ReadConfig(strings.NewReader(yamlMockup)) + if err != nil { + t.Fatalf("Error reading config: %v", err) + } + + // Traverse the settings and modify the "Owner" keys directly in Viper + lots := viper.Get("Lotman.Lots").([]interface{}) + for i, lot := range lots { + if lotMap, ok := lot.(map[string]interface{}); ok { + if owner, ok := lotMap["owner"].(string); ok && owner == "SHOULD_OVERRIDE" { + lotMap["owner"] = discUrl + lots[i] = lotMap + } + } + } + viper.Set("Lotman.Lots", lots) + } tmpPathPattern := name + "*" tmpPath, err := os.MkdirTemp("", tmpPathPattern) @@ -63,21 +78,43 @@ func setupLotmanFromConf(t *testing.T, readConfig bool, name string) (bool, func } } +// Create a mock discovery host that returns the servers URL as the value for each pelican-configuration key +func getMockDiscoveryHost() *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/.well-known/pelican-configuration" { + w.Header().Set("Content-Type", "application/json") + serverURL := r.Host + response := fmt.Sprintf(`{ + "director_endpoint": "https://%s/osdf-director.osg-htc.org", + "namespace_registration_endpoint": "https://%s/osdf-registry.osg-htc.org", + "jwks_uri": "https://%s/osdf/public_signing_key.jwks" +}`, serverURL, serverURL, serverURL) + w.Write([]byte(response)) + } else { + http.NotFound(w, r) + } + })) +} + // Test the library initializer. NOTE: this also tests CreateLot, which is a part of initialization. 
func TestLotmanInit(t *testing.T) { server_utils.ResetTestState() t.Run("TestBadInit", func(t *testing.T) { // We haven't set various bits needed to create the lots, like discovery URL - success, cleanup := setupLotmanFromConf(t, false, "LotmanBadInit") + success, cleanup := setupLotmanFromConf(t, false, "LotmanBadInit", "") defer cleanup() require.False(t, success) }) t.Run("TestGoodInit", func(t *testing.T) { viper.Set("Log.Level", "debug") - viper.Set("Federation.DiscoveryUrl", "https://fake-federation.com") - success, cleanup := setupLotmanFromConf(t, false, "LotmanGoodInit") + server := getMockDiscoveryHost() + // Set the Federation.DiscoveryUrl to the test server's URL + // Lotman uses the discovered URLs/keys to determine some aspects of lot ownership + viper.Set("Federation.DiscoveryUrl", server.URL) + + success, cleanup := setupLotmanFromConf(t, false, "LotmanGoodInit", server.URL) defer cleanup() require.True(t, success) @@ -95,7 +132,7 @@ func TestLotmanInit(t *testing.T) { err := json.Unmarshal(defaultOutput, &defaultLot) require.NoError(t, err, fmt.Sprintf("Error unmarshalling default lot JSON: %s", string(defaultOutput))) require.Equal(t, "default", defaultLot.LotName) - require.Equal(t, "https://fake-federation.com", defaultLot.Owner) + require.Equal(t, server.URL, defaultLot.Owner) require.Equal(t, "default", defaultLot.Parents[0]) require.Equal(t, 0.0, *(defaultLot.MPA.DedicatedGB)) require.Equal(t, int64(0), (defaultLot.MPA.MaxNumObjects.Value)) @@ -111,7 +148,7 @@ func TestLotmanInit(t *testing.T) { err = json.Unmarshal(rootOutput, &rootLot) require.NoError(t, err, fmt.Sprintf("Error unmarshalling root lot JSON: %s", string(rootOutput))) require.Equal(t, "root", rootLot.LotName) - require.Equal(t, "https://fake-federation.com", rootLot.Owner) + require.Equal(t, server.URL, rootLot.Owner) require.Equal(t, "root", rootLot.Parents[0]) require.Equal(t, 0.0, *(rootLot.MPA.DedicatedGB)) require.Equal(t, int64(0), (rootLot.MPA.MaxNumObjects.Value)) @@ 
-120,8 +157,9 @@ func TestLotmanInit(t *testing.T) { func TestLotmanInitFromConfig(t *testing.T) { server_utils.ResetTestState() - - success, cleanup := setupLotmanFromConf(t, true, "LotmanInitConf") + server := getMockDiscoveryHost() + viper.Set("Federation.DiscoveryUrl", server.URL) + success, cleanup := setupLotmanFromConf(t, true, "LotmanInitConf", server.URL) defer cleanup() require.True(t, success) @@ -140,7 +178,7 @@ func TestLotmanInitFromConfig(t *testing.T) { err := json.Unmarshal(defaultOutput, &defaultLot) require.NoError(t, err, fmt.Sprintf("Error unmarshalling default lot JSON: %s", string(defaultOutput))) require.Equal(t, "default", defaultLot.LotName) - require.Equal(t, "https://fake-federation.com", defaultLot.Owner) + require.Equal(t, server.URL, defaultLot.Owner) require.Equal(t, "default", defaultLot.Parents[0]) require.Equal(t, 100.0, *(defaultLot.MPA.DedicatedGB)) require.Equal(t, int64(1000), (defaultLot.MPA.MaxNumObjects.Value)) @@ -157,7 +195,7 @@ func TestLotmanInitFromConfig(t *testing.T) { err = json.Unmarshal(rootOutput, &rootLot) require.NoError(t, err, fmt.Sprintf("Error unmarshalling root lot JSON: %s", string(rootOutput))) require.Equal(t, "root", rootLot.LotName) - require.Equal(t, "https://fake-federation.com", rootLot.Owner) + require.Equal(t, server.URL, rootLot.Owner) require.Equal(t, "root", rootLot.Parents[0]) require.Equal(t, 1.0, *(rootLot.MPA.DedicatedGB)) require.Equal(t, int64(10), rootLot.MPA.MaxNumObjects.Value) @@ -220,7 +258,9 @@ func TestGetLotmanLib(t *testing.T) { func TestGetAuthzCallers(t *testing.T) { server_utils.ResetTestState() - success, cleanup := setupLotmanFromConf(t, true, "LotmanGetAuthzCalleres") + server := getMockDiscoveryHost() + viper.Set("Federation.DiscoveryUrl", server.URL) + success, cleanup := setupLotmanFromConf(t, true, "LotmanGetAuthzCalleres", server.URL) defer cleanup() require.True(t, success) @@ -229,7 +269,7 @@ func TestGetAuthzCallers(t *testing.T) { authzedCallers, err := 
GetAuthorizedCallers("test-2") require.NoError(t, err, "Failed to get authorized callers") require.Equal(t, 2, len(*authzedCallers)) - require.Contains(t, *authzedCallers, "https://fake-federation.com") + require.Contains(t, *authzedCallers, server.URL) require.Contains(t, *authzedCallers, "https://different-fake-federation.com") // test with a non-existent lot @@ -239,7 +279,9 @@ func TestGetAuthzCallers(t *testing.T) { func TestGetLot(t *testing.T) { server_utils.ResetTestState() - success, cleanup := setupLotmanFromConf(t, true, "LotmanGetLot") + server := getMockDiscoveryHost() + viper.Set("Federation.DiscoveryUrl", server.URL) + success, cleanup := setupLotmanFromConf(t, true, "LotmanGetLot", server.URL) defer cleanup() require.True(t, success) @@ -251,7 +293,7 @@ func TestGetLot(t *testing.T) { require.Contains(t, lot.Parents, "root") require.Contains(t, lot.Parents, "test-1") require.Equal(t, 3, len(lot.Owners)) - require.Contains(t, lot.Owners, "https://fake-federation.com") + require.Contains(t, lot.Owners, server.URL) require.Contains(t, lot.Owners, "https://different-fake-federation.com") require.Contains(t, lot.Owners, "https://another-fake-federation.com") require.Equal(t, 1.11, *(lot.MPA.DedicatedGB)) @@ -262,7 +304,9 @@ func TestGetLot(t *testing.T) { func TestUpdateLot(t *testing.T) { server_utils.ResetTestState() - success, cleanup := setupLotmanFromConf(t, true, "LotmanInitConf") + server := getMockDiscoveryHost() + viper.Set("Federation.DiscoveryUrl", server.URL) + success, cleanup := setupLotmanFromConf(t, true, "LotmanInitConf", server.URL) defer cleanup() require.True(t, success) @@ -285,7 +329,7 @@ func TestUpdateLot(t *testing.T) { }, } - err := UpdateLot(&lotUpdate, "https://fake-federation.com") + err := UpdateLot(&lotUpdate, server.URL) require.NoError(t, err, "Failed to update lot") // Now check that the update was successful @@ -300,12 +344,14 @@ func TestUpdateLot(t *testing.T) { func TestDeleteLotsRec(t *testing.T) { 
server_utils.ResetTestState() - success, cleanup := setupLotmanFromConf(t, true, "LotmanInitConf") + server := getMockDiscoveryHost() + viper.Set("Federation.DiscoveryUrl", server.URL) + success, cleanup := setupLotmanFromConf(t, true, "LotmanInitConf", server.URL) defer cleanup() require.True(t, success) // Delete test-1, then verify both it and test-2 are gone - err := DeleteLotsRecursive("test-1", "https://fake-federation.com") + err := DeleteLotsRecursive("test-1", server.URL) require.NoError(t, err, "Failed to delete lot") // Now check that the delete was successful diff --git a/lotman/lotman_ui.go b/lotman/lotman_ui.go index 57d44a856..b5dad97b8 100644 --- a/lotman/lotman_ui.go +++ b/lotman/lotman_ui.go @@ -1,5 +1,4 @@ -//go:build false -//linux && !ppc64le +//go:build lotman && linux && !ppc64le /*************************************************************** * @@ -37,7 +36,6 @@ import ( log "github.com/sirupsen/logrus" "github.com/pelicanplatform/pelican/config" - "github.com/pelicanplatform/pelican/param" "github.com/pelicanplatform/pelican/server_structs" "github.com/pelicanplatform/pelican/server_utils" "github.com/pelicanplatform/pelican/token" @@ -118,7 +116,10 @@ func VerifyNewLotToken(lot *Lot, strToken string) (bool, error) { if len(lot.Parents) != 0 && lot.Parents[0] == "root" { // We check that the token is signed by the federation // First check for discovery URL and then for director URL, both of which should host the federation's pubkey - issuerUrl := getFederationIssuer() + issuerUrl, err := getFederationIssuer() + if err != nil { + return false, err + } kSet, err := server_utils.GetJWKSFromIssUrl(issuerUrl) if err != nil { @@ -201,7 +202,12 @@ func VerifyNewLotToken(lot *Lot, strToken string) (bool, error) { // Get the namespace by querying the director and checking the headers errMsgPrefix := "the provided token is acceptible, but no owner could be determined because " - directorUrlStr := param.Federation_DirectorUrl.GetString() + + fedInfo, 
err := config.GetFederation(context.Background()) + if err != nil { + return false, errors.Wrap(err, errMsgPrefix+"the federation information could not be retrieved") + } + directorUrlStr := fedInfo.DirectorEndpoint if directorUrlStr == "" { return false, errors.New(errMsgPrefix + "the federation director URL is not set") } @@ -212,7 +218,7 @@ func VerifyNewLotToken(lot *Lot, strToken string) (bool, error) { directorUrl.Path, err = url.JoinPath("/api/v1.0/director/object", path) if err != nil { - return false, errors.Wrap(err, errMsgPrefix+"the director URL could not be joined with the path") + return false, errors.Wrap(err, errMsgPrefix+"the director's object path could not be constructed") } // Get the namespace by querying the director and checking the headers. The client should NOT diff --git a/lotman/resources/lots-config.yaml b/lotman/resources/lots-config.yaml index 874b0b7a3..9fdf31edb 100644 --- a/lotman/resources/lots-config.yaml +++ b/lotman/resources/lots-config.yaml @@ -20,7 +20,7 @@ Lotman: Lots: - LotName: "default" - Owner: "https://fake-federation.com" + Owner: "SHOULD_OVERRIDE" Parents: - "default" ManagementPolicyAttrs: @@ -38,7 +38,7 @@ Lotman: Value: 123456 - LotName: "root" - Owner: "https://fake-federation.com" + Owner: "SHOULD_OVERRIDE" Parents: - "root" Paths: diff --git a/param/parameters.go b/param/parameters.go index 883dad5a3..98e5a490d 100644 --- a/param/parameters.go +++ b/param/parameters.go @@ -51,7 +51,8 @@ type ObjectParam struct { func GetDeprecated() map[string][]string { return map[string][]string{ - "Cache.DataLocation": {"Cache.LocalRoot"}, + "Cache.DataLocation": {"Cache.StorageLocation"}, + "Cache.LocalRoot": {"Cache.StorageLocation"}, "Director.EnableStat": {"Director.CheckOriginPresence"}, "DisableHttpProxy": {"Client.DisableHttpProxy"}, "DisableProxyFallback": {"Client.DisableProxyFallback"}, @@ -148,8 +149,10 @@ var ( Cache_HighWaterMark = StringParam{"Cache.HighWaterMark"} Cache_LocalRoot = 
StringParam{"Cache.LocalRoot"} Cache_LowWatermark = StringParam{"Cache.LowWatermark"} + Cache_NamespaceLocation = StringParam{"Cache.NamespaceLocation"} Cache_RunLocation = StringParam{"Cache.RunLocation"} Cache_SentinelLocation = StringParam{"Cache.SentinelLocation"} + Cache_StorageLocation = StringParam{"Cache.StorageLocation"} Cache_Url = StringParam{"Cache.Url"} Cache_XRootDPrefix = StringParam{"Cache.XRootDPrefix"} Director_CacheSortMethod = StringParam{"Director.CacheSortMethod"} @@ -380,6 +383,7 @@ var ( ) var ( + Cache_DefaultCacheTimeout = DurationParam{"Cache.DefaultCacheTimeout"} Cache_SelfTestInterval = DurationParam{"Cache.SelfTestInterval"} Client_SlowTransferRampupTime = DurationParam{"Client.SlowTransferRampupTime"} Client_SlowTransferWindow = DurationParam{"Client.SlowTransferWindow"} @@ -403,6 +407,7 @@ var ( Transport_ResponseHeaderTimeout = DurationParam{"Transport.ResponseHeaderTimeout"} Transport_TLSHandshakeTimeout = DurationParam{"Transport.TLSHandshakeTimeout"} Xrootd_AuthRefreshInterval = DurationParam{"Xrootd.AuthRefreshInterval"} + Xrootd_MaxStartupWait = DurationParam{"Xrootd.MaxStartupWait"} ) var ( diff --git a/param/parameters_struct.go b/param/parameters_struct.go index 0009faa4c..979d506a7 100644 --- a/param/parameters_struct.go +++ b/param/parameters_struct.go @@ -29,6 +29,7 @@ type Config struct { Concurrency int `mapstructure:"concurrency" yaml:"Concurrency"` DataLocation string `mapstructure:"datalocation" yaml:"DataLocation"` DataLocations []string `mapstructure:"datalocations" yaml:"DataLocations"` + DefaultCacheTimeout time.Duration `mapstructure:"defaultcachetimeout" yaml:"DefaultCacheTimeout"` EnableLotman bool `mapstructure:"enablelotman" yaml:"EnableLotman"` EnableOIDC bool `mapstructure:"enableoidc" yaml:"EnableOIDC"` EnableVoms bool `mapstructure:"enablevoms" yaml:"EnableVoms"` @@ -37,12 +38,14 @@ type Config struct { LocalRoot string `mapstructure:"localroot" yaml:"LocalRoot"` LowWatermark string 
`mapstructure:"lowwatermark" yaml:"LowWatermark"` MetaLocations []string `mapstructure:"metalocations" yaml:"MetaLocations"` + NamespaceLocation string `mapstructure:"namespacelocation" yaml:"NamespaceLocation"` PermittedNamespaces []string `mapstructure:"permittednamespaces" yaml:"PermittedNamespaces"` Port int `mapstructure:"port" yaml:"Port"` RunLocation string `mapstructure:"runlocation" yaml:"RunLocation"` SelfTest bool `mapstructure:"selftest" yaml:"SelfTest"` SelfTestInterval time.Duration `mapstructure:"selftestinterval" yaml:"SelfTestInterval"` SentinelLocation string `mapstructure:"sentinellocation" yaml:"SentinelLocation"` + StorageLocation string `mapstructure:"storagelocation" yaml:"StorageLocation"` Url string `mapstructure:"url" yaml:"Url"` XRootDPrefix string `mapstructure:"xrootdprefix" yaml:"XRootDPrefix"` } `mapstructure:"cache" yaml:"Cache"` @@ -316,6 +319,7 @@ type Config struct { MacaroonsKeyFile string `mapstructure:"macaroonskeyfile" yaml:"MacaroonsKeyFile"` ManagerHost string `mapstructure:"managerhost" yaml:"ManagerHost"` ManagerPort int `mapstructure:"managerport" yaml:"ManagerPort"` + MaxStartupWait time.Duration `mapstructure:"maxstartupwait" yaml:"MaxStartupWait"` Mount string `mapstructure:"mount" yaml:"Mount"` Port int `mapstructure:"port" yaml:"Port"` RobotsTxtFile string `mapstructure:"robotstxtfile" yaml:"RobotsTxtFile"` @@ -334,6 +338,7 @@ type configWithType struct { Concurrency struct { Type string; Value int } DataLocation struct { Type string; Value string } DataLocations struct { Type string; Value []string } + DefaultCacheTimeout struct { Type string; Value time.Duration } EnableLotman struct { Type string; Value bool } EnableOIDC struct { Type string; Value bool } EnableVoms struct { Type string; Value bool } @@ -342,12 +347,14 @@ type configWithType struct { LocalRoot struct { Type string; Value string } LowWatermark struct { Type string; Value string } MetaLocations struct { Type string; Value []string } + 
NamespaceLocation struct { Type string; Value string } PermittedNamespaces struct { Type string; Value []string } Port struct { Type string; Value int } RunLocation struct { Type string; Value string } SelfTest struct { Type string; Value bool } SelfTestInterval struct { Type string; Value time.Duration } SentinelLocation struct { Type string; Value string } + StorageLocation struct { Type string; Value string } Url struct { Type string; Value string } XRootDPrefix struct { Type string; Value string } } @@ -621,6 +628,7 @@ type configWithType struct { MacaroonsKeyFile struct { Type string; Value string } ManagerHost struct { Type string; Value string } ManagerPort struct { Type string; Value int } + MaxStartupWait struct { Type string; Value time.Duration } Mount struct { Type string; Value string } Port struct { Type string; Value int } RobotsTxtFile struct { Type string; Value string } diff --git a/server_utils/origin.go b/server_utils/origin.go index e158ce5d9..02963b1f1 100644 --- a/server_utils/origin.go +++ b/server_utils/origin.go @@ -20,19 +20,23 @@ package server_utils import ( "fmt" - "os" + "net/http" + "net/url" "path" "path/filepath" "reflect" "strings" + "time" "github.com/mitchellh/mapstructure" "github.com/pkg/errors" log "github.com/sirupsen/logrus" "github.com/spf13/viper" + "github.com/pelicanplatform/pelican/config" "github.com/pelicanplatform/pelican/param" "github.com/pelicanplatform/pelican/server_structs" + "github.com/pelicanplatform/pelican/token" ) var originExports []OriginExport @@ -750,18 +754,64 @@ from S3 service URL. 
In this configuration, objects can be accessed at /federati return originExports, nil } +// Generate a minimally scoped auth token that allows the origin +// to query itself for its sentinel file +func generateSentinelReadToken(resourceScope string) (string, error) { + issuerUrl := param.Server_ExternalWebUrl.GetString() + if issuerUrl == "" { // if both are empty, then error + return "", errors.New("failed to create a sentinel check auth token because required configuration 'Server.ExternalWebUrl' is empty") + } + fTestTokenCfg := token.NewWLCGToken() + fTestTokenCfg.Lifetime = time.Minute + fTestTokenCfg.Issuer = issuerUrl + fTestTokenCfg.Subject = "origin" + fTestTokenCfg.Claims = map[string]string{"scope": fmt.Sprintf("storage.read:/%v", resourceScope)} + // For self-tests, the audience is the server itself + fTestTokenCfg.AddAudienceAny() + + // CreateToken also handles validation for us + tok, err := fTestTokenCfg.CreateToken() + if err != nil { + return "", errors.Wrap(err, "failed to create sentinel check auth token") + } + + return tok, nil +} + // Check the sentinel files from Origin.Exports func CheckOriginSentinelLocations(exports []OriginExport) (ok bool, err error) { for _, export := range exports { if export.SentinelLocation != "" { + log.Infof("Checking that sentinel object %v is present for federation prefix %s", export.SentinelLocation, export.FederationPrefix) sentinelPath := path.Clean(export.SentinelLocation) if path.Base(sentinelPath) != sentinelPath { - return false, errors.Errorf("invalid SentinelLocation path for StoragePrefix %s, file must not contain a directory. Got %s", export.StoragePrefix, export.SentinelLocation) + return false, errors.Errorf("invalid SentinelLocation path for federation prefix %s, path must not contain a directory. 
Got %s", export.FederationPrefix, export.SentinelLocation) } - fullPath := filepath.Join(export.StoragePrefix, sentinelPath) - _, err := os.Stat(fullPath) + + fullPath := filepath.Join(export.FederationPrefix, sentinelPath) + tkn, err := generateSentinelReadToken(sentinelPath) if err != nil { - return false, errors.Wrapf(err, "fail to open SentinelLocation %s for StoragePrefix %s. Collection check failed", export.SentinelLocation, export.StoragePrefix) + return false, errors.Wrap(err, "failed to generate self-auth token for sentinel object check") + } + + sentinelUrl, err := url.JoinPath(param.Origin_Url.GetString(), fullPath) + if err != nil { + return false, errors.Wrapf(err, "unable fo form sentinel URL for Origin.Url %v, sentinel path %v", param.Origin_Url.GetString(), fullPath) + } + req, err := http.NewRequest(http.MethodGet, sentinelUrl, nil) + if err != nil { + return false, errors.Wrap(err, "failed to create GET request for sentinel object check") + } + req.Header.Set("Authorization", "Bearer "+tkn) + + client := http.Client{Transport: config.GetTransport()} + resp, err := client.Do(req) + if err != nil { + return false, errors.Wrapf(err, "fail to open sentinel object %s for federation prefix %s.", export.SentinelLocation, export.FederationPrefix) + } + + if resp.StatusCode != 200 { + return false, errors.New(fmt.Sprintf("got non-200 response code %v when checking sentinel object %s for federation prefix %s", resp.StatusCode, export.SentinelLocation, export.FederationPrefix)) } } } diff --git a/server_utils/origin_test.go b/server_utils/origin_test.go index f31d9687a..9a5f25c3d 100644 --- a/server_utils/origin_test.go +++ b/server_utils/origin_test.go @@ -23,8 +23,6 @@ package server_utils import ( _ "embed" "fmt" - "os" - "path/filepath" "strings" "testing" @@ -395,64 +393,6 @@ func TestGetExports(t *testing.T) { }) } -func TestCheckOriginSentinelLocation(t *testing.T) { - tmpDir := t.TempDir() - tempStn := filepath.Join(tmpDir, "mock_sentinel") - file, 
err := os.Create(tempStn) - require.NoError(t, err) - err = file.Close() - require.NoError(t, err) - - mockExportNoStn := OriginExport{ - StoragePrefix: "/foo/bar", - FederationPrefix: "/demo/foo/bar", - Capabilities: server_structs.Capabilities{Reads: true}, - } - mockExportValidStn := OriginExport{ - StoragePrefix: tmpDir, - FederationPrefix: "/demo/foo/bar", - Capabilities: server_structs.Capabilities{Reads: true}, - SentinelLocation: "mock_sentinel", - } - mockExportInvalidStn := OriginExport{ - StoragePrefix: tmpDir, - FederationPrefix: "/demo/foo/bar", - Capabilities: server_structs.Capabilities{Reads: true}, - SentinelLocation: "sentinel_dne", - } - - t.Run("empty-sentinel-return-ok", func(t *testing.T) { - exports := make([]OriginExport, 0) - exports = append(exports, mockExportNoStn) - exports = append(exports, mockExportNoStn) - - ok, err := CheckOriginSentinelLocations(exports) - assert.NoError(t, err) - assert.True(t, ok) - }) - - t.Run("valid-sentinel-return-ok", func(t *testing.T) { - exports := make([]OriginExport, 0) - exports = append(exports, mockExportNoStn) - exports = append(exports, mockExportValidStn) - - ok, err := CheckOriginSentinelLocations(exports) - assert.NoError(t, err) - assert.True(t, ok) - }) - - t.Run("invalid-sentinel-return-error", func(t *testing.T) { - exports := make([]OriginExport, 0) - exports = append(exports, mockExportNoStn) - exports = append(exports, mockExportValidStn) - exports = append(exports, mockExportInvalidStn) - - ok, err := CheckOriginSentinelLocations(exports) - assert.Error(t, err) - assert.False(t, ok) - }) -} - func runBucketNameTest(t *testing.T, name string, valid bool) { t.Run(fmt.Sprintf("testBucketNameValidation-%s", name), func(t *testing.T) { err := validateBucketName(name) diff --git a/web_ui/frontend/app/(login)/initialization/code/page.tsx b/web_ui/frontend/app/(login)/initialization/code/page.tsx index 05f784bf6..904446ada 100644 --- a/web_ui/frontend/app/(login)/initialization/code/page.tsx 
+++ b/web_ui/frontend/app/(login)/initialization/code/page.tsx @@ -18,13 +18,15 @@ 'use client'; -import { Box, Typography, Grow } from '@mui/material'; +import { Box, Typography } from '@mui/material'; import { useRouter } from 'next/navigation'; -import { useState } from 'react'; +import { useContext, useState } from 'react'; import CodeInput, { Code } from '../../components/CodeInput'; import LoadingButton from '../../components/LoadingButton'; -import { getErrorMessage } from '@/helpers/util'; +import { initLogin } from '@/helpers/api'; +import { alertOnError } from '@/helpers/util'; +import { AlertDispatchContext } from '@/components/AlertProvider'; export default function Home() { const router = useRouter(); @@ -37,11 +39,11 @@ export default function Home() { undefined, ]); let [loading, setLoading] = useState(false); - let [error, setError] = useState(undefined); + + const dispatch = useContext(AlertDispatchContext); const setCode = (code: Code) => { _setCode(code); - setError(undefined); if (!code.includes(undefined)) { submit(code.map((x) => x!.toString()).join('')); @@ -51,26 +53,15 @@ export default function Home() { async function submit(code: string) { setLoading(true); - try { - let response = await fetch('/api/v1.0/auth/initLogin', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - code: code, - }), - }); - - if (response.ok) { - router.push('../password/'); - } else { - setLoading(false); - setError(await getErrorMessage(response)); - } - } catch { + let response = await alertOnError( + async () => await initLogin(code), + 'Could not login', + dispatch + ); + if (response) { + router.push('../password/'); + } else { setLoading(false); - setError('Could not connect to server'); } } @@ -97,16 +88,6 @@ export default function Home() {
- - - {error} - - (''); let [confirmPassword, _setConfirmPassword] = useState(''); let [loading, setLoading] = useState(false); - let [error, setError] = useState(undefined); async function submit(password: string) { setLoading(true); - try { - let response = await fetch('/api/v1.0/auth/resetLogin', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - password: password, - }), - }); - - if (response.ok) { - router.push('/'); - } else { - setLoading(false); - setError(await getErrorMessage(response)); - } - } catch { + let response = await alertOnError( + async () => await resetLogin(password), + 'Could not login', + dispatch + ); + if (response) { + router.push('/'); + } else { setLoading(false); - setError('Could not connect to server'); } } @@ -66,7 +58,16 @@ export default function Home() { if (password == confirmPassword) { submit(password); } else { - setError('Passwords do not match'); + dispatch({ + type: 'openAlert', + payload: { + alertProps: { + severity: 'warning', + }, + message: 'Passwords do not match', + onClose: () => dispatch({ type: 'closeAlert' }), + }, + }); } } @@ -89,7 +90,6 @@ export default function Home() { InputProps: { onChange: (e) => { _setPassword(e.target.value); - setError(undefined); }, }, }} @@ -102,7 +102,6 @@ export default function Home() { InputProps: { onChange: (e) => { _setConfirmPassword(e.target.value); - setError(undefined); }, }, error: password != confirmPassword, @@ -112,16 +111,6 @@ export default function Home() { /> - - - {error} - - { + const dispatch = useContext(AlertDispatchContext); + const router = useRouter(); const { mutate } = useSWR('getUser', getUser); let [password, setPassword] = useState(''); let [loading, setLoading] = useState(false); - let [error, setError] = useState(undefined); const [toggled, setToggled] = useState(false); - const { data: enabledServers } = useSWR( + const { data: enabledServers } = useSWR( 'getEnabledServers', - getEnabledServers 
+ async () => + await alertOnError( + getEnabledServers, + 'Could not get enabled servers', + dispatch + ) ); - const { data: oauthServers } = useSWR( + const { data: oauthServers } = useSWR( 'getOauthEnabledServers', - getOauthEnabledServers, + async () => + await alertOnError( + getOauthEnabledServers, + 'Could not get oauth enabled servers', + dispatch + ), { fallbackData: [] } ); @@ -68,34 +82,20 @@ const AdminLogin = () => { async function submit(password: string) { setLoading(true); - let response; - try { - response = await fetch('/api/v1.0/auth/login', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - user: 'admin', - password: password, - }), - }); - - if (response.ok) { - await mutate(getUser); - - const url = new URL(window.location.href); - let returnUrl = url.searchParams.get('returnURL') || ''; - returnUrl = returnUrl.replace(`/view`, ''); - router.push(returnUrl ? returnUrl : '../'); - } else { - setLoading(false); - setError(await getErrorMessage(response)); - } - } catch (e) { - console.error(e); + const response = await alertOnError( + async () => await login(password), + 'Could not login', + dispatch + ); + if (response) { + await mutate(getUser); + + const url = new URL(window.location.href); + let returnUrl = url.searchParams.get('returnURL') || ''; + returnUrl = returnUrl.replace(`/view`, ''); + router.push(returnUrl ? 
returnUrl : '../'); + } else { setLoading(false); - setError('Could not connect to server'); } } @@ -116,23 +116,12 @@ const AdminLogin = () => { sx: { width: '50%' }, onChange: (e) => { setPassword(e.target.value); - setError(undefined); }, }, }} /> - - - {error} - - { }; export default function Home() { + const dispatch = useContext(AlertDispatchContext); + const [returnUrl, setReturnUrl] = useState(undefined); - const { data: enabledServers } = useSWR( + const { data: enabledServers } = useSWR( 'getEnabledServers', - getEnabledServers + async () => + await alertOnError( + getEnabledServers, + 'Could not get enabled servers', + dispatch + ) ); - const { data: oauthServers } = useSWR( + const { data: oauthServers } = useSWR( 'getOauthEnabledServers', - getOauthEnabledServers, + async () => + await alertOnError( + getOauthEnabledServers, + 'Could not determine if the active server had OAuth enabled', + dispatch + ), { fallbackData: [] } ); diff --git a/web_ui/frontend/app/cache/page.tsx b/web_ui/frontend/app/cache/page.tsx index ad12e417a..6f9b4618d 100644 --- a/web_ui/frontend/app/cache/page.tsx +++ b/web_ui/frontend/app/cache/page.tsx @@ -39,123 +39,6 @@ export default function Home() { - - - - Storage - - - console.log(chart) - }, - }, - }, - }} - datasetOptions={[ - { - label: 'Total Storage (Gigabytes)', - borderColor: '#000000', - }, - { label: 'Free Storage (Gigabytes)', borderColor: '#54ff80' }, - ]} - datasetTransform={(dataset) => { - dataset.data = dataset.data.map((p) => { - let { x, y } = p as DataPoint; - y = y / 10 ** 9; - return { x: x, y: y }; - }); - - return dataset; - }} - /> - - - - - - - Transfer Rate - - - console.log(chart) - }, - }, - }, - }} - datasetOptions={[ - { label: 'Bytes Received (Bps)', borderColor: '#0071ff' }, - { label: 'Bytes Sent (Bps)', borderColor: '#54ff80' }, - ]} - /> - - - ); diff --git a/web_ui/frontend/app/config/Config.tsx b/web_ui/frontend/app/config/Config.tsx index 45039f54f..bfd42fbde 100644 --- 
a/web_ui/frontend/app/config/Config.tsx +++ b/web_ui/frontend/app/config/Config.tsx @@ -27,7 +27,14 @@ import { IconButton, Alert, } from '@mui/material'; -import React, { memo, useCallback, useEffect, useMemo, useState } from 'react'; +import React, { + memo, + useCallback, + useContext, + useEffect, + useMemo, + useState, +} from 'react'; import { AppRegistration, AssistantDirection, @@ -51,21 +58,26 @@ import StatusSnackBar, { StatusSnackBarProps, } from '@/components/StatusSnackBar'; import { ServerType } from '@/index'; -import { getEnabledServers } from '@/helpers/util'; +import { alertOnError, getEnabledServers } from '@/helpers/util'; import DownloadButton from '@/components/DownloadButton'; import { PaddedContent } from '@/components/layout'; import { ConfigDisplay, TableOfContents } from '@/app/config/components'; import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; +import { getConfig } from '@/helpers/api'; +import { AlertDispatchContext } from '@/components/AlertProvider'; function Config({ metadata }: { metadata: ParameterMetadataRecord }) { + const dispatch = useContext(AlertDispatchContext); + const [status, setStatus] = useState( undefined ); const [patch, _setPatch] = useState({}); - const { data, mutate, error } = useSWR( + const { data, mutate, error } = useSWR( 'getConfig', - getConfig + async () => + await alertOnError(getConfigJson, 'Could not get config', dispatch) ); const { data: enabledServers } = useSWR( 'getEnabledServers', @@ -94,8 +106,6 @@ function Config({ metadata }: { metadata: ParameterMetadataRecord }) { ); }, [serverConfig, patch]); - console.error(error, data); - return ( <> @@ -215,17 +225,11 @@ function Config({ metadata }: { metadata: ParameterMetadataRecord }) { ); } -const getConfig = async (): Promise => { - let response = await fetch('/api/v1.0/config'); - - if (!response.ok) { - if (response.status == 401) { - throw new Error('You must be logged in to view and access the config'); - } - throw 
new Error('Failed to fetch config'); +const getConfigJson = async (): Promise => { + const response = await getConfig(); + if (response) { + return await response.json(); } - - return await response.json(); }; export default Config; diff --git a/web_ui/frontend/app/director/components/DirectorCard.tsx b/web_ui/frontend/app/director/components/DirectorCard.tsx index 72419fd18..f99652142 100644 --- a/web_ui/frontend/app/director/components/DirectorCard.tsx +++ b/web_ui/frontend/app/director/components/DirectorCard.tsx @@ -1,5 +1,5 @@ import { Authenticated, secureFetch } from '@/helpers/login'; -import React, { useRef, useState } from 'react'; +import React, { useContext, useEffect, useRef, useState } from 'react'; import { Avatar, Box, @@ -21,19 +21,25 @@ import { NamespaceIcon } from '@/components/Namespace/index'; import useSWR from 'swr'; import Link from 'next/link'; import { User } from '@/index'; -import { getErrorMessage } from '@/helpers/util'; +import { alertOnError, getErrorMessage } from '@/helpers/util'; import { DirectorDropdown } from '@/app/director/components/DirectorDropdown'; +import { ServerDetailed, ServerGeneral } from '@/types'; +import { allowServer, filterServer, getDirectorServer } from '@/helpers/api'; +import { AlertDispatchContext } from '@/components/AlertProvider'; export interface DirectorCardProps { - server: Server; + server: ServerGeneral; authenticated?: User; } export const DirectorCard = ({ server, authenticated }: DirectorCardProps) => { - const [filtered, setFiltered] = useState(server.filtered); - const [error, setError] = useState(undefined); const [disabled, setDisabled] = useState(false); const [dropdownOpen, setDropdownOpen] = useState(false); + const [detailedServer, setDetailedServer] = useState< + ServerDetailed | undefined + >(); + + const dispatch = useContext(AlertDispatchContext); const { mutate } = useSWR('getServers'); @@ -56,7 +62,19 @@ export const DirectorCard = ({ server, authenticated }: DirectorCardProps) => 
{ server.healthStatus === 'Error' ? red[100] : 'secondary.main', p: 1, }} - onClick={() => setDropdownOpen(!dropdownOpen)} + onClick={async () => { + setDropdownOpen(!dropdownOpen); + if (detailedServer === undefined) { + alertOnError( + async () => { + const response = await getDirectorServer(server.name); + setDetailedServer(await response.json()); + }, + 'Failed to fetch server details', + dispatch + ); + } + }} > { { - x.stopPropagation(); + onClick={async (e) => { + e.stopPropagation(); // Disable the switch setDisabled(true); - // Provide optimistic feedback - setFiltered(!filtered); - // Update the server - let error; - if (filtered) { - error = await allowServer(server.name); - } else { - error = await filterServer(server.name); - } + await alertOnError( + async () => { + if (server.filtered) { + await allowServer(server.name); + } else { + await filterServer(server.name); + } + }, + 'Failed to toggle server status', + dispatch + ); - // Revert if we were too optimistic - if (error) { - setFiltered(!filtered); - setError(error); - } else { - mutate(); - } + mutate(); setDisabled(false); }} /> } - label={!filtered ? 'Active' : 'Disabled'} + label={server.filtered ? 'Disabled' : 'Active'} /> @@ -126,70 +140,12 @@ export const DirectorCard = ({ server, authenticated }: DirectorCardProps) => { - - - setError(undefined)} - > - setError(undefined)} - severity='error' - variant='filled' - sx={{ width: '100%' }} - > - {error} -
- If this error persists on reload, please file a ticket via the (?) - in the bottom left. -
-
-
+ ); }; -const filterServer = async (name: string): Promise => { - try { - const response = await secureFetch( - `/api/v1.0/director_ui/servers/filter/${name}`, - { - method: 'PATCH', - } - ); - if (response.ok) { - return; - } else { - return await getErrorMessage(response); - } - } catch (e) { - if (e instanceof Error) { - return e.message; - } - return 'Could not connect to server'; - } -}; - -const allowServer = async (name: string): Promise => { - try { - const response = await secureFetch( - `/api/v1.0/director_ui/servers/allow/${name}`, - { - method: 'PATCH', - } - ); - if (response.ok) { - return; - } else { - return await getErrorMessage(response); - } - } catch (e) { - if (e instanceof Error) { - return e.message; - } - return 'Could not connect to server'; - } -}; - export default DirectorCard; diff --git a/web_ui/frontend/app/director/components/DirectorCardList.tsx b/web_ui/frontend/app/director/components/DirectorCardList.tsx index c461f5634..c5600a9ef 100644 --- a/web_ui/frontend/app/director/components/DirectorCardList.tsx +++ b/web_ui/frontend/app/director/components/DirectorCardList.tsx @@ -10,6 +10,7 @@ import { DirectorCard, DirectorCardProps } from './'; import { Server } from '@/index'; import { BooleanToggleButton, CardList } from '@/components'; import useFuse from '@/helpers/useFuse'; +import { ServerGeneral } from '@/types'; interface DirectorCardListProps { data: Partial[]; @@ -88,7 +89,7 @@ export function DirectorCardList({ data, cardProps }: DirectorCardListProps) { ); } -const serverHasError = (server?: Server) => { +const serverHasError = (server?: ServerGeneral) => { return server?.healthStatus === 'Error'; }; diff --git a/web_ui/frontend/app/director/components/DirectorDropdown.tsx b/web_ui/frontend/app/director/components/DirectorDropdown.tsx index 15d7bcb03..ee3250c8c 100644 --- a/web_ui/frontend/app/director/components/DirectorDropdown.tsx +++ b/web_ui/frontend/app/director/components/DirectorDropdown.tsx @@ -1,17 +1,14 @@ 
-import { Capabilities, Server, StringTree } from '@/index'; -import { - CapabilitiesChip, - CapabilitiesDisplay, - Dropdown, - InformationSpan, -} from '@/components'; +import { CapabilitiesChip, Dropdown, InformationSpan } from '@/components'; import { Box, Grid, Typography } from '@mui/material'; import DirectoryTree from '@/components/DirectoryTree'; import React from 'react'; import { SinglePointMap } from '@/components/Map'; +import { ServerCapabilitiesTable } from '@/components/ServerCapabilitiesTable'; +import { Capabilities, ServerDetailed, ServerGeneral } from '@/types'; +import { Capability } from '@/components/configuration'; interface DirectorDropdownProps { - server: Server; + server: ServerGeneral | ServerDetailed; transition: boolean; } @@ -20,94 +17,68 @@ export const DirectorDropdown = ({ transition, }: DirectorDropdownProps) => { return ( - - - - - - - - - - - - {transition && ( - - )} - + <> + + + + + + + + + + + + {transition && ( + + )} + + - - {server.capabilities && ( - - + + - )} - - - Namespace Prefixes - - - - + + ); }; -const CapabilitiesRow = ({ capabilities }: { capabilities: Capabilities }) => { +export const CapabilitiesRow = ({ + capabilities, + parentCapabilities, +}: { + capabilities: Capabilities; + parentCapabilities?: Capabilities; +}) => { return ( {Object.entries(capabilities).map(([key, value]) => { + const castKey = key as keyof Capabilities; return ( - + ); })} ); }; - -const directoryListToTree = (directoryList: string[]): StringTree => { - let tree = {}; - directoryList.forEach((directory) => { - const path = directory - .split('/') - .filter((x) => x != '') - .map((x) => '/' + x); - tree = directoryListToTreeHelper(path, tree); - }); - - return tree; -}; - -const directoryListToTreeHelper = ( - path: string[], - tree: StringTree -): true | StringTree => { - if (path.length == 0) { - return true; - } - - if (!tree[path[0]] || tree[path[0]] === true) { - tree[path[0]] = {}; - } - - tree[path[0]] = 
directoryListToTreeHelper(path.slice(1), tree[path[0]]); - - return tree; -}; diff --git a/web_ui/frontend/app/director/components/NamespaceCard.tsx b/web_ui/frontend/app/director/components/NamespaceCard.tsx new file mode 100644 index 000000000..057013ee3 --- /dev/null +++ b/web_ui/frontend/app/director/components/NamespaceCard.tsx @@ -0,0 +1,77 @@ +import { secureFetch } from '@/helpers/login'; +import React, { useContext, useState } from 'react'; +import { Box, Paper, Typography } from '@mui/material'; +import { NamespaceIcon } from '@/components/Namespace/index'; +import { NamespaceDropdown } from './NamespaceDropdown'; +import { DirectorNamespace, ServerDetailed, ServerGeneral } from '@/types'; +import { getDirectorServer } from '@/helpers/api'; +import { alertOnError } from '@/helpers/util'; +import { AlertDispatchContext } from '@/components/AlertProvider'; + +export interface NamespaceCardProps { + namespace: DirectorNamespace; +} + +export const NamespaceCard = ({ namespace }: NamespaceCardProps) => { + const dispatch = useContext(AlertDispatchContext); + const [dropdownOpen, setDropdownOpen] = useState(false); + const [servers, setServers] = useState( + undefined + ); + + return ( + <> + + { + setDropdownOpen(!dropdownOpen); + if (servers === undefined) { + alertOnError( + async () => setServers(await getAssociatedServers(namespace)), + 'Failed to fetch servers', + dispatch + ); + } + }} + > + + + {namespace.path} + + + + + + ); +}; + +const getAssociatedServers = async (namespace: DirectorNamespace) => { + const servers = await Promise.all( + [...namespace.origins, ...namespace.caches].map(async (name) => + (await getDirectorServer(name)).json() + ) + ); + + // Alert the console if any servers are undefined, as this is unlikely to happen naturally + if (servers.some((s) => s === undefined)) { + console.error('Failed to fetch all servers, some are undefined'); + } + + return servers.filter((s) => s !== undefined) as ServerDetailed[]; +}; + +export default 
NamespaceCard; diff --git a/web_ui/frontend/app/director/components/NamespaceCardList.tsx b/web_ui/frontend/app/director/components/NamespaceCardList.tsx new file mode 100644 index 000000000..a13a20fad --- /dev/null +++ b/web_ui/frontend/app/director/components/NamespaceCardList.tsx @@ -0,0 +1,31 @@ +import React, { useState } from 'react'; +import { Box, TextField } from '@mui/material'; +import { NamespaceCard, NamespaceCardProps } from './'; +import { CardList } from '@/components'; +import useFuse from '@/helpers/useFuse'; + +interface NamespaceCardListProps { + data?: Partial[]; +} + +export function NamespaceCardList({ data }: NamespaceCardListProps) { + const [search, setSearch] = useState(''); + + const searchedData = useFuse>(data || [], search); + + return ( + + + setSearch(e.target.value)} + label='Search' + /> + + + + ); +} + +export default NamespaceCardList; diff --git a/web_ui/frontend/app/director/components/NamespaceDropdown.tsx b/web_ui/frontend/app/director/components/NamespaceDropdown.tsx new file mode 100644 index 000000000..c8e0779b0 --- /dev/null +++ b/web_ui/frontend/app/director/components/NamespaceDropdown.tsx @@ -0,0 +1,84 @@ +import { Dropdown, InformationSpan, InformationSpanHeader } from '@/components'; +import { Box, Grid } from '@mui/material'; +import React, { Fragment } from 'react'; +import { DirectorNamespace, ServerDetailed } from '@/types'; +import { NamespaceCapabilitiesTable } from '@/components/NamespaceCapabilitiesTable'; + +interface NamespaceDropdownProps { + namespace: DirectorNamespace; + servers?: ServerDetailed[]; + transition: boolean; +} + +export const NamespaceDropdown = ({ + namespace, + servers, + transition, +}: NamespaceDropdownProps) => { + return ( + <> + + + + + + {namespace.tokenGeneration?.map((tg) => ( + + + + + + + ))} + + {namespace.tokenIssuer?.map((ti) => ( + + + + {ti.basePaths.map((bp) => ( + + ))} + {ti.restrictedPaths && ( + <> + + {ti.restrictedPaths?.map((rp) => ( + + ))} + + )} + + ))} + + + + 
+ + + + ); +}; diff --git a/web_ui/frontend/app/director/components/index.tsx b/web_ui/frontend/app/director/components/index.tsx index 95b293cba..034cde56c 100644 --- a/web_ui/frontend/app/director/components/index.tsx +++ b/web_ui/frontend/app/director/components/index.tsx @@ -1,2 +1,3 @@ export * from './DirectorCard'; export * from './DirectorCardList'; +export * from './NamespaceCard'; diff --git a/web_ui/frontend/app/director/layout.tsx b/web_ui/frontend/app/director/layout.tsx index 5d9325d30..55e01cd65 100644 --- a/web_ui/frontend/app/director/layout.tsx +++ b/web_ui/frontend/app/director/layout.tsx @@ -20,7 +20,8 @@ import { Box } from '@mui/material'; import { ButtonLink, Sidebar } from '@/components/layout/Sidebar'; import BuildIcon from '@mui/icons-material/Build'; import Main from '@/components/layout/Main'; -import { Dashboard, Equalizer, MapOutlined } from '@mui/icons-material'; +import { Block, Dashboard, Equalizer, MapOutlined } from '@mui/icons-material'; +import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; export const metadata = { title: 'Pelican Director', @@ -38,15 +39,19 @@ export default function RootLayout({ - - - - - - + + + + + + + + + +
{children}
diff --git a/web_ui/frontend/app/director/metrics/page.tsx b/web_ui/frontend/app/director/metrics/page.tsx index 2d47410f7..52590a59a 100644 --- a/web_ui/frontend/app/director/metrics/page.tsx +++ b/web_ui/frontend/app/director/metrics/page.tsx @@ -9,96 +9,103 @@ import { } from '@/app/director/metrics/components/MetricBoxPlot'; import { StorageTable } from '@/app/director/metrics/components/StorageTable'; import { TransferBarGraph } from '@/app/director/metrics/components/TransferBarGraph'; +import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; const Page = () => { return ( - - - - {[ - , - , - ].map((component, index) => ( - - {component} - - ))} - - - - - - - - - - - - - - - + + + + + {[ + , + , + ].map((component, index) => ( + + {component} - - - - - - - - - - - - - - - - + ))} - - - {[ - , - , - ].map((component, index) => ( - - {component} + + + + + + + + + + + + + + + + + + + + + + + + - ))} + + + + + + + + + + {[ + , + , + ].map((component, index) => ( + + {component} + + ))} + - + ); }; diff --git a/web_ui/frontend/app/director/page.tsx b/web_ui/frontend/app/director/page.tsx index 3b1f64c1a..a9665b1f5 100644 --- a/web_ui/frontend/app/director/page.tsx +++ b/web_ui/frontend/app/director/page.tsx @@ -19,23 +19,45 @@ 'use client'; import { Box, Grid, Skeleton, Typography } from '@mui/material'; -import { useMemo } from 'react'; +import { useContext, useMemo } from 'react'; import useSWR from 'swr'; -import { Server } from '@/index'; -import { - DirectorCardList, - DirectorCard, - DirectorCardProps, -} from './components'; +import { DirectorCardList } from './components'; import { getUser } from '@/helpers/login'; import FederationOverview from '@/components/FederationOverview'; import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; import { PaddedContent } from '@/components/layout'; +import { DirectorNamespace, ServerGeneral } from '@/types'; +import { NamespaceCardList } from './components/NamespaceCardList'; 
+import { getDirectorServers, getDirectorNamespaces } from '@/helpers/get'; +import { alertOnError } from '@/helpers/util'; +import { AlertDispatchContext } from '@/components/AlertProvider'; export default function Page() { - const { data } = useSWR('getServers', getServers); + const dispatch = useContext(AlertDispatchContext); - const { data: user, error } = useSWR('getUser', getUser); + const { data } = useSWR( + 'getDirectorServers', + async () => + await alertOnError( + getDirectorServers, + 'Failed to fetch servers', + dispatch + ) + ); + + const { data: namespaces } = useSWR( + 'getDirectorNamespaces', + async () => + await alertOnError( + getDirectorNamespaces, + 'Faild to fetch Namespaces', + dispatch + ) + ); + + const { data: user, error } = useSWR('getUser', () => + alertOnError(getUser, 'Failed to fetch user', dispatch) + ); const cacheData = useMemo(() => { return data?.filter((server) => server.type === 'Cache'); @@ -83,6 +105,24 @@ export default function Page() { )} + + + Namespaces + + {cacheData ? 
( + { + return { namespace }; + }) || [] + } + /> + ) : ( + + + + )} + @@ -93,16 +133,3 @@ export default function Page() { ); } - -const getServers = async () => { - const url = new URL('/api/v1.0/director_ui/servers', window.location.origin); - - let response = await fetch(url); - if (response.ok) { - const responseData: Server[] = await response.json(); - responseData.sort((a, b) => a.name.localeCompare(b.name)); - return responseData; - } - - throw new Error('Failed to fetch servers'); -}; diff --git a/web_ui/frontend/app/layout.tsx b/web_ui/frontend/app/layout.tsx index bc4da3fc9..10f5ca8b3 100644 --- a/web_ui/frontend/app/layout.tsx +++ b/web_ui/frontend/app/layout.tsx @@ -17,7 +17,8 @@ ***************************************************************/ import { LocalizationProvider } from '@/clientComponents'; -import { ThemeProviderClient } from '@/public/theme'; +import { ThemeProviderClient } from '@/components/ThemeProvider'; +import { AlertProvider } from '@/components/AlertProvider'; import './globals.css'; export const metadata = { @@ -34,7 +35,9 @@ export default function RootLayout({ - {children} + + {children} + diff --git a/web_ui/frontend/app/origin/globus/callback/page.tsx b/web_ui/frontend/app/origin/globus/callback/page.tsx index 2bcad0b8c..19b5c2ea0 100644 --- a/web_ui/frontend/app/origin/globus/callback/page.tsx +++ b/web_ui/frontend/app/origin/globus/callback/page.tsx @@ -96,7 +96,7 @@ export default function Home() { u?.role == 'admin'} + allowedRoles={['admin']} > u?.role == 'admin'} - > + Globus Exports diff --git a/web_ui/frontend/app/origin/issuer/Issuer.tsx b/web_ui/frontend/app/origin/issuer/Issuer.tsx index 51921c646..66bdbf6ad 100644 --- a/web_ui/frontend/app/origin/issuer/Issuer.tsx +++ b/web_ui/frontend/app/origin/issuer/Issuer.tsx @@ -75,6 +75,7 @@ export function Issuer({ metadata }: { metadata: ParameterMetadataRecord }) { const configView = useMemo(() => { return merge(structuredClone(serverConfig), structuredClone(patch)); }, 
[serverConfig, patch]); + const submitPatch = useCallback(async (patch: any) => { setStatus({ message: 'Submitting', severity: 'info' }); diff --git a/web_ui/frontend/app/origin/page.tsx b/web_ui/frontend/app/origin/page.tsx index 98e5fbcd6..8ac2527f1 100644 --- a/web_ui/frontend/app/origin/page.tsx +++ b/web_ui/frontend/app/origin/page.tsx @@ -51,10 +51,7 @@ export default function Home() { }; return ( - u?.role == 'admin'} - > + @@ -87,62 +84,6 @@ export default function Home() { - - - - Transfer Rate - - - console.log(chart) - }, - }, - }, - }} - datasetOptions={[ - { label: 'Bytes Received (Bps)', borderColor: '#0071ff' }, - { label: 'Bytes Sent (Bps)', borderColor: '#54ff80' }, - ]} - /> - - - diff --git a/web_ui/frontend/app/registry/cache/edit/page.tsx b/web_ui/frontend/app/registry/cache/edit/page.tsx index 66ce2d284..c9f9eaec3 100644 --- a/web_ui/frontend/app/registry/cache/edit/page.tsx +++ b/web_ui/frontend/app/registry/cache/edit/page.tsx @@ -19,13 +19,13 @@ 'use client'; import { PutPage } from '@/app/registry/components/PutPage'; -import { - namespaceToCache, - putGeneralNamespace, -} from '@/app/registry/components/util'; +import { namespaceToCache } from '@/app/registry/components/util'; import { Box, Grid, Typography } from '@mui/material'; import React from 'react'; +import { putGeneralNamespace } from '@/helpers/api'; +import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; + export default function Page() { const putCache = async (data: any) => { const cache = namespaceToCache(structuredClone(data)); @@ -44,7 +44,9 @@ export default function Page() { - + + + diff --git a/web_ui/frontend/app/registry/cache/register/page.tsx b/web_ui/frontend/app/registry/cache/register/page.tsx index 6221ddb01..0364bc5f5 100644 --- a/web_ui/frontend/app/registry/cache/register/page.tsx +++ b/web_ui/frontend/app/registry/cache/register/page.tsx @@ -18,13 +18,12 @@ 'use client'; -import { - namespaceToCache, - postGeneralNamespace, -} from 
'@/app/registry/components/util'; +import { namespaceToCache } from '@/app/registry/components/util'; import { PostPage } from '@/app/registry/components/PostPage'; import { Box, Grid, Typography } from '@mui/material'; import React from 'react'; +import { postGeneralNamespace } from '@/helpers/api'; +import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; export default function Page() { const postCache = async (data: any) => { @@ -44,7 +43,9 @@ export default function Page() { - + + + diff --git a/web_ui/frontend/app/registry/components/CustomRegistrationField/BooleanField.tsx b/web_ui/frontend/app/registry/components/CustomRegistrationField/BooleanField.tsx index f541024ff..b3bbfe61f 100644 --- a/web_ui/frontend/app/registry/components/CustomRegistrationField/BooleanField.tsx +++ b/web_ui/frontend/app/registry/components/CustomRegistrationField/BooleanField.tsx @@ -9,7 +9,7 @@ import React, { ChangeEvent, ReactNode, SyntheticEvent, useMemo } from 'react'; import { createId } from '@/components/configuration/util'; import FormHelperText from '@mui/material/FormHelperText'; -import type { CustomRegistrationFieldProps } from './index.d'; +import type { BaseCustomRegistrationFieldProps } from './index'; const BooleanField = ({ onChange, @@ -18,7 +18,7 @@ const BooleanField = ({ required, description, value, -}: CustomRegistrationFieldProps) => { +}: BaseCustomRegistrationFieldProps) => { const id = useMemo(() => createId(name), [name]); const labelId = useMemo(() => `${id}-label`, [id]); diff --git a/web_ui/frontend/app/registry/components/CustomRegistrationField/EnumerationField.tsx b/web_ui/frontend/app/registry/components/CustomRegistrationField/EnumerationField.tsx index d0f6b699b..ec43e79e1 100644 --- a/web_ui/frontend/app/registry/components/CustomRegistrationField/EnumerationField.tsx +++ b/web_ui/frontend/app/registry/components/CustomRegistrationField/EnumerationField.tsx @@ -1,7 +1,7 @@ import { Autocomplete, TextField } from 
'@mui/material'; import React, { useMemo } from 'react'; -import type { CustomRegistrationFieldProps } from './index.d'; +import type { BaseCustomRegistrationFieldProps } from './index'; const EnumerationField = ({ onChange, @@ -11,7 +11,7 @@ const EnumerationField = ({ description, value, options, -}: CustomRegistrationFieldProps) => { +}: BaseCustomRegistrationFieldProps) => { const textValue = useMemo( () => options?.find((option) => option.id === value), [value, options] diff --git a/web_ui/frontend/app/registry/components/CustomRegistrationField/EpochTimeField.tsx b/web_ui/frontend/app/registry/components/CustomRegistrationField/EpochTimeField.tsx index 38d8c7a22..141aefc42 100644 --- a/web_ui/frontend/app/registry/components/CustomRegistrationField/EpochTimeField.tsx +++ b/web_ui/frontend/app/registry/components/CustomRegistrationField/EpochTimeField.tsx @@ -6,7 +6,7 @@ import FormControl from '@mui/material/FormControl'; import FormHelperText from '@mui/material/FormHelperText'; import { DateTime } from 'luxon'; -import type { CustomRegistrationFieldProps } from './index.d'; +import type { BaseCustomRegistrationFieldProps } from './index'; const EpochTimeField = ({ onChange, @@ -15,7 +15,7 @@ const EpochTimeField = ({ required, description, value, -}: CustomRegistrationFieldProps) => { +}: BaseCustomRegistrationFieldProps) => { return ( { if (value && isNaN(Number(value))) { @@ -17,7 +17,7 @@ const IntegerField = ({ required, description, value, -}: CustomRegistrationFieldProps) => { +}: BaseCustomRegistrationFieldProps) => { const [error, setError] = React.useState(undefined); // Check that the value is a number or undefined throwing error if not diff --git a/web_ui/frontend/app/registry/components/CustomRegistrationField/PubkeyField.tsx b/web_ui/frontend/app/registry/components/CustomRegistrationField/PubkeyField.tsx index 703e0d15e..a69e3f107 100644 --- a/web_ui/frontend/app/registry/components/CustomRegistrationField/PubkeyField.tsx +++ 
b/web_ui/frontend/app/registry/components/CustomRegistrationField/PubkeyField.tsx @@ -1,7 +1,7 @@ import React from 'react'; import { StringField } from './StringField'; -import type { CustomRegistrationFieldProps } from './index.d'; +import type { BaseCustomRegistrationFieldProps } from './index'; const JWKPlaceholder = { keys: [ @@ -25,7 +25,9 @@ const pubkeyValidator = (value: string) => { } }; -const PubkeyField = ({ ...props }: CustomRegistrationFieldProps) => { +const PubkeyField = ({ + ...props +}: BaseCustomRegistrationFieldProps) => { return ( & - CustomRegistrationFieldProps; + BaseCustomRegistrationFieldProps; interface StringFieldProps extends TextFieldProps { validator?: (value: string) => string | undefined; diff --git a/web_ui/frontend/app/registry/components/CustomRegistrationField/index.d.ts b/web_ui/frontend/app/registry/components/CustomRegistrationField/index.d.ts deleted file mode 100644 index e00428efe..000000000 --- a/web_ui/frontend/app/registry/components/CustomRegistrationField/index.d.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { CustomRegistrationField } from '@/components/configuration'; -import { Alert as AlertType, Namespace } from '@/index'; - -export interface NamespaceFormPage { - update: (data: Partial) => Promise; -} - -export interface CustomRegistrationProps extends CustomRegistrationField { - displayed_name: string; -} - -export type CustomRegistrationPropsEnum = - | (CustomRegistrationProps & { type: 'int' }) - | (CustomRegistrationProps & { type: 'string' }) - | (CustomRegistrationProps & { type: 'bool' }) - | (CustomRegistrationProps & { type: 'datetime' }) - | (CustomRegistrationProps & { type: 'enum' }); - -export interface CustomRegistrationFieldProps - extends CustomRegistrationProps { - onChange: (value: T | null) => void; - value?: T; -} - -export type CustomRegistrationFieldPropsEnum = - CustomRegistrationFieldProps & { - type: 'int' | 'string' | 'bool' | 'datetime' | 'enum'; - }; diff --git 
a/web_ui/frontend/app/registry/components/CustomRegistrationField/index.tsx b/web_ui/frontend/app/registry/components/CustomRegistrationField/index.tsx index 7c64a464e..f9050acf1 100644 --- a/web_ui/frontend/app/registry/components/CustomRegistrationField/index.tsx +++ b/web_ui/frontend/app/registry/components/CustomRegistrationField/index.tsx @@ -1,17 +1,30 @@ -import type { CustomRegistrationField } from '@/components/configuration/index'; import { BooleanField } from './BooleanField'; -import { ErrorField } from './ErrorField'; import { StringField } from './StringField'; import { IntegerField } from './IntegerField'; import PubkeyField from './PubkeyField'; -import { CustomRegistrationFieldPropsEnum } from './index.d'; import EpochTimeField from '@/app/registry/components/CustomRegistrationField/EpochTimeField'; import EnumerationField from '@/app/registry/components/CustomRegistrationField/EnumerationField'; +import { CustomRegistrationField as CustomRegistrationFieldConfiguration } from '@/components/configuration'; +import type { CustomRegistrationField } from '@/components/configuration'; + +export type CustomRegistrationFieldProps = + | (BaseCustomRegistrationFieldProps & { type: 'int' }) + | (BaseCustomRegistrationFieldProps & { type: 'string' }) + | (BaseCustomRegistrationFieldProps & { type: 'bool' }) + | (BaseCustomRegistrationFieldProps & { type: 'datetime' }) + | (BaseCustomRegistrationFieldProps & { type: 'enum' }); + +export interface BaseCustomRegistrationFieldProps + extends CustomRegistrationFieldConfiguration { + onChange: (value: T | null) => void; + value?: T; + displayed_name: string; +} const CustomRegistrationField = ({ ...props -}: CustomRegistrationFieldPropsEnum) => { +}: CustomRegistrationFieldProps) => { // If the field is the pubkey field, render the pubkey field if (props.type == 'string' && props.name === 'pubkey') { return ; diff --git a/web_ui/frontend/app/registry/components/Form.tsx 
b/web_ui/frontend/app/registry/components/Form.tsx index 24f8fa128..9a071e5a4 100644 --- a/web_ui/frontend/app/registry/components/Form.tsx +++ b/web_ui/frontend/app/registry/components/Form.tsx @@ -1,8 +1,14 @@ import { Box, Button, Alert } from '@mui/material'; -import React, { useEffect, useState, Dispatch, SetStateAction } from 'react'; +import React, { + useEffect, + useState, + Dispatch, + SetStateAction, + useContext, +} from 'react'; import useSWR from 'swr'; -import { Namespace } from '@/index'; +import { RegistryNamespace } from '@/index'; import CustomRegistrationField from '@/app/registry/components/CustomRegistrationField/index'; import { calculateKeys, @@ -11,31 +17,20 @@ import { populateKey, submitNamespaceForm, } from '@/app/registry/components/util'; -import { CustomRegistrationPropsEnum } from './CustomRegistrationField/index.d'; -import { getErrorMessage } from '@/helpers/util'; +import { CustomRegistrationFieldProps } from './CustomRegistrationField'; +import { alertOnError, getErrorMessage } from '@/helpers/util'; +import { optionsNamespaceRegistrationFields } from '@/helpers/api'; +import { AlertDispatchContext } from '@/components/AlertProvider'; interface FormProps { - namespace?: Namespace; - onSubmit: (data: Partial) => Promise; + namespace?: RegistryNamespace; + onSubmit: (data: Partial) => Promise; } -const getRegistrationFields = async (): Promise< - CustomRegistrationPropsEnum[] -> => { - const response = await fetch('/api/v1.0/registry_ui/namespaces', { - method: 'OPTIONS', - }); - if (response.ok) { - return await response.json(); - } else { - throw new Error(await getErrorMessage(response)); - } -}; - const onChange = ( name: string, value: string | number | boolean | null, - setData: Dispatch>> + setData: Dispatch>> ) => { setData((prevData) => { // If the value is undefined delete this key from the data dictionary @@ -53,13 +48,26 @@ const onChange = ( }; const Form = ({ namespace, onSubmit }: FormProps) => { - const [data, 
setData] = useState | undefined>( + const dispatch = useContext(AlertDispatchContext); + + const [data, setData] = useState | undefined>( namespace || {} ); - const { data: fields, error } = useSWR( - 'getRegistrationFields', - getRegistrationFields, + const { data: fields, error } = useSWR< + Omit[] | undefined + >( + 'optionsNamespaceRegistrationFields', + async () => { + const response = await alertOnError( + optionsNamespaceRegistrationFields, + "Couldn't fetch registration fields", + dispatch + ); + if (response) { + return await response.json(); + } + }, { fallbackData: [] } ); diff --git a/web_ui/frontend/app/registry/components/PostPage.tsx b/web_ui/frontend/app/registry/components/PostPage.tsx index 6c8ad7be2..3e22a1631 100644 --- a/web_ui/frontend/app/registry/components/PostPage.tsx +++ b/web_ui/frontend/app/registry/components/PostPage.tsx @@ -19,44 +19,49 @@ 'use client'; import { Box, Grid, Collapse, Alert, Skeleton } from '@mui/material'; -import React, { useEffect, useState } from 'react'; - -import { Alert as AlertType, Namespace } from '@/index'; +import React, { useContext, useEffect, useState } from 'react'; import Form from '@/app/registry/components/Form'; import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; import { submitNamespaceForm } from '@/app/registry/components/util'; -import type { NamespaceFormPage } from './CustomRegistrationField/index.d'; +import { NamespaceFormPage } from '@/app/registry/components'; +import { alertOnError } from '@/helpers/util'; +import { AlertDispatchContext } from '@/components/AlertProvider'; const PostPage = ({ update }: NamespaceFormPage) => { + const dispatch = useContext(AlertDispatchContext); + const [fromUrl, setFromUrl] = useState(undefined); - const [alert, setAlert] = useState(undefined); useEffect(() => { - const urlParams = new URLSearchParams(window.location.search); - const fromUrl = urlParams.get('fromUrl'); + (async () => { + const urlParams = new 
URLSearchParams(window.location.search); + const fromUrl = urlParams.get('fromUrl'); - try { if (fromUrl != undefined) { - const parsedUrl = new URL(fromUrl); - setFromUrl(parsedUrl); + const parsedUrl = await alertOnError( + () => new URL(fromUrl), + 'Failed to parse URL', + dispatch + ); + if (parsedUrl) { + setFromUrl(parsedUrl); + } } - } catch (e) { - setAlert({ severity: 'error', message: 'Invalid fromUrl provided' }); - } + })(); }, []); return ( - - - {alert?.message} - - { - setAlert(await submitNamespaceForm(data, fromUrl, update)); + onSubmit={async (namespace) => { + await alertOnError( + async () => + await submitNamespaceForm(namespace, fromUrl, update), + 'Failed to update namespace', + dispatch + ); }} /> diff --git a/web_ui/frontend/app/registry/components/PutPage.tsx b/web_ui/frontend/app/registry/components/PutPage.tsx index b5e1ebce1..2cd369777 100644 --- a/web_ui/frontend/app/registry/components/PutPage.tsx +++ b/web_ui/frontend/app/registry/components/PutPage.tsx @@ -29,33 +29,46 @@ import { import React, { ReactNode, Suspense, + useContext, useEffect, useMemo, useState, } from 'react'; import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; -import { Namespace, Alert as AlertType } from '@/index'; +import { RegistryNamespace, Alert as AlertType } from '@/index'; import Form from '@/app/registry/components/Form'; -import { - getNamespace, - submitNamespaceForm, -} from '@/app/registry/components/util'; -import type { NamespaceFormPage } from './CustomRegistrationField/index.d'; +import { submitNamespaceForm } from '@/app/registry/components/util'; +import { getNamespace } from '@/helpers/api'; +import { NamespaceFormPage } from '@/app/registry/components'; +import { AlertDispatchContext } from '@/components/AlertProvider'; +import { alertOnError } from '@/helpers/util'; const PutPage = ({ update }: NamespaceFormPage) => { const [id, setId] = useState(undefined); const [fromUrl, setFromUrl] = useState(undefined); - const 
[namespace, setNamespace] = useState(undefined); - const [alert, setAlert] = useState(undefined); + const [namespace, setNamespace] = useState( + undefined + ); + + const dispatch = useContext(AlertDispatchContext); useEffect(() => { const urlParams = new URLSearchParams(window.location.search); const id = urlParams.get('id'); const fromUrl = urlParams.get('fromUrl'); + const accessToken = urlParams.get('access_token'); if (id === null) { - setAlert({ severity: 'error', message: 'No Namespace ID Provided' }); + dispatch({ + type: 'openAlert', + payload: { + title: 'No Namespace ID Provided', + message: + "Your URL should contain a query parameter 'id' with the ID of the namespace you want to edit", + onClose: () => dispatch({ type: 'closeAlert' }), + }, + }); return; } @@ -65,43 +78,66 @@ const PutPage = ({ update }: NamespaceFormPage) => { setFromUrl(parsedUrl); } } catch (e) { - setAlert({ severity: 'error', message: 'Invalid fromUrl provided' }); + dispatch({ + type: 'openAlert', + payload: { + title: 'Invalid fromUrl provided', + message: + 'The `fromUrl` parameter provided is not a valid URL, this will only impact your redirection on completion of this form', + alertProps: { + severity: 'warning', + }, + onClose: () => dispatch({ type: 'closeAlert' }), + }, + }); } try { setId(parseInt(id)); } catch (e) { - setAlert({ severity: 'error', message: 'Invalid Namespace ID Provided' }); + dispatch({ + type: 'openAlert', + payload: { + title: 'Invalid Namespace ID provided', + message: + 'The Namespace Id provided is not a valid number. 
Please report this issue, as well as what link directed you here.', + alertProps: { + severity: 'error', + }, + onClose: () => dispatch({ type: 'closeAlert' }), + }, + }); } - }, []); - useEffect(() => { (async () => { if (id !== undefined) { - try { - setNamespace(await getNamespace(id)); - } catch (e) { - setAlert({ severity: 'error', message: e as string }); + const response = await alertOnError( + async () => await getNamespace(id, accessToken || undefined), + "Couldn't get namespace", + dispatch + ); + if (response) { + setNamespace(await response.json()); } } })(); - }, [id]); + }, []); return ( - - - {alert?.message} - - {namespace ? ( { let namespace = { ...data, id: id }; - setAlert(await submitNamespaceForm(namespace, fromUrl, update)); + await alertOnError( + async () => + await submitNamespaceForm(namespace, fromUrl, update), + 'Failed to update namespace', + dispatch + ); }} /> ) : ( diff --git a/web_ui/frontend/app/registry/components/index.ts b/web_ui/frontend/app/registry/components/index.ts new file mode 100644 index 000000000..c33ba9860 --- /dev/null +++ b/web_ui/frontend/app/registry/components/index.ts @@ -0,0 +1,5 @@ +import { Alert as AlertType, RegistryNamespace } from '@/index'; + +export interface NamespaceFormPage { + update: (data: Partial) => Promise; +} diff --git a/web_ui/frontend/app/registry/components/util.tsx b/web_ui/frontend/app/registry/components/util.tsx index 0eb3398ae..473d912b3 100644 --- a/web_ui/frontend/app/registry/components/util.tsx +++ b/web_ui/frontend/app/registry/components/util.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { secureFetch } from '@/helpers/login'; -import { Alert, Namespace } from '@/index'; +import { Alert, RegistryNamespace } from '@/index'; import { getErrorMessage } from '@/helpers/util'; export const populateKey = (o: any, key: string[], value: any) => { @@ -26,7 +26,15 @@ export const calculateKeys = (key: string) => { return [key]; }; -export const getValue = (o: any, key: string[]): 
string | undefined => { +/** + * Get the value of a key in an object + * @param o Object to get the value from + * @param key List of keys to traverse + */ +export const getValue = ( + o: Record | undefined, + key: string[] +): any => { if (o === undefined) { return undefined; } @@ -49,31 +57,7 @@ export const deleteKey = (o: any, key: string[]) => { return o; }; -const handleRequestAlert = async ( - url: string, - options: any -): Promise => { - try { - const response = await secureFetch(url, options); - - if (!response.ok) { - let errorMessage = await getErrorMessage(response); - return { severity: 'error', message: errorMessage }; - } - } catch (e) { - return { severity: 'error', message: `Fetch error: ${e}` }; - } -}; - -const namespaceFormNodeToJSON = (formData: FormData) => { - let data: any = {}; - formData.forEach((value: any, name: any) => { - populateKey(data, calculateKeys(name), value); - }); - return data; -}; - -export const namespaceToCache = (data: Namespace) => { +export const namespaceToCache = (data: RegistryNamespace) => { // Build the cache prefix if (data.prefix.startsWith('/caches/')) { return data; @@ -83,7 +67,7 @@ export const namespaceToCache = (data: Namespace) => { return data; }; -export const namespaceToOrigin = (data: Namespace) => { +export const namespaceToOrigin = (data: RegistryNamespace) => { // Build the cache prefix if (data.prefix.startsWith('/origins/')) { return data; @@ -93,61 +77,15 @@ export const namespaceToOrigin = (data: Namespace) => { return data; }; -export const getNamespace = async ( - id: string | number -): Promise => { - const url = new URL( - `/api/v1.0/registry_ui/namespaces/${id}`, - window.location.origin - ); - const response = await fetch(url); - if (response.ok) { - return await response.json(); - } else { - throw new Error(await getErrorMessage(response)); - } -}; - -export const postGeneralNamespace = async ( - data: Namespace -): Promise => { - return await 
handleRequestAlert('/api/v1.0/registry_ui/namespaces', { - body: JSON.stringify(data), - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - credentials: 'include', - }); -}; - -export const putGeneralNamespace = async ( - data: Namespace -): Promise => { - return await handleRequestAlert( - `/api/v1.0/registry_ui/namespaces/${data.id}`, - { - body: JSON.stringify(data), - method: 'PUT', - headers: { - 'Content-Type': 'application/json', - }, - credentials: 'include', - } - ); -}; - export const submitNamespaceForm = async ( - data: Partial, + data: Partial, toUrl: URL | undefined, - handleSubmit: (data: Partial) => Promise + handleSubmit: (data: Partial) => Promise ) => { - const submitAlert = await handleSubmit(data); + const response = await handleSubmit(data); // Clear the form on successful submit - if (submitAlert == undefined) { + if (response != undefined) { window.location.href = toUrl ? toUrl.toString() : '/view/registry/'; } - - return submitAlert; }; diff --git a/web_ui/frontend/app/registry/denied/page.tsx b/web_ui/frontend/app/registry/denied/page.tsx index f1bee0e5e..851fb6d03 100644 --- a/web_ui/frontend/app/registry/denied/page.tsx +++ b/web_ui/frontend/app/registry/denied/page.tsx @@ -18,71 +18,29 @@ 'use client'; -import { - Box, - Button, - Grid, - Typography, - Paper, - Alert, - Collapse, - IconButton, -} from '@mui/material'; -import React, { useEffect, useMemo, useState } from 'react'; +import { Box, Grid, Typography, Alert, Collapse } from '@mui/material'; +import React, { useContext, useMemo } from 'react'; -import { - PendingCard, - Card, - CardSkeleton, - CreateNamespaceCard, -} from '@/components/Namespace'; -import Link from 'next/link'; -import { Namespace, Alert as AlertType } from '@/index'; +import { CardSkeleton } from '@/components/Namespace'; import { getUser } from '@/helpers/login'; -import { Add } from '@mui/icons-material'; import NamespaceCardList from '@/components/Namespace/NamespaceCardList'; import 
useSWR from 'swr'; import { CardProps } from '@/components/Namespace/Card'; -import { PendingCardProps } from '@/components/Namespace/PendingCard'; import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; import DeniedCard from '@/components/Namespace/DeniedCard'; +import { getExtendedNamespaces } from '@/helpers/get'; +import { AlertDispatchContext } from '@/components/AlertProvider'; +import { alertOnError } from '@/helpers/util'; -const getData = async () => { - let data: { namespace: Namespace }[] = []; +export default function Home() { + const dispatch = useContext(AlertDispatchContext); - const url = new URL( - '/api/v1.0/registry_ui/namespaces', - window.location.origin + const { data } = useSWR('getExtendedNamespaces', async () => + alertOnError(getExtendedNamespaces, "Couldn't fetch namespaces", dispatch) + ); + const { data: user, error } = useSWR('getUser', async () => + alertOnError(getUser, "Couldn't fetch user", dispatch) ); - - const response = await fetch(url); - if (response.ok) { - const responseData: Namespace[] = await response.json(); - responseData.sort((a, b) => (a.id > b.id ? 
1 : -1)); - responseData.forEach((namespace) => { - if (namespace.prefix.startsWith('/caches/')) { - namespace.type = 'cache'; - namespace.prefix = namespace.prefix.replace('/caches/', ''); - } else if (namespace.prefix.startsWith('/origins/')) { - namespace.type = 'origin'; - namespace.prefix = namespace.prefix.replace('/origins/', ''); - } else { - namespace.type = 'namespace'; - } - }); - - // Convert data to Partial CardProps - data = responseData.map((d) => { - return { namespace: d }; - }); - } - - return data; -}; - -export default function Home() { - const { data } = useSWR('getNamespaces', getData); - const { data: user, error } = useSWR('getUser', getUser); const deniedNamespaces = useMemo( () => @@ -97,11 +55,6 @@ export default function Home() { Namespace Registry - - - {error?.toString()} - - diff --git a/web_ui/frontend/app/registry/layout.tsx b/web_ui/frontend/app/registry/layout.tsx index 36e2b69cd..2c0febf4b 100644 --- a/web_ui/frontend/app/registry/layout.tsx +++ b/web_ui/frontend/app/registry/layout.tsx @@ -35,6 +35,7 @@ import SpeedDial, { } from '@/components/layout/SidebarSpeedDial'; import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; import { PaddedContent } from '@/components/layout'; +import BuildIcon from '@mui/icons-material/Build'; export const metadata = { title: 'Pelican Registry', @@ -81,9 +82,11 @@ export default function RootLayout({ - - - + + + + +
{children} diff --git a/web_ui/frontend/app/registry/namespace/edit/page.tsx b/web_ui/frontend/app/registry/namespace/edit/page.tsx index 2abb51461..de07c32fd 100644 --- a/web_ui/frontend/app/registry/namespace/edit/page.tsx +++ b/web_ui/frontend/app/registry/namespace/edit/page.tsx @@ -19,9 +19,10 @@ 'use client'; import { PutPage } from '@/app/registry/components/PutPage'; -import { putGeneralNamespace } from '@/app/registry/components/util'; +import { putGeneralNamespace } from '@/helpers/api'; import { Box, Grid, Typography } from '@mui/material'; import React from 'react'; +import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; export default function Page() { const putCache = async (data: any) => { @@ -40,7 +41,9 @@ export default function Page() { - + + + diff --git a/web_ui/frontend/app/registry/namespace/register/page.tsx b/web_ui/frontend/app/registry/namespace/register/page.tsx index 12d60291f..be3aaeae2 100644 --- a/web_ui/frontend/app/registry/namespace/register/page.tsx +++ b/web_ui/frontend/app/registry/namespace/register/page.tsx @@ -18,10 +18,11 @@ 'use client'; -import { postGeneralNamespace } from '@/app/registry/components/util'; +import { postGeneralNamespace } from '@/helpers/api'; import { PostPage } from '@/app/registry/components/PostPage'; import { Box, Grid, Typography } from '@mui/material'; import React from 'react'; +import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; export default function Page() { const postCache = async (data: any) => { @@ -40,7 +41,9 @@ export default function Page() { - + + + diff --git a/web_ui/frontend/app/registry/origin/edit/page.tsx b/web_ui/frontend/app/registry/origin/edit/page.tsx index 683381232..a40a7b81b 100644 --- a/web_ui/frontend/app/registry/origin/edit/page.tsx +++ b/web_ui/frontend/app/registry/origin/edit/page.tsx @@ -19,12 +19,11 @@ 'use client'; import { PutPage } from '@/app/registry/components/PutPage'; -import { - namespaceToOrigin, - 
putGeneralNamespace, -} from '@/app/registry/components/util'; +import { namespaceToOrigin } from '@/app/registry/components/util'; import { Box, Grid, Typography } from '@mui/material'; import React from 'react'; +import { putGeneralNamespace } from '@/helpers/api'; +import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; export default function Page() { const putCache = async (data: any) => { @@ -44,7 +43,9 @@ export default function Page() { - + + + diff --git a/web_ui/frontend/app/registry/origin/register/page.tsx b/web_ui/frontend/app/registry/origin/register/page.tsx index a89e3f83d..dc61fc18b 100644 --- a/web_ui/frontend/app/registry/origin/register/page.tsx +++ b/web_ui/frontend/app/registry/origin/register/page.tsx @@ -18,13 +18,12 @@ 'use client'; -import { - namespaceToOrigin, - postGeneralNamespace, -} from '@/app/registry/components/util'; +import { namespaceToOrigin } from '@/app/registry/components/util'; import { PostPage } from '@/app/registry/components/PostPage'; import { Box, Grid, Typography } from '@mui/material'; import React from 'react'; +import { postGeneralNamespace } from '@/helpers/api'; +import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; export default function Page() { const postCache = async (data: any) => { @@ -44,7 +43,9 @@ export default function Page() { - + + + diff --git a/web_ui/frontend/app/registry/page.tsx b/web_ui/frontend/app/registry/page.tsx index caf27b17a..d509751d7 100644 --- a/web_ui/frontend/app/registry/page.tsx +++ b/web_ui/frontend/app/registry/page.tsx @@ -28,7 +28,7 @@ import { Collapse, IconButton, } from '@mui/material'; -import React, { useEffect, useMemo, useState } from 'react'; +import React, { useEffect, useMemo, useState, useContext } from 'react'; import { PendingCard, @@ -38,25 +38,38 @@ import { NamespaceCardList, } from '@/components'; import Link from 'next/link'; -import { Namespace, Alert as AlertType } from '@/index'; +import { RegistryNamespace, 
Alert as AlertType } from '@/index'; import { getUser } from '@/helpers/login'; import { Add } from '@mui/icons-material'; import useSWR from 'swr'; import { CardProps } from '@/components/Namespace/Card'; import { PendingCardProps } from '@/components/Namespace/PendingCard'; +import { AlertDispatchContext } from '@/components/AlertProvider'; +import { alertOnError } from '@/helpers/util'; +import { getExtendedNamespaces } from '@/helpers/get'; export default function Home() { - const [alert, setAlert] = useState(undefined); + const dispatch = useContext(AlertDispatchContext); - const { data, mutate: mutateNamespaces } = useSWR<{ namespace: Namespace }[]>( - 'getNamespaces', - getData, + const { data, mutate: mutateNamespaces } = useSWR< + { namespace: RegistryNamespace }[] | undefined + >( + 'getExtendedNamespaces', + () => + alertOnError( + getExtendedNamespaces, + 'Failed to fetch namespaces', + dispatch + ), { fallbackData: [], } ); - const { data: user, error } = useSWR('getUser', getUser); + const { data: user, error } = useSWR( + 'getUser', + async () => await alertOnError(getUser, 'Error Getting User', dispatch) + ); const pendingData = useMemo(() => { return data?.filter( @@ -97,33 +110,7 @@ export default function Home() { return ( - - Namespace Registry - - - {alert?.message} - - - - {user == undefined || - (!user.authenticated && ( - - - Login to register new namespaces. - - - - - - ))} {pendingData && pendingData.length > 0 && ( setAlert(a), onUpdate: () => mutateNamespaces(), }} /> @@ -263,36 +249,3 @@ export default function Home() { ); } - -const getData = async () => { - let data: { namespace: Namespace }[] = []; - - const url = new URL( - '/api/v1.0/registry_ui/namespaces', - window.location.origin - ); - - const response = await fetch(url); - if (response.ok) { - const responseData: Namespace[] = await response.json(); - responseData.sort((a, b) => (a.id > b.id ? 
1 : -1)); - responseData.forEach((namespace) => { - if (namespace.prefix.startsWith('/caches/')) { - namespace.type = 'cache'; - namespace.prefix = namespace.prefix.replace('/caches/', ''); - } else if (namespace.prefix.startsWith('/origins/')) { - namespace.type = 'origin'; - namespace.prefix = namespace.prefix.replace('/origins/', ''); - } else { - namespace.type = 'namespace'; - } - }); - - // Convert data to Partial CardProps - data = responseData.map((d) => { - return { namespace: d }; - }); - } - - return data; -}; diff --git a/web_ui/frontend/app/test/page.tsx b/web_ui/frontend/app/test/page.tsx new file mode 100644 index 000000000..5f6e11e7e --- /dev/null +++ b/web_ui/frontend/app/test/page.tsx @@ -0,0 +1,53 @@ +'use client'; + +import { AlertDispatchContext } from '@/components/AlertProvider'; +import React, { useContext } from 'react'; +import { Box } from '@mui/material'; +import CodeBlock from '@/components/CodeBlock'; + +const Page = () => { + const dispatch = useContext(AlertDispatchContext); + + return ( +
+ + +
+ ); +}; + +export default Page; diff --git a/web_ui/frontend/components/AlertPortal.tsx b/web_ui/frontend/components/AlertPortal.tsx index 7fc32a48c..4c75a0574 100644 --- a/web_ui/frontend/components/AlertPortal.tsx +++ b/web_ui/frontend/components/AlertPortal.tsx @@ -1,35 +1,48 @@ import { Portal } from '@mui/base'; -import React from 'react'; -import { Alert, SnackbarProps, Snackbar } from '@mui/material'; - -import { Alert as AlertType } from '@/index'; +import React, { ReactNode } from 'react'; +import { + Alert, + AlertProps, + Snackbar, + SnackbarProps, + AlertTitle, +} from '@mui/material'; export interface AlertPortalProps { - alert?: AlertType; onClose: () => void; + title?: string; + autoHideDuration?: number; + message?: ReactNode | string; + alertProps?: Omit; snackBarProps?: SnackbarProps; } export const AlertPortal = ({ - alert, onClose, + title, + autoHideDuration, + message, + alertProps, snackBarProps, }: AlertPortalProps) => { + if (autoHideDuration) { + setTimeout(() => onClose(), autoHideDuration); + } + return ( - {alert?.message} + {title && {title}} + {message} diff --git a/web_ui/frontend/components/AlertProvider.tsx b/web_ui/frontend/components/AlertProvider.tsx new file mode 100644 index 000000000..11e356488 --- /dev/null +++ b/web_ui/frontend/components/AlertProvider.tsx @@ -0,0 +1,76 @@ +'use client'; + +import { createContext, Dispatch, useReducer } from 'react'; +import { AlertPortal, AlertPortalProps } from '@/components/AlertPortal'; +import CodeBlock from '@/components/CodeBlock'; + +const defaultAlertContext: AlertPortalProps | undefined = undefined; + +export const AlertContext = createContext( + defaultAlertContext +); + +export const AlertDispatchContext = createContext>( + () => {} +); + +export const AlertProvider = ({ children }: { children: React.ReactNode }) => { + const [state, dispatch] = useReducer(alertReducer, defaultAlertContext); + + return ( + + + {children} + {state && } + + + ); +}; + +const alertReducer = ( + 
state: AlertPortalProps | undefined, + action: AlertReducerAction +): AlertPortalProps | undefined => { + switch (action.type) { + case 'closeAlert': + return undefined; + case 'openErrorAlert': + const { title, error, onClose } = action.payload; + + return { + title, + onClose, + message: {error}, + alertProps: { + severity: 'error', + }, + }; + case 'openAlert': + return action.payload; + default: + return state; + } +}; + +export type AlertReducerAction = + | closeAlertAction + | openErrorAlertAction + | openAlertAction; + +type closeAlertAction = { + type: 'closeAlert'; +}; + +type openErrorAlertAction = { + type: 'openErrorAlert'; + payload: { + title: string; + error: string; + onClose: () => void; + }; +}; + +type openAlertAction = { + type: 'openAlert'; + payload: AlertPortalProps; +}; diff --git a/web_ui/frontend/components/CapabilitiesDisplay.tsx b/web_ui/frontend/components/CapabilitiesDisplay.tsx index 4907b3e83..d3d82c7f8 100644 --- a/web_ui/frontend/components/CapabilitiesDisplay.tsx +++ b/web_ui/frontend/components/CapabilitiesDisplay.tsx @@ -1,8 +1,8 @@ -import { Capabilities } from '@/index'; +import { Capabilities } from '@/types'; import { Box, Tooltip, Typography } from '@mui/material'; -import { grey } from '@mui/material/colors'; +import { green, grey } from '@mui/material/colors'; import { Check, Clear } from '@mui/icons-material'; -import React from 'react'; +import React, { useMemo } from 'react'; export const CapabilitiesDisplay = ({ capabilities, @@ -14,7 +14,7 @@ export const CapabilitiesDisplay = ({ {Object.entries(capabilities).map(([key, value]) => { return ( - + ); })} @@ -22,13 +22,29 @@ export const CapabilitiesDisplay = ({ ); }; +/** + * Capabilities chip used to convey the capabilities of a server or namespace + * There are two levels of activity to help represent the relationship between + * activity and the server or namespace. 
+ * @param name + * @param value + * @param active + * @constructor + */ export const CapabilitiesChip = ({ name, value, + parentValue, }: { name: string; value: boolean; + parentValue?: boolean; }) => { + // Switch statement to determine the color of the chip + const isActive = useMemo(() => { + return parentValue !== undefined ? value && parentValue : value; + }, [value, parentValue]); + return ( diff --git a/web_ui/frontend/components/CardList.tsx b/web_ui/frontend/components/CardList.tsx index c4be45577..a9c7015c5 100644 --- a/web_ui/frontend/components/CardList.tsx +++ b/web_ui/frontend/components/CardList.tsx @@ -17,16 +17,18 @@ import { interface CardListProps { data?: Partial[]; Card: ComponentType; - cardProps: Partial; + cardProps?: Partial; } export function CardList({ data, Card, cardProps }: CardListProps) { const PAGE_SIZE = 5; const [page, setPage] = useState(1); - // Reset the page on data length change + // Minus the page if the data length changes useEffect(() => { - setPage(1); + if (data?.length && page > Math.ceil(data.length / PAGE_SIZE)) { + setPage(Math.max(1, Math.ceil(data.length / PAGE_SIZE))); + } }, [data?.length]); const count = useMemo(() => { diff --git a/web_ui/frontend/components/CodeBlock.tsx b/web_ui/frontend/components/CodeBlock.tsx new file mode 100644 index 000000000..f742793e7 --- /dev/null +++ b/web_ui/frontend/components/CodeBlock.tsx @@ -0,0 +1,27 @@ +import { stackoverflowLight } from 'react-syntax-highlighter/dist/cjs/styles/hljs'; +import SyntaxHighlighter from 'react-syntax-highlighter'; +import { Box } from '@mui/material'; + +/** + * CodeBlock component + * Copy onClick and darken onHover + * @param children + * @constructor + */ +export const CodeBlock = ({ children }: { children: string | string[] }) => { + return ( + + { + navigator.clipboard.writeText(children.toString()); + }} + > + {children} + + + ); +}; + +export default CodeBlock; diff --git a/web_ui/frontend/components/DataExportTable.tsx 
b/web_ui/frontend/components/DataExportTable.tsx index 9b43da0c8..a9a46748c 100644 --- a/web_ui/frontend/components/DataExportTable.tsx +++ b/web_ui/frontend/components/DataExportTable.tsx @@ -20,7 +20,7 @@ import { Skeleton } from '@mui/material'; import { Edit, Settings, Check, Clear } from '@mui/icons-material'; import useSWR from 'swr'; import { getErrorMessage } from '@/helpers/util'; -import type { Capabilities } from '@/index'; +import { Capabilities } from '@/types'; import { CapabilitiesDisplay } from '@/components'; type RegistrationStatus = diff --git a/web_ui/frontend/components/DirectoryTree.tsx b/web_ui/frontend/components/DirectoryTree.tsx index e25c2f36a..a77d4e11a 100644 --- a/web_ui/frontend/components/DirectoryTree.tsx +++ b/web_ui/frontend/components/DirectoryTree.tsx @@ -10,8 +10,6 @@ export const DirectoryTree = ({ data }: { data: StringTree }) => { const [selectedItems, setSelectedItems] = useState([]); const handleSelect = (ids: string[]) => { - console.log(ids, calculateSelectedItems(ids[0])); - setSelectedItems(calculateSelectedItems(ids[0])); }; @@ -30,6 +28,7 @@ const CustomTreeItemSmall = ({ ...props }: TreeItemProps) => { return ( {props.label}} /> ); diff --git a/web_ui/frontend/components/FederationOverview.tsx b/web_ui/frontend/components/FederationOverview.tsx index 40895926d..f47d36841 100644 --- a/web_ui/frontend/components/FederationOverview.tsx +++ b/web_ui/frontend/components/FederationOverview.tsx @@ -7,6 +7,9 @@ import { Box, Typography } from '@mui/material'; import AuthenticatedContent from '@/components/layout/AuthenticatedContent'; import Link from 'next/link'; import { getErrorMessage, getObjectValue } from '@/helpers/util'; +import { getConfig } from '@/helpers/api'; +import { getFederationUrls } from '@/helpers/get'; +import useSWR from 'swr'; const LinkBox = ({ href, text }: { href: string; text: string }) => { return ( @@ -29,74 +32,26 @@ const LinkBox = ({ href, text }: { href: string; text: string }) => { ); }; 
-const UrlData = [ - { key: ['Federation', 'NamespaceUrl', 'Value'], text: 'Namespace Registry' }, - { key: ['Federation', 'DirectorUrl', 'Value'], text: 'Director' }, - { key: ['Federation', 'RegistryUrl', 'Value'], text: 'Registry' }, - { - key: ['Federation', 'TopologyNamespaceUrl', 'Value'], - text: 'Topology Namespace', - }, - { key: ['Federation', 'DiscoveryUrl', 'Value'], text: 'Discovery' }, - { key: ['Federation', 'JwkUrl', 'Value'], text: 'JWK' }, -]; - const FederationOverview = () => { - const [config, setConfig] = useState< - { text: string; url: string | undefined }[] - >([]); - - let getConfig = async () => { - let response = await fetch('/api/v1.0/config'); - if (response.ok) { - const responseData = (await response.json()) as Config; - - const federationUrls = UrlData.map(({ key, text }) => { - let url = getObjectValue(responseData, key); - if ( - url && - !url?.startsWith('http://') && - !url?.startsWith('https://') - ) { - url = 'https://' + url; - } - - return { - text, - url, - }; - }); - - setConfig(federationUrls); - } else { - console.error(await getErrorMessage(response)); - } - }; - - useEffect(() => { - getConfig(); - }, []); - - if (config === undefined) { - return; - } + const { data: federationUrls, error } = useSWR( + 'getFederationUrls', + getFederationUrls, + { fallbackData: [] } + ); return ( - u?.role == 'admin'} - > - {!Object.values(config).every((x) => x == undefined) ? ( + <> + {!Object.values(federationUrls).every((x) => x == undefined) ? 
( Federation Overview ) : null} - {config.map(({ text, url }) => { + {federationUrls.map(({ text, url }) => { if (url) { return ; } })} - + ); }; diff --git a/web_ui/frontend/components/InformationSpan.tsx b/web_ui/frontend/components/InformationSpan.tsx index 745f9602d..46ddd89b4 100644 --- a/web_ui/frontend/components/InformationSpan.tsx +++ b/web_ui/frontend/components/InformationSpan.tsx @@ -1,13 +1,47 @@ import { Box, Tooltip, Typography } from '@mui/material'; import { grey } from '@mui/material/colors'; -import React from 'react'; +import React, { ReactNode } from 'react'; + +export const InformationSpanHeader = ({ + title, + indent = 0, +}: { + title: string; + indent?: number; +}) => { + return ( + + + {'\u00A0\u00A0\u00A0\u00A0'.repeat(Math.max(indent - 1, 0))} + {indent > 0 ? 'ā†³\u00A0' : ''} + {title} + + + + ); +}; export const InformationSpan = ({ name, value, + indent = 0, }: { name: string; value: string; + indent?: number; }) => { return ( @@ -22,11 +56,12 @@ export const InformationSpan = ({ p: '4px 6px', }, display: 'flex', - justifyContent: 'space-between', }} > - {name} + {'\u00A0\u00A0\u00A0\u00A0'.repeat(Math.max(indent - 1, 0))} + {indent > 0 ? 
'ā†³\u00A0' : ''} + {name}: {value} diff --git a/web_ui/frontend/components/Namespace/Card.tsx b/web_ui/frontend/components/Namespace/Card.tsx index c873ec115..f6fa4c377 100644 --- a/web_ui/frontend/components/Namespace/Card.tsx +++ b/web_ui/frontend/components/Namespace/Card.tsx @@ -1,5 +1,5 @@ -import { Alert, Alert as AlertType, Namespace } from '@/index'; -import React, { useRef, useState } from 'react'; +import { Alert, Alert as AlertType, RegistryNamespace } from '@/index'; +import React, { useContext, useRef, useState } from 'react'; import { Avatar, Box, @@ -14,20 +14,22 @@ import Link from 'next/link'; import InformationDropdown from './InformationDropdown'; import { NamespaceIcon } from '@/components/Namespace/index'; import { User } from '@/index'; -import AlertPortal from '@/components/AlertPortal'; -import { deleteNamespace } from './DeniedCard'; +import { deleteNamespace } from '@/helpers/api'; import { useSWRConfig } from 'swr'; +import { AlertDispatchContext } from '@/components/AlertProvider'; +import CodeBlock from '@/components/CodeBlock'; +import { alertOnError } from '@/helpers/util'; export interface CardProps { - namespace: Namespace; + namespace: RegistryNamespace; onUpdate?: () => void; authenticated?: User; } export const Card = ({ namespace, authenticated, onUpdate }: CardProps) => { + const dispatch = useContext(AlertDispatchContext); const ref = useRef(null); const [transition, setTransition] = useState(false); - const [alert, setAlert] = useState(undefined); const { mutate } = useSWRConfig(); return ( <> @@ -102,20 +104,14 @@ export const Card = ({ namespace, authenticated, onUpdate }: CardProps) => { color={'error'} onClick={async (e) => { e.stopPropagation(); - try { - await deleteNamespace(namespace.id); - setAlert({ - severity: 'success', - message: 'Registration deleted', - }); - setTimeout(() => mutate('getNamespaces'), 600); - if (onUpdate) { - onUpdate(); - } - } catch (e) { - if (e instanceof Error) { - setAlert({ severity: 
'error', message: e.message }); - } + await alertOnError( + async () => await deleteNamespace(namespace.id), + 'Could Not Delete Registration', + dispatch + ); + setTimeout(() => mutate('getExtendedNamespaces'), 600); + if (onUpdate) { + onUpdate(); } }} > @@ -135,7 +131,6 @@ export const Card = ({ namespace, authenticated, onUpdate }: CardProps) => { /> - setAlert(undefined)} /> ); }; diff --git a/web_ui/frontend/components/Namespace/DeniedCard.tsx b/web_ui/frontend/components/Namespace/DeniedCard.tsx index 348cb7a45..689055b87 100644 --- a/web_ui/frontend/components/Namespace/DeniedCard.tsx +++ b/web_ui/frontend/components/Namespace/DeniedCard.tsx @@ -1,76 +1,30 @@ -import React, { useMemo, useRef, useState } from 'react'; +import React, { useContext, useMemo, useRef, useState } from 'react'; import { green, red } from '@mui/material/colors'; import { Authenticated, secureFetch } from '@/helpers/login'; import { Avatar, Box, IconButton, Tooltip, Typography } from '@mui/material'; import { Block, Check, Delete, Edit, Person } from '@mui/icons-material'; -import { Alert as AlertType, Alert, Namespace } from '@/index'; +import { Alert, RegistryNamespace } from '@/index'; import InformationDropdown from './InformationDropdown'; import { getServerType, NamespaceIcon } from '@/components/Namespace/index'; +import { AlertContext, AlertDispatchContext } from '@/components/AlertProvider'; import { User } from '@/index'; -import AlertPortal from '@/components/AlertPortal'; import { useSWRConfig } from 'swr'; +import CodeBlock from '@/components/CodeBlock'; +import { approveNamespace, deleteNamespace } from '@/helpers/api'; +import { alertOnError } from '@/helpers/util'; export interface DeniedCardProps { - namespace: Namespace; + namespace: RegistryNamespace; onUpdate: () => void; onAlert: (alert: Alert) => void; authenticated?: User; } -export const deleteNamespace = async (id: number) => { - const response = await secureFetch(`/api/v1.0/registry_ui/namespaces/${id}`, { - 
method: 'DELETE', - }); - - if (!response.ok) { - let alertMessage; - try { - let data = await response.json(); - if (data?.msg) { - alertMessage = data?.msg; - } - alertMessage = 'Details not provided'; - } catch (e) { - if (e instanceof Error) { - alertMessage = e.message; - } - } - - throw new Error('Failed to delete namespace: ' + alertMessage); - } -}; - -const approveNamespace = async (id: number) => { - const response = await secureFetch( - `/api/v1.0/registry_ui/namespaces/${id}/approve`, - { - method: 'PATCH', - } - ); - - if (!response.ok) { - let alertMessage; - try { - let data = await response.json(); - if (data?.msg) { - alertMessage = data?.msg; - } - alertMessage = 'Details not provided'; - } catch (e) { - if (e instanceof Error) { - alertMessage = e.message; - } - } - - throw new Error('Failed to approve registration: ' + alertMessage); - } -}; - export const DeniedCard = ({ namespace, authenticated }: DeniedCardProps) => { const ref = useRef(null); const [transition, setTransition] = useState(false); - const [alert, setAlert] = useState(undefined); - + const dispatch = useContext(AlertDispatchContext); + const alert = useContext(AlertContext); const { mutate } = useSWRConfig(); return ( @@ -86,9 +40,9 @@ export const DeniedCard = ({ namespace, authenticated }: DeniedCardProps) => { borderRadius: transition ? '10px 10px 0px 0px' : 2, transition: 'background-color .3s ease-out', bgcolor: - alert?.severity == 'success' + alert?.alertProps?.severity == 'success' ? green[100] - : alert?.severity == 'error' + : alert?.alertProps?.severity == 'error' ? 
red[100] : 'inherit', '&:hover': { @@ -130,18 +84,11 @@ export const DeniedCard = ({ namespace, authenticated }: DeniedCardProps) => { color={'error'} onClick={async (e) => { e.stopPropagation(); - try { - await deleteNamespace(namespace.id); - setAlert({ - severity: 'success', - message: 'Registration deleted', - }); - setTimeout(() => mutate('getNamespaces'), 600); - } catch (e) { - if (e instanceof Error) { - setAlert({ severity: 'error', message: e.message }); - } - } + await alertOnError( + () => deleteNamespace(namespace.id), + 'Could Not Delete Registration', + dispatch + ); }} > @@ -153,18 +100,12 @@ export const DeniedCard = ({ namespace, authenticated }: DeniedCardProps) => { color={'success'} onClick={async (e) => { e.stopPropagation(); - try { - await approveNamespace(namespace.id); - setAlert({ - severity: 'success', - message: 'Registration Approved', - }); - setTimeout(() => mutate('getNamespaces'), 600); - } catch (e) { - if (e instanceof Error) { - setAlert({ severity: 'error', message: e.message }); - } - } + await alertOnError( + () => approveNamespace(namespace.id), + 'Could Not Approve Registration', + dispatch + ); + setTimeout(() => mutate('getExtendedNamespaces'), 600); }} > @@ -183,13 +124,6 @@ export const DeniedCard = ({ namespace, authenticated }: DeniedCardProps) => { /> - {alert?.severity == 'error' && ( - setAlert(undefined)} - /> - )} ); }; diff --git a/web_ui/frontend/components/Namespace/InformationDropdown.tsx b/web_ui/frontend/components/Namespace/InformationDropdown.tsx index db87fc751..b83b5df00 100644 --- a/web_ui/frontend/components/Namespace/InformationDropdown.tsx +++ b/web_ui/frontend/components/Namespace/InformationDropdown.tsx @@ -1,7 +1,10 @@ import { Box, Tooltip, Collapse, Grid, Typography } from '@mui/material'; import React from 'react'; -import { NamespaceAdminMetadata } from './index.d'; -import { Dropdown, InformationSpan } from '@/components'; +import { + Dropdown, + InformationSpan, + NamespaceAdminMetadata, +} 
from '@/components'; interface InformationDropdownProps { adminMetadata: NamespaceAdminMetadata; diff --git a/web_ui/frontend/components/Namespace/NamespaceIcon.tsx b/web_ui/frontend/components/Namespace/NamespaceIcon.tsx index 051098af2..ba8bcd19c 100644 --- a/web_ui/frontend/components/Namespace/NamespaceIcon.tsx +++ b/web_ui/frontend/components/Namespace/NamespaceIcon.tsx @@ -1,66 +1,101 @@ import { Avatar, Box, Tooltip } from '@mui/material'; import { FolderOpen, Storage, TripOrigin } from '@mui/icons-material'; -import React from 'react'; +import React, { useMemo } from 'react'; const NamespaceIcon = ({ - serverType: prefixType, + serverType, + size, + color = 'white', + bgcolor = 'primary.main', }: { serverType: 'origin' | 'cache' | 'namespace'; + size?: 'large' | 'medium' | 'small'; + color?: string; + bgcolor?: string; }) => { - if (prefixType == 'namespace') { + const avatarPixelSize = useMemo(() => { + switch (size) { + case 'large': + return 50; + case 'medium': + return 30; + case 'small': + return 20; + default: + return 30; + } + }, [size]); + + const iconPixelSize = useMemo(() => { + switch (size) { + case 'large': + return 30; + case 'medium': + return 24; + case 'small': + return 15; + default: + return 24; + } + }, []); + + if (serverType == 'namespace') { return ( - + ); } - if (prefixType == 'origin') { + if (serverType == 'origin') { return ( - + ); } - if (prefixType == 'cache') { + if (serverType == 'cache') { return ( - + diff --git a/web_ui/frontend/components/Namespace/PendingCard.tsx b/web_ui/frontend/components/Namespace/PendingCard.tsx index 4257ef09a..1b55f43d9 100644 --- a/web_ui/frontend/components/Namespace/PendingCard.tsx +++ b/web_ui/frontend/components/Namespace/PendingCard.tsx @@ -1,16 +1,19 @@ -import React, { useMemo, useRef, useState } from 'react'; +import React, { useContext, useMemo, useRef, useState } from 'react'; import { Authenticated, secureFetch } from '@/helpers/login'; import { Avatar, Box, IconButton, Tooltip, 
Typography } from '@mui/material'; import { Block, Check, Edit, Person } from '@mui/icons-material'; import Link from 'next/link'; -import { Alert, Namespace } from '@/index'; +import { Alert, RegistryNamespace } from '@/index'; import InformationDropdown from './InformationDropdown'; import { getServerType, NamespaceIcon } from '@/components/Namespace/index'; import { User } from '@/index'; +import { alertOnError } from '@/helpers/util'; +import { AlertDispatchContext } from '@/components/AlertProvider'; +import { approveNamespace, denyNamespace } from '@/helpers/api'; export interface PendingCardProps { - namespace: Namespace; + namespace: RegistryNamespace; onUpdate: () => void; onAlert: (alert: Alert) => void; authenticated?: User; @@ -25,61 +28,7 @@ export const PendingCard = ({ const ref = useRef(null); const [transition, setTransition] = useState(false); - const approveNamespace = async (e: React.MouseEvent) => { - e.stopPropagation(); - - try { - const response = await secureFetch( - `/api/v1.0/registry_ui/namespaces/${namespace.id}/approve`, - { - method: 'PATCH', - } - ); - - if (!response.ok) { - onAlert({ - severity: 'error', - message: `Failed to approve ${namespace.type} registration: ${namespace.prefix}`, - }); - } else { - onUpdate(); - onAlert({ - severity: 'success', - message: `Successfully approved ${namespace.type} registration: ${namespace.prefix}`, - }); - } - } catch (error) { - console.error(error); - } - }; - - const denyNamespace = async (e: React.MouseEvent) => { - e.stopPropagation(); - - try { - const response = await secureFetch( - `/api/v1.0/registry_ui/namespaces/${namespace.id}/deny`, - { - method: 'PATCH', - } - ); - - if (!response.ok) { - onAlert({ - severity: 'error', - message: `Failed to deny ${namespace.type} registration: ${namespace.prefix}`, - }); - } else { - onUpdate(); - onAlert({ - severity: 'success', - message: `Successfully denied ${namespace.type} registration: ${namespace.prefix}`, - }); - } - } catch (error) { - 
console.error(error); - } - }; + const dispatch = useContext(AlertDispatchContext); return ( @@ -123,7 +72,15 @@ export const PendingCard = ({ denyNamespace(e)} + onClick={async (e) => { + e.stopPropagation(); + await alertOnError( + () => denyNamespace(namespace.id), + "Couldn't deny namespace", + dispatch + ); + onUpdate(); + }} > @@ -132,7 +89,15 @@ export const PendingCard = ({ approveNamespace(e)} + onClick={async (e) => { + e.stopPropagation(); + await alertOnError( + () => approveNamespace(namespace.id), + "Couldn't approve namespace", + dispatch + ); + onUpdate(); + }} > diff --git a/web_ui/frontend/components/Namespace/index.d.tsx b/web_ui/frontend/components/Namespace/index.d.tsx deleted file mode 100644 index c7292df96..000000000 --- a/web_ui/frontend/components/Namespace/index.d.tsx +++ /dev/null @@ -1,21 +0,0 @@ -import { PendingCardProps } from './PendingCard'; -import { CardProps } from './Card'; - -export interface NamespaceAdminMetadata { - user_id: string; - description: string; - site_name: string; - institution: string; - security_contact_user_id: string; - status: 'Pending' | 'Approved' | 'Denied' | 'Unknown'; - approver_id: number; - approved_at: string; - created_at: string; - updated_at: string; -} - -export interface FlatObject { - [key: string]: Exclude; -} - -export type NamespaceCardProps = CardProps & PendingCardProps; diff --git a/web_ui/frontend/components/Namespace/index.tsx b/web_ui/frontend/components/Namespace/index.tsx index 77dbe881d..4edb5c8d7 100644 --- a/web_ui/frontend/components/Namespace/index.tsx +++ b/web_ui/frontend/components/Namespace/index.tsx @@ -1,4 +1,4 @@ -import { Namespace } from '@/index'; +import { RegistryNamespace } from '@/index'; import Card from './Card'; import CreateNamespaceCard from './CreateNamespaceCard'; import CardSkeleton from './CardSkeleton'; @@ -15,7 +15,29 @@ export { NamespaceIcon, }; -export const getServerType = (namespace: Namespace) => { +import { PendingCardProps } from 
'./PendingCard'; +import { CardProps } from './Card'; + +export interface NamespaceAdminMetadata { + user_id: string; + description: string; + site_name: string; + institution: string; + security_contact_user_id: string; + status: 'Pending' | 'Approved' | 'Denied' | 'Unknown'; + approver_id: number; + approved_at: string; + created_at: string; + updated_at: string; +} + +export interface FlatObject { + [key: string]: Exclude; +} + +export type NamespaceCardProps = CardProps & PendingCardProps; + +export const getServerType = (namespace: RegistryNamespace) => { // If the namespace is empty the value is undefined if (namespace?.prefix == null || namespace.prefix == '') { return ''; diff --git a/web_ui/frontend/components/NamespaceCapabilitiesTable.tsx b/web_ui/frontend/components/NamespaceCapabilitiesTable.tsx new file mode 100644 index 000000000..0ba7ae659 --- /dev/null +++ b/web_ui/frontend/components/NamespaceCapabilitiesTable.tsx @@ -0,0 +1,95 @@ +/** + * A table to display the capabilities of a namespace + */ + +/** + * Table to display the server capabilities with its namespaces + */ + +import { DirectorNamespace, ServerDetailed, ServerGeneral } from '@/types'; +import { Box, Grid, Typography, useTheme } from '@mui/material'; +import { CapabilitiesRow } from '@/app/director/components/DirectorDropdown'; +import { grey } from '@mui/material/colors'; +import { NamespaceIcon } from '@/components/Namespace'; + +interface NamespaceCapabilitiesTableProps { + namespace: DirectorNamespace; + servers?: ServerDetailed[]; +} + +/** + * Create a grid table that displays the server capabilities with the namespaces + * listed below indicating their individual capabilities and how they interact + * with the servers own capabilities. 
+ * @param server + * @constructor + */ +export const NamespaceCapabilitiesTable = ({ + namespace, + servers, +}: NamespaceCapabilitiesTableProps) => { + const theme = useTheme(); + + return ( + + + + + + + + Namespace Capabilities + + + + + + + + + + {servers && + servers + ?.sort((a, b) => a.name.localeCompare(b.name)) + ?.map((server) => ( + + + + + + + + {server.name} + + + + + + + + + + ))} + + ); +}; diff --git a/web_ui/frontend/components/ServerCapabilitiesTable.tsx b/web_ui/frontend/components/ServerCapabilitiesTable.tsx new file mode 100644 index 000000000..1800406cd --- /dev/null +++ b/web_ui/frontend/components/ServerCapabilitiesTable.tsx @@ -0,0 +1,74 @@ +/** + * Table to display the server capabilities with its namespaces + */ + +import { ServerDetailed, ServerGeneral } from '@/types'; +import { Box, Grid, Typography } from '@mui/material'; +import { CapabilitiesRow } from '@/app/director/components/DirectorDropdown'; +import { grey } from '@mui/material/colors'; + +interface ServerCapabilitiesTableProps { + server: ServerGeneral | ServerDetailed; +} + +/** + * Create a grid table that displays the server capabilities with the namespaces + * listed below indicating their individual capabilities and how they interact + * with the servers own capabilities. 
+ * @param server + * @constructor + */ +export const ServerCapabilitiesTable = ({ + server, +}: ServerCapabilitiesTableProps) => { + return ( + + + + + + + + {server.type}'s Namespace Capabilities + + + + + + + + + + {'namespaces' in server && + server?.namespaces + ?.sort((a, b) => a.path.localeCompare(b.path)) + ?.map((namespace) => ( + + + + + + + {namespace.path} + + + + + + + + + + ))} + + ); +}; diff --git a/web_ui/frontend/public/theme.tsx b/web_ui/frontend/components/ThemeProvider.tsx similarity index 92% rename from web_ui/frontend/public/theme.tsx rename to web_ui/frontend/components/ThemeProvider.tsx index 9d4e8cfcd..85aba2a81 100644 --- a/web_ui/frontend/public/theme.tsx +++ b/web_ui/frontend/components/ThemeProvider.tsx @@ -30,7 +30,7 @@ const poppins = Poppins({ display: 'swap', }); -let theme = createTheme({ +let themeProvider = createTheme({ palette: { primary: { main: '#0885ff', @@ -78,7 +78,7 @@ let theme = createTheme({ }, }); -theme = responsiveFontSizes(theme, { factor: 3 }); +themeProvider = responsiveFontSizes(themeProvider, { factor: 3 }); interface ThemeProviderClientProps { children: React.ReactNode; @@ -87,5 +87,5 @@ interface ThemeProviderClientProps { export const ThemeProviderClient: FC = ({ children, }) => { - return {children}; + return {children}; }; diff --git a/web_ui/frontend/components/layout/AuthenticatedContent.tsx b/web_ui/frontend/components/layout/AuthenticatedContent.tsx index 5eef49bcc..0da52c668 100644 --- a/web_ui/frontend/components/layout/AuthenticatedContent.tsx +++ b/web_ui/frontend/components/layout/AuthenticatedContent.tsx @@ -41,18 +41,30 @@ interface AuthenticatedContentProps { promptLogin?: boolean; redirect?: boolean; trustThenValidate?: boolean; - children: React.ReactNode; boxProps?: BoxProps; - checkAuthentication?: (user: User) => boolean; + allowedRoles?: User['role'][]; + replace?: boolean; + children: React.ReactNode; } +/** + * AuthenticatedContent is a component that will show the children if the 
user is authenticated. + * @param promptLogin If true then the user will be prompted to login if they are not authenticated + * @param redirect If true then the user will be redirected to the login page if they are not authenticated + * @param trustThenValidate If true then the user will be shown the content if they are not authenticated but will be validated after + * @param boxProps The props to pass to the Box component + * @param allowedRoles The roles that are allowed to see the content + * @param replace If true then the + * @param children The content to show if the user is authenticated + * @constructor + */ const AuthenticatedContent = ({ promptLogin = false, redirect = false, trustThenValidate = false, children, boxProps, - checkAuthentication, + allowedRoles, }: AuthenticatedContentProps) => { if (redirect && promptLogin) { throw new Error('redirect XOR promptLogin must be true'); @@ -66,12 +78,12 @@ const AuthenticatedContent = ({ const [pageUrl, setPageUrl] = useState(''); const authenticated = useMemo(() => { - if (data && checkAuthentication) { - return checkAuthentication(data); + if (data && allowedRoles) { + return data?.role && allowedRoles.includes(data?.role); } else { - return data?.authenticated !== undefined; + return !!data?.authenticated; } - }, [data, checkAuthentication]); + }, [data, allowedRoles]); useEffect(() => { // Keep pathname as is since backend handles the redirect after logging in and needs the full path @@ -84,9 +96,9 @@ const AuthenticatedContent = ({ // Redirect to login page if not authenticated and redirect is true useEffect(() => { if (!isValidating && !authenticated && redirect) { - router.push('/login/?returnURL=' + pageUrl); + router.replace('/login/?returnURL=' + pageUrl); } - }, [data, isValidating]); + }, [data, isValidating, authenticated]); // If there was a error then print it to the screen if (error) { diff --git a/web_ui/frontend/dev/image/nginx.conf b/web_ui/frontend/dev/image/nginx.conf index 
97fa6bc7e..4c5f83fda 100644 --- a/web_ui/frontend/dev/image/nginx.conf +++ b/web_ui/frontend/dev/image/nginx.conf @@ -56,6 +56,8 @@ http { proxy_connect_timeout 10s; proxy_set_header X-Real-IP $remote_addr; proxy_pass http://host.docker.internal:3000; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; } gzip on; diff --git a/web_ui/frontend/helpers/api.ts b/web_ui/frontend/helpers/api.ts new file mode 100644 index 000000000..2d42a83db --- /dev/null +++ b/web_ui/frontend/helpers/api.ts @@ -0,0 +1,293 @@ +/** + * API Helper Functions + * + * Strictly return the response from the API, throwing an error if the response is not ok + */ + +import { secureFetch } from '@/helpers/login'; +import { getErrorMessage } from '@/helpers/util'; +import { RegistryNamespace } from '@/index'; +import { ServerGeneral } from '@/types'; + +/** + * Wraps an api request with error handling for both the request and the response if error + * @param fetchRequest The request to make to the api + * @returns The response from the api + */ +export async function fetchApi( + fetchRequest: () => Promise +): Promise { + try { + const response = await fetchRequest(); + if (!response.ok) { + let alertMessage; + try { + alertMessage = await getErrorMessage(response); + } catch (e) { + if (e instanceof Error) { + alertMessage = e.message; + } + } + throw new Error(alertMessage); + } + return response; + } catch (e) { + if (e instanceof Error) { + throw Error('Fetch to API Failed', { cause: e }); + } else { + throw Error('Fetch to API Failed', { cause: e }); + } + } +} + +/** + * Get config + */ +export const getConfig = async (): Promise => { + return fetchApi(async () => await secureFetch('/api/v1.0/config')); +}; + +/** + * Deletes a namespace + * @param id Namespace ID + */ +export const deleteNamespace = async (id: number) => { + return fetchApi( + async () => + await secureFetch(`/api/v1.0/registry_ui/namespaces/${id}`, { + method: 'DELETE', + }) + ); +}; + +/** + * 
Approves a namespace + * @param id Namespace ID + */ +export const approveNamespace = async (id: number): Promise => { + return fetchApi( + async () => + await secureFetch(`/api/v1.0/registry_ui/namespaces/${id}/approve`, { + method: 'PATCH', + }) + ); +}; + +/** + * Denies a namespace + * @param id Namespace ID + */ +export const denyNamespace = async (id: number): Promise => { + return fetchApi( + async () => + await secureFetch(`/api/v1.0/registry_ui/namespaces/${id}/deny`, { + method: 'PATCH', + }) + ); +}; + +/** + * Enables a server on the director + * @param name Server name + */ +export const allowServer = async (name: string): Promise => { + return fetchApi( + async () => + await secureFetch(`/api/v1.0/director_ui/servers/allow/${name}`, { + method: 'PATCH', + }) + ); +}; + +/** + * Filters ( Disables ) a server on the director + * @param name Server name + */ +export const filterServer = async (name: string): Promise => { + return fetchApi( + async () => + await secureFetch(`/api/v1.0/director_ui/servers/filter/${name}`, { + method: 'PATCH', + }) + ); +}; + +/** + * Get director servers + * + */ +export const getDirectorServers = async () => { + const url = new URL('/api/v1.0/director_ui/servers', window.location.origin); + + return await fetchApi(async () => await fetch(url)); +}; + +/** + * Get a director server by name + * @param name Server name + */ +export const getDirectorServer = async (name: string): Promise => { + const url = new URL( + `/api/v1.0/director_ui/servers/${name}`, + window.location.origin + ); + + return await fetchApi(async () => await fetch(url)); +}; + +/** + * Get namespaces from director + */ +export const getDirectorNamespaces = async () => { + const url = new URL( + '/api/v1.0/director_ui/namespaces', + window.location.origin + ); + + return await fetchApi(async () => await fetch(url)); +}; + +/** + * Get namespaces + */ +export const getNamespaces = async (): Promise => { + const url = new URL( + 
'/api/v1.0/registry_ui/namespaces', + window.location.origin + ); + + return await fetchApi(async () => await fetch(url)); +}; + +/** + * Gets a namespace by ID + * @param id Namespace ID + * @param accessToken Access token + */ +export const getNamespace = async ( + id: string | number, + accessToken?: string +): Promise => { + const url = new URL( + `/api/v1.0/registry_ui/namespaces/${id}`, + window.location.origin + ); + if (accessToken) { + url.searchParams.append('access_token', accessToken); + } + return await fetchApi(async () => await fetch(url)); +}; + +export const postGeneralNamespace = async ( + data: RegistryNamespace +): Promise => { + return await fetchApi( + async () => + await secureFetch('/api/v1.0/registry_ui/namespaces', { + body: JSON.stringify(data), + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + credentials: 'include', + }) + ); +}; + +export const putGeneralNamespace = async ( + data: RegistryNamespace +): Promise => { + // If an access_token is in the URL, add it to the request + const url = new URL( + `/api/v1.0/registry_ui/namespaces/${data.id}`, + window.location.origin + ); + const accessToken = new URLSearchParams(window.location.search).get( + 'access_token' + ); + if (accessToken) { + url.searchParams.append('access_token', accessToken); + } + + return await fetchApi(async () => { + return secureFetch(url.toString(), { + body: JSON.stringify(data), + method: 'PUT', + headers: { + 'Content-Type': 'application/json', + }, + credentials: 'include', + }); + }); +}; + +/** + * Get registration fields from options for namespace + */ +export const optionsNamespaceRegistrationFields = + async (): Promise => { + return await fetchApi( + async () => + await fetch('/api/v1.0/registry_ui/namespaces', { + method: 'OPTIONS', + }) + ); + }; + +/** + * Initializes a login via terminal code + */ +export const initLogin = async (code: string): Promise => { + return await fetchApi( + async () => + await 
fetch('/api/v1.0/auth/initLogin', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + code: code, + }), + }) + ); +}; + +/** + * Reset ( Do initial ) Login + */ +export const resetLogin = async (password: string): Promise => { + return await fetchApi( + async () => + await fetch('/api/v1.0/auth/resetLogin', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + password: password, + }), + }) + ); +}; + +/** + * Login + */ +export const login = async ( + password: string, + user: string = 'admin' +): Promise => { + return await fetchApi( + async () => + await fetch('/api/v1.0/auth/login', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + user: user, + password: password, + }), + }) + ); +}; diff --git a/web_ui/frontend/helpers/get.ts b/web_ui/frontend/helpers/get.ts index ca2be9954..f127c5b3e 100644 --- a/web_ui/frontend/helpers/get.ts +++ b/web_ui/frontend/helpers/get.ts @@ -1,9 +1,113 @@ +/** + * API wrappers for manipulating fetched data + * + * @module helpers/get + */ + import { Config, ParameterValueRecord } from '@/components/configuration'; +import { + getDirectorNamespaces as getDirectorNamespacesResponse, + getDirectorServers as getDirectorServersResponse, + getConfig as getConfigResponse, + getNamespaces, +} from '@/helpers/api'; import { flattenObject } from '@/app/config/util'; +import { DirectorNamespace } from '@/types'; +import { RegistryNamespace } from '@/index'; +import { getObjectValue } from '@/helpers/util'; +import { ServerGeneral } from '@/types'; + +/** + * Director Getters + */ + +/** + * Get and sort director servers + */ +export const getDirectorServers = async () => { + const response = await getDirectorServersResponse(); + const responseData: ServerGeneral[] = await response.json(); + responseData.sort((a, b) => a.name.localeCompare(b.name)); + return responseData; +}; + +/** + * Get and 
sort director namespaces + */ +export const getDirectorNamespaces = async () => { + const response = await getDirectorNamespacesResponse(); + const responseData: DirectorNamespace[] = await response.json(); + responseData.sort((a, b) => a.path.localeCompare(b.path)); + return responseData; +}; export const getConfig = async (): Promise => { - let response = await fetch('/api/v1.0/config'); + let response = await getConfigResponse(); let data = await response.json(); let flatData = flattenObject(data); return flatData; }; + +/** + * Get extended namespaces + */ +export const getExtendedNamespaces = async (): Promise< + { namespace: RegistryNamespace }[] +> => { + const response = await getNamespaces(); + const data: RegistryNamespace[] = await response.json(); + data.sort((a, b) => (a.id > b.id ? 1 : -1)); + data.forEach((namespace) => { + if (namespace.prefix.startsWith('/caches/')) { + namespace.type = 'cache'; + namespace.prefix = namespace.prefix.replace('/caches/', ''); + } else if (namespace.prefix.startsWith('/origins/')) { + namespace.type = 'origin'; + namespace.prefix = namespace.prefix.replace('/origins/', ''); + } else { + namespace.type = 'namespace'; + } + }); + + return data.map((d) => { + return { namespace: d }; + }); +}; + +/** + * Get federation URLs + */ +export const getFederationUrls = async () => { + try { + const response = await getConfigResponse(); + const responseData = (await response.json()) as Config; + + const federationUrls = UrlData.map(({ key, text }) => { + let url = getObjectValue(responseData, key); + if (url && !url?.startsWith('http://') && !url?.startsWith('https://')) { + url = 'https://' + url; + } + + return { + text, + url, + }; + }); + + return federationUrls; + } catch (e) { + console.error(e); + return []; + } +}; +const UrlData = [ + { key: ['Federation', 'NamespaceUrl'], text: 'Namespace Registry' }, + { key: ['Federation', 'DirectorUrl'], text: 'Director' }, + { key: ['Federation', 'RegistryUrl'], text: 'Registry' }, 
+ { + key: ['Federation', 'TopologyNamespaceUrl'], + text: 'Topology Namespace', + }, + { key: ['Federation', 'DiscoveryUrl'], text: 'Discovery' }, + { key: ['Federation', 'JwkUrl'], text: 'JWK' }, +]; diff --git a/web_ui/frontend/helpers/login.tsx b/web_ui/frontend/helpers/login.ts similarity index 100% rename from web_ui/frontend/helpers/login.tsx rename to web_ui/frontend/helpers/login.ts diff --git a/web_ui/frontend/helpers/util.tsx b/web_ui/frontend/helpers/util.ts similarity index 50% rename from web_ui/frontend/helpers/util.tsx rename to web_ui/frontend/helpers/util.ts index e1c1e6be1..a79ad2196 100644 --- a/web_ui/frontend/helpers/util.tsx +++ b/web_ui/frontend/helpers/util.ts @@ -1,4 +1,6 @@ import { ServerType } from '@/index'; +import { Dispatch } from 'react'; +import { AlertReducerAction } from '@/components/AlertProvider'; const stringToTime = (time: string) => { return new Date(Date.parse(time)).toLocaleString(); @@ -36,6 +38,11 @@ export const getOauthEnabledServers = async () => { } }; +/** + * Extract the value from a object via a list of keys + * @param obj + * @param keys + */ export function getObjectValue(obj: any, keys: string[]): T | undefined { const currentValue = obj?.[keys[0]]; if (keys.length == 1) { @@ -44,21 +51,28 @@ export function getObjectValue(obj: any, keys: string[]): T | undefined { return getObjectValue(currentValue, keys.slice(1)); } +/** + * Get the error message from a response + * @param response + */ export const getErrorMessage = async (response: Response): Promise => { - let message; try { let data = await response.json(); - message = response.status + ': ' + data['msg']; + return response.status + ': ' + data['msg']; } catch (e) { - message = response.status + ': ' + response.statusText; + return response.status + ': ' + response.statusText; } - return message; }; export type TypeFunction = (x?: F) => T; export type TypeOrTypeFunction = T | TypeFunction; +/** + * Evaluate a function or return a value + * @param o 
Function or value + * @param functionProps Function properties + */ export function evaluateOrReturn( o: TypeOrTypeFunction, functionProps?: F @@ -70,6 +84,57 @@ export function evaluateOrReturn( return o as T; } +/** + * Get the average of an array of numbers + * @param arr Array of numbers + */ export const average = (arr: number[]) => { return arr.reduce((a, b) => a + b, 0) / arr.length; }; + +type ErrorWithCause = Error & { cause?: Error }; + +/** + * If an error is caught from f then display the error via an alert UI + */ +export async function alertOnError( + f: () => Promise | T | undefined, + title: string = 'Error', + dispatch: Dispatch +) { + try { + return await f(); + } catch (error) { + console.error(error); + if (error instanceof Error) { + dispatch({ + type: 'openErrorAlert', + payload: { + title, + error: errorToString(error as ErrorWithCause), + onClose: () => dispatch({ type: 'closeAlert' }), + }, + }); + } + } +} + +/** + * Convert a error into a string + * @param error + */ +export const errorToString = (error: ErrorWithCause): string => { + if (error?.cause) { + // Check that error is instance of Error + if (!(error?.cause instanceof Error)) { + console.error( + 'Malformed error, cause is not an instance of Error', + error + ); + } + + return `${error.message}\nā†³ ${errorToString(error.cause as ErrorWithCause)}`; + } + + return `${error.message}`; +}; diff --git a/web_ui/frontend/index.d.ts b/web_ui/frontend/index.ts similarity index 76% rename from web_ui/frontend/index.d.ts rename to web_ui/frontend/index.ts index 7be168325..dfcdcae95 100644 --- a/web_ui/frontend/index.d.ts +++ b/web_ui/frontend/index.ts @@ -1,4 +1,5 @@ import { NamespaceAdminMetadata } from '@/components/Namespace'; +import { Capabilities } from '@/types'; export interface User { authenticated: boolean; @@ -26,22 +27,14 @@ export interface Server { namespacePrefixes: string[]; } -export interface Capabilities { - PublicReads: boolean; - Reads: boolean; - Writes: boolean; - 
Listings: boolean; - DirectReads: boolean; -} - -export type StringTree = Record; +export type StringTree = { [key: string]: StringTree | true }; -interface Alert { +export interface Alert { severity: 'error' | 'warning' | 'info' | 'success'; message: string; } -export interface Namespace { +export interface RegistryNamespace { id: number; prefix: string; pubkey: string; @@ -50,7 +43,7 @@ export interface Namespace { custom_fields?: Record; } -interface Institution { +export interface Institution { id: string; name: string; } diff --git a/web_ui/frontend/types.ts b/web_ui/frontend/types.ts new file mode 100644 index 000000000..dd64aa694 --- /dev/null +++ b/web_ui/frontend/types.ts @@ -0,0 +1,57 @@ +export interface Capabilities { + PublicRead: boolean; + Read: boolean; + Write: boolean; + Listing: boolean; + FallBackRead: boolean; +} + +export interface TokenGeneration { + strategy: string; + vaultServer: string; + maxScopeDepth: number; + issuer: string; +} + +export interface TokenIssuer { + basePaths: string[]; + restrictedPaths: string[] | null; + issuer: string; +} + +export interface DirectorNamespace { + path: string; + capabilities: Capabilities; + tokenGeneration: TokenGeneration[] | null; + tokenIssuer: TokenIssuer[] | null; + fromTopology: boolean; + caches: string[]; + origins: string[]; +} + +interface ServerBase { + name: string; + storageType: string; + disableDirectorTest: boolean; + authUrl: string; + brokerUrl: string; + url: string; + webUrl: string; + type: 'Origin' | 'Cache'; + latitude: number; + longitude: number; + capabilities: Capabilities; + filtered: boolean; + filteredType: string; + fromTopology: boolean; + healthStatus: string; + ioLoad: number; +} + +export interface ServerDetailed extends ServerBase { + namespaces: DirectorNamespace[]; +} + +export interface ServerGeneral extends ServerBase { + namespacePrefixes: string[]; +} diff --git a/xrootd/launch.go b/xrootd/launch.go index 78de57ff6..78416b438 100644 --- a/xrootd/launch.go 
+++ b/xrootd/launch.go @@ -91,6 +91,8 @@ func makeUnprivilegedXrootdLauncher(daemonName string, configPath string, isCach if confDir := os.Getenv("XRD_PLUGINCONFDIR"); confDir != "" { result.ExtraEnv = append(result.ExtraEnv, "XRD_PLUGINCONFDIR="+confDir) } + result.ExtraEnv = append(result.ExtraEnv, "XRD_PELICANFEDERATIONMETADATATIMEOUT="+param.Cache_DefaultCacheTimeout.GetDuration().String()) + result.ExtraEnv = append(result.ExtraEnv, "XRD_PELICANDEFAULTHEADERTIMEOUT="+param.Cache_DefaultCacheTimeout.GetDuration().String()) } return } @@ -165,7 +167,7 @@ func LaunchDaemons(ctx context.Context, launchers []daemon.Launcher, egrp *errgr return } - ticker := time.NewTicker(10 * time.Second) + ticker := time.NewTicker(param.Xrootd_MaxStartupWait.GetDuration()) defer ticker.Stop() select { case <-ctx.Done(): @@ -180,8 +182,8 @@ func LaunchDaemons(ctx context.Context, launchers []daemon.Launcher, egrp *errgr portStartCallback(port) } case <-ticker.C: - log.Errorln("XRootD did not startup after 10s of waiting") - err = errors.New("XRootD did not startup after 10s of waiting") + log.Errorln("XRootD did not startup after", param.Xrootd_MaxStartupWait.GetDuration().String(), "of waiting") + err = errors.New("XRootD did not startup after " + param.Xrootd_MaxStartupWait.GetDuration().String() + " of waiting") return } diff --git a/xrootd/origin_test.go b/xrootd/origin_test.go index e2c6a696d..c64fb53c6 100644 --- a/xrootd/origin_test.go +++ b/xrootd/origin_test.go @@ -231,16 +231,7 @@ func TestMultiExportOrigin(t *testing.T) { require.True(t, ok) } -func runS3Test(t *testing.T, bucketName, urlStyle, objectName string) { - ctx, cancel, egrp := test_utils.TestContext(context.Background(), t) - defer func() { require.NoError(t, egrp.Wait()) }() - defer cancel() - - server_utils.ResetTestState() - - defer server_utils.ResetTestState() - - federationPrefix := "/test" +func mockupS3Origin(ctx context.Context, egrp *errgroup.Group, t *testing.T, federationPrefix, bucketName, 
urlStyle string) context.CancelFunc { regionName := "us-east-1" serviceUrl := "https://s3.amazonaws.com" viper.Set("Origin.FederationPrefix", federationPrefix) @@ -260,7 +251,19 @@ func runS3Test(t *testing.T, bucketName, urlStyle, objectName string) { viper.Set("Server.WebPort", 0) viper.Set("TLSSkipVerify", true) - mockupCancel := originMockup(ctx, egrp, t) + return originMockup(ctx, egrp, t) +} + +func runS3Test(t *testing.T, bucketName, urlStyle, objectName string) { + ctx, cancel, egrp := test_utils.TestContext(context.Background(), t) + defer func() { require.NoError(t, egrp.Wait()) }() + defer cancel() + server_utils.ResetTestState() + defer server_utils.ResetTestState() + + federationPrefix := "/test" + + mockupCancel := mockupS3Origin(ctx, egrp, t, federationPrefix, bucketName, urlStyle) defer mockupCancel() originEndpoint := param.Origin_Url.GetString() @@ -304,3 +307,149 @@ func TestS3OriginConfig(t *testing.T) { runS3Test(t, "", "path", "noaa-wod-pds/MD5SUMS") }) } + +func TestS3OriginWithSentinel(t *testing.T) { + ctx, cancel, egrp := test_utils.TestContext(context.Background(), t) + defer func() { require.NoError(t, egrp.Wait()) }() + defer cancel() + server_utils.ResetTestState() + defer server_utils.ResetTestState() + + federationPrefix := "/test" + bucketName := "noaa-wod-pds" + + mockupCancel := mockupS3Origin(ctx, egrp, t, federationPrefix, bucketName, "path") + defer mockupCancel() + + mockExportValidStn := server_utils.OriginExport{ + StoragePrefix: viper.GetString("Origin.StoragePrefix"), + FederationPrefix: viper.GetString("Origin.FederationPrefix"), + Capabilities: server_structs.Capabilities{Reads: true}, + SentinelLocation: "MD5SUMS", + } + + originEndpoint := param.Origin_Url.GetString() + // At this point, a 403 means the server is running, which means its ready to grab objects from + err := server_utils.WaitUntilWorking(ctx, "GET", originEndpoint, "xrootd", 403, true) + if err != nil { + t.Fatalf("Unsuccessful test: Server encountered 
an error: %v", err) + } + + // mock export with no sentinel + mockExportNoStn := server_utils.OriginExport{ + StoragePrefix: viper.GetString("Origin.StoragePrefix"), + FederationPrefix: viper.GetString("Origin.FederationPrefix"), + Capabilities: server_structs.Capabilities{Reads: true}, + } + + // mock export with an invalid sentinel + mockExportInvalidStn := server_utils.OriginExport{ + StoragePrefix: viper.GetString("Origin.StoragePrefix"), + FederationPrefix: viper.GetString("Origin.FederationPrefix"), + Capabilities: server_structs.Capabilities{Reads: true}, + SentinelLocation: "MD5SUMS_dne", + } + + t.Run("valid-sentinel-return-ok", func(t *testing.T) { + ok, err := server_utils.CheckOriginSentinelLocations([]server_utils.OriginExport{mockExportValidStn}) + require.NoError(t, err) + require.True(t, ok) + }) + t.Run("empty-sentinel-return-ok", func(t *testing.T) { + ok, err := server_utils.CheckOriginSentinelLocations([]server_utils.OriginExport{mockExportNoStn}) + require.NoError(t, err) + require.True(t, ok) + }) + + t.Run("invalid-sentinel-return-error", func(t *testing.T) { + ok, err := server_utils.CheckOriginSentinelLocations([]server_utils.OriginExport{mockExportInvalidStn}) + require.Error(t, err) + require.False(t, ok) + }) +} + +func TestPosixOriginWithSentinel(t *testing.T) { + ctx, cancel, egrp := test_utils.TestContext(context.Background(), t) + defer func() { require.NoError(t, egrp.Wait()) }() + defer cancel() + + server_utils.ResetTestState() + + defer server_utils.ResetTestState() + + // Create a test temp dir, ensure it's readable by XRootD + tmpPathPattern := "XRD-Tst_Orgn*" + tmpPath, err := os.MkdirTemp("", tmpPathPattern) + require.NoError(t, err) + err = os.Chmod(tmpPath, 0755) + require.NoError(t, err) + + viper.Set("Origin.StoragePrefix", tmpPath) + viper.Set("Origin.FederationPrefix", "/test") + viper.Set("Origin.StorageType", "posix") + // Disable functionality we're not using (and is difficult to make work on Mac) + 
viper.Set("Origin.EnableCmsd", false) + viper.Set("Origin.EnableMacaroons", false) + viper.Set("Origin.EnableVoms", false) + viper.Set("Origin.Port", 0) + viper.Set("Server.WebPort", 0) + viper.Set("TLSSkipVerify", true) + viper.Set("Logging.Origin.Scitokens", "trace") + + mockupCancel := originMockup(ctx, egrp, t) + defer mockupCancel() + + // mock export with a valid sentinel + mockExportValidStn := server_utils.OriginExport{ + StoragePrefix: viper.GetString("Origin.StoragePrefix"), + FederationPrefix: viper.GetString("Origin.FederationPrefix"), + Capabilities: server_structs.Capabilities{Reads: true}, + SentinelLocation: "mock_sentinel", + } + // mock export with no sentinel + mockExportNoStn := server_utils.OriginExport{ + StoragePrefix: viper.GetString("Origin.StoragePrefix"), + FederationPrefix: viper.GetString("Origin.FederationPrefix"), + Capabilities: server_structs.Capabilities{Reads: true}, + } + // mock export with an invalid sentinel + mockExportInvalidStn := server_utils.OriginExport{ + StoragePrefix: viper.GetString("Origin.StoragePrefix"), + FederationPrefix: viper.GetString("Origin.FederationPrefix"), + Capabilities: server_structs.Capabilities{Reads: true}, + SentinelLocation: "sentinel_dne", + } + + // Create a sentinel file, ensure it's readable by XRootD + tempStn := filepath.Join(mockExportValidStn.StoragePrefix, mockExportValidStn.SentinelLocation) + file, err := os.Create(tempStn) + require.NoError(t, err) + err = file.Close() + require.NoError(t, err) + err = os.Chmod(tempStn, 0755) + require.NoError(t, err) + + err = server_utils.WaitUntilWorking(ctx, "GET", param.Origin_Url.GetString(), "xrootd", 403, false) + if err != nil { + t.Fatalf("Unsuccessful test: Server encountered an error: %v", err) + } + require.NoError(t, err) + + t.Run("valid-sentinel-return-ok", func(t *testing.T) { + ok, err := server_utils.CheckOriginSentinelLocations([]server_utils.OriginExport{mockExportValidStn}) + require.NoError(t, err) + require.True(t, ok) + }) + 
+ t.Run("empty-sentinel-return-ok", func(t *testing.T) { + ok, err := server_utils.CheckOriginSentinelLocations([]server_utils.OriginExport{mockExportNoStn}) + require.NoError(t, err) + require.True(t, ok) + }) + + t.Run("invalid-sentinel-return-error", func(t *testing.T) { + ok, err := server_utils.CheckOriginSentinelLocations([]server_utils.OriginExport{mockExportInvalidStn}) + require.Error(t, err) + require.False(t, ok) + }) +} diff --git a/xrootd/resources/xrootd-cache.cfg b/xrootd/resources/xrootd-cache.cfg index 7d1cd8a19..ae2aabb25 100644 --- a/xrootd/resources/xrootd-cache.cfg +++ b/xrootd/resources/xrootd-cache.cfg @@ -58,15 +58,11 @@ pfc.writequeue 16 4 pfc.ram 4g pfc.diskusage {{if .Cache.LowWatermark}}{{.Cache.LowWatermark}}{{else}}0.90{{end}} {{if .Cache.HighWaterMark}}{{.Cache.HighWaterMark}}{{else}}0.95{{end}} purgeinterval 300s xrootd.fslib ++ throttle # throttle plugin is needed to calculate server IO load -http.tlsclientauth defer -{{- range $Prefix := .Cache.X509ClientAuthenticationPrefixes}} -http.tlsrequiredprefix {{$Prefix}} -{{- end}} {{if .Cache.Concurrency}} throttle.throttle concurrency {{.Cache.Concurrency}} {{end}} pss.origin {{.Cache.PSSOrigin}} -oss.localroot {{.Cache.LocalRoot}} +oss.localroot {{.Cache.NamespaceLocation}} pfc.spaces data meta {{- range $value := .Cache.DataLocations}} oss.space data {{$value}} diff --git a/xrootd/xrootd_config.go b/xrootd/xrootd_config.go index e0de0703f..b0553387d 100644 --- a/xrootd/xrootd_config.go +++ b/xrootd/xrootd_config.go @@ -115,7 +115,7 @@ type ( RunLocation string DataLocations []string MetaLocations []string - LocalRoot string + NamespaceLocation string PSSOrigin string BlocksToPrefetch int Concurrency int @@ -274,32 +274,32 @@ func CheckOriginXrootdEnv(exportPath string, server server_structs.XRootDServer, return nil } -func CheckCacheXrootdEnv(exportPath string, server server_structs.XRootDServer, uid int, gid int) (string, error) { - viper.Set("Xrootd.Mount", exportPath) - 
filepath.Join(exportPath, "/") - err := config.MkdirAll(exportPath, 0775, uid, gid) - if err != nil { - return "", errors.Wrapf(err, "Unable to create export directory %v", - filepath.Dir(exportPath)) +func CheckCacheXrootdEnv(server server_structs.XRootDServer, uid int, gid int) error { + storageLocation := param.Cache_StorageLocation.GetString() + if err := config.MkdirAll(storageLocation, 0775, uid, gid); err != nil { + return errors.Wrapf(err, "Unable to create the cache's storage directory '%s'", storageLocation) + } + // Setting Cache.StorageLocation to /run/pelican/cache is a default we use for testing, but it shouldn't ever be used + // in a production setting. If the user hasn't overridden the default, log a warning. + if storageLocation == filepath.Join("/run", "pelican", "cache") { + log.Warnf("%s is set to the default /run/pelican/cache. This default is to allow quick testing but should not be used in production.", param.Cache_StorageLocation.GetName()) + } - localRoot := param.Cache_LocalRoot.GetString() - - localRoot = filepath.Clean(localRoot) - err = config.MkdirAll(localRoot, 0775, uid, gid) - - if err != nil { - return "", errors.Wrapf(err, "Unable to create local root %v", - filepath.Dir(localRoot)) + namespaceLocation := param.Cache_NamespaceLocation.GetString() + if err := config.MkdirAll(namespaceLocation, 0775, uid, gid); err != nil { + return errors.Wrapf(err, "Unable to create the cache's namespace directory '%s'", namespaceLocation) + } dataPaths := param.Cache_DataLocations.GetStringSlice() for _, dPath := range dataPaths { dataPath := filepath.Clean(dPath) - err = config.MkdirAll(dataPath, 0775, uid, gid) + // Data locations should never be below the namespace location + if strings.HasPrefix(dPath, namespaceLocation) { + return errors.Errorf("A configured data location '%s' is a subdirectory of the namespace location '%s'. 
Please ensure these directories are not nested.", dPath, namespaceLocation) + } - if err != nil { - return "", errors.Wrapf(err, "Unable to create data directory %v", + if err := config.MkdirAll(dataPath, 0775, uid, gid); err != nil { + return errors.Wrapf(err, "Unable to create data directory %v", filepath.Dir(dataPath)) } } @@ -307,17 +307,20 @@ func CheckCacheXrootdEnv(exportPath string, server server_structs.XRootDServer, metaPaths := param.Cache_MetaLocations.GetStringSlice() for _, mPath := range metaPaths { metaPath := filepath.Clean(mPath) - err = config.MkdirAll(metaPath, 0775, uid, gid) + // Similar to data locations, meta locations should never be below the namespace location + if strings.HasPrefix(mPath, namespaceLocation) { + return errors.Errorf("The configured meta location '%s' is a subdirectory of the namespace location '%s'. Please ensure these directories are not nested.", mPath, namespaceLocation) + } - if err != nil { - return "", errors.Wrapf(err, "Unable to create meta directory %v", + if err := config.MkdirAll(metaPath, 0775, uid, gid); err != nil { + return errors.Wrapf(err, "Unable to create meta directory %v", filepath.Dir(metaPath)) } } fedInfo, err := config.GetFederation(context.Background()) if err != nil { - return "", errors.Wrap(err, "Failed to pull information from the federation") + return errors.Wrap(err, "Failed to pull information from the federation") } if discoveryUrlStr := param.Federation_DiscoveryUrl.GetString(); discoveryUrlStr != "" { @@ -328,14 +331,14 @@ func CheckCacheXrootdEnv(exportPath string, server server_structs.XRootDServer, discoveryUrl.Host = discoveryUrl.Path discoveryUrl.Path = "" } else if discoveryUrl.Path != "" && discoveryUrl.Path != "/" { - return "", errors.New("The Federation.DiscoveryUrl's path is non-empty, ensure the Federation.DiscoveryUrl has the format :") + return errors.New("The Federation.DiscoveryUrl's path is non-empty, ensure the Federation.DiscoveryUrl has the format :") } 
discoveryUrl.Scheme = "pelican" discoveryUrl.Path = "" discoveryUrl.RawQuery = "" viper.Set("Cache.PSSOrigin", discoveryUrl.String()) } else { - return "", errors.Wrapf(err, "Failed to parse discovery URL %s", discoveryUrlStr) + return errors.Wrapf(err, "Failed to parse discovery URL %s", discoveryUrlStr) } } @@ -344,27 +347,27 @@ func CheckCacheXrootdEnv(exportPath string, server server_structs.XRootDServer, if err == nil { log.Debugln("Parsing director URL for 'pss.origin' setting:", directorUrlStr) if directorUrl.Path != "" && directorUrl.Path != "/" { - return "", errors.New("The Federation.DirectorUrl's path is non-empty, ensure the Federation.DirectorUrl has the format :") + return errors.New("The Federation.DirectorUrl's path is non-empty, ensure the Federation.DirectorUrl has the format :") } directorUrl.Scheme = "pelican" viper.Set("Cache.PSSOrigin", directorUrl.String()) } else { - return "", errors.Wrapf(err, "Failed to parse director URL %s", directorUrlStr) + return errors.Wrapf(err, "Failed to parse director URL %s", directorUrlStr) } } if viper.GetString("Cache.PSSOrigin") == "" { - return "", errors.New("One of Federation.DiscoveryUrl or Federation.DirectorUrl must be set to configure a cache") + return errors.New("One of Federation.DiscoveryUrl or Federation.DirectorUrl must be set to configure a cache") } if cacheServer, ok := server.(*cache.CacheServer); ok { err := WriteCacheScitokensConfig(cacheServer.GetNamespaceAds()) if err != nil { - return "", errors.Wrap(err, "Failed to create scitokens configuration for the cache") + return errors.Wrap(err, "Failed to create scitokens configuration for the cache") } } - return exportPath, nil + return nil } func CheckXrootdEnv(server server_structs.XRootDServer) error { @@ -446,7 +449,7 @@ func CheckXrootdEnv(server server_structs.XRootDServer) error { if server.GetServerType().IsEnabled(server_structs.OriginType) { err = CheckOriginXrootdEnv(exportPath, server, uid, gid, groupname) } else { - 
exportPath, err = CheckCacheXrootdEnv(exportPath, server, uid, gid) + err = CheckCacheXrootdEnv(server, uid, gid) } if err != nil { return err diff --git a/xrootd/xrootd_config_test.go b/xrootd/xrootd_config_test.go index 1e192451b..35d6f1e0e 100644 --- a/xrootd/xrootd_config_test.go +++ b/xrootd/xrootd_config_test.go @@ -38,6 +38,7 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/sync/errgroup" + "github.com/pelicanplatform/pelican/cache" "github.com/pelicanplatform/pelican/config" "github.com/pelicanplatform/pelican/origin" "github.com/pelicanplatform/pelican/param" @@ -581,50 +582,31 @@ func TestXrootDCacheConfig(t *testing.T) { assert.NotNil(t, configPath) }) - t.Run("TestCacheHTTPTLSRequiredPrefixCorrectConfig", func(t *testing.T) { - xrootd := xrootdTest{T: t} - xrootd.setup() - - // Set our config - viper.Set("Cache.X509ClientAuthenticationPrefixes", []string{"pref1", "pref2", "pref3"}) - - // Generate the xrootd config - configPath, err := ConfigXrootd(ctx, false) - require.NoError(t, err) - assert.NotNil(t, configPath) - - // Verify the output - file, err := os.Open(configPath) - assert.NoError(t, err) - defer file.Close() - - content, err := io.ReadAll(file) - assert.NoError(t, err) - assert.Contains(t, string(content), "http.tlsrequiredprefix pref1") - assert.Contains(t, string(content), "http.tlsrequiredprefix pref2") - assert.Contains(t, string(content), "http.tlsrequiredprefix pref3") - }) + t.Run("TestNestedDataMetaNamespace", func(t *testing.T) { + testDir := t.TempDir() + viper.Set("Cache.StorageLocation", testDir) + namespaceLocation := filepath.Join(testDir, "namespace") + viper.Set("Cache.NamespaceLocation", namespaceLocation) - t.Run("TestCacheAuthenticationPrefixes", func(t *testing.T) { - xrootd := xrootdTest{T: t} - xrootd.setup() - - // Set our config - viper.Set("Cache.X509AuthenticationPrefixes", []string{}) - - // Generate the xrootd config - configPath, err := ConfigXrootd(ctx, false) - require.NoError(t, err) - 
assert.NotNil(t, configPath) + cache := &cache.CacheServer{} + uid := os.Getuid() + gid := os.Getgid() - // Verify the output - file, err := os.Open(configPath) - assert.NoError(t, err) - defer file.Close() - - content, err := io.ReadAll(file) - assert.NoError(t, err) - assert.NotContains(t, string(content), "http.tlsrequiredprefix") + // Data location test + nestedDataLocation := filepath.Join(namespaceLocation, "data") + viper.Set("Cache.DataLocations", []string{nestedDataLocation}) + err := CheckCacheXrootdEnv(cache, uid, gid) + require.Error(t, err) + require.Contains(t, err.Error(), "Please ensure these directories are not nested.") + // Now set to a valid location so we can hit the meta error in the next part of the test + viper.Set("Cache.DataLocations", []string{filepath.Join(testDir, "data")}) + + // Meta location test + nestedMetaLocation := filepath.Join(namespaceLocation, "meta") + viper.Set("Cache.MetaLocations", []string{nestedMetaLocation}) + err = CheckCacheXrootdEnv(cache, uid, gid) + require.Error(t, err) + require.Contains(t, err.Error(), "Please ensure these directories are not nested.") }) }