Skip to content

Commit

Permalink
Merge pull request PelicanPlatform#1516 from turetske/scrub-dir-references
Browse files Browse the repository at this point in the history

Changed user facing references to directories to collections on origins
  • Loading branch information
turetske authored Aug 13, 2024
2 parents b2108b0 + 542585e commit eaf496b
Show file tree
Hide file tree
Showing 12 changed files with 47 additions and 47 deletions.
48 changes: 24 additions & 24 deletions client/handle_http.go
Original file line number Diff line number Diff line change
Expand Up @@ -2397,7 +2397,7 @@ func createWebDavClient(collectionsUrl *url.URL, token string, project string) (
return
}

// Walk a remote directory in a WebDAV server, emitting the files discovered
// Walk a remote collection in a WebDAV server, emitting the files discovered
func (te *TransferEngine) walkDirDownload(job *clientTransferJob, transfers []transferAttemptDetails, files chan *clientTransferFile, url *url.URL) error {
// Create the client to walk the filesystem
collUrl := job.job.dirResp.XPelNsHdr.CollectionsUrl
Expand All @@ -2412,7 +2412,7 @@ func (te *TransferEngine) walkDirDownload(job *clientTransferJob, transfers []tr

// Helper function for the `walkDirDownload`.
//
// Recursively walks through the remote server directory, emitting transfer files
// Recursively walks through the remote server collection, emitting transfer files
// for the engine to process.
func (te *TransferEngine) walkDirDownloadHelper(job *clientTransferJob, transfers []transferAttemptDetails, files chan *clientTransferFile, remotePath string, client *gowebdav.Client) error {
// Check for cancelation since the client does not respect the context
Expand All @@ -2421,7 +2421,7 @@ func (te *TransferEngine) walkDirDownloadHelper(job *clientTransferJob, transfer
}
infos, err := client.ReadDir(remotePath)
if err != nil {
return errors.Wrap(err, "failed to read remote directory")
return errors.Wrap(err, "failed to read remote collection")
}
localBase := strings.TrimPrefix(remotePath, job.job.remoteURL.Path)
for _, info := range infos {
Expand Down Expand Up @@ -2507,9 +2507,9 @@ func (te *TransferEngine) walkDirUpload(job *clientTransferJob, transfers []tran
return err
}

// This function performs the ls command by walking through the specified directory and printing the contents of the files
// This function performs the ls command by walking through the specified collections and printing the contents of the files
func listHttp(remoteObjectUrl *url.URL, dirResp server_structs.DirectorResponse, token string) (fileInfos []FileInfo, err error) {
// Get our directory listing host
// Get our collection listing host
collectionsUrl := dirResp.XPelNsHdr.CollectionsUrl
log.Debugln("Collections URL: ", collectionsUrl.String())

Expand All @@ -2528,14 +2528,14 @@ func listHttp(remoteObjectUrl *url.URL, dirResp server_structs.DirectorResponse,
if err != nil {
return nil, errors.Wrap(err, "failed to stat remote path")
}
// If the path leads to a file and not a directory, just add the filename
// If the path leads to a file and not a collection, just add the filename
if !info.IsDir() {
// NOTE: we implement our own FileInfo here because the one we get back from stat() does not have a .name field for some reason
file := FileInfo{
Name: remotePath,
Size: info.Size(),
ModTime: info.ModTime(),
IsDir: false,
Name: remotePath,
Size: info.Size(),
ModTime: info.ModTime(),
IsCollection: false,
}
fileInfos = append(fileInfos, file)
return fileInfos, nil
Expand All @@ -2545,17 +2545,17 @@ func listHttp(remoteObjectUrl *url.URL, dirResp server_structs.DirectorResponse,
return nil, errors.Errorf("405: object listings are not supported by the discovered origin")
}
// Otherwise, a different error occurred and we should return it
return nil, errors.Wrap(err, "failed to read remote directory")
return nil, errors.Wrap(err, "failed to read remote collection")
}

for _, info := range infos {
jPath, _ := url.JoinPath(remotePath, info.Name())
// Create a FileInfo for the file and append it to the slice
file := FileInfo{
Name: jPath,
Size: info.Size(),
ModTime: info.ModTime(),
IsDir: info.IsDir(),
Name: jPath,
Size: info.Size(),
ModTime: info.ModTime(),
IsCollection: info.IsDir(),
}
fileInfos = append(fileInfos, file)
}
Expand Down Expand Up @@ -2616,10 +2616,10 @@ func statHttp(dest *url.URL, dirResp server_structs.DirectorResponse, token stri
fsinfo, err := client.Stat(endpoint.Path)
if err == nil {
info = FileInfo{
Name: endpoint.Path,
Size: fsinfo.Size(),
IsDir: fsinfo.IsDir(),
ModTime: fsinfo.ModTime(),
Name: endpoint.Path,
Size: fsinfo.Size(),
IsCollection: fsinfo.IsDir(),
ModTime: fsinfo.ModTime(),
}
break
} else if gowebdav.IsErrCode(err, http.StatusMethodNotAllowed) {
Expand All @@ -2646,7 +2646,7 @@ func statHttp(dest *url.URL, dirResp server_structs.DirectorResponse, token stri
}

if info.Size == 0 {
if info.IsDir {
if info.IsCollection {
resultsChan <- statResults{info, nil}
}
err = errors.New("Stat response did not include a size")
Expand All @@ -2655,10 +2655,10 @@ func statHttp(dest *url.URL, dirResp server_structs.DirectorResponse, token stri
}

resultsChan <- statResults{FileInfo{
Name: endpoint.Path,
Size: info.Size,
IsDir: info.IsDir,
ModTime: info.ModTime,
Name: endpoint.Path,
Size: info.Size,
IsCollection: info.IsCollection,
ModTime: info.ModTime,
}, nil}

}(&destCopy)
Expand Down
8 changes: 4 additions & 4 deletions client/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,10 @@ var ObjectServersToTry int = 3
// NOTE: this was created to provide more flexibility to information on a file. The fs.FileInfo interface was causing some issues like not always returning a Name attribute
// ALSO NOTE: the fields are exported so they can be marshalled into JSON, it does not work otherwise
type FileInfo struct {
Name string
Size int64
ModTime time.Time
IsDir bool
Name string
Size int64
ModTime time.Time
IsCollection bool
}

// Determine the token name if it is embedded in the scheme, Condor-style
Expand Down
4 changes: 2 additions & 2 deletions cmd/object_copy.go
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ func init() {
flagSet := copyCmd.Flags()
flagSet.StringP("cache", "c", "", "Cache to use")
flagSet.StringP("token", "t", "", "Token file to use for transfer")
flagSet.BoolP("recursive", "r", false, "Recursively copy a directory. Forces methods to only be http to get the freshest directory contents")
flagSet.BoolP("recursive", "r", false, "Recursively copy a collection. Forces methods to only be http to get the freshest collection contents")
flagSet.StringP("cache-list-name", "n", "xroot", "(Deprecated) Cache list to use, currently either xroot or xroots; may be ignored")
flagSet.Lookup("cache-list-name").Hidden = true
// All the deprecated or hidden flags that are only relevant if we are in historical "stashcp mode"
Expand Down Expand Up @@ -158,7 +158,7 @@ func copyMain(cmd *cobra.Command, args []string) {
log.Errorln("Destination does not exist")
os.Exit(1)
} else if !destStat.IsDir() {
log.Errorln("Destination is not a directory")
log.Errorln("Destination is not a collection")
os.Exit(1)
}
}
Expand Down
2 changes: 1 addition & 1 deletion cmd/object_get.go
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ func init() {
flagSet := getCmd.Flags()
flagSet.StringP("cache", "c", "", "Cache to use")
flagSet.StringP("token", "t", "", "Token file to use for transfer")
flagSet.BoolP("recursive", "r", false, "Recursively download a directory. Forces methods to only be http to get the freshest directory contents")
flagSet.BoolP("recursive", "r", false, "Recursively download a collection. Forces methods to only be http to get the freshest collection contents")
flagSet.StringP("cache-list-name", "n", "xroot", "(Deprecated) Cache list to use, currently either xroot or xroots; may be ignored")
flagSet.Lookup("cache-list-name").Hidden = true
flagSet.String("caches", "", "A JSON file containing the list of caches")
Expand Down
10 changes: 5 additions & 5 deletions cmd/object_ls.go
Original file line number Diff line number Diff line change
Expand Up @@ -117,12 +117,12 @@ func listMain(cmd *cobra.Command, args []string) error {

filteredInfos := []client.FileInfo{}

// Filter by object or directory
// Filter by object or collection
for _, info := range fileInfos {
if collectionOnly && !info.IsDir {
if collectionOnly && !info.IsCollection {
continue
}
if objectOnly && info.IsDir {
if objectOnly && info.IsCollection {
continue
}
filteredInfos = append(filteredInfos, info)
Expand All @@ -136,7 +136,7 @@ func listMain(cmd *cobra.Command, args []string) error {
if asJSON {
jsonData, err := json.Marshal(filteredInfos)
if err != nil {
return errors.Errorf("failed to marshal object/directory info to JSON format: %v", err)
return errors.Errorf("failed to marshal object/collection info to JSON format: %v", err)
}
fmt.Println(string(jsonData))
return nil
Expand All @@ -155,7 +155,7 @@ func listMain(cmd *cobra.Command, args []string) error {
// Convert the FileInfo to JSON and print it
jsonData, err := json.Marshal(jsonInfo)
if err != nil {
return errors.Errorf("failed to marshal object/directory info to JSON format: %v", err)
return errors.Errorf("failed to marshal object/collection info to JSON format: %v", err)
}
fmt.Println(string(jsonData))
} else {
Expand Down
2 changes: 1 addition & 1 deletion cmd/object_put.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ var (
func init() {
flagSet := putCmd.Flags()
flagSet.StringP("token", "t", "", "Token file to use for transfer")
flagSet.BoolP("recursive", "r", false, "Recursively upload a directory. Forces methods to only be http to get the freshest directory contents")
flagSet.BoolP("recursive", "r", false, "Recursively upload a collection. Forces methods to only be http to get the freshest collection contents")
objectCmd.AddCommand(putCmd)
}

Expand Down
4 changes: 2 additions & 2 deletions cmd/object_stat.go
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ func statMain(cmd *cobra.Command, args []string) {
// Print our stat info in JSON format:
jsonData, err := json.Marshal(statInfo)
if err != nil {
log.Errorf("Failed to parse object/directory stat info to JSON format: %v", err)
log.Errorf("Failed to parse object/collection stat info to JSON format: %v", err)
os.Exit(1)
}
fmt.Println(string(jsonData))
Expand All @@ -107,7 +107,7 @@ func statMain(cmd *cobra.Command, args []string) {
fmt.Println("Name:", statInfo.Name)
fmt.Println("Size:", statInfo.Size)
fmt.Println("ModTime:", statInfo.ModTime)
fmt.Println("IsDir:", statInfo.IsDir)
fmt.Println("IsCollection:", statInfo.IsCollection)
return
}
}
2 changes: 1 addition & 1 deletion cmd/plugin_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -755,7 +755,7 @@ func TestPluginRecursiveDownload(t *testing.T) {
results := make(chan *classads.ClassAd, 5)
err = runPluginWorker(fed.Ctx, false, workChan, results)
assert.Error(t, err)
assert.Contains(t, err.Error(), "failed to read remote directory: PROPFIND /test/test/test.txt/: 500")
assert.Contains(t, err.Error(), "failed to read remote collection: PROPFIND /test/test/test.txt/: 500")
})

t.Run("TestRecursiveFailureDirNotFound", func(t *testing.T) {
Expand Down
2 changes: 1 addition & 1 deletion director/director.go
Original file line number Diff line number Diff line change
Expand Up @@ -744,7 +744,7 @@ func redirectToOrigin(ginCtx *gin.Context) {
}
ginCtx.JSON(http.StatusMethodNotAllowed, server_structs.SimpleApiResp{
Status: server_structs.RespFailed,
Msg: "No origins on specified endpoint allow directory listings",
Msg: "No origins on specified endpoint allow collection listings",
})
}

Expand Down
8 changes: 4 additions & 4 deletions docs/pages/getting-data-with-pelican/client.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ pelican object get pelican://<federation-url></namespace-prefix></path/to/file/f
To upload, specify a directory and Pelican will compress it for you:

```bash
pelican object put <local/path/to/directory> pelican://<federation-url></namespace-prefix></path/to/directory.tar.gz>?pack=tar.gz
pelican object put <local/path/to/directory> pelican://<federation-url></namespace-prefix></path/to/collection.tar.gz>?pack=tar.gz
```

Pelican accepts the following values for the `pack` query:
Expand All @@ -192,16 +192,16 @@ Pelican accepts the following values for the `pack` query:
- For uploading, create the object in the specified format (`tar`, `tar.gz`, `tar.xz`, `zip`, respectively).

### Recursive Downloads and Uploads with the `?recursive` Query
The `?recursive` query can be utilized if the desired remote object is a directory. When this query is enabled, it indicates to Pelican that all sub paths at the level of the provided namespace should be copied recursively. To use this query, run:
The `?recursive` query can be utilized if the desired remote object is a collection. When this query is enabled, it indicates to Pelican that all sub paths at the level of the provided namespace should be copied recursively. To use this query, run:

```bash
pelican object get pelican://<federation-url></namespace-prefix></path/to/directory>?recursive <local/path/to/file>
pelican object get pelican://<federation-url></namespace-prefix></path/to/collection>?recursive <local/path/to/file>
```

To upload, you can run something similar but with an `object put`:

```bash
pelican object put <local/path/to/directory> pelican://<federation-url></namespace-prefix></path/to/directory>?recursive
pelican object put <local/path/to/directory> pelican://<federation-url></namespace-prefix></path/to/collection>?recursive
```

>**Note:** This query functions the same as specifying the `-r` flag described below.
Expand Down
2 changes: 1 addition & 1 deletion docs/parameters.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -803,7 +803,7 @@ name: Origin.EnableDirListing
description: |+
[Deprecated] Origin.EnableDirListing is being deprecated and will be removed in a future release. It is replaced by Origin.EnableListings.
Allows the origin to enable directory listings. Needs to be enabled for recursive
Allows the origin to enable collection listings. Needs to be enabled for recursive
downloads to work properly and for directories to be visible.
type: bool
default: false
Expand Down
2 changes: 1 addition & 1 deletion server_utils/origin.go
Original file line number Diff line number Diff line change
Expand Up @@ -662,7 +662,7 @@ func CheckOriginSentinelLocations(exports []OriginExport) (ok bool, err error) {
fullPath := filepath.Join(export.StoragePrefix, sentinelPath)
_, err := os.Stat(fullPath)
if err != nil {
return false, errors.Wrapf(err, "fail to open SentinelLocation %s for StoragePrefix %s. Directory check failed", export.SentinelLocation, export.StoragePrefix)
return false, errors.Wrapf(err, "fail to open SentinelLocation %s for StoragePrefix %s. Collection check failed", export.SentinelLocation, export.StoragePrefix)
}
}
}
Expand Down

0 comments on commit eaf496b

Please sign in to comment.