diff --git a/.github/workflows/analysis.yml b/.github/workflows/analysis.yml index 1bc8089..4820c18 100644 --- a/.github/workflows/analysis.yml +++ b/.github/workflows/analysis.yml @@ -14,9 +14,9 @@ jobs: uses: actions/checkout@v4 - name: Install Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v5 with: - go-version: 1.20.x + go-version: 1.22.x - name: Static Code Analysis uses: golangci/golangci-lint-action@v3 @@ -32,9 +32,9 @@ jobs: uses: actions/checkout@v4 - name: Install Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v5 with: - go-version: 1.20.x + go-version: 1.22.x - name: Run Gosec Security Scanner uses: securego/gosec@master diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 7fac008..e5aa357 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -28,7 +28,7 @@ jobs: - name: Setup Go uses: actions/setup-go@v5 with: - go-version: 1.20.x + go-version: 1.22.x cache: false - name: Tests diff --git a/crypto/checksum.go b/crypto/checksum.go new file mode 100644 index 0000000..9f08666 --- /dev/null +++ b/crypto/checksum.go @@ -0,0 +1,179 @@ +package crypto + +import ( + "bufio" + "regexp" + + // #nosec G501 -- md5 is supported by Artifactory. + "crypto/md5" + // #nosec G505 -- sha1 is supported by Artifactory. + "crypto/sha1" + "fmt" + "hash" + "io" + "os" + + ioutils "github.com/jfrog/gofrog/io" + "github.com/minio/sha256-simd" +) + +type Algorithm int + +const ( + MD5 Algorithm = iota + SHA1 + SHA256 +) + +var algorithmFunc = map[Algorithm]func() hash.Hash{ + // Go native crypto algorithms: + MD5: md5.New, + SHA1: sha1.New, + // sha256-simd algorithm: + SHA256: sha256.New, +} + +type Checksum struct { + Sha1 string `json:"sha1,omitempty"` + Md5 string `json:"md5,omitempty"` + Sha256 string `json:"sha256,omitempty"` +} + +func (c *Checksum) IsEmpty() bool { + return c.Md5 == "" && c.Sha1 == "" && c.Sha256 == "" +} + +// If the 'other' checksum matches the current one, return true. +// 'other' checksum may contain regex values for sha1, sha256 and md5. +func (c *Checksum) IsEqual(other Checksum) (bool, error) { + match, err := regexp.MatchString(other.Md5, c.Md5) + if !match || err != nil { + return false, err + } + match, err = regexp.MatchString(other.Sha1, c.Sha1) + if !match || err != nil { + return false, err + } + match, err = regexp.MatchString(other.Sha256, c.Sha256) + if !match || err != nil { + return false, err + } + + return true, nil +} + +func GetFileChecksums(filePath string, checksumType ...Algorithm) (checksums map[Algorithm]string, err error) { + file, err := os.Open(filePath) + if err != nil { + return + } + defer ioutils.Close(file, &err) + return CalcChecksums(file, checksumType...) +} + +// CalcChecksums calculates all hashes at once using AsyncMultiWriter. The file is therefore read only once. +func CalcChecksums(reader io.Reader, checksumType ...Algorithm) (map[Algorithm]string, error) { + hashes, err := calcChecksums(reader, checksumType...) + if err != nil { + return nil, err + } + results := sumResults(hashes) + return results, nil +} + +// CalcChecksumsBytes calculates hashes like `CalcChecksums`, returns result as bytes +func CalcChecksumsBytes(reader io.Reader, checksumType ...Algorithm) (map[Algorithm][]byte, error) { + hashes, err := calcChecksums(reader, checksumType...) 
+ if err != nil { + return nil, err + } + results := sumResultsBytes(hashes) + return results, nil +} + +func calcChecksums(reader io.Reader, checksumType ...Algorithm) (map[Algorithm]hash.Hash, error) { + hashes := getChecksumByAlgorithm(checksumType...) + var multiWriter io.Writer + pageSize := os.Getpagesize() + sizedReader := bufio.NewReaderSize(reader, pageSize) + var hashWriter []io.Writer + for _, v := range hashes { + hashWriter = append(hashWriter, v) + } + multiWriter = ioutils.AsyncMultiWriter(pageSize, hashWriter...) + _, err := io.Copy(multiWriter, sizedReader) + if err != nil { + return nil, err + } + return hashes, nil +} + +func sumResults(hashes map[Algorithm]hash.Hash) map[Algorithm]string { + results := map[Algorithm]string{} + for k, v := range hashes { + results[k] = fmt.Sprintf("%x", v.Sum(nil)) + } + return results +} + +func sumResultsBytes(hashes map[Algorithm]hash.Hash) map[Algorithm][]byte { + results := map[Algorithm][]byte{} + for k, v := range hashes { + results[k] = v.Sum(nil) + } + return results +} + +func getChecksumByAlgorithm(checksumType ...Algorithm) map[Algorithm]hash.Hash { + hashes := map[Algorithm]hash.Hash{} + if len(checksumType) == 0 { + for k, v := range algorithmFunc { + hashes[k] = v() + } + return hashes + } + + for _, v := range checksumType { + hashes[v] = algorithmFunc[v]() + } + return hashes +} + +func CalcChecksumDetails(filePath string) (checksum Checksum, err error) { + file, err := os.Open(filePath) + if err != nil { + return + } + defer ioutils.Close(file, &err) + + checksums, err := CalcChecksums(file) + if err != nil { + return Checksum{}, err + } + checksum = Checksum{Md5: checksums[MD5], Sha1: checksums[SHA1], Sha256: checksums[SHA256]} + return +} + +type FileDetails struct { + Checksum Checksum + Size int64 +} + +func GetFileDetails(filePath string, includeChecksums bool) (details *FileDetails, err error) { + details = new(FileDetails) + if includeChecksums { + details.Checksum, err = CalcChecksumDetails(filePath) + if err != nil { + return + } + } else { + details.Checksum = Checksum{} + } + + fileInfo, err := os.Stat(filePath) + if err != nil { + return + } + details.Size = fileInfo.Size() + return +} diff --git a/crypto/checksum_test.go b/crypto/checksum_test.go new file mode 100644 index 0000000..554e963 --- /dev/null +++ b/crypto/checksum_test.go @@ -0,0 +1,42 @@ +package crypto + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +const ( + fileContent = "Why did the robot bring a ladder to the bar? It heard the drinks were on the house." 
+ expectedMd5 = "70bd6370a86813f2504020281e4a2e2e" + expectedSha1 = "8c3578ac814c9f02803001a5d3e5d78a7fd0f9cc" + expectedSha256 = "093d901b28a59f7d95921f3f4fb97a03fe7a1cf8670507ffb1d6f9a01b3e890a" +) + +func TestGetFileChecksums(t *testing.T) { + // Create a temporary file + tempFile, err := os.CreateTemp("", "TestGetFileChecksums") + assert.NoError(t, err) + defer func() { + assert.NoError(t, tempFile.Close()) + assert.NoError(t, os.Remove(tempFile.Name())) + }() + + // Write something to the file + _, err = tempFile.Write([]byte(fileContent)) + assert.NoError(t, err) + + // Calculate only sha1 and match + checksums, err := GetFileChecksums(tempFile.Name(), SHA1) + assert.NoError(t, err) + assert.Len(t, checksums, 1) + assert.Equal(t, expectedSha1, checksums[SHA1]) + + // Calculate md5, sha1 and sha256 checksums and match + checksums, err = GetFileChecksums(tempFile.Name()) + assert.NoError(t, err) + assert.Equal(t, expectedMd5, checksums[MD5]) + assert.Equal(t, expectedSha1, checksums[SHA1]) + assert.Equal(t, expectedSha256, checksums[SHA256]) +} diff --git a/go.mod b/go.mod index 6118a40..3db1f26 100644 --- a/go.mod +++ b/go.mod @@ -1,12 +1,13 @@ module github.com/jfrog/gofrog -go 1.20 +go 1.22 require ( github.com/jfrog/archiver/v3 v3.6.0 + github.com/minio/sha256-simd v1.0.1 github.com/pkg/errors v0.9.1 github.com/schollz/progressbar/v3 v3.14.2 - github.com/stretchr/testify v1.8.4 + github.com/stretchr/testify v1.9.0 ) require ( @@ -15,6 +16,7 @@ require ( github.com/dsnet/compress v0.0.1 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/klauspost/compress v1.17.4 // indirect + github.com/klauspost/cpuid/v2 v2.2.3 // indirect github.com/klauspost/pgzip v1.2.6 // indirect github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect github.com/nwaples/rardecode v1.1.3 // indirect diff --git a/go.sum b/go.sum index 8ce35d4..e8a7206 100644 --- a/go.sum +++ b/go.sum @@ -15,9 +15,13 @@ github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0 github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= +github.com/klauspost/cpuid/v2 v2.2.3 h1:sxCkb+qR91z4vsqw4vGGZlDgPz3G7gjaLyK3V8y70BU= +github.com/klauspost/cpuid/v2 v2.2.3/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU= github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM= +github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8= github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ= github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw= github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc= @@ -34,8 +38,8 @@ github.com/schollz/progressbar/v3 v3.14.2 h1:EducH6uNLIWsr560zSV1KrTeUb/wZGAHqyM github.com/schollz/progressbar/v3 v3.14.2/go.mod h1:aQAZQnhF4JGFtRJiw/eobaXpsqpVQAftEQ+hLGXaRc4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify 
v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
-github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
-github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
 github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8=
 github.com/ulikunitz/xz v0.5.11 h1:kpFauv27b6ynzBNT/Xy+1k+fK4WswhN/6PN5WhFAGw8=
 github.com/ulikunitz/xz v0.5.11/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
@@ -43,6 +47,7 @@ github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofm
 github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
 golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
 golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
 golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
diff --git a/io/fileutils.go b/io/fileutils.go
index 78e3823..59c467e 100644
--- a/io/fileutils.go
+++ b/io/fileutils.go
@@ -3,16 +3,29 @@ package io
 import (
 	"bufio"
 	cr "crypto/rand"
+	"encoding/json"
 	"errors"
 	"fmt"
 	"io"
 	"math/rand"
+	"net/http"
 	"os"
+	"path"
 	"path/filepath"
+	"slices"
 	"sort"
+	"strconv"
+	"strings"
 	"time"
 )
 
+const (
+	tempDirPrefix = "temp-"
+
+	// Max temp file age in hours
+	maxFileAge = 24.0
+)
+
 type RandFile struct {
 	*os.File
 	Info os.FileInfo
@@ -205,13 +218,31 @@ func readDirNames(dirname string) ([]string, error) {
 	return names, nil
 }
 
-func IsPathSymlink(path string) bool {
-	f, _ := os.Lstat(path)
-	return f != nil && IsFileSymlink(f)
+// Close the reader/writer and append the error to the given error.
+func Close(closer io.Closer, err *error) {
+	var closeErr error
+	if closeErr = closer.Close(); closeErr == nil {
+		return
+	}
+
+	closeErr = fmt.Errorf("failed to close %T: %w", closer, closeErr)
+	if err != nil {
+		*err = errors.Join(*err, closeErr)
+	}
 }
 
-func IsFileSymlink(file os.FileInfo) bool {
-	return file.Mode()&os.ModeSymlink != 0
+// Check if path points at a file.
+// If path points at a symlink and `followSymlink == false`,
+// the function will return `true` regardless of the symlink target.
+func IsFileExists(path string, followSymlink bool) (bool, error) {
+	fileInfo, err := GetFileInfo(path, followSymlink)
+	if err != nil {
+		if os.IsNotExist(err) { // If the path doesn't exist, don't return an error
+			return false, nil
+		}
+		return false, err
+	}
+	return !fileInfo.IsDir(), nil
 }
 
 // Check if path points at a directory.
@@ -240,15 +271,501 @@ func GetFileInfo(path string, followSymlink bool) (fileInfo os.FileInfo, err err
 	return fileInfo, err
 }
 
-// Close the reader/writer and append the error to the given error.
-func Close(closer io.Closer, err *error) {
-	var closeErr error
-	if closeErr = closer.Close(); closeErr == nil {
+// Move directory content from one path to another.
+func MoveDir(fromPath, toPath string) error {
+	err := CreateDirIfNotExist(toPath)
+	if err != nil {
+		return err
+	}
+
+	files, err := ListFiles(fromPath, true)
+	if err != nil {
+		return err
+	}
+
+	for _, v := range files {
+		dir, err := IsDirExists(v, true)
+		if err != nil {
+			return err
+		}
+
+		if dir {
+			toPath := toPath + GetFileSeparator() + filepath.Base(v)
+			err := MoveDir(v, toPath)
+			if err != nil {
+				return err
+			}
+			continue
+		}
+		err = MoveFile(v, filepath.Join(toPath, filepath.Base(v)))
+		if err != nil {
+			return err
+		}
+	}
+	return err
+}
+
+// GoLang: os.Rename() gives the error "invalid cross-device link" for Docker containers with Volumes.
+// MoveFile(source, destination) works when moving a file between folders.
+// Therefore, we are using our own implementation (MoveFile) in order to rename files.
+func MoveFile(sourcePath, destPath string) (err error) {
+	inputFileOpen := true
+	var inputFile *os.File
+	inputFile, err = os.Open(sourcePath)
+	if err != nil {
+		return
+	}
+	defer func() {
+		if inputFileOpen {
+			err = errors.Join(err, inputFile.Close())
+		}
+	}()
+	inputFileInfo, err := inputFile.Stat()
+	if err != nil {
 		return
 	}
 
-	closeErr = fmt.Errorf("failed to close %T: %w", closer, closeErr)
+	var outputFile *os.File
+	outputFile, err = os.Create(destPath)
 	if err != nil {
-		*err = errors.Join(*err, closeErr)
+		return
+	}
+	defer func() {
+		err = errors.Join(err, outputFile.Close())
+	}()
+
+	_, err = io.Copy(outputFile, inputFile)
+	if err != nil {
+		return
+	}
+	err = os.Chmod(destPath, inputFileInfo.Mode())
+	if err != nil {
+		return
+	}
+
+	// The copy was successful, so now delete the original file
+	err = inputFile.Close()
+	if err != nil {
+		return
+	}
+	inputFileOpen = false
+	err = os.Remove(sourcePath)
+	return
+}
+
+// Return the list of files in the specified path. Directories are included only if includeDirs is true.
+func ListFiles(path string, includeDirs bool) ([]string, error) {
+	sep := GetFileSeparator()
+	if !strings.HasSuffix(path, sep) {
+		path += sep
+	}
+	fileList := []string{}
+	files, _ := os.ReadDir(path)
+	path = strings.TrimPrefix(path, "."+sep)
+
+	for _, f := range files {
+		filePath := path + f.Name()
+		exists, err := IsFileExists(filePath, false)
+		if err != nil {
+			return nil, err
+		}
+		if exists || IsPathSymlink(filePath) {
+			fileList = append(fileList, filePath)
+		} else if includeDirs {
+			isDir, err := IsDirExists(filePath, false)
+			if err != nil {
+				return nil, err
+			}
+			if isDir {
+				fileList = append(fileList, filePath)
+			}
+		}
+	}
+	return fileList, nil
+}
+
+// Return all files in the specified path that satisfy the filter func. Not recursive.
+func ListFilesByFilterFunc(path string, filterFunc func(filePath string) (bool, error)) ([]string, error) {
+	sep := GetFileSeparator()
+	if !strings.HasSuffix(path, sep) {
+		path += sep
+	}
+	var fileList []string
+	files, _ := os.ReadDir(path)
+	path = strings.TrimPrefix(path, "."+sep)
+
+	for _, f := range files {
+		filePath := path + f.Name()
+		satisfy, err := filterFunc(filePath)
+		if err != nil {
+			return nil, err
+		}
+		if !satisfy {
+			continue
+		}
+		exists, err := IsFileExists(filePath, false)
+		if err != nil {
+			return nil, err
+		}
+		if exists {
+			fileList = append(fileList, filePath)
+			continue
+		}
+
+		// Checks if the filepath is a symlink.
+		if IsPathSymlink(filePath) {
+			// Gets the file info of the symlink.
+			file, err := GetFileInfo(filePath, false)
+			if err != nil {
+				return nil, err
+			}
+			// Checks if the symlink is a file.
+			if !file.IsDir() {
+				fileList = append(fileList, filePath)
+			}
+		}
+	}
+	return fileList, nil
+}
+
+func DownloadFile(downloadTo string, fromUrl string) (err error) {
+	// Get the data
+	httpClient := &http.Client{}
+	req, err := http.NewRequest(http.MethodGet, fromUrl, nil)
+	if err != nil {
+		return err
+	}
+
+	resp, err := httpClient.Do(req)
+	if err != nil {
+		return err
+	}
+	defer func() {
+		err = errors.Join(err, resp.Body.Close())
+	}()
+	if resp.StatusCode != http.StatusOK {
+		return fmt.Errorf("download failed. status code: %s", resp.Status)
+	}
+	// Create the file
+	var out *os.File
+	out, err = os.Create(downloadTo)
+	if err != nil {
+		return
+	}
+	defer func() {
+		err = errors.Join(err, out.Close())
+	}()
+	// Write the body to file
+	_, err = io.Copy(out, resp.Body)
+	return
+}
+
+func DoubleWinPathSeparator(filePath string) string {
+	return strings.ReplaceAll(filePath, "\\", "\\\\")
+}
+
+// IsPathExists checks if a path exists.
+func IsPathExists(path string) bool {
+	_, err := os.Stat(path)
+	return !os.IsNotExist(err)
+}
+
+func GetFileContentAndInfo(filePath string) (fileContent []byte, fileInfo os.FileInfo, err error) {
+	fileInfo, err = os.Stat(filePath)
+	if err != nil {
+		return
+	}
+	fileContent, err = os.ReadFile(filePath)
+	return
+}
+
+// CreateTempDir creates a temporary directory and returns its path.
+func CreateTempDir() (string, error) {
+	tempDirBase := os.TempDir()
+	timestamp := strconv.FormatInt(time.Now().Unix(), 10)
+	return os.MkdirTemp(tempDirBase, tempDirPrefix+timestamp+"-*")
+}
+
+func RemoveTempDir(dirPath string) error {
+	exists, err := IsDirExists(dirPath, false)
+	if err != nil {
+		return err
+	}
+	if !exists {
+		return nil
+	}
+	err = os.RemoveAll(dirPath)
+	if err == nil {
+		return nil
+	}
+	// Sometimes removing the directory fails (in Windows) because it's locked by another process.
+	// That's a known issue, but its cause is unknown (golang.org/issue/30789).
+	// In this case, we'll only remove the contents of the directory, and let CleanOldDirs() remove the directory itself at a later time.
+	return removeDirContents(dirPath)
+}
+
+// removeDirContents removes the contents of the directory, without removing the directory itself.
+// If it encounters an error before removing all the files, it stops and returns that error.
+func removeDirContents(dirPath string) (err error) {
+	d, err := os.Open(dirPath)
+	if err != nil {
+		return
+	}
+	defer func() {
+		err = errors.Join(err, d.Close())
+	}()
+	names, err := d.Readdirnames(-1)
+	if err != nil {
+		return
+	}
+	for _, name := range names {
+		err = os.RemoveAll(filepath.Join(dirPath, name))
+		if err != nil {
+			return
+		}
+	}
+	return
+}
+
+// Old runs/tests may leave junk in the temp dir.
+// Each temp file/dir is named with prefix+timestamp; search for all temp files/dirs that match the common prefix and validate their timestamp.
+func CleanOldDirs() error {
+	// Get all files in the temp dir
+	tempDirBase := os.TempDir()
+	files, err := os.ReadDir(tempDirBase)
+	if err != nil {
+		return err
+	}
+	now := time.Now()
+	// Search for files/dirs that match the template.
+	for _, file := range files {
+		if strings.HasPrefix(file.Name(), tempDirPrefix) {
+			timeStamp, err := extractTimestamp(file.Name())
+			if err != nil {
+				return err
+			}
+			// Delete old file/dirs.
+			if now.Sub(timeStamp).Hours() > maxFileAge {
+				if err := os.RemoveAll(path.Join(tempDirBase, file.Name())); err != nil {
+					return err
+				}
+			}
+		}
+	}
+	return nil
+}
+
+func extractTimestamp(item string) (time.Time, error) {
+	// Get timestamp from file/dir.
+	endTimestampIndex := strings.LastIndex(item, "-")
+	beginningTimestampIndex := strings.LastIndex(item[:endTimestampIndex], "-")
+	timestampStr := item[beginningTimestampIndex+1 : endTimestampIndex]
+	// Convert to int.
+	timestampInt, err := strconv.ParseInt(timestampStr, 10, 64)
+	if err != nil {
+		return time.Time{}, err
+	}
+	// Convert to time type.
+	return time.Unix(timestampInt, 0), nil
+}
+
+// FindFileInDirAndParents looks for a file named fileName in dirPath and its parents, and returns the path of the directory where it was found.
+// dirPath must be a full path.
+func FindFileInDirAndParents(dirPath, fileName string) (string, error) {
+	// Create a map to store all paths visited, to avoid running in circles.
+	visitedPaths := make(map[string]bool)
+	currDir := dirPath
+	for {
+		// If the file is found in the current directory, return the path.
+		exists, err := IsFileExists(filepath.Join(currDir, fileName), true)
+		if err != nil || exists {
+			return currDir, err
+		}
+
+		// Save this path.
+		visitedPaths[currDir] = true
+
+		// CD to the parent directory.
+		currDir = filepath.Dir(currDir)
+
+		// If we already visited this directory, it means that there's a loop, and we can stop.
+		if visitedPaths[currDir] {
+			return "", fmt.Errorf("could not find the %s file of the project", fileName)
+		}
+	}
+}
+
+// Copy directory content from one path to another.
+// includeDirs - if true, also copy directories present in the src folder.
+// excludeNames - Skip files/dirs in the src folder that match names in the provided slice. Only the first layer (directly in the src folder) is excluded.
+func CopyDir(fromPath, toPath string, includeDirs bool, excludeNames []string) error {
+	err := CreateDirIfNotExist(toPath)
+	if err != nil {
+		return err
+	}
+
+	files, err := ListFiles(fromPath, includeDirs)
+	if err != nil {
+		return err
+	}
+
+	for _, file := range files {
+		fileName := filepath.Base(file)
+		// Skip if excluded
+		if slices.Contains(excludeNames, fileName) {
+			continue
+		}
+		var isDir bool
+		isDir, err = IsDirExists(file, false)
+		if err != nil {
+			return err
+		}
+
+		if isDir {
+			err = CopyDir(file, filepath.Join(toPath, fileName), true, nil)
+		} else {
+			err = CopyFile(toPath, file)
+		}
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func CopyFile(dst, src string) (err error) {
+	srcFile, err := os.Open(src)
+	if err != nil {
+		return
+	}
+	defer func() {
+		err = errors.Join(err, srcFile.Close())
+	}()
+	srcInfo, err := srcFile.Stat()
+	if err != nil {
+		return
+	}
+	fileName, _ := GetFileAndDirFromPath(src)
+	dstPath, err := CreateFilePath(dst, fileName)
+	if err != nil {
+		return
+	}
+	dstFile, err := os.OpenFile(dstPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, srcInfo.Mode())
+	if err != nil {
+		return
+	}
+	defer func() {
+		err = errors.Join(err, dstFile.Close())
+	}()
+	_, err = io.Copy(dstFile, srcFile)
+	return
+}
+
+func GetFileSeparator() string {
+	return string(os.PathSeparator)
+}
+
+// Return the file's name and dir of a given path by finding the index of the last separator in the path.
+// Supported separators: "/", "\\" and "\\\\".
+func GetFileAndDirFromPath(path string) (fileName, dir string) {
+	index1 := strings.LastIndex(path, "/")
+	index2 := strings.LastIndex(path, "\\")
+	var index int
+	offset := 0
+	if index1 >= index2 {
+		index = index1
+	} else {
+		index = index2
+		// Check if the last separator is "\\\\" or "\\".
+		index3 := strings.LastIndex(path, "\\\\")
+		if index3 != -1 && index2-index3 == 1 {
+			offset = 1
+		}
+	}
+	if index != -1 {
+		fileName = path[index+1:]
+		// If the last separator is "\\\\", index will contain the index of the last "\\";
+		// to get the dir path (without separator suffix) we will use the offset's value.
+		dir = path[:index-offset]
+		return
+	}
+	fileName = path
+	dir = ""
+	return
+}
+
+func CreateFilePath(localPath, fileName string) (string, error) {
+	if localPath != "" {
+		err := os.MkdirAll(localPath, 0750)
+		if err != nil {
+			return "", err
+		}
+		fileName = filepath.Join(localPath, fileName)
+	}
+	return fileName, nil
+}
+
+func CreateDirIfNotExist(path string) error {
+	exist, err := IsDirExists(path, false)
+	if exist || err != nil {
+		return err
+	}
+	_, err = CreateFilePath(path, "")
+	return err
+}
+
+func IsPathSymlink(path string) bool {
+	f, _ := os.Lstat(path)
+	return f != nil && IsFileSymlink(f)
+}
+
+func IsFileSymlink(file os.FileInfo) bool {
+	return file.Mode()&os.ModeSymlink != 0
+}
+
+// Parses the JSON-encoded data and stores the result in the value pointed to by 'loadTarget'.
+// filePath - Path to json file.
+// loadTarget - Pointer to a struct.
+func Unmarshal(filePath string, loadTarget interface{}) (err error) {
+	var jsonFile *os.File
+	jsonFile, err = os.Open(filePath)
+	if err != nil {
+		return
+	}
+	defer func() {
+		err = errors.Join(err, jsonFile.Close())
+	}()
+	var byteValue []byte
+	byteValue, err = io.ReadAll(jsonFile)
+	if err != nil {
+		return
+	}
+	err = json.Unmarshal(byteValue, &loadTarget)
+	return
+}
+
+// ReadNLines reads up to 'total' number of lines separated by \n, stripping the trailing '\n' from each line.
+// Reading stops early at EOF; any other read error is returned.
+func ReadNLines(path string, total int) (lines []string, err error) { + reader, err := os.Open(path) + if err != nil { + return + } + defer func() { + err = errors.Join(err, reader.Close()) + }() + bufferedReader := bufio.NewReader(reader) + for i := 0; i < total; i++ { + var line []byte + line, _, err = bufferedReader.ReadLine() + lines = append(lines, string(line)) + if err == io.EOF { + err = nil + break + } + if err != nil { + return + } } + return } diff --git a/io/fileutils_test.go b/io/fileutils_test.go index 398aaba..3e356b7 100644 --- a/io/fileutils_test.go +++ b/io/fileutils_test.go @@ -31,3 +31,65 @@ func TestClose(t *testing.T) { Close(f, nilErr) assert.NotNil(t, nilErr) } + +func TestFindFileInDirAndParents(t *testing.T) { + const goModFileName = "go.mod" + wd, err := os.Getwd() + assert.NoError(t, err) + projectRoot := filepath.Join(wd, "testdata", "project") + + // Find the file in the current directory + root, err := FindFileInDirAndParents(projectRoot, goModFileName) + assert.NoError(t, err) + assert.Equal(t, projectRoot, root) + + // Find the file in the current directory's parent + projectSubDirectory := filepath.Join(projectRoot, "dir") + root, err = FindFileInDirAndParents(projectSubDirectory, goModFileName) + assert.NoError(t, err) + assert.Equal(t, projectRoot, root) + + // Look for a file that doesn't exist + _, err = FindFileInDirAndParents(projectRoot, "notexist") + assert.Error(t, err) +} + +func TestReadNLines(t *testing.T) { + wd, err := os.Getwd() + assert.NoError(t, err) + path := filepath.Join(wd, "testdata", "oneline") + lines, err := ReadNLines(path, 2) + assert.NoError(t, err) + assert.Len(t, lines, 1) + assert.True(t, strings.HasPrefix(lines[0], "")) + + path = filepath.Join(wd, "testdata", "twolines") + lines, err = ReadNLines(path, 2) + assert.NoError(t, err) + assert.Len(t, lines, 2) + assert.True(t, strings.HasPrefix(lines[1], "781")) + assert.True(t, strings.HasSuffix(lines[1], ":true}}}")) + + path = filepath.Join(wd, "testdata", "threelines") + lines, err = ReadNLines(path, 2) + assert.NoError(t, err) + assert.Len(t, lines, 2) + assert.True(t, strings.HasPrefix(lines[1], "781")) + assert.True(t, strings.HasSuffix(lines[1], ":true}}}")) +} + +func TestCreateTempDir(t *testing.T) { + tempDir, err := CreateTempDir() + assert.NoError(t, err) + + assert.DirExists(t, tempDir) + + defer func() { + // Check that a timestamp can be extracted from the temp dir name + timestamp, err := extractTimestamp(tempDir) + assert.NoError(t, err) + assert.False(t, timestamp.IsZero()) + + assert.NoError(t, os.RemoveAll(tempDir)) + }() +} diff --git a/io/multiwriter.go b/io/multiwriter.go index 06597f2..d252dd8 100644 --- a/io/multiwriter.go +++ b/io/multiwriter.go @@ -7,7 +7,7 @@ import ( "golang.org/x/sync/errgroup" ) -var ErrShortWrite = errors.New("The number of bytes written is less than the length of the input") +var ErrShortWrite = errors.New("the number of bytes written is less than the length of the input") type asyncMultiWriter struct { writers []io.Writer diff --git a/io/testdata/oneline b/io/testdata/oneline new file mode 100644 index 0000000..e69de29 diff --git a/io/testdata/project/dir/f b/io/testdata/project/dir/f new file mode 100644 index 0000000..e69de29 diff --git a/io/testdata/project/go.mod b/io/testdata/project/go.mod new file mode 100644 index 0000000..e69de29 diff --git a/io/testdata/threelines b/io/testdata/threelines new file mode 100644 index 0000000..fe333bd --- /dev/null +++ b/io/testdata/threelines @@ -0,0 +1,2 @@ + 
+781d76ae5f48ddd1674161acd90024758fd6e14c {"key":"make-fetch-happen:request-cache:https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz","integrity":"sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==","time":1644222657888,"size":2821,"metadata":{"time":1644222657863,"url":"https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz","reqHeaders":{},"resHeaders":{"cache-control":"public, immutable, max-age=31557600","content-type":"application/octet-stream","date":"Mon, 07 Feb 2022 08:30:58 GMT","etag":"\"69fd1c7bc68c850139d20aefed955a71\"","last-modified":"Fri, 04 Oct 2019 11:29:17 GMT","vary":"Accept-Encoding"},"options":{"compress":true}}} diff --git a/io/testdata/twolines b/io/testdata/twolines new file mode 100644 index 0000000..e970ad9 --- /dev/null +++ b/io/testdata/twolines @@ -0,0 +1,2 @@ + +781d76ae5f48ddd1674161acd90024758fd6e14c {"key":"make-fetch-happen:request-cache:https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz","integrity":"sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==","time":1644222657888,"size":2821,"metadata":{"time":1644222657863,"url":"https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz","reqHeaders":{},"resHeaders":{"cache-control":"public, immutable, max-age=31557600","content-type":"application/octet-stream","date":"Mon, 07 Feb 2022 08:30:58 GMT","etag":"\"69fd1c7bc68c850139d20aefed955a71\"","last-modified":"Fri, 04 Oct 2019 11:29:17 GMT","vary":"Accept-Encoding"},"options":{"compress":true}}} \ No newline at end of file
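For reference, a minimal usage sketch of the new checksum helpers added in crypto/checksum.go, assuming the package is imported as github.com/jfrog/gofrog/crypto; the file name "example.bin" is hypothetical and used for illustration only.

package main

import (
	"fmt"
	"log"

	"github.com/jfrog/gofrog/crypto"
)

func main() {
	// Calculate only the SHA1 and SHA256 checksums of a local file.
	// GetFileChecksums opens the file and reads it once, feeding all requested hashes through AsyncMultiWriter.
	checksums, err := crypto.GetFileChecksums("example.bin", crypto.SHA1, crypto.SHA256)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("sha1:  ", checksums[crypto.SHA1])
	fmt.Println("sha256:", checksums[crypto.SHA256])
}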