diff --git a/CHANGELOG.md b/CHANGELOG.md
index d750f5975..c9cf901a1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,6 +10,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
 
 ### Added
 
 - `tt pack `: added TCM file packaging.
+- `tt pack `: support `.packignore` file to specify files that should not be included
+  in the package (works the same as `.gitignore`).
 
 ### Changed
diff --git a/cli/pack/common.go b/cli/pack/common.go
index 25c778c8a..c34094993 100644
--- a/cli/pack/common.go
+++ b/cli/pack/common.go
@@ -1,6 +1,7 @@
 package pack
 
 import (
+	"errors"
 	"fmt"
 	"io/fs"
 	"os"
@@ -33,6 +34,8 @@ const (
 	versionLuaFileName = "VERSION.lua"
 
 	rocksManifestPath = ".rocks/share/tarantool/rocks/manifest"
+
+	ignoreFile = ".packignore"
 )
 
 var (
@@ -51,6 +54,8 @@ var (
 	}
 )
 
+type skipFilter func(srcInfo os.FileInfo, src string) bool
+
 type RocksVersions map[string][]string
 
 // packFileInfo contains information to set for files/dirs in rpm/deb packages.
@@ -76,9 +81,8 @@ func skipDefaults(srcInfo os.FileInfo, src string) bool {
 }
 
 // appArtifactsFilters returns a slice of skip functions to avoid copying application artifacts.
-func appArtifactsFilters(cliOpts *config.CliOpts, srcAppPath string) []func(
-	srcInfo os.FileInfo, src string) bool {
-	filters := make([]func(srcInfo os.FileInfo, src string) bool, 0)
+func appArtifactsFilters(cliOpts *config.CliOpts, srcAppPath string) []skipFilter {
+	filters := make([]skipFilter, 0)
 	if cliOpts.App == nil {
 		return filters
 	}
@@ -102,9 +106,8 @@ func appArtifactsFilters(cliOpts *config.CliOpts, srcAppPath string) []func(
 }
 
 // ttEnvironmentFilters prepares a slice of filters for tt environment directories/files.
-func ttEnvironmentFilters(packCtx *PackCtx, cliOpts *config.CliOpts) []func(
-	srcInfo os.FileInfo, src string) bool {
-	filters := make([]func(srcInfo os.FileInfo, src string) bool, 0)
+func ttEnvironmentFilters(packCtx *PackCtx, cliOpts *config.CliOpts) []skipFilter {
+	filters := make([]skipFilter, 0)
 	if cliOpts == nil {
 		return filters
 	}
@@ -139,10 +142,9 @@ func ttEnvironmentFilters(packCtx *PackCtx, cliOpts *config.CliOpts) []func(
 }
 
 // previousPackageFilters returns filters for the previously built packages.
-func previousPackageFilters(packCtx *PackCtx) []func(
-	srcInfo os.FileInfo, src string) bool {
+func previousPackageFilters(packCtx *PackCtx) []skipFilter {
 	pkgName := packCtx.Name
-	return []func(srcInfo os.FileInfo, src string) bool{
+	return []skipFilter{
 		func(srcInfo os.FileInfo, src string) bool {
 			name := srcInfo.Name()
 			if strings.HasPrefix(name, pkgName) {
@@ -159,13 +161,18 @@ func previousPackageFilters(packCtx *PackCtx) []func(
 
 // appSrcCopySkip returns a filter func to filter out artifacts paths.
 func appSrcCopySkip(packCtx *PackCtx, cliOpts *config.CliOpts,
-	srcAppPath string) func(srcinfo os.FileInfo, src, dest string) (bool, error) {
+	srcAppPath string) (func(srcinfo os.FileInfo, src, dest string) (bool, error), error) {
 	appCopyFilters := appArtifactsFilters(cliOpts, srcAppPath)
 	appCopyFilters = append(appCopyFilters, ttEnvironmentFilters(packCtx, cliOpts)...)
 	appCopyFilters = append(appCopyFilters, previousPackageFilters(packCtx)...)
 	appCopyFilters = append(appCopyFilters, func(srcInfo os.FileInfo, src string) bool {
 		return skipDefaults(srcInfo, src)
 	})
+	if f, err := ignoreFilter(util.GetOsFS(), filepath.Join(srcAppPath, ignoreFile)); err == nil {
+		appCopyFilters = append(appCopyFilters, f)
+	} else if !errors.Is(err, fs.ErrNotExist) {
+		return nil, fmt.Errorf("failed to load %q: %w", ignoreFile, err)
+	}
 
 	return func(srcinfo os.FileInfo, src, dest string) (bool, error) {
 		for _, shouldSkip := range appCopyFilters {
@@ -174,7 +181,7 @@ func appSrcCopySkip(packCtx *PackCtx, cliOpts *config.CliOpts,
 			}
 		}
 		return false, nil
-	}
+	}, nil
 }
 
 // getAppNamesToPack generates application names list to pack.
@@ -430,7 +437,10 @@ func copyAppSrc(packCtx *PackCtx, cliOpts *config.CliOpts, srcAppPath, dstAppPat
 		return err
 	}
 
-	skipFunc := appSrcCopySkip(packCtx, cliOpts, resolvedAppPath)
+	skipFunc, err := appSrcCopySkip(packCtx, cliOpts, resolvedAppPath)
+	if err != nil {
+		return err
+	}
 
 	// Copying application.
 	log.Debugf("Copying application source %q -> %q", resolvedAppPath, dstAppPath)
diff --git a/cli/pack/ignore.go b/cli/pack/ignore.go
new file mode 100644
index 000000000..5f9fd5c50
--- /dev/null
+++ b/cli/pack/ignore.go
@@ -0,0 +1,111 @@
+package pack
+
+import (
+	"bufio"
+	"bytes"
+	"fmt"
+	"io/fs"
+	"os"
+	"path/filepath"
+	"regexp"
+	"slices"
+	"strings"
+)
+
+type ignorePattern struct {
+	re       *regexp.Regexp
+	dirOnly  bool
+	isNegate bool
+}
+
+func createIgnorePattern(pattern string, basepath string) (ignorePattern, error) {
+	var p ignorePattern
+	var err error
+
+	pattern, p.dirOnly = strings.CutSuffix(pattern, "/")
+	pattern, p.isNegate = strings.CutPrefix(pattern, "!")
+
+	if !p.isNegate && (strings.HasPrefix(pattern, "\\!") || strings.HasPrefix(pattern, "\\#")) {
+		pattern = pattern[1:]
+	}
+
+	expr := pattern
+	expr, found := strings.CutSuffix(expr, "/**")
+	if found {
+		expr = expr + "/([^/]+/)*[^/]*"
+	}
+	expr = strings.ReplaceAll(expr, "**/", "([^/]+/)*")
+	expr = strings.ReplaceAll(expr, "*", "[^/]*")
+	expr = strings.ReplaceAll(expr, "?", "[^/]")
+
+	basepath, _ = strings.CutSuffix(basepath, "/")
+	if basepath == "." {
+		basepath = ""
+	}
+
+	if strings.Contains(pattern, "/") {
+		expr = basepath + expr
+	} else {
+		expr = basepath + "/?([^/]+/)*" + expr
+	}
+
+	p.re, err = regexp.Compile("^" + expr + "$")
+	if err != nil {
+		return ignorePattern{}, fmt.Errorf("failed to compile expression: %w", err)
+	}
+
+	return p, nil
+}
+
+// loadIgnorePatterns reads the ignore file and returns the list of parsed ignore patterns.
+func loadIgnorePatterns(fsys fs.FS, ignoreFile string) ([]ignorePattern, error) {
+	contents, err := fs.ReadFile(fsys, ignoreFile)
+	if err != nil {
+		return nil, err
+	}
+
+	basepath := filepath.Dir(ignoreFile)
+
+	var patterns []ignorePattern
+	s := bufio.NewScanner(bytes.NewReader(contents))
+	for s.Scan() {
+		pattern := strings.TrimSpace(s.Text())
+		if pattern == "" || strings.HasPrefix(pattern, "#") {
+			continue
+		}
+
+		p, err := createIgnorePattern(pattern, basepath)
+		if err != nil {
+			return nil, err
+		}
+
+		patterns = append(patterns, p)
+	}
+	return patterns, nil
+}
+
+// ignoreFilter returns a filter function that implements the .gitignore approach to filtering files.
+func ignoreFilter(fsys fs.FS, patternsFile string) (skipFilter, error) {
+	patterns, err := loadIgnorePatterns(fsys, patternsFile)
+	if err != nil {
+		return nil, err
+	}
+
+	// According to the .gitignore documentation, "the last matching pattern decides the outcome",
+	// so we iterate in reverse order and stop at the first match.
+	slices.Reverse(patterns)
+
+	return func(srcInfo os.FileInfo, src string) bool {
+		// Skip ignore file itself.
+		if src == patternsFile {
+			return true
+		}
+		for _, p := range patterns {
+			isApplicable := srcInfo.IsDir() || !p.dirOnly
+			if isApplicable && p.re.MatchString(src) {
+				return !p.isNegate
+			}
+		}
+		return false
+	}, nil
+}
diff --git a/cli/pack/ignore_test.go b/cli/pack/ignore_test.go
new file mode 100644
index 000000000..67915f8a2
--- /dev/null
+++ b/cli/pack/ignore_test.go
@@ -0,0 +1,570 @@
+package pack
+
+import (
+	"errors"
+	"io/fs"
+	"os"
+	"path"
+	"path/filepath"
+	"strings"
+	"testing"
+	"testing/fstest"
+
+	"github.com/apex/log"
+	"github.com/otiai10/copy"
+	"github.com/stretchr/testify/assert"
+)
+
+func Test_ignoreFilter(t *testing.T) {
+	type testFile struct {
+		path     string
+		expected bool
+	}
+
+	type testCase struct {
+		name     string
+		patterns []string
+		files    []testFile
+	}
+
+	createFS := func(tc testCase) fs.FS {
+		fsys := fstest.MapFS{}
+		if tc.patterns != nil {
+			fsys[ignoreFile] = &fstest.MapFile{
+				Data: []byte(strings.Join(tc.patterns, "\n")),
+				Mode: fs.FileMode(0644),
+			}
+		}
+		for _, file := range tc.files {
+			fsys[file.path] = &fstest.MapFile{
+				Mode: fs.FileMode(0644),
+			}
+		}
+		return fsys
+	}
+
+	runTest := func(t *testing.T, dst string, tc testCase) {
+		fsys := createFS(tc)
+
+		filter, err := ignoreFilter(fsys, ignoreFile)
+		assert.Nil(t, err)
+		assert.NotNil(t, filter)
+
+		dst = filepath.Join(dst, tc.name)
+		log.Infof("do_test: dst=%s", dst)
+		err = os.MkdirAll(dst, 0755)
+		if err != nil {
+			assert.Nil(t, err)
+		}
+
+		err = copy.Copy(".", dst, copy.Options{
+			FS: fsys,
+			Skip: func(srcinfo os.FileInfo, src, dest string) (bool, error) {
+				return filter(srcinfo, src), nil
+			},
+			PermissionControl: copy.AddPermission(0755),
+		})
+		assert.Nil(t, err)
+		for _, f := range tc.files {
+			if f.expected {
+				assert.NoFileExists(t, path.Join(dst, f.path))
+			} else {
+				assert.FileExists(t, path.Join(dst, f.path))
+			}
+		}
+	}
+
+	runTestSet := func(t *testing.T, dst string, name string, testCases []testCase) {
+		for _, tc := range testCases {
+			t.Run(name+"_"+tc.name, func(t *testing.T) {
+				runTest(t, filepath.Join(dst, name), tc)
+			})
+		}
+	}
+
+	dst := t.TempDir()
+
+	// No ignore file.
+	t.Run("no ignore file", func(t *testing.T) {
+		f, err := ignoreFilter(fstest.MapFS{}, ignoreFile)
+		assert.NotNil(t, err)
+		assert.True(t, errors.Is(err, fs.ErrNotExist))
+		assert.Nil(t, f)
+	})
+
+	// Single ignore pattern.
+ testCases_single := []testCase{ + { + name: "simple", + patterns: []string{ + "foo", + }, + files: []testFile{ + {"foo", true}, + {"foo_blabla", false}, + {"blabla_foo", false}, + {"bla_foo_bla", false}, + {"bar", false}, + {"dir/foo", true}, + {"dir/foo_blabla", false}, + {"dir/blabla_foo", false}, + {"dir/bla_foo_bla", false}, + {"dir/bar", false}, + {"dir2/foo/bar", true}, + {"dir2/foo/blabla", true}, + }, + }, + { + name: "question_prefix", + patterns: []string{ + "?foo", + }, + files: []testFile{ + {"foo", false}, + {".foo", true}, + {"2foo", true}, + {"foo_blabla", false}, + {"blabla_foo", false}, + {"bla_foo_bla", false}, + {"bar", false}, + {"dir/foo", false}, + {"dir/.foo", true}, + {"dir/2foo", true}, + {"dir/foo_blabla", false}, + {"dir/blabla_foo", false}, + {"dir/bla_foo_bla", false}, + {"dir/bar", false}, + {"dir2/2foo/bar", true}, + {"dir2/2foo/blabla", true}, + }, + }, + { + name: "question_suffix", + patterns: []string{ + "foo?", + }, + files: []testFile{ + {"foo", false}, + {"foo.", true}, + {"foo2", true}, + {"foo_blabla", false}, + {"blabla_foo", false}, + {"bla_foo_bla", false}, + {"bar", false}, + {"dir/foo", false}, + {"dir/foo.", true}, + {"dir/foo2", true}, + {"dir/foo_blabla", false}, + {"dir/blabla_foo", false}, + {"dir/bla_foo_bla", false}, + {"dir/bar", false}, + {"dir2/foo2/bar", true}, + {"dir2/foo2/blabla", true}, + }, + }, + { + name: "question", + patterns: []string{ + "f?o", + }, + files: []testFile{ + {"foo", true}, + {"f2o", true}, + {"fooo", false}, + {"2foo", false}, + {"foo_blabla", false}, + {"blabla_foo", false}, + {"bla_foo_bla", false}, + {"bar", false}, + {"dir/foo", true}, + {"dir/f2o", true}, + {"dir/fooo", false}, + {"dir/2foo", false}, + {"dir/foo_blabla", false}, + {"dir/blabla_foo", false}, + {"dir/bla_foo_bla", false}, + {"dir/bar", false}, + {"dir2/f2o/bar", true}, + {"dir2/f2o/blabla", true}, + }, + }, + { + name: "asterisk_prefix", + patterns: []string{ + "*foo", + }, + files: []testFile{ + {"foo", true}, + {".foo", true}, + {"foo_blabla", false}, + {"blabla_foo", true}, + {"bla_foo_bla", false}, + {"bar", false}, + {"dir/foo", true}, + {"dir/.foo", true}, + {"dir/foo_blabla", false}, + {"dir/blabla_foo", true}, + {"dir/bla_foo_bla", false}, + {"dir/bar", false}, + {"dir2/blabla_foo/bar", true}, + {"dir2/blabla_foo/blabla", true}, + }, + }, + { + name: "asterisk_suffix", + patterns: []string{ + "foo*", + }, + files: []testFile{ + {"foo", true}, + {"foo.", true}, + {"foo_blabla", true}, + {"blabla_foo", false}, + {"bla_foo_bla", false}, + {"bar", false}, + {"dir/foo", true}, + {"dir/foo.", true}, + {"dir/foo_blabla", true}, + {"dir/blabla_foo", false}, + {"dir/bla_foo_bla", false}, + {"dir/bar", false}, + {"dir2/foo_blabla/bar", true}, + {"dir2/foo_blabla/blabla", true}, + }, + }, + { + name: "asterisk", + patterns: []string{ + "f*o", + }, + files: []testFile{ + {"fo", true}, + {"foo", true}, + {"foo_blabla_foo", true}, + {"foo_blabla", false}, + {"blabla_foo", false}, + {"bla_foo_bla", false}, + {"bar", false}, + {"dir/fo", true}, + {"dir/foo", true}, + {"dir/foo_blabla_foo", true}, + {"dir/foo_blabla", false}, + {"dir/blabla_foo", false}, + {"dir/bla_foo_bla", false}, + {"dir/bar", false}, + {"dir2/foo_blabla_foo/bar", true}, + {"dir2/foo_blabla_foo/blabla", true}, + }, + }, + { + name: "range", + patterns: []string{ + "f[m-p]o", + }, + files: []testFile{ + {"foo", true}, + {"fmo", true}, + {"f2o", false}, + {"fo", false}, + {"foo_blabla", false}, + {"blabla_foo", false}, + {"bla_foo_bla", false}, + {"bar", false}, + {"dir/foo", 
true}, + {"dir/fmo", true}, + {"dir/f2o", false}, + {"dir/fo", false}, + {"dir/foo_blabla", false}, + {"dir/blabla_foo", false}, + {"dir/bla_foo_bla", false}, + {"dir/bar", false}, + {"dir2/foo/bar", true}, + {"dir2/foo/blabla", true}, + }, + }, + { + name: "range_exclude", + patterns: []string{ + "[^a-eg-z]oo", + }, + files: []testFile{ + {"foo", true}, + {"goo", false}, + {"foo_blabla", false}, + {"blabla_foo", false}, + {"bla_foo_bla", false}, + {"bar", false}, + {"dir/foo", true}, + {"dir/goo", false}, + {"dir/foo_blabla", false}, + {"dir/blabla_foo", false}, + {"dir/bla_foo_bla", false}, + {"dir/bar", false}, + {"dir2/foo/bar", true}, + {"dir2/foo/blabla", true}, + }, + }, + { + name: "mix", + patterns: []string{ + "?o[2ox]*", + }, + files: []testFile{ + {"foo", true}, + {"foo_blabla", true}, + {"blabla_foo", false}, + {"bla_foo_bla", false}, + {"bar", false}, + {"dir/foo", true}, + {"dir/foo_blabla", true}, + {"dir/blabla_foo", false}, + {"dir/bla_foo_bla", false}, + {"dir/bar", false}, + {"dir2/go2_blabla/bar", true}, + {"dir2/go2_blabla/blabla", true}, + }, + }, + { + name: "dir_simple", + patterns: []string{ + "dir/", + }, + files: []testFile{ + {"foo", false}, + {"dir/foo", true}, + {"2dir/foo", false}, + {"dir2/foo", false}, + }, + }, + { + name: "dir_question_prefix", + patterns: []string{ + "?dir/", + }, + files: []testFile{ + {"foo", false}, + {"dir/foo", false}, + {"2dir/foo", true}, + {"22dir/foo", false}, + {"dir2/foo", false}, + }, + }, + { + name: "dir_question_suffix", + patterns: []string{ + "dir?/", + }, + files: []testFile{ + {"foo", false}, + {"dir/foo", false}, + {"2dir/foo", false}, + {"dir2/foo", true}, + {"dir22/foo", false}, + }, + }, + { + name: "dir_question", + patterns: []string{ + "d?r/", + }, + files: []testFile{ + {"foo", false}, + {"dir/foo", true}, + {"d2r/foo", true}, + {"d22r/foo", false}, + {"2dir/foo", false}, + {"dir2/foo", false}, + }, + }, + { + name: "dir_asterisk_prefix", + patterns: []string{ + "*dir/", + }, + files: []testFile{ + {"foo", false}, + {"dir/foo", true}, + {"2dir/foo", true}, + {"22dir/foo", true}, + {"dir2/foo", false}, + }, + }, + { + name: "dir_asterisk_suffix", + patterns: []string{ + "dir*/", + }, + files: []testFile{ + {"foo", false}, + {"dir/foo", true}, + {"2dir/foo", false}, + {"dir2/foo", true}, + {"dir22/foo", true}, + }, + }, + { + name: "dir_asterisk", + patterns: []string{ + "d*r/", + }, + files: []testFile{ + {"foo", false}, + {"dir/foo", true}, + {"d2r/foo", true}, + {"d22r/foo", true}, + {"2dir/foo", false}, + {"dir2/foo", false}, + }, + }, + } + runTestSet(t, dst, "singlepattern", testCases_single) + + // An ignore pattern followed with the same but negated (started with '!'). + var testCases_selfNegate []testCase + for _, tc := range testCases_single { + var files []testFile + for _, f := range tc.files { + files = append(files, testFile{f.path, false}) + } + testCases_selfNegate = append(testCases_selfNegate, testCase{ + name: tc.name, + patterns: []string{tc.patterns[0], "!" + tc.patterns[0]}, + files: files, + }) + } + runTestSet(t, dst, "singlepattern_and_selfnegate", testCases_selfNegate) + + // An ignore pattern preceded with the same but negated (thus negated one has no effect). 
+ var testCases_selfNegateWrongOrder []testCase + for i, tc := range testCases_selfNegate { + testCases_selfNegateWrongOrder = append(testCases_selfNegateWrongOrder, testCase{ + name: tc.name, + patterns: []string{tc.patterns[1], tc.patterns[0]}, + files: testCases_single[i].files, + }) + } + runTestSet(t, dst, "singlepattern_and_selfnegate_wrong_order", testCases_selfNegateWrongOrder) + + // Single ignore pattern that is negate. + var testCases_negateOnly []testCase + for _, tc := range testCases_selfNegate { + testCases_negateOnly = append(testCases_negateOnly, testCase{ + name: tc.name, + patterns: []string{tc.patterns[1]}, + files: tc.files, + }) + } + runTestSet(t, dst, "singlepattern_negate", testCases_negateOnly) + + // Multiple patterns. + testCases_multi := []testCase{ + { + name: "simple", + patterns: []string{ + "foo", + "dir", + }, + files: []testFile{ + {"foo", true}, + {"foo_blabla", false}, + {"blabla_foo", false}, + {"bla_foo_bla", false}, + {"bar", false}, + {"dir/foo", true}, + {"dir/foo_blabla", false}, + {"dir/blabla_foo", false}, + {"dir/bla_foo_bla", false}, + {"dir/bar", false}, + }, + }, + { + name: "question", + patterns: []string{ + "?foo", + "?bla?", + }, + files: []testFile{ + {"foo", false}, + {".foo", true}, + {"2foo", true}, + {"foo_blabla", false}, + {"blabla_foo", false}, + {"bla_foo_bla", false}, + {"bar", true}, + {"dir/foo", false}, + {"dir/.foo", true}, + {"dir/2foo", true}, + {"dir/foo_blabla", false}, + {"dir/blabla_foo", false}, + {"dir/bla_foo_bla", false}, + {"dir/bar", true}, + }, + }, + { + name: "asterisk", + patterns: []string{ + "foo*", + "*bla*", + }, + files: []testFile{ + {"foo", false}, + {"foo.", true}, + {"foo2", true}, + {"foo_blabla", false}, + {"blabla_foo", false}, + {"bla_foo_bla", false}, + {"bar", true}, + {"dir/foo", false}, + {"dir/foo.", true}, + {"dir/foo2", true}, + {"dir/foo_blabla", false}, + {"dir/blabla_foo", false}, + {"dir/bla_foo_bla", false}, + {"dir/bar", true}, + }, + }, + { + name: "range", + patterns: []string{ + "bar", + "*foo", + }, + files: []testFile{ + {"foo", true}, + {".foo", true}, + {"foo_blabla", false}, + {"blabla_foo", true}, + {"bla_foo_bla", false}, + {"bar", true}, + {"dir/foo", true}, + {"dir/.foo", true}, + {"dir/foo_blabla", false}, + {"dir/blabla_foo", true}, + {"dir/bla_foo_bla", false}, + {"dir/bar", true}, + }, + }, + { + name: "asterisk_suffix", + patterns: []string{ + "bar", + "foo*", + }, + files: []testFile{ + {"foo", true}, + {"foo.", true}, + {"foo_blabla", true}, + {"blabla_foo", false}, + {"bla_foo_bla", false}, + {"bar", true}, + {"dir/foo", true}, + {"dir/foo.", true}, + {"dir/foo_blabla", true}, + {"dir/blabla_foo", false}, + {"dir/bla_foo_bla", false}, + {"dir/bar", true}, + }, + }, + } + runTestSet(t, dst, "multipatterns", testCases_multi) +} diff --git a/cli/util/osfs.go b/cli/util/osfs.go new file mode 100644 index 000000000..8bb13bb14 --- /dev/null +++ b/cli/util/osfs.go @@ -0,0 +1,22 @@ +package util + +import ( + "io/fs" + "os" +) + +type osFS struct{} + +var osfs osFS + +func GetOsFS() fs.FS { + return osfs +} + +func (fs osFS) Open(name string) (fs.File, error) { + return os.Open(name) +} + +func (fs osFS) ReadFile(name string) ([]byte, error) { + return os.ReadFile(name) +}
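
Below is a minimal usage sketch, not part of the patch, showing how the new `.packignore` filter behaves, including the last-match-wins rule for negated patterns. It assumes it lives in the `pack` package so it can reach the unexported `ignoreFilter` function and the `ignoreFile` constant introduced by this diff; the `demoPackignoreFilter` helper name is made up for illustration.

```go
package pack

import (
	"fmt"
	"io/fs"
	"testing/fstest"
)

// demoPackignoreFilter builds a skip filter from an in-memory .packignore and
// prints which paths would be excluded from the package. appSrcCopySkip does
// the same against the real application directory via util.GetOsFS().
func demoPackignoreFilter() error {
	fsys := fstest.MapFS{
		ignoreFile: &fstest.MapFile{Data: []byte("*.log\n!keep.log\n")},
		"app.log":  &fstest.MapFile{},
		"keep.log": &fstest.MapFile{},
		"init.lua": &fstest.MapFile{},
	}

	skip, err := ignoreFilter(fsys, ignoreFile)
	if err != nil {
		return err
	}

	for _, name := range []string{"app.log", "keep.log", "init.lua"} {
		info, err := fs.Stat(fsys, name)
		if err != nil {
			return err
		}
		// The last matching pattern decides: app.log is skipped (*.log),
		// keep.log is kept (!keep.log), init.lua matches nothing.
		fmt.Printf("%s skipped: %v\n", name, skip(info, name))
	}
	return nil
}
```

Under these assumptions the expected output is `app.log skipped: true`, `keep.log skipped: false`, `init.lua skipped: false`, mirroring the `singlepattern` and negation cases covered by `Test_ignoreFilter`.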