diff --git a/code/go/internal/validator/semantic/validate_hbs_templates.go b/code/go/internal/validator/semantic/validate_hbs_templates.go
new file mode 100644
index 000000000..56bec0260
--- /dev/null
+++ b/code/go/internal/validator/semantic/validate_hbs_templates.go
@@ -0,0 +1,114 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+
+package semantic
+
+import (
+ "errors"
+ "io/fs"
+ "os"
+ "path"
+
+ "github.com/mailgun/raymond/v2"
+
+ "github.com/elastic/package-spec/v3/code/go/internal/fspath"
+ "github.com/elastic/package-spec/v3/code/go/internal/linkedfiles"
+ "github.com/elastic/package-spec/v3/code/go/pkg/specerrors"
+)
+
+var (
+ errInvalidHandlebarsTemplate = errors.New("invalid handlebars template")
+)
+
+// ValidateHandlebarsFiles validates all Handlebars (.hbs) files in the package filesystem.
+// It returns a list of validation errors if any Handlebars files are invalid.
+// Handlebars (.hbs) templates are located both under the package root agent folder and under each data stream's agent folder.
+func ValidateHandlebarsFiles(fsys fspath.FS) specerrors.ValidationErrors {
+ var errs specerrors.ValidationErrors
+
+ // template files are placed at /agent/input directory or
+ // at the datastream /agent/stream directory
+ inputDir := path.Join("agent", "input")
+ if inputErrs := validateTemplateDir(fsys, inputDir); inputErrs != nil {
+ errs = append(errs, inputErrs...)
+ }
+
+ datastreamEntries, err := fs.ReadDir(fsys, "data_stream")
+ if err != nil && !errors.Is(err, fs.ErrNotExist) {
+ return specerrors.ValidationErrors{
+ specerrors.NewStructuredErrorf("error reading data_stream directory: %w", err),
+ }
+ }
+ for _, dsEntry := range datastreamEntries {
+ if !dsEntry.IsDir() {
+ continue
+ }
+ streamDir := path.Join("data_stream", dsEntry.Name(), "agent", "stream")
+ dsErrs := validateTemplateDir(fsys, streamDir)
+ if dsErrs != nil {
+ errs = append(errs, dsErrs...)
+ }
+ }
+
+ return errs
+}
+
+// validateTemplateDir validates all Handlebars files in the given directory.
+func validateTemplateDir(fsys fspath.FS, dir string) specerrors.ValidationErrors {
+ entries, err := fs.ReadDir(fsys, dir)
+ if err != nil && !errors.Is(err, fs.ErrNotExist) {
+ return specerrors.ValidationErrors{
+ specerrors.NewStructuredErrorf("error reading directory %s: %w", dir, err),
+ }
+ }
+ var errs specerrors.ValidationErrors
+ for _, entry := range entries {
+ if path.Ext(entry.Name()) == ".hbs" {
+ err := validateHandlebarsEntry(fsys, dir, entry.Name())
+ if err != nil {
+ errs = append(errs, specerrors.NewStructuredErrorf("%w: error validating %s: %w", errInvalidHandlebarsTemplate, path.Join(dir, entry.Name()), err))
+ }
+ continue
+ }
+ if path.Ext(entry.Name()) == ".link" {
+ linkFilePath := path.Join(dir, entry.Name())
+ linkFile, err := linkedfiles.NewLinkedFile(fsys.Path(linkFilePath))
+ if err != nil {
+ errs = append(errs, specerrors.NewStructuredErrorf("error reading linked file %s: %w", linkFilePath, err))
+ continue
+ }
+ err = validateHandlebarsEntry(fsys, dir, linkFile.IncludedFilePath)
+ if err != nil {
+ errs = append(errs, specerrors.NewStructuredErrorf("%w: error validating %s: %w", errInvalidHandlebarsTemplate, path.Join(dir, linkFile.IncludedFilePath), err))
+ }
+ }
+ }
+ return errs
+}
+
+// validateHandlebarsEntry validates the single Handlebars file entryName inside dir.
+// It parses the file using the raymond library to check for syntax errors.
+func validateHandlebarsEntry(fsys fspath.FS, dir, entryName string) error {
+ if entryName == "" {
+ return nil
+ }
+
+ var content []byte
+ var err error
+
+ // First try to read from filesystem (works for regular files and files within zip)
+ filePath := path.Join(dir, entryName)
+ if content, err = fs.ReadFile(fsys, filePath); err != nil {
+ // If fs.ReadFile fails (likely due to linked file path outside filesystem boundary),
+ // fall back to absolute path approach like linkedfiles.FS does
+ absolutePath := fsys.Path(filePath)
+ if content, err = os.ReadFile(absolutePath); err != nil {
+ return err
+ }
+ }
+
+ // Parse from content string instead of file path
+ _, err = raymond.Parse(string(content))
+ return err
+}
diff --git a/code/go/internal/validator/semantic/validate_hbs_templates_test.go b/code/go/internal/validator/semantic/validate_hbs_templates_test.go
new file mode 100644
index 000000000..89541a45b
--- /dev/null
+++ b/code/go/internal/validator/semantic/validate_hbs_templates_test.go
@@ -0,0 +1,125 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+
+package semantic
+
+import (
+ "os"
+ "path"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+
+ "github.com/elastic/package-spec/v3/code/go/internal/fspath"
+)
+
+func TestValidateTemplateDir(t *testing.T) {
+ t.Run("empty directory", func(t *testing.T) {
+ tmpDir := t.TempDir()
+ pkgDir := path.Join(tmpDir, "package")
+ err := os.MkdirAll(pkgDir, 0o755)
+ require.NoError(t, err)
+
+ templateDir := path.Join(pkgDir, "agent", "input")
+ err = os.MkdirAll(templateDir, 0o755)
+ require.NoError(t, err)
+
+ fsys := fspath.DirFS(pkgDir)
+ errs := validateTemplateDir(fsys, path.Join("agent", "input"))
+ require.Empty(t, errs)
+
+ })
+ t.Run("valid handlebars file", func(t *testing.T) {
+ tmpDir := t.TempDir()
+ pkgDir := path.Join(tmpDir, "package")
+ err := os.MkdirAll(pkgDir, 0o755)
+ require.NoError(t, err)
+
+ templateDir := path.Join(pkgDir, "agent", "input")
+ err = os.MkdirAll(templateDir, 0o755)
+ require.NoError(t, err)
+ hbsFilePath := path.Join(templateDir, "template.hbs")
+ hbsContent := `{{#if condition}}Valid Handlebars{{/if}}`
+ err = os.WriteFile(hbsFilePath, []byte(hbsContent), 0o644)
+ require.NoError(t, err)
+
+ fsys := fspath.DirFS(pkgDir)
+ errs := validateTemplateDir(fsys, path.Join("agent", "input"))
+ require.Empty(t, errs)
+ })
+ t.Run("invalid handlebars file", func(t *testing.T) {
+ tmpDir := t.TempDir()
+ pkgDir := path.Join(tmpDir, "package")
+ err := os.MkdirAll(pkgDir, 0o755)
+ require.NoError(t, err)
+
+ templateDir := path.Join(pkgDir, "agent", "input")
+ err = os.MkdirAll(templateDir, 0o755)
+ require.NoError(t, err)
+ hbsFilePath := path.Join(templateDir, "template.hbs")
+ hbsContent := `{{#if condition}}Valid Handlebars`
+ err = os.WriteFile(hbsFilePath, []byte(hbsContent), 0o644)
+ require.NoError(t, err)
+
+ fsys := fspath.DirFS(pkgDir)
+ errs := validateTemplateDir(fsys, path.Join("agent", "input"))
+ require.NotEmpty(t, errs)
+ assert.Len(t, errs, 1)
+ })
+ t.Run("valid linked handlebars file", func(t *testing.T) {
+ tmpDir := t.TempDir()
+ pkgDir := path.Join(tmpDir, "package")
+ err := os.MkdirAll(pkgDir, 0o755)
+ require.NoError(t, err)
+
+ pkgDirLinked := path.Join(tmpDir, "linked")
+ err = os.MkdirAll(pkgDirLinked, 0o755)
+ require.NoError(t, err)
+ linkedHbsFilePath := path.Join(pkgDirLinked, "linked_template.hbs")
+ linkedHbsContent := `{{#if condition}}Valid Linked Handlebars{{/if}}`
+ err = os.WriteFile(linkedHbsFilePath, []byte(linkedHbsContent), 0o644)
+ require.NoError(t, err)
+
+ templateDir := path.Join(pkgDir, "agent", "input")
+ err = os.MkdirAll(templateDir, 0o755)
+ require.NoError(t, err)
+ hbsFilePath := path.Join(templateDir, "template.hbs.link")
+ hbsContent := `../../../linked/linked_template.hbs`
+ err = os.WriteFile(hbsFilePath, []byte(hbsContent), 0o644)
+ require.NoError(t, err)
+
+ fsys := fspath.DirFS(pkgDir)
+ errs := validateTemplateDir(fsys, path.Join("agent", "input"))
+ require.Empty(t, errs)
+
+ })
+ t.Run("invalid linked handlebars file", func(t *testing.T) {
+ tmpDir := t.TempDir()
+ pkgDir := path.Join(tmpDir, "package")
+ err := os.MkdirAll(pkgDir, 0o755)
+ require.NoError(t, err)
+
+ pkgDirLinked := path.Join(tmpDir, "linked")
+ err = os.MkdirAll(pkgDirLinked, 0o755)
+ require.NoError(t, err)
+ linkedHbsFilePath := path.Join(pkgDirLinked, "linked_template.hbs")
+ linkedHbsContent := `{{#if condition}}Valid Linked Handlebars`
+ err = os.WriteFile(linkedHbsFilePath, []byte(linkedHbsContent), 0o644)
+ require.NoError(t, err)
+
+ templateDir := path.Join(pkgDir, "agent", "input")
+ err = os.MkdirAll(templateDir, 0o755)
+ require.NoError(t, err)
+ hbsFilePath := path.Join(templateDir, "template.hbs.link")
+ hbsContent := `../../../linked/linked_template.hbs`
+ err = os.WriteFile(hbsFilePath, []byte(hbsContent), 0o644)
+ require.NoError(t, err)
+
+ fsys := fspath.DirFS(pkgDir)
+ errs := validateTemplateDir(fsys, path.Join("agent", "input"))
+ require.NotEmpty(t, errs)
+ assert.Len(t, errs, 1)
+ })
+}
diff --git a/code/go/internal/validator/spec.go b/code/go/internal/validator/spec.go
index 3ac4e57fb..56187cc0b 100644
--- a/code/go/internal/validator/spec.go
+++ b/code/go/internal/validator/spec.go
@@ -224,6 +224,7 @@ func (s Spec) rules(pkgType string, rootSpec spectypes.ItemSpec) validationRules
{fn: semantic.ValidateInputPackagesPolicyTemplates, types: []string{"input"}},
{fn: semantic.ValidateMinimumAgentVersion},
{fn: semantic.ValidateIntegrationPolicyTemplates, types: []string{"integration"}},
+ {fn: semantic.ValidateHandlebarsFiles, types: []string{"integration", "input"}},
}
var validationRules validationRules
diff --git a/code/go/pkg/validator/validator_test.go b/code/go/pkg/validator/validator_test.go
index db1f41afa..ad8fffe3b 100644
--- a/code/go/pkg/validator/validator_test.go
+++ b/code/go/pkg/validator/validator_test.go
@@ -934,6 +934,29 @@ func TestLinksAreBlocked(t *testing.T) {
t.Error("links should not be allowed in package")
}
+func TestValidateHandlebarsFiles(t *testing.T) {
+ tests := map[string]string{
+ "bad_input_hbs": "invalid handlebars template: error validating agent/input/input.yml.hbs: Parse error on line 10:\nExpecting OpenEndBlock, got: 'EOF'",
+ "bad_integration_hbs": "invalid handlebars template: error validating data_stream/foo/agent/stream/filestream.yml.hbs: Parse error on line 43:\nExpecting OpenEndBlock, got: 'EOF'",
+ "bad_integration_hbs_linked": "invalid handlebars template: error validating ../bad_integration_hbs/data_stream/foo/agent/stream/filestream.yml.hbs: Parse error on line 43:\nExpecting OpenEndBlock, got: 'EOF'",
+ }
+
+ for pkgName, expectedErrorMessage := range tests {
+ t.Run(pkgName, func(t *testing.T) {
+ errs := ValidateFromPath(path.Join("..", "..", "..", "..", "test", "packages", pkgName))
+ require.Error(t, errs)
+ vErrs, ok := errs.(specerrors.ValidationErrors)
+ require.True(t, ok)
+
+ var errMessages []string
+ for _, vErr := range vErrs {
+ errMessages = append(errMessages, vErr.Error())
+ }
+ require.Contains(t, errMessages, expectedErrorMessage)
+ })
+ }
+}
+
func requireErrorMessage(t *testing.T, pkgName string, invalidItemsPerFolder map[string][]string, expectedErrorMessage string) {
pkgRootPath := filepath.Join("..", "..", "..", "..", "test", "packages", pkgName)
diff --git a/compliance/go.mod b/compliance/go.mod
index 9d5988e85..babac722b 100644
--- a/compliance/go.mod
+++ b/compliance/go.mod
@@ -110,6 +110,7 @@ require (
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
github.com/magefile/mage v1.15.0 // indirect
+ github.com/mailgun/raymond/v2 v2.0.48 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
@@ -145,6 +146,7 @@ require (
github.com/shirou/gopsutil/v3 v3.24.5 // indirect
github.com/shoenig/go-m1cpu v0.1.6 // indirect
github.com/shopspring/decimal v1.4.0 // indirect
+ github.com/sirupsen/logrus v1.9.3 // indirect
github.com/spf13/afero v1.15.0 // indirect
github.com/spf13/cast v1.7.0 // indirect
github.com/spf13/cobra v1.10.1 // indirect
diff --git a/compliance/go.sum b/compliance/go.sum
index 851e6ec15..a8e7d6f02 100644
--- a/compliance/go.sum
+++ b/compliance/go.sum
@@ -248,6 +248,8 @@ github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg=
github.com/magefile/mage v1.15.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
+github.com/mailgun/raymond/v2 v2.0.48 h1:5dmlB680ZkFG2RN/0lvTAghrSxIESeu9/2aeDqACtjw=
+github.com/mailgun/raymond/v2 v2.0.48/go.mod h1:lsgvL50kgt1ylcFJYZiULi5fjPBkkhNfj4KA0W54Z18=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
@@ -337,6 +339,9 @@ github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnj
github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
+github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
+github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
+github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
github.com/spf13/cast v1.7.0 h1:ntdiHjuueXFgm5nzDRdOS4yfT43P5Fnud6DH50rz/7w=
@@ -354,6 +359,7 @@ github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSS
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
@@ -456,6 +462,7 @@ golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -464,6 +471,7 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
diff --git a/go.mod b/go.mod
index e110d48f2..9f4dea54e 100644
--- a/go.mod
+++ b/go.mod
@@ -13,6 +13,7 @@ require (
github.com/evanphx/json-patch/v5 v5.9.11
github.com/go-viper/mapstructure/v2 v2.4.0
github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901
+ github.com/mailgun/raymond/v2 v2.0.48
github.com/otiai10/copy v1.14.1
github.com/stretchr/testify v1.11.1
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415
@@ -39,6 +40,7 @@ require (
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/otiai10/mint v1.6.3 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
+ github.com/sirupsen/logrus v1.8.1 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect
golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678 // indirect
diff --git a/go.sum b/go.sum
index 295e7f05d..002f3fff9 100644
--- a/go.sum
+++ b/go.sum
@@ -43,6 +43,8 @@ github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORN
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/mailgun/raymond/v2 v2.0.48 h1:5dmlB680ZkFG2RN/0lvTAghrSxIESeu9/2aeDqACtjw=
+github.com/mailgun/raymond/v2 v2.0.48/go.mod h1:lsgvL50kgt1ylcFJYZiULi5fjPBkkhNfj4KA0W54Z18=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
@@ -58,10 +60,14 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
+github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=
+github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=
@@ -86,6 +92,7 @@ golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
@@ -106,6 +113,9 @@ golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8T
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools/gotestsum v1.13.0 h1:+Lh454O9mu9AMG1APV4o0y7oDYKyik/3kBOiCqiEpRo=
diff --git a/spec/changelog.yml b/spec/changelog.yml
index 7d751a9b3..7a1e47222 100644
--- a/spec/changelog.yml
+++ b/spec/changelog.yml
@@ -20,6 +20,9 @@
- description: Run validation semantic rules also in transform fields.
type: enhancement
link: https://github.com/elastic/package-spec/pull/1027
+ - description: Handlebars template files are now validated in input and integration packages.
+ type: enhancement
+ link: https://github.com/elastic/package-spec/pull/1030
- version: 3.5.4
changes:
- description: Fix rule matching processor for event.original handling.
diff --git a/spec/integration/agent/spec.yml b/spec/integration/agent/spec.yml
index d1236d0b6..97c7974c3 100644
--- a/spec/integration/agent/spec.yml
+++ b/spec/integration/agent/spec.yml
@@ -1,15 +1,26 @@
spec:
additionalContents: false
contents:
- - description: Folder containing input definitions
- type: folder
- name: input
- required: true
- additionalContents: false
- contents:
- - description: Config template file for inputs defined in the policy_templates section of the top level manifest
- type: file
- sizeLimit: 2MB
- pattern: '^.+\.yml\.hbs$'
+ - description: Folder containing input definitions
+ type: folder
+ name: input
required: true
- allowLink: true
+ additionalContents: false
+ contents:
+ - description: |
+ Config template file for inputs defined in the policy_templates section of the top level manifest.
+ The template should use standard Handlebars syntax (e.g., `{{vars.key}}`, `{{#if vars.condition}}`, `{{#each vars.items}}`)
+ and must compile to valid YAML.
+ Available Handlebars helpers include:
+ - `contains` (checks if item is in array/string),
+ - `escape_string` (wraps string in single quotes and escapes them),
+ - `escape_multiline_string` (escapes multiline strings without wrapping),
+ - `to_json` (converts object to JSON string), and
+ - `url_encode` (URI encodes string).
+ Check the Fleet implementation for the complete list of available helpers:
+ https://github.com/elastic/kibana/blob/70749d9216d7a5de6ce1e7a028d153d5390ad3ac/x-pack/platform/plugins/shared/fleet/server/services/epm/agent/agent.ts#L1.
+ type: file
+ sizeLimit: 2MB
+ pattern: '^.+\.yml\.hbs$'
+ required: true
+ allowLink: true
diff --git a/spec/integration/data_stream/agent/spec.yml b/spec/integration/data_stream/agent/spec.yml
index 86951478e..d9a25d205 100644
--- a/spec/integration/data_stream/agent/spec.yml
+++ b/spec/integration/data_stream/agent/spec.yml
@@ -1,15 +1,26 @@
spec:
additionalContents: false
contents:
- - description: Folder containing input definitions
- type: folder
- name: stream
- required: true
- additionalContents: false
- contents:
- - description: Config template file for inputs defined in the policy_templates section of the top level manifest
- type: file
- sizeLimit: 2MB
- pattern: '^.+\.yml\.hbs$'
+ - description: Folder containing input definitions
+ type: folder
+ name: stream
required: true
- allowLink: true
+ additionalContents: false
+ contents:
+ - description: |
+ Config template file for inputs defined in the policy_templates section of the top level manifest.
+ The template should use standard Handlebars syntax (e.g., `{{vars.key}}`, `{{#if vars.condition}}`, `{{#each vars.items}}`)
+ and must compile to valid YAML.
+ Available Handlebars helpers include:
+ - `contains` (checks if item is in array/string),
+ - `escape_string` (wraps string in single quotes and escapes them),
+ - `escape_multiline_string` (escapes multiline strings without wrapping),
+ - `to_json` (converts object to JSON string), and
+ - `url_encode` (URI encodes string).
+ Check the Fleet implementation for the complete list of available helpers:
+ https://github.com/elastic/kibana/blob/70749d9216d7a5de6ce1e7a028d153d5390ad3ac/x-pack/platform/plugins/shared/fleet/server/services/epm/agent/agent.ts#L1.
+ type: file
+ sizeLimit: 2MB
+ pattern: '^.+\.yml\.hbs$'
+ required: true
+ allowLink: true
diff --git a/test/packages/bad_input_hbs/LICENSE.txt b/test/packages/bad_input_hbs/LICENSE.txt
new file mode 100644
index 000000000..809108b85
--- /dev/null
+++ b/test/packages/bad_input_hbs/LICENSE.txt
@@ -0,0 +1,93 @@
+Elastic License 2.0
+
+URL: https://www.elastic.co/licensing/elastic-license
+
+## Acceptance
+
+By using the software, you agree to all of the terms and conditions below.
+
+## Copyright License
+
+The licensor grants you a non-exclusive, royalty-free, worldwide,
+non-sublicensable, non-transferable license to use, copy, distribute, make
+available, and prepare derivative works of the software, in each case subject to
+the limitations and conditions below.
+
+## Limitations
+
+You may not provide the software to third parties as a hosted or managed
+service, where the service provides users with access to any substantial set of
+the features or functionality of the software.
+
+You may not move, change, disable, or circumvent the license key functionality
+in the software, and you may not remove or obscure any functionality in the
+software that is protected by the license key.
+
+You may not alter, remove, or obscure any licensing, copyright, or other notices
+of the licensor in the software. Any use of the licensor’s trademarks is subject
+to applicable law.
+
+## Patents
+
+The licensor grants you a license, under any patent claims the licensor can
+license, or becomes able to license, to make, have made, use, sell, offer for
+sale, import and have imported the software, in each case subject to the
+limitations and conditions in this license. This license does not cover any
+patent claims that you cause to be infringed by modifications or additions to
+the software. If you or your company make any written claim that the software
+infringes or contributes to infringement of any patent, your patent license for
+the software granted under these terms ends immediately. If your company makes
+such a claim, your patent license ends immediately for work on behalf of your
+company.
+
+## Notices
+
+You must ensure that anyone who gets a copy of any part of the software from you
+also gets a copy of these terms.
+
+If you modify the software, you must include in any modified copies of the
+software prominent notices stating that you have modified the software.
+
+## No Other Rights
+
+These terms do not imply any licenses other than those expressly granted in
+these terms.
+
+## Termination
+
+If you use the software in violation of these terms, such use is not licensed,
+and your licenses will automatically terminate. If the licensor provides you
+with a notice of your violation, and you cease all violation of this license no
+later than 30 days after you receive that notice, your licenses will be
+reinstated retroactively. However, if you violate these terms after such
+reinstatement, any additional violation of these terms will cause your licenses
+to terminate automatically and permanently.
+
+## No Liability
+
+*As far as the law allows, the software comes as is, without any warranty or
+condition, and the licensor will not be liable to you for any damages arising
+out of these terms or the use or nature of the software, under any kind of
+legal claim.*
+
+## Definitions
+
+The **licensor** is the entity offering these terms, and the **software** is the
+software the licensor makes available under these terms, including any portion
+of it.
+
+**you** refers to the individual or entity agreeing to these terms.
+
+**your company** is any legal entity, sole proprietorship, or other kind of
+organization that you work for, plus all organizations that have control over,
+are under the control of, or are under common control with that
+organization. **control** means ownership of substantially all the assets of an
+entity, or the power to direct its management and policies by vote, contract, or
+otherwise. Control can be direct or indirect.
+
+**your licenses** are all the licenses granted to you for the software under
+these terms.
+
+**use** means anything you do with the software requiring one of your licenses.
+
+**trademark** means trademarks, service marks, and similar rights.
diff --git a/test/packages/bad_input_hbs/_dev/build/docs/README.md b/test/packages/bad_input_hbs/_dev/build/docs/README.md
new file mode 100644
index 000000000..a11147993
--- /dev/null
+++ b/test/packages/bad_input_hbs/_dev/build/docs/README.md
@@ -0,0 +1,101 @@
+{{- generatedHeader }}
+{{/*
+This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details
+described in the comments.
+
+Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines
+*/}}
+# bad input hbs Integration for Elastic
+
+## Overview
+{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}}
+The bad input hbs integration for Elastic enables collection of ...
+This integration facilitates ...
+
+### Compatibility
+{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}}
+This integration is compatible with ...
+
+### How it works
+{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or receiving data from a network or file. */}}
+
+## What data does this integration collect?
+{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}}
+The bad input hbs integration collects log messages of the following types:
+* ...
+
+### Supported use cases
+{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}}
+
+## What do I need to use this integration?
+{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}}
+
+## How do I deploy this integration?
+
+### Agent-based deployment
+
+Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host.
+
+Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines.
+
+{{/* If agentless is available for this integration, we'll want to include that here as well.
+### Agentless deployment
+
+Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it.
+
+For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html)
+*/}}
+
+### Onboard / configure
+{{/* List the steps that will need to be followed in order to completely set up a working integration.
+For integrations that support multiple input types, be sure to add steps for all inputs.
+*/}}
+
+### Validation
+{{/* How can the user test whether the integration is working? Include example commands or test files if applicable. */}}
+
+## Troubleshooting
+
+For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems).
+{{/*
+Add any vendor specific troubleshooting here.
+
+Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved?
+If applicable, links to the third-party software’s troubleshooting documentation.
+*/}}
+
+## Scaling
+
+For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation.
+{{/* Add any vendor specific scaling information here */}}
+
+## Reference
+{{/* Repeat for each data stream of the current type
+### {Data stream name}
+
+The `{data stream name}` data stream provides events from {source} of the following types: {list types}.
+
+For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event.
+
+The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration.
+
+#### {data stream name} fields
+
+To include a generated list of fields from the `fields/` directory, uncomment and use:
+{{ fields "data_stream_name" }}
+
+The event template function will be replaced by a sample event, taken from `sample_event.json`, when building this integration.
+
+To include a sample event from `sample_event.json`, uncomment and use:
+{{ event "data_stream_name" }}
+
+*/}}
+
+### Inputs used
+{{/* All inputs used by this package will be automatically listed here. */}}
+{{ inputDocs }}
+
+### API usage
+{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevant information */}}
+These APIs are used with this integration:
+* ...
diff --git a/test/packages/bad_input_hbs/agent/input/input.yml.hbs b/test/packages/bad_input_hbs/agent/input/input.yml.hbs
new file mode 100644
index 000000000..3bd54a4d5
--- /dev/null
+++ b/test/packages/bad_input_hbs/agent/input/input.yml.hbs
@@ -0,0 +1,9 @@
+data_stream:
+ dataset: {{data_stream.dataset}}
+paths:
+{{#each paths as |path i|}}
+ - {{path}}
+exclude_files: [".gz$"]
+processors:
+ - add_locale: ~
+
diff --git a/test/packages/bad_input_hbs/changelog.yml b/test/packages/bad_input_hbs/changelog.yml
new file mode 100644
index 000000000..bb0320a52
--- /dev/null
+++ b/test/packages/bad_input_hbs/changelog.yml
@@ -0,0 +1,6 @@
+# newer versions go on top
+- version: "0.0.1"
+ changes:
+ - description: Initial draft of the package
+ type: enhancement
+ link: https://github.com/elastic/integrations/pull/1 # FIXME Replace with the real PR link
diff --git a/test/packages/bad_input_hbs/docs/README.md b/test/packages/bad_input_hbs/docs/README.md
new file mode 100644
index 000000000..a11147993
--- /dev/null
+++ b/test/packages/bad_input_hbs/docs/README.md
@@ -0,0 +1,101 @@
+{{- generatedHeader }}
+{{/*
+This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details
+described in the comments.
+
+Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines
+*/}}
+# bad input hbs Integration for Elastic
+
+## Overview
+{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}}
+The bad input hbs integration for Elastic enables collection of ...
+This integration facilitates ...
+
+### Compatibility
+{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}}
+This integration is compatible with ...
+
+### How it works
+{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or receiving data from a network or file. */}}
+
+## What data does this integration collect?
+{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}}
+The bad input hbs integration collects log messages of the following types:
+* ...
+
+### Supported use cases
+{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}}
+
+## What do I need to use this integration?
+{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}}
+
+## How do I deploy this integration?
+
+### Agent-based deployment
+
+Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host.
+
+Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines.
+
+{{/* If agentless is available for this integration, we'll want to include that here as well.
+### Agentless deployment
+
+Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it.
+
+For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html)
+*/}}
+
+### Onboard / configure
+{{/* List the steps that will need to be followed in order to completely set up a working integration.
+For integrations that support multiple input types, be sure to add steps for all inputs.
+*/}}
+
+### Validation
+{{/* How can the user test whether the integration is working? Include example commands or test files if applicable. */}}
+
+## Troubleshooting
+
+For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems).
+{{/*
+Add any vendor specific troubleshooting here.
+
+Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved?
+If applicable, links to the third-party software’s troubleshooting documentation.
+*/}}
+
+## Scaling
+
+For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation.
+{{/* Add any vendor specific scaling information here */}}
+
+## Reference
+{{/* Repeat for each data stream of the current type
+### {Data stream name}
+
+The `{data stream name}` data stream provides events from {source} of the following types: {list types}.
+
+For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event.
+
+The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration.
+
+#### {data stream name} fields
+
+To include a generated list of fields from the `fields/` directory, uncomment and use:
+{{ fields "data_stream_name" }}
+
+The event template function will be replaced by a sample event, taken from `sample_event.json`, when building this integration.
+
+To include a sample event from `sample_event.json`, uncomment and use:
+{{ event "data_stream_name" }}
+
+*/}}
+
+### Inputs used
+{{/* All inputs used by this package will be automatically listed here. */}}
+{{ inputDocs }}
+
+### API usage
+{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevant information */}}
+These APIs are used with this integration:
+* ...
diff --git a/test/packages/bad_input_hbs/fields/base-fields.yml b/test/packages/bad_input_hbs/fields/base-fields.yml
new file mode 100644
index 000000000..7c798f453
--- /dev/null
+++ b/test/packages/bad_input_hbs/fields/base-fields.yml
@@ -0,0 +1,12 @@
+- name: data_stream.type
+ type: constant_keyword
+ description: Data stream type.
+- name: data_stream.dataset
+ type: constant_keyword
+ description: Data stream dataset.
+- name: data_stream.namespace
+ type: constant_keyword
+ description: Data stream namespace.
+- name: '@timestamp'
+ type: date
+ description: Event timestamp.
diff --git a/test/packages/bad_input_hbs/img/sample-logo.svg b/test/packages/bad_input_hbs/img/sample-logo.svg
new file mode 100644
index 000000000..6268dd88f
--- /dev/null
+++ b/test/packages/bad_input_hbs/img/sample-logo.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/test/packages/bad_input_hbs/img/sample-screenshot.png b/test/packages/bad_input_hbs/img/sample-screenshot.png
new file mode 100644
index 000000000..d7a56a3ec
Binary files /dev/null and b/test/packages/bad_input_hbs/img/sample-screenshot.png differ
diff --git a/test/packages/bad_input_hbs/manifest.yml b/test/packages/bad_input_hbs/manifest.yml
new file mode 100644
index 000000000..36c8c3205
--- /dev/null
+++ b/test/packages/bad_input_hbs/manifest.yml
@@ -0,0 +1,47 @@
+format_version: 3.5.4
+name: bad_input_hbs
+title: "bad input hbs"
+version: 0.0.1
+source:
+ license: "Elastic-2.0"
+description: "A package with wrong hbs template"
+type: input
+categories:
+ - config_management
+ - custom
+conditions:
+ kibana:
+ version: "^9.2.1"
+ elastic:
+ subscription: "basic"
+screenshots:
+ - src: /img/sample-screenshot.png
+ title: Sample screenshot
+ size: 600x600
+ type: image/png
+icons:
+ - src: /img/sample-logo.svg
+ title: Sample logo
+ size: 32x32
+ type: image/svg+xml
+policy_templates:
+ - name: sample
+ type: logs
+ title: Sample logs
+ description: Collect sample logs
+ input: logfile
+ template_path: input.yml.hbs
+ vars:
+ - name: paths
+ type: text
+ title: Paths
+ multi: true
+ default:
+ - /var/log/*.log
+elasticsearch:
+ index_template:
+ mappings:
+ subobjects: false
+owner:
+ github: elastic/integrations
+ type: elastic
diff --git a/test/packages/bad_input_template_path/agent/input/input.yml.hbs b/test/packages/bad_input_template_path/agent/input/input.yml.hbs
index 6eb03670b..7baca13fc 100644
--- a/test/packages/bad_input_template_path/agent/input/input.yml.hbs
+++ b/test/packages/bad_input_template_path/agent/input/input.yml.hbs
@@ -5,5 +5,5 @@ hosts:
- {{this}}
{{/each}}
driver: {{driver}}
-sql_query: {{sql_query}
+sql_query: {{sql_query}}
sql_response_format: {{sql_response_format}}
\ No newline at end of file
diff --git a/test/packages/bad_integration_hbs/LICENSE.txt b/test/packages/bad_integration_hbs/LICENSE.txt
new file mode 100644
index 000000000..809108b85
--- /dev/null
+++ b/test/packages/bad_integration_hbs/LICENSE.txt
@@ -0,0 +1,93 @@
+Elastic License 2.0
+
+URL: https://www.elastic.co/licensing/elastic-license
+
+## Acceptance
+
+By using the software, you agree to all of the terms and conditions below.
+
+## Copyright License
+
+The licensor grants you a non-exclusive, royalty-free, worldwide,
+non-sublicensable, non-transferable license to use, copy, distribute, make
+available, and prepare derivative works of the software, in each case subject to
+the limitations and conditions below.
+
+## Limitations
+
+You may not provide the software to third parties as a hosted or managed
+service, where the service provides users with access to any substantial set of
+the features or functionality of the software.
+
+You may not move, change, disable, or circumvent the license key functionality
+in the software, and you may not remove or obscure any functionality in the
+software that is protected by the license key.
+
+You may not alter, remove, or obscure any licensing, copyright, or other notices
+of the licensor in the software. Any use of the licensor’s trademarks is subject
+to applicable law.
+
+## Patents
+
+The licensor grants you a license, under any patent claims the licensor can
+license, or becomes able to license, to make, have made, use, sell, offer for
+sale, import and have imported the software, in each case subject to the
+limitations and conditions in this license. This license does not cover any
+patent claims that you cause to be infringed by modifications or additions to
+the software. If you or your company make any written claim that the software
+infringes or contributes to infringement of any patent, your patent license for
+the software granted under these terms ends immediately. If your company makes
+such a claim, your patent license ends immediately for work on behalf of your
+company.
+
+## Notices
+
+You must ensure that anyone who gets a copy of any part of the software from you
+also gets a copy of these terms.
+
+If you modify the software, you must include in any modified copies of the
+software prominent notices stating that you have modified the software.
+
+## No Other Rights
+
+These terms do not imply any licenses other than those expressly granted in
+these terms.
+
+## Termination
+
+If you use the software in violation of these terms, such use is not licensed,
+and your licenses will automatically terminate. If the licensor provides you
+with a notice of your violation, and you cease all violation of this license no
+later than 30 days after you receive that notice, your licenses will be
+reinstated retroactively. However, if you violate these terms after such
+reinstatement, any additional violation of these terms will cause your licenses
+to terminate automatically and permanently.
+
+## No Liability
+
+*As far as the law allows, the software comes as is, without any warranty or
+condition, and the licensor will not be liable to you for any damages arising
+out of these terms or the use or nature of the software, under any kind of
+legal claim.*
+
+## Definitions
+
+The **licensor** is the entity offering these terms, and the **software** is the
+software the licensor makes available under these terms, including any portion
+of it.
+
+**you** refers to the individual or entity agreeing to these terms.
+
+**your company** is any legal entity, sole proprietorship, or other kind of
+organization that you work for, plus all organizations that have control over,
+are under the control of, or are under common control with that
+organization. **control** means ownership of substantially all the assets of an
+entity, or the power to direct its management and policies by vote, contract, or
+otherwise. Control can be direct or indirect.
+
+**your licenses** are all the licenses granted to you for the software under
+these terms.
+
+**use** means anything you do with the software requiring one of your licenses.
+
+**trademark** means trademarks, service marks, and similar rights.
diff --git a/test/packages/bad_integration_hbs/_dev/build/docs/README.md b/test/packages/bad_integration_hbs/_dev/build/docs/README.md
new file mode 100644
index 000000000..7cc03b518
--- /dev/null
+++ b/test/packages/bad_integration_hbs/_dev/build/docs/README.md
@@ -0,0 +1,101 @@
+{{- generatedHeader }}
+{{/*
+This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details
+described in the comments.
+
+Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines
+*/}}
+# bad integration hbs Integration for Elastic
+
+## Overview
+{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}}
+The bad integration hbs integration for Elastic enables collection of ...
+This integration facilitates ...
+
+### Compatibility
+{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}}
+This integration is compatible with ...
+
+### How it works
+{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or recieving data from a network or file.*/}}
+
+## What data does this integration collect?
+{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}}
+The bad integration hbs integration collects log messages of the following types:
+* ...
+
+### Supported use cases
+{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}}
+
+## What do I need to use this integration?
+{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}}
+
+## How do I deploy this integration?
+
+### Agent-based deployment
+
+Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host.
+
+Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines.
+
+{{/* If agentless is available for this integration, we'll want to include that here as well.
+### Agentless deployment
+
+Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it.
+
+For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html)
+*/}}
+
+### Onboard / configure
+{{/* List the steps that will need to be followed in order to completely set up a working integration.
+For integrations that support multiple input types, be sure to add steps for all inputs.
+*/}}
+
+### Validation
+{{/* How can the user test whether the integration is working? Including example commands or test files if applicable */}}
+
+## Troubleshooting
+
+For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems).
+{{/*
+Add any vendor specific troubleshooting here.
+
+Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved?
+If applicable, links to the third-party software’s troubleshooting documentation.
+*/}}
+
+## Scaling
+
+For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation.
+{{/* Add any vendor specific scaling information here */}}
+
+## Reference
+{{/* Repeat for each data stream of the current type
+### {Data stream name}
+
+The `{data stream name}` data stream provides events from {source} of the following types: {list types}.
+
+For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event.
+
+The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration.
+
+#### {data stream name} fields
+
+To include a generated list of fields from the `fields/` directory, uncomment and use:
+{{ fields "data_stream_name" }}
+
+The event template function will be replace by a sample event, taken from `sample_event.json`, when building this integration.
+
+To include a sample event from `sample_event.json`, uncomment and use:
+{{ event "data_stream_name" }}
+
+*/}}
+
+### Inputs used
+{{/* All inputs used by this package will be automatically listed here. */}}
+{{ inputDocs }}
+
+### API usage
+{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevent information */}}
+These APIs are used with this integration:
+* ...
diff --git a/test/packages/bad_integration_hbs/changelog.yml b/test/packages/bad_integration_hbs/changelog.yml
new file mode 100644
index 000000000..bb0320a52
--- /dev/null
+++ b/test/packages/bad_integration_hbs/changelog.yml
@@ -0,0 +1,6 @@
+# newer versions go on top
+- version: "0.0.1"
+ changes:
+ - description: Initial draft of the package
+ type: enhancement
+ link: https://github.com/elastic/integrations/pull/1 # FIXME Replace with the real PR link
diff --git a/test/packages/bad_integration_hbs/data_stream/foo/agent/stream/filestream.yml.hbs b/test/packages/bad_integration_hbs/data_stream/foo/agent/stream/filestream.yml.hbs
new file mode 100644
index 000000000..850784970
--- /dev/null
+++ b/test/packages/bad_integration_hbs/data_stream/foo/agent/stream/filestream.yml.hbs
@@ -0,0 +1,43 @@
+paths:
+{{#each paths as |path|}}
+ - {{path}}
+{{/each}}
+{{#if exclude_files}}
+prospector.scanner.exclude_files:
+{{#each exclude_files as |pattern f|}}
+ - {{pattern}}
+{{/each}}
+{{/if}}
+{{#if multiline_json}}
+multiline.pattern: '^{'
+multiline.negate: true
+multiline.match: after
+multiline.max_lines: 5000
+multiline.timeout: 10
+{{/if}}
+{{#if custom}}
+{{custom}}
+{{/if}}
+
+{{#if tags.length}}
+tags:
+{{#each tags as |tag|}}
+- {{tag}}
+{{/each}}
+{{#if preserve_original_event}}
+- preserve_original_event
+{{/if}}
+{{else}}
+{{#if preserve_original_event}}
+tags:
+- preserve_original_event
+{{/if}}
+
+{{#contains "forwarded" tags}}
+publisher_pipeline.disable_host: true
+{{/contains}}
+
+{{#if processors}}
+processors:
+{{processors}}
+{{/if}}
\ No newline at end of file
diff --git a/test/packages/bad_integration_hbs/data_stream/foo/elasticsearch/ingest_pipeline/default.yml b/test/packages/bad_integration_hbs/data_stream/foo/elasticsearch/ingest_pipeline/default.yml
new file mode 100644
index 000000000..1a308fded
--- /dev/null
+++ b/test/packages/bad_integration_hbs/data_stream/foo/elasticsearch/ingest_pipeline/default.yml
@@ -0,0 +1,10 @@
+---
+description: Pipeline for processing sample logs
+processors:
+- set:
+ field: sample_field
+ value: "1"
+on_failure:
+- set:
+ field: error.message
+ value: '{{ _ingest.on_failure_message }}'
diff --git a/test/packages/bad_integration_hbs/data_stream/foo/fields/base-fields.yml b/test/packages/bad_integration_hbs/data_stream/foo/fields/base-fields.yml
new file mode 100644
index 000000000..7c798f453
--- /dev/null
+++ b/test/packages/bad_integration_hbs/data_stream/foo/fields/base-fields.yml
@@ -0,0 +1,12 @@
+- name: data_stream.type
+ type: constant_keyword
+ description: Data stream type.
+- name: data_stream.dataset
+ type: constant_keyword
+ description: Data stream dataset.
+- name: data_stream.namespace
+ type: constant_keyword
+ description: Data stream namespace.
+- name: '@timestamp'
+ type: date
+ description: Event timestamp.
diff --git a/test/packages/bad_integration_hbs/data_stream/foo/manifest.yml b/test/packages/bad_integration_hbs/data_stream/foo/manifest.yml
new file mode 100644
index 000000000..251bec6c4
--- /dev/null
+++ b/test/packages/bad_integration_hbs/data_stream/foo/manifest.yml
@@ -0,0 +1,256 @@
+title: "New Data Stream"
+type: logs
+streams:
+ - input: filestream
+ title: "logs via filestream"
+ description: |-
+ Collect logs with filestream
+ template_path: filestream.yml.hbs
+ vars:
+ - name: paths
+ type: text
+ title: "Paths"
+ multi: true
+ required: true
+ show_user: true
+ default:
+ - /var/log/*.log
+ - name: data_stream.dataset
+ type: text
+ title: "Dataset name"
+ description: |-
+ Dataset to write data to. Changing the dataset will send the data to a different index. You can't use `-` in the name of a dataset and only valid characters for [Elasticsearch index names](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html).
+ required: true
+ show_user: true
+ default: filestream.generic
+ - name: pipeline
+ type: text
+ title: "Ingest Pipeline"
+ description: |-
+ The Ingest Node pipeline ID to be used by the integration.
+ show_user: true
+ - name: parsers
+ type: yaml
+ title: "Parsers"
+ description: |-
+ This option expects a list of parsers that the log line has to go through. For more information see [Parsers](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#_parsers)
+ show_user: true
+ default: ""
+ #- ndjson:
+ # target: ""
+ # message_key: msg
+ #- multiline:
+ # type: count
+ # count_lines: 3
+ - name: exclude_files
+ type: text
+ title: "Exclude Files"
+ description: |-
+ A list of regular expressions to match the files that you want Elastic Agent to ignore. By default no files are excluded.
+ multi: true
+ show_user: true
+ default:
+ - \.gz$
+ - name: include_files
+ type: text
+ title: "Include Files"
+ description: |-
+ A list of regular expressions to match the files that you want Elastic Agent to include. If a list of regexes is provided, only the files that are allowed by the patterns are harvested.
+ multi: true
+ show_user: true
+ - name: processors
+ type: yaml
+ title: "Processors"
+ description: |-
+ Processors are used to reduce the number of fields in the exported event or to enhance the event with metadata. This executes in the agent before the logs are parsed. See [Processors](https://www.elastic.co/guide/en/beats/filebeat/current/filtering-and-enhancing-data.html) for details.
+ - name: tags
+ type: text
+ title: "Tags"
+ description: |-
+ Tags to include in the published event
+ multi: true
+ show_user: true
+ - name: encoding
+ type: text
+ title: "Encoding"
+ description: |-
+ The file encoding to use for reading data that contains international characters. For a full list of valid encodings, see the [Documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#_encoding_2)
+ - name: recursive_glob
+ type: bool
+ title: "Recursive Glob"
+ description: |-
+ Enable expanding `**` into recursive glob patterns. With this feature enabled, the rightmost `**` in each path is expanded into a fixed number of glob patterns. For example: `/foo/**` expands to `/foo`, `/foo/*`, `/foo/*/*`, and so on. If enabled it expands a single `**` into a 8-level deep `*` pattern.
+ This feature is enabled by default. Set prospector.scanner.recursive_glob to false to disable it.
+ default: true
+ - name: symlinks
+ type: bool
+ title: "Enable symlinks"
+ description: |-
+ The symlinks option allows Elastic Agent to harvest symlinks in addition to regular files. When harvesting symlinks, Elastic Agent opens and reads the original file even though it reports the path of the symlink.
+ **Because this option may lead to data loss, it is disabled by default.**
+ - name: resend_on_touch
+ type: bool
+ title: "Resend on touch"
+ description: |-
+ If this option is enabled a file is resent if its size has not changed but its modification time has changed to a later time than before. It is disabled by default to avoid accidentally resending files.
+ - name: check_interval
+ type: text
+ title: "Check Interval"
+ description: |-
+ How often Elastic Agent checks for new files in the paths that are specified for harvesting. For example Specify 1s to scan the directory as frequently as possible without causing Elastic Agent to scan too frequently. **We do not recommend to set this value <1s.**
+ - name: ignore_older
+ type: text
+ title: "Ignore Older"
+ description: |-
+ If this option is enabled, Elastic Agent ignores any files that were modified before the specified timespan. You can use time strings like 2h (2 hours) and 5m (5 minutes). The default is 0, which disables the setting.
+ You must set Ignore Older to be greater than On State Change Inactive.
+ For more information, please see the [Documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#filebeat-input-filestream-ignore-older)
+ - name: ignore_inactive
+ type: text
+ title: "Ignore Inactive"
+ description: |-
+ If this option is enabled, Elastic Agent ignores every file that has not been updated since the selected time. Possible options are since_first_start and since_last_start.
+ - name: close_on_state_changed_inactive
+ type: text
+ title: "Close on State Changed Inactive"
+ description: |-
+ When this option is enabled, Elastic Agent closes the file handle if a file has not been harvested for the specified duration. The counter for the defined period starts when the last log line was read by the harvester. It is not based on the modification time of the file. If the closed file changes again, a new harvester is started and the latest changes will be picked up after Check Interval has elapsed.
+ - name: close_on_state_changed_renamed
+ type: bool
+ title: "Close on State Changed Renamed"
+ description: |-
+ **Only use this option if you understand that data loss is a potential side effect.**
+ When this option is enabled, Elastic Agent closes the file handler when a file is renamed. This happens, for example, when rotating files. By default, the harvester stays open and keeps reading the file because the file handler does not depend on the file name.
+ - name: close_on_state_changed_removed
+ type: bool
+ title: "Close on State Changed Removed"
+ description: |-
+ When this option is enabled, Elastic Agent closes the harvester when a file is removed. Normally a file should only be removed after it’s inactive for the duration specified by close.on_state_change.inactive.
+ - name: close_reader_eof
+ type: bool
+ title: "Close Reader EOF"
+ description: |-
+ **Only use this option if you understand that data loss is a potential side effect.**
+ When this option is enabled, Elastic Agent closes a file as soon as the end of a file is reached. This is useful when your files are only written once and not updated from time to time. For example, this happens when you are writing every single log event to a new file. This option is disabled by default.
+ - name: close_reader_after_interval
+ type: text
+ title: "Close Reader After Interval"
+ description: |-
+ **Only use this option if you understand that data loss is a potential side effect. Another side effect is that multiline events might not be completely sent before the timeout expires.**
+ This option is particularly useful in case the output is blocked, which makes Elastic Agent keep open file handlers even for files that were deleted from the disk.
+ For more information see the [documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#filebeat-input-filestream-close-timeout).
+ - name: clean_inactive
+ type: text
+ title: "Clean Inactive"
+ description: |-
+ **Only use this option if you understand that data loss is a potential side effect.**
+ When this option is enabled, Elastic Agent removes the state of a file after the specified period of inactivity has elapsed.
+ E.g: "30m", Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h". By default cleaning inactive states is disabled, -1 is used to disable it.
+ default: -1
+ - name: clean_removed
+ type: bool
+ title: "Clean Removed"
+ description: |-
+ When this option is enabled, Elastic Agent cleans files from the registry if they cannot be found on disk anymore under the last known name.
+ **You must disable this option if you also disable Close Removed.**
+ - name: harvester_limit
+ type: integer
+ title: "Harvester Limit"
+ description: |-
+ The harvester_limit option limits the number of harvesters
+ that are started in parallel for one input. This directly
+ relates to the maximum number of file handlers that are
+ opened. The default is 0 (no limit).
+ default: 0
+ - name: backoff_init
+ type: text
+ title: "Backoff Init"
+ description: |-
+ The backoff option defines how long Elastic Agent waits before checking a file again after EOF is reached. The default is 1s.
+ - name: backoff_max
+ type: text
+ title: "Backoff Max"
+ description: |-
+ The maximum time for Elastic Agent to wait before checking a file again after EOF is reached. The default is 10s.
+ **Requirement: Set Backoff Max to be greater than or equal to Backoff Init and less than or equal to Check Interval (Backoff Init <= Backoff Max <= Check Interval).**
+ - name: fingerprint
+ type: bool
+ title: "File identity: Fingerprint"
+ description: |-
+ **Changing file_identity methods between runs may result in
+ duplicated events in the output.**
+ Uses a fingerprint generated from the first few bytes (1k is
+ the default, this can be configured via Fingerprint offset
+ and length) to identify a file instead inode + device ID.
+ Refer to https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#_file_identity_2
+ for more details. If this option is disabled (and 'Native
+ file identity is not enabled'), Elastic-Agent < 9.0.0 will
+ use Native as the file identity, and >= 9.0.0 will use
+ Fingerprint with the default offset and length.
+ default: true
+ - name: fingerprint_offset
+ type: integer
+ title: "File identity: Fingerprint offset"
+ description: |-
+ Offset from the beginning of the file to start calculating
+ the fingerprint. The default is 0. Only used when the
+ fingerprint file identity is selected
+ default: 0
+ - name: fingerprint_length
+ type: integer
+ title: "File identity: Fingerprint length"
+ description: |-
+ The number of bytes used to calculate the fingerprint. The
+ default is 1024. Only used when the fingerprint file
+ identity is selected.
+ default: 1024
+ - name: file_identity_native
+ type: bool
+ title: "File identity: Native"
+ description: |-
+ **Changing file_identity methods between runs may result in
+ duplicated events in the output.**
+ Uses a native identifier for files, on most Unix-like
+ file systems this is the inode + device ID. On file systems
+ that do not support inode, the native equivalent is used.
+ If you enable this option you **MUST disable Fingerprint
+ file identity**. Refer to
+ https://www.elastic.co/docs/reference/beats/filebeat/filebeat-input-filestream
+ for more details.
+ default: false
+ - name: rotation_external_strategy_copytruncate
+ type: yaml
+ title: "Rotation Strategy"
+    description: "If the log rotating application copies the contents of the active file and then truncates the original file, use these options to help Elastic Agent to read files correctly.\nSet the option suffix_regex so Elastic Agent can tell active and rotated files apart. \nThere are two supported suffix types in the input: numeric and date."
+ - name: exclude_lines
+ type: text
+ title: "Exclude Lines"
+ description: |-
+ A list of regular expressions to match the lines that you want Elastic Agent to exclude. Elastic Agent drops any lines that match a regular expression in the list. By default, no lines are dropped. Empty lines are ignored.
+ multi: true
+ - name: include_lines
+ type: text
+ title: "Include Lines"
+ description: |-
+ A list of regular expressions to match the lines that you want Elastic Agent to include. Elastic Agent exports only the lines that match a regular expression in the list. By default, all lines are exported. Empty lines are ignored.
+ multi: true
+ - name: buffer_size
+ type: text
+ title: "Buffer Size"
+ description: |-
+ The size in bytes of the buffer that each harvester uses when fetching a file. The default is 16384.
+ - name: message_max_bytes
+ type: text
+ title: "Message Max Bytes"
+ description: |-
+      The maximum number of bytes that a single log message can have. All bytes after message_max_bytes are discarded and not sent. The default is 10MB (10485760).
+ - name: condition
+ type: text
+ title: "Condition"
+ description: |-
+ Condition to filter when to collect this input. See [Dynamic Input Configuration](https://www.elastic.co/guide/en/fleet/current/dynamic-input-configuration.html) for details.
+ show_user: true
+elasticsearch:
+ index_template:
+ mappings:
+ subobjects: false
diff --git a/test/packages/bad_integration_hbs/docs/README.md b/test/packages/bad_integration_hbs/docs/README.md
new file mode 100644
index 000000000..7cc03b518
--- /dev/null
+++ b/test/packages/bad_integration_hbs/docs/README.md
@@ -0,0 +1,101 @@
+{{- generatedHeader }}
+{{/*
+This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details
+described in the comments.
+
+Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines
+*/}}
+# bad integration hbs Integration for Elastic
+
+## Overview
+{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}}
+The bad integration hbs integration for Elastic enables collection of ...
+This integration facilitates ...
+
+### Compatibility
+{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}}
+This integration is compatible with ...
+
+### How it works
+{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or receiving data from a network or file.*/}}
+
+## What data does this integration collect?
+{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}}
+The bad integration hbs integration collects log messages of the following types:
+* ...
+
+### Supported use cases
+{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}}
+
+## What do I need to use this integration?
+{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}}
+
+## How do I deploy this integration?
+
+### Agent-based deployment
+
+Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host.
+
+Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines.
+
+{{/* If agentless is available for this integration, we'll want to include that here as well.
+### Agentless deployment
+
+Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it.
+
+For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html)
+*/}}
+
+### Onboard / configure
+{{/* List the steps that will need to be followed in order to completely set up a working integration.
+For integrations that support multiple input types, be sure to add steps for all inputs.
+*/}}
+
+### Validation
+{{/* How can the user test whether the integration is working? Including example commands or test files if applicable */}}
+
+## Troubleshooting
+
+For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems).
+{{/*
+Add any vendor specific troubleshooting here.
+
+Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved?
+If applicable, links to the third-party software’s troubleshooting documentation.
+*/}}
+
+## Scaling
+
+For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation.
+{{/* Add any vendor specific scaling information here */}}
+
+## Reference
+{{/* Repeat for each data stream of the current type
+### {Data stream name}
+
+The `{data stream name}` data stream provides events from {source} of the following types: {list types}.
+
+For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event.
+
+The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration.
+
+#### {data stream name} fields
+
+To include a generated list of fields from the `fields/` directory, uncomment and use:
+{{ fields "data_stream_name" }}
+
+The event template function will be replaced by a sample event, taken from `sample_event.json`, when building this integration.
+
+To include a sample event from `sample_event.json`, uncomment and use:
+{{ event "data_stream_name" }}
+
+*/}}
+
+### Inputs used
+{{/* All inputs used by this package will be automatically listed here. */}}
+{{ inputDocs }}
+
+### API usage
+{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevant information */}}
+These APIs are used with this integration:
+* ...
diff --git a/test/packages/bad_integration_hbs/img/sample-logo.svg b/test/packages/bad_integration_hbs/img/sample-logo.svg
new file mode 100644
index 000000000..6268dd88f
--- /dev/null
+++ b/test/packages/bad_integration_hbs/img/sample-logo.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/test/packages/bad_integration_hbs/img/sample-screenshot.png b/test/packages/bad_integration_hbs/img/sample-screenshot.png
new file mode 100644
index 000000000..d7a56a3ec
Binary files /dev/null and b/test/packages/bad_integration_hbs/img/sample-screenshot.png differ
diff --git a/test/packages/bad_integration_hbs/manifest.yml b/test/packages/bad_integration_hbs/manifest.yml
new file mode 100644
index 000000000..30b0184b1
--- /dev/null
+++ b/test/packages/bad_integration_hbs/manifest.yml
@@ -0,0 +1,37 @@
+format_version: 3.5.4
+name: bad_integration_hbs
+title: "bad integration hbs"
+version: 0.0.1
+source:
+ license: "Elastic-2.0"
+description: "package with wrong hbs templates at data stream"
+type: integration
+categories:
+ - custom
+ - aws
+conditions:
+ kibana:
+ version: "^9.2.1"
+ elastic:
+ subscription: "basic"
+screenshots:
+ - src: /img/sample-screenshot.png
+ title: Sample screenshot
+ size: 600x600
+ type: image/png
+icons:
+ - src: /img/sample-logo.svg
+ title: Sample logo
+ size: 32x32
+ type: image/svg+xml
+policy_templates:
+ - name: sample
+ title: Sample logs
+ description: Collect sample logs
+ inputs:
+ - type: logfile
+ title: Collect sample logs from instances
+ description: Collecting sample logs
+owner:
+ github: elastic/integrations
+ type: elastic
diff --git a/test/packages/bad_integration_hbs/sample_event.json b/test/packages/bad_integration_hbs/sample_event.json
new file mode 100644
index 000000000..e302589fd
--- /dev/null
+++ b/test/packages/bad_integration_hbs/sample_event.json
@@ -0,0 +1,3 @@
+{
+ "description": "This is an example sample-event for bad integration hbs. Replace it with a real sample event. Hint: If system tests exist, running `elastic-package test system --generate` will generate this file."
+}
diff --git a/test/packages/bad_integration_hbs_linked/LICENSE.txt b/test/packages/bad_integration_hbs_linked/LICENSE.txt
new file mode 100644
index 000000000..809108b85
--- /dev/null
+++ b/test/packages/bad_integration_hbs_linked/LICENSE.txt
@@ -0,0 +1,93 @@
+Elastic License 2.0
+
+URL: https://www.elastic.co/licensing/elastic-license
+
+## Acceptance
+
+By using the software, you agree to all of the terms and conditions below.
+
+## Copyright License
+
+The licensor grants you a non-exclusive, royalty-free, worldwide,
+non-sublicensable, non-transferable license to use, copy, distribute, make
+available, and prepare derivative works of the software, in each case subject to
+the limitations and conditions below.
+
+## Limitations
+
+You may not provide the software to third parties as a hosted or managed
+service, where the service provides users with access to any substantial set of
+the features or functionality of the software.
+
+You may not move, change, disable, or circumvent the license key functionality
+in the software, and you may not remove or obscure any functionality in the
+software that is protected by the license key.
+
+You may not alter, remove, or obscure any licensing, copyright, or other notices
+of the licensor in the software. Any use of the licensor’s trademarks is subject
+to applicable law.
+
+## Patents
+
+The licensor grants you a license, under any patent claims the licensor can
+license, or becomes able to license, to make, have made, use, sell, offer for
+sale, import and have imported the software, in each case subject to the
+limitations and conditions in this license. This license does not cover any
+patent claims that you cause to be infringed by modifications or additions to
+the software. If you or your company make any written claim that the software
+infringes or contributes to infringement of any patent, your patent license for
+the software granted under these terms ends immediately. If your company makes
+such a claim, your patent license ends immediately for work on behalf of your
+company.
+
+## Notices
+
+You must ensure that anyone who gets a copy of any part of the software from you
+also gets a copy of these terms.
+
+If you modify the software, you must include in any modified copies of the
+software prominent notices stating that you have modified the software.
+
+## No Other Rights
+
+These terms do not imply any licenses other than those expressly granted in
+these terms.
+
+## Termination
+
+If you use the software in violation of these terms, such use is not licensed,
+and your licenses will automatically terminate. If the licensor provides you
+with a notice of your violation, and you cease all violation of this license no
+later than 30 days after you receive that notice, your licenses will be
+reinstated retroactively. However, if you violate these terms after such
+reinstatement, any additional violation of these terms will cause your licenses
+to terminate automatically and permanently.
+
+## No Liability
+
+*As far as the law allows, the software comes as is, without any warranty or
+condition, and the licensor will not be liable to you for any damages arising
+out of these terms or the use or nature of the software, under any kind of
+legal claim.*
+
+## Definitions
+
+The **licensor** is the entity offering these terms, and the **software** is the
+software the licensor makes available under these terms, including any portion
+of it.
+
+**you** refers to the individual or entity agreeing to these terms.
+
+**your company** is any legal entity, sole proprietorship, or other kind of
+organization that you work for, plus all organizations that have control over,
+are under the control of, or are under common control with that
+organization. **control** means ownership of substantially all the assets of an
+entity, or the power to direct its management and policies by vote, contract, or
+otherwise. Control can be direct or indirect.
+
+**your licenses** are all the licenses granted to you for the software under
+these terms.
+
+**use** means anything you do with the software requiring one of your licenses.
+
+**trademark** means trademarks, service marks, and similar rights.
diff --git a/test/packages/bad_integration_hbs_linked/_dev/build/docs/README.md b/test/packages/bad_integration_hbs_linked/_dev/build/docs/README.md
new file mode 100644
index 000000000..7cc03b518
--- /dev/null
+++ b/test/packages/bad_integration_hbs_linked/_dev/build/docs/README.md
@@ -0,0 +1,101 @@
+{{- generatedHeader }}
+{{/*
+This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details
+described in the comments.
+
+Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines
+*/}}
+# bad integration hbs Integration for Elastic
+
+## Overview
+{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}}
+The bad integration hbs integration for Elastic enables collection of ...
+This integration facilitates ...
+
+### Compatibility
+{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}}
+This integration is compatible with ...
+
+### How it works
+{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or receiving data from a network or file.*/}}
+
+## What data does this integration collect?
+{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}}
+The bad integration hbs integration collects log messages of the following types:
+* ...
+
+### Supported use cases
+{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}}
+
+## What do I need to use this integration?
+{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}}
+
+## How do I deploy this integration?
+
+### Agent-based deployment
+
+Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host.
+
+Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines.
+
+{{/* If agentless is available for this integration, we'll want to include that here as well.
+### Agentless deployment
+
+Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it.
+
+For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html)
+*/}}
+
+### Onboard / configure
+{{/* List the steps that will need to be followed in order to completely set up a working integration.
+For integrations that support multiple input types, be sure to add steps for all inputs.
+*/}}
+
+### Validation
+{{/* How can the user test whether the integration is working? Including example commands or test files if applicable */}}
+
+## Troubleshooting
+
+For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems).
+{{/*
+Add any vendor specific troubleshooting here.
+
+Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved?
+If applicable, links to the third-party software’s troubleshooting documentation.
+*/}}
+
+## Scaling
+
+For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation.
+{{/* Add any vendor specific scaling information here */}}
+
+## Reference
+{{/* Repeat for each data stream of the current type
+### {Data stream name}
+
+The `{data stream name}` data stream provides events from {source} of the following types: {list types}.
+
+For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event.
+
+The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration.
+
+#### {data stream name} fields
+
+To include a generated list of fields from the `fields/` directory, uncomment and use:
+{{ fields "data_stream_name" }}
+
+The event template function will be replaced by a sample event, taken from `sample_event.json`, when building this integration.
+
+To include a sample event from `sample_event.json`, uncomment and use:
+{{ event "data_stream_name" }}
+
+*/}}
+
+### Inputs used
+{{/* All inputs used by this package will be automatically listed here. */}}
+{{ inputDocs }}
+
+### API usage
+{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevant information */}}
+These APIs are used with this integration:
+* ...
diff --git a/test/packages/bad_integration_hbs_linked/changelog.yml b/test/packages/bad_integration_hbs_linked/changelog.yml
new file mode 100644
index 000000000..bb0320a52
--- /dev/null
+++ b/test/packages/bad_integration_hbs_linked/changelog.yml
@@ -0,0 +1,6 @@
+# newer versions go on top
+- version: "0.0.1"
+ changes:
+ - description: Initial draft of the package
+ type: enhancement
+ link: https://github.com/elastic/integrations/pull/1 # FIXME Replace with the real PR link
diff --git a/test/packages/bad_integration_hbs_linked/data_stream/foo/agent/stream/filestream.yml.hbs.link b/test/packages/bad_integration_hbs_linked/data_stream/foo/agent/stream/filestream.yml.hbs.link
new file mode 100644
index 000000000..758fdf51c
--- /dev/null
+++ b/test/packages/bad_integration_hbs_linked/data_stream/foo/agent/stream/filestream.yml.hbs.link
@@ -0,0 +1 @@
+../../../../../bad_integration_hbs/data_stream/foo/agent/stream/filestream.yml.hbs fc8773ba7c0efda3fadd617e6fc4385a54effd8fd715683ff2c3b2ad7be894d0
\ No newline at end of file
diff --git a/test/packages/bad_integration_hbs_linked/data_stream/foo/elasticsearch/ingest_pipeline/default.yml b/test/packages/bad_integration_hbs_linked/data_stream/foo/elasticsearch/ingest_pipeline/default.yml
new file mode 100644
index 000000000..1a308fded
--- /dev/null
+++ b/test/packages/bad_integration_hbs_linked/data_stream/foo/elasticsearch/ingest_pipeline/default.yml
@@ -0,0 +1,10 @@
+---
+description: Pipeline for processing sample logs
+processors:
+- set:
+ field: sample_field
+ value: "1"
+on_failure:
+- set:
+ field: error.message
+ value: '{{ _ingest.on_failure_message }}'
diff --git a/test/packages/bad_integration_hbs_linked/data_stream/foo/fields/base-fields.yml b/test/packages/bad_integration_hbs_linked/data_stream/foo/fields/base-fields.yml
new file mode 100644
index 000000000..7c798f453
--- /dev/null
+++ b/test/packages/bad_integration_hbs_linked/data_stream/foo/fields/base-fields.yml
@@ -0,0 +1,12 @@
+- name: data_stream.type
+ type: constant_keyword
+ description: Data stream type.
+- name: data_stream.dataset
+ type: constant_keyword
+ description: Data stream dataset.
+- name: data_stream.namespace
+ type: constant_keyword
+ description: Data stream namespace.
+- name: '@timestamp'
+ type: date
+ description: Event timestamp.
diff --git a/test/packages/bad_integration_hbs_linked/data_stream/foo/manifest.yml b/test/packages/bad_integration_hbs_linked/data_stream/foo/manifest.yml
new file mode 100644
index 000000000..251bec6c4
--- /dev/null
+++ b/test/packages/bad_integration_hbs_linked/data_stream/foo/manifest.yml
@@ -0,0 +1,256 @@
+title: "New Data Stream"
+type: logs
+streams:
+ - input: filestream
+ title: "logs via filestream"
+ description: |-
+ Collect logs with filestream
+ template_path: filestream.yml.hbs
+ vars:
+ - name: paths
+ type: text
+ title: "Paths"
+ multi: true
+ required: true
+ show_user: true
+ default:
+ - /var/log/*.log
+ - name: data_stream.dataset
+ type: text
+ title: "Dataset name"
+ description: |-
+ Dataset to write data to. Changing the dataset will send the data to a different index. You can't use `-` in the name of a dataset and only valid characters for [Elasticsearch index names](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html).
+ required: true
+ show_user: true
+ default: filestream.generic
+ - name: pipeline
+ type: text
+ title: "Ingest Pipeline"
+ description: |-
+ The Ingest Node pipeline ID to be used by the integration.
+ show_user: true
+ - name: parsers
+ type: yaml
+ title: "Parsers"
+ description: |-
+ This option expects a list of parsers that the log line has to go through. For more information see [Parsers](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#_parsers)
+ show_user: true
+ default: ""
+ #- ndjson:
+ # target: ""
+ # message_key: msg
+ #- multiline:
+ # type: count
+ # count_lines: 3
+ - name: exclude_files
+ type: text
+ title: "Exclude Files"
+ description: |-
+ A list of regular expressions to match the files that you want Elastic Agent to ignore. By default no files are excluded.
+ multi: true
+ show_user: true
+ default:
+ - \.gz$
+ - name: include_files
+ type: text
+ title: "Include Files"
+ description: |-
+ A list of regular expressions to match the files that you want Elastic Agent to include. If a list of regexes is provided, only the files that are allowed by the patterns are harvested.
+ multi: true
+ show_user: true
+ - name: processors
+ type: yaml
+ title: "Processors"
+ description: |-
+ Processors are used to reduce the number of fields in the exported event or to enhance the event with metadata. This executes in the agent before the logs are parsed. See [Processors](https://www.elastic.co/guide/en/beats/filebeat/current/filtering-and-enhancing-data.html) for details.
+ - name: tags
+ type: text
+ title: "Tags"
+ description: |-
+ Tags to include in the published event
+ multi: true
+ show_user: true
+ - name: encoding
+ type: text
+ title: "Encoding"
+ description: |-
+ The file encoding to use for reading data that contains international characters. For a full list of valid encodings, see the [Documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#_encoding_2)
+ - name: recursive_glob
+ type: bool
+ title: "Recursive Glob"
+ description: |-
+ Enable expanding `**` into recursive glob patterns. With this feature enabled, the rightmost `**` in each path is expanded into a fixed number of glob patterns. For example: `/foo/**` expands to `/foo`, `/foo/*`, `/foo/*/*`, and so on. If enabled it expands a single `**` into a 8-level deep `*` pattern.
+ This feature is enabled by default. Set prospector.scanner.recursive_glob to false to disable it.
+ default: true
+ - name: symlinks
+ type: bool
+ title: "Enable symlinks"
+ description: |-
+ The symlinks option allows Elastic Agent to harvest symlinks in addition to regular files. When harvesting symlinks, Elastic Agent opens and reads the original file even though it reports the path of the symlink.
+ **Because this option may lead to data loss, it is disabled by default.**
+ - name: resend_on_touch
+ type: bool
+ title: "Resend on touch"
+ description: |-
+ If this option is enabled a file is resent if its size has not changed but its modification time has changed to a later time than before. It is disabled by default to avoid accidentally resending files.
+ - name: check_interval
+ type: text
+ title: "Check Interval"
+ description: |-
+ How often Elastic Agent checks for new files in the paths that are specified for harvesting. For example, specify 1s to scan the directory as frequently as possible without causing Elastic Agent to scan too frequently. **We do not recommend to set this value <1s.**
+ - name: ignore_older
+ type: text
+ title: "Ignore Older"
+ description: |-
+ If this option is enabled, Elastic Agent ignores any files that were modified before the specified timespan. You can use time strings like 2h (2 hours) and 5m (5 minutes). The default is 0, which disables the setting.
+ You must set Ignore Older to be greater than On State Change Inactive.
+ For more information, please see the [Documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#filebeat-input-filestream-ignore-older)
+ - name: ignore_inactive
+ type: text
+ title: "Ignore Inactive"
+ description: |-
+ If this option is enabled, Elastic Agent ignores every file that has not been updated since the selected time. Possible options are since_first_start and since_last_start.
+ - name: close_on_state_changed_inactive
+ type: text
+ title: "Close on State Changed Inactive"
+ description: |-
+ When this option is enabled, Elastic Agent closes the file handle if a file has not been harvested for the specified duration. The counter for the defined period starts when the last log line was read by the harvester. It is not based on the modification time of the file. If the closed file changes again, a new harvester is started and the latest changes will be picked up after Check Interval has elapsed.
+ - name: close_on_state_changed_renamed
+ type: bool
+ title: "Close on State Changed Renamed"
+ description: |-
+ **Only use this option if you understand that data loss is a potential side effect.**
+ When this option is enabled, Elastic Agent closes the file handler when a file is renamed. This happens, for example, when rotating files. By default, the harvester stays open and keeps reading the file because the file handler does not depend on the file name.
+ - name: close_on_state_changed_removed
+ type: bool
+ title: "Close on State Changed Removed"
+ description: |-
+ When this option is enabled, Elastic Agent closes the harvester when a file is removed. Normally a file should only be removed after it’s inactive for the duration specified by close.on_state_change.inactive.
+ - name: close_reader_eof
+ type: bool
+ title: "Close Reader EOF"
+ description: |-
+ **Only use this option if you understand that data loss is a potential side effect.**
+ When this option is enabled, Elastic Agent closes a file as soon as the end of a file is reached. This is useful when your files are only written once and not updated from time to time. For example, this happens when you are writing every single log event to a new file. This option is disabled by default.
+ - name: close_reader_after_interval
+ type: text
+ title: "Close Reader After Interval"
+ description: |-
+ **Only use this option if you understand that data loss is a potential side effect. Another side effect is that multiline events might not be completely sent before the timeout expires.**
+ This option is particularly useful in case the output is blocked, which makes Elastic Agent keep open file handlers even for files that were deleted from the disk.
+ For more information see the [documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#filebeat-input-filestream-close-timeout).
+ - name: clean_inactive
+ type: text
+ title: "Clean Inactive"
+ description: |-
+ **Only use this option if you understand that data loss is a potential side effect.**
+ When this option is enabled, Elastic Agent removes the state of a file after the specified period of inactivity has elapsed.
+ E.g: "30m", Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h". By default cleaning inactive states is disabled, -1 is used to disable it.
+ default: -1
+ - name: clean_removed
+ type: bool
+ title: "Clean Removed"
+ description: |-
+ When this option is enabled, Elastic Agent cleans files from the registry if they cannot be found on disk anymore under the last known name.
+ **You must disable this option if you also disable Close Removed.**
+ - name: harvester_limit
+ type: integer
+ title: "Harvester Limit"
+ description: |-
+ The harvester_limit option limits the number of harvesters
+ that are started in parallel for one input. This directly
+ relates to the maximum number of file handlers that are
+ opened. The default is 0 (no limit).
+ default: 0
+ - name: backoff_init
+ type: text
+ title: "Backoff Init"
+ description: |-
+ The backoff option defines how long Elastic Agent waits before checking a file again after EOF is reached. The default is 1s.
+ - name: backoff_max
+ type: text
+ title: "Backoff Max"
+ description: |-
+ The maximum time for Elastic Agent to wait before checking a file again after EOF is reached. The default is 10s.
+ **Requirement: Set Backoff Max to be greater than or equal to Backoff Init and less than or equal to Check Interval (Backoff Init <= Backoff Max <= Check Interval).**
+ - name: fingerprint
+ type: bool
+ title: "File identity: Fingerprint"
+ description: |-
+ **Changing file_identity methods between runs may result in
+ duplicated events in the output.**
+ Uses a fingerprint generated from the first few bytes (1k is
+ the default, this can be configured via Fingerprint offset
+ and length) to identify a file instead of inode + device ID.
+ Refer to https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html#_file_identity_2
+ for more details. If this option is disabled (and 'Native
+ file identity is not enabled'), Elastic-Agent < 9.0.0 will
+ use Native as the file identity, and >= 9.0.0 will use
+ Fingerprint with the default offset and length.
+ default: true
+ - name: fingerprint_offset
+ type: integer
+ title: "File identity: Fingerprint offset"
+ description: |-
+ Offset from the beginning of the file to start calculating
+ the fingerprint. The default is 0. Only used when the
+ fingerprint file identity is selected
+ default: 0
+ - name: fingerprint_length
+ type: integer
+ title: "File identity: Fingerprint length"
+ description: |-
+ The number of bytes used to calculate the fingerprint. The
+ default is 1024. Only used when the fingerprint file
+ identity is selected.
+ default: 1024
+ - name: file_identity_native
+ type: bool
+ title: "File identity: Native"
+ description: |-
+ **Changing file_identity methods between runs may result in
+ duplicated events in the output.**
+ Uses a native identifier for files, on most Unix-like
+ file systems this is the inode + device ID. On file systems
+ that do not support inode, the native equivalent is used.
+ If you enable this option you **MUST disable Fingerprint
+ file identity**. Refer to
+ https://www.elastic.co/docs/reference/beats/filebeat/filebeat-input-filestream
+ for more details.
+ default: false
+ - name: rotation_external_strategy_copytruncate
+ type: yaml
+ title: "Rotation Strategy"
+ description: "If the log rotating application copies the contents of the active file and then truncates the original file, use these options to help Elastic Agent to read files correctly.\nSet the option suffix_regex so Elastic Agent can tell active and rotated files apart. \nThere are two supported suffix types in the input: numeric and date."
+ - name: exclude_lines
+ type: text
+ title: "Exclude Lines"
+ description: |-
+ A list of regular expressions to match the lines that you want Elastic Agent to exclude. Elastic Agent drops any lines that match a regular expression in the list. By default, no lines are dropped. Empty lines are ignored.
+ multi: true
+ - name: include_lines
+ type: text
+ title: "Include Lines"
+ description: |-
+ A list of regular expressions to match the lines that you want Elastic Agent to include. Elastic Agent exports only the lines that match a regular expression in the list. By default, all lines are exported. Empty lines are ignored.
+ multi: true
+ - name: buffer_size
+ type: text
+ title: "Buffer Size"
+ description: |-
+ The size in bytes of the buffer that each harvester uses when fetching a file. The default is 16384.
+ - name: message_max_bytes
+ type: text
+ title: "Message Max Bytes"
+ description: |-
+ The maximum number of bytes that a single log message can have. All bytes after message_max_bytes are discarded and not sent. The default is 10MB (10485760).
+ - name: condition
+ type: text
+ title: "Condition"
+ description: |-
+ Condition to filter when to collect this input. See [Dynamic Input Configuration](https://www.elastic.co/guide/en/fleet/current/dynamic-input-configuration.html) for details.
+ show_user: true
+elasticsearch:
+ index_template:
+ mappings:
+ subobjects: false
diff --git a/test/packages/bad_integration_hbs_linked/docs/README.md b/test/packages/bad_integration_hbs_linked/docs/README.md
new file mode 100644
index 000000000..7cc03b518
--- /dev/null
+++ b/test/packages/bad_integration_hbs_linked/docs/README.md
@@ -0,0 +1,101 @@
+{{- generatedHeader }}
+{{/*
+This template can be used as a starting point for writing documentation for your new integration. For each section, fill in the details
+described in the comments.
+
+Find more detailed documentation guidelines in https://www.elastic.co/docs/extend/integrations/documentation-guidelines
+*/}}
+# bad integration hbs Integration for Elastic
+
+## Overview
+{{/* Complete this section with a short summary of what data this integration collects and what use cases it enables */}}
+The bad integration hbs integration for Elastic enables collection of ...
+This integration facilitates ...
+
+### Compatibility
+{{/* Complete this section with information on what 3rd party software or hardware versions this integration is compatible with */}}
+This integration is compatible with ...
+
+### How it works
+{{/* Add a high level overview on how this integration works. For example, does it collect data from API calls or receiving data from a network or file.*/}}
+
+## What data does this integration collect?
+{{/* Complete this section with information on what types of data the integration collects, and link to reference documentation if available */}}
+The bad integration hbs integration collects log messages of the following types:
+* ...
+
+### Supported use cases
+{{/* Add details on the use cases that can be enabled by using this integration. Explain why a user would want to install and use this integration. */}}
+
+## What do I need to use this integration?
+{{/* List any vendor-specific prerequisites needed before starting to install the integration. */}}
+
+## How do I deploy this integration?
+
+### Agent-based deployment
+
+Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md). You can install only one Elastic Agent per host.
+
+Elastic Agent is required to stream data from the syslog or log file receiver and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines.
+
+{{/* If agentless is available for this integration, we'll want to include that here as well.
+### Agentless deployment
+
+Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. Agentless deployments provide a means to ingest data while avoiding the orchestration, management, and maintenance needs associated with standard ingest infrastructure. Using an agentless deployment makes manual agent deployment unnecessary, allowing you to focus on your data instead of the agent that collects it.
+
+For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html)
+*/}}
+
+### Onboard / configure
+{{/* List the steps that will need to be followed in order to completely set up a working integration.
+For integrations that support multiple input types, be sure to add steps for all inputs.
+*/}}
+
+### Validation
+{{/* How can the user test whether the integration is working? Including example commands or test files if applicable */}}
+
+## Troubleshooting
+
+For help with Elastic ingest tools, check [Common problems](https://www.elastic.co/docs/troubleshoot/ingest/fleet/common-problems).
+{{/*
+Add any vendor specific troubleshooting here.
+
+Are there common issues or “gotchas” for deploying this integration? If so, how can they be resolved?
+If applicable, links to the third-party software’s troubleshooting documentation.
+*/}}
+
+## Scaling
+
+For more information on architectures that can be used for scaling this integration, check the [Ingest Architectures](https://www.elastic.co/docs/manage-data/ingest/ingest-reference-architectures) documentation.
+{{/* Add any vendor specific scaling information here */}}
+
+## Reference
+{{/* Repeat for each data stream of the current type
+### {Data stream name}
+
+The `{data stream name}` data stream provides events from {source} of the following types: {list types}.
+
+For each data_stream_name, include an optional summary of the datastream, the exported fields reference table and the sample event.
+
+The fields template function will be replaced by a generated list of all fields from the `fields/` directory of the data stream when building the integration.
+
+#### {data stream name} fields
+
+To include a generated list of fields from the `fields/` directory, uncomment and use:
+{{ fields "data_stream_name" }}
+
+The event template function will be replaced by a sample event, taken from `sample_event.json`, when building this integration.
+
+To include a sample event from `sample_event.json`, uncomment and use:
+{{ event "data_stream_name" }}
+
+*/}}
+
+### Inputs used
+{{/* All inputs used by this package will be automatically listed here. */}}
+{{ inputDocs }}
+
+### API usage
+{{/* For integrations that use APIs to collect data, document all the APIs that are used, and link to relevant information */}}
+These APIs are used with this integration:
+* ...
diff --git a/test/packages/bad_integration_hbs_linked/img/sample-logo.svg b/test/packages/bad_integration_hbs_linked/img/sample-logo.svg
new file mode 100644
index 000000000..6268dd88f
--- /dev/null
+++ b/test/packages/bad_integration_hbs_linked/img/sample-logo.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/test/packages/bad_integration_hbs_linked/img/sample-screenshot.png b/test/packages/bad_integration_hbs_linked/img/sample-screenshot.png
new file mode 100644
index 000000000..d7a56a3ec
Binary files /dev/null and b/test/packages/bad_integration_hbs_linked/img/sample-screenshot.png differ
diff --git a/test/packages/bad_integration_hbs_linked/manifest.yml b/test/packages/bad_integration_hbs_linked/manifest.yml
new file mode 100644
index 000000000..30b0184b1
--- /dev/null
+++ b/test/packages/bad_integration_hbs_linked/manifest.yml
@@ -0,0 +1,37 @@
+format_version: 3.5.4
+name: bad_integration_hbs
+title: "bad integration hbs"
+version: 0.0.1
+source:
+ license: "Elastic-2.0"
+description: "package with wrong hbs templates at data stream"
+type: integration
+categories:
+ - custom
+ - aws
+conditions:
+ kibana:
+ version: "^9.2.1"
+ elastic:
+ subscription: "basic"
+screenshots:
+ - src: /img/sample-screenshot.png
+ title: Sample screenshot
+ size: 600x600
+ type: image/png
+icons:
+ - src: /img/sample-logo.svg
+ title: Sample logo
+ size: 32x32
+ type: image/svg+xml
+policy_templates:
+ - name: sample
+ title: Sample logs
+ description: Collect sample logs
+ inputs:
+ - type: logfile
+ title: Collect sample logs from instances
+ description: Collecting sample logs
+owner:
+ github: elastic/integrations
+ type: elastic
diff --git a/test/packages/bad_integration_hbs_linked/sample_event.json b/test/packages/bad_integration_hbs_linked/sample_event.json
new file mode 100644
index 000000000..e302589fd
--- /dev/null
+++ b/test/packages/bad_integration_hbs_linked/sample_event.json
@@ -0,0 +1,3 @@
+{
+ "description": "This is an example sample-event for bad integration hbs. Replace it with a real sample event. Hint: If system tests exist, running `elastic-package test system --generate` will generate this file."
+}
diff --git a/test/packages/good_input/agent/input/input.yml.hbs b/test/packages/good_input/agent/input/input.yml.hbs
index 6eb03670b..7baca13fc 100644
--- a/test/packages/good_input/agent/input/input.yml.hbs
+++ b/test/packages/good_input/agent/input/input.yml.hbs
@@ -5,5 +5,5 @@ hosts:
- {{this}}
{{/each}}
driver: {{driver}}
-sql_query: {{sql_query}
+sql_query: {{sql_query}}
sql_response_format: {{sql_response_format}}
\ No newline at end of file
diff --git a/test/packages/sql_input/agent/input/input.yml.hbs b/test/packages/sql_input/agent/input/input.yml.hbs
index 6eb03670b..7baca13fc 100644
--- a/test/packages/sql_input/agent/input/input.yml.hbs
+++ b/test/packages/sql_input/agent/input/input.yml.hbs
@@ -5,5 +5,5 @@ hosts:
- {{this}}
{{/each}}
driver: {{driver}}
-sql_query: {{sql_query}
+sql_query: {{sql_query}}
sql_response_format: {{sql_response_format}}
\ No newline at end of file