
Commit

[datadog_logs_custom_pipeline] Add tags and description to logs pipelines (#2773)

* Add tags and description to logs pipelines

---------

Co-authored-by: skarimo <[email protected]>
joboccara and skarimo authored Feb 4, 2025
1 parent 757fc64 commit e85d563
Showing 19 changed files with 1,762 additions and 725 deletions.
17 changes: 17 additions & 0 deletions datadog/data_source_datadog_logs_pipelines.go
@@ -63,6 +63,21 @@ func dataSourceDatadogLogsPipelines() *schema.Resource {
Type: schema.TypeBool,
Computed: true,
},
"tags": {
Description: "Tags of the pipeline",
Type: schema.TypeSet,
Computed: true,
Optional: true,
Elem: &schema.Schema{
Type: schema.TypeString,
},
},
"description": {
Description: "Description of the pipeline",
Type: schema.TypeString,
Computed: true,
Optional: true,
},
"is_read_only": {
Description: "Whether or not the pipeline can be edited.",
Type: schema.TypeBool,
@@ -119,6 +134,8 @@ func dataSourceDatadogLogsPipelinesRead(ctx context.Context, d *schema.ResourceD
"is_enabled": pipeline.GetIsEnabled(),
"is_read_only": pipeline.GetIsReadOnly(),
"type": pipeline.GetType(),
"tags": pipeline.GetTags(),
"description": pipeline.GetDescription(),
})
}
}
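For context on the two new map entries above: the data source read copies tags and description straight off the generated client model, whose accessors return a zero value when a field is unset, so pipelines created before this change still flatten cleanly. A minimal, self-contained sketch of that value-or-zero convention (the LogsPipeline stand-in below is illustrative only; the real model ships with datadog-api-client-go):

package main

import "fmt"

// Illustrative stand-in for the generated datadogV1.LogsPipeline model,
// reduced to the two fields this commit starts reading.
type LogsPipeline struct {
	Tags        []string
	Description *string
}

// GetTags returns the Tags field if set, and a zero-value slice otherwise,
// mirroring the generated client's accessor convention.
func (p *LogsPipeline) GetTags() []string {
	if p == nil || p.Tags == nil {
		var ret []string
		return ret
	}
	return p.Tags
}

// GetDescription returns the Description field if set, "" otherwise.
func (p *LogsPipeline) GetDescription() string {
	if p == nil || p.Description == nil {
		return ""
	}
	return *p.Description
}

func main() {
	legacy := &LogsPipeline{} // a pipeline created before tags/description existed
	fmt.Println(legacy.GetTags(), legacy.GetDescription()) // prints "[]" and an empty string
}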
19 changes: 17 additions & 2 deletions datadog/resource_datadog_logs_custom_pipeline.go
@@ -417,6 +417,12 @@ func updateLogsCustomPipelineState(d *schema.ResourceData, pipeline *datadogV1.L
if err := d.Set("is_enabled", pipeline.GetIsEnabled()); err != nil {
return diag.FromErr(err)
}
if err := d.Set("tags", pipeline.GetTags()); err != nil {
return diag.FromErr(err)
}
if err := d.Set("description", pipeline.GetDescription()); err != nil {
return diag.FromErr(err)
}
if err := d.Set("filter", buildTerraformFilter(pipeline.Filter)); err != nil {
return diag.FromErr(err)
}
@@ -757,6 +763,13 @@ func buildDatadogPipeline(d *schema.ResourceData) (*datadogV1.LogsPipeline, erro
var ddPipeline datadogV1.LogsPipeline
ddPipeline.SetName(d.Get("name").(string))
ddPipeline.SetIsEnabled(d.Get("is_enabled").(bool))
tagsSet := d.Get("tags").(*schema.Set).List()
tags := []string{}
for _, tag := range tagsSet {
tags = append(tags, tag.(string))
}
ddPipeline.SetTags(tags)
ddPipeline.SetDescription(d.Get("description").(string))
if tfFilter := d.Get("filter").([]interface{}); len(tfFilter) > 0 {
filter, ok := tfFilter[0].(map[string]interface{})
if !ok {
@@ -1205,8 +1218,10 @@ func buildDatadogFilter(tfFilter map[string]interface{}) *datadogV1.LogsFilter {

func getPipelineSchema(isNested bool) map[string]*schema.Schema {
return map[string]*schema.Schema{
"name": {Type: schema.TypeString, Required: true},
"is_enabled": {Type: schema.TypeBool, Optional: true},
"name": {Type: schema.TypeString, Required: true},
"is_enabled": {Type: schema.TypeBool, Optional: true},
"tags": {Type: schema.TypeSet, Optional: true, Elem: &schema.Schema{Type: schema.TypeString}},
"description": {Type: schema.TypeString, Optional: true},
"filter": {
Type: schema.TypeList,
Required: true,
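buildDatadogPipeline above flattens the Terraform tags set into a plain string slice before calling the client's SetTags. A short sketch of that conversion as a standalone helper, assuming the plugin SDK v2 types already used throughout this file (the expandPipelineTags name is illustrative, not part of this change):

package datadog

import "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"

// expandPipelineTags flattens a Terraform *schema.Set of strings into the
// []string expected by datadogV1.LogsPipeline.SetTags. An empty set yields
// an empty, non-nil slice, which matches the "tags":[] request bodies in the
// re-recorded cassettes below when no tags are configured.
func expandPipelineTags(s *schema.Set) []string {
	tags := make([]string, 0, s.Len())
	for _, v := range s.List() {
		tags = append(tags, v.(string))
	}
	return tags
}

// Equivalent call site inside buildDatadogPipeline:
//
//	ddPipeline.SetTags(expandPipelineTags(d.Get("tags").(*schema.Set)))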
@@ -1 +1 @@
2022-12-05T10:35:35.260642-05:00
2025-01-28T17:06:49.90292+01:00
271 changes: 178 additions & 93 deletions datadog/tests/cassettes/TestAccDatadogLogsPipelineEmptyFilterQuery.yaml
@@ -1,95 +1,180 @@
---
version: 2
interactions:
- request:
body: |
{"filter":{"query":""},"is_enabled":true,"name":"tf-TestAccDatadogLogsPipelineEmptyFilterQuery-local-1670254535","processors":[{"is_enabled":true,"name":"","sources":["redis.severity"],"type":"status-remapper"},{"categories":[{"filter":{"query":""},"name":"debug"}],"is_enabled":true,"name":"sample category processor","target":"foo.severity","type":"category-processor"},{"filter":{"query":""},"is_enabled":true,"name":"Nginx","type":"pipeline"}]}
form: {}
headers:
Accept:
- application/json
Content-Type:
- application/json
url: https://api.datadoghq.com/api/v1/logs/config/pipelines
method: POST
id: 0
response:
body: |
{"is_enabled":true,"name":"tf-TestAccDatadogLogsPipelineEmptyFilterQuery-local-1670254535","filter":{"query":""},"is_read_only":false,"type":"pipeline","id":"B5AHvyA3Qm2ABMgMwA0FBw","processors":[{"is_enabled":true,"sources":["redis.severity"],"type":"status-remapper","name":""},{"is_enabled":true,"type":"category-processor","name":"sample category processor","categories":[{"filter":{"query":""},"name":"debug"}],"target":"foo.severity"},{"is_enabled":true,"filter":{"query":""},"type":"pipeline","name":"Nginx","processors":[]}]}
headers:
Content-Type:
- application/json
status: 200 OK
code: 200
duration: "0ms"
- request:
body: ""
form: {}
headers:
Accept:
- application/json
url: https://api.datadoghq.com/api/v1/logs/config/pipelines/B5AHvyA3Qm2ABMgMwA0FBw
method: GET
id: 1
response:
body: |
{"is_enabled":true,"name":"tf-TestAccDatadogLogsPipelineEmptyFilterQuery-local-1670254535","filter":{"query":""},"is_read_only":false,"type":"pipeline","id":"B5AHvyA3Qm2ABMgMwA0FBw","processors":[{"is_enabled":true,"sources":["redis.severity"],"type":"status-remapper","name":""},{"is_enabled":true,"type":"category-processor","name":"sample category processor","categories":[{"filter":{"query":""},"name":"debug"}],"target":"foo.severity"},{"is_enabled":true,"filter":{"query":""},"type":"pipeline","name":"Nginx","processors":[]}]}
headers:
Content-Type:
- application/json
status: 200 OK
code: 200
duration: "0ms"
- request:
body: ""
form: {}
headers:
Accept:
- application/json
url: https://api.datadoghq.com/api/v1/logs/config/pipelines/B5AHvyA3Qm2ABMgMwA0FBw
method: GET
id: 2
response:
body: |
{"is_enabled":true,"name":"tf-TestAccDatadogLogsPipelineEmptyFilterQuery-local-1670254535","filter":{"query":""},"is_read_only":false,"type":"pipeline","id":"B5AHvyA3Qm2ABMgMwA0FBw","processors":[{"is_enabled":true,"sources":["redis.severity"],"type":"status-remapper","name":""},{"is_enabled":true,"type":"category-processor","name":"sample category processor","categories":[{"filter":{"query":""},"name":"debug"}],"target":"foo.severity"},{"is_enabled":true,"filter":{"query":""},"type":"pipeline","name":"Nginx","processors":[]}]}
headers:
Content-Type:
- application/json
status: 200 OK
code: 200
duration: "0ms"
- request:
body: ""
form: {}
headers:
Accept:
- '*/*'
url: https://api.datadoghq.com/api/v1/logs/config/pipelines/B5AHvyA3Qm2ABMgMwA0FBw
method: DELETE
id: 3
response:
body: |
{}
headers:
Content-Type:
- application/json
status: 200 OK
code: 200
duration: "0ms"
- request:
body: ""
form: {}
headers:
Accept:
- application/json
url: https://api.datadoghq.com/api/v1/logs/config/pipelines/B5AHvyA3Qm2ABMgMwA0FBw
method: GET
id: 4
response:
body: |
{"error":{"message":"Non existing pipeline","code":"InvalidArgument"}}
headers:
Content-Type:
- application/json
status: 400 Bad Request
code: 400
duration: "0ms"
- id: 0
request:
proto: HTTP/1.1
proto_major: 1
proto_minor: 1
content_length: 476
transfer_encoding: []
trailer: {}
host: api.datadoghq.com
remote_addr: ""
request_uri: ""
body: |
{"description":"","filter":{"query":""},"is_enabled":true,"name":"tf-TestAccDatadogLogsPipelineEmptyFilterQuery-local-1738080409","processors":[{"is_enabled":true,"name":"","sources":["redis.severity"],"type":"status-remapper"},{"categories":[{"filter":{"query":""},"name":"debug"}],"is_enabled":true,"name":"sample category processor","target":"foo.severity","type":"category-processor"},{"filter":{"query":""},"is_enabled":true,"name":"Nginx","type":"pipeline"}],"tags":[]}
form: {}
headers:
Accept:
- application/json
Content-Type:
- application/json
url: https://api.datadoghq.com/api/v1/logs/config/pipelines
method: POST
response:
proto: HTTP/1.1
proto_major: 1
proto_minor: 1
transfer_encoding:
- chunked
trailer: {}
content_length: -1
uncompressed: true
body: |
{"id":"MsnpTFYQQHGlqMjKsWlPBQ","type":"pipeline","name":"tf-TestAccDatadogLogsPipelineEmptyFilterQuery-local-1738080409","is_enabled":true,"is_read_only":false,"filter":{"query":""},"processors":[{"name":"","is_enabled":true,"sources":["redis.severity"],"type":"status-remapper"},{"name":"sample category processor","is_enabled":true,"categories":[{"filter":{"query":""},"name":"debug"}],"target":"foo.severity","type":"category-processor"},{"type":"pipeline","name":"Nginx","is_enabled":true,"filter":{"query":""},"processors":[]}],"tags":[],"description":""}
headers:
Content-Type:
- application/json
status: 200 OK
code: 200
duration: 412.800792ms
- id: 1
request:
proto: HTTP/1.1
proto_major: 1
proto_minor: 1
content_length: 0
transfer_encoding: []
trailer: {}
host: api.datadoghq.com
remote_addr: ""
request_uri: ""
body: ""
form: {}
headers:
Accept:
- application/json
url: https://api.datadoghq.com/api/v1/logs/config/pipelines/MsnpTFYQQHGlqMjKsWlPBQ
method: GET
response:
proto: HTTP/1.1
proto_major: 1
proto_minor: 1
transfer_encoding:
- chunked
trailer: {}
content_length: -1
uncompressed: true
body: |
{"id":"MsnpTFYQQHGlqMjKsWlPBQ","type":"pipeline","name":"tf-TestAccDatadogLogsPipelineEmptyFilterQuery-local-1738080409","is_enabled":true,"is_read_only":false,"filter":{"query":""},"processors":[{"name":"","is_enabled":true,"sources":["redis.severity"],"type":"status-remapper"},{"name":"sample category processor","is_enabled":true,"categories":[{"filter":{"query":""},"name":"debug"}],"target":"foo.severity","type":"category-processor"},{"type":"pipeline","name":"Nginx","is_enabled":true,"filter":{"query":""},"processors":[]}],"tags":[],"description":""}
headers:
Content-Type:
- application/json
status: 200 OK
code: 200
duration: 150.6ms
- id: 2
request:
proto: HTTP/1.1
proto_major: 1
proto_minor: 1
content_length: 0
transfer_encoding: []
trailer: {}
host: api.datadoghq.com
remote_addr: ""
request_uri: ""
body: ""
form: {}
headers:
Accept:
- application/json
url: https://api.datadoghq.com/api/v1/logs/config/pipelines/MsnpTFYQQHGlqMjKsWlPBQ
method: GET
response:
proto: HTTP/1.1
proto_major: 1
proto_minor: 1
transfer_encoding:
- chunked
trailer: {}
content_length: -1
uncompressed: true
body: |
{"id":"MsnpTFYQQHGlqMjKsWlPBQ","type":"pipeline","name":"tf-TestAccDatadogLogsPipelineEmptyFilterQuery-local-1738080409","is_enabled":true,"is_read_only":false,"filter":{"query":""},"processors":[{"name":"","is_enabled":true,"sources":["redis.severity"],"type":"status-remapper"},{"name":"sample category processor","is_enabled":true,"categories":[{"filter":{"query":""},"name":"debug"}],"target":"foo.severity","type":"category-processor"},{"type":"pipeline","name":"Nginx","is_enabled":true,"filter":{"query":""},"processors":[]}],"tags":[],"description":""}
headers:
Content-Type:
- application/json
status: 200 OK
code: 200
duration: 146.403208ms
- id: 3
request:
proto: HTTP/1.1
proto_major: 1
proto_minor: 1
content_length: 0
transfer_encoding: []
trailer: {}
host: api.datadoghq.com
remote_addr: ""
request_uri: ""
body: ""
form: {}
headers:
Accept:
- '*/*'
url: https://api.datadoghq.com/api/v1/logs/config/pipelines/MsnpTFYQQHGlqMjKsWlPBQ
method: DELETE
response:
proto: HTTP/1.1
proto_major: 1
proto_minor: 1
transfer_encoding: []
trailer: {}
content_length: 3
uncompressed: false
body: |
{}
headers:
Content-Type:
- application/json
status: 200 OK
code: 200
duration: 161.173ms
- id: 4
request:
proto: HTTP/1.1
proto_major: 1
proto_minor: 1
content_length: 0
transfer_encoding: []
trailer: {}
host: api.datadoghq.com
remote_addr: ""
request_uri: ""
body: ""
form: {}
headers:
Accept:
- application/json
url: https://api.datadoghq.com/api/v1/logs/config/pipelines/MsnpTFYQQHGlqMjKsWlPBQ
method: GET
response:
proto: HTTP/1.1
proto_major: 1
proto_minor: 1
transfer_encoding:
- chunked
trailer: {}
content_length: -1
uncompressed: true
body: |
{"error":{"code":"InvalidArgument","message":"Non existing pipeline"}}
headers:
Content-Type:
- application/json
status: 400 Bad Request
code: 400
duration: 136.473958ms
@@ -1 +1 @@
2023-03-06T13:31:10.412492-05:00
2025-01-27T19:12:45.636115+01:00