Skip to content

Commit fe15c35

Browse files
committed
add resource for datastream lifecycle
1 parent f05509d commit fe15c35

File tree

16 files changed

+985
-1
lines changed

16 files changed

+985
-1
lines changed

CHANGELOG.md

+1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
## [Unreleased]
22

3+
- Add resource `elasticstack_elasticsearch_data_stream_lifecycle` ([#838](https://github.com./elastic/terraform-provider-elasticstack/issues/838))
34
- Fix bug updating alert delay ([#859](https://github.com./elastic/terraform-provider-elasticstack/pull/859))
45
- Support updating `elasticstack_elasticsearch_security_api_key` when supported by the backing cluster ([#843](https://github.com./elastic/terraform-provider-elasticstack/pull/843))
56
- Fix validation of `throttle`, and `interval` attributes in `elasticstack_kibana_alerting_rule` allowing all Elastic duration values ([#846](https://github.com./elastic/terraform-provider-elasticstack/pull/846))
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,109 @@
1+
---
2+
subcategory: "Index"
3+
layout: ""
4+
page_title: "Elasticstack: elasticstack_elasticsearch_data_stream_lifecycle Resource"
5+
description: |-
6+
Manages Lifecycle for Elasticsearch Data Streams
7+
---
8+
9+
# Resource: elasticstack_elasticsearch_data_stream_lifecycle
10+
11+
Configures the data stream lifecycle for the targeted data streams, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html
12+
13+
## Example Usage
14+
15+
```terraform
16+
provider "elasticstack" {
17+
elasticsearch {}
18+
}
19+
20+
// First we must have an index template created
21+
resource "elasticstack_elasticsearch_index_template" "my_data_stream_template" {
22+
name = "my_data_stream"
23+
24+
index_patterns = ["my-stream*"]
25+
26+
data_stream {}
27+
}
28+
29+
// and now we can create data stream based on the index template
30+
resource "elasticstack_elasticsearch_data_stream" "my_data_stream" {
31+
name = "my-stream"
32+
33+
// make sure that template is created before the data stream
34+
depends_on = [
35+
elasticstack_elasticsearch_index_template.my_data_stream_template
36+
]
37+
}
38+
39+
// finally we can manage lifecycle of data stream
40+
resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle" {
41+
name = "my-stream"
42+
data_retention = "3d"
43+
44+
depends_on = [
45+
elasticstack_elasticsearch_data_stream.my_data_stream,
46+
]
47+
}
48+
49+
// or you can use wildcards to manage multiple lifecycles at once
50+
resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle_multiple" {
51+
name = "stream-*"
52+
data_retention = "3d"
53+
}
54+
```
55+
56+
<!-- schema generated by tfplugindocs -->
57+
## Schema
58+
59+
### Required
60+
61+
- `name` (String) Name of the data stream. Supports wildcards.
62+
63+
### Optional
64+
65+
- `data_retention` (String) Every document added to this data stream will be stored at least for this time frame. When empty, every document in this data stream will be stored indefinitely
66+
- `downsampling` (Attributes List) Downsampling configuration objects, each defining an after interval representing when the backing index is meant to be downsampled and a fixed_interval representing the downsampling interval. (see [below for nested schema](#nestedatt--downsampling))
67+
- `elasticsearch_connection` (Block List, Deprecated) Elasticsearch connection configuration block. (see [below for nested schema](#nestedblock--elasticsearch_connection))
68+
- `enabled` (Boolean) Data stream lifecycle on/off.
69+
- `expand_wildcards` (String) Determines how wildcard patterns in the `indices` parameter match data streams and indices. Supports comma-separated values, such as `closed,hidden`.
70+
71+
### Read-Only
72+
73+
- `id` (String) Internal identifier of the resource.
74+
75+
<a id="nestedatt--downsampling"></a>
76+
### Nested Schema for `downsampling`
77+
78+
Required:
79+
80+
- `after` (String) Interval representing when the backing index is meant to be downsampled
81+
- `fixed_interval` (String) The interval at which to aggregate the original time series index.
82+
83+
84+
<a id="nestedblock--elasticsearch_connection"></a>
85+
### Nested Schema for `elasticsearch_connection`
86+
87+
Optional:
88+
89+
- `api_key` (String, Sensitive) API Key to use for authentication to Elasticsearch
90+
- `bearer_token` (String, Sensitive) Bearer Token to use for authentication to Elasticsearch
91+
- `ca_data` (String) PEM-encoded custom Certificate Authority certificate
92+
- `ca_file` (String) Path to a custom Certificate Authority certificate
93+
- `cert_data` (String) PEM encoded certificate for client auth
94+
- `cert_file` (String) Path to a file containing the PEM encoded certificate for client auth
95+
- `endpoints` (List of String, Sensitive) A list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number.
96+
- `es_client_authentication` (String, Sensitive) ES Client Authentication field to be used with the bearer token
97+
- `insecure` (Boolean) Disable TLS certificate validation
98+
- `key_data` (String, Sensitive) PEM encoded private key for client auth
99+
- `key_file` (String) Path to a file containing the PEM encoded private key for client auth
100+
- `password` (String, Sensitive) Password to use for API authentication to Elasticsearch.
101+
- `username` (String) Username to use for API authentication to Elasticsearch.
102+
103+
## Import
104+
105+
Import is supported using the following syntax:
106+
107+
```shell
108+
terraform import elasticstack_elasticsearch_data_stream_lifecycle.my_data_stream_lifecycle <cluster_uuid>/<data_stream_name>
109+
```
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
terraform import elasticstack_elasticsearch_data_stream_lifecycle.my_data_stream_lifecycle <cluster_uuid>/<data_stream_name>
2+
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
provider "elasticstack" {
2+
elasticsearch {}
3+
}
4+
5+
// First we must have an index template created
6+
resource "elasticstack_elasticsearch_index_template" "my_data_stream_template" {
7+
name = "my_data_stream"
8+
9+
index_patterns = ["my-stream*"]
10+
11+
data_stream {}
12+
}
13+
14+
// and now we can create data stream based on the index template
15+
resource "elasticstack_elasticsearch_data_stream" "my_data_stream" {
16+
name = "my-stream"
17+
18+
// make sure that template is created before the data stream
19+
depends_on = [
20+
elasticstack_elasticsearch_index_template.my_data_stream_template
21+
]
22+
}
23+
24+
// finally we can manage lifecycle of data stream
25+
resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle" {
26+
name = "my-stream"
27+
data_retention = "3d"
28+
29+
depends_on = [
30+
elasticstack_elasticsearch_data_stream.my_data_stream,
31+
]
32+
}
33+
34+
// or you can use wildcards to manage multiple lifecycles at once
35+
resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle_multiple" {
36+
name = "stream-*"
37+
data_retention = "3d"
38+
}

internal/clients/elasticsearch/index.go

+79
Original file line numberDiff line numberDiff line change
@@ -499,6 +499,85 @@ func DeleteDataStream(ctx context.Context, apiClient *clients.ApiClient, dataStr
499499
return diags
500500
}
501501

502+
func PutDataStreamLifecycle(ctx context.Context, apiClient *clients.ApiClient, dataStreamName string, expand_wildcards string, lifecycle models.LifecycleSettings) fwdiags.Diagnostics {
503+
504+
esClient, err := apiClient.GetESClient()
505+
if err != nil {
506+
return utils.FrameworkDiagFromError(err)
507+
}
508+
509+
lifecycleBytes, err := json.Marshal(lifecycle)
510+
if err != nil {
511+
return utils.FrameworkDiagFromError(err)
512+
}
513+
514+
opts := []func(*esapi.IndicesPutDataLifecycleRequest){
515+
esClient.Indices.PutDataLifecycle.WithBody(bytes.NewReader(lifecycleBytes)),
516+
esClient.Indices.PutDataLifecycle.WithContext(ctx),
517+
esClient.Indices.PutDataLifecycle.WithExpandWildcards(expand_wildcards),
518+
}
519+
res, err := esClient.Indices.PutDataLifecycle([]string{dataStreamName}, opts...)
520+
if err != nil {
521+
return utils.FrameworkDiagFromError(err)
522+
}
523+
defer res.Body.Close()
524+
if diags := utils.CheckError(res, fmt.Sprintf("Unable to create DataStreamLifecycle: %s", dataStreamName)); diags.HasError() {
525+
return utils.FrameworkDiagsFromSDK(diags)
526+
}
527+
return nil
528+
}
529+
530+
func GetDataStreamLifecycle(ctx context.Context, apiClient *clients.ApiClient, dataStreamName string, expand_wildcards string) (*[]models.DataStreamLifecycle, fwdiags.Diagnostics) {
531+
esClient, err := apiClient.GetESClient()
532+
if err != nil {
533+
return nil, utils.FrameworkDiagFromError(err)
534+
}
535+
opts := []func(*esapi.IndicesGetDataLifecycleRequest){
536+
esClient.Indices.GetDataLifecycle.WithContext(ctx),
537+
esClient.Indices.GetDataLifecycle.WithExpandWildcards(expand_wildcards),
538+
}
539+
res, err := esClient.Indices.GetDataLifecycle([]string{dataStreamName}, opts...)
540+
if err != nil {
541+
return nil, utils.FrameworkDiagFromError(err)
542+
}
543+
defer res.Body.Close()
544+
if res.StatusCode == http.StatusNotFound {
545+
return nil, nil
546+
}
547+
if diags := utils.CheckError(res, fmt.Sprintf("Unable to get requested DataStreamLifecycle: %s", dataStreamName)); diags.HasError() {
548+
return nil, utils.FrameworkDiagsFromSDK(diags)
549+
}
550+
551+
dStreams := make(map[string][]models.DataStreamLifecycle)
552+
if err := json.NewDecoder(res.Body).Decode(&dStreams); err != nil {
553+
return nil, utils.FrameworkDiagFromError(err)
554+
}
555+
ds := dStreams["data_streams"]
556+
return &ds, nil
557+
}
558+
559+
func DeleteDataStreamLifecycle(ctx context.Context, apiClient *clients.ApiClient, dataStreamName string, expand_wildcards string) fwdiags.Diagnostics {
560+
561+
esClient, err := apiClient.GetESClient()
562+
if err != nil {
563+
return utils.FrameworkDiagFromError(err)
564+
}
565+
opts := []func(*esapi.IndicesDeleteDataLifecycleRequest){
566+
esClient.Indices.DeleteDataLifecycle.WithContext(ctx),
567+
esClient.Indices.DeleteDataLifecycle.WithExpandWildcards(expand_wildcards),
568+
}
569+
res, err := esClient.Indices.DeleteDataLifecycle([]string{dataStreamName}, opts...)
570+
if err != nil {
571+
return utils.FrameworkDiagFromError(err)
572+
}
573+
defer res.Body.Close()
574+
if diags := utils.CheckError(res, fmt.Sprintf("Unable to delete DataStreamLifecycle: %s", dataStreamName)); diags.HasError() {
575+
return utils.FrameworkDiagsFromSDK(diags)
576+
}
577+
578+
return nil
579+
}
580+
502581
func PutIngestPipeline(ctx context.Context, apiClient *clients.ApiClient, pipeline *models.IngestPipeline) diag.Diagnostics {
503582
var diags diag.Diagnostics
504583
pipelineBytes, err := json.Marshal(pipeline)

0 commit comments

Comments
 (0)