From b9e9199522b454306b9a45112f2902f0378d224b Mon Sep 17 00:00:00 2001 From: Nick Clark Date: Wed, 31 May 2023 12:05:58 +1000 Subject: [PATCH 01/15] wip --- go.mod | 2 + go.sum | 4 + internal/clients/api_client.go | 143 +++++++++++++++++++++ internal/schema/connection.go | 224 ++++++++++++++++++++++++++++++++- provider/factory.go | 6 +- provider/plugin_framework.go | 60 +++++++++ provider/provider.go | 2 + 7 files changed, 435 insertions(+), 6 deletions(-) create mode 100644 provider/plugin_framework.go diff --git a/go.mod b/go.mod index eee00d8e9..09af71655 100644 --- a/go.mod +++ b/go.mod @@ -8,6 +8,8 @@ require ( github.com/elastic/go-elasticsearch/v7 v7.17.10 github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 github.com/hashicorp/go-version v1.6.0 + github.com/hashicorp/terraform-plugin-framework v1.2.0 + github.com/hashicorp/terraform-plugin-framework-validators v0.10.0 github.com/hashicorp/terraform-plugin-go v0.19.0 github.com/hashicorp/terraform-plugin-log v0.9.0 github.com/hashicorp/terraform-plugin-mux v0.12.0 diff --git a/go.sum b/go.sum index b89a3b9d2..efac6d9a8 100644 --- a/go.sum +++ b/go.sum @@ -80,6 +80,10 @@ github.com/hashicorp/terraform-exec v0.19.0 h1:FpqZ6n50Tk95mItTSS9BjeOVUb4eg81Sp github.com/hashicorp/terraform-exec v0.19.0/go.mod h1:tbxUpe3JKruE9Cuf65mycSIT8KiNPZ0FkuTE3H4urQg= github.com/hashicorp/terraform-json v0.17.1 h1:eMfvh/uWggKmY7Pmb3T85u86E2EQg6EQHgyRwf3RkyA= github.com/hashicorp/terraform-json v0.17.1/go.mod h1:Huy6zt6euxaY9knPAFKjUITn8QxUFIe9VuSzb4zn/0o= +github.com/hashicorp/terraform-plugin-framework v1.2.0 h1:MZjFFfULnFq8fh04FqrKPcJ/nGpHOvX4buIygT3MSNY= +github.com/hashicorp/terraform-plugin-framework v1.2.0/go.mod h1:nToI62JylqXDq84weLJ/U3umUsBhZAaTmU0HXIVUOcw= +github.com/hashicorp/terraform-plugin-framework-validators v0.10.0 
h1:4L0tmy/8esP6OcvocVymw52lY0HyQ5OxB7VNl7k4bS0= +github.com/hashicorp/terraform-plugin-framework-validators v0.10.0/go.mod h1:qdQJCdimB9JeX2YwOpItEu+IrfoJjWQ5PhLpAOMDQAE= github.com/hashicorp/terraform-plugin-go v0.19.0 h1:BuZx/6Cp+lkmiG0cOBk6Zps0Cb2tmqQpDM3iAtnhDQU= github.com/hashicorp/terraform-plugin-go v0.19.0/go.mod h1:EhRSkEPNoylLQntYsk5KrDHTZJh9HQoumZXbOGOXmec= github.com/hashicorp/terraform-plugin-log v0.9.0 h1:i7hOA+vdAItN1/7UrfBqBwvYPQ9TFvymaRGZED3FCV0= diff --git a/internal/clients/api_client.go b/internal/clients/api_client.go index 7da6d38de..d85bb0056 100644 --- a/internal/clients/api_client.go +++ b/internal/clients/api_client.go @@ -21,6 +21,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/hashicorp/go-version" + fwdiag "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" @@ -74,6 +77,20 @@ type ApiClient struct { version string } +type ElasticsearchConnection struct { + Username types.String `tfsdk:"username"` + Password types.String `tfsdk:"password"` + APIKey types.String `tfsdk:"api_key"` + Endpoints types.List `tfsdk:"endpoints"` + Insecure types.Bool `tfsdk:"insecure"` + CAFile types.String `tfsdk:"ca_file"` + CAData types.String `tfsdk:"ca_data"` + CertFile types.String `tfsdk:"cert_file"` + KeyFile types.String `tfsdk:"key_file"` + CertData types.String `tfsdk:"cert_data"` + KeyData types.String `tfsdk:"key_data"` +} + func NewApiClientFunc(version string) func(context.Context, *schema.ResourceData) 
(interface{}, diag.Diagnostics) { return func(ctx context.Context, d *schema.ResourceData) (interface{}, diag.Diagnostics) { return newApiClient(d, version) @@ -344,6 +361,132 @@ func (a *ApiClient) ClusterID(ctx context.Context) (*string, diag.Diagnostics) { return nil, diags } +func NewFWApiClientFromState(ctx context.Context, state tfsdk.State, defaultClient *ApiClient) (*ApiClient, fwdiag.Diagnostics) { + var es struct { + Connection []*ElasticsearchConnection `tfsdk:"elasticsearch_connection"` + } + diags := state.Get(ctx, &es) + if diags.HasError() { + return nil, diags + } + if len(es.Connection) > 0 { + return NewFWApiClient(ctx, es.Connection[0], defaultClient.version, false) + } + + return defaultClient, nil +} + +func NewFWApiClient(ctx context.Context, esConn *ElasticsearchConnection, version string, useEnvAsDefault bool) (*ApiClient, fwdiag.Diagnostics) { + var diags fwdiag.Diagnostics + config := elasticsearch.Config{} + config.Username = getStringValue(esConn.Username, "ELASTICSEARCH_USERNAME", true) + config.Password = getStringValue(esConn.Password, "ELASTICSEARCH_PASSWORD", true) + config.APIKey = getStringValue(esConn.APIKey, "ELASTICSEARCH_API_KEY", true) + + var addrs []string + diags.Append(esConn.Endpoints.ElementsAs(ctx, &addrs, true)...) 
+ if diags.HasError() { + return nil, diags + } + if len(addrs) == 0 && useEnvAsDefault { + if endpoints := os.Getenv("ELASTICSEARCH_ENDPOINTS"); endpoints != "" { + for _, e := range strings.Split(endpoints, ",") { + addrs = append(addrs, strings.TrimSpace(e)) + } + } + } + config.Addresses = addrs + + envInsecure, _ := strconv.ParseBool(os.Getenv("ELASTICSEARCH_INSECURE")) + if esConn.Insecure.ValueBool() || envInsecure { + tlsClientConfig := ensureTLSClientConfig(&config) + tlsClientConfig.InsecureSkipVerify = true + } + + if esConn.CAFile.ValueString() != "" { + caCert, err := os.ReadFile(esConn.CAFile.ValueString()) + if err != nil { + diags.Append(fwdiag.NewErrorDiagnostic( + "Unable to read CA File", + err.Error(), + )) + return nil, diags + } + config.CACert = caCert + } + if esConn.CAData.ValueString() != "" { + config.CACert = []byte(esConn.CAData.ValueString()) + } + + if certFile := esConn.CertFile.ValueString(); certFile != "" { + if keyFile := esConn.KeyFile.ValueString(); keyFile != "" { + cert, err := tls.LoadX509KeyPair(certFile, keyFile) + if err != nil { + diags.Append(fwdiag.NewErrorDiagnostic( + "Unable to read certificate or key file", + err.Error(), + )) + return nil, diags + } + tlsClientConfig := ensureTLSClientConfig(&config) + tlsClientConfig.Certificates = []tls.Certificate{cert} + } else { + diags.Append(fwdiag.NewErrorDiagnostic( + "Unable to read key file", + "Path to key file has not been configured or is empty", + )) + return nil, diags + } + } + if certData := esConn.CertData.ValueString(); certData != "" { + if keyData := esConn.KeyData.ValueString(); keyData != "" { + cert, err := tls.X509KeyPair([]byte(certData), []byte(keyData)) + if err != nil { + diags.Append(fwdiag.NewErrorDiagnostic( + "Unable to parse certificate or key", + err.Error(), + )) + return nil, diags + } + tlsClientConfig := ensureTLSClientConfig(&config) + tlsClientConfig.Certificates = []tls.Certificate{cert} + } else { + 
diags.Append(fwdiag.NewErrorDiagnostic( + "Unable to parse key", + "Key data has not been configured or is empty", + )) + return nil, diags + } + } + + if logging.IsDebugOrHigher() { + config.EnableDebugLogger = true + config.Logger = &debugLogger{Name: "elasticsearch"} + } + es, err := elasticsearch.NewClient(config) + if err != nil { + diags.Append(fwdiag.NewErrorDiagnostic( + "Unable to create Elasticsearch client", + err.Error(), + )) + return nil, diags + } + + return &ApiClient{ + elasticsearch: es, + version: version, + }, diags +} + +func getStringValue(s types.String, envKey string, useEnvAsDefault bool) string { + if s.IsNull() { + if useEnvAsDefault { + return os.Getenv(envKey) + } + } + return s.ValueString() +} + type BaseConfig struct { Username string Password string diff --git a/internal/schema/connection.go b/internal/schema/connection.go index f8e961447..7d25ed0b5 100644 --- a/internal/schema/connection.go +++ b/internal/schema/connection.go @@ -3,9 +3,218 @@ package schema import ( "fmt" + "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + fwschema "github.com/hashicorp/terraform-plugin-framework/provider/schema" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) +func GetEsFWConnectionBlock(keyName string, isProviderConfiguration bool) fwschema.Block { + usernamePath := makePathRef(keyName, "username") + passwordPath := makePathRef(keyName, "password") + caFilePath := makePathRef(keyName, "ca_file") + caDataPath := makePathRef(keyName, "ca_data") + certFilePath := makePathRef(keyName, "cert_file") + certDataPath := makePathRef(keyName, "cert_data") + keyFilePath :=
makePathRef(keyName, "key_file") + keyDataPath := makePathRef(keyName, "key_data") + + usernameValidators := []validator.String{stringvalidator.AlsoRequires(path.MatchRoot(passwordPath))} + passwordValidators := []validator.String{stringvalidator.AlsoRequires(path.MatchRoot(usernamePath))} + + if isProviderConfiguration { + // RequireWith validation isn't compatible when used in conjunction with DefaultFunc + usernameValidators = nil + passwordValidators = nil + } + + return fwschema.ListNestedBlock{ + MarkdownDescription: fmt.Sprintf("Elasticsearch connection configuration block. %s", getDeprecationMessage(isProviderConfiguration)), + DeprecationMessage: getDeprecationMessage(isProviderConfiguration), + NestedObject: fwschema.NestedBlockObject{ + Attributes: map[string]fwschema.Attribute{ + "username": fwschema.StringAttribute{ + MarkdownDescription: "Username to use for API authentication to Elasticsearch.", + Optional: true, + Validators: usernameValidators, + }, + "password": fwschema.StringAttribute{ + MarkdownDescription: "Password to use for API authentication to Elasticsearch.", + Optional: true, + Sensitive: true, + Validators: passwordValidators, + }, + "api_key": fwschema.StringAttribute{ + MarkdownDescription: "API Key to use for authentication to Elasticsearch", + Optional: true, + Sensitive: true, + Validators: []validator.String{ + stringvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("username")), + stringvalidator.ConflictsWith(path.MatchRoot(passwordPath)), + }, + }, + "endpoints": fwschema.ListAttribute{ + MarkdownDescription: "A comma-separated list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number.", + Optional: true, + Sensitive: true, + ElementType: types.StringType, + }, + "insecure": fwschema.BoolAttribute{ + MarkdownDescription: "Disable TLS certificate validation", + Optional: true, + }, + "ca_file": fwschema.StringAttribute{ + MarkdownDescription: "Path to a 
custom Certificate Authority certificate", + Optional: true, + Validators: []validator.String{ + stringvalidator.ConflictsWith(path.MatchRoot(caDataPath)), + }, + }, + "ca_data": fwschema.StringAttribute{ + MarkdownDescription: "PEM-encoded custom Certificate Authority certificate", + Optional: true, + Validators: []validator.String{ + stringvalidator.ConflictsWith(path.MatchRoot(caFilePath)), + }, + }, + "cert_file": fwschema.StringAttribute{ + MarkdownDescription: "Path to a file containing the PEM encoded certificate for client auth", + Optional: true, + Validators: []validator.String{ + stringvalidator.AlsoRequires(path.MatchRoot(keyFilePath)), + stringvalidator.ConflictsWith(path.MatchRoot(certDataPath)), + stringvalidator.ConflictsWith(path.MatchRoot(keyDataPath)), + }, + }, + "key_file": fwschema.StringAttribute{ + MarkdownDescription: "Path to a file containing the PEM encoded private key for client auth", + Optional: true, + Validators: []validator.String{ + stringvalidator.AlsoRequires(path.MatchRoot(certFilePath)), + stringvalidator.ConflictsWith(path.MatchRoot(certDataPath)), + stringvalidator.ConflictsWith(path.MatchRoot(keyDataPath)), + }, + }, + "cert_data": fwschema.StringAttribute{ + MarkdownDescription: "PEM encoded certificate for client auth", + Optional: true, + Validators: []validator.String{ + stringvalidator.AlsoRequires(path.MatchRoot(keyDataPath)), + stringvalidator.ConflictsWith(path.MatchRoot(certFilePath)), + stringvalidator.ConflictsWith(path.MatchRoot(keyFilePath)), + }, + }, + "key_data": fwschema.StringAttribute{ + MarkdownDescription: "PEM encoded private key for client auth", + Optional: true, + Sensitive: true, + Validators: []validator.String{ + stringvalidator.AlsoRequires(path.MatchRoot(certDataPath)), + stringvalidator.ConflictsWith(path.MatchRoot(certFilePath)), + stringvalidator.ConflictsWith(path.MatchRoot(keyFilePath)), + }, + }, + }, + }, + Validators: []validator.List{ + listvalidator.SizeAtMost(1), + }, + } +} + +func 
GetKbFWConnectionBlock(keyName string, isProviderConfiguration bool) fwschema.Block { + usernamePath := makePathRef(keyName, "username") + passwordPath := makePathRef(keyName, "password") + + usernameValidators := []validator.String{stringvalidator.AlsoRequires(path.MatchRoot(passwordPath))} + passwordValidators := []validator.String{stringvalidator.AlsoRequires(path.MatchRoot(usernamePath))} + + return fwschema.ListNestedBlock{ + MarkdownDescription: fmt.Sprintf("Kibana connection configuration block. %s", getDeprecationMessage(isProviderConfiguration)), + DeprecationMessage: getDeprecationMessage(isProviderConfiguration), + NestedObject: fwschema.NestedBlockObject{ + Attributes: map[string]fwschema.Attribute{ + "username": fwschema.StringAttribute{ + MarkdownDescription: "Username to use for API authentication to Kibana.", + Optional: true, + Validators: usernameValidators, + }, + "password": fwschema.StringAttribute{ + MarkdownDescription: "Password to use for API authentication to Kibana.", + Optional: true, + Sensitive: true, + Validators: passwordValidators, + }, + "endpoints": fwschema.ListAttribute{ + MarkdownDescription: "A comma-separated list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number.", + Optional: true, + Sensitive: true, + ElementType: types.StringType, + }, + "insecure": fwschema.BoolAttribute{ + MarkdownDescription: "Disable TLS certificate validation", + Optional: true, + }, + }, + }, + Validators: []validator.List{ + listvalidator.SizeAtMost(1), + }, + } +} + +func GetFleetFWConnectionBlock(keyName string, isProviderConfiguration bool) fwschema.Block { + usernamePath := makePathRef(keyName, "username") + passwordPath := makePathRef(keyName, "password") + + usernameValidators := []validator.String{stringvalidator.AlsoRequires(path.MatchRoot(passwordPath))} + passwordValidators := []validator.String{stringvalidator.AlsoRequires(path.MatchRoot(usernamePath))} + + return 
fwschema.ListNestedBlock{ + MarkdownDescription: fmt.Sprintf("Fleet connection configuration block. %s", getDeprecationMessage(isProviderConfiguration)), + DeprecationMessage: getDeprecationMessage(isProviderConfiguration), + NestedObject: fwschema.NestedBlockObject{ + Attributes: map[string]fwschema.Attribute{ + "username": fwschema.StringAttribute{ + MarkdownDescription: "Username to use for API authentication to Fleet.", + Optional: true, + Validators: usernameValidators, + }, + "password": fwschema.StringAttribute{ + MarkdownDescription: "Password to use for API authentication to Fleet.", + Optional: true, + Sensitive: true, + Validators: passwordValidators, + }, + "api_key": fwschema.StringAttribute{ + MarkdownDescription: "API Key to use for authentication to Fleet.", + Optional: true, + Sensitive: true, + Validators: []validator.String{ + stringvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("username")), + stringvalidator.ConflictsWith(path.MatchRoot(passwordPath)), + }, + }, + "ca_certs": fwschema.ListAttribute{ + MarkdownDescription: "A list of paths to CA certificates to validate the certificate presented by the Fleet server.", + Optional: true, + ElementType: types.StringType, + }, + "insecure": fwschema.BoolAttribute{ + MarkdownDescription: "Disable TLS certificate validation", + Optional: true, + }, + }, + }, + Validators: []validator.List{ + listvalidator.SizeAtMost(1), + }, + } +} + func GetEsConnectionSchema(keyName string, isProviderConfiguration bool) *schema.Schema { usernamePath := makePathRef(keyName, "username") passwordPath := makePathRef(keyName, "password") @@ -20,11 +229,9 @@ func GetEsConnectionSchema(keyName string, isProviderConfiguration bool) *schema passwordRequiredWithValidation := []string{usernamePath} withEnvDefault := func(key string, dv interface{}) schema.SchemaDefaultFunc { return nil } - deprecationMessage := "This property will be removed in a future provider version. 
Configure the Elasticsearch connection via the provider configuration instead." if isProviderConfiguration { withEnvDefault = func(key string, dv interface{}) schema.SchemaDefaultFunc { return schema.EnvDefaultFunc(key, dv) } - deprecationMessage = "" // RequireWith validation isn't compatible when used in conjunction with DefaultFunc usernameRequiredWithValidation = nil @@ -32,15 +239,15 @@ func GetEsConnectionSchema(keyName string, isProviderConfiguration bool) *schema } return &schema.Schema{ - Description: fmt.Sprintf("Elasticsearch connection configuration block. %s", deprecationMessage), - Deprecated: deprecationMessage, + Description: fmt.Sprintf("Elasticsearch connection configuration block. %s", getDeprecationMessage(isProviderConfiguration)), + Deprecated: getDeprecationMessage(isProviderConfiguration), Type: schema.TypeList, MaxItems: 1, Optional: true, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "username": { - Description: "Username to use for API authentication to Elasticsearch.", + Description: fmt.Sprintf("Username to use for API authentication to Elasticsearch. %s", getDeprecationMessage(isProviderConfiguration)), Type: schema.TypeString, Optional: true, DefaultFunc: withEnvDefault("ELASTICSEARCH_USERNAME", nil), @@ -220,3 +427,10 @@ func GetFleetConnectionSchema() *schema.Schema { func makePathRef(keyName string, keyValue string) string { return fmt.Sprintf("%s.0.%s", keyName, keyValue) } + +func getDeprecationMessage(isProviderConfiguration bool) string { + if isProviderConfiguration { + return "" + } + return "This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead." 
+} diff --git a/provider/factory.go b/provider/factory.go index cf7404f61..1c3745f7d 100644 --- a/provider/factory.go +++ b/provider/factory.go @@ -2,7 +2,9 @@ package provider import ( "context" + "fmt" + "github.com/hashicorp/terraform-plugin-framework/providerserver" "github.com/hashicorp/terraform-plugin-go/tfprotov5" "github.com/hashicorp/terraform-plugin-mux/tf5muxserver" ) @@ -10,14 +12,16 @@ import ( // ProtoV5ProviderServerFactory returns a muxed terraform-plugin-go protocol v5 provider factory function. func ProtoV5ProviderServerFactory(ctx context.Context, version string) (func() tfprotov5.ProviderServer, error) { sdkv2Provider := New(version) + frameworkProvider := providerserver.NewProtocol5(NewFrameworkProvider(version)) servers := []func() tfprotov5.ProviderServer{ + frameworkProvider, sdkv2Provider.GRPCProvider, } muxServer, err := tf5muxserver.NewMuxServer(ctx, servers...) if err != nil { - return nil, err + return nil, fmt.Errorf("initialize mux server: %w", err) } return muxServer.ProviderServer, nil diff --git a/provider/plugin_framework.go b/provider/plugin_framework.go new file mode 100644 index 000000000..94c944189 --- /dev/null +++ b/provider/plugin_framework.go @@ -0,0 +1,60 @@ +package provider + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/schema" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/path" + fwprovider "github.com/hashicorp/terraform-plugin-framework/provider" + fwschema "github.com/hashicorp/terraform-plugin-framework/provider/schema" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +type Provider struct { + version string +} + +// NewFrameworkProvider instantiates plugin framework's provider +func 
NewFrameworkProvider(version string) fwprovider.Provider { + return &Provider{ + version: version, + } +} + +func (p *Provider) Metadata(_ context.Context, _ fwprovider.MetadataRequest, res *fwprovider.MetadataResponse) { + res.TypeName = "elasticstack" + res.Version = p.version +} + +func (p *Provider) Schema(ctx context.Context, req fwprovider.SchemaRequest, res *fwprovider.SchemaResponse) { + res.Schema = fwschema.Schema{ + Blocks: map[string]fwschema.Block{ + esKeyName: schema.GetEsFWConnectionBlock(esKeyName, true), + kbKeyName: schema.GetKbFWConnectionBlock(kbKeyName, true), + fleetKeyName: schema.GetFleetFWConnectionBlock(fleetKeyName, true), + }, + } +} + +func (p *Provider) Configure(ctx context.Context, req fwprovider.ConfigureRequest, res *fwprovider.ConfigureResponse) { + esConn := []*clients.ApiClient{} + diags := req.Config.GetAttribute(ctx, path.Root(esKeyName), &esConn) + res.Diagnostics.Append(diags...) + if res.Diagnostics.HasError() { + return + } + apiClient := clients.NewApiClientFunc(p.version) + + res.DataSourceData = apiClient + res.ResourceData = apiClient +} + +func (p *Provider) DataSources(ctx context.Context) []func() datasource.DataSource { + return []func() datasource.DataSource{} +} + +func (p *Provider) Resources(ctx context.Context) []func() resource.Resource { + return []func() resource.Resource{} +} diff --git a/provider/provider.go b/provider/provider.go index 408e3b58a..3d0e9bb71 100644 --- a/provider/provider.go +++ b/provider/provider.go @@ -17,6 +17,8 @@ import ( ) const esKeyName = "elasticsearch" +const kbKeyName = "kibana" +const fleetKeyName = "fleet" func init() { // Set descriptions to support markdown syntax, this will be used in document generation From 18bd25446fb98b7783fb547b464e2fdef5ca1e6b Mon Sep 17 00:00:00 2001 From: Nick Clark Date: Wed, 31 May 2023 12:06:43 +1000 Subject: [PATCH 02/15] add docs --- docs/data-sources/elasticsearch_security_role.md | 2 +- .../elasticsearch_security_role_mapping.md | 2 +- 
docs/data-sources/elasticsearch_security_user.md | 2 +- .../elasticsearch_snapshot_repository.md | 2 +- docs/index.md | 13 ++++++------- docs/resources/elasticsearch_cluster_settings.md | 2 +- docs/resources/elasticsearch_component_template.md | 2 +- docs/resources/elasticsearch_data_stream.md | 2 +- docs/resources/elasticsearch_enrich_policy.md | 2 +- docs/resources/elasticsearch_index.md | 2 +- docs/resources/elasticsearch_index_lifecycle.md | 2 +- docs/resources/elasticsearch_index_template.md | 2 +- docs/resources/elasticsearch_ingest_pipeline.md | 2 +- docs/resources/elasticsearch_logstash_pipeline.md | 2 +- docs/resources/elasticsearch_script.md | 2 +- docs/resources/elasticsearch_security_api_key.md | 2 +- docs/resources/elasticsearch_security_role.md | 2 +- .../elasticsearch_security_role_mapping.md | 2 +- .../resources/elasticsearch_security_system_user.md | 2 +- docs/resources/elasticsearch_security_user.md | 2 +- docs/resources/elasticsearch_snapshot_lifecycle.md | 2 +- docs/resources/elasticsearch_snapshot_repository.md | 2 +- 22 files changed, 27 insertions(+), 28 deletions(-) diff --git a/docs/data-sources/elasticsearch_security_role.md b/docs/data-sources/elasticsearch_security_role.md index ab82db146..e701c318e 100644 --- a/docs/data-sources/elasticsearch_security_role.md +++ b/docs/data-sources/elasticsearch_security_role.md @@ -62,7 +62,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. 
diff --git a/docs/data-sources/elasticsearch_security_role_mapping.md b/docs/data-sources/elasticsearch_security_role_mapping.md index e095abebd..f95df12e7 100644 --- a/docs/data-sources/elasticsearch_security_role_mapping.md +++ b/docs/data-sources/elasticsearch_security_role_mapping.md @@ -61,4 +61,4 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. diff --git a/docs/data-sources/elasticsearch_security_user.md b/docs/data-sources/elasticsearch_security_user.md index de9476bf1..e99327db6 100644 --- a/docs/data-sources/elasticsearch_security_user.md +++ b/docs/data-sources/elasticsearch_security_user.md @@ -61,4 +61,4 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. 
diff --git a/docs/data-sources/elasticsearch_snapshot_repository.md b/docs/data-sources/elasticsearch_snapshot_repository.md index 487174580..86c9e86b9 100644 --- a/docs/data-sources/elasticsearch_snapshot_repository.md +++ b/docs/data-sources/elasticsearch_snapshot_repository.md @@ -89,7 +89,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. diff --git a/docs/index.md b/docs/index.md index 3d358c5d7..95f000f12 100644 --- a/docs/index.md +++ b/docs/index.md @@ -118,9 +118,9 @@ provider "elasticstack" { ### Optional -- `elasticsearch` (Block List, Max: 1) Elasticsearch connection configuration block. (see [below for nested schema](#nestedblock--elasticsearch)) -- `fleet` (Block List, Max: 1) Fleet connection configuration block. (see [below for nested schema](#nestedblock--fleet)) -- `kibana` (Block List, Max: 1) Kibana connection configuration block. (see [below for nested schema](#nestedblock--kibana)) +- `elasticsearch` (Block List) Elasticsearch connection configuration block. (see [below for nested schema](#nestedblock--elasticsearch)) +- `fleet` (Block List) Kibana connection configuration block. (see [below for nested schema](#nestedblock--fleet)) +- `kibana` (Block List) Kibana connection configuration block. 
(see [below for nested schema](#nestedblock--kibana)) ### Nested Schema for `elasticsearch` @@ -132,7 +132,7 @@ Optional: - `ca_file` (String) Path to a custom Certificate Authority certificate - `cert_data` (String) PEM encoded certificate for client auth - `cert_file` (String) Path to a file containing the PEM encoded certificate for client auth -- `endpoints` (List of String, Sensitive) A list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number. +- `endpoints` (List of String, Sensitive) A comma-separated list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number. - `insecure` (Boolean) Disable TLS certificate validation - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth @@ -145,9 +145,8 @@ Optional: Optional: -- `api_key` (String, Sensitive) API key to use for API authentication to Fleet. +- `api_key` (String, Sensitive) API Key to use for authentication to Fleet. - `ca_certs` (List of String) A list of paths to CA certificates to validate the certificate presented by the Fleet server. -- `endpoint` (String, Sensitive) The Fleet server where the terraform provider will point to, this must include the http(s) schema and port number. - `insecure` (Boolean) Disable TLS certificate validation - `password` (String, Sensitive) Password to use for API authentication to Fleet. - `username` (String) Username to use for API authentication to Fleet. @@ -158,7 +157,7 @@ Optional: Optional: -- `endpoints` (List of String, Sensitive) A list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number. +- `endpoints` (List of String, Sensitive) A comma-separated list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number. 
- `insecure` (Boolean) Disable TLS certificate validation - `password` (String, Sensitive) Password to use for API authentication to Kibana. - `username` (String) Username to use for API authentication to Kibana. diff --git a/docs/resources/elasticsearch_cluster_settings.md b/docs/resources/elasticsearch_cluster_settings.md index d172e3816..bc9f5e2f2 100644 --- a/docs/resources/elasticsearch_cluster_settings.md +++ b/docs/resources/elasticsearch_cluster_settings.md @@ -74,7 +74,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. diff --git a/docs/resources/elasticsearch_component_template.md b/docs/resources/elasticsearch_component_template.md index c064592aa..5a9271e32 100644 --- a/docs/resources/elasticsearch_component_template.md +++ b/docs/resources/elasticsearch_component_template.md @@ -99,7 +99,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. 
## Import diff --git a/docs/resources/elasticsearch_data_stream.md b/docs/resources/elasticsearch_data_stream.md index 1c5c1390d..6d43627fa 100644 --- a/docs/resources/elasticsearch_data_stream.md +++ b/docs/resources/elasticsearch_data_stream.md @@ -105,7 +105,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. diff --git a/docs/resources/elasticsearch_enrich_policy.md b/docs/resources/elasticsearch_enrich_policy.md index 097a30e3a..7d946e7e5 100644 --- a/docs/resources/elasticsearch_enrich_policy.md +++ b/docs/resources/elasticsearch_enrich_policy.md @@ -81,7 +81,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. 
## Import diff --git a/docs/resources/elasticsearch_index.md b/docs/resources/elasticsearch_index.md index e53611314..6106cd621 100644 --- a/docs/resources/elasticsearch_index.md +++ b/docs/resources/elasticsearch_index.md @@ -169,7 +169,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. diff --git a/docs/resources/elasticsearch_index_lifecycle.md b/docs/resources/elasticsearch_index_lifecycle.md index 799878f91..693849eb9 100644 --- a/docs/resources/elasticsearch_index_lifecycle.md +++ b/docs/resources/elasticsearch_index_lifecycle.md @@ -195,7 +195,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. 
diff --git a/docs/resources/elasticsearch_index_template.md b/docs/resources/elasticsearch_index_template.md index 5bfbd097f..f13b0a27d 100644 --- a/docs/resources/elasticsearch_index_template.md +++ b/docs/resources/elasticsearch_index_template.md @@ -91,7 +91,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. diff --git a/docs/resources/elasticsearch_ingest_pipeline.md b/docs/resources/elasticsearch_ingest_pipeline.md index 525ae686a..a1a6640a8 100644 --- a/docs/resources/elasticsearch_ingest_pipeline.md +++ b/docs/resources/elasticsearch_ingest_pipeline.md @@ -104,7 +104,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. 
## Import diff --git a/docs/resources/elasticsearch_logstash_pipeline.md b/docs/resources/elasticsearch_logstash_pipeline.md index 9b280209c..112b4613a 100644 --- a/docs/resources/elasticsearch_logstash_pipeline.md +++ b/docs/resources/elasticsearch_logstash_pipeline.md @@ -103,7 +103,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. ## Import diff --git a/docs/resources/elasticsearch_script.md b/docs/resources/elasticsearch_script.md index 37a6a92b0..b826597ba 100644 --- a/docs/resources/elasticsearch_script.md +++ b/docs/resources/elasticsearch_script.md @@ -76,7 +76,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. 
## Import diff --git a/docs/resources/elasticsearch_security_api_key.md b/docs/resources/elasticsearch_security_api_key.md index 746a69073..63b664eff 100644 --- a/docs/resources/elasticsearch_security_api_key.md +++ b/docs/resources/elasticsearch_security_api_key.md @@ -83,7 +83,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. ## Import diff --git a/docs/resources/elasticsearch_security_role.md b/docs/resources/elasticsearch_security_role.md index 4736af57c..e58d7a3fb 100644 --- a/docs/resources/elasticsearch_security_role.md +++ b/docs/resources/elasticsearch_security_role.md @@ -90,7 +90,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. 
diff --git a/docs/resources/elasticsearch_security_role_mapping.md b/docs/resources/elasticsearch_security_role_mapping.md index 06644a20d..7e0163764 100644 --- a/docs/resources/elasticsearch_security_role_mapping.md +++ b/docs/resources/elasticsearch_security_role_mapping.md @@ -71,7 +71,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. ## Import diff --git a/docs/resources/elasticsearch_security_system_user.md b/docs/resources/elasticsearch_security_system_user.md index 0186e1427..4ccbf7b59 100644 --- a/docs/resources/elasticsearch_security_system_user.md +++ b/docs/resources/elasticsearch_security_system_user.md @@ -65,4 +65,4 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. 
diff --git a/docs/resources/elasticsearch_security_user.md b/docs/resources/elasticsearch_security_user.md index 152350be7..dbadf1dd2 100644 --- a/docs/resources/elasticsearch_security_user.md +++ b/docs/resources/elasticsearch_security_user.md @@ -90,7 +90,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. ## Import diff --git a/docs/resources/elasticsearch_snapshot_lifecycle.md b/docs/resources/elasticsearch_snapshot_lifecycle.md index 88dfe6826..39f85c681 100644 --- a/docs/resources/elasticsearch_snapshot_lifecycle.md +++ b/docs/resources/elasticsearch_snapshot_lifecycle.md @@ -89,7 +89,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. 
## Import diff --git a/docs/resources/elasticsearch_snapshot_repository.md b/docs/resources/elasticsearch_snapshot_repository.md index 453f6c6ba..438d23c0e 100644 --- a/docs/resources/elasticsearch_snapshot_repository.md +++ b/docs/resources/elasticsearch_snapshot_repository.md @@ -92,7 +92,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Username to use for API authentication to Elasticsearch. +- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. From fb10085ff7184d09b273fca44bc3c8e1cdf23712 Mon Sep 17 00:00:00 2001 From: Nick Clark Date: Wed, 31 May 2023 16:29:56 +1000 Subject: [PATCH 03/15] wip --- internal/clients/api_client.go | 16 ---------------- internal/schema/connection.go | 17 +++++++++++------ 2 files changed, 11 insertions(+), 22 deletions(-) diff --git a/internal/clients/api_client.go b/internal/clients/api_client.go index d85bb0056..c75ce58cc 100644 --- a/internal/clients/api_client.go +++ b/internal/clients/api_client.go @@ -22,7 +22,6 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/hashicorp/go-version" fwdiag "github.com/hashicorp/terraform-plugin-framework/diag" - "github.com/hashicorp/terraform-plugin-framework/tfsdk" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -361,21 +360,6 @@ func (a *ApiClient) ClusterID(ctx context.Context) (*string, diag.Diagnostics) { return nil, diags } -func NewFWApiClientFromState(ctx 
context.Context, state tfsdk.State, defaultClient *ApiClient) (*ApiClient, fwdiag.Diagnostics) { - var es struct { - Connection []*ElasticsearchConnection `tfsdk:"elasticsearch_connection"` - } - diags := state.Get(ctx, &es) - if diags.HasError() { - return nil, diags - } - if len(es.Connection) > 0 { - return NewFWApiClient(ctx, es.Connection[0], defaultClient.version, false) - } - - return defaultClient, nil -} - func NewFWApiClient(ctx context.Context, esConn *ElasticsearchConnection, version string, useEnvAsDefault bool) (*ApiClient, fwdiag.Diagnostics) { var diags fwdiag.Diagnostics config := elasticsearch.Config{} diff --git a/internal/schema/connection.go b/internal/schema/connection.go index 7d25ed0b5..a85ea70ad 100644 --- a/internal/schema/connection.go +++ b/internal/schema/connection.go @@ -57,7 +57,7 @@ func GetEsFWConnectionBlock(keyName string, isProviderConfiguration bool) fwsche }, }, "endpoints": fwschema.ListAttribute{ - MarkdownDescription: "A comma-separated list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number.", + MarkdownDescription: "A list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number.", Optional: true, Sensitive: true, ElementType: types.StringType, @@ -133,7 +133,7 @@ func GetKbFWConnectionBlock(keyName string, isProviderConfiguration bool) fwsche passwordValidators := []validator.String{stringvalidator.AlsoRequires(path.MatchRoot(usernamePath))} return fwschema.ListNestedBlock{ - MarkdownDescription: fmt.Sprintf("Kibana connection configuration block. 
%s", getDeprecationMessage(isProviderConfiguration)), + MarkdownDescription: "Kibana connection configuration block.", DeprecationMessage: getDeprecationMessage(isProviderConfiguration), NestedObject: fwschema.NestedBlockObject{ Attributes: map[string]fwschema.Attribute{ @@ -174,7 +174,7 @@ func GetFleetFWConnectionBlock(keyName string, isProviderConfiguration bool) fws passwordValidators := []validator.String{stringvalidator.AlsoRequires(path.MatchRoot(usernamePath))} return fwschema.ListNestedBlock{ - MarkdownDescription: fmt.Sprintf("Kibana connection configuration block. %s", getDeprecationMessage(isProviderConfiguration)), + MarkdownDescription: "Fleet connection configuration block.", DeprecationMessage: getDeprecationMessage(isProviderConfiguration), NestedObject: fwschema.NestedBlockObject{ Attributes: map[string]fwschema.Attribute{ @@ -198,6 +198,11 @@ func GetFleetFWConnectionBlock(keyName string, isProviderConfiguration bool) fws stringvalidator.ConflictsWith(path.MatchRoot(passwordPath)), }, }, + "endpoint": fwschema.StringAttribute{ + MarkdownDescription: "The Fleet server where the terraform provider will point to, this must include the http(s) schema and port number.", + Optional: true, + Sensitive: true, + }, "ca_certs": fwschema.ListAttribute{ MarkdownDescription: "A list of paths to CA certificates to validate the certificate presented by the Fleet server.", Optional: true, @@ -247,7 +252,7 @@ func GetEsConnectionSchema(keyName string, isProviderConfiguration bool) *schema Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "username": { - Description: fmt.Sprintf("Elasticsearch connection configuration block. 
%s", getDeprecationMessage(isProviderConfiguration)), + Description: "Username to use for API authentication to Elasticsearch.", Type: schema.TypeString, Optional: true, DefaultFunc: withEnvDefault("ELASTICSEARCH_USERNAME", nil), @@ -352,7 +357,7 @@ func GetKibanaConnectionSchema() *schema.Schema { RequiredWith: []string{"kibana.0.username"}, }, "endpoints": { - Description: "A list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number.", + Description: "A comma-separated list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number.", Type: schema.TypeList, Optional: true, Sensitive: true, @@ -394,7 +399,7 @@ func GetFleetConnectionSchema() *schema.Schema { RequiredWith: []string{"fleet.0.username"}, }, "api_key": { - Description: "API key to use for API authentication to Fleet.", + Description: "API Key to use for authentication to Fleet.", Type: schema.TypeString, Optional: true, Sensitive: true, From 1c34a238a61941d97277e3e5f42d78d3992dff56 Mon Sep 17 00:00:00 2001 From: Nick Clark Date: Thu, 1 Jun 2023 09:44:49 +1000 Subject: [PATCH 04/15] wip --- go.mod | 1 + go.sum | 2 + internal/clients/api_client.go | 127 --------------------------------- provider/factory_test.go | 51 +++++++++++++ provider/plugin_framework.go | 4 +- provider/provider_test.go | 4 +- 6 files changed, 58 insertions(+), 131 deletions(-) create mode 100644 provider/factory_test.go diff --git a/go.mod b/go.mod index 09af71655..70dbbdde5 100644 --- a/go.mod +++ b/go.mod @@ -14,6 +14,7 @@ require ( github.com/hashicorp/terraform-plugin-log v0.9.0 github.com/hashicorp/terraform-plugin-mux v0.12.0 github.com/hashicorp/terraform-plugin-sdk/v2 v2.29.0 + github.com/hashicorp/terraform-plugin-testing v1.2.0 github.com/oapi-codegen/runtime v1.0.0 github.com/stretchr/testify v1.8.4 ) diff --git a/go.sum b/go.sum index 
efac6d9a8..f9d8610e5 100644 --- a/go.sum +++ b/go.sum @@ -92,6 +92,8 @@ github.com/hashicorp/terraform-plugin-mux v0.12.0 h1:TJlmeslQ11WlQtIFAfth0vXx+gS github.com/hashicorp/terraform-plugin-mux v0.12.0/go.mod h1:8MR0AgmV+Q03DIjyrAKxXyYlq2EUnYBQP8gxAAA0zeM= github.com/hashicorp/terraform-plugin-sdk/v2 v2.29.0 h1:wcOKYwPI9IorAJEBLzgclh3xVolO7ZorYd6U1vnok14= github.com/hashicorp/terraform-plugin-sdk/v2 v2.29.0/go.mod h1:qH/34G25Ugdj5FcM95cSoXzUgIbgfhVLXCcEcYaMwq8= +github.com/hashicorp/terraform-plugin-testing v1.2.0 h1:pASRAe6BOZFO4xSGQr9WzitXit0nrQAYDk8ziuRfn9E= +github.com/hashicorp/terraform-plugin-testing v1.2.0/go.mod h1:+8bp3O7xUb1UtBcdknrGdVRIuTw4b62TYSIgXHqlyew= github.com/hashicorp/terraform-registry-address v0.2.2 h1:lPQBg403El8PPicg/qONZJDC6YlgCVbWDtNmmZKtBno= github.com/hashicorp/terraform-registry-address v0.2.2/go.mod h1:LtwNbCihUoUZ3RYriyS2wF/lGPB6gF9ICLRtuDk7hSo= github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S52uzrw4x0jKQ= diff --git a/internal/clients/api_client.go b/internal/clients/api_client.go index c75ce58cc..7da6d38de 100644 --- a/internal/clients/api_client.go +++ b/internal/clients/api_client.go @@ -21,8 +21,6 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/hashicorp/go-version" - fwdiag "github.com/hashicorp/terraform-plugin-framework/diag" - "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" @@ -76,20 +74,6 @@ type ApiClient struct { version string } -type ElasticsearchConnection struct { - Username types.String `tfsdk:"username"` - Password 
types.String `tfsdk:"password"` - APIKey types.String `tfsdk:"api_key"` - Endpoints types.List `tfsdk:"endpoints"` - Insecure types.Bool `tfsdk:"insecure"` - CAFile types.String `tfsdk:"ca_file"` - CAData types.String `tfsdk:"ca_data"` - CertFile types.String `tfsdk:"cert_file"` - KeyFile types.String `tfsdk:"key_file"` - CertData types.String `tfsdk:"cert_data"` - KeyData types.String `tfsdk:"key_data"` -} - func NewApiClientFunc(version string) func(context.Context, *schema.ResourceData) (interface{}, diag.Diagnostics) { return func(ctx context.Context, d *schema.ResourceData) (interface{}, diag.Diagnostics) { return newApiClient(d, version) @@ -360,117 +344,6 @@ func (a *ApiClient) ClusterID(ctx context.Context) (*string, diag.Diagnostics) { return nil, diags } -func NewFWApiClient(ctx context.Context, esConn *ElasticsearchConnection, version string, useEnvAsDefault bool) (*ApiClient, fwdiag.Diagnostics) { - var diags fwdiag.Diagnostics - config := elasticsearch.Config{} - config.Username = getStringValue(esConn.Username, "ELASTICSEARCH_USERNAME", true) - config.Password = getStringValue(esConn.Password, "ELASTICSEARCH_PASSWORD", true) - config.APIKey = getStringValue(esConn.APIKey, "ELASTICSEARCH_API_KEY", true) - - var addrs []string - diags.Append(esConn.Endpoints.ElementsAs(ctx, &addrs, true)...) 
- if diags.HasError() { - return nil, diags - } - if len(addrs) == 0 && useEnvAsDefault { - if endpoints := os.Getenv("ELASTICSEARCH_ENDPOINTS"); endpoints != "" { - for _, e := range strings.Split(endpoints, ",") { - addrs = append(addrs, strings.TrimSpace(e)) - } - } - } - config.Addresses = addrs - - envInsecure, _ := strconv.ParseBool(os.Getenv("ELASTICSEARCH_INSECURE")) - if esConn.Insecure.ValueBool() || envInsecure { - tlsClientConfig := ensureTLSClientConfig(&config) - tlsClientConfig.InsecureSkipVerify = true - } - - if esConn.CAFile.ValueString() != "" { - caCert, err := os.ReadFile(esConn.CAFile.ValueString()) - if err != nil { - diags.Append(fwdiag.NewErrorDiagnostic( - "Unable to read CA File", - err.Error(), - )) - return nil, diags - } - config.CACert = caCert - } - if esConn.CAData.ValueString() != "" { - config.CACert = []byte(esConn.CAData.ValueString()) - } - - if certFile := esConn.CertFile.ValueString(); certFile != "" { - if keyFile := esConn.KeyFile.ValueString(); keyFile != "" { - cert, err := tls.LoadX509KeyPair(certFile, keyFile) - if err != nil { - diags.Append(fwdiag.NewErrorDiagnostic( - "Unable to read certificate or key file", - err.Error(), - )) - return nil, diags - } - tlsClientConfig := ensureTLSClientConfig(&config) - tlsClientConfig.Certificates = []tls.Certificate{cert} - } else { - diags.Append(fwdiag.NewErrorDiagnostic( - "Unable to read key file", - "Path to key file has not been configured or is empty", - )) - return nil, diags - } - } - if certData := esConn.CertData.ValueString(); certData != "" { - if keyData := esConn.KeyData.ValueString(); keyData != "" { - cert, err := tls.X509KeyPair([]byte(certData), []byte(keyData)) - if err != nil { - diags.Append(fwdiag.NewErrorDiagnostic( - "Unable to parse certificate or key", - err.Error(), - )) - return nil, diags - } - tlsClientConfig := ensureTLSClientConfig(&config) - tlsClientConfig.Certificates = []tls.Certificate{cert} - } else { - 
diags.Append(fwdiag.NewErrorDiagnostic( - "Unable to parse key", - "Key data has not been configured or is empty", - )) - return nil, diags - } - } - - es, err := elasticsearch.NewClient(config) - if err != nil { - diags.Append(fwdiag.NewErrorDiagnostic( - "Unable to create Elasticsearch client", - err.Error(), - )) - return nil, diags - } - if logging.IsDebugOrHigher() { - config.EnableDebugLogger = true - config.Logger = &debugLogger{Name: "elasticsearch"} - } - - return &ApiClient{ - elasticsearch: es, - version: version, - }, diags -} - -func getStringValue(s types.String, envKey string, useEnvAsDefault bool) string { - if s.IsNull() { - if useEnvAsDefault { - return os.Getenv(envKey) - } - } - return s.ValueString() -} - type BaseConfig struct { Username string Password string diff --git a/provider/factory_test.go b/provider/factory_test.go new file mode 100644 index 000000000..37418ebab --- /dev/null +++ b/provider/factory_test.go @@ -0,0 +1,51 @@ +package provider + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/providerserver" + "github.com/hashicorp/terraform-plugin-go/tfprotov5" + "github.com/hashicorp/terraform-plugin-mux/tf5muxserver" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" +) + +func TestMuxServer(t *testing.T) { + const providerConfig = ` + provider "elasticstack" { + elasticsearch { + username = "sup" + password = "dawg" + endpoints = ["http://localhost:9200"] + } + } + ` + resource.Test(t, resource.TestCase{ + ProtoV5ProviderFactories: map[string]func() (tfprotov5.ProviderServer, error){ + "elasticstack": func() (tfprotov5.ProviderServer, error) { + version := "test" + sdkv2Provider := New(version) + frameworkProvider := providerserver.NewProtocol5(NewFrameworkProvider(version)) + ctx := context.Background() + providers := []func() tfprotov5.ProviderServer{ + frameworkProvider, + sdkv2Provider.GRPCProvider, + } + + muxServer, err := 
tf5muxserver.NewMuxServer(ctx, providers...) + + if err != nil { + return nil, err + } + + return muxServer.ProviderServer(), nil + }, + }, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(providerConfig), + }, + }, + }) +} diff --git a/provider/plugin_framework.go b/provider/plugin_framework.go index 94c944189..6c467acce 100644 --- a/provider/plugin_framework.go +++ b/provider/plugin_framework.go @@ -39,8 +39,8 @@ func (p *Provider) Schema(ctx context.Context, req fwprovider.SchemaRequest, res } func (p *Provider) Configure(ctx context.Context, req fwprovider.ConfigureRequest, res *fwprovider.ConfigureResponse) { - esConn := []*clients.ApiClient{} - diags := req.Config.GetAttribute(ctx, path.Root(esKeyName), &esConn) + apiClients := []*clients.ApiClient{} + diags := req.Config.GetAttribute(ctx, path.Root(esKeyName), &apiClients) res.Diagnostics.Append(diags...) if res.Diagnostics.HasError() { return diff --git a/provider/provider_test.go b/provider/provider_test.go index 8d081d70d..7ed41dfeb 100644 --- a/provider/provider_test.go +++ b/provider/provider_test.go @@ -9,8 +9,8 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/security" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/elastic/terraform-provider-elasticstack/provider" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestProvider(t *testing.T) { From eecb3e1d19b51c1dbc1daa13a37b16692c157f3b Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Mon, 21 Aug 2023 22:35:36 +1000 Subject: [PATCH 05/15] Generate docs --- docs/data-sources/elasticsearch_security_role.md | 2 +- 
docs/data-sources/elasticsearch_security_role_mapping.md | 2 +- docs/data-sources/elasticsearch_security_user.md | 2 +- docs/data-sources/elasticsearch_snapshot_repository.md | 2 +- docs/index.md | 9 +++++---- docs/resources/elasticsearch_cluster_settings.md | 2 +- docs/resources/elasticsearch_component_template.md | 2 +- docs/resources/elasticsearch_data_stream.md | 2 +- docs/resources/elasticsearch_enrich_policy.md | 2 +- docs/resources/elasticsearch_index.md | 2 +- docs/resources/elasticsearch_index_lifecycle.md | 2 +- docs/resources/elasticsearch_index_template.md | 2 +- docs/resources/elasticsearch_ingest_pipeline.md | 2 +- docs/resources/elasticsearch_logstash_pipeline.md | 2 +- docs/resources/elasticsearch_script.md | 2 +- docs/resources/elasticsearch_security_api_key.md | 2 +- docs/resources/elasticsearch_security_role.md | 2 +- docs/resources/elasticsearch_security_role_mapping.md | 2 +- docs/resources/elasticsearch_security_system_user.md | 2 +- docs/resources/elasticsearch_security_user.md | 2 +- docs/resources/elasticsearch_snapshot_lifecycle.md | 2 +- docs/resources/elasticsearch_snapshot_repository.md | 2 +- 22 files changed, 26 insertions(+), 25 deletions(-) diff --git a/docs/data-sources/elasticsearch_security_role.md b/docs/data-sources/elasticsearch_security_role.md index e701c318e..ab82db146 100644 --- a/docs/data-sources/elasticsearch_security_role.md +++ b/docs/data-sources/elasticsearch_security_role.md @@ -62,7 +62,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. 
+- `username` (String) Username to use for API authentication to Elasticsearch. diff --git a/docs/data-sources/elasticsearch_security_role_mapping.md b/docs/data-sources/elasticsearch_security_role_mapping.md index f95df12e7..e095abebd 100644 --- a/docs/data-sources/elasticsearch_security_role_mapping.md +++ b/docs/data-sources/elasticsearch_security_role_mapping.md @@ -61,4 +61,4 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. diff --git a/docs/data-sources/elasticsearch_security_user.md b/docs/data-sources/elasticsearch_security_user.md index e99327db6..de9476bf1 100644 --- a/docs/data-sources/elasticsearch_security_user.md +++ b/docs/data-sources/elasticsearch_security_user.md @@ -61,4 +61,4 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. 
diff --git a/docs/data-sources/elasticsearch_snapshot_repository.md b/docs/data-sources/elasticsearch_snapshot_repository.md index 86c9e86b9..487174580 100644 --- a/docs/data-sources/elasticsearch_snapshot_repository.md +++ b/docs/data-sources/elasticsearch_snapshot_repository.md @@ -89,7 +89,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. diff --git a/docs/index.md b/docs/index.md index 95f000f12..d302157d7 100644 --- a/docs/index.md +++ b/docs/index.md @@ -118,9 +118,9 @@ provider "elasticstack" { ### Optional -- `elasticsearch` (Block List) Elasticsearch connection configuration block. (see [below for nested schema](#nestedblock--elasticsearch)) -- `fleet` (Block List) Kibana connection configuration block. (see [below for nested schema](#nestedblock--fleet)) -- `kibana` (Block List) Kibana connection configuration block. (see [below for nested schema](#nestedblock--kibana)) +- `elasticsearch` (Block List, Max: 1) Elasticsearch connection configuration block. (see [below for nested schema](#nestedblock--elasticsearch)) +- `fleet` (Block List, Max: 1) Fleet connection configuration block. (see [below for nested schema](#nestedblock--fleet)) +- `kibana` (Block List, Max: 1) Kibana connection configuration block. 
(see [below for nested schema](#nestedblock--kibana)) ### Nested Schema for `elasticsearch` @@ -132,7 +132,7 @@ Optional: - `ca_file` (String) Path to a custom Certificate Authority certificate - `cert_data` (String) PEM encoded certificate for client auth - `cert_file` (String) Path to a file containing the PEM encoded certificate for client auth -- `endpoints` (List of String, Sensitive) A comma-separated list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number. +- `endpoints` (List of String, Sensitive) A list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number. - `insecure` (Boolean) Disable TLS certificate validation - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth @@ -147,6 +147,7 @@ Optional: - `api_key` (String, Sensitive) API Key to use for authentication to Fleet. - `ca_certs` (List of String) A list of paths to CA certificates to validate the certificate presented by the Fleet server. +- `endpoint` (String, Sensitive) The Fleet server where the terraform provider will point to, this must include the http(s) schema and port number. - `insecure` (Boolean) Disable TLS certificate validation - `password` (String, Sensitive) Password to use for API authentication to Fleet. - `username` (String) Username to use for API authentication to Fleet. 
diff --git a/docs/resources/elasticsearch_cluster_settings.md b/docs/resources/elasticsearch_cluster_settings.md index bc9f5e2f2..d172e3816 100644 --- a/docs/resources/elasticsearch_cluster_settings.md +++ b/docs/resources/elasticsearch_cluster_settings.md @@ -74,7 +74,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. diff --git a/docs/resources/elasticsearch_component_template.md b/docs/resources/elasticsearch_component_template.md index 5a9271e32..c064592aa 100644 --- a/docs/resources/elasticsearch_component_template.md +++ b/docs/resources/elasticsearch_component_template.md @@ -99,7 +99,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. 
## Import diff --git a/docs/resources/elasticsearch_data_stream.md b/docs/resources/elasticsearch_data_stream.md index 6d43627fa..1c5c1390d 100644 --- a/docs/resources/elasticsearch_data_stream.md +++ b/docs/resources/elasticsearch_data_stream.md @@ -105,7 +105,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. diff --git a/docs/resources/elasticsearch_enrich_policy.md b/docs/resources/elasticsearch_enrich_policy.md index 7d946e7e5..097a30e3a 100644 --- a/docs/resources/elasticsearch_enrich_policy.md +++ b/docs/resources/elasticsearch_enrich_policy.md @@ -81,7 +81,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. 
## Import diff --git a/docs/resources/elasticsearch_index.md b/docs/resources/elasticsearch_index.md index 6106cd621..e53611314 100644 --- a/docs/resources/elasticsearch_index.md +++ b/docs/resources/elasticsearch_index.md @@ -169,7 +169,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. diff --git a/docs/resources/elasticsearch_index_lifecycle.md b/docs/resources/elasticsearch_index_lifecycle.md index 693849eb9..799878f91 100644 --- a/docs/resources/elasticsearch_index_lifecycle.md +++ b/docs/resources/elasticsearch_index_lifecycle.md @@ -195,7 +195,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. 
diff --git a/docs/resources/elasticsearch_index_template.md b/docs/resources/elasticsearch_index_template.md index f13b0a27d..5bfbd097f 100644 --- a/docs/resources/elasticsearch_index_template.md +++ b/docs/resources/elasticsearch_index_template.md @@ -91,7 +91,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. diff --git a/docs/resources/elasticsearch_ingest_pipeline.md b/docs/resources/elasticsearch_ingest_pipeline.md index a1a6640a8..525ae686a 100644 --- a/docs/resources/elasticsearch_ingest_pipeline.md +++ b/docs/resources/elasticsearch_ingest_pipeline.md @@ -104,7 +104,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. 
## Import diff --git a/docs/resources/elasticsearch_logstash_pipeline.md b/docs/resources/elasticsearch_logstash_pipeline.md index 112b4613a..9b280209c 100644 --- a/docs/resources/elasticsearch_logstash_pipeline.md +++ b/docs/resources/elasticsearch_logstash_pipeline.md @@ -103,7 +103,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. ## Import diff --git a/docs/resources/elasticsearch_script.md b/docs/resources/elasticsearch_script.md index b826597ba..37a6a92b0 100644 --- a/docs/resources/elasticsearch_script.md +++ b/docs/resources/elasticsearch_script.md @@ -76,7 +76,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. 
## Import diff --git a/docs/resources/elasticsearch_security_api_key.md b/docs/resources/elasticsearch_security_api_key.md index 63b664eff..746a69073 100644 --- a/docs/resources/elasticsearch_security_api_key.md +++ b/docs/resources/elasticsearch_security_api_key.md @@ -83,7 +83,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. ## Import diff --git a/docs/resources/elasticsearch_security_role.md b/docs/resources/elasticsearch_security_role.md index e58d7a3fb..4736af57c 100644 --- a/docs/resources/elasticsearch_security_role.md +++ b/docs/resources/elasticsearch_security_role.md @@ -90,7 +90,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. 
diff --git a/docs/resources/elasticsearch_security_role_mapping.md b/docs/resources/elasticsearch_security_role_mapping.md index 7e0163764..06644a20d 100644 --- a/docs/resources/elasticsearch_security_role_mapping.md +++ b/docs/resources/elasticsearch_security_role_mapping.md @@ -71,7 +71,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. ## Import diff --git a/docs/resources/elasticsearch_security_system_user.md b/docs/resources/elasticsearch_security_system_user.md index 4ccbf7b59..0186e1427 100644 --- a/docs/resources/elasticsearch_security_system_user.md +++ b/docs/resources/elasticsearch_security_system_user.md @@ -65,4 +65,4 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. 
diff --git a/docs/resources/elasticsearch_security_user.md b/docs/resources/elasticsearch_security_user.md index dbadf1dd2..152350be7 100644 --- a/docs/resources/elasticsearch_security_user.md +++ b/docs/resources/elasticsearch_security_user.md @@ -90,7 +90,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. ## Import diff --git a/docs/resources/elasticsearch_snapshot_lifecycle.md b/docs/resources/elasticsearch_snapshot_lifecycle.md index 39f85c681..88dfe6826 100644 --- a/docs/resources/elasticsearch_snapshot_lifecycle.md +++ b/docs/resources/elasticsearch_snapshot_lifecycle.md @@ -89,7 +89,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. 
## Import diff --git a/docs/resources/elasticsearch_snapshot_repository.md b/docs/resources/elasticsearch_snapshot_repository.md index 438d23c0e..453f6c6ba 100644 --- a/docs/resources/elasticsearch_snapshot_repository.md +++ b/docs/resources/elasticsearch_snapshot_repository.md @@ -92,7 +92,7 @@ Optional: - `key_data` (String, Sensitive) PEM encoded private key for client auth - `key_file` (String) Path to a file containing the PEM encoded private key for client auth - `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. -- `username` (String) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. +- `username` (String) Username to use for API authentication to Elasticsearch. From a58fac93efe05109ceb737b706af4c030c7b6948 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Mon, 21 Aug 2023 22:41:29 +1000 Subject: [PATCH 06/15] Existing provider resources work --- provider/plugin_framework.go | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/provider/plugin_framework.go b/provider/plugin_framework.go index 6c467acce..fb17f2d19 100644 --- a/provider/plugin_framework.go +++ b/provider/plugin_framework.go @@ -3,10 +3,8 @@ package provider import ( "context" - "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/schema" "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-framework/path" fwprovider "github.com/hashicorp/terraform-plugin-framework/provider" fwschema "github.com/hashicorp/terraform-plugin-framework/provider/schema" "github.com/hashicorp/terraform-plugin-framework/resource" @@ -39,16 +37,6 @@ func (p *Provider) Schema(ctx context.Context, req fwprovider.SchemaRequest, res } func (p 
*Provider) Configure(ctx context.Context, req fwprovider.ConfigureRequest, res *fwprovider.ConfigureResponse) { - apiClients := []*clients.ApiClient{} - diags := req.Config.GetAttribute(ctx, path.Root(esKeyName), &apiClients) - res.Diagnostics.Append(diags...) - if res.Diagnostics.HasError() { - return - } - apiClient := clients.NewApiClientFunc(p.version) - - res.DataSourceData = apiClient - res.ResourceData = apiClient } func (p *Provider) DataSources(ctx context.Context) []func() datasource.DataSource { From 41d6e35c6c45642b19da149555af7eef799f630f Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Wed, 6 Sep 2023 21:35:13 +1000 Subject: [PATCH 07/15] Support client creation in the plugin framework --- internal/clients/api_client.go | 514 ++++-------------- internal/clients/config/base.go | 97 ++++ internal/clients/config/base_test.go | 37 ++ internal/clients/config/client.go | 14 + internal/clients/{ => config}/debug.go | 2 +- internal/clients/config/elasticsearch.go | 218 ++++++++ internal/clients/config/env.go | 29 + internal/clients/config/fleet.go | 113 ++++ internal/clients/config/framework.go | 43 ++ internal/clients/config/kibana.go | 101 ++++ internal/clients/config/provider.go | 39 ++ internal/clients/config/sdk.go | 60 ++ internal/elasticsearch/cluster/script.go | 6 +- internal/elasticsearch/cluster/settings.go | 6 +- internal/elasticsearch/cluster/slm.go | 6 +- .../cluster/snapshot_repository.go | 6 +- .../snapshot_repository_data_source.go | 2 +- internal/elasticsearch/enrich/policy.go | 6 +- .../enrich/policy_data_source.go | 2 +- .../elasticsearch/index/component_template.go | 6 +- internal/elasticsearch/index/data_stream.go | 6 +- internal/elasticsearch/index/ilm.go | 6 +- internal/elasticsearch/index/index.go | 10 +- internal/elasticsearch/index/template.go | 6 +- internal/elasticsearch/ingest/pipeline.go | 6 +- internal/elasticsearch/logstash/pipeline.go | 6 +- internal/elasticsearch/security/api_key.go | 6 +- 
internal/elasticsearch/security/role.go | 6 +- .../security/role_data_source.go | 2 +- .../elasticsearch/security/role_mapping.go | 6 +- .../security/role_mapping_data_source.go | 2 +- .../elasticsearch/security/system_user.go | 4 +- internal/elasticsearch/security/user.go | 6 +- .../security/user_data_source.go | 2 +- internal/elasticsearch/transform/transform.go | 8 +- internal/elasticsearch/watcher/watch.go | 6 +- internal/fleet/shared.go | 2 +- internal/kibana/alerting.go | 8 +- internal/kibana/connector.go | 8 +- internal/kibana/slo.go | 8 +- internal/kibana/space.go | 6 +- internal/schema/connection.go | 68 +-- provider/plugin_framework.go | 23 +- provider/provider.go | 2 +- 44 files changed, 983 insertions(+), 537 deletions(-) create mode 100644 internal/clients/config/base.go create mode 100644 internal/clients/config/base_test.go create mode 100644 internal/clients/config/client.go rename internal/clients/{ => config}/debug.go (99%) create mode 100644 internal/clients/config/elasticsearch.go create mode 100644 internal/clients/config/env.go create mode 100644 internal/clients/config/fleet.go create mode 100644 internal/clients/config/framework.go create mode 100644 internal/clients/config/kibana.go create mode 100644 internal/clients/config/provider.go create mode 100644 internal/clients/config/sdk.go diff --git a/internal/clients/api_client.go b/internal/clients/api_client.go index 7da6d38de..4edcc2849 100644 --- a/internal/clients/api_client.go +++ b/internal/clients/api_client.go @@ -2,13 +2,10 @@ package clients import ( "context" - "crypto/tls" "encoding/json" "errors" "fmt" "net/http" - "os" - "strconv" "strings" "github.com/deepmap/oapi-codegen/pkg/securityprovider" @@ -17,10 +14,12 @@ import ( "github.com/elastic/terraform-provider-elasticstack/generated/alerting" "github.com/elastic/terraform-provider-elasticstack/generated/connectors" "github.com/elastic/terraform-provider-elasticstack/generated/slo" + 
"github.com/elastic/terraform-provider-elasticstack/internal/clients/config" "github.com/elastic/terraform-provider-elasticstack/internal/clients/fleet" "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/hashicorp/go-version" + fwdiags "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" @@ -74,88 +73,32 @@ type ApiClient struct { version string } -func NewApiClientFunc(version string) func(context.Context, *schema.ResourceData) (interface{}, diag.Diagnostics) { +func NewApiClientFuncFromSDK(version string) func(context.Context, *schema.ResourceData) (interface{}, diag.Diagnostics) { return func(ctx context.Context, d *schema.ResourceData) (interface{}, diag.Diagnostics) { - return newApiClient(d, version) + return newApiClientFromSDK(d, version) } } func NewAcceptanceTestingClient() (*ApiClient, error) { - ua := buildUserAgent("tf-acceptance-testing") - baseConfig := BaseConfig{ - UserAgent: ua, - Header: http.Header{"User-Agent": []string{ua}}, - Username: os.Getenv("ELASTICSEARCH_USERNAME"), - Password: os.Getenv("ELASTICSEARCH_PASSWORD"), - } - - buildEsAccClient := func() (*elasticsearch.Client, error) { - config := elasticsearch.Config{ - Header: baseConfig.Header, - } - - if apiKey := os.Getenv("ELASTICSEARCH_API_KEY"); apiKey != "" { - config.APIKey = apiKey - } else { - config.Username = baseConfig.Username - config.Password = baseConfig.Password - } - - if es := os.Getenv("ELASTICSEARCH_ENDPOINTS"); es != "" { - endpoints := make([]string, 0) - for _, e := range strings.Split(es, ",") { - endpoints = append(endpoints, strings.TrimSpace(e)) - } - config.Addresses = endpoints - } - - if insecure := 
os.Getenv("ELASTICSEARCH_INSECURE"); insecure != "" { - if insecureValue, _ := strconv.ParseBool(insecure); insecureValue { - tlsClientConfig := ensureTLSClientConfig(&config) - tlsClientConfig.InsecureSkipVerify = true - } - } + version := "tf-acceptance-testing" + cfg := config.NewFromEnv(version) - return elasticsearch.NewClient(config) - } - - kibanaConfig := kibana.Config{ - Username: baseConfig.Username, - Password: baseConfig.Password, - Address: os.Getenv("KIBANA_ENDPOINT"), - } - if insecure := os.Getenv("KIBANA_INSECURE"); insecure != "" { - if insecureValue, _ := strconv.ParseBool(insecure); insecureValue { - kibanaConfig.DisableVerifySSL = true - } - } - - es, err := buildEsAccClient() + es, err := elasticsearch.NewClient(*cfg.Elasticsearch) if err != nil { return nil, err } - kib, err := kibana.NewClient(kibanaConfig) + kib, err := kibana.NewClient(*cfg.Kibana) if err != nil { return nil, err } - actionConnectors, err := buildConnectorsClient(baseConfig, kibanaConfig) + actionConnectors, err := buildConnectorsClient(cfg) if err != nil { return nil, fmt.Errorf("cannot create Kibana action connectors client: [%w]", err) } - fleetCfg := fleet.Config{ - URL: kibanaConfig.Address, - Username: kibanaConfig.Username, - Password: kibanaConfig.Password, - APIKey: os.Getenv("FLEET_API_KEY"), - Insecure: kibanaConfig.DisableVerifySSL, - } - if v := os.Getenv("FLEET_CA_CERTS"); v != "" { - fleetCfg.CACerts = strings.Split(os.Getenv("FLEET_CA_CERTS"), ",") - } - fleetClient, err := fleet.NewClient(fleetCfg) + fleetClient, err := fleet.NewClient(*cfg.Fleet) if err != nil { return nil, err } @@ -163,33 +106,49 @@ func NewAcceptanceTestingClient() (*ApiClient, error) { return &ApiClient{ elasticsearch: es, kibana: kib, - alerting: buildAlertingClient(baseConfig, kibanaConfig).AlertingApi, - slo: buildSloClient(baseConfig, kibanaConfig).SloAPI, + alerting: buildAlertingClient(cfg).AlertingApi, + slo: buildSloClient(cfg).SloAPI, connectors: actionConnectors, - 
kibanaConfig: kibanaConfig, + kibanaConfig: *cfg.Kibana, fleet: fleetClient, - version: "acceptance-testing", + version: version, }, nil } -const esConnectionKey string = "elasticsearch_connection" - -func NewApiClient(d *schema.ResourceData, meta interface{}) (*ApiClient, diag.Diagnostics) { - defaultClient := meta.(*ApiClient) +func NewApiClientFromFramework(ctx context.Context, cfg config.ProviderConfiguration, version string) (*ApiClient, fwdiags.Diagnostics) { + clientCfg, diags := config.NewFromFramework(ctx, cfg, version) + if diags.HasError() { + return nil, diags + } - if _, ok := d.GetOk(esConnectionKey); !ok { - return defaultClient, nil + client, err := newApiClientFromConfig(clientCfg, version) + if err != nil { + return nil, fwdiags.Diagnostics{ + fwdiags.NewErrorDiagnostic("Failed to create API client", err.Error()), + } } - version := defaultClient.version - baseConfig := buildBaseConfig(d, version, esConnectionKey) + return client, nil +} - esClient, diags := buildEsClient(d, baseConfig, false, esConnectionKey) +func NewApiClientFromSDKResource(d *schema.ResourceData, meta interface{}) (*ApiClient, diag.Diagnostics) { + defaultClient := meta.(*ApiClient) + version := defaultClient.version + resourceConfig, diags := config.NewFromSDKResource(d, version) if diags.HasError() { return nil, diags } + if resourceConfig == nil { + return defaultClient, nil + } + + esClient, err := buildEsClient(*resourceConfig) + if err != nil { + return nil, diag.FromErr(err) + } + return &ApiClient{ elasticsearch: esClient, elasticsearchClusterInfo: defaultClient.elasticsearchClusterInfo, @@ -199,16 +158,6 @@ func NewApiClient(d *schema.ResourceData, meta interface{}) (*ApiClient, diag.Di }, diags } -func ensureTLSClientConfig(config *elasticsearch.Config) *tls.Config { - if config.Transport == nil { - config.Transport = http.DefaultTransport.(*http.Transport) - } - if config.Transport.(*http.Transport).TLSClientConfig == nil { - 
config.Transport.(*http.Transport).TLSClientConfig = &tls.Config{} - } - return config.Transport.(*http.Transport).TLSClientConfig -} - func (a *ApiClient) GetESClient() (*elasticsearch.Client, error) { if a.elasticsearch == nil { return nil, errors.New("elasticsearch client not found") @@ -344,223 +293,27 @@ func (a *ApiClient) ClusterID(ctx context.Context) (*string, diag.Diagnostics) { return nil, diags } -type BaseConfig struct { - Username string - Password string - UserAgent string - Header http.Header -} - -// Build base config from ES which can be shared for other resources -func buildBaseConfig(d *schema.ResourceData, version string, esKey string) BaseConfig { - baseConfig := BaseConfig{} - baseConfig.UserAgent = buildUserAgent(version) - baseConfig.Header = http.Header{"User-Agent": []string{baseConfig.UserAgent}} - - if esConn, ok := d.GetOk(esKey); ok { - if resource := esConn.([]interface{})[0]; resource != nil { - config := resource.(map[string]interface{}) - - if username, ok := config["username"]; ok { - baseConfig.Username = username.(string) - } - if password, ok := config["password"]; ok { - baseConfig.Password = password.(string) - } - } - } - - return baseConfig -} - -func buildUserAgent(version string) string { - return fmt.Sprintf("elasticstack-terraform-provider/%s", version) -} - -func buildEsClient(d *schema.ResourceData, baseConfig BaseConfig, useEnvAsDefault bool, key string) (*elasticsearch.Client, diag.Diagnostics) { - var diags diag.Diagnostics - - esConn, ok := d.GetOk(key) - if !ok { - return nil, diags - } - - config := elasticsearch.Config{ - Header: baseConfig.Header, - Username: baseConfig.Username, - Password: baseConfig.Password, +func buildEsClient(cfg config.Client) (*elasticsearch.Client, error) { + if cfg.Elasticsearch == nil { + return nil, nil } - // if defined, then we only have a single entry - if es := esConn.([]interface{})[0]; es != nil { - esConfig := es.(map[string]interface{}) - - if apikey, ok := 
esConfig["api_key"]; ok { - config.APIKey = apikey.(string) - } - - if useEnvAsDefault { - if endpoints := os.Getenv("ELASTICSEARCH_ENDPOINTS"); endpoints != "" { - var addrs []string - for _, e := range strings.Split(endpoints, ",") { - addrs = append(addrs, strings.TrimSpace(e)) - } - config.Addresses = addrs - } - } - - if endpoints, ok := esConfig["endpoints"]; ok && len(endpoints.([]interface{})) > 0 { - var addrs []string - for _, e := range endpoints.([]interface{}) { - addrs = append(addrs, e.(string)) - } - config.Addresses = addrs - } - - if insecure, ok := esConfig["insecure"]; ok && insecure.(bool) { - tlsClientConfig := ensureTLSClientConfig(&config) - tlsClientConfig.InsecureSkipVerify = true - } - - if caFile, ok := esConfig["ca_file"]; ok && caFile.(string) != "" { - caCert, err := os.ReadFile(caFile.(string)) - if err != nil { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, - Summary: "Unable to read CA File", - Detail: err.Error(), - }) - return nil, diags - } - config.CACert = caCert - } - if caData, ok := esConfig["ca_data"]; ok && caData.(string) != "" { - config.CACert = []byte(caData.(string)) - } - - if certFile, ok := esConfig["cert_file"]; ok && certFile.(string) != "" { - if keyFile, ok := esConfig["key_file"]; ok && keyFile.(string) != "" { - cert, err := tls.LoadX509KeyPair(certFile.(string), keyFile.(string)) - if err != nil { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, - Summary: "Unable to read certificate or key file", - Detail: err.Error(), - }) - return nil, diags - } - tlsClientConfig := ensureTLSClientConfig(&config) - tlsClientConfig.Certificates = []tls.Certificate{cert} - } else { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, - Summary: "Unable to read key file", - Detail: "Path to key file has not been configured or is empty", - }) - return nil, diags - } - } - if certData, ok := esConfig["cert_data"]; ok && certData.(string) != "" { - if keyData, ok := 
esConfig["key_data"]; ok && keyData.(string) != "" { - cert, err := tls.X509KeyPair([]byte(certData.(string)), []byte(keyData.(string))) - if err != nil { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, - Summary: "Unable to parse certificate or key", - Detail: err.Error(), - }) - return nil, diags - } - tlsClientConfig := ensureTLSClientConfig(&config) - tlsClientConfig.Certificates = []tls.Certificate{cert} - } else { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, - Summary: "Unable to parse key", - Detail: "Key data has not been configured or is empty", - }) - return nil, diags - } - } - } - - if logging.IsDebugOrHigher() { - config.EnableDebugLogger = true - config.Logger = &debugLogger{Name: "elasticsearch"} - } - - es, err := elasticsearch.NewClient(config) + es, err := elasticsearch.NewClient(*cfg.Elasticsearch) if err != nil { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, - Summary: "Unable to create Elasticsearch client", - Detail: err.Error(), - }) - return nil, diags + return nil, fmt.Errorf("Unable to create Elasticsearch client: %w", err) } - return es, diags + return es, nil } -func buildKibanaConfig(d *schema.ResourceData, baseConfig BaseConfig) (kibana.Config, diag.Diagnostics) { - var diags diag.Diagnostics - - kibConn, ok := d.GetOk("kibana") - if !ok { - return kibana.Config{}, diags - } - - // Use ES details by default - config := kibana.Config{ - Username: baseConfig.Username, - Password: baseConfig.Password, +func buildKibanaClient(cfg config.Client) (*kibana.Client, error) { + if cfg.Kibana == nil { + return nil, nil } - // if defined, then we only have a single entry - if kib := kibConn.([]interface{})[0]; kib != nil { - kibConfig := kib.(map[string]interface{}) - - if username := os.Getenv("KIBANA_USERNAME"); username != "" { - config.Username = strings.TrimSpace(username) - } - if password := os.Getenv("KIBANA_PASSWORD"); password != "" { - config.Password = strings.TrimSpace(password) 
- } - if endpoint := os.Getenv("KIBANA_ENDPOINT"); endpoint != "" { - config.Address = endpoint - } - if insecure := os.Getenv("KIBANA_INSECURE"); insecure != "" { - if insecureValue, _ := strconv.ParseBool(insecure); insecureValue { - config.DisableVerifySSL = true - } - } - - if username, ok := kibConfig["username"]; ok && username != "" { - config.Username = username.(string) - } - if password, ok := kibConfig["password"]; ok && password != "" { - config.Password = password.(string) - } - - if endpoints, ok := kibConfig["endpoints"]; ok && len(endpoints.([]interface{})) > 0 { - // We're curently limited by the API to a single endpoint - if endpoint := endpoints.([]interface{})[0]; endpoint != nil { - config.Address = endpoint.(string) - } - } - - if insecure, ok := kibConfig["insecure"]; ok && insecure.(bool) { - config.DisableVerifySSL = true - } - } - - return config, nil -} - -func buildKibanaClient(config kibana.Config) (*kibana.Client, diag.Diagnostics) { - kib, err := kibana.NewClient(config) + kib, err := kibana.NewClient(*cfg.Kibana) if err != nil { - return nil, diag.FromErr(err) + return nil, err } if logging.IsDebugOrHigher() { @@ -570,12 +323,12 @@ func buildKibanaClient(config kibana.Config) (*kibana.Client, diag.Diagnostics) return kib, nil } -func buildAlertingClient(baseConfig BaseConfig, config kibana.Config) *alerting.APIClient { +func buildAlertingClient(cfg config.Client) *alerting.APIClient { alertingConfig := alerting.Configuration{ - UserAgent: baseConfig.UserAgent, + UserAgent: cfg.UserAgent, Servers: alerting.ServerConfigurations{ { - URL: config.Address, + URL: cfg.Kibana.Address, }, }, Debug: logging.IsDebugOrHigher(), @@ -583,8 +336,8 @@ func buildAlertingClient(baseConfig BaseConfig, config kibana.Config) *alerting. 
return alerting.NewAPIClient(&alertingConfig) } -func buildConnectorsClient(baseConfig BaseConfig, config kibana.Config) (*connectors.Client, error) { - basicAuthProvider, err := securityprovider.NewSecurityProviderBasicAuth(config.Username, config.Password) +func buildConnectorsClient(cfg config.Client) (*connectors.Client, error) { + basicAuthProvider, err := securityprovider.NewSecurityProviderBasicAuth(cfg.Kibana.Username, cfg.Kibana.Password) if err != nil { return nil, fmt.Errorf("unable to create basic auth provider: %w", err) } @@ -598,18 +351,18 @@ func buildConnectorsClient(baseConfig BaseConfig, config kibana.Config) (*connec } return connectors.NewClient( - config.Address, + cfg.Kibana.Address, connectors.WithRequestEditorFn(basicAuthProvider.Intercept), connectors.WithHTTPClient(httpClient), ) } -func buildSloClient(baseConfig BaseConfig, config kibana.Config) *slo.APIClient { +func buildSloClient(cfg config.Client) *slo.APIClient { sloConfig := slo.Configuration{ - UserAgent: baseConfig.UserAgent, + UserAgent: cfg.UserAgent, Servers: slo.ServerConfigurations{ { - URL: config.Address, + URL: cfg.Kibana.Address, }, }, Debug: logging.IsDebugOrHigher(), @@ -617,127 +370,68 @@ func buildSloClient(baseConfig BaseConfig, config kibana.Config) *slo.APIClient return slo.NewAPIClient(&sloConfig) } -func buildFleetClient(d *schema.ResourceData, kibanaCfg kibana.Config) (*fleet.Client, diag.Diagnostics) { - var diags diag.Diagnostics - - // Order of precedence for config options: - // 1 (highest): environment variables - // 2: resource config - // 3: kibana config - - // Set variables from kibana config. - config := fleet.Config{ - URL: kibanaCfg.Address, - Username: kibanaCfg.Username, - Password: kibanaCfg.Password, - Insecure: kibanaCfg.DisableVerifySSL, - } - - // Set variables from resource config. 
- if fleetDataRaw, ok := d.GetOk("fleet"); ok { - fleetData, ok := fleetDataRaw.([]interface{})[0].(map[string]any) - if !ok { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, - Summary: "Unable to parse Fleet configuration", - Detail: "Fleet configuration data has not been configured correctly or is empty", - }) - return nil, diags - } - if v, ok := fleetData["endpoint"].(string); ok && v != "" { - config.URL = v - } - if v, ok := fleetData["username"].(string); ok && v != "" { - config.Username = v - } - if v, ok := fleetData["password"].(string); ok && v != "" { - config.Password = v - } - if v, ok := fleetData["api_key"].(string); ok && v != "" { - config.APIKey = v - } - if v, ok := fleetData["ca_certs"].([]interface{}); ok && len(v) > 0 { - for _, elem := range v { - if vStr, elemOk := elem.(string); elemOk { - config.CACerts = append(config.CACerts, vStr) - } - } - } - if v, ok := fleetData["insecure"].(bool); ok { - config.Insecure = v - } +func buildFleetClient(cfg config.Client) (*fleet.Client, error) { + client, err := fleet.NewClient(*cfg.Fleet) + if err != nil { + return nil, fmt.Errorf("Unable to create Fleet client: %w", err) } - if v := os.Getenv("FLEET_ENDPOINT"); v != "" { - config.URL = v - } - if v := os.Getenv("FLEET_USERNAME"); v != "" { - config.Username = v - } - if v := os.Getenv("FLEET_PASSWORD"); v != "" { - config.Password = v - } - if v := os.Getenv("FLEET_API_KEY"); v != "" { - config.APIKey = v - } - if v := os.Getenv("FLEET_CA_CERTS"); v != "" { - config.CACerts = strings.Split(v, ",") + return client, nil +} + +func newApiClientFromSDK(d *schema.ResourceData, version string) (*ApiClient, diag.Diagnostics) { + cfg, diags := config.NewFromSDK(d, version) + if diags.HasError() { + return nil, diags } - client, err := fleet.NewClient(config) + client, err := newApiClientFromConfig(cfg, version) if err != nil { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, - Summary: "Unable to create Fleet client", - 
Detail: err.Error(), - }) - return nil, diags + return nil, diag.FromErr(err) } - return client, diags + return client, nil } -const esKey string = "elasticsearch" - -func newApiClient(d *schema.ResourceData, version string) (*ApiClient, diag.Diagnostics) { - baseConfig := buildBaseConfig(d, version, esKey) - kibanaConfig, diags := buildKibanaConfig(d, baseConfig) - if diags.HasError() { - return nil, diags +func newApiClientFromConfig(cfg config.Client, version string) (*ApiClient, error) { + client := &ApiClient{ + kibanaConfig: *cfg.Kibana, + version: version, } - esClient, diags := buildEsClient(d, baseConfig, true, esKey) - if diags.HasError() { - return nil, diags + if cfg.Elasticsearch != nil { + esClient, err := buildEsClient(cfg) + if err != nil { + return nil, err + } + client.elasticsearch = esClient } - kibanaClient, diags := buildKibanaClient(kibanaConfig) - if diags.HasError() { - return nil, diags - } + if cfg.Kibana != nil { + kibanaClient, err := buildKibanaClient(cfg) + if err != nil { + return nil, err + } - alertingClient := buildAlertingClient(baseConfig, kibanaConfig) - sloClient := buildSloClient(baseConfig, kibanaConfig) + connectorsClient, err := buildConnectorsClient(cfg) + if err != nil { + return nil, fmt.Errorf("cannot create Kibana connectors client: [%w]", err) + } - connectorsClient, err := buildConnectorsClient(baseConfig, kibanaConfig) - if err != nil { - return nil, diag.FromErr(fmt.Errorf("cannot create Kibana connectors client: [%w]", err)) + client.kibana = kibanaClient + client.alerting = buildAlertingClient(cfg).AlertingApi + client.slo = buildSloClient(cfg).SloAPI + client.connectors = connectorsClient } - fleetClient, diags := buildFleetClient(d, kibanaConfig) - if diags.HasError() { - return nil, diags + if cfg.Fleet != nil { + fleetClient, err := buildFleetClient(cfg) + if err != nil { + return nil, err + } + + client.fleet = fleetClient } - return &ApiClient{ - elasticsearch: esClient, - elasticsearchClusterInfo: nil, - 
kibana: kibanaClient, - kibanaConfig: kibanaConfig, - alerting: alertingClient.AlertingApi, - connectors: connectorsClient, - slo: sloClient.SloAPI, - fleet: fleetClient, - version: version, - }, nil + return client, nil } diff --git a/internal/clients/config/base.go b/internal/clients/config/base.go new file mode 100644 index 000000000..89e2f3b45 --- /dev/null +++ b/internal/clients/config/base.go @@ -0,0 +1,97 @@ +package config + +import ( + "fmt" + "net/http" + "os" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +type baseConfig struct { + Username string + Password string + ApiKey string + UserAgent string + Header http.Header +} + +func newBaseConfigFromSDK(d *schema.ResourceData, version string, esKey string) baseConfig { + userAgent := buildUserAgent(version) + baseConfig := baseConfig{ + UserAgent: userAgent, + Header: http.Header{"User-Agent": []string{userAgent}}, + } + + if esConn, ok := d.GetOk(esKey); ok { + if resource := esConn.([]interface{})[0]; resource != nil { + config := resource.(map[string]interface{}) + + if apiKey, ok := config["api_key"]; ok && apiKey != "" { + baseConfig.ApiKey = apiKey.(string) + } else { + if username, ok := config["username"]; ok { + baseConfig.Username = username.(string) + } + if password, ok := config["password"]; ok { + baseConfig.Password = password.(string) + } + } + } + } + + return baseConfig.withEnvironmentOverrides() +} + +func newBaseConfigFromFramework(config ProviderConfiguration, version string) baseConfig { + userAgent := buildUserAgent(version) + baseConfig := baseConfig{ + UserAgent: userAgent, + Header: http.Header{"User-Agent": []string{userAgent}}, + } + + if len(config.Elasticsearch) > 0 { + esConfig := config.Elasticsearch[0] + baseConfig.Username = esConfig.Username.ValueString() + baseConfig.Password = esConfig.Password.ValueString() + baseConfig.ApiKey = esConfig.APIKey.ValueString() + } + + return baseConfig.withEnvironmentOverrides() +} + +func (b 
baseConfig) withEnvironmentOverrides() baseConfig { + b.Username = withEnvironmentOverride(b.Username, "ELASTICSEARCH_USERNAME") + b.Password = withEnvironmentOverride(b.Password, "ELASTICSEARCH_PASSWORD") + b.ApiKey = withEnvironmentOverride(b.ApiKey, "ELASTICSEARCH_API_KEY") + + return b +} + +func (b baseConfig) toKibanaConfig() kibanaConfig { + return kibanaConfig{ + Username: b.Username, + Password: b.Password, + } +} + +func (b baseConfig) toElasticsearchConfig() elasticsearchConfig { + return elasticsearchConfig{ + Header: b.Header, + Username: b.Username, + Password: b.Password, + APIKey: b.ApiKey, + } +} + +func withEnvironmentOverride(currentValue, envOverrideKey string) string { + if envValue, ok := os.LookupEnv(envOverrideKey); ok { + return envValue + } + + return currentValue +} + +func buildUserAgent(version string) string { + return fmt.Sprintf("elasticstack-terraform-provider/%s", version) +} diff --git a/internal/clients/config/base_test.go b/internal/clients/config/base_test.go new file mode 100644 index 000000000..4c95fc24c --- /dev/null +++ b/internal/clients/config/base_test.go @@ -0,0 +1,37 @@ +package config + +import ( + "net/http" + "os" + "testing" + + providerSchema "github.com/elastic/terraform-provider-elasticstack/internal/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/stretchr/testify/require" +) + +func TestNewBaseConfigFromSDK(t *testing.T) { + os.Unsetenv("ELASTICSEARCH_USERNAME") + os.Unsetenv("ELASTICSEARCH_PASSWORD") + os.Unsetenv("ELASTICSEARCH_API_KEY") + + rd := schema.TestResourceDataRaw(t, map[string]*schema.Schema{ + "elasticsearch": providerSchema.GetEsConnectionSchema("elasticsearch", true), + }, map[string]interface{}{ + "elasticsearch": []interface{}{ + map[string]interface{}{ + "username": "elastic", + "password": "changeme", + }, + }, + }) + + baseCfg := newBaseConfigFromSDK(rd, "unit-testing", "elasticsearch") + ua := 
buildUserAgent("unit-testing") + require.Equal(t, baseConfig{ + Username: "elastic", + Password: "changeme", + UserAgent: ua, + Header: http.Header{"User-Agent": []string{ua}}, + }, baseCfg) +} diff --git a/internal/clients/config/client.go b/internal/clients/config/client.go new file mode 100644 index 000000000..83723d840 --- /dev/null +++ b/internal/clients/config/client.go @@ -0,0 +1,14 @@ +package config + +import ( + "github.com/disaster37/go-kibana-rest/v8" + "github.com/elastic/go-elasticsearch/v7" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/fleet" +) + +type Client struct { + UserAgent string + Kibana *kibana.Config + Elasticsearch *elasticsearch.Config + Fleet *fleet.Config +} diff --git a/internal/clients/debug.go b/internal/clients/config/debug.go similarity index 99% rename from internal/clients/debug.go rename to internal/clients/config/debug.go index 93633b8fa..6c69ff2ff 100644 --- a/internal/clients/debug.go +++ b/internal/clients/config/debug.go @@ -1,4 +1,4 @@ -package clients +package config import ( "context" diff --git a/internal/clients/config/elasticsearch.go b/internal/clients/config/elasticsearch.go new file mode 100644 index 000000000..87db4d7fb --- /dev/null +++ b/internal/clients/config/elasticsearch.go @@ -0,0 +1,218 @@ +package config + +import ( + "context" + "crypto/tls" + "net/http" + "os" + "strconv" + "strings" + + "github.com/elastic/go-elasticsearch/v7" + fwdiags "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + sdkdiags "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-testing/helper/logging" +) + +type elasticsearchConfig elasticsearch.Config + +func newElasticsearchConfigFromSDK(d *schema.ResourceData, base baseConfig, key string, 
useEnvAsDefault bool) (*elasticsearchConfig, sdkdiags.Diagnostics) { + esConn, ok := d.GetOk(key) + if !ok { + return nil, nil + } + + var diags diag.Diagnostics + config := base.toElasticsearchConfig() + + // if defined, then we only have a single entry + if es := esConn.([]interface{})[0]; es != nil { + esConfig := es.(map[string]interface{}) + + if endpoints, ok := esConfig["endpoints"]; ok && len(endpoints.([]interface{})) > 0 { + var addrs []string + for _, e := range endpoints.([]interface{}) { + addrs = append(addrs, e.(string)) + } + config.Addresses = addrs + } + + if insecure, ok := esConfig["insecure"]; ok && insecure.(bool) { + tlsClientConfig := config.ensureTLSClientConfig() + tlsClientConfig.InsecureSkipVerify = true + } + + if caFile, ok := esConfig["ca_file"]; ok && caFile.(string) != "" { + caCert, err := os.ReadFile(caFile.(string)) + if err != nil { + diags = append(diags, diag.Diagnostic{ + Severity: diag.Error, + Summary: "Unable to read CA File", + Detail: err.Error(), + }) + return nil, diags + } + config.CACert = caCert + } + if caData, ok := esConfig["ca_data"]; ok && caData.(string) != "" { + config.CACert = []byte(caData.(string)) + } + + if certFile, ok := esConfig["cert_file"]; ok && certFile.(string) != "" { + if keyFile, ok := esConfig["key_file"]; ok && keyFile.(string) != "" { + cert, err := tls.LoadX509KeyPair(certFile.(string), keyFile.(string)) + if err != nil { + diags = append(diags, diag.Diagnostic{ + Severity: diag.Error, + Summary: "Unable to read certificate or key file", + Detail: err.Error(), + }) + return nil, diags + } + tlsClientConfig := config.ensureTLSClientConfig() + tlsClientConfig.Certificates = []tls.Certificate{cert} + } else { + diags = append(diags, diag.Diagnostic{ + Severity: diag.Error, + Summary: "Unable to read key file", + Detail: "Path to key file has not been configured or is empty", + }) + return nil, diags + } + } + if certData, ok := esConfig["cert_data"]; ok && certData.(string) != "" { + if 
keyData, ok := esConfig["key_data"]; ok && keyData.(string) != "" { + cert, err := tls.X509KeyPair([]byte(certData.(string)), []byte(keyData.(string))) + if err != nil { + diags = append(diags, diag.Diagnostic{ + Severity: diag.Error, + Summary: "Unable to parse certificate or key", + Detail: err.Error(), + }) + return nil, diags + } + tlsClientConfig := config.ensureTLSClientConfig() + tlsClientConfig.Certificates = []tls.Certificate{cert} + } else { + diags = append(diags, diag.Diagnostic{ + Severity: diag.Error, + Summary: "Unable to parse key", + Detail: "Key data has not been configured or is empty", + }) + return nil, diags + } + } + } + + if logging.IsDebugOrHigher() { + config.EnableDebugLogger = true + config.Logger = &debugLogger{Name: "elasticsearch"} + } + + config = config.withEnvironmentOverrides() + return &config, nil +} + +func newElasticsearchConfigFromFramework(ctx context.Context, cfg ProviderConfiguration, base baseConfig) (*elasticsearchConfig, fwdiags.Diagnostics) { + if len(cfg.Elasticsearch) == 0 { + return nil, nil + } + + config := base.toElasticsearchConfig() + esConfig := cfg.Elasticsearch[0] + + var endpoints []string + diags := esConfig.Endpoints.ElementsAs(ctx, &endpoints, true) + if diags.HasError() { + return nil, diags + } + + if len(endpoints) > 0 { + config.Addresses = endpoints + } + + if esConfig.Insecure.ValueBool() { + tlsClientConfig := config.ensureTLSClientConfig() + tlsClientConfig.InsecureSkipVerify = true + } + + if caFile := esConfig.CAFile.ValueString(); caFile != "" { + caCert, err := os.ReadFile(caFile) + if err != nil { + diags.Append(fwdiags.NewErrorDiagnostic("Unable to read CA file", err.Error())) + return nil, diags + } + config.CACert = caCert + } + if caData := esConfig.CAData.ValueString(); caData != "" { + config.CACert = []byte(caData) + } + + if certFile := esConfig.CertFile.ValueString(); certFile != "" { + if keyFile := esConfig.KeyFile.ValueString(); keyFile != "" { + cert, err := 
tls.LoadX509KeyPair(certFile, keyFile) + if err != nil { + diags.Append(fwdiags.NewErrorDiagnostic("Unable to read certificate or key file", err.Error())) + return nil, diags + } + tlsClientConfig := config.ensureTLSClientConfig() + tlsClientConfig.Certificates = []tls.Certificate{cert} + } else { + diags.Append(fwdiags.NewErrorDiagnostic("Unable to read key file", "Path to key file has not been configured or is empty")) + return nil, diags + } + } + if certData := esConfig.CertData.ValueString(); certData != "" { + if keyData := esConfig.KeyData.ValueString(); keyData != "" { + cert, err := tls.X509KeyPair([]byte(certData), []byte(keyData)) + if err != nil { + diags.Append(fwdiags.NewErrorDiagnostic("Unable to parse certificate or key", err.Error())) + return nil, diags + } + tlsClientConfig := config.ensureTLSClientConfig() + tlsClientConfig.Certificates = []tls.Certificate{cert} + } else { + diags.Append(fwdiags.NewErrorDiagnostic("Unable to parse key", "Key data has not been configured or is empty")) + return nil, diags + } + } + + if logging.IsDebugOrHigher() { + config.EnableDebugLogger = true + config.Logger = &debugLogger{Name: "elasticsearch"} + } + + config = config.withEnvironmentOverrides() + return &config, nil +} + +func (c *elasticsearchConfig) ensureTLSClientConfig() *tls.Config { + if c.Transport == nil { + c.Transport = http.DefaultTransport.(*http.Transport) + } + if c.Transport.(*http.Transport).TLSClientConfig == nil { + c.Transport.(*http.Transport).TLSClientConfig = &tls.Config{} + } + return c.Transport.(*http.Transport).TLSClientConfig +} + +func (c elasticsearchConfig) withEnvironmentOverrides() elasticsearchConfig { + if endpointsCSV, ok := os.LookupEnv("ELASTICSEARCH_ENDPOINTS"); ok { + endpoints := make([]string, 0) + for _, e := range strings.Split(endpointsCSV, ",") { + endpoints = append(endpoints, strings.TrimSpace(e)) + } + c.Addresses = endpoints + } + + if insecure, ok := os.LookupEnv("ELASTICSEARCH_INSECURE"); ok { + if 
insecureValue, _ := strconv.ParseBool(insecure); insecureValue { + tlsClientConfig := c.ensureTLSClientConfig() + tlsClientConfig.InsecureSkipVerify = true + } + } + + return c +} diff --git a/internal/clients/config/env.go b/internal/clients/config/env.go new file mode 100644 index 000000000..a01845350 --- /dev/null +++ b/internal/clients/config/env.go @@ -0,0 +1,29 @@ +package config + +import ( + "github.com/disaster37/go-kibana-rest/v8" + "github.com/elastic/go-elasticsearch/v7" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/fleet" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" +) + +func NewFromEnv(version string) Client { + base := baseConfig{ + UserAgent: buildUserAgent(version), + }.withEnvironmentOverrides() + + client := Client{ + UserAgent: base.UserAgent, + } + + esCfg := base.toElasticsearchConfig().withEnvironmentOverrides() + client.Elasticsearch = utils.Pointer(elasticsearch.Config(esCfg)) + + kibanaCfg := base.toKibanaConfig().withEnvironmentOverrides() + client.Kibana = (*kibana.Config)(&kibanaCfg) + + fleetCfg := kibanaCfg.toFleetConfig().withEnvironmentOverrides() + client.Fleet = (*fleet.Config)(&fleetCfg) + + return client +} diff --git a/internal/clients/config/fleet.go b/internal/clients/config/fleet.go new file mode 100644 index 000000000..58b46fc51 --- /dev/null +++ b/internal/clients/config/fleet.go @@ -0,0 +1,113 @@ +package config + +import ( + "context" + "os" + "strings" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients/fleet" + fwdiags "github.com/hashicorp/terraform-plugin-framework/diag" + sdkdiags "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +type fleetConfig fleet.Config + +func newFleetConfigFromSDK(d *schema.ResourceData, kibanaCfg kibanaConfig) (fleetConfig, sdkdiags.Diagnostics) { + config := 
kibanaCfg.toFleetConfig() + + // Set variables from resource config. + if fleetDataRaw, ok := d.GetOk("fleet"); ok { + fleetData, ok := fleetDataRaw.([]interface{})[0].(map[string]any) + if !ok { + diags := sdkdiags.Diagnostics{ + sdkdiags.Diagnostic{ + Severity: sdkdiags.Error, + Summary: "Unable to parse Fleet configuration", + Detail: "Fleet configuration data has not been configured correctly or is empty", + }, + } + return fleetConfig{}, diags + } + if v, ok := fleetData["endpoint"].(string); ok && v != "" { + config.URL = v + } + if v, ok := fleetData["username"].(string); ok && v != "" { + config.Username = v + } + if v, ok := fleetData["password"].(string); ok && v != "" { + config.Password = v + } + if v, ok := fleetData["api_key"].(string); ok && v != "" { + config.APIKey = v + } + if v, ok := fleetData["ca_certs"].([]interface{}); ok && len(v) > 0 { + for _, elem := range v { + if vStr, elemOk := elem.(string); elemOk { + config.CACerts = append(config.CACerts, vStr) + } + } + } + if v, ok := fleetData["insecure"].(bool); ok { + config.Insecure = v + } + } + + return config.withEnvironmentOverrides(), nil +} + +func newFleetConfigFromFramework(ctx context.Context, cfg ProviderConfiguration, kibanaCfg kibanaConfig) (fleetConfig, fwdiags.Diagnostics) { + config := kibanaCfg.toFleetConfig() + + if len(cfg.Fleet) > 0 { + fleetCfg := cfg.Fleet[0] + if fleetCfg.Username.ValueString() != "" { + config.Username = fleetCfg.Username.ValueString() + } + if fleetCfg.Password.ValueString() != "" { + config.Password = fleetCfg.Password.ValueString() + } + if fleetCfg.Endpoint.ValueString() != "" { + config.URL = fleetCfg.Endpoint.ValueString() + } + if fleetCfg.APIKey.ValueString() != "" { + config.APIKey = fleetCfg.APIKey.ValueString() + } + + if !fleetCfg.Insecure.IsNull() && !fleetCfg.Insecure.IsUnknown() { + config.Insecure = fleetCfg.Insecure.ValueBool() + } + + var caCerts []string + diags := fleetCfg.CACerts.ElementsAs(ctx, &caCerts, true) + if diags.HasError() 
{ + return fleetConfig{}, diags + } + + if len(caCerts) > 0 { + config.CACerts = caCerts + } + } + + return config.withEnvironmentOverrides(), nil +} + +func (c fleetConfig) withEnvironmentOverrides() fleetConfig { + if v, ok := os.LookupEnv("FLEET_ENDPOINT"); ok { + c.URL = v + } + if v, ok := os.LookupEnv("FLEET_USERNAME"); ok { + c.Username = v + } + if v, ok := os.LookupEnv("FLEET_PASSWORD"); ok { + c.Password = v + } + if v, ok := os.LookupEnv("FLEET_API_KEY"); ok { + c.APIKey = v + } + if v, ok := os.LookupEnv("FLEET_CA_CERTS"); ok { + c.CACerts = strings.Split(v, ",") + } + + return c +} diff --git a/internal/clients/config/framework.go b/internal/clients/config/framework.go new file mode 100644 index 000000000..87540e97f --- /dev/null +++ b/internal/clients/config/framework.go @@ -0,0 +1,43 @@ +package config + +import ( + "context" + + "github.com/disaster37/go-kibana-rest/v8" + "github.com/elastic/go-elasticsearch/v7" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/fleet" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework/diag" +) + +func NewFromFramework(ctx context.Context, cfg ProviderConfiguration, version string) (Client, diag.Diagnostics) { + base := newBaseConfigFromFramework(cfg, version) + client := Client{ + UserAgent: base.UserAgent, + } + + esCfg, diags := newElasticsearchConfigFromFramework(ctx, cfg, base) + if diags.HasError() { + return Client{}, diags + } + + if esCfg != nil { + client.Elasticsearch = utils.Pointer(elasticsearch.Config(*esCfg)) + } + + kibanaCfg, diags := newKibanaConfigFromFramework(ctx, cfg, base) + if diags.HasError() { + return Client{}, diags + } + + client.Kibana = (*kibana.Config)(&kibanaCfg) + + fleetCfg, diags := newFleetConfigFromFramework(ctx, cfg, kibanaCfg) + if diags.HasError() { + return Client{}, diags + } + + client.Fleet = (*fleet.Config)(&fleetCfg) + + return 
client, nil
+}
diff --git a/internal/clients/config/kibana.go b/internal/clients/config/kibana.go
new file mode 100644
index 000000000..3737495ab
--- /dev/null
+++ b/internal/clients/config/kibana.go
@@ -0,0 +1,101 @@
+package config
+
+import (
+	"context"
+	"os"
+	"strconv"
+
+	"github.com/disaster37/go-kibana-rest/v8"
+	fwdiags "github.com/hashicorp/terraform-plugin-framework/diag"
+	sdkdiags "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+)
+
+type kibanaConfig kibana.Config
+
+func newKibanaConfigFromSDK(d *schema.ResourceData, base baseConfig) (kibanaConfig, sdkdiags.Diagnostics) {
+	var diags sdkdiags.Diagnostics
+
+	kibConn, ok := d.GetOk("kibana")
+	if !ok {
+		return kibanaConfig{}, diags
+	}
+
+	// Use ES details by default
+	config := base.toKibanaConfig()
+
+	// if defined, then we only have a single entry
+	if kib := kibConn.([]interface{})[0]; kib != nil {
+		kibConfig := kib.(map[string]interface{})
+
+		if username, ok := kibConfig["username"]; ok && username != "" {
+			config.Username = username.(string)
+		}
+		if password, ok := kibConfig["password"]; ok && password != "" {
+			config.Password = password.(string)
+		}
+
+		if endpoints, ok := kibConfig["endpoints"]; ok && len(endpoints.([]interface{})) > 0 {
+			// We're currently limited by the API to a single endpoint
+			if endpoint := endpoints.([]interface{})[0]; endpoint != nil {
+				config.Address = endpoint.(string)
+			}
+		}
+
+		if insecure, ok := kibConfig["insecure"]; ok && insecure.(bool) {
+			config.DisableVerifySSL = true
+		}
+	}
+
+	return config.withEnvironmentOverrides(), nil
+}
+
+func newKibanaConfigFromFramework(ctx context.Context, cfg ProviderConfiguration, base baseConfig) (kibanaConfig, fwdiags.Diagnostics) {
+	config := base.toKibanaConfig()
+
+	if len(cfg.Kibana) > 0 {
+		kibConfig := cfg.Kibana[0]
+		if kibConfig.Username.ValueString() != "" {
+			config.Username = 
kibConfig.Username.ValueString() + } + if kibConfig.Password.ValueString() != "" { + config.Password = kibConfig.Password.ValueString() + } + var endpoints []string + diags := kibConfig.Endpoints.ElementsAs(ctx, &endpoints, true) + if diags.HasError() { + return kibanaConfig{}, diags + } + + if len(endpoints) > 0 { + config.Address = endpoints[0] + } + + config.DisableVerifySSL = kibConfig.Insecure.ValueBool() + } + + return config.withEnvironmentOverrides(), nil +} + +func (k kibanaConfig) withEnvironmentOverrides() kibanaConfig { + k.Username = withEnvironmentOverride(k.Username, "KIBANA_USERNAME") + k.Password = withEnvironmentOverride(k.Password, "KIBANA_PASSWORD") + k.Address = withEnvironmentOverride(k.Address, "KIBANA_ENDPOINT") + + if insecure, ok := os.LookupEnv("KIBANA_INSECURE"); ok { + if insecureValue, _ := strconv.ParseBool(insecure); insecureValue { + k.DisableVerifySSL = true + } + } + + return k +} + +func (k kibanaConfig) toFleetConfig() fleetConfig { + return fleetConfig{ + URL: k.Address, + Username: k.Username, + Password: k.Password, + Insecure: k.DisableVerifySSL, + } +} diff --git a/internal/clients/config/provider.go b/internal/clients/config/provider.go new file mode 100644 index 000000000..7da987528 --- /dev/null +++ b/internal/clients/config/provider.go @@ -0,0 +1,39 @@ +package config + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type ProviderConfiguration struct { + Elasticsearch []ElasticsearchConnection `tfsdk:"elasticsearch"` + Kibana []KibanaConnection `tfsdk:"kibana"` + Fleet []FleetConnection `tfsdk:"fleet"` +} + +type ElasticsearchConnection struct { + Username types.String `tfsdk:"username"` + Password types.String `tfsdk:"password"` + APIKey types.String `tfsdk:"api_key"` + Endpoints types.List `tfsdk:"endpoints"` + Insecure types.Bool `tfsdk:"insecure"` + CAFile types.String `tfsdk:"ca_file"` + CAData types.String `tfsdk:"ca_data"` + CertFile types.String `tfsdk:"cert_file"` + KeyFile 
types.String `tfsdk:"key_file"` + CertData types.String `tfsdk:"cert_data"` + KeyData types.String `tfsdk:"key_data"` +} + +type KibanaConnection struct { + Username types.String `tfsdk:"username"` + Password types.String `tfsdk:"password"` + Endpoints types.List `tfsdk:"endpoints"` + Insecure types.Bool `tfsdk:"insecure"` +} + +type FleetConnection struct { + Username types.String `tfsdk:"username"` + Password types.String `tfsdk:"password"` + APIKey types.String `tfsdk:"api_key"` + Endpoint types.String `tfsdk:"endpoint"` + Insecure types.Bool `tfsdk:"insecure"` + CACerts types.List `tfsdk:"ca_certs"` +} diff --git a/internal/clients/config/sdk.go b/internal/clients/config/sdk.go new file mode 100644 index 000000000..da7cf84e2 --- /dev/null +++ b/internal/clients/config/sdk.go @@ -0,0 +1,60 @@ +package config + +import ( + "github.com/disaster37/go-kibana-rest/v8" + "github.com/elastic/go-elasticsearch/v7" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/fleet" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +const ( + esKey string = "elasticsearch" + esConnectionKey string = "elasticsearch_connection" +) + +func NewFromSDK(d *schema.ResourceData, version string) (Client, diag.Diagnostics) { + return newFromSDK(d, version, esKey) +} + +func NewFromSDKResource(d *schema.ResourceData, version string) (*Client, diag.Diagnostics) { + if _, ok := d.GetOk(esConnectionKey); !ok { + return nil, nil + } + + client, diags := newFromSDK(d, version, esConnectionKey) + return &client, diags +} + +func newFromSDK(d *schema.ResourceData, version, esConfigKey string) (Client, diag.Diagnostics) { + base := newBaseConfigFromSDK(d, version, esConfigKey) + client := Client{ + UserAgent: base.UserAgent, + } + + esCfg, diags := 
newElasticsearchConfigFromSDK(d, base, esConfigKey, true) + if diags.HasError() { + return Client{}, diags + } + + if esCfg != nil { + client.Elasticsearch = utils.Pointer(elasticsearch.Config(*esCfg)) + } + + kibanaCfg, diags := newKibanaConfigFromSDK(d, base) + if diags.HasError() { + return Client{}, diags + } + + client.Kibana = (*kibana.Config)(&kibanaCfg) + + fleetCfg, diags := newFleetConfigFromSDK(d, kibanaCfg) + if diags.HasError() { + return Client{}, diags + } + + client.Fleet = (*fleet.Config)(&fleetCfg) + + return client, nil +} diff --git a/internal/elasticsearch/cluster/script.go b/internal/elasticsearch/cluster/script.go index 35eac8d29..8b09c443e 100644 --- a/internal/elasticsearch/cluster/script.go +++ b/internal/elasticsearch/cluster/script.go @@ -66,7 +66,7 @@ func ResourceScript() *schema.Resource { } func resourceScriptRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -100,7 +100,7 @@ func resourceScriptRead(ctx context.Context, d *schema.ResourceData, meta interf } func resourceScriptPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -137,7 +137,7 @@ func resourceScriptPut(ctx context.Context, d *schema.ResourceData, meta interfa } func resourceScriptDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/cluster/settings.go b/internal/elasticsearch/cluster/settings.go index a37b0e32a..09910e7c8 100644 --- a/internal/elasticsearch/cluster/settings.go +++ 
b/internal/elasticsearch/cluster/settings.go @@ -86,7 +86,7 @@ func ResourceSettings() *schema.Resource { } func resourceClusterSettingsPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -204,7 +204,7 @@ func expandSettings(s interface{}) (map[string]interface{}, diag.Diagnostics) { } func resourceClusterSettingsRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -258,7 +258,7 @@ func flattenSettings(name string, old, new map[string]interface{}) []interface{} } func resourceClusterSettingsDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/cluster/slm.go b/internal/elasticsearch/cluster/slm.go index 06fb9b834..77cd8a827 100644 --- a/internal/elasticsearch/cluster/slm.go +++ b/internal/elasticsearch/cluster/slm.go @@ -155,7 +155,7 @@ func ResourceSlm() *schema.Resource { } func resourceSlmPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -237,7 +237,7 @@ func resourceSlmPut(ctx context.Context, d *schema.ResourceData, meta interface{ } func resourceSlmRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -329,7 
+329,7 @@ func resourceSlmRead(ctx context.Context, d *schema.ResourceData, meta interface } func resourceSlmDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/cluster/snapshot_repository.go b/internal/elasticsearch/cluster/snapshot_repository.go index b6c21a8fa..ec38f1420 100644 --- a/internal/elasticsearch/cluster/snapshot_repository.go +++ b/internal/elasticsearch/cluster/snapshot_repository.go @@ -322,7 +322,7 @@ func ResourceSnapshotRepository() *schema.Resource { } func resourceSnapRepoPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -367,7 +367,7 @@ func expandFsSettings(source, target map[string]interface{}) { } func resourceSnapRepoRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -450,7 +450,7 @@ func flattenRepoSettings(r *models.SnapshotRepository, s map[string]*schema.Sche } func resourceSnapRepoDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/cluster/snapshot_repository_data_source.go b/internal/elasticsearch/cluster/snapshot_repository_data_source.go index 71c03f3be..f29a58899 100644 --- a/internal/elasticsearch/cluster/snapshot_repository_data_source.go +++ b/internal/elasticsearch/cluster/snapshot_repository_data_source.go @@ -258,7 
+258,7 @@ func DataSourceSnapshotRespository() *schema.Resource { } func dataSourceSnapRepoRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/enrich/policy.go b/internal/elasticsearch/enrich/policy.go index 20e3ebcbf..16179c441 100644 --- a/internal/elasticsearch/enrich/policy.go +++ b/internal/elasticsearch/enrich/policy.go @@ -92,7 +92,7 @@ func ResourceEnrichPolicy() *schema.Resource { } func resourceEnrichPolicyRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -133,7 +133,7 @@ func resourceEnrichPolicyRead(ctx context.Context, d *schema.ResourceData, meta } func resourceEnrichPolicyPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -169,7 +169,7 @@ func resourceEnrichPolicyPut(ctx context.Context, d *schema.ResourceData, meta i } func resourceEnrichPolicyDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/enrich/policy_data_source.go b/internal/elasticsearch/enrich/policy_data_source.go index a90dc11e3..975c50294 100644 --- a/internal/elasticsearch/enrich/policy_data_source.go +++ b/internal/elasticsearch/enrich/policy_data_source.go @@ -61,7 +61,7 @@ func DataSourceEnrichPolicy() *schema.Resource { } func dataSourceEnrichPolicyRead(ctx context.Context, 
d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/index/component_template.go b/internal/elasticsearch/index/component_template.go index a3a9559a7..8d0448dd8 100644 --- a/internal/elasticsearch/index/component_template.go +++ b/internal/elasticsearch/index/component_template.go @@ -135,7 +135,7 @@ func ResourceComponentTemplate() *schema.Resource { } func resourceComponentTemplatePut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -228,7 +228,7 @@ func resourceComponentTemplatePut(ctx context.Context, d *schema.ResourceData, m } func resourceComponentTemplateRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -282,7 +282,7 @@ func resourceComponentTemplateRead(ctx context.Context, d *schema.ResourceData, } func resourceComponentTemplateDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/index/data_stream.go b/internal/elasticsearch/index/data_stream.go index 28024a0cf..1c81f3215 100644 --- a/internal/elasticsearch/index/data_stream.go +++ b/internal/elasticsearch/index/data_stream.go @@ -119,7 +119,7 @@ func ResourceDataStream() *schema.Resource { } func resourceDataStreamPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := 
clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -138,7 +138,7 @@ func resourceDataStreamPut(ctx context.Context, d *schema.ResourceData, meta int } func resourceDataStreamRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -211,7 +211,7 @@ func resourceDataStreamRead(ctx context.Context, d *schema.ResourceData, meta in } func resourceDataStreamDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/index/ilm.go b/internal/elasticsearch/index/ilm.go index 420c2d442..5a8fc9650 100644 --- a/internal/elasticsearch/index/ilm.go +++ b/internal/elasticsearch/index/ilm.go @@ -409,7 +409,7 @@ func getSchema(actions ...string) map[string]*schema.Schema { } func resourceIlmPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -587,7 +587,7 @@ func expandAction(a []interface{}, serverVersion *version.Version, settings ...s } func resourceIlmRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -703,7 +703,7 @@ func flattenPhase(phaseName string, p models.Phase, d *schema.ResourceData) (int } func resourceIlmDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := 
clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/index/index.go b/internal/elasticsearch/index/index.go index c253dee00..f193ae3e0 100644 --- a/internal/elasticsearch/index/index.go +++ b/internal/elasticsearch/index/index.go @@ -582,7 +582,7 @@ If specified, this mapping can include: field names, [field data types](https:// return nil, fmt.Errorf("unable to import requested index") } - client, diags := clients.NewApiClient(d, m) + client, diags := clients.NewApiClientFromSDKResource(d, m) if diags.HasError() { return nil, fmt.Errorf("Unabled to create API client %v", diags) } @@ -661,7 +661,7 @@ If specified, this mapping can include: field names, [field data types](https:// } func resourceIndexCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -795,7 +795,7 @@ func resourceIndexCreate(ctx context.Context, d *schema.ResourceData, meta inter // Because of limitation of ES API we must handle changes to aliases, mappings and settings separately func resourceIndexUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -899,7 +899,7 @@ func flattenIndexSettings(settings []interface{}) map[string]interface{} { } func resourceIndexRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -960,7 +960,7 @@ func resourceIndexDelete(ctx context.Context, d *schema.ResourceData, meta inter if 
d.Get("deletion_protection").(bool) { return diag.Errorf("cannot destroy index without setting deletion_protection=false and running `terraform apply`") } - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/index/template.go b/internal/elasticsearch/index/template.go index b062677f6..9df3b649f 100644 --- a/internal/elasticsearch/index/template.go +++ b/internal/elasticsearch/index/template.go @@ -182,7 +182,7 @@ func ResourceTemplate() *schema.Resource { } func resourceIndexTemplatePut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -304,7 +304,7 @@ func resourceIndexTemplatePut(ctx context.Context, d *schema.ResourceData, meta } func resourceIndexTemplateRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -408,7 +408,7 @@ func flattenTemplateData(template *models.Template) ([]interface{}, diag.Diagnos } func resourceIndexTemplateDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/ingest/pipeline.go b/internal/elasticsearch/ingest/pipeline.go index 0c7f985a9..f0f4cd16a 100644 --- a/internal/elasticsearch/ingest/pipeline.go +++ b/internal/elasticsearch/ingest/pipeline.go @@ -84,7 +84,7 @@ func ResourceIngestPipeline() *schema.Resource { } func resourceIngestPipelineTemplatePut(ctx context.Context, d *schema.ResourceData, meta interface{}) 
diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -138,7 +138,7 @@ func resourceIngestPipelineTemplatePut(ctx context.Context, d *schema.ResourceDa } func resourceIngestPipelineTemplateRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -206,7 +206,7 @@ func resourceIngestPipelineTemplateRead(ctx context.Context, d *schema.ResourceD } func resourceIngestPipelineTemplateDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/logstash/pipeline.go b/internal/elasticsearch/logstash/pipeline.go index 01adee829..34fb98b42 100644 --- a/internal/elasticsearch/logstash/pipeline.go +++ b/internal/elasticsearch/logstash/pipeline.go @@ -183,7 +183,7 @@ func ResourceLogstashPipeline() *schema.Resource { } func resourceLogstashPipelinePut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -223,7 +223,7 @@ func resourceLogstashPipelinePut(ctx context.Context, d *schema.ResourceData, me } func resourceLogstashPipelineRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -287,7 +287,7 @@ func resourceLogstashPipelineRead(ctx context.Context, d *schema.ResourceData, m } func 
resourceLogstashPipelineDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/security/api_key.go b/internal/elasticsearch/security/api_key.go index c6d23780e..ef44aa1ad 100644 --- a/internal/elasticsearch/security/api_key.go +++ b/internal/elasticsearch/security/api_key.go @@ -92,7 +92,7 @@ func ResourceApiKey() *schema.Resource { } func resourceSecurityApiKeyCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -166,7 +166,7 @@ func resourceSecurityApiKeyUpdate(ctx context.Context, d *schema.ResourceData, m } func resourceSecurityApiKeyRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -216,7 +216,7 @@ func resourceSecurityApiKeyRead(ctx context.Context, d *schema.ResourceData, met } func resourceSecurityApiKeyDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/security/role.go b/internal/elasticsearch/security/role.go index 6d957c091..3b886753c 100644 --- a/internal/elasticsearch/security/role.go +++ b/internal/elasticsearch/security/role.go @@ -174,7 +174,7 @@ func ResourceRole() *schema.Resource { } func resourceSecurityRolePut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, 
meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -312,7 +312,7 @@ func resourceSecurityRolePut(ctx context.Context, d *schema.ResourceData, meta i } func resourceSecurityRoleRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -420,7 +420,7 @@ func flattenIndicesData(indices *[]models.IndexPerms) []interface{} { } func resourceSecurityRoleDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/security/role_data_source.go b/internal/elasticsearch/security/role_data_source.go index 06e0a7531..cc263de74 100644 --- a/internal/elasticsearch/security/role_data_source.go +++ b/internal/elasticsearch/security/role_data_source.go @@ -149,7 +149,7 @@ func DataSourceRole() *schema.Resource { } func dataSourceSecurityRoleRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/security/role_mapping.go b/internal/elasticsearch/security/role_mapping.go index f666292e4..811b61c85 100644 --- a/internal/elasticsearch/security/role_mapping.go +++ b/internal/elasticsearch/security/role_mapping.go @@ -84,7 +84,7 @@ func ResourceRoleMapping() *schema.Resource { } func resourceSecurityRoleMappingPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { 
return diags } @@ -123,7 +123,7 @@ func resourceSecurityRoleMappingPut(ctx context.Context, d *schema.ResourceData, } func resourceSecurityRoleMappingRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -182,7 +182,7 @@ func resourceSecurityRoleMappingRead(ctx context.Context, d *schema.ResourceData } func resourceSecurityRoleMappingDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/security/role_mapping_data_source.go b/internal/elasticsearch/security/role_mapping_data_source.go index 19baeec83..9484ca3fc 100644 --- a/internal/elasticsearch/security/role_mapping_data_source.go +++ b/internal/elasticsearch/security/role_mapping_data_source.go @@ -61,7 +61,7 @@ func DataSourceRoleMapping() *schema.Resource { } func dataSourceSecurityRoleMappingRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/security/system_user.go b/internal/elasticsearch/security/system_user.go index 52d4e9f55..fc2b3cf2a 100644 --- a/internal/elasticsearch/security/system_user.go +++ b/internal/elasticsearch/security/system_user.go @@ -71,7 +71,7 @@ func ResourceSystemUser() *schema.Resource { } func resourceSecuritySystemUserPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -121,7 +121,7 
@@ func resourceSecuritySystemUserPut(ctx context.Context, d *schema.ResourceData, } func resourceSecuritySystemUserRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/security/user.go b/internal/elasticsearch/security/user.go index 430af00c6..ab58c1a64 100644 --- a/internal/elasticsearch/security/user.go +++ b/internal/elasticsearch/security/user.go @@ -106,7 +106,7 @@ func ResourceUser() *schema.Resource { } func resourceSecurityUserPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -161,7 +161,7 @@ func resourceSecurityUserPut(ctx context.Context, d *schema.ResourceData, meta i } func resourceSecurityUserRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -210,7 +210,7 @@ func resourceSecurityUserRead(ctx context.Context, d *schema.ResourceData, meta } func resourceSecurityUserDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/security/user_data_source.go b/internal/elasticsearch/security/user_data_source.go index 5e7380940..77fd08269 100644 --- a/internal/elasticsearch/security/user_data_source.go +++ b/internal/elasticsearch/security/user_data_source.go @@ -63,7 +63,7 @@ func DataSourceUser() *schema.Resource { } func dataSourceSecurityUserRead(ctx 
context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/transform/transform.go b/internal/elasticsearch/transform/transform.go index ff376c97c..412c3f0b6 100644 --- a/internal/elasticsearch/transform/transform.go +++ b/internal/elasticsearch/transform/transform.go @@ -297,7 +297,7 @@ func ResourceTransform() *schema.Resource { func resourceTransformCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -339,7 +339,7 @@ func resourceTransformCreate(ctx context.Context, d *schema.ResourceData, meta i func resourceTransformRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -384,7 +384,7 @@ func resourceTransformRead(ctx context.Context, d *schema.ResourceData, meta int func resourceTransformUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -430,7 +430,7 @@ func resourceTransformUpdate(ctx context.Context, d *schema.ResourceData, meta i func resourceTransformDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/elasticsearch/watcher/watch.go b/internal/elasticsearch/watcher/watch.go index 
b8a4dd0d8..7190e4db8 100644 --- a/internal/elasticsearch/watcher/watch.go +++ b/internal/elasticsearch/watcher/watch.go @@ -105,7 +105,7 @@ func ResourceWatch() *schema.Resource { } func resourceWatchPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -169,7 +169,7 @@ func resourceWatchPut(ctx context.Context, d *schema.ResourceData, meta interfac } func resourceWatchRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -253,7 +253,7 @@ func resourceWatchRead(ctx context.Context, d *schema.ResourceData, meta interfa } func resourceWatchDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/fleet/shared.go b/internal/fleet/shared.go index ee8a590cf..da806e246 100644 --- a/internal/fleet/shared.go +++ b/internal/fleet/shared.go @@ -8,7 +8,7 @@ import ( ) func getFleetClient(d *schema.ResourceData, meta interface{}) (*fleet.Client, diag.Diagnostics) { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return nil, diags } diff --git a/internal/kibana/alerting.go b/internal/kibana/alerting.go index 161074d51..6878769cb 100644 --- a/internal/kibana/alerting.go +++ b/internal/kibana/alerting.go @@ -216,7 +216,7 @@ func getActionsFromResourceData(d *schema.ResourceData) ([]models.AlertingRuleAc } func resourceRuleCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := 
clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -239,7 +239,7 @@ func resourceRuleCreate(ctx context.Context, d *schema.ResourceData, meta interf } func resourceRuleUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -268,7 +268,7 @@ func resourceRuleUpdate(ctx context.Context, d *schema.ResourceData, meta interf } func resourceRuleRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -357,7 +357,7 @@ func resourceRuleRead(ctx context.Context, d *schema.ResourceData, meta interfac } func resourceRuleDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/kibana/connector.go b/internal/kibana/connector.go index eed6ed8fc..1a57b7fb7 100644 --- a/internal/kibana/connector.go +++ b/internal/kibana/connector.go @@ -124,7 +124,7 @@ func connectorCustomizeDiff(ctx context.Context, rd *schema.ResourceDiff, in int } func resourceConnectorCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -147,7 +147,7 @@ func resourceConnectorCreate(ctx context.Context, d *schema.ResourceData, meta i } func resourceConnectorUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := 
clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -176,7 +176,7 @@ func resourceConnectorUpdate(ctx context.Context, d *schema.ResourceData, meta i } func resourceConnectorRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -199,7 +199,7 @@ func resourceConnectorRead(ctx context.Context, d *schema.ResourceData, meta int } func resourceConnectorDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/kibana/slo.go b/internal/kibana/slo.go index 1d12ffe27..bbb1eea3d 100644 --- a/internal/kibana/slo.go +++ b/internal/kibana/slo.go @@ -603,7 +603,7 @@ func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic } func resourceSloCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -626,7 +626,7 @@ func resourceSloCreate(ctx context.Context, d *schema.ResourceData, meta interfa } func resourceSloUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -655,7 +655,7 @@ func resourceSloUpdate(ctx context.Context, d *schema.ResourceData, meta interfa } func resourceSloRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := 
clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -832,7 +832,7 @@ func resourceSloRead(ctx context.Context, d *schema.ResourceData, meta interface } func resourceSloDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/kibana/space.go b/internal/kibana/space.go index 716b1f172..908e3e213 100644 --- a/internal/kibana/space.go +++ b/internal/kibana/space.go @@ -71,7 +71,7 @@ func ResourceSpace() *schema.Resource { } func resourceSpaceUpsert(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -132,7 +132,7 @@ func resourceSpaceUpsert(ctx context.Context, d *schema.ResourceData, meta inter } func resourceSpaceRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } @@ -180,7 +180,7 @@ func resourceSpaceRead(ctx context.Context, d *schema.ResourceData, meta interfa } func resourceSpaceDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClient(d, meta) + client, diags := clients.NewApiClientFromSDKResource(d, meta) if diags.HasError() { return diags } diff --git a/internal/schema/connection.go b/internal/schema/connection.go index a85ea70ad..39b9692de 100644 --- a/internal/schema/connection.go +++ b/internal/schema/connection.go @@ -12,48 +12,38 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) -func GetEsFWConnectionBlock(keyName string, isProviderConfiguration bool) 
fwschema.Block { - usernamePath := makePathRef(keyName, "username") - passwordPath := makePathRef(keyName, "password") - caFilePath := makePathRef(keyName, "ca_file") - caDataPath := makePathRef(keyName, "ca_data") - certFilePath := makePathRef(keyName, "cert_file") - certDataPath := makePathRef(keyName, "cert_data") - keyFilePath := makePathRef(keyName, "key_file") - keyDataPath := makePathRef(keyName, "key_data") - - usernameValidators := []validator.String{stringvalidator.AlsoRequires(path.MatchRoot(passwordPath))} - passwordValidators := []validator.String{stringvalidator.AlsoRequires(path.MatchRoot(usernamePath))} - - if isProviderConfiguration { - // RequireWith validation isn't compatible when used in conjunction with DefaultFunc - usernameValidators = nil - passwordValidators = nil - } +func GetEsFWConnectionBlock(keyName string) fwschema.Block { + usernamePath := path.MatchRelative().AtParent().AtName("username") + passwordPath := path.MatchRelative().AtParent().AtName("password") + caFilePath := path.MatchRelative().AtParent().AtName("ca_file") + caDataPath := path.MatchRelative().AtParent().AtName("ca_data") + certFilePath := path.MatchRelative().AtParent().AtName("cert_file") + certDataPath := path.MatchRelative().AtParent().AtName("cert_data") + keyFilePath := path.MatchRelative().AtParent().AtName("key_file") + keyDataPath := path.MatchRelative().AtParent().AtName("key_data") return fwschema.ListNestedBlock{ - MarkdownDescription: fmt.Sprintf("Elasticsearch connection configuration block. %s", getDeprecationMessage(isProviderConfiguration)), - DeprecationMessage: getDeprecationMessage(isProviderConfiguration), + MarkdownDescription: "Elasticsearch connection configuration block. ", + Description: "Elasticsearch connection configuration block. 
", NestedObject: fwschema.NestedBlockObject{ Attributes: map[string]fwschema.Attribute{ "username": fwschema.StringAttribute{ MarkdownDescription: "Username to use for API authentication to Elasticsearch.", Optional: true, - Validators: usernameValidators, + Validators: []validator.String{stringvalidator.AlsoRequires(passwordPath)}, }, "password": fwschema.StringAttribute{ MarkdownDescription: "Password to use for API authentication to Elasticsearch.", Optional: true, Sensitive: true, - Validators: passwordValidators, + Validators: []validator.String{stringvalidator.AlsoRequires(usernamePath)}, }, "api_key": fwschema.StringAttribute{ MarkdownDescription: "API Key to use for authentication to Elasticsearch", Optional: true, Sensitive: true, Validators: []validator.String{ - stringvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("username")), - stringvalidator.ConflictsWith(path.MatchRoot(passwordPath)), + stringvalidator.ConflictsWith(usernamePath, passwordPath), }, }, "endpoints": fwschema.ListAttribute{ @@ -70,41 +60,38 @@ func GetEsFWConnectionBlock(keyName string, isProviderConfiguration bool) fwsche MarkdownDescription: "Path to a custom Certificate Authority certificate", Optional: true, Validators: []validator.String{ - stringvalidator.ConflictsWith(path.MatchRoot(caDataPath)), + stringvalidator.ConflictsWith(caDataPath), }, }, "ca_data": fwschema.StringAttribute{ MarkdownDescription: "PEM-encoded custom Certificate Authority certificate", Optional: true, Validators: []validator.String{ - stringvalidator.ConflictsWith(path.MatchRoot(caFilePath)), + stringvalidator.ConflictsWith(caFilePath), }, }, "cert_file": fwschema.StringAttribute{ MarkdownDescription: "Path to a file containing the PEM encoded certificate for client auth", Optional: true, Validators: []validator.String{ - stringvalidator.AlsoRequires(path.MatchRoot(keyFilePath)), - stringvalidator.ConflictsWith(path.MatchRoot(certDataPath)), - 
stringvalidator.ConflictsWith(path.MatchRoot(keyDataPath)), + stringvalidator.AlsoRequires(keyFilePath), + stringvalidator.ConflictsWith(certDataPath, keyDataPath), }, }, "key_file": fwschema.StringAttribute{ MarkdownDescription: "Path to a file containing the PEM encoded private key for client auth", Optional: true, Validators: []validator.String{ - stringvalidator.AlsoRequires(path.MatchRoot(certFilePath)), - stringvalidator.ConflictsWith(path.MatchRoot(certDataPath)), - stringvalidator.ConflictsWith(path.MatchRoot(keyDataPath)), + stringvalidator.AlsoRequires(certFilePath), + stringvalidator.ConflictsWith(certDataPath, keyDataPath), }, }, "cert_data": fwschema.StringAttribute{ MarkdownDescription: "PEM encoded certificate for client auth", Optional: true, Validators: []validator.String{ - stringvalidator.AlsoRequires(path.MatchRoot(keyDataPath)), - stringvalidator.ConflictsWith(path.MatchRoot(certFilePath)), - stringvalidator.ConflictsWith(path.MatchRoot(keyFilePath)), + stringvalidator.AlsoRequires(keyDataPath), + stringvalidator.ConflictsWith(certFilePath, keyFilePath), }, }, "key_data": fwschema.StringAttribute{ @@ -112,9 +99,8 @@ func GetEsFWConnectionBlock(keyName string, isProviderConfiguration bool) fwsche Optional: true, Sensitive: true, Validators: []validator.String{ - stringvalidator.AlsoRequires(path.MatchRoot(certDataPath)), - stringvalidator.ConflictsWith(path.MatchRoot(certFilePath)), - stringvalidator.ConflictsWith(path.MatchRoot(keyFilePath)), + stringvalidator.AlsoRequires(certDataPath), + stringvalidator.ConflictsWith(certFilePath, keyFilePath), }, }, }, @@ -125,7 +111,7 @@ func GetEsFWConnectionBlock(keyName string, isProviderConfiguration bool) fwsche } } -func GetKbFWConnectionBlock(keyName string, isProviderConfiguration bool) fwschema.Block { +func GetKbFWConnectionBlock(keyName string) fwschema.Block { usernamePath := makePathRef(keyName, "username") passwordPath := makePathRef(keyName, "password") @@ -134,7 +120,6 @@ func
GetKbFWConnectionBlock(keyName string, isProviderConfiguration bool) fwsche return fwschema.ListNestedBlock{ MarkdownDescription: "Kibana connection configuration block.", - DeprecationMessage: getDeprecationMessage(isProviderConfiguration), NestedObject: fwschema.NestedBlockObject{ Attributes: map[string]fwschema.Attribute{ "username": fwschema.StringAttribute{ @@ -166,7 +151,7 @@ func GetKbFWConnectionBlock(keyName string, isProviderConfiguration bool) fwsche } } -func GetFleetFWConnectionBlock(keyName string, isProviderConfiguration bool) fwschema.Block { +func GetFleetFWConnectionBlock(keyName string) fwschema.Block { usernamePath := makePathRef(keyName, "username") passwordPath := makePathRef(keyName, "password") @@ -175,7 +160,6 @@ func GetFleetFWConnectionBlock(keyName string, isProviderConfiguration bool) fws return fwschema.ListNestedBlock{ MarkdownDescription: "Fleet connection configuration block.", - DeprecationMessage: getDeprecationMessage(isProviderConfiguration), NestedObject: fwschema.NestedBlockObject{ Attributes: map[string]fwschema.Attribute{ "username": fwschema.StringAttribute{ diff --git a/provider/plugin_framework.go b/provider/plugin_framework.go index fb17f2d19..ee9961cc6 100644 --- a/provider/plugin_framework.go +++ b/provider/plugin_framework.go @@ -3,6 +3,8 @@ package provider import ( "context" + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/config" "github.com/elastic/terraform-provider-elasticstack/internal/schema" "github.com/hashicorp/terraform-plugin-framework/datasource" fwprovider "github.com/hashicorp/terraform-plugin-framework/provider" @@ -29,14 +31,29 @@ func (p *Provider) Metadata(_ context.Context, _ fwprovider.MetadataRequest, res func (p *Provider) Schema(ctx context.Context, req fwprovider.SchemaRequest, res *fwprovider.SchemaResponse) { res.Schema = fwschema.Schema{ 
Blocks: map[string]fwschema.Block{ - esKeyName: schema.GetEsFWConnectionBlock(esKeyName, true), - kbKeyName: schema.GetKbFWConnectionBlock(kbKeyName, true), - fleetKeyName: schema.GetFleetFWConnectionBlock(fleetKeyName, true), + esKeyName: schema.GetEsFWConnectionBlock(esKeyName), + kbKeyName: schema.GetKbFWConnectionBlock(kbKeyName), + fleetKeyName: schema.GetFleetFWConnectionBlock(fleetKeyName), }, } } func (p *Provider) Configure(ctx context.Context, req fwprovider.ConfigureRequest, res *fwprovider.ConfigureResponse) { + var config config.ProviderConfiguration + + res.Diagnostics.Append(req.Config.Get(ctx, &config)...) + if res.Diagnostics.HasError() { + return + } + + client, diags := clients.NewApiClientFromFramework(ctx, config, p.version) + res.Diagnostics.Append(diags...) + if res.Diagnostics.HasError() { + return + } + + res.DataSourceData = client + res.ResourceData = client } func (p *Provider) DataSources(ctx context.Context) []func() datasource.DataSource { diff --git a/provider/provider.go b/provider/provider.go index 3d0e9bb71..653733a57 100644 --- a/provider/provider.go +++ b/provider/provider.go @@ -112,7 +112,7 @@ func New(version string) *schema.Provider { }, } - p.ConfigureContextFunc = clients.NewApiClientFunc(version) + p.ConfigureContextFunc = clients.NewApiClientFuncFromSDK(version) return p } From c052e6247176e8e6d00fe42fb609224fd9644ee9 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Wed, 6 Sep 2023 22:36:52 +1000 Subject: [PATCH 08/15] Remove plugin testing --- go.mod | 1 - go.sum | 2 -- internal/clients/api_client.go | 19 +++++++++++++++++++ internal/clients/config/elasticsearch.go | 2 +- provider/factory_test.go | 2 +- provider/provider_test.go | 4 ++-- 6 files changed, 23 insertions(+), 7 deletions(-) diff --git a/go.mod b/go.mod index 70dbbdde5..09af71655 100644 --- a/go.mod +++ b/go.mod @@ -14,7 +14,6 @@ require ( github.com/hashicorp/terraform-plugin-log v0.9.0 github.com/hashicorp/terraform-plugin-mux v0.12.0 
github.com/hashicorp/terraform-plugin-sdk/v2 v2.29.0 - github.com/hashicorp/terraform-plugin-testing v1.2.0 github.com/oapi-codegen/runtime v1.0.0 github.com/stretchr/testify v1.8.4 ) diff --git a/go.sum b/go.sum index f9d8610e5..efac6d9a8 100644 --- a/go.sum +++ b/go.sum @@ -92,8 +92,6 @@ github.com/hashicorp/terraform-plugin-mux v0.12.0 h1:TJlmeslQ11WlQtIFAfth0vXx+gS github.com/hashicorp/terraform-plugin-mux v0.12.0/go.mod h1:8MR0AgmV+Q03DIjyrAKxXyYlq2EUnYBQP8gxAAA0zeM= github.com/hashicorp/terraform-plugin-sdk/v2 v2.29.0 h1:wcOKYwPI9IorAJEBLzgclh3xVolO7ZorYd6U1vnok14= github.com/hashicorp/terraform-plugin-sdk/v2 v2.29.0/go.mod h1:qH/34G25Ugdj5FcM95cSoXzUgIbgfhVLXCcEcYaMwq8= -github.com/hashicorp/terraform-plugin-testing v1.2.0 h1:pASRAe6BOZFO4xSGQr9WzitXit0nrQAYDk8ziuRfn9E= -github.com/hashicorp/terraform-plugin-testing v1.2.0/go.mod h1:+8bp3O7xUb1UtBcdknrGdVRIuTw4b62TYSIgXHqlyew= github.com/hashicorp/terraform-registry-address v0.2.2 h1:lPQBg403El8PPicg/qONZJDC6YlgCVbWDtNmmZKtBno= github.com/hashicorp/terraform-registry-address v0.2.2/go.mod h1:LtwNbCihUoUZ3RYriyS2wF/lGPB6gF9ICLRtuDk7hSo= github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S52uzrw4x0jKQ= diff --git a/internal/clients/api_client.go b/internal/clients/api_client.go index 4edcc2849..5db6c8e1c 100644 --- a/internal/clients/api_client.go +++ b/internal/clients/api_client.go @@ -132,6 +132,25 @@ func NewApiClientFromFramework(ctx context.Context, cfg config.ProviderConfigura return client, nil } +func ConvertProviderData(providerData any) (*ApiClient, fwdiags.Diagnostics) { + var diags fwdiags.Diagnostics + + if providerData == nil { + return nil, diags + } + + client, ok := providerData.(*ApiClient) + if !ok { + diags.AddError( + "Unexpected Provider Data", + fmt.Sprintf("Expected *ApiClient, got: %T. 
Please report this issue to the provider developers.", providerData), + ) + + return nil, diags + } + return client, diags +} + func NewApiClientFromSDKResource(d *schema.ResourceData, meta interface{}) (*ApiClient, diag.Diagnostics) { defaultClient := meta.(*ApiClient) version := defaultClient.version diff --git a/internal/clients/config/elasticsearch.go b/internal/clients/config/elasticsearch.go index 87db4d7fb..0325f9aa8 100644 --- a/internal/clients/config/elasticsearch.go +++ b/internal/clients/config/elasticsearch.go @@ -12,8 +12,8 @@ import ( fwdiags "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" sdkdiags "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-testing/helper/logging" ) type elasticsearchConfig elasticsearch.Config diff --git a/provider/factory_test.go b/provider/factory_test.go index 37418ebab..0067faf57 100644 --- a/provider/factory_test.go +++ b/provider/factory_test.go @@ -8,7 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/providerserver" "github.com/hashicorp/terraform-plugin-go/tfprotov5" "github.com/hashicorp/terraform-plugin-mux/tf5muxserver" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" ) func TestMuxServer(t *testing.T) { diff --git a/provider/provider_test.go b/provider/provider_test.go index 7ed41dfeb..8d081d70d 100644 --- a/provider/provider_test.go +++ b/provider/provider_test.go @@ -9,8 +9,8 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/security" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" 
"github.com/elastic/terraform-provider-elasticstack/provider" - sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" + sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" ) func TestProvider(t *testing.T) { From c2ce6f9ba6b0d1dd322f1ffb156b8ff6b2999070 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Mon, 11 Sep 2023 12:23:19 +1000 Subject: [PATCH 09/15] Import saved objects --- docs/resources/kibana_import_saved_objects.md | 83 +++++++++ .../kibana/import_saved_objects/acc_test.go | 43 +++++ .../kibana/import_saved_objects/create.go | 107 ++++++++++++ .../kibana/import_saved_objects/delete.go | 12 ++ internal/kibana/import_saved_objects/read.go | 12 ++ .../kibana/import_saved_objects/schema.go | 157 ++++++++++++++++++ .../kibana/import_saved_objects/update.go | 11 ++ provider/plugin_framework.go | 5 +- 8 files changed, 429 insertions(+), 1 deletion(-) create mode 100644 docs/resources/kibana_import_saved_objects.md create mode 100644 internal/kibana/import_saved_objects/acc_test.go create mode 100644 internal/kibana/import_saved_objects/create.go create mode 100644 internal/kibana/import_saved_objects/delete.go create mode 100644 internal/kibana/import_saved_objects/read.go create mode 100644 internal/kibana/import_saved_objects/schema.go create mode 100644 internal/kibana/import_saved_objects/update.go diff --git a/docs/resources/kibana_import_saved_objects.md b/docs/resources/kibana_import_saved_objects.md new file mode 100644 index 000000000..f40ba85be --- /dev/null +++ b/docs/resources/kibana_import_saved_objects.md @@ -0,0 +1,83 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_kibana_import_saved_objects Resource - terraform-provider-elasticstack" +subcategory: "" 
+description: |- + Imports saved objects from the referenced file +--- + +# elasticstack_kibana_import_saved_objects (Resource) + +Imports saved objects from the referenced file + + + + +## Schema + +### Required + +- `file_contents` (String) The contents of the exported saved objects file. + +### Optional + +- `compatibility_mode` (Boolean) Applies various adjustments to the saved objects that are being imported to maintain compatibility between different Kibana versions. Use this option only if you encounter issues with imported saved objects. +- `create_new_copies` (Boolean) Creates copies of saved objects, regenerates each object ID, and resets the origin. When used, potential conflict errors are avoided. +- `ignore_import_errors` (Boolean) If set to true, errors during the import process will not fail the configuration application +- `overwrite` (Boolean) Overwrites saved objects when they already exist. When used, potential conflict errors are automatically resolved by overwriting the destination object. +- `space_id` (String) An identifier for the space. If space_id is not provided, the default space is used. + +### Read-Only + +- `errors` (List of Object) (see [below for nested schema](#nestedatt--errors)) +- `id` (String) Generated ID for the import. +- `success` (Boolean) Indicates when the import was successfully completed. When set to false, some objects may not have been created. For additional information, refer to the errors and success_results properties. +- `success_count` (Number) Indicates the number of successfully imported records. 
+- `success_results` (List of Object) (see [below for nested schema](#nestedatt--success_results)) + + +### Nested Schema for `errors` + +Read-Only: + +- `error` (Object) (see [below for nested schema](#nestedobjatt--errors--error)) +- `id` (String) +- `meta` (Object) (see [below for nested schema](#nestedobjatt--errors--meta)) +- `title` (String) +- `type` (String) + + +### Nested Schema for `errors.error` + +Read-Only: + +- `type` (String) + + + +### Nested Schema for `errors.meta` + +Read-Only: + +- `icon` (String) +- `title` (String) + + + + +### Nested Schema for `success_results` + +Read-Only: + +- `destination_id` (String) +- `id` (String) +- `meta` (Object) (see [below for nested schema](#nestedobjatt--success_results--meta)) +- `type` (String) + + +### Nested Schema for `success_results.meta` + +Read-Only: + +- `icon` (String) +- `title` (String) diff --git a/internal/kibana/import_saved_objects/acc_test.go b/internal/kibana/import_saved_objects/acc_test.go new file mode 100644 index 000000000..bc3130823 --- /dev/null +++ b/internal/kibana/import_saved_objects/acc_test.go @@ -0,0 +1,43 @@ +package import_saved_objects_test + +import ( + "testing" + + "github.com/elastic/terraform-provider-elasticstack/internal/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccResourceImportSavedObjects(t *testing.T) { + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + ProtoV5ProviderFactories: acctest.Providers, + Steps: []resource.TestStep{ + { + Config: testAccResourceImportSavedObjects(), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_import_saved_objects.settings", "success", "true"), + resource.TestCheckResourceAttr("elasticstack_kibana_import_saved_objects.settings", "success_count", "1"), + resource.TestCheckResourceAttr("elasticstack_kibana_import_saved_objects.settings", "success_results.#", "1"), + 
resource.TestCheckResourceAttr("elasticstack_kibana_import_saved_objects.settings", "errors.#", "0"), + ), + }, + }, + }) +} + +func testAccResourceImportSavedObjects() string { + return ` +provider "elasticstack" { + elasticsearch {} + kibana {} +} + +resource "elasticstack_kibana_import_saved_objects" "settings" { + overwrite = true + file_contents = <<-EOT +{"attributes":{"buildNum":42747,"defaultIndex":"metricbeat-*","theme:darkMode":true},"coreMigrationVersion":"7.0.0","id":"7.14.0","managed":false,"references":[],"type":"config","typeMigrationVersion":"7.0.0","updated_at":"2021-08-04T02:04:43.306Z","version":"WzY1MiwyXQ=="} +{"excludedObjects":[],"excludedObjectsCount":0,"exportedCount":1,"missingRefCount":0,"missingReferences":[]} +EOT +} + ` +} diff --git a/internal/kibana/import_saved_objects/create.go b/internal/kibana/import_saved_objects/create.go new file mode 100644 index 000000000..358a06bff --- /dev/null +++ b/internal/kibana/import_saved_objects/create.go @@ -0,0 +1,107 @@ +package import_saved_objects + +import ( + "context" + + "github.com/google/uuid" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/mitchellh/mapstructure" +) + +func (r *Resource) Create(ctx context.Context, request resource.CreateRequest, response *resource.CreateResponse) { + r.importObjects(ctx, request.Plan, &response.State, &response.Diagnostics) +} + +func (r *Resource) importObjects(ctx context.Context, plan tfsdk.Plan, state *tfsdk.State, diags *diag.Diagnostics) { + if !resourceReady(r, diags) { + return + } + + var model modelV0 + + diags.Append(plan.Get(ctx, &model)...) 
+ if diags.HasError() { + return + } + + kibanaClient, err := r.client.GetKibanaClient() + if err != nil { + diags.AddError("unable to get kibana client", err.Error()) + return + } + + resp, err := kibanaClient.KibanaSavedObject.Import([]byte(model.FileContents.ValueString()), model.Overwrite.ValueBool(), model.SpaceID.ValueString()) + if err != nil { + diags.AddError("failed to import saved objects", err.Error()) + return + } + + var respModel responseModel + + decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ + Result: &respModel, + TagName: "json", + }) + if err != nil { + diags.AddError("failed to create model decoder", err.Error()) + return + } + + err = decoder.Decode(resp) + if err != nil { + diags.AddError("failed to decode response", err.Error()) + return + } + + if model.ID.IsUnknown() { + model.ID = types.StringValue(uuid.NewString()) + } + + diags.Append(state.Set(ctx, model)...) + diags.Append(state.SetAttribute(ctx, path.Root("success"), respModel.Success)...) + diags.Append(state.SetAttribute(ctx, path.Root("success_count"), respModel.SuccessCount)...) + diags.Append(state.SetAttribute(ctx, path.Root("errors"), respModel.Errors)...) + diags.Append(state.SetAttribute(ctx, path.Root("success_results"), respModel.SuccessResults)...) 
+ if diags.HasError() { + return + } + + if !respModel.Success && !model.IgnoreImportErrors.ValueBool() { + diags.AddError("not all objects were imported successfully", "see errors attribute for more details") + } +} + +type responseModel struct { + Success bool `json:"success"` + SuccessCount int `json:"successCount"` + Errors []importError `json:"errors"` + SuccessResults []importSuccess `json:"successResults"` +} + +type importSuccess struct { + ID string `tfsdk:"id" json:"id"` + Type string `tfsdk:"type" json:"type"` + DestinationID string `tfsdk:"destination_id" json:"destinationId"` + Meta importMeta `tfsdk:"meta" json:"meta"` +} + +type importError struct { + ID string `json:"id"` + Type string `json:"type"` + Title string `json:"title"` + Error importErrorType `json:"error"` + Meta importMeta `json:"meta"` +} + +type importErrorType struct { + Type string `json:"type"` +} + +type importMeta struct { + Icon string `tfsdk:"icon" json:"icon"` + Title string `tfsdk:"title" json:"title"` +} diff --git a/internal/kibana/import_saved_objects/delete.go b/internal/kibana/import_saved_objects/delete.go new file mode 100644 index 000000000..46e65848e --- /dev/null +++ b/internal/kibana/import_saved_objects/delete.go @@ -0,0 +1,12 @@ +package import_saved_objects + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-log/tflog" +) + +func (r *Resource) Delete(ctx context.Context, request resource.DeleteRequest, response *resource.DeleteResponse) { + tflog.Info(ctx, "Delete isn't supported for elasticstack_kibana_import_saved_objects") +} diff --git a/internal/kibana/import_saved_objects/read.go b/internal/kibana/import_saved_objects/read.go new file mode 100644 index 000000000..454d1123a --- /dev/null +++ b/internal/kibana/import_saved_objects/read.go @@ -0,0 +1,12 @@ +package import_saved_objects + +import ( + "context" + + 
"github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-log/tflog" +) + +func (r *Resource) Read(ctx context.Context, request resource.ReadRequest, response *resource.ReadResponse) { + tflog.Info(ctx, "Read isn't supported for elasticstack_kibana_import_saved_objects") +} diff --git a/internal/kibana/import_saved_objects/schema.go b/internal/kibana/import_saved_objects/schema.go new file mode 100644 index 000000000..e525e43c7 --- /dev/null +++ b/internal/kibana/import_saved_objects/schema.go @@ -0,0 +1,157 @@ +package import_saved_objects + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +// Ensure provider defined types fully satisfy framework interfaces +var _ resource.Resource = &Resource{} +var _ resource.ResourceWithConfigure = &Resource{} + +// var _ resource.ResourceWithConfigValidators = &Resource{} + +// func (r *Resource) ConfigValidators(context.Context) []resource.ConfigValidator { +// return []resource.ConfigValidator{ +// resourcevalidator.Conflicting( +// path.MatchRoot("create_new_copies"), +// path.MatchRoot("overwrite"), +// path.MatchRoot("compatibility_mode"), +// ), +// } +// } + +func (r *Resource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: "Imports saved objects from the referenced file", + 
Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Computed: true, + MarkdownDescription: "Generated ID for the import.", + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + "space_id": schema.StringAttribute{ + Description: "An identifier for the space. If space_id is not provided, the default space is used.", + Optional: true, + }, + "ignore_import_errors": schema.BoolAttribute{ + Description: "If set to true, errors during the import process will not fail the configuration application", + Optional: true, + }, + // create_new_copies and compatibility_mode aren't supported by the current version of the Kibana client + // We can add these ourselves once https://github.com/elastic/terraform-provider-elasticstack/pull/372 is merged + // "create_new_copies": schema.BoolAttribute{ + // Description: "Creates copies of saved objects, regenerates each object ID, and resets the origin. When used, potential conflict errors are avoided.", + // Optional: true, + // }, + "overwrite": schema.BoolAttribute{ + Description: "Overwrites saved objects when they already exist. When used, potential conflict errors are automatically resolved by overwriting the destination object.", + Optional: true, + }, + // "compatibility_mode": schema.BoolAttribute{ + // Description: "Applies various adjustments to the saved objects that are being imported to maintain compatibility between different Kibana versions. Use this option only if you encounter issues with imported saved objects.", + // Optional: true, + // }, + "file_contents": schema.StringAttribute{ + Description: "The contents of the exported saved objects file.", + Required: true, + }, + + "success": schema.BoolAttribute{ + Description: "Indicates when the import was successfully completed. When set to false, some objects may not have been created. 
For additional information, refer to the errors and success_results properties.", + Computed: true, + }, + "success_count": schema.Int64Attribute{ + Description: "Indicates the number of successfully imported records.", + Computed: true, + }, + "errors": schema.ListAttribute{ + Computed: true, + ElementType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "id": types.StringType, + "type": types.StringType, + "title": types.StringType, + "error": types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, + }, + }, + "meta": types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "icon": types.StringType, + "title": types.StringType, + }, + }, + }, + }, + }, + "success_results": schema.ListAttribute{ + Computed: true, + ElementType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "id": types.StringType, + "type": types.StringType, + "destination_id": types.StringType, + "meta": types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "icon": types.StringType, + "title": types.StringType, + }, + }, + }, + }, + }, + }, + } +} + +type Resource struct { + client *clients.ApiClient +} + +func resourceReady(r *Resource, dg *diag.Diagnostics) bool { + if r.client == nil { + dg.AddError( + "Unconfigured Client", + "Expected configured client. Please report this issue to the provider developers.", + ) + + return false + } + return true +} + +func (r *Resource) Configure(ctx context.Context, request resource.ConfigureRequest, response *resource.ConfigureResponse) { + client, diags := clients.ConvertProviderData(request.ProviderData) + response.Diagnostics.Append(diags...) 
+ r.client = client +} + +func (r *Resource) Metadata(ctx context.Context, request resource.MetadataRequest, response *resource.MetadataResponse) { + response.TypeName = request.ProviderTypeName + "_kibana_import_saved_objects" +} + +type modelV0 struct { + ID types.String `tfsdk:"id"` + SpaceID types.String `tfsdk:"space_id"` + IgnoreImportErrors types.Bool `tfsdk:"ignore_import_errors"` + // CreateNewCopies types.Bool `tfsdk:"create_new_copies"` + Overwrite types.Bool `tfsdk:"overwrite"` + // CompatibilityMode types.Bool `tfsdk:"compatibility_mode"` + FileContents types.String `tfsdk:"file_contents"` + Success types.Bool `tfsdk:"success"` + SuccessCount types.Int64 `tfsdk:"success_count"` + Errors types.List `tfsdk:"errors"` + SuccessResults types.List `tfsdk:"success_results"` +} diff --git a/internal/kibana/import_saved_objects/update.go b/internal/kibana/import_saved_objects/update.go new file mode 100644 index 000000000..0731c452c --- /dev/null +++ b/internal/kibana/import_saved_objects/update.go @@ -0,0 +1,11 @@ +package import_saved_objects + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +func (r *Resource) Update(ctx context.Context, request resource.UpdateRequest, response *resource.UpdateResponse) { + r.importObjects(ctx, request.Plan, &response.State, &response.Diagnostics) +} diff --git a/provider/plugin_framework.go b/provider/plugin_framework.go index ee9961cc6..ad5895141 100644 --- a/provider/plugin_framework.go +++ b/provider/plugin_framework.go @@ -5,6 +5,7 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/clients/config" + "github.com/elastic/terraform-provider-elasticstack/internal/kibana/import_saved_objects" "github.com/elastic/terraform-provider-elasticstack/internal/schema" "github.com/hashicorp/terraform-plugin-framework/datasource" 
fwprovider "github.com/hashicorp/terraform-plugin-framework/provider" @@ -61,5 +62,7 @@ func (p *Provider) DataSources(ctx context.Context) []func() datasource.DataSour } func (p *Provider) Resources(ctx context.Context) []func() resource.Resource { - return []func() resource.Resource{} + return []func() resource.Resource{ + func() resource.Resource { return &import_saved_objects.Resource{} }, + } } From 5fa573648c4ad565c698ace1f704d4721a9deb9a Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Tue, 12 Sep 2023 19:40:58 +1000 Subject: [PATCH 10/15] Add docs template --- docs/resources/kibana_import_saved_objects.md | 32 ++++++++++++++----- .../resource.tf | 11 +++++++ .../kibana_import_saved_objects.md.tmpl | 21 ++++++++++++ 3 files changed, 56 insertions(+), 8 deletions(-) create mode 100644 examples/resources/elasticstack_kibana_import_saved_objects/resource.tf create mode 100644 templates/resources/kibana_import_saved_objects.md.tmpl diff --git a/docs/resources/kibana_import_saved_objects.md b/docs/resources/kibana_import_saved_objects.md index f40ba85be..efbe94ac6 100644 --- a/docs/resources/kibana_import_saved_objects.md +++ b/docs/resources/kibana_import_saved_objects.md @@ -1,16 +1,30 @@ --- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "elasticstack_kibana_import_saved_objects Resource - terraform-provider-elasticstack" -subcategory: "" +subcategory: "Kibana" +layout: "" +page_title: "Elasticstack: elasticstack_kibana_import_saved_objects Resource" description: |- - Imports saved objects from the referenced file + Create sets of Kibana saved objects from a file created by the export API. --- -# elasticstack_kibana_import_saved_objects (Resource) +# Resource: elasticstack_kibana_import_saved_objects -Imports saved objects from the referenced file +Create sets of Kibana saved objects from a file created by the export API. 
See https://www.elastic.co/guide/en/kibana/current/saved-objects-api-import.html +## Example Usage +```terraform +provider "elasticstack" { + kibana {} +} + +resource "elasticstack_kibana_import_saved_objects" "settings" { + overwrite = true + file_contents = <<-EOT +{"attributes":{"buildNum":42747,"defaultIndex":"metricbeat-*","theme:darkMode":true},"coreMigrationVersion":"7.0.0","id":"7.14.0","managed":false,"references":[],"type":"config","typeMigrationVersion":"7.0.0","updated_at":"2021-08-04T02:04:43.306Z","version":"WzY1MiwyXQ=="} +{"excludedObjects":[],"excludedObjectsCount":0,"exportedCount":1,"missingRefCount":0,"missingReferences":[]} +EOT +} +``` ## Schema @@ -21,8 +35,6 @@ Imports saved objects from the referenced file ### Optional -- `compatibility_mode` (Boolean) Applies various adjustments to the saved objects that are being imported to maintain compatibility between different Kibana versions. Use this option only if you encounter issues with imported saved objects. -- `create_new_copies` (Boolean) Creates copies of saved objects, regenerates each object ID, and resets the origin. When used, potential conflict errors are avoided. - `ignore_import_errors` (Boolean) If set to true, errors during the import process will not fail the configuration application - `overwrite` (Boolean) Overwrites saved objects when they already exist. When used, potential conflict errors are automatically resolved by overwriting the destination object. - `space_id` (String) An identifier for the space. If space_id is not provided, the default space is used. @@ -81,3 +93,7 @@ Read-Only: - `icon` (String) - `title` (String) + +## Import + +Import is not supported. 
diff --git a/examples/resources/elasticstack_kibana_import_saved_objects/resource.tf b/examples/resources/elasticstack_kibana_import_saved_objects/resource.tf new file mode 100644 index 000000000..20a239cca --- /dev/null +++ b/examples/resources/elasticstack_kibana_import_saved_objects/resource.tf @@ -0,0 +1,11 @@ +provider "elasticstack" { + kibana {} +} + +resource "elasticstack_kibana_import_saved_objects" "settings" { + overwrite = true + file_contents = <<-EOT +{"attributes":{"buildNum":42747,"defaultIndex":"metricbeat-*","theme:darkMode":true},"coreMigrationVersion":"7.0.0","id":"7.14.0","managed":false,"references":[],"type":"config","typeMigrationVersion":"7.0.0","updated_at":"2021-08-04T02:04:43.306Z","version":"WzY1MiwyXQ=="} +{"excludedObjects":[],"excludedObjectsCount":0,"exportedCount":1,"missingRefCount":0,"missingReferences":[]} +EOT +} diff --git a/templates/resources/kibana_import_saved_objects.md.tmpl b/templates/resources/kibana_import_saved_objects.md.tmpl new file mode 100644 index 000000000..7679d6f13 --- /dev/null +++ b/templates/resources/kibana_import_saved_objects.md.tmpl @@ -0,0 +1,21 @@ +--- +subcategory: "Kibana" +layout: "" +page_title: "Elasticstack: elasticstack_kibana_import_saved_objects Resource" +description: |- + Create sets of Kibana saved objects from a file created by the export API. +--- + +# Resource: elasticstack_kibana_import_saved_objects + +Create sets of Kibana saved objects from a file created by the export API. See https://www.elastic.co/guide/en/kibana/current/saved-objects-api-import.html + +## Example Usage + +{{ tffile "examples/resources/elasticstack_kibana_import_saved_objects/resource.tf" }} + +{{ .SchemaMarkdown | trimspace }} + +## Import + +Import is not supported. 
From 8add7d9c21831e08917593ae2a9364d198bb7f09 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Sun, 1 Oct 2023 13:48:38 +1100 Subject: [PATCH 11/15] Upgrade FW and validators --- go.mod | 4 ++-- go.sum | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/go.mod b/go.mod index 09af71655..87d0e4e8c 100644 --- a/go.mod +++ b/go.mod @@ -8,8 +8,8 @@ require ( github.com/elastic/go-elasticsearch/v7 v7.17.10 github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 github.com/hashicorp/go-version v1.6.0 - github.com/hashicorp/terraform-plugin-framework v1.2.0 - github.com/hashicorp/terraform-plugin-framework-validators v0.10.0 + github.com/hashicorp/terraform-plugin-framework v1.4.0 + github.com/hashicorp/terraform-plugin-framework-validators v0.12.0 github.com/hashicorp/terraform-plugin-go v0.19.0 github.com/hashicorp/terraform-plugin-log v0.9.0 github.com/hashicorp/terraform-plugin-mux v0.12.0 diff --git a/go.sum b/go.sum index efac6d9a8..b6a729878 100644 --- a/go.sum +++ b/go.sum @@ -82,8 +82,12 @@ github.com/hashicorp/terraform-json v0.17.1 h1:eMfvh/uWggKmY7Pmb3T85u86E2EQg6EQH github.com/hashicorp/terraform-json v0.17.1/go.mod h1:Huy6zt6euxaY9knPAFKjUITn8QxUFIe9VuSzb4zn/0o= github.com/hashicorp/terraform-plugin-framework v1.2.0 h1:MZjFFfULnFq8fh04FqrKPcJ/nGpHOvX4buIygT3MSNY= github.com/hashicorp/terraform-plugin-framework v1.2.0/go.mod h1:nToI62JylqXDq84weLJ/U3umUsBhZAaTmU0HXIVUOcw= +github.com/hashicorp/terraform-plugin-framework v1.4.0 h1:WKbtCRtNrjsh10eA7NZvC/Qyr7zp77j+D21aDO5th9c= +github.com/hashicorp/terraform-plugin-framework v1.4.0/go.mod h1:XC0hPcQbBvlbxwmjxuV/8sn8SbZRg4XwGMs22f+kqV0= github.com/hashicorp/terraform-plugin-framework-validators v0.10.0 h1:4L0tmy/8esP6OcvocVymw52lY0HyQ5OxB7VNl7k4bS0= 
github.com/hashicorp/terraform-plugin-framework-validators v0.10.0/go.mod h1:qdQJCdimB9JeX2YwOpItEu+IrfoJjWQ5PhLpAOMDQAE= +github.com/hashicorp/terraform-plugin-framework-validators v0.12.0 h1:HOjBuMbOEzl7snOdOoUfE2Jgeto6JOjLVQ39Ls2nksc= +github.com/hashicorp/terraform-plugin-framework-validators v0.12.0/go.mod h1:jfHGE/gzjxYz6XoUwi/aYiiKrJDeutQNUtGQXkaHklg= github.com/hashicorp/terraform-plugin-go v0.19.0 h1:BuZx/6Cp+lkmiG0cOBk6Zps0Cb2tmqQpDM3iAtnhDQU= github.com/hashicorp/terraform-plugin-go v0.19.0/go.mod h1:EhRSkEPNoylLQntYsk5KrDHTZJh9HQoumZXbOGOXmec= github.com/hashicorp/terraform-plugin-log v0.9.0 h1:i7hOA+vdAItN1/7UrfBqBwvYPQ9TFvymaRGZED3FCV0= From bf7994ed683031ae0aec956a1398f5cfbe6d7d32 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Tue, 3 Oct 2023 11:06:18 +1100 Subject: [PATCH 12/15] WIP --- internal/clients/config/base_test.go | 54 ++++++++++++++++++- .../kibana/import_saved_objects/schema.go | 4 ++ provider/provider.go | 6 +-- 3 files changed, 60 insertions(+), 4 deletions(-) diff --git a/internal/clients/config/base_test.go b/internal/clients/config/base_test.go index 4c95fc24c..e78f275a5 100644 --- a/internal/clients/config/base_test.go +++ b/internal/clients/config/base_test.go @@ -6,6 +6,7 @@ import ( "testing" providerSchema "github.com/elastic/terraform-provider-elasticstack/internal/schema" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/stretchr/testify/require" ) @@ -27,7 +28,7 @@ func TestNewBaseConfigFromSDK(t *testing.T) { }) baseCfg := newBaseConfigFromSDK(rd, "unit-testing", "elasticsearch") - ua := buildUserAgent("unit-testing") + ua := "elasticstack-terraform-provider/unit-testing" require.Equal(t, baseConfig{ Username: "elastic", Password: "changeme", @@ -35,3 +36,54 @@ func TestNewBaseConfigFromSDK(t *testing.T) { Header: 
http.Header{"User-Agent": []string{ua}}, }, baseCfg) } + +func TestNewBaseConfigFromFramework(t *testing.T) { + os.Unsetenv("ELASTICSEARCH_USERNAME") + os.Unsetenv("ELASTICSEARCH_PASSWORD") + os.Unsetenv("ELASTICSEARCH_API_KEY") + + expectedUA := "elasticstack-terraform-provider/unit-testing" + + tests := []struct { + name string + config ProviderConfiguration + expectedBaseConfig baseConfig + }{ + { + name: "with es config defined", + config: ProviderConfiguration{ + Elasticsearch: []ElasticsearchConnection{ + { + Username: types.StringValue("elastic"), + Password: types.StringValue("changeme"), + APIKey: types.StringValue("apikey"), + }, + }, + }, + expectedBaseConfig: baseConfig{ + Username: "elastic", + Password: "changeme", + ApiKey: "apikey", + UserAgent: expectedUA, + Header: http.Header{"User-Agent": []string{expectedUA}}, + }, + }, + { + name: "should not set credentials if no configuration available", + config: ProviderConfiguration{ + Elasticsearch: []ElasticsearchConnection{}, + }, + expectedBaseConfig: baseConfig{ + UserAgent: expectedUA, + Header: http.Header{"User-Agent": []string{expectedUA}}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + baseCfg := newBaseConfigFromFramework(tt.config, "unit-testing") + require.Equal(t, tt.expectedBaseConfig, baseCfg) + }) + } +} diff --git a/internal/kibana/import_saved_objects/schema.go b/internal/kibana/import_saved_objects/schema.go index e525e43c7..1d2b68706 100644 --- a/internal/kibana/import_saved_objects/schema.go +++ b/internal/kibana/import_saved_objects/schema.go @@ -17,6 +17,10 @@ import ( var _ resource.Resource = &Resource{} var _ resource.ResourceWithConfigure = &Resource{} +// TODO - Uncomment these lines when we're using a kibana client which supports create_new_copies and compatibility_mode +// create_new_copies and compatibility_mode aren't supported by the current version of the Kibana client +// We can add these ourselves once 
https://github.com/elastic/terraform-provider-elasticstack/pull/372 is merged + // var _ resource.ResourceWithConfigValidators = &Resource{} // func (r *Resource) ConfigValidators(context.Context) []resource.ConfigValidator { diff --git a/provider/provider.go b/provider/provider.go index 653733a57..beb174451 100644 --- a/provider/provider.go +++ b/provider/provider.go @@ -29,9 +29,9 @@ func init() { func New(version string) *schema.Provider { p := &schema.Provider{ Schema: map[string]*schema.Schema{ - esKeyName: providerSchema.GetEsConnectionSchema(esKeyName, true), - "kibana": providerSchema.GetKibanaConnectionSchema(), - "fleet": providerSchema.GetFleetConnectionSchema(), + esKeyName: providerSchema.GetEsConnectionSchema(esKeyName, true), + kbKeyName: providerSchema.GetKibanaConnectionSchema(), + fleetKeyName: providerSchema.GetFleetConnectionSchema(), }, DataSourcesMap: map[string]*schema.Resource{ "elasticstack_elasticsearch_ingest_processor_append": ingest.DataSourceProcessorAppend(), From 80b08fce0b84ff1d9140a5445e2d97c77485e70c Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Mon, 9 Oct 2023 13:26:57 +1100 Subject: [PATCH 13/15] PR feedback --- internal/clients/config/elasticsearch.go | 27 +- internal/clients/config/elasticsearch_test.go | 229 ++++++++++++++ internal/clients/config/fleet_test.go | 284 ++++++++++++++++++ internal/clients/config/kibana.go | 11 +- internal/clients/config/kibana_test.go | 250 +++++++++++++++ provider/factory_test.go | 17 +- 6 files changed, 784 insertions(+), 34 deletions(-) create mode 100644 internal/clients/config/elasticsearch_test.go create mode 100644 internal/clients/config/fleet_test.go create mode 100644 internal/clients/config/kibana_test.go diff --git a/internal/clients/config/elasticsearch.go b/internal/clients/config/elasticsearch.go index 0325f9aa8..a47f0a527 100644 --- a/internal/clients/config/elasticsearch.go +++ b/internal/clients/config/elasticsearch.go @@ -10,7 +10,6 @@ import ( 
"github.com/elastic/go-elasticsearch/v7" fwdiags "github.com/hashicorp/terraform-plugin-framework/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" sdkdiags "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -24,7 +23,7 @@ func newElasticsearchConfigFromSDK(d *schema.ResourceData, base baseConfig, key return nil, nil } - var diags diag.Diagnostics + var diags sdkdiags.Diagnostics config := base.toElasticsearchConfig() // if defined, then we only have a single entry @@ -47,8 +46,8 @@ func newElasticsearchConfigFromSDK(d *schema.ResourceData, base baseConfig, key if caFile, ok := esConfig["ca_file"]; ok && caFile.(string) != "" { caCert, err := os.ReadFile(caFile.(string)) if err != nil { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, + diags = append(diags, sdkdiags.Diagnostic{ + Severity: sdkdiags.Error, Summary: "Unable to read CA File", Detail: err.Error(), }) @@ -64,8 +63,8 @@ func newElasticsearchConfigFromSDK(d *schema.ResourceData, base baseConfig, key if keyFile, ok := esConfig["key_file"]; ok && keyFile.(string) != "" { cert, err := tls.LoadX509KeyPair(certFile.(string), keyFile.(string)) if err != nil { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, + diags = append(diags, sdkdiags.Diagnostic{ + Severity: sdkdiags.Error, Summary: "Unable to read certificate or key file", Detail: err.Error(), }) @@ -74,8 +73,8 @@ func newElasticsearchConfigFromSDK(d *schema.ResourceData, base baseConfig, key tlsClientConfig := config.ensureTLSClientConfig() tlsClientConfig.Certificates = []tls.Certificate{cert} } else { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, + diags = append(diags, sdkdiags.Diagnostic{ + Severity: sdkdiags.Error, Summary: "Unable to read key file", Detail: "Path to key file has not been configured 
or is empty", }) @@ -86,8 +85,8 @@ func newElasticsearchConfigFromSDK(d *schema.ResourceData, base baseConfig, key if keyData, ok := esConfig["key_data"]; ok && keyData.(string) != "" { cert, err := tls.X509KeyPair([]byte(certData.(string)), []byte(keyData.(string))) if err != nil { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, + diags = append(diags, sdkdiags.Diagnostic{ + Severity: sdkdiags.Error, Summary: "Unable to parse certificate or key", Detail: err.Error(), }) @@ -96,8 +95,8 @@ func newElasticsearchConfigFromSDK(d *schema.ResourceData, base baseConfig, key tlsClientConfig := config.ensureTLSClientConfig() tlsClientConfig.Certificates = []tls.Certificate{cert} } else { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, + diags = append(diags, sdkdiags.Diagnostic{ + Severity: sdkdiags.Error, Summary: "Unable to parse key", Detail: "Key data has not been configured or is empty", }) @@ -208,9 +207,9 @@ func (c elasticsearchConfig) withEnvironmentOverrides() elasticsearchConfig { } if insecure, ok := os.LookupEnv("ELASTICSEARCH_INSECURE"); ok { - if insecureValue, _ := strconv.ParseBool(insecure); insecureValue { + if insecureValue, err := strconv.ParseBool(insecure); err == nil { tlsClientConfig := c.ensureTLSClientConfig() - tlsClientConfig.InsecureSkipVerify = true + tlsClientConfig.InsecureSkipVerify = insecureValue } } diff --git a/internal/clients/config/elasticsearch_test.go b/internal/clients/config/elasticsearch_test.go new file mode 100644 index 000000000..cd93bfd1a --- /dev/null +++ b/internal/clients/config/elasticsearch_test.go @@ -0,0 +1,229 @@ +package config + +import ( + "context" + "os" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/attr" + fwdiags "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + sdkdiags "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + providerSchema "github.com/elastic/terraform-provider-elasticstack/internal/schema" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/stretchr/testify/require" +) + +func Test_newElasticsearchConfigFromSDK(t *testing.T) { + type args struct { + resourceData map[string]interface{} + base baseConfig + env map[string]string + expectedESConfig *elasticsearchConfig + expectedDiags sdkdiags.Diagnostics + } + tests := []struct { + name string + args func(string) args + }{ + { + name: "should return nil if no config is specified", + args: func(key string) args { + return args{} + }, + }, + { + name: "should use the options set in config", + args: func(key string) args { + base := baseConfig{ + Username: "elastic", + Password: "changeme", + } + + config := base.toElasticsearchConfig() + config.Addresses = []string{"localhost", "example.com"} + tlsConfig := config.ensureTLSClientConfig() + tlsConfig.InsecureSkipVerify = true + + return args{ + resourceData: map[string]interface{}{ + key: []interface{}{ + map[string]interface{}{ + "endpoints": []interface{}{"localhost", "example.com"}, + "insecure": true, + }, + }, + }, + base: base, + expectedESConfig: &config, + } + }, + }, + { + name: "should prefer config defined in environment variables", + args: func(key string) args { + base := baseConfig{ + Username: "elastic", + Password: "changeme", + } + + config := base.toElasticsearchConfig() + config.Addresses = []string{"127.0.0.1", "example.com/elastic"} + tlsConfig := config.ensureTLSClientConfig() + tlsConfig.InsecureSkipVerify = false + + return args{ + resourceData: map[string]interface{}{ + key: []interface{}{ + map[string]interface{}{ + "endpoints": []interface{}{"localhost", "example.com"}, + "insecure": true, + }, + }, + }, + env: map[string]string{ + "ELASTICSEARCH_ENDPOINTS": "127.0.0.1,example.com/elastic", + 
"ELASTICSEARCH_INSECURE": "false", + }, + base: base, + expectedESConfig: &config, + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + os.Unsetenv("ELASTICSEARCH_ENDPOINTS") + os.Unsetenv("ELASTICSEARCH_INSECURE") + + key := "elasticsearch" + args := tt.args(key) + rd := schema.TestResourceDataRaw(t, map[string]*schema.Schema{ + key: providerSchema.GetEsConnectionSchema(key, true), + }, args.resourceData) + + for key, val := range args.env { + os.Setenv(key, val) + } + + esConfig, diags := newElasticsearchConfigFromSDK(rd, args.base, key, false) + + require.Equal(t, args.expectedESConfig, esConfig) + require.Equal(t, args.expectedDiags, diags) + }) + } +} + +func Test_newElasticsearchConfigFromFramework(t *testing.T) { + type args struct { + providerConfig ProviderConfiguration + base baseConfig + env map[string]string + expectedESConfig *elasticsearchConfig + expectedDiags fwdiags.Diagnostics + } + tests := []struct { + name string + args func() args + }{ + { + name: "should return nil if no config is specified", + args: func() args { + return args{ + providerConfig: ProviderConfiguration{}, + } + }, + }, + { + name: "should use the options set in config", + args: func() args { + base := baseConfig{ + Username: "elastic", + Password: "changeme", + } + + config := base.toElasticsearchConfig() + config.Addresses = []string{"localhost", "example.com"} + tlsConfig := config.ensureTLSClientConfig() + tlsConfig.InsecureSkipVerify = true + + return args{ + providerConfig: ProviderConfiguration{ + Elasticsearch: []ElasticsearchConnection{ + { + Endpoints: basetypes.NewListValueMust( + basetypes.StringType{}, + []attr.Value{ + basetypes.NewStringValue("localhost"), + basetypes.NewStringValue("example.com"), + }, + ), + Insecure: basetypes.NewBoolPointerValue(utils.Pointer(true)), + }, + }, + }, + base: base, + expectedESConfig: &config, + } + }, + }, + { + name: "should prefer config defined in environment variables", + args: func() 
args { + base := baseConfig{ + Username: "elastic", + Password: "changeme", + } + + config := base.toElasticsearchConfig() + config.Addresses = []string{"127.0.0.1", "example.com/elastic"} + tlsConfig := config.ensureTLSClientConfig() + tlsConfig.InsecureSkipVerify = false + + return args{ + providerConfig: ProviderConfiguration{ + Elasticsearch: []ElasticsearchConnection{ + { + Endpoints: basetypes.NewListValueMust( + basetypes.StringType{}, + []attr.Value{ + basetypes.NewStringValue("localhost"), + basetypes.NewStringValue("example.com"), + }, + ), + Insecure: basetypes.NewBoolPointerValue(utils.Pointer(true)), + }, + }, + }, + env: map[string]string{ + "ELASTICSEARCH_ENDPOINTS": "127.0.0.1,example.com/elastic", + "ELASTICSEARCH_INSECURE": "false", + }, + base: base, + expectedESConfig: &config, + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + os.Unsetenv("ELASTICSEARCH_ENDPOINTS") + os.Unsetenv("ELASTICSEARCH_INSECURE") + + args := tt.args() + + for key, val := range args.env { + os.Setenv(key, val) + } + + esConfig, diags := newElasticsearchConfigFromFramework(context.Background(), args.providerConfig, args.base) + + require.Equal(t, args.expectedESConfig, esConfig) + require.Equal(t, args.expectedDiags, diags) + }) + } +} diff --git a/internal/clients/config/fleet_test.go b/internal/clients/config/fleet_test.go new file mode 100644 index 000000000..decc53ff8 --- /dev/null +++ b/internal/clients/config/fleet_test.go @@ -0,0 +1,284 @@ +package config + +import ( + "context" + "os" + "testing" + + providerSchema "github.com/elastic/terraform-provider-elasticstack/internal/schema" + "github.com/hashicorp/terraform-plugin-framework/attr" + fwdiags "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types" + sdkdiags "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/stretchr/testify/require" +) + +func Test_newFleetConfigFromSDK(t *testing.T) { + type args struct { + kibanaCfg kibanaConfig + resourceData map[string]interface{} + expectedConfig fleetConfig + expectedDiags sdkdiags.Diagnostics + env map[string]string + } + tests := []struct { + name string + args func() args + }{ + { + name: "should return kibana config if no fleet config defined", + args: func() args { + kibanaCfg := kibanaConfig{ + Address: "example.com/kibana", + Username: "elastic", + Password: "changeme", + DisableVerifySSL: true, + } + + return args{ + kibanaCfg: kibanaCfg, + resourceData: map[string]interface{}{}, + expectedConfig: kibanaCfg.toFleetConfig(), + } + }, + }, + { + name: "should use the provided config optios", + args: func() args { + kibanaCfg := kibanaConfig{ + Address: "example.com/kibana", + Username: "elastic", + Password: "changeme", + DisableVerifySSL: true, + } + + return args{ + kibanaCfg: kibanaCfg, + resourceData: map[string]interface{}{ + "fleet": []interface{}{ + map[string]interface{}{ + "endpoint": "example.com/fleet", + "username": "fleet", + "password": "baltic", + "api_key": "leviosa", + "ca_certs": []interface{}{"internal", "lets_decrypt"}, + "insecure": false, + }, + }, + }, + expectedConfig: fleetConfig{ + URL: "example.com/fleet", + Username: "fleet", + Password: "baltic", + APIKey: "leviosa", + CACerts: []string{"internal", "lets_decrypt"}, + Insecure: false, + }, + } + }, + }, + { + name: "should prefer environment variables", + args: func() args { + kibanaCfg := kibanaConfig{ + Address: "example.com/kibana", + Username: "elastic", + Password: "changeme", + DisableVerifySSL: true, + } + + return args{ + kibanaCfg: kibanaCfg, + resourceData: map[string]interface{}{ + "fleet": []interface{}{ + map[string]interface{}{ + "endpoint": "example.com/fleet", + "username": "fleet", + "password": "baltic", + "api_key": "leviosa", + 
"ca_certs": []interface{}{"internal", "lets_decrypt"}, + "insecure": false, + }, + }, + }, + env: map[string]string{ + "FLEET_ENDPOINT": "example.com/black_sea_fleet", + "FLEET_USERNAME": "black_sea", + "FLEET_PASSWORD": "fleet", + "FLEET_API_KEY": "stupefy", + "FLEET_CA_CERTS": "black,sea", + }, + expectedConfig: fleetConfig{ + URL: "example.com/black_sea_fleet", + Username: "black_sea", + Password: "fleet", + APIKey: "stupefy", + CACerts: []string{"black", "sea"}, + Insecure: false, + }, + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + os.Unsetenv("FLEET_ENDPOINT") + os.Unsetenv("FLEET_USERNAME") + os.Unsetenv("FLEET_PASSWORD") + os.Unsetenv("FLEET_API_KEY") + os.Unsetenv("FLEET_CA_CERTS") + + args := tt.args() + rd := schema.TestResourceDataRaw(t, map[string]*schema.Schema{ + "fleet": providerSchema.GetFleetConnectionSchema(), + }, args.resourceData) + + for key, val := range args.env { + os.Setenv(key, val) + } + + fleetConfig, diags := newFleetConfigFromSDK(rd, args.kibanaCfg) + + require.Equal(t, args.expectedConfig, fleetConfig) + require.Equal(t, args.expectedDiags, diags) + }) + } +} + +func Test_newFleetConfigFromFramework(t *testing.T) { + type args struct { + kibanaCfg kibanaConfig + providerConfig ProviderConfiguration + expectedConfig fleetConfig + expectedDiags fwdiags.Diagnostics + env map[string]string + } + tests := []struct { + name string + args func() args + }{ + { + name: "should return kibana config if no fleet config defined", + args: func() args { + kibanaCfg := kibanaConfig{ + Address: "example.com/kibana", + Username: "elastic", + Password: "changeme", + DisableVerifySSL: true, + } + + return args{ + kibanaCfg: kibanaCfg, + providerConfig: ProviderConfiguration{}, + expectedConfig: kibanaCfg.toFleetConfig(), + } + }, + }, + { + name: "should use the provided config optios", + args: func() args { + kibanaCfg := kibanaConfig{ + Address: "example.com/kibana", + Username: "elastic", + Password: 
"changeme", + DisableVerifySSL: true, + } + + return args{ + kibanaCfg: kibanaCfg, + providerConfig: ProviderConfiguration{ + Fleet: []FleetConnection{ + { + Username: types.StringValue("fleet"), + Password: types.StringValue("baltic"), + Endpoint: types.StringValue("example.com/fleet"), + APIKey: types.StringValue("leviosa"), + CACerts: types.ListValueMust(types.StringType, []attr.Value{ + types.StringValue("internal"), + types.StringValue("lets_decrypt"), + }), + Insecure: types.BoolValue(false), + }, + }, + }, + expectedConfig: fleetConfig{ + URL: "example.com/fleet", + Username: "fleet", + Password: "baltic", + APIKey: "leviosa", + CACerts: []string{"internal", "lets_decrypt"}, + Insecure: false, + }, + } + }, + }, + { + name: "should prefer environment variables", + args: func() args { + kibanaCfg := kibanaConfig{ + Address: "example.com/kibana", + Username: "elastic", + Password: "changeme", + DisableVerifySSL: true, + } + + return args{ + kibanaCfg: kibanaCfg, + providerConfig: ProviderConfiguration{ + Fleet: []FleetConnection{ + { + Username: types.StringValue("fleet"), + Password: types.StringValue("baltic"), + Endpoint: types.StringValue("example.com/fleet"), + APIKey: types.StringValue("leviosa"), + CACerts: types.ListValueMust(types.StringType, []attr.Value{ + types.StringValue("internal"), + types.StringValue("lets_decrypt"), + }), + Insecure: types.BoolValue(false), + }, + }, + }, + env: map[string]string{ + "FLEET_ENDPOINT": "example.com/black_sea_fleet", + "FLEET_USERNAME": "black_sea", + "FLEET_PASSWORD": "fleet", + "FLEET_API_KEY": "stupefy", + "FLEET_CA_CERTS": "black,sea", + }, + expectedConfig: fleetConfig{ + URL: "example.com/black_sea_fleet", + Username: "black_sea", + Password: "fleet", + APIKey: "stupefy", + CACerts: []string{"black", "sea"}, + Insecure: false, + }, + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + os.Unsetenv("FLEET_ENDPOINT") + os.Unsetenv("FLEET_USERNAME") + 
os.Unsetenv("FLEET_PASSWORD") + os.Unsetenv("FLEET_API_KEY") + os.Unsetenv("FLEET_CA_CERTS") + + args := tt.args() + + for key, val := range args.env { + os.Setenv(key, val) + } + + fleetConfig, diags := newFleetConfigFromFramework(context.Background(), args.providerConfig, args.kibanaCfg) + + require.Equal(t, args.expectedConfig, fleetConfig) + require.Equal(t, args.expectedDiags, diags) + }) + } +} diff --git a/internal/clients/config/kibana.go b/internal/clients/config/kibana.go index 3737495ab..b0f80c806 100644 --- a/internal/clients/config/kibana.go +++ b/internal/clients/config/kibana.go @@ -16,14 +16,13 @@ type kibanaConfig kibana.Config func newKibanaConfigFromSDK(d *schema.ResourceData, base baseConfig) (kibanaConfig, sdkdiags.Diagnostics) { var diags sdkdiags.Diagnostics + // Use ES details by default + config := base.toKibanaConfig() kibConn, ok := d.GetOk("kibana") if !ok { - return kibanaConfig{}, diags + return config, diags } - // Use ES details by default - config := base.toKibanaConfig() - // if defined, then we only have a single entry if kib := kibConn.([]interface{})[0]; kib != nil { kibConfig := kib.(map[string]interface{}) @@ -83,8 +82,8 @@ func (k kibanaConfig) withEnvironmentOverrides() kibanaConfig { k.Address = withEnvironmentOverride(k.Address, "KIBANA_ENDPOINT") if insecure, ok := os.LookupEnv("KIBANA_INSECURE"); ok { - if insecureValue, _ := strconv.ParseBool(insecure); insecureValue { - k.DisableVerifySSL = true + if insecureValue, err := strconv.ParseBool(insecure); err == nil { + k.DisableVerifySSL = insecureValue } } diff --git a/internal/clients/config/kibana_test.go b/internal/clients/config/kibana_test.go new file mode 100644 index 000000000..a7b8fbbac --- /dev/null +++ b/internal/clients/config/kibana_test.go @@ -0,0 +1,250 @@ +package config + +import ( + "context" + "os" + "testing" + + providerSchema "github.com/elastic/terraform-provider-elasticstack/internal/schema" + 
"github.com/hashicorp/terraform-plugin-framework/attr" + fwdiags "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types" + sdkdiags "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/stretchr/testify/require" +) + +func Test_newKibanaConfigFromSDK(t *testing.T) { + type args struct { + baseCfg baseConfig + resourceData map[string]interface{} + expectedConfig kibanaConfig + expectedDiags sdkdiags.Diagnostics + env map[string]string + } + tests := []struct { + name string + args func() args + }{ + { + name: "should return kibana config if no fleet config defined", + args: func() args { + baseCfg := baseConfig{ + Username: "elastic", + Password: "changeme", + } + + return args{ + baseCfg: baseCfg, + resourceData: map[string]interface{}{}, + expectedConfig: baseCfg.toKibanaConfig(), + } + }, + }, + { + name: "should use the provided config optios", + args: func() args { + baseCfg := baseConfig{ + Username: "elastic", + Password: "changeme", + } + + return args{ + baseCfg: baseCfg, + resourceData: map[string]interface{}{ + "kibana": []interface{}{ + map[string]interface{}{ + "endpoints": []interface{}{"example.com/kibana"}, + "username": "kibana", + "password": "baltic", + "insecure": true, + }, + }, + }, + expectedConfig: kibanaConfig{ + Address: "example.com/kibana", + Username: "kibana", + Password: "baltic", + DisableVerifySSL: true, + }, + } + }, + }, + { + name: "should prefer environment variables", + args: func() args { + baseCfg := baseConfig{ + Username: "elastic", + Password: "changeme", + } + + return args{ + baseCfg: baseCfg, + resourceData: map[string]interface{}{ + "kibana": []interface{}{ + map[string]interface{}{ + "endpoints": []interface{}{"example.com/kibana"}, + "username": "kibana", + "password": "baltic", + "insecure": true, + }, + }, + }, + env: 
map[string]string{ + "KIBANA_ENDPOINT": "example.com/cabana", + "KIBANA_USERNAME": "elastic", + "KIBANA_PASSWORD": "thin-lines", + "KIBANA_INSECURE": "false", + }, + expectedConfig: kibanaConfig{ + Address: "example.com/cabana", + Username: "elastic", + Password: "thin-lines", + DisableVerifySSL: false, + }, + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + os.Unsetenv("KIBANA_USERNAME") + os.Unsetenv("KIBANA_PASSWORD") + os.Unsetenv("KIBANA_ENDPOINT") + os.Unsetenv("KIBANA_INSECURE") + + args := tt.args() + rd := schema.TestResourceDataRaw(t, map[string]*schema.Schema{ + "kibana": providerSchema.GetKibanaConnectionSchema(), + }, args.resourceData) + + for key, val := range args.env { + os.Setenv(key, val) + } + + kibanaCfg, diags := newKibanaConfigFromSDK(rd, args.baseCfg) + + require.Equal(t, args.expectedConfig, kibanaCfg) + require.Equal(t, args.expectedDiags, diags) + }) + } +} + +func Test_newKibanaConfigFromFramework(t *testing.T) { + type args struct { + baseCfg baseConfig + providerConfig ProviderConfiguration + expectedConfig kibanaConfig + expectedDiags fwdiags.Diagnostics + env map[string]string + } + tests := []struct { + name string + args func() args + }{ + { + name: "should return kibana config if no fleet config defined", + args: func() args { + baseCfg := baseConfig{ + Username: "elastic", + Password: "changeme", + } + + return args{ + baseCfg: baseCfg, + providerConfig: ProviderConfiguration{}, + expectedConfig: baseCfg.toKibanaConfig(), + } + }, + }, + { + name: "should use the provided config optios", + args: func() args { + baseCfg := baseConfig{ + Username: "elastic", + Password: "changeme", + } + + return args{ + baseCfg: baseCfg, + providerConfig: ProviderConfiguration{ + Kibana: []KibanaConnection{ + { + Username: types.StringValue("kibana"), + Password: types.StringValue("baltic"), + Endpoints: types.ListValueMust(types.StringType, []attr.Value{ + types.StringValue("example.com/kibana"), + }), + 
Insecure: types.BoolValue(true), + }, + }, + }, + expectedConfig: kibanaConfig{ + Address: "example.com/kibana", + Username: "kibana", + Password: "baltic", + DisableVerifySSL: true, + }, + } + }, + }, + { + name: "should prefer environment variables", + args: func() args { + baseCfg := baseConfig{ + Username: "elastic", + Password: "changeme", + } + + return args{ + baseCfg: baseCfg, + providerConfig: ProviderConfiguration{ + Kibana: []KibanaConnection{ + { + Username: types.StringValue("kibana"), + Password: types.StringValue("baltic"), + Endpoints: types.ListValueMust(types.StringType, []attr.Value{ + types.StringValue("example.com/kibana"), + }), + Insecure: types.BoolValue(true), + }, + }, + }, + env: map[string]string{ + "KIBANA_ENDPOINT": "example.com/cabana", + "KIBANA_USERNAME": "elastic", + "KIBANA_PASSWORD": "thin-lines", + "KIBANA_INSECURE": "false", + }, + expectedConfig: kibanaConfig{ + Address: "example.com/cabana", + Username: "elastic", + Password: "thin-lines", + DisableVerifySSL: false, + }, + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + os.Unsetenv("KIBANA_USERNAME") + os.Unsetenv("KIBANA_PASSWORD") + os.Unsetenv("KIBANA_ENDPOINT") + os.Unsetenv("KIBANA_INSECURE") + + args := tt.args() + + for key, val := range args.env { + os.Setenv(key, val) + } + + kibanaCfg, diags := newKibanaConfigFromFramework(context.Background(), args.providerConfig, args.baseCfg) + + require.Equal(t, args.expectedConfig, kibanaCfg) + require.Equal(t, args.expectedDiags, diags) + }) + } +} diff --git a/provider/factory_test.go b/provider/factory_test.go index 0067faf57..568360e58 100644 --- a/provider/factory_test.go +++ b/provider/factory_test.go @@ -5,9 +5,7 @@ import ( "fmt" "testing" - "github.com/hashicorp/terraform-plugin-framework/providerserver" "github.com/hashicorp/terraform-plugin-go/tfprotov5" - "github.com/hashicorp/terraform-plugin-mux/tf5muxserver" 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" ) @@ -24,22 +22,13 @@ func TestMuxServer(t *testing.T) { resource.Test(t, resource.TestCase{ ProtoV5ProviderFactories: map[string]func() (tfprotov5.ProviderServer, error){ "elasticstack": func() (tfprotov5.ProviderServer, error) { - version := "test" - sdkv2Provider := New(version) - frameworkProvider := providerserver.NewProtocol5(NewFrameworkProvider(version)) - ctx := context.Background() - providers := []func() tfprotov5.ProviderServer{ - frameworkProvider, - sdkv2Provider.GRPCProvider, - } - - muxServer, err := tf5muxserver.NewMuxServer(ctx, providers...) - + version := "acceptance_test" + server, err := ProtoV5ProviderServerFactory(context.Background(), version) if err != nil { return nil, err } - return muxServer.ProviderServer(), nil + return server(), nil }, }, Steps: []resource.TestStep{ From aa5916cd114576d13335e8b721918e4c54fb4171 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Mon, 9 Oct 2023 13:31:05 +1100 Subject: [PATCH 14/15] Update check on import saved objects --- .../kibana/import_saved_objects/acc_test.go | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/internal/kibana/import_saved_objects/acc_test.go b/internal/kibana/import_saved_objects/acc_test.go index bc3130823..f8a39df6f 100644 --- a/internal/kibana/import_saved_objects/acc_test.go +++ b/internal/kibana/import_saved_objects/acc_test.go @@ -21,6 +21,15 @@ func TestAccResourceImportSavedObjects(t *testing.T) { resource.TestCheckResourceAttr("elasticstack_kibana_import_saved_objects.settings", "errors.#", "0"), ), }, + { + Config: testAccResourceImportSavedObjectsUpdate(), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_import_saved_objects.settings", "success", "true"), + resource.TestCheckResourceAttr("elasticstack_kibana_import_saved_objects.settings", "success_count", "1"), + 
resource.TestCheckResourceAttr("elasticstack_kibana_import_saved_objects.settings", "success_results.#", "1"), + resource.TestCheckResourceAttr("elasticstack_kibana_import_saved_objects.settings", "errors.#", "0"), + ), + }, }, }) } @@ -41,3 +50,20 @@ EOT } ` } + +func testAccResourceImportSavedObjectsUpdate() string { + return ` +provider "elasticstack" { + elasticsearch {} + kibana {} +} + +resource "elasticstack_kibana_import_saved_objects" "settings" { + overwrite = true + file_contents = <<-EOT +{"attributes":{"buildNum":42747,"defaultIndex":"metricbeat-*","theme:darkMode":false},"coreMigrationVersion":"7.0.0","id":"7.14.0","managed":false,"references":[],"type":"config","typeMigrationVersion":"7.0.0","updated_at":"2021-08-04T02:04:43.306Z","version":"WzY1MiwyXQ=="} +{"excludedObjects":[],"excludedObjectsCount":0,"exportedCount":1,"missingRefCount":0,"missingReferences":[]} +EOT +} + ` +} From bf73cd77472a68aaaddd721b9e10b6d269f03d95 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Thu, 12 Oct 2023 16:48:01 +1100 Subject: [PATCH 15/15] Changelog --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1c9ecaa12..99ba9ab42 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,9 @@ ## [Unreleased] +### Added +- Introduce `elasticstack_kibana_import_saved_objects` resource as an additive only way to manage Kibana saved objects ([#343](https://github.com/elastic/terraform-provider-elasticstack/pull/343)). +- Add support for Terraform Plugin Framework ([#343](https://github.com/elastic/terraform-provider-elasticstack/pull/343)). + ## [0.9.0] - 2023-10-09 ### Added