From d1a454ac470e8c4a226f6a4e866b1a32576f967b Mon Sep 17 00:00:00 2001 From: Raymond Lynch Date: Wed, 9 Oct 2024 21:36:23 -0400 Subject: [PATCH 1/5] wip --- Makefile | 56 +- generated/fleet/.gitignore | 3 +- generated/fleet/Makefile | 34 + generated/fleet/fleet.gen.go | 14240 ++++++++++++++-- generated/fleet/getschema.go | 573 - generated/fleet/oapi-config.yaml | 9 + generated/fleet/transform_schema.go | 1052 ++ internal/clients/fleet/client.go | 1 - internal/clients/fleet/errors.go | 26 + internal/clients/fleet/fleet.go | 175 +- internal/fleet/agent_policy/models.go | 16 +- internal/fleet/agent_policy/read.go | 2 +- internal/fleet/agent_policy/resource_test.go | 4 +- .../enrollment_tokens/data_source_test.go | 2 +- internal/fleet/enrollment_tokens/read.go | 2 +- internal/fleet/integration/read.go | 2 +- internal/fleet/integration_ds/models.go | 2 +- internal/fleet/integration_ds/read.go | 2 +- internal/fleet/integration_policy/models.go | 11 +- internal/fleet/integration_policy/read.go | 2 +- .../fleet/integration_policy/resource_test.go | 4 +- internal/fleet/integration_policy/secrets.go | 13 +- .../fleet/integration_policy/secrets_test.go | 28 +- internal/fleet/output/create.go | 2 +- internal/fleet/output/models.go | 259 +- internal/fleet/output/read.go | 4 +- internal/fleet/output/resource_test.go | 2 +- internal/fleet/output/update.go | 2 +- internal/fleet/server_host/models.go | 14 +- internal/fleet/server_host/read.go | 2 +- internal/fleet/server_host/resource_test.go | 2 +- libs/go-kibana-rest/kibana.go | 3 +- tools/fleet_gen.go | 3 +- tools/go.mod | 5 +- tools/go.sum | 8 +- tools/tools.go | 1 + 36 files changed, 14177 insertions(+), 2389 deletions(-) create mode 100644 generated/fleet/Makefile delete mode 100644 generated/fleet/getschema.go create mode 100644 generated/fleet/oapi-config.yaml create mode 100644 generated/fleet/transform_schema.go create mode 100644 internal/clients/fleet/errors.go diff --git a/Makefile b/Makefile index 10865096a..d952f63f5 100644 --- a/Makefile +++ b/Makefile @@ -35,7 +35,6 @@ FLEET_NAME ?= terraform-elasticstack-fleet FLEET_ENDPOINT ?= https://$(FLEET_NAME):8220 SOURCE_LOCATION ?= $(shell pwd) -, := , export GOBIN = $(shell pwd)/bin @@ -73,6 +72,11 @@ retry = until [ $$(if [ -z "$$attempt" ]; then echo -n "0"; else echo -n "$$atte backoff=$$((backoff * 2)); \ done +# wait_until_healthy command - first argument is the container name +wait_until_healthy = $(call retry, 5, [ "$$(docker inspect -f '{{ .State.Health.Status }}' $(1))" == "healthy" ]) + +CURL_OPTS = -sS --retry 5 --retry-all-errors -X POST -u $(ELASTICSEARCH_USERNAME):$(ELASTICSEARCH_PASSWORD) -H "Content-Type: application/json" + # To run specific test (e.g. TestAccResourceActionConnector) execute `make docker-testacc TESTARGS='-run ^TestAccResourceActionConnector$$'` # To enable tracing (or debugging), execute `make docker-testacc TF_LOG=TRACE` .PHONY: docker-testacc @@ -107,8 +111,7 @@ docker-testacc-with-token: .PHONY: docker-elasticsearch docker-elasticsearch: docker-network ## Start Elasticsearch single node cluster in docker container @ docker rm -f $(ELASTICSEARCH_NAME) &> /dev/null || true - @ $(call retry, 5, if ! 
docker ps --format '{{.Names}}' | grep -w $(ELASTICSEARCH_NAME) > /dev/null 2>&1 ; then \ - docker run -d \ + @ docker run -d \ --memory $(ELASTICSEARCH_MEM) \ -p 9200:9200 -p 9300:9300 \ -e "discovery.type=single-node" \ @@ -122,14 +125,15 @@ docker-elasticsearch: docker-network ## Start Elasticsearch single node cluster -e ELASTIC_PASSWORD=$(ELASTICSEARCH_PASSWORD) \ --name $(ELASTICSEARCH_NAME) \ --network $(ELASTICSEARCH_NETWORK) \ - docker.elastic.co/elasticsearch/elasticsearch:$(STACK_VERSION); \ - fi) + --health-cmd="curl http://localhost:9200/_cluster/health" \ + --health-interval=10s --health-timeout=5s --health-retries=10 \ + docker.elastic.co/elasticsearch/elasticsearch:$(STACK_VERSION) + @ $(call wait_until_healthy, $(ELASTICSEARCH_NAME)) .PHONY: docker-kibana docker-kibana: docker-network docker-elasticsearch set-kibana-password ## Start Kibana node in docker container @ docker rm -f $(KIBANA_NAME) &> /dev/null || true - @ $(call retry, 5, if ! docker ps --format '{{.Names}}' | grep -w $(KIBANA_NAME) > /dev/null 2>&1 ; then \ - docker run -d \ + @ docker run -d \ -p 5601:5601 \ -e SERVER_NAME=kibana \ -e ELASTICSEARCH_HOSTS=$(ELASTICSEARCH_ENDPOINTS) \ @@ -139,8 +143,10 @@ docker-kibana: docker-network docker-elasticsearch set-kibana-password ## Start -e LOGGING_ROOT_LEVEL=debug \ --name $(KIBANA_NAME) \ --network $(ELASTICSEARCH_NETWORK) \ - docker.elastic.co/kibana/kibana:$(STACK_VERSION); \ - fi) + --health-cmd="curl http://localhost:5601/api/status" \ + --health-interval=10s --health-timeout=5s --health-retries=10 \ + docker.elastic.co/kibana/kibana:$(STACK_VERSION) + @ $(call wait_until_healthy, $(KIBANA_NAME)) .PHONY: docker-kibana-with-tls docker-kibana-with-tls: docker-network docker-elasticsearch set-kibana-password @@ -148,9 +154,7 @@ docker-kibana-with-tls: docker-network docker-elasticsearch set-kibana-password @ mkdir -p certs @ CAROOT=certs mkcert localhost $(KIBANA_NAME) @ mv localhost*.pem certs/ - - @ $(call retry, 5, if ! docker ps --format '{{.Names}}' | grep -w $(KIBANA_NAME) > /dev/null 2>&1 ; then \ - docker run -d \ + @ docker run -d \ -p 5601:5601 \ -v $(shell pwd)/certs:/certs \ -e SERVER_NAME=kibana \ @@ -164,14 +168,15 @@ docker-kibana-with-tls: docker-network docker-elasticsearch set-kibana-password -e LOGGING_ROOT_LEVEL=debug \ --name $(KIBANA_NAME) \ --network $(ELASTICSEARCH_NETWORK) \ - docker.elastic.co/kibana/kibana:$(STACK_VERSION); \ - fi) + --health-cmd="curl -k https://localhost:5601/api/status" \ + --health-interval=10s --health-timeout=5s --health-retries=10 \ + docker.elastic.co/kibana/kibana:$(STACK_VERSION) + @ $(call wait_until_healthy, $(KIBANA_NAME)) .PHONY: docker-fleet docker-fleet: docker-network docker-elasticsearch docker-kibana setup-kibana-fleet ## Start Fleet node in docker container @ docker rm -f $(FLEET_NAME) &> /dev/null || true - @ $(call retry, 5, if ! docker ps --format '{{.Names}}' | grep -w $(FLEET_NAME) > /dev/null 2>&1 ; then \ - docker run -d \ + @ docker run -d \ -p 8220:8220 \ -e SERVER_NAME=fleet \ -e FLEET_ENROLL=1 \ @@ -188,33 +193,30 @@ docker-fleet: docker-network docker-elasticsearch docker-kibana setup-kibana-fle -e KIBANA_FLEET_PASSWORD=$(ELASTICSEARCH_PASSWORD) \ --name $(FLEET_NAME) \ --network $(ELASTICSEARCH_NETWORK) \ - docker.elastic.co/beats/elastic-agent:$(STACK_VERSION); \ - fi) + docker.elastic.co/beats/elastic-agent:$(STACK_VERSION) .PHONY: docker-network docker-network: ## Create a dedicated network for ES and test runs - @ if ! 
docker network ls --format '{{.Name}}' | grep -w $(ELASTICSEARCH_NETWORK) > /dev/null 2>&1 ; then \ - docker network create $(ELASTICSEARCH_NETWORK); \ - fi + @ docker network inspect $(ELASTICSEARCH_NETWORK) >/dev/null 2>&1 || docker network create $(ELASTICSEARCH_NETWORK) .PHONY: set-kibana-password set-kibana-password: ## Sets the ES KIBANA_SYSTEM_USERNAME's password to KIBANA_SYSTEM_PASSWORD. This expects Elasticsearch to be available at localhost:9200 - @ $(call retry, 10, curl -sS -X POST -u $(ELASTICSEARCH_USERNAME):$(ELASTICSEARCH_PASSWORD) -H "Content-Type: application/json" http://localhost:9200/_security/user/$(KIBANA_SYSTEM_USERNAME)/_password -d '{"password":"$(KIBANA_SYSTEM_PASSWORD)"}' | grep -q "^{}") + @ curl $(CURL_OPTS) http://localhost:9200/_security/user/$(KIBANA_SYSTEM_USERNAME)/_password -d '{"password":"$(KIBANA_SYSTEM_PASSWORD)"}' .PHONY: create-es-api-key create-es-api-key: ## Creates and outputs a new API Key. This expects Elasticsearch to be available at localhost:9200 - @ $(call retry, 10, curl -sS -X POST -u $(ELASTICSEARCH_USERNAME):$(ELASTICSEARCH_PASSWORD) -H "Content-Type: application/json" http://localhost:9200/_security/api_key -d '{"name":"$(KIBANA_API_KEY_NAME)"}') + @ curl $(CURL_OPTS) http://localhost:9200/_security/api_key -d '{"name":"$(KIBANA_API_KEY_NAME)"}' .PHONY: create-es-bearer-token create-es-bearer-token: ## Creates and outputs a new OAuth bearer token. This expects Elasticsearch to be available at localhost:9200 - @ $(call retry, 10, curl -sS -X POST -u $(ELASTICSEARCH_USERNAME):$(ELASTICSEARCH_PASSWORD) -H "Content-Type: application/json" http://localhost:9200/_security/oauth2/token -d '{"grant_type":"client_credentials"}') + @ curl $(CURL_OPTS) http://localhost:9200/_security/oauth2/token -d '{"grant_type":"client_credentials"}' .PHONY: setup-kibana-fleet setup-kibana-fleet: ## Creates the agent and integration policies required to run Fleet. 
This expects Kibana to be available at localhost:5601 - @ $(call retry, 10, curl -sS --fail-with-body -X POST -u $(ELASTICSEARCH_USERNAME):$(ELASTICSEARCH_PASSWORD) -H "Content-Type: application/json" -H "kbn-xsrf: true" http://localhost:5601/api/fleet/fleet_server_hosts -d '{"name":"default"$(,)"host_urls":["$(FLEET_ENDPOINT)"]$(,)"is_default":true}') - @ $(call retry, 10, curl -sS --fail-with-body -X POST -u $(ELASTICSEARCH_USERNAME):$(ELASTICSEARCH_PASSWORD) -H "Content-Type: application/json" -H "kbn-xsrf: true" http://localhost:5601/api/fleet/agent_policies -d '{"id":"fleet-server"$(,)"name":"Fleet Server"$(,)"namespace":"default"$(,)"monitoring_enabled":["logs"$(,)"metrics"]}') - @ $(call retry, 10, curl -sS --fail-with-body -X POST -u $(ELASTICSEARCH_USERNAME):$(ELASTICSEARCH_PASSWORD) -H "Content-Type: application/json" -H "kbn-xsrf: true" http://localhost:5601/api/fleet/package_policies -d '{"name":"fleet-server"$(,)"namespace":"default"$(,)"policy_id":"fleet-server"$(,)"enabled":true$(,)"inputs":[{"type":"fleet-server"$(,)"enabled":true$(,)"streams":[]$(,)"vars":{}}]$(,)"package":{"name":"fleet_server"$(,)"version":"1.5.0"}}') + curl $(CURL_OPTS) -H "kbn-xsrf: true" http://localhost:5601/api/fleet/fleet_server_hosts -d '{"name":"default","host_urls":["$(FLEET_ENDPOINT)"],"is_default":true}' + curl $(CURL_OPTS) -H "kbn-xsrf: true" http://localhost:5601/api/fleet/agent_policies -d '{"id":"fleet-server","name":"Fleet Server","namespace":"default","monitoring_enabled":["logs","metrics"]}' + curl $(CURL_OPTS) -H "kbn-xsrf: true" http://localhost:5601/api/fleet/package_policies -d '{"name":"fleet-server","namespace":"default","policy_id":"fleet-server","enabled":true,"inputs":[{"type":"fleet-server","enabled":true,"streams":[],"vars":{}}],"package":{"name":"fleet_server","version":"1.5.0"}}' .PHONY: docker-clean docker-clean: ## Try to remove provisioned nodes and assigned network diff --git a/generated/fleet/.gitignore b/generated/fleet/.gitignore index 9fc4aba05..1ec8ffcc0 100644 --- a/generated/fleet/.gitignore +++ b/generated/fleet/.gitignore @@ -1 +1,2 @@ -fleet-filtered.json \ No newline at end of file +oas.yaml +oas-filtered.yaml diff --git a/generated/fleet/Makefile b/generated/fleet/Makefile new file mode 100644 index 000000000..70a7690e7 --- /dev/null +++ b/generated/fleet/Makefile @@ -0,0 +1,34 @@ +.DEFAULT_GOAL = help +SHELL := /bin/bash +ROOT_DIR := $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST)))) + +github_ref := refs/heads/main +oas_url := https://raw.githubusercontent.com/elastic/kibana/$(github_ref)/oas_docs/output/kibana.yaml + +.PHONY: all +all: download transform generate ## Fetch, bundle, transform, and generate the API + + +.PHONY: download +download: oas.yaml ## Download the remote schema + +oas.yaml: + curl -sSo oas.yaml "$(oas_url)" + +.PHONY: transform +transform: download ## Transform and filter the schema + go run $(ROOT_DIR)/transform_schema.go -i ./oas.yaml -o ./oas-filtered.yaml + +.PHONY: generate +generate: ## Generate the API + go run github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen@v2.4.1 -config oapi-config.yaml ./oas-filtered.yaml + + +.PHONY: clean +clean: ## Remove any downloaded files + rm -rf oas.yaml oas-filtered.yaml + + +.PHONY: help +help: ## Show this help + @ awk 'BEGIN {FS = ":.*##"; printf "Usage: make \033[36m\033[0m\n\nTargets:\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-10s\033[0m\t%s\n", $$1, $$2 }' $(MAKEFILE_LIST) | column -s$$'\t' -t diff --git a/generated/fleet/fleet.gen.go b/generated/fleet/fleet.gen.go index 
135643e8d..6b9b4405f 100644 --- a/generated/fleet/fleet.gen.go +++ b/generated/fleet/fleet.gen.go @@ -1,7 +1,7 @@ -// Package fleet provides primitives to interact with the openapi HTTP API. +// Package fleetapi provides primitives to interact with the openapi HTTP API. // // Code generated by github.com/oapi-codegen/oapi-codegen/v2 version v2.4.1 DO NOT EDIT. -package fleet +package fleetapi import ( "bytes" @@ -13,154 +13,525 @@ import ( "net/http" "net/url" "strings" - "time" "github.com/oapi-codegen/runtime" ) const ( - BasicAuthScopes = "basicAuth.Scopes" + ApiKeyAuthScopes = "apiKeyAuth.Scopes" + BasicAuthScopes = "basicAuth.Scopes" ) // Defines values for AgentPolicyMonitoringEnabled. const ( AgentPolicyMonitoringEnabledLogs AgentPolicyMonitoringEnabled = "logs" AgentPolicyMonitoringEnabledMetrics AgentPolicyMonitoringEnabled = "metrics" + AgentPolicyMonitoringEnabledTraces AgentPolicyMonitoringEnabled = "traces" ) -// Defines values for AgentPolicyCreateRequestMonitoringEnabled. +// Defines values for AgentPolicyPackagePolicies1InputsStreamsRelease. const ( - AgentPolicyCreateRequestMonitoringEnabledLogs AgentPolicyCreateRequestMonitoringEnabled = "logs" - AgentPolicyCreateRequestMonitoringEnabledMetrics AgentPolicyCreateRequestMonitoringEnabled = "metrics" + AgentPolicyPackagePolicies1InputsStreamsReleaseBeta AgentPolicyPackagePolicies1InputsStreamsRelease = "beta" + AgentPolicyPackagePolicies1InputsStreamsReleaseExperimental AgentPolicyPackagePolicies1InputsStreamsRelease = "experimental" + AgentPolicyPackagePolicies1InputsStreamsReleaseGa AgentPolicyPackagePolicies1InputsStreamsRelease = "ga" ) -// Defines values for AgentPolicyUpdateRequestMonitoringEnabled. +// Defines values for AgentPolicyStatus. const ( - Logs AgentPolicyUpdateRequestMonitoringEnabled = "logs" - Metrics AgentPolicyUpdateRequestMonitoringEnabled = "metrics" + Active AgentPolicyStatus = "active" + Inactive AgentPolicyStatus = "inactive" ) -// Defines values for ElasticsearchAssetType. +// Defines values for NewOutputElasticsearchPreset. const ( - ComponentTemplate ElasticsearchAssetType = "component_template" - DataStreamIlmPolicy ElasticsearchAssetType = "data_stream_ilm_policy" - IlmPolicy ElasticsearchAssetType = "ilm_policy" - IndexTemplate ElasticsearchAssetType = "index_template" - IngestPipeline ElasticsearchAssetType = "ingest_pipeline" - Transform ElasticsearchAssetType = "transform" + NewOutputElasticsearchPresetBalanced NewOutputElasticsearchPreset = "balanced" + NewOutputElasticsearchPresetCustom NewOutputElasticsearchPreset = "custom" + NewOutputElasticsearchPresetLatency NewOutputElasticsearchPreset = "latency" + NewOutputElasticsearchPresetScale NewOutputElasticsearchPreset = "scale" + NewOutputElasticsearchPresetThroughput NewOutputElasticsearchPreset = "throughput" ) -// Defines values for KibanaSavedObjectType. +// Defines values for NewOutputElasticsearchType. const ( - CspRuleTemplate KibanaSavedObjectType = "csp_rule_template" - Dashboard KibanaSavedObjectType = "dashboard" - IndexPattern KibanaSavedObjectType = "index-pattern" - Lens KibanaSavedObjectType = "lens" - Map KibanaSavedObjectType = "map" - MlModule KibanaSavedObjectType = "ml-module" - Search KibanaSavedObjectType = "search" - SecurityRule KibanaSavedObjectType = "security-rule" - Visualization KibanaSavedObjectType = "visualization" + NewOutputElasticsearchTypeElasticsearch NewOutputElasticsearchType = "elasticsearch" ) -// Defines values for OutputCreateRequestElasticsearchType. 
+// Defines values for NewOutputKafkaAuthType. const ( - OutputCreateRequestElasticsearchTypeElasticsearch OutputCreateRequestElasticsearchType = "elasticsearch" + NewOutputKafkaAuthTypeKerberos NewOutputKafkaAuthType = "kerberos" + NewOutputKafkaAuthTypeNone NewOutputKafkaAuthType = "none" + NewOutputKafkaAuthTypeSsl NewOutputKafkaAuthType = "ssl" + NewOutputKafkaAuthTypeUserPass NewOutputKafkaAuthType = "user_pass" ) -// Defines values for OutputCreateRequestKafkaConnectionType. +// Defines values for NewOutputKafkaCompression. const ( - OutputCreateRequestKafkaConnectionTypeEncryption OutputCreateRequestKafkaConnectionType = "encryption" - OutputCreateRequestKafkaConnectionTypePlaintext OutputCreateRequestKafkaConnectionType = "plaintext" + NewOutputKafkaCompressionGzip NewOutputKafkaCompression = "gzip" + NewOutputKafkaCompressionLz4 NewOutputKafkaCompression = "lz4" + NewOutputKafkaCompressionNone NewOutputKafkaCompression = "none" + NewOutputKafkaCompressionSnappy NewOutputKafkaCompression = "snappy" ) -// Defines values for OutputCreateRequestKafkaSslVerificationMode. +// Defines values for NewOutputKafkaPartition. const ( - OutputCreateRequestKafkaSslVerificationModeCertificate OutputCreateRequestKafkaSslVerificationMode = "certificate" - OutputCreateRequestKafkaSslVerificationModeFull OutputCreateRequestKafkaSslVerificationMode = "full" - OutputCreateRequestKafkaSslVerificationModeNone OutputCreateRequestKafkaSslVerificationMode = "none" - OutputCreateRequestKafkaSslVerificationModeStrict OutputCreateRequestKafkaSslVerificationMode = "strict" + NewOutputKafkaPartitionHash NewOutputKafkaPartition = "hash" + NewOutputKafkaPartitionRandom NewOutputKafkaPartition = "random" + NewOutputKafkaPartitionRoundRobin NewOutputKafkaPartition = "round_robin" ) -// Defines values for OutputCreateRequestKafkaType. +// Defines values for NewOutputKafkaRequiredAcks. const ( - OutputCreateRequestKafkaTypeKafka OutputCreateRequestKafkaType = "kafka" + NewOutputKafkaRequiredAcksMinus1 NewOutputKafkaRequiredAcks = -1 + NewOutputKafkaRequiredAcksN0 NewOutputKafkaRequiredAcks = 0 + NewOutputKafkaRequiredAcksN1 NewOutputKafkaRequiredAcks = 1 ) -// Defines values for OutputCreateRequestLogstashType. +// Defines values for NewOutputKafkaSaslMechanism. const ( - OutputCreateRequestLogstashTypeLogstash OutputCreateRequestLogstashType = "logstash" + NewOutputKafkaSaslMechanismPLAIN NewOutputKafkaSaslMechanism = "PLAIN" + NewOutputKafkaSaslMechanismSCRAMSHA256 NewOutputKafkaSaslMechanism = "SCRAM-SHA-256" + NewOutputKafkaSaslMechanismSCRAMSHA512 NewOutputKafkaSaslMechanism = "SCRAM-SHA-512" ) -// Defines values for OutputUpdateRequestElasticsearchType. +// Defines values for NewOutputKafkaTopicsWhenType. const ( - OutputUpdateRequestElasticsearchTypeElasticsearch OutputUpdateRequestElasticsearchType = "elasticsearch" + NewOutputKafkaTopicsWhenTypeContains NewOutputKafkaTopicsWhenType = "contains" + NewOutputKafkaTopicsWhenTypeEquals NewOutputKafkaTopicsWhenType = "equals" + NewOutputKafkaTopicsWhenTypeRegexp NewOutputKafkaTopicsWhenType = "regexp" ) -// Defines values for OutputUpdateRequestKafkaConnectionType. +// Defines values for NewOutputKafkaType. const ( - OutputUpdateRequestKafkaConnectionTypeEncryption OutputUpdateRequestKafkaConnectionType = "encryption" - OutputUpdateRequestKafkaConnectionTypePlaintext OutputUpdateRequestKafkaConnectionType = "plaintext" + NewOutputKafkaTypeKafka NewOutputKafkaType = "kafka" ) -// Defines values for OutputUpdateRequestKafkaSslVerificationMode. 
+// Defines values for NewOutputLogstashType. const ( - OutputUpdateRequestKafkaSslVerificationModeCertificate OutputUpdateRequestKafkaSslVerificationMode = "certificate" - OutputUpdateRequestKafkaSslVerificationModeFull OutputUpdateRequestKafkaSslVerificationMode = "full" - OutputUpdateRequestKafkaSslVerificationModeNone OutputUpdateRequestKafkaSslVerificationMode = "none" - OutputUpdateRequestKafkaSslVerificationModeStrict OutputUpdateRequestKafkaSslVerificationMode = "strict" + NewOutputLogstashTypeLogstash NewOutputLogstashType = "logstash" ) -// Defines values for OutputUpdateRequestKafkaType. +// Defines values for NewOutputRemoteElasticsearchPreset. const ( - OutputUpdateRequestKafkaTypeKafka OutputUpdateRequestKafkaType = "kafka" + NewOutputRemoteElasticsearchPresetBalanced NewOutputRemoteElasticsearchPreset = "balanced" + NewOutputRemoteElasticsearchPresetCustom NewOutputRemoteElasticsearchPreset = "custom" + NewOutputRemoteElasticsearchPresetLatency NewOutputRemoteElasticsearchPreset = "latency" + NewOutputRemoteElasticsearchPresetScale NewOutputRemoteElasticsearchPreset = "scale" + NewOutputRemoteElasticsearchPresetThroughput NewOutputRemoteElasticsearchPreset = "throughput" ) -// Defines values for OutputUpdateRequestLogstashType. +// Defines values for NewOutputRemoteElasticsearchType. const ( - OutputUpdateRequestLogstashTypeLogstash OutputUpdateRequestLogstashType = "logstash" + NewOutputRemoteElasticsearchTypeRemoteElasticsearch NewOutputRemoteElasticsearchType = "remote_elasticsearch" ) -// Defines values for PackageInfoConditionsElasticsearchSubscription. +// Defines values for NewOutputSslVerificationMode. const ( - Basic PackageInfoConditionsElasticsearchSubscription = "basic" - Enterprise PackageInfoConditionsElasticsearchSubscription = "enterprise" - Gold PackageInfoConditionsElasticsearchSubscription = "gold" - Platinum PackageInfoConditionsElasticsearchSubscription = "platinum" + NewOutputSslVerificationModeCertificate NewOutputSslVerificationMode = "certificate" + NewOutputSslVerificationModeFull NewOutputSslVerificationMode = "full" + NewOutputSslVerificationModeNone NewOutputSslVerificationMode = "none" + NewOutputSslVerificationModeStrict NewOutputSslVerificationMode = "strict" +) + +// Defines values for OutputElasticsearchPreset. +const ( + OutputElasticsearchPresetBalanced OutputElasticsearchPreset = "balanced" + OutputElasticsearchPresetCustom OutputElasticsearchPreset = "custom" + OutputElasticsearchPresetLatency OutputElasticsearchPreset = "latency" + OutputElasticsearchPresetScale OutputElasticsearchPreset = "scale" + OutputElasticsearchPresetThroughput OutputElasticsearchPreset = "throughput" +) + +// Defines values for OutputElasticsearchType. +const ( + OutputElasticsearchTypeElasticsearch OutputElasticsearchType = "elasticsearch" +) + +// Defines values for OutputKafkaAuthType. +const ( + OutputKafkaAuthTypeKerberos OutputKafkaAuthType = "kerberos" + OutputKafkaAuthTypeNone OutputKafkaAuthType = "none" + OutputKafkaAuthTypeSsl OutputKafkaAuthType = "ssl" + OutputKafkaAuthTypeUserPass OutputKafkaAuthType = "user_pass" +) + +// Defines values for OutputKafkaCompression. +const ( + OutputKafkaCompressionGzip OutputKafkaCompression = "gzip" + OutputKafkaCompressionLz4 OutputKafkaCompression = "lz4" + OutputKafkaCompressionNone OutputKafkaCompression = "none" + OutputKafkaCompressionSnappy OutputKafkaCompression = "snappy" +) + +// Defines values for OutputKafkaPartition. 
+const ( + OutputKafkaPartitionHash OutputKafkaPartition = "hash" + OutputKafkaPartitionRandom OutputKafkaPartition = "random" + OutputKafkaPartitionRoundRobin OutputKafkaPartition = "round_robin" +) + +// Defines values for OutputKafkaRequiredAcks. +const ( + OutputKafkaRequiredAcksMinus1 OutputKafkaRequiredAcks = -1 + OutputKafkaRequiredAcksN0 OutputKafkaRequiredAcks = 0 + OutputKafkaRequiredAcksN1 OutputKafkaRequiredAcks = 1 +) + +// Defines values for OutputKafkaSaslMechanism. +const ( + OutputKafkaSaslMechanismPLAIN OutputKafkaSaslMechanism = "PLAIN" + OutputKafkaSaslMechanismSCRAMSHA256 OutputKafkaSaslMechanism = "SCRAM-SHA-256" + OutputKafkaSaslMechanismSCRAMSHA512 OutputKafkaSaslMechanism = "SCRAM-SHA-512" +) + +// Defines values for OutputKafkaTopicsWhenType. +const ( + OutputKafkaTopicsWhenTypeContains OutputKafkaTopicsWhenType = "contains" + OutputKafkaTopicsWhenTypeEquals OutputKafkaTopicsWhenType = "equals" + OutputKafkaTopicsWhenTypeRegexp OutputKafkaTopicsWhenType = "regexp" +) + +// Defines values for OutputKafkaType. +const ( + OutputKafkaTypeKafka OutputKafkaType = "kafka" +) + +// Defines values for OutputLogstashType. +const ( + OutputLogstashTypeLogstash OutputLogstashType = "logstash" +) + +// Defines values for OutputRemoteElasticsearchPreset. +const ( + OutputRemoteElasticsearchPresetBalanced OutputRemoteElasticsearchPreset = "balanced" + OutputRemoteElasticsearchPresetCustom OutputRemoteElasticsearchPreset = "custom" + OutputRemoteElasticsearchPresetLatency OutputRemoteElasticsearchPreset = "latency" + OutputRemoteElasticsearchPresetScale OutputRemoteElasticsearchPreset = "scale" + OutputRemoteElasticsearchPresetThroughput OutputRemoteElasticsearchPreset = "throughput" +) + +// Defines values for OutputRemoteElasticsearchType. +const ( + OutputRemoteElasticsearchTypeRemoteElasticsearch OutputRemoteElasticsearchType = "remote_elasticsearch" +) + +// Defines values for OutputSslVerificationMode. +const ( + OutputSslVerificationModeCertificate OutputSslVerificationMode = "certificate" + OutputSslVerificationModeFull OutputSslVerificationMode = "full" + OutputSslVerificationModeNone OutputSslVerificationMode = "none" + OutputSslVerificationModeStrict OutputSslVerificationMode = "strict" +) + +// Defines values for PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType. 
+const ( + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaTypeCspRuleTemplate PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType = "csp-rule-template" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaTypeDashboard PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType = "dashboard" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaTypeIndexPattern PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType = "index-pattern" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaTypeLens PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType = "lens" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaTypeMap PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType = "map" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaTypeMlModule PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType = "ml-module" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaTypeOsqueryPackAsset PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType = "osquery-pack-asset" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaTypeOsquerySavedQuery PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType = "osquery-saved-query" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaTypeSearch PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType = "search" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaTypeSecurityRule PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType = "security-rule" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaTypeTag PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType = "tag" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaTypeVisualization PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType = "visualization" +) + +// Defines values for PackageInfoInstallationInfoInstallSource. +const ( + PackageInfoInstallationInfoInstallSourceBundled PackageInfoInstallationInfoInstallSource = "bundled" + PackageInfoInstallationInfoInstallSourceCustom PackageInfoInstallationInfoInstallSource = "custom" + PackageInfoInstallationInfoInstallSourceRegistry PackageInfoInstallationInfoInstallSource = "registry" + PackageInfoInstallationInfoInstallSourceUpload PackageInfoInstallationInfoInstallSource = "upload" +) + +// Defines values for PackageInfoInstallationInfoInstallStatus. +const ( + PackageInfoInstallationInfoInstallStatusInstallFailed PackageInfoInstallationInfoInstallStatus = "install_failed" + PackageInfoInstallationInfoInstallStatusInstalled PackageInfoInstallationInfoInstallStatus = "installed" + PackageInfoInstallationInfoInstallStatusInstalling PackageInfoInstallationInfoInstallStatus = "installing" +) + +// Defines values for PackageInfoInstallationInfoInstalledEsType. 
+const ( + PackageInfoInstallationInfoInstalledEsTypeComponentTemplate PackageInfoInstallationInfoInstalledEsType = "component_template" + PackageInfoInstallationInfoInstalledEsTypeDataStreamIlmPolicy PackageInfoInstallationInfoInstalledEsType = "data_stream_ilm_policy" + PackageInfoInstallationInfoInstalledEsTypeIlmPolicy PackageInfoInstallationInfoInstalledEsType = "ilm_policy" + PackageInfoInstallationInfoInstalledEsTypeIndex PackageInfoInstallationInfoInstalledEsType = "index" + PackageInfoInstallationInfoInstalledEsTypeIndexTemplate PackageInfoInstallationInfoInstalledEsType = "index_template" + PackageInfoInstallationInfoInstalledEsTypeIngestPipeline PackageInfoInstallationInfoInstalledEsType = "ingest_pipeline" + PackageInfoInstallationInfoInstalledEsTypeMlModel PackageInfoInstallationInfoInstalledEsType = "ml_model" + PackageInfoInstallationInfoInstalledEsTypeTransform PackageInfoInstallationInfoInstalledEsType = "transform" +) + +// Defines values for PackageInfoInstallationInfoInstalledKibanaType. +const ( + PackageInfoInstallationInfoInstalledKibanaTypeCspRuleTemplate PackageInfoInstallationInfoInstalledKibanaType = "csp-rule-template" + PackageInfoInstallationInfoInstalledKibanaTypeDashboard PackageInfoInstallationInfoInstalledKibanaType = "dashboard" + PackageInfoInstallationInfoInstalledKibanaTypeIndexPattern PackageInfoInstallationInfoInstalledKibanaType = "index-pattern" + PackageInfoInstallationInfoInstalledKibanaTypeLens PackageInfoInstallationInfoInstalledKibanaType = "lens" + PackageInfoInstallationInfoInstalledKibanaTypeMap PackageInfoInstallationInfoInstalledKibanaType = "map" + PackageInfoInstallationInfoInstalledKibanaTypeMlModule PackageInfoInstallationInfoInstalledKibanaType = "ml-module" + PackageInfoInstallationInfoInstalledKibanaTypeOsqueryPackAsset PackageInfoInstallationInfoInstalledKibanaType = "osquery-pack-asset" + PackageInfoInstallationInfoInstalledKibanaTypeOsquerySavedQuery PackageInfoInstallationInfoInstalledKibanaType = "osquery-saved-query" + PackageInfoInstallationInfoInstalledKibanaTypeSearch PackageInfoInstallationInfoInstalledKibanaType = "search" + PackageInfoInstallationInfoInstalledKibanaTypeSecurityRule PackageInfoInstallationInfoInstalledKibanaType = "security-rule" + PackageInfoInstallationInfoInstalledKibanaTypeTag PackageInfoInstallationInfoInstalledKibanaType = "tag" + PackageInfoInstallationInfoInstalledKibanaTypeVisualization PackageInfoInstallationInfoInstalledKibanaType = "visualization" +) + +// Defines values for PackageInfoInstallationInfoVerificationStatus. +const ( + PackageInfoInstallationInfoVerificationStatusUnknown PackageInfoInstallationInfoVerificationStatus = "unknown" + PackageInfoInstallationInfoVerificationStatusUnverified PackageInfoInstallationInfoVerificationStatus = "unverified" + PackageInfoInstallationInfoVerificationStatusVerified PackageInfoInstallationInfoVerificationStatus = "verified" +) + +// Defines values for PackageInfoOwnerType. +const ( + PackageInfoOwnerTypeCommunity PackageInfoOwnerType = "community" + PackageInfoOwnerTypeElastic PackageInfoOwnerType = "elastic" + PackageInfoOwnerTypePartner PackageInfoOwnerType = "partner" ) // Defines values for PackageInfoRelease. 
const ( - Beta PackageInfoRelease = "beta" - Experimental PackageInfoRelease = "experimental" - Ga PackageInfoRelease = "ga" + PackageInfoReleaseBeta PackageInfoRelease = "beta" + PackageInfoReleaseExperimental PackageInfoRelease = "experimental" + PackageInfoReleaseGa PackageInfoRelease = "ga" +) + +// Defines values for PackageInfoType. +const ( + PackageInfoTypeInput PackageInfoType = "input" + PackageInfoTypeIntegration PackageInfoType = "integration" +) + +// Defines values for PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType. +const ( + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaTypeCspRuleTemplate PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType = "csp-rule-template" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaTypeDashboard PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType = "dashboard" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaTypeIndexPattern PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType = "index-pattern" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaTypeLens PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType = "lens" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaTypeMap PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType = "map" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaTypeMlModule PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType = "ml-module" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaTypeOsqueryPackAsset PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType = "osquery-pack-asset" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaTypeOsquerySavedQuery PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType = "osquery-saved-query" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaTypeSearch PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType = "search" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaTypeSecurityRule PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType = "security-rule" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaTypeTag PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType = "tag" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaTypeVisualization PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType = "visualization" +) + +// Defines values for PackageListItemInstallationInfoInstallSource. +const ( + PackageListItemInstallationInfoInstallSourceBundled PackageListItemInstallationInfoInstallSource = "bundled" + PackageListItemInstallationInfoInstallSourceCustom PackageListItemInstallationInfoInstallSource = "custom" + PackageListItemInstallationInfoInstallSourceRegistry PackageListItemInstallationInfoInstallSource = "registry" + PackageListItemInstallationInfoInstallSourceUpload PackageListItemInstallationInfoInstallSource = "upload" +) + +// Defines values for PackageListItemInstallationInfoInstallStatus. +const ( + PackageListItemInstallationInfoInstallStatusInstallFailed PackageListItemInstallationInfoInstallStatus = "install_failed" + PackageListItemInstallationInfoInstallStatusInstalled PackageListItemInstallationInfoInstallStatus = "installed" + PackageListItemInstallationInfoInstallStatusInstalling PackageListItemInstallationInfoInstallStatus = "installing" +) + +// Defines values for PackageListItemInstallationInfoInstalledEsType. 
+const ( + PackageListItemInstallationInfoInstalledEsTypeComponentTemplate PackageListItemInstallationInfoInstalledEsType = "component_template" + PackageListItemInstallationInfoInstalledEsTypeDataStreamIlmPolicy PackageListItemInstallationInfoInstalledEsType = "data_stream_ilm_policy" + PackageListItemInstallationInfoInstalledEsTypeIlmPolicy PackageListItemInstallationInfoInstalledEsType = "ilm_policy" + PackageListItemInstallationInfoInstalledEsTypeIndex PackageListItemInstallationInfoInstalledEsType = "index" + PackageListItemInstallationInfoInstalledEsTypeIndexTemplate PackageListItemInstallationInfoInstalledEsType = "index_template" + PackageListItemInstallationInfoInstalledEsTypeIngestPipeline PackageListItemInstallationInfoInstalledEsType = "ingest_pipeline" + PackageListItemInstallationInfoInstalledEsTypeMlModel PackageListItemInstallationInfoInstalledEsType = "ml_model" + PackageListItemInstallationInfoInstalledEsTypeTransform PackageListItemInstallationInfoInstalledEsType = "transform" +) + +// Defines values for PackageListItemInstallationInfoInstalledKibanaType. +const ( + PackageListItemInstallationInfoInstalledKibanaTypeCspRuleTemplate PackageListItemInstallationInfoInstalledKibanaType = "csp-rule-template" + PackageListItemInstallationInfoInstalledKibanaTypeDashboard PackageListItemInstallationInfoInstalledKibanaType = "dashboard" + PackageListItemInstallationInfoInstalledKibanaTypeIndexPattern PackageListItemInstallationInfoInstalledKibanaType = "index-pattern" + PackageListItemInstallationInfoInstalledKibanaTypeLens PackageListItemInstallationInfoInstalledKibanaType = "lens" + PackageListItemInstallationInfoInstalledKibanaTypeMap PackageListItemInstallationInfoInstalledKibanaType = "map" + PackageListItemInstallationInfoInstalledKibanaTypeMlModule PackageListItemInstallationInfoInstalledKibanaType = "ml-module" + PackageListItemInstallationInfoInstalledKibanaTypeOsqueryPackAsset PackageListItemInstallationInfoInstalledKibanaType = "osquery-pack-asset" + PackageListItemInstallationInfoInstalledKibanaTypeOsquerySavedQuery PackageListItemInstallationInfoInstalledKibanaType = "osquery-saved-query" + PackageListItemInstallationInfoInstalledKibanaTypeSearch PackageListItemInstallationInfoInstalledKibanaType = "search" + PackageListItemInstallationInfoInstalledKibanaTypeSecurityRule PackageListItemInstallationInfoInstalledKibanaType = "security-rule" + PackageListItemInstallationInfoInstalledKibanaTypeTag PackageListItemInstallationInfoInstalledKibanaType = "tag" + PackageListItemInstallationInfoInstalledKibanaTypeVisualization PackageListItemInstallationInfoInstalledKibanaType = "visualization" +) + +// Defines values for PackageListItemInstallationInfoVerificationStatus. +const ( + PackageListItemInstallationInfoVerificationStatusUnknown PackageListItemInstallationInfoVerificationStatus = "unknown" + PackageListItemInstallationInfoVerificationStatusUnverified PackageListItemInstallationInfoVerificationStatus = "unverified" + PackageListItemInstallationInfoVerificationStatusVerified PackageListItemInstallationInfoVerificationStatus = "verified" +) + +// Defines values for PackageListItemOwnerType. +const ( + PackageListItemOwnerTypeCommunity PackageListItemOwnerType = "community" + PackageListItemOwnerTypeElastic PackageListItemOwnerType = "elastic" + PackageListItemOwnerTypePartner PackageListItemOwnerType = "partner" +) + +// Defines values for PackageListItemRelease. 
+const ( + Beta PackageListItemRelease = "beta" + Experimental PackageListItemRelease = "experimental" + Ga PackageListItemRelease = "ga" +) + +// Defines values for PackageListItemType. +const ( + PackageListItemTypeInput PackageListItemType = "input" + PackageListItemTypeIntegration PackageListItemType = "integration" +) + +// Defines values for UpdateOutputElasticsearchPreset. +const ( + UpdateOutputElasticsearchPresetBalanced UpdateOutputElasticsearchPreset = "balanced" + UpdateOutputElasticsearchPresetCustom UpdateOutputElasticsearchPreset = "custom" + UpdateOutputElasticsearchPresetLatency UpdateOutputElasticsearchPreset = "latency" + UpdateOutputElasticsearchPresetScale UpdateOutputElasticsearchPreset = "scale" + UpdateOutputElasticsearchPresetThroughput UpdateOutputElasticsearchPreset = "throughput" +) + +// Defines values for UpdateOutputElasticsearchType. +const ( + Elasticsearch UpdateOutputElasticsearchType = "elasticsearch" +) + +// Defines values for UpdateOutputKafkaAuthType. +const ( + UpdateOutputKafkaAuthTypeKerberos UpdateOutputKafkaAuthType = "kerberos" + UpdateOutputKafkaAuthTypeNone UpdateOutputKafkaAuthType = "none" + UpdateOutputKafkaAuthTypeSsl UpdateOutputKafkaAuthType = "ssl" + UpdateOutputKafkaAuthTypeUserPass UpdateOutputKafkaAuthType = "user_pass" +) + +// Defines values for UpdateOutputKafkaCompression. +const ( + UpdateOutputKafkaCompressionGzip UpdateOutputKafkaCompression = "gzip" + UpdateOutputKafkaCompressionLz4 UpdateOutputKafkaCompression = "lz4" + UpdateOutputKafkaCompressionNone UpdateOutputKafkaCompression = "none" + UpdateOutputKafkaCompressionSnappy UpdateOutputKafkaCompression = "snappy" +) + +// Defines values for UpdateOutputKafkaPartition. +const ( + Hash UpdateOutputKafkaPartition = "hash" + Random UpdateOutputKafkaPartition = "random" + RoundRobin UpdateOutputKafkaPartition = "round_robin" +) + +// Defines values for UpdateOutputKafkaRequiredAcks. +const ( + Minus1 UpdateOutputKafkaRequiredAcks = -1 + N0 UpdateOutputKafkaRequiredAcks = 0 + N1 UpdateOutputKafkaRequiredAcks = 1 +) + +// Defines values for UpdateOutputKafkaSaslMechanism. +const ( + PLAIN UpdateOutputKafkaSaslMechanism = "PLAIN" + SCRAMSHA256 UpdateOutputKafkaSaslMechanism = "SCRAM-SHA-256" + SCRAMSHA512 UpdateOutputKafkaSaslMechanism = "SCRAM-SHA-512" +) + +// Defines values for UpdateOutputKafkaTopicsWhenType. +const ( + Contains UpdateOutputKafkaTopicsWhenType = "contains" + Equals UpdateOutputKafkaTopicsWhenType = "equals" + Regexp UpdateOutputKafkaTopicsWhenType = "regexp" +) + +// Defines values for UpdateOutputKafkaType. +const ( + Kafka UpdateOutputKafkaType = "kafka" +) + +// Defines values for UpdateOutputLogstashType. +const ( + Logstash UpdateOutputLogstashType = "logstash" +) + +// Defines values for UpdateOutputRemoteElasticsearchPreset. +const ( + UpdateOutputRemoteElasticsearchPresetBalanced UpdateOutputRemoteElasticsearchPreset = "balanced" + UpdateOutputRemoteElasticsearchPresetCustom UpdateOutputRemoteElasticsearchPreset = "custom" + UpdateOutputRemoteElasticsearchPresetLatency UpdateOutputRemoteElasticsearchPreset = "latency" + UpdateOutputRemoteElasticsearchPresetScale UpdateOutputRemoteElasticsearchPreset = "scale" + UpdateOutputRemoteElasticsearchPresetThroughput UpdateOutputRemoteElasticsearchPreset = "throughput" +) + +// Defines values for UpdateOutputRemoteElasticsearchType. +const ( + RemoteElasticsearch UpdateOutputRemoteElasticsearchType = "remote_elasticsearch" +) + +// Defines values for UpdateOutputSslVerificationMode. 
+const ( + Certificate UpdateOutputSslVerificationMode = "certificate" + Full UpdateOutputSslVerificationMode = "full" + None UpdateOutputSslVerificationMode = "none" + Strict UpdateOutputSslVerificationMode = "strict" +) + +// Defines values for GetAgentPoliciesParamsSortOrder. +const ( + GetAgentPoliciesParamsSortOrderAsc GetAgentPoliciesParamsSortOrder = "asc" + GetAgentPoliciesParamsSortOrderDesc GetAgentPoliciesParamsSortOrder = "desc" +) + +// Defines values for GetAgentPoliciesParamsFormat. +const ( + GetAgentPoliciesParamsFormatLegacy GetAgentPoliciesParamsFormat = "legacy" + GetAgentPoliciesParamsFormatSimplified GetAgentPoliciesParamsFormat = "simplified" +) + +// Defines values for CreateAgentPolicyJSONBodyMonitoringEnabled. +const ( + CreateAgentPolicyJSONBodyMonitoringEnabledLogs CreateAgentPolicyJSONBodyMonitoringEnabled = "logs" + CreateAgentPolicyJSONBodyMonitoringEnabledMetrics CreateAgentPolicyJSONBodyMonitoringEnabled = "metrics" + CreateAgentPolicyJSONBodyMonitoringEnabledTraces CreateAgentPolicyJSONBodyMonitoringEnabled = "traces" +) + +// Defines values for GetAgentPolicyParamsFormat. +const ( + GetAgentPolicyParamsFormatLegacy GetAgentPolicyParamsFormat = "legacy" + GetAgentPolicyParamsFormatSimplified GetAgentPolicyParamsFormat = "simplified" ) -// Defines values for PackageInfoSourceLicense. +// Defines values for UpdateAgentPolicyParamsFormat. const ( - Apache20 PackageInfoSourceLicense = "Apache-2.0" - Elastic20 PackageInfoSourceLicense = "Elastic-2.0" + UpdateAgentPolicyParamsFormatLegacy UpdateAgentPolicyParamsFormat = "legacy" + UpdateAgentPolicyParamsFormatSimplified UpdateAgentPolicyParamsFormat = "simplified" ) -// Defines values for PackageInstallSource. +// Defines values for UpdateAgentPolicyJSONBodyMonitoringEnabled. const ( - Bundled PackageInstallSource = "bundled" - Registry PackageInstallSource = "registry" - Upload PackageInstallSource = "upload" + Logs UpdateAgentPolicyJSONBodyMonitoringEnabled = "logs" + Metrics UpdateAgentPolicyJSONBodyMonitoringEnabled = "metrics" + Traces UpdateAgentPolicyJSONBodyMonitoringEnabled = "traces" ) -// Defines values for PackageStatus. +// Defines values for GetPackagePoliciesParamsSortOrder. const ( - InstallFailed PackageStatus = "install_failed" - Installed PackageStatus = "installed" - Installing PackageStatus = "installing" - NotInstalled PackageStatus = "not_installed" + GetPackagePoliciesParamsSortOrderAsc GetPackagePoliciesParamsSortOrder = "asc" + GetPackagePoliciesParamsSortOrderDesc GetPackagePoliciesParamsSortOrder = "desc" ) -// Defines values for Format. +// Defines values for GetPackagePoliciesParamsFormat. const ( - FormatLegacy Format = "legacy" - FormatSimplified Format = "simplified" + GetPackagePoliciesParamsFormatLegacy GetPackagePoliciesParamsFormat = "legacy" + GetPackagePoliciesParamsFormatSimplified GetPackagePoliciesParamsFormat = "simplified" ) // Defines values for CreatePackagePolicyParamsFormat. @@ -177,12 +548,19 @@ const ( // Defines values for UpdatePackagePolicyParamsFormat. const ( - UpdatePackagePolicyParamsFormatLegacy UpdatePackagePolicyParamsFormat = "legacy" - UpdatePackagePolicyParamsFormatSimplified UpdatePackagePolicyParamsFormat = "simplified" + Legacy UpdatePackagePolicyParamsFormat = "legacy" + Simplified UpdatePackagePolicyParamsFormat = "simplified" ) // AgentPolicy defines model for agent_policy. 
type AgentPolicy struct { + AdvancedSettings *struct { + AgentDownloadTargetDirectory *interface{} `json:"agent_download_target_directory"` + AgentDownloadTimeout *interface{} `json:"agent_download_timeout"` + AgentLimitsGoMaxProcs *interface{} `json:"agent_limits_go_max_procs"` + AgentLoggingLevel *interface{} `json:"agent_logging_level"` + AgentLoggingMetricsPeriod *interface{} `json:"agent_logging_metrics_period"` + } `json:"advanced_settings,omitempty"` AgentFeatures *[]struct { Enabled bool `json:"enabled"` Name string `json:"name"` @@ -192,776 +570,10323 @@ type AgentPolicy struct { Description *string `json:"description,omitempty"` DownloadSourceId *string `json:"download_source_id"` FleetServerHostId *string `json:"fleet_server_host_id"` - Id string `json:"id"` - InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` + + // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. + GlobalDataTags *[]struct { + Name string `json:"name"` + Value AgentPolicy_GlobalDataTags_Value `json:"value"` + } `json:"global_data_tags,omitempty"` + HasFleetServer *bool `json:"has_fleet_server,omitempty"` + Id string `json:"id"` + InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultFleetServer *bool `json:"is_default_fleet_server,omitempty"` + IsManaged bool `json:"is_managed"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` // IsProtected Indicates whether the agent policy has tamper protection enabled. Default false. - IsProtected *bool `json:"is_protected,omitempty"` - MonitoringEnabled *[]AgentPolicyMonitoringEnabled `json:"monitoring_enabled,omitempty"` - MonitoringOutputId *string `json:"monitoring_output_id"` - Name string `json:"name"` - Namespace string `json:"namespace"` + IsProtected bool `json:"is_protected"` + + // KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled + KeepMonitoringAlive *bool `json:"keep_monitoring_alive"` + MonitoringDiagnostics *struct { + Limit *struct { + Burst *float32 `json:"burst,omitempty"` + Interval *string `json:"interval,omitempty"` + } `json:"limit,omitempty"` + Uploader *struct { + InitDur *string `json:"init_dur,omitempty"` + MaxDur *string `json:"max_dur,omitempty"` + MaxRetries *float32 `json:"max_retries,omitempty"` + } `json:"uploader,omitempty"` + } `json:"monitoring_diagnostics,omitempty"` + MonitoringEnabled *[]AgentPolicyMonitoringEnabled `json:"monitoring_enabled,omitempty"` + MonitoringHttp *struct { + Buffer *struct { + Enabled *bool `json:"enabled,omitempty"` + } `json:"buffer,omitempty"` + Enabled bool `json:"enabled"` + Host *string `json:"host,omitempty"` + Port *float32 `json:"port,omitempty"` + } `json:"monitoring_http,omitempty"` + MonitoringOutputId *string `json:"monitoring_output_id"` + MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` + Name string `json:"name"` + Namespace string `json:"namespace"` // Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure. 
- Overrides *map[string]interface{} `json:"overrides"` - Revision *float32 `json:"revision,omitempty"` - UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` - UpdatedBy *string `json:"updated_by,omitempty"` - UpdatedOn *time.Time `json:"updated_on,omitempty"` + Overrides *map[string]interface{} `json:"overrides"` + PackagePolicies *AgentPolicy_PackagePolicies `json:"package_policies,omitempty"` + Revision float32 `json:"revision"` + SchemaVersion *string `json:"schema_version,omitempty"` + SpaceIds *[]string `json:"space_ids,omitempty"` + Status AgentPolicyStatus `json:"status"` + + // SupportsAgentless Indicates whether the agent policy supports agentless integrations. + SupportsAgentless *bool `json:"supports_agentless"` + UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` + UnprivilegedAgents *float32 `json:"unprivileged_agents,omitempty"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Version *string `json:"version,omitempty"` +} + +// AgentPolicyGlobalDataTagsValue0 defines model for . +type AgentPolicyGlobalDataTagsValue0 = string + +// AgentPolicyGlobalDataTagsValue1 defines model for . +type AgentPolicyGlobalDataTagsValue1 = float32 + +// AgentPolicy_GlobalDataTags_Value defines model for AgentPolicy.GlobalDataTags.Value. +type AgentPolicy_GlobalDataTags_Value struct { + union json.RawMessage } // AgentPolicyMonitoringEnabled defines model for AgentPolicy.MonitoringEnabled. type AgentPolicyMonitoringEnabled string -// AgentPolicyCreateRequest defines model for agent_policy_create_request. -type AgentPolicyCreateRequest struct { - AgentFeatures *[]struct { - Enabled bool `json:"enabled"` - Name string `json:"name"` - } `json:"agent_features,omitempty"` - DataOutputId *string `json:"data_output_id"` - Description *string `json:"description,omitempty"` - DownloadSourceId *string `json:"download_source_id"` - FleetServerHostId *string `json:"fleet_server_host_id"` - Id *string `json:"id,omitempty"` - InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` - IsProtected *bool `json:"is_protected,omitempty"` - MonitoringEnabled *[]AgentPolicyCreateRequestMonitoringEnabled `json:"monitoring_enabled,omitempty"` - MonitoringOutputId *string `json:"monitoring_output_id"` - Name string `json:"name"` - Namespace string `json:"namespace"` - UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` -} - -// AgentPolicyCreateRequestMonitoringEnabled defines model for AgentPolicyCreateRequest.MonitoringEnabled. -type AgentPolicyCreateRequestMonitoringEnabled string - -// AgentPolicyUpdateRequest defines model for agent_policy_update_request. -type AgentPolicyUpdateRequest struct { - AgentFeatures *[]struct { - Enabled bool `json:"enabled"` - Name string `json:"name"` - } `json:"agent_features,omitempty"` - DataOutputId *string `json:"data_output_id"` - Description *string `json:"description,omitempty"` - DownloadSourceId *string `json:"download_source_id"` - FleetServerHostId *string `json:"fleet_server_host_id"` - InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` - IsProtected *bool `json:"is_protected,omitempty"` - MonitoringEnabled *[]AgentPolicyUpdateRequestMonitoringEnabled `json:"monitoring_enabled,omitempty"` - MonitoringOutputId *string `json:"monitoring_output_id"` - Name string `json:"name"` - Namespace string `json:"namespace"` - UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` -} +// AgentPolicyPackagePolicies0 defines model for . 
+type AgentPolicyPackagePolicies0 = []string -// AgentPolicyUpdateRequestMonitoringEnabled defines model for AgentPolicyUpdateRequest.MonitoringEnabled. -type AgentPolicyUpdateRequestMonitoringEnabled string +// AgentPolicyPackagePolicies1 This field is present only when retrieving a single agent policy, or when retrieving a list of agent policies with the ?full=true parameter +type AgentPolicyPackagePolicies1 = []struct { + CreatedAt string `json:"created_at"` + CreatedBy string `json:"created_by"` -// ElasticsearchAssetType defines model for elasticsearch_asset_type. -type ElasticsearchAssetType string + // Description Package policy description + Description *string `json:"description,omitempty"` + Elasticsearch *AgentPolicy_PackagePolicies_1_Elasticsearch `json:"elasticsearch,omitempty"` + Enabled bool `json:"enabled"` + Id string `json:"id"` + Inputs []struct { + CompiledInput interface{} `json:"compiled_input"` + + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool `json:"keep_enabled,omitempty"` + PolicyTemplate *string `json:"policy_template,omitempty"` + Streams []struct { + CompiledStream interface{} `json:"compiled_stream"` + + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + DataStream struct { + Dataset string `json:"dataset"` + Elasticsearch *struct { + DynamicDataset *bool `json:"dynamic_dataset,omitempty"` + DynamicNamespace *bool `json:"dynamic_namespace,omitempty"` + Privileges *struct { + Indices *[]string `json:"indices,omitempty"` + } `json:"privileges,omitempty"` + } `json:"elasticsearch,omitempty"` + Type string `json:"type"` + } `json:"data_stream"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool `json:"keep_enabled,omitempty"` + Release *AgentPolicyPackagePolicies1InputsStreamsRelease `json:"release,omitempty"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` + } `json:"streams"` + Type string `json:"type"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` + } `json:"inputs"` + IsManaged *bool `json:"is_managed,omitempty"` -// EnrollmentApiKey defines model for enrollment_api_key. -type EnrollmentApiKey struct { - Active bool `json:"active"` - ApiKey string `json:"api_key"` - ApiKeyId string `json:"api_key_id"` - CreatedAt string `json:"created_at"` - Id string `json:"id"` - Name *string `json:"name,omitempty"` - PolicyId *string `json:"policy_id,omitempty"` -} + // Name Package policy name (should be unique) + Name string `json:"name"` -// FleetServerHost defines model for fleet_server_host. 
-type FleetServerHost struct { - HostUrls []string `json:"host_urls"` - Id string `json:"id"` - IsDefault bool `json:"is_default"` - IsPreconfigured bool `json:"is_preconfigured"` - Name *string `json:"name,omitempty"` -} + // Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace. + Namespace *string `json:"namespace,omitempty"` + OutputId *string `json:"output_id"` + + // Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *struct { + Inputs *map[string]interface{} `json:"inputs,omitempty"` + } `json:"overrides"` + Package *struct { + ExperimentalDataStreamFeatures *[]struct { + DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` -// GetPackagesResponse defines model for get_packages_response. -type GetPackagesResponse struct { - Items []SearchResult `json:"items"` - // Deprecated: - Response *[]SearchResult `json:"response,omitempty"` -} + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` -// KibanaSavedObjectType defines model for kibana_saved_object_type. -type KibanaSavedObjectType string + // Version Package version + Version string `json:"version"` + } `json:"package,omitempty"` -// NewPackagePolicy defines model for new_package_policy. -type NewPackagePolicy struct { - Description *string `json:"description,omitempty"` - Enabled *bool `json:"enabled,omitempty"` - Inputs map[string]PackagePolicyInput `json:"inputs"` - Name string `json:"name"` - Namespace *string `json:"namespace,omitempty"` + // PolicyId Agent policy ID where that package policy will be added // Deprecated: - OutputId *string `json:"output_id,omitempty"` - Package *PackagePolicyPackageInfo `json:"package,omitempty"` - PolicyId *string `json:"policy_id,omitempty"` + PolicyId *string `json:"policy_id"` + PolicyIds *[]string `json:"policy_ids,omitempty"` + Revision float32 `json:"revision"` SecretReferences *[]struct { - Id *string `json:"id,omitempty"` + Id string `json:"id"` } `json:"secret_references,omitempty"` - Vars *map[string]interface{} `json:"vars,omitempty"` -} + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` -// OutputCreateRequest defines model for output_create_request. -type OutputCreateRequest struct { - union json.RawMessage + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` + Version *string `json:"version,omitempty"` } -// OutputCreateRequestElasticsearch defines model for output_create_request_elasticsearch. 
-type OutputCreateRequestElasticsearch struct { - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - Config *map[string]interface{} `json:"config,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - Hosts *[]string `json:"hosts,omitempty"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - Name string `json:"name"` - ProxyId *string `json:"proxy_id,omitempty"` - Shipper *struct { - CompressionLevel *float32 `json:"compression_level,omitempty"` - DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` - DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` - DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` - DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` - DiskQueuePath *string `json:"disk_queue_path,omitempty"` - Loadbalance *bool `json:"loadbalance,omitempty"` - } `json:"shipper,omitempty"` - Ssl *struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - Key *string `json:"key,omitempty"` - } `json:"ssl,omitempty"` - Type OutputCreateRequestElasticsearchType `json:"type"` -} - -// OutputCreateRequestElasticsearchType defines model for OutputCreateRequestElasticsearch.Type. -type OutputCreateRequestElasticsearchType string - -// OutputCreateRequestKafka defines model for output_create_request_kafka. -type OutputCreateRequestKafka struct { - AuthType string `json:"auth_type"` - BrokerTimeout *float32 `json:"broker_timeout,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ClientId *string `json:"client_id,omitempty"` - Compression *string `json:"compression,omitempty"` - CompressionLevel *float32 `json:"compression_level,omitempty"` - Config *map[string]interface{} `json:"config,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - ConnectionType *OutputCreateRequestKafkaConnectionType `json:"connection_type,omitempty"` - Headers *[]struct { - Key *string `json:"key,omitempty"` - Value *string `json:"value,omitempty"` - } `json:"headers,omitempty"` - Hosts []string `json:"hosts"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - Key *string `json:"key,omitempty"` - Name string `json:"name"` - Partition *string `json:"partition,omitempty"` - Password *string `json:"password,omitempty"` - ProxyId *string `json:"proxy_id,omitempty"` - Random *struct { - GroupEvents *float32 `json:"group_events,omitempty"` - } `json:"random,omitempty"` - RequiredAcks *float32 `json:"required_acks,omitempty"` - RoundRobin *struct { - GroupEvents *float32 `json:"group_events,omitempty"` - } `json:"round_robin,omitempty"` - Sasl *struct { - Mechanism *string `json:"mechanism,omitempty"` - } `json:"sasl,omitempty"` - Shipper *struct { - CompressionLevel *float32 `json:"compression_level,omitempty"` - DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` - DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` - DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` - DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` - DiskQueuePath *string `json:"disk_queue_path,omitempty"` - Loadbalance *bool 
`json:"loadbalance,omitempty"` - } `json:"shipper,omitempty"` - Ssl *struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - Key *string `json:"key,omitempty"` - VerificationMode *OutputCreateRequestKafkaSslVerificationMode `json:"verification_mode,omitempty"` - } `json:"ssl,omitempty"` - Timeout *float32 `json:"timeout,omitempty"` - Topics []struct { - Topic *string `json:"topic,omitempty"` - When *struct { - Condition *string `json:"condition,omitempty"` - Type *string `json:"type,omitempty"` - } `json:"when,omitempty"` - } `json:"topics"` - Type OutputCreateRequestKafkaType `json:"type"` - Username *string `json:"username,omitempty"` - Version *string `json:"version,omitempty"` +// AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges defines model for AgentPolicy.PackagePolicies.1.Elasticsearch.Privileges. +type AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges struct { + Cluster *[]string `json:"cluster,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` } -// OutputCreateRequestKafkaConnectionType defines model for OutputCreateRequestKafka.ConnectionType. -type OutputCreateRequestKafkaConnectionType string - -// OutputCreateRequestKafkaSslVerificationMode defines model for OutputCreateRequestKafka.Ssl.VerificationMode. -type OutputCreateRequestKafkaSslVerificationMode string +// AgentPolicy_PackagePolicies_1_Elasticsearch defines model for AgentPolicy.PackagePolicies.1.Elasticsearch. +type AgentPolicy_PackagePolicies_1_Elasticsearch struct { + Privileges *AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges `json:"privileges,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} -// OutputCreateRequestKafkaType defines model for OutputCreateRequestKafka.Type. -type OutputCreateRequestKafkaType string +// AgentPolicyPackagePolicies1InputsStreamsRelease defines model for AgentPolicy.PackagePolicies.1.Inputs.Streams.Release. +type AgentPolicyPackagePolicies1InputsStreamsRelease string -// OutputCreateRequestLogstash defines model for output_create_request_logstash. -type OutputCreateRequestLogstash struct { - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - Config *map[string]interface{} `json:"config,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - Hosts []string `json:"hosts"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - Name string `json:"name"` - ProxyId *string `json:"proxy_id,omitempty"` - Shipper *struct { - CompressionLevel *float32 `json:"compression_level,omitempty"` - DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` - DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` - DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` - DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` - DiskQueuePath *string `json:"disk_queue_path,omitempty"` - Loadbalance *bool `json:"loadbalance,omitempty"` - } `json:"shipper,omitempty"` - Ssl *struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - Key *string `json:"key,omitempty"` - } `json:"ssl,omitempty"` - Type OutputCreateRequestLogstashType `json:"type"` -} - -// OutputCreateRequestLogstashType defines model for OutputCreateRequestLogstash.Type. 
-type OutputCreateRequestLogstashType string - -// OutputUpdateRequest defines model for output_update_request. -type OutputUpdateRequest struct { +// AgentPolicy_PackagePolicies defines model for AgentPolicy.PackagePolicies. +type AgentPolicy_PackagePolicies struct { union json.RawMessage } -// OutputUpdateRequestElasticsearch defines model for output_update_request_elasticsearch. -type OutputUpdateRequestElasticsearch struct { - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - Config *map[string]interface{} `json:"config,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - Hosts []string `json:"hosts"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - Name string `json:"name"` - ProxyId *string `json:"proxy_id,omitempty"` - Shipper *struct { - CompressionLevel *float32 `json:"compression_level,omitempty"` - DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` - DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` - DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` - DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` - DiskQueuePath *string `json:"disk_queue_path,omitempty"` - Loadbalance *bool `json:"loadbalance,omitempty"` - } `json:"shipper,omitempty"` - Ssl *struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - Key *string `json:"key,omitempty"` - } `json:"ssl,omitempty"` - Type OutputUpdateRequestElasticsearchType `json:"type"` -} - -// OutputUpdateRequestElasticsearchType defines model for OutputUpdateRequestElasticsearch.Type. -type OutputUpdateRequestElasticsearchType string - -// OutputUpdateRequestKafka defines model for output_update_request_kafka. -type OutputUpdateRequestKafka struct { - AuthType *string `json:"auth_type,omitempty"` - BrokerTimeout *float32 `json:"broker_timeout,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ClientId *string `json:"client_id,omitempty"` - Compression *string `json:"compression,omitempty"` - CompressionLevel *float32 `json:"compression_level,omitempty"` - Config *map[string]interface{} `json:"config,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - ConnectionType *OutputUpdateRequestKafkaConnectionType `json:"connection_type,omitempty"` - Headers *[]struct { - Key *string `json:"key,omitempty"` - Value *string `json:"value,omitempty"` +// AgentPolicyStatus defines model for AgentPolicy.Status. +type AgentPolicyStatus string + +// EnrollmentApiKey defines model for enrollment_api_key. +type EnrollmentApiKey struct { + // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. + Active bool `json:"active"` + + // ApiKey The enrollment API key (token) used for enrolling Elastic Agents. + ApiKey string `json:"api_key"` + + // ApiKeyId The ID of the API key in the Security API. + ApiKeyId string `json:"api_key_id"` + CreatedAt string `json:"created_at"` + Id string `json:"id"` + + // Name The name of the enrollment API key. + Name *string `json:"name,omitempty"` + + // PolicyId The ID of the agent policy the Elastic Agent will be enrolled in. 
+ PolicyId *string `json:"policy_id,omitempty"` +} + +// NewOutputElasticsearch defines model for new_output_elasticsearch. +type NewOutputElasticsearch struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + Preset *NewOutputElasticsearchPreset `json:"preset,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Shipper *NewOutputShipper `json:"shipper,omitempty"` + Ssl *NewOutputSsl `json:"ssl,omitempty"` + Type NewOutputElasticsearchType `json:"type"` +} + +// NewOutputElasticsearchPreset defines model for NewOutputElasticsearch.Preset. +type NewOutputElasticsearchPreset string + +// NewOutputElasticsearchType defines model for NewOutputElasticsearch.Type. +type NewOutputElasticsearchType string + +// NewOutputKafka defines model for new_output_kafka. +type NewOutputKafka struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + AuthType NewOutputKafkaAuthType `json:"auth_type"` + BrokerTimeout *float32 `json:"broker_timeout,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ClientId *string `json:"client_id,omitempty"` + Compression *NewOutputKafkaCompression `json:"compression,omitempty"` + CompressionLevel interface{} `json:"compression_level"` + ConfigYaml *string `json:"config_yaml"` + ConnectionType interface{} `json:"connection_type"` + Hash *struct { + Hash *string `json:"hash,omitempty"` + Random *bool `json:"random,omitempty"` + } `json:"hash,omitempty"` + Headers *[]struct { + Key string `json:"key"` + Value string `json:"value"` } `json:"headers,omitempty"` - Hosts *[]string `json:"hosts,omitempty"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - Key *string `json:"key,omitempty"` - Name string `json:"name"` - Partition *string `json:"partition,omitempty"` - Password *string `json:"password,omitempty"` - ProxyId *string `json:"proxy_id,omitempty"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Key *string `json:"key,omitempty"` + Name string `json:"name"` + Partition *NewOutputKafkaPartition `json:"partition,omitempty"` + Password interface{} `json:"password"` + ProxyId *string `json:"proxy_id,omitempty"` Random *struct { GroupEvents *float32 `json:"group_events,omitempty"` } `json:"random,omitempty"` - RequiredAcks *float32 `json:"required_acks,omitempty"` + RequiredAcks *NewOutputKafkaRequiredAcks `json:"required_acks,omitempty"` RoundRobin *struct { GroupEvents *float32 `json:"group_events,omitempty"` } `json:"round_robin,omitempty"` Sasl *struct { - Mechanism *string `json:"mechanism,omitempty"` - } `json:"sasl,omitempty"` - Shipper *struct { - CompressionLevel *float32 `json:"compression_level,omitempty"` - DiskQueueCompressionEnabled *bool 
`json:"disk_queue_compression_enabled,omitempty"` - DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` - DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` - DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` - DiskQueuePath *string `json:"disk_queue_path,omitempty"` - Loadbalance *bool `json:"loadbalance,omitempty"` - } `json:"shipper,omitempty"` - Ssl *struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - Key *string `json:"key,omitempty"` - VerificationMode *OutputUpdateRequestKafkaSslVerificationMode `json:"verification_mode,omitempty"` - } `json:"ssl,omitempty"` - Timeout *float32 `json:"timeout,omitempty"` + Mechanism *NewOutputKafkaSaslMechanism `json:"mechanism,omitempty"` + } `json:"sasl"` + Secrets *struct { + Password *NewOutputKafka_Secrets_Password `json:"password,omitempty"` + Ssl *struct { + Key NewOutputKafka_Secrets_Ssl_Key `json:"key"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Shipper *NewOutputShipper `json:"shipper,omitempty"` + Ssl *NewOutputSsl `json:"ssl,omitempty"` + Timeout *float32 `json:"timeout,omitempty"` + Topic *string `json:"topic,omitempty"` Topics *[]struct { - Topic *string `json:"topic,omitempty"` + Topic string `json:"topic"` When *struct { - Condition *string `json:"condition,omitempty"` - Type *string `json:"type,omitempty"` + Condition *string `json:"condition,omitempty"` + Type *NewOutputKafkaTopicsWhenType `json:"type,omitempty"` } `json:"when,omitempty"` } `json:"topics,omitempty"` - Type OutputUpdateRequestKafkaType `json:"type"` - Username *string `json:"username,omitempty"` - Version *string `json:"version,omitempty"` + Type NewOutputKafkaType `json:"type"` + Username interface{} `json:"username"` + Version *string `json:"version,omitempty"` } -// OutputUpdateRequestKafkaConnectionType defines model for OutputUpdateRequestKafka.ConnectionType. -type OutputUpdateRequestKafkaConnectionType string +// NewOutputKafkaAuthType defines model for NewOutputKafka.AuthType. +type NewOutputKafkaAuthType string -// OutputUpdateRequestKafkaSslVerificationMode defines model for OutputUpdateRequestKafka.Ssl.VerificationMode. -type OutputUpdateRequestKafkaSslVerificationMode string +// NewOutputKafkaCompression defines model for NewOutputKafka.Compression. +type NewOutputKafkaCompression string -// OutputUpdateRequestKafkaType defines model for OutputUpdateRequestKafka.Type. -type OutputUpdateRequestKafkaType string +// NewOutputKafkaPartition defines model for NewOutputKafka.Partition. +type NewOutputKafkaPartition string -// OutputUpdateRequestLogstash defines model for output_update_request_logstash. 
-type OutputUpdateRequestLogstash struct { - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - Config *map[string]interface{} `json:"config,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - Hosts *[]string `json:"hosts,omitempty"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - Name string `json:"name"` - ProxyId *string `json:"proxy_id,omitempty"` - Shipper *struct { - CompressionLevel *float32 `json:"compression_level,omitempty"` - DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` - DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` - DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` - DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` - DiskQueuePath *string `json:"disk_queue_path,omitempty"` - Loadbalance *bool `json:"loadbalance,omitempty"` - } `json:"shipper,omitempty"` - Ssl *struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - Key *string `json:"key,omitempty"` - } `json:"ssl,omitempty"` - Type OutputUpdateRequestLogstashType `json:"type"` -} - -// OutputUpdateRequestLogstashType defines model for OutputUpdateRequestLogstash.Type. -type OutputUpdateRequestLogstashType string +// NewOutputKafkaRequiredAcks defines model for NewOutputKafka.RequiredAcks. +type NewOutputKafkaRequiredAcks int -// PackageInfo defines model for package_info. -type PackageInfo struct { - Assets []string `json:"assets"` - Categories []string `json:"categories"` - Conditions struct { - Elasticsearch *struct { - Subscription *PackageInfoConditionsElasticsearchSubscription `json:"subscription,omitempty"` - } `json:"elasticsearch,omitempty"` - Kibana *struct { - Versions *string `json:"versions,omitempty"` - } `json:"kibana,omitempty"` - } `json:"conditions"` - DataStreams *[]struct { - IngesetPipeline string `json:"ingeset_pipeline"` - Name string `json:"name"` - Package string `json:"package"` - Release string `json:"release"` - Title string `json:"title"` - Type string `json:"type"` - Vars *[]struct { - Default string `json:"default"` - Name string `json:"name"` - } `json:"vars,omitempty"` - } `json:"data_streams,omitempty"` - Description string `json:"description"` - Download string `json:"download"` - Elasticsearch *struct { - Privileges *struct { - Cluster *[]string `json:"cluster,omitempty"` - } `json:"privileges,omitempty"` - } `json:"elasticsearch,omitempty"` - FormatVersion string `json:"format_version"` - Icons *[]string `json:"icons,omitempty"` - Internal *bool `json:"internal,omitempty"` - Name string `json:"name"` - Path string `json:"path"` - Readme *string `json:"readme,omitempty"` - - // Release release label is deprecated, derive from the version instead (packages follow semver) - // Deprecated: - Release *PackageInfoRelease `json:"release,omitempty"` - Screenshots *[]struct { - Path string `json:"path"` - Size *string `json:"size,omitempty"` - Src string `json:"src"` - Title *string `json:"title,omitempty"` - Type *string `json:"type,omitempty"` - } `json:"screenshots,omitempty"` - Source *struct { - License *PackageInfoSourceLicense `json:"license,omitempty"` - } `json:"source,omitempty"` - Title string `json:"title"` - Type string `json:"type"` - Version string `json:"version"` +// NewOutputKafkaSaslMechanism defines model 
for NewOutputKafka.Sasl.Mechanism. +type NewOutputKafkaSaslMechanism string + +// NewOutputKafkaSecretsPassword0 defines model for . +type NewOutputKafkaSecretsPassword0 struct { + Id string `json:"id"` } -// PackageInfoConditionsElasticsearchSubscription defines model for PackageInfo.Conditions.Elasticsearch.Subscription. -type PackageInfoConditionsElasticsearchSubscription string +// NewOutputKafkaSecretsPassword1 defines model for . +type NewOutputKafkaSecretsPassword1 = string -// PackageInfoRelease release label is deprecated, derive from the version instead (packages follow semver) -type PackageInfoRelease string +// NewOutputKafka_Secrets_Password defines model for NewOutputKafka.Secrets.Password. +type NewOutputKafka_Secrets_Password struct { + union json.RawMessage +} -// PackageInfoSourceLicense defines model for PackageInfo.Source.License. -type PackageInfoSourceLicense string +// NewOutputKafkaSecretsSslKey0 defines model for . +type NewOutputKafkaSecretsSslKey0 struct { + Id string `json:"id"` +} -// PackageInstallSource defines model for package_install_source. -type PackageInstallSource string +// NewOutputKafkaSecretsSslKey1 defines model for . +type NewOutputKafkaSecretsSslKey1 = string -// PackageItemType defines model for package_item_type. -type PackageItemType struct { +// NewOutputKafka_Secrets_Ssl_Key defines model for NewOutputKafka.Secrets.Ssl.Key. +type NewOutputKafka_Secrets_Ssl_Key struct { union json.RawMessage } -// PackagePolicy defines model for package_policy. -type PackagePolicy struct { - Description *string `json:"description,omitempty"` - Enabled *bool `json:"enabled,omitempty"` - Id string `json:"id"` - Inputs map[string]PackagePolicyInput `json:"inputs"` - Name string `json:"name"` - Namespace *string `json:"namespace,omitempty"` - // Deprecated: - OutputId *string `json:"output_id,omitempty"` - Package *PackagePolicyPackageInfo `json:"package,omitempty"` - PolicyId *string `json:"policy_id,omitempty"` - Revision float32 `json:"revision"` - SecretReferences *[]struct { - Id *string `json:"id,omitempty"` - } `json:"secret_references,omitempty"` - Vars *map[string]interface{} `json:"vars,omitempty"` +// NewOutputKafkaTopicsWhenType defines model for NewOutputKafka.Topics.When.Type. +type NewOutputKafkaTopicsWhenType string + +// NewOutputKafkaType defines model for NewOutputKafka.Type. +type NewOutputKafkaType string + +// NewOutputLogstash defines model for new_output_logstash. +type NewOutputLogstash struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + Ssl *struct { + Key *NewOutputLogstash_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Shipper *NewOutputShipper `json:"shipper,omitempty"` + Ssl *NewOutputSsl `json:"ssl,omitempty"` + Type NewOutputLogstashType `json:"type"` +} + +// NewOutputLogstashSecretsSslKey0 defines model for . 
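
Several of the new output models accept secrets either inline or as a reference to a stored secret; NewOutputKafka_Secrets_Password, for example, is a oneOf of NewOutputKafkaSecretsPassword0 (an object carrying an id) and NewOutputKafkaSecretsPassword1 (a plain string). A minimal sketch, assuming it compiles alongside the generated fleet package, of how a caller might tell the two forms apart from raw JSON:

package fleet

import "encoding/json"

// secretOrRef splits a oneOf secret field such as NewOutputKafka_Secrets_Password
// into its inline value or its secret-reference id, whichever was sent.
func secretOrRef(raw json.RawMessage) (string, string, error) {
	// Inline form: a bare JSON string.
	var inline string
	if err := json.Unmarshal(raw, &inline); err == nil {
		return inline, "", nil
	}
	// Reference form: an object carrying the id of a stored secret.
	var ref NewOutputKafkaSecretsPassword0
	if err := json.Unmarshal(raw, &ref); err != nil {
		return "", "", err
	}
	return "", ref.Id, nil
}
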
+type NewOutputLogstashSecretsSslKey0 struct { + Id string `json:"id"` +} + +// NewOutputLogstashSecretsSslKey1 defines model for . +type NewOutputLogstashSecretsSslKey1 = string + +// NewOutputLogstash_Secrets_Ssl_Key defines model for NewOutputLogstash.Secrets.Ssl.Key. +type NewOutputLogstash_Secrets_Ssl_Key struct { + union json.RawMessage } -// PackagePolicyInput defines model for package_policy_input. -type PackagePolicyInput struct { - Config *map[string]interface{} `json:"config,omitempty"` - Enabled bool `json:"enabled"` - Processors *[]string `json:"processors,omitempty"` - Streams *map[string]interface{} `json:"streams,omitempty"` - Type string `json:"type"` - Vars *map[string]interface{} `json:"vars,omitempty"` +// NewOutputLogstashType defines model for NewOutputLogstash.Type. +type NewOutputLogstashType string + +// NewOutputRemoteElasticsearch defines model for new_output_remote_elasticsearch. +type NewOutputRemoteElasticsearch struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + Preset *NewOutputRemoteElasticsearchPreset `json:"preset,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + ServiceToken *NewOutputRemoteElasticsearch_Secrets_ServiceToken `json:"service_token,omitempty"` + } `json:"secrets,omitempty"` + ServiceToken *string `json:"service_token"` + Shipper *NewOutputShipper `json:"shipper,omitempty"` + Ssl *NewOutputSsl `json:"ssl,omitempty"` + Type NewOutputRemoteElasticsearchType `json:"type"` +} + +// NewOutputRemoteElasticsearchPreset defines model for NewOutputRemoteElasticsearch.Preset. +type NewOutputRemoteElasticsearchPreset string + +// NewOutputRemoteElasticsearchSecretsServiceToken0 defines model for . +type NewOutputRemoteElasticsearchSecretsServiceToken0 struct { + Id string `json:"id"` +} + +// NewOutputRemoteElasticsearchSecretsServiceToken1 defines model for . +type NewOutputRemoteElasticsearchSecretsServiceToken1 = string + +// NewOutputRemoteElasticsearch_Secrets_ServiceToken defines model for NewOutputRemoteElasticsearch.Secrets.ServiceToken. +type NewOutputRemoteElasticsearch_Secrets_ServiceToken struct { + union json.RawMessage } -// PackagePolicyPackageInfo defines model for package_policy_package_info. -type PackagePolicyPackageInfo struct { - Name string `json:"name"` - Title *string `json:"title,omitempty"` - Version string `json:"version"` +// NewOutputRemoteElasticsearchType defines model for NewOutputRemoteElasticsearch.Type. +type NewOutputRemoteElasticsearchType string + +// NewOutputShipper defines model for new_output_shipper. 
+type NewOutputShipper struct { + CompressionLevel *float32 `json:"compression_level"` + DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled"` + DiskQueueEnabled *bool `json:"disk_queue_enabled"` + DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled"` + DiskQueueMaxSize *float32 `json:"disk_queue_max_size"` + DiskQueuePath *string `json:"disk_queue_path"` + Loadbalance *bool `json:"loadbalance"` + MaxBatchBytes *float32 `json:"max_batch_bytes"` + MemQueueEvents *float32 `json:"mem_queue_events"` + QueueFlushTimeout *float32 `json:"queue_flush_timeout"` } -// PackagePolicyRequest defines model for package_policy_request. -type PackagePolicyRequest struct { - // Description Package policy description - Description *string `json:"description,omitempty"` +// NewOutputSsl defines model for new_output_ssl. +type NewOutputSsl struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + VerificationMode *NewOutputSslVerificationMode `json:"verification_mode,omitempty"` +} - // Force Force package policy creation even if package is not verified, or if the agent policy is managed. - Force *bool `json:"force,omitempty"` +// NewOutputSslVerificationMode defines model for NewOutputSsl.VerificationMode. +type NewOutputSslVerificationMode string - // Id Package policy unique identifier - Id *string `json:"id,omitempty"` +// NewOutputUnion defines model for new_output_union. +type NewOutputUnion struct { + union json.RawMessage +} - // Inputs Package policy inputs (see integration documentation to know what inputs are available) - Inputs *map[string]PackagePolicyRequestInput `json:"inputs,omitempty"` +// OutputElasticsearch defines model for output_elasticsearch. +type OutputElasticsearch struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint"` + ConfigYaml *string `json:"config_yaml"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + Preset *OutputElasticsearchPreset `json:"preset,omitempty"` + ProxyId *string `json:"proxy_id"` + Shipper *OutputShipper `json:"shipper"` + Ssl *OutputSsl `json:"ssl"` + Type OutputElasticsearchType `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputElasticsearchPreset defines model for OutputElasticsearch.Preset. +type OutputElasticsearchPreset string + +// OutputElasticsearchType defines model for OutputElasticsearch.Type. +type OutputElasticsearchType string + +// OutputKafka defines model for output_kafka. 
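
As a usage sketch for the request models above (assumptions: the generated package is named fleet, and the ptr helper is local to this example, not part of the diff): building a minimal Elasticsearch output body. oapi-codegen normally also emits a FromNewOutputElasticsearch method on NewOutputUnion for wrapping this into the oneOf request type; that helper is assumed rather than shown in this hunk.

package fleet

// ptr is a local helper for the optional pointer fields on the request models.
func ptr[T any](v T) *T { return &v }

// minimalESOutput fills only the required fields of NewOutputElasticsearch
// plus a couple of common optional ones.
func minimalESOutput(name string, hosts []string) NewOutputElasticsearch {
	return NewOutputElasticsearch{
		Name:      name,
		Hosts:     hosts,
		Type:      NewOutputElasticsearchType("elasticsearch"),
		IsDefault: ptr(false),
	}
}
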
+type OutputKafka struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + AuthType OutputKafkaAuthType `json:"auth_type"` + BrokerTimeout *float32 `json:"broker_timeout,omitempty"` + CaSha256 *string `json:"ca_sha256"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint"` + ClientId *string `json:"client_id,omitempty"` + Compression *OutputKafkaCompression `json:"compression,omitempty"` + CompressionLevel interface{} `json:"compression_level"` + ConfigYaml *string `json:"config_yaml"` + ConnectionType interface{} `json:"connection_type"` + Hash *OutputKafka_Hash `json:"hash,omitempty"` + Headers *[]OutputKafka_Headers_Item `json:"headers,omitempty"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Key *string `json:"key,omitempty"` + Name string `json:"name"` + Partition *OutputKafkaPartition `json:"partition,omitempty"` + Password interface{} `json:"password"` + ProxyId *string `json:"proxy_id"` + Random *OutputKafka_Random `json:"random,omitempty"` + RequiredAcks *OutputKafkaRequiredAcks `json:"required_acks,omitempty"` + RoundRobin *OutputKafka_RoundRobin `json:"round_robin,omitempty"` + Sasl *OutputKafka_Sasl `json:"sasl"` + Secrets *OutputKafka_Secrets `json:"secrets,omitempty"` + Shipper *OutputShipper `json:"shipper"` + Ssl *OutputSsl `json:"ssl"` + Timeout *float32 `json:"timeout,omitempty"` + Topic *string `json:"topic,omitempty"` + Topics *[]OutputKafka_Topics_Item `json:"topics,omitempty"` + Type OutputKafkaType `json:"type"` + Username interface{} `json:"username"` + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaAuthType defines model for OutputKafka.AuthType. +type OutputKafkaAuthType string + +// OutputKafkaCompression defines model for OutputKafka.Compression. +type OutputKafkaCompression string + +// OutputKafka_Hash defines model for OutputKafka.Hash. +type OutputKafka_Hash struct { + Hash *string `json:"hash,omitempty"` + Random *bool `json:"random,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafka_Headers_Item defines model for output_kafka.headers.Item. +type OutputKafka_Headers_Item struct { + Key string `json:"key"` + Value string `json:"value"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaPartition defines model for OutputKafka.Partition. +type OutputKafkaPartition string + +// OutputKafka_Random defines model for OutputKafka.Random. +type OutputKafka_Random struct { + GroupEvents *float32 `json:"group_events,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaRequiredAcks defines model for OutputKafka.RequiredAcks. +type OutputKafkaRequiredAcks int + +// OutputKafka_RoundRobin defines model for OutputKafka.RoundRobin. +type OutputKafka_RoundRobin struct { + GroupEvents *float32 `json:"group_events,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaSaslMechanism defines model for OutputKafka.Sasl.Mechanism. +type OutputKafkaSaslMechanism string + +// OutputKafka_Sasl defines model for OutputKafka.Sasl. 
+type OutputKafka_Sasl struct { + Mechanism *OutputKafkaSaslMechanism `json:"mechanism,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaSecretsPassword0 defines model for . +type OutputKafkaSecretsPassword0 struct { + Id string `json:"id"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaSecretsPassword1 defines model for . +type OutputKafkaSecretsPassword1 = string + +// OutputKafka_Secrets_Password defines model for OutputKafka.Secrets.Password. +type OutputKafka_Secrets_Password struct { + union json.RawMessage +} - // Name Package policy name (should be unique) - Name string `json:"name"` +// OutputKafkaSecretsSslKey0 defines model for . +type OutputKafkaSecretsSslKey0 struct { + Id string `json:"id"` + AdditionalProperties map[string]interface{} `json:"-"` +} - // Namespace namespace by default "default" - Namespace *string `json:"namespace,omitempty"` - Package struct { - // Name Package name - Name string `json:"name"` +// OutputKafkaSecretsSslKey1 defines model for . +type OutputKafkaSecretsSslKey1 = string - // Version Package version - Version string `json:"version"` - } `json:"package"` +// OutputKafka_Secrets_Ssl_Key defines model for OutputKafka.Secrets.Ssl.Key. +type OutputKafka_Secrets_Ssl_Key struct { + union json.RawMessage +} - // PolicyId Agent policy ID where that package policy will be added - PolicyId string `json:"policy_id"` +// OutputKafka_Secrets_Ssl defines model for OutputKafka.Secrets.Ssl. +type OutputKafka_Secrets_Ssl struct { + Key OutputKafka_Secrets_Ssl_Key `json:"key"` + AdditionalProperties map[string]interface{} `json:"-"` +} - // Vars Package root level variable (see integration documentation for more information) - Vars *map[string]interface{} `json:"vars,omitempty"` +// OutputKafka_Secrets defines model for OutputKafka.Secrets. +type OutputKafka_Secrets struct { + Password *OutputKafka_Secrets_Password `json:"password,omitempty"` + Ssl *OutputKafka_Secrets_Ssl `json:"ssl,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` } -// PackagePolicyRequestInput defines model for package_policy_request_input. -type PackagePolicyRequestInput struct { - // Enabled enable or disable that input, (default to true) - Enabled *bool `json:"enabled,omitempty"` +// OutputKafkaTopicsWhenType defines model for OutputKafka.Topics.When.Type. +type OutputKafkaTopicsWhenType string + +// OutputKafka_Topics_When defines model for OutputKafka.Topics.When. +type OutputKafka_Topics_When struct { + Condition *string `json:"condition,omitempty"` + Type *OutputKafkaTopicsWhenType `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafka_Topics_Item defines model for output_kafka.topics.Item. +type OutputKafka_Topics_Item struct { + Topic string `json:"topic"` + When *OutputKafka_Topics_When `json:"when,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaType defines model for OutputKafka.Type. +type OutputKafkaType string + +// OutputLogstash defines model for output_logstash. 
+type OutputLogstash struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint"` + ConfigYaml *string `json:"config_yaml"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyId *string `json:"proxy_id"` + Secrets *OutputLogstash_Secrets `json:"secrets,omitempty"` + Shipper *OutputShipper `json:"shipper"` + Ssl *OutputSsl `json:"ssl"` + Type OutputLogstashType `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputLogstashSecretsSslKey0 defines model for . +type OutputLogstashSecretsSslKey0 struct { + Id string `json:"id"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputLogstashSecretsSslKey1 defines model for . +type OutputLogstashSecretsSslKey1 = string + +// OutputLogstash_Secrets_Ssl_Key defines model for OutputLogstash.Secrets.Ssl.Key. +type OutputLogstash_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// OutputLogstash_Secrets_Ssl defines model for OutputLogstash.Secrets.Ssl. +type OutputLogstash_Secrets_Ssl struct { + Key *OutputLogstash_Secrets_Ssl_Key `json:"key,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputLogstash_Secrets defines model for OutputLogstash.Secrets. +type OutputLogstash_Secrets struct { + Ssl *OutputLogstash_Secrets_Ssl `json:"ssl,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputLogstashType defines model for OutputLogstash.Type. +type OutputLogstashType string + +// OutputRemoteElasticsearch defines model for output_remote_elasticsearch. +type OutputRemoteElasticsearch struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint"` + ConfigYaml *string `json:"config_yaml"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + Preset *OutputRemoteElasticsearchPreset `json:"preset,omitempty"` + ProxyId *string `json:"proxy_id"` + Secrets *OutputRemoteElasticsearch_Secrets `json:"secrets,omitempty"` + ServiceToken *string `json:"service_token"` + Shipper *OutputShipper `json:"shipper"` + Ssl *OutputSsl `json:"ssl"` + Type OutputRemoteElasticsearchType `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputRemoteElasticsearchPreset defines model for OutputRemoteElasticsearch.Preset. +type OutputRemoteElasticsearchPreset string + +// OutputRemoteElasticsearchSecretsServiceToken0 defines model for . +type OutputRemoteElasticsearchSecretsServiceToken0 struct { + Id string `json:"id"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputRemoteElasticsearchSecretsServiceToken1 defines model for . +type OutputRemoteElasticsearchSecretsServiceToken1 = string + +// OutputRemoteElasticsearch_Secrets_ServiceToken defines model for OutputRemoteElasticsearch.Secrets.ServiceToken. 
+type OutputRemoteElasticsearch_Secrets_ServiceToken struct { + union json.RawMessage +} + +// OutputRemoteElasticsearch_Secrets defines model for OutputRemoteElasticsearch.Secrets. +type OutputRemoteElasticsearch_Secrets struct { + ServiceToken *OutputRemoteElasticsearch_Secrets_ServiceToken `json:"service_token,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputRemoteElasticsearchType defines model for OutputRemoteElasticsearch.Type. +type OutputRemoteElasticsearchType string + +// OutputShipper defines model for output_shipper. +type OutputShipper struct { + CompressionLevel *float32 `json:"compression_level"` + DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled"` + DiskQueueEnabled *bool `json:"disk_queue_enabled"` + DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled"` + DiskQueueMaxSize *float32 `json:"disk_queue_max_size"` + DiskQueuePath *string `json:"disk_queue_path"` + Loadbalance *bool `json:"loadbalance"` + MaxBatchBytes *float32 `json:"max_batch_bytes"` + MemQueueEvents *float32 `json:"mem_queue_events"` + QueueFlushTimeout *float32 `json:"queue_flush_timeout"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputSsl defines model for output_ssl. +type OutputSsl struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + VerificationMode *OutputSslVerificationMode `json:"verification_mode,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputSslVerificationMode defines model for OutputSsl.VerificationMode. +type OutputSslVerificationMode string + +// OutputUnion defines model for output_union. +type OutputUnion struct { + union json.RawMessage +} + +// PackageInfo defines model for package_info. 
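
OutputUnion (like NewOutputUnion) wraps the per-type output models behind a oneOf keyed by the type field. A small sketch, using only encoding/json, of how a reader might peek at that discriminator before unmarshalling into the concrete model; the generated As* accessors are an alternative where present.

package fleet

import (
	"encoding/json"
	"fmt"
)

// outputDiscriminator returns the "type" field of a serialized output so the
// caller can choose the matching concrete model (OutputElasticsearch,
// OutputKafka, OutputLogstash, OutputRemoteElasticsearch).
func outputDiscriminator(raw json.RawMessage) (string, error) {
	var probe struct {
		Type string `json:"type"`
	}
	if err := json.Unmarshal(raw, &probe); err != nil {
		return "", fmt.Errorf("reading output type: %w", err)
	}
	return probe.Type, nil
}
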
+type PackageInfo struct { + Agent *struct { + Privileges *struct { + Root *bool `json:"root,omitempty"` + } `json:"privileges,omitempty"` + } `json:"agent,omitempty"` + AssetTags *[]struct { + AssetIds *[]string `json:"asset_ids,omitempty"` + AssetTypes *[]string `json:"asset_types,omitempty"` + Text string `json:"text"` + } `json:"asset_tags,omitempty"` + Assets map[string]interface{} `json:"assets"` + Categories *[]string `json:"categories,omitempty"` + Conditions *PackageInfo_Conditions `json:"conditions,omitempty"` + DataStreams *[]map[string]interface{} `json:"data_streams,omitempty"` + Description *string `json:"description,omitempty"` + Download *string `json:"download,omitempty"` + Elasticsearch *map[string]interface{} `json:"elasticsearch,omitempty"` + FormatVersion *string `json:"format_version,omitempty"` + Icons *[]PackageInfo_Icons_Item `json:"icons,omitempty"` + InstallationInfo *PackageInfo_InstallationInfo `json:"installationInfo,omitempty"` + Internal *bool `json:"internal,omitempty"` + KeepPoliciesUpToDate *bool `json:"keepPoliciesUpToDate,omitempty"` + LatestVersion *string `json:"latestVersion,omitempty"` + License *string `json:"license,omitempty"` + LicensePath *string `json:"licensePath,omitempty"` + Name string `json:"name"` + Notice *string `json:"notice,omitempty"` + Owner *PackageInfo_Owner `json:"owner,omitempty"` + Path *string `json:"path,omitempty"` + PolicyTemplates *[]map[string]interface{} `json:"policy_templates,omitempty"` + Readme *string `json:"readme,omitempty"` + Release *PackageInfoRelease `json:"release,omitempty"` + SavedObject interface{} `json:"savedObject"` + Screenshots *[]struct { + DarkMode *bool `json:"dark_mode,omitempty"` + Path *string `json:"path,omitempty"` + Size *string `json:"size,omitempty"` + Src string `json:"src"` + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"screenshots,omitempty"` + SignaturePath *string `json:"signature_path,omitempty"` + Source *PackageInfo_Source `json:"source,omitempty"` + Status *string `json:"status,omitempty"` + Title string `json:"title"` + Type *PackageInfoType `json:"type,omitempty"` + Vars *[]map[string]interface{} `json:"vars,omitempty"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_Conditions_Elastic defines model for PackageInfo.Conditions.Elastic. +type PackageInfo_Conditions_Elastic struct { + Capabilities *[]string `json:"capabilities,omitempty"` + Subscription *string `json:"subscription,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_Conditions_Kibana defines model for PackageInfo.Conditions.Kibana. +type PackageInfo_Conditions_Kibana struct { + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_Conditions defines model for PackageInfo.Conditions. +type PackageInfo_Conditions struct { + Elastic *PackageInfo_Conditions_Elastic `json:"elastic,omitempty"` + Kibana *PackageInfo_Conditions_Kibana `json:"kibana,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_Icons_Item defines model for package_info.icons.Item. 
+type PackageInfo_Icons_Item struct { + DarkMode *bool `json:"dark_mode,omitempty"` + Path *string `json:"path,omitempty"` + Size *string `json:"size,omitempty"` + Src string `json:"src"` + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType defines model for PackageInfo.InstallationInfo.AdditionalSpacesInstalledKibana.Type. +type PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType string + +// PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item defines model for PackageInfo.InstallationInfo.AdditionalSpacesInstalledKibana.Item. +type PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features defines model for PackageInfo.InstallationInfo.ExperimentalDataStreamFeatures.Features. +type PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item defines model for PackageInfo.InstallationInfo.ExperimentalDataStreamFeatures.Item. +type PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item struct { + DataStream string `json:"data_stream"` + Features PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features `json:"features"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoInstallationInfoInstallSource defines model for PackageInfo.InstallationInfo.InstallSource. +type PackageInfoInstallationInfoInstallSource string + +// PackageInfoInstallationInfoInstallStatus defines model for PackageInfo.InstallationInfo.InstallStatus. +type PackageInfoInstallationInfoInstallStatus string + +// PackageInfoInstallationInfoInstalledEsType defines model for PackageInfo.InstallationInfo.InstalledEs.Type. +type PackageInfoInstallationInfoInstalledEsType string + +// PackageInfo_InstallationInfo_InstalledEs_Item defines model for PackageInfo.InstallationInfo.InstalledEs.Item. +type PackageInfo_InstallationInfo_InstalledEs_Item struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type PackageInfoInstallationInfoInstalledEsType `json:"type"` + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoInstallationInfoInstalledKibanaType defines model for PackageInfo.InstallationInfo.InstalledKibana.Type. +type PackageInfoInstallationInfoInstalledKibanaType string + +// PackageInfo_InstallationInfo_InstalledKibana_Item defines model for PackageInfo.InstallationInfo.InstalledKibana.Item. 
+type PackageInfo_InstallationInfo_InstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type PackageInfoInstallationInfoInstalledKibanaType `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_InstallationInfo_LatestExecutedState defines model for PackageInfo.InstallationInfo.LatestExecutedState. +type PackageInfo_InstallationInfo_LatestExecutedState struct { + Error *string `json:"error,omitempty"` + Name string `json:"name"` + StartedAt string `json:"started_at"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error defines model for PackageInfo.InstallationInfo.LatestInstallFailedAttempts.Error. +type PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error struct { + Message string `json:"message"` + Name string `json:"name"` + Stack *string `json:"stack,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item defines model for PackageInfo.InstallationInfo.LatestInstallFailedAttempts.Item. +type PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item struct { + CreatedAt string `json:"created_at"` + Error PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error `json:"error"` + TargetVersion string `json:"target_version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoInstallationInfoVerificationStatus defines model for PackageInfo.InstallationInfo.VerificationStatus. +type PackageInfoInstallationInfoVerificationStatus string + +// PackageInfo_InstallationInfo defines model for PackageInfo.InstallationInfo. +type PackageInfo_InstallationInfo struct { + AdditionalSpacesInstalledKibana *map[string][]PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item `json:"additional_spaces_installed_kibana,omitempty"` + CreatedAt *string `json:"created_at,omitempty"` + ExperimentalDataStreamFeatures *[]PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item `json:"experimental_data_stream_features,omitempty"` + InstallFormatSchemaVersion *string `json:"install_format_schema_version,omitempty"` + InstallSource PackageInfoInstallationInfoInstallSource `json:"install_source"` + InstallStatus PackageInfoInstallationInfoInstallStatus `json:"install_status"` + InstalledEs []PackageInfo_InstallationInfo_InstalledEs_Item `json:"installed_es"` + InstalledKibana []PackageInfo_InstallationInfo_InstalledKibana_Item `json:"installed_kibana"` + InstalledKibanaSpaceId *string `json:"installed_kibana_space_id,omitempty"` + LatestExecutedState *PackageInfo_InstallationInfo_LatestExecutedState `json:"latest_executed_state,omitempty"` + LatestInstallFailedAttempts *[]PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item `json:"latest_install_failed_attempts,omitempty"` + Name string `json:"name"` + Namespaces *[]string `json:"namespaces,omitempty"` + Type string `json:"type"` + UpdatedAt *string `json:"updated_at,omitempty"` + VerificationKeyId *string `json:"verification_key_id"` + VerificationStatus PackageInfoInstallationInfoVerificationStatus `json:"verification_status"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoOwnerType defines model for PackageInfo.Owner.Type. +type PackageInfoOwnerType string + +// PackageInfo_Owner defines model for PackageInfo.Owner. 
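
The AdditionalProperties map[string]interface{} fields tagged json:"-" on most of these models are oapi-codegen's pattern for additionalProperties: the generated file pairs them with custom MarshalJSON/UnmarshalJSON (and Get/Set helpers) so unknown keys round-trip. A hedged sketch of reading such a key after decoding, assuming those generated (un)marshallers exist for PackageInfo_Owner:

package fleet

import "encoding/json"

// ownerWithExtras decodes a PackageInfo_Owner and returns the typed github
// handle alongside any schema-unknown keys the API included. It relies on the
// custom UnmarshalJSON that oapi-codegen generates for types carrying
// AdditionalProperties (assumed to be present in fleet.gen.go).
func ownerWithExtras(raw []byte) (string, map[string]interface{}, error) {
	var owner PackageInfo_Owner
	if err := json.Unmarshal(raw, &owner); err != nil {
		return "", nil, err
	}
	github := ""
	if owner.Github != nil {
		github = *owner.Github
	}
	return github, owner.AdditionalProperties, nil
}
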
+type PackageInfo_Owner struct { + Github *string `json:"github,omitempty"` + Type *PackageInfoOwnerType `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoRelease defines model for PackageInfo.Release. +type PackageInfoRelease string + +// PackageInfo_Source defines model for PackageInfo.Source. +type PackageInfo_Source struct { + License string `json:"license"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoType defines model for PackageInfo.Type. +type PackageInfoType string + +// PackageListItem defines model for package_list_item. +type PackageListItem struct { + Categories *[]string `json:"categories,omitempty"` + Conditions *PackageListItem_Conditions `json:"conditions,omitempty"` + DataStreams *[]map[string]interface{} `json:"data_streams,omitempty"` + Description *string `json:"description,omitempty"` + Download *string `json:"download,omitempty"` + FormatVersion *string `json:"format_version,omitempty"` + Icons *[]PackageListItem_Icons_Item `json:"icons,omitempty"` + Id string `json:"id"` + InstallationInfo *PackageListItem_InstallationInfo `json:"installationInfo,omitempty"` + Integration *string `json:"integration,omitempty"` + Internal *bool `json:"internal,omitempty"` + LatestVersion *string `json:"latestVersion,omitempty"` + Name string `json:"name"` + Owner *PackageListItem_Owner `json:"owner,omitempty"` + Path *string `json:"path,omitempty"` + PolicyTemplates *[]map[string]interface{} `json:"policy_templates,omitempty"` + Readme *string `json:"readme,omitempty"` + Release *PackageListItemRelease `json:"release,omitempty"` + SavedObject interface{} `json:"savedObject"` + SignaturePath *string `json:"signature_path,omitempty"` + Source *PackageListItem_Source `json:"source,omitempty"` + Status *string `json:"status,omitempty"` + Title string `json:"title"` + Type *PackageListItemType `json:"type,omitempty"` + Vars *[]map[string]interface{} `json:"vars,omitempty"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_Conditions_Elastic defines model for PackageListItem.Conditions.Elastic. +type PackageListItem_Conditions_Elastic struct { + Capabilities *[]string `json:"capabilities,omitempty"` + Subscription *string `json:"subscription,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_Conditions_Kibana defines model for PackageListItem.Conditions.Kibana. +type PackageListItem_Conditions_Kibana struct { + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_Conditions defines model for PackageListItem.Conditions. +type PackageListItem_Conditions struct { + Elastic *PackageListItem_Conditions_Elastic `json:"elastic,omitempty"` + Kibana *PackageListItem_Conditions_Kibana `json:"kibana,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_Icons_Item defines model for package_list_item.icons.Item. +type PackageListItem_Icons_Item struct { + DarkMode *bool `json:"dark_mode,omitempty"` + Path *string `json:"path,omitempty"` + Size *string `json:"size,omitempty"` + Src string `json:"src"` + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType defines model for PackageListItem.InstallationInfo.AdditionalSpacesInstalledKibana.Type. 
+type PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType string + +// PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item defines model for PackageListItem.InstallationInfo.AdditionalSpacesInstalledKibana.Item. +type PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features defines model for PackageListItem.InstallationInfo.ExperimentalDataStreamFeatures.Features. +type PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item defines model for PackageListItem.InstallationInfo.ExperimentalDataStreamFeatures.Item. +type PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item struct { + DataStream string `json:"data_stream"` + Features PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features `json:"features"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemInstallationInfoInstallSource defines model for PackageListItem.InstallationInfo.InstallSource. +type PackageListItemInstallationInfoInstallSource string + +// PackageListItemInstallationInfoInstallStatus defines model for PackageListItem.InstallationInfo.InstallStatus. +type PackageListItemInstallationInfoInstallStatus string + +// PackageListItemInstallationInfoInstalledEsType defines model for PackageListItem.InstallationInfo.InstalledEs.Type. +type PackageListItemInstallationInfoInstalledEsType string + +// PackageListItem_InstallationInfo_InstalledEs_Item defines model for PackageListItem.InstallationInfo.InstalledEs.Item. +type PackageListItem_InstallationInfo_InstalledEs_Item struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type PackageListItemInstallationInfoInstalledEsType `json:"type"` + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemInstallationInfoInstalledKibanaType defines model for PackageListItem.InstallationInfo.InstalledKibana.Type. +type PackageListItemInstallationInfoInstalledKibanaType string + +// PackageListItem_InstallationInfo_InstalledKibana_Item defines model for PackageListItem.InstallationInfo.InstalledKibana.Item. +type PackageListItem_InstallationInfo_InstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type PackageListItemInstallationInfoInstalledKibanaType `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_InstallationInfo_LatestExecutedState defines model for PackageListItem.InstallationInfo.LatestExecutedState. 
+type PackageListItem_InstallationInfo_LatestExecutedState struct { + Error *string `json:"error,omitempty"` + Name string `json:"name"` + StartedAt string `json:"started_at"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error defines model for PackageListItem.InstallationInfo.LatestInstallFailedAttempts.Error. +type PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error struct { + Message string `json:"message"` + Name string `json:"name"` + Stack *string `json:"stack,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item defines model for PackageListItem.InstallationInfo.LatestInstallFailedAttempts.Item. +type PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item struct { + CreatedAt string `json:"created_at"` + Error PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error `json:"error"` + TargetVersion string `json:"target_version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemInstallationInfoVerificationStatus defines model for PackageListItem.InstallationInfo.VerificationStatus. +type PackageListItemInstallationInfoVerificationStatus string + +// PackageListItem_InstallationInfo defines model for PackageListItem.InstallationInfo. +type PackageListItem_InstallationInfo struct { + AdditionalSpacesInstalledKibana *map[string][]PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item `json:"additional_spaces_installed_kibana,omitempty"` + CreatedAt *string `json:"created_at,omitempty"` + ExperimentalDataStreamFeatures *[]PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item `json:"experimental_data_stream_features,omitempty"` + InstallFormatSchemaVersion *string `json:"install_format_schema_version,omitempty"` + InstallSource PackageListItemInstallationInfoInstallSource `json:"install_source"` + InstallStatus PackageListItemInstallationInfoInstallStatus `json:"install_status"` + InstalledEs []PackageListItem_InstallationInfo_InstalledEs_Item `json:"installed_es"` + InstalledKibana []PackageListItem_InstallationInfo_InstalledKibana_Item `json:"installed_kibana"` + InstalledKibanaSpaceId *string `json:"installed_kibana_space_id,omitempty"` + LatestExecutedState *PackageListItem_InstallationInfo_LatestExecutedState `json:"latest_executed_state,omitempty"` + LatestInstallFailedAttempts *[]PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item `json:"latest_install_failed_attempts,omitempty"` + Name string `json:"name"` + Namespaces *[]string `json:"namespaces,omitempty"` + Type string `json:"type"` + UpdatedAt *string `json:"updated_at,omitempty"` + VerificationKeyId *string `json:"verification_key_id"` + VerificationStatus PackageListItemInstallationInfoVerificationStatus `json:"verification_status"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemOwnerType defines model for PackageListItem.Owner.Type. +type PackageListItemOwnerType string + +// PackageListItem_Owner defines model for PackageListItem.Owner. +type PackageListItem_Owner struct { + Github *string `json:"github,omitempty"` + Type *PackageListItemOwnerType `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemRelease defines model for PackageListItem.Release. 
+type PackageListItemRelease string + +// PackageListItem_Source defines model for PackageListItem.Source. +type PackageListItem_Source struct { + License string `json:"license"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemType defines model for PackageListItem.Type. +type PackageListItemType string + +// PackagePolicy defines model for package_policy. +type PackagePolicy struct { + Agents *float32 `json:"agents,omitempty"` + CreatedAt string `json:"created_at"` + CreatedBy string `json:"created_by"` + + // Description Package policy description + Description *string `json:"description,omitempty"` + Elasticsearch *PackagePolicy_Elasticsearch `json:"elasticsearch,omitempty"` + Enabled bool `json:"enabled"` + Id string `json:"id"` + + // Inputs Package policy inputs (see integration documentation to know what inputs are available) + Inputs map[string]PackagePolicyInput `json:"inputs"` + IsManaged *bool `json:"is_managed,omitempty"` + + // Name Package policy name (should be unique) + Name string `json:"name"` + + // Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace. + Namespace *string `json:"namespace,omitempty"` + OutputId *string `json:"output_id"` + + // Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *struct { + Inputs *map[string]interface{} `json:"inputs,omitempty"` + } `json:"overrides"` + Package *struct { + ExperimentalDataStreamFeatures *[]struct { + DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` + + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` + + // Version Package version + Version string `json:"version"` + } `json:"package,omitempty"` + + // PolicyId Agent policy ID where that package policy will be added + // Deprecated: + PolicyId *string `json:"policy_id"` + PolicyIds *[]string `json:"policy_ids,omitempty"` + Revision float32 `json:"revision"` + SecretReferences *[]PackagePolicySecretRef `json:"secret_references,omitempty"` + SpaceIds *[]string `json:"spaceIds,omitempty"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Vars *map[string]interface{} `json:"vars,omitempty"` + Version *string `json:"version,omitempty"` +} + +// PackagePolicy_Elasticsearch_Privileges defines model for PackagePolicy.Elasticsearch.Privileges. +type PackagePolicy_Elasticsearch_Privileges struct { + Cluster *[]string `json:"cluster,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackagePolicy_Elasticsearch defines model for PackagePolicy.Elasticsearch. +type PackagePolicy_Elasticsearch struct { + Privileges *PackagePolicy_Elasticsearch_Privileges `json:"privileges,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackagePolicyInput defines model for package_policy_input. 
+type PackagePolicyInput struct { + // Enabled enable or disable that input, (default to true) + Enabled *bool `json:"enabled,omitempty"` // Streams Input streams (see integration documentation to know what streams are available) - Streams *map[string]PackagePolicyRequestInputStream `json:"streams,omitempty"` + Streams *map[string]PackagePolicyInputStream `json:"streams,omitempty"` + Vars *map[string]interface{} `json:"vars,omitempty"` +} + +// PackagePolicyInputStream defines model for package_policy_input_stream. +type PackagePolicyInputStream struct { + // Enabled enable or disable that stream, (default to true) + Enabled *bool `json:"enabled,omitempty"` + Vars *map[string]interface{} `json:"vars,omitempty"` +} + +// PackagePolicyRequest defines model for package_policy_request. +type PackagePolicyRequest struct { + Description *string `json:"description,omitempty"` + Force *bool `json:"force,omitempty"` + Id *string `json:"id,omitempty"` - // Vars Input level variable (see integration documentation for more information) - Vars *map[string]interface{} `json:"vars,omitempty"` + // Inputs Package policy inputs (see integration documentation to know what inputs are available) + Inputs *map[string]PackagePolicyRequestInput `json:"inputs,omitempty"` + Name string `json:"name"` + Namespace *string `json:"namespace,omitempty"` + OutputId *string `json:"output_id,omitempty"` + Package PackagePolicyRequestPackage `json:"package"` + PolicyId *string `json:"policy_id"` + PolicyIds *[]string `json:"policy_ids,omitempty"` + Vars *map[string]interface{} `json:"vars,omitempty"` +} + +// PackagePolicyRequestInput defines model for package_policy_request_input. +type PackagePolicyRequestInput struct { + // Enabled enable or disable that input, (default to true) + Enabled *bool `json:"enabled,omitempty"` + + // Streams Input streams (see integration documentation to know what streams are available) + Streams *map[string]PackagePolicyRequestInputStream `json:"streams,omitempty"` + Vars *map[string]interface{} `json:"vars,omitempty"` } // PackagePolicyRequestInputStream defines model for package_policy_request_input_stream. type PackagePolicyRequestInputStream struct { // Enabled enable or disable that stream, (default to true) - Enabled *bool `json:"enabled,omitempty"` + Enabled *bool `json:"enabled,omitempty"` + Vars *map[string]interface{} `json:"vars,omitempty"` +} + +// PackagePolicyRequestPackage defines model for package_policy_request_package. +type PackagePolicyRequestPackage struct { + ExperimentalDataStreamFeatures *[]struct { + DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` + + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` + + // Version Package version + Version string `json:"version"` +} + +// PackagePolicySecretRef defines model for package_policy_secret_ref. +type PackagePolicySecretRef struct { + Id string `json:"id"` +} + +// ServerHost defines model for server_host. 
+type ServerHost struct { + HostUrls []string `json:"host_urls"` + Id string `json:"id"` + IsDefault *bool `json:"is_default,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyId *string `json:"proxy_id"` +} + +// UpdateOutputElasticsearch defines model for update_output_elasticsearch. +type UpdateOutputElasticsearch struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml"` + Hosts *[]string `json:"hosts,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name *string `json:"name,omitempty"` + Preset *UpdateOutputElasticsearchPreset `json:"preset,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Shipper *UpdateOutputShipper `json:"shipper,omitempty"` + Ssl *UpdateOutputSsl `json:"ssl,omitempty"` + Type UpdateOutputElasticsearchType `json:"type"` +} + +// UpdateOutputElasticsearchPreset defines model for UpdateOutputElasticsearch.Preset. +type UpdateOutputElasticsearchPreset string + +// UpdateOutputElasticsearchType defines model for UpdateOutputElasticsearch.Type. +type UpdateOutputElasticsearchType string + +// UpdateOutputKafka defines model for update_output_kafka. +type UpdateOutputKafka struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + AuthType *UpdateOutputKafkaAuthType `json:"auth_type,omitempty"` + BrokerTimeout *float32 `json:"broker_timeout,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ClientId *string `json:"client_id,omitempty"` + Compression *UpdateOutputKafkaCompression `json:"compression,omitempty"` + CompressionLevel *interface{} `json:"compression_level,omitempty"` + ConfigYaml *string `json:"config_yaml"` + ConnectionType *interface{} `json:"connection_type,omitempty"` + Hash *struct { + Hash *string `json:"hash,omitempty"` + Random *bool `json:"random,omitempty"` + } `json:"hash,omitempty"` + Headers *[]struct { + Key string `json:"key"` + Value string `json:"value"` + } `json:"headers,omitempty"` + Hosts *[]string `json:"hosts,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Key *string `json:"key,omitempty"` + Name *string `json:"name,omitempty"` + Partition *UpdateOutputKafkaPartition `json:"partition,omitempty"` + Password *interface{} `json:"password,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Random *struct { + GroupEvents *float32 `json:"group_events,omitempty"` + } `json:"random,omitempty"` + RequiredAcks *UpdateOutputKafkaRequiredAcks `json:"required_acks,omitempty"` + RoundRobin *struct { + GroupEvents *float32 `json:"group_events,omitempty"` + } `json:"round_robin,omitempty"` + Sasl *struct { + Mechanism *UpdateOutputKafkaSaslMechanism `json:"mechanism,omitempty"` + } `json:"sasl"` + Secrets *struct { + Password *UpdateOutputKafka_Secrets_Password `json:"password,omitempty"` + Ssl *struct { + Key UpdateOutputKafka_Secrets_Ssl_Key `json:"key"` + } `json:"ssl,omitempty"` + } 
`json:"secrets,omitempty"` + Shipper *UpdateOutputShipper `json:"shipper,omitempty"` + Ssl *UpdateOutputSsl `json:"ssl,omitempty"` + Timeout *float32 `json:"timeout,omitempty"` + Topic *string `json:"topic,omitempty"` + Topics *[]struct { + Topic string `json:"topic"` + When *struct { + Condition *string `json:"condition,omitempty"` + Type *UpdateOutputKafkaTopicsWhenType `json:"type,omitempty"` + } `json:"when,omitempty"` + } `json:"topics,omitempty"` + Type UpdateOutputKafkaType `json:"type"` + Username *interface{} `json:"username,omitempty"` + Version *string `json:"version,omitempty"` +} + +// UpdateOutputKafkaAuthType defines model for UpdateOutputKafka.AuthType. +type UpdateOutputKafkaAuthType string - // Vars Stream level variable (see integration documentation for more information) - Vars *map[string]interface{} `json:"vars,omitempty"` +// UpdateOutputKafkaCompression defines model for UpdateOutputKafka.Compression. +type UpdateOutputKafkaCompression string + +// UpdateOutputKafkaPartition defines model for UpdateOutputKafka.Partition. +type UpdateOutputKafkaPartition string + +// UpdateOutputKafkaRequiredAcks defines model for UpdateOutputKafka.RequiredAcks. +type UpdateOutputKafkaRequiredAcks int + +// UpdateOutputKafkaSaslMechanism defines model for UpdateOutputKafka.Sasl.Mechanism. +type UpdateOutputKafkaSaslMechanism string + +// UpdateOutputKafkaSecretsPassword0 defines model for . +type UpdateOutputKafkaSecretsPassword0 struct { + Id string `json:"id"` } -// PackageStatus defines model for package_status. -type PackageStatus string +// UpdateOutputKafkaSecretsPassword1 defines model for . +type UpdateOutputKafkaSecretsPassword1 = string -// SearchResult defines model for search_result. -type SearchResult struct { - Description string `json:"description"` - Download string `json:"download"` - Name string `json:"name"` - Path string `json:"path"` - // Deprecated: - SavedObject *map[string]interface{} `json:"savedObject,omitempty"` - Status string `json:"status"` - Title string `json:"title"` - Type string `json:"type"` - Version string `json:"version"` +// UpdateOutputKafka_Secrets_Password defines model for UpdateOutputKafka.Secrets.Password. +type UpdateOutputKafka_Secrets_Password struct { + union json.RawMessage +} + +// UpdateOutputKafkaSecretsSslKey0 defines model for . +type UpdateOutputKafkaSecretsSslKey0 struct { + Id string `json:"id"` +} + +// UpdateOutputKafkaSecretsSslKey1 defines model for . +type UpdateOutputKafkaSecretsSslKey1 = string + +// UpdateOutputKafka_Secrets_Ssl_Key defines model for UpdateOutputKafka.Secrets.Ssl.Key. +type UpdateOutputKafka_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// UpdateOutputKafkaTopicsWhenType defines model for UpdateOutputKafka.Topics.When.Type. +type UpdateOutputKafkaTopicsWhenType string + +// UpdateOutputKafkaType defines model for UpdateOutputKafka.Type. +type UpdateOutputKafkaType string + +// UpdateOutputLogstash defines model for update_output_logstash. 
+type UpdateOutputLogstash struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml"` + Hosts *[]string `json:"hosts,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name *string `json:"name,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + Ssl *struct { + Key *UpdateOutputLogstash_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Shipper *UpdateOutputShipper `json:"shipper,omitempty"` + Ssl *UpdateOutputSsl `json:"ssl,omitempty"` + Type UpdateOutputLogstashType `json:"type"` +} + +// UpdateOutputLogstashSecretsSslKey0 defines model for . +type UpdateOutputLogstashSecretsSslKey0 struct { + Id string `json:"id"` +} + +// UpdateOutputLogstashSecretsSslKey1 defines model for . +type UpdateOutputLogstashSecretsSslKey1 = string + +// UpdateOutputLogstash_Secrets_Ssl_Key defines model for UpdateOutputLogstash.Secrets.Ssl.Key. +type UpdateOutputLogstash_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// UpdateOutputLogstashType defines model for UpdateOutputLogstash.Type. +type UpdateOutputLogstashType string + +// UpdateOutputRemoteElasticsearch defines model for update_output_remote_elasticsearch. +type UpdateOutputRemoteElasticsearch struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml"` + Hosts *[]string `json:"hosts,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name *string `json:"name,omitempty"` + Preset *UpdateOutputRemoteElasticsearchPreset `json:"preset,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + ServiceToken *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken `json:"service_token,omitempty"` + } `json:"secrets,omitempty"` + ServiceToken *string `json:"service_token"` + Shipper *UpdateOutputShipper `json:"shipper,omitempty"` + Ssl *UpdateOutputSsl `json:"ssl,omitempty"` + Type UpdateOutputRemoteElasticsearchType `json:"type"` +} + +// UpdateOutputRemoteElasticsearchPreset defines model for UpdateOutputRemoteElasticsearch.Preset. +type UpdateOutputRemoteElasticsearchPreset string + +// UpdateOutputRemoteElasticsearchSecretsServiceToken0 defines model for . +type UpdateOutputRemoteElasticsearchSecretsServiceToken0 struct { + Id string `json:"id"` +} + +// UpdateOutputRemoteElasticsearchSecretsServiceToken1 defines model for . +type UpdateOutputRemoteElasticsearchSecretsServiceToken1 = string + +// UpdateOutputRemoteElasticsearch_Secrets_ServiceToken defines model for UpdateOutputRemoteElasticsearch.Secrets.ServiceToken. +type UpdateOutputRemoteElasticsearch_Secrets_ServiceToken struct { + union json.RawMessage +} + +// UpdateOutputRemoteElasticsearchType defines model for UpdateOutputRemoteElasticsearch.Type. +type UpdateOutputRemoteElasticsearchType string + +// UpdateOutputShipper defines model for update_output_shipper. 
+type UpdateOutputShipper struct { + CompressionLevel *float32 `json:"compression_level"` + DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled"` + DiskQueueEnabled *bool `json:"disk_queue_enabled"` + DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled"` + DiskQueueMaxSize *float32 `json:"disk_queue_max_size"` + DiskQueuePath *string `json:"disk_queue_path"` + Loadbalance *bool `json:"loadbalance"` + MaxBatchBytes *float32 `json:"max_batch_bytes"` + MemQueueEvents *float32 `json:"mem_queue_events"` + QueueFlushTimeout *float32 `json:"queue_flush_timeout"` +} + +// UpdateOutputSsl defines model for update_output_ssl. +type UpdateOutputSsl struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + VerificationMode *UpdateOutputSslVerificationMode `json:"verification_mode,omitempty"` +} + +// UpdateOutputSslVerificationMode defines model for UpdateOutputSsl.VerificationMode. +type UpdateOutputSslVerificationMode string + +// UpdateOutputUnion defines model for update_output_union. +type UpdateOutputUnion struct { + union json.RawMessage +} + +// GetAgentPoliciesParams defines parameters for GetAgentPolicies. +type GetAgentPoliciesParams struct { + Page *float32 `form:"page,omitempty" json:"page,omitempty"` + PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"` + SortField *string `form:"sortField,omitempty" json:"sortField,omitempty"` + SortOrder *GetAgentPoliciesParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` + ShowUpgradeable *bool `form:"showUpgradeable,omitempty" json:"showUpgradeable,omitempty"` + Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"` + NoAgentCount *bool `form:"noAgentCount,omitempty" json:"noAgentCount,omitempty"` + Full *bool `form:"full,omitempty" json:"full,omitempty"` + Format *GetAgentPoliciesParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// GetAgentPoliciesParamsSortOrder defines parameters for GetAgentPolicies. +type GetAgentPoliciesParamsSortOrder string + +// GetAgentPoliciesParamsFormat defines parameters for GetAgentPolicies. +type GetAgentPoliciesParamsFormat string + +// CreateAgentPolicyJSONBody defines parameters for CreateAgentPolicy. +type CreateAgentPolicyJSONBody struct { + AdvancedSettings *struct { + AgentDownloadTargetDirectory *interface{} `json:"agent_download_target_directory"` + AgentDownloadTimeout *interface{} `json:"agent_download_timeout"` + AgentLimitsGoMaxProcs *interface{} `json:"agent_limits_go_max_procs"` + AgentLoggingLevel *interface{} `json:"agent_logging_level"` + AgentLoggingMetricsPeriod *interface{} `json:"agent_logging_metrics_period"` + } `json:"advanced_settings,omitempty"` + AgentFeatures *[]struct { + Enabled bool `json:"enabled"` + Name string `json:"name"` + } `json:"agent_features,omitempty"` + DataOutputId *string `json:"data_output_id"` + Description *string `json:"description,omitempty"` + DownloadSourceId *string `json:"download_source_id"` + FleetServerHostId *string `json:"fleet_server_host_id"` + Force *bool `json:"force,omitempty"` + + // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. 
+ GlobalDataTags *[]struct { + Name string `json:"name"` + Value CreateAgentPolicyJSONBody_GlobalDataTags_Value `json:"value"` + } `json:"global_data_tags,omitempty"` + HasFleetServer *bool `json:"has_fleet_server,omitempty"` + Id *string `json:"id,omitempty"` + InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultFleetServer *bool `json:"is_default_fleet_server,omitempty"` + IsManaged *bool `json:"is_managed,omitempty"` + IsProtected *bool `json:"is_protected,omitempty"` + + // KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled + KeepMonitoringAlive *bool `json:"keep_monitoring_alive,omitempty"` + MonitoringDiagnostics *struct { + Limit *struct { + Burst *float32 `json:"burst,omitempty"` + Interval *string `json:"interval,omitempty"` + } `json:"limit,omitempty"` + Uploader *struct { + InitDur *string `json:"init_dur,omitempty"` + MaxDur *string `json:"max_dur,omitempty"` + MaxRetries *float32 `json:"max_retries,omitempty"` + } `json:"uploader,omitempty"` + } `json:"monitoring_diagnostics,omitempty"` + MonitoringEnabled *[]CreateAgentPolicyJSONBodyMonitoringEnabled `json:"monitoring_enabled,omitempty"` + MonitoringHttp *struct { + Buffer *struct { + Enabled *bool `json:"enabled,omitempty"` + } `json:"buffer,omitempty"` + Enabled bool `json:"enabled"` + Host *string `json:"host,omitempty"` + Port *float32 `json:"port,omitempty"` + } `json:"monitoring_http,omitempty"` + MonitoringOutputId *string `json:"monitoring_output_id"` + MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` + Name string `json:"name"` + Namespace string `json:"namespace"` + + // Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *map[string]interface{} `json:"overrides,omitempty"` + SpaceIds *[]string `json:"space_ids,omitempty"` + + // SupportsAgentless Indicates whether the agent policy supports agentless integrations. + SupportsAgentless *bool `json:"supports_agentless,omitempty"` + UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` } -// Format defines model for format. -type Format string +// CreateAgentPolicyParams defines parameters for CreateAgentPolicy. +type CreateAgentPolicyParams struct { + SysMonitoring *bool `form:"sys_monitoring,omitempty" json:"sys_monitoring,omitempty"` +} + +// CreateAgentPolicyJSONBodyGlobalDataTagsValue0 defines parameters for CreateAgentPolicy. +type CreateAgentPolicyJSONBodyGlobalDataTagsValue0 = string -// Error defines model for error. -type Error struct { - Error *string `json:"error,omitempty"` - Message *string `json:"message,omitempty"` - StatusCode *float32 `json:"statusCode,omitempty"` +// CreateAgentPolicyJSONBodyGlobalDataTagsValue1 defines parameters for CreateAgentPolicy. +type CreateAgentPolicyJSONBodyGlobalDataTagsValue1 = float32 + +// CreateAgentPolicyJSONBody_GlobalDataTags_Value defines parameters for CreateAgentPolicy. +type CreateAgentPolicyJSONBody_GlobalDataTags_Value struct { + union json.RawMessage } +// CreateAgentPolicyJSONBodyMonitoringEnabled defines parameters for CreateAgentPolicy. +type CreateAgentPolicyJSONBodyMonitoringEnabled string + // DeleteAgentPolicyJSONBody defines parameters for DeleteAgentPolicy. 
type DeleteAgentPolicyJSONBody struct { AgentPolicyId string `json:"agentPolicyId"` + + // Force bypass validation checks that can prevent agent policy deletion + Force *bool `json:"force,omitempty"` +} + +// GetAgentPolicyParams defines parameters for GetAgentPolicy. +type GetAgentPolicyParams struct { + Format *GetAgentPolicyParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// GetAgentPolicyParamsFormat defines parameters for GetAgentPolicy. +type GetAgentPolicyParamsFormat string + +// UpdateAgentPolicyJSONBody defines parameters for UpdateAgentPolicy. +type UpdateAgentPolicyJSONBody struct { + AdvancedSettings *struct { + AgentDownloadTargetDirectory *interface{} `json:"agent_download_target_directory"` + AgentDownloadTimeout *interface{} `json:"agent_download_timeout"` + AgentLimitsGoMaxProcs *interface{} `json:"agent_limits_go_max_procs"` + AgentLoggingLevel *interface{} `json:"agent_logging_level"` + AgentLoggingMetricsPeriod *interface{} `json:"agent_logging_metrics_period"` + } `json:"advanced_settings,omitempty"` + AgentFeatures *[]struct { + Enabled bool `json:"enabled"` + Name string `json:"name"` + } `json:"agent_features,omitempty"` + DataOutputId *string `json:"data_output_id"` + Description *string `json:"description,omitempty"` + DownloadSourceId *string `json:"download_source_id"` + FleetServerHostId *string `json:"fleet_server_host_id"` + Force *bool `json:"force,omitempty"` + + // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. + GlobalDataTags *[]struct { + Name string `json:"name"` + Value UpdateAgentPolicyJSONBody_GlobalDataTags_Value `json:"value"` + } `json:"global_data_tags,omitempty"` + HasFleetServer *bool `json:"has_fleet_server,omitempty"` + Id *string `json:"id,omitempty"` + InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultFleetServer *bool `json:"is_default_fleet_server,omitempty"` + IsManaged *bool `json:"is_managed,omitempty"` + IsProtected *bool `json:"is_protected,omitempty"` + + // KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled + KeepMonitoringAlive *bool `json:"keep_monitoring_alive,omitempty"` + MonitoringDiagnostics *struct { + Limit *struct { + Burst *float32 `json:"burst,omitempty"` + Interval *string `json:"interval,omitempty"` + } `json:"limit,omitempty"` + Uploader *struct { + InitDur *string `json:"init_dur,omitempty"` + MaxDur *string `json:"max_dur,omitempty"` + MaxRetries *float32 `json:"max_retries,omitempty"` + } `json:"uploader,omitempty"` + } `json:"monitoring_diagnostics,omitempty"` + MonitoringEnabled *[]UpdateAgentPolicyJSONBodyMonitoringEnabled `json:"monitoring_enabled,omitempty"` + MonitoringHttp *struct { + Buffer *struct { + Enabled *bool `json:"enabled,omitempty"` + } `json:"buffer,omitempty"` + Enabled bool `json:"enabled"` + Host *string `json:"host,omitempty"` + Port *float32 `json:"port,omitempty"` + } `json:"monitoring_http,omitempty"` + MonitoringOutputId *string `json:"monitoring_output_id"` + MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` + Name string `json:"name"` + Namespace string `json:"namespace"` + + // Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure. 
+ Overrides *map[string]interface{} `json:"overrides,omitempty"` + SpaceIds *[]string `json:"space_ids,omitempty"` + + // SupportsAgentless Indicates whether the agent policy supports agentless integrations. + SupportsAgentless *bool `json:"supports_agentless,omitempty"` + UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` +} + +// UpdateAgentPolicyParams defines parameters for UpdateAgentPolicy. +type UpdateAgentPolicyParams struct { + Format *UpdateAgentPolicyParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// UpdateAgentPolicyParamsFormat defines parameters for UpdateAgentPolicy. +type UpdateAgentPolicyParamsFormat string + +// UpdateAgentPolicyJSONBodyGlobalDataTagsValue0 defines parameters for UpdateAgentPolicy. +type UpdateAgentPolicyJSONBodyGlobalDataTagsValue0 = string + +// UpdateAgentPolicyJSONBodyGlobalDataTagsValue1 defines parameters for UpdateAgentPolicy. +type UpdateAgentPolicyJSONBodyGlobalDataTagsValue1 = float32 + +// UpdateAgentPolicyJSONBody_GlobalDataTags_Value defines parameters for UpdateAgentPolicy. +type UpdateAgentPolicyJSONBody_GlobalDataTags_Value struct { + union json.RawMessage +} + +// UpdateAgentPolicyJSONBodyMonitoringEnabled defines parameters for UpdateAgentPolicy. +type UpdateAgentPolicyJSONBodyMonitoringEnabled string + +// GetEnrollmentApiKeysParams defines parameters for GetEnrollmentApiKeys. +type GetEnrollmentApiKeysParams struct { + Page *float32 `form:"page,omitempty" json:"page,omitempty"` + PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"` + Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"` } -// ListAllPackagesParams defines parameters for ListAllPackages. -type ListAllPackagesParams struct { - // ExcludeInstallStatus Whether to exclude the install status of each package. Enabling this option will opt in to caching for the response via `cache-control` headers. If you don't need up-to-date installation info for a package, and are querying for a list of available packages, providing this flag can improve performance substantially. - ExcludeInstallStatus *bool `form:"excludeInstallStatus,omitempty" json:"excludeInstallStatus,omitempty"` +// ListPackagesParams defines parameters for ListPackages. +type ListPackagesParams struct { + Category *string `form:"category,omitempty" json:"category,omitempty"` + Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` + Experimental *bool `form:"experimental,omitempty" json:"experimental,omitempty"` + ExcludeInstallStatus *bool `form:"excludeInstallStatus,omitempty" json:"excludeInstallStatus,omitempty"` +} - // Prerelease Whether to return prerelease versions of packages (e.g. beta, rc, preview) - Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` - Experimental *bool `form:"experimental,omitempty" json:"experimental,omitempty"` - Category *string `form:"category,omitempty" json:"category,omitempty"` +// InstallPackageByUploadParams defines parameters for InstallPackageByUpload. +type InstallPackageByUploadParams struct { + IgnoreMappingUpdateErrors *bool `form:"ignoreMappingUpdateErrors,omitempty" json:"ignoreMappingUpdateErrors,omitempty"` + SkipDataStreamRollover *bool `form:"skipDataStreamRollover,omitempty" json:"skipDataStreamRollover,omitempty"` } // DeletePackageJSONBody defines parameters for DeletePackage. type DeletePackageJSONBody struct { - Force *bool `json:"force,omitempty"` + Force bool `json:"force"` } // DeletePackageParams defines parameters for DeletePackage. 
type DeletePackageParams struct { - // IgnoreUnverified Ignore if the package is fails signature verification + Force *bool `form:"force,omitempty" json:"force,omitempty"` +} + +// GetPackageParams defines parameters for GetPackage. +type GetPackageParams struct { IgnoreUnverified *bool `form:"ignoreUnverified,omitempty" json:"ignoreUnverified,omitempty"` + Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` + Full *bool `form:"full,omitempty" json:"full,omitempty"` + WithMetadata *bool `form:"withMetadata,omitempty" json:"withMetadata,omitempty"` +} + +// InstallPackageJSONBody defines parameters for InstallPackage. +type InstallPackageJSONBody struct { + Force *bool `json:"force,omitempty"` + IgnoreConstraints *bool `json:"ignore_constraints,omitempty"` +} - // Full Return all fields from the package manifest, not just those supported by the Elastic Package Registry - Full *bool `form:"full,omitempty" json:"full,omitempty"` +// InstallPackageParams defines parameters for InstallPackage. +type InstallPackageParams struct { + Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` + IgnoreMappingUpdateErrors *bool `form:"ignoreMappingUpdateErrors,omitempty" json:"ignoreMappingUpdateErrors,omitempty"` + SkipDataStreamRollover *bool `form:"skipDataStreamRollover,omitempty" json:"skipDataStreamRollover,omitempty"` +} + +// CreateFleetServerHostJSONBody defines parameters for CreateFleetServerHost. +type CreateFleetServerHostJSONBody struct { + HostUrls []string `json:"host_urls"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyId *string `json:"proxy_id,omitempty"` +} + +// UpdateFleetServerHostJSONBody defines parameters for UpdateFleetServerHost. +type UpdateFleetServerHostJSONBody struct { + HostUrls *[]string `json:"host_urls,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + Name *string `json:"name,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` +} + +// GetPackagePoliciesParams defines parameters for GetPackagePolicies. +type GetPackagePoliciesParams struct { + Page *float32 `form:"page,omitempty" json:"page,omitempty"` + PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"` + SortField *string `form:"sortField,omitempty" json:"sortField,omitempty"` + SortOrder *GetPackagePoliciesParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` + ShowUpgradeable *bool `form:"showUpgradeable,omitempty" json:"showUpgradeable,omitempty"` + Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"` + Format *GetPackagePoliciesParamsFormat `form:"format,omitempty" json:"format,omitempty"` + WithAgentCount *bool `form:"withAgentCount,omitempty" json:"withAgentCount,omitempty"` +} + +// GetPackagePoliciesParamsSortOrder defines parameters for GetPackagePolicies. +type GetPackagePoliciesParamsSortOrder string + +// GetPackagePoliciesParamsFormat defines parameters for GetPackagePolicies. +type GetPackagePoliciesParamsFormat string + +// CreatePackagePolicyParams defines parameters for CreatePackagePolicy. +type CreatePackagePolicyParams struct { + Format *CreatePackagePolicyParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// CreatePackagePolicyParamsFormat defines parameters for CreatePackagePolicy. 
+type CreatePackagePolicyParamsFormat string + +// DeletePackagePolicyParams defines parameters for DeletePackagePolicy. +type DeletePackagePolicyParams struct { + Force *bool `form:"force,omitempty" json:"force,omitempty"` +} + +// GetPackagePolicyParams defines parameters for GetPackagePolicy. +type GetPackagePolicyParams struct { + Format *GetPackagePolicyParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// GetPackagePolicyParamsFormat defines parameters for GetPackagePolicy. +type GetPackagePolicyParamsFormat string + +// UpdatePackagePolicyParams defines parameters for UpdatePackagePolicy. +type UpdatePackagePolicyParams struct { + Format *UpdatePackagePolicyParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// UpdatePackagePolicyParamsFormat defines parameters for UpdatePackagePolicy. +type UpdatePackagePolicyParamsFormat string + +// CreateAgentPolicyJSONRequestBody defines body for CreateAgentPolicy for application/json ContentType. +type CreateAgentPolicyJSONRequestBody CreateAgentPolicyJSONBody + +// DeleteAgentPolicyJSONRequestBody defines body for DeleteAgentPolicy for application/json ContentType. +type DeleteAgentPolicyJSONRequestBody DeleteAgentPolicyJSONBody + +// UpdateAgentPolicyJSONRequestBody defines body for UpdateAgentPolicy for application/json ContentType. +type UpdateAgentPolicyJSONRequestBody UpdateAgentPolicyJSONBody + +// DeletePackageJSONRequestBody defines body for DeletePackage for application/json ContentType. +type DeletePackageJSONRequestBody DeletePackageJSONBody + +// InstallPackageJSONRequestBody defines body for InstallPackage for application/json ContentType. +type InstallPackageJSONRequestBody InstallPackageJSONBody + +// CreateFleetServerHostJSONRequestBody defines body for CreateFleetServerHost for application/json ContentType. +type CreateFleetServerHostJSONRequestBody CreateFleetServerHostJSONBody + +// UpdateFleetServerHostJSONRequestBody defines body for UpdateFleetServerHost for application/json ContentType. +type UpdateFleetServerHostJSONRequestBody UpdateFleetServerHostJSONBody + +// CreateOutputJSONRequestBody defines body for CreateOutput for application/json ContentType. +type CreateOutputJSONRequestBody = NewOutputUnion + +// UpdateOutputJSONRequestBody defines body for UpdateOutput for application/json ContentType. +type UpdateOutputJSONRequestBody = UpdateOutputUnion + +// CreatePackagePolicyJSONRequestBody defines body for CreatePackagePolicy for application/json ContentType. +type CreatePackagePolicyJSONRequestBody = PackagePolicyRequest + +// UpdatePackagePolicyJSONRequestBody defines body for UpdatePackagePolicy for application/json ContentType. +type UpdatePackagePolicyJSONRequestBody = PackagePolicyRequest + +// Getter for additional properties for AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges. 
Returns the specified +// element and whether it was found +func (a AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges +func (a *AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges to handle AdditionalProperties +func (a *AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["cluster"]; found { + err = json.Unmarshal(raw, &a.Cluster) + if err != nil { + return fmt.Errorf("error reading 'cluster': %w", err) + } + delete(object, "cluster") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges to handle AdditionalProperties +func (a AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Cluster != nil { + object["cluster"], err = json.Marshal(a.Cluster) + if err != nil { + return nil, fmt.Errorf("error marshaling 'cluster': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for AgentPolicy_PackagePolicies_1_Elasticsearch. 
Returns the specified +// element and whether it was found +func (a AgentPolicy_PackagePolicies_1_Elasticsearch) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for AgentPolicy_PackagePolicies_1_Elasticsearch +func (a *AgentPolicy_PackagePolicies_1_Elasticsearch) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for AgentPolicy_PackagePolicies_1_Elasticsearch to handle AdditionalProperties +func (a *AgentPolicy_PackagePolicies_1_Elasticsearch) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["privileges"]; found { + err = json.Unmarshal(raw, &a.Privileges) + if err != nil { + return fmt.Errorf("error reading 'privileges': %w", err) + } + delete(object, "privileges") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for AgentPolicy_PackagePolicies_1_Elasticsearch to handle AdditionalProperties +func (a AgentPolicy_PackagePolicies_1_Elasticsearch) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Privileges != nil { + object["privileges"], err = json.Marshal(a.Privileges) + if err != nil { + return nil, fmt.Errorf("error marshaling 'privileges': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputElasticsearch. 
Returns the specified +// element and whether it was found +func (a OutputElasticsearch) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputElasticsearch +func (a *OutputElasticsearch) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputElasticsearch to handle AdditionalProperties +func (a *OutputElasticsearch) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["allow_edit"]; found { + err = json.Unmarshal(raw, &a.AllowEdit) + if err != nil { + return fmt.Errorf("error reading 'allow_edit': %w", err) + } + delete(object, "allow_edit") + } + + if raw, found := object["ca_sha256"]; found { + err = json.Unmarshal(raw, &a.CaSha256) + if err != nil { + return fmt.Errorf("error reading 'ca_sha256': %w", err) + } + delete(object, "ca_sha256") + } + + if raw, found := object["ca_trusted_fingerprint"]; found { + err = json.Unmarshal(raw, &a.CaTrustedFingerprint) + if err != nil { + return fmt.Errorf("error reading 'ca_trusted_fingerprint': %w", err) + } + delete(object, "ca_trusted_fingerprint") + } + + if raw, found := object["config_yaml"]; found { + err = json.Unmarshal(raw, &a.ConfigYaml) + if err != nil { + return fmt.Errorf("error reading 'config_yaml': %w", err) + } + delete(object, "config_yaml") + } + + if raw, found := object["hosts"]; found { + err = json.Unmarshal(raw, &a.Hosts) + if err != nil { + return fmt.Errorf("error reading 'hosts': %w", err) + } + delete(object, "hosts") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["is_default"]; found { + err = json.Unmarshal(raw, &a.IsDefault) + if err != nil { + return fmt.Errorf("error reading 'is_default': %w", err) + } + delete(object, "is_default") + } + + if raw, found := object["is_default_monitoring"]; found { + err = json.Unmarshal(raw, &a.IsDefaultMonitoring) + if err != nil { + return fmt.Errorf("error reading 'is_default_monitoring': %w", err) + } + delete(object, "is_default_monitoring") + } + + if raw, found := object["is_internal"]; found { + err = json.Unmarshal(raw, &a.IsInternal) + if err != nil { + return fmt.Errorf("error reading 'is_internal': %w", err) + } + delete(object, "is_internal") + } + + if raw, found := object["is_preconfigured"]; found { + err = json.Unmarshal(raw, &a.IsPreconfigured) + if err != nil { + return fmt.Errorf("error reading 'is_preconfigured': %w", err) + } + delete(object, "is_preconfigured") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["preset"]; found { + err = json.Unmarshal(raw, &a.Preset) + if err != nil { + return fmt.Errorf("error reading 'preset': %w", err) + } + delete(object, "preset") + } + + if raw, found := object["proxy_id"]; found { + err = json.Unmarshal(raw, &a.ProxyId) + if err != nil { + return fmt.Errorf("error reading 'proxy_id': %w", err) + } + delete(object, "proxy_id") + } + + if raw, 
found := object["shipper"]; found { + err = json.Unmarshal(raw, &a.Shipper) + if err != nil { + return fmt.Errorf("error reading 'shipper': %w", err) + } + delete(object, "shipper") + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputElasticsearch to handle AdditionalProperties +func (a OutputElasticsearch) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.AllowEdit != nil { + object["allow_edit"], err = json.Marshal(a.AllowEdit) + if err != nil { + return nil, fmt.Errorf("error marshaling 'allow_edit': %w", err) + } + } + + if a.CaSha256 != nil { + object["ca_sha256"], err = json.Marshal(a.CaSha256) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_sha256': %w", err) + } + } + + if a.CaTrustedFingerprint != nil { + object["ca_trusted_fingerprint"], err = json.Marshal(a.CaTrustedFingerprint) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_trusted_fingerprint': %w", err) + } + } + + if a.ConfigYaml != nil { + object["config_yaml"], err = json.Marshal(a.ConfigYaml) + if err != nil { + return nil, fmt.Errorf("error marshaling 'config_yaml': %w", err) + } + } + + object["hosts"], err = json.Marshal(a.Hosts) + if err != nil { + return nil, fmt.Errorf("error marshaling 'hosts': %w", err) + } + + if a.Id != nil { + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + } + + if a.IsDefault != nil { + object["is_default"], err = json.Marshal(a.IsDefault) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default': %w", err) + } + } + + if a.IsDefaultMonitoring != nil { + object["is_default_monitoring"], err = json.Marshal(a.IsDefaultMonitoring) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default_monitoring': %w", err) + } + } + + if a.IsInternal != nil { + object["is_internal"], err = json.Marshal(a.IsInternal) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_internal': %w", err) + } + } + + if a.IsPreconfigured != nil { + object["is_preconfigured"], err = json.Marshal(a.IsPreconfigured) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_preconfigured': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Preset != nil { + object["preset"], err = json.Marshal(a.Preset) + if err != nil { + return nil, fmt.Errorf("error marshaling 'preset': %w", err) + } + } + + if a.ProxyId != nil { + object["proxy_id"], err = json.Marshal(a.ProxyId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'proxy_id': %w", err) + } + } + + if a.Shipper != nil { + object["shipper"], err = json.Marshal(a.Shipper) + if err != nil { + return nil, fmt.Errorf("error marshaling 'shipper': 
%w", err) + } + } + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka. Returns the specified +// element and whether it was found +func (a OutputKafka) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka +func (a *OutputKafka) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka to handle AdditionalProperties +func (a *OutputKafka) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["allow_edit"]; found { + err = json.Unmarshal(raw, &a.AllowEdit) + if err != nil { + return fmt.Errorf("error reading 'allow_edit': %w", err) + } + delete(object, "allow_edit") + } + + if raw, found := object["auth_type"]; found { + err = json.Unmarshal(raw, &a.AuthType) + if err != nil { + return fmt.Errorf("error reading 'auth_type': %w", err) + } + delete(object, "auth_type") + } + + if raw, found := object["broker_timeout"]; found { + err = json.Unmarshal(raw, &a.BrokerTimeout) + if err != nil { + return fmt.Errorf("error reading 'broker_timeout': %w", err) + } + delete(object, "broker_timeout") + } + + if raw, found := object["ca_sha256"]; found { + err = json.Unmarshal(raw, &a.CaSha256) + if err != nil { + return fmt.Errorf("error reading 'ca_sha256': %w", err) + } + delete(object, "ca_sha256") + } + + if raw, found := object["ca_trusted_fingerprint"]; found { + err = json.Unmarshal(raw, &a.CaTrustedFingerprint) + if err != nil { + return fmt.Errorf("error reading 'ca_trusted_fingerprint': %w", err) + } + delete(object, "ca_trusted_fingerprint") + } + + if raw, found := object["client_id"]; found { + err = json.Unmarshal(raw, &a.ClientId) + if err != nil { + return fmt.Errorf("error reading 'client_id': %w", err) + } + delete(object, "client_id") + } + + if raw, found := object["compression"]; found { + err = json.Unmarshal(raw, &a.Compression) + if err != nil { + return fmt.Errorf("error reading 'compression': %w", err) + } + delete(object, "compression") + } + + if raw, found := object["compression_level"]; found { + err = json.Unmarshal(raw, &a.CompressionLevel) + if err != nil { + return fmt.Errorf("error reading 'compression_level': %w", err) + } + delete(object, "compression_level") + } + + if raw, found := object["config_yaml"]; found { + err = json.Unmarshal(raw, &a.ConfigYaml) + if err != nil { + return fmt.Errorf("error reading 'config_yaml': %w", err) + } + delete(object, "config_yaml") + } + + if raw, found := object["connection_type"]; found { + err = json.Unmarshal(raw, &a.ConnectionType) + if err != nil { + return fmt.Errorf("error reading 'connection_type': %w", err) + } + delete(object, "connection_type") + } + + 
if raw, found := object["hash"]; found { + err = json.Unmarshal(raw, &a.Hash) + if err != nil { + return fmt.Errorf("error reading 'hash': %w", err) + } + delete(object, "hash") + } + + if raw, found := object["headers"]; found { + err = json.Unmarshal(raw, &a.Headers) + if err != nil { + return fmt.Errorf("error reading 'headers': %w", err) + } + delete(object, "headers") + } + + if raw, found := object["hosts"]; found { + err = json.Unmarshal(raw, &a.Hosts) + if err != nil { + return fmt.Errorf("error reading 'hosts': %w", err) + } + delete(object, "hosts") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["is_default"]; found { + err = json.Unmarshal(raw, &a.IsDefault) + if err != nil { + return fmt.Errorf("error reading 'is_default': %w", err) + } + delete(object, "is_default") + } + + if raw, found := object["is_default_monitoring"]; found { + err = json.Unmarshal(raw, &a.IsDefaultMonitoring) + if err != nil { + return fmt.Errorf("error reading 'is_default_monitoring': %w", err) + } + delete(object, "is_default_monitoring") + } + + if raw, found := object["is_internal"]; found { + err = json.Unmarshal(raw, &a.IsInternal) + if err != nil { + return fmt.Errorf("error reading 'is_internal': %w", err) + } + delete(object, "is_internal") + } + + if raw, found := object["is_preconfigured"]; found { + err = json.Unmarshal(raw, &a.IsPreconfigured) + if err != nil { + return fmt.Errorf("error reading 'is_preconfigured': %w", err) + } + delete(object, "is_preconfigured") + } + + if raw, found := object["key"]; found { + err = json.Unmarshal(raw, &a.Key) + if err != nil { + return fmt.Errorf("error reading 'key': %w", err) + } + delete(object, "key") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["partition"]; found { + err = json.Unmarshal(raw, &a.Partition) + if err != nil { + return fmt.Errorf("error reading 'partition': %w", err) + } + delete(object, "partition") + } + + if raw, found := object["password"]; found { + err = json.Unmarshal(raw, &a.Password) + if err != nil { + return fmt.Errorf("error reading 'password': %w", err) + } + delete(object, "password") + } + + if raw, found := object["proxy_id"]; found { + err = json.Unmarshal(raw, &a.ProxyId) + if err != nil { + return fmt.Errorf("error reading 'proxy_id': %w", err) + } + delete(object, "proxy_id") + } + + if raw, found := object["random"]; found { + err = json.Unmarshal(raw, &a.Random) + if err != nil { + return fmt.Errorf("error reading 'random': %w", err) + } + delete(object, "random") + } + + if raw, found := object["required_acks"]; found { + err = json.Unmarshal(raw, &a.RequiredAcks) + if err != nil { + return fmt.Errorf("error reading 'required_acks': %w", err) + } + delete(object, "required_acks") + } + + if raw, found := object["round_robin"]; found { + err = json.Unmarshal(raw, &a.RoundRobin) + if err != nil { + return fmt.Errorf("error reading 'round_robin': %w", err) + } + delete(object, "round_robin") + } + + if raw, found := object["sasl"]; found { + err = json.Unmarshal(raw, &a.Sasl) + if err != nil { + return fmt.Errorf("error reading 'sasl': %w", err) + } + delete(object, "sasl") + } + + if raw, found := object["secrets"]; found { + err = json.Unmarshal(raw, &a.Secrets) + if err != nil { + 
return fmt.Errorf("error reading 'secrets': %w", err) + } + delete(object, "secrets") + } + + if raw, found := object["shipper"]; found { + err = json.Unmarshal(raw, &a.Shipper) + if err != nil { + return fmt.Errorf("error reading 'shipper': %w", err) + } + delete(object, "shipper") + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if raw, found := object["timeout"]; found { + err = json.Unmarshal(raw, &a.Timeout) + if err != nil { + return fmt.Errorf("error reading 'timeout': %w", err) + } + delete(object, "timeout") + } + + if raw, found := object["topic"]; found { + err = json.Unmarshal(raw, &a.Topic) + if err != nil { + return fmt.Errorf("error reading 'topic': %w", err) + } + delete(object, "topic") + } + + if raw, found := object["topics"]; found { + err = json.Unmarshal(raw, &a.Topics) + if err != nil { + return fmt.Errorf("error reading 'topics': %w", err) + } + delete(object, "topics") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["username"]; found { + err = json.Unmarshal(raw, &a.Username) + if err != nil { + return fmt.Errorf("error reading 'username': %w", err) + } + delete(object, "username") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka to handle AdditionalProperties +func (a OutputKafka) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.AllowEdit != nil { + object["allow_edit"], err = json.Marshal(a.AllowEdit) + if err != nil { + return nil, fmt.Errorf("error marshaling 'allow_edit': %w", err) + } + } + + object["auth_type"], err = json.Marshal(a.AuthType) + if err != nil { + return nil, fmt.Errorf("error marshaling 'auth_type': %w", err) + } + + if a.BrokerTimeout != nil { + object["broker_timeout"], err = json.Marshal(a.BrokerTimeout) + if err != nil { + return nil, fmt.Errorf("error marshaling 'broker_timeout': %w", err) + } + } + + if a.CaSha256 != nil { + object["ca_sha256"], err = json.Marshal(a.CaSha256) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_sha256': %w", err) + } + } + + if a.CaTrustedFingerprint != nil { + object["ca_trusted_fingerprint"], err = json.Marshal(a.CaTrustedFingerprint) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_trusted_fingerprint': %w", err) + } + } + + if a.ClientId != nil { + object["client_id"], err = json.Marshal(a.ClientId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'client_id': %w", err) + } + } + + if a.Compression != nil { + object["compression"], err = json.Marshal(a.Compression) + if err != nil { + return nil, fmt.Errorf("error marshaling 'compression': %w", err) + } + } + + object["compression_level"], err = json.Marshal(a.CompressionLevel) + if err != nil { 
+ return nil, fmt.Errorf("error marshaling 'compression_level': %w", err) + } + + if a.ConfigYaml != nil { + object["config_yaml"], err = json.Marshal(a.ConfigYaml) + if err != nil { + return nil, fmt.Errorf("error marshaling 'config_yaml': %w", err) + } + } + + object["connection_type"], err = json.Marshal(a.ConnectionType) + if err != nil { + return nil, fmt.Errorf("error marshaling 'connection_type': %w", err) + } + + if a.Hash != nil { + object["hash"], err = json.Marshal(a.Hash) + if err != nil { + return nil, fmt.Errorf("error marshaling 'hash': %w", err) + } + } + + if a.Headers != nil { + object["headers"], err = json.Marshal(a.Headers) + if err != nil { + return nil, fmt.Errorf("error marshaling 'headers': %w", err) + } + } + + object["hosts"], err = json.Marshal(a.Hosts) + if err != nil { + return nil, fmt.Errorf("error marshaling 'hosts': %w", err) + } + + if a.Id != nil { + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + } + + if a.IsDefault != nil { + object["is_default"], err = json.Marshal(a.IsDefault) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default': %w", err) + } + } + + if a.IsDefaultMonitoring != nil { + object["is_default_monitoring"], err = json.Marshal(a.IsDefaultMonitoring) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default_monitoring': %w", err) + } + } + + if a.IsInternal != nil { + object["is_internal"], err = json.Marshal(a.IsInternal) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_internal': %w", err) + } + } + + if a.IsPreconfigured != nil { + object["is_preconfigured"], err = json.Marshal(a.IsPreconfigured) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_preconfigured': %w", err) + } + } + + if a.Key != nil { + object["key"], err = json.Marshal(a.Key) + if err != nil { + return nil, fmt.Errorf("error marshaling 'key': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Partition != nil { + object["partition"], err = json.Marshal(a.Partition) + if err != nil { + return nil, fmt.Errorf("error marshaling 'partition': %w", err) + } + } + + object["password"], err = json.Marshal(a.Password) + if err != nil { + return nil, fmt.Errorf("error marshaling 'password': %w", err) + } + + if a.ProxyId != nil { + object["proxy_id"], err = json.Marshal(a.ProxyId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'proxy_id': %w", err) + } + } + + if a.Random != nil { + object["random"], err = json.Marshal(a.Random) + if err != nil { + return nil, fmt.Errorf("error marshaling 'random': %w", err) + } + } + + if a.RequiredAcks != nil { + object["required_acks"], err = json.Marshal(a.RequiredAcks) + if err != nil { + return nil, fmt.Errorf("error marshaling 'required_acks': %w", err) + } + } + + if a.RoundRobin != nil { + object["round_robin"], err = json.Marshal(a.RoundRobin) + if err != nil { + return nil, fmt.Errorf("error marshaling 'round_robin': %w", err) + } + } + + if a.Sasl != nil { + object["sasl"], err = json.Marshal(a.Sasl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'sasl': %w", err) + } + } + + if a.Secrets != nil { + object["secrets"], err = json.Marshal(a.Secrets) + if err != nil { + return nil, fmt.Errorf("error marshaling 'secrets': %w", err) + } + } + + if a.Shipper != nil { + object["shipper"], err = json.Marshal(a.Shipper) + if err != nil { + return nil, fmt.Errorf("error 
marshaling 'shipper': %w", err) + } + } + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + if a.Timeout != nil { + object["timeout"], err = json.Marshal(a.Timeout) + if err != nil { + return nil, fmt.Errorf("error marshaling 'timeout': %w", err) + } + } + + if a.Topic != nil { + object["topic"], err = json.Marshal(a.Topic) + if err != nil { + return nil, fmt.Errorf("error marshaling 'topic': %w", err) + } + } + + if a.Topics != nil { + object["topics"], err = json.Marshal(a.Topics) + if err != nil { + return nil, fmt.Errorf("error marshaling 'topics': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + object["username"], err = json.Marshal(a.Username) + if err != nil { + return nil, fmt.Errorf("error marshaling 'username': %w", err) + } + + if a.Version != nil { + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Hash. Returns the specified +// element and whether it was found +func (a OutputKafka_Hash) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Hash +func (a *OutputKafka_Hash) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Hash to handle AdditionalProperties +func (a *OutputKafka_Hash) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["hash"]; found { + err = json.Unmarshal(raw, &a.Hash) + if err != nil { + return fmt.Errorf("error reading 'hash': %w", err) + } + delete(object, "hash") + } + + if raw, found := object["random"]; found { + err = json.Unmarshal(raw, &a.Random) + if err != nil { + return fmt.Errorf("error reading 'random': %w", err) + } + delete(object, "random") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Hash to handle AdditionalProperties +func (a OutputKafka_Hash) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Hash != nil { + object["hash"], err = json.Marshal(a.Hash) + if err != nil { + return nil, fmt.Errorf("error marshaling 'hash': %w", err) + } + } + + if a.Random != nil { + object["random"], err = json.Marshal(a.Random) + if err != nil { + return nil, fmt.Errorf("error marshaling 'random': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err 
= json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Headers_Item. Returns the specified +// element and whether it was found +func (a OutputKafka_Headers_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Headers_Item +func (a *OutputKafka_Headers_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Headers_Item to handle AdditionalProperties +func (a *OutputKafka_Headers_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["key"]; found { + err = json.Unmarshal(raw, &a.Key) + if err != nil { + return fmt.Errorf("error reading 'key': %w", err) + } + delete(object, "key") + } + + if raw, found := object["value"]; found { + err = json.Unmarshal(raw, &a.Value) + if err != nil { + return fmt.Errorf("error reading 'value': %w", err) + } + delete(object, "value") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Headers_Item to handle AdditionalProperties +func (a OutputKafka_Headers_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["key"], err = json.Marshal(a.Key) + if err != nil { + return nil, fmt.Errorf("error marshaling 'key': %w", err) + } + + object["value"], err = json.Marshal(a.Value) + if err != nil { + return nil, fmt.Errorf("error marshaling 'value': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Random. 
Returns the specified +// element and whether it was found +func (a OutputKafka_Random) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Random +func (a *OutputKafka_Random) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Random to handle AdditionalProperties +func (a *OutputKafka_Random) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["group_events"]; found { + err = json.Unmarshal(raw, &a.GroupEvents) + if err != nil { + return fmt.Errorf("error reading 'group_events': %w", err) + } + delete(object, "group_events") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Random to handle AdditionalProperties +func (a OutputKafka_Random) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.GroupEvents != nil { + object["group_events"], err = json.Marshal(a.GroupEvents) + if err != nil { + return nil, fmt.Errorf("error marshaling 'group_events': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_RoundRobin. 
Returns the specified +// element and whether it was found +func (a OutputKafka_RoundRobin) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_RoundRobin +func (a *OutputKafka_RoundRobin) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_RoundRobin to handle AdditionalProperties +func (a *OutputKafka_RoundRobin) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["group_events"]; found { + err = json.Unmarshal(raw, &a.GroupEvents) + if err != nil { + return fmt.Errorf("error reading 'group_events': %w", err) + } + delete(object, "group_events") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_RoundRobin to handle AdditionalProperties +func (a OutputKafka_RoundRobin) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.GroupEvents != nil { + object["group_events"], err = json.Marshal(a.GroupEvents) + if err != nil { + return nil, fmt.Errorf("error marshaling 'group_events': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Sasl. 
Returns the specified +// element and whether it was found +func (a OutputKafka_Sasl) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Sasl +func (a *OutputKafka_Sasl) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Sasl to handle AdditionalProperties +func (a *OutputKafka_Sasl) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["mechanism"]; found { + err = json.Unmarshal(raw, &a.Mechanism) + if err != nil { + return fmt.Errorf("error reading 'mechanism': %w", err) + } + delete(object, "mechanism") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Sasl to handle AdditionalProperties +func (a OutputKafka_Sasl) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Mechanism != nil { + object["mechanism"], err = json.Marshal(a.Mechanism) + if err != nil { + return nil, fmt.Errorf("error marshaling 'mechanism': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafkaSecretsPassword0. 
Returns the specified +// element and whether it was found +func (a OutputKafkaSecretsPassword0) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafkaSecretsPassword0 +func (a *OutputKafkaSecretsPassword0) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafkaSecretsPassword0 to handle AdditionalProperties +func (a *OutputKafkaSecretsPassword0) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafkaSecretsPassword0 to handle AdditionalProperties +func (a OutputKafkaSecretsPassword0) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafkaSecretsSslKey0. 
Returns the specified +// element and whether it was found +func (a OutputKafkaSecretsSslKey0) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafkaSecretsSslKey0 +func (a *OutputKafkaSecretsSslKey0) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafkaSecretsSslKey0 to handle AdditionalProperties +func (a *OutputKafkaSecretsSslKey0) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafkaSecretsSslKey0 to handle AdditionalProperties +func (a OutputKafkaSecretsSslKey0) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Secrets_Ssl. 
Returns the specified +// element and whether it was found +func (a OutputKafka_Secrets_Ssl) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Secrets_Ssl +func (a *OutputKafka_Secrets_Ssl) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Secrets_Ssl to handle AdditionalProperties +func (a *OutputKafka_Secrets_Ssl) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["key"]; found { + err = json.Unmarshal(raw, &a.Key) + if err != nil { + return fmt.Errorf("error reading 'key': %w", err) + } + delete(object, "key") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Secrets_Ssl to handle AdditionalProperties +func (a OutputKafka_Secrets_Ssl) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["key"], err = json.Marshal(a.Key) + if err != nil { + return nil, fmt.Errorf("error marshaling 'key': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Secrets. 
Returns the specified +// element and whether it was found +func (a OutputKafka_Secrets) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Secrets +func (a *OutputKafka_Secrets) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Secrets to handle AdditionalProperties +func (a *OutputKafka_Secrets) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["password"]; found { + err = json.Unmarshal(raw, &a.Password) + if err != nil { + return fmt.Errorf("error reading 'password': %w", err) + } + delete(object, "password") + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Secrets to handle AdditionalProperties +func (a OutputKafka_Secrets) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Password != nil { + object["password"], err = json.Marshal(a.Password) + if err != nil { + return nil, fmt.Errorf("error marshaling 'password': %w", err) + } + } + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Topics_When. 
Returns the specified +// element and whether it was found +func (a OutputKafka_Topics_When) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Topics_When +func (a *OutputKafka_Topics_When) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Topics_When to handle AdditionalProperties +func (a *OutputKafka_Topics_When) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["condition"]; found { + err = json.Unmarshal(raw, &a.Condition) + if err != nil { + return fmt.Errorf("error reading 'condition': %w", err) + } + delete(object, "condition") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Topics_When to handle AdditionalProperties +func (a OutputKafka_Topics_When) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Condition != nil { + object["condition"], err = json.Marshal(a.Condition) + if err != nil { + return nil, fmt.Errorf("error marshaling 'condition': %w", err) + } + } + + if a.Type != nil { + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Topics_Item. 
Returns the specified +// element and whether it was found +func (a OutputKafka_Topics_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Topics_Item +func (a *OutputKafka_Topics_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Topics_Item to handle AdditionalProperties +func (a *OutputKafka_Topics_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["topic"]; found { + err = json.Unmarshal(raw, &a.Topic) + if err != nil { + return fmt.Errorf("error reading 'topic': %w", err) + } + delete(object, "topic") + } + + if raw, found := object["when"]; found { + err = json.Unmarshal(raw, &a.When) + if err != nil { + return fmt.Errorf("error reading 'when': %w", err) + } + delete(object, "when") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Topics_Item to handle AdditionalProperties +func (a OutputKafka_Topics_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["topic"], err = json.Marshal(a.Topic) + if err != nil { + return nil, fmt.Errorf("error marshaling 'topic': %w", err) + } + + if a.When != nil { + object["when"], err = json.Marshal(a.When) + if err != nil { + return nil, fmt.Errorf("error marshaling 'when': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputLogstash. 
Returns the specified +// element and whether it was found +func (a OutputLogstash) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputLogstash +func (a *OutputLogstash) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputLogstash to handle AdditionalProperties +func (a *OutputLogstash) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["allow_edit"]; found { + err = json.Unmarshal(raw, &a.AllowEdit) + if err != nil { + return fmt.Errorf("error reading 'allow_edit': %w", err) + } + delete(object, "allow_edit") + } + + if raw, found := object["ca_sha256"]; found { + err = json.Unmarshal(raw, &a.CaSha256) + if err != nil { + return fmt.Errorf("error reading 'ca_sha256': %w", err) + } + delete(object, "ca_sha256") + } + + if raw, found := object["ca_trusted_fingerprint"]; found { + err = json.Unmarshal(raw, &a.CaTrustedFingerprint) + if err != nil { + return fmt.Errorf("error reading 'ca_trusted_fingerprint': %w", err) + } + delete(object, "ca_trusted_fingerprint") + } + + if raw, found := object["config_yaml"]; found { + err = json.Unmarshal(raw, &a.ConfigYaml) + if err != nil { + return fmt.Errorf("error reading 'config_yaml': %w", err) + } + delete(object, "config_yaml") + } + + if raw, found := object["hosts"]; found { + err = json.Unmarshal(raw, &a.Hosts) + if err != nil { + return fmt.Errorf("error reading 'hosts': %w", err) + } + delete(object, "hosts") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["is_default"]; found { + err = json.Unmarshal(raw, &a.IsDefault) + if err != nil { + return fmt.Errorf("error reading 'is_default': %w", err) + } + delete(object, "is_default") + } + + if raw, found := object["is_default_monitoring"]; found { + err = json.Unmarshal(raw, &a.IsDefaultMonitoring) + if err != nil { + return fmt.Errorf("error reading 'is_default_monitoring': %w", err) + } + delete(object, "is_default_monitoring") + } + + if raw, found := object["is_internal"]; found { + err = json.Unmarshal(raw, &a.IsInternal) + if err != nil { + return fmt.Errorf("error reading 'is_internal': %w", err) + } + delete(object, "is_internal") + } + + if raw, found := object["is_preconfigured"]; found { + err = json.Unmarshal(raw, &a.IsPreconfigured) + if err != nil { + return fmt.Errorf("error reading 'is_preconfigured': %w", err) + } + delete(object, "is_preconfigured") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["proxy_id"]; found { + err = json.Unmarshal(raw, &a.ProxyId) + if err != nil { + return fmt.Errorf("error reading 'proxy_id': %w", err) + } + delete(object, "proxy_id") + } + + if raw, found := object["secrets"]; found { + err = json.Unmarshal(raw, &a.Secrets) + if err != nil { + return fmt.Errorf("error reading 'secrets': %w", err) + } + delete(object, "secrets") + } + + if raw, found := 
object["shipper"]; found { + err = json.Unmarshal(raw, &a.Shipper) + if err != nil { + return fmt.Errorf("error reading 'shipper': %w", err) + } + delete(object, "shipper") + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputLogstash to handle AdditionalProperties +func (a OutputLogstash) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.AllowEdit != nil { + object["allow_edit"], err = json.Marshal(a.AllowEdit) + if err != nil { + return nil, fmt.Errorf("error marshaling 'allow_edit': %w", err) + } + } + + if a.CaSha256 != nil { + object["ca_sha256"], err = json.Marshal(a.CaSha256) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_sha256': %w", err) + } + } + + if a.CaTrustedFingerprint != nil { + object["ca_trusted_fingerprint"], err = json.Marshal(a.CaTrustedFingerprint) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_trusted_fingerprint': %w", err) + } + } + + if a.ConfigYaml != nil { + object["config_yaml"], err = json.Marshal(a.ConfigYaml) + if err != nil { + return nil, fmt.Errorf("error marshaling 'config_yaml': %w", err) + } + } + + object["hosts"], err = json.Marshal(a.Hosts) + if err != nil { + return nil, fmt.Errorf("error marshaling 'hosts': %w", err) + } + + if a.Id != nil { + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + } + + if a.IsDefault != nil { + object["is_default"], err = json.Marshal(a.IsDefault) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default': %w", err) + } + } + + if a.IsDefaultMonitoring != nil { + object["is_default_monitoring"], err = json.Marshal(a.IsDefaultMonitoring) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default_monitoring': %w", err) + } + } + + if a.IsInternal != nil { + object["is_internal"], err = json.Marshal(a.IsInternal) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_internal': %w", err) + } + } + + if a.IsPreconfigured != nil { + object["is_preconfigured"], err = json.Marshal(a.IsPreconfigured) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_preconfigured': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.ProxyId != nil { + object["proxy_id"], err = json.Marshal(a.ProxyId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'proxy_id': %w", err) + } + } + + if a.Secrets != nil { + object["secrets"], err = json.Marshal(a.Secrets) + if err != nil { + return nil, fmt.Errorf("error marshaling 'secrets': %w", err) + } + } + + if a.Shipper != nil { + object["shipper"], err = json.Marshal(a.Shipper) + if err != nil { + return nil, fmt.Errorf("error marshaling 'shipper': %w", err) + } + 
} + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputLogstashSecretsSslKey0. Returns the specified +// element and whether it was found +func (a OutputLogstashSecretsSslKey0) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputLogstashSecretsSslKey0 +func (a *OutputLogstashSecretsSslKey0) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputLogstashSecretsSslKey0 to handle AdditionalProperties +func (a *OutputLogstashSecretsSslKey0) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputLogstashSecretsSslKey0 to handle AdditionalProperties +func (a OutputLogstashSecretsSslKey0) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputLogstash_Secrets_Ssl. 
Returns the specified +// element and whether it was found +func (a OutputLogstash_Secrets_Ssl) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputLogstash_Secrets_Ssl +func (a *OutputLogstash_Secrets_Ssl) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputLogstash_Secrets_Ssl to handle AdditionalProperties +func (a *OutputLogstash_Secrets_Ssl) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["key"]; found { + err = json.Unmarshal(raw, &a.Key) + if err != nil { + return fmt.Errorf("error reading 'key': %w", err) + } + delete(object, "key") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputLogstash_Secrets_Ssl to handle AdditionalProperties +func (a OutputLogstash_Secrets_Ssl) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Key != nil { + object["key"], err = json.Marshal(a.Key) + if err != nil { + return nil, fmt.Errorf("error marshaling 'key': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputLogstash_Secrets. 
Returns the specified +// element and whether it was found +func (a OutputLogstash_Secrets) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputLogstash_Secrets +func (a *OutputLogstash_Secrets) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputLogstash_Secrets to handle AdditionalProperties +func (a *OutputLogstash_Secrets) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputLogstash_Secrets to handle AdditionalProperties +func (a OutputLogstash_Secrets) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputRemoteElasticsearch. 
Returns the specified +// element and whether it was found +func (a OutputRemoteElasticsearch) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputRemoteElasticsearch +func (a *OutputRemoteElasticsearch) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputRemoteElasticsearch to handle AdditionalProperties +func (a *OutputRemoteElasticsearch) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["allow_edit"]; found { + err = json.Unmarshal(raw, &a.AllowEdit) + if err != nil { + return fmt.Errorf("error reading 'allow_edit': %w", err) + } + delete(object, "allow_edit") + } + + if raw, found := object["ca_sha256"]; found { + err = json.Unmarshal(raw, &a.CaSha256) + if err != nil { + return fmt.Errorf("error reading 'ca_sha256': %w", err) + } + delete(object, "ca_sha256") + } + + if raw, found := object["ca_trusted_fingerprint"]; found { + err = json.Unmarshal(raw, &a.CaTrustedFingerprint) + if err != nil { + return fmt.Errorf("error reading 'ca_trusted_fingerprint': %w", err) + } + delete(object, "ca_trusted_fingerprint") + } + + if raw, found := object["config_yaml"]; found { + err = json.Unmarshal(raw, &a.ConfigYaml) + if err != nil { + return fmt.Errorf("error reading 'config_yaml': %w", err) + } + delete(object, "config_yaml") + } + + if raw, found := object["hosts"]; found { + err = json.Unmarshal(raw, &a.Hosts) + if err != nil { + return fmt.Errorf("error reading 'hosts': %w", err) + } + delete(object, "hosts") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["is_default"]; found { + err = json.Unmarshal(raw, &a.IsDefault) + if err != nil { + return fmt.Errorf("error reading 'is_default': %w", err) + } + delete(object, "is_default") + } + + if raw, found := object["is_default_monitoring"]; found { + err = json.Unmarshal(raw, &a.IsDefaultMonitoring) + if err != nil { + return fmt.Errorf("error reading 'is_default_monitoring': %w", err) + } + delete(object, "is_default_monitoring") + } + + if raw, found := object["is_internal"]; found { + err = json.Unmarshal(raw, &a.IsInternal) + if err != nil { + return fmt.Errorf("error reading 'is_internal': %w", err) + } + delete(object, "is_internal") + } + + if raw, found := object["is_preconfigured"]; found { + err = json.Unmarshal(raw, &a.IsPreconfigured) + if err != nil { + return fmt.Errorf("error reading 'is_preconfigured': %w", err) + } + delete(object, "is_preconfigured") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["preset"]; found { + err = json.Unmarshal(raw, &a.Preset) + if err != nil { + return fmt.Errorf("error reading 'preset': %w", err) + } + delete(object, "preset") + } + + if raw, found := object["proxy_id"]; found { + err = json.Unmarshal(raw, &a.ProxyId) + if err != nil { + return fmt.Errorf("error reading 'proxy_id': %w", err) + } + delete(object, 
"proxy_id") + } + + if raw, found := object["secrets"]; found { + err = json.Unmarshal(raw, &a.Secrets) + if err != nil { + return fmt.Errorf("error reading 'secrets': %w", err) + } + delete(object, "secrets") + } + + if raw, found := object["service_token"]; found { + err = json.Unmarshal(raw, &a.ServiceToken) + if err != nil { + return fmt.Errorf("error reading 'service_token': %w", err) + } + delete(object, "service_token") + } + + if raw, found := object["shipper"]; found { + err = json.Unmarshal(raw, &a.Shipper) + if err != nil { + return fmt.Errorf("error reading 'shipper': %w", err) + } + delete(object, "shipper") + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputRemoteElasticsearch to handle AdditionalProperties +func (a OutputRemoteElasticsearch) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.AllowEdit != nil { + object["allow_edit"], err = json.Marshal(a.AllowEdit) + if err != nil { + return nil, fmt.Errorf("error marshaling 'allow_edit': %w", err) + } + } + + if a.CaSha256 != nil { + object["ca_sha256"], err = json.Marshal(a.CaSha256) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_sha256': %w", err) + } + } + + if a.CaTrustedFingerprint != nil { + object["ca_trusted_fingerprint"], err = json.Marshal(a.CaTrustedFingerprint) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_trusted_fingerprint': %w", err) + } + } + + if a.ConfigYaml != nil { + object["config_yaml"], err = json.Marshal(a.ConfigYaml) + if err != nil { + return nil, fmt.Errorf("error marshaling 'config_yaml': %w", err) + } + } + + object["hosts"], err = json.Marshal(a.Hosts) + if err != nil { + return nil, fmt.Errorf("error marshaling 'hosts': %w", err) + } + + if a.Id != nil { + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + } + + if a.IsDefault != nil { + object["is_default"], err = json.Marshal(a.IsDefault) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default': %w", err) + } + } + + if a.IsDefaultMonitoring != nil { + object["is_default_monitoring"], err = json.Marshal(a.IsDefaultMonitoring) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default_monitoring': %w", err) + } + } + + if a.IsInternal != nil { + object["is_internal"], err = json.Marshal(a.IsInternal) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_internal': %w", err) + } + } + + if a.IsPreconfigured != nil { + object["is_preconfigured"], err = json.Marshal(a.IsPreconfigured) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_preconfigured': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Preset != nil 
{ + object["preset"], err = json.Marshal(a.Preset) + if err != nil { + return nil, fmt.Errorf("error marshaling 'preset': %w", err) + } + } + + if a.ProxyId != nil { + object["proxy_id"], err = json.Marshal(a.ProxyId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'proxy_id': %w", err) + } + } + + if a.Secrets != nil { + object["secrets"], err = json.Marshal(a.Secrets) + if err != nil { + return nil, fmt.Errorf("error marshaling 'secrets': %w", err) + } + } + + if a.ServiceToken != nil { + object["service_token"], err = json.Marshal(a.ServiceToken) + if err != nil { + return nil, fmt.Errorf("error marshaling 'service_token': %w", err) + } + } + + if a.Shipper != nil { + object["shipper"], err = json.Marshal(a.Shipper) + if err != nil { + return nil, fmt.Errorf("error marshaling 'shipper': %w", err) + } + } + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputRemoteElasticsearchSecretsServiceToken0. Returns the specified +// element and whether it was found +func (a OutputRemoteElasticsearchSecretsServiceToken0) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputRemoteElasticsearchSecretsServiceToken0 +func (a *OutputRemoteElasticsearchSecretsServiceToken0) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputRemoteElasticsearchSecretsServiceToken0 to handle AdditionalProperties +func (a *OutputRemoteElasticsearchSecretsServiceToken0) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputRemoteElasticsearchSecretsServiceToken0 to handle AdditionalProperties +func (a OutputRemoteElasticsearchSecretsServiceToken0) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for 
additional properties for OutputRemoteElasticsearch_Secrets. Returns the specified +// element and whether it was found +func (a OutputRemoteElasticsearch_Secrets) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputRemoteElasticsearch_Secrets +func (a *OutputRemoteElasticsearch_Secrets) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputRemoteElasticsearch_Secrets to handle AdditionalProperties +func (a *OutputRemoteElasticsearch_Secrets) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["service_token"]; found { + err = json.Unmarshal(raw, &a.ServiceToken) + if err != nil { + return fmt.Errorf("error reading 'service_token': %w", err) + } + delete(object, "service_token") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputRemoteElasticsearch_Secrets to handle AdditionalProperties +func (a OutputRemoteElasticsearch_Secrets) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.ServiceToken != nil { + object["service_token"], err = json.Marshal(a.ServiceToken) + if err != nil { + return nil, fmt.Errorf("error marshaling 'service_token': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputShipper. 
Returns the specified +// element and whether it was found +func (a OutputShipper) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputShipper +func (a *OutputShipper) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputShipper to handle AdditionalProperties +func (a *OutputShipper) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["compression_level"]; found { + err = json.Unmarshal(raw, &a.CompressionLevel) + if err != nil { + return fmt.Errorf("error reading 'compression_level': %w", err) + } + delete(object, "compression_level") + } + + if raw, found := object["disk_queue_compression_enabled"]; found { + err = json.Unmarshal(raw, &a.DiskQueueCompressionEnabled) + if err != nil { + return fmt.Errorf("error reading 'disk_queue_compression_enabled': %w", err) + } + delete(object, "disk_queue_compression_enabled") + } + + if raw, found := object["disk_queue_enabled"]; found { + err = json.Unmarshal(raw, &a.DiskQueueEnabled) + if err != nil { + return fmt.Errorf("error reading 'disk_queue_enabled': %w", err) + } + delete(object, "disk_queue_enabled") + } + + if raw, found := object["disk_queue_encryption_enabled"]; found { + err = json.Unmarshal(raw, &a.DiskQueueEncryptionEnabled) + if err != nil { + return fmt.Errorf("error reading 'disk_queue_encryption_enabled': %w", err) + } + delete(object, "disk_queue_encryption_enabled") + } + + if raw, found := object["disk_queue_max_size"]; found { + err = json.Unmarshal(raw, &a.DiskQueueMaxSize) + if err != nil { + return fmt.Errorf("error reading 'disk_queue_max_size': %w", err) + } + delete(object, "disk_queue_max_size") + } + + if raw, found := object["disk_queue_path"]; found { + err = json.Unmarshal(raw, &a.DiskQueuePath) + if err != nil { + return fmt.Errorf("error reading 'disk_queue_path': %w", err) + } + delete(object, "disk_queue_path") + } + + if raw, found := object["loadbalance"]; found { + err = json.Unmarshal(raw, &a.Loadbalance) + if err != nil { + return fmt.Errorf("error reading 'loadbalance': %w", err) + } + delete(object, "loadbalance") + } + + if raw, found := object["max_batch_bytes"]; found { + err = json.Unmarshal(raw, &a.MaxBatchBytes) + if err != nil { + return fmt.Errorf("error reading 'max_batch_bytes': %w", err) + } + delete(object, "max_batch_bytes") + } + + if raw, found := object["mem_queue_events"]; found { + err = json.Unmarshal(raw, &a.MemQueueEvents) + if err != nil { + return fmt.Errorf("error reading 'mem_queue_events': %w", err) + } + delete(object, "mem_queue_events") + } + + if raw, found := object["queue_flush_timeout"]; found { + err = json.Unmarshal(raw, &a.QueueFlushTimeout) + if err != nil { + return fmt.Errorf("error reading 'queue_flush_timeout': %w", err) + } + delete(object, "queue_flush_timeout") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + 
return nil +} + +// Override default JSON handling for OutputShipper to handle AdditionalProperties +func (a OutputShipper) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["compression_level"], err = json.Marshal(a.CompressionLevel) + if err != nil { + return nil, fmt.Errorf("error marshaling 'compression_level': %w", err) + } + + object["disk_queue_compression_enabled"], err = json.Marshal(a.DiskQueueCompressionEnabled) + if err != nil { + return nil, fmt.Errorf("error marshaling 'disk_queue_compression_enabled': %w", err) + } + + if a.DiskQueueEnabled != nil { + object["disk_queue_enabled"], err = json.Marshal(a.DiskQueueEnabled) + if err != nil { + return nil, fmt.Errorf("error marshaling 'disk_queue_enabled': %w", err) + } + } + + object["disk_queue_encryption_enabled"], err = json.Marshal(a.DiskQueueEncryptionEnabled) + if err != nil { + return nil, fmt.Errorf("error marshaling 'disk_queue_encryption_enabled': %w", err) + } + + object["disk_queue_max_size"], err = json.Marshal(a.DiskQueueMaxSize) + if err != nil { + return nil, fmt.Errorf("error marshaling 'disk_queue_max_size': %w", err) + } + + object["disk_queue_path"], err = json.Marshal(a.DiskQueuePath) + if err != nil { + return nil, fmt.Errorf("error marshaling 'disk_queue_path': %w", err) + } + + object["loadbalance"], err = json.Marshal(a.Loadbalance) + if err != nil { + return nil, fmt.Errorf("error marshaling 'loadbalance': %w", err) + } + + object["max_batch_bytes"], err = json.Marshal(a.MaxBatchBytes) + if err != nil { + return nil, fmt.Errorf("error marshaling 'max_batch_bytes': %w", err) + } + + object["mem_queue_events"], err = json.Marshal(a.MemQueueEvents) + if err != nil { + return nil, fmt.Errorf("error marshaling 'mem_queue_events': %w", err) + } + + object["queue_flush_timeout"], err = json.Marshal(a.QueueFlushTimeout) + if err != nil { + return nil, fmt.Errorf("error marshaling 'queue_flush_timeout': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputSsl. 
Returns the specified +// element and whether it was found +func (a OutputSsl) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputSsl +func (a *OutputSsl) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputSsl to handle AdditionalProperties +func (a *OutputSsl) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["certificate"]; found { + err = json.Unmarshal(raw, &a.Certificate) + if err != nil { + return fmt.Errorf("error reading 'certificate': %w", err) + } + delete(object, "certificate") + } + + if raw, found := object["certificate_authorities"]; found { + err = json.Unmarshal(raw, &a.CertificateAuthorities) + if err != nil { + return fmt.Errorf("error reading 'certificate_authorities': %w", err) + } + delete(object, "certificate_authorities") + } + + if raw, found := object["key"]; found { + err = json.Unmarshal(raw, &a.Key) + if err != nil { + return fmt.Errorf("error reading 'key': %w", err) + } + delete(object, "key") + } + + if raw, found := object["verification_mode"]; found { + err = json.Unmarshal(raw, &a.VerificationMode) + if err != nil { + return fmt.Errorf("error reading 'verification_mode': %w", err) + } + delete(object, "verification_mode") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputSsl to handle AdditionalProperties +func (a OutputSsl) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Certificate != nil { + object["certificate"], err = json.Marshal(a.Certificate) + if err != nil { + return nil, fmt.Errorf("error marshaling 'certificate': %w", err) + } + } + + if a.CertificateAuthorities != nil { + object["certificate_authorities"], err = json.Marshal(a.CertificateAuthorities) + if err != nil { + return nil, fmt.Errorf("error marshaling 'certificate_authorities': %w", err) + } + } + + if a.Key != nil { + object["key"], err = json.Marshal(a.Key) + if err != nil { + return nil, fmt.Errorf("error marshaling 'key': %w", err) + } + } + + if a.VerificationMode != nil { + object["verification_mode"], err = json.Marshal(a.VerificationMode) + if err != nil { + return nil, fmt.Errorf("error marshaling 'verification_mode': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo. 
Returns the specified +// element and whether it was found +func (a PackageInfo) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo +func (a *PackageInfo) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo to handle AdditionalProperties +func (a *PackageInfo) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["agent"]; found { + err = json.Unmarshal(raw, &a.Agent) + if err != nil { + return fmt.Errorf("error reading 'agent': %w", err) + } + delete(object, "agent") + } + + if raw, found := object["asset_tags"]; found { + err = json.Unmarshal(raw, &a.AssetTags) + if err != nil { + return fmt.Errorf("error reading 'asset_tags': %w", err) + } + delete(object, "asset_tags") + } + + if raw, found := object["assets"]; found { + err = json.Unmarshal(raw, &a.Assets) + if err != nil { + return fmt.Errorf("error reading 'assets': %w", err) + } + delete(object, "assets") + } + + if raw, found := object["categories"]; found { + err = json.Unmarshal(raw, &a.Categories) + if err != nil { + return fmt.Errorf("error reading 'categories': %w", err) + } + delete(object, "categories") + } + + if raw, found := object["conditions"]; found { + err = json.Unmarshal(raw, &a.Conditions) + if err != nil { + return fmt.Errorf("error reading 'conditions': %w", err) + } + delete(object, "conditions") + } + + if raw, found := object["data_streams"]; found { + err = json.Unmarshal(raw, &a.DataStreams) + if err != nil { + return fmt.Errorf("error reading 'data_streams': %w", err) + } + delete(object, "data_streams") + } + + if raw, found := object["description"]; found { + err = json.Unmarshal(raw, &a.Description) + if err != nil { + return fmt.Errorf("error reading 'description': %w", err) + } + delete(object, "description") + } + + if raw, found := object["download"]; found { + err = json.Unmarshal(raw, &a.Download) + if err != nil { + return fmt.Errorf("error reading 'download': %w", err) + } + delete(object, "download") + } + + if raw, found := object["elasticsearch"]; found { + err = json.Unmarshal(raw, &a.Elasticsearch) + if err != nil { + return fmt.Errorf("error reading 'elasticsearch': %w", err) + } + delete(object, "elasticsearch") + } + + if raw, found := object["format_version"]; found { + err = json.Unmarshal(raw, &a.FormatVersion) + if err != nil { + return fmt.Errorf("error reading 'format_version': %w", err) + } + delete(object, "format_version") + } + + if raw, found := object["icons"]; found { + err = json.Unmarshal(raw, &a.Icons) + if err != nil { + return fmt.Errorf("error reading 'icons': %w", err) + } + delete(object, "icons") + } + + if raw, found := object["installationInfo"]; found { + err = json.Unmarshal(raw, &a.InstallationInfo) + if err != nil { + return fmt.Errorf("error reading 'installationInfo': %w", err) + } + delete(object, "installationInfo") + } + + if raw, found := object["internal"]; found { + err = json.Unmarshal(raw, &a.Internal) + if err != nil { + return fmt.Errorf("error reading 'internal': %w", err) + } + delete(object, "internal") + } + + if raw, found := object["keepPoliciesUpToDate"]; found { + err = 
json.Unmarshal(raw, &a.KeepPoliciesUpToDate) + if err != nil { + return fmt.Errorf("error reading 'keepPoliciesUpToDate': %w", err) + } + delete(object, "keepPoliciesUpToDate") + } + + if raw, found := object["latestVersion"]; found { + err = json.Unmarshal(raw, &a.LatestVersion) + if err != nil { + return fmt.Errorf("error reading 'latestVersion': %w", err) + } + delete(object, "latestVersion") + } + + if raw, found := object["license"]; found { + err = json.Unmarshal(raw, &a.License) + if err != nil { + return fmt.Errorf("error reading 'license': %w", err) + } + delete(object, "license") + } + + if raw, found := object["licensePath"]; found { + err = json.Unmarshal(raw, &a.LicensePath) + if err != nil { + return fmt.Errorf("error reading 'licensePath': %w", err) + } + delete(object, "licensePath") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["notice"]; found { + err = json.Unmarshal(raw, &a.Notice) + if err != nil { + return fmt.Errorf("error reading 'notice': %w", err) + } + delete(object, "notice") + } + + if raw, found := object["owner"]; found { + err = json.Unmarshal(raw, &a.Owner) + if err != nil { + return fmt.Errorf("error reading 'owner': %w", err) + } + delete(object, "owner") + } + + if raw, found := object["path"]; found { + err = json.Unmarshal(raw, &a.Path) + if err != nil { + return fmt.Errorf("error reading 'path': %w", err) + } + delete(object, "path") + } + + if raw, found := object["policy_templates"]; found { + err = json.Unmarshal(raw, &a.PolicyTemplates) + if err != nil { + return fmt.Errorf("error reading 'policy_templates': %w", err) + } + delete(object, "policy_templates") + } + + if raw, found := object["readme"]; found { + err = json.Unmarshal(raw, &a.Readme) + if err != nil { + return fmt.Errorf("error reading 'readme': %w", err) + } + delete(object, "readme") + } + + if raw, found := object["release"]; found { + err = json.Unmarshal(raw, &a.Release) + if err != nil { + return fmt.Errorf("error reading 'release': %w", err) + } + delete(object, "release") + } + + if raw, found := object["savedObject"]; found { + err = json.Unmarshal(raw, &a.SavedObject) + if err != nil { + return fmt.Errorf("error reading 'savedObject': %w", err) + } + delete(object, "savedObject") + } + + if raw, found := object["screenshots"]; found { + err = json.Unmarshal(raw, &a.Screenshots) + if err != nil { + return fmt.Errorf("error reading 'screenshots': %w", err) + } + delete(object, "screenshots") + } + + if raw, found := object["signature_path"]; found { + err = json.Unmarshal(raw, &a.SignaturePath) + if err != nil { + return fmt.Errorf("error reading 'signature_path': %w", err) + } + delete(object, "signature_path") + } + + if raw, found := object["source"]; found { + err = json.Unmarshal(raw, &a.Source) + if err != nil { + return fmt.Errorf("error reading 'source': %w", err) + } + delete(object, "source") + } + + if raw, found := object["status"]; found { + err = json.Unmarshal(raw, &a.Status) + if err != nil { + return fmt.Errorf("error reading 'status': %w", err) + } + delete(object, "status") + } + + if raw, found := object["title"]; found { + err = json.Unmarshal(raw, &a.Title) + if err != nil { + return fmt.Errorf("error reading 'title': %w", err) + } + delete(object, "title") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error 
reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["vars"]; found { + err = json.Unmarshal(raw, &a.Vars) + if err != nil { + return fmt.Errorf("error reading 'vars': %w", err) + } + delete(object, "vars") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo to handle AdditionalProperties +func (a PackageInfo) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Agent != nil { + object["agent"], err = json.Marshal(a.Agent) + if err != nil { + return nil, fmt.Errorf("error marshaling 'agent': %w", err) + } + } + + if a.AssetTags != nil { + object["asset_tags"], err = json.Marshal(a.AssetTags) + if err != nil { + return nil, fmt.Errorf("error marshaling 'asset_tags': %w", err) + } + } + + object["assets"], err = json.Marshal(a.Assets) + if err != nil { + return nil, fmt.Errorf("error marshaling 'assets': %w", err) + } + + if a.Categories != nil { + object["categories"], err = json.Marshal(a.Categories) + if err != nil { + return nil, fmt.Errorf("error marshaling 'categories': %w", err) + } + } + + if a.Conditions != nil { + object["conditions"], err = json.Marshal(a.Conditions) + if err != nil { + return nil, fmt.Errorf("error marshaling 'conditions': %w", err) + } + } + + if a.DataStreams != nil { + object["data_streams"], err = json.Marshal(a.DataStreams) + if err != nil { + return nil, fmt.Errorf("error marshaling 'data_streams': %w", err) + } + } + + if a.Description != nil { + object["description"], err = json.Marshal(a.Description) + if err != nil { + return nil, fmt.Errorf("error marshaling 'description': %w", err) + } + } + + if a.Download != nil { + object["download"], err = json.Marshal(a.Download) + if err != nil { + return nil, fmt.Errorf("error marshaling 'download': %w", err) + } + } + + if a.Elasticsearch != nil { + object["elasticsearch"], err = json.Marshal(a.Elasticsearch) + if err != nil { + return nil, fmt.Errorf("error marshaling 'elasticsearch': %w", err) + } + } + + if a.FormatVersion != nil { + object["format_version"], err = json.Marshal(a.FormatVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'format_version': %w", err) + } + } + + if a.Icons != nil { + object["icons"], err = json.Marshal(a.Icons) + if err != nil { + return nil, fmt.Errorf("error marshaling 'icons': %w", err) + } + } + + if a.InstallationInfo != nil { + object["installationInfo"], err = json.Marshal(a.InstallationInfo) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installationInfo': %w", err) + } + } + + if a.Internal != nil { + object["internal"], err = json.Marshal(a.Internal) + if err != nil { + return nil, fmt.Errorf("error marshaling 'internal': %w", err) + } + } + + if a.KeepPoliciesUpToDate != nil { + object["keepPoliciesUpToDate"], err = json.Marshal(a.KeepPoliciesUpToDate) + if err != nil { + return nil, fmt.Errorf("error marshaling 'keepPoliciesUpToDate': %w", err) + } + } + + if a.LatestVersion != nil { + 
object["latestVersion"], err = json.Marshal(a.LatestVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'latestVersion': %w", err) + } + } + + if a.License != nil { + object["license"], err = json.Marshal(a.License) + if err != nil { + return nil, fmt.Errorf("error marshaling 'license': %w", err) + } + } + + if a.LicensePath != nil { + object["licensePath"], err = json.Marshal(a.LicensePath) + if err != nil { + return nil, fmt.Errorf("error marshaling 'licensePath': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Notice != nil { + object["notice"], err = json.Marshal(a.Notice) + if err != nil { + return nil, fmt.Errorf("error marshaling 'notice': %w", err) + } + } + + if a.Owner != nil { + object["owner"], err = json.Marshal(a.Owner) + if err != nil { + return nil, fmt.Errorf("error marshaling 'owner': %w", err) + } + } + + if a.Path != nil { + object["path"], err = json.Marshal(a.Path) + if err != nil { + return nil, fmt.Errorf("error marshaling 'path': %w", err) + } + } + + if a.PolicyTemplates != nil { + object["policy_templates"], err = json.Marshal(a.PolicyTemplates) + if err != nil { + return nil, fmt.Errorf("error marshaling 'policy_templates': %w", err) + } + } + + if a.Readme != nil { + object["readme"], err = json.Marshal(a.Readme) + if err != nil { + return nil, fmt.Errorf("error marshaling 'readme': %w", err) + } + } + + if a.Release != nil { + object["release"], err = json.Marshal(a.Release) + if err != nil { + return nil, fmt.Errorf("error marshaling 'release': %w", err) + } + } + + object["savedObject"], err = json.Marshal(a.SavedObject) + if err != nil { + return nil, fmt.Errorf("error marshaling 'savedObject': %w", err) + } + + if a.Screenshots != nil { + object["screenshots"], err = json.Marshal(a.Screenshots) + if err != nil { + return nil, fmt.Errorf("error marshaling 'screenshots': %w", err) + } + } + + if a.SignaturePath != nil { + object["signature_path"], err = json.Marshal(a.SignaturePath) + if err != nil { + return nil, fmt.Errorf("error marshaling 'signature_path': %w", err) + } + } + + if a.Source != nil { + object["source"], err = json.Marshal(a.Source) + if err != nil { + return nil, fmt.Errorf("error marshaling 'source': %w", err) + } + } + + if a.Status != nil { + object["status"], err = json.Marshal(a.Status) + if err != nil { + return nil, fmt.Errorf("error marshaling 'status': %w", err) + } + } + + object["title"], err = json.Marshal(a.Title) + if err != nil { + return nil, fmt.Errorf("error marshaling 'title': %w", err) + } + + if a.Type != nil { + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + } + + if a.Vars != nil { + object["vars"], err = json.Marshal(a.Vars) + if err != nil { + return nil, fmt.Errorf("error marshaling 'vars': %w", err) + } + } + + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_Conditions_Elastic. 
Returns the specified
+// element and whether it was found
+func (a PackageInfo_Conditions_Elastic) Get(fieldName string) (value interface{}, found bool) {
+	if a.AdditionalProperties != nil {
+		value, found = a.AdditionalProperties[fieldName]
+	}
+	return
+}
+
+// Setter for additional properties for PackageInfo_Conditions_Elastic
+func (a *PackageInfo_Conditions_Elastic) Set(fieldName string, value interface{}) {
+	if a.AdditionalProperties == nil {
+		a.AdditionalProperties = make(map[string]interface{})
+	}
+	a.AdditionalProperties[fieldName] = value
+}
+
+// Override default JSON handling for PackageInfo_Conditions_Elastic to handle AdditionalProperties
+func (a *PackageInfo_Conditions_Elastic) UnmarshalJSON(b []byte) error {
+	object := make(map[string]json.RawMessage)
+	err := json.Unmarshal(b, &object)
+	if err != nil {
+		return err
+	}
+
+	if raw, found := object["capabilities"]; found {
+		err = json.Unmarshal(raw, &a.Capabilities)
+		if err != nil {
+			return fmt.Errorf("error reading 'capabilities': %w", err)
+		}
+		delete(object, "capabilities")
+	}
+
+	if raw, found := object["subscription"]; found {
+		err = json.Unmarshal(raw, &a.Subscription)
+		if err != nil {
+			return fmt.Errorf("error reading 'subscription': %w", err)
+		}
+		delete(object, "subscription")
+	}
+
+	if len(object) != 0 {
+		a.AdditionalProperties = make(map[string]interface{})
+		for fieldName, fieldBuf := range object {
+			var fieldVal interface{}
+			err := json.Unmarshal(fieldBuf, &fieldVal)
+			if err != nil {
+				return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err)
+			}
+			a.AdditionalProperties[fieldName] = fieldVal
+		}
+	}
+	return nil
+}
+
+// Override default JSON handling for PackageInfo_Conditions_Elastic to handle AdditionalProperties
+func (a PackageInfo_Conditions_Elastic) MarshalJSON() ([]byte, error) {
+	var err error
+	object := make(map[string]json.RawMessage)
+
+	if a.Capabilities != nil {
+		object["capabilities"], err = json.Marshal(a.Capabilities)
+		if err != nil {
+			return nil, fmt.Errorf("error marshaling 'capabilities': %w", err)
+		}
+	}
+
+	if a.Subscription != nil {
+		object["subscription"], err = json.Marshal(a.Subscription)
+		if err != nil {
+			return nil, fmt.Errorf("error marshaling 'subscription': %w", err)
+		}
+	}
+
+	for fieldName, field := range a.AdditionalProperties {
+		object[fieldName], err = json.Marshal(field)
+		if err != nil {
+			return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err)
+		}
+	}
+	return json.Marshal(object)
+}
+
+// Getter for additional properties for PackageInfo_Conditions_Kibana. Returns the specified
+// element and whether it was found
+func (a PackageInfo_Conditions_Kibana) Get(fieldName string) (value interface{}, found bool) {
+	if a.AdditionalProperties != nil {
+		value, found = a.AdditionalProperties[fieldName]
+	}
+	return
+}
+
+// Setter for additional properties for PackageInfo_Conditions_Kibana
+func (a *PackageInfo_Conditions_Kibana) Set(fieldName string, value interface{}) {
+	if a.AdditionalProperties == nil {
+		a.AdditionalProperties = make(map[string]interface{})
+	}
+	a.AdditionalProperties[fieldName] = value
+}
+
+// Override default JSON handling for PackageInfo_Conditions_Kibana to handle AdditionalProperties
+func (a *PackageInfo_Conditions_Kibana) UnmarshalJSON(b []byte) error {
+	object := make(map[string]json.RawMessage)
+	err := json.Unmarshal(b, &object)
+	if err != nil {
+		return err
+	}
+
+	if raw, found := object["version"]; found {
+		err = json.Unmarshal(raw, &a.Version)
+		if err != nil {
+			return fmt.Errorf("error reading 'version': %w", err)
+		}
+		delete(object, "version")
+	}
+
+	if len(object) != 0 {
+		a.AdditionalProperties = make(map[string]interface{})
+		for fieldName, fieldBuf := range object {
+			var fieldVal interface{}
+			err := json.Unmarshal(fieldBuf, &fieldVal)
+			if err != nil {
+				return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err)
+			}
+			a.AdditionalProperties[fieldName] = fieldVal
+		}
+	}
+	return nil
+}
+
+// Override default JSON handling for PackageInfo_Conditions_Kibana to handle AdditionalProperties
+func (a PackageInfo_Conditions_Kibana) MarshalJSON() ([]byte, error) {
+	var err error
+	object := make(map[string]json.RawMessage)
+
+	if a.Version != nil {
+		object["version"], err = json.Marshal(a.Version)
+		if err != nil {
+			return nil, fmt.Errorf("error marshaling 'version': %w", err)
+		}
+	}
+
+	for fieldName, field := range a.AdditionalProperties {
+		object[fieldName], err = json.Marshal(field)
+		if err != nil {
+			return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err)
+		}
+	}
+	return json.Marshal(object)
+}
+
+// Getter for additional properties for PackageInfo_Conditions. Returns the specified
+// element and whether it was found
+func (a PackageInfo_Conditions) Get(fieldName string) (value interface{}, found bool) {
+	if a.AdditionalProperties != nil {
+		value, found = a.AdditionalProperties[fieldName]
+	}
+	return
+}
+
+// Setter for additional properties for PackageInfo_Conditions
+func (a *PackageInfo_Conditions) Set(fieldName string, value interface{}) {
+	if a.AdditionalProperties == nil {
+		a.AdditionalProperties = make(map[string]interface{})
+	}
+	a.AdditionalProperties[fieldName] = value
+}
+
+// Override default JSON handling for PackageInfo_Conditions to handle AdditionalProperties
+func (a *PackageInfo_Conditions) UnmarshalJSON(b []byte) error {
+	object := make(map[string]json.RawMessage)
+	err := json.Unmarshal(b, &object)
+	if err != nil {
+		return err
+	}
+
+	if raw, found := object["elastic"]; found {
+		err = json.Unmarshal(raw, &a.Elastic)
+		if err != nil {
+			return fmt.Errorf("error reading 'elastic': %w", err)
+		}
+		delete(object, "elastic")
+	}
+
+	if raw, found := object["kibana"]; found {
+		err = json.Unmarshal(raw, &a.Kibana)
+		if err != nil {
+			return fmt.Errorf("error reading 'kibana': %w", err)
+		}
+		delete(object, "kibana")
+	}
+
+	if len(object) != 0 {
+		a.AdditionalProperties = make(map[string]interface{})
+		for fieldName, fieldBuf := range object {
+			var fieldVal interface{}
+			err := json.Unmarshal(fieldBuf, &fieldVal)
+			if err != nil {
+				return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err)
+			}
+			a.AdditionalProperties[fieldName] = fieldVal
+		}
+	}
+	return nil
+}
+
+// Override default JSON handling for PackageInfo_Conditions to handle AdditionalProperties
+func (a PackageInfo_Conditions) MarshalJSON() ([]byte, error) {
+	var err error
+	object := make(map[string]json.RawMessage)
+
+	if a.Elastic != nil {
+		object["elastic"], err = json.Marshal(a.Elastic)
+		if err != nil {
+			return nil, fmt.Errorf("error marshaling 'elastic': %w", err)
+		}
+	}
+
+	if a.Kibana != nil {
+		object["kibana"], err = json.Marshal(a.Kibana)
+		if err != nil {
+			return nil, fmt.Errorf("error marshaling 'kibana': %w", err)
+		}
+	}
+
+	for fieldName, field := range a.AdditionalProperties {
+		object[fieldName], err = json.Marshal(field)
+		if err != nil {
+			return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err)
+		}
+	}
+	return json.Marshal(object)
+}
+
+// Getter for additional properties for PackageInfo_Icons_Item.
Returns the specified +// element and whether it was found +func (a PackageInfo_Icons_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Icons_Item +func (a *PackageInfo_Icons_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Icons_Item to handle AdditionalProperties +func (a *PackageInfo_Icons_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["dark_mode"]; found { + err = json.Unmarshal(raw, &a.DarkMode) + if err != nil { + return fmt.Errorf("error reading 'dark_mode': %w", err) + } + delete(object, "dark_mode") + } + + if raw, found := object["path"]; found { + err = json.Unmarshal(raw, &a.Path) + if err != nil { + return fmt.Errorf("error reading 'path': %w", err) + } + delete(object, "path") + } + + if raw, found := object["size"]; found { + err = json.Unmarshal(raw, &a.Size) + if err != nil { + return fmt.Errorf("error reading 'size': %w", err) + } + delete(object, "size") + } + + if raw, found := object["src"]; found { + err = json.Unmarshal(raw, &a.Src) + if err != nil { + return fmt.Errorf("error reading 'src': %w", err) + } + delete(object, "src") + } + + if raw, found := object["title"]; found { + err = json.Unmarshal(raw, &a.Title) + if err != nil { + return fmt.Errorf("error reading 'title': %w", err) + } + delete(object, "title") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Icons_Item to handle AdditionalProperties +func (a PackageInfo_Icons_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.DarkMode != nil { + object["dark_mode"], err = json.Marshal(a.DarkMode) + if err != nil { + return nil, fmt.Errorf("error marshaling 'dark_mode': %w", err) + } + } + + if a.Path != nil { + object["path"], err = json.Marshal(a.Path) + if err != nil { + return nil, fmt.Errorf("error marshaling 'path': %w", err) + } + } + + if a.Size != nil { + object["size"], err = json.Marshal(a.Size) + if err != nil { + return nil, fmt.Errorf("error marshaling 'size': %w", err) + } + } + + object["src"], err = json.Marshal(a.Src) + if err != nil { + return nil, fmt.Errorf("error marshaling 'src': %w", err) + } + + if a.Title != nil { + object["title"], err = json.Marshal(a.Title) + if err != nil { + return nil, fmt.Errorf("error marshaling 'title': %w", err) + } + } + + if a.Type != nil { + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = 
json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item. Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item +func (a *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["originId"]; found { + err = json.Unmarshal(raw, &a.OriginId) + if err != nil { + return fmt.Errorf("error reading 'originId': %w", err) + } + delete(object, "originId") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + if a.OriginId != nil { + object["originId"], err = json.Marshal(a.OriginId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'originId': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features +func (a *PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["doc_value_only_numeric"]; found { + err = json.Unmarshal(raw, &a.DocValueOnlyNumeric) + if err != nil { + return fmt.Errorf("error reading 'doc_value_only_numeric': %w", err) + } + delete(object, "doc_value_only_numeric") + } + + if raw, found := object["doc_value_only_other"]; found { + err = json.Unmarshal(raw, &a.DocValueOnlyOther) + if err != nil { + return fmt.Errorf("error reading 'doc_value_only_other': %w", err) + } + delete(object, "doc_value_only_other") + } + + if raw, found := object["synthetic_source"]; found { + err = json.Unmarshal(raw, &a.SyntheticSource) + if err != nil { + return fmt.Errorf("error reading 'synthetic_source': %w", err) + } + delete(object, "synthetic_source") + } + + if raw, found := object["tsdb"]; found { + err = json.Unmarshal(raw, &a.Tsdb) + if err != nil { + return fmt.Errorf("error reading 'tsdb': %w", err) + } + delete(object, "tsdb") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.DocValueOnlyNumeric != nil { + object["doc_value_only_numeric"], err = json.Marshal(a.DocValueOnlyNumeric) + if err != nil { + return nil, fmt.Errorf("error marshaling 'doc_value_only_numeric': %w", err) + } + } + + if a.DocValueOnlyOther != nil { + object["doc_value_only_other"], err = json.Marshal(a.DocValueOnlyOther) + if err != nil { + return nil, fmt.Errorf("error marshaling 'doc_value_only_other': %w", err) + } + } + + if a.SyntheticSource != nil { + object["synthetic_source"], err = json.Marshal(a.SyntheticSource) + if err != nil { + return nil, fmt.Errorf("error marshaling 'synthetic_source': %w", err) + } + } + + if a.Tsdb != nil { + object["tsdb"], err = json.Marshal(a.Tsdb) + if err != nil { + return nil, fmt.Errorf("error marshaling 'tsdb': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil 
{ + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item. Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item +func (a *PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["data_stream"]; found { + err = json.Unmarshal(raw, &a.DataStream) + if err != nil { + return fmt.Errorf("error reading 'data_stream': %w", err) + } + delete(object, "data_stream") + } + + if raw, found := object["features"]; found { + err = json.Unmarshal(raw, &a.Features) + if err != nil { + return fmt.Errorf("error reading 'features': %w", err) + } + delete(object, "features") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["data_stream"], err = json.Marshal(a.DataStream) + if err != nil { + return nil, fmt.Errorf("error marshaling 'data_stream': %w", err) + } + + object["features"], err = json.Marshal(a.Features) + if err != nil { + return nil, fmt.Errorf("error marshaling 'features': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_InstalledEs_Item. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_InstalledEs_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_InstalledEs_Item +func (a *PackageInfo_InstallationInfo_InstalledEs_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_InstalledEs_Item to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_InstalledEs_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["deferred"]; found { + err = json.Unmarshal(raw, &a.Deferred) + if err != nil { + return fmt.Errorf("error reading 'deferred': %w", err) + } + delete(object, "deferred") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_InstalledEs_Item to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_InstalledEs_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Deferred != nil { + object["deferred"], err = json.Marshal(a.Deferred) + if err != nil { + return nil, fmt.Errorf("error marshaling 'deferred': %w", err) + } + } + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + if a.Version != nil { + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_InstalledKibana_Item. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_InstalledKibana_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_InstalledKibana_Item +func (a *PackageInfo_InstallationInfo_InstalledKibana_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_InstalledKibana_Item to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_InstalledKibana_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["originId"]; found { + err = json.Unmarshal(raw, &a.OriginId) + if err != nil { + return fmt.Errorf("error reading 'originId': %w", err) + } + delete(object, "originId") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_InstalledKibana_Item to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_InstalledKibana_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + if a.OriginId != nil { + object["originId"], err = json.Marshal(a.OriginId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'originId': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_LatestExecutedState. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_LatestExecutedState) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_LatestExecutedState +func (a *PackageInfo_InstallationInfo_LatestExecutedState) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_LatestExecutedState to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_LatestExecutedState) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["error"]; found { + err = json.Unmarshal(raw, &a.Error) + if err != nil { + return fmt.Errorf("error reading 'error': %w", err) + } + delete(object, "error") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["started_at"]; found { + err = json.Unmarshal(raw, &a.StartedAt) + if err != nil { + return fmt.Errorf("error reading 'started_at': %w", err) + } + delete(object, "started_at") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_LatestExecutedState to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_LatestExecutedState) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Error != nil { + object["error"], err = json.Marshal(a.Error) + if err != nil { + return nil, fmt.Errorf("error marshaling 'error': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + object["started_at"], err = json.Marshal(a.StartedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'started_at': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error +func (a *PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["message"]; found { + err = json.Unmarshal(raw, &a.Message) + if err != nil { + return fmt.Errorf("error reading 'message': %w", err) + } + delete(object, "message") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["stack"]; found { + err = json.Unmarshal(raw, &a.Stack) + if err != nil { + return fmt.Errorf("error reading 'stack': %w", err) + } + delete(object, "stack") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["message"], err = json.Marshal(a.Message) + if err != nil { + return nil, fmt.Errorf("error marshaling 'message': %w", err) + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Stack != nil { + object["stack"], err = json.Marshal(a.Stack) + if err != nil { + return nil, fmt.Errorf("error marshaling 'stack': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item +func (a *PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["created_at"]; found { + err = json.Unmarshal(raw, &a.CreatedAt) + if err != nil { + return fmt.Errorf("error reading 'created_at': %w", err) + } + delete(object, "created_at") + } + + if raw, found := object["error"]; found { + err = json.Unmarshal(raw, &a.Error) + if err != nil { + return fmt.Errorf("error reading 'error': %w", err) + } + delete(object, "error") + } + + if raw, found := object["target_version"]; found { + err = json.Unmarshal(raw, &a.TargetVersion) + if err != nil { + return fmt.Errorf("error reading 'target_version': %w", err) + } + delete(object, "target_version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["created_at"], err = json.Marshal(a.CreatedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'created_at': %w", err) + } + + object["error"], err = json.Marshal(a.Error) + if err != nil { + return nil, fmt.Errorf("error marshaling 'error': %w", err) + } + + object["target_version"], err = json.Marshal(a.TargetVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'target_version': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo +func (a *PackageInfo_InstallationInfo) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["additional_spaces_installed_kibana"]; found { + err = json.Unmarshal(raw, &a.AdditionalSpacesInstalledKibana) + if err != nil { + return fmt.Errorf("error reading 'additional_spaces_installed_kibana': %w", err) + } + delete(object, "additional_spaces_installed_kibana") + } + + if raw, found := object["created_at"]; found { + err = json.Unmarshal(raw, &a.CreatedAt) + if err != nil { + return fmt.Errorf("error reading 'created_at': %w", err) + } + delete(object, "created_at") + } + + if raw, found := object["experimental_data_stream_features"]; found { + err = json.Unmarshal(raw, &a.ExperimentalDataStreamFeatures) + if err != nil { + return fmt.Errorf("error reading 'experimental_data_stream_features': %w", err) + } + delete(object, "experimental_data_stream_features") + } + + if raw, found := object["install_format_schema_version"]; found { + err = json.Unmarshal(raw, &a.InstallFormatSchemaVersion) + if err != nil { + return fmt.Errorf("error reading 'install_format_schema_version': %w", err) + } + delete(object, "install_format_schema_version") + } + + if raw, found := object["install_source"]; found { + err = json.Unmarshal(raw, &a.InstallSource) + if err != nil { + return fmt.Errorf("error reading 'install_source': %w", err) + } + delete(object, "install_source") + } + + if raw, found := object["install_status"]; found { + err = json.Unmarshal(raw, &a.InstallStatus) + if err != nil { + return fmt.Errorf("error reading 'install_status': %w", err) + } + delete(object, "install_status") + } + + if raw, found := object["installed_es"]; found { + err = json.Unmarshal(raw, &a.InstalledEs) + if err != nil { + return fmt.Errorf("error reading 'installed_es': %w", err) + } + delete(object, "installed_es") + } + + if raw, found := object["installed_kibana"]; found { + err = json.Unmarshal(raw, &a.InstalledKibana) + if err != nil { + return fmt.Errorf("error reading 'installed_kibana': %w", err) + } + delete(object, "installed_kibana") + } + + if raw, found := object["installed_kibana_space_id"]; found { + err = json.Unmarshal(raw, &a.InstalledKibanaSpaceId) + if err != nil { + return fmt.Errorf("error reading 'installed_kibana_space_id': %w", err) + } + delete(object, "installed_kibana_space_id") + } + + if raw, found := object["latest_executed_state"]; found { + err = json.Unmarshal(raw, &a.LatestExecutedState) + if err != nil { + return fmt.Errorf("error reading 'latest_executed_state': %w", err) + } + delete(object, "latest_executed_state") + } + + if raw, found := object["latest_install_failed_attempts"]; found { + err = json.Unmarshal(raw, &a.LatestInstallFailedAttempts) + if err != nil { + return fmt.Errorf("error reading 
'latest_install_failed_attempts': %w", err) + } + delete(object, "latest_install_failed_attempts") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["namespaces"]; found { + err = json.Unmarshal(raw, &a.Namespaces) + if err != nil { + return fmt.Errorf("error reading 'namespaces': %w", err) + } + delete(object, "namespaces") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["updated_at"]; found { + err = json.Unmarshal(raw, &a.UpdatedAt) + if err != nil { + return fmt.Errorf("error reading 'updated_at': %w", err) + } + delete(object, "updated_at") + } + + if raw, found := object["verification_key_id"]; found { + err = json.Unmarshal(raw, &a.VerificationKeyId) + if err != nil { + return fmt.Errorf("error reading 'verification_key_id': %w", err) + } + delete(object, "verification_key_id") + } + + if raw, found := object["verification_status"]; found { + err = json.Unmarshal(raw, &a.VerificationStatus) + if err != nil { + return fmt.Errorf("error reading 'verification_status': %w", err) + } + delete(object, "verification_status") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo to handle AdditionalProperties +func (a PackageInfo_InstallationInfo) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.AdditionalSpacesInstalledKibana != nil { + object["additional_spaces_installed_kibana"], err = json.Marshal(a.AdditionalSpacesInstalledKibana) + if err != nil { + return nil, fmt.Errorf("error marshaling 'additional_spaces_installed_kibana': %w", err) + } + } + + if a.CreatedAt != nil { + object["created_at"], err = json.Marshal(a.CreatedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'created_at': %w", err) + } + } + + if a.ExperimentalDataStreamFeatures != nil { + object["experimental_data_stream_features"], err = json.Marshal(a.ExperimentalDataStreamFeatures) + if err != nil { + return nil, fmt.Errorf("error marshaling 'experimental_data_stream_features': %w", err) + } + } + + if a.InstallFormatSchemaVersion != nil { + object["install_format_schema_version"], err = json.Marshal(a.InstallFormatSchemaVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'install_format_schema_version': %w", err) + } + } + + object["install_source"], err = json.Marshal(a.InstallSource) + if err != nil { + return nil, fmt.Errorf("error marshaling 'install_source': %w", err) + } + + object["install_status"], err = json.Marshal(a.InstallStatus) + if err != nil { + return nil, fmt.Errorf("error marshaling 'install_status': %w", err) + } + + object["installed_es"], err = json.Marshal(a.InstalledEs) + if err != nil { + return nil, 
fmt.Errorf("error marshaling 'installed_es': %w", err) + } + + object["installed_kibana"], err = json.Marshal(a.InstalledKibana) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installed_kibana': %w", err) + } + + if a.InstalledKibanaSpaceId != nil { + object["installed_kibana_space_id"], err = json.Marshal(a.InstalledKibanaSpaceId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installed_kibana_space_id': %w", err) + } + } + + if a.LatestExecutedState != nil { + object["latest_executed_state"], err = json.Marshal(a.LatestExecutedState) + if err != nil { + return nil, fmt.Errorf("error marshaling 'latest_executed_state': %w", err) + } + } + + if a.LatestInstallFailedAttempts != nil { + object["latest_install_failed_attempts"], err = json.Marshal(a.LatestInstallFailedAttempts) + if err != nil { + return nil, fmt.Errorf("error marshaling 'latest_install_failed_attempts': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Namespaces != nil { + object["namespaces"], err = json.Marshal(a.Namespaces) + if err != nil { + return nil, fmt.Errorf("error marshaling 'namespaces': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + if a.UpdatedAt != nil { + object["updated_at"], err = json.Marshal(a.UpdatedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'updated_at': %w", err) + } + } + + if a.VerificationKeyId != nil { + object["verification_key_id"], err = json.Marshal(a.VerificationKeyId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'verification_key_id': %w", err) + } + } + + object["verification_status"], err = json.Marshal(a.VerificationStatus) + if err != nil { + return nil, fmt.Errorf("error marshaling 'verification_status': %w", err) + } + + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_Owner. 
Returns the specified +// element and whether it was found +func (a PackageInfo_Owner) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Owner +func (a *PackageInfo_Owner) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Owner to handle AdditionalProperties +func (a *PackageInfo_Owner) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["github"]; found { + err = json.Unmarshal(raw, &a.Github) + if err != nil { + return fmt.Errorf("error reading 'github': %w", err) + } + delete(object, "github") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Owner to handle AdditionalProperties +func (a PackageInfo_Owner) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Github != nil { + object["github"], err = json.Marshal(a.Github) + if err != nil { + return nil, fmt.Errorf("error marshaling 'github': %w", err) + } + } + + if a.Type != nil { + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_Source. 
Returns the specified +// element and whether it was found +func (a PackageInfo_Source) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Source +func (a *PackageInfo_Source) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Source to handle AdditionalProperties +func (a *PackageInfo_Source) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["license"]; found { + err = json.Unmarshal(raw, &a.License) + if err != nil { + return fmt.Errorf("error reading 'license': %w", err) + } + delete(object, "license") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Source to handle AdditionalProperties +func (a PackageInfo_Source) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["license"], err = json.Marshal(a.License) + if err != nil { + return nil, fmt.Errorf("error marshaling 'license': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem. 
Returns the specified +// element and whether it was found +func (a PackageListItem) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem +func (a *PackageListItem) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem to handle AdditionalProperties +func (a *PackageListItem) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["categories"]; found { + err = json.Unmarshal(raw, &a.Categories) + if err != nil { + return fmt.Errorf("error reading 'categories': %w", err) + } + delete(object, "categories") + } + + if raw, found := object["conditions"]; found { + err = json.Unmarshal(raw, &a.Conditions) + if err != nil { + return fmt.Errorf("error reading 'conditions': %w", err) + } + delete(object, "conditions") + } + + if raw, found := object["data_streams"]; found { + err = json.Unmarshal(raw, &a.DataStreams) + if err != nil { + return fmt.Errorf("error reading 'data_streams': %w", err) + } + delete(object, "data_streams") + } + + if raw, found := object["description"]; found { + err = json.Unmarshal(raw, &a.Description) + if err != nil { + return fmt.Errorf("error reading 'description': %w", err) + } + delete(object, "description") + } + + if raw, found := object["download"]; found { + err = json.Unmarshal(raw, &a.Download) + if err != nil { + return fmt.Errorf("error reading 'download': %w", err) + } + delete(object, "download") + } + + if raw, found := object["format_version"]; found { + err = json.Unmarshal(raw, &a.FormatVersion) + if err != nil { + return fmt.Errorf("error reading 'format_version': %w", err) + } + delete(object, "format_version") + } + + if raw, found := object["icons"]; found { + err = json.Unmarshal(raw, &a.Icons) + if err != nil { + return fmt.Errorf("error reading 'icons': %w", err) + } + delete(object, "icons") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["installationInfo"]; found { + err = json.Unmarshal(raw, &a.InstallationInfo) + if err != nil { + return fmt.Errorf("error reading 'installationInfo': %w", err) + } + delete(object, "installationInfo") + } + + if raw, found := object["integration"]; found { + err = json.Unmarshal(raw, &a.Integration) + if err != nil { + return fmt.Errorf("error reading 'integration': %w", err) + } + delete(object, "integration") + } + + if raw, found := object["internal"]; found { + err = json.Unmarshal(raw, &a.Internal) + if err != nil { + return fmt.Errorf("error reading 'internal': %w", err) + } + delete(object, "internal") + } + + if raw, found := object["latestVersion"]; found { + err = json.Unmarshal(raw, &a.LatestVersion) + if err != nil { + return fmt.Errorf("error reading 'latestVersion': %w", err) + } + delete(object, "latestVersion") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["owner"]; found { + err = 
json.Unmarshal(raw, &a.Owner) + if err != nil { + return fmt.Errorf("error reading 'owner': %w", err) + } + delete(object, "owner") + } + + if raw, found := object["path"]; found { + err = json.Unmarshal(raw, &a.Path) + if err != nil { + return fmt.Errorf("error reading 'path': %w", err) + } + delete(object, "path") + } + + if raw, found := object["policy_templates"]; found { + err = json.Unmarshal(raw, &a.PolicyTemplates) + if err != nil { + return fmt.Errorf("error reading 'policy_templates': %w", err) + } + delete(object, "policy_templates") + } + + if raw, found := object["readme"]; found { + err = json.Unmarshal(raw, &a.Readme) + if err != nil { + return fmt.Errorf("error reading 'readme': %w", err) + } + delete(object, "readme") + } + + if raw, found := object["release"]; found { + err = json.Unmarshal(raw, &a.Release) + if err != nil { + return fmt.Errorf("error reading 'release': %w", err) + } + delete(object, "release") + } + + if raw, found := object["savedObject"]; found { + err = json.Unmarshal(raw, &a.SavedObject) + if err != nil { + return fmt.Errorf("error reading 'savedObject': %w", err) + } + delete(object, "savedObject") + } + + if raw, found := object["signature_path"]; found { + err = json.Unmarshal(raw, &a.SignaturePath) + if err != nil { + return fmt.Errorf("error reading 'signature_path': %w", err) + } + delete(object, "signature_path") + } + + if raw, found := object["source"]; found { + err = json.Unmarshal(raw, &a.Source) + if err != nil { + return fmt.Errorf("error reading 'source': %w", err) + } + delete(object, "source") + } + + if raw, found := object["status"]; found { + err = json.Unmarshal(raw, &a.Status) + if err != nil { + return fmt.Errorf("error reading 'status': %w", err) + } + delete(object, "status") + } + + if raw, found := object["title"]; found { + err = json.Unmarshal(raw, &a.Title) + if err != nil { + return fmt.Errorf("error reading 'title': %w", err) + } + delete(object, "title") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["vars"]; found { + err = json.Unmarshal(raw, &a.Vars) + if err != nil { + return fmt.Errorf("error reading 'vars': %w", err) + } + delete(object, "vars") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem to handle AdditionalProperties +func (a PackageListItem) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Categories != nil { + object["categories"], err = json.Marshal(a.Categories) + if err != nil { + return nil, fmt.Errorf("error marshaling 'categories': %w", err) + } + } + + if a.Conditions != nil { + object["conditions"], err = json.Marshal(a.Conditions) + if err != nil { + return nil, fmt.Errorf("error marshaling 'conditions': %w", err) + } + } + + if a.DataStreams != nil { + object["data_streams"], err = json.Marshal(a.DataStreams) + 
if err != nil { + return nil, fmt.Errorf("error marshaling 'data_streams': %w", err) + } + } + + if a.Description != nil { + object["description"], err = json.Marshal(a.Description) + if err != nil { + return nil, fmt.Errorf("error marshaling 'description': %w", err) + } + } + + if a.Download != nil { + object["download"], err = json.Marshal(a.Download) + if err != nil { + return nil, fmt.Errorf("error marshaling 'download': %w", err) + } + } + + if a.FormatVersion != nil { + object["format_version"], err = json.Marshal(a.FormatVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'format_version': %w", err) + } + } + + if a.Icons != nil { + object["icons"], err = json.Marshal(a.Icons) + if err != nil { + return nil, fmt.Errorf("error marshaling 'icons': %w", err) + } + } + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + if a.InstallationInfo != nil { + object["installationInfo"], err = json.Marshal(a.InstallationInfo) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installationInfo': %w", err) + } + } + + if a.Integration != nil { + object["integration"], err = json.Marshal(a.Integration) + if err != nil { + return nil, fmt.Errorf("error marshaling 'integration': %w", err) + } + } + + if a.Internal != nil { + object["internal"], err = json.Marshal(a.Internal) + if err != nil { + return nil, fmt.Errorf("error marshaling 'internal': %w", err) + } + } + + if a.LatestVersion != nil { + object["latestVersion"], err = json.Marshal(a.LatestVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'latestVersion': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Owner != nil { + object["owner"], err = json.Marshal(a.Owner) + if err != nil { + return nil, fmt.Errorf("error marshaling 'owner': %w", err) + } + } + + if a.Path != nil { + object["path"], err = json.Marshal(a.Path) + if err != nil { + return nil, fmt.Errorf("error marshaling 'path': %w", err) + } + } + + if a.PolicyTemplates != nil { + object["policy_templates"], err = json.Marshal(a.PolicyTemplates) + if err != nil { + return nil, fmt.Errorf("error marshaling 'policy_templates': %w", err) + } + } + + if a.Readme != nil { + object["readme"], err = json.Marshal(a.Readme) + if err != nil { + return nil, fmt.Errorf("error marshaling 'readme': %w", err) + } + } + + if a.Release != nil { + object["release"], err = json.Marshal(a.Release) + if err != nil { + return nil, fmt.Errorf("error marshaling 'release': %w", err) + } + } + + object["savedObject"], err = json.Marshal(a.SavedObject) + if err != nil { + return nil, fmt.Errorf("error marshaling 'savedObject': %w", err) + } + + if a.SignaturePath != nil { + object["signature_path"], err = json.Marshal(a.SignaturePath) + if err != nil { + return nil, fmt.Errorf("error marshaling 'signature_path': %w", err) + } + } + + if a.Source != nil { + object["source"], err = json.Marshal(a.Source) + if err != nil { + return nil, fmt.Errorf("error marshaling 'source': %w", err) + } + } + + if a.Status != nil { + object["status"], err = json.Marshal(a.Status) + if err != nil { + return nil, fmt.Errorf("error marshaling 'status': %w", err) + } + } + + object["title"], err = json.Marshal(a.Title) + if err != nil { + return nil, fmt.Errorf("error marshaling 'title': %w", err) + } + + if a.Type != nil { + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, 
fmt.Errorf("error marshaling 'type': %w", err) + } + } + + if a.Vars != nil { + object["vars"], err = json.Marshal(a.Vars) + if err != nil { + return nil, fmt.Errorf("error marshaling 'vars': %w", err) + } + } + + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Conditions_Elastic. Returns the specified +// element and whether it was found +func (a PackageListItem_Conditions_Elastic) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Conditions_Elastic +func (a *PackageListItem_Conditions_Elastic) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Conditions_Elastic to handle AdditionalProperties +func (a *PackageListItem_Conditions_Elastic) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["capabilities"]; found { + err = json.Unmarshal(raw, &a.Capabilities) + if err != nil { + return fmt.Errorf("error reading 'capabilities': %w", err) + } + delete(object, "capabilities") + } + + if raw, found := object["subscription"]; found { + err = json.Unmarshal(raw, &a.Subscription) + if err != nil { + return fmt.Errorf("error reading 'subscription': %w", err) + } + delete(object, "subscription") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Conditions_Elastic to handle AdditionalProperties +func (a PackageListItem_Conditions_Elastic) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Capabilities != nil { + object["capabilities"], err = json.Marshal(a.Capabilities) + if err != nil { + return nil, fmt.Errorf("error marshaling 'capabilities': %w", err) + } + } + + if a.Subscription != nil { + object["subscription"], err = json.Marshal(a.Subscription) + if err != nil { + return nil, fmt.Errorf("error marshaling 'subscription': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Conditions_Kibana. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Conditions_Kibana) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Conditions_Kibana +func (a *PackageListItem_Conditions_Kibana) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Conditions_Kibana to handle AdditionalProperties +func (a *PackageListItem_Conditions_Kibana) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Conditions_Kibana to handle AdditionalProperties +func (a PackageListItem_Conditions_Kibana) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Version != nil { + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Conditions. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Conditions) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Conditions +func (a *PackageListItem_Conditions) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Conditions to handle AdditionalProperties +func (a *PackageListItem_Conditions) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["elastic"]; found { + err = json.Unmarshal(raw, &a.Elastic) + if err != nil { + return fmt.Errorf("error reading 'elastic': %w", err) + } + delete(object, "elastic") + } + + if raw, found := object["kibana"]; found { + err = json.Unmarshal(raw, &a.Kibana) + if err != nil { + return fmt.Errorf("error reading 'kibana': %w", err) + } + delete(object, "kibana") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Conditions to handle AdditionalProperties +func (a PackageListItem_Conditions) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Elastic != nil { + object["elastic"], err = json.Marshal(a.Elastic) + if err != nil { + return nil, fmt.Errorf("error marshaling 'elastic': %w", err) + } + } + + if a.Kibana != nil { + object["kibana"], err = json.Marshal(a.Kibana) + if err != nil { + return nil, fmt.Errorf("error marshaling 'kibana': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Icons_Item. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Icons_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Icons_Item +func (a *PackageListItem_Icons_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Icons_Item to handle AdditionalProperties +func (a *PackageListItem_Icons_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["dark_mode"]; found { + err = json.Unmarshal(raw, &a.DarkMode) + if err != nil { + return fmt.Errorf("error reading 'dark_mode': %w", err) + } + delete(object, "dark_mode") + } + + if raw, found := object["path"]; found { + err = json.Unmarshal(raw, &a.Path) + if err != nil { + return fmt.Errorf("error reading 'path': %w", err) + } + delete(object, "path") + } + + if raw, found := object["size"]; found { + err = json.Unmarshal(raw, &a.Size) + if err != nil { + return fmt.Errorf("error reading 'size': %w", err) + } + delete(object, "size") + } + + if raw, found := object["src"]; found { + err = json.Unmarshal(raw, &a.Src) + if err != nil { + return fmt.Errorf("error reading 'src': %w", err) + } + delete(object, "src") + } + + if raw, found := object["title"]; found { + err = json.Unmarshal(raw, &a.Title) + if err != nil { + return fmt.Errorf("error reading 'title': %w", err) + } + delete(object, "title") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Icons_Item to handle AdditionalProperties +func (a PackageListItem_Icons_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.DarkMode != nil { + object["dark_mode"], err = json.Marshal(a.DarkMode) + if err != nil { + return nil, fmt.Errorf("error marshaling 'dark_mode': %w", err) + } + } + + if a.Path != nil { + object["path"], err = json.Marshal(a.Path) + if err != nil { + return nil, fmt.Errorf("error marshaling 'path': %w", err) + } + } + + if a.Size != nil { + object["size"], err = json.Marshal(a.Size) + if err != nil { + return nil, fmt.Errorf("error marshaling 'size': %w", err) + } + } + + object["src"], err = json.Marshal(a.Src) + if err != nil { + return nil, fmt.Errorf("error marshaling 'src': %w", err) + } + + if a.Title != nil { + object["title"], err = json.Marshal(a.Title) + if err != nil { + return nil, fmt.Errorf("error marshaling 'title': %w", err) + } + } + + if a.Type != nil { + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + 
object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item. Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item +func (a *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["originId"]; found { + err = json.Unmarshal(raw, &a.OriginId) + if err != nil { + return fmt.Errorf("error reading 'originId': %w", err) + } + delete(object, "originId") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + if a.OriginId != nil { + object["originId"], err = json.Marshal(a.OriginId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'originId': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features +func (a *PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["doc_value_only_numeric"]; found { + err = json.Unmarshal(raw, &a.DocValueOnlyNumeric) + if err != nil { + return fmt.Errorf("error reading 'doc_value_only_numeric': %w", err) + } + delete(object, "doc_value_only_numeric") + } + + if raw, found := object["doc_value_only_other"]; found { + err = json.Unmarshal(raw, &a.DocValueOnlyOther) + if err != nil { + return fmt.Errorf("error reading 'doc_value_only_other': %w", err) + } + delete(object, "doc_value_only_other") + } + + if raw, found := object["synthetic_source"]; found { + err = json.Unmarshal(raw, &a.SyntheticSource) + if err != nil { + return fmt.Errorf("error reading 'synthetic_source': %w", err) + } + delete(object, "synthetic_source") + } + + if raw, found := object["tsdb"]; found { + err = json.Unmarshal(raw, &a.Tsdb) + if err != nil { + return fmt.Errorf("error reading 'tsdb': %w", err) + } + delete(object, "tsdb") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.DocValueOnlyNumeric != nil { + object["doc_value_only_numeric"], err = json.Marshal(a.DocValueOnlyNumeric) + if err != nil { + return nil, fmt.Errorf("error marshaling 'doc_value_only_numeric': %w", err) + } + } + + if a.DocValueOnlyOther != nil { + object["doc_value_only_other"], err = json.Marshal(a.DocValueOnlyOther) + if err != nil { + return nil, fmt.Errorf("error marshaling 'doc_value_only_other': %w", err) + } + } + + if a.SyntheticSource != nil { + object["synthetic_source"], err = json.Marshal(a.SyntheticSource) + if err != nil { + return nil, fmt.Errorf("error marshaling 'synthetic_source': %w", err) + } + } + + if a.Tsdb != nil { + object["tsdb"], err = json.Marshal(a.Tsdb) + if err != nil { + return nil, fmt.Errorf("error marshaling 'tsdb': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = 
json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item. Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item +func (a *PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["data_stream"]; found { + err = json.Unmarshal(raw, &a.DataStream) + if err != nil { + return fmt.Errorf("error reading 'data_stream': %w", err) + } + delete(object, "data_stream") + } + + if raw, found := object["features"]; found { + err = json.Unmarshal(raw, &a.Features) + if err != nil { + return fmt.Errorf("error reading 'features': %w", err) + } + delete(object, "features") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["data_stream"], err = json.Marshal(a.DataStream) + if err != nil { + return nil, fmt.Errorf("error marshaling 'data_stream': %w", err) + } + + object["features"], err = json.Marshal(a.Features) + if err != nil { + return nil, fmt.Errorf("error marshaling 'features': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_InstalledEs_Item. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_InstalledEs_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_InstalledEs_Item +func (a *PackageListItem_InstallationInfo_InstalledEs_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_InstalledEs_Item to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_InstalledEs_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["deferred"]; found { + err = json.Unmarshal(raw, &a.Deferred) + if err != nil { + return fmt.Errorf("error reading 'deferred': %w", err) + } + delete(object, "deferred") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_InstalledEs_Item to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_InstalledEs_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Deferred != nil { + object["deferred"], err = json.Marshal(a.Deferred) + if err != nil { + return nil, fmt.Errorf("error marshaling 'deferred': %w", err) + } + } + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + if a.Version != nil { + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_InstalledKibana_Item. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_InstalledKibana_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_InstalledKibana_Item +func (a *PackageListItem_InstallationInfo_InstalledKibana_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_InstalledKibana_Item to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_InstalledKibana_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["originId"]; found { + err = json.Unmarshal(raw, &a.OriginId) + if err != nil { + return fmt.Errorf("error reading 'originId': %w", err) + } + delete(object, "originId") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_InstalledKibana_Item to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_InstalledKibana_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + if a.OriginId != nil { + object["originId"], err = json.Marshal(a.OriginId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'originId': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_LatestExecutedState. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_LatestExecutedState) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_LatestExecutedState +func (a *PackageListItem_InstallationInfo_LatestExecutedState) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_LatestExecutedState to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_LatestExecutedState) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["error"]; found { + err = json.Unmarshal(raw, &a.Error) + if err != nil { + return fmt.Errorf("error reading 'error': %w", err) + } + delete(object, "error") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["started_at"]; found { + err = json.Unmarshal(raw, &a.StartedAt) + if err != nil { + return fmt.Errorf("error reading 'started_at': %w", err) + } + delete(object, "started_at") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_LatestExecutedState to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_LatestExecutedState) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Error != nil { + object["error"], err = json.Marshal(a.Error) + if err != nil { + return nil, fmt.Errorf("error marshaling 'error': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + object["started_at"], err = json.Marshal(a.StartedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'started_at': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error +func (a *PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["message"]; found { + err = json.Unmarshal(raw, &a.Message) + if err != nil { + return fmt.Errorf("error reading 'message': %w", err) + } + delete(object, "message") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["stack"]; found { + err = json.Unmarshal(raw, &a.Stack) + if err != nil { + return fmt.Errorf("error reading 'stack': %w", err) + } + delete(object, "stack") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["message"], err = json.Marshal(a.Message) + if err != nil { + return nil, fmt.Errorf("error marshaling 'message': %w", err) + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Stack != nil { + object["stack"], err = json.Marshal(a.Stack) + if err != nil { + return nil, fmt.Errorf("error marshaling 'stack': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item +func (a *PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["created_at"]; found { + err = json.Unmarshal(raw, &a.CreatedAt) + if err != nil { + return fmt.Errorf("error reading 'created_at': %w", err) + } + delete(object, "created_at") + } + + if raw, found := object["error"]; found { + err = json.Unmarshal(raw, &a.Error) + if err != nil { + return fmt.Errorf("error reading 'error': %w", err) + } + delete(object, "error") + } + + if raw, found := object["target_version"]; found { + err = json.Unmarshal(raw, &a.TargetVersion) + if err != nil { + return fmt.Errorf("error reading 'target_version': %w", err) + } + delete(object, "target_version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["created_at"], err = json.Marshal(a.CreatedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'created_at': %w", err) + } + + object["error"], err = json.Marshal(a.Error) + if err != nil { + return nil, fmt.Errorf("error marshaling 'error': %w", err) + } + + object["target_version"], err = json.Marshal(a.TargetVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'target_version': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo +func (a *PackageListItem_InstallationInfo) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["additional_spaces_installed_kibana"]; found { + err = json.Unmarshal(raw, &a.AdditionalSpacesInstalledKibana) + if err != nil { + return fmt.Errorf("error reading 'additional_spaces_installed_kibana': %w", err) + } + delete(object, "additional_spaces_installed_kibana") + } + + if raw, found := object["created_at"]; found { + err = json.Unmarshal(raw, &a.CreatedAt) + if err != nil { + return fmt.Errorf("error reading 'created_at': %w", err) + } + delete(object, "created_at") + } + + if raw, found := object["experimental_data_stream_features"]; found { + err = json.Unmarshal(raw, &a.ExperimentalDataStreamFeatures) + if err != nil { + return fmt.Errorf("error reading 'experimental_data_stream_features': %w", err) + } + delete(object, "experimental_data_stream_features") + } + + if raw, found := object["install_format_schema_version"]; found { + err = json.Unmarshal(raw, &a.InstallFormatSchemaVersion) + if err != nil { + return fmt.Errorf("error reading 'install_format_schema_version': %w", err) + } + delete(object, "install_format_schema_version") + } + + if raw, found := object["install_source"]; found { + err = json.Unmarshal(raw, &a.InstallSource) + if err != nil { + return fmt.Errorf("error reading 'install_source': %w", err) + } + delete(object, "install_source") + } + + if raw, found := object["install_status"]; found { + err = json.Unmarshal(raw, &a.InstallStatus) + if err != nil { + return fmt.Errorf("error reading 'install_status': %w", err) + } + delete(object, "install_status") + } + + if raw, found := object["installed_es"]; found { + err = json.Unmarshal(raw, &a.InstalledEs) + if err != nil { + return fmt.Errorf("error reading 'installed_es': %w", err) + } + delete(object, "installed_es") + } + + if raw, found := object["installed_kibana"]; found { + err = json.Unmarshal(raw, &a.InstalledKibana) + if err != nil { + return fmt.Errorf("error reading 'installed_kibana': %w", err) + } + delete(object, "installed_kibana") + } + + if raw, found := object["installed_kibana_space_id"]; found { + err = json.Unmarshal(raw, &a.InstalledKibanaSpaceId) + if err != nil { + return fmt.Errorf("error reading 'installed_kibana_space_id': %w", err) + } + delete(object, "installed_kibana_space_id") + } + + if raw, found := object["latest_executed_state"]; found { + err = json.Unmarshal(raw, &a.LatestExecutedState) + if err != nil { + return fmt.Errorf("error reading 'latest_executed_state': %w", err) + } + delete(object, "latest_executed_state") + } + + if raw, found := object["latest_install_failed_attempts"]; found { + err = json.Unmarshal(raw, &a.LatestInstallFailedAttempts) + if err != nil { + return fmt.Errorf("error 
reading 'latest_install_failed_attempts': %w", err) + } + delete(object, "latest_install_failed_attempts") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["namespaces"]; found { + err = json.Unmarshal(raw, &a.Namespaces) + if err != nil { + return fmt.Errorf("error reading 'namespaces': %w", err) + } + delete(object, "namespaces") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["updated_at"]; found { + err = json.Unmarshal(raw, &a.UpdatedAt) + if err != nil { + return fmt.Errorf("error reading 'updated_at': %w", err) + } + delete(object, "updated_at") + } + + if raw, found := object["verification_key_id"]; found { + err = json.Unmarshal(raw, &a.VerificationKeyId) + if err != nil { + return fmt.Errorf("error reading 'verification_key_id': %w", err) + } + delete(object, "verification_key_id") + } + + if raw, found := object["verification_status"]; found { + err = json.Unmarshal(raw, &a.VerificationStatus) + if err != nil { + return fmt.Errorf("error reading 'verification_status': %w", err) + } + delete(object, "verification_status") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo to handle AdditionalProperties +func (a PackageListItem_InstallationInfo) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.AdditionalSpacesInstalledKibana != nil { + object["additional_spaces_installed_kibana"], err = json.Marshal(a.AdditionalSpacesInstalledKibana) + if err != nil { + return nil, fmt.Errorf("error marshaling 'additional_spaces_installed_kibana': %w", err) + } + } + + if a.CreatedAt != nil { + object["created_at"], err = json.Marshal(a.CreatedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'created_at': %w", err) + } + } + + if a.ExperimentalDataStreamFeatures != nil { + object["experimental_data_stream_features"], err = json.Marshal(a.ExperimentalDataStreamFeatures) + if err != nil { + return nil, fmt.Errorf("error marshaling 'experimental_data_stream_features': %w", err) + } + } + + if a.InstallFormatSchemaVersion != nil { + object["install_format_schema_version"], err = json.Marshal(a.InstallFormatSchemaVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'install_format_schema_version': %w", err) + } + } + + object["install_source"], err = json.Marshal(a.InstallSource) + if err != nil { + return nil, fmt.Errorf("error marshaling 'install_source': %w", err) + } + + object["install_status"], err = json.Marshal(a.InstallStatus) + if err != nil { + return nil, fmt.Errorf("error marshaling 'install_status': %w", err) + } + + object["installed_es"], err = json.Marshal(a.InstalledEs) + if err != nil { + 
return nil, fmt.Errorf("error marshaling 'installed_es': %w", err) + } + + object["installed_kibana"], err = json.Marshal(a.InstalledKibana) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installed_kibana': %w", err) + } + + if a.InstalledKibanaSpaceId != nil { + object["installed_kibana_space_id"], err = json.Marshal(a.InstalledKibanaSpaceId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installed_kibana_space_id': %w", err) + } + } + + if a.LatestExecutedState != nil { + object["latest_executed_state"], err = json.Marshal(a.LatestExecutedState) + if err != nil { + return nil, fmt.Errorf("error marshaling 'latest_executed_state': %w", err) + } + } + + if a.LatestInstallFailedAttempts != nil { + object["latest_install_failed_attempts"], err = json.Marshal(a.LatestInstallFailedAttempts) + if err != nil { + return nil, fmt.Errorf("error marshaling 'latest_install_failed_attempts': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Namespaces != nil { + object["namespaces"], err = json.Marshal(a.Namespaces) + if err != nil { + return nil, fmt.Errorf("error marshaling 'namespaces': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + if a.UpdatedAt != nil { + object["updated_at"], err = json.Marshal(a.UpdatedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'updated_at': %w", err) + } + } + + if a.VerificationKeyId != nil { + object["verification_key_id"], err = json.Marshal(a.VerificationKeyId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'verification_key_id': %w", err) + } + } + + object["verification_status"], err = json.Marshal(a.VerificationStatus) + if err != nil { + return nil, fmt.Errorf("error marshaling 'verification_status': %w", err) + } + + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Owner. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Owner) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Owner +func (a *PackageListItem_Owner) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Owner to handle AdditionalProperties +func (a *PackageListItem_Owner) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["github"]; found { + err = json.Unmarshal(raw, &a.Github) + if err != nil { + return fmt.Errorf("error reading 'github': %w", err) + } + delete(object, "github") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Owner to handle AdditionalProperties +func (a PackageListItem_Owner) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Github != nil { + object["github"], err = json.Marshal(a.Github) + if err != nil { + return nil, fmt.Errorf("error marshaling 'github': %w", err) + } + } + + if a.Type != nil { + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Source. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Source) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Source +func (a *PackageListItem_Source) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Source to handle AdditionalProperties +func (a *PackageListItem_Source) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["license"]; found { + err = json.Unmarshal(raw, &a.License) + if err != nil { + return fmt.Errorf("error reading 'license': %w", err) + } + delete(object, "license") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Source to handle AdditionalProperties +func (a PackageListItem_Source) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["license"], err = json.Marshal(a.License) + if err != nil { + return nil, fmt.Errorf("error marshaling 'license': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackagePolicy_Elasticsearch_Privileges. 
Returns the specified +// element and whether it was found +func (a PackagePolicy_Elasticsearch_Privileges) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackagePolicy_Elasticsearch_Privileges +func (a *PackagePolicy_Elasticsearch_Privileges) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackagePolicy_Elasticsearch_Privileges to handle AdditionalProperties +func (a *PackagePolicy_Elasticsearch_Privileges) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["cluster"]; found { + err = json.Unmarshal(raw, &a.Cluster) + if err != nil { + return fmt.Errorf("error reading 'cluster': %w", err) + } + delete(object, "cluster") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackagePolicy_Elasticsearch_Privileges to handle AdditionalProperties +func (a PackagePolicy_Elasticsearch_Privileges) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Cluster != nil { + object["cluster"], err = json.Marshal(a.Cluster) + if err != nil { + return nil, fmt.Errorf("error marshaling 'cluster': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackagePolicy_Elasticsearch. 
Returns the specified +// element and whether it was found +func (a PackagePolicy_Elasticsearch) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackagePolicy_Elasticsearch +func (a *PackagePolicy_Elasticsearch) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackagePolicy_Elasticsearch to handle AdditionalProperties +func (a *PackagePolicy_Elasticsearch) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["privileges"]; found { + err = json.Unmarshal(raw, &a.Privileges) + if err != nil { + return fmt.Errorf("error reading 'privileges': %w", err) + } + delete(object, "privileges") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackagePolicy_Elasticsearch to handle AdditionalProperties +func (a PackagePolicy_Elasticsearch) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Privileges != nil { + object["privileges"], err = json.Marshal(a.Privileges) + if err != nil { + return nil, fmt.Errorf("error marshaling 'privileges': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// AsAgentPolicyGlobalDataTagsValue0 returns the union data inside the AgentPolicy_GlobalDataTags_Value as a AgentPolicyGlobalDataTagsValue0 +func (t AgentPolicy_GlobalDataTags_Value) AsAgentPolicyGlobalDataTagsValue0() (AgentPolicyGlobalDataTagsValue0, error) { + var body AgentPolicyGlobalDataTagsValue0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyGlobalDataTagsValue0 overwrites any union data inside the AgentPolicy_GlobalDataTags_Value as the provided AgentPolicyGlobalDataTagsValue0 +func (t *AgentPolicy_GlobalDataTags_Value) FromAgentPolicyGlobalDataTagsValue0(v AgentPolicyGlobalDataTagsValue0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyGlobalDataTagsValue0 performs a merge with any union data inside the AgentPolicy_GlobalDataTags_Value, using the provided AgentPolicyGlobalDataTagsValue0 +func (t *AgentPolicy_GlobalDataTags_Value) MergeAgentPolicyGlobalDataTagsValue0(v AgentPolicyGlobalDataTagsValue0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyGlobalDataTagsValue1 returns the union data inside the AgentPolicy_GlobalDataTags_Value as a AgentPolicyGlobalDataTagsValue1 +func (t AgentPolicy_GlobalDataTags_Value) AsAgentPolicyGlobalDataTagsValue1() (AgentPolicyGlobalDataTagsValue1, error) { + var body AgentPolicyGlobalDataTagsValue1 + err := 
json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyGlobalDataTagsValue1 overwrites any union data inside the AgentPolicy_GlobalDataTags_Value as the provided AgentPolicyGlobalDataTagsValue1 +func (t *AgentPolicy_GlobalDataTags_Value) FromAgentPolicyGlobalDataTagsValue1(v AgentPolicyGlobalDataTagsValue1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyGlobalDataTagsValue1 performs a merge with any union data inside the AgentPolicy_GlobalDataTags_Value, using the provided AgentPolicyGlobalDataTagsValue1 +func (t *AgentPolicy_GlobalDataTags_Value) MergeAgentPolicyGlobalDataTagsValue1(v AgentPolicyGlobalDataTagsValue1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t AgentPolicy_GlobalDataTags_Value) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *AgentPolicy_GlobalDataTags_Value) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsAgentPolicyPackagePolicies0 returns the union data inside the AgentPolicy_PackagePolicies as a AgentPolicyPackagePolicies0 +func (t AgentPolicy_PackagePolicies) AsAgentPolicyPackagePolicies0() (AgentPolicyPackagePolicies0, error) { + var body AgentPolicyPackagePolicies0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies0 overwrites any union data inside the AgentPolicy_PackagePolicies as the provided AgentPolicyPackagePolicies0 +func (t *AgentPolicy_PackagePolicies) FromAgentPolicyPackagePolicies0(v AgentPolicyPackagePolicies0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies0 performs a merge with any union data inside the AgentPolicy_PackagePolicies, using the provided AgentPolicyPackagePolicies0 +func (t *AgentPolicy_PackagePolicies) MergeAgentPolicyPackagePolicies0(v AgentPolicyPackagePolicies0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1 returns the union data inside the AgentPolicy_PackagePolicies as a AgentPolicyPackagePolicies1 +func (t AgentPolicy_PackagePolicies) AsAgentPolicyPackagePolicies1() (AgentPolicyPackagePolicies1, error) { + var body AgentPolicyPackagePolicies1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1 overwrites any union data inside the AgentPolicy_PackagePolicies as the provided AgentPolicyPackagePolicies1 +func (t *AgentPolicy_PackagePolicies) FromAgentPolicyPackagePolicies1(v AgentPolicyPackagePolicies1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1 performs a merge with any union data inside the AgentPolicy_PackagePolicies, using the provided AgentPolicyPackagePolicies1 +func (t *AgentPolicy_PackagePolicies) MergeAgentPolicyPackagePolicies1(v AgentPolicyPackagePolicies1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t AgentPolicy_PackagePolicies) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *AgentPolicy_PackagePolicies) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputKafkaSecretsPassword0 returns the 
union data inside the NewOutputKafka_Secrets_Password as a NewOutputKafkaSecretsPassword0 +func (t NewOutputKafka_Secrets_Password) AsNewOutputKafkaSecretsPassword0() (NewOutputKafkaSecretsPassword0, error) { + var body NewOutputKafkaSecretsPassword0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafkaSecretsPassword0 overwrites any union data inside the NewOutputKafka_Secrets_Password as the provided NewOutputKafkaSecretsPassword0 +func (t *NewOutputKafka_Secrets_Password) FromNewOutputKafkaSecretsPassword0(v NewOutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafkaSecretsPassword0 performs a merge with any union data inside the NewOutputKafka_Secrets_Password, using the provided NewOutputKafkaSecretsPassword0 +func (t *NewOutputKafka_Secrets_Password) MergeNewOutputKafkaSecretsPassword0(v NewOutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputKafkaSecretsPassword1 returns the union data inside the NewOutputKafka_Secrets_Password as a NewOutputKafkaSecretsPassword1 +func (t NewOutputKafka_Secrets_Password) AsNewOutputKafkaSecretsPassword1() (NewOutputKafkaSecretsPassword1, error) { + var body NewOutputKafkaSecretsPassword1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafkaSecretsPassword1 overwrites any union data inside the NewOutputKafka_Secrets_Password as the provided NewOutputKafkaSecretsPassword1 +func (t *NewOutputKafka_Secrets_Password) FromNewOutputKafkaSecretsPassword1(v NewOutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafkaSecretsPassword1 performs a merge with any union data inside the NewOutputKafka_Secrets_Password, using the provided NewOutputKafkaSecretsPassword1 +func (t *NewOutputKafka_Secrets_Password) MergeNewOutputKafkaSecretsPassword1(v NewOutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputKafka_Secrets_Password) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputKafka_Secrets_Password) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputKafkaSecretsSslKey0 returns the union data inside the NewOutputKafka_Secrets_Ssl_Key as a NewOutputKafkaSecretsSslKey0 +func (t NewOutputKafka_Secrets_Ssl_Key) AsNewOutputKafkaSecretsSslKey0() (NewOutputKafkaSecretsSslKey0, error) { + var body NewOutputKafkaSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafkaSecretsSslKey0 overwrites any union data inside the NewOutputKafka_Secrets_Ssl_Key as the provided NewOutputKafkaSecretsSslKey0 +func (t *NewOutputKafka_Secrets_Ssl_Key) FromNewOutputKafkaSecretsSslKey0(v NewOutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafkaSecretsSslKey0 performs a merge with any union data inside the NewOutputKafka_Secrets_Ssl_Key, using the provided NewOutputKafkaSecretsSslKey0 +func (t *NewOutputKafka_Secrets_Ssl_Key) MergeNewOutputKafkaSecretsSslKey0(v NewOutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, 
b) + t.union = merged + return err +} + +// AsNewOutputKafkaSecretsSslKey1 returns the union data inside the NewOutputKafka_Secrets_Ssl_Key as a NewOutputKafkaSecretsSslKey1 +func (t NewOutputKafka_Secrets_Ssl_Key) AsNewOutputKafkaSecretsSslKey1() (NewOutputKafkaSecretsSslKey1, error) { + var body NewOutputKafkaSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafkaSecretsSslKey1 overwrites any union data inside the NewOutputKafka_Secrets_Ssl_Key as the provided NewOutputKafkaSecretsSslKey1 +func (t *NewOutputKafka_Secrets_Ssl_Key) FromNewOutputKafkaSecretsSslKey1(v NewOutputKafkaSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafkaSecretsSslKey1 performs a merge with any union data inside the NewOutputKafka_Secrets_Ssl_Key, using the provided NewOutputKafkaSecretsSslKey1 +func (t *NewOutputKafka_Secrets_Ssl_Key) MergeNewOutputKafkaSecretsSslKey1(v NewOutputKafkaSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputKafka_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputKafka_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputLogstashSecretsSslKey0 returns the union data inside the NewOutputLogstash_Secrets_Ssl_Key as a NewOutputLogstashSecretsSslKey0 +func (t NewOutputLogstash_Secrets_Ssl_Key) AsNewOutputLogstashSecretsSslKey0() (NewOutputLogstashSecretsSslKey0, error) { + var body NewOutputLogstashSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputLogstashSecretsSslKey0 overwrites any union data inside the NewOutputLogstash_Secrets_Ssl_Key as the provided NewOutputLogstashSecretsSslKey0 +func (t *NewOutputLogstash_Secrets_Ssl_Key) FromNewOutputLogstashSecretsSslKey0(v NewOutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputLogstashSecretsSslKey0 performs a merge with any union data inside the NewOutputLogstash_Secrets_Ssl_Key, using the provided NewOutputLogstashSecretsSslKey0 +func (t *NewOutputLogstash_Secrets_Ssl_Key) MergeNewOutputLogstashSecretsSslKey0(v NewOutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputLogstashSecretsSslKey1 returns the union data inside the NewOutputLogstash_Secrets_Ssl_Key as a NewOutputLogstashSecretsSslKey1 +func (t NewOutputLogstash_Secrets_Ssl_Key) AsNewOutputLogstashSecretsSslKey1() (NewOutputLogstashSecretsSslKey1, error) { + var body NewOutputLogstashSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputLogstashSecretsSslKey1 overwrites any union data inside the NewOutputLogstash_Secrets_Ssl_Key as the provided NewOutputLogstashSecretsSslKey1 +func (t *NewOutputLogstash_Secrets_Ssl_Key) FromNewOutputLogstashSecretsSslKey1(v NewOutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputLogstashSecretsSslKey1 performs a merge with any union data inside the NewOutputLogstash_Secrets_Ssl_Key, using the provided NewOutputLogstashSecretsSslKey1 +func (t *NewOutputLogstash_Secrets_Ssl_Key) MergeNewOutputLogstashSecretsSslKey1(v 
NewOutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputLogstash_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputLogstash_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputRemoteElasticsearchSecretsServiceToken0 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as a NewOutputRemoteElasticsearchSecretsServiceToken0 +func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) AsNewOutputRemoteElasticsearchSecretsServiceToken0() (NewOutputRemoteElasticsearchSecretsServiceToken0, error) { + var body NewOutputRemoteElasticsearchSecretsServiceToken0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputRemoteElasticsearchSecretsServiceToken0 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as the provided NewOutputRemoteElasticsearchSecretsServiceToken0 +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) FromNewOutputRemoteElasticsearchSecretsServiceToken0(v NewOutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputRemoteElasticsearchSecretsServiceToken0 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided NewOutputRemoteElasticsearchSecretsServiceToken0 +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) MergeNewOutputRemoteElasticsearchSecretsServiceToken0(v NewOutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputRemoteElasticsearchSecretsServiceToken1 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as a NewOutputRemoteElasticsearchSecretsServiceToken1 +func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) AsNewOutputRemoteElasticsearchSecretsServiceToken1() (NewOutputRemoteElasticsearchSecretsServiceToken1, error) { + var body NewOutputRemoteElasticsearchSecretsServiceToken1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputRemoteElasticsearchSecretsServiceToken1 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as the provided NewOutputRemoteElasticsearchSecretsServiceToken1 +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) FromNewOutputRemoteElasticsearchSecretsServiceToken1(v NewOutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputRemoteElasticsearchSecretsServiceToken1 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided NewOutputRemoteElasticsearchSecretsServiceToken1 +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) MergeNewOutputRemoteElasticsearchSecretsServiceToken1(v NewOutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, 
err +} + +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputElasticsearch returns the union data inside the NewOutputUnion as a NewOutputElasticsearch +func (t NewOutputUnion) AsNewOutputElasticsearch() (NewOutputElasticsearch, error) { + var body NewOutputElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputElasticsearch overwrites any union data inside the NewOutputUnion as the provided NewOutputElasticsearch +func (t *NewOutputUnion) FromNewOutputElasticsearch(v NewOutputElasticsearch) error { + v.Type = "elasticsearch" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputElasticsearch performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputElasticsearch +func (t *NewOutputUnion) MergeNewOutputElasticsearch(v NewOutputElasticsearch) error { + v.Type = "elasticsearch" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputRemoteElasticsearch returns the union data inside the NewOutputUnion as a NewOutputRemoteElasticsearch +func (t NewOutputUnion) AsNewOutputRemoteElasticsearch() (NewOutputRemoteElasticsearch, error) { + var body NewOutputRemoteElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputRemoteElasticsearch overwrites any union data inside the NewOutputUnion as the provided NewOutputRemoteElasticsearch +func (t *NewOutputUnion) FromNewOutputRemoteElasticsearch(v NewOutputRemoteElasticsearch) error { + v.Type = "remote_elasticsearch" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputRemoteElasticsearch performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputRemoteElasticsearch +func (t *NewOutputUnion) MergeNewOutputRemoteElasticsearch(v NewOutputRemoteElasticsearch) error { + v.Type = "remote_elasticsearch" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputLogstash returns the union data inside the NewOutputUnion as a NewOutputLogstash +func (t NewOutputUnion) AsNewOutputLogstash() (NewOutputLogstash, error) { + var body NewOutputLogstash + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputLogstash overwrites any union data inside the NewOutputUnion as the provided NewOutputLogstash +func (t *NewOutputUnion) FromNewOutputLogstash(v NewOutputLogstash) error { + v.Type = "logstash" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputLogstash performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputLogstash +func (t *NewOutputUnion) MergeNewOutputLogstash(v NewOutputLogstash) error { + v.Type = "logstash" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputKafka returns the union data inside the NewOutputUnion as a NewOutputKafka +func (t NewOutputUnion) AsNewOutputKafka() (NewOutputKafka, error) { + var body NewOutputKafka + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafka overwrites any union data inside the NewOutputUnion as the provided NewOutputKafka +func (t *NewOutputUnion) FromNewOutputKafka(v NewOutputKafka) 
error { + v.Type = "kafka" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafka performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputKafka +func (t *NewOutputUnion) MergeNewOutputKafka(v NewOutputKafka) error { + v.Type = "kafka" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputUnion) Discriminator() (string, error) { + var discriminator struct { + Discriminator string `json:"type"` + } + err := json.Unmarshal(t.union, &discriminator) + return discriminator.Discriminator, err +} + +func (t NewOutputUnion) ValueByDiscriminator() (interface{}, error) { + discriminator, err := t.Discriminator() + if err != nil { + return nil, err + } + switch discriminator { + case "elasticsearch": + return t.AsNewOutputElasticsearch() + case "kafka": + return t.AsNewOutputKafka() + case "logstash": + return t.AsNewOutputLogstash() + case "remote_elasticsearch": + return t.AsNewOutputRemoteElasticsearch() + default: + return nil, errors.New("unknown discriminator value: " + discriminator) + } +} + +func (t NewOutputUnion) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputUnion) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputKafkaSecretsPassword0 returns the union data inside the OutputKafka_Secrets_Password as a OutputKafkaSecretsPassword0 +func (t OutputKafka_Secrets_Password) AsOutputKafkaSecretsPassword0() (OutputKafkaSecretsPassword0, error) { + var body OutputKafkaSecretsPassword0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafkaSecretsPassword0 overwrites any union data inside the OutputKafka_Secrets_Password as the provided OutputKafkaSecretsPassword0 +func (t *OutputKafka_Secrets_Password) FromOutputKafkaSecretsPassword0(v OutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafkaSecretsPassword0 performs a merge with any union data inside the OutputKafka_Secrets_Password, using the provided OutputKafkaSecretsPassword0 +func (t *OutputKafka_Secrets_Password) MergeOutputKafkaSecretsPassword0(v OutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputKafkaSecretsPassword1 returns the union data inside the OutputKafka_Secrets_Password as a OutputKafkaSecretsPassword1 +func (t OutputKafka_Secrets_Password) AsOutputKafkaSecretsPassword1() (OutputKafkaSecretsPassword1, error) { + var body OutputKafkaSecretsPassword1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafkaSecretsPassword1 overwrites any union data inside the OutputKafka_Secrets_Password as the provided OutputKafkaSecretsPassword1 +func (t *OutputKafka_Secrets_Password) FromOutputKafkaSecretsPassword1(v OutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafkaSecretsPassword1 performs a merge with any union data inside the OutputKafka_Secrets_Password, using the provided OutputKafkaSecretsPassword1 +func (t *OutputKafka_Secrets_Password) MergeOutputKafkaSecretsPassword1(v OutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + 
return err +} + +func (t OutputKafka_Secrets_Password) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputKafka_Secrets_Password) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputKafkaSecretsSslKey0 returns the union data inside the OutputKafka_Secrets_Ssl_Key as a OutputKafkaSecretsSslKey0 +func (t OutputKafka_Secrets_Ssl_Key) AsOutputKafkaSecretsSslKey0() (OutputKafkaSecretsSslKey0, error) { + var body OutputKafkaSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafkaSecretsSslKey0 overwrites any union data inside the OutputKafka_Secrets_Ssl_Key as the provided OutputKafkaSecretsSslKey0 +func (t *OutputKafka_Secrets_Ssl_Key) FromOutputKafkaSecretsSslKey0(v OutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafkaSecretsSslKey0 performs a merge with any union data inside the OutputKafka_Secrets_Ssl_Key, using the provided OutputKafkaSecretsSslKey0 +func (t *OutputKafka_Secrets_Ssl_Key) MergeOutputKafkaSecretsSslKey0(v OutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputKafkaSecretsSslKey1 returns the union data inside the OutputKafka_Secrets_Ssl_Key as a OutputKafkaSecretsSslKey1 +func (t OutputKafka_Secrets_Ssl_Key) AsOutputKafkaSecretsSslKey1() (OutputKafkaSecretsSslKey1, error) { + var body OutputKafkaSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafkaSecretsSslKey1 overwrites any union data inside the OutputKafka_Secrets_Ssl_Key as the provided OutputKafkaSecretsSslKey1 +func (t *OutputKafka_Secrets_Ssl_Key) FromOutputKafkaSecretsSslKey1(v OutputKafkaSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafkaSecretsSslKey1 performs a merge with any union data inside the OutputKafka_Secrets_Ssl_Key, using the provided OutputKafkaSecretsSslKey1 +func (t *OutputKafka_Secrets_Ssl_Key) MergeOutputKafkaSecretsSslKey1(v OutputKafkaSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputKafka_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputKafka_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputLogstashSecretsSslKey0 returns the union data inside the OutputLogstash_Secrets_Ssl_Key as a OutputLogstashSecretsSslKey0 +func (t OutputLogstash_Secrets_Ssl_Key) AsOutputLogstashSecretsSslKey0() (OutputLogstashSecretsSslKey0, error) { + var body OutputLogstashSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputLogstashSecretsSslKey0 overwrites any union data inside the OutputLogstash_Secrets_Ssl_Key as the provided OutputLogstashSecretsSslKey0 +func (t *OutputLogstash_Secrets_Ssl_Key) FromOutputLogstashSecretsSslKey0(v OutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputLogstashSecretsSslKey0 performs a merge with any union data inside the OutputLogstash_Secrets_Ssl_Key, using the provided OutputLogstashSecretsSslKey0 +func (t *OutputLogstash_Secrets_Ssl_Key) MergeOutputLogstashSecretsSslKey0(v 
OutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputLogstashSecretsSslKey1 returns the union data inside the OutputLogstash_Secrets_Ssl_Key as a OutputLogstashSecretsSslKey1 +func (t OutputLogstash_Secrets_Ssl_Key) AsOutputLogstashSecretsSslKey1() (OutputLogstashSecretsSslKey1, error) { + var body OutputLogstashSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputLogstashSecretsSslKey1 overwrites any union data inside the OutputLogstash_Secrets_Ssl_Key as the provided OutputLogstashSecretsSslKey1 +func (t *OutputLogstash_Secrets_Ssl_Key) FromOutputLogstashSecretsSslKey1(v OutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputLogstashSecretsSslKey1 performs a merge with any union data inside the OutputLogstash_Secrets_Ssl_Key, using the provided OutputLogstashSecretsSslKey1 +func (t *OutputLogstash_Secrets_Ssl_Key) MergeOutputLogstashSecretsSslKey1(v OutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputLogstash_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputLogstash_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputRemoteElasticsearchSecretsServiceToken0 returns the union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as a OutputRemoteElasticsearchSecretsServiceToken0 +func (t OutputRemoteElasticsearch_Secrets_ServiceToken) AsOutputRemoteElasticsearchSecretsServiceToken0() (OutputRemoteElasticsearchSecretsServiceToken0, error) { + var body OutputRemoteElasticsearchSecretsServiceToken0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputRemoteElasticsearchSecretsServiceToken0 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as the provided OutputRemoteElasticsearchSecretsServiceToken0 +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) FromOutputRemoteElasticsearchSecretsServiceToken0(v OutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputRemoteElasticsearchSecretsServiceToken0 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken, using the provided OutputRemoteElasticsearchSecretsServiceToken0 +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) MergeOutputRemoteElasticsearchSecretsServiceToken0(v OutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputRemoteElasticsearchSecretsServiceToken1 returns the union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as a OutputRemoteElasticsearchSecretsServiceToken1 +func (t OutputRemoteElasticsearch_Secrets_ServiceToken) AsOutputRemoteElasticsearchSecretsServiceToken1() (OutputRemoteElasticsearchSecretsServiceToken1, error) { + var body OutputRemoteElasticsearchSecretsServiceToken1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputRemoteElasticsearchSecretsServiceToken1 overwrites any union data inside the 
OutputRemoteElasticsearch_Secrets_ServiceToken as the provided OutputRemoteElasticsearchSecretsServiceToken1 +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) FromOutputRemoteElasticsearchSecretsServiceToken1(v OutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputRemoteElasticsearchSecretsServiceToken1 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken, using the provided OutputRemoteElasticsearchSecretsServiceToken1 +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) MergeOutputRemoteElasticsearchSecretsServiceToken1(v OutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputRemoteElasticsearch_Secrets_ServiceToken) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputElasticsearch returns the union data inside the OutputUnion as a OutputElasticsearch +func (t OutputUnion) AsOutputElasticsearch() (OutputElasticsearch, error) { + var body OutputElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputElasticsearch overwrites any union data inside the OutputUnion as the provided OutputElasticsearch +func (t *OutputUnion) FromOutputElasticsearch(v OutputElasticsearch) error { + v.Type = "elasticsearch" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputElasticsearch performs a merge with any union data inside the OutputUnion, using the provided OutputElasticsearch +func (t *OutputUnion) MergeOutputElasticsearch(v OutputElasticsearch) error { + v.Type = "elasticsearch" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} - // Prerelease Whether to return prerelease versions of packages (e.g. beta, rc, preview) - Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` +// AsOutputRemoteElasticsearch returns the union data inside the OutputUnion as a OutputRemoteElasticsearch +func (t OutputUnion) AsOutputRemoteElasticsearch() (OutputRemoteElasticsearch, error) { + var body OutputRemoteElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err } -// GetPackageParams defines parameters for GetPackage. 
-type GetPackageParams struct { - // IgnoreUnverified Ignore if the package is fails signature verification - IgnoreUnverified *bool `form:"ignoreUnverified,omitempty" json:"ignoreUnverified,omitempty"` +// FromOutputRemoteElasticsearch overwrites any union data inside the OutputUnion as the provided OutputRemoteElasticsearch +func (t *OutputUnion) FromOutputRemoteElasticsearch(v OutputRemoteElasticsearch) error { + v.Type = "remote_elasticsearch" + b, err := json.Marshal(v) + t.union = b + return err +} - // Full Return all fields from the package manifest, not just those supported by the Elastic Package Registry - Full *bool `form:"full,omitempty" json:"full,omitempty"` +// MergeOutputRemoteElasticsearch performs a merge with any union data inside the OutputUnion, using the provided OutputRemoteElasticsearch +func (t *OutputUnion) MergeOutputRemoteElasticsearch(v OutputRemoteElasticsearch) error { + v.Type = "remote_elasticsearch" + b, err := json.Marshal(v) + if err != nil { + return err + } - // Prerelease Whether to return prerelease versions of packages (e.g. beta, rc, preview) - Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err } -// InstallPackageJSONBody defines parameters for InstallPackage. -type InstallPackageJSONBody struct { - Force *bool `json:"force,omitempty"` - IgnoreConstraints *bool `json:"ignore_constraints,omitempty"` +// AsOutputLogstash returns the union data inside the OutputUnion as a OutputLogstash +func (t OutputUnion) AsOutputLogstash() (OutputLogstash, error) { + var body OutputLogstash + err := json.Unmarshal(t.union, &body) + return body, err } -// InstallPackageParams defines parameters for InstallPackage. -type InstallPackageParams struct { - // IgnoreUnverified Ignore if the package is fails signature verification - IgnoreUnverified *bool `form:"ignoreUnverified,omitempty" json:"ignoreUnverified,omitempty"` +// FromOutputLogstash overwrites any union data inside the OutputUnion as the provided OutputLogstash +func (t *OutputUnion) FromOutputLogstash(v OutputLogstash) error { + v.Type = "logstash" + b, err := json.Marshal(v) + t.union = b + return err +} - // Full Return all fields from the package manifest, not just those supported by the Elastic Package Registry - Full *bool `form:"full,omitempty" json:"full,omitempty"` +// MergeOutputLogstash performs a merge with any union data inside the OutputUnion, using the provided OutputLogstash +func (t *OutputUnion) MergeOutputLogstash(v OutputLogstash) error { + v.Type = "logstash" + b, err := json.Marshal(v) + if err != nil { + return err + } - // Prerelease Whether to return prerelease versions of packages (e.g. beta, rc, preview) - Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err } -// UpdatePackageJSONBody defines parameters for UpdatePackage. -type UpdatePackageJSONBody struct { - KeepPoliciesUpToDate *bool `json:"keepPoliciesUpToDate,omitempty"` +// AsOutputKafka returns the union data inside the OutputUnion as a OutputKafka +func (t OutputUnion) AsOutputKafka() (OutputKafka, error) { + var body OutputKafka + err := json.Unmarshal(t.union, &body) + return body, err } -// UpdatePackageParams defines parameters for UpdatePackage. 
-type UpdatePackageParams struct { - // IgnoreUnverified Ignore if the package is fails signature verification - IgnoreUnverified *bool `form:"ignoreUnverified,omitempty" json:"ignoreUnverified,omitempty"` +// FromOutputKafka overwrites any union data inside the OutputUnion as the provided OutputKafka +func (t *OutputUnion) FromOutputKafka(v OutputKafka) error { + v.Type = "kafka" + b, err := json.Marshal(v) + t.union = b + return err +} - // Full Return all fields from the package manifest, not just those supported by the Elastic Package Registry - Full *bool `form:"full,omitempty" json:"full,omitempty"` +// MergeOutputKafka performs a merge with any union data inside the OutputUnion, using the provided OutputKafka +func (t *OutputUnion) MergeOutputKafka(v OutputKafka) error { + v.Type = "kafka" + b, err := json.Marshal(v) + if err != nil { + return err + } - // Prerelease Whether to return prerelease versions of packages (e.g. beta, rc, preview) - Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err } -// PostFleetServerHostsJSONBody defines parameters for PostFleetServerHosts. -type PostFleetServerHostsJSONBody struct { - HostUrls []string `json:"host_urls"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - Name string `json:"name"` +func (t OutputUnion) Discriminator() (string, error) { + var discriminator struct { + Discriminator string `json:"type"` + } + err := json.Unmarshal(t.union, &discriminator) + return discriminator.Discriminator, err } -// UpdateFleetServerHostsJSONBody defines parameters for UpdateFleetServerHosts. -type UpdateFleetServerHostsJSONBody struct { - HostUrls *[]string `json:"host_urls,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - Name *string `json:"name,omitempty"` +func (t OutputUnion) ValueByDiscriminator() (interface{}, error) { + discriminator, err := t.Discriminator() + if err != nil { + return nil, err + } + switch discriminator { + case "elasticsearch": + return t.AsOutputElasticsearch() + case "kafka": + return t.AsOutputKafka() + case "logstash": + return t.AsOutputLogstash() + case "remote_elasticsearch": + return t.AsOutputRemoteElasticsearch() + default: + return nil, errors.New("unknown discriminator value: " + discriminator) + } } -// CreatePackagePolicyParams defines parameters for CreatePackagePolicy. -type CreatePackagePolicyParams struct { - // Format Simplified or legacy format for package inputs - Format *CreatePackagePolicyParamsFormat `form:"format,omitempty" json:"format,omitempty"` +func (t OutputUnion) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err } -// CreatePackagePolicyParamsFormat defines parameters for CreatePackagePolicy. -type CreatePackagePolicyParamsFormat string - -// DeletePackagePolicyParams defines parameters for DeletePackagePolicy. -type DeletePackagePolicyParams struct { - Force *bool `form:"force,omitempty" json:"force,omitempty"` +func (t *OutputUnion) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err } -// GetPackagePolicyParams defines parameters for GetPackagePolicy. 
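Editor's note: a sketch of the caller side of the Discriminator/ValueByDiscriminator dispatch generated above; the variant types come from this diff, while the returned strings and error message are illustrative only.

// Sketch only: resolves a decoded OutputUnion to its concrete variant using
// the generated discriminator dispatch and reports which kind it was.
func outputKind(u OutputUnion) (string, error) {
	v, err := u.ValueByDiscriminator()
	if err != nil {
		return "", err
	}
	switch v.(type) {
	case OutputElasticsearch:
		return "elasticsearch", nil
	case OutputRemoteElasticsearch:
		return "remote_elasticsearch", nil
	case OutputLogstash:
		return "logstash", nil
	case OutputKafka:
		return "kafka", nil
	default:
		return "", errors.New("unhandled output variant")
	}
}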
-type GetPackagePolicyParams struct { - // Format Simplified or legacy format for package inputs - Format *GetPackagePolicyParamsFormat `form:"format,omitempty" json:"format,omitempty"` +// AsUpdateOutputKafkaSecretsPassword0 returns the union data inside the UpdateOutputKafka_Secrets_Password as a UpdateOutputKafkaSecretsPassword0 +func (t UpdateOutputKafka_Secrets_Password) AsUpdateOutputKafkaSecretsPassword0() (UpdateOutputKafkaSecretsPassword0, error) { + var body UpdateOutputKafkaSecretsPassword0 + err := json.Unmarshal(t.union, &body) + return body, err } -// GetPackagePolicyParamsFormat defines parameters for GetPackagePolicy. -type GetPackagePolicyParamsFormat string - -// UpdatePackagePolicyParams defines parameters for UpdatePackagePolicy. -type UpdatePackagePolicyParams struct { - // Format Simplified or legacy format for package inputs - Format *UpdatePackagePolicyParamsFormat `form:"format,omitempty" json:"format,omitempty"` +// FromUpdateOutputKafkaSecretsPassword0 overwrites any union data inside the UpdateOutputKafka_Secrets_Password as the provided UpdateOutputKafkaSecretsPassword0 +func (t *UpdateOutputKafka_Secrets_Password) FromUpdateOutputKafkaSecretsPassword0(v UpdateOutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + t.union = b + return err } -// UpdatePackagePolicyParamsFormat defines parameters for UpdatePackagePolicy. -type UpdatePackagePolicyParamsFormat string - -// CreateAgentPolicyJSONRequestBody defines body for CreateAgentPolicy for application/json ContentType. -type CreateAgentPolicyJSONRequestBody = AgentPolicyCreateRequest - -// DeleteAgentPolicyJSONRequestBody defines body for DeleteAgentPolicy for application/json ContentType. -type DeleteAgentPolicyJSONRequestBody DeleteAgentPolicyJSONBody - -// UpdateAgentPolicyJSONRequestBody defines body for UpdateAgentPolicy for application/json ContentType. -type UpdateAgentPolicyJSONRequestBody = AgentPolicyUpdateRequest - -// DeletePackageJSONRequestBody defines body for DeletePackage for application/json ContentType. -type DeletePackageJSONRequestBody DeletePackageJSONBody - -// InstallPackageJSONRequestBody defines body for InstallPackage for application/json ContentType. -type InstallPackageJSONRequestBody InstallPackageJSONBody +// MergeUpdateOutputKafkaSecretsPassword0 performs a merge with any union data inside the UpdateOutputKafka_Secrets_Password, using the provided UpdateOutputKafkaSecretsPassword0 +func (t *UpdateOutputKafka_Secrets_Password) MergeUpdateOutputKafkaSecretsPassword0(v UpdateOutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } -// UpdatePackageJSONRequestBody defines body for UpdatePackage for application/json ContentType. -type UpdatePackageJSONRequestBody UpdatePackageJSONBody + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} -// PostFleetServerHostsJSONRequestBody defines body for PostFleetServerHosts for application/json ContentType. 
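Editor's note: the *_Secrets_* wrappers are two-variant unions with the same accessor shape; a sketch assuming only the Password0 helpers shown so far, with the variant's fields left to the caller since its layout is not visible in this hunk.

// Sketch only: stores a caller-built UpdateOutputKafkaSecretsPassword0 value
// inside the generated secrets union. The union keeps the marshalled JSON as
// is; no validation of the variant's shape happens here.
func kafkaPasswordSecret(v UpdateOutputKafkaSecretsPassword0) (UpdateOutputKafka_Secrets_Password, error) {
	var s UpdateOutputKafka_Secrets_Password
	err := s.FromUpdateOutputKafkaSecretsPassword0(v)
	return s, err
}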
-type PostFleetServerHostsJSONRequestBody PostFleetServerHostsJSONBody +// AsUpdateOutputKafkaSecretsPassword1 returns the union data inside the UpdateOutputKafka_Secrets_Password as a UpdateOutputKafkaSecretsPassword1 +func (t UpdateOutputKafka_Secrets_Password) AsUpdateOutputKafkaSecretsPassword1() (UpdateOutputKafkaSecretsPassword1, error) { + var body UpdateOutputKafkaSecretsPassword1 + err := json.Unmarshal(t.union, &body) + return body, err +} -// UpdateFleetServerHostsJSONRequestBody defines body for UpdateFleetServerHosts for application/json ContentType. -type UpdateFleetServerHostsJSONRequestBody UpdateFleetServerHostsJSONBody +// FromUpdateOutputKafkaSecretsPassword1 overwrites any union data inside the UpdateOutputKafka_Secrets_Password as the provided UpdateOutputKafkaSecretsPassword1 +func (t *UpdateOutputKafka_Secrets_Password) FromUpdateOutputKafkaSecretsPassword1(v UpdateOutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + t.union = b + return err +} -// PostOutputsJSONRequestBody defines body for PostOutputs for application/json ContentType. -type PostOutputsJSONRequestBody = OutputCreateRequest +// MergeUpdateOutputKafkaSecretsPassword1 performs a merge with any union data inside the UpdateOutputKafka_Secrets_Password, using the provided UpdateOutputKafkaSecretsPassword1 +func (t *UpdateOutputKafka_Secrets_Password) MergeUpdateOutputKafkaSecretsPassword1(v UpdateOutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } -// UpdateOutputJSONRequestBody defines body for UpdateOutput for application/json ContentType. -type UpdateOutputJSONRequestBody = OutputUpdateRequest + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} -// CreatePackagePolicyJSONRequestBody defines body for CreatePackagePolicy for application/json ContentType. -type CreatePackagePolicyJSONRequestBody = PackagePolicyRequest +func (t UpdateOutputKafka_Secrets_Password) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} -// UpdatePackagePolicyJSONRequestBody defines body for UpdatePackagePolicy for application/json ContentType. 
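Editor's note: because these unions hold the raw JSON payload, they pass through encoding/json without dropping fields that neither variant models; a hedged sketch using the MarshalJSON above and the matching UnmarshalJSON that follows just below.

// Sketch only: re-encodes an incoming secrets payload without interpreting it.
// Unmarshal stores the raw bytes in the union; Marshal hands the stored
// payload back (encoding/json may recompact whitespace).
func reencodeKafkaPassword(raw []byte) ([]byte, error) {
	var s UpdateOutputKafka_Secrets_Password
	if err := json.Unmarshal(raw, &s); err != nil {
		return nil, err
	}
	return json.Marshal(s)
}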
-type UpdatePackagePolicyJSONRequestBody = PackagePolicyRequest +func (t *UpdateOutputKafka_Secrets_Password) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} -// AsOutputCreateRequestElasticsearch returns the union data inside the OutputCreateRequest as a OutputCreateRequestElasticsearch -func (t OutputCreateRequest) AsOutputCreateRequestElasticsearch() (OutputCreateRequestElasticsearch, error) { - var body OutputCreateRequestElasticsearch +// AsUpdateOutputKafkaSecretsSslKey0 returns the union data inside the UpdateOutputKafka_Secrets_Ssl_Key as a UpdateOutputKafkaSecretsSslKey0 +func (t UpdateOutputKafka_Secrets_Ssl_Key) AsUpdateOutputKafkaSecretsSslKey0() (UpdateOutputKafkaSecretsSslKey0, error) { + var body UpdateOutputKafkaSecretsSslKey0 err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputCreateRequestElasticsearch overwrites any union data inside the OutputCreateRequest as the provided OutputCreateRequestElasticsearch -func (t *OutputCreateRequest) FromOutputCreateRequestElasticsearch(v OutputCreateRequestElasticsearch) error { - v.Type = "elasticsearch" +// FromUpdateOutputKafkaSecretsSslKey0 overwrites any union data inside the UpdateOutputKafka_Secrets_Ssl_Key as the provided UpdateOutputKafkaSecretsSslKey0 +func (t *UpdateOutputKafka_Secrets_Ssl_Key) FromUpdateOutputKafkaSecretsSslKey0(v UpdateOutputKafkaSecretsSslKey0) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputCreateRequestElasticsearch performs a merge with any union data inside the OutputCreateRequest, using the provided OutputCreateRequestElasticsearch -func (t *OutputCreateRequest) MergeOutputCreateRequestElasticsearch(v OutputCreateRequestElasticsearch) error { - v.Type = "elasticsearch" +// MergeUpdateOutputKafkaSecretsSslKey0 performs a merge with any union data inside the UpdateOutputKafka_Secrets_Ssl_Key, using the provided UpdateOutputKafkaSecretsSslKey0 +func (t *UpdateOutputKafka_Secrets_Ssl_Key) MergeUpdateOutputKafkaSecretsSslKey0(v UpdateOutputKafkaSecretsSslKey0) error { b, err := json.Marshal(v) if err != nil { return err @@ -972,24 +10897,22 @@ func (t *OutputCreateRequest) MergeOutputCreateRequestElasticsearch(v OutputCrea return err } -// AsOutputCreateRequestKafka returns the union data inside the OutputCreateRequest as a OutputCreateRequestKafka -func (t OutputCreateRequest) AsOutputCreateRequestKafka() (OutputCreateRequestKafka, error) { - var body OutputCreateRequestKafka +// AsUpdateOutputKafkaSecretsSslKey1 returns the union data inside the UpdateOutputKafka_Secrets_Ssl_Key as a UpdateOutputKafkaSecretsSslKey1 +func (t UpdateOutputKafka_Secrets_Ssl_Key) AsUpdateOutputKafkaSecretsSslKey1() (UpdateOutputKafkaSecretsSslKey1, error) { + var body UpdateOutputKafkaSecretsSslKey1 err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputCreateRequestKafka overwrites any union data inside the OutputCreateRequest as the provided OutputCreateRequestKafka -func (t *OutputCreateRequest) FromOutputCreateRequestKafka(v OutputCreateRequestKafka) error { - v.Type = "kafka" +// FromUpdateOutputKafkaSecretsSslKey1 overwrites any union data inside the UpdateOutputKafka_Secrets_Ssl_Key as the provided UpdateOutputKafkaSecretsSslKey1 +func (t *UpdateOutputKafka_Secrets_Ssl_Key) FromUpdateOutputKafkaSecretsSslKey1(v UpdateOutputKafkaSecretsSslKey1) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputCreateRequestKafka performs a merge with any union data inside the OutputCreateRequest, using the provided 
OutputCreateRequestKafka -func (t *OutputCreateRequest) MergeOutputCreateRequestKafka(v OutputCreateRequestKafka) error { - v.Type = "kafka" +// MergeUpdateOutputKafkaSecretsSslKey1 performs a merge with any union data inside the UpdateOutputKafka_Secrets_Ssl_Key, using the provided UpdateOutputKafkaSecretsSslKey1 +func (t *UpdateOutputKafka_Secrets_Ssl_Key) MergeUpdateOutputKafkaSecretsSslKey1(v UpdateOutputKafkaSecretsSslKey1) error { b, err := json.Marshal(v) if err != nil { return err @@ -1000,24 +10923,32 @@ func (t *OutputCreateRequest) MergeOutputCreateRequestKafka(v OutputCreateReques return err } -// AsOutputCreateRequestLogstash returns the union data inside the OutputCreateRequest as a OutputCreateRequestLogstash -func (t OutputCreateRequest) AsOutputCreateRequestLogstash() (OutputCreateRequestLogstash, error) { - var body OutputCreateRequestLogstash +func (t UpdateOutputKafka_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *UpdateOutputKafka_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsUpdateOutputLogstashSecretsSslKey0 returns the union data inside the UpdateOutputLogstash_Secrets_Ssl_Key as a UpdateOutputLogstashSecretsSslKey0 +func (t UpdateOutputLogstash_Secrets_Ssl_Key) AsUpdateOutputLogstashSecretsSslKey0() (UpdateOutputLogstashSecretsSslKey0, error) { + var body UpdateOutputLogstashSecretsSslKey0 err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputCreateRequestLogstash overwrites any union data inside the OutputCreateRequest as the provided OutputCreateRequestLogstash -func (t *OutputCreateRequest) FromOutputCreateRequestLogstash(v OutputCreateRequestLogstash) error { - v.Type = "logstash" +// FromUpdateOutputLogstashSecretsSslKey0 overwrites any union data inside the UpdateOutputLogstash_Secrets_Ssl_Key as the provided UpdateOutputLogstashSecretsSslKey0 +func (t *UpdateOutputLogstash_Secrets_Ssl_Key) FromUpdateOutputLogstashSecretsSslKey0(v UpdateOutputLogstashSecretsSslKey0) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputCreateRequestLogstash performs a merge with any union data inside the OutputCreateRequest, using the provided OutputCreateRequestLogstash -func (t *OutputCreateRequest) MergeOutputCreateRequestLogstash(v OutputCreateRequestLogstash) error { - v.Type = "logstash" +// MergeUpdateOutputLogstashSecretsSslKey0 performs a merge with any union data inside the UpdateOutputLogstash_Secrets_Ssl_Key, using the provided UpdateOutputLogstashSecretsSslKey0 +func (t *UpdateOutputLogstash_Secrets_Ssl_Key) MergeUpdateOutputLogstashSecretsSslKey0(v UpdateOutputLogstashSecretsSslKey0) error { b, err := json.Marshal(v) if err != nil { return err @@ -1028,59 +10959,58 @@ func (t *OutputCreateRequest) MergeOutputCreateRequestLogstash(v OutputCreateReq return err } -func (t OutputCreateRequest) Discriminator() (string, error) { - var discriminator struct { - Discriminator string `json:"type"` - } - err := json.Unmarshal(t.union, &discriminator) - return discriminator.Discriminator, err +// AsUpdateOutputLogstashSecretsSslKey1 returns the union data inside the UpdateOutputLogstash_Secrets_Ssl_Key as a UpdateOutputLogstashSecretsSslKey1 +func (t UpdateOutputLogstash_Secrets_Ssl_Key) AsUpdateOutputLogstashSecretsSslKey1() (UpdateOutputLogstashSecretsSslKey1, error) { + var body UpdateOutputLogstashSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err } -func (t OutputCreateRequest) 
ValueByDiscriminator() (interface{}, error) { - discriminator, err := t.Discriminator() +// FromUpdateOutputLogstashSecretsSslKey1 overwrites any union data inside the UpdateOutputLogstash_Secrets_Ssl_Key as the provided UpdateOutputLogstashSecretsSslKey1 +func (t *UpdateOutputLogstash_Secrets_Ssl_Key) FromUpdateOutputLogstashSecretsSslKey1(v UpdateOutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputLogstashSecretsSslKey1 performs a merge with any union data inside the UpdateOutputLogstash_Secrets_Ssl_Key, using the provided UpdateOutputLogstashSecretsSslKey1 +func (t *UpdateOutputLogstash_Secrets_Ssl_Key) MergeUpdateOutputLogstashSecretsSslKey1(v UpdateOutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) if err != nil { - return nil, err - } - switch discriminator { - case "elasticsearch": - return t.AsOutputCreateRequestElasticsearch() - case "kafka": - return t.AsOutputCreateRequestKafka() - case "logstash": - return t.AsOutputCreateRequestLogstash() - default: - return nil, errors.New("unknown discriminator value: " + discriminator) + return err } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err } -func (t OutputCreateRequest) MarshalJSON() ([]byte, error) { +func (t UpdateOutputLogstash_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { b, err := t.union.MarshalJSON() return b, err } -func (t *OutputCreateRequest) UnmarshalJSON(b []byte) error { +func (t *UpdateOutputLogstash_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { err := t.union.UnmarshalJSON(b) return err } -// AsOutputUpdateRequestElasticsearch returns the union data inside the OutputUpdateRequest as a OutputUpdateRequestElasticsearch -func (t OutputUpdateRequest) AsOutputUpdateRequestElasticsearch() (OutputUpdateRequestElasticsearch, error) { - var body OutputUpdateRequestElasticsearch +// AsUpdateOutputRemoteElasticsearchSecretsServiceToken0 returns the union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken as a UpdateOutputRemoteElasticsearchSecretsServiceToken0 +func (t UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) AsUpdateOutputRemoteElasticsearchSecretsServiceToken0() (UpdateOutputRemoteElasticsearchSecretsServiceToken0, error) { + var body UpdateOutputRemoteElasticsearchSecretsServiceToken0 err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputUpdateRequestElasticsearch overwrites any union data inside the OutputUpdateRequest as the provided OutputUpdateRequestElasticsearch -func (t *OutputUpdateRequest) FromOutputUpdateRequestElasticsearch(v OutputUpdateRequestElasticsearch) error { - v.Type = "elasticsearch" +// FromUpdateOutputRemoteElasticsearchSecretsServiceToken0 overwrites any union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken as the provided UpdateOutputRemoteElasticsearchSecretsServiceToken0 +func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) FromUpdateOutputRemoteElasticsearchSecretsServiceToken0(v UpdateOutputRemoteElasticsearchSecretsServiceToken0) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputUpdateRequestElasticsearch performs a merge with any union data inside the OutputUpdateRequest, using the provided OutputUpdateRequestElasticsearch -func (t *OutputUpdateRequest) MergeOutputUpdateRequestElasticsearch(v OutputUpdateRequestElasticsearch) error { - v.Type = "elasticsearch" +// MergeUpdateOutputRemoteElasticsearchSecretsServiceToken0 performs a merge with any union data inside the 
UpdateOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided UpdateOutputRemoteElasticsearchSecretsServiceToken0 +func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) MergeUpdateOutputRemoteElasticsearchSecretsServiceToken0(v UpdateOutputRemoteElasticsearchSecretsServiceToken0) error { b, err := json.Marshal(v) if err != nil { return err @@ -1091,24 +11021,22 @@ func (t *OutputUpdateRequest) MergeOutputUpdateRequestElasticsearch(v OutputUpda return err } -// AsOutputUpdateRequestKafka returns the union data inside the OutputUpdateRequest as a OutputUpdateRequestKafka -func (t OutputUpdateRequest) AsOutputUpdateRequestKafka() (OutputUpdateRequestKafka, error) { - var body OutputUpdateRequestKafka +// AsUpdateOutputRemoteElasticsearchSecretsServiceToken1 returns the union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken as a UpdateOutputRemoteElasticsearchSecretsServiceToken1 +func (t UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) AsUpdateOutputRemoteElasticsearchSecretsServiceToken1() (UpdateOutputRemoteElasticsearchSecretsServiceToken1, error) { + var body UpdateOutputRemoteElasticsearchSecretsServiceToken1 err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputUpdateRequestKafka overwrites any union data inside the OutputUpdateRequest as the provided OutputUpdateRequestKafka -func (t *OutputUpdateRequest) FromOutputUpdateRequestKafka(v OutputUpdateRequestKafka) error { - v.Type = "kafka" +// FromUpdateOutputRemoteElasticsearchSecretsServiceToken1 overwrites any union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken as the provided UpdateOutputRemoteElasticsearchSecretsServiceToken1 +func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) FromUpdateOutputRemoteElasticsearchSecretsServiceToken1(v UpdateOutputRemoteElasticsearchSecretsServiceToken1) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputUpdateRequestKafka performs a merge with any union data inside the OutputUpdateRequest, using the provided OutputUpdateRequestKafka -func (t *OutputUpdateRequest) MergeOutputUpdateRequestKafka(v OutputUpdateRequestKafka) error { - v.Type = "kafka" +// MergeUpdateOutputRemoteElasticsearchSecretsServiceToken1 performs a merge with any union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided UpdateOutputRemoteElasticsearchSecretsServiceToken1 +func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) MergeUpdateOutputRemoteElasticsearchSecretsServiceToken1(v UpdateOutputRemoteElasticsearchSecretsServiceToken1) error { b, err := json.Marshal(v) if err != nil { return err @@ -1119,24 +11047,34 @@ func (t *OutputUpdateRequest) MergeOutputUpdateRequestKafka(v OutputUpdateReques return err } -// AsOutputUpdateRequestLogstash returns the union data inside the OutputUpdateRequest as a OutputUpdateRequestLogstash -func (t OutputUpdateRequest) AsOutputUpdateRequestLogstash() (OutputUpdateRequestLogstash, error) { - var body OutputUpdateRequestLogstash +func (t UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsUpdateOutputElasticsearch returns the union data inside the UpdateOutputUnion as a UpdateOutputElasticsearch +func (t UpdateOutputUnion) AsUpdateOutputElasticsearch() (UpdateOutputElasticsearch, error) { + 
var body UpdateOutputElasticsearch err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputUpdateRequestLogstash overwrites any union data inside the OutputUpdateRequest as the provided OutputUpdateRequestLogstash -func (t *OutputUpdateRequest) FromOutputUpdateRequestLogstash(v OutputUpdateRequestLogstash) error { - v.Type = "logstash" +// FromUpdateOutputElasticsearch overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputElasticsearch +func (t *UpdateOutputUnion) FromUpdateOutputElasticsearch(v UpdateOutputElasticsearch) error { + v.Type = "elasticsearch" b, err := json.Marshal(v) t.union = b return err } -// MergeOutputUpdateRequestLogstash performs a merge with any union data inside the OutputUpdateRequest, using the provided OutputUpdateRequestLogstash -func (t *OutputUpdateRequest) MergeOutputUpdateRequestLogstash(v OutputUpdateRequestLogstash) error { - v.Type = "logstash" +// MergeUpdateOutputElasticsearch performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputElasticsearch +func (t *UpdateOutputUnion) MergeUpdateOutputElasticsearch(v UpdateOutputElasticsearch) error { + v.Type = "elasticsearch" b, err := json.Marshal(v) if err != nil { return err @@ -1147,57 +11085,52 @@ func (t *OutputUpdateRequest) MergeOutputUpdateRequestLogstash(v OutputUpdateReq return err } -func (t OutputUpdateRequest) Discriminator() (string, error) { - var discriminator struct { - Discriminator string `json:"type"` - } - err := json.Unmarshal(t.union, &discriminator) - return discriminator.Discriminator, err +// AsUpdateOutputRemoteElasticsearch returns the union data inside the UpdateOutputUnion as a UpdateOutputRemoteElasticsearch +func (t UpdateOutputUnion) AsUpdateOutputRemoteElasticsearch() (UpdateOutputRemoteElasticsearch, error) { + var body UpdateOutputRemoteElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err } -func (t OutputUpdateRequest) ValueByDiscriminator() (interface{}, error) { - discriminator, err := t.Discriminator() - if err != nil { - return nil, err - } - switch discriminator { - case "elasticsearch": - return t.AsOutputUpdateRequestElasticsearch() - case "kafka": - return t.AsOutputUpdateRequestKafka() - case "logstash": - return t.AsOutputUpdateRequestLogstash() - default: - return nil, errors.New("unknown discriminator value: " + discriminator) - } +// FromUpdateOutputRemoteElasticsearch overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputRemoteElasticsearch +func (t *UpdateOutputUnion) FromUpdateOutputRemoteElasticsearch(v UpdateOutputRemoteElasticsearch) error { + v.Type = "remote_elasticsearch" + b, err := json.Marshal(v) + t.union = b + return err } -func (t OutputUpdateRequest) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} +// MergeUpdateOutputRemoteElasticsearch performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputRemoteElasticsearch +func (t *UpdateOutputUnion) MergeUpdateOutputRemoteElasticsearch(v UpdateOutputRemoteElasticsearch) error { + v.Type = "remote_elasticsearch" + b, err := json.Marshal(v) + if err != nil { + return err + } -func (t *OutputUpdateRequest) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged return err } -// AsKibanaSavedObjectType returns the union data inside the PackageItemType as a KibanaSavedObjectType -func (t PackageItemType) 
AsKibanaSavedObjectType() (KibanaSavedObjectType, error) { - var body KibanaSavedObjectType +// AsUpdateOutputLogstash returns the union data inside the UpdateOutputUnion as a UpdateOutputLogstash +func (t UpdateOutputUnion) AsUpdateOutputLogstash() (UpdateOutputLogstash, error) { + var body UpdateOutputLogstash err := json.Unmarshal(t.union, &body) return body, err } -// FromKibanaSavedObjectType overwrites any union data inside the PackageItemType as the provided KibanaSavedObjectType -func (t *PackageItemType) FromKibanaSavedObjectType(v KibanaSavedObjectType) error { +// FromUpdateOutputLogstash overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputLogstash +func (t *UpdateOutputUnion) FromUpdateOutputLogstash(v UpdateOutputLogstash) error { + v.Type = "logstash" b, err := json.Marshal(v) t.union = b return err } -// MergeKibanaSavedObjectType performs a merge with any union data inside the PackageItemType, using the provided KibanaSavedObjectType -func (t *PackageItemType) MergeKibanaSavedObjectType(v KibanaSavedObjectType) error { +// MergeUpdateOutputLogstash performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputLogstash +func (t *UpdateOutputUnion) MergeUpdateOutputLogstash(v UpdateOutputLogstash) error { + v.Type = "logstash" b, err := json.Marshal(v) if err != nil { return err @@ -1208,22 +11141,24 @@ func (t *PackageItemType) MergeKibanaSavedObjectType(v KibanaSavedObjectType) er return err } -// AsElasticsearchAssetType returns the union data inside the PackageItemType as a ElasticsearchAssetType -func (t PackageItemType) AsElasticsearchAssetType() (ElasticsearchAssetType, error) { - var body ElasticsearchAssetType +// AsUpdateOutputKafka returns the union data inside the UpdateOutputUnion as a UpdateOutputKafka +func (t UpdateOutputUnion) AsUpdateOutputKafka() (UpdateOutputKafka, error) { + var body UpdateOutputKafka err := json.Unmarshal(t.union, &body) return body, err } -// FromElasticsearchAssetType overwrites any union data inside the PackageItemType as the provided ElasticsearchAssetType -func (t *PackageItemType) FromElasticsearchAssetType(v ElasticsearchAssetType) error { +// FromUpdateOutputKafka overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputKafka +func (t *UpdateOutputUnion) FromUpdateOutputKafka(v UpdateOutputKafka) error { + v.Type = "kafka" b, err := json.Marshal(v) t.union = b return err } -// MergeElasticsearchAssetType performs a merge with any union data inside the PackageItemType, using the provided ElasticsearchAssetType -func (t *PackageItemType) MergeElasticsearchAssetType(v ElasticsearchAssetType) error { +// MergeUpdateOutputKafka performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputKafka +func (t *UpdateOutputUnion) MergeUpdateOutputKafka(v UpdateOutputKafka) error { + v.Type = "kafka" b, err := json.Marshal(v) if err != nil { return err @@ -1234,12 +11169,39 @@ func (t *PackageItemType) MergeElasticsearchAssetType(v ElasticsearchAssetType) return err } -func (t PackageItemType) MarshalJSON() ([]byte, error) { +func (t UpdateOutputUnion) Discriminator() (string, error) { + var discriminator struct { + Discriminator string `json:"type"` + } + err := json.Unmarshal(t.union, &discriminator) + return discriminator.Discriminator, err +} + +func (t UpdateOutputUnion) ValueByDiscriminator() (interface{}, error) { + discriminator, err := t.Discriminator() + if err != nil { + return nil, err + } + switch 
discriminator { + case "elasticsearch": + return t.AsUpdateOutputElasticsearch() + case "kafka": + return t.AsUpdateOutputKafka() + case "logstash": + return t.AsUpdateOutputLogstash() + case "remote_elasticsearch": + return t.AsUpdateOutputRemoteElasticsearch() + default: + return nil, errors.New("unknown discriminator value: " + discriminator) + } +} + +func (t UpdateOutputUnion) MarshalJSON() ([]byte, error) { b, err := t.union.MarshalJSON() return b, err } -func (t *PackageItemType) UnmarshalJSON(b []byte) error { +func (t *UpdateOutputUnion) UnmarshalJSON(b []byte) error { err := t.union.UnmarshalJSON(b) return err } @@ -1317,29 +11279,35 @@ func WithRequestEditorFn(fn RequestEditorFn) ClientOption { // The interface specification for the client above. type ClientInterface interface { + // GetAgentPolicies request + GetAgentPolicies(ctx context.Context, params *GetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + // CreateAgentPolicyWithBody request with any body - CreateAgentPolicyWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + CreateAgentPolicyWithBody(ctx context.Context, params *CreateAgentPolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - CreateAgentPolicy(ctx context.Context, body CreateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + CreateAgentPolicy(ctx context.Context, params *CreateAgentPolicyParams, body CreateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) // DeleteAgentPolicyWithBody request with any body DeleteAgentPolicyWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) DeleteAgentPolicy(ctx context.Context, body DeleteAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) - // AgentPolicyInfo request - AgentPolicyInfo(ctx context.Context, agentPolicyId string, reqEditors ...RequestEditorFn) (*http.Response, error) + // GetAgentPolicy request + GetAgentPolicy(ctx context.Context, agentPolicyId string, params *GetAgentPolicyParams, reqEditors ...RequestEditorFn) (*http.Response, error) // UpdateAgentPolicyWithBody request with any body - UpdateAgentPolicyWithBody(ctx context.Context, agentPolicyId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + UpdateAgentPolicyWithBody(ctx context.Context, agentPolicyId string, params *UpdateAgentPolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - UpdateAgentPolicy(ctx context.Context, agentPolicyId string, body UpdateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + UpdateAgentPolicy(ctx context.Context, agentPolicyId string, params *UpdateAgentPolicyParams, body UpdateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) // GetEnrollmentApiKeys request - GetEnrollmentApiKeys(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + GetEnrollmentApiKeys(ctx context.Context, params *GetEnrollmentApiKeysParams, reqEditors ...RequestEditorFn) (*http.Response, error) - // ListAllPackages request - ListAllPackages(ctx context.Context, params *ListAllPackagesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + // ListPackages request + ListPackages(ctx context.Context, params *ListPackagesParams, reqEditors 
...RequestEditorFn) (*http.Response, error) + + // InstallPackageByUploadWithBody request with any body + InstallPackageByUploadWithBody(ctx context.Context, params *InstallPackageByUploadParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) // DeletePackageWithBody request with any body DeletePackageWithBody(ctx context.Context, pkgName string, pkgVersion string, params *DeletePackageParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -1354,31 +11322,32 @@ type ClientInterface interface { InstallPackage(ctx context.Context, pkgName string, pkgVersion string, params *InstallPackageParams, body InstallPackageJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) - // UpdatePackageWithBody request with any body - UpdatePackageWithBody(ctx context.Context, pkgName string, pkgVersion string, params *UpdatePackageParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + // GetFleetServerHosts request + GetFleetServerHosts(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) - UpdatePackage(ctx context.Context, pkgName string, pkgVersion string, params *UpdatePackageParams, body UpdatePackageJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // CreateFleetServerHostWithBody request with any body + CreateFleetServerHostWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - // PostFleetServerHostsWithBody request with any body - PostFleetServerHostsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + CreateFleetServerHost(ctx context.Context, body CreateFleetServerHostJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) - PostFleetServerHosts(ctx context.Context, body PostFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // DeleteFleetServerHost request + DeleteFleetServerHost(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) - // DeleteFleetServerHosts request - DeleteFleetServerHosts(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) + // GetFleetServerHost request + GetFleetServerHost(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) - // GetOneFleetServerHosts request - GetOneFleetServerHosts(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) + // UpdateFleetServerHostWithBody request with any body + UpdateFleetServerHostWithBody(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - // UpdateFleetServerHostsWithBody request with any body - UpdateFleetServerHostsWithBody(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + UpdateFleetServerHost(ctx context.Context, itemId string, body UpdateFleetServerHostJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) - UpdateFleetServerHosts(ctx context.Context, itemId string, body UpdateFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // GetOutputs request + GetOutputs(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) - // PostOutputsWithBody request with any body - 
PostOutputsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + // CreateOutputWithBody request with any body + CreateOutputWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - PostOutputs(ctx context.Context, body PostOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + CreateOutput(ctx context.Context, body CreateOutputJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) // DeleteOutput request DeleteOutput(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -1391,6 +11360,9 @@ type ClientInterface interface { UpdateOutput(ctx context.Context, outputId string, body UpdateOutputJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // GetPackagePolicies request + GetPackagePolicies(ctx context.Context, params *GetPackagePoliciesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + // CreatePackagePolicyWithBody request with any body CreatePackagePolicyWithBody(ctx context.Context, params *CreatePackagePolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -1408,8 +11380,20 @@ type ClientInterface interface { UpdatePackagePolicy(ctx context.Context, packagePolicyId string, params *UpdatePackagePolicyParams, body UpdatePackagePolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) } -func (c *Client) CreateAgentPolicyWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateAgentPolicyRequestWithBody(c.Server, contentType, body) +func (c *Client) GetAgentPolicies(ctx context.Context, params *GetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetAgentPoliciesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateAgentPolicyWithBody(ctx context.Context, params *CreateAgentPolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAgentPolicyRequestWithBody(c.Server, params, contentType, body) if err != nil { return nil, err } @@ -1420,8 +11404,8 @@ func (c *Client) CreateAgentPolicyWithBody(ctx context.Context, contentType stri return c.Client.Do(req) } -func (c *Client) CreateAgentPolicy(ctx context.Context, body CreateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateAgentPolicyRequest(c.Server, body) +func (c *Client) CreateAgentPolicy(ctx context.Context, params *CreateAgentPolicyParams, body CreateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAgentPolicyRequest(c.Server, params, body) if err != nil { return nil, err } @@ -1456,8 +11440,8 @@ func (c *Client) DeleteAgentPolicy(ctx context.Context, body DeleteAgentPolicyJS return c.Client.Do(req) } -func (c *Client) AgentPolicyInfo(ctx context.Context, agentPolicyId string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewAgentPolicyInfoRequest(c.Server, agentPolicyId) +func (c *Client) GetAgentPolicy(ctx context.Context, agentPolicyId string, params *GetAgentPolicyParams, reqEditors 
...RequestEditorFn) (*http.Response, error) { + req, err := NewGetAgentPolicyRequest(c.Server, agentPolicyId, params) if err != nil { return nil, err } @@ -1468,8 +11452,8 @@ func (c *Client) AgentPolicyInfo(ctx context.Context, agentPolicyId string, reqE return c.Client.Do(req) } -func (c *Client) UpdateAgentPolicyWithBody(ctx context.Context, agentPolicyId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdateAgentPolicyRequestWithBody(c.Server, agentPolicyId, contentType, body) +func (c *Client) UpdateAgentPolicyWithBody(ctx context.Context, agentPolicyId string, params *UpdateAgentPolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateAgentPolicyRequestWithBody(c.Server, agentPolicyId, params, contentType, body) if err != nil { return nil, err } @@ -1480,8 +11464,8 @@ func (c *Client) UpdateAgentPolicyWithBody(ctx context.Context, agentPolicyId st return c.Client.Do(req) } -func (c *Client) UpdateAgentPolicy(ctx context.Context, agentPolicyId string, body UpdateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdateAgentPolicyRequest(c.Server, agentPolicyId, body) +func (c *Client) UpdateAgentPolicy(ctx context.Context, agentPolicyId string, params *UpdateAgentPolicyParams, body UpdateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateAgentPolicyRequest(c.Server, agentPolicyId, params, body) if err != nil { return nil, err } @@ -1492,8 +11476,8 @@ func (c *Client) UpdateAgentPolicy(ctx context.Context, agentPolicyId string, bo return c.Client.Do(req) } -func (c *Client) GetEnrollmentApiKeys(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetEnrollmentApiKeysRequest(c.Server) +func (c *Client) GetEnrollmentApiKeys(ctx context.Context, params *GetEnrollmentApiKeysParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetEnrollmentApiKeysRequest(c.Server, params) if err != nil { return nil, err } @@ -1504,8 +11488,20 @@ func (c *Client) GetEnrollmentApiKeys(ctx context.Context, reqEditors ...Request return c.Client.Do(req) } -func (c *Client) ListAllPackages(ctx context.Context, params *ListAllPackagesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewListAllPackagesRequest(c.Server, params) +func (c *Client) ListPackages(ctx context.Context, params *ListPackagesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewListPackagesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) InstallPackageByUploadWithBody(ctx context.Context, params *InstallPackageByUploadParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewInstallPackageByUploadRequestWithBody(c.Server, params, contentType, body) if err != nil { return nil, err } @@ -1576,8 +11572,140 @@ func (c *Client) InstallPackage(ctx context.Context, pkgName string, pkgVersion return c.Client.Do(req) } -func (c *Client) UpdatePackageWithBody(ctx context.Context, pkgName string, pkgVersion string, params *UpdatePackageParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err 
:= NewUpdatePackageRequestWithBody(c.Server, pkgName, pkgVersion, params, contentType, body) +func (c *Client) GetFleetServerHosts(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetServerHostsRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateFleetServerHostWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateFleetServerHostRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateFleetServerHost(ctx context.Context, body CreateFleetServerHostJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateFleetServerHostRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteFleetServerHost(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteFleetServerHostRequest(c.Server, itemId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetServerHost(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetServerHostRequest(c.Server, itemId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateFleetServerHostWithBody(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateFleetServerHostRequestWithBody(c.Server, itemId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateFleetServerHost(ctx context.Context, itemId string, body UpdateFleetServerHostJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateFleetServerHostRequest(c.Server, itemId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetOutputs(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetOutputsRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateOutputWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateOutputRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = 
req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateOutput(ctx context.Context, body CreateOutputJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateOutputRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteOutput(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteOutputRequest(c.Server, outputId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetOutput(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetOutputRequest(c.Server, outputId) if err != nil { return nil, err } @@ -1588,8 +11716,8 @@ func (c *Client) UpdatePackageWithBody(ctx context.Context, pkgName string, pkgV return c.Client.Do(req) } -func (c *Client) UpdatePackage(ctx context.Context, pkgName string, pkgVersion string, params *UpdatePackageParams, body UpdatePackageJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdatePackageRequest(c.Server, pkgName, pkgVersion, params, body) +func (c *Client) UpdateOutputWithBody(ctx context.Context, outputId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateOutputRequestWithBody(c.Server, outputId, contentType, body) if err != nil { return nil, err } @@ -1600,8 +11728,8 @@ func (c *Client) UpdatePackage(ctx context.Context, pkgName string, pkgVersion s return c.Client.Do(req) } -func (c *Client) PostFleetServerHostsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetServerHostsRequestWithBody(c.Server, contentType, body) +func (c *Client) UpdateOutput(ctx context.Context, outputId string, body UpdateOutputJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateOutputRequest(c.Server, outputId, body) if err != nil { return nil, err } @@ -1612,8 +11740,8 @@ func (c *Client) PostFleetServerHostsWithBody(ctx context.Context, contentType s return c.Client.Do(req) } -func (c *Client) PostFleetServerHosts(ctx context.Context, body PostFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetServerHostsRequest(c.Server, body) +func (c *Client) GetPackagePolicies(ctx context.Context, params *GetPackagePoliciesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetPackagePoliciesRequest(c.Server, params) if err != nil { return nil, err } @@ -1624,8 +11752,8 @@ func (c *Client) PostFleetServerHosts(ctx context.Context, body PostFleetServerH return c.Client.Do(req) } -func (c *Client) DeleteFleetServerHosts(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteFleetServerHostsRequest(c.Server, itemId) +func (c *Client) CreatePackagePolicyWithBody(ctx context.Context, params *CreatePackagePolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { 
+ req, err := NewCreatePackagePolicyRequestWithBody(c.Server, params, contentType, body) if err != nil { return nil, err } @@ -1636,8 +11764,8 @@ func (c *Client) DeleteFleetServerHosts(ctx context.Context, itemId string, reqE return c.Client.Do(req) } -func (c *Client) GetOneFleetServerHosts(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetOneFleetServerHostsRequest(c.Server, itemId) +func (c *Client) CreatePackagePolicy(ctx context.Context, params *CreatePackagePolicyParams, body CreatePackagePolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreatePackagePolicyRequest(c.Server, params, body) if err != nil { return nil, err } @@ -1648,8 +11776,8 @@ func (c *Client) GetOneFleetServerHosts(ctx context.Context, itemId string, reqE return c.Client.Do(req) } -func (c *Client) UpdateFleetServerHostsWithBody(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdateFleetServerHostsRequestWithBody(c.Server, itemId, contentType, body) +func (c *Client) DeletePackagePolicy(ctx context.Context, packagePolicyId string, params *DeletePackagePolicyParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeletePackagePolicyRequest(c.Server, packagePolicyId, params) if err != nil { return nil, err } @@ -1660,8 +11788,8 @@ func (c *Client) UpdateFleetServerHostsWithBody(ctx context.Context, itemId stri return c.Client.Do(req) } -func (c *Client) UpdateFleetServerHosts(ctx context.Context, itemId string, body UpdateFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdateFleetServerHostsRequest(c.Server, itemId, body) +func (c *Client) GetPackagePolicy(ctx context.Context, packagePolicyId string, params *GetPackagePolicyParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetPackagePolicyRequest(c.Server, packagePolicyId, params) if err != nil { return nil, err } @@ -1672,8 +11800,8 @@ func (c *Client) UpdateFleetServerHosts(ctx context.Context, itemId string, body return c.Client.Do(req) } -func (c *Client) PostOutputsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostOutputsRequestWithBody(c.Server, contentType, body) +func (c *Client) UpdatePackagePolicyWithBody(ctx context.Context, packagePolicyId string, params *UpdatePackagePolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdatePackagePolicyRequestWithBody(c.Server, packagePolicyId, params, contentType, body) if err != nil { return nil, err } @@ -1684,8 +11812,8 @@ func (c *Client) PostOutputsWithBody(ctx context.Context, contentType string, bo return c.Client.Do(req) } -func (c *Client) PostOutputs(ctx context.Context, body PostOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostOutputsRequest(c.Server, body) +func (c *Client) UpdatePackagePolicy(ctx context.Context, packagePolicyId string, params *UpdatePackagePolicyParams, body UpdatePackagePolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdatePackagePolicyRequest(c.Server, packagePolicyId, params, body) if err != nil { return nil, err } @@ -1696,139 +11824,196 @@ func (c *Client) PostOutputs(ctx context.Context, body 
PostOutputsJSONRequestBod return c.Client.Do(req) } -func (c *Client) DeleteOutput(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteOutputRequest(c.Server, outputId) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} +// NewGetAgentPoliciesRequest generates requests for GetAgentPolicies +func NewGetAgentPoliciesRequest(server string, params *GetAgentPoliciesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_policies") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortField", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ShowUpgradeable != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "showUpgradeable", runtime.ParamLocationQuery, *params.ShowUpgradeable); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Kuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } -func (c *Client) GetOutput(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetOutputRequest(c.Server, outputId) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := 
c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} + if params.NoAgentCount != nil { -func (c *Client) UpdateOutputWithBody(ctx context.Context, outputId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdateOutputRequestWithBody(c.Server, outputId, contentType, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "noAgentCount", runtime.ParamLocationQuery, *params.NoAgentCount); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } -func (c *Client) UpdateOutput(ctx context.Context, outputId string, body UpdateOutputJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdateOutputRequest(c.Server, outputId, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} + } -func (c *Client) CreatePackagePolicyWithBody(ctx context.Context, params *CreatePackagePolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreatePackagePolicyRequestWithBody(c.Server, params, contentType, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} + if params.Full != nil { -func (c *Client) CreatePackagePolicy(ctx context.Context, params *CreatePackagePolicyParams, body CreatePackagePolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreatePackagePolicyRequest(c.Server, params, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "full", runtime.ParamLocationQuery, *params.Full); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } -func (c *Client) DeletePackagePolicy(ctx context.Context, packagePolicyId string, params *DeletePackagePolicyParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeletePackagePolicyRequest(c.Server, packagePolicyId, params) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} + } -func (c *Client) GetPackagePolicy(ctx context.Context, packagePolicyId string, params *GetPackagePolicyParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetPackagePolicyRequest(c.Server, packagePolicyId, params) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} + if params.Format != nil { -func (c *Client) 
UpdatePackagePolicyWithBody(ctx context.Context, packagePolicyId string, params *UpdatePackagePolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdatePackagePolicyRequestWithBody(c.Server, packagePolicyId, params, contentType, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() } - return c.Client.Do(req) -} -func (c *Client) UpdatePackagePolicy(ctx context.Context, packagePolicyId string, params *UpdatePackagePolicyParams, body UpdatePackagePolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdatePackagePolicyRequest(c.Server, packagePolicyId, params, body) + req, err := http.NewRequest("GET", queryURL.String(), nil) if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) + + return req, nil } // NewCreateAgentPolicyRequest calls the generic CreateAgentPolicy builder with application/json body -func NewCreateAgentPolicyRequest(server string, body CreateAgentPolicyJSONRequestBody) (*http.Request, error) { +func NewCreateAgentPolicyRequest(server string, params *CreateAgentPolicyParams, body CreateAgentPolicyJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { return nil, err } bodyReader = bytes.NewReader(buf) - return NewCreateAgentPolicyRequestWithBody(server, "application/json", bodyReader) + return NewCreateAgentPolicyRequestWithBody(server, params, "application/json", bodyReader) } // NewCreateAgentPolicyRequestWithBody generates requests for CreateAgentPolicy with any type of body -func NewCreateAgentPolicyRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { +func NewCreateAgentPolicyRequestWithBody(server string, params *CreateAgentPolicyParams, contentType string, body io.Reader) (*http.Request, error) { var err error serverURL, err := url.Parse(server) @@ -1836,7 +12021,7 @@ func NewCreateAgentPolicyRequestWithBody(server string, contentType string, body return nil, err } - operationPath := fmt.Sprintf("/agent_policies") + operationPath := fmt.Sprintf("/api/fleet/agent_policies") if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -1846,6 +12031,28 @@ func NewCreateAgentPolicyRequestWithBody(server string, contentType string, body return nil, err } + if params != nil { + queryValues := queryURL.Query() + + if params.SysMonitoring != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sys_monitoring", runtime.ParamLocationQuery, *params.SysMonitoring); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + req, err := http.NewRequest("POST", queryURL.String(), body) if err != nil { return nil, err @@ -1876,7 +12083,7 @@ func NewDeleteAgentPolicyRequestWithBody(server string, contentType string, body return nil, err } - operationPath := fmt.Sprintf("/agent_policies/delete") + operationPath := fmt.Sprintf("/api/fleet/agent_policies/delete") if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -1896,8 +12103,8 @@ func NewDeleteAgentPolicyRequestWithBody(server string, contentType string, body return req, nil } -// NewAgentPolicyInfoRequest generates requests for AgentPolicyInfo -func NewAgentPolicyInfoRequest(server string, agentPolicyId string) (*http.Request, error) { +// NewGetAgentPolicyRequest generates requests for GetAgentPolicy +func NewGetAgentPolicyRequest(server string, agentPolicyId string, params *GetAgentPolicyParams) (*http.Request, error) { var err error var pathParam0 string @@ -1912,7 +12119,7 @@ func NewAgentPolicyInfoRequest(server string, agentPolicyId string) (*http.Reque return nil, err } - operationPath := fmt.Sprintf("/agent_policies/%s", pathParam0) + operationPath := fmt.Sprintf("/api/fleet/agent_policies/%s", pathParam0) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -1922,6 +12129,28 @@ func NewAgentPolicyInfoRequest(server string, agentPolicyId string) (*http.Reque return nil, err } + if params != nil { + queryValues := queryURL.Query() + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + req, err := http.NewRequest("GET", queryURL.String(), nil) if err != nil { return nil, err @@ -1931,18 +12160,18 @@ func NewAgentPolicyInfoRequest(server string, agentPolicyId string) (*http.Reque } // NewUpdateAgentPolicyRequest calls the generic UpdateAgentPolicy builder with application/json body -func NewUpdateAgentPolicyRequest(server string, agentPolicyId string, body UpdateAgentPolicyJSONRequestBody) (*http.Request, error) { +func NewUpdateAgentPolicyRequest(server string, agentPolicyId string, params *UpdateAgentPolicyParams, body UpdateAgentPolicyJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { return nil, err } bodyReader = bytes.NewReader(buf) - return NewUpdateAgentPolicyRequestWithBody(server, agentPolicyId, "application/json", bodyReader) + return NewUpdateAgentPolicyRequestWithBody(server, agentPolicyId, params, "application/json", bodyReader) } // NewUpdateAgentPolicyRequestWithBody generates requests for UpdateAgentPolicy with any type of body -func NewUpdateAgentPolicyRequestWithBody(server string, agentPolicyId string, contentType string, body io.Reader) (*http.Request, error) { +func NewUpdateAgentPolicyRequestWithBody(server string, agentPolicyId string, params *UpdateAgentPolicyParams, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string @@ -1957,7 +12186,7 @@ func NewUpdateAgentPolicyRequestWithBody(server string, agentPolicyId string, co return nil, err } - operationPath := fmt.Sprintf("/agent_policies/%s", pathParam0) + operationPath := fmt.Sprintf("/api/fleet/agent_policies/%s", pathParam0) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -1967,6 +12196,28 @@ func NewUpdateAgentPolicyRequestWithBody(server string, agentPolicyId string, co return nil, err } + if params != nil { + queryValues := queryURL.Query() + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + req, err := http.NewRequest("PUT", queryURL.String(), body) if err != nil { return nil, err @@ -1974,26 +12225,80 @@ func NewUpdateAgentPolicyRequestWithBody(server string, agentPolicyId string, co req.Header.Add("Content-Type", contentType) - return req, nil -} + return req, nil +} + +// NewGetEnrollmentApiKeysRequest generates requests for GetEnrollmentApiKeys +func NewGetEnrollmentApiKeysRequest(server string, params *GetEnrollmentApiKeysParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/enrollment_api_keys") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } -// NewGetEnrollmentApiKeysRequest generates requests for GetEnrollmentApiKeys -func NewGetEnrollmentApiKeysRequest(server string) (*http.Request, error) { - var err error + if params.Kuery != nil { - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } - operationPath := fmt.Sprintf("/enrollment_api_keys") - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err + queryURL.RawQuery = queryValues.Encode() } req, err := http.NewRequest("GET", queryURL.String(), nil) @@ -2004,8 +12309,8 @@ func NewGetEnrollmentApiKeysRequest(server string) (*http.Request, error) { return req, nil } -// NewListAllPackagesRequest generates requests for ListAllPackages -func NewListAllPackagesRequest(server string, params *ListAllPackagesParams) (*http.Request, error) { +// NewListPackagesRequest generates requests for ListPackages +func NewListPackagesRequest(server string, params *ListPackagesParams) (*http.Request, error) { var err error serverURL, err := url.Parse(server) @@ -2013,7 +12318,7 @@ func NewListAllPackagesRequest(server string, params *ListAllPackagesParams) (*h return nil, err } - operationPath := fmt.Sprintf("/epm/packages") + operationPath := fmt.Sprintf("/api/fleet/epm/packages") if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -2026,9 +12331,9 @@ func NewListAllPackagesRequest(server string, params *ListAllPackagesParams) (*h if params != nil { queryValues := queryURL.Query() - if params.ExcludeInstallStatus != nil { + if params.Category != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "excludeInstallStatus", runtime.ParamLocationQuery, *params.ExcludeInstallStatus); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "category", runtime.ParamLocationQuery, *params.Category); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2074,9 +12379,9 @@ func NewListAllPackagesRequest(server string, params *ListAllPackagesParams) (*h } - if params.Category != nil { + if params.ExcludeInstallStatus != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "category", runtime.ParamLocationQuery, *params.Category); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "excludeInstallStatus", runtime.ParamLocationQuery, *params.ExcludeInstallStatus); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2101,41 +12406,16 @@ func NewListAllPackagesRequest(server string, params *ListAllPackagesParams) (*h return req, nil } -// NewDeletePackageRequest calls the generic DeletePackage builder with application/json body -func NewDeletePackageRequest(server string, pkgName string, pkgVersion string, params *DeletePackageParams, body DeletePackageJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewDeletePackageRequestWithBody(server, pkgName, pkgVersion, params, "application/json", bodyReader) -} - -// NewDeletePackageRequestWithBody generates requests for DeletePackage with any type of body -func NewDeletePackageRequestWithBody(server string, pkgName string, pkgVersion string, params *DeletePackageParams, contentType string, body io.Reader) (*http.Request, error) { +// NewInstallPackageByUploadRequestWithBody generates requests for InstallPackageByUpload with any type of body +func NewInstallPackageByUploadRequestWithBody(server string, params *InstallPackageByUploadParams, contentType string, body io.Reader) (*http.Request, error) { var err error - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, 
pkgName) - if err != nil { - return nil, err - } - - var pathParam1 string - - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) - if err != nil { - return nil, err - } - serverURL, err := url.Parse(server) if err != nil { return nil, err } - operationPath := fmt.Sprintf("/epm/packages/%s/%s", pathParam0, pathParam1) + operationPath := fmt.Sprintf("/api/fleet/epm/packages") if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -2148,25 +12428,9 @@ func NewDeletePackageRequestWithBody(server string, pkgName string, pkgVersion s if params != nil { queryValues := queryURL.Query() - if params.IgnoreUnverified != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreUnverified", runtime.ParamLocationQuery, *params.IgnoreUnverified); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - - } - - if params.Full != nil { + if params.IgnoreMappingUpdateErrors != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "full", runtime.ParamLocationQuery, *params.Full); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreMappingUpdateErrors", runtime.ParamLocationQuery, *params.IgnoreMappingUpdateErrors); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2180,9 +12444,9 @@ func NewDeletePackageRequestWithBody(server string, pkgName string, pkgVersion s } - if params.Prerelease != nil { + if params.SkipDataStreamRollover != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "skipDataStreamRollover", runtime.ParamLocationQuery, *params.SkipDataStreamRollover); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2199,7 +12463,7 @@ func NewDeletePackageRequestWithBody(server string, pkgName string, pkgVersion s queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("DELETE", queryURL.String(), body) + req, err := http.NewRequest("POST", queryURL.String(), body) if err != nil { return nil, err } @@ -2209,8 +12473,19 @@ func NewDeletePackageRequestWithBody(server string, pkgName string, pkgVersion s return req, nil } -// NewGetPackageRequest generates requests for GetPackage -func NewGetPackageRequest(server string, pkgName string, pkgVersion string, params *GetPackageParams) (*http.Request, error) { +// NewDeletePackageRequest calls the generic DeletePackage builder with application/json body +func NewDeletePackageRequest(server string, pkgName string, pkgVersion string, params *DeletePackageParams, body DeletePackageJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeletePackageRequestWithBody(server, pkgName, pkgVersion, params, "application/json", bodyReader) +} + +// NewDeletePackageRequestWithBody generates requests for DeletePackage with any type of body +func NewDeletePackageRequestWithBody(server string, pkgName string, pkgVersion string, params *DeletePackageParams, contentType string, body io.Reader) (*http.Request, error) { 
var err error var pathParam0 string @@ -2232,7 +12507,7 @@ func NewGetPackageRequest(server string, pkgName string, pkgVersion string, para return nil, err } - operationPath := fmt.Sprintf("/epm/packages/%s/%s", pathParam0, pathParam1) + operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -2245,41 +12520,9 @@ func NewGetPackageRequest(server string, pkgName string, pkgVersion string, para if params != nil { queryValues := queryURL.Query() - if params.IgnoreUnverified != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreUnverified", runtime.ParamLocationQuery, *params.IgnoreUnverified); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - - } - - if params.Full != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "full", runtime.ParamLocationQuery, *params.Full); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - - } - - if params.Prerelease != nil { + if params.Force != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "force", runtime.ParamLocationQuery, *params.Force); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2296,27 +12539,18 @@ func NewGetPackageRequest(server string, pkgName string, pkgVersion string, para queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("GET", queryURL.String(), nil) + req, err := http.NewRequest("DELETE", queryURL.String(), body) if err != nil { return nil, err } - return req, nil -} + req.Header.Add("Content-Type", contentType) -// NewInstallPackageRequest calls the generic InstallPackage builder with application/json body -func NewInstallPackageRequest(server string, pkgName string, pkgVersion string, params *InstallPackageParams, body InstallPackageJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewInstallPackageRequestWithBody(server, pkgName, pkgVersion, params, "application/json", bodyReader) + return req, nil } -// NewInstallPackageRequestWithBody generates requests for InstallPackage with any type of body -func NewInstallPackageRequestWithBody(server string, pkgName string, pkgVersion string, params *InstallPackageParams, contentType string, body io.Reader) (*http.Request, error) { +// NewGetPackageRequest generates requests for GetPackage +func NewGetPackageRequest(server string, pkgName string, pkgVersion string, params *GetPackageParams) (*http.Request, error) { var err error var pathParam0 string @@ -2338,7 +12572,7 @@ func NewInstallPackageRequestWithBody(server string, pkgName string, pkgVersion return nil, err } - operationPath := fmt.Sprintf("/epm/packages/%s/%s", pathParam0, pathParam1) + operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -2367,6 +12601,22 @@ func NewInstallPackageRequestWithBody(server string, pkgName string, pkgVersion } + if params.Prerelease != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + if params.Full != nil { if queryFrag, err := runtime.StyleParamWithLocation("form", true, "full", runtime.ParamLocationQuery, *params.Full); err != nil { @@ -2383,9 +12633,9 @@ func NewInstallPackageRequestWithBody(server string, pkgName string, pkgVersion } - if params.Prerelease != nil { + if params.WithMetadata != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withMetadata", runtime.ParamLocationQuery, *params.WithMetadata); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2402,29 +12652,27 @@ func NewInstallPackageRequestWithBody(server string, pkgName string, pkgVersion queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("POST", queryURL.String(), body) + req, err := http.NewRequest("GET", queryURL.String(), nil) if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) - return req, nil } -// NewUpdatePackageRequest calls the generic UpdatePackage builder with application/json body -func NewUpdatePackageRequest(server string, pkgName string, pkgVersion string, params *UpdatePackageParams, body UpdatePackageJSONRequestBody) (*http.Request, error) { +// NewInstallPackageRequest calls the generic InstallPackage builder with application/json body +func NewInstallPackageRequest(server string, pkgName string, pkgVersion string, params *InstallPackageParams, body InstallPackageJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { return nil, err } bodyReader = bytes.NewReader(buf) - return NewUpdatePackageRequestWithBody(server, pkgName, pkgVersion, params, "application/json", bodyReader) + return NewInstallPackageRequestWithBody(server, pkgName, pkgVersion, params, "application/json", bodyReader) } -// NewUpdatePackageRequestWithBody generates requests for UpdatePackage with any type of body -func NewUpdatePackageRequestWithBody(server string, pkgName string, pkgVersion string, params *UpdatePackageParams, contentType string, body io.Reader) (*http.Request, error) { +// NewInstallPackageRequestWithBody generates requests for InstallPackage with any type of body +func NewInstallPackageRequestWithBody(server string, pkgName string, pkgVersion string, params *InstallPackageParams, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string @@ -2446,7 +12694,7 @@ func NewUpdatePackageRequestWithBody(server string, pkgName string, pkgVersion s return nil, err } - operationPath := fmt.Sprintf("/epm/packages/%s/%s", pathParam0, pathParam1) + operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -2459,9 +12707,9 @@ func NewUpdatePackageRequestWithBody(server string, pkgName string, pkgVersion s if params != nil { queryValues := queryURL.Query() - if params.IgnoreUnverified != nil { + if params.Prerelease != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreUnverified", runtime.ParamLocationQuery, *params.IgnoreUnverified); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2475,9 +12723,9 @@ func NewUpdatePackageRequestWithBody(server string, pkgName string, pkgVersion s } - if params.Full != nil { + if params.IgnoreMappingUpdateErrors != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "full", runtime.ParamLocationQuery, *params.Full); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreMappingUpdateErrors", runtime.ParamLocationQuery, *params.IgnoreMappingUpdateErrors); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2491,9 +12739,9 @@ func NewUpdatePackageRequestWithBody(server string, pkgName string, pkgVersion s } - if params.Prerelease != nil { + if params.SkipDataStreamRollover != nil { - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "skipDataStreamRollover", runtime.ParamLocationQuery, *params.SkipDataStreamRollover); err != nil { return nil, err } else if parsed, err := url.ParseQuery(queryFrag); err != nil { return nil, err @@ -2510,7 +12758,7 @@ func NewUpdatePackageRequestWithBody(server string, pkgName string, pkgVersion s queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("PUT", queryURL.String(), body) + req, err := http.NewRequest("POST", queryURL.String(), body) if err != nil { return nil, err } @@ -2520,19 +12768,46 @@ func NewUpdatePackageRequestWithBody(server string, pkgName string, pkgVersion s return req, nil } -// NewPostFleetServerHostsRequest calls the generic PostFleetServerHosts builder with application/json body -func NewPostFleetServerHostsRequest(server string, body PostFleetServerHostsJSONRequestBody) (*http.Request, error) { +// NewGetFleetServerHostsRequest generates requests for GetFleetServerHosts +func NewGetFleetServerHostsRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateFleetServerHostRequest calls the generic CreateFleetServerHost builder with application/json body +func NewCreateFleetServerHostRequest(server string, body CreateFleetServerHostJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { return nil, err } bodyReader = bytes.NewReader(buf) - return NewPostFleetServerHostsRequestWithBody(server, "application/json", bodyReader) + return NewCreateFleetServerHostRequestWithBody(server, "application/json", bodyReader) } -// NewPostFleetServerHostsRequestWithBody generates requests for PostFleetServerHosts with any type of body -func NewPostFleetServerHostsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { +// NewCreateFleetServerHostRequestWithBody generates requests for CreateFleetServerHost with any type of body +func NewCreateFleetServerHostRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { var err error serverURL, err := url.Parse(server) @@ -2540,7 +12815,7 @@ func NewPostFleetServerHostsRequestWithBody(server string, contentType string, b return nil, err } - operationPath := fmt.Sprintf("/fleet_server_hosts") + operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts") if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -2560,8 +12835,8 @@ func NewPostFleetServerHostsRequestWithBody(server string, contentType string, b return req, nil } -// NewDeleteFleetServerHostsRequest generates requests for DeleteFleetServerHosts -func NewDeleteFleetServerHostsRequest(server string, itemId string) (*http.Request, error) { +// NewDeleteFleetServerHostRequest generates requests for DeleteFleetServerHost +func NewDeleteFleetServerHostRequest(server string, itemId string) (*http.Request, error) { var err error var pathParam0 string @@ -2576,7 +12851,7 @@ func NewDeleteFleetServerHostsRequest(server string, itemId string) (*http.Reque return nil, err } - operationPath := fmt.Sprintf("/fleet_server_hosts/%s", pathParam0) + operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts/%s", pathParam0) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -2594,8 +12869,8 @@ func NewDeleteFleetServerHostsRequest(server string, itemId string) (*http.Reque return req, nil } -// NewGetOneFleetServerHostsRequest generates requests for GetOneFleetServerHosts -func NewGetOneFleetServerHostsRequest(server string, itemId string) (*http.Request, error) { +// NewGetFleetServerHostRequest generates requests for GetFleetServerHost +func NewGetFleetServerHostRequest(server string, itemId string) (*http.Request, error) { var err error var pathParam0 string @@ -2610,7 +12885,7 @@ func NewGetOneFleetServerHostsRequest(server string, itemId string) (*http.Reque return nil, err } - operationPath := fmt.Sprintf("/fleet_server_hosts/%s", pathParam0) + operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts/%s", pathParam0) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -2625,37 +12900,66 @@ func NewGetOneFleetServerHostsRequest(server string, itemId string) (*http.Reque return nil, err } - return req, nil -} - -// NewUpdateFleetServerHostsRequest calls the generic UpdateFleetServerHosts builder with application/json body -func NewUpdateFleetServerHostsRequest(server string, itemId string, body UpdateFleetServerHostsJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) + return req, nil +} + +// NewUpdateFleetServerHostRequest calls the generic UpdateFleetServerHost builder with application/json body +func NewUpdateFleetServerHostRequest(server string, itemId string, body UpdateFleetServerHostJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateFleetServerHostRequestWithBody(server, itemId, "application/json", bodyReader) +} + +// NewUpdateFleetServerHostRequestWithBody generates requests for UpdateFleetServerHost with any type of body +func NewUpdateFleetServerHostRequestWithBody(server string, itemId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "itemId", runtime.ParamLocationPath, itemId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) if err != nil { return nil, err } - bodyReader = bytes.NewReader(buf) - return NewUpdateFleetServerHostsRequestWithBody(server, itemId, "application/json", bodyReader) -} -// NewUpdateFleetServerHostsRequestWithBody generates requests for UpdateFleetServerHosts with any type of body -func NewUpdateFleetServerHostsRequestWithBody(server string, itemId string, contentType string, body io.Reader) (*http.Request, error) { - var err error + req.Header.Add("Content-Type", contentType) - var pathParam0 string + return req, nil +} - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "itemId", runtime.ParamLocationPath, itemId) - if err != nil { - return nil, err - } +// NewGetOutputsRequest generates requests for GetOutputs +func NewGetOutputsRequest(server string) (*http.Request, error) { + var err error serverURL, err := url.Parse(server) if err != nil { return nil, err } - operationPath := fmt.Sprintf("/fleet_server_hosts/%s", pathParam0) + operationPath := fmt.Sprintf("/api/fleet/outputs") if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -2665,29 +12969,27 @@ func NewUpdateFleetServerHostsRequestWithBody(server string, itemId string, cont return nil, err } - req, err := http.NewRequest("PUT", queryURL.String(), body) + req, err := http.NewRequest("GET", queryURL.String(), nil) if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) - return req, nil } -// NewPostOutputsRequest calls the generic PostOutputs builder with application/json body -func NewPostOutputsRequest(server string, body PostOutputsJSONRequestBody) (*http.Request, error) { +// NewCreateOutputRequest calls the generic CreateOutput builder with application/json body +func NewCreateOutputRequest(server string, body CreateOutputJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { return nil, err } bodyReader = bytes.NewReader(buf) - return NewPostOutputsRequestWithBody(server, "application/json", bodyReader) + return NewCreateOutputRequestWithBody(server, "application/json", bodyReader) } -// NewPostOutputsRequestWithBody generates requests for PostOutputs with any type of body -func NewPostOutputsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { +// NewCreateOutputRequestWithBody generates requests for CreateOutput with any type of body +func NewCreateOutputRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { var err error serverURL, err := url.Parse(server) @@ -2695,7 +12997,7 @@ func NewPostOutputsRequestWithBody(server string, contentType string, body io.Re return nil, err } - operationPath := fmt.Sprintf("/outputs") + operationPath := fmt.Sprintf("/api/fleet/outputs") if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -2731,7 +13033,7 @@ func NewDeleteOutputRequest(server string, outputId string) (*http.Request, erro return nil, err } - operationPath := fmt.Sprintf("/outputs/%s", pathParam0) + operationPath := fmt.Sprintf("/api/fleet/outputs/%s", pathParam0) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -2765,7 +13067,7 @@ func NewGetOutputRequest(server string, outputId string) (*http.Request, error) return nil, err } - operationPath := fmt.Sprintf("/outputs/%s", pathParam0) + operationPath := fmt.Sprintf("/api/fleet/outputs/%s", pathParam0) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -2810,7 +13112,7 @@ func NewUpdateOutputRequestWithBody(server string, outputId string, contentType return nil, err } - operationPath := fmt.Sprintf("/outputs/%s", pathParam0) + operationPath := fmt.Sprintf("/api/fleet/outputs/%s", pathParam0) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -2830,6 +13132,167 @@ func NewUpdateOutputRequestWithBody(server string, outputId string, contentType return req, nil } +// NewGetPackagePoliciesRequest generates requests for GetPackagePolicies +func NewGetPackagePoliciesRequest(server string, params *GetPackagePoliciesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/package_policies") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortField", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ShowUpgradeable != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "showUpgradeable", runtime.ParamLocationQuery, *params.ShowUpgradeable); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Kuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.WithAgentCount != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withAgentCount", runtime.ParamLocationQuery, *params.WithAgentCount); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + // NewCreatePackagePolicyRequest calls the generic CreatePackagePolicy builder with application/json body func 
NewCreatePackagePolicyRequest(server string, params *CreatePackagePolicyParams, body CreatePackagePolicyJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader @@ -2850,7 +13313,7 @@ func NewCreatePackagePolicyRequestWithBody(server string, params *CreatePackageP return nil, err } - operationPath := fmt.Sprintf("/package_policies") + operationPath := fmt.Sprintf("/api/fleet/package_policies") if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -2908,7 +13371,7 @@ func NewDeletePackagePolicyRequest(server string, packagePolicyId string, params return nil, err } - operationPath := fmt.Sprintf("/package_policies/%s", pathParam0) + operationPath := fmt.Sprintf("/api/fleet/package_policies/%s", pathParam0) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -2964,7 +13427,7 @@ func NewGetPackagePolicyRequest(server string, packagePolicyId string, params *G return nil, err } - operationPath := fmt.Sprintf("/package_policies/%s", pathParam0) + operationPath := fmt.Sprintf("/api/fleet/package_policies/%s", pathParam0) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -3031,7 +13494,7 @@ func NewUpdatePackagePolicyRequestWithBody(server string, packagePolicyId string return nil, err } - operationPath := fmt.Sprintf("/package_policies/%s", pathParam0) + operationPath := fmt.Sprintf("/api/fleet/package_policies/%s", pathParam0) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -3116,29 +13579,35 @@ func WithBaseURL(baseURL string) ClientOption { // ClientWithResponsesInterface is the interface specification for the client with responses above. type ClientWithResponsesInterface interface { + // GetAgentPoliciesWithResponse request + GetAgentPoliciesWithResponse(ctx context.Context, params *GetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*GetAgentPoliciesResponse, error) + // CreateAgentPolicyWithBodyWithResponse request with any body - CreateAgentPolicyWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAgentPolicyResponse, error) + CreateAgentPolicyWithBodyWithResponse(ctx context.Context, params *CreateAgentPolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAgentPolicyResponse, error) - CreateAgentPolicyWithResponse(ctx context.Context, body CreateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAgentPolicyResponse, error) + CreateAgentPolicyWithResponse(ctx context.Context, params *CreateAgentPolicyParams, body CreateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAgentPolicyResponse, error) // DeleteAgentPolicyWithBodyWithResponse request with any body DeleteAgentPolicyWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteAgentPolicyResponse, error) DeleteAgentPolicyWithResponse(ctx context.Context, body DeleteAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteAgentPolicyResponse, error) - // AgentPolicyInfoWithResponse request - AgentPolicyInfoWithResponse(ctx context.Context, agentPolicyId string, reqEditors ...RequestEditorFn) (*AgentPolicyInfoResponse, error) + // GetAgentPolicyWithResponse request + GetAgentPolicyWithResponse(ctx context.Context, agentPolicyId string, params *GetAgentPolicyParams, reqEditors ...RequestEditorFn) (*GetAgentPolicyResponse, error) // UpdateAgentPolicyWithBodyWithResponse request with any body - UpdateAgentPolicyWithBodyWithResponse(ctx context.Context, 
agentPolicyId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateAgentPolicyResponse, error) + UpdateAgentPolicyWithBodyWithResponse(ctx context.Context, agentPolicyId string, params *UpdateAgentPolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateAgentPolicyResponse, error) - UpdateAgentPolicyWithResponse(ctx context.Context, agentPolicyId string, body UpdateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateAgentPolicyResponse, error) + UpdateAgentPolicyWithResponse(ctx context.Context, agentPolicyId string, params *UpdateAgentPolicyParams, body UpdateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateAgentPolicyResponse, error) // GetEnrollmentApiKeysWithResponse request - GetEnrollmentApiKeysWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetEnrollmentApiKeysResponse, error) + GetEnrollmentApiKeysWithResponse(ctx context.Context, params *GetEnrollmentApiKeysParams, reqEditors ...RequestEditorFn) (*GetEnrollmentApiKeysResponse, error) + + // ListPackagesWithResponse request + ListPackagesWithResponse(ctx context.Context, params *ListPackagesParams, reqEditors ...RequestEditorFn) (*ListPackagesResponse, error) - // ListAllPackagesWithResponse request - ListAllPackagesWithResponse(ctx context.Context, params *ListAllPackagesParams, reqEditors ...RequestEditorFn) (*ListAllPackagesResponse, error) + // InstallPackageByUploadWithBodyWithResponse request with any body + InstallPackageByUploadWithBodyWithResponse(ctx context.Context, params *InstallPackageByUploadParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*InstallPackageByUploadResponse, error) // DeletePackageWithBodyWithResponse request with any body DeletePackageWithBodyWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *DeletePackageParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeletePackageResponse, error) @@ -3153,31 +13622,32 @@ type ClientWithResponsesInterface interface { InstallPackageWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *InstallPackageParams, body InstallPackageJSONRequestBody, reqEditors ...RequestEditorFn) (*InstallPackageResponse, error) - // UpdatePackageWithBodyWithResponse request with any body - UpdatePackageWithBodyWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *UpdatePackageParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdatePackageResponse, error) + // GetFleetServerHostsWithResponse request + GetFleetServerHostsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetServerHostsResponse, error) - UpdatePackageWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *UpdatePackageParams, body UpdatePackageJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdatePackageResponse, error) + // CreateFleetServerHostWithBodyWithResponse request with any body + CreateFleetServerHostWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateFleetServerHostResponse, error) - // PostFleetServerHostsWithBodyWithResponse request with any body - PostFleetServerHostsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetServerHostsResponse, error) + CreateFleetServerHostWithResponse(ctx context.Context, body CreateFleetServerHostJSONRequestBody, reqEditors ...RequestEditorFn) 
(*CreateFleetServerHostResponse, error) - PostFleetServerHostsWithResponse(ctx context.Context, body PostFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetServerHostsResponse, error) + // DeleteFleetServerHostWithResponse request + DeleteFleetServerHostWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*DeleteFleetServerHostResponse, error) - // DeleteFleetServerHostsWithResponse request - DeleteFleetServerHostsWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*DeleteFleetServerHostsResponse, error) + // GetFleetServerHostWithResponse request + GetFleetServerHostWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*GetFleetServerHostResponse, error) - // GetOneFleetServerHostsWithResponse request - GetOneFleetServerHostsWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*GetOneFleetServerHostsResponse, error) + // UpdateFleetServerHostWithBodyWithResponse request with any body + UpdateFleetServerHostWithBodyWithResponse(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateFleetServerHostResponse, error) - // UpdateFleetServerHostsWithBodyWithResponse request with any body - UpdateFleetServerHostsWithBodyWithResponse(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateFleetServerHostsResponse, error) + UpdateFleetServerHostWithResponse(ctx context.Context, itemId string, body UpdateFleetServerHostJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateFleetServerHostResponse, error) - UpdateFleetServerHostsWithResponse(ctx context.Context, itemId string, body UpdateFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateFleetServerHostsResponse, error) + // GetOutputsWithResponse request + GetOutputsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetOutputsResponse, error) - // PostOutputsWithBodyWithResponse request with any body - PostOutputsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostOutputsResponse, error) + // CreateOutputWithBodyWithResponse request with any body + CreateOutputWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateOutputResponse, error) - PostOutputsWithResponse(ctx context.Context, body PostOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostOutputsResponse, error) + CreateOutputWithResponse(ctx context.Context, body CreateOutputJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateOutputResponse, error) // DeleteOutputWithResponse request DeleteOutputWithResponse(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*DeleteOutputResponse, error) @@ -3190,6 +13660,9 @@ type ClientWithResponsesInterface interface { UpdateOutputWithResponse(ctx context.Context, outputId string, body UpdateOutputJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateOutputResponse, error) + // GetPackagePoliciesWithResponse request + GetPackagePoliciesWithResponse(ctx context.Context, params *GetPackagePoliciesParams, reqEditors ...RequestEditorFn) (*GetPackagePoliciesResponse, error) + // CreatePackagePolicyWithBodyWithResponse request with any body CreatePackagePolicyWithBodyWithResponse(ctx context.Context, params *CreatePackagePolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) 
(*CreatePackagePolicyResponse, error) @@ -3207,13 +13680,49 @@ type ClientWithResponsesInterface interface { UpdatePackagePolicyWithResponse(ctx context.Context, packagePolicyId string, params *UpdatePackagePolicyParams, body UpdatePackagePolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdatePackagePolicyResponse, error) } +type GetAgentPoliciesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []AgentPolicy `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetAgentPoliciesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetAgentPoliciesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + type CreateAgentPolicyResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Item *AgentPolicy `json:"item,omitempty"` + Item AgentPolicy `json:"item"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error } // Status returns HTTPResponse.Status @@ -3236,10 +13745,14 @@ type DeleteAgentPolicyResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Id string `json:"id"` - Success bool `json:"success"` + Id string `json:"id"` + Name string `json:"name"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error } // Status returns HTTPResponse.Status @@ -3258,17 +13771,21 @@ func (r DeleteAgentPolicyResponse) StatusCode() int { return 0 } -type AgentPolicyInfoResponse struct { +type GetAgentPolicyResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { Item AgentPolicy `json:"item"` } - JSON400 *Error + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } } // Status returns HTTPResponse.Status -func (r AgentPolicyInfoResponse) Status() string { +func (r GetAgentPolicyResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -3276,7 +13793,7 @@ func (r AgentPolicyInfoResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r AgentPolicyInfoResponse) StatusCode() int { +func (r GetAgentPolicyResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } @@ -3289,7 +13806,11 @@ type UpdateAgentPolicyResponse struct { JSON200 *struct { Item AgentPolicy `json:"item"` } - JSON400 *Error + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } } // Status returns HTTPResponse.Status @@ -3314,12 +13835,33 @@ type GetEnrollmentApiKeysResponse struct { JSON200 *struct { Items []EnrollmentApiKey `json:"items"` // Deprecated: - List *[]EnrollmentApiKey `json:"list,omitempty"` - Page float32 `json:"page"` - PerPage float32 `json:"perPage"` - Total float32 `json:"total"` + List []struct { + // Active When false, the enrollment API key is revoked and cannot be used for 
enrolling Elastic Agents. + Active bool `json:"active"` + + // ApiKey The enrollment API key (token) used for enrolling Elastic Agents. + ApiKey string `json:"api_key"` + + // ApiKeyId The ID of the API key in the Security API. + ApiKeyId string `json:"api_key_id"` + CreatedAt string `json:"created_at"` + Id string `json:"id"` + + // Name The name of the enrollment API key. + Name *string `json:"name,omitempty"` + + // PolicyId The ID of the agent policy the Elastic Agent will be enrolled in. + PolicyId *string `json:"policy_id,omitempty"` + } `json:"list"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error } // Status returns HTTPResponse.Status @@ -3338,15 +13880,183 @@ func (r GetEnrollmentApiKeysResponse) StatusCode() int { return 0 } -type ListAllPackagesResponse struct { +type ListPackagesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []PackageListItem `json:"items"` + Response *[]ListPackages_200_Response_Item `json:"response,omitempty"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type ListPackages_200_Response_Conditions_Elastic struct { + Capabilities *[]string `json:"capabilities,omitempty"` + Subscription *string `json:"subscription,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages_200_Response_Conditions_Kibana struct { + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages_200_Response_Conditions struct { + Elastic *ListPackages_200_Response_Conditions_Elastic `json:"elastic,omitempty"` + Kibana *ListPackages_200_Response_Conditions_Kibana `json:"kibana,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages_200_Response_Icons_Item struct { + DarkMode *bool `json:"dark_mode,omitempty"` + Path *string `json:"path,omitempty"` + Size *string `json:"size,omitempty"` + Src string `json:"src"` + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages200ResponseInstallationInfoAdditionalSpacesInstalledKibanaType string +type ListPackages_200_Response_InstallationInfo_AdditionalSpacesInstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type ListPackages200ResponseInstallationInfoAdditionalSpacesInstalledKibanaType `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages_200_Response_InstallationInfo_ExperimentalDataStreamFeatures_Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages_200_Response_InstallationInfo_ExperimentalDataStreamFeatures_Item struct { + DataStream string `json:"data_stream"` + Features ListPackages_200_Response_InstallationInfo_ExperimentalDataStreamFeatures_Features `json:"features"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages200ResponseInstallationInfoInstallSource 
string +type ListPackages200ResponseInstallationInfoInstallStatus string +type ListPackages200ResponseInstallationInfoInstalledEsType string +type ListPackages_200_Response_InstallationInfo_InstalledEs_Item struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type ListPackages200ResponseInstallationInfoInstalledEsType `json:"type"` + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages200ResponseInstallationInfoInstalledKibanaType string +type ListPackages_200_Response_InstallationInfo_InstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type ListPackages200ResponseInstallationInfoInstalledKibanaType `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages_200_Response_InstallationInfo_LatestExecutedState struct { + Error *string `json:"error,omitempty"` + Name string `json:"name"` + StartedAt string `json:"started_at"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages_200_Response_InstallationInfo_LatestInstallFailedAttempts_Error struct { + Message string `json:"message"` + Name string `json:"name"` + Stack *string `json:"stack,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages_200_Response_InstallationInfo_LatestInstallFailedAttempts_Item struct { + CreatedAt string `json:"created_at"` + Error ListPackages_200_Response_InstallationInfo_LatestInstallFailedAttempts_Error `json:"error"` + TargetVersion string `json:"target_version"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages200ResponseInstallationInfoVerificationStatus string +type ListPackages_200_Response_InstallationInfo struct { + AdditionalSpacesInstalledKibana *map[string][]ListPackages_200_Response_InstallationInfo_AdditionalSpacesInstalledKibana_Item `json:"additional_spaces_installed_kibana,omitempty"` + CreatedAt *string `json:"created_at,omitempty"` + ExperimentalDataStreamFeatures *[]ListPackages_200_Response_InstallationInfo_ExperimentalDataStreamFeatures_Item `json:"experimental_data_stream_features,omitempty"` + InstallFormatSchemaVersion *string `json:"install_format_schema_version,omitempty"` + InstallSource ListPackages200ResponseInstallationInfoInstallSource `json:"install_source"` + InstallStatus ListPackages200ResponseInstallationInfoInstallStatus `json:"install_status"` + InstalledEs []ListPackages_200_Response_InstallationInfo_InstalledEs_Item `json:"installed_es"` + InstalledKibana []ListPackages_200_Response_InstallationInfo_InstalledKibana_Item `json:"installed_kibana"` + InstalledKibanaSpaceId *string `json:"installed_kibana_space_id,omitempty"` + LatestExecutedState *ListPackages_200_Response_InstallationInfo_LatestExecutedState `json:"latest_executed_state,omitempty"` + LatestInstallFailedAttempts *[]ListPackages_200_Response_InstallationInfo_LatestInstallFailedAttempts_Item `json:"latest_install_failed_attempts,omitempty"` + Name string `json:"name"` + Namespaces *[]string `json:"namespaces,omitempty"` + Type string `json:"type"` + UpdatedAt *string `json:"updated_at,omitempty"` + VerificationKeyId *string `json:"verification_key_id"` + VerificationStatus ListPackages200ResponseInstallationInfoVerificationStatus `json:"verification_status"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages200ResponseOwnerType string +type ListPackages_200_Response_Owner struct { + 
Github *string `json:"github,omitempty"` + Type *ListPackages200ResponseOwnerType `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages200ResponseRelease string +type ListPackages_200_Response_Source struct { + License string `json:"license"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages200ResponseType string +type ListPackages_200_Response_Item struct { + Categories *[]string `json:"categories,omitempty"` + Conditions *ListPackages_200_Response_Conditions `json:"conditions,omitempty"` + DataStreams *[]map[string]interface{} `json:"data_streams,omitempty"` + Description *string `json:"description,omitempty"` + Download *string `json:"download,omitempty"` + FormatVersion *string `json:"format_version,omitempty"` + Icons *[]ListPackages_200_Response_Icons_Item `json:"icons,omitempty"` + Id string `json:"id"` + InstallationInfo *ListPackages_200_Response_InstallationInfo `json:"installationInfo,omitempty"` + Integration *string `json:"integration,omitempty"` + Internal *bool `json:"internal,omitempty"` + LatestVersion *string `json:"latestVersion,omitempty"` + Name string `json:"name"` + Owner *ListPackages_200_Response_Owner `json:"owner,omitempty"` + Path *string `json:"path,omitempty"` + PolicyTemplates *[]map[string]interface{} `json:"policy_templates,omitempty"` + Readme *string `json:"readme,omitempty"` + Release *ListPackages200ResponseRelease `json:"release,omitempty"` + SavedObject interface{} `json:"savedObject"` + SignaturePath *string `json:"signature_path,omitempty"` + Source *ListPackages_200_Response_Source `json:"source,omitempty"` + Status *string `json:"status,omitempty"` + Title string `json:"title"` + Type *ListPackages200ResponseType `json:"type,omitempty"` + Vars *[]map[string]interface{} `json:"vars,omitempty"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// Status returns HTTPResponse.Status +func (r ListPackagesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ListPackagesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type InstallPackageByUploadResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *GetPackagesResponse - JSON400 *Error } // Status returns HTTPResponse.Status -func (r ListAllPackagesResponse) Status() string { +func (r InstallPackageByUploadResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -3354,7 +14064,7 @@ func (r ListAllPackagesResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r ListAllPackagesResponse) StatusCode() int { +func (r InstallPackageByUploadResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } @@ -3365,12 +14075,47 @@ type DeletePackageResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Items []struct { - Id string `json:"id"` - Type PackageItemType `json:"type"` - } `json:"items"` + Items []DeletePackage_200_Items_Item `json:"items"` + // Deprecated: + Response *[]DeletePackage_200_Response_Item `json:"response,omitempty"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error +} +type DeletePackage200Items0 struct { + Id string `json:"id"` + OriginId 
*string `json:"originId,omitempty"` + Type DeletePackage200Items0Type `json:"type"` +} +type DeletePackage200Items0Type string +type DeletePackage200Items1 struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type DeletePackage200Items1Type `json:"type"` + Version *string `json:"version,omitempty"` +} +type DeletePackage200Items1Type string +type DeletePackage_200_Items_Item struct { + union json.RawMessage +} +type DeletePackage200Response0 struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type DeletePackage200Response0Type `json:"type"` +} +type DeletePackage200Response0Type string +type DeletePackage200Response1 struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type DeletePackage200Response1Type `json:"type"` + Version *string `json:"version,omitempty"` +} +type DeletePackage200Response1Type string +type DeletePackage_200_Response_Item struct { + union json.RawMessage } // Status returns HTTPResponse.Status @@ -3382,31 +14127,261 @@ func (r DeletePackageResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r DeletePackageResponse) StatusCode() int { +func (r DeletePackageResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetPackageResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item PackageInfo `json:"item"` + Metadata *struct { + HasPolicies bool `json:"has_policies"` + } `json:"metadata,omitempty"` + // Deprecated: + Response *GetPackage_200_Response `json:"response,omitempty"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetPackage_200_Response_Conditions_Elastic struct { + Capabilities *[]string `json:"capabilities,omitempty"` + Subscription *string `json:"subscription,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage_200_Response_Conditions_Kibana struct { + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage_200_Response_Conditions struct { + Elastic *GetPackage_200_Response_Conditions_Elastic `json:"elastic,omitempty"` + Kibana *GetPackage_200_Response_Conditions_Kibana `json:"kibana,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage_200_Response_Icons_Item struct { + DarkMode *bool `json:"dark_mode,omitempty"` + Path *string `json:"path,omitempty"` + Size *string `json:"size,omitempty"` + Src string `json:"src"` + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage200ResponseInstallationInfoAdditionalSpacesInstalledKibanaType string +type GetPackage_200_Response_InstallationInfo_AdditionalSpacesInstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type GetPackage200ResponseInstallationInfoAdditionalSpacesInstalledKibanaType `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage_200_Response_InstallationInfo_ExperimentalDataStreamFeatures_Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + AdditionalProperties 
map[string]interface{} `json:"-"` +} +type GetPackage_200_Response_InstallationInfo_ExperimentalDataStreamFeatures_Item struct { + DataStream string `json:"data_stream"` + Features GetPackage_200_Response_InstallationInfo_ExperimentalDataStreamFeatures_Features `json:"features"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage200ResponseInstallationInfoInstallSource string +type GetPackage200ResponseInstallationInfoInstallStatus string +type GetPackage200ResponseInstallationInfoInstalledEsType string +type GetPackage_200_Response_InstallationInfo_InstalledEs_Item struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type GetPackage200ResponseInstallationInfoInstalledEsType `json:"type"` + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage200ResponseInstallationInfoInstalledKibanaType string +type GetPackage_200_Response_InstallationInfo_InstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type GetPackage200ResponseInstallationInfoInstalledKibanaType `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage_200_Response_InstallationInfo_LatestExecutedState struct { + Error *string `json:"error,omitempty"` + Name string `json:"name"` + StartedAt string `json:"started_at"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage_200_Response_InstallationInfo_LatestInstallFailedAttempts_Error struct { + Message string `json:"message"` + Name string `json:"name"` + Stack *string `json:"stack,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage_200_Response_InstallationInfo_LatestInstallFailedAttempts_Item struct { + CreatedAt string `json:"created_at"` + Error GetPackage_200_Response_InstallationInfo_LatestInstallFailedAttempts_Error `json:"error"` + TargetVersion string `json:"target_version"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage200ResponseInstallationInfoVerificationStatus string +type GetPackage_200_Response_InstallationInfo struct { + AdditionalSpacesInstalledKibana *map[string][]GetPackage_200_Response_InstallationInfo_AdditionalSpacesInstalledKibana_Item `json:"additional_spaces_installed_kibana,omitempty"` + CreatedAt *string `json:"created_at,omitempty"` + ExperimentalDataStreamFeatures *[]GetPackage_200_Response_InstallationInfo_ExperimentalDataStreamFeatures_Item `json:"experimental_data_stream_features,omitempty"` + InstallFormatSchemaVersion *string `json:"install_format_schema_version,omitempty"` + InstallSource GetPackage200ResponseInstallationInfoInstallSource `json:"install_source"` + InstallStatus GetPackage200ResponseInstallationInfoInstallStatus `json:"install_status"` + InstalledEs []GetPackage_200_Response_InstallationInfo_InstalledEs_Item `json:"installed_es"` + InstalledKibana []GetPackage_200_Response_InstallationInfo_InstalledKibana_Item `json:"installed_kibana"` + InstalledKibanaSpaceId *string `json:"installed_kibana_space_id,omitempty"` + LatestExecutedState *GetPackage_200_Response_InstallationInfo_LatestExecutedState `json:"latest_executed_state,omitempty"` + LatestInstallFailedAttempts *[]GetPackage_200_Response_InstallationInfo_LatestInstallFailedAttempts_Item `json:"latest_install_failed_attempts,omitempty"` + Name string `json:"name"` + Namespaces *[]string `json:"namespaces,omitempty"` + Type string `json:"type"` + UpdatedAt *string `json:"updated_at,omitempty"` + 
VerificationKeyId *string `json:"verification_key_id"` + VerificationStatus GetPackage200ResponseInstallationInfoVerificationStatus `json:"verification_status"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage200ResponseOwnerType string +type GetPackage_200_Response_Owner struct { + Github *string `json:"github,omitempty"` + Type *GetPackage200ResponseOwnerType `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage200ResponseRelease string +type GetPackage_200_Response_Source struct { + License string `json:"license"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetPackage200ResponseType string +type GetPackage_200_Response struct { + Agent *struct { + Privileges *struct { + Root *bool `json:"root,omitempty"` + } `json:"privileges,omitempty"` + } `json:"agent,omitempty"` + AssetTags *[]struct { + AssetIds *[]string `json:"asset_ids,omitempty"` + AssetTypes *[]string `json:"asset_types,omitempty"` + Text string `json:"text"` + } `json:"asset_tags,omitempty"` + Assets map[string]interface{} `json:"assets"` + Categories *[]string `json:"categories,omitempty"` + Conditions *GetPackage_200_Response_Conditions `json:"conditions,omitempty"` + DataStreams *[]map[string]interface{} `json:"data_streams,omitempty"` + Description *string `json:"description,omitempty"` + Download *string `json:"download,omitempty"` + Elasticsearch *map[string]interface{} `json:"elasticsearch,omitempty"` + FormatVersion *string `json:"format_version,omitempty"` + Icons *[]GetPackage_200_Response_Icons_Item `json:"icons,omitempty"` + InstallationInfo *GetPackage_200_Response_InstallationInfo `json:"installationInfo,omitempty"` + Internal *bool `json:"internal,omitempty"` + KeepPoliciesUpToDate *bool `json:"keepPoliciesUpToDate,omitempty"` + LatestVersion *string `json:"latestVersion,omitempty"` + License *string `json:"license,omitempty"` + LicensePath *string `json:"licensePath,omitempty"` + Name string `json:"name"` + Notice *string `json:"notice,omitempty"` + Owner *GetPackage_200_Response_Owner `json:"owner,omitempty"` + Path *string `json:"path,omitempty"` + PolicyTemplates *[]map[string]interface{} `json:"policy_templates,omitempty"` + Readme *string `json:"readme,omitempty"` + Release *GetPackage200ResponseRelease `json:"release,omitempty"` + SavedObject interface{} `json:"savedObject"` + Screenshots *[]struct { + DarkMode *bool `json:"dark_mode,omitempty"` + Path *string `json:"path,omitempty"` + Size *string `json:"size,omitempty"` + Src string `json:"src"` + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"screenshots,omitempty"` + SignaturePath *string `json:"signature_path,omitempty"` + Source *GetPackage_200_Response_Source `json:"source,omitempty"` + Status *string `json:"status,omitempty"` + Title string `json:"title"` + Type *GetPackage200ResponseType `json:"type,omitempty"` + Vars *[]map[string]interface{} `json:"vars,omitempty"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// Status returns HTTPResponse.Status +func (r GetPackageResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetPackageResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type GetPackageResponse struct { +type InstallPackageResponse struct { 
Body []byte HTTPResponse *http.Response JSON200 *struct { - Item *PackageInfo `json:"item,omitempty"` - KeepPoliciesUpToDate *bool `json:"keepPoliciesUpToDate,omitempty"` - LatestVersion *string `json:"latestVersion,omitempty"` - LicensePath *string `json:"licensePath,omitempty"` - Notice *string `json:"notice,omitempty"` + Meta struct { + InstallSource string `json:"install_source"` + } `json:"_meta"` + Items []InstallPackage_200_Items_Item `json:"items"` // Deprecated: - SavedObject map[string]interface{} `json:"savedObject"` - Status PackageStatus `json:"status"` + Response *[]InstallPackage_200_Response_Item `json:"response,omitempty"` } - JSON400 *Error + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type InstallPackage200Items0 struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type InstallPackage200Items0Type `json:"type"` +} +type InstallPackage200Items0Type string +type InstallPackage200Items1 struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type InstallPackage200Items1Type `json:"type"` + Version *string `json:"version,omitempty"` +} +type InstallPackage200Items1Type string +type InstallPackage_200_Items_Item struct { + union json.RawMessage +} +type InstallPackage200Response0 struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type InstallPackage200Response0Type `json:"type"` +} +type InstallPackage200Response0Type string +type InstallPackage200Response1 struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type InstallPackage200Response1Type `json:"type"` + Version *string `json:"version,omitempty"` +} +type InstallPackage200Response1Type string +type InstallPackage_200_Response_Item struct { + union json.RawMessage } // Status returns HTTPResponse.Status -func (r GetPackageResponse) Status() string { +func (r InstallPackageResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -3414,30 +14389,31 @@ func (r GetPackageResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r GetPackageResponse) StatusCode() int { +func (r InstallPackageResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type InstallPackageResponse struct { +type GetFleetServerHostsResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Meta *struct { - InstallSource *PackageInstallSource `json:"install_source,omitempty"` - } `json:"_meta,omitempty"` - Items []struct { - Id string `json:"id"` - Type PackageItemType `json:"type"` - } `json:"items"` + Items []ServerHost `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error } // Status returns HTTPResponse.Status -func (r InstallPackageResponse) Status() string { +func (r GetFleetServerHostsResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -3445,27 +14421,28 @@ func (r InstallPackageResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r InstallPackageResponse) StatusCode() int { +func (r GetFleetServerHostsResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type 
UpdatePackageResponse struct { +type CreateFleetServerHostResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Items []struct { - Id string `json:"id"` - Type PackageItemType `json:"type"` - } `json:"items"` + Item ServerHost `json:"item"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error } // Status returns HTTPResponse.Status -func (r UpdatePackageResponse) Status() string { +func (r CreateFleetServerHostResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -3473,24 +14450,28 @@ func (r UpdatePackageResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r UpdatePackageResponse) StatusCode() int { +func (r CreateFleetServerHostResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type PostFleetServerHostsResponse struct { +type DeleteFleetServerHostResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Item *FleetServerHost `json:"item,omitempty"` + Id string `json:"id"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error } // Status returns HTTPResponse.Status -func (r PostFleetServerHostsResponse) Status() string { +func (r DeleteFleetServerHostResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -3498,24 +14479,28 @@ func (r PostFleetServerHostsResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r PostFleetServerHostsResponse) StatusCode() int { +func (r DeleteFleetServerHostResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type DeleteFleetServerHostsResponse struct { +type GetFleetServerHostResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Id string `json:"id"` + Item ServerHost `json:"item"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error } // Status returns HTTPResponse.Status -func (r DeleteFleetServerHostsResponse) Status() string { +func (r GetFleetServerHostResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -3523,24 +14508,28 @@ func (r DeleteFleetServerHostsResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r DeleteFleetServerHostsResponse) StatusCode() int { +func (r GetFleetServerHostResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type GetOneFleetServerHostsResponse struct { +type UpdateFleetServerHostResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Item FleetServerHost `json:"item"` + Item ServerHost `json:"item"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error } // Status returns HTTPResponse.Status -func (r GetOneFleetServerHostsResponse) Status() string { +func (r UpdateFleetServerHostResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -3548,24 +14537,31 @@ func (r GetOneFleetServerHostsResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r GetOneFleetServerHostsResponse) 
StatusCode() int { +func (r UpdateFleetServerHostResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type UpdateFleetServerHostsResponse struct { +type GetOutputsResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Item FleetServerHost `json:"item"` + Items []OutputUnion `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error } // Status returns HTTPResponse.Status -func (r UpdateFleetServerHostsResponse) Status() string { +func (r GetOutputsResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -3573,24 +14569,28 @@ func (r UpdateFleetServerHostsResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r UpdateFleetServerHostsResponse) StatusCode() int { +func (r GetOutputsResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type PostOutputsResponse struct { +type CreateOutputResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Item *OutputCreateRequest `json:"item,omitempty"` + Item OutputUnion `json:"item"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error } // Status returns HTTPResponse.Status -func (r PostOutputsResponse) Status() string { +func (r CreateOutputResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -3598,7 +14598,7 @@ func (r PostOutputsResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r PostOutputsResponse) StatusCode() int { +func (r CreateOutputResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } @@ -3611,7 +14611,16 @@ type DeleteOutputResponse struct { JSON200 *struct { Id string `json:"id"` } - JSON400 *Error + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON404 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } } // Status returns HTTPResponse.Status @@ -3634,9 +14643,13 @@ type GetOutputResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Item *OutputCreateRequest `json:"item,omitempty"` + Item OutputUnion `json:"item"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error } // Status returns HTTPResponse.Status @@ -3659,9 +14672,13 @@ type UpdateOutputResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Item *OutputUpdateRequest `json:"item,omitempty"` + Item OutputUnion `json:"item"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error } // Status returns HTTPResponse.Status @@ -3680,14 +14697,54 @@ func (r UpdateOutputResponse) StatusCode() int { return 0 } +type GetPackagePoliciesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []PackagePolicy `json:"items"` + Page float32 `json:"page"` + PerPage 
float32 `json:"perPage"` + Total float32 `json:"total"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetPackagePoliciesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetPackagePoliciesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + type CreatePackagePolicyResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { Item PackagePolicy `json:"item"` } - JSON400 *Error - JSON409 *Error + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON409 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } } // Status returns HTTPResponse.Status @@ -3712,7 +14769,11 @@ type DeletePackagePolicyResponse struct { JSON200 *struct { Id string `json:"id"` } - JSON400 *Error + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } } // Status returns HTTPResponse.Status @@ -3737,7 +14798,14 @@ type GetPackagePolicyResponse struct { JSON200 *struct { Item PackagePolicy `json:"item"` } - JSON400 *Error + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON404 *struct { + Message string `json:"message"` + } } // Status returns HTTPResponse.Status @@ -3760,10 +14828,18 @@ type UpdatePackagePolicyResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Item PackagePolicy `json:"item"` - Sucess bool `json:"sucess"` + Item PackagePolicy `json:"item"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON403 *struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - JSON400 *Error } // Status returns HTTPResponse.Status @@ -3782,17 +14858,26 @@ func (r UpdatePackagePolicyResponse) StatusCode() int { return 0 } +// GetAgentPoliciesWithResponse request returning *GetAgentPoliciesResponse +func (c *ClientWithResponses) GetAgentPoliciesWithResponse(ctx context.Context, params *GetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*GetAgentPoliciesResponse, error) { + rsp, err := c.GetAgentPolicies(ctx, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetAgentPoliciesResponse(rsp) +} + // CreateAgentPolicyWithBodyWithResponse request with arbitrary body returning *CreateAgentPolicyResponse -func (c *ClientWithResponses) CreateAgentPolicyWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAgentPolicyResponse, error) { - rsp, err := c.CreateAgentPolicyWithBody(ctx, contentType, body, reqEditors...) 
+func (c *ClientWithResponses) CreateAgentPolicyWithBodyWithResponse(ctx context.Context, params *CreateAgentPolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAgentPolicyResponse, error) { + rsp, err := c.CreateAgentPolicyWithBody(ctx, params, contentType, body, reqEditors...) if err != nil { return nil, err } return ParseCreateAgentPolicyResponse(rsp) } -func (c *ClientWithResponses) CreateAgentPolicyWithResponse(ctx context.Context, body CreateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAgentPolicyResponse, error) { - rsp, err := c.CreateAgentPolicy(ctx, body, reqEditors...) +func (c *ClientWithResponses) CreateAgentPolicyWithResponse(ctx context.Context, params *CreateAgentPolicyParams, body CreateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAgentPolicyResponse, error) { + rsp, err := c.CreateAgentPolicy(ctx, params, body, reqEditors...) if err != nil { return nil, err } @@ -3816,26 +14901,26 @@ func (c *ClientWithResponses) DeleteAgentPolicyWithResponse(ctx context.Context, return ParseDeleteAgentPolicyResponse(rsp) } -// AgentPolicyInfoWithResponse request returning *AgentPolicyInfoResponse -func (c *ClientWithResponses) AgentPolicyInfoWithResponse(ctx context.Context, agentPolicyId string, reqEditors ...RequestEditorFn) (*AgentPolicyInfoResponse, error) { - rsp, err := c.AgentPolicyInfo(ctx, agentPolicyId, reqEditors...) +// GetAgentPolicyWithResponse request returning *GetAgentPolicyResponse +func (c *ClientWithResponses) GetAgentPolicyWithResponse(ctx context.Context, agentPolicyId string, params *GetAgentPolicyParams, reqEditors ...RequestEditorFn) (*GetAgentPolicyResponse, error) { + rsp, err := c.GetAgentPolicy(ctx, agentPolicyId, params, reqEditors...) if err != nil { return nil, err } - return ParseAgentPolicyInfoResponse(rsp) + return ParseGetAgentPolicyResponse(rsp) } // UpdateAgentPolicyWithBodyWithResponse request with arbitrary body returning *UpdateAgentPolicyResponse -func (c *ClientWithResponses) UpdateAgentPolicyWithBodyWithResponse(ctx context.Context, agentPolicyId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateAgentPolicyResponse, error) { - rsp, err := c.UpdateAgentPolicyWithBody(ctx, agentPolicyId, contentType, body, reqEditors...) +func (c *ClientWithResponses) UpdateAgentPolicyWithBodyWithResponse(ctx context.Context, agentPolicyId string, params *UpdateAgentPolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateAgentPolicyResponse, error) { + rsp, err := c.UpdateAgentPolicyWithBody(ctx, agentPolicyId, params, contentType, body, reqEditors...) if err != nil { return nil, err } return ParseUpdateAgentPolicyResponse(rsp) } -func (c *ClientWithResponses) UpdateAgentPolicyWithResponse(ctx context.Context, agentPolicyId string, body UpdateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateAgentPolicyResponse, error) { - rsp, err := c.UpdateAgentPolicy(ctx, agentPolicyId, body, reqEditors...) +func (c *ClientWithResponses) UpdateAgentPolicyWithResponse(ctx context.Context, agentPolicyId string, params *UpdateAgentPolicyParams, body UpdateAgentPolicyJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateAgentPolicyResponse, error) { + rsp, err := c.UpdateAgentPolicy(ctx, agentPolicyId, params, body, reqEditors...) 
if err != nil { return nil, err } @@ -3843,21 +14928,30 @@ func (c *ClientWithResponses) UpdateAgentPolicyWithResponse(ctx context.Context, } // GetEnrollmentApiKeysWithResponse request returning *GetEnrollmentApiKeysResponse -func (c *ClientWithResponses) GetEnrollmentApiKeysWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetEnrollmentApiKeysResponse, error) { - rsp, err := c.GetEnrollmentApiKeys(ctx, reqEditors...) +func (c *ClientWithResponses) GetEnrollmentApiKeysWithResponse(ctx context.Context, params *GetEnrollmentApiKeysParams, reqEditors ...RequestEditorFn) (*GetEnrollmentApiKeysResponse, error) { + rsp, err := c.GetEnrollmentApiKeys(ctx, params, reqEditors...) if err != nil { return nil, err } return ParseGetEnrollmentApiKeysResponse(rsp) } -// ListAllPackagesWithResponse request returning *ListAllPackagesResponse -func (c *ClientWithResponses) ListAllPackagesWithResponse(ctx context.Context, params *ListAllPackagesParams, reqEditors ...RequestEditorFn) (*ListAllPackagesResponse, error) { - rsp, err := c.ListAllPackages(ctx, params, reqEditors...) +// ListPackagesWithResponse request returning *ListPackagesResponse +func (c *ClientWithResponses) ListPackagesWithResponse(ctx context.Context, params *ListPackagesParams, reqEditors ...RequestEditorFn) (*ListPackagesResponse, error) { + rsp, err := c.ListPackages(ctx, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseListPackagesResponse(rsp) +} + +// InstallPackageByUploadWithBodyWithResponse request with arbitrary body returning *InstallPackageByUploadResponse +func (c *ClientWithResponses) InstallPackageByUploadWithBodyWithResponse(ctx context.Context, params *InstallPackageByUploadParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*InstallPackageByUploadResponse, error) { + rsp, err := c.InstallPackageByUploadWithBody(ctx, params, contentType, body, reqEditors...) if err != nil { return nil, err } - return ParseListAllPackagesResponse(rsp) + return ParseInstallPackageByUploadResponse(rsp) } // DeletePackageWithBodyWithResponse request with arbitrary body returning *DeletePackageResponse @@ -3903,90 +14997,91 @@ func (c *ClientWithResponses) InstallPackageWithResponse(ctx context.Context, pk return ParseInstallPackageResponse(rsp) } -// UpdatePackageWithBodyWithResponse request with arbitrary body returning *UpdatePackageResponse -func (c *ClientWithResponses) UpdatePackageWithBodyWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *UpdatePackageParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdatePackageResponse, error) { - rsp, err := c.UpdatePackageWithBody(ctx, pkgName, pkgVersion, params, contentType, body, reqEditors...) +// GetFleetServerHostsWithResponse request returning *GetFleetServerHostsResponse +func (c *ClientWithResponses) GetFleetServerHostsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetServerHostsResponse, error) { + rsp, err := c.GetFleetServerHosts(ctx, reqEditors...) if err != nil { return nil, err } - return ParseUpdatePackageResponse(rsp) + return ParseGetFleetServerHostsResponse(rsp) } -func (c *ClientWithResponses) UpdatePackageWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *UpdatePackageParams, body UpdatePackageJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdatePackageResponse, error) { - rsp, err := c.UpdatePackage(ctx, pkgName, pkgVersion, params, body, reqEditors...) 
+// CreateFleetServerHostWithBodyWithResponse request with arbitrary body returning *CreateFleetServerHostResponse +func (c *ClientWithResponses) CreateFleetServerHostWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateFleetServerHostResponse, error) { + rsp, err := c.CreateFleetServerHostWithBody(ctx, contentType, body, reqEditors...) if err != nil { return nil, err } - return ParseUpdatePackageResponse(rsp) + return ParseCreateFleetServerHostResponse(rsp) } -// PostFleetServerHostsWithBodyWithResponse request with arbitrary body returning *PostFleetServerHostsResponse -func (c *ClientWithResponses) PostFleetServerHostsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetServerHostsResponse, error) { - rsp, err := c.PostFleetServerHostsWithBody(ctx, contentType, body, reqEditors...) +func (c *ClientWithResponses) CreateFleetServerHostWithResponse(ctx context.Context, body CreateFleetServerHostJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateFleetServerHostResponse, error) { + rsp, err := c.CreateFleetServerHost(ctx, body, reqEditors...) if err != nil { return nil, err } - return ParsePostFleetServerHostsResponse(rsp) + return ParseCreateFleetServerHostResponse(rsp) } -func (c *ClientWithResponses) PostFleetServerHostsWithResponse(ctx context.Context, body PostFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetServerHostsResponse, error) { - rsp, err := c.PostFleetServerHosts(ctx, body, reqEditors...) +// DeleteFleetServerHostWithResponse request returning *DeleteFleetServerHostResponse +func (c *ClientWithResponses) DeleteFleetServerHostWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*DeleteFleetServerHostResponse, error) { + rsp, err := c.DeleteFleetServerHost(ctx, itemId, reqEditors...) if err != nil { return nil, err } - return ParsePostFleetServerHostsResponse(rsp) + return ParseDeleteFleetServerHostResponse(rsp) } -// DeleteFleetServerHostsWithResponse request returning *DeleteFleetServerHostsResponse -func (c *ClientWithResponses) DeleteFleetServerHostsWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*DeleteFleetServerHostsResponse, error) { - rsp, err := c.DeleteFleetServerHosts(ctx, itemId, reqEditors...) +// GetFleetServerHostWithResponse request returning *GetFleetServerHostResponse +func (c *ClientWithResponses) GetFleetServerHostWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*GetFleetServerHostResponse, error) { + rsp, err := c.GetFleetServerHost(ctx, itemId, reqEditors...) if err != nil { return nil, err } - return ParseDeleteFleetServerHostsResponse(rsp) + return ParseGetFleetServerHostResponse(rsp) } -// GetOneFleetServerHostsWithResponse request returning *GetOneFleetServerHostsResponse -func (c *ClientWithResponses) GetOneFleetServerHostsWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*GetOneFleetServerHostsResponse, error) { - rsp, err := c.GetOneFleetServerHosts(ctx, itemId, reqEditors...) 
+// UpdateFleetServerHostWithBodyWithResponse request with arbitrary body returning *UpdateFleetServerHostResponse +func (c *ClientWithResponses) UpdateFleetServerHostWithBodyWithResponse(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateFleetServerHostResponse, error) { + rsp, err := c.UpdateFleetServerHostWithBody(ctx, itemId, contentType, body, reqEditors...) if err != nil { return nil, err } - return ParseGetOneFleetServerHostsResponse(rsp) + return ParseUpdateFleetServerHostResponse(rsp) } -// UpdateFleetServerHostsWithBodyWithResponse request with arbitrary body returning *UpdateFleetServerHostsResponse -func (c *ClientWithResponses) UpdateFleetServerHostsWithBodyWithResponse(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateFleetServerHostsResponse, error) { - rsp, err := c.UpdateFleetServerHostsWithBody(ctx, itemId, contentType, body, reqEditors...) +func (c *ClientWithResponses) UpdateFleetServerHostWithResponse(ctx context.Context, itemId string, body UpdateFleetServerHostJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateFleetServerHostResponse, error) { + rsp, err := c.UpdateFleetServerHost(ctx, itemId, body, reqEditors...) if err != nil { return nil, err } - return ParseUpdateFleetServerHostsResponse(rsp) + return ParseUpdateFleetServerHostResponse(rsp) } -func (c *ClientWithResponses) UpdateFleetServerHostsWithResponse(ctx context.Context, itemId string, body UpdateFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateFleetServerHostsResponse, error) { - rsp, err := c.UpdateFleetServerHosts(ctx, itemId, body, reqEditors...) +// GetOutputsWithResponse request returning *GetOutputsResponse +func (c *ClientWithResponses) GetOutputsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetOutputsResponse, error) { + rsp, err := c.GetOutputs(ctx, reqEditors...) if err != nil { return nil, err } - return ParseUpdateFleetServerHostsResponse(rsp) + return ParseGetOutputsResponse(rsp) } -// PostOutputsWithBodyWithResponse request with arbitrary body returning *PostOutputsResponse -func (c *ClientWithResponses) PostOutputsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostOutputsResponse, error) { - rsp, err := c.PostOutputsWithBody(ctx, contentType, body, reqEditors...) +// CreateOutputWithBodyWithResponse request with arbitrary body returning *CreateOutputResponse +func (c *ClientWithResponses) CreateOutputWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateOutputResponse, error) { + rsp, err := c.CreateOutputWithBody(ctx, contentType, body, reqEditors...) if err != nil { return nil, err } - return ParsePostOutputsResponse(rsp) + return ParseCreateOutputResponse(rsp) } -func (c *ClientWithResponses) PostOutputsWithResponse(ctx context.Context, body PostOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostOutputsResponse, error) { - rsp, err := c.PostOutputs(ctx, body, reqEditors...) +func (c *ClientWithResponses) CreateOutputWithResponse(ctx context.Context, body CreateOutputJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateOutputResponse, error) { + rsp, err := c.CreateOutput(ctx, body, reqEditors...) 
if err != nil { return nil, err } - return ParsePostOutputsResponse(rsp) + return ParseCreateOutputResponse(rsp) } // DeleteOutputWithResponse request returning *DeleteOutputResponse @@ -4024,6 +15119,15 @@ func (c *ClientWithResponses) UpdateOutputWithResponse(ctx context.Context, outp return ParseUpdateOutputResponse(rsp) } +// GetPackagePoliciesWithResponse request returning *GetPackagePoliciesResponse +func (c *ClientWithResponses) GetPackagePoliciesWithResponse(ctx context.Context, params *GetPackagePoliciesParams, reqEditors ...RequestEditorFn) (*GetPackagePoliciesResponse, error) { + rsp, err := c.GetPackagePolicies(ctx, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetPackagePoliciesResponse(rsp) +} + // CreatePackagePolicyWithBodyWithResponse request with arbitrary body returning *CreatePackagePolicyResponse func (c *ClientWithResponses) CreatePackagePolicyWithBodyWithResponse(ctx context.Context, params *CreatePackagePolicyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreatePackagePolicyResponse, error) { rsp, err := c.CreatePackagePolicyWithBody(ctx, params, contentType, body, reqEditors...) @@ -4076,6 +15180,48 @@ func (c *ClientWithResponses) UpdatePackagePolicyWithResponse(ctx context.Contex return ParseUpdatePackagePolicyResponse(rsp) } +// ParseGetAgentPoliciesResponse parses an HTTP response from a GetAgentPoliciesWithResponse call +func ParseGetAgentPoliciesResponse(rsp *http.Response) (*GetAgentPoliciesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &GetAgentPoliciesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []AgentPolicy `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil +} + // ParseCreateAgentPolicyResponse parses an HTTP response from a CreateAgentPolicyWithResponse call func ParseCreateAgentPolicyResponse(rsp *http.Response) (*CreateAgentPolicyResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -4092,7 +15238,7 @@ func ParseCreateAgentPolicyResponse(rsp *http.Response) (*CreateAgentPolicyRespo switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Item *AgentPolicy `json:"item,omitempty"` + Item AgentPolicy `json:"item"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4100,7 +15246,11 @@ func ParseCreateAgentPolicyResponse(rsp *http.Response) (*CreateAgentPolicyRespo response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } 
@@ -4127,8 +15277,8 @@ func ParseDeleteAgentPolicyResponse(rsp *http.Response) (*DeleteAgentPolicyRespo switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Id string `json:"id"` - Success bool `json:"success"` + Id string `json:"id"` + Name string `json:"name"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4136,7 +15286,11 @@ func ParseDeleteAgentPolicyResponse(rsp *http.Response) (*DeleteAgentPolicyRespo response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4147,15 +15301,15 @@ func ParseDeleteAgentPolicyResponse(rsp *http.Response) (*DeleteAgentPolicyRespo return response, nil } -// ParseAgentPolicyInfoResponse parses an HTTP response from a AgentPolicyInfoWithResponse call -func ParseAgentPolicyInfoResponse(rsp *http.Response) (*AgentPolicyInfoResponse, error) { +// ParseGetAgentPolicyResponse parses an HTTP response from a GetAgentPolicyWithResponse call +func ParseGetAgentPolicyResponse(rsp *http.Response) (*GetAgentPolicyResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &AgentPolicyInfoResponse{ + response := &GetAgentPolicyResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -4171,7 +15325,11 @@ func ParseAgentPolicyInfoResponse(rsp *http.Response) (*AgentPolicyInfoResponse, response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4206,7 +15364,11 @@ func ParseUpdateAgentPolicyResponse(rsp *http.Response) (*UpdateAgentPolicyRespo response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4235,10 +15397,27 @@ func ParseGetEnrollmentApiKeysResponse(rsp *http.Response) (*GetEnrollmentApiKey var dest struct { Items []EnrollmentApiKey `json:"items"` // Deprecated: - List *[]EnrollmentApiKey `json:"list,omitempty"` - Page float32 `json:"page"` - PerPage float32 `json:"perPage"` - Total float32 `json:"total"` + List []struct { + // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. + Active bool `json:"active"` + + // ApiKey The enrollment API key (token) used for enrolling Elastic Agents. + ApiKey string `json:"api_key"` + + // ApiKeyId The ID of the API key in the Security API. + ApiKeyId string `json:"api_key_id"` + CreatedAt string `json:"created_at"` + Id string `json:"id"` + + // Name The name of the enrollment API key. + Name *string `json:"name,omitempty"` + + // PolicyId The ID of the agent policy the Elastic Agent will be enrolled in. 
+ PolicyId *string `json:"policy_id,omitempty"` + } `json:"list"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4246,7 +15425,11 @@ func ParseGetEnrollmentApiKeysResponse(rsp *http.Response) (*GetEnrollmentApiKey response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4257,29 +15440,36 @@ func ParseGetEnrollmentApiKeysResponse(rsp *http.Response) (*GetEnrollmentApiKey return response, nil } -// ParseListAllPackagesResponse parses an HTTP response from a ListAllPackagesWithResponse call -func ParseListAllPackagesResponse(rsp *http.Response) (*ListAllPackagesResponse, error) { +// ParseListPackagesResponse parses an HTTP response from a ListPackagesWithResponse call +func ParseListPackagesResponse(rsp *http.Response) (*ListPackagesResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &ListAllPackagesResponse{ + response := &ListPackagesResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest GetPackagesResponse + var dest struct { + Items []PackageListItem `json:"items"` + Response *[]ListPackages_200_Response_Item `json:"response,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4290,6 +15480,22 @@ func ParseListAllPackagesResponse(rsp *http.Response) (*ListAllPackagesResponse, return response, nil } +// ParseInstallPackageByUploadResponse parses an HTTP response from a InstallPackageByUploadWithResponse call +func ParseInstallPackageByUploadResponse(rsp *http.Response) (*InstallPackageByUploadResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &InstallPackageByUploadResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil +} + // ParseDeletePackageResponse parses an HTTP response from a DeletePackageWithResponse call func ParseDeletePackageResponse(rsp *http.Response) (*DeletePackageResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -4306,10 +15512,9 @@ func ParseDeletePackageResponse(rsp *http.Response) (*DeletePackageResponse, err switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Items []struct { - Id string `json:"id"` - Type PackageItemType `json:"type"` - } `json:"items"` + Items []DeletePackage_200_Items_Item `json:"items"` + // Deprecated: + Response *[]DeletePackage_200_Response_Item `json:"response,omitempty"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4317,7 +15522,11 @@ func ParseDeletePackageResponse(rsp *http.Response) 
(*DeletePackageResponse, err response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4344,14 +15553,12 @@ func ParseGetPackageResponse(rsp *http.Response) (*GetPackageResponse, error) { switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Item *PackageInfo `json:"item,omitempty"` - KeepPoliciesUpToDate *bool `json:"keepPoliciesUpToDate,omitempty"` - LatestVersion *string `json:"latestVersion,omitempty"` - LicensePath *string `json:"licensePath,omitempty"` - Notice *string `json:"notice,omitempty"` + Item PackageInfo `json:"item"` + Metadata *struct { + HasPolicies bool `json:"has_policies"` + } `json:"metadata,omitempty"` // Deprecated: - SavedObject map[string]interface{} `json:"savedObject"` - Status PackageStatus `json:"status"` + Response *GetPackage_200_Response `json:"response,omitempty"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4359,7 +15566,11 @@ func ParseGetPackageResponse(rsp *http.Response) (*GetPackageResponse, error) { response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4386,13 +15597,12 @@ func ParseInstallPackageResponse(rsp *http.Response) (*InstallPackageResponse, e switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Meta *struct { - InstallSource *PackageInstallSource `json:"install_source,omitempty"` - } `json:"_meta,omitempty"` - Items []struct { - Id string `json:"id"` - Type PackageItemType `json:"type"` - } `json:"items"` + Meta struct { + InstallSource string `json:"install_source"` + } `json:"_meta"` + Items []InstallPackage_200_Items_Item `json:"items"` + // Deprecated: + Response *[]InstallPackage_200_Response_Item `json:"response,omitempty"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4400,7 +15610,11 @@ func ParseInstallPackageResponse(rsp *http.Response) (*InstallPackageResponse, e response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4411,15 +15625,15 @@ func ParseInstallPackageResponse(rsp *http.Response) (*InstallPackageResponse, e return response, nil } -// ParseUpdatePackageResponse parses an HTTP response from a UpdatePackageWithResponse call -func ParseUpdatePackageResponse(rsp *http.Response) (*UpdatePackageResponse, error) { +// ParseGetFleetServerHostsResponse parses an HTTP response from a GetFleetServerHostsWithResponse call +func ParseGetFleetServerHostsResponse(rsp *http.Response) (*GetFleetServerHostsResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := 
&UpdatePackageResponse{ + response := &GetFleetServerHostsResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -4427,10 +15641,10 @@ func ParseUpdatePackageResponse(rsp *http.Response) (*UpdatePackageResponse, err switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Items []struct { - Id string `json:"id"` - Type PackageItemType `json:"type"` - } `json:"items"` + Items []ServerHost `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4438,7 +15652,11 @@ func ParseUpdatePackageResponse(rsp *http.Response) (*UpdatePackageResponse, err response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4449,15 +15667,15 @@ func ParseUpdatePackageResponse(rsp *http.Response) (*UpdatePackageResponse, err return response, nil } -// ParsePostFleetServerHostsResponse parses an HTTP response from a PostFleetServerHostsWithResponse call -func ParsePostFleetServerHostsResponse(rsp *http.Response) (*PostFleetServerHostsResponse, error) { +// ParseCreateFleetServerHostResponse parses an HTTP response from a CreateFleetServerHostWithResponse call +func ParseCreateFleetServerHostResponse(rsp *http.Response) (*CreateFleetServerHostResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PostFleetServerHostsResponse{ + response := &CreateFleetServerHostResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -4465,7 +15683,7 @@ func ParsePostFleetServerHostsResponse(rsp *http.Response) (*PostFleetServerHost switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Item *FleetServerHost `json:"item,omitempty"` + Item ServerHost `json:"item"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4473,7 +15691,11 @@ func ParsePostFleetServerHostsResponse(rsp *http.Response) (*PostFleetServerHost response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4484,15 +15706,15 @@ func ParsePostFleetServerHostsResponse(rsp *http.Response) (*PostFleetServerHost return response, nil } -// ParseDeleteFleetServerHostsResponse parses an HTTP response from a DeleteFleetServerHostsWithResponse call -func ParseDeleteFleetServerHostsResponse(rsp *http.Response) (*DeleteFleetServerHostsResponse, error) { +// ParseDeleteFleetServerHostResponse parses an HTTP response from a DeleteFleetServerHostWithResponse call +func ParseDeleteFleetServerHostResponse(rsp *http.Response) (*DeleteFleetServerHostResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &DeleteFleetServerHostsResponse{ + response := &DeleteFleetServerHostResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -4508,7 +15730,50 @@ func 
ParseDeleteFleetServerHostsResponse(rsp *http.Response) (*DeleteFleetServer response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil +} + +// ParseGetFleetServerHostResponse parses an HTTP response from a GetFleetServerHostWithResponse call +func ParseGetFleetServerHostResponse(rsp *http.Response) (*GetFleetServerHostResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &GetFleetServerHostResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item ServerHost `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4519,15 +15784,15 @@ func ParseDeleteFleetServerHostsResponse(rsp *http.Response) (*DeleteFleetServer return response, nil } -// ParseGetOneFleetServerHostsResponse parses an HTTP response from a GetOneFleetServerHostsWithResponse call -func ParseGetOneFleetServerHostsResponse(rsp *http.Response) (*GetOneFleetServerHostsResponse, error) { +// ParseUpdateFleetServerHostResponse parses an HTTP response from a UpdateFleetServerHostWithResponse call +func ParseUpdateFleetServerHostResponse(rsp *http.Response) (*UpdateFleetServerHostResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetOneFleetServerHostsResponse{ + response := &UpdateFleetServerHostResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -4535,7 +15800,7 @@ func ParseGetOneFleetServerHostsResponse(rsp *http.Response) (*GetOneFleetServer switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Item FleetServerHost `json:"item"` + Item ServerHost `json:"item"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4543,7 +15808,11 @@ func ParseGetOneFleetServerHostsResponse(rsp *http.Response) (*GetOneFleetServer response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4554,15 +15823,15 @@ func ParseGetOneFleetServerHostsResponse(rsp *http.Response) (*GetOneFleetServer return response, nil } -// ParseUpdateFleetServerHostsResponse parses an HTTP response from a UpdateFleetServerHostsWithResponse call -func ParseUpdateFleetServerHostsResponse(rsp *http.Response) (*UpdateFleetServerHostsResponse, error) { +// ParseGetOutputsResponse parses an HTTP response from a GetOutputsWithResponse call 
+func ParseGetOutputsResponse(rsp *http.Response) (*GetOutputsResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &UpdateFleetServerHostsResponse{ + response := &GetOutputsResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -4570,7 +15839,10 @@ func ParseUpdateFleetServerHostsResponse(rsp *http.Response) (*UpdateFleetServer switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Item FleetServerHost `json:"item"` + Items []OutputUnion `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4578,7 +15850,11 @@ func ParseUpdateFleetServerHostsResponse(rsp *http.Response) (*UpdateFleetServer response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4589,15 +15865,15 @@ func ParseUpdateFleetServerHostsResponse(rsp *http.Response) (*UpdateFleetServer return response, nil } -// ParsePostOutputsResponse parses an HTTP response from a PostOutputsWithResponse call -func ParsePostOutputsResponse(rsp *http.Response) (*PostOutputsResponse, error) { +// ParseCreateOutputResponse parses an HTTP response from a CreateOutputWithResponse call +func ParseCreateOutputResponse(rsp *http.Response) (*CreateOutputResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PostOutputsResponse{ + response := &CreateOutputResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -4605,7 +15881,7 @@ func ParsePostOutputsResponse(rsp *http.Response) (*PostOutputsResponse, error) switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Item *OutputCreateRequest `json:"item,omitempty"` + Item OutputUnion `json:"item"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4613,7 +15889,11 @@ func ParsePostOutputsResponse(rsp *http.Response) (*PostOutputsResponse, error) response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4648,12 +15928,27 @@ func ParseDeleteOutputResponse(rsp *http.Response) (*DeleteOutputResponse, error response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON400 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); 
err != nil { + return nil, err + } + response.JSON404 = &dest + } return response, nil @@ -4675,7 +15970,7 @@ func ParseGetOutputResponse(rsp *http.Response) (*GetOutputResponse, error) { switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Item *OutputCreateRequest `json:"item,omitempty"` + Item OutputUnion `json:"item"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4683,7 +15978,11 @@ func ParseGetOutputResponse(rsp *http.Response) (*GetOutputResponse, error) { response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4710,7 +16009,49 @@ func ParseUpdateOutputResponse(rsp *http.Response) (*UpdateOutputResponse, error switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Item *OutputUpdateRequest `json:"item,omitempty"` + Item OutputUnion `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil +} + +// ParseGetPackagePoliciesResponse parses an HTTP response from a GetPackagePoliciesWithResponse call +func ParseGetPackagePoliciesResponse(rsp *http.Response) (*GetPackagePoliciesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &GetPackagePoliciesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []PackagePolicy `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4718,7 +16059,11 @@ func ParseUpdateOutputResponse(rsp *http.Response) (*UpdateOutputResponse, error response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4753,14 +16098,22 @@ func ParseCreatePackagePolicyResponse(rsp *http.Response) (*CreatePackagePolicyR response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON400 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: - var dest Error + var dest struct { + Error 
*string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4795,7 +16148,11 @@ func ParseDeletePackagePolicyResponse(rsp *http.Response) (*DeletePackagePolicyR response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -4830,12 +16187,25 @@ func ParseGetPackagePolicyResponse(rsp *http.Response) (*GetPackagePolicyRespons response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON400 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest struct { + Message string `json:"message"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + } return response, nil @@ -4857,8 +16227,7 @@ func ParseUpdatePackagePolicyResponse(rsp *http.Response) (*UpdatePackagePolicyR switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Item PackagePolicy `json:"item"` - Sucess bool `json:"sucess"` + Item PackagePolicy `json:"item"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -4866,12 +16235,27 @@ func ParseUpdatePackagePolicyResponse(rsp *http.Response) (*UpdatePackagePolicyR response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest Error + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON400 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest struct { + Error *string `json:"error,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + } return response, nil diff --git a/generated/fleet/getschema.go b/generated/fleet/getschema.go deleted file mode 100644 index b08753c35..000000000 --- a/generated/fleet/getschema.go +++ /dev/null @@ -1,573 +0,0 @@ -//go:build ignore -// +build ignore - -package main - -import ( - "encoding/json" - "flag" - "fmt" - "io" - "log" - "net/http" - "os" - "strconv" - "strings" -) - -const ( - fleetSchemaURLTmpl = "https://raw.githubusercontent.com/elastic/kibana/%s/x-pack/plugins/fleet/common/openapi/bundled.json" -) - -type Schema struct { - Paths map[string]*Path `json:"paths"` - OpenAPIVersion string `json:"openapi"` - Tags []any `json:"tags,omitempty"` - Servers []any `json:"servers,omitempty"` - Components Fields `json:"components,omitempty"` - Security []any `json:"security,omitempty"` - Info map[string]any `json:"info"` -} - -type Path struct { - Parameters []Fields 
`json:"parameters,omitempty"` - Get *Endpoint `json:"get,omitempty"` - Post *Endpoint `json:"post,omitempty"` - Put *Endpoint `json:"put,omitempty"` - Delete *Endpoint `json:"delete,omitempty"` -} - -func (p *Path) GetEndpoint(method string) *Endpoint { - switch strings.ToUpper(method) { - case http.MethodGet: - return p.Get - case http.MethodPost: - return p.Post - case http.MethodPut: - return p.Put - case http.MethodDelete: - return p.Delete - } - - return nil -} - -type Endpoint struct { - Summary string `json:"summary,omitempty"` - Tags []string `json:"tags,omitempty"` - Responses Fields `json:"responses,omitempty"` - RequestBody Fields `json:"requestBody,omitempty"` - OperationID string `json:"operationId,omitempty"` - Parameters []Fields `json:"parameters,omitempty"` - Deprecated bool `json:"deprecated,omitempty"` -} - -type TransformFunc func(schema *Schema) - -var transformers = []TransformFunc{ - transformFilterPaths, - transformOutputTypeRequired, - transformOutputResponseType, - transformSchemasInputsType, - transformInlinePackageDefinitions, - transformAddPackagePolicyVars, - transformAddPackagePolicySecretReferences, - transformFixPackageSearchResult, -} - -// transformFilterPaths filters the paths in a schema down to -// a specified list of endpoints and methods. -func transformFilterPaths(schema *Schema) { - var includePaths = map[string][]string{ - "/agent_policies": {"post"}, - "/agent_policies/{agentPolicyId}": {"get", "put"}, - "/agent_policies/delete": {"post"}, - "/enrollment_api_keys": {"get"}, - "/fleet_server_hosts": {"post"}, - "/fleet_server_hosts/{itemId}": {"get", "put", "delete"}, - "/outputs": {"post"}, - "/outputs/{outputId}": {"get", "put", "delete"}, - "/package_policies": {"post"}, - "/package_policies/{packagePolicyId}": {"get", "put", "delete"}, - "/epm/packages/{pkgName}/{pkgVersion}": {"get", "put", "post", "delete"}, - "/epm/packages": {"get"}, - } - - // filterKbnXsrfParameter filters out an entry if it is a kbn_xsrf parameter. - // Returns a copy of the slice if it was modified, otherwise returns the original - // slice if no match was found. - filterKbnXsrfParameter := func(parameters []Fields) []Fields { - removeIndex := -1 - - for i, param := range parameters { - if ref, ok := param["$ref"].(string); ok && ref == "#/components/parameters/kbn_xsrf" { - removeIndex = i - break - } - } - if removeIndex != -1 { - ret := make([]Fields, 0) - ret = append(ret, parameters[:removeIndex]...) - return append(ret, parameters[removeIndex+1:]...) - } - - return parameters - } - - for path, pathInfo := range schema.Paths { - // Remove paths not in filter list. - if _, exists := includePaths[path]; !exists { - delete(schema.Paths, path) - continue - } - - // Filter out kbn-xsrf parameter (already set by API client). - pathInfo.Parameters = filterKbnXsrfParameter(pathInfo.Parameters) - - // Filter out endpoints not if filter list, filter out kbn-xsrf - // parameter in endpoint (already set by API client). 
- allowedMethods := includePaths[path] - filterEndpointFn := func(endpoint *Endpoint, method string) *Endpoint { - if endpoint == nil { - return nil - } - if !stringInSlice(method, allowedMethods) { - return nil - } - - endpoint.Parameters = filterKbnXsrfParameter(endpoint.Parameters) - - return endpoint - } - pathInfo.Get = filterEndpointFn(pathInfo.Get, "get") - pathInfo.Post = filterEndpointFn(pathInfo.Post, "post") - pathInfo.Put = filterEndpointFn(pathInfo.Put, "put") - pathInfo.Delete = filterEndpointFn(pathInfo.Delete, "delete") - } - - return -} - -// transformOutputTypeRequired ensures that the type key is -// in the list of required keys for an output type. -func transformOutputTypeRequired(schema *Schema) { - path := []string{ - "schemas.output_create_request_elasticsearch.required", - "schemas.output_create_request_kafka.required", - "schemas.output_create_request_logstash.required", - "schemas.output_update_request_elasticsearch.required", - "schemas.output_update_request_kafka.required", - "schemas.output_update_request_logstash.required", - } - - for _, v := range path { - raw, ok := schema.Components.Get(v) - if !ok { - continue - } - required, ok := raw.([]any) - if !ok { - continue - } - - if stringInAnySlice("type", required) { - continue - } - - required = append(required, "type") - schema.Components.Set(v, required) - } -} - -// transformOutputTypeRequired ensures that the response object is wrapped -// in an `item` key/value pair. Remove once the following issue is closed: -// https://github.com/elastic/kibana/issues/167181 -func transformOutputResponseType(schema *Schema) { - methods := []string{http.MethodGet, http.MethodPut} - for _, method := range methods { - endpoint := schema.Paths["/outputs/{outputId}"].GetEndpoint(method) - resSchema, ok := endpoint.Responses.GetFields("200.content.application/json.schema") - if !ok { - continue - } - ref, ok := resSchema.Get("$ref") - if ok { - resSchema.Set("type", "object") - resSchema.Set("properties.item.$ref", ref) - resSchema.Delete("$ref") - } - } -} - -// transformSchemasInputsType transforms the "inputs" property on the -// "new_package_policy" component schema from an array to an object, -// so it aligns with expected data type from the Fleet API. -func transformSchemasInputsType(schema *Schema) { - inputs, ok := schema.Components.GetFields("schemas.new_package_policy.properties.inputs") - if !ok { - return - } - - inputs.Set("items.properties.streams.type", "object") - - inputs.Set("type", "object") - inputs.Move("items", "additionalProperties") - - // Drop package_policies from Agent Policy, these will be managed separately - // through the Package Policy resource. - agentPolicy, _ := schema.Components.GetFields("schemas.agent_policy") - agentPolicy.Delete("properties.package_policies") -} - -// transformInlinePackageDefinitions relocates inline type definitions for the -// EPM endpoints to the dedicated schemas section of the OpenAPI schema. 
This needs -// to be done as there is a bug in the OpenAPI generator which causes types to -// be generated with invalid names: -// https://github.com/deepmap/oapi-codegen/issues/1121 -func transformInlinePackageDefinitions(schema *Schema) { - epmPath, ok := schema.Paths["/epm/packages/{pkgName}/{pkgVersion}"] - if !ok { - panic("epm path not found") - } - - // Get - { - props, ok := epmPath.Get.Responses.GetFields("200.content.application/json.schema.allOf.1.properties") - if !ok { - panic("properties not found") - } - - // status needs to be moved to schemes and a ref inserted in its place. - value, _ := props.Get("status") - schema.Components.Set("schemas.package_status", value) - props.Delete("status") - props.Set("status.$ref", "#/components/schemas/package_status") - } - - // Post - { - props, ok := epmPath.Post.Responses.GetFields("200.content.application/json.schema.properties") - if !ok { - panic("properties not found") - } - - // _meta.properties.install_source - value, _ := props.GetFields("_meta.properties.install_source") - schema.Components.Set("schemas.package_install_source", value) - props.Delete("_meta.properties.install_source") - props.Set("_meta.properties.install_source.$ref", "#/components/schemas/package_install_source") - - // items.items.properties.type - value, _ = props.GetFields("items.items.properties.type") - schema.Components.Set("schemas.package_item_type", value) - props.Delete("items.items.properties.type") - props.Set("items.items.properties.type.$ref", "#/components/schemas/package_item_type") - } - - // Put - { - props, ok := epmPath.Put.Responses.GetFields("200.content.application/json.schema.properties") - if !ok { - panic("properties not found") - } - - // items.items.properties.type (definition already moved by Post) - props.Delete("items.items.properties.type") - props.Set("items.items.properties.type.$ref", "#/components/schemas/package_item_type") - } - - // Delete - { - props, ok := epmPath.Delete.Responses.GetFields("200.content.application/json.schema.properties") - if !ok { - panic("properties not found") - } - - // items.items.properties.type (definition already moved by Post) - props.Delete("items.items.properties.type") - props.Set("items.items.properties.type.$ref", "#/components/schemas/package_item_type") - } - - // Move embedded objects (structs) to schemas so Go-types are generated. 
- { - // package_policy_request_input_stream - field, _ := schema.Components.GetFields("schemas.package_policy_request.properties.inputs.additionalProperties.properties.streams") - props, _ := field.Get("additionalProperties") - schema.Components.Set("schemas.package_policy_request_input_stream", props) - field.Delete("additionalProperties") - field.Set("additionalProperties.$ref", "#/components/schemas/package_policy_request_input_stream") - - // package_policy_request_input - field, _ = schema.Components.GetFields("schemas.package_policy_request.properties.inputs") - props, _ = field.Get("additionalProperties") - schema.Components.Set("schemas.package_policy_request_input", props) - field.Delete("additionalProperties") - field.Set("additionalProperties.$ref", "#/components/schemas/package_policy_request_input") - - // package_policy_package_info - field, _ = schema.Components.GetFields("schemas.new_package_policy.properties") - props, _ = field.Get("package") - schema.Components.Set("schemas.package_policy_package_info", props) - field.Delete("package") - field.Set("package.$ref", "#/components/schemas/package_policy_package_info") - - // package_policy_input - field, _ = schema.Components.GetFields("schemas.new_package_policy.properties.inputs") - props, _ = field.Get("additionalProperties") - schema.Components.Set("schemas.package_policy_input", props) - field.Delete("additionalProperties") - field.Set("additionalProperties.$ref", "#/components/schemas/package_policy_input") - } -} - -// transformAddPackagePolicyVars adds the missing 'vars' field to the -// PackagePolicy schema struct. -func transformAddPackagePolicyVars(schema *Schema) { - inputs, ok := schema.Components.GetFields("schemas.new_package_policy.properties") - if !ok { - panic("properties not found") - } - - // Only add it if it doesn't exist. - if _, ok = inputs.Get("vars"); !ok { - inputs.Set("vars.type", "object") - } -} - -// transformAddPackagePolicySecretReferences adds the missing 'secretReferences' -// field to the PackagePolicy schema struct. -func transformAddPackagePolicySecretReferences(schema *Schema) { - inputs, ok := schema.Components.GetFields("schemas.new_package_policy.properties") - if !ok { - panic("properties not found") - } - - // Only add it if it doesn't exist. - if _, ok = inputs.Get("secret_references"); !ok { - inputs.Set("secret_references", map[string]any{ - "type": "array", - "items": map[string]any{ - "type": "object", - "properties": map[string]any{ - "id": map[string]any{ - "type": "string", - }, - }, - }, - }) - } -} - -// transformFixPackageSearchResult removes unneeded fields from the -// SearchResult struct. These fields are also causing parsing errors. -func transformFixPackageSearchResult(schema *Schema) { - properties, ok := schema.Components.GetFields("schemas.search_result.properties") - if !ok { - panic("properties not found") - } - properties.Delete("icons") - properties.Delete("installationInfo") -} - -// downloadFile will download a file from url and return the -// bytes. If the request fails, or a non 200 error code is -// observed in the response, an error is returned instead. 
-func downloadFile(url string) ([]byte, error) { - resp, err := http.Get(url) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("unexpected status: HTTP %v: %v", resp.StatusCode, resp.Status) - } - - return io.ReadAll(resp.Body) -} - -func main() { - outFile := flag.String("o", "", "output file") - inFile := flag.String("i", "", "input file") - apiVersion := flag.String("v", "main", "api version") - flag.Parse() - - if *outFile == "" { - flag.Usage() - os.Exit(1) - } - - var err error - var rawData []byte - if *inFile != "" { - rawData, err = os.ReadFile(*inFile) - } else { - rawData, err = downloadFile(fmt.Sprintf(fleetSchemaURLTmpl, *apiVersion)) - } - if err != nil { - log.Fatal(err) - } - - var schema Schema - if err = json.Unmarshal(rawData, &schema); err != nil { - log.Fatal(err) - } - - for _, fn := range transformers { - fn(&schema) - } - - outData, err := json.MarshalIndent(&schema, "", " ") - if err != nil { - log.Fatal(err) - } - if err = os.WriteFile(*outFile, outData, 0664); err != nil { - log.Fatal(err) - } -} - -// Fields wraps map[string]any with convenience functions for interacting -// with nested map values. -type Fields map[string]any - -// Get will get the value at 'key' as the first returned -// parameter. The second parameter is a bool indicating -// if 'key' exists. -func (f Fields) Get(key string) (any, bool) { - indexSliceFn := func(slice []any, key string) (any, string, bool) { - indexStr, subKeys, _ := strings.Cut(key, ".") - index, err := strconv.Atoi(indexStr) - if err != nil { - log.Printf("Failed to parse slice index key %q: %v", indexStr, err) - return nil, "", false - } - - if index < 0 || index >= len(slice) { - log.Printf("Slice index is out of bounds (%d, target slice len: %d)", index, len(slice)) - return nil, "", false - } - - return slice[index], subKeys, true - } - - rootKey, subKeys, split := strings.Cut(key, ".") - if split { - switch t := f[rootKey].(type) { - case Fields: - return t.Get(subKeys) - case map[string]any: - return Fields(t).Get(subKeys) - case []any: - slicedValue, postSliceKeys, ok := indexSliceFn(t, subKeys) - if !ok { - return nil, false - } - if m, isMap := slicedValue.(map[string]any); ok && isMap { - return Fields(m).Get(postSliceKeys) - } - return slicedValue, true - - default: - rootKey = key - } - } - - value, ok := f[rootKey] - return value, ok -} - -// GetFields is like Get, but converts the found value to Fields. -// If the key is not found or the type conversion fails, the -// second return value will be false. -func (f Fields) GetFields(key string) (Fields, bool) { - value, ok := f.Get(key) - if !ok { - return nil, false - } - - switch t := value.(type) { - case Fields: - return t, true - case map[string]any: - return t, true - } - - return nil, false -} - -// Set will set key to the value of 'value'. -func (f Fields) Set(key string, value any) { - rootKey, subKeys, split := strings.Cut(key, ".") - if split { - if v, ok := f[rootKey]; ok { - switch t := v.(type) { - case Fields: - t.Set(subKeys, value) - case map[string]any: - Fields(t).Set(subKeys, value) - } - } else { - subMap := Fields{} - subMap.Set(subKeys, value) - f[rootKey] = subMap - } - } else { - f[rootKey] = value - } -} - -// Move will move the value from 'key' to 'target'. If 'key' does not -// exist, the operation is a no-op. 
-func (f Fields) Move(key, target string) { - value, ok := f.Get(key) - if !ok { - return - } - - f.Set(target, value) - f.Delete(key) -} - -// Delete will remove the key from the Fields. If key is nested, -// empty sub-keys will be removed as well. -func (f Fields) Delete(key string) { - rootKey, subKeys, split := strings.Cut(key, ".") - if split { - if v, ok := f[rootKey]; ok { - switch t := v.(type) { - case Fields: - t.Delete(subKeys) - case map[string]any: - Fields(t).Delete(subKeys) - } - } - } else { - delete(f, rootKey) - } -} - -// stringInSlice returns true if value is present in slice. -func stringInSlice(value string, slice []string) bool { - for _, v := range slice { - if value == v { - return true - } - } - - return false -} - -// stringInAnySlice returns true if value is present in slice. -func stringInAnySlice(value string, slice []any) bool { - for _, v := range slice { - s, ok := v.(string) - if !ok { - continue - } - if value == s { - return true - } - } - - return false -} diff --git a/generated/fleet/oapi-config.yaml b/generated/fleet/oapi-config.yaml new file mode 100644 index 000000000..4885498aa --- /dev/null +++ b/generated/fleet/oapi-config.yaml @@ -0,0 +1,9 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/oapi-codegen/oapi-codegen/HEAD/configuration-schema.json + +package: fleetapi +output: fleet.gen.go +generate: + client: true + models: true +# output-options: +# nullable-type: true diff --git a/generated/fleet/transform_schema.go b/generated/fleet/transform_schema.go new file mode 100644 index 000000000..af8be5581 --- /dev/null +++ b/generated/fleet/transform_schema.go @@ -0,0 +1,1052 @@ +//go:build ignore +// +build ignore + +package main + +import ( + "bytes" + "errors" + "flag" + "fmt" + "log" + "maps" + "os" + "path" + "reflect" + "slices" + "strconv" + "strings" + + "gopkg.in/yaml.v3" +) + +func main() { + _inFile := flag.String("i", "", "input file") + _outFile := flag.String("o", "", "output file") + flag.Parse() + + inFile := *_inFile + outFile := *_outFile + + if inFile == "" || outFile == "" { + flag.Usage() + os.Exit(1) + } + + outDir, _ := path.Split(outFile) + if !pathExists(outDir) { + if err := os.MkdirAll(outDir, 0755); err != nil { + log.Fatalf("failed to create directory %q: %v", outDir, err) + } + } + + bytes, err := os.ReadFile(inFile) + if err != nil { + log.Fatalf("failed to read file %q: %v", inFile, err) + } + + var schema Schema + err = yaml.Unmarshal(bytes, &schema) + if err != nil { + log.Fatalf("failed to unmarshal schema from %q: %v", inFile, err) + } + + // Run each transform + for _, fn := range transformers { + fn(&schema) + } + + saveFile(schema, outFile) +} + +// pathExists checks if path exists. +func pathExists(path string) bool { + _, err := os.Stat(path) + return !errors.Is(err, os.ErrNotExist) +} + +// saveFile marshal and writes obj to path. 
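+// The object is YAML-encoded with two-space indentation and written with 0664
+// permissions; any encode or write error aborts the program via log.Fatalf.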
+func saveFile(obj any, path string) { + var buf bytes.Buffer + enc := yaml.NewEncoder(&buf) + enc.SetIndent(2) + if err := enc.Encode(obj); err != nil { + log.Fatalf("failed to marshal to file %q: %v", path, err) + } + + if err := os.WriteFile(path, buf.Bytes(), 0664); err != nil { + log.Fatalf("failed to write file %q: %v", path, err) + } +} + +// ============================================================================ + +type Schema struct { + Paths map[string]*Path `yaml:"paths"` + Version string `yaml:"openapi"` + Tags []Map `yaml:"tags,omitempty"` + Servers []Map `yaml:"servers,omitempty"` + Components Map `yaml:"components,omitempty"` + Security []Map `yaml:"security,omitempty"` + Info Map `yaml:"info"` +} + +func (s Schema) GetPath(path string) *Path { + return s.Paths[path] +} + +func (s Schema) MustGetPath(path string) *Path { + p := s.GetPath(path) + if p == nil { + log.Panicf("Path not found: %q", path) + } + return p +} + +// ============================================================================ + +type Path struct { + Parameters []Map `yaml:"parameters,omitempty"` + Get Map `yaml:"get,omitempty"` + Post Map `yaml:"post,omitempty"` + Put Map `yaml:"put,omitempty"` + Delete Map `yaml:"delete,omitempty"` +} + +func (p Path) Endpoints(yield func(key string, endpoint Map) bool) { + if p.Get != nil { + yield("get", p.Get) + } + if p.Post != nil { + yield("post", p.Post) + } + if p.Put != nil { + yield("put", p.Put) + } + if p.Delete != nil { + yield("delete", p.Delete) + } +} + +func (p Path) GetEndpoint(method string) Map { + switch method { + case "get": + return p.Get + case "post": + return p.Post + case "put": + return p.Put + case "delete": + return p.Delete + default: + log.Panicf("Unhandled method: %q", method) + } + return nil +} + +func (p Path) MustGetEndpoint(method string) Map { + endpoint := p.GetEndpoint(method) + if endpoint == nil { + log.Panicf("Method not found: %q", method) + } + return endpoint +} + +func (p *Path) SetEndpoint(method string, endpoint Map) { + switch method { + case "get": + p.Get = endpoint + case "post": + p.Post = endpoint + case "put": + p.Put = endpoint + case "delete": + p.Delete = endpoint + default: + log.Panicf("Invalid method %q", method) + } +} + +// ============================================================================ + +type Map map[string]any + +func (m Map) Keys() []string { + keys := slices.Collect(maps.Keys(m)) + slices.Sort(keys) + return keys +} + +func (m Map) Has(key string) bool { + _, ok := m.Get(key) + return ok +} + +func (m Map) Get(key string) (any, bool) { + rootKey, subKeys, found := strings.Cut(key, ".") + if found { + switch t := m[rootKey].(type) { + case Map: + return t.Get(subKeys) + case map[string]any: + return Map(t).Get(subKeys) + case Slice: + return t.Get(subKeys) + case []any: + return Slice(t).Get(subKeys) + default: + rootKey = key + } + } + + value, ok := m[rootKey] + return value, ok +} + +func (m Map) MustGet(key string) any { + v, ok := m.Get(key) + if !ok { + log.Panicf("%q not found", key) + } + return v +} + +func (m Map) GetSlice(key string) (Slice, bool) { + value, ok := m.Get(key) + if !ok { + return nil, false + } + + switch t := value.(type) { + case Slice: + return t, true + case []any: + return t, true + } + + log.Panicf("%q is not a slice", key) + return nil, false +} + +func (m Map) MustGetSlice(key string) Slice { + v, ok := m.GetSlice(key) + if !ok { + log.Panicf("%q not found", key) + } + return v +} + +func (m Map) GetMap(key string) (Map, bool) { + value, ok := 
m.Get(key) + if !ok { + return nil, false + } + + switch t := value.(type) { + case Map: + return t, true + case map[string]any: + return t, true + } + + log.Panicf("%q is not a map", key) + return nil, false +} + +func (m Map) MustGetMap(key string) Map { + v, ok := m.GetMap(key) + if !ok { + log.Panicf("%q not found", key) + } + return v +} + +func (m Map) Set(key string, value any) { + rootKey, subKeys, found := strings.Cut(key, ".") + if found { + if v, ok := m[rootKey]; ok { + switch t := v.(type) { + case Slice: + t.Set(subKeys, value) + case []any: + Slice(t).Set(subKeys, value) + case Map: + t.Set(subKeys, value) + case map[string]any: + Map(t).Set(subKeys, value) + } + } else { + subMap := Map{} + subMap.Set(subKeys, value) + m[rootKey] = subMap + } + } else { + m[rootKey] = value + } +} + +func (m Map) Move(src string, dst string) { + value := m.MustGet(src) + m.Set(dst, value) + m.Delete(src) +} + +func (m Map) Delete(key string) bool { + rootKey, subKeys, found := strings.Cut(key, ".") + if found { + if v, ok := m[rootKey]; ok { + switch t := v.(type) { + case Slice: + return t.Delete(subKeys) + case []any: + return Slice(t).Delete(subKeys) + case Map: + return t.Delete(subKeys) + case map[string]any: + return Map(t).Delete(subKeys) + } + } + } else { + delete(m, rootKey) + return true + } + return false +} + +func (m Map) MustDelete(key string) { + if !m.Delete(key) { + log.Panicf("%q not found", key) + } +} + +func (m Map) CreateRef(schema *Schema, name string, key string) Map { + refTarget := m.MustGet(key) // Check the full path + refPath := fmt.Sprintf("schemas.%s", name) + refValue := Map{"$ref": fmt.Sprintf("#/components/schemas/%s", name)} + + // If the component schema already exists and is not the same, panic + writeComponent := true + if existing, ok := schema.Components.Get(refPath); ok { + if reflect.DeepEqual(refTarget, existing) { + writeComponent = false + } else { + //os.WriteFile("./existing.txt", []byte(spew.Sdump(existing)), 0644) + //os.WriteFile("./target.txt", []byte(spew.Sdump(refTarget)), 0644) + log.Panicf("Component schema key already in use and not an exact duplicate: %q", refPath) + return nil + } + } + + var parent any + var childKey string + // Get the parent of the refTarget + i := strings.LastIndex(key, ".") + if i == -1 { + parent = m + childKey = key + } else { + parent = m.MustGet(key[:i]) + childKey = key[i+1:] + } + + doMap := func(target Map, key string) { + if writeComponent { + schema.Components.Set(refPath, target.MustGet(key)) + } + target.Set(key, refValue) + } + + doSlice := func(target Slice, key string) { + index := target.atoi(key) + if writeComponent { + schema.Components.Set(refPath, target[index]) + } + target[index] = refValue + } + + switch t := parent.(type) { + case map[string]any: + doMap(Map(t), childKey) + case Map: + doMap(t, childKey) + case []any: + doSlice(Slice(t), childKey) + case Slice: + doSlice(t, childKey) + default: + log.Panicf("Cannot create a ref of target type %T at %q", parent, key) + } + + return refValue +} + +func (m Map) Iterate(iteratee func(key string, node Map)) { + joinPath := func(existing string, next string) string { + if existing == "" { + return next + } else { + return fmt.Sprintf("%s.%s", existing, next) + } + } + joinIndex := func(existing string, next int) string { + if existing == "" { + return fmt.Sprintf("%d", next) + } else { + return fmt.Sprintf("%s.%d", existing, next) + } + } + + var iterate func(key string, val any) + iterate = func(key string, val any) { + switch tval := 
val.(type) { + case []any: + iterate(key, Slice(tval)) + case Slice: + for i, v := range tval { + iterate(joinIndex(key, i), v) + } + case map[string]any: + iterate(key, Map(tval)) + case Map: + for _, k := range tval.Keys() { + iterate(joinPath(key, k), tval[k]) + } + iteratee(key, tval) + } + } + + iterate("", m) +} + +// ============================================================================ + +type Slice []any + +func (s Slice) Get(key string) (any, bool) { + rootKey, subKeys, found := strings.Cut(key, ".") + index := s.atoi(rootKey) + + if found { + switch t := s[index].(type) { + case Slice: + return t.Get(subKeys) + case []any: + return Slice(t).Get(subKeys) + case Map: + return t.Get(subKeys) + case map[string]any: + return Map(t).Get(subKeys) + } + } + + value := s[index] + return value, true +} + +func (s Slice) GetMap(key string) (Map, bool) { + value, ok := s.Get(key) + if !ok { + return nil, false + } + + switch t := value.(type) { + case Map: + return t, true + case map[string]any: + return t, true + } + + log.Panicf("%q is not a map", key) + return nil, false +} + +func (s Slice) MustGetMap(key string) Map { + v, ok := s.GetMap(key) + if !ok { + log.Panicf("%q not found", key) + } + return v +} + +func (s Slice) Set(key string, value any) { + rootKey, subKeys, found := strings.Cut(key, ".") + index := s.atoi(rootKey) + if found { + v := s[index] + switch t := v.(type) { + case Slice: + t.Set(subKeys, value) + case []any: + Slice(t).Set(subKeys, value) + case Map: + t.Set(subKeys, value) + case map[string]any: + Map(t).Set(subKeys, value) + } + } else { + s[index] = value + } +} + +func (s Slice) Delete(key string) bool { + rootKey, subKeys, found := strings.Cut(key, ".") + index := s.atoi(rootKey) + if found { + item := (s)[index] + switch t := item.(type) { + case Slice: + return t.Delete(subKeys) + case []any: + return Slice(t).Delete(subKeys) + case Map: + return t.Delete(subKeys) + case map[string]any: + return Map(t).Delete(subKeys) + } + } else { + log.Panicf("Unable to delete from slice directly") + return true + } + return false +} + +func (s Slice) Contains(value string) bool { + for _, v := range s { + s, ok := v.(string) + if !ok { + continue + } + if value == s { + return true + } + } + + return false +} + +func (s Slice) atoi(key string) int { + index, err := strconv.Atoi(key) + if err != nil { + log.Panicf("Failed to parse slice index key %q: %v", key, err) + } + if index < 0 || index >= len(s) { + log.Panicf("Slice index is out of bounds (%d, target slice len: %d)", index, len(s)) + } + return index +} + +// ============================================================================ + +type TransformFunc func(schema *Schema) + +var transformers = []TransformFunc{ + transformFilterPaths, + transformRemoveKbnXsrf, + transformRemoveApiVersionParam, + transformSimplifyContentType, + transformFleetPaths, + // transformRemoveEnums, + // transformAddGoPointersFlag, + transformRemoveExamples, + transformRemoveUnusedComponents, +} + +// transformFilterPaths filters the paths in a schema down to a specified list +// of endpoints and methods. 
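+// Paths or methods outside this allow-list are dropped from the schema; if a
+// listed path or method is missing from the source schema, the transform
+// panics so the mismatch is caught at generation time.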
+func transformFilterPaths(schema *Schema) { + var includePaths = map[string][]string{ + "/api/fleet/agent_policies": {"get", "post"}, + "/api/fleet/agent_policies/delete": {"post"}, + "/api/fleet/agent_policies/{agentPolicyId}": {"get", "put"}, + "/api/fleet/enrollment_api_keys": {"get"}, + "/api/fleet/epm/packages": {"get", "post"}, + "/api/fleet/epm/packages/{pkgName}/{pkgVersion}": {"get", "post", "delete"}, + "/api/fleet/fleet_server_hosts": {"get", "post"}, + "/api/fleet/fleet_server_hosts/{itemId}": {"get", "put", "delete"}, + "/api/fleet/outputs": {"get", "post"}, + "/api/fleet/outputs/{outputId}": {"get", "put", "delete"}, + "/api/fleet/package_policies": {"get", "post"}, + "/api/fleet/package_policies/{packagePolicyId}": {"get", "put", "delete"}, + } + + for path, pathInfo := range schema.Paths { + if allowedMethods, ok := includePaths[path]; ok { + // Filter out endpoints not if filter list + for method := range pathInfo.Endpoints { + if !slices.Contains(allowedMethods, method) { + pathInfo.SetEndpoint(method, nil) + } + } + } else { + // Remove paths not in filter list. + delete(schema.Paths, path) + } + } + + // Go through again, verify each entry exists + for path, methods := range includePaths { + pathInfo := schema.GetPath(path) + if pathInfo == nil { + log.Panicf("Missing path %q", path) + } + + for _, method := range methods { + endpoint := pathInfo.GetEndpoint(method) + if endpoint == nil { + log.Panicf("Missing method %q of %q", method, path) + } + } + } +} + +// transformRemoveKbnXsrf removes the kbn-xsrf header as it is already applied +// in the client. +func transformRemoveKbnXsrf(schema *Schema) { + removeKbnXsrf := func(node any) bool { + param := node.(Map) + if v, ok := param["name"]; ok { + name := v.(string) + if strings.HasSuffix(name, "kbn_xsrf") || strings.HasSuffix(name, "kbn-xsrf") { + return true + } + } + // Data_views_kbn_xsrf, Saved_objects_kbn_xsrf, etc + if v, ok := param["$ref"]; ok { + ref := v.(string) + if strings.HasSuffix(ref, "kbn_xsrf") || strings.HasSuffix(ref, "kbn-xsrf") { + return true + } + } + return false + } + + for _, pathInfo := range schema.Paths { + for _, endpoint := range pathInfo.Endpoints { + if params, ok := endpoint.GetSlice("parameters"); ok { + params = slices.DeleteFunc(params, removeKbnXsrf) + endpoint["parameters"] = params + } + } + } +} + +// transformRemoveApiVersionParam removes the Elastic API Version query +// parameter header. +func transformRemoveApiVersionParam(schema *Schema) { + removeApiVersion := func(node any) bool { + param := node.(Map) + if name, ok := param["name"]; ok && name == "elastic-api-version" { + return true + } + return false + } + + for _, pathInfo := range schema.Paths { + for _, endpoint := range pathInfo.Endpoints { + if params, ok := endpoint.GetSlice("parameters"); ok { + params = slices.DeleteFunc(params, removeApiVersion) + endpoint["parameters"] = params + } + } + } +} + +// transformSimplifyContentType simplifies Content-Type headers such as +// 'application/json; Elastic-Api-Version=2023-10-31' by stripping everything +// after the ';'. 
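+// For example, 'application/json; Elastic-Api-Version=2023-10-31' becomes
+// 'application/json'. The rewrite is applied to each endpoint's request body
+// and responses as well as to the shared component responses.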
+func transformSimplifyContentType(schema *Schema) { + simplifyContentType := func(fields Map) { + if content, ok := fields.GetMap("content"); ok { + for key := range content { + newKey, _, found := strings.Cut(key, ";") + if found { + content.Move(key, newKey) + } + } + } + } + + for _, pathInfo := range schema.Paths { + for _, endpoint := range pathInfo.Endpoints { + if req, ok := endpoint.GetMap("requestBody"); ok { + simplifyContentType(req) + } + if resp, ok := endpoint.GetMap("responses"); ok { + for code := range resp { + simplifyContentType(resp.MustGetMap(code)) + } + } + } + } + + if responses, ok := schema.Components.GetMap("responses"); ok { + for key := range responses { + resp := responses.MustGetMap(key) + simplifyContentType(resp) + } + } +} + +// transformFleetPaths fixes the fleet paths. +func transformFleetPaths(schema *Schema) { + operationIds := map[string]map[string]string{ + "/api/fleet/agent_policies": { + "get": "get_agent_policies", + "post": "create_agent_policy", + }, + "/api/fleet/agent_policies/delete": { + "post": "delete_agent_policy", + }, + "/api/fleet/agent_policies/{agentPolicyId}": { + "get": "get_agent_policy", + "put": "update_agent_policy", + }, + "/api/fleet/enrollment_api_keys": { + "get": "get_enrollment_api_keys", + }, + "/api/fleet/epm/packages": { + "get": "list_packages", + "post": "install_package_by_upload", + }, + "/api/fleet/epm/packages/{pkgName}/{pkgVersion}": { + "get": "get_package", + "post": "install_package", + "delete": "delete_package", + }, + "/api/fleet/fleet_server_hosts": { + "get": "get_fleet_server_hosts", + "post": "create_fleet_server_host", + }, + "/api/fleet/fleet_server_hosts/{itemId}": { + "get": "get_fleet_server_host", + "put": "update_fleet_server_host", + "delete": "delete_fleet_server_host", + }, + "/api/fleet/outputs": { + "get": "get_outputs", + "post": "create_output", + }, + "/api/fleet/outputs/{outputId}": { + "get": "get_output", + "put": "update_output", + "delete": "delete_output", + }, + "/api/fleet/package_policies": { + "get": "get_package_policies", + "post": "create_package_policy", + }, + "/api/fleet/package_policies/{packagePolicyId}": { + "get": "get_package_policy", + "put": "update_package_policy", + "delete": "delete_package_policy", + }, + } + + // Set each missing operationId + for path, methods := range operationIds { + pathInfo := schema.MustGetPath(path) + for method, operationId := range methods { + endpoint := pathInfo.GetEndpoint(method) + endpoint.Set("operationId", operationId) + } + } + + // Fix OpenAPI error: set each missing description + for _, pathInfo := range schema.Paths { + for _, endpoint := range pathInfo.Endpoints { + responses := endpoint.MustGetMap("responses") + for code := range responses { + response := responses.MustGetMap(code) + if _, ok := response["description"]; !ok { + response["description"] = "" + } + } + } + } + + // Agent policies + // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/models/agent_policy.ts + // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/rest_spec/agent_policy.ts + + agentPoliciesPath := schema.MustGetPath("/api/fleet/agent_policies") + agentPolicyPath := schema.MustGetPath("/api/fleet/agent_policies/{agentPolicyId}") + + agentPoliciesPath.Get.CreateRef(schema, "agent_policy", "responses.200.content.application/json.schema.properties.items.items") + agentPoliciesPath.Post.CreateRef(schema, "agent_policy", "responses.200.content.application/json.schema.properties.item") + 
agentPolicyPath.Get.CreateRef(schema, "agent_policy", "responses.200.content.application/json.schema.properties.item") + agentPolicyPath.Put.CreateRef(schema, "agent_policy", "responses.200.content.application/json.schema.properties.item") + + // Enrollment api keys + // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/models/enrollment_api_key.ts + // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/rest_spec/enrollment_api_key.ts + + apiKeysPath := schema.MustGetPath("/api/fleet/enrollment_api_keys") + apiKeysPath.Get.CreateRef(schema, "enrollment_api_key", "responses.200.content.application/json.schema.properties.items.items") + + // EPM + // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/models/epm.ts + // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/rest_spec/epm.ts + + packagesPath := schema.MustGetPath("/api/fleet/epm/packages") + packagePath := schema.MustGetPath("/api/fleet/epm/packages/{pkgName}/{pkgVersion}") + packagesPath.Get.CreateRef(schema, "package_list_item", "responses.200.content.application/json.schema.properties.items.items") + packagePath.Get.CreateRef(schema, "package_info", "responses.200.content.application/json.schema.properties.item") + + // Server hosts + // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/models/fleet_server_policy_config.ts + // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/rest_spec/fleet_server_hosts.ts + + hostsPath := schema.MustGetPath("/api/fleet/fleet_server_hosts") + hostPath := schema.MustGetPath("/api/fleet/fleet_server_hosts/{itemId}") + + hostsPath.Get.CreateRef(schema, "server_host", "responses.200.content.application/json.schema.properties.items.items") + hostsPath.Post.CreateRef(schema, "server_host", "responses.200.content.application/json.schema.properties.item") + hostPath.Get.CreateRef(schema, "server_host", "responses.200.content.application/json.schema.properties.item") + hostPath.Put.CreateRef(schema, "server_host", "responses.200.content.application/json.schema.properties.item") + + // Outputs + // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/models/output.ts + // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/rest_spec/output.ts + + outputByIdPath := schema.MustGetPath("/api/fleet/outputs/{outputId}") + outputsPath := schema.MustGetPath("/api/fleet/outputs") + + outputsPath.Get.CreateRef(schema, "output_union", "responses.200.content.application/json.schema.properties.items.items") + outputsPath.Post.CreateRef(schema, "new_output_union", "requestBody.content.application/json.schema") + outputsPath.Post.CreateRef(schema, "output_union", "responses.200.content.application/json.schema.properties.item") + outputByIdPath.Get.CreateRef(schema, "output_union", "responses.200.content.application/json.schema.properties.item") + outputByIdPath.Put.CreateRef(schema, "update_output_union", "requestBody.content.application/json.schema") + outputByIdPath.Put.CreateRef(schema, "output_union", "responses.200.content.application/json.schema.properties.item") + + for _, name := range []string{"output", "new_output", "update_output"} { + // Ref each index in the anyOf union + schema.Components.CreateRef(schema, fmt.Sprintf("%s_elasticsearch", name), fmt.Sprintf("schemas.%s_union.anyOf.0", name)) + schema.Components.CreateRef(schema, fmt.Sprintf("%s_remote_elasticsearch", name), 
fmt.Sprintf("schemas.%s_union.anyOf.1", name)) + schema.Components.CreateRef(schema, fmt.Sprintf("%s_logstash", name), fmt.Sprintf("schemas.%s_union.anyOf.2", name)) + schema.Components.CreateRef(schema, fmt.Sprintf("%s_kafka", name), fmt.Sprintf("schemas.%s_union.anyOf.3", name)) + + // Add the missing discriminator + schema.Components.Set(fmt.Sprintf("schemas.%s_union.discriminator", name), Map{ + "propertyName": "type", + "mapping": Map{ + "elasticsearch": fmt.Sprintf("#/components/schemas/%s_elasticsearch", name), + "remote_elasticsearch": fmt.Sprintf("#/components/schemas/%s_remote_elasticsearch", name), + "logstash": fmt.Sprintf("#/components/schemas/%s_logstash", name), + "kafka": fmt.Sprintf("#/components/schemas/%s_kafka", name), + }, + }) + + // Extract child structs + for _, typ := range []string{"elasticsearch", "remote_elasticsearch", "logstash", "kafka"} { + schema.Components.CreateRef(schema, fmt.Sprintf("%s_shipper", name), fmt.Sprintf("schemas.%s_%s.properties.shipper", name, typ)) + schema.Components.CreateRef(schema, fmt.Sprintf("%s_ssl", name), fmt.Sprintf("schemas.%s_%s.properties.ssl", name, typ)) + } + + /* + // These look like this and oapi breaks hard on it. + // Turn them into an `any` type. + anyOf: + - items: {} + type: array + - type: boolean + - type: number + - type: object + - type: string + nullable: true + oneOf: + - type: number + - not: {} + */ + node := schema.Components.MustGetMap(fmt.Sprintf("schemas.%s_kafka.properties", name)) + for _, typ := range []string{"compression_level", "connection_type", "password", "username"} { + node[typ] = Map{} + } + } + + // Package policies + // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/models/package_policy.ts + // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/rest_spec/package_policy.ts + + epmPoliciesPath := schema.MustGetPath("/api/fleet/package_policies") + epmPolicyPath := schema.MustGetPath("/api/fleet/package_policies/{packagePolicyId}") + + epmPoliciesPath.Get.CreateRef(schema, "package_policy", "responses.200.content.application/json.schema.properties.items.items") + epmPoliciesPath.Post.CreateRef(schema, "package_policy", "responses.200.content.application/json.schema.properties.item") + + epmPoliciesPath.Post.Move("requestBody.content.application/json.schema.anyOf.1", "requestBody.content.application/json.schema") // anyOf.0 is the deprecated array format + epmPolicyPath.Put.Move("requestBody.content.application/json.schema.anyOf.1", "requestBody.content.application/json.schema") // anyOf.0 is the deprecated array format + epmPoliciesPath.Post.CreateRef(schema, "package_policy_request", "requestBody.content.application/json.schema") + epmPolicyPath.Put.CreateRef(schema, "package_policy_request", "requestBody.content.application/json.schema") + + epmPolicyPath.Get.CreateRef(schema, "package_policy", "responses.200.content.application/json.schema.properties.item") + epmPolicyPath.Put.CreateRef(schema, "package_policy", "responses.200.content.application/json.schema.properties.item") + + schema.Components.CreateRef(schema, "package_policy_secret_ref", "schemas.package_policy.properties.secret_references.items") + schema.Components.Move("schemas.package_policy.properties.inputs.anyOf.1", "schemas.package_policy.properties.inputs") // anyOf.0 is the deprecated array format + + schema.Components.CreateRef(schema, "package_policy_input", "schemas.package_policy.properties.inputs.additionalProperties") + schema.Components.CreateRef(schema, 
"package_policy_input_stream", "schemas.package_policy_input.properties.streams.additionalProperties") + + schema.Components.CreateRef(schema, "package_policy_request_package", "schemas.package_policy_request.properties.package") + schema.Components.CreateRef(schema, "package_policy_request_input", "schemas.package_policy_request.properties.inputs.additionalProperties") + schema.Components.CreateRef(schema, "package_policy_request_input_stream", "schemas.package_policy_request_input.properties.streams.additionalProperties") + + // Simplify all of the vars + schema.Components.Set("schemas.package_policy.properties.vars", Map{"type": "object"}) + schema.Components.Set("schemas.package_policy_input.properties.vars", Map{"type": "object"}) + schema.Components.Set("schemas.package_policy_input_stream.properties.vars", Map{"type": "object"}) + schema.Components.Set("schemas.package_policy_request.properties.vars", Map{"type": "object"}) + schema.Components.Set("schemas.package_policy_request_input.properties.vars", Map{"type": "object"}) + schema.Components.Set("schemas.package_policy_request_input_stream.properties.vars", Map{"type": "object"}) + + // Upstream issues + + // [request body.keep_monitoring_alive]: expected value of type [boolean] but got [null] + agentPoliciesPath.Post.Set("requestBody.content.application/json.schema.properties.keep_monitoring_alive.x-omitempty", true) + agentPolicyPath.Put.Set("requestBody.content.application/json.schema.properties.keep_monitoring_alive.x-omitempty", true) + + // [request body.supports_agentless]: expected value of type [boolean] but got [null] + agentPoliciesPath.Post.Set("requestBody.content.application/json.schema.properties.supports_agentless.x-omitempty", true) + agentPolicyPath.Put.Set("requestBody.content.application/json.schema.properties.supports_agentless.x-omitempty", true) + + // [request body.supports_agentless]: expected value of type [boolean] but got [null] + agentPoliciesPath.Post.Set("requestBody.content.application/json.schema.properties.overrides.x-omitempty", true) + agentPolicyPath.Put.Set("requestBody.content.application/json.schema.properties.overrides.x-omitempty", true) + + // 8.6.2 regression + // [request body.proxy_id]: definition for this key is missing + hostsPath.Post.Set("requestBody.content.application/json.schema.properties.proxy_id.x-omitempty", true) + hostPath.Put.Set("requestBody.content.application/json.schema.properties.proxy_id.x-omitempty", true) + + for _, typ := range []string{"elasticsearch", "remote_elasticsearch", "logstash", "kafka"} { + // Discriminator codegen failure, may not be required upstream, have not tested + schema.Components.Set(fmt.Sprintf("schemas.update_output_%s.required", typ), []string{"type"}) + + // [request body.3.ca_sha256]: expected value of type [string] but got [null]" + schema.Components.Set(fmt.Sprintf("schemas.new_output_%s.properties.ca_sha256.x-omitempty", typ), true) + schema.Components.Set(fmt.Sprintf("schemas.update_output_%s.properties.ca_sha256.x-omitempty", typ), true) + + // [request body.1.ca_trusted_fingerprint]: expected value of type [string] but got [null] + schema.Components.Set(fmt.Sprintf("schemas.new_output_%s.properties.ca_trusted_fingerprint.x-omitempty", typ), true) + schema.Components.Set(fmt.Sprintf("schemas.update_output_%s.properties.ca_trusted_fingerprint.x-omitempty", typ), true) + + // 8.6.2 regression + // [request body.proxy_id]: definition for this key is missing" + 
schema.Components.Set(fmt.Sprintf("schemas.new_output_%s.properties.proxy_id.x-omitempty", typ), true) + schema.Components.Set(fmt.Sprintf("schemas.update_output_%s.properties.proxy_id.x-omitempty", typ), true) + + // strict_dynamic_mapping_exception: [1:345] mapping set to strict, dynamic introduction of [id] within [ingest-outputs] is not allowed" + schema.Components.MustDelete(fmt.Sprintf("schemas.update_output_%s.properties.id", typ)) + } + + // [request body.0.shipper]: expected a plain object value, but found [null] instead + schema.Components.Set("schemas.new_output_shipper.x-omitempty", true) + schema.Components.Set("schemas.new_output_ssl.x-omitempty", true) + + // [request body.1.shipper]: expected a plain object value, but found [null] instead + schema.Components.Set("schemas.update_output_shipper.x-omitempty", true) + schema.Components.Set("schemas.update_output_ssl.x-omitempty", true) + + // [request body.1.output_id]: definition for this key is missing" + schema.Components.Set("schemas.package_policy_request.properties.output_id.x-omitempty", true) +} + +// transformRemoveEnums remove all enums. +func transformRemoveEnums(schema *Schema) { + deleteEnumFn := func(key string, node Map) { + if node.Has("enum") { + delete(node, "enum") + } + } + + for _, pathInfo := range schema.Paths { + for _, methInfo := range pathInfo.Endpoints { + methInfo.Iterate(deleteEnumFn) + } + } + schema.Components.Iterate(deleteEnumFn) +} + +// transformRemoveExamples removes all examples. +func transformRemoveExamples(schema *Schema) { + deleteExampleFn := func(key string, node Map) { + if node.Has("example") { + delete(node, "example") + } + if node.Has("examples") { + delete(node, "examples") + } + } + + for _, pathInfo := range schema.Paths { + for _, methInfo := range pathInfo.Endpoints { + methInfo.Iterate(deleteExampleFn) + } + } + schema.Components.Iterate(deleteExampleFn) + schema.Components.Set("examples", Map{}) +} + +// transformAddOptionalPointersFlag adds a x-go-type-skip-optional-pointer +// flag to maps and arrays, since they are already nullable types. +func transformAddOptionalPointersFlag(schema *Schema) { + addFlagFn := func(key string, node Map) { + if node["type"] == "array" { + node["x-go-type-skip-optional-pointer"] = true + } else if node["type"] == "object" { + if _, ok := node["properties"]; !ok { + node["x-go-type-skip-optional-pointer"] = true + } + } + } + + for _, pathInfo := range schema.Paths { + for _, methInfo := range pathInfo.Endpoints { + methInfo.Iterate(addFlagFn) + } + } + schema.Components.Iterate(addFlagFn) +} + +// transformRemoveUnusedComponents removes all unused schema components. 
+func transformRemoveUnusedComponents(schema *Schema) { + var refs map[string]any + collectRefsFn := func(key string, node Map) { + if ref, ok := node["$ref"].(string); ok { + i := strings.LastIndex(ref, "/") + ref = ref[i+1:] + refs[ref] = nil + } + } + + componentParams := schema.Components.MustGetMap("parameters") + componentSchemas := schema.Components.MustGetMap("schemas") + + for { + // Collect refs + refs = make(map[string]any) + for _, pathInfo := range schema.Paths { + for _, methInfo := range pathInfo.Endpoints { + methInfo.Iterate(collectRefsFn) + } + } + schema.Components.Iterate(collectRefsFn) + + loop := false + for key := range componentSchemas { + if _, ok := refs[key]; !ok { + delete(componentSchemas, key) + loop = true + } + } + for key := range componentParams { + if _, ok := refs[key]; !ok { + delete(componentParams, key) + loop = true + } + } + if !loop { + break + } + } +} diff --git a/internal/clients/fleet/client.go b/internal/clients/fleet/client.go index 5d5ea71d2..d2d211e2e 100644 --- a/internal/clients/fleet/client.go +++ b/internal/clients/fleet/client.go @@ -67,7 +67,6 @@ func NewClient(cfg Config) (*Client, error) { if !strings.HasSuffix(endpoint, "/") { endpoint += "/" } - endpoint += "api/fleet/" fleetAPIClient, err := fleetapi.NewClientWithResponses(endpoint, fleetapi.WithHTTPClient(httpClient)) if err != nil { diff --git a/internal/clients/fleet/errors.go b/internal/clients/fleet/errors.go new file mode 100644 index 000000000..4db2f16c0 --- /dev/null +++ b/internal/clients/fleet/errors.go @@ -0,0 +1,26 @@ +package fleet + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/diag" +) + +// fromErr recreates the sdkdiag.FromErr functionality. +func fromErr(err error) diag.Diagnostics { + if err == nil { + return nil + } + return diag.Diagnostics{ + diag.NewErrorDiagnostic(err.Error(), ""), + } +} + +func reportUnknownError(statusCode int, body []byte) diag.Diagnostics { + return diag.Diagnostics{ + diag.NewErrorDiagnostic( + fmt.Sprintf("Unexpected status code from server: got HTTP %d", statusCode), + string(body), + ), + } +} diff --git a/internal/clients/fleet/fleet.go b/internal/clients/fleet/fleet.go index 607e74682..e0649079a 100644 --- a/internal/clients/fleet/fleet.go +++ b/internal/clients/fleet/fleet.go @@ -3,11 +3,10 @@ package fleet import ( "context" "errors" - "fmt" - "io" "net/http" fleetapi "github.com/elastic/terraform-provider-elasticstack/generated/fleet" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/hashicorp/terraform-plugin-framework/diag" ) @@ -15,41 +14,43 @@ var ( ErrPackageNotFound = errors.New("package not found") ) -// AllEnrollmentTokens reads all enrollment tokens from the API. -func AllEnrollmentTokens(ctx context.Context, client *Client) ([]fleetapi.EnrollmentApiKey, diag.Diagnostics) { - resp, err := client.API.GetEnrollmentApiKeysWithResponse(ctx) +// GetEnrollmentTokens reads all enrollment tokens from the API. 
+func GetEnrollmentTokens(ctx context.Context, client *Client) ([]fleetapi.EnrollmentApiKey, diag.Diagnostics) { + resp, err := client.API.GetEnrollmentApiKeysWithResponse(ctx, nil) if err != nil { return nil, fromErr(err) } - if resp.StatusCode() == http.StatusOK { + switch resp.StatusCode() { + case http.StatusOK: return resp.JSON200.Items, nil + default: + return nil, reportUnknownError(resp.StatusCode(), resp.Body) } - return nil, reportUnknownError(resp.StatusCode(), resp.Body) } // GetEnrollmentTokensByPolicy Get enrollment tokens by given policy ID func GetEnrollmentTokensByPolicy(ctx context.Context, client *Client, policyID string) ([]fleetapi.EnrollmentApiKey, diag.Diagnostics) { - resp, err := client.API.GetEnrollmentApiKeysWithResponse(ctx, func(ctx context.Context, req *http.Request) error { - q := req.URL.Query() - q.Set("kuery", "policy_id:"+policyID) - req.URL.RawQuery = q.Encode() + params := fleetapi.GetEnrollmentApiKeysParams{ + Kuery: utils.Pointer("policy_id:" + policyID), + } - return nil - }) + resp, err := client.API.GetEnrollmentApiKeysWithResponse(ctx, ¶ms) if err != nil { return nil, fromErr(err) } - if resp.StatusCode() == http.StatusOK { + switch resp.StatusCode() { + case http.StatusOK: return resp.JSON200.Items, nil + default: + return nil, reportUnknownError(resp.StatusCode(), resp.Body) } - return nil, reportUnknownError(resp.StatusCode(), resp.Body) } -// ReadAgentPolicy reads a specific agent policy from the API. -func ReadAgentPolicy(ctx context.Context, client *Client, id string) (*fleetapi.AgentPolicy, diag.Diagnostics) { - resp, err := client.API.AgentPolicyInfoWithResponse(ctx, id) +// GetAgentPolicy reads a specific agent policy from the API. +func GetAgentPolicy(ctx context.Context, client *Client, id string) (*fleetapi.AgentPolicy, diag.Diagnostics) { + resp, err := client.API.GetAgentPolicyWithResponse(ctx, id, nil) if err != nil { return nil, fromErr(err) } @@ -65,31 +66,27 @@ func ReadAgentPolicy(ctx context.Context, client *Client, id string) (*fleetapi. } // CreateAgentPolicy creates a new agent policy. -func CreateAgentPolicy(ctx context.Context, client *Client, req fleetapi.AgentPolicyCreateRequest, sysMonitoring bool) (*fleetapi.AgentPolicy, diag.Diagnostics) { - resp, err := client.API.CreateAgentPolicyWithResponse(ctx, req, func(ctx context.Context, req *http.Request) error { - if sysMonitoring { - qs := req.URL.Query() - qs.Add("sys_monitoring", "true") - req.URL.RawQuery = qs.Encode() - } +func CreateAgentPolicy(ctx context.Context, client *Client, req fleetapi.CreateAgentPolicyJSONRequestBody, sysMonitoring bool) (*fleetapi.AgentPolicy, diag.Diagnostics) { + params := fleetapi.CreateAgentPolicyParams{ + SysMonitoring: utils.Pointer(sysMonitoring), + } - return nil - }) + resp, err := client.API.CreateAgentPolicyWithResponse(ctx, ¶ms, req) if err != nil { return nil, fromErr(err) } switch resp.StatusCode() { case http.StatusOK: - return resp.JSON200.Item, nil + return &resp.JSON200.Item, nil default: return nil, reportUnknownError(resp.StatusCode(), resp.Body) } } // UpdateAgentPolicy updates an existing agent policy. 
-func UpdateAgentPolicy(ctx context.Context, client *Client, id string, req fleetapi.AgentPolicyUpdateRequest) (*fleetapi.AgentPolicy, diag.Diagnostics) { - resp, err := client.API.UpdateAgentPolicyWithResponse(ctx, id, req) +func UpdateAgentPolicy(ctx context.Context, client *Client, id string, req fleetapi.UpdateAgentPolicyJSONRequestBody) (*fleetapi.AgentPolicy, diag.Diagnostics) { + resp, err := client.API.UpdateAgentPolicyWithResponse(ctx, id, nil, req) if err != nil { return nil, fromErr(err) } @@ -123,8 +120,8 @@ func DeleteAgentPolicy(ctx context.Context, client *Client, id string) diag.Diag } } -// ReadOutput reads a specific output from the API. -func ReadOutput(ctx context.Context, client *Client, id string) (*fleetapi.OutputCreateRequest, diag.Diagnostics) { +// GetOutput reads a specific output from the API. +func GetOutput(ctx context.Context, client *Client, id string) (*fleetapi.OutputUnion, diag.Diagnostics) { resp, err := client.API.GetOutputWithResponse(ctx, id) if err != nil { return nil, fromErr(err) @@ -132,7 +129,7 @@ func ReadOutput(ctx context.Context, client *Client, id string) (*fleetapi.Outpu switch resp.StatusCode() { case http.StatusOK: - return resp.JSON200.Item, nil + return &resp.JSON200.Item, nil case http.StatusNotFound: return nil, nil default: @@ -141,22 +138,22 @@ func ReadOutput(ctx context.Context, client *Client, id string) (*fleetapi.Outpu } // CreateOutput creates a new output. -func CreateOutput(ctx context.Context, client *Client, req fleetapi.PostOutputsJSONRequestBody) (*fleetapi.OutputCreateRequest, diag.Diagnostics) { - resp, err := client.API.PostOutputsWithResponse(ctx, req) +func CreateOutput(ctx context.Context, client *Client, req fleetapi.NewOutputUnion) (*fleetapi.OutputUnion, diag.Diagnostics) { + resp, err := client.API.CreateOutputWithResponse(ctx, req) if err != nil { return nil, fromErr(err) } switch resp.StatusCode() { case http.StatusOK: - return resp.JSON200.Item, nil + return &resp.JSON200.Item, nil default: return nil, reportUnknownError(resp.StatusCode(), resp.Body) } } // UpdateOutput updates an existing output. -func UpdateOutput(ctx context.Context, client *Client, id string, req fleetapi.UpdateOutputJSONRequestBody) (*fleetapi.OutputUpdateRequest, diag.Diagnostics) { +func UpdateOutput(ctx context.Context, client *Client, id string, req fleetapi.UpdateOutputUnion) (*fleetapi.OutputUnion, diag.Diagnostics) { resp, err := client.API.UpdateOutputWithResponse(ctx, id, req) if err != nil { return nil, fromErr(err) @@ -164,7 +161,7 @@ func UpdateOutput(ctx context.Context, client *Client, id string, req fleetapi.U switch resp.StatusCode() { case http.StatusOK: - return resp.JSON200.Item, nil + return &resp.JSON200.Item, nil default: return nil, reportUnknownError(resp.StatusCode(), resp.Body) } @@ -187,9 +184,9 @@ func DeleteOutput(ctx context.Context, client *Client, id string) diag.Diagnosti } } -// ReadFleetServerHost reads a specific fleet server host from the API. -func ReadFleetServerHost(ctx context.Context, client *Client, id string) (*fleetapi.FleetServerHost, diag.Diagnostics) { - resp, err := client.API.GetOneFleetServerHostsWithResponse(ctx, id) +// GetFleetServerHost reads a specific fleet server host from the API. 
+func GetFleetServerHost(ctx context.Context, client *Client, id string) (*fleetapi.ServerHost, diag.Diagnostics) { + resp, err := client.API.GetFleetServerHostWithResponse(ctx, id) if err != nil { return nil, fromErr(err) } @@ -205,23 +202,23 @@ func ReadFleetServerHost(ctx context.Context, client *Client, id string) (*fleet } // CreateFleetServerHost creates a new fleet server host. -func CreateFleetServerHost(ctx context.Context, client *Client, req fleetapi.PostFleetServerHostsJSONRequestBody) (*fleetapi.FleetServerHost, diag.Diagnostics) { - resp, err := client.API.PostFleetServerHostsWithResponse(ctx, req) +func CreateFleetServerHost(ctx context.Context, client *Client, req fleetapi.CreateFleetServerHostJSONRequestBody) (*fleetapi.ServerHost, diag.Diagnostics) { + resp, err := client.API.CreateFleetServerHostWithResponse(ctx, req) if err != nil { return nil, fromErr(err) } switch resp.StatusCode() { case http.StatusOK: - return resp.JSON200.Item, nil + return &resp.JSON200.Item, nil default: return nil, reportUnknownError(resp.StatusCode(), resp.Body) } } // UpdateFleetServerHost updates an existing fleet server host. -func UpdateFleetServerHost(ctx context.Context, client *Client, id string, req fleetapi.UpdateFleetServerHostsJSONRequestBody) (*fleetapi.FleetServerHost, diag.Diagnostics) { - resp, err := client.API.UpdateFleetServerHostsWithResponse(ctx, id, req) +func UpdateFleetServerHost(ctx context.Context, client *Client, id string, req fleetapi.UpdateFleetServerHostJSONRequestBody) (*fleetapi.ServerHost, diag.Diagnostics) { + resp, err := client.API.UpdateFleetServerHostWithResponse(ctx, id, req) if err != nil { return nil, fromErr(err) } @@ -236,7 +233,7 @@ func UpdateFleetServerHost(ctx context.Context, client *Client, id string, req f // DeleteFleetServerHost deletes an existing fleet server host. func DeleteFleetServerHost(ctx context.Context, client *Client, id string) diag.Diagnostics { - resp, err := client.API.DeleteFleetServerHostsWithResponse(ctx, id) + resp, err := client.API.DeleteFleetServerHostWithResponse(ctx, id) if err != nil { return fromErr(err) } @@ -251,11 +248,10 @@ func DeleteFleetServerHost(ctx context.Context, client *Client, id string) diag. } } -// ReadPackagePolicy reads a specific package policy from the API. -func ReadPackagePolicy(ctx context.Context, client *Client, id string) (*fleetapi.PackagePolicy, diag.Diagnostics) { - format := fleetapi.GetPackagePolicyParamsFormatSimplified +// GetPackagePolicy reads a specific package policy from the API. +func GetPackagePolicy(ctx context.Context, client *Client, id string) (*fleetapi.PackagePolicy, diag.Diagnostics) { params := fleetapi.GetPackagePolicyParams{ - Format: &format, + Format: utils.Pointer(fleetapi.GetPackagePolicyParamsFormatSimplified), } resp, err := client.API.GetPackagePolicyWithResponse(ctx, id, ¶ms) @@ -275,9 +271,8 @@ func ReadPackagePolicy(ctx context.Context, client *Client, id string) (*fleetap // CreatePackagePolicy creates a new package policy. 
func CreatePackagePolicy(ctx context.Context, client *Client, req fleetapi.CreatePackagePolicyJSONRequestBody) (*fleetapi.PackagePolicy, diag.Diagnostics) { - format := fleetapi.CreatePackagePolicyParamsFormatSimplified params := fleetapi.CreatePackagePolicyParams{ - Format: &format, + Format: utils.Pointer(fleetapi.CreatePackagePolicyParamsFormatSimplified), } resp, err := client.API.CreatePackagePolicyWithResponse(ctx, ¶ms, req) @@ -295,9 +290,8 @@ func CreatePackagePolicy(ctx context.Context, client *Client, req fleetapi.Creat // UpdatePackagePolicy updates an existing package policy. func UpdatePackagePolicy(ctx context.Context, client *Client, id string, req fleetapi.UpdatePackagePolicyJSONRequestBody) (*fleetapi.PackagePolicy, diag.Diagnostics) { - format := fleetapi.UpdatePackagePolicyParamsFormatSimplified params := fleetapi.UpdatePackagePolicyParams{ - Format: &format, + Format: utils.Pointer(fleetapi.Simplified), } resp, err := client.API.UpdatePackagePolicyWithResponse(ctx, id, ¶ms, req) @@ -315,7 +309,10 @@ func UpdatePackagePolicy(ctx context.Context, client *Client, id string, req fle // DeletePackagePolicy deletes an existing package policy. func DeletePackagePolicy(ctx context.Context, client *Client, id string, force bool) diag.Diagnostics { - params := fleetapi.DeletePackagePolicyParams{Force: &force} + params := fleetapi.DeletePackagePolicyParams{ + Force: &force, + } + resp, err := client.API.DeletePackagePolicyWithResponse(ctx, id, ¶ms) if err != nil { return fromErr(err) @@ -331,66 +328,49 @@ func DeletePackagePolicy(ctx context.Context, client *Client, id string, force b } } -// ReadPackage reads a specific package from the API. -func ReadPackage(ctx context.Context, client *Client, name, version string) diag.Diagnostics { - params := fleetapi.GetPackageParams{} - - resp, err := client.API.GetPackage(ctx, name, version, ¶ms) +// GetPackage reads a specific package from the API. +func GetPackage(ctx context.Context, client *Client, name, version string) diag.Diagnostics { + resp, err := client.API.GetPackageWithResponse(ctx, name, version, nil) if err != nil { return fromErr(err) } - defer resp.Body.Close() - switch resp.StatusCode { + switch resp.StatusCode() { case http.StatusOK: return nil case http.StatusNotFound: return fromErr(ErrPackageNotFound) default: - errData, err := io.ReadAll(resp.Body) - if err != nil { - return fromErr(err) - } - - return reportUnknownError(resp.StatusCode, errData) + return reportUnknownError(resp.StatusCode(), resp.Body) } } // InstallPackage installs a package. func InstallPackage(ctx context.Context, client *Client, name, version string, force bool) diag.Diagnostics { - params := fleetapi.InstallPackageParams{} body := fleetapi.InstallPackageJSONRequestBody{ - Force: &force, - IgnoreConstraints: nil, + Force: &force, } - resp, err := client.API.InstallPackage(ctx, name, version, ¶ms, body) + resp, err := client.API.InstallPackageWithResponse(ctx, name, version, nil, body) if err != nil { return fromErr(err) } - defer resp.Body.Close() - switch resp.StatusCode { + switch resp.StatusCode() { case http.StatusOK: return nil default: - errData, err := io.ReadAll(resp.Body) - if err != nil { - return fromErr(err) - } - - return reportUnknownError(resp.StatusCode, errData) + return reportUnknownError(resp.StatusCode(), resp.Body) } } // Uninstall uninstalls a package. 
func Uninstall(ctx context.Context, client *Client, name, version string, force bool) diag.Diagnostics { - params := fleetapi.DeletePackageParams{} body := fleetapi.DeletePackageJSONRequestBody{ - Force: &force, + Force: force, } - resp, err := client.API.DeletePackageWithResponse(ctx, name, version, ¶ms, body) + resp, err := client.API.DeletePackageWithResponse(ctx, name, version, nil, body) if err != nil { return fromErr(err) } @@ -405,13 +385,13 @@ func Uninstall(ctx context.Context, client *Client, name, version string, force } } -// AllPackages returns information about the latest packages known to Fleet. -func AllPackages(ctx context.Context, client *Client, prerelease bool) ([]fleetapi.SearchResult, diag.Diagnostics) { - params := fleetapi.ListAllPackagesParams{ +// GetPackages returns information about the latest packages known to Fleet. +func GetPackages(ctx context.Context, client *Client, prerelease bool) ([]fleetapi.PackageListItem, diag.Diagnostics) { + params := fleetapi.ListPackagesParams{ Prerelease: &prerelease, } - resp, err := client.API.ListAllPackagesWithResponse(ctx, ¶ms) + resp, err := client.API.ListPackagesWithResponse(ctx, ¶ms) if err != nil { return nil, fromErr(err) } @@ -423,22 +403,3 @@ func AllPackages(ctx context.Context, client *Client, prerelease bool) ([]fleeta return nil, reportUnknownError(resp.StatusCode(), resp.Body) } } - -// fromErr recreates the sdkdiag.FromErr functionality. -func fromErr(err error) diag.Diagnostics { - if err == nil { - return nil - } - return diag.Diagnostics{ - diag.NewErrorDiagnostic(err.Error(), ""), - } -} - -func reportUnknownError(statusCode int, body []byte) diag.Diagnostics { - return diag.Diagnostics{ - diag.NewErrorDiagnostic( - fmt.Sprintf("Unexpected status code from server: got HTTP %d", statusCode), - string(body), - ), - } -} diff --git a/internal/fleet/agent_policy/models.go b/internal/fleet/agent_policy/models.go index 2072b383e..8cfe8d1e2 100644 --- a/internal/fleet/agent_policy/models.go +++ b/internal/fleet/agent_policy/models.go @@ -56,16 +56,16 @@ func (model *agentPolicyModel) populateFromAPI(data *fleetapi.AgentPolicy) { model.Namespace = types.StringValue(data.Namespace) } -func (model agentPolicyModel) toAPICreateModel() fleetapi.AgentPolicyCreateRequest { - monitoring := make([]fleetapi.AgentPolicyCreateRequestMonitoringEnabled, 0, 2) +func (model agentPolicyModel) toAPICreateModel() fleetapi.CreateAgentPolicyJSONRequestBody { + monitoring := make([]fleetapi.CreateAgentPolicyJSONBodyMonitoringEnabled, 0, 2) if model.MonitorLogs.ValueBool() { - monitoring = append(monitoring, fleetapi.AgentPolicyCreateRequestMonitoringEnabledLogs) + monitoring = append(monitoring, fleetapi.CreateAgentPolicyJSONBodyMonitoringEnabledLogs) } if model.MonitorMetrics.ValueBool() { - monitoring = append(monitoring, fleetapi.AgentPolicyCreateRequestMonitoringEnabledMetrics) + monitoring = append(monitoring, fleetapi.CreateAgentPolicyJSONBodyMonitoringEnabledMetrics) } - body := fleetapi.AgentPolicyCreateRequest{ + body := fleetapi.CreateAgentPolicyJSONRequestBody{ DataOutputId: model.DataOutputId.ValueStringPointer(), Description: model.Description.ValueStringPointer(), DownloadSourceId: model.DownloadSourceId.ValueStringPointer(), @@ -80,8 +80,8 @@ func (model agentPolicyModel) toAPICreateModel() fleetapi.AgentPolicyCreateReque return body } -func (model agentPolicyModel) toAPIUpdateModel() fleetapi.AgentPolicyUpdateRequest { - monitoring := make([]fleetapi.AgentPolicyUpdateRequestMonitoringEnabled, 0, 2) +func (model 
agentPolicyModel) toAPIUpdateModel() fleetapi.UpdateAgentPolicyJSONRequestBody { + monitoring := make([]fleetapi.UpdateAgentPolicyJSONBodyMonitoringEnabled, 0, 2) if model.MonitorLogs.ValueBool() { monitoring = append(monitoring, fleetapi.Logs) } @@ -89,7 +89,7 @@ func (model agentPolicyModel) toAPIUpdateModel() fleetapi.AgentPolicyUpdateReque monitoring = append(monitoring, fleetapi.Metrics) } - body := fleetapi.AgentPolicyUpdateRequest{ + body := fleetapi.UpdateAgentPolicyJSONRequestBody{ DataOutputId: model.DataOutputId.ValueStringPointer(), Description: model.Description.ValueStringPointer(), DownloadSourceId: model.DownloadSourceId.ValueStringPointer(), diff --git a/internal/fleet/agent_policy/read.go b/internal/fleet/agent_policy/read.go index 43a2c2572..7d6714c13 100644 --- a/internal/fleet/agent_policy/read.go +++ b/internal/fleet/agent_policy/read.go @@ -23,7 +23,7 @@ func (r *agentPolicyResource) Read(ctx context.Context, req resource.ReadRequest } policyID := stateModel.PolicyID.ValueString() - policy, diags := fleet.ReadAgentPolicy(ctx, client, policyID) + policy, diags := fleet.GetAgentPolicy(ctx, client, policyID) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return diff --git a/internal/fleet/agent_policy/resource_test.go b/internal/fleet/agent_policy/resource_test.go index 3f827da47..a885e6175 100644 --- a/internal/fleet/agent_policy/resource_test.go +++ b/internal/fleet/agent_policy/resource_test.go @@ -188,7 +188,7 @@ func checkResourceAgentPolicyDestroy(s *terraform.State) error { if err != nil { return err } - policy, diags := fleet.ReadAgentPolicy(context.Background(), fleetClient, rs.Primary.ID) + policy, diags := fleet.GetAgentPolicy(context.Background(), fleetClient, rs.Primary.ID) if diags.HasError() { return utils.FwDiagsAsError(diags) } @@ -214,7 +214,7 @@ func checkResourceAgentPolicySkipDestroy(s *terraform.State) error { if err != nil { return err } - policy, diags := fleet.ReadAgentPolicy(context.Background(), fleetClient, rs.Primary.ID) + policy, diags := fleet.GetAgentPolicy(context.Background(), fleetClient, rs.Primary.ID) if diags.HasError() { return utils.FwDiagsAsError(diags) } diff --git a/internal/fleet/enrollment_tokens/data_source_test.go b/internal/fleet/enrollment_tokens/data_source_test.go index 586170ec4..e4ad91346 100644 --- a/internal/fleet/enrollment_tokens/data_source_test.go +++ b/internal/fleet/enrollment_tokens/data_source_test.go @@ -68,7 +68,7 @@ func checkResourceAgentPolicyDestroy(s *terraform.State) error { if err != nil { return err } - policy, diags := fleet.ReadAgentPolicy(context.Background(), fleetClient, rs.Primary.ID) + policy, diags := fleet.GetAgentPolicy(context.Background(), fleetClient, rs.Primary.ID) if diags.HasError() { return utils.FwDiagsAsError(diags) } diff --git a/internal/fleet/enrollment_tokens/read.go b/internal/fleet/enrollment_tokens/read.go index a35798c85..325285897 100644 --- a/internal/fleet/enrollment_tokens/read.go +++ b/internal/fleet/enrollment_tokens/read.go @@ -28,7 +28,7 @@ func (d *enrollmentTokensDataSource) Read(ctx context.Context, req datasource.Re var tokens []fleetapi.EnrollmentApiKey policyID := model.PolicyID.ValueString() if policyID == "" { - tokens, diags = fleet.AllEnrollmentTokens(ctx, client) + tokens, diags = fleet.GetEnrollmentTokens(ctx, client) } else { tokens, diags = fleet.GetEnrollmentTokensByPolicy(ctx, client, policyID) } diff --git a/internal/fleet/integration/read.go b/internal/fleet/integration/read.go index fbd345851..80e719d22 100644 --- 
a/internal/fleet/integration/read.go +++ b/internal/fleet/integration/read.go @@ -25,7 +25,7 @@ func (r *integrationResource) Read(ctx context.Context, req resource.ReadRequest name := stateModel.Name.ValueString() version := stateModel.Version.ValueString() - diags = fleet.ReadPackage(ctx, client, name, version) + diags = fleet.GetPackage(ctx, client, name, version) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { resp.State.RemoveResource(ctx) diff --git a/internal/fleet/integration_ds/models.go b/internal/fleet/integration_ds/models.go index f489852e1..75125cbd6 100644 --- a/internal/fleet/integration_ds/models.go +++ b/internal/fleet/integration_ds/models.go @@ -12,7 +12,7 @@ type integrationDataSourceModel struct { Version types.String `tfsdk:"version"` } -func (m *integrationDataSourceModel) populateFromAPI(pkgName string, packages []fleetapi.SearchResult) { +func (m *integrationDataSourceModel) populateFromAPI(pkgName string, packages []fleetapi.PackageListItem) { m.Version = types.StringNull() for _, pkg := range packages { if pkg.Name == pkgName { diff --git a/internal/fleet/integration_ds/read.go b/internal/fleet/integration_ds/read.go index 7c0acf45b..8ec5ffd96 100644 --- a/internal/fleet/integration_ds/read.go +++ b/internal/fleet/integration_ds/read.go @@ -26,7 +26,7 @@ func (d *integrationDataSource) Read(ctx context.Context, req datasource.ReadReq name := model.Name.ValueString() prerelease := model.Prerelease.ValueBool() - packages, diags := fleet.AllPackages(ctx, client, prerelease) + packages, diags := fleet.GetPackages(ctx, client, prerelease) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return diff --git a/internal/fleet/integration_policy/models.go b/internal/fleet/integration_policy/models.go index 5dfe88c73..9f44f9e69 100644 --- a/internal/fleet/integration_policy/models.go +++ b/internal/fleet/integration_policy/models.go @@ -47,7 +47,7 @@ func (model *integrationPolicyModel) populateFromAPI(ctx context.Context, data * model.Namespace = types.StringPointerValue(data.Namespace) model.AgentPolicyID = types.StringPointerValue(data.PolicyId) model.Description = types.StringPointerValue(data.Description) - model.Enabled = types.BoolPointerValue(data.Enabled) + model.Enabled = types.BoolValue(data.Enabled) model.IntegrationName = types.StringValue(data.Package.Name) model.IntegrationVersion = types.StringValue(data.Package.Version) model.VarsJson = utils.MapToNormalizedType(utils.Deref(data.Vars), path.Root("vars_json"), diags) @@ -62,7 +62,7 @@ func (model *integrationPolicyModel) populateInputFromAPI(ctx context.Context, i func(inputData fleetapi.PackagePolicyInput, meta utils.MapMeta) integrationPolicyInputModel { return integrationPolicyInputModel{ InputID: types.StringValue(meta.Key), - Enabled: types.BoolValue(inputData.Enabled), + Enabled: types.BoolPointerValue(inputData.Enabled), StreamsJson: utils.MapToNormalizedType(utils.Deref(inputData.Streams), meta.Path.AtName("streams_json"), diags), VarsJson: utils.MapToNormalizedType(utils.Deref(inputData.Vars), meta.Path.AtName("vars_json"), diags), } @@ -88,14 +88,11 @@ func (model integrationPolicyModel) toAPIModel(ctx context.Context, isUpdate boo Force: model.Force.ValueBoolPointer(), Name: model.Name.ValueString(), Namespace: model.Namespace.ValueStringPointer(), - Package: struct { - Name string `json:"name"` - Version string `json:"version"` - }{ + Package: fleetapi.PackagePolicyRequestPackage{ Name: model.IntegrationName.ValueString(), Version: 
model.IntegrationVersion.ValueString(), }, - PolicyId: model.AgentPolicyID.ValueString(), + PolicyId: model.AgentPolicyID.ValueStringPointer(), Vars: utils.MapRef(utils.NormalizedTypeToMap[any](model.VarsJson, path.Root("vars_json"), diags)), } diff --git a/internal/fleet/integration_policy/read.go b/internal/fleet/integration_policy/read.go index e82f28bda..3e4ac7ad3 100644 --- a/internal/fleet/integration_policy/read.go +++ b/internal/fleet/integration_policy/read.go @@ -23,7 +23,7 @@ func (r *integrationPolicyResource) Read(ctx context.Context, req resource.ReadR } policyID := stateModel.PolicyID.ValueString() - policy, diags := fleet.ReadPackagePolicy(ctx, client, policyID) + policy, diags := fleet.GetPackagePolicy(ctx, client, policyID) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return diff --git a/internal/fleet/integration_policy/resource_test.go b/internal/fleet/integration_policy/resource_test.go index 6d4d3c41d..6c1aea768 100644 --- a/internal/fleet/integration_policy/resource_test.go +++ b/internal/fleet/integration_policy/resource_test.go @@ -177,7 +177,7 @@ func checkResourceIntegrationPolicyDestroy(s *terraform.State) error { for _, rs := range s.RootModule().Resources { switch rs.Type { case "elasticstack_fleet_agent_policy": - policy, diags := fleet.ReadAgentPolicy(context.Background(), fleetClient, rs.Primary.ID) + policy, diags := fleet.GetAgentPolicy(context.Background(), fleetClient, rs.Primary.ID) if diags.HasError() { return utils.FwDiagsAsError(diags) } @@ -185,7 +185,7 @@ func checkResourceIntegrationPolicyDestroy(s *terraform.State) error { return fmt.Errorf("agent policy id=%v still exists, but it should have been removed", rs.Primary.ID) } case "elasticstack_fleet_integration_policy": - policy, diags := fleet.ReadPackagePolicy(context.Background(), fleetClient, rs.Primary.ID) + policy, diags := fleet.GetPackagePolicy(context.Background(), fleetClient, rs.Primary.ID) if diags.HasError() { return utils.FwDiagsAsError(diags) } diff --git a/internal/fleet/integration_policy/secrets.go b/internal/fleet/integration_policy/secrets.go index 114604c92..832b2f9da 100644 --- a/internal/fleet/integration_policy/secrets.go +++ b/internal/fleet/integration_policy/secrets.go @@ -38,7 +38,7 @@ func newSecretStore(ctx context.Context, resp *fleetapi.PackagePolicy, private p // Remove any saved secret refs not present in the API response. 
refs := make(map[string]any) for _, r := range utils.Deref(resp.SecretReferences) { - refs[*r.Id] = nil + refs[r.Id] = nil } for id := range store { @@ -103,10 +103,8 @@ func HandleRespSecrets(ctx context.Context, resp *fleetapi.PackagePolicy, privat handleVars(utils.Deref(resp.Vars)) for _, input := range resp.Inputs { handleVars(utils.Deref(input.Vars)) - for _, _stream := range utils.Deref(input.Streams) { - stream := _stream.(map[string]any) - streamVars := stream["vars"].(map[string]any) - handleVars(streamVars) + for _, stream := range utils.Deref(input.Streams) { + handleVars(*stream.Vars) } } @@ -170,9 +168,8 @@ func HandleReqRespSecrets(ctx context.Context, req fleetapi.PackagePolicyRequest handleVars(utils.Deref(inputReq.Vars), utils.Deref(inputResp.Vars)) streamsResp := utils.Deref(inputResp.Streams) for streamID, streamReq := range utils.Deref(inputReq.Streams) { - streamResp := streamsResp[streamID].(map[string]any) - streamRespVars := streamResp["vars"].(map[string]any) - handleVars(utils.Deref(streamReq.Vars), streamRespVars) + streamResp := streamsResp[streamID] + handleVars(utils.Deref(streamReq.Vars), utils.Deref(streamResp.Vars)) } } diff --git a/internal/fleet/integration_policy/secrets_test.go b/internal/fleet/integration_policy/secrets_test.go index fae7ed225..a5e8599b4 100644 --- a/internal/fleet/integration_policy/secrets_test.go +++ b/internal/fleet/integration_policy/secrets_test.go @@ -35,10 +35,8 @@ func TestHandleRespSecrets(t *testing.T) { ctx := context.Background() private := privateData{"secrets": `{"known-secret":"secret"}`} - secretRefs := &[]struct { - Id *string `json:"id,omitempty"` - }{ - {Id: utils.Pointer("known-secret")}, + secretRefs := &[]fleetapi.PackagePolicySecretRef{ + {Id: "known-secret"}, } tests := []struct { @@ -89,7 +87,7 @@ func TestHandleRespSecrets(t *testing.T) { SecretReferences: secretRefs, Inputs: map[string]fleetapi.PackagePolicyInput{ "input1": { - Streams: &Map{"stream1": Map{"vars": maps.Clone(tt.input)}}, + Streams: &map[string]fleetapi.PackagePolicyInputStream{"stream1": fleetapi.PackagePolicyInputStream{Vars: utils.Pointer(maps.Clone(tt.input))}}, Vars: utils.Pointer(maps.Clone(tt.input)), }, }, @@ -98,7 +96,7 @@ func TestHandleRespSecrets(t *testing.T) { wants := fleetapi.PackagePolicy{ Inputs: map[string]fleetapi.PackagePolicyInput{ "input1": { - Streams: &Map{"stream1": Map{"vars": tt.want}}, + Streams: &map[string]fleetapi.PackagePolicyInputStream{"stream1": fleetapi.PackagePolicyInputStream{Vars: utils.Pointer(tt.want)}}, Vars: &tt.want, }, }, @@ -118,8 +116,8 @@ func TestHandleRespSecrets(t *testing.T) { require.Equal(t, want, got) // Stream vars - got = (*resp.Inputs["input1"].Streams)["stream1"].(Map)["vars"].(Map) - want = (*wants.Inputs["input1"].Streams)["stream1"].(Map)["vars"].(Map) + got = *(*resp.Inputs["input1"].Streams)["stream1"].Vars + want = *(*wants.Inputs["input1"].Streams)["stream1"].Vars require.Equal(t, want, got) // privateData @@ -134,10 +132,8 @@ func TestHandleReqRespSecrets(t *testing.T) { ctx := context.Background() - secretRefs := &[]struct { - Id *string `json:"id,omitempty"` - }{ - {Id: utils.Pointer("known-secret")}, + secretRefs := &[]fleetapi.PackagePolicySecretRef{ + {Id: "known-secret"}, } tests := []struct { @@ -205,7 +201,7 @@ func TestHandleReqRespSecrets(t *testing.T) { SecretReferences: secretRefs, Inputs: map[string]fleetapi.PackagePolicyInput{ "input1": { - Streams: &Map{"stream1": Map{"vars": maps.Clone(tt.respInput)}}, + Streams: 
&map[string]fleetapi.PackagePolicyInputStream{"stream1": fleetapi.PackagePolicyInputStream{Vars: utils.Pointer(maps.Clone(tt.respInput))}}, Vars: utils.Pointer(maps.Clone(tt.respInput)), }, }, @@ -214,7 +210,7 @@ func TestHandleReqRespSecrets(t *testing.T) { wants := fleetapi.PackagePolicy{ Inputs: map[string]fleetapi.PackagePolicyInput{ "input1": { - Streams: &Map{"stream1": Map{"vars": tt.want}}, + Streams: &map[string]fleetapi.PackagePolicyInputStream{"stream1": fleetapi.PackagePolicyInputStream{Vars: utils.Pointer(tt.want)}}, Vars: &tt.want, }, }, @@ -236,8 +232,8 @@ func TestHandleReqRespSecrets(t *testing.T) { require.Equal(t, want, got) // Stream vars - got = (*resp.Inputs["input1"].Streams)["stream1"].(Map)["vars"].(Map) - want = (*wants.Inputs["input1"].Streams)["stream1"].(Map)["vars"].(Map) + got = *(*resp.Inputs["input1"].Streams)["stream1"].Vars + want = *(*wants.Inputs["input1"].Streams)["stream1"].Vars require.Equal(t, want, got) if v, ok := (*req.Vars)["k"]; ok && v == "secret" { diff --git a/internal/fleet/output/create.go b/internal/fleet/output/create.go index f8cba6e80..b4f0c7858 100644 --- a/internal/fleet/output/create.go +++ b/internal/fleet/output/create.go @@ -34,7 +34,7 @@ func (r *outputResource) Create(ctx context.Context, req resource.CreateRequest, return } - diags = planModel.populateFromAPICreate(ctx, output) + diags = planModel.populateFromAPI(ctx, output) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return diff --git a/internal/fleet/output/models.go b/internal/fleet/output/models.go index afa5b866c..73689bc46 100644 --- a/internal/fleet/output/models.go +++ b/internal/fleet/output/models.go @@ -31,90 +31,41 @@ type outputSslModel struct { Key types.String `tfsdk:"key"` } -func (model *outputModel) populateFromAPICreate(ctx context.Context, data *fleetapi.OutputCreateRequest) (diags diag.Diagnostics) { - if data == nil { +func (model *outputModel) populateFromAPI(ctx context.Context, union *fleetapi.OutputUnion) (diags diag.Diagnostics) { + if union == nil { return } - union, err := data.ValueByDiscriminator() - if err != nil { - diags.AddError(err.Error(), "") - return - } - - var nd diag.Diagnostics - switch data := union.(type) { - case fleetapi.OutputCreateRequestElasticsearch: - model.ID = types.StringPointerValue(data.Id) - model.OutputID = types.StringPointerValue(data.Id) - model.Name = types.StringValue(data.Name) - model.Type = types.StringValue(string(data.Type)) - model.Hosts = utils.SliceToListType_String(ctx, utils.Deref(data.Hosts), path.Root("hosts"), diags) - model.CaSha256 = types.StringPointerValue(data.CaSha256) - model.CaTrustedFingerprint = types.StringPointerValue(data.CaTrustedFingerprint) - model.DefaultIntegrations = types.BoolPointerValue(data.IsDefault) - model.DefaultMonitoring = types.BoolPointerValue(data.IsDefaultMonitoring) - model.ConfigYaml = types.StringPointerValue(data.ConfigYaml) - - if data.Ssl != nil { - p := path.Root("ssl") - sslModels := []outputSslModel{{ - CertificateAuthorities: utils.SliceToListType_String(ctx, utils.Deref(data.Ssl.CertificateAuthorities), p.AtName("certificate_authorities"), diags), - Certificate: types.StringPointerValue(data.Ssl.Certificate), - Key: types.StringPointerValue(data.Ssl.Key), - }} - model.Ssl, nd = types.ListValueFrom(ctx, getSslAttrTypes(), sslModels) - diags.Append(nd...) 
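
The output model rework in the next hunks consumes the regenerated OutputUnion type directly instead of the old OutputCreateRequest/OutputUpdateRequest pair. For readers without fleet.gen.go at hand, oapi-codegen represents a discriminated anyOf as a thin wrapper over the raw JSON; the sketch below shows the assumed shape (the internal field, method bodies, and the trimmed OutputElasticsearch stub are illustrative, not copied from the generated file):

package fleet

import "encoding/json"

// Trimmed stand-in for the generated elasticsearch variant.
type OutputElasticsearch struct {
	Type string `json:"type"`
	Name string `json:"name"`
}

// Assumed shape of the generated union wrapper.
type OutputUnion struct {
	union json.RawMessage
}

// Discriminator reads the "type" property that transform_schema.go adds as
// the union discriminator.
func (t OutputUnion) Discriminator() (string, error) {
	var d struct {
		Type string `json:"type"`
	}
	err := json.Unmarshal(t.union, &d)
	return d.Type, err
}

// AsOutputElasticsearch decodes the raw payload into the concrete variant.
func (t OutputUnion) AsOutputElasticsearch() (OutputElasticsearch, error) {
	var v OutputElasticsearch
	err := json.Unmarshal(t.union, &v)
	return v, err
}

populateFromAPI below switches on Discriminator() and then calls the matching As* accessor, replacing the earlier ValueByDiscriminator type switch.
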
- } else { - model.Ssl = types.ListNull(getSslAttrTypes()) - } - - case fleetapi.OutputCreateRequestLogstash: - model.ID = types.StringPointerValue(data.Id) - model.OutputID = types.StringPointerValue(data.Id) - model.Name = types.StringValue(data.Name) - model.Type = types.StringValue(string(data.Type)) - model.Hosts = utils.SliceToListType_String(ctx, data.Hosts, path.Root("hosts"), diags) - model.CaSha256 = types.StringPointerValue(data.CaSha256) - model.CaTrustedFingerprint = types.StringPointerValue(data.CaTrustedFingerprint) - model.DefaultIntegrations = types.BoolPointerValue(data.IsDefault) - model.DefaultMonitoring = types.BoolPointerValue(data.IsDefaultMonitoring) - model.ConfigYaml = types.StringPointerValue(data.ConfigYaml) - - if data.Ssl != nil { + doSsl := func(ssl *fleetapi.OutputSsl) types.List { + if ssl != nil { p := path.Root("ssl") sslModels := []outputSslModel{{ - CertificateAuthorities: utils.SliceToListType_String(ctx, utils.Deref(data.Ssl.CertificateAuthorities), p.AtName("certificate_authorities"), diags), - Certificate: types.StringPointerValue(data.Ssl.Certificate), - Key: types.StringPointerValue(data.Ssl.Key), + CertificateAuthorities: utils.SliceToListType_String(ctx, utils.Deref(ssl.CertificateAuthorities), p.AtName("certificate_authorities"), diags), + Certificate: types.StringPointerValue(ssl.Certificate), + Key: types.StringPointerValue(ssl.Key), }} - model.Ssl, nd = types.ListValueFrom(ctx, getSslAttrTypes(), sslModels) + list, nd := types.ListValueFrom(ctx, getSslAttrTypes(), sslModels) diags.Append(nd...) + return list } else { - model.Ssl = types.ListNull(getSslAttrTypes()) + return types.ListNull(getSslAttrTypes()) } - - default: - diags.AddError(fmt.Sprintf("unhandled output type: %T", data), "") - } - - return -} - -func (model *outputModel) populateFromAPIUpdate(ctx context.Context, data *fleetapi.OutputUpdateRequest) (diags diag.Diagnostics) { - if data == nil { - return } - union, err := data.ValueByDiscriminator() + discriminator, err := union.Discriminator() if err != nil { diags.AddError(err.Error(), "") return } - var nd diag.Diagnostics - switch data := union.(type) { - case fleetapi.OutputUpdateRequestElasticsearch: + switch discriminator { + case "elasticsearch": + data, err := union.AsOutputElasticsearch() + if err != nil { + diags.AddError(err.Error(), "") + return + } + model.ID = types.StringPointerValue(data.Id) model.OutputID = types.StringPointerValue(data.Id) model.Name = types.StringValue(data.Name) @@ -125,91 +76,74 @@ func (model *outputModel) populateFromAPIUpdate(ctx context.Context, data *fleet model.DefaultIntegrations = types.BoolPointerValue(data.IsDefault) model.DefaultMonitoring = types.BoolPointerValue(data.IsDefaultMonitoring) model.ConfigYaml = types.StringPointerValue(data.ConfigYaml) + model.Ssl = doSsl(data.Ssl) - if data.Ssl != nil { - p := path.Root("ssl") - sslModel := []outputSslModel{{ - CertificateAuthorities: utils.SliceToListType_String(ctx, utils.Deref(data.Ssl.CertificateAuthorities), p.AtName("certificate_authorities"), diags), - Certificate: types.StringPointerValue(data.Ssl.Certificate), - Key: types.StringPointerValue(data.Ssl.Key), - }} - model.Ssl, nd = types.ListValueFrom(ctx, getSslAttrTypes(), sslModel) - diags.Append(nd...) 
- } else { - model.Ssl = types.ListNull(getSslAttrTypes()) + case "logstash": + data, err := union.AsOutputLogstash() + if err != nil { + diags.AddError(err.Error(), "") + return } - case fleetapi.OutputUpdateRequestLogstash: model.ID = types.StringPointerValue(data.Id) model.OutputID = types.StringPointerValue(data.Id) model.Name = types.StringValue(data.Name) model.Type = types.StringValue(string(data.Type)) - model.Hosts = utils.SliceToListType_String(ctx, utils.Deref(data.Hosts), path.Root("hosts"), diags) + model.Hosts = utils.SliceToListType_String(ctx, data.Hosts, path.Root("hosts"), diags) model.CaSha256 = types.StringPointerValue(data.CaSha256) model.CaTrustedFingerprint = types.StringPointerValue(data.CaTrustedFingerprint) model.DefaultIntegrations = types.BoolPointerValue(data.IsDefault) model.DefaultMonitoring = types.BoolPointerValue(data.IsDefaultMonitoring) model.ConfigYaml = types.StringPointerValue(data.ConfigYaml) - - if data.Ssl != nil { - p := path.Root("ssl") - sslModel := []outputSslModel{{ - CertificateAuthorities: utils.SliceToListType_String(ctx, utils.Deref(data.Ssl.CertificateAuthorities), p.AtName("certificate_authorities"), diags), - Certificate: types.StringPointerValue(data.Ssl.Certificate), - Key: types.StringPointerValue(data.Ssl.Key), - }} - model.Ssl, nd = types.ListValueFrom(ctx, getSslAttrTypes(), sslModel) - diags.Append(nd...) - } else { - model.Ssl = types.ListNull(getSslAttrTypes()) - } + model.Ssl = doSsl(data.Ssl) default: - diags.AddError(fmt.Sprintf("unhandled output type: %T", data), "") + diags.AddError(fmt.Sprintf("unhandled output type: %s", discriminator), "") } return } -func (model outputModel) toAPICreateModel(ctx context.Context) (union fleetapi.OutputCreateRequest, diags diag.Diagnostics) { +func (model outputModel) toAPICreateModel(ctx context.Context) (union fleetapi.NewOutputUnion, diags diag.Diagnostics) { + doSsl := func() *fleetapi.NewOutputSsl { + if utils.IsKnown(model.Ssl) { + sslModels := utils.ListTypeAs[outputSslModel](ctx, model.Ssl, path.Root("ssl"), diags) + if len(sslModels) > 0 { + return &fleetapi.NewOutputSsl{ + Certificate: sslModels[0].Certificate.ValueStringPointer(), + CertificateAuthorities: utils.SliceRef(utils.ListTypeToSlice_String(ctx, sslModels[0].CertificateAuthorities, path.Root("certificate_authorities"), diags)), + Key: sslModels[0].Key.ValueStringPointer(), + } + } + } + return nil + } + outputType := model.Type.ValueString() switch outputType { case "elasticsearch": - body := fleetapi.OutputCreateRequestElasticsearch{ - Type: fleetapi.OutputCreateRequestElasticsearchTypeElasticsearch, + body := fleetapi.NewOutputElasticsearch{ + Type: "elasticsearch", CaSha256: model.CaSha256.ValueStringPointer(), CaTrustedFingerprint: model.CaTrustedFingerprint.ValueStringPointer(), ConfigYaml: model.ConfigYaml.ValueStringPointer(), - Hosts: utils.SliceRef(utils.ListTypeToSlice_String(ctx, model.Hosts, path.Root("hosts"), diags)), + Hosts: utils.ListTypeToSlice_String(ctx, model.Hosts, path.Root("hosts"), diags), Id: model.OutputID.ValueStringPointer(), IsDefault: model.DefaultIntegrations.ValueBoolPointer(), IsDefaultMonitoring: model.DefaultMonitoring.ValueBoolPointer(), Name: model.Name.ValueString(), + Ssl: doSsl(), } - // Can't use helpers for anonymous structs - if utils.IsKnown(model.Ssl) { - sslModels := utils.ListTypeAs[outputSslModel](ctx, model.Ssl, path.Root("ssl"), diags) - if len(sslModels) > 0 { - body.Ssl = &struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities 
*[]string `json:"certificate_authorities,omitempty"` - Key *string `json:"key,omitempty"` - }{ - Certificate: sslModels[0].Certificate.ValueStringPointer(), - CertificateAuthorities: utils.SliceRef(utils.ListTypeToSlice_String(ctx, sslModels[0].CertificateAuthorities, path.Root("certificate_authorities"), diags)), - Key: sslModels[0].Key.ValueStringPointer(), - } - } - } - - err := union.FromOutputCreateRequestElasticsearch(body) + err := union.FromNewOutputElasticsearch(body) if err != nil { diags.AddError(err.Error(), "") + return } case "logstash": - body := fleetapi.OutputCreateRequestLogstash{ + body := fleetapi.NewOutputLogstash{ + Type: "logstash", CaSha256: model.CaSha256.ValueStringPointer(), CaTrustedFingerprint: model.CaTrustedFingerprint.ValueStringPointer(), ConfigYaml: model.ConfigYaml.ValueStringPointer(), @@ -218,28 +152,13 @@ func (model outputModel) toAPICreateModel(ctx context.Context) (union fleetapi.O IsDefault: model.DefaultIntegrations.ValueBoolPointer(), IsDefaultMonitoring: model.DefaultMonitoring.ValueBoolPointer(), Name: model.Name.ValueString(), - Type: fleetapi.OutputCreateRequestLogstashTypeLogstash, - } - - // Can't use helpers for anonymous structs - if utils.IsKnown(model.Ssl) { - sslModels := utils.ListTypeAs[outputSslModel](ctx, model.Ssl, path.Root("ssl"), diags) - if len(sslModels) > 0 { - body.Ssl = &struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - Key *string `json:"key,omitempty"` - }{ - Certificate: sslModels[0].Certificate.ValueStringPointer(), - CertificateAuthorities: utils.SliceRef(utils.ListTypeToSlice_String(ctx, sslModels[0].CertificateAuthorities, path.Root("certificate_authorities"), diags)), - Key: sslModels[0].Key.ValueStringPointer(), - } - } + Ssl: doSsl(), } - err := union.FromOutputCreateRequestLogstash(body) + err := union.FromNewOutputLogstash(body) if err != nil { diags.AddError(err.Error(), "") + return } default: @@ -249,73 +168,59 @@ func (model outputModel) toAPICreateModel(ctx context.Context) (union fleetapi.O return } -func (model outputModel) toAPIUpdateModel(ctx context.Context) (union fleetapi.OutputUpdateRequest, diags diag.Diagnostics) { - outputType := model.Type.ValueString() - switch outputType { - case "elasticsearch": - body := fleetapi.OutputUpdateRequestElasticsearch{ - Type: fleetapi.OutputUpdateRequestElasticsearchTypeElasticsearch, - CaSha256: model.CaSha256.ValueStringPointer(), - CaTrustedFingerprint: model.CaTrustedFingerprint.ValueStringPointer(), - ConfigYaml: model.ConfigYaml.ValueStringPointer(), - Hosts: utils.ListTypeToSlice_String(ctx, model.Hosts, path.Root("hosts"), diags), - IsDefault: model.DefaultIntegrations.ValueBoolPointer(), - IsDefaultMonitoring: model.DefaultMonitoring.ValueBoolPointer(), - Name: model.Name.ValueString(), - } - - // Can't use helpers for anonymous structs +func (model outputModel) toAPIUpdateModel(ctx context.Context) (union fleetapi.UpdateOutputUnion, diags diag.Diagnostics) { + doSsl := func() *fleetapi.UpdateOutputSsl { if utils.IsKnown(model.Ssl) { sslModels := utils.ListTypeAs[outputSslModel](ctx, model.Ssl, path.Root("ssl"), diags) if len(sslModels) > 0 { - body.Ssl = &struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - Key *string `json:"key,omitempty"` - }{ + return &fleetapi.UpdateOutputSsl{ Certificate: sslModels[0].Certificate.ValueStringPointer(), CertificateAuthorities: 
utils.SliceRef(utils.ListTypeToSlice_String(ctx, sslModels[0].CertificateAuthorities, path.Root("certificate_authorities"), diags)), Key: sslModels[0].Key.ValueStringPointer(), } } } + return nil + } + + outputType := model.Type.ValueString() + switch outputType { + case "elasticsearch": + body := fleetapi.UpdateOutputElasticsearch{ + Type: "elasticsearch", + CaSha256: model.CaSha256.ValueStringPointer(), + CaTrustedFingerprint: model.CaTrustedFingerprint.ValueStringPointer(), + ConfigYaml: model.ConfigYaml.ValueStringPointer(), + Hosts: utils.SliceRef(utils.ListTypeToSlice_String(ctx, model.Hosts, path.Root("hosts"), diags)), + IsDefault: model.DefaultIntegrations.ValueBoolPointer(), + IsDefaultMonitoring: model.DefaultMonitoring.ValueBoolPointer(), + Name: model.Name.ValueStringPointer(), + Ssl: doSsl(), + } - err := union.FromOutputUpdateRequestElasticsearch(body) + err := union.FromUpdateOutputElasticsearch(body) if err != nil { diags.AddError(err.Error(), "") + return } case "logstash": - body := fleetapi.OutputUpdateRequestLogstash{ + body := fleetapi.UpdateOutputLogstash{ + Type: "logstash", CaSha256: model.CaSha256.ValueStringPointer(), CaTrustedFingerprint: model.CaTrustedFingerprint.ValueStringPointer(), ConfigYaml: model.ConfigYaml.ValueStringPointer(), Hosts: utils.SliceRef(utils.ListTypeToSlice_String(ctx, model.Hosts, path.Root("hosts"), diags)), IsDefault: model.DefaultIntegrations.ValueBoolPointer(), IsDefaultMonitoring: model.DefaultMonitoring.ValueBoolPointer(), - Name: model.Name.ValueString(), - Type: fleetapi.OutputUpdateRequestLogstashTypeLogstash, - } - - // Can't use helpers for anonymous structs - if utils.IsKnown(model.Ssl) { - sslModels := utils.ListTypeAs[outputSslModel](ctx, model.Ssl, path.Root("ssl"), diags) - if len(sslModels) > 0 { - body.Ssl = &struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - Key *string `json:"key,omitempty"` - }{ - Certificate: sslModels[0].Certificate.ValueStringPointer(), - CertificateAuthorities: utils.SliceRef(utils.ListTypeToSlice_String(ctx, sslModels[0].CertificateAuthorities, path.Root("certificate_authorities"), diags)), - Key: sslModels[0].Key.ValueStringPointer(), - } - } + Name: model.Name.ValueStringPointer(), + Ssl: doSsl(), } - err := union.FromOutputUpdateRequestLogstash(body) + err := union.FromUpdateOutputLogstash(body) if err != nil { diags.AddError(err.Error(), "") + return } default: diff --git a/internal/fleet/output/read.go b/internal/fleet/output/read.go index 3e0467990..6e3989ac3 100644 --- a/internal/fleet/output/read.go +++ b/internal/fleet/output/read.go @@ -23,7 +23,7 @@ func (r *outputResource) Read(ctx context.Context, req resource.ReadRequest, res } outputID := stateModel.OutputID.ValueString() - output, diags := fleet.ReadOutput(ctx, client, outputID) + output, diags := fleet.GetOutput(ctx, client, outputID) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { resp.State.RemoveResource(ctx) @@ -35,7 +35,7 @@ func (r *outputResource) Read(ctx context.Context, req resource.ReadRequest, res return } - diags = stateModel.populateFromAPICreate(ctx, output) + diags = stateModel.populateFromAPI(ctx, output) resp.Diagnostics.Append(diags...) 
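
The model conversions in this file, and the fleet client changes earlier in the patch, lean on small generic helpers from internal/utils (Pointer, Deref, SliceRef). Their implementations are not part of this patch; a plausible sketch, for reference only:

package utils

// Pointer returns a pointer to any value; used for optional request fields,
// e.g. Format: utils.Pointer(fleetapi.GetPackagePolicyParamsFormatSimplified).
func Pointer[T any](v T) *T { return &v }

// Deref returns the pointed-to value, or the zero value when the pointer is nil.
func Deref[T any](v *T) T {
	if v == nil {
		var zero T
		return zero
	}
	return *v
}

// SliceRef wraps a slice in a pointer, matching *[]string style fields in the
// generated API types (e.g. the update output Hosts field above).
func SliceRef[T any](v []T) *[]T {
	return &v
}
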
if resp.Diagnostics.HasError() { return diff --git a/internal/fleet/output/resource_test.go b/internal/fleet/output/resource_test.go index 425c50085..cfdb5e66d 100644 --- a/internal/fleet/output/resource_test.go +++ b/internal/fleet/output/resource_test.go @@ -318,7 +318,7 @@ func checkResourceOutputDestroy(s *terraform.State) error { if err != nil { return err } - output, diags := fleet.ReadOutput(context.Background(), fleetClient, rs.Primary.ID) + output, diags := fleet.GetOutput(context.Background(), fleetClient, rs.Primary.ID) if diags.HasError() { return utils.FwDiagsAsError(diags) } diff --git a/internal/fleet/output/update.go b/internal/fleet/output/update.go index 6aca90f4c..ca95177a9 100644 --- a/internal/fleet/output/update.go +++ b/internal/fleet/output/update.go @@ -35,7 +35,7 @@ func (r *outputResource) Update(ctx context.Context, req resource.UpdateRequest, return } - diags = planModel.populateFromAPIUpdate(ctx, output) + diags = planModel.populateFromAPI(ctx, output) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return diff --git a/internal/fleet/server_host/models.go b/internal/fleet/server_host/models.go index cd2ce6e51..1684b8aba 100644 --- a/internal/fleet/server_host/models.go +++ b/internal/fleet/server_host/models.go @@ -18,22 +18,22 @@ type serverHostModel struct { Default types.Bool `tfsdk:"default"` } -func (model *serverHostModel) populateFromAPI(ctx context.Context, data *fleetapi.FleetServerHost) (diags diag.Diagnostics) { +func (model *serverHostModel) populateFromAPI(ctx context.Context, data *fleetapi.ServerHost) (diags diag.Diagnostics) { if data == nil { return nil } model.Id = types.StringValue(data.Id) model.HostID = types.StringValue(data.Id) - model.Name = types.StringPointerValue(data.Name) + model.Name = types.StringValue(data.Name) model.Hosts = utils.SliceToListType_String(ctx, data.HostUrls, path.Root("hosts"), diags) - model.Default = types.BoolValue(data.IsDefault) + model.Default = types.BoolPointerValue(data.IsDefault) return } -func (model serverHostModel) toAPICreateModel(ctx context.Context) (body fleetapi.PostFleetServerHostsJSONRequestBody, diags diag.Diagnostics) { - body = fleetapi.PostFleetServerHostsJSONRequestBody{ +func (model serverHostModel) toAPICreateModel(ctx context.Context) (body fleetapi.CreateFleetServerHostJSONRequestBody, diags diag.Diagnostics) { + body = fleetapi.CreateFleetServerHostJSONRequestBody{ HostUrls: utils.ListTypeToSlice_String(ctx, model.Hosts, path.Root("hosts"), diags), Id: model.HostID.ValueStringPointer(), IsDefault: model.Default.ValueBoolPointer(), @@ -42,8 +42,8 @@ func (model serverHostModel) toAPICreateModel(ctx context.Context) (body fleetap return } -func (model serverHostModel) toAPIUpdateModel(ctx context.Context) (body fleetapi.UpdateFleetServerHostsJSONRequestBody, diags diag.Diagnostics) { - body = fleetapi.UpdateFleetServerHostsJSONRequestBody{ +func (model serverHostModel) toAPIUpdateModel(ctx context.Context) (body fleetapi.UpdateFleetServerHostJSONRequestBody, diags diag.Diagnostics) { + body = fleetapi.UpdateFleetServerHostJSONRequestBody{ HostUrls: utils.SliceRef(utils.ListTypeToSlice_String(ctx, model.Hosts, path.Root("hosts"), diags)), IsDefault: model.Default.ValueBoolPointer(), Name: model.Name.ValueStringPointer(), diff --git a/internal/fleet/server_host/read.go b/internal/fleet/server_host/read.go index 138249b2f..8785dd5da 100644 --- a/internal/fleet/server_host/read.go +++ b/internal/fleet/server_host/read.go @@ -23,7 +23,7 @@ func (r *serverHostResource) 
Read(ctx context.Context, req resource.ReadRequest, } hostID := stateModel.HostID.ValueString() - host, diags := fleet.ReadFleetServerHost(ctx, client, hostID) + host, diags := fleet.GetFleetServerHost(ctx, client, hostID) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return diff --git a/internal/fleet/server_host/resource_test.go b/internal/fleet/server_host/resource_test.go index a9f514b5f..904aad671 100644 --- a/internal/fleet/server_host/resource_test.go +++ b/internal/fleet/server_host/resource_test.go @@ -147,7 +147,7 @@ func checkResourceFleetServerHostDestroy(s *terraform.State) error { if err != nil { return err } - host, diags := fleet.ReadFleetServerHost(context.Background(), fleetClient, rs.Primary.ID) + host, diags := fleet.GetFleetServerHost(context.Background(), fleetClient, rs.Primary.ID) if diags.HasError() { return utils.FwDiagsAsError(diags) } diff --git a/libs/go-kibana-rest/kibana.go b/libs/go-kibana-rest/kibana.go index 560820a4b..d7beb1989 100644 --- a/libs/go-kibana-rest/kibana.go +++ b/libs/go-kibana-rest/kibana.go @@ -37,7 +37,8 @@ func NewClient(cfg Config) (*Client, error) { restyClient := resty.New(). SetBaseURL(cfg.Address). SetHeader("kbn-xsrf", "true"). - SetHeader("Content-Type", "application/json") + SetHeader("Content-Type", "application/json"). + SetDisableWarn(true) if cfg.ApiKey != "" { restyClient.SetAuthScheme("ApiKey").SetAuthToken(cfg.ApiKey) diff --git a/tools/fleet_gen.go b/tools/fleet_gen.go index d01d7493c..ee36a2df1 100644 --- a/tools/fleet_gen.go +++ b/tools/fleet_gen.go @@ -1,4 +1,3 @@ package tools -//go:generate go run ../generated/fleet/getschema.go -v v8.10.0 -o ../generated/fleet/fleet-filtered.json -//go:generate go run github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen -package=fleet -generate=types,client -o ../generated/fleet/fleet.gen.go ../generated/fleet/fleet-filtered.json +//go:generate make -C ../generated/fleet clean all diff --git a/tools/go.mod b/tools/go.mod index f9b9ea7c6..b4d9eb522 100644 --- a/tools/go.mod +++ b/tools/go.mod @@ -11,6 +11,7 @@ require ( github.com/hashicorp/terraform-plugin-docs v0.18.0 github.com/oapi-codegen/oapi-codegen/v2 v2.4.1 go.uber.org/mock v0.4.0 + gopkg.in/yaml.v3 v3.0.1 ) require ( @@ -251,7 +252,7 @@ require ( github.com/hashicorp/terraform-exec v0.20.0 // indirect github.com/hashicorp/terraform-json v0.21.0 // indirect github.com/hexops/gotextdiff v1.0.3 // indirect - github.com/huandu/xstrings v1.4.0 // indirect + github.com/huandu/xstrings v1.3.3 // indirect github.com/imdario/mergo v0.3.16 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/invopop/jsonschema v0.12.0 // indirect @@ -465,8 +466,6 @@ require ( gopkg.in/mail.v2 v2.3.1 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect - gotest.tools/v3 v3.1.0 // indirect honnef.co/go/tools v0.5.1 // indirect lukechampine.com/blake3 v1.2.1 // indirect mvdan.cc/gofumpt v0.7.0 // indirect diff --git a/tools/go.sum b/tools/go.sum index f7ca8d177..13d513022 100644 --- a/tools/go.sum +++ b/tools/go.sum @@ -642,9 +642,8 @@ github.com/hashicorp/terraform-plugin-docs v0.18.0/go.mod h1:iIUfaJpdUmpi+rI42Kg github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huandu/xstrings v1.3.3 
h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4= github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU= -github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= @@ -1013,7 +1012,6 @@ github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= -github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI= @@ -1455,8 +1453,8 @@ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gotest.tools/v3 v3.1.0 h1:rVV8Tcg/8jHUkPUorwjaMTtemIMVXfIPKiOqnhEhakk= -gotest.tools/v3 v3.1.0/go.mod h1:fHy7eyTmJFO5bQbUsEGQ1v4m2J3Jz9eWL54TP2/ZuYQ= +gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0= +gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= diff --git a/tools/tools.go b/tools/tools.go index 752c0db79..22d85a010 100644 --- a/tools/tools.go +++ b/tools/tools.go @@ -10,4 +10,5 @@ import ( _ "github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs" _ "github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen" _ "go.uber.org/mock/mockgen" + _ "gopkg.in/yaml.v3" ) From c0e19c2973851d5f26001dee6e858b969da5fa15 Mon Sep 17 00:00:00 2001 From: Raymond Lynch Date: Mon, 21 Oct 2024 10:45:38 -0400 Subject: [PATCH 2/5] lint --- tools/go.sum | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tools/go.sum b/tools/go.sum index 7e1b305b6..484898b91 100644 --- a/tools/go.sum +++ b/tools/go.sum @@ -1176,6 +1176,8 @@ go.uber.org/automaxprocs v1.5.3/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnw go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU= +go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc= go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU= go.uber.org/mock v0.5.0/go.mod 
h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= From 28071e78683cd855cd5d479e2d842612de9445f5 Mon Sep 17 00:00:00 2001 From: Raymond Lynch Date: Mon, 21 Oct 2024 19:30:34 -0400 Subject: [PATCH 3/5] fix downgrade --- tools/go.mod | 2 +- tools/go.sum | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/tools/go.mod b/tools/go.mod index ef04679a5..cf8571ac3 100644 --- a/tools/go.mod +++ b/tools/go.mod @@ -10,7 +10,7 @@ require ( github.com/goreleaser/goreleaser/v2 v2.3.2 github.com/hashicorp/terraform-plugin-docs v0.18.0 github.com/oapi-codegen/oapi-codegen/v2 v2.4.1 - go.uber.org/mock v0.4.0 + go.uber.org/mock v0.5.0 gopkg.in/yaml.v3 v3.0.1 ) diff --git a/tools/go.sum b/tools/go.sum index 484898b91..7e1b305b6 100644 --- a/tools/go.sum +++ b/tools/go.sum @@ -1176,8 +1176,6 @@ go.uber.org/automaxprocs v1.5.3/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnw go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= -go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU= -go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc= go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU= go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= From 97384a8e6569a2fcd668a68bd01195e99841a421 Mon Sep 17 00:00:00 2001 From: Raymond Lynch Date: Thu, 24 Oct 2024 11:15:54 -0400 Subject: [PATCH 4/5] fresh pull and documentation --- generated/fleet/fleet.gen.go | 473 ++++++++++++++++++++++------ generated/fleet/transform_schema.go | 162 +++++----- internal/fleet/output/models.go | 8 +- 3 files changed, 471 insertions(+), 172 deletions(-) diff --git a/generated/fleet/fleet.gen.go b/generated/fleet/fleet.gen.go index 6b9b4405f..47987097e 100644 --- a/generated/fleet/fleet.gen.go +++ b/generated/fleet/fleet.gen.go @@ -304,6 +304,7 @@ const ( // Defines values for PackageInfoType. const ( + PackageInfoTypeContent PackageInfoType = "content" PackageInfoTypeInput PackageInfoType = "input" PackageInfoTypeIntegration PackageInfoType = "integration" ) @@ -390,6 +391,7 @@ const ( // Defines values for PackageListItemType. const ( + PackageListItemTypeContent PackageListItemType = "content" PackageListItemTypeInput PackageListItemType = "input" PackageListItemTypeIntegration PackageListItemType = "integration" ) @@ -1381,6 +1383,7 @@ type PackageInfo struct { Conditions *PackageInfo_Conditions `json:"conditions,omitempty"` DataStreams *[]map[string]interface{} `json:"data_streams,omitempty"` Description *string `json:"description,omitempty"` + Discovery *PackageInfo_Discovery `json:"discovery,omitempty"` Download *string `json:"download,omitempty"` Elasticsearch *map[string]interface{} `json:"elasticsearch,omitempty"` FormatVersion *string `json:"format_version,omitempty"` @@ -1437,6 +1440,18 @@ type PackageInfo_Conditions struct { AdditionalProperties map[string]interface{} `json:"-"` } +// PackageInfo_Discovery_Fields_Item defines model for PackageInfo.Discovery.Fields.Item. 
+type PackageInfo_Discovery_Fields_Item struct { + Name string `json:"name"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_Discovery defines model for PackageInfo.Discovery. +type PackageInfo_Discovery struct { + Fields *[]PackageInfo_Discovery_Fields_Item `json:"fields,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + // PackageInfo_Icons_Item defines model for package_info.icons.Item. type PackageInfo_Icons_Item struct { DarkMode *bool `json:"dark_mode,omitempty"` @@ -1582,6 +1597,7 @@ type PackageListItem struct { Conditions *PackageListItem_Conditions `json:"conditions,omitempty"` DataStreams *[]map[string]interface{} `json:"data_streams,omitempty"` Description *string `json:"description,omitempty"` + Discovery *PackageListItem_Discovery `json:"discovery,omitempty"` Download *string `json:"download,omitempty"` FormatVersion *string `json:"format_version,omitempty"` Icons *[]PackageListItem_Icons_Item `json:"icons,omitempty"` @@ -1627,6 +1643,18 @@ type PackageListItem_Conditions struct { AdditionalProperties map[string]interface{} `json:"-"` } +// PackageListItem_Discovery_Fields_Item defines model for PackageListItem.Discovery.Fields.Item. +type PackageListItem_Discovery_Fields_Item struct { + Name string `json:"name"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_Discovery defines model for PackageListItem.Discovery. +type PackageListItem_Discovery struct { + Fields *[]PackageListItem_Discovery_Fields_Item `json:"fields,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + // PackageListItem_Icons_Item defines model for package_list_item.icons.Item. type PackageListItem_Icons_Item struct { DarkMode *bool `json:"dark_mode,omitempty"` @@ -1942,7 +1970,7 @@ type UpdateOutputElasticsearch struct { ProxyId *string `json:"proxy_id,omitempty"` Shipper *UpdateOutputShipper `json:"shipper,omitempty"` Ssl *UpdateOutputSsl `json:"ssl,omitempty"` - Type UpdateOutputElasticsearchType `json:"type"` + Type *UpdateOutputElasticsearchType `json:"type,omitempty"` } // UpdateOutputElasticsearchPreset defines model for UpdateOutputElasticsearch.Preset. 
@@ -1960,9 +1988,9 @@ type UpdateOutputKafka struct { CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` ClientId *string `json:"client_id,omitempty"` Compression *UpdateOutputKafkaCompression `json:"compression,omitempty"` - CompressionLevel *interface{} `json:"compression_level,omitempty"` + CompressionLevel interface{} `json:"compression_level"` ConfigYaml *string `json:"config_yaml"` - ConnectionType *interface{} `json:"connection_type,omitempty"` + ConnectionType interface{} `json:"connection_type"` Hash *struct { Hash *string `json:"hash,omitempty"` Random *bool `json:"random,omitempty"` @@ -1977,9 +2005,9 @@ type UpdateOutputKafka struct { IsInternal *bool `json:"is_internal,omitempty"` IsPreconfigured *bool `json:"is_preconfigured,omitempty"` Key *string `json:"key,omitempty"` - Name *string `json:"name,omitempty"` + Name string `json:"name"` Partition *UpdateOutputKafkaPartition `json:"partition,omitempty"` - Password *interface{} `json:"password,omitempty"` + Password interface{} `json:"password"` ProxyId *string `json:"proxy_id,omitempty"` Random *struct { GroupEvents *float32 `json:"group_events,omitempty"` @@ -2008,9 +2036,9 @@ type UpdateOutputKafka struct { Type *UpdateOutputKafkaTopicsWhenType `json:"type,omitempty"` } `json:"when,omitempty"` } `json:"topics,omitempty"` - Type UpdateOutputKafkaType `json:"type"` - Username *interface{} `json:"username,omitempty"` - Version *string `json:"version,omitempty"` + Type *UpdateOutputKafkaType `json:"type,omitempty"` + Username interface{} `json:"username"` + Version *string `json:"version,omitempty"` } // UpdateOutputKafkaAuthType defines model for UpdateOutputKafka.AuthType. @@ -2078,9 +2106,9 @@ type UpdateOutputLogstash struct { Key *UpdateOutputLogstash_Secrets_Ssl_Key `json:"key,omitempty"` } `json:"ssl,omitempty"` } `json:"secrets,omitempty"` - Shipper *UpdateOutputShipper `json:"shipper,omitempty"` - Ssl *UpdateOutputSsl `json:"ssl,omitempty"` - Type UpdateOutputLogstashType `json:"type"` + Shipper *UpdateOutputShipper `json:"shipper,omitempty"` + Ssl *UpdateOutputSsl `json:"ssl,omitempty"` + Type *UpdateOutputLogstashType `json:"type,omitempty"` } // UpdateOutputLogstashSecretsSslKey0 defines model for . @@ -2116,10 +2144,10 @@ type UpdateOutputRemoteElasticsearch struct { Secrets *struct { ServiceToken *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken `json:"service_token,omitempty"` } `json:"secrets,omitempty"` - ServiceToken *string `json:"service_token"` - Shipper *UpdateOutputShipper `json:"shipper,omitempty"` - Ssl *UpdateOutputSsl `json:"ssl,omitempty"` - Type UpdateOutputRemoteElasticsearchType `json:"type"` + ServiceToken *string `json:"service_token"` + Shipper *UpdateOutputShipper `json:"shipper,omitempty"` + Ssl *UpdateOutputSsl `json:"ssl,omitempty"` + Type *UpdateOutputRemoteElasticsearchType `json:"type,omitempty"` } // UpdateOutputRemoteElasticsearchPreset defines model for UpdateOutputRemoteElasticsearch.Preset. 
@@ -2179,9 +2207,16 @@ type GetAgentPoliciesParams struct { SortOrder *GetAgentPoliciesParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` ShowUpgradeable *bool `form:"showUpgradeable,omitempty" json:"showUpgradeable,omitempty"` Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"` - NoAgentCount *bool `form:"noAgentCount,omitempty" json:"noAgentCount,omitempty"` - Full *bool `form:"full,omitempty" json:"full,omitempty"` - Format *GetAgentPoliciesParamsFormat `form:"format,omitempty" json:"format,omitempty"` + + // NoAgentCount use withAgentCount instead + NoAgentCount *bool `form:"noAgentCount,omitempty" json:"noAgentCount,omitempty"` + + // WithAgentCount get policies with agent count + WithAgentCount *bool `form:"withAgentCount,omitempty" json:"withAgentCount,omitempty"` + + // Full get full policies with package policies populated + Full *bool `form:"full,omitempty" json:"full,omitempty"` + Format *GetAgentPoliciesParamsFormat `form:"format,omitempty" json:"format,omitempty"` } // GetAgentPoliciesParamsSortOrder defines parameters for GetAgentPolicies. @@ -5665,6 +5700,14 @@ func (a *PackageInfo) UnmarshalJSON(b []byte) error { delete(object, "description") } + if raw, found := object["discovery"]; found { + err = json.Unmarshal(raw, &a.Discovery) + if err != nil { + return fmt.Errorf("error reading 'discovery': %w", err) + } + delete(object, "discovery") + } + if raw, found := object["download"]; found { err = json.Unmarshal(raw, &a.Download) if err != nil { @@ -5939,6 +5982,13 @@ func (a PackageInfo) MarshalJSON() ([]byte, error) { } } + if a.Discovery != nil { + object["discovery"], err = json.Marshal(a.Discovery) + if err != nil { + return nil, fmt.Errorf("error marshaling 'discovery': %w", err) + } + } + if a.Download != nil { object["download"], err = json.Marshal(a.Download) if err != nil { @@ -6356,6 +6406,140 @@ func (a PackageInfo_Conditions) MarshalJSON() ([]byte, error) { return json.Marshal(object) } +// Getter for additional properties for PackageInfo_Discovery_Fields_Item. 
Returns the specified +// element and whether it was found +func (a PackageInfo_Discovery_Fields_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Discovery_Fields_Item +func (a *PackageInfo_Discovery_Fields_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Discovery_Fields_Item to handle AdditionalProperties +func (a *PackageInfo_Discovery_Fields_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Discovery_Fields_Item to handle AdditionalProperties +func (a PackageInfo_Discovery_Fields_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_Discovery. 
Returns the specified +// element and whether it was found +func (a PackageInfo_Discovery) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Discovery +func (a *PackageInfo_Discovery) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Discovery to handle AdditionalProperties +func (a *PackageInfo_Discovery) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["fields"]; found { + err = json.Unmarshal(raw, &a.Fields) + if err != nil { + return fmt.Errorf("error reading 'fields': %w", err) + } + delete(object, "fields") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Discovery to handle AdditionalProperties +func (a PackageInfo_Discovery) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Fields != nil { + object["fields"], err = json.Marshal(a.Fields) + if err != nil { + return nil, fmt.Errorf("error marshaling 'fields': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + // Getter for additional properties for PackageInfo_Icons_Item. Returns the specified // element and whether it was found func (a PackageInfo_Icons_Item) Get(fieldName string) (value interface{}, found bool) { @@ -7779,6 +7963,14 @@ func (a *PackageListItem) UnmarshalJSON(b []byte) error { delete(object, "description") } + if raw, found := object["discovery"]; found { + err = json.Unmarshal(raw, &a.Discovery) + if err != nil { + return fmt.Errorf("error reading 'discovery': %w", err) + } + delete(object, "discovery") + } + if raw, found := object["download"]; found { err = json.Unmarshal(raw, &a.Download) if err != nil { @@ -8002,6 +8194,13 @@ func (a PackageListItem) MarshalJSON() ([]byte, error) { } } + if a.Discovery != nil { + object["discovery"], err = json.Marshal(a.Discovery) + if err != nil { + return nil, fmt.Errorf("error marshaling 'discovery': %w", err) + } + } + if a.Download != nil { object["download"], err = json.Marshal(a.Download) if err != nil { @@ -8389,6 +8588,140 @@ func (a PackageListItem_Conditions) MarshalJSON() ([]byte, error) { return json.Marshal(object) } +// Getter for additional properties for PackageListItem_Discovery_Fields_Item. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Discovery_Fields_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Discovery_Fields_Item +func (a *PackageListItem_Discovery_Fields_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Discovery_Fields_Item to handle AdditionalProperties +func (a *PackageListItem_Discovery_Fields_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Discovery_Fields_Item to handle AdditionalProperties +func (a PackageListItem_Discovery_Fields_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Discovery. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Discovery) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Discovery +func (a *PackageListItem_Discovery) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Discovery to handle AdditionalProperties +func (a *PackageListItem_Discovery) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["fields"]; found { + err = json.Unmarshal(raw, &a.Fields) + if err != nil { + return fmt.Errorf("error reading 'fields': %w", err) + } + delete(object, "fields") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Discovery to handle AdditionalProperties +func (a PackageListItem_Discovery) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Fields != nil { + object["fields"], err = json.Marshal(a.Fields) + if err != nil { + return nil, fmt.Errorf("error marshaling 'fields': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + // Getter for additional properties for PackageListItem_Icons_Item. 
Returns the specified // element and whether it was found func (a PackageListItem_Icons_Item) Get(fieldName string) (value interface{}, found bool) { @@ -10272,7 +10605,6 @@ func (t NewOutputUnion) AsNewOutputElasticsearch() (NewOutputElasticsearch, erro // FromNewOutputElasticsearch overwrites any union data inside the NewOutputUnion as the provided NewOutputElasticsearch func (t *NewOutputUnion) FromNewOutputElasticsearch(v NewOutputElasticsearch) error { - v.Type = "elasticsearch" b, err := json.Marshal(v) t.union = b return err @@ -10280,7 +10612,6 @@ func (t *NewOutputUnion) FromNewOutputElasticsearch(v NewOutputElasticsearch) er // MergeNewOutputElasticsearch performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputElasticsearch func (t *NewOutputUnion) MergeNewOutputElasticsearch(v NewOutputElasticsearch) error { - v.Type = "elasticsearch" b, err := json.Marshal(v) if err != nil { return err @@ -10300,7 +10631,6 @@ func (t NewOutputUnion) AsNewOutputRemoteElasticsearch() (NewOutputRemoteElastic // FromNewOutputRemoteElasticsearch overwrites any union data inside the NewOutputUnion as the provided NewOutputRemoteElasticsearch func (t *NewOutputUnion) FromNewOutputRemoteElasticsearch(v NewOutputRemoteElasticsearch) error { - v.Type = "remote_elasticsearch" b, err := json.Marshal(v) t.union = b return err @@ -10308,7 +10638,6 @@ func (t *NewOutputUnion) FromNewOutputRemoteElasticsearch(v NewOutputRemoteElast // MergeNewOutputRemoteElasticsearch performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputRemoteElasticsearch func (t *NewOutputUnion) MergeNewOutputRemoteElasticsearch(v NewOutputRemoteElasticsearch) error { - v.Type = "remote_elasticsearch" b, err := json.Marshal(v) if err != nil { return err @@ -10328,7 +10657,6 @@ func (t NewOutputUnion) AsNewOutputLogstash() (NewOutputLogstash, error) { // FromNewOutputLogstash overwrites any union data inside the NewOutputUnion as the provided NewOutputLogstash func (t *NewOutputUnion) FromNewOutputLogstash(v NewOutputLogstash) error { - v.Type = "logstash" b, err := json.Marshal(v) t.union = b return err @@ -10336,7 +10664,6 @@ func (t *NewOutputUnion) FromNewOutputLogstash(v NewOutputLogstash) error { // MergeNewOutputLogstash performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputLogstash func (t *NewOutputUnion) MergeNewOutputLogstash(v NewOutputLogstash) error { - v.Type = "logstash" b, err := json.Marshal(v) if err != nil { return err @@ -10356,7 +10683,6 @@ func (t NewOutputUnion) AsNewOutputKafka() (NewOutputKafka, error) { // FromNewOutputKafka overwrites any union data inside the NewOutputUnion as the provided NewOutputKafka func (t *NewOutputUnion) FromNewOutputKafka(v NewOutputKafka) error { - v.Type = "kafka" b, err := json.Marshal(v) t.union = b return err @@ -10364,7 +10690,6 @@ func (t *NewOutputUnion) FromNewOutputKafka(v NewOutputKafka) error { // MergeNewOutputKafka performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputKafka func (t *NewOutputUnion) MergeNewOutputKafka(v NewOutputKafka) error { - v.Type = "kafka" b, err := json.Marshal(v) if err != nil { return err @@ -10375,33 +10700,6 @@ func (t *NewOutputUnion) MergeNewOutputKafka(v NewOutputKafka) error { return err } -func (t NewOutputUnion) Discriminator() (string, error) { - var discriminator struct { - Discriminator string `json:"type"` - } - err := json.Unmarshal(t.union, &discriminator) - return 
discriminator.Discriminator, err -} - -func (t NewOutputUnion) ValueByDiscriminator() (interface{}, error) { - discriminator, err := t.Discriminator() - if err != nil { - return nil, err - } - switch discriminator { - case "elasticsearch": - return t.AsNewOutputElasticsearch() - case "kafka": - return t.AsNewOutputKafka() - case "logstash": - return t.AsNewOutputLogstash() - case "remote_elasticsearch": - return t.AsNewOutputRemoteElasticsearch() - default: - return nil, errors.New("unknown discriminator value: " + discriminator) - } -} - func (t NewOutputUnion) MarshalJSON() ([]byte, error) { b, err := t.union.MarshalJSON() return b, err @@ -11066,7 +11364,6 @@ func (t UpdateOutputUnion) AsUpdateOutputElasticsearch() (UpdateOutputElasticsea // FromUpdateOutputElasticsearch overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputElasticsearch func (t *UpdateOutputUnion) FromUpdateOutputElasticsearch(v UpdateOutputElasticsearch) error { - v.Type = "elasticsearch" b, err := json.Marshal(v) t.union = b return err @@ -11074,7 +11371,6 @@ func (t *UpdateOutputUnion) FromUpdateOutputElasticsearch(v UpdateOutputElastics // MergeUpdateOutputElasticsearch performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputElasticsearch func (t *UpdateOutputUnion) MergeUpdateOutputElasticsearch(v UpdateOutputElasticsearch) error { - v.Type = "elasticsearch" b, err := json.Marshal(v) if err != nil { return err @@ -11094,7 +11390,6 @@ func (t UpdateOutputUnion) AsUpdateOutputRemoteElasticsearch() (UpdateOutputRemo // FromUpdateOutputRemoteElasticsearch overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputRemoteElasticsearch func (t *UpdateOutputUnion) FromUpdateOutputRemoteElasticsearch(v UpdateOutputRemoteElasticsearch) error { - v.Type = "remote_elasticsearch" b, err := json.Marshal(v) t.union = b return err @@ -11102,7 +11397,6 @@ func (t *UpdateOutputUnion) FromUpdateOutputRemoteElasticsearch(v UpdateOutputRe // MergeUpdateOutputRemoteElasticsearch performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputRemoteElasticsearch func (t *UpdateOutputUnion) MergeUpdateOutputRemoteElasticsearch(v UpdateOutputRemoteElasticsearch) error { - v.Type = "remote_elasticsearch" b, err := json.Marshal(v) if err != nil { return err @@ -11122,7 +11416,6 @@ func (t UpdateOutputUnion) AsUpdateOutputLogstash() (UpdateOutputLogstash, error // FromUpdateOutputLogstash overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputLogstash func (t *UpdateOutputUnion) FromUpdateOutputLogstash(v UpdateOutputLogstash) error { - v.Type = "logstash" b, err := json.Marshal(v) t.union = b return err @@ -11130,7 +11423,6 @@ func (t *UpdateOutputUnion) FromUpdateOutputLogstash(v UpdateOutputLogstash) err // MergeUpdateOutputLogstash performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputLogstash func (t *UpdateOutputUnion) MergeUpdateOutputLogstash(v UpdateOutputLogstash) error { - v.Type = "logstash" b, err := json.Marshal(v) if err != nil { return err @@ -11150,7 +11442,6 @@ func (t UpdateOutputUnion) AsUpdateOutputKafka() (UpdateOutputKafka, error) { // FromUpdateOutputKafka overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputKafka func (t *UpdateOutputUnion) FromUpdateOutputKafka(v UpdateOutputKafka) error { - v.Type = "kafka" b, err := json.Marshal(v) t.union = b return err @@ -11158,7 +11449,6 @@ func (t 
*UpdateOutputUnion) FromUpdateOutputKafka(v UpdateOutputKafka) error { // MergeUpdateOutputKafka performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputKafka func (t *UpdateOutputUnion) MergeUpdateOutputKafka(v UpdateOutputKafka) error { - v.Type = "kafka" b, err := json.Marshal(v) if err != nil { return err @@ -11169,33 +11459,6 @@ func (t *UpdateOutputUnion) MergeUpdateOutputKafka(v UpdateOutputKafka) error { return err } -func (t UpdateOutputUnion) Discriminator() (string, error) { - var discriminator struct { - Discriminator string `json:"type"` - } - err := json.Unmarshal(t.union, &discriminator) - return discriminator.Discriminator, err -} - -func (t UpdateOutputUnion) ValueByDiscriminator() (interface{}, error) { - discriminator, err := t.Discriminator() - if err != nil { - return nil, err - } - switch discriminator { - case "elasticsearch": - return t.AsUpdateOutputElasticsearch() - case "kafka": - return t.AsUpdateOutputKafka() - case "logstash": - return t.AsUpdateOutputLogstash() - case "remote_elasticsearch": - return t.AsUpdateOutputRemoteElasticsearch() - default: - return nil, errors.New("unknown discriminator value: " + discriminator) - } -} - func (t UpdateOutputUnion) MarshalJSON() ([]byte, error) { b, err := t.union.MarshalJSON() return b, err @@ -11958,6 +12221,22 @@ func NewGetAgentPoliciesRequest(server string, params *GetAgentPoliciesParams) ( } + if params.WithAgentCount != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withAgentCount", runtime.ParamLocationQuery, *params.WithAgentCount); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + if params.Full != nil { if queryFrag, err := runtime.StyleParamWithLocation("form", true, "full", runtime.ParamLocationQuery, *params.Full); err != nil { @@ -13907,6 +14186,14 @@ type ListPackages_200_Response_Conditions struct { Kibana *ListPackages_200_Response_Conditions_Kibana `json:"kibana,omitempty"` AdditionalProperties map[string]interface{} `json:"-"` } +type ListPackages_200_Response_Discovery_Fields_Item struct { + Name string `json:"name"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type ListPackages_200_Response_Discovery struct { + Fields *[]ListPackages_200_Response_Discovery_Fields_Item `json:"fields,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} type ListPackages_200_Response_Icons_Item struct { DarkMode *bool `json:"dark_mode,omitempty"` Path *string `json:"path,omitempty"` @@ -14009,6 +14296,7 @@ type ListPackages_200_Response_Item struct { Conditions *ListPackages_200_Response_Conditions `json:"conditions,omitempty"` DataStreams *[]map[string]interface{} `json:"data_streams,omitempty"` Description *string `json:"description,omitempty"` + Discovery *ListPackages_200_Response_Discovery `json:"discovery,omitempty"` Download *string `json:"download,omitempty"` FormatVersion *string `json:"format_version,omitempty"` Icons *[]ListPackages_200_Response_Icons_Item `json:"icons,omitempty"` @@ -14165,6 +14453,14 @@ type GetPackage_200_Response_Conditions struct { Kibana *GetPackage_200_Response_Conditions_Kibana `json:"kibana,omitempty"` AdditionalProperties map[string]interface{} `json:"-"` } +type GetPackage_200_Response_Discovery_Fields_Item struct { + Name string `json:"name"` + AdditionalProperties map[string]interface{} `json:"-"` 
+} +type GetPackage_200_Response_Discovery struct { + Fields *[]GetPackage_200_Response_Discovery_Fields_Item `json:"fields,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} type GetPackage_200_Response_Icons_Item struct { DarkMode *bool `json:"dark_mode,omitempty"` Path *string `json:"path,omitempty"` @@ -14278,6 +14574,7 @@ type GetPackage_200_Response struct { Conditions *GetPackage_200_Response_Conditions `json:"conditions,omitempty"` DataStreams *[]map[string]interface{} `json:"data_streams,omitempty"` Description *string `json:"description,omitempty"` + Discovery *GetPackage_200_Response_Discovery `json:"discovery,omitempty"` Download *string `json:"download,omitempty"` Elasticsearch *map[string]interface{} `json:"elasticsearch,omitempty"` FormatVersion *string `json:"format_version,omitempty"` diff --git a/generated/fleet/transform_schema.go b/generated/fleet/transform_schema.go index af8be5581..9417797f3 100644 --- a/generated/fleet/transform_schema.go +++ b/generated/fleet/transform_schema.go @@ -330,8 +330,6 @@ func (m Map) CreateRef(schema *Schema, name string, key string) Map { if reflect.DeepEqual(refTarget, existing) { writeComponent = false } else { - //os.WriteFile("./existing.txt", []byte(spew.Sdump(existing)), 0644) - //os.WriteFile("./target.txt", []byte(spew.Sdump(refTarget)), 0644) log.Panicf("Component schema key already in use and not an exact duplicate: %q", refPath) return nil } @@ -776,6 +774,15 @@ func transformFleetPaths(schema *Schema) { agentPolicyPath.Get.CreateRef(schema, "agent_policy", "responses.200.content.application/json.schema.properties.item") agentPolicyPath.Put.CreateRef(schema, "agent_policy", "responses.200.content.application/json.schema.properties.item") + // See: https://github.com/elastic/kibana/issues/197155 + // [request body.keep_monitoring_alive]: expected value of type [boolean] but got [null] + // [request body.supports_agentless]: expected value of type [boolean] but got [null] + // [request body.overrides]: expected value of type [boolean] but got [null] + for _, key := range []string{"keep_monitoring_alive", "supports_agentless", "overrides"} { + agentPoliciesPath.Post.Set(fmt.Sprintf("requestBody.content.application/json.schema.properties.%s.x-omitempty", key), true) + agentPolicyPath.Put.Set(fmt.Sprintf("requestBody.content.application/json.schema.properties.%s.x-omitempty", key), true) + } + // Enrollment api keys // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/models/enrollment_api_key.ts // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/rest_spec/enrollment_api_key.ts @@ -804,6 +811,12 @@ func transformFleetPaths(schema *Schema) { hostPath.Get.CreateRef(schema, "server_host", "responses.200.content.application/json.schema.properties.item") hostPath.Put.CreateRef(schema, "server_host", "responses.200.content.application/json.schema.properties.item") + // 8.6.2 regression + // [request body.proxy_id]: definition for this key is missing + // See: https://github.com/elastic/kibana/issues/197155 + hostsPath.Post.Set("requestBody.content.application/json.schema.properties.proxy_id.x-omitempty", true) + hostPath.Put.Set("requestBody.content.application/json.schema.properties.proxy_id.x-omitempty", true) + // Outputs // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/models/output.ts // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/rest_spec/output.ts @@ -811,11 +824,11 @@ func 
transformFleetPaths(schema *Schema) { outputByIdPath := schema.MustGetPath("/api/fleet/outputs/{outputId}") outputsPath := schema.MustGetPath("/api/fleet/outputs") - outputsPath.Get.CreateRef(schema, "output_union", "responses.200.content.application/json.schema.properties.items.items") outputsPath.Post.CreateRef(schema, "new_output_union", "requestBody.content.application/json.schema") - outputsPath.Post.CreateRef(schema, "output_union", "responses.200.content.application/json.schema.properties.item") - outputByIdPath.Get.CreateRef(schema, "output_union", "responses.200.content.application/json.schema.properties.item") outputByIdPath.Put.CreateRef(schema, "update_output_union", "requestBody.content.application/json.schema") + outputsPath.Get.CreateRef(schema, "output_union", "responses.200.content.application/json.schema.properties.items.items") + outputByIdPath.Get.CreateRef(schema, "output_union", "responses.200.content.application/json.schema.properties.item") + outputsPath.Post.CreateRef(schema, "output_union", "responses.200.content.application/json.schema.properties.item") outputByIdPath.Put.CreateRef(schema, "output_union", "responses.200.content.application/json.schema.properties.item") for _, name := range []string{"output", "new_output", "update_output"} { @@ -825,44 +838,79 @@ func transformFleetPaths(schema *Schema) { schema.Components.CreateRef(schema, fmt.Sprintf("%s_logstash", name), fmt.Sprintf("schemas.%s_union.anyOf.2", name)) schema.Components.CreateRef(schema, fmt.Sprintf("%s_kafka", name), fmt.Sprintf("schemas.%s_union.anyOf.3", name)) - // Add the missing discriminator - schema.Components.Set(fmt.Sprintf("schemas.%s_union.discriminator", name), Map{ - "propertyName": "type", - "mapping": Map{ - "elasticsearch": fmt.Sprintf("#/components/schemas/%s_elasticsearch", name), - "remote_elasticsearch": fmt.Sprintf("#/components/schemas/%s_remote_elasticsearch", name), - "logstash": fmt.Sprintf("#/components/schemas/%s_logstash", name), - "kafka": fmt.Sprintf("#/components/schemas/%s_kafka", name), - }, - }) - // Extract child structs for _, typ := range []string{"elasticsearch", "remote_elasticsearch", "logstash", "kafka"} { schema.Components.CreateRef(schema, fmt.Sprintf("%s_shipper", name), fmt.Sprintf("schemas.%s_%s.properties.shipper", name, typ)) schema.Components.CreateRef(schema, fmt.Sprintf("%s_ssl", name), fmt.Sprintf("schemas.%s_%s.properties.ssl", name, typ)) } + // Ideally just remove the "anyOf", however then we would need to make + // refs for each of the "oneOf" options. So turn them into an "any" instead. + // See: https://github.com/elastic/kibana/issues/197153 /* - // These look like this and oapi breaks hard on it. - // Turn them into an `any` type. 
- anyOf: - - items: {} - type: array - - type: boolean - - type: number - - type: object - - type: string - nullable: true - oneOf: - - type: number - - not: {} + anyOf: + - items: {} + type: array + - type: boolean + - type: number + - type: object + - type: string + nullable: true + oneOf: + - type: number + - not: {} */ - node := schema.Components.MustGetMap(fmt.Sprintf("schemas.%s_kafka.properties", name)) - for _, typ := range []string{"compression_level", "connection_type", "password", "username"} { - node[typ] = Map{} + + props := schema.Components.MustGetMap(fmt.Sprintf("schemas.%s_kafka.properties", name)) + for _, key := range []string{"compression_level", "connection_type", "password", "username"} { + props.Set(key, Map{}) } } + // Add the missing discriminator to the response union + // See: https://github.com/elastic/kibana/issues/181994 + schema.Components.Set("schemas.output_union.discriminator", Map{ + "propertyName": "type", + "mapping": Map{ + "elasticsearch": "#/components/schemas/output_elasticsearch", + "remote_elasticsearch": "#/components/schemas/output_remote_elasticsearch", + "logstash": "#/components/schemas/output_logstash", + "kafka": "#/components/schemas/output_kafka", + }, + }) + + for _, name := range []string{"new_output", "update_output"} { + for _, typ := range []string{"elasticsearch", "remote_elasticsearch", "logstash", "kafka"} { + // [request body.1.ca_sha256]: expected value of type [string] but got [null]" + // See: https://github.com/elastic/kibana/issues/197155 + schema.Components.Set(fmt.Sprintf("schemas.%s_%s.properties.ca_sha256.x-omitempty", name, typ), true) + + // [request body.1.ca_trusted_fingerprint]: expected value of type [string] but got [null] + // See: https://github.com/elastic/kibana/issues/197155 + schema.Components.Set(fmt.Sprintf("schemas.%s_%s.properties.ca_trusted_fingerprint.x-omitempty", name, typ), true) + + // 8.6.2 regression + // [request body.proxy_id]: definition for this key is missing" + // See: https://github.com/elastic/kibana/issues/197155 + schema.Components.Set(fmt.Sprintf("schemas.%s_%s.properties.proxy_id.x-omitempty", name, typ), true) + } + + // [request body.1.shipper]: expected a plain object value, but found [null] instead + // See: https://github.com/elastic/kibana/issues/197155 + schema.Components.Set(fmt.Sprintf("schemas.%s_shipper.x-omitempty", name), true) + + // [request body.1.ssl]: expected a plain object value, but found [null] instead + // See: https://github.com/elastic/kibana/issues/197155 + schema.Components.Set(fmt.Sprintf("schemas.%s_ssl.x-omitempty", name), true) + + } + + for _, typ := range []string{"elasticsearch", "remote_elasticsearch", "logstash", "kafka"} { + // strict_dynamic_mapping_exception: [1:345] mapping set to strict, dynamic introduction of [id] within [ingest-outputs] is not allowed" + // See: https://github.com/elastic/kibana/issues/197155 + schema.Components.MustDelete(fmt.Sprintf("schemas.update_output_%s.properties.id", typ)) + } + // Package policies // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/models/package_policy.ts // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/rest_spec/package_policy.ts @@ -899,55 +947,9 @@ func transformFleetPaths(schema *Schema) { schema.Components.Set("schemas.package_policy_request_input.properties.vars", Map{"type": "object"}) schema.Components.Set("schemas.package_policy_request_input_stream.properties.vars", Map{"type": "object"}) - // Upstream issues - - // [request 
body.keep_monitoring_alive]: expected value of type [boolean] but got [null] - agentPoliciesPath.Post.Set("requestBody.content.application/json.schema.properties.keep_monitoring_alive.x-omitempty", true) - agentPolicyPath.Put.Set("requestBody.content.application/json.schema.properties.keep_monitoring_alive.x-omitempty", true) - - // [request body.supports_agentless]: expected value of type [boolean] but got [null] - agentPoliciesPath.Post.Set("requestBody.content.application/json.schema.properties.supports_agentless.x-omitempty", true) - agentPolicyPath.Put.Set("requestBody.content.application/json.schema.properties.supports_agentless.x-omitempty", true) - - // [request body.supports_agentless]: expected value of type [boolean] but got [null] - agentPoliciesPath.Post.Set("requestBody.content.application/json.schema.properties.overrides.x-omitempty", true) - agentPolicyPath.Put.Set("requestBody.content.application/json.schema.properties.overrides.x-omitempty", true) - - // 8.6.2 regression - // [request body.proxy_id]: definition for this key is missing - hostsPath.Post.Set("requestBody.content.application/json.schema.properties.proxy_id.x-omitempty", true) - hostPath.Put.Set("requestBody.content.application/json.schema.properties.proxy_id.x-omitempty", true) - - for _, typ := range []string{"elasticsearch", "remote_elasticsearch", "logstash", "kafka"} { - // Discriminator codegen failure, may not be required upstream, have not tested - schema.Components.Set(fmt.Sprintf("schemas.update_output_%s.required", typ), []string{"type"}) - - // [request body.3.ca_sha256]: expected value of type [string] but got [null]" - schema.Components.Set(fmt.Sprintf("schemas.new_output_%s.properties.ca_sha256.x-omitempty", typ), true) - schema.Components.Set(fmt.Sprintf("schemas.update_output_%s.properties.ca_sha256.x-omitempty", typ), true) - - // [request body.1.ca_trusted_fingerprint]: expected value of type [string] but got [null] - schema.Components.Set(fmt.Sprintf("schemas.new_output_%s.properties.ca_trusted_fingerprint.x-omitempty", typ), true) - schema.Components.Set(fmt.Sprintf("schemas.update_output_%s.properties.ca_trusted_fingerprint.x-omitempty", typ), true) - - // 8.6.2 regression - // [request body.proxy_id]: definition for this key is missing" - schema.Components.Set(fmt.Sprintf("schemas.new_output_%s.properties.proxy_id.x-omitempty", typ), true) - schema.Components.Set(fmt.Sprintf("schemas.update_output_%s.properties.proxy_id.x-omitempty", typ), true) - - // strict_dynamic_mapping_exception: [1:345] mapping set to strict, dynamic introduction of [id] within [ingest-outputs] is not allowed" - schema.Components.MustDelete(fmt.Sprintf("schemas.update_output_%s.properties.id", typ)) - } - - // [request body.0.shipper]: expected a plain object value, but found [null] instead - schema.Components.Set("schemas.new_output_shipper.x-omitempty", true) - schema.Components.Set("schemas.new_output_ssl.x-omitempty", true) - - // [request body.1.shipper]: expected a plain object value, but found [null] instead - schema.Components.Set("schemas.update_output_shipper.x-omitempty", true) - schema.Components.Set("schemas.update_output_ssl.x-omitempty", true) - + // [request body.0.output_id]: expected value of type [string] but got [null] // [request body.1.output_id]: definition for this key is missing" + // See: https://github.com/elastic/kibana/issues/197155 schema.Components.Set("schemas.package_policy_request.properties.output_id.x-omitempty", true) } diff --git a/internal/fleet/output/models.go 
b/internal/fleet/output/models.go index b5f87c337..d6371d916 100644 --- a/internal/fleet/output/models.go +++ b/internal/fleet/output/models.go @@ -123,7 +123,7 @@ func (model outputModel) toAPICreateModel(ctx context.Context) (union fleetapi.N switch outputType { case "elasticsearch": body := fleetapi.NewOutputElasticsearch{ - Type: "elasticsearch", + Type: fleetapi.NewOutputElasticsearchTypeElasticsearch, CaSha256: model.CaSha256.ValueStringPointer(), CaTrustedFingerprint: model.CaTrustedFingerprint.ValueStringPointer(), ConfigYaml: model.ConfigYaml.ValueStringPointer(), @@ -143,7 +143,7 @@ func (model outputModel) toAPICreateModel(ctx context.Context) (union fleetapi.N case "logstash": body := fleetapi.NewOutputLogstash{ - Type: "logstash", + Type: fleetapi.NewOutputLogstashTypeLogstash, CaSha256: model.CaSha256.ValueStringPointer(), CaTrustedFingerprint: model.CaTrustedFingerprint.ValueStringPointer(), ConfigYaml: model.ConfigYaml.ValueStringPointer(), @@ -187,7 +187,7 @@ func (model outputModel) toAPIUpdateModel(ctx context.Context) (union fleetapi.U switch outputType { case "elasticsearch": body := fleetapi.UpdateOutputElasticsearch{ - Type: "elasticsearch", + Type: utils.Pointer(fleetapi.Elasticsearch), CaSha256: model.CaSha256.ValueStringPointer(), CaTrustedFingerprint: model.CaTrustedFingerprint.ValueStringPointer(), ConfigYaml: model.ConfigYaml.ValueStringPointer(), @@ -206,7 +206,7 @@ func (model outputModel) toAPIUpdateModel(ctx context.Context) (union fleetapi.U case "logstash": body := fleetapi.UpdateOutputLogstash{ - Type: "logstash", + Type: utils.Pointer(fleetapi.Logstash), CaSha256: model.CaSha256.ValueStringPointer(), CaTrustedFingerprint: model.CaTrustedFingerprint.ValueStringPointer(), ConfigYaml: model.ConfigYaml.ValueStringPointer(), From f244dabd9a35dc6cc914660cc733e22f1ca5e41a Mon Sep 17 00:00:00 2001 From: Raymond Lynch Date: Thu, 24 Oct 2024 11:39:17 -0400 Subject: [PATCH 5/5] changelog --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ea4cdf1f4..3fb395a4d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,8 @@ ## [Unreleased] - Allow `elasticstack_kibana_alerting_rule` to be used without Elasticsearch being configured. ([#869](https://github.com/elastic/terraform-provider-elasticstack/pull/869)) -- Add resource `elasticstack_elasticsearch_data_stream_lifecycle` ([838](https://github.com/elastic/terraform-provider-elasticstack/issues/838)) +- Add resource `elasticstack_elasticsearch_data_stream_lifecycle` ([#838](https://github.com/elastic/terraform-provider-elasticstack/issues/838)) +- Use the auto-generated OAS schema from elastic/kibana for the Fleet API. ([#834](https://github.com/elastic/terraform-provider-elasticstack/issues/834)) ## [0.11.10] - 2024-10-23
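
A minimal, hypothetical sketch of the output-update flow after this series: the shared `fleetapi.UpdateOutputSsl` struct replaces the old anonymous SSL structs, and the typed request body is wrapped via `UpdateOutputUnion.FromUpdateOutputLogstash`. The `generated/fleet` import path and the `ptr` helper are assumptions for illustration only, not taken from the diff.

// Illustrative only: the import path is assumed to follow the provider module layout.
package main

import (
	"fmt"

	fleetapi "github.com/elastic/terraform-provider-elasticstack/generated/fleet"
)

// ptr is a local illustrative helper; the provider itself uses utils.Pointer.
func ptr[T any](v T) *T { return &v }

func main() {
	// Build a logstash output update using the generated named types.
	body := fleetapi.UpdateOutputLogstash{
		Type:  ptr(fleetapi.Logstash),
		Name:  ptr("demo-logstash"),
		Hosts: ptr([]string{"logstash.example.com:5044"}),
		Ssl: &fleetapi.UpdateOutputSsl{
			Certificate:            ptr("<client certificate PEM>"),
			CertificateAuthorities: ptr([]string{"<CA PEM>"}),
		},
	}

	// Wrap the typed body in the request union accepted by the update endpoint.
	var union fleetapi.UpdateOutputUnion
	if err := union.FromUpdateOutputLogstash(body); err != nil {
		fmt.Println("building union:", err)
	}
}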