chore: bump terraform helm version v3 and fix errors #136


Open · wants to merge 18 commits into main
4 changes: 2 additions & 2 deletions README.md
@@ -50,7 +50,7 @@ provider "ibm" {
}

provider "helm" {
-  kubernetes {
+  kubernetes = {
host = data.ibm_container_cluster_config.cluster_config.host
token = data.ibm_container_cluster_config.cluster_config.token
cluster_ca_certificate = data.ibm_container_cluster_config.cluster_config.ca_certificate
@@ -88,7 +88,7 @@ You need the following permissions to run this module.
| Name | Version |
|------|---------|
| <a name="requirement_terraform"></a> [terraform](#requirement\_terraform) | >= 1.9.0 |
| <a name="requirement_helm"></a> [helm](#requirement\_helm) | >= 2.15.0, <3.0.0 |
| <a name="requirement_helm"></a> [helm](#requirement\_helm) | >= 3.0.0, <4.0.0 |
| <a name="requirement_ibm"></a> [ibm](#requirement\_ibm) | >= 1.79.2, <2.0.0 |

### Modules
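For orientation, a minimal sketch of the two helm provider 3.x syntax changes this PR applies throughout: `kubernetes` becomes an object attribute instead of a nested block, and (in main.tf below) `set` becomes a single list attribute instead of repeated `set` blocks. All values here are illustrative placeholders, not this module's real configuration.

```hcl
provider "helm" {
  # 2.x used a nested block: kubernetes { ... }
  # 3.x takes an object attribute: kubernetes = { ... }
  kubernetes = {
    host                   = "https://c100.example.cloud:31234" # placeholder
    token                  = "<cluster-token>"                  # placeholder
    cluster_ca_certificate = "<ca-certificate>"                 # placeholder
  }
}

resource "helm_release" "example" {
  name  = "example"
  chart = "./example-chart" # placeholder

  # 2.x used repeated `set { ... }` blocks; 3.x takes one list of objects
  set = [
    {
      name  = "global.sysdig.apiHost"
      value = "example.test" # placeholder
    }
  ]
}
```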
2 changes: 1 addition & 1 deletion examples/obs-agent-iks/provider.tf
@@ -4,7 +4,7 @@ provider "ibm" {
}

provider "helm" {
-  kubernetes {
+  kubernetes = {
host = data.ibm_container_cluster_config.cluster_config.host
token = data.ibm_container_cluster_config.cluster_config.token
cluster_ca_certificate = data.ibm_container_cluster_config.cluster_config.ca_certificate
2 changes: 1 addition & 1 deletion examples/obs-agent-iks/version.tf
@@ -10,7 +10,7 @@ terraform {
}
helm = {
source = "hashicorp/helm"
version = "2.15.0"
version = "3.0.2"
}
# The kubernetes provider is not actually required by the module itself, just this example, so OK to use ">=" here instead of locking into a version
kubernetes = {
2 changes: 1 addition & 1 deletion examples/obs-agent-ocp/provider.tf
@@ -4,7 +4,7 @@ provider "ibm" {
}

provider "helm" {
-  kubernetes {
+  kubernetes = {
host = data.ibm_container_cluster_config.cluster_config.host
token = data.ibm_container_cluster_config.cluster_config.token
cluster_ca_certificate = data.ibm_container_cluster_config.cluster_config.ca_certificate
2 changes: 1 addition & 1 deletion examples/obs-agent-ocp/version.tf
@@ -10,7 +10,7 @@ terraform {
}
helm = {
source = "hashicorp/helm"
version = ">= 2.15.0"
version = ">= 3.0.0, <4.0.0"
}
kubernetes = {
source = "hashicorp/kubernetes"
235 changes: 114 additions & 121 deletions main.tf
@@ -36,6 +36,17 @@ locals {
base_endpoint = var.use_scc_wp_endpoint ? local.scc_wp_api_endpoint : local.monitoring_api_endpoint
ingestion_endpoint = var.use_private_endpoint ? "ingest.private.${local.base_endpoint}" : "ingest.${local.base_endpoint}"
api_host = replace(local.ingestion_endpoint, "ingest.", "")
+  dynamic_set_access_key_secret = var.existing_access_key_secret_name != null && var.existing_access_key_secret_name != "" ? [{
+    name  = "global.sysdig.accessKeySecret"
+    type  = "string"
+    value = var.existing_access_key_secret_name
+  }] : []
+  dynamic_agent_tags = [for k, v in var.agent_tags :
+    {
+      name  = "global.sysdig.tags.${k}"
+      value = v
+    }
+  ]
 }

Contributor: Does the new helm provider no longer support dynamic?

Member Author: Not really. Since all of the set variables are now combined into one array, the dynamic ones have to be added on at the end with a concat. That was the only solution I was able to find. (A minimal sketch of the pattern follows below.)
@@ -51,132 +62,114 @@ resource "helm_release" "cloud_monitoring_agent" {
  force_update = true
  reset_values = true

-  # Values
-  set {
-    name  = "Values.image.repository"
-    type  = "string"
-    value = var.image_registry_base_url
-  }
-
-  # Global
-  set {
-    name  = "global.imageRegistry"
-    type  = "string"
-    value = "${var.image_registry_base_url}/${var.image_registry_namespace}"
-  }
-  set {
-    name  = "global.sysdig.apiHost"
-    value = local.api_host
-  }
-  dynamic "set_sensitive" {
-    for_each = var.access_key != null && var.access_key != "" ? [1] : []
-    content {
-      name  = "global.sysdig.accessKey"
-      type  = "string"
-      value = var.access_key
-    }
-  }
-  dynamic "set" {
-    for_each = var.existing_access_key_secret_name != null && var.existing_access_key_secret_name != "" ? [1] : []
-    content {
-      name  = "global.sysdig.accessKeySecret"
-      value = var.existing_access_key_secret_name
-    }
-  }
-  set {
-    name  = "global.clusterConfig.name"
-    type  = "string"
-    value = local.cluster_name
-  }
-  set {
-    name  = "global.sysdig.tags.deployment"
-    type  = "string"
-    value = var.deployment_tag
-  }
-  set {
-    name  = "global.sysdig.tags.ibm-containers-kubernetes-cluster-name"
-    type  = "string"
-    value = var.add_cluster_name ? local.cluster_name : null
-  }
-  dynamic "set" {
-    for_each = var.agent_tags
-    content {
-      name  = "global.sysdig.tags.${set.key}"
-      value = set.value
-    }
-  }
-
-  # Cluster shield
-  set {
-    name  = "clusterShield.enabled"
-    value = var.cluster_shield_deploy
-  }
-  set {
-    name  = "clusterShield.image.repository"
-    value = var.cluster_shield_image_repository
-  }
-  set {
-    name  = "clusterShield.image.tag"
-    value = var.cluster_shield_image_tag_digest
-  }
-  set {
-    name  = "clusterShield.resources.requests.cpu"
-    type  = "string"
-    value = var.cluster_shield_requests_cpu
-  }
-  set {
-    name  = "clusterShield.resources.requests.memory"
-    type  = "string"
-    value = var.cluster_shield_requests_memory
-  }
-  set {
-    name  = "clusterShield.resources.limits.cpu"
-    type  = "string"
-    value = var.cluster_shield_limits_cpu
-  }
-  set {
-    name  = "clusterShield.resources.limits.memory"
-    type  = "string"
-    value = var.cluster_shield_limits_memory
-  }
-  set {
-    name  = "clusterShield.cluster_shield.sysdig_endpoint.region"
-    type  = "string"
-    value = "custom"
-  }
-  set {
-    name  = "clusterShield.cluster_shield.log_level"
-    type  = "string"
-    value = "info"
-  }
-  set {
-    name  = "clusterShield.cluster_shield.features.admission_control.enabled"
-    value = var.cluster_shield_deploy
-  }
-  set {
-    name  = "clusterShield.cluster_shield.features.container_vulnerability_management.enabled"
-    value = var.cluster_shield_deploy
-  }
-  set {
-    name  = "clusterShield.cluster_shield.features.audit.enabled"
-    value = var.cluster_shield_deploy
-  }
-  set {
-    name  = "clusterShield.cluster_shield.features.posture.enabled"
-    value = var.cluster_shield_deploy
-  }
-
-  # nodeAnalyzer has been replaced by the host_scanner and kspm_analyzer functionality of main agent daemonset
-  set {
-    name  = "nodeAnalyzer.enabled"
-    value = false
-  }
-  # clusterScanner has been replaced by cluster_shield component
-  set {
-    name  = "clusterScanner.enabled"
-    value = false
-  }
+  set = concat([
+    # Values
+    {
+      name  = "Values.image.repository"
+      type  = "string"
+      value = var.image_registry_base_url
+    },
+    # Global
+    {
+      name  = "global.imageRegistry"
+      type  = "string"
+      value = "${var.image_registry_base_url}/${var.image_registry_namespace}"
+    },
+    {
+      name  = "global.sysdig.apiHost"
+      value = local.api_host
+    },
+    {
+      name  = "global.clusterConfig.name"
+      type  = "string"
+      value = local.cluster_name
+    },
+    {
+      name  = "global.sysdig.tags.deployment"
+      type  = "string"
+      value = var.deployment_tag
+    },
+    {
+      name  = "global.sysdig.tags.ibm-containers-kubernetes-cluster-name"
+      type  = "string"
+      value = var.add_cluster_name ? local.cluster_name : null
+    },
+    # Cluster shield
+    {
+      name  = "clusterShield.enabled"
+      value = var.cluster_shield_deploy
+    },
+    {
+      name  = "clusterShield.image.repository"
+      value = var.cluster_shield_image_repository
+    },
+    {
+      name  = "clusterShield.image.tag"
+      value = var.cluster_shield_image_tag_digest
+    },
+    {
+      name  = "clusterShield.resources.requests.cpu"
+      type  = "string"
+      value = var.cluster_shield_requests_cpu
+    },
+    {
+      name  = "clusterShield.resources.requests.memory"
+      type  = "string"
+      value = var.cluster_shield_requests_memory
+    },
+    {
+      name  = "clusterShield.resources.limits.cpu"
+      type  = "string"
+      value = var.cluster_shield_limits_cpu
+    },
+    {
+      name  = "clusterShield.resources.limits.memory"
+      type  = "string"
+      value = var.cluster_shield_limits_memory
+    },
+    {
+      name  = "clusterShield.cluster_shield.sysdig_endpoint.region"
+      type  = "string"
+      value = "custom"
+    },
+    {
+      name  = "clusterShield.cluster_shield.log_level"
+      type  = "string"
+      value = "info"
+    },
+    {
+      name  = "clusterShield.cluster_shield.features.admission_control.enabled"
+      value = var.cluster_shield_deploy
+    },
+    {
+      name  = "clusterShield.cluster_shield.features.container_vulnerability_management.enabled"
+      value = var.cluster_shield_deploy
+    },
+    {
+      name  = "clusterShield.cluster_shield.features.audit.enabled"
+      value = var.cluster_shield_deploy
+    },
+    {
+      name  = "clusterShield.cluster_shield.features.posture.enabled"
+      value = var.cluster_shield_deploy
+    },
+    # nodeAnalyzer has been replaced by the host_scanner and kspm_analyzer functionality of main agent daemonset
+    {
+      name  = "nodeAnalyzer.enabled"
+      value = false
+    },
+    # clusterScanner has been replaced by cluster_shield component
+    {
+      name  = "clusterScanner.enabled"
+      value = false
+    }
+  ], local.dynamic_agent_tags, local.dynamic_set_access_key_secret)
+
+  set_sensitive = var.access_key != null && var.access_key != "" ? [{
+    name  = "global.sysdig.accessKey"
+    type  = "string"
+    value = var.access_key
+  }] : []

# Had to use raw yaml here instead of converting HCL to yaml due to this issue with boolean getting converted to string which sysdig helm chart rejects:
# https://github.com/hashicorp/terraform-provider-helm/issues/1677
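For context on that comment, a hedged sketch of the raw-YAML workaround; the chart keys here are illustrative placeholders, not the sysdig chart's real values.

```hcl
resource "helm_release" "example" {
  name  = "example"
  chart = "./example-chart" # placeholder

  # A raw YAML string keeps `enabled: true` as a YAML boolean. The comment
  # above reports that converting HCL to YAML instead (for example,
  # yamlencode({ enabled = true })) can arrive as the string "true" under
  # the linked provider issue, which the sysdig helm chart rejects.
  values = [
    <<-EOT
      agent:
        slim:
          enabled: true
    EOT
  ]
}
```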
2 changes: 1 addition & 1 deletion solutions/fully-configurable/provider.tf
@@ -9,7 +9,7 @@ provider "kubernetes" {
}

provider "helm" {
-  kubernetes {
+  kubernetes = {
host = data.ibm_container_cluster_config.cluster_config.host
token = data.ibm_container_cluster_config.cluster_config.token
cluster_ca_certificate = data.ibm_container_cluster_config.cluster_config.ca_certificate
2 changes: 1 addition & 1 deletion solutions/fully-configurable/version.tf
@@ -10,7 +10,7 @@ terraform {
}
helm = {
source = "hashicorp/helm"
version = "2.17.0"
version = "3.0.2"
}
kubernetes = {
source = "hashicorp/kubernetes"
5 changes: 3 additions & 2 deletions tests/pr_test.go
@@ -261,8 +261,9 @@ func TestRunAgentClassicKubernetes(t *testing.T) {
CloudInfoService: sharedInfoSvc,
})
options.TerraformVars = map[string]any{
"datacenter": "syd01",
"prefix": options.Prefix,
"resource_group": resourceGroup,
"datacenter": "syd01",
"prefix": options.Prefix,
}

output, err := options.RunTestConsistency()
2 changes: 1 addition & 1 deletion version.tf
@@ -16,7 +16,7 @@ terraform {
}
helm = {
source = "hashicorp/helm"
version = ">= 2.15.0, <3.0.0"
version = ">= 3.0.0, <4.0.0"
}
}
}