
Commit 0eb77fa

Initial terraform setup for workshop.
1 parent ae02a79 commit 0eb77fa

10 files changed
+417 -0 lines changed

.gitignore

Lines changed: 2 additions & 0 deletions

@@ -58,3 +58,5 @@ gradle-app.setting
 .gradletasknamecache
 .project
 .classpath
+
+cloud.properties

cloud.properties.example

Lines changed: 12 additions & 0 deletions

@@ -0,0 +1,12 @@
client.cloud=*****
client.compute-pool-id=*****
client.environment-id=*****
client.flink-api-key=*****
client.flink-api-secret=*****
client.kafka.boostrap.servers=*****
client.kafka.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username='*****' password='*****';
client.organization-id=*****
client.region=*****
client.registry.key=*****
client.registry.secret=*****
client.registry.url=*****

terraform/.gitignore

Lines changed: 6 additions & 0 deletions

@@ -0,0 +1,6 @@
.terraform.lock.hcl
.terraform/
.tfplan
tfplan
terraform.tfstate
terraform.tfstate.backup

terraform/README.adoc

Lines changed: 59 additions & 0 deletions

@@ -0,0 +1,59 @@
= Provisioning Confluent Cloud Infrastructure

Let's assume you have completed the following prereqs:

* https://confluent.cloud[Confluent Cloud Account]
* https://docs.confluent.io/confluent-cli/current/install.html[Confluent CLI]
* https://www.terraform.io/[Terraform]
* https://jqlang.github.io/jq/[jq]

== Shell Setup

Log into Confluent Cloud and find your Cloud API Key and API Secret. Use the commands below to set the environment variables needed to authenticate to Confluent Cloud:

```shell
export CONFLUENT_CLOUD_API_KEY=<API KEY>
export CONFLUENT_CLOUD_API_SECRET=<API SECRET>
```
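
If you prefer not to copy the key from the console, a Cloud API key can also be created with the Confluent CLI. A minimal sketch (assuming you are logged in via `confluent login`; flags can vary slightly between CLI versions):

```shell
# Create a Cloud-scoped API key; copy the printed key and secret into the
# CONFLUENT_CLOUD_API_KEY / CONFLUENT_CLOUD_API_SECRET variables above.
confluent api-key create --resource cloud
```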

== Execute Terraform Manifests

The Terraform manifests require the Confluent Cloud organization ID in order to provision infrastructure. This can be found in the Confluent Cloud console under "Organization Settings" and exported to an environment variable:

```bash
export TF_VAR_org_id=<ORG ID VALUE FROM CONSOLE>
```

This value can also be retrieved with the Confluent CLI by querying your account and piping the result to `jq`:

```bash
export TF_VAR_org_id=$(confluent organization list -o json | jq -c -r '.[] | select(.is_current)' | jq '.id')
```

From this directory, execute the following commands:

```bash
terraform init
terraform plan -out "tfplan"
terraform apply
```

Once completed, verify the infrastructure is created in the Confluent Cloud console.
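
The same check can be made from the command line; a quick sketch using the Confluent CLI (assuming you are still logged in, and substituting the environment ID shown by the first command):

```bash
# Confirm the new environment exists, then list the Kafka clusters inside it.
confluent environment list
confluent kafka cluster list --environment <ENVIRONMENT ID>
```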

== Using Infrastructure in the Workshop

In the Table API exercises, we'll need API keys and secrets to connect Flink to Confluent Cloud. This `terraform output` command will create a file with those parameters:

```bash
terraform output -json \
  | jq -r 'to_entries | map( {key: .key|tostring|split("_")|join("."), value: .value} ) | map("client.\(.key)=\(.value.value)") | .[]' \
  | while read -r line ; do echo "$line"; done > ../cloud.properties
```
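
The generated `../cloud.properties` should contain the same keys as the `cloud.properties.example` file at the repository root; the real `cloud.properties` is listed in the top-level `.gitignore` so the credentials are never committed.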

== Teardown

When the workshop is complete, run the following command to destroy all Confluent Cloud assets:

```bash
terraform destroy --auto-approve
```

terraform/flink-compute.tf

Lines changed: 81 additions & 0 deletions

@@ -0,0 +1,81 @@
resource "confluent_flink_compute_pool" "main_flink_pool" {
  display_name = "main_flink_pool"
  cloud        = var.cloud_provider
  region       = var.cloud_region
  max_cfu      = 5
  environment {
    id = confluent_environment.cc_env.id
  }
}

data "confluent_flink_region" "main_flink_region" {
  cloud  = var.cloud_provider
  region = var.cloud_region
}

resource "confluent_service_account" "flink_developer" {
  display_name = "${var.cc_env_name}-flink_developer"
  description  = "Service account for flink developer"
}

resource "confluent_role_binding" "fd_flink_developer" {
  principal   = "User:${confluent_service_account.flink_developer.id}"
  role_name   = "FlinkDeveloper"
  crn_pattern = confluent_environment.cc_env.resource_name

  depends_on = [confluent_flink_compute_pool.main_flink_pool]
}

resource "confluent_role_binding" "fd_kafka_write" {
  principal   = "User:${confluent_service_account.flink_developer.id}"
  role_name   = "DeveloperWrite"
  crn_pattern = "${confluent_kafka_cluster.kafka_cluster.rbac_crn}/kafka=${confluent_kafka_cluster.kafka_cluster.id}/topic=*"

  depends_on = [confluent_kafka_cluster.kafka_cluster]
}

resource "confluent_role_binding" "fd_kafka_read" {
  principal   = "User:${confluent_service_account.flink_developer.id}"
  role_name   = "DeveloperRead"
  crn_pattern = "${confluent_kafka_cluster.kafka_cluster.rbac_crn}/kafka=${confluent_kafka_cluster.kafka_cluster.id}/topic=*"

  depends_on = [confluent_kafka_cluster.kafka_cluster]
}

resource "confluent_role_binding" "fd_schema_registry_write" {
  principal   = "User:${confluent_service_account.flink_developer.id}"
  role_name   = "DeveloperWrite"
  crn_pattern = "${data.confluent_schema_registry_cluster.advanced.resource_name}/subject=*"
}

resource "confluent_role_binding" "fd_schema_registry_read" {
  principal   = "User:${confluent_service_account.flink_developer.id}"
  role_name   = "DeveloperRead"
  crn_pattern = "${data.confluent_schema_registry_cluster.advanced.resource_name}/subject=*"
}

resource "confluent_api_key" "flink_developer_api_key" {
  display_name = "flink_developer_api_key"
  description  = "Flink Developer API Key that is owned by 'flink_developer' service account"
  owner {
    id          = confluent_service_account.flink_developer.id
    api_version = confluent_service_account.flink_developer.api_version
    kind        = confluent_service_account.flink_developer.kind
  }

  managed_resource {
    id          = data.confluent_flink_region.main_flink_region.id
    api_version = data.confluent_flink_region.main_flink_region.api_version
    kind        = data.confluent_flink_region.main_flink_region.kind

    environment {
      id = confluent_environment.cc_env.id
    }
  }

  depends_on = [
    confluent_service_account.flink_developer
  ]
}

terraform/kafka.tf

Lines changed: 97 additions & 0 deletions

@@ -0,0 +1,97 @@
resource "confluent_kafka_cluster" "kafka_cluster" {
  display_name = var.cc_default_kafka_cluster_name
  availability = "SINGLE_ZONE"
  cloud        = var.cloud_provider
  region       = var.cloud_region
  standard {}
  environment {
    id = confluent_environment.cc_env.id
  }

  depends_on = [confluent_environment.cc_env]
}

# ---------------------------------------------------------------------------
# API KEY and Role for Administration of Kafka
# ---------------------------------------------------------------------------
resource "confluent_service_account" "kafka_manager" {
  display_name = "${var.cc_env_name}-kafka_manager"
  description  = "Service account to manage Kafka cluster"
}

resource "confluent_role_binding" "kafka_manager_kafka_cluster_admin" {
  principal   = "User:${confluent_service_account.kafka_manager.id}"
  role_name   = "CloudClusterAdmin"
  crn_pattern = confluent_kafka_cluster.kafka_cluster.rbac_crn
}

resource "confluent_api_key" "kafka_manager_kafka_api_key" {
  display_name = "kafka_manager_kafka_api_key"
  description  = "Kafka API Key that is owned by 'kafka_manager' service account"
  owner {
    id          = confluent_service_account.kafka_manager.id
    api_version = confluent_service_account.kafka_manager.api_version
    kind        = confluent_service_account.kafka_manager.kind
  }

  managed_resource {
    id          = confluent_kafka_cluster.kafka_cluster.id
    api_version = confluent_kafka_cluster.kafka_cluster.api_version
    kind        = confluent_kafka_cluster.kafka_cluster.kind

    environment {
      id = confluent_environment.cc_env.id
    }
  }

  depends_on = [
    confluent_environment.cc_env,
    confluent_role_binding.kafka_manager_kafka_cluster_admin
  ]
}

# ---------------------------------------------------------------------------
# API KEY and Role for Developers on Kafka
# ---------------------------------------------------------------------------

resource "confluent_service_account" "kafka_developer" {
  display_name = "${var.cc_env_name}-kafka_developer"
  description  = "Service account for developer using Kafka cluster"
}

# Role bindings granting the developer service account read/write access to
# every topic in the cluster.
resource "confluent_role_binding" "kafka_developer_read_all_topics" {
  principal   = "User:${confluent_service_account.kafka_developer.id}"
  role_name   = "DeveloperRead"
  crn_pattern = "${confluent_kafka_cluster.kafka_cluster.rbac_crn}/kafka=${confluent_kafka_cluster.kafka_cluster.id}/topic=*"
}

resource "confluent_role_binding" "kafka_developer_write_all_topics" {
  principal   = "User:${confluent_service_account.kafka_developer.id}"
  role_name   = "DeveloperWrite"
  crn_pattern = "${confluent_kafka_cluster.kafka_cluster.rbac_crn}/kafka=${confluent_kafka_cluster.kafka_cluster.id}/topic=*"
}

resource "confluent_api_key" "kafka_developer_kafka_api_key" {
  display_name = "kafka_developer_kafka_api_key"
  description  = "Kafka API Key that is owned by 'kafka_developer' service account"
  owner {
    id          = confluent_service_account.kafka_developer.id
    api_version = confluent_service_account.kafka_developer.api_version
    kind        = confluent_service_account.kafka_developer.kind
  }

  managed_resource {
    id          = confluent_kafka_cluster.kafka_cluster.id
    api_version = confluent_kafka_cluster.kafka_cluster.api_version
    kind        = confluent_kafka_cluster.kafka_cluster.kind

    environment {
      id = confluent_environment.cc_env.id
    }
  }

  depends_on = [
    confluent_role_binding.kafka_developer_read_all_topics,
    confluent_role_binding.kafka_developer_write_all_topics
  ]
}

terraform/main.tf

Lines changed: 28 additions & 0 deletions

@@ -0,0 +1,28 @@
# Configure the Confluent Provider
terraform {
  backend "local" {
    workspace_dir = ".tfstate/terraform.state"
  }

  required_providers {
    confluent = {
      source  = "confluentinc/confluent"
      version = "2.12.0"
    }
  }
}

provider "confluent" {
  # Credentials are supplied via the CONFLUENT_CLOUD_API_KEY and
  # CONFLUENT_CLOUD_API_SECRET environment variables (see the README).
}

resource "confluent_environment" "cc_env" {
  display_name = var.cc_env_name

  stream_governance {
    package = "ADVANCED"
  }

  lifecycle {
    prevent_destroy = false
  }
}

terraform/output.tf

Lines changed: 51 additions & 0 deletions

@@ -0,0 +1,51 @@
# output "schema_registry_url" {
#   value = data.confluent_schema_registry_cluster.advanced.rest_endpoint
# }

# Spelling matches the client.kafka.boostrap.servers key in cloud.properties.example.
output "kafka_boostrap_servers" {
  value = replace(confluent_kafka_cluster.kafka_cluster.bootstrap_endpoint, "SASL_SSL://", "")
}

output "cloud" {
  value = var.cloud_provider
}

output "region" {
  value = var.cloud_region
}

output "organization-id" {
  value = replace(var.org_id, "\"", "")
}

output "environment-id" {
  value = confluent_environment.cc_env.id
}

output "compute-pool-id" {
  value = confluent_flink_compute_pool.main_flink_pool.id
}

output "kafka_sasl_jaas_config" {
  value = "org.apache.kafka.common.security.plain.PlainLoginModule required username='${confluent_api_key.kafka_developer_kafka_api_key.id}' password='${nonsensitive(confluent_api_key.kafka_developer_kafka_api_key.secret)}';"
}

output "registry_url" {
  value = data.confluent_schema_registry_cluster.advanced.rest_endpoint
}

output "registry_key" {
  value = confluent_api_key.sr_manager_kafka_api_key.id
}

output "registry_secret" {
  value = nonsensitive(confluent_api_key.sr_manager_kafka_api_key.secret)
}

output "flink-api-key" {
  value = confluent_api_key.flink_developer_api_key.id
}

output "flink-api-secret" {
  value = nonsensitive(confluent_api_key.flink_developer_api_key.secret)
}

terraform/schema-registry.tf

Lines changed: 47 additions & 0 deletions

@@ -0,0 +1,47 @@
# Add Stream Governance, Schema Registry.
data "confluent_schema_registry_cluster" "advanced" {
  environment {
    id = confluent_environment.cc_env.id
  }
  depends_on = [confluent_kafka_cluster.kafka_cluster]
}

# ---------------------------------------------------------------------------
# API KEY and Role for management of Schema Registry
# ---------------------------------------------------------------------------
resource "confluent_service_account" "sr_manager" {
  display_name = "${var.cc_env_name}-sr_manager"
  description  = "Service account to manage Schema Registry"
}

resource "confluent_role_binding" "sr_manager_data_steward" {
  principal   = "User:${confluent_service_account.sr_manager.id}"
  role_name   = "DataSteward"
  crn_pattern = confluent_environment.cc_env.resource_name

  depends_on = [data.confluent_schema_registry_cluster.advanced]
}

resource "confluent_api_key" "sr_manager_kafka_api_key" {
  display_name = "sr_manager_kafka_api_key"
  description  = "SR API Key that is owned by 'sr_manager' service account"
  owner {
    id          = confluent_service_account.sr_manager.id
    api_version = confluent_service_account.sr_manager.api_version
    kind        = confluent_service_account.sr_manager.kind
  }

  managed_resource {
    id          = data.confluent_schema_registry_cluster.advanced.id
    api_version = data.confluent_schema_registry_cluster.advanced.api_version
    kind        = data.confluent_schema_registry_cluster.advanced.kind

    environment {
      id = confluent_environment.cc_env.id
    }
  }

  depends_on = [
    confluent_role_binding.sr_manager_data_steward
  ]
}
