diff --git a/castai/resource_node_configuration_gke_test.go b/castai/resource_node_configuration_gke_test.go
new file mode 100644
index 00000000..0c561089
--- /dev/null
+++ b/castai/resource_node_configuration_gke_test.go
@@ -0,0 +1,167 @@
+package castai
+
+import (
+	"fmt"
+	"os"
+	"testing"
+
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+)
+
+func TestAccResourceNodeConfiguration_gke(t *testing.T) {
+	rName := fmt.Sprintf("%v-gke-%v", ResourcePrefix, acctest.RandString(8))
+	resourceName := "castai_node_configuration.test"
+	clusterName := "core-tf-acc"
+	projectID := os.Getenv("GKE_PROJECT_ID")
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:          func() { testAccPreCheck(t) },
+		ProviderFactories: providerFactories,
+		// Destroying the cluster does not work properly yet: the cluster is not fully onboarded before it gets destroyed.
+		// https://castai.atlassian.net/browse/CORE-2868 should solve the issue.
+		//CheckDestroy: testAccCheckAKSClusterDestroy,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccGKENodeConfigurationConfig(rName, clusterName, projectID),
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttr(resourceName, "name", rName),
+					resource.TestCheckResourceAttr(resourceName, "disk_cpu_ratio", "35"),
+					resource.TestCheckResourceAttr(resourceName, "min_disk_size", "122"),
+					resource.TestCheckResourceAttr(resourceName, "aks.#", "0"),
+					resource.TestCheckResourceAttr(resourceName, "eks.#", "0"),
+					resource.TestCheckResourceAttr(resourceName, "kops.#", "0"),
+					resource.TestCheckResourceAttr(resourceName, "gke.0.max_pods_per_node", "31"),
+				),
+			},
+			{
+				Config: testAccGKENodeConfigurationUpdated(rName, clusterName, projectID),
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttr(resourceName, "name", rName),
+					resource.TestCheckResourceAttr(resourceName, "disk_cpu_ratio", "0"),
+					resource.TestCheckResourceAttr(resourceName, "min_disk_size", "121"),
+					resource.TestCheckResourceAttr(resourceName, "aks.#", "0"),
+					resource.TestCheckResourceAttr(resourceName, "eks.#", "0"),
+					resource.TestCheckResourceAttr(resourceName, "kops.#", "0"),
+					resource.TestCheckResourceAttr(resourceName, "gke.0.max_pods_per_node", "32"),
+				),
+			},
+		},
+		ExternalProviders: map[string]resource.ExternalProvider{
+			"google": {
+				Source:            "hashicorp/google",
+				VersionConstraint: "> 4.75.0",
+			},
+			"google-beta": {
+				Source:            "hashicorp/google-beta",
+				VersionConstraint: "> 4.75.0",
+			},
+		},
+	})
+}
+
+func testAccGKENodeConfigurationConfig(rName, clusterName, projectID string) string {
+	return ConfigCompose(testAccGKEClusterConfig(rName, clusterName, projectID), fmt.Sprintf(`
+resource "castai_node_configuration" "test" {
+  name           = %[1]q
+  cluster_id     = castai_gke_cluster.test.id
+  disk_cpu_ratio = 35
+  min_disk_size  = 122
+  subnets        = [data.google_compute_subnetwork.nodes.self_link]
+  tags = {
+    env = "development"
+  }
+  gke {
+    max_pods_per_node = 31
+    network_tags      = ["ab", "bc"]
+    disk_type         = "pd-balanced"
+  }
+}
+
+resource "castai_node_configuration_default" "test" {
+  cluster_id       = castai_gke_cluster.test.id
+  configuration_id = castai_node_configuration.test.id
+}
+`, rName))
+}
+
+func testAccGKENodeConfigurationUpdated(rName, clusterName, projectID string) string {
+	return ConfigCompose(testAccGKEClusterConfig(rName, clusterName, projectID), fmt.Sprintf(`
+resource "castai_node_configuration" "test" {
+  name           = %[1]q
+  cluster_id     = castai_gke_cluster.test.id
+  disk_cpu_ratio = 0
+  min_disk_size  = 121
+  subnets        = [data.google_compute_subnetwork.nodes.self_link]
+  tags = {
+    env = "development"
+  }
+  gke {
+    max_pods_per_node = 32
+    network_tags      = ["ab", "bc"]
+    disk_type         = "pd-balanced"
+  }
+}
+`, rName))
+}
+
+func testAccGKEClusterConfig(rName string, clusterName string, projectID string) string {
+	return ConfigCompose(testAccGCPConfig(rName, clusterName, projectID), fmt.Sprintf(`
+resource "castai_gke_cluster" "test" {
+  project_id       = %[1]q
+  location         = "us-central1-c"
+  name             = %[2]q
+  credentials_json = base64decode(google_service_account_key.castai_key.private_key)
+}
+
+`, projectID, clusterName))
+}
+
+func testAccGCPConfig(rName, clusterName, projectID string) string {
+	return fmt.Sprintf(`
+data "google_compute_subnetwork" "nodes" {
+  name   = "tf-core-acc-20230723-ip-range-nodes"
+  region = "us-central1"
+}
+
+locals {
+  service_account_id    = %[3]q
+  service_account_email = "${local.service_account_id}@%[2]s.iam.gserviceaccount.com"
+  custom_role_id        = "castai.gkeAccess.${substr(sha1(%[1]q), 0, 8)}.tf"
+}
+
+resource "google_service_account" "castai_service_account" {
+  account_id   = local.service_account_id
+  display_name = "Service account to manage %[1]s cluster via CAST"
+  project      = %[2]q
+}
+
+data "castai_gke_user_policies" "gke" {}
+
+resource "google_project_iam_custom_role" "castai_role" {
+  role_id     = local.custom_role_id
+  title       = "Role to manage GKE cluster via CAST AI"
+  description = "Role to manage GKE cluster via CAST AI"
+  permissions = toset(data.castai_gke_user_policies.gke.policy)
+  project     = %[2]q
+  stage       = "GA"
+}
+
+resource "google_project_iam_member" "project" {
+  for_each = toset([
+    "roles/container.developer",
+    "roles/iam.serviceAccountUser",
+    "projects/%[2]s/roles/${local.custom_role_id}"
+  ])
+
+  project = %[2]q
+  role    = each.key
+  member  = "serviceAccount:${local.service_account_email}"
+}
+
+resource "google_service_account_key" "castai_key" {
+  service_account_id = google_service_account.castai_service_account.name
+  public_key_type    = "TYPE_X509_PEM_FILE"
+}
+
+`, clusterName, projectID, rName)
+}