diff --git a/mmv1/products/discoveryengine/DataConnector.yaml b/mmv1/products/discoveryengine/DataConnector.yaml
index 860690e027cc..7b1c616f1171 100644
--- a/mmv1/products/discoveryengine/DataConnector.yaml
+++ b/mmv1/products/discoveryengine/DataConnector.yaml
@@ -35,6 +35,7 @@ import_format:
   - 'projects/{{project}}/locations/{{location}}/collections/{{collection_id}}/dataConnector'
 custom_code:
   constants: "templates/terraform/constants/discoveryengine_data_connector.go.tmpl"
+  pre_delete: "templates/terraform/pre_delete/discoveryengine_data_connector.go.tmpl"
 state_upgraders: true
 schema_version: 1
 timeouts:
@@ -58,6 +59,16 @@ examples:
     primary_resource_id: 'jira-with-actions'
     vars:
       collection_id: 'collection-id'
+
+virtual_fields:
+  - name: 'detach_stores_on_destroy'
+    description: |
+      If set to `true`, Terraform will detach the associated Data Stores from any Search Engine before deleting the Data Connector.
+      This addresses a circular dependency issue where deleting the Data Connector fails if its stores are in use.
+      Defaults to `false`.
+    type: Boolean
+    default_value: false
+
 parameters:
   - name: 'location'
     type: String
diff --git a/mmv1/templates/terraform/pre_delete/discoveryengine_data_connector.go.tmpl b/mmv1/templates/terraform/pre_delete/discoveryengine_data_connector.go.tmpl
new file mode 100644
index 000000000000..a760488976b8
--- /dev/null
+++ b/mmv1/templates/terraform/pre_delete/discoveryengine_data_connector.go.tmpl
@@ -0,0 +1,142 @@
+if d.Get("detach_stores_on_destroy").(bool) {
+	log.Printf("[DEBUG] detach_stores_on_destroy is true, checking for linked search engines")
+
+	// 1. Collect the data-store IDs owned by this connector from its entities.
+	entitiesObj := d.Get("entities")
+	entities, ok := entitiesObj.([]interface{})
+	if !ok {
+		return fmt.Errorf("error converting entities to []interface{}")
+	}
+
+	var storeIds []string
+	for _, entityObj := range entities {
+		entity, ok := entityObj.(map[string]interface{})
+		if !ok {
+			continue
+		}
+		storeNameObj, ok := entity["data_store"]
+		if !ok {
+			continue
+		}
+		storeName, ok := storeNameObj.(string)
+		if !ok || storeName == "" {
+			continue
+		}
+		// Extract store ID from full name: projects/*/locations/*/collections/*/dataStores/*
+		parts := strings.Split(storeName, "/")
+		if len(parts) > 0 {
+			storeIds = append(storeIds, parts[len(parts)-1])
+		}
+	}
+
+	// NOTE: this template is inlined into the generated Delete function BEFORE the
+	// DELETE request is sent, so an early `return nil` would skip deleting the
+	// Data Connector entirely and orphan it in the API. When there is nothing to
+	// detach we therefore fall through instead of returning.
+	if len(storeIds) == 0 {
+		log.Printf("[DEBUG] No stores found in DataConnector to detach")
+	} else {
+		log.Printf("[DEBUG] Stores to detach: %v", storeIds)
+
+		// 2. List engines in the collection and strip the target store IDs from each.
+		project, err := tpgresource.GetProject(d, config)
+		if err != nil {
+			return err
+		}
+		location := d.Get("location").(string)
+		collectionId := d.Get("collection_id").(string)
+
+		baseUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}DiscoveryEngineBasePath{{"}}"}}")
+		if err != nil {
+			return err
+		}
+		enginesUrl := baseUrl + fmt.Sprintf("projects/%s/locations/%s/collections/%s/engines", project, location, collectionId)
+
+		resp, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{
+			Config:    config,
+			Method:    "GET",
+			Project:   project,
+			RawURL:    enginesUrl,
+			UserAgent: userAgent,
+		})
+		if err != nil {
+			return fmt.Errorf("error listing engines: %s", err)
+		}
+
+		enginesObj, hasEngines := resp["engines"]
+		if !hasEngines {
+			// Nothing references the stores; proceed with the normal deletion.
+			log.Printf("[DEBUG] No engines found in collection")
+		} else {
+			engines, ok := enginesObj.([]interface{})
+			if !ok {
+				return fmt.Errorf("error converting engines to []interface{}")
+			}
+
+			for _, engineObj := range engines {
+				engine, ok := engineObj.(map[string]interface{})
+				if !ok {
+					continue
+				}
+				// Guard the assertion: an unchecked `.(string)` on API response
+				// data would panic and abort the whole delete.
+				engineName, ok := engine["name"].(string)
+				if !ok || engineName == "" {
+					continue
+				}
+
+				dataStoreIdsObj, ok := engine["dataStoreIds"]
+				if !ok {
+					continue
+				}
+				dataStoreIds, ok := dataStoreIdsObj.([]interface{})
+				if !ok {
+					continue
+				}
+
+				// Rebuild the engine's store list without this connector's stores.
+				var newStoreIds []string
+				modified := false
+				for _, idObj := range dataStoreIds {
+					id, ok := idObj.(string)
+					if !ok {
+						continue
+					}
+					matched := false
+					for _, targetId := range storeIds {
+						if id == targetId {
+							matched = true
+							modified = true
+							break
+						}
+					}
+					if !matched {
+						newStoreIds = append(newStoreIds, id)
+					}
+				}
+
+				if modified {
+					log.Printf("[DEBUG] Detaching stores from engine %s", engineName)
+					// Call PATCH on engine to update dataStoreIds
+					engineUrl := baseUrl + engineName
+					updateMask := "dataStoreIds"
+
+					// Construct request body
+					body := map[string]interface{}{
+						"dataStoreIds": newStoreIds,
+					}
+
+					_, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{
+						Config:    config,
+						Method:    "PATCH",
+						Project:   project,
+						RawURL:    engineUrl + "?updateMask=" + updateMask,
+						UserAgent: userAgent,
+						Body:      body,
+					})
+					if err != nil {
+						return fmt.Errorf("error updating engine %s: %s", engineName, err)
+					}
+					log.Printf("[DEBUG] Successfully detached stores from engine %s", engineName)
+				}
+			}
+		}
+	}
+}
diff --git a/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_data_connector_test.go b/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_data_connector_test.go
index fecb4897fef7..aa50ab4dcd9c 100644
--- a/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_data_connector_test.go
+++ b/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_data_connector_test.go
@@ -256,3 +256,97 @@ func TestAccDiscoveryEngineDataConnector_DataConnectorEntitiesParamsDiffSuppress
 		}
 	}
 }
+
+func TestAccDiscoveryEngineDataConnector_detachStoresOnDestroy(t *testing.T) {
+	// Skipped: the skip reason is passed to t.Skip so it shows up in test output.
+	t.Skip("requires complex IdP setup")
+
+	t.Parallel()
+
+	context := map[string]interface{}{
+		"random_suffix": acctest.RandString(t, 10),
+	}
+
+	acctest.VcrTest(t, resource.TestCase{
+		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
+		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
+		ExternalProviders: map[string]resource.ExternalProvider{
+			"time": {},
+		},
+		Steps: []resource.TestStep{
+			{
+				Config: testAccDiscoveryEngineDataConnector_detachStoresOnDestroy(context),
+			},
+			{
+				ResourceName:            "google_discovery_engine_data_connector.servicenow-basic",
+				ImportState:             true,
+				ImportStateVerify:       true,
+				ImportStateVerifyIgnore: []string{"collection_display_name", "collection_id", "location", "params", "update_time", "action_config.0.action_params", "action_config.0.create_bap_connection"},
+			},
+		},
+	})
+}
+
+func testAccDiscoveryEngineDataConnector_detachStoresOnDestroy(context map[string]interface{}) string {
+	return acctest.Nprintf(`
+resource "google_discovery_engine_data_connector" "servicenow-basic" {
+  location                = "global"
+  collection_id           = "tf-test-collection-id%{random_suffix}"
+  collection_display_name = "tf-test-dataconnector-servicenow"
+  data_source             = "servicenow"
+  data_source_version     = 3
+  params = {
+    auth_type         = "OAUTH_PASSWORD_GRANT"
+    instance_uri      = "https://gcpconnector1.service-now.com/"
+    client_id         = "SECRET_MANAGER_RESOURCE_NAME"
+    client_secret     = "SECRET_MANAGER_RESOURCE_NAME"
+    static_ip_enabled = "false"
+    user_account      = "connectorsuserqa@google.com"
+    password          = "SECRET_MANAGER_RESOURCE_NAME"
+  }
+  refresh_interval = "86400s"
+  entities {
+    entity_name = "catalog"
+
+    params = jsonencode({
+      "inclusion_filters" : {
+        "knowledgeBaseSysId" : [
+          "123"
+        ]
+      }
+    })
+  }
+  entities {
+    entity_name = "incident"
+    params = jsonencode({
+      "inclusion_filters" : {
+        "knowledgeBaseSysId" : [
+          "123"
+        ]
+      }
+    })
+  }
+  entities {
+    entity_name = "knowledge_base"
+    params = jsonencode({
+      "inclusion_filters" : {
+        "knowledgeBaseSysId" : [
+          "123"
+        ]
+      }
+    })
+  }
+  static_ip_enabled = false
+  destination_configs {
+    key = "url"
+    destinations {
+      host = "https://gcpconnector1.service-now.com/"
+      port = 123
+    }
+  }
+  connector_modes          = ["DATA_INGESTION"]
+  sync_mode                = "PERIODIC"
+  detach_stores_on_destroy = true
+}
+`, context)
+}