diff --git a/.changelog/2388.txt b/.changelog/2388.txt
new file mode 100644
index 0000000000..14807c8714
--- /dev/null
+++ b/.changelog/2388.txt
@@ -0,0 +1,11 @@
+```release-note:enhancement
+resource/mongodbatlas_search_index: Adds attribute `stored_source`
+```
+
+```release-note:enhancement
+data-source/mongodbatlas_search_index: Adds attribute `stored_source`
+```
+
+```release-note:enhancement
+data-source/mongodbatlas_search_indexes: Adds attribute `stored_source`
+```
diff --git a/internal/service/searchindex/data_source_search_index.go b/internal/service/searchindex/data_source_search_index.go
index 3283ff1c8c..495e7033e6 100644
--- a/internal/service/searchindex/data_source_search_index.go
+++ b/internal/service/searchindex/data_source_search_index.go
@@ -32,37 +32,35 @@ func returnSearchIndexDSSchema() map[string]*schema.Schema {
         },
         "analyzer": {
             Type:     schema.TypeString,
-            Optional: true,
+            Computed: true,
         },
         "analyzers": {
-            Type:             schema.TypeString,
-            Optional:         true,
-            DiffSuppressFunc: validateSearchAnalyzersDiff,
+            Type:     schema.TypeString,
+            Computed: true,
         },
         "collection_name": {
             Type:     schema.TypeString,
-            Optional: true,
+            Computed: true,
         },
         "database": {
             Type:     schema.TypeString,
-            Optional: true,
+            Computed: true,
         },
         "name": {
             Type:     schema.TypeString,
-            Optional: true,
+            Computed: true,
        },
         "search_analyzer": {
             Type:     schema.TypeString,
-            Optional: true,
+            Computed: true,
         },
         "mappings_dynamic": {
             Type:     schema.TypeBool,
-            Optional: true,
+            Computed: true,
         },
         "mappings_fields": {
-            Type:             schema.TypeString,
-            Optional:         true,
-            DiffSuppressFunc: validateSearchIndexMappingDiff,
+            Type:     schema.TypeString,
+            Computed: true,
         },
         "synonyms": {
             Type:     schema.TypeSet,
@@ -90,12 +88,15 @@ func returnSearchIndexDSSchema() map[string]*schema.Schema {
         },
         "type": {
             Type:     schema.TypeString,
-            Optional: true,
+            Computed: true,
         },
         "fields": {
-            Type:             schema.TypeString,
-            Optional:         true,
-            DiffSuppressFunc: validateSearchIndexMappingDiff,
+            Type:     schema.TypeString,
+            Computed: true,
+        },
+        "stored_source": {
+            Type:     schema.TypeString,
+            Computed: true,
         },
     }
 }
@@ -185,6 +186,15 @@ func dataSourceMongoDBAtlasSearchIndexRead(ctx context.Context, d *schema.Resour
         }
     }
 
+    storedSource := searchIndex.LatestDefinition.GetStoredSource()
+    strStoredSource, errStoredSource := MarshalStoredSource(storedSource)
+    if errStoredSource != nil {
+        return diag.FromErr(errStoredSource)
+    }
+    if err := d.Set("stored_source", strStoredSource); err != nil {
+        return diag.Errorf("error setting `stored_source` for search index (%s): %s", d.Id(), err)
+    }
+
     d.SetId(conversion.EncodeStateID(map[string]string{
         "project_id":   projectID.(string),
         "cluster_name": clusterName.(string),
diff --git a/internal/service/searchindex/data_source_search_indexes.go b/internal/service/searchindex/data_source_search_indexes.go
index 63a272af64..3cfd89f617 100644
--- a/internal/service/searchindex/data_source_search_indexes.go
+++ b/internal/service/searchindex/data_source_search_indexes.go
@@ -35,7 +35,7 @@ func PluralDataSource() *schema.Resource {
             Type:     schema.TypeList,
             Computed: true,
             Elem: &schema.Resource{
-                Schema: returnSearchIndexSchema(),
+                Schema: returnSearchIndexDSSchema(),
             },
         },
         "total_count": {
@@ -131,7 +131,13 @@ func flattenSearchIndexes(searchIndexes []admin.SearchIndexResponse, projectID,
             }
             searchIndexesMap[i]["fields"] = fieldsMarshaled
         }
-    }
+        storedSource := searchIndexes[i].LatestDefinition.GetStoredSource()
+        strStoredSource, errStoredSource := MarshalStoredSource(storedSource)
+        if errStoredSource != nil {
+            return nil, errStoredSource
+        }
+        searchIndexesMap[i]["stored_source"] = strStoredSource
+    }
     return searchIndexesMap, nil
 }
diff --git a/internal/service/searchindex/model_search_index.go b/internal/service/searchindex/model_search_index.go
new file mode 100644
index 0000000000..4fcb07b7a8
--- /dev/null
+++ b/internal/service/searchindex/model_search_index.go
@@ -0,0 +1,151 @@
+package searchindex
+
+import (
+    "bytes"
+    "context"
+    "encoding/json"
+    "log"
+    "strconv"
+
+    "github.com/go-test/deep"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+    "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
+    "go.mongodb.org/atlas-sdk/v20240530002/admin"
+)
+
+func flattenSearchIndexSynonyms(synonyms []admin.SearchSynonymMappingDefinition) []map[string]any {
+    synonymsMap := make([]map[string]any, len(synonyms))
+    for i, s := range synonyms {
+        synonymsMap[i] = map[string]any{
+            "name":              s.Name,
+            "analyzer":          s.Analyzer,
+            "source_collection": s.Source.Collection,
+        }
+    }
+    return synonymsMap
+}
+
+func expandSearchIndexSynonyms(d *schema.ResourceData) []admin.SearchSynonymMappingDefinition {
+    var synonymsList []admin.SearchSynonymMappingDefinition
+    if vSynonyms, ok := d.GetOk("synonyms"); ok {
+        for _, s := range vSynonyms.(*schema.Set).List() {
+            synonym := s.(map[string]any)
+            synonymsDoc := admin.SearchSynonymMappingDefinition{
+                Name:     synonym["name"].(string),
+                Analyzer: synonym["analyzer"].(string),
+                Source: admin.SynonymSource{
+                    Collection: synonym["source_collection"].(string),
+                },
+            }
+            synonymsList = append(synonymsList, synonymsDoc)
+        }
+    }
+    return synonymsList
+}
+
+func marshalSearchIndex(fields any) (string, error) {
+    respBytes, err := json.Marshal(fields)
+    return string(respBytes), err
+}
+
+func unmarshalSearchIndexMappingFields(str string) (map[string]any, diag.Diagnostics) {
+    fields := map[string]any{}
+    if str == "" {
+        return fields, nil
+    }
+    if err := json.Unmarshal([]byte(str), &fields); err != nil {
+        return nil, diag.Errorf("cannot unmarshal search index attribute `mappings_fields` because it has an incorrect format")
+    }
+    return fields, nil
+}
+
+func unmarshalSearchIndexFields(str string) ([]map[string]any, diag.Diagnostics) {
+    fields := []map[string]any{}
+    if str == "" {
+        return fields, nil
+    }
+    if err := json.Unmarshal([]byte(str), &fields); err != nil {
+        return nil, diag.Errorf("cannot unmarshal search index attribute `fields` because it has an incorrect format")
+    }
+
+    return fields, nil
+}
+
+func unmarshalSearchIndexAnalyzersFields(str string) ([]admin.AtlasSearchAnalyzer, diag.Diagnostics) {
+    fields := []admin.AtlasSearchAnalyzer{}
+    if str == "" {
+        return fields, nil
+    }
+    dec := json.NewDecoder(bytes.NewReader([]byte(str)))
+    dec.DisallowUnknownFields()
+    if err := dec.Decode(&fields); err != nil {
+        return nil, diag.Errorf("cannot unmarshal search index attribute `analyzers` because it has an incorrect format")
+    }
+    return fields, nil
+}
+
+func MarshalStoredSource(obj any) (string, error) {
+    if obj == nil {
+        return "", nil
+    }
+    if b, ok := obj.(bool); ok {
+        return strconv.FormatBool(b), nil
+    }
+    respBytes, err := json.Marshal(obj)
+    return string(respBytes), err
+}
+
+func UnmarshalStoredSource(str string) (any, diag.Diagnostics) {
+    switch str {
+    case "":
+        return any(nil), nil
+    case "true":
+        return true, nil
+    case "false":
+        return false, nil
+    default:
+        var obj any
+        if err := json.Unmarshal([]byte(str), &obj); err != nil {
+            return nil, diag.Errorf("cannot unmarshal search index attribute `stored_source` because it has an incorrect format")
+        }
+        return obj, nil
+    }
+}
+
+func diffSuppressJSON(k, old, newStr string, d *schema.ResourceData) bool {
+    var j, j2 any
+
+    if old == "" {
+        old = "{}"
+    }
+
+    if newStr == "" {
+        newStr = "{}"
+    }
+
+    if err := json.Unmarshal([]byte(old), &j); err != nil {
+        log.Printf("[ERROR] cannot unmarshal old search index analyzer json %v", err)
+    }
+    if err := json.Unmarshal([]byte(newStr), &j2); err != nil {
+        log.Printf("[ERROR] cannot unmarshal new search index analyzer json %v", err)
+    }
+    if diff := deep.Equal(&j, &j2); diff != nil {
+        log.Printf("[DEBUG] deep equal not passed: %v", diff)
+        return false
+    }
+
+    return true
+}
+
+func resourceSearchIndexRefreshFunc(ctx context.Context, clusterName, projectID, indexID string, connV2 *admin.APIClient) retry.StateRefreshFunc {
+    return func() (any, string, error) {
+        searchIndex, _, err := connV2.AtlasSearchApi.GetAtlasSearchIndex(ctx, projectID, clusterName, indexID).Execute()
+        if err != nil {
+            return nil, "ERROR", err
+        }
+        status := conversion.SafeString(searchIndex.Status)
+        return searchIndex, status, nil
+    }
+}
diff --git a/internal/service/searchindex/resource_search_index.go b/internal/service/searchindex/resource_search_index.go
index da544e5b27..0139101588 100644
--- a/internal/service/searchindex/resource_search_index.go
+++ b/internal/service/searchindex/resource_search_index.go
@@ -1,16 +1,13 @@
 package searchindex
 
 import (
-    "bytes"
     "context"
-    "encoding/json"
     "errors"
     "fmt"
     "log"
     "strings"
     "time"
 
-    "github.com/go-test/deep"
     "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
     "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry"
     "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
@@ -64,7 +61,7 @@ func returnSearchIndexSchema() map[string]*schema.Schema {
         "analyzers": {
             Type:             schema.TypeString,
             Optional:         true,
-            DiffSuppressFunc: validateSearchAnalyzersDiff,
+            DiffSuppressFunc: diffSuppressJSON,
         },
         "collection_name": {
             Type:     schema.TypeString,
@@ -89,7 +86,7 @@ func returnSearchIndexSchema() map[string]*schema.Schema {
         "mappings_fields": {
             Type:             schema.TypeString,
             Optional:         true,
-            DiffSuppressFunc: validateSearchIndexMappingDiff,
+            DiffSuppressFunc: diffSuppressJSON,
         },
         "synonyms": {
             Type:     schema.TypeSet,
@@ -126,7 +123,12 @@ func returnSearchIndexSchema() map[string]*schema.Schema {
         "fields": {
             Type:             schema.TypeString,
             Optional:         true,
-            DiffSuppressFunc: validateSearchIndexMappingDiff,
+            DiffSuppressFunc: diffSuppressJSON,
+        },
+        "stored_source": {
+            Type:             schema.TypeString,
+            Optional:         true,
+            DiffSuppressFunc: diffSuppressJSON,
         },
     }
 }
@@ -258,6 +260,14 @@ func resourceUpdate(ctx context.Context, d *schema.ResourceData, meta any) diag.
         searchIndex.Definition.Synonyms = &synonyms
     }
 
+    if d.HasChange("stored_source") {
+        obj, err := UnmarshalStoredSource(d.Get("stored_source").(string))
+        if err != nil {
+            return err
+        }
+        searchIndex.Definition.StoredSource = obj
+    }
+
     if _, _, err := connV2.AtlasSearchApi.UpdateAtlasSearchIndex(ctx, projectID, clusterName, indexID, searchIndex).Execute(); err != nil {
         return diag.Errorf("error updating search index (%s): %s", indexName, err)
     }
@@ -372,24 +382,16 @@ func resourceRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Di
         }
     }
 
-    return nil
-}
-
-func flattenSearchIndexSynonyms(synonyms []admin.SearchSynonymMappingDefinition) []map[string]any {
-    synonymsMap := make([]map[string]any, len(synonyms))
-    for i, s := range synonyms {
-        synonymsMap[i] = map[string]any{
-            "name":              s.Name,
-            "analyzer":          s.Analyzer,
-            "source_collection": s.Source.Collection,
-        }
+    storedSource := searchIndex.LatestDefinition.GetStoredSource()
+    strStoredSource, errStoredSource := MarshalStoredSource(storedSource)
+    if errStoredSource != nil {
+        return diag.FromErr(errStoredSource)
+    }
+    if err := d.Set("stored_source", strStoredSource); err != nil {
+        return diag.Errorf("error setting `stored_source` for search index (%s): %s", d.Id(), err)
     }
-    return synonymsMap
-}
 
-func marshalSearchIndex(fields any) (string, error) {
-    respBytes, err := json.Marshal(fields)
-    return string(respBytes), err
+    return nil
 }
 
 func resourceCreate(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
@@ -433,6 +435,12 @@
         searchIndexRequest.Definition.Synonyms = &synonyms
     }
 
+    objStoredSource, errStoredSource := UnmarshalStoredSource(d.Get("stored_source").(string))
+    if errStoredSource != nil {
+        return errStoredSource
+    }
+    searchIndexRequest.Definition.StoredSource = objStoredSource
+
     dbSearchIndexRes, _, err := connV2.AtlasSearchApi.CreateAtlasSearchIndex(ctx, projectID, clusterName, searchIndexRequest).Execute()
     if err != nil {
         return diag.Errorf("error creating index: %s", err)
@@ -470,119 +478,3 @@ func resourceCreate(ctx context.Context, d *schema.ResourceData, meta any) diag.
     return resourceRead(ctx, d, meta)
 }
-
-func expandSearchIndexSynonyms(d *schema.ResourceData) []admin.SearchSynonymMappingDefinition {
-    var synonymsList []admin.SearchSynonymMappingDefinition
-    if vSynonyms, ok := d.GetOk("synonyms"); ok {
-        for _, s := range vSynonyms.(*schema.Set).List() {
-            synonym := s.(map[string]any)
-            synonymsDoc := admin.SearchSynonymMappingDefinition{
-                Name:     synonym["name"].(string),
-                Analyzer: synonym["analyzer"].(string),
-                Source: admin.SynonymSource{
-                    Collection: synonym["source_collection"].(string),
-                },
-            }
-            synonymsList = append(synonymsList, synonymsDoc)
-        }
-    }
-    return synonymsList
-}
-
-func validateSearchIndexMappingDiff(k, old, newStr string, d *schema.ResourceData) bool {
-    var j, j2 any
-
-    if old == "" {
-        old = "{}"
-    }
-
-    if newStr == "" {
-        newStr = "{}"
-    }
-
-    if err := json.Unmarshal([]byte(old), &j); err != nil {
-        log.Printf("[ERROR] cannot unmarshal old search index mapping json %v", err)
-    }
-    if err := json.Unmarshal([]byte(newStr), &j2); err != nil {
-        log.Printf("[ERROR] cannot unmarshal new search index mapping json %v", err)
-    }
-    if diff := deep.Equal(&j, &j2); diff != nil {
-        log.Printf("[DEBUG] deep equal not passed: %v", diff)
-        return false
-    }
-
-    return true
-}
-
-func validateSearchAnalyzersDiff(k, old, newStr string, d *schema.ResourceData) bool {
-    var j, j2 any
-
-    if old == "" {
-        old = "{}"
-    }
-
-    if newStr == "" {
-        newStr = "{}"
-    }
-
-    if err := json.Unmarshal([]byte(old), &j); err != nil {
-        log.Printf("[ERROR] cannot unmarshal old search index analyzer json %v", err)
-    }
-    if err := json.Unmarshal([]byte(newStr), &j2); err != nil {
-        log.Printf("[ERROR] cannot unmarshal new search index analyzer json %v", err)
-    }
-    if diff := deep.Equal(&j, &j2); diff != nil {
-        log.Printf("[DEBUG] deep equal not passed: %v", diff)
-        return false
-    }
-
-    return true
-}
-
-func unmarshalSearchIndexMappingFields(str string) (map[string]any, diag.Diagnostics) {
-    fields := map[string]any{}
-    if str == "" {
-        return fields, nil
-    }
-    if err := json.Unmarshal([]byte(str), &fields); err != nil {
-        return nil, diag.Errorf("cannot unmarshal search index attribute `mappings_fields` because it has an incorrect format")
-    }
-    return fields, nil
-}
-
-func unmarshalSearchIndexFields(str string) ([]map[string]any, diag.Diagnostics) {
-    fields := []map[string]any{}
-    if str == "" {
-        return fields, nil
-    }
-    if err := json.Unmarshal([]byte(str), &fields); err != nil {
-        return nil, diag.Errorf("cannot unmarshal search index attribute `fields` because it has an incorrect format")
-    }
-
-    return fields, nil
-}
-
-func unmarshalSearchIndexAnalyzersFields(str string) ([]admin.AtlasSearchAnalyzer, diag.Diagnostics) {
-    fields := []admin.AtlasSearchAnalyzer{}
-    if str == "" {
-        return fields, nil
-    }
-    dec := json.NewDecoder(bytes.NewReader([]byte(str)))
-    dec.DisallowUnknownFields()
-
-    if err := dec.Decode(&fields); err != nil {
-        return nil, diag.Errorf("cannot unmarshal search index attribute `analyzers` because it has an incorrect format")
-    }
-    return fields, nil
-}
-
-func resourceSearchIndexRefreshFunc(ctx context.Context, clusterName, projectID, indexID string, connV2 *admin.APIClient) retry.StateRefreshFunc {
-    return func() (any, string, error) {
-        searchIndex, _, err := connV2.AtlasSearchApi.GetAtlasSearchIndex(ctx, projectID, clusterName, indexID).Execute()
-        if err != nil {
-            return nil, "ERROR", err
-        }
-        status := conversion.SafeString(searchIndex.Status)
-        return searchIndex, status, nil
-    }
-}
diff --git a/internal/service/searchindex/resource_search_index_migration_test.go b/internal/service/searchindex/resource_search_index_migration_test.go
index 0cc1138662..a131d500ff 100644
--- a/internal/service/searchindex/resource_search_index_migration_test.go
+++ b/internal/service/searchindex/resource_search_index_migration_test.go
@@ -7,6 +7,7 @@ import (
 )
 
 func TestMigSearchIndex_basic(t *testing.T) {
+    mig.SkipIfVersionBelow(t, "1.17.4")
     mig.CreateAndRunTest(t, basicTestCase(t))
 }
 
diff --git a/internal/service/searchindex/resource_search_index_test.go b/internal/service/searchindex/resource_search_index_test.go
index 4600a2cb0a..6bb1a76db2 100644
--- a/internal/service/searchindex/resource_search_index_test.go
+++ b/internal/service/searchindex/resource_search_index_test.go
@@ -28,8 +28,8 @@ func TestAccSearchIndex_withSearchType(t *testing.T) {
         CheckDestroy:             acc.CheckDestroySearchIndex,
         Steps: []resource.TestStep{
             {
-                Config: configBasic(projectID, clusterName, indexName, "search", databaseName),
-                Check:  checkBasic(projectID, clusterName, indexName, "search", databaseName),
+                Config: configBasic(projectID, clusterName, indexName, "search", databaseName, ""),
+                Check:  checkBasic(projectID, clusterName, indexName, "search", databaseName, ""),
             },
         },
     })
@@ -163,11 +163,11 @@ func basicTestCase(tb testing.TB) *resource.TestCase {
         CheckDestroy: acc.CheckDestroySearchIndex,
         Steps: []resource.TestStep{
             {
-                Config: configBasic(projectID, clusterName, indexName, "", databaseName),
-                Check:  checkBasic(projectID, clusterName, indexName, "", databaseName),
+                Config: configBasic(projectID, clusterName, indexName, "", databaseName, ""),
+                Check:  checkBasic(projectID, clusterName, indexName, "", databaseName, ""),
             },
             {
-                Config:            configBasic(projectID, clusterName, indexName, "", databaseName),
+                Config:            configBasic(projectID, clusterName, indexName, "", databaseName, ""),
                 ResourceName:      resourceName,
                 ImportStateIdFunc: importStateIDFunc(resourceName),
                 ImportState:       true,
@@ -177,6 +177,74 @@
     }
 }
 
+func TestAccSearchIndex_withStoredSourceFalse(t *testing.T) {
+    resource.ParallelTest(t, *storedSourceTestCase(t, "false"))
+}
+
+func TestAccSearchIndex_withStoredSourceTrue(t *testing.T) {
+    resource.ParallelTest(t, *storedSourceTestCase(t, "true"))
+}
+
+func TestAccSearchIndex_withStoredSourceInclude(t *testing.T) {
+    resource.ParallelTest(t, *storedSourceTestCase(t, storedSourceIncludeJSON))
+}
+
+func TestAccSearchIndex_withStoredSourceExclude(t *testing.T) {
+    resource.ParallelTest(t, *storedSourceTestCase(t, storedSourceExcludeJSON))
+}
+
+func TestAccSearchIndex_withStoredSourceUpdateEmptyType(t *testing.T) {
+    resource.ParallelTest(t, *storedSourceTestCaseUpdate(t, ""))
+}
+
+func TestAccSearchIndex_withStoredSourceUpdateSearchType(t *testing.T) {
+    resource.ParallelTest(t, *storedSourceTestCaseUpdate(t, "search"))
+}
+
+func storedSourceTestCase(tb testing.TB, storedSource string) *resource.TestCase {
+    tb.Helper()
+    var (
+        projectID, clusterName = acc.ClusterNameExecution(tb)
+        indexName              = acc.RandomName()
+        databaseName           = acc.RandomName()
+    )
+    return &resource.TestCase{
+        PreCheck:                 func() { acc.PreCheckBasic(tb) },
+        ProtoV6ProviderFactories: acc.TestAccProviderV6Factories,
+        CheckDestroy:             acc.CheckDestroySearchIndex,
+        Steps: []resource.TestStep{
+            {
+                Config: configBasic(projectID, clusterName, indexName, "search", databaseName, storedSource),
+                Check:  checkBasic(projectID, clusterName, indexName, "search", databaseName, storedSource),
+            },
+        },
+ } +} + +func storedSourceTestCaseUpdate(tb testing.TB, searchType string) *resource.TestCase { + tb.Helper() + var ( + projectID, clusterName = acc.ClusterNameExecution(tb) + indexName = acc.RandomName() + databaseName = acc.RandomName() + ) + return &resource.TestCase{ + PreCheck: func() { acc.PreCheckBasic(tb) }, + ProtoV6ProviderFactories: acc.TestAccProviderV6Factories, + CheckDestroy: acc.CheckDestroySearchIndex, + Steps: []resource.TestStep{ + { + Config: configBasic(projectID, clusterName, indexName, searchType, databaseName, "false"), + Check: checkBasic(projectID, clusterName, indexName, searchType, databaseName, "false"), + }, + { + Config: configBasic(projectID, clusterName, indexName, searchType, databaseName, "true"), + Check: checkBasic(projectID, clusterName, indexName, searchType, databaseName, "true"), + }, + }, + } +} + func basicVectorTestCase(tb testing.TB) *resource.TestCase { tb.Helper() var ( @@ -238,11 +306,19 @@ func checkExists(resourceName string) resource.TestCheckFunc { } } -func configBasic(projectID, clusterName, indexName, indexType, databaseName string) string { - var indexTypeStr string +func configBasic(projectID, clusterName, indexName, indexType, databaseName, storedSource string) string { + var extra string if indexType != "" { - indexTypeStr = fmt.Sprintf("type=%q", indexType) + extra += fmt.Sprintf("type=%q\n", indexType) } + if storedSource != "" { + if storedSource == "true" || storedSource == "false" { + extra += fmt.Sprintf("stored_source=%q\n", storedSource) + } else { + extra += fmt.Sprintf("stored_source= <<-EOF\n%s\nEOF\n", storedSource) + } + } + return fmt.Sprintf(` resource "mongodbatlas_search_index" "test" { cluster_name = %[1]q @@ -260,12 +336,22 @@ func configBasic(projectID, clusterName, indexName, indexType, databaseName stri project_id = mongodbatlas_search_index.test.project_id index_id = mongodbatlas_search_index.test.index_id } - `, clusterName, projectID, indexName, databaseName, collectionName, searchAnalyzer, indexTypeStr) + `, clusterName, projectID, indexName, databaseName, collectionName, searchAnalyzer, extra) } -func checkBasic(projectID, clusterName, indexName, indexType, databaseName string) resource.TestCheckFunc { +func checkBasic(projectID, clusterName, indexName, indexType, databaseName, storedSource string) resource.TestCheckFunc { mappingsDynamic := "true" - return checkAggr(projectID, clusterName, indexName, indexType, databaseName, mappingsDynamic) + checks := []resource.TestCheckFunc{ + resource.TestCheckResourceAttr(resourceName, "stored_source", storedSource), + resource.TestCheckResourceAttr(datasourceName, "stored_source", storedSource), + } + if storedSource != "" && storedSource != "true" && storedSource != "false" { + checks = []resource.TestCheckFunc{ + resource.TestCheckResourceAttrWith(resourceName, "stored_source", acc.JSONEquals(storedSource)), + resource.TestCheckResourceAttrWith(datasourceName, "stored_source", acc.JSONEquals(storedSource)), + } + } + return checkAggr(projectID, clusterName, indexName, indexType, databaseName, mappingsDynamic, checks...) 
 }
 
 func configWithMapping(projectID, indexName, databaseName, clusterName string) string {
@@ -472,7 +558,21 @@ const (
             ]
         }
     ]
-`
+    `
+
+    incorrectFormatAnalyzersJSON = `
+    [
+        {
+            "wrongField":[
+                {
+                    "type":"length",
+                    "min":20,
+                    "max":33
+                }
+            ]
+        }
+    ]
+    `
 
     mappingsFieldsJSON = `
     {
@@ -516,17 +616,15 @@ const (
     }]
 `
 
-    incorrectFormatAnalyzersJSON = `
-    [
-        {
-            "wrongField":[
-                {
-                    "type":"length",
-                    "min":20,
-                    "max":33
-                }
-            ]
-        }
-    ]
+    storedSourceIncludeJSON = `
+    {
+        "include": ["include1","include2"]
+    }
+    `
+
+    storedSourceExcludeJSON = `
+    {
+        "exclude": ["exclude1", "exclude2"]
+    }
     `
 )
diff --git a/website/docs/d/search_index.html.markdown b/website/docs/d/search_index.html.markdown
index ebe0b7fc81..2eae237422 100644
--- a/website/docs/d/search_index.html.markdown
+++ b/website/docs/d/search_index.html.markdown
@@ -45,8 +45,6 @@ data "mongodbatlas_search_index" "test" {
 * `synonyms.#.name` - Name of the [synonym mapping definition](https://docs.atlas.mongodb.com/reference/atlas-search/synonyms/#std-label-synonyms-ref).
 * `synonyms.#.source_collection` - Name of the source MongoDB collection for the synonyms.
 * `synonyms.#.analyzer` - Name of the [analyzer](https://docs.atlas.mongodb.com/reference/atlas-search/analyzers/#std-label-analyzers-ref) to use with this synonym mapping.
-
-
-
+* `stored_source` - String that can be "true" (store all fields), "false" (default, don't store any field), or a JSON string that contains the list of fields to store (include) or not store (exclude) on Atlas Search. To learn more, see [Stored Source Fields](https://www.mongodb.com/docs/atlas/atlas-search/stored-source-definition/).
 
 For more information see: [MongoDB Atlas API Reference.](https://docs.atlas.mongodb.com/atlas-search/) - [and MongoDB Atlas API - Search](https://docs.atlas.mongodb.com/reference/api/atlas-search/) Documentation for more information.
diff --git a/website/docs/d/search_indexes.html.markdown b/website/docs/d/search_indexes.html.markdown
index 6f31eca1f1..84b346244b 100644
--- a/website/docs/d/search_indexes.html.markdown
+++ b/website/docs/d/search_indexes.html.markdown
@@ -37,6 +37,7 @@ data "mongodbatlas_search_indexes" "test" {
 
 ### Results
 
+* `index_id` - The unique identifier of the Atlas Search index.
 * `name` - Name of the index.
 * `status` - Current status of the index.
 * `analyzer` - [Analyzer](https://docs.atlas.mongodb.com/reference/atlas-search/analyzers/#std-label-analyzers-ref) to use when creating the index.
@@ -50,8 +51,6 @@ data "mongodbatlas_search_indexes" "test" {
 * `synonyms.#.name` - Name of the [synonym mapping definition](https://docs.atlas.mongodb.com/reference/atlas-search/synonyms/#std-label-synonyms-ref).
 * `synonyms.#.source_collection` - Name of the source MongoDB collection for the synonyms.
 * `synonyms.#.analyzer` - Name of the [analyzer](https://docs.atlas.mongodb.com/reference/atlas-search/analyzers/#std-label-analyzers-ref) to use with this synonym mapping.
-
-
-
+* `stored_source` - String that can be "true" (store all fields), "false" (default, don't store any field), or a JSON string that contains the list of fields to store (include) or not store (exclude) on Atlas Search. To learn more, see [Stored Source Fields](https://www.mongodb.com/docs/atlas/atlas-search/stored-source-definition/).
 
 For more information see: [MongoDB Atlas API Reference.](https://docs.atlas.mongodb.com/atlas-search/) - [and MongoDB Atlas API - Search](https://docs.atlas.mongodb.com/reference/api/atlas-search/) Documentation for more information.
diff --git a/website/docs/r/search_index.html.markdown b/website/docs/r/search_index.html.markdown
index 2630213f75..b695c7ca1c 100644
--- a/website/docs/r/search_index.html.markdown
+++ b/website/docs/r/search_index.html.markdown
@@ -162,35 +162,36 @@ EOF
 ```terraform
   mappings_fields = <<-EOF
 {
-  "address": {
-    "type": "document",
-    "fields": {
-      "city": {
-        "type": "string",
-        "analyzer": "lucene.simple",
-        "ignoreAbove": 255
-      },
-      "state": {
-        "type": "string",
-        "analyzer": "lucene.english"
+      "address": {
+        "type": "document",
+        "fields": {
+          "city": {
+            "type": "string",
+            "analyzer": "lucene.simple",
+            "ignoreAbove": 255
+          },
+          "state": {
+            "type": "string",
+            "analyzer": "lucene.english"
+          }
         }
-      }
-    }
-  },
-  "company": {
-    "type": "string",
-    "analyzer": "lucene.whitespace",
-    "multi": {
-      "mySecondaryAnalyzer": {
-        "type": "string",
-        "analyzer": "lucene.french"
+      },
+      "company": {
+        "type": "string",
+        "analyzer": "lucene.whitespace",
+        "multi": {
+          "mySecondaryAnalyzer": {
+            "type": "string",
+            "analyzer": "lucene.french"
+          }
         }
-      }
-    }
-  },
-  "employees": {
-    "type": "string",
-    "analyzer": "lucene.standard"
+      },
+      "employees": {
+        "type": "string",
+        "analyzer": "lucene.standard"
       }
     }
+  EOF
 ```
 * `search_analyzer` - [Analyzer](https://docs.atlas.mongodb.com/reference/atlas-search/analyzers/#std-label-analyzers-ref) to use when searching the index. Defaults to [lucene.standard](https://docs.atlas.mongodb.com/reference/atlas-search/analyzers/standard/#std-label-ref-standard-analyzer)
@@ -198,10 +199,20 @@ EOF
 * `fields` - Array of [Fields](https://www.mongodb.com/docs/atlas/atlas-search/field-types/knn-vector/#std-label-fts-data-types-knn-vector) to configure this `vectorSearch` index. It is mandatory for vector searches and it must contain at least one `vector` type field. This field needs to be a JSON string in order to be decoded correctly.
 
+* `stored_source` - String that can be "true" (store all fields), "false" (default, don't store any field), or a JSON string that contains the list of fields to store (include) or not store (exclude) on Atlas Search. To learn more, see [Stored Source Fields](https://www.mongodb.com/docs/atlas/atlas-search/stored-source-definition/).
+  ```terraform
+  stored_source = <<-EOF
+  {
+    "include": ["field1", "field2"]
+  }
+  EOF
+  ```
+
 ## Attributes Reference
 
 In addition to all arguments above, the following attributes are exported:
 
+* `index_id` - The unique identifier of the Atlas Search index.
 * `status` - Current status of the index.
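For anyone trying this change out locally, a minimal configuration exercising the new `stored_source` attribute might look like the sketch below. It is assembled from the docs examples in this diff; the project variable, cluster, database, collection, and field names are placeholders, not values taken from the PR:

```terraform
# Hypothetical usage sketch for the `stored_source` attribute added in this PR.
resource "mongodbatlas_search_index" "example" {
  project_id       = var.project_id      # placeholder: an existing Atlas project
  cluster_name     = "my-cluster"        # placeholder: an existing cluster
  name             = "example-index"
  database         = "sample_db"         # placeholder database
  collection_name  = "sample_collection" # placeholder collection
  search_analyzer  = "lucene.standard"
  mappings_dynamic = true

  # Booleans are passed as the strings "true"/"false"; a field list is passed
  # as a JSON document with an "include" or "exclude" array.
  stored_source = <<-EOF
  {
    "include": ["field1", "field2"]
  }
  EOF
}
```

Because the attribute is modeled as a plain string with `diffSuppressJSON` attached, whitespace and key-order differences between the configured JSON and the value returned by the Atlas API should not surface as spurious plan diffs.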