嗨,我正在尝试从一个Elasticsearch实例复制一些索引到另一个实例。我正在尝试复制这个名为"tagindex_v2"的索引。我向 http://localhost:9400/tagindex_v2 发送了请求
,在主体中我复制了json,
{
"tagindex_v2": {
"aliases": {},
"mappings": {
"properties": {
"deprecated": {
"type": "boolean"
},
"description": {
"type": "keyword",
"normalizer": "keyword_normalizer",
"fields": {
"delimited": {
"type": "text",
"analyzer": "word_delimited"
},
"keyword": {
"type": "keyword"
}
}
},
"hasOwners": {
"type": "boolean"
},
"id": {
"type": "keyword",
"normalizer": "keyword_normalizer",
"fields": {
"delimited": {
"type": "text",
"analyzer": "word_delimited"
},
"keyword": {
"type": "keyword"
},
"ngram": {
"type": "text",
"analyzer": "partial"
}
}
},
"name": {
"type": "keyword",
"normalizer": "keyword_normalizer",
"fields": {
"delimited": {
"type": "text",
"analyzer": "word_delimited"
},
"keyword": {
"type": "keyword"
},
"ngram": {
"type": "text",
"analyzer": "partial"
}
}
},
"owners": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
},
"analyzer": "urn_component"
},
"removed": {
"type": "boolean"
},
"urn": {
"type": "keyword"
}
}
},
"settings": {
"index": {
"max_ngram_diff": "17",
"routing": {
"allocation": {
"include": {
"_tier_preference": "data_content"
}
}
},
"number_of_shards": "1",
"provided_name": "tagindex_v2",
"creation_date": "1660141415133",
"analysis": {
"filter": {
"partial_filter": {
"type": "edge_ngram",
"min_gram": "3",
"max_gram": "20"
},
"custom_delimiter": {
"type": "word_delimiter",
"preserve_original": "true",
"split_on_numerics": "false"
},
"urn_stop_filter": {
"type": "stop",
"stopwords": [
"urn",
"li",
"container",
"datahubpolicy",
"datahubaccesstoken",
"datahubupgrade",
"corpgroup",
"dataprocess",
"mlfeaturetable",
"mlmodelgroup",
"datahubexecutionrequest",
"invitetoken",
"datajob",
"assertion",
"dataplatforminstance",
"schemafield",
"tag",
"glossaryterm",
"mlprimarykey",
"dashboard",
"notebook",
"mlmodeldeployment",
"datahubretention",
"dataplatform",
"corpuser",
"test",
"mlmodel",
"glossarynode",
"mlfeature",
"dataflow",
"datahubingestionsource",
"domain",
"telemetry",
"datahubsecret",
"dataset",
"chart",
"dataprocessinstance"
]
}
},
"normalizer": {
"keyword_normalizer": {
"filter": [
"lowercase",
"asciifolding"
]
}
},
"analyzer": {
"browse_path_hierarchy": {
"tokenizer": "path_hierarchy"
},
"slash_pattern": {
"filter": [
"lowercase"
],
"tokenizer": "slash_tokenizer"
},
"partial_urn_component": {
"filter": [
"lowercase",
"urn_stop_filter",
"custom_delimiter",
"partial_filter"
],
"tokenizer": "urn_char_group"
},
"word_delimited": {
"filter": [
"custom_delimiter",
"lowercase",
"stop"
],
"tokenizer": "main_tokenizer"
},
"partial": {
"filter": [
"custom_delimiter",
"lowercase",
"partial_filter"
],
"tokenizer": "main_tokenizer"
},
"urn_component": {
"filter": [
"lowercase",
"urn_stop_filter",
"custom_delimiter"
],
"tokenizer": "urn_char_group"
},
"custom_keyword": {
"filter": [
"lowercase",
"asciifolding"
],
"tokenizer": "keyword"
}
},
"tokenizer": {
"main_tokenizer": {
"pattern": "[ ./]",
"type": "pattern"
},
"slash_tokenizer": {
"pattern": "[/]",
"type": "pattern"
},
"urn_char_group": {
"pattern": "[:\\s(),]",
"type": "pattern"
}
}
},
"number_of_replicas": "1",
"uuid": "AoFgpzTXRHyyTL7cuLsS1A",
"version": {
"created": "7160299"
}
}
}
}
}
我得到这个错误
{
"error": {
"root_cause": [
{
"type": "parse_exception",
"reason": "unknown key [tagindex_v2] for create index"
}
],
"type": "parse_exception",
"reason": "unknown key [tagindex_v2] for create index"
},
"status": 400
}
有人能告诉我该怎么解决这个问题吗?我只是简单地从另一个实例的现有索引中复制JSON内容,然后在这里创建它。
先谢了
1条答案
按热度或按时间排序
您只需获取位于
tagindex_v2
键内部的内容(即去掉最外层的 "tagindex_v2" 包装)。此外,您还需要从
settings
部分删除以下属性:provided_name
creation_date
uuid
version