Error while creating an index in Elasticsearch

I am new to Elasticsearch. I am trying to create an index, using Kibana as my client, with the following mapping that I found online, but it throws the error shown below the request.

PUT /local_test
    {
      "settings": {
        "index.mapping.total_fields.limit": 1000,
        "index.mapping.depth.limit": 20,
        "index.mapping.nested_fields.limit": 50,
        "number_of_shards": 5,
        "number_of_replicas": 1,
        "analysis": {
          "analyzer": {
            "edge_ngram_analyzer": {
              "type": "custom",
              "tokenizer": "edge_ngram_tokenizer",
              "filter": [
                "lowercase",
                "en_stopwords"
              ]
            },
            "standard_custom": {
              "type": "custom",
              "char_filter": [
                "punctuation_remap"
              ],
              "tokenizer": "standard",
              "filter": [
                "lowercase",
                "en_stopwords"
              ]
            },
            "lowercase_keyword": {
              "type": "custom",
              "tokenizer": "keyword",
              "filter": [
                "lowercase"
              ]
            }
          },
          "tokenizer": {
            "edge_ngram_tokenizer": {
              "type": "edge_ngram",
              "min_gram": 2,
              "max_gram": 50,
              "token_chars": [
                "letter",
                "digit"
              ]
            }
          },
          "filter": {
            "en_stopwords": {
              "type": "stop",
              "stopwords": "_english_"
            }
          },
          "char_filter": {
            "punctuation_remap": {
              "type": "mapping",
              "mappings": [
                ". => -",
                ": => -",
                "' => -"
              ]
            }
          }
        }
      },
      "mappings": {
        "local_test": {
          "_all": {
            "enabled": false
          },
          "properties": {
            "id": {
              "type": "keyword"
            },
            "user_id": {
              "type": "keyword"
            },
            "created_at": {
              "type": "date",
              "format": "yyyy-MM-dd HH:mm:ss||epoch_millis"
            },
            "docvalue": {
              "type": "object",
              "dynamic": false,
              "enabled": true,
              "properties": {
                "key": {
                  "type": "text",
                  "analyzer": "lowercase_keyword"
                },
                "value": {
                  "type": "text",
                  "analyzer": "lowercase_keyword"
                }
              }
            },
            "recurring": {
              "type": "boolean"
            },
            "amount": {
              "type": "long"
            }
          }
        }
      }
    }

"type" : "mapper_parsing_exception", "reason" : "Root mapping definition has unsupported parameters: [local_test : {_all={enabled=false}, properties={amount={type=long}, user_id={type=keyword}, recurring={type=boolean}, created_at={format=yyyy-MM-dd HH:mm:ss||epoch_millis, type=date}, id={type=keyword}, docvalue={dynamic=false, type=object, enabled=true, properties={value={analyzer=lowercase_keyword, type=text}, key={analyzer=lowercase_keyword, type=text}}}}}]"

There are two issues in your request. I am assuming you are on the latest major version, i.e. 7.x:

  1. You need to remove _all, as it has been removed in the latest versions. See this official blog on this change.
  2. Remove your type local_test, as mapping types have also been removed in the latest versions. For details, see the removal of types. (On 7.x there is a deprecated escape hatch for typed mappings; see the sketch after this list.)
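As a side note: if you are stuck with a client that still sends typed mappings, 7.x accepts them when you pass the deprecated include_type_name=true query parameter (removed entirely in 8.0). The _all block has to be removed either way. A minimal sketch, with the index name and field chosen only for illustration:

PUT /typed_test?include_type_name=true
{
    "mappings": {
        "local_test": {
            "properties": {
                "id": {
                    "type": "keyword"
                }
            }
        }
    }
}

This is only a stopgap for migration; the typeless request below remains the proper fix.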

So with the request below, it works fine:

PUT /local_test

{
    "settings": {
        "index.mapping.total_fields.limit": 1000,
        "index.mapping.depth.limit": 20,
        "index.mapping.nested_fields.limit": 50,
        "number_of_shards": 5,
        "number_of_replicas": 1,
        "analysis": {
            "analyzer": {
                "edge_ngram_analyzer": {
                    "type": "custom",
                    "tokenizer": "edge_ngram_tokenizer",
                    "filter": [
                        "lowercase",
                        "en_stopwords"
                    ]
                },
                "standard_custom": {
                    "type": "custom",
                    "char_filter": [
                        "punctuation_remap"
                    ],
                    "tokenizer": "standard",
                    "filter": [
                        "lowercase",
                        "en_stopwords"
                    ]
                },
                "lowercase_keyword": {
                    "type": "custom",
                    "tokenizer": "keyword",
                    "filter": [
                        "lowercase"
                    ]
                }
            },
            "tokenizer": {
                "edge_ngram_tokenizer": {
                    "type": "edge_ngram",
                    "min_gram": 2,
                    "max_gram": 50,
                    "token_chars": [
                        "letter",
                        "digit"
                    ]
                }
            },
            "filter": {
                "en_stopwords": {
                    "type": "stop",
                    "stopwords": "_english_"
                }
            },
            "char_filter": {
                "punctuation_remap": {
                    "type": "mapping",
                    "mappings": [
                        ". => -",
                        ": => -",
                        "' => -"
                    ]
                }
            }
        }
    },
    "mappings": {
        "properties": {
            "id": {
                "type": "keyword"
            },
            "user_id": {
                "type": "keyword"
            },
            "created_at": {
                "type": "date",
                "format": "yyyy-MM-dd HH:mm:ss||epoch_millis"
            },
            "docvalue": {
                "type": "object",
                "dynamic": false,
                "enabled": true,
                "properties": {
                    "key": {
                        "type": "text",
                        "analyzer": "lowercase_keyword"
                    },
                    "value": {
                        "type": "text",
                        "analyzer": "lowercase_keyword"
                    }
                }
            },
            "recurring": {
                "type": "boolean"
            },
            "amount": {
                "type": "long"
            }
        }
    }
}

Output

{
    "acknowledged": true,
    "shards_acknowledged": true,
    "index": "local_test"
}
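
To sanity-check the new index, you can index a document and exercise one of the custom analyzers via the _analyze API (all field values below are made up for illustration):

PUT /local_test/_doc/1
{
    "id": "1",
    "user_id": "user-42",
    "created_at": "2021-03-01 12:00:00",
    "docvalue": {
        "key": "Category",
        "value": "Utilities"
    },
    "recurring": true,
    "amount": 1200
}

GET /local_test/_analyze
{
    "analyzer": "edge_ngram_analyzer",
    "text": "Elasticsearch"
}

The second call should return lowercased edge n-grams (el, ela, elas, and so on), confirming that edge_ngram_analyzer was registered with the index.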