Elasticsearch 中的非法参数异常(illegal_argument_exception)

时间:2019-05-28 16:20:44

标签: elasticsearch

我正在尝试对 "name" 和 "name_auto" 字段进行模糊、同义词、自动完成和 ngram 搜索。但是我收到了非法参数异常(illegal_argument_exception)错误。
如果我仅删除同义词搜索,那么我的搜索工作正常。但是按照下面的配置,我甚至无法创建索引。问题可能出在这里,请帮忙。

   {
            "settings": {
                "index": {
                    "analysis": {
                        "filter": {
                            "synonym": {
                                "type": "synonym",
                                "format": "wordnet",
                                "synonyms_path": "analysis/wn_s.pl"
                            },
                            "english_stop": {
                                "type": "stop",
                                "stopwords": "_english_"
                            },
                            "english_stemmer": {
                                "type": "stemmer",
                                "language": "english"
                            }
                        },
                        "analyzer": {
                            "synonym": {
                                "tokenizer": "standard",
                                "filter": ["english_stop", "english_stemmer", "synonym"]
                            },

                            "keyword_analyzer": {
                                "filter": [
                                    "lowercase",
                                    "asciifolding",
                                    "trim"
                                ],
                                "char_filter": [],
                                "type": "custom",
                                "tokenizer": "keyword"
                            },
                            "edge_ngram_analyzer": {
                                "filter": [
                                    "lowercase"
                                ],
                                "tokenizer": "edge_ngram_tokenizer"
                            },
                            "edge_ngram_search_analyzer": {
                                "tokenizer": "lowercase"
                            }
                        },

                        "tokenizer": {
                            "edge_ngram_tokenizer": {
                                "type": "edge_ngram",
                                "min_gram": 1,
                                "max_gram": 25,
                                "token_chars": [
                                    "letter"
                                ]
                            }

                        },
                        "mappings": {
                            "properties": {
                                "firebaseId": {
                                    "type": "text"
                                },
                                "name": {
                                    "fielddata": true,
                                    "type": "text",
                                    "analyzer": "standard"
                                },
                                "name_auto": {
                                    "type": "text",

                                    "fields": {
                                        "keywordstring": {
                                            "type": "text",
                                            "analyzer": "keyword_analyzer"
                                        },
                                        "edgengram": {
                                            "type": "text",
                                            "analyzer": "edge_ngram_analyzer",
                                            "search_analyzer": "edge_ngram_search_analyzer"
                                        },
                                        "completion": {
                                            "type": "completion"
                                        },
                                        "synonym_analyzer": {
                                            "type": "string",
                                            "analyzer": "synonym"
                                        }
                                    }
                                }

                            }
                        }
                    }
                }
            }
        }

以下是我收到的回复

{
    "error": {
        "root_cause": [
            {
                "type": "illegal_argument_exception",
                "reason": "failed to build synonyms"
            }
        ],
        "type": "illegal_argument_exception",
        "reason": "failed to build synonyms",
        "caused_by": {
            "type": "parse_exception",
            "reason": "Invalid synonym rule at line 109",
            "caused_by": {
                "type": "illegal_argument_exception",
                "reason": "term: course of action analyzed to a token (action) with position increment != 1 (got: 2)"
            }
        }
    },
    "status": 400
}

1 个答案:

答案 0 :(得分:0)

解决方案非常简单:我必须删除 "english_stop" 和 "english_stemmer" 这两个过滤器。wordnet 格式的同义词过滤器似乎不支持与它们同时使用。

  {
            "settings": {
                "index": {
                    "analysis": {
                        "filter": {
                            "synonym": {
                                "type": "synonym",
                                "format": "wordnet",
                                "synonyms_path": "analysis/wn_s.pl"
                            }

                        },
                        "analyzer": {
                            "synonym": {
                                "tokenizer": "standard",
                                "filter": ["synonym"]
                            },

                            "keyword_analyzer": {
                                "filter": [
                                    "lowercase",
                                    "asciifolding",
                                    "trim"
                                ],
                                "char_filter": [],
                                "type": "custom",
                                "tokenizer": "keyword"
                            },
                            "edge_ngram_analyzer": {
                                "filter": [
                                    "lowercase"
                                ],
                                "tokenizer": "edge_ngram_tokenizer"
                            },
                            "edge_ngram_search_analyzer": {
                                "tokenizer": "lowercase"
                            }
                        },

                        "tokenizer": {
                            "edge_ngram_tokenizer": {
                                "type": "edge_ngram",
                                "min_gram": 1,
                                "max_gram": 25,
                                "token_chars": [
                                    "letter"
                                ]
                            }

                        },
                        "mappings": {
                            "properties": {
                                "firebaseId": {
                                    "type": "text"
                                },
                                "name": {
                                    "fielddata": true,
                                    "type": "text",
                                    "analyzer": "standard"
                                },
                                "name_auto": {
                                    "type": "text",

                                    "fields": {
                                        "keywordstring": {
                                            "type": "text",
                                            "analyzer": "keyword_analyzer"
                                        },
                                        "edgengram": {
                                            "type": "text",
                                            "analyzer": "edge_ngram_analyzer",
                                            "search_analyzer": "edge_ngram_search_analyzer"
                                        },
                                        "completion": {
                                            "type": "completion"
                                        },
                                        "synonym_analyzer": {
                                            "type": "string",
                                            "analyzer": "synonym"
                                        }
                                    }
                                }

                            }
                        }
                    }
                }
            }
        }