I'm trying to configure an index with certain mappings and filters, but whenever I try to create the index I get the following error:
"[amgindex] failed to create index]; nested: IllegalArgumentException[Custom Analyzer [amgsearch] failed to find filter under name [synonym]"
Here is the code I use to create the index:
public void newIndex() {
    var amgBasic = new CustomAnalyzer {
        Tokenizer = "edgeNGram",
        Filter = new string[] { "lowercase", "worddelimiter", "stemmerEng", "stemmerNl", "stopper", "snowball" }
    };
    var amgBasicText = new CustomAnalyzer {
        Tokenizer = "standard",
        Filter = new string[] { "lowercase", "worddelimiter" }
    };
    var amgSearch = new CustomAnalyzer {
        Tokenizer = "whitespace",
        Filter = new string[] { "lowercase", "synonym" }
    };
    var synonmyfilter = new SynonymTokenFilter() {
        Format = "Solr",
        SynonymsPath = "analysis/synonym.txt"
    };
    try {
        var result = client.CreateIndex("amgindex", i => i
            .Analysis(descriptor => descriptor
                .Analyzers(bases => bases
                    //.Add("amgBasic", amgBasic)
                    //.Add("amgBasicText", amgBasicText)
                    .Add("amgsearch", amgSearch)
                )
                .TokenFilters(c => c.Add("stemmereng", new StemmerTokenFilter() { Language = "english" }))
                .TokenFilters(c => c.Add("stemmernl", new StemmerTokenFilter() { Language = "english" }))
                .TokenFilters(c => c.Add("stopper", new StopTokenFilter() { Stopwords = new List<string>() { "_english_", "_dutch_" } }))
                .TokenFilters(c => c.Add("snowball", new SnowballTokenFilter() { Language = "english" }))
                .TokenFilters(c => c.Add("worddelimiter ", new WordDelimiterTokenFilter() { }))
                .TokenFilters(c => c.Add("synonym ", synonmyfilter))
            )
            .AddMapping<general_document>(m => m
                .Properties(o => o
                    .String(p => p.Name(x => x.object_name).IndexAnalyzer("amgSearch"))
                    .String(p => p.Name(x => x.title).IndexAnalyzer("amgSearch"))
                    .String(p => p.Name(x => x.Text).IndexAnalyzer("amgSearch"))
                )
            )
        );
        Log.Info("Index created? " + result.Acknowledged);
    } catch (Exception ex) {
        Log.Error("[index-creation] " + ex.Message);
        throw;
    }
}
The error pops up every time I use one of my own filters. Any clue as to why this is happening?
Answer (score: 0)
After spending a lot of time on this, I found it!! :)
There was a space in the names of two of the filters, and those spaces were causing the problem:
.TokenFilters(c => c.Add("worddelimiter ", new WordDelimiterTokenFilter() { })) // note the trailing space in "worddelimiter "
.TokenFilters(c => c.Add("synonym ", synonmyfilter)) // note the trailing space in "synonym "