我正在本地安装的 Elasticsearch 1.7.5 上尝试以下请求:
http://localhost:9200/_analyze?filter=shingle&tokenizer=keyword&text=alkis stack
得到的实际响应是:
{
"tokens":[
{
"token":"alkis stack",
"start_offset":0,
"end_offset":11,
"type":"word",
"position":1
}
]
}
而我期望看到类似这样的结果:
{
"tokens":[
{
"token":"alkis stack",
"start_offset":0,
"end_offset":11,
"type":"word",
"position":1
},
{
"token":"stack alkis",
"start_offset":0,
"end_offset":11,
"type":"word",
"position":1
}
]
}
我是不是遗漏了什么?
更新
{
"number_of_shards": 2,
"number_of_replicas": 0,
"analysis": {
"char_filter": {
"map_special_chars": {
"type": "mapping",
"mappings": [
"- => \\u0020",
". => \\u0020",
"? => \\u0020",
", => \\u0020",
"` => \\u0020",
"' => \\u0020",
"\" => \\u0020"
]
}
},
"filter": {
"permutate_fullname": {
"type": "shingle",
"max_shingle_size": 4,
"min_shingle_size": 2,
"output_unigrams": true,
"token_separator": " ",
"filler_token": "_"
}
},
"analyzer": {
"fullname_analyzer_search": {
"char_filter": [
"map_special_chars"
],
"filter": [
"asciifolding",
"lowercase",
"trim"
],
"type": "custom",
"tokenizer": "keyword"
},
"fullname_analyzer_index": {
"char_filter": [
"map_special_chars"
],
"filter": [
"asciifolding",
"lowercase",
"trim",
"permutate_fullname"
],
"type": "custom",
"tokenizer": "keyword"
}
}
}
}
我正在用类似这样的请求进行测试:
http://localhost:9200/INDEX_NAME/_analyze?analyzer=fullname_analyzer_index&text=alkis stack