Delete the index (if it already exists)
DELETE my_test
Create the index with a custom ngram analyzer
PUT my_test
{"settings": {"index.max_ngram_diff": "32","analysis": {"analyzer": {"code_analyzer": {"tokenizer": "code_tokenizer","filter": ["lowercase"]}},"tokenizer": {"code_tokenizer": {"token_chars": ["letter","digit"],"min_gram": "3","type": "ngram","max_gram": "32"}}}}
}
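With these settings, code_tokenizer splits the text into runs of letters and digits and emits every substring between 3 and 32 characters long, which the analyzer then lowercases. index.max_ngram_diff has to be raised because Elasticsearch only allows a gap of 1 between min_gram and max_gram by default. As a quick sanity check (a sketch that assumes the index above was created without errors), the _analyze API can preview what the custom analyzer produces:

GET my_test/_analyze
{
  "analyzer": "code_analyzer",
  "text": "2009"
}

For "2009" this should come back with the grams "200", "2009" and "009".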
Set up the field mapping
PUT my_test/_mapping
{"properties": {"name":{"type":"text","analyzer":"code_analyzer"}}
}
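An optional verification step: reading the mapping back should show the name field bound to code_analyzer.

GET my_test/_mapping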
Index test documents
PUT my_test/_doc/1
{"name":"测试搜索09"
}PUT my_test/_doc/2
{"name":"2009"
}
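Newly indexed documents only become visible to search after a refresh, which happens automatically about once per second. To make the next query deterministic, a refresh can be forced:

POST my_test/_refresh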
Test a search
GET my_test/_search
{"query":{"match_phrase": {"name": "092"}}
}
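Because "092" is not a substring of either "测试搜索09" or "2009", no document contains that gram, so this phrase query is expected to return no hits. A sketch of a query that should match (assuming the documents above were indexed): "009" is one of the 3-character grams of "2009", so it finds document 2.

GET my_test/_search
{
  "query": {
    "match_phrase": { "name": "009" }
  }
}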
Test the tokenizer
GET my_test/_analyze
{"text": "测试搜索09","tokenizer": "ngram"
}GET core_product_basic/_analyze
{"text": "测试搜索09","tokenizer": "ngram"
}
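Note that specifying "tokenizer": "ngram" here exercises the built-in ngram tokenizer with its defaults (1- and 2-character grams), not the custom code_tokenizer defined above. To see what is actually indexed for the name field, the custom analyzer can be referenced by name (a sketch against the same my_test index):

GET my_test/_analyze
{
  "analyzer": "code_analyzer",
  "text": "测试搜索09"
}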