
Python Elasticsearch quickstart


A few quickstart examples with support for Chinese word segmentation and pinyin search. Elasticsearch version: 6.7. docker-compose setup: https://github.com/sazima/docker-compose/tree/master/elastic_search_pinyin
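Before creating the index, it helps to confirm the cluster is reachable and that the analysis-ik and analysis-pinyin plugins are installed (the docker-compose setup above is assumed to provide both). A minimal sketch, assuming Elasticsearch listens on the default localhost:9200:

from elasticsearch import Elasticsearch

es = Elasticsearch()       # defaults to localhost:9200
print(es.ping())           # True if the cluster answers
print(es.cat.plugins())    # should list analysis-ik and analysis-pinyin on each node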

Create the index, configuring the Chinese (ik) analyzer and a pinyin analyzer.

from elasticsearch import Elasticsearch

es = Elasticsearch()

es.indices.create('blog', body={
    "settings": {
        "number_of_shards": 3,
        "number_of_replicas": 1,
        "analysis": {
            "analyzer": {
                # use ik_smart for Chinese word segmentation by default
                "default": {
                    "tokenizer": "ik_smart"
                },
                # custom analyzer built on the pinyin tokenizer defined below
                "pinyin_analyzer": {
                    "type": "custom",
                    "tokenizer": "my_pinyin",
                    "filter": ["word_delimiter"]
                }
            },
            "tokenizer": {
                "my_pinyin": {
                    "type": "pinyin",
                    "keep_first_letter": True,
                    "keep_separate_first_letter": True,
                    "keep_full_pinyin": True,
                    "keep_original": True,
                    "limit_first_letter_length": 16,
                    "lowercase": True
                }
            }
        }
    }
})
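To check the analyzers before indexing anything, the _analyze API can be run against the new index. A quick sketch; the sample text is arbitrary:

# Chinese segmentation via the default (ik_smart) analyzer
print(es.indices.analyze(index='blog', body={
    'analyzer': 'default',
    'text': '这是一个标题'
}))

# pinyin tokens, including full pinyin and first-letter abbreviations
print(es.indices.analyze(index='blog', body={
    'analyzer': 'pinyin_analyzer',
    'text': '这是一个标题'
}))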

Define the field mappings for the article doc type.

es.indices.put_mapping(index='blog', doc_type='article', body={
    "properties": {
        # each text field is analyzed with ik_max_word and also exposes a
        # "pinyin" sub-field analyzed with the pinyin_analyzer defined above
        # (note: copy_to normally expects a target field name, not a boolean)
        "title": {
            "type": "text",
            "analyzer": "ik_max_word",
            "copy_to": True,
            "fields": {
                "pinyin": {
                    "type": "text",
                    "term_vector": "with_positions_offsets",
                    "analyzer": "pinyin_analyzer",
                    "boost": 10
                }
            }
        },
        "content": {
            "type": "text",
            "analyzer": "ik_max_word",
            "copy_to": True,
            "fields": {
                "pinyin": {
                    "type": "text",
                    "term_vector": "with_positions_offsets",
                    "analyzer": "pinyin_analyzer",
                    "boost": 10
                }
            }
        },
        "author": {
            "type": "text",
            "analyzer": "ik_max_word",
            "copy_to": True,
            "fields": {
                "pinyin": {
                    "type": "text",
                    "term_vector": "with_positions_offsets",
                    "analyzer": "pinyin_analyzer",
                    "boost": 10
                }
            }
        },
        "keyword": {
            "type": "text",
            "analyzer": "ik_max_word",
            "fields": {
                "pinyin": {
                    "type": "text",
                    "term_vector": "with_positions_offsets",
                    "analyzer": "pinyin_analyzer",
                    "boost": 10
                }
            }
        }
    }
})

Create, delete, and query documents

# index a document with id "2"
es.create('blog', 'article', '2', {
    'title': '这是一个标题',
    'content': '这里是内容'
})

# fetch it back by id
es.get('blog', 'article', '2')

# search both the Chinese-analyzed field and its pinyin sub-field,
# so the full-pinyin query "zheshi" matches the title "这是一个标题"
es.search('blog', 'article', {
    "query": {
        "multi_match": {
            "type": "best_fields",
            "operator": "and",
            "query": "zheshi",
            "fields": ["title", "title.pinyin"]
        }
    }
})
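Two details the example glosses over: a freshly indexed document only becomes searchable after the index refreshes, and the heading above also mentions deletion. A brief sketch of both, reusing the same es client:

# force a refresh so the document indexed above is searchable immediately
es.indices.refresh(index='blog')

# delete the document by id
es.delete('blog', 'article', '2')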
