I have a two-node Elasticsearch setup where the same search query returns different results depending on which node it is sent to, and I would like to find out why that is the case. Details:
- The same documents (equal content and id) have a different score on the two nodes resulting in different sort order.
- It is reproducible: I can delete the whole index and rebuild it from database and still the results are different.
- The two Elasticsearch nodes are deployed embedded in a Java EE WAR. On each deployment the index is rebuilt from the database.
- Initially, when the problem was found, even the hits.total counts for the same query were different on the two nodes. They became the same after I deleted and rebuilt the index.
- My workaround for now is to use the preference=_local request parameter, as suggested elsewhere.
- I couldn't find any interesting errors in the logs so far.
_cluster/state:
{
"cluster_name": "elasticsearch.abc",
"version": 330,
"master_node": "HexGKOoHSxqRaMmwduCVIA",
"blocks": {},
"nodes": {
"rUZDrUfMR2-RWcy4t0YQNw": {
"name": "Owl",
"transport_address": "inet[/10.123.123.123:9303]",
"attributes": {}
},
"HexGKOoHSxqRaMmwduCVIA": {
"name": "Bloodlust II",
"transport_address": "inet[/10.123.123.124:9303]",
"attributes": {}
}
},
"metadata": {
"templates": {},
"indices": {
"abc": {
"state": "open",
"settings": {
"index": {
"creation_date": "1432297566361",
"uuid": "LKx6Ro9CRXq6JZ9a29jWeA",
"analysis": {
"filter": {
"substring": {
"type": "nGram",
"min_gram": "1",
"max_gram": "50"
}
},
"analyzer": {
"str_index_analyzer": {
"filter": [
"lowercase",
"substring"
],
"tokenizer": "keyword"
},
"str_search_analyzer": {
"filter": [
"lowercase"
],
"tokenizer": "keyword"
}
}
},
"number_of_replicas": "1",
"number_of_shards": "5",
"version": {
"created": "1050099"
}
}
},
"mappings": {
"some_mapping": {
...
}
...
},
"aliases": []
}
}
},
"routing_table": {
"indices": {
"abc": {
"shards": {
"0": [
{
"state": "STARTED",
"primary": true,
"node": "HexGKOoHSxqRaMmwduCVIA",
"relocating_node": null,
"shard": 0,
"index": "abc"
},
{
"state": "STARTED",
"primary": false,
"node": "rUZDrUfMR2-RWcy4t0YQNw",
"relocating_node": null,
"shard": 0,
"index": "abc"
}
],
"1": [
{
"state": "STARTED",
"primary": false,
"node": "HexGKOoHSxqRaMmwduCVIA",
"relocating_node": null,
"shard": 1,
"index": "abc"
},
{
"state": "STARTED",
"primary": true,
"node": "rUZDrUfMR2-RWcy4t0YQNw",
"relocating_node": null,
"shard": 1,
"index": "abc"
}
],
"2": [
{
"state": "STARTED",
"primary": true,
"node": "HexGKOoHSxqRaMmwduCVIA",
"relocating_node": null,
"shard": 2,
"index": "abc"
},
{
"state": "STARTED",
"primary": false,
"node": "rUZDrUfMR2-RWcy4t0YQNw",
"relocating_node": null,
"shard": 2,
"index": "abc"
}
],
"3": [
{
"state": "STARTED",
"primary": false,
"node": "HexGKOoHSxqRaMmwduCVIA",
"relocating_node": null,
"shard": 3,
"index": "abc"
},
{
"state": "STARTED",
"primary": true,
"node": "rUZDrUfMR2-RWcy4t0YQNw",
"relocating_node": null,
"shard": 3,
"index": "abc"
}
],
"4": [
{
"state": "STARTED",
"primary": true,
"node": "HexGKOoHSxqRaMmwduCVIA",
"relocating_node": null,
"shard": 4,
"index": "abc"
},
{
"state": "STARTED",
"primary": false,
"node": "rUZDrUfMR2-RWcy4t0YQNw",
"relocating_node": null,
"shard": 4,
"index": "abc"
}
]
}
}
}
},
"routing_nodes": {
"unassigned": [],
"nodes": {
"HexGKOoHSxqRaMmwduCVIA": [
{
"state": "STARTED",
"primary": true,
"node": "HexGKOoHSxqRaMmwduCVIA",
"relocating_node": null,
"shard": 4,
"index": "abc"
},
{
"state": "STARTED",
"primary": true,
"node": "HexGKOoHSxqRaMmwduCVIA",
"relocating_node": null,
"shard": 0,
"index": "abc"
},
{
"state": "STARTED",
"primary": false,
"node": "HexGKOoHSxqRaMmwduCVIA",
"relocating_node": null,
"shard": 3,
"index": "abc"
},
{
"state": "STARTED",
"primary": false,
"node": "HexGKOoHSxqRaMmwduCVIA",
"relocating_node": null,
"shard": 1,
"index": "abc"
},
{
"state": "STARTED",
"primary": true,
"node": "HexGKOoHSxqRaMmwduCVIA",
"relocating_node": null,
"shard": 2,
"index": "abc"
}
],
"rUZDrUfMR2-RWcy4t0YQNw": [
{
"state": "STARTED",
"primary": false,
"node": "rUZDrUfMR2-RWcy4t0YQNw",
"relocating_node": null,
"shard": 4,
"index": "abc"
},
{
"state": "STARTED",
"primary": false,
"node": "rUZDrUfMR2-RWcy4t0YQNw",
"relocating_node": null,
"shard": 0,
"index": "abc"
},
{
"state": "STARTED",
"primary": true,
"node": "rUZDrUfMR2-RWcy4t0YQNw",
"relocating_node": null,
"shard": 3,
"index": "abc"
},
{
"state": "STARTED",
"primary": true,
"node": "rUZDrUfMR2-RWcy4t0YQNw",
"relocating_node": null,
"shard": 1,
"index": "abc"
},
{
"state": "STARTED",
"primary": false,
"node": "rUZDrUfMR2-RWcy4t0YQNw",
"relocating_node": null,
"shard": 2,
"index": "abc"
}
]
}
},
"allocations": []
}
_cluster/health
{
"cluster_name": "elasticsearch.abc",
"status": "green",
"timed_out": false,
"number_of_nodes": 2,
"number_of_data_nodes": 2,
"active_primary_shards": 5,
"active_shards": 10,
"relocating_shards": 0,
"initializing_shards": 0,
"unassigned_shards": 0,
"number_of_pending_tasks": 0
}
_cluster/stats
{
"timestamp": 1432312770877,
"cluster_name": "elasticsearch.abc",
"status": "green",
"indices": {
"count": 1,
"shards": {
"total": 10,
"primaries": 5,
"replication": 1,
"index": {
"shards": {
"min": 10,
"max": 10,
"avg": 10
},
"primaries": {
"min": 5,
"max": 5,
"avg": 5
},
"replication": {
"min": 1,
"max": 1,
"avg": 1
}
}
},
"docs": {
"count": 19965,
"deleted": 4
},
"store": {
"size_in_bytes": 399318082,
"throttle_time_in_millis": 0
},
"fielddata": {
"memory_size_in_bytes": 60772,
"evictions": 0
},
"filter_cache": {
"memory_size_in_bytes": 15284,
"evictions": 0
},
"id_cache": {
"memory_size_in_bytes": 0
},
"completion": {
"size_in_bytes": 0
},
"segments": {
"count": 68,
"memory_in_bytes": 10079288,
"index_writer_memory_in_bytes": 0,
"index_writer_max_memory_in_bytes": 5120000,
"version_map_memory_in_bytes": 0,
"fixed_bit_set_memory_in_bytes": 0
},
"percolate": {
"total": 0,
"time_in_millis": 0,
"current": 0,
"memory_size_in_bytes": -1,
"memory_size": "-1b",
"queries": 0
}
},
"nodes": {
"count": {
"total": 2,
"master_only": 0,
"data_only": 0,
"master_data": 2,
"client": 0
},
"versions": [
"1.5.0"
],
"os": {
"available_processors": 8,
"mem": {
"total_in_bytes": 0
},
"cpu": []
},
"process": {
"cpu": {
"percent": 0
},
"open_file_descriptors": {
"min": 649,
"max": 654,
"avg": 651
}
},
"jvm": {
"max_uptime_in_millis": 2718272183,
"versions": [
{
"version": "1.7.0_40",
"vm_name": "Java HotSpot(TM) 64-Bit Server VM",
"vm_version": "24.0-b56",
"vm_vendor": "Oracle Corporation",
"count": 2
}
],
"mem": {
"heap_used_in_bytes": 2665186528,
"heap_max_in_bytes": 4060086272
},
"threads": 670
},
"fs": {
"total_in_bytes": 631353901056,
"free_in_bytes": 209591468032,
"available_in_bytes": 209591468032
},
"plugins": []
}
}
Example query:
/_search?from=22&size=1
{
"query": {
"bool": {
"should": [{
"match": {
"address.city": {
"query": "Bremen",
"boost": 2
}
}
}],
"must": [{
"match": {
"type": "L"
}
}]
}
}
}
Response for the first request
{
"took": 30,
"timed_out": false,
"_shards": {
"total": 5,
"successful": 5,
"failed": 0
},
"hits": {
"total": 19543,
"max_score": 6.407021,
"hits": [{
"_index": "abc",
"_type": "xyz",
"_id": "ABC123",
"_score": 5.8341036,
"_source": {
...
}
}]
}
}
Response for the second request
{
"took": 27,
"timed_out": false,
"_shards": {
"total": 5,
"successful": 5,
"failed": 0
},
"hits": {
"total": 19543,
"max_score": 6.407021,
"hits": [
{
"_index": "abc",
"_type": "xyz",
"_id": "FGH12343",
"_score": 5.8341036,
"_source": {
...
}
}
]
}
}
What could be the cause for this and how can I ensure the same results for different nodes?
Aucun commentaire:
Enregistrer un commentaire