Splitting a single Elasticsearch query into multiple queries for optimization

Time: 2019-04-09 19:01:01

Tags: elasticsearch

I have an Elasticsearch query that I would like to break down further into several smaller queries. It is a single query that fetches 30 days of data; what I want instead is to issue 3 separate queries of 10 days each. The idea is that by splitting the request and processing the parts in parallel, I might be able to pull the data out of ES faster. I am not sure whether this will actually make a difference, but the query in question looks like this:

  "size": 0,
  "query": {
    "bool": {
      "filter": {
        "bool": {
          "filter": [
            {
              "term": {
                "scan.access_level": "public"
              }
            },
            {
              "bool": {
                "should": [
                  {
                    "range": {
                      "tracking.last_found": {
                        "from": "now-30d/d",
                        "to": "now/d",
                        "include_lower": true,
                        "include_upper": true
                      }
                    }
                  },
                  {
                    "range": {
                      "tracking.last_fixed": {
                        "from": "now-30d/d",
                        "to": "now/d",
                        "include_lower": true,
                        "include_upper": true
                      }
                    }
                  },
                  {
                    "bool": {
                      "filter": {
                        "range": {
                          "scan.started_at": {
                            "from": "now-30d/d",
                            "to": "now/d",
                            "include_lower": true,
                            "include_upper": true
                          }
                        }
                      },
                      "must_not": {
                        "exists": {
                          "field": "tracking"
                        }
                      }
                    }
                  }
                ],
                "minimum_should_match": "1"
              }
            }
          ]
        }
      }
    }
  },
  "aggregations": {
    "critical-line": {
      "aggregations": {
        "by_day": {
          "date_histogram": {
            "field": "scan.started_at",
            "interval": "1d",
            "order": {
              "_count": "asc"
            },
            "time_zone": "+05:30"
          },
          "aggregations": {
            "countedHosts": {
              "cardinality": {
                "field": "grouping_keys.summary_id",
                "precision_threshold": 250
              }
            }
          }
        }
      }
    },
    "high-line": {
      "aggregations": {
        "by_day": {
          "date_histogram": {
            "field": "scan.started_at",
            "interval": "1d",
            "order": {
              "_count": "asc"
            },
            "time_zone": "+05:30"
          },
          "aggregations": {
            "countedHosts": {
              "cardinality": {
                "field": "grouping_keys.summary_id",
                "precision_threshold": 250
              }
            }
          }
        }
      }
    },
    "medium-line": {
      "aggregations": {
        "by_day": {
          "date_histogram": {
            "field": "scan.started_at",
            "interval": "1d",
            "order": {
              "_count": "asc"
            },
            "time_zone": "+05:30"
          },
          "aggregations": {
            "countedHosts": {
              "cardinality": {
                "field": "grouping_keys.summary_id",
                "precision_threshold": 250
              }
            }
          }
        }
      }
    },
    "low-line": {
      "aggregations": {
        "by_day": {
          "date_histogram": {
            "field": "scan.started_at",
            "interval": "1d",
            "order": {
              "_count": "asc"
            },
            "time_zone": "+05:30"
          },
          "aggregations": {
            "countedHosts": {
              "cardinality": {
                "field": "grouping_keys.summary_id",
                "precision_threshold": 250
              }
            }
          }
        }
      }
    }
  }
}
```

Please let me know how I can split this single query into multiple queries.
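
Roughly, what I have in mind is something like the sketch below. It uses the Python elasticsearch client and a placeholder index name ("scans"), and the aggregations block is elided, so treat it only as an illustration of the split, not working code from my setup:

```
# Rough sketch: split the 30-day window into three 10-day windows and send
# them as one _msearch request so Elasticsearch can run them concurrently.
# The index name "scans" is a placeholder and the aggregations block is
# elided -- it would be the same as in the query above.
from elasticsearch import Elasticsearch

es = Elasticsearch()  # assumes a locally reachable cluster


def build_query(gte, lte):
    """Return the query body above, with every range filter restricted
    to the window [gte, lte]."""
    date_range = {
        "from": gte,
        "to": lte,
        "include_lower": True,
        "include_upper": True,
    }
    return {
        "size": 0,
        "query": {
            "bool": {
                "filter": [
                    {"term": {"scan.access_level": "public"}},
                    {
                        "bool": {
                            "should": [
                                {"range": {"tracking.last_found": date_range}},
                                {"range": {"tracking.last_fixed": date_range}},
                                {
                                    "bool": {
                                        "filter": {
                                            "range": {"scan.started_at": date_range}
                                        },
                                        "must_not": {
                                            "exists": {"field": "tracking"}
                                        },
                                    }
                                },
                            ],
                            "minimum_should_match": 1,
                        }
                    },
                ]
            }
        },
        # "aggregations": {...}  # same per-day date_histogram / cardinality aggs
    }


# Three consecutive 10-day windows covering the last 30 days.  The shared
# boundaries would need to be made exclusive on one side to avoid counting
# the boundary instant twice.
windows = [
    ("now-30d/d", "now-20d/d"),
    ("now-20d/d", "now-10d/d"),
    ("now-10d/d", "now/d"),
]

body = []
for gte, lte in windows:
    body.append({"index": "scans"})  # placeholder index name
    body.append(build_query(gte, lte))

responses = es.msearch(body=body)["responses"]
# Since the date_histogram buckets are daily and the windows split on day
# boundaries, the per-day buckets from the three responses could simply be
# concatenated on the client side.
```

Instead of `_msearch`, the three queries could also be sent as independent concurrent requests from the client; I am mainly interested in whether either form of splitting is likely to help.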

0 Answers:

No answers yet.