Python source code examples: elasticsearch.ElasticsearchException()
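
All of the examples below share the same basic pattern: a call into the elasticsearch-py client is wrapped in try/except, and elasticsearch.ElasticsearchException, the base class of the exceptions raised by the client in pre-8.x releases, is caught, logged, or re-raised. A minimal sketch of that pattern follows; safe_count is a hypothetical helper, and the localhost endpoint and index name "my-index" are placeholders, not taken from the examples:

import logging

import elasticsearch

log = logging.getLogger(__name__)

def safe_count(es, index_name):
    """Return the document count of index_name, or None if the query fails."""
    try:
        return es.count(index=index_name)["count"]
    except elasticsearch.NotFoundError:
        # The index does not exist; NotFoundError is a subclass of ElasticsearchException.
        log.warning("Index [%s] does not exist", index_name)
    except elasticsearch.ElasticsearchException:
        # Any other client error (transport failures, serialization errors, ...).
        log.exception("Failed to count documents in [%s]", index_name)
    return None

# Usage, assuming a node reachable on localhost:9200:
# es = elasticsearch.Elasticsearch(["localhost:9200"])
# print(safe_count(es, "my-index"))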

Example 1
def extract_mappings_and_corpora(client, output_path, indices_to_extract):
    indices = []
    corpora = []
    # first extract index metadata (which is cheap) and defer extracting data to reduce the potential for
    # errors due to invalid index names late in the process.
    for index_name in indices_to_extract:
        try:
            indices += index.extract(client, output_path, index_name)
        except ElasticsearchException:
            logging.getLogger(__name__).exception("Failed to extract index [%s]", index_name)

    # That list only contains valid indices (with index patterns already resolved)
    for i in indices:
        c = corpus.extract(client, output_path, i["name"])
        if c:
            corpora.append(c)

    return indices, corpora 
Example 2
def _bulk_upload(self, es_batch):
        max_attempts = 4
        attempts = 0
        # Initial time to wait between indexing attempts
        # Grows exponentially
        cooloff = 5
        while True:
            try:
                deque(helpers.parallel_bulk(self.es, es_batch, chunk_size=400))
            except elasticsearch.ElasticsearchException:
                # Something went wrong during indexing.
                log.warning(
                    f"Elasticsearch rejected bulk query. We will retry in"
                    f" {cooloff}s. Attempt {attempts}. Details: ",
                    exc_info=True
                )
                time.sleep(cooloff)
                cooloff *= 2
                if attempts >= max_attempts:
                    raise ValueError('Exceeded maximum bulk index retries')
                attempts += 1
                continue
            break 
Example 3
def register_metadata_dashboard(event, context):
    if event['RequestType'] != 'Create':
        return send_cfnresponse(event, context, CFN_SUCCESS, {})
    quickstart_bucket = s3_resource.Bucket(event['ResourceProperties']['QSS3BucketName'])
    kibana_dashboards_key = os.path.join(
        event['ResourceProperties']['QSS3KeyPrefix'],
        'assets/kibana/kibana_metadata_visualizations.json'
    )
    elasticsearch_endpoint = event['ResourceProperties']['ElasticsearchEndpoint']
    try:
        quickstart_bucket.download_file(kibana_dashboards_key, TMP_KIBANA_JSON_PATH)
        create_metadata_visualizations(elasticsearch_endpoint)
        return send_cfnresponse(event, context, CFN_SUCCESS, {})
    except (ClientError, ElasticsearchException) as e:
        print(e)
        return send_cfnresponse(event, context, CFN_FAILED, {}) 
Example 4
def bulk_sync_wrapper(self, actions):
        """
        Wrapper to publish events.
        Workaround for elasticsearch_async not supporting bulk operations
        """
        from elasticsearch import ElasticsearchException
        from elasticsearch.helpers import bulk

        try:
            bulk_response = bulk(self._gateway.get_sync_client(), actions)
            LOGGER.debug("Elasticsearch bulk response: %s",
                         str(bulk_response))
            LOGGER.info("Publish Succeeded")
        except ElasticsearchException as err:
            LOGGER.exception(
                "Error publishing documents to Elasticsearch: %s", err) 
Example 5
def queryAlerts(maxAlerts, clientDomain, relevantIndex):
    """ Get IP addresses from alerts in elasticsearch """

    esquery="""{
            "query": {
                "terms": {
                    "clientDomain": [ %s ]
                }
            },
            "sort": {
                "recievedTime": {
                    "order": "desc"
                    }
                },
            "size": %s,
            "_source": [
                "createTime",
                "recievedTime",
                "peerIdent",
                "peerType",
                "country",
                "targetCountry",
                "originalRequestString",
                "location",
                "sourceEntryIp"
                ]
            }""" % (clientDomain, maxAlerts)
    try:
        res = es.search(index=relevantIndex, body=esquery)
        return res["hits"]["hits"]
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' %  err)

    return False 
Example 6
def queryAlertsWithoutIP(maxAlerts, clientDomain, relevantIndex):
    """ Get IP addresses from alerts in elasticsearch """

    esquery="""
    {
            "query": {
                "terms": {
                    "clientDomain": [ %s ]
                }
            },
            "sort": {
                "recievedTime": {
                    "order": "desc"
                    }
                },
            "size": %s,
            "_source": [
                "createTime",
                "peerType",
                "country",
                "originalRequestString",
                "location",
                "targetCountry",
                "countryName",
                "locationDestination",
                "recievedTime",
                "username",
                "password",
                "login",
                "clientDomain"
                ]
            }""" % (clientDomain, maxAlerts)

    try:
        res = es.search(index=relevantIndex, body=esquery)
        return res["hits"]["hits"]
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' % err)

    return False 
Example 7
def getNumberAlerts(timeframe, clientDomain):
    ''' retrieves number of alerts from index in timeframe (minutes)'''
    try:
        res = es.search(index=esindex, body={
            "query": {
                "bool": {
                    "must": [
                        {
                            "match": {
                                "clientDomain": clientDomain
                            }
                        }
                    ],
                    "filter": [
                        {
                            "range": {
                                "createTime": {
                                    "gte": "now-"+str(timeframe)+"m"
                                }
                            }
                        }
                    ]
                }
            },
            "size": 0
        })
        return res['hits']['total']
    except ElasticsearchException as err:
        print('ElasticSearch error: %s' % err)

    return False 
Example 8
def getHPStats(peerIdent):
    for i in range(days):

        esquery = """
           {
             "query": {
               "bool": 
                   {
                       "must":
                       [
                           {"term":
                               {"clientDomain" : "false" }
                           }
                       ]
                   }
             },
             "from": 0,
             "size": 0,
             "sort": [],
             "aggs": {
               "peers": {
                 "terms": {
                   "field": "peerIdent",
                   "size": 10000
                 }
               }
               }}
               """

        try:
            res = es.search(index=getRelevantIndex(i), body=esquery)
            print(res["aggregations"]["peers"]["buckets"])


        except ElasticsearchException as err:
            print('ElasticSearch error: %s' % err) 
Example 9
def exists(self):
        """Check if this task has already run successfully in the past."""
        try:
            self.elasticsearch_client.get(
                index=self.marker_index,
                doc_type=self.marker_doc_type,
                id=self.marker_index_document_id()
            )
            return True
        except elasticsearch.NotFoundError:
            log.debug('Marker document not found.')
        except elasticsearch.ElasticsearchException as err:
            log.warning(err)
        return False 
Example 10
def handle_bucket_event(event, context):
    sns_message = json.loads(event["Records"][0]["Sns"]["Message"])
    bucket = sns_message["Records"][0]["s3"]["bucket"]["name"]
    key = urllib.parse.unquote_plus(sns_message["Records"][0]["s3"]["object"]["key"])
    print(bucket, key)
    try:
        response = s3.head_object(Bucket=bucket, Key=key)
    except ClientError as e:
        print(e)
        print('Error getting object {} from bucket {}. Make sure they exist, your bucket is in the same region as this function and necessary permissions have been granted.'.format(key, bucket))
        raise e

    metadata = {
        'key': key,
        'ContentLength': response['ContentLength'],
        'SizeMiB': response['ContentLength'] / 1024**2,
        'LastModified': response['LastModified'].isoformat(),
        'ContentType': response['ContentType'],
        'ETag': response['ETag'],
        'Dataset': key.split('/')[0]
    }
    print("METADATA: " + str(metadata))

    es_client = make_elasticsearch_client(os.environ['ELASTICSEARCH_ENDPOINT'])

    try:
        es_client.index(index=es_index, doc_type=bucket, body=json.dumps(metadata))
    except ElasticsearchException as e:
        print(e)
        print("Could not index in Elasticsearch")
        raise e 
Example 11
def more_like_this(elastic_url, fields: list, like: list, size: int, filters: list, aggregations: list, include: bool, if_agg_only: bool, dataset: Dataset, return_fields=None):
        # Create the base query creator and unite with ES gateway.
        search = Search(using=Elasticsearch(elastic_url)).index(dataset.index).doc_type(dataset.mapping)
        mlt = MoreLikeThis(like=like, fields=fields, min_term_freq=1, max_query_terms=12, include=include)  # Prepare the MLT part of the query.

        paginated_search = search[0:size]  # Set how many documents to return.
        limited_search = paginated_search.source(return_fields) if return_fields else paginated_search  # If added, choose which FIELDS to return.
        finished_search = limited_search.query(mlt)  # Add the premade MLT into the query.

        # Apply all the user-set filters; if none were added this list is [] and the loop is skipped.
        for filter_dict in filters:
            finished_search = finished_search.filter(Q(filter_dict))

        # Apply all the user-set aggregations; if none were added this list is [] and the loop is skipped.
        for aggregation_dict in aggregations:
            # aggs.bucket() does not return a Search object but changes it instead.
            if aggregation_dict["agg_type"] == "composite":
                after = aggregation_dict.get("after_key", None)
                finished_search = ES_Manager.handle_composition_aggregation(finished_search.to_dict(), aggregation_dict, after)
            else:
                field_name = aggregation_dict["field"]
                index = like[0]["_index"]
                field = "{}.keyword".format(field_name) if ES_Manager.is_field_text_field(field_name=field_name, index_name=index) else field_name
                finished_search.aggs.bucket(name=aggregation_dict["bucket_name"], agg_type=aggregation_dict["agg_type"], field=field)

        # Choose if you want to return only the aggregations in {"bucket_name": {results...}} format.
        if if_agg_only:
            finished_search = finished_search.params(size=0)
            response = finished_search.execute()
            return response.aggs.to_dict()

        try:
            response = finished_search.execute()
            result = {"hits": [hit.to_dict() for hit in response]}  # Throw out all metadata and keep only the documents.
            if response.aggs: result.update({"aggregations": response.aggs.to_dict()})  # IF the aggregation query returned anything, THEN add the "aggregations" key with results.
            return result

        except ElasticsearchException as e:
            logging.getLogger(ERROR_LOGGER).exception(e)
            return {"elasticsearch": [str(e)]} 
Example 12
def elasticsearch_connection(hosts, sniff=False, sniffer_timeout=60):
    """Crea una conexión a Elasticsearch.

    Args:
        hosts (list): Lista de nodos Elasticsearch a los cuales conectarse.
        sniff (bool): Activa la función de sniffing, la cual permite descubrir
            nuevos nodos en un cluster y conectarse a ellos.

    Raises:
        DataConnectionException: si la conexión no pudo ser establecida.

    Returns:
        Elasticsearch: Conexión a Elasticsearch.

    """
    try:
        options = {
            'hosts': hosts
        }

        if sniff:
            options['sniff_on_start'] = True
            options['sniff_on_connection_fail'] = True
            options['sniffer_timeout'] = sniffer_timeout

        return elasticsearch.Elasticsearch(**options)
    except elasticsearch.ElasticsearchException as e:
        raise DataConnectionException from e 
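
A usage sketch for the factory above; DataConnectionException comes from the surrounding project (it is not part of elasticsearch-py), and the host list is a placeholder:

import logging

def get_client():
    # Build a sniffing-enabled client via elasticsearch_connection(); any
    # elasticsearch.ElasticsearchException raised inside the factory is
    # re-raised as the project's DataConnectionException.
    try:
        return elasticsearch_connection(
            hosts=["localhost:9200"],
            sniff=True,
            sniffer_timeout=60,
        )
    except DataConnectionException:
        logging.getLogger(__name__).exception("Could not connect to Elasticsearch")
        raise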
Example 13
def _run_multisearch(es, searches):
    """Ejecuta una lista de búsquedas Elasticsearch utilizando la función
    MultiSearch. La cantidad de búsquedas que se envían a la vez es
    configurable vía la variable ES_MULTISEARCH_MAX_LEN.

    Args:
        es (Elasticsearch): Conexión a Elasticsearch.
        searches (list): Lista de elasticsearch_dsl.Search.

    Raises:
        DataConnectionException: Si ocurrió un error al ejecutar las búsquedas.

    Returns:
        list: Lista de respuestas a cada búsqueda.

    """
    step_size = constants.ES_MULTISEARCH_MAX_LEN
    responses = []

    # Split the searches into several batches if necessary.
    for i in range(0, len(searches), step_size):
        end = min(i + step_size, len(searches))
        ms = MultiSearch(using=es)

        for j in range(i, end):
            ms = ms.add(searches[j])

        try:
            responses.extend(ms.execute(raise_on_error=True))
        except elasticsearch.ElasticsearchException as e:
            raise DataConnectionException() from e

    return responses 
Example 14
def test_elasticsearch_connection_error(self):
        """Se debería devolver un error 500 cuando falla la conexión a
        Elasticsearch."""
        self.es.side_effect = elasticsearch.ElasticsearchException()
        self.assert_500_error(random.choice(ENDPOINTS)) 
Example 15
def test_elasticsearch_msearch_error(self):
        """Se debería devolver un error 500 cuando falla la query
        MultiSearch."""
        self.es.return_value.msearch.side_effect = \
            elasticsearch.ElasticsearchException()
        self.assert_500_error(random.choice(ENDPOINTS)) 
Example 16
def execute(self, size=10):
        """Executes the knowledge base search with provided criteria and returns matching documents.

        Args:
            size (int): The maximum number of records to fetch, defaults to 10.

        Returns:
            a list of matching documents.
        """
        try:
            # TODO: move the ES API call logic to ES helper
            es_query = self._build_es_query(size=size)

            response = self.client.search(index=self.index, body=es_query)
            results = [hit["_source"] for hit in response["hits"]["hits"]]
            return results
        except EsConnectionError as e:
            logger.error(
                "Unable to connect to Elasticsearch: %s details: %s", e.error, e.info
            )
            raise KnowledgeBaseConnectionError(es_host=self.client.transport.hosts)
        except TransportError as e:
            logger.error(
                "Unexpected error occurred when sending requests to Elasticsearch: %s "
                "Status code: %s details: %s",
                e.error,
                e.status_code,
                e.info,
            )
            raise KnowledgeBaseError
        except ElasticsearchException:
            raise KnowledgeBaseError 
Example 17
def async_do_publish(self):
        "Publishes all queued documents to the Elasticsearch cluster"
        from elasticsearch import ElasticsearchException

        if self.publish_queue.empty():
            LOGGER.debug("Skipping publish because queue is empty")
            return

        LOGGER.debug("Collecting queued documents for publish")
        actions = []
        entity_counts = {}
        self._last_publish_time = datetime.now()

        while not self.publish_queue.empty():
            entry = self.publish_queue.get()

            key = entry["state"].entity_id

            entity_counts[key] = 1 if key not in entity_counts else entity_counts[key] + 1
            actions.append(self._state_to_bulk_action(
                entry["state"], entry["event"].time_fired))

        if not self._only_publish_changed:
            all_states = self._hass.states.async_all()
            for state in all_states:
                if (state.domain in self._excluded_domains
                        or state.entity_id in self._excluded_entities):
                    continue

                if state.entity_id not in entity_counts:
                    actions.append(self._state_to_bulk_action(
                        state, self._last_publish_time))

        LOGGER.info("Publishing %i documents to Elasticsearch", len(actions))

        try:
            await self._hass.async_add_executor_job(self.bulk_sync_wrapper, actions)
        except ElasticsearchException as err:
            LOGGER.exception(
                "Error publishing documents to Elasticsearch: %s", err)
        return 
Example 18
def test_itersearch_raises_assertion_error_when_less_docs_fetched(self):
        mocked_value_template = {
            "took": 27,
            "timed_out": False,
            "_scroll_id": 123213,
            "_shards": {
                "total": 2,
                "successful": 2,
                "failed": 0
            },
            "hits": {
                "total": 13,
                "max_score": None,
                "hits": [
                    dict(some_doc="with_some_val") for i in range(10)
                ]
            }
        }

        ss = SuperElasticsearch(hosts=['localhost:9200'])

        def assertion(chunked):
            # mock the client's scroll method
            mocked_search_result = deepcopy(mocked_value_template)
            ss.search = Mock(return_value=mocked_search_result)
            mocked_scroll_result = deepcopy(mocked_value_template)
            mocked_scroll_result['_scroll_id'] = 456456
            mocked_scroll_result['hits']['hits'] = [
                dict(some_doc="with_some_val") for i in range(2)
            ]
            ss.scroll = Mock(return_value=mocked_scroll_result)

            search_generator = ss.itersearch(index=self._index,
                                             doc_type=self._doc_type,
                                             body=dict(query=dict(
                                                 match_all={})),
                                             scroll='10m',
                                             chunked=chunked)
            if chunked:
                iterate_times = 2
            else:
                iterate_times = 12

            for _ in range(0, iterate_times):
                next(search_generator)

            mocked_scroll_result = deepcopy(mocked_value_template)
            mocked_scroll_result['_scroll_id'] = 789789
            mocked_scroll_result['hits']['hits'] = []
            ss.scroll = Mock(return_value=mocked_scroll_result)
            next(search_generator)

        self.assertRaises(ElasticsearchException,
                          functools.partial(assertion, True))
        self.assertRaises(ElasticsearchException,
                          functools.partial(assertion, False)) 
Example 19
def authenticate(username, token):
    """ Authenticate user from cache or in ES """

    # check for user in cache
    authtoken = getCache(username, "user")
    if authtoken is not False:
        if len(authtoken) == 128:
            tokenhash = hashlib.sha512(token.encode('utf-8')).hexdigest()
            if authtoken == tokenhash:
                return True
        elif len(authtoken) == 32:
            tokenhash = hashlib.md5(token.encode('utf-8')).hexdigest()
            if authtoken == tokenhash:
                return True
        else:
            app.logger.error('authenticate(): Hash "{0}" for user "{1}" is not matching md5 or sha512 length! Needs to be checked in memcache!'.format(authtoken, username))

    # query ES
    else:
        try:
            res = es.search(index=app.config['WSUSERINDEX'], body={
                  "query": {
                    "term": {
                      "peerName.keyword": username
                    }
                  }
                })

            if res["hits"]["total"] > 1:
                app.logger.error('authenticate(): More than one user "%s" in ES index "users" found!' % username)
            elif res["hits"]["total"] < 1:
                app.logger.error('authenticate(): No user "%s" in ES index "users" found!' % username)
            elif res["hits"]["total"] == 1:
                authtoken = res["hits"]["hits"][0]["_source"]["token"]
                getOnly = res["hits"]["hits"][0]["_source"]["getOnly"]
                community = res["hits"]["hits"][0]["_source"]["community"]

                if len(authtoken) == 128:
                    tokenhash = hashlib.sha512(token.encode('utf-8')).hexdigest()
                    if authtoken == tokenhash:
                        # add user and token to cache for 24h
                        setCache(username, authtoken, (60 * 60 * 24), "user")
                        return True
                elif len(authtoken) == 32:
                    tokenhash = hashlib.md5(token.encode('utf-8')).hexdigest()
                    if authtoken == tokenhash:
                        # add user and token to cache for 24h
                        setCache(username, authtoken, (60 * 60 * 24),"user")
                        return True
                else:
                    app.logger.error('authenticate(): Hash "{0}" for user "{1}" is not matching md5 or sha512 length! Needs to be checked in ES index!'.format(authtoken, username))
                    return False

        except ElasticsearchException as err:
            app.logger.error('ElasticSearch error: %s' %  err)

    return False 
Example 20
def queryBadIPs(badIpTimespan, clientDomain, relevantIndex):
    """ Get IP addresses from alerts in elasticsearch """

    esquery="""
    {
          "query": {
            "bool": {
              "must": [
                {
                  "range": {
                    "recievedTime": {
                        "gte": "now-%sm" 
                    }
                  }
                },
                {
                  "terms": {
                      "clientDomain": [ %s ]
                    }
                }
              ]
            }
          },
          "aggs": {
            "ips": {
              "terms": {
                "field": "sourceEntryIp",
                "size": 1000000
              }
            }
          },
          "size": 0
        } 
    """ % (badIpTimespan, clientDomain)

    try:
        res = es.search(index=relevantIndex, body=esquery)
        if 'aggregations' in res:
            return res["aggregations"]["ips"]
        else:
            return False
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' %  err)

    return False 
Example 21
def queryAlertsCount(timeframe, clientDomain, relevantIndex):
    """ Get number of Alerts in timeframe in elasticsearch """

    # check if timespan = d or number
    if timeframe == "day":
        span = "now/d"
    elif timeframe.isdecimal():
        span = "now-%sm" % timeframe
    else:
        app.logger.error('Non numeric value in retrieveAlertsCount timespan. Must be decimal number (in minutes) or string "day"')
        return False

    esquery="""{
          "query": {
            "bool": {
              "must": [
                {
                  "terms": {
                    "clientDomain": [ %s ]
                  }
                }
              ],
              "filter": [
                {
                  "range": {
                    "recievedTime": {
                        "gte": "%s"
                    }
                  }
                }
              ]
            }
          },
          "size": 0
        }
    """ % (clientDomain, str(span))

    try:
        res = es.search(index=relevantIndex, body=esquery)
        return res['hits']['total']
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' %  err)

    return False 
Example 22
def queryDatasetAlertsPerMonth(days, clientDomain, relevantIndex):
    # check if days is a number
    if days is None:
        span = "now-1M/d"
    elif days.isdecimal():
        span = "now-%sd/d" % days
    else:
        app.logger.error('Non numeric value in datasetAlertsPerMonth timespan. Must be decimal number in days')
        return False

    esquery="""{
              "query": {
                "range": {
                  "createTime": {
                    "gte": "%s"
                  }
                }
              },
            "aggs": {
                "communityfilter": {
                    "filter": {
                        "terms": {
                            "clientDomain": [ % s ]
                        }
                    },
            "aggs": {
                "range": {
                  "date_histogram": {
                    "field": "createTime",
                    "interval": "day"
                            }
                        }
                    }
                }
              },
              "size": 0
                }""" % (str(span), clientDomain)

    try:
        res = es.search(index=relevantIndex, body=esquery)
        return res["aggregations"]["communityfilter"]["range"]
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' %  err)

    return False 
Example 23
def queryDatasetAlertTypesPerMonth(days, clientDomain, relevantIndex):
    # check if days is a number
    if days is None:
        span = "now-1M/d"
    elif days.isdecimal():
        span = "now-%sd/d" % days
    else:
        app.logger.error('Non numeric value in datasetAlertsTypesPerMonth timespan. Must be decimal number in days')
        return False

    esquery="""
    {
          "query": {
            "range": {
              "createTime": {
                "gte": "%s"
              }
            }
          },
         "aggs": {
                "communityfilter": {
                    "filter": {
                        "terms": {
                            "clientDomain": [ %s ]
                        }
            },
          "aggs": {
            "range": {
              "date_histogram": {
                "field": "createTime",
                "interval": "day"
              },
              "aggs": {
                "nested_terms_agg": {
                  "terms": {
                    "field": "peerType.keyword"
                  }}}
                }
              }
            }
          },
          "size": 0
        }
    """ % (str(span), clientDomain )

    try:
        res = es.search(index=relevantIndex, body=esquery)
        return res["aggregations"]["communityfilter"]["range"]
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' %  err)

    return False 
Example 24
def queryAlertStats(clientDomain, relevantIndex):
    """ Get combined statistics from elasticsearch """
    esquery="""{
            "aggs": {
                "communityfilter": {
                    "filter": {
                        "terms": {
                            "clientDomain": [ %s ]
                        }
                    },
            "aggs": {
            "ctr": {
              "range": {
                "field": "recievedTime",
                "ranges": [
                  {
                    "key": "1d",
                    "from": "now-1440m"
                  },
                  {
                    "key": "1h",
                    "from": "now-60m"
                  },
                  {
                    "key": "5m",
                    "from": "now-5m"
                  },
                  {
                    "key": "1m",
                    "from": "now-1m"
                  }
                ]
              }
            }}}
          },
          "size": 0
        }""" % clientDomain

    try:
        res = es.search(index=relevantIndex, body=esquery)
        if 'aggregations' in res:
            return res['aggregations']['communityfilter']['ctr']['buckets']
        else:
            return False
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' % err)

    return False 
Example 25
def queryForSingleIP(maxAlerts, ip, clientDomain, relevantIndex):
    """ Get data for specific IP addresse from elasticsearch """
    try:
        ipaddress.IPv4Address(ip)
        if not ipaddress.ip_address(ip).is_global:
            app.logger.debug('No global IP address given on /querySingleIP: %s' % str(request.args.get('ip')))
            return False

    except ValueError:
        app.logger.debug('No valid IP given on /querySingleIP: %s' % str(request.args.get('ip')))
        return False

    esquery="""{
          "query": {
            "bool": {
              "must": [
                {
                  "term": {
                    "sourceEntryIp": "%s"
                  }
                },
                {
                  "terms": {
                    "clientDomain": [ %s ]
                  }
                }
              ]
            }
          },
          "size": %s,
          "sort": {
            "createTime": {
              "order": "desc"
            }
          },
          "_source": [
            "createTime",
            "peerType",
            "targetCountry",
            "originalRequestString"
          ]
        }""" % (ip, clientDomain, maxAlerts)

    try:
        res = es.search(index=relevantIndex, body=esquery)
        return res["hits"]["hits"]
    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' %  err)

    return False

# Formatting functions 
Example 26
def getAlertsPerHoneypotType(app, clientDomain, es, esindex, utcTimeFrom, utcTimeTo):
    ''' retrieves number of alerts per honeypot type from index in the given UTC time window '''

    esquery="""
    {
          "query": {
            "bool": {
              "must": [
                {
                  "range": {
                    "recievedTime": {
                        "gte": "%s",
                        "lte": "%s"
                    }
                  }
                },
                {
                  "terms": {
                      "clientDomain":  [ %s ]
                    }
                }
              ]
            }
          },
          "aggs": {
            "peerType": {
              "terms": {
                "field": "peerType",
                "size": 1000
              }
            }
          },
          "size": 0
        }"""% (utcTimeFrom, utcTimeTo, str(clientDomain).lower())

    try:
        res = es.search(index=esindex, body=esquery)
        return res


    except ElasticsearchException as err:
        app.logger.error('ElasticSearch error: %s' % err)

    return True 
Example 27
def queryAlertStats(clientDomain,relevantIndex):
    """ Get combined statistics from elasticsearch """
    esquery = """{
               "aggs": {
                   "communityfilter": {
                       "filter": {
                           "terms": {
                               "clientDomain": [ %s ]
                           }
                       },
               "aggs": {
               "ctr": {
                 "range": {
                   "field": "recievedTime",
                   "ranges": [
                     {
                       "key": "1d",
                       "from": "now-1440m"
                     },
                     {
                       "key": "1h",
                       "from": "now-60m"
                     },
                     {
                       "key": "5m",
                       "from": "now-5m"
                     },
                     {
                       "key": "1m",
                       "from": "now-1m"
                     }
                   ]
                 }
               }}}
             },
             "size": 0
           }""" % clientDomain

    try:
        res = es.search(index=relevantIndex, body=esquery)
        return res['aggregations']['communityfilter']['ctr']['buckets']
    except ElasticsearchException as err:
        print('ElasticSearch error: %s' % err)

    return False 
Example 28
def queryAlertsCountWithType(timeframe, clientDomain, relevantIndex):
    """ Get number of Alerts in timeframe in elasticsearch """

    # check if timespan = d or number
    if timeframe == "day":
        span = "now/d"
    elif timeframe.isdecimal():
        span = "now-%sm" % timeframe
    else:
        print('Non numeric value in retrieveAlertsCountWithType timespan. Must be decimal number (in minutes) or string "day"')
        return False

    esquery=""" 
    {
          "query": {
            "range": {
              "recievedTime": {
                  "gte": "%s"
              }
            }
          },
          "aggs": {
            "communityfilter": {
              "filter": {
                "terms": {
                  "clientDomain": [ %s ] 
                }
              },
              "aggs": {
                "honeypotTypes": {
                  "terms": {
                    "field": "peerType"
                  }
                }
              }
            }
          },
          "size": 0
        }
    """ % (span, clientDomain)

    try:
        res = es.search(index=relevantIndex, body=esquery)
        return res
    except ElasticsearchException as err:
        print('ElasticSearch error: %s' %  err)

    return False 
Example 29
def getAlertsPerHoneypotType(timeframe, clientDomain):
    ''' retrieves number of alerts from index in timeframe (minutes)'''

    esquery="""
    {
          "query": {
            "bool": {
              "must": [
                {
                  "range": {
                    "recievedTime": {
                        "gte": "now-%sm" 
                    }
                  }
                },
                {
                  "terms": {
                      "clientDomain": [ %s ]
                    }
                }
              ]
            }
          },
          "aggs": {
            "peerType": {
              "terms": {
                "field": "peerType",
                "size": 1000
              }
            }
          },
          "size": 0
        }"""% (timeframe, str(clientDomain).lower())

    try:
        res = es.search(index=esindex, body=esquery)
        return res


    except ElasticsearchException as err:
        print('ElasticSearch error: %s' % err)

    return True 
Example 30
def getHoneypotCount(days):
    esquery = """
    {
      "query": {
        "bool": {
          "must": [
            {
              "term": {
                "clientDomain": "false"
              }
            }
          ]
        }
      },
      "from": 0,
      "size": 0,
      "sort": [],
      "aggs": {
        "range": {
          "date_histogram": {
            "field": "recievedTime",
            "interval": "day"
          },
          "aggs": {
            "peers": {
              "terms": {
                "field": "peerIdent",
                "size": 100000
              }
            }
          }
        }
      }
    }
    
    """
    try:
        res = es.search(index=getRelevantIndices(days), body=esquery)
        # print(len(res["aggregations"]["peers"]["buckets"]))
        return res["aggregations"]["range"]["buckets"]


    except ElasticsearchException as err:
        print('ElasticSearch error: %s' % err)
        return False