Python source code examples: elasticsearch.NotFoundError()
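
elasticsearch.NotFoundError is raised by the official elasticsearch-py client whenever a request comes back with HTTP 404, typically because a document, index, alias, or template does not exist. The examples below show how real projects handle it. As an orientation, here is a minimal sketch of the most common pattern (assuming elasticsearch-py 7.x; the index and document id are placeholders):

from elasticsearch import Elasticsearch, NotFoundError

es = Elasticsearch()

try:
    # "my-index" and "my-id" are placeholder names for this sketch
    doc = es.get(index="my-index", id="my-id")
except NotFoundError:
    # 404 from the cluster: the document (or the index itself) does not exist
    doc = None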

Example 1
def delete_item(self, item):
        # Make sure the object can be indexed
        if not class_is_indexed(item.__class__):
            return

        # Get mapping
        mapping = self.mapping_class(item.__class__)

        # Delete document
        try:
            self.es.delete(
                self.name,
                mapping.get_document_type(),
                mapping.get_document_id(item),
            )
        except NotFoundError:
            pass  # Document doesn't exist, ignore this exception 
Example 2
async def test_execute_single_with_http_400(self):
        import elasticsearch
        es = None
        params = None
        runner = mock.Mock(side_effect=as_future(
            exception=elasticsearch.NotFoundError(
                404, "not found", "the requested document could not be found")))

        ops, unit, request_meta_data = await driver.execute_single(
            self.context_managed(runner), es, params, on_error="continue-on-non-fatal")

        self.assertEqual(0, ops)
        self.assertEqual("ops", unit)
        self.assertEqual({
            "http-status": 404,
            "error-type": "transport",
            "error-description": "not found (the requested document could not be found)",
            "success": False
        }, request_meta_data) 
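
A note on how the exception is constructed for the mock above: in elasticsearch-py 7.x, NotFoundError derives from TransportError, so its three positional arguments are the status code, the error message, and additional info, and they are exposed as attributes. A small sketch under that assumption:

import elasticsearch

err = elasticsearch.NotFoundError(
    404, "not found", "the requested document could not be found")
assert err.status_code == 404
assert err.error == "not found"
assert err.info == "the requested document could not be found"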
Example 3
def _kibana_remove(self, _type, body):
        i = 0
        ids = []

        if get_es_major_version() >= 6:
            body['query']['query_string']['query'] += ' type:%s' % _type
            _type = self.doc_type

        while True:
            res = self.client.search(index='.kibana', from_=i, doc_type=_type, body=body, request_cache=False)
            if len(res['hits']['hits']) == 0:
                break
            i += 10

            _ids = [hit['_id'] for hit in res['hits']['hits']]
            ids += _ids

        for _id in ids:
            try:
                self.client.delete(index='.kibana', doc_type=_type, id=_id, refresh=True)
            except NotFoundError:
                pass 
Example 4
def _kibana_inject(self, _type, _file):
        with open(_file) as f:
            content = f.read()
        name = _file.rsplit('/', 1)[1]
        name = name.rsplit('.', 1)[0]
        if get_es_major_version() < 6:
            doc_type = _type
        else:
            doc_type = self.doc_type

        try:
            # Delete the document first, to prevent an error when it's already there
            self.client.delete(index='.kibana', doc_type=doc_type, id=name, refresh=True)
        except NotFoundError:
            pass
        try:
            self.client.create(index='.kibana', doc_type=doc_type, id=name, body=content, refresh=True)
        except Exception as e:
            print('While processing %s:\n' % _file)
            raise 
Example 5
def delete(obj, index=None, using=None):
    """
    Shortcut to delete a Django object from the ES index based on its model class.
    """
    from django.contrib.contenttypes.models import ContentType
    model_class = ContentType.objects.get_for_model(obj).model_class()
    for doc_class in model_documents.get(model_class, []):
        doc_using = using or doc_class._doc_type.using or 'default'
        doc_index = index or doc_class._doc_type.index or getattr(settings, 'SEEKER_INDEX', 'seeker')
        es = connections.get_connection(doc_using)
        try:
            es.delete(
                index=doc_index,
                doc_type=doc_class._doc_type.name,
                id=doc_class.get_id(obj),
                refresh=True
            )
        except NotFoundError:
            # If this object wasn't indexed for some reason (maybe not in the document's queryset), no big deal.
            pass 
Example 6
def es_query_total(cls, cluster: Elasticsearch, index: str, group: str, **kwargs) -> 'MetricMonitor':
        def fetch_stat() -> Optional[float]:
            try:
                response = cluster.indices.stats(index=index, groups=[group], metric='search')
            except elasticsearch.NotFoundError:
                # If our index doesn't exist we can't possibly allow things
                # to continue. Report the metric unavailable and wait for
                # the index to exist.
                log.exception('Index not found while fetching index stats for %s', index)
                return None
            except elasticsearch.TransportError:
                # Connection error to elasticsearch, could be network, restarts, etc.
                log.exception('Transport error while fetching index stats for %s', index)
                return None

            try:
                query_total = response['_all']['total']['search']['groups'][group]['query_total']
                log.debug('Group %s in index %s reported query_total of %d', group, index, query_total)
                return query_total
            except KeyError:
                # Typically this means the group hasn't collected any stats.
                # This could happen after a full cluster restart but before any
                # prod traffic is run through. I'm a bit wary of always
                # returning 0, but it is correct.
                log.info('No stats in index %s for group %s', index, group)
                return 0.0
        return cls(fetch_stat, StreamingEMA(), **kwargs) 
Example 7
def complete(self):
        """ Check, if out hashed date:url id is already in the index. """
        id = hashlib.sha1('%s:%s' % (self.date, self.url)).hexdigest()
        es = elasticsearch.Elasticsearch()
        try:
            es.get(index='frontpage', doc_type='html', id=id)
        except elasticsearch.NotFoundError:
            return False
        return True

# Wrapper tasks
# ============= 
Example 8
def _clear(self):
        """
        We do an index deletion and creation when we clean an index.
        """
        try:
            self.connection.indices.delete(settings.SEARCH_INDEX)
        except NotFoundError:
            pass
        self.connection.indices.create(settings.SEARCH_INDEX) 
Example 9
def _refresh_template(self, template_name="lego-search"):
        context = {"index": self._index_name()}
        template = render_to_string("search/elasticsearch/index_template.json", context)
        try:
            self.connection.indices.delete_template(template_name)
        except NotFoundError:
            pass
        return self.connection.indices.put_template(template_name, template) 
Example 10
def delete(self):
        try:
            self.es.indices.delete(self.name)
        except NotFoundError:
            pass 
Example 11
def delete_item(self, item):
        # Make sure the object can be indexed
        if not class_is_indexed(item.__class__):
            return

        # Get mapping
        mapping = self.mapping_class(item.__class__)

        # Delete document
        try:
            self.es.delete(self.name, mapping.get_document_id(item))
        except NotFoundError:
            pass  # Document doesn't exist, ignore this exception 
Example 12
def get_es_client(self):
        self._es = Elasticsearch(hosts=self.es_hosts)
        self._es_version = [int(i) for i in self._es.info()["version"]["number"].split(".")]
        # template
        template_body = ES_TEMPLATE
        if self._es_version < [7]:
            template_body["mappings"] = {"_doc": template_body.pop("mappings")}
        self._es.indices.put_template(ES_TEMPLATE_NAME, template_body)
        # create index
        for i in range(10):
            existing_indices = self._es.indices.get(ES_INDEX_PATTERN).keys()
            if not len(existing_indices):
                next_id = 0
            else:
                next_id = max(int(index.rsplit("-", 1)[-1]) for index in existing_indices) + 1
            index_name = ES_INDEX_PATTERN.replace("*", "{:08d}".format(next_id))
            try:
                self._es.indices.create(index_name)
            except RequestError:
                # probably race
                pass
            else:
                # move alias
                update_aliases_body = {
                    "actions": [
                        {"add": {"index": index_name, "alias": ES_ALIAS}}
                    ]
                }
                try:
                    old_indices = self._es.indices.get_alias(ES_ALIAS)
                except NotFoundError:
                    old_indices = []
                for old_index in old_indices:
                    if old_index != index_name:
                        update_aliases_body["actions"].append(
                            {"remove": {"index": old_index, "alias": ES_ALIAS}}
                        )
                self._es.indices.update_aliases(update_aliases_body)
                return index_name 
Example 13
def latest_enrichment_date(self):
        """Get the most recent enrichment date.

        :return: latest date based on `metadata__enriched_on` field,
                 None if no values found for that field.

        :raises NotFoundError: index not found in Elasticsearch
        """
        latest_date = None

        search = Search(using=self._es_conn, index=self._es_index)
        # from:to parameters (=> from: 0, size: 0)
        search = search[0:0]
        search = search.aggs.metric('max_date', 'max', field='metadata__enriched_on')

        try:
            response = search.execute()

            aggs = response.to_dict()['aggregations']
            if aggs['max_date']['value'] is None:
                logger.debug("{} No data for metadata__enriched_on field found in {} index".format(
                             self.__log_prefix, self._es_index))

            else:
                # Incremental case: retrieve items from last item in ES write index
                max_date = aggs['max_date']['value_as_string']
                latest_date = gl_dt.str_to_datetime(max_date)

        except NotFoundError as nfe:
            raise nfe

        return latest_date 
Example 14
def test_get_raises_404_on_index_missing(data_client):
    with raises(NotFoundError):
        Repository.get('elasticsearch-dsl-php', index='not-there') 
Example 15
def test_get_raises_404_on_non_existent_id(data_client):
    with raises(NotFoundError):
        Repository.get('elasticsearch-dsl-php') 
Example 16
def test_mget_raises_404_when_missing_param_is_raise(data_client):
    with raises(NotFoundError):
        Commit.mget(COMMIT_DOCS_WITH_MISSING, missing='raise') 
Example 17
def exists(self):
        """Check if this task has already run successfully in the past."""
        try:
            self.elasticsearch_client.get(
                index=self.marker_index,
                doc_type=self.marker_doc_type,
                id=self.marker_index_document_id()
            )
            return True
        except elasticsearch.NotFoundError:
            log.debug('Marker document not found.')
        except elasticsearch.ElasticsearchException as err:
            log.warning(err)
        return False 
Example 18
def test_error_is_properly_logged(connection, caplog, port, server):
    server.register_response('/i', status=404)
    with raises(NotFoundError):
        yield from connection.perform_request('GET', '/i', params={'some': 'data'})

    for logger, level, message in caplog.record_tuples:
        if logger == 'elasticsearch' and level == logging.WARNING:
            assert message.startswith('GET http://localhost:%s/i?some=data [status:404 request:' % port)
            break
    else:
        assert False, "Log not received" 
Example 19
def test_404_properly_raised(server, client):
    server.register_response('/i/t/42', status=404)
    with raises(NotFoundError):
        yield from client.get(index='i', doc_type='t', id=42) 
Example 20
def get_object(self, queryset=None):
        try:
            return super(ElasticsearchDetailView, self).get_object(queryset=queryset)
        except NotFoundError:
            raise Http404 
Example 21
def test_delete(self):
        self.instance.es.delete()
        with self.assertRaises(NotFoundError):
            self.instance.es.get() 
Example 22
def get_object(self):
        try:
            return super(IndexableModelMixin, self).get_object()
        except NotFoundError:
            raise Http404 
Example 23
def delete_model(self) -> None:
        """
        Delete an inference model saved in Elasticsearch

        If model doesn't exist, ignore failure.
        """
        try:
            self._client.ml.delete_trained_model(model_id=self._model_id, ignore=(404,))
        except elasticsearch.NotFoundError:
            pass 
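
The example above tolerates a missing model twice: via ignore=(404,) and via the try/except. In elasticsearch-py 7.x, passing ignore=404 (or a tuple of status codes) already makes the client return the error response instead of raising, so the exception handler is only a safety net. A hedged sketch of the ignore-only variant, with a placeholder index name:

from elasticsearch import Elasticsearch

es = Elasticsearch()
# With ignore=404 the client swallows the 404 for a missing index and
# returns the parsed error body instead of raising NotFoundError.
es.indices.delete(index="my-index", ignore=404)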
Example 24
def unload_submission(record_id, version=1):

    submission = get_latest_hepsubmission(publication_recid=record_id)

    if not submission:
        print('Record {0} not found'.format(record_id))
        return

    if version == submission.version:
        print('Unloading record {0} version {1}...'.format(record_id, version))
        remove_submission(record_id, version)
    else:
        print('Not unloading record {0} version {1} (latest version {2})...'.format(record_id, version, submission.version))
        return

    if version == 1:

        data_records = get_records_matching_field("related_publication", record_id)
        for record in data_records["hits"]["hits"]:
            print("\t Removed data table {0} from index".format(record["_id"]))
            try:
                delete_item_from_index(doc_type=CFG_DATA_TYPE, id=record["_id"], parent=record_id)
            except Exception as e:
                logging.error("Unable to remove {0} from index. {1}".format(record["_id"], e))

        try:
            delete_item_from_index(doc_type=CFG_PUB_TYPE, id=record_id)
            print("Removed publication {0} from index".format(record_id))
        except NotFoundError as nfe:
            print(nfe)

    print('Finished unloading record {0} version {1}.'.format(record_id, version)) 
Example 25
def delete(self, searchindex_id):
        """Handles DELETE request to the resource."""
        searchindex = SearchIndex.query.get_with_acl(searchindex_id)
        if not searchindex:
            abort(
                HTTP_STATUS_CODE_NOT_FOUND,
                'No searchindex found with this ID.')

        if not searchindex.has_permission(current_user, 'delete'):
            abort(
                HTTP_STATUS_CODE_FORBIDDEN, (
                    'User does not have sufficient access rights to '
                    'delete the search index.'))

        if searchindex.get_status.status == 'deleted':
            abort(
                HTTP_STATUS_CODE_BAD_REQUEST, 'Search index already deleted.')

        timelines = Timeline.query.filter_by(searchindex=searchindex).all()
        sketches = [
            t.sketch for t in timelines
            if t.sketch and t.sketch.get_status.status != 'deleted'
        ]

        if sketches:
            error_strings = ['WARNING: This timeline is in use by:']
            for sketch in sketches:
                error_strings.append(' * {0:s}'.format(sketch.name))
            abort(
                HTTP_STATUS_CODE_FORBIDDEN,
                '\n'.join(error_strings))

        searchindex.set_status(status='deleted')
        db_session.commit()

        other_indexes = SearchIndex.query.filter_by(
            index_name=searchindex.index_name).all()
        if len(other_indexes) > 1:
            logger.warning(
                'Search index: {0:s} belongs to more than one '
                'db entry.'.format(searchindex.index_name))
            return HTTP_STATUS_CODE_OK

        try:
            self.datastore.client.indices.close(index=searchindex.index_name)
        except elasticsearch.NotFoundError:
            logger.warning(
                'Unable to close index: {0:s}, the index wasn\'t '
                'found.'.format(searchindex.index_name))

        return HTTP_STATUS_CODE_OK 
Example 26
def setup(self):
        """
        Defers loading until needed.
        Compares the existing mapping for each language with the current codebase.
        If they differ, it automatically updates the index.
        """
        # Get the existing mapping & cache it. We'll compare it
        # during the ``update`` & if it doesn't match, we'll put the new
        # mapping.
        for language in self.languages:
            self.index_name = self._index_name_for_language(language)
            try:
                self.existing_mapping[language] = self.conn.indices.get_mapping(
                    index=self.index_name)
            except NotFoundError:
                pass
            except Exception:
                if not self.silently_fail:
                    raise

            unified_index = haystack.connections[self.connection_alias].get_unified_index()

            self.content_field_name, field_mapping = self.build_schema(
                unified_index.all_searchfields(), language)

            current_mapping = {
                'modelresult': {
                    'properties': field_mapping,
                    '_boost': {
                        'name': 'boost',
                        'null_value': 1.0
                    }
                }
            }

            if current_mapping != self.existing_mapping[language]:
                try:
                    # Make sure the index is there first.
                    self.conn.indices.create(
                        index=self.index_name,
                        body=self.DEFAULT_SETTINGS,
                        ignore=400)
                    self.conn.indices.put_mapping(
                        index=self.index_name,
                        doc_type='modelresult',
                        body=current_mapping
                    )
                    self.existing_mapping[language] = current_mapping
                except Exception:
                    if not self.silently_fail:
                        raise

        self.setup_complete = True