Python source code examples: asyncio.Queue()

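Before the project snippets below, here is a minimal, self-contained sketch of the producer/consumer pattern most of them build on (the names producer, consumer and main are illustrative and not taken from any of the examples):

import asyncio

async def producer(queue: asyncio.Queue) -> None:
    for i in range(3):
        await queue.put(i)          # blocks only if the queue is bounded and full
    await queue.put(None)           # sentinel telling the consumer to stop

async def consumer(queue: asyncio.Queue) -> None:
    while True:
        item = await queue.get()    # waits until an item is available
        if item is None:
            break
        print("got", item)

async def main() -> None:
    queue: asyncio.Queue = asyncio.Queue(maxsize=10)
    await asyncio.gather(producer(queue), consumer(queue))

asyncio.run(main())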
Example 1
def __init__(self, bot: Bot):
        super().__init__()

        self.bot = bot

        # Categories
        self.available_category: discord.CategoryChannel = None
        self.in_use_category: discord.CategoryChannel = None
        self.dormant_category: discord.CategoryChannel = None

        # Queues
        self.channel_queue: asyncio.Queue[discord.TextChannel] = None
        self.name_queue: t.Deque[str] = None

        self.name_positions = self.get_names()
        self.last_notification: t.Optional[datetime] = None

        # Asyncio stuff
        self.queue_tasks: t.List[asyncio.Task] = []
        self.ready = asyncio.Event()
        self.on_message_lock = asyncio.Lock()
        self.init_task = self.bot.loop.create_task(self.init_cog()) 
Example 2
def __init__(self, loop=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.duofern_parser.asyncio = True
        self.initialization_step = 0
        self.loop = loop
        self.write_queue = asyncio.Queue()
        self._ready = asyncio.Event()
        self.transport = None
        self.buffer = bytearray(b'')

        self.last_packet = 0.0
        self.callback = None

        if loop is None:
            loop = asyncio.get_event_loop()

        self.send_loop = asyncio.ensure_future(self._send_messages(), loop=loop)

        self.available = asyncio.Future()

        # DuofernStick.__init__(self, device, system_code, config_file_json, duofern_parser)

    #        self.serial_connection = serial.Serial(self.port, baudrate=115200, timeout=1)
    #        self.running = False 
Example 3
async def sse():
    queue = asyncio.Queue()
    app.clients.add(queue)
    async def send_events():
        while True:
            try:
                data = await queue.get()
                event = ServerSentEvent(data)
                yield event.encode()
            except asyncio.CancelledError as error:
                app.clients.remove(queue)

    response = await make_response(
        send_events(),
        {
            'Content-Type': 'text/event-stream',
            'Cache-Control': 'no-cache',
            'Transfer-Encoding': 'chunked',
        },
    )
    response.timeout = None
    return response 
Example 4
async def test_http_completion() -> None:
    # Ensure that the connection callable returns on completion
    app = Quart(__name__)
    scope = {
        "headers": [(b"host", b"quart")],
        "http_version": "1.1",
        "method": "GET",
        "scheme": "https",
        "path": "/",
        "query_string": b"",
    }
    connection = ASGIHTTPConnection(app, scope)

    queue: asyncio.Queue = asyncio.Queue()
    queue.put_nowait({"type": "http.request", "body": b"", "more_body": False})

    async def receive() -> dict:
        # This will block after returning the first and only entry
        return await queue.get()

    async def send(message: dict) -> None:
        pass

    # This test fails if a timeout error is raised here
    await asyncio.wait_for(connection(receive, send), timeout=1) 
Example 5
async def test_task_local() -> None:
    local_ = TaskLocal()
    queue: asyncio.Queue = asyncio.Queue()
    tasks = 2
    for _ in range(tasks):
        queue.put_nowait(None)

    async def _test_local(value: int) -> int:
        local_.test = value
        await queue.get()
        queue.task_done()
        await queue.join()
        return local_.test

    futures = [asyncio.ensure_future(_test_local(value)) for value in range(tasks)]
    asyncio.gather(*futures)
    for value, future in enumerate(futures):
        assert (await future) == value 
Example 6
def on_body(self, body):
        if self.is_request_stream and self._is_stream_handler:
            # body chunks can be put into asyncio.Queue out of order if
            # multiple tasks put concurrently and the queue is full in python
            # 3.7. so we should not create more than one task putting into the
            # queue simultaneously.
            self._body_chunks.append(body)
            if (
                not self._request_stream_task
                or self._request_stream_task.done()
            ):
                self._request_stream_task = self.loop.create_task(
                    self.stream_append()
                )
        else:
            self.request.body_push(body) 
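The comment in Example 6 describes a real pitfall: when several tasks call put() concurrently on a full asyncio.Queue (observed on Python 3.7), items can end up enqueued out of order, so the handler keeps one buffer list and at most one task draining it into the queue. A minimal sketch of that single-putter pattern, with hypothetical class and method names (ChunkFeeder, feed, _drain):

import asyncio
import typing

class ChunkFeeder:
    """Buffers incoming chunks and drains them into the queue from a single task."""

    def __init__(self) -> None:
        self._chunks: typing.List[bytes] = []        # chunks received from the protocol callback
        self._queue: asyncio.Queue = asyncio.Queue(maxsize=100)
        self._drain_task: typing.Optional[asyncio.Task] = None   # at most one putter at a time

    def feed(self, chunk: bytes) -> None:
        # Called synchronously for each chunk; never awaits the queue itself.
        self._chunks.append(chunk)
        if self._drain_task is None or self._drain_task.done():
            self._drain_task = asyncio.get_event_loop().create_task(self._drain())

    async def _drain(self) -> None:
        # The only coroutine that awaits queue.put(), so chunks keep their
        # original order even when the queue is full and put() has to wait.
        while self._chunks:
            await self._queue.put(self._chunks.pop(0))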
Example 7
def __init__(self, app, domains, bucket=None, get_root=False, verbose=False, max_tasks_per_node=100):
        log.info(f"FolderCrawler.__init__  {len(domains)} domain names")
        self._app = app
        self._get_root = get_root
        self._verbose = verbose
        self._q = asyncio.Queue()
        self._domain_dict = {}
        self._group_dict = {}
        for domain in domains:
            self._q.put_nowait(domain)
        self._bucket = bucket
        max_tasks = max_tasks_per_node * app['node_count']
        if len(domains) > max_tasks:
            self._max_tasks = max_tasks
        else:
            self._max_tasks = len(domains) 
Example 8
def __init__(self, ip, port, dc_id, *, loop, loggers, proxy=None):
        self._ip = ip
        self._port = port
        self._dc_id = dc_id  # only for MTProxy, it's an abstraction leak
        self._loop = loop
        self._log = loggers[__name__]
        self._proxy = proxy
        self._reader = None
        self._writer = None
        self._connected = False
        self._send_task = None
        self._recv_task = None
        self._codec = None
        self._obfuscation = None  # TcpObfuscated and MTProxy
        self._send_queue = asyncio.Queue(1)
        self._recv_queue = asyncio.Queue(1) 
Example 9
def create_channel_queue(self) -> asyncio.Queue:
        """
        Return a queue of dormant channels to use for getting the next available channel.

        The channels are added to the queue in a random order.
        """
        log.trace("Creating the channel queue.")

        channels = list(self.get_category_channels(self.dormant_category))
        random.shuffle(channels)

        log.trace("Populating the channel queue with channels.")
        queue = asyncio.Queue()
        for channel in channels:
            queue.put_nowait(channel)

        return queue 
Example 10
def __init__(self, bot, limit, before=None, after=None):

        if isinstance(before, datetime.datetime):
            before = Object(id=time_snowflake(before, high=False))
        if isinstance(after, datetime.datetime):
            after = Object(id=time_snowflake(after, high=True))

        self.bot = bot
        self.limit = limit
        self.before = before
        self.after = after

        self._filter = None

        self.state = self.bot._connection
        self.get_guilds = self.bot.http.get_guilds
        self.guilds = asyncio.Queue()

        if self.before and self.after:
            self._retrieve_guilds = self._retrieve_guilds_before_strategy
            self._filter = lambda m: int(m['id']) > self.after.id
        elif self.after:
            self._retrieve_guilds = self._retrieve_guilds_after_strategy
        else:
            self._retrieve_guilds = self._retrieve_guilds_before_strategy 
Example 11
async def test_watcher_fires_after_nonode(zk, data_watcher, child1):
    """
    Test that waiting for a nonexistent node is allowed if
    CREATED is in the watched events
    """
    messages = asyncio.Queue()
    data_watcher.watched_events.append(WatchEvent.CREATED)

    async def callback(d):
        print('Callback sees', d)
        await messages.put(d)

    # should trigger fetch right away, getting NoNode
    data_watcher.add_callback(child1, callback)

    no_node = await asyncio.wait_for(messages.get(), 1)
    assert no_node == NoNode

    # should trigger watch, which triggers fetch, which gets 'some data'
    await zk.create(child1, 'some data')
    some_data = await asyncio.wait_for(messages.get(), 1)
    assert some_data == b'some data'

    data_watcher.remove_callback(child1, callback)
    await zk.delete(child1) 
Example 12
def __init__(self, token, base_url, incoming_queue=None, outgoing_queue=None, loop=None):

        self.logger = Logger.get_logger()
        self._base_url = base_url
        self._token = token
        self._running = False
        self._incoming_queue = incoming_queue or asyncio.Queue()
        self._outgoing_queue = outgoing_queue or asyncio.Queue()
        self._session = None
        self._ws = None
        self._loop = loop

        self._listener_task = None
        self._sender_task = None
        self._heartbeat = Config.heartbeat
        self._receive_timeout = Config.receive_timeout
        self.retry = 0 
Example 13
def __init__(self,
                 impl:           SubscriberImpl[MessageClass],
                 loop:           asyncio.AbstractEventLoop,
                 queue_capacity: typing.Optional[int]):
        """
        Do not call this directly! Use :meth:`Presentation.make_subscriber`.
        """
        if queue_capacity is None:
            queue_capacity = 0      # This case is defined by the Queue API. Means unlimited.
        else:
            queue_capacity = int(queue_capacity)
            if queue_capacity < 1:
                raise ValueError(f'Invalid queue capacity: {queue_capacity}')

        self._closed = False
        self._impl = impl
        self._loop = loop
        self._maybe_task: typing.Optional[asyncio.Task[None]] = None
        self._rx: _Listener[MessageClass] = _Listener(asyncio.Queue(maxsize=queue_capacity, loop=loop))
        impl.add_listener(self._rx)

    # ----------------------------------------  HANDLER-BASED API  ---------------------------------------- 
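Example 13 leans on the Queue API convention it mentions: maxsize=0 means an unbounded queue, while any positive maxsize makes put_nowait() raise asyncio.QueueFull once the queue is full (note also that the loop= argument it passes was removed from asyncio.Queue in Python 3.10). A small illustration of the two cases:

import asyncio

async def demo() -> None:
    unbounded = asyncio.Queue()         # maxsize defaults to 0: no capacity limit
    bounded = asyncio.Queue(maxsize=1)

    unbounded.put_nowait("a")
    unbounded.put_nowait("b")           # always succeeds on an unbounded queue

    bounded.put_nowait("a")
    try:
        bounded.put_nowait("b")         # second item exceeds maxsize=1
    except asyncio.QueueFull:
        print("bounded queue is full")

asyncio.run(demo())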
Example 14
def __init__(self,
                 specifier:        pyuavcan.transport.InputSessionSpecifier,
                 payload_metadata: pyuavcan.transport.PayloadMetadata,
                 loop:             asyncio.AbstractEventLoop,
                 finalizer:        typing.Callable[[], None]):
        self._specifier = specifier
        self._payload_metadata = payload_metadata
        self._loop = loop
        self._maybe_finalizer: typing.Optional[typing.Callable[[], None]] = finalizer
        assert isinstance(self._specifier, pyuavcan.transport.InputSessionSpecifier)
        assert isinstance(self._payload_metadata, pyuavcan.transport.PayloadMetadata)
        assert isinstance(self._loop, asyncio.AbstractEventLoop)
        assert callable(self._maybe_finalizer)

        self._transfer_id_timeout = self.DEFAULT_TRANSFER_ID_TIMEOUT
        self._queue: asyncio.Queue[pyuavcan.transport.TransferFrom] = asyncio.Queue() 
Example 15
def __init__(self,
                 specifier:        pyuavcan.transport.InputSessionSpecifier,
                 payload_metadata: pyuavcan.transport.PayloadMetadata,
                 loop:             asyncio.AbstractEventLoop,
                 finalizer:        _base.SessionFinalizer):
        """Use the factory method."""
        self._specifier = specifier
        self._payload_metadata = payload_metadata

        self._queue: asyncio.Queue[CANInputSession._QueueItem] = asyncio.Queue()
        assert loop is not None
        self._loop = loop
        self._transfer_id_timeout_ns = int(CANInputSession.DEFAULT_TRANSFER_ID_TIMEOUT / _NANO)

        self._receivers = [_transfer_reassembler.TransferReassembler(nid, payload_metadata.max_size_bytes)
                           for nid in _node_id_range()]

        self._statistics = CANInputSessionStatistics()   # We could easily support per-source-node statistics if needed

        super(CANInputSession, self).__init__(finalizer=finalizer) 
Example 16
def __init__(self,
                 specifier:        pyuavcan.transport.InputSessionSpecifier,
                 payload_metadata: pyuavcan.transport.PayloadMetadata,
                 loop:             asyncio.AbstractEventLoop,
                 finalizer:        typing.Callable[[], None]):
        """
        Do not call this directly.
        Instead, use the factory method :meth:`pyuavcan.transport.serial.SerialTransport.get_input_session`.
        """
        self._specifier = specifier
        self._payload_metadata = payload_metadata
        self._loop = loop
        assert self._loop is not None

        if not isinstance(self._specifier, pyuavcan.transport.InputSessionSpecifier) or \
                not isinstance(self._payload_metadata, pyuavcan.transport.PayloadMetadata):  # pragma: no cover
            raise TypeError('Invalid parameters')

        self._statistics = SerialInputSessionStatistics()
        self._transfer_id_timeout = self.DEFAULT_TRANSFER_ID_TIMEOUT
        self._queue: asyncio.Queue[pyuavcan.transport.TransferFrom] = asyncio.Queue()
        self._reassemblers: typing.Dict[int, TransferReassembler] = {}

        super(SerialInputSession, self).__init__(finalizer) 
Example 17
def __init__(self, reader, writer, client_address, loop):
    """Constructor.

        When called without arguments, create an unconnected instance.
        With a hostname argument, it connects the instance; a port
        number is optional.
        """
    # Am I doing the echoing?
    self.loop = loop
    self.DOECHO = True
    # What opts have I sent DO/DONT for and what did I send?
    self.DOOPTS = {}
    # What opts have I sent WILL/WONT for and what did I send?
    self.WILLOPTS = {}
    self.reader = reader
    self.writer = writer
    # What commands does this CLI support
    self.rawq = b''  # Raw input string
    self.sbdataq = b''  # Sub-Neg string
    self.eof = 0  # Has EOF been reached?
    self.iacseq = b''  # Buffer for IAC sequence.
    self.sb = 0  # Flag for SB and SE sequence.
    self.history = []  # Command history
    self.cookedq = asyncio.Queue(loop=self.loop)
    super().__init__(reader, writer, client_address) 
Example 18
def __init__(self, host=None, listen=15):
        assert V.DATA_PATH is not None, 'Setup p2p params before CoreClass init.'
        assert host is None or host == 'localhost'
        # status params
        self.f_stop = False
        self.f_finish = False
        self.f_running = False
        # working info
        self.start_time = int(time())
        self.number = 0
        self.user: List[User] = list()
        self.user_lock = asyncio.Lock()
        self.host = host  # local=>'localhost', 'global'=>None
        self.core_que = asyncio.Queue()
        self.backlog = listen
        self.traffic = Traffic()
        self.ping_status: Dict[int, asyncio.Event] = ExpiringDict(max_len=5000, max_age_seconds=900) 
Example 19
def test_bot_init(monkeypatch, fx_config):
    importlib = FakeImportLib()

    monkeypatch.setattr('importlib.import_module', importlib.import_module)

    fx_config.APPS = ['yui.app1', 'yui.app2']
    box = Box()
    bot = Bot(fx_config, using_box=box)

    assert bot.config == fx_config
    assert bot.channels == []
    assert bot.ims == []
    assert bot.groups == []
    assert bot.restart is False
    assert isinstance(bot.api, SlackAPI)
    assert bot.box is box
    assert isinstance(bot.queue, asyncio.Queue)
    assert importlib.import_queue == [
        'yui.app1',
        'yui.app2',
    ] 
Example 20
def reset(self):
        """
        Clear the values of all attributes of the transaction store.
        """
        self.getsCounter = 0

        # dictionary of processed requests for each client. Value for each
        # client is a dictionary with request id as key and transaction id as
        # value
        self.processedRequests = {}  # type: Dict[str, Dict[int, str]]

        # dictionary of responses to be sent for each client. Value for each
        # client is an asyncio Queue
        self.responses = {}  # type: Dict[str, asyncio.Queue]

        # dictionary with key as transaction id and `Reply` as
        # value
        self.transactions = {}  # type: Dict[str, Reply]

    # Used in test only. 
Example 21
def __init__(self, query_id: int, send_stop: bool) -> None:
        self.query_id: int = query_id
        self.send_stop: bool = send_stop
        self._queue: asyncio.Queue = asyncio.Queue()
        self._closed: bool = False 
Example 22
async def concurrent(streamqueue: asyncio.Queue,
                     trace_name='concurrent',
                     name='stream'):
    """Run code concurrently in different streams.

    :param streamqueue: asyncio.Queue instance.

    Queue tasks define the pool of streams used for concurrent execution.
    """
    if not torch.cuda.is_available():
        yield
        return

    initial_stream = torch.cuda.current_stream()

    with torch.cuda.stream(initial_stream):
        stream = await streamqueue.get()
        assert isinstance(stream, torch.cuda.Stream)

        try:
            with torch.cuda.stream(stream):
                logger.debug('%s %s is starting, stream: %s', trace_name, name,
                             stream)
                yield
                current = torch.cuda.current_stream()
                assert current == stream
                logger.debug('%s %s has finished, stream: %s', trace_name,
                             name, stream)
        finally:
            streamqueue.task_done()
            streamqueue.put_nowait(stream) 
Example 23
def init(self):
        self.streamqueue = asyncio.Queue()
        for _ in range(self.streamqueue_size):
            stream = torch.cuda.Stream(device=self.device)
            self.streamqueue.put_nowait(stream) 
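Examples 22 and 23 use asyncio.Queue as a pool of reusable resources (CUDA streams): the queue is pre-filled, get() borrows a stream, and put_nowait() returns it when the work is done. The same borrow/return pattern in plain asyncio, with hypothetical names (borrowed, worker) and no torch dependency:

import asyncio
import contextlib

@contextlib.asynccontextmanager
async def borrowed(pool: asyncio.Queue):
    resource = await pool.get()         # wait until some resource is free
    try:
        yield resource
    finally:
        pool.task_done()
        pool.put_nowait(resource)       # hand it back for the next borrower

async def worker(pool: asyncio.Queue, name: str) -> None:
    async with borrowed(pool) as res:
        print(name, "is using", res)
        await asyncio.sleep(0.1)

async def main() -> None:
    pool: asyncio.Queue = asyncio.Queue()
    for i in range(2):                  # a pool of two resources, like streamqueue_size in Example 23
        pool.put_nowait(f"resource-{i}")
    await asyncio.gather(*(worker(pool, f"worker-{n}") for n in range(5)))

asyncio.run(main())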
Example 24
def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._max_inbox_size = kwargs.get('max_inbox_size', 0)
        self._inbox = asyncio.Queue(maxsize=self._max_inbox_size,
                                    loop=self._loop)
        self._handlers = {}

        # Create handler for the 'poison pill' message
        self.register_handler(StopMessage, self._stop_message_handler) 
Example 25
def setUp(self):
        self.tracer = MockTracer(AsyncioScopeManager())
        self.queue = asyncio.Queue()
        self.loop = asyncio.get_event_loop()
        self.server = Server(tracer=self.tracer, queue=self.queue) 
Example 26
def setUp(self):
        self.tracer = MockTracer(ContextVarsScopeManager())
        self.queue = asyncio.Queue()
        self.loop = asyncio.get_event_loop()
        self.server = Server(tracer=self.tracer, queue=self.queue) 
Example 27
def __init__(self, remote_queue: asyncio.Queue) -> None:
        self.remote_queue = remote_queue
        self.local_queue: asyncio.Queue = asyncio.Queue()
        self.accepted = False
        self.task: Optional[asyncio.Future] = None 
Example 28
def __init__(self, app: "Quart", scope: dict) -> None:
        self.app = app
        self.scope = scope
        self.queue: asyncio.Queue = asyncio.Queue()
        self._accepted = False 
Example 29
async def test_websocket_completion() -> None:
    # Ensure that the connection callable returns on completion
    app = Quart(__name__)
    scope = {
        "headers": [(b"host", b"quart")],
        "http_version": "1.1",
        "method": "GET",
        "scheme": "wss",
        "path": "/",
        "query_string": b"",
        "subprotocols": [],
        "extensions": {"websocket.http.response": {}},
    }
    connection = ASGIWebsocketConnection(app, scope)

    queue: asyncio.Queue = asyncio.Queue()
    queue.put_nowait({"type": "websocket.connect"})

    async def receive() -> dict:
        # This will block after returning the first and only entry
        return await queue.get()

    async def send(message: dict) -> None:
        pass

    # This test fails if a timeout error is raised here
    await asyncio.wait_for(connection(receive, send), timeout=1) 
Example 30
async def test_stream_request_cancel_when_conn_lost(app):
    app.still_serving_cancelled_request = False

    @app.post("/post/<id>", stream=True)
    async def post(request, id):
        assert isinstance(request.stream, asyncio.Queue)

        async def streaming(response):
            while True:
                body = await request.stream.get()
                if body is None:
                    break
                await response.write(body.decode("utf-8"))

        await asyncio.sleep(1.0)
        # at this point client is already disconnected
        app.still_serving_cancelled_request = True

        return stream(streaming)

    # schedule client call
    loop = asyncio.get_event_loop()
    task = loop.create_task(app.asgi_client.post("/post/1"))
    loop.call_later(0.01, task)
    await asyncio.sleep(0.5)

    # cancelling request and closing connection after 0.5 sec
    task.cancel()

    with contextlib.suppress(asyncio.CancelledError):
        await task

    # Wait for server and check if it's still serving the cancelled request
    await asyncio.sleep(1.0)

    assert app.still_serving_cancelled_request is False