Initial commit

2025-10-29 14:37:08 +08:00
commit 034509181f
4131 changed files with 555736 additions and 0 deletions


@@ -0,0 +1,33 @@
# This is a stub package designed to roughly emulate the _yaml
# extension module, which previously existed as a standalone module
# and has been moved into the `yaml` package namespace.
# It does not perfectly mimic its old counterpart, but should get
# close enough for anyone who's relying on it even when they shouldn't.
import yaml
# in some circumstances, the yaml module we imported may be from a different version, so we need
# to tread carefully when poking at it here (it may not have the attributes we expect)
if not getattr(yaml, '__with_libyaml__', False):
    from sys import version_info

    exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
    raise exc("No module named '_yaml'")
else:
    from yaml._yaml import *

    import warnings

    warnings.warn(
        'The _yaml extension module is now located at yaml._yaml'
        ' and its location is subject to change. To use the'
        ' LibYAML-based parser and emitter, import from `yaml`:'
        ' `from yaml import CLoader as Loader, CDumper as Dumper`.',
        DeprecationWarning
    )
    del warnings
    # Don't `del yaml` here because yaml is actually an existing
    # namespace member of _yaml.
    __name__ = '_yaml'
    # If the module is top-level (i.e. not a part of any specific package)
    # then the attribute should be set to ''.
    # https://docs.python.org/3.8/library/types.html
    __package__ = ''


@@ -0,0 +1,291 @@
Metadata-Version: 2.1
Name: aiofiles
Version: 23.2.1
Summary: File support for asyncio.
Project-URL: Changelog, https://github.com/Tinche/aiofiles#history
Project-URL: Bug Tracker, https://github.com/Tinche/aiofiles/issues
Project-URL: repository, https://github.com/Tinche/aiofiles
Author-email: Tin Tvrtkovic <tinchester@gmail.com>
License: Apache-2.0
License-File: LICENSE
License-File: NOTICE
Classifier: Development Status :: 5 - Production/Stable
Classifier: Framework :: AsyncIO
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=3.7
Description-Content-Type: text/markdown
# aiofiles: file support for asyncio
[![PyPI](https://img.shields.io/pypi/v/aiofiles.svg)](https://pypi.python.org/pypi/aiofiles)
[![Build](https://github.com/Tinche/aiofiles/workflows/CI/badge.svg)](https://github.com/Tinche/aiofiles/actions)
[![Coverage](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/Tinche/882f02e3df32136c847ba90d2688f06e/raw/covbadge.json)](https://github.com/Tinche/aiofiles/actions/workflows/main.yml)
[![Supported Python versions](https://img.shields.io/pypi/pyversions/aiofiles.svg)](https://github.com/Tinche/aiofiles)
[![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
**aiofiles** is an Apache2 licensed library, written in Python, for handling local
disk files in asyncio applications.
Ordinary local file IO is blocking, and cannot easily and portably be made
asynchronous. This means doing file IO may interfere with asyncio applications,
which shouldn't block the executing thread. aiofiles helps with this by
introducing asynchronous versions of files that support delegating operations to
a separate thread pool.
```python
async with aiofiles.open('filename', mode='r') as f:
    contents = await f.read()
print(contents)
'My file contents'
```
Asynchronous iteration is also supported.
```python
async with aiofiles.open('filename') as f:
    async for line in f:
        ...
```
Asynchronous interface to tempfile module.
```python
async with aiofiles.tempfile.TemporaryFile('wb') as f:
    await f.write(b'Hello, World!')
```
## Features
- a file API very similar to Python's standard, blocking API
- support for buffered and unbuffered binary files, and buffered text files
- support for `async`/`await` ([PEP 492](https://peps.python.org/pep-0492/)) constructs
- async interface to tempfile module
## Installation
To install aiofiles, simply:
```bash
$ pip install aiofiles
```
## Usage
Files are opened using the `aiofiles.open()` coroutine, which in addition to
mirroring the builtin `open` accepts optional `loop` and `executor`
arguments. If `loop` is absent, the default loop will be used, as per the
set asyncio policy. If `executor` is not specified, the default event loop
executor will be used.
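For instance, a dedicated thread pool can be supplied via `executor` (a minimal sketch; `example.txt` and the pool size are placeholders, not part of the library):
```python
import asyncio
from concurrent.futures import ThreadPoolExecutor

import aiofiles


async def main():
    # A dedicated pool keeps file IO from competing with other executor jobs.
    with ThreadPoolExecutor(max_workers=4) as pool:
        async with aiofiles.open("example.txt", mode="w", executor=pool) as f:
            await f.write("written from a worker thread\n")


asyncio.run(main())
```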
In case of success, an asynchronous file object is returned with an
API identical to an ordinary file, except the following methods are coroutines
and delegate to an executor:
- `close`
- `flush`
- `isatty`
- `read`
- `readall`
- `read1`
- `readinto`
- `readline`
- `readlines`
- `seek`
- `seekable`
- `tell`
- `truncate`
- `writable`
- `write`
- `writelines`
In case of failure, one of the usual exceptions will be raised.
`aiofiles.stdin`, `aiofiles.stdout`, `aiofiles.stderr`,
`aiofiles.stdin_bytes`, `aiofiles.stdout_bytes`, and
`aiofiles.stderr_bytes` provide async access to `sys.stdin`,
`sys.stdout`, `sys.stderr`, and their corresponding `.buffer` properties.
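A minimal sketch of writing through these wrappers (the flush keeps the text-layer and raw-bytes output ordered):
```python
import asyncio

import aiofiles


async def main():
    # aiofiles.stdout wraps sys.stdout; writes are coroutines like any async file.
    await aiofiles.stdout.write("hello via sys.stdout\n")
    await aiofiles.stdout.flush()
    await aiofiles.stdout_bytes.write(b"raw bytes via sys.stdout.buffer\n")


asyncio.run(main())
```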
The `aiofiles.os` module contains executor-enabled coroutine versions of
several useful `os` functions that deal with files:
- `stat`
- `statvfs`
- `sendfile`
- `rename`
- `renames`
- `replace`
- `remove`
- `unlink`
- `mkdir`
- `makedirs`
- `rmdir`
- `removedirs`
- `link`
- `symlink`
- `readlink`
- `listdir`
- `scandir`
- `access`
- `path.exists`
- `path.isfile`
- `path.isdir`
- `path.islink`
- `path.ismount`
- `path.getsize`
- `path.getatime`
- `path.getctime`
- `path.samefile`
- `path.sameopenfile`
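For example, these wrappers are awaited directly (a sketch; `example.txt` is a placeholder path):
```python
import asyncio

import aiofiles.os


async def main():
    if await aiofiles.os.path.exists("example.txt"):
        info = await aiofiles.os.stat("example.txt")
        print(f"size: {info.st_size} bytes")


asyncio.run(main())
```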
### Tempfile
**aiofiles.tempfile** implements the following interfaces:
- TemporaryFile
- NamedTemporaryFile
- SpooledTemporaryFile
- TemporaryDirectory
Results are returned wrapped in a context manager, allowing use with `async with` and `async for`.
```python
async with aiofiles.tempfile.NamedTemporaryFile('wb+') as f:
    await f.write(b'Line1\n Line2')
    await f.seek(0)
    async for line in f:
        print(line)

async with aiofiles.tempfile.TemporaryDirectory() as d:
    filename = os.path.join(d, "file.ext")
```
### Writing tests for aiofiles
Real file IO can be mocked by patching `aiofiles.threadpool.sync_open`
as desired. The return type also needs to be registered with the
`aiofiles.threadpool.wrap` dispatcher:
```python
aiofiles.threadpool.wrap.register(mock.MagicMock)(
    lambda *args, **kwargs: threadpool.AsyncBufferedIOBase(*args, **kwargs)
)


async def test_stuff():
    data = 'data'
    mock_file = mock.MagicMock()

    with mock.patch('aiofiles.threadpool.sync_open', return_value=mock_file) as mock_open:
        async with aiofiles.open('filename', 'w') as f:
            await f.write(data)

        mock_file.write.assert_called_once_with(data)
```
### History
#### 23.2.1 (2023-08-09)
- Import `os.statvfs` conditionally to fix importing on non-UNIX systems.
[#171](https://github.com/Tinche/aiofiles/issues/171) [#172](https://github.com/Tinche/aiofiles/pull/172)
#### 23.2.0 (2023-08-09)
- aiofiles is now tested on Python 3.12 too.
[#166](https://github.com/Tinche/aiofiles/issues/166) [#168](https://github.com/Tinche/aiofiles/pull/168)
- On Python 3.12, `aiofiles.tempfile.NamedTemporaryFile` now accepts a `delete_on_close` argument, just like the stdlib version.
- On Python 3.12, `aiofiles.tempfile.NamedTemporaryFile` no longer exposes a `delete` attribute, just like the stdlib version.
- Added `aiofiles.os.statvfs` and `aiofiles.os.path.ismount`.
[#162](https://github.com/Tinche/aiofiles/pull/162)
- Use [PDM](https://pdm.fming.dev/latest/) instead of Poetry.
[#169](https://github.com/Tinche/aiofiles/pull/169)
#### 23.1.0 (2023-02-09)
- Added `aiofiles.os.access`.
[#146](https://github.com/Tinche/aiofiles/pull/146)
- Removed `aiofiles.tempfile.temptypes.AsyncSpooledTemporaryFile.softspace`.
[#151](https://github.com/Tinche/aiofiles/pull/151)
- Added `aiofiles.stdin`, `aiofiles.stdin_bytes`, and other stdio streams.
[#154](https://github.com/Tinche/aiofiles/pull/154)
- Transition to `asyncio.get_running_loop` (vs `asyncio.get_event_loop`) internally.
#### 22.1.0 (2022-09-04)
- Added `aiofiles.os.path.islink`.
[#126](https://github.com/Tinche/aiofiles/pull/126)
- Added `aiofiles.os.readlink`.
[#125](https://github.com/Tinche/aiofiles/pull/125)
- Added `aiofiles.os.symlink`.
[#124](https://github.com/Tinche/aiofiles/pull/124)
- Added `aiofiles.os.unlink`.
[#123](https://github.com/Tinche/aiofiles/pull/123)
- Added `aiofiles.os.link`.
[#121](https://github.com/Tinche/aiofiles/pull/121)
- Added `aiofiles.os.renames`.
[#120](https://github.com/Tinche/aiofiles/pull/120)
- Added `aiofiles.os.{listdir, scandir}`.
[#143](https://github.com/Tinche/aiofiles/pull/143)
- Switched to CalVer.
- Dropped Python 3.6 support. If you require it, use version 0.8.0.
- aiofiles is now tested on Python 3.11.
#### 0.8.0 (2021-11-27)
- aiofiles is now tested on Python 3.10.
- Added `aiofiles.os.replace`.
[#107](https://github.com/Tinche/aiofiles/pull/107)
- Added `aiofiles.os.{makedirs, removedirs}`.
- Added `aiofiles.os.path.{exists, isfile, isdir, getsize, getatime, getctime, samefile, sameopenfile}`.
[#63](https://github.com/Tinche/aiofiles/pull/63)
- Added `suffix`, `prefix`, `dir` args to `aiofiles.tempfile.TemporaryDirectory`.
[#116](https://github.com/Tinche/aiofiles/pull/116)
#### 0.7.0 (2021-05-17)
- Added the `aiofiles.tempfile` module for async temporary files.
[#56](https://github.com/Tinche/aiofiles/pull/56)
- Switched to Poetry and GitHub actions.
- Dropped 3.5 support.
#### 0.6.0 (2020-10-27)
- `aiofiles` is now tested on ppc64le.
- Added `name` and `mode` properties to async file objects.
[#82](https://github.com/Tinche/aiofiles/pull/82)
- Fixed a DeprecationWarning internally.
[#75](https://github.com/Tinche/aiofiles/pull/75)
- Python 3.9 support and tests.
#### 0.5.0 (2020-04-12)
- Python 3.8 support. Code base modernization (using `async/await` instead of `asyncio.coroutine`/`yield from`).
- Added `aiofiles.os.remove`, `aiofiles.os.rename`, `aiofiles.os.mkdir`, `aiofiles.os.rmdir`.
[#62](https://github.com/Tinche/aiofiles/pull/62)
#### 0.4.0 (2018-08-11)
- Python 3.7 support.
- Removed Python 3.3/3.4 support. If you use these versions, stick to aiofiles 0.3.x.
#### 0.3.2 (2017-09-23)
- The LICENSE is now included in the sdist.
[#31](https://github.com/Tinche/aiofiles/pull/31)
#### 0.3.1 (2017-03-10)
- Introduced a changelog.
- `aiofiles.os.sendfile` will now work if the standard `os` module contains a `sendfile` function.
### Contributing
Contributions are very welcome. Tests can be run with `tox`, please ensure
the coverage at least stays the same before you submit a pull request.


@@ -0,0 +1,27 @@
aiofiles-23.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
aiofiles-23.2.1.dist-info/METADATA,sha256=cot28p_PNjdl_MK--l9Qu2e6QOv9OxdHrKbjLmYf9Uw,9673
aiofiles-23.2.1.dist-info/RECORD,,
aiofiles-23.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
aiofiles-23.2.1.dist-info/WHEEL,sha256=KGYbc1zXlYddvwxnNty23BeaKzh7YuoSIvIMO4jEhvw,87
aiofiles-23.2.1.dist-info/licenses/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325
aiofiles-23.2.1.dist-info/licenses/NOTICE,sha256=EExY0dRQvWR0wJ2LZLwBgnM6YKw9jCU-M0zegpRSD_E,55
aiofiles/__init__.py,sha256=1iAMJQyJtX3LGIS0AoFTJeO1aJ_RK2jpBSBhg0VoIrE,344
aiofiles/__pycache__/__init__.cpython-312.pyc,,
aiofiles/__pycache__/base.cpython-312.pyc,,
aiofiles/__pycache__/os.cpython-312.pyc,,
aiofiles/__pycache__/ospath.cpython-312.pyc,,
aiofiles/base.py,sha256=rZwA151Ji8XlBkzvDmcF1CgDTY2iKNuJMfvNlM0s0E0,2684
aiofiles/os.py,sha256=zuFGaIyGCGUuFb7trFFEm6SLdCRqTFsSV0mY6SO8z3M,970
aiofiles/ospath.py,sha256=zqG2VFzRb6yYiIOWipqsdgvZmoMTFvZmBdkxkAl1FT4,764
aiofiles/tempfile/__init__.py,sha256=hFSNTOjOUv371Ozdfy6FIxeln46Nm3xOVh4ZR3Q94V0,10244
aiofiles/tempfile/__pycache__/__init__.cpython-312.pyc,,
aiofiles/tempfile/__pycache__/temptypes.cpython-312.pyc,,
aiofiles/tempfile/temptypes.py,sha256=ddEvNjMLVlr7WUILCe6ypTqw77yREeIonTk16Uw_NVs,2093
aiofiles/threadpool/__init__.py,sha256=c_aexl1t193iKdPZaolPEEbHDrQ0RrsH_HTAToMPQBo,3171
aiofiles/threadpool/__pycache__/__init__.cpython-312.pyc,,
aiofiles/threadpool/__pycache__/binary.cpython-312.pyc,,
aiofiles/threadpool/__pycache__/text.cpython-312.pyc,,
aiofiles/threadpool/__pycache__/utils.cpython-312.pyc,,
aiofiles/threadpool/binary.py,sha256=hp-km9VCRu0MLz_wAEUfbCz7OL7xtn9iGAawabpnp5U,2315
aiofiles/threadpool/text.py,sha256=fNmpw2PEkj0BZSldipJXAgZqVGLxALcfOMiuDQ54Eas,1223
aiofiles/threadpool/utils.py,sha256=B59dSZwO_WZs2dFFycKeA91iD2Xq2nNw1EFF8YMBI5k,1868


@@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: hatchling 1.17.1
Root-Is-Purelib: true
Tag: py3-none-any


@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@@ -0,0 +1,2 @@
Asyncio support for files
Copyright 2016 Tin Tvrtkovic


@@ -0,0 +1,22 @@
"""Utilities for asyncio-friendly file handling."""
from .threadpool import (
    open,
    stdin,
    stdout,
    stderr,
    stdin_bytes,
    stdout_bytes,
    stderr_bytes,
)
from . import tempfile

__all__ = [
    "open",
    "tempfile",
    "stdin",
    "stdout",
    "stderr",
    "stdin_bytes",
    "stdout_bytes",
    "stderr_bytes",
]


@@ -0,0 +1,113 @@
"""Various base classes."""
from types import coroutine
from collections.abc import Coroutine
from asyncio import get_running_loop
class AsyncBase:
    def __init__(self, file, loop, executor):
        self._file = file
        self._executor = executor
        self._ref_loop = loop

    @property
    def _loop(self):
        return self._ref_loop or get_running_loop()

    def __aiter__(self):
        """We are our own iterator."""
        return self

    def __repr__(self):
        return super().__repr__() + " wrapping " + repr(self._file)

    async def __anext__(self):
        """Simulate normal file iteration."""
        line = await self.readline()
        if line:
            return line
        else:
            raise StopAsyncIteration


class AsyncIndirectBase(AsyncBase):
    def __init__(self, name, loop, executor, indirect):
        self._indirect = indirect
        self._name = name
        super().__init__(None, loop, executor)

    @property
    def _file(self):
        return self._indirect()

    @_file.setter
    def _file(self, v):
        pass  # discard writes


class _ContextManager(Coroutine):
    __slots__ = ("_coro", "_obj")

    def __init__(self, coro):
        self._coro = coro
        self._obj = None

    def send(self, value):
        return self._coro.send(value)

    def throw(self, typ, val=None, tb=None):
        if val is None:
            return self._coro.throw(typ)
        elif tb is None:
            return self._coro.throw(typ, val)
        else:
            return self._coro.throw(typ, val, tb)

    def close(self):
        return self._coro.close()

    @property
    def gi_frame(self):
        return self._coro.gi_frame

    @property
    def gi_running(self):
        return self._coro.gi_running

    @property
    def gi_code(self):
        return self._coro.gi_code

    def __next__(self):
        return self.send(None)

    @coroutine
    def __iter__(self):
        resp = yield from self._coro
        return resp

    def __await__(self):
        resp = yield from self._coro
        return resp

    async def __anext__(self):
        resp = await self._coro
        return resp

    async def __aenter__(self):
        self._obj = await self._coro
        return self._obj

    async def __aexit__(self, exc_type, exc, tb):
        self._obj.close()
        self._obj = None


class AiofilesContextManager(_ContextManager):
    """An adjusted async context manager for aiofiles."""

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await get_running_loop().run_in_executor(
            None, self._obj._file.__exit__, exc_type, exc_val, exc_tb
        )
        self._obj = None


@@ -0,0 +1,51 @@
"""Async executor versions of file functions from the os module."""
import os
from . import ospath as path
from .ospath import wrap
__all__ = [
    "path",
    "stat",
    "statvfs",
    "rename",
    "renames",
    "replace",
    "remove",
    "unlink",
    "mkdir",
    "makedirs",
    "rmdir",
    "removedirs",
    "link",
    "symlink",
    "readlink",
    "listdir",
    "scandir",
    "access",
    "sendfile",
    "wrap",
]
stat = wrap(os.stat)
rename = wrap(os.rename)
renames = wrap(os.renames)
replace = wrap(os.replace)
remove = wrap(os.remove)
unlink = wrap(os.unlink)
mkdir = wrap(os.mkdir)
makedirs = wrap(os.makedirs)
rmdir = wrap(os.rmdir)
removedirs = wrap(os.removedirs)
link = wrap(os.link)
symlink = wrap(os.symlink)
readlink = wrap(os.readlink)
listdir = wrap(os.listdir)
scandir = wrap(os.scandir)
access = wrap(os.access)
if hasattr(os, "sendfile"):
sendfile = wrap(os.sendfile)
if hasattr(os, "statvfs"):
statvfs = wrap(os.statvfs)


@@ -0,0 +1,28 @@
"""Async executor versions of file functions from the os.path module."""
import asyncio
from functools import partial, wraps
from os import path
def wrap(func):
    @wraps(func)
    async def run(*args, loop=None, executor=None, **kwargs):
        if loop is None:
            loop = asyncio.get_running_loop()
        pfunc = partial(func, *args, **kwargs)
        return await loop.run_in_executor(executor, pfunc)

    return run

exists = wrap(path.exists)
isfile = wrap(path.isfile)
isdir = wrap(path.isdir)
islink = wrap(path.islink)
ismount = wrap(path.ismount)
getsize = wrap(path.getsize)
getmtime = wrap(path.getmtime)
getatime = wrap(path.getatime)
getctime = wrap(path.getctime)
samefile = wrap(path.samefile)
sameopenfile = wrap(path.sameopenfile)


@@ -0,0 +1,357 @@
import asyncio
from functools import partial, singledispatch
from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOBase
from tempfile import NamedTemporaryFile as syncNamedTemporaryFile
from tempfile import SpooledTemporaryFile as syncSpooledTemporaryFile
from tempfile import TemporaryDirectory as syncTemporaryDirectory
from tempfile import TemporaryFile as syncTemporaryFile
from tempfile import _TemporaryFileWrapper as syncTemporaryFileWrapper
from ..base import AiofilesContextManager
from ..threadpool.binary import AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO
from ..threadpool.text import AsyncTextIOWrapper
from .temptypes import AsyncSpooledTemporaryFile, AsyncTemporaryDirectory
import sys
__all__ = [
    "NamedTemporaryFile",
    "TemporaryFile",
    "SpooledTemporaryFile",
    "TemporaryDirectory",
]
# ================================================================
# Public methods for async open and return of temp file/directory
# objects with async interface
# ================================================================
if sys.version_info >= (3, 12):

    def NamedTemporaryFile(
        mode="w+b",
        buffering=-1,
        encoding=None,
        newline=None,
        suffix=None,
        prefix=None,
        dir=None,
        delete=True,
        delete_on_close=True,
        loop=None,
        executor=None,
    ):
        """Async open a named temporary file"""
        return AiofilesContextManager(
            _temporary_file(
                named=True,
                mode=mode,
                buffering=buffering,
                encoding=encoding,
                newline=newline,
                suffix=suffix,
                prefix=prefix,
                dir=dir,
                delete=delete,
                delete_on_close=delete_on_close,
                loop=loop,
                executor=executor,
            )
        )

else:

    def NamedTemporaryFile(
        mode="w+b",
        buffering=-1,
        encoding=None,
        newline=None,
        suffix=None,
        prefix=None,
        dir=None,
        delete=True,
        loop=None,
        executor=None,
    ):
        """Async open a named temporary file"""
        return AiofilesContextManager(
            _temporary_file(
                named=True,
                mode=mode,
                buffering=buffering,
                encoding=encoding,
                newline=newline,
                suffix=suffix,
                prefix=prefix,
                dir=dir,
                delete=delete,
                loop=loop,
                executor=executor,
            )
        )
def TemporaryFile(
    mode="w+b",
    buffering=-1,
    encoding=None,
    newline=None,
    suffix=None,
    prefix=None,
    dir=None,
    loop=None,
    executor=None,
):
    """Async open an unnamed temporary file"""
    return AiofilesContextManager(
        _temporary_file(
            named=False,
            mode=mode,
            buffering=buffering,
            encoding=encoding,
            newline=newline,
            suffix=suffix,
            prefix=prefix,
            dir=dir,
            loop=loop,
            executor=executor,
        )
    )
def SpooledTemporaryFile(
    max_size=0,
    mode="w+b",
    buffering=-1,
    encoding=None,
    newline=None,
    suffix=None,
    prefix=None,
    dir=None,
    loop=None,
    executor=None,
):
    """Async open a spooled temporary file"""
    return AiofilesContextManager(
        _spooled_temporary_file(
            max_size=max_size,
            mode=mode,
            buffering=buffering,
            encoding=encoding,
            newline=newline,
            suffix=suffix,
            prefix=prefix,
            dir=dir,
            loop=loop,
            executor=executor,
        )
    )
def TemporaryDirectory(suffix=None, prefix=None, dir=None, loop=None, executor=None):
    """Async open a temporary directory"""
    return AiofilesContextManagerTempDir(
        _temporary_directory(
            suffix=suffix, prefix=prefix, dir=dir, loop=loop, executor=executor
        )
    )
# =========================================================
# Internal coroutines to open new temp files/directories
# =========================================================
if sys.version_info >= (3, 12):

    async def _temporary_file(
        named=True,
        mode="w+b",
        buffering=-1,
        encoding=None,
        newline=None,
        suffix=None,
        prefix=None,
        dir=None,
        delete=True,
        delete_on_close=True,
        loop=None,
        executor=None,
        max_size=0,
    ):
        """Async method to open a temporary file with async interface"""
        if loop is None:
            loop = asyncio.get_running_loop()

        if named:
            cb = partial(
                syncNamedTemporaryFile,
                mode=mode,
                buffering=buffering,
                encoding=encoding,
                newline=newline,
                suffix=suffix,
                prefix=prefix,
                dir=dir,
                delete=delete,
                delete_on_close=delete_on_close,
            )
        else:
            cb = partial(
                syncTemporaryFile,
                mode=mode,
                buffering=buffering,
                encoding=encoding,
                newline=newline,
                suffix=suffix,
                prefix=prefix,
                dir=dir,
            )

        f = await loop.run_in_executor(executor, cb)

        # Wrap based on type of underlying IO object
        if type(f) is syncTemporaryFileWrapper:
            # _TemporaryFileWrapper was used (named files)
            result = wrap(f.file, f, loop=loop, executor=executor)
            result._closer = f._closer
            return result
        else:
            # IO object was returned directly without wrapper
            return wrap(f, f, loop=loop, executor=executor)

else:

    async def _temporary_file(
        named=True,
        mode="w+b",
        buffering=-1,
        encoding=None,
        newline=None,
        suffix=None,
        prefix=None,
        dir=None,
        delete=True,
        loop=None,
        executor=None,
        max_size=0,
    ):
        """Async method to open a temporary file with async interface"""
        if loop is None:
            loop = asyncio.get_running_loop()

        if named:
            cb = partial(
                syncNamedTemporaryFile,
                mode=mode,
                buffering=buffering,
                encoding=encoding,
                newline=newline,
                suffix=suffix,
                prefix=prefix,
                dir=dir,
                delete=delete,
            )
        else:
            cb = partial(
                syncTemporaryFile,
                mode=mode,
                buffering=buffering,
                encoding=encoding,
                newline=newline,
                suffix=suffix,
                prefix=prefix,
                dir=dir,
            )

        f = await loop.run_in_executor(executor, cb)

        # Wrap based on type of underlying IO object
        if type(f) is syncTemporaryFileWrapper:
            # _TemporaryFileWrapper was used (named files)
            result = wrap(f.file, f, loop=loop, executor=executor)
            # add delete property
            result.delete = f.delete
            return result
        else:
            # IO object was returned directly without wrapper
            return wrap(f, f, loop=loop, executor=executor)
async def _spooled_temporary_file(
    max_size=0,
    mode="w+b",
    buffering=-1,
    encoding=None,
    newline=None,
    suffix=None,
    prefix=None,
    dir=None,
    loop=None,
    executor=None,
):
    """Open a spooled temporary file with async interface"""
    if loop is None:
        loop = asyncio.get_running_loop()

    cb = partial(
        syncSpooledTemporaryFile,
        max_size=max_size,
        mode=mode,
        buffering=buffering,
        encoding=encoding,
        newline=newline,
        suffix=suffix,
        prefix=prefix,
        dir=dir,
    )

    f = await loop.run_in_executor(executor, cb)

    # Single interface provided by SpooledTemporaryFile for all modes
    return AsyncSpooledTemporaryFile(f, loop=loop, executor=executor)
async def _temporary_directory(
    suffix=None, prefix=None, dir=None, loop=None, executor=None
):
    """Async method to open a temporary directory with async interface"""
    if loop is None:
        loop = asyncio.get_running_loop()

    cb = partial(syncTemporaryDirectory, suffix, prefix, dir)
    f = await loop.run_in_executor(executor, cb)

    return AsyncTemporaryDirectory(f, loop=loop, executor=executor)
class AiofilesContextManagerTempDir(AiofilesContextManager):
    """With returns the directory location, not the object (matching sync lib)"""

    async def __aenter__(self):
        self._obj = await self._coro
        return self._obj.name
@singledispatch
def wrap(base_io_obj, file, *, loop=None, executor=None):
    """Wrap the object with interface based on type of underlying IO"""
    raise TypeError("Unsupported IO type: {}".format(base_io_obj))


@wrap.register(TextIOBase)
def _(base_io_obj, file, *, loop=None, executor=None):
    return AsyncTextIOWrapper(file, loop=loop, executor=executor)


@wrap.register(BufferedWriter)
def _(base_io_obj, file, *, loop=None, executor=None):
    return AsyncBufferedIOBase(file, loop=loop, executor=executor)


@wrap.register(BufferedReader)
@wrap.register(BufferedRandom)
def _(base_io_obj, file, *, loop=None, executor=None):
    return AsyncBufferedReader(file, loop=loop, executor=executor)


@wrap.register(FileIO)
def _(base_io_obj, file, *, loop=None, executor=None):
    return AsyncFileIO(file, loop=loop, executor=executor)


@@ -0,0 +1,69 @@
"""Async wrappers for spooled temp files and temp directory objects"""
from functools import partial
from ..base import AsyncBase
from ..threadpool.utils import (
    cond_delegate_to_executor,
    delegate_to_executor,
    proxy_property_directly,
)


@delegate_to_executor("fileno", "rollover")
@cond_delegate_to_executor(
    "close",
    "flush",
    "isatty",
    "read",
    "readline",
    "readlines",
    "seek",
    "tell",
    "truncate",
)
@proxy_property_directly("closed", "encoding", "mode", "name", "newlines")
class AsyncSpooledTemporaryFile(AsyncBase):
    """Async wrapper for SpooledTemporaryFile class"""

    async def _check(self):
        if self._file._rolled:
            return
        max_size = self._file._max_size
        if max_size and self._file.tell() > max_size:
            await self.rollover()

    async def write(self, s):
        """Implementation to anticipate rollover"""
        if self._file._rolled:
            cb = partial(self._file.write, s)
            return await self._loop.run_in_executor(self._executor, cb)
        else:
            file = self._file._file  # reference underlying base IO object
            rv = file.write(s)
            await self._check()
            return rv

    async def writelines(self, iterable):
        """Implementation to anticipate rollover"""
        if self._file._rolled:
            cb = partial(self._file.writelines, iterable)
            return await self._loop.run_in_executor(self._executor, cb)
        else:
            file = self._file._file  # reference underlying base IO object
            rv = file.writelines(iterable)
            await self._check()
            return rv


@delegate_to_executor("cleanup")
@proxy_property_directly("name")
class AsyncTemporaryDirectory:
    """Async wrapper for TemporaryDirectory class"""

    def __init__(self, file, loop, executor):
        self._file = file
        self._loop = loop
        self._executor = executor

    async def close(self):
        await self.cleanup()


@@ -0,0 +1,141 @@
"""Handle files using a thread pool executor."""
import asyncio
import sys
from functools import partial, singledispatch
from io import (
    BufferedIOBase,
    BufferedRandom,
    BufferedReader,
    BufferedWriter,
    FileIO,
    TextIOBase,
)
from types import coroutine
from ..base import AiofilesContextManager
from .binary import (
AsyncBufferedIOBase,
AsyncBufferedReader,
AsyncFileIO,
AsyncIndirectBufferedIOBase,
)
from .text import AsyncTextIndirectIOWrapper, AsyncTextIOWrapper
sync_open = open
__all__ = (
    "open",
    "stdin",
    "stdout",
    "stderr",
    "stdin_bytes",
    "stdout_bytes",
    "stderr_bytes",
)
def open(
    file,
    mode="r",
    buffering=-1,
    encoding=None,
    errors=None,
    newline=None,
    closefd=True,
    opener=None,
    *,
    loop=None,
    executor=None,
):
    return AiofilesContextManager(
        _open(
            file,
            mode=mode,
            buffering=buffering,
            encoding=encoding,
            errors=errors,
            newline=newline,
            closefd=closefd,
            opener=opener,
            loop=loop,
            executor=executor,
        )
    )


@coroutine
def _open(
    file,
    mode="r",
    buffering=-1,
    encoding=None,
    errors=None,
    newline=None,
    closefd=True,
    opener=None,
    *,
    loop=None,
    executor=None,
):
    """Open an asyncio file."""
    if loop is None:
        loop = asyncio.get_running_loop()
    cb = partial(
        sync_open,
        file,
        mode=mode,
        buffering=buffering,
        encoding=encoding,
        errors=errors,
        newline=newline,
        closefd=closefd,
        opener=opener,
    )
    f = yield from loop.run_in_executor(executor, cb)

    return wrap(f, loop=loop, executor=executor)


@singledispatch
def wrap(file, *, loop=None, executor=None):
    raise TypeError("Unsupported io type: {}.".format(file))


@wrap.register(TextIOBase)
def _(file, *, loop=None, executor=None):
    return AsyncTextIOWrapper(file, loop=loop, executor=executor)


@wrap.register(BufferedWriter)
@wrap.register(BufferedIOBase)
def _(file, *, loop=None, executor=None):
    return AsyncBufferedIOBase(file, loop=loop, executor=executor)


@wrap.register(BufferedReader)
@wrap.register(BufferedRandom)
def _(file, *, loop=None, executor=None):
    return AsyncBufferedReader(file, loop=loop, executor=executor)


@wrap.register(FileIO)
def _(file, *, loop=None, executor=None):
    return AsyncFileIO(file, loop=loop, executor=executor)


stdin = AsyncTextIndirectIOWrapper("sys.stdin", None, None, indirect=lambda: sys.stdin)
stdout = AsyncTextIndirectIOWrapper(
    "sys.stdout", None, None, indirect=lambda: sys.stdout
)
stderr = AsyncTextIndirectIOWrapper(
    "sys.stderr", None, None, indirect=lambda: sys.stderr
)
stdin_bytes = AsyncIndirectBufferedIOBase(
    "sys.stdin.buffer", None, None, indirect=lambda: sys.stdin.buffer
)
stdout_bytes = AsyncIndirectBufferedIOBase(
    "sys.stdout.buffer", None, None, indirect=lambda: sys.stdout.buffer
)
stderr_bytes = AsyncIndirectBufferedIOBase(
    "sys.stderr.buffer", None, None, indirect=lambda: sys.stderr.buffer
)


@@ -0,0 +1,104 @@
from ..base import AsyncBase, AsyncIndirectBase
from .utils import delegate_to_executor, proxy_method_directly, proxy_property_directly
@delegate_to_executor(
    "close",
    "flush",
    "isatty",
    "read",
    "read1",
    "readinto",
    "readline",
    "readlines",
    "seek",
    "seekable",
    "tell",
    "truncate",
    "writable",
    "write",
    "writelines",
)
@proxy_method_directly("detach", "fileno", "readable")
@proxy_property_directly("closed", "raw", "name", "mode")
class AsyncBufferedIOBase(AsyncBase):
    """The asyncio executor version of io.BufferedWriter and BufferedIOBase."""


@delegate_to_executor("peek")
class AsyncBufferedReader(AsyncBufferedIOBase):
    """The asyncio executor version of io.BufferedReader and Random."""


@delegate_to_executor(
    "close",
    "flush",
    "isatty",
    "read",
    "readall",
    "readinto",
    "readline",
    "readlines",
    "seek",
    "seekable",
    "tell",
    "truncate",
    "writable",
    "write",
    "writelines",
)
@proxy_method_directly("fileno", "readable")
@proxy_property_directly("closed", "name", "mode")
class AsyncFileIO(AsyncBase):
    """The asyncio executor version of io.FileIO."""


@delegate_to_executor(
    "close",
    "flush",
    "isatty",
    "read",
    "read1",
    "readinto",
    "readline",
    "readlines",
    "seek",
    "seekable",
    "tell",
    "truncate",
    "writable",
    "write",
    "writelines",
)
@proxy_method_directly("detach", "fileno", "readable")
@proxy_property_directly("closed", "raw", "name", "mode")
class AsyncIndirectBufferedIOBase(AsyncIndirectBase):
    """The indirect asyncio executor version of io.BufferedWriter and BufferedIOBase."""


@delegate_to_executor("peek")
class AsyncIndirectBufferedReader(AsyncIndirectBufferedIOBase):
    """The indirect asyncio executor version of io.BufferedReader and Random."""


@delegate_to_executor(
    "close",
    "flush",
    "isatty",
    "read",
    "readall",
    "readinto",
    "readline",
    "readlines",
    "seek",
    "seekable",
    "tell",
    "truncate",
    "writable",
    "write",
    "writelines",
)
@proxy_method_directly("fileno", "readable")
@proxy_property_directly("closed", "name", "mode")
class AsyncIndirectFileIO(AsyncIndirectBase):
    """The indirect asyncio executor version of io.FileIO."""


@@ -0,0 +1,64 @@
from ..base import AsyncBase, AsyncIndirectBase
from .utils import delegate_to_executor, proxy_method_directly, proxy_property_directly
@delegate_to_executor(
    "close",
    "flush",
    "isatty",
    "read",
    "readable",
    "readline",
    "readlines",
    "seek",
    "seekable",
    "tell",
    "truncate",
    "write",
    "writable",
    "writelines",
)
@proxy_method_directly("detach", "fileno", "readable")
@proxy_property_directly(
    "buffer",
    "closed",
    "encoding",
    "errors",
    "line_buffering",
    "newlines",
    "name",
    "mode",
)
class AsyncTextIOWrapper(AsyncBase):
    """The asyncio executor version of io.TextIOWrapper."""


@delegate_to_executor(
    "close",
    "flush",
    "isatty",
    "read",
    "readable",
    "readline",
    "readlines",
    "seek",
    "seekable",
    "tell",
    "truncate",
    "write",
    "writable",
    "writelines",
)
@proxy_method_directly("detach", "fileno", "readable")
@proxy_property_directly(
    "buffer",
    "closed",
    "encoding",
    "errors",
    "line_buffering",
    "newlines",
    "name",
    "mode",
)
class AsyncTextIndirectIOWrapper(AsyncIndirectBase):
    """The indirect asyncio executor version of io.TextIOWrapper."""


@@ -0,0 +1,72 @@
import functools
def delegate_to_executor(*attrs):
    def cls_builder(cls):
        for attr_name in attrs:
            setattr(cls, attr_name, _make_delegate_method(attr_name))
        return cls

    return cls_builder


def proxy_method_directly(*attrs):
    def cls_builder(cls):
        for attr_name in attrs:
            setattr(cls, attr_name, _make_proxy_method(attr_name))
        return cls

    return cls_builder


def proxy_property_directly(*attrs):
    def cls_builder(cls):
        for attr_name in attrs:
            setattr(cls, attr_name, _make_proxy_property(attr_name))
        return cls

    return cls_builder


def cond_delegate_to_executor(*attrs):
    def cls_builder(cls):
        for attr_name in attrs:
            setattr(cls, attr_name, _make_cond_delegate_method(attr_name))
        return cls

    return cls_builder


def _make_delegate_method(attr_name):
    async def method(self, *args, **kwargs):
        cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs)
        return await self._loop.run_in_executor(self._executor, cb)

    return method


def _make_proxy_method(attr_name):
    def method(self, *args, **kwargs):
        return getattr(self._file, attr_name)(*args, **kwargs)

    return method


def _make_proxy_property(attr_name):
    def proxy_property(self):
        return getattr(self._file, attr_name)

    return property(proxy_property)


def _make_cond_delegate_method(attr_name):
    """For spooled temp files, delegate only if rolled to file object"""

    async def method(self, *args, **kwargs):
        if self._file._rolled:
            cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs)
            return await self._loop.run_in_executor(self._executor, cb)
        else:
            return getattr(self._file, attr_name)(*args, **kwargs)

    return method


@@ -0,0 +1,295 @@
Metadata-Version: 2.3
Name: annotated-types
Version: 0.7.0
Summary: Reusable constraint types to use with typing.Annotated
Project-URL: Homepage, https://github.com/annotated-types/annotated-types
Project-URL: Source, https://github.com/annotated-types/annotated-types
Project-URL: Changelog, https://github.com/annotated-types/annotated-types/releases
Author-email: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Samuel Colvin <s@muelcolvin.com>, Zac Hatfield-Dodds <zac@zhd.dev>
License-File: LICENSE
Classifier: Development Status :: 4 - Beta
Classifier: Environment :: Console
Classifier: Environment :: MacOS X
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: Information Technology
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: POSIX :: Linux
Classifier: Operating System :: Unix
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Typing :: Typed
Requires-Python: >=3.8
Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9'
Description-Content-Type: text/markdown
# annotated-types
[![CI](https://github.com/annotated-types/annotated-types/workflows/CI/badge.svg?event=push)](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI)
[![pypi](https://img.shields.io/pypi/v/annotated-types.svg)](https://pypi.python.org/pypi/annotated-types)
[![versions](https://img.shields.io/pypi/pyversions/annotated-types.svg)](https://github.com/annotated-types/annotated-types)
[![license](https://img.shields.io/github/license/annotated-types/annotated-types.svg)](https://github.com/annotated-types/annotated-types/blob/main/LICENSE)
[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of
adding context-specific metadata to existing types, and specifies that
`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special
logic for `x`.
This package provides metadata objects which can be used to represent common
constraints such as upper and lower bounds on scalar values and collection sizes,
a `Predicate` marker for runtime checks, and
descriptions of how we intend these metadata to be interpreted. In some cases,
we also note alternative representations which do not require this package.
## Install
```bash
pip install annotated-types
```
## Examples
```python
from typing import Annotated
from annotated_types import Gt, Len, Predicate
class MyClass:
    age: Annotated[int, Gt(18)]                         # Valid: 19, 20, ...
                                                        # Invalid: 17, 18, "19", 19.0, ...
    factors: list[Annotated[int, Predicate(is_prime)]]  # Valid: 2, 3, 5, 7, 11, ...
                                                        # Invalid: 4, 8, -2, 5.0, "prime", ...

    my_list: Annotated[list[int], Len(0, 10)]           # Valid: [], [10, 20, 30, 40, 50]
                                                        # Invalid: (1, 2), ["abc"], [0] * 20
```
## Documentation
_While `annotated-types` avoids runtime checks for performance, users should not
construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`.
Downstream implementors may choose to raise an error, emit a warning, silently ignore
a metadata item, etc., if the metadata objects described below are used with an
incompatible type - or for any other reason!_
### Gt, Ge, Lt, Le
Express inclusive and/or exclusive bounds on orderable values - which may be numbers,
dates, times, strings, sets, etc. Note that the boundary value need not be of the
same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]`
is fine, for example, and implies that the value is an integer x such that `x > 1.5`.
We suggest that implementors may also interpret `functools.partial(operator.lt, 1.5)`
(a check that `1.5 < x`) as being equivalent to `Gt(1.5)`, for users who wish to avoid
a runtime dependency on the `annotated-types` package.
To be explicit, these types have the following meanings:
* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum
* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum
* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum
* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum
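A sketch of both spellings of the `functools.partial` equivalence above (`Price` and `gt_one_point_five` are hypothetical names):
```python
import functools
import operator
from typing import Annotated

from annotated_types import Gt

# The bound's type need only compare with the annotated type:
# integers x with x > 1.5, i.e. x >= 2.
Price = Annotated[int, Gt(1.5)]

# An introspectable stand-in for the same bound, without annotated-types:
# operator.lt(1.5, x) evaluates 1.5 < x.
gt_one_point_five = functools.partial(operator.lt, 1.5)
assert gt_one_point_five(2) and not gt_one_point_five(1)
```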
### Interval
`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single
metadata object. `None` attributes should be ignored, and non-`None` attributes
treated as per the single bounds above.
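A sketch of the equivalence (alias names are hypothetical):
```python
from typing import Annotated

from annotated_types import Ge, Interval, Lt

# One metadata object carrying both bounds...
Percentage = Annotated[float, Interval(ge=0, lt=100)]

# ...read the same as the two single bounds, since None attributes are ignored.
PercentageExplicit = Annotated[float, Ge(0), Lt(100)]
```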
### MultipleOf
`MultipleOf(multiple_of=x)` might be interpreted in two ways:
1. Python semantics, implying `value % multiple_of == 0`, or
2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1),
where `int(value / multiple_of) == value / multiple_of`.
We encourage users to be aware of these two common interpretations and their
distinct behaviours, especially since very large or non-integer numbers make
it easy to cause silent data corruption due to floating-point imprecision.
We encourage libraries to carefully document which interpretation they implement.
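A sketch of the two checks, showing how floating-point imprecision bites:
```python
value, multiple_of = 0.3, 0.1

# 1. Python semantics: 0.3 % 0.1 == 0.09999999999999998, so the check fails.
print(value % multiple_of == 0)  # False

# 2. JSONschema semantics: 0.3 / 0.1 == 2.9999999999999996 and int(...) == 2,
#    so this fails too, although 0.3 "looks like" a multiple of 0.1.
print(int(value / multiple_of) == value / multiple_of)  # False
```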
### MinLen, MaxLen, Len
`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive.
As well as `Len()` which can optionally include upper and lower bounds, we also
provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)`
and `Len(max_length=y)` respectively.
`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`.
Examples of usage:
* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less
* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less
* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more
* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6
* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8
#### Changed in v0.4.0
* `min_inclusive` has been renamed to `min_length`, no change in meaning
* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive**
* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic
meaning of the upper bound in slices vs. `Len`
See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion.
### Timezone
`Timezone` can be used with a `datetime` or a `time` to express which timezones
are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime.
`Timezone[...]` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis))
expresses that any timezone-aware datetime is allowed. You may also pass a specific
timezone string or [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects)
object such as `Timezone(timezone.utc)` or `Timezone("Africa/Abidjan")` to express that you only
allow a specific timezone, though we note that this is often a symptom of fragile design.
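A sketch of the three forms (alias names are hypothetical):
```python
from datetime import datetime, timezone
from typing import Annotated

from annotated_types import Timezone

NaiveDatetime = Annotated[datetime, Timezone(None)]        # naive values only
AwareDatetime = Annotated[datetime, Timezone(...)]         # any timezone-aware value
UtcDatetime = Annotated[datetime, Timezone(timezone.utc)]  # UTC only
```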
#### Changed in v0.x.x
* `Timezone` accepts [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) objects instead of
`timezone`, extending compatibility to [`zoneinfo`](https://docs.python.org/3/library/zoneinfo.html) and third party libraries.
### Unit
`Unit(unit: str)` expresses that the annotated numeric value is the magnitude of
a quantity with the specified unit. For example, `Annotated[float, Unit("m/s")]`
would be a float representing a velocity in meters per second.
Please note that `annotated_types` itself makes no attempt to parse or validate
the unit string in any way. That is left entirely to downstream libraries,
such as [`pint`](https://pint.readthedocs.io) or
[`astropy.units`](https://docs.astropy.org/en/stable/units/).
An example of how a library might use this metadata:
```python
from annotated_types import Unit
from typing import Annotated, TypeVar, Callable, Any, get_origin, get_args
# given a type annotated with a unit:
Meters = Annotated[float, Unit("m")]
# you can cast the annotation to a specific unit type with any
# callable that accepts a string and returns the desired type
T = TypeVar("T")
def cast_unit(tp: Any, unit_cls: Callable[[str], T]) -> T | None:
    if get_origin(tp) is Annotated:
        for arg in get_args(tp):
            if isinstance(arg, Unit):
                return unit_cls(arg.unit)
    return None
# using `pint`
import pint
pint_unit = cast_unit(Meters, pint.Unit)
# using `astropy.units`
import astropy.units as u
astropy_unit = cast_unit(Meters, u.Unit)
```
### Predicate
`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
Users should prefer the statically inspectable metadata above, but if you need
the full power and flexibility of arbitrary runtime predicates... here it is.
For some common constraints, we provide generic types:
* `IsLower = Annotated[T, Predicate(str.islower)]`
* `IsUpper = Annotated[T, Predicate(str.isupper)]`
* `IsDigit = Annotated[T, Predicate(str.isdigit)]`
* `IsFinite = Annotated[T, Predicate(math.isfinite)]`
* `IsNotFinite = Annotated[T, Predicate(Not(math.isfinite))]`
* `IsNan = Annotated[T, Predicate(math.isnan)]`
* `IsNotNan = Annotated[T, Predicate(Not(math.isnan))]`
* `IsInfinite = Annotated[T, Predicate(math.isinf)]`
* `IsNotInfinite = Annotated[T, Predicate(Not(math.isinf))]`
so that you can write e.g. `x: IsFinite[float] = 2.0` instead of the longer
(but exactly equivalent) `x: Annotated[float, Predicate(math.isfinite)] = 2.0`.
Some libraries might have special logic to handle known or understandable predicates,
for example by checking for `str.isdigit` and using its presence to both call custom
logic to enforce digit-only strings, and customise some generated external schema.
Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.
To enable basic negation of commonly used predicates like `math.isnan` without resorting to wrappers (such as `lambda x: not math.isnan(x)`) that make it impossible for implementers to introspect the predicate, we provide a `Not` wrapper that simply negates the predicate in an introspectable manner. Several of the predicates listed above are created in this manner.
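A sketch of introspectable predicates and their negation (alias names are hypothetical):
```python
import math
from typing import Annotated

from annotated_types import Not, Predicate

DigitString = Annotated[str, Predicate(str.isdigit)]
FiniteFloat = Annotated[float, Predicate(math.isfinite)]
# Not(...) negates while staying introspectable, unlike a hand-written lambda:
NotNanFloat = Annotated[float, Predicate(Not(math.isnan))]
```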
We do not specify what behaviour should be expected for predicates that raise
an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
skip invalid constraints, or statically raise an error; or it might try calling it
and then propagate or discard the resulting
`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
exception. We encourage libraries to document the behaviour they choose.
### Doc
`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used.
It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools.
It returns a `DocInfo` class with a single attribute `documentation` containing the value passed to `doc()`.
This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md).
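A sketch, assuming the `doc` callable exported by this package:
```python
from typing import Annotated

from annotated_types import doc


def greet(name: Annotated[str, doc("The name to include in the greeting.")]) -> str:
    return f"Hello, {name}!"


# The metadata item is a DocInfo; its `documentation` attribute holds the string.
info = greet.__annotations__["name"].__metadata__[0]
print(info.documentation)
```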
### Integrating downstream types with `GroupedMetadata`
Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
This can help reduce verbosity and cognitive overhead for users.
For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:
```python
from dataclasses import dataclass
from typing import Iterator
from annotated_types import GroupedMetadata, Ge
@dataclass
class Field(GroupedMetadata):
    ge: int | None = None
    description: str | None = None

    def __iter__(self) -> Iterator[object]:
        # Iterating over a GroupedMetadata object should yield annotated-types
        # constraint metadata objects which describe it as fully as possible,
        # and may include other unknown objects too.
        if self.ge is not None:
            yield Ge(self.ge)
        if self.description is not None:
            yield Description(self.description)
```
Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.
Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.
Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.
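For instance, a consumer can rely on iteration alone:

```python
import annotated_types as at

interval = at.Interval(gt=0, le=10)
assert isinstance(interval, at.GroupedMetadata)  # runtime-checkable protocol
assert list(interval) == [at.Gt(0), at.Le(10)]

# Len yields MinLen only when the lower bound is nonzero:
assert list(at.Len(0, 4)) == [at.MaxLen(4)]
assert list(at.Len(1, 4)) == [at.MinLen(1), at.MaxLen(4)]
```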
### Consuming metadata
We do not intend to be prescriptive as to _how_ the metadata and constraints are used; as an example of how one might parse constraints from type annotations, see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103).
It is up to the implementer to determine how this metadata is used.
You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases.
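As a minimal sketch of one possible consumer (`collect_constraints` is an illustrative helper, not part of the library; only public `annotated_types` names are used):

```python
from __future__ import annotations

from typing import Annotated, get_args, get_origin

import annotated_types as at


def collect_constraints(annotation) -> list[at.BaseMetadata]:
    """Flatten constraint metadata out of an Annotated type, unpacking groups."""
    if get_origin(annotation) is not Annotated:
        return []
    constraints = []
    for meta in get_args(annotation)[1:]:
        if isinstance(meta, at.GroupedMetadata):
            # Unpack Interval, Len, and Field-like groups into their parts.
            constraints.extend(m for m in meta if isinstance(m, at.BaseMetadata))
        elif isinstance(meta, at.BaseMetadata):
            constraints.append(meta)
        # Anything else is unrecognized metadata and is ignored.
    return constraints


assert collect_constraints(Annotated[int, at.Interval(gt=0, lt=10)]) == [at.Gt(0), at.Lt(10)]
```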
## Design & History
This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
and Hypothesis, with the goal of making it as easy as possible for end-users to
provide more informative annotations for use by runtime libraries.
It is deliberately minimal and, following PEP-593, allows implementers considerable
discretion in what (if anything!) they choose to support. Nonetheless, we expect
that staying simple and covering _only_ the most common use-cases will give users
and maintainers the best experience we can. If you'd like more constraints for your
types, follow our lead by defining and documenting them downstream!

View File

@@ -0,0 +1,10 @@
annotated_types-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
annotated_types-0.7.0.dist-info/METADATA,sha256=7ltqxksJJ0wCYFGBNIQCWTlWQGeAH0hRFdnK3CB895E,15046
annotated_types-0.7.0.dist-info/RECORD,,
annotated_types-0.7.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
annotated_types-0.7.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083
annotated_types/__init__.py,sha256=RynLsRKUEGI0KimXydlD1fZEfEzWwDo0Uon3zOKhG1Q,13819
annotated_types/__pycache__/__init__.cpython-312.pyc,,
annotated_types/__pycache__/test_cases.cpython-312.pyc,,
annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
annotated_types/test_cases.py,sha256=zHFX6EpcMbGJ8FzBYDbO56bPwx_DYIVSKbZM-4B3_lg,6421

View File

@@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: hatchling 1.24.2
Root-Is-Purelib: true
Tag: py3-none-any

View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2022 the contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,432 @@
import math
import sys
import types
from dataclasses import dataclass
from datetime import tzinfo
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union
if sys.version_info < (3, 8):
from typing_extensions import Protocol, runtime_checkable
else:
from typing import Protocol, runtime_checkable
if sys.version_info < (3, 9):
from typing_extensions import Annotated, Literal
else:
from typing import Annotated, Literal
if sys.version_info < (3, 10):
EllipsisType = type(Ellipsis)
KW_ONLY = {}
SLOTS = {}
else:
from types import EllipsisType
KW_ONLY = {"kw_only": True}
SLOTS = {"slots": True}
__all__ = (
'BaseMetadata',
'GroupedMetadata',
'Gt',
'Ge',
'Lt',
'Le',
'Interval',
'MultipleOf',
'MinLen',
'MaxLen',
'Len',
'Timezone',
'Predicate',
'LowerCase',
'UpperCase',
'IsDigits',
'IsFinite',
'IsNotFinite',
'IsNan',
'IsNotNan',
'IsInfinite',
'IsNotInfinite',
'doc',
'DocInfo',
'__version__',
)
__version__ = '0.7.0'
T = TypeVar('T')
# arguments that start with __ are considered
# positional only
# see https://peps.python.org/pep-0484/#positional-only-arguments
class SupportsGt(Protocol):
def __gt__(self: T, __other: T) -> bool:
...
class SupportsGe(Protocol):
def __ge__(self: T, __other: T) -> bool:
...
class SupportsLt(Protocol):
def __lt__(self: T, __other: T) -> bool:
...
class SupportsLe(Protocol):
def __le__(self: T, __other: T) -> bool:
...
class SupportsMod(Protocol):
def __mod__(self: T, __other: T) -> T:
...
class SupportsDiv(Protocol):
def __div__(self: T, __other: T) -> T:
...
class BaseMetadata:
"""Base class for all metadata.
This exists mainly so that implementers
can do `isinstance(..., BaseMetadata)` while traversing field annotations.
"""
__slots__ = ()
@dataclass(frozen=True, **SLOTS)
class Gt(BaseMetadata):
"""Gt(gt=x) implies that the value must be greater than x.
It can be used with any type that supports the ``>`` operator,
including numbers, dates and times, strings, sets, and so on.
"""
gt: SupportsGt
@dataclass(frozen=True, **SLOTS)
class Ge(BaseMetadata):
"""Ge(ge=x) implies that the value must be greater than or equal to x.
It can be used with any type that supports the ``>=`` operator,
including numbers, dates and times, strings, sets, and so on.
"""
ge: SupportsGe
@dataclass(frozen=True, **SLOTS)
class Lt(BaseMetadata):
"""Lt(lt=x) implies that the value must be less than x.
It can be used with any type that supports the ``<`` operator,
including numbers, dates and times, strings, sets, and so on.
"""
lt: SupportsLt
@dataclass(frozen=True, **SLOTS)
class Le(BaseMetadata):
"""Le(le=x) implies that the value must be less than or equal to x.
It can be used with any type that supports the ``<=`` operator,
including numbers, dates and times, strings, sets, and so on.
"""
le: SupportsLe
@runtime_checkable
class GroupedMetadata(Protocol):
"""A grouping of multiple objects, like typing.Unpack.
`GroupedMetadata` on its own is not metadata and has no meaning.
All of the constraints and metadata should be fully expressable
in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.
Concrete implementations should override `GroupedMetadata.__iter__()`
to add their own metadata.
For example:
>>> @dataclass
>>> class Field(GroupedMetadata):
>>> gt: float | None = None
>>> description: str | None = None
...
>>> def __iter__(self) -> Iterator[object]:
>>> if self.gt is not None:
>>> yield Gt(self.gt)
>>> if self.description is not None:
>>> yield Description(self.description)
Also see the implementation of `Interval` below for an example.
Parsers should recognize this and unpack it so that it can be used
both with and without unpacking:
- `Annotated[int, Field(...)]` (parser must unpack Field)
- `Annotated[int, *Field(...)]` (PEP-646)
""" # noqa: trailing-whitespace
@property
def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
return True
def __iter__(self) -> Iterator[object]:
...
if not TYPE_CHECKING:
__slots__ = () # allow subclasses to use slots
def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
# Basic ABC like functionality without the complexity of an ABC
super().__init_subclass__(*args, **kwargs)
if cls.__iter__ is GroupedMetadata.__iter__:
raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")
def __iter__(self) -> Iterator[object]: # noqa: F811
raise NotImplementedError # more helpful than "None has no attribute..." type errors
@dataclass(frozen=True, **KW_ONLY, **SLOTS)
class Interval(GroupedMetadata):
"""Interval can express inclusive or exclusive bounds with a single object.
It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
are interpreted the same way as the single-bound constraints.
"""
gt: Union[SupportsGt, None] = None
ge: Union[SupportsGe, None] = None
lt: Union[SupportsLt, None] = None
le: Union[SupportsLe, None] = None
def __iter__(self) -> Iterator[BaseMetadata]:
"""Unpack an Interval into zero or more single-bounds."""
if self.gt is not None:
yield Gt(self.gt)
if self.ge is not None:
yield Ge(self.ge)
if self.lt is not None:
yield Lt(self.lt)
if self.le is not None:
yield Le(self.le)
@dataclass(frozen=True, **SLOTS)
class MultipleOf(BaseMetadata):
"""MultipleOf(multiple_of=x) might be interpreted in two ways:
1. Python semantics, implying ``value % multiple_of == 0``, or
2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``
We encourage users to be aware of these two common interpretations,
and libraries to carefully document which they implement.
"""
multiple_of: Union[SupportsDiv, SupportsMod]
@dataclass(frozen=True, **SLOTS)
class MinLen(BaseMetadata):
"""
MinLen() implies minimum inclusive length,
e.g. ``len(value) >= min_length``.
"""
min_length: Annotated[int, Ge(0)]
@dataclass(frozen=True, **SLOTS)
class MaxLen(BaseMetadata):
"""
MaxLen() implies maximum inclusive length,
e.g. ``len(value) <= max_length``.
"""
max_length: Annotated[int, Ge(0)]
@dataclass(frozen=True, **SLOTS)
class Len(GroupedMetadata):
"""
Len() implies that ``min_length <= len(value) <= max_length``.
Upper bound may be omitted or ``None`` to indicate no upper length bound.
"""
min_length: Annotated[int, Ge(0)] = 0
max_length: Optional[Annotated[int, Ge(0)]] = None
def __iter__(self) -> Iterator[BaseMetadata]:
"""Unpack a Len into zone or more single-bounds."""
if self.min_length > 0:
yield MinLen(self.min_length)
if self.max_length is not None:
yield MaxLen(self.max_length)
@dataclass(frozen=True, **SLOTS)
class Timezone(BaseMetadata):
"""Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).
``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be
tz-aware but any timezone is allowed.
You may also pass a specific timezone string or tzinfo object such as
``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
you only allow a specific timezone, though we note that this is often
a symptom of poor design.
"""
tz: Union[str, tzinfo, EllipsisType, None]
@dataclass(frozen=True, **SLOTS)
class Unit(BaseMetadata):
"""Indicates that the value is a physical quantity with the specified unit.
It is intended for usage with numeric types, where the value represents the
magnitude of the quantity. For example, ``distance: Annotated[float, Unit('m')]``
or ``speed: Annotated[float, Unit('m/s')]``.
Interpretation of the unit string is left to the discretion of the consumer.
It is suggested to follow conventions established by python libraries that work
with physical quantities, such as
- ``pint`` : <https://pint.readthedocs.io/en/stable/>
- ``astropy.units``: <https://docs.astropy.org/en/stable/units/>
For indicating a quantity with a certain dimensionality but without a specific unit
it is recommended to use square brackets, e.g. `Annotated[float, Unit('[time]')]`.
Note, however, ``annotated_types`` itself makes no use of the unit string.
"""
unit: str
@dataclass(frozen=True, **SLOTS)
class Predicate(BaseMetadata):
"""``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.
Users should prefer statically inspectable metadata, but if you need the full
power and flexibility of arbitrary runtime predicates... here it is.
We provide a few predefined predicates for common string constraints:
``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
``IsDigits = Predicate(str.isdigit)``. Users are encouraged to use methods which
can be given special handling, and avoid indirection like ``lambda s: s.lower()``.
Some libraries might have special logic to handle certain predicates, e.g. by
checking for `str.isdigit` and using its presence to both call custom logic to
enforce digit-only strings, and customise some generated external schema.
We do not specify what behaviour should be expected for predicates that raise
an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
skip invalid constraints, or statically raise an error; or it might try calling it
and then propagate or discard the resulting exception.
"""
func: Callable[[Any], bool]
def __repr__(self) -> str:
if getattr(self.func, "__name__", "<lambda>") == "<lambda>":
return f"{self.__class__.__name__}({self.func!r})"
if isinstance(self.func, (types.MethodType, types.BuiltinMethodType)) and (
namespace := getattr(self.func.__self__, "__name__", None)
):
return f"{self.__class__.__name__}({namespace}.{self.func.__name__})"
if isinstance(self.func, type(str.isascii)): # method descriptor
return f"{self.__class__.__name__}({self.func.__qualname__})"
return f"{self.__class__.__name__}({self.func.__name__})"
@dataclass
class Not:
func: Callable[[Any], bool]
def __call__(self, __v: Any) -> bool:
return not self.func(__v)
_StrType = TypeVar("_StrType", bound=str)
LowerCase = Annotated[_StrType, Predicate(str.islower)]
"""
Return True if the string is a lowercase string, False otherwise.
A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string.
""" # noqa: E501
UpperCase = Annotated[_StrType, Predicate(str.isupper)]
"""
Return True if the string is an uppercase string, False otherwise.
A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string.
""" # noqa: E501
IsDigit = Annotated[_StrType, Predicate(str.isdigit)]
IsDigits = IsDigit # type: ignore # plural for backwards compatibility, see #63
"""
Return True if the string is a digit string, False otherwise.
A string is a digit string if all characters in the string are digits and there is at least one character in the string.
""" # noqa: E501
IsAscii = Annotated[_StrType, Predicate(str.isascii)]
"""
Return True if all characters in the string are ASCII, False otherwise.
ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too.
"""
_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex])
IsFinite = Annotated[_NumericType, Predicate(math.isfinite)]
"""Return True if x is neither an infinity nor a NaN, and False otherwise."""
IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))]
"""Return True if x is one of infinity or NaN, and False otherwise"""
IsNan = Annotated[_NumericType, Predicate(math.isnan)]
"""Return True if x is a NaN (not a number), and False otherwise."""
IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))]
"""Return True if x is anything but NaN (not a number), and False otherwise."""
IsInfinite = Annotated[_NumericType, Predicate(math.isinf)]
"""Return True if x is a positive or negative infinity, and False otherwise."""
IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))]
"""Return True if x is neither a positive or negative infinity, and False otherwise."""
try:
from typing_extensions import DocInfo, doc # type: ignore [attr-defined]
except ImportError:
@dataclass(frozen=True, **SLOTS)
class DocInfo: # type: ignore [no-redef]
""" "
The return value of doc(), mainly to be used by tools that want to extract the
Annotated documentation at runtime.
"""
documentation: str
"""The documentation string passed to doc()."""
def doc(
documentation: str,
) -> DocInfo:
"""
Add documentation to a type annotation inside of Annotated.
For example:
>>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ...
"""
return DocInfo(documentation)

View File

@@ -0,0 +1,151 @@
import math
import sys
from datetime import date, datetime, timedelta, timezone
from decimal import Decimal
from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple
if sys.version_info < (3, 9):
from typing_extensions import Annotated
else:
from typing import Annotated
import annotated_types as at
class Case(NamedTuple):
"""
A test case for `annotated_types`.
"""
annotation: Any
valid_cases: Iterable[Any]
invalid_cases: Iterable[Any]
def cases() -> Iterable[Case]:
# Gt, Ge, Lt, Le
yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1))
yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1))
yield Case(
Annotated[datetime, at.Gt(datetime(2000, 1, 1))],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
[datetime(2000, 1, 1), datetime(1999, 12, 31)],
)
yield Case(
Annotated[datetime, at.Gt(date(2000, 1, 1))],
[date(2000, 1, 2), date(2000, 1, 3)],
[date(2000, 1, 1), date(1999, 12, 31)],
)
yield Case(
Annotated[datetime, at.Gt(Decimal('1.123'))],
[Decimal('1.1231'), Decimal('123')],
[Decimal('1.123'), Decimal('0')],
)
yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1))
yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1))
yield Case(
Annotated[datetime, at.Ge(datetime(2000, 1, 1))],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
[datetime(1998, 1, 1), datetime(1999, 12, 31)],
)
yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4))
yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9))
yield Case(
Annotated[datetime, at.Lt(datetime(2000, 1, 1))],
[datetime(1999, 12, 31), datetime(1999, 12, 31)],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
)
yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000))
yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9))
yield Case(
Annotated[datetime, at.Le(datetime(2000, 1, 1))],
[datetime(2000, 1, 1), datetime(1999, 12, 31)],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
)
# Interval
yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1))
yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1))
yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1))
yield Case(
Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))],
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
[datetime(2000, 1, 1), datetime(2000, 1, 4)],
)
yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4))
yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1))
# lengths
yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10))
yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10))
yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10))
yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234'))
yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}])
yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4}))
yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4)))
# Timezone
yield Case(
Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)]
)
yield Case(
Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)]
)
yield Case(
Annotated[datetime, at.Timezone(timezone.utc)],
[datetime(2000, 1, 1, tzinfo=timezone.utc)],
[datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
)
yield Case(
Annotated[datetime, at.Timezone('Europe/London')],
[datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))],
[datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
)
# Quantity
yield Case(Annotated[float, at.Unit(unit='m')], (5, 4.2), ('5m', '4.2m'))
# predicate types
yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom'])
yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC'])
yield Case(at.IsDigit[str], ['123'], ['', 'ab', 'a1b2'])
yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀'])
yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5])
yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf])
yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23])
yield Case(at.IsNan[float], [math.nan], [1.23, math.inf])
yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan])
yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23])
yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf])
# check stacked predicates
yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan])
# doc
yield Case(Annotated[int, at.doc("A number")], [1, 2], [])
# custom GroupedMetadata
class MyCustomGroupedMetadata(at.GroupedMetadata):
def __iter__(self) -> Iterator[at.Predicate]:
yield at.Predicate(lambda x: float(x).is_integer())
yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5])

View File

@@ -0,0 +1,94 @@
Metadata-Version: 2.4
Name: anyio
Version: 4.11.0
Summary: High-level concurrency and networking framework on top of asyncio or Trio
Author-email: Alex Grönholm <alex.gronholm@nextday.fi>
License-Expression: MIT
Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/
Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html
Project-URL: Source code, https://github.com/agronholm/anyio
Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: Framework :: AnyIO
Classifier: Typing :: Typed
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: 3.14
Requires-Python: >=3.9
Description-Content-Type: text/x-rst
License-File: LICENSE
Requires-Dist: exceptiongroup>=1.0.2; python_version < "3.11"
Requires-Dist: idna>=2.8
Requires-Dist: sniffio>=1.1
Requires-Dist: typing_extensions>=4.5; python_version < "3.13"
Provides-Extra: trio
Requires-Dist: trio>=0.31.0; extra == "trio"
Dynamic: license-file
.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg
:target: https://github.com/agronholm/anyio/actions/workflows/test.yml
:alt: Build Status
.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master
:target: https://coveralls.io/github/agronholm/anyio?branch=master
:alt: Code Coverage
.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest
:target: https://anyio.readthedocs.io/en/latest/?badge=latest
:alt: Documentation
.. image:: https://badges.gitter.im/gitterHQ/gitter.svg
:target: https://gitter.im/python-trio/AnyIO
:alt: Gitter chat
AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or
Trio_. It implements Trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony
with the native SC of Trio itself.
Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or
Trio_. AnyIO can also be adopted into a library or application incrementally, bit by bit; no full
refactoring necessary. It will blend in with the native libraries of your chosen backend.
To find out why you might want to use AnyIO's APIs instead of asyncio's, you can read about it
`here <https://anyio.readthedocs.io/en/stable/why.html>`_.
Documentation
-------------
View full documentation at: https://anyio.readthedocs.io/
Features
--------
AnyIO offers the following functionality:
* Task groups (nurseries_ in trio terminology)
* High-level networking (TCP, UDP and UNIX sockets)
* `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python
3.8)
* async/await style UDP sockets (unlike asyncio where you still have to use Transports and
Protocols)
* A versatile API for byte streams and object streams
* Inter-task synchronization and communication (locks, conditions, events, semaphores, object
streams)
* Worker threads
* Subprocesses
* Subinterpreter support for code parallelization (on Python 3.13 and later)
* Asynchronous file I/O (using worker threads)
* Signal handling
AnyIO also comes with its own pytest_ plugin, which supports asynchronous fixtures as well.
It even works with the popular Hypothesis_ library.
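As a minimal, illustrative sketch of the task group API mentioned above
(``greet`` and ``main`` are made-up names; ``anyio.run``, ``anyio.sleep`` and
``anyio.create_task_group`` are public AnyIO functions)::

    import anyio

    async def greet(name: str) -> None:
        await anyio.sleep(0.1)
        print(f"hello, {name}")

    async def main() -> None:
        # The task group waits for both children before the block exits.
        async with anyio.create_task_group() as tg:
            tg.start_soon(greet, "world")
            tg.start_soon(greet, "anyio")

    anyio.run(main)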
.. _asyncio: https://docs.python.org/3/library/asyncio.html
.. _Trio: https://github.com/python-trio/trio
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning
.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs
.. _pytest: https://docs.pytest.org/en/latest/
.. _Hypothesis: https://hypothesis.works/

View File

@@ -0,0 +1,90 @@
anyio-4.11.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
anyio-4.11.0.dist-info/METADATA,sha256=yrQcStE3zgP4G-Zz-7qVaw99qb-DTM2gHgwP_1pABu4,4091
anyio-4.11.0.dist-info/RECORD,,
anyio-4.11.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
anyio-4.11.0.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39
anyio-4.11.0.dist-info/licenses/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081
anyio-4.11.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6
anyio/__init__.py,sha256=M21FJC_0BhRH7AdXvs4z3uayj4ytNAxuJ7UnshLD5RM,6091
anyio/__pycache__/__init__.cpython-312.pyc,,
anyio/__pycache__/from_thread.cpython-312.pyc,,
anyio/__pycache__/lowlevel.cpython-312.pyc,,
anyio/__pycache__/pytest_plugin.cpython-312.pyc,,
anyio/__pycache__/to_interpreter.cpython-312.pyc,,
anyio/__pycache__/to_process.cpython-312.pyc,,
anyio/__pycache__/to_thread.cpython-312.pyc,,
anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_backends/__pycache__/__init__.cpython-312.pyc,,
anyio/_backends/__pycache__/_asyncio.cpython-312.pyc,,
anyio/_backends/__pycache__/_trio.cpython-312.pyc,,
anyio/_backends/_asyncio.py,sha256=jTPlTu4WO9bTXUJkxFQ0P1GbG4I14nOWpD1FMzKmkvA,98052
anyio/_backends/_trio.py,sha256=ScNVMQB0iiuJMAon1epQCVOVbIbf-Lxnfb5OxujzMok,42398
anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_core/__pycache__/__init__.cpython-312.pyc,,
anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc,,
anyio/_core/__pycache__/_contextmanagers.cpython-312.pyc,,
anyio/_core/__pycache__/_eventloop.cpython-312.pyc,,
anyio/_core/__pycache__/_exceptions.cpython-312.pyc,,
anyio/_core/__pycache__/_fileio.cpython-312.pyc,,
anyio/_core/__pycache__/_resources.cpython-312.pyc,,
anyio/_core/__pycache__/_signals.cpython-312.pyc,,
anyio/_core/__pycache__/_sockets.cpython-312.pyc,,
anyio/_core/__pycache__/_streams.cpython-312.pyc,,
anyio/_core/__pycache__/_subprocesses.cpython-312.pyc,,
anyio/_core/__pycache__/_synchronization.cpython-312.pyc,,
anyio/_core/__pycache__/_tasks.cpython-312.pyc,,
anyio/_core/__pycache__/_tempfile.cpython-312.pyc,,
anyio/_core/__pycache__/_testing.cpython-312.pyc,,
anyio/_core/__pycache__/_typedattr.cpython-312.pyc,,
anyio/_core/_asyncio_selector_thread.py,sha256=2PdxFM3cs02Kp6BSppbvmRT7q7asreTW5FgBxEsflBo,5626
anyio/_core/_contextmanagers.py,sha256=YInBCabiEeS-UaP_Jdxa1CaFC71ETPW8HZTHIM8Rsc8,7215
anyio/_core/_eventloop.py,sha256=mg_TuXTbfUiuWz2Evb19dxVSC2TVTOtypKpzHV7-tlI,4667
anyio/_core/_exceptions.py,sha256=fR2SvRUBYVHvolNKbzWSLt8FC_5NFB2OAzGD738fD8Q,4257
anyio/_core/_fileio.py,sha256=KATysDZP7bvwwjpUwEaGAc0xGouJgAPqNVpnBMTsToY,23332
anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435
anyio/_core/_signals.py,sha256=vulT1M1xdLYtAR-eY5TamIgaf1WTlOwOrMGwswlTTr8,905
anyio/_core/_sockets.py,sha256=aTbgMr0qPmBPfrapxLykyajsmS7IAerhW9_Qk5r5E18,34311
anyio/_core/_streams.py,sha256=OnaKgoDD-FcMSwLvkoAUGP51sG2ZdRvMpxt9q2w1gYA,1804
anyio/_core/_subprocesses.py,sha256=EXm5igL7dj55iYkPlbYVAqtbqxJxjU-6OndSTIx9SRg,8047
anyio/_core/_synchronization.py,sha256=pZ_9gl9g0bsb92-wnJ5HX-q9TgWf-SjK1Xo-DGZj2Ok,20858
anyio/_core/_tasks.py,sha256=km6hVE1fsuIenya3MDud8KP6-J_bNzlgYC10wUxI7iA,4880
anyio/_core/_tempfile.py,sha256=lHb7CW4FyIlpkf5ADAf4VmLHCKwEHF9nxqNyBCFFUiA,19697
anyio/_core/_testing.py,sha256=YUGwA5cgFFbUTv4WFd7cv_BSVr4ryTtPp8owQA3JdWE,2118
anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508
anyio/abc/__init__.py,sha256=6mWhcl_pGXhrgZVHP_TCfMvIXIOp9mroEFM90fYCU_U,2869
anyio/abc/__pycache__/__init__.cpython-312.pyc,,
anyio/abc/__pycache__/_eventloop.cpython-312.pyc,,
anyio/abc/__pycache__/_resources.cpython-312.pyc,,
anyio/abc/__pycache__/_sockets.cpython-312.pyc,,
anyio/abc/__pycache__/_streams.cpython-312.pyc,,
anyio/abc/__pycache__/_subprocesses.cpython-312.pyc,,
anyio/abc/__pycache__/_tasks.cpython-312.pyc,,
anyio/abc/__pycache__/_testing.cpython-312.pyc,,
anyio/abc/_eventloop.py,sha256=GTZbdItBHcj_b-8K2XylET2-bBYLZ3XjW4snY7vK7LE,10900
anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783
anyio/abc/_sockets.py,sha256=ECTY0jLEF18gryANHR3vFzXzGdZ-xPwELq1QdgOb0Jo,13258
anyio/abc/_streams.py,sha256=005GKSCXGprxnhucILboSqc2JFovECZk9m3p-qqxXVc,7640
anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067
anyio/abc/_tasks.py,sha256=KC7wrciE48AINOI-AhPutnFhe1ewfP7QnamFlDzqesQ,3721
anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821
anyio/from_thread.py,sha256=MmzkWz7yq-d0G3aOoEyyG2nh_Dm7h8ggRWFsYteNVJs,18927
anyio/lowlevel.py,sha256=2a3elbKSw59boeyHBzC9VT7vark3o-kBypXWKGQ3jqU,4405
anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/pytest_plugin.py,sha256=cTvttH7R4e8Jh4C3mTOV8QVhOYEJtN1dCZFnRoH9lX4,10204
anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/streams/__pycache__/__init__.cpython-312.pyc,,
anyio/streams/__pycache__/buffered.cpython-312.pyc,,
anyio/streams/__pycache__/file.cpython-312.pyc,,
anyio/streams/__pycache__/memory.cpython-312.pyc,,
anyio/streams/__pycache__/stapled.cpython-312.pyc,,
anyio/streams/__pycache__/text.cpython-312.pyc,,
anyio/streams/__pycache__/tls.cpython-312.pyc,,
anyio/streams/buffered.py,sha256=joUPdz0OoRfKgGmMpHI9vZyMNm6ly9iFlofrZUPs9cQ,6162
anyio/streams/file.py,sha256=6uoTNb5KbMoj-6gS3_xrrL8uZN8Q4iIvOS1WtGyFfKw,4383
anyio/streams/memory.py,sha256=GcbF3cahdsdFZtcTZaIKpZXPDZKogj18wWPPmE0OmGU,10620
anyio/streams/stapled.py,sha256=U09pCrmOw9kkNhe6tKopsm1QIMT1lFTFvtb-A7SIe4k,4302
anyio/streams/text.py,sha256=tCJ8ljavGM-HY0aL-5Twxv-Kyw1BfR0B4OtVIB6kZ9w,5662
anyio/streams/tls.py,sha256=siSaaRyX-XnfC7Jbn9VjtIdVzJkDsvIW_2pSEVheDFQ,15275
anyio/to_interpreter.py,sha256=Z0-kLCxlITjFG_RM_TNdUlEnog94l48GXVDZ80w0URc,6986
anyio/to_process.py,sha256=ZvruelRM-HNmqDaql4sdNODg2QD_uSlwSCxnV4OhsfQ,9595
anyio/to_thread.py,sha256=WM2JQ2MbVsd5D5CM08bQiTwzZIvpsGjfH1Fy247KoDQ,2396

View File

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (80.9.0)
Root-Is-Purelib: true
Tag: py3-none-any

View File

@@ -0,0 +1,2 @@
[pytest11]
anyio = anyio.pytest_plugin

View File

@@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2018 Alex Grönholm
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,110 @@
from __future__ import annotations
from ._core._contextmanagers import AsyncContextManagerMixin as AsyncContextManagerMixin
from ._core._contextmanagers import ContextManagerMixin as ContextManagerMixin
from ._core._eventloop import current_time as current_time
from ._core._eventloop import get_all_backends as get_all_backends
from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class
from ._core._eventloop import run as run
from ._core._eventloop import sleep as sleep
from ._core._eventloop import sleep_forever as sleep_forever
from ._core._eventloop import sleep_until as sleep_until
from ._core._exceptions import BrokenResourceError as BrokenResourceError
from ._core._exceptions import BrokenWorkerInterpreter as BrokenWorkerInterpreter
from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess
from ._core._exceptions import BusyResourceError as BusyResourceError
from ._core._exceptions import ClosedResourceError as ClosedResourceError
from ._core._exceptions import ConnectionFailed as ConnectionFailed
from ._core._exceptions import DelimiterNotFound as DelimiterNotFound
from ._core._exceptions import EndOfStream as EndOfStream
from ._core._exceptions import IncompleteRead as IncompleteRead
from ._core._exceptions import NoEventLoopError as NoEventLoopError
from ._core._exceptions import RunFinishedError as RunFinishedError
from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError
from ._core._exceptions import WouldBlock as WouldBlock
from ._core._fileio import AsyncFile as AsyncFile
from ._core._fileio import Path as Path
from ._core._fileio import open_file as open_file
from ._core._fileio import wrap_file as wrap_file
from ._core._resources import aclose_forcefully as aclose_forcefully
from ._core._signals import open_signal_receiver as open_signal_receiver
from ._core._sockets import TCPConnectable as TCPConnectable
from ._core._sockets import UNIXConnectable as UNIXConnectable
from ._core._sockets import as_connectable as as_connectable
from ._core._sockets import connect_tcp as connect_tcp
from ._core._sockets import connect_unix as connect_unix
from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket
from ._core._sockets import (
create_connected_unix_datagram_socket as create_connected_unix_datagram_socket,
)
from ._core._sockets import create_tcp_listener as create_tcp_listener
from ._core._sockets import create_udp_socket as create_udp_socket
from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket
from ._core._sockets import create_unix_listener as create_unix_listener
from ._core._sockets import getaddrinfo as getaddrinfo
from ._core._sockets import getnameinfo as getnameinfo
from ._core._sockets import notify_closing as notify_closing
from ._core._sockets import wait_readable as wait_readable
from ._core._sockets import wait_socket_readable as wait_socket_readable
from ._core._sockets import wait_socket_writable as wait_socket_writable
from ._core._sockets import wait_writable as wait_writable
from ._core._streams import create_memory_object_stream as create_memory_object_stream
from ._core._subprocesses import open_process as open_process
from ._core._subprocesses import run_process as run_process
from ._core._synchronization import CapacityLimiter as CapacityLimiter
from ._core._synchronization import (
CapacityLimiterStatistics as CapacityLimiterStatistics,
)
from ._core._synchronization import Condition as Condition
from ._core._synchronization import ConditionStatistics as ConditionStatistics
from ._core._synchronization import Event as Event
from ._core._synchronization import EventStatistics as EventStatistics
from ._core._synchronization import Lock as Lock
from ._core._synchronization import LockStatistics as LockStatistics
from ._core._synchronization import ResourceGuard as ResourceGuard
from ._core._synchronization import Semaphore as Semaphore
from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics
from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED
from ._core._tasks import CancelScope as CancelScope
from ._core._tasks import create_task_group as create_task_group
from ._core._tasks import current_effective_deadline as current_effective_deadline
from ._core._tasks import fail_after as fail_after
from ._core._tasks import move_on_after as move_on_after
from ._core._tempfile import NamedTemporaryFile as NamedTemporaryFile
from ._core._tempfile import SpooledTemporaryFile as SpooledTemporaryFile
from ._core._tempfile import TemporaryDirectory as TemporaryDirectory
from ._core._tempfile import TemporaryFile as TemporaryFile
from ._core._tempfile import gettempdir as gettempdir
from ._core._tempfile import gettempdirb as gettempdirb
from ._core._tempfile import mkdtemp as mkdtemp
from ._core._tempfile import mkstemp as mkstemp
from ._core._testing import TaskInfo as TaskInfo
from ._core._testing import get_current_task as get_current_task
from ._core._testing import get_running_tasks as get_running_tasks
from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked
from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider
from ._core._typedattr import TypedAttributeSet as TypedAttributeSet
from ._core._typedattr import typed_attribute as typed_attribute
# Re-export imports so they look like they live directly in this package
for __value in list(locals().values()):
if getattr(__value, "__module__", "").startswith("anyio."):
__value.__module__ = __name__
del __value
def __getattr__(attr: str) -> type[BrokenWorkerInterpreter]:
"""Support deprecated aliases."""
if attr == "BrokenWorkerIntepreter":
import warnings
warnings.warn(
"The 'BrokenWorkerIntepreter' alias is deprecated, use 'BrokenWorkerInterpreter' instead.",
DeprecationWarning,
stacklevel=2,
)
return BrokenWorkerInterpreter
raise AttributeError(f"module {__name__!r} has no attribute {attr!r}")

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,167 @@
from __future__ import annotations
import asyncio
import socket
import threading
from collections.abc import Callable
from selectors import EVENT_READ, EVENT_WRITE, DefaultSelector
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from _typeshed import FileDescriptorLike
_selector_lock = threading.Lock()
_selector: Selector | None = None
class Selector:
def __init__(self) -> None:
self._thread = threading.Thread(target=self.run, name="AnyIO socket selector")
self._selector = DefaultSelector()
self._send, self._receive = socket.socketpair()
self._send.setblocking(False)
self._receive.setblocking(False)
# This somewhat reduces the amount of memory wasted queueing up data
# for wakeups. With these settings, maximum number of 1-byte sends
# before getting BlockingIOError:
# Linux 4.8: 6
# macOS (darwin 15.5): 1
# Windows 10: 525347
# Windows you're weird. (And on Windows setting SNDBUF to 0 makes send
# blocking, even on non-blocking sockets, so don't do that.)
self._receive.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1)
self._send.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1)
# On Windows this is a TCP socket so this might matter. On other
# platforms this fails b/c AF_UNIX sockets aren't actually TCP.
try:
self._send.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
except OSError:
pass
self._selector.register(self._receive, EVENT_READ)
self._closed = False
def start(self) -> None:
self._thread.start()
threading._register_atexit(self._stop) # type: ignore[attr-defined]
def _stop(self) -> None:
global _selector
self._closed = True
self._notify_self()
self._send.close()
self._thread.join()
self._selector.unregister(self._receive)
self._receive.close()
self._selector.close()
_selector = None
assert not self._selector.get_map(), (
"selector still has registered file descriptors after shutdown"
)
def _notify_self(self) -> None:
try:
self._send.send(b"\x00")
except BlockingIOError:
pass
def add_reader(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None:
loop = asyncio.get_running_loop()
try:
key = self._selector.get_key(fd)
except KeyError:
self._selector.register(fd, EVENT_READ, {EVENT_READ: (loop, callback)})
else:
if EVENT_READ in key.data:
raise ValueError(
"this file descriptor is already registered for reading"
)
key.data[EVENT_READ] = loop, callback
self._selector.modify(fd, key.events | EVENT_READ, key.data)
self._notify_self()
def add_writer(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None:
loop = asyncio.get_running_loop()
try:
key = self._selector.get_key(fd)
except KeyError:
self._selector.register(fd, EVENT_WRITE, {EVENT_WRITE: (loop, callback)})
else:
if EVENT_WRITE in key.data:
raise ValueError(
"this file descriptor is already registered for writing"
)
key.data[EVENT_WRITE] = loop, callback
self._selector.modify(fd, key.events | EVENT_WRITE, key.data)
self._notify_self()
def remove_reader(self, fd: FileDescriptorLike) -> bool:
try:
key = self._selector.get_key(fd)
except KeyError:
return False
if new_events := key.events ^ EVENT_READ:
del key.data[EVENT_READ]
self._selector.modify(fd, new_events, key.data)
else:
self._selector.unregister(fd)
return True
def remove_writer(self, fd: FileDescriptorLike) -> bool:
try:
key = self._selector.get_key(fd)
except KeyError:
return False
if new_events := key.events ^ EVENT_WRITE:
del key.data[EVENT_WRITE]
self._selector.modify(fd, new_events, key.data)
else:
self._selector.unregister(fd)
return True
def run(self) -> None:
while not self._closed:
for key, events in self._selector.select():
if key.fileobj is self._receive:
try:
while self._receive.recv(4096):
pass
except BlockingIOError:
pass
continue
if events & EVENT_READ:
loop, callback = key.data[EVENT_READ]
self.remove_reader(key.fd)
try:
loop.call_soon_threadsafe(callback)
except RuntimeError:
pass # the loop was already closed
if events & EVENT_WRITE:
loop, callback = key.data[EVENT_WRITE]
self.remove_writer(key.fd)
try:
loop.call_soon_threadsafe(callback)
except RuntimeError:
pass # the loop was already closed
def get_selector() -> Selector:
global _selector
with _selector_lock:
if _selector is None:
_selector = Selector()
_selector.start()
return _selector

View File

@@ -0,0 +1,200 @@
from __future__ import annotations
from abc import abstractmethod
from contextlib import AbstractAsyncContextManager, AbstractContextManager
from inspect import isasyncgen, iscoroutine, isgenerator
from types import TracebackType
from typing import Protocol, TypeVar, cast, final
_T_co = TypeVar("_T_co", covariant=True)
_ExitT_co = TypeVar("_ExitT_co", covariant=True, bound="bool | None")
class _SupportsCtxMgr(Protocol[_T_co, _ExitT_co]):
def __contextmanager__(self) -> AbstractContextManager[_T_co, _ExitT_co]: ...
class _SupportsAsyncCtxMgr(Protocol[_T_co, _ExitT_co]):
def __asynccontextmanager__(
self,
) -> AbstractAsyncContextManager[_T_co, _ExitT_co]: ...
class ContextManagerMixin:
"""
Mixin class providing context manager functionality via a generator-based
implementation.
This class allows you to implement a context manager via :meth:`__contextmanager__`
which should return a generator. The mechanics are meant to mirror those of
:func:`@contextmanager <contextlib.contextmanager>`.
.. note:: Classes using this mix-in are not reentrant as context managers, meaning
that once you have entered one, you cannot re-enter it before exiting it first.
.. seealso:: :doc:`contextmanagers`
"""
__cm: AbstractContextManager[object, bool | None] | None = None
@final
def __enter__(self: _SupportsCtxMgr[_T_co, bool | None]) -> _T_co:
# Needed for mypy to assume self still has the __cm member
assert isinstance(self, ContextManagerMixin)
if self.__cm is not None:
raise RuntimeError(
f"this {self.__class__.__qualname__} has already been entered"
)
cm = self.__contextmanager__()
if not isinstance(cm, AbstractContextManager):
if isgenerator(cm):
raise TypeError(
"__contextmanager__() returned a generator object instead of "
"a context manager. Did you forget to add the @contextmanager "
"decorator?"
)
raise TypeError(
f"__contextmanager__() did not return a context manager object, "
f"but {cm.__class__!r}"
)
if cm is self:
raise TypeError(
f"{self.__class__.__qualname__}.__contextmanager__() returned "
f"self. Did you forget to add the @contextmanager decorator and a "
f"'yield' statement?"
)
value = cm.__enter__()
self.__cm = cm
return value
@final
def __exit__(
self: _SupportsCtxMgr[object, _ExitT_co],
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> _ExitT_co:
# Needed for mypy to assume self still has the __cm member
assert isinstance(self, ContextManagerMixin)
if self.__cm is None:
raise RuntimeError(
f"this {self.__class__.__qualname__} has not been entered yet"
)
# Prevent circular references
cm = self.__cm
del self.__cm
return cast(_ExitT_co, cm.__exit__(exc_type, exc_val, exc_tb))
@abstractmethod
def __contextmanager__(self) -> AbstractContextManager[object, bool | None]:
"""
Implement your context manager logic here.
This method **must** be decorated with
:func:`@contextmanager <contextlib.contextmanager>`.
.. note:: Remember that the ``yield`` will raise any exception raised in the
enclosed context block, so use a ``finally:`` block to clean up resources!
:return: a context manager object
"""
class AsyncContextManagerMixin:
"""
Mixin class providing async context manager functionality via a generator-based
implementation.
This class allows you to implement a context manager via
:meth:`__asynccontextmanager__`. The mechanics are meant to mirror those of
:func:`@asynccontextmanager <contextlib.asynccontextmanager>`.
.. note:: Classes using this mix-in are not reentrant as context managers, meaning
that once you have entered one, you cannot re-enter it before exiting it first.
.. seealso:: :doc:`contextmanagers`
"""
__cm: AbstractAsyncContextManager[object, bool | None] | None = None
@final
async def __aenter__(self: _SupportsAsyncCtxMgr[_T_co, bool | None]) -> _T_co:
# Needed for mypy to assume self still has the __cm member
assert isinstance(self, AsyncContextManagerMixin)
if self.__cm is not None:
raise RuntimeError(
f"this {self.__class__.__qualname__} has already been entered"
)
cm = self.__asynccontextmanager__()
if not isinstance(cm, AbstractAsyncContextManager):
if isasyncgen(cm):
raise TypeError(
"__asynccontextmanager__() returned an async generator instead of "
"an async context manager. Did you forget to add the "
"@asynccontextmanager decorator?"
)
elif iscoroutine(cm):
cm.close()
raise TypeError(
"__asynccontextmanager__() returned a coroutine object instead of "
"an async context manager. Did you forget to add the "
"@asynccontextmanager decorator and a 'yield' statement?"
)
raise TypeError(
f"__asynccontextmanager__() did not return an async context manager, "
f"but {cm.__class__!r}"
)
if cm is self:
raise TypeError(
f"{self.__class__.__qualname__}.__asynccontextmanager__() returned "
f"self. Did you forget to add the @asynccontextmanager decorator and a "
f"'yield' statement?"
)
value = await cm.__aenter__()
self.__cm = cm
return value
@final
async def __aexit__(
self: _SupportsAsyncCtxMgr[object, _ExitT_co],
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> _ExitT_co:
assert isinstance(self, AsyncContextManagerMixin)
if self.__cm is None:
raise RuntimeError(
f"this {self.__class__.__qualname__} has not been entered yet"
)
# Prevent circular references
cm = self.__cm
del self.__cm
return cast(_ExitT_co, await cm.__aexit__(exc_type, exc_val, exc_tb))
@abstractmethod
def __asynccontextmanager__(
self,
) -> AbstractAsyncContextManager[object, bool | None]:
"""
Implement your async context manager logic here.
This method **must** be decorated with
:func:`@asynccontextmanager <contextlib.asynccontextmanager>`.
.. note:: Remember that the ``yield`` will raise any exception raised in the
enclosed context block, so use a ``finally:`` block to clean up resources!
:return: an async context manager object
"""

View File

@@ -0,0 +1,166 @@
from __future__ import annotations
import math
import sys
import threading
from collections.abc import Awaitable, Callable, Generator
from contextlib import contextmanager
from importlib import import_module
from typing import TYPE_CHECKING, Any, TypeVar
import sniffio
if sys.version_info >= (3, 11):
from typing import TypeVarTuple, Unpack
else:
from typing_extensions import TypeVarTuple, Unpack
if TYPE_CHECKING:
from ..abc import AsyncBackend
# This must be updated when new backends are introduced
BACKENDS = "asyncio", "trio"
T_Retval = TypeVar("T_Retval")
PosArgsT = TypeVarTuple("PosArgsT")
threadlocals = threading.local()
loaded_backends: dict[str, type[AsyncBackend]] = {}
def run(
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
*args: Unpack[PosArgsT],
backend: str = "asyncio",
backend_options: dict[str, Any] | None = None,
) -> T_Retval:
"""
Run the given coroutine function in an asynchronous event loop.
The current thread must not be already running an event loop.
:param func: a coroutine function
:param args: positional arguments to ``func``
:param backend: name of the asynchronous event loop implementation; currently
either ``asyncio`` or ``trio``
:param backend_options: keyword arguments to call the backend ``run()``
implementation with (documented :ref:`here <backend options>`)
:return: the return value of the coroutine function
:raises RuntimeError: if an asynchronous event loop is already running in this
thread
:raises LookupError: if the named backend is not found
"""
try:
asynclib_name = sniffio.current_async_library()
except sniffio.AsyncLibraryNotFoundError:
pass
else:
raise RuntimeError(f"Already running {asynclib_name} in this thread")
try:
async_backend = get_async_backend(backend)
except ImportError as exc:
raise LookupError(f"No such backend: {backend}") from exc
token = None
if sniffio.current_async_library_cvar.get(None) is None:
# Since we're in control of the event loop, we can cache the name of the async
# library
token = sniffio.current_async_library_cvar.set(backend)
try:
backend_options = backend_options or {}
return async_backend.run(func, args, {}, backend_options)
finally:
if token:
sniffio.current_async_library_cvar.reset(token)
async def sleep(delay: float) -> None:
"""
Pause the current task for the specified duration.
:param delay: the duration, in seconds
"""
return await get_async_backend().sleep(delay)
async def sleep_forever() -> None:
"""
Pause the current task until it's cancelled.
This is a shortcut for ``sleep(math.inf)``.
.. versionadded:: 3.1
"""
await sleep(math.inf)
async def sleep_until(deadline: float) -> None:
"""
Pause the current task until the given time.
:param deadline: the absolute time to wake up at (according to the internal
monotonic clock of the event loop)
.. versionadded:: 3.1
"""
now = current_time()
await sleep(max(deadline - now, 0))
def current_time() -> float:
"""
Return the current value of the event loop's internal clock.
:return: the clock value (seconds)
"""
return get_async_backend().current_time()
def get_all_backends() -> tuple[str, ...]:
"""Return a tuple of the names of all built-in backends."""
return BACKENDS
def get_cancelled_exc_class() -> type[BaseException]:
"""Return the current async library's cancellation exception class."""
return get_async_backend().cancelled_exception_class()
#
# Private API
#
@contextmanager
def claim_worker_thread(
backend_class: type[AsyncBackend], token: object
) -> Generator[Any, None, None]:
from ..lowlevel import EventLoopToken
threadlocals.current_token = EventLoopToken(backend_class, token)
try:
yield
finally:
del threadlocals.current_token
def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]:
if asynclib_name is None:
asynclib_name = sniffio.current_async_library()
# We use our own dict instead of sys.modules to get the already imported back-end
# class because the appropriate modules in sys.modules could potentially be only
# partially initialized
try:
return loaded_backends[asynclib_name]
except KeyError:
module = import_module(f"anyio._backends._{asynclib_name}")
loaded_backends[asynclib_name] = module.backend_class
return module.backend_class

View File

@@ -0,0 +1,153 @@
from __future__ import annotations
import sys
from collections.abc import Generator
from textwrap import dedent
from typing import Any
if sys.version_info < (3, 11):
from exceptiongroup import BaseExceptionGroup
class BrokenResourceError(Exception):
"""
Raised when trying to use a resource that has been rendered unusable due to external
causes (e.g. a send stream whose peer has disconnected).
"""
class BrokenWorkerProcess(Exception):
"""
Raised by :meth:`~anyio.to_process.run_sync` if the worker process terminates abruptly or
otherwise misbehaves.
"""
class BrokenWorkerInterpreter(Exception):
"""
Raised by :meth:`~anyio.to_interpreter.run_sync` if an unexpected exception is
raised in the subinterpreter.
"""
def __init__(self, excinfo: Any):
# This was adapted from concurrent.futures.interpreter.ExecutionFailed
msg = excinfo.formatted
if not msg:
if excinfo.type and excinfo.msg:
msg = f"{excinfo.type.__name__}: {excinfo.msg}"
else:
msg = excinfo.type.__name__ or excinfo.msg
super().__init__(msg)
self.excinfo = excinfo
def __str__(self) -> str:
try:
formatted = self.excinfo.errdisplay
except Exception:
return super().__str__()
else:
return dedent(
f"""
{super().__str__()}
Uncaught in the interpreter:
{formatted}
""".strip()
)
class BusyResourceError(Exception):
"""
Raised when two tasks are trying to read from or write to the same resource
concurrently.
"""
def __init__(self, action: str):
super().__init__(f"Another task is already {action} this resource")
class ClosedResourceError(Exception):
"""Raised when trying to use a resource that has been closed."""
class ConnectionFailed(OSError):
"""
Raised when a connection attempt fails.
.. note:: This class inherits from :exc:`OSError` for backwards compatibility.
"""
def iterate_exceptions(
exception: BaseException,
) -> Generator[BaseException, None, None]:
if isinstance(exception, BaseExceptionGroup):
for exc in exception.exceptions:
yield from iterate_exceptions(exc)
else:
yield exception
class DelimiterNotFound(Exception):
"""
Raised during
:meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
maximum number of bytes has been read without the delimiter being found.
"""
def __init__(self, max_bytes: int) -> None:
super().__init__(
f"The delimiter was not found among the first {max_bytes} bytes"
)
class EndOfStream(Exception):
"""
Raised when trying to read from a stream that has been closed from the other end.
"""
class IncompleteRead(Exception):
"""
Raised during
:meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or
:meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
connection is closed before the requested amount of bytes has been read.
"""
def __init__(self) -> None:
super().__init__(
"The stream was closed before the read operation could be completed"
)
class TypedAttributeLookupError(LookupError):
"""
Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute
is not found and no default value has been given.
"""
class WouldBlock(Exception):
"""Raised by ``X_nowait`` functions if ``X()`` would block."""
class NoEventLoopError(RuntimeError):
"""
Raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync` if
not called from an AnyIO worker thread and no ``token`` was passed.
"""
class RunFinishedError(RuntimeError):
"""
Raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync` if the event
loop associated with the explicitly passed token has already finished.
"""
def __init__(self) -> None:
super().__init__(
"The event loop associated with the given token has already finished"
)

View File

@@ -0,0 +1,740 @@
from __future__ import annotations
import os
import pathlib
import sys
from collections.abc import (
AsyncIterator,
Callable,
Iterable,
Iterator,
Sequence,
)
from dataclasses import dataclass
from functools import partial
from os import PathLike
from typing import (
IO,
TYPE_CHECKING,
Any,
AnyStr,
ClassVar,
Final,
Generic,
overload,
)
from .. import to_thread
from ..abc import AsyncResource
if TYPE_CHECKING:
from types import ModuleType
from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer
else:
ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object
class AsyncFile(AsyncResource, Generic[AnyStr]):
"""
An asynchronous file object.
This class wraps a standard file object and provides async-friendly versions of the
following blocking methods (where available on the original file object):
* read
* read1
* readline
* readlines
* readinto
* readinto1
* write
* writelines
* truncate
* seek
* tell
* flush
All other methods are directly passed through.
This class supports the asynchronous context manager protocol which closes the
underlying file at the end of the context block.
This class also supports asynchronous iteration::
async with await open_file(...) as f:
async for line in f:
print(line)
"""
def __init__(self, fp: IO[AnyStr]) -> None:
self._fp: Any = fp
def __getattr__(self, name: str) -> object:
return getattr(self._fp, name)
@property
def wrapped(self) -> IO[AnyStr]:
"""The wrapped file object."""
return self._fp
async def __aiter__(self) -> AsyncIterator[AnyStr]:
while True:
line = await self.readline()
if line:
yield line
else:
break
async def aclose(self) -> None:
return await to_thread.run_sync(self._fp.close)
async def read(self, size: int = -1) -> AnyStr:
return await to_thread.run_sync(self._fp.read, size)
async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes:
return await to_thread.run_sync(self._fp.read1, size)
async def readline(self) -> AnyStr:
return await to_thread.run_sync(self._fp.readline)
async def readlines(self) -> list[AnyStr]:
return await to_thread.run_sync(self._fp.readlines)
async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> int:
return await to_thread.run_sync(self._fp.readinto, b)
async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> int:
return await to_thread.run_sync(self._fp.readinto1, b)
@overload
async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: ...
@overload
async def write(self: AsyncFile[str], b: str) -> int: ...
async def write(self, b: ReadableBuffer | str) -> int:
return await to_thread.run_sync(self._fp.write, b)
@overload
async def writelines(
self: AsyncFile[bytes], lines: Iterable[ReadableBuffer]
) -> None: ...
@overload
async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: ...
async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None:
return await to_thread.run_sync(self._fp.writelines, lines)
async def truncate(self, size: int | None = None) -> int:
return await to_thread.run_sync(self._fp.truncate, size)
async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int:
return await to_thread.run_sync(self._fp.seek, offset, whence)
async def tell(self) -> int:
return await to_thread.run_sync(self._fp.tell)
async def flush(self) -> None:
return await to_thread.run_sync(self._fp.flush)
@overload
async def open_file(
file: str | PathLike[str] | int,
mode: OpenBinaryMode,
buffering: int = ...,
encoding: str | None = ...,
errors: str | None = ...,
newline: str | None = ...,
closefd: bool = ...,
opener: Callable[[str, int], int] | None = ...,
) -> AsyncFile[bytes]: ...
@overload
async def open_file(
file: str | PathLike[str] | int,
mode: OpenTextMode = ...,
buffering: int = ...,
encoding: str | None = ...,
errors: str | None = ...,
newline: str | None = ...,
closefd: bool = ...,
opener: Callable[[str, int], int] | None = ...,
) -> AsyncFile[str]: ...
async def open_file(
file: str | PathLike[str] | int,
mode: str = "r",
buffering: int = -1,
encoding: str | None = None,
errors: str | None = None,
newline: str | None = None,
closefd: bool = True,
opener: Callable[[str, int], int] | None = None,
) -> AsyncFile[Any]:
"""
Open a file asynchronously.
The arguments are exactly the same as for the builtin :func:`open`.
:return: an asynchronous file object
"""
fp = await to_thread.run_sync(
open, file, mode, buffering, encoding, errors, newline, closefd, opener
)
return AsyncFile(fp)
def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]:
"""
Wrap an existing file as an asynchronous file.
:param file: an existing file-like object
:return: an asynchronous file object
"""
return AsyncFile(file)
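# --- Usage sketch (illustrative only; the file name is hypothetical). ---
# Run with: anyio.run(_demo_open_file)
async def _demo_open_file() -> None:
    # open_file() mirrors the builtin open(); blocking I/O on the returned
    # AsyncFile is dispatched to a worker thread.
    async with await open_file("demo.txt", "w") as f:
        await f.write("hello\n")
    # wrap_file() adapts an already-open file object instead of opening one.
    async with wrap_file(open("demo.txt")) as f:
        async for line in f:
            print(line, end="")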
@dataclass(eq=False)
class _PathIterator(AsyncIterator["Path"]):
iterator: Iterator[PathLike[str]]
async def __anext__(self) -> Path:
nextval = await to_thread.run_sync(
next, self.iterator, None, abandon_on_cancel=True
)
if nextval is None:
raise StopAsyncIteration from None
return Path(nextval)
class Path:
"""
An asynchronous version of :class:`pathlib.Path`.
This class cannot be substituted for :class:`pathlib.Path` or
:class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike`
interface.
It implements the Python 3.10 version of the :class:`pathlib.Path` interface, except for
the deprecated :meth:`~pathlib.Path.link_to` method.
Some methods may be unavailable or have limited functionality, based on the Python
version:
* :meth:`~pathlib.Path.copy` (available on Python 3.14 or later)
* :meth:`~pathlib.Path.copy_into` (available on Python 3.14 or later)
* :meth:`~pathlib.Path.from_uri` (available on Python 3.13 or later)
* :meth:`~pathlib.PurePath.full_match` (available on Python 3.13 or later)
* :attr:`~pathlib.Path.info` (available on Python 3.14 or later)
* :meth:`~pathlib.Path.is_junction` (available on Python 3.12 or later)
* :meth:`~pathlib.PurePath.match` (the ``case_sensitive`` parameter is only
available on Python 3.13 or later)
* :meth:`~pathlib.Path.move` (available on Python 3.14 or later)
* :meth:`~pathlib.Path.move_into` (available on Python 3.14 or later)
* :meth:`~pathlib.PurePath.relative_to` (the ``walk_up`` parameter is only available
on Python 3.12 or later)
* :meth:`~pathlib.Path.walk` (available on Python 3.12 or later)
Any methods that do disk I/O need to be awaited on. These methods are:
* :meth:`~pathlib.Path.absolute`
* :meth:`~pathlib.Path.chmod`
* :meth:`~pathlib.Path.cwd`
* :meth:`~pathlib.Path.exists`
* :meth:`~pathlib.Path.expanduser`
* :meth:`~pathlib.Path.group`
* :meth:`~pathlib.Path.hardlink_to`
* :meth:`~pathlib.Path.home`
* :meth:`~pathlib.Path.is_block_device`
* :meth:`~pathlib.Path.is_char_device`
* :meth:`~pathlib.Path.is_dir`
* :meth:`~pathlib.Path.is_fifo`
* :meth:`~pathlib.Path.is_file`
* :meth:`~pathlib.Path.is_junction`
* :meth:`~pathlib.Path.is_mount`
* :meth:`~pathlib.Path.is_socket`
* :meth:`~pathlib.Path.is_symlink`
* :meth:`~pathlib.Path.lchmod`
* :meth:`~pathlib.Path.lstat`
* :meth:`~pathlib.Path.mkdir`
* :meth:`~pathlib.Path.open`
* :meth:`~pathlib.Path.owner`
* :meth:`~pathlib.Path.read_bytes`
* :meth:`~pathlib.Path.read_text`
* :meth:`~pathlib.Path.readlink`
* :meth:`~pathlib.Path.rename`
* :meth:`~pathlib.Path.replace`
* :meth:`~pathlib.Path.resolve`
* :meth:`~pathlib.Path.rmdir`
* :meth:`~pathlib.Path.samefile`
* :meth:`~pathlib.Path.stat`
* :meth:`~pathlib.Path.symlink_to`
* :meth:`~pathlib.Path.touch`
* :meth:`~pathlib.Path.unlink`
* :meth:`~pathlib.Path.walk`
* :meth:`~pathlib.Path.write_bytes`
* :meth:`~pathlib.Path.write_text`
Additionally, the following methods return an async iterator yielding
:class:`~.Path` objects:
* :meth:`~pathlib.Path.glob`
* :meth:`~pathlib.Path.iterdir`
* :meth:`~pathlib.Path.rglob`
"""
__slots__ = "_path", "__weakref__"
__weakref__: Any
def __init__(self, *args: str | PathLike[str]) -> None:
self._path: Final[pathlib.Path] = pathlib.Path(*args)
def __fspath__(self) -> str:
return self._path.__fspath__()
def __str__(self) -> str:
return self._path.__str__()
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self.as_posix()!r})"
def __bytes__(self) -> bytes:
return self._path.__bytes__()
def __hash__(self) -> int:
return self._path.__hash__()
def __eq__(self, other: object) -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__eq__(target)
def __lt__(self, other: pathlib.PurePath | Path) -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__lt__(target)
def __le__(self, other: pathlib.PurePath | Path) -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__le__(target)
def __gt__(self, other: pathlib.PurePath | Path) -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__gt__(target)
def __ge__(self, other: pathlib.PurePath | Path) -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__ge__(target)
def __truediv__(self, other: str | PathLike[str]) -> Path:
return Path(self._path / other)
def __rtruediv__(self, other: str | PathLike[str]) -> Path:
return Path(other) / self
@property
def parts(self) -> tuple[str, ...]:
return self._path.parts
@property
def drive(self) -> str:
return self._path.drive
@property
def root(self) -> str:
return self._path.root
@property
def anchor(self) -> str:
return self._path.anchor
@property
def parents(self) -> Sequence[Path]:
return tuple(Path(p) for p in self._path.parents)
@property
def parent(self) -> Path:
return Path(self._path.parent)
@property
def name(self) -> str:
return self._path.name
@property
def suffix(self) -> str:
return self._path.suffix
@property
def suffixes(self) -> list[str]:
return self._path.suffixes
@property
def stem(self) -> str:
return self._path.stem
async def absolute(self) -> Path:
path = await to_thread.run_sync(self._path.absolute)
return Path(path)
def as_posix(self) -> str:
return self._path.as_posix()
def as_uri(self) -> str:
return self._path.as_uri()
if sys.version_info >= (3, 13):
parser: ClassVar[ModuleType] = pathlib.Path.parser
@classmethod
def from_uri(cls, uri: str) -> Path:
return Path(pathlib.Path.from_uri(uri))
def full_match(
self, path_pattern: str, *, case_sensitive: bool | None = None
) -> bool:
return self._path.full_match(path_pattern, case_sensitive=case_sensitive)
def match(
self, path_pattern: str, *, case_sensitive: bool | None = None
) -> bool:
return self._path.match(path_pattern, case_sensitive=case_sensitive)
else:
def match(self, path_pattern: str) -> bool:
return self._path.match(path_pattern)
if sys.version_info >= (3, 14):
@property
def info(self) -> Any: # TODO: add return type annotation when Typeshed gets it
return self._path.info
async def copy(
self,
target: str | os.PathLike[str],
*,
follow_symlinks: bool = True,
preserve_metadata: bool = False,
) -> Path:
func = partial(
self._path.copy,
follow_symlinks=follow_symlinks,
preserve_metadata=preserve_metadata,
)
return Path(await to_thread.run_sync(func, pathlib.Path(target)))
async def copy_into(
self,
target_dir: str | os.PathLike[str],
*,
follow_symlinks: bool = True,
preserve_metadata: bool = False,
) -> Path:
func = partial(
self._path.copy_into,
follow_symlinks=follow_symlinks,
preserve_metadata=preserve_metadata,
)
return Path(await to_thread.run_sync(func, pathlib.Path(target_dir)))
async def move(self, target: str | os.PathLike[str]) -> Path:
# Upstream does not handle anyio.Path properly as a PathLike
target = pathlib.Path(target)
return Path(await to_thread.run_sync(self._path.move, target))
async def move_into(
self,
target_dir: str | os.PathLike[str],
) -> Path:
return Path(await to_thread.run_sync(self._path.move_into, target_dir))
def is_relative_to(self, other: str | PathLike[str]) -> bool:
try:
self.relative_to(other)
return True
except ValueError:
return False
async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None:
func = partial(os.chmod, follow_symlinks=follow_symlinks)
return await to_thread.run_sync(func, self._path, mode)
@classmethod
async def cwd(cls) -> Path:
path = await to_thread.run_sync(pathlib.Path.cwd)
return cls(path)
async def exists(self) -> bool:
return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True)
async def expanduser(self) -> Path:
return Path(
await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True)
)
def glob(self, pattern: str) -> AsyncIterator[Path]:
gen = self._path.glob(pattern)
return _PathIterator(gen)
async def group(self) -> str:
return await to_thread.run_sync(self._path.group, abandon_on_cancel=True)
async def hardlink_to(
self, target: str | bytes | PathLike[str] | PathLike[bytes]
) -> None:
if isinstance(target, Path):
target = target._path
await to_thread.run_sync(os.link, target, self)
@classmethod
async def home(cls) -> Path:
home_path = await to_thread.run_sync(pathlib.Path.home)
return cls(home_path)
def is_absolute(self) -> bool:
return self._path.is_absolute()
async def is_block_device(self) -> bool:
return await to_thread.run_sync(
self._path.is_block_device, abandon_on_cancel=True
)
async def is_char_device(self) -> bool:
return await to_thread.run_sync(
self._path.is_char_device, abandon_on_cancel=True
)
async def is_dir(self) -> bool:
return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True)
async def is_fifo(self) -> bool:
return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True)
async def is_file(self) -> bool:
return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True)
if sys.version_info >= (3, 12):
async def is_junction(self) -> bool:
return await to_thread.run_sync(self._path.is_junction)
async def is_mount(self) -> bool:
return await to_thread.run_sync(
os.path.ismount, self._path, abandon_on_cancel=True
)
def is_reserved(self) -> bool:
return self._path.is_reserved()
async def is_socket(self) -> bool:
return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True)
async def is_symlink(self) -> bool:
return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True)
async def iterdir(self) -> AsyncIterator[Path]:
gen = (
self._path.iterdir()
if sys.version_info < (3, 13)
else await to_thread.run_sync(self._path.iterdir, abandon_on_cancel=True)
)
async for path in _PathIterator(gen):
yield path
def joinpath(self, *args: str | PathLike[str]) -> Path:
return Path(self._path.joinpath(*args))
async def lchmod(self, mode: int) -> None:
await to_thread.run_sync(self._path.lchmod, mode)
async def lstat(self) -> os.stat_result:
return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True)
async def mkdir(
self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False
) -> None:
await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok)
@overload
async def open(
self,
mode: OpenBinaryMode,
buffering: int = ...,
encoding: str | None = ...,
errors: str | None = ...,
newline: str | None = ...,
) -> AsyncFile[bytes]: ...
@overload
async def open(
self,
mode: OpenTextMode = ...,
buffering: int = ...,
encoding: str | None = ...,
errors: str | None = ...,
newline: str | None = ...,
) -> AsyncFile[str]: ...
async def open(
self,
mode: str = "r",
buffering: int = -1,
encoding: str | None = None,
errors: str | None = None,
newline: str | None = None,
) -> AsyncFile[Any]:
fp = await to_thread.run_sync(
self._path.open, mode, buffering, encoding, errors, newline
)
return AsyncFile(fp)
async def owner(self) -> str:
return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True)
async def read_bytes(self) -> bytes:
return await to_thread.run_sync(self._path.read_bytes)
async def read_text(
self, encoding: str | None = None, errors: str | None = None
) -> str:
return await to_thread.run_sync(self._path.read_text, encoding, errors)
if sys.version_info >= (3, 12):
def relative_to(
self, *other: str | PathLike[str], walk_up: bool = False
) -> Path:
# relative_to() should work with any PathLike but it doesn't
others = [pathlib.Path(p) for p in other]
return Path(self._path.relative_to(*others, walk_up=walk_up))
else:
def relative_to(self, *other: str | PathLike[str]) -> Path:
return Path(self._path.relative_to(*other))
async def readlink(self) -> Path:
target = await to_thread.run_sync(os.readlink, self._path)
return Path(target)
async def rename(self, target: str | pathlib.PurePath | Path) -> Path:
if isinstance(target, Path):
target = target._path
await to_thread.run_sync(self._path.rename, target)
return Path(target)
async def replace(self, target: str | pathlib.PurePath | Path) -> Path:
if isinstance(target, Path):
target = target._path
await to_thread.run_sync(self._path.replace, target)
return Path(target)
async def resolve(self, strict: bool = False) -> Path:
func = partial(self._path.resolve, strict=strict)
return Path(await to_thread.run_sync(func, abandon_on_cancel=True))
def rglob(self, pattern: str) -> AsyncIterator[Path]:
gen = self._path.rglob(pattern)
return _PathIterator(gen)
async def rmdir(self) -> None:
await to_thread.run_sync(self._path.rmdir)
async def samefile(self, other_path: str | PathLike[str]) -> bool:
if isinstance(other_path, Path):
other_path = other_path._path
return await to_thread.run_sync(
self._path.samefile, other_path, abandon_on_cancel=True
)
async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result:
func = partial(os.stat, follow_symlinks=follow_symlinks)
return await to_thread.run_sync(func, self._path, abandon_on_cancel=True)
async def symlink_to(
self,
target: str | bytes | PathLike[str] | PathLike[bytes],
target_is_directory: bool = False,
) -> None:
if isinstance(target, Path):
target = target._path
await to_thread.run_sync(self._path.symlink_to, target, target_is_directory)
async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None:
await to_thread.run_sync(self._path.touch, mode, exist_ok)
async def unlink(self, missing_ok: bool = False) -> None:
try:
await to_thread.run_sync(self._path.unlink)
except FileNotFoundError:
if not missing_ok:
raise
if sys.version_info >= (3, 12):
async def walk(
self,
top_down: bool = True,
on_error: Callable[[OSError], object] | None = None,
follow_symlinks: bool = False,
) -> AsyncIterator[tuple[Path, list[str], list[str]]]:
def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None:
try:
return next(gen)
except StopIteration:
return None
gen = self._path.walk(top_down, on_error, follow_symlinks)
while True:
value = await to_thread.run_sync(get_next_value)
if value is None:
return
root, dirs, paths = value
yield Path(root), dirs, paths
def with_name(self, name: str) -> Path:
return Path(self._path.with_name(name))
def with_stem(self, stem: str) -> Path:
return Path(self._path.with_name(stem + self._path.suffix))
def with_suffix(self, suffix: str) -> Path:
return Path(self._path.with_suffix(suffix))
def with_segments(self, *pathsegments: str | PathLike[str]) -> Path:
return Path(*pathsegments)
async def write_bytes(self, data: bytes) -> int:
return await to_thread.run_sync(self._path.write_bytes, data)
async def write_text(
self,
data: str,
encoding: str | None = None,
errors: str | None = None,
newline: str | None = None,
) -> int:
# Path.write_text() does not support the "newline" parameter before Python 3.10
def sync_write_text() -> int:
with self._path.open(
"w", encoding=encoding, errors=errors, newline=newline
) as fp:
return fp.write(data)
return await to_thread.run_sync(sync_write_text)
PathLike.register(Path)
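# --- Usage sketch (illustrative only; the file name is hypothetical). ---
# Run with: anyio.run(_demo_path)
async def _demo_path() -> None:
    path = Path("demo.txt")
    await path.write_text("hello\n")          # disk I/O methods must be awaited
    print(await path.read_text())
    async for entry in Path(".").iterdir():   # async iterator of Path objects
        print(entry)
    await path.unlink()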

View File

@@ -0,0 +1,18 @@
from __future__ import annotations
from ..abc import AsyncResource
from ._tasks import CancelScope
async def aclose_forcefully(resource: AsyncResource) -> None:
"""
Close an asynchronous resource in a cancelled scope.
Doing this closes the resource without waiting on anything.
:param resource: the resource to close
"""
with CancelScope() as scope:
scope.cancel()
await resource.aclose()
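# --- Usage sketch (illustrative only, not part of the upstream module). ---
# Run with: anyio.run(_demo_aclose_forcefully)
async def _demo_aclose_forcefully() -> None:
    import anyio  # imported here to avoid a circular import at module load time

    send, receive = anyio.create_memory_object_stream[bytes]()
    # Closing inside a cancelled scope guarantees aclose() cannot block at a
    # checkpoint, which is useful when unwinding after an error.
    await aclose_forcefully(send)
    await receive.aclose()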

View File

@@ -0,0 +1,27 @@
from __future__ import annotations
from collections.abc import AsyncIterator
from contextlib import AbstractContextManager
from signal import Signals
from ._eventloop import get_async_backend
def open_signal_receiver(
*signals: Signals,
) -> AbstractContextManager[AsyncIterator[Signals]]:
"""
Start receiving operating system signals.
:param signals: signals to receive (e.g. ``signal.SIGINT``)
:return: an asynchronous context manager for an asynchronous iterator which yields
signal numbers
.. warning:: Windows does not support signals natively so it is best to avoid
relying on this in cross-platform applications.
.. warning:: On asyncio, this permanently replaces any previous signal handler for
the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`.
"""
return get_async_backend().open_signal_receiver(*signals)
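# --- Usage sketch (illustrative only; POSIX-only in practice, per the warning above). ---
# Run with: anyio.run(_demo_signal_receiver)
async def _demo_signal_receiver() -> None:
    import signal

    with open_signal_receiver(signal.SIGINT, signal.SIGTERM) as signals:
        async for signum in signals:
            print(f"received signal {signum!r}")
            break  # handle one signal, then leave the loop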

View File

@@ -0,0 +1,991 @@
from __future__ import annotations
import errno
import os
import socket
import ssl
import stat
import sys
from collections.abc import Awaitable
from dataclasses import dataclass
from ipaddress import IPv4Address, IPv6Address, ip_address
from os import PathLike, chmod
from socket import AddressFamily, SocketKind
from typing import TYPE_CHECKING, Any, Literal, cast, overload
from .. import ConnectionFailed, to_thread
from ..abc import (
ByteStreamConnectable,
ConnectedUDPSocket,
ConnectedUNIXDatagramSocket,
IPAddressType,
IPSockAddrType,
SocketListener,
SocketStream,
UDPSocket,
UNIXDatagramSocket,
UNIXSocketStream,
)
from ..streams.stapled import MultiListener
from ..streams.tls import TLSConnectable, TLSStream
from ._eventloop import get_async_backend
from ._resources import aclose_forcefully
from ._synchronization import Event
from ._tasks import create_task_group, move_on_after
if TYPE_CHECKING:
from _typeshed import FileDescriptorLike
else:
FileDescriptorLike = object
if sys.version_info < (3, 11):
from exceptiongroup import ExceptionGroup
if sys.version_info >= (3, 12):
from typing import override
else:
from typing_extensions import override
if sys.version_info < (3, 13):
from typing_extensions import deprecated
else:
from warnings import deprecated
IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515
AnyIPAddressFamily = Literal[
AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6
]
IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6]
# tls_hostname given
@overload
async def connect_tcp(
remote_host: IPAddressType,
remote_port: int,
*,
local_host: IPAddressType | None = ...,
ssl_context: ssl.SSLContext | None = ...,
tls_standard_compatible: bool = ...,
tls_hostname: str,
happy_eyeballs_delay: float = ...,
) -> TLSStream: ...
# ssl_context given
@overload
async def connect_tcp(
remote_host: IPAddressType,
remote_port: int,
*,
local_host: IPAddressType | None = ...,
ssl_context: ssl.SSLContext,
tls_standard_compatible: bool = ...,
tls_hostname: str | None = ...,
happy_eyeballs_delay: float = ...,
) -> TLSStream: ...
# tls=True
@overload
async def connect_tcp(
remote_host: IPAddressType,
remote_port: int,
*,
local_host: IPAddressType | None = ...,
tls: Literal[True],
ssl_context: ssl.SSLContext | None = ...,
tls_standard_compatible: bool = ...,
tls_hostname: str | None = ...,
happy_eyeballs_delay: float = ...,
) -> TLSStream: ...
# tls=False
@overload
async def connect_tcp(
remote_host: IPAddressType,
remote_port: int,
*,
local_host: IPAddressType | None = ...,
tls: Literal[False],
ssl_context: ssl.SSLContext | None = ...,
tls_standard_compatible: bool = ...,
tls_hostname: str | None = ...,
happy_eyeballs_delay: float = ...,
) -> SocketStream: ...
# No TLS arguments
@overload
async def connect_tcp(
remote_host: IPAddressType,
remote_port: int,
*,
local_host: IPAddressType | None = ...,
happy_eyeballs_delay: float = ...,
) -> SocketStream: ...
async def connect_tcp(
remote_host: IPAddressType,
remote_port: int,
*,
local_host: IPAddressType | None = None,
tls: bool = False,
ssl_context: ssl.SSLContext | None = None,
tls_standard_compatible: bool = True,
tls_hostname: str | None = None,
happy_eyeballs_delay: float = 0.25,
) -> SocketStream | TLSStream:
"""
Connect to a host using the TCP protocol.
This function implements the stateless version of the Happy Eyeballs algorithm (RFC
6555). If ``remote_host`` is a host name that resolves to multiple IP addresses,
each one is tried until one connection attempt succeeds. If the first attempt does
not connect within 250 milliseconds, a second attempt is started using the next
address in the list, and so on. On IPv6 enabled systems, an IPv6 address (if
available) is tried first.
When the connection has been established, a TLS handshake will be done if either
``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.
:param remote_host: the IP address or host name to connect to
:param remote_port: port on the target host to connect to
:param local_host: the interface address or name to bind the socket to before
connecting
:param tls: ``True`` to do a TLS handshake with the connected stream and return a
:class:`~anyio.streams.tls.TLSStream` instead
:param ssl_context: the SSL context object to use (if omitted, a default context is
created)
:param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake
before closing the stream and requires that the server does this as well.
Otherwise, :exc:`~ssl.SSLEOFError` may be raised during reads from the stream.
Some protocols, such as HTTP, require this option to be ``False``.
See :meth:`~ssl.SSLContext.wrap_socket` for details.
:param tls_hostname: host name to check the server certificate against (defaults to
the value of ``remote_host``)
:param happy_eyeballs_delay: delay (in seconds) before starting the next connection
attempt
:return: a socket stream object if no TLS handshake was done, otherwise a TLS stream
:raises ConnectionFailed: if the connection fails
"""
# Placed here due to https://github.com/python/mypy/issues/7057
connected_stream: SocketStream | None = None
async def try_connect(remote_host: str, event: Event) -> None:
nonlocal connected_stream
try:
stream = await asynclib.connect_tcp(remote_host, remote_port, local_address)
except OSError as exc:
oserrors.append(exc)
return
else:
if connected_stream is None:
connected_stream = stream
tg.cancel_scope.cancel()
else:
await stream.aclose()
finally:
event.set()
asynclib = get_async_backend()
local_address: IPSockAddrType | None = None
family = socket.AF_UNSPEC
if local_host:
gai_res = await getaddrinfo(str(local_host), None)
family, *_, local_address = gai_res[0]
target_host = str(remote_host)
try:
addr_obj = ip_address(remote_host)
except ValueError:
addr_obj = None
if addr_obj is not None:
if isinstance(addr_obj, IPv6Address):
target_addrs = [(socket.AF_INET6, addr_obj.compressed)]
else:
target_addrs = [(socket.AF_INET, addr_obj.compressed)]
else:
# getaddrinfo() will raise an exception if name resolution fails
gai_res = await getaddrinfo(
target_host, remote_port, family=family, type=socket.SOCK_STREAM
)
# Organize the list so that the first address is an IPv6 address (if available)
# and the second one is an IPv4 address. The rest can be in whatever order.
v6_found = v4_found = False
target_addrs = []
for af, *_, sa in gai_res:
if af == socket.AF_INET6 and not v6_found:
v6_found = True
target_addrs.insert(0, (af, sa[0]))
elif af == socket.AF_INET and not v4_found and v6_found:
v4_found = True
target_addrs.insert(1, (af, sa[0]))
else:
target_addrs.append((af, sa[0]))
oserrors: list[OSError] = []
try:
async with create_task_group() as tg:
for _af, addr in target_addrs:
event = Event()
tg.start_soon(try_connect, addr, event)
with move_on_after(happy_eyeballs_delay):
await event.wait()
if connected_stream is None:
cause = (
oserrors[0]
if len(oserrors) == 1
else ExceptionGroup("multiple connection attempts failed", oserrors)
)
raise OSError("All connection attempts failed") from cause
finally:
oserrors.clear()
if tls or tls_hostname or ssl_context:
try:
return await TLSStream.wrap(
connected_stream,
server_side=False,
hostname=tls_hostname or str(remote_host),
ssl_context=ssl_context,
standard_compatible=tls_standard_compatible,
)
except BaseException:
await aclose_forcefully(connected_stream)
raise
return connected_stream
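# --- Usage sketch (illustrative only; the host and port are hypothetical). ---
# Run with: anyio.run(_demo_connect_tcp)
async def _demo_connect_tcp() -> None:
    # Plain TCP returns a SocketStream; passing tls=True (or ssl_context /
    # tls_hostname) would return a TLSStream instead.
    async with await connect_tcp("example.com", 80) as stream:
        await stream.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
        print(await stream.receive())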
async def connect_unix(path: str | bytes | PathLike[Any]) -> UNIXSocketStream:
"""
Connect to the given UNIX socket.
Not available on Windows.
:param path: path to the socket
:return: a socket stream object
:raises ConnectionFailed: if the connection fails
"""
path = os.fspath(path)
return await get_async_backend().connect_unix(path)
async def create_tcp_listener(
*,
local_host: IPAddressType | None = None,
local_port: int = 0,
family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC,
backlog: int = 65536,
reuse_port: bool = False,
) -> MultiListener[SocketStream]:
"""
Create a TCP socket listener.
:param local_port: port number to listen on
:param local_host: IP address of the interface to listen on. If omitted, listen on
all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address
family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6.
:param family: address family (used if ``local_host`` was omitted)
:param backlog: maximum number of queued incoming connections (up to a maximum of
2**16, or 65536)
:param reuse_port: ``True`` to allow multiple sockets to bind to the same
address/port (not supported on Windows)
:return: a multi-listener object containing one or more socket listeners
:raises OSError: if there's an error creating a socket, or binding to one or more
interfaces failed
"""
asynclib = get_async_backend()
backlog = min(backlog, 65536)
local_host = str(local_host) if local_host is not None else None
def setup_raw_socket(
fam: AddressFamily,
bind_addr: tuple[str, int] | tuple[str, int, int, int],
*,
v6only: bool = True,
) -> socket.socket:
sock = socket.socket(fam)
try:
sock.setblocking(False)
if fam == AddressFamily.AF_INET6:
sock.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, v6only)
# For Windows, enable exclusive address use. For others, enable address
# reuse.
if sys.platform == "win32":
sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
else:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if reuse_port:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
# Workaround for #554
if fam == socket.AF_INET6 and "%" in bind_addr[0]:
addr, scope_id = bind_addr[0].split("%", 1)
bind_addr = (addr, bind_addr[1], 0, int(scope_id))
sock.bind(bind_addr)
sock.listen(backlog)
except BaseException:
sock.close()
raise
return sock
# We pass type=0 on non-Windows platforms as a workaround for a uvloop bug
# where we don't get the correct scope ID for IPv6 link-local addresses when passing
# type=socket.SOCK_STREAM to getaddrinfo():
# https://github.com/MagicStack/uvloop/issues/539
gai_res = await getaddrinfo(
local_host,
local_port,
family=family,
type=socket.SOCK_STREAM if sys.platform == "win32" else 0,
flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
)
# The set comprehension is here to work around a glibc bug:
# https://sourceware.org/bugzilla/show_bug.cgi?id=14969
sockaddrs = sorted({res for res in gai_res if res[1] == SocketKind.SOCK_STREAM})
# Special case for dual-stack binding on the "any" interface
if (
local_host is None
and family == AddressFamily.AF_UNSPEC
and socket.has_dualstack_ipv6()
and any(fam == AddressFamily.AF_INET6 for fam, *_ in gai_res)
):
raw_socket = setup_raw_socket(
AddressFamily.AF_INET6, ("::", local_port), v6only=False
)
listener = asynclib.create_tcp_listener(raw_socket)
return MultiListener([listener])
errors: list[OSError] = []
try:
for _ in range(len(sockaddrs)):
listeners: list[SocketListener] = []
bound_ephemeral_port = local_port
try:
for fam, *_, sockaddr in sockaddrs:
sockaddr = sockaddr[0], bound_ephemeral_port, *sockaddr[2:]
raw_socket = setup_raw_socket(fam, sockaddr)
# Store the assigned port if an ephemeral port was requested, so
# we'll bind to the same port on all interfaces
if local_port == 0 and len(gai_res) > 1:
bound_ephemeral_port = raw_socket.getsockname()[1]
listeners.append(asynclib.create_tcp_listener(raw_socket))
except BaseException as exc:
for listener in listeners:
await listener.aclose()
# If an ephemeral port was requested but binding the assigned port
# failed for another interface, rotate the address list and try again
if (
isinstance(exc, OSError)
and exc.errno == errno.EADDRINUSE
and local_port == 0
and bound_ephemeral_port
):
errors.append(exc)
sockaddrs.append(sockaddrs.pop(0))
continue
raise
return MultiListener(listeners)
raise OSError(
f"Could not create {len(sockaddrs)} listeners with a consistent port"
) from ExceptionGroup("Several bind attempts failed", errors)
finally:
del errors # Prevent reference cycles
async def create_unix_listener(
path: str | bytes | PathLike[Any],
*,
mode: int | None = None,
backlog: int = 65536,
) -> SocketListener:
"""
Create a UNIX socket listener.
Not available on Windows.
:param path: path of the socket
:param mode: permissions to set on the socket
:param backlog: maximum number of queued incoming connections (up to a maximum of
2**16, or 65536)
:return: a listener object
.. versionchanged:: 3.0
If a socket already exists on the file system in the given path, it will be
removed first.
"""
backlog = min(backlog, 65536)
raw_socket = await setup_unix_local_socket(path, mode, socket.SOCK_STREAM)
try:
raw_socket.listen(backlog)
return get_async_backend().create_unix_listener(raw_socket)
except BaseException:
raw_socket.close()
raise
async def create_udp_socket(
family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
*,
local_host: IPAddressType | None = None,
local_port: int = 0,
reuse_port: bool = False,
) -> UDPSocket:
"""
Create a UDP socket.
If ``local_port`` has been given, the socket will be bound to this port on the local
machine, making this socket suitable for providing UDP-based services.
:param family: address family (``AF_INET`` or ``AF_INET6``) automatically
determined from ``local_host`` if omitted
:param local_host: IP address or host name of the local interface to bind to
:param local_port: local port to bind to
:param reuse_port: ``True`` to allow multiple sockets to bind to the same
address/port (not supported on Windows)
:return: a UDP socket
"""
if family is AddressFamily.AF_UNSPEC and not local_host:
raise ValueError('Either "family" or "local_host" must be given')
if local_host:
gai_res = await getaddrinfo(
str(local_host),
local_port,
family=family,
type=socket.SOCK_DGRAM,
flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
)
family = cast(AnyIPAddressFamily, gai_res[0][0])
local_address = gai_res[0][-1]
elif family is AddressFamily.AF_INET6:
local_address = ("::", 0)
else:
local_address = ("0.0.0.0", 0)
sock = await get_async_backend().create_udp_socket(
family, local_address, None, reuse_port
)
return cast(UDPSocket, sock)
async def create_connected_udp_socket(
remote_host: IPAddressType,
remote_port: int,
*,
family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
local_host: IPAddressType | None = None,
local_port: int = 0,
reuse_port: bool = False,
) -> ConnectedUDPSocket:
"""
Create a connected UDP socket.
Connected UDP sockets can only communicate with the specified remote host/port, and
any packets sent from other sources are dropped.
:param remote_host: remote host to set as the default target
:param remote_port: port on the remote host to set as the default target
:param family: address family (``AF_INET`` or ``AF_INET6``) automatically
determined from ``local_host`` or ``remote_host`` if omitted
:param local_host: IP address or host name of the local interface to bind to
:param local_port: local port to bind to
:param reuse_port: ``True`` to allow multiple sockets to bind to the same
address/port (not supported on Windows)
:return: a connected UDP socket
"""
local_address = None
if local_host:
gai_res = await getaddrinfo(
str(local_host),
local_port,
family=family,
type=socket.SOCK_DGRAM,
flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
)
family = cast(AnyIPAddressFamily, gai_res[0][0])
local_address = gai_res[0][-1]
gai_res = await getaddrinfo(
str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM
)
family = cast(AnyIPAddressFamily, gai_res[0][0])
remote_address = gai_res[0][-1]
sock = await get_async_backend().create_udp_socket(
family, local_address, remote_address, reuse_port
)
return cast(ConnectedUDPSocket, sock)
async def create_unix_datagram_socket(
*,
local_path: None | str | bytes | PathLike[Any] = None,
local_mode: int | None = None,
) -> UNIXDatagramSocket:
"""
Create a UNIX datagram socket.
Not available on Windows.
If ``local_path`` has been given, the socket will be bound to this path, making this
socket suitable for receiving datagrams from other processes. Other processes can
send datagrams to this socket only if ``local_path`` is set.
If a socket already exists on the file system in the ``local_path``, it will be
removed first.
:param local_path: the path to bind to
:param local_mode: permissions to set on the local socket
:return: a UNIX datagram socket
"""
raw_socket = await setup_unix_local_socket(
local_path, local_mode, socket.SOCK_DGRAM
)
return await get_async_backend().create_unix_datagram_socket(raw_socket, None)
async def create_connected_unix_datagram_socket(
remote_path: str | bytes | PathLike[Any],
*,
local_path: None | str | bytes | PathLike[Any] = None,
local_mode: int | None = None,
) -> ConnectedUNIXDatagramSocket:
"""
Create a connected UNIX datagram socket.
Connected datagram sockets can only communicate with the specified remote path.
If ``local_path`` has been given, the socket will be bound to this path, making
this socket suitable for receiving datagrams from other processes. Other processes
can send datagrams to this socket only if ``local_path`` is set.
If a socket already exists on the file system in the ``local_path``, it will be
removed first.
:param remote_path: the path to set as the default target
:param local_path: the path to bind to
:param local_mode: permissions to set on the local socket
:return: a connected UNIX datagram socket
"""
remote_path = os.fspath(remote_path)
raw_socket = await setup_unix_local_socket(
local_path, local_mode, socket.SOCK_DGRAM
)
return await get_async_backend().create_unix_datagram_socket(
raw_socket, remote_path
)
async def getaddrinfo(
host: bytes | str | None,
port: str | int | None,
*,
family: int | AddressFamily = 0,
type: int | SocketKind = 0,
proto: int = 0,
flags: int = 0,
) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int]]]:
"""
Look up a numeric IP address given a host name.
Internationalized domain names are translated according to the (non-transitional)
IDNA 2008 standard.
.. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
(host, port), unlike what :func:`socket.getaddrinfo` does.
:param host: host name
:param port: port number
:param family: socket family (``AF_INET``, ...)
:param type: socket type (``SOCK_STREAM``, ...)
:param proto: protocol number
:param flags: flags to pass to upstream ``getaddrinfo()``
:return: list of tuples containing (family, type, proto, canonname, sockaddr)
.. seealso:: :func:`socket.getaddrinfo`
"""
# Handle unicode hostnames
if isinstance(host, str):
try:
encoded_host: bytes | None = host.encode("ascii")
except UnicodeEncodeError:
import idna
encoded_host = idna.encode(host, uts46=True)
else:
encoded_host = host
gai_res = await get_async_backend().getaddrinfo(
encoded_host, port, family=family, type=type, proto=proto, flags=flags
)
return [
(family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
for family, type, proto, canonname, sockaddr in gai_res
# filter out IPv6 results when IPv6 is disabled
if not isinstance(sockaddr[0], int)
]
def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]:
"""
Look up the host name of an IP address.
:param sockaddr: socket address (e.g. (ipaddress, port) for IPv4)
:param flags: flags to pass to upstream ``getnameinfo()``
:return: a tuple of (host name, service name)
.. seealso:: :func:`socket.getnameinfo`
"""
return get_async_backend().getnameinfo(sockaddr, flags)
@deprecated("This function is deprecated; use `wait_readable` instead")
def wait_socket_readable(sock: socket.socket) -> Awaitable[None]:
"""
.. deprecated:: 4.7.0
Use :func:`wait_readable` instead.
Wait until the given socket has data to be read.
.. warning:: Only use this on raw sockets that have not been wrapped by any higher
level constructs like socket streams!
:param sock: a socket object
:raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
socket to become readable
:raises ~anyio.BusyResourceError: if another task is already waiting for the socket
to become readable
"""
return get_async_backend().wait_readable(sock.fileno())
@deprecated("This function is deprecated; use `wait_writable` instead")
def wait_socket_writable(sock: socket.socket) -> Awaitable[None]:
"""
.. deprecated:: 4.7.0
Use :func:`wait_writable` instead.
Wait until the given socket can be written to.
This does **NOT** work on Windows when using the asyncio backend with a proactor
event loop (default on py3.8+).
.. warning:: Only use this on raw sockets that have not been wrapped by any higher
level constructs like socket streams!
:param sock: a socket object
:raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
socket to become writable
:raises ~anyio.BusyResourceError: if another task is already waiting for the socket
to become writable
"""
return get_async_backend().wait_writable(sock.fileno())
def wait_readable(obj: FileDescriptorLike) -> Awaitable[None]:
"""
Wait until the given object has data to be read.
On Unix systems, ``obj`` must either be an integer file descriptor, or else an
object with a ``.fileno()`` method which returns an integer file descriptor. Any
kind of file descriptor can be passed, though the exact semantics will depend on
your kernel. For example, this probably won't do anything useful for on-disk files.
On Windows systems, ``obj`` must either be an integer ``SOCKET`` handle, or else an
object with a ``.fileno()`` method which returns an integer ``SOCKET`` handle. File
descriptors aren't supported, and neither are handles that refer to anything besides
a ``SOCKET``.
On backends where this functionality is not natively provided (asyncio
``ProactorEventLoop`` on Windows), it is provided using a separate selector thread
which is set to shut down when the interpreter shuts down.
.. warning:: Don't use this on raw sockets that have been wrapped by any higher
level constructs like socket streams!
:param obj: an object with a ``.fileno()`` method or an integer handle
:raises ~anyio.ClosedResourceError: if the object was closed while waiting for the
object to become readable
:raises ~anyio.BusyResourceError: if another task is already waiting for the object
to become readable
"""
return get_async_backend().wait_readable(obj)
def wait_writable(obj: FileDescriptorLike) -> Awaitable[None]:
"""
Wait until the given object can be written to.
:param obj: an object with a ``.fileno()`` method or an integer handle
:raises ~anyio.ClosedResourceError: if the object was closed while waiting for the
object to become writable
:raises ~anyio.BusyResourceError: if another task is already waiting for the object
to become writable
.. seealso:: See the documentation of :func:`wait_readable` for the definition of
``obj`` and notes on backend compatibility.
.. warning:: Don't use this on raw sockets that have been wrapped by any higher
level constructs like socket streams!
"""
return get_async_backend().wait_writable(obj)
def notify_closing(obj: FileDescriptorLike) -> None:
"""
Call this before closing a file descriptor (on Unix) or socket (on
Windows). This will cause any `wait_readable` or `wait_writable`
calls on the given object to immediately wake up and raise
`~anyio.ClosedResourceError`.
This doesn't actually close the object; you still have to do that
yourself afterwards. Also, you want to be careful to make sure no
new tasks start waiting on the object in between when you call this
and when it's actually closed. So to close something properly, you
usually want to do these steps in order:
1. Explicitly mark the object as closed, so that any new attempts
to use it will abort before they start.
2. Call `notify_closing` to wake up any already-existing users.
3. Actually close the object.
It's also possible to do them in a different order if that's more
convenient, *but only if* you make sure not to have any checkpoints in
between the steps. This way they all happen in a single atomic
step, so other tasks won't be able to tell what order they happened
in anyway.
:param obj: an object with a ``.fileno()`` method or an integer handle
"""
get_async_backend().notify_closing(obj)
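# --- Usage sketch (illustrative only, not part of the upstream module). ---
# The three-step close protocol from the docstring above, applied to a raw
# socket that another task may be watching via wait_readable().
# Run with: anyio.run(_demo_notify_closing)
async def _demo_notify_closing() -> None:
    sock = socket.socket()
    # 1. mark the socket as closed in your own bookkeeping (a hypothetical flag)
    # 2. wake up any tasks blocked in wait_readable()/wait_writable()
    notify_closing(sock)
    # 3. actually close the object
    sock.close()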
#
# Private API
#
def convert_ipv6_sockaddr(
sockaddr: tuple[str, int, int, int] | tuple[str, int],
) -> tuple[str, int]:
"""
Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format.
If the scope ID is nonzero, it is added to the address, separated with ``%``.
Otherwise the flow id and scope id are simply cut off from the tuple.
Any other kinds of socket addresses are returned as-is.
:param sockaddr: the result of :meth:`~socket.socket.getsockname`
:return: the converted socket address
"""
# This is more complicated than it should be because of MyPy
if isinstance(sockaddr, tuple) and len(sockaddr) == 4:
host, port, flowinfo, scope_id = sockaddr
if scope_id:
# PyPy (as of v7.3.11) leaves the interface name in the result, so
# we discard it and only get the scope ID from the end
# (https://foss.heptapod.net/pypy/pypy/-/issues/3938)
host = host.split("%")[0]
# Add scope_id to the address
return f"{host}%{scope_id}", port
else:
return host, port
else:
return sockaddr
async def setup_unix_local_socket(
path: None | str | bytes | PathLike[Any],
mode: int | None,
socktype: int,
) -> socket.socket:
"""
Create a UNIX local socket object, deleting the socket at the given path if it
exists.
Not available on Windows.
:param path: path of the socket
:param mode: permissions to set on the socket
:param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM
"""
path_str: str | None
if path is not None:
path_str = os.fsdecode(path)
# Linux abstract namespace sockets aren't backed by a concrete file so skip stat call
if not path_str.startswith("\0"):
# Copied from pathlib...
try:
stat_result = os.stat(path)
except OSError as e:
if e.errno not in (
errno.ENOENT,
errno.ENOTDIR,
errno.EBADF,
errno.ELOOP,
):
raise
else:
if stat.S_ISSOCK(stat_result.st_mode):
os.unlink(path)
else:
path_str = None
raw_socket = socket.socket(socket.AF_UNIX, socktype)
raw_socket.setblocking(False)
if path_str is not None:
try:
await to_thread.run_sync(raw_socket.bind, path_str, abandon_on_cancel=True)
if mode is not None:
await to_thread.run_sync(chmod, path_str, mode, abandon_on_cancel=True)
except BaseException:
raw_socket.close()
raise
return raw_socket
@dataclass
class TCPConnectable(ByteStreamConnectable):
"""
Connects to a TCP server at the given host and port.
:param host: host name or IP address of the server
:param port: TCP port number of the server
"""
host: str | IPv4Address | IPv6Address
port: int
def __post_init__(self) -> None:
if self.port < 1 or self.port > 65535:
raise ValueError("TCP port number out of range")
@override
async def connect(self) -> SocketStream:
try:
return await connect_tcp(self.host, self.port)
except OSError as exc:
raise ConnectionFailed(
f"error connecting to {self.host}:{self.port}: {exc}"
) from exc
@dataclass
class UNIXConnectable(ByteStreamConnectable):
"""
Connects to a UNIX domain socket at the given path.
:param path: the file system path of the socket
"""
path: str | bytes | PathLike[str] | PathLike[bytes]
@override
async def connect(self) -> UNIXSocketStream:
try:
return await connect_unix(self.path)
except OSError as exc:
raise ConnectionFailed(f"error connecting to {self.path!r}: {exc}") from exc
def as_connectable(
remote: ByteStreamConnectable
| tuple[str | IPv4Address | IPv6Address, int]
| str
| bytes
| PathLike[str],
/,
*,
tls: bool = False,
ssl_context: ssl.SSLContext | None = None,
tls_hostname: str | None = None,
tls_standard_compatible: bool = True,
) -> ByteStreamConnectable:
"""
Return a byte stream connectable from the given object.
If a bytestream connectable is given, it is returned unchanged.
If a tuple of (host, port) is given, a TCP connectable is returned.
If a string or bytes path is given, a UNIX connectable is returned.
If ``tls=True``, the connectable will be wrapped in a
:class:`~.streams.tls.TLSConnectable`.
:param remote: a connectable, a tuple of (host, port) or a path to a UNIX socket
:param tls: if ``True``, wrap the plaintext connectable in a
:class:`~.streams.tls.TLSConnectable` using the provided TLS settings
:param ssl_context: if ``tls=True``, the SSLContext object to use (if not provided,
a secure default will be created)
:param tls_hostname: if ``tls=True``, host name of the server to use for checking
the server certificate (defaults to the host portion of the address for TCP
connectables)
:param tls_standard_compatible: if ``False`` and ``tls=True``, makes the TLS stream
skip the closing handshake when closing the connection, so it won't raise an
exception if the server does the same
"""
connectable: TCPConnectable | UNIXConnectable | TLSConnectable
if isinstance(remote, ByteStreamConnectable):
return remote
elif isinstance(remote, tuple) and len(remote) == 2:
connectable = TCPConnectable(*remote)
elif isinstance(remote, (str, bytes, PathLike)):
connectable = UNIXConnectable(remote)
else:
raise TypeError(f"cannot convert {remote!r} to a connectable")
if tls:
if not tls_hostname and isinstance(connectable, TCPConnectable):
tls_hostname = str(connectable.host)
connectable = TLSConnectable(
connectable,
ssl_context=ssl_context,
hostname=tls_hostname,
standard_compatible=tls_standard_compatible,
)
return connectable
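# --- Usage sketch (illustrative only; the address is hypothetical). ---
# Run with: anyio.run(_demo_as_connectable)
async def _demo_as_connectable() -> None:
    # A (host, port) tuple becomes a TCPConnectable; tls=True wraps it in a
    # TLSConnectable with a default SSL context.
    connectable = as_connectable(("example.com", 443), tls=True)
    async with await connectable.connect() as stream:
        print(stream)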

View File

@@ -0,0 +1,52 @@
from __future__ import annotations
import math
from typing import TypeVar
from warnings import warn
from ..streams.memory import (
MemoryObjectReceiveStream,
MemoryObjectSendStream,
MemoryObjectStreamState,
)
T_Item = TypeVar("T_Item")
class create_memory_object_stream(
tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]],
):
"""
Create a memory object stream.
The stream's item type can be annotated like
:func:`create_memory_object_stream[T_Item]`.
:param max_buffer_size: number of items held in the buffer until ``send()`` starts
blocking
:param item_type: old way of marking the streams with the right generic type for
static typing (does nothing on AnyIO 4)
.. deprecated:: 4.0
Use ``create_memory_object_stream[YourItemType](...)`` instead.
:return: a tuple of (send stream, receive stream)
"""
def __new__( # type: ignore[misc]
cls, max_buffer_size: float = 0, item_type: object = None
) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]:
if max_buffer_size != math.inf and not isinstance(max_buffer_size, int):
raise ValueError("max_buffer_size must be either an integer or math.inf")
if max_buffer_size < 0:
raise ValueError("max_buffer_size cannot be negative")
if item_type is not None:
warn(
"The item_type argument has been deprecated in AnyIO 4.0. "
"Use create_memory_object_stream[YourItemType](...) instead.",
DeprecationWarning,
stacklevel=2,
)
state = MemoryObjectStreamState[T_Item](max_buffer_size)
return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state))
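# --- Usage sketch (illustrative only, not part of the upstream module). ---
# Run with: anyio.run(_demo_memory_object_stream)
async def _demo_memory_object_stream() -> None:
    # Subscripting annotates the item type; a buffer size of 1 lets one
    # send() complete before a receiver is ready.
    send, receive = create_memory_object_stream[str](max_buffer_size=1)
    async with send, receive:
        await send.send("hello")
        print(await receive.receive())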

View File

@@ -0,0 +1,202 @@
from __future__ import annotations
import sys
from collections.abc import AsyncIterable, Iterable, Mapping, Sequence
from io import BytesIO
from os import PathLike
from subprocess import PIPE, CalledProcessError, CompletedProcess
from typing import IO, Any, Union, cast
from ..abc import Process
from ._eventloop import get_async_backend
from ._tasks import create_task_group
if sys.version_info >= (3, 10):
from typing import TypeAlias
else:
from typing_extensions import TypeAlias
StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"]
async def run_process(
command: StrOrBytesPath | Sequence[StrOrBytesPath],
*,
input: bytes | None = None,
stdin: int | IO[Any] | None = None,
stdout: int | IO[Any] | None = PIPE,
stderr: int | IO[Any] | None = PIPE,
check: bool = True,
cwd: StrOrBytesPath | None = None,
env: Mapping[str, str] | None = None,
startupinfo: Any = None,
creationflags: int = 0,
start_new_session: bool = False,
pass_fds: Sequence[int] = (),
user: str | int | None = None,
group: str | int | None = None,
extra_groups: Iterable[str | int] | None = None,
umask: int = -1,
) -> CompletedProcess[bytes]:
"""
Run an external command in a subprocess and wait until it completes.
.. seealso:: :func:`subprocess.run`
:param command: either a string to pass to the shell, or an iterable of strings
containing the executable name or path and its arguments
:param input: bytes passed to the standard input of the subprocess
:param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
a file-like object, or ``None``; ``input`` overrides this
:param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
a file-like object, or ``None``
:param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
:data:`subprocess.STDOUT`, a file-like object, or ``None``
:param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the
process terminates with a return code other than 0
:param cwd: If not ``None``, change the working directory to this before running the
command
:param env: if not ``None``, this mapping replaces the inherited environment
variables from the parent process
:param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used
to specify process startup parameters (Windows only)
:param creationflags: flags that can be used to control the creation of the
subprocess (see :class:`subprocess.Popen` for the specifics)
:param start_new_session: if ``True``, the setsid() system call will be made in the
child process prior to the execution of the subprocess. (POSIX only)
:param pass_fds: sequence of file descriptors to keep open between the parent and
child processes. (POSIX only)
:param user: effective user to run the process as (Python >= 3.9, POSIX only)
:param group: effective group to run the process as (Python >= 3.9, POSIX only)
:param extra_groups: supplementary groups to set in the subprocess (Python >= 3.9,
POSIX only)
:param umask: if not negative, this umask is applied in the child process before
running the given command (Python >= 3.9, POSIX only)
:return: an object representing the completed process
:raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process
exits with a nonzero return code
"""
async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None:
buffer = BytesIO()
async for chunk in stream:
buffer.write(chunk)
stream_contents[index] = buffer.getvalue()
if stdin is not None and input is not None:
raise ValueError("only one of stdin and input is allowed")
async with await open_process(
command,
stdin=PIPE if input else stdin,
stdout=stdout,
stderr=stderr,
cwd=cwd,
env=env,
startupinfo=startupinfo,
creationflags=creationflags,
start_new_session=start_new_session,
pass_fds=pass_fds,
user=user,
group=group,
extra_groups=extra_groups,
umask=umask,
) as process:
stream_contents: list[bytes | None] = [None, None]
async with create_task_group() as tg:
if process.stdout:
tg.start_soon(drain_stream, process.stdout, 0)
if process.stderr:
tg.start_soon(drain_stream, process.stderr, 1)
if process.stdin and input:
await process.stdin.send(input)
await process.stdin.aclose()
await process.wait()
output, errors = stream_contents
if check and process.returncode != 0:
raise CalledProcessError(cast(int, process.returncode), command, output, errors)
return CompletedProcess(command, cast(int, process.returncode), output, errors)
async def open_process(
command: StrOrBytesPath | Sequence[StrOrBytesPath],
*,
stdin: int | IO[Any] | None = PIPE,
stdout: int | IO[Any] | None = PIPE,
stderr: int | IO[Any] | None = PIPE,
cwd: StrOrBytesPath | None = None,
env: Mapping[str, str] | None = None,
startupinfo: Any = None,
creationflags: int = 0,
start_new_session: bool = False,
pass_fds: Sequence[int] = (),
user: str | int | None = None,
group: str | int | None = None,
extra_groups: Iterable[str | int] | None = None,
umask: int = -1,
) -> Process:
"""
Start an external command in a subprocess.
.. seealso:: :class:`subprocess.Popen`
:param command: either a string to pass to the shell, or an iterable of strings
containing the executable name or path and its arguments
:param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
file-like object, or ``None``
:param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
a file-like object, or ``None``
:param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
:data:`subprocess.STDOUT`, a file-like object, or ``None``
:param cwd: If not ``None``, the working directory is changed before executing
:param env: If env is not ``None``, it must be a mapping that defines the
environment variables for the new process
:param creationflags: flags that can be used to control the creation of the
subprocess (see :class:`subprocess.Popen` for the specifics)
:param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used
to specify process startup parameters (Windows only)
    :param start_new_session: if ``True``, the ``setsid()`` system call will be
        made in the child process prior to the execution of the subprocess
        (POSIX only)
:param pass_fds: sequence of file descriptors to keep open between the parent and
child processes. (POSIX only)
:param user: effective user to run the process as (POSIX only)
:param group: effective group to run the process as (POSIX only)
:param extra_groups: supplementary groups to set in the subprocess (POSIX only)
:param umask: if not negative, this umask is applied in the child process before
running the given command (POSIX only)
:return: an asynchronous process object
"""
kwargs: dict[str, Any] = {}
if user is not None:
kwargs["user"] = user
if group is not None:
kwargs["group"] = group
if extra_groups is not None:
kwargs["extra_groups"] = group
if umask >= 0:
kwargs["umask"] = umask
return await get_async_backend().open_process(
command,
stdin=stdin,
stdout=stdout,
stderr=stderr,
cwd=cwd,
env=env,
startupinfo=startupinfo,
creationflags=creationflags,
start_new_session=start_new_session,
pass_fds=pass_fds,
**kwargs,
)
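# Editor's usage sketch (not part of the original source): interact with a
# long-running process through its pipes. Assumes a POSIX `cat` binary; the
# default stdin/stdout are PIPEs exposed as anyio byte streams, so I/O uses
# send()/receive() rather than plain read()/write().
async def _open_process_example() -> None:
    async with await open_process(["cat"]) as process:
        await process.stdin.send(b"hello\n")
        await process.stdin.aclose()  # EOF lets `cat` finish
        print(await process.stdout.receive())  # b'hello\n'
        await process.wait()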

View File

@@ -0,0 +1,753 @@
from __future__ import annotations
import math
from collections import deque
from collections.abc import Callable
from dataclasses import dataclass
from types import TracebackType
from typing import TypeVar
from sniffio import AsyncLibraryNotFoundError
from ..lowlevel import checkpoint_if_cancelled
from ._eventloop import get_async_backend
from ._exceptions import BusyResourceError
from ._tasks import CancelScope
from ._testing import TaskInfo, get_current_task
T = TypeVar("T")
@dataclass(frozen=True)
class EventStatistics:
"""
:ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait`
"""
tasks_waiting: int
@dataclass(frozen=True)
class CapacityLimiterStatistics:
"""
:ivar int borrowed_tokens: number of tokens currently borrowed by tasks
:ivar float total_tokens: total number of available tokens
:ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from
this limiter
:ivar int tasks_waiting: number of tasks waiting on
:meth:`~.CapacityLimiter.acquire` or
:meth:`~.CapacityLimiter.acquire_on_behalf_of`
"""
borrowed_tokens: int
total_tokens: float
borrowers: tuple[object, ...]
tasks_waiting: int
@dataclass(frozen=True)
class LockStatistics:
"""
:ivar bool locked: flag indicating if this lock is locked or not
:ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the
lock is not held by any task)
:ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire`
"""
locked: bool
owner: TaskInfo | None
tasks_waiting: int
@dataclass(frozen=True)
class ConditionStatistics:
"""
:ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait`
:ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying
:class:`~.Lock`
"""
tasks_waiting: int
lock_statistics: LockStatistics
@dataclass(frozen=True)
class SemaphoreStatistics:
"""
:ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire`
"""
tasks_waiting: int
class Event:
def __new__(cls) -> Event:
try:
return get_async_backend().create_event()
except AsyncLibraryNotFoundError:
return EventAdapter()
def set(self) -> None:
"""Set the flag, notifying all listeners."""
raise NotImplementedError
def is_set(self) -> bool:
"""Return ``True`` if the flag is set, ``False`` if not."""
raise NotImplementedError
async def wait(self) -> None:
"""
Wait until the flag has been set.
If the flag has already been set when this method is called, it returns
immediately.
"""
raise NotImplementedError
def statistics(self) -> EventStatistics:
"""Return statistics about the current state of this event."""
raise NotImplementedError
class EventAdapter(Event):
_internal_event: Event | None = None
_is_set: bool = False
def __new__(cls) -> EventAdapter:
return object.__new__(cls)
@property
def _event(self) -> Event:
if self._internal_event is None:
self._internal_event = get_async_backend().create_event()
if self._is_set:
self._internal_event.set()
return self._internal_event
def set(self) -> None:
if self._internal_event is None:
self._is_set = True
else:
self._event.set()
def is_set(self) -> bool:
if self._internal_event is None:
return self._is_set
return self._internal_event.is_set()
async def wait(self) -> None:
await self._event.wait()
def statistics(self) -> EventStatistics:
if self._internal_event is None:
return EventStatistics(tasks_waiting=0)
return self._internal_event.statistics()
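# Editor's usage sketch (not part of the original source): one task waits on
# the event while another sets it. create_task_group is imported lazily here
# because a module-level import of `anyio` would be circular.
async def _event_example() -> None:
    from anyio import create_task_group

    event = Event()

    async def waiter() -> None:
        await event.wait()  # blocks until event.set() is called
        print("event was set")

    async with create_task_group() as tg:
        tg.start_soon(waiter)
        event.set()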
class Lock:
def __new__(cls, *, fast_acquire: bool = False) -> Lock:
try:
return get_async_backend().create_lock(fast_acquire=fast_acquire)
except AsyncLibraryNotFoundError:
return LockAdapter(fast_acquire=fast_acquire)
async def __aenter__(self) -> None:
await self.acquire()
async def __aexit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
self.release()
async def acquire(self) -> None:
"""Acquire the lock."""
raise NotImplementedError
def acquire_nowait(self) -> None:
"""
Acquire the lock, without blocking.
:raises ~anyio.WouldBlock: if the operation would block
"""
raise NotImplementedError
def release(self) -> None:
"""Release the lock."""
raise NotImplementedError
def locked(self) -> bool:
"""Return True if the lock is currently held."""
raise NotImplementedError
def statistics(self) -> LockStatistics:
"""
Return statistics about the current state of this lock.
.. versionadded:: 3.0
"""
raise NotImplementedError
class LockAdapter(Lock):
_internal_lock: Lock | None = None
def __new__(cls, *, fast_acquire: bool = False) -> LockAdapter:
return object.__new__(cls)
def __init__(self, *, fast_acquire: bool = False):
self._fast_acquire = fast_acquire
@property
def _lock(self) -> Lock:
if self._internal_lock is None:
self._internal_lock = get_async_backend().create_lock(
fast_acquire=self._fast_acquire
)
return self._internal_lock
async def __aenter__(self) -> None:
await self._lock.acquire()
async def __aexit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
if self._internal_lock is not None:
self._internal_lock.release()
async def acquire(self) -> None:
"""Acquire the lock."""
await self._lock.acquire()
def acquire_nowait(self) -> None:
"""
Acquire the lock, without blocking.
:raises ~anyio.WouldBlock: if the operation would block
"""
self._lock.acquire_nowait()
def release(self) -> None:
"""Release the lock."""
self._lock.release()
def locked(self) -> bool:
"""Return True if the lock is currently held."""
return self._lock.locked()
def statistics(self) -> LockStatistics:
"""
Return statistics about the current state of this lock.
.. versionadded:: 3.0
"""
if self._internal_lock is None:
return LockStatistics(False, None, 0)
return self._internal_lock.statistics()
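# Editor's usage sketch (not part of the original source): a lock is normally
# held via the async context manager, which releases it even if the body raises.
async def _lock_example() -> None:
    lock = Lock()
    async with lock:
        assert lock.locked()   # held by the current task
    assert not lock.locked()   # released on exit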
class Condition:
_owner_task: TaskInfo | None = None
def __init__(self, lock: Lock | None = None):
self._lock = lock or Lock()
self._waiters: deque[Event] = deque()
async def __aenter__(self) -> None:
await self.acquire()
async def __aexit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
self.release()
def _check_acquired(self) -> None:
if self._owner_task != get_current_task():
raise RuntimeError("The current task is not holding the underlying lock")
async def acquire(self) -> None:
"""Acquire the underlying lock."""
await self._lock.acquire()
self._owner_task = get_current_task()
def acquire_nowait(self) -> None:
"""
Acquire the underlying lock, without blocking.
:raises ~anyio.WouldBlock: if the operation would block
"""
self._lock.acquire_nowait()
self._owner_task = get_current_task()
def release(self) -> None:
"""Release the underlying lock."""
self._lock.release()
def locked(self) -> bool:
"""Return True if the lock is set."""
return self._lock.locked()
def notify(self, n: int = 1) -> None:
"""Notify exactly n listeners."""
self._check_acquired()
for _ in range(n):
try:
event = self._waiters.popleft()
except IndexError:
break
event.set()
def notify_all(self) -> None:
"""Notify all the listeners."""
self._check_acquired()
for event in self._waiters:
event.set()
self._waiters.clear()
async def wait(self) -> None:
"""Wait for a notification."""
await checkpoint_if_cancelled()
self._check_acquired()
event = Event()
self._waiters.append(event)
self.release()
try:
await event.wait()
except BaseException:
if not event.is_set():
self._waiters.remove(event)
raise
finally:
with CancelScope(shield=True):
await self.acquire()
async def wait_for(self, predicate: Callable[[], T]) -> T:
"""
Wait until a predicate becomes true.
:param predicate: a callable that returns a truthy value when the condition is
met
:return: the result of the predicate
.. versionadded:: 4.11.0
"""
while not (result := predicate()):
await self.wait()
return result
def statistics(self) -> ConditionStatistics:
"""
Return statistics about the current state of this condition.
.. versionadded:: 3.0
"""
return ConditionStatistics(len(self._waiters), self._lock.statistics())
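# Editor's usage sketch (not part of the original source): wait() atomically
# releases the underlying lock and reacquires it before returning, so both
# tasks must enter the condition before waiting or notifying.
async def _condition_example() -> None:
    from anyio import create_task_group, sleep  # lazy import; see note above

    condition = Condition()

    async def consumer() -> None:
        async with condition:
            await condition.wait()
            print("notified")

    async with create_task_group() as tg:
        tg.start_soon(consumer)
        await sleep(0.1)  # crude handoff so the consumer is waiting first
        async with condition:
            condition.notify()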
class Semaphore:
def __new__(
cls,
initial_value: int,
*,
max_value: int | None = None,
fast_acquire: bool = False,
) -> Semaphore:
try:
return get_async_backend().create_semaphore(
initial_value, max_value=max_value, fast_acquire=fast_acquire
)
except AsyncLibraryNotFoundError:
return SemaphoreAdapter(initial_value, max_value=max_value)
def __init__(
self,
initial_value: int,
*,
max_value: int | None = None,
fast_acquire: bool = False,
):
if not isinstance(initial_value, int):
raise TypeError("initial_value must be an integer")
if initial_value < 0:
raise ValueError("initial_value must be >= 0")
if max_value is not None:
if not isinstance(max_value, int):
raise TypeError("max_value must be an integer or None")
if max_value < initial_value:
raise ValueError(
"max_value must be equal to or higher than initial_value"
)
self._fast_acquire = fast_acquire
async def __aenter__(self) -> Semaphore:
await self.acquire()
return self
async def __aexit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
self.release()
async def acquire(self) -> None:
"""Decrement the semaphore value, blocking if necessary."""
raise NotImplementedError
def acquire_nowait(self) -> None:
"""
Acquire the underlying lock, without blocking.
:raises ~anyio.WouldBlock: if the operation would block
"""
raise NotImplementedError
def release(self) -> None:
"""Increment the semaphore value."""
raise NotImplementedError
@property
def value(self) -> int:
"""The current value of the semaphore."""
raise NotImplementedError
@property
def max_value(self) -> int | None:
"""The maximum value of the semaphore."""
raise NotImplementedError
def statistics(self) -> SemaphoreStatistics:
"""
Return statistics about the current state of this semaphore.
.. versionadded:: 3.0
"""
raise NotImplementedError
class SemaphoreAdapter(Semaphore):
_internal_semaphore: Semaphore | None = None
def __new__(
cls,
initial_value: int,
*,
max_value: int | None = None,
fast_acquire: bool = False,
) -> SemaphoreAdapter:
return object.__new__(cls)
def __init__(
self,
initial_value: int,
*,
max_value: int | None = None,
fast_acquire: bool = False,
) -> None:
super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire)
self._initial_value = initial_value
self._max_value = max_value
@property
def _semaphore(self) -> Semaphore:
if self._internal_semaphore is None:
self._internal_semaphore = get_async_backend().create_semaphore(
self._initial_value, max_value=self._max_value
)
return self._internal_semaphore
async def acquire(self) -> None:
await self._semaphore.acquire()
def acquire_nowait(self) -> None:
self._semaphore.acquire_nowait()
def release(self) -> None:
self._semaphore.release()
@property
def value(self) -> int:
if self._internal_semaphore is None:
return self._initial_value
return self._semaphore.value
@property
def max_value(self) -> int | None:
return self._max_value
def statistics(self) -> SemaphoreStatistics:
if self._internal_semaphore is None:
return SemaphoreStatistics(tasks_waiting=0)
return self._semaphore.statistics()
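# Editor's usage sketch (not part of the original source): a semaphore with an
# initial value of 2 admits at most two concurrent holders.
async def _semaphore_example() -> None:
    semaphore = Semaphore(2)
    async with semaphore:
        assert semaphore.value == 1  # one slot taken
    assert semaphore.value == 2      # released on exit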
class CapacityLimiter:
def __new__(cls, total_tokens: float) -> CapacityLimiter:
try:
return get_async_backend().create_capacity_limiter(total_tokens)
except AsyncLibraryNotFoundError:
return CapacityLimiterAdapter(total_tokens)
async def __aenter__(self) -> None:
raise NotImplementedError
async def __aexit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
raise NotImplementedError
@property
def total_tokens(self) -> float:
"""
The total number of tokens available for borrowing.
This is a read-write property. If the total number of tokens is increased, the
proportionate number of tasks waiting on this limiter will be granted their
tokens.
.. versionchanged:: 3.0
The property is now writable.
"""
raise NotImplementedError
@total_tokens.setter
def total_tokens(self, value: float) -> None:
raise NotImplementedError
@property
def borrowed_tokens(self) -> int:
"""The number of tokens that have currently been borrowed."""
raise NotImplementedError
@property
def available_tokens(self) -> float:
"""The number of tokens currently available to be borrowed"""
raise NotImplementedError
def acquire_nowait(self) -> None:
"""
Acquire a token for the current task without waiting for one to become
available.
:raises ~anyio.WouldBlock: if there are no tokens available for borrowing
"""
raise NotImplementedError
def acquire_on_behalf_of_nowait(self, borrower: object) -> None:
"""
Acquire a token without waiting for one to become available.
:param borrower: the entity borrowing a token
:raises ~anyio.WouldBlock: if there are no tokens available for borrowing
"""
raise NotImplementedError
async def acquire(self) -> None:
"""
Acquire a token for the current task, waiting if necessary for one to become
available.
"""
raise NotImplementedError
async def acquire_on_behalf_of(self, borrower: object) -> None:
"""
Acquire a token, waiting if necessary for one to become available.
:param borrower: the entity borrowing a token
"""
raise NotImplementedError
def release(self) -> None:
"""
Release the token held by the current task.
:raises RuntimeError: if the current task has not borrowed a token from this
limiter.
"""
raise NotImplementedError
def release_on_behalf_of(self, borrower: object) -> None:
"""
Release the token held by the given borrower.
:raises RuntimeError: if the borrower has not borrowed a token from this
limiter.
"""
raise NotImplementedError
def statistics(self) -> CapacityLimiterStatistics:
"""
Return statistics about the current state of this limiter.
.. versionadded:: 3.0
"""
raise NotImplementedError
class CapacityLimiterAdapter(CapacityLimiter):
_internal_limiter: CapacityLimiter | None = None
def __new__(cls, total_tokens: float) -> CapacityLimiterAdapter:
return object.__new__(cls)
def __init__(self, total_tokens: float) -> None:
self.total_tokens = total_tokens
@property
def _limiter(self) -> CapacityLimiter:
if self._internal_limiter is None:
self._internal_limiter = get_async_backend().create_capacity_limiter(
self._total_tokens
)
return self._internal_limiter
async def __aenter__(self) -> None:
await self._limiter.__aenter__()
async def __aexit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
return await self._limiter.__aexit__(exc_type, exc_val, exc_tb)
@property
def total_tokens(self) -> float:
if self._internal_limiter is None:
return self._total_tokens
return self._internal_limiter.total_tokens
@total_tokens.setter
def total_tokens(self, value: float) -> None:
if not isinstance(value, int) and value is not math.inf:
raise TypeError("total_tokens must be an int or math.inf")
elif value < 1:
raise ValueError("total_tokens must be >= 1")
if self._internal_limiter is None:
self._total_tokens = value
return
self._limiter.total_tokens = value
@property
def borrowed_tokens(self) -> int:
if self._internal_limiter is None:
return 0
return self._internal_limiter.borrowed_tokens
@property
def available_tokens(self) -> float:
if self._internal_limiter is None:
return self._total_tokens
return self._internal_limiter.available_tokens
def acquire_nowait(self) -> None:
self._limiter.acquire_nowait()
def acquire_on_behalf_of_nowait(self, borrower: object) -> None:
self._limiter.acquire_on_behalf_of_nowait(borrower)
async def acquire(self) -> None:
await self._limiter.acquire()
async def acquire_on_behalf_of(self, borrower: object) -> None:
await self._limiter.acquire_on_behalf_of(borrower)
def release(self) -> None:
self._limiter.release()
def release_on_behalf_of(self, borrower: object) -> None:
self._limiter.release_on_behalf_of(borrower)
def statistics(self) -> CapacityLimiterStatistics:
if self._internal_limiter is None:
return CapacityLimiterStatistics(
borrowed_tokens=0,
total_tokens=self.total_tokens,
borrowers=(),
tasks_waiting=0,
)
return self._internal_limiter.statistics()
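# Editor's usage sketch (not part of the original source): borrow a token for
# the duration of a block; total_tokens can be raised later to admit more tasks.
async def _capacity_limiter_example() -> None:
    limiter = CapacityLimiter(2)
    async with limiter:
        assert limiter.borrowed_tokens == 1
        assert limiter.available_tokens == 1
    limiter.total_tokens = 4  # grant more capacity on the fly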
class ResourceGuard:
"""
A context manager for ensuring that a resource is only used by a single task at a
time.
    Entering this context manager while a previous entry has not yet exited
    will raise :exc:`BusyResourceError`.
:param action: the action to guard against (visible in the :exc:`BusyResourceError`
when triggered, e.g. "Another task is already {action} this resource")
.. versionadded:: 4.1
"""
__slots__ = "action", "_guarded"
def __init__(self, action: str = "using"):
self.action: str = action
self._guarded = False
def __enter__(self) -> None:
if self._guarded:
raise BusyResourceError(self.action)
self._guarded = True
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
self._guarded = False
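# Editor's usage sketch (not part of the original source): a second entry
# while the guard is held raises BusyResourceError.
def _resource_guard_example() -> None:
    guard = ResourceGuard("reading from")
    with guard:
        try:
            with guard:
                pass
        except BusyResourceError:
            print("another task is already reading from this resource")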

View File

@@ -0,0 +1,163 @@
from __future__ import annotations
import math
from collections.abc import Generator
from contextlib import contextmanager
from types import TracebackType
from ..abc._tasks import TaskGroup, TaskStatus
from ._eventloop import get_async_backend
class _IgnoredTaskStatus(TaskStatus[object]):
def started(self, value: object = None) -> None:
pass
TASK_STATUS_IGNORED = _IgnoredTaskStatus()
class CancelScope:
"""
Wraps a unit of work that can be made separately cancellable.
:param deadline: The time (clock value) when this scope is cancelled automatically
:param shield: ``True`` to shield the cancel scope from external cancellation
"""
def __new__(
cls, *, deadline: float = math.inf, shield: bool = False
) -> CancelScope:
return get_async_backend().create_cancel_scope(shield=shield, deadline=deadline)
def cancel(self, reason: str | None = None) -> None:
"""
Cancel this scope immediately.
:param reason: a message describing the reason for the cancellation
"""
raise NotImplementedError
@property
def deadline(self) -> float:
"""
The time (clock value) when this scope is cancelled automatically.
Will be ``float('inf')`` if no timeout has been set.
"""
raise NotImplementedError
@deadline.setter
def deadline(self, value: float) -> None:
raise NotImplementedError
@property
def cancel_called(self) -> bool:
"""``True`` if :meth:`cancel` has been called."""
raise NotImplementedError
@property
def cancelled_caught(self) -> bool:
"""
``True`` if this scope suppressed a cancellation exception it itself raised.
This is typically used to check if any work was interrupted, or to see if the
scope was cancelled due to its deadline being reached. The value will, however,
only be ``True`` if the cancellation was triggered by the scope itself (and not
an outer scope).
"""
raise NotImplementedError
@property
def shield(self) -> bool:
"""
``True`` if this scope is shielded from external cancellation.
While a scope is shielded, it will not receive cancellations from outside.
"""
raise NotImplementedError
@shield.setter
def shield(self, value: bool) -> None:
raise NotImplementedError
def __enter__(self) -> CancelScope:
raise NotImplementedError
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> bool:
raise NotImplementedError
@contextmanager
def fail_after(
delay: float | None, shield: bool = False
) -> Generator[CancelScope, None, None]:
"""
    Create a context manager which raises a :class:`TimeoutError` if the wrapped
    block does not finish in time.
:param delay: maximum allowed time (in seconds) before raising the exception, or
``None`` to disable the timeout
:param shield: ``True`` to shield the cancel scope from external cancellation
:return: a context manager that yields a cancel scope
:rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\]
"""
current_time = get_async_backend().current_time
deadline = (current_time() + delay) if delay is not None else math.inf
with get_async_backend().create_cancel_scope(
deadline=deadline, shield=shield
) as cancel_scope:
yield cancel_scope
if cancel_scope.cancelled_caught and current_time() >= cancel_scope.deadline:
raise TimeoutError
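# Editor's usage sketch (not part of the original source): the enclosed block
# is cancelled at the deadline and TimeoutError is raised on exit. sleep() is
# imported lazily to avoid a circular module-level import.
async def _fail_after_example() -> None:
    from anyio import sleep

    try:
        with fail_after(0.5):
            await sleep(10)
    except TimeoutError:
        print("the operation timed out")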
def move_on_after(delay: float | None, shield: bool = False) -> CancelScope:
"""
Create a cancel scope with a deadline that expires after the given delay.
:param delay: maximum allowed time (in seconds) before exiting the context block, or
``None`` to disable the timeout
:param shield: ``True`` to shield the cancel scope from external cancellation
:return: a cancel scope
"""
deadline = (
(get_async_backend().current_time() + delay) if delay is not None else math.inf
)
return get_async_backend().create_cancel_scope(deadline=deadline, shield=shield)
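# Editor's usage sketch (not part of the original source): unlike fail_after,
# move_on_after silently skips past the block; check cancelled_caught to learn
# whether the deadline was hit.
async def _move_on_after_example() -> None:
    from anyio import sleep

    with move_on_after(0.5) as scope:
        await sleep(10)
    if scope.cancelled_caught:
        print("gave up waiting")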
def current_effective_deadline() -> float:
"""
Return the nearest deadline among all the cancel scopes effective for the current
task.
:return: a clock value from the event loop's internal clock (or ``float('inf')`` if
there is no deadline in effect, or ``float('-inf')`` if the current scope has
been cancelled)
:rtype: float
"""
return get_async_backend().current_effective_deadline()
def create_task_group() -> TaskGroup:
"""
Create a task group.
:return: a task group
"""
return get_async_backend().create_task_group()
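# Editor's usage sketch (not part of the original source): the task group
# context exits only after every spawned task has finished.
async def _task_group_example() -> None:
    from anyio import sleep

    async def worker(n: int) -> None:
        await sleep(0.01 * n)

    async with create_task_group() as tg:
        for n in range(3):
            tg.start_soon(worker, n)
    print("all workers done")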

View File

@@ -0,0 +1,616 @@
from __future__ import annotations
import os
import sys
import tempfile
from collections.abc import Iterable
from io import BytesIO, TextIOWrapper
from types import TracebackType
from typing import (
TYPE_CHECKING,
Any,
AnyStr,
Generic,
overload,
)
from .. import to_thread
from .._core._fileio import AsyncFile
from ..lowlevel import checkpoint_if_cancelled
if TYPE_CHECKING:
from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer
class TemporaryFile(Generic[AnyStr]):
"""
An asynchronous temporary file that is automatically created and cleaned up.
This class provides an asynchronous context manager interface to a temporary file.
    The file is created using Python's standard :func:`tempfile.TemporaryFile`
    function in a background thread, and is wrapped as an asynchronous file using
    :class:`AsyncFile`.
:param mode: The mode in which the file is opened. Defaults to "w+b".
:param buffering: The buffering policy (-1 means the default buffering).
:param encoding: The encoding used to decode or encode the file. Only applicable in
text mode.
:param newline: Controls how universal newlines mode works (only applicable in text
mode).
:param suffix: The suffix for the temporary file name.
:param prefix: The prefix for the temporary file name.
:param dir: The directory in which the temporary file is created.
:param errors: The error handling scheme used for encoding/decoding errors.
"""
_async_file: AsyncFile[AnyStr]
@overload
def __init__(
self: TemporaryFile[bytes],
mode: OpenBinaryMode = ...,
buffering: int = ...,
encoding: str | None = ...,
newline: str | None = ...,
suffix: str | None = ...,
prefix: str | None = ...,
dir: str | None = ...,
*,
errors: str | None = ...,
): ...
@overload
def __init__(
self: TemporaryFile[str],
mode: OpenTextMode,
buffering: int = ...,
encoding: str | None = ...,
newline: str | None = ...,
suffix: str | None = ...,
prefix: str | None = ...,
dir: str | None = ...,
*,
errors: str | None = ...,
): ...
def __init__(
self,
mode: OpenTextMode | OpenBinaryMode = "w+b",
buffering: int = -1,
encoding: str | None = None,
newline: str | None = None,
suffix: str | None = None,
prefix: str | None = None,
dir: str | None = None,
*,
errors: str | None = None,
) -> None:
self.mode = mode
self.buffering = buffering
self.encoding = encoding
self.newline = newline
self.suffix: str | None = suffix
self.prefix: str | None = prefix
self.dir: str | None = dir
self.errors = errors
async def __aenter__(self) -> AsyncFile[AnyStr]:
fp = await to_thread.run_sync(
lambda: tempfile.TemporaryFile(
self.mode,
self.buffering,
self.encoding,
self.newline,
self.suffix,
self.prefix,
self.dir,
errors=self.errors,
)
)
self._async_file = AsyncFile(fp)
return self._async_file
async def __aexit__(
self,
exc_type: type[BaseException] | None,
exc_value: BaseException | None,
traceback: TracebackType | None,
) -> None:
await self._async_file.aclose()
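# Editor's usage sketch (not part of the original source): the context manager
# yields an AsyncFile, so all I/O on it is awaited.
async def _temporary_file_example() -> None:
    async with TemporaryFile() as f:  # opened in "w+b" mode by default
        await f.write(b"scratch data")
        await f.seek(0)
        assert await f.read() == b"scratch data"
    # the file is closed (and thereby removed) on exit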
class NamedTemporaryFile(Generic[AnyStr]):
"""
An asynchronous named temporary file that is automatically created and cleaned up.
This class provides an asynchronous context manager for a temporary file with a
visible name in the file system. It uses Python's standard
:func:`~tempfile.NamedTemporaryFile` function and wraps the file object with
:class:`AsyncFile` for asynchronous operations.
:param mode: The mode in which the file is opened. Defaults to "w+b".
:param buffering: The buffering policy (-1 means the default buffering).
:param encoding: The encoding used to decode or encode the file. Only applicable in
text mode.
:param newline: Controls how universal newlines mode works (only applicable in text
mode).
:param suffix: The suffix for the temporary file name.
:param prefix: The prefix for the temporary file name.
:param dir: The directory in which the temporary file is created.
:param delete: Whether to delete the file when it is closed.
:param errors: The error handling scheme used for encoding/decoding errors.
:param delete_on_close: (Python 3.12+) Whether to delete the file on close.
"""
_async_file: AsyncFile[AnyStr]
@overload
def __init__(
self: NamedTemporaryFile[bytes],
mode: OpenBinaryMode = ...,
buffering: int = ...,
encoding: str | None = ...,
newline: str | None = ...,
suffix: str | None = ...,
prefix: str | None = ...,
dir: str | None = ...,
delete: bool = ...,
*,
errors: str | None = ...,
delete_on_close: bool = ...,
): ...
@overload
def __init__(
self: NamedTemporaryFile[str],
mode: OpenTextMode,
buffering: int = ...,
encoding: str | None = ...,
newline: str | None = ...,
suffix: str | None = ...,
prefix: str | None = ...,
dir: str | None = ...,
delete: bool = ...,
*,
errors: str | None = ...,
delete_on_close: bool = ...,
): ...
def __init__(
self,
mode: OpenBinaryMode | OpenTextMode = "w+b",
buffering: int = -1,
encoding: str | None = None,
newline: str | None = None,
suffix: str | None = None,
prefix: str | None = None,
dir: str | None = None,
delete: bool = True,
*,
errors: str | None = None,
delete_on_close: bool = True,
) -> None:
self._params: dict[str, Any] = {
"mode": mode,
"buffering": buffering,
"encoding": encoding,
"newline": newline,
"suffix": suffix,
"prefix": prefix,
"dir": dir,
"delete": delete,
"errors": errors,
}
if sys.version_info >= (3, 12):
self._params["delete_on_close"] = delete_on_close
async def __aenter__(self) -> AsyncFile[AnyStr]:
fp = await to_thread.run_sync(
lambda: tempfile.NamedTemporaryFile(**self._params)
)
self._async_file = AsyncFile(fp)
return self._async_file
async def __aexit__(
self,
exc_type: type[BaseException] | None,
exc_value: BaseException | None,
traceback: TracebackType | None,
) -> None:
await self._async_file.aclose()
class SpooledTemporaryFile(AsyncFile[AnyStr]):
"""
An asynchronous spooled temporary file that starts in memory and is spooled to disk.
This class provides an asynchronous interface to a spooled temporary file, much like
Python's standard :class:`~tempfile.SpooledTemporaryFile`. It supports asynchronous
write operations and provides a method to force a rollover to disk.
:param max_size: Maximum size in bytes before the file is rolled over to disk.
:param mode: The mode in which the file is opened. Defaults to "w+b".
:param buffering: The buffering policy (-1 means the default buffering).
:param encoding: The encoding used to decode or encode the file (text mode only).
:param newline: Controls how universal newlines mode works (text mode only).
:param suffix: The suffix for the temporary file name.
:param prefix: The prefix for the temporary file name.
:param dir: The directory in which the temporary file is created.
:param errors: The error handling scheme used for encoding/decoding errors.
"""
_rolled: bool = False
@overload
def __init__(
self: SpooledTemporaryFile[bytes],
max_size: int = ...,
mode: OpenBinaryMode = ...,
buffering: int = ...,
encoding: str | None = ...,
newline: str | None = ...,
suffix: str | None = ...,
prefix: str | None = ...,
dir: str | None = ...,
*,
errors: str | None = ...,
): ...
@overload
def __init__(
self: SpooledTemporaryFile[str],
max_size: int = ...,
mode: OpenTextMode = ...,
buffering: int = ...,
encoding: str | None = ...,
newline: str | None = ...,
suffix: str | None = ...,
prefix: str | None = ...,
dir: str | None = ...,
*,
errors: str | None = ...,
): ...
def __init__(
self,
max_size: int = 0,
mode: OpenBinaryMode | OpenTextMode = "w+b",
buffering: int = -1,
encoding: str | None = None,
newline: str | None = None,
suffix: str | None = None,
prefix: str | None = None,
dir: str | None = None,
*,
errors: str | None = None,
) -> None:
self._tempfile_params: dict[str, Any] = {
"mode": mode,
"buffering": buffering,
"encoding": encoding,
"newline": newline,
"suffix": suffix,
"prefix": prefix,
"dir": dir,
"errors": errors,
}
self._max_size = max_size
if "b" in mode:
super().__init__(BytesIO()) # type: ignore[arg-type]
else:
super().__init__(
TextIOWrapper( # type: ignore[arg-type]
BytesIO(),
encoding=encoding,
errors=errors,
newline=newline,
write_through=True,
)
)
async def aclose(self) -> None:
if not self._rolled:
self._fp.close()
return
await super().aclose()
async def _check(self) -> None:
if self._rolled or self._fp.tell() <= self._max_size:
return
await self.rollover()
async def rollover(self) -> None:
if self._rolled:
return
self._rolled = True
buffer = self._fp
buffer.seek(0)
self._fp = await to_thread.run_sync(
lambda: tempfile.TemporaryFile(**self._tempfile_params)
)
await self.write(buffer.read())
buffer.close()
@property
def closed(self) -> bool:
return self._fp.closed
async def read(self, size: int = -1) -> AnyStr:
if not self._rolled:
await checkpoint_if_cancelled()
return self._fp.read(size)
return await super().read(size) # type: ignore[return-value]
async def read1(self: SpooledTemporaryFile[bytes], size: int = -1) -> bytes:
if not self._rolled:
await checkpoint_if_cancelled()
return self._fp.read1(size)
return await super().read1(size)
async def readline(self) -> AnyStr:
if not self._rolled:
await checkpoint_if_cancelled()
return self._fp.readline()
return await super().readline() # type: ignore[return-value]
async def readlines(self) -> list[AnyStr]:
if not self._rolled:
await checkpoint_if_cancelled()
return self._fp.readlines()
return await super().readlines() # type: ignore[return-value]
async def readinto(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int:
if not self._rolled:
await checkpoint_if_cancelled()
            return self._fp.readinto(b)
return await super().readinto(b)
async def readinto1(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int:
if not self._rolled:
await checkpoint_if_cancelled()
            return self._fp.readinto1(b)
return await super().readinto1(b)
    async def seek(self, offset: int, whence: int = os.SEEK_SET) -> int:
if not self._rolled:
await checkpoint_if_cancelled()
return self._fp.seek(offset, whence)
return await super().seek(offset, whence)
async def tell(self) -> int:
if not self._rolled:
await checkpoint_if_cancelled()
return self._fp.tell()
return await super().tell()
async def truncate(self, size: int | None = None) -> int:
if not self._rolled:
await checkpoint_if_cancelled()
return self._fp.truncate(size)
return await super().truncate(size)
@overload
async def write(self: SpooledTemporaryFile[bytes], b: ReadableBuffer) -> int: ...
@overload
async def write(self: SpooledTemporaryFile[str], b: str) -> int: ...
async def write(self, b: ReadableBuffer | str) -> int:
"""
Asynchronously write data to the spooled temporary file.
If the file has not yet been rolled over, the data is written synchronously,
and a rollover is triggered if the size exceeds the maximum size.
        :param b: The data to write.
:return: The number of bytes written.
:raises RuntimeError: If the underlying file is not initialized.
"""
if not self._rolled:
await checkpoint_if_cancelled()
result = self._fp.write(b)
await self._check()
return result
return await super().write(b) # type: ignore[misc]
@overload
async def writelines(
self: SpooledTemporaryFile[bytes], lines: Iterable[ReadableBuffer]
) -> None: ...
@overload
async def writelines(
self: SpooledTemporaryFile[str], lines: Iterable[str]
) -> None: ...
async def writelines(self, lines: Iterable[str] | Iterable[ReadableBuffer]) -> None:
"""
Asynchronously write a list of lines to the spooled temporary file.
If the file has not yet been rolled over, the lines are written synchronously,
and a rollover is triggered if the size exceeds the maximum size.
:param lines: An iterable of lines to write.
:raises RuntimeError: If the underlying file is not initialized.
"""
if not self._rolled:
await checkpoint_if_cancelled()
            self._fp.writelines(lines)
            await self._check()
            return
return await super().writelines(lines) # type: ignore[misc]
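# Editor's usage sketch (not part of the original source): writes stay in an
# in-memory buffer until max_size is exceeded, at which point the contents are
# rolled over to a real temporary file in a worker thread.
async def _spooled_example() -> None:
    f = SpooledTemporaryFile(max_size=16)
    await f.write(b"tiny")            # still in memory (4 bytes <= 16)
    await f.write(b"x" * 32)          # exceeds 16 bytes: triggers rollover
    await f.seek(0)
    assert len(await f.read()) == 36  # all data survived the rollover
    await f.aclose()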
class TemporaryDirectory(Generic[AnyStr]):
"""
An asynchronous temporary directory that is created and cleaned up automatically.
This class provides an asynchronous context manager for creating a temporary
directory. It wraps Python's standard :class:`~tempfile.TemporaryDirectory` to
perform directory creation and cleanup operations in a background thread.
:param suffix: Suffix to be added to the temporary directory name.
:param prefix: Prefix to be added to the temporary directory name.
:param dir: The parent directory where the temporary directory is created.
:param ignore_cleanup_errors: Whether to ignore errors during cleanup
(Python 3.10+).
:param delete: Whether to delete the directory upon closing (Python 3.12+).
"""
def __init__(
self,
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: AnyStr | None = None,
*,
ignore_cleanup_errors: bool = False,
delete: bool = True,
) -> None:
self.suffix: AnyStr | None = suffix
self.prefix: AnyStr | None = prefix
self.dir: AnyStr | None = dir
self.ignore_cleanup_errors = ignore_cleanup_errors
self.delete = delete
self._tempdir: tempfile.TemporaryDirectory | None = None
async def __aenter__(self) -> str:
params: dict[str, Any] = {
"suffix": self.suffix,
"prefix": self.prefix,
"dir": self.dir,
}
if sys.version_info >= (3, 10):
params["ignore_cleanup_errors"] = self.ignore_cleanup_errors
if sys.version_info >= (3, 12):
params["delete"] = self.delete
self._tempdir = await to_thread.run_sync(
lambda: tempfile.TemporaryDirectory(**params)
)
return await to_thread.run_sync(self._tempdir.__enter__)
async def __aexit__(
self,
exc_type: type[BaseException] | None,
exc_value: BaseException | None,
traceback: TracebackType | None,
) -> None:
if self._tempdir is not None:
await to_thread.run_sync(
self._tempdir.__exit__, exc_type, exc_value, traceback
)
async def cleanup(self) -> None:
if self._tempdir is not None:
await to_thread.run_sync(self._tempdir.cleanup)
@overload
async def mkstemp(
suffix: str | None = None,
prefix: str | None = None,
dir: str | None = None,
text: bool = False,
) -> tuple[int, str]: ...
@overload
async def mkstemp(
suffix: bytes | None = None,
prefix: bytes | None = None,
dir: bytes | None = None,
text: bool = False,
) -> tuple[int, bytes]: ...
async def mkstemp(
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: AnyStr | None = None,
text: bool = False,
) -> tuple[int, str | bytes]:
"""
Asynchronously create a temporary file and return an OS-level handle and the file
name.
    This function wraps :func:`tempfile.mkstemp` and executes it in a background
    thread.
:param suffix: Suffix to be added to the file name.
:param prefix: Prefix to be added to the file name.
:param dir: Directory in which the temporary file is created.
:param text: Whether the file is opened in text mode.
:return: A tuple containing the file descriptor and the file name.
"""
return await to_thread.run_sync(tempfile.mkstemp, suffix, prefix, dir, text)
@overload
async def mkdtemp(
suffix: str | None = None,
prefix: str | None = None,
dir: str | None = None,
) -> str: ...
@overload
async def mkdtemp(
suffix: bytes | None = None,
prefix: bytes | None = None,
dir: bytes | None = None,
) -> bytes: ...
async def mkdtemp(
suffix: AnyStr | None = None,
prefix: AnyStr | None = None,
dir: AnyStr | None = None,
) -> str | bytes:
"""
Asynchronously create a temporary directory and return its path.
    This function wraps :func:`tempfile.mkdtemp` and executes it in a background
    thread.
:param suffix: Suffix to be added to the directory name.
:param prefix: Prefix to be added to the directory name.
:param dir: Parent directory where the temporary directory is created.
:return: The path of the created temporary directory.
"""
return await to_thread.run_sync(tempfile.mkdtemp, suffix, prefix, dir)
async def gettempdir() -> str:
"""
Asynchronously return the name of the directory used for temporary files.
    This function wraps :func:`tempfile.gettempdir` and executes it in a
    background thread.
:return: The path of the temporary directory as a string.
"""
return await to_thread.run_sync(tempfile.gettempdir)
async def gettempdirb() -> bytes:
"""
Asynchronously return the name of the directory used for temporary files in bytes.
    This function wraps :func:`tempfile.gettempdirb` and executes it in a
    background thread.
:return: The path of the temporary directory as bytes.
"""
return await to_thread.run_sync(tempfile.gettempdirb)

View File

@@ -0,0 +1,78 @@
from __future__ import annotations
from collections.abc import Awaitable, Generator
from typing import Any, cast
from ._eventloop import get_async_backend
class TaskInfo:
"""
Represents an asynchronous task.
:ivar int id: the unique identifier of the task
:ivar parent_id: the identifier of the parent task, if any
:vartype parent_id: Optional[int]
:ivar str name: the description of the task (if any)
:ivar ~collections.abc.Coroutine coro: the coroutine object of the task
"""
__slots__ = "_name", "id", "parent_id", "name", "coro"
def __init__(
self,
id: int,
parent_id: int | None,
name: str | None,
coro: Generator[Any, Any, Any] | Awaitable[Any],
):
func = get_current_task
self._name = f"{func.__module__}.{func.__qualname__}"
self.id: int = id
self.parent_id: int | None = parent_id
self.name: str | None = name
self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro
def __eq__(self, other: object) -> bool:
if isinstance(other, TaskInfo):
return self.id == other.id
return NotImplemented
def __hash__(self) -> int:
return hash(self.id)
def __repr__(self) -> str:
return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})"
def has_pending_cancellation(self) -> bool:
"""
Return ``True`` if the task has a cancellation pending, ``False`` otherwise.
"""
return False
def get_current_task() -> TaskInfo:
"""
Return the current task.
:return: a representation of the current task
"""
return get_async_backend().get_current_task()
def get_running_tasks() -> list[TaskInfo]:
"""
Return a list of running tasks in the current event loop.
:return: a list of task info objects
"""
return cast("list[TaskInfo]", get_async_backend().get_running_tasks())
async def wait_all_tasks_blocked() -> None:
"""Wait until all other tasks are waiting for something."""
await get_async_backend().wait_all_tasks_blocked()
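# Editor's usage sketch (not part of the original source): every running task,
# including the current one, should show up in get_running_tasks(); TaskInfo
# compares by task id.
async def _introspection_example() -> None:
    me = get_current_task()
    print(me.id, me.name)
    assert me in get_running_tasks()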

View File

@@ -0,0 +1,81 @@
from __future__ import annotations
from collections.abc import Callable, Mapping
from typing import Any, TypeVar, final, overload
from ._exceptions import TypedAttributeLookupError
T_Attr = TypeVar("T_Attr")
T_Default = TypeVar("T_Default")
undefined = object()
def typed_attribute() -> Any:
"""Return a unique object, used to mark typed attributes."""
return object()
class TypedAttributeSet:
"""
Superclass for typed attribute collections.
Checks that every public attribute of every subclass has a type annotation.
"""
def __init_subclass__(cls) -> None:
annotations: dict[str, Any] = getattr(cls, "__annotations__", {})
for attrname in dir(cls):
if not attrname.startswith("_") and attrname not in annotations:
raise TypeError(
f"Attribute {attrname!r} is missing its type annotation"
)
super().__init_subclass__()
class TypedAttributeProvider:
"""Base class for classes that wish to provide typed extra attributes."""
@property
def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]:
"""
A mapping of the extra attributes to callables that return the corresponding
values.
If the provider wraps another provider, the attributes from that wrapper should
also be included in the returned mapping (but the wrapper may override the
callables from the wrapped instance).
"""
return {}
@overload
def extra(self, attribute: T_Attr) -> T_Attr: ...
@overload
def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default: ...
@final
def extra(self, attribute: Any, default: object = undefined) -> object:
"""
extra(attribute, default=undefined)
Return the value of the given typed extra attribute.
:param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to
look for
:param default: the value that should be returned if no value is found for the
attribute
:raises ~anyio.TypedAttributeLookupError: if the search failed and no default
value was given
"""
try:
getter = self.extra_attributes[attribute]
except KeyError:
if default is undefined:
raise TypedAttributeLookupError("Attribute not found") from None
else:
return default
return getter()
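# Editor's usage sketch (not part of the original source): a hypothetical
# attribute set and provider demonstrating lookup and default handling.
class _ExampleAttributes(TypedAttributeSet):
    color: str = typed_attribute()
    size: int = typed_attribute()

class _ExampleProvider(TypedAttributeProvider):
    @property
    def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
        # only `color` is provided; `size` lookups fall back to a default
        return {_ExampleAttributes.color: lambda: "red"}

# _ExampleProvider().extra(_ExampleAttributes.color) returns "red";
# _ExampleProvider().extra(_ExampleAttributes.size, 0) falls back to 0.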

View File

@@ -0,0 +1,58 @@
from __future__ import annotations
from ._eventloop import AsyncBackend as AsyncBackend
from ._resources import AsyncResource as AsyncResource
from ._sockets import ConnectedUDPSocket as ConnectedUDPSocket
from ._sockets import ConnectedUNIXDatagramSocket as ConnectedUNIXDatagramSocket
from ._sockets import IPAddressType as IPAddressType
from ._sockets import IPSockAddrType as IPSockAddrType
from ._sockets import SocketAttribute as SocketAttribute
from ._sockets import SocketListener as SocketListener
from ._sockets import SocketStream as SocketStream
from ._sockets import UDPPacketType as UDPPacketType
from ._sockets import UDPSocket as UDPSocket
from ._sockets import UNIXDatagramPacketType as UNIXDatagramPacketType
from ._sockets import UNIXDatagramSocket as UNIXDatagramSocket
from ._sockets import UNIXSocketStream as UNIXSocketStream
from ._streams import AnyByteReceiveStream as AnyByteReceiveStream
from ._streams import AnyByteSendStream as AnyByteSendStream
from ._streams import AnyByteStream as AnyByteStream
from ._streams import AnyByteStreamConnectable as AnyByteStreamConnectable
from ._streams import AnyUnreliableByteReceiveStream as AnyUnreliableByteReceiveStream
from ._streams import AnyUnreliableByteSendStream as AnyUnreliableByteSendStream
from ._streams import AnyUnreliableByteStream as AnyUnreliableByteStream
from ._streams import ByteReceiveStream as ByteReceiveStream
from ._streams import ByteSendStream as ByteSendStream
from ._streams import ByteStream as ByteStream
from ._streams import ByteStreamConnectable as ByteStreamConnectable
from ._streams import Listener as Listener
from ._streams import ObjectReceiveStream as ObjectReceiveStream
from ._streams import ObjectSendStream as ObjectSendStream
from ._streams import ObjectStream as ObjectStream
from ._streams import ObjectStreamConnectable as ObjectStreamConnectable
from ._streams import UnreliableObjectReceiveStream as UnreliableObjectReceiveStream
from ._streams import UnreliableObjectSendStream as UnreliableObjectSendStream
from ._streams import UnreliableObjectStream as UnreliableObjectStream
from ._subprocesses import Process as Process
from ._tasks import TaskGroup as TaskGroup
from ._tasks import TaskStatus as TaskStatus
from ._testing import TestRunner as TestRunner
# Re-exported here, for backwards compatibility
# isort: off
from .._core._synchronization import (
CapacityLimiter as CapacityLimiter,
Condition as Condition,
Event as Event,
Lock as Lock,
Semaphore as Semaphore,
)
from .._core._tasks import CancelScope as CancelScope
from ..from_thread import BlockingPortal as BlockingPortal
# Re-export imports so they look like they live directly in this package
for __value in list(locals().values()):
if getattr(__value, "__module__", "").startswith("anyio.abc."):
__value.__module__ = __name__
del __value
