@@ -1,12 +1,15 @@
+import os
+from pathlib import Path
 import platform
-from typing import Any, Iterator, List, Optional, Tuple, Union
+from typing import Any, AnyStr, Iterator, List, Optional, Tuple, Union
 from warnings import warn
 from packaging.utils import canonicalize_name as normalize
 import requests
 from . import PYPI_SIMPLE_ENDPOINT, __url__, __version__
 from .classes import DistributionPackage, IndexPage, ProjectPage
 from .parse_repo import parse_repo_index_response, parse_repo_project_response
 from .parse_stream import parse_links_stream_response
+from .util import AbstractDigestChecker, DigestChecker, NullDigestChecker
 
 #: The User-Agent header used for requests; not used when the user provides eir
 #: own session object
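The new .util import brings in the digest-checking helpers that download_package (added in the next hunk) feeds each downloaded chunk into. Their implementation is not part of this diff; the following is a minimal sketch based only on the interface visible here (an update()/finalize() pair, a DigestChecker(pkg.digests) constructor, and the NoDigestsError/DigestMismatchError exceptions named in the docstring below), plus the assumption that verification is done with hashlib. The real classes in pypi_simple.util may differ.

# Hypothetical sketch of the digest-checker interface that download_package()
# relies on; the real classes in pypi_simple.util may differ from this.
import hashlib
from typing import Dict


class NoDigestsError(Exception):
    """No digests with known algorithms were available for verification."""


class DigestMismatchError(Exception):
    """A computed digest did not match the expected value."""


class AbstractDigestChecker:
    def update(self, blob: bytes) -> None:
        raise NotImplementedError

    def finalize(self) -> None:
        raise NotImplementedError


class NullDigestChecker(AbstractDigestChecker):
    # Used when verify=False: accept everything, check nothing.
    def update(self, blob: bytes) -> None:
        pass

    def finalize(self) -> None:
        pass


class DigestChecker(AbstractDigestChecker):
    # Feed every chunk to one hasher per known algorithm, compare at the end.
    def __init__(self, digests: Dict[str, str]) -> None:
        self.digesters = {
            alg: hashlib.new(alg)
            for alg in digests
            if alg in hashlib.algorithms_available
        }
        self.expected = {alg: digests[alg] for alg in self.digesters}
        if not self.digesters:
            raise NoDigestsError("no digests with known algorithms available")

    def update(self, blob: bytes) -> None:
        for d in self.digesters.values():
            d.update(blob)

    def finalize(self) -> None:
        for alg, d in self.digesters.items():
            actual = d.hexdigest()
            if actual != self.expected[alg]:
                raise DigestMismatchError(
                    f"{alg} digest mismatch: expected {self.expected[alg]}, got {actual}"
                )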
@@ -206,6 +209,65 @@ def get_project_url(self, project: str) -> str:
         """
         return self.endpoint + normalize(project) + "/"
 
+    def download_package(
+        self,
+        pkg: DistributionPackage,
+        path: Union[AnyStr, "os.PathLike[AnyStr]"],
+        verify: bool = True,
+        keep_on_error: bool = False,
+        timeout: Union[float, Tuple[float, float], None] = None,
+    ) -> None:
+        """
+        .. versionadded:: 0.10.0
+
+        Download the given `DistributionPackage` to the given path.
+
+        If an error occurs while downloading or verifying digests, and
+        ``keep_on_error`` is not true, the downloaded file is not saved.
+
+        :param DistributionPackage pkg: the distribution package to download
+        :param path:
+            the path at which to save the downloaded file; any parent
+            directories of this path will be created as needed
+        :param bool verify:
+            whether to verify the package's digests against the downloaded file
+        :param bool keep_on_error:
+            whether to keep (true) or delete (false) the downloaded file if an
+            error occurs
+        :param timeout: optional timeout to pass to the ``requests`` call
+        :type timeout: Union[float, Tuple[float,float], None]
+        :raises requests.HTTPError: if the repository responds with an HTTP
+            error code
+        :raises NoDigestsError:
+            if ``verify`` is true and the given package does not have any
+            digests with known algorithms
+        :raises DigestMismatchError:
+            if ``verify`` is true and the digest of the downloaded file does
+            not match the expected value
+        """
+        target = Path(os.fsdecode(path))
+        target.parent.mkdir(parents=True, exist_ok=True)
+        digester: AbstractDigestChecker
+        if verify:
+            digester = DigestChecker(pkg.digests)
+        else:
+            digester = NullDigestChecker()
+        with self.s.get(pkg.url, stream=True, timeout=timeout) as r:
+            r.raise_for_status()
+            try:
+                with target.open("wb") as fp:
+                    for chunk in r.iter_content(65535):
+                        fp.write(chunk)
+                        digester.update(chunk)
+                digester.finalize()
+            except Exception:
+                if not keep_on_error:
+                    try:
+                        target.unlink()
+                    except FileNotFoundError:
+                        pass
+                raise
+
     def get_projects(self) -> Iterator[str]:
         """
         Returns a generator of names of projects available in the repository.
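As a usage illustration, the sketch below downloads one file with the new method and verification enabled. It assumes the surrounding client API (the PyPISimple class, get_project_page(), and the filename attribute of DistributionPackage), none of which is shown in this diff; the exception types from the docstring are only named in a comment because their import location is not visible here either.

# Hypothetical usage of the new download_package() method; the client API used
# around it (PyPISimple, get_project_page, DistributionPackage.filename) is an
# assumption, not something this diff confirms.
from pypi_simple import PyPISimple

client = PyPISimple()
page = client.get_project_page("requests")
pkg = page.packages[-1]  # pick one DistributionPackage from the project page

try:
    client.download_package(
        pkg,
        path=f"downloads/{pkg.filename}",  # parent directories are created as needed
        verify=True,                       # check the download against pkg.digests
        timeout=(3.05, 30),                # (connect, read) timeout passed to requests
    )
except Exception as exc:  # e.g. requests.HTTPError, NoDigestsError, DigestMismatchError
    print(f"download failed: {exc}")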