Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit e927ee4

Browse files
authored
Merge pull request #29002 from charris/fix-download-wheels
MAINT: Update download-wheels for multiple pages
2 parents 8d722b8 + c0e2559 commit e927ee4
Copy full SHA for e927ee4

File tree

1 file changed

+15
-12
lines changed
Filter options

1 file changed

+15
-12
lines changed

‎tools/download-wheels.py

Copy file name to clipboard · Expand all lines: tools/download-wheels.py

+15 −12 lines changed: 15 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -31,10 +31,18 @@
3131
import urllib3
3232
from bs4 import BeautifulSoup
3333

34-
__version__ = "0.1"
34+
__version__ = "0.2"
3535

3636
# Edit these for other projects.
37-
STAGING_URL = "https://anaconda.org/multibuild-wheels-staging/numpy"
37+
38+
# The first URL is used to get the file names as it avoids the need for paging
39+
# when the number of files exceeds the page length. Note that files/page is not
40+
# stable and can change when the page layout changes. The second URL is used to
41+
# retrieve the files themselves. This workaround is copied from SciPy.
42+
NAMES_URL = "https://pypi.anaconda.org/multibuild-wheels-staging/simple/numpy/"
43+
FILES_URL = "https://anaconda.org/multibuild-wheels-staging/numpy"
44+
45+
# Name prefix of the files to download.
3846
PREFIX = "numpy"
3947

4048
# Name endings of the files to download.
@@ -56,17 +64,12 @@ def get_wheel_names(version):
5664
The release version. For instance, "1.18.3".
5765
5866
"""
59-
ret = []
6067
http = urllib3.PoolManager(cert_reqs="CERT_REQUIRED")
6168
tmpl = re.compile(rf"^.*{PREFIX}-{version}{SUFFIX}")
62-
# TODO: generalize this by searching for `showing 1 of N` and
63-
# looping over N pages, starting from 1
64-
for i in range(1, 3):
65-
index_url = f"{STAGING_URL}/files?page={i}"
66-
index_html = http.request("GET", index_url)
67-
soup = BeautifulSoup(index_html.data, "html.parser")
68-
ret += soup.find_all(string=tmpl)
69-
return ret
69+
index_url = f"{NAMES_URL}"
70+
index_html = http.request('GET', index_url)
71+
soup = BeautifulSoup(index_html.data, 'html.parser')
72+
return sorted(soup.find_all(string=tmpl))
7073

7174

7275
def download_wheels(version, wheelhouse, test=False):
@@ -87,7 +90,7 @@ def download_wheels(version, wheelhouse, test=False):
8790
wheel_names = get_wheel_names(version)
8891

8992
for i, wheel_name in enumerate(wheel_names):
90-
wheel_url = f"{STAGING_URL}/{version}/download/{wheel_name}"
93+
wheel_url = f"{FILES_URL}/{version}/download/{wheel_name}"
9194
wheel_path = os.path.join(wheelhouse, wheel_name)
9295
with open(wheel_path, "wb") as f:
9396
with http.request("GET", wheel_url, preload_content=False,) as r:

0 commit comments

Comments
0 (0)
Morty Proxy This is a proxified and sanitized view of the page, visit original site.