 import shutil
 import tempfile
 import time
-import re
 
 from collections import defaultdict
 from typing import List
@@ -30,8 +29,6 @@
 import packageurl
 import requests
 
-from requests.auth import AuthBase, HTTPBasicAuth
-
 from bs4 import BeautifulSoup
 from commoncode import fileutils
 from commoncode.hash import multi_checksums
@@ -102,8 +99,8 @@
 
 """
 
-TRACE = True
-TRACE_DEEP = True
+TRACE = False
+TRACE_DEEP = False
 TRACE_ULTRA_DEEP = False
 
 # Supported environments
@@ -220,31 +217,6 @@ def get_python_dot_version(version):
 class DistributionNotFound(Exception):
     pass
 
-class HTTPUrlAuth(AuthBase):
-    """Modifies the URL to contain the credentials aka https://user:[email protected]"""
-    def __init__(self, username, password):
-        self.username = username
-        self.password = password
-
-    def __eq__(self, other):
-        return all(
-            [
-                self.username == getattr(other, "username", None),
-                self.password == getattr(other, "password", None),
-            ]
-        )
-
-    def __ne__(self, other):
-        return not self == other
-
-    def __call__(self, r):
-        url_re = re.compile("(https?://)(.*)")
-        match = url_re.search(r.url)
-        if match:
-            url = f"{match.group(1)}{self.username}:{self.password}@{match.group(2)}"
-            r.url = url
-        return r
-
 def download_wheel(
     name,
     version,
@@ -281,6 +253,7 @@ def download_wheel(
                 )
             continue
         for wheel in supported_and_valid_wheels:
+            wheel.credentials = repo.credentials
             fetched_wheel_filename = wheel.download(
                 dest_dir=dest_dir,
                 verbose=verbose,
@@ -653,10 +626,6 @@ def get_best_download_url(self, repos=tuple()):
 
         for repo in repos:
             package = repo.get_package_version(name=self.name, version=self.version)
-            if TRACE:
-                print(f"Repo: {repo=}")
-                print(f"get_best_download_url: {self.name=} and {self.version=}")
-                print(f"{repo.index_url=}")
             if not package:
                 if TRACE:
                     print(
@@ -693,9 +662,6 @@ def download(
             )
 
         # FIXME:
-        if TRACE:
-            print(f"utils_pypi.py -> download() => {self.path_or_url=}")
-            print(f"utils_pypi.py -> download() => {dest_dir=}")
         fetch_and_save(
             path_or_url=self.path_or_url,
             dest_dir=dest_dir,
@@ -1153,7 +1119,7 @@ def is_supported_by_tags(self, tags):
         """
         Return True is this wheel is compatible with one of a list of PEP 425 tags.
         """
-        if TRACE_ULTRA_DEEP:
+        if TRACE_DEEP:
             print()
             print("is_supported_by_tags: tags:", tags)
             print("self.tags:", self.tags)
@@ -1169,8 +1135,6 @@ def to_filename(self):
         abis = ".".join(self.abis)
         plats = ".".join(self.platforms)
         name = f"{self.name}-{self.version}{build}-{pyvers}-{abis}-{plats}.whl"
-        if TRACE:
-            print(f"The wheel filename is: {name=}")
         return name
 
     def is_pure(self):
@@ -1634,18 +1598,6 @@ def fetch_links(
         name using the `index_url` of this repository.
         """
         package_url = f"{self.index_url}/{normalized_name}"
-        if TRACE:
-            print(f"utils_pypi.py -> fetch_links() => {package_url=}")
-        # if len(package_url) >= 256:
-        #     base64_re = re.compile(f"https://(.*:.*)@(.*){normalized_name}")
-        #     match = base64_re.search(self.index_url)
-        #     if match:
-        #         auth = match.group(1)
-        #         username = auth.split(":")[0]
-        #         token = auth.split(":")[1]
-        #         remainder = match.group(2)
-        #         new_index_url = f"https://{username}:auth_token@{remainder}"
-        #         package_url = f"{new_index_url}/{normalized_name}"
         text = CACHE.get(
             path_or_url=package_url,
             credentials=self.credentials,
@@ -1688,7 +1640,10 @@ def resolve_relative_url(package_url, url):
             path = urlunparse(
                 ("", "", url_parts.path, url_parts.params, url_parts.query, url_parts.fragment)
             )
-            resolved_url_parts = base_url_parts._replace(path=path)
+            if base_url_parts.path != "":
+                resolved_url_parts = base_url_parts._replace(path=base_url_parts.path + "/" + path)
+            else:
+                resolved_url_parts = base_url_parts._replace(path=path)
             url = urlunparse(resolved_url_parts)
     return url
 
@@ -1731,15 +1686,13 @@ def get(
         True otherwise as treat as binary. `path_or_url` can be a path or a URL
         to a file.
         """
+
+
         cache_key = quote_plus(path_or_url.strip("/"))
         cached = os.path.join(self.directory, cache_key)
-        if TRACE:
-            print(f"Cache.get() => {path_or_url=}")
-            print(f"Cache.get() => {cache_key=}")
-            print(f"Cache.get() => {cached=}")
 
         if force or not os.path.exists(cached):
-            if TRACE:
+            if TRACE_DEEP:
                 print(f" FILE CACHE MISS: {path_or_url}")
             content = get_file_content(
                 path_or_url=path_or_url,
@@ -1753,7 +1706,7 @@ def get(
                 fo.write(content)
             return content
         else:
-            if TRACE:
+            if TRACE_DEEP:
                 print(f" FILE CACHE HIT: {path_or_url}")
             return get_local_file_content(path=cached, as_text=as_text)
 
@@ -1773,7 +1726,7 @@ def get_file_content(
     remote URL. Return the content as bytes is `as_text` is False.
     """
     if path_or_url.startswith("https://"):
-        if TRACE:
+        if TRACE_DEEP:
             print(f"Fetching: {path_or_url}")
         _headers, content = get_remote_file_content(
             url=path_or_url,
@@ -1839,13 +1792,11 @@ def get_remote_file_content(
     if verbose:
         echo_func(f"DOWNLOADING: {url}")
 
-    # if credentials and not change_auth:
-    #     auth = HTTPUrlAuth(credentials.get("login"), credentials.get("password"))
-    # elif credentials and change_auth:
-    #     auth = HTTPBasicAuth(credentials.get("login"), credentials.get("password"))
+    if TRACE:
+        print(f"DOWNLOADING: {url}")
 
     if credentials:
-        auth = HTTPBasicAuth(credentials.get("login"), credentials.get("password"))
+        auth = (credentials.get("login"), credentials.get("password"))
     else:
         auth = None
 
@@ -1859,15 +1810,8 @@ def get_remote_file_content(
 
     with stream as response:
         status = response.status_code
-        if TRACE:
-            print(f"The status is {status}")
-            print(f"The full response is: {response}")
-            print(f"The response url is: {response.url}")
-            # print(f"The original prepared request headers are: {response.request.headers}")
         if status != requests.codes.ok:  # NOQA
             if status == 429 and _delay < 20:
-                if TRACE:
-                    print("Delay exponentially...")
                 # too many requests: start some exponential delay
                 increased_delay = (_delay * 2) or 1
 
@@ -1903,8 +1847,6 @@ def fetch_and_save(
     errors. Treats the content as text if as_text is True otherwise as treat as
     binary.
     """
-    if TRACE:
-        print(f"fetch_and_save() => {path_or_url=}")
     content = CACHE.get(
         path_or_url=path_or_url,
         credentials=credentials,
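
Note on the basic-auth change in the last hunks: requests treats a plain (login, password) 2-tuple passed as `auth` as HTTP Basic authentication, equivalent to requests.auth.HTTPBasicAuth, so the explicit import is no longer needed. A minimal sketch, using a hypothetical index URL and credentials dict (placeholders, not values from this patch):

    import requests

    # Hypothetical credentials dict shaped like the `credentials` used above.
    credentials = {"login": "user", "password": "secret"}

    # A 2-tuple is applied by requests as HTTP Basic auth,
    # the same as HTTPBasicAuth(login, password).
    auth = (credentials.get("login"), credentials.get("password"))
    response = requests.get("https://pypi.example.org/simple/foo/", auth=auth, timeout=10)
    response.raise_for_status()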