import nox  # pylint: disable=import-error

-debugpy_urls = {
+DEBUGPY_WHEEL_URLS = {
    "any": {
        "url": "https://files.pythonhosted.org/packages/39/2f/c8a8cfac6c7fa3d9e163a6bf46e6d27d027b7a1331028e99a6ef7fd3699d/debugpy-1.7.0-py2.py3-none-any.whl",
-        "hash": "f6de2e6f24f62969e0f0ef682d78c98161c4dca29e9fb05df4d2989005005502",
+        "hash": (
+            "sha256",
+            "f6de2e6f24f62969e0f0ef682d78c98161c4dca29e9fb05df4d2989005005502",
+        ),
    },
    "macOS": {
        "url": "https://files.pythonhosted.org/packages/bd/a3/5e37ce13c7dd850b72a52be544a058ed49606ebbbf8b95b2ba3c1db5620a/debugpy-1.7.0-cp311-cp311-macosx_11_0_universal2.whl",
-        "hash": "538765a41198aa88cc089295b39c7322dd598f9ef1d52eaae12145c63bf9430a",
+        "hash": (
+            "sha256",
+            "538765a41198aa88cc089295b39c7322dd598f9ef1d52eaae12145c63bf9430a",
+        ),
    },
    "linux": {
        "url": "https://files.pythonhosted.org/packages/b4/fc/087324d46dab8e21e084ce2cf670fa7e524ab5e7691692438e4987bd3ecb/debugpy-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
-        "hash": "c7e8cf91f8f3f9b5fad844dd88427b85d398bda1e2a0cd65d5a21312fcbc0c6f",
+        "hash": (
+            "sha256",
+            "c7e8cf91f8f3f9b5fad844dd88427b85d398bda1e2a0cd65d5a21312fcbc0c6f",
+        ),
    },
    "win32": {
        "url": "https://files.pythonhosted.org/packages/52/59/3591e9f709b7ee4d3a926a8903a395669cd0e0279204a94b6acccf6ed6ee/debugpy-1.7.0-cp311-cp311-win32.whl",
-        "hash": "18a69f8e142a716310dd0af6d7db08992aed99e2606108732efde101e7c65e2a",
+        "hash": (
+            "sha256",
+            "18a69f8e142a716310dd0af6d7db08992aed99e2606108732efde101e7c65e2a",
+        ),
    },
    "win64": {
        "url": "https://files.pythonhosted.org/packages/51/59/84ebd58d3e9de33a54ca8aa4532e03906e5458092dafe240264c2937a99b/debugpy-1.7.0-cp311-cp311-win_amd64.whl",
-        "hash": "7515a5ba5ee9bfe956685909c5f28734c1cecd4ee813523363acfe3ca824883a",
+        "hash": (
+            "sha256",
+            "7515a5ba5ee9bfe956685909c5f28734c1cecd4ee813523363acfe3ca824883a",
+        ),
    },
}
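
Note: each wheel entry above now pairs the download URL with its checksum as an (algorithm, hexdigest) tuple; download_url later reads the digest from value["hash"][1]. If the pinned debugpy version is ever bumped, those digests must be regenerated. A minimal sketch of doing that, assuming it runs next to the table in this noxfile (the _pin_hash helper is illustrative and not part of the commit):

    import hashlib
    import urllib.request as url_lib

    def _pin_hash(url: str) -> tuple:
        # Download the wheel and return a ("sha256", hexdigest) pair in the
        # same shape as the entries in DEBUGPY_WHEEL_URLS.
        with url_lib.urlopen(url) as response:
            return ("sha256", hashlib.sha256(response.read()).hexdigest())

    # Example: recompute the pin for the pure-Python fallback wheel.
    print(_pin_hash(DEBUGPY_WHEEL_URLS["any"]["url"]))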


-def _install_bundle(session: nox.Session) -> None:
-    session.install(
-        "-t",
-        "./bundled/libs",
-        "--no-cache-dir",
-        "--implementation",
-        "py",
-        "--no-deps",
-        "--upgrade",
-        "-r",
-        "./requirements.txt",
-    )
-    session.install("packaging")
-
-
-def _update_pip_packages(session: nox.Session) -> None:
-    session.run("pip-compile", "--generate-hashes", "--upgrade", "./requirements.in")
-
-
@nox.session()
def lint(session: nox.Session) -> None:
    """Runs linter and formatter checks on python files."""
@@ -81,152 +77,50 @@ def tests(session: nox.Session) -> None:
    session.run("npm", "run", "test")


-def _get_package_data(package):
-    json_uri = f"https://registry.npmjs.org/{package}"
-    with url_lib.urlopen(json_uri) as response:
-        return json.loads(response.read())
-
-
-def _update_npm_packages(session: nox.Session) -> None:
-    pinned = {
-        "vscode-languageclient",
-        "@types/vscode",
-        "@types/node",
-    }
-    package_json_path = pathlib.Path(__file__).parent / "package.json"
-    package_json = json.loads(package_json_path.read_text(encoding="utf-8"))
-
-    for package in package_json["dependencies"]:
-        if package not in pinned:
-            data = _get_package_data(package)
-            latest = "^" + data["dist-tags"]["latest"]
-            package_json["dependencies"][package] = latest
-
-    for package in package_json["devDependencies"]:
-        if package not in pinned:
-            data = _get_package_data(package)
-            latest = "^" + data["dist-tags"]["latest"]
-            package_json["devDependencies"][package] = latest
-
-    # Ensure engine matches the package
-    if (
-        package_json["engines"]["vscode"]
-        != package_json["devDependencies"]["@types/vscode"]
-    ):
-        print(
-            "Please check VS Code engine version and @types/vscode version in package.json."
-        )
-
-    new_package_json = json.dumps(package_json, indent=4)
-    # JSON dumps uses \n for line ending on all platforms by default
-    if not new_package_json.endswith("\n"):
-        new_package_json += "\n"
-    package_json_path.write_text(new_package_json, encoding="utf-8")
-
-    session.run("npm", "audit", "fix", external=True)
-    session.run("npm", "install", external=True)
-
-
-def _setup_template_environment(session: nox.Session) -> None:
-    session.install("wheel", "pip-tools")
-    _update_pip_packages(session)
-    _install_bundle(session)
-
-
-@nox.session(python="3.7")
+@nox.session()
def install_bundled_libs(session):
-    # Install debugpy by url and platform
+    # Install debugpy by URL and platform
    """Installs the libraries that will be bundled with the extension."""
    session.install("wheel")
-    _install_bundle(session)
+    session.install(
+        "-t",
+        "./bundled/libs",
+        "--no-cache-dir",
+        "--implementation",
+        "py",
+        "--no-deps",
+        "--upgrade",
+        "-r",
+        "./requirements.txt",
+    )
+    session.install("packaging")

    target = os.environ.get("VSCETARGET", "")
    print("target:", target)
    if "darwin" in target:
-        download_url(f"{os.getcwd()}/bundled/libs", debugpy_urls["macOS"])
+        download_url(DEBUGPY_WHEEL_URLS["macOS"])
    elif "win32-ia32" == target:
-        download_url(f"{os.getcwd()}/bundled/libs", debugpy_urls["win32"])
+        download_url(DEBUGPY_WHEEL_URLS["win32"])
    elif "win32-x64" == target:
-        download_url(f"{os.getcwd()}/bundled/libs", debugpy_urls["win64"])
+        download_url(DEBUGPY_WHEEL_URLS["win64"])
    elif "linux-x64" == target:
-        download_url(f"{os.getcwd()}/bundled/libs", debugpy_urls["linux"])
+        download_url(DEBUGPY_WHEEL_URLS["linux"])
    else:
-        download_url(f"{os.getcwd()}/bundled/libs", debugpy_urls["any"])
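
Note: the if/elif chain above maps the CI's VSCETARGET string onto a key in DEBUGPY_WHEEL_URLS, falling back to the pure-Python "any" wheel. A table-driven equivalent, shown only as a sketch (this helper is not part of the commit):

    def _wheel_for_target(target: str) -> dict:
        # "darwin" is matched as a substring (e.g. darwin-x64, darwin-arm64),
        # so it is handled separately from the exact-match targets.
        if "darwin" in target:
            return DEBUGPY_WHEEL_URLS["macOS"]
        exact = {
            "win32-ia32": "win32",
            "win32-x64": "win64",
            "linux-x64": "linux",
        }
        return DEBUGPY_WHEEL_URLS[exact.get(target, "any")]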


-@nox.session(python="3.7")
-def setup(session: nox.Session) -> None:
-    """Sets up the extension for development."""
-    _setup_template_environment(session)
-
-
-@nox.session()
-def update_packages(session: nox.Session) -> None:
-    """Update pip and npm packages."""
-    session.install("wheel", "pip-tools")
-    _update_pip_packages(session)
-    _update_npm_packages(session)
-
-
-def _contains(s, parts=()):
-    return any(p for p in parts if p in s)
-
-
-def _get_pypi_package_data(package_name):
-    json_uri = "https://pypi.org/pypi/{0}/json".format(package_name)
-    # Response format: https://warehouse.readthedocs.io/api-reference/json/#project
-    # Release metadata format: https://github.com/pypa/interoperability-peps/blob/master/pep-0426-core-metadata.rst
-    with url_lib.urlopen(json_uri) as response:
-        return json.loads(response.read())
-
-
-def _get_urls(data, version):
-    return list(
-        r["url"] for r in data["releases"][version] if _contains(r["url"], ("cp310",))
-    )
-
-
-def download_url(root, value):
+def download_url(value):
    with url_lib.urlopen(value["url"]) as response:
        data = response.read()
-        if hashlib.sha256(data).hexdigest() == value["hash"]:
-            print("Download: ", value["url"])
-            with zipfile.ZipFile(io.BytesIO(data), "r") as wheel:
-                for zip_info in wheel.infolist():
-                    # Ignore dist info since we are merging multiple wheels
-                    if ".dist-info/" in zip_info.filename:
-                        continue
-                    print("\t" + zip_info.filename)
-                    wheel.extract(zip_info.filename, root)
-
-
-def _download_and_extract(root, url):
-    if "manylinux" in url or "macosx" in url or "win_amd64" in url:
-        root = os.getcwd() if root is None or root == "." else root
-        print(url)
-        with url_lib.urlopen(url) as response:
-            data = response.read()
-            with zipfile.ZipFile(io.BytesIO(data), "r") as wheel:
-                for zip_info in wheel.infolist():
-                    # Ignore dist info since we are merging multiple wheels
-                    if ".dist-info/" in zip_info.filename:
-                        continue
-                    print("\t" + zip_info.filename)
-                    wheel.extract(zip_info.filename, root)
-
-
-def _install_package(root, package_name, version="latest"):
-    from packaging.version import parse as version_parser
-
-    data = _get_pypi_package_data(package_name)
-
-    if version == "latest":
-        use_version = max(data["releases"].keys(), key=version_parser)
-    else:
-        use_version = version
-
-    for url in _get_urls(data, use_version):
-        _download_and_extract(root, url)
+        if hashlib.sha256(data).hexdigest() != value["hash"][1]:
+            raise Exception("Failed hash verification for {}.".format(value["url"]))
+        print("Download: ", value["url"])
+        with zipfile.ZipFile(io.BytesIO(data), "r") as wheel:
+            for zip_info in wheel.infolist():
+                print("\t" + zip_info.filename)
+                wheel.extract(
+                    zip_info.filename, pathlib.Path.cwd() / "bundled" / "libs"
+                )
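
Note: the rewritten download_url hard-codes SHA-256 and only consumes the digest half of the tuple (value["hash"][1]); it also extracts every member, including the .dist-info folders the old code skipped when merging several wheels. A sketch of letting the algorithm name in the tuple drive the verification instead (illustrative only, not what the commit does):

    import hashlib

    def _verify(data: bytes, expected) -> None:
        # expected is an (algorithm, hexdigest) pair such as ("sha256", "f6de…").
        algorithm, digest = expected
        if hashlib.new(algorithm, data).hexdigest() != digest:
            raise ValueError("Failed hash verification ({}).".format(algorithm))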


@nox.session()