1
1
# Copyright (c) Microsoft Corporation. All rights reserved.
2
2
# Licensed under the MIT License.
3
3
"""All the action we need during build"""
4
+ import hashlib
4
5
import io
5
6
import json
6
7
import os
12
13
import nox # pylint: disable=import-error
13
14
14
15
15
def _install_bundle(session: nox.Session, version="latest") -> None:
    """Install the pinned requirements into ./bundled/libs.

    Pure-python wheels only (no deps, `py` implementation); debugpy is
    fetched separately because it ships platform-specific wheels.
    """
    pip_args = [
        "-t",
        "./bundled/libs",
        "--no-cache-dir",
        "--implementation",
        "py",
        "--no-deps",
        "--upgrade",
        "-r",
        "./requirements.txt",
    ]
    session.install(*pip_args)
    # `packaging` is needed by _install_package for version parsing.
    session.install("packaging")
    _install_package(f"{os.getcwd()}/bundled/libs", "debugpy", "1.7.0")
29
-
30
-
31
def _update_pip_packages(session: nox.Session) -> None:
    """Regenerate requirements.txt (with hashes) from requirements.in."""
    compile_args = ("pip-compile", "--generate-hashes", "--upgrade", "./requirements.in")
    session.run(*compile_args)
33
-
34
-
35
16
@nox .session ()
36
17
def lint (session : nox .Session ) -> None :
37
18
"""Runs linter and formatter checks on python files."""
@@ -58,88 +39,80 @@ def tests(session: nox.Session) -> None:
58
39
session .run ("npm" , "run" , "test" )
59
40
60
41
61
def _get_package_data(package):
    """Fetch and decode a package's metadata from the npm registry."""
    registry_uri = f"https://registry.npmjs.org/{package}"
    with url_lib.urlopen(registry_uri) as response:
        raw = response.read()
    return json.loads(raw)
65
-
66
-
67
def _bump_unpinned(dependencies, pinned) -> None:
    """Point every dependency not in *pinned* at its latest npm version."""
    for package in dependencies:
        if package not in pinned:
            data = _get_package_data(package)
            dependencies[package] = "^" + data["dist-tags"]["latest"]


def _update_npm_packages(session: nox.Session) -> None:
    """Update npm dependencies in package.json to their latest versions.

    Pinned packages are left untouched; after rewriting package.json the
    session runs `npm audit fix` and `npm install` to refresh the lockfile.
    """
    pinned = {
        "vscode-languageclient",
        "@types/vscode",
        "@types/node",
    }
    package_json_path = pathlib.Path(__file__).parent / "package.json"
    package_json = json.loads(package_json_path.read_text(encoding="utf-8"))

    # Same bump logic for both dependency sections (was duplicated inline).
    _bump_unpinned(package_json["dependencies"], pinned)
    _bump_unpinned(package_json["devDependencies"], pinned)

    # Ensure engine matches the package
    if (
        package_json["engines"]["vscode"]
        != package_json["devDependencies"]["@types/vscode"]
    ):
        print(
            "Please check VS Code engine version and @types/vscode version in package.json."
        )

    new_package_json = json.dumps(package_json, indent=4)
    # JSON dumps uses \n for line ending on all platforms by default
    if not new_package_json.endswith("\n"):
        new_package_json += "\n"
    package_json_path.write_text(new_package_json, encoding="utf-8")

    session.run("npm", "audit", "fix", external=True)
    session.run("npm", "install", external=True)
105
-
106
-
107
def _setup_template_environment(session: nox.Session) -> None:
    """Prepare a dev environment: build tools, refreshed pins, bundled libs."""
    # pip-tools provides pip-compile for _update_pip_packages.
    session.install("wheel", "pip-tools")
    _update_pip_packages(session)
    _install_bundle(session)
111
-
112
-
113
- @nox .session (python = "3.7" )
42
@nox.session()
def install_bundled_libs(session):
    """Installs the libraries that will be bundled with the extension."""
    session.install("wheel")
    # Hash-checked, binary-only install of the pinned pure-python deps.
    session.install(
        "-t",
        "./bundled/libs",
        "--no-cache-dir",
        "--implementation",
        "py",
        "--no-deps",
        "--require-hashes",
        "--only-binary",
        ":all:",
        "-r",
        "./requirements.txt",
    )
    session.install("packaging")

    info_path = pathlib.Path(__file__).parent / "debugpy_info.json"
    debugpy_info = json.loads(info_path.read_text(encoding="utf-8"))

    target = os.environ.get("VSCETARGET", "")
    print("target:", target)
    # Select the debugpy wheel matching the VSCE packaging target;
    # anything unrecognized falls back to the universal wheel.
    if "darwin" in target:
        key = "macOS"
    elif target == "win32-ia32":
        key = "win32"
    elif target == "win32-x64":
        key = "win64"
    elif target == "linux-x64":
        key = "linux"
    else:
        key = "any"
    download_url(debugpy_info[key])
119
76
120
@nox.session(python="3.6")
def install_old_bundled_libs(session):
    """Installs the libraries that will be bundled with the extension."""
    session.install("wheel")
    # 1.5.1 is the last debugpy release supporting this older Python.
    _install_bundle(session, "1.5.1")
125
77
78
def download_url(value):
    """Download, verify, and extract a debugpy wheel into ./bundled/libs.

    *value* is one entry from debugpy_info.json:
    ``{"url": <wheel url>, "hash": {<algorithm>: <hexdigest>}}``.

    Raises ValueError when the downloaded bytes do not match the recorded hash.
    """
    url = value["url"]
    with url_lib.urlopen(url) as response:
        data = response.read()

    # The "hash" mapping holds exactly one algorithm -> hexdigest entry.
    # next(iter(...)) takes it without building a throwaway list and without
    # shadowing `value`, which the original comprehension variables did.
    hash_algorithm, hash_value = next(iter(value["hash"].items()))
    if hashlib.new(hash_algorithm, data).hexdigest() != hash_value:
        raise ValueError("Failed hash verification for {}.".format(url))

    print("Download: ", url)
    with zipfile.ZipFile(io.BytesIO(data), "r") as wheel:
        libs_dir = pathlib.Path.cwd() / "bundled" / "libs"
        for zip_info in wheel.infolist():
            print("\t" + zip_info.filename)
            wheel.extract(zip_info.filename, libs_dir)
131
93
132
94
133
95
@nox .session ()
134
def update_packages(session: nox.Session) -> None:
    """Update pip and npm packages."""
    session.install("wheel", "pip-tools")
    # pip first (regenerates requirements.txt), then npm (package.json).
    for refresh in (_update_pip_packages, _update_npm_packages):
        refresh(session)
96
@nox.session()
def update_build_number(session: nox.Session) -> None:
    """Updates build number for the extension.

    Takes the new build number as the first positional argument, keeps the
    MAJOR.MINOR part of the version in package.json, replaces the patch
    segment, and preserves any pre-release suffix (e.g. ``-alpha``).
    """
    if not len(session.posargs):
        session.log("No updates to package version")
        return

    package_json_path = pathlib.Path(__file__).parent / "package.json"
    session.log(f"Reading package.json at: {package_json_path}")

    package_json = json.loads(package_json_path.read_text(encoding="utf-8"))

    # Version shape: MAJOR.MINOR.PATCH[-SUFFIX]; split on "." and "-".
    parts = re.split(r"\.|-", package_json["version"])
    major, minor = parts[:2]

    version = f"{major}.{minor}.{session.posargs[0]}"
    # Re-attach a suffix only when one exists: the previous
    # `len(parts) == 3` check appended a dangling "-" for versions with
    # fewer than three segments.
    if len(parts) > 3:
        version = f"{version}-{''.join(parts[3:])}"

    session.log(f"Updating version from {package_json['version']} to {version}")
    package_json["version"] = version
    package_json_path.write_text(json.dumps(package_json, indent=4), encoding="utf-8")
143
116
144
117
145
118
def _get_pypi_package_data (package_name ):
@@ -150,59 +123,34 @@ def _get_pypi_package_data(package_name):
150
123
return json .loads (response .read ())
151
124
152
125
153
def _get_urls(data, version):
    """Return wheel URLs for *version* whose filename targets CPython 3.7."""
    release_files = data["releases"][version]
    return [r["url"] for r in release_files if _contains(r["url"], ("cp37",))]
157
-
158
-
159
def _download_and_extract(root, url):
    """Download the wheel at *url* and unpack it under *root*.

    Only platform wheels (manylinux / macosx / win_amd64) are processed;
    other URLs are ignored. dist-info entries are skipped because several
    wheels are merged into one directory tree.
    """
    if not any(tag in url for tag in ("manylinux", "macosx", "win_amd64")):
        return
    if root is None or root == ".":
        root = os.getcwd()
    print(url)
    with url_lib.urlopen(url) as response:
        payload = response.read()
    with zipfile.ZipFile(io.BytesIO(payload), "r") as wheel:
        for member in wheel.infolist():
            # Ignore dist info since we are merging multiple wheels
            if ".dist-info/" in member.filename:
                continue
            print("\t" + member.filename)
            wheel.extract(member.filename, root)
172
-
173
-
174
def _get_debugpy_info(version="latest", platform="none-any", cp="cp311"):
    """Return ``{"url": ..., "hash": {"sha256": ...}}`` for the debugpy wheel
    on PyPI matching *platform* (ABI-specific wheels also match *cp*).

    ``version="latest"`` resolves to the highest release by PEP 440 ordering.
    Raises ValueError when the release has no matching wheel (previously this
    surfaced as a bare IndexError from ``list(...)[0]``).
    """
    from packaging.version import parse as version_parser

    data = _get_pypi_package_data("debugpy")

    if version == "latest":
        use_version = max(data["releases"].keys(), key=version_parser)
    else:
        use_version = version

    # Lazily scan release files; stop at the first match instead of
    # materializing every candidate just to take element 0.
    matches = (
        {"url": r["url"], "hash": {"sha256": r["digests"]["sha256"]}}
        for r in data["releases"][use_version]
        if f"{cp}-{platform}" in r["url"] or f"py3-{platform}" in r["url"]
    )
    try:
        return next(matches)
    except StopIteration:
        raise ValueError(
            f"No debugpy {use_version} wheel found for {platform} ({cp})."
        ) from None
186
141
187
142
188
143
@nox .session ()
189
def update_build_number(session: nox.Session) -> None:
    """Updates build number for the extension."""
    if len(session.posargs) == 0:
        session.log("No updates to package version")
        return

    package_json_path = pathlib.Path(__file__).parent / "package.json"
    session.log(f"Reading package.json at: {package_json_path}")
    package_json = json.loads(package_json_path.read_text(encoding="utf-8"))

    # Version shape: MAJOR.MINOR.BUILD[-SUFFIX]; split on "." and "-".
    segments = re.split("\\.|-", package_json["version"])
    major, minor = segments[:2]

    new_version = f"{major}.{minor}.{session.posargs[0]}"
    if len(segments) != 3:
        new_version = f"{new_version}-{''.join(segments[3:])}"

    session.log(f"Updating version from {package_json['version']} to {new_version}")
    package_json["version"] = new_version
    package_json_path.write_text(json.dumps(package_json, indent=4), encoding="utf-8")
144
def create_debugpy_json(session: nox.Session, version="1.7.0", cp="cp311"):
    """Writes debugpy_info.json mapping each target platform to the URL and
    sha256 hash of its debugpy wheel on PyPI."""
    # Friendly platform name -> wheel filename tag.
    platform_tags = {
        "macOS": "macosx",
        "win32": "win32",
        "win64": "win_amd64",
        "linux": "manylinux",
        "any": "none-any",
    }
    debugpy_info = {
        name: _get_debugpy_info(version, tag, cp)
        for name, tag in platform_tags.items()
    }
    out_path = pathlib.Path(__file__).parent / "debugpy_info.json"
    out_path.write_text(json.dumps(debugpy_info, indent=4), encoding="utf-8")
0 commit comments