1
1
# Copyright (c) Microsoft Corporation. All rights reserved.
2
2
# Licensed under the MIT License.
3
3
"""All the action we need during build"""
4
+ import hashlib
4
5
import io
5
6
import json
6
7
import os
12
13
import nox # pylint: disable=import-error
13
14
14
15
15
- def _install_bundle (session : nox .Session ) -> None :
16
- session .install (
17
- "-t" ,
18
- "./bundled/libs" ,
19
- "--no-cache-dir" ,
20
- "--implementation" ,
21
- "py" ,
22
- "--no-deps" ,
23
- "--upgrade" ,
24
- "-r" ,
25
- "./requirements.txt" ,
26
- )
27
- session .install ("packaging" )
28
- _install_package (f"{ os .getcwd ()} /bundled/libs" , "debugpy" , "1.7.0" )
29
-
30
-
31
- def _update_pip_packages (session : nox .Session ) -> None :
32
- session .run ("pip-compile" , "--generate-hashes" , "--upgrade" , "./requirements.in" )
33
-
34
-
35
16
@nox .session ()
36
17
def lint (session : nox .Session ) -> None :
37
18
"""Runs linter and formatter checks on python files."""
@@ -58,81 +39,80 @@ def tests(session: nox.Session) -> None:
58
39
session .run ("npm" , "run" , "test" )
59
40
60
41
61
- def _get_package_data (package ):
62
- json_uri = f"https://registry.npmjs.org/{ package } "
63
- with url_lib .urlopen (json_uri ) as response :
64
- return json .loads (response .read ())
65
-
66
-
67
- def _update_npm_packages (session : nox .Session ) -> None :
68
- pinned = {
69
- "vscode-languageclient" ,
70
- "@types/vscode" ,
71
- "@types/node" ,
72
- }
73
- package_json_path = pathlib .Path (__file__ ).parent / "package.json"
74
- package_json = json .loads (package_json_path .read_text (encoding = "utf-8" ))
75
-
76
- for package in package_json ["dependencies" ]:
77
- if package not in pinned :
78
- data = _get_package_data (package )
79
- latest = "^" + data ["dist-tags" ]["latest" ]
80
- package_json ["dependencies" ][package ] = latest
81
-
82
- for package in package_json ["devDependencies" ]:
83
- if package not in pinned :
84
- data = _get_package_data (package )
85
- latest = "^" + data ["dist-tags" ]["latest" ]
86
- package_json ["devDependencies" ][package ] = latest
87
-
88
- # Ensure engine matches the package
89
- if (
90
- package_json ["engines" ]["vscode" ]
91
- != package_json ["devDependencies" ]["@types/vscode" ]
92
- ):
93
- print (
94
- "Please check VS Code engine version and @types/vscode version in package.json."
95
- )
42
+ @nox .session ()
43
+ def install_bundled_libs (session ):
44
+ """Installs the libraries that will be bundled with the extension."""
45
+ session .install ("wheel" )
46
+ session .install (
47
+ "-t" ,
48
+ "./bundled/libs" ,
49
+ "--no-cache-dir" ,
50
+ "--implementation" ,
51
+ "py" ,
52
+ "--no-deps" ,
53
+ "--require-hashes" ,
54
+ "--only-binary" ,
55
+ ":all:" ,
56
+ "-r" ,
57
+ "./requirements.txt" ,
58
+ )
59
+ session .install ("packaging" )
96
60
97
- new_package_json = json .dumps (package_json , indent = 4 )
98
- # JSON dumps uses \n for line ending on all platforms by default
99
- if not new_package_json .endswith ("\n " ):
100
- new_package_json += "\n "
101
- package_json_path .write_text (new_package_json , encoding = "utf-8" )
61
+ debugpy_info_json_path = pathlib .Path (__file__ ).parent / "debugpy_info.json"
62
+ debugpy_info = json .loads (debugpy_info_json_path .read_text (encoding = "utf-8" ))
63
+
64
+ target = os .environ .get ("VSCETARGET" , "" )
65
+ print ("target:" , target )
66
+ if "darwin" in target :
67
+ download_url (debugpy_info ["macOS" ])
68
+ elif "win32-ia32" == target :
69
+ download_url (debugpy_info ["win32" ])
70
+ elif "win32-x64" == target :
71
+ download_url (debugpy_info ["win64" ])
72
+ elif "linux-x64" == target :
73
+ download_url (debugpy_info ["linux" ])
74
+ else :
75
+ download_url (debugpy_info ["any" ])
102
76
103
- session .run ("npm" , "audit" , "fix" , external = True )
104
- session .run ("npm" , "install" , external = True )
105
77
78
def download_url(value):
    """Download the wheel described by *value*, verify it, and extract it.

    *value* is a mapping of the form ``{"url": <wheel url>, "hash":
    {<algorithm>: <hex digest>}}`` (as produced by ``_get_debugpy_info``).
    The archive is extracted into ``<cwd>/bundled/libs``.

    Raises ValueError when the downloaded bytes fail hash verification.
    """
    with url_lib.urlopen(value["url"]) as response:
        data = response.read()
        # The "hash" mapping carries exactly one algorithm -> digest entry.
        # next(iter(...)) takes it without building a throwaway list and
        # without shadowing the `value` parameter inside a comprehension.
        hash_algorithm, hash_value = next(iter(value["hash"].items()))
        if hashlib.new(hash_algorithm, data).hexdigest() != hash_value:
            raise ValueError("Failed hash verification for {}.".format(value["url"]))

    print("Download: ", value["url"])
    with zipfile.ZipFile(io.BytesIO(data), "r") as wheel:
        libs_dir = pathlib.Path.cwd() / "bundled" / "libs"
        for zip_info in wheel.infolist():
            print("\t" + zip_info.filename)
            wheel.extract(zip_info.filename, libs_dir)
111
93
112
94
113
- @nox .session (python = "3.7" )
114
- def install_bundled_libs (session ) :
115
- """Installs the libraries that will be bundled with the extension."""
116
- session .install ( "wheel" )
117
- _install_bundle ( session )
118
-
95
@nox.session()
def update_build_number(session: nox.Session) -> None:
    """Updates build number for the extension."""
    if not session.posargs:
        session.log("No updates to package version")
        return

    package_json_path = pathlib.Path(__file__).parent / "package.json"
    session.log(f"Reading package.json at: {package_json_path}")

    package_json = json.loads(package_json_path.read_text(encoding="utf-8"))

    # Version format is MAJOR.MINOR.BUILD, optionally followed by a
    # "-" separated pre-release suffix.
    parts = re.split(r"\.|-", package_json["version"])
    major, minor = parts[:2]

    version = f"{major}.{minor}.{session.posargs[0]}"
    if len(parts) != 3:
        # Re-attach the pre-release suffix when one was present.
        version = f"{version}-{''.join(parts[3:])}"

    session.log(f"Updating version from {package_json['version']} to {version}")
    package_json["version"] = version
    package_json_path.write_text(json.dumps(package_json, indent=4), encoding="utf-8")
136
116
137
117
138
118
def _get_pypi_package_data (package_name ):
@@ -143,59 +123,34 @@ def _get_pypi_package_data(package_name):
143
123
return json .loads (response .read ())
144
124
145
125
146
- def _get_urls (data , version ):
147
- return list (
148
- r ["url" ] for r in data ["releases" ][version ] if _contains (r ["url" ], ("cp37" ,))
149
- )
150
-
151
-
152
- def _download_and_extract (root , url ):
153
- if "manylinux" in url or "macosx" in url or "win_amd64" in url :
154
- root = os .getcwd () if root is None or root == "." else root
155
- print (url )
156
- with url_lib .urlopen (url ) as response :
157
- data = response .read ()
158
- with zipfile .ZipFile (io .BytesIO (data ), "r" ) as wheel :
159
- for zip_info in wheel .infolist ():
160
- # Ignore dist info since we are merging multiple wheels
161
- if ".dist-info/" in zip_info .filename :
162
- continue
163
- print ("\t " + zip_info .filename )
164
- wheel .extract (zip_info .filename , root )
165
-
166
-
167
- def _install_package (root , package_name , version = "latest" ):
126
def _get_debugpy_info(version="latest", platform="none-any", cp="cp311"):
    """Return ``{"url": ..., "hash": {"sha256": ...}}`` for one debugpy wheel.

    Picks the release file of *version* (resolving "latest" via PyPI) whose
    filename contains either ``<cp>-<platform>`` (ABI-specific wheel) or
    ``py3-<platform>`` (pure-python wheel).

    Raises ValueError when no release file matches.
    """
    from packaging.version import parse as version_parser

    data = _get_pypi_package_data("debugpy")

    if version == "latest":
        use_version = max(data["releases"].keys(), key=version_parser)
    else:
        use_version = version

    # Lazily scan the release files and stop at the first match instead of
    # materializing every candidate just to take element [0].
    matches = (
        {"url": r["url"], "hash": {"sha256": r["digests"]["sha256"]}}
        for r in data["releases"][use_version]
        if f"{cp}-{platform}" in r["url"] or f"py3-{platform}" in r["url"]
    )
    try:
        return next(matches)
    except StopIteration:
        # A bare [0] would raise an opaque IndexError here; name the problem.
        raise ValueError(
            f"No debugpy wheel found for {cp}-{platform} at version {use_version}."
        ) from None
179
141
180
142
181
143
@nox.session()
def create_debugpy_json(session: nox.Session, version="1.7.0", cp="cp311"):
    """Regenerates debugpy_info.json with the download URL and sha256 hash of
    the debugpy wheel for each supported platform at *version*."""
    # (json key, platform tag expected inside the wheel filename)
    platforms = [
        ("macOS", "macosx"),
        ("win32", "win32"),
        ("win64", "win_amd64"),
        ("linux", "manylinux"),
        ("any", "none-any"),
    ]
    debugpy_info_json_path = pathlib.Path(__file__).parent / "debugpy_info.json"
    # `platform_tag` instead of `id`: avoid shadowing the builtin.
    debugpy_info = {
        key: _get_debugpy_info(version, platform_tag, cp)
        for key, platform_tag in platforms
    }
    debugpy_info_json_path.write_text(
        json.dumps(debugpy_info, indent=4), encoding="utf-8"
    )
0 commit comments