@@ -158,35 +158,39 @@ def maintainers_add_info(context):
158
158
Given the active maintainers defined in the yaml file, it fetches
159
159
the GitHub user information for them.
160
160
"""
161
- timestamp = time .time ()
162
-
163
- cache_file = pathlib .Path ("maintainers.json" )
164
- if cache_file .is_file ():
165
- with open (cache_file ) as f :
166
- context ["maintainers" ] = json .load (f )
167
- # refresh cache after 1 hour
168
- if (timestamp - context ["maintainers" ]["timestamp" ]) < 3_600 :
169
- return context
170
-
171
- context ["maintainers" ]["timestamp" ] = timestamp
172
-
173
161
repeated = set (context ["maintainers" ]["active" ]) & set (
174
162
context ["maintainers" ]["inactive" ]
175
163
)
176
164
if repeated :
177
165
raise ValueError (f"Maintainers { repeated } are both active and inactive" )
178
166
179
- for kind in ("active" , "inactive" ):
180
- context ["maintainers" ][f"{ kind } _with_github_info" ] = []
181
- for user in context ["maintainers" ][kind ]:
182
- resp = requests .get (f"https://api.github.com/users/{ user } " )
183
- if context ["ignore_io_errors" ] and resp .status_code == 403 :
184
- return context
185
- resp .raise_for_status ()
186
- context ["maintainers" ][f"{ kind } _with_github_info" ].append (resp .json ())
167
+ maintainers_info = {}
168
+ for user in (
169
+ context ["maintainers" ]["active" ] + context ["maintainers" ]["inactive" ]
170
+ ):
171
+ resp = requests .get (f"https://api.github.com/users/{ user } " )
172
+ if resp .status_code == 403 :
173
+ sys .stderr .write (
174
+ "WARN: GitHub API quota exceeded when fetching maintainers\n "
175
+ )
176
+ # if we exceed github api quota, we use the github info
177
+ # of maintainers saved with the website
178
+ resp_bkp = requests .get (
179
+ context ["main" ]["production_url" ] + "maintainers.json"
180
+ )
181
+ resp_bkp .raise_for_status ()
182
+ maintainers_info = resp_bkp .json ()
183
+ break
184
+
185
+ resp .raise_for_status ()
186
+ maintainers_info [user ] = resp .json ()
187
187
188
- with open (cache_file , "w" ) as f :
189
- json .dump (context ["maintainers" ], f )
188
+ context ["maintainers" ]["github_info" ] = maintainers_info
189
+
190
+ # save the data fetched from github to use it in case we exceed
191
+ # the GitHub API quota in the future
192
+ with open (pathlib .Path (context ["target_path" ]) / "maintainers.json" , "w" ) as f :
193
+ json .dump (maintainers_info , f )
190
194
191
195
return context
192
196
@@ -196,11 +200,19 @@ def home_add_releases(context):
196
200
197
201
github_repo_url = context ["main" ]["github_repo_url" ]
198
202
resp = requests .get (f"https://api.github.com/repos/{ github_repo_url } /releases" )
199
- if context ["ignore_io_errors" ] and resp .status_code == 403 :
200
- return context
201
- resp .raise_for_status ()
203
+ if resp .status_code == 403 :
204
+ sys .stderr .write ("WARN: GitHub API quota exceeded when fetching releases\n " )
205
+ resp_bkp = requests .get (context ["main" ]["production_url" ] + "releases.json" )
206
+ resp_bkp .raise_for_status ()
207
+ releases = resp_bkp .json ()
208
+ else :
209
+ resp .raise_for_status ()
210
+ releases = resp .json ()
202
211
203
- for release in resp .json ():
212
+ with open (pathlib .Path (context ["target_path" ]) / "releases.json" , "w" ) as f :
213
+ json .dump (releases , f , default = datetime .datetime .isoformat )
214
+
215
+ for release in releases :
204
216
if release ["prerelease" ]:
205
217
continue
206
218
published = datetime .datetime .strptime (
@@ -218,6 +230,7 @@ def home_add_releases(context):
218
230
),
219
231
}
220
232
)
233
+
221
234
return context
222
235
223
236
@staticmethod
@@ -264,12 +277,20 @@ def roadmap_pdeps(context):
264
277
"https://api.github.com/search/issues?"
265
278
f"q=is:pr is:open label:PDEP repo:{ github_repo_url } "
266
279
)
267
- if context ["ignore_io_errors" ] and resp .status_code == 403 :
268
- return context
269
- resp .raise_for_status ()
280
+ if resp .status_code == 403 :
281
+ sys .stderr .write ("WARN: GitHub API quota exceeded when fetching pdeps\n " )
282
+ resp_bkp = requests .get (context ["main" ]["production_url" ] + "pdeps.json" )
283
+ resp_bkp .raise_for_status ()
284
+ pdeps = resp_bkp .json ()
285
+ else :
286
+ resp .raise_for_status ()
287
+ pdeps = resp .json ()
288
+
289
+ with open (pathlib .Path (context ["target_path" ]) / "pdeps.json" , "w" ) as f :
290
+ json .dump (pdeps , f )
270
291
271
- for pdep in resp . json () ["items" ]:
272
- context ["pdeps" ]["under_discussion " ].append (
292
+ for pdep in pdeps ["items" ]:
293
+ context ["pdeps" ]["Under discussion " ].append (
273
294
{"title" : pdep ["title" ], "url" : pdep ["url" ]}
274
295
)
275
296
@@ -302,7 +323,7 @@ def get_callable(obj_as_str: str) -> object:
302
323
return obj
303
324
304
325
305
- def get_context (config_fname : str , ignore_io_errors : bool , ** kwargs ):
326
+ def get_context (config_fname : str , ** kwargs ):
306
327
"""
307
328
Load the config yaml as the base context, and enrich it with the
308
329
information added by the context preprocessors defined in the file.
@@ -311,7 +332,6 @@ def get_context(config_fname: str, ignore_io_errors: bool, **kwargs):
311
332
context = yaml .safe_load (f )
312
333
313
334
context ["source_path" ] = os .path .dirname (config_fname )
314
- context ["ignore_io_errors" ] = ignore_io_errors
315
335
context .update (kwargs )
316
336
317
337
preprocessors = (
@@ -349,7 +369,9 @@ def extend_base_template(content: str, base_template: str) -> str:
349
369
350
370
351
371
def main (
352
- source_path : str , target_path : str , base_url : str , ignore_io_errors : bool
372
+ source_path : str ,
373
+ target_path : str ,
374
+ base_url : str ,
353
375
) -> int :
354
376
"""
355
377
Copy every file in the source directory to the target directory.
@@ -363,7 +385,7 @@ def main(
363
385
os .makedirs (target_path , exist_ok = True )
364
386
365
387
sys .stderr .write ("Generating context...\n " )
366
- context = get_context (config_fname , ignore_io_errors , base_url = base_url )
388
+ context = get_context (config_fname , base_url = base_url , target_path = target_path )
367
389
sys .stderr .write ("Context generated\n " )
368
390
369
391
templates_path = os .path .join (source_path , context ["main" ]["templates_path" ])
@@ -407,15 +429,5 @@ def main(
407
429
parser .add_argument (
408
430
"--base-url" , default = "" , help = "base url where the website is served from"
409
431
)
410
- parser .add_argument (
411
- "--ignore-io-errors" ,
412
- action = "store_true" ,
413
- help = "do not fail if errors happen when fetching "
414
- "data from http sources, and those fail "
415
- "(mostly useful to allow github quota errors "
416
- "when running the script locally)" ,
417
- )
418
432
args = parser .parse_args ()
419
- sys .exit (
420
- main (args .source_path , args .target_path , args .base_url , args .ignore_io_errors )
421
- )
433
+ sys .exit (main (args .source_path , args .target_path , args .base_url ))
0 commit comments