@@ -42,6 +42,7 @@ def __init__(self, repository, revision, task_name_filter, cache_root):
42
42
43
43
temp_dir = tempfile .mkdtemp ()
44
44
self .artifacts_dir = os .path .join (temp_dir , "ccov-artifacts" )
45
+ self .reports_dir = os .path .join (temp_dir , "ccov-reports" )
45
46
46
47
self .index_service = taskcluster_config .get_service ("index" )
47
48
@@ -118,23 +119,18 @@ def retrieve_source_and_artifacts(self):
118
119
# Thread 2 - Clone repository.
119
120
executor .submit (self .clone_repository , self .repository , self .revision )
120
121
121
- def generate_covdir (self ):
122
+ def build_reports (self , only = None ):
122
123
"""
123
- Build the full covdir report using current artifacts
124
+ Build all the possible covdir reports using current artifacts
124
125
"""
125
- output = grcov .report (
126
- self .artifactsHandler .get (), source_dir = self .repo_dir , out_format = "covdir"
127
- )
128
- logger .info ("Covdir report generated successfully" )
129
- return json .loads (output )
126
+ os .makedirs (self .reports_dir , exist_ok = True )
130
127
131
- def build_suites (self ):
132
- """
133
- Build all the detailed covdir reports using current artifacts
134
- and upload them directly on GCP
135
- """
128
+ reports = {}
136
129
for (platform , suite ), artifacts in self .artifactsHandler .get_suites ().items ():
137
130
131
+ if only is not None and (platform , suite ) not in only :
132
+ continue
133
+
138
134
# Generate covdir report for that suite & platform
139
135
logger .info (
140
136
"Building covdir suite report" ,
@@ -146,27 +142,33 @@ def build_suites(self):
146
142
artifacts , source_dir = self .repo_dir , out_format = "covdir"
147
143
)
148
144
149
- # Then upload on GCP
150
- report = json .loads (output )
145
+ # Write output on FS
146
+ path = os .path .join (self .reports_dir , f"{ platform } .{ suite } .json" )
147
+ with open (path , "wb" ) as f :
148
+ f .write (output )
149
+
150
+ reports [(platform , suite )] = path
151
+
152
+ return reports
153
+
154
+ def upload_reports (self , reports ):
155
+ """
156
+ Upload all provided covdir reports on GCP
157
+ """
158
+ for (platform , suite ), path in reports .items ():
159
+ report = json .load (open (path ))
151
160
uploader .gcp (
152
161
self .branch , self .revision , report , suite = suite , platform = platform
153
162
)
154
163
155
- # This function is executed when the bot is triggered at the end of a mozilla-central build.
156
- def go_from_trigger_mozilla_central (self ):
157
- # Check the covdir report does not already exists
158
- if uploader .gcp_covdir_exists (self .branch , self .revision , "full" ):
159
- logger .warn ("Covdir report already on GCP" )
160
- return
161
-
162
- self .retrieve_source_and_artifacts ()
163
-
164
- # Check that all JavaScript files present in the coverage artifacts actually exist.
165
- # If they don't, there might be a bug in the LCOV rewriter.
164
+ def check_javascript_files (self ):
165
+ """
166
+ Check that all JavaScript files present in the coverage artifacts actually exist.
167
+ If they don't, there might be a bug in the LCOV rewriter.
168
+ """
166
169
for artifact in self .artifactsHandler .get ():
167
170
if "jsvm" not in artifact :
168
171
continue
169
-
170
172
with zipfile .ZipFile (artifact , "r" ) as zf :
171
173
for file_name in zf .namelist ():
172
174
with zf .open (file_name , "r" ) as fl :
@@ -185,7 +187,25 @@ def go_from_trigger_mozilla_central(self):
185
187
f"{ missing_files } are present in coverage reports, but missing from the repository"
186
188
)
187
189
188
- report = self .generate_covdir ()
190
+ # This function is executed when the bot is triggered at the end of a mozilla-central build.
191
+ def go_from_trigger_mozilla_central (self ):
192
+ # Check the covdir report does not already exists
193
+ if uploader .gcp_covdir_exists (self .branch , self .revision , "all" , "all" ):
194
+ logger .warn ("Full covdir report already on GCP" )
195
+ return
196
+
197
+ self .retrieve_source_and_artifacts ()
198
+
199
+ # TODO: restore that check
200
+ # self.check_javascript_files()
201
+
202
+ reports = self .build_reports ()
203
+ logger .info ("Built all covdir reports" , nb = len (reports ))
204
+
205
+ # Retrieve the full report
206
+ full_path = reports .get (("all" , "all" ))
207
+ assert full_path is not None , "Missing full report (all:all)"
208
+ report = json .load (open (full_path ))
189
209
190
210
paths = uploader .covdir_paths (report )
191
211
expected_extensions = [".js" , ".cpp" ]
@@ -194,6 +214,9 @@ def go_from_trigger_mozilla_central(self):
194
214
path .endswith (extension ) for path in paths
195
215
), "No {} file in the generated report" .format (extension )
196
216
217
+ self .upload_reports (reports )
218
+ logger .info ("Uploaded all covdir reports" , nb = len (reports ))
219
+
197
220
# Get pushlog and ask the backend to generate the coverage by changeset
198
221
# data, which will be cached.
199
222
with hgmo .HGMO (self .repo_dir ) as hgmo_server :
@@ -203,10 +226,6 @@ def go_from_trigger_mozilla_central(self):
203
226
phabricatorUploader = PhabricatorUploader (self .repo_dir , self .revision )
204
227
changesets_coverage = phabricatorUploader .upload (report , changesets )
205
228
206
- uploader .gcp (self .branch , self .revision , report )
207
- logger .info ("Main Build uploaded on GCP" )
208
-
209
- self .build_suites ()
210
229
notify_email (self .revision , changesets , changesets_coverage )
211
230
212
231
# This function is executed when the bot is triggered at the end of a try build.
@@ -226,7 +245,10 @@ def go_from_trigger_try(self):
226
245
227
246
self .retrieve_source_and_artifacts ()
228
247
229
- report = self .generate_covdir ()
248
+ reports = self .build_reports (only = ("all" , "all" ))
249
+ full_path = reports .get (("all" , "all" ))
250
+ assert full_path is not None , "Missing full report (all:all)"
251
+ report = json .load (open (full_path ))
230
252
231
253
logger .info ("Upload changeset coverage data to Phabricator" )
232
254
phabricatorUploader .upload (report , changesets )
0 commit comments