xref: /aosp_15_r20/external/cronet/testing/merge_scripts/standard_isolated_script_merge.py (revision 6777b5387eb2ff775bb5750e3f5d96f37fb7352b)
#!/usr/bin/env python3
# Copyright 2017 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import json
import os
import sys

import merge_api
import results_merger


def StandardIsolatedScriptMerge(output_json, summary_json, jsons_to_merge):
  """Merges the contents of one or more results JSONs into a single JSON.

  Args:
    output_json: A path to a JSON file to which the merged results should be
      written.
    summary_json: A path to the summary.json written by the swarming client,
      describing the shards that ran.
    jsons_to_merge: A list of paths to JSON files that should be merged.

  Returns:
    0 on success, or 1 if summary.json is missing or unreadable.
  """
  # summary.json is produced by the swarming client itself. We are mostly
  # interested in the number of shards.
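  # Illustrative shape, inferred from the reads below (real summaries carry
  # more per-shard fields, all of which are ignored here; the task ids are
  # made up):
  #   {"shards": [{"task_id": "abc123"}, null, {"task_id": "def456"}]}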
  try:
    with open(summary_json) as f:
      summary = json.load(f)
  except (IOError, ValueError):
    print(
        'summary.json is missing or cannot be read. Something is seriously '
        'wrong with the swarming client or the bot.',
        file=sys.stderr)
    return 1

  missing_shards = []
  shard_results_list = []
  for index, result in enumerate(summary['shards']):
    output_path = None
    if result:
      output_path = find_shard_output_path(index, result.get('task_id'),
                                           jsons_to_merge)
    if not output_path:
      missing_shards.append(index)
      continue

    with open(output_path) as f:
      try:
        json_contents = json.load(f)
      except ValueError as e:
        raise ValueError(
            'Failed to parse JSON from %s' % output_path) from e
      shard_results_list.append(json_contents)

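  # The shard payloads are assumed to be in the Chromium JSON test results
  # format that results_merger understands; an illustrative (not exhaustive)
  # per-shard shape:
  #   {"version": 3, "interrupted": false, "tests": {...},
  #    "num_failures_by_type": {"PASS": 10}}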
  merged_results = results_merger.merge_test_results(shard_results_list)
  if missing_shards:
    merged_results['missing_shards'] = missing_shards
    if 'global_tags' not in merged_results:
      merged_results['global_tags'] = []
    merged_results['global_tags'].append('UNRELIABLE_RESULTS')

  with open(output_json, 'w') as f:
    json.dump(merged_results, f)

  return 0


def find_shard_output_path(index, task_id, jsons_to_merge):
  """Finds the output path of the shard matching the index or task id.

  Args:
    index: The index of the shard to load data for; used by the old API.
    task_id: The task id naming the shard's output directory; used by the new
      API.
    jsons_to_merge: A container of file paths for shards that emitted output.

  Returns:
    The matching path, or None if it is missing or ambiguous.
  """
  # 'output.json' is set in swarming/api.py, gtest_task method.
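  # Expected layout, inferred from the matching below (directory names are
  # illustrative):
  #   <task-output-dir>/0/output.json          (old API: shard index)
  #   <task-output-dir>/<task_id>/output.json  (new API: shard task id)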
  matching_json_files = [
      j for j in jsons_to_merge
      if (os.path.basename(j) == 'output.json' and
          (os.path.basename(os.path.dirname(j)) == str(index) or
           os.path.basename(os.path.dirname(j)) == task_id))]

  if not matching_json_files:
    print('shard %s test output missing' % index, file=sys.stderr)
    return None
  if len(matching_json_files) > 1:
    print('duplicate test output for shard %s' % index, file=sys.stderr)
    return None

  return matching_json_files[0]


def main(raw_args):
  parser = merge_api.ArgumentParser()
  args = parser.parse_args(raw_args)
  return StandardIsolatedScriptMerge(
      args.output_json, args.summary_json, args.jsons_to_merge)


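# Example invocation; the flag spellings are assumed from the argparse
# attribute names used in main() (see merge_api.ArgumentParser for the
# authoritative interface), and the paths are illustrative:
#   standard_isolated_script_merge.py \
#       --output-json merged.json \
#       --summary-json summary.json \
#       0/output.json 1/output.json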
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
