Parse machine logs (#118707)

* make the file reporter non-optional

* decouple stack traces from metric collection

* update tests; add collect metrics option

* add hasFailedTests property

* add test for multiple failed stack traces

* change path on result

* create factory method

* throw an exception when the test file fails to generate

* remove catch of file exception

* handle missing stack trace in the file reporter output
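
The refactor replaces the free-standing `generateMetrics` function with a `TestFileReporterResults` object parsed once from the reporter file. A minimal sketch of the resulting call site (the relative import, file path, and use of `LocalFileSystem` are illustrative assumptions, not part of this change):

```dart
import 'package:file/file.dart';
import 'package:file/local.dart';

import 'tool_subsharding.dart'; // hypothetical relative import

void main() {
  const FileSystem fileSystem = LocalFileSystem();
  // Hypothetical reporter output path; in test.dart this is metricFile.
  final File metricFile = fileSystem.file('/tmp/test_results_raw.json');

  // Parses the whole `--file-reporter json:...` file up front and throws if
  // the runner never produced it.
  final TestFileReporterResults results = TestFileReporterResults.fromFile(metricFile);

  if (results.hasFailedTests) {
    results.errors.forEach(print); // one string per error, stack trace appended
  }
  for (final TestSpecs specs in results.allTestSpecs.values) {
    print(specs.toJson()); // per-suite timing metrics
  }
}
```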
diff --git a/dev/bots/test.dart b/dev/bots/test.dart
index 17cf7e2..388023d 100644
--- a/dev/bots/test.dart
+++ b/dev/bots/test.dart
@@ -1802,6 +1802,7 @@
   final List<String> args = <String>[
     'run',
     'test',
+    '--file-reporter=json:${metricFile.path}',
     if (shuffleTests) '--test-randomize-ordering-seed=$shuffleSeed',
     '-j$cpus',
     if (!hasColor)
@@ -1813,8 +1814,6 @@
     if (testPaths != null)
       for (final String testPath in testPaths)
         testPath,
-    if (collectMetrics)
-      '--file-reporter=json:${metricFile.path}',
   ];
   final Map<String, String> environment = <String, String>{
     'FLUTTER_ROOT': flutterRoot,
@@ -1840,19 +1839,24 @@
     removeLine: useBuildRunner ? (String line) => line.startsWith('[INFO]') : null,
   );
 
+  final TestFileReporterResults fileReporterResults = TestFileReporterResults.fromFile(metricFile); // parsed --file-reporter output
+  final File errorLog = fileSystem.file(path.join(flutterRoot, 'error.log'));
+  errorLog.writeAsStringSync(json.encode(fileReporterResults.errors));
+
   if (collectMetrics) {
     try {
       final List<String> testList = <String>[];
-      final Map<int, TestSpecs> allTestSpecs = generateMetrics(metricFile);
+      final Map<int, TestSpecs> allTestSpecs = fileReporterResults.allTestSpecs;
       for (final TestSpecs testSpecs in allTestSpecs.values) {
         testList.add(testSpecs.toJson());
       }
       if (testList.isNotEmpty) {
         final String testJson = json.encode(testList);
-        final File testResults = fileSystem.file(path.join(flutterRoot, 'test_results.json'));
+        final File testResults = fileSystem.file(
+            path.join(flutterRoot, 'test_results.json'));
         testResults.writeAsStringSync(testJson);
       }
-    } on fs.FileSystemException catch (e){
+    } on fs.FileSystemException catch (e) {
       print('Failed to generate metrics: $e');
     }
   }
diff --git a/dev/bots/test/tool_subsharding_test.dart b/dev/bots/test/tool_subsharding_test.dart
index 19e9f8e..1e409b0 100644
--- a/dev/bots/test/tool_subsharding_test.dart
+++ b/dev/bots/test/tool_subsharding_test.dart
@@ -23,8 +23,8 @@
       {"missing": "entry"}
       {"other": true}''';
       file.writeAsStringSync(output);
-      final Map<int, TestSpecs> result = generateMetrics(file);
-      expect(result, isEmpty);
+      final TestFileReporterResults result = TestFileReporterResults.fromFile(file);
+      expect(result.allTestSpecs, isEmpty);
     });
 
     test('have metrics', () async {
@@ -47,7 +47,7 @@
       {"group":{"id":7,"suiteID":1,"parentID":2,"name":"ProjectValidatorTask","metadata":{"skip":false,"skipReason":null},"testCount":1,"line":82,"column":3,"url":"file:///file"},"type":"group","time":5000}
       {"success":true,"type":"done","time":4870}''';
       file.writeAsStringSync(output);
-      final Map<int, TestSpecs> result = generateMetrics(file);
+      final Map<int, TestSpecs> result = TestFileReporterResults.fromFile(file).allTestSpecs;
       expect(result, contains(0));
       expect(result, contains(1));
       expect(result[0]!.path, 'test/general.shard/project_validator_result_test.dart');
@@ -62,8 +62,37 @@
       {"suite":{"id":1,"platform":"vm","path":"other_path"},"type":"suite","time":1000}
       {"group":{"id":7,"suiteID":1,"parentID":2,"name":"name","metadata":{"skip":false,"skipReason":null},"testCount":1,"line":82,"column":3,"url":"file:///file"},"type":"group","time":5000}''';
       file.writeAsStringSync(output);
-      final Map<int, TestSpecs> result = generateMetrics(file);
-      expect(result, isEmpty);
+      final TestFileReporterResults result = TestFileReporterResults.fromFile(file);
+      expect(result.hasFailedTests, true);
+    });
+
+    test('has failed stack traces', () async {
+      final File file = fileSystem.file('success_file');
+      const String output = '''
+      {"protocolVersion":"0.1.1","runnerVersion":"1.22.1","pid":47372,"type":"start","time":0}
+      {"suite":{"id":0,"platform":"vm","path":"test/tool_subsharding_test.dart"},"type":"suite","time":0}
+      {"test":{"id":1,"name":"loading test/tool_subsharding_test.dart","suiteID":0,"groupIDs":[],"metadata":{"skip":false,"skipReason":null},"line":null,"column":null,"url":null},"type":"testStart","time":2}
+      {"count":1,"time":11,"type":"allSuites"}
+      {"testID":1,"result":"success","skipped":false,"hidden":true,"type":"testDone","time":1021}
+      {"group":{"id":2,"suiteID":0,"parentID":null,"name":"","metadata":{"skip":false,"skipReason":null},"testCount":3,"line":null,"column":null,"url":null},"type":"group","time":1026}
+      {"group":{"id":3,"suiteID":0,"parentID":2,"name":"generateMetrics","metadata":{"skip":false,"skipReason":null},"testCount":3,"line":13,"column":3,"url":"file:///Users/user/Documents/flutter/dev/bots/test/tool_subsharding_test.dart"},"type":"group","time":1027}
+      {"test":{"id":4,"name":"generateMetrics empty metrics","suiteID":0,"groupIDs":[2,3],"metadata":{"skip":false,"skipReason":null},"line":20,"column":5,"url":"file:///Users/user/Documents/flutter/dev/bots/test/tool_subsharding_test.dart"},"type":"testStart","time":1027}
+      {"testID":4,"error":"Expected: <true>  Actual: <false>","stackTrace":"package:test_api                      expect test/tool_subsharding_test.dart 28:7  main.<fn>.<fn> ","isFailure":true,"type":"error","time":1095}
+      {"testID":4,"result":"failure","skipped":false,"hidden":false,"type":"testDone","time":1096}
+      {"test":{"id":5,"name":"generateMetrics have metrics","suiteID":0,"groupIDs":[2,3],"metadata":{"skip":false,"skipReason":null},"line":31,"column":5,"url":"file:///Users/user/Documents/flutter/dev/bots/test/tool_subsharding_test.dart"},"type":"testStart","time":1097}
+      {"testID":5,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":1103}
+      {"test":{"id":6,"name":"generateMetrics missing success entry","suiteID":0,"groupIDs":[2,3],"metadata":{"skip":false,"skipReason":null},"line":60,"column":5,"url":"file:///Users/user/Documents/flutter/dev/bots/test/tool_subsharding_test.dart"},"type":"testStart","time":1103}
+      {"testID":6,"error":"Expected: <false>  Actual: <true>","stackTrace":"package:test_api                      expect test/tool_subsharding_test.dart 68:7  main.<fn>.<fn> ","isFailure":true,"type":"error","time":1107}
+      {"testID":6,"result":"failure","skipped":false,"hidden":false,"type":"testDone","time":1107}
+      {"testID":6,"error":"my error","isFailure":true,"type":"error","time":1107}
+      {"success":false,"type":"done","time":1120}''';
+      file.writeAsStringSync(output);
+      final TestFileReporterResults result = TestFileReporterResults.fromFile(file);
+      expect(result.hasFailedTests, true);
+      expect(result.errors, hasLength(3));
+      expect(result.errors[0], contains('Expected: <true>  Actual: <false>'));
+      expect(result.errors[1], contains('Expected: <false>  Actual: <true>'));
+      expect(result.errors[2], contains('my error'));
     });
   });
 }
diff --git a/dev/bots/tool_subsharding.dart b/dev/bots/tool_subsharding.dart
index 7d7d676..f9cbb3c 100644
--- a/dev/bots/tool_subsharding.dart
+++ b/dev/bots/tool_subsharding.dart
@@ -38,42 +38,65 @@
   }
 }
 
-/// Intended to parse the output file of `dart test --file-reporter json:file_name
-Map<int, TestSpecs> generateMetrics(File metrics) {
-  final Map<int, TestSpecs> allTestSpecs = <int, TestSpecs>{};
-  if (!metrics.existsSync()) {
-    return allTestSpecs;
-  }
+class TestFileReporterResults {
+  TestFileReporterResults._({
+    required this.allTestSpecs,
+    required this.hasFailedTests,
+    required this.errors,
+  });
 
-  bool success = false;
-  for(final String metric in metrics.readAsLinesSync()) {
-    final Map<String, dynamic> entry = json.decode(metric) as Map<String, dynamic>;
-    if (entry.containsKey('suite')) {
-      final Map<dynamic, dynamic> suite = entry['suite'] as Map<dynamic, dynamic>;
-      allTestSpecs[suite['id'] as int] = TestSpecs(
-        path: suite['path'] as String,
-        startTime: entry['time'] as int,
-      );
-    } else if (_isMetricDone(entry, allTestSpecs)) {
-      final Map<dynamic, dynamic> group = entry['group'] as Map<dynamic, dynamic>;
-      final int suiteID = group['suiteID'] as int;
-      final TestSpecs testSpec = allTestSpecs[suiteID]!;
-      testSpec.endTime = entry['time'] as int;
-    } else if (entry.containsKey('success') && entry['success'] == true) {
-      success = true;
+  /// Intended to parse the output file of `dart test --file-reporter json:file_name`.
+  factory TestFileReporterResults.fromFile(File metrics) {
+    if (!metrics.existsSync()) {
+      throw Exception('${metrics.path} does not exist');
     }
+
+    final Map<int, TestSpecs> testSpecs = <int, TestSpecs>{};
+    // Assume failure until a terminal {"success": true} entry confirms the run passed.
+    bool hasFailedTests = true;
+    final List<String> errors = <String>[];
+
+    for (final String metric in metrics.readAsLinesSync()) {
+      final Map<String, Object?> entry = json.decode(metric) as Map<String, Object?>;
+      if (entry.containsKey('suite')) {
+        final Map<String, Object?> suite = entry['suite']! as Map<String, Object?>;
+        addTestSpec(suite, entry['time']! as int, testSpecs);
+      } else if (isMetricDone(entry, testSpecs)) {
+        final Map<String, Object?> group = entry['group']! as Map<String, Object?>;
+        final int suiteID = group['suiteID']! as int;
+        addMetricDone(suiteID, entry['time']! as int, testSpecs);
+      } else if (entry.containsKey('error')) {
+        final String stackTrace = entry.containsKey('stackTrace') ? '\n${entry['stackTrace']}' : '';
+        errors.add('${entry['error']}$stackTrace');
+      } else if (entry.containsKey('success') && entry['success'] == true) {
+        hasFailedTests = false;
+      }
+    }
+
+    return TestFileReporterResults._(allTestSpecs: testSpecs, hasFailedTests: hasFailedTests, errors: errors);
   }
 
-  if (!success) { // means that not all tests succeeded therefore no metrics are stored
-    return <int, TestSpecs>{};
-  }
-  return allTestSpecs;
-}
+  final Map<int, TestSpecs> allTestSpecs;
+  final bool hasFailedTests;
+  final List<String> errors;
 
-bool _isMetricDone(Map<String, dynamic> entry, Map<int, TestSpecs> allTestSpecs) {
-  if (entry.containsKey('group') && entry['type'] as String == 'group') {
-    final Map<dynamic, dynamic> group = entry['group'] as Map<dynamic, dynamic>;
-    return allTestSpecs.containsKey(group['suiteID'] as int);
+  static void addTestSpec(Map<String, Object?> suite, int time, Map<int, TestSpecs> allTestSpecs) {
+    allTestSpecs[suite['id']! as int] = TestSpecs(
+      path: suite['path']! as String,
+      startTime: time,
+    );
   }
-  return false;
+
+  static void addMetricDone(int suiteID, int time, Map<int, TestSpecs> allTestSpecs) {
+    final TestSpecs testSpec = allTestSpecs[suiteID]!;
+    testSpec.endTime = time;
+  }
+
+  static bool isMetricDone(Map<String, Object?> entry, Map<int, TestSpecs> allTestSpecs) {
+    if (entry.containsKey('group') && entry['type']! as String == 'group') {
+      final Map<String, Object?> group = entry['group']! as Map<String, Object?>;
+      return allTestSpecs.containsKey(group['suiteID']! as int);
+    }
+    return false;
+  }
 }
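
For reference, a runnable sketch of the machine-format lines the factory classifies, modeled on the fixtures in tool_subsharding_test.dart (IDs, paths, and the import are fabricated for illustration): a `suite` event opens a `TestSpecs` entry, a matching `group` event records its end time, `error` events accumulate into `errors`, and only a terminal `{"success":true}` entry clears `hasFailedTests`.

```dart
import 'package:file/file.dart';
import 'package:file/local.dart';

import 'tool_subsharding.dart'; // hypothetical relative import

void main() {
  const FileSystem fileSystem = LocalFileSystem();
  final File report = fileSystem.file('sample_report.json')
    ..writeAsStringSync('''
{"suite":{"id":0,"platform":"vm","path":"test/foo_test.dart"},"type":"suite","time":0}
{"group":{"id":2,"suiteID":0,"parentID":null,"name":"","metadata":{"skip":false,"skipReason":null},"testCount":1,"line":null,"column":null,"url":null},"type":"group","time":900}
{"testID":4,"error":"Expected: <true>  Actual: <false>","stackTrace":"foo_test.dart 10:5 main.<fn>","isFailure":true,"type":"error","time":950}
{"success":false,"type":"done","time":1000}
''');

  final TestFileReporterResults results = TestFileReporterResults.fromFile(report);
  print(results.hasFailedTests);        // true: no {"success":true} entry was seen
  print(results.errors.single);         // error text with its stack trace appended
  print(results.allTestSpecs[0]!.path); // test/foo_test.dart
}
```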