Implement go/conformanceDebuggability. Refactor ForkPipeRunner::RunTest(): there is no need to re-scan argv for the flags common to all suites; parsing them once up front also lets "debug_test_names" be shared between suites.
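
The test runner now accepts a --debug flag taking one or more test
names. Only those tests are run, and for each one the serialized
ConformanceRequest (the 4-byte little-endian length prefix followed by
the request bytes) is written to --output_dir as an escaped .txt file,
along with a printf command for piping it straight into the testee and
a protoscope command for inspecting its wire format.

Example invocation (the test name and paths below are illustrative
only):

  conformance_test_runner \
      --debug Recommended.Proto3.JsonInput.FieldNameExtraUnderscore \
      --output_dir /tmp/conformance_debug \
      ./conformance_cpp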

PiperOrigin-RevId: 656509695
diff --git a/conformance/BUILD.bazel b/conformance/BUILD.bazel
index e402498..e38330d 100644
--- a/conformance/BUILD.bazel
+++ b/conformance/BUILD.bazel
@@ -233,6 +233,7 @@
         "//src/google/protobuf",
         "//src/google/protobuf:port",
         "//src/google/protobuf:protobuf_lite",
+        "//src/google/protobuf/json",
         "//src/google/protobuf/stubs",
         "//src/google/protobuf/util:json_util",
         "//src/google/protobuf/util:type_resolver",
diff --git a/conformance/binary_json_conformance_suite.cc b/conformance/binary_json_conformance_suite.cc
index de575bb..892a2a4 100644
--- a/conformance/binary_json_conformance_suite.cc
+++ b/conformance/binary_json_conformance_suite.cc
@@ -445,7 +445,10 @@
       absl::StrCat(setting.ConformanceLevelToString(level), ".",
                    setting.GetSyntaxIdentifier(), ".ProtobufInput.", test_name);
 
-  suite_.RunTest(effective_test_name, request, &response);
+  if (!suite_.RunTest(effective_test_name, request, &response)) {
+    return;
+  }
+
   TestStatus test;
   test.set_name(effective_test_name);
   if (response.result_case() == ConformanceResponse::kParseError) {
@@ -641,7 +644,9 @@
       setting.ConformanceLevelToString(level), ".",
       setting.GetSyntaxIdentifier(), ".JsonInput.", test_name, ".Validator");
 
-  suite_.RunTest(effective_test_name, request, &response);
+  if (!suite_.RunTest(effective_test_name, request, &response)) {
+    return;
+  }
 
   TestStatus test;
   test.set_name(effective_test_name);
@@ -689,7 +694,9 @@
       absl::StrCat(setting.ConformanceLevelToString(level), ".",
                    SyntaxIdentifier(), ".JsonInput.", test_name);
 
-  suite_.RunTest(effective_test_name, request, &response);
+  if (!suite_.RunTest(effective_test_name, request, &response)) {
+    return;
+  }
 
   TestStatus test;
   test.set_name(effective_test_name);
@@ -722,7 +729,9 @@
       absl::StrCat(setting.ConformanceLevelToString(level), ".",
                    SyntaxIdentifier(), ".", test_name, ".JsonOutput");
 
-  suite_.RunTest(effective_test_name, request, &response);
+  if (!suite_.RunTest(effective_test_name, request, &response)) {
+    return;
+  }
 
   TestStatus test;
   test.set_name(effective_test_name);
@@ -1438,7 +1447,9 @@
       conformance::BINARY_TEST, prototype, "UnknownOrdering", serialized);
   const ConformanceRequest& request = setting.GetRequest();
   ConformanceResponse response;
-  suite_.RunTest(setting.GetTestName(), request, &response);
+  if (!suite_.RunTest(setting.GetTestName(), request, &response)) {
+    return;
+  }
 
   MessageType response_message;
   TestStatus test;
@@ -1447,6 +1458,7 @@
     suite_.ReportSkip(test, request, response);
     return;
   }
+
   suite_.ParseResponse(response, setting, &response_message);
 
   const UnknownFieldSet& ufs = response_message.unknown_fields();
diff --git a/conformance/conformance_cpp.cc b/conformance/conformance_cpp.cc
index 0f4196a..45fa91a 100644
--- a/conformance/conformance_cpp.cc
+++ b/conformance/conformance_cpp.cc
@@ -7,15 +7,15 @@
 
 #include <errno.h>
 #include <stdarg.h>
+#include <stdlib.h>
 #include <unistd.h>
 
 #include <cstddef>
 #include <cstdint>
+#include <cstdio>
 #include <memory>
 #include <string>
 
-#include "google/protobuf/util/json_util.h"
-#include "google/protobuf/util/type_resolver_util.h"
 #include "absl/log/absl_check.h"
 #include "absl/log/absl_log.h"
 #include "absl/status/status.h"
@@ -27,12 +27,15 @@
 #include "editions/golden/test_messages_proto2_editions.pb.h"
 #include "editions/golden/test_messages_proto3_editions.pb.h"
 #include "google/protobuf/endian.h"
+#include "google/protobuf/json/json.h"
 #include "google/protobuf/message.h"
 #include "google/protobuf/test_messages_proto2.pb.h"
 #include "google/protobuf/test_messages_proto3.pb.h"
 #include "google/protobuf/test_messages_proto3.pb.h"
 #include "google/protobuf/text_format.h"
+#include "google/protobuf/util/json_util.h"
 #include "google/protobuf/util/type_resolver.h"
+#include "google/protobuf/util/type_resolver_util.h"
 #include "google/protobuf/stubs/status_macros.h"
 
 // Must be included last.
@@ -241,8 +244,9 @@
                           serialized_output.size()));
 
   if (verbose_) {
-    ABSL_LOG(INFO) << "conformance-cpp: request=" << request.ShortDebugString()
-                   << ", response=" << response->ShortDebugString();
+    ABSL_LOG(INFO) << "conformance-cpp: request="
+                   << google::protobuf::ShortFormat(request)
+                   << ", response=" << google::protobuf::ShortFormat(*response);
   }
   return false;
 }
diff --git a/conformance/conformance_python.py b/conformance/conformance_python.py
index eea05d9..77076bd 100755
--- a/conformance/conformance_python.py
+++ b/conformance/conformance_python.py
@@ -102,32 +102,34 @@
     response.protobuf_payload = failure_set.SerializeToString()
     return response
 
-  isJson = (request.WhichOneof('payload') == 'json_payload')
+  isJson = request.WhichOneof("payload") == "json_payload"
   test_message = _create_test_message(request.message_type)
 
   if (not isJson) and (test_message is None):
     raise ProtocolError("Protobuf request doesn't have specific payload type")
 
   try:
-    if request.WhichOneof('payload') == 'protobuf_payload':
+    if request.WhichOneof("payload") == "protobuf_payload":
       try:
         test_message.ParseFromString(request.protobuf_payload)
       except message.DecodeError as e:
         response.parse_error = str(e)
         return response
 
-    elif request.WhichOneof('payload') == 'json_payload':
+    elif request.WhichOneof("payload") == "json_payload":
       try:
-        ignore_unknown_fields = \
-            request.test_category == \
-                conformance_pb2.JSON_IGNORE_UNKNOWN_PARSING_TEST
-        json_format.Parse(request.json_payload, test_message,
-                          ignore_unknown_fields)
+        ignore_unknown_fields = (
+            request.test_category
+            == conformance_pb2.JSON_IGNORE_UNKNOWN_PARSING_TEST
+        )
+        json_format.Parse(
+            request.json_payload, test_message, ignore_unknown_fields
+        )
       except Exception as e:
         response.parse_error = str(e)
         return response
 
-    elif request.WhichOneof('payload') == 'text_payload':
+    elif request.WhichOneof("payload") == "text_payload":
       try:
         text_format.Parse(request.text_payload, test_message)
       except Exception as e:
@@ -152,7 +154,8 @@
 
     elif request.requested_output_format == conformance_pb2.TEXT_FORMAT:
       response.text_payload = text_format.MessageToString(
-          test_message, print_unknown_fields=request.print_unknown_fields)
+          test_message, print_unknown_fields=request.print_unknown_fields
+      )
 
   except Exception as e:
     response.runtime_error = str(e)
@@ -163,7 +166,7 @@
 def do_test_io():
   length_bytes = sys.stdin.buffer.read(4)
   if len(length_bytes) == 0:
-    return False   # EOF
+    return False  # EOF
   elif len(length_bytes) != 4:
     raise IOError("I/O error")
 
@@ -183,15 +186,20 @@
   sys.stdout.buffer.flush()
 
   if verbose:
-    sys.stderr.write("conformance_python: request=%s, response=%s\n" % (
-                       request.ShortDebugString().c_str(),
-                       response.ShortDebugString().c_str()))
+    sys.stderr.write(
+        "conformance_python: request=%s, response=%s\n"
+        % (
+            text_format.MessageToString(request, as_one_line=True),
+            text_format.MessageToString(response, as_one_line=True),
+        )
+    )
 
   global test_count
   test_count += 1
 
   return True
 
+
 while True:
   if not do_test_io():
     sys.stderr.write(
diff --git a/conformance/conformance_test.cc b/conformance/conformance_test.cc
index 10b3103..2b0d728 100644
--- a/conformance/conformance_test.cc
+++ b/conformance/conformance_test.cc
@@ -21,6 +21,7 @@
 #include "google/protobuf/util/field_comparator.h"
 #include "google/protobuf/util/message_differencer.h"
 #include "absl/container/btree_map.h"
+#include "absl/container/flat_hash_set.h"
 #include "absl/log/absl_check.h"
 #include "absl/log/absl_log.h"
 #include "absl/strings/str_cat.h"
@@ -29,6 +30,7 @@
 #include "conformance/conformance.pb.h"
 #include "conformance/conformance.pb.h"
 #include "google/protobuf/descriptor_legacy.h"
+#include "google/protobuf/endian.h"
 #include "google/protobuf/message.h"
 #include "google/protobuf/text_format.h"
 
@@ -42,6 +44,15 @@
 
 namespace {
 
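+// Replaces, in place, every occurrence of "replace_word" in "input" with
+// "replace_by".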
+static void ReplaceAll(std::string& input, std::string replace_word,
+                       std::string replace_by) {
+  size_t pos = input.find(replace_word);
+  while (pos != std::string::npos) {
+    input.replace(pos, replace_word.length(), replace_by);
+    pos = input.find(replace_word, pos + replace_by.length());
+  }
+}
+
 static std::string ToOctString(const std::string& binary_string) {
   std::string oct_string;
   for (size_t i = 0; i < binary_string.size(); i++) {
@@ -57,6 +68,52 @@
   return oct_string;
 }
 
+// Builds a printable serialization of the request: the four length-prefix
+// bytes as hex escapes, followed by the request body as octal escapes.
+static std::string ProduceOctalSerialized(const std::string& request,
+                                          uint32_t len) {
+  char* len_split_bytes = static_cast<char*>(static_cast<void*>(&len));
+
+  std::string out;
+
+  std::string hex_repr;
+  for (int i = 0; i < 4; i++) {
+    auto conversion = (unsigned int)static_cast<uint8_t>(len_split_bytes[i]);
+    std::string hex = absl::StrFormat("\\x%x", conversion);
+    absl::StrAppend(&hex_repr, hex);
+  }
+
+  absl::StrAppend(&out, hex_repr);
+
+  absl::StrAppend(&out, ToOctString(request));
+
+  return out;
+}
+
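+// Writes "octal_serialized" to "<output_dir>/<test_name>.txt" (with dots in
+// the test name replaced by underscores) and returns the full path, or an
+// empty string if the file could not be opened.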
+static std::string WriteToFile(const std::string& octal_serialized,
+                               const std::string& output_dir,
+                               const std::string& test_name) {
+  std::string test_name_txt = test_name;
+  ReplaceAll(test_name_txt, ".", "_");
+  absl::StrAppend(&test_name_txt, ".txt");
+  std::string full_filename;
+  if (!output_dir.empty()) {
+    full_filename = output_dir;
+    if (*output_dir.rbegin() != '/') {
+      full_filename.push_back('/');
+    }
+    absl::StrAppend(&full_filename, test_name_txt);
+  }
+  std::ofstream os{std::string(full_filename)};
+  if (os) {
+    os << octal_serialized;
+    return full_filename;
+  } else {
+    ABSL_LOG(INFO) << "Failed to open file for debugging: " << full_filename
+                   << "\n";
+    return "";
+  }
+}
+
 // Removes all newlines.
 static void Normalize(std::string& input) {
   input.erase(std::remove(input.begin(), input.end(), '\n'), input.end());
@@ -376,7 +433,10 @@
     const std::string& equivalent_wire_format, bool require_same_wire_format) {
   const ConformanceRequest& request = setting.GetRequest();
   ConformanceResponse response;
-  RunTest(setting.GetTestName(), request, &response);
+  if (!RunTest(setting.GetTestName(), request, &response)) {
+    return;
+  }
+
   VerifyResponse(setting, equivalent_wire_format, response, true,
                  require_same_wire_format);
 }
@@ -451,7 +511,7 @@
   }
 }
 
-void ConformanceTestSuite::RunTest(const std::string& test_name,
+bool ConformanceTestSuite::RunTest(const std::string& test_name,
                                    const ConformanceRequest& request,
                                    ConformanceResponse* response) {
   if (test_names_.insert(test_name).second == false) {
@@ -462,7 +522,43 @@
   std::string serialized_response;
   request.SerializeToString(&serialized_request);
 
-  runner_->RunTest(test_name, serialized_request, &serialized_response);
+  uint32_t len = internal::little_endian::FromHost(
+      static_cast<uint32_t>(serialized_request.size()));
+
+  if (!debug_) {  // Not in debug mode; run every test.
+  } else if (debug_test_names_->erase(test_name) == 1) {
+    std::string octal = ProduceOctalSerialized(serialized_request, len);
+    std::string full_filename = WriteToFile(octal, output_dir_, test_name);
+    if (!full_filename.empty()) {
+      absl::StrAppendFormat(
+          &output_, "Produced octal serialized request file for test %s\n",
+          test_name);
+      absl::StrAppendFormat(
+          &output_,
+          "  To pipe the serialized request directly to the testee, run from "
+          "the root of your workspace:\n    printf $(<\"%s\") | "
+          "./bazel-bin/google/protobuf/conformance/%s\n\n",
+          full_filename, testee_);
+      absl::StrAppendFormat(
+          &output_,
+          "  To inspect the wire format of the serialized request, run "
+          "(disclaimer: this may not work properly on non-Linux platforms):\n"
+          "    contents=$(<\"%s\"); sub=$(cut -d \\\\ -f 6- <<< $contents); "
+          "printf \"\\\\${sub}\" | protoscope\n\n\n",
+          full_filename);
+    }
+  } else {  // Test is not run, as it was not asked to be debugged.
+    expected_to_fail_.erase(test_name);
+    return false;
+  }
+
+  response->set_protobuf_payload(serialized_request);
+
+  runner_->RunTest(test_name, len, serialized_request, &serialized_response);
 
   if (!response->ParseFromString(serialized_response)) {
     response->Clear();
@@ -475,6 +571,7 @@
         test_name, TruncateRequest(request).ShortDebugString(),
         TruncateResponse(*response).ShortDebugString());
   }
+  return true;
 }
 
 std::string ConformanceTestSuite::WireFormatToString(WireFormat wire_format) {
@@ -511,7 +608,10 @@
   unexpected_failing_tests_.clear();
   unexpected_succeeding_tests_.clear();
 
-  output_ = "\nCONFORMANCE TEST BEGIN ====================================\n\n";
+  std::string mode = debug_ ? "DEBUG" : "TEST";
+  absl::StrAppendFormat(
+      &output_, "CONFORMANCE %s BEGIN ====================================\n\n",
+      mode);
 
   failure_list_filename_ = filename;
   expected_to_fail_.clear();
@@ -604,11 +704,9 @@
 
   absl::StrAppendFormat(&output_,
                         "CONFORMANCE SUITE %s: %d successes, %zu skipped, "
-                        "%d expected failures, %zu unexpected failures, %zu "
-                        "unexpected_failure_messages.\n",
+                        "%d expected failures, %zu unexpected failures.\n",
                         ok ? "PASSED" : "FAILED", successes_, skipped_.size(),
-                        expected_failures_, unexpected_failing_tests_.size(),
-                        unexpected_failure_messages_.size());
+                        expected_failures_, unexpected_failing_tests_.size());
   absl::StrAppendFormat(&output_, "\n");
 
   output->assign(output_);
diff --git a/conformance/conformance_test.h b/conformance/conformance_test.h
index 2785138..d10c983 100644
--- a/conformance/conformance_test.h
+++ b/conformance/conformance_test.h
@@ -15,6 +15,7 @@
 #define CONFORMANCE_CONFORMANCE_TEST_H
 
 #include <cstddef>
+#include <cstdint>
 #include <memory>
 #include <string>
 #include <vector>
@@ -48,13 +49,14 @@
 
   // Call to run a single conformance test.
   //
+  // "len" is the byte length of a serialized conformance.ConformanceRequest.
   // "input" is a serialized conformance.ConformanceRequest.
   // "output" should be set to a serialized conformance.ConformanceResponse.
   //
   // If there is any error in running the test itself, set "runtime_error" in
   // the response.
-  virtual void RunTest(const std::string& test_name, const std::string& input,
-                       std::string* output) = 0;
+  virtual void RunTest(const std::string& test_name, uint32_t len,
+                       const std::string& input, std::string* output) = 0;
 };
 
 // Test runner that spawns the process being tested and communicates with it
@@ -78,8 +80,8 @@
 
   ~ForkPipeRunner() override = default;
 
-  void RunTest(const std::string& test_name, const std::string& request,
-               std::string* response) override;
+  void RunTest(const std::string& test_name, uint32_t len,
+               const std::string& request, std::string* response) override;
 
  private:
   void SpawnTestProgram();
@@ -127,11 +129,14 @@
 class ConformanceTestSuite {
  public:
   ConformanceTestSuite()
-      : verbose_(false),
+      : testee_(""),
+        verbose_(false),
         performance_(false),
         enforce_recommended_(false),
         maximum_edition_(Edition::EDITION_PROTO3),
-        failure_list_flag_name_("--failure_list") {}
+        failure_list_flag_name_("--failure_list"),
+        debug_test_names_(nullptr),
+        debug_(false) {}
   virtual ~ConformanceTestSuite() = default;
 
   void SetPerformance(bool performance) { performance_ = performance; }
@@ -159,7 +164,18 @@
   }
 
   // Sets the path of the output directory.
-  void SetOutputDir(const char* output_dir) { output_dir_ = output_dir; }
+  void SetOutputDir(const std::string& output_dir) { output_dir_ = output_dir; }
+
+  // Sets if we are running the test in debug mode.
+  void SetDebug(bool debug) { debug_ = debug; }
+
+  // Sets the name of the testee binary (used in debug output).
+  void SetTestee(const std::string& testee) { testee_ = testee; }
+
+  // Sets the debug test names.  The suite stores a pointer to this set and
+  // erases each name as its test is run.
+  void SetDebugTestNames(absl::flat_hash_set<std::string>& debug_test_names) {
+    debug_test_names_ = &debug_test_names;
+  }
 
   // Run all the conformance tests against the given test runner.
   // Test output will be stored in "output".
@@ -168,6 +184,9 @@
   // failure list.
   // The filename here is *only* used to create/format useful error messages for
   // how to update the failure list.  We do NOT read this file at all.
+
+  // "debug_test_names" holds the list of test names that the user requested to
+  // debug.  If this is empty, we will run all the tests.
   bool RunSuite(ConformanceTestRunner* runner, std::string* output,
                 const std::string& filename,
                 conformance::FailureSet* failure_list);
@@ -271,7 +290,8 @@
                                const std::string& equivalent_wire_format,
                                bool require_same_wire_format = false);
 
-  void RunTest(const std::string& test_name,
+  // Returns true if our runner_ ran the test and false if it did not.
+  bool RunTest(const std::string& test_name,
                const conformance::ConformanceRequest& request,
                conformance::ConformanceResponse* response);
 
@@ -280,6 +300,7 @@
   virtual void RunSuiteImpl() = 0;
 
   ConformanceTestRunner* runner_;
+  std::string testee_;
   int successes_;
   int expected_failures_;
   bool verbose_;
@@ -290,6 +311,8 @@
   std::string output_dir_;
   std::string failure_list_flag_name_;
   std::string failure_list_filename_;
+  absl::flat_hash_set<std::string>* debug_test_names_;
+  bool debug_;
 
   // The set of test names that are expected to fail in this run, but haven't
   // failed yet.
diff --git a/conformance/conformance_test_runner.cc b/conformance/conformance_test_runner.cc
index 49aaea3..3fbd9e2 100644
--- a/conformance/conformance_test_runner.cc
+++ b/conformance/conformance_test_runner.cc
@@ -32,6 +32,7 @@
 
 #include <errno.h>
 #include <signal.h>
+#include <stdio.h>
 #include <sys/types.h>
 #include <sys/wait.h>
 #include <unistd.h>
@@ -47,9 +48,11 @@
 #include <string>
 #include <vector>
 
+#include "absl/container/flat_hash_set.h"
 #include "absl/log/absl_log.h"
 #include "absl/strings/ascii.h"
 #include "absl/strings/str_cat.h"
+#include "absl/strings/str_format.h"
 #include "conformance/conformance.pb.h"
 #include "conformance/conformance.pb.h"
 #include "conformance_test.h"
@@ -122,7 +125,7 @@
   fprintf(stderr,
           "                              should contain one test name per\n");
   fprintf(stderr,
-          "                              line.  Use '#' for comments.\n");
+          "                              line.  Use '#' for comments.\n\n");
   fprintf(stderr,
           "  --text_format_failure_list <filename>   Use to specify list \n");
   fprintf(stderr,
@@ -133,7 +136,7 @@
   fprintf(stderr,
           "                              File should contain one test name \n");
   fprintf(stderr,
-          "                              per line.  Use '#' for comments.\n");
+          "                              per line.  Use '#' for comments.\n\n");
 
   fprintf(stderr,
           "  --enforce_recommended       Enforce that recommended test\n");
@@ -143,19 +146,30 @@
           "                              this flag if you want to be\n");
   fprintf(stderr,
           "                              strictly conforming to protobuf\n");
-  fprintf(stderr, "                              spec.\n");
+  fprintf(stderr, "                              spec.\n\n");
   fprintf(stderr,
-          "  --maximum_edition <edition>   Only run conformance tests up\n");
+          "  --maximum_edition <edition> Only run conformance tests up to\n");
   fprintf(stderr,
-          "                              to and including the specified\n");
-  fprintf(stderr, "                              edition.\n");
+          "                              and including the specified\n");
+  fprintf(stderr, "                              edition.\n\n");
   fprintf(stderr,
           "  --output_dir                <dirname> Directory to write\n"
-          "                              output files.\n");
+          "                              output files.\n\n");
+  fprintf(stderr,
+          "  --debug <test_name1> <test_name2> ... <test_nameN> Debug the \n");
+  fprintf(stderr, "                              specified tests by running\n");
+  fprintf(stderr,
+          "                              them in isolation and producing\n");
+  fprintf(stderr,
+          "                              serialized request data for piping\n");
+  fprintf(stderr, "                              directly to the testee.\n\n");
+  fprintf(stderr, "  --performance               Boolean option\n");
+  fprintf(stderr, "                              for enabling run of\n");
+  fprintf(stderr, "                              performance tests.\n");
   exit(1);
 }
 
-void ForkPipeRunner::RunTest(const std::string &test_name,
+void ForkPipeRunner::RunTest(const std::string &test_name, uint32_t len,
                              const std::string &request,
                              std::string *response) {
   if (child_pid_ < 0) {
@@ -163,8 +177,6 @@
   }
   current_test_name_ = test_name;
 
-  uint32_t len =
-      internal::little_endian::FromHost(static_cast<uint32_t>(request.size()));
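+  // "len" arrives already converted to little-endian by the caller; write it
+  // as the 4-byte length prefix, then the request payload.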
   CheckedWrite(write_fd_, &len, sizeof(uint32_t));
   CheckedWrite(write_fd_, request.c_str(), request.size());
 
@@ -210,57 +222,97 @@
     fprintf(stderr, "No test suites found.\n");
     return EXIT_FAILURE;
   }
+
+  string program;
+  string testee;
+  std::vector<string> program_args;
+  bool performance = false;
+  bool debug = false;
+  absl::flat_hash_set<string> debug_test_names;
+  bool enforce_recommended = false;
+  Edition maximum_edition = EDITION_UNKNOWN;
+  std::string output_dir;
+  bool verbose = false;
+
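+  // Parse the flags shared by all suites once, rather than re-scanning argv
+  // for every suite.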
+  for (int arg = 1; arg < argc; ++arg) {
+    if (strcmp(argv[arg], "--performance") == 0) {
+      performance = true;
+    } else if (strcmp(argv[arg], "--verbose") == 0) {
+      verbose = true;
+    } else if (strcmp(argv[arg], "--enforce_recommended") == 0) {
+      enforce_recommended = true;
+    } else if (strcmp(argv[arg], "--maximum_edition") == 0) {
+      if (++arg == argc) UsageError();
+      Edition edition = EDITION_UNKNOWN;
+      if (!Edition_Parse(absl::StrCat("EDITION_", argv[arg]), &edition)) {
+        fprintf(stderr, "Unknown edition: %s\n", argv[arg]);
+        UsageError();
+      }
+      maximum_edition = edition;
+    } else if (strcmp(argv[arg], "--output_dir") == 0) {
+      if (++arg == argc) UsageError();
+      output_dir = argv[arg];
+
+    } else if (strcmp(argv[arg], "--debug") == 0) {
+      if (++arg == argc) UsageError();
+      for (int debug_arg = arg; debug_arg < argc; ++debug_arg) {
+        // Stop when we either find another flag or reach the last argument
+        // (the program to test).
+        if (argv[debug_arg][0] == '-' || debug_arg == argc - 1) {
+          arg = debug_arg - 1;
+          break;
+        }
+        debug_test_names.insert(argv[debug_arg]);
+      }
+
+    } else if (argv[arg][0] == '-') {
+      bool recognized_flag = false;
+      for (ConformanceTestSuite *suite : suites) {
+        if (strcmp(argv[arg], suite->GetFailureListFlagName().c_str()) == 0) {
+          if (++arg == argc) UsageError();
+          recognized_flag = true;
+        }
+      }
+      if (!recognized_flag) {
+        fprintf(stderr, "Unknown option: %s\n", argv[arg]);
+        UsageError();
+      }
+    } else {
+      program += argv[arg++];
+      while (arg < argc) {
+        program_args.push_back(argv[arg]);
+        arg++;
+      }
+    }
+  }
+
+  if (!debug_test_names.empty()) {
+    debug = true;
+  }
+  auto last_slash = program.find_last_of('/');
+  if (last_slash != string::npos) {
+    testee = program.substr(last_slash + 1);
+  }
+
   bool all_ok = true;
   for (ConformanceTestSuite *suite : suites) {
-    string program;
-    std::vector<string> program_args;
     string failure_list_filename;
     conformance::FailureSet failure_list;
-
-    bool performance = false;
     for (int arg = 1; arg < argc; ++arg) {
       if (strcmp(argv[arg], suite->GetFailureListFlagName().c_str()) == 0) {
         if (++arg == argc) UsageError();
         failure_list_filename = argv[arg];
         ParseFailureList(argv[arg], &failure_list);
-      } else if (strcmp(argv[arg], "--performance") == 0) {
-        performance = true;
-        suite->SetPerformance(true);
-      } else if (strcmp(argv[arg], "--verbose") == 0) {
-        suite->SetVerbose(true);
-      } else if (strcmp(argv[arg], "--enforce_recommended") == 0) {
-        suite->SetEnforceRecommended(true);
-      } else if (strcmp(argv[arg], "--maximum_edition") == 0) {
-        if (++arg == argc) UsageError();
-        Edition edition = EDITION_UNKNOWN;
-        if (!Edition_Parse(absl::StrCat("EDITION_", argv[arg]), &edition)) {
-          fprintf(stderr, "Unknown edition: %s\n", argv[arg]);
-          UsageError();
-        }
-        suite->SetMaximumEdition(edition);
-      } else if (strcmp(argv[arg], "--output_dir") == 0) {
-        if (++arg == argc) UsageError();
-        suite->SetOutputDir(argv[arg]);
-      } else if (argv[arg][0] == '-') {
-        bool recognized_flag = false;
-        for (ConformanceTestSuite *suite : suites) {
-          if (strcmp(argv[arg], suite->GetFailureListFlagName().c_str()) == 0) {
-            if (++arg == argc) UsageError();
-            recognized_flag = true;
-          }
-        }
-        if (!recognized_flag) {
-          fprintf(stderr, "Unknown option: %s\n", argv[arg]);
-          UsageError();
-        }
-      } else {
-        program += argv[arg++];
-        while (arg < argc) {
-          program_args.push_back(argv[arg]);
-          arg++;
-        }
       }
     }
+    suite->SetPerformance(performance);
+    suite->SetVerbose(verbose);
+    suite->SetEnforceRecommended(enforce_recommended);
+    suite->SetMaximumEdition(maximum_edition);
+    suite->SetOutputDir(output_dir);
+    suite->SetDebug(debug);
+    suite->SetDebugTestNames(debug_test_names);
+    suite->SetTestee(testee);
 
     ForkPipeRunner runner(program, program_args, performance);
 
@@ -270,6 +322,16 @@
 
     fwrite(output.c_str(), 1, output.size(), stderr);
   }
+
+  if (!debug_test_names.empty()) {
+    fprintf(stderr,
+            "These tests were requested to be debugged, but they do "
+            "not exist. Revise the test names:\n\n");
+    for (const string &test_name : debug_test_names) {
+      fprintf(stderr, "  %s\n", test_name.c_str());
+    }
+    fprintf(stderr, "\n\n");
+  }
   return all_ok ? EXIT_SUCCESS : EXIT_FAILURE;
 }
 
diff --git a/conformance/text_format_conformance_suite.cc b/conformance/text_format_conformance_suite.cc
index 2f6a609..508206f 100644
--- a/conformance/text_format_conformance_suite.cc
+++ b/conformance/text_format_conformance_suite.cc
@@ -175,7 +175,10 @@
       setting.ConformanceLevelToString(level), ".",
       setting.GetSyntaxIdentifier(), ".TextFormatInput.", test_name);
 
-  suite_.RunTest(effective_test_name, request, &response);
+  if (!suite_.RunTest(effective_test_name, request, &response)) {
+    return;
+  }
+
   TestStatus test;
   test.set_name(effective_test_name);
   if (response.result_case() == ConformanceResponse::kParseError) {