void Shell_script_tester::execute_script()

in unittest/shell_script_tester.cc [1335:1620]
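
Runs one test script end to end. When in_chunks is set, the script is executed
chunk by chunk in interactive mode and each chunk's output is validated (or
recorded, when generating validation files); otherwise the whole script is
processed as a single non-interactive stream and validated afterwards. An empty
path means the setup script is being run, and is_pre_script selects the
independent preprocessing script that may accompany a test.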


void Shell_script_tester::execute_script(const std::string &path,
                                         bool in_chunks, bool is_pre_script) {
  // If no path is provided the setup script is executed; otherwise the
  // pre-script or the (new- or old-format) test script path is derived
  std::string script(path.empty()    ? _setup_script
                     : is_pre_script ? PRE_SCRIPT(path)
                     : _new_format   ? NEW_TEST_SCRIPT(path)
                                     : TEST_SCRIPT(path));
  std::ifstream stream(script.c_str());

  if (!stream.fail()) {
    TestResultCatcher catcher(&output_handler.full_output);

    // Capture GTest output if we're not tracing, so that it can be dumped
    // together with the test trace at the end.
    if (g_test_trace_scripts == 0)
      testing::UnitTest::GetInstance()->listeners().Append(&catcher);

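    // Make sure the listener is released on every exit path (early return,
    // exception).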
    shcore::on_leave_scope cleaner([&catcher]() {
      if (g_test_trace_scripts == 0)
        testing::UnitTest::GetInstance()->listeners().Release(&catcher);
    });

    // When it is a test script, preprocesses it so the
    // right execution scenario is in place
    if (!path.empty()) {
      if (!is_pre_script) {
        // Processes independent preprocessing file
        std::string pre_script = PRE_SCRIPT(path);
        std::ifstream pre_stream(pre_script.c_str());
        if (!pre_stream.fail()) {
          pre_stream.close();
          _custom_context = "Preprocessing";
          execute_script(path, false, true);
        }
      }

      // Preprocesses the test file itself
      _custom_context = "Setup";
      process_setup(stream);
    }

    // Processes the file, chunk by chunk (interactive mode) or as a single
    // non-interactive stream
    if (in_chunks) {
      _options->interactive = true;
      if (load_source_chunks(script, stream)) {
        if (!_chunks.empty()) {
          // Loads the validations
          load_validations(_new_format ? VAL_SCRIPT(path)
                                       : VALIDATION_SCRIPT(path));
        } else {
          ADD_SKIPPED_TEST("All test chunks were skipped.");
        }
      }

      // Abort the script processing if something went wrong during validation
      // loading
      if (testutil->test_skipped() && ::testing::Test::HasFailure()) return;

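      // Recording mode: the expected output is captured into the validation
      // file; an existing file is preserved by recording to a ".new" sibling.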
      std::ofstream ofile;
      if (g_generate_validation_file) {
        std::string vfile_name = VALIDATION_SCRIPT(path);
        if (shcore::is_file(vfile_name)) vfile_name.append(".new");
        ofile.open(vfile_name, std::ofstream::out | std::ofstream::trunc);
      }

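      // Set when a statement times out; the chunks that follow are skipped
      // until the "Cleanup" chunk is reached.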
      bool skip_until_cleanup = false;
      for (size_t index = 0; index < _chunk_order.size(); index++) {
        // Prints debugging information
        _cout.str("");
        _cout.clear();
        if (str_beginswith(_chunk_order[index], "INCLUDE ")) {
          std::string chunk_log = _chunk_order[index];
          std::string splitter(chunk_log.length(), '=');
          output_handler.debug_print(makelblue(splitter));
          output_handler.debug_print(makelblue(chunk_log));
          output_handler.debug_print(makelblue(splitter));
        } else {
          std::string chunk_log = "CHUNK: " + _chunk_order[index];
          std::string splitter(chunk_log.length(), '-');
          output_handler.debug_print(makeyellow(splitter));
          output_handler.debug_print(makeyellow(chunk_log));
          output_handler.debug_print(makeyellow(splitter));
        }

        // Gets the chunk for the current id
        auto &chunk = _chunks[_chunk_order[index]];

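        // A chunk runs only when its context expression evaluates to true;
        // an error while evaluating it aborts the remaining chunks.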
        bool enabled;
        try {
          enabled = context_enabled(chunk.def->context);

          if (enabled && skip_until_cleanup) {
            if (_chunk_order[index] == "Cleanup") {
              skip_until_cleanup = false;
            } else {
              output_handler.debug_print("Chunk skipped...");
              enabled = false;
            }
          }
        } catch (const std::exception &e) {
          ADD_FAILURE_AT(chunk.source.c_str(), chunk.code[0].first)
              << makered("ERROR EVALUATING CONTEXT: ") << e.what() << "\n"
              << "\tCHUNK: " << chunk.def->line << "\n";
          break;
        }

        // Executes the chunk line by line
        if (enabled) {
          _custom_context = "while executing chunk \"" + chunk.def->line +
                            "\" at " + chunk.source + ":" +
                            std::to_string(chunk.def->linenum);
          set_scripting_context();
          auto &code = chunk.code;
          std::string full_statement;
          for (size_t chunk_item = 0; chunk_item < code.size(); chunk_item++) {
            std::string line(code[chunk_item].second);

            full_statement.append(line);
            // Execution context is at the statement (possibly multi-line)
            // level
            _custom_context = chunk.source + "@[" + _chunk_order[index] + "][" +
                              std::to_string(chunk.code[chunk_item].first) +
                              ":" + full_statement + "]";

            // There's a chance to do per-line preprocessing
            pre_process_line(path, &line);

            if (testutil)
              testutil->set_test_execution_context(
                  chunk.source.c_str(), code[chunk_item].first, this);

            if (g_tdb->will_execute(chunk.source, chunk.code[chunk_item].first,
                                    line) ==
                mysqlsh::Test_debugger::Action::Skip_execute) {
              continue;
            }

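            // Optionally guard the statement with a watchdog; a timeout fails
            // the line and skips the rest of the script up to "Cleanup".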
            try {
              std::unique_ptr<Timeout> timeout;
              if (g_test_script_timeout > 0)
                timeout = std::make_unique<Timeout>(this);
              execute(chunk.code[chunk_item].first, line);
              if (timeout && timeout->did_timeout()) {
                ADD_FAILURE() << "line took too long: " << line
                              << "\nSkipping the rest of the script...";
                skip_until_cleanup = true;
              }
            } catch (...) {
              g_tdb->did_throw(chunk.code[chunk_item].first, line);
              throw;
            }
            if (testutil && testutil->test_skipped()) return;

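            // A statement may span several lines; keep accumulating input
            // until the shell reports a complete statement.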
            if (_interactive_shell->input_state() == shcore::Input_state::Ok)
              full_statement.clear();
            else
              full_statement.append("\n");

            g_tdb->did_execute(chunk.code[chunk_item].first, line);

            if (::testing::Test::HasFailure()) {
              if (g_tdb->did_execute_test_failure() ==
                  mysqlsh::Test_debugger::Action::Abort)
                FAIL();
            }
          }

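          // Flush any multi-line statement still pending at the end of the
          // chunk.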
          execute("");

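          // Recording mode: instead of validating, append the captured output
          // to the validation file; e.g. for a JS script (chunk token "//@")
          // an entry looks like:
          //   //@<OUT> <chunk title>
          //   ...captured stdout...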
          if (g_generate_validation_file) {
            // Only saves the data if the chunk is not a reference
            if (chunk.def->id == chunk.def->validation_id) {
              if (_options->trace_protocol) {
                std::string protocol_text = _cout.str();
                if (!protocol_text.empty()) {
                  ofile << get_chunk_token() << "<PROTOCOL> "
                        << _chunk_order[index] << std::endl;
                  ofile << protocol_text << std::endl;
                }
              }

              if (!output_handler.std_out.empty()) {
                ofile << get_chunk_token() << "<OUT> " << _chunk_order[index]
                      << std::endl;
                ofile << output_handler.std_out << std::endl;
              }

              if (!output_handler.std_err.empty()) {
                ofile << get_chunk_token() << "<ERR> " << _chunk_order[index]
                      << std::endl;
                ofile << output_handler.std_err << std::endl;
              }
            }
            output_handler.wipe_all();
            _cout.str("");
            _cout.clear();
          } else {
            // Validation context is at the chunk level
            _custom_context =
                path + "@[" + _chunk_order[index] + " validation]";
            if (!validate(path, _chunk_order[index],
                          chunk.is_validation_optional())) {
              if (g_tdb->on_validate_fail(_chunk_order[index]) ==
                  mysqlsh::Test_debugger::Action::Abort) {
                FAIL();
              }
            } else {
              output_handler.wipe_debug_log();
            }
          }
        } else {
          _skipped_chunks.insert(chunk.def->id);
          if (output_handler.internal_std_err.empty()) {
            SKIP_CHUNK(chunk.def->line);
          } else {
            SKIP_CHUNK(chunk.def->line + ": " +
                       output_handler.internal_std_err);
          }
        }
      }

      if (g_generate_validation_file) {
        ofile.close();
      }
    } else {  // !in_chunks
      _options->interactive = false;

      // Loads the validations, except when processing:
      // - a pre-script
      // - a setup script (empty path)
      if (!is_pre_script && !path.empty())
        load_validations(_new_format ? VAL_SCRIPT(path)
                                     : VALIDATION_SCRIPT(path));

      // Abort the script processing if something went wrong during validation
      // loading
      if (testutil->test_skipped() && ::testing::Test::HasFailure()) return;

      // Processes the script
      _interactive_shell->process_stream(stream, script, {}, true);

      // When the path is empty a setup script is being processed; errors
      // found in setup and pre-scripts are reported here
      if (path.empty() || is_pre_script) {
        if (!output_handler.std_err.empty()) {
          SCOPED_TRACE(output_handler.std_err);
          std::string text("Setup Script: " + _setup_script);
          SCOPED_TRACE(text.c_str());
          ADD_FAILURE();
        }

        output_handler.wipe_all();
        _cout.str("");
        _cout.clear();
      } else {
        // If processing a test script, performs the validations over it
        _options->interactive = true;
        if (!validate(script)) {
          if (g_test_fail_early) {
            // Failure logs are printed on the fly in debug mode
            FAIL();
          }
        } else {
          output_handler.wipe_debug_log();
        }
      }
    }

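    // On failure with tracing disabled, dump the buffered log so the failing
    // run is visible in the test output.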
    if (::testing::Test::HasFailure() && g_test_trace_scripts == 0) {
      std::cerr << makeredbg("----------vvvv Failure Log Begin vvvv----------")
                << std::endl;
      output_handler.flush_debug_log();
      std::cerr << makeredbg("----------^^^^ Failure Log End ^^^^------------")
                << std::endl;
    }

    stream.close();
  } else {
    std::string text("Unable to open test script: " + script);
    SCOPED_TRACE(text.c_str());
    ADD_FAILURE();
  }
}
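
A minimal usage sketch (hedged: the fixture name and script path below are
hypothetical stand-ins; only the execute_script() signature is taken from the
code above):

  TEST_F(Shell_js_script_tester, sample_script) {
    // Execute the chunked test script in interactive mode (in_chunks = true);
    // it is not a pre-script (is_pre_script = false).
    execute_script("sample_script.js", true, false);
  }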