run-tests.sh

Same filename and directory in other branches
  1. 10 core/scripts/run-tests.sh
  2. 11.x core/scripts/run-tests.sh
  3. 9 core/scripts/run-tests.sh
  4. 8.9.x core/scripts/run-tests.sh
  5. 7.x scripts/run-tests.sh

Script for running tests on DrupalCI.

This script is intended for use only by drupal.org's testing. In general, tests should be run directly with phpunit.

@internal

File

core/scripts/run-tests.sh

View source
  1. <?php
  2. /**
  3. * @file
  4. * Script for running tests on DrupalCI.
  5. *
  6. * This script is intended for use only by drupal.org's testing. In general,
  7. * tests should be run directly with phpunit.
  8. *
  9. * @internal
  10. */
  11. use Composer\Autoload\ClassLoader;
  12. use Drupal\BuildTests\Framework\BuildTestBase;
  13. use Drupal\Component\FileSystem\FileSystem;
  14. use Drupal\Component\Utility\Environment;
  15. use Drupal\Component\Utility\Html;
  16. use Drupal\Component\Utility\Timer;
  17. use Drupal\Core\Composer\Composer;
  18. use Drupal\Core\Database\Database;
  19. use Drupal\Core\Test\EnvironmentCleaner;
  20. use Drupal\Core\Test\PhpUnitTestDiscovery;
  21. use Drupal\Core\Test\PhpUnitTestRunner;
  22. use Drupal\Core\Test\SimpletestTestRunResultsStorage;
  23. use Drupal\Core\Test\TestDatabase;
  24. use Drupal\Core\Test\TestRun;
  25. use Drupal\Core\Test\TestRunnerKernel;
  26. use Drupal\Core\Test\TestRunResultsStorageInterface;
  27. use Drupal\FunctionalJavascriptTests\WebDriverTestBase;
  28. use Drupal\KernelTests\KernelTestBase;
  29. use Drupal\Tests\BrowserTestBase;
  30. use Drupal\TestTools\TestRunner\Configuration as Config;
  31. use PHPUnit\Framework\TestCase;
  32. use PHPUnit\Runner\Version;
  33. use Symfony\Component\Console\Helper\DescriptorHelper;
  34. use Symfony\Component\Console\Input\InputDefinition;
  35. use Symfony\Component\Console\Output\ConsoleOutput;
  36. use Symfony\Component\HttpFoundation\Request;
  37. use Symfony\Component\Process\PhpExecutableFinder;
// cspell:ignore exitcode testbots wwwrun
// Define some colors for display. The values are ANSI SGR color codes used
// by the simpletest_script_print* helpers when writing to a color-capable
// terminal.
// A nice calming green.
const SIMPLETEST_SCRIPT_COLOR_PASS = 32;
// An alerting Red.
const SIMPLETEST_SCRIPT_COLOR_FAIL = 31;
// An annoying brown.
const SIMPLETEST_SCRIPT_COLOR_EXCEPTION = 33;
// An appeasing yellow.
const SIMPLETEST_SCRIPT_COLOR_YELLOW = 33;
// A refreshing cyan.
const SIMPLETEST_SCRIPT_COLOR_CYAN = 36;
// A fainting gray.
const SIMPLETEST_SCRIPT_COLOR_GRAY = 90;
// A notable white.
const SIMPLETEST_SCRIPT_COLOR_BRIGHT_WHITE = "1;97";
// Restricting the chunk of queries prevents memory exhaustion.
const SIMPLETEST_SCRIPT_SQLITE_VARIABLE_LIMIT = 350;
// Process exit statuses reported by this script and by the child test
// runner processes it spawns.
const SIMPLETEST_SCRIPT_EXIT_SUCCESS = 0;
const SIMPLETEST_SCRIPT_EXIT_FAILURE = 1;
const SIMPLETEST_SCRIPT_EXIT_ERROR = 2;
const SIMPLETEST_SCRIPT_EXIT_EXCEPTION = 3;
// Setup class autoloading. require_once returns the Composer ClassLoader
// instance on first inclusion.
$autoloader = require_once __DIR__ . '/../../autoload.php';
// Register the TestTools namespace so the Config class used below can be
// autoloaded before the Drupal kernel is available.
$autoloader->addPsr4('Drupal\\TestTools\\', __DIR__ . '/../tests/Drupal/TestTools');
// Setup console output.
$console_output = new ConsoleOutput();
// Get the configuration from the command line.
$script_basename = basename($_SERVER['argv'][0]);
try {
  Config::createFromCommandLine($_SERVER['argv']);
}
catch (\RuntimeException $e) {
  // Invalid command line options: report the problem plus the option
  // synopsis, then exit with a failure status.
  simpletest_script_print_error($e->getMessage() . ' ' . "Use the --help option for the list and usage of the options available.\n");
  simpletest_script_print(Config::commandLineDefinition()->getSynopsis(), SIMPLETEST_SCRIPT_COLOR_PASS);
  exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
}
// If --help requested, show it and exit.
if (Config::get('help')) {
  simpletest_script_help(Config::commandLineDefinition(), $script_basename, $console_output);
  exit(SIMPLETEST_SCRIPT_EXIT_SUCCESS);
}
// Initialize script variables and bootstrap Drupal kernel.
simpletest_script_init($autoloader);
if (!class_exists(TestCase::class)) {
  echo "\nrun-tests.sh requires the PHPUnit testing framework. Use 'composer install' to ensure that it is present.\n\n";
  exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
}
// Defaults the PHPUnit configuration file path.
if (empty(Config::get('phpunit-configuration'))) {
  Config::set('phpunit-configuration', \Drupal::root() . \DIRECTORY_SEPARATOR . 'core');
}
if (!Composer::upgradePHPUnitCheck(Version::id())) {
  simpletest_script_print_error("PHPUnit testing framework version 11 or greater is required when running on PHP 8.4 or greater. Run the command 'composer run-script drupal-phpunit-upgrade' in order to fix this.");
  exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
}
if (Config::get('list')) {
  // Display all available tests organized by one #[Group()] attribute.
  echo "\nAvailable test groups & classes\n";
  echo "-------------------------------\n\n";
  $test_discovery = PhpUnitTestDiscovery::instance()->setConfigurationFilePath(Config::get('phpunit-configuration'));
  try {
    $groups = $test_discovery->getTestClasses(Config::get('module'));
    dump_discovery_warnings();
  }
  catch (Exception $e) {
    error_log((string) $e);
    echo (string) $e;
    exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  }
  // A given class can appear in multiple groups. For historical reasons, we
  // need to present each test only once. The test is shown in the group that
  // is printed first.
  $printed_tests = [];
  foreach ($groups as $group => $tests) {
    echo $group . "\n";
    $tests = array_diff(array_keys($tests), $printed_tests);
    foreach ($tests as $test) {
      echo " - $test\n";
    }
    $printed_tests = array_merge($printed_tests, $tests);
  }
  exit(SIMPLETEST_SCRIPT_EXIT_SUCCESS);
}
// List-files and list-files-json provide a way for external tools such as the
// testbot to prioritize running changed tests.
// @see https://www.drupal.org/node/2569585
if (Config::get('list-files') || Config::get('list-files-json')) {
  // List all files which could be run as tests.
  $test_discovery = PhpUnitTestDiscovery::instance()->setConfigurationFilePath(Config::get('phpunit-configuration'));
  // PhpUnitTestDiscovery::findAllClassFiles() gives us a classmap similar to
  // a Composer 'classmap' array, i.e. class name => file path.
  $test_classes = $test_discovery->findAllClassFiles();
  // JSON output is the easiest.
  if (Config::get('list-files-json')) {
    echo json_encode($test_classes);
    exit(SIMPLETEST_SCRIPT_EXIT_SUCCESS);
  }
  // Output the list of files, one path per line.
  else {
    foreach (array_values($test_classes) as $test_class) {
      echo $test_class . "\n";
    }
  }
  exit(SIMPLETEST_SCRIPT_EXIT_SUCCESS);
}
// Set up the database connection tests will run against.
simpletest_script_setup_database();
// Setup the test run results storage environment. Currently, this coincides
// with the simpletest database schema.
$test_run_results_storage = simpletest_script_setup_test_run_results_storage();
if (Config::get('clean')) {
  // Clean up left-over tables and directories.
  $cleaner = new EnvironmentCleaner(
    DRUPAL_ROOT,
    Database::getConnection(),
    $test_run_results_storage,
    $console_output,
    \Drupal::service('file_system')
  );
  try {
    $cleaner->cleanEnvironment();
  }
  catch (Exception $e) {
    echo (string) $e;
    exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  }
  echo "\nEnvironment cleaned.\n";
  // Get the status messages and print them.
  $messages = \Drupal::messenger()->messagesByType('status');
  foreach ($messages as $text) {
    echo " - " . $text . "\n";
  }
  exit(SIMPLETEST_SCRIPT_EXIT_SUCCESS);
}
// Print a banner describing the environment the tests will run in.
echo "\n";
echo "Drupal test run\n\n";
echo "--------------------------------------------------------------\n";
echo sprintf("Drupal Version.......: %s\n", \Drupal::VERSION);
echo sprintf("PHP Version..........: %s\n", \PHP_VERSION);
echo sprintf("PHP Binary...........: %s\n", (new PhpExecutableFinder())->find());
echo sprintf("PHPUnit Version......: %s\n", Version::id());
echo sprintf("PHPUnit configuration: %s\n", Config::get('phpunit-configuration'));
if (Config::get('dburl')) {
  // Resolve the database driver's install Tasks class to report a
  // human-readable database name and version.
  $sut_connection_info = Database::getConnectionInfo();
  $sut_tasks_class = $sut_connection_info['default']['namespace'] . "\\Install\\Tasks";
  $sut_installer = new $sut_tasks_class();
  $sut_connection = Database::getConnection();
  echo sprintf("Database.............: %s\n", (string) $sut_installer->name());
  echo sprintf("Database Version.....: %s\n", $sut_connection->version());
}
echo sprintf("Working directory....: %s\n", getcwd());
echo "--------------------------------------------------------------\n";
echo "\n";
$test_list = simpletest_script_get_test_list();
// Try to allocate unlimited time to run the tests.
Environment::setTimeLimit(0);
simpletest_script_reporter_init();
// Build the full run list, repeating the whole test list --repeat times.
$tests_to_run = [];
for ($i = 0; $i < Config::get('repeat'); $i++) {
  $tests_to_run = array_merge($tests_to_run, $test_list);
}
// Execute tests.
$status = simpletest_script_execute_batch($test_run_results_storage, $tests_to_run);
// Stop the timer.
simpletest_script_reporter_timer_stop();
// Ensure all test locks are released once finished. If tests are run with a
// concurrency of 1 then each test will clean up its own lock. Test locks are
// not released if using a higher concurrency to ensure each test has unique
// fixtures.
TestDatabase::releaseAllTestLocks();
// Display results before database is cleared.
simpletest_script_reporter_display_results($test_run_results_storage);
if (Config::get('xml')) {
  simpletest_script_reporter_write_xml_results($test_run_results_storage);
}
// Clean up all test results unless --keep-results was requested (or forced
// by --die-on-fail).
if (!Config::get('keep-results')) {
  try {
    $cleaner = new EnvironmentCleaner(
      DRUPAL_ROOT,
      Database::getConnection(),
      $test_run_results_storage,
      $console_output,
      \Drupal::service('file_system')
    );
    $cleaner->cleanResults();
  }
  catch (Exception $e) {
    echo (string) $e;
    exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  }
}
// Test complete, exit.
exit($status);
  232. /**
  233. * Print help text.
  234. */
  235. function simpletest_script_help(InputDefinition $input_definition, string $script_basename, ConsoleOutput $console_output): void {
  236. echo <<
  237. Run Drupal tests from the shell.
  238. Usage: {$script_basename} [OPTIONS]
  239. Example: {$script_basename} Profile
  240. EOF;
  241. $helper = new DescriptorHelper();
  242. $helper->describe($console_output, $input_definition);
  243. echo <<
  244. To run this script you will normally invoke it from the root directory of your
  245. Drupal installation as the webserver user (differs per configuration), or root:
  246. sudo -u [wwwrun|www-data|etc] php ./core/scripts/{$script_basename} --url http://example.com/ --all
  247. sudo -u [wwwrun|www-data|etc] php ./core/scripts/{$script_basename} --url http://example.com/ --class Drupal\\\\Tests\\\\block\\\\Functional\\\\BlockTest
  248. Without a preinstalled Drupal site, specify a SQLite database pathname to create
  249. (for the test runner) and the default database connection info (for Drupal) to
  250. use in tests:
  251. sudo -u [wwwrun|www-data|etc] php ./core/scripts/{$script_basename}
  252. --sqlite /tmpfs/drupal/test.sqlite
  253. --dburl mysql://username:password@localhost/database
  254. --url http://example.com/ --all
  255. EOF;
  256. }
/**
 * Initialize script variables and perform general setup requirements.
 *
 * Derives host, path and port from the --url option, populates $_SERVER so
 * code under test behaves as if serving a real HTTP request, exports
 * SIMPLETEST_BASE_URL, verifies symlink support when running concurrently,
 * and boots the test runner kernel. Exits with
 * SIMPLETEST_SCRIPT_EXIT_EXCEPTION if the kernel cannot be booted.
 *
 * @param \Composer\Autoload\ClassLoader $autoloader
 *   The class loader used to create the test runner kernel.
 */
function simpletest_script_init(ClassLoader $autoloader): void {
  // Get URL from arguments.
  $parsed_url = parse_url(Config::get('url'));
  $host = $parsed_url['host'] . (isset($parsed_url['port']) ? ':' . $parsed_url['port'] : '');
  // The inner rtrim() strips trailing whitespace, the outer strips trailing
  // slashes so the path can be safely concatenated below.
  $path = isset($parsed_url['path']) ? rtrim(rtrim($parsed_url['path']), '/') : '';
  $port = $parsed_url['port'] ?? '80';
  // If the passed URL schema is 'https' then setup the $_SERVER variables
  // properly so that testing will run under HTTPS.
  if ($parsed_url['scheme'] == 'https') {
    $_SERVER['HTTPS'] = 'on';
  }
  $base_url = isset($_SERVER['HTTPS']) && $_SERVER['HTTPS'] === 'on' ? 'https://' : 'http://';
  $base_url .= $host;
  if ($path !== '') {
    $base_url .= $path;
  }
  // Tests read the base URL of the site under test from this variable.
  putenv('SIMPLETEST_BASE_URL=' . $base_url);
  // Simulate a request to the site under test so that code relying on
  // request globals behaves consistently on the command line.
  $_SERVER['HTTP_HOST'] = $host;
  $_SERVER['REMOTE_ADDR'] = '127.0.0.1';
  $_SERVER['SERVER_ADDR'] = '127.0.0.1';
  $_SERVER['SERVER_PORT'] = $port;
  $_SERVER['SERVER_SOFTWARE'] = NULL;
  $_SERVER['SERVER_NAME'] = 'localhost';
  $_SERVER['REQUEST_URI'] = $path . '/';
  $_SERVER['REQUEST_METHOD'] = 'GET';
  $_SERVER['SCRIPT_NAME'] = $path . '/index.php';
  $_SERVER['SCRIPT_FILENAME'] = $path . '/index.php';
  $_SERVER['PHP_SELF'] = $path . '/index.php';
  $_SERVER['HTTP_USER_AGENT'] = 'Drupal command line';
  if (Config::get('concurrency') > 1) {
    // Concurrent execution requires symlink support; probe for it by
    // creating and removing a throwaway symlink in the OS temp directory.
    $directory = FileSystem::getOsTemporaryDirectory();
    $test_symlink = @symlink(__FILE__, $directory . '/test_symlink');
    if (!$test_symlink) {
      throw new \RuntimeException('In order to use a concurrency higher than 1 the test system needs to be able to create symlinks in ' . $directory);
    }
    unlink($directory . '/test_symlink');
    putenv('RUN_TESTS_CONCURRENCY=' . Config::get('concurrency'));
  }
  if (!empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] == 'on') {
    // Ensure that any and all environment variables are changed to https://.
    foreach ($_SERVER as $key => $value) {
      // Some values are NULL. Non-NULL values which are falsy will not contain
      // text to replace.
      if ($value) {
        $_SERVER[$key] = str_replace('http://', 'https://', $value);
      }
    }
  }
  // Run from the Drupal root so relative paths resolve consistently.
  chdir(realpath(__DIR__ . '/../..'));
  // Prepare the kernel.
  try {
    $request = Request::createFromGlobals();
    $kernel = TestRunnerKernel::createFromRequest($request, $autoloader);
    $kernel->boot();
    $kernel->preHandle($request);
  }
  catch (Exception $e) {
    echo (string) $e;
    exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  }
}
  321. /**
  322. * Sets up database connection info for running tests.
  323. *
  324. * If this script is executed from within a real Drupal installation, then this
  325. * function essentially performs nothing (unless the --sqlite or --dburl
  326. * parameters were passed).
  327. *
  328. * Otherwise, there are three database connections of concern:
  329. * - --sqlite: The test runner connection, providing access to database tables
  330. * for recording test IDs and assertion results.
  331. * - --dburl: A database connection that is used as base connection info for all
  332. * tests; i.e., every test will spawn from this connection. In case this
  333. * connection uses e.g. SQLite, then all tests will run against SQLite. This
  334. * is exposed as $databases['default']['default'] to Drupal.
  335. * - The actual database connection used within a test. This is the same as
  336. * --dburl, but uses an additional database table prefix. This is
  337. * $databases['default']['default'] within a test environment. The original
  338. * connection is retained in
  339. * $databases['simpletest_original_default']['default'] and restored after
  340. * each test.
  341. */
  342. function simpletest_script_setup_database(): void {
  343. // If there is an existing Drupal installation that contains a database
  344. // connection info in settings.php, then $databases['default']['default'] will
  345. // hold the default database connection already. This connection is assumed to
  346. // be valid, and this connection will be used in tests, so that they run
  347. // against e.g. MySQL instead of SQLite.
  348. // However, in case no Drupal installation exists, this default database
  349. // connection can be set and/or overridden with the --dburl parameter.
  350. if (Config::get('dburl')) {
  351. // Remove a possibly existing default connection (from settings.php).
  352. Database::removeConnection('default');
  353. try {
  354. $databases['default']['default'] = Database::convertDbUrlToConnectionInfo(Config::get('dburl'), TRUE);
  355. }
  356. catch (\InvalidArgumentException $e) {
  357. simpletest_script_print_error('Invalid --dburl. Reason: ' . $e->getMessage());
  358. exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
  359. }
  360. }
  361. // Otherwise, use the default database connection from settings.php.
  362. else {
  363. $databases['default'] = Database::getConnectionInfo('default');
  364. }
  365. if (isset($databases['default']['default'])) {
  366. Database::addConnectionInfo('default', 'default', $databases['default']['default']);
  367. }
  368. }
  369. /**
  370. * Sets up the test runs results storage.
  371. */
  372. function simpletest_script_setup_test_run_results_storage() {
  373. $databases['default'] = Database::getConnectionInfo('default');
  374. // If no --sqlite parameter has been passed, then the test runner database
  375. // connection is the default database connection.
  376. $sqlite = Config::get('sqlite');
  377. if (!$sqlite) {
  378. $sqlite = FALSE;
  379. $databases['test-runner']['default'] = $databases['default']['default'];
  380. }
  381. // Otherwise, set up a SQLite connection for the test runner.
  382. else {
  383. if ($sqlite === ':memory:') {
  384. $sqlite = ':memory:';
  385. }
  386. elseif (is_string($sqlite) && !str_starts_with($sqlite, '/')) {
  387. $sqlite = DRUPAL_ROOT . '/' . $sqlite;
  388. }
  389. $databases['test-runner']['default'] = [
  390. 'driver' => 'sqlite',
  391. 'database' => $sqlite,
  392. 'prefix' => '',
  393. ];
  394. // Create the test runner SQLite database, unless it exists already.
  395. if ($sqlite !== ':memory:' && !file_exists($sqlite)) {
  396. if (!is_dir(dirname($sqlite))) {
  397. mkdir(dirname($sqlite));
  398. }
  399. touch($sqlite);
  400. }
  401. }
  402. // Add the test runner database connection.
  403. Database::addConnectionInfo('test-runner', 'default', $databases['test-runner']['default']);
  404. // Create the test result schema.
  405. try {
  406. $test_run_results_storage = new SimpletestTestRunResultsStorage(Database::getConnection('default', 'test-runner'));
  407. }
  408. catch (\PDOException $e) {
  409. simpletest_script_print_error($databases['test-runner']['default']['driver'] . ': ' . $e->getMessage());
  410. exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
  411. }
  412. if ($sqlite) {
  413. try {
  414. $test_run_results_storage->buildTestingResultsEnvironment(Config::get('keep-results-table'));
  415. }
  416. catch (Exception $e) {
  417. echo (string) $e;
  418. exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  419. }
  420. }
  421. // Verify that the test result database schema exists by checking one table.
  422. try {
  423. if (!$test_run_results_storage->validateTestingResultsEnvironment()) {
  424. simpletest_script_print_error('Missing test result database schema. Use the --sqlite parameter.');
  425. exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
  426. }
  427. }
  428. catch (Exception $e) {
  429. echo (string) $e;
  430. exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  431. }
  432. return $test_run_results_storage;
  433. }
/**
 * Execute a batch of tests.
 *
 * Runs one PHPUnit child process per test class, keeping up to --concurrency
 * children alive at a time, and polls for their completion. Accumulates the
 * total run time and the most severe exit status seen across all children.
 *
 * @param \Drupal\Core\Test\TestRunResultsStorageInterface $test_run_results_storage
 *   The storage that each test run is recorded to.
 * @param array $test_classes
 *   The list of test classes to execute; consumed from the front.
 *
 * @return int
 *   One of the SIMPLETEST_SCRIPT_EXIT_* constants.
 */
function simpletest_script_execute_batch(TestRunResultsStorageInterface $test_run_results_storage, $test_classes) {
  global $test_ids, $total_time;
  $total_status = SIMPLETEST_SCRIPT_EXIT_SUCCESS;
  $total_time = 0;
  $process_runner = PhpUnitTestRunner::create(\Drupal::getContainer())
    ->setConfigurationFilePath(Config::get('phpunit-configuration'));
  // Multi-process execution.
  $children = [];
  while (!empty($test_classes) || !empty($children)) {
    // Top up the pool until --concurrency children are running or the work
    // queue is drained.
    while (count($children) < Config::get('concurrency')) {
      if (empty($test_classes)) {
        break;
      }
      try {
        $test_run = TestRun::createNew($test_run_results_storage);
      }
      catch (Exception $e) {
        echo (string) $e;
        exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
      }
      $test_ids[] = $test_run->id();
      $test_class = array_shift($test_classes);
      // Fork a child process.
      try {
        $process = $process_runner->startPhpUnitOnSingleTestClass(
          $test_run,
          $test_class,
          Config::get('color'),
          Config::get('suppress-deprecations'),
        );
      }
      catch (\Throwable $e) {
        // PHPUnit catches exceptions already, so this is only reached when an
        // exception is thrown in the wrapped test runner environment.
        echo (string) $e;
        exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
      }
      // Register our new child.
      $children[] = [
        'process' => $process,
        'test_run' => $test_run,
        'class' => $test_class,
      ];
    }
    // Wait for children every 2ms.
    usleep(2000);
    // Check if some children finished.
    foreach ($children as $cid => $child) {
      if ($child['process']->isTerminated()) {
        // The child exited.
        $child['test_run']->end(microtime(TRUE));
        $total_time += $child['test_run']->duration();
        $process_outcome = $process_runner->processPhpUnitOnSingleTestClassOutcome(
          $child['process'],
          $child['test_run'],
          $child['class'],
        );
        simpletest_script_reporter_display_summary(
          $child['class'],
          $process_outcome['summaries'][$child['class']],
          $child['test_run']->duration()
        );
        if ($process_outcome['error_output']) {
          echo 'ERROR: ' . implode("\n", $process_outcome['error_output']);
        }
        // Expected failure/error statuses raise the overall status; any other
        // non-zero status is treated as a fatal test runner problem.
        if (in_array($process_outcome['status'], [SIMPLETEST_SCRIPT_EXIT_FAILURE, SIMPLETEST_SCRIPT_EXIT_ERROR])) {
          $total_status = max($process_outcome['status'], $total_status);
        }
        elseif ($process_outcome['status']) {
          $message = 'FATAL ' . $child['class'] . ': test runner returned an unexpected error code (' . $process_outcome['status'] . ').';
          echo $message . "\n";
          $total_status = max(SIMPLETEST_SCRIPT_EXIT_EXCEPTION, $total_status);
          if (Config::get('die-on-fail')) {
            $test_db = new TestDatabase($child['test_run']->getDatabasePrefix());
            $test_directory = $test_db->getTestSitePath();
            echo 'Test database and files kept and test exited immediately on fail so should be reproducible if you change settings.php to use the database prefix ' . $child['test_run']->getDatabasePrefix() . ' and config directories in ' . $test_directory . "\n";
            // Keep fixtures around for reproduction and force the main loop
            // to stop scheduling further repeats.
            Config::set('keep-results', TRUE);
            // Exit repeat loop immediately.
            Config::set('repeat', -1);
          }
        }
        // Remove this child.
        unset($children[$cid]);
      }
    }
  }
  return $total_status;
}
/**
 * Get list of tests based on arguments.
 *
 * If --all specified then return all available tests, otherwise reads list of
 * tests from --module, --directory, --class, --file, or group name arguments.
 * Tests in the '#slow' group are scheduled ahead of all others, and when
 * --ci-parallel-node-total is greater than 1 the list is reduced to this
 * node's share of the work. Exits the script on discovery errors or when no
 * valid tests were specified.
 *
 * @return array
 *   List of tests.
 */
function simpletest_script_get_test_list() {
  $test_discovery = PhpUnitTestDiscovery::instance()->setConfigurationFilePath(Config::get('phpunit-configuration'));
  $test_list = [];
  $slow_tests = [];
  if (Config::get('all') || Config::get('module') || Config::get('directory')) {
    try {
      $groups = $test_discovery->getTestClasses(Config::get('module'), Config::get('types'), Config::get('directory'));
      dump_discovery_warnings();
    }
    catch (Exception $e) {
      echo (string) $e;
      exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
    }
    // Ensure that tests marked explicitly as #[Group('#slow')] are run at the
    // beginning of each job.
    if (key($groups) === '#slow') {
      $slow_tests = array_shift($groups);
    }
    $not_slow_tests = [];
    foreach ($groups as $group => $tests) {
      $not_slow_tests = array_merge($not_slow_tests, $tests);
    }
    // Filter slow tests out of the not slow tests and ensure a unique list
    // since tests may appear in more than one group.
    $not_slow_tests = array_diff_key($not_slow_tests, $slow_tests);
    // If the tests are not being run in parallel, then ensure slow tests run
    // all together first.
    if ((int) Config::get('ci-parallel-node-total') <= 1) {
      sort_tests_by_type_and_methods($slow_tests);
      sort_tests_by_type_and_methods($not_slow_tests);
      $all_tests_list = array_merge($slow_tests, $not_slow_tests);
      assign_tests_sequence($all_tests_list);
      dump_tests_sequence($all_tests_list);
      $test_list = array_keys($all_tests_list);
    }
    else {
      // Sort all tests by the number of test cases on the test class.
      // This is used in combination with #[Group('#slow')] to start the
      // slowest tests first and distribute tests between test runners.
      sort_tests_by_public_method_count($slow_tests);
      sort_tests_by_public_method_count($not_slow_tests);
      $all_tests_list = array_merge($slow_tests, $not_slow_tests);
      assign_tests_sequence($all_tests_list);
      // Now set up a bin per test runner.
      $bin_count = (int) Config::get('ci-parallel-node-total');
      // Now loop over the slow tests and add them to a bin one by one, this
      // distributes the tests evenly across the bins.
      $binned_slow_tests = place_tests_into_bins($slow_tests, $bin_count);
      // --ci-parallel-node-index is 1-based; bins are 0-based.
      $slow_tests_for_job = $binned_slow_tests[Config::get('ci-parallel-node-index') - 1];
      // And the same for the rest of the tests.
      $binned_other_tests = place_tests_into_bins($not_slow_tests, $bin_count);
      $other_tests_for_job = $binned_other_tests[Config::get('ci-parallel-node-index') - 1];
      $test_list = array_merge($slow_tests_for_job, $other_tests_for_job);
      dump_bin_tests_sequence(Config::get('ci-parallel-node-index'), $all_tests_list, $test_list);
      $test_list = array_keys($test_list);
    }
  }
  else {
    if (Config::get('class')) {
      $test_list = [];
      foreach (Config::getTests() as $test_class) {
        // A class argument may carry a '::method' suffix; only the class part
        // is checked for existence.
        [$class_name] = explode('::', $test_class, 2);
        if (class_exists($class_name)) {
          $test_list[] = $test_class;
        }
        else {
          // Unknown class: run discovery solely to suggest alternatives.
          try {
            $groups = $test_discovery->getTestClasses(NULL, Config::get('types'));
            dump_discovery_warnings();
          }
          catch (Exception $e) {
            echo (string) $e;
            exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
          }
          $all_classes = [];
          foreach ($groups as $group) {
            $all_classes = array_merge($all_classes, array_keys($group));
          }
          simpletest_script_print_error('Test class not found: ' . $class_name);
          simpletest_script_print_alternatives($class_name, $all_classes, 6);
          exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
        }
      }
    }
    elseif (Config::get('file')) {
      // Extract test case class names from specified files.
      foreach (Config::getTests() as $file) {
        if (!file_exists($file) || is_dir($file)) {
          simpletest_script_print_error('File not found: ' . $file);
          exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
        }
        $fileTests = current($test_discovery->getTestClasses(NULL, [], $file));
        $test_list = array_merge($test_list, $fileTests);
      }
      assign_tests_sequence($test_list);
      dump_tests_sequence($test_list);
      $test_list = array_keys($test_list);
    }
    else {
      // Remaining arguments are group names.
      try {
        $groups = $test_discovery->getTestClasses(NULL, Config::get('types'));
        dump_discovery_warnings();
      }
      catch (Exception $e) {
        echo (string) $e;
        exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
      }
      // Store all the groups so we can suggest alternatives if we need to.
      $all_groups = array_keys($groups);
      // Verify that the groups exist.
      if (!empty($unknown_groups = array_diff(Config::getTests(), $all_groups))) {
        $first_group = reset($unknown_groups);
        simpletest_script_print_error('Test group not found: ' . $first_group);
        simpletest_script_print_alternatives($first_group, $all_groups);
        exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
      }
      // Merge the tests from the groups together.
      foreach (Config::getTests() as $group_name) {
        $test_list = array_merge($test_list, $groups[$group_name]);
      }
      assign_tests_sequence($test_list);
      dump_tests_sequence($test_list);
      // Ensure our list of tests contains only one entry for each test.
      $test_list = array_keys($test_list);
    }
  }
  if (empty($test_list)) {
    simpletest_script_print_error('No valid tests were specified.');
    exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
  }
  return $test_list;
}
  666. /**
  667. * Sort tests by test type and number of public methods.
  668. */
  669. function sort_tests_by_type_and_methods(array &$tests): void {
  670. uasort($tests, function ($a, $b) {
  671. if (get_test_type_weight($a['name']) === get_test_type_weight($b['name'])) {
  672. return $b['tests_count'] <=> $a['tests_count'];
  673. }
  674. return get_test_type_weight($b['name']) <=> get_test_type_weight($a['name']);
  675. });
  676. }
  677. /**
  678. * Sort tests by the number of public methods in the test class.
  679. *
  680. * Tests with several methods take longer to run than tests with a single
  681. * method all else being equal, so this allows tests runs to be sorted by
  682. * approximately the slowest to fastest tests. Tests that are exceptionally
  683. * slow can be added to the '#slow' group so they are placed first in each
  684. * test run regardless of the number of methods.
  685. *
  686. * @param string[] $tests
  687. * An array of test class names.
  688. */
  689. function sort_tests_by_public_method_count(array &$tests): void {
  690. // @phpstan-ignore argument.type
  691. uasort($tests, function (array $a, array $b) {
  692. return $b['tests_count'] <=> $a['tests_count'];
  693. });
  694. }
  695. /**
  696. * Weights a test class based on which test base class it extends.
  697. *
  698. * @param string $class
  699. * The test class name.
  700. */
  701. function get_test_type_weight(string $class): int {
  702. return match(TRUE) {
  703. is_subclass_of($class, WebDriverTestBase::class) => 3,
  704. is_subclass_of($class, BrowserTestBase::class) => 2,
  705. is_subclass_of($class, BuildTestBase::class) => 2,
  706. is_subclass_of($class, KernelTestBase::class) => 1,
  707. default => 0,
  708. };
  709. }
  710. /**
  711. * Assigns the test sequence.
  712. *
  713. * @param array $tests
  714. * The array of test class info.
  715. */
  716. function assign_tests_sequence(array &$tests): void {
  717. $i = 0;
  718. foreach ($tests as &$testInfo) {
  719. $testInfo['sequence'] = ++$i;
  720. }
  721. }
  722. /**
  723. * Dumps the list of tests in order of execution after sorting.
  724. *
  725. * @param array $tests
  726. * The array of test class info.
  727. */
  728. function dump_tests_sequence(array $tests): void {
  729. if (!Config::get('debug-discovery')) {
  730. return;
  731. }
  732. echo "Test execution sequence\n";
  733. echo "-----------------------\n\n";
  734. echo " Seq Slow? Group Cnt Class\n";
  735. echo "-----------------------------------------\n";
  736. foreach ($tests as $testInfo) {
  737. echo sprintf(
  738. "%4d %5s %15s %4d %s\n",
  739. $testInfo['sequence'],
  740. in_array('#slow', $testInfo['groups']) ? '#slow' : '',
  741. trim_with_ellipsis($testInfo['group'], 15, \STR_PAD_RIGHT),
  742. $testInfo['tests_count'],
  743. trim_with_ellipsis($testInfo['name'], 60, \STR_PAD_LEFT),
  744. );
  745. }
  746. echo "-----------------------------------------\n\n";
  747. }
  748. /**
  749. * Distribute tests into bins.
  750. *
  751. * The given array of tests is split into the available bins. The distribution
  752. * starts with the first test, placing the first test in the first bin, the
  753. * second test in the second bin and so on. This results each bin having a
  754. * similar number of test methods to run in total.
  755. *
  756. * @param string[] $tests
  757. * An array of test class names.
  758. * @param int $bin_count
  759. * The number of bins available.
  760. *
  761. * @return array
  762. * An associative array of bins and the test class names in each bin.
  763. */
  764. function place_tests_into_bins(array $tests, int $bin_count) {
  765. // Create a bin corresponding to each parallel test job.
  766. $bins = array_fill(0, $bin_count, []);
  767. // Go through each test and add them to one bin at a time.
  768. $i = 0;
  769. foreach ($tests as $key => $test) {
  770. $bins[($i++ % $bin_count)][$key] = $test;
  771. }
  772. return $bins;
  773. }
  774. /**
  775. * Dumps the list of tests in order of execution for a bin.
  776. *
  777. * @param int $bin
  778. * The bin.
  779. * @param array $allTests
  780. * The list of all test classes discovered.
  781. * @param array $tests
  782. * The list of test class to run for this bin.
  783. */
  784. function dump_bin_tests_sequence(int $bin, array $allTests, array $tests): void {
  785. if (!Config::get('debug-discovery')) {
  786. return;
  787. }
  788. echo "Test execution sequence. ";
  789. echo "Tests marked *** will be executed in this PARALLEL BIN #{$bin}.\n";
  790. echo "-------------------------------------------------------------------------------------\n\n";
  791. echo "Bin Seq Slow? Group Cnt Class\n";
  792. echo "--------------------------------------------\n";
  793. foreach ($allTests as $testInfo) {
  794. $inBin = isset($tests[$testInfo['name']]);
  795. $message = sprintf(
  796. "%s %4d %5s %15s %4d %s\n",
  797. $inBin ? "***" : " ",
  798. $testInfo['sequence'],
  799. in_array('#slow', $testInfo['groups']) ? '#slow' : '',
  800. trim_with_ellipsis($testInfo['group'], 15, \STR_PAD_RIGHT),
  801. $testInfo['tests_count'],
  802. trim_with_ellipsis($testInfo['name'], 60, \STR_PAD_LEFT),
  803. );
  804. simpletest_script_print($message, $inBin ? SIMPLETEST_SCRIPT_COLOR_BRIGHT_WHITE : SIMPLETEST_SCRIPT_COLOR_GRAY);
  805. }
  806. echo "-------------------------------------------------\n\n";
  807. }
  808. /**
  809. * Initialize the reporter.
  810. */
  811. function simpletest_script_reporter_init(): void {
  812. global $test_list, $results_map;
  813. $results_map = [
  814. 'pass' => 'Pass',
  815. 'fail' => 'Fail',
  816. 'error' => 'Error',
  817. 'skipped' => 'Skipped',
  818. 'cli_fail' => 'Failure',
  819. 'exception' => 'Exception',
  820. 'debug' => 'Log',
  821. ];
  822. // Tell the user about what tests are to be run.
  823. if (Config::get('all')) {
  824. echo "All tests will run.\n\n";
  825. }
  826. else {
  827. echo "Tests to be run:\n";
  828. foreach ($test_list as $class_name) {
  829. echo " - $class_name\n";
  830. }
  831. echo "\n";
  832. }
  833. echo "Test run started:\n";
  834. echo " " . date('l, F j, Y - H:i', $_SERVER['REQUEST_TIME']) . "\n";
  835. Timer::start('run-tests');
  836. echo "\n";
  837. echo "Test summary\n";
  838. echo "------------\n";
  839. echo "\n";
  840. }
  841. /**
  842. * Displays the assertion result summary for a single test class.
  843. *
  844. * @param string $class
  845. * The test class name that was run.
  846. * @param array $results
  847. * The assertion results using #pass, #fail, #exception, #debug array keys.
  848. * @param float|null $duration
  849. * The time taken for the test to complete.
  850. */
  851. function simpletest_script_reporter_display_summary($class, $results, $duration = NULL): void {
  852. // Output all test results vertically aligned.
  853. $summary = [str_pad($results['#pass'], 4, " ", STR_PAD_LEFT) . ' passed'];
  854. if ($results['#fail']) {
  855. $summary[] = $results['#fail'] . ' failed';
  856. }
  857. if ($results['#error']) {
  858. $summary[] = $results['#error'] . ' errored';
  859. }
  860. if ($results['#skipped']) {
  861. $summary[] = $results['#skipped'] . ' skipped';
  862. }
  863. if ($results['#exception']) {
  864. $summary[] = $results['#exception'] . ' exception(s)';
  865. }
  866. if ($results['#debug']) {
  867. $summary[] = $results['#debug'] . ' log(s)';
  868. }
  869. if ($results['#cli_fail']) {
  870. $summary[] = 'exit code ' . $results['#exit_code'];
  871. }
  872. // The key $results['#time'] holds the sum of the tests execution times,
  873. // without taking into account the process spawning time and the setup
  874. // times of the tests themselves. So for reporting to be consistent with
  875. // PHPUnit CLI reported execution time, we report here the overall time of
  876. // execution of the spawned process.
  877. $time = sprintf('%8.3fs', $duration);
  878. $output = vsprintf('%s %s %s', [$time, trim_with_ellipsis($class, 70, STR_PAD_LEFT), implode(', ', $summary)]);
  879. $status = ($results['#fail'] || $results['#cli_fail'] || $results['#exception'] || $results['#error'] ? 'fail' : 'pass');
  880. simpletest_script_print($output . "\n", simpletest_script_color_code($status));
  881. }
/**
 * Display jUnit XML test results.
 *
 * Writes one XML file per test class into the directory given by the 'xml'
 * configuration option. Results are processed in test-class order: each time
 * the class changes, the previous class' DOM document is flushed to disk.
 *
 * @param \Drupal\Core\Test\TestRunResultsStorageInterface $test_run_results_storage
 *   The storage holding the test run results to export.
 */
function simpletest_script_reporter_write_xml_results(TestRunResultsStorageInterface $test_run_results_storage): void {
  global $test_ids, $results_map;
  try {
    $results = simpletest_script_load_messages_by_test_id($test_run_results_storage, $test_ids);
  }
  catch (Exception $e) {
    // Failing to load results is fatal for XML reporting: dump the exception
    // and exit with the dedicated exception exit code.
    echo (string) $e;
    exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  }
  $test_class = '';
  $xml_files = [];
  foreach ($results as $result) {
    // Only statuses the reporter knows about are exported.
    if (isset($results_map[$result->status])) {
      if ($result->test_class != $test_class) {
        // We've moved onto a new class, so write the last classes results to a
        // file:
        if (isset($xml_files[$test_class])) {
          file_put_contents(Config::get('xml') . '/' . str_replace('\\', '_', $test_class) . '.xml', $xml_files[$test_class]['doc']->saveXML());
          unset($xml_files[$test_class]);
        }
        $test_class = $result->test_class;
        if (!isset($xml_files[$test_class])) {
          // Start a fresh <testsuite> document for the new class.
          $doc = new DOMDocument('1.0', 'utf-8');
          $root = $doc->createElement('testsuite');
          $root = $doc->appendChild($root);
          $xml_files[$test_class] = ['doc' => $doc, 'suite' => $root];
        }
      }
      // For convenience:
      $dom_document = &$xml_files[$test_class]['doc'];
      // Create the XML element for this test case:
      $case = $dom_document->createElement('testcase');
      $case->setAttribute('classname', $test_class);
      // Result functions look like "TestClass->testMethod"; keep only the
      // method part for the testcase name.
      if (str_contains($result->function, '->')) {
        [, $name] = explode('->', $result->function, 2);
      }
      else {
        $name = $result->function;
      }
      $case->setAttribute('name', $name);
      // Passes get no further attention, but failures and exceptions get to add
      // more detail:
      if ($result->status == 'fail') {
        $fail = $dom_document->createElement('failure');
        $fail->setAttribute('type', 'failure');
        $fail->setAttribute('message', $result->message_group);
        $text = $dom_document->createTextNode($result->message);
        $fail->appendChild($text);
        $case->appendChild($fail);
      }
      elseif ($result->status == 'exception') {
        // In the case of an exception the $result->function may not be a class
        // method so we record the full function name:
        $case->setAttribute('name', $result->function);
        $fail = $dom_document->createElement('error');
        $fail->setAttribute('type', 'exception');
        $fail->setAttribute('message', $result->message_group);
        $full_message = $result->message . "\n\nline: " . $result->line . "\nfile: " . $result->file;
        $text = $dom_document->createTextNode($full_message);
        $fail->appendChild($text);
        $case->appendChild($fail);
      }
      // Append the test case XML to the test suite:
      $xml_files[$test_class]['suite']->appendChild($case);
    }
  }
  // The last test case hasn't been saved to a file yet, so do that now:
  if (isset($xml_files[$test_class])) {
    file_put_contents(Config::get('xml') . '/' . str_replace('\\', '_', $test_class) . '.xml', $xml_files[$test_class]['doc']->saveXML());
    unset($xml_files[$test_class]);
  }
}
  957. /**
  958. * Stop the test timer.
  959. */
  960. function simpletest_script_reporter_timer_stop(): void {
  961. global $total_time;
  962. echo "\n";
  963. $end = Timer::stop('run-tests');
  964. $wall_seconds = $end['time'] / 1000;
  965. $formatter = \Drupal::service('date.formatter');
  966. echo "Wall time: " . $formatter->formatInterval((int) $wall_seconds) . "\n";
  967. echo "Total time: " . $formatter->formatInterval((int) $total_time) . "\n";
  968. if ($wall_seconds > 0) {
  969. echo sprintf("Speedup: %.2fx (concurrency %d)\n", $total_time / $wall_seconds, Config::get('concurrency'));
  970. }
  971. echo "\n";
  972. }
  973. /**
  974. * Display test results.
  975. */
  976. function simpletest_script_reporter_display_results(TestRunResultsStorageInterface $test_run_results_storage): void {
  977. global $test_ids, $results_map;
  978. if (Config::get('verbose')) {
  979. // Report results.
  980. echo "Detailed test results\n";
  981. echo "---------------------\n";
  982. try {
  983. $results = simpletest_script_load_messages_by_test_id($test_run_results_storage, $test_ids);
  984. }
  985. catch (Exception $e) {
  986. echo (string) $e;
  987. exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  988. }
  989. $test_class = '';
  990. foreach ($results as $result) {
  991. if (isset($results_map[$result->status])) {
  992. if ($result->test_class != $test_class) {
  993. // Display test class every time results are for new test class.
  994. echo "\n\n---- $result->test_class ----\n\n\n";
  995. $test_class = $result->test_class;
  996. // Print table header.
  997. echo "Status Duration Info \n";
  998. echo "--------------------------------------------------------------------------------------------------------\n";
  999. }
  1000. simpletest_script_format_result($result);
  1001. }
  1002. }
  1003. }
  1004. }
/**
 * Format the result so that it fits within 80 characters.
 *
 * Prints one status/duration/function line colored by result status. For
 * statuses other than 'pass', 'fail' and 'error', the non-empty result
 * message is also printed, indented, one line at a time.
 *
 * @param object $result
 *   The result object to format.
 */
function simpletest_script_format_result($result): void {
  global $results_map;
  // NOTE(review): the literal spacing widths in the strings below appear to
  // have been collapsed in this listing; confirm column alignment against
  // the canonical source.
  if ($result->time == 0) {
    $duration = " ";
  }
  elseif ($result->time < 0.001) {
    $duration = " <1 ms";
  }
  else {
    $duration = sprintf("%9.3fs", $result->time);
  }
  $summary = sprintf("%-9.9s %s %s\n", $results_map[$result->status], $duration, trim_with_ellipsis($result->function, 80, STR_PAD_LEFT));
  simpletest_script_print($summary, simpletest_script_color_code($result->status));
  // Pass/fail/error results are summarized elsewhere; only print message
  // bodies for the remaining statuses (e.g. exceptions and debug logs).
  if ($result->message === '' || in_array($result->status, ['pass', 'fail', 'error'])) {
    return;
  }
  $message = trim(strip_tags($result->message));
  if (Config::get('non-html')) {
    // Decode HTML entities when plain-text output was requested.
    $message = Html::decodeEntities($message);
  }
  $lines = explode("\n", $message);
  foreach ($lines as $line) {
    echo " $line\n";
  }
}
/**
 * Print error messages so the user will notice them.
 *
 * Print error message prefixed with " ERROR: " and displayed in fail color if
 * color output is enabled.
 *
 * @param string $message
 *   The message to print.
 */
function simpletest_script_print_error($message): void {
  // Delegate to the color-aware printer with the failure color code.
  simpletest_script_print(" ERROR: $message\n", SIMPLETEST_SCRIPT_COLOR_FAIL);
}
  1048. /**
  1049. * Print a message to the console, using a color.
  1050. *
  1051. * @param string $message
  1052. * The message to print.
  1053. * @param int|string $color_code
  1054. * The color code to use for coloring.
  1055. */
  1056. function simpletest_script_print($message, $color_code): void {
  1057. try {
  1058. if (Config::get('color')) {
  1059. echo "\033[" . $color_code . "m" . $message . "\033[0m";
  1060. }
  1061. else {
  1062. echo $message;
  1063. }
  1064. }
  1065. catch (\RuntimeException) {
  1066. echo $message;
  1067. }
  1068. }
  1069. /**
  1070. * Get the color code associated with the specified status.
  1071. *
  1072. * @param string $status
  1073. * The status string to get code for. Special cases are: 'pass', 'fail', or
  1074. * 'exception'.
  1075. *
  1076. * @return int
  1077. * Color code. Returns 0 for default case.
  1078. */
  1079. function simpletest_script_color_code($status) {
  1080. return match ($status) {
  1081. 'pass' => SIMPLETEST_SCRIPT_COLOR_PASS,
  1082. 'fail', 'cli_fail', 'error', 'exception' => SIMPLETEST_SCRIPT_COLOR_FAIL,
  1083. 'skipped' => SIMPLETEST_SCRIPT_COLOR_YELLOW,
  1084. 'debug' => SIMPLETEST_SCRIPT_COLOR_CYAN,
  1085. default => 0,
  1086. };
  1087. }
  1088. /**
  1089. * Prints alternative test names.
  1090. *
  1091. * Searches the provided array of string values for close matches based on the
  1092. * Levenshtein algorithm.
  1093. *
  1094. * @param string $string
  1095. * A string to test.
  1096. * @param array $array
  1097. * A list of strings to search.
  1098. * @param int $degree
  1099. * The matching strictness. Higher values return fewer matches. A value of
  1100. * 4 means that the function will return strings from $array if the candidate
  1101. * string in $array would be identical to $string by changing 1/4 or fewer of
  1102. * its characters.
  1103. *
  1104. * @see http://php.net/manual/function.levenshtein.php
  1105. */
  1106. function simpletest_script_print_alternatives($string, $array, $degree = 4): void {
  1107. $alternatives = [];
  1108. foreach ($array as $item) {
  1109. $lev = levenshtein($string, $item);
  1110. if ($lev <= strlen($item) / $degree || str_contains($string, $item)) {
  1111. $alternatives[] = $item;
  1112. }
  1113. }
  1114. if (!empty($alternatives)) {
  1115. simpletest_script_print(" Did you mean?\n", SIMPLETEST_SCRIPT_COLOR_FAIL);
  1116. foreach ($alternatives as $alternative) {
  1117. simpletest_script_print(" - $alternative\n", SIMPLETEST_SCRIPT_COLOR_FAIL);
  1118. }
  1119. }
  1120. }
  1121. /**
  1122. * Loads test result messages from the database.
  1123. *
  1124. * Messages are ordered by test class and message id.
  1125. *
  1126. * @param array $test_ids
  1127. * Array of test IDs of the messages to be loaded.
  1128. *
  1129. * @return array
  1130. * Array of test result messages from the database.
  1131. */
  1132. function simpletest_script_load_messages_by_test_id(TestRunResultsStorageInterface $test_run_results_storage, $test_ids) {
  1133. $results = [];
  1134. // Sqlite has a maximum number of variables per query. If required, the
  1135. // database query is split into chunks.
  1136. if (count($test_ids) > SIMPLETEST_SCRIPT_SQLITE_VARIABLE_LIMIT && Config::get('sqlite')) {
  1137. $test_id_chunks = array_chunk($test_ids, SIMPLETEST_SCRIPT_SQLITE_VARIABLE_LIMIT);
  1138. }
  1139. else {
  1140. $test_id_chunks = [$test_ids];
  1141. }
  1142. foreach ($test_id_chunks as $test_id_chunk) {
  1143. try {
  1144. $result_chunk = [];
  1145. foreach ($test_id_chunk as $test_id) {
  1146. $test_run = TestRun::get($test_run_results_storage, $test_id);
  1147. $result_chunk = array_merge($result_chunk, $test_run->getLogEntriesByTestClass());
  1148. }
  1149. }
  1150. catch (Exception $e) {
  1151. echo (string) $e;
  1152. exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  1153. }
  1154. if ($result_chunk) {
  1155. $results = array_merge($results, $result_chunk);
  1156. }
  1157. }
  1158. return $results;
  1159. }
  1160. /**
  1161. * Trims a string adding a leading or trailing ellipsis.
  1162. *
  1163. * @param string $input
  1164. * The input string.
  1165. * @param int $length
  1166. * The exact trimmed string length.
  1167. * @param int $side
  1168. * Leading or trailing ellipsis.
  1169. *
  1170. * @return string
  1171. * The trimmed string.
  1172. */
  1173. function trim_with_ellipsis(string $input, int $length, int $side): string {
  1174. if (strlen($input) < $length) {
  1175. return str_pad($input, $length, ' ', \STR_PAD_RIGHT);
  1176. }
  1177. elseif (strlen($input) > $length) {
  1178. return match($side) {
  1179. \STR_PAD_RIGHT => substr($input, 0, $length - 1) . '…',
  1180. default => '…' . substr($input, -$length + 1),
  1181. };
  1182. }
  1183. return $input;
  1184. }
  1185. /**
  1186. * Outputs the discovery warning messages.
  1187. */
  1188. function dump_discovery_warnings(): void {
  1189. $warnings = PhpUnitTestDiscovery::instance()->getWarnings();
  1190. if (!empty($warnings)) {
  1191. simpletest_script_print("Test discovery warnings\n", SIMPLETEST_SCRIPT_COLOR_BRIGHT_WHITE);
  1192. simpletest_script_print("-----------------------\n", SIMPLETEST_SCRIPT_COLOR_BRIGHT_WHITE);
  1193. foreach ($warnings as $warning) {
  1194. $tmp = explode("\n", $warning);
  1195. simpletest_script_print('* ' . array_shift($tmp) . "\n", SIMPLETEST_SCRIPT_COLOR_EXCEPTION);
  1196. foreach ($tmp as $sub) {
  1197. simpletest_script_print(' ' . $sub . "\n", SIMPLETEST_SCRIPT_COLOR_EXCEPTION);
  1198. }
  1199. echo "\n";
  1200. }
  1201. }
  1202. }

Buggy or inaccurate documentation? Please file an issue. Need support? Need help programming? Connect with the Drupal community.