run-tests.sh

Same filename and directory in other branches
  1. 10 core/scripts/run-tests.sh
  2. 9 core/scripts/run-tests.sh
  3. 8.9.x core/scripts/run-tests.sh
  4. 7.x scripts/run-tests.sh
  5. main core/scripts/run-tests.sh

Script for running tests on DrupalCI.

This script is intended for use only by drupal.org's testing. In general, tests should be run directly with phpunit.

@internal

File

core/scripts/run-tests.sh

View source
  1. <?php
  2. /**
  3. * @file
  4. * Script for running tests on DrupalCI.
  5. *
  6. * This script is intended for use only by drupal.org's testing. In general,
  7. * tests should be run directly with phpunit.
  8. *
  9. * @internal
  10. */
  11. use Composer\Autoload\ClassLoader;
  12. use Drupal\BuildTests\Framework\BuildTestBase;
  13. use Drupal\Component\FileSystem\FileSystem;
  14. use Drupal\Component\Utility\Environment;
  15. use Drupal\Component\Utility\Html;
  16. use Drupal\Component\Utility\Timer;
  17. use Drupal\Core\Composer\Composer;
  18. use Drupal\Core\Database\Database;
  19. use Drupal\Core\Test\EnvironmentCleaner;
  20. use Drupal\Core\Test\PhpUnitTestDiscovery;
  21. use Drupal\Core\Test\PhpUnitTestRunner;
  22. use Drupal\Core\Test\SimpletestTestRunResultsStorage;
  23. use Drupal\Core\Test\TestDatabase;
  24. use Drupal\Core\Test\TestRun;
  25. use Drupal\Core\Test\TestRunnerKernel;
  26. use Drupal\Core\Test\TestRunResultsStorageInterface;
  27. use Drupal\FunctionalJavascriptTests\WebDriverTestBase;
  28. use Drupal\KernelTests\KernelTestBase;
  29. use Drupal\Tests\BrowserTestBase;
  30. use Drupal\TestTools\TestRunner\Configuration as Config;
  31. use PHPUnit\Framework\TestCase;
  32. use PHPUnit\Runner\Version;
  33. use Symfony\Component\Console\Helper\DescriptorHelper;
  34. use Symfony\Component\Console\Input\InputDefinition;
  35. use Symfony\Component\Console\Output\ConsoleOutput;
  36. use Symfony\Component\HttpFoundation\Request;
  37. use Symfony\Component\Process\PhpExecutableFinder;
// cspell:ignore exitcode testbots wwwrun

// Define some colors for display (ANSI SGR color codes).
// A nice calming green.
const SIMPLETEST_SCRIPT_COLOR_PASS = 32;
// An alerting Red.
const SIMPLETEST_SCRIPT_COLOR_FAIL = 31;
// An annoying brown.
const SIMPLETEST_SCRIPT_COLOR_EXCEPTION = 33;
// An appeasing yellow.
const SIMPLETEST_SCRIPT_COLOR_YELLOW = 33;
// A refreshing cyan.
const SIMPLETEST_SCRIPT_COLOR_CYAN = 36;
// A fainting gray.
const SIMPLETEST_SCRIPT_COLOR_GRAY = 90;
// A notable white.
const SIMPLETEST_SCRIPT_COLOR_BRIGHT_WHITE = "1;97";

// Restricting the chunk of queries prevents memory exhaustion.
const SIMPLETEST_SCRIPT_SQLITE_VARIABLE_LIMIT = 350;

// Script exit statuses; also used to interpret the status of each per-class
// child test run in simpletest_script_execute_batch().
const SIMPLETEST_SCRIPT_EXIT_SUCCESS = 0;
// Used for test failures and for configuration errors that prevent a run.
const SIMPLETEST_SCRIPT_EXIT_FAILURE = 1;
const SIMPLETEST_SCRIPT_EXIT_ERROR = 2;
// Used whenever an unexpected exception is caught by the runner.
const SIMPLETEST_SCRIPT_EXIT_EXCEPTION = 3;
// Setup class autoloading.
$autoloader = require_once __DIR__ . '/../../autoload.php';
// The Drupal\TestTools namespace lives under core/tests and is not covered by
// the generated autoloader, so register it explicitly.
$autoloader->addPsr4('Drupal\\TestTools\\', __DIR__ . '/../tests/Drupal/TestTools');

// Setup console output.
$console_output = new ConsoleOutput();

// Get the configuration from the command line.
$script_basename = basename($_SERVER['argv'][0]);
try {
  Config::createFromCommandLine($_SERVER['argv']);
}
catch (\RuntimeException $e) {
  // Invalid options: print the error and the synopsis, then bail out.
  simpletest_script_print_error($e->getMessage() . ' ' . "Use the --help option for the list and usage of the options available.\n");
  simpletest_script_print(Config::commandLineDefinition()->getSynopsis(), SIMPLETEST_SCRIPT_COLOR_PASS);
  exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
}

// If --help requested, show it and exit.
if (Config::get('help')) {
  simpletest_script_help(Config::commandLineDefinition(), $script_basename, $console_output);
  exit(SIMPLETEST_SCRIPT_EXIT_SUCCESS);
}

// Initialize script variables and bootstrap Drupal kernel.
simpletest_script_init($autoloader);

// PHPUnit is a dev dependency; without it nothing can run.
if (!class_exists(TestCase::class)) {
  echo "\nrun-tests.sh requires the PHPUnit testing framework. Use 'composer install' to ensure that it is present.\n\n";
  exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
}

// Defaults the PHPUnit configuration file path to the 'core' directory.
if (empty(Config::get('phpunit-configuration'))) {
  Config::set('phpunit-configuration', \Drupal::root() . \DIRECTORY_SEPARATOR . 'core');
}

// Refuse to run when the installed PHPUnit is too old for this PHP version.
if (!Composer::upgradePHPUnitCheck(Version::id())) {
  simpletest_script_print_error("PHPUnit testing framework version 11 or greater is required when running on PHP 8.4 or greater. Run the command 'composer run-script drupal-phpunit-upgrade' in order to fix this.");
  exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
}
// --list: print all discovered tests organized by group, then exit.
if (Config::get('list')) {
  // Display all available tests organized by one #[Group()] attribute.
  echo "\nAvailable test groups & classes\n";
  echo "-------------------------------\n\n";
  $test_discovery = PhpUnitTestDiscovery::instance()->setConfigurationFilePath(Config::get('phpunit-configuration'));
  try {
    $groups = $test_discovery->getTestClasses(Config::get('module'));
    dump_discovery_warnings();
  }
  catch (Exception $e) {
    error_log((string) $e);
    echo (string) $e;
    exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  }
  // A given class can appear in multiple groups. For historical reasons, we
  // need to present each test only once. The test is shown in the group that is
  // printed first.
  $printed_tests = [];
  foreach ($groups as $group => $tests) {
    echo $group . "\n";
    $tests = array_diff(array_keys($tests), $printed_tests);
    foreach ($tests as $test) {
      echo " - $test\n";
    }
    $printed_tests = array_merge($printed_tests, $tests);
  }
  exit(SIMPLETEST_SCRIPT_EXIT_SUCCESS);
}

// List-files and list-files-json provide a way for external tools such as the
// testbot to prioritize running changed tests.
// @see https://www.drupal.org/node/2569585
if (Config::get('list-files') || Config::get('list-files-json')) {
  // List all files which could be run as tests.
  $test_discovery = PhpUnitTestDiscovery::instance()->setConfigurationFilePath(Config::get('phpunit-configuration'));
  // PhpUnitTestDiscovery::findAllClassFiles() gives us a classmap similar to a
  // Composer 'classmap' array.
  $test_classes = $test_discovery->findAllClassFiles();
  // JSON output is the easiest.
  if (Config::get('list-files-json')) {
    echo json_encode($test_classes);
    exit(SIMPLETEST_SCRIPT_EXIT_SUCCESS);
  }
  // Output the list of files.
  else {
    foreach (array_values($test_classes) as $test_class) {
      echo $test_class . "\n";
    }
  }
  exit(SIMPLETEST_SCRIPT_EXIT_SUCCESS);
}
// Set up the default database connection used by the tests themselves.
simpletest_script_setup_database();

// Setup the test run results storage environment. Currently, this coincides
// with the simpletest database schema.
$test_run_results_storage = simpletest_script_setup_test_run_results_storage();

// --clean: remove left-over fixtures from earlier runs, then exit.
if (Config::get('clean')) {
  // Clean up left-over tables and directories.
  $cleaner = new EnvironmentCleaner(
    DRUPAL_ROOT,
    Database::getConnection(),
    $test_run_results_storage,
    $console_output,
    \Drupal::service('file_system')
  );
  try {
    $cleaner->cleanEnvironment();
  }
  catch (Exception $e) {
    echo (string) $e;
    exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  }
  echo "\nEnvironment cleaned.\n";
  // Get the status messages and print them.
  $messages = \Drupal::messenger()->messagesByType('status');
  foreach ($messages as $text) {
    echo " - " . $text . "\n";
  }
  exit(SIMPLETEST_SCRIPT_EXIT_SUCCESS);
}

// Print a banner summarizing the environment the tests will run in.
echo "\n";
echo "Drupal test run\n\n";
echo "--------------------------------------------------------------\n";
echo sprintf("Drupal Version.......: %s\n", \Drupal::VERSION);
echo sprintf("PHP Version..........: %s\n", \PHP_VERSION);
echo sprintf("PHP Binary...........: %s\n", (new PhpExecutableFinder())->find());
echo sprintf("PHPUnit Version......: %s\n", Version::id());
echo sprintf("PHPUnit configuration: %s\n", Config::get('phpunit-configuration'));
if (Config::get('dburl')) {
  // Instantiate the driver's installer Tasks class to get a human-readable
  // database name for the banner.
  $sut_connection_info = Database::getConnectionInfo();
  $sut_tasks_class = $sut_connection_info['default']['namespace'] . "\\Install\\Tasks";
  $sut_installer = new $sut_tasks_class();
  $sut_connection = Database::getConnection();
  echo sprintf("Database.............: %s\n", (string) $sut_installer->name());
  echo sprintf("Database Version.....: %s\n", $sut_connection->version());
}
echo sprintf("Working directory....: %s\n", getcwd());
echo "--------------------------------------------------------------\n";
echo "\n";
// Resolve the list of test classes to run from the command line arguments.
$test_list = simpletest_script_get_test_list();

// Try to allocate unlimited time to run the tests.
Environment::setTimeLimit(0);
simpletest_script_reporter_init();

// --repeat: schedule the whole list that many times.
$tests_to_run = [];
for ($i = 0; $i < Config::get('repeat'); $i++) {
  $tests_to_run = array_merge($tests_to_run, $test_list);
}

// Execute tests.
$status = simpletest_script_execute_batch($test_run_results_storage, $tests_to_run);

// Stop the timer.
simpletest_script_reporter_timer_stop();

// Ensure all test locks are released once finished. If tests are run with a
// concurrency of 1 then each test will clean up its own lock. Test locks are
// not released if using a higher concurrency to ensure each test has unique
// fixtures.
TestDatabase::releaseAllTestLocks();

// Display results before database is cleared.
simpletest_script_reporter_display_results($test_run_results_storage);
if (Config::get('xml')) {
  simpletest_script_reporter_write_xml_results($test_run_results_storage);
}

// Clean up all test results.
if (!Config::get('keep-results')) {
  try {
    $cleaner = new EnvironmentCleaner(
      DRUPAL_ROOT,
      Database::getConnection(),
      $test_run_results_storage,
      $console_output,
      \Drupal::service('file_system')
    );
    $cleaner->cleanResults();
  }
  catch (Exception $e) {
    echo (string) $e;
    exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  }
}

// Test complete, exit.
exit($status);
  232. /**
  233. * Print help text.
  234. */
  235. function simpletest_script_help(InputDefinition $input_definition, string $script_basename, ConsoleOutput $console_output): void {
  236. echo <<
  237. Run Drupal tests from the shell.
  238. Usage: {$script_basename} [OPTIONS]
  239. Example: {$script_basename} Profile
  240. EOF;
  241. $helper = new DescriptorHelper();
  242. $helper->describe($console_output, $input_definition);
  243. echo <<
  244. To run this script you will normally invoke it from the root directory of your
  245. Drupal installation as the webserver user (differs per configuration), or root:
  246. sudo -u [wwwrun|www-data|etc] php ./core/scripts/{$script_basename} --url http://example.com/ --all
  247. sudo -u [wwwrun|www-data|etc] php ./core/scripts/{$script_basename} --url http://example.com/ --class Drupal\\\\Tests\\\\block\\\\Functional\\\\BlockTest
  248. Without a preinstalled Drupal site, specify a SQLite database pathname to create
  249. (for the test runner) and the default database connection info (for Drupal) to
  250. use in tests:
  251. sudo -u [wwwrun|www-data|etc] php ./core/scripts/{$script_basename}
  252. --sqlite /tmpfs/drupal/test.sqlite
  253. --dburl mysql://username:password@localhost/database
  254. --url http://example.com/ --all
  255. EOF;
  256. }
/**
 * Initialize script variables and perform general setup requirements.
 *
 * Derives a fake web request environment ($_SERVER superglobals and the
 * SIMPLETEST_BASE_URL environment variable) from the --url option, probes
 * symlink support when --concurrency > 1, and boots a TestRunnerKernel.
 *
 * @param \Composer\Autoload\ClassLoader $autoloader
 *   The class loader returned by autoload.php.
 */
function simpletest_script_init(ClassLoader $autoloader): void {
  // Get URL from arguments.
  $parsed_url = parse_url(Config::get('url'));
  $host = $parsed_url['host'] . (isset($parsed_url['port']) ? ':' . $parsed_url['port'] : '');
  $path = isset($parsed_url['path']) ? rtrim(rtrim($parsed_url['path']), '/') : '';
  $port = $parsed_url['port'] ?? '80';
  // If the passed URL schema is 'https' then setup the $_SERVER variables
  // properly so that testing will run under HTTPS.
  if ($parsed_url['scheme'] == 'https') {
    $_SERVER['HTTPS'] = 'on';
  }
  $base_url = isset($_SERVER['HTTPS']) && $_SERVER['HTTPS'] === 'on' ? 'https://' : 'http://';
  $base_url .= $host;
  if ($path !== '') {
    $base_url .= $path;
  }
  // Tests read the base URL from the environment, not from $_SERVER.
  putenv('SIMPLETEST_BASE_URL=' . $base_url);
  // Fake a web request environment for the command line run.
  $_SERVER['HTTP_HOST'] = $host;
  $_SERVER['REMOTE_ADDR'] = '127.0.0.1';
  $_SERVER['SERVER_ADDR'] = '127.0.0.1';
  $_SERVER['SERVER_PORT'] = $port;
  $_SERVER['SERVER_SOFTWARE'] = NULL;
  $_SERVER['SERVER_NAME'] = 'localhost';
  $_SERVER['REQUEST_URI'] = $path . '/';
  $_SERVER['REQUEST_METHOD'] = 'GET';
  $_SERVER['SCRIPT_NAME'] = $path . '/index.php';
  $_SERVER['SCRIPT_FILENAME'] = $path . '/index.php';
  $_SERVER['PHP_SELF'] = $path . '/index.php';
  $_SERVER['HTTP_USER_AGENT'] = 'Drupal command line';
  if (Config::get('concurrency') > 1) {
    // Parallel execution needs symlink support in the OS temporary directory;
    // probe for it by creating and removing a throwaway symlink.
    $directory = FileSystem::getOsTemporaryDirectory();
    $test_symlink = @symlink(__FILE__, $directory . '/test_symlink');
    if (!$test_symlink) {
      throw new \RuntimeException('In order to use a concurrency higher than 1 the test system needs to be able to create symlinks in ' . $directory);
    }
    unlink($directory . '/test_symlink');
    putenv('RUN_TESTS_CONCURRENCY=' . Config::get('concurrency'));
  }
  if (!empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] == 'on') {
    // Ensure that any and all environment variables are changed to https://.
    foreach ($_SERVER as $key => $value) {
      // Some values are NULL. Non-NULL values which are falsy will not contain
      // text to replace.
      if ($value) {
        $_SERVER[$key] = str_replace('http://', 'https://', $value);
      }
    }
  }
  // Run from the Drupal root so that relative paths resolve consistently.
  chdir(realpath(__DIR__ . '/../..'));
  // Prepare the kernel.
  try {
    $request = Request::createFromGlobals();
    $kernel = TestRunnerKernel::createFromRequest($request, $autoloader);
    $kernel->boot();
    $kernel->preHandle($request);
  }
  catch (Exception $e) {
    echo (string) $e;
    exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  }
}
  321. /**
  322. * Sets up database connection info for running tests.
  323. *
  324. * If this script is executed from within a real Drupal installation, then this
  325. * function essentially performs nothing (unless the --sqlite or --dburl
  326. * parameters were passed).
  327. *
  328. * Otherwise, there are three database connections of concern:
  329. * - --sqlite: The test runner connection, providing access to database tables
  330. * for recording test IDs and assertion results.
  331. * - --dburl: A database connection that is used as base connection info for all
  332. * tests; i.e., every test will spawn from this connection. In case this
  333. * connection uses e.g. SQLite, then all tests will run against SQLite. This
  334. * is exposed as $databases['default']['default'] to Drupal.
  335. * - The actual database connection used within a test. This is the same as
  336. * --dburl, but uses an additional database table prefix. This is
  337. * $databases['default']['default'] within a test environment. The original
  338. * connection is retained in
  339. * $databases['simpletest_original_default']['default'] and restored after
  340. * each test.
  341. */
  342. function simpletest_script_setup_database(): void {
  343. // If there is an existing Drupal installation that contains a database
  344. // connection info in settings.php, then $databases['default']['default'] will
  345. // hold the default database connection already. This connection is assumed to
  346. // be valid, and this connection will be used in tests, so that they run
  347. // against e.g. MySQL instead of SQLite.
  348. // However, in case no Drupal installation exists, this default database
  349. // connection can be set and/or overridden with the --dburl parameter.
  350. if (Config::get('dburl')) {
  351. // Remove a possibly existing default connection (from settings.php).
  352. Database::removeConnection('default');
  353. try {
  354. $databases['default']['default'] = Database::convertDbUrlToConnectionInfo(Config::get('dburl'), TRUE);
  355. }
  356. catch (\InvalidArgumentException $e) {
  357. simpletest_script_print_error('Invalid --dburl. Reason: ' . $e->getMessage());
  358. exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
  359. }
  360. }
  361. // Otherwise, use the default database connection from settings.php.
  362. else {
  363. $databases['default'] = Database::getConnectionInfo('default');
  364. }
  365. if (isset($databases['default']['default'])) {
  366. Database::addConnectionInfo('default', 'default', $databases['default']['default']);
  367. }
  368. }
  369. /**
  370. * Sets up the test runs results storage.
  371. */
  372. function simpletest_script_setup_test_run_results_storage() {
  373. $databases['default'] = Database::getConnectionInfo('default');
  374. // If no --sqlite parameter has been passed, then the test runner database
  375. // connection is the default database connection.
  376. $sqlite = Config::get('sqlite');
  377. if (!$sqlite) {
  378. $sqlite = FALSE;
  379. $databases['test-runner']['default'] = $databases['default']['default'];
  380. }
  381. // Otherwise, set up a SQLite connection for the test runner.
  382. else {
  383. if ($sqlite === ':memory:') {
  384. $sqlite = ':memory:';
  385. }
  386. elseif (is_string($sqlite) && !str_starts_with($sqlite, '/')) {
  387. $sqlite = DRUPAL_ROOT . '/' . $sqlite;
  388. }
  389. $databases['test-runner']['default'] = [
  390. 'driver' => 'sqlite',
  391. 'database' => $sqlite,
  392. 'prefix' => '',
  393. ];
  394. // Create the test runner SQLite database, unless it exists already.
  395. if ($sqlite !== ':memory:' && !file_exists($sqlite)) {
  396. if (!is_dir(dirname($sqlite))) {
  397. mkdir(dirname($sqlite));
  398. }
  399. touch($sqlite);
  400. }
  401. }
  402. // Add the test runner database connection.
  403. Database::addConnectionInfo('test-runner', 'default', $databases['test-runner']['default']);
  404. // Create the test result schema.
  405. try {
  406. $test_run_results_storage = new SimpletestTestRunResultsStorage(Database::getConnection('default', 'test-runner'));
  407. }
  408. catch (\PDOException $e) {
  409. simpletest_script_print_error($databases['test-runner']['default']['driver'] . ': ' . $e->getMessage());
  410. exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
  411. }
  412. if ($sqlite) {
  413. try {
  414. $test_run_results_storage->buildTestingResultsEnvironment(Config::get('keep-results-table'));
  415. }
  416. catch (Exception $e) {
  417. echo (string) $e;
  418. exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  419. }
  420. }
  421. // Verify that the test result database schema exists by checking one table.
  422. try {
  423. if (!$test_run_results_storage->validateTestingResultsEnvironment()) {
  424. simpletest_script_print_error('Missing test result database schema. Use the --sqlite parameter.');
  425. exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
  426. }
  427. }
  428. catch (Exception $e) {
  429. echo (string) $e;
  430. exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  431. }
  432. return $test_run_results_storage;
  433. }
/**
 * Execute a batch of tests.
 *
 * Starts one PHPUnit child process per test class, keeping up to
 * --concurrency processes alive at a time, and aggregates their statuses.
 *
 * @param \Drupal\Core\Test\TestRunResultsStorageInterface $test_run_results_storage
 *   The storage that records test run results.
 * @param array $test_classes
 *   The list of test classes to run (consumed from the front).
 *
 * @return int
 *   The most severe SIMPLETEST_SCRIPT_EXIT_* status observed.
 */
function simpletest_script_execute_batch(TestRunResultsStorageInterface $test_run_results_storage, $test_classes) {
  global $test_ids, $total_time;
  $total_status = SIMPLETEST_SCRIPT_EXIT_SUCCESS;
  $process_runner = PhpUnitTestRunner::create(\Drupal::getContainer())
    ->setConfigurationFilePath(Config::get('phpunit-configuration'));
  // Multi-process execution.
  $children = [];
  while (!empty($test_classes) || !empty($children)) {
    // Top up the pool of child processes to the configured concurrency.
    while (count($children) < Config::get('concurrency')) {
      if (empty($test_classes)) {
        break;
      }
      try {
        $test_run = TestRun::createNew($test_run_results_storage);
      }
      catch (Exception $e) {
        echo (string) $e;
        exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
      }
      $test_ids[] = $test_run->id();
      $test_class = array_shift($test_classes);
      // Fork a child process.
      try {
        $process = $process_runner->startPhpUnitOnSingleTestClass(
          $test_run,
          $test_class,
          Config::get('color'),
          Config::get('suppress-deprecations'),
        );
      }
      catch (\Throwable $e) {
        // PHPUnit catches exceptions already, so this is only reached when an
        // exception is thrown in the wrapped test runner environment.
        echo (string) $e;
        exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
      }
      // Register our new child.
      $children[] = [
        'process' => $process,
        'test_run' => $test_run,
        'class' => $test_class,
      ];
    }
    // Wait for children every 2ms.
    usleep(2000);
    // Check if some children finished.
    foreach ($children as $cid => $child) {
      if ($child['process']->isTerminated()) {
        // The child exited.
        $child['test_run']->end(microtime(TRUE));
        $process_outcome = $process_runner->processPhpUnitOnSingleTestClassOutcome(
          $child['process'],
          $child['test_run'],
          $child['class'],
        );
        simpletest_script_reporter_display_summary(
          $child['class'],
          $process_outcome['summaries'][$child['class']],
          $child['test_run']->duration()
        );
        if ($process_outcome['error_output']) {
          echo 'ERROR: ' . implode("\n", $process_outcome['error_output']);
        }
        // Known failure/error statuses propagate to the batch status; the
        // most severe status seen so far wins.
        if (in_array($process_outcome['status'], [SIMPLETEST_SCRIPT_EXIT_FAILURE, SIMPLETEST_SCRIPT_EXIT_ERROR])) {
          $total_status = max($process_outcome['status'], $total_status);
        }
        elseif ($process_outcome['status']) {
          // Any other non-zero status is treated as a fatal runner problem.
          $message = 'FATAL ' . $child['class'] . ': test runner returned an unexpected error code (' . $process_outcome['status'] . ').';
          echo $message . "\n";
          $total_status = max(SIMPLETEST_SCRIPT_EXIT_EXCEPTION, $total_status);
          if (Config::get('die-on-fail')) {
            // Keep the fixtures around for debugging.
            $test_db = new TestDatabase($child['test_run']->getDatabasePrefix());
            $test_directory = $test_db->getTestSitePath();
            echo 'Test database and files kept and test exited immediately on fail so should be reproducible if you change settings.php to use the database prefix ' . $child['test_run']->getDatabasePrefix() . ' and config directories in ' . $test_directory . "\n";
            Config::set('keep-results', TRUE);
            // Exit repeat loop immediately.
            Config::set('repeat', -1);
          }
        }
        // Remove this child.
        unset($children[$cid]);
      }
    }
  }
  return $total_status;
}
/**
 * Get list of tests based on arguments.
 *
 * If --all specified then return all available tests, otherwise reads list of
 * tests from the remaining command line arguments, interpreting them as class
 * names (--class), files (--file) or group names (default).
 *
 * @return array
 *   List of tests.
 */
function simpletest_script_get_test_list() {
  $test_discovery = PhpUnitTestDiscovery::instance()->setConfigurationFilePath(Config::get('phpunit-configuration'));
  $test_list = [];
  $slow_tests = [];
  if (Config::get('all') || Config::get('module') || Config::get('directory')) {
    try {
      $groups = $test_discovery->getTestClasses(Config::get('module'), Config::get('types'), Config::get('directory'));
      dump_discovery_warnings();
    }
    catch (Exception $e) {
      echo (string) $e;
      exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
    }
    // Ensure that tests marked explicitly as #[Group('#slow')] are run at the
    // beginning of each job.
    if (key($groups) === '#slow') {
      $slow_tests = array_shift($groups);
    }
    $not_slow_tests = [];
    foreach ($groups as $group => $tests) {
      $not_slow_tests = array_merge($not_slow_tests, $tests);
    }
    // Filter slow tests out of the not slow tests and ensure a unique list
    // since tests may appear in more than one group.
    $not_slow_tests = array_diff_key($not_slow_tests, $slow_tests);
    // If the tests are not being run in parallel, then ensure slow tests run
    // all together first.
    if ((int) Config::get('ci-parallel-node-total') <= 1 ) {
      sort_tests_by_type_and_methods($slow_tests);
      sort_tests_by_type_and_methods($not_slow_tests);
      $all_tests_list = array_merge($slow_tests, $not_slow_tests);
      assign_tests_sequence($all_tests_list);
      dump_tests_sequence($all_tests_list);
      $test_list = array_keys($all_tests_list);
    }
    else {
      // Sort all tests by the number of test cases on the test class.
      // This is used in combination with #[Group('#slow')] to start the
      // slowest tests first and distribute tests between test runners.
      sort_tests_by_public_method_count($slow_tests);
      sort_tests_by_public_method_count($not_slow_tests);
      $all_tests_list = array_merge($slow_tests, $not_slow_tests);
      assign_tests_sequence($all_tests_list);
      // Now set up a bin per test runner.
      $bin_count = (int) Config::get('ci-parallel-node-total');
      // Now loop over the slow tests and add them to a bin one by one, this
      // distributes the tests evenly across the bins.
      $binned_slow_tests = place_tests_into_bins($slow_tests, $bin_count);
      // ci-parallel-node-index is 1-based; the bins array is 0-based.
      $slow_tests_for_job = $binned_slow_tests[Config::get('ci-parallel-node-index') - 1];
      // And the same for the rest of the tests.
      $binned_other_tests = place_tests_into_bins($not_slow_tests, $bin_count);
      $other_tests_for_job = $binned_other_tests[Config::get('ci-parallel-node-index') - 1];
      $test_list = array_merge($slow_tests_for_job, $other_tests_for_job);
      dump_bin_tests_sequence(Config::get('ci-parallel-node-index'), $all_tests_list, $test_list);
      $test_list = array_keys($test_list);
    }
  }
  else {
    if (Config::get('class')) {
      // --class: remaining arguments are fully qualified class names, each
      // optionally suffixed with '::methodName'.
      $test_list = [];
      foreach (Config::getTests() as $test_class) {
        [$class_name] = explode('::', $test_class, 2);
        if (class_exists($class_name)) {
          $test_list[] = $test_class;
        }
        else {
          // Unknown class: run a full discovery so close matches can be
          // suggested before failing.
          try {
            $groups = $test_discovery->getTestClasses(NULL, Config::get('types'));
            dump_discovery_warnings();
          }
          catch (Exception $e) {
            echo (string) $e;
            exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
          }
          $all_classes = [];
          foreach ($groups as $group) {
            $all_classes = array_merge($all_classes, array_keys($group));
          }
          simpletest_script_print_error('Test class not found: ' . $class_name);
          simpletest_script_print_alternatives($class_name, $all_classes, 6);
          exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
        }
      }
    }
    elseif (Config::get('file')) {
      // Extract test case class names from specified files.
      foreach (Config::getTests() as $file) {
        if (!file_exists($file) || is_dir($file)) {
          simpletest_script_print_error('File not found: ' . $file);
          exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
        }
        $fileTests = current($test_discovery->getTestClasses(NULL, [], $file));
        $test_list = array_merge($test_list, $fileTests);
      }
      assign_tests_sequence($test_list);
      dump_tests_sequence($test_list);
      $test_list = array_keys($test_list);
    }
    else {
      // Default: remaining arguments are group names.
      try {
        $groups = $test_discovery->getTestClasses(NULL, Config::get('types'));
        dump_discovery_warnings();
      }
      catch (Exception $e) {
        echo (string) $e;
        exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
      }
      // Store all the groups so we can suggest alternatives if we need to.
      $all_groups = array_keys($groups);
      // Verify that the groups exist.
      if (!empty($unknown_groups = array_diff(Config::getTests(), $all_groups))) {
        $first_group = reset($unknown_groups);
        simpletest_script_print_error('Test group not found: ' . $first_group);
        simpletest_script_print_alternatives($first_group, $all_groups);
        exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
      }
      // Merge the tests from the groups together.
      foreach (Config::getTests() as $group_name) {
        $test_list = array_merge($test_list, $groups[$group_name]);
      }
      assign_tests_sequence($test_list);
      dump_tests_sequence($test_list);
      // Ensure our list of tests contains only one entry for each test.
      $test_list = array_keys($test_list);
    }
  }
  if (empty($test_list)) {
    simpletest_script_print_error('No valid tests were specified.');
    exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
  }
  return $test_list;
}
  664. /**
  665. * Sort tests by test type and number of public methods.
  666. */
  667. function sort_tests_by_type_and_methods(array &$tests): void {
  668. uasort($tests, function ($a, $b) {
  669. if (get_test_type_weight($a['name']) === get_test_type_weight($b['name'])) {
  670. return $b['tests_count'] <=> $a['tests_count'];
  671. }
  672. return get_test_type_weight($b['name']) <=> get_test_type_weight($a['name']);
  673. });
  674. }
  675. /**
  676. * Sort tests by the number of public methods in the test class.
  677. *
  678. * Tests with several methods take longer to run than tests with a single
  679. * method all else being equal, so this allows tests runs to be sorted by
  680. * approximately the slowest to fastest tests. Tests that are exceptionally
  681. * slow can be added to the '#slow' group so they are placed first in each
  682. * test run regardless of the number of methods.
  683. *
  684. * @param string[] $tests
  685. * An array of test class names.
  686. */
  687. function sort_tests_by_public_method_count(array &$tests): void {
  688. uasort($tests, function (array $a, array $b) {
  689. return $b['tests_count'] <=> $a['tests_count'];
  690. });
  691. }
  692. /**
  693. * Weights a test class based on which test base class it extends.
  694. *
  695. * @param string $class
  696. * The test class name.
  697. */
  698. function get_test_type_weight(string $class): int {
  699. return match(TRUE) {
  700. is_subclass_of($class, WebDriverTestBase::class) => 3,
  701. is_subclass_of($class, BrowserTestBase::class) => 2,
  702. is_subclass_of($class, BuildTestBase::class) => 2,
  703. is_subclass_of($class, KernelTestBase::class) => 1,
  704. default => 0,
  705. };
  706. }
  707. /**
  708. * Assigns the test sequence.
  709. *
  710. * @param array $tests
  711. * The array of test class info.
  712. */
  713. function assign_tests_sequence(array &$tests): void {
  714. $i = 0;
  715. foreach ($tests as &$testInfo) {
  716. $testInfo['sequence'] = ++$i;
  717. }
  718. }
  719. /**
  720. * Dumps the list of tests in order of execution after sorting.
  721. *
  722. * @param array $tests
  723. * The array of test class info.
  724. */
  725. function dump_tests_sequence(array $tests): void {
  726. if (!Config::get('debug-discovery')) {
  727. return;
  728. }
  729. echo "Test execution sequence\n";
  730. echo "-----------------------\n\n";
  731. echo " Seq Slow? Group Cnt Class\n";
  732. echo "-----------------------------------------\n";
  733. foreach ($tests as $testInfo) {
  734. echo sprintf(
  735. "%4d %5s %15s %4d %s\n",
  736. $testInfo['sequence'],
  737. in_array('#slow', $testInfo['groups']) ? '#slow' : '',
  738. trim_with_ellipsis($testInfo['group'], 15, \STR_PAD_RIGHT),
  739. $testInfo['tests_count'],
  740. trim_with_ellipsis($testInfo['name'], 60, \STR_PAD_LEFT),
  741. );
  742. }
  743. echo "-----------------------------------------\n\n";
  744. }
  745. /**
  746. * Distribute tests into bins.
  747. *
  748. * The given array of tests is split into the available bins. The distribution
  749. * starts with the first test, placing the first test in the first bin, the
  750. * second test in the second bin and so on. This results each bin having a
  751. * similar number of test methods to run in total.
  752. *
  753. * @param string[] $tests
  754. * An array of test class names.
  755. * @param int $bin_count
  756. * The number of bins available.
  757. *
  758. * @return array
  759. * An associative array of bins and the test class names in each bin.
  760. */
  761. function place_tests_into_bins(array $tests, int $bin_count) {
  762. // Create a bin corresponding to each parallel test job.
  763. $bins = array_fill(0, $bin_count, []);
  764. // Go through each test and add them to one bin at a time.
  765. $i = 0;
  766. foreach ($tests as $key => $test) {
  767. $bins[($i++ % $bin_count)][$key] = $test;
  768. }
  769. return $bins;
  770. }
  771. /**
  772. * Dumps the list of tests in order of execution for a bin.
  773. *
  774. * @param int $bin
  775. * The bin.
  776. * @param array $allTests
  777. * The list of all test classes discovered.
  778. * @param array $tests
  779. * The list of test class to run for this bin.
  780. */
  781. function dump_bin_tests_sequence(int $bin, array $allTests, array $tests): void {
  782. if (!Config::get('debug-discovery')) {
  783. return;
  784. }
  785. echo "Test execution sequence. ";
  786. echo "Tests marked *** will be executed in this PARALLEL BIN #{$bin}.\n";
  787. echo "-------------------------------------------------------------------------------------\n\n";
  788. echo "Bin Seq Slow? Group Cnt Class\n";
  789. echo "--------------------------------------------\n";
  790. foreach ($allTests as $testInfo) {
  791. $inBin = isset($tests[$testInfo['name']]);
  792. $message = sprintf(
  793. "%s %4d %5s %15s %4d %s\n",
  794. $inBin ? "***" : " ",
  795. $testInfo['sequence'],
  796. in_array('#slow', $testInfo['groups']) ? '#slow' : '',
  797. trim_with_ellipsis($testInfo['group'], 15, \STR_PAD_RIGHT),
  798. $testInfo['tests_count'],
  799. trim_with_ellipsis($testInfo['name'], 60, \STR_PAD_LEFT),
  800. );
  801. simpletest_script_print($message, $inBin ? SIMPLETEST_SCRIPT_COLOR_BRIGHT_WHITE : SIMPLETEST_SCRIPT_COLOR_GRAY);
  802. }
  803. echo "-------------------------------------------------\n\n";
  804. }
  805. /**
  806. * Initialize the reporter.
  807. */
  808. function simpletest_script_reporter_init(): void {
  809. global $test_list, $results_map;
  810. $results_map = [
  811. 'pass' => 'Pass',
  812. 'fail' => 'Fail',
  813. 'error' => 'Error',
  814. 'skipped' => 'Skipped',
  815. 'cli_fail' => 'Failure',
  816. 'exception' => 'Exception',
  817. 'debug' => 'Log',
  818. ];
  819. // Tell the user about what tests are to be run.
  820. if (Config::get('all')) {
  821. echo "All tests will run.\n\n";
  822. }
  823. else {
  824. echo "Tests to be run:\n";
  825. foreach ($test_list as $class_name) {
  826. echo " - $class_name\n";
  827. }
  828. echo "\n";
  829. }
  830. echo "Test run started:\n";
  831. echo " " . date('l, F j, Y - H:i', $_SERVER['REQUEST_TIME']) . "\n";
  832. Timer::start('run-tests');
  833. echo "\n";
  834. echo "Test summary\n";
  835. echo "------------\n";
  836. echo "\n";
  837. }
  838. /**
  839. * Displays the assertion result summary for a single test class.
  840. *
  841. * @param string $class
  842. * The test class name that was run.
  843. * @param array $results
  844. * The assertion results using #pass, #fail, #exception, #debug array keys.
  845. * @param float|null $duration
  846. * The time taken for the test to complete.
  847. */
  848. function simpletest_script_reporter_display_summary($class, $results, $duration = NULL): void {
  849. // Output all test results vertically aligned.
  850. $summary = [str_pad($results['#pass'], 4, " ", STR_PAD_LEFT) . ' passed'];
  851. if ($results['#fail']) {
  852. $summary[] = $results['#fail'] . ' failed';
  853. }
  854. if ($results['#error']) {
  855. $summary[] = $results['#error'] . ' errored';
  856. }
  857. if ($results['#skipped']) {
  858. $summary[] = $results['#skipped'] . ' skipped';
  859. }
  860. if ($results['#exception']) {
  861. $summary[] = $results['#exception'] . ' exception(s)';
  862. }
  863. if ($results['#debug']) {
  864. $summary[] = $results['#debug'] . ' log(s)';
  865. }
  866. if ($results['#cli_fail']) {
  867. $summary[] = 'exit code ' . $results['#exit_code'];
  868. }
  869. // The key $results['#time'] holds the sum of the tests execution times,
  870. // without taking into account the process spawning time and the setup
  871. // times of the tests themselves. So for reporting to be consistent with
  872. // PHPUnit CLI reported execution time, we report here the overall time of
  873. // execution of the spawned process.
  874. $time = sprintf('%8.3fs', $duration);
  875. $output = vsprintf('%s %s %s', [$time, trim_with_ellipsis($class, 70, STR_PAD_LEFT), implode(', ', $summary)]);
  876. $status = ($results['#fail'] || $results['#cli_fail'] || $results['#exception'] || $results['#error'] ? 'fail' : 'pass');
  877. simpletest_script_print($output . "\n", simpletest_script_color_code($status));
  878. }
/**
 * Writes jUnit XML test result files, one file per test class.
 *
 * Loads all result messages for the global $test_ids, groups them by test
 * class, and writes each class's results as a <testsuite> document into the
 * directory given by the 'xml' option. Exits the process with
 * SIMPLETEST_SCRIPT_EXIT_EXCEPTION if loading the results fails.
 */
function simpletest_script_reporter_write_xml_results(TestRunResultsStorageInterface $test_run_results_storage): void {
  global $test_ids, $results_map;
  try {
    $results = simpletest_script_load_messages_by_test_id($test_run_results_storage, $test_ids);
  }
  catch (Exception $e) {
    echo (string) $e;
    exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  }
  // Results are ordered by test class; a class's document is flushed to disk
  // as soon as a result for a different class is encountered.
  $test_class = '';
  $xml_files = [];
  foreach ($results as $result) {
    // Only statuses known to $results_map are reported.
    if (isset($results_map[$result->status])) {
      if ($result->test_class != $test_class) {
        // We've moved onto a new class, so write the last classes results to a
        // file:
        if (isset($xml_files[$test_class])) {
          file_put_contents(Config::get('xml') . '/' . str_replace('\\', '_', $test_class) . '.xml', $xml_files[$test_class]['doc']->saveXML());
          unset($xml_files[$test_class]);
        }
        $test_class = $result->test_class;
        if (!isset($xml_files[$test_class])) {
          // Start a fresh document with a <testsuite> root for this class.
          $doc = new DOMDocument('1.0', 'utf-8');
          $root = $doc->createElement('testsuite');
          $root = $doc->appendChild($root);
          $xml_files[$test_class] = ['doc' => $doc, 'suite' => $root];
        }
      }
      // For convenience:
      $dom_document = &$xml_files[$test_class]['doc'];
      // Create the XML element for this test case:
      $case = $dom_document->createElement('testcase');
      $case->setAttribute('classname', $test_class);
      // Use only the method name when $result->function is "Class->method".
      if (str_contains($result->function, '->')) {
        [, $name] = explode('->', $result->function, 2);
      }
      else {
        $name = $result->function;
      }
      $case->setAttribute('name', $name);
      // Passes get no further attention, but failures and exceptions get to add
      // more detail:
      if ($result->status == 'fail') {
        $fail = $dom_document->createElement('failure');
        $fail->setAttribute('type', 'failure');
        $fail->setAttribute('message', $result->message_group);
        $text = $dom_document->createTextNode($result->message);
        $fail->appendChild($text);
        $case->appendChild($fail);
      }
      elseif ($result->status == 'exception') {
        // In the case of an exception the $result->function may not be a class
        // method so we record the full function name:
        $case->setAttribute('name', $result->function);
        $fail = $dom_document->createElement('error');
        $fail->setAttribute('type', 'exception');
        $fail->setAttribute('message', $result->message_group);
        $full_message = $result->message . "\n\nline: " . $result->line . "\nfile: " . $result->file;
        $text = $dom_document->createTextNode($full_message);
        $fail->appendChild($text);
        $case->appendChild($fail);
      }
      // Append the test case XML to the test suite:
      $xml_files[$test_class]['suite']->appendChild($case);
    }
  }
  // The last test case hasn't been saved to a file yet, so do that now:
  if (isset($xml_files[$test_class])) {
    file_put_contents(Config::get('xml') . '/' . str_replace('\\', '_', $test_class) . '.xml', $xml_files[$test_class]['doc']->saveXML());
    unset($xml_files[$test_class]);
  }
}
  954. /**
  955. * Stop the test timer.
  956. */
  957. function simpletest_script_reporter_timer_stop(): void {
  958. global $total_time;
  959. echo "\n";
  960. $end = Timer::stop('run-tests');
  961. $wall_seconds = $end['time'] / 1000;
  962. $formatter = \Drupal::service('date.formatter');
  963. echo "Wall time: " . $formatter->formatInterval((int) $wall_seconds) . "\n";
  964. echo "Total time: " . $formatter->formatInterval((int) $total_time) . "\n";
  965. if ($wall_seconds > 0) {
  966. echo sprintf("Speedup: %.2fx (concurrency %d)\n", $total_time / $wall_seconds, Config::get('concurrency'));
  967. }
  968. echo "\n";
  969. }
  970. /**
  971. * Display test results.
  972. */
  973. function simpletest_script_reporter_display_results(TestRunResultsStorageInterface $test_run_results_storage): void {
  974. global $test_ids, $results_map;
  975. if (Config::get('verbose')) {
  976. // Report results.
  977. echo "Detailed test results\n";
  978. echo "---------------------\n";
  979. try {
  980. $results = simpletest_script_load_messages_by_test_id($test_run_results_storage, $test_ids);
  981. }
  982. catch (Exception $e) {
  983. echo (string) $e;
  984. exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
  985. }
  986. $test_class = '';
  987. foreach ($results as $result) {
  988. if (isset($results_map[$result->status])) {
  989. if ($result->test_class != $test_class) {
  990. // Display test class every time results are for new test class.
  991. echo "\n\n---- $result->test_class ----\n\n\n";
  992. $test_class = $result->test_class;
  993. // Print table header.
  994. echo "Status Duration Info \n";
  995. echo "--------------------------------------------------------------------------------------------------------\n";
  996. }
  997. simpletest_script_format_result($result);
  998. }
  999. }
  1000. }
  1001. }
/**
 * Formats and prints a single result line, with optional message detail.
 *
 * Prints a colored "status duration function" summary line; for statuses
 * other than pass/fail/error, also prints the (HTML-stripped) result
 * message indented on the following lines.
 *
 * @param object $result
 *   The result object to format. Reads ->time, ->status, ->function and
 *   ->message.
 */
function simpletest_script_format_result($result): void {
  global $results_map;
  if ($result->time == 0) {
    $duration = " ";
  }
  elseif ($result->time < 0.001) {
    // Sub-millisecond timings are not meaningful; show a marker instead.
    $duration = " <1 ms";
  }
  else {
    $duration = sprintf("%9.3fs", $result->time);
  }
  $summary = sprintf("%-9.9s %s %s\n", $results_map[$result->status], $duration, trim_with_ellipsis($result->function, 80, STR_PAD_LEFT));
  simpletest_script_print($summary, simpletest_script_color_code($result->status));
  // Pass/fail/error details are reported elsewhere; only other statuses
  // (e.g. exceptions, debug logs) get their message printed here.
  if ($result->message === '' || in_array($result->status, ['pass', 'fail', 'error'])) {
    return;
  }
  $message = trim(strip_tags($result->message));
  if (Config::get('non-html')) {
    $message = Html::decodeEntities($message);
  }
  // Indent every line of the message under the summary line.
  $lines = explode("\n", $message);
  foreach ($lines as $line) {
    echo " $line\n";
  }
}
/**
 * Print error messages so the user will notice them.
 *
 * Print error message prefixed with " ERROR: " and displayed in fail color if
 * color output is enabled.
 *
 * @param string $message
 *   The message to print.
 *
 * @see simpletest_script_print()
 */
function simpletest_script_print_error($message): void {
  simpletest_script_print(" ERROR: $message\n", SIMPLETEST_SCRIPT_COLOR_FAIL);
}
  1045. /**
  1046. * Print a message to the console, using a color.
  1047. *
  1048. * @param string $message
  1049. * The message to print.
  1050. * @param int|string $color_code
  1051. * The color code to use for coloring.
  1052. */
  1053. function simpletest_script_print($message, $color_code): void {
  1054. try {
  1055. if (Config::get('color')) {
  1056. echo "\033[" . $color_code . "m" . $message . "\033[0m";
  1057. }
  1058. else {
  1059. echo $message;
  1060. }
  1061. }
  1062. catch (\RuntimeException) {
  1063. echo $message;
  1064. }
  1065. }
  1066. /**
  1067. * Get the color code associated with the specified status.
  1068. *
  1069. * @param string $status
  1070. * The status string to get code for. Special cases are: 'pass', 'fail', or
  1071. * 'exception'.
  1072. *
  1073. * @return int
  1074. * Color code. Returns 0 for default case.
  1075. */
  1076. function simpletest_script_color_code($status) {
  1077. return match ($status) {
  1078. 'pass' => SIMPLETEST_SCRIPT_COLOR_PASS,
  1079. 'fail', 'cli_fail', 'error', 'exception' => SIMPLETEST_SCRIPT_COLOR_FAIL,
  1080. 'skipped' => SIMPLETEST_SCRIPT_COLOR_YELLOW,
  1081. 'debug' => SIMPLETEST_SCRIPT_COLOR_CYAN,
  1082. default => 0,
  1083. };
  1084. }
  1085. /**
  1086. * Prints alternative test names.
  1087. *
  1088. * Searches the provided array of string values for close matches based on the
  1089. * Levenshtein algorithm.
  1090. *
  1091. * @param string $string
  1092. * A string to test.
  1093. * @param array $array
  1094. * A list of strings to search.
  1095. * @param int $degree
  1096. * The matching strictness. Higher values return fewer matches. A value of
  1097. * 4 means that the function will return strings from $array if the candidate
  1098. * string in $array would be identical to $string by changing 1/4 or fewer of
  1099. * its characters.
  1100. *
  1101. * @see http://php.net/manual/function.levenshtein.php
  1102. */
  1103. function simpletest_script_print_alternatives($string, $array, $degree = 4): void {
  1104. $alternatives = [];
  1105. foreach ($array as $item) {
  1106. $lev = levenshtein($string, $item);
  1107. if ($lev <= strlen($item) / $degree || str_contains($string, $item)) {
  1108. $alternatives[] = $item;
  1109. }
  1110. }
  1111. if (!empty($alternatives)) {
  1112. simpletest_script_print(" Did you mean?\n", SIMPLETEST_SCRIPT_COLOR_FAIL);
  1113. foreach ($alternatives as $alternative) {
  1114. simpletest_script_print(" - $alternative\n", SIMPLETEST_SCRIPT_COLOR_FAIL);
  1115. }
  1116. }
  1117. }
/**
 * Loads test result messages from the database.
 *
 * Messages are ordered by test class and message id.
 *
 * Exits the process with SIMPLETEST_SCRIPT_EXIT_EXCEPTION if any test run
 * cannot be loaded.
 *
 * @param \Drupal\Core\Test\TestRunResultsStorageInterface $test_run_results_storage
 *   The test run results storage to read from.
 * @param array $test_ids
 *   Array of test IDs of the messages to be loaded.
 *
 * @return array
 *   Array of test result messages from the database.
 */
function simpletest_script_load_messages_by_test_id(TestRunResultsStorageInterface $test_run_results_storage, $test_ids) {
  $results = [];
  // Sqlite has a maximum number of variables per query. If required, the
  // database query is split into chunks.
  if (count($test_ids) > SIMPLETEST_SCRIPT_SQLITE_VARIABLE_LIMIT && Config::get('sqlite')) {
    $test_id_chunks = array_chunk($test_ids, SIMPLETEST_SCRIPT_SQLITE_VARIABLE_LIMIT);
  }
  else {
    // Non-SQLite (or small enough input): process everything in one chunk.
    $test_id_chunks = [$test_ids];
  }
  foreach ($test_id_chunks as $test_id_chunk) {
    try {
      $result_chunk = [];
      foreach ($test_id_chunk as $test_id) {
        $test_run = TestRun::get($test_run_results_storage, $test_id);
        $result_chunk = array_merge($result_chunk, $test_run->getLogEntriesByTestClass());
      }
    }
    catch (Exception $e) {
      // A failure to read results is fatal for the reporter.
      echo (string) $e;
      exit(SIMPLETEST_SCRIPT_EXIT_EXCEPTION);
    }
    if ($result_chunk) {
      $results = array_merge($results, $result_chunk);
    }
  }
  return $results;
}
  1157. /**
  1158. * Trims a string adding a leading or trailing ellipsis.
  1159. *
  1160. * @param string $input
  1161. * The input string.
  1162. * @param int $length
  1163. * The exact trimmed string length.
  1164. * @param int $side
  1165. * Leading or trailing ellipsis.
  1166. *
  1167. * @return string
  1168. * The trimmed string.
  1169. */
  1170. function trim_with_ellipsis(string $input, int $length, int $side): string {
  1171. if (strlen($input) < $length) {
  1172. return str_pad($input, $length, ' ', \STR_PAD_RIGHT);
  1173. }
  1174. elseif (strlen($input) > $length) {
  1175. return match($side) {
  1176. \STR_PAD_RIGHT => substr($input, 0, $length - 1) . '…',
  1177. default => '…' . substr($input, -$length + 1),
  1178. };
  1179. }
  1180. return $input;
  1181. }
  1182. /**
  1183. * Outputs the discovery warning messages.
  1184. */
  1185. function dump_discovery_warnings(): void {
  1186. $warnings = PhpUnitTestDiscovery::instance()->getWarnings();
  1187. if (!empty($warnings)) {
  1188. simpletest_script_print("Test discovery warnings\n", SIMPLETEST_SCRIPT_COLOR_BRIGHT_WHITE);
  1189. simpletest_script_print("-----------------------\n", SIMPLETEST_SCRIPT_COLOR_BRIGHT_WHITE);
  1190. foreach ($warnings as $warning) {
  1191. $tmp = explode("\n", $warning);
  1192. simpletest_script_print('* ' . array_shift($tmp) . "\n", SIMPLETEST_SCRIPT_COLOR_EXCEPTION);
  1193. foreach ($tmp as $sub) {
  1194. simpletest_script_print(' ' . $sub . "\n", SIMPLETEST_SCRIPT_COLOR_EXCEPTION);
  1195. }
  1196. echo "\n";
  1197. }
  1198. }
  1199. }

Buggy or inaccurate documentation? Please file an issue. Need support? Need help programming? Connect with the Drupal community.