<?php
/**
 * Utility class mapping unit test result statuses (from
 * @{class:ArcanistUnitTestResult}) to display properties: icon, color,
 * label, sort key, and count labels.
 */
final class HarbormasterUnitStatus
  extends Phobject {

  /**
   * Get the icon name for a unit status.
   *
   * @param string Unit status constant.
   * @return string Icon name; 'fa-question-circle' for unknown statuses.
   */
  public static function getUnitStatusIcon($status) {
    $map = self::getUnitStatusDictionary($status);
    $default = 'fa-question-circle';
    return idx($map, 'icon', $default);
  }

  /**
   * Get the display color for a unit status.
   *
   * @param string Unit status constant.
   * @return string Color name; 'violet' for unknown statuses.
   */
  public static function getUnitStatusColor($status) {
    $map = self::getUnitStatusDictionary($status);
    $default = 'violet';
    return idx($map, 'color', $default);
  }

  /**
   * Get the human-readable label for a unit status.
   *
   * @param string Unit status constant.
   * @return string Translated label; a generic "Unknown Status" label
   *   (including the raw status) for unknown statuses.
   */
  public static function getUnitStatusLabel($status) {
    $map = self::getUnitStatusDictionary($status);
    $default = pht('Unknown Status ("%s")', $status);
    return idx($map, 'label', $default);
  }

  /**
   * Get the sort key for a unit status. Lower keys sort first (more
   * interesting statuses come before less interesting ones).
   *
   * @param string Unit status constant.
   * @return string Single-character sort key; 'N' for unknown statuses.
   */
  public static function getUnitStatusSort($status) {
    $map = self::getUnitStatusDictionary($status);
    $default = 'N';
    return idx($map, 'sort', $default);
  }

  /**
   * Look up the display property dictionary for a single status.
   *
   * @param string Unit status constant.
   * @return map<string, string> Display properties; empty for unknown
   *   statuses.
   */
  private static function getUnitStatusDictionary($status) {
    $map = self::getUnitStatusMap();
    $default = array();
    return idx($map, $status, $default);
  }

  /**
   * Get a count label (like "3 Failed Test(s)") for a unit status.
   *
   * @param string Unit status constant.
   * @param int Number of tests with this status.
   * @return string Translated count label.
   */
  public static function getUnitStatusCountLabel($status, $count) {
    $count = new PhutilNumber($count);

    switch ($status) {
      case ArcanistUnitTestResult::RESULT_FAIL:
        return pht('%s Failed Test(s)', $count);
      case ArcanistUnitTestResult::RESULT_BROKEN:
        return pht('%s Broken Test(s)', $count);
      case ArcanistUnitTestResult::RESULT_UNSOUND:
        return pht('%s Unsound Test(s)', $count);
      case ArcanistUnitTestResult::RESULT_PASS:
        return pht('%s Passed Test(s)', $count);
      case ArcanistUnitTestResult::RESULT_SKIP:
        return pht('%s Skipped Test(s)', $count);
    }

    return pht('%s Other Test(s)', $count);
  }

  /**
   * Full map of known statuses to their display properties.
   *
   * @return map<string, map<string, string>> Status constant to
   *   icon/color/label/sort properties.
   */
  private static function getUnitStatusMap() {
    return array(
      ArcanistUnitTestResult::RESULT_FAIL => array(
        'label' => pht('Failed'),
        'icon' => 'fa-times',
        'color' => 'red',
        'sort' => 'A',
      ),
      ArcanistUnitTestResult::RESULT_BROKEN => array(
        'label' => pht('Broken'),
        'icon' => 'fa-bomb',
        'color' => 'indigo',
        'sort' => 'B',
      ),
      ArcanistUnitTestResult::RESULT_UNSOUND => array(
        'label' => pht('Unsound'),
        'icon' => 'fa-exclamation-triangle',
        'color' => 'yellow',
        'sort' => 'C',
      ),
      ArcanistUnitTestResult::RESULT_PASS => array(
        'label' => pht('Passed'),
        'icon' => 'fa-check',
        'color' => 'green',
        'sort' => 'D',
      ),
      // Skips sort after passes: skipped tests almost always indicate a
      // test which is not expected to run in the current environment, so
      // they are less interesting than passes (see D15369).
      ArcanistUnitTestResult::RESULT_SKIP => array(
        'label' => pht('Skipped'),
        'icon' => 'fa-fast-forward',
        'color' => 'blue',
        'sort' => 'E',
      ),
    );
  }

}