blob: e83a8ec918d977612383f7a3725fa08da155cea1 [file] [log] [blame]
/**
* @license
* Copyright 2021 The Chromium Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
import './test/test-setup';
import {
BinarySize,
BuildbucketBuild,
BuildbucketBuilder,
BinarySizeRow,
BinarySizeConfig,
BuilderPair,
} from './binary-size';
import {
ChangeInfo,
ChangeStatus,
PatchSetNum,
RevisionKind,
} from '@gerritcodereview/typescript-api/rest-api';
import {Category, ChangeData} from '@gerritcodereview/typescript-api/checks';
suite('binary-size basic tests', () => {
let element: BinarySize;
// Installs a fake `window.buildbucket` whose client resolves every
// searchBuilds()/batch() call with `searchPromise`, and returns a sinon
// spy on `searchBuilds` so tests can inspect how it was invoked.
function stubSearch(searchPromise: object) {
  class MockBuildbucketV2Client {
    async searchBuilds() {
      return searchPromise;
    }
    async batch() {
      return searchPromise;
    }
  }
  window.buildbucket = {
    getAccessToken: async () => null,
    // Orders build IDs: longer IDs first, then reverse-lexicographic.
    compareBuildIds: (x: string, y: string) => {
      const lengthDelta = y.length - x.length;
      if (lengthDelta !== 0) {
        return lengthDelta;
      }
      if (x === y) {
        return 0;
      }
      return x > y ? -1 : 1;
    },
    // Splits a 'luci.<project>.<bucket>' name into its parts.
    convertLegacyBucket: (name: string) => {
      const dot = name.indexOf('.', 5);
      return {
        project: name.slice(5, dot),
        bucket: name.slice(dot + 1),
      };
    },
    identicalBucket: (a: BuildbucketBuilder, b: BuildbucketBuilder) =>
      a.project === b.project && a.bucket === b.bucket,
    BuildbucketV2Client: MockBuildbucketV2Client,
  };
  return sinon.spy(MockBuildbucketV2Client.prototype, 'searchBuilds');
}
// Prepares `element` for a reload() call: a cache stub that always
// reports the user as opted in, and a fake plugin whose checks()
// registration is routed through the supplied sinon stub.
function stubReload(element: BinarySize, stub: sinon.SinonStub) {
  class OptedInCache {
    read() {
      return {optedIn: true};
    }
  }
  Object.assign(window.buildbucket, {
    CacheObject: OptedInCache,
    CHECKS_OPT_CACHE_KEY: 'foo',
  });
  Object.assign(element, {
    plugin: {
      getPluginName: () => 'binary-size',
      restApi: () => ({
        get: async () => ({
          builders: [],
          _account_id: 123,
        }),
      }),
      checks: () => ({register: stub}),
      hook: () => ({onAttached: () => {}}),
    },
  });
}
// Fresh element per test, wired with just enough plugin/change/config
// state for the methods under test; individual tests override as needed.
setup(() => {
  element = new BinarySize();
  Object.assign(element, {
    plugin: {
      getPluginName() {
        return 'binary-size';
      },
      restApi() {
        return {
          getLoggedIn: async () => true,
        };
      },
    },
    // Minimal change fixture: a single revision at patchset 1.
    change: {
      project: 'project-foo',
      _number: 1,
      revisions: {
        deadbeef: {
          _number: 1,
        },
      },
    },
    pluginConfig: {
      gerritHost: 'gerrit.example.com',
      buckets: [],
      builders: [],
    },
    revision: {
      _number: 1,
    },
  });
});
// Undo every sinon spy/stub so fixtures never leak between tests.
teardown(() => sinon.restore());
// A batch response with no `responses` field yields no builds.
test('getBuilds empty batch response', async () => {
  stubSearch(Promise.resolve({}));
  const requested = [
    {project: 'project', bucket: 'some.bucket', builder: 'builder1'},
    {project: 'project', bucket: 'a.b.c', builder: 'builder2'},
  ];
  const builds = await element.getBuilds(requested, [], []);
  assert.deepEqual(builds, []);
});
// A populated batch response is flattened into its list of builds.
test('getBuilds', async () => {
  const response = {
    responses: [
      {searchBuilds: {builds: [{id: 'build1'}, {id: 'build2'}]}},
    ],
  };
  stubSearch(Promise.resolve(response));
  const requested: BuildbucketBuilder[] = [
    {project: 'project', bucket: 'some.bucket', builder: 'builder1'},
    {project: 'project', bucket: 'a.b.c', builder: 'builder2'},
  ];
  const tags = [{key: 'buildset', value: 'foo/bar'}];
  assert.deepEqual(await element.getBuilds(requested, [], tags), [
    {id: 'build1'},
    {id: 'build2'},
  ] as BuildbucketBuild[]);
});
// Only builds reporting both got_revision and binary_sizes are kept.
test('selectRelevantBuilds', () => {
  const builds = [
    // Complete output: the only relevant build.
    {output: {properties: {got_revision: 'abc', binary_sizes: {}}}},
    // Missing binary_sizes, missing properties, missing output entirely.
    {output: {properties: {got_revision: 'abc'}}},
    {output: {}},
    {},
  ] as BuildbucketBuild[];
  assert.deepEqual(element.selectRelevantBuilds(builds), [builds[0]]);
});
// Rows come back ordered by binary name.
test('sortUniqueInfoRows sorts', () => {
  const rowB = {builder: 'abc', binary: 'b.so', id: '2'};
  const rowA = {builder: 'abc', binary: 'a.so', id: '1'};
  const rows = [rowB, rowA] as BinarySizeRow[];
  assert.deepEqual(element.sortUniqueInfoRows(rows), [rows[1], rows[0]]);
});
test('sortUniqueInfoRows keeps only latest', () => {
  // Build IDs are monotonically decreasing, so id '1' is the newest
  // 'a.so' row and the stale id '3' duplicate must be dropped.
  const staleA = {builder: 'abc', binary: 'a.so', id: '3'};
  const onlyB = {builder: 'abc', binary: 'b.so', id: '2'};
  const latestA = {builder: 'abc', binary: 'a.so', id: '1'};
  const rows = [staleA, onlyB, latestA] as BinarySizeRow[];
  assert.deepEqual(element.sortUniqueInfoRows(rows), [rows[2], rows[1]]);
});
test('getUniqueTryAndCiBuilders', () => {
  element.pluginConfig = {
    builders: [
      {
        tryBucket: 'luci.project.try',
        tryBuilder: 'builder1',
        ciBucket: 'luci.project.ci',
        ciBuilder: 'Builder 1',
      },
      {
        tryBucket: 'luci.project.try',
        tryBuilder: 'builder2',
        // Same CI builder again; it must appear only once below.
        ciBucket: 'luci.project.ci',
        ciBuilder: 'Builder 1',
      },
    ],
  } as BinarySizeConfig;
  const [tryBuilders, ciBuilders] = element.getUniqueTryAndCiBuilders();
  assert.deepEqual(tryBuilders, [
    {bucket: 'luci.project.try', builder: 'builder1'},
    {bucket: 'luci.project.try', builder: 'builder2'},
  ]);
  assert.deepEqual(ciBuilders, [
    {bucket: 'luci.project.ci', builder: 'Builder 1'},
  ]);
});
test('revisionTags', () => {
  element.change = {project: 'src'} as ChangeInfo;
  element.pluginConfig = {gitHost: 'host'} as BinarySizeConfig;
  const buildAt = (rev: string) => ({
    output: {properties: {got_revision: rev}},
  });
  // 'beef' appears twice; the resulting buildset tags are deduplicated.
  const tryBuildToBuilderPair = new Map([
    [buildAt('beef'), {ciBuilderRepo: ''}],
    [buildAt('a3ee'), {ciBuilderRepo: ''}],
    [buildAt('beef'), {ciBuilderRepo: ''}],
  ]) as Map<BuildbucketBuild, BuilderPair>;
  assert.deepEqual(element.revisionTags(tryBuildToBuilderPair), [
    {key: 'buildset', value: 'commit/gitiles/host/src/+/beef'},
    {key: 'buildset', value: 'commit/gitiles/host/src/+/a3ee'},
  ]);
});
// Builder-pair fixture shared by the binarySizeInfo tests: two pairs in
// luci.project.* plus one in luci.other.*, so tests can exercise both
// matching and non-matching buckets.
// NOTE(review): Object.freeze is shallow — the nested pair objects stay
// mutable, so tests must take care not to modify them.
const BUILDERS = [
  {
    tryBucket: 'luci.project.try',
    tryBuilder: 'builder1',
    ciBucket: 'luci.project.ci',
    ciBuilder: 'Builder 1',
  },
  {
    tryBucket: 'luci.project.try',
    tryBuilder: 'builder2',
    ciBucket: 'luci.project.ci',
    ciBuilder: 'Builder 2',
  },
  {
    tryBucket: 'luci.other.try',
    tryBuilder: 'builder1',
    ciBucket: 'luci.other.ci',
    ciBuilder: 'Builder 1',
  },
] as BuilderPair[];
Object.freeze(BUILDERS);
test('binarySizeInfo basic', () => {
assert.deepEqual(
element.processBinarySizeInfo(
BUILDERS,
[
{
builder: {builder: 'builder1', project: 'project', bucket: 'try'},
tags: [{key: 'builder', value: 'builder1'}],
id: '7',
url: 'http://example.org/builder1/7',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1337,
'a.owner': 'owner',
'a.budget': 1339,
},
},
},
},
] as BuildbucketBuild[],
[
{
builder: {builder: 'Builder 1', project: 'project', bucket: 'ci'},
tags: [{key: 'builder', value: 'Builder 1'}],
id: '8',
url: 'http://example.org/Builder%201/8',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1338,
'a.owner': 'owner',
'a.budget': 1339,
},
},
},
},
] as BuildbucketBuild[]
).rows,
[
{
id: '7',
builder: 'builder1',
binary: 'a',
trySize: 1337,
tryUrl: 'http://example.org/builder1/7',
ciSize: 1338,
ciUrl: 'http://example.org/Builder%201/8',
tryBudget: 1339,
budgetExceeded: false,
tryCreepBudget: undefined,
creepExceeded: false,
ownerUrl: 'owner',
},
]
);
});
test('binarySizeInfo with no budgets', () => {
assert.deepEqual(
element.processBinarySizeInfo(
BUILDERS,
[
{
builder: {builder: 'builder1', project: 'project', bucket: 'try'},
tags: [{key: 'builder', value: 'builder1'}],
id: '7',
url: 'http://example.org/builder1/7',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1337,
'a.owner': 'owner',
},
},
},
},
] as BuildbucketBuild[],
[
{
builder: {builder: 'Builder 1', project: 'project', bucket: 'ci'},
tags: [{key: 'builder', value: 'Builder 1'}],
id: '8',
url: 'http://example.org/Builder%201/8',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1338,
'a.owner': 'owner',
},
},
},
},
] as BuildbucketBuild[]
).rows,
[
{
id: '7',
builder: 'builder1',
binary: 'a',
trySize: 1337,
tryUrl: 'http://example.org/builder1/7',
ciSize: 1338,
ciUrl: 'http://example.org/Builder%201/8',
tryBudget: undefined,
budgetExceeded: false,
tryCreepBudget: undefined,
creepExceeded: false,
ownerUrl: 'owner',
},
]
);
});
test('binarySizeInfo with exceeded budget', () => {
assert.deepEqual(
element.processBinarySizeInfo(
BUILDERS,
[
{
builder: {builder: 'builder1', project: 'project', bucket: 'try'},
tags: [{key: 'builder', value: 'builder1'}],
id: '7',
url: 'http://example.org/builder1/7',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1337,
'a.owner': 'owner',
'a.budget': 1,
},
},
},
},
] as BuildbucketBuild[],
[
{
builder: {builder: 'Builder 1', project: 'project', bucket: 'ci'},
tags: [{key: 'builder', value: 'Builder 1'}],
id: '8',
url: 'http://example.org/Builder%201/8',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1338,
'a.owner': 'owner',
'a.budget': 1,
},
},
},
},
] as BuildbucketBuild[]
).rows,
[
{
id: '7',
builder: 'builder1',
binary: 'a',
trySize: 1337,
tryUrl: 'http://example.org/builder1/7',
ciSize: 1338,
ciUrl: 'http://example.org/Builder%201/8',
tryBudget: 1,
budgetExceeded: true,
tryCreepBudget: undefined,
creepExceeded: false,
ownerUrl: 'owner',
},
]
);
});
test('binarySizeInfo with creep budget', () => {
assert.deepEqual(
element.processBinarySizeInfo(
BUILDERS,
[
{
builder: {builder: 'builder1', project: 'project', bucket: 'try'},
tags: [{key: 'builder', value: 'builder1'}],
id: '7',
url: 'http://example.org/builder1/7',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1337,
'a.owner': 'owner',
'a.budget': 2,
'a.creepBudget': 1,
},
},
},
},
] as BuildbucketBuild[],
[
{
builder: {builder: 'Builder 1', project: 'project', bucket: 'ci'},
tags: [{key: 'builder', value: 'Builder 1'}],
id: '8',
url: 'http://example.org/Builder%201/8',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1338,
'a.owner': 'owner',
'a.budget': 2,
'a.creepBudget': 1,
},
},
},
},
] as BuildbucketBuild[]
).rows,
[
{
id: '7',
builder: 'builder1',
binary: 'a',
trySize: 1337,
tryUrl: 'http://example.org/builder1/7',
ciSize: 1338,
ciUrl: 'http://example.org/Builder%201/8',
tryBudget: 2,
budgetExceeded: true,
tryCreepBudget: 1,
creepExceeded: false,
ownerUrl: 'owner',
},
]
);
});
test('binarySizeInfo with exceeded creep budget', () => {
assert.deepEqual(
element.processBinarySizeInfo(
BUILDERS,
[
{
builder: {builder: 'builder1', project: 'project', bucket: 'try'},
tags: [{key: 'builder', value: 'builder1'}],
id: '7',
url: 'http://example.org/builder1/7',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1340,
'a.owner': 'owner',
'a.budget': 2,
'a.creepBudget': 1,
},
},
},
},
] as BuildbucketBuild[],
[
{
builder: {builder: 'Builder 1', project: 'project', bucket: 'ci'},
tags: [{key: 'builder', value: 'Builder 1'}],
id: '8',
url: 'http://example.org/Builder%201/8',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1337,
'a.owner': 'owner',
'a.budget': 2,
'a.creepBudget': 1,
},
},
},
},
] as BuildbucketBuild[]
).rows,
[
{
id: '7',
builder: 'builder1',
binary: 'a',
trySize: 1340,
tryUrl: 'http://example.org/builder1/7',
ciSize: 1337,
ciUrl: 'http://example.org/Builder%201/8',
tryBudget: 2,
budgetExceeded: true,
tryCreepBudget: 1,
creepExceeded: true,
ownerUrl: 'owner',
},
]
);
});
test('binarySizeInfo no match on bucket', () => {
assert.deepEqual(
element.processBinarySizeInfo(
BUILDERS,
[
{
builder: {builder: 'builder1', project: 'project', bucket: 'try'},
tags: [{key: 'builder', value: 'builder1'}],
id: '7',
url: 'http://example.org/builder1/7',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1337,
'a.owner': 'owner',
'a.budget': 1,
},
},
},
},
] as BuildbucketBuild[],
[
{
builder: {
builder: 'Builder 1',
project: 'project',
bucket: 'other',
},
tags: [{key: 'builder', value: 'Builder 1'}],
id: '8',
url: 'http://example.org/Builder%201/8',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1338,
'a.owner': 'owner',
'a.budget': 1,
},
},
},
},
] as BuildbucketBuild[]
).rows,
[]
);
});
test('binarySizeInfo no match on binary', () => {
assert.deepEqual(
element.processBinarySizeInfo(
BUILDERS,
[
{
builder: {builder: 'builder1', project: 'project', bucket: 'try'},
tags: [{key: 'builder', value: 'builder1'}],
id: '7',
url: 'http://example.org/builder1/7',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1337,
'a.owner': 'owner',
'a.budget': 1,
},
},
},
},
] as BuildbucketBuild[],
[
{
builder: {builder: 'Builder 1', project: 'project', bucket: 'ci'},
tags: [{key: 'builder', value: 'Builder 1'}],
id: '8',
url: 'http://example.org/Builder%201/8',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
b: 1338,
'b.owner': 'owner',
'b.budget': 1,
},
},
},
},
] as BuildbucketBuild[]
).rows,
[]
);
});
test('binarySizeInfo multiple try builds', () => {
const result = element.processBinarySizeInfo(
BUILDERS,
[
{
builder: {builder: 'builder1', project: 'project', bucket: 'try'},
tags: [{key: 'builder', value: 'builder1'}],
id: '7',
url: 'http://example.org/builder1/7',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1337,
b: 2222,
},
},
},
},
{
builder: {builder: 'builder1', project: 'project', bucket: 'try'},
tags: [{key: 'builder', value: 'builder1'}],
id: '7',
url: 'http://example.org/builder1/7',
output: {
properties: {
got_revision: 'da39a3ee',
binary_sizes: {
a: 1111,
},
},
},
},
{
builder: {builder: 'builder1', project: 'project', bucket: 'try'},
tags: [{key: 'builder', value: 'builder1'}],
id: '7',
url: 'http://example.org/builder1/7',
output: {
properties: {
got_revision: 'da39a3ee',
binary_sizes: {
a: 1110,
},
},
},
},
] as BuildbucketBuild[],
[
{
builder: {builder: 'Builder 1', project: 'project', bucket: 'ci'},
tags: [{key: 'builder', value: 'Builder 1'}],
id: '8',
url: 'http://example.org/Builder%201/8',
output: {
properties: {
got_revision: 'deadbeef',
binary_sizes: {
a: 1338,
b: 2223,
},
},
},
},
{
builder: {builder: 'Builder 1', project: 'project', bucket: 'ci'},
tags: [{key: 'builder', value: 'Builder 1'}],
id: '8',
url: 'http://example.org/Builder%201/8',
output: {
properties: {
got_revision: 'da39a3ee',
binary_sizes: {
a: 1112,
},
},
},
},
] as BuildbucketBuild[]
).rows;
assert.equal(result.length, 2);
assert.equal(result[0].builder, 'builder1');
assert.equal(result[0].binary, 'a');
assert.equal(result[0].ciSize, 1338);
assert.equal(result[1].builder, 'builder1');
assert.equal(result[1].binary, 'b');
assert.equal(result[1].ciSize, 2223);
});
test('reload registers checks only once', async () => {
  const register = sinon.stub();
  stubReload(element, register);
  // reload() may reject for reasons unrelated to checks registration;
  // only the registration count matters here, so swallow any error.
  const reloadIgnoringErrors = async () => {
    try {
      await element.reload();
    } catch {
      // Continue regardless of error
    }
  };
  await reloadIgnoringErrors();
  sinon.assert.calledOnce(register);
  await reloadIgnoringErrors();
  sinon.assert.calledOnce(register);
});
test('exponential backoff', async () => {
  assert.isNotOk(element.updateTimeoutID);
  stubSearch(Promise.reject(new Error('nope')));
  const initialIntervalMs = element.updateIntervalMs;
  stubReload(element, sinon.stub());
  await element.reload();
  // A failed reload more than doubles the interval and schedules a retry.
  assert.isAbove(element.updateIntervalMs, 2 * initialIntervalMs);
  assert.isOk(element.updateTimeoutID);
});
test('valid patch numbers', () => {
let change = {
revisions: {
rev1: {_number: 1, kind: RevisionKind.TRIVIAL_REBASE},
rev2: {_number: 2, kind: RevisionKind.TRIVIAL_REBASE},
rev3: {_number: 3, kind: RevisionKind.TRIVIAL_REBASE},
},
} as any; // eslint-disable-line @typescript-eslint/no-explicit-any
assert.deepEqual(element.computeValidPatchNums(change, 3 as PatchSetNum), [
3, 2, 1,
] as PatchSetNum[]);
assert.deepEqual(element.computeValidPatchNums(change, 2 as PatchSetNum), [
2, 1,
] as PatchSetNum[]);
change.status = ChangeStatus.MERGED;
assert.deepEqual(element.computeValidPatchNums(change, 3 as PatchSetNum), [
2, 1,
] as PatchSetNum[]);
change = {
revisions: {
rev1: {_number: 1, kind: RevisionKind.TRIVIAL_REBASE},
rev2: {_number: 2, kind: RevisionKind.REWORK},
rev3: {_number: 3, kind: RevisionKind.TRIVIAL_REBASE},
},
};
assert.deepEqual(element.computeValidPatchNums(change, 3 as PatchSetNum), [
3, 2,
] as PatchSetNum[]);
});
test('fetchChecks creates results with messages and summaries', async () => {
  const changeData = {
    changeNumber: 1,
    patchsetNumber: 1,
    changeInfo: {},
  } as ChangeData;
  const stub = sinon.stub(element, 'processBinarySizeInfo');
  // Route canned rows through the stubbed size-info processor.
  const stubRows = (rows: object[]) =>
    stub.returns({rows} as any); // eslint-disable-line @typescript-eslint/no-explicit-any
  // Fetch and unwrap the first result of the first run.
  const firstResult = async () => {
    const fetched = await element.fetchChecks(changeData);
    return fetched.runs![0].results![0];
  };
  // No size changes, no budgets exceeded.
  stubRows([
    {binary: 'foo', budgetExceeded: false, trySize: 100, ciSize: 100},
    {binary: 'bar', budgetExceeded: false, trySize: 100, ciSize: 100},
    {binary: 'baz', budgetExceeded: false, trySize: 100, ciSize: 100},
  ]);
  let res = await firstResult();
  assert.strictEqual(
    res.summary,
    'All 3 files within budgets. No sizes changed.'
  );
  assert.strictEqual(res.message, 'Expand to view more.');
  // 2 size changes, no budgets exceeded.
  stubRows([
    {binary: 'foo', budgetExceeded: false, trySize: 100, ciSize: 100},
    {binary: 'bar', budgetExceeded: false, trySize: 123, ciSize: 100},
    {binary: 'baz', budgetExceeded: false, trySize: 123, ciSize: 100},
  ]);
  res = await firstResult();
  assert.strictEqual(
    res.summary,
    'All 3 files within budgets. 2 of 3 sizes changed.'
  );
  assert.strictEqual(
    res.message,
    'Sizes changed: bar (+23 B), baz (+23 B). Expand to view more.'
  );
  // 3 size changes, 2 budgets exceeded.
  stubRows([
    {binary: 'foo', budgetExceeded: false, trySize: 123, ciSize: 100},
    {binary: 'bar', budgetExceeded: true, trySize: 123, ciSize: 100},
    {binary: 'baz', creepExceeded: true, trySize: 123, ciSize: 100},
  ]);
  res = await firstResult();
  assert.strictEqual(
    res.summary,
    '2 of 3 budgets exceeded. 3 of 3 sizes changed.'
  );
  assert.strictEqual(
    res.message,
    'Exceeded budgets: bar (+23 B), baz (+23 B). Expand to view more.'
  );
});
test('getCheckRunAttempt', () => {
  // Per the expectations below, the attempt number equals the highest
  // number of builds sharing a single builder name.
  const attemptFor = (builderNames: string[]) =>
    element.getCheckRunAttempt(
      builderNames.map(name => ({
        builder: {builder: name},
      })) as BuildbucketBuild[]
    );
  assert.deepEqual(attemptFor(['a']), 1);
  assert.deepEqual(attemptFor(['a', 'b']), 1);
  assert.deepEqual(attemptFor(['a', 'a', 'b']), 2);
  assert.deepEqual(attemptFor(['a', 'a', 'b', 'b', 'b']), 3);
});
test('getCheckResultCategory', () => {
// If all no budgets were exceeded, category should be INFO.
let binarySizeRows = [
{budgetExceeded: false},
{budgetExceeded: false},
] as BinarySizeRow[];
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'SUCCESS'}] as BuildbucketBuild[],
binarySizeRows
),
Category.INFO
);
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'FAILURE'}] as BuildbucketBuild[],
binarySizeRows
),
Category.INFO
);
// If at least one budget was exceeded, category should be based on
// builds.
binarySizeRows = [
{budgetExceeded: true},
{budgetExceeded: false},
] as BinarySizeRow[];
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'SUCCESS'}] as BuildbucketBuild[],
binarySizeRows
),
Category.WARNING
);
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'SUCCESS'}, {status: 'FAILURE'}] as BuildbucketBuild[],
binarySizeRows
),
Category.ERROR
);
// If no creep budgets were exceeded, category should be INFO.
binarySizeRows = [
{creepExceeded: false},
{creepExceeded: false},
] as BinarySizeRow[];
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'SUCCESS'}] as BuildbucketBuild[],
binarySizeRows
),
Category.INFO
);
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'FAILURE'}] as BuildbucketBuild[],
binarySizeRows
),
Category.INFO
);
// If at least one creep budget was exceeded, category should be based on
// builds.
binarySizeRows = [
{creepExceeded: false},
{creepExceeded: true},
] as BinarySizeRow[];
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'SUCCESS'}] as BuildbucketBuild[],
binarySizeRows
),
Category.WARNING
);
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'SUCCESS'}, {status: 'FAILURE'}] as BuildbucketBuild[],
binarySizeRows
),
Category.ERROR
);
// If no budgets were exceeded and no creep budgets were exceeded,
// category should be INFO.
binarySizeRows = [
{creepExceeded: false, budgetExceeded: false},
{creepExceeded: false, budgetExceeded: false},
] as BinarySizeRow[];
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'SUCCESS'}] as BuildbucketBuild[],
binarySizeRows
),
Category.INFO
);
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'FAILURE'}] as BuildbucketBuild[],
binarySizeRows
),
Category.INFO
);
// If no budgets were exceeded but at least one creep budget was exceeded,
// category should be based on builds.
binarySizeRows = [
{creepExceeded: false, budgetExceeded: false},
{creepExceeded: true, budgetExceeded: false},
] as BinarySizeRow[];
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'SUCCESS'}] as BuildbucketBuild[],
binarySizeRows
),
Category.WARNING
);
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'SUCCESS'}, {status: 'FAILURE'}] as BuildbucketBuild[],
binarySizeRows
),
Category.ERROR
);
// If no creep budgets were exceeded but at least budget was exceeded,
// category should be based on builds.
binarySizeRows = [
{creepExceeded: false, budgetExceeded: false},
{creepExceeded: false, budgetExceeded: true},
] as BinarySizeRow[];
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'SUCCESS'}] as BuildbucketBuild[],
binarySizeRows
),
Category.WARNING
);
assert.deepEqual(
element.getCheckResultCategory(
[{status: 'SUCCESS'}, {status: 'FAILURE'}] as BuildbucketBuild[],
binarySizeRows
),
Category.ERROR
);
});
});