Skip to content

Commit cfc847d

Browse files
panvaaduh95
authored and committed
test: update WPT compression to ae05f5cb53
PR-URL: #62107 Reviewed-By: Yagiz Nizipli <yagiz@nizipli.com> Reviewed-By: Matteo Collina <matteo.collina@gmail.com> Reviewed-By: Mattias Buelens <mattias@buelens.com> Reviewed-By: René <contact.9a5d6388@renegade334.me.uk>
1 parent 19efe60 commit cfc847d

33 files changed

+525
-727
lines changed

test/fixtures/wpt/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ See [test/wpt](../../wpt/README.md) for information on how these tests are run.
1111
Last update:
1212

1313
- common: https://github.com/web-platform-tests/wpt/tree/dbd648158d/common
14-
- compression: https://github.com/web-platform-tests/wpt/tree/67880a4eb8/compression
14+
- compression: https://github.com/web-platform-tests/wpt/tree/ae05f5cb53/compression
1515
- console: https://github.com/web-platform-tests/wpt/tree/e48251b778/console
1616
- dom/abort: https://github.com/web-platform-tests/wpt/tree/dc928169ee/dom/abort
1717
- dom/events: https://github.com/web-platform-tests/wpt/tree/0a811c5161/dom/events
Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
// META: global=window,worker,shadowrealm
// META: script=resources/formats.js

'use strict';

// Chunks that are not valid BufferSource inputs. Writing any of them to a
// CompressionStream must error the stream with a TypeError.
const badChunks = [
  { name: 'undefined', value: undefined },
  { name: 'null', value: null },
  { name: 'numeric', value: 3.14 },
  { name: 'object, not BufferSource', value: {} },
  { name: 'array', value: [65] },
  {
    name: 'SharedArrayBuffer',
    // Use a getter to postpone construction so that all tests don't fail where
    // SharedArrayBuffer is not yet implemented.
    get value() {
      // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
      return new WebAssembly.Memory({ shared: true, initial: 1, maximum: 1 }).buffer;
    }
  },
  {
    name: 'shared Uint8Array',
    get value() {
      // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
      return new Uint8Array(new WebAssembly.Memory({ shared: true, initial: 1, maximum: 1 }).buffer)
    }
  },
];

for (const format of formats) {
  for (const badChunk of badChunks) {
    promise_test(async t => {
      const stream = new CompressionStream(format);
      const reader = stream.readable.getReader();
      const writer = stream.writable.getWriter();
      // Start the write first, then the read, before awaiting either, so the
      // rejection of one cannot mask the other.
      const pendingWrite = writer.write(badChunk.value);
      const pendingRead = reader.read();
      await promise_rejects_js(t, TypeError, pendingWrite, 'write should reject');
      await promise_rejects_js(t, TypeError, pendingRead, 'read should reject');
    }, `chunk of type ${badChunk.name} should error the stream for ${format}`);
  }
}

test/fixtures/wpt/compression/compression-bad-chunks.tentative.any.js

Lines changed: 0 additions & 74 deletions
This file was deleted.

test/fixtures/wpt/compression/compression-constructor-error.tentative.any.js renamed to test/fixtures/wpt/compression/compression-constructor-error.any.js

File renamed without changes.

test/fixtures/wpt/compression/compression-including-empty-chunk.tentative.any.js renamed to test/fixtures/wpt/compression/compression-including-empty-chunk.any.js

Lines changed: 11 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
// META: global=window,worker,shadowrealm
22
// META: script=third_party/pako/pako_inflate.min.js
3+
// META: script=resources/decompress.js
4+
// META: script=resources/formats.js
35
// META: timeout=long
46

57
'use strict';
@@ -42,22 +44,13 @@ const chunkLists = [
4244
];
4345
const expectedValue = new TextEncoder().encode('HelloHello');
4446

45-
for (const chunkList of chunkLists) {
46-
promise_test(async t => {
47-
const compressedData = await compressChunkList(chunkList, 'deflate');
48-
// decompress with pako, and check that we got the same result as our original string
49-
assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
50-
}, `the result of compressing [${chunkList}] with deflate should be 'HelloHello'`);
51-
52-
promise_test(async t => {
53-
const compressedData = await compressChunkList(chunkList, 'gzip');
54-
// decompress with pako, and check that we got the same result as our original string
55-
assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
56-
}, `the result of compressing [${chunkList}] with gzip should be 'HelloHello'`);
57-
58-
promise_test(async t => {
59-
const compressedData = await compressChunkList(chunkList, 'deflate-raw');
60-
// decompress with pako, and check that we got the same result as our original string
61-
assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
62-
}, `the result of compressing [${chunkList}] with deflate-raw should be 'HelloHello'`);
47+
for (const format of formats) {
48+
for (const chunkList of chunkLists) {
49+
promise_test(async t => {
50+
const compressedData = await compressChunkList(chunkList, format);
51+
const decompressedData = await decompressDataOrPako(compressedData, format);
52+
// check that we got the same result as our original string
53+
assert_array_equals(expectedValue, decompressedData, 'value should match');
54+
}, `the result of compressing [${chunkList}] with ${format} should be 'HelloHello'`);
55+
}
6356
}
Lines changed: 10 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
// META: global=window,worker,shadowrealm
22
// META: script=third_party/pako/pako_inflate.min.js
33
// META: script=resources/concatenate-stream.js
4+
// META: script=resources/decompress.js
5+
// META: script=resources/formats.js
46
// META: timeout=long
57

68
'use strict';
@@ -21,21 +23,11 @@ const fullData = new TextEncoder().encode(JSON.stringify(Array.from({ length: 10
2123
const data = fullData.subarray(0, 35_579);
2224
const expectedValue = data;
2325

24-
promise_test(async t => {
25-
const compressedData = await compressData(data, 'deflate');
26-
// decompress with pako, and check that we got the same result as our original string
27-
assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
28-
}, `deflate compression with large flush output`);
29-
30-
promise_test(async t => {
31-
const compressedData = await compressData(data, 'gzip');
32-
// decompress with pako, and check that we got the same result as our original string
33-
assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
34-
}, `gzip compression with large flush output`);
35-
36-
promise_test(async t => {
37-
const compressedData = await compressData(data, 'deflate-raw');
38-
// decompress with pako, and check that we got the same result as our original string
39-
assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
40-
}, `deflate-raw compression with large flush output`);
41-
26+
for (const format of formats) {
27+
promise_test(async t => {
28+
const compressedData = await compressData(data, format);
29+
const decompressedData = await decompressDataOrPako(compressedData, format);
30+
// check that we got the same result as our original string
31+
assert_array_equals(decompressedData, expectedValue, 'value should match');
32+
}, `${format} compression with large flush output`);
33+
}
Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: script=resources/decompress.js
// META: script=resources/formats.js
// META: timeout=long

'use strict';

// This test asserts that compressing multiple chunks should work.

// Example: ('Hello', 3) => TextEncoder().encode('HelloHelloHello')
function makeExpectedChunk(input, numberOfChunks) {
  return new TextEncoder().encode(input.repeat(numberOfChunks));
}

// Example: ('Hello', 3, 'deflate') => compress ['Hello', 'Hello', 'Hello']
async function compressMultipleChunks(input, numberOfChunks, format) {
  const compressor = new CompressionStream(format);
  const writer = compressor.writable.getWriter();
  const encodedChunk = new TextEncoder().encode(input);
  for (let i = 0; i < numberOfChunks; ++i) {
    writer.write(encodedChunk);
  }
  const closePromise = writer.close();

  // Drain the readable side, collecting every compressed output chunk.
  const reader = compressor.readable.getReader();
  const pieces = [];
  let totalSize = 0;
  for (;;) {
    const result = await reader.read();
    if (result.done) {
      break;
    }
    pieces.push(result.value);
    totalSize += result.value.byteLength;
  }
  await closePromise;

  // Concatenate the collected chunks into a single Uint8Array.
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const piece of pieces) {
    concatenated.set(piece, offset);
    offset += piece.byteLength;
  }
  return concatenated;
}

const hello = 'Hello';

for (const format of formats) {
  for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) {
    promise_test(async t => {
      const compressedData = await compressMultipleChunks(hello, numberOfChunks, format);
      const decompressedData = await decompressDataOrPako(compressedData, format);
      const expectedValue = makeExpectedChunk(hello, numberOfChunks);
      // check that we got the same result as our original string
      assert_array_equals(decompressedData, expectedValue, 'value should match');
    }, `compressing ${numberOfChunks} chunks with ${format} should work`);
  }
}

test/fixtures/wpt/compression/compression-multiple-chunks.tentative.any.js

Lines changed: 0 additions & 67 deletions
This file was deleted.
Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
// META: global=window,worker,shadowrealm
// META: script=resources/formats.js

'use strict';

// This test asserts that compressed data length is shorter than the original
// data length. If the input is extremely small, the compressed data may be
// larger than the original data.

const LARGE_FILE = '/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm';

// Push `input` through a CompressionStream for `format` and return the
// complete compressed output as a single Uint8Array.
async function compressArrayBuffer(input, format) {
  const stream = new CompressionStream(format);
  const writer = stream.writable.getWriter();
  writer.write(input);
  const closePromise = writer.close();

  // Read every compressed chunk, tracking the total byte count as we go.
  const reader = stream.readable.getReader();
  const outputChunks = [];
  let totalSize = 0;
  for (;;) {
    const { value, done } = await reader.read();
    if (done) {
      break;
    }
    outputChunks.push(value);
    totalSize += value.byteLength;
  }
  await closePromise;

  // Stitch the chunks together into one contiguous buffer.
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const chunk of outputChunks) {
    concatenated.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return concatenated;
}

for (const format of formats) {
  promise_test(async () => {
    const response = await fetch(LARGE_FILE);
    const buffer = await response.arrayBuffer();
    const bufferView = new Uint8Array(buffer);
    const originalLength = bufferView.length;
    const compressedData = await compressArrayBuffer(bufferView, format);
    const compressedLength = compressedData.length;
    assert_less_than(compressedLength, originalLength, 'output should be smaller');
  }, `the length of ${format} data should be shorter than that of the original data`);
}

0 commit comments

Comments
 (0)