📦 kitten / multitars

📄 reader.test.ts
import { describe, it, expect } from 'vitest';
import { ReadableStreamBlockReader, readUntilBoundary } from '../reader';
import {
  utf8Encode,
  iterableToStream,
  streamChunks,
  streamText,
} from './utils';

// NOTE(@kitten): This is a pretty dense set of tests, but they're simply designed
// to reach 100% test coverage (pnpm test --coverage)
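//
// A rough sketch of the reader's contract as exercised here (inferred from the
// assertions below, not from separate documentation):
//
//   const reader = new ReadableStreamBlockReader(stream, blockSize);
//   let block: Uint8Array | null;
//   while ((block = await reader.read())) {
//     // `block` is exactly `blockSize` bytes. A trailing partial block is only
//     // returned by read(true); otherwise it stays buffered and can still be
//     // drained afterwards with pull().
//   }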
describe(ReadableStreamBlockReader, () => {
  it('allows block-wise reads from a byte stream emitting right-sized chunks', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 3, chunkSize: 4 })
    );
    const reader = new ReadableStreamBlockReader(stream, 4);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([0, 1, 2, 3]));
    await expect(reader.read()).resolves.toEqual(new Uint8Array([4, 5, 6, 7]));
    await expect(reader.read()).resolves.toEqual(
      new Uint8Array([8, 9, 10, 11])
    );
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(null);
  });

  it('allows block-wise reads from a byte stream emitting undersized chunks (even)', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 4, chunkSize: 3 })
    );
    const reader = new ReadableStreamBlockReader(stream, 4);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([0, 1, 2, 3]));
    await expect(reader.read()).resolves.toEqual(new Uint8Array([4, 5, 6, 7]));
    await expect(reader.read()).resolves.toEqual(
      new Uint8Array([8, 9, 10, 11])
    );
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(null);
  });

  it('allows block-wise reads from a byte stream emitting undersized chunks (uneven)', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 5, chunkSize: 3 })
    );
    const reader = new ReadableStreamBlockReader(stream, 4);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([0, 1, 2, 3]));
    await expect(reader.read()).resolves.toEqual(new Uint8Array([4, 5, 6, 7]));
    await expect(reader.read()).resolves.toEqual(
      new Uint8Array([8, 9, 10, 11])
    );
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(new Uint8Array([12, 13, 14]));
    await expect(reader.pull()).resolves.toEqual(null);
  });

  it('allows block-wise reads from a byte stream emitting oversized chunks (even)', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 3, chunkSize: 4 })
    );
    const reader = new ReadableStreamBlockReader(stream, 3);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([0, 1, 2]));
    await expect(reader.read()).resolves.toEqual(new Uint8Array([3, 4, 5]));
    await expect(reader.read()).resolves.toEqual(new Uint8Array([6, 7, 8]));
    await expect(reader.read()).resolves.toEqual(new Uint8Array([9, 10, 11]));
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(null);
  });

  it('allows block-wise reads from a byte stream emitting oversized chunks (uneven)', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 2, chunkSize: 4 })
    );
    const reader = new ReadableStreamBlockReader(stream, 3);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([0, 1, 2]));
    await expect(reader.read()).resolves.toEqual(new Uint8Array([3, 4, 5]));
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(new Uint8Array([6, 7]));
    await expect(reader.pull()).resolves.toEqual(null);
  });

  it('allows block-wise reads from a byte stream emitting multiply-oversized chunks (even)', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 2, chunkSize: 5 })
    );
    const reader = new ReadableStreamBlockReader(stream, 2);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([0, 1]));
    await expect(reader.read()).resolves.toEqual(new Uint8Array([2, 3]));
    await expect(reader.read()).resolves.toEqual(new Uint8Array([4, 5]));
    await expect(reader.read()).resolves.toEqual(new Uint8Array([6, 7]));
    await expect(reader.read()).resolves.toEqual(new Uint8Array([8, 9]));
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(null);
  });

  it('allows partial final blocks to be returned when `true` is passed to read()', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 2, chunkSize: 4 })
    );
    const reader = new ReadableStreamBlockReader(stream, 3);
    await expect(reader.read(true)).resolves.toEqual(new Uint8Array([0, 1, 2]));
    await expect(reader.read(true)).resolves.toEqual(new Uint8Array([3, 4, 5]));
    await expect(reader.read(true)).resolves.toEqual(new Uint8Array([6, 7]));
    await expect(reader.read(true)).resolves.toEqual(null);
  });

  it('allows block-wise reads from a byte stream emitting multiply-oversized chunks (uneven)', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 1, chunkSize: 5 })
    );
    const reader = new ReadableStreamBlockReader(stream, 2);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([0, 1]));
    await expect(reader.read()).resolves.toEqual(new Uint8Array([2, 3]));
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(new Uint8Array([4]));
  });

  it('allows block-wise reads from a byte stream emitting multiply-oversized chunks (single)', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 1, chunkSize: 10 })
    );
    const reader = new ReadableStreamBlockReader(stream, 4);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([0, 1, 2, 3]));
    await expect(reader.read()).resolves.toEqual(new Uint8Array([4, 5, 6, 7]));
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(new Uint8Array([8, 9]));
  });

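  // skip(count) appears to discard up to `count` bytes and resolve with however
  // many of those bytes could NOT be skipped (0 when enough data was available;
  // see the "excessive number of bytes" case below, where skip(8) over 6
  // remaining bytes resolves to 2). This is an inference from the assertions,
  // not documented behaviour.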
  it('allows skipping bytes for undersized chunks at end of blocks', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 4, chunkSize: 2 })
    );
    const reader = new ReadableStreamBlockReader(stream, 4);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([0, 1, 2, 3]));
    await expect(reader.skip(2)).resolves.toBe(0);
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(new Uint8Array([6, 7]));
  });

  it('allows skipping bytes for undersized chunks at beginning of blocks', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 4, chunkSize: 2 })
    );
    const reader = new ReadableStreamBlockReader(stream, 4);
    await expect(reader.skip(2)).resolves.toBe(0);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([2, 3, 4, 5]));
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(new Uint8Array([6, 7]));
  });

  it('allows skipping bytes for oversized chunks at end of blocks', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 1, chunkSize: 4 })
    );
    const reader = new ReadableStreamBlockReader(stream, 2);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([0, 1]));
    await expect(reader.skip(2)).resolves.toBe(0);
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(null);
  });

  it('allows skipping bytes for oversized chunks at beginning of blocks', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 1, chunkSize: 4 })
    );
    const reader = new ReadableStreamBlockReader(stream, 2);
    await expect(reader.skip(2)).resolves.toBe(0);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([2, 3]));
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(null);
  });

  it('allows skipping bytes for multiply-oversized chunks', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 1, chunkSize: 6 })
    );
    const reader = new ReadableStreamBlockReader(stream, 2);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([0, 1]));
    await expect(reader.skip(2)).resolves.toBe(0);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([4, 5]));
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(null);
  });

  it('allows skipping an uneven number of bytes', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 3, chunkSize: 2 })
    );
    const reader = new ReadableStreamBlockReader(stream, 4);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([0, 1, 2, 3]));
    await expect(reader.skip(2)).resolves.toBe(0);
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(null);
  });

  it('allows skipping an excessive number of bytes', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 3, chunkSize: 2 })
    );
    const reader = new ReadableStreamBlockReader(stream, 4);
    await expect(reader.skip(8)).resolves.toBe(2);
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(null);
  });

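  // pull(maxSize?) seems to bypass block re-chunking: it resolves with whatever
  // is buffered next, capped at `maxSize` when given (so a 4-byte source chunk
  // is returned as-is under pull(8)) and at the block size otherwise. Again,
  // inferred from the assertions below rather than from documentation.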
  it('allows pulling chunks as-is with matching input size', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 2, chunkSize: 4 })
    );
    const reader = new ReadableStreamBlockReader(stream, 2);
    await expect(reader.pull(8)).resolves.toEqual(new Uint8Array([0, 1, 2, 3]));
    await expect(reader.pull(8)).resolves.toEqual(new Uint8Array([4, 5, 6, 7]));
    await expect(reader.pull(8)).resolves.toEqual(null);
  });

  it('allows pulling chunks as-is with matching output size', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 2, chunkSize: 4 })
    );
    const reader = new ReadableStreamBlockReader(stream, 2);
    await expect(reader.pull(8)).resolves.toEqual(new Uint8Array([0, 1, 2, 3]));
    await expect(reader.pull()).resolves.toEqual(new Uint8Array([4, 5]));
    await expect(reader.pull()).resolves.toEqual(new Uint8Array([6, 7]));
    await expect(reader.pull()).resolves.toEqual(null);
  });

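  // pushback(bytes) prepends a buffer so it is returned again by the next
  // read(); multiple pushbacks stack in front of the stream (most recent first)
  // and are re-blocked together with the remaining data, as the two tests below
  // show.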
  it('respects pushed back buffers', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 1, chunkSize: 8 })
    );
    const reader = new ReadableStreamBlockReader(stream, 4);
    let chunk: Uint8Array | null;
    expect((chunk = await reader.read())).toEqual(new Uint8Array([0, 1, 2, 3]));
    reader.pushback(chunk!);
    await expect(reader.read()).resolves.toEqual(chunk);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([4, 5, 6, 7]));
    await expect(reader.read()).resolves.toEqual(null);
    await expect(reader.pull()).resolves.toEqual(null);
  });

  it('combines pushed back buffers with other buffers', async () => {
    const stream = iterableToStream(
      streamChunks({ numChunks: 1, chunkSize: 8 })
    );
    const reader = new ReadableStreamBlockReader(stream, 4);
    let chunk: Uint8Array | null;
    expect((chunk = await reader.read())).toEqual(new Uint8Array([0, 1, 2, 3]));
    reader.pushback(new Uint8Array([1, 2, 3]));
    reader.pushback(new Uint8Array([0]));
    await expect(reader.read()).resolves.toEqual(chunk);
    await expect(reader.read()).resolves.toEqual(new Uint8Array([4, 5, 6, 7]));
    await expect(reader.pull()).resolves.toEqual(null);
  });
});

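// readUntilBoundary(reader, boundary) is consumed as an async iterator here: it
// yields the bytes preceding the boundary (which may span chunk borders), stops
// once the boundary itself has been consumed, and yields a final `null` if the
// stream ends before a boundary is found. Everything after the boundary remains
// readable via reader.pull(). This summary is derived from the tests below, not
// from the function's own documentation.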
describe(readUntilBoundary, () => {
  const BOUNDARY = '--boundary\r\n';

  it('throws if chunk size is smaller than boundary', async () => {
    await expect(async () => {
      const stream = iterableToStream(streamText('', 1));
      const reader = new ReadableStreamBlockReader(stream, 4);
      for await (const _chunk of readUntilBoundary(
        reader,
        utf8Encode(BOUNDARY)
      )) {
        // noop
      }
    }).rejects.toThrow(/Boundary must be shorter/);
  });

  it('returns bytes until boundary, even between two chunks', async () => {
    const stream = iterableToStream(
      streamText(`once upon a time...${BOUNDARY}...the end of the story`, 4)
    );
    const reader = new ReadableStreamBlockReader(stream, 12);

    // Reads data until a boundary across two chunks
    let output = '';
    const decoder = new TextDecoder();
    for await (const chunk of readUntilBoundary(reader, utf8Encode(BOUNDARY))) {
      expect(chunk).not.toBe(null);
      output += decoder.decode(chunk!);
    }
    expect(output).toBe('once upon a time...');

    // Continues exposing data after the boundary:
    let after = '';
    let chunk: Uint8Array | null;
    while ((chunk = await reader.pull())) after += decoder.decode(chunk);
    expect(after).toBe('...the end of the story');
  });

  it('handles boundary-like strings', async () => {
    const stream = iterableToStream(
      streamText(
        `once upon a time...${BOUNDARY.slice(0, -2)}${BOUNDARY}...the end of the story`,
        4
      )
    );
    const reader = new ReadableStreamBlockReader(stream, 12);

    // Reads data until a boundary across two chunks
    let output = '';
    const decoder = new TextDecoder();
    for await (const chunk of readUntilBoundary(reader, utf8Encode(BOUNDARY))) {
      expect(chunk).not.toBe(null);
      output += decoder.decode(chunk!);
    }
    expect(output).toBe(`once upon a time...${BOUNDARY.slice(0, -2)}`);

    // Continues exposing data after the boundary:
    let after = '';
    let chunk: Uint8Array | null;
    while ((chunk = await reader.pull())) after += decoder.decode(chunk);
    expect(after).toBe('...the end of the story');
  });

  it('returns immediately when boundary is first item', async () => {
    const stream = iterableToStream(
      streamText(BOUNDARY + 'test', BOUNDARY.length)
    );
    const reader = new ReadableStreamBlockReader(stream, BOUNDARY.length);
    let chunks = 0;
    for await (const chunk of readUntilBoundary(reader, utf8Encode(BOUNDARY))) {
      expect(chunk).toEqual(new Uint8Array([]));
      chunks++;
    }
    expect(chunks).toBe(1);
    expect(await reader.pull()).toEqual(new Uint8Array(utf8Encode('test')));
  });

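  // The final `null` yield appears to let a consumer distinguish "boundary
  // reached" from "stream ended without a boundary"; the snapshots below capture
  // both the data chunks and that terminating null.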
  it('aborts with null yield for EOF', async () => {
    const stream = iterableToStream(streamText('some longer string', 4));
    const reader = new ReadableStreamBlockReader(stream, 12);
    const chunks: (Uint8Array | null)[] = [];
    for await (const chunk of readUntilBoundary(reader, utf8Encode(BOUNDARY))) {
      if (chunk) {
        const copy = new Uint8Array(chunk.byteLength);
        copy.set(chunk);
        chunks.push(copy);
      } else {
        chunks.push(chunk);
      }
    }
    expect(chunks).toMatchInlineSnapshot(`
      [
        Uint8Array [
          115,
          111,
          109,
          101,
          32,
          108,
          111,
          110,
          103,
          101,
          114,
          32,
        ],
        Uint8Array [
          115,
          116,
          114,
          105,
          110,
          103,
        ],
        null,
      ]
    `);
  });

  it('aborts with null yield for EOF while looking at partial boundary', async () => {
    const stream = iterableToStream(
      streamText(`some longer string${BOUNDARY.slice(0, 4)}`, 4)
    );
    const reader = new ReadableStreamBlockReader(stream, 12);
    const chunks: (Uint8Array | null)[] = [];
    for await (const chunk of readUntilBoundary(reader, utf8Encode(BOUNDARY))) {
      if (chunk) {
        const copy = new Uint8Array(chunk.byteLength);
        copy.set(chunk);
        chunks.push(copy);
      } else {
        chunks.push(chunk);
      }
    }
    expect(chunks).toMatchInlineSnapshot(`
      [
        Uint8Array [
          115,
          111,
          109,
          101,
          32,
          108,
          111,
          110,
          103,
          101,
          114,
          32,
        ],
        null,
      ]
    `);
  });

  it('handles randomized boundaries', async () => {
    function rand(length: number): string {
      let result = '';
      const characters =
        'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
      const charactersLength = characters.length;
      for (let i = 0; i < length; i++)
        result += characters.charAt(
          Math.floor(Math.random() * charactersLength)
        );
      return result;
    }

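    // Fuzz the boundary scanner: random-length ASCII prefixes and suffixes
    // around the boundary, streamed in 4-byte chunks, should always split
    // cleanly at the boundary regardless of where it lands relative to chunk
    // and block borders.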
    for (let ITERATION = 0; ITERATION < 500; ITERATION++) {
      const before = rand(Math.round(Math.random() * 100));
      const after = rand(Math.round(Math.random() * 100));
      const stream = iterableToStream(
        streamText(`${before}${BOUNDARY}${after}`, 4)
      );
      const reader = new ReadableStreamBlockReader(stream, 12);
      // Reads data until a boundary across two chunks
      let actual = '';
      const decoder = new TextDecoder();
      for await (const chunk of readUntilBoundary(reader, utf8Encode(BOUNDARY)))
        actual += decoder.decode(chunk!);
      expect(actual).toBe(before);

      // Continues exposing data after the boundary:
      actual = '';
      let chunk: Uint8Array | null;
      while ((chunk = await reader.pull())) actual += decoder.decode(chunk);

      expect(actual).toBe(after);
    }
  });
});