
Commit 72eaf55

Fix random-read in BufferTokenizer
1 parent 2d8acf2 commit 72eaf55

2 files changed: +51 −43 lines changed

lib/BufferTokenizer.ts

Lines changed: 0 additions & 3 deletions

@@ -25,9 +25,6 @@ export class BufferTokenizer extends AbstractTokenizer implements IRandomAccessTokenizer
   public async readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number> {

     if (options?.position) {
-      if (options.position < this.position) {
-        throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
-      }
       this.position = options.position;
     }

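The hunk above removes the guard that rejected an options.position smaller than the current read position, so a BufferTokenizer can now seek backwards within its buffer. A minimal sketch of the newly allowed call pattern (test-style relative import assumed; application code would import from the published package):

import { fromBuffer } from '../lib/index.js';

async function demoBackwardRead(): Promise<void> {
  const tokenizer = fromBuffer(new TextEncoder().encode('xxTAGxx'));
  await tokenizer.ignore(7); // advance the read position to the end of the buffer
  const tag = new Uint8Array(3);
  // Before this commit the next call threw:
  // '`options.position` must be equal or greater than `tokenizer.position`'
  await tokenizer.readBuffer(tag, {position: 2}); // seek backwards and read 3 bytes
  console.log(new TextDecoder().decode(tag)); // -> 'TAG'
}
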
test/test.ts

Lines changed: 51 additions & 40 deletions
@@ -7,7 +7,14 @@ import { fileURLToPath } from 'node:url';
 import * as Token from 'token-types';
 import { assert, expect, use } from 'chai';
 import chaiAsPromised from 'chai-as-promised';
-import { fromStream, fromWebStream, fromFile, fromBuffer, type ITokenizer } from '../lib/index.js';
+import {
+  fromStream,
+  fromWebStream,
+  fromFile,
+  fromBuffer,
+  type ITokenizer,
+  type IRandomAccessTokenizer
+} from '../lib/index.js';
 import Path from 'node:path';
 import { EndOfStreamError } from 'peek-readable';


@@ -27,6 +34,7 @@ interface ITokenizerTest {
   loadTokenizer: (testFile: string, delay?: number, abortSignal?: AbortSignal) => Promise<ITokenizer>;
   hasFileInfo: boolean;
   abortable: boolean;
+  randomRead: boolean;
 }

 function getResourcePath(testFile: string) {
@@ -50,22 +58,25 @@ describe('Matrix tests', () => {
        return fromStream(delayedStream, {abortSignal});
      },
      hasFileInfo: true,
-     abortable: true
+     abortable: true,
+     randomRead: false
    }, {
      name: 'fromWebStream()',
      loadTokenizer: async (testFile, delay, abortSignal?: AbortSignal) => {
        const fileStream = await makeReadableByteFileStream(Path.join(__dirname, 'resources', testFile), delay);
        return fromWebStream(fileStream.stream, {onClose: () => fileStream.closeFile(), abortSignal});
      },
      hasFileInfo: false,
-     abortable: true
+     abortable: true,
+     randomRead: false
    }, {
      name: 'fromFile()',
      loadTokenizer: async testFile => {
        return fromFile(Path.join(__dirname, 'resources', testFile));
      },
      hasFileInfo: true,
-     abortable: false
+     abortable: false,
+     randomRead: true
    }, {
      name: 'fromBuffer()',
      loadTokenizer: async testFile => {
@@ -74,7 +85,8 @@ describe('Matrix tests', () => {
        });
      },
      hasFileInfo: true,
-     abortable: false
+     abortable: false,
+     randomRead: true
    }
  ];

@@ -927,6 +939,40 @@ describe('Matrix tests', () => {

   }); // End of test "Tokenizer-types"
 });
+
+  describe('Random-read-access', async () => {
+
+    tokenizerTests
+      .filter(tokenizerType => tokenizerType.randomRead)
+      .forEach(tokenizerType => {
+        describe(tokenizerType.name, () => {
+
+          it('Read ID3v1 header at the end of the file', async () => {
+            const tokenizer = await tokenizerType.loadTokenizer('id3v1.mp3') as IRandomAccessTokenizer;
+            assert.isTrue(tokenizer.supportsRandomAccess(), 'Tokenizer should support random reads');
+            const id3HeaderSize = 128;
+            const id3Header = new Uint8Array(id3HeaderSize);
+            await tokenizer.readBuffer(id3Header,{position: tokenizer.fileInfo.size - id3HeaderSize});
+            const id3Tag = new TextDecoder('utf-8').decode(id3Header.subarray(0, 3));
+            assert.strictEqual(id3Tag, 'TAG');
+            assert.strictEqual(tokenizer.position, tokenizer.fileInfo.size, 'Tokenizer position should be at the end of the file');
+            tokenizer.setPosition(0);
+            assert.strictEqual(tokenizer.position, 0, 'Tokenizer position should be at the beginning of the file');
+          });
+
+          it('Be able to random read from position 0', async () => {
+            const tokenizer = await fromFile(getResourcePath('id3v1.mp3'));
+            // Advance tokenizer.position
+            await tokenizer.ignore(20);
+            const mpegSync = new Uint8Array(2);
+            await tokenizer.readBuffer(mpegSync,{position: 0});
+            assert.strictEqual(mpegSync[0], 255, 'First sync byte');
+            assert.strictEqual(mpegSync[1], 251, 'Second sync byte');
+          });
+        });
+      });
+
+  });
 });

 describe('fromStream with mayBeLess flag', () => {
@@ -991,38 +1037,3 @@ it('should release stream after close', async () => {
   await webStreamTokenizer.close();
   assert.isFalse(stream.locked, 'stream is unlocked after closing tokenizer');
 });
-
-describe('Random-read-acccess', async () => {
-
-  it('Read ID3v1 header at the end of the file', async () => {
-
-    const tokenizer = await fromFile(getResourcePath('id3v1.mp3'));
-    try {
-      const id3HeaderSize = 128;
-      const id3Header = new Uint8Array(id3HeaderSize);
-      await tokenizer.readBuffer(id3Header,{position: tokenizer.fileInfo.size - id3HeaderSize});
-      const id3Tag = new TextDecoder('utf-8').decode(id3Header.subarray(0, 3));
-      assert.strictEqual(id3Tag, 'TAG');
-      assert.strictEqual(tokenizer.position, tokenizer.fileInfo.size, 'Tokenizer position should be at the end of the file');
-      tokenizer.setPosition(0);
-      assert.strictEqual(tokenizer.position, 0, 'Tokenizer position should be at the beginning of the file');
-    }
-    finally {
-      await tokenizer.close();
-    }
-  });
-
-  it('Be able to random read from position 0', async () => {
-    const tokenizer = await fromFile(getResourcePath('id3v1.mp3'));
-    // Advance tokenizer.position
-    await tokenizer.ignore(20);
-    const mpegSync = new Uint8Array(2);
-    await tokenizer.readBuffer(mpegSync,{position: 0});
-    assert.strictEqual(mpegSync[0], 255, 'First sync byte');
-    assert.strictEqual(mpegSync[1], 251, 'Second sync byte');
-  });
-
-});
-
-
-
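The relocated tests now run against every tokenizer in the matrix that advertises randomRead, exercising the full IRandomAccessTokenizer surface: supportsRandomAccess(), readBuffer with an explicit position, setPosition(), and fileInfo.size. A usage sketch combining the same calls (the helper name is hypothetical; the individual calls mirror the tests above):

import { fromFile, type IRandomAccessTokenizer } from '../lib/index.js';

// Hypothetical helper mirroring the new tests: read the 128-byte ID3v1 tag
// from the end of a file, then rewind so sequential parsing can continue.
async function readId3v1Tag(path: string): Promise<Uint8Array | null> {
  const tokenizer = await fromFile(path) as IRandomAccessTokenizer;
  try {
    if (!tokenizer.supportsRandomAccess()) return null;
    const tag = new Uint8Array(128);
    await tokenizer.readBuffer(tag, {position: tokenizer.fileInfo.size - 128});
    tokenizer.setPosition(0); // rewind, as the test does after the random read
    const marker = new TextDecoder('utf-8').decode(tag.subarray(0, 3));
    return marker === 'TAG' ? tag : null;
  } finally {
    await tokenizer.close();
  }
}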
