Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/source/blockedsource.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import QuickLRU from 'quick-lru';
import { BaseSource } from './basesource.js';
import { AbortError, AggregateError, wait, zip } from '../utils.js';
import { BaseSource } from './basesource.js';

class Block {
/**
Expand Down Expand Up @@ -262,7 +262,7 @@ export class BlockedSource extends BaseSource {
top = Math.min(this.fileSize, top);
}
const blockIdLow = Math.floor(slice.offset / this.blockSize);
const blockIdHigh = Math.floor(top / this.blockSize);
const blockIdHigh = Math.floor((top - 1) / this.blockSize);
Copy link

@turien-pix4d turien-pix4d Aug 5, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The PR mentions that the bug was occurring when reading the last block and the blockSize is equal to slice.length.
From what I see, this change may also affect the behavior of reading the first block.

In a situation where slice.offset is 0 and blockSize is equal to slice.length, under the previous implementation blockIdHigh would have a value of 1; with the new implementation it will have a value of 0.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

that's a great observation! It is a bug as well. It was documented with a test at https://github.com/geotiffjs/geotiff.js/pull/474/files#diff-492ba43e11ce778a6c52157c7795c3a45b40213fb11b00df31dbb469614b4bd1R1325-R1335

In the case you describe, our data would fit exactly in one block — blockId 0.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I've generalized this case in the description as reading a block whose slice length is equal to the block size (so originally the code would also fail if you tried to read offset 2, blockSize 2, and slice length 2, etc.).

const sliceData = new ArrayBuffer(slice.length);
const sliceView = new Uint8Array(sliceData);

Expand Down
100 changes: 90 additions & 10 deletions test/geotiff.spec.js
Original file line number Diff line number Diff line change
@@ -1,20 +1,20 @@
/* eslint-disable no-unused-expressions */
import isNode from 'detect-node';
import { expect } from 'chai';
import http from 'http';
import serveStatic from 'serve-static';
import isNode from 'detect-node';
import finalhandler from 'finalhandler';
import http from 'http';
import AbortController from 'node-abort-controller';
import { dirname } from 'path';
import serveStatic from 'serve-static';
import { fileURLToPath } from 'url';

import { GeoTIFF, fromArrayBuffer, writeArrayBuffer, fromUrls, Pool } from '../dist-module/geotiff.js';
import { makeFetchSource } from '../dist-module/source/remote.js';
import { makeFileSource } from '../dist-module/source/file.js';
import { BlockedSource } from '../dist-module/source/blockedsource.js';
import { chunk, toArray, toArrayRecursively, range } from '../dist-module/utils.js';
import DataSlice from '../dist-module/dataslice.js';
import DataView64 from '../dist-module/dataview64.js';
import { fromArrayBuffer, fromUrls, GeoTIFF, Pool, writeArrayBuffer } from '../dist-module/geotiff.js';
import { BlockedSource } from '../dist-module/source/blockedsource.js';
import { makeFileSource } from '../dist-module/source/file.js';
import { makeFetchSource } from '../dist-module/source/remote.js';
import { chunk, range, toArray, toArrayRecursively } from '../dist-module/utils.js';

const __dirname = dirname(fileURLToPath(import.meta.url));

Expand Down Expand Up @@ -1290,9 +1290,8 @@ describe('writeTests', () => {
});

describe('BlockedSource Test', () => {
const blockedSource = new BlockedSource(null, { blockSize: 2 });

it('Groups only contiguous blocks as one group', () => {
const blockedSource = new BlockedSource(null, { blockSize: 2 });
const groups = blockedSource.groupBlocks([2, 0, 1, 3]);
expect(groups.length).to.equal(1);
const [group] = groups;
Expand All @@ -1302,6 +1301,7 @@ describe('BlockedSource Test', () => {
});

it('Groups two non-contiguous blocks as two groups', () => {
const blockedSource = new BlockedSource(null, { blockSize: 2 });
const groups = blockedSource.groupBlocks([0, 1, 7, 2, 8, 3]);
expect(groups.length).to.equal(2);
const [group1, group2] = groups;
Expand All @@ -1314,4 +1314,84 @@ describe('BlockedSource Test', () => {
expect(group2.length).to.equal(4);
expect(group2.blockIds).to.deep.equal([7, 8]);
});

it('Fetches all data in a single block', async () => {
const blockedSource = new BlockedSource(null, { blockSize: 2 });
blockedSource.source = { fileSize: null, fetch: async () => [{ data: new Uint8Array(2).buffer, offset: 0 }] };
const data = await blockedSource.fetch([{ offset: 0, length: 2 }]);
expect(data[0].byteLength).to.equal(2);
});

it('Fetches complete first block', async () => {
const blockedSource = new BlockedSource(null, { blockSize: 2 });
const data = new Uint8Array([1, 2, 3, 4]).buffer;
blockedSource.source = {
fileSize: null,
fetch: async () => [
{ data, offset: 0 },
] };
const result = await blockedSource.fetch([{ offset: 0, length: 2 }]);
expect(Array.from(new Uint8Array(result[0]))).to.deep.equal([1, 2]);
});

it('Fetches complete last block', async () => {
const blockedSource = new BlockedSource(null, { blockSize: 2 });
const data = new Uint8Array([1, 2, 3, 4]).buffer;
blockedSource.source = {
fileSize: null,
fetch: async () => [
{ data, offset: 0 },
] };
const result = await blockedSource.fetch([{ offset: 2, length: 2 }]);
expect(Array.from(new Uint8Array(result[0]))).to.deep.equal([3, 4]);
});

it('Fetches partial data from the beginning', async () => {
const blockedSource = new BlockedSource(null, { blockSize: 2 });
const data = new Uint8Array([1, 2, 3, 4]).buffer;
blockedSource.source = {
fileSize: null,
fetch: async () => [
{ data, offset: 0 },
] };
const result = await blockedSource.fetch([{ offset: 0, length: 1 }]);
expect(Array.from(new Uint8Array(result[0]))).to.deep.equal([1]);
});

it('Fetches partial data from the end', async () => {
const blockedSource = new BlockedSource(null, { blockSize: 2 });
const data = new Uint8Array([1, 2, 3, 4]).buffer;
blockedSource.source = {
fileSize: null,
fetch: async () => [
{ data, offset: 0 },
] };
const result = await blockedSource.fetch([{ offset: 3, length: 1 }]);
expect(Array.from(new Uint8Array(result[0]))).to.deep.equal([4]);
});

it('Fetches data from between the blocks', async () => {
const blockedSource = new BlockedSource(null, { blockSize: 2 });
const data = new Uint8Array([1, 2, 3, 4]).buffer;
blockedSource.source = {
fileSize: null,
fetch: async () => [
{ data, offset: 0 },
] };
const result = await blockedSource.fetch([{ offset: 1, length: 2 }]);
expect(Array.from(new Uint8Array(result[0]))).to.deep.equal([2, 3]);
});

it('Fetches multiple slices from between blocks', async () => {
const blockedSource = new BlockedSource(null, { blockSize: 2 });
const data = new Uint8Array([1, 2, 3, 4, 5, 6]).buffer;
blockedSource.source = {
fileSize: null,
fetch: async () => [
{ data, offset: 0 },
] };
const result = await blockedSource.fetch([{ offset: 1, length: 2 }, { offset: 3, length: 2 }]);
expect(Array.from(new Uint8Array(result[0]))).to.deep.equal([2, 3]);
expect(Array.from(new Uint8Array(result[1]))).to.deep.equal([4, 5]);
});
});