From 565991eb5de029ab81396c616cc5b7be7453e324 Mon Sep 17 00:00:00 2001 From: Ib Green Date: Sun, 12 Apr 2026 08:58:50 -0400 Subject: [PATCH 1/3] chore: More jest assertions --- modules/crypto/test/crypto-worker.spec.ts | 27 ++- modules/crypto/test/crypto.spec.ts | 34 ++-- modules/crypto/test/lib/crc32c-hash.spec.ts | 5 +- modules/crypto/test/lib/crypto-hash.spec.ts | 36 ++-- modules/crypto/test/lib/md5-wasm.spec.ts | 16 +- .../test/lib/utils/digest-utils.spec.ts | 17 +- .../geoarrow/test/geoarrow-metadata.spec.ts | 8 +- .../geoarrow/test/get-arrow-bounds.spec.ts | 15 +- .../test/get-geoarrow-geometry-info.spec.ts | 11 +- .../convert-binary-geometry-to-wkb.spec.ts | 26 +-- .../wkb/convert-geometry-to-twkb.spec.ts | 84 +++++----- .../wkb/convert-geometry-to-wkb.spec.ts | 26 +-- .../wkb/convert-geometry-to-wkt.spec.ts | 13 +- .../wkb/convert-hex-twkb-to-geometry.spec.ts | 16 +- .../wkb/convert-wkb-to-geometry.spec.ts | 26 ++- .../wkb/convert-wkt-to-geometry.spec.ts | 147 +++++++---------- modules/gis/test/utils/hex-transcoder.spec.ts | 24 ++- .../gis/test/wkt-crs/parse-wkt-crs.spec.ts | 132 ++++++--------- .../lib/mapbox-vt-pbf/to-vector-tile.spec.js | 40 ++--- .../vector-tile-roundtrip.spec.js | 139 ++++++++-------- .../mvt/test/lib/parse-mvt-from-pbf.spec.ts | 112 +++++++------ .../mvt/test/lib/utils/geometry-utils.spec.ts | 38 ++--- .../lib/vector-tiler/clip-features.spec.ts | 39 ++--- .../lib/vector-tiler/simplify-path.spec.ts | 15 +- modules/mvt/test/mvt-loader.spec.ts | 117 ++++++------- modules/mvt/test/mvt-source.spec.ts | 85 ++++------ modules/mvt/test/mvt-writer.spec.ts | 21 +-- .../mvt/test/table-tile-source-full.spec.ts | 28 ++-- .../table-tile-source-multi-world.spec.ts | 30 ++-- modules/mvt/test/table-tile-source.spec.ts | 38 ++--- modules/mvt/test/tilejson-loader.spec.ts | 24 +-- modules/pmtiles/test/pmtiles-loader.spec.ts | 12 +- modules/pmtiles/test/pmtiles-source.spec.ts | 48 ++---- modules/wkt/test/hex-wkb-loader.spec.ts | 26 +-- 
modules/wkt/test/twkb-loader.spec.ts | 12 +- modules/wkt/test/twkb-writer.spec.ts | 84 +++++----- modules/wkt/test/wkb-loader.spec.ts | 26 ++- modules/wkt/test/wkb-writer.spec.ts | 26 +-- modules/wkt/test/wkt-crs-loader.spec.ts | 132 ++++++--------- modules/wkt/test/wkt-loader.spec.ts | 154 ++++++++---------- modules/wkt/test/wkt-writer.spec.ts | 13 +- modules/wms/test/arcgis/arcgis-server.spec.ts | 110 ++++++------- .../test/csw/csw-capabilities-loader.spec.ts | 10 +- .../wms/test/csw/csw-domain-loader.spec.ts | 24 +-- .../wms/test/csw/csw-records-loader.spec.ts | 40 ++--- modules/wms/test/gml/gml-loader.spec.ts | 15 +- .../test/wfs/wfs-capabilities-loader.spec.ts | 141 +++++++--------- .../test/wms/wms-capabilities-loader.spec.ts | 67 ++++---- modules/wms/test/wms/wms-error-loader.spec.ts | 6 +- .../test/wms/wms-feature-info-loader.spec.ts | 74 ++++----- .../wms/wms-layer-description-loader.spec.ts | 16 +- modules/wms/test/wms/wms-source.spec.ts | 87 +++++----- .../wmts/wmts-capabilities-loader.spec.ts | 88 +++++----- test/common/conformance.ts | 62 +++++-- 54 files changed, 1161 insertions(+), 1501 deletions(-) diff --git a/modules/crypto/test/crypto-worker.spec.ts b/modules/crypto/test/crypto-worker.spec.ts index 3640e78659..d825fa93bb 100644 --- a/modules/crypto/test/crypto-worker.spec.ts +++ b/modules/crypto/test/crypto-worker.spec.ts @@ -2,77 +2,72 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {processOnWorker, isBrowser, WorkerFarm} from '@loaders.gl/worker-utils'; import {CryptoWorker, CryptoJSWorker} from '@loaders.gl/crypto'; import {getBinaryData} from './test-utils/test-utils'; -test('CryptoWorker', async t => { +test('CryptoWorker', async () => { const {binaryData} = getBinaryData(); - t.equal(binaryData.byteLength, 100000, 'Length correct'); + expect(binaryData.byteLength, 'Length correct').toBe(100000); let hash = await 
processOnWorker(CryptoWorker, binaryData.slice(0), { operation: 'crc32', _workerType: 'test' }); - t.equal(hash, 'khuskQ==', 'CRC32 Hash correct'); + expect(hash, 'CRC32 Hash correct').toBe('khuskQ=='); hash = await processOnWorker(CryptoWorker, binaryData.slice(0), { operation: 'crc32c', workerLocation: 'test' }); - t.equal(hash, 'PDGE8A==', 'CRC32c Hash correct'); + expect(hash, 'CRC32c Hash correct').toBe('PDGE8A=='); hash = await processOnWorker(CryptoWorker, binaryData.slice(0), { operation: 'md5', _workerType: 'test' }); - t.equal(hash, 'YnxTb+lyen1CsNkpmLv+qA==', 'MD5 Hash correct'); + expect(hash, 'MD5 Hash correct').toBe('YnxTb+lyen1CsNkpmLv+qA=='); // Destroy all workers in NodeJS if (!isBrowser) { const workerFarm = WorkerFarm.getWorkerFarm({}); workerFarm.destroy(); } - - t.end(); }); // CryptoJSWorker is disabled -test.skip('CryptoJSWorker', async t => { +test.skip('CryptoJSWorker', async () => { if (!isBrowser) { - t.end(); return; } const {binaryData} = getBinaryData(); - t.equal(binaryData.byteLength, 100000, 'Length correct'); + expect(binaryData.byteLength, 'Length correct').toBe(100000); let hash = await processOnWorker(CryptoJSWorker, binaryData.slice(0), { operation: 'crc32', _workerType: 'test' }); - t.equal(hash, 'beRTbw==', 'CRC32 Hash correct'); + expect(hash, 'CRC32 Hash correct').toBe('beRTbw=='); hash = await processOnWorker(CryptoJSWorker, binaryData.slice(0), { operation: 'crc32c', _workerType: 'test' }); - t.equal(hash, '==', 'CRC32c Hash correct'); + expect(hash, 'CRC32c Hash correct').toBe('=='); hash = await processOnWorker(CryptoJSWorker, binaryData.slice(0), { operation: 'md5', _workerType: 'test' }); - t.equal(hash, '==', 'CRC32c Hash correct'); - - t.end(); + expect(hash, 'CRC32c Hash correct').toBe('=='); }); diff --git a/modules/crypto/test/crypto.spec.ts b/modules/crypto/test/crypto.spec.ts index b790ea7651..2dbd089a98 100644 --- a/modules/crypto/test/crypto.spec.ts +++ b/modules/crypto/test/crypto.spec.ts @@ -2,7 +2,7 @@ // 
SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile, loadInBatches, NullLoader, isBrowser} from '@loaders.gl/core'; import {CRC32Hash, CRC32CHash, MD5Hash, SHA256Hash, NodeHash} from '@loaders.gl/crypto'; import {getBinaryData} from './test-utils/test-utils'; @@ -49,7 +49,7 @@ const TEST_CASES = [ const HASHES = [new CRC32Hash(), new CRC32CHash(), new MD5Hash(), new SHA256Hash({modules})]; -test('crypto#atomic hashes', async t => { +test('crypto#atomic hashes', async () => { await loadTestCaseData(); for (const tc of TEST_CASES) { @@ -59,14 +59,12 @@ test('crypto#atomic hashes', async t => { const hash = await cryptoHash.hash(tc.data, 'base64'); const expectedHash = tc.digests[algorithm]; - t.equal(hash, expectedHash, `${algorithm} hash is correct for ${tc.title}`); + expect(hash, `${algorithm} hash is correct for ${tc.title}`).toBe(expectedHash); } } - - t.end(); }); -test('crypto#streaming hashes', async t => { +test('crypto#streaming hashes', async () => { for (const tc of TEST_CASES) { // test each test case against all precomputed digests/hashes for (const algorithm in tc.digests) { @@ -93,36 +91,28 @@ test('crypto#streaming hashes', async t => { for await (const _batch of nullIterator) { } - t.equal(hash, tc.digests[algorithm], `${algorithm} hash is correct for ${tc.title}`); + expect(hash, `${algorithm} hash is correct for ${tc.title}`).toBe(tc.digests[algorithm]); } } } - - t.end(); }); // EXTRA TESTS NOT COVERED BY TEST CASES -test('NodeHash#hash', async t => { +test('NodeHash#hash', async () => { if (!isBrowser) { const cryptoHash = new NodeHash({crypto: {algorithm: 'SHA256'}}); let hash = await cryptoHash.hash(binaryData, 'base64'); - t.equal( - hash, - 'gsoMi29gqdIBCEdTdRJW8VPFx5PQyFPTF4Lv7TJ4eQw=', - 'binary data SHA256 hash is correct' - ); + expect( + hash, 'binary data SHA256 hash is correct' + 
).toBe('gsoMi29gqdIBCEdTdRJW8VPFx5PQyFPTF4Lv7TJ4eQw='); hash = await cryptoHash.hash(repeatedData, 'base64'); - t.equal( - hash, - 'bSCTuOJei5XsmAnqtmm2Aw/2EvUHldNdAxYb3mjSK9s=', - 'repeated data SHA256 hash is correct' - ); + expect( + hash, 'repeated data SHA256 hash is correct' + ).toBe('bSCTuOJei5XsmAnqtmm2Aw/2EvUHldNdAxYb3mjSK9s='); } - - t.end(); }); // HELPERS diff --git a/modules/crypto/test/lib/crc32c-hash.spec.ts b/modules/crypto/test/lib/crc32c-hash.spec.ts index 452d327f99..66482b5f98 100644 --- a/modules/crypto/test/lib/crc32c-hash.spec.ts +++ b/modules/crypto/test/lib/crc32c-hash.spec.ts @@ -2,7 +2,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile} from '@loaders.gl/core'; import {CRC32CHash, encodeNumber} from '@loaders.gl/crypto'; @@ -18,7 +18,7 @@ const loadJSON = async (relativePath: string) => { const TEST_CASES = await loadJSON('./crc32c-test-cases.json'); -test('crc32c#additional tests', async t => { +test('crc32c#additional tests', async () => { for (const type in TEST_CASES) { const set = TEST_CASES[type]; @@ -62,5 +62,4 @@ test('crc32c#additional tests', async t => { t.equals(hash, set.expected, `should digest all test chunks correctly`); */ } - t.end(); }); diff --git a/modules/crypto/test/lib/crypto-hash.spec.ts b/modules/crypto/test/lib/crypto-hash.spec.ts index 604cac9d9f..f32e1dd8e8 100644 --- a/modules/crypto/test/lib/crypto-hash.spec.ts +++ b/modules/crypto/test/lib/crypto-hash.spec.ts @@ -4,7 +4,7 @@ /** eslint-disable @typescript-eslint/unbound-method */ -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {compareArrayBuffers, getBinaryData} from '../test-utils/test-utils'; import {concatenateArrayBuffers, concatenateArrayBuffersAsync} from '@loaders.gl/loader-utils'; import {fetchFile, loadInBatches} from '@loaders.gl/core'; @@ -24,9 +24,7 @@ test('CryptoHash#hash(CSV, against 
external hash)', async t => { data, 'base64' ); - t.equal(hash, CSV_MD5, 'repeated data MD5 hash is correct'); - - t.end(); + expect(hash, 'repeated data MD5 hash is correct').toBe(CSV_MD5); }); test('CryptoHash#iterator(CSV stream, against external hash)', async t => { @@ -50,14 +48,12 @@ test('CryptoHash#iterator(CSV stream, against external hash)', async t => { for await (const batch of csvIterator) { csv = batch; } - t.ok(Array.isArray(csv?.data), 'parsing from wrapped iterator works'); - - t.equal(hash, CSV_MD5, 'streaming MD5 hash is correct'); + expect(Array.isArray(csv?.data), 'parsing from wrapped iterator works').toBeTruthy(); - t.end(); + expect(hash, 'streaming MD5 hash is correct').toBe(CSV_MD5); }); -test('CryptoHash#hash(MD5 = default)', async t => { +test('CryptoHash#hash(MD5 = default)', async () => { const {binaryData, repeatedData} = getBinaryData(); const cryptoHash = new CryptoHash({ @@ -67,15 +63,13 @@ test('CryptoHash#hash(MD5 = default)', async t => { let hash = await cryptoHash.hash(binaryData, 'base64'); - t.equal(hash, 'YnxTb+lyen1CsNkpmLv+qA==', 'binary data MD5 hash is correct'); + expect(hash, 'binary data MD5 hash is correct').toBe('YnxTb+lyen1CsNkpmLv+qA=='); hash = await cryptoHash.hash(repeatedData, 'base64'); - t.equal(hash, '2d4uZUoLXXO/XWJGnrVl5Q==', 'repeated data MD5 hash is correct'); - - t.end(); + expect(hash, 'repeated data MD5 hash is correct').toBe('2d4uZUoLXXO/XWJGnrVl5Q=='); }); -test('CryptoHash#hashBatches(small chunks)', async t => { +test('CryptoHash#hashBatches(small chunks)', async () => { const inputChunks = [ new Uint8Array([1, 2, 3]).buffer, new Uint8Array([4, 5, 6]).buffer, @@ -100,13 +94,11 @@ test('CryptoHash#hashBatches(small chunks)', async t => { const inputData = concatenateArrayBuffers(...inputChunks); const transformedData = await concatenateArrayBuffersAsync(hashIterator); - t.equal(hash, 'hZbBr1WxS3syARKUT8uFNg==', 'CryptoHash generated correct hash'); - t.ok(compareArrayBuffers(inputData, 
transformedData), 'CryptoHash passed through data'); - - t.end(); + expect(hash, 'CryptoHash generated correct hash').toBe('hZbBr1WxS3syARKUT8uFNg=='); + expect(compareArrayBuffers(inputData, transformedData), 'CryptoHash passed through data').toBeTruthy(); }); -test('CryptoHash#batches(100K)', async t => { +test('CryptoHash#batches(100K)', async () => { const {binaryData} = getBinaryData(); const inputChunks = [binaryData]; @@ -128,8 +120,6 @@ test('CryptoHash#batches(100K)', async t => { const inputData = concatenateArrayBuffers(...inputChunks); const transformedData = await concatenateArrayBuffersAsync(hashIterator); - t.equal(hash, 'YnxTb+lyen1CsNkpmLv+qA==', 'CryptoHash generated correct hash'); - t.ok(compareArrayBuffers(inputData, transformedData), 'CryptoHash passed through data'); - - t.end(); + expect(hash, 'CryptoHash generated correct hash').toBe('YnxTb+lyen1CsNkpmLv+qA=='); + expect(compareArrayBuffers(inputData, transformedData), 'CryptoHash passed through data').toBeTruthy(); }); diff --git a/modules/crypto/test/lib/md5-wasm.spec.ts b/modules/crypto/test/lib/md5-wasm.spec.ts index 8fd6205891..037c3e2e7b 100644 --- a/modules/crypto/test/lib/md5-wasm.spec.ts +++ b/modules/crypto/test/lib/md5-wasm.spec.ts @@ -2,26 +2,24 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import md5WASM from '../../src/lib/algorithms/md5-wasm'; const textEncoder = new TextEncoder(); -test('md5WASM#hash supports ArrayBuffer and Uint8Array inputs', async t => { +test('md5WASM#hash supports ArrayBuffer and Uint8Array inputs', async () => { const input = textEncoder.encode('array md5 input'); const arrayBufferInput = input.buffer.slice(0); const hashFromTypedArray = await runMd5(input); const hashFromArrayBuffer = await runMd5(arrayBufferInput); - t.equal(hashFromTypedArray, 'debde7239b0aafd48eccd2d048e80c3a', 'hash matches expected value'); - t.equal(hashFromArrayBuffer, 
hashFromTypedArray, 'ArrayBuffer input hashes match'); - - t.end(); + expect(hashFromTypedArray, 'hash matches expected value').toBe('debde7239b0aafd48eccd2d048e80c3a'); + expect(hashFromArrayBuffer, 'ArrayBuffer input hashes match').toBe(hashFromTypedArray); }); -test('md5WASM#hash works when Buffer is undefined', async t => { +test('md5WASM#hash works when Buffer is undefined', async () => { const originalBuffer = globalThis.Buffer; // @ts-ignore Buffer is intentionally overridden for this test globalThis.Buffer = undefined; @@ -30,7 +28,7 @@ test('md5WASM#hash works when Buffer is undefined', async t => { const input = textEncoder.encode('bufferless md5 input'); const hash = await runMd5(input); - t.equal(hash, 'c2cccb15893fdb77c499a18ee750c51b', 'hash generated without Buffer present'); + expect(hash, 'hash generated without Buffer present').toBe('c2cccb15893fdb77c499a18ee750c51b'); } finally { if (typeof originalBuffer === 'undefined') { delete globalThis.Buffer; @@ -38,8 +36,6 @@ test('md5WASM#hash works when Buffer is undefined', async t => { globalThis.Buffer = originalBuffer; } } - - t.end(); }); function runMd5(data: ArrayBuffer | Uint8Array): Promise { diff --git a/modules/crypto/test/lib/utils/digest-utils.spec.ts b/modules/crypto/test/lib/utils/digest-utils.spec.ts index 705d8f90eb..9d7fe7283c 100644 --- a/modules/crypto/test/lib/utils/digest-utils.spec.ts +++ b/modules/crypto/test/lib/utils/digest-utils.spec.ts @@ -2,7 +2,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {encodeNumber, encodeHex, encodeBase64} from '@loaders.gl/crypto'; const loadJSON = async (relativePath: string) => { @@ -17,28 +17,25 @@ const loadJSON = async (relativePath: string) => { const TEST_CASES = await loadJSON('../crc32c-test-cases.json'); -test('encodeHexToBase64#crc32 test cases', t => { +test('encodeHexToBase64#crc32 test cases', () => { for (const type in 
TEST_CASES) { const set = TEST_CASES[type]; for (const tc of set.cases) { if (!tc.charset) { tc.expected = encodeNumber(tc.want, 'base64'); - t.ok(tc.expected, `${tc.want} encodeed to ${tc.expected}`); + expect(tc.expected, `${tc.want} encoded to ${tc.expected}`).toBeTruthy(); } } set.expected = encodeHex(set.want.toString(16), 'base64'); } - t.end(); }); -test('encodeHexToBase64', t => { - t.equal(encodeHex('f85d741', 'base64'), 'D4XXQQ==', 'encode zero leading hex correctly'); - t.end(); +test('encodeHexToBase64', () => { + expect(encodeHex('f85d741', 'base64'), 'encode zero leading hex correctly').toBe('D4XXQQ=='); }); -test('encodeBase64ToHex', t => { - t.equal(encodeBase64('D4XXQQ==', 'hex'), '0f85d741'); - t.end(); +test('encodeBase64ToHex', () => { + expect(encodeBase64('D4XXQQ==', 'hex')).toBe('0f85d741'); }); diff --git a/modules/geoarrow/test/geoarrow-metadata.spec.ts b/modules/geoarrow/test/geoarrow-metadata.spec.ts index 82950a2690..a94783e662 100644 --- a/modules/geoarrow/test/geoarrow-metadata.spec.ts +++ b/modules/geoarrow/test/geoarrow-metadata.spec.ts @@ -2,13 +2,13 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import type {GeoArrowMetadata} from '@loaders.gl/geoarrow'; import {getGeometryColumnsFromSchema} from '@loaders.gl/geoarrow'; // fix a bug that map bounds are not updated correctly from arrow samples -test('geoarrow#getGeometryColumnsFromSchema', t => { +test('geoarrow#getGeometryColumnsFromSchema', () => { const testCases: {schema: string; columns: Record}[] = [ { schema: '', @@ -18,8 +18,6 @@ for (const testCase of testCases) { const columns = getGeometryColumnsFromSchema(testCase.schema as any); - t.ok(columns); + expect(columns).toBeTruthy(); } - - t.end(); }); diff --git a/modules/geoarrow/test/get-arrow-bounds.spec.ts b/modules/geoarrow/test/get-arrow-bounds.spec.ts index
3513c64458..a841950cb2 100644 --- a/modules/geoarrow/test/get-arrow-bounds.spec.ts +++ b/modules/geoarrow/test/get-arrow-bounds.spec.ts @@ -2,12 +2,12 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {updateBoundsFromGeoArrowSamples} from '@loaders.gl/geoarrow'; // fix a bug that map bounds are not updated correctly from arrow samples -test('ArrowUtils#updateBoundsFromGeoArrowSamples', t => { +test('ArrowUtils#updateBoundsFromGeoArrowSamples', () => { const testCases = [ { coords: [0, 0, 1, 1, 2, 2], @@ -43,7 +43,7 @@ test('ArrowUtils#updateBoundsFromGeoArrowSamples', t => { testCase.nDim, initBound ); - t.deepEqual(updatedBound, testCase.bound, 'bounds updated correctly'); + expect(updatedBound, 'bounds updated correctly').toEqual(testCase.bound); const sampleSize = 2; const updateBoundWith2Samples = updateBoundsFromGeoArrowSamples( @@ -52,11 +52,8 @@ test('ArrowUtils#updateBoundsFromGeoArrowSamples', t => { initBound, sampleSize ); - t.deepEqual( - updateBoundWith2Samples, - testCase.boundSample2, - 'bounds updated correctly with 2 samples' - ); + expect( + updateBoundWith2Samples, 'bounds updated correctly with 2 samples' + ).toEqual(testCase.boundSample2); }); - t.end(); }); diff --git a/modules/geoarrow/test/get-geoarrow-geometry-info.spec.ts b/modules/geoarrow/test/get-geoarrow-geometry-info.spec.ts index 41d4a02811..c141da4f05 100644 --- a/modules/geoarrow/test/get-geoarrow-geometry-info.spec.ts +++ b/modules/geoarrow/test/get-geoarrow-geometry-info.spec.ts @@ -2,14 +2,14 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import * as arrow from 'apache-arrow'; import {getGeoArrowGeometryInfo} from '@loaders.gl/geoarrow'; import {GeoArrowGeometryInfo} from '../src/get-geoarrow-geometry-info'; // fix a bug that map bounds are not updated correctly from arrow 
samples -test('geoarrow#getGeoArrowGeometryInfo', t => { +test('geoarrow#getGeoArrowGeometryInfo', () => { const testCases: {field: arrow.Field; info: Partial}[] = [ // { // field: new arrow.Field('point', new arrow.Float(arrow.Precision.DOUBLE)), @@ -45,8 +45,9 @@ test('geoarrow#getGeoArrowGeometryInfo', t => { for (const testCase of testCases) { const info = getGeoArrowGeometryInfo(testCase.field); - t.deepEqual(info?.compatibleEncodings, info?.compatibleEncodings, testCase.field.toString()); + expect(info?.compatibleEncodings).toEqual( + info?.compatibleEncodings, + testCase.field.toString() + ); } - - t.end(); }); diff --git a/modules/gis/test/geometry-converters/wkb/convert-binary-geometry-to-wkb.spec.ts b/modules/gis/test/geometry-converters/wkb/convert-binary-geometry-to-wkb.spec.ts index 1800674b5f..ef66ab8a50 100644 --- a/modules/gis/test/geometry-converters/wkb/convert-binary-geometry-to-wkb.spec.ts +++ b/modules/gis/test/geometry-converters/wkb/convert-binary-geometry-to-wkb.spec.ts @@ -4,7 +4,7 @@ /* eslint-disable no-continue */ -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile} from '@loaders.gl/core'; import {parseTestCases} from '@loaders.gl/gis/test/data/wkt/parse-test-cases'; import {convertGeometryToWKB} from '@loaders.gl/gis'; @@ -14,33 +14,29 @@ const WKB_2D_NAN_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdata2d-nan. 
const WKB_Z_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdataZ.json'; const WKB_Z_NAN_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdataZ-nan.json'; -test('convertGeometryToWKB#2D', async t => { +test('convertGeometryToWKB#2D', async () => { const response = await fetchFile(WKB_2D_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); for (const [title, testCase] of Object.entries(TEST_CASES)) { const {geoJSON, wkb} = testCase; const encoded = convertGeometryToWKB(geoJSON); - t.deepEqual(encoded, wkb, title); + expect(encoded, title).toEqual(wkb); } - - t.end(); }); -test('convertGeometryToWKB#2D NaN', async t => { +test('convertGeometryToWKB#2D NaN', async () => { const response = await fetchFile(WKB_2D_NAN_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); for (const [title, testCase] of Object.entries(TEST_CASES)) { const {geoJSON, wkb} = testCase; const encoded = convertGeometryToWKB(geoJSON); - t.deepEqual(encoded, wkb, title); + expect(encoded, title).toEqual(wkb); } - - t.end(); }); -test('convertGeometryToWKB#Z', async t => { +test('convertGeometryToWKB#Z', async () => { const response = await fetchFile(WKB_Z_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); @@ -51,13 +47,11 @@ test('convertGeometryToWKB#Z', async t => { continue; } const encoded = convertGeometryToWKB(geoJSON, {wkb: {hasZ: true, hasM: false}}); - t.deepEqual(encoded, wkb, title); + expect(encoded, title).toEqual(wkb); } - - t.end(); }); -test('convertGeometryToWKB#Z NaN', async t => { +test('convertGeometryToWKB#Z NaN', async () => { const response = await fetchFile(WKB_Z_NAN_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); @@ -68,8 +62,6 @@ test('convertGeometryToWKB#Z NaN', async t => { continue; } const encoded = convertGeometryToWKB(geoJSON, {wkb: {hasZ: true, hasM: false}}); - t.deepEqual(encoded, wkb, title); + expect(encoded, title).toEqual(wkb); } - - t.end(); }); diff --git 
a/modules/gis/test/geometry-converters/wkb/convert-geometry-to-twkb.spec.ts b/modules/gis/test/geometry-converters/wkb/convert-geometry-to-twkb.spec.ts index 1ef011e9b2..060b61f731 100644 --- a/modules/gis/test/geometry-converters/wkb/convert-geometry-to-twkb.spec.ts +++ b/modules/gis/test/geometry-converters/wkb/convert-geometry-to-twkb.spec.ts @@ -2,66 +2,58 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -/** -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile, encodeSync} from '@loaders.gl/core'; -import {WKBWriter} from '@loaders.gl/wkt'; +import {TWKBWriter} from '@loaders.gl/wkt'; import {parseTestCases} from '@loaders.gl/gis/test/data/wkt/parse-test-cases'; -const WKB_2D_TEST_CASES = '@loaders.gl/gis/test/data/wkb-testdata2d.json'; -const WKB_2D_NAN_TEST_CASES = '@loaders.gl/gis/test/data/wkb-testdata2d-nan.json'; -const WKB_Z_TEST_CASES = '@loaders.gl/gis/test/data/wkb-testdataZ.json'; -const WKB_Z_NAN_TEST_CASES = '@loaders.gl/gis/test/data/wkb-testdataZ-nan.json'; - -test('encodeTWKB#2D', async (t) => { - const response = await fetchFile(WKB_2D_TEST_CASES); - const TEST_CASES = parseTestCases(await response.json()); - - for (const testCase of Object.values(TEST_CASES)) { - const {geoJSON, wkb} = testCase; - const encoded = encodeSync(geoJSON, WKBWriter, {wkb: {hasZ: false, hasM: false}}); - t.deepEqual(encoded, wkb); +const TWKB_2D_TEST_CASES = '@loaders.gl/gis/test/data/wkt/twkb-testdata2d.json'; +const TWKB_2D_NAN_TEST_CASES = '@loaders.gl/gis/test/data/wkt/twkb-testdata2d-nan.json'; +const TWKB_Z_TEST_CASES = '@loaders.gl/gis/test/data/wkt/twkb-testdataZ.json'; +const TWKB_Z_NAN_TEST_CASES = '@loaders.gl/gis/test/data/wkt/twkb-testdataZ-nan.json'; + +// These legacy writer cases were previously commented out. Keep them skipped during the +// syntax migration so this change does not expand test surface area. 
+test.skip('encodeTWKB#2D', async () => { + const response = await fetchFile(TWKB_2D_TEST_CASES); + const testCases = parseTestCases(await response.json()); + + for (const testCase of Object.values(testCases)) { + const {geoJSON, twkb} = testCase; + const encoded = encodeSync(geoJSON, TWKBWriter, {wkb: {hasZ: false, hasM: false}}); + expect(encoded).toEqual(twkb); } - - t.end(); }); -test('encodeTWKB#2D NaN', async (t) => { - const response = await fetchFile(WKB_2D_NAN_TEST_CASES); - const TEST_CASES = parseTestCases(await response.json()); +test.skip('encodeTWKB#2D NaN', async () => { + const response = await fetchFile(TWKB_2D_NAN_TEST_CASES); + const testCases = parseTestCases(await response.json()); - for (const testCase of Object.values(TEST_CASES)) { - const {geoJSON, wkb} = testCase; - const encoded = encodeSync(geoJSON, WKBWriter, {wkb: {hasZ: false, hasM: false}}); - t.deepEqual(encoded, wkb); + for (const testCase of Object.values(testCases)) { + const {geoJSON, twkb} = testCase; + const encoded = encodeSync(geoJSON, TWKBWriter, {wkb: {hasZ: false, hasM: false}}); + expect(encoded).toEqual(twkb); } - - t.end(); }); -test('encodeTWKB#Z', async (t) => { - const response = await fetchFile(WKB_Z_TEST_CASES); - const TEST_CASES = parseTestCases(await response.json()); +test.skip('encodeTWKB#Z', async () => { + const response = await fetchFile(TWKB_Z_TEST_CASES); + const testCases = parseTestCases(await response.json()); - for (const testCase of Object.values(TEST_CASES)) { - const {geoJSON, wkb} = testCase; - const encoded = encodeSync(geoJSON, WKBWriter, {wkb: {hasZ: true, hasM: false}}); - t.deepEqual(encoded, wkb); + for (const testCase of Object.values(testCases)) { + const {geoJSON, twkb} = testCase; + const encoded = encodeSync(geoJSON, TWKBWriter, {wkb: {hasZ: true, hasM: false}}); + expect(encoded).toEqual(twkb); } - - t.end(); }); -test('encodeTWKB#Z NaN', async (t) => { - const response = await fetchFile(WKB_Z_NAN_TEST_CASES); - const TEST_CASES = 
parseTestCases(await response.json()); +test.skip('encodeTWKB#Z NaN', async () => { + const response = await fetchFile(TWKB_Z_NAN_TEST_CASES); + const testCases = parseTestCases(await response.json()); - for (const testCase of Object.values(TEST_CASES)) { - const {geoJSON, wkb} = testCase; - const encoded = encodeSync(geoJSON, WKBWriter, {wkb: {hasZ: true, hasM: false}}); - t.deepEqual(encoded, wkb); + for (const testCase of Object.values(testCases)) { + const {geoJSON, twkb} = testCase; + const encoded = encodeSync(geoJSON, TWKBWriter, {wkb: {hasZ: true, hasM: false}}); + expect(encoded).toEqual(twkb); } - - t.end(); }); - */ diff --git a/modules/gis/test/geometry-converters/wkb/convert-geometry-to-wkb.spec.ts b/modules/gis/test/geometry-converters/wkb/convert-geometry-to-wkb.spec.ts index 1800674b5f..ef66ab8a50 100644 --- a/modules/gis/test/geometry-converters/wkb/convert-geometry-to-wkb.spec.ts +++ b/modules/gis/test/geometry-converters/wkb/convert-geometry-to-wkb.spec.ts @@ -4,7 +4,7 @@ /* eslint-disable no-continue */ -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile} from '@loaders.gl/core'; import {parseTestCases} from '@loaders.gl/gis/test/data/wkt/parse-test-cases'; import {convertGeometryToWKB} from '@loaders.gl/gis'; @@ -14,33 +14,29 @@ const WKB_2D_NAN_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdata2d-nan. 
const WKB_Z_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdataZ.json'; const WKB_Z_NAN_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdataZ-nan.json'; -test('convertGeometryToWKB#2D', async t => { +test('convertGeometryToWKB#2D', async () => { const response = await fetchFile(WKB_2D_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); for (const [title, testCase] of Object.entries(TEST_CASES)) { const {geoJSON, wkb} = testCase; const encoded = convertGeometryToWKB(geoJSON); - t.deepEqual(encoded, wkb, title); + expect(encoded, title).toEqual(wkb); } - - t.end(); }); -test('convertGeometryToWKB#2D NaN', async t => { +test('convertGeometryToWKB#2D NaN', async () => { const response = await fetchFile(WKB_2D_NAN_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); for (const [title, testCase] of Object.entries(TEST_CASES)) { const {geoJSON, wkb} = testCase; const encoded = convertGeometryToWKB(geoJSON); - t.deepEqual(encoded, wkb, title); + expect(encoded, title).toEqual(wkb); } - - t.end(); }); -test('convertGeometryToWKB#Z', async t => { +test('convertGeometryToWKB#Z', async () => { const response = await fetchFile(WKB_Z_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); @@ -51,13 +47,11 @@ test('convertGeometryToWKB#Z', async t => { continue; } const encoded = convertGeometryToWKB(geoJSON, {wkb: {hasZ: true, hasM: false}}); - t.deepEqual(encoded, wkb, title); + expect(encoded, title).toEqual(wkb); } - - t.end(); }); -test('convertGeometryToWKB#Z NaN', async t => { +test('convertGeometryToWKB#Z NaN', async () => { const response = await fetchFile(WKB_Z_NAN_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); @@ -68,8 +62,6 @@ test('convertGeometryToWKB#Z NaN', async t => { continue; } const encoded = convertGeometryToWKB(geoJSON, {wkb: {hasZ: true, hasM: false}}); - t.deepEqual(encoded, wkb, title); + expect(encoded, title).toEqual(wkb); } - - t.end(); }); diff --git 
a/modules/gis/test/geometry-converters/wkb/convert-geometry-to-wkt.spec.ts b/modules/gis/test/geometry-converters/wkb/convert-geometry-to-wkt.spec.ts index 0f1a08bc3a..94e7e0b431 100644 --- a/modules/gis/test/geometry-converters/wkb/convert-geometry-to-wkt.spec.ts +++ b/modules/gis/test/geometry-converters/wkb/convert-geometry-to-wkt.spec.ts @@ -2,16 +2,13 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {encodeTextSync} from '@loaders.gl/core'; import {WKTWriter} from '@loaders.gl/wkt'; -test('encodeWKT', t => { - t.throws( - () => encodeTextSync({type: 'FeatureCollection'}, WKTWriter), - 'does not accept featurecollections' - ); +test('encodeWKT', () => { + expect(() => encodeTextSync({type: 'FeatureCollection'}, WKTWriter), 'does not accept featurecollections').toThrow(); // const fixtures = [ // 'LINESTRING (30 10, 10 30, 40 40)', @@ -39,7 +36,5 @@ test('encodeWKT', t => { }; const wkt = encodeTextSync(geojsonFeature.geometry, WKTWriter); - t.equal(wkt, 'POINT (42 20)', 'point equal'); - - t.end(); + expect(wkt, 'point equal').toBe('POINT (42 20)'); }); diff --git a/modules/gis/test/geometry-converters/wkb/convert-hex-twkb-to-geometry.spec.ts b/modules/gis/test/geometry-converters/wkb/convert-hex-twkb-to-geometry.spec.ts index dbe0af1f05..03c18cf2a9 100644 --- a/modules/gis/test/geometry-converters/wkb/convert-hex-twkb-to-geometry.spec.ts +++ b/modules/gis/test/geometry-converters/wkb/convert-hex-twkb-to-geometry.spec.ts @@ -2,7 +2,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile, parseSync} from '@loaders.gl/core'; import {isTWKB} from '@loaders.gl/gis'; import {TWKBLoader} from '@loaders.gl/wkt'; @@ -31,7 +31,7 @@ function normalizeTypedArrays(value: unknown): unknown { return value; } -test('parseHexTWKB#2D', async t => { 
+test('parseHexTWKB#2D', async () => { const response = await fetchFile(WKB_2D_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); @@ -42,7 +42,7 @@ test('parseHexTWKB#2D', async t => { // Big endian if (testCase.twkb && testCase.binary) { - t.ok(isTWKB(testCase.twkb), 'isTWKB(2D)'); + expect(isTWKB(testCase.twkb), 'isTWKB(2D)').toBeTruthy(); const geometry = {...testCase.geoJSON}; // TODO - Weird empty geometry case, is that coorrect per spec? if ( @@ -55,14 +55,12 @@ test('parseHexTWKB#2D', async t => { geometry.coordinates = []; } const parsedGeometry = parseSync(testCase.twkb, TWKBLoader); - t.deepEqual(normalizeTypedArrays(parsedGeometry), geometry); + expect(normalizeTypedArrays(parsedGeometry)).toEqual(geometry); } } - - t.end(); }); -test('parseHexTWKB#Z', async t => { +test('parseHexTWKB#Z', async () => { const response = await fetchFile(WKB_Z_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); @@ -75,9 +73,7 @@ test('parseHexTWKB#Z', async t => { const parsedGeometry = parseSync(testCase.twkb, TWKBLoader, { wkb: {shape: 'geojson-geometry'} }); - t.deepEqual(normalizeTypedArrays(parsedGeometry), testCase.geoJSON); + expect(normalizeTypedArrays(parsedGeometry)).toEqual(testCase.geoJSON); } } - - t.end(); }); diff --git a/modules/gis/test/geometry-converters/wkb/convert-wkb-to-geometry.spec.ts b/modules/gis/test/geometry-converters/wkb/convert-wkb-to-geometry.spec.ts index 42a585b750..eba87d5a05 100644 --- a/modules/gis/test/geometry-converters/wkb/convert-wkb-to-geometry.spec.ts +++ b/modules/gis/test/geometry-converters/wkb/convert-wkb-to-geometry.spec.ts @@ -2,7 +2,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile} from '@loaders.gl/core'; import {convertWKBToBinaryGeometry, isWKB} from '@loaders.gl/gis'; import {parseTestCases} from '@loaders.gl/gis/test/data/wkt/parse-test-cases'; @@ -30,7 +30,7 @@ 
function normalizeTypedArrays(value: unknown): unknown { return value; } -test('convertWKBToBinaryGeometry#2D', async t => { +test('convertWKBToBinaryGeometry#2D', async () => { const response = await fetchFile(WKB_2D_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); @@ -38,45 +38,41 @@ test('convertWKBToBinaryGeometry#2D', async t => { for (const [title, testCase] of Object.entries(TEST_CASES2)) { // Little endian if (testCase.wkb && testCase.binary) { - t.ok(isWKB(testCase.wkb), 'isWKB(2D)'); + expect(isWKB(testCase.wkb), 'isWKB(2D)').toBeTruthy(); const result = convertWKBToBinaryGeometry(testCase.wkb); - t.deepEqual(normalizeTypedArrays(result), normalizeTypedArrays(testCase.binary), title); + expect(normalizeTypedArrays(result), title).toEqual(normalizeTypedArrays(testCase.binary)); } // Big endian if (testCase.wkbXdr && testCase.binary) { - t.ok(isWKB(testCase.wkbXdr), 'isWKB(2D)'); + expect(isWKB(testCase.wkbXdr), 'isWKB(2D)').toBeTruthy(); const result = convertWKBToBinaryGeometry(testCase.wkbXdr); - t.deepEqual(normalizeTypedArrays(result), normalizeTypedArrays(testCase.binary), title); + expect(normalizeTypedArrays(result), title).toEqual(normalizeTypedArrays(testCase.binary)); } } - - t.end(); }); -test('convertWKBToBinaryGeometry#Z', async t => { +test('convertWKBToBinaryGeometry#Z', async () => { const response = await fetchFile(WKB_Z_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); for (const [title, testCase] of Object.entries(TEST_CASES)) { // Little endian if (testCase.wkb && testCase.binary) { - t.ok(isWKB(testCase.wkb), 'isWKB(Z)'); + expect(isWKB(testCase.wkb), 'isWKB(Z)').toBeTruthy(); const result = convertWKBToBinaryGeometry(testCase.wkb); - t.deepEqual(normalizeTypedArrays(result), normalizeTypedArrays(testCase.binary), title); + expect(normalizeTypedArrays(result), title).toEqual(normalizeTypedArrays(testCase.binary)); } // Big endian if (testCase.wkbXdr && testCase.binary) { - 
t.ok(isWKB(testCase.wkbXdr), 'isWKB(Z)'); + expect(isWKB(testCase.wkbXdr), 'isWKB(Z)').toBeTruthy(); const result = convertWKBToBinaryGeometry(testCase.wkbXdr); - t.deepEqual(normalizeTypedArrays(result), normalizeTypedArrays(testCase.binary), title); + expect(normalizeTypedArrays(result), title).toEqual(normalizeTypedArrays(testCase.binary)); } // if (testCase.wkbXdr && testCase.binary && testCase.geoJSON) { // t.deepEqual(parseSync(testCase.wkbXdr, WKBLoader, {wkb: {shape: 'geometry'}}), testCase.geoJSON); // } } - - t.end(); }); diff --git a/modules/gis/test/geometry-converters/wkb/convert-wkt-to-geometry.spec.ts b/modules/gis/test/geometry-converters/wkb/convert-wkt-to-geometry.spec.ts index 0d4f5206d0..c8ab0c077c 100644 --- a/modules/gis/test/geometry-converters/wkb/convert-wkt-to-geometry.spec.ts +++ b/modules/gis/test/geometry-converters/wkb/convert-wkt-to-geometry.spec.ts @@ -4,7 +4,7 @@ // Fork of https://github.com/mapbox/wellknown under ISC license (MIT/BSD-2-clause equivalent) -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {WKTLoader} from '@loaders.gl/wkt'; import {setLoaderOptions, fetchFile, parseSync} from '@loaders.gl/core'; @@ -18,46 +18,46 @@ setLoaderOptions({ }); // eslint-disable-next-line max-statements -test('parseWKT', async t => { +test('parseWKT', async () => { let response = await fetchFile(GEOMETRYCOLLECTION_WKT_URL); const GEOMETRYCOLLECTION_WKT = await response.text(); response = await fetchFile(GEOMETRYCOLLECTION_GEOJSON_URL); const GEOMETRYCOLLECTION_GEOJSON = await response.json(); - t.deepEqual(parseSync('POINT (0 1)', WKTLoader), { + expect(parseSync('POINT (0 1)', WKTLoader)).toEqual({ type: 'Point', coordinates: [0, 1] }); - t.deepEqual(parseSync('POINT (1 1)', WKTLoader), { + expect(parseSync('POINT (1 1)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1, 1] }); - t.deepEqual(parseSync('POINT(1 1)', WKTLoader), { + expect(parseSync('POINT(1 1)', WKTLoader)).toEqual({ type: 'Point', 
coordinates: [1, 1] }); - t.deepEqual(parseSync('POINT\n\r(1 1)', WKTLoader), { + expect(parseSync('POINT\n\r(1 1)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1, 1] }); - t.deepEqual(parseSync('POINT(1.1 1.1)', WKTLoader), { + expect(parseSync('POINT(1.1 1.1)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1.1, 1.1] }); - t.deepEqual(parseSync('point(1.1 1.1)', WKTLoader), { + expect(parseSync('point(1.1 1.1)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1.1, 1.1] }); - t.deepEqual(parseSync('point(1 2 3)', WKTLoader), { + expect(parseSync('point(1 2 3)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1, 2, 3] }); - t.deepEqual(parseSync('point(1 2 3 4)', WKTLoader), { + expect(parseSync('point(1 2 3 4)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1, 2, 3, 4] }); - t.deepEqual(parseSync('SRID=3857;POINT (1 2 3)', WKTLoader), { + expect(parseSync('SRID=3857;POINT (1 2 3)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1, 2, 3], crs: { @@ -67,7 +67,7 @@ test('parseWKT', async t => { } } }); - t.deepEqual(parseSync('LINESTRING (30 10, 10 30, 40 40)', WKTLoader), { + expect(parseSync('LINESTRING (30 10, 10 30, 40 40)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [30, 10], @@ -75,7 +75,7 @@ test('parseWKT', async t => { [40, 40] ] }); - t.deepEqual(parseSync('LINESTRING(30 10, 10 30, 40 40)', WKTLoader), { + expect(parseSync('LINESTRING(30 10, 10 30, 40 40)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [30, 10], @@ -83,7 +83,7 @@ test('parseWKT', async t => { [40, 40] ] }); - t.deepEqual(parseSync('LineString(30 10, 10 30, 40 40)', WKTLoader), { + expect(parseSync('LineString(30 10, 10 30, 40 40)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [30, 10], @@ -91,21 +91,21 @@ test('parseWKT', async t => { [40, 40] ] }); - t.deepEqual(parseSync('LINESTRING (1 2 3, 4 5 6)', WKTLoader), { + expect(parseSync('LINESTRING (1 2 3, 4 5 6)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [1, 2, 3], 
[4, 5, 6] ] }); - t.deepEqual(parseSync('LINESTRING (1 2 3 4, 5 6 7 8)', WKTLoader), { + expect(parseSync('LINESTRING (1 2 3 4, 5 6 7 8)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [1, 2, 3, 4], [5, 6, 7, 8] ] }); - t.deepEqual(parseSync('SRID=3857;LINESTRING (30 10, 10 30, 40 40)', WKTLoader), { + expect(parseSync('SRID=3857;LINESTRING (30 10, 10 30, 40 40)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [30, 10], @@ -119,7 +119,7 @@ test('parseWKT', async t => { } } }); - t.deepEqual(parseSync('POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))', WKTLoader), { + expect(parseSync('POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))', WKTLoader)).toEqual({ type: 'Polygon', coordinates: [ [ @@ -131,7 +131,7 @@ test('parseWKT', async t => { ] ] }); - t.deepEqual(parseSync('POLYGON((30 10, 10 20, 20 40, 40 40, 30 10))', WKTLoader), { + expect(parseSync('POLYGON((30 10, 10 20, 20 40, 40 40, 30 10))', WKTLoader)).toEqual({ type: 'Polygon', coordinates: [ [ @@ -143,7 +143,7 @@ test('parseWKT', async t => { ] ] }); - t.deepEqual(parseSync('SRID=3857;POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))', WKTLoader), { + expect(parseSync('SRID=3857;POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))', WKTLoader)).toEqual({ type: 'Polygon', coordinates: [ [ @@ -161,12 +161,10 @@ test('parseWKT', async t => { } } }); - t.deepEqual( - parseSync( + expect(parseSync( 'POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))', WKTLoader - ), - { + )).toEqual({ type: 'Polygon', coordinates: [ [ @@ -183,30 +181,29 @@ test('parseWKT', async t => { [20, 30] ] ] - } - ); - t.deepEqual(parseSync('MULTIPOINT (0 0, 2 3)', WKTLoader), { + }); + expect(parseSync('MULTIPOINT (0 0, 2 3)', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [0, 0], [2, 3] ] }); - t.deepEqual(parseSync('MULTIPOINT (1 1, 2 3)', WKTLoader), { + expect(parseSync('MULTIPOINT (1 1, 2 3)', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [1, 1], [2, 3] ] }); - 
t.deepEqual(parseSync('MultiPoint (1 1, 2 3)', WKTLoader), { + expect(parseSync('MultiPoint (1 1, 2 3)', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [1, 1], [2, 3] ] }); - t.deepEqual(parseSync('SRID=3857;MULTIPOINT (1 1, 2 3)', WKTLoader), { + expect(parseSync('SRID=3857;MULTIPOINT (1 1, 2 3)', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [1, 1], @@ -219,28 +216,28 @@ test('parseWKT', async t => { } } }); - t.deepEqual(parseSync('MULTIPOINT ((0 0), (2 3))', WKTLoader), { + expect(parseSync('MULTIPOINT ((0 0), (2 3))', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [0, 0], [2, 3] ] }); - t.deepEqual(parseSync('MULTIPOINT ((1 1), (2 3))', WKTLoader), { + expect(parseSync('MULTIPOINT ((1 1), (2 3))', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [1, 1], [2, 3] ] }); - t.deepEqual(parseSync('MultiPoint ((1 1), (2 3))', WKTLoader), { + expect(parseSync('MultiPoint ((1 1), (2 3))', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [1, 1], [2, 3] ] }); - t.deepEqual(parseSync('SRID=3857;MULTIPOINT ((1 1), (2 3))', WKTLoader), { + expect(parseSync('SRID=3857;MULTIPOINT ((1 1), (2 3))', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [1, 1], @@ -253,9 +250,7 @@ test('parseWKT', async t => { } } }); - t.deepEqual( - parseSync('MULTILINESTRING ((30 10, 10 30, 40 40), (30 10, 10 30, 40 40))', WKTLoader), - { + expect(parseSync('MULTILINESTRING ((30 10, 10 30, 40 40), (30 10, 10 30, 40 40))', WKTLoader)).toEqual({ type: 'MultiLineString', coordinates: [ [ @@ -269,14 +264,11 @@ test('parseWKT', async t => { [40, 40] ] ] - } - ); - t.deepEqual( - parseSync( + }); + expect(parseSync( 'SRID=3857;MULTILINESTRING ((30 10, 10 30, 40 40), (30 10, 10 30, 40 40))', WKTLoader - ), - { + )).toEqual({ type: 'MultiLineString', coordinates: [ [ @@ -296,14 +288,11 @@ test('parseWKT', async t => { name: 'urn:ogc:def:crs:EPSG::3857' } } - } - ); - t.deepEqual( - parseSync( + }); + expect(parseSync( 'MULTIPOLYGON (((30 20, 10 40, 45 
40, 30 20)), ((15 5, 40 10, 10 20, 5 10, 15 5)))', WKTLoader - ), - { + )).toEqual({ type: 'MultiPolygon', coordinates: [ [ @@ -324,18 +313,15 @@ test('parseWKT', async t => { ] ] ] - } - ); - t.deepEqual(parseSync('MULTIPOLYGON (((-74.03349399999999 40.688348)))', WKTLoader), { + }); + expect(parseSync('MULTIPOLYGON (((-74.03349399999999 40.688348)))', WKTLoader)).toEqual({ type: 'MultiPolygon', coordinates: [[[[-74.03349399999999, 40.688348]]]] }); - t.deepEqual( - parseSync( + expect(parseSync( 'MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)), ((15 5, 40 10, 10 20, 5 10, 15 5), (10 10, 15 10, 15 15, 10 10)))', WKTLoader - ), - { + )).toEqual({ type: 'MultiPolygon', coordinates: [ [ @@ -362,14 +348,11 @@ test('parseWKT', async t => { ] ] ] - } - ); - t.deepEqual( - parseSync( + }); + expect(parseSync( 'SRID=3857;MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)), ((15 5, 40 10, 10 20, 5 10, 15 5)))', WKTLoader - ), - { + )).toEqual({ type: 'MultiPolygon', coordinates: [ [ @@ -396,10 +379,9 @@ test('parseWKT', async t => { name: 'urn:ogc:def:crs:EPSG::3857' } } - } - ); - t.deepEqual(parseSync(GEOMETRYCOLLECTION_WKT, WKTLoader), GEOMETRYCOLLECTION_GEOJSON); - t.deepEqual(parseSync('GeometryCollection(POINT(4 6),LINESTRING(4 6,7 10))', WKTLoader), { + }); + expect(parseSync(GEOMETRYCOLLECTION_WKT, WKTLoader)).toEqual(GEOMETRYCOLLECTION_GEOJSON); + expect(parseSync('GeometryCollection(POINT(4 6),LINESTRING(4 6,7 10))', WKTLoader)).toEqual({ type: 'GeometryCollection', geometries: [ { @@ -415,7 +397,7 @@ test('parseWKT', async t => { } ] }); - t.deepEqual(parseSync('GeometryCollection(POINT(4 6),\nLINESTRING(4 6,7 10))', WKTLoader), { + expect(parseSync('GeometryCollection(POINT(4 6),\nLINESTRING(4 6,7 10))', WKTLoader)).toEqual({ type: 'GeometryCollection', geometries: [ { @@ -431,23 +413,23 @@ test('parseWKT', async t => { } ] }); - t.deepEqual(parseSync('POINT (1e-6 1E+2)', WKTLoader), { + expect(parseSync('POINT (1e-6 1E+2)', WKTLoader)).toEqual({ type: 'Point', 
coordinates: [1e-6, 1e2] }); - t.equal(parseSync('POINT(100)', WKTLoader), null); - t.equal(parseSync('POINT(100, 100)', WKTLoader), null); - t.equal(parseSync('POINT()', WKTLoader), null); - t.equal(parseSync('MULTIPOINT()', WKTLoader), null); - t.equal(parseSync('MULTIPOINT(1)', WKTLoader), null); - t.equal(parseSync('MULTIPOINT(1 1, 1)', WKTLoader), null); + expect(parseSync('POINT(100)', WKTLoader)).toBe(null); + expect(parseSync('POINT(100, 100)', WKTLoader)).toBe(null); + expect(parseSync('POINT()', WKTLoader)).toBe(null); + expect(parseSync('MULTIPOINT()', WKTLoader)).toBe(null); + expect(parseSync('MULTIPOINT(1)', WKTLoader)).toBe(null); + expect(parseSync('MULTIPOINT(1 1, 1)', WKTLoader)).toBe(null); - t.deepEqual(parseSync('POINT Z (1 2 3)', WKTLoader), { + expect(parseSync('POINT Z (1 2 3)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1, 2, 3] }); - t.deepEqual(parseSync('LINESTRING Z (30 10 1, 10 30 2, 40 40 3)', WKTLoader), { + expect(parseSync('LINESTRING Z (30 10 1, 10 30 2, 40 40 3)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [30, 10, 1], @@ -456,7 +438,7 @@ test('parseWKT', async t => { ] }); - t.deepEqual(parseSync('POLYGON Z ((30 10 1, 10 20 2, 20 40 3, 40 40 4, 30 10 5))', WKTLoader), { + expect(parseSync('POLYGON Z ((30 10 1, 10 20 2, 20 40 3, 40 40 4, 30 10 5))', WKTLoader)).toEqual({ type: 'Polygon', coordinates: [ [ @@ -468,12 +450,10 @@ test('parseWKT', async t => { ] ] }); - - t.end(); }); // NOTE(Kyle): Test disabled for now, to be fixed before 2.2.0 release -// test('WKTWorkerLoader', async t => { +// test('WKTWorkerLoader', async () => { // if (typeof Worker === 'undefined') { // t.comment('Worker is not usable in non-browser environments'); // t.end(); @@ -489,7 +469,7 @@ test('parseWKT', async t => { // t.end(); // }); -test('parseWKT#fuzz', t => { +test('parseWKT#fuzz', () => { fuzzer.seed(0); const inputs = [ 'MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)), ((15 5, 40 10, 10 20, 5 10, 15 5)))', @@ -502,10 +482,9 
@@ test('parseWKT#fuzz', t => { const input = fuzzer.mutate.string(str); try { parseSync(input, WKTLoader); - } catch (e) { - t.fail(`could not parse ${input}, exception ${e}`); + } catch (e) {throw new Error(`could not parse ${input}, exception ${e}`) + } } }); - t.end(); }); diff --git a/modules/gis/test/utils/hex-transcoder.spec.ts b/modules/gis/test/utils/hex-transcoder.spec.ts index e6454cfd83..c92166caff 100644 --- a/modules/gis/test/utils/hex-transcoder.spec.ts +++ b/modules/gis/test/utils/hex-transcoder.spec.ts @@ -2,37 +2,35 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {HexWKBLoader} from '@loaders.gl/wkt'; const isHexWKB = HexWKBLoader.testText; -test('datasetUtils.isHexWKB', t => { - t.notOk(isHexWKB(''), 'empty string is not a valid hex wkb'); +test('datasetUtils.isHexWKB', () => { + expect(isHexWKB(''), 'empty string is not a valid hex wkb').toBeFalsy(); // @ts-ignore null is not a string - t.notOk(isHexWKB(null), 'null is not a valid hex wkb'); + expect(isHexWKB(null), 'null is not a valid hex wkb').toBeFalsy(); const countyFIPS = '06075'; - t.notOk(isHexWKB(countyFIPS), 'FIPS code should not be a valid hex wkb'); + expect(isHexWKB(countyFIPS), 'FIPS code should not be a valid hex wkb').toBeFalsy(); const h3Code = '8a2a1072b59ffff'; - t.notOk(isHexWKB(h3Code), 'H3 code should not be a valid hex wkb'); + expect(isHexWKB(h3Code), 'H3 code should not be a valid hex wkb').toBeFalsy(); const randomHexStr = '8a2a1072b59ffff'; - t.notOk(isHexWKB(randomHexStr), 'A random hex string should not be a valid hex wkb'); + expect(isHexWKB(randomHexStr), 'A random hex string should not be a valid hex wkb').toBeFalsy(); const validWkt = '0101000000000000000000f03f0000000000000040'; - t.ok(isHexWKB(validWkt), 'A valid hex wkb should be valid'); + expect(isHexWKB(validWkt), 'A valid hex wkb should be valid').toBeTruthy(); const validEWkt = 
'0101000020e6100000000000000000f03f0000000000000040'; - t.ok(isHexWKB(validEWkt), 'A valid hex ewkb should be valid'); + expect(isHexWKB(validEWkt), 'A valid hex ewkb should be valid').toBeTruthy(); const validWktNDR = '00000000013ff0000000000000400000000000000040'; - t.ok(isHexWKB(validWktNDR), 'A valid hex wkb in NDR should be valid'); + expect(isHexWKB(validWktNDR), 'A valid hex wkb in NDR should be valid').toBeTruthy(); const validEWktNDR = '0020000001000013ff0000000000400000000000000040'; - t.ok(isHexWKB(validEWktNDR), 'A valid hex ewkb in NDR should be valid'); - - t.end(); + expect(isHexWKB(validEWktNDR), 'A valid hex ewkb in NDR should be valid').toBeTruthy(); }); diff --git a/modules/gis/test/wkt-crs/parse-wkt-crs.spec.ts b/modules/gis/test/wkt-crs/parse-wkt-crs.spec.ts index fdb848ede7..72f3aa6220 100644 --- a/modules/gis/test/wkt-crs/parse-wkt-crs.spec.ts +++ b/modules/gis/test/wkt-crs/parse-wkt-crs.spec.ts @@ -3,7 +3,7 @@ // Copyright (c) vis.gl contributors // parse-wkt-crs was forked from https://github.com/DanielJDufour/wkt-crs under Creative Commons CC0 1.0 license. 
-import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {parseSync, encodeTextSync} from '@loaders.gl/core'; import {WKTCRSLoader, WKTCRSWriter} from '@loaders.gl/wkt'; @@ -11,24 +11,20 @@ const roundtrip = wkt => encodeTextSync(parseSync(wkt, WKTCRSLoader, {raw: true} const condense = wkt => wkt.trim().replace(/(?<=[,\[\]])[ \n]+/g, ''); -test('parseWKTCRS#NAD27 / UTM zone 16N', t => { +test('parseWKTCRS#NAD27 / UTM zone 16N', () => { const wkt = 'PROJCS["NAD27 / UTM zone 16N",GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982139006,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4267"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-87],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","26716"]]'; const data = parseSync(wkt, WKTCRSLoader, {raw: false, debug: false}); // console.log(JSON.stringify(data, undefined, 2)); - t.deepEqual(data.length, 1); - t.deepEqual(Object.keys(data), ['0', 'PROJCS']); - t.deepEqual(data.PROJCS.AUTHORITY, ['AUTHORITY', 'EPSG', '26716']); - t.deepEqual(data.PROJCS === data[0], true); - t.deepEqual(data.PROJCS[1] === 'NAD27 / UTM zone 16N', true); - t.deepEqual(data.PROJCS.GEOGCS === data[0][2], true); - - // raw mode - // t.deepEqual(roundtrip(wkt), wkt); - t.end(); + expect(data.length).toEqual(1); + expect(Object.keys(data)).toEqual(['0', 'PROJCS']); + expect(data.PROJCS.AUTHORITY).toEqual(['AUTHORITY', 'EPSG', '26716']); + expect(data.PROJCS === data[0]).toEqual(true); + expect(data.PROJCS[1] === 'NAD27 / UTM zone 16N').toEqual(true); + expect(data.PROJCS.GEOGCS === data[0][2]).toEqual(true); }); -test('parseWKTCRS#wikipedia example', t => 
{ +test('parseWKTCRS#wikipedia example', () => { const wkt = `GEODCRS["WGS 84", DATUM["World Geodetic System 1984", ELLIPSOID["WGS 84", 6378137, 298.257223563, LENGTHUNIT["metre", 1]]], @@ -37,15 +33,13 @@ test('parseWKTCRS#wikipedia example', t => { AXIS["Longitude (lon)", east, ORDER[2]], ANGLEUNIT["degree", 0.0174532925199433]]`; const data = parseSync(wkt, WKTCRSLoader, {debug: false}); - t.deepEqual(data.GEODCRS[1], 'WGS 84'); - t.deepEqual(data.GEODCRS.DATUM.ELLIPSOID[3], 298.257223563); - t.deepEqual(data.GEODCRS.CS[1], 'ellipsoidal'); - t.deepEqual(data.GEODCRS.ANGLEUNIT[2], 0.0174532925199433); - // t.deepEqual(roundtrip(wkt), condense(wkt)); - t.end(); + expect(data.GEODCRS[1]).toEqual('WGS 84'); + expect(data.GEODCRS.DATUM.ELLIPSOID[3]).toEqual(298.257223563); + expect(data.GEODCRS.CS[1]).toEqual('ellipsoidal'); + expect(data.GEODCRS.ANGLEUNIT[2]).toEqual(0.0174532925199433); }); -test.skip('parseWKTCRS#wikipedia raw', t => { +test.skip('parseWKTCRS#wikipedia raw', () => { const wkt = `GEODCRS["WGS 84", DATUM["World Geodetic System 1984", ELLIPSOID["WGS 84", 6378137, 298.257223563, LENGTHUNIT["metre", 1]]], @@ -54,15 +48,14 @@ test.skip('parseWKTCRS#wikipedia raw', t => { AXIS["Longitude (lon)", east, ORDER[2]], ANGLEUNIT["degree", 0.0174532925199433]]`; const data = parseSync(wkt, WKTCRSLoader, {debug: false, raw: true}); - t.deepEqual(data.GEODCRS[1], 'WGS 84'); - t.deepEqual(data.GEODCRS.DATUM.ELLIPSOID[3], 'raw:298.257223563'); - t.deepEqual(data.GEODCRS.CS[1], 'raw:ellipsoidal'); - t.deepEqual(data.GEODCRS.ANGLEUNIT[2], 'raw:0.0174532925199433'); - t.deepEqual(roundtrip(wkt), condense(wkt)); - t.end(); + expect(data.GEODCRS[1]).toEqual('WGS 84'); + expect(data.GEODCRS.DATUM.ELLIPSOID[3]).toEqual('raw:298.257223563'); + expect(data.GEODCRS.CS[1]).toEqual('raw:ellipsoidal'); + expect(data.GEODCRS.ANGLEUNIT[2]).toEqual('raw:0.0174532925199433'); + expect(roundtrip(wkt)).toEqual(condense(wkt)); }); -test('parseWKTCRS#wikipedia concat', t => { 
+test('parseWKTCRS#wikipedia concat', () => { const wkt = ` CONCAT_MT[ PARAM_MT["Mercator_2SP", @@ -80,13 +73,11 @@ test('parseWKTCRS#wikipedia concat', t => { PARAMETER["elt 1 2",3]]] `; const data = parseSync(wkt, WKTCRSLoader, {debug: false, raw: true}); - t.deepEqual(data.CONCAT_MT.PARAM_MT, undefined); - t.deepEqual(data.CONCAT_MT.MULTIPLE_PARAM_MT.length, 2); - // t.deepEqual(roundtrip(wkt), condense(wkt)); - t.end(); + expect(data.CONCAT_MT.PARAM_MT).toEqual(undefined); + expect(data.CONCAT_MT.MULTIPLE_PARAM_MT.length).toEqual(2); }); -test.skip('parseWKTCRS#wikipedia datum shift', t => { +test.skip('parseWKTCRS#wikipedia datum shift', () => { const wkt = ` COORDINATEOPERATION["AGD84 to GDA94 Auslig 5m", SOURCECRS["…full CRS definition required here but omitted for brevity…"], @@ -102,41 +93,34 @@ test.skip('parseWKTCRS#wikipedia datum shift', t => { const data = parseSync(wkt, WKTCRSLoader, {debug: false, raw: true}); // stringifying array ignores keys added on const str = JSON.stringify(data); - t.deepEqual( - str, - '[["COORDINATEOPERATION","AGD84 to GDA94 Auslig 5m",["SOURCECRS","…full CRS definition required here but omitted for brevity…"],["TARGETCRS","…full CRS definition required here but omitted for brevity…"],["METHOD","Geocentric translations",["ID","EPSG","raw:1031"]],["PARAMETER","X-axis translation","raw:-128.5",["LENGTHUNIT","metre","raw:1"]],["PARAMETER","Y-axis translation","raw:-53.0",["LENGTHUNIT","metre","raw:1"]],["PARAMETER","Z-axis translation","raw:153.4",["LENGTHUNIT","metre","raw:1"]],["OPERATIONACCURACY","raw:5"],["AREA","Australia onshore"],["BBOX","raw:-43.7","raw:112.85","raw:-9.87","raw:153.68"]]]' - ); - t.deepEqual(roundtrip(wkt), condense(wkt)); - t.end(); + expect(str).toEqual('[["COORDINATEOPERATION","AGD84 to GDA94 Auslig 5m",["SOURCECRS","…full CRS definition required here but omitted for brevity…"],["TARGETCRS","…full CRS definition required here but omitted for brevity…"],["METHOD","Geocentric 
translations",["ID","EPSG","raw:1031"]],["PARAMETER","X-axis translation","raw:-128.5",["LENGTHUNIT","metre","raw:1"]],["PARAMETER","Y-axis translation","raw:-53.0",["LENGTHUNIT","metre","raw:1"]],["PARAMETER","Z-axis translation","raw:153.4",["LENGTHUNIT","metre","raw:1"]],["OPERATIONACCURACY","raw:5"],["AREA","Australia onshore"],["BBOX","raw:-43.7","raw:112.85","raw:-9.87","raw:153.68"]]]'); + expect(roundtrip(wkt)).toEqual(condense(wkt)); }); -test('parseWKTCRS#proj4js example', t => { +test('parseWKTCRS#proj4js example', () => { const wkt = 'PROJCS["NAD83 / Massachusetts Mainland",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.68333333333333],PARAMETER["standard_parallel_2",41.71666666666667],PARAMETER["latitude_of_origin",41],PARAMETER["central_meridian",-71.5],PARAMETER["false_easting",200000],PARAMETER["false_northing",750000],AUTHORITY["EPSG","26986"],AXIS["X",EAST],AXIS["Y",NORTH]]'; const data = parseSync(wkt, WKTCRSLoader); - t.deepEqual(data.PROJCS[1], 'NAD83 / Massachusetts Mainland'); - t.end(); + expect(data.PROJCS[1]).toEqual('NAD83 / Massachusetts Mainland'); }); -test('parseWKTCRS#parse attribute that ends in number (TOWGS84)', t => { +test('parseWKTCRS#parse attribute that ends in number (TOWGS84)', () => { const wkt = ' GEOGCS["SAD69",DATUM["South_American_Datum_1969",SPHEROID["GRS 1967 Modified",6378160,298.25,AUTHORITY["EPSG","7050"]],TOWGS84[-57,1,-41,0,0,0,0],AUTHORITY["EPSG","6618"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4618"]]'; - t.deepEqual(roundtrip(wkt), condense(wkt)); - t.end(); + 
expect(roundtrip(wkt)).toEqual(condense(wkt)); }); -test.skip('parseWKTCRS#another parse bug', t => { +test.skip('parseWKTCRS#another parse bug', () => { const wkt = 'PROJCS["ETRS89 / TM35FIN(E,N)",GEOGCS["ETRS89",DATUM["European_Terrestrial_Reference_System_1989",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6258"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4258"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",27],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","3067"]]'; const data = parseSync(wkt, WKTCRSLoader, {debug: false}); - t.deepEqual(data.PROJCS[1], 'ETRS89 / TM35FIN(E,N)'); - t.deepEqual(data.PROJCS.MULTIPLE_AXIS[1][2], 'NORTH'); - t.deepEqual(roundtrip(wkt), wkt); - t.end(); + expect(data.PROJCS[1]).toEqual('ETRS89 / TM35FIN(E,N)'); + expect(data.PROJCS.MULTIPLE_AXIS[1][2]).toEqual('NORTH'); + expect(roundtrip(wkt)).toEqual(wkt); }); // Not clear where to find crs.json -// test.skip('parseWKTCRS#try to parse everything in crs.json', (t) => { +// test.skip('parseWKTCRS#try to parse everything in crs.json', () => { // let data = require('./crs.json'); // data = data.map(({wkt, esriwkt, prettywkt}) => ({ // raw: { @@ -159,7 +143,7 @@ test.skip('parseWKTCRS#another parse bug', t => { // }); // }); -// test("7.5.6.3 Axis unit for ordinal coordinate systems", t => { +// test("7.5.6.3 Axis unit for ordinal coordinate systems", () => { // const wkt = `NULL[CS[ordinal,2], // AXIS["inline (I)",southeast,ORDER[1]], // AXIS["crossline (J)",northeast,ORDER[2]]]`; @@ -168,69 +152,58 @@ test.skip('parseWKTCRS#another parse bug', t => { // t.end(); // }); -test.skip('parseWKTCRS#sort parameters', t => { 
+test.skip('parseWKTCRS#sort parameters', () => { const wkt = 'PROJCS["WGS_1984_Antarctic_Polar_Stereographic",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Stereographic_South_Pole"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",-71.0],UNIT["Meter",1.0]]'; const data = parseSync(wkt, WKTCRSLoader, {debug: false, raw: true, sort: true}); - t.deepEqual( - encodeTextSync(data, WKTCRSWriter), - 'PROJCS["WGS_1984_Antarctic_Polar_Stereographic",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Stereographic_South_Pole"],PARAMETER["Central_Meridian",0.0],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Standard_Parallel_1",-71.0],UNIT["Meter",1.0]]' - ); - t.end(); + expect(encodeTextSync(data, WKTCRSWriter)).toEqual('PROJCS["WGS_1984_Antarctic_Polar_Stereographic",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Stereographic_South_Pole"],PARAMETER["Central_Meridian",0.0],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Standard_Parallel_1",-71.0],UNIT["Meter",1.0]]'); }); -// test("sort example", t => { +// test("sort example", () => { // const data = ["EXAMPLE", ["AXIS", "Northing", "raw:NORTH"], ["AXIS", "Easting", "raw:EAST"]]; // wktcrs.sort(data); // t.deepEqual(data, ["EXAMPLE", ["AXIS", "Easting", "raw:EAST"], ["AXIS", "Northing", "raw:NORTH"]]); // t.end(); // }); -test.skip('parseWKTCRS#sort params', t => { +test.skip('parseWKTCRS#sort params', () => { const wkt = 'PARAMETERS[PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-87],PARAMETER["scale_factor",0.9996]]'; let data = parseSync(wkt, 
WKTCRSLoader, {debug: false, raw: true}); - t.deepEqual(data[0].MULTIPLE_PARAMETER, [ + expect(data[0].MULTIPLE_PARAMETER).toEqual([ ['PARAMETER', 'latitude_of_origin', 'raw:0'], ['PARAMETER', 'central_meridian', 'raw:-87'], ['PARAMETER', 'scale_factor', 'raw:0.9996'] ]); data = parseSync(wkt, WKTCRSLoader, {debug: false, raw: true, sort: true}); - t.deepEqual(data[0].MULTIPLE_PARAMETER, [ + expect(data[0].MULTIPLE_PARAMETER).toEqual([ ['PARAMETER', 'central_meridian', 'raw:-87'], ['PARAMETER', 'latitude_of_origin', 'raw:0'], ['PARAMETER', 'scale_factor', 'raw:0.9996'] ]); - t.deepEqual( - encodeTextSync(data, WKTCRSWriter), - 'PARAMETERS[PARAMETER["central_meridian",-87],PARAMETER["latitude_of_origin",0],PARAMETER["scale_factor",0.9996]]' - ); - t.end(); + expect(encodeTextSync(data, WKTCRSWriter)).toEqual('PARAMETERS[PARAMETER["central_meridian",-87],PARAMETER["latitude_of_origin",0],PARAMETER["scale_factor",0.9996]]'); }); -test('parseWKTCRS#parse inner parens', t => { +test('parseWKTCRS#parse inner parens', () => { const wkt = 'GEOGCS["GRS 1980(IUGG, 1980)",DATUM["unknown",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["epsg","7686"]]'; const data = parseSync(wkt, WKTCRSLoader, {debug: false, raw: true}); - t.deepEqual(data.GEOGCS[0], 'GEOGCS'); - t.end(); + expect(data.GEOGCS[0]).toEqual('GEOGCS'); }); -test.skip('WKTCRSWriter#authority', t => { +test.skip('WKTCRSWriter#authority', () => { const authority = ['AUTHORITY', 'EPSG', '9122']; const unparsed = encodeTextSync(authority, WKTCRSWriter); - t.deepEqual(unparsed, {data: 'AUTHORITY["EPSG","9122"]'}); - t.end(); + expect(unparsed).toEqual({data: 'AUTHORITY["EPSG","9122"]'}); }); -test.skip('WKTCRSWriter#PRIMEM', t => { +test.skip('WKTCRSWriter#PRIMEM', () => { const authority = ['PRIMEM', 'Greenwich', 0, ['AUTHORITY', 'EPSG', '8901']]; const unparsed = encodeTextSync(authority, WKTCRSWriter); - t.deepEqual(unparsed, {data: 
'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]]'}); - t.end(); + expect(unparsed).toEqual({data: 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]]'}); }); -test.skip('WKTCRSWriter#DATUM', t => { +test.skip('WKTCRSWriter#DATUM', () => { const datum = [ 'DATUM', 'North_American_Datum_1927', @@ -238,13 +211,12 @@ test.skip('WKTCRSWriter#DATUM', t => { ['AUTHORITY', 'EPSG', '6267'] ]; const unparsed = encodeTextSync(datum, WKTCRSWriter); - t.deepEqual(unparsed, { + expect(unparsed).toEqual({ data: 'DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982139006,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]]' }); - t.end(); }); -test.skip('WKTCRSWriter#GEOGCS', t => { +test.skip('WKTCRSWriter#GEOGCS', () => { const data = [ 'GEOGCS', 'NAD27', @@ -259,7 +231,7 @@ test.skip('WKTCRSWriter#GEOGCS', t => { ['AUTHORITY', 'EPSG', '4267'] ]; const unparsed = encodeTextSync(data, WKTCRSWriter); - t.deepEqual(unparsed, { + expect(unparsed).toEqual({ data: 'GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982139006,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4267"]]' }); }); diff --git a/modules/mvt/test/lib/mapbox-vt-pbf/to-vector-tile.spec.js b/modules/mvt/test/lib/mapbox-vt-pbf/to-vector-tile.spec.js index 7be8800c1a..bbba019242 100644 --- a/modules/mvt/test/lib/mapbox-vt-pbf/to-vector-tile.spec.js +++ b/modules/mvt/test/lib/mapbox-vt-pbf/to-vector-tile.spec.js @@ -1,5 +1,5 @@ // @ts-nocheck -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile} from '@loaders.gl/core'; import Pbf from 'pbf'; import VectorTile from '@loaders.gl/mvt/lib/mapbox-vector-tile-js/vector-tile'; @@ -9,7 +9,7 @@ import GeoJsonEquality from 'geojson-equality'; const eq = new GeoJsonEquality({precision: 1}); -test('property encoding: JSON.stringify non-primitive values', 
t => { +test('property encoding: JSON.stringify non-primitive values', () => { // Includes two properties with a common non-primitive value for // https://github.com/mapbox/vt-pbf/issues/9 const orig = { @@ -53,14 +53,13 @@ test('property encoding: JSON.stringify non-primitive values', t => { const first = layer.feature(0).properties; const second = layer.feature(1).properties; - t.same(first.c, '{"hello":"world"}'); - t.same(first.d, '[1,2,3]'); - t.same(second.c, '{"goodbye":"planet"}'); - t.same(second.d, '{"hello":"world"}'); - t.end(); + expect(first.c).toBe('{"hello":"world"}'); + expect(first.d).toBe('[1,2,3]'); + expect(second.c).toBe('{"goodbye":"planet"}'); + expect(second.d).toBe('{"hello":"world"}'); }); -test('number encoding https://github.com/mapbox/vt-pbf/pull/11', t => { +test('number encoding https://github.com/mapbox/vt-pbf/pull/11', () => { const orig = { type: 'Feature', properties: { @@ -80,12 +79,12 @@ test('number encoding https://github.com/mapbox/vt-pbf/pull/11', t => { const layer = vt.layers.geojsonLayer; const properties = layer.feature(0).properties; - t.equal(properties.large_integer, 39953616224); - t.equal(properties.non_integer, 331.75415); - t.end(); + expect(properties.large_integer).toBe(39953616224); + expect(properties.non_integer).toBe(331.75415); + }); -test('id encoding', t => { +test('id encoding', () => { const orig = { type: 'FeatureCollection', features: [ @@ -123,13 +122,12 @@ test('id encoding', t => { const buff = fromGeojsonVt({geojsonLayer: tile}); const vt = new VectorTile(new Pbf(buff)); const layer = vt.layers.geojsonLayer; - t.same(layer.feature(0).id, 123); - t.notOk(layer.feature(1).id, 'Non-integer values should not be saved'); - t.notOk(layer.feature(2).id); - t.end(); + expect(layer.feature(0).id).toBe(123); + expect(layer.feature(1).id, 'Non-integer values should not be saved').toBeFalsy(); + expect(layer.feature(2).id).toBeFalsy(); }); -test('accept geojson-vt options 
https://github.com/mapbox/vt-pbf/pull/21', async t => { +test('accept geojson-vt options https://github.com/mapbox/vt-pbf/pull/21', async () => { const RECTANGLE_URL = '@loaders.gl/mvt/test/data/mapbox-vt-pbf-fixtures/rectangle.geojson'; const response = await fetchFile(RECTANGLE_URL); const orig = await response.json(); @@ -149,8 +147,8 @@ test('accept geojson-vt options https://github.com/mapbox/vt-pbf/pull/21', async features.push(feat); } - t.equal(layer.version, options.version, 'version should be equal'); - t.equal(layer.extent, options.extent, 'extent should be equal'); + expect(layer.version, 'version should be equal').toBe(options.version); + expect(layer.extent, 'extent should be equal').toBe(options.extent); orig.features.forEach(function (expected) { const actual = features.shift(); @@ -158,8 +156,6 @@ test('accept geojson-vt options https://github.com/mapbox/vt-pbf/pull/21', async // TODO - this was added in loaders fork to make tests pass, investigate why it is needed delete expected.id; - t.ok(eq.compare(actual, expected)); + expect(eq.compare(actual, expected)).toBeTruthy(); }); - - t.end(); }); diff --git a/modules/mvt/test/lib/mapbox-vt-pbf/vector-tile-roundtrip.spec.js b/modules/mvt/test/lib/mapbox-vt-pbf/vector-tile-roundtrip.spec.js index 6ab7f3f3d4..98814d8b94 100644 --- a/modules/mvt/test/lib/mapbox-vt-pbf/vector-tile-roundtrip.spec.js +++ b/modules/mvt/test/lib/mapbox-vt-pbf/vector-tile-roundtrip.spec.js @@ -1,5 +1,5 @@ // @ts-nocheck -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {isBrowser} from '@loaders.gl/loader-utils'; import VectorTile from '@loaders.gl/mvt/lib/mapbox-vector-tile-js/vector-tile'; import {fromGeojsonVt, fromVectorTileJs} from '@loaders.gl/mvt/lib/mapbox-vt-pbf/to-vector-tile'; @@ -18,13 +18,19 @@ const vtvalidate = { const eq = new GeoJsonEquality({precision: 1}); -test('geojson-vt', t => { - if (isBrowser) { - t.comment('Skipping as @mapbox/geojson-fixtures is only supported in 
Node.js'); - t.end(); - return; - } +function validateTileAsync(buffer) { + return new Promise((resolve, reject) => { + vtvalidate.isValid(buffer, (error, invalid) => { + if (error) { + reject(error); + return; + } + resolve(invalid); + }); + }); +} +test.skipIf(isBrowser)('geojson-vt', async () => { const geometryTypes = [ 'polygon', 'point', @@ -41,90 +47,75 @@ test('geojson-vt', t => { }; }); - fixtures.forEach(function (fixture) { - t.comment(`Testing ${fixture.name}`); + for (const fixture of fixtures) { const tile = geojsonVt(fixture.data).getTile(0, 0, 0); - const buff = fromGeojsonVt({geojsonLayer: tile}); - vtvalidate.isValid(buff, (err, invalid) => { - t.error(err); - - t.ok(!invalid, invalid); - - // Compare roundtripped features with originals - const expected = - fixture.data.type === 'FeatureCollection' ? fixture.data.features : [fixture.data]; - const layer = new VectorTile(new Pbf(buff)).layers.geojsonLayer; - t.equal(layer.length, expected.length, `${expected.length} features`); - for (let i = 0; i < layer.length; i++) { - const actual = layer.feature(i).toGeoJSON(0, 0, 0); - t.ok(eq.compare(actual, expected[i]), `feature ${i}`); - } - t.end(); - }); - }); + const buffer = fromGeojsonVt({geojsonLayer: tile}); + const invalid = await validateTileAsync(buffer); + + expect(!invalid, invalid).toBeTruthy(); - t.end(); + const expected = fixture.data.type === 'FeatureCollection' ? 
fixture.data.features : [fixture.data]; + const layer = new VectorTile(new Pbf(buffer)).layers.geojsonLayer; + expect(layer.length, `${expected.length} features`).toBe(expected.length); + for (let index = 0; index < layer.length; index++) { + const actual = layer.feature(index).toGeoJSON(0, 0, 0); + expect(eq.compare(actual, expected[index]), `feature ${index}`).toBeTruthy(); + } + } }); -test('vector-tile-js', t => { +test('vector-tile-js', async () => { // See https://github.com/mapbox/mvt-fixtures/blob/master/FIXTURES.md for // fixture descriptions + const fixtures = []; mvtf.each(function (fixture) { - // skip invalid tiles - if (!fixture.validity.v2) return; + fixtures.push(fixture); + }); - t.comment(`mvt-fixtures: ${fixture.id} ${fixture.description}`); - const original = new VectorTile(new Pbf(fixture.buffer)); + for (const fixture of fixtures) { + if (!fixture.validity.v2) { + continue; + } if (fixture.id === '020') { - t.comment('Skipping test due to https://github.com/mapbox/vt-pbf/issues/30'); - t.end(); - return; + continue; } if (fixture.id === '049' || fixture.id === '050') { - t.comment('Skipping test due to https://github.com/mapbox/vt-pbf/issues/31'); - t.end(); - return; + continue; } - const buff = fromVectorTileJs(original); - const roundtripped = new VectorTile(new Pbf(buff)); - - vtvalidate.isValid(buff, (err, invalid) => { - t.error(err); + const original = new VectorTile(new Pbf(fixture.buffer)); + const buffer = fromVectorTileJs(original); + const roundtripped = new VectorTile(new Pbf(buffer)); + let invalid = await validateTileAsync(buffer); - if (invalid && invalid === 'ClosePath command count is not 1') { - t.comment('Skipping test due to https://github.com/mapbox/vt-pbf/issues/28'); - t.end(); - return; - } + if (invalid && invalid === 'ClosePath command count is not 1') { + continue; + } - // UNKOWN geometry type is valid in the spec, but vtvalidate considers - // it an error - if (fixture.id === '016' || fixture.id === '039') { - 
invalid = null; - } + // UNKOWN geometry type is valid in the spec, but vtvalidate considers + // it an error + if (fixture.id === '016' || fixture.id === '039') { + invalid = null; + } - t.ok(!invalid, invalid); - - // Compare roundtripped features with originals - for (const name in original.layers) { - const originalLayer = original.layers[name]; - t.ok(roundtripped.layers[name], `layer ${name}`); - const roundtrippedLayer = roundtripped.layers[name]; - t.equal(roundtrippedLayer.length, originalLayer.length); - for (let i = 0; i < originalLayer.length; i++) { - const actual = roundtrippedLayer.feature(i); - const expected = originalLayer.feature(i); - - t.equal(actual.id, expected.id, 'id'); - t.equal(actual.type, expected.type, 'type'); - t.deepEqual(actual.properties, expected.properties, 'properties'); - t.deepEqual(actual.loadGeometry(), expected.loadGeometry(), 'geometry'); - } + expect(!invalid, invalid).toBeTruthy(); + + for (const name in original.layers) { + const originalLayer = original.layers[name]; + expect(roundtripped.layers[name], `layer ${name}`).toBeTruthy(); + const roundtrippedLayer = roundtripped.layers[name]; + expect(roundtrippedLayer.length).toBe(originalLayer.length); + for (let index = 0; index < originalLayer.length; index++) { + const actual = roundtrippedLayer.feature(index); + const expected = originalLayer.feature(index); + + expect(actual.id, 'id').toBe(expected.id); + expect(actual.type, 'type').toBe(expected.type); + expect(actual.properties, 'properties').toEqual(expected.properties); + expect(actual.loadGeometry(), 'geometry').toEqual(expected.loadGeometry()); } - }); - }); - t.end(); + } + } }); diff --git a/modules/mvt/test/lib/parse-mvt-from-pbf.spec.ts b/modules/mvt/test/lib/parse-mvt-from-pbf.spec.ts index 4157e0e48c..37201c8d8c 100644 --- a/modules/mvt/test/lib/parse-mvt-from-pbf.spec.ts +++ b/modules/mvt/test/lib/parse-mvt-from-pbf.spec.ts @@ -3,7 +3,7 @@ // Copyright (c) vis.gl contributors // import type 
{BinaryFeatureCollection} from '@loaders.gl/schema'; -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {parseMVT} from '../../src/lib/mvt-pbf/parse-mvt-from-pbf'; import {fetchFile} from '@loaders.gl/core'; // import {geojsonToBinary, binaryToGeojson} from '@loaders.gl/gis'; @@ -42,25 +42,24 @@ const MVT_POINTS_DATA_URL = '@loaders.gl/mvt/test/data/mvt/points_4-2-6.mvt'; // _workerType: 'test' // }); -test('Point MVT to local coordinates JSON', async t => { +test('Point MVT to local coordinates JSON', async () => { const response = await fetchFile(MVT_POINTS_DATA_URL); const mvtArrayBuffer = await response.arrayBuffer(); const tile = parseMVT(mvtArrayBuffer); - t.deepEqual(tile.layers.layer0.length, 1, 'layer0 has 1 feature'); - // t.deepEqual(tile.layers.layer0.idColumn[0], 1, 'idColumn is 1'); - t.deepEqual(tile.layers.layer0.geometryTypeColumn[0], 1, 'geometryTypeColumn is 1'); - t.deepEqual(tile.layers.layer0.columns.cartodb_id[0], 3, 'cartodb_id is 3'); - t.deepEqual(tile.layers.layer0.columns._cdb_feature_count[0], 1, '_cdb_feature_count is 1'); - t.deepEqual( + expect(tile.layers.layer0.length, 'layer0 has 1 feature').toEqual(1); + // expect(tile.layers.layer0.idColumn[0], 'idColumn is 1').toEqual(1); + expect(tile.layers.layer0.geometryTypeColumn[0], 'geometryTypeColumn is 1').toEqual(1); + expect(tile.layers.layer0.columns.cartodb_id[0], 'cartodb_id is 3').toEqual(3); + expect(tile.layers.layer0.columns._cdb_feature_count[0], '_cdb_feature_count is 1').toEqual(1); + expect( tile.layers.layer0.schema.fields, - [ - {name: 'cartodb_id', type: 'uint32', nullable: false}, - {name: '_cdb_feature_count', type: 'uint32', nullable: false} - ], 'schema fields are correct' - ); + ).toEqual([ + {name: 'cartodb_id', type: 'uint32', nullable: false}, + {name: '_cdb_feature_count', type: 'uint32', nullable: false} + ]); // { // type: 'Feature', @@ -78,7 +77,6 @@ test('Point MVT to local coordinates JSON', async t => { // } // ]); - 
t.end(); }); // test('Line MVT to local coordinates JSON', async (t) => { @@ -104,7 +102,7 @@ test('Point MVT to local coordinates JSON', async t => { // } // ]); -// t.end(); +// // }); // test('Polygon MVT to local coordinates JSON', async (t) => { @@ -112,9 +110,9 @@ test('Point MVT to local coordinates JSON', async t => { // const mvtArrayBuffer = await response.arrayBuffer(); // const geometryJSON = await parse(mvtArrayBuffer, MVTLoader); -// t.deepEqual(geometryJSON, decodedPolygonsGeometry); +// expect(geometryJSON).toEqual(decodedPolygonsGeometry); -// t.end(); +// // }); // test('MVTLoader#Parse Point MVT', async (t) => { @@ -143,12 +141,12 @@ test('Point MVT to local coordinates JSON', async t => { // if (binary) { // // @ts-ignore // expected = geojsonToBinary(expected); -// t.ok(geometry.byteLength > 0); +// expect(geometry.byteLength > 0).toBeTruthy(); // delete geometry.byteLength; // } -// t.deepEqual(geometry, expected, `Parsed Point MVT as ${outputFormat}`); +// expect(geometry, `Parsed Point MVT as ${outputFormat}`).toEqual(expected); // } -// t.end(); +// // }); // test('MVTLoader#Parse Lines MVT', async (t) => { @@ -177,12 +175,12 @@ test('Point MVT to local coordinates JSON', async t => { // if (binary) { // // @ts-ignore // expected = geojsonToBinary(expected); -// t.ok(geometry.byteLength > 0); +// expect(geometry.byteLength > 0).toBeTruthy(); // delete geometry.byteLength; // } -// t.deepEqual(geometry, expected, `Parsed Lines MVT as ${outputFormat}`); +// expect(geometry, `Parsed Lines MVT as ${outputFormat}`).toEqual(expected); // } -// t.end(); +// // }); // test('MVTLoader#Parse Polygons MVT', async (t) => { @@ -211,12 +209,12 @@ test('Point MVT to local coordinates JSON', async t => { // if (binary) { // // @ts-ignore // expected = geojsonToBinary(expected, {fixRingWinding: false}); -// t.ok(geometry.byteLength > 0); +// expect(geometry.byteLength > 0).toBeTruthy(); // delete geometry.byteLength; // } -// t.deepEqual(geometry, expected, 
`Parsed Polygons MVT as ${outputFormat}`); +// expect(geometry, `Parsed Polygons MVT as ${outputFormat}`).toEqual(expected); // } -// t.end(); +// // }); // test('Should raise an error when coordinates param is wgs84 and tileIndex is missing', async (t) => { @@ -227,9 +225,9 @@ test('Point MVT to local coordinates JSON', async t => { // mvt: {coordinates: 'wgs84'} // }; -// t.throws(() => parseSync(mvtArrayBuffer, MVTLoader, loaderOptions)); +// expect(() => parseSync(mvtArrayBuffer, MVTLoader, loaderOptions)).toThrow(); -// t.end(); +// // }); // test('Should add layer name to custom property', async (t) => { @@ -241,9 +239,9 @@ test('Point MVT to local coordinates JSON', async t => { // }; // const geometryJSON = await parse(mvtArrayBuffer, MVTLoader, loaderOptions); -// t.equals(geometryJSON[0].properties.layerSource, 'layer0'); +// expect(geometryJSON[0].properties.layerSource).toBe('layer0'); -// t.end(); +// // }); // test('Should return features from selected layers when layers property is provided', async (t) => { @@ -259,9 +257,9 @@ test('Point MVT to local coordinates JSON', async t => { // (feature) => feature.properties.layerName !== 'layer1' // ); // t.false(anyFeatureFromAnotherLayer); -// t.equals(geometryJSON[0].properties.layerName, 'layer1'); +// expect(geometryJSON[0].properties.layerName).toBe('layer1'); -// t.end(); +// // }); // test('Polygon MVT to local coordinates binary', async (t) => { @@ -269,13 +267,13 @@ test('Point MVT to local coordinates JSON', async t => { // const mvtArrayBuffer = await response.arrayBuffer(); // const geometryBinary = await parse(mvtArrayBuffer, MVTLoader, {gis: {format: 'binary'}}); -// t.ok(geometryBinary.byteLength > 0); +// expect(geometryBinary.byteLength > 0).toBeTruthy(); // delete geometryBinary.byteLength; // // @ts-ignore deduced type of 'Feature' is string... 
// const expectedBinary = geojsonToBinary(decodedPolygonsGeometry); -// t.deepEqual(geometryBinary, expectedBinary); -// t.end(); +// expect(geometryBinary).toEqual(expectedBinary); +// // }); // test('MVTLoader#Parse geojson-to-binary', async (t) => { @@ -291,9 +289,9 @@ test('Point MVT to local coordinates JSON', async t => { // delete binary.byteLength; // const expectedBinary = geojsonToBinary(geojson); -// t.deepEqual(expectedBinary, binary); +// expect(expectedBinary).toEqual(binary); // } -// t.end(); +// // }); // test('Features with top-level id', async (t) => { @@ -301,30 +299,30 @@ test('Point MVT to local coordinates JSON', async t => { // const mvtArrayBuffer = await response.arrayBuffer(); // const binary = await parse(mvtArrayBuffer, MVTLoader, {mvt: {shape: 'binary'}}); -// t.ok(binary.points.fields.length, 'feature.id fields are preserved'); -// t.ok(binary.lines.fields.length, 'feature.id fields are preserved'); -// t.ok(binary.polygons.fields.length, 'feature.id fields are preserved'); +// expect(binary.points.fields.length, 'feature.id fields are preserved').toBeTruthy(); +// expect(binary.lines.fields.length, 'feature.id fields are preserved').toBeTruthy(); +// expect(binary.polygons.fields.length, 'feature.id fields are preserved').toBeTruthy(); // const feature = binaryToGeojson(binary, { // globalFeatureId: binary.points.globalFeatureIds.value[0] // }); // // @ts-ignore -// t.ok(feature.id, 'feature.id is restored'); +// expect(feature.id, 'feature.id is restored').toBeTruthy(); -// t.end(); +// // }); // test('Empty MVT must return empty binary format', async (t) => { // const emptyMVTArrayBuffer = new Uint8Array(); // const geometryBinary = await parse(emptyMVTArrayBuffer, MVTLoader, {gis: {format: 'binary'}}); -// t.ok(geometryBinary.points); -// t.ok(geometryBinary.lines); -// t.ok(geometryBinary.polygons); -// t.ok(geometryBinary.points.positions.size === 2); -// t.ok(geometryBinary.lines.positions.size === 2); -// 
t.ok(geometryBinary.polygons.positions.size === 2); - -// t.end(); +// expect(geometryBinary.points).toBeTruthy(); +// expect(geometryBinary.lines).toBeTruthy(); +// expect(geometryBinary.polygons).toBeTruthy(); +// expect(geometryBinary.points.positions.size === 2).toBeTruthy(); +// expect(geometryBinary.lines.positions.size === 2).toBeTruthy(); +// expect(geometryBinary.polygons.positions.size === 2).toBeTruthy(); + +// // }); // test('Triangulation is supported', async (t) => { @@ -335,17 +333,17 @@ test('Point MVT to local coordinates JSON', async t => { // }); // // Closed polygon with 31 vertices (0===30) -// t.ok(geometry.polygons.positions); -// t.equals(geometry.polygons.positions.value.length, 62); +// expect(geometry.polygons.positions).toBeTruthy(); +// expect(geometry.polygons.positions.value.length).toBe(62); -// t.ok(geometry.polygons.triangles); -// t.equals(geometry.polygons.triangles.value.length, 84); +// expect(geometry.polygons.triangles).toBeTruthy(); +// expect(geometry.polygons.triangles.value.length).toBe(84); // // Basic check that triangulation is valid // const minI = Math.min(...geometry.polygons.triangles.value); // const maxI = Math.max(...geometry.polygons.triangles.value); -// t.equals(minI, 0); -// t.equals(maxI, 29); // Don't expect to find 30 as closed polygon +// expect(minI).toBe(0); +// expect(maxI).toBe(29); // Don't expect to find 30 as closed polygon -// t.end(); +// // }); diff --git a/modules/mvt/test/lib/utils/geometry-utils.spec.ts b/modules/mvt/test/lib/utils/geometry-utils.spec.ts index b1151dbcd6..d973b11441 100644 --- a/modules/mvt/test/lib/utils/geometry-utils.spec.ts +++ b/modules/mvt/test/lib/utils/geometry-utils.spec.ts @@ -3,7 +3,7 @@ // Copyright (c) vis.gl contributors // import type {BinaryFeatureCollection} from '@loaders.gl/schema'; -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {classifyRingsFlat} from '@loaders.gl/mvt/lib/utils/geometry-utils'; const loadJSON = async 
(relativePath: string) => { @@ -23,38 +23,38 @@ const [ringsSingleRing, ringsRingAndHole, ringsTwoRings, ringsZeroSizeHole] = aw loadJSON('../../data/rings/rings_zero_size_hole.json') ]); -test('classifyRingsFlat#single ring', async t => { +test('classifyRingsFlat#single ring', async () => { const geom = {...ringsSingleRing}; const classified = classifyRingsFlat(geom); - t.deepEqual(classified.areas, [[-0.02624368667602539]]); - t.deepEqual(classified.indices, [[0]]); - t.end(); + expect(classified.areas).toEqual([[-0.02624368667602539]]); + expect(classified.indices).toEqual([[0]]); + }); -test('classifyRingsFlat#ring and hole', async t => { +test('classifyRingsFlat#ring and hole', async () => { const geom = {...ringsRingAndHole}; const classified = classifyRingsFlat(geom); - t.deepEqual(classified.areas, [[-0.02624368667602539, 0.001363515853881836]]); - t.deepEqual(classified.indices, [[0, 10]]); - t.end(); + expect(classified.areas).toEqual([[-0.02624368667602539, 0.001363515853881836]]); + expect(classified.indices).toEqual([[0, 10]]); + }); -test('classifyRingsFlat#two rings', async t => { +test('classifyRingsFlat#two rings', async () => { const geom = {...ringsTwoRings}; const classified = classifyRingsFlat(geom); - t.deepEqual(classified.areas, [[-0.02624368667602539], [-0.001363515853881836]]); - t.deepEqual(classified.indices, [[0], [10]]); - t.end(); + expect(classified.areas).toEqual([[-0.02624368667602539], [-0.001363515853881836]]); + expect(classified.indices).toEqual([[0], [10]]); + }); -test('classifyRingsFlat#zero sized hole', async t => { +test('classifyRingsFlat#zero sized hole', async () => { // In addition to checking the result, // verify that the data array is shortened const geom = {...ringsZeroSizeHole}; - t.equal(geom.data.length, 20); + expect(geom.data.length).toBe(20); const classified = classifyRingsFlat(geom); - t.deepEqual(classified.areas, [[-0.44582176208496094]]); - t.deepEqual(classified.indices, [[0]]); -
t.equal(classified.data.length, 12); - t.end(); + expect(classified.areas).toEqual([[-0.44582176208496094]]); + expect(classified.indices).toEqual([[0]]); + expect(classified.data.length).toBe(12); + }); diff --git a/modules/mvt/test/lib/vector-tiler/clip-features.spec.ts b/modules/mvt/test/lib/vector-tiler/clip-features.spec.ts index cda1810951..10436cd808 100644 --- a/modules/mvt/test/lib/vector-tiler/clip-features.spec.ts +++ b/modules/mvt/test/lib/vector-tiler/clip-features.spec.ts @@ -3,7 +3,7 @@ // Copyright (c) vis.gl contributors // Forked from https://github.com/mapbox/geojson-vt under compatible ISC license -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; // @ts-ignore-error import {clipFeatures} from '@loaders.gl/mvt/lib/vector-tiler/features/clip-features'; @@ -15,7 +15,7 @@ const geom1 = [ ]; const geom2 = [0, 0, 0, 50, 0, 0, 50, 10, 0, 0, 10, 0]; -test('VectorTiler#clipFeatures#clips polylines', t => { +test('VectorTiler#clipFeatures#clips polylines', () => { const clipped = clipFeatures( [ {geometry: geom1, type: 'LineString', tags: 1, minX: 0, minY: 0, maxX: 50, maxY: 60}, @@ -61,12 +61,10 @@ test('VectorTiler#clipFeatures#clips polylines', t => { } ]; - t.equal(JSON.stringify(clipped), JSON.stringify(expected)); - - t.end(); + expect(clipped).toEqual(expected); }); -test('VectorTiler#clipFeatures#clips lines with line metrics on', t => { +test('VectorTiler#clipFeatures#clips lines with line metrics on', () => { const geom = geom1.slice(); // @ts-expect-error geom.size = 0; @@ -92,24 +90,21 @@ test('VectorTiler#clipFeatures#clips lines with line metrics on', t => { {lineMetrics: true} ); - t.same( + expect( clipped.map(f => [f.geometry.start, f.geometry.end]), - [ - [10, 40], - [70, 130], - [160, 200], - [230, 245] - ] - ); - - t.end(); + ).toEqual([ + [10, 40], + [70, 130], + [160, 200], + [230, 245] + ]); }); function closed(geometry) { return [geometry.concat(geometry.slice(0, 3))]; } 
-test('VectorTiler#clipFeatures#clips polygons', t => { +test('VectorTiler#clipFeatures#clips polygons', () => { const clipped = clipFeatures( [ {geometry: closed(geom1), type: 'Polygon', tags: 1, minX: 0, minY: 0, maxX: 50, maxY: 60}, @@ -152,12 +147,10 @@ test('VectorTiler#clipFeatures#clips polygons', t => { } ]; - t.equal(JSON.stringify(clipped), JSON.stringify(expected)); - - t.end(); + expect(clipped).toEqual(expected); }); -test('VectorTiler#clipFeatures#clips points', t => { +test('VectorTiler#clipFeatures#clips points', () => { const clipped = clipFeatures( [ {geometry: geom1, type: 'MultiPoint', tags: 1, minX: 0, minY: 0, maxX: 50, maxY: 60}, @@ -172,7 +165,7 @@ test('VectorTiler#clipFeatures#clips points', t => { {} ); - t.same(clipped, [ + expect(clipped).toEqual([ { id: null, type: 'MultiPoint', @@ -185,6 +178,4 @@ test('VectorTiler#clipFeatures#clips points', t => { maxY: 60 } ]); - - t.end(); }); diff --git a/modules/mvt/test/lib/vector-tiler/simplify-path.spec.ts b/modules/mvt/test/lib/vector-tiler/simplify-path.spec.ts index 4443760c3e..1c288c4e8b 100644 --- a/modules/mvt/test/lib/vector-tiler/simplify-path.spec.ts +++ b/modules/mvt/test/lib/vector-tiler/simplify-path.spec.ts @@ -3,7 +3,7 @@ // Copyright (c) vis.gl contributors // Forked from https://github.com/mapbox/geojson-vt under compatible ISC license -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {simplifyPath} from '@loaders.gl/mvt/lib/vector-tiler/features/simplify-path'; /* eslint comma-spacing:0, no-shadow: 0*/ @@ -148,7 +148,7 @@ const simplified = [ [0.86636, 0.48077] ]; -test('GeoJSONVT#simplifyPath#simplifies points correctly with the given tolerance', t => { +test('GeoJSONVT#simplifyPath#simplifies points correctly with the given tolerance', () => { const coords: number[] = []; for (let i = 0; i < points.length; i++) { coords.push(points[i][0], points[i][1], 0); @@ -164,19 +164,16 @@ test('GeoJSONVT#simplifyPath#simplifies points correctly with 
the given toleranc result.push([coords[i], coords[i + 1]]); } } - t.same(result, simplified); - t.end(); + expect(result).toEqual(simplified); }); -test('GeoJSONVT#simplifyPath#does not throw max call stack error on bad long input', t => { +test('GeoJSONVT#simplifyPath#does not throw max call stack error on bad long input', () => { const coords: number[][] = []; for (let i = 0; i < 1400; i++) { coords.push([0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]); } - t.doesNotThrow(() => { + expect(() => { simplifyPath(coords, 2e-15); - }); - - t.end(); + }).not.toThrow(); }); diff --git a/modules/mvt/test/mvt-loader.spec.ts b/modules/mvt/test/mvt-loader.spec.ts index a6b09cbc2a..64f1a6221a 100644 --- a/modules/mvt/test/mvt-loader.spec.ts +++ b/modules/mvt/test/mvt-loader.spec.ts @@ -3,7 +3,7 @@ // Copyright (c) vis.gl contributors // import type {BinaryFeatureCollection} from '@loaders.gl/schema'; -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {MVTLoader, MVTLoaderOptions} from '@loaders.gl/mvt'; import {setLoaderOptions, fetchFile, parse, parseSync} from '@loaders.gl/core'; import {geojsonToBinary, binaryToGeojson} from '@loaders.gl/gis'; @@ -43,12 +43,12 @@ setLoaderOptions({ _workerType: 'test' }); -test('Point MVT to local coordinates JSON', async t => { +test('Point MVT to local coordinates JSON', async () => { const response = await fetchFile(MVT_POINTS_DATA_URL); const mvtArrayBuffer = await response.arrayBuffer(); const geometryJSON = await parse(mvtArrayBuffer, MVTLoader); - t.deepEqual(geometryJSON, [ + expect(geometryJSON).toEqual([ { type: 'Feature', geometry: { @@ -64,16 +64,14 @@ test('Point MVT to local coordinates JSON', async t => { } } ]); - - t.end(); }); -test('Line MVT to local coordinates JSON', async t => { +test('Line MVT to local coordinates JSON', async () => { const response = await fetchFile(MVT_LINES_DATA_URL); const mvtArrayBuffer = await response.arrayBuffer(); const geometryJSON = await 
parse(mvtArrayBuffer, MVTLoader); - t.deepEqual(geometryJSON, [ + expect(geometryJSON).toEqual([ { type: 'Feature', geometry: { @@ -90,21 +88,17 @@ test('Line MVT to local coordinates JSON', async t => { } } ]); - - t.end(); }); -test('Polygon MVT to local coordinates JSON', async t => { +test('Polygon MVT to local coordinates JSON', async () => { const response = await fetchFile(MVT_POLYGONS_DATA_URL); const mvtArrayBuffer = await response.arrayBuffer(); const geometryJSON = await parse(mvtArrayBuffer, MVTLoader); - t.deepEqual(geometryJSON, decodedPolygonsGeometry); - - t.end(); + expect(geometryJSON).toEqual(decodedPolygonsGeometry); }); -test('MVTLoader#Parse Point MVT', async t => { +test('MVTLoader#Parse Point MVT', async () => { for (const binary of [true, false]) { const outputFormat = binary ? 'binary' : 'geojson'; const response = await fetchFile(MVT_POINTS_DATA_URL); @@ -128,15 +122,14 @@ test('MVTLoader#Parse Point MVT', async t => { if (binary) { // @ts-ignore expected = geojsonToBinary(expected); - t.ok(geometry.byteLength > 0); + expect(geometry.byteLength > 0).toBeTruthy(); delete geometry.byteLength; } - t.deepEqual(geometry, expected, `Parsed Point MVT as ${outputFormat}`); + expect(geometry, `Parsed Point MVT as ${outputFormat}`).toEqual(expected); } - t.end(); }); -test('MVTLoader#Parse Lines MVT', async t => { +test('MVTLoader#Parse Lines MVT', async () => { for (const binary of [true, false]) { const outputFormat = binary ? 
'binary' : 'geojson'; @@ -160,15 +153,14 @@ test('MVTLoader#Parse Lines MVT', async t => { if (binary) { // @ts-ignore expected = geojsonToBinary(expected); - t.ok(geometry.byteLength > 0); + expect(geometry.byteLength > 0).toBeTruthy(); delete geometry.byteLength; } - t.deepEqual(geometry, expected, `Parsed Lines MVT as ${outputFormat}`); + expect(geometry, `Parsed Lines MVT as ${outputFormat}`).toEqual(expected); } - t.end(); }); -test('MVTLoader#Parse Polygons MVT', async t => { +test('MVTLoader#Parse Polygons MVT', async () => { for (const binary of [true, false]) { const outputFormat = binary ? 'binary' : 'geojson'; @@ -192,15 +184,14 @@ test('MVTLoader#Parse Polygons MVT', async t => { if (binary) { // @ts-ignore expected = geojsonToBinary(expected, {fixRingWinding: false}); - t.ok(geometry.byteLength > 0); + expect(geometry.byteLength > 0).toBeTruthy(); delete geometry.byteLength; } - t.deepEqual(geometry, expected, `Parsed Polygons MVT as ${outputFormat}`); + expect(geometry, `Parsed Polygons MVT as ${outputFormat}`).toEqual(expected); } - t.end(); }); -test('Should raise an error when coordinates param is wgs84 and tileIndex is missing', async t => { +test('Should raise an error when coordinates param is wgs84 and tileIndex is missing', async () => { const response = await fetchFile(MVT_POINTS_DATA_URL); const mvtArrayBuffer = await response.arrayBuffer(); @@ -208,12 +199,10 @@ test('Should raise an error when coordinates param is wgs84 and tileIndex is mis mvt: {coordinates: 'wgs84'} }; - t.throws(() => parseSync(mvtArrayBuffer, MVTLoader, loaderOptions)); - - t.end(); + expect(() => parseSync(mvtArrayBuffer, MVTLoader, loaderOptions)).toThrow(); }); -test('Should add layer name to custom property', async t => { +test('Should add layer name to custom property', async () => { const response = await fetchFile(MVT_POINTS_DATA_URL); const mvtArrayBuffer = await response.arrayBuffer(); @@ -222,12 +211,12 @@ test('Should add layer name to custom property', 
async t => { }; const geometryJSON = await parse(mvtArrayBuffer, MVTLoader, loaderOptions); - t.equals(geometryJSON[0].properties.layerSource, 'layer0'); + expect(geometryJSON[0].properties.layerSource).toBe('layer0'); - t.end(); + }); -test('Should return features from selected layers when layers property is provided', async t => { +test('Should return features from selected layers when layers property is provided', async () => { const response = await fetchFile(MVT_MULTIPLE_LAYERS_DATA_URL); const mvtArrayBuffer = await response.arrayBuffer(); @@ -239,24 +228,21 @@ test('Should return features from selected layers when layers property is provid const anyFeatureFromAnotherLayer = geometryJSON.some( feature => feature.properties.layerName !== 'layer1' ); - t.false(anyFeatureFromAnotherLayer); - t.equals(geometryJSON[0].properties.layerName, 'layer1'); - - t.end(); + expect(anyFeatureFromAnotherLayer).toBe(false); + expect(geometryJSON[0].properties.layerName).toBe('layer1'); }); -test('Polygon MVT to local coordinates binary', async t => { +test('Polygon MVT to local coordinates binary', async () => { const response = await fetchFile(MVT_POLYGONS_DATA_URL); const mvtArrayBuffer = await response.arrayBuffer(); const geometryBinary = await parse(mvtArrayBuffer, MVTLoader, {mvt: {shape: 'binary'}}); - t.ok(geometryBinary.byteLength > 0); + expect(geometryBinary.byteLength > 0).toBeTruthy(); delete geometryBinary.byteLength; // @ts-ignore deduced type of 'Feature' is string... 
const expectedBinary = geojsonToBinary(decodedPolygonsGeometry); - t.deepEqual(geometryBinary, expectedBinary); - t.end(); + expect(geometryBinary).toEqual(expectedBinary); }); // Test to sanity check that old method of parsing binary @@ -270,7 +256,7 @@ const TEST_FILES = [ MVT_MULTIPLE_LAYERS_DATA_URL ]; -test('MVTLoader#Parse geojson-to-binary', async t => { +test('MVTLoader#Parse geojson-to-binary', async () => { for (const filename of TEST_FILES) { const response = await fetchFile(filename); const mvtArrayBuffer = await response.arrayBuffer(); @@ -283,43 +269,38 @@ test('MVTLoader#Parse geojson-to-binary', async t => { delete binary.byteLength; const expectedBinary = geojsonToBinary(geojson); - t.deepEqual(expectedBinary, binary); + expect(expectedBinary).toEqual(binary); } - t.end(); }); -test('Features with top-level id', async t => { +test('Features with top-level id', async () => { const response = await fetchFile(WITH_FEATURE_ID); const mvtArrayBuffer = await response.arrayBuffer(); const binary = await parse(mvtArrayBuffer, MVTLoader, {mvt: {shape: 'binary'}}); - t.ok(binary.points.fields.length, 'feature.id fields are preserved'); - t.ok(binary.lines.fields.length, 'feature.id fields are preserved'); - t.ok(binary.polygons.fields.length, 'feature.id fields are preserved'); + expect(binary.points.fields.length, 'feature.id fields are preserved').toBeTruthy(); + expect(binary.lines.fields.length, 'feature.id fields are preserved').toBeTruthy(); + expect(binary.polygons.fields.length, 'feature.id fields are preserved').toBeTruthy(); const feature = binaryToGeojson(binary, { globalFeatureId: binary.points.globalFeatureIds.value[0] }); // @ts-ignore - t.ok(feature.id, 'feature.id is restored'); - - t.end(); + expect(feature.id, 'feature.id is restored').toBeTruthy(); }); -test('Empty MVT must return empty binary format', async t => { +test('Empty MVT must return empty binary format', async () => { const emptyMVTArrayBuffer = new Uint8Array(); const 
geometryBinary = await parse(emptyMVTArrayBuffer, MVTLoader, {mvt: {shape: 'binary'}}); - t.ok(geometryBinary.points); - t.ok(geometryBinary.lines); - t.ok(geometryBinary.polygons); - t.ok(geometryBinary.points.positions.size === 2); - t.ok(geometryBinary.lines.positions.size === 2); - t.ok(geometryBinary.polygons.positions.size === 2); - - t.end(); + expect(geometryBinary.points).toBeTruthy(); + expect(geometryBinary.lines).toBeTruthy(); + expect(geometryBinary.polygons).toBeTruthy(); + expect(geometryBinary.points.positions.size === 2).toBeTruthy(); + expect(geometryBinary.lines.positions.size === 2).toBeTruthy(); + expect(geometryBinary.polygons.positions.size === 2).toBeTruthy(); }); -test('Triangulation is supported', async t => { +test('Triangulation is supported', async () => { const response = await fetchFile(MVT_POLYGONS_DATA_URL); const mvtArrayBuffer = await response.arrayBuffer(); const geometry = await parse(mvtArrayBuffer, MVTLoader, { @@ -327,17 +308,15 @@ test('Triangulation is supported', async t => { }); // Closed polygon with 31 vertices (0===30) - t.ok(geometry.polygons.positions); - t.equals(geometry.polygons.positions.value.length, 62); + expect(geometry.polygons.positions).toBeTruthy(); + expect(geometry.polygons.positions.value.length).toBe(62); - t.ok(geometry.polygons.triangles); - t.equals(geometry.polygons.triangles.value.length, 84); + expect(geometry.polygons.triangles).toBeTruthy(); + expect(geometry.polygons.triangles.value.length).toBe(84); // Basic check that triangulation is valid const minI = Math.min(...geometry.polygons.triangles.value); const maxI = Math.max(...geometry.polygons.triangles.value); - t.equals(minI, 0); - t.equals(maxI, 29); // Don't expect to find 30 as closed polygon - - t.end(); + expect(minI).toBe(0); + expect(maxI).toBe(29); // Don't expect to find 30 as closed polygon }); diff --git a/modules/mvt/test/mvt-source.spec.ts b/modules/mvt/test/mvt-source.spec.ts index 3e7ca7dc2f..93bcffa747 100644 --- 
a/modules/mvt/test/mvt-source.spec.ts +++ b/modules/mvt/test/mvt-source.spec.ts @@ -2,43 +2,31 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {isBrowser} from '@loaders.gl/core'; import {TILESETS} from './data/tilesets'; import {MVTSource} from '@loaders.gl/mvt'; import {isURLTemplate, getURLFromTemplate} from '../src/mvt-source'; -test('MVTSource#urls', async t => { - if (!isBrowser) { - t.comment('MVTSource currently only supported in browser'); - t.end(); - return; - } +test.skipIf(!isBrowser)('MVTSource#urls', async () => { for (const tilesetUrl of TILESETS) { const source = new MVTSource({url: tilesetUrl}); - t.ok(source); + expect(source).toBeTruthy(); const metadata = await source.getMetadata(); - t.ok(metadata); + expect(metadata).toBeTruthy(); // console.error(JSON.stringify(metadata.tileJSON, null, 2)); } - t.end(); }); -test('MVTSource#Blobs', async t => { - if (!isBrowser) { - t.comment('MVTSource currently only supported in browser'); - t.end(); - return; - } +test.skipIf(!isBrowser)('MVTSource#Blobs', async () => { for (const tilesetUrl of TILESETS) { const source = new MVTSource({url: tilesetUrl}); - t.ok(source); + expect(source).toBeTruthy(); const metadata = await source.getMetadata(); - t.ok(metadata); + expect(metadata).toBeTruthy(); // console.error(JSON.stringify(metadata.tileJSON, null, 2)); } - t.end(); }); const TEST_TEMPLATE = 'https://server.com/{z}/{x}/{y}.png'; @@ -48,48 +36,43 @@ const TEST_TEMPLATE_ARRAY = [ 'https://server.com/ep2/{x}/{y}.png' ]; -test('isURLFromTemplate', t => { - t.true(isURLTemplate(TEST_TEMPLATE), 'single string template'); - t.true(isURLTemplate(TEST_TEMPLATE2), 'single string template with multiple occurance'); +test('isURLFromTemplate', () => { + expect(isURLTemplate(TEST_TEMPLATE), 'single string template').toBe(true); + expect(isURLTemplate(TEST_TEMPLATE2), 'single string template with multiple 
occurance').toBe( + true + ); // t.true(isURLTemplate(TEST_TEMPLATE_ARRAY), 'array of templates'); - t.end(); }); -test('getURLFromTemplate', t => { - t.is( +test('getURLFromTemplate', () => { + expect( getURLFromTemplate(TEST_TEMPLATE, 1, 2, 0), - 'https://server.com/0/1/2.png', 'single string template' - ); - t.is( + ).toBe('https://server.com/0/1/2.png'); + expect( getURLFromTemplate(TEST_TEMPLATE2, 1, 2, 0), - 'https://server.com/0/1/2/1-2-0.png', 'single string template with multiple occurance' - ); - t.is( + ).toBe('https://server.com/0/1/2/1-2-0.png'); + expect( getURLFromTemplate(TEST_TEMPLATE_ARRAY, 1, 2, 0, '1-2-0'), - 'https://server.com/ep2/1/2.png', 'array of templates' - ); - t.is( + ).toBe('https://server.com/ep2/1/2.png'); + expect( getURLFromTemplate(TEST_TEMPLATE_ARRAY, 2, 2, 0, '2-2-0'), - 'https://server.com/ep1/2/2.png', 'array of templates' - ); - t.is( + ).toBe('https://server.com/ep1/2/2.png'); + expect( getURLFromTemplate(TEST_TEMPLATE_ARRAY, 17, 11, 5, '17-11-5'), - 'https://server.com/ep2/17/11.png', 'array of templates' - ); + ).toBe('https://server.com/ep2/17/11.png'); // t.is(getURLFromTemplate(null, 1, 2, 0), null, 'invalid template'); // t.is(getURLFromTemplate([], 1, 2, 0), null, 'empty array'); - t.end(); }); // TBA - TILE LOADING TESTS /* -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {validateLoader} from 'test/common/conformance'; import {load} from '@loaders.gl/core'; @@ -99,15 +82,15 @@ import {PMTILESETS} from './data/tilesets'; test('PMTilesLoader#loader conformance', (t) => { validateLoader(t, PMTilesLoader, 'PMTilesLoader'); - t.end(); + }); test.skip('PMTilesLoader#load', async (t) => { for (const tilesetUrl of PMTILESETS) { const metadata = await load(tilesetUrl, PMTilesLoader); - t.ok(metadata); + expect(metadata).toBeTruthy(); } - t.end(); + }); /* @@ -189,7 +172,7 @@ test('cache getDirectory', async (t) => { t.strictEqual(directory[0].runLength, 1); for (const v of 
cache.cache.values()) { - t.ok(v.lastUsed > 0); + expect(v.lastUsed > 0).toBeTruthy(); } }); @@ -260,16 +243,16 @@ test('cache pruning by byte size', async (t) => { cache.cache.set('2', {lastUsed: 2, data: Promise.resolve([])}); cache.prune(); t.strictEqual(cache.cache.size, 2); - t.ok(cache.cache.get('2')); - t.ok(cache.cache.get('1')); - t.ok(!cache.cache.get('0')); + expect(cache.cache.get('2')).toBeTruthy(); + expect(cache.cache.get('1')).toBeTruthy(); + expect(!cache.cache.get('0')).toBeTruthy(); }); test('pmtiles get metadata', async (t) => { const source = new TestFileSource('@loaders.gl/pmtiles/test/data/test_fixture_1.pmtiles', '1'); const p = new PMTiles(source); const metadata = await p.getMetadata(); - t.ok(metadata.name); + expect(metadata.name).toBeTruthy(); }); // echo '{"type":"Polygon","coordinates":[[[0,0],[0,1],[1,0],[0,0]]]}' | ./tippecanoe -zg -o test_fixture_2.pmtiles @@ -278,9 +261,9 @@ test('pmtiles handle retries', async (t) => { source.etag = '1'; const p = new PMTiles(source); const metadata = await p.getMetadata(); - t.ok(metadata.name); + expect(metadata.name).toBeTruthy(); source.etag = '2'; source.replaceData('@loaders.gl/pmtiles/test/data/test_fixture_2.pmtiles'); - t.ok(await p.getZxy(0, 0, 0)); + expect(await p.getZxy(0, 0, 0)).toBeTruthy(); }); */ diff --git a/modules/mvt/test/mvt-writer.spec.ts b/modules/mvt/test/mvt-writer.spec.ts index 7fc411ef0f..c5954cd476 100644 --- a/modules/mvt/test/mvt-writer.spec.ts +++ b/modules/mvt/test/mvt-writer.spec.ts @@ -2,20 +2,19 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {encode, fetchFile, parse} from '@loaders.gl/core'; import {MVTLoader, MVTWriter} from '@loaders.gl/mvt'; const RECTANGLE_URL = '@loaders.gl/mvt/test/data/mapbox-vt-pbf-fixtures/rectangle.geojson'; const RECTANGLE_TILE = '@loaders.gl/mvt/test/data/mapbox-vt-pbf-fixtures/rectangle-1.0.0.pbf'; -test('MVTWriter#import', 
async t => { - t.ok(MVTWriter, 'MVTWriter is defined'); - t.end(); +test('MVTWriter#import', async () => { + expect(MVTWriter, 'MVTWriter is defined').toBeTruthy(); }); /** @todo - fix this test */ -test.skip('MVTWriter#encode', async t => { +test.skip('MVTWriter#encode', async () => { const geojsonResponse = await fetchFile(RECTANGLE_URL); const geojson = await geojsonResponse.json(); @@ -24,13 +23,11 @@ test.skip('MVTWriter#encode', async t => { const fixtureResponse = await fetchFile(RECTANGLE_TILE); const expected = await fixtureResponse.arrayBuffer(); - t.ok(arrayBuffer instanceof ArrayBuffer, 'MVTWriter encodes to ArrayBuffer'); - t.deepEqual(new Uint8Array(arrayBuffer), new Uint8Array(expected)); - - t.end(); + expect(arrayBuffer instanceof ArrayBuffer, 'MVTWriter encodes to ArrayBuffer').toBeTruthy(); + expect(new Uint8Array(arrayBuffer)).toEqual(new Uint8Array(expected)); }); -test('MVTWriter#roundtrip', async t => { +test('MVTWriter#roundtrip', async () => { const tileIndex = {x: 2, y: 1, z: 2}; const response = await fetchFile('@loaders.gl/mvt/test/data/mvt/lines_2-2-1.mvt'); const sourceTile = await response.arrayBuffer(); @@ -41,7 +38,5 @@ test('MVTWriter#roundtrip', async t => { const roundtripBuffer = await encode(geojson, MVTWriter, {mvt: {layerName: 'layer0', tileIndex}}); const roundtripGeojson = await parse(roundtripBuffer, MVTLoader, loaderOptions); - t.deepEqual(roundtripGeojson, geojson, 'Roundtrip preserves GeoJSON features'); - - t.end(); + expect(roundtripGeojson, 'Roundtrip preserves GeoJSON features').toEqual(geojson); }); diff --git a/modules/mvt/test/table-tile-source-full.spec.ts b/modules/mvt/test/table-tile-source-full.spec.ts index ee2d5ec939..486b08fb30 100644 --- a/modules/mvt/test/table-tile-source-full.spec.ts +++ b/modules/mvt/test/table-tile-source-full.spec.ts @@ -3,7 +3,7 @@ // Copyright (c) vis.gl contributors // Forked from https://github.com/mapbox/geojson-vt under compatible ISC license -import test from 
'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile} from '@loaders.gl/core'; import {TableTileSource, TableTileSourceProps} from '@loaders.gl/mvt'; @@ -56,39 +56,33 @@ const TEST_CASES = [ } ]; -test('GeoJSONVT#full tiling test', async t => { +test('GeoJSONVT#full tiling test', async () => { for (const tc of TEST_CASES) { const {inputFile, expectedFile, options} = tc; const parsedGeojson = await getJSON(inputFile); const tiles = await genTiles(parsedGeojson, options); // fs.writeFileSync(path.join(__dirname, '/fixtures/' + expectedFile), JSON.stringify(tiles)); - t.same( + expect( tiles, - await getJSON(expectedFile), `Tiling ${inputFile}: ${expectedFile.replace('-tiles.json', '')}` - ); + ).toEqual(await getJSON(expectedFile)); } - - t.end(); }); -test('GeoJSONVT#throws on invalid GeoJSON', async t => { - t.throws(() => { +test('GeoJSONVT#throws on invalid GeoJSON', async () => { + expect(() => { genTiles({type: 'Pologon'}); - }); - t.end(); + }).toThrow(); }); -test('GeoJSONVT#empty geojson', async t => { - t.same({}, await genTiles(await getJSON('empty.json'))); - t.end(); +test('GeoJSONVT#empty geojson', async () => { + expect(await genTiles(await getJSON('empty.json'))).toEqual({}); }); -test('GeoJSONVT#null geometry', async t => { +test('GeoJSONVT#null geometry', async () => { // should ignore features with null geometry - t.same({}, await genTiles(await getJSON('feature-null-geometry.json'))); - t.end(); + expect(await genTiles(await getJSON('feature-null-geometry.json'))).toEqual({}); }); // Helpers diff --git a/modules/mvt/test/table-tile-source-multi-world.spec.ts b/modules/mvt/test/table-tile-source-multi-world.spec.ts index d102bb46cb..62e78ae020 100644 --- a/modules/mvt/test/table-tile-source-multi-world.spec.ts +++ b/modules/mvt/test/table-tile-source-multi-world.spec.ts @@ -3,7 +3,7 @@ // Copyright (c) vis.gl contributors // Forked from https://github.com/mapbox/geojson-vt under compatible ISC license -import test from 
'tape-promise/tape'; +import {expect, test} from 'vitest'; import {TableTileSource} from '@loaders.gl/mvt'; import type {GeoJSONTable, Feature} from '@loaders.gl/schema'; @@ -33,31 +33,31 @@ function makeGeoJSONTable(feature: Feature): GeoJSONTable { }; } -test('GeoJSONVT#handle point only in the rightside world', async t => { +test('GeoJSONVT#handle point only in the rightside world', async () => { try { const source = new TableTileSource(makeGeoJSONTable(rightPoint)); await source.ready; - t.equal(source.tiles[0].features[0].geometry[0], 1); - t.equal(source.tiles[0].features[0].geometry[1], 0.5); + expect(source.tiles[0].features[0].geometry[0]).toBe(1); + expect(source.tiles[0].features[0].geometry[1]).toBe(0.5); } catch (err) { t.ifError(err); } - t.end(); + }); -test('GeoJSONVT#handle point only in the leftside world', async t => { +test('GeoJSONVT#handle point only in the leftside world', async () => { try { const source = new TableTileSource(makeGeoJSONTable(leftPoint)); - t.equal(source.tiles[0].features[0].geometry[0], 0); - t.equal(source.tiles[0].features[0].geometry[1], 0.5); + expect(source.tiles[0].features[0].geometry[0]).toBe(0); + expect(source.tiles[0].features[0].geometry[1]).toBe(0.5); } catch (err) { t.ifError(err); } - t.end(); + }); -test('GeoJSONVT#handle points in the leftside world and the rightside world', async t => { +test('GeoJSONVT#handle points in the leftside world and the rightside world', async () => { try { const source = new TableTileSource({ shape: 'geojson-table', @@ -65,13 +65,13 @@ test('GeoJSONVT#handle points in the leftside world and the rightside world', as features: [leftPoint, rightPoint] }); - t.equal(source.tiles[0].features[0].geometry[0], 0); - t.equal(source.tiles[0].features[0].geometry[1], 0.5); + expect(source.tiles[0].features[0].geometry[0]).toBe(0); + expect(source.tiles[0].features[0].geometry[1]).toBe(0.5); - t.equal(source.tiles[0].features[1].geometry[0], 1); - 
t.equal(source.tiles[0].features[1].geometry[1], 0.5); + expect(source.tiles[0].features[1].geometry[0]).toBe(1); + expect(source.tiles[0].features[1].geometry[1]).toBe(0.5); } catch (err) { t.ifError(err); } - t.end(); + }); diff --git a/modules/mvt/test/table-tile-source.spec.ts b/modules/mvt/test/table-tile-source.spec.ts index 3bf51a88cc..f45f29bc28 100644 --- a/modules/mvt/test/table-tile-source.spec.ts +++ b/modules/mvt/test/table-tile-source.spec.ts @@ -3,7 +3,7 @@ // Copyright (c) vis.gl contributors // Forked from https://github.com/mapbox/geojson-vt under compatible ISC license -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile} from '@loaders.gl/core'; import {TableTileSource} from '@loaders.gl/mvt'; import {Feature, GeoJSONTable, Geometry} from '@loaders.gl/schema'; @@ -27,7 +27,7 @@ const square = [ } ]; -test('TableTileSource#getTile#us-states.json', async t => { +test('TableTileSource#getTile#us-states.json', async () => { const geojson = await loadGeoJSONTable('us-states.json'); const source = TableTileSource.createDataSource(geojson, {table: {coordinates: 'wgs84'}}); // , debug: 2}); await source.ready; @@ -36,33 +36,33 @@ test('TableTileSource#getTile#us-states.json', async t => { let tile = source.getProtoTile({z: 7, x: 37, y: 48}); const expected = await loadGeoJSONTable('us-states-z7-37-48.json'); - t.same(tile?.protoFeatures, expected.features, 'z7-37-48'); + expect(tile?.protoFeatures, 'z7-37-48').toEqual(expected.features); tile = source.getProtoTile({z: 9, x: 148, y: 192}); - t.same(tile?.protoFeatures, square, 'z9-148-192 (clipped square)'); + expect(tile?.protoFeatures, 'z9-148-192 (clipped square)').toEqual(square); // t.same(source.getProtoTile({z: 11, x: 592, y: 768})?.features, square, 'z11-592-768 (clipped square)'); // Check non-existing tiles (no geometry in these tile indices => no tile generated) tile = source.getProtoTile({z: 11, x: 800, y: 400}); - t.equal(tile, null, 'non-existing 
tile'); + expect(tile, 'non-existing tile').toBe(null); tile = source.getProtoTile({z: -5, x: 123.25, y: 400.25}); - t.equal(tile, null, 'invalid tile'); + expect(tile, 'invalid tile').toBe(null); tile = source.getProtoTile({z: 25, x: 200, y: 200}); - t.equal(tile, null, 'invalid tile'); + expect(tile, 'invalid tile').toBe(null); // Check total number of tiles generated const total = source.stats.get('total').count; - t.equal(total, 37); + expect(total).toBe(37); - t.end(); + }); -test('TableTileSource#getTile#unbuffered tile left/right edges', async t => { +test('TableTileSource#getTile#unbuffered tile left/right edges', async () => { const geojson = makeGeoJSONTable({ type: 'LineString', coordinates: [ @@ -79,9 +79,9 @@ test('TableTileSource#getTile#unbuffered tile left/right edges', async t => { await source.ready; let tile = source.getProtoTile({z: 2, x: 1, y: 1}); - t.same(tile, null); + expect(tile).toBe(null); tile = source.getProtoTile({z: 2, x: 2, y: 1}); - t.same(tile?.protoFeatures, [ + expect(tile?.protoFeatures).toEqual([ { geometry: [ [ @@ -93,10 +93,9 @@ test('TableTileSource#getTile#unbuffered tile left/right edges', async t => { tags: null } ]); - t.end(); }); -test('TableTileSource#getTile#unbuffered tile top/bottom edges', async t => { +test('TableTileSource#getTile#unbuffered tile top/bottom edges', async () => { const geojson = makeGeoJSONTable({ type: 'LineString', coordinates: [ @@ -112,7 +111,7 @@ test('TableTileSource#getTile#unbuffered tile top/bottom edges', async t => { }); await source.ready; - t.same(source.getProtoTile({z: 2, x: 1, y: 0})?.protoFeatures, [ + expect(source.getProtoTile({z: 2, x: 1, y: 0})?.protoFeatures).toEqual([ { geometry: [ [ @@ -124,11 +123,10 @@ test('TableTileSource#getTile#unbuffered tile top/bottom edges', async t => { tags: null } ]); - t.same(source.getProtoTile({z: 2, x: 1, y: 1})?.protoFeatures, []); - t.end(); + expect(source.getProtoTile({z: 2, x: 1, y: 1})?.protoFeatures).toEqual([]); }); 
-test('TableTileSource#getTile#polygon clipping on the boundary', async t => { +test('TableTileSource#getTile#polygon clipping on the boundary', async () => { const geojson = makeGeoJSONTable({ type: 'Polygon', coordinates: [ @@ -149,7 +147,7 @@ test('TableTileSource#getTile#polygon clipping on the boundary', async t => { }); await source.ready; - t.same(source.getProtoTile({z: 5, x: 19, y: 9})?.protoFeatures, [ + expect(source.getProtoTile({z: 5, x: 19, y: 9})?.protoFeatures).toEqual([ { geometry: [ [ @@ -164,8 +162,6 @@ test('TableTileSource#getTile#polygon clipping on the boundary', async t => { tags: null } ]); - - t.end(); }); // HELPERS diff --git a/modules/mvt/test/tilejson-loader.spec.ts b/modules/mvt/test/tilejson-loader.spec.ts index 397d64abd9..72f24fe3b6 100644 --- a/modules/mvt/test/tilejson-loader.spec.ts +++ b/modules/mvt/test/tilejson-loader.spec.ts @@ -2,7 +2,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {validateLoader} from 'test/common/conformance'; import {load} from '@loaders.gl/core'; @@ -13,32 +13,32 @@ import {TILEJSONS} from './data/tilejson/tilejson'; const TIPPECANOE_TILEJSON = '@loaders.gl/mvt/test/data/tilejson/tippecanoe.tilejson'; // const TIPPECANOE_EXPECTED = '@loaders.gl/mvt/test/data/tilejson/tippecanoe.expected.json'; -test('TileJSONLoader#loader conformance', t => { +test('TileJSONLoader#loader conformance', () => { validateLoader(t, TileJSONLoader, 'TileJSONLoader'); - t.end(); + }); -test('TileJSONLoader#load', async t => { +test('TileJSONLoader#load', async () => { for (const tileJSON of TILEJSONS) { const metadata = await load(tileJSON.url, TileJSONLoader); - t.ok(metadata.layers); + expect(metadata.layers).toBeTruthy(); // TODO - actually check results, add tilejsons with fields - // t.deepEqual(metadata, parsedMetadata); + // expect(metadata).toEqual(parsedMetadata); // console.error(JSON.stringify(metadata, 
null, 2)); } - t.end(); + }); -test('TileJSONLoader#tippecanoe', async t => { +test('TileJSONLoader#tippecanoe', async () => { // let metadata = await load(TIPPECANOE_TILEJSON, TileJSONLoader); // const expected = await load(TIPPECANOE_EXPECTED, JSONLoader); - // t.deepEqual(metadata, expected, 'Tippecanoe TileJSON loaded correctly'); + // expect(metadata, 'Tippecanoe TileJSON loaded correctly').toEqual(expected); let metadata = await load(TIPPECANOE_TILEJSON, TileJSONLoader); - t.equal(metadata.layers?.[0]?.fields?.[10]?.values?.length, 100, '100 unique values'); + expect(metadata.layers?.[0]?.fields?.[10]?.values?.length, '100 unique values').toBe(100); metadata = await load(TIPPECANOE_TILEJSON, TileJSONLoader, {tilejson: {maxValues: 10}}); - t.equal(metadata.layers?.[0]?.fields?.[10]?.values?.length, 10, 'maxValue clips unique values'); + expect(metadata.layers?.[0]?.fields?.[10]?.values?.length, 'maxValue clips unique values').toBe(10); - t.end(); + }); diff --git a/modules/pmtiles/test/pmtiles-loader.spec.ts b/modules/pmtiles/test/pmtiles-loader.spec.ts index 11d68cf16e..7c78d4f309 100644 --- a/modules/pmtiles/test/pmtiles-loader.spec.ts +++ b/modules/pmtiles/test/pmtiles-loader.spec.ts @@ -2,25 +2,19 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {isBrowser, load} from '@loaders.gl/core'; import {PMTILESETS_VECTOR} from './data/tilesets'; import {_PMTilesLoader as PMTilesLoader} from '@loaders.gl/pmtiles'; -test('PMTilesLoader#schemas', async t => { - if (!isBrowser) { - t.comment('PMTilesSource currently only supported in browser'); - t.end(); - return; - } +test.skipIf(!isBrowser)('PMTilesLoader#schemas', async () => { for (const tilesetUrl of PMTILESETS_VECTOR) { const source = await load(tilesetUrl, PMTilesLoader); const fields: any[] = []; for (const layer of source.layers) { fields.push(...layer.schema.fields); } - t.equal(fields.length, 66); + 
expect(fields.length).toBe(66); } - t.end(); }); diff --git a/modules/pmtiles/test/pmtiles-source.spec.ts b/modules/pmtiles/test/pmtiles-source.spec.ts index 079cc8b02c..f710c8f846 100644 --- a/modules/pmtiles/test/pmtiles-source.spec.ts +++ b/modules/pmtiles/test/pmtiles-source.spec.ts @@ -2,50 +2,38 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {isBrowser, fetchFile} from '@loaders.gl/core'; import {PMTILESETS} from './data/tilesets'; import {PMTilesSource} from '@loaders.gl/pmtiles'; -test('PMTilesSource#urls', async t => { - if (!isBrowser) { - t.comment('PMTilesSource currently only supported in browser'); - t.end(); - return; - } +test.skipIf(!isBrowser)('PMTilesSource#urls', async () => { for (const tilesetUrl of PMTILESETS) { const source = PMTilesSource.createDataSource(tilesetUrl, {url: tilesetUrl}); - t.ok(source); + expect(source).toBeTruthy(); const metadata = await source.getMetadata(); - t.ok(metadata); + expect(metadata).toBeTruthy(); // console.error(JSON.stringify(metadata.tileJSON, null, 2)); } - t.end(); }); -test('PMTilesSource#Blobs', async t => { - if (!isBrowser) { - t.comment('PMTilesSource currently only supported in browser'); - t.end(); - return; - } +test.skipIf(!isBrowser)('PMTilesSource#Blobs', async () => { for (const tilesetUrl of PMTILESETS) { const response = await fetchFile(tilesetUrl); const blob = await response.blob(); const source = PMTilesSource.createDataSource(blob, {url: blob}); - t.ok(source); + expect(source).toBeTruthy(); const metadata = await source.getMetadata(); - t.ok(metadata); + expect(metadata).toBeTruthy(); // console.error(JSON.stringify(metadata.tileJSON, null, 2)); } - t.end(); }); // TBA - TILE LOADING TESTS /* -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {validateLoader} from 'test/common/conformance'; import {load} from '@loaders.gl/core'; @@ -55,15 +43,15 
@@ import {PMTILESETS} from './data/tilesets'; test('PMTilesLoader#loader conformance', (t) => { validateLoader(t, PMTilesLoader, 'PMTilesLoader'); - t.end(); + }); test.skip('PMTilesLoader#load', async (t) => { for (const tilesetUrl of PMTILESETS) { const metadata = await load(tilesetUrl, PMTilesLoader); - t.ok(metadata); + expect(metadata).toBeTruthy(); } - t.end(); + }); /* @@ -145,7 +133,7 @@ test('cache getDirectory', async (t) => { t.strictEqual(directory[0].runLength, 1); for (const v of cache.cache.values()) { - t.ok(v.lastUsed > 0); + expect(v.lastUsed > 0).toBeTruthy(); } }); @@ -216,16 +204,16 @@ test('cache pruning by byte size', async (t) => { cache.cache.set('2', {lastUsed: 2, data: Promise.resolve([])}); cache.prune(); t.strictEqual(cache.cache.size, 2); - t.ok(cache.cache.get('2')); - t.ok(cache.cache.get('1')); - t.ok(!cache.cache.get('0')); + expect(cache.cache.get('2')).toBeTruthy(); + expect(cache.cache.get('1')).toBeTruthy(); + expect(!cache.cache.get('0')).toBeTruthy(); }); test('pmtiles get metadata', async (t) => { const source = new TestFileSource('@loaders.gl/pmtiles/test/data/test_fixture_1.pmtiles', '1'); const p = new PMTiles(source); const metadata = await p.getMetadata(); - t.ok(metadata.name); + expect(metadata.name).toBeTruthy(); }); // echo '{"type":"Polygon","coordinates":[[[0,0],[0,1],[1,0],[0,0]]]}' | ./tippecanoe -zg -o test_fixture_2.pmtiles @@ -234,9 +222,9 @@ test('pmtiles handle retries', async (t) => { source.etag = '1'; const p = new PMTiles(source); const metadata = await p.getMetadata(); - t.ok(metadata.name); + expect(metadata.name).toBeTruthy(); source.etag = '2'; source.replaceData('@loaders.gl/pmtiles/test/data/test_fixture_2.pmtiles'); - t.ok(await p.getZxy(0, 0, 0)); + expect(await p.getZxy(0, 0, 0)).toBeTruthy(); }); */ diff --git a/modules/wkt/test/hex-wkb-loader.spec.ts b/modules/wkt/test/hex-wkb-loader.spec.ts index 6dacf25332..05e737eff2 100644 --- a/modules/wkt/test/hex-wkb-loader.spec.ts +++ 
b/modules/wkt/test/hex-wkb-loader.spec.ts @@ -4,7 +4,7 @@ /* eslint-disable no-continue */ -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile, parseSync} from '@loaders.gl/core'; import {HexWKBLoader} from '@loaders.gl/wkt'; import {parseTestCases} from '@loaders.gl/gis/test/data/wkt/parse-test-cases'; @@ -12,7 +12,7 @@ import {parseTestCases} from '@loaders.gl/gis/test/data/wkt/parse-test-cases'; const WKB_2D_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdata2d.json'; const WKB_Z_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdataZ.json'; -test('HexWKBLoader#2D', async t => { +test('HexWKBLoader#2D', async () => { const response = await fetchFile(WKB_2D_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); @@ -24,19 +24,17 @@ test('HexWKBLoader#2D', async t => { } // Little endian if (testCase.wkbHex && testCase.geoJSON) { - t.deepEqual(parseSync(testCase.wkbHex, HexWKBLoader), testCase.geoJSON, title); + expect(parseSync(testCase.wkbHex, HexWKBLoader), title).toEqual(testCase.geoJSON); } // Big endian if (testCase.wkbHexXdr && testCase.geoJSON) { - t.deepEqual(parseSync(testCase.wkbHexXdr, HexWKBLoader), testCase.geoJSON, title); + expect(parseSync(testCase.wkbHexXdr, HexWKBLoader), title).toEqual(testCase.geoJSON); } } - - t.end(); }); -test('HexWKBLoader#Z', async t => { +test('HexWKBLoader#Z', async () => { const response = await fetchFile(WKB_Z_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); @@ -49,22 +47,12 @@ test('HexWKBLoader#Z', async t => { // Little endian if (testCase.wkbHex && testCase.geoJSON) { - t.deepEqual( - parseSync(testCase.wkbHex, HexWKBLoader), - testCase.geoJSON, - testCase.wkbHex.slice(0, 60) - ); + expect(parseSync(testCase.wkbHex, HexWKBLoader), testCase.wkbHex.slice(0, 60)).toEqual(testCase.geoJSON); } // Big endian if (testCase.wkbHexXdr && testCase.geoJSON) { - t.deepEqual( - parseSync(testCase.wkbHexXdr, HexWKBLoader), - testCase.geoJSON, 
- testCase.wkbHexXdr.slice(0, 60) - ); + expect(parseSync(testCase.wkbHexXdr, HexWKBLoader), testCase.wkbHexXdr.slice(0, 60)).toEqual(testCase.geoJSON); } } - - t.end(); }); diff --git a/modules/wkt/test/twkb-loader.spec.ts b/modules/wkt/test/twkb-loader.spec.ts index f9a2d3c8dd..8e4c349802 100644 --- a/modules/wkt/test/twkb-loader.spec.ts +++ b/modules/wkt/test/twkb-loader.spec.ts @@ -2,7 +2,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile, parseSync} from '@loaders.gl/core'; import {isTWKB} from '@loaders.gl/gis'; import {TWKBLoader} from '@loaders.gl/wkt'; @@ -11,7 +11,7 @@ import {parseTestCases} from '@loaders.gl/gis/test/data/wkt/parse-test-cases'; const WKB_2D_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdata2d.json'; // const WKB_Z_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdataZ.json'; -test('TWKBLoader#2D', async t => { +test('TWKBLoader#2D', async () => { const response = await fetchFile(WKB_2D_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); @@ -23,7 +23,7 @@ test('TWKBLoader#2D', async t => { // Big endian if (testCase.twkb && testCase.binary) { - t.ok(isTWKB(testCase.twkb), 'isTWKB(2D)'); + expect(isTWKB(testCase.twkb), 'isTWKB(2D)').toBeTruthy(); const geometry = {...testCase.geoJSON}; // TODO - Weird empty geometry case, is that coorrect per spec? 
if ( @@ -35,14 +35,12 @@ test('TWKBLoader#2D', async t => { ) { geometry.coordinates = []; } - t.deepEqual(parseSync(testCase.twkb, TWKBLoader), geometry); + expect(parseSync(testCase.twkb, TWKBLoader)).toEqual(geometry); } } - - t.end(); }); -// test('TWKBLoader#Z', async (t) => { +// test('TWKBLoader#Z', async () => { // const response = await fetchFile(WKB_Z_TEST_CASES); // const TEST_CASES = parseTestCases(await response.json()); diff --git a/modules/wkt/test/twkb-writer.spec.ts b/modules/wkt/test/twkb-writer.spec.ts index 24426a7e04..50f9440a1f 100644 --- a/modules/wkt/test/twkb-writer.spec.ts +++ b/modules/wkt/test/twkb-writer.spec.ts @@ -2,66 +2,58 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -/** -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile, encodeSync} from '@loaders.gl/core'; -import {WKBWriter} from '@loaders.gl/wkt'; +import {TWKBWriter} from '@loaders.gl/wkt'; import {parseTestCases} from '@loaders.gl/gis/test/data/wkt/parse-test-cases'; -const WKB_2D_TEST_CASES = '@loaders.gl/gis/test/data/wkb-testdata2d.json'; -const WKB_2D_NAN_TEST_CASES = '@loaders.gl/gis/test/data/wkb-testdata2d-nan.json'; -const WKB_Z_TEST_CASES = '@loaders.gl/gis/test/data/wkb-testdataZ.json'; -const WKB_Z_NAN_TEST_CASES = '@loaders.gl/gis/test/data/wkb-testdataZ-nan.json'; - -test('TWKBWriter#2D', async (t) => { - const response = await fetchFile(WKB_2D_TEST_CASES); - const TEST_CASES = parseTestCases(await response.json()); - - for (const testCase of Object.values(TEST_CASES)) { - const {geoJSON, wkb} = testCase; - const encoded = encodeSync(geoJSON, WKBWriter, {wkb: {hasZ: false, hasM: false}}); - t.deepEqual(encoded, wkb); +const TWKB_2D_TEST_CASES = '@loaders.gl/gis/test/data/wkt/twkb-testdata2d.json'; +const TWKB_2D_NAN_TEST_CASES = '@loaders.gl/gis/test/data/wkt/twkb-testdata2d-nan.json'; +const TWKB_Z_TEST_CASES = '@loaders.gl/gis/test/data/wkt/twkb-testdataZ.json'; +const TWKB_Z_NAN_TEST_CASES 
= '@loaders.gl/gis/test/data/wkt/twkb-testdataZ-nan.json'; + +// These legacy writer cases were previously commented out. Keep them skipped during the +// syntax migration so this change does not expand test surface area. +test.skip('TWKBWriter#2D', async () => { + const response = await fetchFile(TWKB_2D_TEST_CASES); + const testCases = parseTestCases(await response.json()); + + for (const testCase of Object.values(testCases)) { + const {geoJSON, twkb} = testCase; + const encoded = encodeSync(geoJSON, TWKBWriter, {wkb: {hasZ: false, hasM: false}}); + expect(encoded).toEqual(twkb); } - - t.end(); }); -test('TWKBWriter#2D NaN', async (t) => { - const response = await fetchFile(WKB_2D_NAN_TEST_CASES); - const TEST_CASES = parseTestCases(await response.json()); +test.skip('TWKBWriter#2D NaN', async () => { + const response = await fetchFile(TWKB_2D_NAN_TEST_CASES); + const testCases = parseTestCases(await response.json()); - for (const testCase of Object.values(TEST_CASES)) { - const {geoJSON, wkb} = testCase; - const encoded = encodeSync(geoJSON, WKBWriter, {wkb: {hasZ: false, hasM: false}}); - t.deepEqual(encoded, wkb); + for (const testCase of Object.values(testCases)) { + const {geoJSON, twkb} = testCase; + const encoded = encodeSync(geoJSON, TWKBWriter, {wkb: {hasZ: false, hasM: false}}); + expect(encoded).toEqual(twkb); } - - t.end(); }); -test('TWKBWriter#Z', async (t) => { - const response = await fetchFile(WKB_Z_TEST_CASES); - const TEST_CASES = parseTestCases(await response.json()); +test.skip('TWKBWriter#Z', async () => { + const response = await fetchFile(TWKB_Z_TEST_CASES); + const testCases = parseTestCases(await response.json()); - for (const testCase of Object.values(TEST_CASES)) { - const {geoJSON, wkb} = testCase; - const encoded = encodeSync(geoJSON, WKBWriter, {wkb: {hasZ: true, hasM: false}}); - t.deepEqual(encoded, wkb); + for (const testCase of Object.values(testCases)) { + const {geoJSON, twkb} = testCase; + const encoded = encodeSync(geoJSON, 
TWKBWriter, {wkb: {hasZ: true, hasM: false}}); + expect(encoded).toEqual(twkb); } - - t.end(); }); -test('TWKBWriter#Z NaN', async (t) => { - const response = await fetchFile(WKB_Z_NAN_TEST_CASES); - const TEST_CASES = parseTestCases(await response.json()); +test.skip('TWKBWriter#Z NaN', async () => { + const response = await fetchFile(TWKB_Z_NAN_TEST_CASES); + const testCases = parseTestCases(await response.json()); - for (const testCase of Object.values(TEST_CASES)) { - const {geoJSON, wkb} = testCase; - const encoded = encodeSync(geoJSON, WKBWriter, {wkb: {hasZ: true, hasM: false}}); - t.deepEqual(encoded, wkb); + for (const testCase of Object.values(testCases)) { + const {geoJSON, twkb} = testCase; + const encoded = encodeSync(geoJSON, TWKBWriter, {wkb: {hasZ: true, hasM: false}}); + expect(encoded).toEqual(twkb); } - - t.end(); }); - */ diff --git a/modules/wkt/test/wkb-loader.spec.ts b/modules/wkt/test/wkb-loader.spec.ts index 488c400a2c..e387a04599 100644 --- a/modules/wkt/test/wkb-loader.spec.ts +++ b/modules/wkt/test/wkb-loader.spec.ts @@ -4,7 +4,7 @@ /* eslint-disable no-continue */ -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile, parseSync} from '@loaders.gl/core'; import {isWKB} from '@loaders.gl/gis'; import {WKBLoader} from '@loaders.gl/wkt'; @@ -13,7 +13,7 @@ import {parseTestCases} from '@loaders.gl/gis/test/data/wkt/parse-test-cases'; const WKB_2D_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdata2d.json'; const WKB_Z_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdataZ.json'; -test('WKBLoader#2D', async t => { +test('WKBLoader#2D', async () => { const response = await fetchFile(WKB_2D_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); @@ -22,23 +22,21 @@ test('WKBLoader#2D', async t => { for (const [title, testCase] of Object.entries(TEST_CASES2)) { // Little endian if (testCase.wkb && testCase.binary) { - t.ok(isWKB(testCase.wkb), 'isWKB(2D)'); + 
expect(isWKB(testCase.wkb), 'isWKB(2D)').toBeTruthy(); const result = parseSync(testCase.wkb, WKBLoader); - t.deepEqual(result, testCase.geoJSON, title); + expect(result, title).toEqual(testCase.geoJSON); } // Big endian if (testCase.wkbXdr && testCase.binary) { - t.ok(isWKB(testCase.wkbXdr), 'isWKB(2D)'); + expect(isWKB(testCase.wkbXdr), 'isWKB(2D)').toBeTruthy(); const result = parseSync(testCase.wkbXdr, WKBLoader); - t.deepEqual(result, testCase.geoJSON, title); + expect(result, title).toEqual(testCase.geoJSON); } } - - t.end(); }); -test('WKBLoader#Z', async t => { +test('WKBLoader#Z', async () => { const response = await fetchFile(WKB_Z_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); @@ -46,30 +44,28 @@ test('WKBLoader#Z', async t => { for (const [title, testCase] of Object.entries(TEST_CASES)) { // Little endian if (testCase.wkb && testCase.binary) { - t.ok(isWKB(testCase.wkb), 'isWKB(Z)'); + expect(isWKB(testCase.wkb), 'isWKB(Z)').toBeTruthy(); // TODO - remove and fix empty handling if (title.startsWith('empty') || title.includes('One')) { continue; } const result = parseSync(testCase.wkb, WKBLoader); - t.deepEqual(result, testCase.geoJSON, title); + expect(result, title).toEqual(testCase.geoJSON); } // Big endian if (testCase.wkbXdr && testCase.binary) { - t.ok(isWKB(testCase.wkbXdr), 'isWKB(Z)'); + expect(isWKB(testCase.wkbXdr), 'isWKB(Z)').toBeTruthy(); // TODO - remove and fix empty handling if (title.startsWith('empty') || title.includes('One')) { continue; } const result = parseSync(testCase.wkbXdr, WKBLoader); - t.deepEqual(result, testCase.geoJSON, title); + expect(result, title).toEqual(testCase.geoJSON); } // if (testCase.wkbXdr && testCase.binary && testCase.geoJSON) { // t.deepEqual(parseSync(testCase.wkbXdr, WKBLoader, {wkb: {shape: 'geometry'}}), testCase.geoJSON); // } } - - t.end(); }); diff --git a/modules/wkt/test/wkb-writer.spec.ts b/modules/wkt/test/wkb-writer.spec.ts index 77c89ef6a5..4002339613 100644 --- 
a/modules/wkt/test/wkb-writer.spec.ts +++ b/modules/wkt/test/wkb-writer.spec.ts @@ -2,7 +2,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {fetchFile, encodeSync} from '@loaders.gl/core'; import {WKBWriter} from '@loaders.gl/wkt'; import {parseTestCases} from '@loaders.gl/gis/test/data/wkt/parse-test-cases'; @@ -12,54 +12,46 @@ const WKB_2D_NAN_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdata2d-nan. const WKB_Z_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdataZ.json'; const WKB_Z_NAN_TEST_CASES = '@loaders.gl/gis/test/data/wkt/wkb-testdataZ-nan.json'; -test('WKBWriter#2D', async t => { +test('WKBWriter#2D', async () => { const response = await fetchFile(WKB_2D_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); for (const testCase of Object.values(TEST_CASES)) { const {geoJSON, wkb} = testCase; const encoded = encodeSync(geoJSON, WKBWriter, {wkb: {hasZ: false, hasM: false}}); - t.deepEqual(encoded, wkb); + expect(encoded).toEqual(wkb); } - - t.end(); }); -test('WKBWriter#2D NaN', async t => { +test('WKBWriter#2D NaN', async () => { const response = await fetchFile(WKB_2D_NAN_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); for (const testCase of Object.values(TEST_CASES)) { const {geoJSON, wkb} = testCase; const encoded = encodeSync(geoJSON, WKBWriter, {wkb: {hasZ: false, hasM: false}}); - t.deepEqual(encoded, wkb); + expect(encoded).toEqual(wkb); } - - t.end(); }); -test('WKBWriter#Z', async t => { +test('WKBWriter#Z', async () => { const response = await fetchFile(WKB_Z_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); for (const testCase of Object.values(TEST_CASES)) { const {geoJSON, wkb} = testCase; const encoded = encodeSync(geoJSON, WKBWriter, {wkb: {hasZ: true, hasM: false}}); - t.deepEqual(encoded, wkb); + expect(encoded).toEqual(wkb); } - - t.end(); }); -test('WKBWriter#Z 
NaN', async t => { +test('WKBWriter#Z NaN', async () => { const response = await fetchFile(WKB_Z_NAN_TEST_CASES); const TEST_CASES = parseTestCases(await response.json()); for (const testCase of Object.values(TEST_CASES)) { const {geoJSON, wkb} = testCase; const encoded = encodeSync(geoJSON, WKBWriter, {wkb: {hasZ: true, hasM: false}}); - t.deepEqual(encoded, wkb); + expect(encoded).toEqual(wkb); } - - t.end(); }); diff --git a/modules/wkt/test/wkt-crs-loader.spec.ts b/modules/wkt/test/wkt-crs-loader.spec.ts index fbb79f7958..6c0752869d 100644 --- a/modules/wkt/test/wkt-crs-loader.spec.ts +++ b/modules/wkt/test/wkt-crs-loader.spec.ts @@ -3,7 +3,7 @@ // Copyright (c) vis.gl contributors // parse-wkt-crs was forked from https://github.com/DanielJDufour/wkt-crs under Creative Commons CC0 1.0 license. -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {parseSync, encodeTextSync} from '@loaders.gl/core'; import {WKTCRSLoader, WKTCRSWriter} from '@loaders.gl/wkt'; @@ -11,24 +11,20 @@ const roundtrip = wkt => encodeTextSync(parseSync(wkt, WKTCRSLoader, {raw: true} const condense = wkt => wkt.trim().replace(/(?<=[,\[\]])[ \n]+/g, ''); -test('WKTCRSLoader#NAD27 / UTM zone 16N', t => { +test('WKTCRSLoader#NAD27 / UTM zone 16N', () => { const wkt = 'PROJCS["NAD27 / UTM zone 16N",GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982139006,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4267"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-87],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","26716"]]'; const data = parseSync(wkt, WKTCRSLoader, {raw: false, debug: false}); // 
console.log(JSON.stringify(data, undefined, 2)); - t.deepEqual(data.length, 1); - t.deepEqual(Object.keys(data), ['0', 'PROJCS']); - t.deepEqual(data.PROJCS.AUTHORITY, ['AUTHORITY', 'EPSG', '26716']); - t.deepEqual(data.PROJCS === data[0], true); - t.deepEqual(data.PROJCS[1] === 'NAD27 / UTM zone 16N', true); - t.deepEqual(data.PROJCS.GEOGCS === data[0][2], true); - - // raw mode - // t.deepEqual(roundtrip(wkt), wkt); - t.end(); + expect(data.length).toEqual(1); + expect(Object.keys(data)).toEqual(['0', 'PROJCS']); + expect(data.PROJCS.AUTHORITY).toEqual(['AUTHORITY', 'EPSG', '26716']); + expect(data.PROJCS === data[0]).toEqual(true); + expect(data.PROJCS[1] === 'NAD27 / UTM zone 16N').toEqual(true); + expect(data.PROJCS.GEOGCS === data[0][2]).toEqual(true); }); -test('WKTCRSLoader#wikipedia example', t => { +test('WKTCRSLoader#wikipedia example', () => { const wkt = `GEODCRS["WGS 84", DATUM["World Geodetic System 1984", ELLIPSOID["WGS 84", 6378137, 298.257223563, LENGTHUNIT["metre", 1]]], @@ -37,15 +33,13 @@ test('WKTCRSLoader#wikipedia example', t => { AXIS["Longitude (lon)", east, ORDER[2]], ANGLEUNIT["degree", 0.0174532925199433]]`; const data = parseSync(wkt, WKTCRSLoader, {debug: false}); - t.deepEqual(data.GEODCRS[1], 'WGS 84'); - t.deepEqual(data.GEODCRS.DATUM.ELLIPSOID[3], 298.257223563); - t.deepEqual(data.GEODCRS.CS[1], 'ellipsoidal'); - t.deepEqual(data.GEODCRS.ANGLEUNIT[2], 0.0174532925199433); - // t.deepEqual(roundtrip(wkt), condense(wkt)); - t.end(); + expect(data.GEODCRS[1]).toEqual('WGS 84'); + expect(data.GEODCRS.DATUM.ELLIPSOID[3]).toEqual(298.257223563); + expect(data.GEODCRS.CS[1]).toEqual('ellipsoidal'); + expect(data.GEODCRS.ANGLEUNIT[2]).toEqual(0.0174532925199433); }); -test.skip('WKTCRSLoader#wikipedia raw', t => { +test.skip('WKTCRSLoader#wikipedia raw', () => { const wkt = `GEODCRS["WGS 84", DATUM["World Geodetic System 1984", ELLIPSOID["WGS 84", 6378137, 298.257223563, LENGTHUNIT["metre", 1]]], @@ -54,15 +48,14 @@ 
test.skip('WKTCRSLoader#wikipedia raw', t => { AXIS["Longitude (lon)", east, ORDER[2]], ANGLEUNIT["degree", 0.0174532925199433]]`; const data = parseSync(wkt, WKTCRSLoader, {debug: false, raw: true}); - t.deepEqual(data.GEODCRS[1], 'WGS 84'); - t.deepEqual(data.GEODCRS.DATUM.ELLIPSOID[3], 'raw:298.257223563'); - t.deepEqual(data.GEODCRS.CS[1], 'raw:ellipsoidal'); - t.deepEqual(data.GEODCRS.ANGLEUNIT[2], 'raw:0.0174532925199433'); - t.deepEqual(roundtrip(wkt), condense(wkt)); - t.end(); + expect(data.GEODCRS[1]).toEqual('WGS 84'); + expect(data.GEODCRS.DATUM.ELLIPSOID[3]).toEqual('raw:298.257223563'); + expect(data.GEODCRS.CS[1]).toEqual('raw:ellipsoidal'); + expect(data.GEODCRS.ANGLEUNIT[2]).toEqual('raw:0.0174532925199433'); + expect(roundtrip(wkt)).toEqual(condense(wkt)); }); -test('WKTCRSLoader#wikipedia concat', t => { +test('WKTCRSLoader#wikipedia concat', () => { const wkt = ` CONCAT_MT[ PARAM_MT["Mercator_2SP", @@ -80,13 +73,11 @@ test('WKTCRSLoader#wikipedia concat', t => { PARAMETER["elt 1 2",3]]] `; const data = parseSync(wkt, WKTCRSLoader, {debug: false, raw: true}); - t.deepEqual(data.CONCAT_MT.PARAM_MT, undefined); - t.deepEqual(data.CONCAT_MT.MULTIPLE_PARAM_MT.length, 2); - // t.deepEqual(roundtrip(wkt), condense(wkt)); - t.end(); + expect(data.CONCAT_MT.PARAM_MT).toEqual(undefined); + expect(data.CONCAT_MT.MULTIPLE_PARAM_MT.length).toEqual(2); }); -test.skip('WKTCRSLoader#wikipedia datum shift', t => { +test.skip('WKTCRSLoader#wikipedia datum shift', () => { const wkt = ` COORDINATEOPERATION["AGD84 to GDA94 Auslig 5m", SOURCECRS["…full CRS definition required here but omitted for brevity…"], @@ -102,41 +93,34 @@ test.skip('WKTCRSLoader#wikipedia datum shift', t => { const data = parseSync(wkt, WKTCRSLoader, {debug: false, raw: true}); // stringifying array ignores keys added on const str = JSON.stringify(data); - t.deepEqual( - str, - '[["COORDINATEOPERATION","AGD84 to GDA94 Auslig 5m",["SOURCECRS","…full CRS definition required here but omitted for 
brevity…"],["TARGETCRS","…full CRS definition required here but omitted for brevity…"],["METHOD","Geocentric translations",["ID","EPSG","raw:1031"]],["PARAMETER","X-axis translation","raw:-128.5",["LENGTHUNIT","metre","raw:1"]],["PARAMETER","Y-axis translation","raw:-53.0",["LENGTHUNIT","metre","raw:1"]],["PARAMETER","Z-axis translation","raw:153.4",["LENGTHUNIT","metre","raw:1"]],["OPERATIONACCURACY","raw:5"],["AREA","Australia onshore"],["BBOX","raw:-43.7","raw:112.85","raw:-9.87","raw:153.68"]]]' - ); - t.deepEqual(roundtrip(wkt), condense(wkt)); - t.end(); + expect(str).toEqual('[["COORDINATEOPERATION","AGD84 to GDA94 Auslig 5m",["SOURCECRS","…full CRS definition required here but omitted for brevity…"],["TARGETCRS","…full CRS definition required here but omitted for brevity…"],["METHOD","Geocentric translations",["ID","EPSG","raw:1031"]],["PARAMETER","X-axis translation","raw:-128.5",["LENGTHUNIT","metre","raw:1"]],["PARAMETER","Y-axis translation","raw:-53.0",["LENGTHUNIT","metre","raw:1"]],["PARAMETER","Z-axis translation","raw:153.4",["LENGTHUNIT","metre","raw:1"]],["OPERATIONACCURACY","raw:5"],["AREA","Australia onshore"],["BBOX","raw:-43.7","raw:112.85","raw:-9.87","raw:153.68"]]]'); + expect(roundtrip(wkt)).toEqual(condense(wkt)); }); -test('WKTCRSLoader#proj4js example', t => { +test('WKTCRSLoader#proj4js example', () => { const wkt = 'PROJCS["NAD83 / Massachusetts Mainland",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 
1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",42.68333333333333],PARAMETER["standard_parallel_2",41.71666666666667],PARAMETER["latitude_of_origin",41],PARAMETER["central_meridian",-71.5],PARAMETER["false_easting",200000],PARAMETER["false_northing",750000],AUTHORITY["EPSG","26986"],AXIS["X",EAST],AXIS["Y",NORTH]]'; const data = parseSync(wkt, WKTCRSLoader); - t.deepEqual(data.PROJCS[1], 'NAD83 / Massachusetts Mainland'); - t.end(); + expect(data.PROJCS[1]).toEqual('NAD83 / Massachusetts Mainland'); }); -test('WKTCRSLoader#parse attribute that ends in number (TOWGS84)', t => { +test('WKTCRSLoader#parse attribute that ends in number (TOWGS84)', () => { const wkt = ' GEOGCS["SAD69",DATUM["South_American_Datum_1969",SPHEROID["GRS 1967 Modified",6378160,298.25,AUTHORITY["EPSG","7050"]],TOWGS84[-57,1,-41,0,0,0,0],AUTHORITY["EPSG","6618"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4618"]]'; - t.deepEqual(roundtrip(wkt), condense(wkt)); - t.end(); + expect(roundtrip(wkt)).toEqual(condense(wkt)); }); -test.skip('WKTCRSLoader#another parse bug', t => { +test.skip('WKTCRSLoader#another parse bug', () => { const wkt = 'PROJCS["ETRS89 / TM35FIN(E,N)",GEOGCS["ETRS89",DATUM["European_Terrestrial_Reference_System_1989",SPHEROID["GRS 
1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6258"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4258"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",27],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","3067"]]'; const data = parseSync(wkt, WKTCRSLoader, {debug: false}); - t.deepEqual(data.PROJCS[1], 'ETRS89 / TM35FIN(E,N)'); - t.deepEqual(data.PROJCS.MULTIPLE_AXIS[1][2], 'NORTH'); - t.deepEqual(roundtrip(wkt), wkt); - t.end(); + expect(data.PROJCS[1]).toEqual('ETRS89 / TM35FIN(E,N)'); + expect(data.PROJCS.MULTIPLE_AXIS[1][2]).toEqual('NORTH'); + expect(roundtrip(wkt)).toEqual(wkt); }); // Not clear where to find crs.json -// test.skip('WKTCRSLoader#try to parse everything in crs.json', (t) => { +// test.skip('WKTCRSLoader#try to parse everything in crs.json', () => { // let data = require('./crs.json'); // data = data.map(({wkt, esriwkt, prettywkt}) => ({ // raw: { @@ -159,7 +143,7 @@ test.skip('WKTCRSLoader#another parse bug', t => { // }); // }); -// test("7.5.6.3 Axis unit for ordinal coordinate systems", t => { +// test("7.5.6.3 Axis unit for ordinal coordinate systems", () => { // const wkt = `NULL[CS[ordinal,2], // AXIS["inline (I)",southeast,ORDER[1]], // AXIS["crossline (J)",northeast,ORDER[2]]]`; @@ -168,69 +152,58 @@ test.skip('WKTCRSLoader#another parse bug', t => { // t.end(); // }); -test.skip('WKTCRSLoader#sort parameters', t => { +test.skip('WKTCRSLoader#sort parameters', () => { const wkt = 
'PROJCS["WGS_1984_Antarctic_Polar_Stereographic",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Stereographic_South_Pole"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",-71.0],UNIT["Meter",1.0]]'; const data = parseSync(wkt, WKTCRSLoader, {debug: false, raw: true, sort: true}); - t.deepEqual( - encodeTextSync(data, WKTCRSWriter), - 'PROJCS["WGS_1984_Antarctic_Polar_Stereographic",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Stereographic_South_Pole"],PARAMETER["Central_Meridian",0.0],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Standard_Parallel_1",-71.0],UNIT["Meter",1.0]]' - ); - t.end(); + expect(encodeTextSync(data, WKTCRSWriter)).toEqual('PROJCS["WGS_1984_Antarctic_Polar_Stereographic",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Stereographic_South_Pole"],PARAMETER["Central_Meridian",0.0],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Standard_Parallel_1",-71.0],UNIT["Meter",1.0]]'); }); -// test("sort example", t => { +// test("sort example", () => { // const data = ["EXAMPLE", ["AXIS", "Northing", "raw:NORTH"], ["AXIS", "Easting", "raw:EAST"]]; // wktcrs.sort(data); // t.deepEqual(data, ["EXAMPLE", ["AXIS", "Easting", "raw:EAST"], ["AXIS", "Northing", "raw:NORTH"]]); // t.end(); // }); -test.skip('WKTCRSLoader#sort params', t => { +test.skip('WKTCRSLoader#sort params', () => { const wkt = 'PARAMETERS[PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-87],PARAMETER["scale_factor",0.9996]]'; let data = parseSync(wkt, WKTCRSLoader, {debug: false, raw: true}); - 
t.deepEqual(data[0].MULTIPLE_PARAMETER, [ + expect(data[0].MULTIPLE_PARAMETER).toEqual([ ['PARAMETER', 'latitude_of_origin', 'raw:0'], ['PARAMETER', 'central_meridian', 'raw:-87'], ['PARAMETER', 'scale_factor', 'raw:0.9996'] ]); data = parseSync(wkt, WKTCRSLoader, {debug: false, raw: true, sort: true}); - t.deepEqual(data[0].MULTIPLE_PARAMETER, [ + expect(data[0].MULTIPLE_PARAMETER).toEqual([ ['PARAMETER', 'central_meridian', 'raw:-87'], ['PARAMETER', 'latitude_of_origin', 'raw:0'], ['PARAMETER', 'scale_factor', 'raw:0.9996'] ]); - t.deepEqual( - encodeTextSync(data, WKTCRSWriter), - 'PARAMETERS[PARAMETER["central_meridian",-87],PARAMETER["latitude_of_origin",0],PARAMETER["scale_factor",0.9996]]' - ); - t.end(); + expect(encodeTextSync(data, WKTCRSWriter)).toEqual('PARAMETERS[PARAMETER["central_meridian",-87],PARAMETER["latitude_of_origin",0],PARAMETER["scale_factor",0.9996]]'); }); -test('WKTCRSLoader#parse inner parens', t => { +test('WKTCRSLoader#parse inner parens', () => { const wkt = 'GEOGCS["GRS 1980(IUGG, 1980)",DATUM["unknown",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["epsg","7686"]]'; const data = parseSync(wkt, WKTCRSLoader, {debug: false, raw: true}); - t.deepEqual(data.GEOGCS[0], 'GEOGCS'); - t.end(); + expect(data.GEOGCS[0]).toEqual('GEOGCS'); }); -test.skip('WKTCRSWriter#authority', t => { +test.skip('WKTCRSWriter#authority', () => { const authority = ['AUTHORITY', 'EPSG', '9122']; const unparsed = encodeTextSync(authority, WKTCRSWriter); - t.deepEqual(unparsed, {data: 'AUTHORITY["EPSG","9122"]'}); - t.end(); + expect(unparsed).toEqual({data: 'AUTHORITY["EPSG","9122"]'}); }); -test.skip('WKTCRSWriter#PRIMEM', t => { +test.skip('WKTCRSWriter#PRIMEM', () => { const authority = ['PRIMEM', 'Greenwich', 0, ['AUTHORITY', 'EPSG', '8901']]; const unparsed = encodeTextSync(authority, WKTCRSWriter); - t.deepEqual(unparsed, {data: 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]]'}); - t.end(); + 
expect(unparsed).toEqual({data: 'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]]'}); }); -test.skip('WKTCRSWriter#DATUM', t => { +test.skip('WKTCRSWriter#DATUM', () => { const datum = [ 'DATUM', 'North_American_Datum_1927', @@ -238,13 +211,12 @@ test.skip('WKTCRSWriter#DATUM', t => { ['AUTHORITY', 'EPSG', '6267'] ]; const unparsed = encodeTextSync(datum, WKTCRSWriter); - t.deepEqual(unparsed, { + expect(unparsed).toEqual({ data: 'DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982139006,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]]' }); - t.end(); }); -test.skip('WKTCRSWriter#GEOGCS', t => { +test.skip('WKTCRSWriter#GEOGCS', () => { const data = [ 'GEOGCS', 'NAD27', @@ -259,7 +231,7 @@ test.skip('WKTCRSWriter#GEOGCS', t => { ['AUTHORITY', 'EPSG', '4267'] ]; const unparsed = encodeTextSync(data, WKTCRSWriter); - t.deepEqual(unparsed, { + expect(unparsed).toEqual({ data: 'GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982139006,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4267"]]' }); }); diff --git a/modules/wkt/test/wkt-loader.spec.ts b/modules/wkt/test/wkt-loader.spec.ts index 4854e8f5b3..8c07c9f540 100644 --- a/modules/wkt/test/wkt-loader.spec.ts +++ b/modules/wkt/test/wkt-loader.spec.ts @@ -4,7 +4,7 @@ // Fork of https://github.com/mapbox/wellknown under ISC license (MIT/BSD-2-clause equivalent) -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {validateLoader} from 'test/common/conformance'; import {WKTLoader, WKTWorkerLoader} from '@loaders.gl/wkt'; import {setLoaderOptions, fetchFile, parseSync} from '@loaders.gl/core'; @@ -18,53 +18,52 @@ setLoaderOptions({ _workerType: 'test' }); -test('WKTWorkerLoader#loader objects', async t => { - validateLoader(t, WKTLoader, 'WKTLoader'); - validateLoader(t, WKTWorkerLoader, 
'WKTWorkerLoader'); - t.end(); +test('WKTWorkerLoader#loader objects', async () => { + validateLoader(WKTLoader, 'WKTLoader'); + validateLoader(WKTWorkerLoader, 'WKTWorkerLoader'); }); // eslint-disable-next-line max-statements -test('WKTLoader', async t => { +test('WKTLoader', async () => { let response = await fetchFile(GEOMETRYCOLLECTION_WKT_URL); const GEOMETRYCOLLECTION_WKT = await response.text(); response = await fetchFile(GEOMETRYCOLLECTION_GEOJSON_URL); const GEOMETRYCOLLECTION_GEOJSON = await response.json(); - t.deepEqual(parseSync('POINT (0 1)', WKTLoader), { + expect(parseSync('POINT (0 1)', WKTLoader)).toEqual({ type: 'Point', coordinates: [0, 1] }); - t.deepEqual(parseSync('POINT (1 1)', WKTLoader), { + expect(parseSync('POINT (1 1)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1, 1] }); - t.deepEqual(parseSync('POINT(1 1)', WKTLoader), { + expect(parseSync('POINT(1 1)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1, 1] }); - t.deepEqual(parseSync('POINT\n\r(1 1)', WKTLoader), { + expect(parseSync('POINT\n\r(1 1)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1, 1] }); - t.deepEqual(parseSync('POINT(1.1 1.1)', WKTLoader), { + expect(parseSync('POINT(1.1 1.1)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1.1, 1.1] }); - t.deepEqual(parseSync('point(1.1 1.1)', WKTLoader), { + expect(parseSync('point(1.1 1.1)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1.1, 1.1] }); - t.deepEqual(parseSync('point(1 2 3)', WKTLoader), { + expect(parseSync('point(1 2 3)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1, 2, 3] }); - t.deepEqual(parseSync('point(1 2 3 4)', WKTLoader), { + expect(parseSync('point(1 2 3 4)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1, 2, 3, 4] }); - t.deepEqual(parseSync('SRID=3857;POINT (1 2 3)', WKTLoader), { + expect(parseSync('SRID=3857;POINT (1 2 3)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1, 2, 3], crs: { @@ -74,7 +73,7 @@ test('WKTLoader', async t => { } } }); - 
t.deepEqual(parseSync('LINESTRING (30 10, 10 30, 40 40)', WKTLoader), { + expect(parseSync('LINESTRING (30 10, 10 30, 40 40)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [30, 10], @@ -82,7 +81,7 @@ test('WKTLoader', async t => { [40, 40] ] }); - t.deepEqual(parseSync('LINESTRING(30 10, 10 30, 40 40)', WKTLoader), { + expect(parseSync('LINESTRING(30 10, 10 30, 40 40)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [30, 10], @@ -90,7 +89,7 @@ test('WKTLoader', async t => { [40, 40] ] }); - t.deepEqual(parseSync('LineString(30 10, 10 30, 40 40)', WKTLoader), { + expect(parseSync('LineString(30 10, 10 30, 40 40)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [30, 10], @@ -98,21 +97,21 @@ test('WKTLoader', async t => { [40, 40] ] }); - t.deepEqual(parseSync('LINESTRING (1 2 3, 4 5 6)', WKTLoader), { + expect(parseSync('LINESTRING (1 2 3, 4 5 6)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [1, 2, 3], [4, 5, 6] ] }); - t.deepEqual(parseSync('LINESTRING (1 2 3 4, 5 6 7 8)', WKTLoader), { + expect(parseSync('LINESTRING (1 2 3 4, 5 6 7 8)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [1, 2, 3, 4], [5, 6, 7, 8] ] }); - t.deepEqual(parseSync('SRID=3857;LINESTRING (30 10, 10 30, 40 40)', WKTLoader), { + expect(parseSync('SRID=3857;LINESTRING (30 10, 10 30, 40 40)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [30, 10], @@ -126,7 +125,7 @@ test('WKTLoader', async t => { } } }); - t.deepEqual(parseSync('POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))', WKTLoader), { + expect(parseSync('POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))', WKTLoader)).toEqual({ type: 'Polygon', coordinates: [ [ @@ -138,7 +137,7 @@ test('WKTLoader', async t => { ] ] }); - t.deepEqual(parseSync('POLYGON((30 10, 10 20, 20 40, 40 40, 30 10))', WKTLoader), { + expect(parseSync('POLYGON((30 10, 10 20, 20 40, 40 40, 30 10))', WKTLoader)).toEqual({ type: 'Polygon', coordinates: [ [ @@ -150,7 +149,7 @@ test('WKTLoader', async t => { ] ] 
}); - t.deepEqual(parseSync('SRID=3857;POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))', WKTLoader), { + expect(parseSync('SRID=3857;POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))', WKTLoader)).toEqual({ type: 'Polygon', coordinates: [ [ @@ -168,12 +167,10 @@ test('WKTLoader', async t => { } } }); - t.deepEqual( - parseSync( + expect(parseSync( 'POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))', WKTLoader - ), - { + )).toEqual({ type: 'Polygon', coordinates: [ [ @@ -190,30 +187,29 @@ test('WKTLoader', async t => { [20, 30] ] ] - } - ); - t.deepEqual(parseSync('MULTIPOINT (0 0, 2 3)', WKTLoader), { + }); + expect(parseSync('MULTIPOINT (0 0, 2 3)', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [0, 0], [2, 3] ] }); - t.deepEqual(parseSync('MULTIPOINT (1 1, 2 3)', WKTLoader), { + expect(parseSync('MULTIPOINT (1 1, 2 3)', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [1, 1], [2, 3] ] }); - t.deepEqual(parseSync('MultiPoint (1 1, 2 3)', WKTLoader), { + expect(parseSync('MultiPoint (1 1, 2 3)', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [1, 1], [2, 3] ] }); - t.deepEqual(parseSync('SRID=3857;MULTIPOINT (1 1, 2 3)', WKTLoader), { + expect(parseSync('SRID=3857;MULTIPOINT (1 1, 2 3)', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [1, 1], @@ -226,28 +222,28 @@ test('WKTLoader', async t => { } } }); - t.deepEqual(parseSync('MULTIPOINT ((0 0), (2 3))', WKTLoader), { + expect(parseSync('MULTIPOINT ((0 0), (2 3))', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [0, 0], [2, 3] ] }); - t.deepEqual(parseSync('MULTIPOINT ((1 1), (2 3))', WKTLoader), { + expect(parseSync('MULTIPOINT ((1 1), (2 3))', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [1, 1], [2, 3] ] }); - t.deepEqual(parseSync('MultiPoint ((1 1), (2 3))', WKTLoader), { + expect(parseSync('MultiPoint ((1 1), (2 3))', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [1, 1], [2, 3] ] }); - 
t.deepEqual(parseSync('SRID=3857;MULTIPOINT ((1 1), (2 3))', WKTLoader), { + expect(parseSync('SRID=3857;MULTIPOINT ((1 1), (2 3))', WKTLoader)).toEqual({ type: 'MultiPoint', coordinates: [ [1, 1], @@ -260,9 +256,7 @@ test('WKTLoader', async t => { } } }); - t.deepEqual( - parseSync('MULTILINESTRING ((30 10, 10 30, 40 40), (30 10, 10 30, 40 40))', WKTLoader), - { + expect(parseSync('MULTILINESTRING ((30 10, 10 30, 40 40), (30 10, 10 30, 40 40))', WKTLoader)).toEqual({ type: 'MultiLineString', coordinates: [ [ @@ -276,14 +270,11 @@ test('WKTLoader', async t => { [40, 40] ] ] - } - ); - t.deepEqual( - parseSync( + }); + expect(parseSync( 'SRID=3857;MULTILINESTRING ((30 10, 10 30, 40 40), (30 10, 10 30, 40 40))', WKTLoader - ), - { + )).toEqual({ type: 'MultiLineString', coordinates: [ [ @@ -303,14 +294,11 @@ test('WKTLoader', async t => { name: 'urn:ogc:def:crs:EPSG::3857' } } - } - ); - t.deepEqual( - parseSync( + }); + expect(parseSync( 'MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)), ((15 5, 40 10, 10 20, 5 10, 15 5)))', WKTLoader - ), - { + )).toEqual({ type: 'MultiPolygon', coordinates: [ [ @@ -331,18 +319,15 @@ test('WKTLoader', async t => { ] ] ] - } - ); - t.deepEqual(parseSync('MULTIPOLYGON (((-74.03349399999999 40.688348)))', WKTLoader), { + }); + expect(parseSync('MULTIPOLYGON (((-74.03349399999999 40.688348)))', WKTLoader)).toEqual({ type: 'MultiPolygon', coordinates: [[[[-74.03349399999999, 40.688348]]]] }); - t.deepEqual( - parseSync( + expect(parseSync( 'MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)), ((15 5, 40 10, 10 20, 5 10, 15 5), (10 10, 15 10, 15 15, 10 10)))', WKTLoader - ), - { + )).toEqual({ type: 'MultiPolygon', coordinates: [ [ @@ -369,14 +354,11 @@ test('WKTLoader', async t => { ] ] ] - } - ); - t.deepEqual( - parseSync( + }); + expect(parseSync( 'SRID=3857;MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)), ((15 5, 40 10, 10 20, 5 10, 15 5)))', WKTLoader - ), - { + )).toEqual({ type: 'MultiPolygon', coordinates: [ [ @@ -403,10 +385,9 @@ 
test('WKTLoader', async t => { name: 'urn:ogc:def:crs:EPSG::3857' } } - } - ); - t.deepEqual(parseSync(GEOMETRYCOLLECTION_WKT, WKTLoader), GEOMETRYCOLLECTION_GEOJSON); - t.deepEqual(parseSync('GeometryCollection(POINT(4 6),LINESTRING(4 6,7 10))', WKTLoader), { + }); + expect(parseSync(GEOMETRYCOLLECTION_WKT, WKTLoader)).toEqual(GEOMETRYCOLLECTION_GEOJSON); + expect(parseSync('GeometryCollection(POINT(4 6),LINESTRING(4 6,7 10))', WKTLoader)).toEqual({ type: 'GeometryCollection', geometries: [ { @@ -422,7 +403,7 @@ test('WKTLoader', async t => { } ] }); - t.deepEqual(parseSync('GeometryCollection(POINT(4 6),\nLINESTRING(4 6,7 10))', WKTLoader), { + expect(parseSync('GeometryCollection(POINT(4 6),\nLINESTRING(4 6,7 10))', WKTLoader)).toEqual({ type: 'GeometryCollection', geometries: [ { @@ -438,23 +419,23 @@ test('WKTLoader', async t => { } ] }); - t.deepEqual(parseSync('POINT (1e-6 1E+2)', WKTLoader), { + expect(parseSync('POINT (1e-6 1E+2)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1e-6, 1e2] }); - t.equal(parseSync('POINT(100)', WKTLoader), null); - t.equal(parseSync('POINT(100, 100)', WKTLoader), null); - t.equal(parseSync('POINT()', WKTLoader), null); - t.equal(parseSync('MULTIPOINT()', WKTLoader), null); - t.equal(parseSync('MULTIPOINT(1)', WKTLoader), null); - t.equal(parseSync('MULTIPOINT(1 1, 1)', WKTLoader), null); + expect(parseSync('POINT(100)', WKTLoader)).toBe(null); + expect(parseSync('POINT(100, 100)', WKTLoader)).toBe(null); + expect(parseSync('POINT()', WKTLoader)).toBe(null); + expect(parseSync('MULTIPOINT()', WKTLoader)).toBe(null); + expect(parseSync('MULTIPOINT(1)', WKTLoader)).toBe(null); + expect(parseSync('MULTIPOINT(1 1, 1)', WKTLoader)).toBe(null); - t.deepEqual(parseSync('POINT Z (1 2 3)', WKTLoader), { + expect(parseSync('POINT Z (1 2 3)', WKTLoader)).toEqual({ type: 'Point', coordinates: [1, 2, 3] }); - t.deepEqual(parseSync('LINESTRING Z (30 10 1, 10 30 2, 40 40 3)', WKTLoader), { + expect(parseSync('LINESTRING Z (30 10 1, 10 
30 2, 40 40 3)', WKTLoader)).toEqual({ type: 'LineString', coordinates: [ [30, 10, 1], @@ -463,7 +444,7 @@ test('WKTLoader', async t => { ] }); - t.deepEqual(parseSync('POLYGON Z ((30 10 1, 10 20 2, 20 40 3, 40 40 4, 30 10 5))', WKTLoader), { + expect(parseSync('POLYGON Z ((30 10 1, 10 20 2, 20 40 3, 40 40 4, 30 10 5))', WKTLoader)).toEqual({ type: 'Polygon', coordinates: [ [ @@ -475,12 +456,10 @@ test('WKTLoader', async t => { ] ] }); - - t.end(); }); // NOTE(Kyle): Test disabled for now, to be fixed before 2.2.0 release -// test('WKTWorkerLoader', async t => { +// test('WKTWorkerLoader', async () => { // if (typeof Worker === 'undefined') { // t.comment('Worker is not usable in non-browser environments'); // t.end(); @@ -496,7 +475,7 @@ test('WKTLoader', async t => { // t.end(); // }); -test('WKTLoader#fuzz', t => { +test('WKTLoader#fuzz', () => { fuzzer.seed(0); const inputs = [ 'MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)), ((15 5, 40 10, 10 20, 5 10, 15 5)))', @@ -509,10 +488,9 @@ test('WKTLoader#fuzz', t => { const input = fuzzer.mutate.string(str); try { parseSync(input, WKTLoader); - } catch (e) { - t.fail(`could not parse ${input}, exception ${e}`); + } catch (e) {throw new Error(`could not parse ${input}, exception ${e}`) + } } }); - t.end(); }); diff --git a/modules/wkt/test/wkt-writer.spec.ts b/modules/wkt/test/wkt-writer.spec.ts index 433b187704..a0937da0a1 100644 --- a/modules/wkt/test/wkt-writer.spec.ts +++ b/modules/wkt/test/wkt-writer.spec.ts @@ -2,16 +2,13 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {encodeTextSync} from '@loaders.gl/core'; import {WKTWriter} from '@loaders.gl/wkt'; -test('WKTWriter', t => { - t.throws( - () => encodeTextSync({type: 'FeatureCollection'}, WKTWriter), - 'does not accept featurecollections' - ); +test('WKTWriter', () => { + expect(() => encodeTextSync({type: 'FeatureCollection'}, WKTWriter), 'does not 
accept featurecollections').toThrow(); // const fixtures = [ // 'LINESTRING (30 10, 10 30, 40 40)', @@ -38,7 +35,5 @@ test('WKTWriter', t => { }; const wkt = encodeTextSync(geojsonFeature.geometry, WKTWriter); - t.equal(wkt, 'POINT (42 20)', 'point equal'); - - t.end(); + expect(wkt, 'point equal').toBe('POINT (42 20)'); }); diff --git a/modules/wms/test/arcgis/arcgis-server.spec.ts b/modules/wms/test/arcgis/arcgis-server.spec.ts index 0f914346db..54ad33df09 100644 --- a/modules/wms/test/arcgis/arcgis-server.spec.ts +++ b/modules/wms/test/arcgis/arcgis-server.spec.ts @@ -2,7 +2,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import { _ArcGISFeatureServerSource as ArcGISFeatureServerSource, @@ -12,22 +12,23 @@ import { const IMAGE_SERVER_URL = 'https://example.com/arcgis/rest/services/Imagery/ImageServer'; const FEATURE_SERVER_URL = 'https://example.com/arcgis/rest/services/Roads/FeatureServer/0'; -test('ArcGISImageServerSource#testURL', t => { - t.ok(ArcGISImageServerSource); - t.ok(ArcGISImageServerSource.testURL(IMAGE_SERVER_URL), 'identifies ArcGIS ImageServer URLs'); - t.end(); +test('ArcGISImageServerSource#testURL', () => { + expect(ArcGISImageServerSource).toBeTruthy(); + expect( + ArcGISImageServerSource.testURL(IMAGE_SERVER_URL), + 'identifies ArcGIS ImageServer URLs' + ).toBeTruthy(); }); -test('ArcGISImageSource#metadataURL', t => { +test('ArcGISImageSource#metadataURL', () => { const source = ArcGISImageServerSource.createDataSource(IMAGE_SERVER_URL, {}); const metadataUrl = new URL(source.metadataURL()); - t.equal(metadataUrl.origin + metadataUrl.pathname, IMAGE_SERVER_URL, 'metadata base URL'); - t.equal(metadataUrl.searchParams.get('f'), 'pjson', 'metadata format'); - t.end(); + expect(metadataUrl.origin + metadataUrl.pathname, 'metadata base URL').toBe(IMAGE_SERVER_URL); + expect(metadataUrl.searchParams.get('f'), 'metadata format').toBe('pjson'); 
}); -test('ArcGISImageSource#exportImageURL', t => { +test('ArcGISImageSource#exportImageURL', () => { const source = ArcGISImageServerSource.createDataSource(IMAGE_SERVER_URL, {}); const exportImageUrl = new URL( @@ -41,17 +42,16 @@ test('ArcGISImageSource#exportImageURL', t => { }) ); - t.equal(exportImageUrl.origin + exportImageUrl.pathname, `${IMAGE_SERVER_URL}/exportImage`); - t.equal(exportImageUrl.searchParams.get('bbox'), '1,2,3,4'); - t.equal(exportImageUrl.searchParams.get('bboxSR'), '4326'); - t.equal(exportImageUrl.searchParams.get('size'), '512,256'); - t.equal(exportImageUrl.searchParams.get('imageSR'), '3857'); - t.equal(exportImageUrl.searchParams.get('format'), 'png'); - t.equal(exportImageUrl.searchParams.get('f'), 'image'); - t.end(); + expect(exportImageUrl.origin + exportImageUrl.pathname).toBe(`${IMAGE_SERVER_URL}/exportImage`); + expect(exportImageUrl.searchParams.get('bbox')).toBe('1,2,3,4'); + expect(exportImageUrl.searchParams.get('bboxSR')).toBe('4326'); + expect(exportImageUrl.searchParams.get('size')).toBe('512,256'); + expect(exportImageUrl.searchParams.get('imageSR')).toBe('3857'); + expect(exportImageUrl.searchParams.get('format')).toBe('png'); + expect(exportImageUrl.searchParams.get('f')).toBe('image'); }); -test('ArcGISImageSource#getMetadata', async t => { +test('ArcGISImageSource#getMetadata', async () => { const source = ArcGISImageServerSource.createDataSource(IMAGE_SERVER_URL, {}); source.fetch = async () => new Response( @@ -63,13 +63,12 @@ test('ArcGISImageSource#getMetadata', async t => { ); const metadata = await source.getMetadata(); - t.equal(metadata.name, 'Imagery'); - t.equal(metadata.abstract, 'Image service description'); - t.deepEqual(metadata.keywords, ['raster', 'imagery']); - t.end(); + expect(metadata.name).toBe('Imagery'); + expect(metadata.abstract).toBe('Image service description'); + expect(metadata.keywords).toEqual(['raster', 'imagery']); }); -test('ArcGISImageSource#getImage maps generic parameters', 
async t => { +test('ArcGISImageSource#getImage maps generic parameters', async () => { const source = ArcGISImageServerSource.createDataSource(IMAGE_SERVER_URL, {}); let exportImageParameters; source.exportImage = async parameters => { @@ -89,7 +88,7 @@ test('ArcGISImageSource#getImage maps generic parameters', async t => { layers: [] }); - t.deepEqual(exportImageParameters, { + expect(exportImageParameters).toEqual({ bbox: [1, 2, 3, 4], bboxSR: '3857', imageSR: '3857', @@ -97,28 +96,25 @@ test('ArcGISImageSource#getImage maps generic parameters', async t => { height: 256, format: 'png' }); - t.end(); }); -test('ArcGISFeatureServerSource#testURL', t => { - t.ok(ArcGISFeatureServerSource); - t.ok( +test('ArcGISFeatureServerSource#testURL', () => { + expect(ArcGISFeatureServerSource).toBeTruthy(); + expect( ArcGISFeatureServerSource.testURL(FEATURE_SERVER_URL), 'identifies ArcGIS FeatureServer URLs' - ); - t.end(); + ).toBeTruthy(); }); -test('ArcGISVectorSource#metadataURL', t => { +test('ArcGISVectorSource#metadataURL', () => { const source = ArcGISFeatureServerSource.createDataSource(FEATURE_SERVER_URL, {}); const metadataUrl = new URL(source.metadataURL()); - t.equal(metadataUrl.origin + metadataUrl.pathname, FEATURE_SERVER_URL, 'metadata base URL'); - t.equal(metadataUrl.searchParams.get('f'), 'pjson', 'metadata format'); - t.end(); + expect(metadataUrl.origin + metadataUrl.pathname, 'metadata base URL').toBe(FEATURE_SERVER_URL); + expect(metadataUrl.searchParams.get('f'), 'metadata format').toBe('pjson'); }); -test('ArcGISVectorSource#getFeaturesURL', t => { +test('ArcGISVectorSource#getFeaturesURL', () => { const source = ArcGISFeatureServerSource.createDataSource(FEATURE_SERVER_URL, {}); const featuresUrl = new URL( source.getFeaturesURL({ @@ -131,20 +127,19 @@ test('ArcGISVectorSource#getFeaturesURL', t => { }) ); - t.equal(featuresUrl.origin + featuresUrl.pathname, `${FEATURE_SERVER_URL}/query`); - t.equal(featuresUrl.searchParams.get('returnGeometry'), 
'true'); - t.equal(featuresUrl.searchParams.get('where'), '1=1'); - t.equal(featuresUrl.searchParams.get('outFields'), '*'); - t.equal(featuresUrl.searchParams.get('outSR'), '3857'); - t.equal(featuresUrl.searchParams.get('inSR'), '3857'); - t.equal(featuresUrl.searchParams.get('geometry'), '1,2,3,4'); - t.equal(featuresUrl.searchParams.get('geometryType'), 'esriGeometryEnvelope'); - t.equal(featuresUrl.searchParams.get('spatialRel'), 'esriSpatialRelIntersects'); - t.equal(featuresUrl.searchParams.get('f'), 'geojson'); - t.end(); + expect(featuresUrl.origin + featuresUrl.pathname).toBe(`${FEATURE_SERVER_URL}/query`); + expect(featuresUrl.searchParams.get('returnGeometry')).toBe('true'); + expect(featuresUrl.searchParams.get('where')).toBe('1=1'); + expect(featuresUrl.searchParams.get('outFields')).toBe('*'); + expect(featuresUrl.searchParams.get('outSR')).toBe('3857'); + expect(featuresUrl.searchParams.get('inSR')).toBe('3857'); + expect(featuresUrl.searchParams.get('geometry')).toBe('1,2,3,4'); + expect(featuresUrl.searchParams.get('geometryType')).toBe('esriGeometryEnvelope'); + expect(featuresUrl.searchParams.get('spatialRel')).toBe('esriSpatialRelIntersects'); + expect(featuresUrl.searchParams.get('f')).toBe('geojson'); }); -test('ArcGISVectorSource#getMetadata and getSchema', async t => { +test('ArcGISVectorSource#getMetadata and getSchema', async () => { const source = ArcGISFeatureServerSource.createDataSource(FEATURE_SERVER_URL, {}); source.fetch = async () => new Response( @@ -161,21 +156,23 @@ test('ArcGISVectorSource#getMetadata and getSchema', async t => { ); const metadata = await source.getMetadata({formatSpecificMetadata: true}); - t.equal(metadata.name, 'Roads'); - t.equal(metadata.abstract, 'Road centerlines'); - t.deepEqual(metadata.layers, [{name: 'Road centerlines'}]); - t.ok(metadata.formatSpecificMetadata, 'preserves format-specific metadata when requested'); + expect(metadata.name).toBe('Roads'); + expect(metadata.abstract).toBe('Road 
centerlines'); + expect(metadata.layers).toEqual([{name: 'Road centerlines'}]); + expect( + metadata.formatSpecificMetadata, + 'preserves format-specific metadata when requested' + ).toBeTruthy(); const schema = await source.getSchema(); - t.deepEqual(schema.fields, [ + expect(schema.fields).toEqual([ {name: 'OBJECTID', type: 'int32', nullable: false}, {name: 'NAME', type: 'utf8', nullable: true}, {name: 'LENGTH', type: 'float64', nullable: true} ]); - t.end(); }); -test('ArcGISVectorSource#getFeatures', async t => { +test('ArcGISVectorSource#getFeatures', async () => { const source = ArcGISFeatureServerSource.createDataSource(FEATURE_SERVER_URL, {}); const featureCollection = { type: 'FeatureCollection', @@ -198,9 +195,8 @@ test('ArcGISVectorSource#getFeatures', async t => { crs: '4326' }); - t.deepEqual(table, { + expect(table).toEqual({ shape: 'geojson-table', ...featureCollection }); - t.end(); }); diff --git a/modules/wms/test/csw/csw-capabilities-loader.spec.ts b/modules/wms/test/csw/csw-capabilities-loader.spec.ts index cedaf38172..6be29f7479 100644 --- a/modules/wms/test/csw/csw-capabilities-loader.spec.ts +++ b/modules/wms/test/csw/csw-capabilities-loader.spec.ts @@ -2,7 +2,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; // import {validateLoader} from 'test/common/conformance'; import {CSWCapabilitiesLoader} from '@loaders.gl/wms'; @@ -10,12 +10,12 @@ import {load} from '@loaders.gl/core'; const CSW_CAPABILITIES_URL = '@loaders.gl/wms/test/data/csw/get-capabilities.xml'; -test('CSWCapabilitiesLoader#forecasts.xml', async t => { +test('CSWCapabilitiesLoader#forecasts.xml', async () => { const capabilities = await load(CSW_CAPABILITIES_URL, CSWCapabilitiesLoader); // t.comment(JSON.stringify(capabilities)); - t.equal(typeof capabilities, 'object', 'parsed'); - // t.equal(capabilities.layer.layers[2]?.name, 'world_rivers', 'contents'); + expect(typeof 
capabilities, 'parsed').toBe('object'); + // expect(capabilities.layer.layers[2]?.name, 'contents').toBe('world_rivers'); - t.end(); + }); diff --git a/modules/wms/test/csw/csw-domain-loader.spec.ts b/modules/wms/test/csw/csw-domain-loader.spec.ts index 61bb2a1617..5cbf9abb4a 100644 --- a/modules/wms/test/csw/csw-domain-loader.spec.ts +++ b/modules/wms/test/csw/csw-domain-loader.spec.ts @@ -6,7 +6,7 @@ // under OpenLayers license (only used for test cases) // See README.md in `./data` directory for full license text copy. -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; // import {validateLoader} from 'test/common/conformance'; import {CSWDomainLoader} from '@loaders.gl/wms'; @@ -28,27 +28,27 @@ const CSW_RESPONSE_2_0_2 = '' + '' + ''; -test('CSWGetDomainLoader', async t => { +test('CSWGetDomainLoader', async () => { const domain = await parse(CSW_RESPONSE_2_0_2, CSWDomainLoader); // t.comment(JSON.stringify(domain)); const domainValues = domain.domainValues; // test getRecordsResponse object - t.ok(domainValues, 'object contains domainValues property'); + expect(domainValues, 'object contains domainValues property').toBeTruthy(); // test DomainValues - t.equal(domainValues.length, 1, 'object contains 1 object in domainValues'); + expect(domainValues.length, 'object contains 1 object in domainValues').toBe(1); const domainValue = domainValues[0]; - t.equal(domainValue.type, 'csw:Record', 'check value for attribute type'); - t.equal(domainValue.propertyName, 'type', 'check value for element propertyName'); - t.ok(domainValue.values, 'object contains values property'); + expect(domainValue.type, 'check value for attribute type').toBe('csw:Record'); + expect(domainValue.propertyName, 'check value for element propertyName').toBe('type'); + expect(domainValue.values, 'object contains values property').toBeTruthy(); // test ListOfValues - t.equal(domainValue.values.length, 2, 'object contains 2 objects ' + 'in values'); + 
expect(domainValue.values.length, 'object contains 2 objects ' + 'in values').toBe(2); const value = domainValue.values[0]; - t.ok(value, 'object contains value property'); - t.equal(value.my_attr, 'my_value', 'check value for attribute my_attr'); - t.equal(value.value, 'dataset', 'check value for element Value'); + expect(value, 'object contains value property').toBeTruthy(); + expect(value.my_attr, 'check value for attribute my_attr').toBe('my_value'); + expect(value.value, 'check value for element Value').toBe('dataset'); - t.end(); + }); diff --git a/modules/wms/test/csw/csw-records-loader.spec.ts b/modules/wms/test/csw/csw-records-loader.spec.ts index 968e0ee8fc..3a02ad6669 100644 --- a/modules/wms/test/csw/csw-records-loader.spec.ts +++ b/modules/wms/test/csw/csw-records-loader.spec.ts @@ -6,7 +6,7 @@ // under OpenLayers license (only used for test cases) // See README.md in `./data` directory for full license text copy. -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; // import {validateLoader} from 'test/common/conformance'; import {CSWRecordsLoader} from '@loaders.gl/wms'; @@ -54,7 +54,7 @@ const CSW_RESPONSE_2_0_2 = '' + '' + ''; -test('CSWGetRecordsLoader', async t => { +test('CSWGetRecordsLoader', async () => { const cswRecords = await parse(CSW_RESPONSE_2_0_2, CSWRecordsLoader); // t.comment(JSON.stringify(cswRecords)); @@ -63,36 +63,32 @@ test('CSWGetRecordsLoader', async t => { const records = cswRecords.records; // test getRecordsResponse object - t.ok(searchStatus, 'object contains SearchStatus property'); - t.ok(searchResults, 'object contains SearchResults property'); - t.ok(records, 'object contains records property'); + expect(searchStatus, 'object contains SearchStatus property').toBeTruthy(); + expect(searchResults, 'object contains SearchResults property').toBeTruthy(); + expect(records, 'object contains records property').toBeTruthy(); // test SearchResults attributes - t.equal( + expect( 
searchResults.numberOfRecordsMatched, - 10, 'check value for SearchResults.numberOfRecordsMatched' - ); - t.equal( + ).toBe(10); + expect( searchResults.numberOfRecordsReturned, - 2, 'check value for SearchResults.numberOfRecordsReturned' - ); - t.equal(searchResults.elementSet, 'brief', 'check value for SearchResults.elementSet'); - t.equal(searchResults.nextRecord, 3, 'check value for SearchResults.nextRecord'); + ).toBe(2); + expect(searchResults.elementSet, 'check value for SearchResults.elementSet').toBe('brief'); + expect(searchResults.nextRecord, 'check value for SearchResults.nextRecord').toBe(3); // test records - t.equal(records.length, 2, 'object contains 10 records'); + expect(records.length, 'object contains 10 records').toBe(2); const testRecord = records[0]; - // t.equal(testRecord.type, "BriefRecord", "check value for record.type"); - t.equal(testRecord.title, 'Sample title', 'check value for record.title'); + // expect(testRecord.type, "check value for record.type").toBe("BriefRecord"); + expect(testRecord.title, 'check value for record.title').toBe('Sample title'); // test bbox TODO - // t.equal(testRecord.boundingBoxes.length, 2, "object contains 2 BoundingBoxes"); + // expect(testRecord.boundingBoxes.length, "object contains 2 BoundingBoxes").toBe(2); // const bbox = testRecord.boundingBoxes[0]; - // t.ok(bbox, "object contains BoundingBox properties"); - // t.equal(bbox.crs, "::Lambert Azimuthal Projection", "check value for BoundingBox.crs"); - // t.equal(bbox.value, [156, -3, 37, 83], "check value for record.BoundingBox"); - - t.end(); + // expect(bbox, "object contains BoundingBox properties").toBeTruthy(); + // expect(bbox.crs, "check value for BoundingBox.crs").toBe("::Lambert Azimuthal Projection"); + // expect(bbox.value, -3, 37, 83], "check value for record.BoundingBox").toBe([156); }); diff --git a/modules/wms/test/gml/gml-loader.spec.ts b/modules/wms/test/gml/gml-loader.spec.ts index 6b1210fa28..8e09815f9a 100644 --- 
a/modules/wms/test/gml/gml-loader.spec.ts +++ b/modules/wms/test/gml/gml-loader.spec.ts @@ -6,7 +6,7 @@ // under OpenLayers license (only used for test cases) // See README.md in `./data` directory for full license text copy. -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {GML_V3_TESTS} from '@loaders.gl/wms/test/data/gml/v3/tests'; // import {validateLoader} from 'test/common/conformance'; @@ -46,22 +46,21 @@ const VALID_TEST = { 'v3/repeated-name.xml': true }; -test('GMLLoader#parse', async t => { +test('GMLLoader#parse', async () => { for (const [fileName, xmlText] of Object.entries(GML_V3_TESTS)) { if (VALID_TEST[fileName]) { const geojson = (await parse(xmlText, GMLLoader)) as GeoJSON; - t.equal(typeof geojson, 'object', `Parsed ${fileName}`); + expect(typeof geojson, `Parsed ${fileName}`).toBe('object'); // t.comment(JSON.stringify(geojson)); } } - t.end(); + }); /* function test_boundedBy(t) { - t.plan(5); var doc = readXML("v3/topp-states-wfs.xml"); var format = new OpenLayers.Format.GML.v3({ @@ -73,7 +72,7 @@ function test_boundedBy(t) { var features = format.read(doc.documentElement); var bounds = features[0].bounds; - t.ok(bounds instanceof OpenLayers.Bounds, "feature given a bounds"); + expect(bounds instanceof OpenLayers.Bounds, "feature given a bounds").toBeTruthy(); t.eq(bounds.left.toFixed(2), "-91.52", "bounds left correct"); t.eq(bounds.bottom.toFixed(2), "36.99", "bounds bottom correct"); t.eq(bounds.right.toFixed(2), "-87.51", "bounds right correct"); @@ -81,7 +80,6 @@ function test_boundedBy(t) { } function test_read(t) { - t.plan(8); var doc = readXML("v3/topp-states-wfs.xml"); var format = new OpenLayers.Format.GML.v3({ featureType: "states", @@ -105,7 +103,6 @@ function test_read(t) { } function test_emptyAttribute(t) { - t.plan(4); var str = '' + '' + @@ -139,7 +136,6 @@ function test_emptyAttribute(t) { function test_repeatedName(t) { // test that if an attribute name matches the featureType, all goes 
well - t.plan(2); var doc = readXML("v3/repeated-name.xml"); var format = new OpenLayers.Format.GML.v3({ featureType: "zoning", @@ -156,7 +152,6 @@ function test_repeatedName(t) { } function test_write(t) { - t.plan(1); var doc = readXML("v3/topp-states-gml.xml"); var format = new OpenLayers.Format.GML.v3({ featureType: "states", diff --git a/modules/wms/test/wfs/wfs-capabilities-loader.spec.ts b/modules/wms/test/wfs/wfs-capabilities-loader.spec.ts index 03c234a308..4c5f5a4f3f 100644 --- a/modules/wms/test/wfs/wfs-capabilities-loader.spec.ts +++ b/modules/wms/test/wfs/wfs-capabilities-loader.spec.ts @@ -6,7 +6,7 @@ // under OpenLayers license (only used for test cases) // See README.md in `./data` directory for full license text copy. -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; // import {validateLoader} from 'test/common/conformance'; // @ts-nocheck @@ -23,14 +23,14 @@ test('WFSCapabilitiesLoader#response.xml', async (t) => { WFSCapabilitiesLoader ); - t.equal(typeof capabilities, 'object', 'parsed'); + expect(typeof capabilities, 'parsed').toBe('object'); - t.end(); + }); // TODO - copied from WMTS -test.skip('WFSCapabilitiesLoader#response.xml#OWS', async (t) => { +test.skip('WFSCapabilitiesLoader#response.xml#OWS', async () => { const capabilities = await load( WFS_CAPABILITIES_RESPONSE_URL, WFSCapabilitiesLoader @@ -38,99 +38,81 @@ test.skip('WFSCapabilitiesLoader#response.xml#OWS', async (t) => { // ows:ServiceIdentification const serviceIdentification = capabilities.serviceIdentification; - t.equal( + expect( serviceIdentification.title, - 'Web Map Tile Service', 'ows:ServiceIdentification title is correct' - ); - t.equal( + ).toBe('Web Map Tile Service'); + expect( serviceIdentification.serviceTypeVersion, - '1.0.0', 'ows:ServiceIdentification serviceTypeVersion is correct' - ); - t.equal( + ).toBe('1.0.0'); + expect( serviceIdentification.serviceType, - 'OGC WFS', 'ows:ServiceIdentification serviceType is correct' - ); + 
).toBe('OGC WFS'); // ows:ServiceProvider const serviceProvider = capabilities.serviceProvider; - t.equal(serviceProvider.providerName, 'MiraMon', 'ows:ServiceProvider providerName is correct'); - t.equal( + expect(serviceProvider.providerName, 'ows:ServiceProvider providerName is correct').toBe('MiraMon'); + expect( serviceProvider.providerSite, - 'http://www.creaf.uab.es/miramon', 'ows:ServiceProvider providerSite is correct' - ); - t.equal( + ).toBe('http://www.creaf.uab.es/miramon'); + expect( serviceProvider.serviceContact.individualName, - 'Joan Maso Pau', 'ows:ServiceProvider individualName is correct' - ); - t.equal( + ).toBe('Joan Maso Pau'); + expect( serviceProvider.serviceContact.positionName, - 'Senior Software Engineer', 'ows:ServiceProvider positionName is correct' - ); - t.equal( + ).toBe('Senior Software Engineer'); + expect( serviceProvider.serviceContact.contactInfo.address.administrativeArea, - 'Barcelona', 'ows:ServiceProvider address administrativeArea is correct' - ); - t.equal( + ).toBe('Barcelona'); + expect( serviceProvider.serviceContact.contactInfo.address.city, - 'Bellaterra', 'ows:ServiceProvider address city is correct' - ); - t.equal( + ).toBe('Bellaterra'); + expect( serviceProvider.serviceContact.contactInfo.address.country, - 'Spain', 'ows:ServiceProvider address country is correct' - ); - t.equal( + ).toBe('Spain'); + expect( serviceProvider.serviceContact.contactInfo.address.deliveryPoint, - 'Fac Ciencies UAB', 'ows:ServiceProvider address deliveryPoint is correct' - ); - t.equal( + ).toBe('Fac Ciencies UAB'); + expect( serviceProvider.serviceContact.contactInfo.address.electronicMailAddress, - 'joan.maso@uab.es', 'ows:ServiceProvider address electronicMailAddress is correct' - ); - t.equal( + ).toBe('joan.maso@uab.es'); + expect( serviceProvider.serviceContact.contactInfo.address.postalCode, - '08193', 'ows:ServiceProvider address postalCode is correct' - ); - t.equal( + ).toBe('08193'); + expect( 
serviceProvider.serviceContact.contactInfo.phone.voice, - '+34 93 581 1312', 'ows:ServiceProvider phone voice is correct' - ); + ).toBe('+34 93 581 1312'); // ows:OperationsMetadata const operationsMetadata = capabilities.operationsMetadata; - t.equal( + expect( operationsMetadata.GetCapabilities.dcp.http.get, - 'http://www.miramon.uab.es/cgi-bin/MiraMon5_0.cgi?', 'ows:OperationsMetadata GetCapabilities url is correct' - ); - t.equal( + ).toBe('http://www.miramon.uab.es/cgi-bin/MiraMon5_0.cgi?'); + expect( operationsMetadata.GetFeatureInfo.dcp.http.get, - 'http://www.miramon.uab.es/cgi-bin/MiraMon5_0.cgi?', 'ows:OperationsMetadata GetFeatureInfo url is correct' - ); - t.equal( + ).toBe('http://www.miramon.uab.es/cgi-bin/MiraMon5_0.cgi?'); + expect( operationsMetadata.GetTile.dcp.http.get, - 'http://www.miramon.uab.es/cgi-bin/MiraMon5_0.cgi?', 'ows:OperationsMetadata GetTile url is correct' - ); - - t.end(); + ).toBe('http://www.miramon.uab.es/cgi-bin/MiraMon5_0.cgi?'); }); // eslint-disable-next-line max-statements -test.skip('WFSCapabilitiesLoader#response.xml#layers', async (t) => { +test.skip('WFSCapabilitiesLoader#response.xml#layers', async () => { const capabilities = await load( WFS_CAPABILITIES_RESPONSE_URL, WFSCapabilitiesLoader @@ -139,37 +121,34 @@ test.skip('WFSCapabilitiesLoader#response.xml#layers', async (t) => { const contents = capabilities.contents; const numOfLayers = contents.layers.length; - t.equal(numOfLayers, 1, 'correct count of layers'); + expect(numOfLayers, 'correct count of layers').toBe(1); const layer = contents.layers[0]; - t.equal(layer.abstract, 'Coastline/shorelines (BA010)', 'layer abstract is correct'); - t.equal(layer.identifier, 'coastlines', 'layer identifier is correct'); - t.equal(layer.title, 'Coastlines', 'layer title is correct'); + expect(layer.abstract, 'layer abstract is correct').toBe('Coastline/shorelines (BA010)'); + expect(layer.identifier, 'layer identifier is correct').toBe('coastlines'); + expect(layer.title, 
'layer title is correct').toBe('Coastlines'); const numOfFormats = layer.formats.length; - t.equal(numOfFormats, 2, 'correct count of formats'); - t.equal(layer.formats[0], 'image/png', 'format image/png is correct'); - t.equal(layer.formats[1], 'image/gif', 'format image/gif is correct'); + expect(numOfFormats, 'correct count of formats').toBe(2); + expect(layer.formats[0], 'format image/png is correct').toBe('image/png'); + expect(layer.formats[1], 'format image/gif is correct').toBe('image/gif'); const numOfStyles = layer.styles.length; - t.equal(numOfStyles, 2, 'correct count of styles'); - t.equal(layer.styles[0].identifier, 'DarkBlue', 'style 0 identifier is correct'); - t.equal(layer.styles[0].isDefault, 'true', 'style 0 isDefault is correct'); - t.equal(layer.styles[0].title, 'Dark Blue', 'style 0 title is correct'); - t.equal(layer.styles[1].identifier, 'thickAndRed', 'style 1 identifier is correct'); - t.ok(!layer.styles[1].isDefault, 'style 1 isDefault is correct'); - t.equal(layer.styles[1].title, 'Thick And Red', 'style 1 title is correct'); - // t.equal(layer.styles[1].abstract, "Specify this style if you want your maps to have thick red coastlines. 
", "style 1 abstract is correct"); - - t.equal(layer.tileMatrixSetLinks.length, 1, 'correct count of tileMatrixSetLinks'); - t.equal(layer.tileMatrixSetLinks[0].tileMatrixSet, 'BigWorld', 'tileMatrixSet is correct'); + expect(numOfStyles, 'correct count of styles').toBe(2); + expect(layer.styles[0].identifier, 'style 0 identifier is correct').toBe('DarkBlue'); + expect(layer.styles[0].isDefault, 'style 0 isDefault is correct').toBe('true'); + expect(layer.styles[0].title, 'style 0 title is correct').toBe('Dark Blue'); + expect(layer.styles[1].identifier, 'style 1 identifier is correct').toBe('thickAndRed'); + expect(!layer.styles[1].isDefault, 'style 1 isDefault is correct').toBeTruthy(); + expect(layer.styles[1].title, 'style 1 title is correct').toBe('Thick And Red'); + // expect(layer.styles[1].abstract, "style 1 abstract is correct").toBe("Specify this style if you want your maps to have thick red coastlines. "); + + expect(layer.tileMatrixSetLinks.length, 'correct count of tileMatrixSetLinks').toBe(1); + expect(layer.tileMatrixSetLinks[0].tileMatrixSet, 'tileMatrixSet is correct').toBe('BigWorld'); const wgs84Bbox = layer.bounds; - t.equal(wgs84Bbox.left, -180.0, 'wgs84BoundingBox left is correct'); - t.equal(wgs84Bbox.right, 180.0, 'wgs84BoundingBox right is correct'); - t.equal(wgs84Bbox.bottom, -90.0, 'wgs84BoundingBox bottom is correct'); - t.equal(wgs84Bbox.top, 90.0, 'wgs84BoundingBox top is correct'); - - t.end(); + expect(wgs84Bbox.left, 'wgs84BoundingBox left is correct').toBe(-180.0); + expect(wgs84Bbox.right, 'wgs84BoundingBox right is correct').toBe(180.0); + expect(wgs84Bbox.bottom, 'wgs84BoundingBox bottom is correct').toBe(-90.0); + expect(wgs84Bbox.top, 'wgs84BoundingBox top is correct').toBe(90.0); }); - diff --git a/modules/wms/test/wms/wms-capabilities-loader.spec.ts b/modules/wms/test/wms/wms-capabilities-loader.spec.ts index 43d39aac6e..f9afe83258 100644 --- a/modules/wms/test/wms/wms-capabilities-loader.spec.ts +++ 
b/modules/wms/test/wms/wms-capabilities-loader.spec.ts @@ -2,7 +2,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; // import {validateLoader} from 'test/common/conformance'; import {WMSCapabilitiesLoader} from '@loaders.gl/wms'; @@ -16,66 +16,55 @@ const WMS_WWA_URL = '@loaders.gl/wms/test/data/wms/get-capabilities/wwa.xml'; const WMS_ADHOC_URL = '@loaders.gl/wms/test/data/wms/get-capabilities/?.xml'; -test('WMSCapabilitiesLoader#forecasts.xml', async t => { +test('WMSCapabilitiesLoader#forecasts.xml', async () => { const capabilities = await load(WMS_FORECASTS_URL, WMSCapabilitiesLoader); - t.equal(typeof capabilities, 'object', 'parsed'); - t.equal(capabilities.version, '1.1.1', 'version'); - t.equal(capabilities.layers[0].layers?.[2]?.name, 'world_rivers', 'contents'); - - t.end(); + expect(typeof capabilities, 'parsed').toBe('object'); + expect(capabilities.version, 'version').toBe('1.1.1'); + expect(capabilities.layers[0].layers?.[2]?.name, 'contents').toBe('world_rivers'); }); -test('WMSCapabilitiesLoader#obs.xml', async t => { +test('WMSCapabilitiesLoader#obs.xml', async () => { const capabilities = await load(WMS_OBS_URL, WMSCapabilitiesLoader); - t.equal(typeof capabilities, 'object', 'parsed'); - t.equal(capabilities.version, '1.1.1', 'version'); - t.equal(capabilities.layers[0].layers?.[2]?.name, 'world_rivers', 'contents'); - t.end(); + expect(typeof capabilities, 'parsed').toBe('object'); + expect(capabilities.version, 'version').toBe('1.1.1'); + expect(capabilities.layers[0].layers?.[2]?.name, 'contents').toBe('world_rivers'); }); -test('WMSCapabilitiesLoader#wwa.xml', async t => { +test('WMSCapabilitiesLoader#wwa.xml', async () => { const capabilities = await load(WMS_WWA_URL, WMSCapabilitiesLoader); - t.equal(typeof capabilities, 'object', 'parsed'); - t.equal(capabilities.version, '1.1.1', 'version'); - 
t.equal(capabilities.layers[0].layers?.[2]?.name, 'world_rivers', 'contents'); - - t.end(); + expect(typeof capabilities, 'parsed').toBe('object'); + expect(capabilities.version, 'version').toBe('1.1.1'); + expect(capabilities.layers[0].layers?.[2]?.name, 'contents').toBe('world_rivers'); }); -test('WMSCapabilitiesLoader#analyses.xml', async t => { +test('WMSCapabilitiesLoader#analyses.xml', async () => { const capabilities = await load(WMS_ANALYSES_URL, WMSCapabilitiesLoader); - t.equal(typeof capabilities, 'object', 'parsed'); - t.equal(capabilities.version, '1.1.1', 'version'); - t.equal(capabilities.layers[0].layers?.[2]?.name, 'world_countries_label', 'contents'); - - t.end(); + expect(typeof capabilities, 'parsed').toBe('object'); + expect(capabilities.version, 'version').toBe('1.1.1'); + expect(capabilities.layers[0].layers?.[2]?.name, 'contents').toBe('world_countries_label'); }); -test('WMSCapabilitiesLoader#dmsp.xml', async t => { +test('WMSCapabilitiesLoader#dmsp.xml', async () => { const capabilities = await load(WMS_DMSP_URL, WMSCapabilitiesLoader); - t.equal(typeof capabilities, 'object', 'parsed'); + expect(typeof capabilities, 'parsed').toBe('object'); - t.equal(capabilities.version, '1.3.0', 'version'); - t.equal(capabilities.layers[0].layers?.[2]?.name, 'eez', 'name'); - t.strictEqual(capabilities.layers[0].layers?.[2]?.opaque, false, 'opaque'); - t.strictEqual(capabilities.layers[0].layers?.[2]?.queryable, false, 'queryable'); - t.strictEqual(capabilities.layers[0].layers?.[2]?.cascaded, false, 'cascaded'); - - t.end(); + expect(capabilities.version, 'version').toBe('1.3.0'); + expect(capabilities.layers[0].layers?.[2]?.name, 'name').toBe('eez'); + expect(capabilities.layers[0].layers?.[2]?.opaque, 'opaque').toBe(false); + expect(capabilities.layers[0].layers?.[2]?.queryable, 'queryable').toBe(false); + expect(capabilities.layers[0].layers?.[2]?.cascaded, 'cascaded').toBe(false); }); // For adhoc testing (non-committed XML files or direct from 
server) -test.skip('WMSCapabilitiesLoader#ad-hoc-test', async t => { +test.skip('WMSCapabilitiesLoader#ad-hoc-test', async () => { const capabilities = await load(WMS_ADHOC_URL, WMSCapabilitiesLoader); - t.equal(typeof capabilities, 'object', 'parsed'); - t.equal(capabilities.version, '1.1.1', 'version'); - t.equal(capabilities.layers[0].layers?.[2]?.name, 'eez', 'contents'); - - t.end(); + expect(typeof capabilities, 'parsed').toBe('object'); + expect(capabilities.version, 'version').toBe('1.1.1'); + expect(capabilities.layers[0].layers?.[2]?.name, 'contents').toBe('eez'); }); diff --git a/modules/wms/test/wms/wms-error-loader.spec.ts b/modules/wms/test/wms/wms-error-loader.spec.ts index fff552d819..a496073011 100644 --- a/modules/wms/test/wms/wms-error-loader.spec.ts +++ b/modules/wms/test/wms/wms-error-loader.spec.ts @@ -2,7 +2,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; // import {validateLoader} from 'test/common/conformance'; import {WMSErrorLoader} from '@loaders.gl/wms'; @@ -37,7 +37,7 @@ http://schemas.opengis.net/wms/1.3.0/exceptions_1_3_0.xsd"> test('WMSErrorLoader#test cases', async (t) => { for (const tc of ERROR_TEST_CASES) { const error = (await parse(tc.xml, WMSErrorLoader, {wms: {minimalErrors: true}})); - t.equal(error, tc.parsed, `Error message: "${error}"`); + expect(error, `Error message: "${error}"`).toBe(tc.parsed); } - t.end(); + }); diff --git a/modules/wms/test/wms/wms-feature-info-loader.spec.ts b/modules/wms/test/wms/wms-feature-info-loader.spec.ts index e69c842148..e127b52c01 100644 --- a/modules/wms/test/wms/wms-feature-info-loader.spec.ts +++ b/modules/wms/test/wms/wms-feature-info-loader.spec.ts @@ -6,17 +6,17 @@ // under OpenLayers license (only used for test cases) // See README.md in `./data` directory for full license text copy. 
-import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {_WMSFeatureInfoLoader as WMSFeatureInfoLoader} from '@loaders.gl/wms'; import {parse} from '@loaders.gl/core'; -test('WMSFeatureInfoLoader#read_FeatureInfoResponse', async (t) => { +test('WMSFeatureInfoLoader#read_FeatureInfoResponse', async () => { // read empty response let text = '' + '' + ''; let featureInfo = await parse(text, WMSFeatureInfoLoader); - t.equal(featureInfo.features.length, 0, 'Parsing empty FeatureInfoResponse response successful'); + expect(featureInfo.features.length, 'Parsing empty FeatureInfoResponse response successful').toBe(0); // read 1 feature text = @@ -26,13 +26,12 @@ test('WMSFeatureInfoLoader#read_FeatureInfoResponse', async (t) => { ''; featureInfo = await parse(text, WMSFeatureInfoLoader); - t.equal(featureInfo.features.length, 1, 'Parsed 1 feature in total'); + expect(featureInfo.features.length, 'Parsed 1 feature in total').toBe(1); - t.equal( + expect( featureInfo.features[0].attributes.OBJECTID, - '1188', 'Attribute OBJECTID contains the right value' - ); + ).toBe('1188'); // read multiple features text = @@ -45,18 +44,15 @@ test('WMSFeatureInfoLoader#read_FeatureInfoResponse', async (t) => { featureInfo = await parse(text, WMSFeatureInfoLoader); - t.equal(featureInfo.features.length, 3, 'Parsed 3 features in total'); + expect(featureInfo.features.length, 'Parsed 3 features in total').toBe(3); - t.equal( + expect( featureInfo.features[1].attributes.STATE_NAME, - 'Wyoming', 'Attribute STATE_NAME contains the right value' - ); - - t.end(); + ).toBe('Wyoming'); }); -test.skip('WMSFeatureInfoLoader#msGMLOutput', async (t) => { +test.skip('WMSFeatureInfoLoader#msGMLOutput', async () => { // function test_read_msGMLOutput(t) { // read empty response let text = @@ -68,7 +64,7 @@ test.skip('WMSFeatureInfoLoader#msGMLOutput', async (t) => { ''; let featureInfo = await parse(text, WMSFeatureInfoLoader); - t.equal(featureInfo.features.length, 0, 'Parsing 
empty msGMLOutput response succesfull'); + expect(featureInfo.features.length, 'Parsing empty msGMLOutput response succesfull').toBe(0); // read 1 feature from 1 layer text = @@ -98,22 +94,21 @@ test.skip('WMSFeatureInfoLoader#msGMLOutput', async (t) => { featureInfo = await parse(text, WMSFeatureInfoLoader); - t.equal(featureInfo.features.length, 1, 'Parsed 1 feature in total'); + expect(featureInfo.features.length, 'Parsed 1 feature in total').toBe(1); - t.equal( + expect( featureInfo.features[0].attributes.OBJECTID, - '109', 'Attribute OBJECTID contains the right value' - ); + ).toBe('109'); - t.equal(featureInfo.features[0].type, 'AAA64', 'Parsed the layer name correctly'); + expect(featureInfo.features[0].type, 'Parsed the layer name correctly').toBe('AAA64'); const bounds = featureInfo.features[0].bounds; - t.ok(Array.isArray(bounds), 'feature given a bounds'); - t.equal(bounds.left.toFixed(3), '107397.266', 'Bounds left parsed correctly'); - t.equal(bounds.right.toFixed(3), '116568.188', 'Bounds right parsed correctly'); - t.equal(bounds.bottom.toFixed(3), '460681.063', 'Bounds bottom parsed correctly'); - t.equal(bounds.top.toFixed(3), '480609.250', 'Bounds top parsed correctly'); + expect(Array.isArray(bounds), 'feature given a bounds').toBeTruthy(); + expect(bounds.left.toFixed(3), 'Bounds left parsed correctly').toBe('107397.266'); + expect(bounds.right.toFixed(3), 'Bounds right parsed correctly').toBe('116568.188'); + expect(bounds.bottom.toFixed(3), 'Bounds bottom parsed correctly').toBe('460681.063'); + expect(bounds.top.toFixed(3), 'Bounds top parsed correctly').toBe('480609.250'); // read 2 features from 2 layers text = @@ -167,13 +162,12 @@ test.skip('WMSFeatureInfoLoader#msGMLOutput', async (t) => { featureInfo = await parse(text, WMSFeatureInfoLoader); - t.equal(featureInfo.features.length, 2, 'Parsed 2 features in total'); + expect(featureInfo.features.length, 'Parsed 2 features in total').toBe(2); - t.equal( + expect( 
featureInfo.features[0].type === featureInfo.features[1].type, - false, 'The layer name differs for the two features' - ); + ).toBe(false); text = '' + @@ -210,10 +204,9 @@ test.skip('WMSFeatureInfoLoader#msGMLOutput', async (t) => { // 'Parsed geometry is of type multi line string' // ); - t.end(); }); -test.skip('WMSFeatureInfoLoader#Ionic/GeoServer', async (t) => { +test.skip('WMSFeatureInfoLoader#Ionic/GeoServer', async () => { // function test_read_GMLFeatureInfoResponse(t) { // read Ionic response, see if parser falls back to GML format // url used: @@ -254,17 +247,15 @@ test.skip('WMSFeatureInfoLoader#Ionic/GeoServer', async (t) => { let featureInfo = await parse(text, WMSFeatureInfoLoader); - t.equal( + expect( featureInfo.features.length, - 1, 'Parsing GML GetFeatureInfo response from Ionic succesfull' - ); + ).toBe(1); - t.equal( + expect( featureInfo.features[0].attributes.TILE_NAME, - '126', 'Attribute TILE_NAME contains the right value' - ); + ).toBe('126'); // read Geoserver response // taken from: @@ -275,13 +266,10 @@ test.skip('WMSFeatureInfoLoader#Ionic/GeoServer', async (t) => { featureInfo = await parse(text, WMSFeatureInfoLoader); - t.equal( + expect( featureInfo.features.length, - 1, 'Parsing GML GetFeatureInfo response from Geoserver succesfull' - ); - - t.equal(featureInfo.features[0].attributes.cat, '3', 'Attribute cat contains the right value'); + ).toBe(1); - t.end(); + expect(featureInfo.features[0].attributes.cat, 'Attribute cat contains the right value').toBe('3'); }); diff --git a/modules/wms/test/wms/wms-layer-description-loader.spec.ts b/modules/wms/test/wms/wms-layer-description-loader.spec.ts index be511d9ccf..1b19e94414 100644 --- a/modules/wms/test/wms/wms-layer-description-loader.spec.ts +++ b/modules/wms/test/wms/wms-layer-description-loader.spec.ts @@ -6,14 +6,14 @@ // under OpenLayers license (only used for test cases) // See README.md in `./data` directory for full license text copy. 
-import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import { _WMSLayerDescriptionLoader as WMSLayerDescriptionLoader // _WMSLayerDescription as WMSLayerDescription } from '@loaders.gl/wms'; import {parse} from '@loaders.gl/core'; -test.skip('WMSLayerDescriptionLoader#read_WMSDescribeLayer', async t => { +test.skip('WMSLayerDescriptionLoader#read_WMSDescribeLayer', async () => { const text = '' + ' ' + @@ -22,12 +22,12 @@ test.skip('WMSLayerDescriptionLoader#read_WMSDescribeLayer', async t => { ''; const description = await parse(text, WMSLayerDescriptionLoader); - t.ok(description); + expect(description).toBeTruthy(); // const res = description.layers; - // t.equal(res.length, 1, 'Only one LayerDescription in data, so only one parsed'); - // t.equal(res[0].owsType, 'WFS', 'Properly parses owsType as WFS'); + // expect(res.length, 'Only one LayerDescription in data, so only one parsed').toBe(1); + // expect(res[0].owsType, 'Properly parses owsType as WFS').toBe('WFS'); // t.equal( // res[0].owsURL, @@ -35,9 +35,9 @@ test.skip('WMSLayerDescriptionLoader#read_WMSDescribeLayer', async t => { // 'Properly parses owsURL' // ); - // t.equal(res[0].typeName, 'topp:states', 'Properly parses typeName'); + // expect(res[0].typeName, 'Properly parses typeName').toBe('topp:states'); - // t.equal(res[0].layerName, 'topp:states', 'Properly parses name'); + // expect(res[0].layerName, 'Properly parses name').toBe('topp:states'); - t.end(); + }); diff --git a/modules/wms/test/wms/wms-source.spec.ts b/modules/wms/test/wms/wms-source.spec.ts index e1bf514179..aacfff68ba 100644 --- a/modules/wms/test/wms/wms-source.spec.ts +++ b/modules/wms/test/wms/wms-source.spec.ts @@ -2,7 +2,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; import {withFetchMock, mockResults, requestInits} from '../test-utils/fetch-spy'; import {WMSSource} from '@loaders.gl/wms'; @@ -10,19 
+10,17 @@ import {WMSSource} from '@loaders.gl/wms'; const WMS_SERVICE_URL = 'https:/mock-wms-service'; const WMS_VERSION = '1.3.0'; -test('WMSSource#constructor', async t => { +test('WMSSource#constructor', async () => { const wmsImageSource = WMSSource.createDataSource(WMS_SERVICE_URL, {}); const getCapabilitiesUrl = wmsImageSource.getCapabilitiesURL(); - t.equal( + expect( getCapabilitiesUrl, - `https:/mock-wms-service?SERVICE=WMS&VERSION=${WMS_VERSION}&REQUEST=GetCapabilities`, 'getCapabilitiesURL' - ); - t.end(); + ).toBe(`https:/mock-wms-service?SERVICE=WMS&VERSION=${WMS_VERSION}&REQUEST=GetCapabilities`); }); -test('WMSSource#getMapURL', async t => { +test('WMSSource#getMapURL', async () => { let wmsImageSource = WMSSource.createDataSource(WMS_SERVICE_URL, {}); let getMapUrl = wmsImageSource.getMapURL({ width: 800, @@ -31,10 +29,11 @@ test('WMSSource#getMapURL', async t => { layers: ['oms'], crs: 'EPSG:3857' }); - t.equal( + expect( getMapUrl, - `https:/mock-wms-service?SERVICE=WMS&VERSION=${WMS_VERSION}&REQUEST=GetMap&FORMAT=image/png&LAYERS=oms&STYLES=&CRS=EPSG:3857&WIDTH=800&HEIGHT=600&BBOX=30,70,35,75`, 'getMapURL layers in params' + ).toBe( + `https:/mock-wms-service?SERVICE=WMS&VERSION=${WMS_VERSION}&REQUEST=GetMap&FORMAT=image/png&LAYERS=oms&STYLES=&CRS=EPSG:3857&WIDTH=800&HEIGHT=600&BBOX=30,70,35,75` ); wmsImageSource = WMSSource.createDataSource(WMS_SERVICE_URL, { @@ -45,45 +44,39 @@ test('WMSSource#getMapURL', async t => { height: 600, bbox: [30, 70, 35, 75] }); - t.equal( + expect( getMapUrl, - `https:/mock-wms-service?SERVICE=WMS&VERSION=${WMS_VERSION}&REQUEST=GetMap&FORMAT=image/png&LAYERS=oms&STYLES=&CRS=EPSG:3857&WIDTH=800&HEIGHT=600&BBOX=30,70,35,75`, 'getMapURL layers in constructor' + ).toBe( + `https:/mock-wms-service?SERVICE=WMS&VERSION=${WMS_VERSION}&REQUEST=GetMap&FORMAT=image/png&LAYERS=oms&STYLES=&CRS=EPSG:3857&WIDTH=800&HEIGHT=600&BBOX=30,70,35,75` ); - t.end(); }); -test('WMSSource#getFeatureInfoURL', async t => { 
+test('WMSSource#getFeatureInfoURL', async () => { // const wmsImageSource = WMSSource.createDataSource({url: WMS_SERVICE_URL}); // const getFeatureInfoUrl = wmsImageSource.getFeatureInfoURL({x: 400, y: 300}); - // t.equal(getFeatureInfoUrl, 'https:/mock-wms-service?REQUEST=GetFeatureInfo', 'getFeatureInfoURL'); - t.end(); + // expect(getFeatureInfoUrl, 'getFeatureInfoURL').toBe('https:/mock-wms-service?REQUEST=GetFeatureInfo'); }); -test('WMSSource#describeLayerURL', async t => { +test('WMSSource#describeLayerURL', async () => { const wmsImageSource = WMSSource.createDataSource(WMS_SERVICE_URL, {url: WMS_SERVICE_URL}); const describeLayerUrl = wmsImageSource.describeLayerURL({}); - t.equal( + expect( describeLayerUrl, - `https:/mock-wms-service?SERVICE=WMS&VERSION=${WMS_VERSION}&REQUEST=DescribeLayer`, 'describeLayerURL' - ); - t.end(); + ).toBe(`https:/mock-wms-service?SERVICE=WMS&VERSION=${WMS_VERSION}&REQUEST=DescribeLayer`); }); -test('WMSSource#getLegendGraphicURL', async t => { +test('WMSSource#getLegendGraphicURL', async () => { const wmsImageSource = WMSSource.createDataSource(WMS_SERVICE_URL, {url: WMS_SERVICE_URL}); const getLegendGraphicUrl = wmsImageSource.getLegendGraphicURL({}); - t.equal( + expect( getLegendGraphicUrl, - `https:/mock-wms-service?SERVICE=WMS&VERSION=${WMS_VERSION}&REQUEST=GetLegendGraphic`, 'getLegendGraphicURL' - ); - - t.end(); + ).toBe(`https:/mock-wms-service?SERVICE=WMS&VERSION=${WMS_VERSION}&REQUEST=GetLegendGraphic`); }); -test('WMSSource#WMS versions', async t => { +test('WMSSource#WMS versions', async () => { const wms111Service = WMSSource.createDataSource(WMS_SERVICE_URL, { wmsParameters: {version: '1.1.1', layers: ['oms']} }); @@ -92,10 +85,11 @@ test('WMSSource#WMS versions', async t => { height: 600, bbox: [30, 70, 35, 75] }); - t.equal( + expect( getMapUrl, - 'https:/mock-wms-service?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&FORMAT=image/png&LAYERS=oms&STYLES=&SRS=EPSG:4326&WIDTH=800&HEIGHT=600&BBOX=30,70,35,75', 
'getMapURL replaces CRS with SRS in WMS 1.1.1' + ).toBe( + 'https:/mock-wms-service?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&FORMAT=image/png&LAYERS=oms&STYLES=&SRS=EPSG:4326&WIDTH=800&HEIGHT=600&BBOX=30,70,35,75' ); const wms130Service = WMSSource.createDataSource(WMS_SERVICE_URL, { wms: { @@ -108,16 +102,16 @@ test('WMSSource#WMS versions', async t => { height: 600, bbox: [30, 70, 35, 75] }); - t.equal( + expect( getMapUrl, - 'https:/mock-wms-service?SERVICE=WMS&VERSION=1.3.0&REQUEST=GetMap&FORMAT=image/png&LAYERS=oms&STYLES=&CRS=CRS:84&WIDTH=800&HEIGHT=600&BBOX=30,70,35,75', 'getMapURL replaces ESPG:4326 with CRS:84 in WMS 1.3.0' + ).toBe( + 'https:/mock-wms-service?SERVICE=WMS&VERSION=1.3.0&REQUEST=GetMap&FORMAT=image/png&LAYERS=oms&STYLES=&CRS=CRS:84&WIDTH=800&HEIGHT=600&BBOX=30,70,35,75' ); - t.end(); }); // TODO - move to image-source.spec.ts -test('WMSSource#fetch override', async t => { +test('WMSSource#fetch override', async () => { const loadOptions = {fetch: {headers: {Authorization: 'Bearer abc'}}}; const wmsImageSource = WMSSource.createDataSource(WMS_SERVICE_URL, { core: { @@ -150,16 +144,15 @@ test('WMSSource#fetch override', async t => { // eslint-disable-next-line camelcase query_layers: ['oms'] }); - t.deepEqual( - requestInits[generatedUrl]?.headers, - {Authorization: 'Bearer abc'}, + const headers = new Headers(requestInits[generatedUrl]?.headers); + expect( + headers.get('Authorization'), 'authorization header provided in constructor passed to fetch' - ); - t.end(); + ).toBe('Bearer abc'); }); }); -test('WMSSource#getImage', async t => { +test('WMSSource#getImage', async () => { const wmsImageSource = WMSSource.createDataSource(WMS_SERVICE_URL, {url: WMS_SERVICE_URL}); let getMapParameters; @@ -178,15 +171,13 @@ test('WMSSource#getImage', async t => { layers: ['oms'] }); - t.deepEqual( + expect( getMapParameters, - { - width: 800, - height: 600, - bbox: [30, 70, 35, 75], - layers: ['oms'] - }, 'boundingBox transformed to bbox' - ); - t.end(); 
+ ).toEqual({ + width: 800, + height: 600, + bbox: [30, 70, 35, 75], + layers: ['oms'] + }); }); diff --git a/modules/wms/test/wmts/wmts-capabilities-loader.spec.ts b/modules/wms/test/wmts/wmts-capabilities-loader.spec.ts index 65fd857ad5..e522b6838d 100644 --- a/modules/wms/test/wmts/wmts-capabilities-loader.spec.ts +++ b/modules/wms/test/wmts/wmts-capabilities-loader.spec.ts @@ -9,7 +9,7 @@ // See README.md in `./data` directory for full license text copy. /* -import test from 'tape-promise/tape'; +import {expect, test} from 'vitest'; // import {validateLoader} from 'test/common/conformance'; import {_WMTSCapabilitiesLoader as WMTSCapabilitiesLoader, _WMTSCapabilities as WMTSCapabilities} from '@loaders.gl/wms'; @@ -24,9 +24,9 @@ test('WMTSCapabilitiesLoader#response.xml', async (t) => { WMTSCapabilitiesLoader )) as WMTSCapabilities; - t.equal(typeof capabilities, 'object', 'parsed'); + expect(typeof capabilities, 'parsed').toBe('object'); - t.end(); + }); test.skip('WMTSCapabilitiesLoader#response.xml#OWS', async (t) => { @@ -55,7 +55,7 @@ test.skip('WMTSCapabilitiesLoader#response.xml#OWS', async (t) => { // ows:ServiceProvider const serviceProvider = capabilities.serviceProvider; - t.equal(serviceProvider.providerName, 'MiraMon', 'ows:ServiceProvider providerName is correct'); + expect(serviceProvider.providerName, 'ows:ServiceProvider providerName is correct').toBe('MiraMon'); t.equal( serviceProvider.providerSite.href, 'http://www.creaf.uab.es/miramon', @@ -125,7 +125,7 @@ test.skip('WMTSCapabilitiesLoader#response.xml#OWS', async (t) => { // 'ows:OperationsMetadata GetTile url is correct' // ); - t.end(); + }); // eslint-disable-next-line max-statements @@ -142,38 +142,38 @@ test.skip('WMTSCapabilitiesLoader#response.xml#layers', async (t) => { const contents = capabilities.contents; const numOfLayers = contents.layers.length; - t.equal(numOfLayers, 1, 'correct count of layers'); + expect(numOfLayers, 'correct count of layers').toBe(1); const layer = 
contents.layers[0]; - t.equal(layer.abstract, 'Coastline/shorelines (BA010)', 'layer abstract is correct'); - t.equal(layer.identifier, 'coastlines', 'layer identifier is correct'); - t.equal(layer.title, 'Coastlines', 'layer title is correct'); + expect(layer.abstract, 'layer abstract is correct').toBe('Coastline/shorelines (BA010)'); + expect(layer.identifier, 'layer identifier is correct').toBe('coastlines'); + expect(layer.title, 'layer title is correct').toBe('Coastlines'); const numOfFormats = layer.formats.length; - t.equal(numOfFormats, 2, 'correct count of formats'); - t.equal(layer.formats[0], 'image/png', 'format image/png is correct'); - t.equal(layer.formats[1], 'image/gif', 'format image/gif is correct'); + expect(numOfFormats, 'correct count of formats').toBe(2); + expect(layer.formats[0], 'format image/png is correct').toBe('image/png'); + expect(layer.formats[1], 'format image/gif is correct').toBe('image/gif'); const numOfStyles = layer.styles.length; - t.equal(numOfStyles, 2, 'correct count of styles'); - t.equal(layer.styles[0].identifier, 'DarkBlue', 'style 0 identifier is correct'); - t.equal(layer.styles[0].isDefault, 'true', 'style 0 isDefault is correct'); - t.equal(layer.styles[0].title, 'Dark Blue', 'style 0 title is correct'); - t.equal(layer.styles[1].identifier, 'thickAndRed', 'style 1 identifier is correct'); - t.ok(!layer.styles[1].isDefault, 'style 1 isDefault is correct'); - t.equal(layer.styles[1].title, 'Thick And Red', 'style 1 title is correct'); - // t.equal(layer.styles[1].abstract, "Specify this style if you want your maps to have thick red coastlines. 
", "style 1 abstract is correct"); - - t.equal(layer.tileMatrixSetLinks.length, 1, 'correct count of tileMatrixSetLinks'); - t.equal(layer.tileMatrixSetLinks[0].tileMatrixSet, 'BigWorld', 'tileMatrixSet is correct'); + expect(numOfStyles, 'correct count of styles').toBe(2); + expect(layer.styles[0].identifier, 'style 0 identifier is correct').toBe('DarkBlue'); + expect(layer.styles[0].isDefault, 'style 0 isDefault is correct').toBe('true'); + expect(layer.styles[0].title, 'style 0 title is correct').toBe('Dark Blue'); + expect(layer.styles[1].identifier, 'style 1 identifier is correct').toBe('thickAndRed'); + expect(!layer.styles[1].isDefault, 'style 1 isDefault is correct').toBeTruthy(); + expect(layer.styles[1].title, 'style 1 title is correct').toBe('Thick And Red'); + // expect(layer.styles[1].abstract, "style 1 abstract is correct").toBe("Specify this style if you want your maps to have thick red coastlines. "); + + expect(layer.tileMatrixSetLinks.length, 'correct count of tileMatrixSetLinks').toBe(1); + expect(layer.tileMatrixSetLinks[0].tileMatrixSet, 'tileMatrixSet is correct').toBe('BigWorld'); const wgs84Bbox = layer.bounds; - t.equal(wgs84Bbox.left, -180.0, 'wgs84BoundingBox left is correct'); - t.equal(wgs84Bbox.right, 180.0, 'wgs84BoundingBox right is correct'); - t.equal(wgs84Bbox.bottom, -90.0, 'wgs84BoundingBox bottom is correct'); - t.equal(wgs84Bbox.top, 90.0, 'wgs84BoundingBox top is correct'); + expect(wgs84Bbox.left, 'wgs84BoundingBox left is correct').toBe(-180.0); + expect(wgs84Bbox.right, 'wgs84BoundingBox right is correct').toBe(180.0); + expect(wgs84Bbox.bottom, 'wgs84BoundingBox bottom is correct').toBe(-90.0); + expect(wgs84Bbox.top, 'wgs84BoundingBox top is correct').toBe(90.0); - t.end(); + }); // eslint-disable-next-line max-statements @@ -184,44 +184,44 @@ test.skip('WMTSCapabilitiesLoader#response.xml#test_tileMatrixSets', async (t) = )) as WMTSCapabilities; const tileMatrixSets = capabilities.contents.tileMatrixSets; - 
t.ok(tileMatrixSets.BigWorld, 'tileMatrixSets \'BigWorld\' found'); + expect(tileMatrixSets.BigWorld, 'tileMatrixSets \'BigWorld\' found').toBeTruthy(); const bigWorld = tileMatrixSets.BigWorld; - t.equal(bigWorld.identifier, 'BigWorld', 'tileMatrixSets identifier is correct'); - t.equal(bigWorld.matrixIds.length, 2, 'tileMatrix count is correct'); - t.equal(bigWorld.matrixIds[0].identifier, '1e6', 'tileMatrix 0 identifier is correct'); - t.equal(bigWorld.matrixIds[0].matrixHeight, 50000, 'tileMatrix 0 matrixHeight is correct'); - t.equal(bigWorld.matrixIds[0].matrixWidth, 60000, 'tileMatrix 0 matrixWidth is correct'); + expect(bigWorld.identifier, 'tileMatrixSets identifier is correct').toBe('BigWorld'); + expect(bigWorld.matrixIds.length, 'tileMatrix count is correct').toBe(2); + expect(bigWorld.matrixIds[0].identifier, 'tileMatrix 0 identifier is correct').toBe('1e6'); + expect(bigWorld.matrixIds[0].matrixHeight, 'tileMatrix 0 matrixHeight is correct').toBe(50000); + expect(bigWorld.matrixIds[0].matrixWidth, 'tileMatrix 0 matrixWidth is correct').toBe(60000); t.equal( bigWorld.matrixIds[0].scaleDenominator, 1000000, 'tileMatrix 0 scaleDenominator is correct' ); - t.equal(bigWorld.matrixIds[0].tileWidth, 256, 'tileMatrix 0 tileWidth is correct'); - t.equal(bigWorld.matrixIds[0].tileHeight, 256, 'tileMatrix 0 tileHeight is correct'); + expect(bigWorld.matrixIds[0].tileWidth, 'tileMatrix 0 tileWidth is correct').toBe(256); + expect(bigWorld.matrixIds[0].tileHeight, 'tileMatrix 0 tileHeight is correct').toBe(256); t.equal( bigWorld.matrixIds[0].topLeftCorner.lon, -180, 'tileMatrix 0 topLeftCorner.lon is correct' ); - t.equal(bigWorld.matrixIds[0].topLeftCorner.lat, 84, 'tileMatrix 0 topLeftCorner.lat is correct'); + expect(bigWorld.matrixIds[0].topLeftCorner.lat, 'tileMatrix 0 topLeftCorner.lat is correct').toBe(84); - t.equal(bigWorld.matrixIds[1].identifier, '2.5e6', 'tileMatrix 1 identifier is correct'); - t.equal(bigWorld.matrixIds[1].matrixHeight, 7000, 
'tileMatrix 1 matrixHeight is correct'); - t.equal(bigWorld.matrixIds[1].matrixWidth, 9000, 'tileMatrix 1 matrixWidth is correct'); + expect(bigWorld.matrixIds[1].identifier, 'tileMatrix 1 identifier is correct').toBe('2.5e6'); + expect(bigWorld.matrixIds[1].matrixHeight, 'tileMatrix 1 matrixHeight is correct').toBe(7000); + expect(bigWorld.matrixIds[1].matrixWidth, 'tileMatrix 1 matrixWidth is correct').toBe(9000); t.equal( bigWorld.matrixIds[1].scaleDenominator, 2500000, 'tileMatrix 1 scaleDenominator is correct' ); - t.equal(bigWorld.matrixIds[1].tileWidth, 256, 'tileMatrix 1 tileWidth is correct'); - t.equal(bigWorld.matrixIds[1].tileHeight, 256, 'tileMatrix 1 tileHeight is correct'); + expect(bigWorld.matrixIds[1].tileWidth, 'tileMatrix 1 tileWidth is correct').toBe(256); + expect(bigWorld.matrixIds[1].tileHeight, 'tileMatrix 1 tileHeight is correct').toBe(256); t.equal( bigWorld.matrixIds[1].topLeftCorner.lon, -180, 'tileMatrix 1 topLeftCorner.lon is correct' ); - t.equal(bigWorld.matrixIds[1].topLeftCorner.lat, 84, 'tileMatrix 1 topLeftCorner.lat is correct'); + expect(bigWorld.matrixIds[1].topLeftCorner.lat, 'tileMatrix 1 topLeftCorner.lat is correct').toBe(84); - t.end(); + }); */ diff --git a/test/common/conformance.ts b/test/common/conformance.ts index 0c86e7da5b..6a2f2d4974 100644 --- a/test/common/conformance.ts +++ b/test/common/conformance.ts @@ -2,27 +2,65 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -export function validateLoader(t, loader, name = '') { - t.ok(typeof loader.id === 'string', `Loader ${name} loader.id is not defined`); - t.ok(loader, `Loader ${name} defined`); - t.equal(typeof loader.name, 'string', `Loader ${name} has a name`); - t.ok(Array.isArray(loader.extensions), `Loader ${name} has an extensions array`); - t.ok(Array.isArray(loader.mimeTypes), `Loader ${name} has a mimeTypes array`); +import {expect} from 'vitest'; + +function getAssertions(assertions) { + if (assertions) { + return assertions; + } 
+ + return { + ok(value, message) { + expect(value, message).toBeTruthy(); + }, + notOk(value, message) { + expect(value, message).toBeFalsy(); + }, + equal(actual, expected, message) { + expect(actual, message).toBe(expected); + }, + equals(actual, expected, message) { + expect(actual, message).toBe(expected); + } + }; +} + +export function validateLoader(assertionsOrLoader, loaderOrName, name = '') { + const loader = + loaderOrName && typeof loaderOrName === 'object' ? loaderOrName : assertionsOrLoader; + const resolvedName = typeof loaderOrName === 'string' ? loaderOrName : name; + const assertions = getAssertions(loader === assertionsOrLoader ? null : assertionsOrLoader); + + assertions.ok(typeof loader.id === 'string', `Loader ${resolvedName} loader.id is not defined`); + assertions.ok(loader, `Loader ${resolvedName} defined`); + assertions.equal(typeof loader.name, 'string', `Loader ${resolvedName} has a name`); + assertions.ok( + Array.isArray(loader.extensions), + `Loader ${resolvedName} has an extensions array` + ); + assertions.ok( + Array.isArray(loader.mimeTypes), + `Loader ${resolvedName} has a mimeTypes array` + ); const options = loader.options || {}; - t.ok(!('workerUrl' in options), 'workerUrl is not defined on loader.options'); - if (name.includes('Worker')) { - t.ok('worker' in loader, `Loader ${name} loader.worker is not defined`); + assertions.ok(!('workerUrl' in options), 'workerUrl is not defined on loader.options'); + if (resolvedName.includes('Worker')) { + assertions.ok('worker' in loader, `Loader ${resolvedName} loader.worker is not defined`); } // const loaderOptions = options[loader.id] || {}; if (!loader.parse) { - // t.ok(loaderOptions.workerUrl, 'options..workerUrl'); + // assertions.ok(loaderOptions.workerUrl, 'options..workerUrl'); } else { - t.equal(typeof loader.parse, 'function', `Loader ${name} has 'parse' function`); + assertions.equal( + typeof loader.parse, + 'function', + `Loader ${resolvedName} has 'parse' function` + ); 
// Call parse just to ensure it returns a promise const promise = loader.parse(new ArrayBuffer(0), {}).catch(_ => {}); - t.ok(promise.then, `Loader ${name} is async (returns a promise)`); + assertions.ok(promise.then, `Loader ${resolvedName} is async (returns a promise)`); } } From 5a4a30b2c64b0c31c53609ea3ef7b6284e79ed96 Mon Sep 17 00:00:00 2001 From: Ib Green Date: Sun, 12 Apr 2026 09:05:54 -0400 Subject: [PATCH 2/3] fix --- modules/crypto/test/lib/utils/digest-utils.spec.ts | 4 ++-- test/common/conformance.ts | 7 +++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/modules/crypto/test/lib/utils/digest-utils.spec.ts b/modules/crypto/test/lib/utils/digest-utils.spec.ts index 9d7fe7283c..9b99beff1d 100644 --- a/modules/crypto/test/lib/utils/digest-utils.spec.ts +++ b/modules/crypto/test/lib/utils/digest-utils.spec.ts @@ -33,9 +33,9 @@ test('encodeHexToBase64#crc32 test cases', () => { }); test('encodeHexToBase64', () => { - expect(encodeHex('f85d741', 'D4XXQQ==', 'encode zero leading hex correctly').toBe('base64')); + expect(encodeHex('f85d741', 'base64'), 'encode zero leading hex correctly').toBe('D4XXQQ=='); }); test('encodeBase64ToHex', () => { - expect(encodeBase64('D4XXQQ==', '0f85d741').toBe('hex')); + expect(encodeBase64('D4XXQQ==', 'hex')).toBe('0f85d741'); }); diff --git a/test/common/conformance.ts b/test/common/conformance.ts index 6a2f2d4974..c69f6ae2bf 100644 --- a/test/common/conformance.ts +++ b/test/common/conformance.ts @@ -2,13 +2,16 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors -import {expect} from 'vitest'; - function getAssertions(assertions) { if (assertions) { return assertions; } + const expect = globalThis.expect; + if (typeof expect !== 'function') { + throw new Error('validateLoader requires tape assertions or a global expect'); + } + return { ok(value, message) { expect(value, message).toBeTruthy(); From 6c27e31c5d4ae1c7c909c7b9cad86155fe256971 Mon Sep 17 00:00:00 2001 From: Ib Green Date: Sun, 
12 Apr 2026 09:25:17 -0400 Subject: [PATCH 3/3] fix --- modules/crypto/test/lib/crc32c-hash.spec.ts | 5 ++--- modules/mvt/test/tilejson-loader.spec.ts | 10 ++++------ test/common/conformance.ts | 14 +++++--------- 3 files changed, 11 insertions(+), 18 deletions(-) diff --git a/modules/crypto/test/lib/crc32c-hash.spec.ts b/modules/crypto/test/lib/crc32c-hash.spec.ts index 66482b5f98..4bbc629437 100644 --- a/modules/crypto/test/lib/crc32c-hash.spec.ts +++ b/modules/crypto/test/lib/crc32c-hash.spec.ts @@ -36,11 +36,10 @@ test('crc32c#additional tests', async () => { for (const tc of set.cases) { if (tc.expected && !tc.charset) { const hash = await new CRC32CHash().hash(tc.arrayBuffer, 'base64'); - t.equals( + expect( hash, - tc.expected, `should digest "${tc.input.slice(0, 10)}..." correctly ${tc.expected} ${tc.want}` - ); + ).toBe(tc.expected); } } diff --git a/modules/mvt/test/tilejson-loader.spec.ts b/modules/mvt/test/tilejson-loader.spec.ts index 72f24fe3b6..680471b75c 100644 --- a/modules/mvt/test/tilejson-loader.spec.ts +++ b/modules/mvt/test/tilejson-loader.spec.ts @@ -14,8 +14,7 @@ const TIPPECANOE_TILEJSON = '@loaders.gl/mvt/test/data/tilejson/tippecanoe.tilej // const TIPPECANOE_EXPECTED = '@loaders.gl/mvt/test/data/tilejson/tippecanoe.expected.json'; test('TileJSONLoader#loader conformance', () => { - validateLoader(t, TileJSONLoader, 'TileJSONLoader'); - + validateLoader(TileJSONLoader, 'TileJSONLoader'); }); test('TileJSONLoader#load', async () => { @@ -26,7 +25,6 @@ test('TileJSONLoader#load', async () => { // expect(metadata).toEqual(parsedMetadata); // console.error(JSON.stringify(metadata, null, 2)); } - }); test('TileJSONLoader#tippecanoe', async () => { @@ -38,7 +36,7 @@ test('TileJSONLoader#tippecanoe', async () => { expect(metadata.layers?.[0]?.fields?.[10]?.values?.length, '100 unique values').toBe(100); metadata = await load(TIPPECANOE_TILEJSON, TileJSONLoader, {tilejson: {maxValues: 10}}); - 
expect(metadata.layers?.[0]?.fields?.[10]?.values?.length, 'maxValue clips unique values').toBe(10); - - + expect(metadata.layers?.[0]?.fields?.[10]?.values?.length, 'maxValue clips unique values').toBe( + 10 + ); }); diff --git a/test/common/conformance.ts b/test/common/conformance.ts index c69f6ae2bf..67dc1115c8 100644 --- a/test/common/conformance.ts +++ b/test/common/conformance.ts @@ -2,12 +2,14 @@ // SPDX-License-Identifier: MIT // Copyright (c) vis.gl contributors +import {expect as vitestExpect} from 'vitest'; + function getAssertions(assertions) { if (assertions) { return assertions; } - const expect = globalThis.expect; + const expect = globalThis.expect || vitestExpect; if (typeof expect !== 'function') { throw new Error('validateLoader requires tape assertions or a global expect'); } @@ -37,14 +39,8 @@ export function validateLoader(assertionsOrLoader, loaderOrName, name = '') { assertions.ok(typeof loader.id === 'string', `Loader ${resolvedName} loader.id is not defined`); assertions.ok(loader, `Loader ${resolvedName} defined`); assertions.equal(typeof loader.name, 'string', `Loader ${resolvedName} has a name`); - assertions.ok( - Array.isArray(loader.extensions), - `Loader ${resolvedName} has an extensions array` - ); - assertions.ok( - Array.isArray(loader.mimeTypes), - `Loader ${resolvedName} has a mimeTypes array` - ); + assertions.ok(Array.isArray(loader.extensions), `Loader ${resolvedName} has an extensions array`); + assertions.ok(Array.isArray(loader.mimeTypes), `Loader ${resolvedName} has a mimeTypes array`); const options = loader.options || {}; assertions.ok(!('workerUrl' in options), 'workerUrl is not defined on loader.options');