diff --git a/.github/workflows/publish-to.sh b/.github/workflows/publish-to.sh
index b577ff70..3ca74829 100755
--- a/.github/workflows/publish-to.sh
+++ b/.github/workflows/publish-to.sh
@@ -7,13 +7,13 @@
 v="${1%%+*}"
 t="${2}"
 
 cd lib
-for f in {,tdf3/}src/version.ts; do
-  if ! sed "s/export const version = \'[^']\{1,\}\';\$/export const version = \'${v}\';/" "${f}" >"${f}.tmp"; then
-    echo "Failed to insert version [${v}] into file [$f]"
-    exit 1
-  fi
-  mv "${f}.tmp" "${f}"
-done
+f=src/version.ts
+if ! sed "s/export const version = \'[^']\{1,\}\';\$/export const version = \'${v}\';/" "${f}" >"${f}.tmp"; then
+  echo "Failed to insert version [${v}] into file [$f]"
+  exit 1
+fi
+mv "${f}.tmp" "${f}"
+
 npm version --no-git-tag-version --allow-same-version "$v"
 npm publish --access public --tag "$t"
diff --git a/cli/src/cli.ts b/cli/src/cli.ts
index 2cec570b..91af19d0 100644
--- a/cli/src/cli.ts
+++ b/cli/src/cli.ts
@@ -13,6 +13,7 @@ import {
   type Source,
   AuthProviders,
   version,
+  tdfSpecVersion,
   OpenTDF,
   DecoratedStream,
 } from '@opentdf/sdk';
@@ -650,6 +651,7 @@ export const handleArgs = (args: string[]) => {
       JSON.stringify({
         '@opentdf/ctl': process.env.npm_package_version || 'UNRELEASED',
         '@opentdf/sdk': version,
+        tdfSpecVersion,
       })
     )
     .alias('version', 'V')
diff --git a/lib/src/index.ts b/lib/src/index.ts
index 022be994..8e27fc56 100644
--- a/lib/src/index.ts
+++ b/lib/src/index.ts
@@ -1,6 +1,6 @@
 export { type AuthProvider, type HttpMethod, HttpRequest, withHeaders } from './auth/auth.js';
 export * as AuthProviders from './auth/providers.js';
 export { attributeFQNsAsValues } from './policy/api.js';
-export { version, clientType } from './version.js';
+export { version, clientType, tdfSpecVersion } from './version.js';
 export * from './opentdf.js';
 export * from './seekable.js';
diff --git a/lib/src/version.ts b/lib/src/version.ts
index dabecd6e..49c3cf27 100644
--- a/lib/src/version.ts
+++ b/lib/src/version.ts
@@ -7,3 +7,8 @@ export const version = '0.2.0';
  * A string name used to label requests as coming from this library client.
  */
 export const clientType = 'web-sdk';
+
+/**
+ * Version of the opentdf/spec this library is targeting
+ */
+export const tdfSpecVersion = '4.3.0';
diff --git a/lib/tdf3/src/assertions.ts b/lib/tdf3/src/assertions.ts
index b1f1b436..d4bfad66 100644
--- a/lib/tdf3/src/assertions.ts
+++ b/lib/tdf3/src/assertions.ts
@@ -110,8 +110,9 @@ export function isAssertionConfig(obj: unknown): obj is AssertionConfig {
  */
 export async function verify(
   thiz: Assertion,
-  aggregateHash: string,
-  key: AssertionKey
+  aggregateHash: Uint8Array,
+  key: AssertionKey,
+  isLegacyTDF: boolean
 ): Promise<void> {
   let payload: AssertionPayload;
   try {
@@ -126,14 +127,25 @@
 
   // Get the hash of the assertion
   const hashOfAssertion = await hash(thiz);
-  const combinedHash = aggregateHash + hashOfAssertion;
-  const encodedHash = base64.encode(combinedHash);
 
   // check if assertionHash is same as hashOfAssertion
   if (hashOfAssertion !== assertionHash) {
     throw new IntegrityError('Assertion hash mismatch');
   }
 
+  let encodedHash: string;
+  if (isLegacyTDF) {
+    const aggregateHashAsStr = new TextDecoder('utf-8').decode(aggregateHash);
+    const combinedHash = aggregateHashAsStr + hashOfAssertion;
+    encodedHash = base64.encode(combinedHash);
+  } else {
+    const combinedHash = concatenateUint8Arrays(
+      aggregateHash,
+      new Uint8Array(hex.decodeArrayBuffer(assertionHash))
+    );
+    encodedHash = base64.encodeArrayBuffer(combinedHash);
+  }
+
   // check if assertionSig is same as encodedHash
   if (assertionSig !== encodedHash) {
     throw new IntegrityError('Failed integrity check on assertion signature');
@@ -144,7 +156,7 @@
  * Creates an Assertion object with the specified properties.
  */
 export async function CreateAssertion(
-  aggregateHash: string,
+  aggregateHash: Uint8Array,
   assertionConfig: AssertionConfig
 ): Promise<Assertion> {
   if (!assertionConfig.signingKey) {
@@ -162,8 +174,11 @@
   };
 
   const assertionHash = await hash(a);
-  const combinedHash = aggregateHash + assertionHash;
-  const encodedHash = base64.encode(combinedHash);
+  const combinedHash = concatenateUint8Arrays(
+    aggregateHash,
+    new Uint8Array(hex.decodeArrayBuffer(assertionHash))
+  );
+  const encodedHash = base64.encodeArrayBuffer(combinedHash);
 
   return await sign(a, assertionHash, encodedHash, assertionConfig.signingKey);
 }
@@ -189,3 +204,13 @@ export type AssertionVerificationKeys = {
   DefaultKey?: AssertionKey;
   Keys: Record<string, AssertionKey>;
 };
+
+function concatenateUint8Arrays(array1: Uint8Array, array2: Uint8Array): Uint8Array {
+  const combinedLength = array1.length + array2.length;
+  const combinedArray = new Uint8Array(combinedLength);
+
+  combinedArray.set(array1, 0);
+  combinedArray.set(array2, array1.length);
+
+  return combinedArray;
+}
diff --git a/lib/tdf3/src/index.ts b/lib/tdf3/src/index.ts
index 8630c12f..11bc1855 100644
--- a/lib/tdf3/src/index.ts
+++ b/lib/tdf3/src/index.ts
@@ -1,4 +1,4 @@
 export * as Client from './client/index.js';
 export { Client as TDF3Client } from './client/index.js';
 export * as Errors from '../../src/errors.js';
-export { version, clientType } from './version.js';
+export { clientType, tdfSpecVersion, version } from '../../src/version.js';
diff --git a/lib/tdf3/src/models/manifest.ts b/lib/tdf3/src/models/manifest.ts
index 4ed4ebdd..4dffa8b8 100644
--- a/lib/tdf3/src/models/manifest.ts
+++ b/lib/tdf3/src/models/manifest.ts
@@ -6,4 +6,5 @@ export type Manifest = {
   payload: Payload;
   encryptionInformation: EncryptionInformation;
   assertions: Assertion[];
+  tdf_spec_version: string;
 };
diff --git a/lib/tdf3/src/tdf.ts b/lib/tdf3/src/tdf.ts
index 2fb059dd..9f2c9861 100644
--- a/lib/tdf3/src/tdf.ts
+++ b/lib/tdf3/src/tdf.ts
@@ -1,11 +1,35 @@
-import { unsigned } from './utils/buffer-crc32.js';
 import { exportSPKI, importX509 } from 'jose';
-import { DecoratedReadableStream } from './client/DecoratedReadableStream.js';
-import { fetchKasPubKey as fetchKasPubKeyV2, fetchWrappedKey } from '../../src/access.js';
-import { DecryptParams } from './client/builders.js';
+
+import {
+  KasPublicKeyAlgorithm,
+  KasPublicKeyInfo,
+  OriginAllowList,
+  fetchKasPubKey as fetchKasPubKeyV2,
+  fetchWrappedKey,
+} from '../../src/access.js';
+import { type AuthProvider, reqSignature } from '../../src/auth/auth.js';
+import { allPool, anyPool } from '../../src/concurrency.js';
+import { base64, hex } from '../../src/encodings/index.js';
+import {
+  ConfigurationError,
+  DecryptError,
+  InvalidFileError,
+  IntegrityError,
+  NetworkError,
+  UnsafeUrlError,
+  UnsupportedFeatureError as UnsupportedError,
+} from '../../src/errors.js';
+import { type Chunker } from '../../src/seekable.js';
+import { PolicyObject } from '../../src/tdf/PolicyObject.js';
+import { tdfSpecVersion } from '../../src/version.js';
 import { AssertionConfig, AssertionKey, AssertionVerificationKeys } from './assertions.js';
 import * as assertions from './assertions.js';
-
+import { Binary } from './binary.js';
+import { AesGcmCipher } from './ciphers/aes-gcm-cipher.js';
+import { SymmetricCipher } from './ciphers/symmetric-cipher-base.js';
+import { DecryptParams } from './client/builders.js';
+import { DecoratedReadableStream } from './client/DecoratedReadableStream.js';
+import { type CryptoService, type DecryptResult } from './crypto/declarations.js';
 import {
   KeyAccessType,
   KeyInfo,
@@ -18,30 +42,9 @@ import {
   KeyAccessObject,
   SplitType,
 } from './models/index.js';
-import { base64 } from '../../src/encodings/index.js';
-import { ZipReader, ZipWriter, keyMerge, buffToString, concatUint8 } from './utils/index.js';
-import { Binary } from './binary.js';
-import { KasPublicKeyAlgorithm, KasPublicKeyInfo, OriginAllowList } from '../../src/access.js';
-import {
-  ConfigurationError,
-  DecryptError,
-  InvalidFileError,
-  IntegrityError,
-  NetworkError,
-  UnsafeUrlError,
-  UnsupportedFeatureError as UnsupportedError,
-} from '../../src/errors.js';
-
-// configurable
-// TODO: remove dependencies from ciphers so that we can open-source instead of relying on other Virtru libs
-import { AesGcmCipher } from './ciphers/index.js';
-import { type AuthProvider, reqSignature } from '../../src/auth/auth.js';
-import { PolicyObject } from '../../src/tdf/PolicyObject.js';
-import { type CryptoService, type DecryptResult } from './crypto/declarations.js';
+import { unsigned } from './utils/buffer-crc32.js';
+import { ZipReader, ZipWriter, keyMerge, concatUint8 } from './utils/index.js';
 import { CentralDirectory } from './utils/zip-reader.js';
-import { SymmetricCipher } from './ciphers/symmetric-cipher-base.js';
-import { allPool, anyPool } from '../../src/concurrency.js';
-import { type Chunker } from '../../src/seekable.js';
 
 // TODO: input validation on manifest JSON
 const DEFAULT_SEGMENT_SIZE = 1024 * 1024;
@@ -269,25 +272,34 @@ async function _generateManifest(
     // generate the manifest first, then insert integrity information into it
     encryptionInformation: encryptionInformationStr,
     assertions: assertions,
+    tdf_spec_version: tdfSpecVersion,
   };
 }
 
 async function getSignature(
-  unwrappedKeyBinary: Binary,
-  payloadBinary: Binary,
-  algorithmType: IntegrityAlgorithm,
-  cryptoService: CryptoService
-) {
+  unwrappedKey: Uint8Array,
+  content: Uint8Array,
+  algorithmType: IntegrityAlgorithm
+): Promise<Uint8Array> {
   switch (algorithmType.toUpperCase()) {
     case 'GMAC':
       // use the auth tag baked into the encrypted payload
-      return buffToString(Uint8Array.from(payloadBinary.asByteArray()).slice(-16), 'hex');
-    case 'HS256':
+      return content.slice(-16);
+    case 'HS256': {
       // simple hmac is the default
-      return await cryptoService.hmac(
-        buffToString(new Uint8Array(unwrappedKeyBinary.asArrayBuffer()), 'hex'),
-        buffToString(new Uint8Array(payloadBinary.asArrayBuffer()), 'utf-8')
+      const cryptoKey = await crypto.subtle.importKey(
+        'raw',
+        unwrappedKey,
+        {
+          name: 'HMAC',
+          hash: { name: 'SHA-256' },
+        },
+        true,
+        ['sign', 'verify']
       );
+      const signature = await crypto.subtle.sign('HMAC', cryptoKey, content);
+      return new Uint8Array(signature);
+    }
     default:
       throw new ConfigurationError(`Unsupported signature alg [${algorithmType}]`);
   }
@@ -321,7 +333,7 @@ export async function writeStream(cfg: EncryptConfiguration): Promise<DecoratedReadableStream> {
   if (segmentIntegrityAlgorithm !== 'GMAC' && segmentIntegrityAlgorithm !== 'HS256') {
   }
-  const segmentHashStr = await getSignature(
-    reconstructedKeyBinary,
-    Binary.fromArrayBuffer(encryptedChunk.buffer),
-    segmentIntegrityAlgorithm,
-    cryptoService
+  const segmentSig = await getSignature(
+    new Uint8Array(reconstructedKeyBinary.asArrayBuffer()),
+    encryptedChunk,
+    segmentIntegrityAlgorithm
   );
-  if (hash !== btoa(segmentHashStr)) {
+
+  const segmentHash = isLegacyTDF
+    ? base64.encode(hex.encodeArrayBuffer(segmentSig))
+    : base64.encodeArrayBuffer(segmentSig);
+
+  if (hash !== segmentHash) {
     throw new IntegrityError('Failed integrity check on segment hash');
   }
   return await cipher.decrypt(encryptedChunk, reconstructedKeyBinary);
@@ -738,7 +755,8 @@ async function updateChunkQueue(
   reconstructedKeyBinary: Binary,
   cipher: SymmetricCipher,
   segmentIntegrityAlgorithm: IntegrityAlgorithm,
-  cryptoService: CryptoService
+  cryptoService: CryptoService,
+  isLegacyTDF: boolean
 ) {
   const chunksInOneDownload = 500;
   let requests = [];
@@ -779,6 +797,7 @@ async function updateChunkQueue(
           slice,
           cipher,
           segmentIntegrityAlgorithm,
+          isLegacyTDF,
         });
       }
     })()
@@ -793,6 +812,7 @@ export async function sliceAndDecrypt({
   buffer,
   reconstructedKeyBinary,
   slice,
   cipher,
   cryptoService,
   segmentIntegrityAlgorithm,
+  isLegacyTDF,
 }: {
   buffer: Uint8Array;
   reconstructedKeyBinary: Binary;
@@ -800,6 +820,7 @@
   cipher: SymmetricCipher;
   cryptoService: CryptoService;
   segmentIntegrityAlgorithm: IntegrityAlgorithm;
+  isLegacyTDF: boolean;
 }) {
   for (const index in slice) {
     const { encryptedOffset, encryptedSegmentSize, _resolve, _reject } = slice[index];
@@ -817,7 +838,8 @@
         slice[index]['hash'],
         cipher,
         segmentIntegrityAlgorithm,
-        cryptoService
+        cryptoService,
+        isLegacyTDF
       );
       slice[index].decryptedChunk = result;
       if (_resolve) {
@@ -864,23 +886,33 @@ export async function readStream(cfg: DecryptConfiguration) {
   const keyForDecryption = await cfg.keyMiddleware(reconstructedKeyBinary);
   const encryptedSegmentSizeDefault = defaultSegmentSize || DEFAULT_SEGMENT_SIZE;
 
-  // check the combined string of hashes
-  const aggregateHash = segments.map(({ hash }) => base64.decode(hash)).join('');
+  // check if the TDF is a legacy TDF
+  const isLegacyTDF = !manifest.tdf_spec_version;
+
+  // Decode each hash and store it in an array of Uint8Array
+  const segmentHashList = segments.map(
+    ({ hash }) => new Uint8Array(base64.decodeArrayBuffer(hash))
+  );
+
+  // Concatenate all segment hashes into a single Uint8Array
+  const aggregateHash = await concatenateUint8Array(segmentHashList);
+
   const integrityAlgorithm = rootSignature.alg;
   if (integrityAlgorithm !== 'GMAC' && integrityAlgorithm !== 'HS256') {
     throw new UnsupportedError(`Unsupported integrity alg [${integrityAlgorithm}]`);
   }
-  const payloadSigStr = await getSignature(
-    keyForDecryption,
-    Binary.fromString(aggregateHash),
-    integrityAlgorithm,
-    cfg.cryptoService
+
+  const payloadSig = await getSignature(
+    new Uint8Array(keyForDecryption.asArrayBuffer()),
+    aggregateHash,
+    integrityAlgorithm
   );
-  if (
-    manifest.encryptionInformation.integrityInformation.rootSignature.sig !==
-    base64.encode(payloadSigStr)
-  ) {
+  const rootSig = isLegacyTDF
+    ? base64.encode(hex.encodeArrayBuffer(payloadSig))
+    : base64.encodeArrayBuffer(payloadSig);
+
+  if (manifest.encryptionInformation.integrityInformation.rootSignature.sig !== rootSig) {
     throw new IntegrityError('Failed integrity check on root signature');
   }
 
@@ -898,7 +930,7 @@ export async function readStream(cfg: DecryptConfiguration) {
           assertionKey = foundKey;
         }
       }
-      await assertions.verify(assertion, aggregateHash, assertionKey);
+      await assertions.verify(assertion, aggregateHash, assertionKey, isLegacyTDF);
     }
   }
 
@@ -939,7 +971,8 @@ export async function readStream(cfg: DecryptConfiguration) {
     keyForDecryption,
     cipher,
     segmentIntegrityAlg,
-    cfg.cryptoService
+    cfg.cryptoService,
+    isLegacyTDF
   );
 
   let progress = 0;
@@ -972,3 +1005,9 @@
   outputStream.metadata = metadata;
   return outputStream;
 }
+
+async function concatenateUint8Array(uint8arrays: Uint8Array[]): Promise<Uint8Array> {
+  const blob = new Blob(uint8arrays);
+  const buffer = await blob.arrayBuffer();
+  return new Uint8Array(buffer);
+}
diff --git a/lib/tdf3/src/version.ts b/lib/tdf3/src/version.ts
deleted file mode 100644
index fef58459..00000000
--- a/lib/tdf3/src/version.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-export const version = '0.2.0';
-export const clientType = 'tdf3-js-client';
diff --git a/scripts/bump-version.sh b/scripts/bump-version.sh
index 0969715e..0ef6956f 100755
--- a/scripts/bump-version.sh
+++ b/scripts/bump-version.sh
@@ -46,12 +46,7 @@ if ! sed_i "s/version=${old_version}/version=${new_version}/" "Makefile"; then
   exit 1
 fi
 
-if ! sed_i "s/export const version = '[^']\{1,\}';\$/export const version = \'${new_version}\';/" lib{,/tdf3}/src/version.ts; then
-  echo "Unable to change version in version files"
-  exit 1
-fi
-
-if ! sed_i "s/export const version = '[^']\{1,\}';\$/export const version = \'${new_version}\';/" lib{,/tdf3}/src/version.ts; then
+if ! sed_i "s/export const version = '[^']\{1,\}';\$/export const version = \'${new_version}\';/" lib/src/version.ts; then
   echo "Unable to change version in version files"
   exit 1
 fi
diff --git a/scripts/check-version-is.sh b/scripts/check-version-is.sh
index 650307ac..7f514e1f 100755
--- a/scripts/check-version-is.sh
+++ b/scripts/check-version-is.sh
@@ -16,16 +16,15 @@ if ! grep --fixed-strings --line-regexp --quiet "version=${expected_version}" "M
   exit 1
 fi
 
-for f in lib{,/tdf3}/src/version.ts; do
-  if ! grep --fixed-strings --line-regexp --quiet "export const version = '${expected_version}';" "$f"; then
-    if grep --quiet "^export const version" "$f"; then
-      echo "::error file=$f,line=$(sed -n '/export const version/=' $f)::Incorrect version line, should be setting it to [${expected_version}]"
-    else
-      echo "::error file=$f::Missing version line [version=${expected_version}]"
-    fi
-    exit 1
+f=lib/src/version.ts
+if ! grep --fixed-strings --line-regexp --quiet "export const version = '${expected_version}';" "$f"; then
+  if grep --quiet "^export const version" "$f"; then
+    echo "::error file=$f,line=$(sed -n '/export const version/=' $f)::Incorrect version line, should be setting it to [${expected_version}]"
+  else
+    echo "::error file=$f::Missing version line [version=${expected_version}]"
   fi
-done
+  exit 1
+fi
 
 for x in lib cli web-app; do
   sub_version="$(cd $x && node -p "require('./package.json').version")"
@@ -36,7 +35,7 @@ for x in lib cli web-app; do
 done
 
 if [[ "${GITHUB_ACTION:-}" ]]; then
-  echo "TARGET_VERSION=$expected_version" >>$GITHUB_OUTPUT
+  echo "TARGET_VERSION=$expected_version" >>"$GITHUB_OUTPUT"
 else
   echo "SUCCESS: TARGET_VERSION=$expected_version"
 fi
diff --git a/scripts/platform.sh b/scripts/platform.sh
index 9c04e4ab..ee2216e9 100755
--- a/scripts/platform.sh
+++ b/scripts/platform.sh
@@ -12,4 +12,4 @@ if [ $? -ne 0 ]; then
   npm install @bufbuild/protoc-gen-es @bufbuild/buf @connectrpc/protoc-gen-connect-es
 fi
 npx buf generate platform/service
-echo "Generated Typescript code from Protobuf files (src: platform/service, dst: lib/src/platform)"
\ No newline at end of file
+echo "Generated Typescript code from Protobuf files (src: platform/service, dst: lib/src/platform)"