From 1ee13672aa22cad1d6ca391508eb859c5617a9a0 Mon Sep 17 00:00:00 2001 From: dominic reed Date: Wed, 20 Nov 2024 15:52:36 -0800 Subject: [PATCH] feat(sdk): adds Collections API (#212) Adds Collection API to NanoTDF. Decrypt: NanoTDF Client constructor will have an option to enable `CollectionStore` or pass in a custom `CollectionStore` implementation. Encrypt: NanoTDFConfig will have an option `WithCollection` to enable writing as a Collection rather than as individual NanoTDFs. Examples for Collection are made in `ExampleEncryptCollection` and `ExampleDecryptCollection` and will successfully encrypt/decrypt with default OpenTDF Platform Fixtures. ExampleDecryptCollection will have only 1 Unwrap Request to Platform. --- .../platform/DecryptCollectionExample.java | 41 +++++++ .../platform/EncryptCollectionExample.java | 49 ++++++++ .../java/io/opentdf/platform/sdk/Config.java | 68 +++++++++++ .../java/io/opentdf/platform/sdk/NanoTDF.java | 112 ++++++++++++++---- .../platform/sdk/nanotdf/CollectionStore.java | 21 ++++ .../sdk/nanotdf/CollectionStoreImpl.java | 30 +++++ .../io/opentdf/platform/sdk/NanoTDFTest.java | 41 +++++++ 7 files changed, 336 insertions(+), 26 deletions(-) create mode 100644 examples/src/main/java/io/opentdf/platform/DecryptCollectionExample.java create mode 100644 examples/src/main/java/io/opentdf/platform/EncryptCollectionExample.java create mode 100644 sdk/src/main/java/io/opentdf/platform/sdk/nanotdf/CollectionStore.java create mode 100644 sdk/src/main/java/io/opentdf/platform/sdk/nanotdf/CollectionStoreImpl.java diff --git a/examples/src/main/java/io/opentdf/platform/DecryptCollectionExample.java b/examples/src/main/java/io/opentdf/platform/DecryptCollectionExample.java new file mode 100644 index 00000000..9e2d1b08 --- /dev/null +++ b/examples/src/main/java/io/opentdf/platform/DecryptCollectionExample.java @@ -0,0 +1,41 @@ +package io.opentdf.platform; + +import io.opentdf.platform.sdk.Config; +import io.opentdf.platform.sdk.NanoTDF; +import 
io.opentdf.platform.sdk.SDK; +import io.opentdf.platform.sdk.SDKBuilder; + +import java.io.ByteArrayInputStream; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.security.NoSuchAlgorithmException; + +public class DecryptCollectionExample { + public static void main(String[] args) throws IOException, NanoTDF.NanoTDFMaxSizeLimit, NanoTDF.UnsupportedNanoTDFFeature, NanoTDF.InvalidNanoTDFConfig, NoSuchAlgorithmException, InterruptedException { + String clientId = "opentdf-sdk"; + String clientSecret = "secret"; + String platformEndpoint = "localhost:8080"; + + SDKBuilder builder = new SDKBuilder(); + SDK sdk = builder.platformEndpoint(platformEndpoint) + .clientSecret(clientId, clientSecret).useInsecurePlaintextConnection(true) + .build(); + + var kasInfo = new Config.KASInfo(); + kasInfo.URL = "http://localhost:8080/kas"; + + + // Convert String to InputStream + NanoTDF nanoTDFClient = new NanoTDF(true); + + for (int i = 0; i < 50; i++) { + FileInputStream fis = new FileInputStream(String.format("out/my.%d_ciphertext", i)); + nanoTDFClient.readNanoTDF(ByteBuffer.wrap(fis.readAllBytes()), System.out, sdk.getServices().kas()); + fis.close(); + } + + } +} diff --git a/examples/src/main/java/io/opentdf/platform/EncryptCollectionExample.java b/examples/src/main/java/io/opentdf/platform/EncryptCollectionExample.java new file mode 100644 index 00000000..7f431a2c --- /dev/null +++ b/examples/src/main/java/io/opentdf/platform/EncryptCollectionExample.java @@ -0,0 +1,49 @@ +package io.opentdf.platform; + +import io.opentdf.platform.sdk.Config; +import io.opentdf.platform.sdk.NanoTDF; +import io.opentdf.platform.sdk.SDK; +import io.opentdf.platform.sdk.SDKBuilder; + +import java.io.ByteArrayInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import 
java.nio.charset.StandardCharsets; +import java.security.NoSuchAlgorithmException; + +public class EncryptCollectionExample { + public static void main(String[] args) throws IOException, NanoTDF.NanoTDFMaxSizeLimit, NanoTDF.UnsupportedNanoTDFFeature, NanoTDF.InvalidNanoTDFConfig, NoSuchAlgorithmException, InterruptedException { + String clientId = "opentdf-sdk"; + String clientSecret = "secret"; + String platformEndpoint = "localhost:8080"; + + SDKBuilder builder = new SDKBuilder(); + SDK sdk = builder.platformEndpoint(platformEndpoint) + .clientSecret(clientId, clientSecret).useInsecurePlaintextConnection(true) + .build(); + + var kasInfo = new Config.KASInfo(); + kasInfo.URL = "http://localhost:8080/kas"; + + var tdfConfig = Config.newNanoTDFConfig( + Config.withNanoKasInformation(kasInfo), + Config.witDataAttributes("https://example.com/attr/attr1/value/value1"), + Config.withCollection() + ); + + String str = "Hello, World!"; + + // Convert String to InputStream + var in = new ByteArrayInputStream(str.getBytes(StandardCharsets.UTF_8)); + NanoTDF nanoTDFClient = new NanoTDF(); + + for (int i = 0; i < 50; i++) { + FileOutputStream fos = new FileOutputStream(String.format("out/my.%d_ciphertext", i)); + nanoTDFClient.createNanoTDF(ByteBuffer.wrap(str.getBytes()), fos, tdfConfig, + sdk.getServices().kas()); + } + + } +} diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/Config.java b/sdk/src/main/java/io/opentdf/platform/sdk/Config.java index 1f44854a..7b7d824f 100644 --- a/sdk/src/main/java/io/opentdf/platform/sdk/Config.java +++ b/sdk/src/main/java/io/opentdf/platform/sdk/Config.java @@ -2,12 +2,15 @@ import io.opentdf.platform.sdk.Autoconfigure.AttributeValueFQN; import io.opentdf.platform.sdk.nanotdf.ECCMode; +import io.opentdf.platform.sdk.nanotdf.Header; import io.opentdf.platform.sdk.nanotdf.NanoTDFType; import io.opentdf.platform.sdk.nanotdf.SymmetricAndPayloadConfig; import io.opentdf.platform.policy.Value; +import 
org.bouncycastle.oer.its.ieee1609dot2.HeaderInfo; import java.util.*; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; /** @@ -20,6 +23,7 @@ public class Config { public static final int DEFAULT_SEGMENT_SIZE = 2 * 1024 * 1024; // 2mb public static final String KAS_PUBLIC_KEY_PATH = "/kas_public_key"; public static final String DEFAULT_MIME_TYPE = "application/octet-stream"; + public static final int MAX_COLLECTION_ITERATION = (1 << 24) - 1; public enum TDFFormat { JSONFormat, @@ -248,6 +252,7 @@ public static class NanoTDFConfig { public SymmetricAndPayloadConfig config; public List attributes; public List kasInfoList; + public CollectionConfig collectionConfig; public NanoTDFConfig() { this.eccMode = new ECCMode(); @@ -262,6 +267,7 @@ public NanoTDFConfig() { this.attributes = new ArrayList<>(); this.kasInfoList = new ArrayList<>(); + this.collectionConfig = new CollectionConfig(false); } } @@ -273,6 +279,12 @@ public static NanoTDFConfig newNanoTDFConfig(Consumer... options) return config; } + public static Consumer withCollection() { + return (NanoTDFConfig config) -> { + config.collectionConfig = new CollectionConfig(true); + }; + } + public static Consumer witDataAttributes(String... 
attributes) { return (NanoTDFConfig config) -> { Collections.addAll(config.attributes, attributes); @@ -304,4 +316,60 @@ public static Consumer withEllipticCurve(String curve) { public static Consumer WithECDSAPolicyBinding() { return (NanoTDFConfig config) -> config.eccMode.setECDSABinding(false); } + + public static class HeaderInfo { + private final Header header; + private final AesGcm key; + private final int iteration; + + public HeaderInfo(Header header,AesGcm key, int iteration) { + this.header = header; + this.key = key; + this.iteration = iteration; + } + + public Header getHeader() { + return header; + } + + public int getIteration() { + return iteration; + } + + public AesGcm getKey() { + return key; + } + } + + public static class CollectionConfig { + private int iterationCounter; + private HeaderInfo headerInfo; + public final boolean useCollection; + private Boolean updatedHeaderInfo; + + + public CollectionConfig(boolean useCollection) { + this.useCollection = useCollection; + } + + public synchronized HeaderInfo getHeaderInfo() throws InterruptedException { + int iteration = iterationCounter; + iterationCounter = (iterationCounter + 1) % MAX_COLLECTION_ITERATION; + + if (iteration == 0) { + updatedHeaderInfo = false; + return null; + } + while (!updatedHeaderInfo) { + this.wait(); + } + return new HeaderInfo(headerInfo.getHeader(), headerInfo.getKey(), iteration); + } + + public synchronized void updateHeaderInfo(HeaderInfo headerInfo) { + this.headerInfo = headerInfo; + updatedHeaderInfo = true; + this.notifyAll(); + } + } } \ No newline at end of file diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/NanoTDF.java b/sdk/src/main/java/io/opentdf/platform/sdk/NanoTDF.java index 5695713a..f893762a 100644 --- a/sdk/src/main/java/io/opentdf/platform/sdk/NanoTDF.java +++ b/sdk/src/main/java/io/opentdf/platform/sdk/NanoTDF.java @@ -5,6 +5,7 @@ import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; +import 
java.nio.ByteOrder; import java.nio.charset.StandardCharsets; import java.security.*; import java.util.*; @@ -31,6 +32,19 @@ public class NanoTDF { private static final int kIvPadding = 9; private static final int kNanoTDFIvSize = 3; private static final byte[] kEmptyIV = new byte[] { 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 }; + private final CollectionStore collectionStore; + + public NanoTDF() { + this(new CollectionStore.NoOpCollectionStore()); + } + + public NanoTDF(boolean collectionStoreEnabled) { + this(collectionStoreEnabled ? new CollectionStoreImpl() : null); + } + + public NanoTDF(CollectionStore collectionStore) { + this.collectionStore = collectionStore; + } public static class NanoTDFMaxSizeLimit extends Exception { public NanoTDFMaxSizeLimit(String errorMessage) { @@ -50,19 +64,16 @@ public InvalidNanoTDFConfig(String errorMessage) { } } - public int createNanoTDF(ByteBuffer data, OutputStream outputStream, - Config.NanoTDFConfig nanoTDFConfig, - SDK.KAS kas) throws IOException, NanoTDFMaxSizeLimit, InvalidNanoTDFConfig, - NoSuchAlgorithmException, UnsupportedNanoTDFFeature { - - int nanoTDFSize = 0; - Gson gson = new GsonBuilder().create(); - - int dataSize = data.limit(); - if (dataSize > kMaxTDFSize) { - throw new NanoTDFMaxSizeLimit("exceeds max size for nano tdf"); + private Config.HeaderInfo getHeaderInfo(Config.NanoTDFConfig nanoTDFConfig, SDK.KAS kas) + throws InvalidNanoTDFConfig, UnsupportedNanoTDFFeature, NoSuchAlgorithmException, InterruptedException { + if (nanoTDFConfig.collectionConfig.useCollection) { + Config.HeaderInfo headerInfo = nanoTDFConfig.collectionConfig.getHeaderInfo(); + if (headerInfo != null) { + return headerInfo; + } } + Gson gson = new GsonBuilder().create(); if (nanoTDFConfig.kasInfoList.isEmpty()) { throw new InvalidNanoTDFConfig("kas url is missing"); } @@ -120,9 +131,32 @@ public int createNanoTDF(ByteBuffer data, OutputStream outputStream, header.setPayloadConfig(nanoTDFConfig.config); 
header.setEphemeralKey(compressedPubKey); header.setKasLocator(kasURL); - header.setPolicyInfo(policyInfo); + Config.HeaderInfo headerInfo = new Config.HeaderInfo(header, gcm, 0); + if (nanoTDFConfig.collectionConfig.useCollection) { + nanoTDFConfig.collectionConfig.updateHeaderInfo(headerInfo); + } + + return headerInfo; + } + + public int createNanoTDF(ByteBuffer data, OutputStream outputStream, + Config.NanoTDFConfig nanoTDFConfig, + SDK.KAS kas) throws IOException, NanoTDFMaxSizeLimit, InvalidNanoTDFConfig, + NoSuchAlgorithmException, UnsupportedNanoTDFFeature, InterruptedException { + int nanoTDFSize = 0; + + int dataSize = data.limit(); + if (dataSize > kMaxTDFSize) { + throw new NanoTDFMaxSizeLimit("exceeds max size for nano tdf"); + } + + Config.HeaderInfo headerKeyPair = getHeaderInfo(nanoTDFConfig, kas); + Header header = headerKeyPair.getHeader(); + AesGcm gcm = headerKeyPair.getKey(); + int iteration = headerKeyPair.getIteration(); + int headerSize = header.getTotalSize(); ByteBuffer bufForHeader = ByteBuffer.allocate(headerSize); header.writeIntoBuffer(bufForHeader); @@ -132,13 +166,21 @@ public int createNanoTDF(ByteBuffer data, OutputStream outputStream, nanoTDFSize += headerSize; logger.debug("createNanoTDF header length {}", headerSize); + int authTagSize = SymmetricAndPayloadConfig.sizeOfAuthTagForCipher(nanoTDFConfig.config.getCipherType()); // Encrypt the data byte[] actualIV = new byte[kIvPadding + kNanoTDFIvSize]; - do { - byte[] iv = new byte[kNanoTDFIvSize]; - SecureRandom.getInstanceStrong().nextBytes(iv); - System.arraycopy(iv, 0, actualIV, kIvPadding, iv.length); - } while (Arrays.equals(actualIV, kEmptyIV)); // if match, we need to retry to prevent key + iv reuse with the policy + if (nanoTDFConfig.collectionConfig.useCollection) { + ByteBuffer b = ByteBuffer.allocate(4); + b.order(ByteOrder.LITTLE_ENDIAN); + b.putInt(iteration); + System.arraycopy(b.array(), 0, actualIV, kIvPadding, kNanoTDFIvSize); + } else { + do { + byte[] iv = new 
byte[kNanoTDFIvSize]; + SecureRandom.getInstanceStrong().nextBytes(iv); + System.arraycopy(iv, 0, actualIV, kIvPadding, iv.length); + } while (Arrays.equals(actualIV, kEmptyIV)); // if match, we need to retry to prevent key + iv reuse with the policy + } byte[] cipherData = gcm.encrypt(actualIV, authTagSize, data.array(), data.arrayOffset(), dataSize); @@ -157,23 +199,30 @@ public int createNanoTDF(ByteBuffer data, OutputStream outputStream, return nanoTDFSize; } + public void readNanoTDF(ByteBuffer nanoTDF, OutputStream outputStream, SDK.KAS kas) throws IOException { Header header = new Header(nanoTDF); + CollectionKey cachedKey = collectionStore.getKey(header); + byte[] key = cachedKey.getKey(); - // create base64 encoded - byte[] headerData = new byte[header.getTotalSize()]; - header.writeIntoBuffer(ByteBuffer.wrap(headerData)); - String base64HeaderData = Base64.getEncoder().encodeToString(headerData); + // perform unwrap is not in collectionStore; + if (key == null) { + // create base64 encoded + byte[] headerData = new byte[header.getTotalSize()]; + header.writeIntoBuffer(ByteBuffer.wrap(headerData)); + String base64HeaderData = Base64.getEncoder().encodeToString(headerData); - logger.debug("readNanoTDF header length {}", headerData.length); + logger.debug("readNanoTDF header length {}", headerData.length); - String kasUrl = header.getKasLocator().getResourceUrl(); + String kasUrl = header.getKasLocator().getResourceUrl(); - byte[] key = kas.unwrapNanoTDF(header.getECCMode().getEllipticCurveType(), - base64HeaderData, - kasUrl); + key = kas.unwrapNanoTDF(header.getECCMode().getEllipticCurveType(), + base64HeaderData, + kasUrl); + collectionStore.store(header, new CollectionKey(key)); + } byte[] payloadLengthBuf = new byte[4]; nanoTDF.get(payloadLengthBuf, 1, 3); @@ -213,4 +262,15 @@ PolicyObject createPolicyObject(List attributes) { } return policyObject; } + + public static class CollectionKey { + private final byte[] key; + + public CollectionKey(byte[] 
key) { + this.key = key; + } + protected byte[] getKey() { + return key; + } + } } diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/nanotdf/CollectionStore.java b/sdk/src/main/java/io/opentdf/platform/sdk/nanotdf/CollectionStore.java new file mode 100644 index 00000000..69a0be8c --- /dev/null +++ b/sdk/src/main/java/io/opentdf/platform/sdk/nanotdf/CollectionStore.java @@ -0,0 +1,21 @@ +package io.opentdf.platform.sdk.nanotdf; + +import io.opentdf.platform.sdk.NanoTDF; + +public interface CollectionStore { + NanoTDF.CollectionKey NO_PRIVATE_KEY = new NanoTDF.CollectionKey(null); + void store(Header header, NanoTDF.CollectionKey key); + NanoTDF.CollectionKey getKey(Header header); + + class NoOpCollectionStore implements CollectionStore { + public NoOpCollectionStore() {} + + @Override + public void store(Header header, NanoTDF.CollectionKey key) {} + + @Override + public NanoTDF.CollectionKey getKey(Header header) { + return NO_PRIVATE_KEY; + } + } +} diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/nanotdf/CollectionStoreImpl.java b/sdk/src/main/java/io/opentdf/platform/sdk/nanotdf/CollectionStoreImpl.java new file mode 100644 index 00000000..9bee9be6 --- /dev/null +++ b/sdk/src/main/java/io/opentdf/platform/sdk/nanotdf/CollectionStoreImpl.java @@ -0,0 +1,30 @@ +package io.opentdf.platform.sdk.nanotdf; + +import io.opentdf.platform.sdk.NanoTDF; + +import java.nio.ByteBuffer; +import java.util.LinkedHashMap; +import java.util.Map; + +public class CollectionStoreImpl extends LinkedHashMap + implements CollectionStore { + private static final int MAX_SIZE_STORE = 500; + + public CollectionStoreImpl() {} + + public synchronized void store(Header header, NanoTDF.CollectionKey key) { + ByteBuffer buf = ByteBuffer.allocate(header.getTotalSize()); + header.writeIntoBuffer(buf); + super.put(buf, key); + } + + public synchronized NanoTDF.CollectionKey getKey(Header header) { + ByteBuffer buf = ByteBuffer.allocate(header.getTotalSize()); + 
header.writeIntoBuffer(buf); + return super.getOrDefault(buf, NO_PRIVATE_KEY); + } + @Override + protected boolean removeEldestEntry(Map.Entry eldest) { + return this.size() > MAX_SIZE_STORE; + } +} diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/NanoTDFTest.java b/sdk/src/test/java/io/opentdf/platform/sdk/NanoTDFTest.java index e36ae5cb..87bae33b 100644 --- a/sdk/src/test/java/io/opentdf/platform/sdk/NanoTDFTest.java +++ b/sdk/src/test/java/io/opentdf/platform/sdk/NanoTDFTest.java @@ -154,4 +154,45 @@ void encryptionAndDecryptionWithValidKey() throws Exception { assertThat(dataStream.toByteArray()).isEqualTo(data); } } + + @Test + void collection() throws Exception { + var kasInfos = new ArrayList<>(); + var kasInfo = new Config.KASInfo(); + kasInfo.URL = "https://api.example.com/kas"; + kasInfo.PublicKey = null; + kasInfo.KID = KID; + kasInfos.add(kasInfo); + + Config.NanoTDFConfig config = Config.newNanoTDFConfig( + Config.withNanoKasInformation(kasInfos.toArray(new Config.KASInfo[0])), + Config.witDataAttributes("https://example.com/attr/Classification/value/S", + "https://example.com/attr/Classification/value/X"), + Config.withCollection() + ); + + ByteBuffer byteBuffer = ByteBuffer.wrap(new byte[]{}); + + NanoTDF nanoTDF = new NanoTDF(); + ByteBuffer header = getHeaderBuffer(byteBuffer,nanoTDF, config); + for (int i = 0; i < Config.MAX_COLLECTION_ITERATION - 10; i++) { + config.collectionConfig.getHeaderInfo(); + + } + for (int i = 1; i < 10; i++) { + ByteBuffer newHeader = getHeaderBuffer(byteBuffer,nanoTDF, config); + assertThat(header).isEqualTo(newHeader); + } + + ByteBuffer newHeader = getHeaderBuffer(byteBuffer,nanoTDF, config); + assertThat(header).isNotEqualTo(newHeader); + } + + private ByteBuffer getHeaderBuffer(ByteBuffer input, NanoTDF nanoTDF, Config.NanoTDFConfig config) throws Exception { + ByteArrayOutputStream tdfOutputStream = new ByteArrayOutputStream(); + nanoTDF.createNanoTDF(input, tdfOutputStream, config, kas); + ByteBuffer tdf = 
ByteBuffer.wrap(tdfOutputStream.toByteArray()); + Header header = new Header(tdf); + return tdf.position(0).slice().limit(header.getTotalSize()); + } }