diff --git a/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/filter/IncrementalWriteFilter.java b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/filter/IncrementalWriteFilter.java
index cb14f469b..b87f29ccb 100644
--- a/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/filter/IncrementalWriteFilter.java
+++ b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/filter/IncrementalWriteFilter.java
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2010-2025 Progress Software Corporation and/or its subsidiaries or affiliates. All Rights Reserved.
+ * Copyright (c) 2010-2026 Progress Software Corporation and/or its subsidiaries or affiliates. All Rights Reserved.
  */
 
 package com.marklogic.client.datamovement.filter;
@@ -50,7 +50,10 @@ public static class Builder {
 	 * @param keyName the name of the MarkLogic metadata key that will hold the hash value; defaults to "incrementalWriteHash".
 	 */
 	public Builder hashKeyName(String keyName) {
-		this.hashKeyName = keyName;
+		// Don't let user shoot themselves in the foot with an empty key name.
+		if (keyName != null && !keyName.trim().isEmpty()) {
+			this.hashKeyName = keyName;
+		}
 		return this;
 	}
 
@@ -58,7 +61,10 @@ public Builder hashKeyName(String keyName) {
 	 * @param keyName the name of the MarkLogic metadata key that will hold the timestamp value; defaults to "incrementalWriteTimestamp".
 	 */
 	public Builder timestampKeyName(String keyName) {
-		this.timestampKeyName = keyName;
+		// Don't let user shoot themselves in the foot with an empty key name.
+		if (keyName != null && !keyName.trim().isEmpty()) {
+			this.timestampKeyName = keyName;
+		}
 		return this;
 	}
 
diff --git a/marklogic-client-api/src/test/java/com/marklogic/client/datamovement/filter/IncrementalWriteTest.java b/marklogic-client-api/src/test/java/com/marklogic/client/datamovement/filter/IncrementalWriteTest.java
index 47ea0d28d..95bda1326 100644
--- a/marklogic-client-api/src/test/java/com/marklogic/client/datamovement/filter/IncrementalWriteTest.java
+++ b/marklogic-client-api/src/test/java/com/marklogic/client/datamovement/filter/IncrementalWriteTest.java
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2010-2025 Progress Software Corporation and/or its subsidiaries or affiliates. All Rights Reserved.
+ * Copyright (c) 2010-2026 Progress Software Corporation and/or its subsidiaries or affiliates. All Rights Reserved.
  */
 
 package com.marklogic.client.datamovement.filter;
@@ -181,6 +181,43 @@ void noRangeIndexForFieldWithEval() {
 			"fail with a helpful error message. Actual message: " + message);
 	}
 
+	@Test
+	void customTimestampKeyName() {
+		filter = IncrementalWriteFilter.newBuilder()
+			.hashKeyName("incrementalWriteHash")
+			.timestampKeyName("myTimestamp")
+			.build();
+
+		writeTenDocuments();
+
+		DocumentMetadataHandle metadata = Common.client.newDocumentManager().readMetadata("/incremental/test/doc-1.xml",
+			new DocumentMetadataHandle());
+
+		assertNotNull(metadata.getMetadataValues().get("myTimestamp"));
+		assertNotNull(metadata.getMetadataValues().get("incrementalWriteHash"));
+		assertFalse(metadata.getMetadataValues().containsKey("incrementalWriteTimestamp"));
+	}
+
+	/**
+	 * The thought for this test is that if the user passes null in (which could happen via our Spark connector),
+	 * they're breaking the feature. So don't let them do that - ignore null and use the default values.
+	 */
+	@Test
+	void nullIsIgnoredForKeyNames() {
+		filter = IncrementalWriteFilter.newBuilder()
+			.hashKeyName(null)
+			.timestampKeyName(null)
+			.build();
+
+		writeTenDocuments();
+
+		DocumentMetadataHandle metadata = Common.client.newDocumentManager().readMetadata("/incremental/test/doc-1.xml",
+			new DocumentMetadataHandle());
+
+		assertNotNull(metadata.getMetadataValues().get("incrementalWriteHash"));
+		assertNotNull(metadata.getMetadataValues().get("incrementalWriteTimestamp"));
+	}
+
 	private void verifyIncrementalWriteWorks() {
 		writeTenDocuments();
 		verifyDocumentsHasHashInMetadataKey();