Skip to content

Commit 5f25dcc

Browse files
committed
MLE-26918 Refactor: Changed config class back to a non-record class
Had a discussion with Copilot; we agreed that a record isn't a good fit here, since a record implies immutability, which is not currently the case for this class.
1 parent 1d19be9 commit 5f25dcc

File tree

4 files changed

+56
-20
lines changed

4 files changed

+56
-20
lines changed

marklogic-client-api/src/main/java/com/marklogic/client/datamovement/filter/IncrementalWriteConfig.java

Lines changed: 41 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -14,10 +14,17 @@
1414
*
1515
* @since 8.1.0
1616
*/
17-
public record IncrementalWriteConfig(String hashKeyName, String timestampKeyName, boolean canonicalizeJson,
18-
Consumer<DocumentWriteOperation[]> skippedDocumentsConsumer,
19-
String[] jsonExclusions, String[] xmlExclusions, Map<String, String> xmlNamespaces,
20-
String schemaName, String viewName) {
17+
public class IncrementalWriteConfig {
18+
19+
private final String hashKeyName;
20+
private final String timestampKeyName;
21+
private final boolean canonicalizeJson;
22+
private final Consumer<DocumentWriteOperation[]> skippedDocumentsConsumer;
23+
private final String[] jsonExclusions;
24+
private final String[] xmlExclusions;
25+
private final Map<String, String> xmlNamespaces;
26+
private final String schemaName;
27+
private final String viewName;
2128

2229
public IncrementalWriteConfig(String hashKeyName, String timestampKeyName, boolean canonicalizeJson,
2330
Consumer<DocumentWriteOperation[]> skippedDocumentsConsumer,
@@ -34,11 +41,39 @@ public IncrementalWriteConfig(String hashKeyName, String timestampKeyName, boole
3441
this.viewName = viewName;
3542
}
3643

44+
public String getHashKeyName() {
45+
return hashKeyName;
46+
}
47+
48+
public String getTimestampKeyName() {
49+
return timestampKeyName;
50+
}
51+
52+
public boolean isCanonicalizeJson() {
53+
return canonicalizeJson;
54+
}
55+
56+
public Consumer<DocumentWriteOperation[]> getSkippedDocumentsConsumer() {
57+
return skippedDocumentsConsumer;
58+
}
59+
60+
public String[] getJsonExclusions() {
61+
return jsonExclusions;
62+
}
3763

38-
@Override
39-
public Map<String, String> xmlNamespaces() {
64+
public String[] getXmlExclusions() {
65+
return xmlExclusions;
66+
}
67+
68+
public Map<String, String> getXmlNamespaces() {
4069
return xmlNamespaces != null ? xmlNamespaces : Collections.emptyMap();
4170
}
4271

72+
public String getSchemaName() {
73+
return schemaName;
74+
}
4375

76+
public String getViewName() {
77+
return viewName;
78+
}
4479
}

marklogic-client-api/src/main/java/com/marklogic/client/datamovement/filter/IncrementalWriteFilter.java

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -242,19 +242,19 @@ protected final DocumentWriteSet filterDocuments(Context context, Function<Strin
242242

243243
if (existingHash != null) {
244244
if (!existingHash.equals(contentHash)) {
245-
newWriteSet.add(addHashToMetadata(doc, config.hashKeyName(), contentHash, config.timestampKeyName(), timestamp));
246-
} else if (config.skippedDocumentsConsumer() != null) {
245+
newWriteSet.add(addHashToMetadata(doc, config.getHashKeyName(), contentHash, config.getTimestampKeyName(), timestamp));
246+
} else if (config.getSkippedDocumentsConsumer() != null) {
247247
skippedDocuments.add(doc);
248248
} else {
249249
// No consumer, so skip the document silently.
250250
}
251251
} else {
252-
newWriteSet.add(addHashToMetadata(doc, config.hashKeyName(), contentHash, config.timestampKeyName(), timestamp));
252+
newWriteSet.add(addHashToMetadata(doc, config.getHashKeyName(), contentHash, config.getTimestampKeyName(), timestamp));
253253
}
254254
}
255255

256-
if (!skippedDocuments.isEmpty() && config.skippedDocumentsConsumer() != null) {
257-
config.skippedDocumentsConsumer().accept(skippedDocuments.toArray(new DocumentWriteOperation[0]));
256+
if (!skippedDocuments.isEmpty() && config.getSkippedDocumentsConsumer() != null) {
257+
config.getSkippedDocumentsConsumer().accept(skippedDocuments.toArray(new DocumentWriteOperation[0]));
258258
}
259259

260260
return newWriteSet;
@@ -271,11 +271,11 @@ private String serializeContent(DocumentWriteOperation doc) {
271271
format = baseHandle.getFormat();
272272
}
273273

274-
if (config.canonicalizeJson() && (Format.JSON.equals(format) || isPossiblyJsonContent(content))) {
274+
if (config.isCanonicalizeJson() && (Format.JSON.equals(format) || isPossiblyJsonContent(content))) {
275275
JsonCanonicalizer jc;
276276
try {
277-
if (config.jsonExclusions() != null && config.jsonExclusions().length > 0) {
278-
content = ContentExclusionUtil.applyJsonExclusions(doc.getUri(), content, config.jsonExclusions());
277+
if (config.getJsonExclusions() != null && config.getJsonExclusions().length > 0) {
278+
content = ContentExclusionUtil.applyJsonExclusions(doc.getUri(), content, config.getJsonExclusions());
279279
}
280280
jc = new JsonCanonicalizer(content);
281281
return jc.getEncodedString();
@@ -286,9 +286,9 @@ private String serializeContent(DocumentWriteOperation doc) {
286286
logger.warn("Unable to canonicalize JSON content for URI {}, using original content for hashing; cause: {}",
287287
doc.getUri(), e.getMessage());
288288
}
289-
} else if (config.xmlExclusions() != null && config.xmlExclusions().length > 0) {
289+
} else if (config.getXmlExclusions() != null && config.getXmlExclusions().length > 0) {
290290
try {
291-
content = ContentExclusionUtil.applyXmlExclusions(doc.getUri(), content, config.xmlNamespaces(), config.xmlExclusions());
291+
content = ContentExclusionUtil.applyXmlExclusions(doc.getUri(), content, config.getXmlNamespaces(), config.getXmlExclusions());
292292
} catch (Exception e) {
293293
logger.warn("Unable to apply XML exclusions for URI {}, using original content for hashing; cause: {}",
294294
doc.getUri(), e.getMessage());

marklogic-client-api/src/main/java/com/marklogic/client/datamovement/filter/IncrementalWriteFromLexiconsFilter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ public DocumentWriteSet apply(Context context) {
3131
Map<String, Long> existingHashes = new RowTemplate(context.getDatabaseClient()).query(op ->
3232
op.fromLexicons(Map.of(
3333
"uri", op.cts.uriReference(),
34-
"hash", op.cts.fieldReference(getConfig().hashKeyName())
34+
"hash", op.cts.fieldReference(getConfig().getHashKeyName())
3535
)).where(
3636
op.cts.documentQuery(op.xs.stringSeq(uris))
3737
),

marklogic-client-api/src/main/java/com/marklogic/client/datamovement/filter/IncrementalWriteFromViewFilter.java

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,14 +29,14 @@ public DocumentWriteSet apply(Context context) {
2929

3030
try {
3131
Map<String, Long> existingHashes = new RowTemplate(context.getDatabaseClient()).query(op ->
32-
op.fromView(getConfig().schemaName(), getConfig().viewName(), "")
32+
op.fromView(getConfig().getSchemaName(), getConfig().getViewName(), "")
3333
.where(op.cts.documentQuery(op.xs.stringSeq(uris)))
3434
,
3535
rows -> {
3636
Map<String, Long> map = new HashMap<>();
3737
rows.forEach(row -> {
3838
String uri = row.getString("uri");
39-
String hashString = row.getString(getConfig().hashKeyName());
39+
String hashString = row.getString(getConfig().getHashKeyName());
4040
if (hashString != null && !hashString.isEmpty()) {
4141
long existingHash = Long.parseUnsignedLong(hashString);
4242
map.put(uri, existingHash);
@@ -51,7 +51,8 @@ public DocumentWriteSet apply(Context context) {
5151

5252
return filterDocuments(context, uri -> existingHashes.get(uri));
5353
} catch (FailedRequestException e) {
54-
String message = "Unable to query for existing incremental write hashes from view " + getConfig().schemaName() + "." + getConfig().viewName() + "; cause: " + e.getMessage();
54+
String message = "Unable to query for existing incremental write hashes from view "
55+
+ getConfig().getSchemaName() + "." + getConfig().getViewName() + "; cause: " + e.getMessage();
5556
throw new FailedRequestException(message, e.getFailedRequest());
5657
}
5758
}

0 commit comments

Comments
 (0)