Getting rid of the PR template as well, as it's not needed and GitKraken keeps trying to use it.
diff --git a/marklogic-client-api-functionaltests/build.gradle b/marklogic-client-api-functionaltests/build.gradle index d9cebfa63..e8d18b2c3 100755 --- a/marklogic-client-api-functionaltests/build.gradle +++ b/marklogic-client-api-functionaltests/build.gradle @@ -20,7 +20,7 @@ dependencies { exclude module: "marklogic-client-api" } - testImplementation 'ch.qos.logback:logback-classic:1.5.18' + testImplementation 'ch.qos.logback:logback-classic:1.5.19' testImplementation 'org.junit.jupiter:junit-jupiter:5.13.4' testImplementation 'org.xmlunit:xmlunit-legacy:2.10.4' diff --git a/marklogic-client-api/build.gradle b/marklogic-client-api/build.gradle index a8c48a096..cd3ace4a0 100644 --- a/marklogic-client-api/build.gradle +++ b/marklogic-client-api/build.gradle @@ -56,7 +56,7 @@ dependencies { testImplementation "com.squareup.okhttp3:mockwebserver3:5.1.0" testImplementation "com.fasterxml.jackson.dataformat:jackson-dataformat-xml:${jacksonVersion}" - testImplementation 'ch.qos.logback:logback-classic:1.5.18' + testImplementation 'ch.qos.logback:logback-classic:1.5.19' // Using this to avoid a schema validation issue with the regular xercesImpl testImplementation 'org.opengis.cite.xerces:xercesImpl-xsd11:2.12-beta-r1667115' diff --git a/test-app/build.gradle b/test-app/build.gradle index a06a500b5..284d12d64 100644 --- a/test-app/build.gradle +++ b/test-app/build.gradle @@ -12,7 +12,7 @@ dependencies { implementation "io.undertow:undertow-core:2.3.19.Final" implementation "io.undertow:undertow-servlet:2.3.19.Final" implementation 'org.slf4j:slf4j-api:2.0.17' - implementation 'ch.qos.logback:logback-classic:1.5.18' + implementation 'ch.qos.logback:logback-classic:1.5.19' implementation "com.fasterxml.jackson.core:jackson-databind:${jacksonVersion}" implementation "com.squareup.okhttp3:okhttp:${okhttpVersion}" } From a398a93163fb726fbd928a7f13136491444247dd Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Wed, 15 Oct 2025 15:00:03 -0400 Subject: [PATCH 03/15] MLE-24747 
Bumping ml-gradle to 6.1.0 Removed hack in BitemporalTest for bug that is fixed. And bumped mockito, realized it can go to 5 safely now. --- .../build.gradle | 9 +++------ marklogic-client-api/build.gradle | 14 ++++++-------- .../marklogic/client/test/BitemporalTest.java | 17 ----------------- test-app/build.gradle | 2 +- 4 files changed, 10 insertions(+), 32 deletions(-) diff --git a/marklogic-client-api-functionaltests/build.gradle b/marklogic-client-api-functionaltests/build.gradle index e8d18b2c3..4a4d2e029 100755 --- a/marklogic-client-api-functionaltests/build.gradle +++ b/marklogic-client-api-functionaltests/build.gradle @@ -15,18 +15,15 @@ dependencies { testImplementation 'org.apache.commons:commons-lang3:3.19.0' - // Allows talking to the Manage API. - testImplementation("com.marklogic:ml-app-deployer:6.0.1") { - exclude module: "marklogic-client-api" - } + testImplementation "com.marklogic:ml-app-deployer:6.1.0" testImplementation 'ch.qos.logback:logback-classic:1.5.19' - testImplementation 'org.junit.jupiter:junit-jupiter:5.13.4' + testImplementation 'org.junit.jupiter:junit-jupiter:5.14.0' testImplementation 'org.xmlunit:xmlunit-legacy:2.10.4' // Without this, once using JUnit 5.12 or higher, Gradle will not find any tests and report an error of: // org.junit.platform.commons.JUnitException: TestEngine with ID 'junit-jupiter' failed to discover tests - testRuntimeOnly "org.junit.platform:junit-platform-launcher:1.13.4" + testRuntimeOnly "org.junit.platform:junit-platform-launcher:1.14.0" } tasks.withType(Test).configureEach { diff --git a/marklogic-client-api/build.gradle b/marklogic-client-api/build.gradle index cd3ace4a0..46b724103 100644 --- a/marklogic-client-api/build.gradle +++ b/marklogic-client-api/build.gradle @@ -46,14 +46,12 @@ dependencies { testImplementation 'org.apache.commons:commons-lang3:3.19.0' // Allows talking to the Manage API. 
- testImplementation("com.marklogic:ml-app-deployer:6.0.1") { - exclude module: "marklogic-client-api" - } + testImplementation "com.marklogic:ml-app-deployer:6.1.0" + + testImplementation "org.mockito:mockito-core:5.20.0" + testImplementation "org.mockito:mockito-inline:5.20.0" - // Starting with mockito 5.x, Java 11 is required, so sticking with 4.x as we have to support Java 8. - testImplementation "org.mockito:mockito-core:4.11.0" - testImplementation "org.mockito:mockito-inline:4.11.0" - testImplementation "com.squareup.okhttp3:mockwebserver3:5.1.0" + testImplementation "com.squareup.okhttp3:mockwebserver3:5.2.0" testImplementation "com.fasterxml.jackson.dataformat:jackson-dataformat-xml:${jacksonVersion}" testImplementation 'ch.qos.logback:logback-classic:1.5.19' @@ -73,7 +71,7 @@ dependencies { // https://docs.gradle.org/current/userguide/upgrading_version_8.html#test_framework_implementation_dependencies // Without this, once using JUnit 5.12 or higher, Gradle will not find any tests and report an error of: // org.junit.platform.commons.JUnitException: TestEngine with ID 'junit-jupiter' failed to discover tests - testRuntimeOnly "org.junit.platform:junit-platform-launcher:1.13.4" + testRuntimeOnly "org.junit.platform:junit-platform-launcher:1.14.0" } // Ensure that mlHost and mlPassword can override the defaults of localhost/admin if they've been modified diff --git a/marklogic-client-api/src/test/java/com/marklogic/client/test/BitemporalTest.java b/marklogic-client-api/src/test/java/com/marklogic/client/test/BitemporalTest.java index 9d447d098..97bb22d9d 100644 --- a/marklogic-client-api/src/test/java/com/marklogic/client/test/BitemporalTest.java +++ b/marklogic-client-api/src/test/java/com/marklogic/client/test/BitemporalTest.java @@ -15,8 +15,6 @@ import com.marklogic.client.io.StringHandle; import com.marklogic.client.query.*; import com.marklogic.client.query.StructuredQueryBuilder.TemporalOperator; -import com.marklogic.mgmt.ManageClient; -import 
com.marklogic.mgmt.resource.temporal.TemporalCollectionLSQTManager; import jakarta.xml.bind.DatatypeConverter; import org.custommonkey.xmlunit.exceptions.XpathException; import org.junit.jupiter.api.AfterEach; @@ -163,21 +161,6 @@ void writeTwoVersionsOfFourDocuments() throws XpathException { @Test void lsqtTest() { - // Due to bug MLE-24511 where LSQT properties aren't updated correctly in ml-gradle 6.0.0, we need to manually - // deploy them for this test. - ManageClient manageClient = Common.newManageClient(); - TemporalCollectionLSQTManager mgr = new TemporalCollectionLSQTManager(manageClient, "java-unittest", "temporal-collection"); - String payload = """ - { - "lsqt-enabled": true, - "automation": { - "enabled": true, - "period": 5000 - } - } - """; - mgr.save(payload); - String version1 = """ %s version1 diff --git a/test-app/build.gradle b/test-app/build.gradle index 284d12d64..d9dc86e10 100644 --- a/test-app/build.gradle +++ b/test-app/build.gradle @@ -4,7 +4,7 @@ plugins { id "net.saliman.properties" version "1.5.2" - id 'com.marklogic.ml-gradle' version '6.0.1' + id 'com.marklogic.ml-gradle' version '6.1.0' id "com.github.psxpaul.execfork" version "0.2.2" } From 1775b8111bb184f6b071d9b1d825415316571ee2 Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Thu, 16 Oct 2025 09:15:20 -0400 Subject: [PATCH 04/15] MLE-12708 Re-enabling test Bug was fixed in the server. 
--- .../java/com/marklogic/client/test/SPARQLManagerTest.java | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/marklogic-client-api/src/test/java/com/marklogic/client/test/SPARQLManagerTest.java b/marklogic-client-api/src/test/java/com/marklogic/client/test/SPARQLManagerTest.java index 9e37efa19..7287c639c 100644 --- a/marklogic-client-api/src/test/java/com/marklogic/client/test/SPARQLManagerTest.java +++ b/marklogic-client-api/src/test/java/com/marklogic/client/test/SPARQLManagerTest.java @@ -30,7 +30,8 @@ import static org.junit.jupiter.api.Assertions.*; -public class SPARQLManagerTest { +class SPARQLManagerTest { + private static String graphUri = "http://marklogic.com/java/SPARQLManagerTest"; private static String triple1 = " ."; private static String triple2 = " ."; @@ -362,10 +363,6 @@ public void testSPARQLWithBindings() throws Exception { @Test public void testPagination() { - if (Common.getMarkLogicVersion().getMajor() >= 12) { - // Disabled until MLE-12708 is fixed. - return; - } SPARQLQueryDefinition qdef1 = smgr.newQueryDefinition( "SELECT ?s ?p ?o FROM <" + graphUri + "> { ?s ?p ?o }"); qdef1.setIncludeDefaultRulesets(false); From 7566d49d258fa177740944f9fe6da17ed31affde Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Thu, 16 Oct 2025 12:06:04 -0400 Subject: [PATCH 05/15] MLE-24747 Fixing typo in Gradle file Don't know why this didn't cause a failure initially, but did cause failures in nightly regressions. 
--- marklogic-client-api/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/marklogic-client-api/build.gradle b/marklogic-client-api/build.gradle index 46b724103..a70d9d4b9 100644 --- a/marklogic-client-api/build.gradle +++ b/marklogic-client-api/build.gradle @@ -49,7 +49,7 @@ dependencies { testImplementation "com.marklogic:ml-app-deployer:6.1.0" testImplementation "org.mockito:mockito-core:5.20.0" - testImplementation "org.mockito:mockito-inline:5.20.0" + testImplementation "org.mockito:mockito-inline:5.2.0" testImplementation "com.squareup.okhttp3:mockwebserver3:5.2.0" From 6f321dd17dea9fbb1ead5eb4c2adef478853aa4b Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Wed, 22 Oct 2025 14:04:35 -0400 Subject: [PATCH 06/15] MLE-24826 Bumping Spring, undertow for CVEs Also bumped an old version of xmlunit. Pretty sure the junit:junit stuff can be easily removed next. --- build.gradle | 1 - marklogic-client-api-functionaltests/build.gradle | 2 +- marklogic-client-api/build.gradle | 4 ++-- ml-development-tools/build.gradle | 2 +- test-app/build.gradle | 4 ++-- 5 files changed, 6 insertions(+), 7 deletions(-) diff --git a/build.gradle b/build.gradle index 6ad238fe4..b71510d5f 100644 --- a/build.gradle +++ b/build.gradle @@ -23,7 +23,6 @@ subprojects { } repositories { - mavenLocal() mavenCentral() // Needed so that ml-development-tools can resolve snapshots of marklogic-client-api. 
diff --git a/marklogic-client-api-functionaltests/build.gradle b/marklogic-client-api-functionaltests/build.gradle index 4a4d2e029..0bfccada1 100755 --- a/marklogic-client-api-functionaltests/build.gradle +++ b/marklogic-client-api-functionaltests/build.gradle @@ -15,7 +15,7 @@ dependencies { testImplementation 'org.apache.commons:commons-lang3:3.19.0' - testImplementation "com.marklogic:ml-app-deployer:6.1.0" + testImplementation "com.marklogic:ml-app-deployer:6.2-SNAPSHOT" testImplementation 'ch.qos.logback:logback-classic:1.5.19' testImplementation 'org.junit.jupiter:junit-jupiter:5.14.0' diff --git a/marklogic-client-api/build.gradle b/marklogic-client-api/build.gradle index a70d9d4b9..8d0aca524 100644 --- a/marklogic-client-api/build.gradle +++ b/marklogic-client-api/build.gradle @@ -36,7 +36,7 @@ dependencies { compileOnly 'org.dom4j:dom4j:2.2.0' compileOnly 'com.google.code.gson:gson:2.13.2' - testImplementation 'org.junit.jupiter:junit-jupiter:5.13.4' + testImplementation 'org.junit.jupiter:junit-jupiter:5.14.0' // Forcing junit version to avoid vulnerability with older version in xmlunit testImplementation 'junit:junit:4.13.2' @@ -46,7 +46,7 @@ dependencies { testImplementation 'org.apache.commons:commons-lang3:3.19.0' // Allows talking to the Manage API. - testImplementation "com.marklogic:ml-app-deployer:6.1.0" + testImplementation "com.marklogic:ml-app-deployer:6.2-SNAPSHOT" testImplementation "org.mockito:mockito-core:5.20.0" testImplementation "org.mockito:mockito-inline:5.2.0" diff --git a/ml-development-tools/build.gradle b/ml-development-tools/build.gradle index f7bfb277f..9b0f67f85 100644 --- a/ml-development-tools/build.gradle +++ b/ml-development-tools/build.gradle @@ -32,7 +32,7 @@ dependencies { // Not yet migrating this project to JUnit 5. Will reconsider it once we have a reason to enhance // this project. 
testImplementation 'junit:junit:4.13.2' - testImplementation 'xmlunit:xmlunit:1.6' + testImplementation 'org.xmlunit:xmlunit-legacy:2.10.4' testCompileOnly gradleTestKit() testImplementation "com.squareup.okhttp3:okhttp:${okhttpVersion}" diff --git a/test-app/build.gradle b/test-app/build.gradle index d9dc86e10..f60f7b413 100644 --- a/test-app/build.gradle +++ b/test-app/build.gradle @@ -9,8 +9,8 @@ plugins { } dependencies { - implementation "io.undertow:undertow-core:2.3.19.Final" - implementation "io.undertow:undertow-servlet:2.3.19.Final" + implementation "io.undertow:undertow-core:2.3.20.Final" + implementation "io.undertow:undertow-servlet:2.3.20.Final" implementation 'org.slf4j:slf4j-api:2.0.17' implementation 'ch.qos.logback:logback-classic:1.5.19' implementation "com.fasterxml.jackson.core:jackson-databind:${jacksonVersion}" From 1c51f56ccadff2410f1c8babf333143914b9e2ff Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Mon, 27 Oct 2025 10:57:16 -0400 Subject: [PATCH 07/15] MLE-24892 Cleaning up test dependencies Unfortunately can't upgrade to JUnit5 in ml-development-tools, far too many breaking changes to fix for now. 
--- marklogic-client-api/build.gradle | 2 -- .../com/marklogic/client/test/rows/TransformDocTest.java | 2 -- ml-development-tools/build.gradle | 8 ++------ 3 files changed, 2 insertions(+), 10 deletions(-) diff --git a/marklogic-client-api/build.gradle b/marklogic-client-api/build.gradle index 8d0aca524..11855b87b 100644 --- a/marklogic-client-api/build.gradle +++ b/marklogic-client-api/build.gradle @@ -38,8 +38,6 @@ dependencies { testImplementation 'org.junit.jupiter:junit-jupiter:5.14.0' - // Forcing junit version to avoid vulnerability with older version in xmlunit - testImplementation 'junit:junit:4.13.2' testImplementation 'org.xmlunit:xmlunit-legacy:2.10.4' testImplementation project(':examples') diff --git a/marklogic-client-api/src/test/java/com/marklogic/client/test/rows/TransformDocTest.java b/marklogic-client-api/src/test/java/com/marklogic/client/test/rows/TransformDocTest.java index f7ed8233b..9e18896f5 100644 --- a/marklogic-client-api/src/test/java/com/marklogic/client/test/rows/TransformDocTest.java +++ b/marklogic-client-api/src/test/java/com/marklogic/client/test/rows/TransformDocTest.java @@ -13,9 +13,7 @@ import com.marklogic.client.io.marker.AbstractWriteHandle; import com.marklogic.client.row.RowRecord; import com.marklogic.client.test.Common; -import com.marklogic.client.test.MarkLogicVersion; import com.marklogic.client.test.junit5.RequiresML11; -import org.junit.Before; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; diff --git a/ml-development-tools/build.gradle b/ml-development-tools/build.gradle index 9b0f67f85..37836848e 100644 --- a/ml-development-tools/build.gradle +++ b/ml-development-tools/build.gradle @@ -29,13 +29,9 @@ dependencies { // Sticking with this older version for now as the latest 1.x version introduces breaking changes. implementation 'com.networknt:json-schema-validator:1.0.88' - // Not yet migrating this project to JUnit 5. 
Will reconsider it once we have a reason to enhance - // this project. + // Sticking with JUnit 4 as there are no vulnerabilities with it, and shifting to JUnit 5 in this module will be + // a significant and tedious effort. testImplementation 'junit:junit:4.13.2' - testImplementation 'org.xmlunit:xmlunit-legacy:2.10.4' - testCompileOnly gradleTestKit() - - testImplementation "com.squareup.okhttp3:okhttp:${okhttpVersion}" } // Added to avoid problem where processResources fails because - somehow - the plugin properties file is getting From ad497c336f1fb44242fbbf9348711a005c3638c5 Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Tue, 18 Nov 2025 06:58:13 -0500 Subject: [PATCH 08/15] MLE-24892 Parameterizing image tags in Jenkinsfile --- Jenkinsfile | 73 ++++++++++++++++++----------------------------------- 1 file changed, 25 insertions(+), 48 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 080b15cf3..9226f1613 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -171,6 +171,7 @@ pipeline { parameters { booleanParam(name: 'regressions', defaultValue: false, description: 'indicator if build is for regressions') string(name: 'JAVA_VERSION', defaultValue: 'JAVA17', description: 'Either JAVA17 or JAVA21') + string(name: 'MARKLOGIC_IMAGE_TAGS', defaultValue: 'marklogic-server-ubi:latest-11,marklogic-server-ubi:latest-12', description: 'Comma-delimited list of MarkLogic image tags including variant (e.g., marklogic-server-ubi:latest-11,marklogic-server-ubi-rootless:11.3.2). 
The registry/org (ml-docker-db-dev-tierpoint.bed-artifactory.bedford.progress.com/marklogic) path will be prepended automatically.') } environment { @@ -237,60 +238,36 @@ pipeline { } } - stage('regressions-11') { + stage('regressions') { when { allOf { branch 'develop' expression { return params.regressions } } } - steps { - runTests("ml-docker-db-dev-tierpoint.bed-artifactory.bedford.progress.com/marklogic/marklogic-server-ubi:latest-11") - } - post { - always { - junit '**/build/**/TEST*.xml' - updateWorkspacePermissions() - tearDownDocker() - } - } - } - - // Latest run had 87 errors, which have been added to MLE-24523 for later research. -// stage('regressions-12-reverseProxy') { -// when { -// allOf { -// branch 'develop' -// expression {return params.regressions} -// } -// } -// steps { -// runTestsWithReverseProxy("ml-docker-db-dev-tierpoint.bed-artifactory.bedford.progress.com/marklogic/marklogic-server-ubi:latest-12") -// } -// post { -// always { -// junit '**/build/**/TEST*.xml' -// updateWorkspacePermissions() -// tearDownDocker() -// } -// } -// } - - stage('regressions-12') { - when { - allOf { - branch 'develop' - expression { return params.regressions } - } - } - steps { - runTests("ml-docker-db-dev-tierpoint.bed-artifactory.bedford.progress.com/marklogic/marklogic-server-ubi:latest-12") - } - post { - always { - junit '**/build/**/TEST*.xml' - updateWorkspacePermissions() - tearDownDocker() + steps { + script { + def imageTags = params.MARKLOGIC_IMAGE_TAGS.split(',') + def imagePrefix = 'ml-docker-db-dev-tierpoint.bed-artifactory.bedford.progress.com/marklogic/' + + def parallelStages = [:] + imageTags.each { tag -> + def fullImage = imagePrefix + tag.trim() + def stageName = "regressions-${tag.trim().replace(':', '-')}" + + parallelStages[stageName] = { + stage(stageName) { + try { + runTests(fullImage) + } finally { + junit '**/build/**/TEST*.xml' + updateWorkspacePermissions() + tearDownDocker() + } + } + } + } + parallel parallelStages } } } 
From 3791e8a4b4657fb42779d7ddbbcb1ff81d8a3171 Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Tue, 18 Nov 2025 08:29:03 -0500 Subject: [PATCH 09/15] MLE-24892 Fixing issue with parallel config --- Jenkinsfile | 45 ++++++++++++++++++++++++++----------------- test-app/build.gradle | 16 ++++++++++++++- 2 files changed, 42 insertions(+), 19 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 9226f1613..ef1af8976 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -20,12 +20,13 @@ def setupDockerMarkLogic(String image) { echo "Using image: "''' + image + ''' docker pull ''' + image + ''' MARKLOGIC_IMAGE=''' + image + ''' MARKLOGIC_LOGS_VOLUME=marklogicLogs docker compose up -d --build - echo "Waiting for MarkLogic server to initialize." - sleep 60s export JAVA_HOME=$JAVA_HOME_DIR export GRADLE_USER_HOME=$WORKSPACE/$GRADLE_DIR - export PATH=$GRADLE_USER_HOME:$JAVA_HOME/bin:$PATH - ./gradlew mlTestConnections + export PATH=$JAVA_HOME/bin:$PATH + ./gradlew -i mlWaitTillReady + sleep 3 + ./gradlew -i mlWaitTillReady + ./gradlew mlTestConnections ./gradlew -i mlDeploy mlReloadSchemas ''' } @@ -161,7 +162,7 @@ def tearDownDocker() { } pipeline { - agent { label 'javaClientLinuxPool' } + agent none options { checkoutToSubdirectory 'java-client-api' @@ -184,6 +185,7 @@ pipeline { stages { stage('pull-request-tests') { + agent { label 'javaClientLinuxPool' } when { not { expression { return params.regressions } @@ -219,7 +221,9 @@ pipeline { } } } + stage('publish') { + agent { label 'javaClientLinuxPool' } when { branch 'develop' not { @@ -245,28 +249,33 @@ pipeline { expression { return params.regressions } } } - steps { - script { - def imageTags = params.MARKLOGIC_IMAGE_TAGS.split(',') - def imagePrefix = 'ml-docker-db-dev-tierpoint.bed-artifactory.bedford.progress.com/marklogic/' - def parallelStages = [:] - imageTags.each { tag -> + steps { + script { + def imageTags = params.MARKLOGIC_IMAGE_TAGS.split(',') + def imagePrefix = 
'ml-docker-db-dev-tierpoint.bed-artifactory.bedford.progress.com/marklogic/' + + def parallelStages = [:] + + imageTags.each { tag -> def fullImage = imagePrefix + tag.trim() def stageName = "regressions-${tag.trim().replace(':', '-')}" parallelStages[stageName] = { - stage(stageName) { - try { - runTests(fullImage) - } finally { - junit '**/build/**/TEST*.xml' - updateWorkspacePermissions() - tearDownDocker() + node('javaClientLinuxPool') { + stage(stageName) { + try { + runTests(fullImage) + } finally { + junit '**/build/**/TEST*.xml' + updateWorkspacePermissions() + tearDownDocker() + } } } } } + parallel parallelStages } } diff --git a/test-app/build.gradle b/test-app/build.gradle index f60f7b413..80a907488 100644 --- a/test-app/build.gradle +++ b/test-app/build.gradle @@ -2,12 +2,26 @@ * Copyright (c) 2010-2025 Progress Software Corporation and/or its subsidiaries or affiliates. All Rights Reserved. */ +buildscript { + repositories { + mavenCentral() + // Needed for ml-gradle 6.2-SNAPSHOT + maven { + url = "https://bed-artifactory.bedford.progress.com:443/artifactory/ml-maven-snapshots/" + } + } + dependencies { + classpath "com.marklogic:ml-gradle:6.2-SNAPSHOT" + } +} + plugins { id "net.saliman.properties" version "1.5.2" - id 'com.marklogic.ml-gradle' version '6.1.0' id "com.github.psxpaul.execfork" version "0.2.2" } +apply plugin: "com.marklogic.ml-gradle" + dependencies { implementation "io.undertow:undertow-core:2.3.20.Final" implementation "io.undertow:undertow-servlet:2.3.20.Final" From 57ad84e5873cfe07d7df1cd61384f23dd009c948 Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Tue, 18 Nov 2025 09:09:33 -0500 Subject: [PATCH 10/15] MLE-24892 Backing off parallel config for now Having issues on Jenkins, just want to get dynamic stages in place for now. 
--- Jenkinsfile | 26 ++++++++------------------ 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index ef1af8976..5225054e6 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -162,7 +162,7 @@ def tearDownDocker() { } pipeline { - agent none + agent { label 'javaClientLinuxPool' } options { checkoutToSubdirectory 'java-client-api' @@ -185,7 +185,6 @@ pipeline { stages { stage('pull-request-tests') { - agent { label 'javaClientLinuxPool' } when { not { expression { return params.regressions } @@ -223,7 +222,6 @@ pipeline { } stage('publish') { - agent { label 'javaClientLinuxPool' } when { branch 'develop' not { @@ -255,28 +253,20 @@ pipeline { def imageTags = params.MARKLOGIC_IMAGE_TAGS.split(',') def imagePrefix = 'ml-docker-db-dev-tierpoint.bed-artifactory.bedford.progress.com/marklogic/' - def parallelStages = [:] - imageTags.each { tag -> def fullImage = imagePrefix + tag.trim() def stageName = "regressions-${tag.trim().replace(':', '-')}" - parallelStages[stageName] = { - node('javaClientLinuxPool') { - stage(stageName) { - try { - runTests(fullImage) - } finally { - junit '**/build/**/TEST*.xml' - updateWorkspacePermissions() - tearDownDocker() - } - } + stage(stageName) { + try { + runTests(fullImage) + } finally { + junit '**/build/**/TEST*.xml' + updateWorkspacePermissions() + tearDownDocker() } } } - - parallel parallelStages } } } From a035b8ee348a560d55a4b6368529f429acef8a55 Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Wed, 10 Dec 2025 11:16:34 -0500 Subject: [PATCH 11/15] MLE-25782 Some refactoring of BatchWriter Wanted to move this into a separate class so it's easier / cleaner to prototype an incremental check listener. Also made BatchWriteSet nicer by making a bunch of fields final. No change in functionality, just cleaning up code. 
--- .../datamovement/impl/BatchWriteSet.java | 198 ++++++++---------- .../client/datamovement/impl/BatchWriter.java | 81 +++++++ .../datamovement/impl/WriteBatcherImpl.java | 80 +------ 3 files changed, 171 insertions(+), 188 deletions(-) create mode 100644 marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriter.java diff --git a/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriteSet.java b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriteSet.java index ce4426563..8cd7593a6 100644 --- a/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriteSet.java +++ b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriteSet.java @@ -3,121 +3,95 @@ */ package com.marklogic.client.datamovement.impl; -import java.util.function.Consumer; - import com.marklogic.client.DatabaseClient; -import com.marklogic.client.document.DocumentWriteSet; -import com.marklogic.client.document.ServerTransform; import com.marklogic.client.datamovement.WriteBatch; import com.marklogic.client.datamovement.WriteBatcher; import com.marklogic.client.datamovement.WriteEvent; +import com.marklogic.client.document.DocumentWriteSet; +import com.marklogic.client.document.ServerTransform; + +import java.util.function.Consumer; -public class BatchWriteSet { - private WriteBatcher batcher; - private DocumentWriteSet writeSet; - private long batchNumber; - private long itemsSoFar; - private DatabaseClient client; - private ServerTransform transform; - private String temporalCollection; - private Runnable onSuccess; - private Consumer onFailure; - private Runnable onBeforeWrite; - - public BatchWriteSet(WriteBatcher batcher, DocumentWriteSet writeSet, DatabaseClient client, - ServerTransform transform, String temporalCollection) - { - this.batcher = batcher; - this.writeSet = writeSet; - this.client = client; - this.transform = transform; - 
this.temporalCollection = temporalCollection; - } - - public DocumentWriteSet getWriteSet() { - return writeSet; - } - - public void setWriteSet(DocumentWriteSet writeSet) { - this.writeSet = writeSet; - } - - public long getBatchNumber() { - return batchNumber; - } - - public void setBatchNumber(long batchNumber) { - this.batchNumber = batchNumber; - } - - public void setItemsSoFar(long itemsSoFar) { - this.itemsSoFar = itemsSoFar; - } - - public DatabaseClient getClient() { - return client; - } - - public void setClient(DatabaseClient client) { - this.client = client; - } - - public ServerTransform getTransform() { - return transform; - } - - public void setTransform(ServerTransform transform) { - this.transform = transform; - } - - public String getTemporalCollection() { - return temporalCollection; - } - - public void setTemporalCollection(String temporalCollection) { - this.temporalCollection = temporalCollection; - } - - public Runnable getOnSuccess() { - return onSuccess; - } - - public void onSuccess(Runnable onSuccess) { - this.onSuccess = onSuccess; - } - - public Consumer getOnFailure() { - return onFailure; - } - - public void onFailure(Consumer onFailure) { - this.onFailure = onFailure; - } - - public Runnable getOnBeforeWrite() { - return onBeforeWrite; - } - - public void onBeforeWrite(Runnable onBeforeWrite) { - this.onBeforeWrite = onBeforeWrite; - } - - public WriteBatch getBatchOfWriteEvents() { - WriteBatchImpl batch = new WriteBatchImpl() - .withBatcher(batcher) - .withClient(client) - .withJobBatchNumber(batchNumber) - .withJobWritesSoFar(itemsSoFar) - .withJobTicket(batcher.getJobTicket()); - WriteEvent[] writeEvents = getWriteSet().stream() - .map(writeOperation -> - new WriteEventImpl() - .withTargetUri(writeOperation.getUri()) - .withContent(writeOperation.getContent()) - .withMetadata(writeOperation.getMetadata()) - ) - .toArray(WriteEventImpl[]::new); - batch.withItems(writeEvents); - return batch; - } +class BatchWriteSet { + + private 
final WriteBatcher batcher; + private final DocumentWriteSet writeSet; + private final long batchNumber; + private final DatabaseClient client; + private final ServerTransform transform; + private final String temporalCollection; + + private long itemsSoFar; + private Runnable onSuccess; + private Consumer onFailure; + + BatchWriteSet(WriteBatcher batcher, DatabaseClient hostClient, ServerTransform transform, String temporalCollection, long batchNumber) { + this.batcher = batcher; + this.writeSet = hostClient.newDocumentManager().newWriteSet(); + this.client = hostClient; + this.transform = transform; + this.temporalCollection = temporalCollection; + this.batchNumber = batchNumber; + } + + public DocumentWriteSet getWriteSet() { + return writeSet; + } + + public long getBatchNumber() { + return batchNumber; + } + + public void setItemsSoFar(long itemsSoFar) { + this.itemsSoFar = itemsSoFar; + } + + public DatabaseClient getClient() { + return client; + } + + public ServerTransform getTransform() { + return transform; + } + + public String getTemporalCollection() { + return temporalCollection; + } + + public Runnable getOnSuccess() { + return onSuccess; + } + + public void onSuccess(Runnable onSuccess) { + this.onSuccess = onSuccess; + } + + public Consumer getOnFailure() { + return onFailure; + } + + public void onFailure(Consumer onFailure) { + this.onFailure = onFailure; + } + + public WriteBatch getBatchOfWriteEvents() { + WriteBatchImpl batch = new WriteBatchImpl() + .withBatcher(batcher) + .withClient(client) + .withJobBatchNumber(batchNumber) + .withJobWritesSoFar(itemsSoFar) + .withJobTicket(batcher.getJobTicket()); + + WriteEvent[] writeEvents = getWriteSet().stream() + .map(writeOperation -> + new WriteEventImpl() + .withTargetUri(writeOperation.getUri()) + .withContent(writeOperation.getContent()) + .withMetadata(writeOperation.getMetadata()) + ) + .toArray(WriteEventImpl[]::new); + + batch.withItems(writeEvents); + return batch; + } } diff --git 
a/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriter.java b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriter.java new file mode 100644 index 000000000..c3d1e6ffe --- /dev/null +++ b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriter.java @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2010-2025 Progress Software Corporation and/or its subsidiaries or affiliates. All Rights Reserved. + */ +package com.marklogic.client.datamovement.impl; + +import com.marklogic.client.document.DocumentWriteOperation; +import com.marklogic.client.document.XMLDocumentManager; +import com.marklogic.client.io.Format; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.Closeable; +import java.util.function.Consumer; + +class BatchWriter implements Runnable { + + private static Logger logger = LoggerFactory.getLogger(WriteBatcherImpl.class); + + private final BatchWriteSet writeSet; + + BatchWriter(BatchWriteSet writeSet) { + if (writeSet.getWriteSet().size() == 0) { + throw new IllegalStateException("Attempt to write an empty batch"); + } + this.writeSet = writeSet; + } + + @Override + public void run() { + try { + logger.trace("begin write batch {} to forest on host \"{}\"", writeSet.getBatchNumber(), writeSet.getClient().getHost()); + if (writeSet.getTemporalCollection() == null) { + writeSet.getClient().newDocumentManager().write( + writeSet.getWriteSet(), writeSet.getTransform(), null + ); + } else { + // to get access to the TemporalDocumentManager write overload we need to instantiate + // a JSONDocumentManager or XMLDocumentManager, but we don't want to make assumptions about content + // format, so we'll set the default content format to unknown + XMLDocumentManager docMgr = writeSet.getClient().newXMLDocumentManager(); + docMgr.setContentFormat(Format.UNKNOWN); + docMgr.write( + writeSet.getWriteSet(), writeSet.getTransform(), null, 
writeSet.getTemporalCollection() + ); + } + closeAllHandles(); + Runnable onSuccess = writeSet.getOnSuccess(); + if (onSuccess != null) { + onSuccess.run(); + } + } catch (Throwable t) { + logger.trace("failed batch sent to forest on host \"{}\"", writeSet.getClient().getHost()); + Consumer onFailure = writeSet.getOnFailure(); + if (onFailure != null) { + onFailure.accept(t); + } + } + } + + private void closeAllHandles() throws Throwable { + Throwable lastThrowable = null; + for (DocumentWriteOperation doc : writeSet.getWriteSet()) { + try { + if (doc.getContent() instanceof Closeable) { + ((Closeable) doc.getContent()).close(); + } + if (doc.getMetadata() instanceof Closeable) { + ((Closeable) doc.getMetadata()).close(); + } + } catch (Throwable t) { + logger.error("error calling close()", t); + lastThrowable = t; + } + } + if (lastThrowable != null) throw lastThrowable; + } + + public BatchWriteSet getWriteSet() { + return writeSet; + } +} diff --git a/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/WriteBatcherImpl.java b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/WriteBatcherImpl.java index a87775285..424eaff13 100644 --- a/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/WriteBatcherImpl.java +++ b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/WriteBatcherImpl.java @@ -3,7 +3,6 @@ */ package com.marklogic.client.datamovement.impl; -import java.io.Closeable; import java.util.*; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; @@ -12,7 +11,6 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Consumer; import java.util.stream.Stream; import org.slf4j.Logger; @@ -22,10 +20,8 @@ import com.marklogic.client.DatabaseClientFactory; import com.marklogic.client.document.DocumentWriteOperation; import 
com.marklogic.client.document.ServerTransform; -import com.marklogic.client.document.XMLDocumentManager; import com.marklogic.client.document.DocumentWriteOperation.OperationType; import com.marklogic.client.io.DocumentMetadataHandle; -import com.marklogic.client.io.Format; import com.marklogic.client.impl.DocumentWriteOperationImpl; import com.marklogic.client.impl.Utilities; import com.marklogic.client.io.marker.AbstractWriteHandle; @@ -281,10 +277,7 @@ private BatchWriteSet newBatchWriteSet() { private BatchWriteSet newBatchWriteSet(long batchNum) { int hostToUse = (int) (batchNum % hostInfos.length); HostInfo host = hostInfos[hostToUse]; - DatabaseClient hostClient = host.client; - BatchWriteSet batchWriteSet = new BatchWriteSet(this, hostClient.newDocumentManager().newWriteSet(), - hostClient, getTransform(), getTemporalCollection()); - batchWriteSet.setBatchNumber(batchNum); + BatchWriteSet batchWriteSet = new BatchWriteSet(this, host.client, getTransform(), getTemporalCollection(), batchNum); batchWriteSet.onSuccess( () -> { sendSuccessToListeners(batchWriteSet); }); @@ -613,15 +606,15 @@ public synchronized WriteBatcher withForestConfig(ForestConfiguration forestConf for ( Runnable task : tasks ) { if ( task instanceof BatchWriter ) { BatchWriter writerTask = (BatchWriter) task; - if ( removedHostInfos.containsKey(writerTask.writeSet.getClient().getHost()) ) { + if ( removedHostInfos.containsKey(writerTask.getWriteSet().getClient().getHost()) ) { // this batch was targeting a host that's no longer on the list // if we re-add these docs they'll now be in batches that target acceptable hosts - BatchWriteSet writeSet = newBatchWriteSet(writerTask.writeSet.getBatchNumber()); + BatchWriteSet writeSet = newBatchWriteSet(writerTask.getWriteSet().getBatchNumber()); writeSet.onFailure(throwable -> { if ( throwable instanceof RuntimeException ) throw (RuntimeException) throwable; else throw new DataMovementException("Failed to retry batch after failover", throwable); 
}); - for ( WriteEvent doc : writerTask.writeSet.getBatchOfWriteEvents().getItems() ) { + for ( WriteEvent doc : writerTask.getWriteSet().getBatchOfWriteEvents().getItems() ) { writeSet.getWriteSet().add(doc.getTargetUri(), doc.getMetadata(), doc.getContent()); } BatchWriter retryWriterTask = new BatchWriter(writeSet); @@ -649,71 +642,6 @@ public static class HostInfo { public DatabaseClient client; } - public static class BatchWriter implements Runnable { - private BatchWriteSet writeSet; - - public BatchWriter(BatchWriteSet writeSet) { - if ( writeSet.getWriteSet().size() == 0 ) { - throw new IllegalStateException("Attempt to write an empty batch"); - } - this.writeSet = writeSet; - } - - @Override - public void run() { - try { - Runnable onBeforeWrite = writeSet.getOnBeforeWrite(); - if ( onBeforeWrite != null ) { - onBeforeWrite.run(); - } - logger.trace("begin write batch {} to forest on host \"{}\"", writeSet.getBatchNumber(), writeSet.getClient().getHost()); - if ( writeSet.getTemporalCollection() == null ) { - writeSet.getClient().newDocumentManager().write( - writeSet.getWriteSet(), writeSet.getTransform(), null - ); - } else { - // to get access to the TemporalDocumentManager write overload we need to instantiate - // a JSONDocumentManager or XMLDocumentManager, but we don't want to make assumptions about content - // format, so we'll set the default content format to unknown - XMLDocumentManager docMgr = writeSet.getClient().newXMLDocumentManager(); - docMgr.setContentFormat(Format.UNKNOWN); - docMgr.write( - writeSet.getWriteSet(), writeSet.getTransform(), null, writeSet.getTemporalCollection() - ); - } - closeAllHandles(); - Runnable onSuccess = writeSet.getOnSuccess(); - if ( onSuccess != null ) { - onSuccess.run(); - } - } catch (Throwable t) { - logger.trace("failed batch sent to forest on host \"{}\"", writeSet.getClient().getHost()); - Consumer onFailure = writeSet.getOnFailure(); - if ( onFailure != null ) { - onFailure.accept(t); - } - } - } - - 
private void closeAllHandles() throws Throwable { - Throwable lastThrowable = null; - for ( DocumentWriteOperation doc : writeSet.getWriteSet() ) { - try { - if ( doc.getContent() instanceof Closeable ) { - ((Closeable) doc.getContent()).close(); - } - if ( doc.getMetadata() instanceof Closeable ) { - ((Closeable) doc.getMetadata()).close(); - } - } catch (Throwable t) { - logger.error("error calling close()", t); - lastThrowable = t; - } - } - if ( lastThrowable != null ) throw lastThrowable; - } - } - /** * The following classes and CompletableThreadPoolExecutor * CompletableRejectedExecutionHandler exist exclusively to enable the From 1e28465b1ec456fe6f9bf23067ae94f0fba87d89 Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Wed, 10 Dec 2025 14:32:06 -0500 Subject: [PATCH 12/15] MLE-25782 Refactor: Explicit naming of writeSet objects Was going crazy trying to figure out if a "writeSet" was a BatchWriteSet or DocumentWriteSet. Now using more explicit names. --- .../datamovement/impl/BatchWriteSet.java | 14 +++++--- .../client/datamovement/impl/BatchWriter.java | 32 +++++++++---------- .../datamovement/impl/WriteBatcherImpl.java | 22 ++++++------- 3 files changed, 36 insertions(+), 32 deletions(-) diff --git a/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriteSet.java b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriteSet.java index 8cd7593a6..0c08fdd7b 100644 --- a/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriteSet.java +++ b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriteSet.java @@ -12,10 +12,14 @@ import java.util.function.Consumer; +/** + * Mutable class that captures the documents to be written. Documents are added via calls to "getDocumentWriteSet()", where the + * DocumentWriteSet is empty when this class is constructed. 
+ */ class BatchWriteSet { private final WriteBatcher batcher; - private final DocumentWriteSet writeSet; + private final DocumentWriteSet documentWriteSet; private final long batchNumber; private final DatabaseClient client; private final ServerTransform transform; @@ -27,15 +31,15 @@ class BatchWriteSet { BatchWriteSet(WriteBatcher batcher, DatabaseClient hostClient, ServerTransform transform, String temporalCollection, long batchNumber) { this.batcher = batcher; - this.writeSet = hostClient.newDocumentManager().newWriteSet(); + this.documentWriteSet = hostClient.newDocumentManager().newWriteSet(); this.client = hostClient; this.transform = transform; this.temporalCollection = temporalCollection; this.batchNumber = batchNumber; } - public DocumentWriteSet getWriteSet() { - return writeSet; + public DocumentWriteSet getDocumentWriteSet() { + return documentWriteSet; } public long getBatchNumber() { @@ -82,7 +86,7 @@ public WriteBatch getBatchOfWriteEvents() { .withJobWritesSoFar(itemsSoFar) .withJobTicket(batcher.getJobTicket()); - WriteEvent[] writeEvents = getWriteSet().stream() + WriteEvent[] writeEvents = getDocumentWriteSet().stream() .map(writeOperation -> new WriteEventImpl() .withTargetUri(writeOperation.getUri()) diff --git a/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriter.java b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriter.java index c3d1e6ffe..037a781f3 100644 --- a/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriter.java +++ b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/BatchWriter.java @@ -16,41 +16,41 @@ class BatchWriter implements Runnable { private static Logger logger = LoggerFactory.getLogger(WriteBatcherImpl.class); - private final BatchWriteSet writeSet; + private final BatchWriteSet batchWriteSet; - BatchWriter(BatchWriteSet writeSet) { - if (writeSet.getWriteSet().size() == 0) { + 
BatchWriter(BatchWriteSet batchWriteSet) { + if (batchWriteSet.getDocumentWriteSet().size() == 0) { throw new IllegalStateException("Attempt to write an empty batch"); } - this.writeSet = writeSet; + this.batchWriteSet = batchWriteSet; } @Override public void run() { try { - logger.trace("begin write batch {} to forest on host \"{}\"", writeSet.getBatchNumber(), writeSet.getClient().getHost()); - if (writeSet.getTemporalCollection() == null) { - writeSet.getClient().newDocumentManager().write( - writeSet.getWriteSet(), writeSet.getTransform(), null + logger.trace("begin write batch {} to forest on host \"{}\"", batchWriteSet.getBatchNumber(), batchWriteSet.getClient().getHost()); + if (batchWriteSet.getTemporalCollection() == null) { + batchWriteSet.getClient().newDocumentManager().write( + batchWriteSet.getDocumentWriteSet(), batchWriteSet.getTransform(), null ); } else { // to get access to the TemporalDocumentManager write overload we need to instantiate // a JSONDocumentManager or XMLDocumentManager, but we don't want to make assumptions about content // format, so we'll set the default content format to unknown - XMLDocumentManager docMgr = writeSet.getClient().newXMLDocumentManager(); + XMLDocumentManager docMgr = batchWriteSet.getClient().newXMLDocumentManager(); docMgr.setContentFormat(Format.UNKNOWN); docMgr.write( - writeSet.getWriteSet(), writeSet.getTransform(), null, writeSet.getTemporalCollection() + batchWriteSet.getDocumentWriteSet(), batchWriteSet.getTransform(), null, batchWriteSet.getTemporalCollection() ); } closeAllHandles(); - Runnable onSuccess = writeSet.getOnSuccess(); + Runnable onSuccess = batchWriteSet.getOnSuccess(); if (onSuccess != null) { onSuccess.run(); } } catch (Throwable t) { - logger.trace("failed batch sent to forest on host \"{}\"", writeSet.getClient().getHost()); - Consumer onFailure = writeSet.getOnFailure(); + logger.trace("failed batch sent to forest on host \"{}\"", batchWriteSet.getClient().getHost()); + Consumer 
onFailure = batchWriteSet.getOnFailure(); if (onFailure != null) { onFailure.accept(t); } @@ -59,7 +59,7 @@ public void run() { private void closeAllHandles() throws Throwable { Throwable lastThrowable = null; - for (DocumentWriteOperation doc : writeSet.getWriteSet()) { + for (DocumentWriteOperation doc : batchWriteSet.getDocumentWriteSet()) { try { if (doc.getContent() instanceof Closeable) { ((Closeable) doc.getContent()).close(); @@ -75,7 +75,7 @@ private void closeAllHandles() throws Throwable { if (lastThrowable != null) throw lastThrowable; } - public BatchWriteSet getWriteSet() { - return writeSet; + public BatchWriteSet getBatchWriteSet() { + return batchWriteSet; } } diff --git a/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/WriteBatcherImpl.java b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/WriteBatcherImpl.java index 424eaff13..e7ae80d9c 100644 --- a/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/WriteBatcherImpl.java +++ b/marklogic-client-api/src/main/java/com/marklogic/client/datamovement/impl/WriteBatcherImpl.java @@ -204,7 +204,7 @@ public WriteBatcher add(DocumentWriteOperation writeOperation) { BatchWriteSet writeSet = newBatchWriteSet(); int minBatchSize = 0; if(defaultMetadata != null) { - writeSet.getWriteSet().add(new DocumentWriteOperationImpl(OperationType.METADATA_DEFAULT, null, defaultMetadata, null)); + writeSet.getDocumentWriteSet().add(new DocumentWriteOperationImpl(OperationType.METADATA_DEFAULT, null, defaultMetadata, null)); minBatchSize = 1; } for (int i=0; i < getBatchSize(); i++ ) { @@ -213,9 +213,9 @@ public WriteBatcher add(DocumentWriteOperation writeOperation) { // strange, there should have been a full batch of docs in the queue... 
break; } - writeSet.getWriteSet().add(doc); + writeSet.getDocumentWriteSet().add(doc); } - if ( writeSet.getWriteSet().size() > minBatchSize ) { + if ( writeSet.getDocumentWriteSet().size() > minBatchSize ) { threadPool.submit( new BatchWriter(writeSet) ); } } @@ -326,7 +326,7 @@ private void retry(WriteBatch batch, boolean callFailListeners) { }); } for (WriteEvent doc : batch.getItems()) { - writeSet.getWriteSet().add(doc.getTargetUri(), doc.getMetadata(), doc.getContent()); + writeSet.getDocumentWriteSet().add(doc.getTargetUri(), doc.getMetadata(), doc.getContent()); } BatchWriter runnable = new BatchWriter(writeSet); runnable.run(); @@ -392,12 +392,12 @@ private void flush(boolean waitForCompletion) { } BatchWriteSet writeSet = newBatchWriteSet(); if(defaultMetadata != null) { - writeSet.getWriteSet().add(new DocumentWriteOperationImpl(OperationType.METADATA_DEFAULT, null, defaultMetadata, null)); + writeSet.getDocumentWriteSet().add(new DocumentWriteOperationImpl(OperationType.METADATA_DEFAULT, null, defaultMetadata, null)); } int j=0; for ( ; j < getBatchSize() && iter.hasNext(); j++ ) { DocumentWriteOperation doc = iter.next(); - writeSet.getWriteSet().add(doc); + writeSet.getDocumentWriteSet().add(doc); } threadPool.submit( new BatchWriter(writeSet) ); } @@ -406,7 +406,7 @@ private void flush(boolean waitForCompletion) { } private void sendSuccessToListeners(BatchWriteSet batchWriteSet) { - batchWriteSet.setItemsSoFar(itemsSoFar.addAndGet(batchWriteSet.getWriteSet().size())); + batchWriteSet.setItemsSoFar(itemsSoFar.addAndGet(batchWriteSet.getDocumentWriteSet().size())); WriteBatch batch = batchWriteSet.getBatchOfWriteEvents(); for ( WriteBatchListener successListener : successListeners ) { try { @@ -606,16 +606,16 @@ public synchronized WriteBatcher withForestConfig(ForestConfiguration forestConf for ( Runnable task : tasks ) { if ( task instanceof BatchWriter ) { BatchWriter writerTask = (BatchWriter) task; - if ( 
removedHostInfos.containsKey(writerTask.getWriteSet().getClient().getHost()) ) { + if ( removedHostInfos.containsKey(writerTask.getBatchWriteSet().getClient().getHost()) ) { // this batch was targeting a host that's no longer on the list // if we re-add these docs they'll now be in batches that target acceptable hosts - BatchWriteSet writeSet = newBatchWriteSet(writerTask.getWriteSet().getBatchNumber()); + BatchWriteSet writeSet = newBatchWriteSet(writerTask.getBatchWriteSet().getBatchNumber()); writeSet.onFailure(throwable -> { if ( throwable instanceof RuntimeException ) throw (RuntimeException) throwable; else throw new DataMovementException("Failed to retry batch after failover", throwable); }); - for ( WriteEvent doc : writerTask.getWriteSet().getBatchOfWriteEvents().getItems() ) { - writeSet.getWriteSet().add(doc.getTargetUri(), doc.getMetadata(), doc.getContent()); + for ( WriteEvent doc : writerTask.getBatchWriteSet().getBatchOfWriteEvents().getItems() ) { + writeSet.getDocumentWriteSet().add(doc.getTargetUri(), doc.getMetadata(), doc.getContent()); } BatchWriter retryWriterTask = new BatchWriter(writeSet); Runnable fretryWriterTask = (Runnable) threadPool.submit(retryWriterTask); From 8c2f2ebd245d05752cd4fe1dde6e67fbe5033488 Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Thu, 11 Dec 2025 13:29:19 -0500 Subject: [PATCH 13/15] MLE-25959 Deprecating AUTH_TYPE_MARKLOGIC_CLOUD Should have renamed this in a prior release. 
--- .../marklogic/client/DatabaseClientBuilder.java | 14 +++++++++++--- .../client/impl/DatabaseClientPropertySource.java | 4 ++-- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/marklogic-client-api/src/main/java/com/marklogic/client/DatabaseClientBuilder.java b/marklogic-client-api/src/main/java/com/marklogic/client/DatabaseClientBuilder.java index 976f870e6..a39d00802 100644 --- a/marklogic-client-api/src/main/java/com/marklogic/client/DatabaseClientBuilder.java +++ b/marklogic-client-api/src/main/java/com/marklogic/client/DatabaseClientBuilder.java @@ -29,7 +29,15 @@ public class DatabaseClientBuilder { public final static String PREFIX = "marklogic.client."; public final static String AUTH_TYPE_BASIC = "basic"; public final static String AUTH_TYPE_DIGEST = "digest"; - public final static String AUTH_TYPE_MARKLOGIC_CLOUD = "cloud"; + + public final static String AUTH_TYPE_CLOUD = "cloud"; + + /** + * @deprecated as of 8.1.0, use AUTH_TYPE_CLOUD instead + */ + @Deprecated + public final static String AUTH_TYPE_MARKLOGIC_CLOUD = AUTH_TYPE_CLOUD; + public final static String AUTH_TYPE_KERBEROS = "kerberos"; public final static String AUTH_TYPE_CERTIFICATE = "certificate"; public final static String AUTH_TYPE_SAML = "saml"; @@ -150,7 +158,7 @@ public DatabaseClientBuilder withDigestAuth(String username, String password) { } public DatabaseClientBuilder withCloudAuth(String apiKey, String basePath) { - return withAuthType(AUTH_TYPE_MARKLOGIC_CLOUD) + return withAuthType(AUTH_TYPE_CLOUD) .withCloudApiKey(apiKey) .withBasePath(basePath); } @@ -163,7 +171,7 @@ public DatabaseClientBuilder withCloudAuth(String apiKey, String basePath) { * @since 6.3.0 */ public DatabaseClientBuilder withCloudAuth(String apiKey, String basePath, Integer tokenDuration) { - return withAuthType(AUTH_TYPE_MARKLOGIC_CLOUD) + return withAuthType(AUTH_TYPE_CLOUD) .withCloudApiKey(apiKey) .withBasePath(basePath) .withCloudTokenDuration(tokenDuration != null ? 
tokenDuration.toString() : null); diff --git a/marklogic-client-api/src/main/java/com/marklogic/client/impl/DatabaseClientPropertySource.java b/marklogic-client-api/src/main/java/com/marklogic/client/impl/DatabaseClientPropertySource.java index 806815e05..ceffa379a 100644 --- a/marklogic-client-api/src/main/java/com/marklogic/client/impl/DatabaseClientPropertySource.java +++ b/marklogic-client-api/src/main/java/com/marklogic/client/impl/DatabaseClientPropertySource.java @@ -185,7 +185,7 @@ private DatabaseClientFactory.SecurityContext newSecurityContext(String type, Co return newBasicAuthContext(connectionString); case DatabaseClientBuilder.AUTH_TYPE_DIGEST: return newDigestAuthContext(connectionString); - case DatabaseClientBuilder.AUTH_TYPE_MARKLOGIC_CLOUD: + case DatabaseClientBuilder.AUTH_TYPE_CLOUD: return newCloudAuthContext(); case DatabaseClientBuilder.AUTH_TYPE_KERBEROS: return newKerberosAuthContext(); @@ -400,7 +400,7 @@ private String getSSLProtocol(String authType) { } // For convenience for Progress Data Cloud users, assume the JVM's default SSLContext should trust the certificate // used by Progress Data Cloud. A user can always override this default behavior by providing their own SSLContext. - if ((sslProtocol == null || sslProtocol.length() == 0) && DatabaseClientBuilder.AUTH_TYPE_MARKLOGIC_CLOUD.equalsIgnoreCase(authType)) { + if ((sslProtocol == null || sslProtocol.length() == 0) && DatabaseClientBuilder.AUTH_TYPE_CLOUD.equalsIgnoreCase(authType)) { sslProtocol = "default"; } return sslProtocol; From 823409d6dacc109cdc6379c360b130c7bf147ff2 Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Tue, 16 Dec 2025 10:20:04 -0500 Subject: [PATCH 14/15] MLE-25959 Quieted down PDC logging Info-level was too verbose for token generation. Also fixed a little bug in an error message built for an exception. 
--- ...ressDataCloudAuthenticationConfigurer.java | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/marklogic-client-api/src/main/java/com/marklogic/client/impl/okhttp/ProgressDataCloudAuthenticationConfigurer.java b/marklogic-client-api/src/main/java/com/marklogic/client/impl/okhttp/ProgressDataCloudAuthenticationConfigurer.java index afd35b527..3730cc6a2 100644 --- a/marklogic-client-api/src/main/java/com/marklogic/client/impl/okhttp/ProgressDataCloudAuthenticationConfigurer.java +++ b/marklogic-client-api/src/main/java/com/marklogic/client/impl/okhttp/ProgressDataCloudAuthenticationConfigurer.java @@ -24,7 +24,7 @@ class ProgressDataCloudAuthenticationConfigurer implements AuthenticationConfigu @Override public void configureAuthentication(OkHttpClient.Builder clientBuilder, DatabaseClientFactory.ProgressDataCloudAuthContext securityContext) { final String apiKey = securityContext.getApiKey(); - if (apiKey == null || apiKey.trim().length() < 1) { + if (apiKey == null || apiKey.trim().isEmpty()) { throw new IllegalArgumentException("No API key provided"); } TokenGenerator tokenGenerator = new DefaultTokenGenerator(this.host, securityContext); @@ -56,8 +56,8 @@ public DefaultTokenGenerator(String host, DatabaseClientFactory.ProgressDataClou public String generateToken() { final Response tokenResponse = callTokenEndpoint(); String token = getAccessTokenFromResponse(tokenResponse); - if (logger.isInfoEnabled()) { - logger.info("Successfully obtained authentication token"); + if (logger.isDebugEnabled()) { + logger.debug("Successfully obtained authentication token"); } return token; } @@ -70,8 +70,8 @@ private Response callTokenEndpoint() { OkHttpUtil.configureSocketFactory(clientBuilder, securityContext.getSSLContext(), securityContext.getTrustManager()); OkHttpUtil.configureHostnameVerifier(clientBuilder, securityContext.getSSLHostnameVerifier()); - if (logger.isInfoEnabled()) { - logger.info("Calling token endpoint at: " + 
tokenUrl); + if (logger.isDebugEnabled()) { + logger.debug("Calling token endpoint at: {}", tokenUrl); } final Call call = clientBuilder.build().newCall( @@ -85,7 +85,7 @@ private Response callTokenEndpoint() { return call.execute(); } catch (IOException e) { throw new ProgressDataCloudException(String.format("Unable to call token endpoint at %s; cause: %s", - tokenUrl, e.getMessage(), e)); + tokenUrl, e.getMessage()), e); } } @@ -97,7 +97,8 @@ protected HttpUrl buildTokenUrl() { .host(host) .port(443) .build() - .resolve(securityContext.getTokenEndpoint()).newBuilder(); + .resolve(securityContext.getTokenEndpoint()) + .newBuilder(); Integer duration = securityContext.getTokenDuration(); return duration != null ? @@ -146,7 +147,7 @@ public TokenAuthenticationInterceptor(TokenGenerator tokenGenerator) { @Override public Response intercept(Chain chain) throws IOException { Request.Builder builder = chain.request().newBuilder(); - addTokenToRequest(builder); + builder = addTokenToRequest(builder); Response response = chain.proceed(builder.build()); if (response.code() == 401) { logger.info("Received 401; will generate new token if necessary and retry request"); @@ -155,7 +156,7 @@ public Response intercept(Chain chain) throws IOException { generateNewTokenIfNecessary(currentToken); builder = chain.request().newBuilder(); - addTokenToRequest(builder); + builder = addTokenToRequest(builder); response = chain.proceed(builder.build()); } return response; From be95f2ded25d50fbbb7c0af0ad65fc486a32b34e Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Wed, 17 Dec 2025 16:46:02 -0500 Subject: [PATCH 15/15] MLE-26370 Reproducible test for QBV issue --- .../datamovement/RowBatcherFailureTest.java | 27 +++++++++++++++++++ test-app/build.gradle | 1 + .../ml-schemas/qbv/permissions.properties | 1 + test-app/src/main/ml-schemas/qbv/qbv.js | 3 +++ test-app/src/main/ml-schemas/qbv/qbv2.js | 19 +++++++++++++ .../src/main/ml-schemas/tde/musician2.xml | 21 +++++++++++++++ 
.../ml-schemas/tde/permissions.properties | 2 +- 7 files changed, 73 insertions(+), 1 deletion(-) create mode 100644 test-app/src/main/ml-schemas/qbv/permissions.properties create mode 100644 test-app/src/main/ml-schemas/qbv/qbv.js create mode 100644 test-app/src/main/ml-schemas/qbv/qbv2.js create mode 100644 test-app/src/main/ml-schemas/tde/musician2.xml diff --git a/marklogic-client-api/src/test/java/com/marklogic/client/test/datamovement/RowBatcherFailureTest.java b/marklogic-client-api/src/test/java/com/marklogic/client/test/datamovement/RowBatcherFailureTest.java index 6e88e88f2..64eaddf6e 100644 --- a/marklogic-client-api/src/test/java/com/marklogic/client/test/datamovement/RowBatcherFailureTest.java +++ b/marklogic-client-api/src/test/java/com/marklogic/client/test/datamovement/RowBatcherFailureTest.java @@ -22,6 +22,33 @@ public class RowBatcherFailureTest { + @Test + void qbv() { + DatabaseClient client = Common.newClient(); + DataMovementManager dmm = client.newDataMovementManager(); + + RowManager rowManager = client.newRowManager(); + PlanBuilder op = rowManager.newPlanBuilder(); + PlanBuilder.ModifyPlan plan = op + .fromView("qbv", "musicians"); + + List returnedRows = new ArrayList<>(); + List batchFailures = new ArrayList<>(); + + RowBatcher rowBatcher = dmm.newRowBatcher(new JacksonHandle()) + .withBatchView(plan) + .withBatchSize(Integer.MAX_VALUE) // guarantees a single batch + .onSuccess(batch -> returnedRows.add(batch.getRowsDoc())) + .onFailure(((batch, throwable) -> batchFailures.add(throwable))); + + dmm.startJob(rowBatcher); + rowBatcher.awaitCompletion(); + dmm.stopJob(rowBatcher); + + System.out.println("Returned rows: " + returnedRows); + System.out.println("Batch failures: " + batchFailures); + } + @Test void invalidQuery() { DatabaseClient client = Common.newClient(); diff --git a/test-app/build.gradle b/test-app/build.gradle index 80a907488..357f3766f 100644 --- a/test-app/build.gradle +++ b/test-app/build.gradle @@ -5,6 +5,7 @@ 
buildscript { repositories { mavenCentral() + mavenLocal() // Needed for ml-gradle 6.2-SNAPSHOT maven { url = "https://bed-artifactory.bedford.progress.com:443/artifactory/ml-maven-snapshots/" diff --git a/test-app/src/main/ml-schemas/qbv/permissions.properties b/test-app/src/main/ml-schemas/qbv/permissions.properties new file mode 100644 index 000000000..fa295e21f --- /dev/null +++ b/test-app/src/main/ml-schemas/qbv/permissions.properties @@ -0,0 +1 @@ +*=rest-reader,read,admin,read,rest-writer,update diff --git a/test-app/src/main/ml-schemas/qbv/qbv.js b/test-app/src/main/ml-schemas/qbv/qbv.js new file mode 100644 index 000000000..d4f4000fa --- /dev/null +++ b/test-app/src/main/ml-schemas/qbv/qbv.js @@ -0,0 +1,3 @@ +const op = require("/MarkLogic/optic"); +op.fromView("opticUnitTest", "musician") + .generateView("qbv", "musicians"); diff --git a/test-app/src/main/ml-schemas/qbv/qbv2.js b/test-app/src/main/ml-schemas/qbv/qbv2.js new file mode 100644 index 000000000..a57328f14 --- /dev/null +++ b/test-app/src/main/ml-schemas/qbv/qbv2.js @@ -0,0 +1,19 @@ +const op = require("/MarkLogic/optic"); + +op.fromView("opticUnitTest", "musician") + + // Remove this select to see the error occur due to two "rowid" columns. 
+ .select(["firstName", "lastName"]) + + .joinInner( + op.fromView("opticUnitTest", "musician2"), + op.on(op.col("firstName"), op.col("firstName2")) + ) + .generateView("qbv", "musicians2") + + +// ML 11 +// SQL-AMBCOLUMN: result = plan.generateView(plandef, schemaName, viewName); -- Ambiguous column reference: found opticUnitTest.musician.rowid and opticUnitTest.musician2.rowid + +// ML 12 - 12.1.20251217 +// SQL-AMBCOLUMN: result = plan.generateView(plandef, schemaName, viewName); -- Ambiguous column reference: found opticUnitTest.musician.rowid and opticUnitTest.musician2.rowid diff --git a/test-app/src/main/ml-schemas/tde/musician2.xml b/test-app/src/main/ml-schemas/tde/musician2.xml new file mode 100644 index 000000000..7472ff75b --- /dev/null +++ b/test-app/src/main/ml-schemas/tde/musician2.xml @@ -0,0 +1,21 @@ + diff --git a/test-app/src/main/ml-schemas/tde/permissions.properties b/test-app/src/main/ml-schemas/tde/permissions.properties index 9aabfbd18..fa295e21f 100644 --- a/test-app/src/main/ml-schemas/tde/permissions.properties +++ b/test-app/src/main/ml-schemas/tde/permissions.properties @@ -1 +1 @@ -*=rest-reader,read,rest-writer,update \ No newline at end of file +*=rest-reader,read,admin,read,rest-writer,update