
Commit 2fdd4f5

Merge of 2 parents: eb71990 + 5fa30a1

10 files changed (+22, -19 lines)


ansible/roles/pipelines_jenkins/templates/jenkins/jobs/Dwc-verbatim-all/config.xml.j2 (+2, -2)

@@ -86,8 +86,8 @@ stage ('Clear file locks') {
   datasets.removeAll(batch3.split(" "))
   datasets.removeAll(batch4.split(" "))

-  //split into 6 batches
-  def batches = ["",""]
+  //split into n-node batches
+  def batches = [""] * jenkins.model.Jenkins.instance.nodes.size()
   datasets.eachWithIndex { datasetId, index ->
     batches[index.mod(2)] = batches[index.mod(2)] + " " + datasetId
   }
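
The replacement initializer leans on Groovy's list multiply operator: repeating a single empty string once per build agent sizes the batch list dynamically instead of hard-coding it. A minimal sketch of the idiom, with the agent count made up for illustration (in the job it comes from jenkins.model.Jenkins.instance.nodes.size()):

    // Groovy repeats list elements with *, so the accumulator list grows
    // with the number of agents instead of staying fixed at two entries.
    def n = 3   // stands in for jenkins.model.Jenkins.instance.nodes.size()
    def batches = [""] * n
    assert batches == ["", "", ""]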

ansible/roles/pipelines_jenkins/templates/jenkins/jobs/Image-load-all/config.xml.j2 (+2, -2)

@@ -54,8 +54,8 @@ stage('Run image load on jenkins nodes') {
 }

 @NonCPS def batchDatasets(datasets, noOfBatches){
-  //split into 6 batches
-  def batches = ["","","","","",""]
+  //split into n-node batches
+  def batches = [""] * noOfBatches
   datasets.eachWithIndex { datasetId, index ->
     batches[index.mod(noOfBatches)] = datasetId + " " + batches[index.mod(noOfBatches)]
   }
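
The same change repeats across the remaining jobs: the @NonCPS batchDatasets helper now builds its accumulator from the noOfBatches argument rather than a fixed six-element list, and distributes dataset IDs round-robin with index.mod(noOfBatches). A standalone sketch of that helper follows; the @NonCPS annotation is dropped and an explicit return added only so the snippet runs outside Jenkins, and the dataset IDs are invented:

    // Plain-Groovy version of the helper. In the pipeline the method carries
    // @NonCPS so its closure-based iteration runs as native Groovy instead of
    // being CPS-transformed by Jenkins.
    def batchDatasets(datasets, noOfBatches) {
        def batches = [""] * noOfBatches   // one accumulator string per batch
        datasets.eachWithIndex { datasetId, index ->
            // round-robin: index modulo batch count selects the target batch
            batches[index.mod(noOfBatches)] = datasetId + " " + batches[index.mod(noOfBatches)]
        }
        return batches
    }

    // Hypothetical usage: four datasets spread across two batches.
    assert batchDatasets(["a", "b", "c", "d"], 2) == ["c a ", "d b "]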

ansible/roles/pipelines_jenkins/templates/jenkins/jobs/Image-sync-all/config.xml.j2 (+2, -2)

@@ -52,8 +52,8 @@ stage('Interpret small datasets on jenkins nodes') {
 }

 @NonCPS def batchDatasets(datasets, noOfBatches){
-  //split into 6 batches
-  def batches = ["","","","","",""]
+  //split into n-node batches
+  def batches = [""] * noOfBatches
   datasets.eachWithIndex { datasetId, index ->
     batches[index.mod(noOfBatches)] = datasetId + " " + batches[index.mod(noOfBatches)]
   }

ansible/roles/pipelines_jenkins/templates/jenkins/jobs/Index-all/config.xml.j2 (+2, -2)

@@ -79,8 +79,8 @@ stage('Index small datasets on jenkins nodes') {


 @NonCPS def batchDatasets(datasets, noOfBatches){
-  //split into 6 batches
-  def batches = ["","","","","",""]
+  //split into n-node batches
+  def batches = [""] * noOfBatches
   datasets.eachWithIndex { datasetId, index ->
     batches[index.mod(noOfBatches)] = datasetId + " " + batches[index.mod(noOfBatches)]
   }

ansible/roles/pipelines_jenkins/templates/jenkins/jobs/Interpret-all/config.xml.j2 (+2, -2)

@@ -111,8 +111,8 @@ stage('Interpret small datasets on jenkins nodes') {
 }

 @NonCPS def batchDatasets(datasets, noOfBatches){
-  //split into 6 batches
-  def batches = ["","","","","",""]
+  //split into n-node batches
+  def batches = [""] * noOfBatches
   datasets.eachWithIndex { datasetId, index ->
     batches[index.mod(noOfBatches)] = datasetId + " " + batches[index.mod(noOfBatches)]
   }

ansible/roles/pipelines_jenkins/templates/jenkins/jobs/Interpret-small-datasets/config.xml.j2 (+2, -2)

@@ -87,8 +87,8 @@ stage('Interpret small datasets on jenkins nodes') {
 }

 @NonCPS def batchDatasets(datasets, noOfBatches){
-  //split into 6 batches
-  def batches = ["","","","","",""]
+  //split into n-node batches
+  def batches = [""] * noOfBatches
   datasets.eachWithIndex { datasetId, index ->
     batches[index.mod(noOfBatches)] = datasetId + " " + batches[index.mod(noOfBatches)]
   }

ansible/roles/pipelines_jenkins/templates/jenkins/jobs/Restart-docker-services/config.xml.j2 (+4, -1)

@@ -6,7 +6,10 @@

 </properties>
 <definition class="org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition" plugin="workflow-cps@2.90">
-  <script>nodeNames().each { nodeName -&gt;
+  <script>// Skip if we are not using docker
+def use_docker_with_pipelines = {{ use_docker_with_pipelines | string | lower }}
+if (!use_docker_with_pipelines) return
+nodeNames().each { nodeName -&gt;
   node(nodeName) {
     echo 'nodeName ' + nodeName
     sh 'sudo sh -c "docker kill $(docker ps -q)" || true'
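
The new guard is filled in when Ansible renders this template: the "| string | lower" filters turn the role's boolean into the lowercase literal Groovy expects, so the job short-circuits on deployments that do not use docker. Assuming the variable is set to false, the rendered script body would start roughly like this (a sketch of the rendered output, not the actual generated file):

    // Skip if we are not using docker
    def use_docker_with_pipelines = false   // rendered from the assumed Ansible value
    if (!use_docker_with_pipelines) return  // a top-level return ends the whole script
    nodeNames().each { nodeName ->
        // docker kill / restart steps only run when the flag renders as true
    }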

ansible/roles/pipelines_jenkins/templates/jenkins/jobs/SDS-all/config.xml.j2 (+2, -2)

@@ -83,8 +83,8 @@ stage('SDS small datasets on jenkins nodes') {


 @NonCPS def batchDatasets(datasets, noOfBatches){
-  //split into 6 batches
-  def batches = ["","","","","",""]
+  //split into n-node batches
+  def batches = [&quot;&quot;] * noOfBatches
   datasets.eachWithIndex { datasetId, index -&gt;
     batches[index.mod(noOfBatches)] = datasetId + " " + batches[index.mod(noOfBatches)]
   }

ansible/roles/pipelines_jenkins/templates/jenkins/jobs/SDS-small-datasets/config.xml.j2 (+2, -2)

@@ -79,8 +79,8 @@ stage('SDS small datasets on jenkins nodes') {


 @NonCPS def batchDatasets(datasets, noOfBatches){
-  //split into 6 batches
-  def batches = ["","","","","",""]
+  //split into n-node batches
+  def batches = [&quot;&quot;] * noOfBatches
   datasets.eachWithIndex { datasetId, index -&gt;
     batches[index.mod(noOfBatches)] = datasetId + " " + batches[index.mod(noOfBatches)]
   }

ansible/roles/pipelines_jenkins/templates/jenkins/jobs/UUID-all/config.xml.j2 (+2, -2)

@@ -76,8 +76,8 @@ stage('UUID small datasets on jenkins nodes') {


 @NonCPS def batchDatasets(datasets, noOfBatches){
-  //split into 6 batches
-  def batches = ["","","","","",""]
+  //split into n-node batches
+  def batches = [&quot;&quot;] * noOfBatches
   datasets.eachWithIndex { datasetId, index -&gt;
     batches[index.mod(noOfBatches)] = datasetId + " " + batches[index.mod(noOfBatches)]
   }
