
Commit e0433f4

Merge pull request #124 from boozallen/123-release-add-sparkapp-migration
#123 add baton migration to update spark app exec
2 parents: 80f2205 + 8669064

11 files changed: +758 −2
foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/SparkAppExecMigration.java

@@ -0,0 +1,113 @@
package com.boozallen.aissemble.upgrade.migration.v1_7_0;

/*-
 * #%L
 * aiSSEMBLE::Foundation::Upgrade
 * %%
 * Copyright (C) 2021 Booz Allen
 * %%
 * This software package is licensed under the Booz Allen Public License. All Rights Reserved.
 * #L%
 */

import com.boozallen.aissemble.upgrade.migration.AbstractAissembleMigration;
import com.boozallen.aissemble.upgrade.util.pom.PomHelper;
import com.boozallen.aissemble.upgrade.util.pom.PomModifications;
import org.apache.maven.model.InputLocation;
import org.apache.maven.model.Model;
import org.apache.maven.model.PluginExecution;
import org.codehaus.plexus.util.xml.Xpp3Dom;

import java.io.File;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Upgrades pipeline POM files to replace the old aissemble-spark-application chart template execution with the updated
 * execution that uses the ghcr.io repository. These executions are used to create the SparkApplication files that can
 * be executed/submitted by Airflow. However, they are included in all pipelines regardless of whether Airflow is in
 * use.
 */
public class SparkAppExecMigration extends AbstractAissembleMigration {

    public static final String NEW_CHART = "${aissemble.helm.repo.protocol}://${aissemble.helm.repo}/aissemble-spark-application-chart";

    @Override
    protected boolean shouldExecuteOnFile(File file) {
        Model pom = PomHelper.getLocationAnnotatedModel(file);
        return !getSparkAppExecutions(pom).isEmpty();
    }

    @Override
    protected boolean performMigration(File file) {
        Model pom = PomHelper.getLocationAnnotatedModel(file);
        List<PluginExecution> execs = getSparkAppExecutions(pom);
        PomModifications modifications = new PomModifications();
        for (PluginExecution exec : execs) {
            modifyExecution(exec, modifications);
        }
        return PomHelper.writeModifications(file, modifications.finalizeMods());
    }

    /**
     * Modifies the exec-maven-plugin execution to use the new aissemble-spark-application chart template format.
     * @param exec the plugin <execution> to modify
     * @param modifications accumulator to which modifications are added
     */
    private void modifyExecution(PluginExecution exec, PomModifications modifications) {
        Xpp3Dom[] args = getConfig(exec, "arguments").getChildren("argument");
        for (Xpp3Dom arg : args) {
            InputLocation location = (InputLocation) arg.getInputLocation();
            String value = arg.getValue();
            if ("aissemble-spark-application".equals(value)) {
                InputLocation end = PomHelper.incrementColumn(location, "aissemble-spark-application".length());
                modifications.add(new PomModifications.Replacement(location, end, NEW_CHART));
            } else if ("--repo".equals(value) || "${aissemble.helm.repo}".equals(value)) {
                InputLocation start = PomHelper.incrementColumn(location, -"<argument>".length());
                InputLocation end = PomHelper.incrementColumn(location, value.length() + "</argument>".length());
                modifications.add(new PomModifications.Deletion(start, end));
            }
        }
    }

    /**
     * Gets the plugin <execution> blocks which use the exec-maven-plugin to run helm template on the aissemble-spark-application chart.
     * @param pom the POM to search
     * @return the executions that need to be modified
     */
    private List<PluginExecution> getSparkAppExecutions(Model pom) {
        return pom.getBuild().getPlugins().stream().filter(plugin -> plugin.getArtifactId().equals("exec-maven-plugin"))
                .flatMap(plugin -> plugin.getExecutions().stream())
                .filter(execution -> execution.getGoals().contains("exec"))
                .filter(this::isHelmExec)
                .filter(this::containsOldSparkAppArg)
                .collect(Collectors.toList());
    }

    private boolean isHelmExec(PluginExecution execution) {
        return getConfig(execution, "executable").getValue().equals("helm");
    }

    /**
     * Checks if the exec-maven-plugin execution contains the old aissemble-spark-application argument.
     *
     * @param execution the plugin <execution> to check
     * @return true if the execution contains the old argument
     */
    private boolean containsOldSparkAppArg(PluginExecution execution) {
        Xpp3Dom args = ((Xpp3Dom) execution.getConfiguration()).getChild("arguments");
        return Stream.of(args.getChildren("argument"))
                .anyMatch(arg -> "aissemble-spark-application".equals(arg.getValue()));
    }

    /**
     * Gets the <configuration> item for a given plugin <execution> by name.
     * @param execution the plugin <execution> to search
     * @param name the name of the configuration item to find
     * @return the configuration item DOM
     */
    private Xpp3Dom getConfig(PluginExecution execution, String name) {
        return ((Xpp3Dom) execution.getConfiguration()).getChild(name);
    }
}
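
For context, the exec-maven-plugin execution that this migration rewrites takes roughly the following shape in a pipeline POM. This is an illustrative sketch based only on the argument values the migration matches; the surrounding configuration elements and any additional arguments are assumptions, not copied from a generated pipeline. Before the migration, the helm template call references the chart by its short name plus a separate --repo argument:

    <configuration>
        <executable>helm</executable>
        <arguments>
            <argument>template</argument>
            <argument>aissemble-spark-application</argument>
            <argument>--repo</argument>
            <argument>${aissemble.helm.repo}</argument>
            <!-- any remaining arguments are left untouched by the migration -->
        </arguments>
    </configuration>

After the migration, the chart short name is replaced with the fully qualified NEW_CHART value and the now-redundant --repo argument pair is removed:

    <configuration>
        <executable>helm</executable>
        <arguments>
            <argument>template</argument>
            <argument>${aissemble.helm.repo.protocol}://${aissemble.helm.repo}/aissemble-spark-application-chart</argument>
            <!-- any remaining arguments are left untouched by the migration -->
        </arguments>
    </configuration>

The chart-name swap is applied as a single-line PomModifications.Replacement, while the --repo flag and its ${aissemble.helm.repo} value are each removed with a PomModifications.Deletion spanning the whole <argument> element.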

foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/util/FileUtils.java

+3
@@ -188,6 +188,9 @@ public static boolean hasRegExMatch(String regex, File file) throws IOException
      * @return a single indent in the inferred style
      */
     public static String getIndent(String line, int level) {
+        if( level < 1 ) {
+            return "";
+        }
         int i = 0;
         while (i < line.length() && Character.isWhitespace(line.charAt(i))) {
             i++;

foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/util/pom/LocationAwareMavenReader.java

Whitespace-only changes.

foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/util/pom/PomHelper.java

+5
@@ -10,6 +10,7 @@
  * #L%
  */

+import org.apache.maven.model.InputLocation;
 import org.apache.maven.model.InputSource;
 import org.apache.maven.model.Model;
 import org.apache.maven.project.MavenProject;
@@ -72,4 +73,8 @@ public static boolean writeModifications(File file, PomModifications.Final modif
         }
         return true;
     }
+
+    public static InputLocation incrementColumn(InputLocation location, int i) {
+        return new InputLocation(location.getLineNumber(), location.getColumnNumber() + i, location.getSource());
+    }
 }

foundation/foundation-upgrade/src/main/java/com/boozallen/aissemble/upgrade/util/pom/PomModifications.java

+71 −1
@@ -108,7 +108,8 @@ public InputLocation getEnd() {
         public int apply(BufferedReader in, Writer out, String line) throws IOException {
             int current = getStart().getLineNumber();
             // NB: clashes with other modifications on same line
-            String substring = line.substring(0, getStart().getColumnNumber() - 1);
+            int startColumn = getStart().getColumnNumber();
+            String substring = startColumn == 0 ? "" : line.substring(0, startColumn - 1);
             if (StringUtils.isNotBlank(substring)) {
                 out.write(substring);
                 out.write("\n");
@@ -153,4 +154,73 @@ public int apply(BufferedReader in, Writer out, String line) throws IOException
             return getStart().getLineNumber();
         }
     }
+
+    /**
+     * Replaces the content between the start and end locations with the produced content.
+     */
+    public static class Replacement extends Modification {
+        private final InputLocation end;
+        private final Function<String,String> contentProducer;
+        private final int indentLvl;
+
+        /**
+         * Constructor for replacing content within a single line.
+         *
+         * @param start the location to insert the new content
+         * @param end the location to skip to, existing content between start and end will be deleted
+         * @param content the new content
+         */
+        public Replacement(InputLocation start, InputLocation end, String content) {
+            this(start, end, 0, l -> content);
+        }
+
+        /**
+         * Constructor for multi-line replacements.
+         *
+         * @param start the location to insert the new content
+         * @param end the location to skip to, existing content between start and end will be deleted
+         * @param indentLvl the indent level of the current content on the line
+         * @param contentProducer a function that produces the content to insert, given a one-level indent string
+         */
+        public Replacement(InputLocation start, InputLocation end, int indentLvl, Function<String,String> contentProducer) {
+            super(start);
+            this.end = end;
+            this.contentProducer = contentProducer;
+            this.indentLvl = indentLvl;
+        }
+
+        public InputLocation getEnd() {
+            return end;
+        }
+
+        public Function<String, String> getContentProducer() {
+            return contentProducer;
+        }
+
+        public int getIndentLvl() {
+            return indentLvl;
+        }
+
+        @Override
+        public int apply(BufferedReader in, Writer out, String line) throws IOException {
+            int current = getStart().getLineNumber();
+            // NB: clashes with other modifications on same line
+            String substring = line.substring(0, getStart().getColumnNumber() - 1);
+            if (StringUtils.isNotBlank(substring)) {
+                out.write(substring);
+            }
+            String indent = FileUtils.getIndent(line, getIndentLvl());
+            out.write(getContentProducer().apply(indent));
+            while (current < getEnd().getLineNumber()) {
+                line = in.readLine();
+                current++;
+            }
+            if( getEnd().getColumnNumber() <= line.length() ) {
+                out.write(line.substring(getEnd().getColumnNumber()-1));
+                out.write("\n");
+            }
+            return current;
+        }
+
+    }
 }

foundation/foundation-upgrade/src/main/resources/migrations.json

+11
@@ -79,6 +79,17 @@
         }
       ]
     },
+    {
+      "name": "upgrade-spark-application-exec-migration",
+      "implementation": "com.boozallen.aissemble.upgrade.migration.v1_7_0.SparkAppExecMigration",
+      "fileSets": [
+        {
+          "includes": [
+            "pom.xml"
+          ]
+        }
+      ]
+    },
     {
       "name": "enable-habushu-build-cache-migration",
       "implementation": "com.boozallen.aissemble.upgrade.migration.v1_7_0.EnableDistOutputFolderForHabushuBuildCacheMigration",
foundation/foundation-upgrade/src/test/java/com/boozallen/aissemble/upgrade/migration/AbstractMigrationTest.java

+1 −1
@@ -14,7 +14,7 @@
 import java.io.IOException;
 import java.nio.file.Paths;

-public class AbstractMigrationTest {
+public abstract class AbstractMigrationTest {
     protected File testFile;

     protected boolean shouldExecute;
foundation/foundation-upgrade/src/test/java/com/boozallen/aissemble/upgrade/migration/v1_7_0/SparkAppExecMigrationSteps.java

@@ -0,0 +1,45 @@
package com.boozallen.aissemble.upgrade.migration.v1_7_0;

/*-
 * #%L
 * aiSSEMBLE::Foundation::Upgrade
 * %%
 * Copyright (C) 2021 Booz Allen
 * %%
 * This software package is licensed under the Booz Allen Public License. All Rights Reserved.
 * #L%
 */

import com.boozallen.aissemble.upgrade.migration.AbstractMigrationTest;
import io.cucumber.java.en.Given;
import io.cucumber.java.en.Then;
import io.cucumber.java.en.When;
import org.apache.commons.io.FileUtils;

import java.io.File;
import java.io.IOException;

import static org.junit.Assert.assertTrue;

public class SparkAppExecMigrationSteps extends AbstractMigrationTest {
    @Given("a pipeline pom file with one or more helm template commands using the aissemble-spark-application chart")
    public void aPipelinePomFileWithOneOrMoreHelmTemplateCommandsUsingTheAissembleSparkApplicationChart() {
        testFile = getTestFile("v1_7_0/SparkAppExecMigration/migration/pom.xml");
    }

    @When("the 1.7.0 spark app exec migration executes")
    public void theSparkAppExecMigrationExecutes() {
        performMigration(new SparkAppExecMigration());
    }

    @Then("the pom is updated to use the aissemble-spark-application-chart from the fully qualified URL")
    public void thePomIsUpdatedToUseTheAissembleSparkApplicationChartFromTheFullyQualifiedURL() throws IOException {
        assertTrue("File migration was incorrectly skipped", shouldExecute);
        assertTrue("File was not migrated successfully", successful);
        File migratedFile = getTestFile("v1_7_0/SparkAppExecMigration/migration/pom.xml");
        File validationFile = getTestFile("v1_7_0/SparkAppExecMigration/validation/pom.xml");

        assertTrue("Migrated file does not match expected output",
                FileUtils.contentEqualsIgnoreEOL(migratedFile, validationFile, null));
    }
}
@@ -0,0 +1,7 @@
@habushu-build-cache-migration
Feature: Spark App Exec Migration

  Scenario: Update a standard project to the new Chart URL
    Given a pipeline pom file with one or more helm template commands using the aissemble-spark-application chart
    When the 1.7.0 spark app exec migration executes
    Then the pom is updated to use the aissemble-spark-application-chart from the fully qualified URL
