diff --git a/nextflow/src/org/labkey/nextflow/pipeline/NextFlowPipelineJob.java b/nextflow/src/org/labkey/nextflow/pipeline/NextFlowPipelineJob.java
index 1cc4d8c3..6f31dafa 100644
--- a/nextflow/src/org/labkey/nextflow/pipeline/NextFlowPipelineJob.java
+++ b/nextflow/src/org/labkey/nextflow/pipeline/NextFlowPipelineJob.java
@@ -2,17 +2,23 @@
 
 import lombok.Getter;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.NotNull;
+import org.json.JSONObject;
 import org.labkey.api.data.Container;
 import org.labkey.api.files.FileContentService;
 import org.labkey.api.pipeline.ParamParser;
 import org.labkey.api.pipeline.PipeRoot;
 import org.labkey.api.pipeline.PipelineJobService;
+import org.labkey.api.pipeline.PipelineService;
+import org.labkey.api.pipeline.PipelineStatusFile;
 import org.labkey.api.pipeline.TaskId;
 import org.labkey.api.pipeline.TaskPipeline;
 import org.labkey.api.pipeline.file.AbstractFileAnalysisJob;
 import org.labkey.api.util.FileUtil;
 import org.labkey.api.util.PageFlowUtil;
+import org.labkey.api.util.StringUtilsLabKey;
+import org.labkey.api.util.logging.LogHelper;
 import org.labkey.api.view.ViewBackgroundInfo;
 
 import java.io.BufferedWriter;
@@ -26,6 +32,8 @@
 @Getter
 public class NextFlowPipelineJob extends AbstractFileAnalysisJob
 {
+    protected static final Logger LOG = LogHelper.getLogger(NextFlowPipelineJob.class, "NextFlow jobs");
+
     private Path config;
 
     @SuppressWarnings("unused") // For serialization
@@ -51,6 +59,24 @@ public NextFlowPipelineJob(ViewBackgroundInfo info, @NotNull PipeRoot root, Path
         super(new NextFlowProtocol(), NextFlowPipelineProvider.NAME, info, root, config.getFileName().toString(), config, inputFiles, false, false);
         this.config = config;
         setLogFile(log);
+        LOG.info("NextFlow job queued: {}", getJsonJobInfo());
+    }
+
+    protected JSONObject getJsonJobInfo()
+    {
+        JSONObject result = new JSONObject();
+        result.put("user", getUser().getEmail());
+        result.put("container", getContainer().getPath());
+        result.put("filePath", getLogFilePath().getParent().toString());
+        result.put("runName", getNextFlowRunName());
+        result.put("configFile", getConfig().getFileName().toString());
+        return result;
+    }
+
+    protected String getNextFlowRunName()
+    {
+        PipelineStatusFile file = PipelineService.get().getStatusFile(getJobGUID());
+        return file == null ? "Unknown" : ("LabKeyJob" + file.getRowId());
     }
 
     @Override
@@ -87,7 +113,7 @@ private static Path createConfig(Path configTemplate, Path parentDir, Path jobDi
     @Override
     public String getDescription()
     {
-        return "NextFlow analysis using " + config.getFileName() + " of " + getInputFilePaths().size() + " files";
+        return "NextFlow analysis of " + StringUtilsLabKey.pluralize(getInputFilePaths().size(), "file") + " using config: " + config.getFileName();
     }
 
     @Override
diff --git a/nextflow/src/org/labkey/nextflow/pipeline/NextFlowRunTask.java b/nextflow/src/org/labkey/nextflow/pipeline/NextFlowRunTask.java
index 571e2ee9..35670120 100644
--- a/nextflow/src/org/labkey/nextflow/pipeline/NextFlowRunTask.java
+++ b/nextflow/src/org/labkey/nextflow/pipeline/NextFlowRunTask.java
@@ -42,14 +42,14 @@ public NextFlowRunTask(Factory factory, PipelineJob job)
         super(factory, job);
     }
 
-
-
     @Override
     public @NotNull RecordedActionSet run() throws PipelineJobException
     {
         Logger log = getJob().getLogger();
+        NextFlowPipelineJob.LOG.info("Starting to execute NextFlow: {}", getJob().getJsonJobInfo());
 
         SecurityManager.TransformSession session = null;
+        boolean success = false;
 
         try
         {
@@ -69,13 +69,14 @@ public NextFlowRunTask(Factory factory, PipelineJob job)
             // Need to pass to the main process directly in the future to allow concurrent execution for different users
             ProcessBuilder secretsPB = new ProcessBuilder("nextflow", "secrets", "set", "PANORAMA_API_KEY", apiKey);
 
-            log.info("Job Started");
+            log.info("Setting secrets");
             File dir = getJob().getLogFile().getParentFile();
             getJob().runSubProcess(secretsPB, dir);
 
             ProcessBuilder executionPB = new ProcessBuilder(getArgs());
             getJob().runSubProcess(executionPB, dir);
             log.info("Job Finished");
+            NextFlowPipelineJob.LOG.info("Finished executing NextFlow: {}", getJob().getJsonJobInfo());
 
             RecordedAction action = new RecordedAction(ACTION_NAME);
             for (Path inputFile : getJob().getInputFilePaths())
@@ -84,6 +85,7 @@ public NextFlowRunTask(Factory factory, PipelineJob job)
             }
             addOutputs(action, getJob().getLogFilePath().getParent().resolve("reports"), log);
             addOutputs(action, getJob().getLogFilePath().getParent().resolve("results"), log);
+            success = true;
             return new RecordedActionSet(action);
         }
         catch (IOException e)
@@ -96,6 +98,10 @@ public NextFlowRunTask(Factory factory, PipelineJob job)
             {
                 session.close();
             }
+            if (!success)
+            {
+                NextFlowPipelineJob.LOG.info("Failed executing NextFlow: {}", getJob().getJsonJobInfo());
+            }
         }
     }
 
@@ -182,6 +188,8 @@ private boolean hasAwsSection(Path configFile) throws PipelineJobException
         }
         args.add("-c");
         args.add(configFile.toAbsolutePath().toString());
+        args.add("-name");
+        args.add(getJob().getNextFlowRunName());
         return args;
     }