Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/develop' into fb_filesystemlike
Browse files Browse the repository at this point in the history
  • Loading branch information
labkey-matthewb committed Oct 1, 2024
2 parents 1aab1df + a490a6c commit d84a997
Show file tree
Hide file tree
Showing 4 changed files with 65 additions and 38 deletions.
24 changes: 0 additions & 24 deletions SequenceAnalysis/pipeline_code/sequence_tools_install.sh
Original file line number Diff line number Diff line change
Expand Up @@ -311,30 +311,6 @@ else
fi


#
# RNA-SeQC
#
echo ""
echo ""
echo "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
echo "Install RNA-SeQC"
echo ""
# NOTE(review): cd failure is not checked; presumably $LKSRC_DIR always exists by
# this point in the script -- confirm, or add `|| exit 1` if a missing dir should abort.
cd "$LKSRC_DIR"

# Install the RNA-SeQC jar unless it is already present; a non-empty
# FORCE_REINSTALL forces a clean re-download.
if [[ ! -e ${LKTOOLS_DIR}/RNA-SeQC.jar || -n $FORCE_REINSTALL ]];
then
    echo "Cleaning up previous installs"
    # remove any previously downloaded jars/extracted dirs in the source dir
    rm -Rf RNA-SeQC*
    # quote the tools path so paths containing spaces are handled safely
    rm -Rf "$LKTOOLS_DIR/RNA-SeQC.jar"

    # $WGET_OPTS intentionally unquoted: it may carry multiple flags
    wget $WGET_OPTS https://data.broadinstitute.org/cancer/cga/tools/rnaseqc/RNA-SeQC_v1.1.8.jar

    install ./RNA-SeQC_v1.1.8.jar "$LKTOOLS_DIR/RNA-SeQC.jar"
else
    echo "Already installed"
fi


#
#mosaik
#
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.labkey.api.data.ColumnInfo;
import org.jetbrains.annotations.NotNull;
import org.labkey.api.data.CompareType;
import org.labkey.api.data.Container;
import org.labkey.api.data.ContainerManager;
Expand Down Expand Up @@ -48,7 +48,6 @@
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
Expand Down Expand Up @@ -91,7 +90,7 @@ public String getDescription()
}

@Override
public TaskPipeline getTaskPipeline()
public TaskPipeline<?> getTaskPipeline()
{
return PipelineJobService.get().getTaskPipeline(new TaskId(OrphanFilePipelineJob.class));
}
Expand Down Expand Up @@ -129,7 +128,7 @@ public List<String> getProtocolActionNames()
}

@Override
public PipelineJob.Task createTask(PipelineJob job)
public PipelineJob.Task<?> createTask(PipelineJob job)
{
return new Task(this, job);
}
Expand All @@ -142,7 +141,7 @@ public boolean isJobComplete(PipelineJob job)
}

@Override
public RecordedActionSet run() throws PipelineJobException
public @NotNull RecordedActionSet run() throws PipelineJobException
{
getJob().getLogger().info("## The following sections list any files or pipeline jobs that appear to be orphans, not connected to any imported readsets or sequence outputs:");

Expand All @@ -165,7 +164,23 @@ public RecordedActionSet run() throws PipelineJobException
knownExpDatas = Collections.unmodifiableSet(knownExpDatas);
//messages.add("## total registered sequence ExpData: " + knownExpDatas.size());

getOrphanFilesForContainer(getJob().getContainer(), getJob().getUser(), orphanFiles, orphanIndexes, orphanJobs, messages, probableDeletes, knownJobPaths, knownExpDatas);
// Build map of URL/ExpData for all data, to cover cross-container files
Map<URI, Set<Integer>> knownDataMap = new HashMap<>();
for (Integer d : knownExpDatas)
{
ExpData ed = ExperimentService.get().getExpData(d);
if (ed != null)
{
if (!knownDataMap.containsKey(ed.getDataFileURI()))
{
knownDataMap.put(ed.getDataFileURI(), new HashSet<>());
}

knownDataMap.get(ed.getDataFileURI()).add(d);
}
}

getOrphanFilesForContainer(getJob().getContainer(), getJob().getUser(), orphanFiles, orphanIndexes, orphanJobs, messages, probableDeletes, knownJobPaths, knownExpDatas, knownDataMap);
probableDeletes.addAll(orphanIndexes);

if (!orphanFiles.isEmpty())
Expand Down Expand Up @@ -286,7 +301,7 @@ private Set<File> getKnownSequenceJobPaths(Container c, User u, Collection<Strin
return knownJobPaths;
}

private Map<URI, Set<Integer>> getDataMapForContainer(Container c)
private Map<URI, Set<Integer>> getDataMapForContainer(Container c, Map<URI, Set<Integer>> knownExpDataMap)
{
SimpleFilter dataFilter = new SimpleFilter(FieldKey.fromString("container"), c.getId());
TableInfo dataTable = ExperimentService.get().getTinfoData();
Expand Down Expand Up @@ -320,10 +335,21 @@ public void exec(ResultSet rs) throws SQLException
});
//messages.add("## total ExpData paths: " + dataMap.size());

// append additional datas:
for (URI u : knownExpDataMap.keySet())
{
if (!dataMap.containsKey(u))
{
dataMap.put(u, new HashSet<>());
}

dataMap.get(u).addAll(knownExpDataMap.get(u));
}

return dataMap;
}

public void getOrphanFilesForContainer(Container c, User u, Set<File> orphanFiles, Set<File> orphanIndexes, Set<PipelineStatusFile> orphanJobs, List<String> messages, Set<File> probableDeletes, Set<File> knownSequenceJobPaths, Set<Integer> knownExpDatas)
public void getOrphanFilesForContainer(Container c, User u, Set<File> orphanFiles, Set<File> orphanIndexes, Set<PipelineStatusFile> orphanJobs, List<String> messages, Set<File> probableDeletes, Set<File> knownSequenceJobPaths, Set<Integer> knownExpDatas, Map<URI, Set<Integer>> knownExpDataMap)
{
PipeRoot root = PipelineService.get().getPipelineRootSetting(c);
if (root == null)
Expand All @@ -338,7 +364,7 @@ public void getOrphanFilesForContainer(Container c, User u, Set<File> orphanFile

messages.add("## processing container: " + c.getPath());

Map<URI, Set<Integer>> dataMap = getDataMapForContainer(c);
Map<URI, Set<Integer>> dataMap = getDataMapForContainer(c, knownExpDataMap);

Container parent = c.isWorkbook() ? c.getParent() : c;
TableInfo jobsTableParent = PipelineService.get().getJobsTable(u, parent);
Expand Down Expand Up @@ -438,7 +464,7 @@ public boolean accept(File pathname)
{
if (child.isWorkbook())
{
getOrphanFilesForContainer(child, u, orphanFiles, orphanIndexes, orphanJobs, messages, probableDeletes, knownSequenceJobPaths, knownExpDatas);
getOrphanFilesForContainer(child, u, orphanFiles, orphanIndexes, orphanJobs, messages, probableDeletes, knownSequenceJobPaths, knownExpDatas, knownExpDataMap);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { observer } from 'mobx-react';
import {
DataGrid,
GridColDef,
GridColumnVisibilityModel,
GridColumnVisibilityModel, GridFilterPanel,
GridPaginationModel,
GridRenderCellParams,
GridSortDirection,
Expand Down Expand Up @@ -420,13 +420,26 @@ const VariantTableWidget = observer(props => {
}
}

// NOTE: the filterPanel/sx override is added to fix an issue where the grid column filter value input doesn't align with the field and operator inputs
const gridElement = (
<DataGrid
columns={[...columns, actionsCol]}
rows={features}
density="comfortable"
slots={{
toolbar: ToolbarWithProps
toolbar: ToolbarWithProps,
filterPanel: GridFilterPanel
}}
slotProps={{
filterPanel: {
filterFormProps: {
valueInputProps: {
sx: {
marginTop: 0
}
}
}
}
}}
columnVisibilityModel={columnVisibilityModel}
pageSizeOptions={[10,25,50,100]}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import ScopedCssBaseline from '@mui/material/ScopedCssBaseline';
import {
DataGrid,
GridColDef,
GridColumnVisibilityModel,
GridColumnVisibilityModel, GridFilterPanel,
GridPaginationModel,
GridRenderCellParams,
GridToolbar
Expand Down Expand Up @@ -352,10 +352,22 @@ const VariantTableWidget = observer(props => {
}

const gridElement = (
// NOTE: the filterPanel/sx override is added to fix an issue where the grid column filter value input doesn't align with the field and operator inputs
<DataGrid
columns={[...gridColumns, actionsCol]}
rows={features.map((rawFeature, id) => rawFeatureToRow(rawFeature, id, gridColumns, trackId))}
slots={{ toolbar: GridToolbar }}
slots={{ toolbar: GridToolbar, filterPanel: GridFilterPanel }}
slotProps={{
filterPanel: {
filterFormProps: {
valueInputProps: {
sx: {
marginTop: 0
}
}
}
}
}}
pageSizeOptions={[10,25,50,100]}
paginationModel={ pageSizeModel }
onPaginationModelChange= {(newModel) => setPageSizeModel(newModel)}
Expand Down

0 comments on commit d84a997

Please sign in to comment.