diff --git a/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/pipeline/BcftoolsRunner.java b/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/pipeline/BcftoolsRunner.java
index 0767a7471..e3f748329 100644
--- a/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/pipeline/BcftoolsRunner.java
+++ b/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/pipeline/BcftoolsRunner.java
@@ -2,9 +2,13 @@
 
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.Nullable;
+import org.labkey.api.pipeline.PipelineJobException;
+import org.labkey.api.pipeline.PipelineJobService;
 import org.labkey.api.sequenceanalysis.run.AbstractCommandWrapper;
 
 import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
 
 /**
  * User: bimber
@@ -22,4 +26,32 @@
     public static File getBcfToolsPath()
     {
         return SequencePipelineService.get().getExeForPackage("BCFTOOLSPATH", "bcftools");
     }
+
+    public static boolean isBcftoolsFound()
+    {
+        return BcftoolsRunner.resolveFileInPath("bcftools", null, false) != null;
+    }
+
+    public void doIndex(File vcf) throws PipelineJobException
+    {
+        List<String> args = new ArrayList<>();
+        args.add(getBcfToolsPath().getAbsolutePath());
+        args.add("index");
+        args.add("-t");
+        args.add("-f");
+
+        if (!PipelineJobService.get().isWebServer())
+        {
+            Integer threads = SequencePipelineService.get().getMaxThreads(getLogger());
+            if (threads != null)
+            {
+                args.add("--threads");
+                args.add(String.valueOf(threads));
+            }
+        }
+
+        args.add(vcf.getAbsolutePath());
+
+        execute(args);
+    }
 }
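For reference, a minimal sketch of how the two new methods compose (illustration only; the logger name and VCF argument are hypothetical). doIndex() shells out to `bcftools index -t -f`, adding --threads only when running outside the webserver, and mirrors the fallback added to ensureVcfIndex() below:

    import java.io.File;
    import org.apache.logging.log4j.LogManager;
    import org.labkey.api.pipeline.PipelineJobException;
    import org.labkey.api.sequenceanalysis.pipeline.BcftoolsRunner;

    class BcftoolsIndexExample
    {
        static void indexVcf(File vcf) throws PipelineJobException
        {
            // only attempt bcftools when it can be resolved on the PATH
            if (BcftoolsRunner.isBcftoolsFound())
            {
                // effectively runs: bcftools index -t -f [--threads N] <vcf>
                new BcftoolsRunner(LogManager.getLogger("example")).doIndex(vcf);
            }
        }
    }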
params.optBoolean("useBcfTools", false); boolean doNotRetainUnmapped = params.optBoolean("doNotRetainUnmapped", false); - if (doNotRetainUnmapped && !useBcfTools) + if (!doNotRetainUnmapped && !useBcfTools) { ctx.getLogger().debug("Picard LiftoverVcf requires an output file for rejected sites, so setting doNotRetainUnmapped to true"); doNotRetainUnmapped = true; diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/GatherVcfsCloudWrapper.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/GatherVcfsCloudWrapper.java new file mode 100644 index 000000000..f65de9a1b --- /dev/null +++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/GatherVcfsCloudWrapper.java @@ -0,0 +1,54 @@ +package org.labkey.sequenceanalysis.run.variant; + +import org.apache.logging.log4j.Logger; +import org.labkey.api.pipeline.PipelineJobException; +import org.labkey.api.sequenceanalysis.SequenceAnalysisService; +import org.labkey.api.sequenceanalysis.run.AbstractGatk4Wrapper; +import org.labkey.api.writer.PrintWriters; + +import java.io.File; +import java.io.IOException; +import java.io.PrintWriter; +import java.util.ArrayList; +import java.util.List; + +public class GatherVcfsCloudWrapper extends AbstractGatk4Wrapper +{ + public GatherVcfsCloudWrapper(Logger log) + { + super(log); + } + + public void gatherVcfs(File output, List inputVcfs) throws PipelineJobException + { + List args = new ArrayList<>(getBaseArgs("GatherVcfsCloud")); + args.add("-O"); + args.add(output.getPath()); + + File argFile = new File(output.getParentFile(), "inputs.list"); + try (PrintWriter writer = PrintWriters.getPrintWriter(argFile)) + { + inputVcfs.forEach(f -> writer.println(f.getPath())); + } + catch (IOException e) + { + throw new PipelineJobException(e); + } + + args.add("-I"); + args.add(argFile.getPath()); + + execute(args); + + argFile.delete(); + + try + { + SequenceAnalysisService.get().ensureVcfIndex(output, getLogger()); + } + catch (IOException e) + { + throw new PipelineJobException(e); + } + } +} diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/KingInferenceStep.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/KingInferenceStep.java index 5e11307bc..9a21691b5 100644 --- a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/KingInferenceStep.java +++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/KingInferenceStep.java @@ -1,5 +1,6 @@ package org.labkey.sequenceanalysis.run.variant; +import com.google.common.io.Files; import htsjdk.samtools.SAMSequenceDictionary; import htsjdk.samtools.SAMSequenceRecord; import htsjdk.samtools.util.Interval; @@ -10,10 +11,7 @@ import org.apache.logging.log4j.Logger; import org.jetbrains.annotations.Nullable; import org.json.JSONObject; -import org.labkey.api.collections.CaseInsensitiveHashMap; import org.labkey.api.pipeline.PipelineJobException; -import org.labkey.api.reader.Readers; -import org.labkey.api.sequenceanalysis.SequenceAnalysisService; import org.labkey.api.sequenceanalysis.pipeline.AbstractVariantProcessingStepProvider; import org.labkey.api.sequenceanalysis.pipeline.PedigreeToolParameterDescriptor; import org.labkey.api.sequenceanalysis.pipeline.PipelineContext; @@ -26,18 +24,11 @@ import org.labkey.api.sequenceanalysis.run.AbstractCommandPipelineStep; import org.labkey.api.sequenceanalysis.run.AbstractCommandWrapper; import org.labkey.api.util.Compress; -import org.labkey.api.writer.PrintWriters; -import 
diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/GatherVcfsCloudWrapper.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/GatherVcfsCloudWrapper.java
new file mode 100644
index 000000000..f65de9a1b
--- /dev/null
+++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/GatherVcfsCloudWrapper.java
@@ -0,0 +1,54 @@
+package org.labkey.sequenceanalysis.run.variant;
+
+import org.apache.logging.log4j.Logger;
+import org.labkey.api.pipeline.PipelineJobException;
+import org.labkey.api.sequenceanalysis.SequenceAnalysisService;
+import org.labkey.api.sequenceanalysis.run.AbstractGatk4Wrapper;
+import org.labkey.api.writer.PrintWriters;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+
+public class GatherVcfsCloudWrapper extends AbstractGatk4Wrapper
+{
+    public GatherVcfsCloudWrapper(Logger log)
+    {
+        super(log);
+    }
+
+    public void gatherVcfs(File output, List<File> inputVcfs) throws PipelineJobException
+    {
+        List<String> args = new ArrayList<>(getBaseArgs("GatherVcfsCloud"));
+        args.add("-O");
+        args.add(output.getPath());
+
+        File argFile = new File(output.getParentFile(), "inputs.list");
+        try (PrintWriter writer = PrintWriters.getPrintWriter(argFile))
+        {
+            inputVcfs.forEach(f -> writer.println(f.getPath()));
+        }
+        catch (IOException e)
+        {
+            throw new PipelineJobException(e);
+        }
+
+        args.add("-I");
+        args.add(argFile.getPath());
+
+        execute(args);
+
+        argFile.delete();
+
+        try
+        {
+            SequenceAnalysisService.get().ensureVcfIndex(output, getLogger());
+        }
+        catch (IOException e)
+        {
+            throw new PipelineJobException(e);
+        }
+    }
+}
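Illustration of how this wrapper might be invoked (paths hypothetical): gatherVcfs() writes one VCF path per line into an inputs.list file beside the output, passes that file via a single -I argument, and then ensures a tabix index on the result:

    import java.io.File;
    import java.util.Arrays;
    import org.apache.logging.log4j.LogManager;
    import org.labkey.api.pipeline.PipelineJobException;
    import org.labkey.sequenceanalysis.run.variant.GatherVcfsCloudWrapper;

    class GatherVcfsExample
    {
        static void mergeBatches() throws PipelineJobException
        {
            File merged = new File("/work/merged.vcf.gz");

            // roughly equivalent to: gatk GatherVcfsCloud -O merged.vcf.gz -I inputs.list
            new GatherVcfsCloudWrapper(LogManager.getLogger("example")).gatherVcfs(merged, Arrays.asList(
                    new File("/work/batch1.vcf.gz"),
                    new File("/work/batch2.vcf.gz")));
        }
    }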
diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/KingInferenceStep.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/KingInferenceStep.java
index 5e11307bc..9a21691b5 100644
--- a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/KingInferenceStep.java
+++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/KingInferenceStep.java
@@ -1,5 +1,6 @@
 package org.labkey.sequenceanalysis.run.variant;
 
+import com.google.common.io.Files;
 import htsjdk.samtools.SAMSequenceDictionary;
 import htsjdk.samtools.SAMSequenceRecord;
 import htsjdk.samtools.util.Interval;
@@ -10,10 +11,7 @@
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.Nullable;
 import org.json.JSONObject;
-import org.labkey.api.collections.CaseInsensitiveHashMap;
 import org.labkey.api.pipeline.PipelineJobException;
-import org.labkey.api.reader.Readers;
-import org.labkey.api.sequenceanalysis.SequenceAnalysisService;
 import org.labkey.api.sequenceanalysis.pipeline.AbstractVariantProcessingStepProvider;
 import org.labkey.api.sequenceanalysis.pipeline.PedigreeToolParameterDescriptor;
 import org.labkey.api.sequenceanalysis.pipeline.PipelineContext;
@@ -26,18 +24,11 @@
 import org.labkey.api.sequenceanalysis.run.AbstractCommandPipelineStep;
 import org.labkey.api.sequenceanalysis.run.AbstractCommandWrapper;
 import org.labkey.api.util.Compress;
-import org.labkey.api.writer.PrintWriters;
-import org.labkey.sequenceanalysis.pipeline.ProcessVariantsHandler;
 
-import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
-import java.io.PrintWriter;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 
 public class KingInferenceStep extends AbstractCommandPipelineStep<KingInferenceStep.KingWrapper> implements VariantProcessingStep
 {
@@ -50,7 +41,7 @@ public static class Provider extends AbstractVariantProcessingStepProvider<KingInferenceStep>
@@ -139,7 +139,24 @@ public Output processVariants(File inputVCF, File outputDirectory, ReferenceGeno
         List<String> plinkArgs1 = new ArrayList<>(plinkArgs);
         plinkArgs1.add("--make-bed");
         plinkArgs1.add("--out");
         plinkArgs1.add(plinkOut.getPath());
 
-        plink.execute(plinkArgs1);
+        File doneFile = new File(plinkOut.getPath() + ".done");
+        output.addIntermediateFile(doneFile);
+        if (doneFile.exists())
+        {
+            getPipelineCtx().getLogger().debug("plink has completed, will not repeat");
+        }
+        else
+        {
+            plink.execute(plinkArgs1);
+
+            try
+            {
+                Files.touch(doneFile);
+            }
+            catch (IOException e)
+            {
+                throw new PipelineJobException(e);
+            }
+        }
 
         File plinkOutBed = new File(plinkOut.getPath() + ".bed");
         if (!plinkOutBed.exists())
@@ -154,154 +171,51 @@ public Output processVariants(File inputVCF, File outputDirectory, ReferenceGeno
         plinkArgs2.add("--out");
         plinkArgs2.add(plinkOutKing.getPath());
 
-        plink.execute(plinkArgs2);
-
+        doneFile = new File(plinkOutKing.getPath() + ".done");
         File plinkOutKingFile = new File(plinkOutKing.getPath() + ".kin0");
-        if (!plinkOutKingFile.exists())
+        File plinkOutKingFileGz = new File(plinkOutKingFile.getPath() + ".txt.gz");
+        if (doneFile.exists())
         {
-            throw new PipelineJobException("Unable to find file: " + plinkOutKingFile.getPath());
+            getPipelineCtx().getLogger().debug("plink has completed, will not repeat");
         }
-
-        File plinkOutKingFileTxt = new File(plinkOutKingFile.getPath() + ".txt.gz");
-        if (plinkOutKingFileTxt.exists())
+        else
         {
-            plinkOutKingFileTxt.delete();
-        }
+            plink.execute(plinkArgs2);
 
-        long lineCount = SequencePipelineService.get().getLineCount(plinkOutKingFile)-1;
-        try
-        {
-            Compress.compressGzip(plinkOutKingFile, plinkOutKingFileTxt);
-            FileUtils.delete(plinkOutKingFile);
-        }
-        catch (IOException e)
-        {
-            throw new PipelineJobException(e);
-        }
-
-        output.addSequenceOutput(plinkOutKingFileTxt, "PLINK2 Relatedness: " + inputVCF.getName(), "PLINK2 Kinship", null, null, genome.getGenomeId(), "Total lines: " + lineCount);
-
-        // Also with KING:
-        KingWrapper wrapper = new KingWrapper(getPipelineCtx().getLogger());
-        wrapper.setWorkingDir(outputDirectory);
-
-        List<String> kingArgs = new ArrayList<>();
-        kingArgs.add(wrapper.getExe().getPath());
-
-        kingArgs.add("-b");
-        kingArgs.add(plinkOutBed.getPath());
-
-        kingArgs.add("--prefix");
-        kingArgs.add(SequenceAnalysisService.get().getUnzippedBaseName(inputVCF.getName()));
-
-        // Update the pedigree / fam file:
-        String demographicsProviderName = getProvider().getParameterByName(PedigreeToolParameterDescriptor.NAME).extractValue(getPipelineCtx().getJob(), getProvider(), getStepIdx());
-        if (demographicsProviderName != null)
-        {
-            File pedFile = ProcessVariantsHandler.getPedigreeFile(getPipelineCtx().getSourceDirectory(true), demographicsProviderName);
-            if (!pedFile.exists())
+            if (!plinkOutKingFile.exists())
             {
-                throw new PipelineJobException("Unable to find pedigree file: " + pedFile.getPath());
+                throw new PipelineJobException("Unable to find file: " + plinkOutKingFile.getPath());
             }
-
-            File kingFam = createFamFile(pedFile, new File(plinkOutBed.getParentFile(), "plink.fam"));
-            kingArgs.add("--fam");
-            kingArgs.add(kingFam.getPath());
-
-            output.addIntermediateFile(kingFam);
-        }
-
-        if (threads != null)
-        {
-            kingArgs.add("--cpus");
-            kingArgs.add(threads.toString());
-        }
-
-        kingArgs.add("--kinship");
-        kingArgs.add("--rplot");
-
-        File kinshipOutput = new File(outputDirectory, SequenceAnalysisService.get().getUnzippedBaseName(inputVCF.getName()) + ".kin");
-        wrapper.execute(kingArgs);
-        if (!kinshipOutput.exists())
-        {
-            throw new PipelineJobException("Unable to find file: " + kinshipOutput.getPath());
-        }
-
-        File kinshipOutputTxt = new File(kinshipOutput.getPath() + ".txt.gz");
-        if (kinshipOutputTxt.exists())
-        {
-            kinshipOutputTxt.delete();
-        }
-
-        lineCount = SequencePipelineService.get().getLineCount(kinshipOutput)-1;
-        try
-        {
-            Compress.compressGzip(kinshipOutput, kinshipOutputTxt);
-            FileUtils.delete(kinshipOutput);
-        }
-        catch (IOException e)
-        {
-            throw new PipelineJobException(e);
-        }
-
-        output.addSequenceOutput(kinshipOutputTxt, "King Relatedness: " + inputVCF.getName(), "KING Relatedness", null, null, genome.getGenomeId(), "Total lines: " + lineCount);
-
-        return output;
-    }
-
-    private File createFamFile(File pedFile, File famFile) throws PipelineJobException
-    {
-        File newFamFile = new File(famFile.getParentFile(), "king.fam");
-
-        Map<String, String> pedMap = new CaseInsensitiveHashMap<>();
-        try (BufferedReader reader = Readers.getReader(pedFile))
-        {
-            String line;
-            while ((line = reader.readLine()) != null)
+            if (plinkOutKingFileGz.exists())
             {
-                String[] tokens = line.split(" ");
-                if (tokens.length != 6)
-                {
-                    throw new PipelineJobException("Improper ped line length: " + tokens.length);
-                }
+                plinkOutKingFileGz.delete();
+            }
 
-                pedMap.put(tokens[1], StringUtils.join(Arrays.asList("0", tokens[1], tokens[2], tokens[3], tokens[4], "-9"), "\t"));
+            try
+            {
+                Compress.compressGzip(plinkOutKingFile, plinkOutKingFileGz);
+                FileUtils.delete(plinkOutKingFile);
+            }
+            catch (IOException e)
+            {
+                throw new PipelineJobException(e);
             }
-        }
-        catch (IOException e)
-        {
-            throw new PipelineJobException(e);
-        }
 
-        try (BufferedReader reader = Readers.getReader(famFile);PrintWriter writer = PrintWriters.getPrintWriter(newFamFile))
-        {
-            String line;
-            while ((line = reader.readLine()) != null)
+            try
             {
-                String[] tokens = line.split("\t");
-                if (tokens.length != 6)
-                {
-                    throw new PipelineJobException("Improper ped line length: " + tokens.length);
-                }
-
-                String newRow = pedMap.get(tokens[1]);
-                if (newRow == null)
-                {
-                    getPipelineCtx().getLogger().warn("Unable to find pedigree entry for: " + tokens[1] + ", reusing original");
-                    writer.println(line);
-                }
-                else
-                {
-                    writer.println(newRow);
-                }
+                Files.touch(doneFile);
+            }
+            catch (IOException e)
+            {
+                throw new PipelineJobException(e);
             }
-        }
-        catch (IOException e)
-        {
-            throw new PipelineJobException(e);
-        }
 
-        return newFamFile;
+        }
+
+        long lineCount = SequencePipelineService.get().getLineCount(plinkOutKingFileGz)-1;
+        output.addSequenceOutput(plinkOutKingFileGz, "PLINK2/KING Relatedness: " + inputVCF.getName(), "PLINK2/KING Kinship", null, null, genome.getGenomeId(), "Total lines: " + lineCount);
+
+        return output;
     }
 
     public static class KingWrapper extends AbstractCommandWrapper
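The checkpointing added above follows a simple marker-file pattern so a resumed job skips plink invocations that already completed. A minimal sketch of the same idiom (the runStep callable is a hypothetical stand-in for plink.execute(...)):

    import com.google.common.io.Files;
    import java.io.File;
    import java.io.IOException;
    import org.labkey.api.pipeline.PipelineJobException;

    class DoneFileExample
    {
        static void runOnce(File doneFile, Runnable runStep) throws PipelineJobException
        {
            if (doneFile.exists())
            {
                return; // completed on a prior attempt; do not repeat
            }

            runStep.run();

            try
            {
                // record completion only after the step succeeds
                Files.touch(doneFile);
            }
            catch (IOException e)
            {
                throw new PipelineJobException(e);
            }
        }
    }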
diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/PlinkPcaStep.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/PlinkPcaStep.java
index 4c5c204e8..1750c4c4e 100644
--- a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/PlinkPcaStep.java
+++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/PlinkPcaStep.java
@@ -218,6 +218,15 @@ private void runBatch(File inputVCF, File outputDirectory, VariantProcessingStep
             args.add(SequencePipelineService.get().getMaxThreads(getPipelineCtx().getLogger()).toString());
         }
 
+        Integer maxRam = SequencePipelineService.get().getMaxRam();
+        if (maxRam != null)
+        {
+            args.add("--memory");
+
+            maxRam = maxRam * 1000;
+            args.add(String.valueOf(maxRam));
+        }
+
         args.addAll(getClientCommandArgs());
 
         getWrapper().execute(args);
diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/util/SequenceUtil.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/util/SequenceUtil.java
index 6289f959e..0a2ce0784 100644
--- a/SequenceAnalysis/src/org/labkey/sequenceanalysis/util/SequenceUtil.java
+++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/util/SequenceUtil.java
@@ -33,6 +33,7 @@
 import org.json.JSONArray;
 import org.json.JSONObject;
 import org.labkey.api.pipeline.PipelineJobException;
+import org.labkey.api.pipeline.PipelineJobService;
 import org.labkey.api.sequenceanalysis.SequenceAnalysisService;
 import org.labkey.api.sequenceanalysis.model.Readset;
 import org.labkey.api.sequenceanalysis.pipeline.ReferenceGenome;
@@ -44,6 +45,7 @@
 import org.labkey.api.util.StringUtilsLabKey;
 import org.labkey.api.writer.PrintWriters;
 import org.labkey.sequenceanalysis.run.util.BgzipRunner;
+import org.labkey.sequenceanalysis.run.variant.GatherVcfsCloudWrapper;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -428,8 +430,19 @@ public static void sortROD(File input, Logger log, Integer startColumnIdx) throw
         //then sort/append the records
         CommandWrapper wrapper = SequencePipelineService.get().getCommandWrapper(log);
         String cat = isCompressed ? "zcat" : "cat";
+
+        String sortThreading = "";
+        if (!PipelineJobService.get().isWebServer())
+        {
+            Integer threads = SequencePipelineService.get().getMaxThreads(log);
+            if (threads != null && threads > 1)
+            {
+                sortThreading = " --parallel " + threads;
+            }
+        }
+
         File tempSorted = new File(input.getParent(), "sorted.tmp");
-        wrapper.execute(Arrays.asList("/bin/sh", "-c", "{ cat '" + tempHeader.getPath() + "'; " + cat + " '" + input.getPath() + "' | grep -v '^#' | sort -V -k1,1" + (startColumnIdx == null ? "" : " -k" + startColumnIdx + "," + startColumnIdx + "n") + "; } " + (isCompressed ? " | bgzip -c " : "")), ProcessBuilder.Redirect.to(tempSorted));
+        wrapper.execute(Arrays.asList("/bin/sh", "-c", "{ cat '" + tempHeader.getPath() + "'; " + cat + " '" + input.getPath() + "' | grep -v '^#' | sort -V -k1,1" + (startColumnIdx == null ? "" : " -k" + startColumnIdx + "," + startColumnIdx + "n") + sortThreading + "; } " + (isCompressed ? " | bgzip -c " : "")), ProcessBuilder.Redirect.to(tempSorted));
 
         //replace the non-sorted output
         input.delete();
@@ -454,6 +467,33 @@ public static File combineVcfs(List<File> files, ReferenceGenome genome, File ou
     }
 
     public static File combineVcfs(List<File> files, ReferenceGenome genome, File outputGzip, Logger log, boolean multiThreaded, @Nullable Integer compressionLevel, boolean showTotals, boolean sortAfterMerge) throws PipelineJobException
+    {
+        if (sortAfterMerge)
+        {
+            return combineVcfsUsingZcat(files, genome, outputGzip, log, multiThreaded, compressionLevel, showTotals, sortAfterMerge);
+        }
+        else
+        {
+            log.info("Combining VCFs using GatherVcfsCloudWrapper");
+            new GatherVcfsCloudWrapper(log).gatherVcfs(outputGzip, files);
+
+            File idx = new File(outputGzip.getPath() + ".tbi");
+            if (!idx.exists())
+            {
+                throw new PipelineJobException("Unable to find index: " + idx.getPath());
+            }
+
+            if (showTotals)
+            {
+                log.info("total variants: " + SequenceAnalysisService.get().getVCFLineCount(outputGzip, log, false));
+                log.info("passing variants: " + SequenceAnalysisService.get().getVCFLineCount(outputGzip, log, true));
+            }
+
+            return outputGzip;
+        }
+    }
+
+    private static File combineVcfsUsingZcat(List<File> files, ReferenceGenome genome, File outputGzip, Logger log, boolean multiThreaded, @Nullable Integer compressionLevel, boolean showTotals, boolean sortAfterMerge) throws PipelineJobException
     {
         log.info("combining VCFs: ");
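For clarity, this is the shell pipeline the sortROD() change composes, shown for a hypothetical compressed input with startColumnIdx = 2 and four threads (the real code emits it as a single /bin/sh -c argument with stdout redirected to sorted.tmp):

    class SortCommandExample
    {
        public static void main(String[] args)
        {
            // sortThreading is only set off the webserver, when getMaxThreads() > 1
            String sortThreading = " --parallel 4";
            String cmd = "{ cat '/work/header.tmp'; zcat '/work/input.bed.gz'"
                    + " | grep -v '^#' | sort -V -k1,1 -k2,2n" + sortThreading
                    + "; } | bgzip -c";
            System.out.println(cmd);
        }
    }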
" | bgzip -c " : "")), ProcessBuilder.Redirect.to(tempSorted)); //replace the non-sorted output input.delete(); @@ -454,6 +467,33 @@ public static File combineVcfs(List files, ReferenceGenome genome, File ou } public static File combineVcfs(List files, ReferenceGenome genome, File outputGzip, Logger log, boolean multiThreaded, @Nullable Integer compressionLevel, boolean showTotals, boolean sortAfterMerge) throws PipelineJobException + { + if (sortAfterMerge) + { + return combineVcfsUsingZcat(files, genome, outputGzip, log, multiThreaded, compressionLevel, showTotals, sortAfterMerge); + } + else + { + log.info("Combining VCFs using GatherVcfsCloudWrapper"); + new GatherVcfsCloudWrapper(log).gatherVcfs(outputGzip, files); + + File idx = new File(outputGzip.getPath() + ".tbi"); + if (!idx.exists()) + { + throw new PipelineJobException("Unable to find index: " + idx.getPath()); + } + + if (showTotals) + { + log.info("total variants: " + SequenceAnalysisService.get().getVCFLineCount(outputGzip, log, false)); + log.info("passing variants: " + SequenceAnalysisService.get().getVCFLineCount(outputGzip, log, true)); + } + + return outputGzip; + } + } + + private static File combineVcfsUsingZcat(List files, ReferenceGenome genome, File outputGzip, Logger log, boolean multiThreaded, @Nullable Integer compressionLevel, boolean showTotals, boolean sortAfterMerge) throws PipelineJobException { log.info("combining VCFs: "); diff --git a/singlecell/resources/chunks/AppendNimble.R b/singlecell/resources/chunks/AppendNimble.R index d24ac73bf..d2fd17348 100644 --- a/singlecell/resources/chunks/AppendNimble.R +++ b/singlecell/resources/chunks/AppendNimble.R @@ -18,9 +18,8 @@ for (datasetId in names(seuratObjects)) { for (genomeId in names(nimbleGenomes)) { maxAmbiguityAllowed <- nimbleGenomeAmbiguousPreference[[genomeId]] - queryDatabaseForLineageUpdates <- queryDatabaseForLineageUpdatesPreference[[genomeId]] replaceExistingAssayData <- replaceExistingAssayDataByGenome[[genomeId]] - seuratObj <- Rdiscvr::DownloadAndAppendNimble(seuratObject = seuratObj, allowableGenomes = genomeId, ensureSamplesShareAllGenomes = ensureSamplesShareAllGenomes, targetAssayName = nimbleGenomes[[genomeId]], enforceUniqueFeatureNames = TRUE, maxAmbiguityAllowed = maxAmbiguityAllowed, maxLibrarySizeRatio = maxLibrarySizeRatio, queryDatabaseForLineageUpdates = queryDatabaseForLineageUpdates, replaceExistingAssayData = replaceExistingAssayData) + seuratObj <- Rdiscvr::DownloadAndAppendNimble(seuratObj = seuratObj, allowableGenomes = genomeId, ensureSamplesShareAllGenomes = ensureSamplesShareAllGenomes, targetAssayName = nimbleGenomes[[genomeId]], enforceUniqueFeatureNames = TRUE, maxAmbiguityAllowed = maxAmbiguityAllowed, maxLibrarySizeRatio = maxLibrarySizeRatio, replaceExistingAssayData = replaceExistingAssayData) } saveData(seuratObj, datasetId) diff --git a/singlecell/resources/chunks/PerformDefaultNimbleAppend.R b/singlecell/resources/chunks/PerformDefaultNimbleAppend.R index f4f37dcd1..746d1228c 100644 --- a/singlecell/resources/chunks/PerformDefaultNimbleAppend.R +++ b/singlecell/resources/chunks/PerformDefaultNimbleAppend.R @@ -16,7 +16,7 @@ for (datasetId in names(seuratObjects)) { printName(datasetId) seuratObj <- readSeuratRDS(seuratObjects[[datasetId]]) - seuratObj <- Rdiscvr::PerformDefaultNimbleAppend(seuratObj) + seuratObj <- Rdiscvr::PerformDefaultNimbleAppend(seuratObj, appendMHC = appendMHC, appendKIR = appendKIR, appendNKG = appendNKG, appendIG = appendIG, appendViral = appendViral) saveData(seuratObj, datasetId) 
diff --git a/singlecell/resources/chunks/PerformMhcDimRedux.R b/singlecell/resources/chunks/PerformMhcDimRedux.R
new file mode 100644
index 000000000..5eeb3ba8a
--- /dev/null
+++ b/singlecell/resources/chunks/PerformMhcDimRedux.R
@@ -0,0 +1,12 @@
+for (datasetId in names(seuratObjects)) {
+  printName(datasetId)
+  seuratObj <- readSeuratRDS(seuratObjects[[datasetId]])
+
+  seuratObj <- PerformMhcDimRedux(seuratObj)
+
+  saveData(seuratObj, datasetId)
+
+  # Cleanup
+  rm(seuratObj)
+  gc()
+}
\ No newline at end of file
diff --git a/singlecell/resources/web/singlecell/panel/NimbleAppendPanel.js b/singlecell/resources/web/singlecell/panel/NimbleAppendPanel.js
index 9f346ff14..5a6a4ead1 100644
--- a/singlecell/resources/web/singlecell/panel/NimbleAppendPanel.js
+++ b/singlecell/resources/web/singlecell/panel/NimbleAppendPanel.js
@@ -40,7 +40,7 @@ Ext4.define('SingleCell.panel.NimbleAppendPanel', {
             },LABKEY.ext4.GRIDBUTTONS.DELETERECORD()],
             store: {
                 type: 'array',
-                fields: ['genomeId', 'targetAssay','maxAmbiguityAllowed', 'queryDatabaseForLineageUpdates', 'replaceExistingAssayData']
+                fields: ['genomeId', 'targetAssay','maxAmbiguityAllowed', 'appendIfExists']
             },
             columns: [{
                 dataIndex: 'genomeId',
@@ -78,24 +78,14 @@ Ext4.define('SingleCell.panel.NimbleAppendPanel', {
                     minValue: 0
                 }
             },{
-                dataIndex: 'queryDatabaseForLineageUpdates',
+                dataIndex: 'appendIfExists',
                 width: 175,
-                header: 'Check for Lineage Updates',
+                header: 'Append To Assay If Exists',
                 editor: {
                     xtype: 'checkbox',
                     allowBlank: true,
                     value: false
                 }
-            },{
-                dataIndex: 'replaceExistingAssayData',
-                width: 150,
-                header: 'Replace Existing Data?',
-                editor: {
-                    xtype: 'checkbox',
-                    allowBlank: true,
-                    value: true
-                }
-            }]
+            }]
         }]
     });
@@ -106,7 +96,7 @@ Ext4.define('SingleCell.panel.NimbleAppendPanel', {
     getValue: function(){
         var ret = [];
        this.down('ldk-gridpanel').store.each(function(r, i) {
-            ret.push([r.data.genomeId, r.data.targetAssay, r.data.maxAmbiguityAllowed ?? '', !!r.data.queryDatabaseForLineageUpdates], !!r.data.replaceExistingAssayData);
+            ret.push([r.data.genomeId, r.data.targetAssay, r.data.maxAmbiguityAllowed ?? '', !!r.data.appendIfExists]);
        }, this);

        return Ext4.isEmpty(ret) ? null : JSON.stringify(ret);
@@ -143,8 +133,7 @@ Ext4.define('SingleCell.panel.NimbleAppendPanel', {
                 genomeId: row[0],
                 targetAssay: row[1],
                 maxAmbiguityAllowed: row[2],
-                queryDatabaseForLineageUpdates: !!row[3],
-                replaceExistingAssayData: !!row[4]
+                appendIfExists: !!row[3]
             });
             grid.store.add(rec);
         }, this);
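After this change, each grid row serializes as a four-element array; a sketch of the round-trip value (genome IDs and assay names hypothetical), which setValue() above unpacks by position:

    class GridValueExample
    {
        public static void main(String[] args)
        {
            // [genomeId, targetAssay, maxAmbiguityAllowed, appendIfExists];
            // note the old push() had a misplaced bracket, so the final boolean
            // was passed as a second argument instead of landing inside the row
            String value = "[[101,\"NimbleMHC\",\"\",true],[102,\"NimbleKIR\",2,false]]";
            System.out.println(value);
        }
    }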
diff --git a/singlecell/src/org/labkey/singlecell/SingleCellModule.java b/singlecell/src/org/labkey/singlecell/SingleCellModule.java
index 724b15efa..28d08146f 100644
--- a/singlecell/src/org/labkey/singlecell/SingleCellModule.java
+++ b/singlecell/src/org/labkey/singlecell/SingleCellModule.java
@@ -72,6 +72,7 @@
 import org.labkey.singlecell.pipeline.singlecell.MergeSeurat;
 import org.labkey.singlecell.pipeline.singlecell.NormalizeAndScale;
 import org.labkey.singlecell.pipeline.singlecell.PerformDefaultNimbleAppend;
+import org.labkey.singlecell.pipeline.singlecell.PerformMhcDimRedux;
 import org.labkey.singlecell.pipeline.singlecell.PhenotypePlots;
 import org.labkey.singlecell.pipeline.singlecell.PlotAssayFeatures;
 import org.labkey.singlecell.pipeline.singlecell.PlotAverageCiteSeqCounts;
@@ -295,6 +296,7 @@ public static void registerPipelineSteps()
         SequencePipelineService.get().registerPipelineStep(new UpdateSeuratPrototype.Provider());
         SequencePipelineService.get().registerPipelineStep(new RunDecoupler.Provider());
         SequencePipelineService.get().registerPipelineStep(new PerformDefaultNimbleAppend.Provider());
+        SequencePipelineService.get().registerPipelineStep(new PerformMhcDimRedux.Provider());
 
         SequenceAnalysisService.get().registerReadsetListener(new SingleCellReadsetListener());
     }
diff --git a/singlecell/src/org/labkey/singlecell/pipeline/singlecell/AppendNimble.java b/singlecell/src/org/labkey/singlecell/pipeline/singlecell/AppendNimble.java
index 106464839..023f59f33 100644
--- a/singlecell/src/org/labkey/singlecell/pipeline/singlecell/AppendNimble.java
+++ b/singlecell/src/org/labkey/singlecell/pipeline/singlecell/AppendNimble.java
@@ -77,7 +77,7 @@ protected Chunk createParamChunk(SequenceOutputHandler.JobContext ctx, List<SequenceOutputFile>
diff --git a/singlecell/src/org/labkey/singlecell/pipeline/singlecell/PerformMhcDimRedux.java b/singlecell/src/org/labkey/singlecell/pipeline/singlecell/PerformMhcDimRedux.java
new file mode 100644
+    public static class Provider extends AbstractPipelineStepProvider<PerformMhcDimRedux>
+    {
+        public Provider()
+        {
+            super("PerformMhcDimRedux", "Perform MHC DimRedux", "RDiscvr", "This will perform dimensionality reduction based on MHC data", Arrays.asList(
+
+            ), null, null);
+        }
+
+        @Override
+        public PerformMhcDimRedux create(PipelineContext ctx)
+        {
+            return new PerformMhcDimRedux(ctx, this);
+        }
+    }
+
+    @Override
+    public String getFileSuffix()
+    {
+        return "mhc";
+    }
+}
+