@@ -1,24 +1,24 @@
{
"{{ project_name }}.SENTIEON_INSTALL_DIR": "/opt/sentieon-genomics",
"{{ project_name }}.fasta": "GRCh38.d1.vd1.fa",
"{{ project_name }}.PIdocker": "registry-vpc.cn-shanghai.aliyuncs.com/pgx-docker-registry/picard:2.20.2",
"{{ project_name }}.platform": "ILLUMINA",
"{{ project_name }}.dbsnp_dir": "oss://pgx-reference-data/GRCh38.d1.vd1/",
"{{ project_name }}.disk_size": "500",
"{{ project_name }}.SAdocker": "registry-vpc.cn-shanghai.aliyuncs.com/pgx-docker-registry/star:2.7.0",
"{{ project_name }}.fastq_1": "{{ read1 }}",
"{{ project_name }}.machine": "{{ machine }}",
"{{ project_name }}.STdocker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/sentieon-genomics:v2018.08.01",
"{{ project_name }}.library": "{{ library }}",
"{{ project_name }}.fastq_2": "{{ read2 }}",
"{{ project_name }}.dbmills_dir": "oss://pgx-reference-data/GRCh38.d1.vd1/",
"{{ project_name }}.cluster_config": "OnDemand bcs.ps.g.8xlarge img-ubuntu-vpc",
"{{ project_name }}.SAref_dir": "oss://chinese-quartet/quartet-storage-data/reference_data/STAR_GRCh38_2.7.0d/",
"{{ project_name }}.SAMdocker": "registry-vpc.cn-shanghai.aliyuncs.com/pgx-docker-registry/samtools:v1.3.1",
"{{ project_name }}.STref_dir": "oss://chinese-quartet/quartet-storage-data/reference_data/",
"{{ project_name }}.db_mills": "Mills_and_1000G_gold_standard.indels.hg38.vcf",
"{{ project_name }}.sample": "{{ sample }}",
"{{ project_name }}.dbsnp": "dbsnp_146.hg38.vcf",
"{{ project_name }}.id": "{{ id }}",
"{{ project_name }}.GATKdocker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/gatk:v2019.01"
}
"{{ projecr_name }} .indexBam.docker": "registry-vpc.cn-shanghai.aliyuncs.com/pgx-docker-registry/samtools:v1.3.1", | |||
"{{ projecr_name }} .Haplotyper.docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/sentieon-genomics:v2018.08.01", | |||
"{{ projecr_name }} .SENTIEON_INSTALL_DIR": "/opt/sentieon-genomics", | |||
"{{ projecr_name }} .fasta": "GRCh38.d1.vd1.fa", | |||
"{{ projecr_name }} .Dedup.docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/sentieon-genomics:v2018.08.01", | |||
"{{ projecr_name }} .dbsnp_dir": "oss://pgx-reference-data/GRCh38.d1.vd1/", | |||
"{{ projecr_name }} .deduped_Metrics.docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/sentieon-genomics:v2018.08.01", | |||
"{{ projecr_name }} .disk_size": "500", | |||
"{{ projecr_name }} .fastq_1": "{{ fastq_1 }}", | |||
"{{ projecr_name }} .Metrics.docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/sentieon-genomics:v2018.08.01", | |||
"{{ projecr_name }} .STARref_dir": "oss://chinese-quartet/quartet-storage-data/reference_data/STAR_GRCh38_2.7.0d/", | |||
"{{ projecr_name }} .fastq_2": "{{ fastq_2 }}", | |||
"{{ projecr_name }} .dbmills_dir": "oss://pgx-reference-data/GRCh38.d1.vd1/", | |||
"{{ projecr_name }} .SplitReads.docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/sentieon-genomics:v2018.08.01", | |||
"{{ projecr_name }} .cluster_config": "OnDemand bcs.ps.g.8xlarge img-ubuntu-vpc", | |||
"{{ projecr_name }} .Hardfiltration.docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/sentieon-genomics:v2018.08.01", | |||
"{{ projecr_name }} .BQSR.docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/sentieon-genomics:v2018.08.01", | |||
"{{ projecr_name }} .mapping.docker": "registry-vpc.cn-shanghai.aliyuncs.com/pgx-docker-registry/star:2.7.0", | |||
"{{ projecr_name }} .db_mills": "Mills_and_1000G_gold_standard.indels.hg38.vcf", | |||
"{{ projecr_name }} .sample": "{{ sample }}", | |||
"{{ projecr_name }} .dbsnp": "dbsnp_146.hg38.vcf", | |||
"{{ projecr_name }} .ref_dir": "oss://chinese-quartet/quartet-storage-data/reference_data/" | |||
} |
@@ -9,8 +9,8 @@ task BQSR {
String db_mills
File Split_bam
File Split_bam_index
File STref_dir
String STdocker
File ref_dir
String docker
String cluster_config
String disk_size
@@ -21,17 +21,17 @@ task BQSR {
export SENTIEON_LICENSE=192.168.0.55:8990
nt=$(nproc)
${SENTIEON_INSTALL_DIR}/bin/sentieon driver -r ${STref_dir}/${fasta} -t $nt -i ${Split_bam} --algo QualCal -k ${dbsnp_dir}/${dbsnp} -k ${dbmills_dir}/${db_mills} ${sample}_recal_data.table
${SENTIEON_INSTALL_DIR}/bin/sentieon driver --traverse_param 1000000/10000 -r ${ref_dir}/${fasta} -t $nt -i ${Split_bam} --algo QualCal -k ${dbsnp_dir}/${dbsnp} -k ${dbmills_dir}/${db_mills} ${sample}_recal_data.table
${SENTIEON_INSTALL_DIR}/bin/sentieon driver -r ${STref_dir}/${fasta} -t $nt -i ${Split_bam} -q ${sample}_recal_data.table --algo QualCal -k ${dbsnp_dir}/${dbsnp} -k ${dbmills_dir}/${db_mills} ${sample}_recal_data.table.post --algo ReadWriter ${sample}.sorted.deduped.recaled.bam
${SENTIEON_INSTALL_DIR}/bin/sentieon driver --traverse_param 1000000/10000 -r ${ref_dir}/${fasta} -t $nt -i ${Split_bam} -q ${sample}_recal_data.table --algo QualCal -k ${dbsnp_dir}/${dbsnp} -k ${dbmills_dir}/${db_mills} ${sample}_recal_data.table.post --algo ReadWriter ${sample}.sorted.deduped.recaled.bam
${SENTIEON_INSTALL_DIR}/bin/sentieon driver -t $nt --algo QualCal --plot --before ${sample}_recal_data.table --after ${sample}_recal_data.table.post ${sample}_recal_data.csv
${SENTIEON_INSTALL_DIR}/bin/sentieon driver --traverse_param 1000000/10000 -t $nt --algo QualCal --plot --before ${sample}_recal_data.table --after ${sample}_recal_data.table.post ${sample}_recal_data.csv
${SENTIEON_INSTALL_DIR}/bin/sentieon plot QualCal -o ${sample}_bqsrreport.pdf ${sample}_recal_data.csv
>>>
runtime {
dockerTag:STdocker
dockerTag:docker
cluster: cluster_config
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
@@ -5,7 +5,7 @@ task Dedup {
File sorted_bam
File sorted_bam_index
String STdocker
String docker
String cluster_config
String disk_size
@@ -19,11 +19,11 @@ task Dedup {
cp ${sorted_bam} .
cp ${sorted_bam_index} .
nt=$(nproc)
${SENTIEON_INSTALL_DIR}/bin/sentieon driver -t $nt -i $sentieon_bam --algo LocusCollector --fun score_info ${sample}_score.txt
${SENTIEON_INSTALL_DIR}/bin/sentieon driver -t $nt -i $sentieon_bam --algo Dedup --rmdup --score_info ${sample}_score.txt --metrics ${sample}_dedup_metrics.txt ${sample}.sorted.deduped.bam
${SENTIEON_INSTALL_DIR}/bin/sentieon driver --traverse_param 1000000/10000 -t $nt -i $sentieon_bam --algo LocusCollector --fun score_info ${sample}_score.txt
${SENTIEON_INSTALL_DIR}/bin/sentieon driver --traverse_param 1000000/10000 -t $nt -i $sentieon_bam --algo Dedup --rmdup --score_info ${sample}_score.txt --metrics ${sample}_dedup_metrics.txt ${sample}.sorted.deduped.bam
>>>
runtime {
docker:STdocker
docker:docker
cluster: cluster_config
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
@@ -5,9 +5,9 @@ task Haplotyper {
File recaled_bam_index
File dbsnp_dir
String dbsnp
File STref_dir
File ref_dir
String sample
String STdocker
String docker
String cluster_config
String disk_size
@@ -16,11 +16,11 @@ command <<<
set -e
export SENTIEON_LICENSE=192.168.0.55:8990
nt=$(nproc)
${SENTIEON_INSTALL_DIR}/bin/sentieon driver -r ${STref_dir}/${fasta} -t $nt -i ${recaled_bam} --algo Haplotyper -d ${dbsnp_dir}/${dbsnp} --trim_soft_clip --call_conf 20 --emit_conf 20 ${sample}_hc.vcf
${SENTIEON_INSTALL_DIR}/bin/sentieon driver --traverse_param 1000000/10000 -r ${ref_dir}/${fasta} -t $nt -i ${recaled_bam} --algo Haplotyper -d ${dbsnp_dir}/${dbsnp} --trim_soft_clip --call_conf 20 --emit_conf 20 ${sample}_hc.vcf
>>>
runtime {
dockerTag:STdocker
dockerTag:docker
cluster: cluster_config
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
@@ -2,10 +2,10 @@ task Hardfiltration {
File vcf_file
File vcf_index
File STref_dir
File ref_dir
String fasta
String sample
String GATKdocker
String docker
String disk_size
String cluster_config
@@ -13,14 +13,14 @@ task Hardfiltration {
command <<<
set -o pipefail
set -e
java -Dsamjdk.use_async_io_read_samtools=false -Dsamjdk.use_async_io_write_samtools=true -Dsamjdk.use_async_io_write_tribble=false -Dsamjdk.compression_level=2 -Xmx32G -jar /gatk/gatk-package-4.1.0.0-local.jar VariantFiltration -V ${vcf_file} -O ${sample}_hc_filtered.vcf -R ${STref_dir}/${fasta} --cluster-window-size 35 --cluster-size 3 --filter-name FS --filter-expression "FS > 30.0" --filter-name QD --filter-expression "QD < 2.0" --filter-name LowDepth --filter-expression "DP < 5"
java -Dsamjdk.use_async_io_read_samtools=false -Dsamjdk.use_async_io_write_samtools=true -Dsamjdk.use_async_io_write_tribble=false -Dsamjdk.compression_level=2 -Xmx32G -jar /gatk/gatk-package-4.1.0.0-local.jar VariantFiltration -V ${vcf_file} -O ${sample}_hc_filtered.vcf -R ${ref_dir}/${fasta} --cluster-window-size 35 --cluster-size 3 --filter-name FS --filter-expression "FS > 30.0" --filter-name QD --filter-expression "QD < 2.0" --filter-name LowDepth --filter-expression "DP < 5"
cat ${sample}_hc_filtered.vcf |grep "#" > ${sample}_hc_filtered.vcf.header.tmp
cat ${sample}_hc_filtered.vcf |grep PASS > ${sample}_hc_filtered.vcf.tmp
cat ${sample}_hc_filtered.vcf.header.tmp ${sample}_hc_filtered.vcf.tmp > ${sample}_hc_PASS.vcf
>>>
runtime {
docker:GATKdocker
docker:docker
cluster:cluster_config
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
@@ -1,10 +1,9 @@
task Metrics {
File STref_dir
File ref_dir
String SENTIEON_INSTALL_DIR
String sample
String STdocker
String docker
String cluster_config
String fasta
@@ -23,7 +22,7 @@ task Metrics {
sentieon_bam_index=`basename ${sorted_bam_index}`
cp ${sorted_bam} .
cp ${sorted_bam_index} .
${SENTIEON_INSTALL_DIR}/bin/sentieon driver -r ${STref_dir}/${fasta} -t $nt -i $sentieon_bam \
${SENTIEON_INSTALL_DIR}/bin/sentieon driver --traverse_param 1000000/10000 -r ${ref_dir}/${fasta} -t $nt -i $sentieon_bam \
--algo GCBias --summary ${sample}_gc_summary.txt ${sample}_gc_metrics.txt \
--algo MeanQualityByCycle ${sample}_mq_metrics.txt \
--algo QualDistribution ${sample}_qd_metrics.txt \
@@ -34,7 +33,7 @@ task Metrics {
>>>
runtime {
docker:STdocker
docker:docker
cluster: cluster_config
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
@@ -0,0 +1,30 @@
task SamToBamSentieon {
File aligned_sam
String SENTIEON_INSTALL_DIR
String sample
String STdocker
String cluster_config
String disk_size
command <<<
set -o pipefail
set -e
export SENTIEON_LICENSE=192.168.0.55:8990
nt=$(nproc)
${SENTIEON_INSTALL_DIR}/bin/sentieon util sort -t $nt --sam2bam -i ${aligned_sam} -o ${sample}.bam
>>>
runtime {
docker:STdocker
cluster: cluster_config
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
}
output {
File sorted_bam = "${sample}.bam"
}
}
@@ -3,12 +3,12 @@ task SplitReads {
String SENTIEON_INSTALL_DIR
String sample
File STref_dir
File ref_dir
File fasta
File Dedup_bam
File Dedup_bam_index
String STdocker
String docker
String cluster_config
String disk_size
@@ -18,10 +18,10 @@ task SplitReads {
set -e
export SENTIEON_LICENSE=192.168.0.55:8990
nt=$(nproc)
${SENTIEON_INSTALL_DIR}/bin/sentieon driver -t $nt -r ${STref_dir}/${fasta} -i ${Dedup_bam} --algo RNASplitReadsAtJunction --reassign_mapq 255:60 ${sample}.split.bam
${SENTIEON_INSTALL_DIR}/bin/sentieon driver --traverse_param 1000000/10000 -t $nt -r ${ref_dir}/${fasta} -i ${Dedup_bam} --algo RNASplitReadsAtJunction --reassign_mapq 255:60 ${sample}.split.bam
>>>
runtime {
docker:STdocker
docker:docker
cluster: cluster_config
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
@@ -1,13 +1,13 @@
task deduped_Metrics {
File STref_dir
File ref_dir
String SENTIEON_INSTALL_DIR
String sample
String fasta
File Dedup_bam
File Dedup_bam_index
String STdocker
String docker
String cluster_config
String disk_size
@@ -17,7 +17,7 @@ task deduped_Metrics {
set -e
export SENTIEON_LICENSE=192.168.0.55:8990
nt=$(nproc)
${SENTIEON_INSTALL_DIR}/bin/sentieon driver -r ${STref_dir}/${fasta} -t $nt -i ${Dedup_bam} \
${SENTIEON_INSTALL_DIR}/bin/sentieon driver --traverse_param 1000000/10000 -r ${ref_dir}/${fasta} -t $nt -i ${Dedup_bam} \
--algo CoverageMetrics --omit_base_output ${sample}_deduped_coverage_metrics \
--algo MeanQualityByCycle ${sample}_deduped_mq_metrics.txt \
--algo QualDistribution ${sample}_deduped_qd_metrics.txt \
@@ -27,7 +27,7 @@ task deduped_Metrics {
>>>
runtime {
docker:STdocker
docker:docker
cluster: cluster_config
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
@@ -1,23 +1,23 @@
task indexBam {
File sorted_bam
String sample
String SAMdocker
String docker
String cluster_config
String disk_size
command <<<
set -o pipefail
set -e
/opt/conda/bin/samtools index ${sorted_bam} ${sample}.bam.bai
/opt/conda/bin/samtools index ${sorted_bam} ${sample}_Aligned.sortedByCoord.out.bai
>>>
runtime {
docker:SAMdocker
docker:docker
cluster: cluster_config
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
}
output {
File sorted_bam_index = "${sample}.bam.bai"
File sorted_bam_index = "${sample}_Aligned.sortedByCoord.out.bai"
}
}
@@ -1,13 +1,13 @@
task mapping {
File SAref_dir
File STref_dir
File STARref_dir
File ref_dir
File fasta
File fastq_1
File fastq_2
String sample
String SAdocker
String docker
String cluster_config
String disk_size
@@ -15,21 +15,25 @@ task mapping {
set -o pipefail
set -e
STAR --genomeDir ${SAref_dir} --readFilesIn ${fastq_1} ${fastq_2} --readFilesCommand zcat --runThreadN 20 --outFileNamePrefix OnePass_
STAR --genomeDir ${STARref_dir} --readFilesIn ${fastq_1} ${fastq_2} --readFilesCommand zcat --runThreadN 20 --outFileNamePrefix OnePass_
STAR --runMode genomeGenerate --genomeDir "./" --genomeFastaFiles ${STref_dir}/${fasta} --sjdbFileChrStartEnd OnePass_SJ.out.tab --sjdbOverhang 75 --runThreadN 12
STAR --runMode genomeGenerate --genomeDir "./" --genomeFastaFiles ${ref_dir}/${fasta} --sjdbFileChrStartEnd OnePass_SJ.out.tab --sjdbOverhang 75 --runThreadN 12
STAR --genomeDir "./" --readFilesIn ${fastq_1} ${fastq_2} --readFilesCommand zcat --runThreadN 20 --outFileNamePrefix ${sample}_
STAR --genomeDir "./" --readFilesIn ${fastq_1} ${fastq_2} --readFilesCommand zcat --runThreadN 20 --outFileNamePrefix ${sample}_ --outSAMtype BAM SortedByCoordinate --outSAMattrRGline "ID:${sample}" "SM:${sample}" "PL:ILLUMINAL" | |||
>>>
runtime {
docker:SAdocker
docker:docker
cluster: cluster_config
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
}
output {
File aligned_sam = "${sample}_Aligned.out.sam"
File sorted_bam = "${sample}_Aligned.sortedByCoord.out.bam"
File SJ_out_tab = "${sample}_SJ.out.tab"
File log_progress_out = "${sample}_Log.progress.out"
File log_final_out = "${sample}_Log.final.out"
File log_out = "${sample}_Log.out"
}
}
@@ -1,5 +1,4 @@
import "./tasks/mapping.wdl" as mapping
import "./tasks/SamToBam.wdl" as SamToBam
import "./tasks/indexBam.wdl" as indexBam
import "./tasks/Metrics.wdl" as Metrics
import "./tasks/Dedup.wdl" as Dedup
@@ -13,52 +12,28 @@ workflow {{ project_name }} {
File fastq_1
File fastq_2
File SAref_dir
File STref_dir
File dbsnp_dir
File dbsnp
File dbmills_dir
File db_mills
File STARref_dir
File ref_dir
String SENTIEON_INSTALL_DIR
String sample
String STdocker
String SAMdocker
String SAdocker
String PIdocker
String GATKdocker
String fasta
String disk_size
String cluster_config
String id
String library
String platform
String machine
call mapping.mapping as mapping {
input:
SAref_dir=SAref_dir,
STref_dir=STref_dir,
sample=sample,
STARref_dir=STARref_dir,
ref_dir=ref_dir,
fasta=fasta,
fastq_1=fastq_1,
fastq_2=fastq_2,
SAdocker=SAdocker,
disk_size=disk_size,
cluster_config=cluster_config
}
call SamToBam.SamToBam as SamToBam {
input:
aligned_sam=mapping.aligned_sam,
sample=sample,
id=id,
library=library,
platform=platform,
machine=machine,
PIdocker=PIdocker,
disk_size=disk_size,
cluster_config=cluster_config
}
@@ -66,21 +41,19 @@ workflow {{ project_name }} {
call indexBam.indexBam as indexBam {
input:
sample=sample,
sorted_bam=SamToBam.sorted_bam,
SAMdocker=SAMdocker,
sorted_bam=mapping.sorted_bam,
disk_size=disk_size,
cluster_config=cluster_config
}
call Metrics.Metrics as Metrics {
input:
ref_dir=ref_dir,
SENTIEON_INSTALL_DIR=SENTIEON_INSTALL_DIR,
fasta=fasta,
STref_dir=STref_dir,
sorted_bam=SamToBam.sorted_bam,
sorted_bam=mapping.sorted_bam,
sorted_bam_index=indexBam.sorted_bam_index,
sample=sample,
STdocker=STdocker,
disk_size=disk_size,
cluster_config=cluster_config
}
@@ -88,36 +61,33 @@ workflow {{ project_name }} {
call Dedup.Dedup as Dedup {
input:
SENTIEON_INSTALL_DIR=SENTIEON_INSTALL_DIR,
sorted_bam=SamToBam.sorted_bam,
sorted_bam=mapping.sorted_bam,
sorted_bam_index=indexBam.sorted_bam_index,
sample=sample,
STdocker=STdocker,
disk_size=disk_size,
cluster_config=cluster_config
}
call deduped_Metrics.deduped_Metrics as deduped_Metrics {
input:
ref_dir=ref_dir,
SENTIEON_INSTALL_DIR=SENTIEON_INSTALL_DIR,
fasta=fasta,
STref_dir=STref_dir,
Dedup_bam=Dedup.Dedup_bam,
Dedup_bam_index=Dedup.Dedup_bam_index,
sample=sample,
STdocker=STdocker,
disk_size=disk_size,
cluster_config=cluster_config
}
call SplitReads.SplitReads as SplitReads {
input:
ref_dir=ref_dir,
SENTIEON_INSTALL_DIR=SENTIEON_INSTALL_DIR,
fasta=fasta,
STref_dir=STref_dir,
Dedup_bam=Dedup.Dedup_bam,
Dedup_bam_index=Dedup.Dedup_bam_index,
sample=sample,
STdocker=STdocker,
disk_size=disk_size,
cluster_config=cluster_config
}
@@ -125,16 +95,15 @@ workflow {{ project_name }} {
call BQSR.BQSR as BQSR {
input:
SENTIEON_INSTALL_DIR=SENTIEON_INSTALL_DIR,
ref_dir=ref_dir,
fasta=fasta,
dbsnp_dir=dbsnp_dir,
dbsnp=dbsnp,
dbmills_dir=dbmills_dir,
db_mills=db_mills,
STref_dir=STref_dir,
Split_bam=SplitReads.Split_bam,
Split_bam_index=SplitReads.Split_bam_index,
sample=sample,
STdocker=STdocker,
disk_size=disk_size,
cluster_config=cluster_config
}
@@ -143,28 +112,26 @@ workflow {{ project_name }} {
input:
SENTIEON_INSTALL_DIR=SENTIEON_INSTALL_DIR,
fasta=fasta,
STref_dir=STref_dir,
recaled_bam=BQSR.recaled_bam,
recaled_bam_index=BQSR.recaled_bam_index,
dbsnp_dir=dbsnp_dir,
dbsnp=dbsnp,
ref_dir=ref_dir,
sample=sample,
STdocker=STdocker,
disk_size=disk_size,
cluster_config=cluster_config
}
call Hardfiltration.Hardfiltration as Hardfiltration {
input:
ref_dir=ref_dir,
fasta=fasta,
STref_dir=STref_dir,
vcf_file=Haplotyper.vcf,
vcf_index=Haplotyper.vcf_idx,
sample=sample,
GATKdocker=GATKdocker,
disk_size=disk_size,
cluster_config=cluster_config
}
}
}