Browse Source

更新 'workflow.wdl'

master
meng 2 years ago
parent
commit
b190db47d3
1 changed files with 75 additions and 87 deletions
  1. +75
    -87
      workflow.wdl

+ 75
- 87
workflow.wdl View File

@@ -1,16 +1,3 @@
import "./tasks/BQSR.wdl" as Sentieon_BQSR
import "./tasks/TNseq.wdl" as TNseq
import "./tasks/TNscope.wdl" as TNscope
import "./tasks/manta.wdl" as manta_calling
import "./tasks/strelka.wdl" as strelka_calling
import "./tasks/bcftools.wdl" as bcftools
import "./tasks/bcftools_concat.wdl" as bcftools_concat
import "./tasks/annovar.wdl" as annovar
import "./tasks/Haplotyper.wdl" as Haplotyper
import "./tasks/MSIsensor.wdl" as MSIsensor
import "./tasks/tmb.wdl" as tmb

# VIP customers (variant calling with TNseq only) — NOTE(review): this diff renames the somatic call to TNscope below, so this comment may be stale; confirm intended caller.
# Tumor/normal somatic-calling pipeline template. `{{ project_name }}` is a
# template placeholder filled in by a project generator before the WDL runs.
workflow {{ project_name }} {
# sample info: deduplicated tumor/normal BAMs (and indexes, per later calls)
File? normal_deduped_bam
# NOTE(review): the `@@ ... @@` lines below are unified-diff hunk headers left
# over from the commit-page rendering; they are not WDL and the input list
# shown here is incomplete — restore from the committed file before use.
@@ -33,6 +20,9 @@ workflow {{ project_name }} {
File? regions
File baseline
File hg38_CDS
# BQSR recalibration tables, added by this commit and passed to the somatic
# caller below as tumor_recall_data / normal_recall_data.
File tumor_recal_table
File normal_recal_table




@@ -57,49 +47,7 @@ workflow {{ project_name }} {
# Flag forwarded to the (currently commented-out) TMB task below —
# presumably toggles allele-frequency filtering; TODO confirm in tasks/tmb.wdl.
Boolean AF_filter


# call MSIsensor.MSIsensor as MSIsensor {
# input:
# sample=sample_id,
# fasta=ref_fasta,
# ref_dir=ref_fasta_dir,
# normal_bam=normal_deduped_bam,
# normal_bam_index=normal_deduped_bam_bai,
# tumor_bam=tumor_deduped_bam,
# tumor_bam_index=tumor_deduped_bam_bai,
# baseline=baseline,
# docker=docker_MSIsensor,
# cluster_config=cluster_config,
# disk_size=disk_size
# }


# call Haplotyper.Haplotyper as Haplotyper {
# input:
# SENTIEON_LICENSE=SENTIEON_LICENSE,
# sample=sample_id + '.N',
# fasta=ref_fasta,
# ref_dir=ref_fasta_dir,
# recaled_bam=normal_deduped_bam,
# recaled_bam_index=normal_deduped_bam_bai,
# dbsnp=dbsnp,
# dbsnp_dir=dbsnp_dir,
# regions=regions,
# docker=docker_sentieon,
# disk_size=disk_size,
# cluster_config=cluster_config
# }

# call bcftools.bcftools as Haplotyper_bcftools {
# input:
# vcf=Haplotyper.vcf,
# fasta=ref_fasta,
# ref_dir=ref_fasta_dir,
# docker=docker_bcftools,
# cluster_config=cluster_config,
# disk_size=disk_size
# }

# Somatic SNV/indel calling with Sentieon on the tumor/normal pair.
# NOTE(review): the two `call` lines below are the OLD (TNseq) and NEW
# (TNscope) versions shown side by side by the diff renderer — only the
# TNscope line should exist in the committed file; as pasted this is not
# valid WDL. The same applies to other duplicated lines in this page.
call TNseq.sentieon_TNseq as sentieon_TNseq{
call TNscope.sentieon_TNscope as sentieon_TNscope{
input:
sample_id = sample_id,
tumor_bam = tumor_deduped_bam,
@@ -108,23 +56,23 @@ call TNseq.sentieon_TNseq as sentieon_TNseq{
normal_bam_bai = normal_deduped_bam_bai,
tumor_name = sample_id+'_T',
normal_name = sample_id+'_N',
# BQSR recalibration tables, newly threaded in by this commit.
tumor_recall_data = tumor_recal_table,
normal_recall_data = normal_recal_table,

# reference genome and germline/dbSNP resources
ref_dir = ref_fasta_dir,
ref_fasta = ref_fasta,
germline_resource = germline_resource,
germline_resource_tbi = germline_resource_tbi,
dbsnp_dir = dbsnp_dir,
dbsnp = dbsnp,
# execution environment
docker = docker_sentieon,
cluster_config = cluster_config,
disk_size = disk_size,
SENTIEON_LICENSE = SENTIEON_LICENSE
}



# bcftools post-processing (normalization) of the TNscope VCF; its norm_vcf
# output feeds the ANNOVAR annotation step below.
# NOTE(review): the duplicated `call`/`vcf=` lines and the embedded `@@ ... @@`
# hunk header are diff-render artifacts — keep only the TNscope variants and
# drop the hunk header in the real file.
call bcftools.bcftools as TNseq_bcftools {
call bcftools.bcftools as TNscope_bcftools {
input:
vcf=sentieon_TNseq.vcf,
vcf=sentieon_TNscope.vcf,
fasta=ref_fasta,
ref_dir=ref_fasta_dir,
docker=docker_bcftools,
@@ -132,40 +80,80 @@ call bcftools.bcftools as TNseq_bcftools {
disk_size=disk_size
}

# Manta structural-variant calling on the tumor/normal pair. Its candidate
# indel VCF (manta_indel_vcf) seeds the Strelka call below, per Strelka's
# recommended somatic workflow.
call manta_calling.manta_calling as manta_calling{
input:
tumor_bam = tumor_deduped_bam,
tumor_bam_bai = tumor_deduped_bam_bai,
normal_bam = normal_deduped_bam,
normal_bam_bai = normal_deduped_bam_bai,
ref_fasta = ref_fasta,
ref_dir = ref_fasta_dir,
sample_id = sample_id,
# execution environment
docker = docker_manta,
cluster_config = cluster_config,
disk_size = disk_size

}

# Strelka somatic small-variant calling, using Manta's candidate indels as
# input. Produces separate SNV and indel VCFs that are concatenated below.
call strelka_calling.strelka_calling as strelka_calling{
input:
tumor_bam = tumor_deduped_bam,
tumor_bam_bai = tumor_deduped_bam_bai,
normal_bam = normal_deduped_bam,
normal_bam_bai = normal_deduped_bam_bai,
ref_fasta = ref_fasta,
ref_dir = ref_fasta_dir,
sample_id = sample_id,
# candidate indels from the Manta step above
manta_indel_vcf = manta_calling.manta_indel_vcf,
manta_indel_vcf_index = manta_calling.manta_indel_vcf_index,
# execution environment
docker=docker_strelka,
cluster_config=cluster_config,
disk_size=disk_size
}

# Concatenate Strelka's separate indel and SNV VCFs into a single VCF
# (concat_vcf), consumed by the normalization step below.
# NOTE(review): the call alias `bcftools_concat` is identical to the import
# alias on the `import "./tasks/bcftools_concat.wdl"` line — consider renaming
# the call alias (e.g. strelka_concat) to avoid namespace shadowing.
call bcftools_concat.bcftools as bcftools_concat{
input:
ref_dir=ref_fasta_dir,
fasta=ref_fasta,
vcf_indels=strelka_calling.indel_vcf,
vcf_snvs=strelka_calling.snv_vcf,
sample_id=sample_id,
# execution environment
docker=docker_bcftools,
cluster_config=cluster_config,
disk_size=disk_size

}

# bcftools normalization of the concatenated Strelka VCF, mirroring the
# TNscope_bcftools step so both callers' outputs get the same post-processing.
call bcftools.bcftools as strelka_bcftools {
input:
vcf=bcftools_concat.concat_vcf,
fasta=ref_fasta,
ref_dir=ref_fasta_dir,
# execution environment
docker=docker_bcftools,
cluster_config=cluster_config,
disk_size=disk_size
}

# Optional ANNOVAR annotation, gated on the Boolean `Annovar` workflow input.
# NOTE(review): the duplicated `call`/`vcf=` lines below are the old (TNseq)
# and new (TNscope) versions shown side by side by the diff renderer — only
# the TNscope variants belong in the committed file.
if (Annovar){
call annovar.ANNOVAR as TNseq_ANNOVAR {

call annovar.ANNOVAR as TNscope_ANNOVAR {
input:
vcf=TNseq_bcftools.norm_vcf,
vcf=TNscope_bcftools.norm_vcf,
annovar_database=annovar_database,
# execution environment
docker=docker_annovar,
cluster_config=cluster_config,
disk_size=disk_size
}

# call tmb.TMB as TMB{
# input:
# sample=sample_id,
# hg38_CDS=hg38_CDS,
# regions=regions,
# snpindel_txt=TNseq_ANNOVAR.multianno_txt,
# docker=docker_tmb,
# cluster_config=cluster_config,
# disk_size=disk_size,
# AF_filter=AF_filter
# }

# call annovar.ANNOVAR as Haplotyper_ANNOVAR {
# input:
# vcf=Haplotyper_bcftools.norm_vcf,
# annovar_database=annovar_database,
# docker=docker_annovar,
# cluster_config=cluster_config,
# disk_size=disk_size
# }


# ANNOVAR annotation of the germline (Haplotyper) normalized VCF.
# BUG(review): this call references Haplotyper_bcftools.norm_vcf, but the
# `Haplotyper` and `Haplotyper_bcftools` calls are commented out earlier in
# this file — as committed, this reference dangles and the workflow should
# fail WDL validation. Either restore those calls or comment this one out.
call annovar.ANNOVAR as Haplotyper_ANNOVAR {
input:
vcf=Haplotyper_bcftools.norm_vcf,
annovar_database=annovar_database,
# execution environment
docker=docker_annovar,
cluster_config=cluster_config,
disk_size=disk_size
}

}
}

Loading…
Cancel
Save