
Improve format.

tags/v0.2.2
YJC 4 years ago
commit c37b3ec35b
11 changed files with 239 additions and 237 deletions:

1. README.md (+1, -0)
2. defaults (+30, -27)
3. inputs (+34, -34)
4. tasks/ballgown.wdl (+24, -24)
5. tasks/count.wdl (+23, -23)
6. tasks/fastp.wdl (+33, -33)
7. tasks/hisat2.wdl (+23, -23)
8. tasks/qualimap.wdl (+4, -5)
9. tasks/samtools.wdl (+33, -33)
10. tasks/stringtie.wdl (+24, -24)
11. workflow.wdl (+10, -11)

README.md (+1, -0)

@@ -28,6 +28,7 @@ The samplec.csv looks like this:
| --------- | --------- | ----- | ----- | ------------ | ---------------- | ----------- |
| FDU_D5 | 200 | oss://choppy-app-example-data/RNAseq/Downsampling_Quartet_RNAseqQC_12samples/Quartet_RNA_ILM_NovaSeq_RiboZero_FDU_D5_1_20190710_CleanData_R1_400k.gz | oss://choppy-app-example-data/RNAseq/Downsampling_Quartet_RNAseqQC_12samples/Quartet_RNA_ILM_NovaSeq_RiboZero_FDU_D5_1_20190710_CleanData_R2_400k.gz | false | true | true |


- sample_id: generally used as the prefix for output file names
- read1: path to the R1 FASTQ file
- read2: path to the R2 FASTQ file
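To make the row above concrete, a minimal sample sheet could look like the sketch below. Only sample_id, read1 and read2 are documented in this hunk; the remaining column names (disk_size and the three per-sample switches) are not shown here and are assumptions inferred from the inputs template, so treat the header as illustrative rather than authoritative.

sample_id,disk_size,read1,read2,trim_adapter,pre_alignment_qc,qualimap_qc
FDU_D5,200,oss://choppy-app-example-data/RNAseq/Downsampling_Quartet_RNAseqQC_12samples/Quartet_RNA_ILM_NovaSeq_RiboZero_FDU_D5_1_20190710_CleanData_R1_400k.gz,oss://choppy-app-example-data/RNAseq/Downsampling_Quartet_RNAseqQC_12samples/Quartet_RNA_ILM_NovaSeq_RiboZero_FDU_D5_1_20190710_CleanData_R2_400k.gz,false,true,true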

defaults (+30, -27)

@@ -1,29 +1,32 @@
{
"idx": "oss://pgx-reference-data/reference/hisat2/grch38_snp_tran/",
"gtf": "oss://pgx-reference-data/reference/annotation/Homo_sapiens.GRCh38.93.gtf",
"idx_prefix": "genome_snp_tran",
"screen_ref_dir": "oss://pgx-reference-data/fastq_screen_reference/",
"fastq_screen_conf": "oss://pgx-reference-data/fastq_screen_reference/fastq_screen.conf",
"fastqscreen_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/fastqscreen:0.12.0",
"fastqscreen_cluster": "OnDemand bcs.b2.3xlarge img-ubuntu-vpc",
"fastqc_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/fastqc:v0.11.5",
"fastqc_cluster": "OnDemand bcs.b2.3xlarge img-ubuntu-vpc",
"fastp_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/fastp:0.19.6",
"fastp_cluster": "OnDemand bcs.a2.xlarge img-ubuntu-vpc",
"adapter_sequence": "AGATCGGAAGAGCACACGTCTGAACTCCAGTCA",
"adapter_sequence_r2": "AGATCGGAAGAGCGTCGTGTAGGGAAAGAGTGT",
"hisat2_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/hisat2:v2.1.0-2",
"hisat2_cluster": "OnDemand bcs.a2.3xlarge img-ubuntu-vpc",
"samtools_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/samtools:v1.3.1",
"samtools_cluster": "OnDemand bcs.a2.large img-ubuntu-vpc",
"qualimap_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/qualimap:2.0.0",
"qualimap_cluster": "OnDemand bcs.a2.3xlarge img-ubuntu-vpc",
"stringtie_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/stringtie:v1.3.4",
"stringtie_cluster": "OnDemand bcs.a2.large img-ubuntu-vpc",
"ballgown_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/pgx-ballgown:0.0.1",
"ballgown_cluster": "OnDemand bcs.ps.g.large img-ubuntu-vpc",
"count_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/count:v1.0",
"count_cluster": "OnDemand bcs.ps.g.large img-ubuntu-vpc",
"insert_size":"8000",
"count_length": "150"
"idx": "oss://pgx-reference-data/reference/hisat2/grch38_snp_tran/",
"gtf": "oss://pgx-reference-data/reference/annotation/Homo_sapiens.GRCh38.93.gtf",
"idx_prefix": "genome_snp_tran",
"screen_ref_dir": "oss://pgx-reference-data/fastq_screen_reference/",
"fastq_screen_conf": "oss://pgx-reference-data/fastq_screen_reference/fastq_screen.conf",
"fastqscreen_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/fastqscreen:0.12.0",
"fastqscreen_cluster": "OnDemand bcs.b2.3xlarge img-ubuntu-vpc",
"fastqc_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/fastqc:v0.11.5",
"fastqc_cluster": "OnDemand bcs.b2.3xlarge img-ubuntu-vpc",
"fastp_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/fastp:0.19.6",
"fastp_cluster": "OnDemand bcs.a2.xlarge img-ubuntu-vpc",
"adapter_sequence": "AGATCGGAAGAGCACACGTCTGAACTCCAGTCA",
"adapter_sequence_r2": "AGATCGGAAGAGCGTCGTGTAGGGAAAGAGTGT",
"hisat2_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/hisat2:v2.1.0-2",
"hisat2_cluster": "OnDemand bcs.a2.3xlarge img-ubuntu-vpc",
"samtools_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/samtools:v1.3.1",
"samtools_cluster": "OnDemand bcs.a2.large img-ubuntu-vpc",
"qualimap_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/qualimap:2.0.0",
"qualimap_cluster": "OnDemand bcs.a2.3xlarge img-ubuntu-vpc",
"stringtie_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/stringtie:v1.3.4",
"stringtie_cluster": "OnDemand bcs.a2.large img-ubuntu-vpc",
"ballgown_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/pgx-ballgown:0.0.1",
"ballgown_cluster": "OnDemand bcs.ps.g.large img-ubuntu-vpc",
"count_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/count:v1.0",
"count_cluster": "OnDemand bcs.ps.g.large img-ubuntu-vpc",
"insert_size":"8000",
"count_length": "150",
"trim_adapter": true,
"pre_alignment_qc": true,
"qualimap_qc": true
}

inputs (+34, -34)

@@ -1,36 +1,36 @@
{
"{{ project_name }}.read1": "{{ read1 }}",
"{{ project_name }}.read2": "{{ read2 }}",
"{{ project_name }}.sample_id": "{{ sample_id }}",
"{{ project_name }}.screen_ref_dir": "{{ screen_ref_dir }}",
"{{ project_name }}.fastq_screen_conf": "{{ fastq_screen_conf }}",
"{{ project_name }}.idx": "{{ idx }}",
"{{ project_name }}.gtf": "{{ gtf }}",
"{{ project_name }}.disk_size": "{{ disk_size if disk_size != '' else 200}}",
"{{ project_name }}.idx_prefix": "{{ idx_prefix }}",
"{{ project_name }}.fastqscreen_docker": "{{ fastqscreen_docker }}",
"{{ project_name }}.fastqscreen_cluster": "{{ fastqscreen_cluster }}",
"{{ project_name }}.fastqc_cluster": "{{ fastqc_cluster }}",
"{{ project_name }}.fastqc_docker": "{{ fastqc_docker }}",
"{{ project_name }}.fastp_docker": "{{ fastp_docker }}",
"{{ project_name }}.fastp_cluster": "{{ fastp_cluster }}",
"{{ project_name }}.adapter_sequence": "{{ adapter_sequence }}",
"{{ project_name }}.adapter_sequence_r2": "{{ adapter_sequence_r2 }}",
"{{ project_name }}.hisat2_docker": "{{ hisat2_docker }}",
"{{ project_name }}.hisat2_cluster": "{{ hisat2_cluster }}",
"{{ project_name }}.insert_size": "{{ insert_size }}",
"{{ project_name }}.samtools_docker": "{{ samtools_docker }}",
"{{ project_name }}.samtools_cluster": "{{ samtools_cluster }}",
"{{ project_name }}.qualimap_docker": "{{ qualimap_docker }}",
"{{ project_name }}.qualimap_cluster": "{{ qualimap_cluster }}",
"{{ project_name }}.stringtie_docker": "{{ stringtie_docker }}",
"{{ project_name }}.stringtie_cluster": "{{ stringtie_cluster }}",
"{{ project_name }}.ballgown_docker": "{{ ballgown_docker }}",
"{{ project_name }}.ballgown_cluster": "{{ ballgown_cluster }}",
"{{ project_name }}.count_docker": "{{ count_docker }}",
"{{ project_name }}.count_cluster": "{{ count_cluster }}",
"{{ project_name }}.count_length": "{{ count_length }}",
"{{ project_name }}.trim_adapter": true,
"{{ project_name }}.pre_alignment_qc": true,
"{{ project_name }}.qualimap_qc": true
"{{ project_name }}.read1": "{{ read1 }}",
"{{ project_name }}.read2": "{{ read2 }}",
"{{ project_name }}.sample_id": "{{ sample_id }}",
"{{ project_name }}.screen_ref_dir": "{{ screen_ref_dir }}",
"{{ project_name }}.fastq_screen_conf": "{{ fastq_screen_conf }}",
"{{ project_name }}.idx": "{{ idx }}",
"{{ project_name }}.gtf": "{{ gtf }}",
"{{ project_name }}.disk_size": "{{ disk_size if disk_size != '' else 200}}",
"{{ project_name }}.idx_prefix": "{{ idx_prefix }}",
"{{ project_name }}.fastqscreen_docker": "{{ fastqscreen_docker }}",
"{{ project_name }}.fastqscreen_cluster": "{{ fastqscreen_cluster }}",
"{{ project_name }}.fastqc_cluster": "{{ fastqc_cluster }}",
"{{ project_name }}.fastqc_docker": "{{ fastqc_docker }}",
"{{ project_name }}.fastp_docker": "{{ fastp_docker }}",
"{{ project_name }}.fastp_cluster": "{{ fastp_cluster }}",
"{{ project_name }}.adapter_sequence": "{{ adapter_sequence }}",
"{{ project_name }}.adapter_sequence_r2": "{{ adapter_sequence_r2 }}",
"{{ project_name }}.hisat2_docker": "{{ hisat2_docker }}",
"{{ project_name }}.hisat2_cluster": "{{ hisat2_cluster }}",
"{{ project_name }}.insert_size": "{{ insert_size }}",
"{{ project_name }}.samtools_docker": "{{ samtools_docker }}",
"{{ project_name }}.samtools_cluster": "{{ samtools_cluster }}",
"{{ project_name }}.qualimap_docker": "{{ qualimap_docker }}",
"{{ project_name }}.qualimap_cluster": "{{ qualimap_cluster }}",
"{{ project_name }}.stringtie_docker": "{{ stringtie_docker }}",
"{{ project_name }}.stringtie_cluster": "{{ stringtie_cluster }}",
"{{ project_name }}.ballgown_docker": "{{ ballgown_docker }}",
"{{ project_name }}.ballgown_cluster": "{{ ballgown_cluster }}",
"{{ project_name }}.count_docker": "{{ count_docker }}",
"{{ project_name }}.count_cluster": "{{ count_cluster }}",
"{{ project_name }}.count_length": "{{ count_length }}",
"{{ project_name }}.trim_adapter": {{ trim_adapter }},
"{{ project_name }}.pre_alignment_qc": {{ pre_alignment_qc }},
"{{ project_name }}.qualimap_qc": {{ qualimap_qc }}
}
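The block above is a Jinja2 template: it is rendered once per row of the sample sheet, with the {{ ... }} placeholders filled from the per-sample variables (and, presumably, from the fallback values added to defaults in this commit). As a rough illustration only, using an invented project_name of quartet_demo and omitting most keys, the FDU_D5 row from the README would render to something like:

{
    "quartet_demo.sample_id": "FDU_D5",
    "quartet_demo.read1": "oss://choppy-app-example-data/RNAseq/Downsampling_Quartet_RNAseqQC_12samples/Quartet_RNA_ILM_NovaSeq_RiboZero_FDU_D5_1_20190710_CleanData_R1_400k.gz",
    "quartet_demo.disk_size": "200",
    "quartet_demo.trim_adapter": false,
    "quartet_demo.pre_alignment_qc": true,
    "quartet_demo.qualimap_qc": true
}

The point of the change is visible in the last three keys: instead of being hard-coded to true for every sample, trimming and the two QC stages can now be switched per row of the sample sheet.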

tasks/ballgown.wdl (+24, -24)

@@ -1,27 +1,27 @@
task ballgown {
File gene_abundance
String base=basename(gene_abundance, ".gene.abundance.txt")
String docker
String cluster
String disk_size
Array[File] ballgown

command <<<
mkdir -p /cromwell_root/tmp/${base}
cp -r ${sep=" " ballgown} /cromwell_root/tmp/${base}
ballgown /cromwell_root/tmp/${base} ${base}.txt
sed -i 's/"//g' ${base}.txt
sed -i '1s/FPKM./GENE_ID\t/g' ${base}.txt
>>>
runtime {
docker: docker
cluster: cluster
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
}
output {
File mat_expression="${base}.txt"
}
}

tasks/count.wdl (+23, -23)

@@ -1,26 +1,26 @@
task count {
Array[File] ballgown
String sample_id
String docker
String cluster
String disk_size
Int count_length

command <<<
mkdir -p /cromwell_root/tmp/ballgown/${sample_id}
cp -r ${sep=" " ballgown} /cromwell_root/tmp/ballgown/${sample_id}
count -i /cromwell_root/tmp/ballgown -l ${count_length} -g ${sample_id}_gene_count_matrix.csv -t ${sample_id}_transcript_count_matrix.csv
>>>
runtime {
docker: docker
cluster: cluster
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
}
output {
File mat_expression_genecount="${sample_id}_gene_count_matrix.csv"
File mat_expression_transcriptcount="${sample_id}_transcript_count_matrix.csv"
}
}

tasks/fastp.wdl (+33, -33)

@@ -1,37 +1,37 @@
task fastp {
File read1
File read2
String sample_id
String adapter_sequence
String adapter_sequence_r2
String docker
String cluster
String disk_size
String trim_adapter

command <<<
nt=$(nproc)
## Trim
if [ "${trim_adapter}" != 'true' ]; then
cp ${read1} ${sample_id}_R1.fq.gz
cp ${read2} ${sample_id}_R2.fq.gz
else
fastp --thread $nt --adapter_sequence ${adapter_sequence} --adapter_sequence_r2 ${adapter_sequence_r2} --detect_adapter_for_pe -i ${read1} -I ${read2} -o ${sample_id}_R1.fq.gz -O ${sample_id}_R2.fq.gz -j ${sample_id}.json -h ${sample_id}.html
fi
>>>
runtime {
docker: docker
cluster: cluster
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
}

output {
File json = "${sample_id}.json"
File report = "${sample_id}.html"
File trim_R1 = "${sample_id}_R1.fq.gz"
File trim_R2 = "${sample_id}_R2.fq.gz"
}
}

tasks/hisat2.wdl (+23, -23)

@@ -1,29 +1,29 @@
task hisat2 {
File idx
File read_1P
File read_2P
String idx_prefix
String sample_id
String docker
String cluster
String disk_size

command {
nt=$(nproc)
hisat2 -t -p $nt -x ${idx}/${idx_prefix} -1 ${read_1P} -2 ${read_2P} -S ${sample_id}.sam --un-conc-gz ${sample_id}_un.fq.gz
}
runtime {
docker: docker
cluster: cluster
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
}

output {
File sam=sample_id + ".sam"
File unmapread_1p=sample_id + "_un.fq.1.gz"
File unmapread_2p=sample_id + "_un.fq.2.gz"
}
}

tasks/qualimap.wdl (+4, -5)

@@ -12,14 +12,13 @@ task qualimap {
nt=$(nproc)
/opt/qualimap/qualimap bamqc -bam ${bam} -outformat HTML -nt $nt -outdir ${bamname}_bamqc --java-mem-size=32G
tar -zcf ${bamname}_bamqc_qualimap.tar.gz ${bamname}_bamqc
>>>

runtime {
- docker:docker
- cluster:cluster
- systemDisk:"cloud_ssd 40"
- dataDisk:"cloud_ssd " + disk_size + " /cromwell_root/"
+ docker: docker
+ cluster: cluster
+ systemDisk: "cloud_ssd 40"
+ dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
}

output {

tasks/samtools.wdl (+33, -33)

@@ -1,40 +1,40 @@
task samtools {
File sam
String base=basename(sam, ".sam")
String bam=base + ".bam"
String sorted_bam=base + ".sorted.bam"
String sorted_bam_index=base + ".sorted.bam.bai"
String samstats=base + ".samstats"
String ins_size=base + ".ins_size"
String docker
String cluster
String disk_size
Int insert_size

command <<<
set -o pipefail
set -e
nt=$(nproc)
/opt/conda/bin/samtools view -@ $nt -bS ${sam} > ${bam}
/opt/conda/bin/samtools sort -@ $nt -m 1000000000 ${bam} -o ${sorted_bam}
/opt/conda/bin/samtools index ${sorted_bam}
/opt/conda/bin/samtools stats ${sorted_bam} > ${samstats}
/opt/conda/bin/samtools stats -i ${insert_size} ${sorted_bam} |grep ^IS|cut -f 2- > ${ins_size}
>>>

runtime {
docker: docker
cluster: cluster
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
}

output {
File out_sort_bam="${sorted_bam}"
File out_sort_bam_index="${sorted_bam_index}"
File out_samstats="${samstats}"
File out_ins_size="${ins_size}"
}

}


tasks/stringtie.wdl (+24, -24)

@@ -1,27 +1,27 @@
task stringtie {
File bam
File gtf
String docker
String base=basename(bam, ".sorted.bam")
String cluster
String disk_size

command <<<
nt=$(nproc)
mkdir ballgown
/opt/conda/bin/stringtie -e -B -p $nt -G ${gtf} -o ballgown/${base}/${base}.gtf -C ${base}.cov.ref.gtf -A ${base}.gene.abundance.txt ${bam}
>>>
runtime {
docker: docker
cluster: cluster
systemDisk: "cloud_ssd 40"
dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
}
output {
File covered_transcripts="${base}.cov.ref.gtf"
File gene_abundance="${base}.gene.abundance.txt"
Array[File] ballgown=["ballgown/${base}/${base}.gtf", "ballgown/${base}/e2t.ctab", "ballgown/${base}/e_data.ctab", "ballgown/${base}/i2t.ctab", "ballgown/${base}/i_data.ctab", "ballgown/${base}/t_data.ctab"]
}
}

workflow.wdl (+10, -11)

@@ -9,7 +9,6 @@ import "./tasks/ballgown.wdl" as ballgown
import "./tasks/count.wdl" as count

workflow {{ project_name }} {
File read1
File read2
File idx
@@ -56,7 +55,7 @@ workflow {{ project_name }} {
docker=fastqc_docker,
cluster=fastqc_cluster,
disk_size=disk_size
}
call fastqscreen.fastqscreen as fastqscreen {
input:
@@ -67,8 +66,8 @@ workflow {{ project_name }} {
screen_ref_dir=screen_ref_dir,
fastq_screen_conf=fastq_screen_conf,
disk_size=disk_size
}
}

call fastp.fastp as fastp {
input:
@@ -81,7 +80,7 @@ workflow {{ project_name }} {
adapter_sequence=adapter_sequence,
adapter_sequence_r2=adapter_sequence_r2,
trim_adapter=trim_adapter
}
call hisat2.hisat2 as hisat2 {
input:
@@ -93,7 +92,7 @@ workflow {{ project_name }} {
read_1P=fastp.trim_R1,
read_2P=fastp.trim_R2,
disk_size=disk_size
}
call samtools.samtools as samtools {
input:
@@ -102,7 +101,7 @@ workflow {{ project_name }} {
sam=hisat2.sam,
insert_size=insert_size,
disk_size=disk_size
}

if (qualimap_qc){
call qualimap.qualimap as qualimap {
@@ -112,8 +111,8 @@ workflow {{ project_name }} {
docker=qualimap_docker,
cluster=qualimap_cluster,
disk_size=disk_size
}
}

call stringtie.stringtie as stringtie {
input:
@@ -122,7 +121,7 @@ workflow {{ project_name }} {
gtf=gtf,
bam=samtools.out_sort_bam,
disk_size=disk_size
}

call ballgown.ballgown as ballgown {
input:
@@ -131,7 +130,7 @@ workflow {{ project_name }} {
ballgown=stringtie.ballgown,
gene_abundance=stringtie.gene_abundance,
disk_size=disk_size
}
call count.count as count {
input:
