split fastq and vcf

LUYAO REN, 3 years ago
commit a75a008604 (tags/v0.1.1)

5 changed files with 70 additions and 30 deletions
  1. inputs  (+0, -1)
  2. tasks/extract_tables.wdl  (+9, -13)
  3. tasks/extract_tables_vcf.wdl  (+23, -0)
  4. tasks/multiqc_hap.wdl  (+31, -0)
  5. workflow.wdl  (+7, -16)

inputs  (+0, -1)

  "{{ project_name }}.fastq_1_M8": "{{ fastq_1_M8 }}",
  "{{ project_name }}.SMALLcluster_config": "{{ SMALLcluster_config }}",
  "{{ project_name }}.screen_ref_dir": "{{ screen_ref_dir }}",
- "{{ project_name }}.bed": "{{ bed }}",
  "{{ project_name }}.fastq_1_D5": "{{ fastq_1_D5 }}",
  "{{ project_name }}.dbmills_dir": "{{ dbmills_dir }}",
  "{{ project_name }}.BIGcluster_config": "{{ BIGcluster_config }}",

tasks/extract_tables.wdl  (+9, -13)

  task extract_tables {

-     File? quality_yield_summary
-     File? wgs_metrics_summary
-     File? aln_metrics_summary
-     File? is_metrics_summary
+     File quality_yield_summary
+     File wgs_metrics_summary
+     File aln_metrics_summary
+     File is_metrics_summary
      File hap
-     File? fastqc
-     File? fastqscreen
+     File fastqc
+     File fastqscreen

      String project
      String disk_size

      command <<<
-         if [ ${fastqc} ];then
-             python /opt/extract_tables.py -quality ${quality_yield_summary} -depth ${wgs_metrics_summary} -aln ${aln_metrics_summary} -is ${is_metrics_summary} -fastqc ${fastqc} -fastqscreen ${fastqscreen} -hap ${hap} -project ${project}
-         else
-             python /opt/extract_tables.py -hap ${hap} -project ${project}
-         fi
+         python /opt/extract_tables.py -quality ${quality_yield_summary} -depth ${wgs_metrics_summary} -aln ${aln_metrics_summary} -is ${is_metrics_summary} -fastqc ${fastqc} -fastqscreen ${fastqscreen} -hap ${hap} -project ${project}
      >>>

      runtime {
      }

      output {
-         File? pre_alignment = "pre_alignment.txt"
-         File? post_alignment = "post_alignment.txt"
+         File pre_alignment = "pre_alignment.txt"
+         File post_alignment = "post_alignment.txt"
          File variant_calling = "variants.calling.qc.txt"
      }
  }

tasks/extract_tables_vcf.wdl  (+23, -0)

+ task extract_tables_vcf {
+
+     File hap
+     String project
+     String docker
+     String cluster_config
+     String disk_size
+
+     command <<<
+         python /opt/extract_tables.py -hap ${hap} -project ${project}
+     >>>
+
+     runtime {
+         docker:docker
+         cluster:cluster_config
+         systemDisk:"cloud_ssd 40"
+         dataDisk:"cloud_ssd " + disk_size + " /cromwell_root/"
+     }
+
+     output {
+         File variant_calling = "variants.calling.qc.txt"
+     }
+ }
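
For reference, the task is invoked from workflow.wdl as shown further down in this commit. A minimal sketch of that call (DIYdocker, SMALLcluster_config and disk_size are pre-existing workflow inputs; the trailing disk_size assignment is assumed to match the other calls, since the hunk below is cut off before that line):

    call extract_tables_vcf.extract_tables_vcf as extract_tables_vcf {
        input:
            hap=multiqc_hap.hap,
            project=project,
            docker=DIYdocker,
            cluster_config=SMALLcluster_config,
            disk_size=disk_size
    }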

tasks/multiqc_hap.wdl  (+31, -0)

+ task multiqc_hap {
+
+     Array[File] summary
+
+     String docker
+     String cluster_config
+     String disk_size
+
+     command <<<
+         set -o pipefail
+         set -e
+         mkdir -p /cromwell_root/tmp/benchmark
+         cp ${sep=" " summary} /cromwell_root/tmp/benchmark
+         multiqc /cromwell_root/tmp/
+         cat multiqc_data/multiqc_happy_data.json > multiqc_happy_data.json
+     >>>
+
+     runtime {
+         docker:docker
+         cluster:cluster_config
+         systemDisk:"cloud_ssd 40"
+         dataDisk:"cloud_ssd " + disk_size + " /cromwell_root/"
+     }
+
+     output {
+         File multiqc_html = "multiqc_report.html"
+         Array[File] multiqc_txt = glob("multiqc_data/*")
+         File hap = "multiqc_happy_data.json"
+     }
+ }
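
As wired in workflow.wdl below, summary receives the four benchmark VCF summaries and the task's hap output feeds extract_tables_vcf. A minimal sketch of the call, mirroring that hunk (MULTIQCdocker, SMALLcluster_config and disk_size are pre-existing workflow inputs):

    Array[File] benchmark_summary_hap = [benchmark_D5_vcf.summary, benchmark_D6_vcf.summary, benchmark_F7_vcf.summary, benchmark_M8_vcf.summary]

    call multiqc_hap.multiqc_hap as multiqc_hap {
        input:
            summary=benchmark_summary_hap,
            docker=MULTIQCdocker,
            cluster_config=SMALLcluster_config,
            disk_size=disk_size
    }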

workflow.wdl  (+7, -16)

import "./tasks/Haplotyper.wdl" as Haplotyper import "./tasks/Haplotyper.wdl" as Haplotyper
import "./tasks/benchmark.wdl" as benchmark import "./tasks/benchmark.wdl" as benchmark
import "./tasks/multiqc.wdl" as multiqc import "./tasks/multiqc.wdl" as multiqc
import "./tasks/multiqc_hap.wdl" as multiqc_hap
import "./tasks/merge_sentieon_metrics.wdl" as merge_sentieon_metrics import "./tasks/merge_sentieon_metrics.wdl" as merge_sentieon_metrics
import "./tasks/extract_tables.wdl" as extract_tables import "./tasks/extract_tables.wdl" as extract_tables
import "./tasks/extract_tables_vcf.wdl" as extract_tables_vcf
import "./tasks/mendelian.wdl" as mendelian import "./tasks/mendelian.wdl" as mendelian
import "./tasks/merge_mendelian.wdl" as merge_mendelian import "./tasks/merge_mendelian.wdl" as merge_mendelian
import "./tasks/quartet_mendelian.wdl" as quartet_mendelian import "./tasks/quartet_mendelian.wdl" as quartet_mendelian
import "./tasks/filter_vcf.wdl" as filter_vcf import "./tasks/filter_vcf.wdl" as filter_vcf




workflow {{ project_name }} {
workflow project_name {


File? fastq_1_D5 File? fastq_1_D5
File? fastq_1_D6 File? fastq_1_D6
disk_size=disk_size disk_size=disk_size
} }


call extract_tables.extract_tables as extract_tables_big {
call extract_tables.extract_tables as extract_tables {
input: input:
quality_yield_summary=merge_sentieon_metrics.quality_yield_summary, quality_yield_summary=merge_sentieon_metrics.quality_yield_summary,
wgs_metrics_summary=merge_sentieon_metrics.wgs_metrics_summary, wgs_metrics_summary=merge_sentieon_metrics.wgs_metrics_summary,


Array[File] benchmark_summary_hap = [benchmark_D5_vcf.summary, benchmark_D6_vcf.summary, benchmark_F7_vcf.summary, benchmark_M8_vcf.summary] Array[File] benchmark_summary_hap = [benchmark_D5_vcf.summary, benchmark_D6_vcf.summary, benchmark_F7_vcf.summary, benchmark_M8_vcf.summary]


call multiqc.multiqc as multiqc_vcf {
call multiqc_hap.multiqc_hap as multiqc_hap {
input: input:
read1_zip="",
read2_zip="",
txt1="",
txt2="",
zip="",
summary=benchmark_summary_hap, summary=benchmark_summary_hap,
docker=MULTIQCdocker, docker=MULTIQCdocker,
cluster_config=SMALLcluster_config, cluster_config=SMALLcluster_config,
disk_size=disk_size disk_size=disk_size
} }


call extract_tables.extract_tables as extract_tables_vcf {
call extract_tables_vcf.extract_tables_vcf as extract_tables_vcf {
input: input:
quality_yield_summary="",
wgs_metrics_summary="",
aln_metrics_summary="",
is_metrics_summary="",
fastqc="",
fastqscreen="",
hap=multiqc_vcf.hap,
hap=multiqc_hap.hap,
project=project, project=project,
docker=DIYdocker, docker=DIYdocker,
cluster_config=SMALLcluster_config, cluster_config=SMALLcluster_config,
