first commit

master
chenqingwang, 3 years ago
commit 6bc6fb9804
5 changed files with 141 additions and 0 deletions
  1. README.md (+51 −0)
  2. defaults (+6 −0)
  3. inputs (+9 −0)
  4. tasks/spladder_merge.wdl (+51 −0)
  5. workflow.wdl (+24 −0)

README.md (+51 −0)

@@ -0,0 +1,51 @@
# README.md

> Author: Qingwang Chen
>
> Email: [qwch20@fudan.edu.cn](mailto:qwch20@fudan.edu.cn)
>
> Last Updated: 22/08/2021

#### Brief Introduction

For RNA-seq data: from BAM files to alternative splicing (AS) information files.

#### Requirements

- choppy
- Ali-Cloud
- Linux

```
# Activate the choppy environment
$ source activate choppy (open-choppy-env)

# First-time installation
$ choppy install chenqingwang/SplAdder-merge-T
# Reinstall / update an existing installation
$ choppy install chenqingwang/SplAdder-merge-T -f

# List installed apps
$ choppy apps
```

#### Quick Start

```
# Prepare the samples.csv file
$ choppy samples chenqingwang/SplAdder-merge-T-latest > samples.csv
# Prepare a samples.csv file without default parameters
$ choppy samples --no-default chenqingwang/SplAdder-merge-T-latest > samples.csv

# Submit jobs
$ choppy batch chenqingwang/SplAdder-merge-T-latest samples.csv -p Your_project_name -l Your_label

# Check job status
$ choppy query -L Your_label | grep "status"

# Query failed jobs
$ choppy search -s Failed -p Your_project_name -u chenqingwang --short-format

# Location of result files
oss://choppy-cromwell-result/test-choppy/Your_project_name/
```
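The per-sample columns in samples.csv correspond to the variables of the inputs template below (sample_id, bam, pickle); the remaining workflow inputs are pre-filled from the defaults file. A minimal sketch, with made-up sample names and OSS paths for illustration (the exact header produced by `choppy samples` may differ):

```
sample_id,bam,pickle
sample_A,oss://your-bucket/rna-seq/sample_A_bams/,oss://your-bucket/rna-seq/pickle/
sample_B,oss://your-bucket/rna-seq/sample_B_bams/,oss://your-bucket/rna-seq/pickle/
```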

defaults (+6 −0)

@@ -0,0 +1,6 @@
{
"reference_gtf_file":"oss://pgx-reference-data/reference/spladder/SplAdder/data/reference/Homo_sapiens.GRCh38.103.gtf",
"spladder_docker":"registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/spladder:v2.4.2",
"spladder_cluster":"OnDemand bcs.a2.3xlarge img-ubuntu-vpc",
"disk_size":"500"
}

inputs (+9 −0)

@@ -0,0 +1,9 @@
{
"{{ project_name }}.sample_id": "{{ sample_id }}",
"{{ project_name }}.bam": "{{ bam }}",
"{{ project_name }}.pickle": {{ pickle | tojson }},
"{{ project_name }}.reference_gtf_file": "{{ reference_gtf_file }}",
"{{ project_name }}.spladder_docker": "{{ spladder_docker }}",
"{{ project_name }}.spladder_cluster": "{{ spladder_cluster }}",
"{{ project_name }}.disk_size": "{{ disk_size }}"
}
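For a single sample, this template is rendered together with the defaults above into a Cromwell inputs file along the following lines; the project name, sample id, and bucket paths here are illustrative only:

```
{
    "my_project.sample_id": "sample_A",
    "my_project.bam": "oss://your-bucket/rna-seq/sample_A_bams/",
    "my_project.pickle": "oss://your-bucket/rna-seq/pickle/",
    "my_project.reference_gtf_file": "oss://pgx-reference-data/reference/spladder/SplAdder/data/reference/Homo_sapiens.GRCh38.103.gtf",
    "my_project.spladder_docker": "registry.cn-shanghai.aliyuncs.com/pgx-docker-registry/spladder:v2.4.2",
    "my_project.spladder_cluster": "OnDemand bcs.a2.3xlarge img-ubuntu-vpc",
    "my_project.disk_size": "500"
}
```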

tasks/spladder_merge.wdl (+51 −0)

@@ -0,0 +1,51 @@
task spladder_merge {
    String sample_id
    File bam
    File pickle
    File reference_gtf_file

    String docker
    String cluster
    String disk_size

    command <<<
        set -o pipefail
        set -e

        # Stage inputs: collect BAM/BAI file lists and link the per-sample pickle graphs
        mkdir -p ${sample_id}/spladder_out/spladder
        ls ${bam} | grep bam$ > alignment.txt
        ls ${bam} | grep bai$ > alignment_bai.txt
        ln -s ${pickle}/*.pickle ${sample_id}/spladder_out/spladder/
        ls ${sample_id}/spladder_out/spladder/ >> pickle.txt

        # SplAdder merge step (commented out in this commit):
        # spladder build -o ${sample_id}/spladder_out \
        # --annotation ${reference_gtf_file} \
        # --bams alignment.txt \
        # --confidence 2 \
        # --merge-strat merge_graphs \
        # --validate-sg \
        # --readlen 150 \
        # --parallel 4 \
        # --event-types exon_skip,intron_retention,alt_3prime,alt_5prime,mutex_exons,mult_exon_skip

        find . -depth > fileList.txt
    >>>

    runtime {
        docker: docker
        cluster: cluster
        systemDisk: "cloud_ssd 500"
        dataDisk: "cloud_ssd " + disk_size + " /cromwell_root/"
        timeout: 259200
    }

    output {
        File fileList = "fileList.txt"
        File alignment = "alignment.txt"
        File alignment_bai = "alignment_bai.txt"
        File pickle_txt = "pickle.txt"
    }
}
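Once the commented-out block in the command section is enabled, the merge step boils down to a single `spladder build` call over the BAMs listed in alignment.txt, merging the per-sample splicing graphs and calling the listed AS event types. A minimal sketch of that invocation outside of WDL, assuming the staging steps above have already produced alignment.txt and the per-sample output directory (paths are illustrative):

```
spladder build -o sample_A/spladder_out \
    --annotation Homo_sapiens.GRCh38.103.gtf \
    --bams alignment.txt \
    --confidence 2 \
    --merge-strat merge_graphs \
    --validate-sg \
    --readlen 150 \
    --parallel 4 \
    --event-types exon_skip,intron_retention,alt_3prime,alt_5prime,mutex_exons,mult_exon_skip
```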

workflow.wdl (+24 −0)

@@ -0,0 +1,24 @@
import "./tasks/spladder_merge.wdl" as spladder_merge


workflow {{ project_name }} {
    String sample_id
    File bam
    File pickle
    File reference_gtf_file

    String spladder_docker
    String spladder_cluster
    String disk_size

    call spladder_merge.spladder_merge as spladder_merge {
        input:
            reference_gtf_file=reference_gtf_file,
            sample_id=sample_id,
            bam=bam,
            pickle=pickle,
            docker=spladder_docker,
            cluster=spladder_cluster,
            disk_size=disk_size
    }
}
