Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions main.nf
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
/*
* not actually used - just a placeholder
*/
30 changes: 0 additions & 30 deletions modules/UMCUGenetics/utils/EditSummaryFileHappy.nf

This file was deleted.

41 changes: 41 additions & 0 deletions modules/UMCUGenetics/utils/editsummaryfilehappy/main.nf
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
/*
 * Split a hap.py summary CSV into four per-category files
 * (INDEL/SNP x PASS/ALL), prepending sample-name columns derived
 * from meta.truth and meta.query.
 */
process UTILS_EDITSUMMARYFILEHAPPY {
    tag "${meta.id}"
    label 'process_low'

    input:
    // meta: map with keys id, truth, query; summary_csv: hap.py summary CSV
    tuple val(meta), path(summary_csv)

    output:
    path("*_INDEL_PASS.summary.csv"), emit: indel_pass_csv
    path("*_INDEL_ALL.summary.csv"), emit: indel_all_csv
    path("*_SNP_PASS.summary.csv"), emit: snp_pass_csv
    path("*_SNP_ALL.summary.csv"), emit: snp_all_csv

    when:
    task.ext.when == null || task.ext.when

    script:
    """
    # Add samplenames as columns (header and row values) at start of line
    sed '1s/^/samples,sample_truth,sample_query,/; 2,\$s/^/${meta.truth}_${meta.query},${meta.truth},${meta.query},/' ${summary_csv} > ${summary_csv}.tmp

    # Split by the Type (\$4) and Filter (\$5) columns. Write the header only
    # once per output file: the previous version printed it before every data
    # row, and because awk's ">" appends after the first write to an open
    # file, any category with more than one row got a duplicated header.
    awk -F',' 'FNR==1{hdr=\$0;next} {
        f = "${meta.truth}_${meta.query}_"\$4"_"\$5".summary.csv"
        if (!(f in seen)) { seen[f] = 1; print hdr > f }
        print \$0 > f
    }' ${summary_csv}.tmp

    # Remove tmp files
    rm ${summary_csv}.tmp
    """

    stub:
    """
    touch ${meta.truth}_${meta.query}_INDEL_PASS.summary.csv
    touch ${meta.truth}_${meta.query}_INDEL_ALL.summary.csv
    touch ${meta.truth}_${meta.query}_SNP_PASS.summary.csv
    touch ${meta.truth}_${meta.query}_SNP_ALL.summary.csv
    """
}
41 changes: 41 additions & 0 deletions modules/UMCUGenetics/utils/editsummaryfilehappy/meta.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
name: editsummaryfilehappy
description: Reformat the happy summary file for GIABEval
keywords:
- giab
- GIABEval
- utils

tools:
- bash:
description: |
Util process, using only bash
input:
- - meta:
type: map
description: |
Groovy Map containing sample information; it must contain the keys `id`, `truth` and `query`
- summary_csv:
type: file
description: summary output csv from happy
output:
indel_pass_csv:
- "*_INDEL_PASS.summary.csv":
type: file
description: "INDELs that pass the criteria"
indel_all_csv:
- "*_INDEL_ALL.summary.csv":
type: file
description: "All indels"
snp_pass_csv:
- "*_SNP_PASS.summary.csv":
type: file
description: "SNPs that pass the criteria"
snp_all_csv:
- "*_SNP_ALL.summary.csv":
type: file
description: "All SNPs"

authors:
- "@ellendejong"
- "@melferink"

26 changes: 26 additions & 0 deletions modules/UMCUGenetics/utils/editsummaryfilehappy/tests/main.nf.test
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
// nf-test for the UTILS_EDITSUMMARYFILEHAPPY process.
// Stub run only: asserts the process completes, not the content of the
// four split summary CSVs.
nextflow_process {
    name "Test UTILS_EDITSUMMARYFILEHAPPY"
    script "../main.nf"
    process "UTILS_EDITSUMMARYFILEHAPPY"

    tag "modules"
    tag "modules_umcugenetics"
    tag "happy"

    test("stub") {
        // -stub executes the stub: block, so only empty touch'd files are made
        options "-stub"
        // NOTE(review): file("demo_summary.csv") is a relative path resolved
        // at test launch time -- confirm this fixture actually exists, or use
        // a path anchored to the test directory.
        when{
            process{
                """
                input[0] = [
                    [id: 'test', truth: 'truthsample', query: "querysample"], // meta map
                    file("demo_summary.csv")
                ]
                """
            }
        }
        then {
            assert process.success
        }
    }
}
14 changes: 14 additions & 0 deletions nf-test.config
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
// Top-level nf-test runner configuration.
config {
    // location for all nf-tests
    testsDir = "."

    // nf-test directory including temporary files for each test
    workDir = System.getenv("NFT_WORKDIR") ?: ".nf-test"

    // location of an optional nextflow.config file specific for executing tests
    configFile = "tests/config/nf-test.config"

    // Default profile applied to every test run; empty means no profile is
    // forced and one must be supplied on the command line if needed.
    // NOTE(review): the original comment referred to a "defined docker
    // profile", but the value is empty -- confirm the intended default.
    profile = ""

}
62 changes: 62 additions & 0 deletions tests/config/nextflow.config
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
// Shared Nextflow configuration for test executions: default params,
// resource caps, and one profile per supported container/conda engine.
params {
    outdir = "output/"
    publish_dir_mode = "copy"
    singularity_pull_docker_container = false
    // Base URL for nf-core test datasets
    test_data_base = 'https://raw.githubusercontent.com/nf-core/test-datasets/modules'
}

// Cap per-process resources so tests fit on small (CI) machines
process {
    resourceLimits = [
        cpus: 4,
        memory: '16.GB',
        time: '1.h'
    ]
}

// One profile per execution engine; exactly one is selected at run time.
profiles {
    singularity {
        singularity.enabled = true
        singularity.autoMounts = true
        singularity.registry = 'quay.io'
    }

    conda {
        conda.enabled = true
        conda.channels = ['conda-forge', 'bioconda']
        conda.createTimeout = "120 min"
    }

    mamba {
        conda.enabled = true
        conda.useMamba = true
        conda.channels = ['conda-forge', 'bioconda']
        conda.createTimeout = "120 min"
    }

    micromamba {
        conda.enabled = true
        conda.useMicromamba = true
        conda.channels = ['conda-forge', 'bioconda']
        conda.createTimeout = "120 min"
    }

    podman {
        podman.enabled = true
        podman.runOptions = "--runtime crun --platform linux/x86_64 --systemd=always"
        podman.registry = 'quay.io'
    }

    docker {
        docker.enabled = true
        docker.fixOwnership = true
        // Force amd64 images (e.g. when running on Apple Silicon hosts)
        docker.runOptions = '--platform=linux/amd64'
        docker.registry = 'quay.io'
    }
}

// Load test_data.config containing paths to test data
includeConfig 'test_data.config'

manifest {
    // "!" makes the version requirement strict: abort if not satisfied
    nextflowVersion = '!>=24.10.2'
}
98 changes: 98 additions & 0 deletions tests/config/nf-test.config
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
// Nextflow configuration used when executing nf-test runs: test-data
// locations, per-process resource defaults, and engine/platform profiles.
params {
    publish_dir_mode = "copy"
    singularity_pull_docker_container = false
    // Base URLs for nf-core test datasets
    test_data_base = 'https://raw.githubusercontent.com/nf-core/test-datasets/modules'
    modules_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/'
}

// Default per-process resources for test runs (kept small for CI)
process {
    cpus = 2
    memory = '4.GB'
    time = '2.h'
}

// One profile per execution engine / platform variant.
profiles {
    conda {
        conda.enabled = true
        conda.channels = ['conda-forge', 'bioconda']
        apptainer.enabled = false
    }
    mamba {
        conda.enabled = true
        conda.useMamba = true
    }
    micromamba {
        conda.enabled = true
        conda.useMicromamba = true
    }
    docker {
        docker.enabled = true
        // Run as the invoking user and force amd64 images
        docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64'
    }
    docker_self_hosted {
        docker.enabled = true
        docker.fixOwnership = true
        docker.runOptions = '--platform=linux/amd64'
    }
    // Native arm64 execution with Wave fallback to conda-built containers
    arm64 {
        process.arch = 'arm64'
        docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/arm64'
        apptainer.ociAutoPull = true
        singularity.ociAutoPull = true
        wave.enabled = true
        wave.freeze = true
        wave.strategy = 'conda,container'
    }
    // amd64 emulation (e.g. on Apple Silicon)
    emulate_amd64 {
        docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64'
    }
    singularity {
        singularity.enabled = true
        singularity.autoMounts = true
    }
    podman {
        podman.enabled = true
        podman.runOptions = "--runtime crun --platform linux/x86_64 --systemd=always"
    }
    shifter {
        shifter.enabled = true
    }
    charliecloud {
        charliecloud.enabled = true
    }
    apptainer {
        apptainer.enabled = true
        apptainer.autoMounts = true
    }
    wave {
        apptainer.ociAutoPull = true
        singularity.ociAutoPull = true
        wave.enabled = true
        wave.freeze = true
        wave.strategy = 'conda,container'
    }
    gitpod {
        executor.name = 'local'
        executor.cpus = 4
        executor.memory = 8.GB
    }
    // GPU pass-through options for the container engines that support it
    gpu {
        docker.runOptions = '-u $(id -u):$(id -g) --gpus all'
        apptainer.runOptions = '--nv'
        singularity.runOptions = '--nv'
    }
}

// Default container registries (apply regardless of selected profile)
docker.registry = 'quay.io'
podman.registry = 'quay.io'
singularity.registry = 'quay.io'

// Increase time available to build Conda environment
conda { createTimeout = "120 min" }

// Load test_data.config containing paths to test data
includeConfig 'test_data.config'

manifest {
    // "!" makes the version requirement strict: abort if not satisfied
    nextflowVersion = '!>=24.10.2'
}
Loading
Loading