Skip to content

Commit 6ec8c1c

Browse files
authored
Merge pull request #1096 from nf-core/dev
Release candidate 3.2.2
2 parents 6c0d335 + 702e3bc commit 6ec8c1c

File tree

98 files changed

+645
-327
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

98 files changed

+645
-327
lines changed

.github/workflows/awsfulltest.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ jobs:
2323
workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/sarek/work-${{ github.sha }}/somatic_test
2424
parameters: |
2525
{
26+
"hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}",
2627
"outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/sarek/results-${{ github.sha }}/somatic_test"
2728
}
2829
profiles: test_full,public_aws_ecr

.github/workflows/awsfulltest_germline.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ jobs:
2323
workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/sarek/work-${{ github.sha }}/germline_test
2424
parameters: |
2525
{
26+
"hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}",
2627
"outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/sarek/results-${{ github.sha }}/germline_test"
2728
}
2829
profiles: test_full_germline,public_aws_ecr

.github/workflows/ci.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ jobs:
2727
- "latest-everything"
2828
test:
2929
- "default"
30-
profile: ["docker", "singularity"]
30+
profile: ["docker"]
3131
# profile: ["docker", "singularity", "conda"]
3232
env:
3333
NXF_ANSI_LOG: false

.github/workflows/pytest-workflow.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ name: pytest-workflow
22
# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
33
on:
44
pull_request:
5-
branches: [dev, master]
5+
branches: [dev]
66

77
# Cancel if a newer run is started
88
concurrency:
@@ -32,7 +32,7 @@ jobs:
3232
fail-fast: false
3333
matrix:
3434
tags: ["${{ fromJson(needs.changes.outputs.tags) }}"]
35-
profile: ["docker", "singularity"]
35+
profile: ["docker"]
3636
# profile: ["docker", "singularity", "conda"]
3737
TEST_DATA_BASE:
3838
- "test-datasets/data"
Lines changed: 154 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,154 @@
1+
name: pytest-workflow-release
2+
# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
3+
on:
4+
pull_request:
5+
branches: [master]
6+
release:
7+
types: [published]
8+
9+
# Cancel if a newer run is started
10+
concurrency:
11+
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
12+
cancel-in-progress: true
13+
14+
jobs:
15+
changes:
16+
name: Check for changes
17+
runs-on: ubuntu-latest
18+
outputs:
19+
# Expose matched filters as job 'tags' output variable
20+
tags: ${{ steps.filter.outputs.changes }}
21+
steps:
22+
- uses: actions/checkout@v3
23+
- uses: dorny/paths-filter@v2
24+
id: filter
25+
with:
26+
filters: "tests/config/tags.yml"
27+
28+
test:
29+
name: ${{ matrix.tags }} ${{ matrix.profile }} NF ${{ matrix.NXF_VER }}
30+
runs-on: ubuntu-latest
31+
needs: changes
32+
if: needs.changes.outputs.tags != '[]'
33+
strategy:
34+
fail-fast: false
35+
matrix:
36+
tags: ["${{ fromJson(needs.changes.outputs.tags) }}"]
37+
profile: ["docker", "singularity"]
38+
TEST_DATA_BASE:
39+
- "test-datasets/data"
40+
NXF_VER:
41+
- "23.04.0"
42+
- "latest-everything"
43+
exclude:
44+
- profile: "singularity"
45+
tags: concatenate_vcfs
46+
- profile: "singularity"
47+
tags: merge
48+
- profile: "singularity"
49+
tags: validation_checks
50+
env:
51+
NXF_ANSI_LOG: false
52+
TEST_DATA_BASE: "${{ github.workspace }}/test-datasets"
53+
steps:
54+
- name: Check out pipeline code
55+
uses: actions/checkout@v3
56+
57+
- name: Hash Github Workspace
58+
id: hash_workspace
59+
run: |
60+
echo "digest=$(echo sarek3_${{ github.workspace }} | md5sum | cut -c 1-25)" >> $GITHUB_OUTPUT
61+
62+
- name: Cache test data
63+
id: cache-testdata
64+
uses: actions/cache@v3
65+
with:
66+
path: test-datasets/
67+
key: ${{ steps.hash_workspace.outputs.digest }}
68+
69+
- name: Check out test data
70+
if: steps.cache-testdata.outputs.cache-hit != 'true'
71+
uses: actions/checkout@v3
72+
with:
73+
repository: nf-core/test-datasets
74+
ref: sarek3
75+
path: test-datasets/
76+
77+
- name: Replace remote paths in samplesheets
78+
run: |
79+
for f in tests/csv/3.0/*csv; do
80+
sed -i "s=https://raw.githubusercontent.com/nf-core/test-datasets/modules/=${{ github.workspace }}/test-datasets/=g" $f
81+
echo "========== $f ============"
82+
cat $f
83+
echo "========================================"
84+
done;
85+
86+
- name: Set up Python
87+
uses: actions/setup-python@v2
88+
with:
89+
python-version: "3.x"
90+
91+
- uses: actions/cache@v3
92+
with:
93+
path: ~/.cache/pip
94+
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
95+
restore-keys: |
96+
${{ runner.os }}-pip-
97+
98+
- name: Install Python dependencies
99+
run: python -m pip install --upgrade pip pytest-workflow
100+
101+
- uses: actions/cache@v3
102+
with:
103+
path: /usr/local/bin/nextflow
104+
key: ${{ runner.os }}
105+
restore-keys: |
106+
${{ runner.os }}-nextflow-
107+
108+
- name: Install Nextflow ${{ matrix.NXF_VER }}
109+
uses: nf-core/setup-nextflow@v1.2.0
110+
with:
111+
version: "${{ matrix.NXF_VER }}"
112+
113+
- name: Set up Singularity
114+
if: matrix.profile == 'singularity'
115+
uses: eWaterCycle/setup-singularity@v5
116+
with:
117+
singularity-version: 3.7.1
118+
119+
- name: Set up miniconda
120+
if: matrix.profile == 'conda'
121+
uses: conda-incubator/setup-miniconda@v2
122+
with:
123+
auto-update-conda: true
124+
channels: conda-forge,bioconda,defaults
125+
python-version: ${{ matrix.python-version }}
126+
127+
- name: Conda clean
128+
if: matrix.profile == 'conda'
129+
run: conda clean -a
130+
131+
- name: Run pytest-workflow
132+
uses: Wandalen/wretry.action@v1.0.11
133+
with:
134+
command: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof --git-aware --color=yes
135+
attempt_limit: 3
136+
137+
- name: Output log on failure
138+
if: failure()
139+
run: |
140+
sudo apt install bat > /dev/null
141+
batcat --decorations=always --color=always /home/runner/pytest_workflow_*/*/log.{out,err}
142+
143+
- name: Upload logs on failure
144+
if: failure()
145+
uses: actions/upload-artifact@v2
146+
with:
147+
name: logs-${{ matrix.profile }}
148+
path: |
149+
/home/runner/pytest_workflow_*/*/.nextflow.log
150+
/home/runner/pytest_workflow_*/*/log.out
151+
/home/runner/pytest_workflow_*/*/log.err
152+
/home/runner/pytest_workflow_*/*/work
153+
!/home/runner/pytest_workflow_*/*/work/conda
154+
!/home/runner/pytest_workflow_*/*/work/singularity

CHANGELOG.md

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,35 @@ All notable changes to this project will be documented in this file.
55
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
66
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
77

8+
## [3.2.2](https://github.com/nf-core/sarek/releases/tag/3.2.2) - Vuoinesluobbalah
9+
10+
Vuoinesluobbalah is a lake close to Bierikjávrre.
11+
12+
### Added
13+
14+
- [#1106](https://github.com/nf-core/sarek/pull/1106) - Add Slack integration to Megatests
15+
- [#1107](https://github.com/nf-core/sarek/pull/1107) - Add `singularity.registry` to `public_aws_ecr`
16+
17+
### Changed
18+
19+
- [#1087](https://github.com/nf-core/sarek/pull/1087) - Back to dev
20+
- [#1087](https://github.com/nf-core/sarek/pull/1087) - Minor modules update
21+
- [#1088](https://github.com/nf-core/sarek/pull/1088) - Replace profile `test` by `test_cache` and add a `test` profile without hidden files
22+
- [#1095](https://github.com/nf-core/sarek/pull/1095) - Prepare release `3.2.2`
23+
24+
### Fixed
25+
26+
- [#1087](https://github.com/nf-core/sarek/pull/1087) - Fix wrong default memory in GATK4_CREATESEQUENCEDICTIONARY [#1085](https://github.com/nf-core/sarek/pull/1085)
27+
- [#1089](https://github.com/nf-core/sarek/pull/1089) - Remove duplicated code
28+
- [#1093](https://github.com/nf-core/sarek/pull/1093) - Fix ASCAT by reverting meta.id in channels allele_files, loci_files, gc_file and rt_file to baseName
29+
- [#1098](https://github.com/nf-core/sarek/pull/1098) - Fix Channel issue in Mutect2 subworkflow [#1094](https://github.com/nf-core/sarek/pull/1094)
30+
- [#1100](https://github.com/nf-core/sarek/pull/1100) - Remove duplicate index with deepvariant when no_intervals [#1069](https://github.com/nf-core/sarek/pull/1069)
31+
- [#1101](https://github.com/nf-core/sarek/pull/1101) - Remove duplicate index computation for GATK4 Markduplicates [#1065](https://github.com/nf-core/sarek/issues/1065)
32+
- [#1101](https://github.com/nf-core/sarek/pull/1101) - Fix GATK4 version for GATK4 MarkduplicatesSpark [#1068](https://github.com/nf-core/sarek/issues/1068)
33+
- [#1105](https://github.com/nf-core/sarek/pull/1105) - Remove `params.tracedir`
34+
- [#1108](https://github.com/nf-core/sarek/pull/1108) - Refactor bad prefix definition for vcf files [#938](https://github.com/nf-core/sarek/issues/938)
35+
- [#1109](https://github.com/nf-core/sarek/pull/1109) - Fix `mpileup` for variantcalling: only `bcftools` run and file publishing
36+
837
## [3.2.1](https://github.com/nf-core/sarek/releases/tag/3.2.1) - Pierikjaure
938

1039
Pierikjaure is a previous spelling of Bierikjávrre.

conf/modules/annotate.config

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ process {
1818
// SNPEFF
1919
if (params.tools && (params.tools.split(',').contains('snpeff') || params.tools.split(',').contains('merge'))) {
2020
withName: 'SNPEFF_SNPEFF' {
21-
ext.prefix = { "${vcf.baseName.minus(".vcf")}_snpEff" }
21+
ext.prefix = { vcf.baseName - ".vcf" + "_snpEff" }
2222
ext.args = '-nodownload -canon -v'
2323
if (!params.snpeff_cache && !params.download_cache) container = { params.snpeff_genome ? "docker.io/nfcore/snpeff:${params.snpeff_version}.${params.snpeff_genome}" : "docker.io/nfcore/snpeff:${params.snpeff_version}.${params.genome}" }
2424
publishDir = [
@@ -46,7 +46,7 @@ process {
4646
(params.vep_custom_args) ?: ''
4747
].join(' ').trim() }
4848
// If just VEP: <vcf prefix>_VEP.ann.vcf
49-
ext.prefix = { "${vcf.baseName.minus(".vcf")}_VEP.ann" }
49+
ext.prefix = { vcf.baseName - ".vcf" + "_VEP.ann" }
5050
if (!params.vep_cache && !params.download_cache) container = { params.vep_genome ? "docker.io/nfcore/vep:${params.vep_version}.${params.vep_genome}" : "docker.io/nfcore/vep:${params.vep_version}.${params.genome}" }
5151
publishDir = [
5252
[
@@ -67,14 +67,14 @@ process {
6767
if (params.tools && params.tools.split(',').contains('merge')) {
6868
withName: "NFCORE_SAREK:SAREK:VCF_ANNOTATE_ALL:VCF_ANNOTATE_MERGE:ENSEMBLVEP_VEP" {
6969
// If merge: Output file will have format *_snpEff_VEP.ann.vcf, *_snpEff_VEP.ann.json or *_snpEff_VEP.ann.tab
70-
ext.prefix = { "${vcf.baseName.minus(".ann.vcf")}_VEP.ann" }
70+
ext.prefix = { vcf.baseName - ".ann.vcf" + "_VEP.ann" }
7171
}
7272
}
7373

7474
// ALL ANNOTATION TOOLS
7575
if (params.tools && (params.tools.split(',').contains('snpeff') || params.tools.split(',').contains('vep') || params.tools.split(',').contains('merge'))) {
7676
withName: "NFCORE_SAREK:SAREK:VCF_ANNOTATE_ALL:.*:(TABIX_BGZIPTABIX|TABIX_TABIX)" {
77-
ext.prefix = { input.baseName.minus(".vcf") }
77+
ext.prefix = { input.name - ".vcf" }
7878
publishDir = [
7979
mode: params.publish_dir_mode,
8080
path: { "${params.outdir}/annotation/${meta.variantcaller}/${meta.id}/" },

conf/modules/deepvariant.config

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ process {
2222
publishDir = [
2323
mode: params.publish_dir_mode,
2424
path: { "${params.outdir}/variant_calling/" },
25-
pattern: "*vcf.gz",
25+
pattern: "*{vcf.gz,vcf.gz.tbi}",
2626
saveAs: { meta.num_intervals > 1 ? null : "deepvariant/${meta.id}/${it}" }
2727
]
2828
}
@@ -40,12 +40,4 @@ process {
4040
ext.prefix = {"${meta.id}.deepvariant.g"}
4141
}
4242

43-
withName : 'TABIX_VC_DEEPVARIANT_.*' {
44-
ext.prefix = {"${meta.id}.deepvariant"}
45-
publishDir = [
46-
mode: params.publish_dir_mode,
47-
path: { "${params.outdir}/variant_calling/deepvariant/${meta.id}/" },
48-
pattern: "*tbi"
49-
]
50-
}
5143
}

conf/modules/freebayes.config

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ process {
3535
}
3636

3737
withName: 'BCFTOOLS_SORT' {
38-
ext.prefix = { meta.num_intervals <= 1 ? "${meta.id}.freebayes" : "${vcf.minus("vcf")}.sort" }
38+
ext.prefix = { meta.num_intervals <= 1 ? meta.id + ".freebayes" : vcf.name - ".vcf" + ".sort" }
3939
publishDir = [
4040
mode: params.publish_dir_mode,
4141
path: { "${params.outdir}/variant_calling/" },

conf/modules/joint_germline.config

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ process {
3232

3333
if (params.tools && params.tools.contains('haplotypecaller') && params.joint_germline) {
3434
withName: 'NFCORE_SAREK:SAREK:BAM_VARIANT_CALLING_GERMLINE_ALL:BAM_JOINT_CALLING_GERMLINE_GATK:BCFTOOLS_SORT' {
35-
ext.prefix = { "${vcf.baseName.minus("vcf")}sort" }
35+
ext.prefix = { vcf.baseName - ".vcf" + ".sort" }
3636
publishDir = [
3737
enabled: false
3838
]

0 commit comments

Comments
 (0)