From bba1b2e8e668a557c9a5dbf7991dea0c950093db Mon Sep 17 00:00:00 2001
From: Gerbenvandervries
Date: Tue, 26 Nov 2024 15:22:40 +0000
Subject: [PATCH 1/9] re-include thereShallBeOnlyOne

---
 bin/ParseDarwinSamplesheet.sh | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/bin/ParseDarwinSamplesheet.sh b/bin/ParseDarwinSamplesheet.sh
index c75f3f0..ae5b245 100755
--- a/bin/ParseDarwinSamplesheet.sh
+++ b/bin/ParseDarwinSamplesheet.sh
@@ -220,7 +220,7 @@ fetch_data () {
 		_sampleId="$(basename "${_filePath}" ".cram")"
 		_fileType='CRAM'
 	else
-		log4Bash 'WARN' "${LINENO}" "${FUNCNAME[0]:-main}" '0' "The project folder ${_project} ${_sample} ${_type} cannot be found anywhere in ${_searchPath[0]}."
+		log4Bash 'WARN' "${LINENO}" "${FUNCNAME[0]:-main}" '0' "The project folder ${_project} ${_sample} ${_type} cannot be found anywhere."
 		_sampleId='not found'
 		_project='not found'
 		_fileType='not found'
@@ -324,6 +324,11 @@ then
 	log4Bash 'FATAL' "${LINENO}" "${FUNCNAME:-main}" '1' "This script must be executed by user ${ATEAMBOTUSER}, but you are ${ROLE_USER} (${REAL_USER})."
 fi
 
+lockFile="${DAT_ROOT_DIR}/logs/${SCRIPT_NAME}.lock"
+thereShallBeOnlyOne "${lockFile}"
+log4Bash 'DEBUG' "${LINENO}" "${FUNCNAME:-main}" '0' "Successfully got exclusive access to lock file ${lockFile} ..."
+log4Bash 'DEBUG' "${LINENO}" "${FUNCNAME:-main}" '0' "Log files will be written to ${DAT_ROOT_DIR}/logs ..."
+
 # shellcheck disable=SC2029
 ## ervanuit gaande dat de filename samplename.txt heet,
 # example filename: processStepID_project1_sample1_project2_sample2.csv

From b242725390d2ada020c3593eadbe18005fa84840 Mon Sep 17 00:00:00 2001
From: Gerbenvandervries
Date: Tue, 26 Nov 2024 15:23:21 +0000
Subject: [PATCH 2/9] bugfix version number include

---
 bin/ConcordanceCheck.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/bin/ConcordanceCheck.sh b/bin/ConcordanceCheck.sh
index 37fb9ed..165ec8c 100755
--- a/bin/ConcordanceCheck.sh
+++ b/bin/ConcordanceCheck.sh
@@ -251,8 +251,8 @@ fi
 }
 
 # Adding concordance pipeline version into .sample file.
-	awk -v c="${concordanceCheckVersion}" '{if (NR>1){print $0"\t"c}else {print $0"\tConcordanceCheckVersion"}}' "${concordanceDir}/results/${concordanceCheckId}.sample" > "${concordanceDir}/results/${concordanceCheckId}.sample.tmp"
-	mv "${concordanceDir}/results/${concordanceCheckId}.sample{.tmp,}
+	awk -v c="${concordanceCheckVersion}" '{if (NR>1){print \$0"\t"c}else {print \$0"\tConcordanceCheckVersion"}}' "${concordanceDir}/results/${concordanceCheckId}.sample" > "${concordanceDir}/results/${concordanceCheckId}.sample.tmp"
+	mv "${concordanceDir}/results/${concordanceCheckId}.sample"{.tmp,}
 
 if [[ -e "${JOB_CONTROLE_FILE_BASE}.started" ]]
 then

From 361df4345a75666ff17dfb99cd4ef1c2d1e32c1a Mon Sep 17 00:00:00 2001
From: Gerbenvandervries
Date: Wed, 27 Nov 2024 09:57:15 +0000
Subject: [PATCH 3/9] added missing type

---
 bin/ParseDarwinSamplesheet.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bin/ParseDarwinSamplesheet.sh b/bin/ParseDarwinSamplesheet.sh
index ae5b245..646839f 100755
--- a/bin/ParseDarwinSamplesheet.sh
+++ b/bin/ParseDarwinSamplesheet.sh
@@ -124,7 +124,7 @@ fetch_data () {
 	log4Bash 'WARN' "${LINENO}" "${FUNCNAME[0]:-main}" '0' "Try to find: /groups/${NGSGROUP}/prm0"*"/projects/${_projectId}"*
 
 
-	if [[ "${_prefix}" =~ ^(NGS|NGSR|QXTR|XHTS|MAGR|QXT|HSR|GS)$ ]] && [[ "${_type}" =~ ^(WES|WGS)$ ]]
+	if [[ "${_prefix}" =~ ^(NGS|NGSR|QXTR|XHTS|MAGR|QXT|HSR|GS)$ ]] && [[ "${_type}" =~ ^(WES|WGS|NGS)$ ]]
 	then
 
 		###

From 617ae41c799df500909f4ff7782242309be6a2b7 Mon Sep 17 00:00:00 2001
From: Gerbenvandervries
Date: Thu, 28 Nov 2024 14:42:30 +0000
Subject: [PATCH 4/9] vcf filter module

---
 nextflow/modules/CHECK/check.nf             | 39 ---------------------
 nextflow/modules/FILTER/filter.nf           | 27 ++++++++++++++
 nextflow/modules/FILTER/templates/filter.sh |  9 +++++
 3 files changed, 36 insertions(+), 39 deletions(-)
 delete mode 100644 nextflow/modules/CHECK/check.nf
 create mode 100644 nextflow/modules/FILTER/filter.nf
 create mode 100755 nextflow/modules/FILTER/templates/filter.sh

diff --git a/nextflow/modules/CHECK/check.nf b/nextflow/modules/CHECK/check.nf
deleted file mode 100644
index 5736946..0000000
--- a/nextflow/modules/CHECK/check.nf
+++ /dev/null
@@ -1,39 +0,0 @@
-process CHECK {
-    label 'check'
-    tag "$pair_id"
-    module = ['HTSlib/1.16-GCCcore-11.3.0']
-
-    input:
-    tuple val(meta), path(files)
-
-    output:
-    tuple val(meta), path(vcf1), path(vcf2)
-
-    script:
-    vcf1="${meta.data1Id}.${meta.build1}.final.vcf.gz"
-    vcf2="${meta.data2Id}.${meta.build2}.final.vcf.gz"
-    """
-    if [[ "${files[0]}" != *".gz" ]]; then
-        bgzip -c "${files[0]}" > "${files[0]}.gz"
-        mv "${files[0]}.gz" "${vcf1}"
-        #files[0]="${files[0]}.gz"
-    else
-        mv "${files[0]}" "${vcf1}" 2>/dev/null; true
-    fi
-    if [[ "${files[1]}" != *".gz" ]]; then
-        bgzip -c "${files[1]}" > "${files[1]}.gz"
-        mv "${files[1]}.gz" "${vcf2}"
-        #files[1]="${files[1]}.gz"
-    else
-        mv "${files[1]}" "${vcf2}" 2>/dev/null; true
-    fi
-    """
-    stub:
-
-    vcf1="${meta.data1Id}."${meta.build1}.vcf.gz
-    vcf2="${meta.data2Id}."${meta.build2}.vcf.gz
-    """
-    touch "${vcf1}"
-    touch "${vcf2}"
-    """
-}
diff --git a/nextflow/modules/FILTER/filter.nf b/nextflow/modules/FILTER/filter.nf
new file mode 100644
index 0000000..b389a8c
--- /dev/null
+++ b/nextflow/modules/FILTER/filter.nf
@@ -0,0 +1,27 @@
+process FILTER {
+    label 'filter'
+    tag "$pair_id"
+    module = ['picard/2.26.10-Java-8-LTS','HTSlib/1.16-GCCcore-11.3.0']
+
+    input:
+    tuple val(meta), path(file)
+
+    output:
+    tuple val(meta), path(vcf)
+
+    shell:
+
+    file="${file}"
+    sampleId="${meta.dataId}"
+    vcf="${meta.dataId}.${params.build}.DPfiltered.vcf.gz"
+
+    template 'filter.sh'
+
+    stub:
+
+    vcf="${meta.dataId}.${params.build}.DPfiltered.vcf.gz"
+
+    """
+    touch "${vcf}"
+    """
+}
diff --git a/nextflow/modules/FILTER/templates/filter.sh b/nextflow/modules/FILTER/templates/filter.sh
new file mode 100755
index 0000000..9c63d25
--- /dev/null
+++ b/nextflow/modules/FILTER/templates/filter.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+set -o pipefail
+set -eu
+
+	sampleId="!{meta.dataId}"
+
+	bcftools filter -e 'DP < 10' "!{file}" > "!{sampleId}.!{params.build}.DPfiltered.vcf"
+	bgzip -c "!{sampleId}.!{params.build}.DPfiltered.vcf" > "!{sampleId}.!{params.build}.DPfiltered.vcf.gz"
\ No newline at end of file

From e5decda74b7503b334ca95bc0a3b1349c14b66f7 Mon Sep 17 00:00:00 2001
From: Gerbenvandervries
Date: Thu, 28 Nov 2024 15:42:22 +0000
Subject: [PATCH 5/9] wip filtering

---
 nextflow/main.nf                            | 2 ++
 nextflow/modules/FILTER/filter.nf           | 2 +-
 nextflow/modules/FILTER/templates/filter.sh | 2 +-
 nextflow/nextflow.config                    | 1 +
 4 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/nextflow/main.nf b/nextflow/main.nf
index a6ce09d..d34e2d2 100755
--- a/nextflow/main.nf
+++ b/nextflow/main.nf
@@ -15,6 +15,7 @@ log.info """\
 include { LIFTOVER } from './modules/LIFTOVER/liftover'
 include { CONVERT } from './modules/CONVERT/convert'
 include { SNPCALL } from './modules/SNPCALL/snpcall'
+include { FILTER } from './modules/FILTER/filter'
 include { CONCORDANCE } from './modules/CONCORDANCE/concordance'
 
 def split_samples( row ) {
@@ -126,6 +127,7 @@ workflow {
     | subscribe { item -> println "Error, got UNKNOWN fileType: ${item}" }
 
     Channel.empty().mix( ch_vcfs_liftovered, ch_vcf_liftover.ready, ch_oa_liftover.ready, ch_snpcall_liftover.ready)
+    | FILTER
     | map { sample , file -> [groupKey(sample.processStepId, 2), sample, file ] }
     | groupTuple( remainder: true )
     | map { key, group, files -> validateGroup(key, group, files) }
diff --git a/nextflow/modules/FILTER/filter.nf b/nextflow/modules/FILTER/filter.nf
index b389a8c..f0f814c 100644
--- a/nextflow/modules/FILTER/filter.nf
+++ b/nextflow/modules/FILTER/filter.nf
@@ -1,7 +1,7 @@
 process FILTER {
     label 'filter'
     tag "$pair_id"
-    module = ['picard/2.26.10-Java-8-LTS','HTSlib/1.16-GCCcore-11.3.0']
+    module = ['BCFtools/1.19-GCCcore-11.3.0','SAMtools/1.16.1-GCCcore-11.3.0']
 
     input:
     tuple val(meta), path(file)
diff --git a/nextflow/modules/FILTER/templates/filter.sh b/nextflow/modules/FILTER/templates/filter.sh
index 9c63d25..1fda461 100755
--- a/nextflow/modules/FILTER/templates/filter.sh
+++ b/nextflow/modules/FILTER/templates/filter.sh
@@ -5,5 +5,5 @@ set -eu
 
 	sampleId="!{meta.dataId}"
 
-	bcftools filter -e 'DP < 10' "!{file}" > "!{sampleId}.!{params.build}.DPfiltered.vcf"
+	bcftools filter -e "INFO/DP < !{params.minimalDP}" "!{file}" > "!{sampleId}.!{params.build}.DPfiltered.vcf"
 	bgzip -c "!{sampleId}.!{params.build}.DPfiltered.vcf" > "!{sampleId}.!{params.build}.DPfiltered.vcf.gz"
\ No newline at end of file
diff --git a/nextflow/nextflow.config b/nextflow/nextflow.config
index 9330ec0..256b57a 100755
--- a/nextflow/nextflow.config
+++ b/nextflow/nextflow.config
@@ -16,6 +16,7 @@ params {
     concordanceCheckSnps.b38 = "/apps/data/UMCG/concordanceCheckSnps_GRCh38.bed"
     intermediateDir = "${launchDir}/results"
     tmpDir = "${launchDir}/tmp"
+    minimalDP = 10
 }
 
 profiles {

From de59700cd40de627d103926d91f849b09ca0748e Mon Sep 17 00:00:00 2001
From: Gerbenvandervries
Date: Fri, 29 Nov 2024 14:40:35 +0000
Subject: [PATCH 6/9] added filterstep to workflow for non oa vcfs.

---
 nextflow/main.nf | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)

diff --git a/nextflow/main.nf b/nextflow/main.nf
index d34e2d2..886e8f6 100755
--- a/nextflow/main.nf
+++ b/nextflow/main.nf
@@ -13,6 +13,7 @@ log.info """\
 .stripIndent()
 
 include { LIFTOVER } from './modules/LIFTOVER/liftover'
+include { LIFTOVER as LIFTOVER_OA } from './modules/LIFTOVER/liftover'
 include { CONVERT } from './modules/CONVERT/convert'
 include { SNPCALL } from './modules/SNPCALL/snpcall'
 include { FILTER } from './modules/FILTER/filter'
@@ -119,15 +120,24 @@ workflow {
     }
     | set { ch_vcf_liftover }
 
-    Channel.empty().mix( ch_vcf_liftover.take, ch_oa_liftover.take, ch_snpcall_liftover.take )
+    //liftover non oa vcfs
+    Channel.empty().mix( ch_vcf_liftover.take, ch_snpcall_liftover.take )
     | LIFTOVER
     | set { ch_vcfs_liftovered }
 
+    //liftover oa vcfs
+    Channel.empty().mix( ch_oa_liftover.take)
+    | LIFTOVER_OA
+    | set { ch_oa_liftovered }
 
     ch_sample.UNKNOWN
     | subscribe { item -> println "Error, got UNKNOWN fileType: ${item}" }
 
-    Channel.empty().mix( ch_vcfs_liftovered, ch_vcf_liftover.ready, ch_oa_liftover.ready, ch_snpcall_liftover.ready)
+    Channel.empty().mix( ch_vcfs_liftovered, ch_vcf_liftover.ready, ch_snpcall_liftover.ready)
+    | FILTER
+    | set { ch_vcfs_filtered }
+
+    Channel.empty().mix( ch_vcfs_filtered, ch_oa_liftover.ready, ch_oa_liftovered)
     | map { sample , file -> [groupKey(sample.processStepId, 2), sample, file ] }
     | groupTuple( remainder: true )
     | map { key, group, files -> validateGroup(key, group, files) }

From 5083e09ab5ef598dcfcfa04924b6b9d9718ffa9d Mon Sep 17 00:00:00 2001
From: Gerbenvandervries
Date: Wed, 4 Dec 2024 15:33:40 +0000
Subject: [PATCH 7/9] bugfix for search over multiple project dirs if present.

---
 bin/ParseDarwinSamplesheet.sh | 22 +++++++++++-----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/bin/ParseDarwinSamplesheet.sh b/bin/ParseDarwinSamplesheet.sh
index 646839f..2040553 100755
--- a/bin/ParseDarwinSamplesheet.sh
+++ b/bin/ParseDarwinSamplesheet.sh
@@ -81,13 +81,13 @@ EOH
 
 fetch () { # gets a sampleId, a extention and searchPatch and reruns the filename, full filepath.
 local _sample="${1}"
-local _extention="${2}"
-local _searchPath="${3}"
+local _extention="${2}" && shift && shift #remove first 2 elements from _searchPath[@]
+local _searchPath=("${@}") #receives an array with one or more search paths.
 local _filePath=""
 if [[ -e "${_searchPath[0]}" ]]
 then
 
-	mapfile -t _files < <(find "${_searchPath}" -maxdepth 1 -regex "${_searchPath}.*${_sample}.*${_extention}" )
+	mapfile -t _files < <(find "${_searchPath[@]}" -maxdepth 1 -regex ".*${_sample}.*${_extention}" )
 	if [[ "${#_files[@]}" -eq '0' ]]
 	then
 		_filePath="not found"
@@ -133,7 +133,7 @@ fetch_data () {
 		then
 
 			#fetch filename and path, and store in ${_sampleId} ${_filePath}, set _fileType to CRAM
-			_filePath="$(set -e; fetch "${_sample}" "\(.bam\|.bam.cram\)" "${_searchPath[0]}")"
+			_filePath="$(set -e; fetch "${_sample}" "\(.bam\|.bam.cram\)" "${_searchPath[@]}")"
 			_sampleId="$(basename "${_filePath}" ".merged.dedup.bam.cram")"
 			_sampleId="$(basename "${_sampleId}" ".merged.dedup.bam")"
 			if [[ "${_filePath}" == *"cram"* ]]
@@ -146,7 +146,7 @@
 		then
 			_searchPath=("/groups/${NGSGROUP}/prm0"*"/projects/${_projectId}"*"/run01/results/concordanceCheckSnps/")
 			#fetch filename and path, and store in ${_sampleId} ${_filePath}, set _fileType to VCF
-			_filePath="$(set -e; fetch "${_sample}" ".concordanceCheckCalls.vcf" "${_searchPath[0]}")"
+			_filePath="$(set -e; fetch "${_sample}" ".concordanceCheckCalls.vcf" "${_searchPath[@]}")"
 			_sampleId="$(basename "${_filePath}" ".concordanceCheckCalls.vcf")"
 			_fileType='VCF'
 
@@ -156,7 +156,7 @@
 #		if [[ -e "${_searchPath[0]}" ]]
 #		then
 #			#fetch filename and path, and store in ${_sampleId} ${_filePath}, set _fileType to VCF
-#			_filePath="$(set -e; fetch "${_sample}" ".concordanceCheckCalls.vcf" "${_searchPath[0]}")"
+#			_filePath="$(set -e; fetch "${_sample}" ".concordanceCheckCalls.vcf" "${_searchPath[@]}")"
 #			_sampleId="$(basename "${_filePath}" ".concordanceCheckCalls.vcf")"
 #			_fileType='VCF'
 
@@ -166,7 +166,7 @@
 #		_searchPath=("/groups/${NGSGROUP}/prm0"*"/projects/${_project}"*"/run01/results/alignment/")
 
 			#fetch filename and path, and store in ${_sampleId} ${_filePath}, set _fileType to CRAM
-#			_filePath="$(set -e; fetch "${_sample}" "\(.bam\|.bam.cram\)" "${_searchPath[0]}")"
+#			_filePath="$(set -e; fetch "${_sample}" "\(.bam\|.bam.cram\)" "${_searchPath[@]}")"
 #			_sampleId="$(basename "${_filePath}" ".merged.dedup.bam.cram")"
 #			_sampleId="$(basename "${_sampleId}" ".merged.dedup.bam")"
 #			if [[ "${_filePath}" == *"cram"* ]]
@@ -185,7 +185,7 @@
 		if [[ -d "${_searchPath[0]}" ]]
 		then
 			#fetch filename and path, and store in ${_sampleId} ${_filePath}, set _fileType to VCF
-			_filePath="$(set -e; fetch "${_sample}" ".concordance.vcf.gz" "${_searchPath[0]}")"
+			_filePath="$(set -e; fetch "${_sample}" ".concordance.vcf.gz" "${_searchPath[@]}")"
 			_sampleId="$(basename "${_filePath}" ".concordance.vcf.gz")"
 			_fileType='VCF'
 
@@ -195,7 +195,7 @@
 
 			_searchPath=("/groups/${NGSGROUP}/prm0"*"/projects/${_projectId}"*"/run01/results/alignment/")
 			#fetch filename and path, and store in ${_sampleId} ${_filePath}, set _fileType to CRAM
-			_filePath="$(set -e; fetch "${_sample}" ".sorted.merged.bam" "${_searchPath[0]}")"
+			_filePath="$(set -e; fetch "${_sample}" ".sorted.merged.bam" "${_searchPath[@]}")"
 			_sampleId="$(basename "${_filePath}" ".sorted.merged.bam")"
 			_fileType='BAM'
 		else
@@ -207,7 +207,7 @@
 
 		_searchPath=("/groups/${ARRAYGROUP}/dat0"*"/openarray/"*"${_project}"*"/")
 		#fetch filename and path, and store in ${_sampleId} ${_filePath}, set _fileType to OA
-		_filePath="$(set -e; fetch "${_sample}" ".oarray.txt" "${_searchPath[0]}")"
+		_filePath="$(set -e; fetch "${_sample}" ".oarray.txt" "${_searchPath[@]}")"
 		_sampleId="$(basename "${_filePath}" ".oarray.txt")"
 		_fileType='OPENARRAY'
 
@@ -216,7 +216,7 @@
 
 		_searchPath=("/groups/${NGSGROUP}/prm0"*"/projects/"*"${_project}"*"/run01/results/intermediates/")
 		#fetch filename and path, and store in ${_sampleId} ${_filePath}, set _fileType to OA
-		_filePath="$(set -e; fetch "${_sample}" ".cram" "${_searchPath[0]}")"
+		_filePath="$(set -e; fetch "${_sample}" ".cram" "${_searchPath[@]}")"
 		_sampleId="$(basename "${_filePath}" ".cram")"
 		_fileType='CRAM'
 	else

From d7958b70cfbe7150eddc8837c0bbcbfca57b1784 Mon Sep 17 00:00:00 2001
From: Gerbenvandervries
Date: Tue, 10 Dec 2024 10:33:37 +0000
Subject: [PATCH 8/9] changed to use correct buildnumber in filename

---
 nextflow/modules/FILTER/filter.nf           | 4 ++--
 nextflow/modules/FILTER/templates/filter.sh | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/nextflow/modules/FILTER/filter.nf b/nextflow/modules/FILTER/filter.nf
index f0f814c..8d96802 100644
--- a/nextflow/modules/FILTER/filter.nf
+++ b/nextflow/modules/FILTER/filter.nf
@@ -13,13 +13,13 @@ process FILTER {
 
     file="${file}"
     sampleId="${meta.dataId}"
-    vcf="${meta.dataId}.${params.build}.DPfiltered.vcf.gz"
+    vcf="${meta.dataId}.${meta.build}.DPfiltered.vcf.gz"
 
     template 'filter.sh'
 
     stub:
 
-    vcf="${meta.dataId}.${params.build}.DPfiltered.vcf.gz"
+    vcf="${meta.dataId}.${meta.build}.DPfiltered.vcf.gz"
 
     """
     touch "${vcf}"
diff --git a/nextflow/modules/FILTER/templates/filter.sh b/nextflow/modules/FILTER/templates/filter.sh
index 1fda461..3a96104 100755
--- a/nextflow/modules/FILTER/templates/filter.sh
+++ b/nextflow/modules/FILTER/templates/filter.sh
@@ -5,5 +5,5 @@ set -eu
 
 	sampleId="!{meta.dataId}"
 
-	bcftools filter -e "INFO/DP < !{params.minimalDP}" "!{file}" > "!{sampleId}.!{params.build}.DPfiltered.vcf"
-	bgzip -c "!{sampleId}.!{params.build}.DPfiltered.vcf" > "!{sampleId}.!{params.build}.DPfiltered.vcf.gz"
\ No newline at end of file
+	bcftools filter -e "INFO/DP < !{params.minimalDP}" "!{file}" > "!{sampleId}.!{meta.build}.DPfiltered.vcf"
+	bgzip -c "!{sampleId}.!{meta.build}.DPfiltered.vcf" > "!{sampleId}.!{meta.build}.DPfiltered.vcf.gz"
\ No newline at end of file

From f86a4086d867910e8ab766ca17fa390ee9701ffb Mon Sep 17 00:00:00 2001
From: Gerbenvandervries
Date: Fri, 13 Dec 2024 14:40:25 +0000
Subject: [PATCH 9/9] vcf before bam

---
 bin/ParseDarwinSamplesheet.sh | 46 ++++++++---------------------
 1 file changed, 10 insertions(+), 36 deletions(-)

diff --git a/bin/ParseDarwinSamplesheet.sh b/bin/ParseDarwinSamplesheet.sh
index 2040553..0ec6c17 100755
--- a/bin/ParseDarwinSamplesheet.sh
+++ b/bin/ParseDarwinSamplesheet.sh
@@ -126,11 +126,18 @@ fetch_data () {
 
 	if [[ "${_prefix}" =~ ^(NGS|NGSR|QXTR|XHTS|MAGR|QXT|HSR|GS)$ ]] && [[ "${_type}" =~ ^(WES|WGS|NGS)$ ]]
 	then
-
-		###
-		_searchPath=("/groups/${NGSGROUP}/prm0"*"/projects/${_projectId}"*"/run01/results/alignment/")
+		_searchPath=("/groups/${NGSGROUP}/prm0"*"/projects/${_projectId}"*"/run01/results/concordanceCheckSnps/")
 		if [[ -e "${_searchPath[0]}" ]]
 		then
+			#fetch filename and path, and store in ${_sampleId} ${_filePath}, set _fileType to VCF
+			_filePath="$(set -e; fetch "${_sample}" ".concordanceCheckCalls.vcf" "${_searchPath[@]}")"
+			_sampleId="$(basename "${_filePath}" ".concordanceCheckCalls.vcf")"
+			_fileType='VCF'
+
+		elif [[ ! -d "${_searchPath[0]}" ]]
+		then
+			log4Bash 'INFO' "${LINENO}" "${FUNCNAME[0]:-main}" '0' "VCF not found, Try fetching CRAM."
+			_searchPath=("/groups/${NGSGROUP}/prm0"*"/projects/${_projectId}"*"/run01/results/alignment/")
 
 			#fetch filename and path, and store in ${_sampleId} ${_filePath}, set _fileType to CRAM
 			_filePath="$(set -e; fetch "${_sample}" "\(.bam\|.bam.cram\)" "${_searchPath[@]}")"
@@ -142,39 +149,6 @@ fetch_data () {
 			else
 				_fileType='BAM'
 			fi
-		elif [[ ! -d "${_searchPath[0]}" ]]
-		then
-			_searchPath=("/groups/${NGSGROUP}/prm0"*"/projects/${_projectId}"*"/run01/results/concordanceCheckSnps/")
-			#fetch filename and path, and store in ${_sampleId} ${_filePath}, set _fileType to VCF
-			_filePath="$(set -e; fetch "${_sample}" ".concordanceCheckCalls.vcf" "${_searchPath[@]}")"
-			_sampleId="$(basename "${_filePath}" ".concordanceCheckCalls.vcf")"
-			_fileType='VCF'
-
-		### later switch vcf before bam.
-
-#		_searchPath=("/groups/${NGSGROUP}/prm0"*"/projects/${_projectId}"*"/run01/results/concordanceCheckSnps/")
-#		if [[ -e "${_searchPath[0]}" ]]
-#		then
-#			#fetch filename and path, and store in ${_sampleId} ${_filePath}, set _fileType to VCF
-#			_filePath="$(set -e; fetch "${_sample}" ".concordanceCheckCalls.vcf" "${_searchPath[@]}")"
-#			_sampleId="$(basename "${_filePath}" ".concordanceCheckCalls.vcf")"
-#			_fileType='VCF'
-
-#		elif [[ ! -d "${_searchPath[0]}" ]]
-#		then
-#			log4Bash 'INFO' "${LINENO}" "${FUNCNAME[0]:-main}" '0' "VCF not found, Try fetching CRAM."
-#			_searchPath=("/groups/${NGSGROUP}/prm0"*"/projects/${_project}"*"/run01/results/alignment/")
-
-			#fetch filename and path, and store in ${_sampleId} ${_filePath}, set _fileType to CRAM
-#			_filePath="$(set -e; fetch "${_sample}" "\(.bam\|.bam.cram\)" "${_searchPath[@]}")"
-#			_sampleId="$(basename "${_filePath}" ".merged.dedup.bam.cram")"
-#			_sampleId="$(basename "${_sampleId}" ".merged.dedup.bam")"
-#			if [[ "${_filePath}" == *"cram"* ]]
-#			then
-#				_fileType='CRAM'
-#			else
-#				_fileType='BAM'
-#			fi
 	else
 		log4Bash 'WARN' "${LINENO}" "${FUNCNAME[0]:-main}" '0' "concordanceCheckSnps VCF not found, CRAM not found."
 	fi
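
Note on the FILTER step introduced above: patches 4, 5 and 8 together reduce it to a read-depth filter on each non-OpenArray, lifted-over VCF followed by recompression. Below is a minimal standalone sketch of that step in plain bash, assuming bcftools and bgzip (HTSlib) are available on PATH; the sample name, build and input file name are hypothetical stand-ins for the Nextflow template variables !{file}, !{meta.dataId}, !{meta.build} and !{params.minimalDP}, not values taken from these patches.

#!/bin/bash
set -eu
set -o pipefail

# Hypothetical example values standing in for the template variables
# used in nextflow/modules/FILTER/templates/filter.sh.
sampleId="sample1"                       # !{meta.dataId}
build="b38"                              # !{meta.build} (patch 8; patches 4/5 used !{params.build})
minimalDP=10                             # !{params.minimalDP}, default added to nextflow.config in patch 5
inputVcf="${sampleId}.${build}.vcf.gz"   # !{file}

# Exclude records whose INFO/DP falls below the threshold, then bgzip the result,
# yielding the <sampleId>.<build>.DPfiltered.vcf.gz file emitted by the FILTER process.
bcftools filter -e "INFO/DP < ${minimalDP}" "${inputVcf}" > "${sampleId}.${build}.DPfiltered.vcf"
bgzip -c "${sampleId}.${build}.DPfiltered.vcf" > "${sampleId}.${build}.DPfiltered.vcf.gz"

The filtered output can be inspected with, for example, bcftools view -H to confirm that low-coverage records were removed before the concordance check runs.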