Merge pull request #234 from ggabernet/dsl2
Conversion to DSL2
ggabernet authored Jul 15, 2020
2 parents 7fc9f9a + ca5e36d commit 9d1087b
Showing 22 changed files with 3,872 additions and 3,904 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -12,7 +12,7 @@ jobs:
strategy:
matrix:
# Nextflow versions: check pipeline minimum and current latest
-        nxf_ver: ['19.10.0', '']
+        nxf_ver: ['20.04.1', '']
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
@@ -42,7 +42,7 @@ jobs:
sudo mv nextflow /usr/local/bin/
env:
# Only check Nextflow pipeline minimum version
-          NXF_VER: '19.10.0'
+          NXF_VER: '20.04.1'
- name: Pull docker image
run: |
docker pull nfcore/sarek:dev
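
The version bump to 20.04.1 reflects the minimum Nextflow release that supports DSL2. As a hedged illustration (not part of this diff), the pipeline's nextflow.config manifest would normally carry a matching constraint:

// Hypothetical nextflow.config excerpt: the manifest pin should agree with the CI minimum tested above.
manifest {
    name            = 'nf-core/sarek'
    nextflowVersion = '>=20.04.1'
}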
1 change: 1 addition & 0 deletions .vscode/settings.json
@@ -0,0 +1 @@
123,125
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -6,10 +6,13 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) a

## [dev](https://github.com/nf-core/sarek/tree/dev)

- [#234](https://github.com/nf-core/sarek/pull/234) - Switching to DSL2

### Added

### Changed

- [#234](https://github.com/nf-core/sarek/pull/234) - Update Nextflow `19.10.0` -> `20.04.1`
- [#233](https://github.com/nf-core/sarek/pull/233) - Switch `bwa 0.7.17` for `bwa-mem2 2.0`

### Fixed
2 changes: 1 addition & 1 deletion README.md
@@ -2,7 +2,7 @@

> **An open-source analysis pipeline to detect germline or somatic variants from whole genome or targeted sequencing**
-[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A519.10.0-brightgreen.svg)](https://www.nextflow.io/)
+[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A520.04.1-brightgreen.svg)](https://www.nextflow.io/)
[![nf-core](https://img.shields.io/badge/nf--core-pipeline-brightgreen.svg)](https://nf-co.re/)
[![DOI](https://zenodo.org/badge/184289291.svg)](https://zenodo.org/badge/latestdoi/184289291)

21 changes: 12 additions & 9 deletions conf/test.config
@@ -28,13 +28,16 @@ params {
vep_cache_version = '99'
}

process {
withName:Snpeff {
container = 'nfcore/sareksnpeff:dev.WBcel235'
maxForks = 1
}
withLabel:VEP {
container = 'nfcore/sarekvep:dev.WBcel235'
maxForks = 1
}
/*
* TODO: uncomment when ready
process {
withName:Snpeff {
container = 'nfcore/sareksnpeff:dev.WBcel235'
maxForks = 1
}
withLabel:VEP {
container = 'nfcore/sarekvep:dev.WBcel235'
maxForks = 1
}
}
*/
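
The commented-out selectors only take effect once DSL2 processes exist that match them. A hedged sketch of a process carrying the VEP label that withLabel:VEP would configure (the process name, inputs and command line are illustrative assumptions, not code from this commit):

// Illustrative DSL2 process: the 'VEP' label is what the withLabel:VEP selector
// above keys on; its container and maxForks settings would then apply here.
process VEP {
    label 'VEP'

    input:
    path(vcf)

    output:
    path("*.ann.vcf")

    script:
    """
    vep -i ${vcf} -o ${vcf.baseName}.ann.vcf --offline
    """
}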
36 changes: 36 additions & 0 deletions lib/Checks.groovy
@@ -0,0 +1,36 @@
/*
* This file holds several functions used to perform standard checks for the nf-core pipeline template.
*/

class Checks {

static void aws_batch(workflow, params) {
if (workflow.profile.contains('awsbatch')) {
            assert params.awsqueue && params.awsregion : "Specify correct --awsqueue and --awsregion parameters on AWSBatch!"
            // Check outdir paths to be S3 buckets if running on AWSBatch
            // related: https://github.com/nextflow-io/nextflow/issues/813
            assert params.outdir.startsWith('s3:') : "Outdir not on S3 - specify S3 Bucket to run on AWSBatch!"
            // Prevent trace files from being stored on S3, since S3 does not support rolling files.
            assert !params.tracedir.startsWith('s3:') : "Specify a local tracedir or run without trace! S3 cannot be used for trace files."
}
}

static void hostname(workflow, params, log) {
Map colors = Headers.log_colours(params.monochrome_logs)
if (params.hostnames) {
def hostname = "hostname".execute().text.trim()
params.hostnames.each { prof, hnames ->
hnames.each { hname ->
if (hostname.contains(hname) && !workflow.profile.contains(prof)) {
log.info "=${colors.yellow}====================================================${colors.reset}=\n" +
"${colors.yellow}WARN: You are running with `-profile $workflow.profile`\n" +
" but your machine hostname is ${colors.white}'$hostname'${colors.reset}.\n" +
" ${colors.yellow_bold}Please use `-profile $prof${colors.reset}`\n" +
"=${colors.yellow}====================================================${colors.reset}="
}
}
}
}
}
}
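
Both helpers are static, so they can be called directly from main.nf before the workflow starts. A hedged sketch of the assumed call sites (not shown in this diff):

// Assumed wiring in main.nf: run the checks once at startup.
Checks.aws_batch(workflow, params)       // fail fast on misconfigured AWS Batch runs
Checks.hostname(workflow, params, log)   // warn if the hostname does not match the chosen -profile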

119 changes: 119 additions & 0 deletions lib/Completion.groovy
@@ -0,0 +1,119 @@
/*
* Functions to be run on completion of pipeline
*/

class Completion {
static void email(workflow, params, summary, run_name, baseDir, multiqc_report, log) {

// Set up the e-mail variables
def subject = "[$workflow.manifest.name] Successful: $workflow.runName"
if (!workflow.success) {
subject = "[$workflow.manifest.name] FAILED: $workflow.runName"
}

def email_fields = [:]
email_fields['version'] = workflow.manifest.version
email_fields['runName'] = run_name ?: workflow.runName
email_fields['success'] = workflow.success
email_fields['dateComplete'] = workflow.complete
email_fields['duration'] = workflow.duration
email_fields['exitStatus'] = workflow.exitStatus
email_fields['errorMessage'] = (workflow.errorMessage ?: 'None')
email_fields['errorReport'] = (workflow.errorReport ?: 'None')
email_fields['commandLine'] = workflow.commandLine
email_fields['projectDir'] = workflow.projectDir
email_fields['summary'] = summary
email_fields['summary']['Date Started'] = workflow.start
email_fields['summary']['Date Completed'] = workflow.complete
email_fields['summary']['Pipeline script file path'] = workflow.scriptFile
email_fields['summary']['Pipeline script hash ID'] = workflow.scriptId
if (workflow.repository) email_fields['summary']['Pipeline repository Git URL'] = workflow.repository
if (workflow.commitId) email_fields['summary']['Pipeline repository Git Commit'] = workflow.commitId
if (workflow.revision) email_fields['summary']['Pipeline Git branch/tag'] = workflow.revision
email_fields['summary']['Nextflow Version'] = workflow.nextflow.version
email_fields['summary']['Nextflow Build'] = workflow.nextflow.build
email_fields['summary']['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp

// TODO nf-core: If not using MultiQC, strip out this code (including params.max_multiqc_email_size)
// On success try attach the multiqc report
def mqc_report = null
try {
if (workflow.success) {
mqc_report = multiqc_report.getVal()
if (mqc_report.getClass() == ArrayList) {
log.warn "[$workflow.manifest.name] Found multiple reports from process 'MULTIQC', will use only one"
mqc_report = mqc_report[0]
}
}
} catch (all) {
log.warn "[$workflow.manifest.name] Could not attach MultiQC report to summary email"
}

// Check if we are only sending emails on failure
def email_address = params.email
if (!params.email && params.email_on_fail && !workflow.success) {
email_address = params.email_on_fail
}

// Render the TXT template
def engine = new groovy.text.GStringTemplateEngine()
def tf = new File("$baseDir/assets/email_template.txt")
def txt_template = engine.createTemplate(tf).make(email_fields)
def email_txt = txt_template.toString()

// Render the HTML template
def hf = new File("$baseDir/assets/email_template.html")
def html_template = engine.createTemplate(hf).make(email_fields)
def email_html = html_template.toString()

// Render the sendmail template
def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, baseDir: "$baseDir", mqcFile: mqc_report, mqcMaxSize: params.max_multiqc_email_size.toBytes() ]
def sf = new File("$baseDir/assets/sendmail_template.txt")
def sendmail_template = engine.createTemplate(sf).make(smail_fields)
def sendmail_html = sendmail_template.toString()

// Send the HTML e-mail
if (email_address) {
try {
if (params.plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') }
// Try to send HTML e-mail using sendmail
[ 'sendmail', '-t' ].execute() << sendmail_html
log.info "[$workflow.manifest.name] Sent summary e-mail to $email_address (sendmail)"
} catch (all) {
// Catch failures and try with plaintext
def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ]
if ( mqc_report.size() <= params.max_multiqc_email_size.toBytes() ) {
mail_cmd += [ '-A', mqc_report ]
}
mail_cmd.execute() << email_html
log.info "[$workflow.manifest.name] Sent summary e-mail to $email_address (mail)"
}
}

// Write summary e-mail HTML to a file
def output_d = new File("${params.outdir}/pipeline_info/")
if (!output_d.exists()) {
output_d.mkdirs()
}
def output_hf = new File(output_d, "pipeline_report.html")
output_hf.withWriter { w -> w << email_html }
def output_tf = new File(output_d, "pipeline_report.txt")
output_tf.withWriter { w -> w << email_txt }
}

static void summary(workflow, params, log) {
Map colors = Headers.log_colours(params.monochrome_logs)
if (workflow.stats.ignoredCount > 0 && workflow.success) {
log.info "-${colors.purple}Warning, pipeline completed, but with errored process(es) ${colors.reset}-"
log.info "-${colors.red}Number of ignored errored process(es) : ${workflow.stats.ignoredCount} ${colors.reset}-"
log.info "-${colors.green}Number of successfully ran process(es) : ${workflow.stats.succeedCount} ${colors.reset}-"
}
if (workflow.success) {
log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-"
} else {
            Checks.hostname(workflow, params, log)
log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-"
}
}
}
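
These helpers are meant to be invoked from Nextflow's completion handler. A hedged sketch of the assumed wiring in main.nf (summary, run_name and multiqc_report stand for values built elsewhere in the pipeline; none of this wiring is shown in the diff):

// Assumed completion handler in main.nf: send the summary e-mail (if configured)
// and print the coloured end-of-run summary.
workflow.onComplete {
    Completion.email(workflow, params, summary, run_name, baseDir, multiqc_report, log)
    Completion.summary(workflow, params, log)
}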

39 changes: 39 additions & 0 deletions lib/Headers.groovy
@@ -0,0 +1,39 @@
/*
* This file holds several functions used to render the nf-core ANSI header.
*/

class Headers {

private static Map log_colours(Boolean monochrome_logs) {
Map colorcodes = [:]
colorcodes['reset'] = monochrome_logs ? '' : "\033[0m"
colorcodes['dim'] = monochrome_logs ? '' : "\033[2m"
colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m"
colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m"
colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m"
colorcodes['yellow_bold'] = monochrome_logs ? '' : "\033[1;93m"
colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m"
colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m"
colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m"
colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m"
colorcodes['red'] = monochrome_logs ? '' : "\033[1;91m"
return colorcodes
}

static String nf_core(workflow, monochrome_logs) {
Map colors = log_colours(monochrome_logs)
String.format(
"""\n
-${colors.dim}----------------------------------------------------${colors.reset}-
${colors.green},--.${colors.black}/${colors.green},-.${colors.reset}
${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset}
${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset}
${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset}
${colors.green}`._,._,\'${colors.reset}
${colors.purple} ${workflow.manifest.name} v${workflow.manifest.version}${colors.reset}
-${colors.dim}----------------------------------------------------${colors.reset}-
""".stripIndent()
)
}
}
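
The header is returned as a string rather than printed, so the caller decides where it goes. A hedged sketch of the usual call site (assumed usage, not part of this diff):

// Assumed usage in main.nf: render the nf-core ASCII header, honouring --monochrome_logs.
log.info Headers.nf_core(workflow, params.monochrome_logs)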

128 changes: 128 additions & 0 deletions lib/Schema.groovy
@@ -0,0 +1,128 @@
/*
* This file holds several functions used to perform JSON parameter validation, help and summary rendering for the nf-core pipeline template.
*/

import groovy.json.JsonSlurper

class JSON {
/*
* This method tries to read a JSON params file
*/
private static LinkedHashMap params_get(String path) {
def usage = new LinkedHashMap()
try {
usage = params_try(path)
} catch (Exception e) {
println "Could not read parameters settings from JSON. $e"
usage = new LinkedHashMap()
}
return usage
}

    /*
     * Method to actually read in the JSON file using Groovy.
     * Returns a map keyed by parameter group; each value is itself a map of
     *   parameter name -> parameter description
     * for every parameter in that group.
     */
private static LinkedHashMap params_try(String path) throws Exception {

def json = new File(path).text
def Map usage = (Map) new JsonSlurper().parseText(json).get('properties')

/* Tree looks like this in nf-core schema
* properties <- this is what the first get('properties') gets us
group 1
properties
description
group 2
properties
description
group 3
properties
description
*/
def output_map = new LinkedHashMap()

        // Let's go deeper
usage.each { key, val ->
def Map submap = usage."$key".properties // Gets the property object of the group
def sub_params = new LinkedHashMap()
submap.each { innerkey, value ->
sub_params.put("$innerkey", "$value.description")
}
output_map.put("$key", sub_params)
}
return output_map
}

static String params_help(path, command) {
String output = "Typical pipeline command:\n\n"
output += " ${command}\n\n"
output += params_beautify(params_get(path))
}

static String params_beautify(usage) {
String output = ""
for (group in usage.keySet()) {
output += group + "\n"
def params = usage.get(group) // This gets the parameters of that particular group
for (par in params.keySet()) {
            output += " \u001B[1m" + par.padRight(27) + "\u001B[0m" + params.get(par) + "\n"
}
output += "\n"
}
return output
}

private static LinkedHashMap params_summary(workflow, params, run_name) {
def Map summary = [:]
if (workflow.revision) summary['Pipeline Release'] = workflow.revision
summary['Run Name'] = run_name ?: workflow.runName
// TODO nf-core: Report custom parameters here
summary['Input'] = params.input
summary['Fasta File'] = params.fasta
summary['Max Resources'] = "$params.max_memory memory, $params.max_cpus cpus, $params.max_time time per job"
if (workflow.containerEngine) summary['Container'] = "$workflow.containerEngine - $workflow.container"
summary['Output dir'] = params.outdir
summary['Launch dir'] = workflow.launchDir
summary['Working dir'] = workflow.workDir
summary['Script dir'] = workflow.projectDir
summary['User'] = workflow.userName
if (workflow.profile.contains('awsbatch')) {
summary['AWS Region'] = params.awsregion
summary['AWS Queue'] = params.awsqueue
summary['AWS CLI'] = params.awscli
}
summary['Config Profile'] = workflow.profile
if (params.config_profile_description) summary['Config Profile Descr'] = params.config_profile_description
if (params.config_profile_contact) summary['Config Profile Contact'] = params.config_profile_contact
if (params.config_profile_url) summary['Config Profile URL'] = params.config_profile_url
summary['Config Files'] = workflow.configFiles.join(', ')
if (params.email || params.email_on_fail) {
summary['E-mail Address'] = params.email
summary['E-mail on failure'] = params.email_on_fail
summary['MultiQC maxsize'] = params.max_multiqc_email_size
}
return summary
}

static String params_mqc_summary(summary) {
String yaml_file_text = """
        id: 'nf-core-sarek-summary'
        description: " - this information is collected when the pipeline is started."
        section_name: 'nf-core/sarek Workflow Summary'
        section_href: 'https://github.com/nf-core/sarek'
plot_type: 'html'
data: |
<dl class=\"dl-horizontal\">
            ${summary.collect { k,v -> " <dt>$k</dt><dd><samp>${v ?: '<span style=\"color:#999999;\">N/A</span>'}</samp></dd>" }.join("\n")}
</dl>
""".stripIndent()

return yaml_file_text
}
}
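
A hedged sketch of how main.nf could drive these helpers for --help output and the run summary; the schema path, example command and run-name handling are assumptions for illustration only:

// Assumed usage in main.nf: print schema-driven help, then build and log the run summary.
if (params.help) {
    def command = "nextflow run nf-core/sarek --input samples.tsv -profile docker"
    log.info JSON.params_help("$baseDir/nextflow_schema.json", command)
    exit 0
}

def run_name = workflow.runName   // placeholder; a custom -name would normally be handled here
def summary  = JSON.params_summary(workflow, params, run_name)
log.info summary.collect { k, v -> "${k.padRight(22)}: $v" }.join('\n')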
