Skip to content

Commit

Permalink
Merge pull request #80 from HippocampusGirl/dev/cli-update
Browse files Browse the repository at this point in the history
Update CLI
  • Loading branch information
HippocampusGirl committed Apr 13, 2021
2 parents 1ae6986 + 3c80a00 commit b791cda
Show file tree
Hide file tree
Showing 4 changed files with 113 additions and 28 deletions.
127 changes: 102 additions & 25 deletions halfpipe/cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,20 +8,26 @@

import logging

from .utils import first
from .io import make_cachefilepath

# Submission-script templates, one per supported cluster scheduler.
# Placeholders ({n_chunks}, {mem_mb}, ...) are filled in by
# create_example_script via str.format; literal shell ${...} expansions
# are escaped as ${{...}}.
script_templates = dict(
    slurm="""#!/bin/bash
#
#
#SBATCH --job-name=halfpipe
#SBATCH --output=halfpipe.log.txt
#
#SBATCH --time=24:00:00
#SBATCH --ntasks=1
#SBATCH --cpus-per-task={n_cpus}
#SBATCH --mem-per-cpu={mem_mb}M
#
#SBATCH --array=1-{n_chunks}

if ! [ -x "$(command -v singularity)" ]; then
module load singularity
fi

singularity run \\
--no-home \\
--cleanenv \\
--bind /:/ext \\
{singularity_container} \\
--workdir {cwd} \\
--only-run \\
--execgraph-file {execgraph_file} \\
--only-chunk-index ${{SLURM_ARRAY_TASK_ID}} \\
--nipype-n-procs 2 \\
--verbose {extra_args}
""",
    torque="""#!/bin/bash
#
#
#PBS -N halfpipe
#PBS -j oe
#PBS -o halfpipe.log.txt
#
#PBS -l nodes=1:ppn=2
#PBS -l walltime=24:00:00
#PBS -l mem={mem_mb}mb
#
#PBS -J 1-{n_chunks}

if ! [ -x "$(command -v singularity)" ]; then
module load singularity
fi

singularity run \\
--no-home \\
--cleanenv \\
--bind /:/ext \\
{singularity_container} \\
--workdir {cwd} \\
--only-run \\
--execgraph-file {execgraph_file} \\
--only-chunk-index ${{PBS_ARRAY_INDEX}} \\
--nipype-n-procs 2 \\
--verbose {extra_args}
""",
    sge="""#!/bin/bash
#
#
#$ -N halfpipe
#$ -j y
#$ -o halfpipe.log.txt
#$ -cwd
#
#$ -pe smp 2
#$ -l h_rt=24:0:0
#$ -l mem={mem_mb}M
#
#$ -t 1-{n_chunks}

if ! [ -x "$(command -v singularity)" ]; then
module load singularity
fi

singularity run \\
--no-home \\
--cleanenv \\
--bind /:/ext \\
{singularity_container} \\
--workdir {cwd} \\
--only-run \\
--execgraph-file {execgraph_file} \\
--only-chunk-index ${{SGE_TASK_ID}} \\
--nipype-n-procs 2 \\
--verbose {extra_args}
""",
)


def create_example_script(workdir, execgraphs, opts):
    """Write an example cluster submission script for every scheduler.

    For each entry in ``script_templates`` (slurm, torque, sge), fills in
    the template and writes ``submit.<scheduler>.sh`` into ``workdir``.

    Parameters
    ----------
    workdir : str or Path
        Working directory where the scripts are written.
    execgraphs : sequence
        Execution graphs; the last entry is assumed to be the model chunk
        and is excluded from the job array count.
    opts : argparse.Namespace
        Parsed command-line options; ``keep`` and ``fs_license_file`` are
        forwarded into the generated command lines.
    """
    uuid = execgraphs[0].uuid
    n_chunks = len(execgraphs) - 1  # omit model chunk
    assert n_chunks > 1  # a job array only makes sense with multiple chunks
    execgraph_file = make_cachefilepath(f"execgraph.{n_chunks:d}_chunks", uuid)

    n_cpus = 2
    # Size the memory request from the hungriest node across all graphs;
    # 1536 = 1024 MB/GB * 1.5 safety margin, divided over the cpus.
    nipype_max_mem_gb = max(node.mem_gb for execgraph in execgraphs for node in execgraph.nodes)
    mem_mb = f"{ceil(nipype_max_mem_gb / n_cpus * 1536):d}"  # fudge factor

    # Forward relevant command-line options into the generated scripts.
    extra_args = f"--keep {opts.keep}"
    if opts.fs_license_file is not None:
        extra_args += f" --fs-license-file {opts.fs_license_file}"

    data = dict(
        n_chunks=n_chunks,  # one-based indexing
        singularity_container=os.environ["SINGULARITY_CONTAINER"],
        cwd=str(Path(workdir).resolve()),
        execgraph_file=str(Path(workdir).resolve() / execgraph_file),
        n_cpus=n_cpus,
        mem_mb=mem_mb,
        extra_args=extra_args,
    )

    for cluster_type, script_template in script_templates.items():
        st = script_template.format(**data)
        stpath = f"submit.{cluster_type}.sh"
        # level 25 sits between INFO (20) and WARNING (30)
        logging.getLogger("halfpipe").log(25, f'A submission script template was created at "{stpath}"')
        with open(Path(workdir) / stpath, "w") as f:
            f.write(st)
1 change: 1 addition & 0 deletions halfpipe/logging/filter.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ def filter(self, record):
"Support for setting the 'mathtext.fallback_to_cm' rcParam is deprecated since 3.3 and will be removed two minor releases later; use 'mathtext.fallback : 'cm' instead.",
"VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated.",
"FutureWarning: Index.ravel returning ndarray is deprecated; in a future version this will return a view on self.",
"FutureWarning: Fetchers from the nilearn.datasets module will be updated in version 0.9 to return python strings instead of bytes and Pandas dataframes instead of Numpy arrays",
]


Expand Down
4 changes: 2 additions & 2 deletions halfpipe/model/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,8 @@ class MixedEffectsModelSchema(BaseModelSchema):

algorithms = fields.List(
fields.Str(validate=validate.OneOf(algorithms.keys())),
default=["flame1"],
missing=["flame1"],
default=["flame1", "mcartest", "heterogeneity"],
missing=["flame1", "mcartest", "heterogeneity"],
)


Expand Down
9 changes: 8 additions & 1 deletion halfpipe/workflow/feature/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,14 @@ def create(self, sourcefile, feature, raw_sources=[]):
if feature.type == "task_based":
confounds_action = "select"

condition_files = sorted(list(database.associations(sourcefile, datatype="func", suffix="events")))
condition_files = sorted(set(
database.associations(
sourcefile,
task=database.tagval(sourcefile, "task"), # enforce same task
datatype="func",
suffix="events",
)
))
raw_sources = [*raw_sources, *condition_files]
if ".txt" in database.tagvalset("extension", filepaths=condition_files):
condition_files = [
Expand Down

0 comments on commit b791cda

Please sign in to comment.