-
Notifications
You must be signed in to change notification settings - Fork 0
/
Makefile
146 lines (104 loc) · 4.95 KB
/
Makefile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
# All command-style targets that do not produce a file of the same name.
# (Previously only a subset was declared; a stray file named e.g. `build`
# or `all` would have silently broken those targets.)
.PHONY : clean docs doctests tests pypolychord inference_test clean-results \
    build flake8 sync gh-action evidences extra_samples all sensitivity \
    profiles image container
# Command used to execute notebooks and render them to HTML; the env vars pin
# every numeric backend to a single thread so runs are reproducible and don't
# oversubscribe cores. `:=` expands once at parse time instead of on each use.
JUPYTER_CMD := MKL_NUM_THREADS=1 NUMEXPR_NUM_THREADS=1 OMP_NUM_THREADS=1 jupyter-nbconvert --execute --ExecuteProcessor.timeout=-1 --to=html
# Top-level CI target: lint, run the test-suite, then build the docs.
build : flake8 tests docs

# Static style / lint checks.
flake8 :
	flake8

# Unit tests with coverage reporting for the `shedding` package.
tests :
	pytest tests -v --cov=shedding --cov-report=html --cov-report=term-missing

# Execute the doctests embedded in the documentation sources.
doctests :
	sphinx-build -b doctest . docs/_build

# Build the HTML documentation; doctests must pass first.
docs : doctests
	sphinx-build . docs/_build
# Remove build artefacts: rendered docs, the vendored PolyChordLite checkout,
# and compiled Cython extensions in the `shedding` package.
clean :
	rm -rf docs/_build PolyChordLite
	rm -f shedding/*.so shedding/*.c

# Remove generated results (sample workspace, figures, rendered report).
clean-results :
	rm -rf workspace figures results.html
# Generate pinned dependencies from the abstract requirements.
requirements.txt : requirements.in setup.py
	pip-compile -v --upgrade

# Synchronise the environment with the pinned requirements, then rebuild the
# PolyChord python bindings (pip-sync may have removed/replaced them).
sync : requirements.txt
	pip-sync
	$(MAKE) pypolychord
# Build the repository using a GitHub action for local debugging
# (cf. https://github.com/nektos/act)
gh-action :
	act -P ubuntu-latest=nektos/act-environments-ubuntu:18.04
# Pinned upstream commit of PolyChordLite to vendor.
POLYCHORDLITE_COMMIT = daba49d

# Download and unpack the PolyChordLite source at the pinned commit.
# NOTE(review): the target is the unpacked directory; the downloaded zip is
# left behind (removed only by `clean` indirectly via the directory).
PolyChordLite :
	curl -L -o polychordlite-source.zip https://github.com/PolyChord/PolyChordLite/archive/${POLYCHORDLITE_COMMIT}.zip
	unzip polychordlite-source.zip
	mv PolyChordLite-${POLYCHORDLITE_COMMIT}* PolyChordLite

# Build the non-MPI PolyChord shared library and install the python bindings.
# Uses $(MAKE) (not literal `make`) so -j/-n and the jobserver propagate to
# the sub-make.
pypolychord : PolyChordLite
	cd PolyChordLite \
	&& $(MAKE) MPI=0 libchord.so \
	&& python setup.py --no-mpi install
# Function to get parts of a string split by a dash, e.g.
# $(call wordd,a-b-c,2) -> "b". Named `wordd` (double d) so it does not
# shadow the built-in `word` function it delegates to.
wordd = $(word $2,$(subst -, ,$1))
# Code to generate samples
# Model parametrisations to sample.
PARAMETRISATIONS = general
# Zero-inflation variants; INFLATED_<variant> acts as a lookup table mapping
# the variant name to its command-line flag (empty for "standard").
INFLATED = standard inflated
INFLATED_inflated = --inflated
# Constant vs. time-dependent shedding; TEMPORAL_<variant> maps likewise.
TEMPORAL = constant temporal
TEMPORAL_temporal = --temporal=exponential
# Random seeds for independent replicate runs (override: make SEEDS="0 1").
SEEDS ?= 0 1 2
# See the polychord publication for reference
NLIVE ?= 25
NREPEAT ?= 5
# Cartesian product of seed x parametrisation x inflation x temporal variant:
# one output directory workspace/<param>-<inflated>-<temporal>-<seed> each.
TARGET_DIRS = $(addprefix workspace/,\
    $(foreach s,${SEEDS}, \
    $(foreach p,${PARAMETRISATIONS},\
    $(foreach i,${INFLATED}, \
    $(foreach t,${TEMPORAL}, \
    $p-$i-$t-$s)))))
# Evidences using polychord
EVIDENCE_TARGETS = $(addsuffix /polychord/result.pkl,${TARGET_DIRS})

evidences : ${EVIDENCE_TARGETS}

# Static pattern rule: the stem $* is <param>-<inflated>-<temporal>-<seed>;
# `wordd` extracts each dash-separated part, and the INFLATED_*/TEMPORAL_*
# lookup variables translate variant names into notebook flags.
$(EVIDENCE_TARGETS) : workspace/%/polychord/result.pkl :
	ARGS="--evidence --seed=$(call wordd,$*,4) ${TEMPORAL_$(call wordd,$*,3)} ${INFLATED_$(call wordd,$*,2)} -f --nlive-factor=${NLIVE} --nrepeat-factor=${NREPEAT} $(call wordd,$*,1) workspace/$*/polychord" \
	${JUPYTER_CMD} --output-dir=workspace/$* polychord-sampling.ipynb
	rm -rf workspace/$*/polychord/clusters
# Additional samples for the constant parameters including Wang's data.
# Same product as TARGET_DIRS but restricted to the constant temporal variant.
EXTRA_TARGET_DIRS = $(addprefix workspace/,\
    $(foreach s,${SEEDS}, \
    $(foreach p,${PARAMETRISATIONS},\
    $(foreach i,${INFLATED}, \
    $p-$i-constant-$s))))
EXTRA_SAMPLE_TARGETS = $(addsuffix -extra/polychord/result.pkl,${EXTRA_TARGET_DIRS})

extra_samples : ${EXTRA_SAMPLE_TARGETS}

# Stem $* is <param>-<inflated>-constant-<seed>-extra, so word 4 is still the
# seed and word 2 the inflation variant.
$(EXTRA_SAMPLE_TARGETS) : workspace/%/polychord/result.pkl :
	ARGS="--seed=$(call wordd,$*,4) ${INFLATED_$(call wordd,$*,2)} -f --nlive-factor=${NLIVE} --nrepeat-factor=${NREPEAT} $(call wordd,$*,1) workspace/$*/polychord" \
	${JUPYTER_CMD} --output-dir=workspace/$* polychord-sampling.ipynb
	rm -rf workspace/$*/polychord/clusters

# Run the full sampling campaign.
all : evidences extra_samples
# Targets for assessing sensitivity to errors in days past symptom onset
# (don't need as many points because we're not trying to evaluate evidences).
SENSITIVITY_TARGET_DIRS = $(foreach s,${SEEDS},$(foreach d,1 2 3,workspace/sensitivity-$d-$s))
SENSITIVITY_TARGETS = $(addsuffix /result.pkl,${SENSITIVITY_TARGET_DIRS})

sensitivity : ${SENSITIVITY_TARGETS}

# Stem $* is <day-noise>-<seed>.
${SENSITIVITY_TARGETS} : workspace/sensitivity-%/result.pkl :
	ARGS="-f --nlive-factor=${NLIVE} --nrepeat-factor=${NREPEAT} --day-noise=$(call wordd,$*,1) --seed=$(call wordd,$*,2) --temporal exponential general workspace/sensitivity-$*" \
	${JUPYTER_CMD} --output-dir=workspace/sensitivity-$* polychord-sampling.ipynb
	rm -rf workspace/sensitivity-$*/polychord/clusters
# Targets for investigating different temporal shedding profiles.
PROFILE_TARGET_DIRS = $(foreach s,${SEEDS},$(foreach t,teunis gamma,workspace/profile-$t-$s))
PROFILE_TARGETS = $(addsuffix /result.pkl,${PROFILE_TARGET_DIRS})

profiles : ${PROFILE_TARGETS}

# Stem $* is <profile>-<seed>.
${PROFILE_TARGETS} : workspace/profile-%/result.pkl :
	ARGS="-f --nlive-factor=${NLIVE} --nrepeat-factor=${NREPEAT} --temporal=$(call wordd,$*,1) --seed=$(call wordd,$*,2) general workspace/profile-$*" \
	${JUPYTER_CMD} --output-dir=workspace/profile-$* polychord-sampling.ipynb
	rm -rf workspace/profile-$*/polychord/clusters
# Quick smoke test of the sampling notebook with tiny sampler settings.
# Uses ${JUPYTER_CMD} for consistency with the other notebook rules (this also
# applies the single-thread env vars the hand-written invocation omitted).
inference_test : polychord-sampling.ipynb pypolychord
	mkdir -p $@
	ARGS="-f --nlive-factor=0.1 --nrepeat-factor=0.1 --temporal=exponential --seed=0 general $@" \
	${JUPYTER_CMD} --output-dir=$@ $<
# Figures produced by the results notebook.
FIGURES = model decay positivity-replicates prediction profiles replication shape-scale
# Render the results report and all figures from the completed sample runs.
# NOTE(review): a multi-target rule header defines one independent rule per
# target, so under `make -j` the notebook may execute more than once; a
# grouped target (`&:`, GNU Make >= 4.3) or a stamp file would fix this.
workspace/results.html $(addprefix workspace/figures/,${FIGURES:=.pdf}) : results.ipynb evidences extra_samples sensitivity profiles
	mkdir -p workspace/figures
	${JUPYTER_CMD} $< --output-dir=workspace
# Extra docker arguments, e.g. PLATFORM=--platform=linux/amd64; empty default.
PLATFORM =

# Build the docker image for the project.
image :
	docker build ${PLATFORM} -t shedding .

# Start an interactive container with the workspace directory mounted.
# $(CURDIR) replaces the shell `pwd` backticks: same value, resolved by make.
container :
	mkdir -p workspace
	docker run --rm -it ${PLATFORM} -v $(CURDIR)/workspace:/workspace shedding bash