set dotenv-load := true
set positional-arguments


export VIRTUAL_ENV := env_var_or_default("VIRTUAL_ENV", ".venv")

export BIN := VIRTUAL_ENV + if os_family() == "unix" { "/bin" } else { "/Scripts" }
export PIP := BIN + if os_family() == "unix" { "/python -m pip" } else { "/python.exe -m pip" }
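# For illustration: with the defaults above, on a unix-family OS BIN expands to
# ".venv/bin" and PIP to ".venv/bin/python -m pip"; on Windows they become
# ".venv/Scripts" and ".venv/Scripts/python.exe -m pip".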
|
|
alias help := list

# List available commands
list:
    @just --list --unsorted
|
|
# Ensure valid virtualenv
virtualenv:
    #!/usr/bin/env bash
    set -euo pipefail

    # allow users to specify python version in .env
    PYTHON_VERSION=${PYTHON_VERSION:-python3.11}

    # create venv and upgrade pip
    if [[ ! -d $VIRTUAL_ENV ]]; then
        $PYTHON_VERSION -m venv $VIRTUAL_ENV
        $PIP install --upgrade pip
    fi
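# For illustration, since dotenv-load is enabled above, a .env file like the
# following (the version is an arbitrary example) makes this recipe build the
# virtualenv with that interpreter instead of the python3.11 default:
#
#   PYTHON_VERSION=python3.12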
|
|
# Run pip-compile with our standard settings
pip-compile *ARGS: devenv
    #!/usr/bin/env bash
    set -euo pipefail

    $BIN/pip-compile --allow-unsafe --generate-hashes --strip-extras "$@"
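# For illustration, typical invocations (using the .in files referenced by
# update-dependencies below):
#
#   just pip-compile requirements.prod.in
#   just pip-compile -U requirements.dev.in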
|
|
# Upgrade and re-compile the prod and dev requirements files
update-dependencies: devenv
    just pip-compile -U requirements.prod.in
    just pip-compile -U requirements.dev.in
|
|
# Ensure dev and prod requirements installed and up to date
devenv: virtualenv
    #!/usr/bin/env bash
    set -euo pipefail

    for req_file in requirements.dev.txt requirements.prod.txt pyproject.minimal.toml; do
        # If we've installed this file before and the original hasn't been
        # modified since then, bail early
        record_file="$VIRTUAL_ENV/$req_file"
        if [[ -e "$record_file" && "$record_file" -nt "$req_file" ]]; then
            continue
        fi

        if cmp --silent "$req_file" "$record_file"; then
            # If the timestamp has been changed but not the contents (as can happen
            # when switching branches) then just update the timestamp
            touch "$record_file"
        else
            # Otherwise actually install the requirements

            if [[ "$req_file" == *.txt ]]; then
                # --no-deps is recommended when using hashes, and also works around a
                # bug with constraints and hashes. See:
                # https://pip.pypa.io/en/stable/topics/secure-installs/#do-not-use-setuptools-directly
                $PIP install --no-deps --requirement "$req_file"
            elif [[ "$req_file" == *.toml ]]; then
                $PIP install --no-deps --editable "$(dirname "$req_file")"
            else
                echo "Unhandled file: $req_file"
                exit 1
            fi

            # Make a record of what we just installed
            cp "$req_file" "$record_file"
        fi
    done

    if [[ ! -f .git/hooks/pre-commit ]]; then
        $BIN/pre-commit install
    fi
|
|
# Lint and check formatting but don't modify anything
check: devenv
    #!/usr/bin/env bash

    failed=0

    check() {
        # Display the command we're going to run, in bold and with the "$BIN/"
        # prefix removed if present
        echo -e "\e[1m=> ${1#"$BIN/"}\e[0m"
        # Run it
        eval $1
        # Increment the counter on failure
        if [[ $? != 0 ]]; then
            failed=$((failed + 1))
            # Add spacing to separate the error output from the next check
            echo -e "\n"
        fi
    }

    check "$BIN/ruff format --diff --quiet ."
    check "$BIN/ruff check --output-format=full ."
    check "docker run --rm -i ghcr.io/hadolint/hadolint:v2.12.0-alpine < Dockerfile"

    if [[ $failed -gt 0 ]]; then
        echo -en "\e[1;31m"
        echo " $failed checks failed"
        echo -e "\e[0m"
        exit 1
    fi
|
|
# Fix any automatically fixable linting or formatting errors
fix: devenv
    $BIN/ruff format .
    $BIN/ruff check --fix .
|
|
# Build the ehrQL docker image
build-ehrql image_name="ehrql-dev" *args="":
    #!/usr/bin/env bash
    set -euo pipefail

    export BUILD_DATE=$(date -u +'%y-%m-%dT%H:%M:%SZ')
    export GITREF=$(git rev-parse --short HEAD)

    [[ -v CI ]] && echo "::group::Build ehrql Docker image (click to view)" || echo "Build ehrql Docker image"
    DOCKER_BUILDKIT=1 docker build . --build-arg BUILD_DATE="$BUILD_DATE" --build-arg GITREF="$GITREF" --tag {{ image_name }} {{ args }}
    [[ -v CI ]] && echo "::endgroup::" || echo ""
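# For illustration, building with a custom tag and an extra docker build flag
# (the tag name is just an example):
#
#   just build-ehrql my-ehrql --no-cache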
|
|
# Build a docker image tagged `ehrql:dev` that can be used in `project.yaml` for local testing
build-ehrql-for-os-cli: build-ehrql
    docker tag ehrql-dev ghcr.io/opensafely-core/ehrql:dev
|
|
# Tear down the persistent docker containers we create to run tests against
remove-database-containers:
    docker rm --force ehrql-mssql ehrql-trino
|
|
# Create an MSSQL docker container with the TPP database schema and print connection strings
create-tpp-test-db: devenv
    $BIN/python -m pytest -o python_functions=create tests/lib/create_tpp_test_db.py
|
|
# Open an interactive SQL Server shell running against MSSQL
connect-to-mssql:
    # Only pass '-t' argument to Docker if stdin is a TTY so you can pipe a SQL
    # file to this command as well as using it interactively.
    docker exec -i `[ -t 0 ] && echo '-t'` \
        ehrql-mssql \
        /opt/mssql-tools18/bin/sqlcmd -C -S localhost -U sa -P 'Your_password123!' -d test
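# For illustration, piping a SQL file through the shell (the file name is
# hypothetical):
#
#   just connect-to-mssql < queries.sql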
|
|
# Open an interactive trino shell
connect-to-trino:
    docker exec -it ehrql-trino trino --catalog trino --schema default
|
|
###################################################################
# Testing targets
###################################################################

# Run all or some pytest tests
test *ARGS: devenv
    $BIN/python -m pytest "$@"
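# For illustration, running a subset of the tests (the path and the -k
# expression are examples):
#
#   just test tests/unit
#   just test -k "some_test_name"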
|
|
# Run the acceptance tests only
test-acceptance *ARGS: devenv
    $BIN/python -m pytest tests/acceptance "$@"


# Run the backend validation tests only
test-backend-validation *ARGS: devenv
    $BIN/python -m pytest tests/backend_validation "$@"


# Run the ehrql-in-docker tests only
test-docker *ARGS: devenv
    $BIN/python -m pytest tests/docker "$@"


# Run the docs examples tests only
test-docs-examples *ARGS: devenv
    $BIN/python -m pytest tests/docs "$@"


# Run the integration tests only
test-integration *ARGS: devenv
    $BIN/python -m pytest tests/integration "$@"


# Run the spec tests only
test-spec *ARGS: devenv
    $BIN/python -m pytest tests/spec "$@"


# Run the unit tests only
test-unit *ARGS: devenv
    $BIN/python -m pytest tests/unit "$@"
    $BIN/python -m pytest --doctest-modules ehrql
|
|
# Run the generative tests only, configured to use more than the tiny default
# number of examples. Optional args are passed to pytest.
#
# Set GENTEST_DEBUG env var to see stats.
# Set GENTEST_EXAMPLES to change the number of examples generated.
# Set GENTEST_MAX_DEPTH to change the depth of generated query trees.
#
# Run generative tests using more than the small deterministic set of examples used in CI
test-generative *ARGS: devenv
    GENTEST_EXAMPLES=${GENTEST_EXAMPLES:-200} \
        $BIN/python -m pytest tests/generative "$@"
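# For illustration, running a longer generative test session (the value is an
# arbitrary example):
#
#   GENTEST_EXAMPLES=1000 just test-generative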
|
|
# Run all tests as they will be run in CI (checking code coverage etc)
@test-all *ARGS: devenv generate-docs
    #!/usr/bin/env bash
    set -euo pipefail

    GENTEST_DERANDOMIZE=t \
    GENTEST_EXAMPLES=${GENTEST_EXAMPLES:-100} \
    GENTEST_CHECK_IGNORED_ERRORS=t \
        $BIN/python -m pytest \
            --cov=ehrql \
            --cov=tests \
            --cov-report=html \
            --cov-report=term-missing:skip-covered \
            "$@"
    $BIN/python -m pytest --doctest-modules ehrql
|
|
# Convert a raw failing example from Hypothesis's output into a simplified test case
gentest-example-simplify *ARGS: devenv
    $BIN/python -m tests.lib.gentest_example_simplify "$@"
|
|
245 |
|
|
|
246 |
|
|
|
247 |
# Run a generative test example defined in the supplied file |
|
|
248 |
gentest-example-run example *ARGS: devenv |
|
|
249 |
GENTEST_EXAMPLE_FILE='{{example}}' \ |
|
|
250 |
$BIN/python -m pytest \ |
|
|
251 |
tests/generative/test_query_model.py::test_query_model_example_file \ |
|
|
252 |
"$@" |
|
|
# Generate the data for the reference documentation
generate-docs OUTPUT_DIR="docs/includes/generated_docs": devenv
    $BIN/python -m ehrql.docs {{ OUTPUT_DIR }}
    echo "Generated data for documentation in {{ OUTPUT_DIR }}"
|
|
# Run the documentation server: to configure the port, append: --dev-addr localhost:<port>
docs-serve *ARGS: devenv generate-docs
    # Run the MkDocs server with `--clean` to enforce the `exclude_docs` option.
    # This removes false positives that pertain to the autogenerated documentation includes.
    "$BIN"/mkdocs serve --clean "$@"
|
|
# Build the documentation
docs-build *ARGS: devenv generate-docs
    "$BIN"/mkdocs build --clean --strict "$@"
|
|
# Run the snippet tests
docs-test: devenv
    echo "Not implemented here"
|
|
# Check the generated docs are current
docs-check-generated-docs-are-current: generate-docs
    #!/usr/bin/env bash
    set -euo pipefail

    # https://stackoverflow.com/questions/3878624/how-do-i-programmatically-determine-if-there-are-uncommitted-changes
    # git diff --exit-code won't pick up untracked files, which we also want to check for.
    if [[ -z $(git status --porcelain ./docs/includes/generated_docs/; git clean -nd ./docs/includes/generated_docs/) ]]
    then
        echo "Generated docs directory is current and free of other files/directories."
    else
        echo "Generated docs directory contains files/directories not in the repository."
        git diff ./docs/includes/generated_docs/; git clean -n ./docs/includes/generated_docs/
        exit 1
    fi
|
|
# Update the copies of the external studies used by the acceptance tests
update-external-studies: devenv
    $BIN/python -m tests.acceptance.update_external_studies
|
|
# Fetch the latest TPP schema and rebuild the test schema module from it
update-tpp-schema: devenv
    #!/usr/bin/env bash
    set -euo pipefail

    echo 'Fetching latest tpp_schema.csv'
    $BIN/python -m tests.lib.update_tpp_schema fetch
    echo 'Building new tpp_schema.py'
    $BIN/python -m tests.lib.update_tpp_schema build
|
|
306 |
|
|
|
307 |
|
|
|
308 |
update-pledge: devenv |
|
|
309 |
#!/usr/bin/env bash |
|
|
310 |
set -euo pipefail |
|
|
311 |
URL_RECORD_FILE="bin/cosmopolitan-release-url.txt" |
|
|
312 |
ZIP_URL="$( |
|
|
313 |
$BIN/python -c \ |
|
|
314 |
'import requests; print([ |
|
|
315 |
asset["browser_download_url"] |
|
|
316 |
for release in requests.get("https://api.github.com/repos/jart/cosmopolitan/releases").json() |
|
|
317 |
for asset in release["assets"] |
|
|
318 |
if asset["name"].startswith("cosmos-") and asset["name"].endswith(".zip") |
|
|
319 |
][0])' |
|
|
320 |
)" |
|
|
321 |
echo "Latest Cosmopolitation release: $ZIP_URL" |
|
|
322 |
if grep -Fxqs "$ZIP_URL" "$URL_RECORD_FILE"; then |
|
|
323 |
echo "Already up to date." |
|
|
324 |
exit 0 |
|
|
325 |
fi |
|
|
326 |
|
|
|
327 |
if [[ "$(uname -s)" != "Linux" ]]; then |
|
|
328 |
echo "This command can only be run on a Linux system because we need to" |
|
|
329 |
echo " \"assimilate\" `pledge` to be a regular Linux executable" |
|
|
330 |
exit 1 |
|
|
331 |
fi |
|
|
332 |
|
|
|
333 |
echo "Downloading ..." |
|
|
334 |
TMP_FILE="$(mktemp)" |
|
|
335 |
curl --location --output "$TMP_FILE" "$ZIP_URL" |
|
|
336 |
unzip -o -j "$TMP_FILE" bin/pledge -d bin/ |
|
|
337 |
rm "$TMP_FILE" |
|
|
338 |
|
|
|
339 |
# Rewrite the file header so it becomes a native Linux executable which we |
|
|
340 |
# can run directly without needing a shell. See: |
|
|
341 |
# https://justine.lol/apeloader/ |
|
|
342 |
echo "Assimilating ..." |
|
|
343 |
sh bin/pledge --assimilate |
|
|
344 |
|
|
|
345 |
echo "Complete." |
|
|
346 |
echo "$ZIP_URL" > "$URL_RECORD_FILE" |