Compare commits
No commits in common. "41025e0c91ccc07e8d2c79eb0b0be21ed9f6bdea" and "a2ef5fad7ed10707c695c6f00ee5df267769b58c" have entirely different histories.
41025e0c91 ... a2ef5fad7e
@@ -1,44 +0,0 @@
-name: Create and publish a Docker image
-
-on:
-  push:
-    branches: ['main']
-  workflow_dispatch:
-
-env:
-  REGISTRY: ghcr.io
-
-jobs:
-  build-and-push-image:
-    strategy:
-      matrix:
-        image: [datalad-apptainer, deface, dicom_indexer, heudiconv]
-    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      packages: write
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Log in to the Container registry
-        uses: docker/login-action@v3
-        with:
-          registry: ${{ env.REGISTRY }}
-          username: ${{ env.NIDATAOPS_BOT_NAME }}
-          password: ${{ secrets.NIDATAOPS_BOT_REGISTRY_TOKEN }}
-
-      - name: Extract metadata (tags, labels) for Docker
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: ${{ env.REGISTRY }}/${{ env.GITHUB_REPOSITORY_OWNER }}.${{ matrix.image }}
-
-      - name: Build and push Docker image
-        uses: docker/build-push-action@v5
-        with:
-          context: docker/${{ matrix.image }}
-          push: true
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
@@ -31,13 +31,12 @@ COPY --from=builder /usr/local/apptainer /usr/local/apptainer
 ENV PATH="/usr/local/apptainer/bin:$PATH" \
     APPTAINER_TMPDIR="/tmp-apptainer"
 RUN apk add --no-cache py3-pytest ca-certificates libseccomp squashfs-tools tzdata fuse2fs fuse-overlayfs squashfuse \
-    python3 py3-pip git openssh-client git-annex curl bzip2 bash glab jq \
+    python3 py3-pip git openssh-client git-annex curl bzip2 bash glab \
     && mkdir -p $APPTAINER_TMPDIR \
     && cp /usr/share/zoneinfo/UTC /etc/localtime \
     && apk del tzdata \
     && rm -rf /tmp/* /var/cache/apk/*

 RUN pip install --break-system-packages --no-cache-dir datalad datalad-container ssh_agent_setup python-gitlab
-ADD cfg_nidataops.py /usr/lib/python3.11/site-packages/datalad/resources/procedures/

 WORKDIR /work
@@ -1,45 +0,0 @@
-#!/usr/bin/env python3
-"""Procedure to configure Git annex to add text files directly to Git"""
-
-import sys
-import os.path as op
-
-from datalad.distribution.dataset import require_dataset
-
-ds = require_dataset(
-    sys.argv[1],
-    check_installed=True,
-    purpose='configuration')
-
-nthg = {'annex.largefiles': 'nothing'}
-anthg = {'annex.largefiles': 'anything'}
-annex_largefiles = '((mimeencoding=binary)and(largerthan=0))'
-attrs = ds.repo.get_gitattributes('*')
-if not attrs.get('*', {}).get(
-        'annex.largefiles', None) == annex_largefiles:
-    ds.repo.set_gitattributes([
-        ('*', {'annex.largefiles': annex_largefiles}),
-        ('.gitignore', nthg),
-        ('.gitmodules', nthg),
-        ('.gitlab-ci.yml', nthg),
-        ('.all-contributorsrc', nthg),
-        ('.bidsignore', nthg),
-        ('*.json', nthg),
-        ('*.txt', nthg),
-        ('*.tsv', nthg),
-        ('*.nii.gz', anthg),
-        ('*.tgz', anthg),
-        ('*_scans.tsv', anthg),
-        # annex event files as they contain subjects behavioral responses
-        ('sub-*/**/*_events.tsv', anthg),
-        ('*.bk2', anthg),
-        ('*.html', anthg),
-        ('*.svg', anthg),
-    ])
-
-git_attributes_file = op.join(ds.path, '.gitattributes')
-ds.save(
-    git_attributes_file,
-    message="Setup gitattributes for ni-dataops",
-    result_renderer='disabled'
-)
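For context, a cfg_* file like the one removed above is a DataLad configuration procedure; once it ships in the procedures directory (as the Dockerfile's ADD line did), it can be applied at dataset creation time or retroactively. A minimal sketch of the invocation, assuming the image above (the dataset path is hypothetical):

    # apply the procedure while creating a dataset
    datalad create -c nidataops /data/my-study
    # or run it on an already existing dataset
    datalad run-procedure cfg_nidataops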
@@ -3,9 +3,6 @@
 export CONTAINER_ID=$(basename $(cat /proc/1/cpuset))
 GITLAB_TOKEN_SECRET=$(cat /var/run/secrets/dicom_bot_gitlab_token 2>/dev/null)
 export GITLAB_TOKEN=${GITLAB_TOKEN_SECRET:=$GITLAB_TOKEN}
-S3_ID=$(cat /var/run/secrets/s3_id 2>/dev/null)
-S3_SECRET=$(cat /var/run/secrets/s3_secret 2>/dev/null)
-export AWS_ACCESS_KEY_ID=${S3_ID:=$AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY=${S3_SECRET:=$AWS_SECRET_ACCESS_KEY}
 export GITLAB_API_URL=https://${CI_SERVER_HOST}/api/v4
 export GIT_SSH_PORT=${GIT_SSH_PORT:=222}
@@ -26,6 +23,14 @@ fi
 git config --global init.defaultBranch main
 ssh-keyscan -p ${GIT_SSH_PORT} -H ${CI_SERVER_HOST} | install -m 600 /dev/stdin $HOME/.ssh/known_hosts

+# example
+# /usr/bin/storescp \
+#     -aet DICOM_SERVER_SEQUOIA \
+#     -pm \
+#     -od $DICOM_TMP_DIR -su '' \
+#     --eostudy-timeout ${STORESCP_STUDY_TIMEOUT:=60} \
+#     --exec-on-eostudy "python3 $DICOM_ROOT/exec_on_study_received.py #p " 2100 >> $DICOM_DATA_ROOT/storescp.log
+
 # run whatever command was passed (storescp or python index_dicoms directly)
 $@
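The entrypoint above resolves its GitLab token from a file under /var/run/secrets/ before falling back to environment variables. A hedged example of supplying that file without an orchestrator, via a plain read-only bind mount (image name, host, and local paths are hypothetical):

    docker run --rm \
        -v "$PWD/secrets/gitlab_token:/var/run/secrets/dicom_bot_gitlab_token:ro" \
        -e CI_SERVER_HOST=gitlab.example.org \
        ghcr.io/example/dicom_indexer storescp --help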
@@ -1,5 +1,4 @@
 import os
-import time
 import pydicom as dicom
 import argparse
 import pathlib
@@ -27,8 +26,6 @@ GITLAB_TOKEN = os.environ.get("GITLAB_TOKEN", None)
 GITLAB_BOT_USERNAME = os.environ.get("GITLAB_BOT_USERNAME", None)
 GITLAB_BOT_EMAIL = os.environ.get("GITLAB_BOT_EMAIL", None)
 BIDS_DEV_BRANCH = os.environ.get("BIDS_DEV_BRANCH", "dev")
-BIDS_BASE_BRANCH = os.environ.get("BIDS_BASE_BRANCH", "base")
-BIDS_CONVERT_BRANCHES = os.environ.get("BIDS_CONVERT_BRANCHES", 'convert/*')
 NI_DATAOPS_GITLAB_ROOT = os.environ.get("NI_DATAOPS_GITLAB_ROOT", "ni-dataops")

 S3_REMOTE_DEFAULT_PARAMETERS = [
@@ -205,7 +202,7 @@ def index_dicoms(
     # cannot pass message above so commit now
     dicom_session_ds.save(message=f"index dicoms from archive {archive}") #
     # optimize git index after large import
-    # dicom_session_ds.repo.gc() # aggressive by default
+    #dicom_session_ds.repo.gc() # aggressive by default
     yield dicom_session_ds
@@ -299,11 +296,11 @@ def setup_gitlab_repos(
     dicom_study_ds.create(force=True)
     # add default study DS structure.
     init_dicom_study(dicom_study_ds, gitlab_group_path)
-    # initialize BIDS project
-    init_bids(gitlab_conn, dicom_study_repo, gitlab_group_path)
     # create subgroup for QC and derivatives repos
     get_or_create_gitlab_group(gitlab_conn, gitlab_group_path / "derivatives")
     get_or_create_gitlab_group(gitlab_conn, gitlab_group_path / "qc")
+    # initialize BIDS project
+    init_bids(gitlab_conn, dicom_study_repo, gitlab_group_path)

     dicom_study_ds.install(
         source=dicom_session_repo._attrs["ssh_url_to_repo"],
@@ -337,23 +334,11 @@ def init_bids(
     )
     bids_project_ds.push(to="origin")
     # create dev branch and push for merge requests
-    for branch in [BIDS_DEV_BRANCH, BIDS_BASE_BRANCH]:
-        bids_project_ds.repo.checkout(branch, ["-b"])
+    bids_project_ds.repo.checkout(BIDS_DEV_BRANCH, ["-b"])
     bids_project_ds.push(to="origin")
-    # set protected branches
-    for branch in [BIDS_CONVERT_BRANCHES, BIDS_DEV_BRANCH, BIDS_BASE_BRANCH]:
-        bids_project_repo.protectedbranches.create(data={"name": branch})
-
-    ### avoid race conditions for first session pushed ###
-    ### otherwise heudiconv starts before the remotes are configured
-    time.sleep(5)  # wait for config pipeline to be created
-    while True:
-        pipelines = bids_project_repo.pipelines.list(all=True)
-        no_pipe = all(p.status in ["success", "failed"] for p in pipelines)
-        if no_pipe:
-            break
-        time.sleep(1)
-    return bids_project_repo
+    bids_project_repo.protectedbranches.create(data={"name": "convert/*"})
+    bids_project_repo.protectedbranches.create(data={"name": "dev"})


 def init_dicom_study(
@@ -399,10 +384,8 @@ def extract_session_metas(dicom_session_ds: dlad.Dataset) -> dict:
             dic = dicom.read_file(dicom_session_ds.pathobj / f, stop_before_pixels=True)
         except Exception as e:  # TODO: what exception occurs when non-dicom ?
             continue
-        metas = {k: str(getattr(dic, k)).replace("^", "/") for k in SESSION_META_KEYS}
-        metas["StudyDescriptionPath"] = metas["StudyDescription"].split("/")
         # return at first dicom found
-        return metas
+        return {k: str(getattr(dic, k)).replace("^", "/") for k in SESSION_META_KEYS}
     raise InputError("no dicom found")
@@ -480,7 +463,7 @@ def export_to_s3(
     # git-annex initremote remotename ...
     remote_name = s3_url.hostname
     s3_path = s3_url.path
-    if "{" in s3_path:
+    if '{' in s3_path:
         s3_path = s3_path.format(**session_metas)
     _, bucket_name, *fileprefix = pathlib.Path(s3_path).parts
     fileprefix.append(session_metas["StudyInstanceUID"] + "/")
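The "# git-annex initremote remotename ..." comment above refers to git-annex's S3 special remote, which the surrounding code parametrizes from s3_url and the session metadata. A manual equivalent, as a sketch only (remote name, host, bucket, and prefix are hypothetical; credentials come from the AWS_* environment variables):

    export AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=...
    git annex initremote study-s3 type=S3 encryption=none \
        host=s3.example.org bucket=dicom-store \
        fileprefix=study/1.2.840.113619.../ autoenable=true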
@@ -1,10 +1,3 @@
-# localizers/scouts converted in BIDS for QC/QA
 **/anat/*localizer*
 **/anat/*scout*
-# reproin/heudiconv duplicated scans mechanism
-**/*dup*
-**/*_defacemaskreg.mat
-# follow MIDS extension wip for c-spine data
-**/*_bp-*
-# follows SWI BEP wip
-**/swi/*
+**/*__dup*
@@ -1,5 +1,6 @@
 include:
   - local: /.ci-env.yml
   - project: "$NI_DATAOPS_GITLAB_ROOT/ci-pipelines"
+    ref: refactor
     file:
       - 'ci-pipelines/bids/bids_repo.yml'
@@ -2,5 +2,6 @@
 include:
   - local: /.ci-env.yml
   - project: "$NI_DATAOPS_GITLAB_ROOT/ci-pipelines"
+    ref: refactor
     file:
       - 'ci-pipelines/sources/dicoms_study.yml'