Merge remote-tracking branch 'origin/dicom_index' into dicom_index

bpinsard 2024-01-30 13:45:05 -05:00
commit 9ed657f3ae
1 changed file with 7 additions and 6 deletions


@@ -17,7 +17,7 @@ DEBUG = bool(os.environ.get("DEBUG", False))
GITLAB_REMOTE_NAME = os.environ.get("GITLAB_REMOTE_NAME", "origin")
GITLAB_TOKEN = os.environ.get("GITLAB_TOKEN", None)
GITLAB_BOT_USERNAME = os.environ.get("GITLAB_BOT_USERNAME", None)
BIDS_DEV_BRANCH = os.environ.get("BIDS_DEV_BRANCH", "dev")
S3_REMOTE_DEFAULT_PARAMETERS = [
"type=S3",
@@ -247,7 +247,7 @@ def setup_gitlab_repos(
set_bot_privileges(gitlab_conn, gitlab_group_path)
# and push
- dicom_session_ds.push(to=GITLAB_REMOTE_NAME, force='gitpush')
+ dicom_session_ds.push(to=GITLAB_REMOTE_NAME, force="gitpush")
## add the session to the dicom study repo
dicom_study_repo = get_or_create_gitlab_project(gitlab_conn, dicom_study_path)
@@ -291,7 +291,7 @@ def init_bids(
) -> None:
bids_project_repo = get_or_create_gitlab_project(gl, f"{gitlab_group_path}/bids")
with tempfile.TemporaryDirectory() as tmpdir:
- bids_project_ds = datalad.api.install(
+ bids_project_ds = dlad.install(
source=bids_project_repo._attrs["ssh_url_to_repo"],
path=tmpdir,
)
@@ -305,7 +305,7 @@ def init_bids(
# TODO: setup sensitive / non-sensitive S3 buckets
bids_project_ds.push(to="origin")
# create dev branch and push for merge requests
- bids_project_ds.gitrepo.checkout(BIDS_DEV_BRANCH, ["-b"])
+ bids_project_ds.repo.checkout(BIDS_DEV_BRANCH, ["-b"])
bids_project_ds.push(to="origin")
# set protected branches
bids_project_repo.protectedbranches.create(data={"name": "convert/*"})
@@ -427,7 +427,7 @@ def export_to_s3(
# git-annex initremote remotename ...
remote_name = s3_url.hostname
_, bucket_name, *fileprefix = pathlib.Path(s3_url.path).parts
- fileprefix.append(session_metas['StudyInstanceUID']+'/')
+ fileprefix.append(session_metas["StudyInstanceUID"] + "/")
ds.repo.init_remote(
remote_name,
S3_REMOTE_DEFAULT_PARAMETERS
@@ -443,7 +443,8 @@ def export_to_s3(
remote=remote_name,
)
- ds.push(to=remote_name)
+ ds.push(to=remote_name, data="auto")
+ # It does not push the data to S3 unless data="anything" is set, which pushes everything, including the deflated archived data
def connect_gitlab(
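
A note on the export_to_s3 hunks above: the sketch below is not part of this commit. It only illustrates, with made-up values (dataset path, remote name, bucket, StudyInstanceUID), how a git-annex S3 special remote is initialised through ds.repo.init_remote and how the push data modes referenced in the new comment differ.

import datalad.api as dlad

# Hypothetical dataset and remote, for illustration only.
ds = dlad.Dataset("/data/dicom-session")
remote_name = "study-s3"
study_uid = "1.2.3.4.5.6789"  # hypothetical StudyInstanceUID

# Initialise the S3 special remote. "type=S3" mirrors the first entry of
# S3_REMOTE_DEFAULT_PARAMETERS; the remaining parameters are illustrative.
ds.repo.init_remote(
    remote_name,
    [
        "type=S3",
        "encryption=none",
        "bucket=example-bucket",
        f"fileprefix={study_uid}/",
    ],
)

# The three push calls below show the alternative data modes side by side.

# data="nothing": push git history and annex metadata only, no file content.
ds.push(to=remote_name, data="nothing")

# data="auto": copy only content selected by the remote's preferred-content
# ("wanted") settings; with no such setting configured this can transfer no
# annexed data at all, which matches the behaviour the new comment describes.
ds.push(to=remote_name, data="auto")

# data="anything": copy every locally available annexed key, including the
# deflated/archived content one may not want on the bucket.
ds.push(to=remote_name, data="anything")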