Skip to content

Commit 8ebbdba

Browse files
committed
remove singularity from vcell CI/CD (keep it in CLI/biosimulations)
1 parent dd8f83e commit 8ebbdba

File tree

4 files changed

+8
-94
lines changed

4 files changed

+8
-94
lines changed

.github/workflows/CI-full.yml

Lines changed: 5 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -94,34 +94,22 @@ jobs:
9494
java-version: '17'
9595
cache: 'maven'
9696

97-
- name: Install Singularity # to make singularity image for cluster
98-
uses: eWaterCycle/setup-singularity@v6
99-
with:
100-
singularity-version: 3.7.1
10197
- name: build and publish all images
10298
shell: bash
10399
run: |
104100
cd docker/build
105101
sudo docker login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} ghcr.io
106102
docker login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} ghcr.io
107103
./build.sh all ${{ env.VCELL_REPO_NAMESPACE }} ${{ env.VCELL_TAG }}
108-
cd singularity-vm
109-
singularity remote login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} oras://ghcr.io
110-
- name: tag as latest and push to registry # (jcs) are explicit singularity push commands redundant? (see ./build.sh)
104+
105+
- name: tag as latest and push to registry
111106
shell: bash
112107
run: |
113108
for CONTAINER in vcell-api vcell-rest vcell-webapp-prod vcell-webapp-dev vcell-webapp-stage vcell-webapp-island vcell-batch vcell-opt vcell-clientgen vcell-data vcell-db vcell-mongo vcell-sched vcell-submit vcell-admin;\
114-
do docker tag ${VCELL_REPO_NAMESPACE}/$CONTAINER:${VCELL_TAG} ${VCELL_REPO_NAMESPACE}/$CONTAINER:latest;\
115-
docker tag ${VCELL_REPO_NAMESPACE}/$CONTAINER:${VCELL_TAG} ${VCELL_REPO_NAMESPACE}/$CONTAINER:${{ steps.version.outputs.tag }};\
116-
docker push --all-tags ${VCELL_REPO_NAMESPACE}/$CONTAINER;\
109+
do docker tag ${VCELL_REPO_NAMESPACE}/$CONTAINER:${VCELL_TAG} ${VCELL_REPO_NAMESPACE}/$CONTAINER:latest;\
110+
docker tag ${VCELL_REPO_NAMESPACE}/$CONTAINER:${VCELL_TAG} ${VCELL_REPO_NAMESPACE}/$CONTAINER:${{ steps.version.outputs.tag }};\
111+
docker push --all-tags ${VCELL_REPO_NAMESPACE}/$CONTAINER;\
117112
done
118-
cd docker/build/singularity-vm
119-
singularity push -U $(ls *batch*img) oras://${VCELL_REPO_NAMESPACE}/vcell-batch-singularity:${VCELL_TAG}
120-
singularity push -U $(ls *batch*img) oras://${VCELL_REPO_NAMESPACE}/vcell-batch-singularity:${{ steps.version.outputs.tag }}
121-
singularity push -U $(ls *batch*img) oras://${VCELL_REPO_NAMESPACE}/vcell-batch-singularity:latest
122-
singularity push -U $(ls *opt*img) oras://${VCELL_REPO_NAMESPACE}/vcell-opt-singularity:${VCELL_TAG}
123-
singularity push -U $(ls *opt*img) oras://${VCELL_REPO_NAMESPACE}/vcell-opt-singularity:${{ steps.version.outputs.tag }}
124-
singularity push -U $(ls *opt*img) oras://${VCELL_REPO_NAMESPACE}/vcell-opt-singularity:latest
125113
126114
- name: Setup tmate session
127115
uses: mxschmitt/action-tmate@v3

.github/workflows/site_deploy.yml

Lines changed: 2 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -182,20 +182,6 @@ jobs:
182182
ssh-keyscan $VCELL_MANAGER_NODE >> ~/.ssh/known_hosts
183183
cd docker/swarm
184184
scp ${{ secrets.CD_FULL_USER }}@${VCELL_MANAGER_NODE}:${VCELL_DEPLOY_REMOTE_DIR}/${VCELL_CONFIG_FILE_NAME} .
185-
- name: install singularity
186-
uses: eWaterCycle/setup-singularity@v6
187-
with:
188-
singularity-version: 3.7.1
189-
- name: retrieve batch and opt singularity images
190-
run: |
191-
set -ux
192-
cd docker/swarm
193-
export BATCH_SINGULARITY_FILENAME=`cat $VCELL_CONFIG_FILE_NAME | grep VCELL_BATCH_SINGULARITY_FILENAME | cut -d"=" -f2`
194-
export OPT_SINGULARITY_FILENAME=`cat $VCELL_CONFIG_FILE_NAME | grep VCELL_OPT_SINGULARITY_FILENAME | cut -d"=" -f2`
195-
cd ../build/singularity-vm
196-
singularity remote login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} oras://ghcr.io
197-
singularity pull $BATCH_SINGULARITY_FILENAME oras://${VCELL_REPO_NAMESPACE}/vcell-batch-singularity:${{ github.event.inputs.vcell_version }}.${{ github.event.inputs.vcell_build }}
198-
singularity pull $OPT_SINGULARITY_FILENAME oras://${VCELL_REPO_NAMESPACE}/vcell-opt-singularity:${{ github.event.inputs.vcell_version }}.${{ github.event.inputs.vcell_build }}
199185
- name: setup java 17 with maven cache (for documentation build)
200186
uses: actions/setup-java@v4
201187
with:
@@ -207,16 +193,15 @@ jobs:
207193
run: |
208194
set -ux
209195
mvn clean install -DskipTests
210-
- name: deploy installers and singularity to kubernetes site and web help to vcell.org
196+
- name: deploy installers and web help to vcell.org
211197
run: |
212198
set -ux
213199
cd docker/swarm
214200
ssh -t ${{ secrets.CD_FULL_USER }}@${VCELL_MANAGER_NODE} sudo docker login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} ghcr.io
215201
if ${{ github.event.inputs.server_only != 'true' }}; then
216-
# build and install the client installers, the singularity images, and the web help (kubernetes cluster deployments are separate)
202+
# build and install the client installers, and the web help (kubernetes cluster deployments are separate)
217203
./deploy-action-kubernetes.sh \
218204
--ssh-user ${{ secrets.CD_FULL_USER }} \
219-
--install-singularity \
220205
--build-installers \
221206
--installer-deploy-dir $VCELL_INSTALLER_REMOTE_DIR \
222207
--webhelp-local-dir ../../vcell-client/target/classes/vcellDoc \
@@ -227,13 +212,6 @@ jobs:
227212
ssh ${{ secrets.CD_FULL_USER }}@${VCELL_MANAGER_NODE} \
228213
installer_deploy_dir=$VCELL_INSTALLER_REMOTE_DIR vcell_siteCamel=$VCELL_SITE_CAMEL vcell_version=$VCELL_VERSION vcell_build=$VCELL_BUILD \
229214
'bash -s' < link-installers.sh
230-
else
231-
# build and install only the singularity images (kubernetes cluster deployments are separate)
232-
./deploy-action-kubernetes.sh \
233-
--ssh-user ${{ secrets.CD_FULL_USER }} \
234-
--install-singularity \
235-
${VCELL_MANAGER_NODE} \
236-
./${VCELL_CONFIG_FILE_NAME}
237215
fi
238216
- name: Capitalize first character of site name
239217
id: capitalize

docker/build/Dockerfile-submit-dev

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,6 @@ ENV softwareVersion=SOFTWARE-VERSION-NOT-SET \
7171
htc_vcellbatch_docker_name="htc-vcellbatch-docker-name-not-set" \
7272
htc_vcellbatch_solver_list="htc-vcellbatch-solver-list-not-set" \
7373
htc_vcellopt_docker_name="htc-vcellopt-docker-name-not-set" \
74-
opt_singularity_imagefile=/path/to/external/opt/singularity_opt.img \
7574
batchhost="batch-host-not-set" \
7675
batchuser="batch-user-not-set" \
7776
slurm_cmd_sbatch=sbatch \

docker/swarm/deploy-action-kubernetes.sh

Lines changed: 1 addition & 52 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
set -ux
44

55
show_help() {
6-
echo "Deploys vcell client installers, webhelp and singularity images for a Kubernetes deploy"
6+
echo "Deploys vcell client installers and webhelp for a Kubernetes deploy"
77
echo ""
88
echo "usage: deploy-action-kubernetes.sh [OPTIONS] REQUIRED-ARGUMENTS"
99
echo ""
@@ -30,14 +30,11 @@ show_help() {
3030
echo " --webhelp-deploy-dir /remote/path/to/web/VCell_Help"
3131
echo " directory for deployed html webhelp published on web server"
3232
echo ""
33-
echo " --install-singularity optionally install batch and opt singularity images on each compute node in 'vcell' SLURM partition"
34-
echo ""
3533
echo ""
3634
echo "example:"
3735
echo ""
3836
echo "deploy-action-kubernetes.sh \\"
3937
echo " --ssh-user vcell \\"
40-
echo " --install_singularity \\"
4138
echo "  --build-installers --installer-deploy-dir /share/apps/vcell3/apache_webroot/htdocs/webstart/Alpha \\"
4239
echo "  --webhelp-local-dir ../../vcell-client/target/classes/vcellDoc \\"
4340
echo "  --webhelp-deploy-dir /share/apps/vcell3/apache_webroot/htdocs/webstart/VCell_Tutorials/VCell_Help \\"
@@ -55,7 +52,6 @@ installer_deploy_dir=
5552
webhelp_local_dir=
5653
webhelp_deploy_dir=
5754
build_installers=false
58-
install_singularity=false
5955
while :; do
6056
case $1 in
6157
-h|--help)
@@ -78,9 +74,6 @@ while :; do
7874
shift
7975
webhelp_deploy_dir=$1
8076
;;
81-
--install-singularity)
82-
install_singularity=true
83-
;;
8477
--build-installers)
8578
build_installers=true
8679
;;
@@ -106,50 +99,6 @@ local_config_file=$2
10699
vcell_siteCamel=$(grep VCELL_SITE_CAMEL "$local_config_file" | cut -d"=" -f2)
107100
vcell_version=$(grep VCELL_VERSION_NUMBER "$local_config_file" | cut -d"=" -f2)
108101
vcell_build=$(grep VCELL_BUILD_NUMBER "$local_config_file" | cut -d"=" -f2)
109-
batch_singularity_filename=$(grep VCELL_BATCH_SINGULARITY_FILENAME "$local_config_file" | cut -d"=" -f2)
110-
opt_singularity_filename=$(grep VCELL_OPT_SINGULARITY_FILENAME "$local_config_file" | cut -d"=" -f2)
111-
slurm_singularity_central_dir=$(grep VCELL_SLURM_CENTRAL_SINGULARITY_DIR "$local_config_file" | cut -d"=" -f2)
112-
113-
114-
#
115-
# install the singularity images on the cluster nodes
116-
#
117-
if [ "$install_singularity" == "true" ]; then
118-
119-
echo ""
120-
pushd ../build/singularity-vm || (echo "pushd ../build/singularity-vm failed"; exit 1)
121-
echo ""
122-
echo "CURRENT DIRECTORY IS $PWD"
123-
124-
#
125-
# get configuration from config file and load into current bash environment
126-
#
127-
echo ""
128-
129-
if [ ! -e "./${batch_singularity_filename}" ]; then
130-
echo "failed to find local batch singularity image file $batch_singularity_filename in ./singularity-vm directory"
131-
exit 1
132-
fi
133-
134-
if ! scp "./${batch_singularity_filename}" "$ssh_user@$manager_node:${slurm_singularity_central_dir}"; then
135-
echo "failed to copy batch singularity image to server"
136-
exit 1
137-
fi
138-
139-
if [ ! -e "./${opt_singularity_filename}" ]; then
140-
echo "failed to find local opt singularity image file $opt_singularity_filename in ./singularity-vm directory"
141-
exit 1
142-
fi
143-
144-
if ! scp "./${opt_singularity_filename}" "$ssh_user@$manager_node:${slurm_singularity_central_dir}"; then
145-
echo "failed to copy opt singularity image to server"
146-
exit 1
147-
fi
148-
149-
echo "popd"
150-
popd || (echo "popd failed"; exit 1)
151-
fi
152-
153102

154103
#
155104
# if --build-installers, then generate client installers, placing them in ./generated_installers

0 commit comments

Comments
 (0)