From 7c32ff1713a17711e5a370f2e46223debfb6ce15 Mon Sep 17 00:00:00 2001 From: Devin Robison Date: Fri, 30 Sep 2022 15:19:13 -0600 Subject: [PATCH 01/11] Creating branch for v22.11 From d688a76f03a36e725fc12f4eb87f93d8de31fdde Mon Sep 17 00:00:00 2001 From: David Gardner <96306125+dagardner-nv@users.noreply.github.com> Date: Thu, 6 Oct 2022 11:28:21 -0700 Subject: [PATCH 02/11] Fix returning of thread_binding attr (#179) Authors: - David Gardner (https://github.com/dagardner-nv) Approvers: - Michael Demoret (https://github.com/mdemoret-nv) URL: https://github.com/nv-morpheus/SRF/pull/179 --- src/public/options/fiber_pool.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/public/options/fiber_pool.cpp b/src/public/options/fiber_pool.cpp index ed0897de6..a05846297 100644 --- a/src/public/options/fiber_pool.cpp +++ b/src/public/options/fiber_pool.cpp @@ -40,7 +40,7 @@ bool FiberPoolOptions::enable_memory_binding() const } bool FiberPoolOptions::enable_thread_binding() const { - return m_enable_memory_binding; + return m_enable_thread_binding; } bool FiberPoolOptions::enable_tracing_scheduler() const { From 77e4964aa0f24a8a6b53ffca142129e99a31019b Mon Sep 17 00:00:00 2001 From: David Gardner <96306125+dagardner-nv@users.noreply.github.com> Date: Fri, 7 Oct 2022 09:54:13 -0700 Subject: [PATCH 03/11] Emit the value before incrementing the iterator fixes (#180) Fixes #181 morpheus issue [#330](https://github.com/nv-morpheus/Morpheus/issues/330) Authors: - David Gardner (https://github.com/dagardner-nv) Approvers: - Michael Demoret (https://github.com/mdemoret-nv) URL: https://github.com/nv-morpheus/SRF/pull/180 --- python/srf/_pysrf/src/segment.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python/srf/_pysrf/src/segment.cpp b/python/srf/_pysrf/src/segment.cpp index 203cd04de..bc4af16fb 100644 --- a/python/srf/_pysrf/src/segment.cpp +++ b/python/srf/_pysrf/src/segment.cpp @@ -87,9 +87,6 @@ std::shared_ptr build_source(srf::segment::Build // Get the next value auto next_val = py::cast(*iter); - // Increment it for next loop - ++iter; - { // Release the GIL to call on_next pybind11::gil_scoped_release nogil; @@ -100,6 +97,9 @@ std::shared_ptr build_source(srf::segment::Build subscriber.on_next(std::move(next_val)); } } + + // Increment it for next loop + ++iter; } } catch (const std::exception& e) From 6174bc780756c68cd0f6548d4b419f79a8ef2692 Mon Sep 17 00:00:00 2001 From: David Gardner <96306125+dagardner-nv@users.noreply.github.com> Date: Mon, 10 Oct 2022 13:28:44 -0700 Subject: [PATCH 04/11] Switch to github actions (#182) In general this should have parity with the existing Jenkins pipeline. * On pull requests all steps are run except the conda package step * On pushes to branches named `branch-*` the check script is skipped, and the conda package step is run It isn't clear if the nightly builds were actually enabled/working. 
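For reference, this gating is implemented with job-level `if:` expressions on `github.ref_name`; the two conditions below are excerpted from the `pull_request.yml` added by this patch (see the full diff further down):

```yaml
check:
  # only runs for refs under pull-request/
  if: ${{ startsWith(github.ref_name, 'pull-request/') }}

package:
  # only runs on pushes to release branches, after the other jobs succeed
  if: ${{ startsWith(github.ref_name, 'branch-') }}
  needs: [benchmark, documentation, test]
```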
Fixes #154 Authors: - David Gardner (https://github.com/dagardner-nv) Approvers: - Michael Demoret (https://github.com/mdemoret-nv) URL: https://github.com/nv-morpheus/SRF/pull/182 --- .github/workflows/pull_request.yml | 189 +++++++++++ ci/Jenkinsfile | 318 ------------------ ci/scripts/{jenkins => github}/benchmark.sh | 2 +- ci/scripts/{jenkins => github}/build.sh | 2 +- ci/scripts/{jenkins => github}/checks.sh | 4 +- ci/scripts/{jenkins => github}/common.sh | 29 +- ci/scripts/{jenkins => github}/conda.sh | 16 +- ci/scripts/{jenkins => github}/docs.sh | 2 +- .../{jenkins => github}/post_benchmark.sh | 2 +- .../{jenkins => github}/pre_benchmark.sh | 2 +- ci/scripts/{jenkins => github}/test.sh | 2 +- 11 files changed, 219 insertions(+), 349 deletions(-) create mode 100644 .github/workflows/pull_request.yml delete mode 100644 ci/Jenkinsfile rename ci/scripts/{jenkins => github}/benchmark.sh (96%) rename ci/scripts/{jenkins => github}/build.sh (98%) rename ci/scripts/{jenkins => github}/checks.sh (94%) rename ci/scripts/{jenkins => github}/common.sh (82%) rename ci/scripts/{jenkins => github}/conda.sh (56%) rename ci/scripts/{jenkins => github}/docs.sh (96%) rename ci/scripts/{jenkins => github}/post_benchmark.sh (95%) rename ci/scripts/{jenkins => github}/pre_benchmark.sh (95%) rename ci/scripts/{jenkins => github}/test.sh (98%) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml new file mode 100644 index 000000000..4bf622214 --- /dev/null +++ b/.github/workflows/pull_request.yml @@ -0,0 +1,189 @@ +# SPDX-FileCopyrightText: Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name: Build pull request + +on: + push: + branches: + - 'pull-request/**' + - "branch-*" + +# This allows a subsequently queued workflow run to interrupt previous runs +concurrency: + group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' + cancel-in-progress: true + +env: + AWS_DEFAULT_REGION: us-west-2 + AWS_ACCESS_KEY_ID: "${{ secrets.GHA_AWS_ACCESS_KEY_ID }}" + AWS_SECRET_ACCESS_KEY: "${{ secrets.GHA_AWS_SECRET_ACCESS_KEY }}" + BUILD_CC: "gcc" + CHANGE_TARGET: "${{ github.base_ref }}" + GH_TOKEN: "${{ github.token }}" + GIT_COMMIT: "${{ github.sha }}" + SRF_ROOT: "${{ github.workspace }}/srf" + WORKSPACE: "${{ github.workspace }}/srf" + WORKSPACE_TMP: "${{ github.workspace }}/tmp" + + +jobs: + check: + if: ${{ startsWith(github.ref_name, 'pull-request/') }} + name: Check + runs-on: [self-hosted, linux, amd64, cpu4] + timeout-minutes: 60 + container: + image: gpuci/rapidsai-driver:21.10-cuda11.4-devel-ubuntu20.04-py3.8 + strategy: + fail-fast: true + + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + lfs: false + path: 'srf' + fetch-depth: 0 + + - name: Check + shell: bash + run: ./srf/ci/scripts/github/checks.sh + + build: + name: Build + runs-on: [self-hosted, linux, amd64, cpu16] + timeout-minutes: 60 + container: + image: gpuci/rapidsai-driver:21.10-cuda11.4-devel-ubuntu20.04-py3.8 + strategy: + fail-fast: true + matrix: + build_cc: ["gcc", "gcc-coverage", "clang"] + + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + lfs: false + path: 'srf' + + - name: Build:linux:x86_64 + shell: bash + env: + BUILD_CC: ${{ matrix.build_cc }} + run: ./srf/ci/scripts/github/build.sh + + test: + name: Test + needs: [check, build] + runs-on: [self-hosted, linux, amd64, gpu-v100-495-1] + timeout-minutes: 60 + container: + image: gpuci/rapidsai:21.10-cuda11.4-devel-ubuntu20.04-py3.8 + options: --cap-add=sys_nice + env: + NVIDIA_VISIBLE_DEVICES: ${{ env.NVIDIA_VISIBLE_DEVICES }} + PARALLEL_LEVEL: '10' + strategy: + fail-fast: true + matrix: + build_type: ["Debug", "Release"] + + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + lfs: false + path: 'srf' + + - name: Test:linux:x86_64:gcc + shell: bash + env: + BUILD_TYPE: ${{ matrix.build_type }} + run: ./srf/ci/scripts/github/test.sh + + documentation: + name: Documentation + needs: [check, build] + runs-on: [self-hosted, linux, amd64, cpu4] + timeout-minutes: 60 + container: + image: gpuci/rapidsai-driver:21.10-cuda11.4-devel-ubuntu20.04-py3.8 + strategy: + fail-fast: true + + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + lfs: false + path: 'srf' + + - name: build_docs + shell: bash + run: ./srf/ci/scripts/github/docs.sh + + benchmark: + name: Benchmark + needs: [check, build] + runs-on: [self-hosted, linux, amd64, cpu4] + timeout-minutes: 60 + container: + image: gpuci/rapidsai-driver:21.10-cuda11.4-devel-ubuntu20.04-py3.8 + strategy: + fail-fast: true + + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + lfs: false + path: 'srf' + + - name: pre_benchmark + shell: bash + run: ./srf/ci/scripts/github/pre_benchmark.sh + - name: benchmark + shell: bash + run: ./srf/ci/scripts/github/benchmark.sh + - name: post_benchmark + shell: bash + run: ./srf/ci/scripts/github/benchmark.sh + + + package: + name: Package + if: ${{ startsWith(github.ref_name, 'branch-') }} + needs: [benchmark, documentation, test] + runs-on: [self-hosted, linux, amd64, cpu16] + timeout-minutes: 60 + container: + image: 
gpuci/rapidsai-driver:21.10-cuda11.4-devel-ubuntu20.04-py3.8 + strategy: + fail-fast: true + + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + lfs: false + path: 'srf' + + - name: conda + shell: bash + env: + CONDA_TOKEN: "${{ secrets.CONDA_TOKEN }}" + run: ./srf/ci/scripts/github/conda.sh diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile deleted file mode 100644 index 877e50b92..000000000 --- a/ci/Jenkinsfile +++ /dev/null @@ -1,318 +0,0 @@ -@Library('jenkins_shared_lib') _ - -pipeline { - agent any - options { - disableConcurrentBuilds(abortPrevious: true) - } - environment { - BUILD_CC = 'gcc' - BUILD_TYPE = sh(returnStdout: true, script: 'rapids-build-type') - } - stages { - stage('Checks') { - when { environment name: 'BUILD_TYPE', value: 'pull-request' } - options { - timeout(time: 1, unit: 'HOURS') - } - environment { - PARALLEL_LEVEL = '4' - HOME = "${WORKSPACE}" - GH_TOKEN = credentials('gputester-github-token') - } - agent { - docker { - image 'gpuci/rapidsai-driver:21.10-cuda11.4-devel-ubuntu20.04-py3.8' - label 'cpu' - } - } - steps { - cleanWs( - deleteDirs: true, - externalDelete: 'sudo rm -rf %s' - ) - checkout scm - sh "${WORKSPACE}/ci/scripts/jenkins/checks.sh" - } - } - stage('Builds') { - failFast true - parallel { - stage('Build:linux:x86_64:gcc:release') { - options { - timeout(time: 1, unit: 'HOURS') - } - environment { - PARALLEL_LEVEL = '16' - HOME = "${WORKSPACE}" - } - agent { - docker { - image 'gpuci/rapidsai-driver:21.10-cuda11.4-devel-ubuntu20.04-py3.8' - label 'cpu4' - } - } - steps { - cleanWs( - deleteDirs: true, - externalDelete: 'sudo rm -rf %s' - ) - checkout scm - withCredentials([[ - $class: 'AmazonWebServicesCredentialsBinding', - credentialsId: "aws-s3-gpuci", - accessKeyVariable: 'AWS_ACCESS_KEY_ID', - secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' - ]]) - { - sh "${WORKSPACE}/ci/scripts/jenkins/build.sh" - } - } - } - stage('Build:linux:x86_64:gcc:debug') { - options { - timeout(time: 1, unit: 'HOURS') - } - environment { - BUILD_CC= "gcc-coverage" - PARALLEL_LEVEL = '16' - HOME = "${WORKSPACE}" - } - agent { - docker { - image 'gpuci/rapidsai-driver:21.10-cuda11.4-devel-ubuntu20.04-py3.8' - label 'cpu4' - } - } - steps { - cleanWs( - deleteDirs: true, - externalDelete: 'sudo rm -rf %s' - ) - checkout scm - withCredentials([[ - $class: 'AmazonWebServicesCredentialsBinding', - credentialsId: "aws-s3-gpuci", - accessKeyVariable: 'AWS_ACCESS_KEY_ID', - secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' - ]]) - { - sh "${WORKSPACE}/ci/scripts/jenkins/build.sh" - } - } - } - stage('Build:linux:x86_64:clang') { - options { - timeout(time: 1, unit: 'HOURS') - } - environment { - PARALLEL_LEVEL = '16' - BUILD_CC = 'clang' - HOME = "${WORKSPACE}" - } - agent { - docker { - image 'gpuci/rapidsai-driver:21.10-cuda11.4-devel-ubuntu20.04-py3.8' - label 'cpu4' - } - } - steps { - cleanWs( - deleteDirs: true, - externalDelete: 'sudo rm -rf %s' - ) - checkout scm - withCredentials([[ - $class: 'AmazonWebServicesCredentialsBinding', - credentialsId: "aws-s3-gpuci", - accessKeyVariable: 'AWS_ACCESS_KEY_ID', - secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' - ]]) - { - sh "${WORKSPACE}/ci/scripts/jenkins/build.sh" - } - } - } - stage('Build:Documentation') { - options { - timeout(time: 1, unit: 'HOURS') - } - environment { - PARALLEL_LEVEL = '4' - HOME = "${WORKSPACE}" - } - agent { - docker { - image 'gpuci/rapidsai-driver:21.10-cuda11.4-devel-ubuntu20.04-py3.8' - label 'cpu' - } - } - steps { - cleanWs( - deleteDirs: true, - externalDelete: 'sudo rm -rf %s' - ) - 
checkout scm - withCredentials([[ - $class: 'AmazonWebServicesCredentialsBinding', - credentialsId: "aws-s3-gpuci", - accessKeyVariable: 'AWS_ACCESS_KEY_ID', - secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' - ]]) - { - sh "${WORKSPACE}/ci/scripts/jenkins/docs.sh" - } - } - } - } - } - stage('Tests') { - failFast true - parallel { - stage('TestDebug') { - options { - timeout(time: 1, unit: 'HOURS') - } - agent { - docker { - image 'gpuci/rapidsai:21.10-cuda11.4-devel-ubuntu20.04-py3.8' - label 'driver-495' - args '--cap-add=sys_nice --runtime "nvidia" -e "NVIDIA_VISIBLE_DEVICES=$EXECUTOR_NUMBER"' - } - } - environment { - BUILD_TYPE = "Debug" - HOME = "${WORKSPACE}" - } - steps { - cleanWs( - deleteDirs: true, - externalDelete: 'sudo rm -rf %s' - ) - checkout scm - withCredentials([[ - $class: 'AmazonWebServicesCredentialsBinding', - credentialsId: "aws-s3-gpuci", - accessKeyVariable: 'AWS_ACCESS_KEY_ID', - secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' - ]]) - { - sh "${WORKSPACE}/ci/scripts/jenkins/test.sh" - } - } - } - stage('TestRelease') { - options { - timeout(time: 1, unit: 'HOURS') - } - agent { - docker { - image 'gpuci/rapidsai:21.10-cuda11.4-devel-ubuntu20.04-py3.8' - label 'driver-495' - args '--cap-add=sys_nice --runtime "nvidia" -e "NVIDIA_VISIBLE_DEVICES=$EXECUTOR_NUMBER"' - } - } - environment { - BUILD_TYPE = "Release" - HOME = "${WORKSPACE}" - } - steps { - cleanWs( - deleteDirs: true, - externalDelete: 'sudo rm -rf %s' - ) - checkout scm - withCredentials([[ - $class: 'AmazonWebServicesCredentialsBinding', - credentialsId: "aws-s3-gpuci", - accessKeyVariable: 'AWS_ACCESS_KEY_ID', - secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' - ]]) - { - sh "${WORKSPACE}/ci/scripts/jenkins/test.sh" - } - } - } - stage('Benchmark') { - options { - timeout(time: 1, unit: 'HOURS') - } - agent { - docker { - image 'gpuci/rapidsai-driver:21.10-cuda11.4-devel-ubuntu20.04-py3.8' - label 'cpu' - args '--cap-add=sys_nice' - } - } - environment { - HOME = "${WORKSPACE}" - } - steps { - cleanWs( - deleteDirs: true, - externalDelete: 'sudo rm -rf %s' - ) - checkout scm - withCredentials([[ - $class: 'AmazonWebServicesCredentialsBinding', - credentialsId: "aws-s3-gpuci", - accessKeyVariable: 'AWS_ACCESS_KEY_ID', - secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' - ]]) - { - sh "${WORKSPACE}/ci/scripts/jenkins/pre_benchmark.sh" - sh "${WORKSPACE}/ci/scripts/jenkins/benchmark.sh" - sh "${WORKSPACE}/ci/scripts/jenkins/post_benchmark.sh" - } - } - } - } - } - stage('package:conda') { - when { - anyOf { - environment name: 'BUILD_TYPE', value: 'branch' - environment name: 'BUILD_TYPE', value: 'nightly' - } - } - options { - timeout(time: 1, unit: 'HOURS') - } - agent { - docker { - image 'gpuci/rapidsai-driver:21.10-cuda11.4-devel-ubuntu20.04-py3.8' - label 'cpu4' - } - } - environment { - HOME = "${WORKSPACE}" - PARALLEL_LEVEL = '16' - CONDA_PKG_LABEL = 'dev-ci' - } - steps { - cleanWs( - deleteDirs: true, - externalDelete: 'sudo rm -rf %s' - ) - checkout scm - withCredentials([[ - $class: 'AmazonWebServicesCredentialsBinding', - credentialsId: "aws-s3-gpuci", - accessKeyVariable: 'AWS_ACCESS_KEY_ID', - secretKeyVariable: 'AWS_SECRET_ACCESS_KEY' - ]]) - { - sh "${WORKSPACE}/ci/scripts/jenkins/conda.sh" - } - } - } - } - post { - always { - cleanWs( - deleteDirs: true, - externalDelete: 'sudo rm -rf %s' - ) - } - } -} diff --git a/ci/scripts/jenkins/benchmark.sh b/ci/scripts/github/benchmark.sh similarity index 96% rename from ci/scripts/jenkins/benchmark.sh rename to ci/scripts/github/benchmark.sh index 
a4195df4e..82a74a631 100755 --- a/ci/scripts/jenkins/benchmark.sh +++ b/ci/scripts/github/benchmark.sh @@ -16,7 +16,7 @@ set -e -source ${WORKSPACE}/ci/scripts/jenkins/common.sh +source ${WORKSPACE}/ci/scripts/github/common.sh REPORTS_DIR="${WORKSPACE_TMP}/reports" diff --git a/ci/scripts/jenkins/build.sh b/ci/scripts/github/build.sh similarity index 98% rename from ci/scripts/jenkins/build.sh rename to ci/scripts/github/build.sh index b8e1bef0d..6aca4af3f 100755 --- a/ci/scripts/jenkins/build.sh +++ b/ci/scripts/github/build.sh @@ -16,7 +16,7 @@ set -e -source ${WORKSPACE}/ci/scripts/jenkins/common.sh +source ${WORKSPACE}/ci/scripts/github/common.sh rm -rf ${SRF_ROOT}/.cache/ ${SRF_ROOT}/build/ diff --git a/ci/scripts/jenkins/checks.sh b/ci/scripts/github/checks.sh similarity index 94% rename from ci/scripts/jenkins/checks.sh rename to ci/scripts/github/checks.sh index bd354bedb..51d53a952 100755 --- a/ci/scripts/jenkins/checks.sh +++ b/ci/scripts/github/checks.sh @@ -16,11 +16,9 @@ set -e -source ${WORKSPACE}/ci/scripts/jenkins/common.sh +source ${WORKSPACE}/ci/scripts/github/common.sh export IWYU_DIR="${WORKSPACE_TMP}/iwyu" -rm -rf ${SRF_ROOT}/.cache/ ${SRF_ROOT}/build/ ${IWYU_DIR} - fetch_base_branch gpuci_logger "Creating conda env" diff --git a/ci/scripts/jenkins/common.sh b/ci/scripts/github/common.sh similarity index 82% rename from ci/scripts/jenkins/common.sh rename to ci/scripts/github/common.sh index d01bd32b6..837ce50c1 100644 --- a/ci/scripts/jenkins/common.sh +++ b/ci/scripts/github/common.sh @@ -17,7 +17,13 @@ gpuci_logger "Env Setup" source /opt/conda/etc/profile.d/conda.sh export SRF_ROOT=${SRF_ROOT:-$(git rev-parse --show-toplevel)} -gpuci_logger "Procs: $(nproc)" +cd ${SRF_ROOT} +# For non-gpu hosts nproc will correctly report the number of cores we are able to use +# On a GPU host however nproc will report the total number of cores and PARALLEL_LEVEL +# will be defined specifying the subset we are allowed to use. 
+NUM_CORES=$(nproc) +export PARALLEL_LEVEL=${PARALLEL_LEVEL:-${NUM_CORES}} +gpuci_logger "Procs: ${NUM_CORES}" gpuci_logger "Memory" /usr/bin/free -g @@ -37,9 +43,10 @@ export CMAKE_BUILD_WITH_CODECOV="-DCMAKE_BUILD_TYPE=Debug -DSRF_ENABLE_CODECOV=O export GIT_DEPTH=1000 # For PRs, $GIT_BRANCH is like: pull-request/989 -REPO_NAME=$(basename "${GIT_URL}" .git) -ORG_NAME=$(basename "$(dirname "${GIT_URL}")") -PR_NUM="${GIT_BRANCH##*/}" +REPO_NAME=$(basename "${GITHUB_REPOSITORY}") +ORG_NAME="${GITHUB_REPOSITORY_OWNER}" +PR_NUM="${GITHUB_REF_NAME##*/}" + # S3 vars export S3_URL="s3://rapids-downloads/ci/srf" @@ -51,12 +58,18 @@ export DISPLAY_ARTIFACT_URL="${DISPLAY_URL}${ARTIFACT_ENDPOINT}" # Set sccache env vars export SCCACHE_S3_KEY_PREFIX=srf-${NVARCH}-${BUILD_CC} export SCCACHE_BUCKET=rapids-sccache -export SCCACHE_REGION=us-west-2 +export SCCACHE_REGION="${AWS_DEFAULT_REGION}" export SCCACHE_IDLE_TIMEOUT=32768 #export SCCACHE_LOG=debug -gpuci_logger "Environ:" -env | sort +mkdir -p ${WORKSPACE_TMP} + +function print_env_vars() { + gpuci_logger "Environ:" + env | grep -v -E "AWS_ACCESS_KEY_ID|AWS_SECRET_ACCESS_KEY|TOKEN" | sort +} + +print_env_vars function fetch_base_branch() { gpuci_logger "Retrieving base branch from GitHub API" @@ -65,7 +78,7 @@ function fetch_base_branch() { curl -s \ -H "Accept: application/vnd.github.v3+json" \ "${CURL_HEADERS[@]}" \ - "https://api.github.com/repos/${ORG_NAME}/${REPO_NAME}/pulls/${PR_NUM}" + "${GITHUB_API_URL}/repos/${ORG_NAME}/${REPO_NAME}/pulls/${PR_NUM}" ) BASE_BRANCH=$(echo "${RESP}" | jq -r '.base.ref') diff --git a/ci/scripts/jenkins/conda.sh b/ci/scripts/github/conda.sh similarity index 56% rename from ci/scripts/jenkins/conda.sh rename to ci/scripts/github/conda.sh index 2a0ab8c14..31c8c99e6 100755 --- a/ci/scripts/jenkins/conda.sh +++ b/ci/scripts/github/conda.sh @@ -16,23 +16,11 @@ set -e -source ${WORKSPACE}/ci/scripts/jenkins/common.sh +source ${WORKSPACE}/ci/scripts/github/common.sh restore_conda_env gpuci_logger "Building Conda Package" -CONDA_BLD_OUTPUT="${WORKSPACE_TMP}/conda-bld" -mkdir -p ${CONDA_BLD_OUTPUT} -CONDA_ARGS=() -CONDA_ARGS+=("--output-folder=${CONDA_BLD_OUTPUT}") -CONDA_ARGS+=("--label" "${CONDA_PKG_LABEL}") -CONDA_ARGS="${CONDA_ARGS[@]}" ${SRF_ROOT}/ci/conda/recipes/run_conda_build.sh - -gpuci_logger "Archiving Conda Package" -cd $(dirname ${CONDA_BLD_OUTPUT}) -tar cfj ${WORKSPACE_TMP}/conda_pkg.tar.bz $(basename ${CONDA_BLD_OUTPUT}) - -gpuci_logger "Pushing results to ${DISPLAY_ARTIFACT_URL}/" -aws s3 cp ${WORKSPACE_TMP}/conda_pkg.tar.bz "${ARTIFACT_URL}/conda_pkg.tar.bz" +${SRF_ROOT}/ci/conda/recipes/run_conda_build.sh upload diff --git a/ci/scripts/jenkins/docs.sh b/ci/scripts/github/docs.sh similarity index 96% rename from ci/scripts/jenkins/docs.sh rename to ci/scripts/github/docs.sh index 6b8d91a77..ba4c6727a 100755 --- a/ci/scripts/jenkins/docs.sh +++ b/ci/scripts/github/docs.sh @@ -16,7 +16,7 @@ set -e -source ${WORKSPACE}/ci/scripts/jenkins/common.sh +source ${WORKSPACE}/ci/scripts/github/common.sh rm -rf ${SRF_ROOT}/.cache/ ${SRF_ROOT}/build/ diff --git a/ci/scripts/jenkins/post_benchmark.sh b/ci/scripts/github/post_benchmark.sh similarity index 95% rename from ci/scripts/jenkins/post_benchmark.sh rename to ci/scripts/github/post_benchmark.sh index 342e69666..5fb7d89b3 100755 --- a/ci/scripts/jenkins/post_benchmark.sh +++ b/ci/scripts/github/post_benchmark.sh @@ -16,7 +16,7 @@ set -e -source ${WORKSPACE}/ci/scripts/jenkins/common.sh +source ${WORKSPACE}/ci/scripts/github/common.sh 
REPORTS_DIR="${WORKSPACE_TMP}/reports" diff --git a/ci/scripts/jenkins/pre_benchmark.sh b/ci/scripts/github/pre_benchmark.sh similarity index 95% rename from ci/scripts/jenkins/pre_benchmark.sh rename to ci/scripts/github/pre_benchmark.sh index 987e441db..6f6bef377 100755 --- a/ci/scripts/jenkins/pre_benchmark.sh +++ b/ci/scripts/github/pre_benchmark.sh @@ -16,7 +16,7 @@ set -e -source ${WORKSPACE}/ci/scripts/jenkins/common.sh +source ${WORKSPACE}/ci/scripts/github/common.sh restore_conda_env diff --git a/ci/scripts/jenkins/test.sh b/ci/scripts/github/test.sh similarity index 98% rename from ci/scripts/jenkins/test.sh rename to ci/scripts/github/test.sh index 133927f4d..8c582ab46 100755 --- a/ci/scripts/jenkins/test.sh +++ b/ci/scripts/github/test.sh @@ -16,7 +16,7 @@ set -e -source ${WORKSPACE}/ci/scripts/jenkins/common.sh +source ${WORKSPACE}/ci/scripts/github/common.sh /usr/bin/nvidia-smi restore_conda_env From 85b5aa477c67b0456ab4b21c633d679a3600f392 Mon Sep 17 00:00:00 2001 From: David Gardner <96306125+dagardner-nv@users.noreply.github.com> Date: Tue, 11 Oct 2022 09:04:46 -0700 Subject: [PATCH 05/11] Add documentation on how to build the doxygen docs (#183) Looks like VSCode decided to remove two white-space chars. fixes #127 Authors: - David Gardner (https://github.com/dagardner-nv) Approvers: - Michael Demoret (https://github.com/mdemoret-nv) URL: https://github.com/nv-morpheus/SRF/pull/183 --- CONTRIBUTING.md | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index aa995fd62..9b396db5a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -27,7 +27,7 @@ More information can be found at: [Contributor Code of Conduct](CODE_OF_CONDUCT. 1. Find an issue to work on. The best way is to look for issues with the [good first issue](https://github.com/NVIDIA/SRF/issues) label. 2. Comment on the issue stating that you are going to work on it. -3. Code! Make sure to update unit tests and confirm that test coverage has not decreased (see below)! Ensure the +3. Code! Make sure to update unit tests and confirm that test coverage has not decreased (see below)! Ensure the [license headers are set properly](#Licensing). 4. When done, [create your pull request](https://github.com/NVIDIA/SRF/compare). 5. Wait for other developers to review your code and update code as needed. @@ -37,7 +37,7 @@ Remember, if you are unsure about anything, don't hesitate to comment on issues ## Unit testing and Code Coverage Prior to submitting a pull request, you should ensure that all your contributed code is covered by unit tests, and that -unit test coverage percentages have not decreased (even better if they've increased). To test, from the SRF root +unit test coverage percentages have not decreased (even better if they've increased). To test, from the SRF root directory: 1. Generate a code coverage report and ensure your additions are covered. @@ -114,6 +114,13 @@ pip install -e $SRF_HOME/build/python pytest $SRF_HOME/python ``` +### Building API Documentation +From the root of the SRF repo, configure CMake with `SRF_BUILD_DOCS=ON` then build the `srf_docs` target. Once built the documentation will be located in the `build/docs/html` directory. +```bash +cmake -B build -DSRF_BUILD_DOCS=ON . +cmake --build build --target srf_docs +``` + ## Licensing SRF is licensed under the Apache v2.0 license. All new source files including CMake and other build scripts should contain the Apache v2.0 license header. 
Any edits to existing source code should update the date range of the copyright to the current year. The format for the license header is: From 96bdec952d9dcc251f1053f61c0d60cdd6e4a6ac Mon Sep 17 00:00:00 2001 From: David Gardner <96306125+dagardner-nv@users.noreply.github.com> Date: Tue, 11 Oct 2022 09:22:31 -0700 Subject: [PATCH 06/11] Fix CI deps (#187) Remove dependency on check so that downstream stages are executed even when the check stage is skipped Authors: - David Gardner (https://github.com/dagardner-nv) Approvers: - Michael Demoret (https://github.com/mdemoret-nv) URL: https://github.com/nv-morpheus/SRF/pull/187 --- .github/workflows/pull_request.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 4bf622214..5dac985b4 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -88,7 +88,7 @@ jobs: test: name: Test - needs: [check, build] + needs: [build] runs-on: [self-hosted, linux, amd64, gpu-v100-495-1] timeout-minutes: 60 container: @@ -117,7 +117,7 @@ jobs: documentation: name: Documentation - needs: [check, build] + needs: [build] runs-on: [self-hosted, linux, amd64, cpu4] timeout-minutes: 60 container: @@ -138,7 +138,7 @@ jobs: benchmark: name: Benchmark - needs: [check, build] + needs: [build] runs-on: [self-hosted, linux, amd64, cpu4] timeout-minutes: 60 container: From 597f1a1520c9eeab568dc4d5ff72119d6f4d9902 Mon Sep 17 00:00:00 2001 From: David Gardner <96306125+dagardner-nv@users.noreply.github.com> Date: Tue, 11 Oct 2022 12:24:06 -0700 Subject: [PATCH 07/11] Fetch history and tags for package step (#188) Authors: - David Gardner (https://github.com/dagardner-nv) Approvers: - Michael Demoret (https://github.com/mdemoret-nv) URL: https://github.com/nv-morpheus/SRF/pull/188 --- .github/workflows/pull_request.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 5dac985b4..14376becb 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -181,6 +181,7 @@ jobs: with: lfs: false path: 'srf' + fetch-depth: 0 - name: conda shell: bash From a916825778fa5e0b6a713d664741e5f1d0195b62 Mon Sep 17 00:00:00 2001 From: Christopher Harris Date: Tue, 11 Oct 2022 16:35:05 -0500 Subject: [PATCH 08/11] update abseil, grpc, and ucx versions for cuml compatibility (#177) installing cuml 22.08 along side srf 22.09 does not work, as the abseil, grpc, and ucx versions mismatch. This PR resolves those issues. In doing so, a bug presented itself in how conda build was determining hashes. SRF was attempting to look for a version of libsrf that was not being generated, to fix this, I added abseil as a dependency of SRF, which is a workaround mentioned in https://github.com/conda-forge/arrow-cpp-feedstock/issues/814. 
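As a rough sketch of how the variant pairing in `conda_build_config.yaml` behaves (assuming standard conda-build `zip_keys` semantics, nothing SRF-specific): the grouped keys are zipped index-by-index instead of being expanded as a full cross product, so only the aligned rows below are built:

```yaml
# i-th entries of each key are combined, yielding three build variants:
#   RAPIDS 22.06 + abseil 20210324.2 + gRPC 1.45 + UCX 1.12
#   RAPIDS 22.08 + abseil 20211102.0 + gRPC 1.46 + UCX 1.13
#   RAPIDS 22.08 + abseil 20220623.0 + gRPC 1.46 + UCX 1.13
zip_keys:
  - rapids_version
  - abseil_cpp
  - grpc_cpp
  - ucx
```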
Authors: - Christopher Harris (https://github.com/cwharris) - Michael Demoret (https://github.com/mdemoret-nv) Approvers: - Michael Demoret (https://github.com/mdemoret-nv) URL: https://github.com/nv-morpheus/SRF/pull/177 --- Dockerfile | 2 +- ci/conda/environments/ci_env.yml | 2 +- ci/conda/environments/dev_env.yml | 4 +- ci/conda/environments/dev_env_nogcc.yml | 4 +- ci/conda/recipes/libsrf/build.sh | 1 + .../recipes/libsrf/conda_build_config.yaml | 29 +++++++++---- ci/conda/recipes/libsrf/meta.yaml | 6 ++- cmake/dependencies.cmake | 2 +- cmake/deps/Configure_gRPC.cmake | 41 ------------------- cmake/deps/Configure_ucx.cmake | 2 +- 10 files changed, 34 insertions(+), 59 deletions(-) delete mode 100644 cmake/deps/Configure_gRPC.cmake diff --git a/Dockerfile b/Dockerfile index 89512e255..5f2bf6d99 100644 --- a/Dockerfile +++ b/Dockerfile @@ -40,7 +40,7 @@ RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/ RUN conda config --set ssl_verify false &&\ conda config --add pkgs_dirs /opt/conda/pkgs &&\ conda config --env --add channels conda-forge &&\ - /opt/conda/bin/conda install -y -n base -c conda-forge "mamba >=0.22" "boa >=0.10" python=${PYTHON_VER} + /opt/conda/bin/conda install -y -n base -c conda-forge "mamba >=0.22" "boa >=0.12" python=${PYTHON_VER} # conda clean -afy # All code will be under /work diff --git a/ci/conda/environments/ci_env.yml b/ci/conda/environments/ci_env.yml index 993824428..34aa9fcbe 100644 --- a/ci/conda/environments/ci_env.yml +++ b/ci/conda/environments/ci_env.yml @@ -18,6 +18,6 @@ name: srf channels: - conda-forge dependencies: - - boa>=0.1 + - boa>=0.12 - conda-pack=0.7 - sccache=0.3 diff --git a/ci/conda/environments/dev_env.yml b/ci/conda/environments/dev_env.yml index b6ccd973e..879ae15c8 100644 --- a/ci/conda/environments/dev_env.yml +++ b/ci/conda/environments/dev_env.yml @@ -21,7 +21,7 @@ dependencies: - glog=0.6 - gmock=1.10 - graphviz=3.0 - - grpc-cpp=1.45 + - grpc-cpp=1.46 - gtest=1.10 - gxx_linux-64=9.4 - jinja2=3.0 @@ -42,7 +42,7 @@ dependencies: - scikit-build>=0.12 - spdlog=1.8.5 - sysroot_linux-64=2.17 - - ucx=1.12 + - ucx=1.13 - pip: - cython - flake8 diff --git a/ci/conda/environments/dev_env_nogcc.yml b/ci/conda/environments/dev_env_nogcc.yml index f1b8ec610..5be1b1455 100644 --- a/ci/conda/environments/dev_env_nogcc.yml +++ b/ci/conda/environments/dev_env_nogcc.yml @@ -17,7 +17,7 @@ dependencies: - glog=0.6 - gmock=1.10 - graphviz=3.0 - - grpc-cpp=1.45 + - grpc-cpp=1.46 - gtest=1.10 - libhwloc=2.5 - libprotobuf=3.20 @@ -28,7 +28,7 @@ dependencies: - python=3.8 - scikit-build>=0.12 - spdlog=1.8.5 - - ucx=1.12 + - ucx=1.13 - pip: - cython - flake8 diff --git a/ci/conda/recipes/libsrf/build.sh b/ci/conda/recipes/libsrf/build.sh index 711277d0d..11a4254a0 100644 --- a/ci/conda/recipes/libsrf/build.sh +++ b/ci/conda/recipes/libsrf/build.sh @@ -62,6 +62,7 @@ CMAKE_ARGS="-DSRF_BUILD_PYTHON=ON ${CMAKE_ARGS}" CMAKE_ARGS="-DCMAKE_CUDA_ARCHITECTURES=${CMAKE_CUDA_ARCHITECTURES=-"ALL"} ${CMAKE_ARGS}" CMAKE_ARGS="-DPython_EXECUTABLE=${PYTHON} ${CMAKE_ARGS}" CMAKE_ARGS="-DSRF_RAPIDS_VERSION=${rapids_version} ${CMAKE_ARGS}" +CMAKE_ARGS="-DUCX_VERSION=${ucx} ${CMAKE_ARGS}" echo "CC : ${CC}" echo "CXX : ${CXX}" diff --git a/ci/conda/recipes/libsrf/conda_build_config.yaml b/ci/conda/recipes/libsrf/conda_build_config.yaml index d11d84f59..84db3f5eb 100644 --- a/ci/conda/recipes/libsrf/conda_build_config.yaml +++ b/ci/conda/recipes/libsrf/conda_build_config.yaml @@ -30,19 +30,36 @@ python: - 3.9 # Setup the dependencies to build with 
multiple versions of RAPIDS -rapids_version: - - 22.04 # Keep around compatibility with current version -1 +rapids_version: # Keep around compatibility with current version -1 - 22.06 - 22.08 + - 22.08 +# Multiple versions of abseil are required to satisfy the solver for some +# environments. RAPIDS 22.06 only works with gRPC 1.45 and 22.08 only works with +# 1.46. For each version of gRPC, support 2 abseil versions. Zip all of the keys +# together to avoid impossible combinations abseil_cpp: - - 20211102.0 - - 20210324.2 - 20210324.2 + - 20211102.0 + - 20220623.0 + +grpc_cpp: + - 1.45 + - 1.46 + - 1.46 + +# UCX 1.12 is required for RAPIDS 22.06 +ucx: + - 1.12 + - 1.13 + - 1.13 zip_keys: - rapids_version - abseil_cpp + - grpc_cpp + - ucx # The following mimic what is available in the pinning feedstock: # https://github.com/conda-forge/conda-forge-pinning-feedstock/blob/main/recipe/conda_build_config.yaml @@ -54,12 +71,8 @@ gflags: - 2.2 glog: - 0.6 -grpc_cpp: - - 1.45 libprotobuf: - 3.20 -ucx: - - 1.12 pin_run_as_build: diff --git a/ci/conda/recipes/libsrf/meta.yaml b/ci/conda/recipes/libsrf/meta.yaml index e39a5ee8a..2eb2ff035 100644 --- a/ci/conda/recipes/libsrf/meta.yaml +++ b/ci/conda/recipes/libsrf/meta.yaml @@ -127,15 +127,16 @@ outputs: host: # Only should need libsrf and python. Keep sorted! - {{ pin_subpackage('libsrf', exact=True) }} + - abseil-cpp # srf does not require abseil at build time. See https://github.com/conda-forge/arrow-cpp-feedstock/issues/814 - python {{ python }} run: - {{ pin_subpackage('libsrf', exact=True) }} - - rmm {{ rapids_version }} # This is not necessary but required until this is fixed: https://github.com/mamba-org/boa/issues/232 + - rmm {{ rapids_version }}.* # This is not necessary but required until this is fixed: https://github.com/mamba-org/boa/issues/232 - python test: imports: - srf - script: test_libsrf.sh + script: test_srf.sh source_files: # Copy the pytest source files - python/pytest.ini @@ -145,6 +146,7 @@ outputs: - numpy - nvtx - pytest + - cuml {{ rapids_version }}.* # Ensure we can install cuml. This can cause issues solving abseil-cpp about: home: https://www.nvidia.com/ diff --git a/cmake/dependencies.cmake b/cmake/dependencies.cmake index 72262cca5..dc162a29c 100644 --- a/cmake/dependencies.cmake +++ b/cmake/dependencies.cmake @@ -64,7 +64,7 @@ include(deps/Configure_boost) # UCX # === -set(UCX_VERSION "1.12" CACHE STRING "Version of ucx to use") +set(UCX_VERSION "1.13" CACHE STRING "Version of ucx to use") include(deps/Configure_ucx) # hwloc diff --git a/cmake/deps/Configure_gRPC.cmake b/cmake/deps/Configure_gRPC.cmake deleted file mode 100644 index fddae0dd5..000000000 --- a/cmake/deps/Configure_gRPC.cmake +++ /dev/null @@ -1,41 +0,0 @@ -#============================================================================= -# SPDX-FileCopyrightText: Copyright (c) 2020-2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#============================================================================= - -function(find_and_configure_gRPC VERSION) - - list(APPEND CMAKE_MESSAGE_CONTEXT "gRPC") - - rapids_cpm_find(gRPC ${GRPC_VERSION} - GLOBAL_TARGETS - gRPC::grpc++_unsecure gRPC::grpc_cpp_plugin gRPC::gpr - BUILD_EXPORT_SET - ${PROJECT_NAME}-core-exports - INSTALL_EXPORT_SET - ${PROJECT_NAME}-core-exports - CPM_ARGS - GIT_REPOSITORY https://github.com/grpc/grpc.git - GIT_TAG v${GRPC_VERSION} - GIT_SHALLOW TRUE - OPTIONS "BUILD_TESTS OFF" - "BUILD_BENCHMARKS OFF" - "CUDA_STATIC_RUNTIME ON" - "DISABLE_DEPRECATION_WARNING ${DISABLE_DEPRECATION_WARNINGS}" - ) - -endfunction() - -find_and_configure_gRPC(${GRPC_VERSION}) diff --git a/cmake/deps/Configure_ucx.cmake b/cmake/deps/Configure_ucx.cmake index 2bcd5da9f..550999d30 100644 --- a/cmake/deps/Configure_ucx.cmake +++ b/cmake/deps/Configure_ucx.cmake @@ -20,7 +20,7 @@ function(find_and_configure_ucx version) list(APPEND CMAKE_MESSAGE_CONTEXT "ucx") # Try to find UCX and download from source if not found - rapids_cpm_find(ucx 1.12 + rapids_cpm_find(ucx ${version} GLOBAL_TARGETS ucx ucx::ucp ucx::uct ucx_ucx ucx::ucp ucx::uct ucx::ucx BUILD_EXPORT_SET From 276f13892015d808ddde0a880ba357536a616bad Mon Sep 17 00:00:00 2001 From: David Gardner <96306125+dagardner-nv@users.noreply.github.com> Date: Wed, 12 Oct 2022 08:56:25 -0700 Subject: [PATCH 09/11] Fixes "Add new issue/PR to project" action (#189) fixes #190 Authors: - David Gardner (https://github.com/dagardner-nv) Approvers: - Michael Demoret (https://github.com/mdemoret-nv) URL: https://github.com/nv-morpheus/SRF/pull/189 --- .github/workflows/add_to_project.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/add_to_project.yml b/.github/workflows/add_to_project.yml index 00f0ac7fa..96c6b741d 100644 --- a/.github/workflows/add_to_project.yml +++ b/.github/workflows/add_to_project.yml @@ -17,4 +17,4 @@ jobs: - uses: actions/add-to-project@v0.3.0 with: project-url: https://github.com/orgs/nv-morpheus/projects/2 - github-token: ${{ secrets.ACTIONS_ACCESS_TOKEN }} + github-token: ${{ github.token}} From 15d348626e25055b5f08694ecf9345c923fec5e6 Mon Sep 17 00:00:00 2001 From: Michael Demoret <42954918+mdemoret-nv@users.noreply.github.com> Date: Wed, 12 Oct 2022 15:36:00 -0500 Subject: [PATCH 10/11] Updating SRF versions from 22.09 to 22.11 (#191) Some versions in the SRF repo werent properly updated from 22.09 to 22.11 Authors: - Michael Demoret (https://github.com/mdemoret-nv) Approvers: - David Gardner (https://github.com/dagardner-nv) URL: https://github.com/nv-morpheus/SRF/pull/191 --- CMakeLists.txt | 2 +- docs/quickstart/CMakeLists.txt | 2 +- docs/quickstart/environment_cpp.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 74c1c1c81..84fa4e79c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -58,7 +58,7 @@ endif() rapids_cuda_init_architectures(srf) project(srf - VERSION 22.09.00 + VERSION 22.11.00 LANGUAGES C CXX ) diff --git a/docs/quickstart/CMakeLists.txt b/docs/quickstart/CMakeLists.txt index 0f692e350..b3db84fea 100644 --- a/docs/quickstart/CMakeLists.txt +++ b/docs/quickstart/CMakeLists.txt @@ -25,7 +25,7 @@ list(PREPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../../cmake") include(import_rapids_cmake) project(srf-quickstart - VERSION 22.09.00 + VERSION 22.11.00 LANGUAGES C CXX ) diff --git a/docs/quickstart/environment_cpp.yml b/docs/quickstart/environment_cpp.yml index 3d8464080..992c2e6ae 100644 --- 
a/docs/quickstart/environment_cpp.yml +++ b/docs/quickstart/environment_cpp.yml @@ -30,7 +30,7 @@ dependencies: - python=3.8 - scikit-build>=0.12 - spdlog=1.8.5 - - srf=22.09 + - srf=22.11 - sysroot_linux-64=2.17 - pip: - cython From b65991859e82b3f799c32583d1d342f9c784f546 Mon Sep 17 00:00:00 2001 From: Ryan Olson Date: Thu, 13 Oct 2022 05:18:04 +0000 Subject: [PATCH 11/11] explicitly unregistering memory and deleting remote key buffers --- src/tests/test_ucx.cpp | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/tests/test_ucx.cpp b/src/tests/test_ucx.cpp index 21642953b..f289b1a4e 100644 --- a/src/tests/test_ucx.cpp +++ b/src/tests/test_ucx.cpp @@ -204,6 +204,11 @@ TEST_F(TestUCX, Get) worker_get_src->progress(); } future.get(); + + // unregister memory + ucp_rkey_buffer_release(src_rbuff); + context->unregister_memory(src_lkey); + context->unregister_memory(dst_lkey); } // Recv
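A minimal sketch of the UCX lifecycle the added cleanup completes, written against the raw UCP API rather than SRF's wrappers (the function and variable names here are illustrative, not taken from the SRF code base): every successful registration and rkey pack is paired with an explicit release before the worker and context are torn down.

```cpp
#include <ucp/api/ucp.h>

#include <cstddef>
#include <stdexcept>

// Register a buffer, pack its remote key for a peer, then release both.
void register_use_release(ucp_context_h ctx, void* addr, std::size_t len)
{
    ucp_mem_map_params_t params{};
    params.field_mask = UCP_MEM_MAP_PARAM_FIELD_ADDRESS | UCP_MEM_MAP_PARAM_FIELD_LENGTH;
    params.address    = addr;
    params.length     = len;

    ucp_mem_h memh = nullptr;
    if (ucp_mem_map(ctx, &params, &memh) != UCS_OK)
    {
        throw std::runtime_error("ucp_mem_map failed");
    }

    void* rkey_buffer    = nullptr;
    std::size_t rkey_len = 0;
    if (ucp_rkey_pack(ctx, memh, &rkey_buffer, &rkey_len) != UCS_OK)
    {
        ucp_mem_unmap(ctx, memh);
        throw std::runtime_error("ucp_rkey_pack failed");
    }

    // ... send rkey_buffer/rkey_len to the peer and perform RMA gets/puts ...

    // Mirror of the additions in the patch above: free the packed rkey buffer
    // and unregister the memory region before the context is destroyed.
    ucp_rkey_buffer_release(rkey_buffer);
    ucp_mem_unmap(ctx, memh);
}
```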