-
Notifications
You must be signed in to change notification settings - Fork 62
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge remote-tracking branch 'origin/master' into qa/clang-tidy
- Loading branch information
Showing
27 changed files
with
520 additions
and
328 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,122 @@ | ||
include: | ||
- remote: 'https://gitlab.com/cscs-ci/recipes/-/raw/master/templates/v2/.ci-ext.yml' | ||
|
||
stages: | ||
- build_base | ||
- build_app | ||
- build_multiarch | ||
- test | ||
|
||
# TARGET must be any of {daint-gpu daint-mc alps-zen2 alps-a100 alps-mi200 alps-gh200 alps-mi300a} | ||
# eiger: 2x AMD EPYC 7742 64-Core, micro-arch: zen2 | ||
# todi: 4x gh200 72-Core + H100, micro-arch: neoverse-v2, cuda-arch: 90 | ||
|
||
build_base_image_x86_64: | ||
extends: [.container-builder-cscs-zen2, .dynamic-image-name] | ||
stage: build_base | ||
variables: | ||
DOCKERFILE: .gitlab/docker/Dockerfile.base | ||
WATCH_FILECHANGES: '.gitlab/docker/Dockerfile.base' | ||
PERSIST_IMAGE_NAME: $CSCS_REGISTRY_PATH/arbor/baseimg-x86_64 | ||
CSCS_BUILD_IN_MEMORY: TRUE | ||
CSCS_REBUILD_POLICY: "if-not-exists" | ||
DOCKER_BUILD_ARGS: '["IMG_BASE=ghcr.io/eth-cscs/docker-ci-ext/base-containers/spack-base:spack0.21.0-ubuntu22.04-cpu", "IMG_HELPER=ghcr.io/eth-cscs/docker-ci-ext/base-containers/spack-helper:ubuntu22.04-cpu", "TARGET=alps-zen2"]' | ||
|
||
build_base_image_aarch64: | ||
extends: [.container-builder-cscs-gh200, .dynamic-image-name] | ||
stage: build_base | ||
variables: | ||
DOCKERFILE: .gitlab/docker/Dockerfile.base | ||
WATCH_FILECHANGES: '.gitlab/docker/Dockerfile.base' | ||
PERSIST_IMAGE_NAME: $CSCS_REGISTRY_PATH/arbor/baseimg-aarch64 | ||
CSCS_BUILD_IN_MEMORY: TRUE | ||
CSCS_REBUILD_POLICY: "if-not-exists" | ||
DOCKER_BUILD_ARGS: '["IMG_BASE=ghcr.io/eth-cscs/docker-ci-ext/base-containers/spack-base:spack0.21.0-ubuntu22.04-cuda12.4.1", "IMG_HELPER=ghcr.io/eth-cscs/docker-ci-ext/base-containers/spack-helper:ubuntu22.04-cuda12.4.1", "TARGET=alps-gh200"]' | ||
|
||
build_app_image_x86_64: | ||
extends: .container-builder-cscs-zen2 | ||
stage: build_app | ||
needs: | ||
- job: build_base_image_x86_64 | ||
artifacts: true | ||
variables: | ||
DOCKERFILE: .gitlab/docker/Dockerfile.app | ||
PERSIST_IMAGE_NAME: $CSCS_REGISTRY_PATH/arbor/appimg-x86_64:$CI_COMMIT_SHORT_SHA | ||
DOCKER_BUILD_ARGS: '["BASE_IMAGE=$BASE_IMAGE", "CXX_FLAGS=-march=znver2"]' | ||
|
||
build_app_image_aarch64: | ||
extends: .container-builder-cscs-gh200 | ||
stage: build_app | ||
needs: | ||
- job: build_base_image_aarch64 | ||
artifacts: true | ||
variables: | ||
DOCKERFILE: .gitlab/docker/Dockerfile.app | ||
PERSIST_IMAGE_NAME: $CSCS_REGISTRY_PATH/arbor/appimg-aarch64:$CI_COMMIT_SHORT_SHA | ||
DOCKER_BUILD_ARGS: '["BASE_IMAGE=$BASE_IMAGE", "CXX_FLAGS=-mcpu=neoverse-v2 -mtune=neoverse-v2", "GPU=cuda", "GPU_ARCH=90"]' | ||
|
||
build_multiarch_image: | ||
extends: .make-multiarch-image | ||
stage: build_multiarch | ||
variables: | ||
PERSIST_IMAGE_NAME_X86_64: "$CSCS_REGISTRY_PATH/arbor/appimg-x86_64:$CI_COMMIT_SHORT_SHA" | ||
PERSIST_IMAGE_NAME_AARCH64: "$CSCS_REGISTRY_PATH/arbor/appimg-aarch64:$CI_COMMIT_SHORT_SHA" | ||
PERSIST_IMAGE_NAME: "$CSCS_REGISTRY_PATH/arbor/appimg:$CI_COMMIT_SHORT_SHA" | ||
|
||
.test_unit: | ||
stage: test | ||
image: $CSCS_REGISTRY_PATH/arbor/appimg:$CI_COMMIT_SHORT_SHA | ||
script: | ||
- cd /arbor.src | ||
- build/bin/unit-modcc | ||
- build/bin/unit-local | ||
- build/bin/unit | ||
- scripts/run_cpp_examples.sh | ||
- python -m venv --system-site-packages /arbor.install | ||
- source /arbor.install/bin/activate | ||
- python -m unittest discover -v -s python | ||
- scripts/run_python_examples.sh | ||
- scripts/test_executables.sh | ||
- deactivate | ||
variables: | ||
SLURM_JOB_NUM_NODES: 1 | ||
SLURM_NTASKS: 1 | ||
SLURM_NTASKS_PER_NODE: 1 | ||
SLURM_TIMELIMIT: "00:30:00" | ||
SLURM_CPU_BIND: "verbose,none" | ||
USE_MPI: "NO" | ||
|
||
test_x86_64: | ||
extends: [.container-runner-eiger-mc, .test_unit] | ||
variables: | ||
SLURM_CONSTRAINT: mc | ||
|
||
test_aarch64: | ||
extends: [.container-runner-todi-gh200, .test_unit] | ||
|
||
## distributed tests don't work yet - possible problem with the gitlab runners | ||
#.test_distributed: | ||
# stage: test | ||
# image: $CSCS_REGISTRY_PATH/arbor/appimg:$CI_COMMIT_SHORT_SHA | ||
# script: | ||
# - cd /arbor.src | ||
# - build/bin/unit-mpi | ||
# - scripts/run_cpp_examples.sh -d | ||
# variables: | ||
# SLURM_JOB_NUM_NODES: 2 | ||
# SLURM_CPU_BIND: "verbose,rank_ldom" | ||
# SLURM_TIMELIMIT: "00:30:00" | ||
# USE_MPI: "YES" | ||
# | ||
#test_x86_64-distributed: | ||
# extends: [.container-runner-eiger-mc, .test_distributed] | ||
# variables: | ||
# SLURM_CONSTRAINT: mc | ||
# SLURM_NTASKS_PER_NODE: 8 | ||
# | ||
#test_aarch64-distributed: | ||
# extends: [.container-runner-todi-gh200, .test_distributed] | ||
# variables: | ||
# SLURM_GPUS_PER_NODE: 4 | ||
# SLURM_GPUS_PER_TASK: 1 | ||
# SLURM_NTASKS_PER_NODE: 4 |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
ARG BASE_IMAGE | ||
FROM $BASE_IMAGE | ||
|
||
COPY . /arbor.src | ||
|
||
ARG NUM_PROCS | ||
ARG CXX_FLAGS="" | ||
ARG GPU=none | ||
ARG GPU_ARCH=60 | ||
|
||
RUN echo ${CXX_FLAGS} | ||
|
||
RUN mkdir -p /arbor.src/build \ | ||
&& cd /arbor.src/build \ | ||
&& cmake .. \ | ||
-GNinja \ | ||
-DCMAKE_INSTALL_PREFIX=/arbor.install \ | ||
-DCMAKE_BUILD_TYPE=Release \ | ||
-DBUILD_TESTING=ON \ | ||
-DARB_ARCH=none \ | ||
-DARB_CXX_FLAGS_TARGET="${CXX_FLAGS}" \ | ||
-DARB_WITH_ASSERTIONS=ON \ | ||
-DARB_WITH_PROFILING=ON \ | ||
-DARB_VECTORIZE=ON \ | ||
-DARB_WITH_PYTHON=ON \ | ||
-DARB_USE_HWLOC=ON \ | ||
-DARB_WITH_MPI=ON \ | ||
-DARB_GPU=$GPU \ | ||
-DCMAKE_CUDA_ARCHITECTURES=$GPU_ARCH \ | ||
-DARB_USE_GPU_RNG=ON \ | ||
&& ninja -j${NUM_PROCS} tests examples pyarb \ | ||
&& ninja install |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,50 @@ | ||
ARG IMG_BASE | ||
FROM $IMG_BASE as builder | ||
|
||
ARG TARGET | ||
RUN spack-install-helper --target $TARGET \ | ||
"git" \ | ||
"meson" \ | ||
"ninja" \ | ||
"cmake" \ | ||
"valgrind" \ | ||
"python" \ | ||
"hwloc" \ | ||
"boost" \ | ||
"fmt" \ | ||
"random123" \ | ||
"py-mpi4py" \ | ||
"py-sphinx" \ | ||
"py-svgwrite" \ | ||
"nlohmann-json" \ | ||
"py-pybind11" \ | ||
"py-numpy" \ | ||
"py-flake8" \ | ||
"py-black" \ | ||
"py-pytest" \ | ||
"py-seaborn" \ | ||
"py-pandas" \ | ||
"pugixml" \ | ||
"googletest" | ||
|
||
# end of builder container, now we are ready to copy necessary files | ||
# copy only relevant parts to the final container | ||
ARG IMG_HELPER | ||
FROM $IMG_HELPER | ||
|
||
# it is important to keep the paths, otherwise your installation is broken | ||
# all these paths are created with the above `spack-install-helper` invocation | ||
COPY --from=builder /opt/spack-environment /opt/spack-environment | ||
COPY --from=builder /opt/software /opt/software | ||
COPY --from=builder /opt/._view /opt/._view | ||
COPY --from=builder /etc/profile.d/z10_spack_environment.sh /etc/profile.d/z10_spack_environment.sh | ||
|
||
# Some boilerplate to get all paths correctly - fix_spack_install is part of the base image | ||
# and makes sure that all important things are being correctly setup | ||
RUN fix_spack_install | ||
|
||
# Finally install software that is needed, e.g. compilers | ||
# It is also possible to build compilers via spack and let all dependencies be handled by spack | ||
RUN apt-get -yqq update && apt-get -yqq upgrade \ | ||
&& apt-get -yqq install build-essential gfortran \ | ||
&& rm -rf /var/lib/apt/lists/* |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,63 +1,107 @@ | ||
#pragma once | ||
|
||
#include <type_traits> | ||
#include <vector> | ||
|
||
#include <arbor/generic_event.hpp> | ||
#include <arbor/mechanism_abi.h> | ||
|
||
|
||
#include "backends/event.hpp" | ||
#include "backends/event_stream_state.hpp" | ||
#include "event_lane.hpp" | ||
#include "timestep_range.hpp" | ||
#include "util/partition.hpp" | ||
|
||
ARB_SERDES_ENABLE_EXT(arb_deliverable_event_data, mech_index, weight); | ||
|
||
namespace arb { | ||
|
||
template <typename Event, typename Span> | ||
class event_stream_base { | ||
public: // member types | ||
template <typename Event> | ||
struct event_stream_base { | ||
using size_type = std::size_t; | ||
using event_type = Event; | ||
using event_time_type = ::arb::event_time_type<Event>; | ||
using event_data_type = ::arb::event_data_type<Event>; | ||
|
||
protected: // private member types | ||
using span_type = Span; | ||
|
||
static_assert(std::is_same<decltype(std::declval<span_type>().begin()), event_data_type*>::value); | ||
static_assert(std::is_same<decltype(std::declval<span_type>().end()), event_data_type*>::value); | ||
|
||
protected: // members | ||
std::vector<event_data_type> ev_data_; | ||
std::vector<span_type> ev_spans_; | ||
std::vector<std::size_t> ev_spans_ = {0}; | ||
size_type index_ = 0; | ||
event_data_type* base_ptr_ = nullptr; | ||
|
||
public: | ||
event_stream_base() = default; | ||
|
||
// returns true if the currently marked time step has no events | ||
bool empty() const { | ||
return ev_spans_.empty() || ev_data_.empty() || !index_ || index_ > ev_spans_.size() || | ||
!ev_spans_[index_-1].size(); | ||
return ev_data_.empty() // No events | ||
|| index_ < 1 // Since we index with a left bias, index_ must be at least 1 | ||
|| index_ >= ev_spans_.size() // Cannot index at container length | ||
|| ev_spans_[index_-1] >= ev_spans_[index_]; // Current span is empty | ||
} | ||
|
||
void mark() { | ||
index_ += (index_ <= ev_spans_.size() ? 1 : 0); | ||
} | ||
void mark() { index_ += 1; } | ||
|
||
auto marked_events() const { | ||
using std::begin; | ||
using std::end; | ||
if (empty()) { | ||
return make_event_stream_state((event_data_type*)nullptr, (event_data_type*)nullptr); | ||
} else { | ||
return make_event_stream_state(begin(ev_spans_[index_-1]), end(ev_spans_[index_-1])); | ||
auto marked_events() { | ||
auto beg = (event_data_type*)nullptr; | ||
auto end = (event_data_type*)nullptr; | ||
if (!empty()) { | ||
beg = base_ptr_ + ev_spans_[index_-1]; | ||
end = base_ptr_ + ev_spans_[index_]; | ||
} | ||
return make_event_stream_state(beg, end); | ||
} | ||
|
||
// clear all previous data | ||
void clear() { | ||
ev_data_.clear(); | ||
// Clear + push doesn't allocate a new vector | ||
ev_spans_.clear(); | ||
ev_spans_.push_back(0); | ||
base_ptr_ = nullptr; | ||
index_ = 0; | ||
} | ||
|
||
// Construct a mapping of mech_id to a stream s.t. streams are partitioned into | ||
// time step buckets by `ev_span` | ||
template<typename EventStream> | ||
static std::enable_if_t<std::is_base_of_v<event_stream_base, EventStream>> | ||
multi_event_stream(const event_lane_subrange& lanes, | ||
const std::vector<target_handle>& handles, | ||
const std::vector<std::size_t>& divs, | ||
const timestep_range& steps, | ||
std::unordered_map<unsigned, EventStream>& streams) { | ||
auto n_steps = steps.size(); | ||
|
||
std::unordered_map<unsigned, std::vector<std::size_t>> dt_sizes; | ||
for (auto& [k, v]: streams) { | ||
v.clear(); | ||
dt_sizes[k].resize(n_steps, 0); | ||
} | ||
|
||
auto cell = 0; | ||
for (auto& lane: lanes) { | ||
auto div = divs[cell]; | ||
arb_size_type step = 0; | ||
for (auto evt: lane) { | ||
auto time = evt.time; | ||
auto weight = evt.weight; | ||
auto target = evt.target; | ||
while(step < n_steps && time >= steps[step].t_end()) ++step; | ||
// Events coinciding with epoch's upper boundary belong to next epoch | ||
if (step >= n_steps) break; | ||
auto& handle = handles[div + target]; | ||
streams[handle.mech_id].ev_data_.push_back({handle.mech_index, weight}); | ||
dt_sizes[handle.mech_id][step]++; | ||
} | ||
++cell; | ||
} | ||
|
||
for (auto& [id, stream]: streams) { | ||
util::make_partition(stream.ev_spans_, dt_sizes[id]); | ||
stream.init(); | ||
} | ||
} | ||
}; | ||
|
||
} // namespace arb |
Oops, something went wrong.