From af1c3096a72f63bfb3a04a03dc418750c1f8035f Mon Sep 17 00:00:00 2001
From: neworderofjamie
Date: Wed, 1 Mar 2023 18:07:48 +0000
Subject: [PATCH 1/7] expose $(num_batch), $(num), $(num_pre) and $(num_post)
 where appropriate

---
 .../code_generator/customUpdateGroupMerged.cc | 39 ++++++++++++-------
 .../genn/code_generator/initGroupMerged.cc    | 35 ++++++++++-------
 .../code_generator/neuronUpdateGroupMerged.cc |  3 ++
 .../synapseUpdateGroupMerged.cc               |  3 ++
 4 files changed, 52 insertions(+), 28 deletions(-)

diff --git a/src/genn/genn/code_generator/customUpdateGroupMerged.cc b/src/genn/genn/code_generator/customUpdateGroupMerged.cc
index 5f5e8cfd0e..95a82f89f7 100644
--- a/src/genn/genn/code_generator/customUpdateGroupMerged.cc
+++ b/src/genn/genn/code_generator/customUpdateGroupMerged.cc
@@ -171,6 +171,9 @@ boost::uuids::detail::sha1::digest_type CustomUpdateGroupMerged::getHashDigest()
 //----------------------------------------------------------------------------
 void CustomUpdateGroupMerged::generateCustomUpdate(const BackendBase&, CodeStream &os, const ModelSpecMerged &modelMerged, Substitutions &popSubs) const
 {
+    popSubs.addVarSubstitution("num_batch", std::to_string(getArchetype().isBatched() ? modelMerged.getModel().getBatchSize() : 1));
+    popSubs.addVarSubstitution("num", "group->size");
+
     genCustomUpdate(os, popSubs, *this, modelMerged, "id",
                     [this](const Models::VarReference &varRef, const std::string &index)
                     {
@@ -272,6 +275,20 @@ CustomUpdateWUGroupMergedBase::CustomUpdateWUGroupMergedBase(size_t index, const
                                                              const std::vector<std::reference_wrapper<const CustomUpdateWUInternal>> &groups)
 :   GroupMerged<CustomUpdateWUInternal>(index, precision, groups)
 {
+    addField("unsigned int", "numSrcNeurons",
+             [](const CustomUpdateWUInternal &cg, size_t)
+             {
+                 const SynapseGroupInternal *sgInternal = static_cast<const SynapseGroupInternal*>(cg.getSynapseGroup());
+                 return std::to_string(sgInternal->getSrcNeuronGroup()->getNumNeurons());
+             });
+
+    addField("unsigned int", "numTrgNeurons",
+             [](const CustomUpdateWUInternal &cg, size_t)
+             {
+                 const SynapseGroupInternal *sgInternal = static_cast<const SynapseGroupInternal*>(cg.getSynapseGroup());
+                 return std::to_string(sgInternal->getTrgNeuronGroup()->getNumNeurons());
+             });
+
     // If underlying synapse group has kernel weights
     if (getArchetype().getSynapseGroup()->getMatrixType() & SynapseMatrixWeight::KERNEL) {
         // Loop through kernel size dimensions
@@ -294,20 +311,6 @@ CustomUpdateWUGroupMergedBase::CustomUpdateWUGroupMergedBase(size_t index, const
                      const SynapseGroupInternal *sgInternal = static_cast<const SynapseGroupInternal*>(cg.getSynapseGroup());
                      return std::to_string(backend.getSynapticMatrixRowStride(*sgInternal));
                  });
-
-    addField("unsigned int", "numSrcNeurons",
-             [](const CustomUpdateWUInternal &cg, size_t)
-             {
-                 const SynapseGroupInternal *sgInternal = static_cast<const SynapseGroupInternal*>(cg.getSynapseGroup());
-                 return std::to_string(sgInternal->getSrcNeuronGroup()->getNumNeurons());
-             });
-
-    addField("unsigned int", "numTrgNeurons",
-             [](const CustomUpdateWUInternal &cg, size_t)
-             {
-                 const SynapseGroupInternal *sgInternal = static_cast<const SynapseGroupInternal*>(cg.getSynapseGroup());
-                 return std::to_string(sgInternal->getTrgNeuronGroup()->getNumNeurons());
-             });
 
     // If synapse group has sparse connectivity
     if(getArchetype().getSynapseGroup()->getMatrixType() & SynapseMatrixConnectivity::SPARSE) {
@@ -370,6 +373,10 @@ const std::string CustomUpdateWUGroupMerged::name = "CustomUpdateWU";
 //----------------------------------------------------------------------------
 void CustomUpdateWUGroupMerged::generateCustomUpdate(const BackendBase&, CodeStream &os, const ModelSpecMerged &modelMerged, Substitutions &popSubs) const
 {
+    
popSubs.addVarSubstitution("num_batch", std::to_string(getArchetype().isBatched() ? modelMerged.getModel().getBatchSize() : 1)); + popSubs.addVarSubstitution("num_pre", "group->numSrcNeurons"); + popSubs.addVarSubstitution("num_post", "group->numTrgNeurons"); + genCustomUpdate(os, popSubs, *this, modelMerged, "id_syn", [this, &modelMerged](const Models::WUVarReference &varRef, const std::string &index) { @@ -385,6 +392,10 @@ const std::string CustomUpdateTransposeWUGroupMerged::name = "CustomUpdateTransp //---------------------------------------------------------------------------- void CustomUpdateTransposeWUGroupMerged::generateCustomUpdate(const BackendBase&, CodeStream &os, const ModelSpecMerged &modelMerged, Substitutions &popSubs) const { + popSubs.addVarSubstitution("num_batch", std::to_string(getArchetype().isBatched() ? modelMerged.getModel().getBatchSize() : 1)); + popSubs.addVarSubstitution("num_pre", "group->numSrcNeurons"); + popSubs.addVarSubstitution("num_post", "group->numTrgNeurons"); + genCustomUpdate(os, popSubs, *this, modelMerged, "id_syn", [this, &modelMerged](const Models::WUVarReference &varRef, const std::string &index) { diff --git a/src/genn/genn/code_generator/initGroupMerged.cc b/src/genn/genn/code_generator/initGroupMerged.cc index 236366efde..64b3bc4526 100644 --- a/src/genn/genn/code_generator/initGroupMerged.cc +++ b/src/genn/genn/code_generator/initGroupMerged.cc @@ -66,6 +66,8 @@ void genInitNeuronVarCode(CodeStream &os, const BackendBase &backend, const Subs CodeStream::Scope b(os); Substitutions varSubs(&popSubs); + varSubs.addVarSubstitution("num_batch", std::to_string(batchSize)); + varSubs.addVarSubstitution("num", count); // Substitute in parameters and derived parameters for initialising variables varSubs.addParamValueSubstitution(varInit.getSnippet()->getParamNames(), varInit.getParams(), @@ -147,31 +149,36 @@ void genInitWUVarCode(CodeStream &os, const Substitutions &popSubs, // If this variable has any initialisation code and doesn't require a kernel if(!varInit.getSnippet()->getCode().empty() && !varInit.getSnippet()->requiresKernel()) { CodeStream::Scope b(os); + + Substitutions varSubs(&popSubs); + varSubs.addVarSubstitution("num_batch", std::to_string(batchSize)); + varSubs.addVarSubstitution("num_pre", "group->numSrcNeurons"); + varSubs.addVarSubstitution("num_post", "group->numTrgNeurons"); // Generate target-specific code to initialise variable - genSynapseVariableRowInitFn(os, popSubs, + genSynapseVariableRowInitFn(os, varSubs, [&vars, &varInit, &ftype, &stride, batchSize, k, groupIndex, isParamHeterogeneousFn, isDerivedParamHeterogeneousFn] - (CodeStream &os, Substitutions &varSubs) + (CodeStream &os, Substitutions &varInitSubs) { - varSubs.addParamValueSubstitution(varInit.getSnippet()->getParamNames(), varInit.getParams(), - [k, isParamHeterogeneousFn](size_t p) { return isParamHeterogeneousFn(k, p); }, - "", "group->", vars[k].name); - varSubs.addVarValueSubstitution(varInit.getSnippet()->getDerivedParams(), varInit.getDerivedParams(), - [k, isDerivedParamHeterogeneousFn](size_t p) { return isDerivedParamHeterogeneousFn(k, p); }, - "", "group->", vars[k].name); - varSubs.addVarNameSubstitution(varInit.getSnippet()->getExtraGlobalParams(), - "", "group->", vars[k].name); + varInitSubs.addParamValueSubstitution(varInit.getSnippet()->getParamNames(), varInit.getParams(), + [k, isParamHeterogeneousFn](size_t p) { return isParamHeterogeneousFn(k, p); }, + "", "group->", vars[k].name); + 
varInitSubs.addVarValueSubstitution(varInit.getSnippet()->getDerivedParams(), varInit.getDerivedParams(), + [k, isDerivedParamHeterogeneousFn](size_t p) { return isDerivedParamHeterogeneousFn(k, p); }, + "", "group->", vars[k].name); + varInitSubs.addVarNameSubstitution(varInit.getSnippet()->getExtraGlobalParams(), + "", "group->", vars[k].name); // Generate initial value into temporary variable os << vars[k].type << " initVal;" << std::endl; - varSubs.addVarSubstitution("value", "initVal"); + varInitSubs.addVarSubstitution("value", "initVal"); std::string code = varInit.getSnippet()->getCode(); - varSubs.applyCheckUnreplaced(code, "initVar : merged" + vars[k].name + std::to_string(groupIndex)); + varInitSubs.applyCheckUnreplaced(code, "initVar : merged" + vars[k].name + std::to_string(groupIndex)); code = ensureFtype(code, ftype); os << code << std::endl; // Fill value across all batches - genVariableFill(os, vars[k].name, "initVal", varSubs["id_syn"], stride, + genVariableFill(os, vars[k].name, "initVal", varInitSubs["id_syn"], stride, getVarAccessDuplication(vars[k].access), batchSize); }); } @@ -333,7 +340,7 @@ void NeuronInitGroupMerged::generateInit(const BackendBase &backend, CodeStream os << "*group->spkQuePtr = 0;" << std::endl; }); } - + // Initialise neuron variables genInitNeuronVarCode(os, backend, popSubs, getArchetype().getNeuronModel()->getVars(), getArchetype().getVarInitialisers(), "", "numNeurons", getArchetype().getNumDelaySlots(), getIndex(), model.getPrecision(), model.getBatchSize(), diff --git a/src/genn/genn/code_generator/neuronUpdateGroupMerged.cc b/src/genn/genn/code_generator/neuronUpdateGroupMerged.cc index 4700714d1d..788c516afc 100644 --- a/src/genn/genn/code_generator/neuronUpdateGroupMerged.cc +++ b/src/genn/genn/code_generator/neuronUpdateGroupMerged.cc @@ -225,6 +225,9 @@ void NeuronUpdateGroupMerged::generateNeuronUpdate(const BackendBase &backend, C } Substitutions neuronSubs(&popSubs); + neuronSubs.addVarSubstitution("num_batch", std::to_string(batchSize)); + neuronSubs.addVarSubstitution("num", "group->numNeurons"); + neuronSubs.addVarSubstitution("Isyn", "Isyn"); if(getArchetype().isSpikeTimeRequired()) { diff --git a/src/genn/genn/code_generator/synapseUpdateGroupMerged.cc b/src/genn/genn/code_generator/synapseUpdateGroupMerged.cc index b7fcd010ff..674f92b7fc 100644 --- a/src/genn/genn/code_generator/synapseUpdateGroupMerged.cc +++ b/src/genn/genn/code_generator/synapseUpdateGroupMerged.cc @@ -20,6 +20,9 @@ void applySynapseSubstitutions(CodeStream &os, std::string code, const std::stri const auto *wu = sg.getArchetype().getWUModel(); Substitutions synapseSubs(&baseSubs); + synapseSubs.addVarSubstitution("num_batch", std::to_string(batchSize)); + synapseSubs.addVarSubstitution("num_pre", "group->numSrcNeurons"); + synapseSubs.addVarSubstitution("num_post", "group->numTrgNeurons"); // Substitute parameter and derived parameter names synapseSubs.addParamValueSubstitution(wu->getParamNames(), sg.getArchetype().getWUParams(), From c7e2f5ad1bdad5b43547cb3776950ce180fd9324 Mon Sep 17 00:00:00 2001 From: neworderofjamie Date: Thu, 2 Mar 2023 11:00:50 +0000 Subject: [PATCH 2/7] basic feature test for $(num_XXX) variables --- tests/features/num/model.cc | 108 +++++++++++++++++++++++++++++ tests/features/num/num.sln | 30 ++++++++ tests/features/num/num.vcxproj | 63 +++++++++++++++++ tests/features/num/runner_guid.txt | 1 + tests/features/num/test.cc | 71 +++++++++++++++++++ 5 files changed, 273 insertions(+) create mode 100644 tests/features/num/model.cc 
create mode 100644 tests/features/num/num.sln create mode 100644 tests/features/num/num.vcxproj create mode 100644 tests/features/num/runner_guid.txt create mode 100644 tests/features/num/test.cc diff --git a/tests/features/num/model.cc b/tests/features/num/model.cc new file mode 100644 index 0000000000..fce62bba9d --- /dev/null +++ b/tests/features/num/model.cc @@ -0,0 +1,108 @@ +//-------------------------------------------------------------------------- +/*! \file num/model.cc + +\brief model definition file that is part of the feature testing +suite of minimal models with known analytic outcomes that are used for continuous integration testing. +*/ +//-------------------------------------------------------------------------- + + +#include "modelSpec.h" + +//---------------------------------------------------------------------------- +// Neuron +//---------------------------------------------------------------------------- +class Neuron : public NeuronModels::Base +{ +public: + DECLARE_MODEL(Neuron, 0, 2); + + SET_VARS({{"num_test", "unsigned int"}, {"num_batch_test", "unsigned int"}}); + + SET_SIM_CODE( + "$(num_test)= $(num);\n" + "$(num_batch_test) = $(num_batch);\n"); +}; +IMPLEMENT_MODEL(Neuron); + +//---------------------------------------------------------------------------- +// PSM +//---------------------------------------------------------------------------- +class PSM : public PostsynapticModels::Base +{ +public: + DECLARE_MODEL(PSM, 0, 2); + + SET_VARS({{"num_psm_test", "unsigned int"}, {"num_batch_psm_test", "unsigned int"}}); + + SET_DECAY_CODE( + "$(num_psm_test)= $(num);\n" + "$(num_batch_psm_test) = $(num_batch);\n"); +}; +IMPLEMENT_MODEL(PSM); + +//---------------------------------------------------------------------------- +// CS +//---------------------------------------------------------------------------- +class CS : public CurrentSourceModels::Base +{ +public: + DECLARE_MODEL(CS, 0, 2); + + SET_VARS({{"num_cs_test", "unsigned int"}, {"num_batch_cs_test", "unsigned int"}}); + + SET_INJECTION_CODE( + "$(num_cs_test)= $(num);\n" + "$(num_batch_cs_test) = $(num_batch);\n"); +}; +IMPLEMENT_MODEL(CS); + +//---------------------------------------------------------------------------- +// WUM +//---------------------------------------------------------------------------- +class WUM : public WeightUpdateModels::Base +{ +public: + DECLARE_WEIGHT_UPDATE_MODEL(WUM, 0, 3, 2, 2); + + SET_VARS({{"num_pre_wum_syn_test", "unsigned int"}, {"num_post_wum_syn_test", "unsigned int"}, {"num_batch_wum_syn_test", "unsigned int"}}); + SET_PRE_VARS({{"num_wum_pre_test", "unsigned int"}, {"num_batch_wum_pre_test", "unsigned int"}}); + SET_POST_VARS({{"num_wum_post_test", "unsigned int"}, {"num_batch_wum_post_test", "unsigned int"}}); + + SET_SYNAPSE_DYNAMICS_CODE( + "$(num_pre_wum_syn_test)= $(num_pre);\n" + "$(num_post_wum_syn_test)= $(num_post);\n" + "$(num_batch_wum_syn_test) = $(num_batch);\n"); + SET_PRE_DYNAMICS_CODE( + "$(num_wum_pre_test)= $(num);\n" + "$(num_batch_wum_pre_test) = $(num_batch);\n"); + SET_POST_DYNAMICS_CODE( + "$(num_wum_post_test)= $(num);\n" + "$(num_batch_wum_post_test) = $(num_batch);\n"); +}; +IMPLEMENT_MODEL(WUM); + +void modelDefinition(ModelSpec &model) +{ +#ifdef CL_HPP_TARGET_OPENCL_VERSION + if(std::getenv("OPENCL_DEVICE") != nullptr) { + GENN_PREFERENCES.deviceSelectMethod = DeviceSelect::MANUAL; + GENN_PREFERENCES.manualDeviceID = std::atoi(std::getenv("OPENCL_DEVICE")); + } + if(std::getenv("OPENCL_PLATFORM") != nullptr) { + 
GENN_PREFERENCES.manualPlatformID = std::atoi(std::getenv("OPENCL_PLATFORM"));
+    }
+#endif
+    model.setDT(1.0);
+    model.setName("num");
+
+    model.addNeuronPopulation<NeuronModels::SpikeSource>("Pre", 2, {}, {});
+    model.addNeuronPopulation<Neuron>("Post", 4, {}, {0, 0});
+    model.addCurrentSource<CS>("CurrSource", "Post", {}, {0, 0});
+    auto *syn = model.addSynapsePopulation<WUM, PSM>(
+        "Syn", SynapseMatrixType::DENSE_INDIVIDUALG, NO_DELAY, "Pre", "Post",
+        {}, {0, 0, 0}, {0, 0}, {0, 0},
+        {}, {0, 0});
+
+    model.setPrecision(GENN_FLOAT);
+}
\ No newline at end of file
diff --git a/tests/features/num/num.sln b/tests/features/num/num.sln
new file mode 100644
index 0000000000..90a6f3c3cc
--- /dev/null
+++ b/tests/features/num/num.sln
@@ -0,0 +1,30 @@
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 2013
+VisualStudioVersion = 12.0.30501.0
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "num", "num.vcxproj", "{BD132E31-1A6E-4285-931A-50006B7F635C}"
+	ProjectSection(ProjectDependencies) = postProject
+		{489E2E2B-7899-40C3-BBE9-6A342CDCBBA0} = {489E2E2B-7899-40C3-BBE9-6A342CDCBBA0}
+	EndProjectSection
+EndProject
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "runner", "num_CODE\runner.vcxproj", "{489E2E2B-7899-40C3-BBE9-6A342CDCBBA0}"
+EndProject
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		Debug|x64 = Debug|x64
+		Release|x64 = Release|x64
+	EndGlobalSection
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+		{BD132E31-1A6E-4285-931A-50006B7F635C}.Debug|x64.ActiveCfg = Debug|x64
+		{BD132E31-1A6E-4285-931A-50006B7F635C}.Debug|x64.Build.0 = Debug|x64
+		{BD132E31-1A6E-4285-931A-50006B7F635C}.Release|x64.ActiveCfg = Release|x64
+		{BD132E31-1A6E-4285-931A-50006B7F635C}.Release|x64.Build.0 = Release|x64
+		{489E2E2B-7899-40C3-BBE9-6A342CDCBBA0}.Debug|x64.ActiveCfg = Debug|x64
+		{489E2E2B-7899-40C3-BBE9-6A342CDCBBA0}.Debug|x64.Build.0 = Debug|x64
+		{489E2E2B-7899-40C3-BBE9-6A342CDCBBA0}.Release|x64.ActiveCfg = Release|x64
+		{489E2E2B-7899-40C3-BBE9-6A342CDCBBA0}.Release|x64.Build.0 = Release|x64
+	EndGlobalSection
+	GlobalSection(SolutionProperties) = preSolution
+		HideSolutionNode = FALSE
+	EndGlobalSection
+EndGlobal
diff --git a/tests/features/num/num.vcxproj b/tests/features/num/num.vcxproj
new file mode 100644
index 0000000000..01b5f3920a
--- /dev/null
+++ b/tests/features/num/num.vcxproj
@@ -0,0 +1,63 @@
+
+
+
+
+      Debug
+      x64
+
+
+      Release
+      x64
+
+
+
+    {BD132E31-1A6E-4285-931A-50006B7F635C}
+
+
+
+
+
+
+
+
+    Application
+    true
+    $(DefaultPlatformToolset)
+    true
+    MultiByte
+
+
+
+
+
+
+
+
+
+    ./
+    $(Platform)\$(Configuration)\
+    test
+
+
+
+      Level3
+      MaxSpeed
+      Disabled
+      true
+      true
+      true
+      _SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING;%(PreprocessorDefinitions)
+      num_CODE;$(GTEST_DIR);$(GTEST_DIR)/include
+
+
+      true
+      true
+      true
+      runner_Release.lib;%(AdditionalDependencies)
+      runner_Debug.lib;%(AdditionalDependencies)
+
+
+
+
+
diff --git a/tests/features/num/runner_guid.txt b/tests/features/num/runner_guid.txt
new file mode 100644
index 0000000000..39fac6cfed
--- /dev/null
+++ b/tests/features/num/runner_guid.txt
@@ -0,0 +1 @@
+489E2E2B-7899-40C3-BBE9-6A342CDCBBA0
diff --git a/tests/features/num/test.cc b/tests/features/num/test.cc
new file mode 100644
index 0000000000..d087ea4bb6
--- /dev/null
+++ b/tests/features/num/test.cc
@@ -0,0 +1,71 @@
+//--------------------------------------------------------------------------
+/*! 
\file num/test.cc
+
+\brief Main test code that is part of the feature testing
+suite of minimal models with known analytic outcomes that are used for continuous integration testing.
+*/
+//--------------------------------------------------------------------------
+#include <algorithm>
+
+// Google test includes
+#include "gtest/gtest.h"
+
+// Auto-generated simulation code includes
+#include "num_CODE/definitions.h"
+
+// **NOTE** base-class for simulation tests must be
+// included after auto-generated globals are included
+#include "../../utils/simulation_test.h"
+
+//----------------------------------------------------------------------------
+// SimTest
+//----------------------------------------------------------------------------
+class SimTest : public SimulationTest
+{
+};
+
+TEST_F(SimTest, Num)
+{
+    // Simulate timestep
+    StepGeNN();
+
+    copyStateFromDevice();
+
+    // Neuron
+    EXPECT_TRUE(std::all_of(&num_testPost[0], &num_testPost[4],
+                            [](unsigned int n){ return (n == 4); }));
+    EXPECT_TRUE(std::all_of(&num_batch_testPost[0], &num_batch_testPost[4],
+                            [](unsigned int n){ return (n == 1); }));
+
+    // PSM
+    EXPECT_TRUE(std::all_of(&num_psm_testSyn[0], &num_psm_testSyn[4],
+                            [](unsigned int n){ return (n == 4); }));
+    EXPECT_TRUE(std::all_of(&num_batch_psm_testSyn[0], &num_batch_psm_testSyn[4],
+                            [](unsigned int n){ return (n == 1); }));
+
+    // Current source
+    EXPECT_TRUE(std::all_of(&num_cs_testCurrSource[0], &num_cs_testCurrSource[4],
+                            [](unsigned int n){ return (n == 4); }));
+    EXPECT_TRUE(std::all_of(&num_batch_cs_testCurrSource[0], &num_batch_cs_testCurrSource[4],
+                            [](unsigned int n){ return (n == 1); }));
+
+    // WUM pre
+    EXPECT_TRUE(std::all_of(&num_wum_pre_testSyn[0], &num_wum_pre_testSyn[2],
+                            [](unsigned int n){ return (n == 2); }));
+    EXPECT_TRUE(std::all_of(&num_batch_wum_pre_testSyn[0], &num_batch_wum_pre_testSyn[2],
+                            [](unsigned int n){ return (n == 1); }));
+
+    // WUM post
+    EXPECT_TRUE(std::all_of(&num_wum_post_testSyn[0], &num_wum_post_testSyn[4],
+                            [](unsigned int n){ return (n == 4); }));
+    EXPECT_TRUE(std::all_of(&num_batch_wum_post_testSyn[0], &num_batch_wum_post_testSyn[4],
+                            [](unsigned int n){ return (n == 1); }));
+
+    // WUM syn
+    EXPECT_TRUE(std::all_of(&num_pre_wum_syn_testSyn[0], &num_pre_wum_syn_testSyn[8],
+                            [](unsigned int n){ return (n == 2); }));
+    EXPECT_TRUE(std::all_of(&num_post_wum_syn_testSyn[0], &num_post_wum_syn_testSyn[8],
+                            [](unsigned int n){ return (n == 4); }));
+    EXPECT_TRUE(std::all_of(&num_batch_wum_syn_testSyn[0], &num_batch_wum_syn_testSyn[8],
+                            [](unsigned int n){ return (n == 1); }));
+}

From 7574ec28157173a2e8ca6b603da5abcef52b7dff Mon Sep 17 00:00:00 2001
From: neworderofjamie
Date: Thu, 2 Mar 2023 11:00:50 +0000
Subject: [PATCH 3/7] fixes for accessing $(num) and $(num_batch) in PSM/CS/WUM
 pre/WUM post

---
 src/genn/genn/code_generator/neuronUpdateGroupMerged.cc | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/genn/genn/code_generator/neuronUpdateGroupMerged.cc b/src/genn/genn/code_generator/neuronUpdateGroupMerged.cc
index 788c516afc..db53ccec14 100644
--- a/src/genn/genn/code_generator/neuronUpdateGroupMerged.cc
+++ b/src/genn/genn/code_generator/neuronUpdateGroupMerged.cc
@@ -182,6 +182,9 @@ void NeuronUpdateGroupMerged::generateNeuronUpdate(const BackendBase &backend, C
     const unsigned int batchSize = model.getBatchSize();
     const NeuronModels::Base *nm = getArchetype().getNeuronModel();
 
+    popSubs.addVarSubstitution("num_batch", std::to_string(batchSize));
+    popSubs.addVarSubstitution("num", 
"group->numNeurons"); + // Generate code to copy neuron state into local variable for(const auto &v : nm->getVars()) { if(v.access & VarAccessMode::READ_ONLY) { @@ -225,9 +228,6 @@ void NeuronUpdateGroupMerged::generateNeuronUpdate(const BackendBase &backend, C } Substitutions neuronSubs(&popSubs); - neuronSubs.addVarSubstitution("num_batch", std::to_string(batchSize)); - neuronSubs.addVarSubstitution("num", "group->numNeurons"); - neuronSubs.addVarSubstitution("Isyn", "Isyn"); if(getArchetype().isSpikeTimeRequired()) { From 0d61426676203db16a4b631354c8b001ca68f4fd Mon Sep 17 00:00:00 2001 From: neworderofjamie Date: Thu, 2 Mar 2023 11:15:49 +0000 Subject: [PATCH 4/7] unrelated git ignore fiddling --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 0995fc81cb..7ff8b84286 100644 --- a/.gitignore +++ b/.gitignore @@ -14,6 +14,7 @@ **/*.suo **/*.sdf **/*.vcxproj.user +**/*.vcxproj.filters **/*.opensdf **/x64 **/.vs @@ -21,6 +22,7 @@ **/*.iobj **/*.testdurations +/dist /lib /documentation/ /build/ From 7343d2c598366a11c44ab8b0bc10a3be3f9934ae Mon Sep 17 00:00:00 2001 From: neworderofjamie Date: Thu, 2 Mar 2023 11:16:01 +0000 Subject: [PATCH 5/7] extended test to custom updates --- tests/features/num/model.cc | 44 +++++++++++++++++++++++++++++++++++-- tests/features/num/test.cc | 20 +++++++++++++++-- 2 files changed, 60 insertions(+), 4 deletions(-) diff --git a/tests/features/num/model.cc b/tests/features/num/model.cc index fce62bba9d..b6e98dbacb 100644 --- a/tests/features/num/model.cc +++ b/tests/features/num/model.cc @@ -82,6 +82,39 @@ class WUM : public WeightUpdateModels::Base }; IMPLEMENT_MODEL(WUM); +//---------------------------------------------------------------------------- +// CU +//---------------------------------------------------------------------------- +class CU : public CustomUpdateModels::Base +{ + DECLARE_CUSTOM_UPDATE_MODEL(CU, 0, 2, 1); + + SET_VARS({{"num_test", "unsigned int"}, {"num_batch_test", "unsigned int"}}); + SET_VAR_REFS({{"ref", "unsigned int", VarAccessMode::READ_ONLY}}); + + SET_UPDATE_CODE( + "$(num_test)= $(num);\n" + "$(num_batch_test) = $(num_batch);\n"); +}; +IMPLEMENT_MODEL(CU); + +//---------------------------------------------------------------------------- +// CUWUM +//---------------------------------------------------------------------------- +class CUWUM : public CustomUpdateModels::Base +{ + DECLARE_CUSTOM_UPDATE_MODEL(CUWUM, 0, 3, 1); + + SET_VARS({{"num_pre_test", "unsigned int"}, {"num_post_test", "unsigned int"}, {"num_batch_test", "unsigned int"}}); + SET_VAR_REFS({{"ref", "unsigned int", VarAccessMode::READ_ONLY}}); + + SET_UPDATE_CODE( + "$(num_pre_test)= $(num_pre);\n" + "$(num_post_test)= $(num_post);\n" + "$(num_batch_test) = $(num_batch);\n"); +}; +IMPLEMENT_MODEL(CUWUM); + void modelDefinition(ModelSpec &model) { #ifdef CL_HPP_TARGET_OPENCL_VERSION @@ -97,12 +130,19 @@ void modelDefinition(ModelSpec &model) model.setName("num"); model.addNeuronPopulation("Pre", 2, {}, {}); - model.addNeuronPopulation("Post", 4, {}, {0, 0}); + auto *post = model.addNeuronPopulation("Post", 4, {}, {0, 0}); model.addCurrentSource("CurrSource", "Post", {}, {0, 0}); auto *syn = model.addSynapsePopulation( "Syn", SynapseMatrixType::DENSE_INDIVIDUALG, NO_DELAY, "Pre", "Post", {}, {0, 0, 0}, {0, 0}, {0, 0}, {}, {0, 0}); - + + CU::VarReferences varReferences(createVarRef(post, "num_test")); // ref + model.addCustomUpdate("CU", "Test", {}, {0, 0}, varReferences); + + CUWUM::WUVarReferences 
wuVarReferences(createWUVarRef(syn, "num_pre_wum_syn_test"));    // R
+    model.addCustomUpdate<CUWUM>("CUWM", "Test", {}, {0, 0, 0}, wuVarReferences);
+
+
     model.setPrecision(GENN_FLOAT);
 }
\ No newline at end of file
diff --git a/tests/features/num/test.cc b/tests/features/num/test.cc
index d087ea4bb6..b494c41fe1 100644
--- a/tests/features/num/test.cc
+++ b/tests/features/num/test.cc
@@ -26,7 +26,6 @@ class SimTest : public SimulationTest
 
 TEST_F(SimTest, Num)
 {
-    // Simulate timestep
+    // Simulate timestep and trigger custom update
     StepGeNN();
-
+    updateTest();
+
+    // Copy all state from device
     copyStateFromDevice();
 
     // Neuron
@@ -68,4 +70,18 @@ TEST_F(SimTest, Num)
                             [](unsigned int n){ return (n == 4); }));
     EXPECT_TRUE(std::all_of(&num_batch_wum_syn_testSyn[0], &num_batch_wum_syn_testSyn[8],
                             [](unsigned int n){ return (n == 1); }));
+
+    // CU
+    EXPECT_TRUE(std::all_of(&num_testCU[0], &num_testCU[4],
+                            [](unsigned int n){ return (n == 4); }));
+    EXPECT_TRUE(std::all_of(&num_batch_testCU[0], &num_batch_testCU[4],
+                            [](unsigned int n){ return (n == 1); }));
+
+    // CU WUM
+    EXPECT_TRUE(std::all_of(&num_pre_testCUWM[0], &num_pre_testCUWM[8],
+                            [](unsigned int n){ return (n == 2); }));
+    EXPECT_TRUE(std::all_of(&num_post_testCUWM[0], &num_post_testCUWM[8],
+                            [](unsigned int n){ return (n == 4); }));
+    EXPECT_TRUE(std::all_of(&num_batch_testCUWM[0], &num_batch_testCUWM[8],
+                            [](unsigned int n){ return (n == 1); }));
 }

From c7e2f5ad1bdad5b43547cb3776950ce180fd9324 Mon Sep 17 00:00:00 2001
From: neworderofjamie
Date: Thu, 2 Mar 2023 11:56:38 +0000
Subject: [PATCH 6/7] extended var_init test to check access to $(num_XX)
 variables in initialisation

---
 tests/features/var_init/model.cc | 155 +++++++++++++++++++++++--------
 tests/features/var_init/test.cc  |  49 ++++++----
 2 files changed, 148 insertions(+), 56 deletions(-)

diff --git a/tests/features/var_init/model.cc b/tests/features/var_init/model.cc
index 305d438e4d..08e007947a 100644
--- a/tests/features/var_init/model.cc
+++ b/tests/features/var_init/model.cc
@@ -15,9 +15,9 @@ suite of minimal models with known analytic outcomes that are used for continuou
 class Neuron : public NeuronModels::Base
 {
 public:
-    DECLARE_MODEL(Neuron, 0, 6);
+    DECLARE_MODEL(Neuron, 0, 7);
 
-    SET_VARS({{"constant_val", "scalar"}, {"uniform", "scalar"}, {"normal", "scalar"}, {"exponential", "scalar"}, {"gamma", "scalar"}, {"binomial", "unsigned int"}});
+    SET_VARS({{"num", "unsigned int"}, {"num_batch", "unsigned int"}, {"uniform", "scalar"}, {"normal", "scalar"}, {"exponential", "scalar"}, {"gamma", "scalar"}, {"binomial", "unsigned int"}});
 };
 IMPLEMENT_MODEL(Neuron);
 
 //----------------------------------------------------------------------------
 // CurrentSrc
 //----------------------------------------------------------------------------
 class CurrentSrc : public CurrentSourceModels::Base
 {
 public:
-    DECLARE_MODEL(CurrentSrc, 0, 6);
+    DECLARE_MODEL(CurrentSrc, 0, 7);
 
-    SET_VARS({{"constant_val", "scalar"}, {"uniform", "scalar"}, {"normal", "scalar"}, {"exponential", "scalar"}, {"gamma", "scalar"}, {"binomial", "unsigned int"}});
+    SET_VARS({{"num", "unsigned int"}, {"num_batch", "unsigned int"}, {"uniform", "scalar"}, {"normal", "scalar"}, {"exponential", "scalar"}, {"gamma", "scalar"}, {"binomial", "unsigned int"}});
 };
 IMPLEMENT_MODEL(CurrentSrc);
 
 //----------------------------------------------------------------------------
 // PostsynapticModel
 //----------------------------------------------------------------------------
 class PostsynapticModel : public PostsynapticModels::Base
 {
 public:
-    DECLARE_MODEL(PostsynapticModel, 0, 6);
+    DECLARE_MODEL(PostsynapticModel, 0, 7);
 
-    SET_VARS({{"pconstant_val", "scalar"}, {"puniform", "scalar"}, {"pnormal", "scalar"}, {"pexponential", "scalar"}, {"pgamma", "scalar"}, {"pbinomial", "unsigned 
int"}}); + SET_VARS({{"pnum", "unsigned int"}, {"pnum_batch", "unsigned int"}, {"puniform", "scalar"}, {"pnormal", "scalar"}, {"pexponential", "scalar"}, {"pgamma", "scalar"}, {"pbinomial", "unsigned int"}}); }; IMPLEMENT_MODEL(PostsynapticModel); @@ -51,11 +51,11 @@ IMPLEMENT_MODEL(PostsynapticModel); class WeightUpdateModel : public WeightUpdateModels::Base { public: - DECLARE_WEIGHT_UPDATE_MODEL(WeightUpdateModel, 0, 6, 6, 6); + DECLARE_WEIGHT_UPDATE_MODEL(WeightUpdateModel, 0, 8, 7, 7); - SET_VARS({{"constant_val", "scalar"}, {"uniform", "scalar"}, {"normal", "scalar"}, {"exponential", "scalar"}, {"gamma", "scalar"}, {"binomial", "unsigned int"}}); - SET_PRE_VARS({{"pre_constant_val", "scalar"}, {"pre_uniform", "scalar"}, {"pre_normal", "scalar"}, {"pre_exponential", "scalar"}, {"pre_gamma", "scalar"}, {"pre_binomial", "unsigned int"}}); - SET_POST_VARS({{"post_constant_val", "scalar"}, {"post_uniform", "scalar"}, {"post_normal", "scalar"}, {"post_exponential", "scalar"}, {"post_gamma", "scalar"}, {"post_binomial", "unsigned int"}}); + SET_VARS({{"num_pre", "unsigned int"}, {"num_post", "unsigned int"}, {"num_batch", "unsigned int"}, {"uniform", "scalar"}, {"normal", "scalar"}, {"exponential", "scalar"}, {"gamma", "scalar"}, {"binomial", "unsigned int"}}); + SET_PRE_VARS({{"pre_num", "unsigned int"}, {"pre_num_batch", "unsigned int"}, {"pre_uniform", "scalar"}, {"pre_normal", "scalar"}, {"pre_exponential", "scalar"}, {"pre_gamma", "scalar"}, {"pre_binomial", "unsigned int"}}); + SET_POST_VARS({{"post_num", "unsigned int"}, {"post_num_batch", "unsigned int"}, {"post_uniform", "scalar"}, {"post_normal", "scalar"}, {"post_exponential", "scalar"}, {"post_gamma", "scalar"}, {"post_binomial", "unsigned int"}}); }; IMPLEMENT_MODEL(WeightUpdateModel); @@ -65,9 +65,9 @@ IMPLEMENT_MODEL(WeightUpdateModel); class WeightUpdateModelNoPrePost : public WeightUpdateModels::Base { public: - DECLARE_WEIGHT_UPDATE_MODEL(WeightUpdateModelNoPrePost, 0, 6, 0, 0); + DECLARE_WEIGHT_UPDATE_MODEL(WeightUpdateModelNoPrePost, 0, 8, 0, 0); - SET_VARS({{"constant_val", "scalar"}, {"uniform", "scalar"}, {"normal", "scalar"}, {"exponential", "scalar"}, {"gamma", "scalar"}, {"binomial", "unsigned int"}}); + SET_VARS({{"num_pre", "unsigned int"}, {"num_post", "unsigned int"}, {"num_batch", "unsigned int"}, {"uniform", "scalar"}, {"normal", "scalar"}, {"exponential", "scalar"}, {"gamma", "scalar"}, {"binomial", "unsigned int"}}); }; IMPLEMENT_MODEL(WeightUpdateModelNoPrePost); @@ -77,13 +77,74 @@ IMPLEMENT_MODEL(WeightUpdateModelNoPrePost); class NopCustomUpdateModel : public CustomUpdateModels::Base { public: - DECLARE_CUSTOM_UPDATE_MODEL(NopCustomUpdateModel, 0, 6, 1); + DECLARE_CUSTOM_UPDATE_MODEL(NopCustomUpdateModel, 0, 7, 1); - SET_VARS({{"constant_val", "scalar"}, {"uniform", "scalar"}, {"normal", "scalar"}, {"exponential", "scalar"}, {"gamma", "scalar"}, {"binomial", "unsigned int"}}); - SET_VAR_REFS({{"R", "scalar", VarAccessMode::READ_WRITE}}) + SET_VARS({{"num", "unsigned int"}, {"num_batch", "unsigned int"}, {"uniform", "scalar"}, {"normal", "scalar"}, {"exponential", "scalar"}, {"gamma", "scalar"}, {"binomial", "unsigned int"}}); + SET_VAR_REFS({{"R", "unsigned int", VarAccessMode::READ_WRITE}}) }; IMPLEMENT_MODEL(NopCustomUpdateModel); +//---------------------------------------------------------------------------- +// NopCustomUpdateModelWU +//---------------------------------------------------------------------------- +class NopCustomUpdateModelWU : public CustomUpdateModels::Base +{ +public: + 
DECLARE_CUSTOM_UPDATE_MODEL(NopCustomUpdateModelWU, 0, 8, 1);
+
+    SET_VARS({{"num_pre", "unsigned int"}, {"num_post", "unsigned int"}, {"num_batch", "unsigned int"}, {"uniform", "scalar"}, {"normal", "scalar"}, {"exponential", "scalar"}, {"gamma", "scalar"}, {"binomial", "unsigned int"}});
+    SET_VAR_REFS({{"R", "unsigned int", VarAccessMode::READ_WRITE}})
+};
+IMPLEMENT_MODEL(NopCustomUpdateModelWU);
+
+//----------------------------------------------------------------------------
+// NumBatch
+//----------------------------------------------------------------------------
+class NumBatch : public InitVarSnippet::Base
+{
+public:
+    DECLARE_SNIPPET(NumBatch, 0);
+
+    SET_CODE("$(value) = $(num_batch);");
+};
+IMPLEMENT_SNIPPET(NumBatch);
+
+//----------------------------------------------------------------------------
+// Num
+//----------------------------------------------------------------------------
+class Num : public InitVarSnippet::Base
+{
+public:
+    DECLARE_SNIPPET(Num, 0);
+
+    SET_CODE("$(value) = $(num);");
+};
+IMPLEMENT_SNIPPET(Num);
+
+//----------------------------------------------------------------------------
+// NumPre
+//----------------------------------------------------------------------------
+class NumPre : public InitVarSnippet::Base
+{
+public:
+    DECLARE_SNIPPET(NumPre, 0);
+
+    SET_CODE("$(value) = $(num_pre);");
+};
+IMPLEMENT_SNIPPET(NumPre);
+
+//----------------------------------------------------------------------------
+// NumPost
+//----------------------------------------------------------------------------
+class NumPost : public InitVarSnippet::Base
+{
+public:
+    DECLARE_SNIPPET(NumPost, 0);
+
+    SET_CODE("$(value) = $(num_post);");
+};
+IMPLEMENT_SNIPPET(NumPost);
+
 void modelDefinition(ModelSpec &model)
 {
 #ifdef CL_HPP_TARGET_OPENCL_VERSION
@@ -122,7 +183,8 @@ void modelDefinition(ModelSpec &model)
 
     // Neuron parameters
     Neuron::VarValues neuronInit(
-        13.0,
+        initVar<Num>(),
+        initVar<NumBatch>(),
         initVar<InitVarSnippet::Uniform>(uniformParams),
         initVar<InitVarSnippet::Normal>(normalParams),
         initVar<InitVarSnippet::Exponential>(exponentialParams),
         initVar<InitVarSnippet::Gamma>(gammaParams),
         initVar<InitVarSnippet::Binomial>(binomialParams));
 
     // Current source parameters
     CurrentSrc::VarValues currentSourceInit(
-        13.0,
+        initVar<Num>(),
+        initVar<NumBatch>(),
         initVar<InitVarSnippet::Uniform>(uniformParams),
         initVar<InitVarSnippet::Normal>(normalParams),
         initVar<InitVarSnippet::Exponential>(exponentialParams),
         initVar<InitVarSnippet::Gamma>(gammaParams),
         initVar<InitVarSnippet::Binomial>(binomialParams));
 
     // PostsynapticModel parameters
     PostsynapticModel::VarValues postsynapticInit(
-        13.0,
+        initVar<Num>(),
+        initVar<NumBatch>(),
         initVar<InitVarSnippet::Uniform>(uniformParams),
         initVar<InitVarSnippet::Normal>(normalParams),
         initVar<InitVarSnippet::Exponential>(exponentialParams),
         initVar<InitVarSnippet::Gamma>(gammaParams),
         initVar<InitVarSnippet::Binomial>(binomialParams));
 
     // WeightUpdateModel parameters
     WeightUpdateModel::VarValues weightUpdateInit(
-        13.0,
+        initVar<NumPre>(),
+        initVar<NumPost>(),
+        initVar<NumBatch>(),
         initVar<InitVarSnippet::Uniform>(uniformParams),
         initVar<InitVarSnippet::Normal>(normalParams),
         initVar<InitVarSnippet::Exponential>(exponentialParams),
         initVar<InitVarSnippet::Gamma>(gammaParams),
         initVar<InitVarSnippet::Binomial>(binomialParams));
     WeightUpdateModel::PreVarValues weightUpdatePreInit(
-        13.0,
+        initVar<Num>(),
+        initVar<NumBatch>(),
         initVar<InitVarSnippet::Uniform>(uniformParams),
         initVar<InitVarSnippet::Normal>(normalParams),
         initVar<InitVarSnippet::Exponential>(exponentialParams),
         initVar<InitVarSnippet::Gamma>(gammaParams),
         initVar<InitVarSnippet::Binomial>(binomialParams));
     WeightUpdateModel::PostVarValues weightUpdatePostInit(
-        13.0,
+        initVar<Num>(),
+        initVar<NumBatch>(),
         initVar<InitVarSnippet::Uniform>(uniformParams),
         initVar<InitVarSnippet::Normal>(normalParams),
         initVar<InitVarSnippet::Exponential>(exponentialParams),
         initVar<InitVarSnippet::Gamma>(gammaParams),
         initVar<InitVarSnippet::Binomial>(binomialParams));
-
+
+    // CustomUpdateModel parameters
     NopCustomUpdateModel::VarValues customUpdateInit(
-        13.0,
+        initVar<Num>(),
+        initVar<NumBatch>(),
         initVar<InitVarSnippet::Uniform>(uniformParams),
         initVar<InitVarSnippet::Normal>(normalParams),
         initVar<InitVarSnippet::Exponential>(exponentialParams),
         initVar<InitVarSnippet::Gamma>(gammaParams),
         initVar<InitVarSnippet::Binomial>(binomialParams));
+    NopCustomUpdateModelWU::VarValues customUpdateWUInit(
+        
initVar<NumPre>(),
+        initVar<NumPost>(),
+        initVar<NumBatch>(),
+        initVar<InitVarSnippet::Uniform>(uniformParams),
+        initVar<InitVarSnippet::Normal>(normalParams),
+        initVar<InitVarSnippet::Exponential>(exponentialParams),
+        initVar<InitVarSnippet::Gamma>(gammaParams),
+        initVar<InitVarSnippet::Binomial>(binomialParams));
@@ -212,37 +291,37 @@ void modelDefinition(ModelSpec &model)
                                                                   initToeplitzConnectivity<InitToeplitzConnectivitySnippet::Conv2D>(convParams));
 
     // Custom updates
-    NopCustomUpdateModel::VarReferences neuronVarReferences(createVarRef(ng, "constant_val")); // R
+    NopCustomUpdateModel::VarReferences neuronVarReferences(createVarRef(ng, "num")); // R
     model.addCustomUpdate<NopCustomUpdateModel>("NeuronCustomUpdate", "Test",
                                                 {}, customUpdateInit, neuronVarReferences);
 
-    NopCustomUpdateModel::VarReferences currentSourceVarReferences(createVarRef(cs, "constant_val")); // R
+    NopCustomUpdateModel::VarReferences currentSourceVarReferences(createVarRef(cs, "num")); // R
     model.addCustomUpdate<NopCustomUpdateModel>("CurrentSourceCustomUpdate", "Test",
                                                 {}, customUpdateInit, currentSourceVarReferences);
 
-    NopCustomUpdateModel::VarReferences psmVarReferences(createPSMVarRef(sgDense, "pconstant_val")); // R
+    NopCustomUpdateModel::VarReferences psmVarReferences(createPSMVarRef(sgDense, "pnum")); // R
     model.addCustomUpdate<NopCustomUpdateModel>("PSMCustomUpdate", "Test",
                                                 {}, customUpdateInit, neuronVarReferences);
 
-    NopCustomUpdateModel::VarReferences wuPreVarReferences(createWUPreVarRef(sgSparse, "pre_constant_val")); // R
+    NopCustomUpdateModel::VarReferences wuPreVarReferences(createWUPreVarRef(sgSparse, "pre_num")); // R
     model.addCustomUpdate<NopCustomUpdateModel>("WUPreCustomUpdate", "Test",
                                                 {}, customUpdateInit, wuPreVarReferences);
 
-    NopCustomUpdateModel::VarReferences wuPostVarReferences(createWUPostVarRef(sgDense, "post_constant_val")); // R
+    NopCustomUpdateModel::VarReferences wuPostVarReferences(createWUPostVarRef(sgDense, "post_num")); // R
     model.addCustomUpdate<NopCustomUpdateModel>("WUPostCustomUpdate", "Test",
-                          {}, customUpdateInit, wuPostVarReferences);
+                                                {}, customUpdateInit, wuPostVarReferences);
 
-    NopCustomUpdateModel::WUVarReferences wuSparseVarReferences(createWUVarRef(sgSparse, "constant_val")); // R
-    model.addCustomUpdate<NopCustomUpdateModel>("WUSparseCustomUpdate", "Test",
-                                                {}, customUpdateInit, wuSparseVarReferences);
+    NopCustomUpdateModelWU::WUVarReferences wuSparseVarReferences(createWUVarRef(sgSparse, "num_pre")); // R
+    model.addCustomUpdate<NopCustomUpdateModelWU>("WUSparseCustomUpdate", "Test",
+                                                  {}, customUpdateWUInit, wuSparseVarReferences);
 
-    NopCustomUpdateModel::WUVarReferences wuDenseVarReferences(createWUVarRef(sgDense, "constant_val")); // R
-    model.addCustomUpdate<NopCustomUpdateModel>("WUDenseCustomUpdate", "Test",
-                                                {}, customUpdateInit, wuDenseVarReferences);
+    NopCustomUpdateModelWU::WUVarReferences wuDenseVarReferences(createWUVarRef(sgDense, "num_pre")); // R
+    model.addCustomUpdate<NopCustomUpdateModelWU>("WUDenseCustomUpdate", "Test",
+                                                  {}, customUpdateWUInit, wuDenseVarReferences);
 
-    NopCustomUpdateModel::WUVarReferences wuKernelVarReferences(createWUVarRef(sgKernel, "constant_val")); // R
-    model.addCustomUpdate<NopCustomUpdateModel>("WUKernelCustomUpdate", "Test",
-                                                {}, customUpdateInit, wuKernelVarReferences);
+    NopCustomUpdateModelWU::WUVarReferences wuKernelVarReferences(createWUVarRef(sgKernel, "num_pre")); // R
+    model.addCustomUpdate<NopCustomUpdateModelWU>("WUKernelCustomUpdate", "Test",
+                                                  {}, customUpdateWUInit, wuKernelVarReferences);
 
     model.setPrecision(GENN_FLOAT);
 }
diff --git a/tests/features/var_init/test.cc b/tests/features/var_init/test.cc
index c37fb00c3d..ada18009c0 100644
--- a/tests/features/var_init/test.cc
+++ b/tests/features/var_init/test.cc
@@ -26,7 +26,6 @@ double gammaCDF4(double x)
 // Macro to generate full set of tests for a particular model
 #define PROB_TEST(PREFIX, SUFFIX, N) \
     { \
-        EXPECT_TRUE(std::all_of(&PREFIX##constant_val##SUFFIX[0], &PREFIX##constant_val##SUFFIX[N], [](scalar x){ return (x 
== 13.0); })); \ const double PREFIX##uniform##SUFFIX##Prob = getProb(PREFIX##uniform##SUFFIX, N, Stats::uniformCDF); \ EXPECT_GT(PREFIX##uniform##SUFFIX##Prob, p); \ const double PREFIX##normal##SUFFIX##Prob = getProb(PREFIX##normal##SUFFIX, N, Stats::normalCDF); \ @@ -35,8 +34,22 @@ double gammaCDF4(double x) EXPECT_GT(PREFIX##exponential##SUFFIX##Prob, p); \ const double PREFIX##gamma##SUFFIX##Prob = getProb(PREFIX##gamma##SUFFIX, N, gammaCDF4); \ EXPECT_GT(PREFIX##gamma##SUFFIX##Prob, p); \ - } \ + } +#define PROB_TEST_NEURON(PREFIX, SUFFIX, NUM) \ + { \ + EXPECT_TRUE(std::all_of(&PREFIX##num##SUFFIX[0], &PREFIX##num##SUFFIX[NUM], [](unsigned int x){ return (x == NUM); })); \ + EXPECT_TRUE(std::all_of(&PREFIX##num_batch##SUFFIX[0], &PREFIX##num_batch##SUFFIX[NUM], [](unsigned int x){ return (x == 1); })); \ + PROB_TEST(PREFIX, SUFFIX, NUM) \ + } + +#define PROB_TEST_SYNAPSE(PREFIX, SUFFIX, NUM, NUM_PRE, NUM_POST) \ + { \ + EXPECT_TRUE(std::all_of(&PREFIX##num_pre##SUFFIX[0], &PREFIX##num_pre##SUFFIX[NUM], [](unsigned int x){ return (x == NUM_PRE); })); \ + EXPECT_TRUE(std::all_of(&PREFIX##num_post##SUFFIX[0], &PREFIX##num_post##SUFFIX[NUM], [](unsigned int x){ return (x == NUM_POST); })); \ + EXPECT_TRUE(std::all_of(&PREFIX##num_batch##SUFFIX[0], &PREFIX##num_batch##SUFFIX[NUM], [](unsigned int x){ return (x == 1); })); \ + PROB_TEST(PREFIX, SUFFIX, NUM) \ + } //---------------------------------------------------------------------------- // SimTest //---------------------------------------------------------------------------- @@ -86,21 +99,21 @@ TEST_F(SimTest, Vars) pullWUKernelCustomUpdateStateFromDevice(); // Test host-generated vars - PROB_TEST(, Pop, 50000); - PROB_TEST(, CurrSource, 50000); - PROB_TEST(p, Dense, 50000); - PROB_TEST(, Dense, 50000); - PROB_TEST(, Sparse, 50000); - PROB_TEST(pre_, Sparse, 50000); - PROB_TEST(post_, Sparse, 50000); - PROB_TEST(, Kernel, 3 * 3 * 5 * 5); - PROB_TEST(, NeuronCustomUpdate, 50000); - PROB_TEST(, CurrentSourceCustomUpdate, 50000); - PROB_TEST(, PSMCustomUpdate, 50000); - PROB_TEST(, WUPreCustomUpdate, 50000); - PROB_TEST(, WUPostCustomUpdate, 50000); - PROB_TEST(, WUDenseCustomUpdate, 50000); - PROB_TEST(, WUSparseCustomUpdate, 50000); - PROB_TEST(, WUKernelCustomUpdate, 3 * 3 * 5 * 5); + PROB_TEST_NEURON(, Pop, 50000); + PROB_TEST_NEURON(, CurrSource, 50000); + PROB_TEST_NEURON(p, Dense, 50000); + PROB_TEST_SYNAPSE(, Dense, 50000, 1, 50000); + PROB_TEST_SYNAPSE(, Sparse, 50000, 50000, 50000); + PROB_TEST_NEURON(pre_, Sparse, 50000); + PROB_TEST_NEURON(post_, Sparse, 50000); + PROB_TEST_SYNAPSE(, Kernel, 3 * 3 * 5 * 5, 50000, 50000); + PROB_TEST_NEURON(, NeuronCustomUpdate, 50000); + PROB_TEST_NEURON(, CurrentSourceCustomUpdate, 50000); + PROB_TEST_NEURON(, PSMCustomUpdate, 50000); + PROB_TEST_NEURON(, WUPreCustomUpdate, 50000); + PROB_TEST_NEURON(, WUPostCustomUpdate, 50000); + PROB_TEST_SYNAPSE(, WUDenseCustomUpdate, 50000, 1, 50000); + PROB_TEST_SYNAPSE(, WUSparseCustomUpdate, 50000, 50000, 50000); + PROB_TEST_SYNAPSE(, WUKernelCustomUpdate, 3 * 3 * 5 * 5, 50000, 50000); } From 0c9c11060106525473e4d197e635a83d5ac7fa1a Mon Sep 17 00:00:00 2001 From: neworderofjamie Date: Thu, 2 Mar 2023 11:57:16 +0000 Subject: [PATCH 7/7] always expose numSrcNeurons and numTrgNeurons in merged synapse (and custom update WU) groups --- src/genn/genn/code_generator/groupMerged.cc | 11 ++++++----- src/genn/genn/code_generator/initGroupMerged.cc | 9 +++++---- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git 
a/src/genn/genn/code_generator/groupMerged.cc b/src/genn/genn/code_generator/groupMerged.cc
index 6dc11df616..262fe58283 100644
--- a/src/genn/genn/code_generator/groupMerged.cc
+++ b/src/genn/genn/code_generator/groupMerged.cc
@@ -862,14 +862,15 @@ SynapseGroupMergedBase::SynapseGroupMergedBase(size_t index, const std::string &
                        || (role == Role::SynapseDynamics));
     const WeightUpdateModels::Base *wum = getArchetype().getWUModel();
 
+    addField("unsigned int", "numSrcNeurons",
+             [](const SynapseGroupInternal &sg, size_t) { return std::to_string(sg.getSrcNeuronGroup()->getNumNeurons()); });
+    addField("unsigned int", "numTrgNeurons",
+             [](const SynapseGroupInternal &sg, size_t) { return std::to_string(sg.getTrgNeuronGroup()->getNumNeurons()); });
+
     // If role isn't an init role or weights aren't kernel
-    if(role != Role::Init || !(getArchetype().getMatrixType() & SynapseMatrixWeight::KERNEL)) {
+    if (role != Role::Init || !(getArchetype().getMatrixType() & SynapseMatrixWeight::KERNEL)) {
         addField("unsigned int", "rowStride",
                  [&backend](const SynapseGroupInternal &sg, size_t) { return std::to_string(backend.getSynapticMatrixRowStride(sg)); });
-        addField("unsigned int", "numSrcNeurons",
-                 [](const SynapseGroupInternal &sg, size_t) { return std::to_string(sg.getSrcNeuronGroup()->getNumNeurons()); });
-        addField("unsigned int", "numTrgNeurons",
-                 [](const SynapseGroupInternal &sg, size_t) { return std::to_string(sg.getTrgNeuronGroup()->getNumNeurons()); });
     }
 
     if(role == Role::PostsynapticUpdate || role == Role::SparseInit) {
diff --git a/src/genn/genn/code_generator/initGroupMerged.cc b/src/genn/genn/code_generator/initGroupMerged.cc
index 64b3bc4526..ddb0992ad2 100644
--- a/src/genn/genn/code_generator/initGroupMerged.cc
+++ b/src/genn/genn/code_generator/initGroupMerged.cc
@@ -787,6 +787,11 @@ CustomWUUpdateInitGroupMerged::CustomWUUpdateInitGroupMerged(size_t index, const
                                                              const std::vector<std::reference_wrapper<const CustomUpdateWUInternal>> &groups)
 :   CustomUpdateInitGroupMergedBase<CustomUpdateWUInternal>(index, precision, backend, groups)
 {
+    addField("unsigned int", "numSrcNeurons",
+             [](const CustomUpdateWUInternal &cg, size_t) { return std::to_string(cg.getSynapseGroup()->getSrcNeuronGroup()->getNumNeurons()); });
+    addField("unsigned int", "numTrgNeurons",
+             [](const CustomUpdateWUInternal &cg, size_t) { return std::to_string(cg.getSynapseGroup()->getTrgNeuronGroup()->getNumNeurons()); });
+
     if(getArchetype().getSynapseGroup()->getMatrixType() & SynapseMatrixWeight::KERNEL) {
         // Loop through kernel size dimensions
         for (size_t d = 0; d < getArchetype().getSynapseGroup()->getKernelSize().size(); d++) {
@@ -800,10 +805,6 @@ CustomWUUpdateInitGroupMerged::CustomWUUpdateInitGroupMerged(size_t index, const
     else {
         addField("unsigned int", "rowStride",
                  [&backend](const CustomUpdateWUInternal &cg, size_t) { return std::to_string(backend.getSynapticMatrixRowStride(*cg.getSynapseGroup())); });
-        addField("unsigned int", "numSrcNeurons",
-                 [](const CustomUpdateWUInternal &cg, size_t) { return std::to_string(cg.getSynapseGroup()->getSrcNeuronGroup()->getNumNeurons()); });
-        addField("unsigned int", "numTrgNeurons",
-                 [](const CustomUpdateWUInternal &cg, size_t) { return std::to_string(cg.getSynapseGroup()->getTrgNeuronGroup()->getNumNeurons()); });
     }
 }
 //----------------------------------------------------------------------------
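
Usage note: a minimal user-side sketch (not part of the patches above, and assuming the same GeNN 4.x macro API used by the tests in this series) of how the newly exposed read-only variables can be used from model code strings. The names NormalisedWeight and MeanInput are hypothetical, invented for this example.

    #include "modelSpec.h"

    // Hypothetical variable initialisation snippet: initialise each synaptic
    // weight to 1/num_pre, using the $(num_pre) variable made available to
    // weight update variable initialisation by patches 6 and 7
    class NormalisedWeight : public InitVarSnippet::Base
    {
    public:
        DECLARE_SNIPPET(NormalisedWeight, 0);

        SET_CODE("$(value) = 1.0 / (scalar)$(num_pre);");
    };
    IMPLEMENT_SNIPPET(NormalisedWeight);

    // Hypothetical neuron model: scale input current by population size and
    // batch size via the $(num) and $(num_batch) variables exposed to neuron
    // sim code by patches 1 and 3
    class MeanInput : public NeuronModels::Base
    {
    public:
        DECLARE_MODEL(MeanInput, 0, 1);

        SET_VARS({{"scaledIsyn", "scalar"}});

        SET_SIM_CODE("$(scaledIsyn) = $(Isyn) / (scalar)($(num) * $(num_batch));\n");
    };
    IMPLEMENT_MODEL(MeanInput);

Because $(num_pre), $(num_post), $(num) and $(num_batch) are substituted at code-generation time (as string substitutions or merged-group fields such as group->numSrcNeurons), snippets like these stay valid when populations are resized without any change to user code.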