diff --git a/bgate/dareceptors_string.go b/bgate/dareceptors_string.go
new file mode 100644
index 00000000..3009acd4
--- /dev/null
+++ b/bgate/dareceptors_string.go
@@ -0,0 +1,40 @@
+// Code generated by "stringer -type=DaReceptors"; DO NOT EDIT.
+
+package bgate
+
+import (
+	"errors"
+	"strconv"
+)
+
+var _ = errors.New("dummy error")
+
+func _() {
+	// An "invalid array index" compiler error signifies that the constant values have changed.
+	// Re-run the stringer command to generate them again.
+	var x [1]struct{}
+	_ = x[D1R-0]
+	_ = x[D2R-1]
+	_ = x[DaReceptorsN-2]
+}
+
+const _DaReceptors_name = "D1RD2RDaReceptorsN"
+
+var _DaReceptors_index = [...]uint8{0, 3, 6, 18}
+
+func (i DaReceptors) String() string {
+	if i < 0 || i >= DaReceptors(len(_DaReceptors_index)-1) {
+		return "DaReceptors(" + strconv.FormatInt(int64(i), 10) + ")"
+	}
+	return _DaReceptors_name[_DaReceptors_index[i]:_DaReceptors_index[i+1]]
+}
+
+func (i *DaReceptors) FromString(s string) error {
+	for j := 0; j < len(_DaReceptors_index)-1; j++ {
+		if s == _DaReceptors_name[_DaReceptors_index[j]:_DaReceptors_index[j+1]] {
+			*i = DaReceptors(j)
+			return nil
+		}
+	}
+	return errors.New("String: " + s + " is not a valid option for type: DaReceptors")
+}
diff --git a/bgate/gp.go b/bgate/gp.go
index b275e8ce..c8daef27 100644
--- a/bgate/gp.go
+++ b/bgate/gp.go
@@ -15,8 +15,7 @@ import (
 // GPLayer represents a globus pallidus (GP) layer of the BG (GPeOut, GPeIn, GPeTA, GPi),
 // comprised of tonically active inhibitory neurons modulated by the Matrix Go / NoGo pathways.
 type GPLayer struct {
-	leabra.Layer
-	DA float32 `inactive:"+" desc:"dopamine value for this layer"`
+	Layer
 }
 
 var KiT_GPLayer = kit.Types.AddType(&GPLayer{}, leabra.LayerProps)
@@ -107,28 +106,6 @@ func (ly *GPLayer) Defaults() {
 	ly.UpdateParams()
 }
 
-// DALayer interface:
-
-func (ly *GPLayer) GetDA() float32 { return ly.DA }
-func (ly *GPLayer) SetDA(da float32) { ly.DA = da }
-
-/*
-// UnitValByIdx returns value of given PBWM-specific variable by variable index
-// and flat neuron index (from layer or neuron-specific one).
-func (ly *GPLayer) UnitValByIdx(vidx NeuronVars, idx int) float32 {
-	switch vidx {
-	case DA:
-		return ly.DA
-	}
-	return 0
-}
-*/
-
-func (ly *GPLayer) InitActs() {
-	ly.Layer.InitActs()
-	ly.DA = 0
-}
-
 //////////////////////////////////////////////////////////////////////
 // GPeInPrjn
 
diff --git a/bgate/gpi.go b/bgate/gpi.go
index d69a6d1f..7928961a 100644
--- a/bgate/gpi.go
+++ b/bgate/gpi.go
@@ -53,18 +53,6 @@ func (ly *GPiLayer) Defaults() {
 	ly.UpdateParams()
 }
 
-/*
-// UnitValByIdx returns value of given PBWM-specific variable by variable index
-// and flat neuron index (from layer or neuron-specific one).
-func (ly *GPiLayer) UnitValByIdx(vidx NeuronVars, idx int) float32 {
-	switch vidx {
-	case DA:
-		return ly.DA
-	}
-	return 0
-}
-*/
-
 // Build constructs the layer state, including calling Build on the projections
 // you MUST have properly configured the Inhib.Pool.On setting by this point
 // to properly allocate Pools for the unit groups if necessary.
diff --git a/bgate/layer.go b/bgate/layer.go
index 0f7adb5b..2c790574 100644
--- a/bgate/layer.go
+++ b/bgate/layer.go
@@ -4,8 +4,65 @@
 package bgate
 
+import (
+	"fmt"
+
+	"github.com/chewxy/math32"
+	"github.com/emer/leabra/leabra"
+	"github.com/goki/ki/kit"
+)
+
 // Layer is the base layer type for BGate framework.
-// It has variables for the layer-level neuromodulatory variables: dopamine, ACh.
-// Because the routines for accessing these variables are somewhat long
+// Adds a dopamine variable to base Leabra layer type.
type Layer struct { + leabra.Layer + DA float32 `inactive:"+" desc:"dopamine value for this layer"` +} + +var KiT_Layer = kit.Types.AddType(&Layer{}, leabra.LayerProps) + +// DALayer interface: + +func (ly *Layer) GetDA() float32 { return ly.DA } +func (ly *Layer) SetDA(da float32) { ly.DA = da } + +// UnitVarIdx returns the index of given variable within the Neuron, +// according to UnitVarNames() list (using a map to lookup index), +// or -1 and error message if not found. +func (ly *Layer) UnitVarIdx(varNm string) (int, error) { + vidx, err := ly.Layer.UnitVarIdx(varNm) + if err == nil { + return vidx, err + } + if varNm != "DA" { + return -1, fmt.Errorf("bgate.NeuronVars: variable named: %s not found", varNm) + } + nn := len(leabra.NeuronVars) + return nn, nil +} + +// UnitVal1D returns value of given variable index on given unit, using 1-dimensional index. +// returns NaN on invalid index. +// This is the core unit var access method used by other methods, +// so it is the only one that needs to be updated for derived layer types. +func (ly *Layer) UnitVal1D(varIdx int, idx int) float32 { + nn := len(leabra.NeuronVars) + if varIdx < 0 || varIdx > nn { + return math32.NaN() + } + if varIdx < nn { + return ly.Layer.UnitVal1D(varIdx, idx) + } + if idx < 0 || idx >= len(ly.Neurons) { + return math32.NaN() + } + if varIdx != nn { + return math32.NaN() + } + return ly.DA +} + +func (ly *Layer) InitActs() { + ly.Layer.InitActs() + ly.DA = 0 } diff --git a/bgate/matrix.go b/bgate/matrix.go index a6a1bd37..16c96b6f 100644 --- a/bgate/matrix.go +++ b/bgate/matrix.go @@ -5,8 +5,10 @@ package bgate import ( + "fmt" "strings" + "github.com/chewxy/math32" "github.com/emer/leabra/leabra" "github.com/goki/ki/kit" ) @@ -26,10 +28,9 @@ func (mp *MatrixParams) Defaults() { // MatrixLayer represents the dorsal matrisome MSN's that are the main // Go / NoGo gating units in BG. D1R = Go, D2R = NoGo. type MatrixLayer struct { - leabra.Layer + Layer DaR DaReceptors `desc:"dominant type of dopamine receptor -- D1R for Go pathway, D2R for NoGo"` Matrix MatrixParams `view:"inline" desc:"matrix parameters"` - DA float32 `inactive:"+" desc:"dopamine value for this layer"` DALrn float32 `inactive:"+" desc:"effective learning dopamine value for this layer: reflects DaR and Gains"` ACh float32 `inactive:"+" desc:"acetylcholine value from TAN tonically active neurons reflecting the absolute value of reward or CS predictions thereof -- used for resetting the trace of matrix learning"` } @@ -99,11 +100,6 @@ func (ly *MatrixLayer) Defaults() { ly.UpdateParams() } -// DALayer interface: - -func (ly *MatrixLayer) GetDA() float32 { return ly.DA } -func (ly *MatrixLayer) SetDA(da float32) { ly.DA = da } - // AChLayer interface: func (ly *MatrixLayer) GetACh() float32 { return ly.ACh } @@ -123,18 +119,6 @@ func (ly *MatrixLayer) DALrnFmDA(da float32) float32 { return da } -/* -// UnitValByIdx returns value of given PBWM-specific variable by variable index -// and flat neuron index (from layer or neuron-specific one). -func (ly *MatrixLayer) UnitValByIdx(vidx NeuronVars, idx int) float32 { - switch vidx { - case DA: - return ly.DA - } - return 0 -} -*/ - func (ly *MatrixLayer) InitActs() { ly.Layer.InitActs() ly.DA = 0 @@ -149,3 +133,46 @@ func (ly *MatrixLayer) ActFmG(ltime *leabra.Time) { ly.DALrn = ly.DALrnFmDA(ly.DA) ly.Layer.ActFmG(ltime) } + +// UnitVarIdx returns the index of given variable within the Neuron, +// according to UnitVarNames() list (using a map to lookup index), +// or -1 and error message if not found. 
+func (ly *MatrixLayer) UnitVarIdx(varNm string) (int, error) {
+	vidx, err := ly.Layer.UnitVarIdx(varNm)
+	if err == nil {
+		return vidx, err
+	}
+	if !(varNm == "DALrn" || varNm == "ACh") {
+		return -1, fmt.Errorf("bgate.NeuronVars: variable named: %s not found", varNm)
+	}
+	nn := len(leabra.NeuronVars)
+	// DA is at nn (handled by the embedded Layer); DALrn and ACh follow it
+	if varNm == "DALrn" {
+		return nn + 1, nil
+	}
+	return nn + 2, nil
+}
+
+// UnitVal1D returns value of given variable index on given unit, using 1-dimensional index.
+// returns NaN on invalid index.
+// This is the core unit var access method used by other methods,
+// so it is the only one that needs to be updated for derived layer types.
+func (ly *MatrixLayer) UnitVal1D(varIdx int, idx int) float32 {
+	nn := len(leabra.NeuronVars)
+	if varIdx < 0 || varIdx > nn+2 { // nn = DA, nn+1 = DALrn, nn+2 = ACh
+		return math32.NaN()
+	}
+	if varIdx <= nn { // base vars and DA are handled by the embedded Layer
+		return ly.Layer.UnitVal1D(varIdx, idx)
+	}
+	if idx < 0 || idx >= len(ly.Neurons) {
+		return math32.NaN()
+	}
+	if varIdx > nn+2 {
+		return math32.NaN()
+	}
+	if varIdx == nn+1 { // DALrn
+		return ly.DALrn
+	}
+	return ly.ACh
+}
diff --git a/bgate/matrix_trace.go b/bgate/matrix_trace.go
index a7494271..1938d3df 100644
--- a/bgate/matrix_trace.go
+++ b/bgate/matrix_trace.go
@@ -5,6 +5,8 @@
 package bgate
 
 import (
+	"fmt"
+
 	"github.com/chewxy/math32"
 	"github.com/emer/leabra/leabra"
 	"github.com/goki/mat32"
@@ -27,6 +29,17 @@ func (sy *TraceSyn) VarByName(varNm string) float32 {
 	return math32.NaN()
 }
 
+// VarByIndex returns synapse variable by index
+func (sy *TraceSyn) VarByIndex(varIdx int) float32 {
+	switch varIdx {
+	case 0:
+		return sy.NTr
+	case 1:
+		return sy.Tr
+	}
+	return math32.NaN()
+}
+
 var TraceSynVars = []string{"NTr", "Tr"}
 
 // Params for trace-based learning in the MatrixTracePrjn.
@@ -178,55 +191,40 @@ func (pj *MatrixTracePrjn) DWt() {
 
 ///////////////////////////////////////////////////////////////////////////////
 //  SynVals
 
-// SynVals sets values of given variable name for each synapse, using the natural ordering
-// of the synapses (sender based for Leabra),
-// into given float32 slice (only resized if not big enough).
-// Returns error on invalid var name.
-func (pj *MatrixTracePrjn) SynVals(vals *[]float32, varNm string) error {
-	_, err := leabra.SynapseVarByName(varNm)
+// SynVarIdx returns the index of given variable within the synapse,
+// according to *this prjn's* SynVarNames() list (using a map to lookup index),
+// or -1 and error message if not found.
+func (pj *MatrixTracePrjn) SynVarIdx(varNm string) (int, error) {
+	vidx, err := pj.Prjn.SynVarIdx(varNm)
 	if err == nil {
-		return pj.Prjn.SynVals(vals, varNm)
+		return vidx, err
 	}
-	ns := len(pj.TrSyns)
-	if *vals == nil || cap(*vals) < ns {
-		*vals = make([]float32, ns)
-	} else if len(*vals) < ns {
-		*vals = (*vals)[0:ns]
-	}
-	for i := range pj.TrSyns {
-		sy := &pj.TrSyns[i]
-		(*vals)[i] = sy.VarByName(varNm)
+	nn := len(leabra.SynapseVars)
+	switch varNm {
+	case "NTr":
+		return nn, nil
+	case "Tr":
+		return nn + 1, nil
 	}
-	return nil
+	return -1, fmt.Errorf("MatrixTracePrjn SynVarIdx: variable name: %v not valid", varNm)
 }
 
-// SynVal returns value of given variable name on the synapse
-// between given send, recv unit indexes (1D, flat indexes).
-// Returns math32.NaN() for access errors (see SynValTry for error message)
-func (pj *MatrixTracePrjn) SynVal(varNm string, sidx, ridx int) float32 {
-	_, err := leabra.SynapseVarByName(varNm)
-	if err == nil {
-		return pj.Prjn.SynVal(varNm, sidx, ridx)
-	}
-	if !(varNm == "NTr" || varNm == "Tr") {
+// SynVal1D returns value of given variable index (from SynVarIdx) on given SynIdx.
+// Returns NaN on invalid index.
+// This is the core synapse var access method used by other methods,
+// so it is the only one that needs to be updated for derived prjn types.
+func (pj *MatrixTracePrjn) SynVal1D(varIdx int, synIdx int) float32 {
+	if varIdx < 0 || varIdx >= len(SynVarsAll) {
 		return math32.NaN()
 	}
-	rsi := pj.SynIdx(sidx, ridx)
-	if rsi < 0 {
-		return math32.NaN()
+	nn := len(leabra.SynapseVars)
+	if varIdx < nn {
+		return pj.Prjn.SynVal1D(varIdx, synIdx)
 	}
-	sy := &pj.TrSyns[rsi]
-	return sy.VarByName(varNm)
-}
-
-// SynValTry returns value of given variable name on the synapse
-// between given send, recv unit indexes (1D, flat indexes).
-// Returns error for access errors.
-func (pj *MatrixTracePrjn) SynValTry(varNm string, sidx, ridx int) (float32, error) {
-	sv, err := pj.Prjn.SynValTry(varNm, sidx, ridx)
-	if err == nil {
-		return sv, err
+	if synIdx < 0 || synIdx >= len(pj.TrSyns) {
+		return math32.NaN()
 	}
-	sv = pj.SynVal(varNm, sidx, ridx)
-	return sv, nil
+	varIdx -= nn
+	sy := &pj.TrSyns[synIdx]
+	return sy.VarByIndex(varIdx)
 }
diff --git a/bgate/network.go b/bgate/network.go
index 0f3179f7..9f6c0490 100644
--- a/bgate/network.go
+++ b/bgate/network.go
@@ -43,29 +43,6 @@ func (nt *Network) UpdateParams() {
 	nt.Network.UpdateParams()
 }
 
-var (
-	// NeuronVars are extra neuron variables for bgate
-	NeuronVars = []string{"DA", "DALrn", "ACh", "Ca", "KCa"}
-
-	// NeuronVarsAll is the bgate collection of all neuron-level vars
-	NeuronVarsAll []string
-
-	// SynVarsAll is the bgate collection of all synapse-level vars (includes TraceSynVars)
-	SynVarsAll []string
-)
-
-func init() {
-	ln := len(deep.NeuronVarsAll)
-	NeuronVarsAll = make([]string, len(NeuronVars)+ln)
-	copy(NeuronVarsAll, deep.NeuronVarsAll)
-	copy(NeuronVarsAll[ln:], NeuronVars)
-
-	ln = len(leabra.SynapseVars)
-	SynVarsAll = make([]string, len(TraceSynVars)+ln)
-	copy(SynVarsAll, leabra.SynapseVars)
-	copy(SynVarsAll[ln:], TraceSynVars)
-}
-
 // UnitVarNames returns a list of variable names available on the units in this layer
 func (nt *Network) UnitVarNames() []string {
 	return NeuronVarsAll
@@ -142,7 +119,7 @@ func (nt *Network) AddVThalLayer(name string, nPoolsY, nPoolsX, nNeurY, nNeurX i
 // Only Matrix has more than 1 unit per Pool by default.
// Appropriate PoolOneToOne connections are made between layers, // using standard styles -func (nt *Network) AddBG(prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX int) (mtxGo, mtxNo, gpeOut, gpeIn, gpeTA, stnp, stns, gpi, vthal leabra.LeabraLayer) { +func (nt *Network) AddBG(prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX int) (mtxGo, mtxNo, tan, gpeOut, gpeIn, gpeTA, stnp, stns, gpi, vthal leabra.LeabraLayer) { gpi = nt.AddGPiLayer(prefix+"GPi", nPoolsY, nPoolsX, 1, 1) vthal = nt.AddVThalLayer(prefix+"VThal", nPoolsY, nPoolsX, 1, 1) gpeOut = nt.AddGPeLayer(prefix+"GPeOut", nPoolsY, nPoolsX, 1, 1) @@ -152,6 +129,7 @@ func (nt *Network) AddBG(prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX int) (m stns = nt.AddSTNLayer(prefix+"STNs", nPoolsY, nPoolsX, 1, 1) mtxGo = nt.AddMatrixLayer(prefix+"MtxGo", nPoolsY, nPoolsX, nNeurY, nNeurX, D1R) mtxNo = nt.AddMatrixLayer(prefix+"MtxNo", nPoolsY, nPoolsX, nNeurY, nNeurX, D2R) + tan = nt.AddTANLayer(prefix + "TAN") vthal.SetRelPos(relpos.Rel{Rel: relpos.RightOf, Other: gpi.Name(), YAlign: relpos.Front, Space: 2}) @@ -163,6 +141,7 @@ func (nt *Network) AddBG(prefix string, nPoolsY, nPoolsX, nNeurY, nNeurX int) (m mtxGo.SetRelPos(relpos.Rel{Rel: relpos.Above, Other: gpeOut.Name(), YAlign: relpos.Front, XAlign: relpos.Left, YOffset: 1}) mtxNo.SetRelPos(relpos.Rel{Rel: relpos.RightOf, Other: mtxGo.Name(), YAlign: relpos.Front, Space: 2}) + tan.SetRelPos(relpos.Rel{Rel: relpos.RightOf, Other: mtxNo.Name(), YAlign: relpos.Front, Space: 2}) one2one := prjn.NewPoolOneToOne() full := prjn.NewFull() diff --git a/bgate/neuron.go b/bgate/neuron.go new file mode 100644 index 00000000..89738c9c --- /dev/null +++ b/bgate/neuron.go @@ -0,0 +1,85 @@ +// Copyright (c) 2020, The Emergent Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package bgate + +import ( + "fmt" + "unsafe" + + "github.com/emer/leabra/deep" + "github.com/emer/leabra/leabra" +) + +var ( + // NeuronVars are extra neuron variables for bgate + NeuronVars = []string{"DA", "DALrn", "ACh", "Ca", "KCa"} + + // NeuronVarsAll is the bgate collection of all neuron-level vars + NeuronVarsAll []string + + // SynVarsAll is the bgate collection of all synapse-level vars (includes TraceSynVars) + SynVarsAll []string +) + +func init() { + ln := len(deep.NeuronVarsAll) + NeuronVarsAll = make([]string, len(NeuronVars)+ln) + copy(NeuronVarsAll, deep.NeuronVarsAll) + copy(NeuronVarsAll[ln:], NeuronVars) + + ln = len(leabra.SynapseVars) + SynVarsAll = make([]string, len(TraceSynVars)+ln) + copy(SynVarsAll, leabra.SynapseVars) + copy(SynVarsAll[ln:], TraceSynVars) +} + +////////////////////////////////////////////////////////////////////// +// STN neurons + +// STNNeuron holds the extra neuron (unit) level variables for STN computation. 
+type STNNeuron struct {
+	Ca  float32 `desc:"intracellular Calcium concentration -- increased by bursting and elevated levels of activation, drives KCa currents that result in hyperpolarization / inhibition."`
+	KCa float32 `desc:"Calcium-gated potassium channel conductance level, computed using function from Gillies & Willshaw 2006 as function of Ca."`
+}
+
+var (
+	STNNeuronVars    = []string{"Ca", "KCa"}
+	STNNeuronVarsMap map[string]int
+)
+
+func init() {
+	STNNeuronVarsMap = make(map[string]int, len(STNNeuronVars))
+	for i, v := range STNNeuronVars {
+		STNNeuronVarsMap[v] = i
+	}
+}
+
+func (nrn *STNNeuron) VarNames() []string {
+	return STNNeuronVars
+}
+
+// STNNeuronVarByName returns the index of the variable in the STNNeuron, or error
+func STNNeuronVarByName(varNm string) (int, error) {
+	i, ok := STNNeuronVarsMap[varNm]
+	if !ok {
+		return 0, fmt.Errorf("STNNeuron VarByName: variable name: %v not valid", varNm)
+	}
+	return i, nil
+}
+
+// VarByIndex returns variable using index (0 = first variable in STNNeuronVars list)
+func (nrn *STNNeuron) VarByIndex(idx int) float32 {
+	fv := (*float32)(unsafe.Pointer(uintptr(unsafe.Pointer(nrn)) + uintptr(4*idx)))
+	return *fv
+}
+
+// VarByName returns variable by name, or error
+func (nrn *STNNeuron) VarByName(varNm string) (float32, error) {
+	i, err := STNNeuronVarByName(varNm)
+	if err != nil {
+		return 0, err
+	}
+	return nrn.VarByIndex(i), nil
+}
diff --git a/bgate/stn.go b/bgate/stn.go
index 098e6731..166b3300 100644
--- a/bgate/stn.go
+++ b/bgate/stn.go
@@ -5,13 +5,9 @@
 package bgate
 
 import (
-	"fmt"
-	"log"
 	"strings"
-	"unsafe"
 
 	"github.com/chewxy/math32"
-	"github.com/emer/etable/etensor"
 	"github.com/emer/leabra/leabra"
 	"github.com/goki/ki/kit"
 )
@@ -37,7 +33,7 @@ type CaParams struct {
 func (kc *CaParams) Defaults() {
 	kc.BurstThr = 0.9
 	kc.ActThr = 0.7
-	kc.BurstCa = 200
+	kc.BurstCa = 1
 	kc.ActCa = 0.2
 	kc.GbarKCa = 20
 	kc.KCaTau = 40
@@ -56,9 +52,8 @@ func (kc *CaParams) KCaGFmCa(ca float32) float32 {
 // STNLayer represents the pausing subtype of STN neurons.
 // These open the gating window.
 type STNLayer struct {
-	leabra.Layer
+	Layer
 	Ca       CaParams    `view:"inline" desc:"parameters for calcium and calcium-gated potassium channels that drive the afterhyperpolarization that open the gating window in STN neurons (Hallworth et al., 2003)"`
-	DA float32 `inactive:"+" desc:"dopamine value for this layer"`
 	STNNeurs []STNNeuron `desc:"slice of extra STNNeuron state for this layer -- flat list of len = Shape.Len(). You must iterate over index and use pointer to modify values."`
 }
 
@@ -134,21 +129,13 @@ func (ly *STNLayer) Defaults()
 func (ly *STNLayer) GetDA() float32   { return ly.DA }
 func (ly *STNLayer) SetDA(da float32) { ly.DA = da }
 
-/*
-// UnitValByIdx returns value of given PBWM-specific variable by variable index
-// and flat neuron index (from layer or neuron-specific one).
-func (ly *STNLayer) UnitValByIdx(vidx NeuronVars, idx int) float32 {
-	switch vidx {
-	case DA:
-		return ly.DA
-	}
-	return 0
-}
-*/
-
 func (ly *STNLayer) InitActs() {
 	ly.Layer.InitActs()
-	ly.DA = 0
+	for ni := range ly.STNNeurs {
+		nrn := &ly.STNNeurs[ni]
+		nrn.Ca = 0
+		nrn.KCa = 0
+	}
 }
 
 // AlphaCycInit handles all initialization at start of new input pattern, including computing
@@ -193,144 +180,51 @@ func (ly *STNLayer) ActFmG(ltime *leabra.Time) {
 	}
 }
 
-//////////////////////////////////////////////////////////////////////
-//  STNNeurs management
-
-// UnitVals fills in values of given variable name on unit,
-// for each unit in the layer, into given float32 slice (only resized if not big enough).
-// Returns error on invalid var name.
-func (ly *STNLayer) UnitVals(vals *[]float32, varNm string) error {
-	vidx, err := leabra.NeuronVarByName(varNm)
-	if err == nil {
-		return ly.Layer.UnitVals(vals, varNm)
-	}
-	vidx, err = STNNeuronVarByName(varNm)
+// Build constructs the layer state, including calling Build on the projections.
+func (ly *STNLayer) Build() error {
+	err := ly.Layer.Build()
 	if err != nil {
 		return err
 	}
-	nn := len(ly.Neurons)
-	if *vals == nil || cap(*vals) < nn {
-		*vals = make([]float32, nn)
-	} else if len(*vals) < nn {
-		*vals = (*vals)[0:nn]
-	}
-	for i := range ly.STNNeurs {
-		dnr := &ly.STNNeurs[i]
-		(*vals)[i] = dnr.VarByIndex(vidx)
-	}
+	ly.STNNeurs = make([]STNNeuron, len(ly.Neurons))
 	return nil
 }
 
-// UnitValsTensor returns values of given variable name on unit
-// for each unit in the layer, as a float32 tensor in same shape as layer units.
-func (ly *STNLayer) UnitValsTensor(tsr etensor.Tensor, varNm string) error {
-	if tsr == nil {
-		err := fmt.Errorf("leabra.UnitValsTensor: Tensor is nil")
-		log.Println(err)
-		return err
-	}
-	vidx, err := leabra.NeuronVarByName(varNm)
+// UnitVarIdx returns the index of given variable within the Neuron,
+// according to UnitVarNames() list (using a map to lookup index),
+// or -1 and error message if not found.
+func (ly *STNLayer) UnitVarIdx(varNm string) (int, error) {
+	vidx, err := ly.Layer.UnitVarIdx(varNm)
 	if err == nil {
-		return ly.Layer.UnitValsTensor(tsr, varNm)
+		return vidx, err
 	}
 	vidx, err = STNNeuronVarByName(varNm)
 	if err != nil {
-		return err
+		return -1, err
 	}
-	tsr.SetShape(ly.Shp.Shp, ly.Shp.Strd, ly.Shp.Nms)
-	for i := range ly.STNNeurs {
-		dnr := &ly.STNNeurs[i]
-		tsr.SetFloat1D(i, float64(dnr.VarByIndex(vidx)))
-	}
-	return nil
+	nn := len(leabra.NeuronVars)
+	return nn + 1 + vidx, nil // DA is at nn (embedded Layer), so Ca / KCa start at nn+1, matching UnitVal1D below
 }
 
-// UnitValTry returns value of given variable name on given unit,
-// using shape-based dimensional index
-func (ly *STNLayer) UnitValTry(varNm string, idx []int) (float32, error) {
-	_, err := leabra.NeuronVarByName(varNm)
-	if err == nil {
-		return ly.Layer.UnitValTry(varNm, idx)
+// UnitVal1D returns value of given variable index on given unit, using 1-dimensional index.
+// returns NaN on invalid index.
+// This is the core unit var access method used by other methods,
+// so it is the only one that needs to be updated for derived layer types.
+func (ly *STNLayer) UnitVal1D(varIdx int, idx int) float32 { + if varIdx < 0 { + return math32.NaN() } - fidx := ly.Shp.Offset(idx) - nn := len(ly.STNNeurs) - if fidx < 0 || fidx >= nn { - return 0, fmt.Errorf("STNLayer UnitVal index: %v out of range, N = %v", fidx, nn) + nn := len(leabra.NeuronVars) + if varIdx <= nn { // DA = nn + return ly.Layer.UnitVal1D(varIdx, idx) } - dnr := &ly.STNNeurs[fidx] - return dnr.VarByName(varNm) -} - -// UnitVal1DTry returns value of given variable name on given unit, -// using 1-dimensional index. -func (ly *STNLayer) UnitVal1DTry(varNm string, idx int) (float32, error) { - _, err := leabra.NeuronVarByName(varNm) - if err == nil { - return ly.Layer.UnitVal1DTry(varNm, idx) + if idx < 0 || idx >= len(ly.Neurons) { + return math32.NaN() } - nn := len(ly.STNNeurs) - if idx < 0 || idx >= nn { - return 0, fmt.Errorf("STNLayer UnitVal1D index: %v out of range, N = %v", idx, nn) - } - dnr := &ly.STNNeurs[idx] - return dnr.VarByName(varNm) -} - -// Build constructs the layer state, including calling Build on the projections. -func (ly *STNLayer) Build() error { - err := ly.Layer.Build() - if err != nil { - return err - } - ly.STNNeurs = make([]STNNeuron, len(ly.Neurons)) - return nil -} - -////////////////////////////////////////////////////////////////////// -// STN neurons - -// STNNeuron holds the extra neuron (unit) level variables for STN computation. -type STNNeuron struct { - Ca float32 `desc:"intracellular Calcium concentration -- increased by bursting and elevated levels of activation, drives KCa currents that result in hyperpolarization / inhibition."` - KCa float32 `desc:"Calcium-gated potassium channel conductance level, computed using function from gillies & Willshaw 2006 as function of Ca."` -} - -var ( - STNNeuronVars = []string{"Ca", "KCa"} - STNNeuronVarsMap map[string]int -) - -func init() { - STNNeuronVarsMap = make(map[string]int, len(STNNeuronVars)) - for i, v := range STNNeuronVars { - STNNeuronVarsMap[v] = i - } -} - -func (nrn *STNNeuron) VarNames() []string { - return STNNeuronVars -} - -// STNNeuronVarByName returns the index of the variable in the STNNeuron, or error -func STNNeuronVarByName(varNm string) (int, error) { - i, ok := STNNeuronVarsMap[varNm] - if !ok { - return 0, fmt.Errorf("STNNeuron VarByName: variable name: %v not valid", varNm) - } - return i, nil -} - -// VarByIndex returns variable using index (0 = first variable in STNNeuronVars list) -func (nrn *STNNeuron) VarByIndex(idx int) float32 { - fv := (*float32)(unsafe.Pointer(uintptr(unsafe.Pointer(nrn)) + uintptr(4*idx))) - return *fv -} - -// VarByName returns variable by name, or error -func (nrn *STNNeuron) VarByName(varNm string) (float32, error) { - i, err := STNNeuronVarByName(varNm) - if err != nil { - return 0, err + varIdx -= nn + 1 + if varIdx >= len(STNNeuronVars) { + return math32.NaN() } - return nrn.VarByIndex(i), nil + snr := &ly.STNNeurs[idx] + return snr.VarByIndex(varIdx) } diff --git a/bgate/tan.go b/bgate/tan.go index 9ba17abc..0fdc86e3 100644 --- a/bgate/tan.go +++ b/bgate/tan.go @@ -5,8 +5,10 @@ package bgate import ( + "fmt" "log" + "github.com/chewxy/math32" "github.com/emer/leabra/leabra" "github.com/emer/leabra/rl" "github.com/goki/ki/kit" @@ -28,7 +30,9 @@ var KiT_TANLayer = kit.Types.AddType(&TANLayer{}, leabra.LayerProps) func (ly *TANLayer) Defaults() { ly.Layer.Defaults() - ly.RewLay = "Rew" + if ly.RewLay == "" { + ly.RewLay = "Rew" + } } // AChLayer interface: @@ -80,3 +84,39 @@ func (ly *TANLayer) CyclePost(ltime *leabra.Time) { ly.ACh 
= act
 	ly.SendACh.SendACh(ly.Network, act)
 }
+
+// UnitVarIdx returns the index of given variable within the Neuron,
+// according to UnitVarNames() list (using a map to lookup index),
+// or -1 and error message if not found.
+func (ly *TANLayer) UnitVarIdx(varNm string) (int, error) {
+	vidx, err := ly.Layer.UnitVarIdx(varNm)
+	if err == nil {
+		return vidx, err
+	}
+	if varNm != "ACh" {
+		return -1, fmt.Errorf("bgate.NeuronVars: variable named: %s not found", varNm)
+	}
+	nn := len(leabra.NeuronVars)
+	return nn, nil
+}
+
+// UnitVal1D returns value of given variable index on given unit, using 1-dimensional index.
+// returns NaN on invalid index.
+// This is the core unit var access method used by other methods,
+// so it is the only one that needs to be updated for derived layer types.
+func (ly *TANLayer) UnitVal1D(varIdx int, idx int) float32 {
+	nn := len(leabra.NeuronVars)
+	if varIdx < 0 || varIdx > nn { // nn = ACh
+		return math32.NaN()
+	}
+	if varIdx < nn {
+		return ly.Layer.UnitVal1D(varIdx, idx)
+	}
+	if idx < 0 || idx >= len(ly.Neurons) {
+		return math32.NaN()
+	}
+	if varIdx > nn {
+		return math32.NaN()
+	}
+	return ly.ACh
+}
diff --git a/rl/rw.go b/rl/rw.go
index da447cad..54b7aba5 100644
--- a/rl/rw.go
+++ b/rl/rw.go
@@ -69,8 +69,12 @@ var KiT_RWDaLayer = kit.Types.AddType(&RWDaLayer{}, deep.LayerProps)
 
 func (ly *RWDaLayer) Defaults() {
 	ly.Layer.Defaults()
-	ly.RewLay = "Rew"
-	ly.RWPredLay = "RWPred"
+	if ly.RewLay == "" {
+		ly.RewLay = "Rew"
+	}
+	if ly.RWPredLay == "" {
+		ly.RWPredLay = "RWPred"
+	}
 }
 
 // DALayer interface:
diff --git a/rl/td.go b/rl/td.go
index 028c49af..4135089a 100644
--- a/rl/td.go
+++ b/rl/td.go
@@ -55,7 +55,9 @@ type TDRewIntegParams struct {
 
 func (tp *TDRewIntegParams) Defaults() {
 	tp.Discount = 0.9
-	tp.RewPred = "RewPred"
+	if tp.RewPred == "" {
+		tp.RewPred = "RewPred"
+	}
 }
 
 // TDRewIntegLayer is the temporal differences reward integration layer.
@@ -136,7 +138,9 @@ var KiT_TDDaLayer = kit.Types.AddType(&TDDaLayer{}, deep.LayerProps)
 
 func (ly *TDDaLayer) Defaults() {
 	ly.Layer.Defaults()
-	ly.RewInteg = "RewInteg"
+	if ly.RewInteg == "" {
+		ly.RewInteg = "RewInteg"
+	}
 }
 
 // DALayer interface:
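
Note on the indexing convention this patch introduces: each derived layer appends its layer-level variables after the base per-neuron variables, with DA always at index nn = len(leabra.NeuronVars) (owned by the embedded bgate.Layer), and any further variables (DALrn / ACh for Matrix, Ca / KCa for STN) following at nn+1, nn+2 -- which is why STNLayer.UnitVarIdx must offset by nn+1. The sketch below is illustrative only and is not part of the patch: baseVars, baseLayer, and matrixLayer are hypothetical stand-ins for leabra.NeuronVars, bgate.Layer, and bgate.MatrixLayer, showing how UnitVarIdx and UnitVal1D compose across the embedding chain.

package main

import (
	"fmt"
	"math"
)

// baseVars stands in for leabra.NeuronVars (the per-neuron variables).
var baseVars = []string{"Act", "Ge", "Gi"}

// baseLayer mimics bgate.Layer: base vars occupy [0, nn), DA sits at nn.
type baseLayer struct {
	acts []float32 // one per-neuron value, standing in for all base vars
	DA   float32   // layer-level dopamine, shared by all units
}

func (ly *baseLayer) UnitVarIdx(nm string) (int, error) {
	for i, v := range baseVars {
		if v == nm {
			return i, nil
		}
	}
	if nm == "DA" {
		return len(baseVars), nil // DA is just past the base vars
	}
	return -1, fmt.Errorf("variable %q not found", nm)
}

func (ly *baseLayer) UnitVal1D(varIdx, idx int) float32 {
	nn := len(baseVars)
	if varIdx < 0 || varIdx > nn || idx < 0 || idx >= len(ly.acts) {
		return float32(math.NaN())
	}
	if varIdx < nn {
		return ly.acts[idx] // real code looks up the per-neuron variable
	}
	return ly.DA // varIdx == nn
}

// matrixLayer mimics bgate.MatrixLayer: DALrn at nn+1, ACh at nn+2;
// anything <= nn is delegated to the embedded layer, which owns DA.
// STNLayer follows the same scheme, with Ca / KCa starting at nn+1.
type matrixLayer struct {
	baseLayer
	DALrn, ACh float32
}

func (ly *matrixLayer) UnitVarIdx(nm string) (int, error) {
	if i, err := ly.baseLayer.UnitVarIdx(nm); err == nil {
		return i, nil
	}
	nn := len(baseVars)
	switch nm {
	case "DALrn":
		return nn + 1, nil
	case "ACh":
		return nn + 2, nil
	}
	return -1, fmt.Errorf("variable %q not found", nm)
}

func (ly *matrixLayer) UnitVal1D(varIdx, idx int) float32 {
	nn := len(baseVars)
	switch varIdx {
	case nn + 1:
		return ly.DALrn
	case nn + 2:
		return ly.ACh
	}
	return ly.baseLayer.UnitVal1D(varIdx, idx) // base vars and DA
}

func main() {
	ly := &matrixLayer{
		baseLayer: baseLayer{acts: []float32{0.5}, DA: 0.8},
		DALrn:     0.4,
		ACh:       0.1,
	}
	for _, nm := range []string{"Act", "DA", "DALrn", "ACh"} {
		vi, _ := ly.UnitVarIdx(nm)
		fmt.Printf("%-5s -> idx %d  val %g\n", nm, vi, ly.UnitVal1D(vi, 0))
	}
	// Output:
	// Act   -> idx 0  val 0.5
	// DA    -> idx 3  val 0.8
	// DALrn -> idx 4  val 0.4
	// ACh   -> idx 5  val 0.1
}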