Merge remote-tracking branch 'origin/devel'
* origin/devel:
  tests/ParameterEstimationComposition: Reduce the number of estimates by 40x
  tests/ParameterEstimationComposition: Provide expected result instead of relying on tolerance
  requirements: update dill requirement from <0.3.7 to <0.3.8 (#2743)
  Feat/em composition refactor learning mech (#2754)
  Fix/input port combine (#2755)
  requirements: update beartype requirement from <0.15.0 to <0.16.0 (#2744)
kmantel committed Aug 1, 2023
2 parents b8cd10e + c49c015 commit e3887fc
Showing 21 changed files with 925 additions and 483 deletions.
3 changes: 0 additions & 3 deletions psyneulink/core/components/component.py
@@ -1243,9 +1243,6 @@ def __init__(self,
# - assign function's output to self.defaults.value (based on call of self.execute)
self._instantiate_function(function=function, function_params=function_params, context=context)

# FIX TIME 3/18/21
if '(RESULT) to (OUTPUT_CIM_TransferMechanism-1_RESULT)' in self.name:
assert True
self._instantiate_value(context=context)

# INSTANTIATE ATTRIBUTES AFTER FUNCTION
@@ -181,7 +181,9 @@ class EMStorage(LearningFunction):
EMStorage( \
default_variable=None, \
axis=0, \
storage_location=None \
storage_prob=1.0, \
decay_rate=0.0, \
params=None, \
name=None, \
prefs=None)
@@ -207,10 +209,19 @@ class EMStorage(LearningFunction):
axis : int : default 0
specifies the axis of `memory_matrix <EMStorage.memory_matrix>` to which `entry <EMStorage.entry>` is assigned.
storage_location : int : default None
specifies the location (row or col determined by `axis <EMStorage.axis>`) of `memory_matrix
<EMStorage.memory_matrix>` at which the new entry is stored (replacing the existing one);
if None, the weakest entry (one with the lowest norm) along `axis <EMStorage.axis>` of
`memory_matrix <EMStorage.memory_matrix>` is used.
storage_prob : float : default default_learning_rate
specifies the probability with which `entry <EMStorage.entry>` is assigned to `memory_matrix
<EMStorage.memory_matrix>`.
decay_rate : float : default 0.0
specifies the rate at which pre-existing entries in `memory_matrix <EMStorage.memory_matrix>` are decayed.
params : Dict[param keyword: param value] : default None
a `parameter dictionary <ParameterPort_Specification>` that specifies the parameters for the
function. Values specified for parameters in the dictionary override any assigned to those
@@ -240,10 +251,17 @@ class EMStorage(LearningFunction):
axis : int
determines axis of `memory_matrix <EMStorage.memory_matrix>` to which `entry <EMStorage.entry>` is assigned.
storage_location : int
specifies the location (row or col determined by `axis <EMStorage.axis>`) of `memory_matrix
<EMStorage.memory_matrix>` at which the new entry is stored.
storage_prob : float
determines the probability with which `entry <EMStorage.entry>` is stored in `memory_matrix
<EMStorage.memory_matrix>`.
decay_rate : float
determines the rate at which pre-existing entries in `memory_matrix <EMStorage.memory_matrix>` are decayed.
random_state : numpy.RandomState
private pseudorandom number generator
@@ -275,6 +293,12 @@ class Parameters(LearningFunction.Parameters):
:type: int
:read only: True
decay_rate
see `decay_rate <EMStorage.decay_rate>`
:default value: 0.0
:type: float
entry
see `entry <EMStorage.entry>`
@@ -295,6 +319,12 @@ class Parameters(LearningFunction.Parameters):
:default value: None
:type: ``numpy.random.RandomState``
storage_location
see `storage_location <EMStorage.storage_location>`
:default value: None
:type: int
storage_prob
see `storage_prob <EMStorage.storage_prob>`
@@ -306,12 +336,14 @@ class Parameters(LearningFunction.Parameters):
read_only=True,
pnl_internal=True,
constructor_argument='default_variable')
entry = Parameter([0], read_only=True)
memory_matrix = Parameter([[0],[0]], read_only=True)
axis = Parameter(0, read_only=True, structural=True)
storage_location = Parameter(None, read_only=True)
storage_prob = Parameter(1.0, modulable=True)
decay_rate = Parameter(0.0, modulable=True)
random_state = Parameter(None, loggable=False, getter=_random_state_getter, dependencies='seed')
seed = Parameter(DEFAULT_SEED, modulable=True, fallback_default=True, setter=_seed_setter)
storage_prob = Parameter(1.0, modulable=True)
entry = Parameter([0], read_only=True)
memory_matrix = Parameter([[0],[0]], read_only=True)

default_learning_rate = 1.0

@@ -326,7 +358,9 @@ def _validate_storage_prob(self, storage_prob):
def __init__(self,
default_variable=None,
axis=0,
storage_location=None,
storage_prob=1.0,
decay_rate=0.0,
seed=None,
params=None,
owner=None,
@@ -335,7 +369,9 @@ def __init__(self,
super().__init__(
default_variable=default_variable,
axis=axis,
storage_location=storage_location,
storage_prob=storage_prob,
decay_rate=decay_rate,
seed=seed,
params=params,
owner=owner,
@@ -401,16 +437,22 @@ def _function(self,

entry = variable
axis = self.parameters.axis._get(context)
storage_location = self.parameters.storage_location._get(context)
storage_prob = self.parameters.storage_prob._get(context)
decay_rate = self.parameters.decay_rate._get(context)
random_state = self.parameters.random_state._get(context)

# FIX: IMPLEMENT decay_rate CALCULATION

# IMPLEMENTATION NOTE: if memory_matrix is an arg, it must be in params (put there by Component.function())
# Manage memory_matrix param
memory_matrix = None
if params:
memory_matrix = params.pop(MEMORY_MATRIX, None)
axis = params.pop('axis', axis)
storage_location = params.pop('storage_location', storage_location)
storage_prob = params.pop('storage_prob', storage_prob)
decay_rate = params.pop('decay_rate', decay_rate)
# During init, function is called directly from Component (i.e., not from LearningMechanism execute() method),
# so need "placemarker" error_matrix for validation
if memory_matrix is None:
@@ -430,8 +472,13 @@
# Don't store entry during initialization to avoid contaminating memory_matrix
pass
elif random_state.uniform(0, 1) < storage_prob:
# Store entry in slot with weakest memory (one with lowest norm) along specified axis
idx_of_min = np.argmin(np.linalg.norm(memory_matrix, axis=axis))
if decay_rate:
memory_matrix *= decay_rate
if storage_location is not None:
idx_of_min = storage_location
else:
# Find weakest entry (i.e., with lowest norm) along specified axis of matrix
idx_of_min = np.argmin(np.linalg.norm(memory_matrix, axis=axis))
if axis == 0:
memory_matrix[:,idx_of_min] = np.array(entry)
elif axis == 1:
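The storage step added to `_function` above reduces to a short piece of array logic. Below is a minimal standalone NumPy sketch of that logic (the `em_store` helper, its defaults, and the example data are illustrative, not part of the commit); it assumes the decay and slot-selection semantics shown in the diff, with entries stored as columns when `axis` is 0 and as rows when it is 1.

```python
import numpy as np

def em_store(entry, memory_matrix, axis=0, storage_location=None,
             storage_prob=1.0, decay_rate=0.0, rng=None):
    """Illustrative sketch of the EMStorage storage step (not the library API)."""
    if rng is None:
        rng = np.random.default_rng()
    memory_matrix = np.array(memory_matrix, dtype=float)
    if rng.uniform(0, 1) < storage_prob:
        # Scale pre-existing entries before storing the new one
        if decay_rate:
            memory_matrix *= decay_rate
        # Use the explicit slot if given; otherwise overwrite the weakest
        # entry (lowest norm) along the specified axis
        if storage_location is not None:
            idx = storage_location
        else:
            idx = np.argmin(np.linalg.norm(memory_matrix, axis=axis))
        if axis == 0:
            memory_matrix[:, idx] = np.asarray(entry)
        else:
            memory_matrix[idx, :] = np.asarray(entry)
    return memory_matrix

# Example: three 2-element entries stored as columns (axis=0);
# the middle column has the lowest norm, so it is overwritten.
memory = np.array([[1.0, 0.0, 2.0],
                   [1.0, 0.1, 2.0]])
print(em_store([5.0, 5.0], memory, axis=0, decay_rate=0.5))
```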
12 changes: 7 additions & 5 deletions psyneulink/core/components/mechanisms/mechanism.py
@@ -1865,9 +1865,9 @@ def _handle_arg_input_ports(self, input_ports):

try:
parsed_input_port_spec = _parse_port_spec(owner=self,
port_type=InputPort,
port_spec=s,
)
port_type=InputPort,
port_spec=s,
context=Context(string='handle_arg_input_ports'))
except AttributeError as e:
if DEFER_VARIABLE_SPEC_TO_MECH_MSG in e.args[0]:
default_variable_from_input_ports.append(InputPort.defaults.variable)
@@ -1980,9 +1980,11 @@ def _validate_params(self, request_set, target_set=None, context=None):
try:
try:
for port_spec in params[INPUT_PORTS]:
_parse_port_spec(owner=self, port_type=InputPort, port_spec=port_spec)
_parse_port_spec(owner=self, port_type=InputPort, port_spec=port_spec,
context=Context(string='mechanism.validate_params'))
except TypeError:
_parse_port_spec(owner=self, port_type=InputPort, port_spec=params[INPUT_PORTS])
_parse_port_spec(owner=self, port_type=InputPort, port_spec=params[INPUT_PORTS],
context=Context(string='mechanism.validate_params'))
except AttributeError as e:
if DEFER_VARIABLE_SPEC_TO_MECH_MSG in e.args[0]:
pass
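A recurring change in this file and the ones below is that each `_parse_port_spec` call now passes an explicit `Context` whose `string` names the call site. A hedged sketch of that pattern, using only imports that appear elsewhere in this diff (the `parse_input_port_spec` wrapper and its arguments are illustrative, not part of the commit):

```python
from psyneulink.core.components.ports.inputport import InputPort
from psyneulink.core.components.ports.port import _parse_port_spec
from psyneulink.core.globals.context import Context

def parse_input_port_spec(owner, port_spec, call_site):
    # Label the parse with its call site (e.g. 'mechanism.validate_params')
    # so the origin of a port specification can be traced.
    return _parse_port_spec(owner=owner,
                            port_type=InputPort,
                            port_spec=port_spec,
                            context=Context(string=call_site))
```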
@@ -632,6 +632,7 @@
OBJECTIVE_MECHANISM, OUTCOME, OWNER_VALUE, PARAMS, PORT_TYPE, PRODUCT, PROJECTION_TYPE, PROJECTIONS, \
SEPARATE, SIZE
from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.context import Context

Check notice (Code scanning / CodeQL): Cyclic import. Import of module psyneulink.core.globals.context begins an import cycle.
from psyneulink.core.globals.preferences.basepreferenceset import ValidPrefSet
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
from psyneulink.core.globals.utilities import ContentAddressableList, convert_all_elements_to_np_array, convert_to_list, convert_to_np_array
@@ -672,24 +673,17 @@ def __init__(self, message, data=None):


def validate_monitored_port_spec(owner, spec_list):
context = Context(string='ControlMechanism.validate_monitored_port_spec')
for spec in spec_list:
if isinstance(spec, MonitoredOutputPortTuple):
spec = spec.output_port
elif isinstance(spec, tuple):
spec = _parse_port_spec(
owner=owner,
port_type=InputPort,
port_spec=spec,
)
spec = _parse_port_spec(owner=owner, port_type=InputPort, port_spec=spec, context=context)
spec = spec['params'][PROJECTIONS][0][0]
elif isinstance(spec, dict):
# If it is a dict, parse to validate that it is an InputPort specification dict
# (for InputPort of ObjectiveMechanism to be assigned to the monitored_output_port)
spec = _parse_port_spec(
owner=owner,
port_type=InputPort,
port_spec=spec,
)
spec = _parse_port_spec(owner=owner, port_type=InputPort, port_spec=spec, context=context)
# Get the OutputPort, to validate that it is in the ControlMechanism's Composition (below);
# presumes that the monitored_output_port is the first in the list of projection_specs
# in the InputPort port specification dictionary returned from the parse,
@@ -1263,15 +1257,10 @@ def _validate_output_ports(self, control):

port_types = self._owner.outputPortTypes
for ctl_spec in control:
ctl_spec = _parse_port_spec(
port_type=port_types, owner=self._owner, port_spec=ctl_spec
)
if not (
isinstance(ctl_spec, port_types)
or (
isinstance(ctl_spec, dict) and ctl_spec[PORT_TYPE] == port_types
)
):
ctl_spec = _parse_port_spec(port_type=port_types, owner=self._owner, port_spec=ctl_spec,
context=Context(string='ControlMechanism._validate_output_ports'))
if not (isinstance(ctl_spec, port_types)
or (isinstance(ctl_spec, dict) and ctl_spec[PORT_TYPE] == port_types)):
return 'invalid port specification'

# FIX 5/28/20:
@@ -2581,7 +2581,8 @@ def _validate_entries(spec=None, source=None):
self.state_feature_specs[i] = spec

# Get InputPort specification dictionary for state_input_port and update its entries
parsed_spec = _parse_port_spec(owner=self, port_type=InputPort, port_spec=spec)
parsed_spec = _parse_port_spec(owner=self, port_type=InputPort, port_spec=spec,
context=Context(string='OptimizationControlMechanism._parse_specs'))
parsed_spec[NAME] = state_input_port_names[i]
if parsed_spec[PARAMS] and SHADOW_INPUTS in parsed_spec[PARAMS]:
# Composition._update_shadow_projections will take care of PROJECTIONS specification
@@ -544,7 +544,7 @@
from psyneulink.core.components.ports.modulatorysignals.learningsignal import LearningSignal
from psyneulink.core.components.ports.parameterport import ParameterPort
from psyneulink.core.components.shellclasses import Mechanism
from psyneulink.core.globals.context import ContextFlags, handle_external_context
from psyneulink.core.globals.context import Context, ContextFlags, handle_external_context

Check notice (Code scanning / CodeQL): Cyclic import. Import of module psyneulink.core.globals.context begins an import cycle.
from psyneulink.core.globals.keywords import \
ADDITIVE, ASSERT, ENABLED, INPUT_PORTS, \
LEARNING, LEARNING_MECHANISM, LEARNING_PROJECTION, LEARNING_SIGNAL, LEARNING_SIGNALS, MATRIX, \
@@ -1161,7 +1161,8 @@ def _validate_params(self, request_set, target_set=None, context=None):
format(LEARNING_SIGNAL, self.name))

for spec in target_set[LEARNING_SIGNALS]:
learning_signal = _parse_port_spec(port_type=LearningSignal, owner=self, port_spec=spec)
learning_signal = _parse_port_spec(port_type=LearningSignal, owner=self, port_spec=spec,
context=Context(string='LearningMechanism.validate_params'))

# Validate that the receiver of the LearningProjection (if specified)
# is a MappingProjection and in the same Composition as self (if specified)
@@ -377,7 +377,7 @@
from psyneulink.core.components.ports.inputport import InputPort, INPUT_PORT
from psyneulink.core.components.ports.outputport import OutputPort
from psyneulink.core.components.ports.port import _parse_port_spec
from psyneulink.core.globals.context import ContextFlags, handle_external_context
from psyneulink.core.globals.context import Context, ContextFlags, handle_external_context

Check notice (Code scanning / CodeQL): Cyclic import. Import of module psyneulink.core.globals.context begins an import cycle.
from psyneulink.core.globals.keywords import \
CONTROL, EXPONENT, EXPONENTS, LEARNING, MATRIX, NAME, OBJECTIVE_MECHANISM, OUTCOME, OWNER_VALUE, \
PARAMS, PREFERENCE_SET_NAME, PROJECTION, PROJECTIONS, PORT_TYPE, VARIABLE, WEIGHT, WEIGHTS
@@ -714,7 +714,8 @@ def add_to_monitor(self, monitor_specs, context=None):
monitor_specs[i] = spec

# Parse spec to get value of OutputPort and (possibly) the Projection from it
input_port = _parse_port_spec(owner=self, port_type=InputPort, port_spec=spec,
input_port = _parse_port_spec(owner=self, port_type = InputPort, port_spec=spec,
context=Context(string='objective_mechanism.add_to_monitor'))

# There should be only one ProjectionTuple specified,
# that designates the OutputPort and (possibly) a Projection from it