
Commit

Merge pull request #132 from ljvmiranda921/remove-history-getters
Remove history getters
Lj Miranda authored Jun 13, 2018
2 parents 28703bf + a967c25 commit bd1b24e
Showing 10 changed files with 45 additions and 144 deletions.
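
For downstream users, the practical effect of this change is that history values are now read straight from the optimizer's list attributes (cost_history, mean_pbest_history, mean_neighbor_history, pos_history, velocity_history) instead of through the removed get_* properties, and callers convert to numpy arrays themselves. A minimal usage sketch, assuming a small GlobalBestPSO run (the particle count, dimensions, and iteration count below are illustrative, not taken from this commit):

import numpy as np
from pyswarms.single import GlobalBestPSO
from pyswarms.utils.functions.single_obj import sphere_func

# Run a short optimization so the histories get populated
options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9}
pso = GlobalBestPSO(n_particles=10, dimensions=2, options=options)
pso.optimize(sphere_func, 100, verbose=0)

# Before this commit: pso.get_cost_history, pso.get_pos_history, etc.
# (properties that wrapped the lists in numpy arrays).
# After this commit: read the plain list attributes and wrap them yourself.
cost_history = np.array(pso.cost_history)          # shape (100,)
pos_history = np.array(pso.pos_history)            # shape (100, 10, 2)
velocity_history = np.array(pso.velocity_history)  # shape (100, 10, 2)
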
58 changes: 2 additions & 56 deletions pyswarms/base/base_discrete.py
@@ -28,11 +28,7 @@
"""

import os
import yaml
import logging
import numpy as np
import logging.config
from collections import namedtuple

# Import from package
@@ -71,30 +67,6 @@ def assertions(self):
if not all(key in self.options for key in ('c1', 'c2', 'w')):
raise KeyError('Missing either c1, c2, or w in options')

def setup_logging(self, default_path='./config/logging.yaml',
default_level=logging.INFO, env_key='LOG_CFG'):
"""Setup logging configuration
Parameters
----------
default_path : str (default is `./config/logging.yaml`)
the path where the logging configuration is stored
default_level: logging.LEVEL (default is `logging.INFO`)
the default logging level
env_key : str
the environment key for accessing the setup
"""
path = default_path
value = os.getenv(env_key, None)
if value:
path = value
if os.path.exists(path):
with open(path, 'rt') as f:
config = yaml.safe_load(f.read())
logging.config.dictConfig(config)
else:
logging.basicConfig(level=default_level)

def __init__(self, n_particles, dimensions, binary, options,
velocity_clamp=None, init_pos=None, ftol=-np.inf):
"""Initializes the swarm.
@@ -115,7 +87,7 @@ def __init__(self, n_particles, dimensions, binary, options,
initial positions. When passed with a :code:`False` value,
random integers from 0 to :code:`dimensions` are generated.
options : dict with keys :code:`{'c1', 'c2', 'w'}`
a dictionary containing the parameters for the specific
a dictionary containing the parameters for the specific
optimization technique
* c1 : float
cognitive parameter
@@ -131,7 +103,6 @@ def __init__(self, n_particles, dimensions, binary, options,
a dictionary containing the parameters for a specific
optimization technique
"""
self.setup_logging()
# Initialize primary swarm attributes
self.n_particles = n_particles
self.dimensions = dimensions
@@ -172,31 +143,6 @@ def _populate_history(self, hist):
self.pos_history.append(hist.position)
self.velocity_history.append(hist.velocity)

@property
def get_cost_history(self):
"""Get cost history"""
return np.array(self.cost_history)

@property
def get_mean_pbest_history(self):
"""Get mean personal best history"""
return np.array(self.mean_pbest_history)

@property
def get_mean_neighbor_history(self):
"""Get mean neighborhood cost history"""
return np.array(self.mean_neighbor_history)

@property
def get_pos_history(self):
"""Get position history"""
return np.array(self.pos_history)

@property
def get_velocity_history(self):
"""Get velocity history"""
return np.array(self.velocity_history)

def optimize(self, objective_func, iters, print_step=1, verbose=1):
"""Optimizes the swarm for a number of iterations.
@@ -258,4 +204,4 @@ def reset(self):
discrete=True,
init_pos=self.init_pos,
binary=self.binary,
clamp=self.velocity_clamp, options=self.options)
clamp=self.velocity_clamp, options=self.options)
54 changes: 0 additions & 54 deletions pyswarms/base/base_single.py
@@ -29,11 +29,7 @@
:mod:`pyswarms.single.local_best`: local-best PSO implementation
"""

import os
import yaml
import logging
import numpy as np
import logging.config
from collections import namedtuple

# Import from package
@@ -95,30 +91,6 @@ def assertions(self):
if not all(key in self.options for key in ('c1', 'c2', 'w')):
raise KeyError('Missing either c1, c2, or w in options')

def setup_logging(self, default_path='./config/logging.yaml',
default_level=logging.INFO, env_key='LOG_CFG'):
"""Setup logging configuration
Parameters
----------
default_path : str (default is `./config/logging.yaml`)
the path where the logging configuration is stored
default_level: logging.LEVEL (default is `logging.INFO`)
the default logging level
env_key : str
the environment key for accessing the setup
"""
path = default_path
value = os.getenv(env_key, None)
if value:
path = value
if os.path.exists(path):
with open(path, 'rt') as f:
config = yaml.safe_load(f.read())
logging.config.dictConfig(config)
else:
logging.basicConfig(level=default_level)

def __init__(self, n_particles, dimensions, options, bounds=None,
velocity_clamp=None, center=1.0, ftol=-np.inf, init_pos=None):
"""Initializes the swarm.
@@ -153,7 +125,6 @@ def __init__(self, n_particles, dimensions, options, bounds=None,
ftol : float
relative error in objective_func(best_pos) acceptable for convergence
"""
self.setup_logging()
# Initialize primary swarm attributes
self.n_particles = n_particles
self.dimensions = dimensions
@@ -194,31 +165,6 @@ def _populate_history(self, hist):
self.pos_history.append(hist.position)
self.velocity_history.append(hist.velocity)

@property
def get_cost_history(self):
"""Get cost history"""
return np.array(self.cost_history)

@property
def get_mean_pbest_history(self):
"""Get mean personal best history"""
return np.array(self.mean_pbest_history)

@property
def get_mean_neighbor_history(self):
"""Get mean neighborhood cost history"""
return np.array(self.mean_neighbor_history)

@property
def get_pos_history(self):
"""Get position history"""
return np.array(self.pos_history)

@property
def get_velocity_history(self):
"""Get velocity history"""
return np.array(self.velocity_history)

def optimize(self, objective_func, iters, print_step=1, verbose=1):
"""Optimizes the swarm for a number of iterations.
2 changes: 1 addition & 1 deletion pyswarms/utils/plotters/plotters.py
@@ -370,4 +370,4 @@ def _mesh(mesher):
# Get z-value
z = mesher.func(xypairs)
zz = z.reshape(xx.shape)
return (xx, yy, zz)
return (xx, yy, zz)
16 changes: 9 additions & 7 deletions tests/optimizers/test_binary.py
@@ -7,7 +7,6 @@

# Import from package
from pyswarms.discrete import BinaryPSO
from pyswarms.utils.functions.single_obj import sphere_func

@pytest.mark.parametrize('options', [
{'c2':0.7, 'w':0.5, 'k': 2, 'p': 2},
@@ -59,10 +58,13 @@ def test_reset_default_values(binary_reset):
assert binary_reset.swarm.best_cost == np.inf
assert set(binary_reset.swarm.best_pos) == set(np.array([]))

def test_training_history_shape(binary_history):
@pytest.mark.parametrize('history, expected_shape', [
('cost_history', (1000,)),
('mean_pbest_history', (1000,)),
('mean_neighbor_history',(1000,)),
('pos_history',(1000, 10, 2)),
('velocity_history',(1000, 10, 2))])
def test_training_history_shape(binary_history, history, expected_shape):
"""Test if training histories are of expected shape"""
assert binary_history.get_cost_history.shape == (1000,)
assert binary_history.get_mean_pbest_history.shape == (1000,)
assert binary_history.get_mean_neighbor_history.shape == (1000,)
assert binary_history.get_pos_history.shape == (1000, 10, 2)
assert binary_history.get_velocity_history.shape == (1000, 10, 2)
pso = vars(binary_history)
assert np.array(pso[history]).shape == expected_shape
17 changes: 10 additions & 7 deletions tests/optimizers/test_global_best.py
@@ -74,16 +74,19 @@ def test_reset_default_values(gbest_reset):
assert gbest_reset.swarm.best_cost == np.inf
assert set(gbest_reset.swarm.best_pos) == set(np.array([]))

def test_training_history_shape(gbest_history):
@pytest.mark.parametrize('history, expected_shape', [
('cost_history', (1000,)),
('mean_pbest_history', (1000,)),
('mean_neighbor_history',(1000,)),
('pos_history',(1000, 10, 2)),
('velocity_history',(1000, 10, 2))])
def test_training_history_shape(gbest_history, history, expected_shape):
"""Test if training histories are of expected shape"""
assert gbest_history.get_cost_history.shape == (1000,)
assert gbest_history.get_mean_pbest_history.shape == (1000,)
assert gbest_history.get_mean_neighbor_history.shape == (1000,)
assert gbest_history.get_pos_history.shape == (1000, 10, 2)
assert gbest_history.get_velocity_history.shape == (1000, 10, 2)
pso = vars(gbest_history)
assert np.array(pso[history]).shape == expected_shape

def test_ftol_effect(options):
"""Test if setting the ftol breaks the optimization process accodingly"""
pso = GlobalBestPSO(10, 2, options=options, ftol=1e-1)
pso.optimize(sphere_func, 2000, verbose=0)
assert pso.get_cost_history.shape != (2000,)
assert np.array(pso.cost_history).shape != (2000,)
17 changes: 10 additions & 7 deletions tests/optimizers/test_local_best.py
@@ -87,16 +87,19 @@ def test_reset_default_values(lbest_reset):
assert lbest_reset.swarm.best_cost == np.inf
assert set(lbest_reset.swarm.best_pos) == set(np.array([]))

def test_training_history_shape(lbest_history):
@pytest.mark.parametrize('history, expected_shape', [
('cost_history', (1000,)),
('mean_pbest_history', (1000,)),
('mean_neighbor_history',(1000,)),
('pos_history',(1000, 10, 2)),
('velocity_history',(1000, 10, 2))])
def test_training_history_shape(lbest_history, history, expected_shape):
"""Test if training histories are of expected shape"""
assert lbest_history.get_cost_history.shape == (1000,)
assert lbest_history.get_mean_pbest_history.shape == (1000,)
assert lbest_history.get_mean_neighbor_history.shape == (1000,)
assert lbest_history.get_pos_history.shape == (1000, 10, 2)
assert lbest_history.get_velocity_history.shape == (1000, 10, 2)
pso = vars(lbest_history)
assert np.array(pso[history]).shape == expected_shape

def test_ftol_effect(options):
"""Test if setting the ftol breaks the optimization process accodingly"""
pso = LocalBestPSO(10, 2, options=options, ftol=1e-1)
pso.optimize(sphere_func, 2000, verbose=0)
assert pso.get_cost_history.shape != (2000,)
assert np.array(pso.cost_history).shape != (2000,)
9 changes: 4 additions & 5 deletions tests/utils/environments/conftest.py
@@ -6,7 +6,6 @@
# Import modules
import os
import pytest
import numpy as np
from mock import Mock
import matplotlib as mpl

@@ -23,9 +22,9 @@ def mock_pso():
"""Returns a function that mocks a PSO class with missing attributes"""
def _mock_pso(index):
class_methods = [
'get_cost_history',
'get_pos_history',
'get_velocity_history',
'cost_history',
'pos_history',
'velocity_history',
'optimize',
'reset'
]
@@ -37,4 +36,4 @@ def _mock_pso(index):
def plot_environment():
"""Returns a PlotEnvironment instance"""
optimizer = GlobalBestPSO(10, 3, options={'c1': 0.5, 'c2': 0.3, 'w': 0.9})
return PlotEnvironment(optimizer, sphere_func, 1000)
return PlotEnvironment(optimizer, sphere_func, 1000)
12 changes: 7 additions & 5 deletions tests/utils/environments/test_plot_environment.py
@@ -4,7 +4,6 @@
# Import modules
import os
import pytest
import numpy as np
import matplotlib as mpl

if os.environ.get('DISPLAY','') == '':
@@ -18,9 +17,9 @@
from pyswarms.utils.functions.single_obj import sphere_func

class_methods = [
'get_cost_history',
'get_pos_history',
'get_velocity_history',
'cost_history',
'pos_history',
'velocity_history',
'optimize',
'reset'
]
@@ -33,14 +32,17 @@ def test_getters_pso(mock_pso, attributes):
m = mock_pso(idx)
PlotEnvironment(m, sphere_func, 100)

@pytest.mark.xfail
def test_plot_cost_return_type(plot_environment):
"""Tests if plot_cost() returns a SubplotBase instance"""
assert isinstance(plot_environment.plot_cost(),SubplotBase)

@pytest.mark.xfail
def test_plot2D_return_type(plot_environment):
"""Test if plot_particles2D() returns a FuncAnimation instance"""
assert isinstance(plot_environment.plot_particles2D(), FuncAnimation)

@pytest.mark.xfail
def test_plot3D_return_type(plot_environment):
"""Test if plot_particles3D() returns a FuncAnimation instance"""
assert isinstance(plot_environment.plot_particles3D(), FuncAnimation)
assert isinstance(plot_environment.plot_particles3D(), FuncAnimation)
2 changes: 1 addition & 1 deletion tests/utils/plotters/conftest.py
@@ -34,4 +34,4 @@ def pos_history():
@pytest.fixture
def mesher():
"""A Mesher instance with sphere function and delta=0.1"""
return Mesher(func=sphere_func, delta=0.1)
return Mesher(func=sphere_func, delta=0.1)
2 changes: 1 addition & 1 deletion tests/utils/plotters/test_plotters.py
@@ -52,4 +52,4 @@ def test_animate_hidden_function_type(pos_history):
fig, ax = mpl.pyplot.subplots(1,1)
ax = mpl.pyplot.scatter(x=[], y=[])
return_plot = _animate(i=1, data=pos_history, plot=ax)
assert isinstance(return_plot, tuple)
assert isinstance(return_plot, tuple)
