13.1. GTOpt

13.1.1. example_gtopt.py

#
# coding: utf-8
# Copyright (C) pSeven SAS, 2010-present
#

"""GTOpt python example usage."""

from da.p7core import gtopt
from da.p7core import loggers
from pprint import pprint

class UnconstrainedProblemExample(gtopt.ProblemUnconstrained):
  """
  Problem: find the minimum of the function f = (x - 1)^2 + 42 on the interval (-inf, +inf)
  Initial guess: x_0 = 100500.
  """

  def prepare_problem(self):
    # variable x in (-inf, +inf), initial value = 100500
    self.add_variable((None, None), 100500.)
    self.add_objective()

  def define_objectives(self, x):
    return [(x[0] - 1)**2 + 42]


class TwoObjectivesUnconstrainedProblem(gtopt.ProblemUnconstrained):
  """
  Problem: find the minimum of the functions f1 = (x - 1)^2, f2 = (x + 1)^2 on the interval (-inf, +inf)
  Initial guess: x_0 = 1000.
  """

  def prepare_problem(self):
    # variable x in (-inf, +inf), initial value = 1000.
    self.add_variable((None, None), 1000.)
    for i in range(2):
      self.add_objective()

  def define_objectives(self, x):
    x = x[0]
    return [(x - 1)**2, (x + 1)**2]


class TwoObjectivesConstrainedProblem(gtopt.ProblemConstrained):
  """
  Problem: find the minimum of the functions f1 = (x1 - 1)^2 + (x2 - 1)^2, f2 = (x1 + 1)^2 + (x2 + 1)^2 on (0, 1) x (0, 1)
           under the constraint 0 < x1 + x2 < 0.8
  Initial guess: x1_0 = 0.5,  x2_0 = 0.5
  """

  def prepare_problem(self):
    # variables x1 and x2 are in the interval (0., 1.), initial value = 0.5
    self.add_variable((0., 1.), 0.5)
    self.add_variable((0., 1.), 0.5)
    self.add_constraint((0., 0.8))
    self.add_objective()
    self.add_objective()

  def define_objectives(self, x):
    return [(x[0] - 1)**2 + (x[1] - 1)**2, (x[0] + 1)**2 + (x[1] + 1)**2]

  def define_constraints(self, x):
    return [x[0] + x[1]]


class MyDistribution:
  def __init__(self, dimension, vector):
    self._dimension = dimension
    self._vector = vector
  def getNumericalSample(self, quantity):
    out = []
    for i in range(0, quantity):
      for j in range(0, self._dimension):
        out.append(self._vector[j])
    return out
  def getDimension(self):
    return self._dimension


class ConstraintSatisfactionProblemExample(gtopt.ProblemCSP):
  """
  Problem: find a point (x0, x1) that satisfies the constraint x0 + x1 > 1
  Initial guess: x0_0 = 500.,  x1_0 = -700.
  """

  def prepare_problem(self):
    self.add_variable((None, None), 500.)
    self.add_variable((None, None), -700.)
    # there is no upper limit
    self.add_constraint((1, None))

  def define_constraints(self, x):
    return [x[0] + x[1]]


class StochasticProblemExample(gtopt.ProblemConstrained):
  """
  Problem: find the minimum of the function f = (x1 - ksi1)^2 + (x2 - ksi2)^2, where ksi is drawn from the stochastic distribution,
           under the constraint -1 < x1 + x2 < 1
  Initial guess: x1_0 = -10,  x2_0 = 50
  """

  def prepare_problem(self):
    # variable x1 is in interval (-1000., 10.), initial guess is -10.
    self.add_variable((-1000.0, 10.0), -10.0, 'x1')
    self.add_variable((-10.0, 1000.0), 50.0, 'x2')
    self.add_constraint((-1.0, 1.0), 'c')
    self.add_objective('f')
    self.set_stochastic(MyDistribution(4, [-2.0, -1.0, 1.0, 0.5]))

  def define_objectives(self, x):
    ksi = x[2:]
    return [(x[0] - ksi[0])**2 + (x[1] - ksi[1])**2]

  def define_constraints(self, x):
    return [x[0] + x[1]]


class CustomWatcher(object):
  """
  A watcher is an object capable of interrupting the solving process.
  Define a watcher to check intermediate results and terminate optimization early if needed.
  This watcher always returns True, so it never interrupts the solver.
  """

  def __call__(self, obj):
    return True

def trivial_example():
  # create optimizer with default parameters
  optimizer = gtopt.Solver()
  # create problem
  problem = UnconstrainedProblemExample()
  print(str(problem))
  # solve problem and get result
  result = optimizer.solve(problem)
  # print general info about result
  print(str(result))
  # print optimal points
  print("Optimal answer:")
  result.optimal.pprint()

def trivial_example_two():
  optimizer = gtopt.Solver()
  # set an optimization stop criterion
  # (see the documentation for details)
  optimizer.options.set("GTOpt/ObjectiveTolerance", "0.1")
  problem = TwoObjectivesUnconstrainedProblem()
  print(str(problem))
  result = optimizer.solve(problem)
  print(str(result))
  print("Optimal answer:")
  result.optimal.pprint()

def advanced_example():
  optimizer = gtopt.Solver()
  # set logger; by default it writes to sys.stdout
  optimizer.set_logger(loggers.StreamLogger())
  # set watcher
  optimizer.set_watcher(CustomWatcher())

  # work with options
  print("Options list:")
  pprint(optimizer.options.list)
  print("")
  optionname = 'GTOpt/FrontDensity'
  print('Option: %s' % optionname)
  print('Info: %s' % optimizer.options.info(optionname))
  optimizer.options.set(optionname, "5")
  print('New value: %s' % optimizer.options.get(optionname))
  optimizer.options.reset()
  print('After reset: %s' % optimizer.options.get(optionname))
  print("")
  optimizer.options.set(optionname, "15")

  problem = TwoObjectivesConstrainedProblem()
  print(str(problem))
  result = optimizer.solve(problem)
  print(str(result))
  print('Optimal points:')
  result.optimal.pprint()

def constraint_example():
  optimizer = gtopt.Solver()
  problem = ConstraintSatisfactionProblemExample()
  result = optimizer.solve(problem)
  print(str(result))
  print("Found point:")
  result.optimal.pprint()

def stochastic_example():
  optimizer = gtopt.Solver()
  optimizer.options.set("GTOpt/ObjectiveTolerance", "0.1")
  problem = StochasticProblemExample()
  print(str(problem))
  result = optimizer.solve(problem)
  print(str(result))
  print('Optimal point:')
  result.optimal.pprint(components=['x', 'f', 'c', 'fe', 'ce', 'psi'])

def main():
  """Example of GTOpt usage."""

  print('Find the minimum of a function')
  print('=' * 60)
  trivial_example()
  print('=' * 60)
  print('Finished!')
  print('')

  print('Find the minimum of two functions')
  print('=' * 60)
  trivial_example_two()
  print('=' * 60)
  print('Finished!')
  print('')

  print('Find the minimum of functions with constraint')
  print('=' * 60)
  advanced_example()
  print('=' * 60)
  print('Finished!')
  print('')

  print('Find a point satisfying the constraint')
  print('=' * 60)
  constraint_example()
  print('=' * 60)
  print('Finished!')
  print('')

  print('Stochastic problem example')
  print('=' * 60)
  stochastic_example()
  print('=' * 60)
  print('Finished!')
  print('')

if __name__ == "__main__":
  main()

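The CustomWatcher above always returns True, so it never interrupts the solver. Below is a minimal sketch of a budget-limited watcher, assuming the convention suggested by the docstring (return True to let the solver continue, False to request early termination); the class name and call budget are illustrative:

class BudgetWatcher(object):
  """Hypothetical watcher that requests termination after a fixed number of callbacks."""

  def __init__(self, max_calls):
    self.max_calls = max_calls
    self.calls = 0

  def __call__(self, obj):
    self.calls += 1
    # True lets optimization continue; False asks the solver to stop (assumed convention).
    return self.calls < self.max_calls

# usage sketch:
#   optimizer = gtopt.Solver()
#   optimizer.set_watcher(BudgetWatcher(100))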
13.1.2. example_gtopt_generic.py

#
# coding: utf-8
# Copyright (C) pSeven SAS, 2010-present
#

"""Example of using generic problem definition in GTOpt python interface"""

from da.p7core import gtopt
from da.p7core import loggers

import matplotlib.pyplot as plt

import sys
import os

class TP7GenericProblemExample(gtopt.ProblemGeneric):
  """
  Problem TP7 - constrained multi-objective problem without analytic gradients:
  minimize:    f0 = x0, f1 = (1 + x1) / x0
  where:       0.1 < x0 < 1
               0 < x1 < 0.5
  subject to:  x1 + 9 * x0 - 6 > 0
              -x1 + 9 * x0 - 1 > 0
  """

  def prepare_problem(self):
    #names are not set for variables and constraints - they will be named automatically: (x0, x1, ...), (c0, c1, ...)
    #define variables: bounds are specified, initial guess is not set
    self.add_variable((0.1, 1.0), None)
    self.add_variable((0.0, 0.5), None)
    #define constraints: lower bounds are specified, upper bounds are not set (will be interpreted as positive infinity)
    self.add_constraint((0.0, None))
    self.add_constraint((0.0, None))
    #define 2 objectives: names are set
    self.add_objective("first_objective")
    self.add_objective("second_objective")

  def evaluate(self, queryx, querymask):
    """
    Batch mode is supported in the generic problem definition, i.e. the optimizer may request several points at once.
    Masks are supported: the optimizer specifies the required responses, and the user must provide at least these responses.

    In this example, only the responses requested by the mask are calculated; the others are set to None.
    All points in the batch are calculated sequentially and in the same way.
    """
    #functions_batch will be filled with lists of outputs
    functions_batch = []
    #output_masks_batch will be filled with lists of output masks
    output_masks_batch = []
    #queryx is a 2D array, x is a point to evaluate
    #querymask is a list of lists; nonzero values in a mask mark the required responses
    for x, mask in zip(queryx, querymask):
      #calculate objectives
      objectives = []
      objectives.append(x[0] if mask[0] else None)
      objectives.append((1 + x[1]) / x[0] if mask[1] else None)
      #calculate constraints
      constraints = []
      constraints.append(x[1] + 9 * x[0] - 6 if mask[2] else None)
      constraints.append(-x[1] + 9 * x[0] - 1 if mask[3] else None)
      #add responses to list
      functions_batch.append(objectives + constraints)
      # this example calculates only those responses that were requested by the input mask,
      # so it should return exactly the same mask
      output_mask = mask
      output_masks_batch.append(output_mask)
    return functions_batch, output_masks_batch

class TP7GenericProblemWithGradientsExample(gtopt.ProblemGeneric):
  """
  Problem TP7 - constrained multi-objective problem with analytic gradients:
  minimize:    f0 = x0, f1 = (1 + x1) / x0
  where:       0.1 < x0 < 1
               0 < x1 < 0.5
  subject to:  x1 + 9 * x0 - 6 > 0
              -x1 + 9 * x0 - 1 > 0

  objectives Jacobian: (      1             0  )
                       (-(1 + x1)/(x0^2)   1/x0)

  constraints Jacobian: (9   1)
                        (9  -1)
  """

  def prepare_problem(self):
    #names are not set for variables and constraints - they will be named automatically: (x0, x1, ...), (c0, c1, ...)
    #define variables: bounds are specified, initial guess is not set
    self.add_variable((0.1, 1.0), None)
    self.add_variable((0.0, 0.5), None)
    #define constraints: lower bounds are specified, upper bounds are not set (will be interpreted as positive infinity)
    self.add_constraint((0.0, None))
    self.add_constraint((0.0, None))
    #define 2 objectives: names are set
    self.add_objective("first_objective")
    self.add_objective("second_objective")
    """
    turn on analytic objectives gradient
    objectives gradient is sparse: non-zero elements are (0, 0), (1, 0), (1, 1)
    we need to specify non-zero rows and columns in Jacobian
    rows are (0, 1, 1), columns are (0, 0, 1)
    """
    self.enable_objectives_gradient(([0, 1, 1], [0, 0, 1]))
    """
    turn on analytic constraints gradient
    constraints gradient is dense - all values are non-zero
    so no parameter is needed
    """
    self.enable_constraints_gradient()

  def evaluate(self, queryx, querymask):
    """
    Batch mode is supported in the generic problem definition, i.e. the optimizer may request several points at once.
    Masks are supported: the optimizer specifies the required responses, and the user must provide at least these responses.

    All points in the batch are calculated sequentially and in the same way.
    The list with responses has the following structure: [objectives, constraints, objectives gradients, constraints gradients].
    Its length is equal to 11 = 2 + 2 + 3 + 4.
    Here, each response element is calculated only if it is requested by the mask.
    """
    #functions_batch will be filled with lists of outputs
    functions_batch = []
    #output_masks_batch will be filled with lists of output masks
    output_masks_batch = []
    #queryx - list of lists, x - list with values of input variables
    #querymask - list of lists, mask - list of required responses
    for x, mask in zip(queryx, querymask):
      output_mask = []
      #calculate objectives required by mask
      objectives = [None] * 2
      if mask[0]:
        objectives[0] = x[0]
      if mask[1]:
        objectives[1] = (1 + x[1]) / x[0]

      #calculate constraints required by mask
      constraints = [None] * 2
      if mask[2]:
        constraints[0] = x[1] + 9 * x[0] - 6
      if mask[3]:
        constraints[1] = -x[1] + 9 * x[0] - 1

      #calculate non-zero elements of the objectives Jacobian (the order of partial derivatives should be the same as in the input of enable_objectives_gradient)
      objectives_gradient = [None] * 3
      if mask[4]:#df0/dx0
        objectives_gradient[0] = 1.0
      if mask[5]:#df1/dx0
        objectives_gradient[1] = -(1 + x[1]) / x[0]**2
      if mask[6]:#df1/dx1
        objectives_gradient[2] = 1 / x[0]

      #calculate the elements of the constraints Jacobian demanded by the mask; the order is dc0/dx0, dc0/dx1, dc1/dx0, dc1/dx1
      constraints_gradient = [None] * 4
      if mask[7]:#dc0/dx0
        constraints_gradient[0] = 9
      if mask[8]:#dc0/dx1
        constraints_gradient[1] = 1
      if mask[9]:#dc1/dx0
        constraints_gradient[2] = 9
      if mask[10]:#dc1/dx1
        constraints_gradient[3] = -1
      #add responses to list
      functions_batch.append(objectives + constraints + objectives_gradient + constraints_gradient)
      #add mask to list
      output_masks_batch.append(mask) #In this example we calculate only what is requested by an incoming mask, so we should return exactly the incoming mask
    return functions_batch, output_masks_batch

def solve_problem(problem):
  #create optimizer instance
  optimizer = gtopt.Solver()
  # set logger
  optimizer.set_logger(loggers.StreamLogger(sys.stdout, loggers.LogLevel.DEBUG))
  #set options
  options = []
  options.append(('GTOpt/LogLevel', 'WARN'))
  options.append(('GTOpt/FrontDensity', 8))
  for option in options:
    optimizer.options.set(*option)
  #print information about problem
  print(str(problem))

  #solve the problem
  result = optimizer.solve(problem)

  #print solution
  print(str(result))
  print('Optimal points:')
  result.optimal.pprint()
  return result

def plot(result):
  plt.clf()
  fig = plt.figure(1)
  ax = fig.add_subplot(111)
  ax.axis('off')
  title = 'TP7 Generic Solution: case of numerical gradients'
  plt.title(title)
  axv = [fig.add_subplot(211), fig.add_subplot(212)]
  for i in range(2):
    x1 = [x[0] for x in result[i].optimal.f]
    x2 = [x[1] for x in result[i].optimal.f]
    axv[i].plot(x1, x2, 'bo', label = 'Pareto Frontier')
    axv[i].legend(loc = 'best')
    if i == 1:
      title = 'TP7 Generic Solution: case of analytical gradients'
      plt.title(title)
  fig.savefig(title)
  print('Plots are saved to %s.png' % os.path.join(os.getcwd(), title))
  if 'SUPPRESS_SHOW_PLOTS' not in os.environ:
    plt.show()

def main():
  result = []
  for problem in [TP7GenericProblemExample(), TP7GenericProblemWithGradientsExample()]:
    print('=' * 60)
    print('Solve problem %s' % problem.__class__.__name__)
    result.append(solve_problem(problem))
  plot(result)
  print('Finished!')
  print('=' * 60)

if __name__ == "__main__":
  main()

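Both evaluate() implementations above follow the same batch pattern. Below is a compact helper (a sketch, not part of the shipped example) that maps a single-point response function over the query batch and returns an all-true output mask, which is allowed because the user may provide more responses than the mask requests:

def batch_evaluate(point_responses, queryx, querymask):
  # point_responses(x) is a hypothetical callable returning the full response list for one point,
  # in the same order as in evaluate() above: [objectives..., constraints...]
  functions_batch = []
  output_masks_batch = []
  for x, mask in zip(queryx, querymask):
    functions_batch.append(point_responses(x))
    # every response is provided, so the output mask is all nonzero
    output_masks_batch.append([True] * len(functions_batch[-1]))
  return functions_batch, output_masks_batch

# usage sketch inside a gtopt.ProblemGeneric subclass:
#   def evaluate(self, queryx, querymask):
#     tp7 = lambda x: [x[0], (1 + x[1]) / x[0], x[1] + 9 * x[0] - 6, -x[1] + 9 * x[0] - 1]
#     return batch_evaluate(tp7, queryx, querymask)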
13.1.3. example_gtopt_gradients.py

#
# coding: utf-8
# Copyright (C) pSeven SAS, 2010-present
#

"""Example of using generic problem definition with analytical gradients in GTOpt python interface"""

from da.p7core import gtopt
from da.p7core import loggers

import matplotlib.pyplot as plt

import os

class SimpleProblem(gtopt.ProblemGeneric):
  """
  minimize:    f1 = x0, f2 = (1 + x1) / x0
  where:       0.1 < x0 < 1
               0 < x1 < 0.5
  """
  def prepare_problem(self):
    # 2 variables
    self.add_variable((0.1, 1.0), None)
    self.add_variable((0.0, 0.5), None)
    # 2 objectives
    self.add_objective()
    self.add_objective()
    # dense objectives jacobian
    self.enable_objectives_gradient()

  def evaluate(self, queryx, querymask):
    batch_f = []
    batch_mask = []
    for x, mask in zip(queryx, querymask):
      responses, output_mask = self.eval_single(x, mask)
      batch_f.append(responses)
      batch_mask.append(output_mask)
    return batch_f, batch_mask

  def eval_single(self, x, mask):
    # The order of responses here is
    # f dim = 2
    # c dim = 0
    # f gradients 2 * 2
    # c gradients 0 * 2

    output_mask = [False] * len(mask)
    f = [None] * 2
    grads = [None] * 4

    if any(mask[:2]): # We suppose that the objectives cannot be computed separately,
      f = self.eval_objectives(x) # so we always provide both of them
      output_mask[:2] = [True] * 2 # and may extend the mask sometimes.

    if any(mask[2:]): # Each gradient entry has a separate mask, but as above we assume that the Jacobian
                      # can only be computed as a whole, so we provide all values at once and may extend the mask.
      grads = sum(self.eval_gradients(x), [])
      output_mask[2:] = [True] * 4

    return f + grads, output_mask

  def eval_objectives(self, x):
    objectives = [None, None]
    objectives[0] = x[0]
    objectives[1] = (1 + x[1]) / x[0]
    return objectives

  def eval_gradients(self, x):
    objectives_gradient = [[None, None], [None, None]]
    objectives_gradient[0][0] = 1.0
    objectives_gradient[0][1] = 0.0
    objectives_gradient[1][0] = -(1 + x[1]) / x[0]**2
    objectives_gradient[1][1] = 1 / x[0]
    return objectives_gradient

def plot(result):
  plt.clf()
  fig = plt.figure(1)
  ax = fig.add_subplot(111)
  ax.grid(True)
  x1 = [x[0] for x in result.optimal.f]
  x2 = [x[1] for x in result.optimal.f]
  ax.plot(x1, x2, 'bo' , label = 'Pareto Frontier')
  ax.set_xlabel('f1')
  ax.set_ylabel('f2')
  ax.legend(loc = 'best')
  title ='Simple problem with gradients'
  plt.title(title)
  fig.savefig(title)
  print('Plots are saved to %s.png' % os.path.join(os.getcwd(), title))
  if 'SUPPRESS_SHOW_PLOTS' not in os.environ:
    plt.show()

def main():
  # create problem
  problem = SimpleProblem()
  print(str(problem))
  # create optimizer with default parameters
  optimizer = gtopt.Solver()
  optimizer.set_logger(loggers.StreamLogger())
  # solve problem and get result
  result = optimizer.solve(problem)
  # print general info about result
  print(str(result))
  # print Pareto optimal points
  result.optimal.pprint()
  # plot Pareto frontier
  plot(result)

if __name__ == "__main__":
  main()

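The analytic Jacobian returned by eval_gradients() can be sanity-checked against central finite differences of eval_objectives(). A standalone sketch (the helper name, step and tolerance are illustrative):

def check_gradients(problem, x, step=1e-6, tol=1e-4):
  # compare problem.eval_gradients(x) with central finite differences of problem.eval_objectives(x)
  analytic = problem.eval_gradients(x)
  for j in range(len(x)):
    x_plus, x_minus = list(x), list(x)
    x_plus[j] += step
    x_minus[j] -= step
    f_plus, f_minus = problem.eval_objectives(x_plus), problem.eval_objectives(x_minus)
    for i in range(len(f_plus)):
      numeric = (f_plus[i] - f_minus[i]) / (2.0 * step)
      assert abs(numeric - analytic[i][j]) < tol, (i, j, numeric, analytic[i][j])

# usage sketch: check_gradients(SimpleProblem(), [0.5, 0.25])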
13.1.4. example_gtopt_ackley_function.py

#
# coding: utf-8
# Copyright (C) pSeven SAS, 2010-present
#

"""Example of solving some well-known benchmarking problems using GTOpt"""

from da.p7core import gtopt
from da.p7core import loggers
from pprint import pprint
import os
import matplotlib.pyplot as plt
import numpy as np

class AckleyDesignSpaceExampleProblem(gtopt.ProblemUnconstrained):
  """
  Ackley function
  Number of variables: n = 2.
  global optimum: x* = [-1.5096201, -0.7548651]
  f(x*) = -4.59010163
  Link: http://www.it.lut.fi/ip/evo/functions/node14.html
  Source: D. H. Ackley. "A connectionist machine for genetic hillclimbing". Boston: Kluwer Academic Publishers, 1987.
  """
  def prepare_problem(self):
    self.enable_history()
    for i in range(2):
      self.add_variable((-2.0, 2.0), 0.5)
    self.add_objective()

  def define_objectives(self, x):
    pass

class AckleyProblemListExampleProblem(AckleyDesignSpaceExampleProblem):
  """
  Ackley function: define_objectives returns list
  """
  def define_objectives(self, x):
    a, b, c = np.exp(-0.2), 3.0, 2.0
    objectives = a * np.sqrt(x[0]**2 + x[1]**2) + b * (np.cos(c * x[0]) + np.sin(c * x[1]))
    return [objectives]

class AckleyProblemSingleExampleProblem(AckleyDesignSpaceExampleProblem):
  """
  Ackley function: define_objectives returns float
  """
  def define_objectives(self, x):
    a, b, c = np.exp(-0.2), 3.0, 2.0
    obj = a * np.sqrt(x[0]**2 + x[1]**2) + b * (np.cos(c * x[0]) + np.sin(c * x[1]))
    return obj

def run_singleobjective_example(problem):
  print(str(problem))
  # create optimizer with default parameters
  optimizer = gtopt.Solver()
  optimizer.set_logger(loggers.StreamLogger())
  # solve problem and get result
  result = optimizer.solve(problem)
  # print general info about result
  print(str(result))
  # print the list of all answers:
  print("Optimal answer:")
  result.optimal.pprint(['x'])
  # print list of the best answers:
  print("Converged answer:")
  result._converged.pprint(['x'])
  print("Functions values on answer:")
  result._converged.pprint(['f'])
  hist = [[],[]]
  hist[0] = [[x[0]] for x in problem.history]
  hist[1] = [[x[1]] for x in problem.history]
  return result, hist

def plot(result, hist):
  plt.clf()
  fig = plt.figure(1)
  ax = fig.add_subplot(111)
  title = 'Ackley function'
  plt.title(title)
  ax.set_xlabel('x')
  ax.set_ylabel('y')
  x = y = np.linspace(-2., 2., 50)
  [X, Y] = np.meshgrid(x, y)
  a, b, c = np.exp(-0.2), 3.0, 2.0
  Z = a * np.sqrt(X**2 + Y**2) + b * (np.cos(c * X) + np.sin(c * Y))
  ax.contour(X,Y,Z, 8)

  x,y = hist
  ax.plot(x, y, 'b+', markersize = 5., label = 'Optimization Steps')
  ax.plot(result.optimal.x[0][0], result.optimal.x[0][1], 'ro', markersize = 5., label = 'Optimal Solution')
  ax.legend(loc = 'best')
  plt.title(title)
  fig.savefig(title)
  print('Plots are saved to %s.png' % os.path.join(os.getcwd(), title))
  if 'SUPPRESS_SHOW_PLOTS' not in os.environ:
    plt.show()


def main():
  """Example of GTOpt usage."""
  result = []
  hist = []
  for problem in [AckleyProblemListExampleProblem(), AckleyProblemSingleExampleProblem()]:
    print('Find the minimum of the function')
    print('=' * 60)
    result, hist = run_singleobjective_example(problem)
    print('=' * 60)
    print('Finished!')
  plot(result, hist)

if __name__ == "__main__":
  main()

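The documented optimum of this Ackley variant can be checked directly with the same constants used in the classes above; a standalone sketch:

import numpy as np

def ackley_variant(x0, x1):
  # same expression as in define_objectives() above
  a, b, c = np.exp(-0.2), 3.0, 2.0
  return a * np.sqrt(x0**2 + x1**2) + b * (np.cos(c * x0) + np.sin(c * x1))

print(ackley_variant(-1.5096201, -0.7548651))  # approximately -4.59010163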
13.1.5. example_gtopt_triangle.py

#
# coding: utf-8
# Copyright (C) pSeven SAS, 2010-present
#

"""
6 variables geometrical test case:
fit a triangle of minimum size around two non-intersecting circles of unit radius
(assume w.l.o.g. that circle centers are located at (-1,1) and (1,1))
"""

from da.p7core import gtopt
from da.p7core import loggers
from pprint import pprint
from math import exp, sqrt, cos, sin, fabs
import os
import matplotlib.pyplot as plt

class CirclesInTriangleExampleProblem(gtopt.ProblemConstrained):
  def prepare_problem(self):
    self.enable_history()
    #coordinates of triangle vertices
    self.add_variable((1.5, 6.5), 5.0, 'x0')
    self.add_variable((-0.75, 0.75), -0.1, 'y0')
    self.add_variable((-0.75, 0.75), -0.1, 'x1')
    self.add_variable((1.5, 6.5), 5.0, 'y1')
    self.add_variable((-6.5, -1.5), -5.0, 'x2')
    self.add_variable((-0.75, 0.75), -0.1, 'y2')
    #find a triangle with minimal area
    self.add_objective('area')
    # define 6 constraints: distances from the circle centers to the sides of the triangle (these should be >= 1)
    for i in range(6):
      self.add_constraint((1.0, None), 'd' + str(i + 1))
    # the triangle area must be bigger than the total area of the two circles (2*pi)
    self.add_constraint((6.2831854, None), 'areacon')

  def define_objectives(self, v):
    v = dict(zip(self.variables_names(), v))
    # triangle area
    return 0.5 * (v['x0'] * (v['y1'] - v['y2']) + v['x1'] * (v['y2'] - v['y0']) + v['x2'] * (v['y0'] - v['y1']))

  def define_constraints(self, v):
    v = dict(zip(self.variables_names(), v))
    con = {}
    # distances from the circle centred at (1, 1) to the triangle sides (want >= radius = 1)
    con['d1'] = fabs((v['x1'] - v['x0']) * (v['y0'] - 1.0) - (v['x0'] - 1.0) * (v['y1'] - v['y0'])) / sqrt((v['x1'] - v['x0'])**2 + (v['y1'] - v['y0'])**2)
    con['d2'] = fabs((v['x2'] - v['x1']) * (v['y1'] - 1.0) - (v['x1'] - 1.0) * (v['y2'] - v['y1'])) / sqrt((v['x2'] - v['x1'])**2 + (v['y2'] - v['y1'])**2)
    con['d3'] = fabs((v['x0'] - v['x2']) * (v['y2'] - 1.0) - (v['x2'] - 1.0) * (v['y0'] - v['y2'])) / sqrt((v['x0'] - v['x2'])**2 + (v['y0'] - v['y2'])**2)
    # distances from the circle centred at (-1, 1) to the triangle sides (want >= radius = 1)
    con['d4'] = fabs((v['x1'] - v['x0']) * (v['y0'] - 1.0) - (v['x0'] + 1.0) * (v['y1'] - v['y0'])) / sqrt((v['x1'] - v['x0'])**2 + (v['y1'] - v['y0'])**2)
    con['d5'] = fabs((v['x2'] - v['x1']) * (v['y1'] - 1.0) - (v['x1'] + 1.0) * (v['y2'] - v['y1'])) / sqrt((v['x2'] - v['x1'])**2 + (v['y2'] - v['y1'])**2)
    con['d6'] = fabs((v['x0'] - v['x2']) * (v['y2'] - 1.0) - (v['x2'] + 1.0) * (v['y0'] - v['y2'])) / sqrt((v['x0'] - v['x2'])**2 + (v['y0'] - v['y2'])**2)
    # constraint on area > area of circles
    con['areacon'] = 0.5 * (v['x0'] * (v['y1'] - v['y2']) + v['x1'] * (v['y2'] - v['y0']) + v['x2'] * (v['y0'] - v['y1']))
    return [con[k] for k in self.constraints_names()]


def run_circle_problem():
  optimizer = gtopt.Solver()
  optimizer.set_logger(loggers.StreamLogger())
  #initialize problem
  problem = CirclesInTriangleExampleProblem()
  # solve problem and get result
  result = optimizer.solve(problem)
  # print general info about result
  print(str(result))
  #different ways to work with solution
  opt_con = result.optimal.c
  print('constraints: %s' % opt_con)
  print('constraints names: %s' % result.names.c)
  print('%s:  %f' % (problem.constraints_names()[0], opt_con[0][0]))
  hist = [t[0:6] for t in problem.history]
  return result, hist


def plot(result, hist):
  plt.clf()
  fig = plt.figure(1)
  ax = fig.add_subplot(111)
  ax.axis('equal')
  title = 'Circles in triangle'
  plt.title(title)
  ax.set_xlabel('x')
  ax.set_ylabel('y')
  circle1 = plt.Circle((-1, 1), 1, facecolor='none',
              edgecolor='r', linewidth=1.5, alpha=0.5)
  ax.add_patch(circle1)
  circle2 = plt.Circle((1, 1), 1, facecolor='none',
              edgecolor='r', linewidth=1.5, alpha=0.5)
  ax.add_patch(circle2)
  # plot every 10th intermediate triangle from the optimization history
  for i in range(0, len(hist), 10):
    points = hist[i]
    x = [points[0], points[2], points[4], points[0]]
    y = [points[1], points[3], points[5], points[1]]
    if i == 0:
      ax.plot(x, y, 'b--', linewidth=0.6, label = 'Optimization Steps')
    else:
      ax.plot(x, y, 'b--', linewidth=0.6)
  points = result.optimal.x[0]
  x = [points[0], points[2], points[4], points[0]]
  y = [points[1], points[3], points[5], points[1]]
  ax.plot(x,y, 'r-', linewidth=2, label = 'Optimal Solution')
  ax.legend(loc = 'best')
  plt.title(title)
  fig.savefig(title)
  print('Plots are saved to %s.png' % os.path.join(os.getcwd(), title))
  if 'SUPPRESS_SHOW_PLOTS' not in os.environ:
    plt.show()


def main():
  print('Find the minimum of the function')
  print('=' * 60)
  result, hist = run_circle_problem()
  plot(result, hist)
  print('=' * 60)
  print('Finished!')

if __name__ == "__main__":
  main()

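The six distance constraints above are instances of the standard point-to-line distance. A standalone sketch of the underlying formula (the helper name is illustrative):

from math import fabs, sqrt

def point_to_line_distance(cx, cy, xa, ya, xb, yb):
  # distance from the point (cx, cy) to the line through (xa, ya) and (xb, yb)
  return fabs((xb - xa) * (ya - cy) - (xa - cx) * (yb - ya)) / sqrt((xb - xa)**2 + (yb - ya)**2)

# d1 in define_constraints() above equals point_to_line_distance(1.0, 1.0, v['x0'], v['y0'], v['x1'], v['y1']),
# and each such distance is required to be >= 1 (the circle radius).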
13.1.6. example_gtopt_dfo.py

#
# coding: utf-8
# Copyright (C) pSeven SAS, 2010-present
#

"""Example demonstrates using of different strategies for estimating derivatives."""

from da.p7core import gtopt
from da.p7core import loggers

import matplotlib.pyplot as plt
import numpy

import os

def noisy_function(x):
  return x * x * (1 + 0.03 * numpy.sin(800 * x))

class MultimodalProblemExample(gtopt.ProblemUnconstrained):
  def prepare_problem(self):
    self.add_variable((-1.0, 1.0), 0.9)
    self.add_objective()

  def define_objectives(self, x):
    return noisy_function(x)

def solve_problem(problem, options):
  # create optimizer with default parameters
  optimizer = gtopt.Solver()
  #set options
  for option in options:
    optimizer.options.set(*option)
  #display problem general info
  print(str(problem))
  # solve problem and get result
  result = optimizer.solve(problem)
  # print general info about result
  print(str(result))
  # print the list of all answers:
  print("Optimal:")
  result.optimal.pprint(["x", "f"])
  return (result.optimal.x[0][0], result.optimal.f[0][0]), numpy.array(problem.history)

def main():
  print("Find the minimum of function")
  print("=" * 60)
  results = {}
  hist = {}
  for diff_type in ["Framed", "Numerical"]:
    results[diff_type], hist[diff_type] = solve_problem(MultimodalProblemExample(), [("GTOpt/DiffType", diff_type)])
  plot(results, hist)
  print("=" * 60)
  print("Finished!")
  assert(results["Framed"][1] < results["Numerical"][1])


def plot(results, hist):
  plt.clf()
  fig = plt.figure(1)
  ax = fig.add_subplot(111)
  title = "DFO"
  colors = {"Framed": ["ro", "r+"], "Numerical": ["bD", "b+"]}
  x = numpy.linspace(-0.1 , 1., 500)
  y = noisy_function(x)
  ax.plot(x,y, "c-")
  for n in ["Framed", "Numerical"]:
    ax.plot(results[n][0], results[n][1], colors[n][0], label="%s Solution"%n, markersize=10)
    ax.plot(hist[n][:, 0], hist[n][:, 1], colors[n][1], label="%s Optimization steps"%n, markersize=10)
  ax.legend(loc = "best")
  plt.title(title)
  ax.set_xlabel("x")
  ax.set_ylabel("f")
  ax.axis([-0.1, 1., -0.1, 1.])
  ax.grid(True)
  plt.title(title)
  fig.savefig(title)
  print("Plots are saved to %s.png" % os.path.join(os.getcwd(), title))
  if 'SUPPRESS_SHOW_PLOTS' not in os.environ:
    plt.show()

if __name__ == "__main__":
  main()

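A quick standalone illustration (not part of the example) of why plain finite differences struggle on noisy_function: the noise amplitude is only 0.03 * x^2 in the function values, but its contribution to the derivative, 24 * x^2 * cos(800 * x), dwarfs the smooth trend 2 * x:

import numpy

def noisy_function(x):
  return x * x * (1 + 0.03 * numpy.sin(800 * x))

# analytic derivative: f'(x) = 2*x*(1 + 0.03*sin(800*x)) + 24*x**2*cos(800*x)
x, h = 0.9, 1e-6
print((noisy_function(x + h) - noisy_function(x - h)) / (2 * h))  # dominated by the oscillating term
print(2 * x)                                                      # slope of the smooth part x**2

This is consistent with the assertion at the end of main(), which expects the "Framed" result to reach a lower objective value than the "Numerical" one.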
13.1.7. example_gtopt_co.py

#
# coding: utf-8
# Copyright (C) pSeven SAS, 2010-present
#

'''
Example demonstrates usage of GTOpt for multidisciplinary optimization.
It solves Sellar problem using Collaborative Optimization.
Reference: Sellar, R. S., Batill, S. M., and Renaud, J. E., "Response Surface Based, Concurrent Subspace Optimization for Multidisciplinary System Design", Proceedings of the 34th AIAA Aerospace Sciences Meeting and Exhibit, Reno, NV, January 1996.
Also see: http://openmdao.org/docs/mdao/co.html
'''

from da.p7core import gtopt
from da.p7core import loggers

import sys
import os
import matplotlib.pyplot as plt
import numpy as np
import datetime

def calculate_residuals(targets, local_vars):
  """
  Calculate distance between targets and their local copies in the subsystems
  """
  residual = 0.0
  for (name, value) in local_vars.items():
    residual += (targets[name + '_t'] - value)**2
  return residual

class SystemLevelExampleProblem(gtopt.ProblemConstrained):
  """
  System level problem
  """
  def __init__(self):
    #counters for evaluations
    self.obj_evals = 0
    self.con_evals = 0
    #subsystem problems instances
    self.sub1problem = Subsystem1ExampleProblem(None)
    self.sub2problem = Subsystem2ExampleProblem(None)

  def prepare_problem(self):
    #history
    self.enable_history()
    #all system level variables are targets
    self.add_variable((-10.0, 10.0), None, 'z1_t')
    self.add_variable((0.0, 10.0), None, 'z2_t')
    self.add_variable((0.0, 10.0), None, 'x1_t')
    self.add_variable((3.16, 10.0), None, 'y1_t')
    self.add_variable((-10.0, 24.0), None, 'y2_t')
    #system level objective
    self.add_objective('obj')
    #feasibility constraints
    self.add_constraint((None, 0.0))
    self.add_constraint((None, 0.0))

  def define_objectives(self, v):
    print('System level: objective evaluation #%d' % self.obj_evals)
    v = dict(zip(self.variables_names(), v))
    #calculate objectives
    objective = v['x1_t']**2 + v['z2_t'] + v['y1_t'] + np.exp(-v['y2_t'])
    self.obj_evals += 1
    return objective

  def define_constraints(self, x):
    print('=' * 60)
    print('System level: constraints evaluation #%d' % self.con_evals)
    #use numerical differentiation instead of framed gradients
    options = [('GTOpt/DiffType', 'Numerical')]
    #update values of targets
    self.sub1problem.targets = dict(zip(self.variables_names(), x))
    self.sub2problem.targets = dict(zip(self.variables_names(), x))
    #run subsystems optimization
    #values of feasibility constraints are optimal subsystems objectives
    c = [optimize_problem(problem, options, loggers.LogLevel.WARN) for problem in [self.sub1problem, self.sub2problem]]
    #update counter
    self.con_evals += 1
    print('=' * 60)
    return c

class Subsystem1ExampleProblem(gtopt.ProblemUnconstrained):
  """
  Subsystem 1 problem:
  Local variable: x1
  Shared variables: z1, z2
  Coupling variable: y2
  """
  def __init__(self, targets):
    self.targets = targets
    self.evals = 0

  def prepare_problem(self):
    self.add_variable((-10.0, 10.0), None, 'z1')
    self.add_variable((0.0, 10.0), None, 'z2')
    self.add_variable((0.0, 10.0), None, 'x1')
    self.add_variable((-10.0, 24.0), None, 'y2')
    self.add_objective('obj')

  def calculate(self, v):
    #calculate response
    v = dict(zip(self.variables_names(), v))
    return v['z1']**2 + v['z2'] + v['x1'] - 0.2 * v['y2']

  def define_objectives(self, x):
    local_vars = dict(zip(self.variables_names(), x))
    local_vars['y1'] = self.calculate(x)
    self.evals += 1
    return calculate_residuals(self.targets, local_vars)

class Subsystem2ExampleProblem(gtopt.ProblemUnconstrained):
  """
  Subsystem 2 problem:
  Local variables: none
  Shared variables: z1, z2
  Coupling variable: y1
  """
  def __init__(self, targets):
    self.targets = targets
    self.evals = 0

  def prepare_problem(self):
    self.add_variable((-10.0, 10.0), None, 'z1')
    self.add_variable((0.0, 10.0), None, 'z2')
    self.add_variable((3.16, 10.0), None, 'y1')
    self.add_objective('obj')

  def calculate(self, v):
    #calculate response
    v = dict(zip(self.variables_names(), v))
    return np.sqrt(v['y1']) + v['z1'] + v['z2']

  def define_objectives(self, x):
    local_vars = dict(zip(self.variables_names(), x))
    local_vars['y2'] = self.calculate(x)
    self.evals += 1
    return calculate_residuals(self.targets, local_vars)

def optimize_problem(problem, options, level):
  """
  Run optimization of a given problem and return the optimal objective value (a single-objective problem is assumed)
  """
  print('Optimizing problem %s' % problem.__class__.__name__)
  optimizer = gtopt.Solver()
  optimizer.set_logger(loggers.StreamLogger(sys.stdout, level))
  for option in options:
    optimizer.options.set(*option)
  result = optimizer.solve(problem)
  print('Problem %s was optimized successfully. Number of evaluations: %d' % (problem.__class__.__name__, problem.evals))
  print('Optimal point:')
  print('Variables: %s' % list(zip(result.names.x, result.optimal.x[0])))
  print('Objectives: %s' % list(zip(result.names.f, result.optimal.f[0])))
  return result.optimal.f[0][0]

def optimize_problem_multi(problem, options, level, output_queue):
  """
  Optimize problem and put solution to queue
  """
  output_queue.put(optimize_problem(problem, options, level))

def main():
  #measure time
  time_start = datetime.datetime.now()
  #create optimizer
  optimizer = gtopt.Solver()
  #set logger
  optimizer.set_logger(loggers.StreamLogger())
  #choose strategy for estimating derivatives
  optimizer.options.set('GTOpt/DiffType', 'Numerical')
  #create problem
  problem = SystemLevelExampleProblem()
  #solve problem
  result = optimizer.solve(problem)
  time_stop = datetime.datetime.now()
  #print various information
  print(str(result))
  print('Variables: %s' % list(zip(result.names.x, result.optimal.x[0])))
  print('Objective: %s' % result.optimal.f[0][0])
  print('Constraints: %s' % list(zip(result.names.c, result.optimal.c[0])))
  print('System level evaluations of objective: %s' % problem.obj_evals)
  print('System level evaluations of constraints: %s' % problem.con_evals)
  print('Total time elapsed: %s' % (time_stop - time_start))

  obj_hist = np.array(problem.history, dtype=float)[:, 5]
  hist = obj_hist[np.where(np.isfinite(obj_hist))]
  plot(result, hist)

def plot(result, hist):
  plt.clf()
  fig = plt.figure(1)
  ax = fig.add_subplot(111)
  x = list(range(len(hist)))
  f = hist
  ax.plot(x, f, 'b-' , label = 'Values during optimization')
  ax.plot(x[-1], result.optimal.f[0][0], 'ro' , label = 'Solution')
  ax.set_xlabel('Iter')
  ax.set_ylabel('f')
  ax.legend(loc = 'best')
  title ='Collaborative optimization'
  plt.title(title)
  fig.savefig(title)
  print('Plots are saved to %s.png' % os.path.join(os.getcwd(), title))
  if 'SUPPRESS_SHOW_PLOTS' not in os.environ:
    plt.show()

if __name__ == '__main__':
  # simple long-run warning
  from time import sleep
  print("WARNING: The execution of this example can be time consuming!")
  print("Depending on the execution environment it can takes minutes to complete.")
  print("Press Ctrl-C in 10 seconds to avoid execution...")
  sleep(10)
  print("Starting example...")

  main()

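A note on how the decomposition above closes the loop: each system-level feasibility constraint is the optimal value of a subsystem problem, and each subsystem objective is the residual sum calculate_residuals(targets, local_vars) = sum_j (target_j - local_j)^2, which is never negative. Requiring this optimal value to be <= 0 at the system level therefore forces the subsystems to reproduce the system-level targets (up to solver tolerance), which is the usual collaborative optimization consistency condition.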
13.1.8. example_gtopt_atc.py

#
# coding: utf-8
# Copyright (C) pSeven SAS, 2010-present
#

'''
Example demonstrates usage of GTOpt for multidisciplinary optimization.
It solves geometric programming problem using Analytical Target Cascading.
Reference: Kim, H.M., Target cascading in optimal system design, 2001, The University of Michigan
(See Chapter 5: Demonstration Studies)

WARNING: The execution of this example can be time consuming!
'''

from da.p7core import gtopt
from da.p7core import loggers

import sys
import math
import datetime
import numpy as np
from pprint import pprint

class SystemLevelExampleProblem(gtopt.ProblemConstrained):
  """
  System level problem:
  Responses: x01, x02
  Local variables: x04, x05, x07
  Linking variables: No
  Responses from lower level: x03, x06
  """
  def __init__(self, l):
    #initialize responses from lower level
    self.l = l
    #counter for number of evaluations
    self.evals = 0

  def prepare_problem(self):
    self.add_variable((1.0e-5, None), None, 'x03')
    self.add_variable((1.0e-5, None), None, 'x04')
    self.add_variable((1.0e-5, None), None, 'x05')
    self.add_variable((1.0e-5, None), None, 'x06')
    self.add_variable((1.0e-5, None), None, 'x07')
    self.add_variable((1.0e-5, None), None, 'x11')
    #target deviation tolerances
    self.add_variable((0.0, None), None, 'eps1')
    self.add_variable((0.0, None), None, 'eps2')
    self.add_variable((0.0, None), None, 'eps3')
    #deviation constraints
    self.add_constraint((None, 0.0), 'eps1_constr')
    self.add_constraint((None, 0.0), 'eps2_constr')
    self.add_constraint((None, 0.0), 'eps3_constr')
    #analysis model constraints
    self.add_constraint((None, 1.0), 'g1')
    self.add_constraint((None, 1.0), 'g2')
    self.add_objective('obj')

  def calculate(self, var):
    #calculate responses
    return {'x01': math.sqrt(var['x03']**2 + var['x04']**(-2) + var['x05']**2), 'x02': math.sqrt(var['x05']**2 + var['x06']**2 + var['x07']**2)}

  def define_objectives(self, var):
    #calculate objective function
    #objective function of the problem is x01^2 + x02^2
    #upper bounds of additional deviation constraints are also to be minimized
    #so, they are appended to the objective with some weights
    var = dict(zip(self.variables_names(), var))
    local = self.calculate(var)
    self.evals += 1
    return local['x01']**2 + local['x02']**2 + 500.0 * (var['eps1'] + var['eps2'] + var['eps3'])

  def define_constraints(self, var):
    var = dict(zip(self.variables_names(), var))
    con = {}
    #calculate deviation constraints
    con['eps1_constr'] = (var['x11'] - self.l['x11_1'])**2 + (var['x11'] - self.l['x11_2'])**2 - var['eps1']
    con['eps2_constr'] = (var['x03'] - self.l['x03'])**2 - var['eps2']
    con['eps3_constr'] = (var['x06'] - self.l['x06'])**2 - var['eps3']
    #calculate analysis model constraints
    con['g1'] = (var['x03']**(-2) + var['x04']**2) / var['x05']**2
    con['g2'] = (var['x05']**2 + var['x06']**(-2)) / var['x07']**2
    return [con[k] for k in self.constraints_names()]

class Subsystem1ExampleProblem(gtopt.ProblemConstrained):
  """
  Subsystem 1 problem:
  Response: x03
  Local variables: x08, x09, x10
  Linking variable: x11
  """
  def __init__(self, targets):
    #targets for responses and linking variables
    self.targets = targets
    #counter for number of evaluations
    self.evals = 0

  def prepare_problem(self):
    for i in range(8, 12):
      self.add_variable((1.0e-5, None), None, 'x%02d' % (i))
    self.add_constraint((None, 1.0), 'g3')
    self.add_constraint((None, 1.0), 'g4')
    self.add_objective()

  def calculate(self, var):
    #calculate response
    return math.sqrt(var['x08']**2 + var['x09']**(-2) + var['x10']**(-2) + var['x11']**2)

  def define_objectives(self, var):
    var = dict(zip(self.variables_names(), var))
    responses = {}
    responses['x03'] = self.calculate(var)
    self.evals += 1
    #calculate deviation
    return (responses['x03'] - self.targets['x03'])**2 + (var['x11'] - self.targets['x11'])**2

  def define_constraints(self, var):
    var = dict(zip(self.variables_names(), var))
    con = {}
    con['g3'] = (var['x08']**2 + var['x09']**2) / var['x11']**2
    con['g4'] = (var['x08']**(-2) + var['x10']**2) / var['x11']**2
    return [con[k] for k in self.constraints_names()]

class Subsystem2ExampleProblem(gtopt.ProblemConstrained):
  """
  Subsystem 2 problem:
  Response: x06
  Local variables: x12, x13, x14
  Linking variable: x11
  """
  def __init__(self, targets):
    #targets for responses and linking variables
    self.targets = targets
    #counter for number of evaluations
    self.evals = 0

  def prepare_problem(self):
    for i in range(11, 15):
      self.add_variable((1.0e-5, None), None, 'x' + str(i))
    self.add_constraint((None, 1.0), 'g5')
    self.add_constraint((None, 1.0), 'g6')
    self.add_objective()

  def calculate(self, var):
    #calculate response
    return math.sqrt(var['x11']**2 + var['x12']**2 + var['x13']**2 + var['x14']**2)

  def define_objectives(self, var):
    var = dict(zip(self.variables_names(), var))
    responses = {}
    responses['x06'] = self.calculate(var)
    self.evals += 1
    #calculate deviation
    return (responses['x06'] - self.targets['x06'])**2 + (var['x11'] - self.targets['x11'])**2

  def define_constraints(self, var):
    var = dict(zip(self.variables_names(), var))
    con = {}
    con['g5'] = (var['x11']**2 + var['x12']**(-2)) / var['x13']**2
    con['g6'] = (var['x11']**2 + var['x12']**2) / var['x14']**2
    return [con[k] for k in self.constraints_names()]

def optimize_problem(problem, options, level):
  """
  Run optimization of a given problem and return first point from the solution
  (for single-objective optimization there is only one point in the solution).
  """
  print('Optimizing problem %s' % problem.__class__.__name__)
  optimizer = gtopt.Solver()
  optimizer.set_logger(loggers.StreamLogger(sys.stdout, level))
  for option in options:
    optimizer.options.set(*option)
  result = optimizer.solve(problem)
  print('Problem %s was optimized successfully. Number of evaluations: %d' % (problem.__class__.__name__, problem.evals))
  problem.evals = 0
  print('Optimal point:')
  print('Variables: %s' % result.optimal.x[0])
  print('Objectives: %s' % result.optimal.f[0])
  print('Constraints: %s' % result.optimal.c[0])
  optimalArray = np.concatenate((result.optimal.x[0], result.optimal.f[0], result.optimal.c[0]))
  return dict(zip(result.names.x + result.names.f + result.names.c, optimalArray))

def optimize_problem_multi(problem, options, level, output_queue):
  """
  Optimize problem and put solution to queue
  """
  output_queue.put(optimize_problem(problem, options, level))

def atc_flow(max_iterations, tolerance):
  #measure time
  time_start = datetime.datetime.now()
  #initial values for variables passed from lower (subsystem) level to upper (system) level
  to_upper = {'x11_1' : 1.0, 'x11_2' : 1.0, 'x03' : 1.0, 'x06' : 1.0}
  #create problem instances
  problem_system = SystemLevelExampleProblem(to_upper)
  problem_subsystem1 = Subsystem1ExampleProblem(None)
  problem_subsystem2 = Subsystem2ExampleProblem(None)
  #numerical differentiation will be used instead of framed gradients
  options = [('GTOpt/DiffType', 'Numerical')]
  history = []
  for i in range(max_iterations):
    print("=" * 60)
    print('Iteration #%d' % i)
    #solve system level problem
    to_lower = optimize_problem(problem_system, options, loggers.LogLevel.WARN)
    history.append(to_lower['obj'])
    print('Check termination criteria')
    if math.fabs(history[i] - history[i-1]) < tolerance and i > 0:
      print('Terminate target cascading')
      break
    #pass optimal values to subsystem level
    problem_subsystem1.targets = to_lower
    problem_subsystem2.targets = to_lower
    #solve subsystem level problems
    ss1_vars, ss2_vars = [optimize_problem(problem, options, loggers.LogLevel.WARN) for problem in [problem_subsystem1, problem_subsystem2]]
    #pass optimal values to system level
    to_upper = {'x11_1' : ss1_vars['x11'], 'x11_2' : ss2_vars['x11'], 'x03' : problem_subsystem1.calculate(ss1_vars), 'x06' : problem_subsystem2.calculate(ss2_vars)}
    problem_system.l = to_upper
    print("=" * 60)
  time_stop = datetime.datetime.now()
  #print various information
  print('History of optimal values:')
  pprint (history)
  optimal_design = problem_system.calculate(to_lower)

  optimal_design.update({k : v for k, v in ss1_vars.items() if k.startswith('x')})
  optimal_design.update({k : v for k, v in ss2_vars.items() if k.startswith('x')})
  optimal_design.update({k : v for k, v in to_lower.items() if k.startswith('x')})

  print('Optimal design: ')
  for k in sorted(optimal_design.keys()):
    print("%s %s" % (k, optimal_design[k]))
  print('Total time elapsed: %s' % (time_stop - time_start))

if __name__ == '__main__':
  # simple long-run warning
  from time import sleep
  print("WARNING: The execution of this example can be time consuming!")
  print("Depending on the execution environment it can takes minutes to complete.")
  print("Press Ctrl-C in 10 seconds to avoid execution...")
  sleep(10)
  print("Starting example...")

  atc_flow(1000, 2.0e-4)


13.1.9. example_gtopt_rdo.py

#
# coding: utf-8
# Copyright (C) pSeven SAS, 2010-present
#

'''
Example demonstrates usage of GTOpt for Robust Optimization.
It solves chance-constrained programming problem:

minimize f
s.t.
Pr{zeta1 * x1 + zeta2 * x2 + zeta3 * x3 + f >= 0} >= 0.9
Pr{eta1 * x1^2 + eta2 * x2^2 + eta3 * x3^2 <= 8} >= 0.8
x1,x2,x3 >= 0

In this example, the common random numbers variance reduction technique is used to solve the problem (i.e. a random generator with a controllable state is required).
'''

from da.p7core import gtopt
from da.p7core import loggers

from pprint import pprint
import random
random.seed(0)

class ExampleDistribution:
  def getNumericalSample(self, quantity):
    out = []
    for i in range(0, quantity):
      ksi = [random.uniform(1.0, 2.0),
              random.normalvariate(1.0, 1.0),
              random.expovariate(1.0),
              random.uniform(2.0, 3.0),
              random.normalvariate(2.0, 1.0),
              random.expovariate(2.0)]
      out.extend(ksi)
    return out
  def getDimension(self):
    return 6


class ChanceConstrainedExampleProblem(gtopt.ProblemConstrained):
  def prepare_problem(self):
    #add one objective
    self.add_objective()
    #add x1,x2,x3 >= 0
    for _ in range(3):
      self.add_variable((0.0, None))
    #add variable without bounds
    self.add_variable((None, None))
    #add chance constraints
    self.add_constraint((0.0, None), hints = {'@GTOpt/ConstraintType' : 'ChanceConstraint', '@GTOpt/ConstraintAlpha' : '0.1'})
    self.add_constraint((None, 8.0), hints = {'@GTOpt/ConstraintType' : 'ChanceConstraint', '@GTOpt/ConstraintAlpha' : '0.2'})
    #add 6 random variables: uniform(1,2), normal(1,1), exp(1), uniform(2,3), normal(2,1), exp(2)
    self.set_stochastic(ExampleDistribution())

  def define_constraints(self, x):
    c = [0] * 2
    zeta = x[4:7]
    c[0] = zeta[0] * x[0] + zeta[1] * x[1] + zeta[2] * x[2] + x[3]
    eta = x[7:10]
    c[1] = eta[0] * x[0] * x[0] + eta[1] * x[1] * x[1] + eta[2] * x[2] * x[2]
    return c

  def define_objectives(self, x):
    return x[3]

def main():
  #create optimizer
  optimizer = gtopt.Solver()
  #set logger
  optimizer.set_logger(loggers.StreamLogger())
  #create problem
  problem = ChanceConstrainedExampleProblem()
  #print information about problem
  print(str(problem))
  #optimize problem
  result = optimizer.solve(problem)
  #print information about the result
  print(str(result))
  #print optimal values
  print('optimal values:')
  result.optimal.pprint(components=['x', 'f', 'c', 'fe', 'ce', 'psi'])

if __name__ == '__main__':
  main()

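In define_objectives() and define_constraints() above, the argument x contains the design variables followed by one realization of the stochastic vector. A commented sketch of the layout assumed by the slices x[4:7] and x[7:10] (the helper is hypothetical):

def split_point(x):
  # x[0:3]  - design variables x1, x2, x3 (bounded below by 0)
  # x[3]    - the unbounded variable f that is minimized
  # x[4:7]  - zeta1, zeta2, zeta3: uniform(1,2), normal(1,1), exp(1)
  # x[7:10] - eta1, eta2, eta3: uniform(2,3), normal(2,1), exp(2)
  return x[0:4], x[4:7], x[7:10]

# The @GTOpt/ConstraintAlpha hints (0.1 and 0.2) appear to correspond to one minus the required
# probabilities from the problem statement (0.9 and 0.8).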
13.1.10. example_gtopt_MOSBO.py

#
# coding: utf-8
# Copyright (C) pSeven SAS, 2010-present
#

"""
Solve a simple problem illustrating the multi-objective surrogate based optimization (MOSBO) capability of GTOpt.
"""

#[0] required imports
from da.p7core import gtopt
from da.p7core import loggers

import numpy
import math
import sys
#[0] end imports

#[1] problem definition
class SBOSampleProblem(gtopt.ProblemUnconstrained):

  def __init__(self, use_sbo):
    self.use_sbo = use_sbo
    self.count = 0

  def prepare_problem(self):
    self.add_variable((0, 1), 0.75)
    self.add_variable((0, 1), 0.75)
    # Initialize objectives.
    # If at least one objective in a multi-objective problem is 'expensive', the MOSBO algorithm will be used.
    self.add_objective(hints={'@GTOpt/EvaluationCostType': 'Expensive' if self.use_sbo else 'Cheap'})
    self.add_objective(hints={'@GTOpt/EvaluationCostType': 'Expensive' if self.use_sbo else 'Cheap'})

  def define_objectives(self, x):
    # ZDT1 test problem
    self.count += 1
    f1 = x[0]
    g = 1.0 + 9.0 * x[1]
    f2 = g * (1.0 - math.sqrt(f1 / g))
    return f1, f2
#[1] end problem definition

#[2] configure optimizer
def optimizer_prepare():
  opt = gtopt.Solver()
  opt.set_logger(loggers.StreamLogger())
  opt.options.set({'GTOpt/LogLevel': 'INFO'})
  return opt
#[2] end configuration

#[3-0] main
def main():
  optimizer = optimizer_prepare()
#[3-1]
  # Solve problem without using MOSBO.
  problem = SBOSampleProblem(False)
  print("\n\nSolving the sample problem without using surrogate based optimization...\n")
  result = optimizer.solve(problem)
  print("\nSolved, wait for SBO version to finish...\n")
#[3-2]
  # Solve the same problem using MOSBO.
  problem_sbo = SBOSampleProblem(True)

  # Use automatic choice of search intensity.
  #optimizer.options.set({"GTOpt/GlobalPhaseIntensity": "auto"})
  print("\n\nSolving the sample problem using surrogate based optimization...\n")
  result_sbo = optimizer.solve(problem_sbo, options={"GTOpt/MaximumExpensiveIterations": 80})
  print("\nSolved.\n\nResults:")
#[3-3]
  # compare results
  print("=" * 60)
  print("Without SBO.\n")
  print("Optimal points:")
  result.optimal.pprint(limit=10)
  print("User function calls: %s" % (problem.count))

  print("=" * 60)
  print("Using SBO.\n")
  print("Optimal points:")
  result_sbo.optimal.pprint(limit=10)
  print("User function calls: %s" % (problem_sbo.count))

  print("=" * 60)
  print("\nFinished!\n")
#[3-4]

if __name__ == '__main__':
  main()

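As the comment in prepare_problem() notes, marking at least one objective as expensive is enough to switch to MOSBO. A sketch of a mixed-cost variant (illustrative, not one of the shipped examples):

import math
from da.p7core import gtopt

class MixedCostSampleProblem(gtopt.ProblemUnconstrained):
  """Hypothetical ZDT1 variant with one cheap and one expensive objective."""

  def prepare_problem(self):
    self.add_variable((0, 1), 0.75)
    self.add_variable((0, 1), 0.75)
    self.add_objective(hints={'@GTOpt/EvaluationCostType': 'Cheap'})      # f1 is cheap to evaluate
    self.add_objective(hints={'@GTOpt/EvaluationCostType': 'Expensive'})  # f2 alone triggers MOSBO

  def define_objectives(self, x):
    f1 = x[0]
    g = 1.0 + 9.0 * x[1]
    return f1, g * (1.0 - math.sqrt(f1 / g))

# usage sketch:
#   result = gtopt.Solver().solve(MixedCostSampleProblem(), options={'GTOpt/MaximumExpensiveIterations': 80})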
13.1.11. example_gtopt_co_parallel.py

Note

Depending on your network environment and license type, this example may sometimes stop with a LicenseError exception. The reason is that it sends frequent license requests which the license server may be unable to process. See section License in Known Issues for details.

#
# coding: utf-8
# Copyright (C) pSeven SAS, 2010-present
#

'''
This example shows a use case for the GTOpt/MaxParallel and GTOpt/ResponsesScalability options.
These options are intended for multiprocessor systems, where they can reduce the overall time
spent on optimization.

GTOpt/MaxParallel tells the GTOpt optimizer to what extent it may parallelize its internal routines.

GTOpt/ResponsesScalability concerns the parallelization available when evaluating the optimized
responses (constraints and/or objectives): it tells the pSeven Core optimizer to form evaluation
batches whose size is a multiple of the desired parallelization. This option, however, is effective
only in expensive and/or stochastic problems. Its correct use assumes that responses for different
designs are calculated independently and can be evaluated simultaneously, while the evaluation of
responses for a single design cannot be efficiently parallelized or has natural parallelization
limits. For example, suppose we have a machine with 2 physical multi-core processors (the constants
below assume 4 cores per processor).

Here we consider a collaborative optimization problem: the system-level optimization problem has
expensive constraints, because evaluating them requires solving other optimization problems. The
optimization subproblems have cheap responses and cannot be parallelized.

We create an optimization problem that can evaluate its constraints in parallel processes using
the Python multiprocessing library, and then pass this information to the pSeven optimizer via
the GTOpt/ResponsesScalability and GTOpt/MaxParallel options.

Note that this example is incompatible with Python 2.5 because it uses the 'multiprocessing'
library, which is available since Python 2.6.
'''
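
# Added illustration (not part of the original example): on the imaginary machine described
# above, the two options are combined in main() as
#
#   GTOpt/ResponsesScalability = PROCESSORS_NUMBER * CORES_NUMBER  (= 8)
#   GTOpt/MaxParallel          = CORES_NUMBER                      (= 4)
#
# so GTOpt forms evaluation batches whose size is a multiple of 8, which keeps the Pool of 8
# constraint-evaluation worker processes busy, while GTOpt's own internal parallelism is
# limited to 4, that is, to the cores of a single physical processor.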
import os
import sys
import numpy

from datetime import datetime
from time import sleep

from da.p7core import gtopt
from da.p7core import loggers

PROCESSORS_NUMBER = 2 # The number of physical processors on our imaginary test machine.
CORES_NUMBER = 4 # The number of cores per physical processor.

def main():
  # Create optimizer object
  optimizer = gtopt.Solver()

  # Set console logger
  optimizer.set_logger(loggers.StreamLogger())

  # Create problem description object.
  problem = SystemLevelExampleProblem()

  # Solve problem and measure the time elapsed
  time_start = datetime.now()
  # Our problem uses all available cores to evaluate in parallel the constraints that are
  # themselves calculated sequentially, while gtopt.Solver cannot effectively parallelize
  # its own routines across different physical processors.
  result = optimizer.solve(problem, options = {'GTOpt/GlobalPhaseIntensity': 0.,
                                               'GTOpt/MaximumExpensiveIterations': 400,
                                               'GTOpt/ResponsesScalability': PROCESSORS_NUMBER * CORES_NUMBER,
                                               'GTOpt/MaxParallel': CORES_NUMBER
                                               })
  time_finish = datetime.now()

  # Print solution and some extras
  print(result)
  print('-'*80)
  if result.optimal.x.size:
    print('Variables:   %s' % ', '.join('%s=%g' % (name, val) for name, val in zip(result.names.x, result.optimal.x[0])))
    print('Objective:   %g' % result.optimal.f[0][0])
    print('Constraints: %s' % ', '.join('%s=%g' % (name, val) for name, val in zip(result.names.c, result.optimal.c[0])))
  else:
    print('Optimal solution is not found.')
  print('The number of system level define_constraints_batch() calls: %d' % problem.con_evals)
  print('Total time elapsed: %s' % (time_finish - time_start))

  # Plot solution
  plot(result, problem.objectives, problem.constraints)

try:
  from multiprocessing import Pool
except ImportError:
  print("ERROR: the 'multiprocessing' library is required for this example.")
  sys.exit(1)

class SystemLevelExampleProblem(gtopt.ProblemConstrained):
  """
  System level problem with extremely expensive constraints
  """
  def prepare_problem(self):
    # Add variables
    self.add_variable((-10.0, 10.0), None, 'z1_t')
    self.add_variable((0.0, 10.0), None, 'z2_t')
    self.add_variable((0.0, 10.0), None, 'x1_t')
    self.add_variable((3.16, 10.0), None, 'y1_t')
    self.add_variable((-10.0, 24.0), None, 'y2_t')

    # Add system level objective
    self.add_objective('obj')

    # Add feasibility constraints
    self.add_constraint((None, 0.0), hints={'@GTOpt/EvaluationCostType': 'Expensive'})
    self.add_constraint((None, 0.0), hints={'@GTOpt/EvaluationCostType': 'Expensive'})

    # Pool of processes for parallel constraints evaluation.
    # Since each particular evaluation is sequential,
    # it is worth using all available cores.
    # You can set self.pool to None or simply comment this line out
    # to switch to the sequential mode.
    self.pool = Pool(PROCESSORS_NUMBER * CORES_NUMBER)

    # We are going to use custom evaluations history.
    self.disable_history()

    # Helper dictionary that maps variable names to their indices.
    self.var_map = dict((var_name, var_index) for var_index, var_name in enumerate(self.variables_names()))

    # Custom history holders.
    self.con_evals = 0 # evaluations counter
    self.objectives = [] # objectives history
    self.constraints = [] # constraints history

  @staticmethod
  def define_constraints(targets):
    """
    Implements constraints evaluation by solving optimization problems.
    """
    constraints = []
    for subproblem_type in (Subsystem1ExampleProblem, Subsystem2ExampleProblem):
      print('Optimizing problem %s' % subproblem_type.__name__)
      sys.stdout.flush()

      subproblem = subproblem_type(targets)
      subproblem_result = gtopt.Solver().solve(subproblem, options={'GTOpt/GlobalPhaseIntensity': 1.,
                                                                  'GTOpt/Techniques': '[RL]',
                                                                  'GTOpt/MaxParallel': 1}) # Let us solve problem in the sequential way
      constraints.append(subproblem_result.optimal.f[0][0])

      print('Problem %s was optimized successfully. Number of evaluations: %d' % (subproblem_type.__name__, subproblem.evals))
      print('Optimal point:')
      print('  Variables: %s' % ', '.join('%s=%g' % (name, val) for name, val in zip(subproblem_result.names.x, subproblem_result.optimal.x[0])))
      print('  Objective: %g' % subproblem_result.optimal.f[0][0])
      sys.stdout.flush()

    return constraints

  def define_objectives_batch(self, v):
    return v[:, self.var_map['x1_t']]**2 + v[:, self.var_map['z2_t']] + v[:, self.var_map['y1_t']] + numpy.exp(-v[:, self.var_map['y2_t']])

  def define_constraints_batch(self, x):
    print('=' * 60)
    print('System level: define_constraints_batch() evaluation #%d' % self.con_evals)
    print('=' * 60)

    targets_list = [dict(zip(self.variables_names(), v)) for v in x]

    if getattr(self, 'pool', None) is not None:
      # Evaluate constraints in parallel.
      # Due to the multiprocessing module limitations in Python 2.x
      # we cannot directly map SystemLevelExampleProblem.define_constraints
      # function and must use module-level proxy function 'evaluate_constraints'.
      constraints = self.pool.map(evaluate_constraints, targets_list)
    else:
      # sequential constraints evaluation
      constraints = [evaluate_constraints(target) for target in targets_list]

    self.constraints += constraints

    # Evaluate the objective for history-keeping purposes. A real-world problem should not do this.
    self.objectives += self.define_objectives_batch(x).tolist()
    self.con_evals += 1 #update counter

    return constraints

def evaluate_constraints(targets):
  """
  Proxy function that redirects constraints evaluation to SystemLevelExampleProblem.define_constraints()
  """
  return SystemLevelExampleProblem.define_constraints(targets)

def calculate_residuals(targets, local_vars):
  """
  Helper function calculating distance between targets and their local copies in the subsystems
  """
  residual = 0.0
  for (name, value) in local_vars.items():
    residual += (targets[name + '_t'] - value)**2
  return residual


class Subsystem1ExampleProblem(gtopt.ProblemUnconstrained):
  """
  Subsystem 1 problem:
  Local variable: x1
  Shared variables: z1, z2
  Coupling variable: y2
  """
  def __init__(self, targets):
    self.targets = targets

  def prepare_problem(self):
    # problem definition
    self.disable_history()
    self.add_variable((-10.0, 10.0), None, 'z1')
    self.add_variable((0.0, 10.0), None, 'z2')
    self.add_variable((0.0, 10.0), None, 'x1')
    self.add_variable((-10.0, 24.0), None, 'y2')
    self.add_objective('obj')

    # Helper dictionary that maps variable names to their indices.
    self.var_map = dict((var_name, var_index) for var_index, var_name in enumerate(self.variables_names()))

    # Helper evaluations counter
    self.evals = 0

  def calculate(self, v):
    # calculate response
    return v[self.var_map['z1']]**2 + v[self.var_map['z2']] + v[self.var_map['x1']] - 0.2 * v[self.var_map['y2']]

  def define_objectives(self, x):
    local_vars = dict(zip(self.variables_names(), x))
    local_vars['y1'] = self.calculate(x)
    self.evals += 1
    return calculate_residuals(self.targets, local_vars)

class Subsystem2ExampleProblem(gtopt.ProblemUnconstrained):
  """
  Subsystem 2 problem:
  Local variables: No
  Shared variables: z1, z2
  Coupling variable: y1
  """
  def __init__(self, targets):
    self.targets = targets

  def prepare_problem(self):
    # problem definition
    self.disable_history()
    self.add_variable((-10.0, 10.0), None, 'z1')
    self.add_variable((0.0, 10.0), None, 'z2')
    self.add_variable((3.16, 10.0), None, 'y1')
    self.add_objective('obj')

    # Helper dictionary that maps variable names to their indices.
    self.var_map = dict((var_name, var_index) for var_index, var_name in enumerate(self.variables_names()))

    # Helper evaluations counter
    self.evals = 0

  def calculate(self, v):
    #calculate response
    return numpy.sqrt(v[self.var_map['y1']]) + v[self.var_map['z1']] + v[self.var_map['z2']]

  def define_objectives(self, x):
    local_vars = dict(zip(self.variables_names(), x))
    local_vars['y2'] = self.calculate(x)
    self.evals += 1
    return calculate_residuals(self.targets, local_vars)

def plot(result, hist_obj, hist_con):
  """
  Helper function for optimization history plotting
  """
  try:
    import matplotlib.pyplot as plt
    plt.clf()
    fig = plt.figure(1)
    ax = fig.add_subplot(111)
    x = list(range(len(hist_obj)))
    f = hist_obj
    c = numpy.array(hist_con).max(axis=1)
    ax.plot(x, f, 'b-' , label = 'Values during optimization')
    ax.plot(x, c, 'r.' , label = 'Cons during optimization')
    if result.optimal.f.size:
      ax.plot(x[-1], result.optimal.f[0][0], 'go' , label = 'Solution')
    ax.set_xlabel('Iter')
    ax.set_ylabel('f')
    ax.legend(loc = 'best')
    title ='Collaborative optimization'
    plt.title(title)

    # Python 2/3 compatibility workaround
    try:
      filename = os.path.join(os.getcwdu(), title)
    except AttributeError:
      filename = os.path.join(os.getcwd(), title)

    fig.savefig(filename)
    print('Plots are saved to %s.png' % filename)

    if 'SUPPRESS_SHOW_PLOTS' not in os.environ:
      plt.show()
  except ImportError:
    print("WARNING: Plotting is disabled because matplotlib library is absent.")

if __name__ == '__main__':
  print("WARNING: The execution of this example can be time consuming!")
  print("Depending on the execution environment it can takes minutes to complete.")
  print("Press Ctrl-C in 10 seconds to avoid execution...")
  sleep(10)
  print("Starting example...")
  main()
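
When running this example on a real machine, the hard-coded PROCESSORS_NUMBER and CORES_NUMBER
constants should be adjusted to the actual hardware. One possible adjustment (a sketch, not part
of the original example) derives the option values from multiprocessing.cpu_count():

  import multiprocessing

  # Total number of logical CPUs reported by the OS; an illustrative replacement for the
  # hard-coded PROCESSORS_NUMBER * CORES_NUMBER product.
  total_cores = multiprocessing.cpu_count()

  options = {
    'GTOpt/ResponsesScalability': total_cores,      # evaluation batches sized in multiples of the pool size
    'GTOpt/MaxParallel': max(1, total_cores // 2),  # e.g. limit internal parallelism to half the cores
  }

Setting the SUPPRESS_SHOW_PLOTS environment variable (checked at the end of plot()) skips the
interactive matplotlib window, which is convenient on headless machines.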