Test Problems
cbo_benchmarks
Constrained, single-objective benchmark problems from the Bayesian optimization literature.
G10 (Problem)
G10 test problem: 8 inputs, 6 constraints.
From Michalewicz and Schoenauer 1996. https://ieeexplore.ieee.org/document/6791784
Source code in opti/problems/cbo_benchmarks.py
class G10(Problem):
    """G10 test problem: 8 inputs, 6 constraints.

    From Michalewicz and Schoenauer 1996. https://ieeexplore.ieee.org/document/6791784
    """

    def __init__(self):
        # Box bounds of the eight decision variables.
        bounds = {
            "x1": [100.0, 10000.0],
            "x2": [1000.0, 10000.0],
            "x3": [1000.0, 10000.0],
            "x4": [10.0, 1000.0],
            "x5": [10.0, 1000.0],
            "x6": [10.0, 1000.0],
            "x7": [10.0, 1000.0],
            "x8": [10.0, 1000.0],
        }
        super().__init__(
            name="G10 (d=8, p=6)",
            inputs=[Continuous(key, domain=dom) for key, dom in bounds.items()],
            outputs=[Continuous("y0")],
            objectives=[Minimize("y0")],
            # Three linear and three nonlinear inequality constraints (g <= 0).
            constraints=[
                LinearInequality(["x4", "x6"], lhs=[0.0025, 0.0025], rhs=1.0),
                LinearInequality(
                    ["x4", "x5", "x7"], lhs=[-0.0025, 0.0025, 0.0025], rhs=1.0
                ),
                LinearInequality(["x5", "x8"], lhs=[-0.01, 0.01], rhs=1.0),
                NonlinearInequality(
                    "100.0 * x1 - x1 * x6 + 833.33252 * x4 - 83333.333"
                ),
                NonlinearInequality("x2 * x4 - x2 * x7 - 1250.0 * x4 + 1250.0 * x5"),
                NonlinearInequality("x3 * x5 - x3 * x8 - 2500.0 * x5 + 1250000.0"),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        """Evaluate the objective y0 = x1 + x2 + x3 row-wise."""
        objective = X.eval("x1 + x2 + x3")
        return pd.DataFrame({"y0": objective}, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        """Return the best known solution together with its objective value."""
        optimum = [
            579.3167,
            1359.943,
            5110.071,
            182.0174,
            295.5985,
            217.9799,
            286.4162,
            395.5979,
            7049.3307,
        ]
        return pd.DataFrame([optimum], columns=self.inputs.names + self.outputs.names)
G4 (Problem)
G4 test problem: 5 inputs, 6 constraints.
From Michalewicz and Schoenauer 1996. https://ieeexplore.ieee.org/document/6791784
Source code in opti/problems/cbo_benchmarks.py
class G4(Problem):
    """G4 test problem: 5 inputs, 6 constraints.

    From Michalewicz and Schoenauer 1996. https://ieeexplore.ieee.org/document/6791784
    """

    def __init__(self):
        # The six constraints form three pairs: each pair bounds one auxiliary
        # expression from above and below (the first expression must lie in
        # [0, 92], the second in [90, 110], the third in [20, 25]).
        super().__init__(
            name="G4 (d=5, p=6)",
            inputs=[
                Continuous("x1", domain=[78.0, 102.0]),
                Continuous("x2", domain=[33.0, 45.0]),
                Continuous("x3", domain=[27.0, 45.0]),
                Continuous("x4", domain=[27.0, 45.0]),
                Continuous("x5", domain=[27.0, 45.0]),
            ],
            outputs=[Continuous("y0")],
            objectives=[Minimize("y0")],
            constraints=[
                NonlinearInequality(
                    "(85.334407 + 0.0056858 * x2 * x5 + 0.0006262 * x1 * x4 - 0.0022053 * x3 * x5) - 92.0"
                ),
                NonlinearInequality(
                    "- (85.334407 + 0.0056858 * x2 * x5 + 0.0006262 * x1 * x4 - 0.0022053 * x3 * x5)"
                ),
                NonlinearInequality(
                    "(80.51249 + 0.0071317 * x2 * x5 + 0.0029955 * x1 * x2 + 0.0021813 * x3**2) - 110.0"
                ),
                NonlinearInequality(
                    "90.0 - (80.51249 + 0.0071317 * x2 * x5 + 0.0029955 * x1 * x2 + 0.0021813 * x3**2)"
                ),
                NonlinearInequality(
                    "(9.300961 + 0.0047026 * x3 * x5 + 0.0012547 * x1 * x3 + 0.0019085 * x3 * x4) - 25.0"
                ),
                NonlinearInequality(
                    "20.0 - (9.300961 + 0.0047026 * x3 * x5 + 0.0012547 * x1 * x3 + 0.0019085 * x3 * x4)"
                ),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        # Quadratic objective evaluated row-wise on the input DataFrame.
        return pd.DataFrame(
            {
                "y0": X.eval(
                    "5.3578547 * x3**2 + 0.8356891 * x1 * x5 + 37.293239 * x1 - 40792.141"
                ),
            },
            index=X.index,
        )

    def get_optima(self) -> pd.DataFrame:
        # Best known solution and its objective value from the literature.
        x = np.array([[78.0, 33.0, 29.995256025682, 45.0, 36.775812905788]])
        y = np.array([[-30665.539]])
        return pd.DataFrame(np.c_[x, y], columns=self.inputs.names + self.outputs.names)
G6 (Problem)
G6 test problem: 2 inputs, 2 constraints.
From Michalewicz and Schoenauer 1996. https://ieeexplore.ieee.org/document/6791784
Source code in opti/problems/cbo_benchmarks.py
class G6(Problem):
    """G6 test problem: 2 inputs, 2 constraints.

    From Michalewicz and Schoenauer 1996. https://ieeexplore.ieee.org/document/6791784
    """

    def __init__(self):
        super().__init__(
            name="G6 (d=2, p=2)",
            inputs=[
                Continuous("x1", domain=[13.5, 14.5]),
                Continuous("x2", domain=[0.5, 1.5]),
            ],
            outputs=[Continuous("y0")],
            objectives=[Minimize("y0")],
            # The feasible region lies between two circles centered at
            # (5, 5) and (6, 5).
            constraints=[
                NonlinearInequality("-(x1 - 5.0)**2 - (x2 - 5.0)**2 + 100.0"),
                NonlinearInequality("(x1 - 6.0)**2 + (x2 - 5.0)**2 - 82.81"),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        """Evaluate the cubic objective row-wise."""
        objective = X.eval("(x1 - 10.0)**3 + (x2 - 20.0)**3")
        return pd.DataFrame({"y0": objective}, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        """Return the best known solution together with its objective value."""
        optimum = [14.095, 0.84296, -6961.81388]
        return pd.DataFrame([optimum], columns=self.inputs.names + self.outputs.names)
G7 (Problem)
G7 test problem: 10 inputs, 8 constraints.
From Michalewicz and Schoenauer 1996. https://ieeexplore.ieee.org/document/6791784
Source code in opti/problems/cbo_benchmarks.py
class G7(Problem):
    """G7 test problem: 10 inputs, 8 constraints.

    From Michalewicz and Schoenauer 1996. https://ieeexplore.ieee.org/document/6791784
    """

    def __init__(self):
        super().__init__(
            name="G7 (d=10, p=8)",
            # All ten variables share the same box [-10, 10].
            inputs=[
                Continuous(f"x{dim + 1}", domain=[-10.0, 10.0]) for dim in range(10)
            ],
            outputs=[Continuous("y0")],
            objectives=[Minimize("y0")],
            # Three linear and five nonlinear inequality constraints (g <= 0).
            constraints=[
                LinearInequality(
                    ["x1", "x2", "x7", "x8"], lhs=[4.0, 5.0, -3.0, 9.0], rhs=105.0
                ),
                LinearInequality(
                    ["x1", "x2", "x7", "x8"], lhs=[10.0, -8.0, -17.0, 2.0], rhs=0
                ),
                LinearInequality(
                    ["x1", "x2", "x9", "x10"], lhs=[-8.0, 2.0, 5.0, -2.0], rhs=12.0
                ),
                NonlinearInequality(
                    "3.0 * (x1 - 2.0)**2 + 4.0 * (x2 - 3.0)**2 + 2.0 * x3**2 - 7.0 * x4 - 120.0"
                ),
                NonlinearInequality(
                    "5.0 * x1**2 + 8.0 * x2 + (x3 - 6.0)**2 - 2.0 * x4 - 40.0"
                ),
                NonlinearInequality(
                    "0.5 * (x1 - 8.0)**2 + 2.0 * (x2 - 4.0)**2 + 3.0 * x5**2 - x6 - 30.0"
                ),
                NonlinearInequality(
                    "x1**2 + 2.0 * (x2 - 2.0)**2 - 2.0 * x1 * x2 + 14.0 * x5 - 6.0 * x6"
                ),
                NonlinearInequality(
                    "- 3.0 * x1 + 6.0 * x2 + 12.0 * (x9 - 8.0)**2 - 7.0 * x10"
                ),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        """Evaluate the quadratic objective row-wise."""
        objective = X.eval(
            "x1**2 + x2**2 + x1 * x2 - 14.0 * x1 - 16.0 * x2 + (x3 - 10.0)**2 + 4.0 * (x4 - 5.0)**2 + (x5 - 3.0)**2 + 2.0 * (x6 - 1.0)**2 + 5.0 * x7**2 + 7.0 * (x8 - 11.0)**2 + 2.0 * (x9 - 10.0)**2 + (x10 - 7.0)**2 + 45.0"
        )
        return pd.DataFrame({"y0": objective}, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        """Return the best known solution together with its objective value."""
        optimum = [
            2.171996,
            2.363683,
            8.773926,
            5.095984,
            0.9906548,
            1.430574,
            1.321644,
            9.828726,
            8.280092,
            8.375927,
            24.3062091,
        ]
        return pd.DataFrame([optimum], columns=self.inputs.names + self.outputs.names)
G8 (Problem)
G8 test problem: 2 inputs, 2 constraints.
From Michalewicz and Schoenauer 1996. https://ieeexplore.ieee.org/document/6791784
Source code in opti/problems/cbo_benchmarks.py
class G8(Problem):
    """G8 test problem: 2 inputs, 2 constraints.

    From Michalewicz and Schoenauer 1996. https://ieeexplore.ieee.org/document/6791784
    """

    def __init__(self):
        super().__init__(
            name="G8 (d=2, p=2)",
            inputs=[
                Continuous("x1", domain=[0.5, 10.0]),
                Continuous("x2", domain=[0.5, 10.0]),
            ],
            outputs=[Continuous("y0")],
            objectives=[Minimize("y0")],
            constraints=[
                NonlinearInequality("x1**2 - x2 + 1.0"),
                NonlinearInequality("1.0 - x1 + (x2 - 4.0)**2"),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        """Evaluate the objective row-wise.

        The eval expression encodes pi as `arccos(-1.0)`.
        """
        objective = X.eval(
            "- (sin(2.0 * arccos(-1.0) * x1)**3 * sin(2.0 * arccos(-1.0) * x2)) / (x1**3 * (x1 + x2))"
        )
        return pd.DataFrame({"y0": objective}, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        """Return the best known solution together with its objective value."""
        optimum = [1.2279713, 4.2453733, -0.095825]
        return pd.DataFrame([optimum], columns=self.inputs.names + self.outputs.names)
G9 (Problem)
G9 test problem: 7 inputs, 4 constraints.
From Michalewicz and Schoenauer 1996. https://ieeexplore.ieee.org/document/6791784
Source code in opti/problems/cbo_benchmarks.py
class G9(Problem):
    """G9 test problem: 7 inputs, 4 constraints.

    From Michalewicz and Schoenauer 1996. https://ieeexplore.ieee.org/document/6791784
    """

    def __init__(self):
        super().__init__(
            name="G9 (d=7, p=4)",
            # All seven variables share the same box [-10, 10].
            inputs=[
                Continuous(f"x{dim + 1}", domain=[-10.0, 10.0]) for dim in range(7)
            ],
            outputs=[Continuous("y0")],
            objectives=[Minimize("y0")],
            constraints=[
                NonlinearInequality(
                    "2.0 * x1**2 + 3.0 * x2**4 + x3 + 4.0 * x4**2 + 5.0 * x5 - 127.0"
                ),
                NonlinearInequality(
                    "7.0 * x1 + 3.0 * x2 + 10.0 * x3**2 + x4 - x5 - 282.0"
                ),
                NonlinearInequality(
                    "23.0 * x1 + x2**2 + 6.0 * x6**2 - 8.0 * x7 - 196.0"
                ),
                NonlinearInequality(
                    "4.0 * x1**2 + x2**2 - 3.0 * x1 * x2 + 2.0 * x3**2 + 5.0 * x6 - 11.0 * x7"
                ),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        """Evaluate the polynomial objective row-wise."""
        objective = X.eval(
            "(x1 - 10.0)**2 + 5.0 * (x2 - 12.0)**2 + x3**4 + 3.0 * (x4 - 11.0)**2 + 10.0 * x5**6 + 7.0 * x6**2 + x7**4 - 4.0 * x6 * x7 - 10.0 * x6 - 8.0 * x7"
        )
        return pd.DataFrame({"y0": objective}, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        """Return the best known solution together with its objective value."""
        optimum = [
            2.330499,
            1.951372,
            -0.4775414,
            4.365726,
            -0.6244870,
            1.038131,
            1.594227,
            680.6300573,
        ]
        return pd.DataFrame([optimum], columns=self.inputs.names + self.outputs.names)
Gardner (Problem)
Gardner test problem: 2 inputs, 1 constraint.
From Gardner et al. 2014. http://proceedings.mlr.press/v32/gardner14.pdf
Source code in opti/problems/cbo_benchmarks.py
class Gardner(Problem):
    """Gardner test problem: 2 inputs, 1 constraint.

    From Gardner et al. 2014. http://proceedings.mlr.press/v32/gardner14.pdf
    """

    def __init__(self):
        super().__init__(
            name="Gardner (d=2, p=1)",
            # Both inputs live on the full circle [0, 2*pi].
            inputs=[
                Continuous("x1", domain=[0, 2.0 * np.pi]),
                Continuous("x2", domain=[0, 2.0 * np.pi]),
            ],
            outputs=[Continuous("y0")],
            objectives=[Minimize("y0")],
            constraints=[NonlinearInequality("sin(x1) * sin(x2) + 0.95")],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        """Evaluate y0 = sin(x1) + x2 row-wise."""
        objective = X.eval("sin(x1) + x2")
        return pd.DataFrame({"y0": objective}, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        """Return the known optimum at x1 = 3*pi/2, x2 = arcsin(0.95)."""
        x2_opt = np.arcsin(0.95)
        optimum = [1.5 * np.pi, x2_opt, x2_opt - 1.0]
        return pd.DataFrame([optimum], columns=self.inputs.names + self.outputs.names)
Gramacy (Problem)
Gramacy test problem: 2 inputs, 2 constraints
From Gramacy et al. 2016. https://arxiv.org/pdf/1403.4890.pdf
Source code in opti/problems/cbo_benchmarks.py
class Gramacy(Problem):
    """Gramacy test problem: 2 inputs, 2 constraints.

    From Gramacy et al. 2016. https://arxiv.org/pdf/1403.4890.pdf
    """

    def __init__(self):
        super().__init__(
            name="Gramacy (d=2, p=2)",
            inputs=[
                Continuous("x1", domain=[0, 1.0]),
                Continuous("x2", domain=[0, 1.0]),
            ],
            outputs=[Continuous("y0")],
            objectives=[Minimize("y0")],
            # The first expression encodes pi as `arccos(-1.0)`.
            constraints=[
                NonlinearInequality(
                    "1.5 - x1 - 2.0 * x2 - 0.5 * sin(2.0 * arccos(-1.0) * (x1**2 - 2.0 * x2))"
                ),
                NonlinearInequality("x1**2 + x2**2 - 1.5"),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        """Evaluate the linear objective y0 = x1 + x2 row-wise."""
        objective = X.eval("x1 + x2")
        return pd.DataFrame({"y0": objective}, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        """Return the best known solution together with its objective value."""
        optimum = [0.1954, 0.4044, 0.5998]
        return pd.DataFrame([optimum], columns=self.inputs.names + self.outputs.names)
PressureVessel (Problem)
Pressure vessel test problem: 4 inputs, 3 constraints.
The 4D pressure vessel design problem aims to minimize the cost of designing a cylindrical vessel subject to 4 constraints. The original problem did not give the bounds for design variables. Here we use the bounds in [1] that contains the best known solution found in [2]. Note that the fourth constraint always holds after bounding the input variables. Therefore, we remove it.
[1] Eriksson and Poloczek 2021. http://proceedings.mlr.press/v130/eriksson21a/eriksson21a.pdf [2] Coello and Mezura-Montes 2002. https://link.springer.com/chapter/10.1007/978-0-85729-345-9_23
Source code in opti/problems/cbo_benchmarks.py
class PressureVessel(Problem):
    """Pressure vessel test problem: 4 inputs, 3 constraints.

    The 4D pressure vessel design problem aims to minimize the cost of designing
    a cylindrical vessel subject to 4 constraints. The original problem did not
    give the bounds for design variables. Here we use the bounds in [1] that
    contains the best known solution found in [2]. Note that the fourth
    constraint always holds after bounding the input variables. Therefore, we
    remove it.

    [1] Eriksson and Poloczek 2021. http://proceedings.mlr.press/v130/eriksson21a/eriksson21a.pdf
    [2] Coello and Mezura-Montes 2002. https://link.springer.com/chapter/10.1007/978-0-85729-345-9_23
    """

    def __init__(self):
        super().__init__(
            name="Pressure vessel (d=4, p=3)",
            inputs=[
                Continuous("x1", domain=[0, 10.0]),
                Continuous("x2", domain=[0, 10.0]),
                Continuous("x3", domain=[10.0, 50.0]),
                Continuous("x4", domain=[150.0, 200.0]),
            ],
            outputs=[Continuous("y0")],
            objectives=[Minimize("y0")],
            # The third expression encodes pi as `arccos(-1.0)`. Expression
            # strings are kept verbatim (including spacing) from the reference
            # implementation.
            constraints=[
                NonlinearInequality("- x1 + 0.0193 * x3"),
                NonlinearInequality("- x2+ 0.00954 * x3"),
                NonlinearInequality(
                    "- arccos(-1.0) * x3**2 * x4 - 4.0 * arccos(-1.0) / 3.0 * x3**3 + 1296000.0"
                ),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        """Evaluate the design-cost objective row-wise."""
        cost = X.eval(
            "0.6224 * x1 * x3 * x4 + 1.7781 * x2 * x3**2 + 3.1661 * x1**2 * x4 + 19.84 * x1**2 * x3"
        )
        return pd.DataFrame({"y0": cost}, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        """Return the best known solution together with its cost."""
        optimum = [0.8125, 0.4375, 42.0984, 176.6368, 6059.715]
        return pd.DataFrame([optimum], columns=self.inputs.names + self.outputs.names)
Sasena (Problem)
Sasena test problem: 2 inputs, 3 constraints.
From Sasena's PhD thesis 2002. https://www.mat.univie.ac.at/~neum/glopt/mss/Sas02.pdf
Source code in opti/problems/cbo_benchmarks.py
class Sasena(Problem):
    """Sasena test problem: 2 inputs, 3 constraints.

    From Sasena's PhD thesis 2002. https://www.mat.univie.ac.at/~neum/glopt/mss/Sas02.pdf
    """

    def __init__(self):
        super().__init__(
            name="Sasena (d=2, p=3)",
            inputs=[
                Continuous("x1", domain=[0, 1.0]),
                Continuous("x2", domain=[0, 1.0]),
            ],
            outputs=[Continuous("y0")],
            objectives=[Minimize("y0")],
            # Two nonlinear and one linear inequality constraint (g <= 0).
            constraints=[
                NonlinearInequality(
                    "((x1 - 3.0)**2 + (x2 + 2.0)**2) * exp(- x2**7) - 12.0"
                ),
                LinearInequality(["x1", "x2"], lhs=[10.0, 1.0], rhs=7.0),
                NonlinearInequality("(x1 - 0.5)**2 + (x2 - 0.5)**2 - 0.2"),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        """Evaluate the concave quadratic objective row-wise."""
        objective = X.eval("-(x1 - 1.0)**2 - (x2 - 0.5)**2")
        return pd.DataFrame({"y0": objective}, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        """Return the best known solution together with its objective value."""
        optimum = [0.2017, 0.8332, -0.7483]
        return pd.DataFrame([optimum], columns=self.inputs.names + self.outputs.names)
SpeedReducer (Problem)
Speed reducer test problem: 7 inputs, 11 constraints.
The goal of the 7D speed reducer problem is to minimize the weight of a speed reducer under 11 mechanical constraints. The third variable is a category variable. However, regarding it as a continuous variable does not change the optimum.
From Coello and Mezura-Montes 2002. https://link.springer.com/chapter/10.1007/978-0-85729-345-9_23
Source code in opti/problems/cbo_benchmarks.py
class SpeedReducer(Problem):
    """Speed reducer test problem: 7 inputs, 11 constraints.

    The goal of the 7D speed reducer problem is to minimize the weight of a
    speed reducer under 11 mechanical constraints. The third variable is a
    category variable. However, regarding it as a continuous variable does not
    change the optimum.

    From Coello and Mezura-Montes 2002. https://link.springer.com/chapter/10.1007/978-0-85729-345-9_23
    """

    def __init__(self):
        # Box bounds of the seven design variables.
        bounds = {
            "x1": [2.6, 3.6],
            "x2": [0.7, 0.8],
            "x3": [17.0, 28.0],
            "x4": [7.3, 8.3],
            "x5": [7.8, 8.3],
            "x6": [2.9, 3.9],
            "x7": [4.9, 5.9],
        }
        super().__init__(
            name="Speed reducer (d=7, p=11)",
            inputs=[Continuous(key, domain=dom) for key, dom in bounds.items()],
            outputs=[Continuous("y0")],
            objectives=[Minimize("y0")],
            constraints=[
                NonlinearInequality("27.0 / (x1 * x2**2 * x3) - 1.0"),
                NonlinearInequality("397.5 / (x1 * x2**2 * x3**2) - 1.0"),
                NonlinearInequality("1.93 * x4**3 / (x2 * x3 * x6**4) - 1.0"),
                NonlinearInequality("1.93 * x5**3 / (x2 * x3 * x7**4) - 1.0"),
                NonlinearInequality(
                    "((745.0 * x4 / (x2 * x3))**2 + 16900000.0)**0.5 / x6**3 - 110.0"
                ),
                NonlinearInequality(
                    "((745.0 * x5 / (x2 * x3))**2 + 157500000.0)**0.5 / x7**3 - 85.0"
                ),
                NonlinearInequality("x2 * x3 - 40.0"),
                NonlinearInequality("- x1 / x2 + 5.0"),
                NonlinearInequality("x1 / x2 - 12.0"),
                NonlinearInequality("(1.5 * x6 + 1.9) / x4 - 1.0"),
                NonlinearInequality("(1.1 * x7 + 1.9) / x5 - 1.0"),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        """Evaluate the weight objective row-wise."""
        weight = X.eval(
            "0.7854 * x1 * x2**2 * (3.3333 * x3**2 + 14.9334 * x3 - 43.0934) - 1.508 * x1 * (x6**2 + x7**2) + 7.4777 * (x6**3 + x7**3) + 0.7854 * (x4 * x6**2 + x5 * x7**2)"
        )
        return pd.DataFrame({"y0": weight}, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        """Return the best known solution together with its objective value."""
        optimum = [3.5, 0.7, 17.0, 7.3, 7.8, 3.350215, 5.286683, 2996.3482]
        return pd.DataFrame([optimum], columns=self.inputs.names + self.outputs.names)
TensionCompression (Problem)
Tension compression test problem: 3 inputs, 4 constraints.
The 3D tension-compression spring design problem aims to minimize the weight of a tension/compression spring under 4 mechanical constraints.
From Coello and Mezura-Montes 2002. https://link.springer.com/chapter/10.1007/978-0-85729-345-9_23
Source code in opti/problems/cbo_benchmarks.py
class TensionCompression(Problem):
    """Tension compression test problem: 3 inputs, 4 constraints.

    The 3D tension-compression spring design problem aims to minimize the
    weight of a tension/compression spring under 4 mechanical constraints.

    From Coello and Mezura-Montes 2002. https://link.springer.com/chapter/10.1007/978-0-85729-345-9_23
    """

    def __init__(self):
        # NOTE(review): unlike the sibling problems, this name carries no
        # "(d=3, p=4)" suffix. Kept as-is since the name may serve as a lookup
        # key elsewhere — confirm before normalizing.
        super().__init__(
            name="Tension compression",
            inputs=[
                Continuous("x1", domain=[2.0, 15.0]),
                Continuous("x2", domain=[0.25, 1.3]),
                Continuous("x3", domain=[0.05, 2.0]),
            ],
            outputs=[Continuous("y0")],
            objectives=[Minimize("y0")],
            constraints=[
                NonlinearInequality("1.0 - x2**3 * x1 / (71785.0 * x3**4)"),
                NonlinearInequality(
                    "(4.0 * x2**2 - x2 * x3) / (12566.0 * x3**3 * (x2 - x3)) + 1.0 / (5108.0 * x3**2) - 1.0"
                ),
                NonlinearInequality("1.0 - 140.45 * x3 / (x1 * x2**2)"),
                NonlinearInequality("(x2 + x3) / 1.5 - 1.0"),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        """Evaluate the spring-weight objective row-wise."""
        weight = X.eval("x3**2 * x2 * (x1 + 2.0)")
        return pd.DataFrame({"y0": weight}, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        """Return the best known solution together with its objective value."""
        optimum = [11.21390736278739, 0.35800478345599, 0.05174250340926, 0.012666]
        return pd.DataFrame([optimum], columns=self.inputs.names + self.outputs.names)
WeldedBeam1 (Problem)
Welded beam test problem: 1 output, 4 inputs, 5 constraints.
In this problem the cost of a welded beam is minimized subject to 5 constraints. The original version of this problem in [1] had 7 constraints, 2 of which could be shrunk into the bounds. Here we use the formulations in [2].
[1] Coello and Mezura-Montes 2002. https://link.springer.com/chapter/10.1007/978-0-85729-345-9_23 [2] Hedar and Fukushima 2006. https://link.springer.com/article/10.1007/s10898-005-3693-z
Source code in opti/problems/cbo_benchmarks.py
class WeldedBeam1(Problem):
    """Welded beam test problem: 1 output, 4 inputs, 5 constraints.

    In this problem the cost of a welded beam is minimized subject to 5 constraints.
    The original version of this problem in [1] had 7 constraints, 2 of which could be
    shrunk into the bounds. Here we use the formulations in [2].

    [1] Coello and Mezura-Montes 2002. https://link.springer.com/chapter/10.1007/978-0-85729-345-9_23
    [2] Hedar and Fukushima 2006. https://link.springer.com/article/10.1007/s10898-005-3693-z
    """

    def __init__(self):
        super().__init__(
            name="Single-objective welded beam problem (d=4, p=5)",
            inputs=[
                Continuous("x1", domain=[0.125, 10.0]),
                Continuous("x2", domain=[0.1, 10.0]),
                Continuous("x3", domain=[0.1, 10.0]),
                Continuous("x4", domain=[0.1, 10.0]),
            ],
            outputs=[Continuous("y0")],
            objectives=[Minimize("y0")],
            constraints=[
                # NOTE(review): the large first expression is the combined
                # weld shear-stress constraint (bounded by 13000) in the
                # formulation of [2] — verify against the paper before editing.
                NonlinearInequality(
                    "((6000.0 / (2.0**0.5 * x1 * x2))**2 + ((6000. * (14.0 + 0.5 * x2) * (0.25 * (x2**2 + (x1 + x3)**2))**0.5) / (2.0 * (0.707 * x1 * x2 * (x2**2 / 12.0 + 0.25 * (x1 + x3)**2))))**2 + x2 * (6000.0 / (2.0**0.5 * x1 * x2)) * ((6000. * (14.0 + 0.5 * x2) * (0.25 * (x2**2 + (x1 + x3)**2))**0.5) / (2.0 * (0.707 * x1 * x2 * (x2**2 / 12.0 + 0.25 * (x1 + x3)**2)))) / (0.25 * (x2**2 + (x1 + x3)**2))**0.5) ** 0.5 - 13000.0"
                ),
                NonlinearInequality("504000.0 / (x3**2 * x4) - 30000.0"),
                # x1 <= x4 (weld thickness cannot exceed the bar thickness).
                LinearInequality(["x1", "x4"], lhs=[1.0, -1.0], rhs=0),
                NonlinearInequality(
                    "6000.0 - 64746.022 * (1.0 - 0.0282346 * x3) * x3 * x4**3"
                ),
                NonlinearInequality("2.1952 / (x3**3 * x4) - 0.25"),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        # Fabrication-cost objective evaluated row-wise.
        return pd.DataFrame(
            {
                "y0": X.eval("1.10471 * x1**2 * x2 + 0.04811 * x3 * x4 * (14.0 + x2)"),
            },
            index=X.index,
        )

    def get_optima(self) -> pd.DataFrame:
        # Best known solution and its cost.
        x = np.array([[0.24435257, 6.2157922, 8.2939046, 0.24435257]])
        y = np.array([[2.381065]])
        return pd.DataFrame(np.c_[x, y], columns=self.inputs.names + self.outputs.names)
datasets
Chemical datasets. These problems contain observed data but don't come with a ground truth.
Alkox (Problem)
Alkoxylation dataset
This dataset reports the biocatalytic oxidation of benzyl alcohol by a copper radical oxidase (AlkOx). The effects of enzyme loading, cocatalyst loading, and pH balance on both initial rate and total conversion were assayed. The dataset includes 104 samples with four parameters and one objective.
Reference
F. Häse, M. Aldeghi, R.J. Hickman, L.M. Roch, M. Christensen, E. Liles, J.E. Hein, A. Aspuru-Guzik. Olympus: a benchmarking framework for noisy optimization and experiment planning. arXiv (2020), 2010.04153. DOI. Obtained from Olympus.
Source code in opti/problems/datasets.py
class Alkox(Problem):
    """Alkoxylation dataset.

    This dataset reports the biocatalytic oxidation of benzyl alcohol by a
    copper radical oxidase (AlkOx). The effects of enzyme loading, cocatalyst
    loading, and pH balance on both initial rate and total conversion were
    assayed. The dataset includes 104 samples with four parameters and one
    objective.

    Reference:
        F. Häse, M. Aldeghi, R.J. Hickman, L.M. Roch, M. Christensen, E. Liles,
        J.E. Hein, A. Aspuru-Guzik. Olympus: a benchmarking framework for noisy
        optimization and experiment planning. arXiv (2020), 2010.04153.
        [DOI](https://doi.org/10.1088/2632-2153/abedc8).
        Obtained from [Olympus](https://github.com/aspuru-guzik-group/olympus).
    """

    def __init__(self):
        process_parameters = [
            Continuous("residence_time", domain=[0.05, 1]),
            Continuous("ratio", domain=[0.5, 10]),
            Continuous("concentration", domain=[2, 8]),
            Continuous("temperature", domain=[6, 8]),
        ]
        super().__init__(
            name="Alkox",
            inputs=process_parameters,
            outputs=[Continuous("conversion")],
            objectives=[Maximize("conversion")],
            data=get_data("alkox.csv"),
        )
BaumgartnerAniline (Problem)
Aniline C-N cross-coupling dataset.
Reference
Baumgartner et al. 2019 - Use of a Droplet Platform To Optimize Pd-Catalyzed C-N Coupling Reactions Promoted by Organic Bases DOI. Data obtained from Summit.
Source code in opti/problems/datasets.py
class BaumgartnerAniline(Problem):
    """Aniline C-N cross-coupling dataset.

    Reference:
        Baumgartner et al. 2019 - Use of a Droplet Platform To Optimize
        Pd-Catalyzed C-N Coupling Reactions Promoted by Organic Bases
        [DOI](https://doi.org/10.1021/acs.oprd.9b00236).
        Data obtained from [Summit](https://github.com/sustainable-processes/summit).
    """

    def __init__(self):
        # Two categorical choices plus three continuous process conditions.
        reaction_conditions = [
            Categorical("catalyst", domain=["tBuXPhos", "tBuBrettPhos", "AlPhos"]),
            Categorical("base", domain=["TEA", "TMG", "BTMG", "DBU"]),
            Continuous("base_equivalents", domain=[1.0, 2.5]),
            Continuous("temperature", domain=[30, 100]),
            Continuous("residence_time", domain=[60, 1800]),
        ]
        super().__init__(
            name="Aniline cross-coupling, Baumgartner 2019",
            inputs=reaction_conditions,
            outputs=[Continuous("yield", domain=[0, 1])],
            objectives=[Maximize("yield")],
            data=get_data("baumgartner_aniline.csv"),
        )
BaumgartnerBenzamide (Problem)
Benzamide C-N cross-coupling dataset.
Reference
Baumgartner et al. 2019 - Use of a Droplet Platform To Optimize Pd-Catalyzed C-N Coupling Reactions Promoted by Organic Bases DOI. Data obtained from Summit.
Source code in opti/problems/datasets.py
class BaumgartnerBenzamide(Problem):
    """Benzamide C-N cross-coupling dataset.

    Reference:
        Baumgartner et al. 2019 - Use of a Droplet Platform To Optimize
        Pd-Catalyzed C-N Coupling Reactions Promoted by Organic Bases
        [DOI](https://doi.org/10.1021/acs.oprd.9b00236).
        Data obtained from [Summit](https://github.com/sustainable-processes/summit).
    """

    def __init__(self):
        # Two categorical choices plus three continuous process conditions.
        reaction_conditions = [
            Categorical("catalyst", domain=["tBuXPhos", "tBuBrettPhos"]),
            Categorical("base", domain=["TMG", "BTMG", "DBU", "MTBD"]),
            Continuous("base_equivalents", domain=[1.0, 2.1]),
            Continuous("temperature", domain=[30, 100]),
            Continuous("residence_time", domain=[60, 1850]),
        ]
        super().__init__(
            name="Benzamide cross-coupling, Baumgartner 2019",
            inputs=reaction_conditions,
            outputs=[Continuous("yield", domain=[0, 1])],
            objectives=[Maximize("yield")],
            data=get_data("baumgartner_benzamide.csv"),
        )
Benzylation (Problem)
Benzylation dataset.
This dataset reports the yield of undesired product (impurity) in an N-benzylation reaction. Four conditions of this reaction performed in a flow reactor can be controlled to minimize the yield of impurity. The dataset includes 73 samples with four parameters and one objective.
Reference
A.M. Schweidtmann, A.D. Clayton, N. Holmes, E. Bradford, R.A. Bourne, A.A. Lapkin. Machine learning meets continuous flow chemistry: Automated optimization towards the Pareto front of multiple objectives. Chem. Eng. J. 352 (2018) 277-282. DOI. Obtained from Olympus.
Source code in opti/problems/datasets.py
class Benzylation(Problem):
    """Benzylation dataset.

    This dataset reports the yield of undesired product (impurity) in an
    N-benzylation reaction. Four conditions of this reaction performed in a
    flow reactor can be controlled to minimize the yield of impurity. The
    dataset includes 73 samples with four parameters and one objective.

    Reference:
        A.M. Schweidtmann, A.D. Clayton, N. Holmes, E. Bradford, R.A. Bourne,
        A.A. Lapkin. Machine learning meets continuous flow chemistry:
        Automated optimization towards the Pareto front of multiple objectives.
        Chem. Eng. J. 352 (2018) 277-282.
        [DOI](https://doi.org/10.1016/j.cej.2018.07.031).
        Obtained from [Olympus](https://github.com/aspuru-guzik-group/olympus).
    """

    def __init__(self):
        process_parameters = [
            Continuous("flow_rate", domain=[0.2, 0.4]),
            Continuous("ratio", domain=[1.0, 5.0]),
            Continuous("solvent", domain=[0.5, 1.0]),
            Continuous("temperature", domain=[110.0, 150.0]),
        ]
        super().__init__(
            name="Benzylation",
            inputs=process_parameters,
            outputs=[Continuous("impurity")],
            objectives=[Minimize("impurity")],
            data=get_data("benzylation.csv"),
        )
Cake (Problem)
Cake recipe optimization with mixed objectives.
Source code in opti/problems/datasets.py
class Cake(Problem):
    """Cake recipe optimization with mixed objectives."""

    def __init__(self):
        # The ingredient list doubles as the variable set of the mixture
        # constraint: all fractions must sum to 1.
        ingredients = [
            "wheat_flour",
            "spelt_flour",
            "sugar",
            "chocolate",
            "nuts",
            "carrot",
        ]
        super().__init__(
            name="Cake",
            inputs=[Continuous(ingredient, domain=[0, 1]) for ingredient in ingredients],
            outputs=[
                Continuous("calories", domain=[300, 600]),
                Continuous("taste", domain=[0, 5]),
                Continuous("browning", domain=[0, 2]),
            ],
            objectives=[
                Minimize("calories"),
                Maximize("taste"),
                CloseToTarget("browning", target=1.4),
            ],
            constraints=[LinearEquality(ingredients, rhs=1)],
            data=get_data("cake.csv"),
        )
Fullerenes (Problem)
Buckminsterfullerene dataset.
This dataset reports the production of o-xylenyl adducts of Buckminsterfullerenes. Three process conditions (temperature, reaction time and ratio of sultine to C60) are varied to maximize the mole fraction of the desired product. Experiments are executed on a three factor fully factorial grid with six levels per factor. The dataset includes 246 samples with three parameters and one objective.
Reference
B.E. Walker, J.H. Bannock, A.M. Nightingale, J.C. deMello. Tuning reaction products by constrained optimisation. React. Chem. Eng., (2017), 2, 785-798. DOI. Obtained from Olympus.
Source code in opti/problems/datasets.py
class Fullerenes(Problem):
    """Buckminsterfullerene dataset.

    This dataset reports the production of o-xylenyl adducts of
    Buckminsterfullerenes. Three process conditions (temperature, reaction time
    and ratio of sultine to C60) are varied to maximize the mole fraction of
    the desired product. Experiments are executed on a three factor fully
    factorial grid with six levels per factor. The dataset includes 246 samples
    with three parameters and one objective.

    Reference:
        B.E. Walker, J.H. Bannock, A.M. Nightingale, J.C. deMello. Tuning
        reaction products by constrained optimisation. React. Chem. Eng.,
        (2017), 2, 785-798.
        [DOI](https://doi.org/10.1039/C7RE00123A).
        Obtained from [Olympus](https://github.com/aspuru-guzik-group/olympus).
    """

    def __init__(self):
        process_parameters = [
            Continuous("reaction_time", domain=[3.0, 31.0]),
            Continuous("sultine", domain=[1.5, 6.0]),
            Continuous("temperature", domain=[100.0, 150.0]),
        ]
        super().__init__(
            name="Fullerenes",
            inputs=process_parameters,
            outputs=[Continuous("product")],
            objectives=[Maximize("product")],
            data=get_data("fullerenes.csv"),
        )
HPLC (Problem)
High-performance liquid chromatography dataset.
This dataset reports the peak response of an automated high-performance liquid chromatography (HPLC) system for varying process parameters. The dataset includes 1,386 samples with six parameters and one objective.
Reference
L.M. Roch, F. Häse, C. Kreisbeck, T. Tamayo-Mendoza, L.P.E. Yunker, J.E. Hein, A. Aspuru-Guzik. ChemOS: an orchestration software to democratize autonomous discovery. (2018) DOI. Obtained from Olympus.
Source code in opti/problems/datasets.py
class HPLC(Problem):
    """High-performance liquid chromatography dataset.

    This dataset reports the peak response of an automated high-performance
    liquid chromatography (HPLC) system for varying process parameters. The
    dataset includes 1,386 samples with six parameters and one objective.

    Reference:
        L.M. Roch, F. Häse, C. Kreisbeck, T. Tamayo-Mendoza, L.P.E. Yunker,
        J.E. Hein, A. Aspuru-Guzik. ChemOS: an orchestration software to
        democratize autonomous discovery. (2018)
        [DOI](https://doi.org/10.26434/chemrxiv.5953606.v1).
        Obtained from [Olympus](https://github.com/aspuru-guzik-group/olympus).
    """

    def __init__(self):
        process_parameters = [
            Continuous("sample_loop", domain=[0.0, 0.08]),
            Continuous("additional_volume", domain=[0.0, 0.06]),
            Continuous("tubing_volume", domain=[0.1, 0.9]),
            Continuous("sample_flow", domain=[0.5, 2.5]),
            Continuous("push_speed", domain=[80.0, 150]),
            Continuous("wait_time", domain=[0.5, 10.0]),
        ]
        super().__init__(
            name="HPLC",
            inputs=process_parameters,
            outputs=[Continuous("peak_area")],
            objectives=[Maximize("peak_area")],
            data=get_data("hplc.csv"),
        )
Photodegradation (Problem)
Photodegradation dataset.
This dataset reports the degradation of polymer blends for organic solar cells under the exposure to light. Individual data points encode the ratios of individual polymers in one blend, along with the measured photodegradation of this blend. The dataset includes 2,080 samples with five parameters and one objective.
Reference
S. Langner, F. Häse, J.D. Perea, T. Stubhan, J. Hauch, L.M. Roch, T. Heumueller, A. Aspuru-Guzik, C.J. Brabec. Beyond Ternary OPV: High-Throughput Experimentation and Self-Driving Laboratories Optimize Multicomponent Systems. Advanced Materials, 2020, 1907801. DOI. Obtained from Olympus.
Source code in opti/problems/datasets.py
class Photodegradation(Problem):
    """Photodegradation dataset.

    This dataset reports the degradation of polymer blends for organic solar
    cells under the exposure to light. Individual data points encode the ratios
    of individual polymers in one blend, along with the measured
    photodegradation of this blend. The dataset includes 2,080 samples with
    five parameters and one objective.

    Reference:
        S. Langner, F. Häse, J.D. Perea, T. Stubhan, J. Hauch, L.M. Roch,
        T. Heumueller, A. Aspuru-Guzik, C.J. Brabec. Beyond Ternary OPV:
        High-Throughput Experimentation and Self-Driving Laboratories Optimize
        Multicomponent Systems. Advanced Materials, 2020, 1907801.
        [DOI](https://doi.org/10.1002/adma.201907801).
        Obtained from [Olympus](https://github.com/aspuru-guzik-group/olympus).
    """

    def __init__(self):
        super().__init__(
            name="Photodegradation",
            inputs=[
                Continuous("PCE10", domain=[0, 1]),
                Continuous("WF3", domain=[0, 1]),
                Continuous("P3HT", domain=[0, 1]),
                Continuous("PCBM", domain=[0, 1]),
                Continuous("oIDTBR", domain=[0, 1]),
            ],
            outputs=[Continuous("degradation")],
            objectives=[Minimize("degradation")],
            constraints=[
                # Blend fractions must sum to 1.
                # NOTE(review): `lhs` is passed as a scalar here while other
                # problems pass a list of coefficients — presumably the API
                # broadcasts; confirm against LinearEquality.
                LinearEquality(
                    ["PCE10", "WF3", "P3HT", "PCBM", "oIDTBR"], rhs=1, lhs=1
                ),
                # At most one of PCE10 and WF3 may be active (nonzero).
                NChooseK(["PCE10", "WF3"], max_active=1),
            ],
            data=get_data("photodegradation.csv"),
        )
ReizmanSuzuki (Problem)
Suzuki-Miyaura cross-coupling optimization.
Each case has a different set of substrates but the same possible catalysts.
Reference
Reizman et al. (2016) Suzuki-Miyaura cross-coupling optimization enabled by automated feedback. Reaction chemistry & engineering, 1(6), 658-666 DOI. Data obtained from Summit.
Source code in opti/problems/datasets.py
class ReizmanSuzuki(Problem):
    """Suzuki-Miyaura cross-coupling optimization.

    Each case has a different set of substrates but the same possible catalysts.

    Reference:
        Reizman et al. (2016) Suzuki-Miyaura cross-coupling optimization enabled by
        automated feedback. Reaction chemistry & engineering, 1(6), 658-666.
        [DOI](https://doi.org/10.1039/C6RE00153J).
        Data obtained from [Summit](https://github.com/sustainable-processes/summit).
    """

    def __init__(self, case=1):
        assert case in (1, 2, 3, 4)
        # one P2 ligand variant plus the seven P1 ligand variants
        catalysts = ["P1-L1", "P2-L1"] + [f"P1-L{i}" for i in range(2, 8)]
        super().__init__(
            name=f"Reizman 2016 - Suzuki Case {case}",
            inputs=[
                Categorical("catalyst", domain=catalysts),
                Continuous("t_res", domain=[60, 600]),
                Continuous("temperature", domain=[30, 110]),
                Continuous("catalyst_loading", domain=[0.496, 2.515]),
            ],
            outputs=[
                Continuous("ton", domain=[0, 100]),
                Continuous("yield", domain=[0, 100]),
            ],
            objectives=[Maximize("ton"), Maximize("yield")],
            data=get_data(f"reizman_suzuki{case}.csv"),
        )
SnAr (Problem)
SnAr reaction optimization.
This dataset reports the e-factor for a nucleophilic aromatic substitution following the SnAr mechanism. Individual data points encode four process parameters for a flow reactor to run the reaction, along with the measured e-factor (defined as the ratio of the mass waste to the mass of product). The dataset includes 67 samples with four parameters and one objective.
Reference
A.M. Schweidtmann, A.D. Clayton, N. Holmes, E. Bradford, R.A. Bourne, A.A. Lapkin. Machine learning meets continuous flow chemistry: Automated optimization towards the Pareto front of multiple objectives. Chem. Eng. J. 352 (2018) 277-282. DOI. Obtained from Olympus.
Source code in opti/problems/datasets.py
class SnAr(Problem):
    """SnAr reaction optimization.

    This dataset reports the e-factor for a nucleophilic aromatic substitution
    following the SnAr mechanism. Each data point encodes four process parameters
    of a flow reactor together with the measured e-factor (the ratio of the mass
    of waste to the mass of product). The dataset includes 67 samples with four
    parameters and one objective.

    Reference:
        A.M. Schweidtmann, A.D. Clayton, N. Holmes, E. Bradford, R.A. Bourne,
        A.A. Lapkin. Machine learning meets continuous flow chemistry: Automated
        optimization towards the Pareto front of multiple objectives.
        Chem. Eng. J. 352 (2018) 277-282.
        [DOI](https://doi.org/10.1016/j.cej.2018.07.031).
        Obtained from [Olympus](https://github.com/aspuru-guzik-group/olympus).
    """

    def __init__(self):
        domains = {
            "residence_time": [0.5, 2.0],
            "ratio": [1.0, 5.0],
            "concentration": [0.1, 0.5],
            "temperature": [60.0, 140.0],
        }
        super().__init__(
            name="SnAr",
            inputs=[Continuous(param, domain=d) for param, d in domains.items()],
            outputs=[Continuous("impurity")],
            objectives=[Minimize("impurity")],
            data=get_data("snar.csv"),
        )
Suzuki (Problem)
Suzuki reaction dataset.
This dataset reports palladium-catalyzed Suzuki cross-coupling between 2-bromophenyltetrazole and an electron-deficient aryl boronate. Four reaction conditions can be controlled to maximise the reaction yield. The dataset includes 247 samples with four parameters and one objective.
Reference
F. Häse, M. Aldeghi, R.J. Hickman, L.M. Roch, M. Christensen, E. Liles, J.E. Hein, A. Aspuru-Guzik. Olympus: a benchmarking framework for noisy optimization and experiment planning. arXiv (2020), 2010.04153. DOI. Obtained from Olympus.
Source code in opti/problems/datasets.py
class Suzuki(Problem):
    """Suzuki reaction dataset.

    This dataset reports a palladium-catalyzed Suzuki cross-coupling between
    2-bromophenyltetrazole and an electron-deficient aryl boronate. Four reaction
    conditions can be controlled to maximise the reaction yield. The dataset
    includes 247 samples with four parameters and one objective.

    Reference:
        F. Häse, M. Aldeghi, R.J. Hickman, L.M. Roch, M. Christensen, E. Liles,
        J.E. Hein, A. Aspuru-Guzik. Olympus: a benchmarking framework for noisy
        optimization and experiment planning. arXiv (2020), 2010.04153.
        [DOI](https://doi.org/10.1088/2632-2153/abedc8).
        Obtained from [Olympus](https://github.com/aspuru-guzik-group/olympus).
    """

    def __init__(self):
        domains = {
            "temperature": [75.0, 90.0],
            "pd_mol": [0.5, 5.0],
            "arbpin": [1.0, 1.8],
            "k3po4": [1.5, 3.0],
        }
        super().__init__(
            name="Suzuki",
            inputs=[Continuous(param, domain=d) for param, d in domains.items()],
            outputs=[Continuous("yield")],
            objectives=[Maximize("yield")],
            data=get_data("suzuki.csv"),
        )
detergent
Detergent (Problem)
Detergent formulation problem.
There are 5 outputs representing the washing performance on different stain types. Each output is modeled as a second degree polynomial. The formulation consists of 5 components. The sixth input is a filler (water) and is factored out and its parameter bounds 0.6 < water < 0.8 result in 2 linear inequality constraints for the other parameters.
Source code in opti/problems/detergent.py
class Detergent(Problem):
    """Detergent formulation problem.

    There are 5 outputs representing the washing performance on different stain types.
    Each output is modeled as a second degree polynomial.
    The formulation consists of 5 components.
    The sixth input is a filler (water) and is factored out and its parameter bounds
    0.6 < water < 0.8 result in 2 linear inequality constraints for the other parameters.
    """

    def __init__(self):
        # coefficients for the 2-order polynomial; generated with
        # base = 3 * np.ones((1, 5))
        # scale = PolynomialFeatures(degree=2).fit_transform(base).T
        # coef = np.random.RandomState(42).normal(scale=scale, size=(len(scale), 5))
        # coef = np.clip(coef, 0, None)
        # Rows correspond to the degree-2 polynomial features of the 5 inputs
        # (bias, linear, quadratic/interaction terms); columns to the 5 outputs.
        self.coef = np.array(
            [
                [0.4967, 0.0, 0.6477, 1.523, 0.0],
                [0.0, 4.7376, 2.3023, 0.0, 1.6277],
                [0.0, 0.0, 0.7259, 0.0, 0.0],
                [0.0, 0.0, 0.9427, 0.0, 0.0],
                [4.3969, 0.0, 0.2026, 0.0, 0.0],
                [0.3328, 0.0, 1.1271, 0.0, 0.0],
                [0.0, 16.6705, 0.0, 0.0, 7.4029],
                [0.0, 1.8798, 0.0, 0.0, 1.7718],
                [6.6462, 1.5423, 0.0, 0.0, 0.0],
                [0.0, 0.0, 9.5141, 3.0926, 0.0],
                [2.9168, 0.0, 0.0, 5.5051, 9.279],
                [8.3815, 0.0, 0.0, 2.9814, 8.7799],
                [0.0, 0.0, 0.0, 0.0, 7.3127],
                [12.2062, 0.0, 9.0318, 3.2547, 0.0],
                [3.2526, 13.8423, 0.0, 14.0818, 0.0],
                [7.3971, 0.7834, 0.0, 0.8258, 0.0],
                [0.0, 3.214, 13.301, 0.0, 0.0],
                [0.0, 8.2386, 2.9588, 0.0, 4.6194],
                [0.8737, 8.7178, 0.0, 0.0, 0.0],
                [0.0, 2.6651, 2.3495, 0.046, 0.0],
                [0.0, 0.0, 0.0, 0.0, 0.0],
            ]
        )
        super().__init__(
            name="Detergent optimization",
            inputs=[
                Continuous("x1", domain=[0.0, 0.2]),
                Continuous("x2", domain=[0.0, 0.3]),
                Continuous("x3", domain=[0.02, 0.2]),
                Continuous("x4", domain=[0.0, 0.06]),
                Continuous("x5", domain=[0.0, 0.04]),
            ],
            outputs=[Continuous(f"y{i+1}", domain=[0, 3]) for i in range(5)],
            objectives=[Maximize(f"y{i+1}") for i in range(5)],
            # 0.2 <= x1 + ... + x5 <= 0.4, i.e. the water filler stays in [0.6, 0.8]
            constraints=[
                LinearInequality(["x1", "x2", "x3", "x4", "x5"], lhs=-1, rhs=-0.2),
                LinearInequality(["x1", "x2", "x3", "x4", "x5"], lhs=1, rhs=0.4),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        """Evaluate the degree-2 polynomial surrogate for each row of X."""
        x = np.atleast_2d(X[self.inputs.names])
        # _poly2 expands one sample into the polynomial feature vector matching self.coef
        xp = np.stack([_poly2(xi) for xi in x], axis=0)
        return pd.DataFrame(xp @ self.coef, columns=self.outputs.names, index=X.index)
Detergent_NChooseKConstraint (Problem)
Variant of the Detergent problem where only 3 of the 5 formulation components are allowed to be active (n-choose-k constraint).
Source code in opti/problems/detergent.py
class Detergent_NChooseKConstraint(Problem):
    """Variant of the Detergent problem where only 3 of the 5 formulation components
    are allowed to be active (n-choose-k constraint)."""

    def __init__(self):
        base = Detergent()
        cardinality = NChooseK(names=base.inputs.names, max_active=3)
        super().__init__(
            name="Detergent optimization with n-choose-k constraint",
            inputs=base.inputs,
            outputs=base.outputs,
            objectives=base.objectives,
            constraints=[*base.constraints, cardinality],
            f=base.f,
        )
Detergent_OutputConstraint (Problem)
Variant of the Detergent problem with an additional output/black-box constraint.
In addition to the 5 washing performances there is a sixth output reflecting the stability of the formulation.
If discrete=True
the stability can only be measured qualitatively (0: not stable, 1: stable).
If discrete=False
the stability can be measured quantitatively with smaller values indicating less stable formulations.
Source code in opti/problems/detergent.py
class Detergent_OutputConstraint(Problem):
    """Variant of the Detergent problem with an additional output/black-box constraint.

    In addition to the 5 washing performances there is a sixth output reflecting the
    stability of the formulation.
    If `discrete=True` the stability can only be measured qualitatively (0: not stable, 1: stable).
    If `discrete=False` the stability can be measured quantitatively with smaller values indicating less stable formulations.
    """

    def __init__(self, discrete=False):
        base = Detergent()

        def f(X):
            Y = base.f(X)
            if discrete:
                # binary stability: 1 iff the component fractions sum to less than 0.3
                Y["stable"] = (X.sum(axis=1) < 0.3).astype(int)
            else:
                # linear stability score: 1 at a component sum of 0.2, 0 at 0.4
                Y["stable"] = (0.4 - X.sum(axis=1)) / 0.2
            return Y

        outputs = list(base.outputs)
        if discrete:
            outputs += [Discrete("stable", domain=[0, 1])]
        else:
            outputs += [Continuous("stable", domain=[0, 1])]
        super().__init__(
            name="Detergent optimization with stability constraint",
            inputs=base.inputs,
            outputs=outputs,
            objectives=base.objectives,
            output_constraints=[Maximize("stable", target=0.5)],
            constraints=base.constraints,
            f=f,
        )
Detergent_TwoOutputConstraints (Problem)
Variant of the Detergent problem with two outputs constraint.
In addition to the 5 washing performances there are two more outputs measuring the formulation stability. The first, stability 1, measures the immediate stability. If not stable, the other properties cannot be measured, except for stability 2. The second, stability 2, measures the long-term stability.
Source code in opti/problems/detergent.py
class Detergent_TwoOutputConstraints(Problem):
    """Variant of the Detergent problem with two outputs constraint.

    In addition to the 5 washing performances there are two more outputs measuring
    the formulation stability.
    The first, stability 1, measures the immediate stability. If not stable, the other
    properties cannot be measured, except for stability 2.
    The second, stability 2, measures the long-term stability.
    """

    def __init__(self):
        base = Detergent()

        def f(X: pd.DataFrame) -> pd.DataFrame:
            Y = base.f(X)
            x = self.get_X(X)
            # immediate stability: all 5 component fractions sum to less than 0.3
            stable1 = (x.sum(axis=1) < 0.3).astype(int)
            # long-term stability: the first 4 component fractions sum to less than 0.25
            stable2 = (x[:, :-1].sum(axis=1) < 0.25).astype(int)
            # Mask all washing performances of immediately unstable samples.
            # This happens before the stability columns are appended, so both
            # stability measurements remain observable.
            Y[stable1 == 0] = np.nan
            Y["stability 1"] = stable1
            Y["stability 2"] = stable2
            return Y

        outputs = list(base.outputs) + [
            Discrete("stability 1", domain=[0, 1]),
            Discrete("stability 2", domain=[0, 1]),
        ]
        super().__init__(
            name="Detergent optimization with two output constraint",
            inputs=base.inputs,
            outputs=outputs,
            objectives=base.objectives,
            output_constraints=[
                Maximize("stability 1", target=0.5),
                Maximize("stability 2", target=0.5),
            ],
            constraints=base.constraints,
            f=f,
        )
mixed
Mixed variables single and multi-objective test problems.
DiscreteFuelInjector (Problem)
Fuel injector test problem, modified to contain an integer variable.
- 4 objectives,
- mixed variables, unconstrained
See
Manson2021, MVMOO: Mixed variable multi-objective optimisation https://doi.org/10.1007/s10898-021-01052-9
Source code in opti/problems/mixed.py
class DiscreteFuelInjector(Problem):
    """Fuel injector test problem, modified to contain an integer variable.

    * 4 objectives,
    * mixed variables, unconstrained

    See:
        Manson2021, MVMOO: Mixed variable multi-objective optimisation
        https://doi.org/10.1007/s10898-021-01052-9
    """

    def __init__(self):
        super().__init__(
            name="Discrete fuel injector test problem",
            inputs=[
                Discrete("x1", [0, 1, 2, 3]),
                Continuous("x2", [-2, 2]),
                Continuous("x3", [-2, 2]),
                Continuous("x4", [-2, 2]),
            ],
            outputs=[Continuous(f"y{i+1}") for i in range(4)],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        """Evaluate the four quadratic/cubic response surfaces.

        Args:
            X: DataFrame with columns x1..x4.

        Returns:
            DataFrame with columns y1..y4, indexed like X.
        """
        x1 = X["x1"].to_numpy().astype(float)
        x2 = X["x2"].to_numpy().astype(float)
        x3 = X["x3"].to_numpy().astype(float)
        x4 = X["x4"].to_numpy().astype(float)
        # the integer level is rescaled before entering the polynomials
        x1 *= 0.2
        y1 = (
            0.692
            + 0.4771 * x1
            - 0.687 * x4
            - 0.08 * x3
            - 0.065 * x2
            - 0.167 * x1**2
            - 0.0129 * x1 * x4
            + 0.0796 * x4**2
            - 0.0634 * x1 * x3
            - 0.0257 * x3 * x4
            + 0.0877 * x3**2
            - 0.0521 * x1 * x2
            + 0.00156 * x2 * x4
            + 0.00198 * x2 * x3
            + 0.0184 * x2**2
        )
        y2 = (
            0.37
            - 0.205 * x1
            + 0.0307 * x4
            + 0.108 * x3
            + 1.019 * x2
            - 0.135 * x1**2
            + 0.0141 * x1 * x4
            + 0.0998 * x4**2
            + 0.208 * x1 * x3
            - 0.0301 * x3 * x4
            - 0.226 * x3**2
            + 0.353 * x1 * x2
            - 0.0497 * x2 * x3
            - 0.423 * x2**2
            + 0.202 * x1**2 * x4
            - 0.281 * x1**2 * x3
            - 0.342 * x1 * x4**2
            - 0.245 * x3 * x4**2
            + 0.281 * x3**2 * x4
            - 0.184 * x1 * x2**2
            + 0.281 * x1 * x3 * x4
        )
        y3 = (
            0.153
            - 0.322 * x1
            + 0.396 * x4
            + 0.424 * x3
            + 0.0226 * x2
            + 0.175 * x1**2
            + 0.0185 * x1 * x4
            - 0.0701 * x4**2
            - 0.251 * x1 * x3
            + 0.179 * x3 * x4
            + 0.015 * x3**2
            + 0.0134 * x1 * x2
            + 0.0296 * x2 * x4
            + 0.0752 * x2 * x3
            + 0.0192 * x2**2
        )
        y4 = (
            0.758
            + 0.358 * x1
            - 0.807 * x4
            + 0.0925 * x3
            - 0.0468 * x2
            - 0.172 * x1**2
            + 0.0106 * x1 * x4
            + 0.0697 * x4**2
            - 0.146 * x1 * x3
            - 0.0416 * x3 * x4
            + 0.102 * x3**2
            - 0.0694 * x1 * x2
            - 0.00503 * x2 * x4
            + 0.0151 * x2 * x3
            + 0.0173 * x2**2
        )
        # fix: preserve the caller's index, consistent with the other problems
        return pd.DataFrame(
            {"y1": y1, "y2": y2, "y3": y3, "y4": y4}, index=X.index
        )
DiscreteVLMOP2 (Problem)
VLMOP2 problem (also known as Fonseca & Fleming), modified to contain a discrete variable.
- 2 minimization objectives
- 1 categorical and n continuous inputs, unconstrained
See
Manson2021, MVMOO: Mixed variable multi-objective optimisation https://doi.org/10.1007/s10898-021-01052-9
Source code in opti/problems/mixed.py
class DiscreteVLMOP2(Problem):
    """VLMOP2 problem (also known as Fonseca & Fleming), modified to contain a discrete variable.

    * 2 minimization objectives
    * 1 categorical and n continuous inputs, unconstrained

    See:
        Manson2021, MVMOO: Mixed variable multi-objective optimisation
        https://doi.org/10.1007/s10898-021-01052-9
    """

    def __init__(self, n_inputs: int = 3):
        assert n_inputs >= 2
        continuous = [Continuous(f"x{i+1}", [-2, 2]) for i in range(1, n_inputs)]
        super().__init__(
            name="Discrete VLMOP2 test problem",
            inputs=[Categorical("x1", ["a", "b"])] + continuous,
            outputs=[Continuous("y1"), Continuous("y2")],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        cat = X[self.inputs.names[0]].values
        cont = X[self.inputs.names[1:]].values
        shift = self.n_inputs**-0.5
        g1 = np.exp(-np.sum((cont - shift) ** 2, axis=1))
        g2 = np.exp(-np.sum((cont + shift) ** 2, axis=1))
        # the categorical level offsets both objectives
        y1 = np.where(cat == "a", 1 - g1, 1.25 - g1)
        y2 = np.where(cat == "a", 1 - g2, 0.75 - g2)
        return pd.DataFrame({"y1": y1, "y2": y2}, index=X.index)
multi
Daechert1 (Problem)
Problem with a non-convex Pareto front.
From Dächert & Teichert 2020, An improved hyperboxing algorithm for calculating a Pareto front representation, https://arxiv.org/abs/2003.14249
The ideal point is [-1.37, -1.61, -4] and the nadir is [0, 0, -1.44].
Source code in opti/problems/multi.py
class Daechert1(Problem):
    """Problem with a non-convex Pareto front.

    From Dächert & Teichert 2020, An improved hyperboxing algorithm for calculating
    a Pareto front representation, https://arxiv.org/abs/2003.14249

    The ideal point is [-1.37, -1.61, -4] and the nadir is [0, 0, -1.44].
    """

    def __init__(self):
        super().__init__(
            name="Daechert-1",
            inputs=[
                Continuous("x1", domain=[0, np.pi]),
                Continuous("x2", domain=[0, 10]),
                Continuous("x3", domain=[1.2, 10]),
            ],
            outputs=[Continuous(f"y{i+1}") for i in range(3)],
            constraints=[NonlinearInequality("- cos(x1) - exp(-x2) + x3")],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        # all three objectives are simple sign-flipped transforms of the inputs
        objectives = {
            "y1": -X["x1"],
            "y2": -X["x2"],
            "y3": -X["x3"] ** 2,
        }
        return pd.DataFrame(objectives)
Daechert2 (Problem)
Unconstrained problem with a Pareto front resembling a comet.
From Dächert & Teichert 2020, An improved hyperboxing algorithm for calculating a Pareto front representation, https://arxiv.org/abs/2003.14249
minimize f1(x) = (1 + x3) (x1^3 x2^2 - 10 x1 - 4 x2) f2(x) = (1 + x3) (x1^3 x2^2 - 10 x1 + 4 x2) f3(x) = 3 (1 + x3) x1^2 s.t. 1 <= x1 <= 3.5 -2 <= x2 <= 2 0 <= x3 <= 1
The ideal point is [-70.19, -70.19, 3] and the nadir is [4, 4, 73.5].
Source code in opti/problems/multi.py
class Daechert2(Problem):
    """Unconstrained problem with a Pareto front resembling a comet.

    From Dächert & Teichert 2020, An improved hyperboxing algorithm for calculating
    a Pareto front representation, https://arxiv.org/abs/2003.14249

    minimize
        f1(x) = (1 + x3) (x1^3 x2^2 - 10 x1 - 4 x2)
        f2(x) = (1 + x3) (x1^3 x2^2 - 10 x1 + 4 x2)
        f3(x) = 3 (1 + x3) x1^2
    s.t.
        1 <= x1 <= 3.5
        -2 <= x2 <= 2
        0 <= x3 <= 1

    The ideal point is [-70.19, -70.19, 3] and the nadir is [4, 4, 73.5].
    """

    def __init__(self):
        domains = {"x1": [1, 3.5], "x2": [-2, 2], "x3": [0, 1]}
        super().__init__(
            name="Daechert-2",
            inputs=[Continuous(k, domain=v) for k, v in domains.items()],
            outputs=[Continuous(f"y{i + 1}") for i in range(3)],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        y1 = X.eval("(1 + x3) * (x1**3 * x2**2 - 10 * x1 - 4 * x2)")
        y2 = X.eval("(1 + x3) * (x1**3 * x2**2 - 10 * x1 + 4 * x2)")
        y3 = X.eval("3 * (1 + x3) * x1**2")
        return pd.DataFrame({"y1": y1, "y2": y2, "y3": y3})
Daechert3 (Problem)
Modification of DTLZ7, with a Pareto consisting of 4 disconnected parts.
From Dächert & Teichert 2020, An improved hyperboxing algorithm for calculating a Pareto front representation, https://arxiv.org/abs/2003.14249
The ideal point is [0, 0, 2.61] and the nadir is [0.86, 0.86, 6].
Source code in opti/problems/multi.py
class Daechert3(Problem):
    """Modification of DTLZ7, with a Pareto front consisting of 4 disconnected parts.

    From Dächert & Teichert 2020, An improved hyperboxing algorithm for calculating
    a Pareto front representation, https://arxiv.org/abs/2003.14249

    The ideal point is [0, 0, 2.61] and the nadir is [0.86, 0.86, 6].
    """

    def __init__(self):
        super().__init__(
            name="Daechert-3",
            inputs=[Continuous(f"x{i+1}", domain=[0, 1]) for i in range(2)],
            outputs=[Continuous(f"y{i+1}") for i in range(3)],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        x = X[self.inputs.names]
        # DTLZ7-style distance term producing the disconnected front
        h = np.sum(x * (1 + np.sin(3 * np.pi * x)), axis=1)
        return pd.DataFrame({"y1": X["x1"], "y2": X["x2"], "y3": 6 - h})
Hyperellipsoid (Problem)
Hyperellipsoid in n dimensions
minimize f_m(x) = x_m m = 1, ... n for x in R^n s.t. sum((x / a)^2) - 1 <= 0
The ideal point is -a and the nadir is 0^n.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
n |
int |
Dimension of the hyperellipsoid. Defaults to 5. |
5 |
a |
list-like |
Half length of principal axes. a = None or a = [1, ...] results in a hypersphere. |
None |
Source code in opti/problems/multi.py
class Hyperellipsoid(Problem):
    """Hyperellipsoid in n dimensions

    minimize
        f_m(x) = x_m    m = 1, ... n
    for
        x in R^n
    s.t.
        sum((x / a)^2) - 1 <= 0

    The ideal point is -a and the nadir is 0^n.

    Args:
        n (int, optional): Dimension of the hyperellipsoid. Defaults to 5.
        a (list-like, optional): Half length of principal axes. a = None or a = [1, ...] results in a hypersphere.
    """

    def __init__(self, n: int = 5, a: Optional[Union[list, np.ndarray]] = None):
        if a is None:
            # unit half-axes -> hypersphere; constraint reduces to sum(x_i^2) <= 1
            a = np.ones(n)
            constr = " + ".join([f"x{i+1}**2" for i in range(n)]) + " - 1"
        else:
            # NOTE(review): squeeze() of a length-1 input yields a 0-d array, which
            # would make len(a) raise — presumably callers pass 1-d of length n; verify
            a = np.array(a).squeeze()
            if len(a) != n:
                raise ValueError("Dimension of half axes doesn't match input dimension")
            # axis-scaled ellipsoid constraint: sum((x_i / a_i)^2) <= 1
            constr = " + ".join([f"(x{i+1}/{a[i]})**2" for i in range(n)]) + " - 1"
        self.a = a
        super().__init__(
            name="Hyperellipsoid",
            inputs=[Continuous(f"x{i+1}", [-a[i], a[i]]) for i in range(n)],
            outputs=[Continuous(f"y{i+1}", [-a[i], a[i]]) for i in range(n)],
            constraints=[NonlinearInequality(constr)],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        # objectives are the inputs themselves, relabeled with the output names
        y = X[self.inputs.names]
        y.columns = self.outputs.names
        return y

    def get_optima(self, n=10) -> pd.DataFrame:
        # sample directions on the positive orthant of the unit sphere, mirror them to
        # the negative orthant (the Pareto surface), and prepend the per-axis minima
        X = opti.sampling.sphere.sample(self.n_inputs, n, positive=True)
        X = np.concatenate([-np.eye(self.n_inputs), -X], axis=0)[:n]
        # scale sphere points by the half-axes to land on the ellipsoid
        Y = self.a * X
        return pd.DataFrame(
            data=np.column_stack([X, Y]),
            columns=self.inputs.names + self.outputs.names,
        )
OmniTest (Problem)
Bi-objective benchmark problem with D inputs and a multi-modal Pareto set.
It has 3^D Pareto subsets in the decision space corresponding to the same Pareto front.
Reference
Deb & Tiwari "Omni-optimizer: A generic evolutionary algorithm for single and multi-objective optimization"
Source code in opti/problems/multi.py
class OmniTest(Problem):
    """Bi-objective benchmark problem with D inputs and a multi-modal Pareto set.

    It has 3^D Pareto subsets in the decision space corresponding to the same
    Pareto front.

    Reference:
        Deb & Tiwari "Omni-optimizer: A generic evolutionary algorithm for single
        and multi-objective optimization"

    Args:
        n_inputs (int, optional): Number of inputs D. Defaults to 2.
    """

    def __init__(self, n_inputs: int = 2):
        super().__init__(
            name="Omni",
            # fix: honor n_inputs instead of the hard-coded range(2)
            inputs=[Continuous(f"x{i+1}", domain=[0, 6]) for i in range(n_inputs)],
            outputs=[Continuous("y1"), Continuous("y2")],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        X = X[self.inputs.names]
        return pd.DataFrame(
            {
                "y1": np.sum(np.sin(np.pi * X), axis=1),
                "y2": np.sum(np.cos(np.pi * X), axis=1),
            }
        )

    def get_optima(self) -> pd.DataFrame:
        n = 11  # points per Pareto subset (there are 3^D subsets)
        # each input has 3 optimal intervals [1, 1.5] shifted by 0, 2, 4
        s = [np.linspace(1, 1.5, n) + 2 * i for i in range(3)]
        C = list(product(*[s] * self.n_inputs))
        # fix: reshape to n_inputs columns instead of the hard-coded 2
        C = np.moveaxis(C, 1, 2).reshape(-1, self.n_inputs)
        X = pd.DataFrame(C, columns=self.inputs.names)
        XY = pd.concat([X, self.f(X)], axis=1)
        # label which of the 3^D Pareto subsets each row belongs to
        XY["_patch"] = np.repeat(np.arange(3**self.n_inputs), n)
        return XY
Poloni (Problem)
Poloni benchmark problem.
Source code in opti/problems/multi.py
class Poloni(Problem):
    """Poloni benchmark problem."""

    def __init__(self):
        super().__init__(
            name="Poloni function",
            inputs=[Continuous(f"x{i+1}", [-np.pi, np.pi]) for i in range(2)],
            outputs=[Continuous("y1"), Continuous("y2")],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        x1, x2 = self.get_X(X).T

        # the two trigonometric forms, evaluated at the reference point (1, 2)
        # and at the sample points
        def b1(u, v):
            return 0.5 * np.sin(u) - 2 * np.cos(u) + np.sin(v) - 1.5 * np.cos(v)

        def b2(u, v):
            return 1.5 * np.sin(u) - np.cos(u) + 2 * np.sin(v) - 0.5 * np.cos(v)

        y1 = 1 + (b1(1, 2) - b1(x1, x2)) ** 2 + (b2(1, 2) - b2(x1, x2)) ** 2
        y2 = (x1 + 3) ** 2 + (x2 + 1) ** 2
        return pd.DataFrame({"y1": y1, "y2": y2}, index=X.index)
Qapi1 (Problem)
Constrained problem from the Qriteria API tests. Note that while the function is convex, the constraints are not.
minimize f1(x) = (x1 - 2)^2 + (x2 - 1)^2 f2(x) = x1^2 + (x2 - 3)^2 for x1 in [0, inf) x2 in (-inf, inf) s.t. 0 <= x1 c1(x) = - x1^2 + x2 <= 0 c2(x) = - x1 - x2 + 2 <= 0
The ideal point is [0, 0] and the nadir is [8, 8].
Source code in opti/problems/multi.py
class Qapi1(Problem):
    """Constrained problem from the Qriteria API tests.

    The objective function is convex, but the constraints are not.

    minimize
        f1(x) = (x1 - 2)^2 + (x2 - 1)^2
        f2(x) = x1^2 + (x2 - 3)^2
    for
        x1 in [0, inf)
        x2 in (-inf, inf)
    s.t.
        0 <= x1
        c1(x) = - x1^2 + x2 <= 0
        c2(x) = - x1 - x2 + 2 <= 0

    The ideal point is [0, 0] and the nadir is [8, 8].
    """

    def __init__(self):
        super().__init__(
            name="Constrained bi-objective problem",
            inputs=[Continuous("x1", [0, 10]), Continuous("x2", [-10, 10])],
            outputs=[Continuous("y1"), Continuous("y2")],
            constraints=[
                NonlinearInequality("x2 - x1**2"),
                NonlinearInequality("2 - x1 - x2"),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        y1 = X.eval("(x1 - 2)**2 + (x2 - 1)**2")
        y2 = X.eval("x1**2 + (x2 - 3)**2")
        return pd.DataFrame({"y1": y1, "y2": y2})
WeldedBeam (Problem)
Design optimization of a welded beam.
This is a bi-objective problem with 4 inputs and 3 (non-)linear inequality constraints. The two objectives are the fabrication cost of the beam and the deflection of the end of the beam under the applied load P. The load P is fixed at 6000 lbs, and the distance L is fixed at 14 inch.
Note that for simplicity the constraint shear stress < 13600 psi is not included.
See https://www.mathworks.com/help/gads/multiobjective-optimization-welded-beam.html
Source code in opti/problems/multi.py
class WeldedBeam(Problem):
    """Design optimization of a welded beam.

    A bi-objective problem with 4 inputs and 3 (non-)linear inequality constraints.
    The two objectives are the fabrication cost of the beam and the deflection of
    the end of the beam under the applied load P. The load P is fixed at 6000 lbs
    and the distance L at 14 inch.
    Note that for simplicity the constraint shear stress < 13600 psi is not included.

    See https://www.mathworks.com/help/gads/multiobjective-optimization-welded-beam.html
    """

    def __init__(self):
        super().__init__(
            name="Welded beam problem",
            inputs=[
                Continuous("h", [0.125, 5]),  # thickness of welds
                Continuous("l", [0.1, 10]),  # length of welds
                Continuous("t", [0.1, 10]),  # height of beam
                Continuous("b", [0.125, 5]),  # width of beam
            ],
            outputs=[Continuous("cost"), Continuous("deflection")],
            constraints=[
                # weld thickness must not exceed beam width: h <= b
                LinearInequality(["h", "b"], lhs=[1, -1], rhs=0),
                # normal stress on the welds must stay below 30000 psi
                NonlinearInequality("6000 * 6 * 14 / b / t**3 - 30000"),
                # buckling load capacity must exceed the 6000 lbs load
                NonlinearInequality(
                    "6000 - 60746.022 * (1 - 0.0282346 * t) * t * b**4"
                ),
            ],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        h, length, t, b = self.get_X(X).T
        cost = 1.10471 * h**2 * length + 0.04811 * t * b * (14 + length)
        deflection = 2.1952 / (b * t**3)
        return pd.DataFrame(
            {"cost": cost, "deflection": deflection}, index=X.index
        )
single
Single objective benchmark problems.
Ackley (Problem)
Ackley benchmark problem.
Source code in opti/problems/single.py
class Ackley(Problem):
    """Ackley benchmark problem."""

    def __init__(self, n_inputs=2):
        super().__init__(
            name="Ackley problem",
            inputs=[Continuous(f"x{i+1}", [-32.768, +32.768]) for i in range(n_inputs)],
            outputs=[Continuous("y")],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        a = 20
        b = 1 / 5
        c = 2 * np.pi
        n = self.n_inputs
        x = self.get_X(X)
        # exponential of the root-mean-square term
        sq_term = -a * np.exp(-b * np.sqrt((1 / n) * np.sum(x**2, axis=-1)))
        # exponential of the mean cosine term
        cos_term = -np.exp((1 / n) * np.sum(np.cos(c * x), axis=-1))
        return pd.DataFrame(
            sq_term + cos_term + a + np.exp(1),
            columns=self.outputs.names,
            index=X.index,
        )

    def get_optima(self) -> pd.DataFrame:
        # the global minimum f = 0 lies at the origin
        cols = self.inputs.names + self.outputs.names
        return pd.DataFrame(np.zeros((1, len(cols))), columns=cols)
Branin (Problem)
The Branin (Branin-Hoo) benchmark problem.
f(x) = a(x2 - b x1^2 + cx1 - r)^2 + s(1 - t) cos(x1) + s a = 1, b = 5.1 / (4 pi^2), c = 5 / pi, r = 6, s = 10 and t = 1 / (8pi)
It has 3 global optima.
Source code in opti/problems/single.py
class Branin(Problem):
    """The Branin (Branin-Hoo) benchmark problem.

    f(x) = a(x2 - b x1^2 + cx1 - r)^2 + s(1 - t) cos(x1) + s
    with a = 1, b = 5.1 / (4 pi^2), c = 5 / pi, r = 6, s = 10 and t = 1 / (8pi).

    It has 3 global optima.
    """

    def __init__(self):
        super().__init__(
            name="Branin function",
            inputs=[Continuous("x1", [-5, 10]), Continuous("x2", [0, 15])],
            outputs=[Continuous("y")],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        x1, x2 = self.get_X(X).T
        b = 5.1 / (4 * np.pi**2)
        c = 5 / np.pi
        s = 10
        t = 1 / (8 * np.pi)
        y = (x2 - b * x1**2 + c * x1 - 6) ** 2 + s * (1 - t) * np.cos(x1) + s
        return pd.DataFrame(y, columns=self.outputs.names, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        # three global minima, all with f = 0.397887
        x = np.array([[-np.pi, 12.275], [np.pi, 2.275], [9.42478, 2.475]])
        y = np.full((3, 1), 0.397887)
        return pd.DataFrame(
            np.hstack([x, y]), columns=self.inputs.names + self.outputs.names
        )
Himmelblau (Problem)
Himmelblau benchmark problem
Source code in opti/problems/single.py
class Himmelblau(Problem):
    """Himmelblau benchmark problem."""

    def __init__(self):
        super().__init__(
            name="Himmelblau function",
            inputs=[Continuous(f"x{i+1}", [-6, 6]) for i in range(2)],
            outputs=[Continuous("y")],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        x0, x1 = self.get_X(X).T
        y = (x0**2 + x1 - 11) ** 2 + (x0 + x1**2 - 7) ** 2
        return pd.DataFrame(y, columns=self.outputs.names, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        # four global minima, all with f = 0
        optima = [
            [3.0, 2.0, 0.0],
            [-2.805118, 3.131312, 0.0],
            [-3.779310, -3.283186, 0.0],
            [3.584428, -1.848126, 0.0],
        ]
        return pd.DataFrame(optima, columns=self.inputs.names + self.outputs.names)
Michalewicz (Problem)
Michalewicz benchmark problem.
The Michalewicz function has d! local minima, and it is multimodal. The parameter m (m=10 is used here) defines the steepness of the valleys and a larger m leads to a more difficult search.
Source code in opti/problems/single.py
class Michalewicz(Problem):
    """Michalewicz benchmark problem.

    The Michalewicz function has d! local minima, and it is multimodal.
    The parameter m (m=10 is used here) defines the steepness of the valleys;
    a larger m leads to a more difficult search.
    """

    def __init__(self, n_inputs: int = 2):
        super().__init__(
            name="Michalewicz function",
            inputs=[Continuous(f"x{i+1}", [0, np.pi]) for i in range(n_inputs)],
            outputs=[Continuous("y")],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        x = self.get_X(X)
        m = 10  # valley steepness
        i = np.arange(1, self.n_inputs + 1)
        terms = np.sin(x) * np.sin(i * x**2 / np.pi) ** (2 * m)
        return pd.DataFrame({"y": -terms.sum(axis=1)}, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        optimum = pd.DataFrame([[2.2, 1.57]], columns=self.inputs.names)
        return pd.concat([optimum, self.f(optimum)], axis=1)
Rastrigin (Problem)
Rastrigin benchmark problem.
Source code in opti/problems/single.py
class Rastrigin(Problem):
    """Rastrigin benchmark problem."""

    def __init__(self, n_inputs=2):
        super().__init__(
            name="Rastrigin function",
            inputs=[Continuous(f"x{i+1}", [-5, 5]) for i in range(n_inputs)],
            outputs=[Continuous("y")],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        x = self.get_X(X)
        amplitude = 10
        contributions = x**2 - amplitude * np.cos(2 * np.pi * x)
        y = amplitude * self.n_inputs + contributions.sum(axis=1)
        return pd.DataFrame(y, columns=self.outputs.names, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        # the global minimum f = 0 lies at the origin
        cols = self.inputs.names + self.outputs.names
        return pd.DataFrame(np.zeros((1, len(cols))), columns=cols)
Rosenbrock (Problem)
Rosenbrock benchmark problem.
Source code in opti/problems/single.py
class Rosenbrock(Problem):
    """Rosenbrock benchmark problem."""

    def __init__(self, n_inputs=2):
        super().__init__(
            name="Rosenbrock function",
            inputs=[Continuous(f"x{i+1}", [-2.048, 2.048]) for i in range(n_inputs)],
            outputs=[Continuous("y")],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        x = self.get_X(X).T
        head, tail = x[:-1], x[1:]  # consecutive coordinate pairs
        y = np.sum(100 * (tail - head**2) ** 2 + (1 - head) ** 2, axis=0)
        return pd.DataFrame(y, columns=self.outputs.names, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        # the global minimum f = 0 lies at (1, ..., 1)
        x = np.ones((1, self.n_inputs))
        return pd.DataFrame(
            np.hstack([x, [[0]]]), columns=self.inputs.names + self.outputs.names
        )
Schwefel (Problem)
Schwefel benchmark problem
Source code in opti/problems/single.py
class Schwefel(Problem):
    """Schwefel benchmark problem"""

    def __init__(self, n_inputs=2):
        super().__init__(
            name="Schwefel function",
            inputs=[Continuous(f"x{i+1}", [-500, 500]) for i in range(n_inputs)],
            outputs=[Continuous("y")],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        x = self.get_X(X)
        contributions = x * np.sin(np.abs(x) ** 0.5)
        y = 418.9829 * self.n_inputs - contributions.sum(axis=1)
        return pd.DataFrame(y, columns=self.outputs.names, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        # the global minimum f = 0 lies at x_i = 420.9687
        x = np.full((1, self.n_inputs), 420.9687)
        return pd.DataFrame(
            np.hstack([x, [[0]]]), columns=self.inputs.names + self.outputs.names
        )
Sphere (Problem)
Sphere benchmark problem.
Source code in opti/problems/single.py
class Sphere(Problem):
    """Sphere benchmark problem."""

    def __init__(self, n_inputs=10):
        super().__init__(
            name="Sphere function",
            inputs=[Continuous(f"x{i+1}", [0, 1]) for i in range(n_inputs)],
            outputs=[Continuous("y", [0, 2])],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        x = self.get_X(X)
        y = ((x - 0.5) ** 2).sum(axis=1)
        return pd.DataFrame(y, columns=self.outputs.names, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        # the global minimum f = 0 lies at the domain center (0.5, ..., 0.5)
        row = np.append(np.full(self.n_inputs, 0.5), 0.0)
        return pd.DataFrame([row], columns=self.inputs.names + self.outputs.names)
ThreeHumpCamel (Problem)
Three-hump camel benchmark problem.
Source code in opti/problems/single.py
class ThreeHumpCamel(Problem):
    """Three-hump camel benchmark problem.

    Two-dimensional test function on [-5, 5]^2; ``get_optima`` reports the
    optimum 0 at the origin.
    """

    def __init__(self):
        super().__init__(
            name="Three-hump camel function",
            inputs=[Continuous(f"x{i+1}", [-5, 5]) for i in range(2)],
            outputs=[Continuous("y")],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        u, v = self.get_X(X).T
        value = 2 * u**2 - 1.05 * u**4 + u**6 / 6 + u * v + v**2
        return pd.DataFrame(value, columns=["y"], index=X.index)

    def get_optima(self) -> pd.DataFrame:
        # Inputs and output are all zero at the global optimum.
        return pd.DataFrame(
            np.zeros((1, 3)), columns=self.inputs.names + self.outputs.names
        )
Zakharov (Problem)
Zakharov benchmark problem.
Source code in opti/problems/single.py
class Zakharov(Problem):
    """Zakharov benchmark problem.

    Defined on [-10, 10]^d with optimum 0 at the origin.
    """

    def __init__(self, n_inputs=2):
        super().__init__(
            name="Zakharov function",
            inputs=[Continuous(f"x{i+1}", [-10, 10]) for i in range(n_inputs)],
            outputs=[Continuous("y")],
        )

    def f(self, X: pd.DataFrame):
        x = self.get_X(X)
        # Weighted linear term: 0.5 * sum_i(i * x_i) with i = 1..d.
        s = 0.5 * np.sum(np.arange(1, self.n_inputs + 1) * x, axis=1)
        values = np.sum(x**2, axis=1) + s**2 + s**4
        return pd.DataFrame(values, columns=self.outputs.names, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        optimum = np.c_[np.zeros((1, self.n_inputs)), [0.0]]
        return pd.DataFrame(optimum, columns=self.inputs.names + self.outputs.names)
Zakharov_Categorical (Problem)
Zakharov problem with one categorical input
Source code in opti/problems/single.py
class Zakharov_Categorical(Problem):
    """Zakharov problem with one categorical input.

    The last input is a categorical switch: for "one" the usual Zakharov
    exponents (2, 2, 4) are used, for "two" they are doubled to (4, 4, 8).
    """

    def __init__(self, n_inputs=3):
        base = Zakharov(n_inputs)
        super().__init__(
            name="Zakharov function with one categorical input",
            inputs=[Continuous(f"x{i}", [-10, 10]) for i in range(n_inputs - 1)]
            + [Categorical("expon_switch", ["one", "two"])],
            outputs=base.outputs,
        )

    def f(self, X: pd.DataFrame):
        # Continuous part: every input except the trailing categorical.
        x_cont = X[self.inputs.names[:-1]].values
        weighted = 0.5 * np.sum(np.arange(1, self.n_inputs) * x_cont, axis=1)
        # Per-row exponents; doubled wherever the switch is "two".
        exponents = np.tile([2.0, 2.0, 4.0], (len(X), 1))
        doubled = X[self.inputs.names[-1]] == "two"
        exponents[doubled, :] *= 2.0
        totals = (
            np.sum(x_cont ** exponents[:, [0]], axis=1)
            + weighted ** exponents[:, 1]
            + weighted ** exponents[:, 2]
        )
        y = np.asarray(totals, dtype=np.float64).ravel()
        return pd.DataFrame(y, columns=self.outputs.names, index=X.index)

    def get_optima(self) -> pd.DataFrame:
        # Optimum at the origin with the switch set to "one".
        row = list(np.zeros(self.n_inputs - 1)) + ["one", 0]
        return pd.DataFrame([row], columns=self.inputs.names + self.outputs.names)
Zakharov_Constrained (Problem)
Zakharov problem with one linear constraint
Source code in opti/problems/single.py
class Zakharov_Constrained(Problem):
    """Zakharov problem with one linear constraint."""

    def __init__(self, n_inputs=5):
        # Keep a handle on the unconstrained problem; its objective and
        # optimum are reused here.
        self.base = Zakharov(n_inputs)
        super().__init__(
            name="Zakharov with one linear constraint",
            inputs=self.base.inputs,
            outputs=self.base.outputs,
            constraints=[LinearInequality(self.base.inputs.names, lhs=1, rhs=10)],
            f=self.base.f,
        )

    def get_optima(self):
        # Delegate to the unconstrained base problem.
        return self.base.get_optima()
Zakharov_NChooseKConstraint (Problem)
Zakharov problem with an n-choose-k constraint
Source code in opti/problems/single.py
class Zakharov_NChooseKConstraint(Problem):
    """Zakharov problem with an n-choose-k constraint."""

    def __init__(self, n_inputs=5, n_max_active=3):
        # Keep a handle on the unconstrained problem; its objective and
        # optimum are reused here.
        self.base = Zakharov(n_inputs)
        super().__init__(
            name="Zakharov with n-choose-k constraint",
            inputs=self.base.inputs,
            outputs=self.base.outputs,
            constraints=[
                NChooseK(names=self.base.inputs.names, max_active=n_max_active)
            ],
            f=self.base.f,
        )

    def get_optima(self):
        # Delegate to the unconstrained base problem.
        return self.base.get_optima()
univariate
Simple 1D problems for assessing probabilistic surrogate models. Note: these problems should be output-noisified, e.g.
import opti
problem = opti.problems.noisify_problem_with_gaussian(
opti.problems.Line1D(),
sigma=0.1
)
Line1D (Problem)
A line.
Source code in opti/problems/univariate.py
class Line1D(Problem):
    """A line."""

    def __init__(self):
        def line(X: pd.DataFrame) -> pd.DataFrame:
            # y = 0.1 * x + 1 over the domain [0, 10].
            return pd.DataFrame({"y": X.eval("0.1 * x + 1")}, index=X.index)

        super().__init__(
            inputs=[Continuous("x", [0, 10])],
            outputs=[Continuous("y", [0, 3])],
            f=line,
            data=pd.concat([_X, line(_X)], axis=1),
        )
Parabola1D (Problem)
A parabola.
Source code in opti/problems/univariate.py
class Parabola1D(Problem):
    """A parabola."""

    def __init__(self):
        def parabola(X: pd.DataFrame) -> pd.DataFrame:
            # Vertex at (5, 1); value 1.625 at the domain edges x=0 and x=10.
            return pd.DataFrame(
                {"y": X.eval("0.025 * (x - 5) ** 2 + 1")}, index=X.index
            )

        super().__init__(
            inputs=[Continuous("x", [0, 10])],
            outputs=[Continuous("y", [0, 3])],
            f=parabola,
            data=pd.concat([_X, parabola(_X)], axis=1),
        )
Sigmoid1D (Problem)
A smooth step at x=5.
Source code in opti/problems/univariate.py
class Sigmoid1D(Problem):
    """A smooth step at x=5."""

    def __init__(self):
        def sigmoid(X: pd.DataFrame) -> pd.DataFrame:
            # Logistic step centered at x=5, shifted to lie between 1 and 2.
            return pd.DataFrame(
                {"y": X.eval("1 / (1 + exp(-2 * (x - 5))) + 1")}, index=X.index
            )

        super().__init__(
            inputs=[Continuous("x", [0, 10])],
            outputs=[Continuous("y", [0, 3])],
            f=sigmoid,
            data=pd.concat([_X, sigmoid(_X)], axis=1),
        )
Sinus1D (Problem)
A sine function with one full period over the domain.
Source code in opti/problems/univariate.py
class Sinus1D(Problem):
    """A sine function with one full period over the domain."""

    def __init__(self):
        def sine(X: pd.DataFrame) -> pd.DataFrame:
            # One full period over [0, 10], oscillating between 1.5 and 2.5.
            return pd.DataFrame(
                {"y": X.eval("sin(x * 2 * 3.14159 / 10) / 2 + 2")}, index=X.index
            )

        super().__init__(
            inputs=[Continuous("x", [0, 10])],
            outputs=[Continuous("y", [0, 3])],
            f=sine,
            data=pd.concat([_X, sine(_X)], axis=1),
        )
Step1D (Problem)
A discrete step at x=1.1.
Source code in opti/problems/univariate.py
class Step1D(Problem):
    """A discrete step at x=1.1."""

    def __init__(self):
        def step(X: pd.DataFrame) -> pd.DataFrame:
            # 0.0 for x <= 1.1, 1.0 for x > 1.1.
            return pd.DataFrame({"y": X.eval("x > 1.1").astype(float)}, index=X.index)

        super().__init__(
            inputs=[Continuous("x", [0, 10])],
            outputs=[Discrete("y", [0, 1])],
            f=step,
            data=pd.concat([_X, step(_X)], axis=1),
        )
zdt
ZDT benchmark problem suite. All problems are bi-objective, have D continuous inputs and are unconstrained.
Zitzler, Deb, Thiele 2000 - Comparison of Multiobjective Evolutionary Algorithms: Empirical Results http://dx.doi.org/10.1162/106365600568202
ZDT1 (Problem)
ZDT-1 benchmark problem.
Source code in opti/problems/zdt.py
class ZDT1(Problem):
    """ZDT-1 benchmark problem.

    Bi-objective, continuous, unconstrained; Pareto front is y2 = 1 - sqrt(y1).
    """

    def __init__(self, n_inputs=30):
        super().__init__(
            name="ZDT-1 problem",
            inputs=[Continuous(f"x{i+1}", [0, 1]) for i in range(n_inputs)],
            outputs=[Continuous(f"y{i+1}", [0, np.inf]) for i in range(2)],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        # g depends on all inputs except x1; it equals 1 on the Pareto set.
        rest = X[self.inputs.names[1:]].to_numpy()
        g = 1 + 9 / (self.n_inputs - 1) * rest.sum(axis=1)
        y1 = X["x1"].to_numpy()
        y2 = g * (1 - (y1 / g) ** 0.5)
        return pd.DataFrame({"y1": y1, "y2": y2}, index=X.index)

    def get_optima(self, points=100):
        y1 = np.linspace(0, 1, points)
        front = np.stack([y1, 1 - np.sqrt(y1)], axis=1)
        return pd.DataFrame(front, columns=self.outputs.names)
ZDT2 (Problem)
ZDT-2 benchmark problem.
Source code in opti/problems/zdt.py
class ZDT2(Problem):
    """ZDT-2 benchmark problem.

    Bi-objective, continuous, unconstrained; Pareto front is y2 = 1 - y1^2.
    """

    def __init__(self, n_inputs=30):
        super().__init__(
            name="ZDT-2 problem",
            inputs=[Continuous(f"x{i+1}", [0, 1]) for i in range(n_inputs)],
            outputs=[Continuous(f"y{i+1}", [0, np.inf]) for i in range(2)],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        # g depends on all inputs except x1; it equals 1 on the Pareto set.
        rest = X[self.inputs.names[1:]].to_numpy()
        g = 1 + 9 / (self.n_inputs - 1) * rest.sum(axis=1)
        y1 = X["x1"].to_numpy()
        y2 = g * (1 - (y1 / g) ** 2)
        return pd.DataFrame({"y1": y1, "y2": y2}, index=X.index)

    def get_optima(self, points=100):
        y1 = np.linspace(0, 1, points)
        front = np.stack([y1, 1 - np.power(y1, 2)], axis=1)
        return pd.DataFrame(front, columns=self.outputs.names)
ZDT3 (Problem)
ZDT-3 benchmark problem.
Source code in opti/problems/zdt.py
class ZDT3(Problem):
    """ZDT-3 benchmark problem.

    Bi-objective, continuous, unconstrained; the Pareto front is disconnected.
    """

    def __init__(self, n_inputs=30):
        super().__init__(
            name="ZDT-3 problem",
            inputs=[Continuous(f"x{i+1}", [0, 1]) for i in range(n_inputs)],
            outputs=[Continuous(f"y{i+1}", [-np.inf, np.inf]) for i in range(2)],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        # g depends on all inputs except x1; it equals 1 on the Pareto set.
        rest = X[self.inputs.names[1:]].to_numpy()
        g = 1 + 9 / (self.n_inputs - 1) * rest.sum(axis=1)
        y1 = X["x1"].to_numpy()
        y2 = g * (1 - (y1 / g) ** 0.5 - (y1 / g) * np.sin(10 * np.pi * y1))
        return pd.DataFrame({"y1": y1, "y2": y2}, index=X.index)

    def get_optima(self, points=100):
        # y1-ranges of the five disconnected segments of the Pareto front.
        regions = [
            [0, 0.0830015349],
            [0.182228780, 0.2577623634],
            [0.4093136748, 0.4538821041],
            [0.6183967944, 0.6525117038],
            [0.8233317983, 0.8518328654],
        ]
        per_region = int(points / len(regions))
        segments = []
        for lo, hi in regions:
            y1 = np.linspace(lo, hi, per_region)
            y2 = 1 - np.sqrt(y1) - y1 * np.sin(10 * np.pi * y1)
            segments.append(np.stack([y1, y2], axis=1))
        front = np.concatenate(segments, axis=0)
        return pd.DataFrame(front, columns=self.outputs.names)
ZDT4 (Problem)
ZDT-4 benchmark problem.
Source code in opti/problems/zdt.py
class ZDT4(Problem):
    """ZDT-4 benchmark problem.

    Bi-objective, continuous, unconstrained; x1 lives in [0, 1] while the
    remaining inputs live in [-5, 5].
    """

    def __init__(self, n_inputs=10):
        super().__init__(
            name="ZDT-4 problem",
            inputs=[Continuous("x1", [0, 1])]
            + [Continuous(f"x{i+1}", [-5, 5]) for i in range(1, n_inputs)],
            outputs=[Continuous(f"y{i+1}", [0, np.inf]) for i in range(2)],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        x = X[self.inputs.names].to_numpy()
        # Rastrigin-like g over x2..xd, accumulated column by column.
        g = 1 + 10 * (self.n_inputs - 1)
        for col in x[:, 1:].T:
            g = g + col**2 - 10 * np.cos(4.0 * np.pi * col)
        y1 = X["x1"].to_numpy()
        y2 = g * (1 - np.sqrt(y1 / g))
        return pd.DataFrame({"y1": y1, "y2": y2}, index=X.index)

    def get_optima(self, points=100):
        y1 = np.linspace(0, 1, points)
        front = np.stack([y1, 1 - np.sqrt(y1)], axis=1)
        return pd.DataFrame(front, columns=self.outputs.names)
ZDT6 (Problem)
ZDT-6 benchmark problem.
Source code in opti/problems/zdt.py
class ZDT6(Problem):
    """ZDT-6 benchmark problem.

    Bi-objective, continuous, unconstrained; y1 is a non-linear transform of
    x1, so the Pareto-optimal y1 values start at about 0.2808 rather than 0.
    """

    def __init__(self, n_inputs=30):
        super().__init__(
            name="ZDT-6 problem",
            inputs=[Continuous(f"x{i+1}", [0, 1]) for i in range(n_inputs)],
            outputs=[Continuous(f"y{i+1}", [-np.inf, np.inf]) for i in range(2)],
        )

    def f(self, X: pd.DataFrame) -> pd.DataFrame:
        x = X[self.inputs.names].to_numpy()
        d = self.n_inputs
        # g depends on all inputs except x1; it equals 1 on the Pareto set.
        g = 1 + 9 * (np.sum(x[:, 1:], axis=1) / (d - 1)) ** 0.25
        y1 = 1 - np.exp(-4 * x[:, 0]) * (np.sin(6 * np.pi * x[:, 0])) ** 6
        y2 = g * (1 - (y1 / g) ** 2)
        return pd.DataFrame({"y1": y1, "y2": y2}, index=X.index)

    def get_optima(self, points=100):
        # Smallest attainable Pareto-optimal y1 is approximately 0.2807753191.
        y1 = np.linspace(0.2807753191, 1, points)
        front = np.stack([y1, 1 - y1**2], axis=1)
        return pd.DataFrame(front, columns=self.outputs.names)