
Commit df468c6

edit gpr_optimization notebook
1 parent 86ffd8d commit df468c6

2 files changed: +91 −46 lines changed

machine-learning/gpr_optimization.py

Lines changed: 88 additions & 43 deletions
@@ -15,6 +15,44 @@
 app = marimo.App(width="medium")
 
 
+@app.cell(hide_code=True)
+def _():
+    import matplotlib.pyplot as plt
+
+    def apply_codecut_style(ax=None):
+        """
+        Apply CodeCut plot styling to a given Matplotlib Axes.
+        If no Axes is provided, use the current active Axes.
+        """
+        if ax is None:
+            ax = plt.gca()
+
+        # Set global figure facecolor
+        plt.figure(facecolor="#2F2D2E")
+
+        # Background colors
+        fig = ax.figure
+        fig.patch.set_facecolor("#2F2D2E")
+        ax.set_facecolor("#2F2D2E")
+
+        # Line and text colors
+        ax.title.set_color("white")
+        ax.xaxis.label.set_color("white")
+        ax.yaxis.label.set_color("white")
+        ax.tick_params(axis="x", colors="white")
+        ax.tick_params(axis="y", colors="white")
+
+        # Spine colors
+        for spine in ax.spines.values():
+            spine.set_color("white")
+
+        # Optional: turn off grid
+        ax.grid(False)
+
+        return ax
+    return apply_codecut_style, plt
+
+
 @app.cell
 def _():
     import random
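Note: the new apply_codecut_style helper added above is meant to replace plt.show() at the end of each plotting cell. A minimal usage sketch (the sine curve and figure size are illustrative, not from the notebook; apply_codecut_style is assumed to be in scope via the cell's return value):

    import matplotlib.pyplot as plt
    import numpy as np

    x = np.linspace(0, 5, 200)
    plt.figure(figsize=(8, 4))
    plt.plot(x, np.sin(x), color="#72BEFA")  # hypothetical data, CodeCut blue
    plt.title("Styled example")
    plt.xlabel("x")
    plt.ylabel("sin(x)")
    apply_codecut_style()  # recolor the current Axes to the dark CodeCut theme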
@@ -45,17 +83,16 @@ def evaluate_model(model):
 
     print("Best score:", best_score)
     print("Best params:", best_params)
-    return (time,)
+    return
 
 
 @app.cell
 def _():
-    import matplotlib.pyplot as plt
     import numpy as np
     from sklearn.gaussian_process import GaussianProcessRegressor
     from sklearn.gaussian_process.kernels import ConstantKernel as C
     from sklearn.gaussian_process.kernels import Matern, WhiteKernel
-    return C, GaussianProcessRegressor, Matern, WhiteKernel, np, plt
+    return C, GaussianProcessRegressor, Matern, WhiteKernel, np
 
 
 @app.cell
@@ -66,15 +103,20 @@ def black_box_function(x):
 
 
 @app.cell
-def _(black_box_function, np, plt):
+def _(black_box_function, np):
     X = np.linspace(0, 5.5, 1000).reshape(-1, 1)
     y = black_box_function(X)
-    plt.plot(X, y)
+    return X, y
+
+
+@app.cell
+def _(X, apply_codecut_style, plt, y):
+    plt.plot(X, y, "--", color="white")
     plt.title("Black-box function")
     plt.xlabel("x")
     plt.ylabel("f(x)")
-    plt.show()
-    return X, y
+    apply_codecut_style()
+    return
 
 
 @app.cell
@@ -86,22 +128,23 @@ def _(black_box_function, np):
 
 
 @app.cell
-def _(black_box_function, np, time):
-    def train(epochs):
-        time.sleep(0.1)  # Simulate a slow training step
-        return black_box_function(epochs)
-
-    search_space = np.linspace(0, 5, 1000)
-    results = []
-
-    start = time.time()
-    for x in search_space:
-        loss = train(x)
-        results.append((x, loss))
-    end = time.time()
-
-    print("Best x:", search_space[np.argmin([r[1] for r in results])])
-    print("Time taken:", round(end - start, 2), "seconds")
+def _():
+    # def train(epochs):
+    #     time.sleep(0.1)  # Simulate a slow training step
+    #     return black_box_function(epochs)
+
+    # search_space = np.linspace(0, 5, 1000)
+    # results = []
+
+    # start = time.time()
+    # for x in search_space:
+    #     loss = train(x)
+    #     results.append((x, loss))
+    # end = time.time()
+
+    # best_x = search_space[np.argmin([r[1] for r in results])]
+    # print(f"Best x: {best_x}")
+    # print("Time taken:", round(end - start, 2), "seconds")
     return
 
 
@@ -125,21 +168,21 @@ def _(C, GaussianProcessRegressor, Matern, WhiteKernel, X_sample, y_sample):
 
 
 @app.cell
-def _(X, X_sample, gpr, plt, y, y_sample):
+def _(X, X_sample, apply_codecut_style, gpr, plt, y, y_sample):
     # Predict across the domain
     mu, std = gpr.predict(X, return_std=True)
 
     # Plot the result
     plt.figure(figsize=(10, 5))
-    plt.plot(X, y, "k--", label="True function")
-    plt.plot(X, mu, "b-", label="GPR mean")
+    plt.plot(X, y, "--", label="True function", color="white")
+    plt.plot(X, mu, "-", label="GPR mean", color="#72BEFA")
     plt.fill_between(X.ravel(), mu - std, mu + std, alpha=0.3, label="Uncertainty")
-    plt.scatter(X_sample, y_sample, c="red", label="Samples")
+    plt.scatter(X_sample, y_sample, c="#E583B6", label="Samples")
     plt.legend()
     plt.title("Gaussian Process Fit")
     plt.xlabel("x")
     plt.ylabel("f(x)")
-    plt.show()
+    apply_codecut_style()
     return
 
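Note: the cell named in the hunk header above builds the gpr model from the imported kernels but is not changed by this commit, so its body does not appear in the diff. A sketch of a typical construction (the hyperparameter values are assumptions, not taken from the notebook):

    from sklearn.gaussian_process import GaussianProcessRegressor
    from sklearn.gaussian_process.kernels import ConstantKernel as C
    from sklearn.gaussian_process.kernels import Matern, WhiteKernel

    # A smooth Matern kernel plus a small noise term; values are illustrative
    kernel = C(1.0) * Matern(length_scale=1.0, nu=2.5) + WhiteKernel(noise_level=1e-5)
    gpr = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=10)
    gpr.fit(X_sample, y_sample)  # X_sample, y_sample: points sampled from the black-box function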

@@ -163,18 +206,26 @@ def expected_improvement(X, X_sample, y_sample, model, xi=0.01):
 
 
 @app.cell
-def _(X, X_sample, expected_improvement, gpr, np, plt, y_sample):
+def _(
+    X,
+    X_sample,
+    apply_codecut_style,
+    expected_improvement,
+    gpr,
+    np,
+    plt,
+    y_sample,
+):
     ei = expected_improvement(X, X_sample, y_sample, gpr)
 
     plt.figure(figsize=(10, 4))
-    plt.plot(X, ei, label="Expected Improvement")
-    plt.axvline(X[np.argmax(ei)], color="r", linestyle="--", label="Next sample point")
+    plt.plot(X, ei, label="Expected Improvement", color="#72BEFA")
+    plt.axvline(X[np.argmax(ei)], color="#E583B6", linestyle="--", label="Next sample point")
     plt.title("Acquisition Function (Expected Improvement)")
     plt.xlabel("x")
     plt.ylabel("EI(x)")
     plt.legend()
-    plt.show()
-
+    apply_codecut_style()
     return
 
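Note: expected_improvement is defined in an unchanged cell, so its body is not shown in this diff. A common implementation for a minimization objective, consistent with the np.argmax(ei) selection above (a sketch, not necessarily the notebook's exact code):

    import numpy as np
    from scipy.stats import norm

    def expected_improvement_sketch(X, X_sample, y_sample, model, xi=0.01):
        # Posterior mean and standard deviation over the candidate grid
        mu, std = model.predict(X, return_std=True)
        best = np.min(y_sample)          # best (lowest) observation so far
        imp = best - mu - xi             # improvement over the incumbent, minus margin xi
        z = imp / np.maximum(std, 1e-9)  # guard against division by zero
        ei = imp * norm.cdf(z) + std * norm.pdf(z)
        ei[std < 1e-9] = 0.0             # no expected improvement where the GP is certain
        return ei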

@@ -209,21 +260,15 @@ def _(bayesian_optimization):
 
 
 @app.cell
-def _(X, X_opt, black_box_function, plt, y_opt):
+def _(X, X_opt, apply_codecut_style, black_box_function, plt, y_opt):
     # Plot final sampled points
-    plt.plot(X, black_box_function(X), "k--", label="True function")
-    plt.scatter(X_opt, y_opt, c="red", label="Sampled Points")
+    plt.plot(X, black_box_function(X), "--", label="True function", color="white")
+    plt.scatter(X_opt, y_opt, c="#E583B6", label="Sampled Points")
     plt.title("Bayesian Optimization with Gaussian Process")
     plt.xlabel("x")
     plt.ylabel("f(x)")
     plt.legend()
-    plt.show()
-
-    return
-
-
-@app.cell
-def _():
+    apply_codecut_style()
     return
 
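Note: the bayesian_optimization driver referenced in the hunk header above, which produces X_opt and y_opt, is also defined in an unchanged cell. A sketch of a typical expected-improvement loop under those assumptions (the function and argument names here are hypothetical):

    import numpy as np

    def bayesian_optimization_sketch(f, X, model, expected_improvement, n_init=3, n_iter=10, seed=0):
        # Seed the GP with a few random points from the candidate grid X
        rng = np.random.default_rng(seed)
        idx = rng.choice(len(X), size=n_init, replace=False)
        X_sample, y_sample = X[idx], f(X[idx])

        for _ in range(n_iter):
            model.fit(X_sample, y_sample)              # refit the GP on everything sampled so far
            ei = expected_improvement(X, X_sample, y_sample, model)
            x_next = X[np.argmax(ei)].reshape(1, -1)   # candidate with the highest EI
            X_sample = np.vstack([X_sample, x_next])
            y_sample = np.vstack([y_sample, f(x_next)])

        return X_sample, y_sample                      # roughly what X_opt, y_opt would hold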

public/machine-learning/gpr_optimization.html

Lines changed: 3 additions & 3 deletions
Large diffs are not rendered by default.

0 commit comments
