@@ -118,11 +118,11 @@ def test_lml_gradient(kernel):
     assert_almost_equal(lml_gradient, lml_gradient_approx, 3)
 
 
-def test_random_starts():
+def test_random_starts(global_random_seed):
     # Test that an increasing number of random-starts of GP fitting only
     # increases the log marginal likelihood of the chosen theta.
     n_samples, n_features = 25, 2
-    rng = np.random.RandomState(0)
+    rng = np.random.RandomState(global_random_seed)
     X = rng.randn(n_samples, n_features) * 2 - 1
     y = (np.sin(X).sum(axis=1) + np.sin(3 * X).sum(axis=1)) > 0
 
@@ -132,19 +132,21 @@ def test_random_starts():
     last_lml = -np.inf
     for n_restarts_optimizer in range(5):
         gp = GaussianProcessClassifier(
-            kernel=kernel, n_restarts_optimizer=n_restarts_optimizer, random_state=0
+            kernel=kernel,
+            n_restarts_optimizer=n_restarts_optimizer,
+            random_state=global_random_seed,
         ).fit(X, y)
         lml = gp.log_marginal_likelihood(gp.kernel_.theta)
         assert lml > last_lml - np.finfo(np.float32).eps
         last_lml = lml
 
 
 @pytest.mark.parametrize("kernel", non_fixed_kernels)
-def test_custom_optimizer(kernel):
+def test_custom_optimizer(kernel, global_random_seed):
     # Test that GPC can use externally defined optimizers.
     # Define a dummy optimizer that simply tests 10 random hyperparameters
     def optimizer(obj_func, initial_theta, bounds):
-        rng = np.random.RandomState(0)
+        rng = np.random.RandomState(global_random_seed)
         theta_opt, func_min = initial_theta, obj_func(
             initial_theta, eval_gradient=False
         )
@@ -160,9 +162,9 @@ def optimizer(obj_func, initial_theta, bounds):
     gpc = GaussianProcessClassifier(kernel=kernel, optimizer=optimizer)
     gpc.fit(X, y_mc)
     # Checks that optimizer improved marginal likelihood
-    assert gpc.log_marginal_likelihood(gpc.kernel_.theta) > gpc.log_marginal_likelihood(
-        kernel.theta
-    )
+    assert gpc.log_marginal_likelihood(
+        gpc.kernel_.theta
+    ) >= gpc.log_marginal_likelihood(kernel.theta)
 
 
 @pytest.mark.parametrize("kernel", kernels)
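
For context, the global_random_seed argument added above is a pytest fixture supplied by the test suite's conftest, not by these tests themselves. The sketch below shows one way such a fixture could be wired up; the environment variable name follows scikit-learn's SKLEARN_TESTS_GLOBAL_RANDOM_SEED convention, but the default seed and the "all" expansion used here are assumptions, and the real sklearn/conftest.py implementation may differ.

# conftest.py sketch of a global_random_seed-style fixture (assumed, not
# scikit-learn's actual implementation).
import os

import pytest


def _collect_seeds():
    # Assumed convention: one seed by default; "all" expands to the 0-99 range.
    value = os.environ.get("SKLEARN_TESTS_GLOBAL_RANDOM_SEED", "42")
    if value == "all":
        return list(range(100))
    return [int(value)]


@pytest.fixture(params=_collect_seeds())
def global_random_seed(request):
    # Every test requesting this fixture is run once per selected seed,
    # so seed-sensitive assertions are exercised across seeds.
    return request.param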