Skip to content

Allow SNR optimizer to use candidate point in initial array #4393

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
85 changes: 73 additions & 12 deletions bin/pycbc_optimize_snr
Original file line number Diff line number Diff line change
Expand Up @@ -130,13 +130,28 @@ def compute_minus_network_snr_pso(v, *argv, **kwargs):
return -nsnr_array


def optimize_di(bounds, cli_args, extra_args):
bounds = [
def normalize_initial_point(initial_point, bounds):
    """Map a point from physical parameter space onto the unit hypercube.

    Each coordinate is rescaled linearly so that ``bounds[:, 0]`` maps to 0
    and ``bounds[:, 1]`` maps to 1.

    Parameters
    ----------
    initial_point : numpy.ndarray
        Point(s) to rescale; broadcastable against a length-N bounds axis.
    bounds : numpy.ndarray
        Array of shape (N, 2) holding (lower, upper) per parameter.

    Returns
    -------
    numpy.ndarray
        The rescaled point(s), nominally within [0, 1] per coordinate.
    """
    lower = bounds[:, 0]
    width = bounds[:, 1] - lower
    return (initial_point - lower) / width

def optimize_di(bounds, cli_args, extra_args, initial_point):
bounds = numpy.array([
bounds['mchirp'],
bounds['eta'],
bounds['spin1z'],
bounds['spin2z']
]
])


# Currently only implemented for random seed initial array
rng = numpy.random.mtrand._rand
population_shape=(cli_args.di_popsize, 4)
population = rng.uniform(size=population_shape)
if cli_args.include_candidate_in_optimizer:
# Re-normalize the initial point into the correct range
point_init = normalize_initial_point(initial_point, bounds)
# add the initial point to the population
population = numpy.concatenate((population[:-1], point_init))

results = differential_evolution(
compute_minus_network_snr,
bounds,
Expand All @@ -146,12 +161,13 @@ def optimize_di(bounds, cli_args, extra_args):
mutation=(0.5, 1),
recombination=0.7,
callback=callback_func,
args=extra_args
args=extra_args,
init=population
)
return results.x


def optimize_shgo(bounds, cli_args, extra_args):
def optimize_shgo(bounds, cli_args, extra_args, initial_point): # pylint: disable=unused-argument
bounds = [
bounds['mchirp'],
bounds['eta'],
Expand All @@ -168,30 +184,48 @@ def optimize_shgo(bounds, cli_args, extra_args):
)
return results.x

def normalize_population(population, min_bounds, max_bounds):
    """Rescale a unit-hypercube population into physical parameter space.

    Each coordinate ``x`` in [0, 1] becomes ``min + x * (max - min)``,
    so 0 maps to the lower bound and 1 maps to the upper bound.

    Parameters
    ----------
    population : numpy.ndarray
        Samples drawn on the unit hypercube, shape (n_particles, n_dims).
    min_bounds : numpy.ndarray
        Per-parameter lower bounds, length n_dims.
    max_bounds : numpy.ndarray
        Per-parameter upper bounds, length n_dims.

    Returns
    -------
    numpy.ndarray
        The population rescaled into [min_bounds, max_bounds].
    """
    span = max_bounds - min_bounds
    return min_bounds + population * span

def optimize_pso(bounds, cli_args, extra_args):
def optimize_pso(bounds, cli_args, extra_args, initial_point):
options = {
'c1': cli_args.pso_c1,
'c2': cli_args.pso_c2,
'w': cli_args.pso_w
}
min_bounds = [
min_bounds = numpy.array([
bounds['mchirp'][0],
bounds['eta'][0],
bounds['spin1z'][0],
bounds['spin2z'][0]
]
max_bounds = [
])
max_bounds = numpy.array([
bounds['mchirp'][1],
bounds['eta'][1],
bounds['spin1z'][1],
bounds['spin2z'][1]
]
])

# Manually generate the initial points, this is the same as the default
# method, but allows us to make some modifications
population = numpy.random.uniform(
low=0.0, high=1.0, size=(cli_args.pso_particles, 4)
)
population = normalize_population(population, min_bounds, max_bounds)

if cli_args.include_candidate_in_optimizer:
# add the initial point to the population
population = numpy.concatenate((population[:-1],
initial_point))

optimizer = ps.single.GlobalBestPSO(
n_particles=cli_args.pso_particles,
dimensions=4,
options=options,
bounds=(min_bounds, max_bounds)
bounds=(min_bounds, max_bounds),
init_pos=population
)
_, results = optimizer.optimize(
compute_minus_network_snr_pso,
Expand Down Expand Up @@ -284,6 +318,14 @@ parser.add_argument('--pso-c2', type=float, default=2.0,
parser.add_argument('--pso-w', type=float, default=0.01,
help='Only relevant for --optimizer pso: '
'The hyperparameter w: the inertia parameter.')
parser.add_argument('--include-candidate-in-optimizer', action='store_true',
help='Include parameters of the candidate event in the '
'initialised array for the optimizer. Only relevant for '
'--optimizer pso or differential_evolution')
parser.add_argument('--seed', type=int,
help='Seed to supply to the random generation of initial '
'array to pass to the optimizer. Only relevant for '
'--optimizer pso or differential_evolution')

scheme.insert_processing_option_group(parser)
fft.insert_fft_option_group(parser)
Expand All @@ -296,6 +338,9 @@ if args.optimizer == 'pso' and ps == None:
parser.error('You need to install pyswarms to use the pso optimizer.')
pycbc.init_logging(args.verbose)

if args.seed:
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

These changes mean that we can control the random seed for these optimizers, which is useful for testing

numpy.random.seed(args.seed)

scheme.verify_processing_options(args, parser)
fft.verify_fft_options(args, parser)

Expand Down Expand Up @@ -375,11 +420,27 @@ bounds = {
'spin2z': (minspin2z, maxspin2z)
}

if args.include_candidate_in_optimizer:
    # Build the starting point for the optimizer from the candidate event's
    # recovered parameters. `fp` appears to be an HDF5-like mapping holding
    # the candidate's component masses and spins — TODO confirm against the
    # file-opening code earlier in the script.
    # Initial point from found candidate
    mchirp_init = cv.mchirp_from_mass1_mass2(fp['mass1'][()], fp['mass2'][()])
    eta_init = cv.eta_from_mass1_mass2(fp['mass1'][()], fp['mass2'][()])
    spin1z_init = fp['spin1z'][()]
    spin2z_init = fp['spin2z'][()]

    # Shape (1, 4): the trailing newaxis makes the point concatenable with a
    # (popsize, 4) population array inside the optimizer helpers.
    initial_point = numpy.array([
        mchirp_init,
        eta_init,
        spin1z_init,
        spin2z_init,
    ])[numpy.newaxis]
else:
    # No candidate seeding requested; optimizers ignore this argument.
    initial_point = None

# Run the selected optimizer under the requested processing scheme
# (CPU/GPU context configured via the scheme options).
with scheme_context:
    logging.info('Starting optimization')

    # Dispatch on --optimizer; every entry in optimize_funcs shares the
    # (bounds, cli_args, extra_args, initial_point) signature, and
    # optimizers that cannot seed from a candidate simply ignore the
    # final argument.
    optimize_func = optimize_funcs[args.optimizer]
    opt_params = optimize_func(bounds, args, extra_args, initial_point)

logging.info('Optimization complete')

Expand Down