@@ -452,7 +452,10 @@ def gradientTrimming(self, w, use_ad=False, eps=1e-10):

         return g

-    def optimize(self, x0=None, print_level=0, max_iter=100, tol=1e-8):
+    def optimize(self, x0=None, print_level=0, max_iter=100, tol=1e-8,
+                 acceptable_tol=1e-6,
+                 nlp_scaling_method=None,
+                 nlp_scaling_min_value=None):
         if x0 is None:
             x0 = np.hstack((self.beta, self.gamma, self.delta))
         if self.use_lprior:
@@ -461,8 +464,8 @@ def optimize(self, x0=None, print_level=0, max_iter=100, tol=1e-8):
         assert x0.size == self.k_total

         opt_problem = ipopt.problem(
-            n=self.k_total,
-            m=self.num_constraints,
+            n=int(self.k_total),
+            m=int(self.num_constraints),
             problem_obj=self,
             lb=self.uprior[0],
             ub=self.uprior[1],
@@ -473,8 +476,11 @@ def optimize(self, x0=None, print_level=0, max_iter=100, tol=1e-8):
         opt_problem.addOption('print_level', print_level)
         opt_problem.addOption('max_iter', max_iter)
         opt_problem.addOption('tol', tol)
-        # opt_problem.addOption('bound_push', 1e-15)
-        # opt_problem.addOption('bound_frac', 1e-15)
+        opt_problem.addOption('acceptable_tol', acceptable_tol)
+        if nlp_scaling_method is not None:
+            opt_problem.addOption('nlp_scaling_method', nlp_scaling_method)
+        if nlp_scaling_min_value is not None:
+            opt_problem.addOption('nlp_scaling_min_value', nlp_scaling_min_value)

         soln, info = opt_problem.solve(x0)

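The new `acceptable_tol`, `nlp_scaling_method`, and `nlp_scaling_min_value` arguments are passed straight through to Ipopt via `addOption`, and the two scaling options are only forwarded when explicitly set, so Ipopt's defaults are otherwise untouched. Below is a minimal, self-contained sketch of that same pattern on a toy problem. It assumes the legacy `import ipopt` interface of cyipopt used in this module (newer cyipopt releases expose `cyipopt.Problem` / `add_option` instead), and the `Rosenbrock` test class and `solve` helper are purely illustrative, not part of this commit.

```python
import numpy as np
import ipopt  # legacy cyipopt interface, as used in this module


class Rosenbrock:
    """Tiny unconstrained test problem in cyipopt's problem_obj protocol."""

    def objective(self, x):
        return (1.0 - x[0])**2 + 100.0*(x[1] - x[0]**2)**2

    def gradient(self, x):
        return np.array([-2.0*(1.0 - x[0]) - 400.0*x[0]*(x[1] - x[0]**2),
                         200.0*(x[1] - x[0]**2)])

    def constraints(self, x):
        return np.zeros(0)            # no constraints (m = 0)

    def jacobian(self, x):
        return np.zeros((0, x.size))  # empty Jacobian to match m = 0


def solve(x0, acceptable_tol=1e-6,
          nlp_scaling_method=None, nlp_scaling_min_value=None):
    opt_problem = ipopt.problem(n=int(x0.size), m=0,
                                problem_obj=Rosenbrock(),
                                lb=np.full(x0.size, -1e19),
                                ub=np.full(x0.size, 1e19),
                                cl=np.zeros(0), cu=np.zeros(0))
    opt_problem.addOption('print_level', 0)
    opt_problem.addOption('tol', 1e-8)
    # Always set the acceptable tolerance ...
    opt_problem.addOption('acceptable_tol', acceptable_tol)
    # ... but only override Ipopt's scaling behaviour when requested.
    if nlp_scaling_method is not None:
        opt_problem.addOption('nlp_scaling_method', nlp_scaling_method)
    if nlp_scaling_min_value is not None:
        opt_problem.addOption('nlp_scaling_min_value', nlp_scaling_min_value)
    soln, info = opt_problem.solve(x0)
    return soln


if __name__ == '__main__':
    print(solve(np.array([-1.2, 1.0]), nlp_scaling_method='gradient-based'))
```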
@@ -488,6 +494,9 @@ def fitModel(self, x0=None,
                  inner_print_level=0,
                  inner_max_iter=20,
                  inner_tol=1e-8,
+                 inner_acceptable_tol=1e-6,
+                 inner_nlp_scaling_method=None,
+                 inner_nlp_scaling_min_value=None,
                  outer_verbose=False,
                  outer_max_iter=100,
                  outer_step_size=1.0,
@@ -497,7 +506,10 @@ def fitModel(self, x0=None,
         if not self.use_trimming:
             self.optimize(x0=x0,
                           print_level=inner_print_level,
-                          max_iter=inner_max_iter)
+                          max_iter=inner_max_iter,
+                          acceptable_tol=inner_acceptable_tol,
+                          nlp_scaling_method=inner_nlp_scaling_method,
+                          nlp_scaling_min_value=inner_nlp_scaling_min_value)

             return self.beta, self.gamma, self.w

@@ -510,7 +522,10 @@ def fitModel(self, x0=None,
             self.optimize(x0=self.soln,
                           print_level=inner_print_level,
                           max_iter=inner_max_iter,
-                          tol=inner_tol)
+                          tol=inner_tol,
+                          acceptable_tol=inner_acceptable_tol,
+                          nlp_scaling_method=inner_nlp_scaling_method,
+                          nlp_scaling_min_value=inner_nlp_scaling_min_value)

             w_grad = self.gradientTrimming(self.w)
             if normalize_trimming_grad:
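The `inner_*` keyword arguments on `fitModel` are forwarded verbatim to `optimize` in both the trimming and non-trimming paths, so callers tune the inner Ipopt solves without touching the outer trimming loop. A hedged usage sketch follows; the model construction is not part of this diff, so the instance is taken as an argument and its constructor is assumed, and the return value shown is the one visible in the non-trimming path above.

```python
def fit_with_scaling(model):
    """Hypothetical helper: `model` is an already-constructed instance of the
    class modified above (its constructor lies outside this diff)."""
    # The inner_* keywords are forwarded to optimize(), which passes them to
    # Ipopt through addOption(); leaving a scaling option as None keeps
    # Ipopt's default for that setting.
    return model.fitModel(inner_print_level=0,
                          inner_max_iter=50,
                          inner_tol=1e-8,
                          inner_acceptable_tol=1e-6,
                          inner_nlp_scaling_method='gradient-based',
                          inner_nlp_scaling_min_value=1e-8)
```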