import json
import sys
import numpy as np
from scipy.optimize import minimize
def rosenbrock(x):
    """Rosenbrock test function with a=1, b=100.

    Returns the tuple ``(f, g)`` where ``f`` is the scalar value
    f(x) = (a - x0)^2 + b*(x1 - x0^2)^2 and ``g`` is its analytic
    gradient as a length-2 ndarray. Global minimum f=0 at (1, 1).
    """
    a, b = 1.0, 100.0
    residual = x[1] - x[0] ** 2           # the "banana valley" term
    value = (a - x[0]) ** 2 + b * residual ** 2
    grad = np.array([
        -2.0 * (a - x[0]) - 4.0 * b * residual * x[0],
        2.0 * b * residual,
    ])
    return value, grad
def quadratic(x):
    """Convex quadratic bowl f(x) = x·x.

    Returns ``(f, g)``: the squared Euclidean norm of ``x`` and its
    gradient 2x. Minimum f=0 at the origin.
    """
    value = float(np.dot(x, x)) if False else np.dot(x, x)  # keep original scalar type
    return value, 2.0 * x
def scipy_objective(x, func_name):
    """Return only the objective value of the test function named *func_name*.

    Raises ValueError for an unrecognized name.
    """
    dispatch = {"rosenbrock": rosenbrock, "quadratic": quadratic}
    if func_name not in dispatch:
        raise ValueError(f"Unknown function: {func_name}")
    value, _ = dispatch[func_name](x)
    return value
def scipy_gradient(x, func_name):
    """Return only the gradient of the test function named *func_name*.

    Raises ValueError for an unrecognized name.
    """
    dispatch = {"rosenbrock": rosenbrock, "quadratic": quadratic}
    if func_name not in dispatch:
        raise ValueError(f"Unknown function: {func_name}")
    _, gradient = dispatch[func_name](x)
    return gradient
def optimize_with_scipy(x0, func_name, tolerance=1e-6, max_iterations=100):
    """Minimize the named test function with BFGS and analytic gradients.

    Args:
        x0: Initial point (any sequence of numbers; converted to float ndarray).
        func_name: 'rosenbrock' or 'quadratic'.
        tolerance: Gradient-norm convergence tolerance (scipy 'gtol').
        max_iterations: Maximum BFGS iterations.

    Returns:
        dict with success flag, final point/value/gradient norm, iteration
        and evaluation counts, and scipy's termination message.

    Raises:
        ValueError: if *func_name* is not a known test function.
    """
    functions = {'rosenbrock': rosenbrock, 'quadratic': quadratic}
    if func_name not in functions:
        # Same exception type/message the per-evaluation dispatch used to raise.
        raise ValueError(f"Unknown function: {func_name}")
    fun_and_grad = functions[func_name]

    # dtype=float guards against integer x0 from JSON input.
    x0 = np.asarray(x0, dtype=float)

    # The test functions return (f, g) in a single call, so use jac=True
    # instead of separate fun/jac lambdas — the old wrappers evaluated the
    # function twice per point and threw half the work away.
    result = minimize(
        fun=fun_and_grad,
        x0=x0,
        method='BFGS',
        jac=True,
        options={'gtol': tolerance, 'maxiter': max_iterations, 'disp': False},
    )

    return {
        'success': bool(result.success),
        'final_point': result.x.tolist(),
        'final_value': float(result.fun),
        'final_gradient_norm': float(np.linalg.norm(result.jac)),
        'iterations': int(result.nit),
        'func_evals': int(result.nfev),
        'grad_evals': int(result.njev),
        'message': str(result.message),
    }
def main():
    """CLI entry point: parse a JSON spec from argv[1], print a JSON result.

    Expected input keys: 'x0', 'function', optional 'tolerance' and
    'max_iterations'. On any failure, prints a JSON error object and
    exits with status 1.
    """
    if len(sys.argv) != 2:
        print("Usage: python optimization_harness.py '<json_input>'", file=sys.stderr)
        sys.exit(1)

    try:
        spec = json.loads(sys.argv[1])
        outcome = optimize_with_scipy(
            spec['x0'],
            spec['function'],
            spec.get('tolerance', 1e-6),
            spec.get('max_iterations', 100),
        )
        print(json.dumps(outcome))
    except Exception as exc:
        # Top-level boundary: report the failure as machine-readable JSON.
        print(json.dumps({'success': False, 'error': str(exc)}))
        sys.exit(1)


if __name__ == '__main__':
    main()