Skip to content
Snippets Groups Projects
Commit 46c6e98b authored by chsieh16's avatar chsieh16
Browse files

Import teacher and learner for first pass

parent ca37c3a4
No related branches found
No related tags found
No related merge requests found
;; The background theory
(set-logic LRA)
;; split: a decision-tree branch on x against threshold c.
;; NOTE(review): at x = c BOTH implications fire, so then_pred AND e1se_pred
;; must hold on the boundary — confirm this overlap is intended (else branch
;; would conventionally be (< x c)).
(define-fun split ((x Real) (c Real) (then_pred Bool) (e1se_pred Bool)) Bool
(and (=> (>= x c) then_pred) (=> (<= x c) e1se_pred))
)
;; Helper norms over R^2 used as distance measures in the grammar below.
(define-fun Abs ((x Real)) Real (ite (>= x 0) x (- x) ))
(define-fun L1_norm ((x1 Real) (x2 Real)) Real (+ (Abs x1) (Abs x2)))
(define-fun sqr ((x Real)) Real (* x x))
;; NOTE: L2_norm is the SQUARED Euclidean norm (no square root, to stay in LRA).
(define-fun L2_norm ((x1 Real) (x2 Real)) Real (+ (sqr x1) (sqr x2)))
(define-fun max ((x1 Real) (x2 Real)) Real (ite (>= x1 x2) x1 x2))
(define-fun Loo_norm ((x1 Real) (x2 Real)) Real (max (Abs x1) (Abs x2)))
;; Synthesize a shape predicate: is perceived state (z1, z2) close enough to an
;; affine image of the true state (x1, x2)?
(synth-fun inShape ((x1 Real) (x2 Real) (z1 Real) (z2 Real)) Bool
;; Declare the non-terminals that would be used in the grammar
( (B Bool) (Dist Real) (X Real) (C Real))
(
;; B: decision tree whose leaves test a norm-ball membership (or reject).
(B Bool (
false
(<= Dist C)
(split X C B B)
))
;; Dist: distance between (z1, z2) and an affine function of (x1, x2),
;; measured in L1, squared-L2, or L-infinity norm; all coefficients are
;; synthesized constants C.
(Dist Real (
(L1_norm (- z1 (+ (* C x1) (* C x2) C)) (- z2 (+ (* C x1) (* C x2) C)))
(L2_norm (- z1 (+ (* C x1) (* C x2) C)) (- z2 (+ (* C x1) (* C x2) C)))
(Loo_norm (- z1 (+ (* C x1) (* C x2) C)) (- z2 (+ (* C x1) (* C x2) C)))
; TODO ellipsoid norm can be done with an upper triangle matrix U by ||Uv|| <= r
))
(X Real (x1 x2))
(C Real ( (Constant Real) ))
)
)
;; Define the semantic constraints on the function
;; Positive examples: these (x1, x2, z1, z2) tuples must be inside the shape.
(constraint (inShape 10 (- 10) 11 (- 11)))
(constraint (inShape 10 (- 10) 9 (- 9)))
(constraint (inShape 1 1 1.5 2.3 ))
;; Negative examples: these must be outside the shape.
(constraint (not (inShape 1 1 0.5 0.5)))
(constraint (not (inShape 1 1 1.5 1.5)))
(constraint (not (inShape 9 9 5 5)))
(check-synth)
#!/usr/bin/env python3
from typing import Tuple
import gurobipy as gp
from gurobipy import GRB
import numpy as np
import z3
def add_invariant(m: gp.Model, old_state, new_state):
    """Add the candidate invariant V(x, y) = |x| + |y| and its violation check.

    Encodes the *falsification* condition V(x', y') >= V(x, y): the model is
    feasible iff there is a transition where the tracking error does not
    decrease, i.e. a witness that the controller is NOT stabilizing (UNSAFE).

    Args:
        m: Gurobi model to add variables/constraints to.
        old_state: pair (x, y) of Gurobi variables for the pre-transition state.
        new_state: pair (x', y') of Gurobi variables for the post-transition state.
    """
    old_x, old_y = old_state
    new_x, new_y = new_state

    def _abs_of(var, name):
        # |var| via Gurobi's general absolute-value constraint; abs is >= 0.
        a = m.addVar(name=name, vtype=GRB.CONTINUOUS, lb=0, ub=np.inf)
        m.addConstr(a == gp.abs_(var))
        return a

    # Preserve the original variable-creation order for reproducible models.
    old_abs_x = _abs_of(old_x, "|x|")
    new_abs_x = _abs_of(new_x, "|x'|")
    old_abs_y = _abs_of(old_y, "|y|")
    new_abs_y = _abs_of(new_y, "|y'|")

    # Fixed: names were "V(x,y,θ)" and "V(x',y'')" — the state is 2-D (no θ)
    # and the second name had a doubled prime.
    old_V = m.addVar(name="V(x,y)", vtype=GRB.CONTINUOUS, lb=0, ub=np.inf)
    m.addConstr(old_V == old_abs_x + old_abs_y)
    new_V = m.addVar(name="V(x',y')", vtype=GRB.CONTINUOUS, lb=0, ub=np.inf)
    m.addConstr(new_V == new_abs_x + new_abs_y)
    m.addConstr(new_V >= old_V, name="Stability")  # Tracking error is increasing (UNSAFE)
def add_constraints(m: gp.Model,
                    x_bound: Tuple[float, float],
                    y_bound: Tuple[float, float],
                    coeff: np.ndarray,
                    intercept: np.ndarray,
                    radius: float
                    ) -> None:
    """Build the one-step falsification model for the perceived-state system.

    The perceived state (xP, yP) may be any point within `radius` (Euclidean)
    of the affine image coeff @ (x, y) + intercept of the true state; the
    control is a piecewise-constant sign-like feedback on the perceived state;
    the invariant-violation constraint from add_invariant is then added.

    Args:
        m: Gurobi model to populate (mutated in place).
        x_bound: (lb, ub) bounds for the true x coordinate.
        y_bound: (lb, ub) bounds for the true y coordinate.
        coeff: 2x2 matrix A of the perception abstraction z = A s + b.
        intercept: length-2 vector b of the perception abstraction.
        radius: radius of the perception-error ball.
    """
    # State variables
    old_x = m.addVar(name='x', vtype=GRB.CONTINUOUS, lb=x_bound[0], ub=x_bound[1])
    old_y = m.addVar(name='y', vtype=GRB.CONTINUOUS, lb=y_bound[0], ub=y_bound[1])
    # Perceived state variables
    xP = m.addVar(name='xP', vtype=GRB.CONTINUOUS, lb=-np.inf, ub=np.inf)
    yP = m.addVar(name='yP', vtype=GRB.CONTINUOUS, lb=-np.inf, ub=np.inf)
    # Constraints on values of xP and yP:
    # (x_diff, y_diff) is the perception error; it must lie in a ball of
    # `radius` (quadratic constraint below).
    x_diff = xP - (coeff[0][0]*old_x + coeff[0][1]*old_y + intercept[0])
    y_diff = yP - (coeff[1][0]*old_x + coeff[1][1]*old_y + intercept[1])
    m.addQConstr(x_diff * x_diff + y_diff * y_diff <= radius*radius, "(xP, yP) in M(x, y)")
    # Control: u = 1 for p < 0, u = -1 for p > 0, u = 0 at p = 0.
    # NOTE(review): the repeated x-breakpoints at 0 encode the jump
    # discontinuities of this sign-like controller — confirm against Gurobi's
    # addGenConstrPWL semantics for duplicated breakpoints, and that inputs
    # outside [-10, 10] (extrapolated linearly) cannot occur.
    u_x = m.addVar(name='u_x', vtype=GRB.CONTINUOUS, lb=-np.inf, ub=np.inf)
    m.addGenConstrPWL(xP, u_x, [-10, 0, 0, 0, 10], [1, 1, 0, -1, -1], "u_x")
    u_y = m.addVar(name='u_y', vtype=GRB.CONTINUOUS, lb=-np.inf, ub=np.inf)
    m.addGenConstrPWL(yP, u_y, [-10, 0, 0, 0, 10], [1, 1, 0, -1, -1], "u_y")
    # One-step dynamics: s' = s + u.
    new_x = m.addVar(name="x'", vtype=GRB.CONTINUOUS, lb=-np.inf, ub=np.inf)
    m.addConstr(new_x == old_x + u_x)
    new_y = m.addVar(name="y'", vtype=GRB.CONTINUOUS, lb=-np.inf, ub=np.inf)
    m.addConstr(new_y == old_y + u_y)
    add_invariant(m, (old_x, old_y), (new_x, new_y))
    # # L1-norm objective
    # m.setObjective(phi_dist + cte_dist, GRB.MINIMIZE)
    # L2-norm objective: among violating transitions, find the one with the
    # smallest (squared) perception error.
    m.setObjective(x_diff*x_diff + y_diff*y_diff, GRB.MINIMIZE)
    return
def compute_min_dist(x_bound: Tuple[float, float], y_bound: Tuple[float, float],
                     coeff: np.ndarray,
                     intercept: np.ndarray = None,
                     radius: float = np.nan) -> "z3.CheckSatResult | float":
    """Solve the falsification problem on one state-space partition.

    Returns z3.sat if a violating transition exists (counterexample printed),
    z3.unsat if the partition is verified safe, z3.unknown for
    unbounded/ambiguous solver outcomes, and np.nan when radius/coeff are
    not yet specified (NaN inputs).

    Args:
        x_bound: (lb, ub) bounds for x defining the partition.
        y_bound: (lb, ub) bounds for y defining the partition.
        coeff: 2x2 perception matrix A.
        intercept: length-2 perception offset b; defaults to the zero vector.
        radius: perception-error ball radius; NaN short-circuits to np.nan.
    """
    if intercept is None:
        intercept = np.zeros(2)
    # NaN inputs mean the candidate is not fully instantiated — nothing to solve.
    if np.isnan(radius):
        return np.nan
    if np.any(np.isnan(coeff)):
        return np.nan
    assert coeff.shape == (2, 2) and intercept.shape == (2,)
    with gp.Model("lane_centering_stanley") as m:
        # Disable dual reductions so Gurobi distinguishes INFEASIBLE from
        # UNBOUNDED instead of reporting INF_OR_UNBD.
        m.setParam(GRB.Param.DualReductions, 0)
        print("Partition: x in [%g, %g] and y in [%g, %g]" % (x_bound + y_bound))
        add_constraints(m, x_bound, y_bound,
                        coeff=coeff,
                        intercept=intercept,
                        radius=radius)
        # m.write("firstpass.lp")  # Dump optimization problem in LP format
        m.optimize()
        # Map Gurobi termination status onto z3-style verdicts.
        if m.status == GRB.OPTIMAL:
            # Feasible => counterexample to stability found on this partition.
            x = m.getVarByName('x')
            y = m.getVarByName('y')
            xP = m.getVarByName('xP')
            yP = m.getVarByName('yP')
            print('Obj: %g, ObjBound: %g, x: %g, y: %g, xP: %g, yP: %g\n'
                  % (m.objVal, m.objBound, x.x, y.x, xP.x, yP.x))
            return z3.sat
        elif m.status == GRB.INF_OR_UNBD:
            print('Model is infeasible or unbounded')
            return z3.unknown
        elif m.status == GRB.INFEASIBLE:
            # No violating transition in this partition.
            print('Model is infeasible')
            return z3.unsat
        elif m.status == GRB.UNBOUNDED:
            print('Model is unbounded')
            return z3.unknown
        else:
            print('Optimization ended with status %d' % m.status)
            return z3.unknown
# Candidate perception abstractions M(x, y) = { z : ||z - (A s + b)|| <= r }.
# Each candidate is an affine map z = A @ (x, y) + b with error radius r.
# Candidate 0: presumably maps (x, y) -> (-y, y) — TODO confirm intent.
CAND0_A = np.array([[0.0, -1.0],
                    [0.0, 1.0]])
CAND0_b = np.array([0.0, 0.0])
CAND0_r = 2.0
# Candidate 1: identity perception (z = s) with the same radius.
CAND1_A = np.array([[1.0, 0.0],
                    [0.0, 1.0]])
CAND1_b = np.array([0.0, 0.0])
CAND1_r = 2.0
def main():
    """Run the falsification query for candidate 0 over the whole plane."""
    unbounded = (-np.inf, np.inf)  # tighten to e.g. (5.0, 10.0) per partition
    compute_min_dist(unbounded, unbounded,
                     coeff=CAND0_A, intercept=CAND0_b, radius=CAND0_r)


if __name__ == "__main__":
    main()
OutputFlag 0
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment