Nikolaj Bjørner
Microsoft Research
nbjorner@microsoft.com |
Solver methods
|
|
Solver methods
|
Unsatisfiable cores contain tracked literals. |
Solver methods
|
Assertions added within a scope are removed when the scope is popped. |
Solver methods
|
Is the solver state satisfiable modulo the assumption literals? The solver state is the conjunction of the assumption literals and the assertions that have been added to the solver in the current scope. |
Methods
|
|
Slide is by Joao Marques-Silva
sat | unsat
model | (clausal) proof
correction set | core
local minimal correction set | local minimal core
minimal correction set | minimal core
Optimization as SMT with preferred models
An introduction to cores and correction sets
Show examples of algorithms on top of SMT/Z3
(M)US (Minimal) unsatisfiable subset
(M)SS (Maximal) satisfiable subset
(M)CS (Minimal) correction set
(Prime) implicant
|
|
def ff(s, p):
    """Return True iff p evaluates to false in the current model of solver s."""
    value = s.model().eval(p)
    return is_false(value)
def marco(s, ps):
    """Enumerate MSSes and MUSes of the soft constraints ps over solver s
    (MARCO algorithm): a separate "map" solver proposes unexplored seeds;
    each seed is grown to an MSS or shrunk to a MUS, which then blocks a
    region of the power set in the map solver."""
    region = Solver()  # the map solver over the power set of ps
    while region.check() == sat:
        # Seed: everything the map model does not force to false.
        seed = {p for p in ps if not ff(region, p)}
        if s.check(seed) == sat:
            mss = get_mss(s, seed, ps)
            region.add(Or(ps - mss))   # block the MSS and all its subsets
            yield "MSS", mss
        else:
            mus = get_mus(s, seed)
            region.add(Not(And(mus)))  # block the MUS and all its supersets
            yield "MUS", mus
def tt(s, f):
    """Return True iff f evaluates to true in the current model of solver s."""
    value = s.model().eval(f)
    return is_true(value)
def get_mss(s, mss, ps):
    """Grow the satisfiable set mss to a maximal satisfiable subset of ps.

    Repeatedly pick an untried formula; if it is consistent with the
    current mss (and the accumulated negated backbones), absorb it together
    with every other candidate the witnessing model already satisfies,
    otherwise record its negation as a backbone.
    """
    remaining = ps - mss
    negated = set()
    while remaining:
        cand = remaining.pop()
        if s.check(mss | negated | {cand}) == sat:
            # Absorb cand plus everything else true in the model.
            mss = mss | {cand} | {q for q in remaining if tt(s, q)}
            remaining = remaining - mss
        else:
            negated = negated | {Not(cand)}
    return mss
Use built-in core minimization:
s.set("sat.core.minimize","true") # For Bit-vector theories
s.set("smt.core.minimize","true") # For general SMT
Or roll your own:
def quick_explain(test, sub):
    """Entry point for QuickXplain: minimize sub with empty background."""
    return qx(test, set(), set(), sub)
def qx(test, B, D, C):
    """QuickXplain: return a minimal subset M of C such that test(B | M) holds
    (here `test` reports unsatisfiability of its argument as assumptions).

    B is the background set, D the delta most recently added to B, and C the
    candidate set still being minimized.
    """
    # BUG FIX: the original guard was `if {} != D:`.  `{}` is an empty *dict*
    # and never equals a set, so the guard was always true and test(B) was
    # called even when D was empty — an extra solver call that deviates from
    # QuickXplain, which only re-tests B after a non-empty delta was added.
    if len(D) > 0:
        if test(B):
            return set()
    if len(C) == 1:
        return C
    C1, C2 = split(C)  # `split` partitions C; defined elsewhere in the file
    D1 = qx(test, B | C1, C1, C2)
    D2 = qx(test, B | D1, D1, C1)
    return D1 | D2
def test(s):
    """Return a predicate that reports whether a subset of formulas,
    used as assumptions, makes solver s unsatisfiable."""
    def is_unsat(subset):
        return s.check([f for f in subset]) == unsat
    return is_unsat
# Demo: extract an unsat core, then minimize it with quick_explain.
s = Solver()
a, b, c, d, e = Bools('a b c d e')
s.add(Or(a, b))
s.add(Or(Not(a), Not(b)))
s.add(Or(b, c))
s.add(Or(Not(c), Not(a)))
# BUG FIX: `print x` is Python-2-only syntax; use the print() function,
# which works in both Python 2 and 3.
print(s.check([a, b, c, d, e]))
print(s.unsat_core())
mus = quick_explain(test(s), {a, b, c, d})
def all_mss(s, ps):
    """Generate every maximal satisfiable subset of ps: grow the current
    model's true formulas to an MSS, then require some formula outside it
    to hold so the next round finds a different MSS."""
    while s.check() == sat:
        seed = {p for p in ps if tt(s, p)}
        mss = get_mss(s, seed, ps)
        s.add(Or(ps - mss))  # block this MSS (and its subsets)
        yield "MSS", mss
Find all maximal satisfying subsets among the soft constraints:
Typical definition: Minimize the number of violated soft assertions.
Is built-in, based on MaxSAT algorithms.
; Weighted MaxSAT example: three Boolean soft constraints with weights.
(declare-const a Bool)
(declare-const b Bool)
(declare-const c Bool)
; Soft constraints: violating a costs 1, b costs 2, c costs 3.
(assert-soft a :weight 1)
(assert-soft b :weight 2)
(assert-soft c :weight 3)
; Hard constraints: a and c must agree; a and b cannot both hold.
(assert (= a c))
(assert (not (and a b)))
; Find a model of the hard constraints minimizing total violated weight.
(check-sat)
(get-model)
; Same MaxSAT problem with weights encoded by repetition: asserting the
; same unit-weight soft constraint k times is equivalent to weight k.
(declare-const a Bool)
(declare-const b Bool)
(declare-const c Bool)
(assert-soft a :weight 1)
; b twice = weight 2
(assert-soft b :weight 1) (assert-soft b :weight 1)
; c three times = weight 3
(assert-soft c :weight 1) (assert-soft c :weight 1) (assert-soft c :weight 1)
; Hard constraints as before.
(assert (= a c))
(assert (not (and a b)))
(check-sat)
(get-model)
- hard constraints
- soft constraints
A:
A':
Lemma: for any model of ,
Proof: min:
A:
A':
Lemma: for any model of ,
Proof: min:
def add_def(s, fml):
    """Introduce a Boolean name for fml (named by fml's own string form),
    assert the defining equivalence on s, and return the name."""
    label = Bool("%s" % fml)
    s.add(label == fml)
    return label
def relax_core(s, core, Fs):
    """MaxRes-style core relaxation: remove the core members from the soft
    set Fs and replace them with len(core)-1 relaxed combinations
    Or(And(core[0..i]), core[i+1]), each introduced as a named definition."""
    running = BoolVal(True)  # conjunction of the core prefix seen so far
    Fs.difference_update(core)
    for i in range(len(core) - 1):
        running = add_def(s, And(core[i], running))
        Fs.add(add_def(s, Or(running, core[i + 1])))
def maxsat(s, Fs):
    """Core-guided MaxSAT: while the soft set Fs is unsatisfiable as
    assumptions, relax one unsat core at unit cost.  Returns the cost and
    the original soft constraints satisfied by the final model."""
    original = Fs.copy()
    penalty = 0
    while s.check(Fs) == unsat:
        penalty += 1
        relax_core(s, s.unsat_core(), Fs)
    return penalty, {f for f in original if tt(s, f)}
def relax_mcs(s, mcs, Fs):
    """Dual of relax_core for correction sets: assert that at least one
    member of mcs holds, remove mcs from the soft set Fs, and add
    len(mcs)-1 relaxed combinations And(Or(mcs[0..i]), mcs[i+1]) as
    named definitions."""
    running = BoolVal(False)  # disjunction of the mcs prefix seen so far
    Fs.difference_update(mcs)
    s.add(Or(mcs))
    for i in range(len(mcs) - 1):
        running = add_def(s, Or(mcs[i], running))
        Fs.add(add_def(s, And(running, mcs[i + 1])))
def maxsat(s, Fs0):
    """MCS-guided MaxSAT: repeatedly grow a model's true soft constraints
    to an MSS, record the complementary correction set, and relax it.
    Returns the best (smallest) correction-set cost found and the original
    soft constraints satisfied by the corresponding model.

    BUG FIXES vs. the original slide code:
    - `model1` was assigned the *set* returned by get_mss, yet later used
      as a Z3 model via model.eval(f); we re-check the MSS (satisfiable by
      construction) to obtain an actual witnessing model.
    - `mcs` was computed from the seed mss rather than the extended MSS
      returned by get_mss, so it was not the correction set of the MSS.
    - `model` could be unbound at return if no iteration improved the cost;
      it is now initialized and guarded.
    """
    Fs = Fs0.copy()
    cost = len(Fs)
    model = None
    while s.check() == sat:
        seed = {f for f in Fs if tt(s, f)}
        mss = get_mss(s, seed, Fs)
        # get_mss may leave the solver after an unsat check; re-check the
        # MSS as assumptions to recover a model witnessing it.
        if s.check(mss) == sat:
            model1 = s.model()
        mcs = Fs - mss
        if cost > len(mcs):
            cost = len(mcs)
            model = model1
        relax_mcs(s, [f for f in mcs], Fs)
    return cost, [f for f in Fs0 if model is not None and is_true(model.eval(f))]
MCS alone is inefficient. In (Bjørner and Narodytska, 2015) we combine MUS and MCS steps.