# reach.jl (forked from StanfordMSL/Neural-Network-Reach)
using Plots, LinearAlgebra, JuMP, GLPK, LazySets, Polyhedra, CDDLib, OrderedCollections, FileIO, JLD2
# using Clp
include("load_networks.jl")
include("unique_custom.jl")
ϵ = 1e-15 # used for numerical tolerances throughout
### GENERAL PURPOSE FUNCTIONS ###
# Normalize a homogeneous constraint row [a; b] so that ‖a‖ = 1; rows with a ≈ 0 are zeroed out
function normalize_row(row::Vector{Float64})
scale = norm(row[1:end-1])
if scale > ϵ
return row / scale
else
return vcat(zeros(length(row)-1), [row[end]])
end
end
# Returns the algorithm's initialization point given the input-space constraints Aᵢx ≤ bᵢ
function get_input(Aᵢ, bᵢ)
input = cheby_lp([], [], Aᵢ, bᵢ, [])[1]
return input + 0.0001*randn(length(input)) # random offset to avoid initializing on a boundary
end
# checks whether an input lies in the interior of a cell (returns true if so)
# an input is on a cell boundary iff some preactivation ẑ is (approximately) zero
function interior_input(input, weights)
NN_out = vcat(input, [1.])
for layer = 1:length(weights)-1
ẑ = weights[layer]*NN_out
for i in 1:length(ẑ)
isapprox(ẑ[i], 0.0, atol=ϵ) ? (return false) : nothing
end
NN_out = max.(0, ẑ)
end
return true
end
# Given input point, perform forward pass to get ap.
function get_ap(input, weights)
L = length(weights)
ap = Vector{BitVector}(undef, L-1)
layer_vals = vcat(input, [1])
for layer in 1:L-1 # No ReLU on last layer
layer_vals = max.(0, weights[layer]*layer_vals)
ap[layer] = layer_vals .> ϵ
end
return ap
end
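
# Minimal usage sketch (hypothetical toy network, not part of the original file).
# Assumes the augmented-weight convention this file relies on: every hidden-layer
# matrix carries a final [0 ... 0 1] row so the homogeneous coordinate survives the
# ReLU, and the trailing bit of each ap entry is that always-active coordinate.
function _demo_get_ap()
    W1 = [1.0 0.0 0.5; 0.0 1.0 -0.5; 0.0 0.0 1.0] # hidden layer (augmented with bias row)
    W2 = [1.0 -1.0 0.0]                           # output layer (no ReLU)
    return get_ap([0.3, -0.7], [W1, W2])          # -> [Bool[1, 0, 1]]: neuron 2 inactive
end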
# Given constraint index, return associated layer and neuron
function get_layer_neuron(index, ap)
prev = 0
for l in eachindex(ap)
if index > length(ap[l]) + prev
prev += length(ap[l])
else
return l, (index - prev)
end
end
end
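
# Sketch (hypothetical layer sizes): flat constraint indices count neurons
# top-to-bottom across layers, so with layers of length (3, 2) index 4 maps to
# layer 2, neuron 1.
_demo_get_layer_neuron() = get_layer_neuron(4, [trues(3), trues(2)]) # -> (2, 1)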
# Get hyperplane equation associated with a given neuron and input
function neuron_map(layer, neuron, ap, weights; normalize=false)
matrix = I
for l in 1:layer-1
matrix = diagm(0 => ap[l])*weights[l]*matrix
end
matrix = weights[layer]*matrix
if normalize
return normalize_row(matrix[neuron,:])
else
return matrix[neuron,:]
end
end
# Get affine map associated with an ap
function local_map(ap::Vector{BitVector}, weights::Vector{Matrix{Float64}})
matrix = I
for l in 1:length(weights)-1
matrix = diagm(0 => ap[l])*weights[l]*matrix
end
matrix = weights[end]*matrix
return matrix[:,1:end-1], matrix[:,end] # C,d of y = Cx+d
end
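
# Sketch (same hypothetical toy network as above): with every neuron active the
# local map is the plain composition of the layers, giving y = Cx + d.
function _demo_local_map()
    W1 = [1.0 0.0 0.5; 0.0 1.0 -0.5; 0.0 0.0 1.0]
    W2 = [1.0 -1.0 0.0]
    ap = get_ap([0.3, 0.7], [W1, W2]) # all neurons active in this cell
    return local_map(ap, [W1, W2])    # -> ([1.0 -1.0], [1.0]), i.e. y = x₁ - x₂ + 1
end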
### FUNCTIONS FOR GETTING CONSTRAINTS ###
# Convert negative zeros (-0.0) to 0.0 so duplicate rows compare and hash identically
function positive_zeros(A)
@inbounds for t in eachindex(A)
if isequal(A[t], -0.0)
A[t] = 0.0
end
end
return A
end
# Build the full (possibly redundant) set of Ax ≤ b constraints for an activation pattern
function get_constraints(weights::Vector{Matrix{Float64}}, ap::Vector{BitVector}, num_neurons)
L = length(weights)
# Initialize necessary data structures #
idx2repeat = Dict{Int64,Vector{Int64}}() # Dict from indices of A to indices of A that define the same constraint (including itself)
zerows = Vector{Int64}() # indices that correspond to degenerate constraints
A = Matrix{Float64}(undef, num_neurons, size(weights[1],2)) # constraint matrix. A[:,1:end-1]x ≤ -A[:,end]
lin_map = I
# build constraint matrix #
i = 1
for layer in 1:L-1
output = weights[layer]*lin_map
for neuron in 1:length(ap[layer])
A[i,:] = (1-2*ap[layer][neuron]) * output[neuron,:]
if !isapprox(A[i,1:end-1], zeros(size(A,2)-1), atol=ϵ) # check nonzero.
A[i,:] = normalize_row(A[i,:])
else
push!(zerows, i)
end
i += 1
end
lin_map = diagm(0 => ap[layer])*weights[layer]*lin_map
end
A = positive_zeros(A)
unique_rows, unique_row = unique_custom(A, dims=1)
for i in 1:length(unique_row)
idx2repeat[i] = findall(x-> x == i, unique_row)
end
unique_nonzerow_indices = setdiff(unique_rows, zerows)
return A[:,1:end-1], -A[:,end], idx2repeat, zerows, unique_nonzerow_indices
end
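
# Sketch (same hypothetical toy network): each hidden neuron contributes one
# halfspace of the cell; the bias-carrying rows come back as degenerate
# constraints and land in `zerows`. Relies on unique_custom (included above).
function _demo_get_constraints()
    W1 = [1.0 0.0 0.5; 0.0 1.0 -0.5; 0.0 0.0 1.0]
    W2 = [1.0 -1.0 0.0]
    ap = get_ap([0.3, 0.7], [W1, W2])
    num_neurons = sum(length.(ap)) # 3, including the homogeneous coordinate
    return get_constraints([W1, W2], ap, num_neurons)
end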
### FUNCTIONS FOR REMOVING REDUNDANT CONSTRAINTS ###
# Remove redundant constraints (rows of A,b).
# A constraint is essential iff, after relaxing it, the LP optimum can push past its original 'b' value.
function remove_redundant(A, b, Aᵢ, bᵢ, unique_nonzerow_indices, essential, ap)
redundant, redundantᵢ = remove_redundant_bounds(A, b, Aᵢ, bᵢ, unique_nonzerow_indices) # bounding box heuristic
non_redundant = setdiff(unique_nonzerow_indices, redundant) # working non-redundant set
non_redundantᵢ = setdiff(collect(1:length(bᵢ)), redundantᵢ) # working non-redundant set
essentialᵢ = Vector{Int64}() #; essential = Vector{Int64}()
unknown_set = setdiff(non_redundant, essential) # working non-redundant and non-essential set
unknown_setᵢ = setdiff(non_redundantᵢ, essentialᵢ) # working non-redundant and non-essential set
essential, essentialᵢ = exact_lp_remove(A, b, Aᵢ, bᵢ, essential, essentialᵢ, non_redundant, non_redundantᵢ, unknown_set, unknown_setᵢ, ap)
isempty(essential) && isempty(essentialᵢ) ? error("No essential constraints!") : nothing
saved_lps = length(essential)+length(essentialᵢ) + length(redundant)+length(redundantᵢ)-2*size(A,2)
solved_lps = 2*size(A,2) + length(unknown_set) + length(unknown_setᵢ)
if bᵢ != []
return vcat(A[essential,:], Aᵢ[essentialᵢ,:]), vcat(b[essential], bᵢ[essentialᵢ]), sort(essential), saved_lps, solved_lps
else
return A[essential,:], b[essential], sort(essential), saved_lps, solved_lps
end
end
function remove_redundant_bounds(A, b, Aᵢ, bᵢ, unique_nonzerow_indices; presolve=false)
# implements the bounding box heuristic
redundant = Vector{Int64}()
redundantᵢ = Vector{Int64}()
bounds = Matrix{Float64}(undef,size(A,2),2) # [min₁ max₁; ... ;minₙ maxₙ]
model = Model(GLPK.Optimizer)
presolve ? set_optimizer_attribute(model, "presolve", 1) : nothing
@variable(model, x[1:size(A,2)])
@constraint(model, A[unique_nonzerow_indices,:]*x .<= b[unique_nonzerow_indices])
bᵢ != [] ? @constraint(model, Aᵢ*x .<= bᵢ) : nothing
@constraint(model, x[1:size(A,2)] .<= 1e8*ones(size(A,2))) # to keep bounded
@constraint(model, x[1:size(A,2)] .>= -1e8*ones(size(A,2))) # to keep bounded
for i in 1:size(A,2)
for j in [1,2]
j == 1 ? @objective(model, Min, x[i]) : @objective(model, Max, x[i])
optimize!(model)
if termination_status(model) == MOI.OPTIMAL
bounds[i,j] = objective_value(model)
elseif termination_status(model) == MOI.NUMERICAL_ERROR && !presolve
return remove_redundant_bounds(A, b, Aᵢ, bᵢ, unique_nonzerow_indices, presolve=true)
elseif termination_status(model) == MOI.INFEASIBLE && !presolve
return remove_redundant_bounds(A, b, Aᵢ, bᵢ, unique_nonzerow_indices, presolve=true)
else
# bounding box heuristic failed
println("Bounding box heuristic failed!")
return redundant, redundantᵢ
# @show termination_status(model)
# println(model)
# error("Suboptimal LP bounding box!")
end
end
end
# Find redundant constraints
for i in unique_nonzerow_indices
val = sum([A[i,j] > 0 ? A[i,j]*bounds[j,2] : A[i,j]*bounds[j,1] for j in axes(A,2)])
val + 1e-4 < b[i] ? push!(redundant,i) : nothing
end
if bᵢ != []
for i in eachindex(bᵢ)
val = sum([Aᵢ[i,j] > 0 ? Aᵢ[i,j]*bounds[j,2] : Aᵢ[i,j]*bounds[j,1] for j in axes(Aᵢ,2)])
val + 1e-4 < bᵢ[i] ? push!(redundantᵢ,i) : nothing
end
end
return redundant, redundantᵢ
end
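
# Sketch (hypothetical data): inside the unit box [-1,1]², the halfspace
# x₁ + x₂ ≤ 10 can never be tight, so the bounding-box heuristic flags row 5
# as redundant without any exact LP.
function _demo_bounds_heuristic()
    A = [1.0 0.0; -1.0 0.0; 0.0 1.0; 0.0 -1.0; 1.0 1.0]
    b = [1.0, 1.0, 1.0, 1.0, 10.0]
    return remove_redundant_bounds(A, b, [], [], collect(1:5)) # -> ([5], [])
end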
# Brute-force removal of LP constraints, given heuristics have already identified some essential and redundant constraints
# Dynamic memory allocation might be slowing this down (e.g. push!)
function exact_lp_remove(A, b, Aᵢ, bᵢ, essential, essentialᵢ, non_redundant, non_redundantᵢ, unknown_set, unknown_setᵢ, ap; presolve=false)
model = Model(GLPK.Optimizer)
# model = Model(() -> Clp.Optimizer(; want_infeasibility_certificates = false))
# set_optimizer_attribute(model, "meth", GLPK.GLP_DUALP) # use dual simplex so we can early stop
presolve ? set_optimizer_attribute(model, "presolve", 1) : nothing
@variable(model, x[1:size(A,2)])
@constraint(model, con[j in non_redundant], dot(A[j,:],x) <= b[j])
bᵢ != [] ? @constraint(model, conₛ[j in non_redundantᵢ], dot(Aᵢ[j,:],x) <= bᵢ[j]) : nothing # tack on non-redundant superset constraints
for (k,i) in enumerate(unknown_set)
# solving maximization
@objective(model, Max, dot(A[i,:],x))
set_normalized_rhs(con[i], b[i]+100) # relax ith constraint in case it is essential
k > 1 ? set_normalized_rhs(con[unknown_set[k-1]], b[unknown_set[k-1]]) : nothing # un-relax i-1 constraint
# set_optimizer_attribute(model, "obj_ul", b[i] + ϵ) # stop the solve if objective value past the threshold
optimize!(model)
if termination_status(model) == MOI.OPTIMAL
if objective_value(model) ≥ b[i] + ϵ # 1e-15 is too small.
push!(essential, i)
end
elseif termination_status(model) == MOI.NUMERICAL_ERROR && !presolve
return exact_lp_remove(A, b, Aᵢ, bᵢ, essential, essentialᵢ, non_redundant, non_redundantᵢ, unknown_set, unknown_setᵢ, ap; presolve=true)
else
@show termination_status(model)
println("Dual infeasible implies that primal is unbounded.")
save("error_states/latest_error_ap.jld2", Dict("ap" => ap))
error("Suboptimal LP constraint check!")
end
end
if bᵢ != []
# Brute force LP method on superset constraints#
for (k,i) in enumerate(unknown_setᵢ)
@objective(model, Max, dot(Aᵢ[i,:],x))
set_normalized_rhs(conₛ[i], bᵢ[i]+100) # relax ith constraint
k > 1 ? set_normalized_rhs(conₛ[unknown_setᵢ[k-1]], bᵢ[unknown_setᵢ[k-1]]) : nothing # un-relax i-1 constraint
# set_optimizer_attribute(model, "obj_ul", bᵢ[i] + ϵ)
optimize!(model)
if termination_status(model) == MOI.OPTIMAL
if objective_value(model) ≥ bᵢ[i] + ϵ
push!(essentialᵢ, i)
end
elseif termination_status(model) == MOI.NUMERICAL_ERROR && !presolve
return exact_lp_remove(A, b, Aᵢ, bᵢ, essential, essentialᵢ, non_redundant, non_redundantᵢ, unknown_set, unknown_setᵢ, ap; presolve=true)
else
@show termination_status(model)
println("Dual infeasible implies that primal is unbounded.")
error("Suboptimal LP constraint check!")
end
end
end
return essential, essentialᵢ
end
# Brute-force removal of LP constraints, given heuristics have already identified some essential and redundant constraints
# Dynamic memory allocation might be slowing this down (e.g. push!)
function exact_lp_remove_feas(A, b, Aᵢ, bᵢ, essential, essentialᵢ, non_redundant, non_redundantᵢ, unknown_set, unknown_setᵢ; presolve=false)
model = Model(GLPK.Optimizer)
# set_optimizer_attribute(model, "want_infeasibility_certificates", 0)
# set_optimizer_attribute(model, "meth", "GLP_DUALP")
# set_optimizer_attribute(model, "obj_ul", 0)
@objective(model, MOI.FEASIBILITY_SENSE, 0)
presolve ? set_optimizer_attribute(model, "presolve", 1) : nothing
@variable(model, x[1:size(A,2)])
@constraint(model, con[j in non_redundant], dot(A[j,:],x) <= b[j])
bᵢ != [] ? @constraint(model, conₛ[j in non_redundantᵢ], dot(Aᵢ[j,:],x) <= bᵢ[j]) : nothing # tack on non-redundant superset constraints
for (k,i) in enumerate(unknown_set)
# reset last flipped constraint
if k > 1
i_prev = unknown_set[k-1]
set_normalized_coefficient([con[i_prev] for _ in 1:length(x)], x, A[i_prev,:])
set_normalized_rhs(con[i_prev], b[i_prev])
end
# flip constraint and solve problem
set_normalized_coefficient([con[i] for _ in 1:length(x)], x, -A[i,:])
set_normalized_rhs(con[i], -b[i] - ϵ)
optimize!(model)
# mark constraint as redundant or essential
if termination_status(model) == MOI.OPTIMAL # essential
push!(essential, i)
elseif termination_status(model) == MOI.INFEASIBLE # redundant
nothing
elseif termination_status(model) == MOI.NUMERICAL_ERROR && !presolve
return exact_lp_remove_feas(A, b, Aᵢ, bᵢ, essential, essentialᵢ, non_redundant, non_redundantᵢ, unknown_set, unknown_setᵢ; presolve=true)
else
@show termination_status(model)
println("Dual infeasible implies that primal is unbounded.")
error("Suboptimal LP constraint check!")
end
end
if bᵢ != []
# Brute force LP method on superset constraints#
for (k,i) in enumerate(unknown_setᵢ)
# reset last flipped constraint
if k > 1
i_prev = unknown_setᵢ[k-1]
set_normalized_coefficient([conₛ[i_prev] for _ in 1:length(x)], x, Aᵢ[i_prev,:])
set_normalized_rhs(conₛ[i_prev], bᵢ[i_prev])
end
# flip constraint and solve problem
set_normalized_coefficient([conₛ[i] for _ in 1:length(x)], x, -Aᵢ[i,:])
set_normalized_rhs(conₛ[i], -bᵢ[i] - ϵ)
optimize!(model)
# mark constraint as redundant or essential
if termination_status(model) == MOI.OPTIMAL # essential
push!(essentialᵢ, i)
elseif termination_status(model) == MOI.INFEASIBLE # redundant
nothing
elseif termination_status(model) == MOI.NUMERICAL_ERROR && !presolve
return exact_lp_remove_feas(A, b, Aᵢ, bᵢ, essential, essentialᵢ, non_redundant, non_redundantᵢ, unknown_set, unknown_setᵢ; presolve=true)
else
@show termination_status(model)
println("Dual infeasible implies that primal is unbounded.")
error("Suboptimal LP constraint check!")
end
end
end
return essential, essentialᵢ
end
### FUNCTIONS FOR FINDING NEIGHBORS ###
# Adds neighbor aps to working_set
function add_neighbor_aps(ap::Vector{BitVector}, neighbor_indices::Vector{Int64}, working_set, idx2repeat::Dict{Int64,Vector{Int64}}, zerows::Vector{Int64}, weights::Vector{Matrix{Float64}}, ap2essential, ap2neighbors; graph=false)
for idx in neighbor_indices
neighbor_ap = deepcopy(ap)
l, n = get_layer_neuron(idx, neighbor_ap)
neighbor_constraint = -(1-2*neighbor_ap[l][n])*neuron_map(l, n, neighbor_ap, weights, normalize=true) # constraint for c′
type1 = idx2repeat[idx]
type2 = zerows
neighbor_ap = flip_neurons!(type1, type2, neighbor_ap, weights, neighbor_constraint)
# error_ap = load("error_states/error_ap_9.25.24.jld2")["ap"]
# if neighbor_ap == error_ap
# save("error_states/parent_ap_9.25.24.jld2", Dict("ap" => ap))
# error("Found parent!")
# end
if graph
if !haskey(ap2neighbors, ap)
ap2neighbors[ap] = [neighbor_ap]
elseif neighbor_ap ∉ ap2neighbors[ap]
push!(ap2neighbors[ap], neighbor_ap)
end
if !haskey(ap2neighbors, neighbor_ap)
ap2neighbors[neighbor_ap] = [ap]
elseif ap ∉ ap2neighbors[neighbor_ap]
push!(ap2neighbors[neighbor_ap], ap)
end
end
if !haskey(ap2essential, neighbor_ap) && neighbor_ap ∉ working_set # if I haven't explored it && it's not yet in the working set
push!(working_set, neighbor_ap)
ap2essential[neighbor_ap] = idx2repeat[idx] # All of the neurons that define the neighbor constraint
end
end
return working_set, ap2essential
end
# Handles flipping of activation pattern from c to c′
function flip_neurons!(type1, type2, neighbor_ap, weights, neighbor_constraint)
# neuron_idx: flat neuron index, counting from the top neuron of the first layer (= 1) to the bottom neuron of the last layer (= end)
a, b = neighbor_constraint[1:end-1], -neighbor_constraint[end] # a⋅x ≤ b for c′
for neuron_idx in sort(vcat(type1, type2))
l, n = get_layer_neuron(neuron_idx, neighbor_ap)
new_map = (1-2*neighbor_ap[l][n])*neuron_map(l, n, neighbor_ap, weights, normalize=true)
a′, b′ = new_map[1:end-1], -new_map[end]
if isapprox(a′, -a, atol=ϵ) && isapprox(b′, -b, atol=ϵ)
neighbor_ap[l][n] = !neighbor_ap[l][n]
elseif isapprox(a′, zeros(length(a)), atol=ϵ) && isapprox(b′, 0, atol=ϵ)
neighbor_ap[l][n] = 0
end
end
return neighbor_ap
end
### FUNCTIONS FOR PERFORMING REACHABILITY ###
# Solve a feasibility LP to check whether two polyhedra intersect
function poly_intersection(A₁, b₁, A₂, b₂; presolve=false)
dim = max(size(A₁,2), size(A₂,2)) # In the case one of them is empty
model = Model(GLPK.Optimizer)
presolve ? set_optimizer_attribute(model, "presolve", 1) : nothing
@variable(model, x[1:dim])
@objective(model, MOI.FEASIBILITY_SENSE, 0)
@constraint(model, A₁*x .≤ b₁)
@constraint(model, A₂*x .≤ b₂)
optimize!(model)
if termination_status(model) == MOI.INFEASIBLE # no intersection
return false
elseif termination_status(model) == MOI.OPTIMAL # intersection
return true
elseif termination_status(model) == MOI.NUMERICAL_ERROR && !presolve
return poly_intersection(A₁, b₁, A₂, b₂, presolve=true)
else
@show termination_status(model)
@show A₁; @show b₁; @show A₂; @show b₂
error("Intersection LP error!")
end
end
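
# Sketch (hypothetical boxes): [0,1]² overlaps [0.5,1.5]² but not [2,3]².
function _demo_poly_intersection()
    box(lo, hi) = ([1.0 0.0; -1.0 0.0; 0.0 1.0; 0.0 -1.0], [hi, -lo, hi, -lo])
    A₁, b₁ = box(0.0, 1.0)
    A₂, b₂ = box(0.5, 1.5)
    A₃, b₃ = box(2.0, 3.0)
    return poly_intersection(A₁, b₁, A₂, b₂), poly_intersection(A₁, b₁, A₃, b₃) # -> (true, false)
end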
# Find {y | Ax ≤ b and y = Cx + d}; falls back to vertex projection when C is not invertible
# If this function fails it may be because the forward reachable set is too small:
# if C ≈ 0, the reachable set is ≈ d, i.e. essentially a single point.
function affine_map(A, b, C, d)
xdim = size(A,2)
ydim = size(C,1)
invertible = (ydim == xdim) && (rank(C) == xdim)
# compute ≈ chebyshev radius of reachable set
x_r = cheby_lp([], [], A, b, [])[4]
y_r = x_r*opnorm(C)
y_r < 1e-7 ? @warn("Forward reachable set is very small! Chebyshev radius ≈ ", y_r) : nothing
# return Hrep of reachable set
if invertible
return A*inv(C), b + A*inv(C)*d
else
return project_vertices(A,b,C,d)
end
end
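
# For the invertible branch above: substituting x = C⁻¹(y - d) into Ax ≤ b gives
# AC⁻¹y ≤ b + AC⁻¹d. Sketch (hypothetical map): scaling the unit box by 2 should
# return the Hrep of [-2,2]².
function _demo_affine_map()
    A = [1.0 0.0; -1.0 0.0; 0.0 1.0; 0.0 -1.0]
    b = ones(4)
    C = [2.0 0.0; 0.0 2.0]
    d = [0.0, 0.0]
    return affine_map(A, b, C, d) # -> (A/2, b)
end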
# project polytope Ax≤b through map Cx+d
# convert to Vrep -> find reachable Vrep -> convert to reachable Hrep
# direct projection of the Hrep with Polyhedra and CDDLib was error-prone
function project_vertices(A,b,C,d)
H = HPolytope(A,b)
V = convert(VPolytope, H)
Hreach = convert(HPolytope, C*V + d)
Aₒ = hcat([constraint.a for constraint in Hreach.constraints]...)'
bₒ = [constraint.b for constraint in Hreach.constraints]
return Aₒ, bₒ
end
### FUNCTIONS TO VERIFY ACTIVATION PATTERN ###
# Make sure we generate the correct ap for a region
function check_ap(input, weights, ap)
if ap != get_ap(input, weights)
@show input; @show ap; @show get_ap(input, weights)
error("NN ap not what it seems!")
end
return nothing
end
# Solve Chebyshev Center LP
# "an optimal dual variable is nonzero only if its associated constraint in the primal is binding", http://web.mit.edu/15.053/www/AMP-Chapter-04.pdf
function cheby_lp(A, b, Aᵢ, bᵢ, unique_nonzerow_indices; presolve=false)
dim = max(size(A,2), size(Aᵢ,2)) # In the case one of them is empty
model = Model(GLPK.Optimizer)
# set_optimizer_attribute(model, "dual", 1)
presolve ? set_optimizer_attribute(model, "presolve", 1) : nothing
@variable(model, r)
@variable(model, x_c[1:dim])
@objective(model, Max, r)
for i in eachindex(b)
if i ∈ unique_nonzerow_indices
@constraint(model, dot(A[i,:],x_c) + r*norm(A[i,:]) ≤ b[i])
else
@constraint(model, 0*r ≤ 0) # non-constraint so we have a dual variable for each index in A
end
end
for i in eachindex(bᵢ)
@constraint(model, dot(Aᵢ[i,:],x_c) + r*norm(Aᵢ[i,:]) ≤ bᵢ[i]) # superset constraints
end
@constraint(model, r ≤ 1e4) # prevents unboundedness
@constraint(model, -r ≤ -1e-15) # Must have r>0
optimize!(model)
if termination_status(model) == MOI.OPTIMAL
constraints = all_constraints(model, GenericAffExpr{Float64,VariableRef}, MOI.LessThan{Float64})
length(constraints)-2 != length(b)+length(bᵢ) ? (error("Not enough dual variables!")) : nothing
essential = [i for i in 1:length(b) if abs(dual(constraints[i])) > 1e-4]
essentialᵢ = [i-length(b) for i in length(b)+1:length(constraints)-2 if abs(dual(constraints[i])) > 1e-4]
return value.(x_c), essential, essentialᵢ, value(r)
elseif termination_status(model) == MOI.NUMERICAL_ERROR && !presolve
return cheby_lp(A, b, Aᵢ, bᵢ, unique_nonzerow_indices, presolve=true)
else
@show termination_status(model)
@show A; @show b; @show Aᵢ; @show bᵢ
@show unique_nonzerow_indices
println("Dual infeasible => primal unbounded.")
error("Chebyshev center error!")
end
end
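
# Sketch (hypothetical box): the Chebyshev center of [-1,1]² is the origin with
# radius 1; the returned index sets flag constraints with nonzero duals, i.e.
# those binding at the optimum.
function _demo_cheby_lp()
    A = [1.0 0.0; -1.0 0.0; 0.0 1.0; 0.0 -1.0]
    b = ones(4)
    x_c, essential, essentialᵢ, r = cheby_lp(A, b, [], [], collect(1:4))
    return x_c, r # -> ([0.0, 0.0], 1.0) up to solver tolerance
end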
### MAIN ALGORITHM ###
# Given weights and input/output constraints, return ap2input, ap2output, ap2map, ap2backward (and ap2neighbors if graph=true)
# set reach=false for cell enumeration only
# Supports searching for multiple backward reachable sets at once
function compute_reach(weights, Aᵢ::Matrix{Float64}, bᵢ::Vector{Float64}, Aₒ::Vector{Matrix{Float64}}, bₒ::Vector{Vector{Float64}}; fp = [], reach=false, back=false, connected=false, graph=false, check_aps=false)
# Construct necessary data structures #
ap2input = OrderedDict{Vector{BitVector}, Tuple{Matrix{Float64},Vector{Float64}} }() # Dict from ap -> (A,b) input constraints
ap2output = OrderedDict{Vector{BitVector}, Tuple{Matrix{Float64},Vector{Float64}} }() # Dict from ap -> (A′,b′) output constraints
ap2backward = [Dict{Vector{BitVector}, Tuple{Matrix{Float64},Vector{Float64}}}() for _ in 1:length(Aₒ)]
ap2map = Dict{Vector{BitVector}, Tuple{Matrix{Float64},Vector{Float64}} }() # Dict from ap -> (C,d) local affine map
ap2essential = Dict{Vector{BitVector}, Vector{Int64}}() # Dict from ap to neuron indices we know are essential
ap2neighbors = Dict{Vector{BitVector}, Vector{Vector{BitVector}}}() # Dict from ap to vector of neighboring aps
working_set = Set{Vector{BitVector}}() # Network aps we want to explore
# Initialize algorithm #
input = (fp == [] ? get_input(Aᵢ, bᵢ) : fp) # this may fail if initialized on the boundary of a cell
ap = get_ap(input, weights)
ap2essential[ap] = Vector{Int64}()
push!(working_set, ap)
num_neurons = sum([length(ap[layer]) for layer in eachindex(ap)])
# Begin enumeration of affine regions #
i, saved_lps, solved_lps = (1, 0, 0)
while !isempty(working_set)
println("# of affine regions: ", i)
ap = pop!(working_set)
# Get local affine_map
C, d = local_map(ap, weights)
ap2map[ap] = (C,d)
# get local polytope
A, b, idx2repeat, zerows, unique_nonzerow_indices = get_constraints(weights, ap, num_neurons)
# whether to double check generated ap matches that of a sample point
if check_aps
center, essential, essentialᵢ = cheby_lp(A, b, Aᵢ, bᵢ, unique_nonzerow_indices)[1:3] # Chebyshev center
check_ap(center, weights, ap)
end
# remove redundant constraints in the polyhedron
A, b, neighbor_indices, saved_lps_i, solved_lps_i = remove_redundant(A, b, Aᵢ, bᵢ, unique_nonzerow_indices, ap2essential[ap], ap)
# compute the forward reachable set
reach ? ap2output[ap] = affine_map(A, b, C, d) : nothing
# compute backward reachable set
if back && connected
# only add neighbors of cells that are in the BRS
for k in 1:length(Aₒ)
Aᵤ, bᵤ = (Aₒ[k]*C, bₒ[k]-Aₒ[k]*d) # for Aₒy ≤ bₒ and y = Cx+d -> AₒCx ≤ bₒ-Aₒd
if poly_intersection(A, b, Aᵤ, bᵤ)
ap2backward[k][ap] = (vcat(A, Aᵤ), vcat(b, bᵤ)) # not a fully reduced representation
working_set, ap2essential = add_neighbor_aps(ap, neighbor_indices, working_set, idx2repeat, zerows, weights, ap2essential, ap2neighbors, graph=graph)
end
end
else
if back
# add neighbors of all cells
for k in 1:length(Aₒ)
Aᵤ, bᵤ = (Aₒ[k]*C, bₒ[k]-Aₒ[k]*d) # for Aₒy ≤ bₒ and y = Cx+d -> AₒCx ≤ bₒ-Aₒd
if poly_intersection(A, b, Aᵤ, bᵤ)
ap2backward[k][ap] = (vcat(A, Aᵤ), vcat(b, bᵤ)) # not a fully reduced representation
end
end
end
working_set, ap2essential = add_neighbor_aps(ap, neighbor_indices, working_set, idx2repeat, zerows, weights, ap2essential, ap2neighbors, graph=graph)
end
ap2input[ap] = (A, b)
# update counts
i += 1; saved_lps += saved_lps_i; solved_lps += solved_lps_i
end
total_lps = saved_lps + solved_lps
println("Total solved LPs: ", solved_lps)
println("Total saved LPs: ", saved_lps, "/", total_lps, " : ", round(100*saved_lps/total_lps, digits=1), "% pruned." )
if graph
return ap2input, ap2output, ap2map, ap2backward, ap2neighbors
else
return ap2input, ap2output, ap2map, ap2backward
end
end
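
# End-to-end usage sketch (hypothetical toy problem, not from the original repo):
# enumerate the affine regions of the 2-neuron toy net over the box [-1,1]² and,
# with back=true, collect the cells whose image intersects the output set y ≤ 0.
# On this toy problem the loop should visit only a handful of regions.
function _demo_compute_reach()
    W1 = [1.0 0.0 0.5; 0.0 1.0 -0.5; 0.0 0.0 1.0]
    W2 = [1.0 -1.0 0.0]
    Aᵢ = [1.0 0.0; -1.0 0.0; 0.0 1.0; 0.0 -1.0]; bᵢ = ones(4)
    Aₒ = [ones(1,1)]; bₒ = [[0.0]] # single output halfspace y ≤ 0
    return compute_reach([W1, W2], Aᵢ, bᵢ, Aₒ, bₒ, reach=true, back=true)
end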
function verify_safety(weights, Aᵢ::Matrix{Float64}, bᵢ::Vector{Float64}, Aₒ::Vector{Matrix{Float64}}, bₒ::Vector{Vector{Float64}})
# Construct necessary data structures #
ap2essential = Dict{Vector{BitVector}, Vector{Int64}}() # Dict from ap to neuron indices we know are essential
ap2neighbors = Dict{Vector{BitVector}, Vector{Vector{BitVector}}}() # Dict from ap to vector of neighboring aps
working_set = Set{Vector{BitVector}}() # Network aps we want to explore
ap_unsafe = Dict{Int32, Vector{BitVector}}() # maps the index of an unsafe output set to an example unsafe activation pattern
# Initialize algorithm #
input = get_input(Aᵢ, bᵢ) # this may fail if initialized on the boundary of a cell
ap = get_ap(input, weights)
ap2essential[ap] = Vector{Int64}()
push!(working_set, ap)
num_neurons = sum([length(ap[layer]) for layer in eachindex(ap)])
# Begin enumeration of affine regions #
i, saved_lps, solved_lps = (1, 0, 0)
while !isempty(working_set)
println("# of affine regions: ", i)
ap = pop!(working_set)
# Get local affine_map
C, d = local_map(ap, weights)
# get local polyhedron
A, b, idx2repeat, zerows, unique_nonzerow_indices = get_constraints(weights, ap, num_neurons)
# check safety of local polyhedron
for k in 1:length(Aₒ)
# don't check if already proven unsafe
if !haskey(ap_unsafe, k)
# compute preimage of unsafe set and check intersection with current polyhedron
Aᵤ, bᵤ = (Aₒ[k]*C, bₒ[k]-Aₒ[k]*d) # for Aₒy ≤ bₒ and y = Cx+d -> AₒCx ≤ bₒ-Aₒd
if poly_intersection(A, b, Aᵤ, bᵤ)
ap_unsafe[k] = ap
println("Found and unsafe activation patter for the ", k, "th output set!")
end
end
end
# if every output set has been proven unsafe, stop early
if length(ap_unsafe) == length(Aₒ)
break
end
# remove redundant constraints in the polyhedron
A, b, neighbor_indices, saved_lps_i, solved_lps_i = remove_redundant(A, b, Aᵢ, bᵢ, unique_nonzerow_indices, ap2essential[ap], ap)
# add neighbors of all cells
working_set, ap2essential = add_neighbor_aps(ap, neighbor_indices, working_set, idx2repeat, zerows, weights, ap2essential, ap2neighbors)
# update counts
i += 1; saved_lps += saved_lps_i; solved_lps += solved_lps_i
end
# create binary vector where 1 indicates that output set was unsafe
vec_unsafe = falses(length(Aₒ))
if !isempty(ap_unsafe)
vec_unsafe[collect(keys(ap_unsafe))] .= true
end
total_lps = saved_lps + solved_lps
println("Total solved LPs: ", solved_lps)
println("Total saved LPs: ", saved_lps, "/", total_lps, " : ", round(100*saved_lps/total_lps, digits=1), "% pruned." )
return ap_unsafe, vec_unsafe
end