# Copyright 2019-2024, Vincent Hugot <[email protected]>
# This file is part of VH's NFA Framework.
#
# VH's NFA Framework is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
#
# VH's NFA Framework is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with VH's NFA Framework.
# If not, see <https://www.gnu.org/licenses/>.
from __future__ import annotations
import io
from toolkit import *
from collections import defaultdict
import html
# TODO: meta decorator to choose what to preserve, using setattr
def preserve(m):
"""
    Decorator: preserve annex properties:
    worder, _grow_step
:param m:
:return:
"""
def f(s,*p,**k) -> NFA:
worder = s.worder
_grow_step = s._grow_step
res = m(s,*p,**k)
res.worder = worder
res._grow_step = _grow_step
return res
return f
class NFA:
"""
Nondeterministic Finite-State Automata (with ε-transitions)
:param I: initial states
:param F: final states
:param Δ: rules (p,a,q)
:param Q: additional states (if not in rules)
:param Σ: additional symbols (if not in rules)
:param name: what's its name?
:param trimmed: is it already trimmed? (avoid redoing it)
:param worder: type of the words of the language. str / tuple
:param w: NFA recognising word w
:param l: NFA recognising finite language l
"""
VISUPDF = "visu" # name of default visualisation file
VISULANG = 10 # default visualisation: how many language elements; zero to deactivate
VISULANGFILTER = ("power",3) # on each language element: raw -> as is
# (power,N) -> N+ consecutive symbols are grouped (strings only)
VISUSIZE = True # default visualisation of automaton's size
VISUNAME = True # default visualisation of automaton's name
    VISUFONT = "Libertinus Sans, Linux Biolinum, Libertinus Math, Times New Roman" # default fontname
VISUPREPEND = False # default to prepending visus (pdf in reverse order)
    VISUCROP = False # default dot/pdf visualisation: crop figure?
VISU_INITIAL_ARROW = True # True for initial arrow, else special style
VISURANKDIR="LR" # dot parameter rankdir for graph direction / TB
VISULAYOUT="dot" # layout engine to use: dot, neato, [s]fdp, circo, twopi, osage
VISUFACTORISE = True # factorise multiple transitions
VISUDOUBLEARROWS = False # use <-a-> for -a-> <-a- ?
NOVISU = False # globally deactivate visualisation; for profiling etc
LARGE_RENDERER = "sfdp" # alternative graph renderer for large graphs
LARGE_RENDERER_OPTIONS = ["-Goverlap=false"] # additional graph renderer options
LARGE = 800 # a NFA is considered large when it reaches this size
pdf_renderer = pdf_renderer()
def __new__(cls, *a, w=None, l=None, **k):
if any(x is not None for x in (w, l)): assert not a and not k
if w is not None: return NFA.of_word(w)
if l is not None: return NFA.of_set(l)
return super().__new__(cls)
def __init__(s,I=(),F=(),Δ=(),name="",Q=(),Σ=(),trimmed=False,worder=str,
w=None, l=None):
if any(x is not None for x in (w,l)): return # fully built by __new__
s.Δ = set()
s.Σ = set(Σ)
s._I, s._F = set(I), set(F)
s.Q = s._I | s._F | set(Q) # if you need useless states, with no associated rules
s.add_rules(Δ)
s._trimmed = trimmed
s.name=name
s.worder=worder
        s._grow_step = None # for step by step constructions
s.Moore_table = None
s.Moore_steps = 0
s.Moore_table_tex = None
s.classes = None
s.inv_classes = None
s.rm_eps_closures = None
@property
def F(s): return s._F
@F.setter
def F(s, F): s.Q |= F; s._F = F
@property
def I(s): return s._I
@I.setter
def I(s, I): s._I = set(I); s.Q |= s._I
    # TODO: property setter for |= on delta
def add_rule(s,p,a,q,final=False):
s.Δ.add((p,a,q))
s.Σ.add(a)
s.Q |= {p,q}
if final: s.F.add(q)
return s
def try_rule(s,p,a,q,final=False):
"""add if this is new and report if you did something"""
if (p,a,q) in s.Δ: return False
s.add_rule(p,a,q,final)
return True
def add_rules(s,Δ,final=False):
for r in Δ: s.add_rule(*r,final=final)
return s
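    # Illustrative usage sketch (not part of the original file): building the
    # language a*b rule by rule; add_rule returns the automaton, so calls chain.
    #   A = NFA({0}, {1}, name="a*b")
    #   A.add_rule(0, 'a', 0).add_rule(0, 'b', 1)
    #   assert 'aab' in A and 'ba' not in A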
@staticmethod
def clear():
"""clear visualisation PDF"""
if os.path.isfile(f := f"{NFA.VISUPDF}.pdf"): os.remove(f)
def trans_2(s):
"""get dict (p,a) -> set of q : delta total function"""
look = { (p,a) : set() for p in s.Q for a in s.Σ }
# look = defaultdict(lambda : set()) # breaks complete !
for (p,a,q) in s.Δ:
look[p,a] = look[p,a] | {q}
return look
def trans_2d(s):
"""deterministic case; partial"""
return { (p,a) : q for (p,a,q) in s.Δ }
def trans_1(s):
"""get dict p -> set of (a,q)"""
look = defaultdict(lambda: set())
for (p, a, q) in s.Δ: look[p].add((a, q))
return look
def trans_pq(s):
"""get dict p,q -> set of symbs linking them"""
d = defaultdict(lambda : set())
for p,a,q in s.Δ: d[(p,q)].add(a)
return d
def lang(s):
"""generator of all words in language; small length first;
unspecified order otherwise
Will always terminate if finite language !
:param worder: constructor for output type
"""
s = s.renum(smart=False).rm_eps().trim() # epsilons break ordering
runs = { (s.worder(), q) for q in s.I }
look = s.trans_1()
        safety = 0 # count unproductive iterations, to avoid looping forever on an empty language
while True:
acc = { w for (w,p) in runs
if p in s.F } # should redo like BUTA
for w in acc: yield w; safety = 0 # loop has been productive
runs = { (w+str(a) if s.worder is str else w+(a,) , q)
for (w,p) in runs
for (a,q) in look.get(p,()) }
safety += 1
if safety > len(s.Q): break
# iterable: over the language
def __iter__(s): return s.lang()
def lang_up_to_len(s,n):
for w in s.lang():
if len(w) > n: break
yield w
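    # Illustrative sketch (assumed usage): enumerating a finite prefix of the
    # language, shortest words first.
    #   A = NFA({0}, {1}, {(0, 'a', 0), (0, 'b', 1)})
    #   list(A.lang_up_to_len(2))   # -> ['b', 'ab']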
    # test whether the language is finite; uses the pumping lemma
    def is_finite(s): # should just test existence of a non-epsilon loop
for w in s.lang():
if len(w) >= len(s.Q): return False
return True
def __len__(s):
"""cardinal of the language. math.inf if infinite"""
N = 200 # quick attempt for small finite languages
if len(l := list(s[:N])) < N: return len(set(l))
return len(set(s)) if s.is_finite() else math.inf
@preserve
def __matmul__(s,o):
""" shuffle product;
should support epsilons"""
return NFA(
{ (i,j) for i in s.I for j in o.I },
{ (i,j) for i in s.F for j in o.F },
{ ((p,r), a, (q,r)) for (p,a,q) in s.Δ for r in o.Q }
|
{ ((r,p), a, (r,q)) for (p,a,q) in o.Δ for r in s.Q },
name=f"({s.name} ∥ {o.name})"
)
@staticmethod
def disjoint(os):
"""Make automata states disjoint if need be
:param os: list of automata"""
if len(os) <= 1: return os
couples = ( (os[i],os[j]) for i in range(len(os)-1) for j in range(i+1,len(os)) )
if any( A.Q & B.Q for A,B in couples ):
return [s.map(lambda q: (k, q)) for k, s in enumerate(os)]
return os
@preserve
def concatenate(*os):
"""Concatenate automata / languages"""
os = NFA.disjoint(os)
res = NFA(
os[0].I,
os[-1].F,
set.union(*(s.Δ for s in os)),
name="("+" + ".join(s.name or "None" for s in os) +")",
trimmed=all(A._trimmed for A in os)
)
for A,B in pairwise(os):
res.add_rules( { (p,'',q) for p in A.F for q in B.I } )
return res
@classmethod
def _from_object(c,o):
"""
Convert various types of objects into automata, for instance for
"prefix" + Automaton + ["suffix1", "suffix2"]
"""
match o:
case str() | tuple(): return NFA.of_word(o)
case list() | set() : return NFA.of_set(o)
case c(): return o
return NotImplemented
def __add__(s,o):
"""
Language concatenation
:param o: other automaton or compatible object
"""
o = NFA._from_object(o); return s.concatenate(o)
def __radd__(s,o):
o = NFA._from_object(o); return o.concatenate(s)
def __mul__(s, n:int):
"""
Repeat
:param n:
:return: automaton concatenated to itself
"""
if not isinstance(n, int): return NotImplemented
if n < 0: raise ValueError
if n == 0: return NFA(name=f"{s.name} * 0")
if n == 1: return s
return (s + (s * (n - 1))).named(f"{s.name} * {n}")
def __rmul__(s,o): return s*o
def star(s):
"""Kleene star"""
A = s.copy()
i = next(fresh_gen(s.Q))
A.I = {i} ; A.F = {i}
A.add_rules( { (i,'',qi) for qi in s.I } | { (qf,'',i) for qf in s.F } )
return A.named(f"({s.name})*")
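    # Hypothetical usage: Kleene star of a single-word language.
    #   A = NFA(w="ab").star()
    #   assert "" in A and "abab" in A and "aba" not in A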
@staticmethod
def shuffle(u,v,aut=False):
"""word shuffle"""
u,v = ( NFA.of_word(w) for w in (u,v) )
return u @ v if aut else set( u @ v )
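    # Illustrative sketch (not in the original source): the shuffle of two words
    # is the set of all their interleavings.
    #   NFA.shuffle("ab", "c")   # -> {'abc', 'acb', 'cab'}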
def __and__(s,o):
"""fully synchronised product: language intersection"""
if not all( s.Σ ): s = s.rm_eps()
if not all( o.Σ ): o = o.rm_eps()
return NFA(
{ (i,j) for i in s.I for j in o.I },
{ (i,j) for i in s.F for j in o.F },
{ ((p,P), a, (q,Q))
for (p,a,q) in s.Δ for (P,b,Q) in o.Δ if a == b },
name=f"({s.name} ∩ {o.name})")
class StayC:
def __repr__(s): return "_"
def __lt__(s,o): return True
def __gt__(s, o): return False
Stay = StayC() # special static stay operation symbol
def _sprod2_brutal(s, o, svec, _=Stay, silent=()):
assert all(len(v) == 2 for v in svec), len(svec.pop())
return NFA(
{(i, j) for i in s.I for j in o.I},
{(i, j) for i in s.F for j in o.F},
{((p, P), (a,b), (q, Q)) for (p, a, q) in s.Δ for (P, b, Q) in o.Δ if (a,b) in svec}
|
{((p, r), (a, _), (q, r)) for (p, a, q) in s.Δ for r in o.Q if a in silent or (a, _) in svec}
|
{((r, p), (_, a), (r, q)) for (p, a, q) in o.Δ for r in s.Q if a in silent or (_, a) in svec}
|
( {((R, r), (_, _), (R, r)) for R in s.Q for r in o.Q} if (_,_) in svec else set() )
,
name=f"({s.name} ⨉b {o.name})",
worder=tuple
)
@staticmethod
def sprod_brutal(*As, svec, _=Stay, silent=()):
"""as sprod, but performs product brutally"""
assert all( len(v) == len(As) for v in svec ), (len(svec.pop()),len(As))
l = As
R,*As = As
i=1
while As:
A,*As = As
i += 1
R = R._sprod2_brutal(A, svec and {tuplepart(t, i) for t in svec}, _, silent).map(flattupleL, flattupleL)
return R.named("(" + " ⨉b ".join(s.name or "None" for s in l) + ")")
@staticmethod
def sprod(*As, svec,
_=Stay,
silent=(),
record_steps=False,
stopper=None,
filter=lambda A,P,v,Q:True,
hook = lambda A: None
):
""" DEPRECATED
:param hook: called on current version of automaton at each step
:param filter: rulefilter(A,P,v,Q) predicate on automaton and rule, filtering
what rules / states are added (in batch)
:param stopper: predicate on automaton: stop when some condition is reached
:param record_steps: for growtofixpoint/visusteps
:param _: stay operation symbol
:param svec: synchronisation vectors set
:param silent: silent transitions can occur in one system regardless of synchro.
Equivalent to adding ____a__ vectors
        :return: synchronised product along vectors; on-the-fly, starting from initial
states
"""
assert all( len(err := v) == len(As) for v in svec ), (err,len(As))
assert all ( a == _ or a in A.Σ or (err := (a,A.name)) and False
for v in svec for a,A in zip(v,As) ), err
I = set(product(*(A.I for A in As)))
tr = [A.trans_2() for A in As]
for a in silent:
svec |= NFA.shuffle([_]*(len(As)-1),[a])
def grow(A : NFA):
newrules= set()
for v in svec:
for P in A.Q: # P
# print(f"{v=} {P=}")
Qs = set(product( *( [p] if a == _ else t.get((p,a),[])
for p,a,t in zip(P,v,tr) ) ) )
# print(Qs)
newrules |= {(P,v,Q) for Q in Qs if filter(A, P, v, Q)} - A.Δ
if not newrules:
A.F = { Q for Q in A.Q if all( q in B.F for q,B in zip(Q,As) ) }
else: A.add_rules(newrules)
# print(repr(A.visu()))
hook(A)
if stopper and stopper(A): return False
return newrules
R = NFA(I, (), (),worder=tuple).growtofixpoint(grow,record_steps)
return R.named("(" + " ⨉ ".join(A.name or "None" for A in As) + ")")
@staticmethod
def nsprod_interf(*As, svec, _=Stay, pretty_trans=False, pretty_states=False, pretty=None, filter=lambda P, v, Q:True, **kw):
"""DEPRECATED. Named synchronised product. INTERFACE VERSION
Same as sprod, but with a more convenient interface:
svec of the form [{component_name: transition, ...},...]"""
vd = { tuple( d[n] if (n:=A.name) in d else _ for A in As ) : tuple(d.items())
for d in svec }
def toass(v): return tuple((c.name, x) for c, x in zip(As, v))
def filterv(A,P,v,Q):
return filter(dict(toass(P)), dict(toass(v)), dict(toass(Q)))
R = NFA.sprod(*As,svec=set(vd),_=_,**kw)
if pretty: pretty_trans = pretty_states = pretty
if pretty_states is False: R = R.map(f=toass)
if pretty_trans is False: R = R.map(g=toass)
if pretty_states is True:
def ps(x):return f"{', '.join([f'{cn}:{a}' for cn,a in toass(x)])}"
R = R.map(f=ps)
if pretty_trans is True :
def pt(x): return f"{', '.join([f'{cn}:{a}' for cn,a in vd[x]])}"
R = R.map(g=pt)
return R.named("(" + " ⨉ ".join(A.name or "None" for A in As) + ")")
@staticmethod
def nsprod(*C, sds,
silent=set(),
record_steps=False,
stopper=None,
filter=lambda A, P, v, Q: True,
hook=lambda A: None,
nice=False,
# disable
):
""" Named synchronised product of systems, along synchronisation dictionaries set
[{component_name: transition, ...},...]
:param sds: synchronisation dictionaries list
:param filter: rulefilter(A,P,v,Q) predicate on automaton and rule, filtering
what rules / states are added (in batch).
States are tuple of couples, since dict is not hashable.
:param record_steps: for growtofixpoint/visusteps
:param stopper: predicate on automaton: stop when some condition is reached
:param hook: called on current version of automaton at each step
:param silent: silent transitions can occur in any one system regardless of synchro.
equivalent to adding all {c:a} to sds
:param nice: call dnice at the end for nicer visualisation.
        :return: synchronised product along vectors; on-the-fly, starting from initial
states"""
C = { c.name : c for c in C }
for d in sds:
for c in d: assert d[c] in C[c].Σ, (c,d)
I = { tuple( (c,q) for c in C for q in C[c].I ) }
assert all( silent <= c.Σ for c in C.values() )
sds += [ {c:a} for c in C for a in silent ]
T = { c : C[c].trans_2() for c in C }
def grow(A : NFA):
newrules= set()
for d in sds:
# print(f"for {d=} in sds")
dt = tuple( (c,d[c]) for c in C if c in d)
for P in A.Q:
Pd = dict(P)
# print(f"for {Pd=} in A.Q:")
# print(f"{T[c]=} {P[c]=} {d[c]=}")
Qsd = { c:T[c][(Pd[c],d[c])] if c in d else {Pd[c]} for c in C }
# print(f"{Qsd=}")
Qs = set() if set() in Qsd.values() else { tuple( (c,a) for c in C for a in Qsd[c] ) }
# print(f"{Qs=}")
newrules |= {(P,dt,Q) for Q in Qs if filter(A, P, dt, Q)} - A.Δ
if not newrules:
A.F = { Q for Q in A.Q if all( q in C[c].F for c,q in Q ) }
else:
A.add_rules(newrules)
hook(A)
if stopper and stopper(A): return "STOP"
R = NFA(I, worder=tuple).growtofixpoint(grow,record_steps)
if nice:
assert not record_steps, "Steps visualisation incompatible with dnice mapping"
R = R.dnice()
return R.named("(" + " ⨉ ".join(c or "None" for c in C) + ")")
def dnice(s,f=None, g=None):
"""nicer states and transitions for dictionary-based product automata.
f, g are the same parameters as for map, plus some predefined values:
on example (('A', 1), ('B', 2), ('C', 2))
"dict", -> "A:1, B:2, C:2"
"states", -> "122"
"systems" -> "ABC"
("-1", x): reverse mapping on x; e.g. with x=2
-> "BC"
"groups" -> "1:A, 2:BC"
"""
def dict_(x): return ', '.join(f'{cn}:{a}' for cn, a in x)
def states(x): return ''.join(str(a) for _,a in x)
def systems(x): return ''.join(str(a) for a,_ in x)
def groups(q):
d = dict(q)
return ", ".join( f"{v}:{''.join(invdl(q)[v])}"
for v in sorted(set(d.values())) )
def dispatch(fid,default):
if fid is None: return default
match fid:
case ("-1", x): return lambda q: ''.join(invdl(q)[x])
return {"dict": dict_, "states": states, "systems": systems,
"groups": groups}[fid]
f = dispatch(f,dict_); g = dispatch(g,dict_)
return s.map(f,g)
def label(s, l, l_str):
"""
Given a labelling for each state, return a representation of the NFA
where labels are transformed into strings by l_str, and presented one per line.
Used in particular for CTL labelling
:param l: dict { q in A.Q : set of labels }
:param l_str: label -> str
"""
bs = '\\'
def disp(q):
return f"{q}:{bs}n{f'{bs}n'.join(l_str(phi) for phi in l.get(q, ()))}"
return s.map(f=lambda q:disp(q)).named(s.nop("λ"))
@preserve
def proj(s,al):
"""projected NFA on smaller alphabet"""
return NFA(s.I,s.F,
{ (p,a,q) if a in al else (p,'',q)
for (p,a,q) in s.Δ } ).rm_eps().trim().setnop('π',s.name)
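    # Hypothetical usage: projection erases symbols outside the given alphabet
    # (they become ε and are then removed).
    #   assert "ac" in NFA(w="abc").proj("ac")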
@preserve # depends on functions
def map(s, f=lambda x:x, g=lambda x:x):
"""renames all states and symbols according to a function
if f is not injective, might change language !
:param f states fun
:param g symbols fun
"""
f = { x: f(x) for x in s.Q }.get
g = { x: g(x) for (_,x,_) in s.Δ }.get
return NFA(
{f(q) for q in s.I},
{f(q) for q in s.F},
{(f(p), g(a), f(q)) for (p,a,q) in s.Δ },
Q={f(q) for q in s.Q},
name=s.nop('↦'))
def stringify(s,f=str,g=str):
"""when states contain sets, string rep may vary from moment to moment
fix a single string representation"""
f = {p : f(p) for p in s.Q}
g = {a : g(a) for a in s.Σ}
return s.map(lambda x:f[x],lambda x:g[x])
def setworder(s,w):
s.worder = w
return s
# preserve not usable due to variant return type
    def renum(s,n=0,smart=True,getiso=False,getboth=False):
        """change complicated states to n, n+1, ...
        smart renumber: in order of accessibility
        if getiso get the dict isomorphism instead
        if getboth get AUT, iso"""
        assert not (getiso and getboth)
if not smart:
f = dict(zip(s.Q,range(n,n+len(s.Q))))
else:
d = s.trans_1() ; f = {}
acc = { p : { q for _,q in d.get(p,()) } for p in s.Q }
g = iter(range(n,n+len(s.Q)))
def up(new):
newf = { e : next(g) for e in new - set(f) }
if not newf: return False
f.update(newf); return True
up(s.I)
while up( set().union(*(acc[p] for p in f)) ):
pass
up(s.Q - set(f))
if getiso: return f
S = s.map(lambda x: f[x]).setnop('ℕ', name=s.name).setworder(s.worder)
if getboth: return S, f
return S
# language union
def __or__(s,o): return NFA.union(s,o)
def union(*os):
"""Union of argument NFAs"""
if not os: return NFA(set(), set(), set())
os = NFA.disjoint(os)
return NFA(
set.union(*(s.I for s in os)),
set.union(*(s.F for s in os)),
set.union(*(s.Δ for s in os)),
name="("+" ∪ ".join(s.name or "None" for s in os) +")",
trimmed=all(A._trimmed for A in os)
)
def inter(*os):
"""Intersection of argument NFAs"""
assert os
return reduce(NFA.__and__, os)
@staticmethod
def Union(it):
"""Union of iterator of NFAs"""
return NFA.union(*it)
@staticmethod
def Inter(it):
"""Intersection of iterator of NFAs"""
return NFA.inter(*it)
def __contains__(s,w):
"""language membership test"""
return s(w) & s.F
def run(s,w,used_states=True,**k):
"""visualise a run on w; params as visu"""
dmod={} ; e = s(s.worder()) ; u = s.worder()
for a in w+'Δ' if s.worder is str else w+('Δ',) : # final end-of-word unlikely symbol
dmod.update({ q: 'color="#BB0000" fillcolor="#BB0000" penwidth=2 fontcolor=white' for q in e })
s.visu(dmod=dmod, comment="/R:"+str(u),**k)
u += a if s.worder is str else (a,)
dmod = { k: # formerly used states and transitions
'color="#AA0000" fillcolor="#9999AA"'
if k in s.Q else
'color="#AA0000" fontcolor="#BB0000"'
for (k,v) in dmod.items() }
if not used_states: dmod={}
dmod.update({ (p,b,q): 'color="#BB0000" penwidth=1.2 fontcolor="#BB0000"'
for p,b,q in s.Δ if p in e and (not b or b == a) })
e = s(u)
return s
@preserve
def rm_eps(s,closures=False):
"""returns epsilon-free version of automaton"""
look = s.trans_2()
def clos(p):
s = {p}
while True:
ss = s | set().union(*(look.get((q,''),set()) for q in s))
if s == ss : return s
s = ss
def closs(s): return set().union(*( clos(p) for p in s ))
res = NFA(closs(s.I), s.F,
{ (p,a,q)
for p,a in look if a != ''
for q in closs(look[p,a]) },name=s.nop('ε'),
)
if closures: res.rm_eps_closures = { q:clos(q) for q in s.Q }
return res
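    # Illustrative sketch (assumed usage): '' is the ε symbol throughout;
    # rm_eps preserves the language while removing it.
    #   A = NFA({0}, {2}, {(0, '', 1), (1, 'a', 2)})
    #   B = A.rm_eps()
    #   assert '' not in B.Σ and 'a' in B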
@preserve
def rm_eps2(s, closures=False):
return s.reverse().rm_eps(closures).reverse().named(s.nop('ε2'))
def __call__(s,w,Q=None):
"""
returns exact states accessed by reading word from starting states
:param w: the word
        :param Q: starting states; default: from initials
"""
if "" in s.Σ: s = s.rm_eps()
e = s.I if Q is None else Q
for a in w:
e = {q for p, b, q in s.Δ if p in e and b == a}
return e
def accessibles(s):
"""return set of accessible states"""
d = s.trans_1()
acc = { p : { q for _,q in d.get(p,()) } for p in s.Q }
x = s.I # accessibles
while True:
xx = x | set().union(*(acc[p] for p in x))
if xx == x: break
x = xx
return x
def reverse(s):
return NFA(s.F, s.I, { (q,a,p) for p,a,q in s.Δ }, name=s.nop('←') )
def coaccessibles(s):
return s.reverse().accessibles()
def only(s,S,trimmed=False):
"""brutally remove all states not in S"""
return NFA(s.I & S, s.F & S,
{ (p,a,q) for p,a,q in s.Δ if {p,q} <= S},
Q=s.Q & S, trimmed=trimmed, name=s.nop('t' if trimmed else 'only') )
def nop(s,o):
"""
name after operation
:param o: letter or string for operation type
        :return: new name with suffix indicating operation history
for internal use
"""
return s.name + f" /{o}"
@preserve
def setnop(s,o=None,name=None):
"""
Set name with additional operation.
Used to bypass large chains of op chars
:param o: op char
:param name: if defined, overrides real name
:return: self
"""
name = name if name else s.name
if name:
s.name = name
if o: s.name = s.nop(o)
return s
def named(s, name):
"""change name in place and return s"""
s.name = str(name)
return s
@preserve
def trim(s):
if s._trimmed: return s.copy()
use = s.accessibles() & s.coaccessibles()
return s.only(use,trimmed=True)
@staticmethod
def of_word(w):
"""return a NFA (DFA) accepting a single word"""
n = len(w)
return NFA({0}, {n}, { (k,w[k],k+1)
for k in range(n) },
name = str(w)[:10],
trimmed=True,
worder=str if type(w) is str else tuple)
@staticmethod
def of_length(n,Σ):
return NFA({0}, {n}, {(k, a, k + 1)
for k in range(n) for a in Σ},
name=f"/{Σ}^{n}:",
trimmed=True)
@staticmethod
def of_length_range(n,m,Σ):
A = NFA.of_length(m,Σ)
A.F = set(range(n,m+1))
return A.named(f"/{Σ}^[{n},{m}]:")
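    # Hypothetical usage: all words over the alphabet with length in [n, m].
    #   assert set(NFA.of_length_range(1, 2, "a")) == {"a", "aa"}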
def extend(s,Σ):
"""concatenate language with Σ^*"""
S = s.copy()
S.add_rules({(p, a, p) for p in S.F for a in Σ})
return S.setnop('e')
@staticmethod
def of_set(L,name=None):
"""return a NFA accepting exact finite langage"""
return NFA.Union(NFA.of_word(w) for w in L).renum().setnop(name=name or sortstr(L))
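    # Illustrative sketch: the w= and l= constructor shortcuts delegate to
    # of_word / of_set.
    #   assert list(NFA(w="ab")) == ["ab"]
    #   assert set(NFA(l={"a", "ab"})) == {"a", "ab"}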
@preserve
def copy(s): return NFA(s.I, s.F, s.Δ, Q=s.Q, trimmed=s._trimmed, name=s.name)
@preserve
def dfa(s,pdf=None, multi_init=False, force_init=False) -> NFA:
"""return equivalent DFA; no epsilons need apply
:param pdf: if true, generate step by step slides; not compatible with advanced options
:param multi_init: if true, do not merge initial states before proceeding
:param force_init: set of fsets; force a specific merging of initial states
"""
if pdf: return s.dfa_pdf(pdf) # delegate visualisation
if not all( a != "" for a in s.Σ ): s = s.rm_eps()
if s.is_det(): return s.copy().setnop('d')
l = s.trans_2()
do, done, rlz = ( { fset(s.I) } if not multi_init and not force_init else
{ fset({i}) for i in s.I } if not force_init else
force_init
, set(), set() )
q_init = do.copy()
while do:
P = do.pop(); done.add(P)
for a in s.Σ:
Q = fset(set.union(*(l[p,a] for p in P)))
if not Q: continue # useless: not coaccessible
rlz.add( (P,a,Q) )
if Q not in done: do.add(Q)
return NFA(q_init,
{ P for P in done if s.F & P },
rlz, name=s.nop('d' if not multi_init else 'mid'))
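    # Illustrative sketch (assumed usage): the subset construction; each DFA state
    # is a frozen set of original states.
    #   N = NFA({0}, {1}, {(0, 'a', 0), (0, 'a', 1)})   # nondeterministic on 'a'
    #   D = N.dfa()
    #   assert D.is_det() and 'aaa' in D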
@preserve
def dfa_pdf(s,pdf=None) -> NFA:
s.visu(pdfname=pdf, test=False,print_extra="dfa_pdf: INIT: ")
res = s.dfa()
d= {k:"style=invis" for k in res.Q | res.Δ }
l = s.trans_2()
do, done, rlz = {fset(s.I)}, set(), set()
while do:
P = do.pop(); done.add(P)
for q in done: d.pop(q,None)
d[P] = 'color="#BB0000" fillcolor="#BB0000" penwidth=2 fontcolor=white'
d.update({q: 'color="#AA0000" fillcolor="#CC9999"' for q in do})
res.visu(pdfname=pdf, test=False, dmod=d,print_extra=f"dfa_pdf: {P}")
for a in s.Σ:
Q = fset(set().union(*(l[p, a] for p in P)))
if not Q: continue # useless: not coaccessible
rlz.add((P, a, Q))
del d[(P,a,Q)]
if Q not in done: do.add(Q)
res.visu(pdfname=pdf, test=False,print_extra="dfa_pdf: END: ")
return res
def visusteps(s, dsteps=None, print_current=False, *p, **kw):
"""Visualise several steps through progressive reveal.
Same API as visu()
:param dsteps: dict state/trans -> step index;
by default uses that produced by .growtofixpoint()
"""
if print_current: print(f"{erase_line}{term_visu_style}Visusteps: {repr(s)}{term_reset}", end="")
if dsteps is None: dsteps = s._grow_step
d = {k: "style=invis" for k in s.Q | s.Δ}
n = min(dsteps.values()) ; N = max(dsteps.values())
for k in range(n,N+1):
if print_current: print(f"{erase_line}{term_visu_style}Visusteps {k}/{N}: {repr(s)}{term_reset}\r", end="")
d.update({ x : 'color="#BB0000" fillcolor="#BB0000" penwidth=2 fontcolor=white'
for x in dsteps if dsteps[x]==k and x in s.Q } )
d.update({x: 'color="#BB0000" fillcolor="#BB0000" penwidth=2 fontcolor="#BB0000"'
for x in dsteps if dsteps[x] == k and x in s.Δ})
d.update({x: 'color="#BB4455" fillcolor="#FFF5FF" penwidth=2'
for x in dsteps if dsteps[x] == k-1 and x in s.Q })
# print(d.keys() - dsteps.keys())
for x in tuple(d):
if x in dsteps:
if dsteps[x] < k - 1 or dsteps[x] == k - 1 and x in s.Δ : del d[x]
# bugs with double arrows, and is not meaningful because
# back arrow not reached at same time as direct arrow.
s.visu(dmod=d,*p,doublearrows=False,print_current=False,**kw)
if print_current: print(erase_line, end="")
return s.visu(*p,**kw)
def is_det(s,show=False,ignore_inits=False):
""" is it deterministic ?
:param show: display nondeterministic aspects of automaton
:param ignore_inits: test only transitions"""
def p(*x):
if show: print(*x)
if not ignore_inits and len(s.I) > 1: p("Init",s.I); return False
for k,v in s.trans_2().items():
if len(v) > 1: p(*k, "->",v); return False
return True
@preserve
def complete(s,Σ=set()):
"""return complete version (sink state)
:param Σ: complete over symbols not appearing in automaton
"""
Σ = (Σ if type(Σ) is set else set(Σ)) | s.Σ
sink = 0
l = s.trans_2()
if all( l.values() ) and Σ <= s.Σ : return s.copy()
while sink in s.Q: sink += 1
return NFA(s.I, s.F,
s.Δ
| {(sink,a,sink) for a in Σ}
| { (p,a,sink)
for p in s.Q for a in Σ
if not l.get((p,a), None)
},
name=s.nop('c') )
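    # Hypothetical usage: completion adds a sink state so that every (state, symbol)
    # pair has at least one successor.
    #   C = NFA({0}, {1}, {(0, 'a', 1)}).complete(Σ="ab")
    #   assert all(C.trans_2().values())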
@preserve
def __neg__(s):
"""language complementation"""
if s.is_empty(): return NFA.universal(s.Σ)
on = s.name
s = s.dfa().complete(Σ=s.Σ-{''})
return NFA(s.I, s.Q - s.F, s.Δ, name=f"{on} /-")
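    # Hypothetical usage: complementation is taken over the automaton's own alphabet.
    #   A = NFA(w="ab")
    #   assert "ab" not in -A and "ba" in -A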
def __xor__(s,o):
return ((s|o) - (s&o)).setnop(name=f"({s.name} ⊖ {o.name})")
def repr_txt(s,N=20):
L = list(s[:N + 1]); L.sort()
n = len(L) if len(L) < N else f"{N}+"
return f"{s}L {n:<3} {sortstr(L)}"
def __repr__(s):
return f"<NFA {s.name} #{s.size} Σ{len(s.Σ)} Q{len(s.Q)} I{len(s.I)} F{len(s.F)} Δ{len(s.Δ)}>"\
f"{':Trim' if s._trimmed else ''}"
def export(s):
"""export as Python code"""
return f"NFA(I={repr(s.I)},F={repr(s.F)},Δ={repr(s.Δ)})"
def test(s, N=50):
print(s.repr_txt(N)); return s
def __str__(s):
return (f"NFA {s.name}:" if s.name else "")+ \
f" ## = {s.size}\n"\
f"Σ {len(s.Σ):<3} {sortstr(s.Σ)}\n"\
f"Q {len(s.Q):<3} {sortstr(s.Q)}\n"\
f"I {len(s.I):<3} {sortstr(s.I)}\n"\
f"F {len(s.F):<3} {sortstr(s.F)}\n"\
f"Δ {len(s.Δ):<3} {sortstr(s.Δ)}"
def universal(al="ab"):
"""universal automaton on alphabet"""
return NFA({0},{0}, { (0,a,0) for a in al },
name="/U:" + (r:=repr(al))[:20] + ("..." if len(r)>20 else ""),
trimmed=True )
@preserve
def __sub__(s,o):
"""language difference"""
sc,oc = s.complete(s.Σ|o.Σ), o.complete(s.Σ|o.Σ)
return (sc & (-oc)).setnop(name=f"({s.name} ∖ {o.name})")
def __getitem__( s, i ) :
"""language pseudo-indexing and slicing ; terms may be repeated"""
if isinstance( i, int ) :
g = iter(s)
for _ in range(i): next(g)
return next(g)
elif isinstance( i, slice ) :
return islice(s.lang(),i.start,i.stop,i.step)
else:
            raise TypeError("Invalid argument type.")
def is_empty(s):
"""test whether language is empty"""
return not s.trim().Q
def is_universal(s):
"""is universal on its *implicit* alphabet?"""
return s == NFA.universal(s.Σ)
# language inclusion
def __le__(s,o): return (s - o).is_empty()
def __eq__(s,o):
"""language equivalence: s <= o <= s"""
# s,o = s.dfa(), o.dfa()
# for A,B in ((s,o), (o,s)): # attempts at optimisation make things slightly worse
# for w in A[:2]:
# if w not in B: return False
return bool(s.mini().renum(smart=False).iso(o.mini().renum(smart=False)))
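    # Hypothetical usage (== relies on the mini/iso helpers defined later in this file):
    #   assert NFA(w="ab") <= NFA(l={"ab", "cd"})    # language inclusion
    #   assert NFA(l={"ab"}) == NFA(w="ab")          # language equivalence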
def is_same(s,o):
"""is literally the exact same NFA"""
return s.I == o.I and s.F == o.F and s.Δ == o.Δ and s.Σ == o.Σ
def texvisu(s,*texargs,pdfname=None,pdfprepend=None,texout=None,
silent=True,print_current=False,renum=False,**texkwargs):
"""TeXperiment: visualise pdf of tex output; same arguments as tex
:param renum: renum states and use mapping -- use if complicated states
:param print_current: as visu() # deprecated
:param texout: write tex figure to...
:param silent: silence the output of LaTeX and pdfcrop processes
:param pdfname: name of target visualisation pdf; as visu()
:param pdfprepend: as visu()
:return:
"""
if NFA.NOVISU: return s
if print_current:
print(f"{erase_line}{term_visu_style}TeX Visu: {repr(s)}{term_reset}", end="")
if renum:
S, f = s.stringify().map(f=lambda q: q.replace("{", "\\{").replace("}", "\\}")).renum(getboth=True)
f = {str(v): k for k, v in f.items()}
S.texvisu(*texargs,qmap=f.get,**texkwargs)
return s
texc = s.tex(*texargs,**texkwargs)
if texout is not None:
with open(texout,'w') as f: f.write(texc)
pdfname = NFA.VISUPDF if not pdfname else pdfname
NFA.pdf_renderer.do_tex(texc,pdfname, pdfprepend=NFA.VISUPREPEND if pdfprepend is None else pdfprepend,
silent=silent)
if print_current: print(erase_line, end="")
return s
def tex(self, qloc="",bends="",defbend='',defavoidbend="bend left=10",
defloop='loop above',qmap=lambda x:x, params="",at=lambda x:None,
initdirs={}, noepsilon=False, f=lambda x:x):
"""
        :param defloop: default loop style
:param defavoidbend: default p <-> q avoidance bending
        :param qloc: spatial relations between states; chains p REL q REL r