/*
 * Copyright (c) 2012-2019 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */

module antlr.v4.runtime.atn.ATNSimulator;

import antlr.v4.runtime.UnsupportedOperationException;
import antlr.v4.runtime.atn.ATN;
import antlr.v4.runtime.atn.ATNConfigSet;
import antlr.v4.runtime.atn.ATNDeserializer;
import antlr.v4.runtime.atn.InterfaceATNSimulator;
import antlr.v4.runtime.atn.PredictionContext;
import antlr.v4.runtime.atn.PredictionContextCache;
import antlr.v4.runtime.dfa.DFAState;
import std.uuid;

/**
 * ATN simulator base class
 */
abstract class ATNSimulator : InterfaceATNSimulator
{

    public static int SERIALIZED_VERSION;

    /**
     * This is the current serialized UUID.
     *
     * Deprecated: Use {@link ATNDeserializer#checkCondition(boolean)} instead.
     */
    public static UUID SERIALIZED_UUID;

    /**
     * Must distinguish between a missing edge and an edge we know leads nowhere.
     */
    public static DFAState ERROR;
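
    // Hedged usage sketch (not part of this class): the concrete simulators keep
    // per-state edge arrays in which a null entry means "edge not computed yet",
    // while ERROR marks an edge that was computed and is known to lead nowhere.
    // The names `s`, `edges` and `t` below are illustrative only.
    //
    //     DFAState target = s.edges is null ? null : s.edges[t];
    //     if (target is ATNSimulator.ERROR) {
    //         // computed previously and known to lead nowhere; give up on this path
    //     }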

    public ATN atn;

    /**
     * The context cache maps all PredictionContext objects that are equals()
     * to a single cached copy. This cache is shared across all contexts
     * in all ATNConfigs in all DFA states. We rebuild each ATNConfigSet
     * to use only cached nodes/graphs in addDFAState(). We don't want to
     * fill this during closure() since there are lots of contexts that
     * pop up but are never used again, and filling it also greatly slows
     * down closure().
     *
     * <p>This cache makes a huge difference in memory and a little bit in speed.
     * For the Java grammar on java.*, it dropped the memory requirements
     * at the end from 25M to 16M. We don't store any of the full context
     * graphs in the DFA because they are limited to local context only,
     * but apparently there's a lot of repetition there as well. We optimize
     * the config contexts before storing the config set in the DFA states
     * by literally rebuilding them with cached subgraphs only.</p>
     *
     * <p>I tried a cache for use during closure operations that was cleared
     * after each adaptivePredict(). It cost a little more time, I think, and
     * doesn't save on the overall footprint, so it's not worth the complexity.</p>
     */
    public PredictionContextCache sharedContextCache;
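
    // Illustrative sketch only; the ParserATNSimulator constructor signature and
    // the decisionToDFA table are assumptions taken from the concrete simulators,
    // not from this file. The point is that a single PredictionContextCache is
    // shared by every simulator built over the same ATN, and getCachedContext()
    // below routes equal context graphs through that one cache:
    //
    //     auto cache = new PredictionContextCache;
    //     auto sim = new ParserATNSimulator(parser, atn, decisionToDFA, cache);
    //     auto canonical = sim.getCachedContext(someContext);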

    public static this()
    {
        SERIALIZED_VERSION = ATNDeserializer.SERIALIZED_VERSION;
        SERIALIZED_UUID = ATNDeserializer.SERIALIZED_UUID;
        ERROR = new DFAState(new ATNConfigSet());
        ERROR.stateNumber = int.max;
    }

    public this(ATN atn, PredictionContextCache sharedContextCache)
    {
        this.atn = atn;
        this.sharedContextCache = sharedContextCache;
    }

    abstract public void reset();

    /**
     * Clear the DFA cache used by the current instance. Since the DFA cache may
     * be shared by multiple ATN simulators, this method may affect the
     * performance (but not accuracy) of other parsers which are being used
     * concurrently.
     *
     * @throws UnsupportedOperationException if the current instance does not
     * support clearing the DFA.
     */
    public void clearDFA()
    {
        throw new UnsupportedOperationException("This ATN simulator does not support clearing the DFA.");
    }
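
    // A rough sketch of the override pattern (hedged; decisionToDFA and the DFA
    // constructor used here belong to the concrete simulators, not to this class):
    // a subclass that owns a DFA table supports clearing by rebuilding that table.
    //
    //     override public void clearDFA()
    //     {
    //         foreach (d, ref dfa; decisionToDFA)
    //             dfa = new DFA(atn.getDecisionState(cast(int)d), cast(int)d);
    //     }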

    public PredictionContextCache getSharedContextCache()
    {
        return sharedContextCache;
    }

    public PredictionContext getCachedContext(PredictionContext context)
    {
        if (sharedContextCache is null)
            return context;
        PredictionContext[PredictionContext] visited;
        return PredictionContext.getCachedContext(context,
                                                  sharedContextCache,
                                                  visited);
    }
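
    // Hedged usage note: before a configuration set is frozen into a DFA state
    // (e.g. in the concrete simulators' addDFAState()), each config's context is
    // typically run through this method so the DFA references only cached
    // subgraphs, as described for sharedContextCache above.
    //
    //     config.context = getCachedContext(config.context);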

}