1 /* 2 * Copyright (c) 2012-2017 The ANTLR Project. All rights reserved. 3 * Use of this file is governed by the BSD 3-clause license that 4 * can be found in the LICENSE.txt file in the project root. 5 */ 6 7 module antlr.v4.runtime.atn.PredictionContext; 8 9 import antlr.v4.runtime.InterfaceRecognizer; 10 import antlr.v4.runtime.RuleContext; 11 import antlr.v4.runtime.atn.ATN; 12 import antlr.v4.runtime.atn.ATNState; 13 import antlr.v4.runtime.atn.ArrayPredictionContext; 14 import antlr.v4.runtime.atn.EmptyPredictionContext; 15 import antlr.v4.runtime.atn.PredictionContextCache; 16 import antlr.v4.runtime.atn.RuleTransition; 17 import antlr.v4.runtime.atn.SingletonPredictionContext; 18 import antlr.v4.runtime.misc; 19 import core.atomic; 20 import std.algorithm.sorting; 21 import std.array; 22 import std.conv; 23 import std.typecons; 24 25 /** 26 * TODO add class description 27 */ 28 abstract class PredictionContext 29 { 30 31 /** 32 * Represents {@code $} in local context prediction, which means wildcard. 33 * {@code *+x = *}. 34 */ 35 public static const EmptyPredictionContext EMPTY = new EmptyPredictionContext; 36 37 /** 38 * Represents {@code $} in an array in full context mode, when {@code $} 39 * doesn't mean wildcard: {@code $ + x = [$,x]}. Here, 40 * {@code $} = {@link #EMPTY_RETURN_STATE}. 41 */ 42 enum int EMPTY_RETURN_STATE = int.max; 43 44 enum int INITIAL_HASH = 1; 45 46 /** 47 * @uml 48 * @shared 49 */ 50 public static shared int globalNodeCount = 0; 51 52 public int id; 53 54 /** 55 * Stores the computed hash code of this {@link PredictionContext}. The hash 56 * code is computed in parts to match the following reference algorithm. 57 * 58 * <pre> 59 * private int referenceHashCode() { 60 * int hash = {@link MurmurHash#initialize MurmurHash.initialize}({@link #INITIAL_HASH}); 61 * 62 * for (int i = 0; i < {@link #size()}; i++) { 63 * hash = {@link MurmurHash#update MurmurHash.update}(hash, {@link #getParent getParent}(i)); 64 * } 65 * 66 * for (int i = 0; i < {@link #size()}; i++) { 67 * hash = {@link MurmurHash#update MurmurHash.update}(hash, {@link #getReturnState getReturnState}(i)); 68 * } 69 * 70 * hash = {@link MurmurHash#finish MurmurHash.finish}(hash, 2 * {@link #size()}); 71 * return hash; 72 * } 73 * </pre> 74 */ 75 private size_t cachedHashCode; 76 77 public this() 78 { 79 id = globalNodeCount; 80 atomicOp!"+="(globalNodeCount, 1); 81 } 82 83 public this(size_t cachedHashCode) 84 { 85 this.cachedHashCode = cachedHashCode; 86 } 87 88 public static PredictionContext fromRuleContext(ATN atn, RuleContext outerContext) 89 { 90 if (outerContext is null) 91 outerContext = cast(RuleContext)RuleContext.EMPTY; 92 // if we are in RuleContext of start rule, s, then PredictionContext 93 // is EMPTY. Nobody called us. (if we are empty, return empty) 94 if (outerContext.parent is null || 95 outerContext == cast(RuleContext)RuleContext.EMPTY) 96 { 97 return cast(PredictionContext)PredictionContext.EMPTY; 98 } 99 100 // If we have a parent, convert it to a PredictionContext graph 101 PredictionContext parent = cast(PredictionContext)EMPTY; 102 parent = PredictionContext.fromRuleContext(atn, outerContext.parent); 103 104 ATNState state = atn.states[outerContext.invokingState]; 105 RuleTransition transition = cast(RuleTransition)state.transition(0); 106 return SingletonPredictionContext.create(parent, transition.followState.stateNumber); 107 } 108 109 abstract public size_t size(); 110 111 abstract public PredictionContext getParent(int index); 112 113 abstract public int getReturnState(int index); 114 115 /** 116 * @uml 117 * This means only the {@link #EMPTY} context is in set. 118 */ 119 public bool isEmpty() 120 { 121 return this == EMPTY; 122 } 123 124 public bool hasEmptyPath() 125 { 126 // since EMPTY_RETURN_STATE can only appear in the last position, we check last one 127 return getReturnState(to!int(size() - 1)) == EMPTY_RETURN_STATE; 128 } 129 130 /** 131 * @uml 132 * @safe 133 * @nothrow 134 * @override 135 */ 136 public override size_t toHash() @safe nothrow 137 { 138 return cachedHashCode; 139 } 140 141 /** 142 * @uml 143 * @override 144 */ 145 abstract public override bool opEquals(Object obj); 146 147 public static size_t calculateEmptyHashCode() 148 { 149 size_t hash = MurmurHash.initialize(INITIAL_HASH); 150 hash = MurmurHash.finish(hash, 0); 151 return hash; 152 } 153 154 public size_t calculateHashCode(PredictionContext parent, int returnState) 155 { 156 size_t hash = MurmurHash.initialize(INITIAL_HASH); 157 hash = MurmurHash.update!PredictionContext(hash, parent); 158 hash = MurmurHash.update(hash, returnState); 159 hash = MurmurHash.finish(hash, 2); 160 return hash; 161 } 162 163 public static size_t calculateHashCode(PredictionContext[] parents, int[] returnStates) 164 { 165 size_t hash = MurmurHash.initialize(INITIAL_HASH); 166 167 foreach (parent; parents) { 168 hash = MurmurHash.update!PredictionContext(hash, parent); 169 } 170 171 foreach (returnState; returnStates) { 172 hash = MurmurHash.update(hash, returnState); 173 } 174 175 hash = MurmurHash.finish(hash, (2 * parents.length)); 176 return hash; 177 } 178 179 public static PredictionContext merge(PredictionContext a, PredictionContext b, bool rootIsWildcard, 180 ref DoubleKeyMap!(PredictionContext, PredictionContext, PredictionContext) mergeCache) 181 in 182 { 183 assert(a !is null && b !is null); // must be empty context, never null 184 } 185 do 186 { 187 // share same graph if both same 188 if (a is b || a.opEquals(b)) 189 return a; 190 if (cast(SingletonPredictionContext)a && 191 cast(SingletonPredictionContext)b) { 192 return mergeSingletons(cast(SingletonPredictionContext)a, 193 cast(SingletonPredictionContext)b, 194 rootIsWildcard, mergeCache); 195 } 196 // At least one of a or b is array 197 // If one is $ and rootIsWildcard, return $ as * wildcard 198 if (rootIsWildcard) { 199 if (cast(EmptyPredictionContext)a) return a; 200 if (cast(EmptyPredictionContext)b) return b; 201 } 202 203 // convert singleton so both are arrays to normalize 204 if (cast(SingletonPredictionContext)a) { 205 a = new ArrayPredictionContext(cast(SingletonPredictionContext)a); 206 } 207 if (cast(SingletonPredictionContext)b) { 208 b = new ArrayPredictionContext(cast(SingletonPredictionContext)b); 209 } 210 return mergeArrays(cast(ArrayPredictionContext)a, cast(ArrayPredictionContext)b, 211 rootIsWildcard, mergeCache); 212 } 213 214 /** 215 * @uml 216 * Merge two {@link SingletonPredictionContext} instances. 217 * 218 * <p>Stack tops equal, parents merge is same; return left graph.<br> 219 * <embed src="images/SingletonMerge_SameRootSamePar.svg" type="image/svg+xml"/></p> 220 * 221 * <p>Same stack top, parents differ; merge parents giving array node, then 222 * remainders of those graphs. A new root node is created to point to the 223 * merged parents.<br> 224 * <embed src="images/SingletonMerge_SameRootDiffPar.svg" type="image/svg+xml"/></p> 225 * 226 * <p>Different stack tops pointing to same parent. Make array node for the 227 * root where both element in the root point to the same (original) 228 * parent.<br> 229 * <embed src="images/SingletonMerge_DiffRootSamePar.svg" type="image/svg+xml"/></p> 230 * 231 * <p>Different stack tops pointing to different parents. Make array node for 232 * the root where each element points to the corresponding original 233 * parent.<br> 234 * <embed src="images/SingletonMerge_DiffRootDiffPar.svg" type="image/svg+xml"/></p> 235 * 236 * @param a the first {@link SingletonPredictionContext} 237 * @param b the second {@link SingletonPredictionContext} 238 * @param rootIsWildcard {@code true} if this is a local-context merge, 239 * otherwise false to indicate a full-context merge 240 * @param mergeCache 241 */ 242 public static PredictionContext mergeSingletons(SingletonPredictionContext a, SingletonPredictionContext b, 243 bool rootIsWildcard, ref DoubleKeyMap!(PredictionContext, PredictionContext, PredictionContext) mergeCache) 244 { 245 if (mergeCache !is null ) { 246 Nullable!PredictionContext previous = mergeCache.get(a, b); 247 if (!previous.isNull) return previous.get; 248 previous = mergeCache.get(b, a); 249 if (!previous.isNull) return previous.get; 250 } 251 252 PredictionContext rootMerge = mergeRoot(a, b, rootIsWildcard); 253 if (rootMerge !is null) { 254 if (mergeCache !is null) mergeCache.put(a, b, rootMerge); 255 return rootMerge; 256 } 257 258 if (a.returnState == b.returnState) { // a == b 259 PredictionContext parent = merge(a.parent, b.parent, rootIsWildcard, mergeCache); 260 // if parent is same as existing a or b parent or reduced to a parent, return it 261 if ( parent == a.parent ) 262 return a; // ax + bx = ax, if a=b 263 if ( parent == b.parent ) 264 return b; // ax + bx = bx, if a=b 265 // else: ax + ay = a'[x,y] 266 // merge parents x and y, giving array node with x,y then remainders 267 // of those graphs. dup a, a' points at merged array 268 // new joined parent so create new singleton pointing to it, a' 269 PredictionContext a_ = SingletonPredictionContext.create(parent, a.returnState); 270 if (mergeCache !is null) 271 mergeCache.put(a, b, a_); 272 return a_; 273 } 274 else { // a != b payloads differ 275 // see if we can collapse parents due to $+x parents if local ctx 276 PredictionContext singleParent = null; 277 if (a == b || (a.parent !is null && a.parent.opEquals(b.parent))) { // ax + bx = [a,b]x 278 singleParent = a.parent; 279 } 280 if (singleParent !is null) { // parents are same 281 // sort payloads and use same parent 282 int[] payloads = [a.returnState, b.returnState]; 283 if ( a.returnState > b.returnState ) { 284 payloads[0] = b.returnState; 285 payloads[1] = a.returnState; 286 } 287 PredictionContext[] parents = [singleParent, singleParent]; 288 PredictionContext a_ = new ArrayPredictionContext(parents, payloads); 289 if (mergeCache !is null) mergeCache.put(a, b, a_); 290 return a_; 291 } 292 // parents differ and can't merge them. Just pack together 293 // into array; can't merge. 294 // ax + by = [ax,by] 295 int[] payloads = [a.returnState, b.returnState]; 296 PredictionContext[] parents = [a.parent, b.parent]; 297 if ( a.returnState > b.returnState ) { // sort by payload 298 payloads[0] = b.returnState; 299 payloads[1] = a.returnState; 300 parents.length = 0; 301 parents ~= b.parent; 302 parents ~= a.parent; 303 } 304 PredictionContext a_ = new ArrayPredictionContext(parents, payloads); 305 if (mergeCache !is null ) mergeCache.put(a, b, a_); 306 return a_; 307 } 308 } 309 310 /** 311 * Handle case where at least one of {@code a} or {@code b} is 312 * {@link #EMPTY}. In the following diagrams, the symbol {@code $} is used 313 * to represent {@link #EMPTY}. 314 * 315 * <h2>Local-Context Merges</h2> 316 * 317 * <p>These local-context merge operations are used when {@code rootIsWildcard} 318 * is true.</p> 319 * 320 * <p>{@link #EMPTY} is superset of any graph; return {@link #EMPTY}.<br> 321 * <embed src="images/LocalMerge_EmptyRoot.svg" type="image/svg+xml"/></p> 322 * 323 * <p>{@link #EMPTY} and anything is {@code #EMPTY}, so merged parent is 324 * {@code #EMPTY}; return left graph.<br> 325 * <embed src="images/LocalMerge_EmptyParent.svg" type="image/svg+xml"/></p> 326 * 327 * <p>Special case of last merge if local context.<br> 328 * <embed src="images/LocalMerge_DiffRoots.svg" type="image/svg+xml"/></p> 329 * 330 * <h2>Full-Context Merges</h2> 331 * 332 * <p>These full-context merge operations are used when {@code rootIsWildcard} 333 * is false.</p> 334 * 335 * <p><embed src="images/FullMerge_EmptyRoots.svg" type="image/svg+xml"/></p> 336 * 337 * <p>Must keep all contexts; {@link #EMPTY} in array is a special value (and 338 * null parent).<br> 339 * <embed src="images/FullMerge_EmptyRoot.svg" type="image/svg+xml"/></p> 340 * 341 * <p><embed src="images/FullMerge_SameRoot.svg" type="image/svg+xml"/></p> 342 * 343 * @param a the first {@link SingletonPredictionContext} 344 * @param b the second {@link SingletonPredictionContext} 345 * @param rootIsWildcard {@code true} if this is a local-context merge, 346 * otherwise false to indicate a full-context merge 347 */ 348 public static PredictionContext mergeRoot(SingletonPredictionContext a, SingletonPredictionContext b, 349 bool rootIsWildcard) 350 { 351 if (rootIsWildcard) { 352 if ( a == EMPTY ) return cast(PredictionContext)EMPTY; // * + b = * 353 if ( b == EMPTY ) return cast(PredictionContext)EMPTY; // a + * = * 354 } 355 else { 356 if ( a == EMPTY && b == EMPTY ) return cast(PredictionContext)EMPTY; // $ + $ = $ 357 if ( a == EMPTY ) { // $ + x = [$,x] 358 int[] payloads = [b.returnState, EMPTY_RETURN_STATE]; 359 PredictionContext[] parents = [b.parent, null]; 360 PredictionContext joined = 361 new ArrayPredictionContext(parents, payloads); 362 return joined; 363 } 364 if ( b == EMPTY ) { // x + $ = [$,x] ($ is always first if present) 365 int[] payloads = [a.returnState, EMPTY_RETURN_STATE]; 366 PredictionContext[] parents = [a.parent, null]; 367 PredictionContext joined = 368 new ArrayPredictionContext(parents, payloads); 369 return joined; 370 } 371 } 372 return null; 373 } 374 375 /** 376 * Merge two {@link ArrayPredictionContext} instances. 377 * * 378 * <p>Different tops, different parents.<br> 379 * <embed src="images/ArrayMerge_DiffTopDiffPar.svg" type="image/svg+xml"/></p> 380 * 381 * <p>Shared top, same parents.<br> 382 * <embed src="images/ArrayMerge_ShareTopSamePar.svg" type="image/svg+xml"/></p> 383 * 384 * <p>Shared top, different parents.<br> 385 * <embed src="images/ArrayMerge_ShareTopDiffPar.svg" type="image/svg+xml"/></p> 386 * 387 * <p>Shared top, all shared parents.<br> 388 * <embed src="images/ArrayMerge_ShareTopSharePar.svg" type="image/svg+xml"/></p> 389 * 390 * <p>Equal tops, merge parents and reduce top to 391 * {@link SingletonPredictionContext}.<br> 392 * <embed src="images/ArrayMerge_EqualTop.svg" type="image/svg+xml"/></p> 393 */ 394 public static PredictionContext mergeArrays(ArrayPredictionContext a, ArrayPredictionContext b, 395 bool rootIsWildcard, ref DoubleKeyMap!(PredictionContext, PredictionContext, PredictionContext) mergeCache) 396 { 397 if (mergeCache) { 398 Nullable!PredictionContext previous = mergeCache.get(a, b); 399 if (!previous.isNull) 400 return previous.get; 401 previous = mergeCache.get(b, a); 402 if (!previous.isNull) 403 return previous.get; 404 } 405 406 // merge sorted payloads a + b => M 407 int i = 0; // walks a 408 int j = 0; // walks b 409 int k = 0; // walks target M array 410 411 int[] mergedReturnStates = 412 new int[a.returnStates.length + b.returnStates.length]; 413 PredictionContext[] mergedParents = 414 new PredictionContext[a.returnStates.length + b.returnStates.length]; 415 // walk and merge to yield mergedParents, mergedReturnStates 416 417 while ( i<a.returnStates.length && j<b.returnStates.length ) { 418 PredictionContext a_parent = a.parents[i]; 419 PredictionContext b_parent = b.parents[j]; 420 if ( a.returnStates[i]==b.returnStates[j] ) { 421 // same payload (stack tops are equal), must yield merged singleton 422 int payload = a.returnStates[i]; 423 // $+$ = $ 424 bool both = payload == EMPTY_RETURN_STATE && 425 a_parent is null && b_parent is null; 426 bool ax_ax = (a_parent !is null && b_parent !is null) && 427 a_parent.opEquals(b_parent); // ax+ax -> ax 428 if (both || ax_ax ) { 429 mergedParents[k] = a_parent; // choose left 430 mergedReturnStates[k] = payload; 431 } 432 else { // ax+ay -> a'[x,y] 433 PredictionContext mergedParent = 434 merge(a_parent, b_parent, rootIsWildcard, mergeCache); 435 mergedParents[k] = mergedParent; 436 mergedReturnStates[k] = payload; 437 } 438 i++; // hop over left one as usual 439 j++; // but also skip one in right side since we merge 440 } 441 else if ( a.returnStates[i]<b.returnStates[j] ) { // copy a[i] to M 442 mergedParents[k] = a_parent; 443 mergedReturnStates[k] = a.returnStates[i]; 444 i++; 445 } 446 else { // b > a, copy b[j] to M 447 mergedParents[k] = b_parent; 448 mergedReturnStates[k] = b.returnStates[j]; 449 j++; 450 } 451 k++; 452 } 453 454 // copy over any payloads remaining in either array 455 if (i < a.returnStates.length) { 456 for (int p = i; p < a.returnStates.length; p++) { 457 mergedParents[k] = a.parents[p]; 458 mergedReturnStates[k] = a.returnStates[p]; 459 k++; 460 } 461 } 462 else { 463 for (int p = j; p < b.returnStates.length; p++) { 464 mergedParents[k] = b.parents[p]; 465 mergedReturnStates[k] = b.returnStates[p]; 466 k++; 467 } 468 } 469 470 // trim merged if we combined a few that had same stack tops 471 if ( k < mergedParents.length ) { // write index < last position; trim 472 if ( k == 1 ) { // for just one merged element, return singleton top 473 PredictionContext a_ = 474 SingletonPredictionContext.create(mergedParents[0], 475 mergedReturnStates[0]); 476 if ( mergeCache !is null ) mergeCache.put(a,b,a_); 477 return a_; 478 } 479 mergedParents = mergedParents[0..k]; 480 mergedReturnStates = mergedReturnStates[0..k]; 481 } 482 483 PredictionContext M = 484 new ArrayPredictionContext(mergedParents, mergedReturnStates); 485 486 // if we created same array as a or b, return that instead 487 // TODO: track whether this is possible above during merge sort for speed 488 if ( M.opEquals(a) ) { 489 if ( mergeCache !is null ) mergeCache.put(a,b,a); 490 return a; 491 } 492 if ( M.opEquals(b) ) { 493 if ( mergeCache !is null ) mergeCache.put(a,b,b); 494 return b; 495 } 496 497 combineCommonParents(mergedParents); 498 499 if ( mergeCache !is null ) mergeCache.put(a,b,M); 500 return M; 501 } 502 503 /** 504 * Make pass over all <em>M</em> {@code parents}; merge any {@code equals()} 505 * ones. 506 */ 507 public static void combineCommonParents(ref PredictionContext[] parents) 508 { 509 PredictionContext[PredictionContext] uniqueParents; 510 for (int p = 0; p < parents.length; p++) { 511 PredictionContext parent = parents[p]; 512 if (!(parent in uniqueParents)) { // don't replace 513 uniqueParents[parent] = parent; 514 } 515 } 516 for (int p = 0; p < parents.length; p++) { 517 parents[p] = uniqueParents[parents[p]]; 518 } 519 } 520 521 public static string toDOTString(PredictionContext context) 522 { 523 if (context is null) return ""; 524 auto buf = appender!string; 525 buf.put("digraph G {\n"); 526 buf.put("rankdir=LR;\n"); 527 528 PredictionContext[] nodes = getAllContextNodes(context); 529 nodes.sort(); 530 foreach (PredictionContext current; nodes) { 531 if (current.classinfo == SingletonPredictionContext.classinfo) { 532 string s = to!string(current.id); 533 buf.put(" s" ~ s); 534 string returnState = to!string(current.getReturnState(0)); 535 if (current.classinfo == EmptyPredictionContext.classinfo) 536 returnState = "$"; 537 buf.put(" [label=\"" ~ returnState ~ "\"];\n"); 538 continue; 539 } 540 ArrayPredictionContext arr = cast(ArrayPredictionContext)current; 541 buf.put(" s" ~ to!string(arr.id)); 542 buf.put(" [shape=box, label=\""); 543 buf.put("["); 544 bool first = true; 545 foreach (int inv; arr.returnStates) { 546 if (!first) buf.put(", "); 547 if (inv == EMPTY_RETURN_STATE) buf.put("$"); 548 else buf.put(to!string(inv)); 549 first = false; 550 } 551 buf.put("]"); 552 buf.put("\"];\n"); 553 } 554 555 foreach (PredictionContext current; nodes) { 556 if (current == EMPTY) continue; 557 for (int i = 0; i < current.size(); i++) { 558 if (current.getParent(i) is null) continue; 559 string s = to!string(current.id); 560 buf.put(" s" ~ s); 561 buf.put("->"); 562 buf.put("s"); 563 buf.put(to!string(current.getParent(i).id)); 564 if ( current.size()>1 ) buf.put(" [label=\"parent[" ~ to!string(i) ~ "]\"];\n"); 565 else buf.put(";\n"); 566 } 567 } 568 569 buf.put("}\n"); 570 return buf.data; 571 } 572 573 /** 574 * ref visited ? 575 */ 576 public static PredictionContext getCachedContext(PredictionContext context, PredictionContextCache contextCache, 577 PredictionContext[PredictionContext] visited) 578 { 579 if (context.isEmpty) { 580 return context; 581 } 582 583 if (context in visited) { 584 return visited[context]; 585 } 586 587 if (contextCache.hasKey(context)) { 588 auto existing = contextCache.get(context); 589 visited[context] = existing; 590 return existing; 591 } 592 593 bool changed = false; 594 PredictionContext[] parents = new PredictionContext[context.size]; 595 for (int i = 0; i < parents.length; i++) { 596 PredictionContext parent = getCachedContext(context.getParent(i), contextCache, visited); 597 if (changed || parent != context.getParent(i)) { 598 if (!changed) { 599 parents = new PredictionContext[context.size]; 600 for (int j = 0; j < context.size; j++) { 601 parents[j] = context.getParent(j); 602 } 603 changed = true; 604 } 605 parents[i] = parent; 606 } 607 } 608 609 if (!changed) { 610 contextCache.add(context); 611 visited[context] = context; 612 return context; 613 } 614 615 PredictionContext updated; 616 if (parents.length == 0) { 617 updated = cast(PredictionContext)EMPTY; 618 } 619 else if (parents.length == 1) { 620 updated = SingletonPredictionContext.create(parents[0], context.getReturnState(0)); 621 } 622 else { 623 ArrayPredictionContext arrayPredictionContext = cast(ArrayPredictionContext)context; 624 updated = new ArrayPredictionContext(parents, arrayPredictionContext.returnStates); 625 } 626 contextCache.add(updated); 627 visited[updated] = updated; 628 visited[context] = updated; 629 630 return updated; 631 } 632 633 /** 634 * recursive version of Sam's getAllNodes() 635 */ 636 public static PredictionContext[] getAllContextNodes(PredictionContext context) 637 { 638 PredictionContext[] nodes; 639 PredictionContext[PredictionContext] visited; 640 getAllContextNodes_(context, nodes, visited); 641 return nodes; 642 } 643 644 public static void getAllContextNodes_(PredictionContext context, PredictionContext[] nodes, 645 PredictionContext[PredictionContext] visited) 646 { 647 if (context is null || (context in visited)) return; 648 visited[context] = context; 649 nodes ~= context; 650 for (int i = 0; i < context.size; i++) { 651 getAllContextNodes_(context.getParent(i), nodes, visited); 652 } 653 } 654 655 public string[] toStrings(InterfaceRecognizer recognizer, int currentState) 656 { 657 return toStrings(recognizer, cast(PredictionContext)EMPTY, currentState); 658 } 659 660 public string[] toStrings(InterfaceRecognizer recognizer, PredictionContext stop, 661 int currentState) 662 { 663 string[] result; 664 outer: 665 for (int perm = 0; ; perm++) { 666 int offset = 0; 667 bool last = true; 668 PredictionContext p = this; 669 int stateNumber = currentState; 670 auto localBuffer = appender!string; 671 localBuffer.put("["); 672 while (!p.isEmpty() && p != stop) { 673 int index = 0; 674 if (p.size > 0) { 675 int bits = 1; 676 while ((1 << bits) < p.size) { 677 bits++; 678 } 679 int mask = (1 << bits) - 1; 680 index = (perm >> offset) & mask; 681 last &= index >= p.size - 1; 682 if (index >= p.size) { 683 continue outer; 684 } 685 offset += bits; 686 } 687 if (recognizer !is null) { 688 if (localBuffer.data.length > 1) { 689 // first char is '[', if more than that this isn't the first rule 690 localBuffer.put(' '); 691 } 692 ATN atn = recognizer.getATN(); 693 ATNState s = atn.states[stateNumber]; 694 string ruleName = recognizer.getRuleNames()[s.ruleIndex]; 695 localBuffer.put(ruleName); 696 } 697 else if ( p.getReturnState(index)!= EMPTY_RETURN_STATE) { 698 if ( !p.isEmpty ) { 699 if (localBuffer.data.length > 1) { 700 // first char is '[', if more than that this isn't the first rule 701 localBuffer.put(' '); 702 } 703 704 localBuffer.put(to!string(p.getReturnState(index))); 705 } 706 } 707 stateNumber = p.getReturnState(index); 708 p = p.getParent(index); 709 } 710 localBuffer.put("]"); 711 result ~= localBuffer.data; 712 713 if (last) { 714 break; 715 } 716 } 717 return result; 718 } 719 720 } 721 722 version (AntlrUnittest) 723 { 724 import dshould; 725 726 @("empty") 727 unittest 728 { 729 auto spcA = new EmptyPredictionContext; 730 spcA.should.not.be(null); 731 auto spcB = new SingletonPredictionContext(new EmptyPredictionContext, 0); 732 spcA.should.not.equal(spcB); 733 spcA.hasEmptyPath.should.equal(true); 734 spcA.isEmpty.should.equal(true); 735 spcB.hasEmptyPath.should.equal(false); 736 spcB.isEmpty.should.equal(false); 737 } 738 739 @("mergeArrayContext") 740 unittest 741 { 742 DoubleKeyMap!(PredictionContext, PredictionContext, PredictionContext) mergeCache; 743 auto spcA = new EmptyPredictionContext; 744 auto apcA = new ArrayPredictionContext(spcA); 745 apcA.should.not.be(null); 746 auto spcB = new EmptyPredictionContext; 747 auto apcB = new ArrayPredictionContext(spcB); 748 apcB.should.not.be(null); 749 auto spcC = new EmptyPredictionContext; 750 751 PredictionContext[] predA = [apcA, apcB, spcC, apcA]; // not unique 752 predA.length.should.equal(4); 753 PredictionContext.combineCommonParents(predA); 754 predA.length.should.equal(4); 755 } 756 757 @("mergeEmptyContext") 758 unittest 759 { 760 auto spcC = new EmptyPredictionContext; 761 spcC.should.not.be(null); 762 auto spcD = new EmptyPredictionContext; 763 764 PredictionContext[] predB = [spcC, spcD]; 765 PredictionContext.combineCommonParents(predB); 766 } 767 }