/*
 * Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */

module antlr.v4.runtime.atn.PredictionContext;

import antlr.v4.runtime.InterfaceRecognizer;
import antlr.v4.runtime.RuleContext;
import antlr.v4.runtime.atn.ATN;
import antlr.v4.runtime.atn.ATNState;
import antlr.v4.runtime.atn.ArrayPredictionContext;
import antlr.v4.runtime.atn.EmptyPredictionContext;
import antlr.v4.runtime.atn.PredictionContextCache;
import antlr.v4.runtime.atn.RuleTransition;
import antlr.v4.runtime.atn.SingletonPredictionContext;
import antlr.v4.runtime.misc;
import core.atomic;
import std.algorithm.sorting;
import std.array;
import std.conv;
import std.typecons;

// Class PredictionContext
/**
 * Base class of the immutable, graph-structured rule-invocation stacks used
 * by ALL(*) prediction. Each node stores one or more (parent, returnState)
 * pairs; subclasses implement the empty, singleton and array shapes.
 */
abstract class PredictionContext
{

    /**
     * Represents {@code $} in local context prediction, which means wildcard.
     * {@code *+x = *}.
     */
    public static const EmptyPredictionContext EMPTY = new EmptyPredictionContext;

    /**
     * Represents {@code $} in an array in full context mode, when {@code $}
     * doesn't mean wildcard: {@code $ + x = [$,x]}. Here,
     * {@code $} = {@link #EMPTY_RETURN_STATE}.
     */
    public static immutable int EMPTY_RETURN_STATE = int.max;

    private static immutable int INITIAL_HASH = 1;

    /**
     * @uml
     * @shared
     */
    public static shared int globalNodeCount = 0;

    public int id;

    /**
     * Stores the computed hash code of this {@link PredictionContext}. The hash
     * code is computed in parts to match the following reference algorithm.
     *
     * <pre>
     *  private int referenceHashCode() {
     *      int hash = {@link MurmurHash#initialize MurmurHash.initialize}({@link #INITIAL_HASH});
     *
     *      for (int i = 0; i &lt; {@link #size()}; i++) {
     *          hash = {@link MurmurHash#update MurmurHash.update}(hash, {@link #getParent getParent}(i));
     *      }
     *
     *      for (int i = 0; i &lt; {@link #size()}; i++) {
     *          hash = {@link MurmurHash#update MurmurHash.update}(hash, {@link #getReturnState getReturnState}(i));
     *      }
     *
     *      hash = {@link MurmurHash#finish MurmurHash.finish}(hash, 2 * {@link #size()});
     *      return hash;
     *  }
     * </pre>
     */
    private size_t cachedHashCode;

    public this()
    {
        // Claim a unique id with a single atomic fetch-and-add. The previous
        // two-step form (non-atomic read of the shared counter, then atomicOp)
        // allowed two threads to observe the same value and share an id.
        // atomicOp!"+=" returns the new value, so subtract 1 for the old one.
        id = atomicOp!"+="(globalNodeCount, 1) - 1;
    }

    /**
     * Caches a precomputed hash only. Note this overload deliberately does
     * not assign an id (the Java reference runtime has no id field at all).
     */
    public this(size_t cachedHashCode)
    {
        this.cachedHashCode = cachedHashCode;
    }

    /**
     * Convert a {@link RuleContext} chain tracked by the parser into the
     * equivalent {@link PredictionContext} graph.
     */
    public static PredictionContext fromRuleContext(ATN atn, RuleContext outerContext)
    {
        if (outerContext is null)
            outerContext = cast(RuleContext)RuleContext.EMPTY;
        // if we are in RuleContext of start rule, s, then PredictionContext
        // is EMPTY. Nobody called us. (if we are empty, return empty)
        if (outerContext.parent is null ||
            outerContext == cast(RuleContext)RuleContext.EMPTY)
        {
            return cast(PredictionContext)PredictionContext.EMPTY;
        }

        // If we have a parent, convert it to a PredictionContext graph
        // (the dead pre-assignment of EMPTY that was here has been removed).
        PredictionContext parent = PredictionContext.fromRuleContext(atn, outerContext.parent);

        ATNState state = atn.states[outerContext.invokingState];
        RuleTransition transition = cast(RuleTransition)state.transition(0);
        return SingletonPredictionContext.create(parent, transition.followState.stateNumber);
    }

    /**
     * Number of (parent, returnState) pairs stored in this node.
     */
    abstract public size_t size();

    abstract public PredictionContext getParent(int index);

    abstract public int getReturnState(int index);

    /**
     * @uml
     * This means only the {@link #EMPTY} context is in set.
     */
    public bool isEmpty()
    {
        return this == EMPTY;
    }

    public bool hasEmptyPath()
    {
        // since EMPTY_RETURN_STATE can only appear in the last position, we check last one
        return getReturnState(to!int(size() - 1)) == EMPTY_RETURN_STATE;
    }

    /**
     * @uml
     * @safe
     * @nothrow
     * @override
     */
    public override size_t toHash() @safe nothrow
    {
        return cachedHashCode;
    }

    /**
     * @uml
     * @override
     */
    abstract public override bool opEquals(Object obj);

    public static size_t calculateEmptyHashCode()
    {
        size_t hash = MurmurHash.initialize(INITIAL_HASH);
        hash = MurmurHash.finish(hash, 0);
        return hash;
    }

    public size_t calculateHashCode(PredictionContext parent, int returnState)
    {
        size_t hash = MurmurHash.initialize(INITIAL_HASH);
        hash = MurmurHash.update!PredictionContext(hash, parent);
        hash = MurmurHash.update(hash, returnState);
        hash = MurmurHash.finish(hash, 2);
        return hash;
    }

    public static size_t calculateHashCode(PredictionContext[] parents, int[] returnStates)
    {
        size_t hash = MurmurHash.initialize(INITIAL_HASH);

        foreach (parent; parents) {
            hash = MurmurHash.update!PredictionContext(hash, parent);
        }

        foreach (returnState; returnStates) {
            hash = MurmurHash.update(hash, returnState);
        }

        hash = MurmurHash.finish(hash, (2 * parents.length));
        return hash;
    }

    /**
     * Merge two prediction contexts, dispatching on their shapes.
     *
     * @param a the first context; never null (EMPTY represents "no context")
     * @param b the second context; never null
     * @param rootIsWildcard {@code true} for a local-context merge,
     * otherwise false to indicate a full-context merge
     * @param mergeCache optional memoization cache; may be null
     */
    public static PredictionContext merge(PredictionContext a, PredictionContext b, bool rootIsWildcard,
        ref DoubleKeyMap!(PredictionContext, PredictionContext, PredictionContext) mergeCache)
    in
    {
        assert(a !is null && b !is null); // must be empty context, never null
    }
    do
    {
        // share same graph if both same
        if (a is b || a.opEquals(b))
            return a;
        if (cast(SingletonPredictionContext)a &&
            cast(SingletonPredictionContext)b) {
            return mergeSingletons(cast(SingletonPredictionContext)a,
                                   cast(SingletonPredictionContext)b,
                                   rootIsWildcard, mergeCache);
        }
        // At least one of a or b is array
        // If one is $ and rootIsWildcard, return $ as * wildcard
        if (rootIsWildcard) {
            if (cast(EmptyPredictionContext)a) return a;
            if (cast(EmptyPredictionContext)b) return b;
        }

        // convert singleton so both are arrays to normalize
        if (cast(SingletonPredictionContext)a) {
            a = new ArrayPredictionContext(cast(SingletonPredictionContext)a);
        }
        if (cast(SingletonPredictionContext)b) {
            b = new ArrayPredictionContext(cast(SingletonPredictionContext)b);
        }
        return mergeArrays(cast(ArrayPredictionContext)a, cast(ArrayPredictionContext)b,
                           rootIsWildcard, mergeCache);
    }

    /**
     * @uml
     * Merge two {@link SingletonPredictionContext} instances.
     *
     * <p>Stack tops equal, parents merge is same; return left graph.<br>
     * <embed src="images/SingletonMerge_SameRootSamePar.svg" type="image/svg+xml"/></p>
     *
     * <p>Same stack top, parents differ; merge parents giving array node, then
     * remainders of those graphs. A new root node is created to point to the
     * merged parents.<br>
     * <embed src="images/SingletonMerge_SameRootDiffPar.svg" type="image/svg+xml"/></p>
     *
     * <p>Different stack tops pointing to same parent. Make array node for the
     * root where both element in the root point to the same (original)
     * parent.<br>
     * <embed src="images/SingletonMerge_DiffRootSamePar.svg" type="image/svg+xml"/></p>
     *
     * <p>Different stack tops pointing to different parents. Make array node for
     * the root where each element points to the corresponding original
     * parent.<br>
     * <embed src="images/SingletonMerge_DiffRootDiffPar.svg" type="image/svg+xml"/></p>
     *
     * @param a the first {@link SingletonPredictionContext}
     * @param b the second {@link SingletonPredictionContext}
     * @param rootIsWildcard {@code true} if this is a local-context merge,
     * otherwise false to indicate a full-context merge
     * @param mergeCache optional memoization cache; may be null
     */
    public static PredictionContext mergeSingletons(SingletonPredictionContext a, SingletonPredictionContext b,
        bool rootIsWildcard, ref DoubleKeyMap!(PredictionContext, PredictionContext, PredictionContext) mergeCache)
    {
        if (mergeCache !is null ) {
            Nullable!PredictionContext previous = mergeCache.get(a, b);
            if (!previous.isNull) return previous.get;
            previous = mergeCache.get(b, a);
            if (!previous.isNull) return previous.get;
        }

        PredictionContext rootMerge = mergeRoot(a, b, rootIsWildcard);
        if (rootMerge !is null) {
            if (mergeCache !is null) mergeCache.put(a, b, rootMerge);
            return rootMerge;
        }

        if (a.returnState == b.returnState) { // a == b
            PredictionContext parent = merge(a.parent, b.parent, rootIsWildcard, mergeCache);
            // if parent is same as existing a or b parent or reduced to a parent, return it
            if ( parent == a.parent )
                return a; // ax + bx = ax, if a=b
            if ( parent == b.parent )
                return b; // ax + bx = bx, if a=b
            // else: ax + ay = a'[x,y]
            // merge parents x and y, giving array node with x,y then remainders
            // of those graphs. dup a, a' points at merged array
            // new joined parent so create new singleton pointing to it, a'
            PredictionContext a_ = SingletonPredictionContext.create(parent, a.returnState);
            if (mergeCache !is null)
                mergeCache.put(a, b, a_);
            return a_;
        }
        else { // a != b payloads differ
            // see if we can collapse parents due to $+x parents if local ctx
            PredictionContext singleParent = null;
            if (a == b || (a.parent !is null && a.parent.opEquals(b.parent))) { // ax + bx = [a,b]x
                singleParent = a.parent;
            }
            if (singleParent !is null) { // parents are same
                // sort payloads and use same parent
                int[] payloads = [a.returnState, b.returnState];
                if ( a.returnState > b.returnState ) {
                    payloads[0] = b.returnState;
                    payloads[1] = a.returnState;
                }
                PredictionContext[] parents = [singleParent, singleParent];
                PredictionContext a_ = new ArrayPredictionContext(parents, payloads);
                if (mergeCache !is null) mergeCache.put(a, b, a_);
                return a_;
            }
            // parents differ and can't merge them. Just pack together
            // into array; can't merge.
            // ax + by = [ax,by]
            int[] payloads = [a.returnState, b.returnState];
            PredictionContext[] parents = [a.parent, b.parent];
            if ( a.returnState > b.returnState ) { // sort by payload
                payloads[0] = b.returnState;
                payloads[1] = a.returnState;
                parents.length = 0;
                parents ~= b.parent;
                parents ~= a.parent;
            }
            PredictionContext a_ = new ArrayPredictionContext(parents, payloads);
            if (mergeCache !is null ) mergeCache.put(a, b, a_);
            return a_;
        }
    }

    /**
     * Handle case where at least one of {@code a} or {@code b} is
     * {@link #EMPTY}. In the following diagrams, the symbol {@code $} is used
     * to represent {@link #EMPTY}.
     *
     * <h2>Local-Context Merges</h2>
     *
     * <p>These local-context merge operations are used when {@code rootIsWildcard}
     * is true.</p>
     *
     * <p>{@link #EMPTY} is superset of any graph; return {@link #EMPTY}.<br>
     * <embed src="images/LocalMerge_EmptyRoot.svg" type="image/svg+xml"/></p>
     *
     * <p>{@link #EMPTY} and anything is {@code #EMPTY}, so merged parent is
     * {@code #EMPTY}; return left graph.<br>
     * <embed src="images/LocalMerge_EmptyParent.svg" type="image/svg+xml"/></p>
     *
     * <p>Special case of last merge if local context.<br>
     * <embed src="images/LocalMerge_DiffRoots.svg" type="image/svg+xml"/></p>
     *
     * <h2>Full-Context Merges</h2>
     *
     * <p>These full-context merge operations are used when {@code rootIsWildcard}
     * is false.</p>
     *
     * <p><embed src="images/FullMerge_EmptyRoots.svg" type="image/svg+xml"/></p>
     *
     * <p>Must keep all contexts; {@link #EMPTY} in array is a special value (and
     * null parent).<br>
     * <embed src="images/FullMerge_EmptyRoot.svg" type="image/svg+xml"/></p>
     *
     * <p><embed src="images/FullMerge_SameRoot.svg" type="image/svg+xml"/></p>
     *
     * @param a the first {@link SingletonPredictionContext}
     * @param b the second {@link SingletonPredictionContext}
     * @param rootIsWildcard {@code true} if this is a local-context merge,
     * otherwise false to indicate a full-context merge
     */
    public static PredictionContext mergeRoot(SingletonPredictionContext a, SingletonPredictionContext b,
        bool rootIsWildcard)
    {
        if (rootIsWildcard) {
            if ( a == EMPTY ) return cast(PredictionContext)EMPTY;  // * + b = *
            if ( b == EMPTY ) return cast(PredictionContext)EMPTY;  // a + * = *
        }
        else {
            if ( a == EMPTY && b == EMPTY ) return cast(PredictionContext)EMPTY; // $ + $ = $
            if ( a == EMPTY ) { // $ + x = [$,x]
                int[] payloads = [b.returnState, EMPTY_RETURN_STATE];
                PredictionContext[] parents = [b.parent, null];
                PredictionContext joined =
                    new ArrayPredictionContext(parents, payloads);
                return joined;
            }
            if ( b == EMPTY ) { // x + $ = [$,x] ($ is always first if present)
                int[] payloads = [a.returnState, EMPTY_RETURN_STATE];
                PredictionContext[] parents = [a.parent, null];
                PredictionContext joined =
                    new ArrayPredictionContext(parents, payloads);
                return joined;
            }
        }
        return null;
    }

    /**
     * Merge two {@link ArrayPredictionContext} instances.
     *
     * <p>Different tops, different parents.<br>
     * <embed src="images/ArrayMerge_DiffTopDiffPar.svg" type="image/svg+xml"/></p>
     *
     * <p>Shared top, same parents.<br>
     * <embed src="images/ArrayMerge_ShareTopSamePar.svg" type="image/svg+xml"/></p>
     *
     * <p>Shared top, different parents.<br>
     * <embed src="images/ArrayMerge_ShareTopDiffPar.svg" type="image/svg+xml"/></p>
     *
     * <p>Shared top, all shared parents.<br>
     * <embed src="images/ArrayMerge_ShareTopSharePar.svg" type="image/svg+xml"/></p>
     *
     * <p>Equal tops, merge parents and reduce top to
     * {@link SingletonPredictionContext}.<br>
     * <embed src="images/ArrayMerge_EqualTop.svg" type="image/svg+xml"/></p>
     */
    public static PredictionContext mergeArrays(ArrayPredictionContext a, ArrayPredictionContext b,
        bool rootIsWildcard, ref DoubleKeyMap!(PredictionContext, PredictionContext, PredictionContext) mergeCache)
    {
        if (mergeCache) {
            Nullable!PredictionContext previous = mergeCache.get(a, b);
            if (!previous.isNull)
                return previous.get;
            previous = mergeCache.get(b, a);
            if (!previous.isNull)
                return previous.get;
        }

        // merge sorted payloads a + b => M
        int i = 0; // walks a
        int j = 0; // walks b
        int k = 0; // walks target M array

        int[] mergedReturnStates =
            new int[a.returnStates.length + b.returnStates.length];
        PredictionContext[] mergedParents =
            new PredictionContext[a.returnStates.length + b.returnStates.length];
        // walk and merge to yield mergedParents, mergedReturnStates

        while ( i<a.returnStates.length && j<b.returnStates.length ) {
            PredictionContext a_parent = a.parents[i];
            PredictionContext b_parent = b.parents[j];
            if ( a.returnStates[i]==b.returnStates[j] ) {
                // same payload (stack tops are equal), must yield merged singleton
                int payload = a.returnStates[i];
                // $+$ = $
                bool both = payload == EMPTY_RETURN_STATE &&
                    a_parent is null && b_parent is null;
                bool ax_ax = (a_parent !is null && b_parent !is null) &&
                    a_parent.opEquals(b_parent); // ax+ax -> ax
                if (both || ax_ax ) {
                    mergedParents[k] = a_parent; // choose left
                    mergedReturnStates[k] = payload;
                }
                else { // ax+ay -> a'[x,y]
                    PredictionContext mergedParent =
                        merge(a_parent, b_parent, rootIsWildcard, mergeCache);
                    mergedParents[k] = mergedParent;
                    mergedReturnStates[k] = payload;
                }
                i++; // hop over left one as usual
                j++; // but also skip one in right side since we merge
            }
            else if ( a.returnStates[i]<b.returnStates[j] ) { // copy a[i] to M
                mergedParents[k] = a_parent;
                mergedReturnStates[k] = a.returnStates[i];
                i++;
            }
            else { // b > a, copy b[j] to M
                mergedParents[k] = b_parent;
                mergedReturnStates[k] = b.returnStates[j];
                j++;
            }
            k++;
        }

        // copy over any payloads remaining in either array
        if (i < a.returnStates.length) {
            for (int p = i; p < a.returnStates.length; p++) {
                mergedParents[k] = a.parents[p];
                mergedReturnStates[k] = a.returnStates[p];
                k++;
            }
        }
        else {
            for (int p = j; p < b.returnStates.length; p++) {
                mergedParents[k] = b.parents[p];
                mergedReturnStates[k] = b.returnStates[p];
                k++;
            }
        }

        // trim merged if we combined a few that had same stack tops
        if ( k < mergedParents.length ) { // write index < last position; trim
            if ( k == 1 ) { // for just one merged element, return singleton top
                PredictionContext a_ =
                    SingletonPredictionContext.create(mergedParents[0],
                                                      mergedReturnStates[0]);
                if ( mergeCache !is null ) mergeCache.put(a,b,a_);
                return a_;
            }
            mergedParents = mergedParents[0..k];
            mergedReturnStates = mergedReturnStates[0..k];
        }

        PredictionContext M =
            new ArrayPredictionContext(mergedParents, mergedReturnStates);

        // if we created same array as a or b, return that instead
        // TODO: track whether this is possible above during merge sort for speed
        if ( M.opEquals(a) ) {
            if ( mergeCache !is null ) mergeCache.put(a,b,a);
            return a;
        }
        if ( M.opEquals(b) ) {
            if ( mergeCache !is null ) mergeCache.put(a,b,b);
            return b;
        }

        combineCommonParents(mergedParents);

        if ( mergeCache !is null ) mergeCache.put(a,b,M);
        return M;
    }

    /**
     * Make pass over all <em>M</em> {@code parents}; merge any {@code equals()}
     * ones.
     */
    public static void combineCommonParents(ref PredictionContext[] parents)
    {
        // NOTE(review): parents may contain null entries (EMPTY_RETURN_STATE
        // slots from mergeRoot); assumes null class keys are valid in a D
        // associative array — TODO confirm on the targeted compiler version.
        PredictionContext[PredictionContext] uniqueParents;
        for (int p = 0; p < parents.length; p++) {
            PredictionContext parent = parents[p];
            if (!(parent in uniqueParents)) { // don't replace
                uniqueParents[parent] = parent;
            }
        }
        for (int p = 0; p < parents.length; p++) {
            parents[p] = uniqueParents[parents[p]];
        }
    }

    /**
     * Render the context graph rooted at {@code context} as Graphviz DOT text
     * (debugging aid).
     */
    public static string toDOTString(PredictionContext context)
    {
        if (context is null) return "";
        auto buf = appender!string;
        buf.put("digraph G {\n");
        buf.put("rankdir=LR;\n");

        PredictionContext[] nodes = getAllContextNodes(context);
        // Sort by node id as the Java reference does; the previous plain
        // sort() compared the objects themselves via Object.opCmp, which
        // fails at runtime for classes that do not define an ordering.
        nodes.sort!((x, y) => x.id < y.id);
        foreach (PredictionContext current; nodes) {
            // Use casts rather than exact classinfo comparison so that
            // EmptyPredictionContext (a subclass of the singleton shape) is
            // handled here; an exact-classinfo test sent EMPTY into the
            // array branch below, where the cast yields null and crashes.
            if (cast(SingletonPredictionContext)current) {
                string s = to!string(current.id);
                buf.put("  s" ~ s);
                string returnState = to!string(current.getReturnState(0));
                if (cast(EmptyPredictionContext)current)
                    returnState = "$";
                buf.put(" [label=\"" ~ returnState ~ "\"];\n");
                continue;
            }
            ArrayPredictionContext arr = cast(ArrayPredictionContext)current;
            buf.put("  s" ~ to!string(arr.id));
            buf.put(" [shape=box, label=\"");
            buf.put("[");
            bool first = true;
            foreach (int inv; arr.returnStates) {
                if (!first) buf.put(", ");
                if (inv == EMPTY_RETURN_STATE) buf.put("$");
                else buf.put(to!string(inv));
                first = false;
            }
            buf.put("]");
            buf.put("\"];\n");
        }

        foreach (PredictionContext current; nodes) {
            if (current == EMPTY) continue;
            for (int i = 0; i < current.size(); i++) {
                if (current.getParent(i) is null) continue;
                string s = to!string(current.id);
                buf.put("  s" ~ s);
                buf.put("->");
                buf.put("s");
                buf.put(to!string(current.getParent(i).id));
                if ( current.size()>1 ) buf.put(" [label=\"parent[" ~ to!string(i) ~ "]\"];\n");
                else buf.put(";\n");
            }
        }

        buf.put("}\n");
        return buf.data;
    }

    /**
     * Return a context equivalent to {@code context} but built from nodes in
     * {@code contextCache}, memoizing through {@code visited}.
     *
     * NOTE(review): {@code visited} is passed by value; if the caller passes a
     * null (empty) associative array the top-level insertions are not visible
     * to the caller, only to the recursive calls — confirm callers never rely
     * on reading it back.
     */
    public static PredictionContext getCachedContext(PredictionContext context, PredictionContextCache contextCache,
        PredictionContext[PredictionContext] visited)
    {
        if (context.isEmpty) {
            return context;
        }

        // single AA lookup instead of the previous `in` + index pair
        if (auto cached = context in visited) {
            return *cached;
        }

        if (contextCache.hasKey(context)) {
            auto existing = contextCache.get(context);
            visited[context] = existing;
            return existing;
        }

        // Rebuild the parent array lazily: only clone it on the first parent
        // that actually changed, mirroring the Java reference implementation.
        bool changed = false;
        PredictionContext[] parents = new PredictionContext[context.size];
        for (int i = 0; i < parents.length; i++) {
            PredictionContext parent = getCachedContext(context.getParent(i), contextCache, visited);
            if (changed || parent != context.getParent(i)) {
                if (!changed) {
                    parents = new PredictionContext[context.size];
                    for (int j = 0; j < context.size; j++) {
                        parents[j] = context.getParent(j);
                    }
                    changed = true;
                }
                parents[i] = parent;
            }
        }

        if (!changed) {
            contextCache.add(context);
            visited[context] = context;
            return context;
        }

        PredictionContext updated;
        if (parents.length == 0) {
            updated = cast(PredictionContext)EMPTY;
        }
        else if (parents.length == 1) {
            updated = SingletonPredictionContext.create(parents[0], context.getReturnState(0));
        }
        else {
            ArrayPredictionContext arrayPredictionContext = cast(ArrayPredictionContext)context;
            updated = new ArrayPredictionContext(parents, arrayPredictionContext.returnStates);
        }
        contextCache.add(updated);
        visited[updated] = updated;
        visited[context] = updated;

        return updated;
    }

    /**
     * recursive version of Sam's getAllNodes()
     */
    public static PredictionContext[] getAllContextNodes(PredictionContext context)
    {
        PredictionContext[] nodes;
        PredictionContext[PredictionContext] visited;
        getAllContextNodes_(context, nodes, visited);
        return nodes;
    }

    /**
     * Depth-first collection helper for {@link #getAllContextNodes}.
     *
     * {@code nodes} and {@code visited} are now ref parameters: the previous
     * by-value slice meant `nodes ~= context` appended to a local copy, so
     * the caller's array stayed empty.
     */
    public static void getAllContextNodes_(PredictionContext context, ref PredictionContext[] nodes,
        ref PredictionContext[PredictionContext] visited)
    {
        if (context is null || (context in visited)) return;
        visited[context] = context;
        nodes ~= context;
        for (int i = 0; i < context.size; i++) {
            getAllContextNodes_(context.getParent(i), nodes, visited);
        }
    }

    public string[] toStrings(InterfaceRecognizer recognizer, int currentState)
    {
        return toStrings(recognizer, cast(PredictionContext)EMPTY, currentState);
    }

    /**
     * Enumerate string renderings of the invocation stacks represented by
     * this graph, stopping at {@code stop}. Each permutation index selects
     * one parent at every branching node.
     */
    public string[] toStrings(InterfaceRecognizer recognizer, PredictionContext stop,
        int currentState)
    {
        string[] result;
    outer:
        for (int perm = 0; ; perm++) {
            int offset = 0;
            bool last = true;
            PredictionContext p = this;
            int stateNumber = currentState;
            auto localBuffer = appender!string;
            localBuffer.put("[");
            while (!p.isEmpty() && p != stop) {
                int index = 0;
                if (p.size > 0) {
                    int bits = 1;
                    while ((1 << bits) < p.size) {
                        bits++;
                    }
                    int mask = (1 << bits) - 1;
                    index = (perm >> offset) & mask;
                    last &= index >= p.size - 1;
                    if (index >= p.size) {
                        continue outer;
                    }
                    offset += bits;
                }
                if (recognizer !is null) {
                    if (localBuffer.data.length > 1) {
                        // first char is '[', if more than that this isn't the first rule
                        localBuffer.put(' ');
                    }
                    ATN atn = recognizer.getATN();
                    ATNState s = atn.states[stateNumber];
                    string ruleName = recognizer.getRuleNames()[s.ruleIndex];
                    localBuffer.put(ruleName);
                }
                else if ( p.getReturnState(index)!= EMPTY_RETURN_STATE) {
                    if ( !p.isEmpty ) {
                        if (localBuffer.data.length > 1) {
                            // first char is '[', if more than that this isn't the first rule
                            localBuffer.put(' ');
                        }

                        localBuffer.put(to!string(p.getReturnState(index)));
                    }
                }
                stateNumber = p.getReturnState(index);
                p = p.getParent(index);
            }
            localBuffer.put("]");
            result ~= localBuffer.data;

            if (last) {
                break;
            }
        }
        return result;
    }

}

version(unittest) {
    import dshould : be, equal, not, should;
    import unit_threaded;

    class Test {

        @Tags("ArrayPredictionContext")
        @("empty")
        unittest {
            auto spcA = new EmptyPredictionContext;
            spcA.should.not.be(null);
            auto spcB = new SingletonPredictionContext(new EmptyPredictionContext, 0);
            spcA.should.not.equal(spcB);
            spcA.hasEmptyPath.should.equal(true);
            spcA.isEmpty.should.equal(true);
            spcB.hasEmptyPath.should.equal(false);
            spcB.isEmpty.should.equal(false);
        }

        @Tags("ArrayPredictionContext")
        @("mergeArrayContext")
        unittest {
            DoubleKeyMap!(PredictionContext, PredictionContext, PredictionContext) mergeCache;
            auto spcA = new EmptyPredictionContext;
            auto apcA = new ArrayPredictionContext(spcA);
            apcA.should.not.be(null);
            auto spcB = new EmptyPredictionContext;
            auto apcB = new ArrayPredictionContext(spcB);
            apcB.should.not.be(null);
            auto spcC = new EmptyPredictionContext;

            PredictionContext[] predA = [apcA, apcB, spcC, apcA]; // not unique
            predA.length.should.equal(4);
            PredictionContext.combineCommonParents(predA);
            predA.length.should.equal(4);
        }

        @Tags("ArrayPredictionContext")
        @("mergeEmptyContext")
        unittest {
            auto spcC = new EmptyPredictionContext;
            spcC.should.not.be(null);
            auto spcD = new EmptyPredictionContext;

            PredictionContext[] predB = [spcC, spcD];
            PredictionContext.combineCommonParents(predB);
        }
    }
}