package daikon.split;

import static daikon.tools.nullness.NullnessUtil.castNonNullDeep;

import daikon.Debug;
import daikon.DynamicConstants;
import daikon.PptConditional;
import daikon.PptRelation;
import daikon.PptSlice;
import daikon.PptTopLevel;
import daikon.ValueTuple;
import daikon.VarInfo;
import daikon.inv.DummyInvariant;
import daikon.inv.Implication;
import daikon.inv.Invariant;
import daikon.suppress.NIS;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.checkerframework.checker.interning.qual.UsesObjectEquals;
import org.checkerframework.checker.lock.qual.GuardSatisfied;
import org.checkerframework.checker.nullness.qual.KeyFor;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
import org.checkerframework.dataflow.qual.SideEffectFree;
import org.plumelib.util.CollectionsPlume;
import org.plumelib.util.MPair;
import org.plumelib.util.OrderedPairIterator;

/**
 * PptSplitter contains the splitter and its associated PptConditional ppts. Currently all
 * splitters are binary (have exactly two PptConditional ppts), and the implementation presumes
 * this. However, this could be changed by extending this class with other implementations.
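 *
 * <p>A rough usage sketch (hypothetical variable names; the real driver code lives in
 * {@link daikon.PptTopLevel}): construct a PptSplitter for a program point and a splitter, feed
 * it each sample, and then ask it to form implications at the parent.
 *
 * <pre>{@code
 * PptSplitter split = new PptSplitter(exitPpt, splitter); // exitPpt and splitter are hypothetical
 * if (split.splitter_valid()) {
 *   for (ValueTuple vt : samples) {
 *     split.add_bottom_up(vt, 1); // routes each sample to one of the two conditional ppts
 *   }
 *   split.add_implications(); // forms implications from the invariants found on each side
 * }
 * }</pre>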
 */
@UsesObjectEquals
public class PptSplitter implements Serializable {

  static final long serialVersionUID = 20031031L;

  /**
   * Boolean. If true, the built-in splitting rules are disabled. The built-in rules look for
   * implications based on boolean return values and also when there are exactly two exit points
   * from a method.
   */
  public static boolean dkconfig_disable_splitting = false;

  /**
   * Integer. A value of zero indicates that DummyInvariant objects should not be created. A value
   * of one indicates that dummy invariants should be created only when no suitable condition was
   * found in the regular output. A value of two indicates that dummy invariants should be created
   * for each splitting condition.
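   *
   * <p>For example, to create dummy invariants only as a fallback, this option could be set with
   * Daikon's {@code --config_option} command-line switch (assuming the standard configuration
   * mechanism):
   *
   * <pre>{@code
   * java daikon.Daikon \
   *   --config_option daikon.split.PptSplitter.dkconfig_dummy_invariant_level=1 \
   *   <other args>
   * }</pre>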
   */
  public static int dkconfig_dummy_invariant_level = 0;

  /**
   * Split bi-implications ("{@code a <==> b}") into two separate implications ("{@code a ==> b}"
   * and "{@code b ==> a}").
   */
  public static boolean dkconfig_split_bi_implications = false;

  /**
   * When true, compilation errors during splitter file generation will not be reported to the
   * user.
   */
  public static boolean dkconfig_suppressSplitterErrors = true;

  /** General debug tracer. */
  public static final Logger debug = Logger.getLogger("daikon.split.PptSplitter");

  /** PptTopLevel that contains this split. */
  private PptTopLevel parent;

  /**
   * Splitter that chooses which PptConditional a sample is applied to. May be null if no
   * computation is required (e.g., splitting over exit points).
   */
  public transient @Nullable Splitter splitter;

  /**
   * One PptConditional for each splitter result. ppts[0] is used when the splitter is true,
   * ppts[1] when the splitter is false. The contents are PptConditional objects if the splitter
   * is valid, but are PptTopLevel if the PptSplitter represents two exit points (for which no
   * splitter is required).
   */
  public PptTopLevel[] ppts;

  private static final Comparator<Invariant> icfp = new Invariant.InvariantComparatorForPrinting();

  /**
   * Creates a binary PptSplitter with the specified splitter for the specified PptTopLevel
   * parent. The parent should be a leaf (i.e., a numbered exit point).
   */
  public PptSplitter(PptTopLevel parent, Splitter splitter) {

    this.parent = parent;
    this.splitter = splitter;
    ppts =
        new PptTopLevel[] {
          new PptConditional(parent, splitter, false), new PptConditional(parent, splitter, true)
        };

    if (debug.isLoggable(Level.FINE)) {
      debug.fine(
          "PptSplitter("
              + parent.name()
              + ", "
              + splitter.condition()
              + "): "
              + parent.var_infos.length
              + " VarInfos");
      for (int ii = 0; ii < parent.var_infos.length; ii++) {
        debug.fine(
            "  VarInfo #"
                + ii
                + ": at parent="
                + parent.var_infos[ii].name()
                + " at ppts[0]="
                + ppts[0].var_infos[ii].name()
                + " at ppts[1]="
                + ppts[1].var_infos[ii].name());
      }
    }
  }

  /** Creates a PptSplitter over two exit points. No splitter is required. */
  public PptSplitter(PptTopLevel parent, PptTopLevel exit1, PptTopLevel exit2) {
    this.parent = parent;
    this.splitter = null;
    ppts = new PptTopLevel[] {exit1, exit2};
  }

  /** Returns true if the splitter is valid at this point, false otherwise. */
  public boolean splitter_valid() {

    assert ((PptConditional) ppts[1]).splitter_valid()
        == ((PptConditional) ppts[0]).splitter_valid();
    return ((PptConditional) ppts[0]).splitter_valid();
  }

  /** Adds the sample to one of the conditional ppts in the split. */
  @RequiresNonNull({
    "daikon.suppress.NIS.suppressor_map",
    "daikon.suppress.NIS.suppressor_map_suppression_count",
    "daikon.suppress.NIS.all_suppressions",
    "daikon.suppress.NIS.suppressor_proto_invs"
  })
  public void add_bottom_up(ValueTuple vt, int count) {

    // Choose the appropriate conditional point based on the condition result
    PptConditional ppt_cond = choose_conditional(vt);
    if (ppt_cond == null) {
      return;
    }

    // ??? MDE
    // If any parent variables were missing out of bounds on this
    // sample, apply that to this conditional as well.  It would be
    // better to do this more efficiently.
    ppt_cond.get_missingOutOfBounds(parent, vt);

    // Add the point
    ppt_cond.add_bottom_up(vt, count);

    if (Debug.ppt_match(ppt_cond)) {
      String related_vars = Debug.related_vars(ppt_cond, vt);
      debug.fine(
          "Adding sample to "
              + ppt_cond
              + " with "
              + vt.size()
              + " vars"
              + (!related_vars.equals("") ? (" including " + related_vars) : ""));
    }
  }

  /**
   * Chooses the correct conditional point based on the values in this sample. Returns null if none
   * is applicable.
   */
  public @Nullable PptConditional choose_conditional(ValueTuple vt) {

    // Currently only binary implications are supported
    assert ppts.length == 2;

    boolean splitter_test;
    try {
      splitter_test = ((PptConditional) ppts[0]).splitter.test(vt);
    } catch (Throwable e) {
      // If an exception is thrown, don't put the data on either side
      // of the split.
      if (false) { // need to add a debugging switch
        System.out.println(
            "Exception thrown in PptSplitter.choose_conditional() for " + ppts[0].name());
        System.out.println("Vars = " + Debug.related_vars(ppts[0], vt));
      }
      return null;
    }

    // Choose the appropriate conditional point based on the condition result
    return ((PptConditional) ppts[splitter_test ? 0 : 1]);
  }

  /** Adds implication invariants based on the invariants found on each side of the split. */
  @RequiresNonNull({
    "parent.equality_view",
    "daikon.suppress.NIS.all_suppressions",
    "daikon.suppress.NIS.suppressor_map"
  })
  public void add_implications() {

    // Currently only binary implications are supported
    assert ppts.length == 2;

    // Create any NIS suppressed invariants in each conditional
    @SuppressWarnings({"unchecked", "rawtypes"})
    List<Invariant> suppressed_invs[] =
        (ArrayList<Invariant>[]) new @Nullable ArrayList[ppts.length];
    for (int i = 0; i < ppts.length; i++) {
      suppressed_invs[i] = NIS.create_suppressed_invs(ppts[i]);
    }

    add_implications_pair();

    // Remove all of the NIS suppressed invariants that we just created.
    for (int i = 0; i < ppts.length; i++) {
      ppts[i].remove_invs(suppressed_invs[i]);
    }
  }

  /**
   * Given a pair of conditional program points, form implications from the invariants true at each
   * one.
   *
   * <p>The algorithm first divides the invariants into two groups:
   *
   * <ol>
   *   <li>the "same" invariants: those that are true at both program points.
   *   <li>the "different" invariants: all other invariants.
   * </ol>
   *
   * The "exclusive" invariants (a subset of the "different" invariants) are true at one program
   * point, and their negation is true at the other program point.
   *
   * <p>At the first program point, for each exclusive invariant and each different invariant,
   * create an implication of the form "exclusive &rArr; different". Do the same at the second
   * program point.
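   *
   * <p>For example, for a hypothetical split on "x &gt; 0", suppose side 0 yields the invariants
   * "x &gt; 0", "y == 5", and "z &lt; 10", while side 1 yields "x &le; 0", "y == 5", and "w == 0".
   * Then "x &gt; 0" and "x &le; 0" form an exclusive pair, "y == 5" is a "same" invariant (it
   * yields no implication), and "z &lt; 10" and "w == 0" are "different" invariants, so
   * implications such as "x &gt; 0 &rArr; z &lt; 10" and "x &le; 0 &rArr; w == 0" are created.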
   *
   * <p>This method is correct only if the two conditional program points fully partition the input
   * space (their conditions are negations of one another). For instance, suppose there is a
   * three-way split with the following invariants detected at each:
   *
   * <pre>
   *   {A,B}  {!A,!B}  {A,!B}
   * </pre>
   *
   * Examining just the first two would suggest that "A &hArr; B" is valid, but in fact that is a
   * false inference. Note that this situation can occur if the splitting condition uses variables
   * that can ever be missing. (Or, presumably, if the condition ever cannot be computed.)
   */
  @RequiresNonNull("parent.equality_view")
  private void add_implications_pair() {

    for (PptTopLevel pchild : ppts) {
      // System.out.printf("splitter child = %s%n", pchild.name());
      if (pchild.equality_view == null) {
        System.out.printf("this: %s%n", this);
        System.out.printf("pchild: %s[%s]%n", pchild, System.identityHashCode(pchild));
        System.out.printf("pchild.children: %s%n", pchild.children);
        for (PptRelation rel : pchild.children) {
          System.out.printf("  relation = %s%n", rel);
        }
        System.out.printf("parent: %s%n", parent);
        throw new Error();
      }
    }

    debug.fine("add_implications_pair: parent = " + parent.name);

    // Maps permuted invariants to their original invariants
    Map<Invariant, Invariant> orig_invs = new LinkedHashMap<>();

    List<@KeyFor("orig_invs") Invariant[]> exclusive_invs_vec =
        new ArrayList<@KeyFor("orig_invs") Invariant[]>();

    // Does not contain anything that is in exclusive_invs_vec.
    // (Those may be added temporarily, but are removed later.)
    List<@Nullable @KeyFor("orig_invs") Invariant[]> different_invs_vec =
        new ArrayList<@Nullable @KeyFor("orig_invs") Invariant[]>();

    // ??? MDE
    // Loop through each possible parent slice
    List<VarInfo[]> slices = possible_slices();

    int num_children = ppts.length;

    for (VarInfo[] vis : slices) {

      // Each element of invs[i] is an invariant from the i-th child, permuted to
      // the parent (and with a parent slice as its ppt slot).
      @SuppressWarnings({"unchecked", "rawtypes"})
      /*NNC:@MonotonicNonNull*/ List<Invariant> invs[] =
          (ArrayList<Invariant>[]) new @Nullable ArrayList[num_children];

      // find the parent slice
      PptSlice pslice = parent.get_or_instantiate_slice(vis);

      // Daikon.debugProgress.fine ("    slice: " + pslice.name());

      // Loop through each child ppt
      for (int childno = 0; childno < num_children; childno++) {
        PptTopLevel child_ppt = ppts[childno];

        assert child_ppt.equality_view != null : child_ppt.name();
        assert parent.equality_view != null : parent.name();

        invs[childno] = new ArrayList<Invariant>(); // permuted to parent

        // vis is in parent order.  Find corresponding child vis, in child order.
        // Each of these arrays contains child vis.
        /*NNC:@MonotonicNonNull*/ VarInfo[] cvis_non_canonical = new VarInfo[vis.length];
        /*NNC:@MonotonicNonNull*/ VarInfo[] cvis = new VarInfo[vis.length];
        /*NNC:@MonotonicNonNull*/ VarInfo[] cvis_sorted = new VarInfo[vis.length];
        for (int kk = 0; kk < vis.length; kk++) {
          cvis_non_canonical[kk] = matching_var(child_ppt, vis[kk]);
          cvis[kk] = cvis_non_canonical[kk].canonicalRep();
          cvis_sorted[kk] = cvis[kk];
        }
        Arrays.sort(cvis_sorted, VarInfo.IndexComparator.getInstance());

        cvis_non_canonical = castNonNullDeep(cvis_non_canonical); // https://tinyurl.com/cfissue/986
        cvis = castNonNullDeep(cvis); // https://tinyurl.com/cfissue/986
        cvis_sorted = castNonNullDeep(cvis_sorted); // https://tinyurl.com/cfissue/986

        // Look for an equality invariant in the non-canonical slice (if any).
        // Note that only an equality invariant can exist in a non-canonical
        // slice.  If it does exist, we want it rather than the canonical
        // form (which for equality invariants will always be of the form
        // 'a == a').
        Invariant eq_inv = null;
        if (!Arrays.equals(cvis_non_canonical, cvis)) {
          PptSlice nc_slice = child_ppt.findSlice(cvis_non_canonical);
          if (nc_slice != null) {
            if (nc_slice.invs.size() != 1) {
              // Impossible: multiple invariants found
              System.out.println("Found " + nc_slice.invs.size() + " invs at " + nc_slice);
              for (Invariant inv2 : nc_slice.invs) {
                System.out.println(" -- inv = " + inv2);
              }
              for (VarInfo cvi : cvis_non_canonical) {
                System.out.println(" -- equality set = " + cvi.equalitySet.shortString());
              }
              throw new Error("nc_slice.invs.size() == " + nc_slice.invs.size());
            }
            eq_inv = nc_slice.invs.get(0);
            debug.fine("Found eq inv " + eq_inv);
          }
        }

        // Find the corresponding slice
        PptSlice cslice = child_ppt.findSlice(cvis_sorted);
        if (cslice == null) {
          if (eq_inv != null) {
            // There is trouble.  Print a lot of debugging information.
            System.out.println("cvis_non_canonical:");
            for (VarInfo cvi : cvis_non_canonical) {
              System.out.println("  " + cvi);
            }
            System.out.println("cvis:");
            for (VarInfo cvi : cvis) {
              System.out.println("  " + cvi);
            }
            System.out.println("cvis_sorted:");
            for (VarInfo cvi : cvis_sorted) {
              System.out.println("  " + cvi);
            }
            if (DynamicConstants.dkconfig_use_dynamic_constant_optimization) {
              assert child_ppt.constants != null
                  : "@AssumeAssertion(nullness):  dependent:  config var";
              System.out.println("constant values for cvis_sorted:");
              for (VarInfo cvi : cvis_sorted) {
                System.out.println("  " + child_ppt.constants.getConstant(cvi));
              }
            }
            String eq_inv_ppt = eq_inv.ppt.toString();
            assert eq_inv.ppt.equals(child_ppt.findSlice(cvis_non_canonical));

            System.out.println("All child_ppt slices: ");
            for (PptSlice slice : child_ppt.views_iterable()) {
              System.out.println("  " + slice);
            }

            // found slice on non-canonical, but didn't find it here
            throw new RuntimeException(
                String.join(
                    System.lineSeparator(),
                    "found eq_inv",
                    "  " + eq_inv,
                    "  @" + eq_inv_ppt,
                    "  but can't find slice for " + Arrays.toString(cvis_sorted)));
          }
          // If no slice, just give up?
          continue;
        }

        // Copy each invariant permuted to the parent.
        // This permits them to be directly compared to one another.
        int[] permute = PptTopLevel.build_permute(cvis_sorted, cvis);
        for (Invariant orig_inv : cslice.invs) {
          Invariant inv = orig_inv.clone_and_permute(permute);
          inv.ppt = pslice;
          if ((eq_inv != null) && orig_inv.getClass().equals(eq_inv.getClass())) {
            orig_inv = eq_inv;
          }
          assert !orig_invs.containsKey(inv);
          orig_invs.put(inv, orig_inv);
          invs[childno].add(inv);
        }
      } // children loop

      invs = castNonNullDeep(invs); // https://tinyurl.com/cfissue/986

      // If neither child slice has invariants, there is nothing to do
      if ((invs[0].size() == 0) && (invs[1].size() == 0)) {
        if (pslice.invs.size() == 0) {
          parent.removeSlice(pslice);
        }
        continue;
      }

      if (pslice.invs.size() == 0) {
        debug.fine("PptSplitter: created new slice " + Arrays.toString(vis) + " @" + parent.name);
      }

      // Add any exclusive conditions for this slice to the list
      @SuppressWarnings("keyfor") // need qualifier parameter to Invariants
      List<@KeyFor("orig_invs") Invariant[]> ec = exclusive_conditions(invs[0], invs[1]);
      exclusive_invs_vec.addAll(ec);

      // Add any invariants that are different to the list
      @SuppressWarnings("keyfor") // need qualifier parameter to Invariants
      List<@Nullable @KeyFor("orig_invs") Invariant[]> di = different_invariants(invs[0], invs[1]);
      different_invs_vec.addAll(di);
    } // slices.iterator() loop

    if (debug.isLoggable(Level.FINE)) {
      debug.fine("Found " + exclusive_invs_vec.size() + " exclusive conditions ");
      for (Invariant[] invs : exclusive_invs_vec) {
        invs[0].log("exclusive condition with %s", invs[1].format());
        invs[1].log("exclusive condition with %s", invs[0].format());
        debug.fine("-- " + invs[0] + " -- " + invs[1]);
      }
      debug.fine("Found " + different_invs_vec.size() + " different invariants ");
      for (@Nullable Invariant[] invs : different_invs_vec) {
        if (invs[0] != null) {
          invs[0].log("%s differs from %s", invs[0], invs[1]);
        }
        if (invs[1] != null) {
          invs[1].log("%s differs from %s", invs[0], invs[1]);
        }
        debug.fine("-- " + invs[0] + " -- " + invs[1]);
      }
    }

    PptTopLevel ppt1 = ppts[0];
    PptTopLevel ppt2 = ppts[1];

    // Add the splitting condition as an exclusive condition if requested
    if ((splitter != null) && dkconfig_dummy_invariant_level > 0) {
      if (exclusive_invs_vec.size() == 0 || dkconfig_dummy_invariant_level >= 2) {
        // As a last resort, try using the user's supplied DummyInvariant
        debug.fine("addImplications: resorting to dummy");
        PptConditional cond1 = (PptConditional) ppt1;
        PptConditional cond2 = (PptConditional) ppt2;
        debug.fine("addImplications: cond1 " + cond1 + " cond2 " + cond2);
        cond1.splitter.instantiateDummy(ppt1);
        cond2.splitter.instantiateDummy(ppt2);
        DummyInvariant dummy1 = cond1.dummyInvariant();
        DummyInvariant dummy2 = cond2.dummyInvariant();
        debug.fine("addImplications: dummy1 " + dummy1 + " dummy2 " + dummy2);
        if (dummy1 != null && dummy1.valid && dummy2 != null && dummy2.valid) {
          assert !cond1.splitter_inverse;
          assert cond2.splitter_inverse;
          dummy2.negate();
          orig_invs.put(dummy1, dummy1);
          orig_invs.put(dummy2, dummy2);
          @KeyFor("orig_invs") Invariant[] dummy_pair =
              new @KeyFor("orig_invs") Invariant[] {dummy1, dummy2};
          exclusive_invs_vec.add(dummy_pair);
          // Don't add the dummy_pair, as it would just be removed afterward.
          // different_invs_vec.add(dummy_pair);
        } else {
          // nothing to do
        }
      }
    }

    // If there are no exclusive conditions, we can do nothing here
    if (exclusive_invs_vec.size() == 0) {
      debug.fine("addImplications: no exclusive conditions");
      return;
    }

    // Remove exclusive invariants from the different invariants list.
    // It would be better not to have added them in the first place,
    // but this is easier for now.
    for (Iterator<@Nullable @KeyFor("orig_invs") Invariant[]> ii = different_invs_vec.iterator();
        ii.hasNext(); ) {
      @Nullable Invariant[] diff_invs = ii.next();
      if (diff_invs[0] != null) {
        assert diff_invs[1] == null;
        // debug.fine ("Considering inv0 " + diff_invs[0]);
        for (Invariant[] ex_invs : exclusive_invs_vec) {
          if (ex_invs[0] == diff_invs[0]) {
            debug.fine("removed exclusive invariant " + ex_invs[0]);
            ii.remove();
            break;
          }
        }
      } else {
        assert diff_invs[1] != null;
        // debug.fine ("Considering inv1 " + diff_invs[1]);
        for (Invariant[] ex_invs : exclusive_invs_vec) {
          if (ex_invs[1] == diff_invs[1]) {
            debug.fine("removed exclusive invariant " + ex_invs[1]);
            ii.remove();
            break;
          }
        }
      }
    }

    // Get the canonical predicate invariants from the exclusive list.
    // We pick the first one that is neither obvious nor suppressed.
    // If all are either obvious or suppressed, we just pick the first
    // one in the list.
    // TODO: Why do we want canonical predicate invariants?  How will they be used?  It seems that
    // different elements of this list have different semantics.
    // TODO: After this loop, might the two canonical invariants not be exclusive with one another?
    // TODO: con_invs should probably be renamed to canon_invs.
    /*NNC:@MonotonicNonNull*/ Invariant[] con_invs = new Invariant[2];
    for (Invariant[] invs : exclusive_invs_vec) {
      assert invs.length == 2;
      for (int jj = 0; jj < con_invs.length; jj++) {
        if (con_invs[jj] == null) {
          @SuppressWarnings("nullness") // map
          @NonNull Invariant orig = orig_invs.get(invs[jj]);
          assert orig != null : "Not in orig_invs: " + invs[jj] + " " + invs[jj].getClass();
          if ((orig.isObvious() == null) && !orig.is_ni_suppressed()) {
            con_invs[jj] = invs[jj];
          }
        }
      }
    }
    Invariant[] first = exclusive_invs_vec.get(0);
    for (int jj = 0; jj < con_invs.length; jj++) {
      if (con_invs[jj] == null) {
        System.out.println(
            "Warning: No non-obvious non-suppressed exclusive"
                + " invariants found in "
                + parent.name);
        // throw new Error();
        con_invs[jj] = first[jj];
      }
    }
    con_invs = castNonNullDeep(con_invs); // https://tinyurl.com/cfissue/986

    // Create double-implications for each exclusive invariant
    for (Invariant[] invs : exclusive_invs_vec) {
      for (int jj = 0; jj < con_invs.length; jj++) {
        if (con_invs[jj] != invs[jj]) {
          add_implication(parent, con_invs[jj], invs[jj], true, orig_invs);
        }
      }
    }

    // Create single implication for each different invariant
    for (@Nullable Invariant[] invs : different_invs_vec) {
      for (int jj = 0; jj < con_invs.length; jj++) {
        if (invs[jj] != null) {
          add_implication(parent, con_invs[jj], invs[jj], false, orig_invs);
        }
      }
    }
  } // add_implications_pair

  /**
   * Returns a list of all possible slices that may appear at the parent. The parent must have
   * already been created by merging the invariants from its child conditionals.
   *
   * <p>This is a subset of the slices that actually exist at the parent because the parent may
   * have implications created from invariants in child slices that only exist in one child (and
   * thus don't exist in the parent).
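   *
   * <p>For example, if the leaders of the parent's equality view are {@code a}, {@code b}, and
   * {@code c}, the result contains each combination (with repetition) of one, two, or three of
   * those leaders that passes {@code is_slice_ok}, such as {a}, {a, b}, and {a, b, c}.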
   */
  @RequiresNonNull("parent.equality_view")
  private List<VarInfo[]> possible_slices() {

    List<VarInfo[]> result = new ArrayList<>();

    // Get an array of leaders at the parent to build slices over
    VarInfo[] leaders = parent.equality_view.get_leaders_sorted();

    // Create unary views
    for (int i = 0; i < leaders.length; i++) {
      if (parent.is_slice_ok(leaders[i])) {
        result.add(new VarInfo[] {leaders[i]});
      }
    }

    // Create binary views
    for (int i = 0; i < leaders.length; i++) {
      for (int j = i; j < leaders.length; j++) {
        if (parent.is_slice_ok(leaders[i], leaders[j])) {
          result.add(new VarInfo[] {leaders[i], leaders[j]});
        }
      }
    }

    // Expensive!
    // ??? MDE
    // Create ternary views
    for (int i = 0; i < leaders.length; i++) {
      for (int j = i; j < leaders.length; j++) {
        for (int k = j; k < leaders.length; k++) {
          if (parent.is_slice_ok(leaders[i], leaders[j], leaders[k])) {
            result.add(new VarInfo[] {leaders[i], leaders[j], leaders[k]});
          }
        }
      }
    }

    return result;
  }

  // Could be used in an assertion that all invariants are at the same program point.
  @SuppressWarnings("UnusedMethod")
  private boolean at_same_ppt(List<Invariant> invs1, List<Invariant> invs2) {
    PptSlice ppt = null;
    Iterator<Invariant> itor =
        new CollectionsPlume.MergedIterator2<Invariant>(invs1.iterator(), invs2.iterator());
    for (; itor.hasNext(); ) {
      Invariant inv = itor.next();
      if (ppt == null) {
        ppt = inv.ppt;
      } else {
        if (inv.ppt != ppt) {
          return false;
        }
      }
    }
    return true;
  }

  // TODO: Should this only include invariants such that all of their variables are defined
  // everywhere?
  /**
   * Determine which elements of invs1 are mutually exclusive with elements of invs2. Result
   * elements are pairs of invariants (arrays of length 2). All the arguments should be over the
   * same program point.
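   *
   * <p>For example, if invs1 contains "x &gt; 0" and invs2 contains "x &le; 0", then the pair
   * ("x &gt; 0", "x &le; 0") appears in the result, provided that {@code isExclusiveFormula}
   * recognizes the two formulas as mutually exclusive.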
   *
   * @param invs1 a set of invariants
   * @param invs2 a set of invariants
   */
  List<Invariant[]> exclusive_conditions(List<Invariant> invs1, List<Invariant> invs2) {

    List<Invariant[]> result = new ArrayList<>();
    for (Invariant inv1 : invs1) {
      for (Invariant inv2 : invs2) {
        // // This is a debugging tool, to make sure that various versions
        // // of isExclusiveFormula remain coordinated.  (That's also one
        // // reason we don't break out of the loop early:  also, there will
        // // be few invariants in a slice, so breaking out is of minimal
        // // benefit.)
        // assert inv1.isExclusiveFormula(inv2)
        //                  == inv2.isExclusiveFormula(inv1)
        //              : "Bad exclusivity: " + inv1.isExclusiveFormula(inv2)
        //               + " " + inv2.isExclusiveFormula(inv1)
        //               + "    " + inv1.format() + "    " + inv2.format();
        if (inv1.isExclusiveFormula(inv2)) {
          result.add(new Invariant[] {inv1, inv2});
        }
      }
    }
    return result;
  }

  /**
   * Determine which elements of invs1 differ from elements of invs2. Result elements are pairs of
   * invariants (arrays of length 2, with one or the other element always null). All the arguments
   * should be over the same program point.
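   *
   * <p>For example, if invs1 contains {A, B} and invs2 contains {B, C}, the result is [A, null]
   * and [null, C]: A appears only in invs1, C appears only in invs2, and B, which is common to
   * both, is omitted.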
   *
   * @param invs1 a set of invariants
   * @param invs2 a set of invariants
   * @return invariants in the exclusive-or of {@code invs1} and {@code invs2}
   */
  List<@Nullable Invariant[]> different_invariants(List<Invariant> invs1, List<Invariant> invs2) {
    NavigableSet<Invariant> ss1 = new TreeSet<>(icfp);
    ss1.addAll(invs1);
    NavigableSet<Invariant> ss2 = new TreeSet<>(icfp);
    ss2.addAll(invs2);
    List<@Nullable Invariant[]> result = new ArrayList<>();
    for (OrderedPairIterator<Invariant> opi =
            new OrderedPairIterator<Invariant>(ss1.iterator(), ss2.iterator(), icfp);
        opi.hasNext(); ) {
      MPair<@Nullable Invariant, @Nullable Invariant> pair = opi.next();
      if ((pair.first == null) || (pair.second == null)
      // || (icfp.compare(pair.a, pair.b) != 0)
      ) {
        result.add(new @Nullable Invariant[] {pair.first, pair.second});
      }
    }
    return result;
  }

  /**
   * Determine which elements of invs1 are the same as elements of invs2. Result elements are
   * invariants (from the invs1 list). All the arguments should be over the same program point.
   *
   * @param invs1 a set of invariants
   * @param invs2 a set of invariants
   * @return the intersection of {@code invs1} and {@code invs2}
   */
  List<Invariant> same_invariants(List<Invariant> invs1, List<Invariant> invs2) {

    NavigableSet<Invariant> ss1 = new TreeSet<>(icfp);
    ss1.addAll(invs1);
    NavigableSet<Invariant> ss2 = new TreeSet<>(icfp);
    ss2.addAll(invs2);

    ss1.retainAll(ss2);
    return new ArrayList<Invariant>(ss1);

    // // This seems like a rather complicated implementation.  Why can't it
    // // just use set intersection?
    // List<@Nullable Invariant> result = new ArrayList<>();
    // for (OrderedPairIterator<Invariant> opi = new OrderedPairIterator<>(ss1.iterator(),
    //                                 ss2.iterator(), icfp);
    //      opi.hasNext(); ) {
    //   IPair<@Nullable Invariant,@Nullable Invariant> pair = opi.next();
    //   if (pair.a != null && pair.b != null) {
    //     Invariant inv1 = pair.a;
    //     Invariant inv2 = pair.b;
    //     result.add(inv1);
    //   }
    // }
    // return result;
  }

  /**
   * If the implication specified by predicate and consequent is a valid implication, adds it to
   * the joiner view of {@code ppt}.
   *
   * @param ppt the program point whose joiner view the implication is added to
   * @param predicate the predicate (left-hand side) of the implication
   * @param consequent the consequent (right-hand side) of the implication
   * @param iff if true, make the implication a bi-implication
   * @param orig_invs maps permuted invariants to their original invariants
   */
  public void add_implication(
      PptTopLevel ppt,
      Invariant predicate,
      Invariant consequent,
      boolean iff,
      Map<Invariant, Invariant> orig_invs) {
    debug.fine("add_implication " + ppt + " " + predicate + " " + consequent + " " + iff);

    assert predicate != null;
    assert consequent != null;

    @SuppressWarnings("nullness") // map: method precondition
    @NonNull Invariant orig_pred = orig_invs.get(predicate);
    Invariant orig_cons = orig_invs.get(consequent);
    if (orig_cons == null) {
      assert (consequent instanceof DummyInvariant);
      orig_cons = consequent;
    }
    assert orig_pred != null : "predicate is not in orig_invs: " + predicate;
    assert orig_cons != null;

    // Don't add consequents that are obvious or suppressed.
    // JHP: Jan 2005: It might be better to create them anyway and
    // only suppress them in printing.  Also, this could possibly be
    // better implemented by changing the way that we create the list
    // of invariants that is in one conditional and not in the other
    // to not include an invariant if it is suppressed on the other
    // side.  This would have the pleasant side effect of not forcing
    // all of the suppressed invariants to be created before
    // determining implications.
    if (orig_cons.isObvious() != null) {
      debug.fine("add_implication obvious: " + orig_cons.isObvious().format());
      return;
    }
    if (orig_cons.is_ni_suppressed()) {
      debug.fine("add_implication suppressed: " + orig_cons.is_ni_suppressed());
      return;
    }

    // System.out.println("add_implication:");
    // System.out.println("  predicate = " + predicate.format());
    // System.out.println("  consequent= " + consequent.format());
    // System.out.println("  orig_pred = " + orig_pred.format());
    // System.out.println("  orig_cons = " + orig_cons.format());

    if (dkconfig_split_bi_implications && iff) {
      Implication imp =
          Implication.makeImplication(ppt, predicate, consequent, false, orig_pred, orig_cons);
      if (imp != null) {
        ppt.joiner_view.addInvariant(imp);
      }
      imp = Implication.makeImplication(ppt, consequent, predicate, false, orig_cons, orig_pred);
      if (imp != null) {
        ppt.joiner_view.addInvariant(imp);
      }

      return;
    }

    Implication imp =
        Implication.makeImplication(ppt, predicate, consequent, iff, orig_pred, orig_cons);
    if (imp == null) {
      // The predicate is the same as the consequent, or the implication
      // already exists.
      debug.fine("add_implication imp == null");
      return;
    }

    ppt.joiner_view.addInvariant(imp);
  }

  /**
   * Adds the specified relation from each conditional ppt in this to the corresponding conditional
   * ppt in ppt_split. The relation specified should be a relation from this.parent to
   * ppt_split.parent.
   *
   * @param rel the relation to add
   * @param ppt_split the target of the relation; that is, the relation goes from {@code this} to
   *     {@code ppt_split}
   */
  public void add_relation(PptRelation rel, PptSplitter ppt_split) {
    for (int ii = 0; ii < ppts.length; ii++) {
      @SuppressWarnings("UnusedVariable")
      PptRelation cond_rel = rel.copy(ppts[ii], ppt_split.ppts[ii]);
      // System.out.println ("Added relation: " + cond_rel);
      // System.out.println ("with relations: "
      //                      + cond_rel.parent_to_child_var_string());
    }
  }

  /**
   * Returns the VarInfo in ppt1 that matches the specified VarInfo. The variables at each point
   * must match exactly. This is a reasonable assumption for the ppts in PptSplitter and their
   * parent.
   *
   * @param ppt1 a program point
   * @param ppt2_var a variable from a different program point
   */
  private VarInfo matching_var(PptTopLevel ppt1, VarInfo ppt2_var) {
    VarInfo v = ppt1.var_infos[ppt2_var.varinfo_index];
    assert v.name().equals(ppt2_var.name());
    return v;
  }

  @SideEffectFree
  @Override
  public String toString(@GuardSatisfied PptSplitter this) {
    return "Splitter " + splitter + ": ppt1 " + ppts[0].name() + ": ppt2 " + ppts[1].name();
  }
}