package daikon.split;

import static daikon.tools.nullness.NullnessUtil.castNonNullDeep;

import daikon.Debug;
import daikon.DynamicConstants;
import daikon.PptConditional;
import daikon.PptRelation;
import daikon.PptSlice;
import daikon.PptTopLevel;
import daikon.ValueTuple;
import daikon.VarInfo;
import daikon.inv.DummyInvariant;
import daikon.inv.Implication;
import daikon.inv.Invariant;
import daikon.suppress.NIS;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.checkerframework.checker.interning.qual.UsesObjectEquals;
import org.checkerframework.checker.lock.qual.GuardSatisfied;
import org.checkerframework.checker.nullness.qual.KeyFor;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
import org.checkerframework.dataflow.qual.SideEffectFree;
import org.plumelib.util.CollectionsPlume;
import org.plumelib.util.MPair;
import org.plumelib.util.OrderedPairIterator;

/**
 * PptSplitter contains the splitter and its associated PptConditional ppts. Currently all splitters
 * are binary (have exactly two PptConditional ppts), and the implementation assumes this. However,
 * support for other arities could be added by subclassing this class.
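 *
 * <p>A rough lifecycle sketch (the surrounding variable names are illustrative, not part of this
 * API): a PptSplitter is created for a parent program point, each sample is routed to one side via
 * {@link #add_bottom_up}, and after all samples have been processed, {@link #add_implications}
 * turns the per-side invariants into implications at the parent.
 *
 * <pre>{@code
 * PptSplitter psplit = new PptSplitter(parentPpt, splitter); // creates the two PptConditionals
 * for (ValueTuple vt : samples) {
 *   psplit.add_bottom_up(vt, 1); // the sample goes to whichever side's condition holds
 * }
 * psplit.add_implications(); // adds Implication invariants to parentPpt's joiner view
 * }</pre>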
 */
@UsesObjectEquals
public class PptSplitter implements Serializable {

  // We are Serializable, so we specify a version to allow changes to
  // method signatures without breaking serialization.  If you add or
  // remove fields, you should change this number to the current date.
  static final long serialVersionUID = 20031031L;

  /**
   * Boolean. If true, the built-in splitting rules are disabled. The built-in rules look for
   * implications based on boolean return values and also when there are exactly two exit points
   * from a method.
   */
  public static boolean dkconfig_disable_splitting = false;

  /**
   * Integer. A value of zero indicates that DummyInvariant objects should not be created. A value
   * of one indicates that dummy invariants should be created only when no suitable condition was
   * found in the regular output. A value of two indicates that dummy invariants should be created
   * for each splitting condition.
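   *
   * <p>Like other {@code dkconfig_} fields, this is a Daikon configuration option; assuming the
   * usual convention that the option name is the fully qualified field name without the {@code
   * dkconfig_} prefix, it could be set on the command line, for example:
   *
   * <pre>{@code
   * java daikon.Daikon --config_option daikon.split.PptSplitter.dummy_invariant_level=2 ...
   * }</pre>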
   */
  public static int dkconfig_dummy_invariant_level = 0;

  /**
   * Split bi-implications ("{@code a <==> b}") into two separate implications ("{@code a ==> b}"
   * and "{@code b ==> a}").
   */
  public static boolean dkconfig_split_bi_implications = false;

  /**
   * Boolean. If true, compilation errors during splitter file generation are not reported to the
   * user.
   */
  public static boolean dkconfig_suppressSplitterErrors = true;

  /** General debug tracer. */
  public static final Logger debug = Logger.getLogger("daikon.split.PptSplitter");

  /** PptTopLevel that contains this split. */
  private PptTopLevel parent;

  /**
   * Splitter that chooses which PptConditional a sample is applied to. May be null if no
   * computation is required (e.g., splitting over exit points).
   */
  public transient @Nullable Splitter splitter;

  /**
   * One PptConditional for each splitter result. ppts[0] is used when the splitter is true, ppts[1]
   * when the splitter is false. The contents are PptConditional objects if the splitter is valid,
   * but are PptTopLevel if the PptSplitter represents two exit points (for which no splitter is
   * required).
   */
  public PptTopLevel[] ppts;

  private static final Comparator<Invariant> icfp = new Invariant.InvariantComparatorForPrinting();

  /**
   * Creates a binary PptSplitter with the specified splitter for the specified PptTopLevel parent.
   * The parent should be a leaf (i.e., a numbered exit point).
   */
  public PptSplitter(PptTopLevel parent, Splitter splitter) {

    this.parent = parent;
    this.splitter = splitter;
    ppts =
        new PptTopLevel[] {
          new PptConditional(parent, splitter, false), new PptConditional(parent, splitter, true)
        };

    if (debug.isLoggable(Level.FINE)) {
      debug.fine(
          "PptSplitter("
              + parent.name()
              + ", "
              + splitter.condition()
              + "): "
              + parent.var_infos.length
              + " VarInfos");
      for (int ii = 0; ii < parent.var_infos.length; ii++) {
        debug.fine(
            "  VarInfo #"
                + ii
                + ": at parent="
                + parent.var_infos[ii].name()
                + " at ppts[0]="
                + ppts[0].var_infos[ii].name()
                + " at ppts[1]="
                + ppts[1].var_infos[ii].name());
      }
    }
  }

  /** Creates a PptSplitter over two exit points. No splitter is required. */
  public PptSplitter(PptTopLevel parent, PptTopLevel exit1, PptTopLevel exit2) {
    this.parent = parent;
    this.splitter = null;
    ppts = new PptTopLevel[] {exit1, exit2};
  }

  /** Returns true if the splitter is valid at this point, false otherwise. */
  public boolean splitter_valid() {

    assert ((PptConditional) ppts[1]).splitter_valid()
        == ((PptConditional) ppts[0]).splitter_valid();
    return ((PptConditional) ppts[0]).splitter_valid();
  }

  /** Adds the sample to one of the conditional ppts in the split. */
  @RequiresNonNull({
    "daikon.suppress.NIS.suppressor_map",
    "daikon.suppress.NIS.suppressor_map_suppression_count",
    "daikon.suppress.NIS.all_suppressions",
    "daikon.suppress.NIS.suppressor_proto_invs"
  })
  public void add_bottom_up(ValueTuple vt, int count) {

    // Choose the appropriate conditional point based on the condition result
    PptConditional ppt_cond = choose_conditional(vt);
    if (ppt_cond == null) {
      return;
    }

    /// ??? MDE
    // If any parent variables were missing out of bounds on this
    // sample, apply that to this conditional as well.  A more
    // efficient way to do this would be better.
    ppt_cond.get_missingOutOfBounds(parent, vt);

    // Add the point
    ppt_cond.add_bottom_up(vt, count);

    if (Debug.ppt_match(ppt_cond)) {
      String related_vars = Debug.related_vars(ppt_cond, vt);
      debug.fine(
          "Adding sample to "
              + ppt_cond
              + " with "
              + vt.size()
              + " vars"
              + (!related_vars.equals("") ? (" including " + related_vars) : ""));
    }
  }

  /**
   * Chooses the correct conditional point based on the values in this sample. Returns null if none
   * is applicable.
   */
  public @Nullable PptConditional choose_conditional(ValueTuple vt) {

    // Currently only binary implications are supported
    assert ppts.length == 2;

    boolean splitter_test;
    try {
      splitter_test = ((PptConditional) ppts[0]).splitter.test(vt);
    } catch (Throwable e) {
      // If an exception is thrown, don't put the data on either side
      // of the split.
      if (false) { // need to add a debugging switch
        System.out.println(
            "Exception thrown in PptSplitter.choose_conditional() for " + ppts[0].name());
        System.out.println("Vars = " + Debug.related_vars(ppts[0], vt));
      }
      return null;
    }

    // Choose the appropriate conditional point based on the condition result
    return ((PptConditional) ppts[splitter_test ? 0 : 1]);
  }

  /** Adds implication invariants based on the invariants found on each side of the split. */
  @RequiresNonNull({
    "parent.equality_view",
    "daikon.suppress.NIS.all_suppressions",
    "daikon.suppress.NIS.suppressor_map"
  })
  public void add_implications() {

    // Currently only binary implications are supported
    assert ppts.length == 2;

    // Create any NIS suppressed invariants in each conditional
    @SuppressWarnings({"unchecked", "rawtypes"})
    List<Invariant> suppressed_invs[] =
        (ArrayList<Invariant>[]) new @Nullable ArrayList[ppts.length];
    for (int i = 0; i < ppts.length; i++) {
      suppressed_invs[i] = NIS.create_suppressed_invs(ppts[i]);
    }

    add_implications_pair();

    // Remove all of the NIS suppressed invariants that we just created.
    for (int i = 0; i < ppts.length; i++) {
      ppts[i].remove_invs(suppressed_invs[i]);
    }
  }

  /**
   * Given a pair of conditional program points, form implications from the invariants true at each
   * one.
   *
   * <p>The algorithm first divides the invariants into two groups:
   *
   * <ol>
   *   <li>the "same" invariants: those that are true at both program points.
   *   <li>the "different" invariants: all other invariants.
   * </ol>
   *
253   * The "exclusive" invariants (a subset of the "different" inviariants) are true at one program
254   * point, and their negation is true at the other program point.
255   *
256   * <p>At the first program point, for each exclusive invariant and each different invariant,
257   * create a conditional of the form "exclusive &hArr; different". Do the same at the second
258   * program point.
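   *
   * <p>For example (the invariants below are illustrative only): suppose the first program point
   * has {@code x > 0}, {@code z == 0}, and {@code y == 1}, and the second has {@code x <= 0} and
   * {@code z != 0}. Then {@code (x > 0, x <= 0)} and {@code (z == 0, z != 0)} are exclusive pairs,
   * and {@code y == 1} is merely different. Taking {@code x > 0} / {@code x <= 0} as the canonical
   * pair, the parent would receive implications such as
   *
   * <pre>{@code
   *   x > 0   <==>  z == 0
   *   x <= 0  <==>  z != 0
   *   x > 0    ==>  y == 1
   * }</pre>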
   *
   * <p>This method is correct only if the two conditional program points fully partition the input
   * space (their conditions are negations of one another). For instance, suppose there is a
   * three-way split with the following invariants detected at each:
   *
   * <pre>
   *   {A,B}  {!A,!B}  {A,!B}
   * </pre>
   *
   * Examining just the first two would suggest that "A &hArr; B" is valid, but in fact that is a
   * false inference. Note that this situation can occur if the splitting condition uses variables
   * that can ever be missing. (Or, presumably, if the condition ever cannot be computed.)
   */
  @RequiresNonNull("parent.equality_view")
  private void add_implications_pair() {

    for (PptTopLevel pchild : ppts) {
      // System.out.printf("splitter child = %s%n", pchild.name());
      if (pchild.equality_view == null) {
        System.out.printf("this: %s%n", this);
        System.out.printf("pchild: %s[%s]%n", pchild, System.identityHashCode(pchild));
        System.out.printf("pchild.children: %s%n", pchild.children);
        for (PptRelation rel : pchild.children) {
          System.out.printf("  relation = %s%n", rel);
        }
        System.out.printf("parent: %s%n", parent);
        throw new Error();
      }
    }

    debug.fine("add_implications_pair: parent = " + parent.name);

    // Maps permuted invariants to their original invariants
    Map<Invariant, Invariant> orig_invs = new LinkedHashMap<>();

    List<@KeyFor("orig_invs") Invariant[]> exclusive_invs_vec =
        new ArrayList<@KeyFor("orig_invs") Invariant[]>();

    // Does not contain anything that is in exclusive_invs_vec.
    // (Those may be added temporarily, but are removed later.)
    List<@Nullable @KeyFor("orig_invs") Invariant[]> different_invs_vec =
        new ArrayList<@Nullable @KeyFor("orig_invs") Invariant[]>();

    /// ??? MDE
    // Loop through each possible parent slice
    List<VarInfo[]> slices = possible_slices();

    int num_children = ppts.length;

    for (VarInfo[] vis : slices) {

      // Each element of invs[i] is an invariant from the i-th child, permuted to
      // the parent (and with a parent slice as its ppt slot).
      @SuppressWarnings({"unchecked", "rawtypes"})
      /*NNC:@MonotonicNonNull*/ List<Invariant> invs[] =
          (ArrayList<Invariant>[]) new @Nullable ArrayList[num_children];

      // find the parent slice
      PptSlice pslice = parent.get_or_instantiate_slice(vis);

      // Daikon.debugProgress.fine ("    slice: " + pslice.name());

      // Loop through each child ppt
      for (int childno = 0; childno < num_children; childno++) {
        PptTopLevel child_ppt = ppts[childno];

        assert child_ppt.equality_view != null : child_ppt.name();
        assert parent.equality_view != null : parent.name();

        invs[childno] = new ArrayList<Invariant>(); // permuted to parent

        // vis is in parent order.  Find corresponding child vis, in child order.
        // Each of these arrays contains child vis.
        /*NNC:@MonotonicNonNull*/ VarInfo[] cvis_non_canonical = new VarInfo[vis.length];
        /*NNC:@MonotonicNonNull*/ VarInfo[] cvis = new VarInfo[vis.length];
        /*NNC:@MonotonicNonNull*/ VarInfo[] cvis_sorted = new VarInfo[vis.length];
        for (int kk = 0; kk < vis.length; kk++) {
          cvis_non_canonical[kk] = matching_var(child_ppt, vis[kk]);
          cvis[kk] = cvis_non_canonical[kk].canonicalRep();
          cvis_sorted[kk] = cvis[kk];
        }
        Arrays.sort(cvis_sorted, VarInfo.IndexComparator.getInstance());

        cvis_non_canonical = castNonNullDeep(cvis_non_canonical); // https://tinyurl.com/cfissue/986
        cvis = castNonNullDeep(cvis); // https://tinyurl.com/cfissue/986
        cvis_sorted = castNonNullDeep(cvis_sorted); // https://tinyurl.com/cfissue/986

        // Look for an equality invariant in the non-canonical slice (if any).
        // Note that only an equality invariant can exist in a non-canonical
        // slice.  If it does exist, we want it rather than the canonical
        // form (which for equality invariants will always be of the form
        // 'a == a').
        Invariant eq_inv = null;
        if (!Arrays.equals(cvis_non_canonical, cvis)) {
          PptSlice nc_slice = child_ppt.findSlice(cvis_non_canonical);
          if (nc_slice != null) {
            if (nc_slice.invs.size() != 1) {
              // Impossible: multiple invariants found
              System.out.println("Found " + nc_slice.invs.size() + " invs at " + nc_slice);
              for (Invariant inv2 : nc_slice.invs) {
                System.out.println(" -- inv = " + inv2);
              }
              for (VarInfo cvi : cvis_non_canonical) {
                System.out.println(" -- equality set = " + cvi.equalitySet.shortString());
              }
              throw new Error("nc_slice.invs.size() == " + nc_slice.invs.size());
            }
            eq_inv = nc_slice.invs.get(0);
            debug.fine("Found eq inv " + eq_inv);
          }
        }

        // Find the corresponding slice
        PptSlice cslice = child_ppt.findSlice(cvis_sorted);
        if (cslice == null) {
          if (eq_inv != null) {
            // There is trouble.  Print a lot of debugging information.
            System.out.println("cvis_non_canonical:");
            for (VarInfo cvi : cvis_non_canonical) {
              System.out.println("  " + cvi);
            }
            System.out.println("cvis:");
            for (VarInfo cvi : cvis) {
              System.out.println("  " + cvi);
            }
            System.out.println("cvis_sorted:");
            for (VarInfo cvi : cvis_sorted) {
              System.out.println("  " + cvi);
            }
            if (DynamicConstants.dkconfig_use_dynamic_constant_optimization) {
              assert child_ppt.constants != null
                  : "@AssumeAssertion(nullness):  dependent:  config var";
              System.out.println("constant values for cvis_sorted:");
              for (VarInfo cvi : cvis_sorted) {
                System.out.println("  " + child_ppt.constants.getConstant(cvi));
              }
            }
            String eq_inv_ppt = eq_inv.ppt.toString();
            assert eq_inv.ppt.equals(child_ppt.findSlice(cvis_non_canonical));

            System.out.println("All child_ppt slices: ");
            for (PptSlice slice : child_ppt.views_iterable()) {
              System.out.println("  " + slice);
            }

            // found slice on non-canonical, but didn't find it here
            throw new RuntimeException(
                String.join(
                    System.lineSeparator(),
                    "found eq_inv",
                    "  " + eq_inv,
                    "  @" + eq_inv_ppt,
                    "  but can't find slice for " + Arrays.toString(cvis_sorted)));
          }
          // If no slice, just give up?
          continue;
        }

        // Copy each invariant permuted to the parent.
        // This permits them to be directly compared to one another.
        int[] permute = PptTopLevel.build_permute(cvis_sorted, cvis);
        for (Invariant orig_inv : cslice.invs) {
          Invariant inv = orig_inv.clone_and_permute(permute);
          inv.ppt = pslice;
          if ((eq_inv != null) && orig_inv.getClass().equals(eq_inv.getClass())) orig_inv = eq_inv;
          assert !orig_invs.containsKey(inv);
          orig_invs.put(inv, orig_inv);
          invs[childno].add(inv);
        }
      } // children loop

      invs = castNonNullDeep(invs); // https://tinyurl.com/cfissue/986

      // If neither child slice has invariants there is nothing to do
      if ((invs[0].size() == 0) && (invs[1].size() == 0)) {
        if (pslice.invs.size() == 0) parent.removeSlice(pslice);
        continue;
      }

      if (pslice.invs.size() == 0) {
        debug.fine("PptSplitter: created new slice " + Arrays.toString(vis) + " @" + parent.name);
      }

      // Add any exclusive conditions for this slice to the list
      @SuppressWarnings("keyfor") // need qualifier parameter to Invariants
      List<@KeyFor("orig_invs") Invariant[]> ec = exclusive_conditions(invs[0], invs[1]);
      exclusive_invs_vec.addAll(ec);

      // Add any invariants that are different to the list
      @SuppressWarnings("keyfor") // need qualifier parameter to Invariants
      List<@Nullable @KeyFor("orig_invs") Invariant[]> di = different_invariants(invs[0], invs[1]);
      different_invs_vec.addAll(di);
    } // slices.iterator() loop

    if (debug.isLoggable(Level.FINE)) {
      debug.fine("Found " + exclusive_invs_vec.size() + " exclusive conditions ");
      for (Invariant[] invs : exclusive_invs_vec) {
        invs[0].log("exclusive condition with %s", invs[1].format());
        invs[1].log("exclusive condition with %s", invs[0].format());
        debug.fine("-- " + invs[0] + " -- " + invs[1]);
      }
      debug.fine("Found " + different_invs_vec.size() + " different invariants ");
      for (@Nullable Invariant[] invs : different_invs_vec) {
        if (invs[0] != null) invs[0].log("%s differs from %s", invs[0], invs[1]);
        if (invs[1] != null) invs[1].log("%s differs from %s", invs[0], invs[1]);
        debug.fine("-- " + invs[0] + " -- " + invs[1]);
      }
    }

    PptTopLevel ppt1 = ppts[0];
    PptTopLevel ppt2 = ppts[1];

    // Add the splitting condition as an exclusive condition if requested
    if ((splitter != null) && dkconfig_dummy_invariant_level > 0) {
      if (exclusive_invs_vec.size() == 0 || dkconfig_dummy_invariant_level >= 2) {
        // As a last resort, try using the user's supplied DummyInvariant
        debug.fine("addImplications: resorting to dummy");
        PptConditional cond1 = (PptConditional) ppt1;
        PptConditional cond2 = (PptConditional) ppt2;
        debug.fine("addImplications: cond1 " + cond1 + " cond2 " + cond2);
        cond1.splitter.instantiateDummy(ppt1);
        cond2.splitter.instantiateDummy(ppt2);
        DummyInvariant dummy1 = cond1.dummyInvariant();
        DummyInvariant dummy2 = cond2.dummyInvariant();
        debug.fine("addImplications: dummy1 " + dummy1 + " dummy2 " + dummy2);
        if (dummy1 != null && dummy1.valid && dummy2 != null && dummy2.valid) {
          assert !cond1.splitter_inverse;
          assert cond2.splitter_inverse;
          dummy2.negate();
          orig_invs.put(dummy1, dummy1);
          orig_invs.put(dummy2, dummy2);
          @KeyFor("orig_invs") Invariant[] dummy_pair = new @KeyFor("orig_invs") Invariant[] {dummy1, dummy2};
          exclusive_invs_vec.add(dummy_pair);
          // Don't add the dummy_pair, as it would just be removed afterward.
          // different_invs_vec.add(dummy_pair);
        } else {
          // nothing to do
        }
      }
    }

    // If there are no exclusive conditions, we can do nothing here
    if (exclusive_invs_vec.size() == 0) {
      debug.fine("addImplications: no exclusive conditions");
      return;
    }

    // Remove exclusive invariants from the different invariants list.
    // It would be better not to have added them in the first place,
    // but this is easier for now.
    for (Iterator<@Nullable @KeyFor("orig_invs") Invariant[]> ii = different_invs_vec.iterator();
        ii.hasNext(); ) {
      @Nullable Invariant[] diff_invs = ii.next();
      if (diff_invs[0] != null) {
        assert diff_invs[1] == null;
        // debug.fine ("Considering inv0 " + diff_invs[0]);
        for (Invariant[] ex_invs : exclusive_invs_vec) {
          if (ex_invs[0] == diff_invs[0]) {
            debug.fine("removed exclusive invariant " + ex_invs[0]);
            ii.remove();
            break;
          }
        }
      } else {
        assert diff_invs[1] != null;
        // debug.fine ("Considering inv1 " + diff_invs[1]);
        for (Invariant[] ex_invs : exclusive_invs_vec) {
          if (ex_invs[1] == diff_invs[1]) {
            debug.fine("removed exclusive invariant " + ex_invs[1]);
            ii.remove();
            break;
          }
        }
      }
    }

    // Get the canonical predicate invariants from the exclusive list.
    // We pick the first one that is neither obvious nor suppressed.
    // If all are either obvious or suppressed, we just pick the first
    // one in the list.
    // TODO: Why do we want canonical predicate invariants?  How will they be used?  It seems that
    // different elements of this list have different semantics.
    // TODO: After this loop, might the two canonical invariants not be exclusive with one another?
    // TODO: con_invs should probably be renamed to canon_invs.
    /*NNC:@MonotonicNonNull*/ Invariant[] con_invs = new Invariant[2];
    for (Invariant[] invs : exclusive_invs_vec) {
      assert invs.length == 2;
      for (int jj = 0; jj < con_invs.length; jj++) {
        if (con_invs[jj] == null) {
          @SuppressWarnings("nullness") // map
          @NonNull Invariant orig = orig_invs.get(invs[jj]);
          assert orig != null : "Not in orig_invs: " + invs[jj] + " " + invs[jj].getClass();
          if ((orig.isObvious() == null) && !orig.is_ni_suppressed()) con_invs[jj] = invs[jj];
        }
      }
    }
    Invariant[] first = exclusive_invs_vec.get(0);
    for (int jj = 0; jj < con_invs.length; jj++) {
      if (con_invs[jj] == null) {
        System.out.println(
            "Warning: No non-obvious non-suppressed exclusive"
                + " invariants found in "
                + parent.name);
        // throw new Error();
        con_invs[jj] = first[jj];
      }
    }
    con_invs = castNonNullDeep(con_invs); // https://tinyurl.com/cfissue/986

    // Create double-implications for each exclusive invariant
    for (Invariant[] invs : exclusive_invs_vec) {
      for (int jj = 0; jj < con_invs.length; jj++) {
        if (con_invs[jj] != invs[jj]) {
          add_implication(parent, con_invs[jj], invs[jj], true, orig_invs);
        }
      }
    }

    // Create single implication for each different invariant
    for (@Nullable Invariant[] invs : different_invs_vec) {
      for (int jj = 0; jj < con_invs.length; jj++) {
        if (invs[jj] != null) add_implication(parent, con_invs[jj], invs[jj], false, orig_invs);
      }
    }
  } // add_implications_pair

  /**
   * Returns a list of all possible slices that may appear at the parent. The parent must have
   * already been created by merging the invariants from its child conditionals.
   *
   * <p>This is a subset of the slices that actually exist at the parent because the parent may have
   * implications created from invariants in child slices that only exist in one child (and thus
   * don't exist in the parent).
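   *
   * <p>For example (illustrative only), if the sorted leaders are {@code x}, {@code y}, and {@code
   * z}, the candidate slices are the unary {x}, {y}, {z}, the binary {x,x}, {x,y}, ..., {z,z}, and
   * the ternary {x,x,x}, ..., {z,z,z}, each included only if {@code is_slice_ok} accepts it.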
   */
  @RequiresNonNull("parent.equality_view")
  private List<VarInfo[]> possible_slices() {

    List<VarInfo[]> result = new ArrayList<>();

    // Get an array of leaders at the parent to build slices over
    VarInfo[] leaders = parent.equality_view.get_leaders_sorted();

    // Create unary views
    for (int i = 0; i < leaders.length; i++) {
      if (parent.is_slice_ok(leaders[i])) {
        result.add(new VarInfo[] {leaders[i]});
      }
    }

    // Create binary views
    for (int i = 0; i < leaders.length; i++) {
      for (int j = i; j < leaders.length; j++) {
        if (parent.is_slice_ok(leaders[i], leaders[j])) {
          result.add(new VarInfo[] {leaders[i], leaders[j]});
        }
      }
    }

    /// Expensive!
    /// ??? MDE
    // Create ternary views
    for (int i = 0; i < leaders.length; i++) {
      for (int j = i; j < leaders.length; j++) {
        for (int k = j; k < leaders.length; k++) {
          if (parent.is_slice_ok(leaders[i], leaders[j], leaders[k])) {
            result.add(new VarInfo[] {leaders[i], leaders[j], leaders[k]});
          }
        }
      }
    }

    return result;
  }

  // Could be used in assertion that all invariants are at same point.
  @SuppressWarnings("UnusedMethod")
  private boolean at_same_ppt(List<Invariant> invs1, List<Invariant> invs2) {
    PptSlice ppt = null;
    Iterator<Invariant> itor =
        new CollectionsPlume.MergedIterator2<Invariant>(invs1.iterator(), invs2.iterator());
    for (; itor.hasNext(); ) {
      Invariant inv = itor.next();
      if (ppt == null) {
        ppt = inv.ppt;
      } else {
        if (inv.ppt != ppt) {
          return false;
        }
      }
    }
    return true;
  }

  // TODO: Should this only include invariants such that all of their variables are defined
  // everywhere?
  /**
   * Determine which elements of invs1 are mutually exclusive with elements of invs2 (for example,
   * {@code x > 0} and {@code x <= 0}). Result elements are two-element {@code Invariant} arrays,
   * one element from each list. All the arguments should be over the same program point.
   *
   * @param invs1 a set of invariants
   * @param invs2 a set of invariants
   * @return pairs of mutually exclusive invariants, each pair containing one invariant from each
   *     list
   */
  List<Invariant[]> exclusive_conditions(List<Invariant> invs1, List<Invariant> invs2) {

    List<Invariant[]> result = new ArrayList<>();
    for (Invariant inv1 : invs1) {
      for (Invariant inv2 : invs2) {
        // // This is a debugging tool, to make sure that various versions
        // // of isExclusiveFormula remain coordinated.  (That's also one
        // // reason we don't break out of the loop early:  also, there will
        // // be few invariants in a slice, so breaking out is of minimal
        // // benefit.)
        // assert inv1.isExclusiveFormula(inv2)
        //                  == inv2.isExclusiveFormula(inv1)
        //              : "Bad exclusivity: " + inv1.isExclusiveFormula(inv2)
        //               + " " + inv2.isExclusiveFormula(inv1)
        //               + "    " + inv1.format() + "    " + inv2.format();
        if (inv1.isExclusiveFormula(inv2)) {
          result.add(new Invariant[] {inv1, inv2});
        }
      }
    }
    return result;
  }

  /**
   * Determine which elements of invs1 differ from elements of invs2. Result elements are
   * two-element {@code Invariant} arrays in which exactly one element is null. All the arguments
   * should be over the same program point.
   *
   * @param invs1 a set of invariants
   * @param invs2 a set of invariants
   * @return invariants in the exclusive-or of {@code invs1} and {@code invs2}
   */
  List<@Nullable Invariant[]> different_invariants(List<Invariant> invs1, List<Invariant> invs2) {
    NavigableSet<Invariant> ss1 = new TreeSet<>(icfp);
    ss1.addAll(invs1);
    NavigableSet<Invariant> ss2 = new TreeSet<>(icfp);
    ss2.addAll(invs2);
    List<@Nullable Invariant[]> result = new ArrayList<>();
    for (OrderedPairIterator<Invariant> opi =
            new OrderedPairIterator<Invariant>(ss1.iterator(), ss2.iterator(), icfp);
        opi.hasNext(); ) {
      MPair<@Nullable Invariant, @Nullable Invariant> pair = opi.next();
      if ((pair.first == null) || (pair.second == null)
      // || (icfp.compare(pair.a, pair.b) != 0)
      ) {
        result.add(new @Nullable Invariant[] {pair.first, pair.second});
      }
    }
    return result;
  }

  /**
   * Determine which elements of invs1 are the same as elements of invs2. Result elements are
   * {@code Invariant}s (drawn from the invs1 list). All the arguments should be over the same
   * program point.
   *
   * @param invs1 a set of invariants
   * @param invs2 a set of invariants
   * @return the intersection of {@code invs1} and {@code invs2}
   */
  List<Invariant> same_invariants(List<Invariant> invs1, List<Invariant> invs2) {

    NavigableSet<Invariant> ss1 = new TreeSet<>(icfp);
    ss1.addAll(invs1);
    NavigableSet<Invariant> ss2 = new TreeSet<>(icfp);
    ss2.addAll(invs2);

    ss1.retainAll(ss2);
    return new ArrayList<Invariant>(ss1);

    // // This seems like a rather complicated implementation.  Why can't it
    // // just use set intersection?
    // List<@Nullable Invariant> result = new ArrayList<>();
    // for (OrderedPairIterator<Invariant> opi = new OrderedPairIterator<>(ss1.iterator(),
    //                                 ss2.iterator(), icfp);
    //      opi.hasNext(); ) {
    //   IPair<@Nullable Invariant,@Nullable Invariant> pair = opi.next();
    //   if (pair.a != null && pair.b != null) {
    //     Invariant inv1 = pair.a;
    //     Invariant inv2 = pair.b;
    //     result.add(inv1);
    //   }
    // }
    // return result;
  }

  /**
   * If the implication specified by predicate and consequent is a valid implication, adds it to
   * the joiner view of {@code ppt}.
   *
   * @param ppt the program point whose joiner view receives the implication
   * @param predicate the predicate (left-hand side) of the implication
   * @param consequent the consequent (right-hand side) of the implication
   * @param iff if true, make the implication a bi-implication
   * @param orig_invs maps permuted invariants to their original invariants
   */
  public void add_implication(
      PptTopLevel ppt,
      Invariant predicate,
      Invariant consequent,
      boolean iff,
      Map<Invariant, Invariant> orig_invs) {
    debug.fine("add_implication " + ppt + " " + predicate + " " + consequent + " " + iff);

    assert predicate != null;
    assert consequent != null;

    @SuppressWarnings("nullness") // map: method precondition
    @NonNull Invariant orig_pred = orig_invs.get(predicate);
    Invariant orig_cons = orig_invs.get(consequent);
    if (orig_cons == null) {
      assert (consequent instanceof DummyInvariant);
      orig_cons = consequent;
    }
    assert orig_pred != null : "predicate is not in orig_invs: " + predicate;
    assert orig_cons != null;

    // Don't add consequents that are obvious or suppressed.
    // JHP: Jan 2005: It might be better to create them anyway and
    // only suppress them in printing.  Also, this could possibly be
    // better implemented by changing the way that we create the list
    // of invariants that is in one conditional and not in the other
    // to not include an invariant if it is suppressed on the other
    // side.  This would have the pleasant side effect of not forcing
    // all of the suppressed invariants to be created before
    // determining implications.
    if (orig_cons.isObvious() != null) {
      debug.fine("add_implication obvious: " + orig_cons.isObvious().format());
      return;
    }
    if (orig_cons.is_ni_suppressed()) {
      debug.fine("add_implication suppressed: " + orig_cons.is_ni_suppressed());
      return;
    }

    // System.out.println("add_implication:");
    // System.out.println("  predicate = " + predicate.format());
    // System.out.println("  consequent= " + consequent.format());
    // System.out.println("  orig_pred = " + orig_pred.format());
    // System.out.println("  orig_cons = " + orig_cons.format());

    if (dkconfig_split_bi_implications && iff) {
      Implication imp =
          Implication.makeImplication(ppt, predicate, consequent, false, orig_pred, orig_cons);
      if (imp != null) ppt.joiner_view.addInvariant(imp);
      imp = Implication.makeImplication(ppt, consequent, predicate, false, orig_cons, orig_pred);
      if (imp != null) {
        ppt.joiner_view.addInvariant(imp);
      }

      return;
    }

    Implication imp =
        Implication.makeImplication(ppt, predicate, consequent, iff, orig_pred, orig_cons);
    if (imp == null) {
      // The predicate is the same as the consequent, or the implication
      // already exists.
      debug.fine("add_implication imp == null");
      return;
    }

    ppt.joiner_view.addInvariant(imp);
  }

  /**
   * Adds the specified relation from each conditional ppt in this to the corresponding conditional
   * ppt in ppt_split. The relation specified should be a relation from this.parent to
   * ppt_split.parent.
   *
   * @param rel the relation to add
   * @param ppt_split the target of the relation; that is, the relation goes from {@code this} to
   *     {@code ppt_split}
   */
  public void add_relation(PptRelation rel, PptSplitter ppt_split) {
    for (int ii = 0; ii < ppts.length; ii++) {
      @SuppressWarnings("UnusedVariable")
      PptRelation cond_rel = rel.copy(ppts[ii], ppt_split.ppts[ii]);
      // System.out.println ("Added relation: " + cond_rel);
      // System.out.println ("with relations: "
      //                      + cond_rel.parent_to_child_var_string());
    }
  }

  /**
   * Returns the VarInfo in ppt1 that matches the specified VarInfo. The variables at each point
   * must match exactly. This is a reasonable assumption for the ppts in PptSplitter and their
   * parent.
   *
   * @param ppt1 a program point
   * @param ppt2_var a variable from a different program point
   */
  private VarInfo matching_var(PptTopLevel ppt1, VarInfo ppt2_var) {
    VarInfo v = ppt1.var_infos[ppt2_var.varinfo_index];
    assert v.name().equals(ppt2_var.name());
    return v;
  }

  @SideEffectFree
  @Override
  public String toString(@GuardSatisfied PptSplitter this) {
    return "Splitter " + splitter + ": ppt1 " + ppts[0].name() + ": ppt2 " + ppts[1].name();
  }
}