1 package Analysis.Disjoint;
3 import Analysis.CallGraph.*;
4 import Analysis.Liveness;
5 import Analysis.ArrayReferencees;
6 import Analysis.OoOJava.Accessible;
7 import Analysis.OoOJava.RBlockRelationAnalysis;
8 import Analysis.FlatIRGraph.*;
11 import IR.Tree.Modifiers;
16 public class DisjointAnalysis implements HeapAnalysis {
18 ///////////////////////////////////////////
20 // Public interface to discover possible
21 // sharing in the program under analysis
23 ///////////////////////////////////////////
25 // if an object allocated at the target site may be
26 // reachable from both an object from root1 and an
27 // object allocated at root2, return TRUE
// Returns TRUE if an object allocated at the target site may be reachable
// from both an object of root site 1 AND an object of root site 2.
// NOTE(review): listing elides lines here (remaining parameters fnRoot1,
// fnRoot2, fnTarget and closing brace are not visible) — confirm in full source.
28 public boolean mayBothReachTarget(FlatMethod fm,
33 AllocSite asr1 = getAllocationSiteFromFlatNew(fnRoot1);
34 AllocSite asr2 = getAllocationSiteFromFlatNew(fnRoot2);
// both root sites must be flagged for this query to be meaningful
35 assert asr1.isFlagged();
36 assert asr2.isFlagged();
38 AllocSite ast = getAllocationSiteFromFlatNew(fnTarget);
// query the method's reach-graph result for the given flat method
39 ReachGraph rg = getPartial(fm.getMethod() );
41 return rg.mayBothReachTarget(asr1, asr2, ast);
44 // similar to the method above, return TRUE if ever
45 // more than one object from the root allocation site
46 // may reach an object from the target site
// Returns TRUE if more than one object from the root allocation site may
// reach an object from the target site.
// NOTE(review): listing elides lines here (parameters fnRoot, fnTarget and
// closing brace are not visible) — confirm in full source.
47 public boolean mayManyReachTarget(FlatMethod fm,
51 AllocSite asr = getAllocationSiteFromFlatNew(fnRoot);
// root site must be flagged for this query to be meaningful
52 assert asr.isFlagged();
54 AllocSite ast = getAllocationSiteFromFlatNew(fnTarget);
55 ReachGraph rg = getPartial(fm.getMethod() );
57 return rg.mayManyReachTarget(asr, ast);
// Public accessor: flagged allocation sites reachable from the given task.
// Guards against use while the interprocedural analysis is still running,
// then delegates to the private implementation.
63 public HashSet<AllocSite>
64 getFlaggedAllocationSitesReachableFromTask(TaskDescriptor td) {
65 checkAnalysisComplete();
66 return getFlaggedAllocationSitesReachableFromTaskPRIVATE(td);
// Public accessor: the analysis abstraction (AllocSite) for a FlatNew node.
69 public AllocSite getAllocationSiteFromFlatNew(FlatNew fn) {
70 checkAnalysisComplete();
71 return getAllocSiteFromFlatNewPRIVATE(fn);
// Public accessor: map a heap region node ID back to the allocation site
// that created it. Returns null if the ID is unknown (Hashtable.get).
74 public AllocSite getAllocationSiteFromHeapRegionNodeID(Integer id) {
75 checkAnalysisComplete();
76 return mapHrnIdToAllocSite.get(id);
// Shared heap regions possibly reachable from BOTH parameter indices of the
// given task/method, according to its final reach graph.
// NOTE(review): listing elides the remaining parameter lines (paramIndex1,
// paramIndex2) — confirm in full source.
79 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
82 checkAnalysisComplete();
83 ReachGraph rg=mapDescriptorToCompleteReachGraph.get(taskOrMethod);
84 FlatMethod fm=state.getMethodFlat(taskOrMethod);
86 return rg.mayReachSharedObjects(fm, paramIndex1, paramIndex2);
// Shared heap regions possibly reachable from both a parameter and an
// allocation site of the given task/method.
89 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
90 int paramIndex, AllocSite alloc) {
91 checkAnalysisComplete();
92 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
93 FlatMethod fm=state.getMethodFlat(taskOrMethod);
95 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Symmetric overload of the (paramIndex, alloc) query — argument order is
// swapped at this interface but the same underlying query is issued.
98 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
99 AllocSite alloc, int paramIndex) {
100 checkAnalysisComplete();
101 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
102 FlatMethod fm=state.getMethodFlat(taskOrMethod);
104 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Shared heap regions possibly reachable from objects of BOTH allocation
// sites, in the context of the given task/method.
107 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
108 AllocSite alloc1, AllocSite alloc2) {
109 checkAnalysisComplete();
110 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
112 return rg.mayReachSharedObjects(alloc1, alloc2);
// Render a set of heap region nodes as a human-readable, multi-line string.
// NOTE(review): the declaration of the accumulator `out`, the branch between
// the two formats (with/without verbose alloc site), and the return statement
// are elided from this listing — confirm in full source.
115 public String prettyPrintNodeSet(Set<HeapRegionNode> s) {
116 checkAnalysisComplete();
120 Iterator<HeapRegionNode> i = s.iterator();
121 while (i.hasNext()) {
122 HeapRegionNode n = i.next();
124 AllocSite as = n.getAllocSite();
// short form: node only (presumably when it has no alloc site)
126 out += " " + n.toString() + ",\n";
// verbose form: node plus full allocation-site description
128 out += " " + n.toString() + ": " + as.toStringVerbose()
137 // use the methods given above to check every possible sharing class
138 // between task parameters and flagged allocation sites reachable
// Report every potential sharing class to outputFile: parameter-vs-parameter,
// parameter-vs-allocation-site, and allocation-site pairs, for every task.
// tabularOutput switches between human-readable prose and a LaTeX-style row.
// NOTE(review): parameters timeReport, justTime, numLines and several closing
// braces are elided from this listing — confirm in full source.
140 public void writeAllSharing(String outputFile,
143 boolean tabularOutput,
146 throws java.io.IOException {
147 checkAnalysisComplete();
149 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
151 if (!tabularOutput) {
152 bw.write("Conducting ownership analysis with allocation depth = "
153 + allocationDepth + "\n");
154 bw.write(timeReport + "\n");
159 // look through every task for potential sharing
160 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
161 while (taskItr.hasNext()) {
162 TaskDescriptor td = (TaskDescriptor) taskItr.next();
164 if (!tabularOutput) {
165 bw.write("\n---------" + td + "--------\n");
168 HashSet<AllocSite> allocSites = getFlaggedAllocationSitesReachableFromTask(td);
170 Set<HeapRegionNode> common;
172 // for each task parameter, check for sharing classes with
173 // other task parameters and every allocation site
174 // reachable from this task
175 boolean foundSomeSharing = false;
177 FlatMethod fm = state.getMethodFlat(td);
178 for (int i = 0; i < fm.numParameters(); ++i) {
180 // skip parameters with types that cannot reference
182 if( !shouldAnalysisTrack(fm.getParameter(i).getType() ) ) {
186 // for the ith parameter check for sharing classes to all
187 // higher numbered parameters
188 for (int j = i + 1; j < fm.numParameters(); ++j) {
190 // skip parameters with types that cannot reference
192 if( !shouldAnalysisTrack(fm.getParameter(j).getType() ) ) {
197 common = hasPotentialSharing(td, i, j);
198 if (!common.isEmpty()) {
199 foundSomeSharing = true;
201 if (!tabularOutput) {
202 bw.write("Potential sharing between parameters " + i
203 + " and " + j + ".\n");
204 bw.write(prettyPrintNodeSet(common) + "\n");
209 // for the ith parameter, check for sharing classes against
210 // the set of allocation sites reachable from this
212 Iterator allocItr = allocSites.iterator();
213 while (allocItr.hasNext()) {
214 AllocSite as = (AllocSite) allocItr.next();
215 common = hasPotentialSharing(td, i, as);
216 if (!common.isEmpty()) {
217 foundSomeSharing = true;
219 if (!tabularOutput) {
220 bw.write("Potential sharing between parameter " + i
221 + " and " + as.getFlatNew() + ".\n");
222 bw.write(prettyPrintNodeSet(common) + "\n");
228 // for each allocation site check for sharing classes with
229 // other allocation sites in the context of execution
// outerChecked avoids reporting each unordered pair (as1, as2) twice
231 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
232 Iterator allocItr1 = allocSites.iterator();
233 while (allocItr1.hasNext()) {
234 AllocSite as1 = (AllocSite) allocItr1.next();
236 Iterator allocItr2 = allocSites.iterator();
237 while (allocItr2.hasNext()) {
238 AllocSite as2 = (AllocSite) allocItr2.next();
240 if (!outerChecked.contains(as2)) {
241 common = hasPotentialSharing(td, as1, as2);
243 if (!common.isEmpty()) {
244 foundSomeSharing = true;
246 if (!tabularOutput) {
247 bw.write("Potential sharing between "
248 + as1.getFlatNew() + " and "
249 + as2.getFlatNew() + ".\n");
250 bw.write(prettyPrintNodeSet(common) + "\n");
256 outerChecked.add(as1);
259 if (!foundSomeSharing) {
260 if (!tabularOutput) {
261 bw.write("No sharing between flagged objects in Task " + td
// tabular (LaTeX table row) summary output
269 bw.write(" & " + numSharing + " & " + justTime + " & " + numLines
270 + " & " + numMethodsAnalyzed() + " \\\\\n");
272 bw.write("\nNumber sharing classes: "+numSharing);
280 // this version of writeAllSharing is for Java programs that have no tasks
281 // ***********************************
282 // WARNING: THIS DOES NOT DO THE RIGHT THING, REPORTS 0 ALWAYS!
283 // It should use mayBothReachTarget and mayManyReachTarget like
284 // OoOJava does to query analysis results
285 // ***********************************
// Sharing report for Java (task-free) programs: pairwise allocation-site
// sharing queried against the main method's context. See the WARNING above:
// this reports 0 always and should use mayBothReachTarget/mayManyReachTarget.
// NOTE(review): parameters timeReport etc. and the as1/as2 arguments of the
// hasPotentialSharing call are elided from this listing — confirm in full source.
286 public void writeAllSharingJava(String outputFile,
289 boolean tabularOutput,
292 throws java.io.IOException {
293 checkAnalysisComplete();
299 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
301 bw.write("Conducting disjoint reachability analysis with allocation depth = "
302 + allocationDepth + "\n");
303 bw.write(timeReport + "\n\n");
305 boolean foundSomeSharing = false;
307 Descriptor d = typeUtil.getMain();
308 HashSet<AllocSite> allocSites = getFlaggedAllocationSites(d);
310 // for each allocation site check for sharing classes with
311 // other allocation sites in the context of execution
// outerChecked avoids reporting each unordered pair twice
313 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
314 Iterator allocItr1 = allocSites.iterator();
315 while (allocItr1.hasNext()) {
316 AllocSite as1 = (AllocSite) allocItr1.next();
318 Iterator allocItr2 = allocSites.iterator();
319 while (allocItr2.hasNext()) {
320 AllocSite as2 = (AllocSite) allocItr2.next();
322 if (!outerChecked.contains(as2)) {
323 Set<HeapRegionNode> common = hasPotentialSharing(d,
326 if (!common.isEmpty()) {
327 foundSomeSharing = true;
328 bw.write("Potential sharing between "
329 + as1.getDisjointAnalysisId() + " and "
330 + as2.getDisjointAnalysisId() + ".\n");
331 bw.write(prettyPrintNodeSet(common) + "\n");
337 outerChecked.add(as1);
340 if (!foundSomeSharing) {
341 bw.write("No sharing classes between flagged objects found.\n");
343 bw.write("\nNumber sharing classes: "+numSharing);
346 bw.write("Number of methods analyzed: "+numMethodsAnalyzed()+"\n");
// Accessors for the fabricated allocation sites the analysis creates to
// model command-line-args and string-literal allocations (see the field
// comments near constructedCmdLineArgsNew). Closing braces elided in listing.
353 public Alloc getCmdLineArgsAlloc() {
354 return getAllocationSiteFromFlatNew( constructedCmdLineArgsNew );
356 public Alloc getCmdLineArgAlloc() {
357 return getAllocationSiteFromFlatNew( constructedCmdLineArgNew );
359 public Alloc getCmdLineArgBytesAlloc() {
360 return getAllocationSiteFromFlatNew( constructedCmdLineArgBytesNew );
362 public Alloc getNewStringLiteralAlloc() {
363 return newStringLiteralAlloc;
365 public Alloc getNewStringLiteralBytesAlloc() {
366 return newStringLiteralBytesAlloc;
369 ///////////////////////////////////////////
371 // end public interface
373 ///////////////////////////////////////////
// Guard used by every public query method: fail loudly if a client grabs
// results before the interprocedural fixed point has been reached.
377 protected void checkAnalysisComplete() {
378 if( !analysisComplete ) {
379 throw new Error("Warning: public interface method called while analysis is running.");
388 // run in faster mode, only when bugs wrung out!
389 public static boolean releaseMode;
391 // use command line option to set this, analysis
392 // should attempt to be deterministic
393 public static boolean determinismDesired;
395 // when we want to enforce determinism in the
396 // analysis we need to sort descriptors rather
397 // than toss them in efficient sets, use this
398 public static DescriptorComparator dComp =
399 new DescriptorComparator();
402 // data from the compiler
404 public CallGraph callGraph;
405 public Liveness liveness;
406 public ArrayReferencees arrayReferencees;
407 public RBlockRelationAnalysis rblockRel;
408 public TypeUtil typeUtil;
409 public int allocationDepth;
411 protected boolean doEffectsAnalysis = false;
412 protected EffectsAnalysis effectsAnalysis;
413 protected BuildStateMachines buildStateMachines;
416 // data structure for public interface
417 private Hashtable< Descriptor, HashSet<AllocSite> >
418 mapDescriptorToAllocSiteSet;
421 // for public interface methods to warn that they
422 // are grabbing results during analysis
423 private boolean analysisComplete;
426 // used to identify HeapRegionNode objects
427 // A unique ID equates an object in one
428 // ownership graph with an object in another
429 // graph that logically represents the same
431 // start at 10 and increment to reserve some
432 // IDs for special purposes
433 static protected int uniqueIDcount = 10;
436 // An out-of-scope method created by the
437 // analysis that has no parameters, and
438 // appears to allocate the command line
439 // arguments, then invoke the source code's
440 // main method. The purpose of this is to
441 // provide the analysis with an explicit
442 // top-level context with no parameters
443 protected MethodDescriptor mdAnalysisEntry;
444 protected FlatMethod fmAnalysisEntry;
446 // main method defined by source program
447 protected MethodDescriptor mdSourceEntry;
449 // the set of task and/or method descriptors
450 // reachable in call graph
451 protected Set<Descriptor>
452 descriptorsToAnalyze;
454 // current descriptors to visit in fixed-point
455 // interprocedural analysis, prioritized by
456 // dependency in the call graph
457 protected Stack<Descriptor>
458 descriptorsToVisitStack;
459 protected PriorityQueue<DescriptorQWrapper>
// NOTE(review): the queue field's name is elided in this listing; callers
// below use descriptorsToVisitQ — confirm in full source.
462 // a duplication of the above structure, but
463 // for efficient testing of inclusion
464 protected HashSet<Descriptor>
465 descriptorsToVisitSet;
467 // storage for priorities (it doesn't make sense
468 // to add a priority field to the Descriptor class), just in
470 protected Hashtable<Descriptor, Integer>
471 mapDescriptorToPriority;
473 // when analyzing a method and scheduling more:
474 // remember set of callee's enqueued for analysis
475 // so they can be put on top of the callers in
476 // the stack-visit mode
477 protected Set<Descriptor>
// NOTE(review): field name elided; usage below suggests calleesToEnqueue —
// confirm in full source.
480 // maps a descriptor to its current partial result
481 // from the intraprocedural fixed-point analysis--
482 // then the interprocedural analysis settles, this
483 // mapping will have the final results for each
485 protected Hashtable<Descriptor, ReachGraph>
486 mapDescriptorToCompleteReachGraph;
488 // maps a descriptor to its known dependents: namely
489 // methods or tasks that call the descriptor's method
490 // AND are part of this analysis (reachable from main)
491 protected Hashtable< Descriptor, Set<Descriptor> >
492 mapDescriptorToSetDependents;
494 // if the analysis client wants to flag allocation sites
495 // programmatically, it should provide a set of FlatNew
496 // statements--this may be null if unneeded
497 protected Set<FlatNew> sitesToFlag;
499 // maps each flat new to one analysis abstraction
500 // allocate site object, these exist outside reach graphs
501 protected Hashtable<FlatNew, AllocSite>
502 mapFlatNewToAllocSite;
504 // maps intergraph heap region IDs to intergraph
505 // allocation sites that created them, a redundant
506 // structure for efficiency in some operations
507 protected Hashtable<Integer, AllocSite>
// NOTE(review): field name elided; usage above suggests mapHrnIdToAllocSite —
// confirm in full source.
510 // maps a method to its initial heap model (IHM) that
511 // is the set of reachability graphs from every caller
512 // site, all merged together. The reason that we keep
513 // them separate is that any one call site's contribution
514 // to the IHM may change along the path to the fixed point
515 protected Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >
516 mapDescriptorToIHMcontributions;
518 // additionally, keep a mapping from descriptors to the
519 // merged in-coming initial context, because we want this
520 // initial context to be STRICTLY MONOTONIC
521 protected Hashtable<Descriptor, ReachGraph>
522 mapDescriptorToInitialContext;
524 // mapping of current partial results for a given node. Note that
525 // to reanalyze a method we discard all partial results because a
526 // null reach graph indicates the node needs to be visited on the
527 // way to the fixed point.
528 // The reason for a persistent mapping is so after the analysis we
529 // can ask for the graph of any node at the fixed point, but this
530 // option is only enabled with a compiler flag.
531 protected Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraphPersist;
532 protected Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraph;
535 // make the result for back edges analysis-wide STRICTLY
536 // MONOTONIC as well, but notice we use FlatNode as the
537 // key for this map: in case we want to consider other
538 // nodes as back edge's in future implementations
539 protected Hashtable<FlatNode, ReachGraph>
540 mapBackEdgeToMonotone;
543 public static final String arrayElementFieldName = "___element_";
544 static protected Hashtable<TypeDescriptor, FieldDescriptor>
// NOTE(review): field name elided; usage in allocateStructures suggests
// mapTypeToArrayField — confirm in full source.
548 protected boolean suppressOutput;
550 // for controlling DOT file output
551 protected boolean writeFinalDOTs;
552 protected boolean writeAllIncrementalDOTs;
554 // supporting DOT output--when we want to write every
555 // partial method result, keep a tally for generating
557 protected Hashtable<Descriptor, Integer>
558 mapDescriptorToNumUpdates;
560 //map task descriptor to initial task parameter
561 protected Hashtable<Descriptor, ReachGraph>
562 mapDescriptorToReachGraph;
564 protected PointerMethod pm;
566 //Keeps track of all the reach graphs at every program point
567 //DO NOT USE UNLESS YOU REALLY NEED IT
568 static protected Hashtable<FlatNode, ReachGraph> fn2rgAtEnter =
569 new Hashtable<FlatNode, ReachGraph>();
571 static protected Hashtable<FlatNode, ReachGraph> fn2rgAtExit =
572 new Hashtable<FlatNode, ReachGraph>();
575 private Hashtable<FlatCall, Descriptor> fc2enclosing;
577 Accessible accessible;
580 // we construct an entry method of flat nodes complete
581 // with a new allocation site to model the command line
582 // args creation just for the analysis, so remember that
583 // allocation site. Later in code gen we might want to
584 // know if something is pointing to the cmd line args
585 // and we can verify by checking the allocation site field.
586 protected FlatNew constructedCmdLineArgsNew;
587 protected FlatNew constructedCmdLineArgNew;
588 protected FlatNew constructedCmdLineArgBytesNew;
590 // similar to above, the runtime allocates new strings
591 // for literal nodes, so make up an alloc to model that
592 protected AllocSite newStringLiteralAlloc;
593 protected AllocSite newStringLiteralBytesAlloc;
595 // both of the above need the descriptor of the field
596 // for the String's value field to reference by the
597 // byte array from the string object
598 protected TypeDescriptor stringType;
599 protected TypeDescriptor stringBytesType;
600 protected FieldDescriptor stringBytesField;
// Build the analysis model for implicit string allocations: resolve the
// String class and its value field, then fabricate FlatNew sites for string
// literals and their backing byte arrays.
// NOTE(review): assignment targets (presumably stringType/stringBytesType at
// the two `new TypeDescriptor...` lines) and several argument lines are
// elided in this listing — confirm in full source.
603 protected void initImplicitStringsModel() {
605 ClassDescriptor cdString = typeUtil.getClass( typeUtil.StringClass );
606 assert cdString != null;
610 new TypeDescriptor( cdString );
613 new TypeDescriptor(TypeDescriptor.CHAR).makeArray( state );
// locate the String value field by symbol name
616 stringBytesField = null;
617 Iterator sFieldsItr = cdString.getFields();
618 while( sFieldsItr.hasNext() ) {
619 FieldDescriptor fd = (FieldDescriptor) sFieldsItr.next();
620 if( fd.getSymbol().equals( typeUtil.StringClassValueField ) ) {
621 stringBytesField = fd;
625 assert stringBytesField != null;
// fabricate an alloc site modeling new-string-literal allocations
628 TempDescriptor throwAway1 =
629 new TempDescriptor("stringLiteralTemp_dummy1",
632 FlatNew fnStringLiteral =
633 new FlatNew(stringType,
637 newStringLiteralAlloc
638 = getAllocSiteFromFlatNewPRIVATE( fnStringLiteral );
// and one for the literal's backing byte array
641 TempDescriptor throwAway2 =
642 new TempDescriptor("stringLiteralTemp_dummy2",
645 FlatNew fnStringLiteralBytes =
646 new FlatNew(stringBytesType,
650 newStringLiteralBytesAlloc
651 = getAllocSiteFromFlatNewPRIVATE( fnStringLiteralBytes );
657 // allocate various structures that are not local
658 // to a single class method--should be done once
// Allocate all analysis-wide containers once, before the fixed-point runs.
// Deterministic mode swaps hash containers for ordered ones where iteration
// order matters.
659 protected void allocateStructures() {
661 if( determinismDesired ) {
662 // use an ordered set
663 descriptorsToAnalyze = new TreeSet<Descriptor>(dComp);
665 // otherwise use a speedy hashset
666 descriptorsToAnalyze = new HashSet<Descriptor>();
669 mapDescriptorToCompleteReachGraph =
670 new Hashtable<Descriptor, ReachGraph>();
672 mapDescriptorToNumUpdates =
673 new Hashtable<Descriptor, Integer>();
675 mapDescriptorToSetDependents =
676 new Hashtable< Descriptor, Set<Descriptor> >();
678 mapFlatNewToAllocSite =
679 new Hashtable<FlatNew, AllocSite>();
681 mapDescriptorToIHMcontributions =
682 new Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >();
684 mapDescriptorToInitialContext =
685 new Hashtable<Descriptor, ReachGraph>();
687 mapFlatNodeToReachGraphPersist =
688 new Hashtable<FlatNode, ReachGraph>();
690 mapBackEdgeToMonotone =
691 new Hashtable<FlatNode, ReachGraph>();
693 mapHrnIdToAllocSite =
694 new Hashtable<Integer, AllocSite>();
696 mapTypeToArrayField =
697 new Hashtable <TypeDescriptor, FieldDescriptor>();
// visit structures depend on the configured interprocedural visiting mode
699 if( state.DISJOINTDVISITSTACK ||
700 state.DISJOINTDVISITSTACKEESONTOP
702 descriptorsToVisitStack =
703 new Stack<Descriptor>();
706 if( state.DISJOINTDVISITPQUE ) {
707 descriptorsToVisitQ =
708 new PriorityQueue<DescriptorQWrapper>();
711 descriptorsToVisitSet =
712 new HashSet<Descriptor>();
714 mapDescriptorToPriority =
715 new Hashtable<Descriptor, Integer>();
// NOTE(review): assignment target elided in listing; presumably
// calleesToEnqueue = — confirm in full source.
718 new HashSet<Descriptor>();
720 mapDescriptorToAllocSiteSet =
721 new Hashtable<Descriptor, HashSet<AllocSite> >();
723 mapDescriptorToReachGraph =
724 new Hashtable<Descriptor, ReachGraph>();
726 pm = new PointerMethod();
728 fc2enclosing = new Hashtable<FlatCall, Descriptor>();
733 // this analysis generates a disjoint reachability
734 // graph for every reachable method in the program
// Three constructor overloads; all delegate to init(). The first runs with
// output enabled and no BuildStateMachines; the second adds suppressOutput;
// the third adds a BuildStateMachines client.
// NOTE(review): intermediate parameter lines (tu, cg, l, ar) are elided in
// this listing — confirm in full source.
735 public DisjointAnalysis(State s,
740 Set<FlatNew> sitesToFlag,
741 RBlockRelationAnalysis rra
743 init(s, tu, cg, l, ar, sitesToFlag, rra, null, false);
746 public DisjointAnalysis(State s,
751 Set<FlatNew> sitesToFlag,
752 RBlockRelationAnalysis rra,
753 boolean suppressOutput
755 init(s, tu, cg, l, ar, sitesToFlag, rra, null, suppressOutput);
758 public DisjointAnalysis(State s,
763 Set<FlatNew> sitesToFlag,
764 RBlockRelationAnalysis rra,
765 BuildStateMachines bsm,
766 boolean suppressOutput
768 init(s, tu, cg, l, ar, sitesToFlag, rra, bsm, suppressOutput);
// Shared constructor body: record compiler inputs, configure static
// ReachGraph/EffectsAnalysis state from flags, run the interprocedural
// fixed point, then emit timing and the requested output artifacts.
// NOTE(review): some parameter lines and the try { around analyzeMethods()
// are elided in this listing — confirm in full source.
771 protected void init(State state,
775 ArrayReferencees arrayReferencees,
776 Set<FlatNew> sitesToFlag,
777 RBlockRelationAnalysis rra,
778 BuildStateMachines bsm,
779 boolean suppressOutput
782 analysisComplete = false;
785 this.typeUtil = typeUtil;
786 this.callGraph = callGraph;
787 this.liveness = liveness;
788 this.arrayReferencees = arrayReferencees;
789 this.sitesToFlag = sitesToFlag;
790 this.rblockRel = rra;
791 this.suppressOutput = suppressOutput;
792 this.buildStateMachines = bsm;
// an RBlock relation implies an OoOJava client: enable effects analysis
794 if( rblockRel != null ) {
795 doEffectsAnalysis = true;
796 effectsAnalysis = new EffectsAnalysis();
798 EffectsAnalysis.state = state;
799 EffectsAnalysis.buildStateMachines = buildStateMachines;
801 //note: instead of reachgraph's isAccessible, using the result of accessible analysis
802 //since accessible gives us more accurate results
803 accessible=new Accessible(state, callGraph, rra, liveness);
804 accessible.doAnalysis();
807 this.allocationDepth = state.DISJOINTALLOCDEPTH;
808 this.releaseMode = state.DISJOINTRELEASEMODE;
809 this.determinismDesired = state.DISJOINTDETERMINISM;
811 this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL;
812 this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL;
// debug-snapshot configuration, driven entirely by compiler flags
814 this.takeDebugSnapshots = state.DISJOINTSNAPSYMBOL != null;
815 this.descSymbolDebug = state.DISJOINTSNAPSYMBOL;
816 this.visitStartCapture = state.DISJOINTSNAPVISITTOSTART;
817 this.numVisitsToCapture = state.DISJOINTSNAPNUMVISITS;
818 this.stopAfterCapture = state.DISJOINTSNAPSTOPAFTER;
819 this.snapVisitCounter = 1; // count visits from 1 (user will write 1, means 1st visit)
820 this.snapNodeCounter = 0; // count nodes from 0
// exactly one descriptor-visiting mode must be selected
823 state.DISJOINTDVISITSTACK ||
824 state.DISJOINTDVISITPQUE ||
825 state.DISJOINTDVISITSTACKEESONTOP;
826 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITPQUE);
827 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITSTACKEESONTOP);
828 assert !(state.DISJOINTDVISITPQUE && state.DISJOINTDVISITSTACKEESONTOP);
830 // set some static configuration for ReachGraphs
831 ReachGraph.allocationDepth = allocationDepth;
832 ReachGraph.typeUtil = typeUtil;
833 ReachGraph.state = state;
835 ReachGraph.initOutOfScopeTemps();
837 ReachGraph.debugCallSiteVisitStartCapture
838 = state.DISJOINTDEBUGCALLVISITTOSTART;
840 ReachGraph.debugCallSiteNumVisitsToCapture
841 = state.DISJOINTDEBUGCALLNUMVISITS;
843 ReachGraph.debugCallSiteStopAfter
844 = state.DISJOINTDEBUGCALLSTOPAFTER;
846 ReachGraph.debugCallSiteVisitCounter
847 = 0; // count visits from 1, is incremented before first visit
// NOTE(review): prints the suppression notice when output IS suppressed —
// looks intentional (a single banner), but verify the condition isn't inverted.
851 if( suppressOutput ) {
852 System.out.println("* Running disjoint reachability analysis with output suppressed! *");
856 allocateStructures();
858 initImplicitStringsModel();
862 double timeStartAnalysis = (double) System.nanoTime();
864 // start interprocedural fixed-point computation
867 } catch( IOException e ) {
868 throw new Error("IO Exception while writing disjointness analysis output.");
871 analysisComplete=true;
873 double timeEndAnalysis = (double) System.nanoTime();
874 double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow(10.0, 9.0) );
877 if( sitesToFlag != null ) {
878 treport = String.format("Disjoint reachability analysis flagged %d sites and took %.3f sec.", sitesToFlag.size(), dt);
879 if(sitesToFlag.size()>0) {
880 treport+="\nFlagged sites:"+"\n"+sitesToFlag.toString();
883 treport = String.format("Disjoint reachability analysis took %.3f sec.", dt);
885 String justtime = String.format("%.2f", dt);
886 System.out.println(treport);
// post-analysis output artifacts, each behind its compiler flag
890 if( writeFinalDOTs && !writeAllIncrementalDOTs ) {
894 if( state.DISJOINTWRITEIHMS ) {
898 if( state.DISJOINTWRITEINITCONTEXTS ) {
899 writeInitialContexts();
902 if( state.DISJOINT_WRITE_ALL_NODE_FINAL_GRAPHS ) {
903 writeFinalGraphsForEveryNode();
906 if( state.DISJOINTALIASFILE != null && !suppressOutput ) {
908 writeAllSharing(state.DISJOINTALIASFILE, treport, justtime, state.DISJOINTALIASTAB, state.lines);
910 writeAllSharingJava(state.DISJOINTALIASFILE,
913 state.DISJOINTALIASTAB,
920 buildStateMachines.writeStateMachines();
923 } catch( IOException e ) {
924 throw new Error("IO Exception while writing disjointness analysis output.");
// True while the work structure for the configured visiting mode
// (stack or priority queue) still holds descriptors to analyze.
929 protected boolean moreDescriptorsToVisit() {
930 if( state.DISJOINTDVISITSTACK ||
931 state.DISJOINTDVISITSTACKEESONTOP
933 return !descriptorsToVisitStack.isEmpty();
935 } else if( state.DISJOINTDVISITPQUE ) {
936 return !descriptorsToVisitQ.isEmpty();
939 throw new Error("Neither descriptor visiting mode set");
943 // fixed-point computation over the call graph--when a
944 // method's callees are updated, it must be reanalyzed
// Interprocedural fixed point over the call graph: seed the analyzable set
// from task roots (Bamboo mode) or main (Java mode), schedule descriptors by
// the configured visiting mode, and reanalyze until no complete reach graph
// changes. When a method's graph changes, its dependents are re-enqueued.
945 protected void analyzeMethods() throws java.io.IOException {
947 // task or non-task (java) mode determines what the roots
948 // of the call chain are, and establishes the set of methods
949 // reachable from the roots that will be analyzed
952 if( !suppressOutput ) {
953 System.out.println("Bamboo mode...");
956 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
957 while( taskItr.hasNext() ) {
958 TaskDescriptor td = (TaskDescriptor) taskItr.next();
959 if( !descriptorsToAnalyze.contains(td) ) {
960 // add all methods transitively reachable from the
962 descriptorsToAnalyze.add(td);
963 descriptorsToAnalyze.addAll(callGraph.getAllMethods(td) );
968 if( !suppressOutput ) {
969 System.out.println("Java mode...");
972 // add all methods transitively reachable from the
973 // source's main to set for analysis
974 mdSourceEntry = typeUtil.getMain();
975 descriptorsToAnalyze.add(mdSourceEntry);
976 descriptorsToAnalyze.addAll(callGraph.getAllMethods(mdSourceEntry) );
978 // fabricate an empty calling context that will call
979 // the source's main, but call graph doesn't know
980 // about it, so explicitly add it
981 makeAnalysisEntryMethod(mdSourceEntry);
982 descriptorsToAnalyze.add(mdAnalysisEntry);
987 // now, depending on the interprocedural mode for visiting
988 // methods, set up the needed data structures
990 if( state.DISJOINTDVISITPQUE ) {
992 // topologically sort according to the call graph so
993 // leaf calls are last, helps build contexts up first
994 LinkedList<Descriptor> sortedDescriptors =
995 topologicalSort(descriptorsToAnalyze);
997 // add sorted descriptors to priority queue, and duplicate
998 // the queue as a set for efficiently testing whether some
999 // method is marked for analysis
1001 Iterator<Descriptor> dItr;
1003 // for the priority queue, give items at the head
1004 // of the sorted list a low number (highest priority)
1005 while( !sortedDescriptors.isEmpty() ) {
1006 Descriptor d = sortedDescriptors.removeFirst();
1007 mapDescriptorToPriority.put(d, new Integer(p) );
1008 descriptorsToVisitQ.add(new DescriptorQWrapper(p, d) );
1009 descriptorsToVisitSet.add(d);
1013 } else if( state.DISJOINTDVISITSTACK ||
1014 state.DISJOINTDVISITSTACKEESONTOP
1016 // if we're doing the stack scheme, just throw the root
1017 // method or tasks on the stack
1019 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
1020 while( taskItr.hasNext() ) {
1021 TaskDescriptor td = (TaskDescriptor) taskItr.next();
1022 descriptorsToVisitStack.add(td);
1023 descriptorsToVisitSet.add(td);
1027 descriptorsToVisitStack.add(mdAnalysisEntry);
1028 descriptorsToVisitSet.add(mdAnalysisEntry);
1032 throw new Error("Unknown method scheduling mode");
1036 // analyze scheduled methods until there are no more to visit
1037 while( moreDescriptorsToVisit() ) {
1038 Descriptor d = null;
1040 if( state.DISJOINTDVISITSTACK ||
1041 state.DISJOINTDVISITSTACKEESONTOP
1043 d = descriptorsToVisitStack.pop();
1045 } else if( state.DISJOINTDVISITPQUE ) {
1046 d = descriptorsToVisitQ.poll().getDescriptor();
1049 assert descriptorsToVisitSet.contains(d);
1050 descriptorsToVisitSet.remove(d);
1052 // because the task or method descriptor just extracted
1053 // was in the "to visit" set it either hasn't been analyzed
1054 // yet, or some method that it depends on has been
1055 // updated. Recompute a complete reachability graph for
1056 // this task/method and compare it to any previous result.
1057 // If there is a change detected, add any methods/tasks
1058 // that depend on this one to the "to visit" set.
1060 if( !suppressOutput ) {
1061 System.out.println("Analyzing " + d);
1064 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1065 assert calleesToEnqueue.isEmpty();
1068 ReachGraph rg = analyzeMethod(d);
1069 ReachGraph rgPrev = getPartial(d);
1071 if( !rg.equals(rgPrev) ) {
1074 if( state.DISJOINTDEBUGSCHEDULING ) {
1075 System.out.println(" complete graph changed, scheduling callers for analysis:");
1078 // results for d changed, so enqueue dependents
1079 // of d for further analysis
1080 Iterator<Descriptor> depsItr = getDependents(d).iterator();
1081 while( depsItr.hasNext() ) {
1082 Descriptor dNext = depsItr.next();
1085 if( state.DISJOINTDEBUGSCHEDULING ) {
1086 System.out.println(" "+dNext);
1091 // whether or not the method under analysis changed,
1092 // we may have some callees that are scheduled for
1093 // more analysis, and they should go on the top of
1094 // the stack now (in other method-visiting modes they
1095 // are already enqueued at this point
1096 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1097 Iterator<Descriptor> depsItr = calleesToEnqueue.iterator();
1098 while( depsItr.hasNext() ) {
1099 Descriptor dNext = depsItr.next();
1102 calleesToEnqueue.clear();
1108 protected ReachGraph analyzeMethod(Descriptor d)
1109 throws java.io.IOException {
1111 // get the flat code for this descriptor
1113 if( d == mdAnalysisEntry ) {
1114 fm = fmAnalysisEntry;
1116 fm = state.getMethodFlat(d);
1118 pm.analyzeMethod(fm);
1120 // intraprocedural work set
1121 Set<FlatNode> flatNodesToVisit = new HashSet<FlatNode>();
1122 flatNodesToVisit.add(fm);
1124 // if determinism is desired by client, shadow the
1125 // set with a queue to make visit order deterministic
1126 Queue<FlatNode> flatNodesToVisitQ = null;
1127 if( determinismDesired ) {
1128 flatNodesToVisitQ = new LinkedList<FlatNode>();
1129 flatNodesToVisitQ.add(fm);
1132 // start a new mapping of partial results
1133 mapFlatNodeToReachGraph =
1134 new Hashtable<FlatNode, ReachGraph>();
1136 // the set of return nodes partial results that will be combined as
1137 // the final, conservative approximation of the entire method
1138 HashSet<FlatReturnNode> setReturns = new HashSet<FlatReturnNode>();
1142 boolean snapThisMethod = false;
1143 if( takeDebugSnapshots && d instanceof MethodDescriptor ) {
1144 MethodDescriptor mdThisMethod = (MethodDescriptor)d;
1145 ClassDescriptor cdThisMethod = mdThisMethod.getClassDesc();
1146 if( cdThisMethod != null ) {
1148 descSymbolDebug.equals( cdThisMethod.getSymbol()+
1150 mdThisMethod.getSymbol()
1157 while( !flatNodesToVisit.isEmpty() ) {
1160 if( determinismDesired ) {
1161 assert !flatNodesToVisitQ.isEmpty();
1162 fn = flatNodesToVisitQ.remove();
1164 fn = flatNodesToVisit.iterator().next();
1166 flatNodesToVisit.remove(fn);
1168 // effect transfer function defined by this node,
1169 // then compare it to the old graph at this node
1170 // to see if anything was updated.
1172 ReachGraph rg = new ReachGraph();
1173 TaskDescriptor taskDesc;
1174 if(fn instanceof FlatMethod && (taskDesc=((FlatMethod)fn).getTask())!=null) {
1175 if(mapDescriptorToReachGraph.containsKey(taskDesc)) {
1176 // retrieve existing reach graph if it is not first time
1177 rg=mapDescriptorToReachGraph.get(taskDesc);
1179 // create initial reach graph for a task
1180 rg=createInitialTaskReachGraph((FlatMethod)fn);
1182 mapDescriptorToReachGraph.put(taskDesc, rg);
1186 // start by merging all node's parents' graphs
1187 for( int i = 0; i < pm.numPrev(fn); ++i ) {
1188 FlatNode pn = pm.getPrev(fn,i);
1189 if( mapFlatNodeToReachGraph.containsKey(pn) ) {
1190 ReachGraph rgParent = mapFlatNodeToReachGraph.get(pn);
1196 if( snapThisMethod ) {
1197 debugSnapshot(rg, fn, true);
1201 // modify rg with appropriate transfer function
1202 rg = analyzeFlatNode(d, fm, fn, setReturns, rg);
1205 if( snapThisMethod ) {
1206 debugSnapshot(rg, fn, false);
1211 // if the results of the new graph are different from
1212 // the current graph at this node, replace the graph
1213 // with the update and enqueue the children
1214 ReachGraph rgPrev = mapFlatNodeToReachGraph.get(fn);
1215 if( !rg.equals(rgPrev) ) {
1216 mapFlatNodeToReachGraph.put(fn, rg);
1218 // we don't necessarily want to keep the reach graph for every
1219 // node in the program unless a client or the user wants it
1220 if( state.DISJOINT_WRITE_ALL_NODE_FINAL_GRAPHS ) {
1221 mapFlatNodeToReachGraphPersist.put(fn, rg);
1224 for( int i = 0; i < pm.numNext(fn); i++ ) {
1225 FlatNode nn = pm.getNext(fn, i);
1227 flatNodesToVisit.add(nn);
1228 if( determinismDesired ) {
1229 flatNodesToVisitQ.add(nn);
1236 // end by merging all return nodes into a complete
1237 // reach graph that represents all possible heap
1238 // states after the flat method returns
1239 ReachGraph completeGraph = new ReachGraph();
1241 if( setReturns.isEmpty() ) {
1242 System.out.println( "d = "+d );
1245 assert !setReturns.isEmpty();
1246 Iterator retItr = setReturns.iterator();
1247 while( retItr.hasNext() ) {
1248 FlatReturnNode frn = (FlatReturnNode) retItr.next();
1250 assert mapFlatNodeToReachGraph.containsKey(frn);
1251 ReachGraph rgRet = mapFlatNodeToReachGraph.get(frn);
1253 completeGraph.merge(rgRet);
1257 if( snapThisMethod ) {
1258 // increment that we've visited the debug snap
1259 // method, and reset the node counter
1260 System.out.println(" @@@ debug snap at visit "+snapVisitCounter);
1262 snapNodeCounter = 0;
1264 if( snapVisitCounter == visitStartCapture + numVisitsToCapture &&
1267 System.out.println("!!! Stopping analysis after debug snap captures. !!!");
1273 return completeGraph;
// NOTE(review): numbered listing excerpt with interior lines missing
// (braces, else-branches, some statements); not compilable as-is.
//
// Per-node transfer function of the analysis: dispatch on the flat node
// kind and update the incoming reach graph 'rg' accordingly (assignments,
// field/element reads and writes, allocation, SESE enter/exit, calls,
// returns). Also records the graph at node entry/exit and, when effects
// analysis is enabled, injects stall-site taints and accessibility info.
1277 protected ReachGraph
1278 analyzeFlatNode(Descriptor d,
1279 FlatMethod fmContaining,
1281 HashSet<FlatReturnNode> setRetNodes,
1283 ) throws java.io.IOException {
1286 // any variables that are no longer live should be
1287 // nullified in the graph to reduce edges
1288 //rg.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) );
1292 FieldDescriptor fld;
1293 TypeDescriptor tdElement;
1294 FieldDescriptor fdElement;
1295 FlatSESEEnterNode sese;
1296 FlatSESEExitNode fsexn;
1298 //Stores the flatnode's reach graph at enter
1299 ReachGraph rgOnEnter = new ReachGraph();
1300 rgOnEnter.merge(rg);
1301 fn2rgAtEnter.put(fn, rgOnEnter);
1305 // use node type to decide what transfer function
1306 // to apply to the reachability graph
1307 switch( fn.kind() ) {
1309 case FKind.FlatGenReachNode: {
// debug hook: dump the current graph at a user-marked program point
1310 FlatGenReachNode fgrn = (FlatGenReachNode) fn;
1312 System.out.println(" Generating reach graph for program point: "+fgrn.getGraphName() );
1315 rg.writeGraph("genReach"+fgrn.getGraphName(),
1316 true, // write labels (variables)
1317 true, // selectively hide intermediate temp vars
1318 true, // prune unreachable heap regions
1319 false, // hide reachability altogether
1320 true, // hide subset reachability states
1321 true, // hide predicates
1322 true); //false); // hide edge taints
1326 case FKind.FlatMethod: {
1327 // construct this method's initial heap model (IHM)
1328 // since we're working on the FlatMethod, we know
1329 // the incoming ReachGraph 'rg' is empty
1331 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1332 getIHMcontributions(d);
1334 Set entrySet = heapsFromCallers.entrySet();
1335 Iterator itr = entrySet.iterator();
1336 while( itr.hasNext() ) {
1337 Map.Entry me = (Map.Entry)itr.next();
1338 FlatCall fc = (FlatCall) me.getKey();
1339 ReachGraph rgContrib = (ReachGraph) me.getValue();
1341 // note that "fc.getMethod()" like (Object.toString)
1342 // might not be equal to "d" like (String.toString)
1343 // because the mapping gets set up when we resolve
1345 rg.merge(rgContrib);
1348 // additionally, we are enforcing STRICT MONOTONICITY for the
1349 // method's initial context, so grow the context by whatever
1350 // the previously computed context was, and put the most
1351 // up-to-date context back in the map
1352 ReachGraph rgPrevContext = mapDescriptorToInitialContext.get(d);
1353 rg.merge(rgPrevContext);
1354 mapDescriptorToInitialContext.put(d, rg);
1358 case FKind.FlatOpNode:
1359 FlatOpNode fon = (FlatOpNode) fn;
1360 if( fon.getOp().getOp() == Operation.ASSIGN ) {
1361 lhs = fon.getDest();
1362 rhs = fon.getLeft();
1364 // before transfer, do effects analysis support
1365 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1366 if(rblockRel.isPotentialStallSite(fn)) {
1367 // x gets status of y
1368 if(!accessible.isAccessible(fn, rhs)) {
1369 rg.makeInaccessible(lhs);
1375 rg.assignTempXEqualToTempY(lhs, rhs);
1379 case FKind.FlatCastNode:
1380 FlatCastNode fcn = (FlatCastNode) fn;
1384 TypeDescriptor td = fcn.getType();
1387 // before transfer, do effects analysis support
1388 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1389 if(rblockRel.isPotentialStallSite(fn)) {
1390 // x gets status of y
1391 if(!accessible.isAccessible(fn,rhs)) {
1392 rg.makeInaccessible(lhs);
1398 rg.assignTempXEqualToCastedTempY(lhs, rhs, td);
1401 case FKind.FlatFieldNode:
1402 FlatFieldNode ffn = (FlatFieldNode) fn;
1406 fld = ffn.getField();
1408 // before graph transform, possible inject
1409 // a stall-site taint
1410 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1412 if(rblockRel.isPotentialStallSite(fn)) {
1413 // x=y.f, stall y if not accessible
1414 // contributes read effects on stall site of y
1415 if(!accessible.isAccessible(fn,rhs)) {
1416 rg.taintStallSite(fn, rhs);
1419 // after this, x and y are accessible.
1420 rg.makeAccessible(lhs);
1421 rg.makeAccessible(rhs);
1425 if( shouldAnalysisTrack(fld.getType() ) ) {
1427 rg.assignTempXEqualToTempYFieldF(lhs, rhs, fld, fn);
1430 // after transfer, use updated graph to
1431 // do effects analysis
1432 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1433 effectsAnalysis.analyzeFlatFieldNode(rg, rhs, fld, fn);
1437 case FKind.FlatSetFieldNode:
1438 FlatSetFieldNode fsfn = (FlatSetFieldNode) fn;
1440 lhs = fsfn.getDst();
1441 fld = fsfn.getField();
1442 rhs = fsfn.getSrc();
1444 boolean strongUpdate = false;
1446 // before transfer func, possibly inject
1447 // stall-site taints
1448 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1450 if(rblockRel.isPotentialStallSite(fn)) {
1451 // x.y=f , stall x and y if they are not accessible
1452 // also contribute write effects on stall site of x
1453 if(!accessible.isAccessible(fn,lhs)) {
1454 rg.taintStallSite(fn, lhs);
1457 if(!accessible.isAccessible(fn,rhs)) {
1458 rg.taintStallSite(fn, rhs);
1461 // accessible status update
1462 rg.makeAccessible(lhs);
1463 rg.makeAccessible(rhs);
1467 if( shouldAnalysisTrack(fld.getType() ) ) {
// transfer may report a strong (destructive) update for effects below
1469 strongUpdate = rg.assignTempXFieldFEqualToTempY(lhs, fld, rhs, fn);
1472 // use transformed graph to do effects analysis
1473 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1474 effectsAnalysis.analyzeFlatSetFieldNode(rg, lhs, fld, fn, strongUpdate);
1478 case FKind.FlatElementNode:
// array read x = y[i] is modeled as a read of a pseudo element field
1479 FlatElementNode fen = (FlatElementNode) fn;
1484 assert rhs.getType() != null;
1485 assert rhs.getType().isArray();
1487 tdElement = rhs.getType().dereference();
1488 fdElement = getArrayField(tdElement);
1490 // before transfer func, possibly inject
1492 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1493 if(rblockRel.isPotentialStallSite(fn)) {
1494 // x=y.f, stall y if not accessible
1495 // contributes read effects on stall site of y
1496 // after this, x and y are accessible.
1497 if(!accessible.isAccessible(fn,rhs)) {
1498 rg.taintStallSite(fn, rhs);
1501 rg.makeAccessible(lhs);
1502 rg.makeAccessible(rhs);
1506 if( shouldAnalysisTrack(lhs.getType() ) ) {
1508 rg.assignTempXEqualToTempYFieldF(lhs, rhs, fdElement, fn);
1511 // use transformed graph to do effects analysis
1512 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1513 effectsAnalysis.analyzeFlatFieldNode(rg, rhs, fdElement, fn);
1517 case FKind.FlatSetElementNode:
// array write x[i] = y, modeled as a write of the pseudo element field
1518 FlatSetElementNode fsen = (FlatSetElementNode) fn;
1520 lhs = fsen.getDst();
1521 rhs = fsen.getSrc();
1523 assert lhs.getType() != null;
1524 assert lhs.getType().isArray();
1526 tdElement = lhs.getType().dereference();
1527 fdElement = getArrayField(tdElement);
1529 // before transfer func, possibly inject
1530 // stall-site taints
1531 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1533 if(rblockRel.isPotentialStallSite(fn)) {
1534 // x.y=f , stall x and y if they are not accessible
1535 // also contribute write effects on stall site of x
1536 if(!accessible.isAccessible(fn,lhs)) {
1537 rg.taintStallSite(fn, lhs);
1540 if(!accessible.isAccessible(fn,rhs)) {
1541 rg.taintStallSite(fn, rhs);
1544 // accessible status update
1545 rg.makeAccessible(lhs);
1546 rg.makeAccessible(rhs);
1550 if( shouldAnalysisTrack(rhs.getType() ) ) {
1551 // transfer func, BUT
1552 // skip this node if it cannot create new reachability paths
1553 if( !arrayReferencees.doesNotCreateNewReaching(fsen) ) {
1554 rg.assignTempXFieldFEqualToTempY(lhs, fdElement, rhs, fn);
1558 // use transformed graph to do effects analysis
1559 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1560 effectsAnalysis.analyzeFlatSetFieldNode(rg, lhs, fdElement, fn,
// NOTE(review): case label for FlatNew is among the lines missing
// from this listing; fnn below handles object allocation.
1566 FlatNew fnn = (FlatNew) fn;
1568 if( shouldAnalysisTrack(lhs.getType() ) ) {
1569 AllocSite as = getAllocSiteFromFlatNewPRIVATE(fnn);
1571 // before transform, support effects analysis
1572 if (doEffectsAnalysis && fmContaining != fmAnalysisEntry) {
1573 if (rblockRel.isPotentialStallSite(fn)) {
1574 // after creating new object, lhs is accessible
1575 rg.makeAccessible(lhs);
1580 rg.assignTempEqualToNewAlloc(lhs, as);
1585 case FKind.FlatLiteralNode:
1586 // BIG NOTE: this transfer function is only here for
1587 // points-to information for String literals. That's it.
1588 // Effects and disjoint reachability and all of that don't
1589 // care about references to literals.
1590 FlatLiteralNode fln = (FlatLiteralNode) fn;
1592 if( fln.getType().equals( stringType ) ) {
1593 rg.assignTempEqualToStringLiteral( fln.getDst(),
1594 newStringLiteralAlloc,
1595 newStringLiteralBytesAlloc,
1601 case FKind.FlatSESEEnterNode:
1602 sese = (FlatSESEEnterNode) fn;
1604 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1606 // always remove ALL stall site taints at enter
1607 rg.removeAllStallSiteTaints();
1609 // inject taints for in-set vars
1610 rg.taintInSetVars(sese);
1615 case FKind.FlatSESEExitNode:
1616 fsexn = (FlatSESEExitNode) fn;
1617 sese = fsexn.getFlatEnter();
1619 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1621 // @ sese exit make all live variables
1622 // inaccessible to later parent statements
1623 rg.makeInaccessible(liveness.getLiveInTemps(fmContaining, fn) );
1625 // always remove ALL stall site taints at exit
1626 rg.removeAllStallSiteTaints();
1628 // remove in-set var taints for the exiting rblock
1629 rg.removeInContextTaints(sese);
1634 case FKind.FlatCall: {
// interprocedural transfer: build callee views of the heap, schedule
// unanalyzed/changed callees, and merge results from all resolutions
1635 Descriptor mdCaller;
1636 if( fmContaining.getMethod() != null ) {
1637 mdCaller = fmContaining.getMethod();
1639 mdCaller = fmContaining.getTask();
1641 FlatCall fc = (FlatCall) fn;
1642 MethodDescriptor mdCallee = fc.getMethod();
1643 FlatMethod fmCallee = state.getMethodFlat(mdCallee);
1653 // the transformation for a call site should update the
1654 // current heap abstraction with any effects from the callee,
1655 // or if the method is virtual, the effects from any possible
1656 // callees, so find the set of callees...
1657 Set<MethodDescriptor> setPossibleCallees;
1658 if( determinismDesired ) {
1659 // use an ordered set
1660 setPossibleCallees = new TreeSet<MethodDescriptor>(dComp);
1662 // otherwise use a speedy hashset
1663 setPossibleCallees = new HashSet<MethodDescriptor>();
1666 if( mdCallee.isStatic() ) {
1667 setPossibleCallees.add(mdCallee);
1669 TypeDescriptor typeDesc = fc.getThis().getType();
1670 setPossibleCallees.addAll(callGraph.getMethods(mdCallee,
1676 DebugCallSiteData dcsd = new DebugCallSiteData();
1678 ReachGraph rgMergeOfPossibleCallers = new ReachGraph();
1681 Iterator<MethodDescriptor> mdItr = setPossibleCallees.iterator();
1682 while( mdItr.hasNext() ) {
1683 MethodDescriptor mdPossible = mdItr.next();
1684 FlatMethod fmPossible = state.getMethodFlat(mdPossible);
1686 addDependent(mdPossible, // callee
1690 // decide for each possible resolution of the method whether we
1691 // want to debug this call site
1692 decideDebugCallSite( dcsd, mdCaller, mdPossible );
1696 // calculate the heap this call site can reach--note this is
1697 // not used for the current call site transform, we are
1698 // grabbing this heap model for future analysis of the callees,
1699 // so if different results emerge we will return to this site
1700 ReachGraph heapForThisCall_old =
1701 getIHMcontribution(mdPossible, fc);
1703 // the computation of the callee-reachable heap
1704 // is useful for making the callee starting point
1705 // and for applying the call site transfer function
1706 Set<Integer> callerNodeIDsCopiedToCallee =
1707 new HashSet<Integer>();
1710 ReachGraph heapForThisCall_cur =
1711 rg.makeCalleeView(fc,
1713 callerNodeIDsCopiedToCallee,
1718 // enforce that a call site contribution can only
1719 // monotonically increase
1720 heapForThisCall_cur.merge(heapForThisCall_old);
1722 if( !heapForThisCall_cur.equals(heapForThisCall_old) ) {
1723 // if heap at call site changed, update the contribution,
1724 // and reschedule the callee for analysis
1725 addIHMcontribution(mdPossible, fc, heapForThisCall_cur);
1727 // map a FlatCall to its enclosing method/task descriptor
1728 // so we can write that info out later
1729 fc2enclosing.put(fc, mdCaller);
1731 if( state.DISJOINTDEBUGSCHEDULING ) {
1732 System.out.println("  context changed, scheduling callee: "+mdPossible);
1735 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1736 calleesToEnqueue.add(mdPossible);
1738 enqueue(mdPossible);
1745 // don't alter the working graph (rg) until we compute a
1746 // result for every possible callee, merge them all together,
1747 // then set rg to that
1748 ReachGraph rgPossibleCaller = new ReachGraph();
1749 rgPossibleCaller.merge(rg);
1751 ReachGraph rgPossibleCallee = getPartial(mdPossible);
1753 if( rgPossibleCallee == null ) {
1754 // if this method has never been analyzed just schedule it
1755 // for analysis and skip over this call site for now
1756 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1757 calleesToEnqueue.add(mdPossible);
1759 enqueue(mdPossible);
1762 if( state.DISJOINTDEBUGSCHEDULING ) {
1763 System.out.println("  callee hasn't been analyzed, scheduling: "+mdPossible);
1769 // calculate the method call transform
1770 rgPossibleCaller.resolveMethodCall(fc,
1773 callerNodeIDsCopiedToCallee,
1778 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1779 if( !accessible.isAccessible(fn, ReachGraph.tdReturn) ) {
1780 rgPossibleCaller.makeInaccessible(fc.getReturnTemp() );
1786 rgMergeOfPossibleCallers.merge(rgPossibleCaller);
1791 statusDebugCallSite( dcsd );
1795 // now that we've taken care of building heap models for
1796 // callee analysis, finish this transformation
1797 rg = rgMergeOfPossibleCallers;
1800 // jjenista: what is this? It breaks compilation
1801 // of programs with no tasks/SESEs/rblocks...
1802 //XXXXXXXXXXXXXXXXXXXXXXXXX
1803 //need to consider more
1804 if( state.OOOJAVA ) {
1805 FlatNode nextFN=fmCallee.getNext(0);
1806 if( nextFN instanceof FlatSESEEnterNode ) {
1807 FlatSESEEnterNode calleeSESE=(FlatSESEEnterNode)nextFN;
1808 if(!calleeSESE.getIsLeafSESE()) {
1809 rg.makeInaccessible(liveness.getLiveInTemps(fmContaining, fn) );
1817 case FKind.FlatReturnNode:
1818 FlatReturnNode frn = (FlatReturnNode) fn;
1819 rhs = frn.getReturnTemp();
1821 // before transfer, do effects analysis support
1822 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1823 if(!accessible.isAccessible(fn,rhs)) {
1824 rg.makeInaccessible(ReachGraph.tdReturn);
1828 if( rhs != null && shouldAnalysisTrack(rhs.getType() ) ) {
1829 rg.assignReturnEqualToTemp(rhs);
// remember this return node so analyzeMethod can merge all returns
1832 setRetNodes.add(frn);
1838 // dead variables were removed before the above transfer function
1839 // was applied, so eliminate heap regions and edges that are no
1840 // longer part of the abstractly-live heap graph, and sweep up
1841 // and reachability effects that are altered by the reduction
1842 //rg.abstractGarbageCollect();
1846 // back edges are strictly monotonic
1847 if( pm.isBackEdge(fn) ) {
1848 ReachGraph rgPrevResult = mapBackEdgeToMonotone.get(fn);
1849 rg.merge(rgPrevResult);
1850 mapBackEdgeToMonotone.put(fn, rg);
1854 ReachGraph rgOnExit = new ReachGraph();
1856 fn2rgAtExit.put(fn, rgOnExit);
1860 // at this point rg should be the correct update
1861 // by an above transfer function, or untouched if
1862 // the flat node type doesn't affect the heap
1868 // this method should generate integers strictly greater than zero!
1869 // special "shadow" regions are made from a heap region by negating the ID
// Returns a fresh heap-region node ID from the shared counter.
// NOTE(review): the listing omits the line that increments
// uniqueIDcount before the return — verify against the full source;
// the contract above requires strictly positive, unique IDs.
1871 static public Integer generateUniqueHeapRegionNodeID() {
1873 return new Integer(uniqueIDcount);
// Lazily creates and caches (in mapTypeToArrayField) a pseudo-field
// descriptor used to model all element references of an array of the
// given element type, so array reads/writes reuse the field transfer
// functions.
1878 static public FieldDescriptor getArrayField(TypeDescriptor tdElement) {
1879 FieldDescriptor fdElement = mapTypeToArrayField.get(tdElement);
1880 if( fdElement == null ) {
1881 fdElement = new FieldDescriptor(new Modifiers(Modifiers.PUBLIC),
1883 arrayElementFieldName,
1886 mapTypeToArrayField.put(tdElement, fdElement);
// Writes the final (complete) reach graph of every analyzed method or
// task to a dot/graph file named "COMPLETE<desc>" (tasks get
// "COMPLETEtask<desc>").
1893 private void writeFinalGraphs() {
1894 Set entrySet = mapDescriptorToCompleteReachGraph.entrySet();
1895 Iterator itr = entrySet.iterator();
1896 while( itr.hasNext() ) {
1897 Map.Entry me = (Map.Entry)itr.next();
1898 Descriptor d = (Descriptor) me.getKey();
1899 ReachGraph rg = (ReachGraph) me.getValue();
1902 if( d instanceof TaskDescriptor ) {
1903 graphName = "COMPLETEtask"+d;
1905 graphName = "COMPLETE"+d;
1908 rg.writeGraph(graphName,
1909 true, // write labels (variables)
1910 true, // selectively hide intermediate temp vars
1911 true, // prune unreachable heap regions
1912 true, // hide reachability altogether
1913 true, // hide subset reachability states
1914 true, // hide predicates
1915 false); // hide edge taints
// Writes each initial-heap-model (IHM) contribution — the heap a caller
// passes into a callee at a specific call site — as a separate graph
// file named after the callee, the enclosing caller, and the call.
1919 private void writeFinalIHMs() {
1920 Iterator d2IHMsItr = mapDescriptorToIHMcontributions.entrySet().iterator();
1921 while( d2IHMsItr.hasNext() ) {
1922 Map.Entry me1 = (Map.Entry)d2IHMsItr.next();
1923 Descriptor d = (Descriptor) me1.getKey();
1924 Hashtable<FlatCall, ReachGraph> IHMs = (Hashtable<FlatCall, ReachGraph>)me1.getValue();
1926 Iterator fc2rgItr = IHMs.entrySet().iterator();
1927 while( fc2rgItr.hasNext() ) {
1928 Map.Entry me2 = (Map.Entry)fc2rgItr.next();
1929 FlatCall fc = (FlatCall) me2.getKey();
1930 ReachGraph rg = (ReachGraph) me2.getValue();
1932 rg.writeGraph("IHMPARTFOR"+d+"FROM"+fc2enclosing.get(fc)+fc,
1933 true, // write labels (variables)
1934 true, // selectively hide intermediate temp vars
// NOTE(review): the next two flag comments were swapped relative to
// the writeGraph parameter order used by every other call in this
// file (prune comes before hide-reachability); labels corrected here,
// confirm against ReachGraph.writeGraph's signature.
1935 true, // prune unreachable heap regions
1936 true, // hide reachability altogether
1937 true, // hide subset reachability states
1938 false, // hide predicates
1939 true); // hide edge taints
// Writes each method's accumulated initial context (the monotonically
// grown entry heap stored in mapDescriptorToInitialContext) to a graph
// file named "INITIAL<desc>".
1944 private void writeInitialContexts() {
1945 Set entrySet = mapDescriptorToInitialContext.entrySet();
1946 Iterator itr = entrySet.iterator();
1947 while( itr.hasNext() ) {
1948 Map.Entry me = (Map.Entry)itr.next();
1949 Descriptor d = (Descriptor) me.getKey();
1950 ReachGraph rg = (ReachGraph) me.getValue();
1952 rg.writeGraph("INITIAL"+d,
1953 true, // write labels (variables)
1954 true, // selectively hide intermediate temp vars
1955 true, // prune unreachable heap regions
1956 false, // hide all reachability
1957 true, // hide subset reachability states
1958 true, // hide predicates
1959 false); // hide edge taints
// Writes the persisted per-flat-node final graphs (collected when
// state.DISJOINT_WRITE_ALL_NODE_FINAL_GRAPHS is set) to files named
// "NODEFINAL<node>".
1963 private void writeFinalGraphsForEveryNode() {
1964 Set entrySet = mapFlatNodeToReachGraphPersist.entrySet();
1965 Iterator itr = entrySet.iterator();
1966 while( itr.hasNext() ) {
1967 Map.Entry me = (Map.Entry) itr.next();
1968 FlatNode fn = (FlatNode) me.getKey();
1969 ReachGraph rg = (ReachGraph) me.getValue();
1971 rg.writeGraph("NODEFINAL"+fn,
1972 true, // write labels (variables)
1973 false, // selectively hide intermediate temp vars
1974 true, // prune unreachable heap regions
1975 true, // hide all reachability
1976 true, // hide subset reachability states
1977 true, // hide predicates
1978 true); // hide edge taints
// Returns the most recent complete reach graph computed for the given
// method/task descriptor, or null if it has never been analyzed.
1983 protected ReachGraph getPartial(Descriptor d) {
1984 return mapDescriptorToCompleteReachGraph.get(d);
// Records the latest complete reach graph for a descriptor and, when
// writeAllIncrementalDOTs is enabled, dumps a uniquely numbered graph
// file for this partial result.
1987 protected void setPartial(Descriptor d, ReachGraph rg) {
1988 mapDescriptorToCompleteReachGraph.put(d, rg);
1990 // when the flag for writing out every partial
1991 // result is set, we should spit out the graph,
1992 // but in order to give it a unique name we need
1993 // to track how many partial results for this
1994 // descriptor we've already written out
1995 if( writeAllIncrementalDOTs ) {
1996 if( !mapDescriptorToNumUpdates.containsKey(d) ) {
1997 mapDescriptorToNumUpdates.put(d, new Integer(0) );
1999 Integer n = mapDescriptorToNumUpdates.get(d);
2002 if( d instanceof TaskDescriptor ) {
2003 graphName = d+"COMPLETEtask"+String.format("%05d", n);
2005 graphName = d+"COMPLETE"+String.format("%05d", n);
2008 rg.writeGraph(graphName,
2009 true, // write labels (variables)
2010 true, // selectively hide intermediate temp vars
2011 true, // prune unreachable heap regions
2012 false, // hide all reachability
2013 true, // hide subset reachability states
2014 false, // hide predicates
2015 false); // hide edge taints
2017 mapDescriptorToNumUpdates.put(d, n + 1);
2023 // return just the allocation site associated with one FlatNew node
// Returns (creating and caching on first request) the AllocSite that
// models this FlatNew: allocationDepth single-object regions plus one
// summary region, each registered in mapHrnIdToAllocSite. The site is
// flagged programmatically when the FlatNew is in sitesToFlag.
2024 protected AllocSite getAllocSiteFromFlatNewPRIVATE(FlatNew fnew) {
2026 boolean flagProgrammatically = false;
2027 if( sitesToFlag != null && sitesToFlag.contains(fnew) ) {
2028 flagProgrammatically = true;
2031 if( !mapFlatNewToAllocSite.containsKey(fnew) ) {
2032 AllocSite as = AllocSite.factory(allocationDepth,
2034 fnew.getDisjointId(),
2035 flagProgrammatically
2038 // the newest nodes are single objects
2039 for( int i = 0; i < allocationDepth; ++i ) {
2040 Integer id = generateUniqueHeapRegionNodeID();
2041 as.setIthOldest(i, id);
2042 mapHrnIdToAllocSite.put(id, as);
2045 // the oldest node is a summary node
2046 as.setSummary(generateUniqueHeapRegionNodeID() );
2048 mapFlatNewToAllocSite.put(fnew, as);
2051 return mapFlatNewToAllocSite.get(fnew);
// Decides whether a type is heap memory this analysis must model:
// immutable/primitive values are tracked only when they are arrays
// (an array of primitives is still a heap object); everything else is
// an object reference and is tracked. NOTE(review): the listing omits
// the trailing "return true" line of the non-immutable path.
2055 public static boolean shouldAnalysisTrack(TypeDescriptor type) {
2056 // don't track primitive types, but an array
2057 // of primitives is heap memory
2058 if( type.isImmutable() ) {
2059 return type.isArray();
2062 // everything else is an object
// Number of descriptors (methods/tasks) in this analysis pass.
2066 protected int numMethodsAnalyzed() {
2067 return descriptorsToAnalyze.size();
2073 // Take in source entry which is the program's compiled entry and
2074 // create a new analysis entry, a method that takes no parameters
2075 // and appears to allocate the command line arguments and call the
2076 // source entry with them. The purpose of this analysis entry is
2077 // to provide a top-level method context with no parameters left.
// NOTE(review): numbered listing excerpt with interior lines missing;
// not compilable as-is.
//
// Fabricates a zero-parameter entry method that models the runtime's
// construction of the command-line String[] (the args array, one
// String element, and its byte array) and then calls the program's
// real entry, so interprocedural analysis starts from a context with
// no unresolved parameters. Stores the fabricated descriptors in
// mdAnalysisEntry / fmAnalysisEntry and the constructed FlatNews in
// the constructedCmdLine* fields for later identification.
2078 protected void makeAnalysisEntryMethod(MethodDescriptor mdSourceEntry) {
2080 Modifiers mods = new Modifiers();
2081 mods.addModifier(Modifiers.PUBLIC);
2082 mods.addModifier(Modifiers.STATIC);
2084 TypeDescriptor returnType = new TypeDescriptor(TypeDescriptor.VOID);
2086 this.mdAnalysisEntry =
2087 new MethodDescriptor(mods,
2089 "analysisEntryMethod"
2092 TypeDescriptor argsType = mdSourceEntry.getParamType(0);
2093 TempDescriptor cmdLineArgs =
2094 new TempDescriptor("analysisEntryTemp_args",
2098 new FlatNew(argsType,
2102 this.constructedCmdLineArgsNew = fnArgs;
2104 TypeDescriptor argType = argsType.dereference();
2105 TempDescriptor anArg =
2106 new TempDescriptor("analysisEntryTemp_arg",
2110 new FlatNew(argType,
2114 this.constructedCmdLineArgNew = fnArg;
2116 TypeDescriptor typeIndex = new TypeDescriptor(TypeDescriptor.INT);
2117 TempDescriptor index =
2118 new TempDescriptor("analysisEntryTemp_index",
2121 FlatLiteralNode fli =
2122 new FlatLiteralNode(typeIndex,
2127 FlatSetElementNode fse =
2128 new FlatSetElementNode(cmdLineArgs,
2133 TypeDescriptor typeSize = new TypeDescriptor(TypeDescriptor.INT);
2134 TempDescriptor sizeBytes =
2135 new TempDescriptor("analysisEntryTemp_size",
2138 FlatLiteralNode fls =
2139 new FlatLiteralNode(typeSize,
2144 TempDescriptor strBytes =
2145 new TempDescriptor("analysisEntryTemp_strBytes",
2149 new FlatNew(stringBytesType,
2154 this.constructedCmdLineArgBytesNew = fnBytes;
2156 FlatSetFieldNode fsf =
2157 new FlatSetFieldNode(anArg,
2162 // throw this in so you can always see what the initial heap context
2163 // looks like if you want to, its cheap
2164 FlatGenReachNode fgen = new FlatGenReachNode( "argContext" );
2166 TempDescriptor[] sourceEntryArgs = new TempDescriptor[1];
2167 sourceEntryArgs[0] = cmdLineArgs;
2169 new FlatCall(mdSourceEntry,
2175 FlatReturnNode frn = new FlatReturnNode(null);
2177 FlatExit fe = new FlatExit();
2179 this.fmAnalysisEntry =
2180 new FlatMethod(mdAnalysisEntry,
2184 List<FlatNode> nodes = new LinkedList<FlatNode>();
2185 nodes.add( fnArgs );
2190 nodes.add( fnBytes );
// chain all fabricated nodes into a straight-line control flow
2197 FlatNode current = this.fmAnalysisEntry;
2198 for( FlatNode next: nodes ) {
2199 current.addNext( next );
2204 // jjenista - this is useful for looking at the FlatIRGraph of the
2205 // analysis entry method constructed above if you have to modify it.
2206 // The usual method of writing FlatIRGraphs out doesn't work because
2207 // this flat method is private to the model of this analysis only.
2209 // FlatIRGraph flatMethodWriter =
2210 // new FlatIRGraph( state, false, false, false );
2211 // flatMethodWriter.writeFlatIRGraph( fmAnalysisEntry, "analysisEntry" );
2212 //} catch( IOException e ) {}
// Topologically sorts the given descriptors by caller->callee order
// using DFS over the call graph (see dfsVisit); uses an ordered set
// for the discovered markers when deterministic output is requested.
2216 protected LinkedList<Descriptor> topologicalSort(Set<Descriptor> toSort) {
2218 Set<Descriptor> discovered;
2220 if( determinismDesired ) {
2221 // use an ordered set
2222 discovered = new TreeSet<Descriptor>(dComp);
2224 // otherwise use a speedy hashset
2225 discovered = new HashSet<Descriptor>();
2228 LinkedList<Descriptor> sorted = new LinkedList<Descriptor>();
2230 Iterator<Descriptor> itr = toSort.iterator();
2231 while( itr.hasNext() ) {
2232 Descriptor d = itr.next();
2234 if( !discovered.contains(d) ) {
2235 dfsVisit(d, toSort, sorted, discovered);
2242 // While we're doing DFS on call graph, remember
2243 // dependencies for efficient queuing of methods
2244 // during interprocedural analysis:
2246 // a dependent of a method descriptor d for this analysis is:
2247 // 1) a method or task that invokes d
2248 // 2) in the descriptorsToAnalyze set
// DFS helper for topologicalSort: visits callers of d (restricted to
// the toSort set), records caller->callee dependency edges along the
// way via addDependent, and handles the fabricated analysis entry,
// which the real call graph does not know calls the source entry.
2249 protected void dfsVisit(Descriptor d,
2250 Set <Descriptor> toSort,
2251 LinkedList<Descriptor> sorted,
2252 Set <Descriptor> discovered) {
2255 // only methods have callers, tasks never do
2256 if( d instanceof MethodDescriptor ) {
2258 MethodDescriptor md = (MethodDescriptor) d;
2260 // the call graph is not aware that we have a fabricated
2261 // analysis entry that calls the program source's entry
2262 if( md == mdSourceEntry ) {
2263 if( !discovered.contains(mdAnalysisEntry) ) {
2264 addDependent(mdSourceEntry, // callee
2265 mdAnalysisEntry // caller
2267 dfsVisit(mdAnalysisEntry, toSort, sorted, discovered);
2271 // otherwise call graph guides DFS
2272 Iterator itr = callGraph.getCallerSet(md).iterator();
2273 while( itr.hasNext() ) {
2274 Descriptor dCaller = (Descriptor) itr.next();
2276 // only consider callers in the original set to analyze
2277 if( !toSort.contains(dCaller) ) {
2281 if( !discovered.contains(dCaller) ) {
2282 addDependent(md, // callee
2286 dfsVisit(dCaller, toSort, sorted, discovered);
2291 // for leaf-nodes last now!
// Schedules a descriptor for (re)analysis exactly once: depending on
// the configured visiting mode it goes on the visit stack or into the
// priority queue; descriptorsToVisitSet deduplicates pending entries.
2296 protected void enqueue(Descriptor d) {
2298 if( !descriptorsToVisitSet.contains(d) ) {
2300 if( state.DISJOINTDVISITSTACK ||
2301 state.DISJOINTDVISITSTACKEESONTOP
2303 descriptorsToVisitStack.add(d);
2305 } else if( state.DISJOINTDVISITPQUE ) {
2306 Integer priority = mapDescriptorToPriority.get(d);
2307 descriptorsToVisitQ.add(new DescriptorQWrapper(priority,
2312 descriptorsToVisitSet.add(d);
2317 // a dependent of a method descriptor d for this analysis is:
2318 // 1) a method or task that invokes d
2319 // 2) in the descriptorsToAnalyze set
// Records that 'caller' depends on 'callee' so callers can be
// re-enqueued when a callee's result changes; creates the dependent
// set lazily. NOTE(review): the listing omits the line that adds
// 'caller' into the set before it is stored back.
2320 protected void addDependent(Descriptor callee, Descriptor caller) {
2321 Set<Descriptor> deps = mapDescriptorToSetDependents.get(callee);
2322 if( deps == null ) {
2323 deps = new HashSet<Descriptor>();
2326 mapDescriptorToSetDependents.put(callee, deps);
// Returns the (possibly empty, lazily created) set of descriptors that
// depend on the given callee; never returns null.
2329 protected Set<Descriptor> getDependents(Descriptor callee) {
2330 Set<Descriptor> deps = mapDescriptorToSetDependents.get(callee);
2331 if( deps == null ) {
2332 deps = new HashSet<Descriptor>();
2333 mapDescriptorToSetDependents.put(callee, deps);
// Returns the per-call-site initial-heap-model contributions recorded
// for descriptor d, creating an empty table on first request; never
// returns null.
2339 public Hashtable<FlatCall, ReachGraph> getIHMcontributions(Descriptor d) {
2341 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2342 mapDescriptorToIHMcontributions.get(d);
2344 if( heapsFromCallers == null ) {
2345 heapsFromCallers = new Hashtable<FlatCall, ReachGraph>();
2346 mapDescriptorToIHMcontributions.put(d, heapsFromCallers);
2349 return heapsFromCallers;
// Returns the IHM contribution recorded for one specific call site of
// descriptor d. NOTE(review): the missing-key branch body is among the
// lines omitted by this listing — presumably it returns an empty graph
// or null; confirm against the full source.
2352 public ReachGraph getIHMcontribution(Descriptor d,
2355 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2356 getIHMcontributions(d);
2358 if( !heapsFromCallers.containsKey(fc) ) {
2362 return heapsFromCallers.get(fc);
// Replaces the IHM contribution for call site fc of descriptor d with
// the given reach graph (callers enforce monotonic growth before this).
2366 public void addIHMcontribution(Descriptor d,
2370 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2371 getIHMcontributions(d);
2373 heapsFromCallers.put(fc, rg);
// Builds a synthetic FlatNew + AllocSite to model the object a method
// parameter refers to (one region per age up to allocationDepth, plus a
// summary region), registering every region ID in mapHrnIdToAllocSite.
// The first branch (lines missing between entries — presumably gated on
// the boolean parameter; confirm) labels the site "param<temp>" so it
// is identifiable in output.
2377 private AllocSite createParameterAllocSite(ReachGraph rg,
2378 TempDescriptor tempDesc,
2384 flatNew = new FlatNew(tempDesc.getType(), // type
2385 tempDesc, // param temp
2386 false, // global alloc?
2387 "param"+tempDesc // disjoint site ID string
2390 flatNew = new FlatNew(tempDesc.getType(), // type
2391 tempDesc, // param temp
2392 false, // global alloc?
2393 null // disjoint site ID string
2397 // create allocation site
2398 AllocSite as = AllocSite.factory(allocationDepth,
2400 flatNew.getDisjointId(),
2403 for (int i = 0; i < allocationDepth; ++i) {
2404 Integer id = generateUniqueHeapRegionNodeID();
2405 as.setIthOldest(i, id);
2406 mapHrnIdToAllocSite.put(id, as);
2408 // the oldest node is a summary node
2409 as.setSummary(generateUniqueHeapRegionNodeID() );
// Collects the fields of a (non-immutable) class type whose types this
// analysis tracks (see shouldAnalysisTrack); immutable types yield an
// empty set.
2417 private Set<FieldDescriptor> getFieldSetTobeAnalyzed(TypeDescriptor typeDesc) {
2419 Set<FieldDescriptor> fieldSet=new HashSet<FieldDescriptor>();
2420 if(!typeDesc.isImmutable()) {
2421 ClassDescriptor classDesc = typeDesc.getClassDesc();
2422 for (Iterator it = classDesc.getFields(); it.hasNext(); ) {
2423 FieldDescriptor field = (FieldDescriptor) it.next();
2424 TypeDescriptor fieldType = field.getType();
2425 if (shouldAnalysisTrack(fieldType)) {
2426 fieldSet.add(field);
// Build a chain of summary heap-region nodes modeling a multi-dimensional
// array field fd hanging off srcHRN: one summary node per array dimension
// (outermost first), linked by element edges, plus (optionally) a final
// node for the element objects themselves.  Nodes are cached per element
// type in mapToExistingNode so equal-typed dimensions share one summary.
// Returns the node for the outermost dimension; map records the mapping
// from that entry node to the innermost node reached.
// (Name typo "Deimensional" is part of the public identifier; left as-is.)
2434 private HeapRegionNode createMultiDeimensionalArrayHRN(ReachGraph rg, AllocSite alloc, HeapRegionNode srcHRN, FieldDescriptor fd, Hashtable<HeapRegionNode, HeapRegionNode> map, Hashtable<TypeDescriptor, HeapRegionNode> mapToExistingNode, ReachSet alpha) {
2436 int dimCount=fd.getType().getArrayCount();
2437 HeapRegionNode prevNode=null;
2438 HeapRegionNode arrayEntryNode=null;
// walk dimensions from outermost (i == dimCount) down to 1
2439 for(int i=dimCount; i>0; i--) {
2440 TypeDescriptor typeDesc=fd.getType().dereference(); //hack to get instance of type desc
2441 typeDesc.setArrayCount(i);
2442 TempDescriptor tempDesc=new TempDescriptor(typeDesc.getSymbol(),typeDesc);
2443 HeapRegionNode hrnSummary;
2444 if(!mapToExistingNode.containsKey(typeDesc)) {
// no summary node for this dimension's type yet: create a synthetic
// allocation site and a fresh summary region for it
2449 as = createParameterAllocSite(rg, tempDesc, false);
2451 // make a new reference to allocated node
2453 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2454 false, // single object?
2456 false, // out-of-context?
2457 as.getType(), // type
2458 as, // allocation site
2459 alpha, // inherent reach
2460 alpha, // current reach
2461 ExistPredSet.factory(rg.predTrue), // predicates
2462 tempDesc.toString() // description
2464 rg.id2hrn.put(as.getSummary(),hrnSummary);
2466 mapToExistingNode.put(typeDesc, hrnSummary);
// reuse the cached summary node for this element type
2468 hrnSummary=mapToExistingNode.get(typeDesc);
2471 if(prevNode==null) {
2472 // make a new reference between new summary node and source
2473 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2476 fd.getSymbol(), // field name
2478 ExistPredSet.factory(rg.predTrue), // predicates
2482 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2483 prevNode=hrnSummary;
2484 arrayEntryNode=hrnSummary;
2486 // make a new reference between summary nodes of array
2487 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2490 arrayElementFieldName, // field name
2492 ExistPredSet.factory(rg.predTrue), // predicates
2496 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2497 prevNode=hrnSummary;
2502 // create a new obj node if obj has at least one non-primitive field
2503 TypeDescriptor type=fd.getType();
2504 if(getFieldSetTobeAnalyzed(type).size()>0) {
2505 TypeDescriptor typeDesc=type.dereference();
2506 typeDesc.setArrayCount(0);
2507 if(!mapToExistingNode.containsKey(typeDesc)) {
2508 TempDescriptor tempDesc=new TempDescriptor(type.getSymbol(),typeDesc);
2509 AllocSite as = createParameterAllocSite(rg, tempDesc, false);
2510 // make a new reference to allocated node
2511 HeapRegionNode hrnSummary =
2512 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2513 false, // single object?
2515 false, // out-of-context?
2517 as, // allocation site
2518 alpha, // inherent reach
2519 alpha, // current reach
2520 ExistPredSet.factory(rg.predTrue), // predicates
2521 tempDesc.toString() // description
2523 rg.id2hrn.put(as.getSummary(),hrnSummary);
2524 mapToExistingNode.put(typeDesc, hrnSummary);
// link innermost array dimension to the element-object summary node
2525 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2528 arrayElementFieldName, // field name
2530 ExistPredSet.factory(rg.predTrue), // predicates
2533 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2534 prevNode=hrnSummary;
// element-object summary already exists; add the edge only if absent
2536 HeapRegionNode hrnSummary=mapToExistingNode.get(typeDesc);
2537 if(prevNode.getReferenceTo(hrnSummary, typeDesc, arrayElementFieldName)==null) {
2538 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2541 arrayElementFieldName, // field name
2543 ExistPredSet.factory(rg.predTrue), // predicates
2546 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2548 prevNode=hrnSummary;
// remember entry -> innermost mapping for callers that need to continue
// building structure off the element node
2552 map.put(arrayEntryNode, prevNode);
2553 return arrayEntryNode;
// Construct the initial reach graph for a Bristlecone task: for each task
// parameter, create a synthetic parameter allocation site, point the
// parameter variable at its newest region, then transitively materialize
// summary nodes for all tracked fields (worklist over (node, field)
// pairs, de-duplicated via doneSet string identifiers).
2556 private ReachGraph createInitialTaskReachGraph(FlatMethod fm) {
2557 ReachGraph rg = new ReachGraph();
2558 TaskDescriptor taskDesc = fm.getTask();
2560 for (int idx = 0; idx < taskDesc.numParameters(); idx++) {
2561 Descriptor paramDesc = taskDesc.getParameter(idx);
2562 TypeDescriptor paramTypeDesc = taskDesc.getParamType(idx);
2564 // setup data structure
2565 Set<HashMap<HeapRegionNode, FieldDescriptor>> workSet =
2566 new HashSet<HashMap<HeapRegionNode, FieldDescriptor>>();
2567 Hashtable<TypeDescriptor, HeapRegionNode> mapTypeToExistingSummaryNode =
2568 new Hashtable<TypeDescriptor, HeapRegionNode>();
2569 Hashtable<HeapRegionNode, HeapRegionNode> mapToFirstDimensionArrayNode =
2570 new Hashtable<HeapRegionNode, HeapRegionNode>();
2571 Set<String> doneSet = new HashSet<String>();
2573 TempDescriptor tempDesc = fm.getParameter(idx);
// synthetic allocation site for the parameter object itself
2575 AllocSite as = createParameterAllocSite(rg, tempDesc, true);
2576 VariableNode lnX = rg.getVariableNodeFromTemp(tempDesc);
2577 Integer idNewest = as.getIthOldest(0);
2578 HeapRegionNode hrnNewest = rg.id2hrn.get(idNewest);
2580 // make a new reference to allocated node
2581 RefEdge edgeNew = new RefEdge(lnX, // source
2583 taskDesc.getParamType(idx), // type
2585 hrnNewest.getAlpha(), // beta
2586 ExistPredSet.factory(rg.predTrue), // predicates
2589 rg.addRefEdge(lnX, hrnNewest, edgeNew);
2591 // set-up a work set for class field
2592 ClassDescriptor classDesc = paramTypeDesc.getClassDesc();
2593 for (Iterator it = classDesc.getFields(); it.hasNext(); ) {
2594 FieldDescriptor fd = (FieldDescriptor) it.next();
2595 TypeDescriptor fieldType = fd.getType();
2596 if (shouldAnalysisTrack(fieldType)) {
2597 HashMap<HeapRegionNode, FieldDescriptor> newMap = new HashMap<HeapRegionNode, FieldDescriptor>();
2598 newMap.put(hrnNewest, fd);
2599 workSet.add(newMap);
2603 int uniqueIdentifier = 0;
// each work item is a single-entry map: (source node -> field to expand)
2604 while (!workSet.isEmpty()) {
2605 HashMap<HeapRegionNode, FieldDescriptor> map = workSet
2607 workSet.remove(map);
2609 Set<HeapRegionNode> key = map.keySet();
2610 HeapRegionNode srcHRN = key.iterator().next();
2611 FieldDescriptor fd = map.get(srcHRN);
2612 TypeDescriptor type = fd.getType();
2613 String doneSetIdentifier = srcHRN.getIDString() + "_" + fd;
2615 if (!doneSet.contains(doneSetIdentifier)) {
2616 doneSet.add(doneSetIdentifier);
2617 if (!mapTypeToExistingSummaryNode.containsKey(type)) {
2618 // create new summary Node
2619 TempDescriptor td = new TempDescriptor("temp"
2620 + uniqueIdentifier, type);
2622 AllocSite allocSite;
2623 if(type.equals(paramTypeDesc)) {
2624 //corresponding allocsite has already been created for a parameter variable.
2627 allocSite = createParameterAllocSite(rg, td, false);
2629 String strDesc = allocSite.toStringForDOT()
2631 TypeDescriptor allocType=allocSite.getType();
2633 HeapRegionNode hrnSummary;
// multi-dimensional array fields get a chain of per-dimension nodes
2634 if(allocType.isArray() && allocType.getArrayCount()>0) {
2635 hrnSummary=createMultiDeimensionalArrayHRN(rg,allocSite,srcHRN,fd,mapToFirstDimensionArrayNode,mapTypeToExistingSummaryNode,hrnNewest.getAlpha());
2638 rg.createNewHeapRegionNode(allocSite.getSummary(), // id or null to generate a new one
2639 false, // single object?
2641 false, // out-of-context?
2642 allocSite.getType(), // type
2643 allocSite, // allocation site
2644 hrnNewest.getAlpha(), // inherent reach
2645 hrnNewest.getAlpha(), // current reach
2646 ExistPredSet.factory(rg.predTrue), // predicates
2647 strDesc // description
2649 rg.id2hrn.put(allocSite.getSummary(),hrnSummary);
2651 // make a new reference to summary node
2652 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2655 fd.getSymbol(), // field name
2656 hrnNewest.getAlpha(), // beta
2657 ExistPredSet.factory(rg.predTrue), // predicates
2661 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2665 mapTypeToExistingSummaryNode.put(type, hrnSummary);
2667 // set-up a work set for fields of the class
2668 Set<FieldDescriptor> fieldTobeAnalyzed=getFieldSetTobeAnalyzed(type);
2669 for (Iterator iterator = fieldTobeAnalyzed.iterator(); iterator
2671 FieldDescriptor fieldDescriptor = (FieldDescriptor) iterator
2673 HeapRegionNode newDstHRN;
2674 if(mapToFirstDimensionArrayNode.containsKey(hrnSummary)) {
2675 // related heap region node already exists
2676 newDstHRN=mapToFirstDimensionArrayNode.get(hrnSummary);
2678 newDstHRN=hrnSummary;
2680 doneSetIdentifier = newDstHRN.getIDString() + "_" + fieldDescriptor;
2681 if(!doneSet.contains(doneSetIdentifier)) {
2682 // add new work item
2683 HashMap<HeapRegionNode, FieldDescriptor> newMap =
2684 new HashMap<HeapRegionNode, FieldDescriptor>();
2685 newMap.put(newDstHRN, fieldDescriptor);
2686 workSet.add(newMap);
2691 // if there exists corresponding summary node
2692 HeapRegionNode hrnDst=mapTypeToExistingSummaryNode.get(type);
2694 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2696 fd.getType(), // type
2697 fd.getSymbol(), // field name
2698 srcHRN.getAlpha(), // beta
2699 ExistPredSet.factory(rg.predTrue), // predicates
2702 rg.addRefEdge(srcHRN, hrnDst, edgeToSummary);
2712 // return all allocation sites in the method (there is one allocation
2713 // site per FlatNew node in a method)
// lazily builds and caches the per-descriptor set on first request
2714 private HashSet<AllocSite> getAllocationSiteSet(Descriptor d) {
2715 if( !mapDescriptorToAllocSiteSet.containsKey(d) ) {
2716 buildAllocationSiteSet(d);
2719 return mapDescriptorToAllocSiteSet.get(d);
// Traverse the flat IR of method/task d and cache the set of AllocSites
// derived from every FlatNew node it contains.
2723 private void buildAllocationSiteSet(Descriptor d) {
2724 HashSet<AllocSite> s = new HashSet<AllocSite>();
// resolve the FlatMethod for either a method or a task descriptor
2727 if( d instanceof MethodDescriptor ) {
2728 fm = state.getMethodFlat( (MethodDescriptor) d);
2730 assert d instanceof TaskDescriptor;
2731 fm = state.getMethodFlat( (TaskDescriptor) d);
2733 pm.analyzeMethod(fm);
2735 // visit every node in this FlatMethod's IR graph
2736 // and make a set of the allocation sites from the
2737 // FlatNew nodes visited
2738 HashSet<FlatNode> visited = new HashSet<FlatNode>();
2739 HashSet<FlatNode> toVisit = new HashSet<FlatNode>();
2742 while( !toVisit.isEmpty() ) {
2743 FlatNode n = toVisit.iterator().next();
2745 if( n instanceof FlatNew ) {
2746 s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) );
// enqueue unvisited successors (graph may have joins/loops, hence sets)
2752 for( int i = 0; i < pm.numNext(n); ++i ) {
2753 FlatNode child = pm.getNext(n, i);
2754 if( !visited.contains(child) ) {
2760 mapDescriptorToAllocSiteSet.put(d, s);
// Collect allocation sites carrying a disjoint-analysis ID in dIn and
// everything transitively callable from dIn (worklist over the call
// graph, cycle-safe via the visited set).
2763 private HashSet<AllocSite> getFlaggedAllocationSites(Descriptor dIn) {
2765 HashSet<AllocSite> out = new HashSet<AllocSite>();
2766 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2767 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2771 while (!toVisit.isEmpty()) {
2772 Descriptor d = toVisit.iterator().next();
2776 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2777 Iterator asItr = asSet.iterator();
2778 while (asItr.hasNext()) {
2779 AllocSite as = (AllocSite) asItr.next();
// a non-null disjoint ID marks the site as flagged for this analysis
2780 if (as.getDisjointAnalysisId() != null) {
2785 // enqueue callees of this method to be searched for
2786 // allocation sites also
2787 Set callees = callGraph.getCalleeSet(d);
2788 if (callees != null) {
2789 Iterator methItr = callees.iterator();
2790 while (methItr.hasNext()) {
2791 MethodDescriptor md = (MethodDescriptor) methItr.next();
2793 if (!visited.contains(md)) {
// Same traversal as getFlaggedAllocationSites, but starting from a task
// and selecting sites whose allocated class carries Bamboo flags
// (cd.hasFlags()) rather than sites with a disjoint ID.
2804 private HashSet<AllocSite>
2805 getFlaggedAllocationSitesReachableFromTaskPRIVATE(TaskDescriptor td) {
2807 HashSet<AllocSite> asSetTotal = new HashSet<AllocSite>();
2808 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2809 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2813 // traverse this task and all methods reachable from this task
2814 while( !toVisit.isEmpty() ) {
2815 Descriptor d = toVisit.iterator().next();
2819 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2820 Iterator asItr = asSet.iterator();
2821 while( asItr.hasNext() ) {
2822 AllocSite as = (AllocSite) asItr.next();
2823 TypeDescriptor typed = as.getType();
2824 if( typed != null ) {
2825 ClassDescriptor cd = typed.getClassDesc();
// only classes with task flags are interesting to task analysis
2826 if( cd != null && cd.hasFlags() ) {
2832 // enqueue callees of this method to be searched for
2833 // allocation sites also
2834 Set callees = callGraph.getCalleeSet(d);
2835 if( callees != null ) {
2836 Iterator methItr = callees.iterator();
2837 while( methItr.hasNext() ) {
2838 MethodDescriptor md = (MethodDescriptor) methItr.next();
2840 if( !visited.contains(md) ) {
// Accessor: the set of method/task descriptors this analysis covers.
2850 public Set<Descriptor> getDescriptorsToAnalyze() {
2851 return descriptorsToAnalyze;
// Accessor: the effects analysis companion object (may be null if
// effects were not requested -- TODO confirm in constructor).
2854 public EffectsAnalysis getEffectsAnalysis() {
2855 return effectsAnalysis;
// Accessor: the final (complete) reach graph computed for descriptor d,
// or null if d was not analyzed.
2858 public ReachGraph getReachGraph(Descriptor d) {
2859 return mapDescriptorToCompleteReachGraph.get(d);
// Accessor: the reach graph at the program point just before fn executes,
// or null if none was recorded for fn.
2862 public ReachGraph getEnterReachGraph(FlatNode fn) {
2863 return fn2rgAtEnter.get(fn);
// Mutable bag of flags threaded through one caller/callee debug session:
// whether this call site is the one being debugged, whether any debug
// output happened, whether to dump DOT graphs, and whether to halt the
// compiler after the capture window.
2868 protected class DebugCallSiteData {
2869 public boolean debugCallSite;
2870 public boolean didOneDebug;
2871 public boolean writeDebugDOTs;
2872 public boolean stopAfter;
// all flags start false (stopAfter initialization elided in listing)
2874 public DebugCallSiteData() {
2875 debugCallSite = false;
2876 didOneDebug = false;
2877 writeDebugDOTs = false;
// Decide whether this caller/callee pair matches the command-line debug
// targets (state.DISJOINTDEBUGCALLER/CALLEE, as "Class.method" strings)
// and, if so, whether the current visit falls inside the DOT-capture
// window defined by ReachGraph's static counters.
2882 protected void decideDebugCallSite( DebugCallSiteData dcsd,
2883 Descriptor taskOrMethodCaller,
2884 MethodDescriptor mdCallee ) {
2886 // all this jimma jamma to debug call sites is WELL WORTH the
2887 // effort, so so so many bugs or buggy info appears through call
// nothing to do unless both debug targets were supplied
2890 if( state.DISJOINTDEBUGCALLEE == null ||
2891 state.DISJOINTDEBUGCALLER == null ) {
2896 boolean debugCalleeMatches = false;
2897 boolean debugCallerMatches = false;
2899 ClassDescriptor cdCallee = mdCallee.getClassDesc();
2900 if( cdCallee != null ) {
2901 debugCalleeMatches =
2902 state.DISJOINTDEBUGCALLEE.equals( cdCallee.getSymbol()+
2904 mdCallee.getSymbol()
// caller may be a method (Class.method form) or a bristlecone task
2909 if( taskOrMethodCaller instanceof MethodDescriptor ) {
2910 ClassDescriptor cdCaller = ((MethodDescriptor)taskOrMethodCaller).getClassDesc();
2911 if( cdCaller != null ) {
2912 debugCallerMatches =
2913 state.DISJOINTDEBUGCALLER.equals( cdCaller.getSymbol()+
2915 taskOrMethodCaller.getSymbol()
2919 // for bristlecone style tasks
2920 debugCallerMatches =
2921 state.DISJOINTDEBUGCALLER.equals( taskOrMethodCaller.getSymbol() );
2925 dcsd.debugCallSite = debugCalleeMatches && debugCallerMatches;
// only emit DOT graphs while the visit counter is inside the capture window
2928 dcsd.writeDebugDOTs =
2930 dcsd.debugCallSite &&
2932 (ReachGraph.debugCallSiteVisitCounter >=
2933 ReachGraph.debugCallSiteVisitStartCapture) &&
2935 (ReachGraph.debugCallSiteVisitCounter <
2936 ReachGraph.debugCallSiteVisitStartCapture +
2937 ReachGraph.debugCallSiteNumVisitsToCapture);
2941 if( dcsd.debugCallSite ) {
2942 dcsd.didOneDebug = true;
// After a call-site visit: report progress, advance the global visit
// counter, recompute the capture-window flags, and if the configured
// stop-after point was reached, request termination.
2946 protected void statusDebugCallSite( DebugCallSiteData dcsd ) {
2948 dcsd.writeDebugDOTs = false;
2949 dcsd.stopAfter = false;
2951 if( dcsd.didOneDebug ) {
2952 System.out.println(" $$$ Debug call site visit "+
2953 ReachGraph.debugCallSiteVisitCounter+
// same capture-window test as decideDebugCallSite, re-evaluated here
2957 (ReachGraph.debugCallSiteVisitCounter >=
2958 ReachGraph.debugCallSiteVisitStartCapture) &&
2960 (ReachGraph.debugCallSiteVisitCounter <
2961 ReachGraph.debugCallSiteVisitStartCapture +
2962 ReachGraph.debugCallSiteNumVisitsToCapture)
2964 dcsd.writeDebugDOTs = true;
2965 System.out.println(" $$$ Capturing this call site visit $$$");
// stop on the final visit of the capture window, if requested
2966 if( ReachGraph.debugCallSiteStopAfter &&
2967 (ReachGraph.debugCallSiteVisitCounter ==
2968 ReachGraph.debugCallSiteVisitStartCapture +
2969 ReachGraph.debugCallSiteNumVisitsToCapture - 1)
2971 dcsd.stopAfter = true;
2975 ++ReachGraph.debugCallSiteVisitCounter;
2978 if( dcsd.stopAfter ) {
2979 System.out.println("$$$ Exiting after requested captures of call site. $$$");
2988 // get successive captures of the analysis state, use compiler
// knobs controlling debugSnapshot(): whether snapshots are on, which
// method symbol to watch, whether to abort after capturing, and the
// visit/node counters bounding the capture window
2990 boolean takeDebugSnapshots = false;
2991 String descSymbolDebug = null;
2992 boolean stopAfterCapture = false;
2993 int snapVisitCounter = 0;
2994 int snapNodeCounter = 0;
2995 int visitStartCapture = 0;
2996 int numVisitsToCapture = 0;
// Write a DOT snapshot of reach graph rg at flat node fn while the visit
// counter is inside [visitStartCapture, visitStartCapture+numVisitsToCapture];
// 'in' selects the "...in" vs "...out" file-name suffix (graph at node
// entry vs. exit -- presumably; confirm at call sites).
2999 void debugSnapshot(ReachGraph rg, FlatNode fn, boolean in) {
3000 if( snapVisitCounter > visitStartCapture + numVisitsToCapture ) {
3008 if( snapVisitCounter >= visitStartCapture ) {
3009 System.out.println(" @@@ snapping visit="+snapVisitCounter+
3010 ", node="+snapNodeCounter+
3014 graphName = String.format("snap%03d_%04din",
3018 graphName = String.format("snap%03d_%04dout",
3023 graphName = graphName + fn;
3025 rg.writeGraph(graphName,
3026 true, // write labels (variables)
3027 true, // selectively hide intermediate temp vars
3028 true, // prune unreachable heap regions
3029 false, // hide reachability
3030 true, // hide subset reachability states
3031 true, // hide predicates
3032 true); // hide edge taints
// Allocation sites variable x may point to just BEFORE programPoint
// executes; the elided null branch covers nodes with no recorded graph.
3039 public Set<Alloc> canPointToAt( TempDescriptor x,
3040 FlatNode programPoint ) {
3042 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
3043 if( rgAtEnter == null ) {
3047 return rgAtEnter.canPointTo( x );
// For each site x may point to before programPoint, the sites reachable
// through field f (parameter elided in this listing) of that object.
3051 public Hashtable< Alloc, Set<Alloc> > canPointToAt( TempDescriptor x,
3053 FlatNode programPoint ) {
3055 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
3056 if( rgAtEnter == null ) {
3060 return rgAtEnter.canPointTo( x, f.getSymbol(), f.getType() );
// Array-element variant of canPointToAt: x must be an array; follows the
// pseudo-field arrayElementFieldName with x's element type.
3064 public Hashtable< Alloc, Set<Alloc> > canPointToAtElement( TempDescriptor x,
3065 FlatNode programPoint ) {
3067 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
3068 if( rgAtEnter == null ) {
3072 assert x.getType() != null;
3073 assert x.getType().isArray();
3075 return rgAtEnter.canPointTo( x, arrayElementFieldName, x.getType().dereference() );
// Allocation sites x may point to just AFTER programPoint executes
// (exit-state counterpart of canPointToAt).
3079 public Set<Alloc> canPointToAfter( TempDescriptor x,
3080 FlatNode programPoint ) {
3082 ReachGraph rgAtExit = fn2rgAtExit.get( programPoint );
3084 if( rgAtExit == null ) {
3088 return rgAtExit.canPointTo( x );
// Exit-state counterpart of the field-following canPointToAt (field
// parameter f elided in this listing).
3092 public Hashtable< Alloc, Set<Alloc> > canPointToAfter( TempDescriptor x,
3094 FlatNode programPoint ) {
3096 ReachGraph rgAtExit = fn2rgAtExit.get( programPoint );
3097 if( rgAtExit == null ) {
3101 return rgAtExit.canPointTo( x, f.getSymbol(), f.getType() );
// Exit-state counterpart of canPointToAtElement; x must be an array.
3105 public Hashtable< Alloc, Set<Alloc> > canPointToAfterElement( TempDescriptor x,
3106 FlatNode programPoint ) {
3108 ReachGraph rgAtExit = fn2rgAtExit.get( programPoint );
3109 if( rgAtExit == null ) {
3113 assert x.getType() != null;
3114 assert x.getType().isArray();
3116 return rgAtExit.canPointTo( x, arrayElementFieldName, x.getType().dereference() );