1 package Analysis.Disjoint;
3 import Analysis.CallGraph.*;
4 import Analysis.Liveness;
5 import Analysis.ArrayReferencees;
8 import IR.Tree.Modifiers;
13 public class DisjointAnalysis {
15 ///////////////////////////////////////////
17 // Public interface to discover possible
18 // aliases in the program under analysis
20 ///////////////////////////////////////////
// Returns the set of flagged allocation sites reachable from the
// given task, by delegating to the private implementation.
// Throws Error (via checkAnalysisComplete) if called before the
// interprocedural analysis has finished.
22 public HashSet<AllocSite>
23 getFlaggedAllocationSitesReachableFromTask(TaskDescriptor td) {
24 checkAnalysisComplete();
25 return getFlaggedAllocationSitesReachableFromTaskPRIVATE(td);
// Maps a FlatNew IR node to its analysis-level AllocSite abstraction.
// Guarded: only valid once the analysis is complete.
28 public AllocSite getAllocationSiteFromFlatNew(FlatNew fn) {
29 checkAnalysisComplete();
30 return getAllocSiteFromFlatNewPRIVATE(fn);
// Looks up the AllocSite that created the heap region node with the
// given intergraph ID (see mapHrnIdToAllocSite).  Returns null if the
// ID is unknown (Hashtable.get semantics).
33 public AllocSite getAllocationSiteFromHeapRegionNodeID(Integer id) {
34 checkAnalysisComplete();
35 return mapHrnIdToAllocSite.get(id);
// Reports heap regions potentially shared between two parameters
// (paramIndex1, paramIndex2 -- declared on elided signature lines) of
// the given task or method, using its completed reachability graph.
38 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
41 checkAnalysisComplete();
42 ReachGraph rg=mapDescriptorToCompleteReachGraph.get(taskOrMethod);
43 FlatMethod fm=state.getMethodFlat(taskOrMethod);
45 return rg.mayReachSharedObjects(fm, paramIndex1, paramIndex2);
// Reports heap regions potentially shared between a parameter and an
// allocation site of the given task or method.
48 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
49 int paramIndex, AllocSite alloc) {
50 checkAnalysisComplete();
51 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
52 FlatMethod fm=state.getMethodFlat(taskOrMethod);
54 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Symmetric overload of the (paramIndex, alloc) query: argument order
// is swapped in the signature but the same underlying query is issued,
// so the result is identical to hasPotentialSharing(d, paramIndex, alloc).
57 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
58 AllocSite alloc, int paramIndex) {
59 checkAnalysisComplete();
60 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
61 FlatMethod fm=state.getMethodFlat(taskOrMethod);
63 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Reports heap regions potentially shared between two allocation
// sites in the context of the given task or method.
66 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
67 AllocSite alloc1, AllocSite alloc2) {
68 checkAnalysisComplete();
69 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
71 return rg.mayReachSharedObjects(alloc1, alloc2);
// Formats a set of heap region nodes for the sharing reports:
// one line per node, with a verbose allocation-site description when
// the node has one.  NOTE(review): the declaration of 'out', the loop
// structure, and the branch between the two formats fall on source
// lines elided from this listing.
74 public String prettyPrintNodeSet(Set<HeapRegionNode> s) {
75 checkAnalysisComplete();
79 Iterator<HeapRegionNode> i = s.iterator();
81 HeapRegionNode n = i.next();
83 AllocSite as = n.getAllocSite();
85 out += " " + n.toString() + ",\n";
87 out += " " + n.toString() + ": " + as.toStringVerbose()
96 // use the methods given above to check every possible sharing class
97 // between task parameters and flagged allocation sites reachable
98 // from those tasks, and write the results to a report file
// Writes the full sharing report for a task-based (Bamboo) program:
// for every task, checks parameter-vs-parameter sharing,
// parameter-vs-allocation-site sharing, and site-vs-site sharing,
// writing human-readable output (or a terse tabular line when
// tabularOutput is set).  Additional parameters (timeReport,
// justTime, numLines) appear on signature lines elided from this
// listing.  Throws IOException on write failure.
99 public void writeAllSharing(String outputFile,
102 boolean tabularOutput,
105 throws java.io.IOException {
106 checkAnalysisComplete();
108 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
110 if (!tabularOutput) {
111 bw.write("Conducting ownership analysis with allocation depth = "
112 + allocationDepth + "\n");
113 bw.write(timeReport + "\n");
118 // look through every task for potential sharing
119 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
120 while (taskItr.hasNext()) {
121 TaskDescriptor td = (TaskDescriptor) taskItr.next();
123 if (!tabularOutput) {
124 bw.write("\n---------" + td + "--------\n");
127 HashSet<AllocSite> allocSites = getFlaggedAllocationSitesReachableFromTask(td);
129 Set<HeapRegionNode> common;
131 // for each task parameter, check for sharing classes with
132 // other task parameters and every allocation site
133 // reachable from this task
134 boolean foundSomeSharing = false;
136 FlatMethod fm = state.getMethodFlat(td);
137 for (int i = 0; i < fm.numParameters(); ++i) {
139 // skip parameters with types that cannot reference
141 if( !shouldAnalysisTrack( fm.getParameter( i ).getType() ) ) {
145 // for the ith parameter check for sharing classes to all
146 // higher numbered parameters
147 for (int j = i + 1; j < fm.numParameters(); ++j) {
149 // skip parameters with types that cannot reference
151 if( !shouldAnalysisTrack( fm.getParameter( j ).getType() ) ) {
156 common = hasPotentialSharing(td, i, j);
157 if (!common.isEmpty()) {
158 foundSomeSharing = true;
160 if (!tabularOutput) {
161 bw.write("Potential sharing between parameters " + i
162 + " and " + j + ".\n");
163 bw.write(prettyPrintNodeSet(common) + "\n");
168 // for the ith parameter, check for sharing classes against
169 // the set of allocation sites reachable from this
171 Iterator allocItr = allocSites.iterator();
172 while (allocItr.hasNext()) {
173 AllocSite as = (AllocSite) allocItr.next();
174 common = hasPotentialSharing(td, i, as);
175 if (!common.isEmpty()) {
176 foundSomeSharing = true;
178 if (!tabularOutput) {
179 bw.write("Potential sharing between parameter " + i
180 + " and " + as.getFlatNew() + ".\n");
181 bw.write(prettyPrintNodeSet(common) + "\n");
187 // for each allocation site check for sharing classes with
188 // other allocation sites in the context of execution
190 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
191 Iterator allocItr1 = allocSites.iterator();
192 while (allocItr1.hasNext()) {
193 AllocSite as1 = (AllocSite) allocItr1.next();
195 Iterator allocItr2 = allocSites.iterator();
196 while (allocItr2.hasNext()) {
197 AllocSite as2 = (AllocSite) allocItr2.next();
// outerChecked suppresses the symmetric duplicate of each
// already-reported (as1, as2) pair
199 if (!outerChecked.contains(as2)) {
200 common = hasPotentialSharing(td, as1, as2);
202 if (!common.isEmpty()) {
203 foundSomeSharing = true;
205 if (!tabularOutput) {
206 bw.write("Potential sharing between "
207 + as1.getFlatNew() + " and "
208 + as2.getFlatNew() + ".\n");
209 bw.write(prettyPrintNodeSet(common) + "\n");
215 outerChecked.add(as1);
218 if (!foundSomeSharing) {
219 if (!tabularOutput) {
220 bw.write("No sharing between flagged objects in Task " + td
// tabular summary row (LaTeX-style "&"-separated columns);
// numSharing is presumably accumulated on elided lines -- verify
228 bw.write(" & " + numSharing + " & " + justTime + " & " + numLines
229 + " & " + numMethodsAnalyzed() + " \\\\\n");
231 bw.write("\nNumber sharing classes: "+numSharing);
237 // this version of writeAllSharing is for Java programs that have no tasks
// Sharing report for plain Java programs (no tasks): checks only
// site-vs-site sharing over the allocation sites flagged for the
// program's main method.  Extra parameters (timeReport, etc.) are on
// signature lines elided from this listing.
238 public void writeAllSharingJava(String outputFile,
241 boolean tabularOutput,
244 throws java.io.IOException {
245 checkAnalysisComplete();
251 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
253 bw.write("Conducting disjoint reachability analysis with allocation depth = "
254 + allocationDepth + "\n");
255 bw.write(timeReport + "\n\n");
257 boolean foundSomeSharing = false;
259 Descriptor d = typeUtil.getMain();
260 HashSet<AllocSite> allocSites = getFlaggedAllocationSites(d);
262 // for each allocation site check for sharing classes with
263 // other allocation sites in the context of execution
265 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
266 Iterator allocItr1 = allocSites.iterator();
267 while (allocItr1.hasNext()) {
268 AllocSite as1 = (AllocSite) allocItr1.next();
270 Iterator allocItr2 = allocSites.iterator();
271 while (allocItr2.hasNext()) {
272 AllocSite as2 = (AllocSite) allocItr2.next();
// outerChecked suppresses symmetric duplicate pairs, as in
// writeAllSharing above
274 if (!outerChecked.contains(as2)) {
275 Set<HeapRegionNode> common = hasPotentialSharing(d,
278 if (!common.isEmpty()) {
279 foundSomeSharing = true;
280 bw.write("Potential sharing between "
281 + as1.getDisjointAnalysisId() + " and "
282 + as2.getDisjointAnalysisId() + ".\n");
283 bw.write(prettyPrintNodeSet(common) + "\n");
289 outerChecked.add(as1);
292 if (!foundSomeSharing) {
293 bw.write("No sharing classes between flagged objects found.\n");
295 bw.write("\nNumber sharing classes: "+numSharing);
298 bw.write("Number of methods analyzed: "+numMethodsAnalyzed()+"\n");
303 ///////////////////////////////////////////
305 // end public interface
307 ///////////////////////////////////////////
// Guard used by every public query method: refuses to hand out
// results while the interprocedural fixed point is still running.
309 protected void checkAnalysisComplete() {
310 if( !analysisComplete ) {
311 throw new Error("Warning: public interface method called while analysis is running.");
316 // run in faster mode, only when bugs wrung out!
317 public static boolean releaseMode;
319 // use command line option to set this, analysis
320 // should attempt to be deterministic
321 public static boolean determinismDesired;
323 // when we want to enforce determinism in the
324 // analysis we need to sort descriptors rather
325 // than toss them in efficient sets, use this
326 public static DescriptorComparator dComp =
327 new DescriptorComparator();
330 // data from the compiler
332 public CallGraph callGraph;
333 public Liveness liveness;
334 public ArrayReferencees arrayReferencees;
335 public TypeUtil typeUtil;
336 public int allocationDepth;
338 // data structure for public interface
339 private Hashtable< Descriptor, HashSet<AllocSite> >
340 mapDescriptorToAllocSiteSet;
343 // for public interface methods to warn that they
344 // are grabbing results during analysis
345 private boolean analysisComplete;
348 // used to identify HeapRegionNode objects
349 // A unique ID equates an object in one
350 // ownership graph with an object in another
351 // graph that logically represents the same
353 // start at 10 and increment to reserve some
354 // IDs for special purposes
355 static protected int uniqueIDcount = 10;
358 // An out-of-scope method created by the
359 // analysis that has no parameters, and
360 // appears to allocate the command line
361 // arguments, then invoke the source code's
362 // main method. The purpose of this is to
363 // provide the analysis with an explicit
364 // top-level context with no parameters
365 protected MethodDescriptor mdAnalysisEntry;
366 protected FlatMethod fmAnalysisEntry;
368 // main method defined by source program
369 protected MethodDescriptor mdSourceEntry;
371 // the set of task and/or method descriptors
372 // reachable in call graph
373 protected Set<Descriptor>
374 descriptorsToAnalyze;
376 // current descriptors to visit in fixed-point
377 // interprocedural analysis, prioritized by
378 // dependency in the call graph
379 protected Stack<Descriptor>
380 descriptorsToVisitStack;
381 protected PriorityQueue<DescriptorQWrapper>
384 // a duplication of the above structure, but
385 // for efficient testing of inclusion
386 protected HashSet<Descriptor>
387 descriptorsToVisitSet;
389 // storage for priorities (it doesn't make sense
390 // to add them to the Descriptor class), just in
392 protected Hashtable<Descriptor, Integer>
393 mapDescriptorToPriority;
395 // when analyzing a method and scheduling more:
396 // remember set of callee's enqueued for analysis
397 // so they can be put on top of the callers in
398 // the stack-visit mode
399 protected Set<Descriptor>
402 // maps a descriptor to its current partial result
403 // from the intraprocedural fixed-point analysis--
404 // when the interprocedural analysis settles, this
405 // mapping will have the final results for each
407 protected Hashtable<Descriptor, ReachGraph>
408 mapDescriptorToCompleteReachGraph;
410 // maps a descriptor to its known dependents: namely
411 // methods or tasks that call the descriptor's method
412 // AND are part of this analysis (reachable from main)
413 protected Hashtable< Descriptor, Set<Descriptor> >
414 mapDescriptorToSetDependents;
416 // maps each flat new to one analysis abstraction
417 // allocate site object, these exist outside reach graphs
418 protected Hashtable<FlatNew, AllocSite>
419 mapFlatNewToAllocSite;
421 // maps intergraph heap region IDs to intergraph
422 // allocation sites that created them, a redundant
423 // structure for efficiency in some operations
424 protected Hashtable<Integer, AllocSite>
427 // maps a method to its initial heap model (IHM) that
428 // is the set of reachability graphs from every caller
429 // site, all merged together. The reason that we keep
430 // them separate is that any one call site's contribution
431 // to the IHM may change along the path to the fixed point
432 protected Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >
433 mapDescriptorToIHMcontributions;
435 // additionally, keep a mapping from descriptors to the
436 // merged in-coming initial context, because we want this
437 // initial context to be STRICTLY MONOTONIC
438 protected Hashtable<Descriptor, ReachGraph>
439 mapDescriptorToInitialContext;
441 // make the result for back edges analysis-wide STRICTLY
442 // MONOTONIC as well, but notice we use FlatNode as the
443 // key for this map: in case we want to consider other
444 // nodes as back edges in future implementations
445 protected Hashtable<FlatNode, ReachGraph>
446 mapBackEdgeToMonotone;
// synthetic field name used to model array-element references
449 public static final String arrayElementFieldName = "___element_";
450 static protected Hashtable<TypeDescriptor, FieldDescriptor>
453 // for controlling DOT file output
454 protected boolean writeFinalDOTs;
455 protected boolean writeAllIncrementalDOTs;
457 // supporting DOT output--when we want to write every
458 // partial method result, keep a tally for generating
460 protected Hashtable<Descriptor, Integer>
461 mapDescriptorToNumUpdates;
463 //map task descriptor to initial task parameter
464 protected Hashtable<Descriptor, ReachGraph>
465 mapDescriptorToReachGraph;
467 protected PointerMethod pm;
469 static protected Hashtable<FlatNode, ReachGraph> fn2rg =
470 new Hashtable<FlatNode, ReachGraph>();
// remembers which method/task encloses each analyzed call site,
// filled in by analyzeFlatNode's FlatCall case
472 private Hashtable<FlatCall, Descriptor> fc2enclosing;
474 //protected RBlockRelationAnalysis rra;
477 // allocate various structures that are not local
478 // to a single class method--should be done once
// Allocates all analysis-wide containers in one place, choosing
// ordered vs. hashed sets according to the determinism flag and only
// building the visit stack/queue the configured scheduling mode needs.
479 protected void allocateStructures() {
481 if( determinismDesired ) {
482 // use an ordered set
483 descriptorsToAnalyze = new TreeSet<Descriptor>( dComp );
485 // otherwise use a speedy hashset
486 descriptorsToAnalyze = new HashSet<Descriptor>();
489 mapDescriptorToCompleteReachGraph =
490 new Hashtable<Descriptor, ReachGraph>();
492 mapDescriptorToNumUpdates =
493 new Hashtable<Descriptor, Integer>();
495 mapDescriptorToSetDependents =
496 new Hashtable< Descriptor, Set<Descriptor> >();
498 mapFlatNewToAllocSite =
499 new Hashtable<FlatNew, AllocSite>();
501 mapDescriptorToIHMcontributions =
502 new Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >();
504 mapDescriptorToInitialContext =
505 new Hashtable<Descriptor, ReachGraph>();
507 mapBackEdgeToMonotone =
508 new Hashtable<FlatNode, ReachGraph>();
510 mapHrnIdToAllocSite =
511 new Hashtable<Integer, AllocSite>();
513 mapTypeToArrayField =
514 new Hashtable <TypeDescriptor, FieldDescriptor>();
// the visit stack and priority queue are mutually exclusive;
// only the one matching the configured mode is allocated
516 if( state.DISJOINTDVISITSTACK ||
517 state.DISJOINTDVISITSTACKEESONTOP
519 descriptorsToVisitStack =
520 new Stack<Descriptor>();
523 if( state.DISJOINTDVISITPQUE ) {
524 descriptorsToVisitQ =
525 new PriorityQueue<DescriptorQWrapper>();
528 descriptorsToVisitSet =
529 new HashSet<Descriptor>();
531 mapDescriptorToPriority =
532 new Hashtable<Descriptor, Integer>();
535 new HashSet<Descriptor>();
537 mapDescriptorToAllocSiteSet =
538 new Hashtable<Descriptor, HashSet<AllocSite> >();
540 mapDescriptorToReachGraph =
541 new Hashtable<Descriptor, ReachGraph>();
543 pm = new PointerMethod();
545 fc2enclosing = new Hashtable<FlatCall, Descriptor>();
550 // this analysis generates a disjoint reachability
551 // graph for every reachable method in the program
// Constructor simply forwards the compiler inputs (state, type util,
// call graph, liveness, array referencees -- middle parameters are on
// elided signature lines) to init(), which runs the whole analysis.
552 public DisjointAnalysis( State s,
557 //RBlockRelationAnalysis rra
558 ) throws java.io.IOException {
559 init( s, tu, cg, l, ar );
// Configures the analysis from compiler State flags, runs the
// interprocedural fixed point, reports timing, and writes any
// requested DOT/sharing output.  Note the constructor drives this, so
// the whole analysis executes during object construction.
562 protected void init( State state,
566 ArrayReferencees arrayReferencees
567 //RBlockRelationAnalysis rra
568 ) throws java.io.IOException {
570 analysisComplete = false;
573 this.typeUtil = typeUtil;
574 this.callGraph = callGraph;
575 this.liveness = liveness;
576 this.arrayReferencees = arrayReferencees;
577 this.allocationDepth = state.DISJOINTALLOCDEPTH;
578 this.releaseMode = state.DISJOINTRELEASEMODE;
579 this.determinismDesired = state.DISJOINTDETERMINISM;
581 this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL;
582 this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL;
// debug-snapshot configuration: capture graphs for one symbol
584 this.takeDebugSnapshots = state.DISJOINTSNAPSYMBOL != null;
585 this.descSymbolDebug = state.DISJOINTSNAPSYMBOL;
586 this.visitStartCapture = state.DISJOINTSNAPVISITTOSTART;
587 this.numVisitsToCapture = state.DISJOINTSNAPNUMVISITS;
588 this.stopAfterCapture = state.DISJOINTSNAPSTOPAFTER;
589 this.snapVisitCounter = 1; // count visits from 1 (user will write 1, means 1st visit)
590 this.snapNodeCounter = 0; // count nodes from 0
// exactly one descriptor-visiting mode must be selected
593 state.DISJOINTDVISITSTACK ||
594 state.DISJOINTDVISITPQUE ||
595 state.DISJOINTDVISITSTACKEESONTOP;
596 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITPQUE);
597 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITSTACKEESONTOP);
598 assert !(state.DISJOINTDVISITPQUE && state.DISJOINTDVISITSTACKEESONTOP);
600 // set some static configuration for ReachGraphs
601 ReachGraph.allocationDepth = allocationDepth;
602 ReachGraph.typeUtil = typeUtil;
604 ReachGraph.debugCallSiteVisitStartCapture
605 = state.DISJOINTDEBUGCALLVISITTOSTART;
607 ReachGraph.debugCallSiteNumVisitsToCapture
608 = state.DISJOINTDEBUGCALLNUMVISITS;
610 ReachGraph.debugCallSiteStopAfter
611 = state.DISJOINTDEBUGCALLSTOPAFTER;
613 ReachGraph.debugCallSiteVisitCounter
614 = 0; // count visits from 1, is incremented before first visit
618 allocateStructures();
620 double timeStartAnalysis = (double) System.nanoTime();
622 // start interprocedural fixed-point computation
624 analysisComplete=true;
626 double timeEndAnalysis = (double) System.nanoTime();
627 double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow( 10.0, 9.0 ) );
628 String treport = String.format( "The reachability analysis took %.3f sec.", dt );
629 String justtime = String.format( "%.2f", dt );
630 System.out.println( treport );
632 if( writeFinalDOTs && !writeAllIncrementalDOTs ) {
636 if( state.DISJOINTWRITEIHMS ) {
640 if( state.DISJOINTWRITEINITCONTEXTS ) {
641 writeInitialContexts();
// task mode writes the task report; Java mode the main-method report
644 if( state.DISJOINTALIASFILE != null ) {
646 writeAllSharing(state.DISJOINTALIASFILE, treport, justtime, state.DISJOINTALIASTAB, state.lines);
648 writeAllSharingJava(state.DISJOINTALIASFILE,
651 state.DISJOINTALIASTAB,
// True while the scheduling structure for the active visiting mode
// (stack or priority queue) still holds descriptors to analyze;
// errors out if no mode was configured.
659 protected boolean moreDescriptorsToVisit() {
660 if( state.DISJOINTDVISITSTACK ||
661 state.DISJOINTDVISITSTACKEESONTOP
663 return !descriptorsToVisitStack.isEmpty();
665 } else if( state.DISJOINTDVISITPQUE ) {
666 return !descriptorsToVisitQ.isEmpty();
669 throw new Error( "Neither descriptor visiting mode set" );
673 // fixed-point computation over the call graph--when a
674 // method's callees are updated, it must be reanalyzed
// Interprocedural driver: seeds the work list with the call-graph
// roots (all tasks in Bamboo mode, the fabricated analysis entry in
// Java mode), then repeatedly pulls a descriptor, recomputes its
// complete reach graph, and reschedules its dependents whenever the
// result changed, until a fixed point is reached.
675 protected void analyzeMethods() throws java.io.IOException {
677 // task or non-task (java) mode determines what the roots
678 // of the call chain are, and establishes the set of methods
679 // reachable from the roots that will be analyzed
682 System.out.println( "Bamboo mode..." );
684 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
685 while( taskItr.hasNext() ) {
686 TaskDescriptor td = (TaskDescriptor) taskItr.next();
687 if( !descriptorsToAnalyze.contains( td ) ) {
688 // add all methods transitively reachable from the
690 descriptorsToAnalyze.add( td );
691 descriptorsToAnalyze.addAll( callGraph.getAllMethods( td ) );
696 System.out.println( "Java mode..." );
698 // add all methods transitively reachable from the
699 // source's main to set for analysis
700 mdSourceEntry = typeUtil.getMain();
701 descriptorsToAnalyze.add( mdSourceEntry );
702 descriptorsToAnalyze.addAll( callGraph.getAllMethods( mdSourceEntry ) );
704 // fabricate an empty calling context that will call
705 // the source's main, but call graph doesn't know
706 // about it, so explicitly add it
707 makeAnalysisEntryMethod( mdSourceEntry );
708 descriptorsToAnalyze.add( mdAnalysisEntry );
712 // now, depending on the interprocedural mode for visiting
713 // methods, set up the needed data structures
715 if( state.DISJOINTDVISITPQUE ) {
717 // topologically sort according to the call graph so
718 // leaf calls are last, helps build contexts up first
719 LinkedList<Descriptor> sortedDescriptors =
720 topologicalSort( descriptorsToAnalyze );
722 // add sorted descriptors to priority queue, and duplicate
723 // the queue as a set for efficiently testing whether some
724 // method is marked for analysis
726 Iterator<Descriptor> dItr;
728 // for the priority queue, give items at the head
729 // of the sorted list a low number (highest priority)
730 while( !sortedDescriptors.isEmpty() ) {
731 Descriptor d = sortedDescriptors.removeFirst();
732 mapDescriptorToPriority.put( d, new Integer( p ) );
733 descriptorsToVisitQ.add( new DescriptorQWrapper( p, d ) );
734 descriptorsToVisitSet.add( d );
738 } else if( state.DISJOINTDVISITSTACK ||
739 state.DISJOINTDVISITSTACKEESONTOP
741 // if we're doing the stack scheme, just throw the root
742 // method or tasks on the stack
744 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
745 while( taskItr.hasNext() ) {
746 TaskDescriptor td = (TaskDescriptor) taskItr.next();
747 descriptorsToVisitStack.add( td );
748 descriptorsToVisitSet.add( td );
752 descriptorsToVisitStack.add( mdAnalysisEntry );
753 descriptorsToVisitSet.add( mdAnalysisEntry );
757 throw new Error( "Unknown method scheduling mode" );
761 // analyze scheduled methods until there are no more to visit
762 while( moreDescriptorsToVisit() ) {
765 if( state.DISJOINTDVISITSTACK ||
766 state.DISJOINTDVISITSTACKEESONTOP
768 d = descriptorsToVisitStack.pop();
770 } else if( state.DISJOINTDVISITPQUE ) {
771 d = descriptorsToVisitQ.poll().getDescriptor();
774 assert descriptorsToVisitSet.contains( d );
775 descriptorsToVisitSet.remove( d );
777 // because the task or method descriptor just extracted
778 // was in the "to visit" set it either hasn't been analyzed
779 // yet, or some method that it depends on has been
780 // updated. Recompute a complete reachability graph for
781 // this task/method and compare it to any previous result.
782 // If there is a change detected, add any methods/tasks
783 // that depend on this one to the "to visit" set.
785 System.out.println( "Analyzing " + d );
787 if( state.DISJOINTDVISITSTACKEESONTOP ) {
788 assert calleesToEnqueue.isEmpty();
791 ReachGraph rg = analyzeMethod( d );
792 ReachGraph rgPrev = getPartial( d );
794 if( !rg.equals( rgPrev ) ) {
797 if( state.DISJOINTDEBUGSCHEDULING ) {
798 System.out.println( " complete graph changed, scheduling callers for analysis:" );
801 // results for d changed, so enqueue dependents
802 // of d for further analysis
803 Iterator<Descriptor> depsItr = getDependents( d ).iterator();
804 while( depsItr.hasNext() ) {
805 Descriptor dNext = depsItr.next();
808 if( state.DISJOINTDEBUGSCHEDULING ) {
809 System.out.println( " "+dNext );
814 // whether or not the method under analysis changed,
815 // we may have some callees that are scheduled for
816 // more analysis, and they should go on the top of
817 // the stack now (in other method-visiting modes they
818 // are already enqueued at this point)
819 if( state.DISJOINTDVISITSTACKEESONTOP ) {
820 Iterator<Descriptor> depsItr = calleesToEnqueue.iterator();
821 while( depsItr.hasNext() ) {
822 Descriptor dNext = depsItr.next();
825 calleesToEnqueue.clear();
// Intraprocedural fixed point over one method's flat IR: propagates
// ReachGraphs node-by-node (merging predecessor graphs, applying the
// per-node transfer function via analyzeFlatNode, re-enqueueing
// successors on change) and returns the merge of all return-node
// graphs as the method's conservative summary.
831 protected ReachGraph analyzeMethod( Descriptor d )
832 throws java.io.IOException {
834 // get the flat code for this descriptor
836 if( d == mdAnalysisEntry ) {
837 fm = fmAnalysisEntry;
839 fm = state.getMethodFlat( d );
841 pm.analyzeMethod( fm );
843 // intraprocedural work set
844 Set<FlatNode> flatNodesToVisit = new HashSet<FlatNode>();
845 flatNodesToVisit.add( fm );
847 // if determinism is desired by client, shadow the
848 // set with a queue to make visit order deterministic
849 Queue<FlatNode> flatNodesToVisitQ = null;
850 if( determinismDesired ) {
851 flatNodesToVisitQ = new LinkedList<FlatNode>();
852 flatNodesToVisitQ.add( fm );
855 // mapping of current partial results
856 Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraph =
857 new Hashtable<FlatNode, ReachGraph>();
859 // the set of return nodes partial results that will be combined as
860 // the final, conservative approximation of the entire method
861 HashSet<FlatReturnNode> setReturns = new HashSet<FlatReturnNode>();
863 while( !flatNodesToVisit.isEmpty() ) {
866 if( determinismDesired ) {
867 assert !flatNodesToVisitQ.isEmpty();
868 fn = flatNodesToVisitQ.remove();
870 fn = flatNodesToVisit.iterator().next();
872 flatNodesToVisit.remove( fn );
874 // effect transfer function defined by this node,
875 // then compare it to the old graph at this node
876 // to see if anything was updated.
878 ReachGraph rg = new ReachGraph();
879 TaskDescriptor taskDesc;
// a task's entry node starts from a cached/created initial task
// graph instead of an empty one
880 if(fn instanceof FlatMethod && (taskDesc=((FlatMethod)fn).getTask())!=null){
881 if(mapDescriptorToReachGraph.containsKey(taskDesc)){
882 // retrieve existing reach graph if it is not first time
883 rg=mapDescriptorToReachGraph.get(taskDesc);
885 // create initial reach graph for a task
886 rg=createInitialTaskReachGraph((FlatMethod)fn);
888 mapDescriptorToReachGraph.put(taskDesc, rg);
892 // start by merging all node's parents' graphs
893 for( int i = 0; i < pm.numPrev(fn); ++i ) {
894 FlatNode pn = pm.getPrev(fn,i);
895 if( mapFlatNodeToReachGraph.containsKey( pn ) ) {
896 ReachGraph rgParent = mapFlatNodeToReachGraph.get( pn );
897 rg.merge( rgParent );
901 //if(rra.isEndOfRegion(fn)){
902 // rg.clearAccessibleVarSet();
903 // also need to clear stall mapping
906 if( takeDebugSnapshots &&
907 d.getSymbol().equals( descSymbolDebug )
909 debugSnapshot( rg, fn, true );
913 // modify rg with appropriate transfer function
914 rg = analyzeFlatNode( d, fm, fn, setReturns, rg );
917 if( takeDebugSnapshots &&
918 d.getSymbol().equals( descSymbolDebug )
920 debugSnapshot( rg, fn, false );
925 // if the results of the new graph are different from
926 // the current graph at this node, replace the graph
927 // with the update and enqueue the children
928 ReachGraph rgPrev = mapFlatNodeToReachGraph.get( fn );
929 if( !rg.equals( rgPrev ) ) {
930 mapFlatNodeToReachGraph.put( fn, rg );
932 for( int i = 0; i < pm.numNext( fn ); i++ ) {
933 FlatNode nn = pm.getNext( fn, i );
935 flatNodesToVisit.add( nn );
936 if( determinismDesired ) {
937 flatNodesToVisitQ.add( nn );
944 // end by merging all return nodes into a complete
945 // reach graph that represents all possible heap
946 // states after the flat method returns
947 ReachGraph completeGraph = new ReachGraph();
949 assert !setReturns.isEmpty();
950 Iterator retItr = setReturns.iterator();
951 while( retItr.hasNext() ) {
952 FlatReturnNode frn = (FlatReturnNode) retItr.next();
954 assert mapFlatNodeToReachGraph.containsKey( frn );
955 ReachGraph rgRet = mapFlatNodeToReachGraph.get( frn );
957 completeGraph.merge( rgRet );
961 if( takeDebugSnapshots &&
962 d.getSymbol().equals( descSymbolDebug )
964 // increment that we've visited the debug snap
965 // method, and reset the node counter
966 System.out.println( " @@@ debug snap at visit "+snapVisitCounter );
970 if( snapVisitCounter == visitStartCapture + numVisitsToCapture &&
973 System.out.println( "!!! Stopping analysis after debug snap captures. !!!" );
979 return completeGraph;
984 analyzeFlatNode( Descriptor d,
985 FlatMethod fmContaining,
987 HashSet<FlatReturnNode> setRetNodes,
989 ) throws java.io.IOException {
992 // any variables that are no longer live should be
993 // nullified in the graph to reduce edges
994 //rg.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) );
1001 // use node type to decide what transfer function
1002 // to apply to the reachability graph
1003 switch( fn.kind() ) {
1005 case FKind.FlatMethod: {
1006 // construct this method's initial heap model (IHM)
1007 // since we're working on the FlatMethod, we know
1008 // the incoming ReachGraph 'rg' is empty
1010 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1011 getIHMcontributions( d );
1013 Set entrySet = heapsFromCallers.entrySet();
1014 Iterator itr = entrySet.iterator();
1015 while( itr.hasNext() ) {
1016 Map.Entry me = (Map.Entry) itr.next();
1017 FlatCall fc = (FlatCall) me.getKey();
1018 ReachGraph rgContrib = (ReachGraph) me.getValue();
1020 assert fc.getMethod().equals( d );
1022 rg.merge( rgContrib );
1025 // additionally, we are enforcing STRICT MONOTONICITY for the
1026 // method's initial context, so grow the context by whatever
1027 // the previously computed context was, and put the most
1028 // up-to-date context back in the map
1029 ReachGraph rgPrevContext = mapDescriptorToInitialContext.get( d );
1030 rg.merge( rgPrevContext );
1031 mapDescriptorToInitialContext.put( d, rg );
1035 case FKind.FlatOpNode:
1036 FlatOpNode fon = (FlatOpNode) fn;
1037 if( fon.getOp().getOp() == Operation.ASSIGN ) {
1038 lhs = fon.getDest();
1039 rhs = fon.getLeft();
1040 rg.assignTempXEqualToTempY( lhs, rhs );
1044 case FKind.FlatCastNode:
1045 FlatCastNode fcn = (FlatCastNode) fn;
1049 TypeDescriptor td = fcn.getType();
1052 rg.assignTempXEqualToCastedTempY( lhs, rhs, td );
1055 case FKind.FlatFieldNode:
1056 FlatFieldNode ffn = (FlatFieldNode) fn;
1059 fld = ffn.getField();
1060 if( shouldAnalysisTrack( fld.getType() ) ) {
1061 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fld );
1065 case FKind.FlatSetFieldNode:
1066 FlatSetFieldNode fsfn = (FlatSetFieldNode) fn;
1067 lhs = fsfn.getDst();
1068 fld = fsfn.getField();
1069 rhs = fsfn.getSrc();
1070 if( shouldAnalysisTrack( fld.getType() ) ) {
1071 rg.assignTempXFieldFEqualToTempY( lhs, fld, rhs );
1075 case FKind.FlatElementNode:
1076 FlatElementNode fen = (FlatElementNode) fn;
1079 if( shouldAnalysisTrack( lhs.getType() ) ) {
1081 assert rhs.getType() != null;
1082 assert rhs.getType().isArray();
1084 TypeDescriptor tdElement = rhs.getType().dereference();
1085 FieldDescriptor fdElement = getArrayField( tdElement );
1087 rg.assignTempXEqualToTempYFieldF( lhs, rhs, fdElement );
1091 case FKind.FlatSetElementNode:
1092 FlatSetElementNode fsen = (FlatSetElementNode) fn;
1094 if( arrayReferencees.doesNotCreateNewReaching( fsen ) ) {
1095 // skip this node if it cannot create new reachability paths
1099 lhs = fsen.getDst();
1100 rhs = fsen.getSrc();
1101 if( shouldAnalysisTrack( rhs.getType() ) ) {
1103 assert lhs.getType() != null;
1104 assert lhs.getType().isArray();
1106 TypeDescriptor tdElement = lhs.getType().dereference();
1107 FieldDescriptor fdElement = getArrayField( tdElement );
1109 rg.assignTempXFieldFEqualToTempY( lhs, fdElement, rhs );
1114 FlatNew fnn = (FlatNew) fn;
1116 if( shouldAnalysisTrack( lhs.getType() ) ) {
1117 AllocSite as = getAllocSiteFromFlatNewPRIVATE( fnn );
1118 rg.assignTempEqualToNewAlloc( lhs, as );
1123 case FKind.FlatSESEEnterNode:
1124 FlatSESEEnterNode sese = (FlatSESEEnterNode) fn;
1125 rg.taintLiveTemps( sese,
1126 liveness.getLiveInTemps( fmContaining, fn )
1130 case FKind.FlatSESEExitNode:
1131 FlatSESEExitNode fsexn = (FlatSESEExitNode) fn;
1132 rg.removeInContextTaints( fsexn.getFlatEnter() );
1136 case FKind.FlatCall: {
1137 Descriptor mdCaller;
1138 if( fmContaining.getMethod() != null ){
1139 mdCaller = fmContaining.getMethod();
1141 mdCaller = fmContaining.getTask();
1143 FlatCall fc = (FlatCall) fn;
1144 MethodDescriptor mdCallee = fc.getMethod();
1145 FlatMethod fmCallee = state.getMethodFlat( mdCallee );
1148 boolean debugCallSite =
1149 mdCaller.getSymbol().equals( state.DISJOINTDEBUGCALLER ) &&
1150 mdCallee.getSymbol().equals( state.DISJOINTDEBUGCALLEE );
1152 boolean writeDebugDOTs = false;
1153 boolean stopAfter = false;
1154 if( debugCallSite ) {
1155 ++ReachGraph.debugCallSiteVisitCounter;
1156 System.out.println( " $$$ Debug call site visit "+
1157 ReachGraph.debugCallSiteVisitCounter+
1161 (ReachGraph.debugCallSiteVisitCounter >=
1162 ReachGraph.debugCallSiteVisitStartCapture) &&
1164 (ReachGraph.debugCallSiteVisitCounter <
1165 ReachGraph.debugCallSiteVisitStartCapture +
1166 ReachGraph.debugCallSiteNumVisitsToCapture)
1168 writeDebugDOTs = true;
1169 System.out.println( " $$$ Capturing this call site visit $$$" );
1170 if( ReachGraph.debugCallSiteStopAfter &&
1171 (ReachGraph.debugCallSiteVisitCounter ==
1172 ReachGraph.debugCallSiteVisitStartCapture +
1173 ReachGraph.debugCallSiteNumVisitsToCapture - 1)
1181 // calculate the heap this call site can reach--note this is
1182 // not used for the current call site transform, we are
1183 // grabbing this heap model for future analysis of the callees,
1184 // so if different results emerge we will return to this site
1185 ReachGraph heapForThisCall_old =
1186 getIHMcontribution( mdCallee, fc );
1188 // the computation of the callee-reachable heap
1189 // is useful for making the callee starting point
1190 // and for applying the call site transfer function
1191 Set<Integer> callerNodeIDsCopiedToCallee =
1192 new HashSet<Integer>();
1194 ReachGraph heapForThisCall_cur =
1195 rg.makeCalleeView( fc,
1197 callerNodeIDsCopiedToCallee,
1201 if( !heapForThisCall_cur.equals( heapForThisCall_old ) ) {
1202 // if heap at call site changed, update the contribution,
1203 // and reschedule the callee for analysis
1204 addIHMcontribution( mdCallee, fc, heapForThisCall_cur );
1206 // map a FlatCall to its enclosing method/task descriptor
1207 // so we can write that info out later
1208 fc2enclosing.put( fc, mdCaller );
1210 if( state.DISJOINTDEBUGSCHEDULING ) {
1211 System.out.println( " context changed, scheduling callee: "+mdCallee );
1214 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1215 calleesToEnqueue.add( mdCallee );
1217 enqueue( mdCallee );
1223 // the transformation for a call site should update the
1224 // current heap abstraction with any effects from the callee,
1225 // or if the method is virtual, the effects from any possible
1226 // callees, so find the set of callees...
1227 Set<MethodDescriptor> setPossibleCallees;
1228 if( determinismDesired ) {
1229 // use an ordered set
1230 setPossibleCallees = new TreeSet<MethodDescriptor>( dComp );
1232 // otherwise use a speedy hashset
1233 setPossibleCallees = new HashSet<MethodDescriptor>();
1236 if( mdCallee.isStatic() ) {
1237 setPossibleCallees.add( mdCallee );
1239 TypeDescriptor typeDesc = fc.getThis().getType();
1240 setPossibleCallees.addAll( callGraph.getMethods( mdCallee,
1245 ReachGraph rgMergeOfEffects = new ReachGraph();
1247 Iterator<MethodDescriptor> mdItr = setPossibleCallees.iterator();
1248 while( mdItr.hasNext() ) {
1249 MethodDescriptor mdPossible = mdItr.next();
1250 FlatMethod fmPossible = state.getMethodFlat( mdPossible );
1252 addDependent( mdPossible, // callee
1255 // don't alter the working graph (rg) until we compute a
1256 // result for every possible callee, merge them all together,
1257 // then set rg to that
1258 ReachGraph rgCopy = new ReachGraph();
1261 ReachGraph rgEffect = getPartial( mdPossible );
1263 if( rgEffect == null ) {
1264 // if this method has never been analyzed just schedule it
1265 // for analysis and skip over this call site for now
1266 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1267 calleesToEnqueue.add( mdPossible );
1269 enqueue( mdPossible );
1272 if( state.DISJOINTDEBUGSCHEDULING ) {
1273 System.out.println( " callee hasn't been analyzed, scheduling: "+mdPossible );
1278 rgCopy.resolveMethodCall( fc,
1281 callerNodeIDsCopiedToCallee,
1286 rgMergeOfEffects.merge( rgCopy );
1291 System.out.println( "$$$ Exiting after requested captures of call site. $$$" );
1296 // now that we've taken care of building heap models for
1297 // callee analysis, finish this transformation
1298 rg = rgMergeOfEffects;
1302 case FKind.FlatReturnNode:
1303 FlatReturnNode frn = (FlatReturnNode) fn;
1304 rhs = frn.getReturnTemp();
1305 if( rhs != null && shouldAnalysisTrack( rhs.getType() ) ) {
1306 rg.assignReturnEqualToTemp( rhs );
1308 setRetNodes.add( frn );
1314 // dead variables were removed before the above transfer function
1315 // was applied, so eliminate heap regions and edges that are no
1316 // longer part of the abstractly-live heap graph, and sweep up
1317 // and reachability effects that are altered by the reduction
1318 //rg.abstractGarbageCollect();
1322 // back edges are strictly monotonic
1323 if( pm.isBackEdge( fn ) ) {
1324 ReachGraph rgPrevResult = mapBackEdgeToMonotone.get( fn );
1325 rg.merge( rgPrevResult );
1326 mapBackEdgeToMonotone.put( fn, rg );
1329 // at this point rg should be the correct update
1330 // by an above transfer function, or untouched if
1331 // the flat node type doesn't affect the heap
1337 // this method should generate integers strictly greater than zero!
1338 // special "shadow" regions are made from a heap region by negating
// its ID, so IDs must be positive for the negated shadow ID to be distinct.
// NOTE(review): the increment of uniqueIDcount is elided from this listing;
// presumably it happens before the return -- confirm against the full source.
1340 static public Integer generateUniqueHeapRegionNodeID() {
1342 return new Integer( uniqueIDcount );
// Return the canonical pseudo-field used to model the elements of an array
// whose element type is tdElement. Lazily created and cached in
// mapTypeToArrayField so every array of one element type shares a single
// FieldDescriptor, keeping the heap abstraction finite.
1347 static public FieldDescriptor getArrayField( TypeDescriptor tdElement ) {
1348 FieldDescriptor fdElement = mapTypeToArrayField.get( tdElement );
1349 if( fdElement == null ) {
// cache miss: fabricate a public field named arrayElementFieldName
1350 fdElement = new FieldDescriptor( new Modifiers( Modifiers.PUBLIC ),
1352 arrayElementFieldName,
1355 mapTypeToArrayField.put( tdElement, fdElement );
// Dump the final (complete) reachability graph of every analyzed
// descriptor to a DOT file named "COMPLETE<descriptor>".
1362 private void writeFinalGraphs() {
1363 Set entrySet = mapDescriptorToCompleteReachGraph.entrySet();
1364 Iterator itr = entrySet.iterator();
1365 while( itr.hasNext() ) {
1366 Map.Entry me = (Map.Entry) itr.next();
1367 Descriptor d = (Descriptor) me.getKey();
1368 ReachGraph rg = (ReachGraph) me.getValue();
1370 rg.writeGraph( "COMPLETE"+d,
1371 true, // write labels (variables)
1372 true, // selectively hide intermediate temp vars
1373 true, // prune unreachable heap regions
1374 false, // hide reachability altogether
1375 true, // hide subset reachability states
1376 true, // hide predicates
1377 false ); // hide edge taints
// Dump every initial-heap-model (IHM) contribution -- the caller heap
// handed to a callee at each call site -- to a DOT file named after the
// callee, the enclosing caller, and the FlatCall.
1381 private void writeFinalIHMs() {
1382 Iterator d2IHMsItr = mapDescriptorToIHMcontributions.entrySet().iterator();
1383 while( d2IHMsItr.hasNext() ) {
1384 Map.Entry me1 = (Map.Entry) d2IHMsItr.next();
1385 Descriptor d = (Descriptor) me1.getKey();
1386 Hashtable<FlatCall, ReachGraph> IHMs = (Hashtable<FlatCall, ReachGraph>) me1.getValue();
1388 Iterator fc2rgItr = IHMs.entrySet().iterator();
1389 while( fc2rgItr.hasNext() ) {
1390 Map.Entry me2 = (Map.Entry) fc2rgItr.next();
1391 FlatCall fc = (FlatCall) me2.getKey();
1392 ReachGraph rg = (ReachGraph) me2.getValue();
// fc2enclosing maps the call back to its enclosing method/task for naming
1394 rg.writeGraph( "IHMPARTFOR"+d+"FROM"+fc2enclosing.get( fc )+fc,
1395 true, // write labels (variables)
1396 true, // selectively hide intermediate temp vars
// NOTE(review): the two arg comments below are ordered
// "hide reachability" then "prune" -- the opposite of every other
// writeGraph call site in this file; confirm the comments (or the
// argument order) against ReachGraph.writeGraph's signature.
1397 true, // hide reachability altogether
1398 true, // prune unreachable heap regions
1399 true, // hide subset reachability states
1400 false, // hide predicates
1401 true ); // hide edge taints
// Dump the initial analysis context of every descriptor to a DOT file
// named "INITIAL<descriptor>".
1406 private void writeInitialContexts() {
1407 Set entrySet = mapDescriptorToInitialContext.entrySet();
1408 Iterator itr = entrySet.iterator();
1409 while( itr.hasNext() ) {
1410 Map.Entry me = (Map.Entry) itr.next();
1411 Descriptor d = (Descriptor) me.getKey();
1412 ReachGraph rg = (ReachGraph) me.getValue();
1414 rg.writeGraph( "INITIAL"+d,
1415 true, // write labels (variables)
1416 true, // selectively hide intermediate temp vars
1417 true, // prune unreachable heap regions
1418 false, // hide all reachability
1419 true, // hide subset reachability states
1420 true, // hide predicates
1421 false );// hide edge taints
// Fetch the current partial (latest complete) reach graph for a
// method/task descriptor; null if it has not been analyzed yet.
1426 protected ReachGraph getPartial( Descriptor d ) {
1427 return mapDescriptorToCompleteReachGraph.get( d );
// Record a new partial analysis result for descriptor d, optionally
// writing an incremental DOT snapshot when writeAllIncrementalDOTs is on.
1430 protected void setPartial( Descriptor d, ReachGraph rg ) {
1431 mapDescriptorToCompleteReachGraph.put( d, rg );
1433 // when the flag for writing out every partial
1434 // result is set, we should spit out the graph,
1435 // but in order to give it a unique name we need
1436 // to track how many partial results for this
1437 // descriptor we've already written out
1438 if( writeAllIncrementalDOTs ) {
1439 if( !mapDescriptorToNumUpdates.containsKey( d ) ) {
1440 mapDescriptorToNumUpdates.put( d, new Integer( 0 ) );
1442 Integer n = mapDescriptorToNumUpdates.get( d );
// zero-padded counter keeps snapshot filenames in lexical order
1444 rg.writeGraph( d+"COMPLETE"+String.format( "%05d", n ),
1445 true, // write labels (variables)
1446 true, // selectively hide intermediate temp vars
1447 true, // prune unreachable heap regions
1448 false, // hide all reachability
1449 true, // hide subset reachability states
1450 false, // hide predicates
1451 false); // hide edge taints
1453 mapDescriptorToNumUpdates.put( d, n + 1 );
1459 // return just the allocation site associated with one FlatNew node;
// lazily builds the AllocSite (with allocationDepth single-object node IDs
// plus one summary node ID) and registers every ID in mapHrnIdToAllocSite.
1460 protected AllocSite getAllocSiteFromFlatNewPRIVATE( FlatNew fnew ) {
1462 if( !mapFlatNewToAllocSite.containsKey( fnew ) ) {
1463 AllocSite as = AllocSite.factory( allocationDepth,
1465 fnew.getDisjointId(),
1469 // the newest nodes are single objects
1470 for( int i = 0; i < allocationDepth; ++i ) {
1471 Integer id = generateUniqueHeapRegionNodeID();
1472 as.setIthOldest( i, id );
1473 mapHrnIdToAllocSite.put( id, as );
1476 // the oldest node is a summary node
1477 as.setSummary( generateUniqueHeapRegionNodeID() );
1479 mapFlatNewToAllocSite.put( fnew, as );
1482 return mapFlatNewToAllocSite.get( fnew );
// Decide whether a type describes heap memory this analysis must model:
// immutable (primitive-like) types are tracked only when they are arrays.
1486 public static boolean shouldAnalysisTrack( TypeDescriptor type ) {
1487 // don't track primitive types, but an array
1488 // of primitives is heap memory
1489 if( type.isImmutable() ) {
1490 return type.isArray();
1493 // everything else is an object
// Number of methods/tasks scheduled for analysis (size of the work set).
1497 protected int numMethodsAnalyzed() {
1498 return descriptorsToAnalyze.size();
1505 // Take in source entry which is the program's compiled entry and
1506 // create a new analysis entry, a method that takes no parameters
1507 // and appears to allocate the command line arguments and call the
1508 // source entry with them. The purpose of this analysis entry is
1509 // to provide a top-level method context with no parameters left.
1510 protected void makeAnalysisEntryMethod( MethodDescriptor mdSourceEntry ) {
1512 Modifiers mods = new Modifiers();
1513 mods.addModifier( Modifiers.PUBLIC );
1514 mods.addModifier( Modifiers.STATIC );
// fabricated entry returns void and takes no parameters
1516 TypeDescriptor returnType =
1517 new TypeDescriptor( TypeDescriptor.VOID );
1519 this.mdAnalysisEntry =
1520 new MethodDescriptor( mods,
1522 "analysisEntryMethod"
// synthesize the String[] args temp the source entry expects
1525 TempDescriptor cmdLineArgs =
1526 new TempDescriptor( "args",
1527 mdSourceEntry.getParamType( 0 )
// pretend to allocate the command-line args array...
1531 new FlatNew( mdSourceEntry.getParamType( 0 ),
1536 TempDescriptor[] sourceEntryArgs = new TempDescriptor[1];
1537 sourceEntryArgs[0] = cmdLineArgs;
// ...then call the real program entry with it
1540 new FlatCall( mdSourceEntry,
1546 FlatReturnNode frn = new FlatReturnNode( null );
1548 FlatExit fe = new FlatExit();
1550 this.fmAnalysisEntry =
1551 new FlatMethod( mdAnalysisEntry,
1555 this.fmAnalysisEntry.addNext( fn );
// Topologically sort the given descriptors by caller/callee order via DFS
// (see dfsVisit); leaves (callees) end up first so they are analyzed
// before their callers.
1562 protected LinkedList<Descriptor> topologicalSort( Set<Descriptor> toSort ) {
1564 Set<Descriptor> discovered;
1566 if( determinismDesired ) {
1567 // use an ordered set
1568 discovered = new TreeSet<Descriptor>( dComp );
1570 // otherwise use a speedy hashset
1571 discovered = new HashSet<Descriptor>();
1574 LinkedList<Descriptor> sorted = new LinkedList<Descriptor>();
1576 Iterator<Descriptor> itr = toSort.iterator();
1577 while( itr.hasNext() ) {
1578 Descriptor d = itr.next();
1580 if( !discovered.contains( d ) ) {
1581 dfsVisit( d, toSort, sorted, discovered );
1588 // While we're doing DFS on call graph, remember
1589 // dependencies for efficient queuing of methods
1590 // during interprocedural analysis:
1592 // a dependent of a method descriptor d for this analysis is:
1593 // 1) a method or task that invokes d
1594 // 2) in the descriptorsToAnalyze set
1595 protected void dfsVisit( Descriptor d,
1596 Set <Descriptor> toSort,
1597 LinkedList<Descriptor> sorted,
1598 Set <Descriptor> discovered ) {
1599 discovered.add( d );
1601 // only methods have callers, tasks never do
1602 if( d instanceof MethodDescriptor ) {
1604 MethodDescriptor md = (MethodDescriptor) d;
1606 // the call graph is not aware that we have a fabricated
1607 // analysis entry that calls the program source's entry
1608 if( md == mdSourceEntry ) {
1609 if( !discovered.contains( mdAnalysisEntry ) ) {
1610 addDependent( mdSourceEntry, // callee
1611 mdAnalysisEntry // caller
1613 dfsVisit( mdAnalysisEntry, toSort, sorted, discovered );
1617 // otherwise call graph guides DFS
1618 Iterator itr = callGraph.getCallerSet( md ).iterator();
1619 while( itr.hasNext() ) {
1620 Descriptor dCaller = (Descriptor) itr.next();
1622 // only consider callers in the original set to analyze
1623 if( !toSort.contains( dCaller ) ) {
1627 if( !discovered.contains( dCaller ) ) {
1628 addDependent( md, // callee
1632 dfsVisit( dCaller, toSort, sorted, discovered );
// post-order append: callees (leaves) land before their callers
1637 // for leaf-nodes last now!
1638 sorted.addLast( d );
// Schedule a descriptor for (re)analysis, choosing the work-list
// structure selected by compiler flags: a visit stack or a priority
// queue. descriptorsToVisitSet guards against duplicate entries.
1642 protected void enqueue( Descriptor d ) {
1644 if( !descriptorsToVisitSet.contains( d ) ) {
1646 if( state.DISJOINTDVISITSTACK ||
1647 state.DISJOINTDVISITSTACKEESONTOP
1649 descriptorsToVisitStack.add( d );
1651 } else if( state.DISJOINTDVISITPQUE ) {
1652 Integer priority = mapDescriptorToPriority.get( d );
1653 descriptorsToVisitQ.add( new DescriptorQWrapper( priority,
1658 descriptorsToVisitSet.add( d );
1663 // a dependent of a method descriptor d for this analysis is:
1664 // 1) a method or task that invokes d
1665 // 2) in the descriptorsToAnalyze set
// Record caller as a dependent of callee so callers are rescheduled
// when the callee's result changes.
// NOTE(review): the deps.add( caller ) statement is elided from this
// listing; confirm it precedes the put in the full source.
1666 protected void addDependent( Descriptor callee, Descriptor caller ) {
1667 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
1668 if( deps == null ) {
1669 deps = new HashSet<Descriptor>();
1672 mapDescriptorToSetDependents.put( callee, deps );
// Return (lazily creating) the dependent set for a callee; never null.
1675 protected Set<Descriptor> getDependents( Descriptor callee ) {
1676 Set<Descriptor> deps = mapDescriptorToSetDependents.get( callee );
1677 if( deps == null ) {
1678 deps = new HashSet<Descriptor>();
1679 mapDescriptorToSetDependents.put( callee, deps );
// Return (lazily creating) the per-call-site initial-heap-model table
// for descriptor d: one caller ReachGraph per FlatCall. Never null.
1685 public Hashtable<FlatCall, ReachGraph> getIHMcontributions( Descriptor d ) {
1687 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1688 mapDescriptorToIHMcontributions.get( d );
1690 if( heapsFromCallers == null ) {
1691 heapsFromCallers = new Hashtable<FlatCall, ReachGraph>();
1692 mapDescriptorToIHMcontributions.put( d, heapsFromCallers );
1695 return heapsFromCallers;
// Return the heap contribution recorded for one specific call site of d,
// or the table's value for an absent key (branch body elided in listing).
1698 public ReachGraph getIHMcontribution( Descriptor d,
1701 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1702 getIHMcontributions( d );
1704 if( !heapsFromCallers.containsKey( fc ) ) {
1708 return heapsFromCallers.get( fc );
// Record (or overwrite) the caller heap contribution for one call site
// of descriptor d.
1712 public void addIHMcontribution( Descriptor d,
1716 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1717 getIHMcontributions( d );
1719 heapsFromCallers.put( fc, rg );
// Fabricate an allocation site that models the object a parameter points
// to at method entry: builds a synthetic FlatNew (named "param<temp>" in
// one branch, anonymous in the other -- the selecting condition is elided
// from this listing), then registers allocationDepth node IDs plus a
// summary-node ID, mirroring getAllocSiteFromFlatNewPRIVATE.
1723 private AllocSite createParameterAllocSite( ReachGraph rg,
1724 TempDescriptor tempDesc,
1730 flatNew = new FlatNew( tempDesc.getType(), // type
1731 tempDesc, // param temp
1732 false, // global alloc?
1733 "param"+tempDesc // disjoint site ID string
1736 flatNew = new FlatNew( tempDesc.getType(), // type
1737 tempDesc, // param temp
1738 false, // global alloc?
1739 null // disjoint site ID string
1743 // create allocation site
1744 AllocSite as = AllocSite.factory( allocationDepth,
1746 flatNew.getDisjointId(),
// register each single-object node ID with its owning site
1749 for (int i = 0; i < allocationDepth; ++i) {
1750 Integer id = generateUniqueHeapRegionNodeID();
1751 as.setIthOldest(i, id);
1752 mapHrnIdToAllocSite.put(id, as);
1754 // the oldest node is a summary node
1755 as.setSummary( generateUniqueHeapRegionNodeID() );
// Collect the fields of typeDesc's class whose types this analysis
// tracks (see shouldAnalysisTrack); immutable types have no such fields.
1763 private Set<FieldDescriptor> getFieldSetTobeAnalyzed(TypeDescriptor typeDesc){
1765 Set<FieldDescriptor> fieldSet=new HashSet<FieldDescriptor>();
1766 if(!typeDesc.isImmutable()){
1767 ClassDescriptor classDesc = typeDesc.getClassDesc();
1768 for (Iterator it = classDesc.getFields(); it.hasNext();) {
1769 FieldDescriptor field = (FieldDescriptor) it.next();
1770 TypeDescriptor fieldType = field.getType();
1771 if (shouldAnalysisTrack( fieldType )) {
1772 fieldSet.add(field);
// Build the chain of summary heap-region nodes that models a
// multi-dimensional array field fd hanging off srcHRN: one summary node
// per dimension (outermost first), linked by array-element edges, plus an
// optional element-object node when the element class has tracked fields.
// Returns the outermost (entry) node and maps it to the innermost node in
// `map` so callers can attach element fields to the right node.
// NOTE(review): method name misspells "Dimensional"; renaming would change
// the interface, so it is left as-is here.
1780 private HeapRegionNode createMultiDeimensionalArrayHRN(ReachGraph rg, AllocSite alloc, HeapRegionNode srcHRN, FieldDescriptor fd, Hashtable<HeapRegionNode, HeapRegionNode> map, Hashtable<TypeDescriptor, HeapRegionNode> mapToExistingNode, ReachSet alpha ){
1782 int dimCount=fd.getType().getArrayCount();
1783 HeapRegionNode prevNode=null;
1784 HeapRegionNode arrayEntryNode=null;
// walk dimensions from outermost (i == dimCount) down to innermost
1785 for(int i=dimCount;i>0;i--){
1786 TypeDescriptor typeDesc=fd.getType().dereference();//hack to get instance of type desc
1787 typeDesc.setArrayCount(i);
1788 TempDescriptor tempDesc=new TempDescriptor(typeDesc.getSymbol(),typeDesc);
1789 HeapRegionNode hrnSummary ;
1790 if(!mapToExistingNode.containsKey(typeDesc)){
1795 as = createParameterAllocSite(rg, tempDesc, false);
1797 // make a new reference to allocated node
1799 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
1800 false, // single object?
1802 false, // out-of-context?
1803 as.getType(), // type
1804 as, // allocation site
1805 alpha, // inherent reach
1806 alpha, // current reach
1807 ExistPredSet.factory(rg.predTrue), // predicates
1808 tempDesc.toString() // description
1810 rg.id2hrn.put(as.getSummary(),hrnSummary);
1812 mapToExistingNode.put(typeDesc, hrnSummary);
// reuse the summary node already built for this dimension's type
1814 hrnSummary=mapToExistingNode.get(typeDesc);
1818 // make a new reference between new summary node and source
1819 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
1822 fd.getSymbol(), // field name
1824 ExistPredSet.factory(rg.predTrue), // predicates
1828 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
1829 prevNode=hrnSummary;
1830 arrayEntryNode=hrnSummary;
1832 // make a new reference between summary nodes of array
1833 RefEdge edgeToSummary = new RefEdge(prevNode, // source
1836 arrayElementFieldName, // field name
1838 ExistPredSet.factory(rg.predTrue), // predicates
1842 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
1843 prevNode=hrnSummary;
1848 // create a new obj node if obj has at least one non-primitive field
1849 TypeDescriptor type=fd.getType();
1850 if(getFieldSetTobeAnalyzed(type).size()>0){
1851 TypeDescriptor typeDesc=type.dereference();
1852 typeDesc.setArrayCount(0);
1853 if(!mapToExistingNode.containsKey(typeDesc)){
1854 TempDescriptor tempDesc=new TempDescriptor(type.getSymbol(),typeDesc);
1855 AllocSite as = createParameterAllocSite(rg, tempDesc, false);
1856 // make a new reference to allocated node
1857 HeapRegionNode hrnSummary =
1858 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
1859 false, // single object?
1861 false, // out-of-context?
1863 as, // allocation site
1864 alpha, // inherent reach
1865 alpha, // current reach
1866 ExistPredSet.factory(rg.predTrue), // predicates
1867 tempDesc.toString() // description
1869 rg.id2hrn.put(as.getSummary(),hrnSummary);
1870 mapToExistingNode.put(typeDesc, hrnSummary);
1871 RefEdge edgeToSummary = new RefEdge(prevNode, // source
1874 arrayElementFieldName, // field name
1876 ExistPredSet.factory(rg.predTrue), // predicates
1879 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
1880 prevNode=hrnSummary;
1882 HeapRegionNode hrnSummary=mapToExistingNode.get(typeDesc);
// only add the element edge once per (src, type, field) triple
1883 if(prevNode.getReferenceTo(hrnSummary, typeDesc, arrayElementFieldName)==null){
1884 RefEdge edgeToSummary = new RefEdge(prevNode, // source
1887 arrayElementFieldName, // field name
1889 ExistPredSet.factory(rg.predTrue), // predicates
1892 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
1894 prevNode=hrnSummary;
// remember entry -> innermost node so callers can hang fields off it
1898 map.put(arrayEntryNode, prevNode);
1899 return arrayEntryNode;
// Build the initial reach graph for a task: for each task parameter,
// fabricate an allocation site and heap node, point the parameter temp at
// it, then transitively expand tracked fields with a work-set algorithm,
// sharing one summary node per field type (and delegating array-typed
// fields to createMultiDeimensionalArrayHRN).
1902 private ReachGraph createInitialTaskReachGraph(FlatMethod fm) {
1903 ReachGraph rg = new ReachGraph();
1904 TaskDescriptor taskDesc = fm.getTask();
1906 for (int idx = 0; idx < taskDesc.numParameters(); idx++) {
1907 Descriptor paramDesc = taskDesc.getParameter(idx);
1908 TypeDescriptor paramTypeDesc = taskDesc.getParamType(idx);
1910 // setup data structure
1911 Set<HashMap<HeapRegionNode, FieldDescriptor>> workSet =
1912 new HashSet<HashMap<HeapRegionNode, FieldDescriptor>>();
1913 Hashtable<TypeDescriptor, HeapRegionNode> mapTypeToExistingSummaryNode =
1914 new Hashtable<TypeDescriptor, HeapRegionNode>();
1915 Hashtable<HeapRegionNode, HeapRegionNode> mapToFirstDimensionArrayNode =
1916 new Hashtable<HeapRegionNode, HeapRegionNode>();
// doneSet holds "nodeID_field" strings already expanded, to terminate
1917 Set<String> doneSet = new HashSet<String>();
1919 TempDescriptor tempDesc = fm.getParameter(idx);
1921 AllocSite as = createParameterAllocSite(rg, tempDesc, true);
1922 VariableNode lnX = rg.getVariableNodeFromTemp(tempDesc);
1923 Integer idNewest = as.getIthOldest(0);
1924 HeapRegionNode hrnNewest = rg.id2hrn.get(idNewest);
1926 // make a new reference to allocated node
1927 RefEdge edgeNew = new RefEdge(lnX, // source
1929 taskDesc.getParamType(idx), // type
1931 hrnNewest.getAlpha(), // beta
1932 ExistPredSet.factory(rg.predTrue), // predicates
1935 rg.addRefEdge(lnX, hrnNewest, edgeNew);
1937 // set-up a work set for class field
1938 ClassDescriptor classDesc = paramTypeDesc.getClassDesc();
1939 for (Iterator it = classDesc.getFields(); it.hasNext();) {
1940 FieldDescriptor fd = (FieldDescriptor) it.next();
1941 TypeDescriptor fieldType = fd.getType();
1942 if (shouldAnalysisTrack( fieldType )) {
1943 HashMap<HeapRegionNode, FieldDescriptor> newMap = new HashMap<HeapRegionNode, FieldDescriptor>();
1944 newMap.put(hrnNewest, fd);
1945 workSet.add(newMap);
1949 int uniqueIdentifier = 0;
// drain the work set: each item is a single (srcHRN -> field) pair
1950 while (!workSet.isEmpty()) {
1951 HashMap<HeapRegionNode, FieldDescriptor> map = workSet
1953 workSet.remove(map);
1955 Set<HeapRegionNode> key = map.keySet();
1956 HeapRegionNode srcHRN = key.iterator().next();
1957 FieldDescriptor fd = map.get(srcHRN);
1958 TypeDescriptor type = fd.getType();
1959 String doneSetIdentifier = srcHRN.getIDString() + "_" + fd;
1961 if (!doneSet.contains(doneSetIdentifier)) {
1962 doneSet.add(doneSetIdentifier);
1963 if (!mapTypeToExistingSummaryNode.containsKey(type)) {
1964 // create new summary Node
1965 TempDescriptor td = new TempDescriptor("temp"
1966 + uniqueIdentifier, type);
1968 AllocSite allocSite;
1969 if(type.equals(paramTypeDesc)){
1970 //corresponding allocsite has already been created for a parameter variable.
1973 allocSite = createParameterAllocSite(rg, td, false);
1975 String strDesc = allocSite.toStringForDOT()
1977 TypeDescriptor allocType=allocSite.getType();
1979 HeapRegionNode hrnSummary;
// array-typed fields get a full per-dimension node chain
1980 if(allocType.isArray() && allocType.getArrayCount()>0){
1981 hrnSummary=createMultiDeimensionalArrayHRN(rg,allocSite,srcHRN,fd,mapToFirstDimensionArrayNode,mapTypeToExistingSummaryNode,hrnNewest.getAlpha());
1984 rg.createNewHeapRegionNode(allocSite.getSummary(), // id or null to generate a new one
1985 false, // single object?
1987 false, // out-of-context?
1988 allocSite.getType(), // type
1989 allocSite, // allocation site
1990 hrnNewest.getAlpha(), // inherent reach
1991 hrnNewest.getAlpha(), // current reach
1992 ExistPredSet.factory(rg.predTrue), // predicates
1993 strDesc // description
1995 rg.id2hrn.put(allocSite.getSummary(),hrnSummary);
1997 // make a new reference to summary node
1998 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2001 fd.getSymbol(), // field name
2002 hrnNewest.getAlpha(), // beta
2003 ExistPredSet.factory(rg.predTrue), // predicates
2007 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2011 mapTypeToExistingSummaryNode.put(type, hrnSummary);
2013 // set-up a work set for fields of the class
2014 Set<FieldDescriptor> fieldTobeAnalyzed=getFieldSetTobeAnalyzed(type);
2015 for (Iterator iterator = fieldTobeAnalyzed.iterator(); iterator
2017 FieldDescriptor fieldDescriptor = (FieldDescriptor) iterator
2019 HeapRegionNode newDstHRN;
// for arrays, hang fields off the innermost-dimension node
2020 if(mapToFirstDimensionArrayNode.containsKey(hrnSummary)){
2021 // related heap region node already exists.
2022 newDstHRN=mapToFirstDimensionArrayNode.get(hrnSummary);
2024 newDstHRN=hrnSummary;
2026 doneSetIdentifier = newDstHRN.getIDString() + "_" + fieldDescriptor;
2027 if(!doneSet.contains(doneSetIdentifier)){
2028 // add new work item
2029 HashMap<HeapRegionNode, FieldDescriptor> newMap =
2030 new HashMap<HeapRegionNode, FieldDescriptor>();
2031 newMap.put(newDstHRN, fieldDescriptor);
2032 workSet.add(newMap);
2037 // if there exists corresponding summary node
2038 HeapRegionNode hrnDst=mapTypeToExistingSummaryNode.get(type);
2040 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2042 fd.getType(), // type
2043 fd.getSymbol(), // field name
2044 srcHRN.getAlpha(), // beta
2045 ExistPredSet.factory(rg.predTrue), // predicates
2048 rg.addRefEdge(srcHRN, hrnDst, edgeToSummary);
2054 // debugSnapshot(rg, fm, true);
2058 // return all allocation sites in the method (there is one allocation
2059 // site per FlatNew node in a method); built lazily and cached.
2060 private HashSet<AllocSite> getAllocationSiteSet(Descriptor d) {
2061 if( !mapDescriptorToAllocSiteSet.containsKey(d) ) {
2062 buildAllocationSiteSet(d);
2065 return mapDescriptorToAllocSiteSet.get(d);
// Walk the flat IR of a method or task and collect an AllocSite for
// every FlatNew node, caching the result in mapDescriptorToAllocSiteSet.
2069 private void buildAllocationSiteSet(Descriptor d) {
2070 HashSet<AllocSite> s = new HashSet<AllocSite>();
2073 if( d instanceof MethodDescriptor ) {
2074 fm = state.getMethodFlat( (MethodDescriptor) d);
// only methods and tasks reach this point
2076 assert d instanceof TaskDescriptor;
2077 fm = state.getMethodFlat( (TaskDescriptor) d);
2079 pm.analyzeMethod(fm);
2081 // visit every node in this FlatMethod's IR graph
2082 // and make a set of the allocation sites from the
2083 // FlatNew node's visited
2084 HashSet<FlatNode> visited = new HashSet<FlatNode>();
2085 HashSet<FlatNode> toVisit = new HashSet<FlatNode>();
2088 while( !toVisit.isEmpty() ) {
2089 FlatNode n = toVisit.iterator().next();
2091 if( n instanceof FlatNew ) {
2092 s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) );
// enqueue unvisited successors per the program-point analysis pm
2098 for( int i = 0; i < pm.numNext(n); ++i ) {
2099 FlatNode child = pm.getNext(n, i);
2100 if( !visited.contains(child) ) {
2106 mapDescriptorToAllocSiteSet.put(d, s);
// Collect the allocation sites with an explicit disjointness-analysis ID
// in dIn and everything reachable from it through the call graph
// (worklist traversal with a visited set to handle recursion).
2109 private HashSet<AllocSite> getFlaggedAllocationSites(Descriptor dIn) {
2111 HashSet<AllocSite> out = new HashSet<AllocSite>();
2112 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2113 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2117 while (!toVisit.isEmpty()) {
2118 Descriptor d = toVisit.iterator().next();
2122 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2123 Iterator asItr = asSet.iterator();
2124 while (asItr.hasNext()) {
2125 AllocSite as = (AllocSite) asItr.next();
// only sites tagged with a disjoint ID are "flagged"
2126 if (as.getDisjointAnalysisId() != null) {
2131 // enqueue callees of this method to be searched for
2132 // allocation sites also
2133 Set callees = callGraph.getCalleeSet(d);
2134 if (callees != null) {
2135 Iterator methItr = callees.iterator();
2136 while (methItr.hasNext()) {
2137 MethodDescriptor md = (MethodDescriptor) methItr.next();
2139 if (!visited.contains(md)) {
// Collect allocation sites whose class carries flags, starting from a
// task and following the call graph to every reachable method
// (worklist traversal with a visited set to handle recursion).
2150 private HashSet<AllocSite>
2151 getFlaggedAllocationSitesReachableFromTaskPRIVATE(TaskDescriptor td) {
2153 HashSet<AllocSite> asSetTotal = new HashSet<AllocSite>();
2154 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2155 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2159 // traverse this task and all methods reachable from this task
2160 while( !toVisit.isEmpty() ) {
2161 Descriptor d = toVisit.iterator().next();
2165 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2166 Iterator asItr = asSet.iterator();
2167 while( asItr.hasNext() ) {
2168 AllocSite as = (AllocSite) asItr.next();
2169 TypeDescriptor typed = as.getType();
2170 if( typed != null ) {
2171 ClassDescriptor cd = typed.getClassDesc();
// a "flagged" site here means its class declares task flags
2172 if( cd != null && cd.hasFlags() ) {
2178 // enqueue callees of this method to be searched for
2179 // allocation sites also
2180 Set callees = callGraph.getCalleeSet(d);
2181 if( callees != null ) {
2182 Iterator methItr = callees.iterator();
2183 while( methItr.hasNext() ) {
2184 MethodDescriptor md = (MethodDescriptor) methItr.next();
2186 if( !visited.contains(md) ) {
// Expose the set of method/task descriptors this analysis covers.
// NOTE(review): returns the internal set directly; callers must not
// mutate it.
2196 public Set<Descriptor> getDescriptorsToAnalyze() {
2197 return descriptorsToAnalyze;
2202 // get successive captures of the analysis state, use compiler
// flags to control them (see debugSnapshot below for how they are read)
2204 boolean takeDebugSnapshots = false;
// only snapshot while analyzing the method/task with this symbol
2205 String descSymbolDebug = null;
// abort the run after the capture window completes
2206 boolean stopAfterCapture = false;
// counters/window bounds for which visits get captured
2207 int snapVisitCounter = 0;
2208 int snapNodeCounter = 0;
2209 int visitStartCapture = 0;
2210 int numVisitsToCapture = 0;
2213 void debugSnapshot( ReachGraph rg, FlatNode fn, boolean in ) {
2214 if( snapVisitCounter > visitStartCapture + numVisitsToCapture ) {
2222 if( snapVisitCounter >= visitStartCapture ) {
2223 System.out.println( " @@@ snapping visit="+snapVisitCounter+
2224 ", node="+snapNodeCounter+
2228 graphName = String.format( "snap%03d_%04din",
2232 graphName = String.format( "snap%03d_%04dout",
2237 graphName = graphName + fn;
2239 rg.writeGraph( graphName,
2240 true, // write labels (variables)
2241 true, // selectively hide intermediate temp vars
2242 true, // prune unreachable heap regions
2243 false, // hide reachability
2244 true, // hide subset reachability states
2245 true, // hide predicates
2246 false );// hide edge taints