1 package Analysis.Disjoint;
3 import Analysis.CallGraph.*;
4 import Analysis.Liveness;
5 import Analysis.ArrayReferencees;
6 import Analysis.OoOJava.Accessible;
7 import Analysis.OoOJava.RBlockRelationAnalysis;
8 import Analysis.FlatIRGraph.*;
11 import IR.Tree.Modifiers;
16 public class DisjointAnalysis implements HeapAnalysis {
18 ///////////////////////////////////////////
20 // Public interface to discover possible
21 // sharing in the program under analysis
23 ///////////////////////////////////////////
25 // if an object allocated at the target site may be
26 // reachable from both an object from root1 and an
27 // object allocated at root2, return TRUE
// Returns TRUE if an object allocated at the target site may be reachable
// from both an object allocated at root1 AND an object allocated at root2,
// in the context of the given method's final reach graph.
// NOTE(review): this view is elided — the FlatNew parameters (fnRoot1,
// fnRoot2, fnTarget) and closing brace are on lines not shown here.
28 public boolean mayBothReachTarget(FlatMethod fm,
// resolve the analysis alloc-site abstraction for each root FlatNew
33 AllocSite asr1 = getAllocationSiteFromFlatNew(fnRoot1);
34 AllocSite asr2 = getAllocationSiteFromFlatNew(fnRoot2);
// both roots are expected to be client-flagged sites
35 assert asr1.isFlagged();
36 assert asr2.isFlagged();
38 AllocSite ast = getAllocationSiteFromFlatNew(fnTarget);
// query the (possibly partial) reach graph computed for this method
39 ReachGraph rg = getPartial(fm.getMethod() );
41 return rg.mayBothReachTarget(asr1, asr2, ast);
44 // similar to the method above, return TRUE if ever
45 // more than one object from the root allocation site
46 // may reach an object from the target site
// Returns TRUE if more than one object from the single root allocation
// site may reach an object allocated at the target site.
// NOTE(review): elided view — the FlatNew parameters (fnRoot, fnTarget)
// and closing brace are on lines not shown here.
47 public boolean mayManyReachTarget(FlatMethod fm,
// the root must be a client-flagged allocation site
51 AllocSite asr = getAllocationSiteFromFlatNew(fnRoot);
52 assert asr.isFlagged();
54 AllocSite ast = getAllocationSiteFromFlatNew(fnTarget);
// query the reach graph computed for this method
55 ReachGraph rg = getPartial(fm.getMethod() );
57 return rg.mayManyReachTarget(asr, ast);
// Public accessor: the set of flagged allocation sites reachable from the
// given task. Guards against use before the analysis fixed point is done,
// then delegates to the PRIVATE implementation.
63 public HashSet<AllocSite>
64 getFlaggedAllocationSitesReachableFromTask(TaskDescriptor td) {
65 checkAnalysisComplete();
66 return getFlaggedAllocationSitesReachableFromTaskPRIVATE(td);
// Public accessor: map a FlatNew IR node to its analysis AllocSite
// abstraction; only valid after the analysis has completed.
69 public AllocSite getAllocationSiteFromFlatNew(FlatNew fn) {
70 checkAnalysisComplete();
71 return getAllocSiteFromFlatNewPRIVATE(fn);
// Public accessor: look up the AllocSite that created the heap region
// node with the given intergraph ID; returns null if the ID is unknown
// (plain Hashtable.get).
74 public AllocSite getAllocationSiteFromHeapRegionNodeID(Integer id) {
75 checkAnalysisComplete();
76 return mapHrnIdToAllocSite.get(id);
// Sharing query: heap regions potentially shared between two parameters
// (by index) of the given task or method.
// NOTE(review): elided view — the paramIndex1/paramIndex2 parameter lines
// and closing brace are not shown here.
79 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
82 checkAnalysisComplete();
// use the descriptor's settled, complete reach graph for the query
83 ReachGraph rg=mapDescriptorToCompleteReachGraph.get(taskOrMethod);
84 FlatMethod fm=state.getMethodFlat(taskOrMethod);
86 return rg.mayReachSharedObjects(fm, paramIndex1, paramIndex2);
// Sharing query: heap regions potentially shared between a parameter
// (by index) and an allocation site of the given task or method.
89 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
90 int paramIndex, AllocSite alloc) {
91 checkAnalysisComplete();
92 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
93 FlatMethod fm=state.getMethodFlat(taskOrMethod);
95 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Sharing query: same as the (paramIndex, alloc) overload but with the
// arguments in the opposite order for caller convenience; delegates to
// the identical underlying ReachGraph query.
98 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
99 AllocSite alloc, int paramIndex) {
100 checkAnalysisComplete();
101 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
102 FlatMethod fm=state.getMethodFlat(taskOrMethod);
104 return rg.mayReachSharedObjects(fm, paramIndex, alloc);
// Sharing query: heap regions potentially shared between two allocation
// sites in the context of the given task or method.
107 public Set<HeapRegionNode> hasPotentialSharing(Descriptor taskOrMethod,
108 AllocSite alloc1, AllocSite alloc2) {
109 checkAnalysisComplete();
110 ReachGraph rg = mapDescriptorToCompleteReachGraph.get(taskOrMethod);
112 return rg.mayReachSharedObjects(alloc1, alloc2);
// Renders a set of heap region nodes as a human-readable, multi-line
// string for the sharing reports.
// NOTE(review): elided view — the declaration/initialization of the
// accumulator `out`, the branch condition choosing between the two
// formats (presumably `as == null` — confirm), and the closing
// braces/return are on lines not shown here.
115 public String prettyPrintNodeSet(Set<HeapRegionNode> s) {
116 checkAnalysisComplete();
120 Iterator<HeapRegionNode> i = s.iterator();
121 while (i.hasNext()) {
122 HeapRegionNode n = i.next();
124 AllocSite as = n.getAllocSite();
// short form: node only
126 out += " " + n.toString() + ",\n";
// long form: node plus its allocation site, verbosely
128 out += " " + n.toString() + ": " + as.toStringVerbose()
137 // use the methods given above to check every possible sharing class
138 // between task parameters and flagged allocation sites reachable
// Writes a sharing report for every task in the program: checks each
// task's parameters against each other, each parameter against every
// flagged allocation site reachable from the task, and every pair of
// reachable flagged allocation sites. Output is either verbose text or
// a LaTeX-style tabular row, selected by tabularOutput.
// NOTE(review): elided view — additional parameters (timeReport,
// justTime, numLines — inferred from uses below, confirm), several
// closing braces, else-branches, and the numSharing accounting lines
// are not shown here.
140 public void writeAllSharing(String outputFile,
143 boolean tabularOutput,
146 throws java.io.IOException {
147 checkAnalysisComplete();
149 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
151 if (!tabularOutput) {
152 bw.write("Conducting ownership analysis with allocation depth = "
153 + allocationDepth + "\n");
154 bw.write(timeReport + "\n");
159 // look through every task for potential sharing
160 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
161 while (taskItr.hasNext()) {
162 TaskDescriptor td = (TaskDescriptor) taskItr.next();
164 if (!tabularOutput) {
165 bw.write("\n---------" + td + "--------\n");
168 HashSet<AllocSite> allocSites = getFlaggedAllocationSitesReachableFromTask(td);
170 Set<HeapRegionNode> common;
172 // for each task parameter, check for sharing classes with
173 // other task parameters and every allocation site
174 // reachable from this task
175 boolean foundSomeSharing = false;
177 FlatMethod fm = state.getMethodFlat(td);
178 for (int i = 0; i < fm.numParameters(); ++i) {
180 // skip parameters with types that cannot reference
182 if( !shouldAnalysisTrack(fm.getParameter(i).getType() ) ) {
186 // for the ith parameter check for sharing classes to all
187 // higher numbered parameters
188 for (int j = i + 1; j < fm.numParameters(); ++j) {
190 // skip parameters with types that cannot reference
192 if( !shouldAnalysisTrack(fm.getParameter(j).getType() ) ) {
197 common = hasPotentialSharing(td, i, j);
198 if (!common.isEmpty()) {
199 foundSomeSharing = true;
201 if (!tabularOutput) {
202 bw.write("Potential sharing between parameters " + i
203 + " and " + j + ".\n");
204 bw.write(prettyPrintNodeSet(common) + "\n");
209 // for the ith parameter, check for sharing classes against
210 // the set of allocation sites reachable from this
212 Iterator allocItr = allocSites.iterator();
213 while (allocItr.hasNext()) {
214 AllocSite as = (AllocSite) allocItr.next();
215 common = hasPotentialSharing(td, i, as);
216 if (!common.isEmpty()) {
217 foundSomeSharing = true;
219 if (!tabularOutput) {
220 bw.write("Potential sharing between parameter " + i
221 + " and " + as.getFlatNew() + ".\n");
222 bw.write(prettyPrintNodeSet(common) + "\n");
228 // for each allocation site check for sharing classes with
229 // other allocation sites in the context of execution
// outerChecked avoids reporting each unordered pair {as1, as2} twice
231 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
232 Iterator allocItr1 = allocSites.iterator();
233 while (allocItr1.hasNext()) {
234 AllocSite as1 = (AllocSite) allocItr1.next();
236 Iterator allocItr2 = allocSites.iterator();
237 while (allocItr2.hasNext()) {
238 AllocSite as2 = (AllocSite) allocItr2.next();
240 if (!outerChecked.contains(as2)) {
241 common = hasPotentialSharing(td, as1, as2);
243 if (!common.isEmpty()) {
244 foundSomeSharing = true;
246 if (!tabularOutput) {
247 bw.write("Potential sharing between "
248 + as1.getFlatNew() + " and "
249 + as2.getFlatNew() + ".\n");
250 bw.write(prettyPrintNodeSet(common) + "\n");
256 outerChecked.add(as1);
259 if (!foundSomeSharing) {
260 if (!tabularOutput) {
261 bw.write("No sharing between flagged objects in Task " + td
// tabular mode: emit a LaTeX table row (note escaped "\\\\" row break)
269 bw.write(" & " + numSharing + " & " + justTime + " & " + numLines
270 + " & " + numMethodsAnalyzed() + " \\\\\n");
272 bw.write("\nNumber sharing classes: "+numSharing);
280 // this version of writeAllSharing is for Java programs that have no tasks
281 // ***********************************
282 // WARNING: THIS DOES NOT DO THE RIGHT THING, REPORTS 0 ALWAYS!
283 // It should use mayBothReachTarget and mayManyReachTarget like
284 // OoOJava does to query analysis results
285 // ***********************************
// Writes the sharing report for plain Java programs (no tasks): checks
// every pair of flagged allocation sites reachable from main against
// each other. See the WARNING in the comment block above this method:
// this implementation is known to always report 0.
// NOTE(review): elided view — additional parameters (timeReport —
// inferred from use below, confirm), the hasPotentialSharing argument
// lines, closing braces, and the numSharing accounting are not shown.
286 public void writeAllSharingJava(String outputFile,
289 boolean tabularOutput,
292 throws java.io.IOException {
293 checkAnalysisComplete();
299 BufferedWriter bw = new BufferedWriter(new FileWriter(outputFile));
301 bw.write("Conducting disjoint reachability analysis with allocation depth = "
302 + allocationDepth + "\n");
303 bw.write(timeReport + "\n\n");
305 boolean foundSomeSharing = false;
// analyze relative to the program's main method
307 Descriptor d = typeUtil.getMain();
308 HashSet<AllocSite> allocSites = getFlaggedAllocationSites(d);
310 // for each allocation site check for sharing classes with
311 // other allocation sites in the context of execution
// outerChecked avoids reporting each unordered pair twice
313 HashSet<AllocSite> outerChecked = new HashSet<AllocSite>();
314 Iterator allocItr1 = allocSites.iterator();
315 while (allocItr1.hasNext()) {
316 AllocSite as1 = (AllocSite) allocItr1.next();
318 Iterator allocItr2 = allocSites.iterator();
319 while (allocItr2.hasNext()) {
320 AllocSite as2 = (AllocSite) allocItr2.next();
322 if (!outerChecked.contains(as2)) {
323 Set<HeapRegionNode> common = hasPotentialSharing(d,
326 if (!common.isEmpty()) {
327 foundSomeSharing = true;
328 bw.write("Potential sharing between "
329 + as1.getDisjointAnalysisId() + " and "
330 + as2.getDisjointAnalysisId() + ".\n");
331 bw.write(prettyPrintNodeSet(common) + "\n");
337 outerChecked.add(as1);
340 if (!foundSomeSharing) {
341 bw.write("No sharing classes between flagged objects found.\n");
343 bw.write("\nNumber sharing classes: "+numSharing);
346 bw.write("Number of methods analyzed: "+numMethodsAnalyzed()+"\n");
// The alloc-site abstraction for the fabricated command-line-args array
// allocation created by the analysis entry method.
353 public Alloc getCmdLineArgsAlloc() {
354 return getAllocationSiteFromFlatNew( constructedCmdLineArgsNew );
// The alloc-site abstraction for a single fabricated command-line-arg
// String allocation.
356 public Alloc getCmdLineArgAlloc() {
357 return getAllocationSiteFromFlatNew( constructedCmdLineArgNew );
// The alloc-site abstraction for the fabricated byte-array backing a
// command-line-arg String.
359 public Alloc getCmdLineArgBytesAlloc() {
360 return getAllocationSiteFromFlatNew( constructedCmdLineArgBytesNew );
// The alloc-site abstraction modeling runtime allocation of new String
// literals (set up during init()).
362 public Alloc getNewStringLiteralAlloc() {
363 return newStringLiteralAlloc;
366 ///////////////////////////////////////////
368 // end public interface
370 ///////////////////////////////////////////
// Guard used by every public query method: fails hard if results are
// requested before the interprocedural fixed point has settled.
374 protected void checkAnalysisComplete() {
375 if( !analysisComplete ) {
376 throw new Error("Warning: public interface method called while analysis is running.");
// ---- analysis configuration and interprocedural bookkeeping state ----
// NOTE(review): elided view — several field declarations below are split
// across original lines and their name lines are not shown (e.g. the
// PriorityQueue, the calleesToEnqueue set, mapHrnIdToAllocSite,
// mapTypeToArrayField); names are confirmed by their uses elsewhere in
// this file.
385 // run in faster mode, only when bugs wrung out!
386 public static boolean releaseMode;
388 // use command line option to set this, analysis
389 // should attempt to be deterministic
390 public static boolean determinismDesired;
392 // when we want to enforce determinism in the
393 // analysis we need to sort descriptors rather
394 // than toss them in efficient sets, use this
395 public static DescriptorComparator dComp =
396 new DescriptorComparator();
399 // data from the compiler
401 public CallGraph callGraph;
402 public Liveness liveness;
403 public ArrayReferencees arrayReferencees;
404 public RBlockRelationAnalysis rblockRel;
405 public TypeUtil typeUtil;
406 public int allocationDepth;
408 protected boolean doEffectsAnalysis = false;
409 protected EffectsAnalysis effectsAnalysis;
410 protected BuildStateMachines buildStateMachines;
413 // data structure for public interface
414 private Hashtable< Descriptor, HashSet<AllocSite> >
415 mapDescriptorToAllocSiteSet;
418 // for public interface methods to warn that they
419 // are grabbing results during analysis
420 private boolean analysisComplete;
423 // used to identify HeapRegionNode objects
424 // A unique ID equates an object in one
425 // ownership graph with an object in another
426 // graph that logically represents the same
428 // start at 10 and increment to reserve some
429 // IDs for special purposes
430 static protected int uniqueIDcount = 10;
433 // An out-of-scope method created by the
434 // analysis that has no parameters, and
435 // appears to allocate the command line
436 // arguments, then invoke the source code's
437 // main method. The purpose of this is to
438 // provide the analysis with an explicit
439 // top-level context with no parameters
440 protected MethodDescriptor mdAnalysisEntry;
441 protected FlatMethod fmAnalysisEntry;
443 // main method defined by source program
444 protected MethodDescriptor mdSourceEntry;
446 // the set of task and/or method descriptors
447 // reachable in call graph
448 protected Set<Descriptor>
449 descriptorsToAnalyze;
451 // current descriptors to visit in fixed-point
452 // interprocedural analysis, prioritized by
453 // dependency in the call graph
454 protected Stack<Descriptor>
455 descriptorsToVisitStack;
456 protected PriorityQueue<DescriptorQWrapper>
459 // a duplication of the above structure, but
460 // for efficient testing of inclusion
461 protected HashSet<Descriptor>
462 descriptorsToVisitSet;
464 // storage for priorities (doesn't make sense)
465 // to add it to the Descriptor class, just in
467 protected Hashtable<Descriptor, Integer>
468 mapDescriptorToPriority;
470 // when analyzing a method and scheduling more:
471 // remember set of callee's enqueued for analysis
472 // so they can be put on top of the callers in
473 // the stack-visit mode
474 protected Set<Descriptor>
477 // maps a descriptor to its current partial result
478 // from the intraprocedural fixed-point analysis--
479 // then the interprocedural analysis settles, this
480 // mapping will have the final results for each
482 protected Hashtable<Descriptor, ReachGraph>
483 mapDescriptorToCompleteReachGraph;
485 // maps a descriptor to its known dependents: namely
486 // methods or tasks that call the descriptor's method
487 // AND are part of this analysis (reachable from main)
488 protected Hashtable< Descriptor, Set<Descriptor> >
489 mapDescriptorToSetDependents;
491 // if the analysis client wants to flag allocation sites
492 // programmatically, it should provide a set of FlatNew
493 // statements--this may be null if unneeded
494 protected Set<FlatNew> sitesToFlag;
496 // maps each flat new to one analysis abstraction
497 // allocate site object, these exist outside reach graphs
498 protected Hashtable<FlatNew, AllocSite>
499 mapFlatNewToAllocSite;
501 // maps intergraph heap region IDs to intergraph
502 // allocation sites that created them, a redundant
503 // structure for efficiency in some operations
504 protected Hashtable<Integer, AllocSite>
507 // maps a method to its initial heap model (IHM) that
508 // is the set of reachability graphs from every caller
509 // site, all merged together. The reason that we keep
510 // them separate is that any one call site's contribution
511 // to the IHM may changed along the path to the fixed point
512 protected Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >
513 mapDescriptorToIHMcontributions;
515 // additionally, keep a mapping from descriptors to the
516 // merged in-coming initial context, because we want this
517 // initial context to be STRICTLY MONOTONIC
518 protected Hashtable<Descriptor, ReachGraph>
519 mapDescriptorToInitialContext;
521 // make the result for back edges analysis-wide STRICTLY
522 // MONOTONIC as well, but notice we use FlatNode as the
523 // key for this map: in case we want to consider other
524 // nodes as back edge's in future implementations
525 protected Hashtable<FlatNode, ReachGraph>
526 mapBackEdgeToMonotone;
529 public static final String arrayElementFieldName = "___element_";
530 static protected Hashtable<TypeDescriptor, FieldDescriptor>
534 protected boolean suppressOutput;
536 // for controlling DOT file output
537 protected boolean writeFinalDOTs;
538 protected boolean writeAllIncrementalDOTs;
540 // supporting DOT output--when we want to write every
541 // partial method result, keep a tally for generating
543 protected Hashtable<Descriptor, Integer>
544 mapDescriptorToNumUpdates;
546 //map task descriptor to initial task parameter
547 protected Hashtable<Descriptor, ReachGraph>
548 mapDescriptorToReachGraph;
550 protected PointerMethod pm;
552 //Keeps track of all the reach graphs at every program point
553 //DO NOT USE UNLESS YOU REALLY NEED IT
554 static protected Hashtable<FlatNode, ReachGraph> fn2rgAtEnter =
555 new Hashtable<FlatNode, ReachGraph>();
557 static protected Hashtable<FlatNode, ReachGraph> fn2rgAtExit =
558 new Hashtable<FlatNode, ReachGraph>();
// maps each call site to the descriptor whose code contains it
561 private Hashtable<FlatCall, Descriptor> fc2enclosing;
563 Accessible accessible;
566 // we construct an entry method of flat nodes complete
567 // with a new allocation site to model the command line
568 // args creation just for the analysis, so remember that
569 // allocation site. Later in code gen we might want to
570 // know if something is pointing-to to the cmd line args
571 // and we can verify by checking the allocation site field.
572 protected FlatNew constructedCmdLineArgsNew;
573 protected FlatNew constructedCmdLineArgNew;
574 protected FlatNew constructedCmdLineArgBytesNew;
577 // similar to above, the runtime allocates new strings
578 // for literal nodes, so make up an alloc to model that
579 protected TypeDescriptor strLiteralType;
580 protected AllocSite newStringLiteralAlloc;
584 // allocate various structures that are not local
585 // to a single class method--should be done once
// One-time allocation of all analysis-wide containers; called once from
// init(). Chooses ordered vs. hashed containers based on the
// determinism flag, and only allocates the scheduling structure that
// matches the configured visiting mode.
// NOTE(review): elided view — the else-branch braces and some closing
// braces/field-name lines (e.g. the set initialized at original line
// 642, presumably calleesToEnqueue) are not shown here.
586 protected void allocateStructures() {
588 if( determinismDesired ) {
589 // use an ordered set
590 descriptorsToAnalyze = new TreeSet<Descriptor>(dComp);
592 // otherwise use a speedy hashset
593 descriptorsToAnalyze = new HashSet<Descriptor>();
596 mapDescriptorToCompleteReachGraph =
597 new Hashtable<Descriptor, ReachGraph>();
599 mapDescriptorToNumUpdates =
600 new Hashtable<Descriptor, Integer>();
602 mapDescriptorToSetDependents =
603 new Hashtable< Descriptor, Set<Descriptor> >();
605 mapFlatNewToAllocSite =
606 new Hashtable<FlatNew, AllocSite>();
608 mapDescriptorToIHMcontributions =
609 new Hashtable< Descriptor, Hashtable< FlatCall, ReachGraph > >();
611 mapDescriptorToInitialContext =
612 new Hashtable<Descriptor, ReachGraph>();
614 mapBackEdgeToMonotone =
615 new Hashtable<FlatNode, ReachGraph>();
617 mapHrnIdToAllocSite =
618 new Hashtable<Integer, AllocSite>();
620 mapTypeToArrayField =
621 new Hashtable <TypeDescriptor, FieldDescriptor>();
// scheduling structures: stack modes share one stack...
623 if( state.DISJOINTDVISITSTACK ||
624 state.DISJOINTDVISITSTACKEESONTOP
626 descriptorsToVisitStack =
627 new Stack<Descriptor>();
// ...priority-queue mode gets its own queue
630 if( state.DISJOINTDVISITPQUE ) {
631 descriptorsToVisitQ =
632 new PriorityQueue<DescriptorQWrapper>();
635 descriptorsToVisitSet =
636 new HashSet<Descriptor>();
638 mapDescriptorToPriority =
639 new Hashtable<Descriptor, Integer>();
642 new HashSet<Descriptor>();
644 mapDescriptorToAllocSiteSet =
645 new Hashtable<Descriptor, HashSet<AllocSite> >();
647 mapDescriptorToReachGraph =
648 new Hashtable<Descriptor, ReachGraph>();
650 pm = new PointerMethod();
652 fc2enclosing = new Hashtable<FlatCall, Descriptor>();
657 // this analysis generates a disjoint reachability
658 // graph for every reachable method in the program
// Convenience constructor: no BuildStateMachines, output not suppressed.
// NOTE(review): elided view — the tu/cg/l/ar parameter lines (seen in
// the delegated init call) are not shown here.
659 public DisjointAnalysis(State s,
664 Set<FlatNew> sitesToFlag,
665 RBlockRelationAnalysis rra
667 init(s, tu, cg, l, ar, sitesToFlag, rra, null, false);
// Convenience constructor: no BuildStateMachines, caller controls
// whether console output is suppressed.
670 public DisjointAnalysis(State s,
675 Set<FlatNew> sitesToFlag,
676 RBlockRelationAnalysis rra,
677 boolean suppressOutput
679 init(s, tu, cg, l, ar, sitesToFlag, rra, null, suppressOutput);
// Full constructor: all knobs exposed, including BuildStateMachines
// (which also enables the effects analysis path in init()).
682 public DisjointAnalysis(State s,
687 Set<FlatNew> sitesToFlag,
688 RBlockRelationAnalysis rra,
689 BuildStateMachines bsm,
690 boolean suppressOutput
692 init(s, tu, cg, l, ar, sitesToFlag, rra, bsm, suppressOutput);
// Shared constructor body: wires in compiler data, reads configuration
// from State, configures ReachGraph statics, allocates containers, runs
// the interprocedural fixed point (analyzeMethods, inside a try whose
// opening is elided from this view), then emits requested reports/DOTs.
// NOTE(review): elided view — the typeUtil/callGraph/liveness parameter
// lines, try blocks, else branches, and many closing braces are not
// shown here.
695 protected void init(State state,
699 ArrayReferencees arrayReferencees,
700 Set<FlatNew> sitesToFlag,
701 RBlockRelationAnalysis rra,
702 BuildStateMachines bsm,
703 boolean suppressOutput
// results are invalid until the fixed point below settles
706 analysisComplete = false;
709 this.typeUtil = typeUtil;
710 this.callGraph = callGraph;
711 this.liveness = liveness;
712 this.arrayReferencees = arrayReferencees;
713 this.sitesToFlag = sitesToFlag;
714 this.rblockRel = rra;
715 this.suppressOutput = suppressOutput;
716 this.buildStateMachines = bsm;
// an RBlock relation implies the OoOJava effects-analysis path
718 if( rblockRel != null ) {
719 doEffectsAnalysis = true;
720 effectsAnalysis = new EffectsAnalysis();
722 EffectsAnalysis.state = state;
723 EffectsAnalysis.buildStateMachines = buildStateMachines;
725 //note: instead of reachgraph's isAccessible, using the result of accessible analysis
726 //since accessible gives us more accurate results
727 accessible=new Accessible(state, callGraph, rra, liveness);
728 accessible.doAnalysis();
// pull all configuration flags out of the compiler State
731 this.allocationDepth = state.DISJOINTALLOCDEPTH;
732 this.releaseMode = state.DISJOINTRELEASEMODE;
733 this.determinismDesired = state.DISJOINTDETERMINISM;
735 this.writeFinalDOTs = state.DISJOINTWRITEDOTS && !state.DISJOINTWRITEALL;
736 this.writeAllIncrementalDOTs = state.DISJOINTWRITEDOTS && state.DISJOINTWRITEALL;
738 this.takeDebugSnapshots = state.DISJOINTSNAPSYMBOL != null;
739 this.descSymbolDebug = state.DISJOINTSNAPSYMBOL;
740 this.visitStartCapture = state.DISJOINTSNAPVISITTOSTART;
741 this.numVisitsToCapture = state.DISJOINTSNAPNUMVISITS;
742 this.stopAfterCapture = state.DISJOINTSNAPSTOPAFTER;
743 this.snapVisitCounter = 1; // count visits from 1 (user will write 1, means 1st visit)
744 this.snapNodeCounter = 0; // count nodes from 0
// exactly one descriptor-visiting mode must be selected
747 state.DISJOINTDVISITSTACK ||
748 state.DISJOINTDVISITPQUE ||
749 state.DISJOINTDVISITSTACKEESONTOP;
750 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITPQUE);
751 assert !(state.DISJOINTDVISITSTACK && state.DISJOINTDVISITSTACKEESONTOP);
752 assert !(state.DISJOINTDVISITPQUE && state.DISJOINTDVISITSTACKEESONTOP);
754 // set some static configuration for ReachGraphs
755 ReachGraph.allocationDepth = allocationDepth;
756 ReachGraph.typeUtil = typeUtil;
757 ReachGraph.state = state;
759 ReachGraph.debugCallSiteVisitStartCapture
760 = state.DISJOINTDEBUGCALLVISITTOSTART;
762 ReachGraph.debugCallSiteNumVisitsToCapture
763 = state.DISJOINTDEBUGCALLNUMVISITS;
765 ReachGraph.debugCallSiteStopAfter
766 = state.DISJOINTDEBUGCALLSTOPAFTER;
768 ReachGraph.debugCallSiteVisitCounter
769 = 0; // count visits from 1, is incremented before first visit
774 if( suppressOutput ) {
775 System.out.println("* Running disjoint reachability analysis with output suppressed! *");
778 allocateStructures();
780 // model the implicit allocation site for new string literals
781 strLiteralType = new TypeDescriptor( typeUtil.getClass( typeUtil.StringClass ) );
782 TempDescriptor throwAway =
783 new TempDescriptor("stringLiteralTemp_dummy",
786 FlatNew fnStringLiteral =
787 new FlatNew(strLiteralType,
791 newStringLiteralAlloc
792 = getAllocSiteFromFlatNewPRIVATE( fnStringLiteral );
797 double timeStartAnalysis = (double) System.nanoTime();
799 // start interprocedural fixed-point computation
802 } catch( IOException e ) {
803 throw new Error("IO Exception while writing disjointness analysis output.");
806 analysisComplete=true;
// convert elapsed nanoseconds to seconds for the report
808 double timeEndAnalysis = (double) System.nanoTime();
809 double dt = (timeEndAnalysis - timeStartAnalysis)/(Math.pow(10.0, 9.0) );
812 if( sitesToFlag != null ) {
813 treport = String.format("Disjoint reachability analysis flagged %d sites and took %.3f sec.", sitesToFlag.size(), dt);
814 if(sitesToFlag.size()>0) {
815 treport+="\nFlagged sites:"+"\n"+sitesToFlag.toString();
818 treport = String.format("Disjoint reachability analysis took %.3f sec.", dt);
820 String justtime = String.format("%.2f", dt);
821 System.out.println(treport);
// optional DOT / IHM / context dumps, gated by State flags
825 if( writeFinalDOTs && !writeAllIncrementalDOTs ) {
829 if( state.DISJOINTWRITEIHMS && !suppressOutput ) {
833 if( state.DISJOINTWRITEINITCONTEXTS && !suppressOutput ) {
834 writeInitialContexts();
837 if( state.DISJOINTALIASFILE != null && !suppressOutput ) {
// task mode writes the task report; Java mode uses the *Java variant
839 writeAllSharing(state.DISJOINTALIASFILE, treport, justtime, state.DISJOINTALIASTAB, state.lines);
841 writeAllSharingJava(state.DISJOINTALIASFILE,
844 state.DISJOINTALIASTAB,
851 buildStateMachines.writeStateMachines();
854 } catch( IOException e ) {
855 throw new Error("IO Exception while writing disjointness analysis output.");
// Returns whether the interprocedural work list still has descriptors,
// consulting whichever scheduling structure matches the configured
// visiting mode; errors out if no mode is set.
860 protected boolean moreDescriptorsToVisit() {
861 if( state.DISJOINTDVISITSTACK ||
862 state.DISJOINTDVISITSTACKEESONTOP
864 return !descriptorsToVisitStack.isEmpty();
866 } else if( state.DISJOINTDVISITPQUE ) {
867 return !descriptorsToVisitQ.isEmpty();
870 throw new Error("Neither descriptor visiting mode set");
874 // fixed-point computation over the call graph--when a
875 // method's callees are updated, it must be reanalyzed
// Interprocedural driver: seeds the set of reachable tasks/methods
// (task "Bamboo" mode vs. plain Java mode), loads the configured
// scheduling structure, then iterates — reanalyzing a descriptor and,
// when its complete reach graph changes, re-enqueuing its dependents —
// until a fixed point is reached.
// NOTE(review): elided view — mode-selecting conditions, else branches,
// the priority counter `p` declaration/increment, the enqueue calls for
// dNext, and many closing braces are on lines not shown here.
876 protected void analyzeMethods() throws java.io.IOException {
878 // task or non-task (java) mode determines what the roots
879 // of the call chain are, and establishes the set of methods
880 // reachable from the roots that will be analyzed
883 if( !suppressOutput ) {
884 System.out.println("Bamboo mode...");
// task mode: every task is a root of the call chain
887 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
888 while( taskItr.hasNext() ) {
889 TaskDescriptor td = (TaskDescriptor) taskItr.next();
890 if( !descriptorsToAnalyze.contains(td) ) {
891 // add all methods transitively reachable from the
893 descriptorsToAnalyze.add(td);
894 descriptorsToAnalyze.addAll(callGraph.getAllMethods(td) );
899 if( !suppressOutput ) {
900 System.out.println("Java mode...");
903 // add all methods transitively reachable from the
904 // source's main to set for analysis
905 mdSourceEntry = typeUtil.getMain();
906 descriptorsToAnalyze.add(mdSourceEntry);
907 descriptorsToAnalyze.addAll(callGraph.getAllMethods(mdSourceEntry) );
909 // fabricate an empty calling context that will call
910 // the source's main, but call graph doesn't know
911 // about it, so explicitly add it
912 makeAnalysisEntryMethod(mdSourceEntry);
913 descriptorsToAnalyze.add(mdAnalysisEntry);
918 // now, depending on the interprocedural mode for visiting
919 // methods, set up the needed data structures
921 if( state.DISJOINTDVISITPQUE ) {
923 // topologically sort according to the call graph so
924 // leaf calls are last, helps build contexts up first
925 LinkedList<Descriptor> sortedDescriptors =
926 topologicalSort(descriptorsToAnalyze);
928 // add sorted descriptors to priority queue, and duplicate
929 // the queue as a set for efficiently testing whether some
930 // method is marked for analysis
932 Iterator<Descriptor> dItr;
934 // for the priority queue, give items at the head
935 // of the sorted list a low number (highest priority)
936 while( !sortedDescriptors.isEmpty() ) {
937 Descriptor d = sortedDescriptors.removeFirst();
938 mapDescriptorToPriority.put(d, new Integer(p) );
939 descriptorsToVisitQ.add(new DescriptorQWrapper(p, d) );
940 descriptorsToVisitSet.add(d);
944 } else if( state.DISJOINTDVISITSTACK ||
945 state.DISJOINTDVISITSTACKEESONTOP
947 // if we're doing the stack scheme, just throw the root
948 // method or tasks on the stack
950 Iterator taskItr = state.getTaskSymbolTable().getDescriptorsIterator();
951 while( taskItr.hasNext() ) {
952 TaskDescriptor td = (TaskDescriptor) taskItr.next();
953 descriptorsToVisitStack.add(td);
954 descriptorsToVisitSet.add(td);
958 descriptorsToVisitStack.add(mdAnalysisEntry);
959 descriptorsToVisitSet.add(mdAnalysisEntry);
963 throw new Error("Unknown method scheduling mode");
967 // analyze scheduled methods until there are no more to visit
968 while( moreDescriptorsToVisit() ) {
971 if( state.DISJOINTDVISITSTACK ||
972 state.DISJOINTDVISITSTACKEESONTOP
974 d = descriptorsToVisitStack.pop();
976 } else if( state.DISJOINTDVISITPQUE ) {
977 d = descriptorsToVisitQ.poll().getDescriptor();
// keep the mirror membership set in sync with the work structure
980 assert descriptorsToVisitSet.contains(d);
981 descriptorsToVisitSet.remove(d);
983 // because the task or method descriptor just extracted
984 // was in the "to visit" set it either hasn't been analyzed
985 // yet, or some method that it depends on has been
986 // updated. Recompute a complete reachability graph for
987 // this task/method and compare it to any previous result.
988 // If there is a change detected, add any methods/tasks
989 // that depend on this one to the "to visit" set.
991 if( !suppressOutput ) {
992 System.out.println("Analyzing " + d);
995 if( state.DISJOINTDVISITSTACKEESONTOP ) {
996 assert calleesToEnqueue.isEmpty();
999 ReachGraph rg = analyzeMethod(d);
1000 ReachGraph rgPrev = getPartial(d);
1002 if( !rg.equals(rgPrev) ) {
1005 if( state.DISJOINTDEBUGSCHEDULING ) {
1006 System.out.println(" complete graph changed, scheduling callers for analysis:");
1009 // results for d changed, so enqueue dependents
1010 // of d for further analysis
1011 Iterator<Descriptor> depsItr = getDependents(d).iterator();
1012 while( depsItr.hasNext() ) {
1013 Descriptor dNext = depsItr.next();
1016 if( state.DISJOINTDEBUGSCHEDULING ) {
1017 System.out.println(" "+dNext);
1022 // whether or not the method under analysis changed,
1023 // we may have some callees that are scheduled for
1024 // more analysis, and they should go on the top of
1025 // the stack now (in other method-visiting modes they
1026 // are already enqueued at this point
1027 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1028 Iterator<Descriptor> depsItr = calleesToEnqueue.iterator();
1029 while( depsItr.hasNext() ) {
1030 Descriptor dNext = depsItr.next();
1033 calleesToEnqueue.clear();
// Intraprocedural fixed point over one method's flat IR: starting from
// the FlatMethod node, repeatedly (1) merge the reach graphs of a
// node's predecessors, (2) apply that node's transfer function
// (analyzeFlatNode), and (3) if the node's graph changed, re-enqueue its
// successors — until no node changes. The result returned is the merge
// of all FlatReturnNode graphs. Optional debug-snapshot hooks fire when
// this method matches the configured snap symbol.
// NOTE(review): elided view — the FlatMethod/FlatNode local declarations
// (`fm`, `fn`), else branches, snapshot-condition lines, and several
// closing braces are on lines not shown here.
1039 protected ReachGraph analyzeMethod(Descriptor d)
1040 throws java.io.IOException {
1042 // get the flat code for this descriptor
// the fabricated analysis entry has its flat code cached separately
1044 if( d == mdAnalysisEntry ) {
1045 fm = fmAnalysisEntry;
1047 fm = state.getMethodFlat(d);
1049 pm.analyzeMethod(fm);
1051 // intraprocedural work set
1052 Set<FlatNode> flatNodesToVisit = new HashSet<FlatNode>();
1053 flatNodesToVisit.add(fm);
1055 // if determinism is desired by client, shadow the
1056 // set with a queue to make visit order deterministic
1057 Queue<FlatNode> flatNodesToVisitQ = null;
1058 if( determinismDesired ) {
1059 flatNodesToVisitQ = new LinkedList<FlatNode>();
1060 flatNodesToVisitQ.add(fm);
1063 // mapping of current partial results
1064 Hashtable<FlatNode, ReachGraph> mapFlatNodeToReachGraph =
1065 new Hashtable<FlatNode, ReachGraph>();
1067 // the set of return nodes partial results that will be combined as
1068 // the final, conservative approximation of the entire method
1069 HashSet<FlatReturnNode> setReturns = new HashSet<FlatReturnNode>();
// decide whether this is the method the user asked to snapshot
1073 boolean snapThisMethod = false;
1074 if( takeDebugSnapshots && d instanceof MethodDescriptor ) {
1075 MethodDescriptor mdThisMethod = (MethodDescriptor)d;
1076 ClassDescriptor cdThisMethod = mdThisMethod.getClassDesc();
1077 if( cdThisMethod != null ) {
1079 descSymbolDebug.equals( cdThisMethod.getSymbol()+
1081 mdThisMethod.getSymbol()
1088 while( !flatNodesToVisit.isEmpty() ) {
// in deterministic mode the queue dictates visit order;
// the set still tracks membership
1091 if( determinismDesired ) {
1092 assert !flatNodesToVisitQ.isEmpty();
1093 fn = flatNodesToVisitQ.remove();
1095 fn = flatNodesToVisit.iterator().next();
1097 flatNodesToVisit.remove(fn);
1099 // effect transfer function defined by this node,
1100 // then compare it to the old graph at this node
1101 // to see if anything was updated.
1103 ReachGraph rg = new ReachGraph();
1104 TaskDescriptor taskDesc;
// task entry nodes get a special initial graph, cached per task
1105 if(fn instanceof FlatMethod && (taskDesc=((FlatMethod)fn).getTask())!=null) {
1106 if(mapDescriptorToReachGraph.containsKey(taskDesc)) {
1107 // retrieve existing reach graph if it is not first time
1108 rg=mapDescriptorToReachGraph.get(taskDesc);
1110 // create initial reach graph for a task
1111 rg=createInitialTaskReachGraph((FlatMethod)fn);
1113 mapDescriptorToReachGraph.put(taskDesc, rg);
1117 // start by merging all node's parents' graphs
1118 for( int i = 0; i < pm.numPrev(fn); ++i ) {
1119 FlatNode pn = pm.getPrev(fn,i);
1120 if( mapFlatNodeToReachGraph.containsKey(pn) ) {
1121 ReachGraph rgParent = mapFlatNodeToReachGraph.get(pn);
// snapshot the graph before the transfer function...
1127 if( snapThisMethod ) {
1128 debugSnapshot(rg, fn, true);
1132 // modify rg with appropriate transfer function
1133 rg = analyzeFlatNode(d, fm, fn, setReturns, rg);
// ...and after it
1136 if( snapThisMethod ) {
1137 debugSnapshot(rg, fn, false);
1142 // if the results of the new graph are different from
1143 // the current graph at this node, replace the graph
1144 // with the update and enqueue the children
1145 ReachGraph rgPrev = mapFlatNodeToReachGraph.get(fn);
1146 if( !rg.equals(rgPrev) ) {
1147 mapFlatNodeToReachGraph.put(fn, rg);
1149 for( int i = 0; i < pm.numNext(fn); i++ ) {
1150 FlatNode nn = pm.getNext(fn, i);
1152 flatNodesToVisit.add(nn);
1153 if( determinismDesired ) {
1154 flatNodesToVisitQ.add(nn);
1161 // end by merging all return nodes into a complete
1162 // reach graph that represents all possible heap
1163 // states after the flat method returns
1164 ReachGraph completeGraph = new ReachGraph();
1166 assert !setReturns.isEmpty();
1167 Iterator retItr = setReturns.iterator();
1168 while( retItr.hasNext() ) {
1169 FlatReturnNode frn = (FlatReturnNode) retItr.next();
1171 assert mapFlatNodeToReachGraph.containsKey(frn);
1172 ReachGraph rgRet = mapFlatNodeToReachGraph.get(frn);
1174 completeGraph.merge(rgRet);
1178 if( snapThisMethod ) {
1179 // increment that we've visited the debug snap
1180 // method, and reset the node counter
1181 System.out.println(" @@@ debug snap at visit "+snapVisitCounter);
1183 snapNodeCounter = 0;
1185 if( snapVisitCounter == visitStartCapture + numVisitsToCapture &&
1188 System.out.println("!!! Stopping analysis after debug snap captures. !!!");
1194 return completeGraph;
// NOTE(review): this chunk is a line-sampled extraction -- the leading
// integers on each line are baked-in original line numbers and several
// statements (else-branches, closing braces, the FlatNode/ReachGraph
// parameters, and the final 'return rg;') are elided.  Comments below
// document only what the visible code shows; confirm details against
// the full source.
//
// Per-node transfer function: dispatch on the flat node's kind and
// apply that node's abstract semantics to the incoming reach graph,
// recording the graph at node entry in fn2rgAtEnter along the way.
1198 protected ReachGraph
1199 analyzeFlatNode(Descriptor d,
1200 FlatMethod fmContaining,
// (elided between these lines in the full source: the FlatNode 'fn'
//  being analyzed and the incoming ReachGraph 'rg' -- both used below)
1202 HashSet<FlatReturnNode> setRetNodes,
1204 ) throws java.io.IOException {
1207 // any variables that are no longer live should be
1208 // nullified in the graph to reduce edges
1209 //rg.nullifyDeadVars( liveness.getLiveInTemps( fmContaining, fn ) );
1213 FieldDescriptor fld;
1214 TypeDescriptor tdElement;
1215 FieldDescriptor fdElement;
1216 FlatSESEEnterNode sese;
1217 FlatSESEExitNode fsexn;
1219 //Stores the flatnode's reach graph at enter
1220 ReachGraph rgOnEnter = new ReachGraph();
1221 rgOnEnter.merge(rg);
1222 fn2rgAtEnter.put(fn, rgOnEnter);
1226 // use node type to decide what transfer function
1227 // to apply to the reachability graph
1228 switch( fn.kind() ) {
1230 case FKind.FlatGenReachNode: {
// debug aid: dump the current reach graph at a named program point
1231 FlatGenReachNode fgrn = (FlatGenReachNode) fn;
1233 System.out.println(" Generating reach graph for program point: "+fgrn.getGraphName() );
1236 rg.writeGraph("genReach"+fgrn.getGraphName(),
1237 true, // write labels (variables)
1238 false, //true, // selectively hide intermediate temp vars
1239 false, //true, // prune unreachable heap regions
1240 true, // hide reachability altogether
1241 true, // hide subset reachability states
1242 true, // hide predicates
1243 true); //false); // hide edge taints
1247 case FKind.FlatMethod: {
1248 // construct this method's initial heap model (IHM)
1249 // since we're working on the FlatMethod, we know
1250 // the incoming ReachGraph 'rg' is empty
1252 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
1253 getIHMcontributions(d);
1255 Set entrySet = heapsFromCallers.entrySet();
1256 Iterator itr = entrySet.iterator();
1257 while( itr.hasNext() ) {
1258 Map.Entry me = (Map.Entry)itr.next();
1259 FlatCall fc = (FlatCall) me.getKey();
1260 ReachGraph rgContrib = (ReachGraph) me.getValue();
1262 assert fc.getMethod().equals(d);
1264 rg.merge(rgContrib);
1267 // additionally, we are enforcing STRICT MONOTONICITY for the
1268 // method's initial context, so grow the context by whatever
1269 // the previously computed context was, and put the most
1270 // up-to-date context back in the map
1271 ReachGraph rgPrevContext = mapDescriptorToInitialContext.get(d);
1272 rg.merge(rgPrevContext);
1273 mapDescriptorToInitialContext.put(d, rg);
1277 case FKind.FlatOpNode:
// x = y (plain assignment); only ASSIGN ops touch the heap model
1278 FlatOpNode fon = (FlatOpNode) fn;
1279 if( fon.getOp().getOp() == Operation.ASSIGN ) {
1280 lhs = fon.getDest();
1281 rhs = fon.getLeft();
1283 // before transfer, do effects analysis support
1284 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1285 if(rblockRel.isPotentialStallSite(fn)) {
1286 // x gets status of y
1287 // if(!rg.isAccessible(rhs)){
1288 if(!accessible.isAccessible(fn, rhs)) {
1289 rg.makeInaccessible(lhs);
1295 rg.assignTempXEqualToTempY(lhs, rhs);
1299 case FKind.FlatCastNode:
// x = (T) y -- like assignment, but the graph records the cast type
1300 FlatCastNode fcn = (FlatCastNode) fn;
1304 TypeDescriptor td = fcn.getType();
1307 // before transfer, do effects analysis support
1308 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1309 if(rblockRel.isPotentialStallSite(fn)) {
1310 // x gets status of y
1311 // if(!rg.isAccessible(rhs)){
1312 if(!accessible.isAccessible(fn,rhs)) {
1313 rg.makeInaccessible(lhs);
1319 rg.assignTempXEqualToCastedTempY(lhs, rhs, td);
1322 case FKind.FlatFieldNode:
// x = y.f (field read)
1323 FlatFieldNode ffn = (FlatFieldNode) fn;
1327 fld = ffn.getField();
1329 // before graph transform, possibly inject
1330 // a stall-site taint
1331 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1333 if(rblockRel.isPotentialStallSite(fn)) {
1334 // x=y.f, stall y if not accessible
1335 // contributes read effects on stall site of y
1336 // if(!rg.isAccessible(rhs)) {
1337 if(!accessible.isAccessible(fn,rhs)) {
1338 rg.taintStallSite(fn, rhs);
1341 // after this, x and y are accessible.
1342 rg.makeAccessible(lhs);
1343 rg.makeAccessible(rhs);
1347 if( shouldAnalysisTrack(fld.getType() ) ) {
1349 rg.assignTempXEqualToTempYFieldF(lhs, rhs, fld, fn);
1352 // after transfer, use updated graph to
1353 // do effects analysis
1354 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1355 effectsAnalysis.analyzeFlatFieldNode(rg, rhs, fld, fn);
1359 case FKind.FlatSetFieldNode:
// x.f = y (field write); may allow a strong (destructive) update
1360 FlatSetFieldNode fsfn = (FlatSetFieldNode) fn;
1362 lhs = fsfn.getDst();
1363 fld = fsfn.getField();
1364 rhs = fsfn.getSrc();
1366 boolean strongUpdate = false;
1368 // before transfer func, possibly inject
1369 // stall-site taints
1370 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1372 if(rblockRel.isPotentialStallSite(fn)) {
1373 // x.y=f , stall x and y if they are not accessible
1374 // also contribute write effects on stall site of x
1375 // if(!rg.isAccessible(lhs)) {
1376 if(!accessible.isAccessible(fn,lhs)) {
1377 rg.taintStallSite(fn, lhs);
1380 // if(!rg.isAccessible(rhs)) {
1381 if(!accessible.isAccessible(fn,rhs)) {
1382 rg.taintStallSite(fn, rhs);
1385 // accessible status update
1386 rg.makeAccessible(lhs);
1387 rg.makeAccessible(rhs);
1391 if( shouldAnalysisTrack(fld.getType() ) ) {
1393 strongUpdate = rg.assignTempXFieldFEqualToTempY(lhs, fld, rhs, fn);
1396 // use transformed graph to do effects analysis
1397 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1398 effectsAnalysis.analyzeFlatSetFieldNode(rg, lhs, fld, fn, strongUpdate);
1402 case FKind.FlatElementNode:
// x = y[i] -- modeled as a read of the synthetic array-element field
1403 FlatElementNode fen = (FlatElementNode) fn;
1408 assert rhs.getType() != null;
1409 assert rhs.getType().isArray();
1411 tdElement = rhs.getType().dereference();
1412 fdElement = getArrayField(tdElement);
1414 // before transfer func, possibly inject
1416 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1417 if(rblockRel.isPotentialStallSite(fn)) {
1418 // x=y.f, stall y if not accessible
1419 // contributes read effects on stall site of y
1420 // after this, x and y are accessible.
1421 // if(!rg.isAccessible(rhs)) {
1422 if(!accessible.isAccessible(fn,rhs)) {
1423 rg.taintStallSite(fn, rhs);
1426 rg.makeAccessible(lhs);
1427 rg.makeAccessible(rhs);
1431 if( shouldAnalysisTrack(lhs.getType() ) ) {
1433 rg.assignTempXEqualToTempYFieldF(lhs, rhs, fdElement, fn);
1436 // use transformed graph to do effects analysis
1437 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1438 effectsAnalysis.analyzeFlatFieldNode(rg, rhs, fdElement, fn);
1442 case FKind.FlatSetElementNode:
// x[i] = y -- modeled as a write of the synthetic array-element field
1443 FlatSetElementNode fsen = (FlatSetElementNode) fn;
1445 lhs = fsen.getDst();
1446 rhs = fsen.getSrc();
1448 assert lhs.getType() != null;
1449 assert lhs.getType().isArray();
1451 tdElement = lhs.getType().dereference();
1452 fdElement = getArrayField(tdElement);
1454 // before transfer func, possibly inject
1455 // stall-site taints
1456 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1458 if(rblockRel.isPotentialStallSite(fn)) {
1459 // x.y=f , stall x and y if they are not accessible
1460 // also contribute write effects on stall site of x
1461 // if(!rg.isAccessible(lhs)) {
1462 if(!accessible.isAccessible(fn,lhs)) {
1463 rg.taintStallSite(fn, lhs);
1466 // if(!rg.isAccessible(rhs)) {
1467 if(!accessible.isAccessible(fn,rhs)) {
1468 rg.taintStallSite(fn, rhs);
1471 // accessible status update
1472 rg.makeAccessible(lhs);
1473 rg.makeAccessible(rhs);
1477 if( shouldAnalysisTrack(rhs.getType() ) ) {
1478 // transfer func, BUT
1479 // skip this node if it cannot create new reachability paths
1480 if( !arrayReferencees.doesNotCreateNewReaching(fsen) ) {
1481 rg.assignTempXFieldFEqualToTempY(lhs, fdElement, rhs, fn);
1485 // use transformed graph to do effects analysis
1486 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1487 effectsAnalysis.analyzeFlatSetFieldNode(rg, lhs, fdElement, fn,
// (elided: remaining argument(s) of this call -- likely the
//  strong-update flag, matching the FlatSetFieldNode case above)
1493 FlatNew fnn = (FlatNew) fn;
// x = new T(): bind lhs to a fresh allocation-site abstraction
1495 if( shouldAnalysisTrack(lhs.getType() ) ) {
1496 AllocSite as = getAllocSiteFromFlatNewPRIVATE(fnn);
1498 // before transform, support effects analysis
1499 if (doEffectsAnalysis && fmContaining != fmAnalysisEntry) {
1500 if (rblockRel.isPotentialStallSite(fn)) {
1501 // after creating new object, lhs is accessible
1502 rg.makeAccessible(lhs);
1507 rg.assignTempEqualToNewAlloc(lhs, as);
1512 case FKind.FlatLiteralNode:
1513 // BIG NOTE: this transfer function is only here for
1514 // points-to information for String literals. That's it.
1515 // Effects and disjoint reachability and all of that don't
1516 // care about references to literals.
1517 FlatLiteralNode fln = (FlatLiteralNode) fn;
1519 if( fln.getType().equals( strLiteralType ) ) {
1520 rg.assignTempEqualToNewAlloc( fln.getDst(),
1521 newStringLiteralAlloc );
1526 case FKind.FlatSESEEnterNode:
// entering an rblock/SESE: reset stall taints, taint in-set vars
1527 sese = (FlatSESEEnterNode) fn;
1529 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1531 // always remove ALL stall site taints at enter
1532 rg.removeAllStallSiteTaints();
1534 // inject taints for in-set vars
1535 rg.taintInSetVars(sese);
1540 case FKind.FlatSESEExitNode:
1541 fsexn = (FlatSESEExitNode) fn;
1542 sese = fsexn.getFlatEnter();
1544 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1546 // @ sese exit make all live variables
1547 // inaccessible to later parent statements
1548 rg.makeInaccessible(liveness.getLiveInTemps(fmContaining, fn) );
1550 // always remove ALL stall site taints at exit
1551 rg.removeAllStallSiteTaints();
1553 // remove in-set var taints for the exiting rblock
1554 rg.removeInContextTaints(sese);
1559 case FKind.FlatCall: {
// call site: publish the callee's IHM contribution, then merge the
// call transform over every possible (virtual) callee
1560 Descriptor mdCaller;
1561 if( fmContaining.getMethod() != null ) {
1562 mdCaller = fmContaining.getMethod();
1564 mdCaller = fmContaining.getTask();
1566 FlatCall fc = (FlatCall) fn;
1567 MethodDescriptor mdCallee = fc.getMethod();
1568 FlatMethod fmCallee = state.getMethodFlat(mdCallee);
1572 // all this jimma jamma to debug call sites is WELL WORTH the
1573 // effort, so many bugs or buggy info goes crazy through call
1575 boolean debugCallSite = false;
1576 if( state.DISJOINTDEBUGCALLEE != null &&
1577 state.DISJOINTDEBUGCALLER != null ) {
1579 boolean debugCalleeMatches = false;
1580 boolean debugCallerMatches = false;
1582 ClassDescriptor cdCallee = mdCallee.getClassDesc();
1583 if( cdCallee != null ) {
1584 debugCalleeMatches =
1585 state.DISJOINTDEBUGCALLEE.equals( cdCallee.getSymbol()+
1587 mdCallee.getSymbol()
1592 if( mdCaller instanceof MethodDescriptor ) {
1593 ClassDescriptor cdCaller = ((MethodDescriptor)mdCaller).getClassDesc();
1594 if( cdCaller != null ) {
1595 debugCallerMatches =
1596 state.DISJOINTDEBUGCALLER.equals( cdCaller.getSymbol()+
1598 mdCaller.getSymbol()
1602 // for bristlecone style tasks
1603 debugCallerMatches =
1604 state.DISJOINTDEBUGCALLER.equals( mdCaller.getSymbol() );
1607 debugCallSite = debugCalleeMatches && debugCallerMatches;
1613 boolean writeDebugDOTs = false;
1614 boolean stopAfter = false;
1615 if( debugCallSite ) {
1616 ++ReachGraph.debugCallSiteVisitCounter;
1617 System.out.println(" $$$ Debug call site visit "+
1618 ReachGraph.debugCallSiteVisitCounter+
1622 (ReachGraph.debugCallSiteVisitCounter >=
1623 ReachGraph.debugCallSiteVisitStartCapture) &&
1625 (ReachGraph.debugCallSiteVisitCounter <
1626 ReachGraph.debugCallSiteVisitStartCapture +
1627 ReachGraph.debugCallSiteNumVisitsToCapture)
1629 writeDebugDOTs = true;
1630 System.out.println(" $$$ Capturing this call site visit $$$");
1631 if( ReachGraph.debugCallSiteStopAfter &&
1632 (ReachGraph.debugCallSiteVisitCounter ==
1633 ReachGraph.debugCallSiteVisitStartCapture +
1634 ReachGraph.debugCallSiteNumVisitsToCapture - 1)
1642 // calculate the heap this call site can reach--note this is
1643 // not used for the current call site transform, we are
1644 // grabbing this heap model for future analysis of the callees,
1645 // so if different results emerge we will return to this site
1646 ReachGraph heapForThisCall_old =
1647 getIHMcontribution(mdCallee, fc);
1649 // the computation of the callee-reachable heap
1650 // is useful for making the callee starting point
1651 // and for applying the call site transfer function
1652 Set<Integer> callerNodeIDsCopiedToCallee =
1653 new HashSet<Integer>();
1655 ReachGraph heapForThisCall_cur =
1656 rg.makeCalleeView(fc,
1658 callerNodeIDsCopiedToCallee,
1662 // enforce that a call site contribution can only
1663 // monotonically increase
1664 heapForThisCall_cur.merge(heapForThisCall_old);
1666 if( !heapForThisCall_cur.equals(heapForThisCall_old) ) {
1667 // if heap at call site changed, update the contribution,
1668 // and reschedule the callee for analysis
1669 addIHMcontribution(mdCallee, fc, heapForThisCall_cur);
1671 // map a FlatCall to its enclosing method/task descriptor
1672 // so we can write that info out later
1673 fc2enclosing.put(fc, mdCaller);
1675 if( state.DISJOINTDEBUGSCHEDULING ) {
1676 System.out.println(" context changed, scheduling callee: "+mdCallee);
1679 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1680 calleesToEnqueue.add(mdCallee);
1687 // the transformation for a call site should update the
1688 // current heap abstraction with any effects from the callee,
1689 // or if the method is virtual, the effects from any possible
1690 // callees, so find the set of callees...
1691 Set<MethodDescriptor> setPossibleCallees;
1692 if( determinismDesired ) {
1693 // use an ordered set
1694 setPossibleCallees = new TreeSet<MethodDescriptor>(dComp);
1696 // otherwise use a speedy hashset
1697 setPossibleCallees = new HashSet<MethodDescriptor>();
1700 if( mdCallee.isStatic() ) {
1701 setPossibleCallees.add(mdCallee);
1703 TypeDescriptor typeDesc = fc.getThis().getType();
1704 setPossibleCallees.addAll(callGraph.getMethods(mdCallee,
1709 ReachGraph rgMergeOfPossibleCallers = new ReachGraph();
1711 Iterator<MethodDescriptor> mdItr = setPossibleCallees.iterator();
1712 while( mdItr.hasNext() ) {
1713 MethodDescriptor mdPossible = mdItr.next();
1714 FlatMethod fmPossible = state.getMethodFlat(mdPossible);
1716 addDependent(mdPossible, // callee
1719 // don't alter the working graph (rg) until we compute a
1720 // result for every possible callee, merge them all together,
1721 // then set rg to that
1722 ReachGraph rgPossibleCaller = new ReachGraph();
1723 rgPossibleCaller.merge(rg);
1725 ReachGraph rgPossibleCallee = getPartial(mdPossible);
1727 if( rgPossibleCallee == null ) {
1728 // if this method has never been analyzed just schedule it
1729 // for analysis and skip over this call site for now
1730 if( state.DISJOINTDVISITSTACKEESONTOP ) {
1731 calleesToEnqueue.add(mdPossible);
1733 enqueue(mdPossible);
1736 if( state.DISJOINTDEBUGSCHEDULING ) {
1737 System.out.println(" callee hasn't been analyzed, scheduling: "+mdPossible);
1741 // calculate the method call transform
1742 rgPossibleCaller.resolveMethodCall(fc,
1745 callerNodeIDsCopiedToCallee,
1749 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1750 // if( !rgPossibleCallee.isAccessible( ReachGraph.tdReturn ) ) {
1751 if( !accessible.isAccessible(fn, ReachGraph.tdReturn) ) {
1752 rgPossibleCaller.makeInaccessible(fc.getReturnTemp() );
1758 rgMergeOfPossibleCallers.merge(rgPossibleCaller);
1763 System.out.println("$$$ Exiting after requested captures of call site. $$$");
1768 // now that we've taken care of building heap models for
1769 // callee analysis, finish this transformation
1770 rg = rgMergeOfPossibleCallers;
1773 // jjenista: what is this? It breaks compilation
1774 // of programs with no tasks/SESEs/rblocks...
1775 //XXXXXXXXXXXXXXXXXXXXXXXXX
1776 //need to consider more
1777 if( state.OOOJAVA ) {
1778 FlatNode nextFN=fmCallee.getNext(0);
1779 if( nextFN instanceof FlatSESEEnterNode ) {
1780 FlatSESEEnterNode calleeSESE=(FlatSESEEnterNode)nextFN;
1781 if(!calleeSESE.getIsLeafSESE()) {
1782 rg.makeInaccessible(liveness.getLiveInTemps(fmContaining, fn) );
1790 case FKind.FlatReturnNode:
// return y: bind the special return temp, remember this exit node
1791 FlatReturnNode frn = (FlatReturnNode) fn;
1792 rhs = frn.getReturnTemp();
1794 // before transfer, do effects analysis support
1795 if( doEffectsAnalysis && fmContaining != fmAnalysisEntry ) {
1796 // if(!rg.isAccessible(rhs)){
1797 if(!accessible.isAccessible(fn,rhs)) {
1798 rg.makeInaccessible(ReachGraph.tdReturn);
1802 if( rhs != null && shouldAnalysisTrack(rhs.getType() ) ) {
1803 rg.assignReturnEqualToTemp(rhs);
1806 setRetNodes.add(frn);
1812 // dead variables were removed before the above transfer function
1813 // was applied, so eliminate heap regions and edges that are no
1814 // longer part of the abstractly-live heap graph, and sweep up
1815 // and reachability effects that are altered by the reduction
1816 //rg.abstractGarbageCollect();
1820 // back edges are strictly monotonic
1821 if( pm.isBackEdge(fn) ) {
1822 ReachGraph rgPrevResult = mapBackEdgeToMonotone.get(fn);
1823 rg.merge(rgPrevResult);
1824 mapBackEdgeToMonotone.put(fn, rg);
1828 ReachGraph rgOnExit = new ReachGraph();
// (elided: presumably rgOnExit.merge(rg) before recording -- confirm)
1830 fn2rgAtExit.put(fn, rgOnExit);
1833 // at this point rg should be the correct update
1834 // by an above transfer function, or untouched if
1835 // the flat node type doesn't affect the heap
1841 // this method should generate integers strictly greater than zero!
1842 // special "shadow" regions are made from a heap region by negating
// the ID, so zero must never be issued.
// NOTE(review): extraction gap -- the statement that increments
// uniqueIDcount (orig. line 1845) is not visible here; confirm the
// pre-increment against the full source.
1844 static public Integer generateUniqueHeapRegionNodeID() {
1846 return new Integer(uniqueIDcount);
// Returns the synthetic FieldDescriptor used to model the contents of
// an array of element type 'tdElement'; lazily creates and caches one
// per element type in mapTypeToArrayField.
1851 static public FieldDescriptor getArrayField(TypeDescriptor tdElement) {
1852 FieldDescriptor fdElement = mapTypeToArrayField.get(tdElement);
1853 if( fdElement == null ) {
1854 fdElement = new FieldDescriptor(new Modifiers(Modifiers.PUBLIC),
1856 arrayElementFieldName,
1859 mapTypeToArrayField.put(tdElement, fdElement);
// Writes a DOT graph for every descriptor's complete (end-of-analysis)
// reach graph; task graphs are prefixed "COMPLETEtask", methods
// "COMPLETE".  (The 'graphName' declaration and else-branch lines are
// elided in this extraction.)
1866 private void writeFinalGraphs() {
1867 Set entrySet = mapDescriptorToCompleteReachGraph.entrySet();
1868 Iterator itr = entrySet.iterator();
1869 while( itr.hasNext() ) {
1870 Map.Entry me = (Map.Entry)itr.next();
1871 Descriptor d = (Descriptor) me.getKey();
1872 ReachGraph rg = (ReachGraph) me.getValue();
1875 if( d instanceof TaskDescriptor ) {
1876 graphName = "COMPLETEtask"+d;
1878 graphName = "COMPLETE"+d;
1881 rg.writeGraph(graphName,
1882 true, // write labels (variables)
1883 true, // selectively hide intermediate temp vars
1884 true, // prune unreachable heap regions
1885 true, // hide reachability altogether
1886 true, // hide subset reachability states
1887 true, // hide predicates
1888 false); // hide edge taints
// Writes a DOT graph for each per-call-site initial-heap-model (IHM)
// contribution recorded in mapDescriptorToIHMcontributions.
1892 private void writeFinalIHMs() {
1893 Iterator d2IHMsItr = mapDescriptorToIHMcontributions.entrySet().iterator();
1894 while( d2IHMsItr.hasNext() ) {
1895 Map.Entry me1 = (Map.Entry)d2IHMsItr.next();
1896 Descriptor d = (Descriptor) me1.getKey();
1897 Hashtable<FlatCall, ReachGraph> IHMs = (Hashtable<FlatCall, ReachGraph>)me1.getValue();
1899 Iterator fc2rgItr = IHMs.entrySet().iterator();
1900 while( fc2rgItr.hasNext() ) {
1901 Map.Entry me2 = (Map.Entry)fc2rgItr.next();
1902 FlatCall fc = (FlatCall) me2.getKey();
1903 ReachGraph rg = (ReachGraph) me2.getValue();
1905 rg.writeGraph("IHMPARTFOR"+d+"FROM"+fc2enclosing.get(fc)+fc,
1906 true, // write labels (variables)
1907 true, // selectively hide intermediate temp vars
// NOTE(review): the two labels below were swapped relative to every
// other writeGraph call site in this file (3rd arg = prune, 4th arg =
// hide reachability); labels corrected to match, values unchanged.
1908 true, // prune unreachable heap regions
1909 true, // hide reachability altogether
1910 true, // hide subset reachability states
1911 false, // hide predicates
1912 true); // hide edge taints
// Writes a DOT graph of the merged initial context (the strictly
// monotone starting heap) computed for each analyzed descriptor.
1917 private void writeInitialContexts() {
1918 Set entrySet = mapDescriptorToInitialContext.entrySet();
1919 Iterator itr = entrySet.iterator();
1920 while( itr.hasNext() ) {
1921 Map.Entry me = (Map.Entry)itr.next();
1922 Descriptor d = (Descriptor) me.getKey();
1923 ReachGraph rg = (ReachGraph) me.getValue();
1925 rg.writeGraph("INITIAL"+d,
1926 true, // write labels (variables)
1927 true, // selectively hide intermediate temp vars
1928 true, // prune unreachable heap regions
1929 false, // hide all reachability
1930 true, // hide subset reachability states
1931 true, // hide predicates
1932 false); // hide edge taints
// Returns the most recent whole-method result graph for 'd', or null
// if that descriptor has not completed an analysis pass yet.
1937 protected ReachGraph getPartial(Descriptor d) {
1938 return mapDescriptorToCompleteReachGraph.get(d);
// Records the latest whole-method result graph for 'd', and -- when
// writeAllIncrementalDOTs is set -- dumps a uniquely numbered DOT
// snapshot per update.  ('graphName' declaration / else lines elided.)
1941 protected void setPartial(Descriptor d, ReachGraph rg) {
1942 mapDescriptorToCompleteReachGraph.put(d, rg);
1944 // when the flag for writing out every partial
1945 // result is set, we should spit out the graph,
1946 // but in order to give it a unique name we need
1947 // to track how many partial results for this
1948 // descriptor we've already written out
1949 if( writeAllIncrementalDOTs ) {
1950 if( !mapDescriptorToNumUpdates.containsKey(d) ) {
1951 mapDescriptorToNumUpdates.put(d, new Integer(0) );
1953 Integer n = mapDescriptorToNumUpdates.get(d);
1956 if( d instanceof TaskDescriptor ) {
1957 graphName = d+"COMPLETEtask"+String.format("%05d", n);
1959 graphName = d+"COMPLETE"+String.format("%05d", n);
1962 rg.writeGraph(graphName,
1963 true, // write labels (variables)
1964 true, // selectively hide intermediate temp vars
1965 true, // prune unreachable heap regions
1966 false, // hide all reachability
1967 true, // hide subset reachability states
1968 false, // hide predicates
1969 false); // hide edge taints
1971 mapDescriptorToNumUpdates.put(d, n + 1);
1977 // return just the allocation site associated with one FlatNew node
// Lazily builds the AllocSite abstraction: 'allocationDepth' single-
// object regions plus one summary region, each registered in
// mapHrnIdToAllocSite; results are memoized in mapFlatNewToAllocSite.
1978 protected AllocSite getAllocSiteFromFlatNewPRIVATE(FlatNew fnew) {
1980 boolean flagProgrammatically = false;
1981 if( sitesToFlag != null && sitesToFlag.contains(fnew) ) {
1982 flagProgrammatically = true;
1985 if( !mapFlatNewToAllocSite.containsKey(fnew) ) {
1986 AllocSite as = AllocSite.factory(allocationDepth,
1988 fnew.getDisjointId(),
1989 flagProgrammatically
1992 // the newest nodes are single objects
1993 for( int i = 0; i < allocationDepth; ++i ) {
1994 Integer id = generateUniqueHeapRegionNodeID();
1995 as.setIthOldest(i, id);
1996 mapHrnIdToAllocSite.put(id, as);
1999 // the oldest node is a summary node
2000 as.setSummary(generateUniqueHeapRegionNodeID() );
2002 mapFlatNewToAllocSite.put(fnew, as);
2005 return mapFlatNewToAllocSite.get(fnew);
// True when the analysis must model references of this type: any
// non-immutable (object) type, or an array even of primitives.
// (The final 'return true;' is elided from this extraction.)
2009 public static boolean shouldAnalysisTrack(TypeDescriptor type) {
2010 // don't track primitive types, but an array
2011 // of primitives is heap memory
2012 if( type.isImmutable() ) {
2013 return type.isArray();
2016 // everything else is an object
// Number of descriptors (methods/tasks) in the analysis work set.
2020 protected int numMethodsAnalyzed() {
2021 return descriptorsToAnalyze.size();
2027 // Take in source entry which is the program's compiled entry and
2028 // create a new analysis entry, a method that takes no parameters
2029 // and appears to allocate the command line arguments and call the
2030 // source entry with them. The purpose of this analysis entry is
2031 // to provide a top-level method context with no parameters left.
// NOTE(review): many construction lines (FlatNew/FlatLiteralNode/
// FlatCall argument tails, several nodes.add(...) calls) are elided in
// this extraction; the visible skeleton builds args array -> one arg
// String -> its byte payload, wires them with set-element/set-field
// nodes, then chains all flat nodes linearly from the method entry.
2032 protected void makeAnalysisEntryMethod(MethodDescriptor mdSourceEntry) {
2034 Modifiers mods = new Modifiers();
2035 mods.addModifier(Modifiers.PUBLIC);
2036 mods.addModifier(Modifiers.STATIC);
2038 TypeDescriptor returnType = new TypeDescriptor(TypeDescriptor.VOID);
2040 this.mdAnalysisEntry =
2041 new MethodDescriptor(mods,
2043 "analysisEntryMethod"
2046 TypeDescriptor argsType = mdSourceEntry.getParamType(0);
2047 TempDescriptor cmdLineArgs =
2048 new TempDescriptor("analysisEntryTemp_args",
2052 new FlatNew(argsType,
2056 this.constructedCmdLineArgsNew = fnArgs;
2058 TypeDescriptor argType = argsType.dereference();
2059 TempDescriptor anArg =
2060 new TempDescriptor("analysisEntryTemp_arg",
2064 new FlatNew(argType,
2068 this.constructedCmdLineArgNew = fnArg;
2070 TypeDescriptor typeIndex = new TypeDescriptor(TypeDescriptor.INT);
2071 TempDescriptor index =
2072 new TempDescriptor("analysisEntryTemp_index",
2075 FlatLiteralNode fli =
2076 new FlatLiteralNode(typeIndex,
2081 FlatSetElementNode fse =
2082 new FlatSetElementNode(cmdLineArgs,
2087 TypeDescriptor typeSize = new TypeDescriptor(TypeDescriptor.INT);
2088 TempDescriptor sizeBytes =
2089 new TempDescriptor("analysisEntryTemp_size",
2092 FlatLiteralNode fls =
2093 new FlatLiteralNode(typeSize,
2098 TypeDescriptor typeBytes =
2099 new TypeDescriptor(TypeDescriptor.CHAR).makeArray( state );
2100 TempDescriptor strBytes =
2101 new TempDescriptor("analysisEntryTemp_strBytes",
2105 new FlatNew(typeBytes,
2110 this.constructedCmdLineArgBytesNew = fnBytes;
2112 ClassDescriptor cdString = argType.getClassDesc();
2113 assert cdString != null;
2114 FieldDescriptor argBytes = null;
2115 Iterator sFieldsItr = cdString.getFields();
2116 while( sFieldsItr.hasNext() ) {
2117 FieldDescriptor fd = (FieldDescriptor) sFieldsItr.next();
2118 if( fd.getSymbol().equals( typeUtil.StringClassValueField ) ) {
2123 assert argBytes != null;
2124 FlatSetFieldNode fsf =
2125 new FlatSetFieldNode(anArg,
2130 // throw this in so you can always see what the initial heap context
2131 // looks like if you want to, it's cheap
2132 FlatGenReachNode fgen = new FlatGenReachNode( "argContext" );
2134 TempDescriptor[] sourceEntryArgs = new TempDescriptor[1];
2135 sourceEntryArgs[0] = cmdLineArgs;
2137 new FlatCall(mdSourceEntry,
2143 FlatReturnNode frn = new FlatReturnNode(null);
2145 FlatExit fe = new FlatExit();
2147 this.fmAnalysisEntry =
2148 new FlatMethod(mdAnalysisEntry,
2152 List<FlatNode> nodes = new LinkedList<FlatNode>();
2153 nodes.add( fnArgs );
2158 nodes.add( fnBytes );
2165 FlatNode current = this.fmAnalysisEntry;
2166 for( FlatNode next: nodes ) {
2167 current.addNext( next );
2172 // jjenista - this is useful for looking at the FlatIRGraph of the
2173 // analysis entry method constructed above if you have to modify it.
2174 // The usual method of writing FlatIRGraphs out doesn't work because
2175 // this flat method is private to the model of this analysis only.
2177 // FlatIRGraph flatMethodWriter =
2178 // new FlatIRGraph( state, false, false, false );
2179 // flatMethodWriter.writeFlatIRGraph( fmAnalysisEntry, "analysisEntry" );
2180 //} catch( IOException e ) {}
// Topologically sorts the given descriptors by DFS over the call graph
// (callers visited before callees end up later -- see dfsVisit).  Uses
// a TreeSet for deterministic iteration when determinismDesired.
2184 protected LinkedList<Descriptor> topologicalSort(Set<Descriptor> toSort) {
2186 Set<Descriptor> discovered;
2188 if( determinismDesired ) {
2189 // use an ordered set
2190 discovered = new TreeSet<Descriptor>(dComp);
2192 // otherwise use a speedy hashset
2193 discovered = new HashSet<Descriptor>();
2196 LinkedList<Descriptor> sorted = new LinkedList<Descriptor>();
2198 Iterator<Descriptor> itr = toSort.iterator();
2199 while( itr.hasNext() ) {
2200 Descriptor d = itr.next();
2202 if( !discovered.contains(d) ) {
2203 dfsVisit(d, toSort, sorted, discovered);
2210 // While we're doing DFS on call graph, remember
2211 // dependencies for efficient queuing of methods
2212 // during interprocedural analysis:
2214 // a dependent of a method descriptor d for this analysis is:
2215 // 1) a method or task that invokes d
2216 // 2) in the descriptorsToAnalyze set
2217 protected void dfsVisit(Descriptor d,
2218 Set <Descriptor> toSort,
2219 LinkedList<Descriptor> sorted,
2220 Set <Descriptor> discovered) {
// (the marking of 'd' as discovered is elided in this extraction)
2223 // only methods have callers, tasks never do
2224 if( d instanceof MethodDescriptor ) {
2226 MethodDescriptor md = (MethodDescriptor) d;
2228 // the call graph is not aware that we have a fabricated
2229 // analysis entry that calls the program source's entry
2230 if( md == mdSourceEntry ) {
2231 if( !discovered.contains(mdAnalysisEntry) ) {
2232 addDependent(mdSourceEntry, // callee
2233 mdAnalysisEntry // caller
2235 dfsVisit(mdAnalysisEntry, toSort, sorted, discovered);
2239 // otherwise call graph guides DFS
2240 Iterator itr = callGraph.getCallerSet(md).iterator();
2241 while( itr.hasNext() ) {
2242 Descriptor dCaller = (Descriptor) itr.next();
2244 // only consider callers in the original set to analyze
2245 if( !toSort.contains(dCaller) ) {
2249 if( !discovered.contains(dCaller) ) {
2250 addDependent(md, // callee
2254 dfsVisit(dCaller, toSort, sorted, discovered);
2259 // for leaf-nodes last now!
// Schedules 'd' for (re)analysis exactly once: the companion set
// guards against duplicates, and the configured discipline (stack vs.
// priority queue) decides which work-list receives it.
2264 protected void enqueue(Descriptor d) {
2266 if( !descriptorsToVisitSet.contains(d) ) {
2268 if( state.DISJOINTDVISITSTACK ||
2269 state.DISJOINTDVISITSTACKEESONTOP
2271 descriptorsToVisitStack.add(d);
2273 } else if( state.DISJOINTDVISITPQUE ) {
2274 Integer priority = mapDescriptorToPriority.get(d);
2275 descriptorsToVisitQ.add(new DescriptorQWrapper(priority,
2280 descriptorsToVisitSet.add(d);
2285 // a dependent of a method descriptor d for this analysis is:
2286 // 1) a method or task that invokes d
2287 // 2) in the descriptorsToAnalyze set
// (the deps.add(caller) line is elided in this extraction)
2288 protected void addDependent(Descriptor callee, Descriptor caller) {
2289 Set<Descriptor> deps = mapDescriptorToSetDependents.get(callee);
2290 if( deps == null ) {
2291 deps = new HashSet<Descriptor>();
2294 mapDescriptorToSetDependents.put(callee, deps);
// Returns (never null) the set of callers that must be re-analyzed
// when 'callee' changes; installs an empty set on first request.
2297 protected Set<Descriptor> getDependents(Descriptor callee) {
2298 Set<Descriptor> deps = mapDescriptorToSetDependents.get(callee);
2299 if( deps == null ) {
2300 deps = new HashSet<Descriptor>();
2301 mapDescriptorToSetDependents.put(callee, deps);
// Returns (never null) the per-call-site initial-heap-model table for
// descriptor 'd'; installs an empty table on first request.
2307 public Hashtable<FlatCall, ReachGraph> getIHMcontributions(Descriptor d) {
2309 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2310 mapDescriptorToIHMcontributions.get(d);
2312 if( heapsFromCallers == null ) {
2313 heapsFromCallers = new Hashtable<FlatCall, ReachGraph>();
2314 mapDescriptorToIHMcontributions.put(d, heapsFromCallers);
2317 return heapsFromCallers;
// Looks up the IHM contribution this call site made to descriptor
// 'd'.  (The FlatCall parameter and the not-present branch body are
// elided in this extraction; 'fc' is clearly the call-site key.)
2320 public ReachGraph getIHMcontribution(Descriptor d,
2323 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2324 getIHMcontributions(d);
2326 if( !heapsFromCallers.containsKey(fc) ) {
2330 return heapsFromCallers.get(fc);
// Records/overwrites the IHM contribution of one call site ('fc' and
// 'rg' parameters are elided in this extraction).
2334 public void addIHMcontribution(Descriptor d,
2338 Hashtable<FlatCall, ReachGraph> heapsFromCallers =
2339 getIHMcontributions(d);
2341 heapsFromCallers.put(fc, rg);
// Fabricates an AllocSite (with a synthetic FlatNew) used to model a
// method parameter as if it were allocated at entry.  A third boolean
// parameter (elided here) evidently selects whether the site gets the
// "param<temp>" disjoint ID or none -- confirm against full source.
2345 private AllocSite createParameterAllocSite(ReachGraph rg,
2346 TempDescriptor tempDesc,
2352 flatNew = new FlatNew(tempDesc.getType(), // type
2353 tempDesc, // param temp
2354 false, // global alloc?
2355 "param"+tempDesc // disjoint site ID string
2358 flatNew = new FlatNew(tempDesc.getType(), // type
2359 tempDesc, // param temp
2360 false, // global alloc?
2361 null // disjoint site ID string
2365 // create allocation site
2366 AllocSite as = AllocSite.factory(allocationDepth,
2368 flatNew.getDisjointId(),
2371 for (int i = 0; i < allocationDepth; ++i) {
2372 Integer id = generateUniqueHeapRegionNodeID();
2373 as.setIthOldest(i, id);
2374 mapHrnIdToAllocSite.put(id, as);
2376 // the oldest node is a summary node
2377 as.setSummary(generateUniqueHeapRegionNodeID() );
// Collects the fields of 'typeDesc' whose types the analysis tracks
// (see shouldAnalysisTrack); immutable types contribute no fields.
// (The 'return fieldSet;' line is elided in this extraction.)
2385 private Set<FieldDescriptor> getFieldSetTobeAnalyzed(TypeDescriptor typeDesc) {
2387 Set<FieldDescriptor> fieldSet=new HashSet<FieldDescriptor>();
2388 if(!typeDesc.isImmutable()) {
2389 ClassDescriptor classDesc = typeDesc.getClassDesc();
2390 for (Iterator it = classDesc.getFields(); it.hasNext(); ) {
2391 FieldDescriptor field = (FieldDescriptor) it.next();
2392 TypeDescriptor fieldType = field.getType();
2393 if (shouldAnalysisTrack(fieldType)) {
2394 fieldSet.add(field);
// Builds summary heap-region nodes modeling a multi-dimensional array
// field fd hanging off srcHRN: one summary node per array dimension
// (outermost first), linked by array-element edges, plus -- when the
// element class has trackable fields -- a final summary node for the
// element objects themselves.  Nodes are shared per type through
// mapToExistingNode so equal array types reuse one summary.  Records
// the (entry node -> deepest node) pair in 'map' and returns the node
// for the outermost dimension.
2402 private HeapRegionNode createMultiDeimensionalArrayHRN(ReachGraph rg, AllocSite alloc, HeapRegionNode srcHRN, FieldDescriptor fd, Hashtable<HeapRegionNode, HeapRegionNode> map, Hashtable<TypeDescriptor, HeapRegionNode> mapToExistingNode, ReachSet alpha) {
2404 int dimCount=fd.getType().getArrayCount();
2405 HeapRegionNode prevNode=null;
2406 HeapRegionNode arrayEntryNode=null;
// one summary node per dimension, from dimCount down to 1
2407 for(int i=dimCount; i>0; i--) {
2408 TypeDescriptor typeDesc=fd.getType().dereference(); //hack to get instance of type desc
2409 typeDesc.setArrayCount(i);
2410 TempDescriptor tempDesc=new TempDescriptor(typeDesc.getSymbol(),typeDesc);
2411 HeapRegionNode hrnSummary;
// create a summary node for this array type only if one does not
// already exist for the same TypeDescriptor
2412 if(!mapToExistingNode.containsKey(typeDesc)) {
2417 as = createParameterAllocSite(rg, tempDesc, false);
2419 // make a new reference to allocated node
2421 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2422 false, // single object?
2424 false, // out-of-context?
2425 as.getType(), // type
2426 as, // allocation site
2427 alpha, // inherent reach
2428 alpha, // current reach
2429 ExistPredSet.factory(rg.predTrue), // predicates
2430 tempDesc.toString() // description
2432 rg.id2hrn.put(as.getSummary(),hrnSummary);
2434 mapToExistingNode.put(typeDesc, hrnSummary);
// reuse the summary node previously created for this type
2436 hrnSummary=mapToExistingNode.get(typeDesc);
2439 if(prevNode==null) {
2440 // make a new reference between new summary node and source
2441 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2444 fd.getSymbol(), // field name
2446 ExistPredSet.factory(rg.predTrue), // predicates
2450 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2451 prevNode=hrnSummary;
2452 arrayEntryNode=hrnSummary;
2454 // make a new reference between summary nodes of array
2455 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2458 arrayElementFieldName, // field name
2460 ExistPredSet.factory(rg.predTrue), // predicates
2464 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2465 prevNode=hrnSummary;
2470 // create a new obj node if obj has at least one non-primitive field
2471 TypeDescriptor type=fd.getType();
2472 if(getFieldSetTobeAnalyzed(type).size()>0) {
2473 TypeDescriptor typeDesc=type.dereference();
2474 typeDesc.setArrayCount(0);
2475 if(!mapToExistingNode.containsKey(typeDesc)) {
2476 TempDescriptor tempDesc=new TempDescriptor(type.getSymbol(),typeDesc);
2477 AllocSite as = createParameterAllocSite(rg, tempDesc, false);
2478 // make a new reference to allocated node
2479 HeapRegionNode hrnSummary =
2480 rg.createNewHeapRegionNode(as.getSummary(), // id or null to generate a new one
2481 false, // single object?
2483 false, // out-of-context?
2485 as, // allocation site
2486 alpha, // inherent reach
2487 alpha, // current reach
2488 ExistPredSet.factory(rg.predTrue), // predicates
2489 tempDesc.toString() // description
2491 rg.id2hrn.put(as.getSummary(),hrnSummary);
2492 mapToExistingNode.put(typeDesc, hrnSummary);
// link the innermost dimension node to the element-object summary
2493 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2496 arrayElementFieldName, // field name
2498 ExistPredSet.factory(rg.predTrue), // predicates
2501 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2502 prevNode=hrnSummary;
// element-object summary already exists; add the element edge only
// if one is not already present
2504 HeapRegionNode hrnSummary=mapToExistingNode.get(typeDesc);
2505 if(prevNode.getReferenceTo(hrnSummary, typeDesc, arrayElementFieldName)==null) {
2506 RefEdge edgeToSummary = new RefEdge(prevNode, // source
2509 arrayElementFieldName, // field name
2511 ExistPredSet.factory(rg.predTrue), // predicates
2514 rg.addRefEdge(prevNode, hrnSummary, edgeToSummary);
2516 prevNode=hrnSummary;
// remember which node sits at the bottom of this array chain
2520 map.put(arrayEntryNode, prevNode);
2521 return arrayEntryNode;
// Builds the initial reachability graph for a task: for each task
// parameter it creates a synthetic parameter allocation site, points
// the parameter variable at the newest node of that site, then
// worklist-expands the parameter's trackable fields, creating one
// shared summary node per field type (arrays are delegated to
// createMultiDeimensionalArrayHRN).  doneSet keys on
// "<srcNodeID>_<field>" to terminate on recursive types.
2524 private ReachGraph createInitialTaskReachGraph(FlatMethod fm) {
2525 ReachGraph rg = new ReachGraph();
2526 TaskDescriptor taskDesc = fm.getTask();
2528 for (int idx = 0; idx < taskDesc.numParameters(); idx++) {
2529 Descriptor paramDesc = taskDesc.getParameter(idx);
2530 TypeDescriptor paramTypeDesc = taskDesc.getParamType(idx);
2532 // setup data structure
2533 Set<HashMap<HeapRegionNode, FieldDescriptor>> workSet =
2534 new HashSet<HashMap<HeapRegionNode, FieldDescriptor>>();
2535 Hashtable<TypeDescriptor, HeapRegionNode> mapTypeToExistingSummaryNode =
2536 new Hashtable<TypeDescriptor, HeapRegionNode>();
2537 Hashtable<HeapRegionNode, HeapRegionNode> mapToFirstDimensionArrayNode =
2538 new Hashtable<HeapRegionNode, HeapRegionNode>();
2539 Set<String> doneSet = new HashSet<String>();
2541 TempDescriptor tempDesc = fm.getParameter(idx);
// synthetic allocation site for this parameter; 'true' marks it as
// the flagged/parameter variant
2543 AllocSite as = createParameterAllocSite(rg, tempDesc, true);
2544 VariableNode lnX = rg.getVariableNodeFromTemp(tempDesc);
2545 Integer idNewest = as.getIthOldest(0);
2546 HeapRegionNode hrnNewest = rg.id2hrn.get(idNewest);
2548 // make a new reference to allocated node
2549 RefEdge edgeNew = new RefEdge(lnX, // source
2551 taskDesc.getParamType(idx), // type
2553 hrnNewest.getAlpha(), // beta
2554 ExistPredSet.factory(rg.predTrue), // predicates
2557 rg.addRefEdge(lnX, hrnNewest, edgeNew);
2559 // set-up a work set for class field
2560 ClassDescriptor classDesc = paramTypeDesc.getClassDesc();
2561 for (Iterator it = classDesc.getFields(); it.hasNext(); ) {
2562 FieldDescriptor fd = (FieldDescriptor) it.next();
2563 TypeDescriptor fieldType = fd.getType();
2564 if (shouldAnalysisTrack(fieldType)) {
// each work item is a singleton map: (source node -> field to expand)
2565 HashMap<HeapRegionNode, FieldDescriptor> newMap = new HashMap<HeapRegionNode, FieldDescriptor>();
2566 newMap.put(hrnNewest, fd);
2567 workSet.add(newMap);
// NOTE(review): uniqueIdentifier is never incremented in the visible
// lines, so every generated temp would be named "temp0" -- confirm
// against the full file.
2571 int uniqueIdentifier = 0;
2572 while (!workSet.isEmpty()) {
2573 HashMap<HeapRegionNode, FieldDescriptor> map = workSet
2575 workSet.remove(map);
// unpack the singleton work item
2577 Set<HeapRegionNode> key = map.keySet();
2578 HeapRegionNode srcHRN = key.iterator().next();
2579 FieldDescriptor fd = map.get(srcHRN);
2580 TypeDescriptor type = fd.getType();
2581 String doneSetIdentifier = srcHRN.getIDString() + "_" + fd;
2583 if (!doneSet.contains(doneSetIdentifier)) {
2584 doneSet.add(doneSetIdentifier);
2585 if (!mapTypeToExistingSummaryNode.containsKey(type)) {
2586 // create new summary Node
2587 TempDescriptor td = new TempDescriptor("temp"
2588 + uniqueIdentifier, type);
2590 AllocSite allocSite;
2591 if(type.equals(paramTypeDesc)) {
2592 //corresponding allocsite has already been created for a parameter variable.
2595 allocSite = createParameterAllocSite(rg, td, false);
2597 String strDesc = allocSite.toStringForDOT()
2599 TypeDescriptor allocType=allocSite.getType();
2601 HeapRegionNode hrnSummary;
// multi-dimensional arrays get a dedicated node chain; everything
// else gets a single summary node
2602 if(allocType.isArray() && allocType.getArrayCount()>0) {
2603 hrnSummary=createMultiDeimensionalArrayHRN(rg,allocSite,srcHRN,fd,mapToFirstDimensionArrayNode,mapTypeToExistingSummaryNode,hrnNewest.getAlpha());
2606 rg.createNewHeapRegionNode(allocSite.getSummary(), // id or null to generate a new one
2607 false, // single object?
2609 false, // out-of-context?
2610 allocSite.getType(), // type
2611 allocSite, // allocation site
2612 hrnNewest.getAlpha(), // inherent reach
2613 hrnNewest.getAlpha(), // current reach
2614 ExistPredSet.factory(rg.predTrue), // predicates
2615 strDesc // description
2617 rg.id2hrn.put(allocSite.getSummary(),hrnSummary);
2619 // make a new reference to summary node
2620 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2623 fd.getSymbol(), // field name
2624 hrnNewest.getAlpha(), // beta
2625 ExistPredSet.factory(rg.predTrue), // predicates
2629 rg.addRefEdge(srcHRN, hrnSummary, edgeToSummary);
2633 mapTypeToExistingSummaryNode.put(type, hrnSummary);
2635 // set-up a work set for fields of the class
2636 Set<FieldDescriptor> fieldTobeAnalyzed=getFieldSetTobeAnalyzed(type);
2637 for (Iterator iterator = fieldTobeAnalyzed.iterator(); iterator
2639 FieldDescriptor fieldDescriptor = (FieldDescriptor) iterator
2641 HeapRegionNode newDstHRN;
2642 if(mapToFirstDimensionArrayNode.containsKey(hrnSummary)) {
2643 // the related heap region node already exists: for an array
2643 // chain, expand fields from its element-object node
2644 newDstHRN=mapToFirstDimensionArrayNode.get(hrnSummary);
2646 newDstHRN=hrnSummary;
2648 doneSetIdentifier = newDstHRN.getIDString() + "_" + fieldDescriptor;
2649 if(!doneSet.contains(doneSetIdentifier)) {
2650 // add new work item
2651 HashMap<HeapRegionNode, FieldDescriptor> newMap =
2652 new HashMap<HeapRegionNode, FieldDescriptor>();
2653 newMap.put(newDstHRN, fieldDescriptor);
2654 workSet.add(newMap);
2659 // if there exists corresponding summary node
2660 HeapRegionNode hrnDst=mapTypeToExistingSummaryNode.get(type);
2662 RefEdge edgeToSummary = new RefEdge(srcHRN, // source
2664 fd.getType(), // type
2665 fd.getSymbol(), // field name
2666 srcHRN.getAlpha(), // beta
2667 ExistPredSet.factory(rg.predTrue), // predicates
2670 rg.addRefEdge(srcHRN, hrnDst, edgeToSummary);
2680 // return all allocation sites in the method (there is one allocation
2681 // site per FlatNew node in a method)
2682 private HashSet<AllocSite> getAllocationSiteSet(Descriptor d) {
// build the set lazily on first request, then serve from the cache
2683 if( !mapDescriptorToAllocSiteSet.containsKey(d) ) {
2684 buildAllocationSiteSet(d);
2687 return mapDescriptorToAllocSiteSet.get(d);
// Computes and caches (in mapDescriptorToAllocSiteSet) the set of
// allocation sites for method or task d by walking its FlatMethod IR
// graph and collecting an AllocSite for every FlatNew node found.
2691 private void buildAllocationSiteSet(Descriptor d) {
2692 HashSet<AllocSite> s = new HashSet<AllocSite>();
// resolve the flat IR for either a method or a task descriptor
2695 if( d instanceof MethodDescriptor ) {
2696 fm = state.getMethodFlat( (MethodDescriptor) d);
2698 assert d instanceof TaskDescriptor;
2699 fm = state.getMethodFlat( (TaskDescriptor) d);
// let the pointer-method helper prepare successor info for fm first
2701 pm.analyzeMethod(fm);
2703 // visit every node in this FlatMethod's IR graph
2704 // and make a set of the allocation sites from the
2705 // FlatNew nodes visited
2706 HashSet<FlatNode> visited = new HashSet<FlatNode>();
2707 HashSet<FlatNode> toVisit = new HashSet<FlatNode>();
// standard worklist traversal over the flat IR
2710 while( !toVisit.isEmpty() ) {
2711 FlatNode n = toVisit.iterator().next();
2713 if( n instanceof FlatNew ) {
2714 s.add(getAllocSiteFromFlatNewPRIVATE( (FlatNew) n) );
// enqueue unvisited successors
2720 for( int i = 0; i < pm.numNext(n); ++i ) {
2721 FlatNode child = pm.getNext(n, i);
2722 if( !visited.contains(child) ) {
2728 mapDescriptorToAllocSiteSet.put(d, s);
// Collects the allocation sites carrying a disjoint-analysis ID that
// are reachable from dIn, transitively including every callee found
// through the call graph (worklist over descriptors, with a visited
// set to handle recursion).
2731 private HashSet<AllocSite> getFlaggedAllocationSites(Descriptor dIn) {
2733 HashSet<AllocSite> out = new HashSet<AllocSite>();
2734 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2735 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2739 while (!toVisit.isEmpty()) {
2740 Descriptor d = toVisit.iterator().next();
// scan this method's own allocation sites for flagged ones
2744 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2745 Iterator asItr = asSet.iterator();
2746 while (asItr.hasNext()) {
2747 AllocSite as = (AllocSite) asItr.next();
2748 if (as.getDisjointAnalysisId() != null) {
2753 // enqueue callees of this method to be searched for
2754 // allocation sites also
2755 Set callees = callGraph.getCalleeSet(d);
2756 if (callees != null) {
2757 Iterator methItr = callees.iterator();
2758 while (methItr.hasNext()) {
2759 MethodDescriptor md = (MethodDescriptor) methItr.next();
2761 if (!visited.contains(md)) {
// Collects every allocation site whose allocated class has flags,
// reachable from task td or from any method the task transitively
// calls (worklist over descriptors with a visited set).
2772 private HashSet<AllocSite>
2773 getFlaggedAllocationSitesReachableFromTaskPRIVATE(TaskDescriptor td) {
2775 HashSet<AllocSite> asSetTotal = new HashSet<AllocSite>();
2776 HashSet<Descriptor> toVisit = new HashSet<Descriptor>();
2777 HashSet<Descriptor> visited = new HashSet<Descriptor>();
2781 // traverse this task and all methods reachable from this task
2782 while( !toVisit.isEmpty() ) {
2783 Descriptor d = toVisit.iterator().next();
// keep sites whose allocated class declares flags
2787 HashSet<AllocSite> asSet = getAllocationSiteSet(d);
2788 Iterator asItr = asSet.iterator();
2789 while( asItr.hasNext() ) {
2790 AllocSite as = (AllocSite) asItr.next();
2791 TypeDescriptor typed = as.getType();
2792 if( typed != null ) {
2793 ClassDescriptor cd = typed.getClassDesc();
2794 if( cd != null && cd.hasFlags() ) {
2800 // enqueue callees of this method to be searched for
2801 // allocation sites also
2802 Set callees = callGraph.getCalleeSet(d);
2803 if( callees != null ) {
2804 Iterator methItr = callees.iterator();
2805 while( methItr.hasNext() ) {
2806 MethodDescriptor md = (MethodDescriptor) methItr.next();
2808 if( !visited.contains(md) ) {
// Accessor: the set of method/task descriptors this analysis covers.
2818 public Set<Descriptor> getDescriptorsToAnalyze() {
2819 return descriptorsToAnalyze;
// Accessor: the effects analysis associated with this disjoint analysis.
2822 public EffectsAnalysis getEffectsAnalysis() {
2823 return effectsAnalysis;
// Accessor: the completed reach graph computed for descriptor d,
// or null if none was recorded.
2826 public ReachGraph getReachGraph(Descriptor d) {
2827 return mapDescriptorToCompleteReachGraph.get(d);
// Accessor: the reach graph captured at entry to flat node fn,
// or null if none was recorded.
2830 public ReachGraph getEnterReachGraph(FlatNode fn) {
2831 return fn2rgAtEnter.get(fn);
2834 // get successive captures of the analysis state, use compiler
// Debug-snapshot controls consumed by debugSnapshot(): when
// takeDebugSnapshots is on, visits in the window starting at
// visitStartCapture (for numVisitsToCapture visits) are written
// out as graph dumps; descSymbolDebug presumably selects which
// method's symbol to capture -- confirm against the full file.
2836 boolean takeDebugSnapshots = false;
2837 String descSymbolDebug = null;
2838 boolean stopAfterCapture = false;
2839 int snapVisitCounter = 0;
2840 int snapNodeCounter = 0;
2841 int visitStartCapture = 0;
2842 int numVisitsToCapture = 0;
// Writes a DOT dump of rg for debugging when the current visit falls
// inside the configured capture window; 'in' selects whether this is
// the graph at node entry ("...in") or node exit ("...out").
2845 void debugSnapshot(ReachGraph rg, FlatNode fn, boolean in) {
// past the capture window: nothing to do
2846 if( snapVisitCounter > visitStartCapture + numVisitsToCapture ) {
2854 if( snapVisitCounter >= visitStartCapture ) {
2855 System.out.println(" @@@ snapping visit="+snapVisitCounter+
2856 ", node="+snapNodeCounter+
// file name encodes visit and node counters, plus in/out direction
2860 graphName = String.format("snap%03d_%04din",
2864 graphName = String.format("snap%03d_%04dout",
2869 graphName = graphName + fn;
2871 rg.writeGraph(graphName,
2872 true, // write labels (variables)
2873 true, // selectively hide intermediate temp vars
2874 true, // prune unreachable heap regions
2875 false, // hide reachability
2876 false, // hide subset reachability states
2877 true, // hide predicates
2878 true); // hide edge taints
// Returns the allocation sites x may point to at ENTRY to
// programPoint, per the recorded enter-graph; behavior when no graph
// was recorded is in elided lines (the null branch body is not
// visible here).
2885 public Set<Alloc> canPointToAt( TempDescriptor x,
2886 FlatNode programPoint ) {
2888 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
2889 if( rgAtEnter == null ) {
2893 return rgAtEnter.canPointTo( x );
// Returns the allocation sites x may point to AFTER programPoint
// executes, per the recorded exit-graph; behavior when no graph was
// recorded is in elided lines (the null branch body is not visible
// here).
2897 public Set<Alloc> canPointToAfter( TempDescriptor x,
2898 FlatNode programPoint ) {
2900 ReachGraph rgAtExit = fn2rgAtExit.get( programPoint );
2901 if( rgAtExit == null ) {
2905 return rgAtExit.canPointTo( x );
// Field-dereference variant: for each site x may reference at entry
// to programPoint, the sites that x.f may reference (keyed by the
// source site).  The field parameter's declaration line is elided in
// this listing.
2909 public Hashtable< Alloc, Set<Alloc> > canPointToAt( TempDescriptor x,
2911 FlatNode programPoint ) {
2913 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
2914 if( rgAtEnter == null ) {
2918 return rgAtEnter.canPointTo( x, f.getSymbol(), f.getType() );
// Array-element variant: for each site array x may reference at
// entry to programPoint, the sites its elements may reference.
// Requires x to have an array type.
2922 public Hashtable< Alloc, Set<Alloc> > canPointToAtElement( TempDescriptor x,
2923 FlatNode programPoint ) {
2925 ReachGraph rgAtEnter = fn2rgAtEnter.get( programPoint );
2926 if( rgAtEnter == null ) {
2930 assert x.getType() != null;
2931 assert x.getType().isArray();
// element type is the array type with one dimension stripped
2933 return rgAtEnter.canPointTo( x, arrayElementFieldName, x.getType().dereference() );