compiler.cpp

Go to the documentation of this file.
00001 /* Copyright 1994 - 1996, LongView Technologies L.L.C. $Revision: 1.143 $ */
00002 /* Copyright (c) 2006, Sun Microsystems, Inc.
00003 All rights reserved.
00004 
00005 Redistribution and use in source and binary forms, with or without modification, are permitted provided that the 
00006 following conditions are met:
00007 
00008     * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
00009     * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following 
00010           disclaimer in the documentation and/or other materials provided with the distribution.
00011     * Neither the name of Sun Microsystems nor the names of its contributors may be used to endorse or promote products derived 
00012           from this software without specific prior written permission.
00013 
00014 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT 
00015 NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL 
00016 THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 
00017 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 
00018 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 
00019 OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
00020 
00021 
00022 */
00023 
00024 # include "incls/_precompiled.incl"
00025 
00026 # ifdef DELTA_COMPILER
00027 
00028 # include "incls/_compiler.cpp.incl"
00029 
# ifdef ASSERT
    // NOTE(review): both branches set false, so the frequent bbIterator/nmethod
    // verification sprinkled through compile() is disabled even in debug
    // builds; flip this branch to true to re-enable it.
    bool verifyOften = false;
# else
    bool verifyOften = false;
# endif

int             nofCompilations = 0;        // running count of compilations (used for ids and progress dots)
Compiler*       theCompiler     = NULL;     // the currently active compiler; NULL when no compile is in progress
Compiler*       lastCompiler    = NULL;     // for debugging
BBIterator*     last_bbIterator;            // BB graph of the last finished compile (kept for print_code after finalize)
00040 
// One-time compiler subsystem initialization, called at VM startup.
void compiler_init() {
#ifdef ASSERT
  // debug builds turn on compiler debug output by default (see cout(bool))
  CompilerDebug = true;
#endif
}
00046 
00047 
00048 ScopeDescRecorder* Compiler::scopeDescRecorder() { return rec; }
00049 
00050 CodeBuffer* Compiler::code() const { return _code; }
00051 
// Constructor for a normal method compile.
// k - lookup key identifying the method (see key.hpp)
// m - the method to compile
// i - the compiled inline cache of the send site that triggered this compile
Compiler::Compiler(LookupKey* k, methodOop m, CompiledIC* i) {
  key           = k;
  method        = m;
  ic            = i;
  parentNMethod = NULL;   // not a block compile, so no enclosing nmethod
  blockScope    = NULL;

  // real jumpTable ids are assigned later, in computeBlockInfo()
  main_jumpTable_id     = jumpTableID();
  promoted_jumpTable_id = jumpTableID();

  initialize();
}
00064 
00065 
// Constructor for a compile driven by a recompilation scope (e.g. from
// the inlining database); key and method are taken from the scope.
Compiler::Compiler(RScope* scope) {
  assert(scope != NULL, "scope must exist");

  key           = scope->key();
  method        = scope->method();
  ic            = NULL;   // no triggering send site for this kind of compile
  parentNMethod = NULL;
  blockScope    = NULL;

  // real jumpTable ids are assigned later, in computeBlockInfo()
  main_jumpTable_id     = jumpTableID();
  promoted_jumpTable_id = jumpTableID();

  initialize(scope);
}
00080 
// Constructor for compiling a block method whose enclosing nmethod
// already exists.
// blk   - the (already compiled) block closure being compiled
// scope - descriptor of the non-inlined block scope in the parent nmethod
Compiler::Compiler(blockClosureOop blk, NonInlinedBlockScopeDesc* scope) : _scopeStack(10) {
  // Create a valid key for the compiled method.
  // {receiver class, block method} see key.hpp
  key = LookupKey::allocate(scope->parent()->selfKlass(), scope->method());

  assert(blk->isCompiledBlock(), "must be compiled block");
  jumpTableEntry* e = blk->jump_table_entry();
  int sub_index;    // filled in by parent_nmethod() below
  parentNMethod = e->parent_nmethod(sub_index);

  // major index comes from the parent nmethod; if the parent's main id is
  // itself a block id, use its promoted id instead
  short main_index = parentNMethod->main_id.is_block() 
                     ? parentNMethod->promoted_id.major()
                     : parentNMethod->main_id.major() ;

  main_jumpTable_id     = jumpTableID(main_index, sub_index);
  promoted_jumpTable_id = jumpTableID();

  blockScope = scope;
  method = scope->method(); 
  ic = NULL;    // block compiles have no triggering inline cache

  // Check if the inlining database is active
  RScope* rs = NULL;
  if (UseInliningDatabase) {
    // entry is keyed by the outermost enclosing method's lookup key and is
    // consumed (removed) by the lookup
    LookupKey* outer = &parentNMethod->outermost()->key;
    rs = InliningDatabase::lookup_and_remove(outer, key);
    if (rs && TraceInliningDatabase) {
      std->print("ID block compile: ");
      key->print();
      std->cr();
    }
  }
  initialize(rs);
}
00115 
00116 
// Tear down per-compilation globals after the compile is done.
void Compiler::finalize() {
  assert(theMacroAssm == NULL, "shouldn't have an assembler anymore");
  _code = NULL;
  // keep the BB graph for post-mortem printing (used by print_code when
  // theCompiler is already NULL)
  last_bbIterator = bbIterator;
  bbIterator = NULL;
  theCompiler = NULL;
}
00124 
00125 
00126 int Compiler::level() const { 
00127   return _noInlinableSends ? MaxRecompilationLevels - 1 : _nextLevel; 
00128 }
00129 
00130 
00131 int Compiler::version() const { 
00132   if (recompilee) {
00133     // don't increment version number when uncommon-recompiling
00134     // (otherwise limit is reached too quickly)
00135     return recompilee->version() + (is_uncommon_compile() ?  0 : 1); 
00136   } else {
00137     return 0;
00138   }
00139 }
00140 
00141 
// Estimated target nmethod size in bytes, based on the cumulative cost of
// the nodes created so far.
int Compiler::estimatedSize() const {
  // estimated target nmethod size (bytes)
  return NodeFactory::cumulCost;
}
00146 
00147 
00148 InlinedScope* Compiler::currentScope() const    { return _scopeStack.top(); }
00149 void Compiler::enterScope(InlinedScope* s)      { _scopeStack.push(s); }
00150 void Compiler::exitScope (InlinedScope* s)      { assert(s == _scopeStack.top(), "bad nesting"); _scopeStack.pop(); }
00151 
00152 
00153 void Compiler::initialize(RScope* remote_scope) {
00154   assert(VMProcess::vm_operation() != NULL, "must be in vmProcess to compile");
00155   nofCompilations++;
00156 #ifdef DEBUG
00157   messages = new stringStream(250 * K);
00158 #endif
00159 
00160   if (remote_scope) {
00161     _uses_inlining_database = true;
00162     recompileeRScope = remote_scope;
00163   } else {
00164     _uses_inlining_database = false;
00165   }
00166 
00167   recompileeRScope = remote_scope;
00168   assert(theCompiler == NULL, "shouldn't have but one compiler at a time");
00169   assert(theMacroAssm == NULL, "shouldn't have an assembler yet");
00170   PReg::initPRegs();    // must come early (before any PReg allocation)
00171   initNodes();          // same here (before creating nodes)
00172   initLimits();
00173   theCompiler   = lastCompiler = this;
00174   _code         = new CodeBuffer(CompilerInstrsSize, CompilerInstrsSize / 2);
00175   countID       = -1;
00176   topScope      = NULL;
00177   bbIterator    = new BBIterator;
00178   /* theAllocator = */ new RegisterAllocator();
00179   assert(method, "must have method");
00180   Scope::initialize();
00181   _totalNofBytes = 0;
00182   _special_handler_call_offset = -1;
00183   _entry_point_offset = -1;
00184   _verified_entry_point_offset = -1;
00185   _totalNofFloatTemporaries = -1;
00186   _float_section_size = 0;
00187   _float_section_start_offset = 0;
00188   rec = new ScopeDescRecorder(CompilerScopesSize, CompilerPCsSize);
00189   // Save dependency information in the scopeDesc recorder.
00190   rec->add_dependant(key);
00191 
00192   nlrTestPoints = new GrowableArray<NLRTestNode*>(50); 
00193   contextList = NULL;
00194   scopes = new GrowableArray<InlinedScope*>(50);
00195   blockClosures = new GrowableArray<BlockPReg*>(50);
00196   firstNode = NULL;
00197   reporter = new PerformanceDebugger(this);
00198   initTopScope();
00199 }
00200 
00201 
00202 void Compiler::initLimits() {
00203   if (recompileeRScope) {
00204     // We're compiling from the inlining data base
00205     _nextLevel = MaxRecompilationLevels - 1;
00206   } else if (recompilee) {
00207     if (DeltaProcess::active()->isUncommon()) {
00208       // when recompiling because of an uncommon trap, reset level
00209       _nextLevel = 0;
00210     } else {
00211       _nextLevel = recompilee->level() + 1;
00212       if (_nextLevel >= MaxRecompilationLevels) {
00213         warning("recompilation level too high -- should not happen");
00214         _nextLevel = MaxRecompilationLevels;
00215       }
00216     }
00217   } else {
00218     // new nmethod
00219     _nextLevel = 0;
00220   }
00221   _noInlinableSends = true;
00222 
00223 #ifdef LATER
00224   inlineLimit[NormalFnLimit]         = getLimit(limits[NormalFnLimit],          level);
00225   inlineLimit[BlockFnLimit]          = getLimit(limits[BlockFnLimit],           level);
00226   inlineLimit[BlockArgFnLimit]       = getLimit(limits[BlockArgFnLimit],        level);
00227   inlineLimit[NormalFnInstrLimit]    = getLimit(limits[NormalFnInstrLimit],     level);
00228   inlineLimit[BlockFnInstrLimit]     = getLimit(limits[BlockFnInstrLimit],      level);
00229   inlineLimit[BlockArgFnInstrLimit]  = getLimit(limits[BlockArgFnInstrLimit],   level);
00230   inlineLimit[SplitCostLimit]        = getLimit(limits[SplitCostLimit],         level);
00231   inlineLimit[NmInstrLimit]          = getLimit(limits[NmInstrLimit],           level);
00232 
00233   if (CompilerAdjustLimits) {
00234     // adjust NmInstrLimit if top-level method is large
00235     int cost = sicCost((methodKlass*)method->klass(), topScope, costP);
00236     if (cost > NormalMethodLen) {
00237       float l = (float)cost / NormalMethodLen * inlineLimit[NmInstrLimit];
00238       inlineLimit[NmInstrLimit] = min(int(l), CompilerInstructionsSize / 3);
00239     }
00240   }
00241 #endif
00242 }
00243 
00244 
bool Compiler::registerUninlinable(Inliner* inliner) {
  // All sends that aren't inlined for some reason are registered here
  // to determine the minimum optimization level needed for recompilation
  // (i.e. if the send wouldn't be inlined even at the highest optimization
  // level there's no point in recompiling).
  // At the end of compilation, _nextLevel will contain the lowest
  // optimization level that will generate better code than the current level.
  // Return true if the send is considered non-inlinable.
  if (!Inline) return true;                 // no point recompiling
  SendInfo* info = inliner->info();
  if (is_database_compile()) {
    info->counting = false;
    info->uninlinable = true;               // for now, never inline if not inlined in DB 
    // (would need to change DB format to allow counting and uninlinable sends)
  }

  if (!UseRecompilation) {
    // make sure we're not using counting sends
    info->counting = false;
  }
  if (info->uninlinable) {
    info->counting = false;
    return true;                            // won't be inlined, ever
  }
  if (is_uncommon_compile()) {
    info->counting = true;                  // make sure the uncommon nmethod is recompiled eventually
  }
  if (inliner->msg() == NULL) {
    // no inlining-failure message: this send could be inlined at a higher
    // level, so a recompile may still pay off
    info->counting = true;                  // for now
    _noInlinableSends = false;              // unknown receiver (?)
    return false;
  } else {
    assert(!info->rcvr->isUnknownExpr(), "oops");
    return true;
  }
}
00281 
00282 
00283 bool Compiler::is_uncommon_compile() const {
00284   return DeltaProcess::active()->isUncommon();
00285 }
00286 
00287 
00288 // NewBackendGuard is used only to set the right flags to enable the
00289 // new backend (enabled via TryNewBackend) instead of setting them
00290 // all manually. At some point all the bugs should be fixed and this
00291 // class and its use can simply be removed.
00292 //
00293 // This class basically simplifies Dave's (or whoever's) life since
00294 // only one flag (TryNewBackend) needs to be set and everything else
00295 // is setup automatically. Eventually UseNewBackend should do the job.
00296 //
00297 // gri 10/2/96
00298 
// RAII guard: when TryNewBackend is set, switches the flags needed by the
// new backend for the duration of one compile and restores them afterwards.
class NewBackendGuard: StackObj {
 private:
  static bool _first_use;   // true until the one-time warning has been printed

  // saved values of the flags this guard may override
  bool _UseNewBackend;
  bool _LocalCopyPropagate;
  bool _OptimizeLoops;
  bool _OptimizeIntegerLoops;

 public:
  NewBackendGuard() {
    // save original settings in any case
    _UseNewBackend        = UseNewBackend;
    _LocalCopyPropagate   = LocalCopyPropagate;
    _OptimizeLoops        = OptimizeLoops;
    _OptimizeIntegerLoops = OptimizeIntegerLoops;

    if (TryNewBackend) {
      // print out a warning if this class is used
      if (_first_use) {
        warning("TryNewBackend automatically changes some flags for compilation - for temporary use only");
        _first_use = false;
      }

      // switch to right settings
      UseNewBackend        = true;
      LocalCopyPropagate   = false;
      OptimizeLoops        = false;
      OptimizeIntegerLoops = false;
    }
  }

  ~NewBackendGuard() {
    // restore original settings in any case
    UseNewBackend        = _UseNewBackend;
    LocalCopyPropagate   = _LocalCopyPropagate;
    OptimizeLoops        = _OptimizeLoops;
    OptimizeIntegerLoops = _OptimizeIntegerLoops;
  }
};

bool NewBackendGuard::_first_use = true;
00341 
00342 
// Compile the method (or block) into a new nmethod.
// Drives the whole pipeline: intermediate-node generation, basic-block
// construction, the optimization passes, register allocation, debug info,
// machine-code emission, and finally nmethod construction.
nmethod* Compiler::compile() {
  NewBackendGuard guard;    // temporarily adjusts backend flags if TryNewBackend is set

  if ((PrintProgress > 0) && (nofCompilations % PrintProgress == 0)) std->print(".");
  // pick a label describing this kind of compile (for events/tracing)
  char* compiling;
  if (DeltaProcess::active()->isUncommon()) {
    compiling = recompilee ? "Uncommon-Recompiling " : "Uncommon-Compiling ";
  } else {
    if (_uses_inlining_database) {
      compiling = recompilee ? "Recompiling (database)" : "Compiling (database)";
    } else {
      compiling = recompilee ? "Recompiling " : "Compiling ";
    }
  }
  EventMarker em("%s%#lx %#lx", compiling, key->selector(), NULL);

  // don't use uncommon traps when recompiling because of trap
  useUncommonTraps = DeferUncommonBranches && !is_uncommon_compile();
  if (is_uncommon_compile()) reporter->report_uncommon(false);
  if (recompilee && recompilee->isUncommonRecompiled()) reporter->report_uncommon(true);
  // don't use counters when compiling from DB
  FlagSetting fs(UseRecompilation, UseRecompilation && !is_database_compile());

  bool should_trace = _uses_inlining_database ? PrintInliningDatabaseCompilation : PrintCompilation;
  TraceTime t(compiling, should_trace);
    
  if (should_trace || PrintCode) {
    print_key(std);
    if (PrintCode || PrintInlining) std->print("\n");
  }

  // front end: generate intermediate nodes and build the BB graph
  topScope->genCode();
  fixupNLRTestPoints();
  buildBBs();

  if (PrintCode) print_code(false);
  if (verifyOften) bbIterator->verify();

  // compute escaping blocks and up-level accessed vars
  bbIterator->computeEscapingBlocks();
  bbIterator->computeUplevelAccesses();
  if (verifyOften) bbIterator->verify();

  // construct def & use information
  bbIterator->makeUses();
  if (verifyOften) bbIterator->verify();

  // optimization passes, each individually flag-controlled
  if (LocalCopyPropagate) {
    bbIterator->localCopyPropagate();
    if (verifyOften) bbIterator->verify();
  }
  if (GlobalCopyPropagate) {
    bbIterator->globalCopyPropagate();
    if (verifyOften) bbIterator->verify();
  }
  if (BruteForcePropagate) {
    bbIterator->bruteForceCopyPropagate();
    if (verifyOften) bbIterator->verify();
  }
  if (EliminateUnneededNodes) {
    bbIterator->eliminateUnneededResults();
    if (verifyOften) bbIterator->verify();
  }
  if (OptimizeIntegerLoops) {
    // run after copy propagation so that loop increment is easier to recognize
    // also run after eliminateUnneededResults so that cpInfo is set for eliminated PRegs
    topScope->optimizeLoops();
    if (verifyOften) bbIterator->verify();
  }
 
  // compute existence & format of run-time context objects and blocks
  computeBlockInfo();

  // allocate floats
  _totalNofFloatTemporaries = topScope->allocateFloatTemporaries(0);
  
  // HACK: Fix preallocation
  // Necessary because a few primitives (allocateContext/Closure) need self or
  // the previous context after calling a primitive; i.e., self or the previous
  // context should not be allocated to a register. Currently not working correctly
  // -> allocated to stack as a temporary fix for the problem.
  theAllocator->preAllocate(topScope->self()->preg());
  bbIterator->localAlloc();             // allocate regs within basic blocks
  theAllocator->allocate(bbIterator->globals);

  if (PrintCode) print_code(false);
#ifdef ASSERT
  bbIterator->verify();
#endif

  topScope->generateDebugInfo();        // must come before gen to set scopeInfo
  topScope->generateDebugInfoForNonInlinedBlocks();

  // generate machine code
  theMacroAssm  = new MacroAssembler(_code);
  if (UseNewBackend) {
    PRegMapping* mapping = new PRegMapping(theMacroAssm, topScope->nofArguments(), 6, topScope->nofTemporaries());
    CodeGenerator* cgen = new CodeGenerator(theMacroAssm, mapping);
    cgen->initialize(topScope);
    bbIterator->apply(cgen);
    cgen->finalize(topScope);
  } else {
    // use a node visitor to generate code
    OldCodeGenerator* cgen = new OldCodeGenerator();
    bbIterator->apply(cgen);
  }
  theMacroAssm->finalize();
  theMacroAssm = NULL;

// label verification always runs in debug builds; in release builds only
// when verifyOften is set
#ifndef ASSERT
  if (verifyOften) {
#endif
    bool ok = bbIterator->verifyLabels();
    if (!ok) print_code(false);
#ifndef ASSERT
  }
#endif

  rec->generate();                      // write debugging info
  nmethod* nm = new_nmethod(this);      // construct new nmethod
  em.event.args[1] = nm;

  if (PrintAssemblyCode) Disassembler::decode(nm);

  reporter->finish_reporting();
  if (should_trace) {
    lprintf(": %#lx (%d bytes; level %ld v%d)\n", nm, nm->instsLen(), nm->level(), nm->version());
    flush_logFile();
  }

  if (verifyOften) nm->verify();

  if (PrintDebugInfo) nm->print_inlining(std, true);

  return nm;
}
00479 
00480 
// Build the basic block graph from the node list starting at firstNode.
void Compiler::buildBBs() {             // build the basic block graph
  bbIterator->build(firstNode);
}
00484 
00485 
00486 void Compiler::fixupNLRTestPoints() {
00487   // the NLRTest nodes didn't get their correct successors during node generation because
00488   // their sender scopes' nlrTestPoints may not yet have been created; fix them up now
00489   int i = nlrTestPoints->length();
00490   while (i-- > 0) nlrTestPoints->at(i)->fixup();
00491 }
00492 
00493 
00494 void Compiler::computeBlockInfo() {
00495   FlagSetting(EliminateUnneededNodes, true);  // unused context nodes must be eliminated
00496   GrowableArray<InlinedScope*>* allContexts = new GrowableArray<InlinedScope*>(25);
00497   topScope->collectContextInfo(allContexts);
00498   // for now, just allocate all contexts as in interpreter
00499   // fix this later: collect all uplevel-accessed PRegs at same loop depth, form physical
00500   // contexts for these
00501   // also, if uplevel-read and single def --> could copy into context and keep
00502   // stack/register copy
00503 
00504 
00505   // remove all unused contexts 
00506   // need to iterate because removing a nested context may enable removal of a parent context
00507   // (could avoid iteration with topo sort, but there are few contexts anyway)
00508   bool changed = EliminateContexts;
00509   while (changed) {
00510     changed = false;
00511     for (int i = allContexts->length() - 1; i >= 0; i--) {
00512       InlinedScope* s = allContexts->at(i);
00513       if (s == NULL) continue;
00514       PReg* contextPR = s->context();
00515       assert(contextPR->isSinglyAssigned(), "should have exactly one def");
00516       GrowableArray<Expr*>* temps = s->contextTemporaries();
00517       bool noUplevelAccesses = true;
00518       // check if all context temps can be stack-allocated
00519       for (int j = temps->length() - 1; j >= 0; j--) {
00520         PReg* r = temps->at(j)->preg();
00521         if (r->uplevelR() || r->uplevelW()          // this temp is still uplevel-accessed, so can't eliminate context
00522             || (r->isBlockPReg() && !r->isUnused()) // this block still forces a context
00523             ) {
00524           noUplevelAccesses = false;
00525           break;
00526         }
00527       }
00528       // TO DO: check if context is needed for NLRs
00529       // (noUplevelAccesses alone does not allow elimination)
00530       if (/*noUplevelAccesses || */contextPR->isSinglyUsed()) {
00531         // can eliminate context -- no uplevel-accessed vars
00532         // (single use is context initializer)
00533         if (CompilerDebug) cout(PrintEliminateContexts)->print("%*s*eliminating context %s\n", s->depth, "", contextPR->safeName());
00534         contextPR->scope()->gen()->removeContextCreation();
00535         allContexts->at_put(i, NULL);     // make code generator break if it tries to access this context
00536         changed = true;
00537       }
00538     }
00539   }
00540 
00541   // now collect all remaining contexts
00542   int i = allContexts->length();
00543   contextList = new GrowableArray<InlinedScope*>(i, i, NULL);
00544   while (i-- > 0) {
00545     // should merge several contexts into one physical context if possible
00546     // fix this later
00547     InlinedScope* s = allContexts->at(i);
00548     if (s == NULL) continue;
00549     PReg* contextPR = s->context();
00550     if (CompilerDebug) {
00551       cout(PrintEliminateContexts)->print("%*s*could not eliminate context %s in scope %s\n", 
00552                                           s->depth, "", contextPR->safeName(), s->key()->print_string());
00553     }
00554     reporter->report_context(s);
00555     contextList->at_put(i, s);
00556     ContextCreateNode* c = s->contextInitializer()->creator();
00557     c->set_contextNo(i);
00558     GrowableArray<Expr*>* temps = s->contextTemporaries();
00559     // allocate the temps in this context (but only if they're used)
00560     int ntemps = temps->length();
00561     int size = 0;
00562     for (int j = 0; j < ntemps; j++) {
00563       PReg* p = temps->at(j)->preg();
00564 // should be:
00565 //     if (p->isUsed() && (p->uplevelR() || p->uplevelW())) {
00566 // but doesn't work yet (probably must fix set_self_via_context etc.)
00567 // -Urs 6/96
00568       if (p->isUsed()) {
00569         // allocate p to context temp
00570         assert(p->scope() == s || p->isBlockPReg(), "oops");
00571         Location loc = Mapping::contextTemporary(i, size, s->scopeID());
00572         if (p->isBlockPReg()) {
00573           // Blocks aren't actually assigned (at the PReg level) so that the inlining info
00574           // isn't lost.  Thus we need to create a fake destination here if the context exists.
00575           SAPReg* dest = new SAPReg(s, loc, true, true, PrologueBCI, EpilogueBCI);
00576           Expr* e = new UnknownExpr(dest, NULL);
00577           //contextPR->scope()->contextInitializer()->initialize(j, init);
00578           temps->at_put(j, e);
00579         } else {
00580           p->allocateTo(loc);
00581         }
00582         size++;
00583       }
00584     }
00585     c->set_sizeOfContext(size);
00586     if (size < ntemps && c->scope()->number_of_noninlined_blocks() > 0) {
00587       // this hasn't been exercised much 
00588       compiler_warning("while compiling %s: eliminated some context temps", key->print_string());
00589     }
00590   }
00591 
00592   // Compute the number of noninlined blocks for the nmethod and allocate 
00593   const int nblocks = topScope->number_of_noninlined_blocks();
00594 
00595   if (is_method_compile() || nblocks > 0) {
00596     // allocate nblocks+1 jumpTable entries
00597     const jumpTableID id = Universe::code->jump_table()->allocate(nblocks + 1);
00598 
00599     if (is_method_compile()) {
00600       main_jumpTable_id = id;
00601     } else {
00602       promoted_jumpTable_id = id;
00603     }
00604 
00605     // first is for nmethod itself
00606     int block_index = 1;
00607     for (int i = bbIterator->exposedBlks->length() - 1; i >= 0; i--) {
00608       BlockPReg* blk = bbIterator->exposedBlks->at(i);
00609       if (blk->isUsed()) {
00610         assert(block_index <= nblocks, "nblocks too small");
00611         blk->closure()->set_id(id.sub(block_index++));
00612       }
00613     }
00614     assert(nblocks + 1 == block_index, "just checking");
00615   }
00616 }
00617 
// Create the top scope (MethodScope or BlockScope) for this compilation,
// choosing the recompilee scope (type-feedback info) if one is available.
void Compiler::initTopScope() {
  if (recompileeRScope == NULL) {
    if (TypeFeedback) {
      // derive type information from the recompilee if there is one;
      // otherwise treat the method as if it were interpreted
      recompileeRScope = 
        recompilee ?
          (RScope*) RNonDummyScope::constructRScopes(recompilee) : 
          (RScope*) new RInterpretedScope(NULL, -1, key, method, 0, true);
    } else {
      recompileeRScope = new RNullScope;
    }
  }
  if (PrintRScopes) recompileeRScope->printTree(0, 0);

  countID = Universe::code->nextNMethodID(); 
  Scope* parentScope = NULL;
  SendInfo* info = new SendInfo(NULL, key, NULL);
  InlinedScope* sender = NULL;  // no sender -- top scope in nmethod
    
  if (is_block_compile()) {
    // block method
    assert(parentNMethod != NULL, "parentNMethod must be set for block compile");
    assert(blockScope->parent() != NULL, "must know parent");
    parentScope = new_OutlinedScope(parentNMethod, blockScope->parent());
    topScope = BlockScope::new_BlockScope(method, parentScope->methodHolder(), parentScope, sender, recompileeRScope, info);
  } else {
    // normal method
    klassOop methodHolder = key->klass()->klass_part()->lookup_method_holder_for(method);
    topScope = MethodScope::new_MethodScope(method, methodHolder, sender, recompileeRScope, info);
  }
  // make sure home exists always
  assert(topScope->home() != NULL, "no home");
}
00650 
00651 
// Print a description of this compiler (key, method, and a debugger hint)
// to the log.
void Compiler::print() {
  print_short(); lprintf(":");
  key->print();
  lprintf("\tmethod: %s\n", method->print_string());
  lprintf("\tp ((Compiler*)%#lx)->print_code()\n", this);
}
00658 
00659 
// Print a one-line identification of this compiler object.
void Compiler::print_short() {
  lprintf("(Compiler*) %#lx", this);
}
00663 
00664 
00665 void Compiler::print_key(outputStream* str) {
00666   key->print_on(str);
00667   if (topScope == NULL) return; // print_key may be used during fatals where the compiler isn't set up yet
00668 
00669   str->print(" (no. %d, method %#x", nofCompilations, method);
00670   // print the parent scope offset for block compiles.
00671   if (blockScope) {
00672     std->print(", parent offset %d", blockScope->parent()->offset());
00673   }
00674   str->print(")...");
00675 }
00676 
// Print the intermediate code of this compilation (or of the last finished
// one, when no compiler is active).
// suppressTrivial - passed through to the BB printer to skip trivial nodes.
void Compiler::print_code(bool suppressTrivial) {
  if (theCompiler == NULL) {
    // This will not work, another indication that firstNode should be stored with the BBIterator
    // anyway, not fixed for now (gri 6/6/96)
    last_bbIterator->print_code(suppressTrivial);
    last_bbIterator->print();
  } else {
    bool hadBBs = bbIterator != NULL;
    if (! hadBBs) {
      // need BBs for printing
      bbIterator = new BBIterator;
      buildBBs();
    }
    bbIterator->print_code(suppressTrivial);
    bbIterator->print();
    if (!hadBBs) {
      // tear the temporary BB graph back down
      bbIterator->clear();
      bbIterator = NULL;
    }
  }
  lprintf("\n\n");
}
00699 
00700 
00701 int Compiler::get_invocation_counter_limit() const {
00702   if (is_uncommon_compile()) {
00703     return RecompilationPolicy::uncommonNMethodInvocationLimit(version());
00704   } else {
00705     return Interpreter::get_invocation_counter_limit();
00706   }
00707 }
00708 
00709 
// Record the offset of the special-handler call within the nmethod.
void Compiler::set_special_handler_call_offset(int offset) {
  // doesn't need to be aligned since called rarely and from within the nmethod only
  _special_handler_call_offset = offset;
}
00714 
00715 
// Record the (oop-aligned) offset of the nmethod's entry point.
void Compiler::set_entry_point_offset(int offset) {
  assert(offset % oopSize == 0, "entry point must be aligned");
  _entry_point_offset = offset;
}
00720 
00721 
// Record the (oop-aligned) offset of the nmethod's verified entry point.
void Compiler::set_verified_entry_point_offset(int offset) {
  assert(offset % oopSize == 0, "verified entry point must be aligned");
  _verified_entry_point_offset = offset;
}
00726 
00727 
// Record the size of the float section (must be non-negative).
void Compiler::set_float_section_size(int size) {
  assert(size >= 0, "size cannot be negative");
  _float_section_size = size;
}
00732 
00733 
// Record the start offset of the float section.
void Compiler::set_float_section_start_offset(int offset) {
  _float_section_start_offset = offset;
}
00737 
00738 
// Number of non-inlined blocks in the method being compiled (delegated to
// the top scope).
int Compiler::number_of_noninlined_blocks() const {
  return topScope->number_of_noninlined_blocks();
}
00742 
00743 
// Copy non-inlined block information into the given nmethod (delegated to
// the top scope).
void Compiler::copy_noninlined_block_info(nmethod* nm) {
  topScope->copy_noninlined_block_info(nm);
}
00747 
00748 #ifdef DEBUG
00749 outputStream* cout(bool flag) {
00750   return (flag || theCompiler == NULL) ? std : theCompiler->messages;
00751 }
00752 
// Dump the current compiler's buffered debug messages to the log.
void print_cout() {
  ResourceMark rm;
  lputs(theCompiler->messages->as_string());
}
00757 
00758 #endif  // DEBUG
00759 
00760 #endif  // DELTA_COMPILER

Generated on Mon Oct 9 13:37:07 2006 for Strongtalk VM by  doxygen 1.4.7