// --- generate
address MethodStubBlob::generate( heapRef moop, address c2i_adapter ) {
  // NativeMethodStubs must be jumped-to directly and are packed back-to-back.
  // Hence the first stub starts CodeEntryAligned, and for each later one to be
  // CodeEntryAligned as well, instruction_size must be a multiple of
  // CodeEntryAlignment.
  assert0( round_to(NativeMethodStub::instruction_size,CodeEntryAlignment) == NativeMethodStub::instruction_size );
  NativeMethodStub *nms;
  do {
    // The _free_list is a racing CAS-managed linked list (see the standalone
    // sketch after this function).  We must read the _free_list exactly ONCE
    // before the CAS attempt below, or otherwise know we have something that
    // used to be on the free_list and is not-null.  In general, if we re-read
    // the free_list we have to null-check the result.
    nms = _free_list;
    if( !nms ) {
      // CodeCache makes CodeBlobs.  Make a CodeBlob typed as a methodstub.
      CodeBlob *cb = CodeCache::malloc_CodeBlob( CodeBlob::methodstub, 256*NativeMethodStub::instruction_size );
      address adr = (address)round_to((intptr_t)cb->code_begins(),CodeEntryAlignment);
      cb->_code_start_offset = adr-(address)cb->_code_begins;
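      // Carve the rest of the blob into stub-sized chunks and push each one
      // onto the free_list via free_stub().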
      while( adr+NativeMethodStub::instruction_size < cb->end() ) {
        free_stub((NativeMethodStub*)adr);
        adr += NativeMethodStub::instruction_size;
      }
      // Use the last stub jammed onto the free list; it is known not-null.
      nms = (NativeMethodStub*)(adr-NativeMethodStub::instruction_size);
    }
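    // Attempt to pop nms off the free_list: install its next pointer (the
    // first word of a free stub) as the new head.  cmpxchg_ptr returns the
    // old head; if that is not nms we lost a race and must retry.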
  } while( Atomic::cmpxchg_ptr(*(NativeMethodStub**)nms,&_free_list,nms) != nms );
  nms->fill( moop, c2i_adapter );
  return (address)nms;
}
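
// What follows is NOT part of the VM: a minimal standalone sketch of the same
// lock-free free-list discipline (a Treiber stack), written with C++11
// std::atomic in place of Atomic::cmpxchg_ptr.  The Stub type, its sizes, and
// the helper names are hypothetical, for illustration only.  As in generate()
// above, the first word of a free stub doubles as its next pointer.  Note the
// sketch ignores the ABA problem; it is tolerable here only under type-stable
// memory, i.e. stubs are never returned to a general-purpose allocator.
#if 0
#include <atomic>
#include <cstdio>

struct Stub {
  Stub* next;          // valid only while the stub sits on the free list
  char  payload[56];   // rest of the stub body (size is illustrative)
};

static std::atomic<Stub*> free_list{nullptr};

static void push_stub( Stub* s ) {
  Stub* head = free_list.load();
  do {                 // publish s as the new head; retry if we raced
    s->next = head;    // on failure, compare_exchange_weak reloads head
  } while( !free_list.compare_exchange_weak(head, s) );
}

static Stub* pop_stub() {
  // Read the head ONCE, then CAS it out.  On failure compare_exchange_weak
  // reloads head for us, and we must re-check it for null, mirroring the
  // "read exactly ONCE, null-check on re-read" rule in generate().
  Stub* head = free_list.load();
  while( head && !free_list.compare_exchange_weak(head, head->next) )
    ;                  // retry with the freshly loaded head
  return head;         // null means the list was empty
}

int main() {
  static Stub stubs[4];
  for( Stub& s : stubs ) push_stub(&s);
  while( Stub* s = pop_stub() ) printf("popped %p\n", (void*)s);
  return 0;
}
#endif // 0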