Example 1
/*
 * is l2 fully contained in l1?
 *   start1                             end1
 *   [----------------------------------)
 *           start2           end2
 *           [----------------)
 */
static inline int
lo_seg_contained(struct pnfs_layout_range *l1,
		 struct pnfs_layout_range *l2)
{
	u64 start1 = l1->offset;
	u64 end1 = end_offset(start1, l1->length);
	u64 start2 = l2->offset;
	u64 end2 = end_offset(start2, l2->length);

	return (start1 <= start2) && (end1 >= end2);
}
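Both containment checks here (and the intersection check in Example 3) lean on an end_offset() helper, which is not shown in these excerpts, to turn an (offset, length) pair into an exclusive end offset. A minimal sketch of what such a helper can look like, assuming the usual pNFS convention that a sum which would wrap saturates to NFS4_MAX_UINT64 ("range runs to end of file"):

#include <stdint.h>

#define NFS4_MAX_UINT64 (~(uint64_t)0)	/* "range extends to end of file" */

/* Sketch: exclusive end of [start, start + len), saturating instead of
 * wrapping so an open-ended length stays open-ended. */
static inline uint64_t end_offset(uint64_t start, uint64_t len)
{
	uint64_t end = start + len;

	return end >= start ? end : NFS4_MAX_UINT64;
}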
Example 2
/*
 * is l2 fully contained in l1?
 *   start1                             end1
 *   [----------------------------------)
 *           start2           end2
 *           [----------------)
 */
static bool
pnfs_lseg_range_contained(const struct pnfs_layout_range *l1,
		 const struct pnfs_layout_range *l2)
{
	u64 start1 = l1->offset;
	u64 end1 = end_offset(start1, l1->length);
	u64 start2 = l2->offset;
	u64 end2 = end_offset(start2, l2->length);

	return (start1 <= start2) && (end1 >= end2);
}
Example 3
/*
 * do l1 and l2 intersect?
 *   start1                             end1
 *   [----------------------------------)
 *                              start2           end2
 *                              [----------------)
 */
static inline int
lo_seg_intersecting(struct pnfs_layout_range *l1,
		    struct pnfs_layout_range *l2)
{
	u64 start1 = l1->offset;
	u64 end1 = end_offset(start1, l1->length);
	u64 start2 = l2->offset;
	u64 end2 = end_offset(start2, l2->length);

	return (end1 == NFS4_MAX_UINT64 || end1 > start2) &&
	       (end2 == NFS4_MAX_UINT64 || end2 > start1);
}
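Both predicates treat ranges as half-open intervals [start, end): a range that only touches another at its end offset is not an overlap, while containment allows the endpoints to coincide. A small self-contained sketch of that boundary behaviour (the range_t type and values are made up for illustration; the kernel versions additionally special-case NFS4_MAX_UINT64 as an open-ended range):

#include <assert.h>
#include <stdint.h>

typedef struct { uint64_t start, end; } range_t;	/* half-open: [start, end) */

static int contained(range_t a, range_t b)		/* is b fully inside a? */
{
	return a.start <= b.start && a.end >= b.end;
}

static int intersecting(range_t a, range_t b)
{
	return a.end > b.start && b.end > a.start;
}

int main(void)
{
	range_t l1 = { 0, 100 }, l2 = { 40, 100 }, l3 = { 100, 200 };

	assert(contained(l1, l2));	/* [40,100) fits inside [0,100)          */
	assert(intersecting(l1, l2));
	assert(!intersecting(l1, l3));	/* touching at offset 100 is no overlap  */
	assert(!contained(l2, l1));	/* containment is not symmetric          */
	return 0;
}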
Example 4
void CompiledMethod::shrink(jint code_size, jint relocation_size) {
  // The current implementation copies the relocation information down
  // and "shrinks" the compiled method object in place, allocating a
  // dummy filler object in the now unused end part.
  //
  // The compiled method object will generally not be the last object in
  // the heap, since the compiler allocates other objects and GC might
  // have occurred. However, if the GC always does sliding compaction
  // and the compiler *guarantees* not to hold on to any allocated
  // object other than the compiled method, we could simply move the
  // top of the object heap down!

  // Copy the relocation segment down
  void* src = field_base(end_offset() - relocation_size);
  void* dst = field_base(base_offset() + code_size);
  GUARANTEE(src >= dst, "should be copying down");
  jvm_memmove(dst, src, relocation_size); // possibly overlapping regions

  // Shrink compiled method object
  size_t new_size = CompiledMethodDesc::allocation_size(code_size +
                                                        relocation_size);
  Universe::shrink_object(this, new_size);
  ((CompiledMethodDesc*) obj())->set_size(code_size + relocation_size);
  GUARANTEE(object_size() == new_size, "invalid shrunk size");
}
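The layout manipulation in shrink() is easier to see on a flat byte buffer: the code stays where it is, the relocation segment is copied down so it begins right after the now-smaller code, and everything past the new end becomes filler. A hedged sketch of just that arithmetic (buffer handling and names are illustrative, not the VM's API):

#include <string.h>
#include <stddef.h>

/* Toy model: [code | unused slack | reloc] becomes [code | reloc | filler]. */
static size_t shrink_buffer(unsigned char *buf, size_t old_size,
			    size_t code_size, size_t relocation_size)
{
	unsigned char *src = buf + old_size - relocation_size;	/* old reloc start */
	unsigned char *dst = buf + code_size;			/* new reloc start */

	/* Copying down; the regions may overlap, so memmove rather than memcpy. */
	memmove(dst, src, relocation_size);
	return code_size + relocation_size;			/* new payload size */
}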
Example 5
static struct objlayout_io_state *
objlayout_alloc_io_state(struct pnfs_layout_hdr *pnfs_layout_type,
			struct page **pages,
			unsigned pgbase,
			loff_t offset,
			size_t count,
			struct pnfs_layout_segment *lseg,
			void *rpcdata,
			gfp_t gfp_flags)
{
	struct objlayout_io_state *state;
	u64 lseg_end_offset;

	dprintk("%s: allocating io_state\n", __func__);
	if (objio_alloc_io_state(lseg, &state, gfp_flags))
		return NULL;

	BUG_ON(offset < lseg->pls_range.offset);
	lseg_end_offset = end_offset(lseg->pls_range.offset,
				     lseg->pls_range.length);
	BUG_ON(offset >= lseg_end_offset);
	if (offset + count > lseg_end_offset) {
		count = lseg->pls_range.length -
				(offset - lseg->pls_range.offset);
		dprintk("%s: truncated count %Zd\n", __func__, count);
	}

	if (pgbase > PAGE_SIZE) {
		pages += pgbase >> PAGE_SHIFT;
		pgbase &= ~PAGE_MASK;
	}
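The count clamping above keeps the I/O inside the layout segment: when offset + count would run past the segment's end, count is cut back to the bytes left between offset and that end. With concrete (made-up) numbers:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
	uint64_t lseg_offset = 4096, lseg_length = 8192;	/* segment covers [4096, 12288) */
	uint64_t offset = 10240, count = 4096;			/* request would end at 14336   */
	uint64_t lseg_end = lseg_offset + lseg_length;

	if (offset + count > lseg_end)
		count = lseg_length - (offset - lseg_offset);	/* 8192 - 6144 = 2048 */

	printf("truncated count %llu\n", (unsigned long long)count);
	return 0;
}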
Example 6
static void _fix_verify_io_params(struct pnfs_layout_segment *lseg,
			   struct page ***p_pages, unsigned *p_pgbase,
			   u64 offset, unsigned long count)
{
	u64 lseg_end_offset;

	BUG_ON(offset < lseg->pls_range.offset);
	lseg_end_offset = end_offset(lseg->pls_range.offset,
				     lseg->pls_range.length);
	BUG_ON(offset >= lseg_end_offset);
	WARN_ON(offset + count > lseg_end_offset);

	if (*p_pgbase > PAGE_SIZE) {
		dprintk("%s: pgbase(0x%x) > PAGE_SIZE\n", __func__, *p_pgbase);
		*p_pages += *p_pgbase >> PAGE_SHIFT;
		*p_pgbase &= ~PAGE_MASK;
	}
}
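The pgbase fix-up in Examples 5 and 6 turns a base offset that has grown past one page into a page-vector advance plus an in-page remainder. A standalone sketch of the same arithmetic, assuming the common 4 KiB page configuration:

#include <stdio.h>

#define PAGE_SHIFT 12u
#define PAGE_SIZE  (1u << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))

int main(void)
{
	unsigned pgbase = 0x2340;	/* offset spilling past the first page */
	unsigned page_skip = 0;

	if (pgbase > PAGE_SIZE) {
		page_skip = pgbase >> PAGE_SHIFT;	/* whole pages to advance   */
		pgbase &= ~PAGE_MASK;			/* remaining in-page offset */
	}
	printf("skip %u pages, pgbase 0x%x\n", page_skip, pgbase);	/* 2, 0x340 */
	return 0;
}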
Example 7
bool CompiledMethod::expand_compiled_code_space(int delta, int relocation_size) {
  if (ObjectHeap::expand_current_compiled_method(delta)) {
    if (Verbose) {
      TTY_TRACE_CR(("Expanding compiled method from %d to %d bytes", 
                    size(), size() + delta));
    }
    void* src = field_base(end_offset() - relocation_size);
    void* dst = DERIVED(void*, src, delta);
    GUARANTEE(src < dst, "should be copying up");
    jvm_memmove(dst, src, relocation_size); // possibly overlapping regions
    // It's probably OK only to clear dst[-1], but let's just make sure.
    jvm_memset(src, 0, delta);
    ((CompiledMethodDesc*) obj())->set_size(size() + delta);

    if (VerifyGC > 2) {
      ObjectHeap::verify();
    }
    return true;
  } else {
    return false;
  }
}
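expand_compiled_code_space() is the mirror image of shrink(): the relocation segment is copied up by delta and the gap that opens between code and relocation is zero-filled so stale relocation bytes cannot be misread. Sketched on a flat buffer under the same illustrative assumptions as above (the caller must already have room for old_size + delta bytes):

#include <string.h>
#include <stddef.h>

/* Toy model: [code | reloc] becomes [code | zeroed gap | reloc]. */
static size_t expand_buffer(unsigned char *buf, size_t old_size,
			    size_t delta, size_t relocation_size)
{
	unsigned char *src = buf + old_size - relocation_size;	/* old reloc start */
	unsigned char *dst = src + delta;			/* new reloc start */

	memmove(dst, src, relocation_size);	/* copying up; regions may overlap */
	memset(src, 0, delta);			/* clear the newly opened gap      */
	return old_size + delta;		/* new payload size                */
}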