Example #1
/* GC procedures */
static 
CLEAR_MARKS_PROC(sproc_clear_marks)
{
    stream_proc_state *const pptr = vptr;

    r_clear_attrs(&pptr->proc, l_mark);
    r_clear_attrs(&pptr->data, l_mark);
}
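
These CLEAR_MARKS_PROC examples all receive an untyped vptr and clear l_mark on each ref embedded in the structure. Judging from the hand-written signature of refs_clear_marks in Example #14 below, the macro presumably expands to a declaration along these lines; this is only an orienting sketch, not the actual definition from Ghostscript's gsstruct.h:

/* Hedged sketch: assumed expansion of CLEAR_MARKS_PROC(proc), inferred
   from the explicit signature of refs_clear_marks in Example #14. */
#define CLEAR_MARKS_PROC(proc)\
  void proc(const gs_memory_t *cmem, void /*obj_header_t*/ *vptr,\
            uint size, const gs_memory_struct_type_t *pstype)
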
Example #2
/* GC descriptors */
static 
CLEAR_MARKS_PROC(context_state_clear_marks)
{
    gs_context_state_t *const pcst = vptr;

    r_clear_attrs(&pcst->stdio[0], l_mark);
    r_clear_attrs(&pcst->stdio[1], l_mark);
    r_clear_attrs(&pcst->stdio[2], l_mark);
    r_clear_attrs(&pcst->error_object, l_mark);
    r_clear_attrs(&pcst->userparams, l_mark);
}
Example #3
static 
CLEAR_MARKS_PROC(ref_stack_clear_marks)
{
    ref_stack_t *const sptr = vptr;

    r_clear_attrs(&sptr->current, l_mark);
}
Example #4
static int
zrunandhide(i_ctx_t *i_ctx_p)
{
    os_ptr op = osp;
    es_ptr ep;

    check_op(2);
    if (!r_is_array(op - 1))
        return_op_typecheck(op);
    if (!r_has_attr(op, a_executable))
        return 0;		/* literal object just gets pushed back */
    check_estack(5);
    ep = esp += 5;
    make_mark_estack(ep - 4, es_other, err_end_runandhide); /* error case */
    make_op_estack(ep - 1,  end_runandhide); /* normal case */
    ref_assign(ep, op);
    /* Store the object we are hiding and its current tas.type_attrs */
    /* on the exec stack, then change it to 'noaccess' */
    make_int(ep - 3, (int)op[-1].tas.type_attrs);
    ref_assign(ep - 2, op - 1);
    r_clear_attrs(ep - 2, a_all);
    /* replace the array with a special kind of mark that has a_read access */
    esfile_check_cache();
    pop(2);
    return o_push_estack;
}
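
The cast of op[-1].tas.type_attrs to an integer above shows where the attribute bits live: the attributes sit in the type_attrs word of the ref's tas field, which is what r_clear_attrs and r_set_attrs manipulate. A rough sketch of how these macros behave, inferred from that usage rather than copied from Ghostscript's iref.h:

/* Hedged sketch of the attribute macros, inferred from how the examples
   use them; the real definitions live in iref.h and may differ in detail. */
#define r_clear_attrs(rp, mask)  ((rp)->tas.type_attrs &= ~(mask))
#define r_set_attrs(rp, mask)    ((rp)->tas.type_attrs |= (mask))

This reading is consistent with Example #9 below, which saves attrs->value.intval, clears a_all, and restores the saved bits with r_set_attrs.
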
Example #5
static int
zcurrentfile(i_ctx_t *i_ctx_p)
{
    os_ptr op = osp;
    ref *fp;

    push(1);
    /* Check the cache first */
    if (esfile != 0) {
#ifdef DEBUG
        /* Check that esfile is valid. */
        ref *efp = zget_current_file(i_ctx_p);

        if (esfile != efp) {
            lprintf2("currentfile: esfile=0x%lx, efp=0x%lx\n",
                     (ulong) esfile, (ulong) efp);
            ref_assign(op, efp);
        } else
#endif
            ref_assign(op, esfile);
    } else if ((fp = zget_current_file(i_ctx_p)) == 0) {	/* Return an invalid file object. */
        /* This doesn't make a lot of sense to me, */
        /* but it's what the PostScript manual specifies. */
        make_invalid_file(i_ctx_p, op);
    } else {
        ref_assign(op, fp);
        esfile_set_cache(fp);
    }
    /* Make the returned value literal. */
    r_clear_attrs(op, a_executable);
    return 0;
}
Example #6
static
CLEAR_MARKS_PROC(change_clear_marks)
{
    alloc_change_t *const ptr = (alloc_change_t *)vptr;

    if (r_is_packed(&ptr->contents))
        r_clear_pmark((ref_packed *) & ptr->contents);
    else
        r_clear_attrs(&ptr->contents, l_mark);
}
Example #7
/* or if the object did not have the access already when modify=1. */
static int
access_check(i_ctx_t *i_ctx_p,
	     int access,	/* mask for attrs */
	     bool modify)	/* if true, reduce access */
{
    os_ptr op = osp;
    ref *aop;

    switch (r_type(op)) {
	case t_dictionary:
	    aop = dict_access_ref(op);
	    if (modify) {
		if (!r_has_attrs(aop, access))
		    return_error(e_invalidaccess);
		ref_save(op, aop, "access_check(modify)");
		r_clear_attrs(aop, a_all);
		r_set_attrs(aop, access);
		dict_set_top();
		return 0;
	    }
	    break;
	case t_array:
	case t_file:
	case t_string:
	case t_mixedarray:
	case t_shortarray:
	case t_astruct:
	case t_device:
	    if (modify) {
		if (!r_has_attrs(op, access))
		    return_error(e_invalidaccess);
		r_clear_attrs(op, a_all);
		r_set_attrs(op, access);
		return 0;
	    }
	    aop = op;
	    break;
	default:
	    return_op_typecheck(op);
    }
    return (r_has_attrs(aop, access) ? 1 : 0);
}
Example #8
/* <obj> cvlit <obj> */
static int
zcvlit(i_ctx_t *i_ctx_p)
{
    os_ptr op = osp;
    ref *aop;

    check_op(1);
    aop = ACCESS_REF(op);
    r_clear_attrs(aop, a_executable);
    return 0;
}
Example #9
static int
runandhide_restore_hidden(i_ctx_t *i_ctx_p, ref *obj, ref *attrs)
{
    os_ptr op = osp;

    push(1);
    /* restore the hidden_object and its type_attrs */
    ref_assign(op, obj);
    r_clear_attrs(op, a_all);
    r_set_attrs(op, attrs->value.intval);
    return 0;
}
Example #10
/* r_size(op1) was set just above. */
static int
do_execstack(i_ctx_t *i_ctx_p, bool include_marks, os_ptr op1)
{
    os_ptr op = osp;
    ref *arefs = op1->value.refs;
    uint asize = r_size(op1);
    uint i;
    ref *rq;

    /*
     * Copy elements from the stack to the array,
     * optionally skipping executable nulls.
     * Clear the executable bit in any internal operators, and
     * convert t_structs and t_astructs (which can only appear
     * in connection with stack marks, which means that they will
     * probably be freed when unwinding) to something harmless.
     */
    for (i = 0, rq = arefs + asize; rq != arefs; ++i) {
        const ref *rp = ref_stack_index(&e_stack, (long)i);

        if (r_has_type_attrs(rp, t_null, a_executable) && !include_marks)
            continue;
        --rq;
        ref_assign_old(op1, rq, rp, "execstack");
        switch (r_type(rq)) {
            case t_operator: {
                uint opidx = op_index(rq);

                if (opidx == 0 || op_def_is_internal(op_index_def(opidx)))
                    r_clear_attrs(rq, a_executable);
                break;
            }
            case t_struct:
            case t_astruct: {
                const char *tname = rq->value.pstruct ?
                    gs_struct_type_name_string(
                                gs_object_type(imemory, rq->value.pstruct))
                    : "NULL";

                make_const_string(rq, a_readonly | avm_foreign,
                                  strlen(tname), (const byte *)tname);
                break;
            }
            default:
                ;
        }
    }
    pop(op - op1);
    return 0;
}
Example #11
} RELOC_PTRS_END

/* ------ Unmarking phase ------ */

/* Unmark a single ref. */
void
ptr_ref_unmark(enum_ptr_t *pep, gc_state_t * ignored)
{
    ref_packed *rpp = (ref_packed *)pep->ptr;

    if (r_is_packed(rpp))
	r_clear_pmark(rpp);
    else
	r_clear_attrs((ref *)rpp, l_mark);
}
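
ptr_ref_unmark distinguishes packed refs from full-size refs: a full-size ref keeps its GC mark in the l_mark attribute, while a packed ref carries it in the lp_mark bit of the ref_packed word itself (Example #18 below clears it directly with *src &= ~lp_mark). A hedged sketch of the packed-ref counterpart to r_clear_attrs, inferred from that usage and not copied from the real headers:

/* Assumed behavior of r_clear_pmark: drop the lp_mark bit in place. */
#define r_clear_pmark(rp)  (*(rp) &= ~lp_mark)
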
Example #12
/*
 * If the new save level is zero, fix up the contents of a stack
 * by clearing the l_new bit in all the entries (since we can't tolerate
 * values with l_new set if the save level is zero).
 * Also, in any case, fix up the e-stack by replacing empty executable
 * strings and closed executable files that are newer than the save
 * with canonical ones that aren't.
 *
 * Note that this procedure is only called if restore_check_stack succeeded.
 */
static void
restore_fix_stack(ref_stack_t * pstack, const alloc_save_t * asave,
                  bool is_estack)
{
    ref_stack_enum_t rsenum;

    ref_stack_enum_begin(&rsenum, pstack);
    do {
        ref *stkp = rsenum.ptr;
        uint size = rsenum.size;

        for (; size; stkp++, size--) {
            r_clear_attrs(stkp, l_new);		/* always do it, no harm */
            if (is_estack) {
                ref ofile;

                ref_assign(&ofile, stkp);
                switch (r_type(stkp)) {
                case t_string:
                    if (r_size(stkp) == 0 &&
                            alloc_is_since_save(stkp->value.bytes,
                                                asave)
                       ) {
                        make_empty_const_string(stkp,
                                                avm_foreign);
                        break;
                    }
                    continue;
                case t_file:
                    if (alloc_is_since_save(stkp->value.pfile,
                                            asave)
                       ) {
                        make_invalid_file(stkp);
                        break;
                    }
                    continue;
                default:
                    continue;
                }
                r_copy_attrs(stkp, a_all | a_executable,
                             &ofile);
            }
        }
    } while (ref_stack_enum_next(&rsenum));
}
Example #13
/*
 * Push a continuation, the arguments removed for the OtherSubr, and
 * the OtherSubr procedure.
 */
static int
type1_push_OtherSubr(i_ctx_t *i_ctx_p, const gs_type1exec_state *pcxs,
                     int (*cont)(i_ctx_t *), const ref *pos)
{
    int i, n = pcxs->num_args;

    push_op_estack(cont);
    /*
     * Push the saved arguments (in reverse order, so they will get put
     * back on the operand stack in the correct order) on the e-stack.
     */
    for (i = n; --i >= 0; ) {
        *++esp = pcxs->save_args[i];
        r_clear_attrs(esp, a_executable);  /* just in case */
    }
    ++esp;
    *esp = *pos;
    return o_push_estack;
}
Example #14
/* Unmarking routine for ref objects. */
static void
refs_clear_marks(const gs_memory_t *cmem, 
		 void /*obj_header_t */ *vptr, uint size,
		 const gs_memory_struct_type_t * pstype)
{
    ref_packed *rp = (ref_packed *) vptr;
    ref_packed *end = (ref_packed *) ((byte *) vptr + size);

    /* Since the last ref is full-size, we only need to check for */
    /* the end of the block when we see one of those. */
    for (;;) {
	if (r_is_packed(rp)) {
#ifdef DEBUG
	    if (gs_debug_c('8')) {
		dlprintf1("  [8]unmark packed 0x%lx ", (ulong) rp);
		debug_print_ref(cmem, (const ref *)rp);
		dputs("\n");
	    }
#endif
	    r_clear_pmark(rp);
	    rp++;
	} else {		/* full-size ref */
	    ref *const pref = (ref *)rp;

#ifdef DEBUG
	    if (gs_debug_c('8')) {
		dlprintf1("  [8]unmark ref 0x%lx ", (ulong) rp);
		debug_print_ref(cmem, pref);
		dputs("\n");
	    }
#endif
	    r_clear_attrs(pref, l_mark);
	    rp += packed_per_ref;
	    if (rp >= (ref_packed *) end)
		break;
	}
    }
}
Example #15
int
zdefault_make_font(gs_font_dir * pdir, const gs_font * oldfont,
                   const gs_matrix * pmat, gs_font ** ppfont)
{
    gs_font *newfont = *ppfont;
    gs_memory_t *mem = newfont->memory;
    /* HACK: we know this font was allocated by the interpreter. */
    gs_ref_memory_t *imem = (gs_ref_memory_t *)mem;
    ref *fp = pfont_dict(oldfont);
    font_data *pdata;
    ref newdict, newmat, scalemat;
    uint dlen = dict_maxlength(fp);
    uint mlen = dict_length(fp) + 3;	/* FontID, OrigFont, ScaleMatrix */
    int code;

    if (dlen < mlen)
        dlen = mlen;
    if ((pdata = gs_alloc_struct(mem, font_data, &st_font_data,
                                 "make_font(font_data)")) == 0
        )
        return_error(e_VMerror);
    /*
     * This dictionary is newly created: it's safe to pass NULL as the
     * dstack pointer to dict_copy and dict_put_string.
     */
    if ((code = dict_alloc(imem, dlen, &newdict)) < 0 ||
        (code = dict_copy(fp, &newdict, NULL)) < 0 ||
        (code = gs_alloc_ref_array(imem, &newmat, a_all, 12,
                                   "make_font(matrices)")) < 0
        )
        return code;
    refset_null_new(newmat.value.refs, 12, imemory_new_mask(imem));
    ref_assign(&scalemat, &newmat);
    r_set_size(&scalemat, 6);
    scalemat.value.refs += 6;
    /*
     * Create the scaling matrix.  We could do this several different
     * ways: by "dividing" the new FontMatrix by the base FontMatrix, by
     * multiplying the current scaling matrix by a ScaleMatrix kept in
     * the gs_font, or by multiplying the current scaling matrix by the
     * ScaleMatrix from the font dictionary.  We opt for the last of
     * these.
     */
    {
        gs_matrix scale, prev_scale;
        ref *ppsm;

        if (!(dict_find_string(fp, "ScaleMatrix", &ppsm) > 0 &&
              read_matrix(mem, ppsm, &prev_scale) >= 0 &&
              gs_matrix_multiply(pmat, &prev_scale, &scale) >= 0)
            )
            scale = *pmat;
        write_matrix_new(&scalemat, &scale, imem);
    }
    r_clear_attrs(&scalemat, a_write);
    r_set_size(&newmat, 6);
    write_matrix_new(&newmat, &newfont->FontMatrix, imem);
    r_clear_attrs(&newmat, a_write);
    if ((code = dict_put_string(&newdict, "FontMatrix", &newmat, NULL)) < 0 ||
        (code = dict_put_string(&newdict, "OrigFont", pfont_dict(oldfont->base), NULL)) < 0 ||
        (code = dict_put_string(&newdict, "ScaleMatrix", &scalemat, NULL)) < 0 ||
        (code = add_FID(NULL, &newdict, newfont, imem)) < 0
        )
        return code;
    newfont->client_data = pdata;
    *pdata = *pfont_data(oldfont);
    pdata->dict = newdict;
    r_clear_attrs(dict_access_ref(&newdict), a_write);
    return 0;
}
Example #16
/* GC procedures */
static
CLEAR_MARKS_PROC(aos_clear_marks)
{   aos_state_t *const pptr = vptr;

    r_clear_attrs(&pptr->blocks, l_mark);
}
Example #17
File: zmisc.c Project: hackqiang/gs
static int
zbind(i_ctx_t *i_ctx_p)
{
    os_ptr op = osp;
    uint depth = 1;
    ref defn;
    register os_ptr bsp;

    switch (r_type(op)) {
        case t_array:
            if (!r_has_attr(op, a_write)) {
                return 0;	/* per PLRM3 */
            }
        case t_mixedarray:
        case t_shortarray:
            defn = *op;
            break;
        case t_oparray:
            defn = *op->value.const_refs;
            break;
        default:
            return_op_typecheck(op);
    }
    push(1);
    *op = defn;
    bsp = op;
    /*
     * We must not make the top-level procedure read-only,
     * but we must bind it even if it is read-only already.
     *
     * Here are the invariants for the following loop:
     *      `depth' elements have been pushed on the ostack;
     *      For i < depth, p = ref_stack_index(&o_stack, i):
     *        *p is an array (or packedarray) ref.
     */
    while (depth) {
        while (r_size(bsp)) {
            ref_packed *const tpp = (ref_packed *)bsp->value.packed; /* break const */

            r_dec_size(bsp, 1);
            if (r_is_packed(tpp)) {
                /* Check for a packed executable name */
                ushort elt = *tpp;

                if (r_packed_is_exec_name(&elt)) {
                    ref nref;
                    ref *pvalue;

                    name_index_ref(imemory, packed_name_index(&elt),
                                   &nref);
                    if ((pvalue = dict_find_name(&nref)) != 0 &&
                        r_is_ex_oper(pvalue)
                        ) {
                        store_check_dest(bsp, pvalue);
                        /*
                         * Always save the change, since this can only
                         * happen once.
                         */
                        ref_do_save(bsp, tpp, "bind");
                        *tpp = pt_tag(pt_executable_operator) +
                            op_index(pvalue);
                    }
                }
                bsp->value.packed = tpp + 1;
            } else {
                ref *const tp = bsp->value.refs++;

                switch (r_type(tp)) {
                    case t_name:	/* bind the name if an operator */
                        if (r_has_attr(tp, a_executable)) {
                            ref *pvalue;

                            if ((pvalue = dict_find_name(tp)) != 0 &&
                                r_is_ex_oper(pvalue)
                                ) {
                                store_check_dest(bsp, pvalue);
                                ref_assign_old(bsp, tp, pvalue, "bind");
                            }
                        }
                        break;
                    case t_array:	/* push into array if writable */
                        if (!r_has_attr(tp, a_write))
                            break;
                    case t_mixedarray:
                    case t_shortarray:
                        if (r_has_attr(tp, a_executable)) {
                            /* Make reference read-only */
                            r_clear_attrs(tp, a_write);
                            if (bsp >= ostop) {
                                /* Push a new stack block. */
                                ref temp;
                                int code;

                                temp = *tp;
                                osp = bsp;
                                code = ref_stack_push(&o_stack, 1);
                                if (code < 0) {
                                    ref_stack_pop(&o_stack, depth);
                                    return_error(code);
                                }
                                bsp = osp;
                                *bsp = temp;
                            } else
                                *++bsp = *tp;
                            depth++;
                        }
                }
            }
        }
        bsp--;
        depth--;
        if (bsp < osbot) {	/* Pop back to the previous stack block. */
            osp = bsp;
            ref_stack_pop_block(&o_stack);
            bsp = osp;
        }
    }
    osp = bsp;
    return 0;
}
Example #18
/* Remove the marks at the same time. */
static void
refs_compact(const gs_memory_t *mem, obj_header_t * pre, obj_header_t * dpre, uint size)
{
    ref_packed *dest;
    ref_packed *src;
    ref_packed *end;
    uint new_size;

   /* The next switch controls an optimization
      for the loop termination condition.
      It was useful during development,
      when some assumptions were temporarily wrong.
      We keep it for the record. */

    src = (ref_packed *) (pre + 1);
    end = (ref_packed *) ((byte *) src + size);
    /*
     * We know that a block of refs always ends with a
     * full-size ref, so we only need to check for reaching the end
     * of the block when we see one of those.
     */
    if (dpre == pre)		/* Loop while we don't need to copy. */
	for (;;) {
	    if (r_is_packed(src)) {
		if (!r_has_pmark(src))
		    break;
		if_debug1('8', "  [8]packed ref 0x%lx \"copied\"\n",
			  (ulong) src);
		*src &= ~lp_mark;
		src++;
	    } else {		/* full-size ref */
		ref *const pref = (ref *)src;

		if (!r_has_attr(pref, l_mark))
		    break;
		if_debug1('8', "  [8]ref 0x%lx \"copied\"\n", (ulong) src);
		r_clear_attrs(pref, l_mark);
		src += packed_per_ref;
	    }
    } else
	*dpre = *pre;
    dest = (ref_packed *) ((char *)dpre + ((char *)src - (char *)pre));
    for (;;) {
	if (r_is_packed(src)) {
	    if (r_has_pmark(src)) {
		if_debug2('8', "  [8]packed ref 0x%lx copied to 0x%lx\n",
			  (ulong) src, (ulong) dest);
		*dest++ = *src & ~lp_mark;
	    }
	    src++;
	} else {		/* full-size ref */
	    if (r_has_attr((ref *) src, l_mark)) {
		ref rtemp;

		if_debug2('8', "  [8]ref 0x%lx copied to 0x%lx\n",
			  (ulong) src, (ulong) dest);
		/* We can't just use ref_assign_inline, */
		/* because the source and destination */
		/* might overlap! */
		ref_assign_inline(&rtemp, (ref *) src);
		r_clear_attrs(&rtemp, l_mark);
		ref_assign_inline((ref *) dest, &rtemp);
		src += packed_per_ref;
		dest += packed_per_ref;
	    } else {		/* check for end of block */
		src += packed_per_ref;
		if (src >= end)
		    break;
	    }
	}
    }
    new_size = (byte *) dest - (byte *) (dpre + 1) + sizeof(ref);
#ifdef DEBUG
    /* Check that the relocation came out OK. */
    /* NOTE: this check only works within a single chunk. */
    if ((byte *) src - (byte *) dest != r_size((ref *) src - 1) + sizeof(ref)) {
	lprintf3("Reloc error for refs 0x%lx: reloc = %lu, stored = %u\n",
		 (ulong) dpre, (ulong) ((byte *) src - (byte *) dest),
		 (uint) r_size((ref *) src - 1));
	gs_abort(mem);
    }
#endif
    /* Pad to a multiple of sizeof(ref). */
    while (new_size & (sizeof(ref) - 1))
	*dest++ = pt_tag(pt_integer),
	    new_size += sizeof(ref_packed);
    /* We want to make the newly freed space into a free block, */
    /* but we can only do this if we have enough room. */
    if (size - new_size < sizeof(obj_header_t)) {	/* Not enough room.  Pad to original size. */
	while (new_size < size)
	    *dest++ = pt_tag(pt_integer),
		new_size += sizeof(ref_packed);
    } else {
	obj_header_t *pfree = (obj_header_t *) ((ref *) dest + 1);

	pfree->o_alone = 0;
	pfree->o_size = size - new_size - sizeof(obj_header_t);
	pfree->o_type = &st_bytes;
    }
    /* Re-create the final ref. */
    r_set_type((ref *) dest, t_integer);
    dpre->o_size = new_size;
}
Example #19
/* Set the relocation for a ref object. */
static bool
refs_set_reloc(obj_header_t * hdr, uint reloc, uint size)
{
    ref_packed *rp = (ref_packed *) (hdr + 1);
    ref_packed *end = (ref_packed *) ((byte *) rp + size);
    uint freed = 0;

    /*
     * We have to be careful to keep refs aligned properly.
     * For the moment, we do this by either keeping or discarding
     * an entire (aligned) block of align_packed_per_ref packed elements
     * as a unit.  We know that align_packed_per_ref <= packed_per_ref,
     * and we also know that packed refs are always allocated in blocks
     * of align_packed_per_ref, so this makes things relatively easy.
     */
    while (rp < end) {
	if (r_is_packed(rp)) {
#if align_packed_per_ref == 1
	    if (r_has_pmark(rp)) {
		if_debug1('8',
			  "  [8]packed ref 0x%lx is marked\n",
			  (ulong) rp);
		rp++;
	    } else {
#else
	    int i;

	    /*
	     * Note: align_packed_per_ref is typically
	     * 2 or 4 for 32-bit processors.
	     */
#define all_marked (align_packed_per_ref * lp_mark)
# if align_packed_per_ref == 2
#  if arch_sizeof_int == arch_sizeof_short * 2
#    undef all_marked
#    define all_marked ( (lp_mark << (sizeof(short) * 8)) + lp_mark )
#    define marked (*(int *)rp & all_marked)
#  else
#    define marked ((*rp & lp_mark) + (rp[1] & lp_mark))
#  endif
# else
#  if align_packed_per_ref == 4
#    define marked ((*rp & lp_mark) + (rp[1] & lp_mark) +\
		    (rp[2] & lp_mark) + (rp[3] & lp_mark))
#  else
	    /*
	     * The value of marked is logically a uint, not an int:
	     * we declare it as int only to avoid a compiler warning
	     * message about using a non-int value in a switch statement.
	     */
	    int marked = *rp & lp_mark;

	    for (i = 1; i < align_packed_per_ref; i++)
		marked += rp[i] & lp_mark;
#  endif
# endif
	    /*
	     * Now marked is lp_mark * the number of marked
	     * packed refs in the aligned block, except for
	     * a couple of special cases above.
	     */
	    switch (marked) {
		case all_marked:
		    if_debug2('8',
			      "  [8]packed refs 0x%lx..0x%lx are marked\n",
			      (ulong) rp,
			      (ulong) (rp + (align_packed_per_ref - 1)));
		    rp += align_packed_per_ref;
		    break;
		default:
		    /* At least one packed ref in the block */
		    /* is marked: Keep the whole block. */
		    for (i = align_packed_per_ref; i--; rp++) {
			r_set_pmark(rp);
			if_debug1('8',
				  "  [8]packed ref 0x%lx is marked\n",
				  (ulong) rp);
		    }
		    break;
		case 0:
#endif
		    if_debug2('8', "  [8]%d packed ref(s) at 0x%lx are unmarked\n",
			      align_packed_per_ref, (ulong) rp);
		    {
			uint rel = reloc + freed;

			/* Change this to an integer so we can */
			/* store the relocation here. */
			*rp = pt_tag(pt_integer) +
			    min(rel, packed_max_value);
		    }
		    rp += align_packed_per_ref;
		    freed += sizeof(ref_packed) * align_packed_per_ref;
	    }
	} else {		/* full-size ref */
	    uint rel = reloc + freed;

	    /* The following assignment is logically */
	    /* unnecessary; we do it only for convenience */
	    /* in debugging. */
	    ref *pref = (ref *) rp;

	    if (!r_has_attr(pref, l_mark)) {
		if_debug1('8', "  [8]ref 0x%lx is unmarked\n",
			  (ulong) pref);
		/* Change this to a mark so we can */
		/* store the relocation. */
		r_set_type(pref, t_mark);
		r_set_size(pref, rel);
		freed += sizeof(ref);
	    } else {
		if_debug1('8', "  [8]ref 0x%lx is marked\n",
			  (ulong) pref);
		/* Store the relocation here if possible. */
		if (!ref_type_uses_size_or_null(r_type(pref))) {
		    if_debug2('8', "  [8]storing reloc %u at 0x%lx\n",
			      rel, (ulong) pref);
		    r_set_size(pref, rel);
		}
	    }
	    rp += packed_per_ref;
	}
    }
    if_debug3('7', " [7]at end of refs 0x%lx, size = %u, freed = %u\n",
	      (ulong) (hdr + 1), size, freed);
    if (freed == size)
	return false;
#if arch_sizeof_int > arch_sizeof_short
    /*
     * If the final relocation can't fit in the r_size field
     * (which can't happen if the object shares a chunk with
     * any other objects, so we know reloc = 0 in this case),
     * we have to keep the entire object unless there are no
     * references to any ref in it.
     */
    if (freed <= max_ushort)
	return true;
    /*
     * We have to mark all surviving refs, but we also must
     * overwrite any non-surviving refs with something that
     * doesn't contain any pointers.
     */
    rp = (ref_packed *) (hdr + 1);
    while (rp < end) {
	if (r_is_packed(rp)) {
	    if (!r_has_pmark(rp))
		*rp = pt_tag(pt_integer) | lp_mark;
	    ++rp;
	} else {		/* The following assignment is logically */
	    /* unnecessary; we do it only for convenience */
	    /* in debugging. */
	    ref *pref = (ref *) rp;

	    if (!r_has_attr(pref, l_mark)) {
		r_set_type_attrs(pref, t_mark, l_mark);
		r_set_size(pref, reloc);
	    } else {
		if (!ref_type_uses_size_or_null(r_type(pref)))
		    r_set_size(pref, reloc);
	    }
	    rp += packed_per_ref;
	}
    }
    /* The last ref has to remain unmarked. */
    r_clear_attrs((ref *) rp - 1, l_mark);
#endif
    return true;
}
Example #20
/* GC procedures */
static
CLEAR_MARKS_PROC(unicode_decoder_clear_marks)
{   gs_unicode_decoder *const pptr = vptr;

    r_clear_attrs(&pptr->data, l_mark);
}