void CValueTable::Realloc(int NewPitch, int NewAllocRows) { n_assert(NewAllocRows >= NumRows); n_assert(NewPitch >= RowPitch); AllocatedRows = NewAllocRows; int NewValueBufferSize = NewPitch * NewAllocRows; ValueBuffer = n_realloc(ValueBuffer, NewValueBufferSize); if (NewPitch != RowPitch) { int RowIdx = NumRows - 1; int PitchDiff = NewPitch - RowPitch; char* FromPtr = (char*)ValueBuffer + RowIdx * RowPitch; char* ToPtr = (char*)ValueBuffer + RowIdx * NewPitch; while (FromPtr > (char*)ValueBuffer) { memcpy(ToPtr, FromPtr, RowPitch); memset(ToPtr - PitchDiff, 0, PitchDiff); FromPtr -= RowPitch; ToPtr -= NewPitch; } } int LastRowDataEndOffset = NewPitch * (NumRows - 1) + RowPitch; memset((char*)ValueBuffer + LastRowDataEndOffset, 0, NewValueBufferSize - LastRowDataEndOffset); RowPitch = NewPitch; //???pad to 4 bytes (see buffer-wide operations & DWORD usage)? RowStateBuffer = (uchar*)n_realloc(RowStateBuffer, NewAllocRows); memset(RowStateBuffer + NumRows, 0, NewAllocRows - NumRows); }
void add_stack_frame(offset return_PC, unsigned num_locals, zword *locals,
                     unsigned num_args, int result_var)
{
  unsigned i;

  /* The frame counter is bumped only after the growth checks below, because
     fatal-error reporting must still see a valid current frame in order to
     dump its local variables */
  if(frame_count + 1 >= frame_max) {
    frame_max *= 2;
    if(stacklimit && frame_max > stacklimit) {
      frame_max = stacklimit;
      if(frame_count + 1 >= frame_max)
        n_show_fatal(E_STACK, "recursed deeper than allowed", frame_count + 1);
    }
    stack_frames = (Stack_frame *)
      n_realloc(stack_frames, sizeof(*stack_frames) * frame_max);
    n_show_port(E_STACK, "deep recursion not available on some 'terps",
                frame_max);
  }

  frame_count++;

  {
    /* Record the new frame's bookkeeping */
    Stack_frame *frame = &stack_frames[frame_count];
    frame->stack_stack_start = stack_pointer;
    frame->return_PC         = return_PC;
    frame->num_locals        = num_locals;
    frame->arguments         = num_args;
    frame->result_variable   = result_var;
  }

  /* Push the routine's locals onto the value stack */
  check_stack_stack(num_locals);
  for(i = 0; i < num_locals; i++)
    stack_stack[stack_pointer++] = locals[i];

  stack_min = stack_pointer;
  local_vars = stack_stack + stack_frames[frame_count].stack_stack_start;
}
/* Ensure we have at least addsize more zwords available on the stack, and
 * if not, allocate more space.
 * FIX: a single doubling is not guaranteed to cover a large addsize when the
 * pointer already sits near the limit, so keep doubling until the request
 * actually fits. */
static void check_stack_stack(offset addsize)
{
  if(stack_pointer + addsize >= stack_max) {
    while(stack_pointer + addsize >= stack_max)
      stack_max *= 2;
    stack_stack = (zword *) n_realloc(stack_stack,
                                      sizeof(*stack_stack) * stack_max);
    n_show_port(E_STACK, "stack larger than available on some interps",
                stack_max);
    /* the buffer may have moved; re-derive the cached locals pointer */
    local_vars = stack_stack + stack_frames[frame_count].stack_stack_start;
  }
}
/* Grows arr's pointer table to new_size slots, NULL-filling the newly added
 * tail. new_size must be strictly greater than the current allocation.
 * Returns arr. */
static tn_array *n_array_realloc(tn_array *arr, size_t new_size)
{
    size_t i;
    void **tmp;

    /* FIX: compare the sizes directly instead of going through a signed
       "int diff" — the size_t subtraction could wrap/truncate for large
       values and defeat the assertion. (Also drops the obsolete
       'register' qualifiers.) */
    n_assert(new_size > arr->allocated);

    tmp = n_realloc(arr->data, new_size * sizeof(*tmp));
    for (i = arr->allocated; i < new_size; i++)
        tmp[i] = NULL;

    arr->data = tmp;
    arr->allocated = new_size;
    return arr;
}
// Writes Size bytes from pData at the current position, growing the backing
// buffer when required, and returns the number of bytes written. Requires an
// unmapped stream opened for writing or appending.
DWORD CMemStream::Write(const void* pData, DWORD Size)
{
    n_assert(!IsMapped() && ((AccessMode & SAM_WRITE) || (AccessMode & SAM_APPEND)));

    const DWORD NewEnd = Pos + Size;
    if (NewEnd > AllocSize)
    {
        AllocSize = NewEnd;
        if (SelfAlloc)
            pBuffer = (char*)n_realloc(pBuffer, AllocSize);
        else
        {
            // The current buffer is externally owned: migrate its contents
            // into an internally allocated buffer we are allowed to grow.
            char* pNew = (char*)n_malloc(AllocSize);
            if (pBuffer) memcpy(pNew, pBuffer, DataSize);
            pBuffer = pNew;
            SelfAlloc = true;
        }
        n_assert(pBuffer);
    }

    memcpy(pBuffer + Pos, pData, Size);
    Pos += Size;
    if (Pos > DataSize) DataSize = Pos;
    return Size;
}