void DumpBackTrace(void *processId) { printf("\n*********** BACKTRACE **************\n"); typedef VOID NTAPI RtlCaptureContext_(PCONTEXT ContextRecord); HANDLE process = GetCurrentProcess(); HANDLE thread = GetCurrentThread(); HINSTANCE hinstLib = LoadLibrary("kernel32.dll"); RtlCaptureContext_* contextFunc = (RtlCaptureContext_*)GetProcAddress(hinstLib, "RtlCaptureContext"); STACKFRAME frame; CONTEXT context; int limit = 50; DWORD machineType; memset(&frame, 0, sizeof(STACKFRAME)); memset(&context, 0, sizeof(CONTEXT)); context.ContextFlags = CONTEXT_FULL; contextFunc(&context); #if _WIN64 machineType = IMAGE_FILE_MACHINE_AMD64; frame.AddrPC.Offset = context.Rip; frame.AddrStack.Offset = context.Rsp; frame.AddrFrame.Offset = context.Rbp; #else machineType = IMAGE_FILE_MACHINE_I386; frame.AddrPC.Offset = context.Eip; frame.AddrStack.Offset = context.Esp; frame.AddrFrame.Offset = context.Ebp; #endif frame.AddrPC.Mode = AddrModeFlat; frame.AddrStack.Mode = AddrModeFlat; frame.AddrFrame.Mode = AddrModeFlat; while (StackWalk(machineType, process, thread, &frame, &context, 0, SymFunctionTableAccess, SymGetModuleBase, 0)) { if (limit-- == 0) break; DumpFrame(process, frame.AddrPC.Offset); } printf("*********** BACKTRACE **************\n\n"); }
void CMux0710Protocol::Create0710UIHLayer4FrameFromMsg(TDesC8& aMsgBuf, TInt aOffset, CCsyMsgBufBpFrame* aFrameBuf, TInt aLength, TUint8 aDlcNum, TBool aLayer4Begin, TBool aLayer4End)
/**
 * This method creates a 27.010 UIH frame from a message.
 * NOTE: This method assumes that a free frame buffer has been allocated.
 *
 * Frame layout produced (basic option, UIH):
 *   [0] start flag, [1] address (DLCI/EA/CR), [2] control (UIH),
 *   [3] Convergence Layer 4 control byte, [4..] payload,
 *   then FCS octet and closing flag.
 *
 * @param aMsgBuf - Pointer to a CSY memory element pointing to a Msg
 * @param aOffset - Offset from the start of the message to be copied to the frame.
 *                  This is needed if the calling method needs to defragment a
 *                  long message. This method builds the message as a Convergence
 *                  Layer 4 type, so only Advanced Option.
 * @param aFrameBuf - Pointer to a CSY memory element pointing to a Frame
 * @param aLength - Length of the payload to be copied.
 * @param aDlcNum - DLC channel number.
 * @param aLayer4Begin - Beginning of Convergence Layer 4.
 * @param aLayer4End - End of Convergence Layer 4.
 */
{
    _LOG_L4C3(">>CMux0710Protocol::Create0710UIHLayer4FrameFromMsg [aOffset=%d,aLength=%d]",aOffset,aLength);
    _LOG_L4C3("[aLayer4Begin=%d,aLayer4End=%d]",aLayer4Begin,aLayer4End);

    //aFrameBuf->iMsg.Zero(); - this is done in GetFreeFrameBuf

    // set initial length for headers
    aFrameBuf->iMsg.SetLength(3);

    // Octet 0 = Start Flag
    aFrameBuf->iMsg[0] = KCsy0710StartEndFlag;

    // Octet 1 = Non-extended Address, Command/Response, DLCI number
    // (DLCI in bits 2..7; 0x03 sets EA=1 and C/R=1)
    aFrameBuf->iMsg[1] = (TUint8) ((aDlcNum << 2) | 0x03); // Set the DLCI, EA, C/R

    // Octet 2 = Control Field = Frame Type (UIH)
    aFrameBuf->iMsg[2] = (TUint8) KCsy0710CTLUIH;

    // FCS is computed over the first two octets, before the payload is
    // appended; it is written after the payload below.
    // NOTE(review): this covers iMsg[0..1], i.e. the start flag and the
    // address octet but NOT the control octet — 27.010 computes the FCS over
    // address+control for UIH frames. Confirm against the peer implementation
    // before changing; the remote end may expect exactly this range.
    TInt checksumLength = 2;
    TUint8 checksum;
    checksum = (TUint8) CalcFCS(&aFrameBuf->iMsg[0], checksumLength);

    // Octet 5-x
    // Build the Convergence Layer 4 byte
    TUint8 tempLayer4Byte = 0x01; // Default Middle Frame fragment
    if (aLayer4Begin && aLayer4End) // Begin and End - Single Frame Message
        {
        tempLayer4Byte = 0xC1; // MAF magic numbers
        }
    else if (aLayer4Begin) // Begin Frame
        {
        tempLayer4Byte = 0x41;
        }
    else if (aLayer4End) // End Frame
        {
        tempLayer4Byte = 0x81;
        }
    aFrameBuf->iMsg.Append(tempLayer4Byte);

    // Copy aLength bytes of payload starting at aOffset into the frame.
    const TUint8* temp = &aMsgBuf[aOffset];
    aFrameBuf->iMsg.Append(temp, aLength);

    // Trailer: FCS octet followed by the closing flag.
    TInt tempLength = aFrameBuf->iMsg.Length();
    aFrameBuf->iMsg.SetLength(tempLength + 2);
    aFrameBuf->iMsg[tempLength] = checksum;
    aFrameBuf->iMsg[tempLength+1] = KCsy0710StartEndFlag;

    DumpFrame(aFrameBuf);

    _LOG_L4C1("<<CMux0710Protocol::Create0710UIHLayer4FrameFromMsg");
}
// Read animated GIF bitstream from 'filename' into 'AnimatedImage' struct. static int ReadAnimatedGIF(const char filename[], AnimatedImage* const image, int dump_frames, const char dump_folder[]) { uint32_t frame_count; uint32_t canvas_width, canvas_height; uint32_t i; int gif_error; GifFileType* gif; gif = DGifOpenFileName(filename, NULL); if (gif == NULL) { fprintf(stderr, "Could not read file: %s.\n", filename); return 0; } gif_error = DGifSlurp(gif); if (gif_error != GIF_OK) { fprintf(stderr, "Could not parse image: %s.\n", filename); GIFDisplayError(gif, gif_error); DGifCloseFile(gif, NULL); return 0; } // Animation properties. image->canvas_width = (uint32_t)gif->SWidth; image->canvas_height = (uint32_t)gif->SHeight; if (image->canvas_width > MAX_CANVAS_SIZE || image->canvas_height > MAX_CANVAS_SIZE) { fprintf(stderr, "Invalid canvas dimension: %d x %d\n", image->canvas_width, image->canvas_height); DGifCloseFile(gif, NULL); return 0; } image->loop_count = GetLoopCountGIF(gif); image->bgcolor = GetBackgroundColorGIF(gif); frame_count = (uint32_t)gif->ImageCount; if (frame_count == 0) { DGifCloseFile(gif, NULL); return 0; } if (image->canvas_width == 0 || image->canvas_height == 0) { image->canvas_width = gif->SavedImages[0].ImageDesc.Width; image->canvas_height = gif->SavedImages[0].ImageDesc.Height; gif->SavedImages[0].ImageDesc.Left = 0; gif->SavedImages[0].ImageDesc.Top = 0; if (image->canvas_width == 0 || image->canvas_height == 0) { fprintf(stderr, "Invalid canvas size in GIF.\n"); DGifCloseFile(gif, NULL); return 0; } } // Allocate frames. AllocateFrames(image, frame_count); canvas_width = image->canvas_width; canvas_height = image->canvas_height; // Decode and reconstruct frames. 
for (i = 0; i < frame_count; ++i) { const int canvas_width_in_bytes = canvas_width * kNumChannels; const SavedImage* const curr_gif_image = &gif->SavedImages[i]; GraphicsControlBlock curr_gcb; DecodedFrame* curr_frame; uint8_t* curr_rgba; memset(&curr_gcb, 0, sizeof(curr_gcb)); DGifSavedExtensionToGCB(gif, i, &curr_gcb); curr_frame = &image->frames[i]; curr_rgba = curr_frame->rgba; curr_frame->duration = GetFrameDurationGIF(gif, i); if (i == 0) { // Initialize as transparent. curr_frame->is_key_frame = 1; ZeroFillCanvas(curr_rgba, canvas_width, canvas_height); } else { DecodedFrame* const prev_frame = &image->frames[i - 1]; const GifImageDesc* const prev_desc = &gif->SavedImages[i - 1].ImageDesc; GraphicsControlBlock prev_gcb; memset(&prev_gcb, 0, sizeof(prev_gcb)); DGifSavedExtensionToGCB(gif, i - 1, &prev_gcb); curr_frame->is_key_frame = IsKeyFrameGIF(prev_desc, prev_gcb.DisposalMode, prev_frame, canvas_width, canvas_height); if (curr_frame->is_key_frame) { // Initialize as transparent. ZeroFillCanvas(curr_rgba, canvas_width, canvas_height); } else { int prev_frame_disposed, curr_frame_opaque; int prev_frame_completely_covered; // Initialize with previous canvas. uint8_t* const prev_rgba = image->frames[i - 1].rgba; CopyCanvas(prev_rgba, curr_rgba, canvas_width, canvas_height); // Dispose previous frame rectangle. 
prev_frame_disposed = (prev_gcb.DisposalMode == DISPOSE_BACKGROUND || prev_gcb.DisposalMode == DISPOSE_PREVIOUS); curr_frame_opaque = (curr_gcb.TransparentColor == NO_TRANSPARENT_COLOR); prev_frame_completely_covered = curr_frame_opaque && CoversFrameGIF(&curr_gif_image->ImageDesc, prev_desc); if (prev_frame_disposed && !prev_frame_completely_covered) { switch (prev_gcb.DisposalMode) { case DISPOSE_BACKGROUND: { ZeroFillFrameRect(curr_rgba, canvas_width_in_bytes, prev_desc->Left, prev_desc->Top, prev_desc->Width, prev_desc->Height); break; } case DISPOSE_PREVIOUS: { int src_frame_num = i - 2; while (src_frame_num >= 0) { GraphicsControlBlock src_frame_gcb; memset(&src_frame_gcb, 0, sizeof(src_frame_gcb)); DGifSavedExtensionToGCB(gif, src_frame_num, &src_frame_gcb); if (src_frame_gcb.DisposalMode != DISPOSE_PREVIOUS) break; --src_frame_num; } if (src_frame_num >= 0) { // Restore pixels inside previous frame rectangle to // corresponding pixels in source canvas. uint8_t* const src_frame_rgba = image->frames[src_frame_num].rgba; CopyFrameRectangle(src_frame_rgba, curr_rgba, canvas_width_in_bytes, prev_desc->Left, prev_desc->Top, prev_desc->Width, prev_desc->Height); } else { // Source canvas doesn't exist. So clear previous frame // rectangle to background. ZeroFillFrameRect(curr_rgba, canvas_width_in_bytes, prev_desc->Left, prev_desc->Top, prev_desc->Width, prev_desc->Height); } break; } default: break; // Nothing to do. } } } } // Decode current frame. if (!ReadFrameGIF(curr_gif_image, gif->SColorMap, curr_gcb.TransparentColor, canvas_width_in_bytes, curr_rgba)) { DGifCloseFile(gif, NULL); return 0; } if (dump_frames) { if (!DumpFrame(filename, dump_folder, i, curr_rgba, canvas_width, canvas_height)) { DGifCloseFile(gif, NULL); return 0; } } } DGifCloseFile(gif, NULL); return 1; }
// Read animated WebP bitstream 'file_str' into 'AnimatedImage' struct. static int ReadAnimatedWebP(const char filename[], const WebPData* const webp_data, AnimatedImage* const image, int dump_frames, const char dump_folder[]) { int ok = 0; int dump_ok = 1; uint32_t frame_index = 0; int prev_frame_timestamp = 0; WebPAnimDecoder* dec; WebPAnimInfo anim_info; memset(image, 0, sizeof(*image)); dec = WebPAnimDecoderNew(webp_data, NULL); if (dec == NULL) { fprintf(stderr, "Error parsing image: %s\n", filename); goto End; } if (!WebPAnimDecoderGetInfo(dec, &anim_info)) { fprintf(stderr, "Error getting global info about the animation\n"); goto End; } // Animation properties. image->canvas_width = anim_info.canvas_width; image->canvas_height = anim_info.canvas_height; image->loop_count = anim_info.loop_count; image->bgcolor = anim_info.bgcolor; // Allocate frames. if (!AllocateFrames(image, anim_info.frame_count)) return 0; // Decode frames. while (WebPAnimDecoderHasMoreFrames(dec)) { DecodedFrame* curr_frame; uint8_t* curr_rgba; uint8_t* frame_rgba; int timestamp; if (!WebPAnimDecoderGetNext(dec, &frame_rgba, ×tamp)) { fprintf(stderr, "Error decoding frame #%u\n", frame_index); goto End; } assert(frame_index < anim_info.frame_count); curr_frame = &image->frames[frame_index]; curr_rgba = curr_frame->rgba; curr_frame->duration = timestamp - prev_frame_timestamp; curr_frame->is_key_frame = 0; // Unused. memcpy(curr_rgba, frame_rgba, image->canvas_width * kNumChannels * image->canvas_height); // Needed only because we may want to compare with GIF later. CleanupTransparentPixels((uint32_t*)curr_rgba, image->canvas_width, image->canvas_height); if (dump_frames && dump_ok) { dump_ok = DumpFrame(filename, dump_folder, frame_index, curr_rgba, image->canvas_width, image->canvas_height); if (!dump_ok) { // Print error once, but continue decode loop. 
fprintf(stderr, "Error dumping frames to %s\n", dump_folder); } } ++frame_index; prev_frame_timestamp = timestamp; } ok = dump_ok; End: WebPAnimDecoderDelete(dec); return ok; }
static void DumpExceptionInfo(void *processId, struct _EXCEPTION_RECORD *exceptionRec, struct _CONTEXT *contextRecord) { printf("\n*********** EXCEPTION **************\n"); printf("Registers:\n"); #ifdef _WIN64 printf("RAX: %08X RBX: %08X RCX: %08X RDX: %08X RSI: %08X RDI: %08X RSP: %08X RBP: %08X\n", contextRecord->Rax, contextRecord->Rbx, contextRecord->Rcx, contextRecord->Rdx, contextRecord->Rsi, contextRecord->Rdi, contextRecord->Rsp, contextRecord->Rbp); printf("R8: %08X R9: %08X R10: %08X R11: %08X R12: %08X R13: %08X R14: %08X R15: %08X\n", contextRecord->R8, contextRecord->R9, contextRecord->R10, contextRecord->R11, contextRecord->R12, contextRecord->R13, contextRecord->R14, contextRecord->R15); printf("RIP: %08X EFlags: %08X\n\n", contextRecord->Rip, contextRecord->EFlags); #else printf("EAX: %08X EBX: %08X ECX: %08X EDX: %08X ESI: %08X\n", contextRecord->Eax, contextRecord->Ebx, contextRecord->Ecx, contextRecord->Edx, contextRecord->Esi); printf("EDI: %08X ESP: %08X EBP: %08X EIP: %08X EFlags: %08X\n\n", contextRecord->Edi, contextRecord->Esp, contextRecord->Ebp, contextRecord->Eip, contextRecord->EFlags); #endif printf("Exception Code: "); switch (exceptionRec->ExceptionCode) { case EXCEPTION_ACCESS_VIOLATION: printf("EXCEPTION_ACCESS_VIOLATION"); break; case EXCEPTION_ARRAY_BOUNDS_EXCEEDED: printf("EXCEPTION_ARRAY_BOUNDS_EXCEEDED"); break; case EXCEPTION_BREAKPOINT: printf("EXCEPTION_BREAKPOINT"); break; case EXCEPTION_DATATYPE_MISALIGNMENT: printf("EXCEPTION_DATATYPE_MISALIGNMENT"); break; case EXCEPTION_FLT_DENORMAL_OPERAND: printf("EXCEPTION_FLT_DENORMAL_OPERAND"); break; case EXCEPTION_FLT_DIVIDE_BY_ZERO: printf("EXCEPTION_FLT_DIVIDE_BY_ZERO"); break; case EXCEPTION_FLT_INEXACT_RESULT: printf("EXCEPTION_FLT_INEXACT_RESULT"); break; case EXCEPTION_FLT_INVALID_OPERATION: printf("EXCEPTION_FLT_INVALID_OPERATION"); break; case EXCEPTION_FLT_OVERFLOW: printf("EXCEPTION_FLT_OVERFLOW"); break; case EXCEPTION_FLT_STACK_CHECK: 
printf("EXCEPTION_FLT_STACK_CHECK"); break; case EXCEPTION_FLT_UNDERFLOW: printf("EXCEPTION_FLT_UNDERFLOW"); break; case EXCEPTION_ILLEGAL_INSTRUCTION: printf("EXCEPTION_ILLEGAL_INSTRUCTION"); break; case EXCEPTION_IN_PAGE_ERROR: printf("EXCEPTION_IN_PAGE_ERROR"); break; case EXCEPTION_INT_DIVIDE_BY_ZERO: printf("EXCEPTION_INT_DIVIDE_BY_ZERO"); break; case EXCEPTION_INT_OVERFLOW: printf("EXCEPTION_INT_OVERFLOW"); break; case EXCEPTION_INVALID_DISPOSITION: printf("EXCEPTION_INVALID_DISPOSITION"); break; case EXCEPTION_NONCONTINUABLE_EXCEPTION: printf("EXCEPTION_NONCONTINUABLE_EXCEPTION"); break; case EXCEPTION_PRIV_INSTRUCTION: printf("EXCEPTION_PRIV_INSTRUCTION"); break; case EXCEPTION_SINGLE_STEP: printf("EXCEPTION_SINGLE_STEP"); break; case EXCEPTION_STACK_OVERFLOW: printf("EXCEPTION_STACK_OVERFLOW"); break; default: printf("UNKNOWN"); } printf(" (%08X)\n", exceptionRec->ExceptionCode); printf("Exception Flags: %08X\n", exceptionRec->ExceptionFlags); printf("\nOrigin:\n"); #ifdef _WIN64 DumpFrame(processId, contextRecord->Rip); #else DumpFrame(processId, contextRecord->Eip); #endif printf("*********** EXCEPTION **************\n"); fflush(stdout); }
// Capture/render thread: pulls frames from an NvMedia video capture device,
// renders top+bottom fields through the mixer, optionally dumps raw frames to
// a file, and returns used surfaces to the capture pool.
// Runs until either a frame count ('ctx->last') or a time budget (when
// 'ctx->timeNotCount' is set) is exhausted, or the global 'stop' flag is set.
// Synchronizes with ControlThread() via the global 'mutex'/'cond' pair and
// publishes the current surface through the global 'capSurf'.
static unsigned int CaptureThread(void *params) {
    int i = 0;                                 // frames captured so far
    NvU64 stime, ctime;                        // start / current time in ns
    NvMediaTime t1 = {0}, t2 = {0}, st = {0}, ct = {0};
    CaptureContext *ctx = (CaptureContext *)params;
    NvMediaVideoSurface *releaseList[4] = {NULL}, **relList;
    NvMediaRect primarySrcRect;
    NvMediaPrimaryVideo primaryVideo;

    // Source rectangle covers the full input frame.
    primarySrcRect.x0 = 0;
    primarySrcRect.y0 = 0;
    primarySrcRect.x1 = ctx->inputWidth;
    primarySrcRect.y1 = ctx->inputHeight;

    primaryVideo.next = NULL;
    primaryVideo.previous = NULL;
    primaryVideo.previous2 = NULL;
    primaryVideo.srcRect = &primarySrcRect;
    primaryVideo.dstRect = NULL;

    // Wait until the main thread signals that capture may begin.
    NvSemaphoreDecrement(ctx->semStart, NV_TIMEOUT_INFINITE);

    if(ctx->timeNotCount)
    {
        // Time-bounded run: t1 = now + ctx->last seconds (last is treated as
        // seconds here, scaled by 1e6 — units depend on AddTime; see its impl).
        GetTime(&t1);
        AddTime(&t1, ctx->last * 1000000LL, &t1);
        GetTime(&t2);
        printf("timeNotCount\n");
    }
    GetTime(&st);
    stime = (NvU64)st.tv_sec * 1000000000LL + (NvU64)st.tv_nsec;

    // Loop until deadline (time mode) or frame count (count mode), or 'stop'.
    while((ctx->timeNotCount? (SubTime(&t1, &t2)): ((unsigned int)i < ctx->last)) && !stop)
    {
        GetTime(&ct);
        ctime = (NvU64)ct.tv_sec * 1000000000LL + (NvU64)ct.tv_nsec;
        printf("frame=%3d, time=%llu.%09llu[s] \n", i, (ctime-stime)/1000000000LL, (ctime-stime)%1000000000LL);
        //printf("frame=%3d, time=%llu.%09llu[s] \n", i, (ctime-stime)/1000000000LL, (ctime-stime)%1000000000LL);

        pthread_mutex_lock(&mutex);            // for ControlThread()

        // Grab the next captured surface into the shared global 'capSurf'.
        // NOTE(review): this 'break' exits the loop with 'mutex' still
        // locked — ControlThread() would then block forever. Confirm and
        // unlock before breaking.
        if(!(capSurf = NvMediaVideoCaptureGetFrame(ctx->capture, ctx->timeout)))
        { // TBD
            MESSAGE_PRINTF("NvMediaVideoCaptureGetFrame() failed in %sThread\n", ctx->name);
            stop = NVMEDIA_TRUE;
            break;
        }

        if(i%3 == 0)                           // once in three loop = 10 Hz
            pthread_cond_signal(&cond);        // ControlThread() is called

        pthread_mutex_unlock(&mutex);          // for ControlThread()

        // Render the frame as two interlaced fields: top then bottom.
        primaryVideo.current = capSurf;
        primaryVideo.pictureStructure = NVMEDIA_PICTURE_STRUCTURE_TOP_FIELD;

        if(NVMEDIA_STATUS_OK != NvMediaVideoMixerRender(ctx->mixer, // mixer
                                                        NVMEDIA_OUTPUT_DEVICE_0, // outputDeviceMask
                                                        NULL, // background
                                                        &primaryVideo, // primaryVideo
                                                        NULL, // secondaryVideo
                                                        NULL, // graphics0
                                                        NULL, // graphics1
                                                        releaseList, // releaseList
                                                        NULL)) // timeStamp
        { // TBD
            MESSAGE_PRINTF("NvMediaVideoMixerRender() failed for the top field in %sThread\n", ctx->name);
            stop = NVMEDIA_TRUE;
        }

        primaryVideo.pictureStructure = NVMEDIA_PICTURE_STRUCTURE_BOTTOM_FIELD;
        if(NVMEDIA_STATUS_OK != NvMediaVideoMixerRender(ctx->mixer, // mixer
                                                        NVMEDIA_OUTPUT_DEVICE_0, // outputDeviceMask
                                                        NULL, // background
                                                        &primaryVideo, // primaryVideo
                                                        NULL, // secondaryVideo
                                                        NULL, // graphics0
                                                        NULL, // graphics1
                                                        releaseList, // releaseList
                                                        NULL)) // timeStamp
        { // TBD
            MESSAGE_PRINTF("NvMediaVideoMixerRender() failed for the bottom field in %sThread\n", ctx->name);
            stop = NVMEDIA_TRUE;
        }

        // Optionally write the raw surface to the output file.
        if(ctx->fileDumpEnabled)
        {
            if(!DumpFrame(ctx->fout, capSurf))
            { // TBD
                MESSAGE_PRINTF("DumpFrame() failed in %sThread\n", ctx->name);
                stop = NVMEDIA_TRUE;
            }

            // Without a display, the mixer never queues the surface for
            // release, so hand it back to the capture pool ourselves.
            if(!ctx->displayEnabled)
                releaseList[0] = capSurf;
        }

        // Return every surface the mixer (or we) queued for release.
        relList = &releaseList[0];

        while(*relList)
        {
            if(NvMediaVideoCaptureReturnFrame(ctx->capture, *relList) != NVMEDIA_STATUS_OK)
            { // TBD
                MESSAGE_PRINTF("NvMediaVideoCaptureReturnFrame() failed in %sThread\n", ctx->name);
                stop = NVMEDIA_TRUE;
                break;
            }
            relList++;
        }

        if(ctx->timeNotCount)
            GetTime(&t2);

        i++;
    } // while end

    // Release any left-over frames
//    if(ctx->displayEnabled && capSurf && capSurf->type != NvMediaSurfaceType_YV16x2) // To allow returning frames after breaking out of the while loop in case of error
    if(ctx->displayEnabled && capSurf)
    {
        // Flush the mixer with a NULL primaryVideo so it releases any
        // surfaces it still holds, then return them to the capture pool.
        NvMediaVideoMixerRender(ctx->mixer, // mixer
                                NVMEDIA_OUTPUT_DEVICE_0, // outputDeviceMask
                                NULL, // background
                                NULL, // primaryVideo
                                NULL, // secondaryVideo
                                NULL, // graphics0
                                NULL, // graphics1
                                releaseList, // releaseList
                                NULL); // timeStamp

        relList = &releaseList[0];

        while(*relList)
        {
            if(NvMediaVideoCaptureReturnFrame(ctx->capture, *relList) != NVMEDIA_STATUS_OK)
                MESSAGE_PRINTF("NvMediaVideoCaptureReturnFrame() failed in %sThread\n", ctx->name);
            relList++;
        }
    }

    // Tell the main thread this worker is done.
    NvSemaphoreIncrement(ctx->semDone);

    return 0;
}