/* Initializes data structures used for profiling. */
ebp_buffers *ebp_start(int bitmap)
{
    message m;

    (void)fprintf(stdout, "LIB start1\n");
    (void)fprintf(stdout, "LIB start101\n");
    buffers = malloc(sizeof(ebp_buffers));
    (void)fprintf(stdout, "LIB start11\n");
    buffers->first = alloc_buffers();
    (void)fprintf(stdout, "LIB start12\n");
    buffers->second = alloc_buffers();
    (void)fprintf(stdout, "LIB start13\n");
    relevant_buffer = malloc(sizeof(int));
    (void)fprintf(stdout, "LIB start2\n");

    /* Set profiling flag */
    bitmap &= 0xFFF;
    (void)fprintf(stdout, "LIB start3\n");

    /* do syscall */
    m.EBP_BUFFER1 = buffers->first;
    m.EBP_BUFFER2 = buffers->second;
    m.EBP_RELBUF = relevant_buffer;
    m.EBP_BITMAP = bitmap;
    m.m_type = SYS_EBPROF;
    (void)fprintf(stdout, "LIB start4 newer\n");
    sleep(1);
    _do_kernel_call(&m); /* _syscall(PM_PROC_NR, EBPROF, &m); */
    (void)fprintf(stdout, "LIB start5\n");

    return buffers;
}
static void begin(void)
{
    debug = 0;
    verbose = 0;
    openBoards();
    alloc_buffers();
    build_mask(mask, NBIT);
}
static int v4l2_alloc(struct frame_format *ff, unsigned max_mem,
                      struct frame **fr, unsigned *nf)
{
    struct vid_buffer *vb;
    struct frame *frames;
    int offs[3], stride[3];
    int frame_size;
    int nframes;
    int i, j;

    if (get_plane_fmt(&sfmt.fmt.pix, offs, stride))
        return -1;

    switch (sfmt.fmt.pix.pixelformat) {
    case V4L2_PIX_FMT_YUV420:
        frame_size = ff->width * ff->height * 3 / 2;
        break;
    case V4L2_PIX_FMT_NV12:
        frame_size = ff->width * ff->height * 3 / 2;
        break;
    case V4L2_PIX_FMT_YUYV:
        frame_size = ff->width * ff->height * 2;
        break;
    default:
        return -1;
    }

    nframes = MAX(max_mem / frame_size, MIN_FRAMES + 1);
    fprintf(stderr, "V4L2: memman allocating %d frames\n", nframes);

    vb = alloc_buffers(&sfmt.fmt.pix, &nframes);
    if (!vb)
        return -1;

    frames = calloc(nframes, sizeof(*frames));
    if (!frames)
        goto err;

    for (i = 0; i < nframes; i++) {
        frames[i].ff = ff;
        for (j = 0; j < 3; j++) {
            frames[i].virt[j]     = vb[i].data[j];
            frames[i].linesize[j] = stride[j];
        }
    }

    vid_buffers = vb;
    *fr = vid_frames = frames;
    *nf = nframes;

    return 0;

err:
    free_buffers(vb, nframes);
    return -1;
}
static int v4l2_enable(struct frame_format *ff, unsigned flags,
                       const struct pixconv *pc, struct frame_format *df)
{
    struct v4l2_format fmt = { 0 };
    int i;

    if (!vid_buffers) {
        int nbufs = NUM_BUFFERS;

        vid_buffers = alloc_buffers(&sfmt.fmt.pix, &nbufs);
        if (!vid_buffers)
            goto err;
        num_buffers = nbufs;

        for (i = 0; i < num_buffers; i++)
            xioctl(vid_fd, VIDIOC_QBUF, &vid_buffers[i].buf);

        pixconv = pc;
    } else {
        struct frame *f = ofbp_get_frame();
        xioctl(vid_fd, VIDIOC_QBUF, &vid_buffers[f->frame_num].buf);
    }

    fprintf(stderr, "V4L2: crop %dx%d from %dx%d\n",
            ff->disp_w, ff->disp_h, ff->width, ff->height);

    crop.type     = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    crop.c.left   = 0;
    crop.c.top    = 0;
    crop.c.width  = ff->disp_w;
    crop.c.height = ff->disp_h;
    xioctl(vid_fd, VIDIOC_S_CROP, &crop);

    fprintf(stderr, "V4L2: overlay %dx%d @ %d,%d\n",
            df->disp_w, df->disp_h, df->disp_x, df->disp_y);

    fmt.type                 = V4L2_BUF_TYPE_VIDEO_OVERLAY;
    fmt.fmt.win.w.left       = df->disp_x;
    fmt.fmt.win.w.top        = df->disp_y;
    fmt.fmt.win.w.width      = df->disp_w;
    fmt.fmt.win.w.height     = df->disp_h;
    fmt.fmt.win.field        = V4L2_FIELD_NONE;
    fmt.fmt.win.global_alpha = 255;
    xioctl(vid_fd, VIDIOC_S_FMT, &fmt);

    i = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    xioctl(vid_fd, VIDIOC_STREAMON, &i);

    return 0;

err:
    return -1;
}
/*!
 * \brief Start drawing raster
 *
 * \param mask non-zero int for mask
 * \param s source (map) extent (left, right, top, bottom)
 * \param fd destination (image) extent (left, right, top, bottom)
 */
void PNG_begin_raster(int mask, int s[2][2], double fd[2][2])
{
    int d[2][2];
    int i;

    d[0][0] = (int) floor(fd[0][0] + 0.5);
    d[0][1] = (int) floor(fd[0][1] + 0.5);
    d[1][0] = (int) floor(fd[1][0] + 0.5);
    d[1][1] = (int) floor(fd[1][1] + 0.5);

    ncols = d[0][1] - d[0][0];

    memcpy(src, s, sizeof(src));
    memcpy(dst, d, sizeof(dst));

    masked = mask;

    alloc_buffers();

    for (i = 0; i < ncols; i++)
        trans[i] = scale_rev_x(d[0][0] + i);
}
static av_cold int aac_encode_init(AVCodecContext *avctx)
{
    AACEncContext *s = avctx->priv_data;
    int i, ret = 0;
    const uint8_t *sizes[2];
    uint8_t grouping[AAC_MAX_CHANNELS];
    int lengths[2];

    s->channels = avctx->channels;
    s->chan_map = aac_chan_configs[s->channels-1];
    s->lambda = avctx->global_quality > 0 ? avctx->global_quality : 120;
    s->last_frame_pb_count = 0;
    avctx->extradata_size = 5;
    avctx->frame_size = 1024;
    avctx->initial_padding = 1024;
    avctx->bit_rate = (int)FFMIN(6144 * s->channels / 1024.0 * avctx->sample_rate,
                                 avctx->bit_rate);
    avctx->profile = avctx->profile == FF_PROFILE_UNKNOWN ? FF_PROFILE_AAC_LOW :
                     avctx->profile;

    for (i = 0; i < 16; i++)
        if (avctx->sample_rate == avpriv_mpeg4audio_sample_rates[i])
            break;
    s->samplerate_index = i;
    ERROR_IF(s->samplerate_index == 16 ||
             s->samplerate_index >= ff_aac_swb_size_1024_len ||
             s->samplerate_index >= ff_aac_swb_size_128_len,
             "Unsupported sample rate %d\n", avctx->sample_rate);
    ERROR_IF(s->channels > AAC_MAX_CHANNELS || s->channels == 7,
             "Unsupported number of channels: %d\n", s->channels);
    WARN_IF(1024.0 * avctx->bit_rate / avctx->sample_rate > 6144 * s->channels,
            "Too many bits %f > %d per frame requested, clamping to max\n",
            1024.0 * avctx->bit_rate / avctx->sample_rate,
            6144 * s->channels);

    for (i = 0; i < FF_ARRAY_ELEMS(aacenc_profiles); i++)
        if (avctx->profile == aacenc_profiles[i])
            break;
    ERROR_IF(i == FF_ARRAY_ELEMS(aacenc_profiles),
             "Unsupported encoding profile: %d\n", avctx->profile);
    if (avctx->profile == FF_PROFILE_MPEG2_AAC_LOW) {
        avctx->profile = FF_PROFILE_AAC_LOW;
        ERROR_IF(s->options.pred,
                 "Main prediction unavailable in the \"mpeg2_aac_low\" profile\n");
        ERROR_IF(s->options.ltp,
                 "LTP prediction unavailable in the \"mpeg2_aac_low\" profile\n");
        WARN_IF(s->options.pns,
                "PNS unavailable in the \"mpeg2_aac_low\" profile, turning off\n");
        s->options.pns = 0;
    } else if (avctx->profile == FF_PROFILE_AAC_LTP) {
        s->options.ltp = 1;
        ERROR_IF(s->options.pred,
                 "Main prediction unavailable in the \"aac_ltp\" profile\n");
    } else if (avctx->profile == FF_PROFILE_AAC_MAIN) {
        s->options.pred = 1;
        ERROR_IF(s->options.ltp,
                 "LTP prediction unavailable in the \"aac_main\" profile\n");
    } else if (s->options.ltp) {
        avctx->profile = FF_PROFILE_AAC_LTP;
        WARN_IF(1, "Changing profile to \"aac_ltp\"\n");
        ERROR_IF(s->options.pred,
                 "Main prediction unavailable in the \"aac_ltp\" profile\n");
    } else if (s->options.pred) {
        avctx->profile = FF_PROFILE_AAC_MAIN;
        WARN_IF(1, "Changing profile to \"aac_main\"\n");
        ERROR_IF(s->options.ltp,
                 "LTP prediction unavailable in the \"aac_main\" profile\n");
    }
    s->profile = avctx->profile;
    s->coder = &ff_aac_coders[s->options.coder];

    if (s->options.coder != AAC_CODER_TWOLOOP) {
        ERROR_IF(avctx->strict_std_compliance > FF_COMPLIANCE_EXPERIMENTAL,
                 "Coders other than twoloop require -strict -2 and some may be removed in the future\n");
        WARN_IF(s->options.coder == AAC_CODER_FAAC,
                "The FAAC-like coder will be removed in the near future, please use twoloop!\n");
        s->options.intensity_stereo = 0;
        s->options.pns = 0;
    }

    if ((ret = dsp_init(avctx, s)) < 0)
        goto fail;

    if ((ret = alloc_buffers(avctx, s)) < 0)
        goto fail;

    put_audio_specific_config(avctx);

    sizes[0]   = ff_aac_swb_size_1024[s->samplerate_index];
    sizes[1]   = ff_aac_swb_size_128[s->samplerate_index];
    lengths[0] = ff_aac_num_swb_1024[s->samplerate_index];
    lengths[1] = ff_aac_num_swb_128[s->samplerate_index];
    for (i = 0; i < s->chan_map[0]; i++)
        grouping[i] = s->chan_map[i + 1] == TYPE_CPE;
    if ((ret = ff_psy_init(&s->psy, avctx, 2, sizes, lengths,
                           s->chan_map[0], grouping)) < 0)
        goto fail;
    s->psypp = ff_psy_preprocess_init(avctx);
    ff_lpc_init(&s->lpc, 2*avctx->frame_size, TNS_MAX_ORDER, FF_LPC_TYPE_LEVINSON);
    av_lfg_init(&s->lfg, 0x72adca55);

    if (HAVE_MIPSDSP)
        ff_aac_coder_init_mips(s);

    if ((ret = ff_thread_once(&aac_table_init, &aac_encode_init_tables)) != 0)
        return AVERROR_UNKNOWN;

    ff_af_queue_init(avctx, &s->afq);

    return 0;
fail:
    aac_encode_end(avctx);
    return ret;
}
static int read_ppm_frame(int fd, ppm_info_t *ppm,
                          uint8_t *buffers[], uint8_t *buffers2[],
                          int ilace, int ileave, int bgr)
{
    int width, height;
    static uint8_t *rowbuffer = NULL;
    int err;

    err = read_ppm_header(fd, &width, &height);
    if (err > 0) return 1;   /* EOF */
    if (err < 0) return -1;  /* error */
    mjpeg_debug("Got PPM header: %dx%d", width, height);

    if (ppm->width == 0) {
        /* first time */
        mjpeg_debug("Initializing PPM read_frame");
        ppm->width = width;
        ppm->height = height;
        rowbuffer = malloc(width * 3 * sizeof(rowbuffer[0]));
    } else {
        /* make sure everything matches */
        if ((ppm->width != width) || (ppm->height != height))
            mjpeg_error_exit1("One of these frames is not like the others!");
    }
    if (buffers[0] == NULL)
        alloc_buffers(buffers, width, height);
    if ((buffers2[0] == NULL) && (ilace != Y4M_ILACE_NONE))
        alloc_buffers(buffers2, width, height);

    mjpeg_debug("Reading rows");

    if ((ilace != Y4M_ILACE_NONE) && (ileave)) {
        /* Interlaced and Interleaved:
           --> read image and deinterleave fields at same time */
        if (ilace == Y4M_ILACE_TOP_FIRST) {
            /* 1st buff arg == top field == temporally first == "buffers" */
            read_ppm_into_two_buffers(fd, buffers, buffers2, rowbuffer,
                                      width, height, bgr);
        } else {
            /* bottom-field-first */
            /* 1st buff arg == top field == temporally second == "buffers2" */
            read_ppm_into_two_buffers(fd, buffers2, buffers, rowbuffer,
                                      width, height, bgr);
        }
    } else if ((ilace == Y4M_ILACE_NONE) || (!ileave)) {
        /* Not Interlaced, or Not Interleaved:
           --> read image into first buffer... */
        read_ppm_into_one_buffer(fd, buffers, rowbuffer, width, height, bgr);
        if ((ilace != Y4M_ILACE_NONE) && (!ileave)) {
            /* ...Actually Interlaced:
               --> read the second image/field into second buffer */
            err = read_ppm_header(fd, &width, &height);
            if (err > 0) return 1;   /* EOF */
            if (err < 0) return -1;  /* error */
            mjpeg_debug("Got PPM header: %dx%d", width, height);
            /* make sure everything matches */
            if ((ppm->width != width) || (ppm->height != height))
                mjpeg_error_exit1("One of these frames is not like the others!");
            read_ppm_into_one_buffer(fd, buffers2, rowbuffer, width, height, bgr);
        }
    }
    return 0;
}
static av_cold int aac_encode_init(AVCodecContext *avctx)
{
    AACEncContext *s = avctx->priv_data;
    int i, ret = 0;
    const uint8_t *sizes[2];
    uint8_t grouping[AAC_MAX_CHANNELS];
    int lengths[2];

    avctx->frame_size = 1024;

    for (i = 0; i < 16; i++)
        if (avctx->sample_rate == avpriv_mpeg4audio_sample_rates[i])
            break;

    s->channels = avctx->channels;

    ERROR_IF(i == 16,
             "Unsupported sample rate %d\n", avctx->sample_rate);
    ERROR_IF(s->channels > AAC_MAX_CHANNELS,
             "Unsupported number of channels: %d\n", s->channels);
    ERROR_IF(avctx->profile != FF_PROFILE_UNKNOWN && avctx->profile != FF_PROFILE_AAC_LOW,
             "Unsupported profile %d\n", avctx->profile);
    ERROR_IF(1024.0 * avctx->bit_rate / avctx->sample_rate > 6144 * s->channels,
             "Too many bits per frame requested\n");

    s->samplerate_index = i;

    s->chan_map = aac_chan_configs[s->channels-1];

    if (ret = dsp_init(avctx, s))
        goto fail;

    if (ret = alloc_buffers(avctx, s))
        goto fail;

    avctx->extradata_size = 5;
    put_audio_specific_config(avctx);

    sizes[0]   = swb_size_1024[i];
    sizes[1]   = swb_size_128[i];
    lengths[0] = ff_aac_num_swb_1024[i];
    lengths[1] = ff_aac_num_swb_128[i];
    for (i = 0; i < s->chan_map[0]; i++)
        grouping[i] = s->chan_map[i + 1] == TYPE_CPE;
    if (ret = ff_psy_init(&s->psy, avctx, 2, sizes, lengths,
                          s->chan_map[0], grouping))
        goto fail;
    s->psypp = ff_psy_preprocess_init(avctx);
    s->coder = &ff_aac_coders[s->options.aac_coder];

    if (HAVE_MIPSDSPR1)
        ff_aac_coder_init_mips(s);

    s->lambda = avctx->global_quality ? avctx->global_quality : 120;

    ff_aac_tableinit();

    for (i = 0; i < 428; i++)
        ff_aac_pow34sf_tab[i] = sqrt(ff_aac_pow2sf_tab[i] * sqrt(ff_aac_pow2sf_tab[i]));

    avctx->delay = 1024;
    ff_af_queue_init(avctx, &s->afq);

    return 0;
fail:
    aac_encode_end(avctx);
    return ret;
}
static av_cold int aac_encode_init(AVCodecContext *avctx)
{
    AACEncContext *s = avctx->priv_data;
    int i, ret = 0;
    const uint8_t *sizes[2];
    uint8_t grouping[AAC_MAX_CHANNELS];
    int lengths[2];

    /* Constants */
    s->last_frame_pb_count = 0;
    avctx->extradata_size = 5;
    avctx->frame_size = 1024;
    avctx->initial_padding = 1024;
    s->lambda = avctx->global_quality > 0 ? avctx->global_quality : 120;

    /* Channel map and unspecified bitrate guessing */
    s->channels = avctx->channels;
    ERROR_IF(s->channels > AAC_MAX_CHANNELS || s->channels == 7,
             "Unsupported number of channels: %d\n", s->channels);
    s->chan_map = aac_chan_configs[s->channels-1];
    if (!avctx->bit_rate) {
        for (i = 1; i <= s->chan_map[0]; i++) {
            avctx->bit_rate += s->chan_map[i] == TYPE_CPE ? 128000 : /* Pair */
                               s->chan_map[i] == TYPE_LFE ? 16000  : /* LFE  */
                                                            69000  ; /* SCE  */
        }
    }

    /* Samplerate */
    for (i = 0; i < 16; i++)
        if (avctx->sample_rate == avpriv_mpeg4audio_sample_rates[i])
            break;
    s->samplerate_index = i;
    ERROR_IF(s->samplerate_index == 16 ||
             s->samplerate_index >= ff_aac_swb_size_1024_len ||
             s->samplerate_index >= ff_aac_swb_size_128_len,
             "Unsupported sample rate %d\n", avctx->sample_rate);

    /* Bitrate limiting */
    WARN_IF(1024.0 * avctx->bit_rate / avctx->sample_rate > 6144 * s->channels,
            "Too many bits %f > %d per frame requested, clamping to max\n",
            1024.0 * avctx->bit_rate / avctx->sample_rate,
            6144 * s->channels);
    avctx->bit_rate = (int64_t)FFMIN(6144 * s->channels / 1024.0 * avctx->sample_rate,
                                     avctx->bit_rate);

    /* Profile and option setting */
    avctx->profile = avctx->profile == FF_PROFILE_UNKNOWN ? FF_PROFILE_AAC_LOW :
                     avctx->profile;
    for (i = 0; i < FF_ARRAY_ELEMS(aacenc_profiles); i++)
        if (avctx->profile == aacenc_profiles[i])
            break;
    if (avctx->profile == FF_PROFILE_MPEG2_AAC_LOW) {
        avctx->profile = FF_PROFILE_AAC_LOW;
        ERROR_IF(s->options.pred,
                 "Main prediction unavailable in the \"mpeg2_aac_low\" profile\n");
        ERROR_IF(s->options.ltp,
                 "LTP prediction unavailable in the \"mpeg2_aac_low\" profile\n");
        WARN_IF(s->options.pns,
                "PNS unavailable in the \"mpeg2_aac_low\" profile, turning off\n");
        s->options.pns = 0;
    } else if (avctx->profile == FF_PROFILE_AAC_LTP) {
        s->options.ltp = 1;
        ERROR_IF(s->options.pred,
                 "Main prediction unavailable in the \"aac_ltp\" profile\n");
    } else if (avctx->profile == FF_PROFILE_AAC_MAIN) {
        s->options.pred = 1;
        ERROR_IF(s->options.ltp,
                 "LTP prediction unavailable in the \"aac_main\" profile\n");
    } else if (s->options.ltp) {
        avctx->profile = FF_PROFILE_AAC_LTP;
        WARN_IF(1, "Changing profile to \"aac_ltp\"\n");
        ERROR_IF(s->options.pred,
                 "Main prediction unavailable in the \"aac_ltp\" profile\n");
    } else if (s->options.pred) {
        avctx->profile = FF_PROFILE_AAC_MAIN;
        WARN_IF(1, "Changing profile to \"aac_main\"\n");
        ERROR_IF(s->options.ltp,
                 "LTP prediction unavailable in the \"aac_main\" profile\n");
    }
    s->profile = avctx->profile;

    /* Coder limitations */
    s->coder = &ff_aac_coders[s->options.coder];
    if (s->options.coder == AAC_CODER_ANMR) {
        ERROR_IF(avctx->strict_std_compliance > FF_COMPLIANCE_EXPERIMENTAL,
                 "The ANMR coder is considered experimental, add -strict -2 to enable!\n");
        s->options.intensity_stereo = 0;
        s->options.pns = 0;
    }
    ERROR_IF(s->options.ltp && avctx->strict_std_compliance > FF_COMPLIANCE_EXPERIMENTAL,
             "The LTP profile requires experimental compliance, add -strict -2 to enable!\n");

    /* M/S introduces horrible artifacts with multichannel files,
     * this is temporary */
    if (s->channels > 3)
        s->options.mid_side = 0;

    if ((ret = dsp_init(avctx, s)) < 0)
        goto fail;

    if ((ret = alloc_buffers(avctx, s)) < 0)
        goto fail;

    put_audio_specific_config(avctx);

    sizes[0]   = ff_aac_swb_size_1024[s->samplerate_index];
    sizes[1]   = ff_aac_swb_size_128[s->samplerate_index];
    lengths[0] = ff_aac_num_swb_1024[s->samplerate_index];
    lengths[1] = ff_aac_num_swb_128[s->samplerate_index];
    for (i = 0; i < s->chan_map[0]; i++)
        grouping[i] = s->chan_map[i + 1] == TYPE_CPE;
    if ((ret = ff_psy_init(&s->psy, avctx, 2, sizes, lengths,
                           s->chan_map[0], grouping)) < 0)
        goto fail;
    s->psypp = ff_psy_preprocess_init(avctx);
    ff_lpc_init(&s->lpc, 2*avctx->frame_size, TNS_MAX_ORDER, FF_LPC_TYPE_LEVINSON);
    s->random_state = 0x1f2e3d4c;

    s->abs_pow34   = abs_pow34_v;
    s->quant_bands = quantize_bands;

    if (ARCH_X86)
        ff_aac_dsp_init_x86(s);

    if (HAVE_MIPSDSP)
        ff_aac_coder_init_mips(s);

    if ((ret = ff_thread_once(&aac_table_init, &aac_encode_init_tables)) != 0)
        return AVERROR_UNKNOWN;

    ff_af_queue_init(avctx, &s->afq);

    return 0;
fail:
    aac_encode_end(avctx);
    return ret;
}
int main(int argc, char *argv[])
{
    int ret = -1;
    int world_rank = 0;
    MPI_Comm comm;
    PVFS_BMI_addr_t *bmi_peer_array;
    int *mpi_peer_array;
    int num_clients;
    struct bench_options opts;
    struct mem_buffers mpi_send_bufs;
    struct mem_buffers mpi_recv_bufs;
    struct mem_buffers bmi_send_bufs;
    struct mem_buffers bmi_recv_bufs;
    enum bmi_buffer_type buffer_type = BMI_EXT_ALLOC;
    double mpi_time, bmi_time;
    bmi_context_id context;

    /* start up benchmark environment */
    ret = bench_init(&opts, argc, argv, &num_clients, &world_rank, &comm,
                     &bmi_peer_array, &mpi_peer_array, &context);
    if (ret < 0)
    {
        fprintf(stderr, "bench_init() failure.\n");
        return (-1);
    }

    /* verify that we didn't get any weird parameters */
    if (num_clients > 1 || opts.num_servers > 1)
    {
        fprintf(stderr, "Too many procs specified.\n");
        return (-1);
    }

    /* setup MPI buffers */
    ret = alloc_buffers(&mpi_send_bufs, ITERATIONS, opts.message_len);
    ret += alloc_buffers(&mpi_recv_bufs, ITERATIONS, opts.message_len);
    if (ret < 0)
    {
        fprintf(stderr, "alloc_buffers() failure.\n");
        return (-1);
    }

    /* setup BMI buffers (differs depending on command line args) */
    if (opts.flags & BMI_ALLOCATE_MEMORY)
    {
        buffer_type = BMI_PRE_ALLOC;
        ret = BMI_alloc_buffers(&bmi_send_bufs, ITERATIONS, opts.message_len,
                                bmi_peer_array[0], BMI_SEND);
        ret += BMI_alloc_buffers(&bmi_recv_bufs, ITERATIONS, opts.message_len,
                                 bmi_peer_array[0], BMI_RECV);
        if (ret < 0)
        {
            fprintf(stderr, "BMI_alloc_buffers() failure.\n");
            return (-1);
        }
    }
    else
    {
        buffer_type = BMI_EXT_ALLOC;
        ret = alloc_buffers(&bmi_send_bufs, ITERATIONS, opts.message_len);
        ret += alloc_buffers(&bmi_recv_bufs, ITERATIONS, opts.message_len);
        if (ret < 0)
        {
            fprintf(stderr, "alloc_buffers() failure.\n");
            return (-1);
        }
    }

    /* mark all send buffers */
    ret = mark_buffers(&bmi_send_bufs);
    ret += mark_buffers(&mpi_send_bufs);
    if (ret < 0)
    {
        fprintf(stderr, "mark_buffers() failure.\n");
        return (-1);
    }

    /******************************************************************/
    /* Actually measure some stuff */

    /* BMI series */
    if (world_rank == 0)
    {
        ret = bmi_server(&opts, &bmi_recv_bufs, &bmi_send_bufs,
                         bmi_peer_array[0], buffer_type, &bmi_time, context);
    }
    else
    {
        ret = bmi_client(&opts, &bmi_recv_bufs, &bmi_send_bufs,
                         bmi_peer_array[0], buffer_type, &bmi_time, context);
    }
    if (ret < 0)
    {
        return (-1);
    }

    /* MPI series */
    if (world_rank == 0)
    {
        ret = mpi_server(&opts, &mpi_recv_bufs, &mpi_send_bufs,
                         mpi_peer_array[0], &mpi_time);
    }
    else
    {
        ret = mpi_client(&opts, &mpi_recv_bufs, &mpi_send_bufs,
                         mpi_peer_array[0], &mpi_time);
    }
    if (ret < 0)
    {
        return (-1);
    }

    /******************************************************************/

#if 0
    if (!(opts.flags & REUSE_BUFFERS))
    {
        /* verify received buffers */
        ret = check_buffers(&mpi_recv_bufs);
        if (ret < 0)
        {
            fprintf(stderr, "MPI buffer verification failed.\n");
            return (-1);
        }
        ret = check_buffers(&bmi_recv_bufs);
        if (ret < 0)
        {
            fprintf(stderr, "BMI buffer verification failed.\n");
            return (-1);
        }
    }
#endif

    /* print out results */
    if (world_rank == 0)
    {
        bench_args_dump(&opts);
        printf("number of iterations: %d\n", ITERATIONS);
        printf("all times measure round trip in seconds unless otherwise noted\n");
        printf("\"ave\" field is computed as (total time)/iterations\n");
    }

    /* enforce output ordering */
    fflush(stdout);
    MPI_Barrier(MPI_COMM_WORLD);

    if (world_rank != 0)
    {
        printf("%d\t%f\t%f\t(size,total,ave)", bmi_recv_bufs.size,
               bmi_time, (bmi_time / ITERATIONS));
        printf(" bmi server\n");
        printf("%d\t%f\t%f\t(size,total,ave)", mpi_recv_bufs.size,
               mpi_time, (mpi_time / ITERATIONS));
        printf(" mpi server\n");
    }

    /* enforce output ordering */
    fflush(stdout);
    MPI_Barrier(MPI_COMM_WORLD);

    if (world_rank == 0)
    {
        printf("%d\t%f\t%f\t(size,total,ave)", bmi_recv_bufs.size,
               bmi_time, (bmi_time / ITERATIONS));
        printf(" bmi client\n");
        printf("%d\t%f\t%f\t(size,total,ave)", mpi_recv_bufs.size,
               mpi_time, (mpi_time / ITERATIONS));
        printf(" mpi client\n");
    }

    /* free buffers */
    free_buffers(&mpi_send_bufs);
    free_buffers(&mpi_recv_bufs);
    if (opts.flags & BMI_ALLOCATE_MEMORY)
    {
        BMI_free_buffers(&bmi_send_bufs, bmi_peer_array[0], BMI_SEND);
        BMI_free_buffers(&bmi_recv_bufs, bmi_peer_array[0], BMI_RECV);
    }
    else
    {
        free_buffers(&bmi_send_bufs);
        free_buffers(&bmi_recv_bufs);
    }

    /* shutdown interfaces */
    BMI_close_context(context);
    BMI_finalize();
    MPI_Finalize();
    return 0;
}
int main(int argc, char *argv[])
{
    int i, iRet = 0;
    struct pwcmech * pwcmech;

    //
    // Signal Handling
    //
    sigset_t block_no_signals;
    sigemptyset(&block_no_signals);

    struct sigaction pwc_sig_handler;
    struct sigaction standard_sig_handler;
    struct sigaction video_io_sig_handler;

    pwc_sig_handler.sa_flags = 0;
    pwc_sig_handler.sa_handler = &sig_handler;
    sigaction(SIGINT, &pwc_sig_handler, &standard_sig_handler);
    sigaction(SIGTERM, &pwc_sig_handler, &standard_sig_handler);

    video_io_sig_handler.sa_flags = 0;
    video_io_sig_handler.sa_handler = &ioctl_callback;
    sigaction(SIGIO, &video_io_sig_handler, &standard_sig_handler);

    //
    // Userspace Driver Interfaces
    //
    struct devClass * video_dev;
    struct images * p_img;

    //
    // Buffering Initialization
    //
    printf("Allocating buffers...");
    frames = alloc_buffers();
    if ( frames == NULL )
    {
        printf("failed?!\n");
        goto finish;
    }
    init_data_buffer(frames);
    printf("done!\n");

    //
    // Device Initialization
    //
    printf("Create camera device...");
    pwcmech = pwc_devmech_start();
    if ( pwcmech_register_driver(pwcmech) )
    {
        printf("failed?!\n");
        goto release_buffers;
    }
    printf("done!\n");

    printf("Initialize camera device...");
    if ( pwcmech_register_handler(pwcmech) )
    {
        perror("Failed opening camera device");
        printf("failed?!\n");
        goto stop_usb;
    }
    printf("done!\n");

    printf("Set camera's video mode...");
    //if_claim should work seamlessly somehow
    iRet = usb_if_claim(pwcmech->com, pwcmech->com->standard_descriptors.highspeed.cameraStream.cameraStreamConf);
    if ( iRet )
    {
        perror("Failed setting camera's video mode");
        printf("failed?!\n");
        goto close_camera;
    }
    iRet = setVideoMode(pwcmech, 9); //still old way, pre-setting instead of from V4L
    if ( iRet )
    {
        perror("Failed setting camera's video mode");
        printf("failed?!\n");
        goto close_camera;
    }
    set_ctrl0_timeout(pwcmech->com, 1000);
    setLeds(pwcmech, 0, 0); //don't bother with the light
    iRet = sendVideoCommand(pwcmech, video_mode_string, VIDEO_MODE_STRING_LEN);
    if ( iRet )
    {
        perror("Failed setting camera's video mode");
        printf("failed?!\n");
        goto close_camera;
    }
    printf("done!\n");

    //
    // Decompression
    //
    printf("Initializing decompression system...");
    p_img = alloc_images();
    if ( p_img == NULL )
    {
        printf("failed?!\n");
        goto close_camera;
    }
    if ( ( iRet = init_images(p_img) ) )
    {
        printf("failed?!\n");
        goto free_p_img;
    }
    printf("done!\n");

#ifndef NO_IMG
    //
    // Open video pipe
    //
    printf("Opening the video pipe (v4l device emulation)...");
    video_dev = alloc_devClass();
    if ( video_dev == NULL )
    {
        printf("failed?!\n");
        goto free_p_img;
    }
    p_img->img_buf = init_devClass(video_dev, argv[1], p_img);
    if ( p_img->img_buf == NULL )
    {
        perror("Initializing device class failed");
        printf("failed?!\n");
        printf("Did you input the device path, \"/dev/video0\"?\n\n\n");
        goto free_video_dev;
    }
    printf("done!\n");
#endif

    //
    // ISO transfers
    //
    printf("Create USB isochronous transfers...");
    for (i = 0; i < ISO_BUFFERS_NR; i++)
    {
        iRet = assignVideoBuffer(pwcmech, frames->iso_buffer[i], ISO_BUFFER_SIZE);
        if (iRet)
        {
            perror("Failed creating image transfer");
            printf("failed?!\n");
            goto unregister_buffers;
        }
    }
    printf("done!\n");

    //
    // Threads
    //
    printf("Set-up threads...");
    int active_threads = 0;
    pthread_t threads[NUM_THREADS];
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

    struct acqArg * acq_arg;
    acq_arg = malloc(sizeof(struct acqArg));
    if ( acq_arg == NULL )
    {
        printf("failed?!\n");
        goto close_threads;
    }
    acq_arg->abort = &thread_abort;
    acq_arg->pwcmech = pwcmech;

    struct imgArg * img_arg;
    img_arg = malloc(sizeof(struct imgArg));
    if ( img_arg == NULL )
    {
        printf("failed?!\n");
        goto close_threads;
    }
    img_arg->frames = frames;
    img_arg->p_img = p_img;
    img_arg->video_pipe = video_dev;
    img_arg->capture = 0;
    img_arg->abort = &thread_abort;
    printf("done!\n");

    //
    // MAIN
    //
    printf("Registering callback function for Isochronous transfer...");
    setPower(pwcmech, 0); //enable camera (our state machine discovered that this was missing)
    registerVideoCallback(pwcmech, acquire_frame, pwcmech);
    if ( acknowledgeVideoCallback(pwcmech) )
        goto close_threads;
    printf("done!\n");

#ifndef NO_COM
    printf("Creating the data acquisition thread...");
    if ( ( iRet = pthread_create(&threads[active_threads], &attr, process_usb, (void *)acq_arg) ) )
    {
        printf("failed?!\n");
        goto stop_camera;
    }
    active_threads++;
    printf("done!\n");
#endif

#ifndef NO_IMG
    printf("Creating the image fill thread...");
    if ( ( iRet = pthread_create(&threads[active_threads], &attr, fillImageData, (void *)img_arg) ) )
    {
        printf("failed?!\n");
        goto stop_camera;
    }
    active_threads++;
    printf("done!\n");
#endif

    while (!op_abort)
    {
        // sigsuspend(&block_no_signals);
        if ( ioctl_call )
        {
            devClass_ioctl(video_dev, &img_arg->capture);
            ioctl_call = 0;
        }
    }

    //
    // ~MAIN
    //
stop_camera:
    printf("Releasing video callback...");
    releaseVideoCallback(pwcmech);
    printf("done!\n");
    printf("Unregistering buffers...");
    unassignVideoBuffers(pwcmech);
    printf("done!\n");

close_threads:
    printf("Closing all threads...");
    thread_abort = 1;
    for (i = 0; i < active_threads; i++)
        pthread_join(threads[i], NULL);
    free(acq_arg);
    free(img_arg);
    pthread_attr_destroy(&attr);
    printf("done!\n");

unregister_buffers:
    printf("Unregistering buffers if not done before...");
    unassignVideoBuffers(pwcmech);
    printf("done!\n");

free_video_dev:
#ifndef NO_IMG
    printf("Closing video pipe...");
    close_devClass(video_dev);
    free_devClass(video_dev);
    printf("done!\n");
#endif

free_p_img:
    printf("Releasing decompression system...");
    free_images(p_img);
    printf("done!\n");
    usb_release_interface(pwcmech->com, pwcmech->com->standard_descriptors.highspeed.cameraStream.cameraStreamConf);

close_camera:
    printf("Close camera device...");
    pwcmech_deregister_handler(pwcmech);
    printf("done!\n");

stop_usb:
    printf("Stopping USB interface...");
    pwcmech_deregister_driver(pwcmech);
    printf("done!\n");

release_buffers:
    pwc_devmech_stop(pwcmech);
    //release buffers, they are linked to the camera device
    //so only release them after releasing the camera
    printf("Releasing buffers...");
    free_buffers(frames);
    printf("done!\n");

finish:
    return 0;
}