EXPORT(sqInt) primitiveMPEG3AudioChannels(void)
{
	mpeg3_t *file;
	sqInt result;
	sqInt fileHandle;
	sqInt aNumber;
	sqInt _return_value;

	fileHandle = interpreterProxy->stackValue(1);
	aNumber = interpreterProxy->stackIntegerValue(0);
	if (interpreterProxy->failed()) {
		return null;
	}
	file = mpeg3tValueOf(fileHandle);
	if (file == null) {
		if (interpreterProxy->failed()) {
			return null;
		}
		interpreterProxy->popthenPush(3, 0);
		return null;
	}
	if (aNumber < 0) {
		interpreterProxy->success(0);
		if (interpreterProxy->failed()) {
			return null;
		}
		interpreterProxy->popthenPush(3, 0);
		return null;
	}
	if (aNumber >= (mpeg3_total_astreams(file))) {
		interpreterProxy->success(0);
		if (interpreterProxy->failed()) {
			return null;
		}
		interpreterProxy->popthenPush(3, 0);
		return null;
	}
	result = mpeg3_audio_channels(file, aNumber);
	_return_value = interpreterProxy->integerObjectOf(result);
	if (interpreterProxy->failed()) {
		return null;
	}
	interpreterProxy->popthenPush(3, _return_value);
	return null;
}
/* Macintosh test harness: decode every frame of "randomAlien.mpg" into an
   offscreen GWorld, blit each frame to pWindow, then report the audio and
   video stream information on stderr. */
int main2()
{
	mpeg3_t *file;
	int i, result = 0;
	unsigned char *output, **output_rows;
	float *audio_output_f;
	short *audio_output_i;
	long total_samples = 0;
	Rect sourceRect;
	OSErr error;
	PixMapHandle hPixmap;
	Ptr gBaseLocation;
	long targetRowBytes;

	file = mpeg3_open("randomAlien.mpg");
	if (file) {
		mpeg3_set_cpus(file, 1);

		//audio_output_f = (float *) memoryAllocate(1, BUFSIZE * sizeof(float));
		//audio_output_i = (short *) memoryAllocate(1, BUFSIZE * sizeof(short));
		//mpeg3_set_sample(file, 11229518, 0);
		//result = mpeg3_read_audio(file, audio_output_f, 0, 0, BUFSIZE, 0);
		//result = mpeg3_read_audio(file, 0, audio_output_i, 1, BUFSIZE, 0);
		//fwrite(audio_output_i, BUFSIZE, 1, stdout);
		//mpeg3_set_frame(file, 1000, 0);

		sourceRect.top = 0;
		sourceRect.left = 0;
		sourceRect.bottom = mpeg3_video_height(file, 0);
		sourceRect.right = mpeg3_video_width(file, 0);

		/* Allocate a 32-bit offscreen GWorld the size of the video. */
		error = NewGWorld(&gpGWOffScreen, 32, &sourceRect, NULL, NULL, keepLocal);
		if (error != noErr) {
			DebugStr("\pUnable to allocate off screen image");
		}
		hPixmap = GetGWorldPixMap(gpGWOffScreen);
		error = LockPixels(hPixmap);
		gBaseLocation = GetPixBaseAddr(hPixmap);
		targetRowBytes = ((**hPixmap).rowBytes & 0x3FFF) / 4;

		/* Build a table of row pointers into the GWorld's pixel buffer. */
		output_rows = (unsigned char **) memoryAllocate(1, sizeof(unsigned char *) * mpeg3_video_height(file, 0));
		for (i = 0; i < mpeg3_video_height(file, 0); i++)
			output_rows[i] = (unsigned char *) gBaseLocation + i * targetRowBytes * 4;

		/* Decode each frame into the GWorld and copy it to the window. */
		for (i = 0; i < mpeg3_video_frames(file, 0); i++) {
			result = mpeg3_read_frame(file, output_rows,
				0, 0,
				mpeg3_video_width(file, 0), mpeg3_video_height(file, 0),
				mpeg3_video_width(file, 0), mpeg3_video_height(file, 0),
				MPEG3_RGBAF8888, 0);
			CopyBits(GetPortBitMapForCopyBits(gpGWOffScreen),
				GetPortBitMapForCopyBits(GetWindowPort(pWindow)),
				&sourceRect, &sourceRect, srcCopy, NULL);
		}
		UnlockPixels(hPixmap);
		DisposeGWorld(gpGWOffScreen);
		memoryFree(output_rows);

		fprintf(stderr, "Audio streams: %d\n", mpeg3_total_astreams(file));
		for (i = 0; i < mpeg3_total_astreams(file); i++) {
			fprintf(stderr, " Stream %d: channels %d sample rate %d total samples %ld\n",
				i,
				mpeg3_audio_channels(file, i),
				mpeg3_sample_rate(file, i),
				mpeg3_audio_samples(file, i));
		}
		fprintf(stderr, "Video streams: %d\n", mpeg3_total_vstreams(file));
		for (i = 0; i < mpeg3_total_vstreams(file); i++) {
			fprintf(stderr, " Stream %d: width %d height %d frame rate %0.3f total frames %ld\n",
				i,
				mpeg3_video_width(file, i),
				mpeg3_video_height(file, i),
				mpeg3_frame_rate(file, i),
				mpeg3_video_frames(file, i));
		}
		mpeg3_close(file);
	}
	return 0;
}
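/* The converter below references several file-scope names that are not defined
   in this excerpt: input, output, mutex, trap_interrupt, and the VIDEO_CODEC /
   AUDIO_CODEC macros.  The following is a minimal sketch, under assumption, of
   what those declarations could look like; the real definitions live elsewhere
   in the tool, and the codec choices here are illustrative only. */
#include <pthread.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "libmpeg3.h"
#include "quicktime.h"

#define VIDEO_CODEC QUICKTIME_JPEG   /* assumed video compressor */
#define AUDIO_CODEC QUICKTIME_TWOS   /* assumed audio compressor */

static mpeg3_t *input = 0;           /* MPEG source opened in main() */
static quicktime_t *output = 0;      /* QuickTime destination opened in main() */
static pthread_mutex_t mutex;        /* initialized in main() */

/* SIGINT handler: assumed to close both files cleanly when the user interrupts. */
static void trap_interrupt(int sig)
{
	pthread_mutex_lock(&mutex);
	if (output) quicktime_close(output);
	if (input) mpeg3_close(input);
	pthread_mutex_unlock(&mutex);
	exit(0);
}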
/* Command-line converter: decode an MPEG stream with libmpeg3 and encode it
   into a QuickTime movie.  Pass -a and/or -v to select audio and video,
   followed by the input path, output path, and an optional frame count. */
int main(int argc, char *argv[])
{
	int error = 0;
	int frame_count = -1;
	char *row_pointers[3];
	int do_audio = 0;
	int do_video = 0;
	int channels = 0;
	long afragment = 65536;
	float **audio_output;
	int layer = 0;
	int astream = 0;
	char input_path[1024];
	char output_path[1024];
	int i;
	long current_frame = 0;
	long current_sample = 0;

	pthread_mutex_init(&mutex, NULL);
	signal(SIGINT, trap_interrupt);

	input_path[0] = 0;
	output_path[0] = 0;

	if(argc < 3)
	{
		printf("Usage: %s [-a] [-v] <mpeg file> <output movie> [frame count]\n", argv[0]);
		exit(1);
	}

	/* Parse command-line arguments. */
	for(i = 1; i < argc; i++)
	{
		if(!strcmp(argv[i], "-a"))
		{
			do_audio = 1;
		}
		else
		if(!strcmp(argv[i], "-v"))
		{
			do_video = 1;
		}
		else
		if(!input_path[0])
		{
			strcpy(input_path, argv[i]);
		}
		else
		if(!output_path[0])
		{
			strcpy(output_path, argv[i]);
		}
		else
			frame_count = atol(argv[i]);
	}

//printf("main 1\n");
	if(!(input = mpeg3_open(input_path, &error)))
	{
		exit(1);
	}

//printf("main 1\n");
	if(!(output = quicktime_open(output_path, 0, 1)))
	{
		exit(1);
	}

//printf("main 1\n");
	/* Set up the video track to match the source stream. */
	if(do_video)
	{
		if(!mpeg3_total_vstreams(input))
		{
			do_video = 0;
		}
		else
		{
			quicktime_set_video(output,
				1,
				mpeg3_video_width(input, layer),
				mpeg3_video_height(input, layer),
				mpeg3_frame_rate(input, layer),
				VIDEO_CODEC);
			quicktime_set_jpeg(output, 80, 0);
		}
	}

//printf("main 1\n");
	/* Set up the audio track and one decode buffer per channel. */
	if(do_audio)
	{
		if(!mpeg3_total_astreams(input))
		{
			do_audio = 0;
		}
		else
		{
			int i;
			channels = mpeg3_audio_channels(input, astream);
			quicktime_set_audio(output,
				channels,
				mpeg3_sample_rate(input, 0),
				24,
				AUDIO_CODEC);
			audio_output = malloc(sizeof(float*) * channels);
			for(i = 0; i < channels; i++)
				audio_output[i] = malloc(sizeof(float) * afragment);
		}
	}

//printf("main 1\n");
//	quicktime_set_jpeg(output, 100, 0);
//	mpeg3_set_mmx(input, 0);

	while((!(do_video && mpeg3_end_of_video(input, layer)) ||
		!(do_audio && mpeg3_end_of_audio(input, astream))) &&
		(current_frame < frame_count || frame_count < 0))
	{
//printf("%d %d\n", mpeg3_end_of_video(input, layer), mpeg3_end_of_audio(input, astream));
		if(do_audio)
		{
			if(!mpeg3_end_of_audio(input, astream))
			{
				int fragment = afragment;
				int i, j, k;

				/* Decode one fragment of every channel, then encode it. */
				i = astream;
				k = 0;
				for(j = 0; j < mpeg3_audio_channels(input, i); j++, k++)
				{
					if(j == 0)
						mpeg3_read_audio(input,
							audio_output[k], /* Pointer to pre-allocated buffer of floats */
							0,               /* Pointer to pre-allocated buffer of int16's */
							j,               /* Channel to decode */
							fragment,        /* Number of samples to decode */
							i);
					else
						mpeg3_reread_audio(input,
							audio_output[k], /* Pointer to pre-allocated buffer of floats */
							0,               /* Pointer to pre-allocated buffer of int16's */
							j,               /* Channel to decode */
							fragment,        /* Number of samples to decode */
							i);
				}

				quicktime_encode_audio(output, 0, audio_output, fragment);
				current_sample += fragment;

				if(!do_video)
				{
					printf(" %ld samples written\r", current_sample);
					fflush(stdout);
				}
			}
			else
				current_sample += afragment;
		}

		if(do_video)
		{
			if(!mpeg3_end_of_video(input, layer))
			{
				int fragment;

				/* Write enough frames to keep the video in step with the
				   audio already written. */
				if(do_audio)
					fragment = (long)((double)current_sample /
						mpeg3_sample_rate(input, 0) *
						mpeg3_frame_rate(input, layer) -
						current_frame);
				else
					fragment = 1;

				for(i = 0; i < fragment && !mpeg3_end_of_video(input, layer); i++)
				{
					mpeg3_read_yuvframe_ptr(input,
						&row_pointers[0],
						&row_pointers[1],
						&row_pointers[2],
						layer);

					switch(mpeg3_colormodel(input, layer))
					{
						case MPEG3_YUV420P:
							quicktime_set_cmodel(output, BC_YUV420P);
							break;
						case MPEG3_YUV422P:
							quicktime_set_cmodel(output, BC_YUV422P);
							break;
					}

					quicktime_encode_video(output, (unsigned char **)row_pointers, 0);
					current_frame++;
					printf(" %ld frames written\r", current_frame);
					fflush(stdout);
				}
			}
		}
	}
printf("main 2\n");

	printf("\n");
	quicktime_close(output);
	mpeg3_close(input);
	return 0;
}