static void reduce_fraction(struct fraction *f)
{
    unsigned int gcd = get_gcd(f->num, f->denom);
    f->num   /= gcd;
    f->denom /= gcd;
}
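The `struct fraction` type and the `get_gcd` helper used above are not shown in this snippet; the following is a minimal, purely illustrative sketch of what they might look like (hypothetical definitions, not the original ones, and in a real build they would need to appear before reduce_fraction or be forward-declared):

/* Hypothetical definitions assumed by reduce_fraction() above. */
struct fraction {
    unsigned int num;    /* numerator   */
    unsigned int denom;  /* denominator */
};

/* Iterative Euclidean algorithm: returns a when b reaches 0. */
static unsigned int get_gcd(unsigned int a, unsigned int b)
{
    while (b != 0) {
        unsigned int t = a % b;
        a = b;
        b = t;
    }
    return a;
}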
int main()
{
    int itr;
    int nCount;                      /* number of test cases */
    scanf("%d", &nCount);            /* read the test-case count */
    for (itr = 0; itr < nCount; itr++) {
        printf("#testcase%d\n", itr + 1);
        int a, b;
        scanf("%d %d", &a, &b);
        int gcd = get_gcd(a, b);
        printf("%d", gcd);           /* the algorithm goes here */
    }
    return 0;                        /* must return 0 */
}
int main()
{
    int x, y;
    scanf("%d%d", &x, &y);
    printf("%d\n", get_gcd(x, y));
    return 0;
}
int get_gcd(int a, int b)
{
    /* Euclidean algorithm: gcd(a, b) = gcd(b, a % b).
     * Check b first so a % b never divides by zero. */
    if (b == 0) {
        return a;
    }
    return get_gcd(b, a % b);
}
int main(int argc, char **argv)
{
    int result = 0;
    uint64_t nsec1 = 0;
    uint64_t nsec2 = 0;
    uint64_t elapsed_nsec = 0;
    uint64_t total_time_for_one = 0;
    struct timespec t_start, t_end;

    FILE *file = fopen("./result.txt", "a");
    if (file) {
        int big, small;
        for (big = 9999; big > 9900; big--) {
            total_time_for_one = 0;
            for (small = 2; big > small; small++) {
                nsec1 = 0;
                nsec2 = 0;
                elapsed_nsec = 0;

                clock_gettime(CLOCK_MONOTONIC, &t_start);
                nsec1 = (uint64_t)(t_start.tv_sec) * 1000000000LL + t_start.tv_nsec;

                result = get_gcd(big, small);

                clock_gettime(CLOCK_MONOTONIC, &t_end);
                nsec2 = (uint64_t)(t_end.tv_sec) * 1000000000LL + t_end.tv_nsec;

                elapsed_nsec = nsec2 - nsec1;
                total_time_for_one += elapsed_nsec;
                printf("big=%d small=%d gcd=%d %" PRIu64 "\n", big, small, result, elapsed_nsec);
            }
            fprintf(file, "%d %" PRIu64 "\n", big, total_time_for_one);
        }
        fclose(file);
    } else {
        printf("Open file error!\n");
    }
    return 0;
}
/*
 * Depending on the memory configuration, object addresses are spread
 * between channels and ranks in RAM: the pool allocator will add
 * padding between objects. This function returns the new size of the
 * object.
 */
static unsigned optimize_object_size(unsigned obj_size)
{
    unsigned nrank, nchan;
    unsigned new_obj_size;

    /* get number of channels */
    nchan = rte_memory_get_nchannel();
    if (nchan == 0)
        nchan = 1;

    /* get number of ranks */
    nrank = rte_memory_get_nrank();
    if (nrank == 0)
        nrank = 1;

    /* process new object size */
    new_obj_size = (obj_size + CACHE_LINE_MASK) / CACHE_LINE_SIZE;
    while (get_gcd(new_obj_size, nrank * nchan) != 1 ||
           get_gcd(nchan, new_obj_size) != 1)
        new_obj_size++;
    return new_obj_size * CACHE_LINE_SIZE;
}
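The loop above rounds the object up to a whole number of cache lines and then grows it until that line count is coprime with both the channel count and the channel-times-rank product, so consecutive objects do not all start on the same channel/rank. A minimal self-contained sketch of the same arithmetic follows; the channel/rank counts and object size are hard-coded assumptions here, since rte_memory_get_nchannel()/rte_memory_get_nrank() are DPDK runtime queries that are not reproduced:

/* Illustrative sketch only; nchan, nrank, and obj_size are assumed values. */
#include <stdio.h>

#define CACHE_LINE_SIZE 64
#define CACHE_LINE_MASK (CACHE_LINE_SIZE - 1)

static unsigned gcd_u(unsigned a, unsigned b)
{
    while (b) { unsigned t = a % b; a = b; b = t; }
    return a;
}

int main(void)
{
    unsigned nchan = 4, nrank = 2;   /* assumed memory layout */
    unsigned obj_size = 512;         /* requested object size in bytes */
    unsigned n = (obj_size + CACHE_LINE_MASK) / CACHE_LINE_SIZE;

    /* bump the size in cache lines until it is coprime with
     * nrank*nchan and with nchan alone */
    while (gcd_u(n, nrank * nchan) != 1 || gcd_u(nchan, n) != 1)
        n++;

    printf("%u -> %u bytes\n", obj_size, n * CACHE_LINE_SIZE);  /* prints: 512 -> 576 bytes */
    return 0;
}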
int read(int fhandle, void *buffer, int length)
{
    int read_bytes;
    if (fhandle == FILEHANDLE_STDIN && length > 0) {
        gcd_t *gcd = get_gcd();
        read_bytes = gcd->read(gcd, buffer, length);
        return read_bytes;
    }
    // TODO - other filehandles
    return 0;
}
int write(int fhandle, const void *buffer, int length)
{
    int written_bytes;
    // Write to stdout
    if (fhandle == FILEHANDLE_STDOUT && length > 0) {
        gcd_t *gcd = get_gcd();
        written_bytes = gcd->write(gcd, buffer, length);
        return written_bytes;
    }
    // TODO - other filehandles
    return 0;
}
static void build_full_cmd(char *cmd, size_t nSize, const CONF_GUIEX *conf, const OUTPUT_INFO *oip, const PRM_ENC *pe, const SYSTEM_DATA *sys_dat, const char *input)
{
    CONF_GUIEX prm;
    //copy the parameters
    memcpy(&prm, conf, sizeof(CONF_GUIEX));
    //perform the common replacements
    cmd_replace(prm.vid.cmdex,     sizeof(prm.vid.cmdex),     pe, sys_dat, conf, oip);
    cmd_replace(prm.vid.stats,     sizeof(prm.vid.stats),     pe, sys_dat, conf, oip);
    cmd_replace(prm.vid.tcfile_in, sizeof(prm.vid.tcfile_in), pe, sys_dat, conf, oip);
    cmd_replace(prm.vid.cqmfile,   sizeof(prm.vid.cqmfile),   pe, sys_dat, conf, oip);
    if (!prm.oth.disable_guicmd) { //not in CLI mode
        //apply the automatic settings
        apply_guiEx_auto_settings(&prm.x264, oip->w, oip->h, oip->rate, oip->scale, sys_dat->exstg->s_local.auto_ref_limit_by_level);
        //build the command line from the GUI settings
        build_cmd_from_conf(cmd, nSize, &prm.x264, &prm.vid, FALSE);
    }
    //append the commands from cmdex that were not consumed above
    if (str_has_char(prm.vid.cmdex))
        append_cmdex(cmd, nSize, prm.vid.cmdex, prm.oth.disable_guicmd, conf);
    //issue messages
    if ((conf->x264.vbv_bufsize != 0 || conf->x264.vbv_maxrate != 0) && prm.vid.afs)
        write_log_auo_line(LOG_INFO, "自動フィールドシフト使用時はvbv設定は正確に反映されません。");
    //if keyframe detection ran, its QP file exists, and --qpfile was not specified, load that file via --qpfile
    char auoqpfile[MAX_PATH_LEN];
    apply_appendix(auoqpfile, _countof(auoqpfile), pe->temp_filename, pe->append.qp);
    BOOL disable_keyframe_afs = conf->vid.afs && !sys_dat->exstg->s_local.set_keyframe_as_afs_24fps;
    if (prm.vid.check_keyframe && !disable_keyframe_afs && PathFileExists(auoqpfile) && strstr(cmd, "--qpfile") == NULL)
        sprintf_s(cmd + strlen(cmd), nSize - strlen(cmd), " --qpfile \"%s\"", auoqpfile);
    //add --frames unless this is afs on the 1st pass, and only if --frames is not already present
    if ((!prm.vid.afs || pe->current_x264_pass > 1) && strstr(cmd, "--frames") == NULL)
        sprintf_s(cmd + strlen(cmd), nSize - strlen(cmd), " --frames %d", oip->n - pe->drop_count + pe->delay_cut_additional_vframe);
    //add the resolution info (--input-res)
    if (strcmp(input, PIPE_FN) == 0)
        sprintf_s(cmd + strlen(cmd), nSize - strlen(cmd), " --input-res %dx%d", oip->w, oip->h);
    //add the raw format info
    sprintf_s(cmd + strlen(cmd), nSize - strlen(cmd), " --input-csp %s", specify_input_csp(prm.x264.output_csp));
    //fps: if tcfile-in is specified, skip the automatic --fps addition
    if (!prm.x264.use_tcfilein && strstr(cmd, "--tcfile-in") == NULL) {
        int gcd = get_gcd(oip->rate, oip->scale);
        sprintf_s(cmd + strlen(cmd), nSize - strlen(cmd), " --fps %d/%d", oip->rate / gcd, oip->scale / gcd);
    }
    //output file
    const char * const outfile = (prm.x264.nul_out) ? "nul" : pe->temp_filename;
    sprintf_s(cmd + strlen(cmd), nSize - strlen(cmd), " -o \"%s\"", outfile);
    //input
    sprintf_s(cmd + strlen(cmd), nSize - strlen(cmd), " \"%s\"", input);
}
int main(void)
{
    int u, v;
    int gcd;

    u = 220;
    v = 1200000;

    gcd = get_gcd(u, v);
    printf("%d %d GCD result = %d\n", u, v, gcd);

    gcd = gcd_modulus(u, v);
    printf("%d %d GCD result = %d\n", u, v, gcd);

    gcd = gcd_recursion(u, v);
    printf("%d %d GCD result = %d\n", u, v, gcd);

    return 0;
}
int main()
{
    int itr;
    int nCount;
    long long w;        /* declared long long to match the %lld conversions below */
    long long h;
    long long tsize;

    scanf("%d", &nCount);
    for (itr = 0; itr < nCount; itr++) {
        printf("#testCase%d\n", itr + 1);
        scanf("%lld %lld", &w, &h);
        tsize = get_gcd(w, h);
        printf("%lld\n", tsize);
        printf("%lld\n", w / tsize * h / tsize);
    }
    return 0;
}
int main()
{
    int u, v;

    puts("\n EUCLID1 :\tGet GCD of two positive integers"
         "\n \t\tInput 0 to end program\n");
    while (1) {
        puts("\n\n Input two positive integers -> ");
        scanf("%d %d", &u, &v);
        if (u < 0 || v < 0)     /* treat negative input as invalid */
            continue;
        if (u == 0 || v == 0)   /* stop when 0 is entered */
            break;
        printf("\n\t GCD of %d and %d is %d.\n", u, v, get_gcd(u, v));
    }
    return 0;
}
int libavsmash_video_setup_timestamp_info
(
    libavsmash_video_decode_handler_t *vdhp,
    libavsmash_video_output_handler_t *vohp,
    int64_t                           *framerate_num,
    int64_t                           *framerate_den
)
{
    int err = -1;
    uint64_t media_timescale = lsmash_get_media_timescale( vdhp->root, vdhp->track_id );
    uint64_t media_duration  = lsmash_get_media_duration_from_media_timeline( vdhp->root, vdhp->track_id );
    if( media_duration == 0 )
        media_duration = INT32_MAX;
    if( vdhp->sample_count == 1 )
    {
        /* Calculate average framerate. */
        reduce_fraction( &media_timescale, &media_duration );
        *framerate_num = (int64_t)media_timescale;
        *framerate_den = (int64_t)media_duration;
        err = 0;
        goto setup_finish;
    }
    lw_log_handler_t *lhp = &vdhp->config.lh;
    lsmash_media_ts_list_t ts_list;
    if( lsmash_get_media_timestamps( vdhp->root, vdhp->track_id, &ts_list ) < 0 )
    {
        lw_log_show( lhp, LW_LOG_ERROR, "Failed to get timestamps." );
        goto setup_finish;
    }
    if( ts_list.sample_count != vdhp->sample_count )
    {
        lw_log_show( lhp, LW_LOG_ERROR, "Failed to count number of video samples." );
        goto setup_finish;
    }
    uint32_t composition_sample_delay;
    if( lsmash_get_max_sample_delay( &ts_list, &composition_sample_delay ) < 0 )
    {
        lsmash_delete_media_timestamps( &ts_list );
        lw_log_show( lhp, LW_LOG_ERROR, "Failed to get composition delay." );
        goto setup_finish;
    }
    if( composition_sample_delay )
    {
        /* Consider composition order for keyframe detection.
         * Note: sample number for L-SMASH is 1-origin. */
        vdhp->order_converter = (order_converter_t *)lw_malloc_zero( (ts_list.sample_count + 1) * sizeof(order_converter_t) );
        if( !vdhp->order_converter )
        {
            lsmash_delete_media_timestamps( &ts_list );
            lw_log_show( lhp, LW_LOG_ERROR, "Failed to allocate memory." );
            goto setup_finish;
        }
        for( uint32_t i = 0; i < ts_list.sample_count; i++ )
            ts_list.timestamp[i].dts = i + 1;
        lsmash_sort_timestamps_composition_order( &ts_list );
        for( uint32_t i = 0; i < ts_list.sample_count; i++ )
            vdhp->order_converter[i + 1].composition_to_decoding = (uint32_t)ts_list.timestamp[i].dts;
    }
    /* Calculate average framerate. */
    uint64_t largest_cts          = ts_list.timestamp[0].cts;
    uint64_t second_largest_cts   = 0;
    uint64_t first_duration       = ts_list.timestamp[1].cts - ts_list.timestamp[0].cts;
    uint64_t composition_timebase = first_duration;
    int      strict_cfr = 1;
    for( uint32_t i = 1; i < ts_list.sample_count; i++ )
    {
        uint64_t duration = ts_list.timestamp[i].cts - ts_list.timestamp[i - 1].cts;
        if( duration == 0 )
        {
            lsmash_delete_media_timestamps( &ts_list );
            lw_log_show( lhp, LW_LOG_WARNING, "Detected CTS duplication at frame %" PRIu32, i );
            err = 0;
            goto setup_finish;
        }
        if( strict_cfr && duration != first_duration )
            strict_cfr = 0;
        composition_timebase = get_gcd( composition_timebase, duration );
        second_largest_cts = largest_cts;
        largest_cts = ts_list.timestamp[i].cts;
    }
    uint64_t reduce = reduce_fraction( &media_timescale, &composition_timebase );
    uint64_t composition_duration = ((largest_cts - ts_list.timestamp[0].cts) + (largest_cts - second_largest_cts)) / reduce;
    lsmash_delete_media_timestamps( &ts_list );
    double avg_frame_rate = (vdhp->sample_count * ((double)media_timescale / composition_duration));
    if( strict_cfr || !lw_try_rational_framerate( avg_frame_rate, framerate_num, framerate_den, composition_timebase ) )
    {
        uint64_t num = (uint64_t)(avg_frame_rate * composition_timebase + 0.5);
        uint64_t den = composition_timebase;
        if( num && den )
            reduce_fraction( &num, &den );
        else
        {
            num = 1;
            den = 1;
        }
        *framerate_num = (int64_t)num;
        *framerate_den = (int64_t)den;
    }
    err = 0;
setup_finish:;
    if( vohp->vfr2cfr )
    {
        /* Override average framerate by specified output constant framerate. */
        *framerate_num = (int64_t)vohp->cfr_num;
        *framerate_den = (int64_t)vohp->cfr_den;
        vohp->frame_count = ((double)vohp->cfr_num / vohp->cfr_den)
                          * ((double)media_duration / media_timescale) + 0.5;
    }
    else
        vohp->frame_count = libavsmash_video_get_sample_count( vdhp );
    uint32_t min_cts_sample_number = get_decoding_sample_number( vdhp->order_converter, 1 );
    vdhp->config.error = lsmash_get_cts_from_media_timeline( vdhp->root, vdhp->track_id, min_cts_sample_number, &vdhp->min_cts );
    return err;
}
long long get_gcd(long long n1, long long n2)
{
    if (n2 == 0) {
        return n1;
    }
    return get_gcd(n2, n1 % n2);
}
static void setup_timestamp_info( libavsmash_video_decode_handler_t *hp, VideoInfo *vi, uint64_t media_timescale, IScriptEnvironment *env )
{
    if( vi->num_frames == 1 )
    {
        /* Calculate average framerate. */
        uint64_t media_duration = lsmash_get_media_duration_from_media_timeline( hp->root, hp->track_ID );
        if( media_duration == 0 )
            media_duration = INT32_MAX;
        reduce_fraction( &media_timescale, &media_duration );
        vi->fps_numerator   = (unsigned int)media_timescale;
        vi->fps_denominator = (unsigned int)media_duration;
        return;
    }
    lsmash_media_ts_list_t ts_list;
    if( lsmash_get_media_timestamps( hp->root, hp->track_ID, &ts_list ) )
        env->ThrowError( "LSMASHVideoSource: failed to get timestamps." );
    if( ts_list.sample_count != vi->num_frames )
        env->ThrowError( "LSMASHVideoSource: failed to count number of video samples." );
    uint32_t composition_sample_delay;
    if( lsmash_get_max_sample_delay( &ts_list, &composition_sample_delay ) )
    {
        lsmash_delete_media_timestamps( &ts_list );
        env->ThrowError( "LSMASHVideoSource: failed to get composition delay." );
    }
    if( composition_sample_delay )
    {
        /* Consider composition order for keyframe detection.
         * Note: sample number for L-SMASH is 1-origin. */
        hp->order_converter = (order_converter_t *)malloc( (ts_list.sample_count + 1) * sizeof(order_converter_t) );
        if( !hp->order_converter )
        {
            lsmash_delete_media_timestamps( &ts_list );
            env->ThrowError( "LSMASHVideoSource: failed to allocate memory." );
        }
        for( uint32_t i = 0; i < ts_list.sample_count; i++ )
            ts_list.timestamp[i].dts = i + 1;
        lsmash_sort_timestamps_composition_order( &ts_list );
        for( uint32_t i = 0; i < ts_list.sample_count; i++ )
            hp->order_converter[i + 1].composition_to_decoding = (uint32_t)ts_list.timestamp[i].dts;
    }
    /* Calculate average framerate. */
    uint64_t largest_cts          = ts_list.timestamp[1].cts;
    uint64_t second_largest_cts   = ts_list.timestamp[0].cts;
    uint64_t composition_timebase = ts_list.timestamp[1].cts - ts_list.timestamp[0].cts;
    for( uint32_t i = 2; i < ts_list.sample_count; i++ )
    {
        if( ts_list.timestamp[i].cts == ts_list.timestamp[i - 1].cts )
        {
            lsmash_delete_media_timestamps( &ts_list );
            return;
        }
        composition_timebase = get_gcd( composition_timebase, ts_list.timestamp[i].cts - ts_list.timestamp[i - 1].cts );
        second_largest_cts = largest_cts;
        largest_cts = ts_list.timestamp[i].cts;
    }
    uint64_t reduce = reduce_fraction( &media_timescale, &composition_timebase );
    uint64_t composition_duration = ((largest_cts - ts_list.timestamp[0].cts) + (largest_cts - second_largest_cts)) / reduce;
    lsmash_delete_media_timestamps( &ts_list );
    vi->fps_numerator   = (unsigned int)((vi->num_frames * ((double)media_timescale / composition_duration)) * composition_timebase + 0.5);
    vi->fps_denominator = (unsigned int)composition_timebase;
}
void lwlibav_setup_timestamp_info
(
    lwlibav_file_handler_t         *lwhp,
    lwlibav_video_decode_handler_t *vdhp,
    lwlibav_video_output_handler_t *vohp,
    int64_t                        *framerate_num,
    int64_t                        *framerate_den
)
{
    AVStream *video_stream = vdhp->format->streams[ vdhp->stream_index ];
    if( vdhp->frame_count == 1
     || lwhp->raw_demuxer
     || ((lwhp->format_flags & AVFMT_TS_DISCONT) && !(vdhp->lw_seek_flags & SEEK_DTS_BASED))
     || !(vdhp->lw_seek_flags & (SEEK_DTS_BASED | SEEK_PTS_BASED | SEEK_PTS_GENERATED)) )
    {
        *framerate_num = (int64_t)video_stream->avg_frame_rate.num;
        *framerate_den = (int64_t)video_stream->avg_frame_rate.den;
        return;
    }
    video_frame_info_t *info = vdhp->frame_list;
    int64_t  first_ts;
    int64_t  largest_ts;
    int64_t  second_largest_ts;
    uint64_t first_duration;
    uint64_t stream_timebase;
    int      strict_cfr;
    if( !(lwhp->format_flags & AVFMT_TS_DISCONT)
     && (vdhp->lw_seek_flags & (SEEK_PTS_BASED | SEEK_PTS_GENERATED)) )
    {
        first_ts          = info[1].pts;
        largest_ts        = first_ts;
        second_largest_ts = first_ts;
        first_duration    = info[2].pts - info[1].pts;
        stream_timebase   = first_duration;
        strict_cfr        = (first_duration != 0);
        for( uint32_t i = 2; i <= vdhp->frame_count; i++ )
        {
            uint64_t duration = info[i].pts - info[i - 1].pts;
            if( duration == 0 )
            {
                if( vdhp->lh.show_log )
                    vdhp->lh.show_log( &vdhp->lh, LW_LOG_WARNING,
                                       "Detected PTS %"PRId64" duplication at frame %"PRIu32,
                                       info[i].pts, i );
                goto fail;
            }
            if( strict_cfr && duration != first_duration )
                strict_cfr = 0;
            stream_timebase   = get_gcd( stream_timebase, duration );
            second_largest_ts = largest_ts;
            largest_ts        = info[i].pts;
        }
    }
    else
    {
        uint32_t prev;
        uint32_t curr;
        if( vdhp->order_converter )
        {
            prev = vdhp->order_converter[1].decoding_to_presentation;
            curr = vdhp->order_converter[2].decoding_to_presentation;
        }
        else
        {
            prev = 1;
            curr = 2;
        }
        first_ts          = info[prev].dts;
        largest_ts        = first_ts;
        second_largest_ts = first_ts;
        first_duration    = info[curr].dts - info[prev].dts;
        stream_timebase   = first_duration;
        strict_cfr        = (first_duration != 0);
        for( uint32_t i = 2; i <= vdhp->frame_count; i++ )
        {
            if( vdhp->order_converter )
            {
                prev = vdhp->order_converter[i - 1].decoding_to_presentation;
                curr = vdhp->order_converter[i    ].decoding_to_presentation;
            }
            else
            {
                prev = i - 1;
                curr = i;
            }
            uint64_t duration = info[curr].dts - info[prev].dts;
            if( duration == 0 )
            {
                if( vdhp->lh.show_log )
                    vdhp->lh.show_log( &vdhp->lh, LW_LOG_WARNING,
                                       "Detected DTS %"PRId64" duplication at frame %"PRIu32,
                                       info[curr].dts, curr );
                goto fail;
            }
            if( strict_cfr && duration != first_duration )
                strict_cfr = 0;
            stream_timebase   = get_gcd( stream_timebase, duration );
            second_largest_ts = largest_ts;
            largest_ts        = info[curr].dts;
        }
    }
    stream_timebase *= video_stream->time_base.num;
    uint64_t stream_timescale = video_stream->time_base.den;
    uint64_t reduce = reduce_fraction( &stream_timescale, &stream_timebase );
    uint64_t stream_duration = (((largest_ts - first_ts) + (largest_ts - second_largest_ts)) * video_stream->time_base.num) / reduce;
    double stream_framerate = (vohp->frame_count - (vohp->repeat_correction_ts ? 1 : 0))
                            * ((double)stream_timescale / stream_duration);
    if( strict_cfr || !lw_try_rational_framerate( stream_framerate, framerate_num, framerate_den, stream_timebase ) )
    {
        if( stream_timebase > INT_MAX || (uint64_t)(stream_framerate * stream_timebase + 0.5) > INT_MAX )
            goto fail;
        uint64_t num = (uint64_t)(stream_framerate * stream_timebase + 0.5);
        uint64_t den = stream_timebase;
        if( num && den )
            reduce_fraction( &num, &den );
        else if( video_stream->avg_frame_rate.num == 0
              || video_stream->avg_frame_rate.den == 0 )
        {
            num = 1;
            den = 1;
        }
        else
            goto fail;
        *framerate_num = (int64_t)num;
        *framerate_den = (int64_t)den;
    }
    return;
fail:
    *framerate_num = (int64_t)video_stream->avg_frame_rate.num;
    *framerate_den = (int64_t)video_stream->avg_frame_rate.den;
    return;
}