//---------------------------------------------------------------------------------------------------------------------- Fragment Sphere::GetFragment(Vector& _rayOrigin, Vector& _ray, const float _distance) { // point of intersection in world space Vector point = _ray * _distance + _rayOrigin; // normal = point - center Vector normal = point - GetPosition(); normal.Normalise(); return Fragment(point, normal, GetColour(), GetMaterial()); }
// Extracts an EPUB CFI from this IRI's fragment component.
// Returns an empty CFI when the IRI has no fragment or when the fragment
// is not of the "epubcfi(...)" form.
const CFI IRI::ContentFragmentIdentifier() const
{
    // No fragment component at all -> nothing to parse.
    if ( !_url->has_ref() )
        return CFI();

    const string ref = Fragment();

    // Only fragments that start with "epubcfi(" qualify.
    if ( ref.rfind("epubcfi(", 0) != 0 )
        return CFI();

    return CFI(ref);
}
// Writes a (possibly multi-line) string at (inX, inY) in the given colour.
// A '\n' resets the cursor to column inX and advances to the next row.
void RenderTarget::Write(const std::string& inString, EColour inColour, int inX, int inY)
{
    int cursorX = inX;
    int cursorY = inY;

    for (char ch : inString)
    {
        if (ch == '\n')
        {
            // Newline: carriage-return to the starting column, move down one row.
            cursorX = inX;
            ++cursorY;
        }
        else
        {
            Write(Fragment(ch, inColour), cursorX, cursorY);
            ++cursorX;
        }
    }
}
/*---------------------------------------------------------------------- | main +---------------------------------------------------------------------*/ int main(int argc, char** argv) { if (argc < 2) { PrintUsageAndExit(); } // init the variables const char* input_filename = NULL; const char* output_filename = NULL; const char* track_selector = NULL; AP4_UI32 selected_track_id = 0; unsigned int fragment_duration = 0; bool auto_detect_fragment_duration = true; bool create_segment_index = false; bool quiet = false; AP4_UI32 timescale = 0; AP4_Result result; Options.verbosity = 1; Options.debug = false; Options.trim = false; Options.no_tfdt = false; Options.force_i_frame_sync = AP4_FRAGMENTER_FORCE_SYNC_MODE_NONE; // parse the command line argv++; char* arg; while ((arg = *argv++)) { if (!strcmp(arg, "--verbosity")) { arg = *argv++; if (arg == NULL) { fprintf(stderr, "ERROR: missing argument after --verbosity option\n"); return 1; } Options.verbosity = strtoul(arg, NULL, 10); } else if (!strcmp(arg, "--debug")) { Options.debug = true; } else if (!strcmp(arg, "--index")) { create_segment_index = true; } else if (!strcmp(arg, "--quiet")) { quiet = true; } else if (!strcmp(arg, "--trim")) { Options.trim = true; } else if (!strcmp(arg, "--no-tfdt")) { Options.no_tfdt = true; } else if (!strcmp(arg, "--force-i-frame-sync")) { arg = *argv++; if (arg == NULL) { fprintf(stderr, "ERROR: missing argument after --fragment-duration option\n"); return 1; } if (!strcmp(arg, "all")) { Options.force_i_frame_sync = AP4_FRAGMENTER_FORCE_SYNC_MODE_ALL; } else if (!strcmp(arg, "auto")) { Options.force_i_frame_sync = AP4_FRAGMENTER_FORCE_SYNC_MODE_AUTO; } else { fprintf(stderr, "ERROR: unknown mode for --force-i-frame-sync\n"); return 1; } } else if (!strcmp(arg, "--fragment-duration")) { arg = *argv++; if (arg == NULL) { fprintf(stderr, "ERROR: missing argument after --fragment-duration option\n"); return 1; } fragment_duration = strtoul(arg, NULL, 10); auto_detect_fragment_duration = 
false; } else if (!strcmp(arg, "--timescale")) { arg = *argv++; if (arg == NULL) { fprintf(stderr, "ERROR: missing argument after --timescale option\n"); return 1; } timescale = strtoul(arg, NULL, 10); } else if (!strcmp(arg, "--track")) { track_selector = *argv++; if (track_selector == NULL) { fprintf(stderr, "ERROR: missing argument after --track option\n"); return 1; } } else { if (input_filename == NULL) { input_filename = arg; } else if (output_filename == NULL) { output_filename = arg; } else { fprintf(stderr, "ERROR: unexpected argument '%s'\n", arg); return 1; } } } if (Options.debug && Options.verbosity == 0) { Options.verbosity = 1; } if (input_filename == NULL) { fprintf(stderr, "ERROR: no input specified\n"); return 1; } AP4_ByteStream* input_stream = NULL; result = AP4_FileByteStream::Create(input_filename, AP4_FileByteStream::STREAM_MODE_READ, input_stream); if (AP4_FAILED(result)) { fprintf(stderr, "ERROR: cannot open input (%d)\n", result); return 1; } if (output_filename == NULL) { fprintf(stderr, "ERROR: no output specified\n"); return 1; } AP4_ByteStream* output_stream = NULL; result = AP4_FileByteStream::Create(output_filename, AP4_FileByteStream::STREAM_MODE_WRITE, output_stream); if (AP4_FAILED(result)) { fprintf(stderr, "ERROR: cannot create/open output (%d)\n", result); return 1; } // parse the input MP4 file (moov only) AP4_File input_file(*input_stream, AP4_DefaultAtomFactory::Instance, true); // check the file for basic properties if (input_file.GetMovie() == NULL) { fprintf(stderr, "ERROR: no movie found in the file\n"); return 1; } if (!quiet && input_file.GetMovie()->HasFragments()) { fprintf(stderr, "NOTICE: file is already fragmented, it will be re-fragmented\n"); } // create a cusor list to keep track of the tracks we will read from AP4_Array<TrackCursor*> cursors; // iterate over all tracks TrackCursor* video_track = NULL; TrackCursor* audio_track = NULL; TrackCursor* subtitles_track = NULL; unsigned int video_track_count = 0; 
unsigned int audio_track_count = 0; unsigned int subtitles_track_count = 0; for (AP4_List<AP4_Track>::Item* track_item = input_file.GetMovie()->GetTracks().FirstItem(); track_item; track_item = track_item->GetNext()) { AP4_Track* track = track_item->GetData(); // sanity check if (track->GetSampleCount() == 0 && !input_file.GetMovie()->HasFragments()) { fprintf(stderr, "WARNING: track %d has no samples, it will be skipped\n", track->GetId()); continue; } // create a sample array for this track SampleArray* sample_array; if (input_file.GetMovie()->HasFragments()) { sample_array = new CachedSampleArray(track); } else { sample_array = new SampleArray(track); } // create a cursor for the track TrackCursor* cursor = new TrackCursor(track, sample_array); cursor->m_Tfra->SetTrackId(track->GetId()); cursors.Append(cursor); if (track->GetType() == AP4_Track::TYPE_VIDEO) { if (video_track) { fprintf(stderr, "WARNING: more than one video track found\n"); } else { video_track = cursor; } video_track_count++; } else if (track->GetType() == AP4_Track::TYPE_AUDIO) { if (audio_track == NULL) { audio_track = cursor; } audio_track_count++; } else if (track->GetType() == AP4_Track::TYPE_SUBTITLES) { if (subtitles_track == NULL) { subtitles_track = cursor; } subtitles_track_count++; } } if (cursors.ItemCount() == 0) { fprintf(stderr, "ERROR: no valid track found\n"); return 1; } if (track_selector) { if (!strncmp("audio", track_selector, 5)) { if (audio_track) { selected_track_id = audio_track->m_Track->GetId(); } else { fprintf(stderr, "ERROR: no audio track found\n"); return 1; } } else if (!strncmp("video", track_selector, 5)) { if (video_track) { selected_track_id = video_track->m_Track->GetId(); } else { fprintf(stderr, "ERROR: no video track found\n"); return 1; } } else if (!strncmp("subtitles", track_selector, 9)) { if (subtitles_track) { selected_track_id = subtitles_track->m_Track->GetId(); } else { fprintf(stderr, "ERROR: no subtitles track found\n"); return 1; } } else { 
selected_track_id = (AP4_UI32)strtol(track_selector, NULL, 10); bool found = false; for (unsigned int i=0; i<cursors.ItemCount(); i++) { if (cursors[i]->m_Track->GetId() == selected_track_id) { found = true; break; } } if (!found) { fprintf(stderr, "ERROR: track not found\n"); return 1; } } } if (video_track_count == 0 && audio_track_count == 0 && subtitles_track_count == 0) { fprintf(stderr, "ERROR: no audio, video, or subtitles track in the file\n"); return 1; } AP4_AvcSampleDescription* avc_desc = NULL; if (video_track && (Options.force_i_frame_sync != AP4_FRAGMENTER_FORCE_SYNC_MODE_NONE)) { // that feature is only supported for AVC AP4_SampleDescription* sdesc = video_track->m_Track->GetSampleDescription(0); if (sdesc) { avc_desc = AP4_DYNAMIC_CAST(AP4_AvcSampleDescription, sdesc); } if (avc_desc == NULL) { fprintf(stderr, "--force-i-frame-sync can only be used with AVC/H.264 video\n"); return 1; } } // remember where the stream was AP4_Position position; input_stream->Tell(position); // for fragmented input files, we need to populate the sample arrays if (input_file.GetMovie()->HasFragments()) { AP4_LinearReader reader(*input_file.GetMovie(), input_stream); for (unsigned int i=0; i<cursors.ItemCount(); i++) { reader.EnableTrack(cursors[i]->m_Track->GetId()); } AP4_UI32 track_id; AP4_Sample sample; do { result = reader.GetNextSample(sample, track_id); if (AP4_SUCCEEDED(result)) { for (unsigned int i=0; i<cursors.ItemCount(); i++) { if (cursors[i]->m_Track->GetId() == track_id) { cursors[i]->m_Samples->AddSample(sample); break; } } } } while (AP4_SUCCEEDED(result)); } else if (video_track && (Options.force_i_frame_sync != AP4_FRAGMENTER_FORCE_SYNC_MODE_NONE)) { AP4_Sample sample; if (Options.force_i_frame_sync == AP4_FRAGMENTER_FORCE_SYNC_MODE_AUTO) { // detect if this looks like an open-gop source for (unsigned int i=1; i<video_track->m_Samples->GetSampleCount(); i++) { if (AP4_SUCCEEDED(video_track->m_Samples->GetSample(i, sample))) { if (sample.IsSync()) { // 
we found a sync i-frame, assume this is *not* an open-gop source Options.force_i_frame_sync = AP4_FRAGMENTER_FORCE_SYNC_MODE_NONE; if (Options.debug) { printf("this does not look like an open-gop source, not forcing i-frame sync flags\n"); } break; } } } } if (Options.force_i_frame_sync != AP4_FRAGMENTER_FORCE_SYNC_MODE_NONE) { for (unsigned int i=0; i<video_track->m_Samples->GetSampleCount(); i++) { if (AP4_SUCCEEDED(video_track->m_Samples->GetSample(i, sample))) { if (IsIFrame(sample, avc_desc)) { video_track->m_Samples->ForceSync(i); } } } } } // return the stream to its original position input_stream->Seek(position); // auto-detect the fragment duration if needed if (auto_detect_fragment_duration) { if (video_track) { fragment_duration = AutoDetectFragmentDuration(video_track); } else if (audio_track && input_file.GetMovie()->HasFragments()) { fragment_duration = AutoDetectAudioFragmentDuration(*input_stream, audio_track); } if (fragment_duration == 0) { if (Options.verbosity > 0) { fprintf(stderr, "unable to autodetect fragment duration, using default\n"); } fragment_duration = AP4_FRAGMENTER_DEFAULT_FRAGMENT_DURATION; } else if (fragment_duration > AP4_FRAGMENTER_MAX_AUTO_FRAGMENT_DURATION) { if (Options.verbosity > 0) { fprintf(stderr, "auto-detected fragment duration too large, using default\n"); } fragment_duration = AP4_FRAGMENTER_DEFAULT_FRAGMENT_DURATION; } } // fragment the file Fragment(input_file, *output_stream, cursors, fragment_duration, timescale, selected_track_id, create_segment_index); // cleanup and exit if (input_stream) input_stream->Release(); if (output_stream) output_stream->Release(); return 0; }
void RenderTarget::Write(char inChar, int inX, int inY) { Write(Fragment(inChar), inX, inY); }
/** Returns a list of text \a fragments from \a s containing one or * more \a words. The list is sorted occording to the * number of occurrences of words within the fragment. */ static void highlighter(const std::string &s, const std::vector<std::string> &words, std::vector<Fragment> &fragments) { const std::string spanStart="<span class=\"hl\">"; const std::string spanEnd="</span>"; const std::string dots="..."; const int fragLen = 60; int sl=s.length(); // find positions of words in s size_t j=0; std::vector<WordPosition> positions; for (std::vector<std::string>::const_iterator it=words.begin(); it!=words.end(); ++it,++j ) { int pos=0; size_t i; std::string word = *it; while ((i=s.find(word,pos))!=std::string::npos) { positions.push_back(WordPosition(i,j)); pos=i+word.length(); } } // sort on position std::sort(positions.begin(),positions.end(),WordPosition_less()); // get fragments around words std::vector<Range> ranges; for (std::vector<WordPosition>::const_iterator it=positions.begin(); it!=positions.end(); ++it) { WordPosition wp = *it; std::string w = words[wp.index]; int i=wp.start; int wl=w.length(); if (!insideRange(ranges,i,wl)) { if (wl>fragLen) { fragments.push_back(Fragment(spanStart+w+spanEnd,1)); ranges.push_back(Range(i,i+wl)); } else { std::string startFragment,endFragment; int bi=i-(fragLen-wl)/2; int ei=i+wl+(fragLen-wl)/2; int occ=0; if (bi<0) { ei-=bi; bi=0; } else startFragment=dots; if (ei>sl) { ei=sl; } else endFragment=dots; while (bi>0 && !isspace(s[bi])) bi--; // round to start of the word while (ei<sl && !isspace(s[ei])) ei++; // round to end of the word // highlight any word in s between indexes bi and ei std::string fragment=startFragment; int pos=bi; for (std::vector<WordPosition>::const_iterator it2=positions.begin(); it2!=positions.end(); ++it2) { WordPosition wp2 = *it2; std::string w2 = words[wp2.index]; int wl2 = w2.length(); if (wp2.start>=bi && wp2.start+wl2<=ei) // word is inside the range! 
{ fragment+=s.substr(pos,wp2.start-pos)+ spanStart+ s.substr(wp2.start,wl2)+ spanEnd; pos=wp2.start+wl2; occ++; } } fragment+=s.substr(pos,ei-pos)+endFragment; fragments.push_back(Fragment(fragment,occ)); ranges.push_back(Range(bi,ei)); } } } std::sort(fragments.begin(),fragments.end(),Fragment_greater()); }
/*----------------------------------------------------------------------
|   main
+---------------------------------------------------------------------*/
// Command-line entry point: fragments a non-fragmented MP4 file.
// Usage: [options] <input> <output>
// Options: --verbosity <n>, --fragment-duration <ms>, --timescale <n>.
// Returns 0 on success, 1 on any error.
int main(int argc, char** argv)
{
    if (argc < 2) {
        PrintUsageAndExit();
    }

    // init the variables
    const char*  input_filename  = NULL;
    const char*  output_filename = NULL;
    unsigned int fragment_duration = 0;
    bool         auto_detect_fragment_duration = true; // cleared by --fragment-duration
    AP4_UI32     timescale = 0;
    AP4_Result   result;
    Options.verbosity = 0;

    // parse the command line
    argv++; // skip the program name
    char* arg;
    while ((arg = *argv++)) {
        if (!strcmp(arg, "--verbosity")) {
            arg = *argv++;
            if (arg == NULL) {
                fprintf(stderr, "ERROR: missing argument after --verbosity option\n");
                return 1;
            }
            Options.verbosity = strtoul(arg, NULL, 10);
        } else if (!strcmp(arg, "--fragment-duration")) {
            arg = *argv++;
            if (arg == NULL) {
                fprintf(stderr, "ERROR: missing argument after --fragment-duration option\n");
                return 1;
            }
            fragment_duration = strtoul(arg, NULL, 10);
            auto_detect_fragment_duration = false;
        } else if (!strcmp(arg, "--timescale")) {
            arg = *argv++;
            if (arg == NULL) {
                fprintf(stderr, "ERROR: missing argument after --timescale option\n");
                return 1;
            }
            timescale = strtoul(arg, NULL, 10);
        } else {
            // positional arguments: input file first, then output file
            if (input_filename == NULL) {
                input_filename = arg;
            } else if (output_filename == NULL) {
                output_filename = arg;
            } else {
                fprintf(stderr, "ERROR: unexpected argument '%s'\n", arg);
                return 1;
            }
        }
    }

    // open the input
    if (input_filename == NULL) {
        fprintf(stderr, "ERROR: no input specified\n");
        return 1;
    }
    AP4_ByteStream* input_stream = NULL;
    result = AP4_FileByteStream::Create(input_filename,
                                        AP4_FileByteStream::STREAM_MODE_READ,
                                        input_stream);
    if (AP4_FAILED(result)) {
        fprintf(stderr, "ERROR: cannot open input (%d)\n", result);
        return 1;
    }

    // open the output
    if (output_filename == NULL) {
        fprintf(stderr, "ERROR: no output specified\n");
        return 1;
    }
    AP4_ByteStream* output_stream = NULL;
    result = AP4_FileByteStream::Create(output_filename,
                                        AP4_FileByteStream::STREAM_MODE_WRITE,
                                        output_stream);
    if (AP4_FAILED(result)) {
        fprintf(stderr, "ERROR: cannot create/open output (%d)\n", result);
        return 1;
    }

    // parse the input MP4 file (moov only)
    AP4_File input_file(*input_stream, AP4_DefaultAtomFactory::Instance, true);

    // check the file for basic properties
    if (input_file.GetMovie() == NULL) {
        fprintf(stderr, "ERROR: no movie found in the file\n");
        return 1;
    }
    // unlike the full fragmenter, this tool refuses already-fragmented input
    if (input_file.GetMovie()->HasFragments()) {
        fprintf(stderr, "ERROR: file is already fragmented\n");
        return 1;
    }

    // locate the first audio and video tracks; at least one must exist and
    // carry samples
    AP4_Track* audio_track = input_file.GetMovie()->GetTrack(AP4_Track::TYPE_AUDIO);
    AP4_Track* video_track = input_file.GetMovie()->GetTrack(AP4_Track::TYPE_VIDEO);
    if (audio_track == NULL && video_track == NULL) {
        fprintf(stderr, "ERROR: no audio or video track in the file\n");
        return 1;
    }
    if (audio_track && audio_track->GetSampleCount() == 0) {
        fprintf(stderr, "ERROR: audio track has no samples\n");
        return 1;
    }
    if (video_track && video_track->GetSampleCount() == 0) {
        fprintf(stderr, "ERROR: video track has no samples\n");
        return 1;
    }

    // auto-detect the fragment duration if needed (video keyframe spacing;
    // falls back to the default when detection fails or looks implausible)
    if (auto_detect_fragment_duration) {
        if (video_track) {
            fragment_duration = AutoDetectFragmentDuration(video_track);
        } else {
            if (Options.verbosity > 0) {
                fprintf(stderr, "no video track, cannot autodetect fragment duration\n");
            }
        }
        if (fragment_duration == 0) {
            if (Options.verbosity > 0) {
                fprintf(stderr, "unable to detect fragment duration, using default\n");
            }
            fragment_duration = AP4_FRAGMENTER_DEFAULT_FRAGMENT_DURATION;
        } else if (fragment_duration > AP4_FRAGMENTER_MAX_AUTO_FRAGMENT_DURATION) {
            if (Options.verbosity > 0) {
                fprintf(stderr, "auto-detected fragment duration too large, using default\n");
            }
            fragment_duration = AP4_FRAGMENTER_DEFAULT_FRAGMENT_DURATION;
        }
    }

    // fragment the file
    Fragment(input_file, *output_stream, fragment_duration, timescale);

    // cleanup and exit
    if (input_stream) input_stream->Release();
    if (output_stream) output_stream->Release();

    return 0;
}