/**
 * Deletes or edits the media item at index `idx` of `medias`.
 *
 * When `del` is true the item is removed from the vector, its description is
 * printed, and it is freed. Otherwise the item is dispatched to the matching
 * type-specific editor (Book/Music/Movie). Out-of-range indices are reported
 * and leave the vector untouched.
 *
 * Fixes vs. original:
 *  - catch `out_of_range` by const reference instead of by value (avoids a
 *    copy and potential slicing; standard C++ idiom).
 *  - call `at(idx)` once in the edit branch instead of three times.
 *  - use an else-if chain: an object can match at most one of the three
 *    dynamic_casts, so the later casts were redundant work.
 */
void callEditFn(vector<Media*> *medias, unsigned idx, bool del) {
    if (del) {
        try {
            Media* item = medias->at(idx);
            medias->erase(medias->begin() + idx);
            cout << endl;
            cout << "Item Deleted" << endl << item->toString() << endl;
            delete item;
        } catch (const out_of_range &e) {
            cout << endl << "Item not Found! Nothing Deleted" << endl;
        }
    } else {
        try {
            Media* item = medias->at(idx);
            if (Book* b = dynamic_cast<Book*>(item))
                editBook(b);
            else if (Music* a = dynamic_cast<Music*>(item))
                editAudioTrack(a);
            else if (Movie* m = dynamic_cast<Movie*>(item))
                editMovie(m);
        } catch (const out_of_range &e) {
            cout << endl << "Item not Found! Nothing Edited" << endl;
        }
    }
}
/**
 * Pre-upload validation of a WebUpload entry: checks that there is enough
 * disk space for preprocessing and that every medium's source file exists.
 * Returns ENGINE_RESPONSE_OK, ENGINE_RESPONSE_NO_DISKSPACE, or
 * ENGINE_RESPONSE_FILES_MISSING.
 */
System::EngineResponse System::validateEntry (WebUpload::Entry * entry) {
    System::EngineResponse res = System::ENGINE_RESPONSE_OK;
    qDebug() << __FUNCTION__ << ": called";

    // Ok here's the problem: the target dir can be different for each medium
    // in the entry, but will it ever in real life be other than .share?
    // We need to have enough space for preprocessing all media before trying
    // to upload anything. Further checks are done later anyway, so let's just
    // assume everything goes under ~/MyDocs/.share. By checking available
    // space now we make sure the engine doesn't hang up and doesn't leave
    // garbage xml files.
    QDir targetDir = QDir::homePath() + "/MyDocs/.share/";
    bool enoughDiskSpace = checkDiskSpace(targetDir, entry);
    if (!enoughDiskSpace) {
        return System::ENGINE_RESPONSE_NO_DISKSPACE;
    }

    // Verify each medium's source file still exists on disk. An empty source
    // path is skipped (treated as "nothing to check" rather than an error).
    QVectorIterator<Media *> mediaIter = entry->media();
    while (mediaIter.hasNext ()) {
        Media * media = mediaIter.next ();
        QString filePath = media->srcFilePath ();
        qDebug() << "SYSTEM::" << __FUNCTION__ << filePath;
        if ((!filePath.isEmpty ()) && (!QFile::exists (filePath))) {
            return System::ENGINE_RESPONSE_FILES_MISSING;
        }
    }

    return res;
}
/**
 * Handles files dropped onto the media library: each local file URL is added
 * to the Library as a Media with a fresh base Clip. The drop is ignored when
 * the URL list is empty and accepted otherwise.
 *
 * Fix vs. original: the per-item `event->accept()` inside the loop was
 * redundant — the event is already accepted unconditionally after the loop.
 */
void MediaLibrary::dropEvent( QDropEvent *event )
{
    const QList<QUrl> &fileList = event->mimeData()->urls();
    if ( fileList.isEmpty() )
    {
        event->ignore();
        return;
    }
    Q_ASSERT( Library::getInstance() != NULL );

    foreach ( const QUrl &url, fileList )
    {
        const QString &fileName = url.toLocalFile();
        // Non-local URLs (e.g. http) yield an empty local path; skip them.
        if ( fileName.isEmpty() )
            continue;
        Media *media = Library::getInstance()->addMedia( fileName );
        if ( media != NULL )
        {
            // Every library media gets a base clip registered alongside it.
            Clip* clip = new Clip( media );
            media->setBaseClip( clip );
            Library::getInstance()->addClip( clip );
        }
        else
            qCritical() << "Clip already present in library or an error occurred while loading media:" << fileName;
    }
    event->accept();
}
/**
 * Creates an episode record for this show, attaches it to the given media,
 * and persists the media before returning the new episode.
 */
std::shared_ptr<ShowEpisode> Show::addEpisode( Media& media, unsigned int episodeNumber)
{
    auto ep = ShowEpisode::create( m_ml, media.id(), episodeNumber, m_id );
    // Link the media back to its episode and flush the change.
    media.setShowEpisode( ep );
    media.save();
    return ep;
}
void ending(int& sceen){ Texture message = Texture("res/mese-ji.png"); Texture jijii = Texture("res/jijii.png"); Texture con = Texture("res/kongura.png"); Font font = ("res/meiryo.ttc"); std::string text1 = "いい人生じゃった最後までやってくれて"; std::string text2 = "本当にありがとうございました"; Media clear = Media ("res/se/clear.wav"); clear.play(); while (sceen == 3) { if (!env.isOpen()) exit(0); env.begin(); env.bgColor(Color::black); if (env.isPushKey(GLFW_KEY_ENTER)){ sceen = 0; } drawTextureBox(-1024, -312, 2048, 1024, 0, 0, 2048, 1024, jijii); drawTextureBox(-1024, -512, 2048, 1024, 0, 0, 2048, 1024, message); drawTextureBox(-1024, 0, 2048-400, 256-50, 0, 0, 2048, 256, con); font.size(100); font.draw(text1, Vec2f(-900, -300), Color::white); font.draw(text2, Vec2f(-900, -400), Color::white); font.size(30); env.end(); } }
/**
 * Dialog showing the five most recently updated comments, each with a link
 * to its media (clicking emits _mediaClicked and closes the dialog), the
 * commenter's login name and timestamp, and the comment body.
 */
LatestCommentsDialog::LatestCommentsDialog(Session* session, MediaCollection *mediaCollection, WObject* parent): WDialog{parent}
{
    setResizable(true);
    setWindowTitle(wtr("menu.latest.comments"));
    setClosable(true);
    setTransient(true);
    setMaximumSize(700, WLength::Auto);
    Dbo::Transaction t(*session);
    // Five newest comments, newest first.
    Dbo::collection<CommentPtr> latestComments = session->find<Comment>().orderBy("last_updated desc").limit(5);
    if(!latestComments.size())
        contents()->addWidget(new WText{wtr("comments.empty")});
    for(CommentPtr comment: latestComments) {
        WContainerWidget* commentWidget = new WContainerWidget;
        Media media = mediaCollection->media(comment->mediaId());
        // Header: media title link + "user (timestamp)" badge, centered.
        WContainerWidget *header = WW<WContainerWidget>();
        header->setContentAlignment(AlignCenter);
        WAnchor *videoLink = WW<WAnchor>("", media.title(t)).css("link-hand label label-info comment-box-element");
        header->addWidget(videoLink);
        // Resolve the commenter's display identity from the auth tables.
        Dbo::ptr<AuthInfo> authInfo = session->find<AuthInfo>().where("user_id = ?").bind(comment->user().id());
        header->addWidget(WW<WText>(WString("{1} ({2})").arg(authInfo->identity("loginname")).arg(comment->lastUpdated().toString()))
            .css("label label-success comment-box-element"));
        commentWidget->addWidget(header);
        // Clicking the title notifies listeners and dismisses the dialog.
        videoLink->clicked().connect([=](WMouseEvent){ _mediaClicked.emit(media); accept(); });
        commentWidget->addWidget(WW<WText>(WString::fromUTF8(comment->content())).css("well comment-text comment-box-element").setInline(false));
        contents()->addWidget(WW<WContainerWidget>().css("comment-text").add(commentWidget));
    }
}
// Processes one file during a library scan: reports progress, builds the
// specialized Media object for the file, and saves valid media to the
// database together with the search path the file was found under.
void LibraryManagerPrivate::fileScan(QString file)
{
    // One-time database cleanup at the start of a scan (mustClean is
    // presumably reset inside clean() or by the caller — TODO confirm).
    if ( mustClean ) {
        DataBase::instance()->clean();
    }
    Media *m = Media::specializedObjectForFile(file);
    // Progress reporting only when the total is known.
    if ( totalFiles != 0 ) {
        currentFile++;
        emit processingFile(file, currentFile, totalFiles);
    }
    if ( m ) {
        if ( m->isValid() ) {
            QStringList paths = LibraryManager::searchPaths();
            // Pick the *shortest* configured search path that is a prefix of
            // the file's path (after canonicalization).
            // NOTE(review): selecting the shortest match is unusual — most
            // scanners want the longest (most specific) prefix; confirm the
            // intent with DataBase::save's expectations.
            QString shortestPath;
            for ( QString dpath : paths ) {
                dpath = QDir(dpath).canonicalPath();
                if ( file.startsWith(dpath) && ( ( dpath.size() < shortestPath.size() ) || shortestPath.isEmpty() ) ) {
                    shortestPath = dpath;
                }
            }
            DataBase::instance()->save(m, shortestPath);
        } else {
            // Invalid media: dispose via the Qt event loop.
            m->deleteLater();
        }
    }
}
void Display::render(Media & media, Player & player, std::vector<Enemy>& enemies, std::vector<Laser>& playerLasers, std::vector<Laser>& enemyLasers) { SDL_RenderClear(m_renderer); SDL_SetRenderDrawColor(m_renderer, 255, 255, 255, 0); //Render player SDL_RenderCopy(m_renderer, media.getPlayerTexture(), nullptr, &player.getPosition()); //Render enemies for (auto &i : enemies) { SDL_RenderCopy(m_renderer, media.getEnemyTexture(), nullptr, &i.m_pos); } //Render Player Lasers for (auto &i : playerLasers) { SDL_RenderCopy(m_renderer, media.getLaserTexture(), nullptr, &i.m_pos); } for (auto &i : enemyLasers) { SDL_RenderCopy(m_renderer, media.getLaserTexture(), nullptr, &i.m_pos); } SDL_RenderPresent(m_renderer); }
// Entry point: construct the Media application and run its event loop only
// when construction succeeded.
int main(int, char**)
{
    Media app;
    if (app.InitCheck() != B_OK)
        return 0;
    app.Run();
    return 0;
}
/**
 * Records a user's rating for a media item: updates the existing
 * MediaRating row for the (user, media) pair when one exists, otherwise
 * inserts a new one inside the given transaction.
 */
void User::rate(Wt::Dbo::ptr< User > userPtr, const Media& media, int rating, Wt::Dbo::Transaction& transaction)
{
    // Look for a rating this user already gave to this media.
    MediaRatingPtr existing = transaction.session().find<MediaRating>()
        .where("user_id = ?").bind(userPtr.id())
        .where("media_id = ?").bind(media.uid());
    if(existing) {
        // Update the existing row in place.
        existing.modify()->setRating(rating);
    } else {
        // First rating from this user for this media: insert a fresh row.
        transaction.session().add(new MediaRating{userPtr, media.uid(), rating});
    }
}
bool Settings::autoplay(const Media& media) { string autoplay = value(Settings::mediaAutoplay); if(autoplay == "autoplay_always") return true; if(autoplay == "autoplay_audio_only") return media.mimetype().find("audio") != string::npos; if(autoplay == "autoplay_video_only") return media.mimetype().find("video") != string::npos; return false; }
/**
 * Dialog for choosing the texture applied to one face of a node.
 * Builds the window with Apply/Import/Actions buttons, fills a listbox with
 * the project's images (pre-selecting the face's current texture), creates a
 * preview texture, and prepares a hidden Export context menu.
 */
TextureDialog::TextureDialog(EditorState *pstate, Node *pnode, CubeSide pface):
    Dialog(pstate), node(pnode), face(pface), lb(NULL), the_image(NULL), context(NULL)
{
    IVideoDriver *driver = state->device->getVideoDriver();
    IGUIEnvironment *guienv = state->device->getGUIEnvironment();

    // Window and basic items (the 74*3 geometry mirrors the 3x3 icon grid).
    win = guienv->addWindow(rect<s32>(340, 50, 340 + 74 * 3 + 10, 50 + 74 * 3 + 10), true,
        narrow_to_wide(std::string(getCubeSideName(face)) + " texture").c_str());
    guienv->addButton(rect<s32>(155, 30, 74*3, 55), win, ETD_GUI_ID_APPLY, L"Apply",
        L"Apply this texture selection to the node face");
    guienv->addButton(rect<s32>(155, 60, 74*3, 85), win, ETD_GUI_ID_IMPORT, L"Import",
        L"Import images from files");
    guienv->addButton(rect<s32>(84, 60, 150, 85), win, ETD_GUI_ID_ACTIONS, L"Actions");

    // Fill out listbox. Index 0 is a blank "no texture" entry; `count`
    // tracks the listbox index for each image so the current face texture
    // can be pre-selected.
    lb = guienv->addListBox(rect<s32>(10, 104, 74 * 3, 74 * 3), win, 502);
    Media *media = &state->project->media;
    std::map<std::string, Media::Image*>& images = media->getList();
    int count = 1;
    lb->addItem(L"");
    lb->setSelected(0);
    for (std::map<std::string, Media::Image*>::const_iterator it = images.begin();
            it != images.end(); ++it) {
        if (!it->second) {
            continue;
        }
        // The "default" image gets a blank label; others show a usage count.
        if (it->second->name == "default") {
            lb->addItem(L"");
        } else {
            lb->addItem(narrow_to_wide(it->second->name + " [used " + num_to_str(it->second->getHolders()) + " times]").c_str());
        }
        if (it->second == node->getTexture(face))
            lb->setSelected(count);
        count++;
    }

    // Preview texture for the face's current image, if any.
    Media::Image *image = node->getTexture(face);
    if (image) {
        the_image = driver->addTexture("tmpicon.png", image->get());
    }

    // Context menu (hidden until the Actions button shows it).
    context = guienv->addContextMenu(rect<s32>(84, 85, 150, 180), win, ETD_GUI_ID_ACTIONS_CM);
    context->addItem(L"Export", ETD_GUI_ID_EXPORT);
    context->setCloseHandling(ECMC_HIDE);
    context->setVisible(false);
    context->setEventParent(win);
}
/**
 * Associates the given media (track) with this artist by inserting a row
 * into the MediaArtistRelation table. Returns true when the insert
 * succeeded.
 */
bool Artist::addMedia( Media& media )
{
    static const std::string insertQuery = "INSERT INTO MediaArtistRelation VALUES(?, ?)";
    // If track's ID is 0, the request will fail due to table constraints
    sqlite::ForeignKey artistFk( m_id );
    return sqlite::Tools::executeInsert( m_ml->getConn(), insertQuery, media.id(), artistFk ) != 0;
}
/**
 * \brief Wrapper for the Media Flush function.
 * This wrapper protects from simultaneous access to the same media.
 * \param pRtMedia Pointer to a RtMedia instance
 * \return Operation result code
 */
unsigned char RTMEDIA_Flush( RtMedia *pRtMedia)
{
    unsigned char result;
    Media *pMedia = &(pRtMedia->media);

    /* Take the mutex.
       Fix vs. original: a zero block time means xSemaphoreTake returns
       immediately (and its result was ignored), so concurrent callers were
       never actually serialized. Block until the mutex is available. */
    xSemaphoreTake( pRtMedia->mutex, portMAX_DELAY );

    /* Invoke the Media Flush function */
    result = pMedia->flush(pMedia);

    /* Release the mutex */
    xSemaphoreGive( pRtMedia->mutex );

    return result;
}
int main(int argc, char **argv) { SDL_Event *event = av_mallocz(sizeof(SDL_Event)); Content *content = av_mallocz(sizeof(Content)); Media *video = av_mallocz(sizeof(Media)); Media *audio = av_mallocz(sizeof(Media)); State *state = av_mallocz(sizeof(State)); SDL_Thread *video_decode_tid1; SDL_Thread *video_decode_tid2; SDL_Thread *read_pkt_tid; SDL_Thread *refresh_tid; // InitPool(2); state->content = content; state->video = video; state->audio = audio; init_video(video); if(argc < 2) { LOGE("Usage : play <content>"); exit(1); } av_register_all(); if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { LOGE("fail to initialize SDL"); exit(1); } av_strlcpy(content->name, argv[1], sizeof(content->name)); LOGE("addr : %x", video->frame_buf); get_content_info(content); LOGI("finding streams"); find_av_streams(content, video, audio); LOGI("finding decoder"); find_decoder(video); video->get_info(video); LOGI("creating reading thread..."); read_pkt_tid = SDL_CreateThread(queue_av_pkt, "read", state); LOGI("initing screen"); init_screen(video); // LOGI("alloc frame"); alloc_frame(video); LOGI("alloc all the frame"); // alloc_all_frame(video); LOGI("creating decode thread1..."); video_decode_tid1 = SDL_CreateThread(decode_video, "decode1", video); refresh(state, 50); event_handle(event); return 0; }
/**
 * Starts playback of a file entry. The pending queue is always cleared.
 * An invalid entry stops playback; a valid one is played when explicitly
 * requested (play_file) or when file playback is already the active mode.
 */
void Player::PlayFile(const Media& entry, bool play_file)
{
    media_.clearQueue();
    if (!entry.valid()) {
        Stop();
        return;
    }
    // Neither explicitly requested nor already in file mode: do nothing.
    if (!play_file && !file_active_)
        return;
    LOG("Player") << "Play: " << entry;
    media_.setCurrentSource(entry.path());
    media_.play();
    file_active_ = true;
}
/*
 * Issues an asynchronous read of `count` more bytes for the demuxer.
 * Returns false when the previous read came up short (treated as EOF),
 * true when a new read was queued. Unconsumed bytes left over from the
 * previous read closure are re-requested (count + left), and an explicit
 * next_read_position (seek) overrides the computed start offset.
 */
bool Mp3Demuxer::RequestMoreData (MediaCallback *callback, guint32 count)
{
    guint64 start = 0;
    guint32 left = 0;
    guint64 previous_read_position = next_read_position;

    // Only one outstanding read at a time.
    g_return_val_if_fail (!waiting_for_read, false);

    if (read_closure != NULL) {
        if (read_closure->GetCount () != read_closure->GetData ()->GetSize ()) {
            /* The last read didn't read everything we requested, so there is nothing more to read */
            LOG_MP3 ("Mp3Demuxer::RequestMoreData (): the last read didn't read everything we requested, so we reached eof.\n");
            return false;
        }
        // Carry over the unconsumed tail of the previous buffer: next read
        // starts where consumption stopped and re-fetches `left` bytes.
        gint64 position = read_closure->GetData ()->GetPosition ();
        left = read_closure->GetData ()->GetRemainingSize ();
        start = read_closure->GetOffset () + position;
        read_closure->unref ();
        read_closure = NULL;
    }

    // A pending seek target (anything but the G_MAXUINT64 sentinel) wins
    // over the continuation offset, and is consumed here.
    if (next_read_position != G_MAXUINT64) {
        start = next_read_position;
        next_read_position = G_MAXUINT64;
    }

    // The media reference is only needed to construct the closure.
    Media *media = GetMediaReffed ();
    read_closure = new MediaReadClosure (media, callback, this, start, count + left);
    media->unref ();

    LOG_MP3 ("Mp3Demuxer::RequestMoreData (%u) requesting: %u at offset %" G_GINT64_FORMAT " (left: %u next read position: %" G_GUINT64_FORMAT ")\n",
        count, read_closure->GetCount (), read_closure->GetOffset (), left, previous_read_position);

    waiting_for_read = true;
    current_position = start;
    source->ReadAsync (read_closure);
    return true;
}
/**
 * \brief Wrapper for the Media Write function.
 * This wrapper protects from simultaneous access to the same media.
 * \param pRtMedia Pointer to a RtMedia instance
 * \param address Address at which to write
 * \param data Pointer to the data to write
 * \param length Size of the data buffer
 * \param callback Optional pointer to a callback function to invoke when
 * the write operation terminates
 * \param argument Optional argument for the callback function
 * \return Operation result code
 */
unsigned char RTMEDIA_Write( RtMedia *pRtMedia, unsigned int address, void *data, unsigned int length, MediaCallback callback, void *argument)
{
    unsigned char result;
    Media *pMedia = &(pRtMedia->media);

    /* Take the mutex.
       Fix vs. original: a zero block time means xSemaphoreTake returns
       immediately (and its result was ignored), so concurrent writers were
       never actually serialized. Block until the mutex is available. */
    xSemaphoreTake( pRtMedia->mutex, portMAX_DELAY );

    /* Invoke the Media Write function */
    result = pMedia->write(pMedia, address, data, length, callback, argument);

    /* Release the mutex */
    xSemaphoreGive( pRtMedia->mutex );

    return result;
}
/**
 * Recursively scans `path` (following symlinks, max depth 20) and registers
 * every valid, allowed media file into the collection and under the given
 * root directory. Per-entry scan errors are logged and skipped; a failure
 * to open the directory itself is logged as an error.
 *
 * Fix vs. original: the inner loop declared a second `Media media
 * {entry.path()}`, shadowing the result of resolveMedia() — so the object
 * that was validated was not the one stored. The resolved media is now
 * used directly.
 */
void MediaCollection::Private::listDirectory( boost::filesystem::path path, shared_ptr< MediaDirectory > rootDirectory )
{
    vector<fs::directory_entry> v;
    try {
        fs::recursive_directory_iterator it( path, fs::symlink_option::recurse );
        while(it != fs::recursive_directory_iterator()) {
            // Guard against symlink cycles / pathological nesting.
            if(it.level() > 20)
                it.pop();
            try {
                v.push_back(*it++);
            } catch(std::exception &e) {
                // Unreadable entry: log and skip the whole subtree.
                log("notice") << "Error scanning for path " << *it << ": " << e.what();
                it.pop();
            }
        }
        for( fs::directory_entry entry : v ) {
            Media media = resolveMedia( entry.path() );
            if( media.valid() && isAllowed( entry.path() ) ) {
                collection[media.uid()] = media;
                rootDirectory->add(media);
            }
        }
    } catch ( std::exception &e ) {
        log( "error" ) << "Error trying to add path " << path << ": " << e.what();
    }
}
/*
 * libpurple "media ready" callback: looks up the local Media wrapper for
 * the incoming call, builds an accept/reject prompt naming the caller, and
 * shows it via purple_request_accept_cancel. Always returns FALSE so the
 * source is not re-armed.
 */
gboolean Media::minbif_media_ready_cb(PurpleMedia *media)
{
    Media m = media_list.getMedia(media);
    string alias = m.getBuddy().getAlias();

    //PurpleMediaSessionType type = purple_media_get_session_type(media, sid);
    // NOTE(review): the session type is hard-coded to VIDEO, so the
    // audio/video branches below can never all be reached as written —
    // presumably a placeholder until the sid is available; confirm.
    PurpleMediaSessionType type = PURPLE_MEDIA_VIDEO;
    gchar *message = NULL;
    PurpleAccount* account = purple_media_get_account(media);
    if (type & PURPLE_MEDIA_AUDIO && type & PURPLE_MEDIA_VIDEO) {
        message = g_strdup_printf("%s wishes to start an audio/video session with you.", alias.c_str());
    } else if (type & PURPLE_MEDIA_AUDIO) {
        message = g_strdup_printf("%s wishes to start an audio session with you.", alias.c_str());
    } else if (type & PURPLE_MEDIA_VIDEO) {
        message = g_strdup_printf("%s wishes to start a video session with you.", alias.c_str());
    }

    /* purple_request_accept_cancel is a macro and calls _() to translates
     * buttons strings.
     * There isn't (yet?) any translation system in Minbif, so the _ macro
     * is defined to make minbif compiles. */
#define _
    purple_request_accept_cancel(media, "Incoming Call", message, NULL,
        PURPLE_DEFAULT_ACTION_NONE, account, alias.c_str(), NULL, media,
        minbif_media_accept_cb, minbif_media_reject_cb);
#undef _

    g_free(message);

    return FALSE;
}
/**
 * Sends a LOAD request for the given media over the channel. On success the
 * returned status is cached in _last_status; the full response is then
 * forwarded to the caller's callback, if one was provided.
 *
 * NOTE(review): the `autoplay` parameter is never used — the LOAD message
 * is built solely from media.ToMessage(). Confirm whether autoplay should
 * be included in the message payload.
 */
void MediaChannel::Load(const Media& media, bool autoplay, const MediaOperationCallback& callback)
{
    JsonMessage message;
    message["type"] = "LOAD";
    media.ToMessage(message);
    Request<MediaResponse>(move(message), [=](const MediaResponse& result) {
        if (result.Succeeded())
            _last_status = result.GetStatus();
        if (callback)
            callback(result);
    });
}
/*
 * Incrementally parses the YouTube RSS search reply as data arrives.
 * Accumulates title/description/link character data per <item>; when a
 * media:title element closes, a Media object is created from the collected
 * fields and appended to the model exposed to QML as "youtubeModel".
 * Non-2xx HTTP replies are ignored entirely.
 */
void YouTubeSearch::readyRead()
{
    int statusCode = m_reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt();
    if (statusCode >= 200 && statusCode < 300) {
        QString currentTag;
        QString linkString;
        QString descriptionString;
        QString titleString;
        // Feed the newly arrived bytes into the incremental XML reader;
        // parser state persists across readyRead calls.
        xml.addData(m_reply->readAll());
        while (!xml.atEnd()) {
            xml.readNext();
            if (xml.isStartElement()) {
                // <item link="..."> seeds the link; character data for a
                // <link> element may append to it below.
                if (xml.name() == QLatin1String("item"))
                    linkString = xml.attributes().value("link").toString();
                currentTag = xml.qualifiedName().toString();
            } else if (xml.isEndElement()) {
                // End of media:title marks one complete video entry.
                if (xml.qualifiedName() == QLatin1String("media:title")) {
                    QUrl url(linkString);
                    QString videoId = url.queryItemValue("v");
                    // Thumbnail host index 1-3 is picked at random.
                    QString videoImage = QString(YTIMG)
                        .arg(qrand() % 3 + 1)
                        .arg(videoId);
                    Media *media = new Media(m_context);
                    media->setTitle(titleString);
                    media->setDescription(descriptionString);
                    media->setId(videoId);
                    media->setImage(QUrl(videoImage));
                    media->setUrl(url);
                    m_objects << media;
                    // Re-publish the grown model to QML after each item.
                    m_context->setContextProperty("youtubeModel", QVariant::fromValue(m_objects));
                    currentTag.clear();
                    titleString.clear();
                    linkString.clear();
                    descriptionString.clear();
                }
            } else if (xml.isCharacters() && !xml.isWhitespace()) {
                // Accumulate character data for the fields we care about.
                if (currentTag == QLatin1String("media:title")) {
                    titleString += xml.text().toString();
                } else if (currentTag == QLatin1String("media:description")) {
                    descriptionString += xml.text().toString();
                } else if (currentTag == QLatin1String("link")) {
                    linkString += xml.text().toString();
                }
            }
        }
        // PrematureEndOfDocument is expected mid-stream; anything else is a
        // real parse error worth logging.
        if (xml.error() && xml.error() != QXmlStreamReader::PrematureEndOfDocumentError) {
            qWarning() << xml.lineNumber() << xml.errorString();
        }
    }
}
/**
 * Widget for one entry of the play queue. Shows the media title (click =
 * play), plus up/down/remove action icons. The item registers itself in
 * both the backing std::list and the widget container, and keeps the
 * up/down arrows hidden on the first/last items respectively.
 *
 * Fix vs. original: the remove handler used
 * queue.erase(std::remove(...)), which passes the single iterator returned
 * by std::remove to the one-element erase overload — a misuse of the
 * erase-remove idiom that only worked by accident for a single match.
 * std::list::remove(value) does exactly the intended removal.
 */
QueueItem::QueueItem(Media media, std::list< QueueItem* >& queue, WContainerWidget* container, Session* session, WContainerWidget* parent)
    : WContainerWidget(parent), PlaylistItem(media)
{
    QueueItem *queueItem = this;
    Dbo::Transaction t(*session);
    // Title anchor: clicking it asks the player to start this item.
    WAnchor *anchor = new WAnchor{this};
    anchor->addWidget(WW<WText>(media.title(t)).css("link-hand").onClick([=](WMouseEvent&){ playSignal.emit(this); }));
    WContainerWidget *actionsContainer = WW<WContainerWidget>(anchor).css("pull-right");
    // Hide the up arrow on the first item and the down arrow on the last.
    auto fixButtons = [=,&queue] {
        for(QueueItem *item: queue) {
            item->upButton->setHidden(item == queue.front());
            item->downButton->setHidden(item == queue.back());
        }
    };
    // Moves this item one slot (direction: -1 up, +1 down) in both the
    // backing list and the widget container, then refreshes the arrows.
    auto moveElement = [=,&queue](int direction) {
        auto element = std::find(begin(queue), end(queue), queueItem);
        auto nextElement = element;
        direction>0 ? nextElement++ : nextElement--;
        swap(*nextElement, *element);
        int index = container->indexOf(queueItem);
        container->removeWidget(queueItem);
        container->insertWidget(index + direction, queueItem);
        fixButtons();
    };
    actionsContainer->addWidget(upButton = WW<WImage>(Settings::staticPath("/icons/actions/up.png"))
        .css("link-hand").onClick([=,&queue](WMouseEvent){
            if(queue.front() == queueItem) return;
            moveElement(-1);
        }));
    actionsContainer->addWidget(downButton = WW<WImage>(Settings::staticPath("/icons/actions/down.png"))
        .css("link-hand").onClick([=,&queue](WMouseEvent){
            if(queue.back() == queueItem) return;
            moveElement(+1);
        }));
    actionsContainer->addWidget(removeButton = WW<WImage>(Settings::staticPath("/icons/actions/delete.png"))
        .css("link-hand").onClick([=,&queue](WMouseEvent){
            queue.remove(queueItem);
            delete queueItem;
            fixButtons();
        }));
    // Keep layout stable when the arrows are hidden.
    upButton->setHiddenKeepsGeometry(true);
    downButton->setHiddenKeepsGeometry(true);
    container->addWidget(this);
    queue.push_back(this);
    fixButtons();
}
// Two Media objects are equal exactly when their uids match.
bool Media::operator==( const Media &other ) const
{
    return uid() == other.uid();
}
void Story(int& sceen){ Texture message = Texture("res/mese-ji.png"); Texture jijii = Texture("res/jijii.png"); Font font = ("res/meiryo.ttc"); std::string text1 = " "; std::string text2 = " "; Media ok = Media("res/se/kettei_.wav"); Media bgm = Media("res/se/sontyou.wav"); int count = 0; bgm.looping(1); bgm.play(); ok.play(); while (sceen == 1) { if (!env.isOpen()) exit(0); env.begin(); env.bgColor(Color::black); if (env.isPushKey(GLFW_KEY_ENTER)){ count++; ok.play(); if (count == 12){ sceen = 2; } } switch (count) { case 0: break; case 1: text1 = "こんばんは"; text2 = "わしはこの村の村長じゃ"; break; case 2: text1 = "すまないこの村は今"; text2 = "何もないのじゃ"; break; case 3: text1 = "この村は前まではいろんな建物が"; text2 = "あって栄えていたんじゃが"; break; case 4: text1 = "数日前にワニに襲われて"; text2 = "村は一気に崩壊してもうたのじゃ"; break; case 5: text1 = "えっこの村を救ってくれるのか"; text2 = "本当にありがたいのぉー"; break; case 6: text1 = "おぬし名はなんと申す?"; text2 = " "; break; case 7: text1 = "そうか金山かぁ"; text2 = ""; break; case 8: text1 = "基本的にWASDとENTERキー"; text2 = "を使用するのじゃ"; break; case 9: text1 = "お金を効率的に稼ぐとステータスを"; text2 = "上げることができるじゃ"; break; case 10: text1 = "10年ごと40年間、強い敵が来るで"; text2 = "それまでに備えて強くなっておくのじゃ"; break; case 11: text1 = "最初はまずわしの家に来て"; text2 = "お金稼ぎじゃ"; break; } drawTextureBox(-1024, -312, 2048, 1024, 0, 0, 2048, 1024, jijii); drawTextureBox(-1024, -512, 2048, 1024, 0, 0, 2048, 1024, message); font.size(100); font.draw(text1, Vec2f(-900, -300), Color::white); font.draw(text2, Vec2f(-900, -400), Color::white); font.size(30); font.draw("PUSH ENTER", Vec2f(WIDTH / 2, -HEIGHT / 2+30) - font.drawSize("PSHU ENTER"), Color::white); env.end(); } }
// V8 binding for Media.matchMedium(mediaquery): unwrap the native Media
// from the call holder, forward the query string, and box the boolean
// result for JavaScript.
static v8::Handle<v8::Value> matchMediumCallback(const v8::Arguments& args)
{
    INC_STATS("DOM.Media.matchMedium");
    Media* media = V8Media::toNative(args.Holder());
    V8Parameter<> mediaquery = args[0];
    return v8Boolean(media->matchMedium(mediaquery));
}
// V8 accessor for the Media.type attribute: unwrap the native Media from
// the property holder and return its type as a JavaScript string.
static v8::Handle<v8::Value> typeAttrGetter(v8::Local<v8::String> name, const v8::AccessorInfo& info)
{
    INC_STATS("DOM.Media.type._get");
    Media* media = V8Media::toNative(info.Holder());
    return v8String(media->type());
}
/**
 * Streams an MP3 sample of the given file to the client. Only "mp3" files
 * are supported; the sample time range comes from getSampleRange(). The
 * response strips the original ID3 tag, cuts the sample time range, and
 * embeds a fresh ID3 tag. For HTTP/1.0 clients (no chunked encoding) the
 * sample is pre-scanned once to compute an exact Content-Length, which is
 * cached on the media. Returns true when a sample was sent.
 */
bool SampleService::sendFile(BtpAction* action, FileInfo& fi)
{
    bool rval = false;

    // FIXME: only audio supported at this time
    // FIXME: get content type for file info instead of "extension"
    const char* extension = fi["extension"]->getString();
    if((rval = (strcmp(extension, "mp3") == 0)))
    {
        // ensure file exists, get sample range for media
        File file(fi["path"]->getString());
        Media media;
        BM_ID_SET(media["id"], BM_MEDIA_ID(fi["mediaId"]));
        if((rval = (file->exists() && getSampleRange(media))))
        {
            // set up response header
            HttpResponseHeader* header = action->getResponse()->getHeader();
            header->setField("Content-Type", "audio/mpeg");
            header->setField(
                "Content-Disposition", "attachment; filename=bitmunk-sample.mp3");

            // calculate content length for sample for HTTP/1.0
            if(strcmp(
                action->getRequest()->getHeader()->getVersion(), "HTTP/1.0") == 0)
            {
                // check for cached sample content length
                int64_t contentLength = 0;
                if(media->hasMember("sampleContentLength"))
                {
                    contentLength = media["sampleContentLength"]->getUInt64();
                }
                else
                {
                    // create time parser to produce sample
                    MpegAudioTimeParser matp;
                    matp.addTimeSet(
                        media["sampleRange"][0]->getUInt32(),
                        media["sampleRange"][1]->getUInt32());

                    // FIXME: build a method into the catalog that will return
                    // the content-length for a sample

                    // read data, strip id3 tag, parse mp3 data, add new id3 tag
                    // get content length: run the full mutator pipeline once,
                    // discarding bytes, just to count the output size.
                    Id3v2Tag tag(media);
                    Id3v2TagWriter stripper(NULL);
                    Id3v2TagWriter embedder(
                        &tag, false, tag.getFrameSource(), false);
                    FileInputStream fis(file);
                    MutatorInputStream strip(&fis, false, &stripper, false);
                    MutatorInputStream parse(&strip, false, &matp, false);
                    MutatorInputStream embed(&parse, false, &embedder, false);
                    int64_t numBytes;
                    while((numBytes = embed.skip(file->getLength())) > 0)
                    {
                        contentLength += numBytes;
                    }
                    embed.close();

                    if(numBytes != -1)
                    {
                        // cache content length (only on a clean full read)
                        media["sampleContentLength"] = contentLength;
                    }
                }

                // replace chunked encoding with content length
                header->setField("Connection", "close");
                header->removeField("Transfer-Encoding");
                header->removeField("TE");
                header->setField("Content-Length", contentLength);
            }

            // create time parser to produce sample
            MpegAudioTimeParser matp;
            matp.addTimeSet(
                media["sampleRange"][0]->getUInt32(),
                media["sampleRange"][1]->getUInt32());

            // read data, strip id3 tag, embed new id3 tag, produce sample
            Id3v2Tag tag(media);
            Id3v2TagWriter stripper(NULL);
            Id3v2TagWriter embedder(
                &tag, false, tag.getFrameSource(), false);
            FileInputStream fis(file);
            MutatorInputStream strip(&fis, false, &stripper, false);
            MutatorInputStream parse(&strip, false, &matp, false);
            MutatorInputStream embed(&parse, false, &embedder, false);

            // send sample, remove any special encoding to prevent compression
            action->getRequest()->getHeader()->removeField("Accept-Encoding");
            action->getResponse()->getHeader()->setStatus(200, "OK");
            action->sendResult(&embed);

            // close stream
            embed.close();
        }
    }

    return rval;
}
/**
 * Populates `media` with its sample range (start/end seconds), adjusted
 * length, and a "- Bitmunk Sample" title suffix. Results are served from a
 * size-capped in-memory cache when possible; on a miss the media record is
 * fetched from Bitmunk and the parsed "start-end" sampleRange string is
 * validated. Returns true when a usable media record was obtained.
 */
bool SampleService::getSampleRange(Media& media)
{
    bool rval = false;

    // FIXME: change cache to just store sample range array? not entire media?
    // keep in mind that the code currently uses other information from
    // the media like the performer and title for the sample id3v2 tags

    // check with cache
    bool found = false;
    mSampleRangeCacheLock.lock();
    {
        if(mSampleRangeCache["cache"]->hasMember(media["id"]->getString()))
        {
            // cache hit: replace the caller's media with the cached record
            media = mSampleRangeCache["cache"][media["id"]->getString()];
            rval = found = true;
        }
    }
    mSampleRangeCacheLock.unlock();

    if(!found)
    {
        // get sample range for media from bitmunk
        Url url;
        url.format("/api/3.0/media/%" PRIu64, BM_MEDIA_ID(media["id"]));
        StringTokenizer st;
        if((rval = mNode->getMessenger()->getFromBitmunk(&url, media)))
        {
            // validate sample range: expected format is "start-end"
            st.tokenize(media["sampleRange"]->getString(), '-');
            if((rval = (st.getTokenCount() == 2)))
            {
                // set sample range start and end
                media["sampleRange"][0] =
                    (uint32_t)strtoul(st.nextToken(), NULL, 10);
                media["sampleRange"][1] =
                    (uint32_t)strtoul(st.nextToken(), NULL, 10);

                // update media length to use sample length
                int sampleLength =
                    media["sampleRange"][1]->getUInt32() -
                    media["sampleRange"][0]->getUInt32();
                media["length"] = sampleLength;

                // update media title to include "- Bitmunk Sample"
                string title = media["title"]->getString();
                title.append(" - Bitmunk Sample");
                media["title"] = title.c_str();

                // update cache
                mSampleRangeCacheLock.lock();
                {
                    if(mSampleRangeCache["cache"]->length() + 1 >=
                        mSampleRangeCache["capacity"]->getInt32())
                    {
                        // clear cache, capacity reached (simple full flush
                        // rather than eviction)
                        mSampleRangeCache->clear();
                    }
                    mSampleRangeCache["cache"][media["id"]->getString()] = media;
                }
                mSampleRangeCacheLock.unlock();
            }
            else
            {
                // no sample range available
                media["sampleRange"][0] = 0;
                media["sampleRange"][1] = 0;
            }

            // clear any sample content-length (it is recomputed per sample)
            media->removeMember("sampleContentLength");
        }
    }

    return rval;
}
// tgl callback invoked when a thumbnail download finishes.
// `callback_extra` carries the Media* that requested the download; the
// downloaded file becomes its thumbnail URL and listeners are notified.
// NOTE(review): `success` is never checked — on a failed download
// `filename` may be invalid or empty; confirm whether an early return is
// needed. `TLS` is unused.
void Media::_loadThumbnailCallback(struct tgl_state *TLS, void *callback_extra, int success, char *filename)
{
    Media* photo = (Media*)callback_extra;
    photo->m_thumbnail = QUrl::fromLocalFile(filename);
    emit photo->thumbnailChanged();
}