Example #1
TrackInfo
ITunesDevice::nextTrack()
{
    while ( m_handler->trackCount() < 20 && !m_file->atEnd() )
    {
        m_xmlInput->setData( m_file->read( 32768 ) );
        m_xmlReader->parseContinue();

        emit progress( (float)( (float)m_file->pos() / (float)m_file->size() ) * 100.0, m_handler->peekTrack() );
    }

    TrackInfo t = m_handler->takeTrack();
    if ( !t.isEmpty() )
    {
        return t;
    }

    if ( m_file->atEnd() )
    {
        // Finished with the database, let's close our stuff
        qDebug() << "Finished reading";

        m_file->close();
        delete m_file;
        m_file = 0;
    }

    return TrackInfo();
}
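The loop above buffers at most 20 parsed tracks per call and, once the whole database has been read, signals completion by returning an empty TrackInfo. A minimal caller sketch under that reading; drainDevice() and scrobble() are assumed names, only the "empty TrackInfo means finished" contract is taken from the function above.

// Hypothetical consumer of ITunesDevice::nextTrack(); drainDevice() and
// scrobble() are assumed names, not part of the original code.
void drainDevice( ITunesDevice& device )
{
    while ( true )
    {
        TrackInfo t = device.nextTrack();
        if ( t.isEmpty() )
            break;          // the whole iTunes database has been read
        scrobble( t );      // hypothetical consumer of each track
    }
}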
Example #2
 void
 AddMediaFormatChecker(const TrackInfo& aTrackConfig)
 {
   if (aTrackConfig.IsVideo()) {
     auto mimeType = aTrackConfig.GetAsVideoInfo()->mMimeType;
     RefPtr<MediaByteBuffer> extraData =
       aTrackConfig.GetAsVideoInfo()->mExtraData;
     AddToCheckList([mimeType, extraData]() {
       if (MP4Decoder::IsH264(mimeType)) {
         mp4_demuxer::SPSData spsdata;
         // WMF H.264 Video Decoder and Apple ATDecoder
         // do not support YUV444 format.
         // For consistency, all decoders should be checked.
         if (mp4_demuxer::H264::DecodeSPSFromExtraData(extraData, spsdata)
             && (spsdata.profile_idc == 244 /* Hi444PP */
                 || spsdata.chroma_format_idc == PDMFactory::kYUV444)) {
           return CheckResult(
             SupportChecker::Reason::kVideoFormatNotSupported,
             MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                         RESULT_DETAIL("Decoder may not have the capability "
                                       "to handle the requested video format "
                                       "with YUV444 chroma subsampling.")));
         }
       }
       return CheckResult(SupportChecker::Reason::kSupported);
     });
   }
 }
Example #3
void StreamTable::onActivate(wxListEvent& event) {
    // find out what we have got in our second column:
    long index = event.GetIndex();
    const wxString& loc = getLocation(index);
    if (loc.IsEmpty()) {
        std::cerr << "Location is invalid (empty)" << std::endl;
        return;
    }

    // Allocate the TrackInfo outside the try block so that it can still be
    // deleted if setLocation() throws.
    TrackInfo* onDaHeap = new TrackInfo();
    try {
        onDaHeap->setLocation(loc);
        // Attach a pointer to the `onDaHeap' TrackInfo object to the event.
        // Later on (see the `TrackStatusHandler::onStreamActivated' function), we
        // can cast it back to a TrackInfo* and work with it. That function must
        // also delete that very same pointer, or else it is leaked.
        event.SetClientObject(onDaHeap);

        // make sure this event is handled by event handlers up in the chain.
        event.Skip();
    } catch (AudioException& ex) {
        // setLocation() failed, so nothing downstream will ever own the object.
        delete onDaHeap;
        wxString msg;
        msg << wxT("Unable to open the URL `") << loc << wxT("'.\n\n");
        msg << wxT("GStreamer error description:\n") << ex.getAsWxString();
        wxMessageDialog dlg(this, msg, wxT("Error"), wxICON_ERROR | wxOK);
        dlg.ShowModal();
    }
}
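For context, a sketch of the receiving side that the comment above refers to. The exact signature of TrackStatusHandler::onStreamActivated() and the playback step are assumptions, and it presumes TrackInfo derives from wxClientData (which SetClientObject() requires); only the cast-back-and-delete ownership rule is taken from the comment.

// Hypothetical sketch of TrackStatusHandler::onStreamActivated(); only the
// ownership pattern (cast back, use, delete) comes from the example above.
void TrackStatusHandler::onStreamActivated(wxListEvent& event) {
    TrackInfo* info = static_cast<TrackInfo*>(event.GetClientObject());
    if (info == NULL) {
        return;
    }
    // ... start playback of the stream at info->getLocation() here ...
    delete info;   // we own the pointer set in StreamTable::onActivate()
}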
Example #4
void
WMFVideoMFTManager::ConfigurationChanged(const TrackInfo& aConfig)
{
  MOZ_ASSERT(aConfig.GetAsVideoInfo());
  mVideoInfo = *aConfig.GetAsVideoInfo();
  mImageSize = mVideoInfo.mImage;
}
Example #5
File: main.cpp Project: krpors/navi
void TrackStatusHandler::onPrev(wxCommandEvent& event) {
    TrackTable* tt = m_mainFrame->getTrackTable();
    TrackInfo info = tt->getPrev(true);
    if (info.isValid()) {
        m_playedTrack = info;
        play();
    }
}
Example #6
File: main.cpp Project: krpors/navi
void TrackStatusHandler::onNext(wxCommandEvent& event) {
    TrackTable* tt = m_mainFrame->getTrackTable();
    TrackInfo info = tt->getNext(true);
    if (info.isValid()) {
        std::cout << "Invoked by wxThread." << std::endl;
        m_playedTrack = info;
        play();
    }
}
Example #7
void
WizardBootstrapPage::onTrackFound( int percentage, const TrackInfo& track )
{
    uiProgress.progressBar->setValue( percentage );

    if ( !track.isEmpty() )
        uiProgress.bottomLabel->setText( tr( "Found" ) + QString( ": %1 - %2" )
                                         .arg( track.artist() ).arg( track.track() ) );

    qApp->processEvents();
}
Example #8
TrackInfo
ITunesLibrary::Track::trackInfo() const
{
    // NOTE we only fetch the data we require for scrobbling, though we could fill in
    // more of the TrackInfo object
    TrackInfo t;
    t.setSource( TrackInfo::MediaDevice );

    QString source;
    if (!m_sourcePersistentId.isEmpty())
        source = "tell first source whose persistent ID is '" + m_sourcePersistentId + "' to ";

    // TODO compile once and pass pid as argument
    // NOTE trust me, the code is ugly, but doesn't work any other way, don't clean it up!
    AppleScript script;
    script << "tell application 'iTunes'"
        <<     source + "set lib to library playlist 1"
        <<     "set t to first track of lib whose persistent ID is '" + persistentId() + "'"
		<<     "if (get class of t) is list then set t to item 1 of t"
        <<     "set d to played date of t"
        << "end tell"

        << "try"
        <<     "set d to (year of d) & ':' & "
        "((month of d) as integer) & ':' & "
        "(day of d) & ':' & "
        "(time of d)"
        << "end try"

        << "tell application 'iTunes' to tell t"
        <<     "set l to location"
        <<     "try" << "set l to POSIX path of l" << "end try"
        <<     "return artist & '\n' & name & '\n' & (duration as integer)  & '\n' & album & '\n' & played count  & '\n' & d & '\n' & l"
        << "end tell";

    QString out = script.exec();
    QTextStream s( &out, QIODevice::ReadOnly );

    t.setArtist( s.readLine() );
    t.setTrack( s.readLine() );
    t.setDuration( (uint) s.readLine().toFloat() );
    t.setAlbum( s.readLine() );
    t.setPlayCount( s.readLine().toInt() );
    t.setTimeStamp( qDateTimeFromScriptString( s.readLine() ).toTime_t() );

    QFileInfo fileinfo( s.readLine() );
    t.setFileName( fileinfo.fileName() );
    t.setPath( fileinfo.absolutePath() );

    return t;
}
Example #9
void FingerprinterProgressBar::setCurrentTrack( TrackInfo& track )
{
    QString elidedPath =
        ui.trackLabel->fontMetrics().elidedText(
            track.path(), Qt::ElideMiddle, ui.trackLabel->width() );
    ui.trackLabel->setText( elidedPath );
}
Example #10
PlatformDecoderModule::ConversionRequired
AndroidDecoderModule::DecoderNeedsConversion(const TrackInfo& aConfig) const
{
  if (aConfig.IsVideo()) {
    return ConversionRequired::kNeedAnnexB;
  }
  return ConversionRequired::kNeedNone;
}
Example #11
PlatformDecoderModule::ConversionRequired
WMFDecoderModule::DecoderNeedsConversion(const TrackInfo& aConfig) const
{
  if (aConfig.IsVideo() && MP4Decoder::IsH264(aConfig.mMimeType)) {
    return ConversionRequired::kNeedAnnexB;
  } else {
    return ConversionRequired::kNeedNone;
  }
}
Example #12
PlatformDecoderModule::ConversionRequired
EMEDecoderModule::DecoderNeedsConversion(const TrackInfo& aConfig) const
{
  if (aConfig.IsVideo()) {
    return kNeedAVCC;
  } else {
    return kNeedNone;
  }
}
Example #13
 ConversionRequired
 DecoderNeedsConversion(const TrackInfo& aConfig) const override
 {
   if (aConfig.IsVideo() && MP4Decoder::IsH264(aConfig.mMimeType)) {
     return kNeedAVCC;
   } else {
     return kNeedNone;
   }
 }
Example #14
PlatformDecoderModule::ConversionRequired
GMPDecoderModule::DecoderNeedsConversion(const TrackInfo& aConfig) const
{
  // GMPVideoCodecType::kGMPVideoCodecH264 specifies that encoded frames must be in AVCC format.
  if (aConfig.IsVideo() && MP4Decoder::IsH264(aConfig.mMimeType)) {
    return ConversionRequired::kNeedAVCC;
  } else {
    return ConversionRequired::kNeedNone;
  }
}
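The DecoderNeedsConversion() variants above all answer the same question: whether H.264 samples must be rewritten (to Annex B or AVCC) before they reach the decoder. A hedged illustration of how a caller could act on that answer follows; PrepareSampleForDecoder() and the two conversion helpers are hypothetical, only the enum values and the DecoderNeedsConversion() call are taken from the examples.

// Illustration only (not Mozilla's actual reader code) of acting on
// DecoderNeedsConversion(); ConvertSampleToAnnexB() and ConvertSampleToAVCC()
// are hypothetical helpers standing in for whatever converter the real
// pipeline uses.
static void
PrepareSampleForDecoder(PlatformDecoderModule* aPDM,
                        const TrackInfo& aConfig,
                        MediaRawData* aSample)
{
  switch (aPDM->DecoderNeedsConversion(aConfig)) {
    case PlatformDecoderModule::ConversionRequired::kNeedAnnexB:
      ConvertSampleToAnnexB(aSample);   // hypothetical helper
      break;
    case PlatformDecoderModule::ConversionRequired::kNeedAVCC:
      ConvertSampleToAVCC(aSample);     // hypothetical helper
      break;
    default:
      break;                            // kNeedNone: pass the sample through as-is
  }
}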
Example #15
already_AddRefed<MediaDataDecoder>
PlatformDecoderModule::CreateDecoder(const TrackInfo& aConfig,
                                     FlushableTaskQueue* aTaskQueue,
                                     MediaDataDecoderCallback* aCallback,
                                     layers::LayersBackend aLayersBackend,
                                     layers::ImageContainer* aImageContainer)
{
  nsRefPtr<MediaDataDecoder> m;

  bool hasPlatformDecoder = SupportsMimeType(aConfig.mMimeType);

  if (aConfig.GetAsAudioInfo()) {
    if (!hasPlatformDecoder && VorbisDataDecoder::IsVorbis(aConfig.mMimeType)) {
      m = new VorbisDataDecoder(*aConfig.GetAsAudioInfo(),
                                aTaskQueue,
                                aCallback);
    } else if (!hasPlatformDecoder && OpusDataDecoder::IsOpus(aConfig.mMimeType)) {
      m = new OpusDataDecoder(*aConfig.GetAsAudioInfo(),
                              aTaskQueue,
                              aCallback);
    } else {
      m = CreateAudioDecoder(*aConfig.GetAsAudioInfo(),
                             aTaskQueue,
                             aCallback);
    }
    return m.forget();
  }

  if (!aConfig.GetAsVideoInfo()) {
    return nullptr;
  }

  if (H264Converter::IsH264(aConfig)) {
    m = new H264Converter(this,
                          *aConfig.GetAsVideoInfo(),
                          aLayersBackend,
                          aImageContainer,
                          aTaskQueue,
                          aCallback);
  } else if (!hasPlatformDecoder && VPXDecoder::IsVPX(aConfig.mMimeType)) {
    m = new VPXDecoder(*aConfig.GetAsVideoInfo(),
                       aImageContainer,
                       aTaskQueue,
                       aCallback);
  } else {
    m = CreateVideoDecoder(*aConfig.GetAsVideoInfo(),
                           aLayersBackend,
                           aImageContainer,
                           aTaskQueue,
                           aCallback);
  }
  return m.forget();
}
Example #16
PlatformDecoderModule::ConversionRequired
WMFDecoderModule::DecoderNeedsConversion(const TrackInfo& aConfig) const
{
  if (aConfig.IsVideo() &&
      (aConfig.mMimeType.EqualsLiteral("video/avc") ||
       aConfig.mMimeType.EqualsLiteral("video/mp4"))) {
    return kNeedAnnexB;
  } else {
    return kNeedNone;
  }
}
Example #17
void
TagDialog::setTrack( const TrackInfo& track )
{
    m_metaData = track;
    onTagTypeChanged( ui.tagTypeBox->currentIndex() );

    // We can't tag album if there isn't one
    if ( track.album().isEmpty() )
        ui.tagTypeBox->removeItem( 2 );

    ui.spinner->show();
}
Example #18
 void
 AddMediaFormatChecker(const TrackInfo& aTrackConfig)
 {
   if (aTrackConfig.IsVideo()) {
   auto mimeType = aTrackConfig.GetAsVideoInfo()->mMimeType;
   RefPtr<MediaByteBuffer> extraData = aTrackConfig.GetAsVideoInfo()->mExtraData;
   AddToCheckList(
     [mimeType, extraData]() {
       if (MP4Decoder::IsH264(mimeType)) {
         mp4_demuxer::SPSData spsdata;
         // WMF H.264 Video Decoder and Apple ATDecoder
         // do not support YUV444 format.
         // For consistency, all decoders should be checked.
         if (mp4_demuxer::H264::DecodeSPSFromExtraData(extraData, spsdata) &&
             spsdata.chroma_format_idc == PDMFactory::kYUV444) {
           return SupportChecker::Result::kVideoFormatNotSupported;
         }
       }
       return SupportChecker::Result::kSupported;
     });
   }
 }
Example #19
nsresult
WMFMediaDataDecoder::ConfigurationChanged(const TrackInfo& aConfig)
{
  MOZ_ASSERT(mCallback->OnReaderTaskQueue());

  nsCOMPtr<nsIRunnable> runnable =
    NS_NewRunnableMethodWithArg<UniquePtr<TrackInfo>&&>(
    this,
    &WMFMediaDataDecoder::ProcessConfigurationChanged,
    aConfig.Clone());
  mTaskQueue->Dispatch(runnable.forget());
  return NS_OK;
}
Example #20
void ControlWidget::handleTrackPlaying( const TrackInfo &trackInfo )
{
   /* pass through to track info widget */
   mpPlaylist->setTrackInfo( trackInfo );
   QString title( trackInfo.displayString( Settings::value( Settings::PartymanNamePattern ) ) );
   QString bubble( trackInfo.displayString( Settings::value( Settings::PartymanTrayIconPattern ) ) );
   if( trackInfo.mTitle.isEmpty() )
   {
      title  = QApplication::applicationName();
      bubble = trackInfo.mDirectory;
      bubble.append( '\n' );
      bubble.append( trackInfo.mFileName );
   }
   PartymanMainWindow::setIconAndTitle( this, mPlayIcon, title );
   mpTrayIcon->setToolTip( bubble );
   if( Settings::value( Settings::PartymanTrayIcon ) &&
       Settings::value( Settings::PartymanTrayIconBubble ) &&
       QSystemTrayIcon::supportsMessages() )
   {
      mpTrayIcon->showMessage( tr("Now Playing:"), bubble,
         (QSystemTrayIcon::MessageIcon)(Settings::value( Settings::PartymanTrayIconBubbleIcon ) ),
         (int)(Settings::value( Settings::PartymanTrayIconBubbleTime ) * 1000) );
   }
}
Example #21
already_AddRefed<MediaDataDecoder>
PlatformDecoderModule::CreateDecoder(const TrackInfo& aConfig,
                                     FlushableTaskQueue* aTaskQueue,
                                     MediaDataDecoderCallback* aCallback,
                                     layers::LayersBackend aLayersBackend,
                                     layers::ImageContainer* aImageContainer)
{
  nsRefPtr<MediaDataDecoder> m;

  if (aConfig.GetAsAudioInfo()) {
    m = CreateAudioDecoder(*aConfig.GetAsAudioInfo(),
                           aTaskQueue,
                           aCallback);
    return m.forget();
  }

  if (!aConfig.GetAsVideoInfo()) {
    return nullptr;
  }

  if (H264Converter::IsH264(aConfig)) {
    m = new H264Converter(this,
                          *aConfig.GetAsVideoInfo(),
                          aLayersBackend,
                          aImageContainer,
                          aTaskQueue,
                          aCallback);
  } else {
    m = CreateVideoDecoder(*aConfig.GetAsVideoInfo(),
                           aLayersBackend,
                           aImageContainer,
                           aTaskQueue,
                           aCallback);
  }
  return m.forget();
}
Example #22
bool
WMFDecoderModule::Supports(const TrackInfo& aTrackInfo,
                           DecoderDoctorDiagnostics* aDiagnostics) const
{
  if ((aTrackInfo.mMimeType.EqualsLiteral("audio/mp4a-latm") ||
       aTrackInfo.mMimeType.EqualsLiteral("audio/mp4")) &&
       WMFDecoderModule::HasAAC()) {
    return true;
  }
  if (MP4Decoder::IsH264(aTrackInfo.mMimeType) && WMFDecoderModule::HasH264()) {
    const VideoInfo* videoInfo = aTrackInfo.GetAsVideoInfo();
    MOZ_ASSERT(videoInfo);
    // Check Windows format constraints, based on:
    // https://msdn.microsoft.com/en-us/library/windows/desktop/dd797815(v=vs.85).aspx
    if (IsWin8OrLater()) {
      // Windows >7 supports at most 4096x2304.
      if (videoInfo->mImage.width > 4096 || videoInfo->mImage.height > 2304) {
        return false;
      }
    } else {
      // Windows <=7 supports at most 1920x1088.
      if (videoInfo->mImage.width > 1920 || videoInfo->mImage.height > 1088) {
        return false;
      }
    }
    return true;
  }
  if (aTrackInfo.mMimeType.EqualsLiteral("audio/mpeg") &&
      CanCreateWMFDecoder<CLSID_CMP3DecMediaObject>()) {
    return true;
  }
  if (MediaPrefs::PDMWMFIntelDecoderEnabled() && sDXVAEnabled) {
    if (VPXDecoder::IsVP8(aTrackInfo.mMimeType) &&
        CanCreateWMFDecoder<CLSID_WebmMfVp8Dec>()) {
      return true;
    }
    if (VPXDecoder::IsVP9(aTrackInfo.mMimeType) &&
        CanCreateWMFDecoder<CLSID_WebmMfVp9Dec>()) {
      return true;
    }
  }

  // Some unsupported codec.
  return false;
}
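A hedged sketch of a caller that gates decoder creation on the Supports() check above; the function and variable names are assumptions, while the two member calls and their parameters follow the signatures shown elsewhere in this listing.

// Hypothetical gatekeeper combining Supports() with the CreateDecoder()
// signature from the earlier examples; all names here are assumptions.
static already_AddRefed<MediaDataDecoder>
MaybeCreateWMFDecoder(WMFDecoderModule* aModule,
                      const TrackInfo& aConfig,
                      FlushableTaskQueue* aTaskQueue,
                      MediaDataDecoderCallback* aCallback,
                      layers::LayersBackend aLayersBackend,
                      layers::ImageContainer* aImageContainer)
{
  if (!aModule->Supports(aConfig, /* aDiagnostics = */ nullptr)) {
    return nullptr;   // unsupported codec, or the video exceeds the WMF size limits
  }
  return aModule->CreateDecoder(aConfig, aTaskQueue, aCallback,
                                aLayersBackend, aImageContainer);
}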
Example #23
void
GrowlNotifyExtension::onAppEvent( int e, const QVariant& data )
{
    switch (e) 
    {
        case Event::PlaybackStarted:
        case Event::TrackChanged:
            break;
        default:
            return;
    }

    // conveniently, this determines if Growl is installed for us
    if (UnicornUtils::isGrowlInstalled() == false)
        return;

    TrackInfo metaData = data.value<TrackInfo>();

    #define APPEND_IF_NOT_EMPTY( x ) if (!x.isEmpty()) description += x + '\n';
    QString description;
    APPEND_IF_NOT_EMPTY( metaData.artist() );
    APPEND_IF_NOT_EMPTY( metaData.album() );
    description += metaData.durationString();

    QString title = metaData.track();
    if (title.isEmpty())
        title = QFileInfo( metaData.path() ).fileName();

    AppleScript script;
    script << "tell application 'GrowlHelperApp'"
           <<     "register as application 'Last.fm'"
                          " all notifications {'Track Notification'}"
                          " default notifications {'Track Notification'}"
                          " icon of application 'Last.fm.app'"
           <<     "notify with name 'Track Notification'"
                          " title " + AppleScript::asUnicodeText( title ) +
                          " description " + AppleScript::asUnicodeText( description ) + 
                          " application name 'Last.fm'"
           << "end tell";
    script.exec();
}
Example #24
already_AddRefed<MediaDataDecoder>
PlatformDecoderModule::CreateDecoder(const TrackInfo& aConfig,
                                     FlushableTaskQueue* aTaskQueue,
                                     MediaDataDecoderCallback* aCallback,
                                     layers::LayersBackend aLayersBackend,
                                     layers::ImageContainer* aImageContainer)
{
    nsRefPtr<MediaDataDecoder> m;

    bool hasPlatformDecoder = SupportsMimeType(aConfig.mMimeType);

    if (aConfig.GetAsAudioInfo()) {
        if (!hasPlatformDecoder && VorbisDataDecoder::IsVorbis(aConfig.mMimeType)) {
            m = new VorbisDataDecoder(*aConfig.GetAsAudioInfo(),
                                      aTaskQueue,
                                      aCallback);
        } else if (!hasPlatformDecoder && OpusDataDecoder::IsOpus(aConfig.mMimeType)) {
            m = new OpusDataDecoder(*aConfig.GetAsAudioInfo(),
                                    aTaskQueue,
                                    aCallback);
        } else {
            m = CreateAudioDecoder(*aConfig.GetAsAudioInfo(),
                                   aTaskQueue,
                                   aCallback);
        }
        return m.forget();
    }

    if (!aConfig.GetAsVideoInfo()) {
        return nullptr;
    }

    MediaDataDecoderCallback* callback = aCallback;
    nsRefPtr<DecoderCallbackFuzzingWrapper> callbackWrapper;
    if (sEnableFuzzingWrapper) {
        callbackWrapper = new DecoderCallbackFuzzingWrapper(aCallback);
        callbackWrapper->SetVideoOutputMinimumInterval(
            TimeDuration::FromMilliseconds(sVideoOutputMinimumInterval_ms));
        callbackWrapper->SetDontDelayInputExhausted(sDontDelayInputExhausted);
        callback = callbackWrapper.get();
    }

    if (H264Converter::IsH264(aConfig)) {
        m = new H264Converter(this,
                              *aConfig.GetAsVideoInfo(),
                              aLayersBackend,
                              aImageContainer,
                              aTaskQueue,
                              callback);
    } else if (!hasPlatformDecoder && VPXDecoder::IsVPX(aConfig.mMimeType)) {
        m = new VPXDecoder(*aConfig.GetAsVideoInfo(),
                           aImageContainer,
                           aTaskQueue,
                           callback);
    } else {
        m = CreateVideoDecoder(*aConfig.GetAsVideoInfo(),
                               aLayersBackend,
                               aImageContainer,
                               aTaskQueue,
                               callback);
    }

    if (callbackWrapper && m) {
        m = new DecoderFuzzingWrapper(m.forget(), callbackWrapper.forget());
    }

    return m.forget();
}
Example #25
void RecurseWorker::updateTrackInfo( const TrackInfo &trackInfo )
{
   bool tagsChanged     = false;
   TrackInfo ti         = trackInfo;
   TrackInfo oldTags;
   QString oldpath( trackInfo.mDirectory + "/" + trackInfo.mFileName );

   mTagMap.clear();
   {
      TagLib::FileRef f( oldpath.toLocal8Bit().data() );
      if( f.file() )
      {
         oldTags = trackInfo;

         TagLib::AudioProperties *audioProperties = f.audioProperties();
         TagLib::Tag *tag = f.tag();
         if( audioProperties )
         {
            oldTags.mPlayTime = audioProperties->length();
         }
         if( tag )
         {
            oldTags.mArtist  = QString::fromUtf8( tag->artist().toCString( true ) );
            oldTags.mTitle   = QString::fromUtf8( tag->title().toCString( true ) );
            oldTags.mAlbum   = QString::fromUtf8( tag->album().toCString( true ) );
            oldTags.mTrackNr = tag->track();
            oldTags.mYear    = tag->year();
            oldTags.mGenre   = QString::fromUtf8( tag->genre().toCString( true ) );
         }
         else
         {
            emit error( tr("Could not read tags"), oldpath );
         }
         ti = oldTags;
      }
      else
      {
         emit error( tr("Could not read file"), oldpath );
      }
   }

   switch( mMode )
   {
   case ModeSetTags:
      if( mSetArtist )
      {
         ti.mArtist = mTrackInfo.mArtist;
      }
      if( mSetTitle )
      {
         ti.mTitle.append( " " );
         ti.mTitle.append( mTrackInfo.mTitle );
      }
      if( mSetAlbum )
      {
         ti.mAlbum = mTrackInfo.mAlbum;
      }
      if( mSetYear )
      {
         ti.mYear = mTrackInfo.mYear;
      }
      if( mSetGenre )
      {
         ti.mGenre = mTrackInfo.mGenre;
      }
      tagsChanged = (ti != oldTags);
      if( mSetFlags )
      {
         if( mTrackInfo.isFlagged( TrackInfo::Favorite ) )
         {
            ti.setFlag( TrackInfo::Favorite, true );
         }
         if( mTrackInfo.isFlagged( TrackInfo::Unwanted ) )
         {
            ti.setFlag( TrackInfo::Unwanted, true );
         }
      }
      if( mUnsetFlags )
      {
         if( mTrackInfo.isFlagged( TrackInfo::Favorite ) )
         {
            ti.setFlag( TrackInfo::Favorite, false );
         }
         if( mTrackInfo.isFlagged( TrackInfo::Unwanted ) )
         {
            ti.setFlag( TrackInfo::Unwanted, false );
         }
         if( mTrackInfo.isFlagged( TrackInfo::ScannedWithPower ) ||
             mTrackInfo.isFlagged( TrackInfo::ScannedWithPeak ) )
         {
            ti.setFlag( TrackInfo::ScannedWithPeak, false );
            ti.mLastScanned = 0;
         }
      }
      if( mSetGroups || mUnsetGroups )
      {
         QStringList groups( mTrackInfo.getGroups() );
         foreach( const QString &group, groups )
         {
            ti.setGroup( group, mSetGroups );
         }
      }
Example #26
DWORD *XMISong::SendCommand (DWORD *events, EventSource due, DWORD delay, ptrdiff_t room, bool &sysex_noroom)
{
	DWORD len;
	BYTE event, data1 = 0, data2 = 0;

	if (due == EVENT_Fake)
	{
		AutoNoteOff off;
		NoteOffs.Pop(off);
		events[0] = delay;
		events[1] = 0;
		events[2] = MIDI_NOTEON | off.Channel | (off.Key << 8);
		return events + 3;
	}

	TrackInfo *track = CurrSong;

	sysex_noroom = false;
	size_t start_p = track->EventP;

	CHECK_FINISHED
	event = track->EventChunk[track->EventP++];
	CHECK_FINISHED

	// The actual event type will be filled in below. If it's not a NOP,
	// the events pointer will be advanced once the actual event is written.
	// Otherwise, we do it at the end of the function.
	events[0] = delay;
	events[1] = 0;
	events[2] = MEVT_NOP << 24;

	if (event != MIDI_SYSEX && event != MIDI_META && event != MIDI_SYSEXEND)
	{
		// Normal short message
		if ((event & 0xF0) == 0xF0)
		{
			if (MIDI_CommonLengths[event & 15] > 0)
			{
				data1 = track->EventChunk[track->EventP++];
				if (MIDI_CommonLengths[event & 15] > 1)
				{
					data2 = track->EventChunk[track->EventP++];
				}
			}
		}
		else
		{
			data1 = track->EventChunk[track->EventP++];
		}

		CHECK_FINISHED

		if (MIDI_EventLengths[(event&0x70)>>4] == 2)
		{
			data2 = track->EventChunk[track->EventP++];
		}

		if ((event & 0x70) == (MIDI_CTRLCHANGE & 0x70))
		{
			switch (data1)
			{
			case 7:		// Channel volume
				data2 = VolumeControllerChange(event & 15, data2);
				break;

			case 110:	// XMI channel lock
			case 111:	// XMI channel lock protect
			case 112:	// XMI voice protect
			case 113:	// XMI timbre protect
			case 115:	// XMI indirect controller prefix
			case 118:	// XMI clear beat/bar count
			case 119:	// XMI callback trigger
			case 120:
				event = MIDI_META;		// none of these are relevant to us.
				break;

			case 114:	// XMI patch bank select
				data1 = 0;				// Turn this into a standard MIDI bank select controller.
				break;

			case 116:	// XMI for loop controller
				if (track->ForDepth < MAX_FOR_DEPTH)
				{
					track->ForLoops[track->ForDepth].LoopBegin = track->EventP;
					track->ForLoops[track->ForDepth].LoopCount = ClampLoopCount(data2);
					track->ForLoops[track->ForDepth].LoopFinished = track->Finished;
				}
				track->ForDepth++;
				event = MIDI_META;
				break;

			case 117:	// XMI next loop controller
				if (track->ForDepth > 0)
				{
					int depth = track->ForDepth - 1;
					if (depth < MAX_FOR_DEPTH)
					{
						if (data2 < 64 || (track->ForLoops[depth].LoopCount == 0 && !m_Looping))
						{ // throw away this loop.
							track->ForLoops[depth].LoopCount = 1;
						}
						// A loop count of 0 loops forever.
						if (track->ForLoops[depth].LoopCount == 0 || --track->ForLoops[depth].LoopCount > 0)
						{
							track->EventP = track->ForLoops[depth].LoopBegin;
							track->Finished = track->ForLoops[depth].LoopFinished;
						}
						else
						{ // done with this loop
							track->ForDepth = depth;
						}
					}
					else
					{ // ignore any loops deeper than the max depth
						track->ForDepth = depth;
					}
				}
				event = MIDI_META;
				break;
			}
		}
		events[0] = delay;
		events[1] = 0;
		if (event != MIDI_META)
		{
			events[2] = event | (data1<<8) | (data2<<16);
		}
		events += 3;


		if ((event & 0x70) == (MIDI_NOTEON & 0x70))
		{ // XMI note on events include the time until an implied note off event.
			NoteOffs.AddNoteOff(track->ReadVarLen(), event & 0x0F, data1);
		}
	}
	else
	{
		// SysEx events could potentially not have enough room in the buffer...
		if (event == MIDI_SYSEX || event == MIDI_SYSEXEND)
Example #27
DWORD *XMISong::SendCommand (DWORD *events, EventSource due, DWORD delay)
{
	DWORD len;
	BYTE event, data1 = 0, data2 = 0;

	if (due == EVENT_Fake)
	{
		AutoNoteOff off;
		NoteOffs.Pop(off);
		events[0] = delay;
		events[1] = 0;
		events[2] = MIDI_NOTEON | off.Channel | (off.Key << 8);
		return events + 3;
	}

	TrackInfo *track = CurrSong;

	CHECK_FINISHED
	event = track->EventChunk[track->EventP++];
	CHECK_FINISHED

	if (event != MIDI_SYSEX && event != MIDI_META && event != MIDI_SYSEXEND)
	{
		// Normal short message
		if ((event & 0xF0) == 0xF0)
		{
			if (MIDI_CommonLengths[event & 15] > 0)
			{
				data1 = track->EventChunk[track->EventP++];
				if (MIDI_CommonLengths[event & 15] > 1)
				{
					data2 = track->EventChunk[track->EventP++];
				}
			}
		}
		else
		{
			data1 = track->EventChunk[track->EventP++];
		}

		CHECK_FINISHED

		if (MIDI_EventLengths[(event&0x70)>>4] == 2)
		{
			data2 = track->EventChunk[track->EventP++];
		}

		if ((event & 0x70) == (MIDI_CTRLCHANGE & 0x70))
		{
			switch (data1)
			{
			case 7:		// Channel volume
				data2 = VolumeControllerChange(event & 15, data2);
				break;

			case 110:	// XMI channel lock
			case 111:	// XMI channel lock protect
			case 112:	// XMI voice protect
			case 113:	// XMI timbre protect
			case 115:	// XMI indirect controller prefix
			case 118:	// XMI clear beat/bar count
			case 119:	// XMI callback trigger
			case 120:
				event = MIDI_META;		// none of these are relevant to us.
				break;

			case 114:	// XMI patch bank select
				data1 = 0;				// Turn this into a standard MIDI bank select controller.
				break;

			case 116:	// XMI for loop controller
				if (track->ForDepth < MAX_FOR_DEPTH)
				{
					track->ForLoops[track->ForDepth].LoopBegin = track->EventP;
					track->ForLoops[track->ForDepth].LoopCount = ClampLoopCount(data2);
					track->ForLoops[track->ForDepth].LoopFinished = track->Finished;
				}
				track->ForDepth++;
				event = MIDI_META;
				break;

			case 117:	// XMI next loop controller
				if (track->ForDepth > 0)
				{
					int depth = track->ForDepth - 1;
					if (depth < MAX_FOR_DEPTH)
					{
						if (data2 < 64 || (track->ForLoops[depth].LoopCount == 0 && !m_Looping))
						{ // throw away this loop.
							track->ForLoops[depth].LoopCount = 1;
						}
						// A loop count of 0 loops forever.
						if (track->ForLoops[depth].LoopCount == 0 || --track->ForLoops[depth].LoopCount > 0)
						{
							track->EventP = track->ForLoops[depth].LoopBegin;
							track->Finished = track->ForLoops[depth].LoopFinished;
						}
						else
						{ // done with this loop
							track->ForDepth = depth;
						}
					}
					else
					{ // ignore any loops deeper than the max depth
						track->ForDepth = depth;
					}
				}
				event = MIDI_META;
				break;
			}
		}
		events[0] = delay;
		events[1] = 0;
		if (event != MIDI_META)
		{
			events[2] = event | (data1<<8) | (data2<<16);
		}
		else
		{
			events[2] = MEVT_NOP << 24;
		}
		events += 3;


		if ((event & 0x70) == (MIDI_NOTEON & 0x70))
		{ // XMI note on events include the time until an implied note off event.
			NoteOffs.AddNoteOff(track->ReadVarLen(), event & 0x0F, data1);
		}
	}
	else
	{
		// Skip SysEx events just because I don't want to bother with them.
		// The old MIDI player ignored them too, so this won't break
		// anything that played before.
		if (event == MIDI_SYSEX || event == MIDI_SYSEXEND)
Example #28
already_AddRefed<MediaDataDecoder>
PDMFactory::CreateDecoderWithPDM(PlatformDecoderModule* aPDM,
                                 const TrackInfo& aConfig,
                                 FlushableTaskQueue* aTaskQueue,
                                 MediaDataDecoderCallback* aCallback,
                                 layers::LayersBackend aLayersBackend,
                                 layers::ImageContainer* aImageContainer)
{
  MOZ_ASSERT(aPDM);
  RefPtr<MediaDataDecoder> m;

  if (aConfig.GetAsAudioInfo()) {
    m = aPDM->CreateAudioDecoder(*aConfig.GetAsAudioInfo(),
                                 aTaskQueue,
                                 aCallback);
    return m.forget();
  }

  if (!aConfig.GetAsVideoInfo()) {
    return nullptr;
  }

  MediaDataDecoderCallback* callback = aCallback;
  RefPtr<DecoderCallbackFuzzingWrapper> callbackWrapper;
  if (sEnableFuzzingWrapper) {
    callbackWrapper = new DecoderCallbackFuzzingWrapper(aCallback);
    callbackWrapper->SetVideoOutputMinimumInterval(
      TimeDuration::FromMilliseconds(sVideoOutputMinimumInterval_ms));
    callbackWrapper->SetDontDelayInputExhausted(sDontDelayInputExhausted);
    callback = callbackWrapper.get();
  }

  if (H264Converter::IsH264(aConfig)) {
    RefPtr<H264Converter> h
      = new H264Converter(aPDM,
                          *aConfig.GetAsVideoInfo(),
                          aLayersBackend,
                          aImageContainer,
                          aTaskQueue,
                          callback);
    const nsresult rv = h->GetLastError();
    if (NS_SUCCEEDED(rv) || rv == NS_ERROR_NOT_INITIALIZED) {
      // The H264Converter either successfully created the wrapped decoder,
      // or there wasn't enough AVCC data to do so. Otherwise, there was some
      // problem, for example WMF DLLs were missing.
      m = h.forget();
    }
  } else {
    m = aPDM->CreateVideoDecoder(*aConfig.GetAsVideoInfo(),
                                 aLayersBackend,
                                 aImageContainer,
                                 aTaskQueue,
                                 callback);
  }

  if (callbackWrapper && m) {
    m = new DecoderFuzzingWrapper(m.forget(), callbackWrapper.forget());
  }

  return m.forget();
}
Example #29
bool operator==(const TrackInfo& lhs, const TrackInfo& rhs) {
    return (lhs.getArtist() == rhs.getArtist()) &&
            (lhs.getBpm() == rhs.getBpm()) &&
            (lhs.getComment() == rhs.getComment()) &&
            (lhs.getComposer() == rhs.getComposer()) &&
            (lhs.getConductor() == rhs.getConductor()) &&
            (lhs.getGrouping() == rhs.getGrouping()) &&
            (lhs.getGenre() == rhs.getGenre()) &&
            (lhs.getISRC() == rhs.getISRC()) &&
            (lhs.getKey() == rhs.getKey()) &&
            (lhs.getLanguage() == rhs.getLanguage()) &&
            (lhs.getLyricist() == rhs.getLyricist()) &&
            (lhs.getMood() == rhs.getMood()) &&
            (lhs.getMusicBrainzArtistId() == rhs.getMusicBrainzArtistId()) &&
            (lhs.getMusicBrainzReleaseId() == rhs.getMusicBrainzReleaseId()) &&
            (lhs.getRecordLabel() == rhs.getRecordLabel()) &&
            (lhs.getRemixer() == rhs.getRemixer()) &&
            (lhs.getReplayGain() == rhs.getReplayGain()) &&
            (lhs.getSubtitle() == rhs.getSubtitle()) &&
            (lhs.getTitle() == rhs.getTitle()) &&
            (lhs.getTrackNumber() == rhs.getTrackNumber()) &&
            (lhs.getTrackTotal() == rhs.getTrackTotal()) &&
            (lhs.getYear() == rhs.getYear());
}
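A matching inequality operator is the natural companion to the field-by-field comparison above; a one-liner, assuming the project does not already define it elsewhere:

// Hypothetical companion operator: inequality expressed via operator== above.
bool operator!=(const TrackInfo& lhs, const TrackInfo& rhs) {
    return !(lhs == rhs);
}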
Example #30
QDebug operator<<(QDebug dbg, const TrackInfo& arg) {
    dbg << '{';
    arg.dbgArtist(dbg);
    arg.dbgBpm(dbg);
    arg.dbgComment(dbg);
    arg.dbgComposer(dbg);
    arg.dbgConductor(dbg);
    arg.dbgGrouping(dbg);
    arg.dbgGenre(dbg);
    arg.dbgISRC(dbg);
    arg.dbgKey(dbg);
    arg.dbgLanguage(dbg);
    arg.dbgLyricist(dbg);
    arg.dbgMood(dbg);
    arg.dbgMusicBrainzArtistId(dbg);
    arg.dbgMusicBrainzReleaseId(dbg);
    arg.dbgRecordLabel(dbg);
    arg.dbgRemixer(dbg);
    arg.dbgReplayGain(dbg);
    arg.dbgSubtitle(dbg);
    arg.dbgTitle(dbg);
    arg.dbgTrackNumber(dbg);
    arg.dbgTrackTotal(dbg);
    arg.dbgYear(dbg);
    dbg << '}';
    return dbg;
}
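With the streaming operator above in scope, a TrackInfo can be passed straight to Qt's debug output; a small usage sketch (logCurrentTrack() is illustrative):

#include <QtDebug>

// Usage sketch for the QDebug operator above; the surrounding function is illustrative.
void logCurrentTrack(const TrackInfo& track) {
    qDebug() << "current track:" << track;   // prints the "{ ... }" form built above
}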