Example #1
boolByte removeDirectory(const CharString absolutePath) {
  boolByte result = false;

#if UNIX
  if(!fileExists(absolutePath->data)) {
    return false;
  }

  // This is a bit lazy, perhaps...
  CharString removeCommand = newCharString();
  snprintf(removeCommand->data, removeCommand->length, "/bin/rm -rf \"%s\"", absolutePath->data);
  // system() returns an int status; compare it before narrowing to boolByte so
  // that a nonzero status is not accidentally truncated to zero.
  result = (boolByte)(system(removeCommand->data) == 0);
  freeCharString(removeCommand);
  return result;
#elif WINDOWS
  SHFILEOPSTRUCTA fileOperation = {0};
  fileOperation.wFunc = FO_DELETE;
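  // SHFileOperationA requires pFrom to be a double-null-terminated list of
  // paths; the CharString buffer is assumed to be zero-filled past the path.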
  fileOperation.pFrom = absolutePath->data;
  fileOperation.fFlags = FOF_NO_UI;
  return (SHFileOperationA(&fileOperation) == 0);
#else
  logUnsupportedFeature("Copy directory recursively");
  return false;
#endif
}
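A minimal caller sketch, assuming the CharString helpers seen in these examples (newCharStringWithCString, freeCharString) and a hypothetical header name; the path is illustrative only.

#include "FileUtilities.h" // hypothetical header declaring removeDirectory()

static void cleanupScratchDirectory(void) {
  // Build the absolute path with the same CharString API used above
  CharString scratchDir = newCharStringWithCString("/tmp/mrswatson-scratch");
  if (!removeDirectory(scratchDir)) {
    logError("Could not remove '%s'", scratchDir->data);
  }
  freeCharString(scratchDir);
}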
Example #2
// This could live in File, however this is currently the only place it is
// being used, and it's also a rather cheap hack, so I would prefer to keep it
// as a static function here until another use-case presents itself. If that
// should happen, then we should refactor this code properly and move it to
// File.
static boolByte _copyDirectoryToErrorReportDir(ErrorReporter self,
                                               CharString path) {
  boolByte success;

#if UNIX
  int result;
  CharString copyCommand = newCharString();
  // TODO: This is the lazy way of doing this...
  snprintf(copyCommand->data, copyCommand->capacity, "/bin/cp -r \"%s\" \"%s\"",
           path->data, self->reportDirPath->data);
  result = system(copyCommand->data);
  success = (boolByte)(WEXITSTATUS(result) == 0);

  if (!success) {
    logError("Could not copy '%s' to '%s'\n", path->data,
             self->reportDirPath->data);
  }

#else
  logUnsupportedFeature("Copy directory recursively");
  success = false;
#endif

  return success;
}
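For reference, here is a standalone sketch of the same system()/WEXITSTATUS pattern outside the project's types: system() returns an encoded wait status, so the command's exit code must be unpacked before comparing it against zero. The paths are placeholders.

#include <stdio.h>
#include <stdlib.h>
#include <sys/wait.h>

int main(void) {
  // Run the copy and unpack the wait status returned by system()
  int status = system("/bin/cp -r \"source-dir\" \"dest-dir\"");
  if (status == -1) {
    perror("system");
    return 1;
  }
  if (WIFEXITED(status) && WEXITSTATUS(status) == 0) {
    printf("Copy succeeded\n");
  } else {
    printf("Copy failed with exit code %d\n", WEXITSTATUS(status));
  }
  return 0;
}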
Example #3
static boolByte _readMidiEventsFile(void *midiSourcePtr, MidiSequence midiSequence)
{
    MidiSource midiSource = (MidiSource)midiSourcePtr;
    MidiSourceFileData extraData = (MidiSourceFileData)(midiSource->extraData);
    unsigned short formatType, numTracks, timeDivision = 0;
    int track;

    if (!_readMidiFileHeader(extraData->fileHandle, &formatType, &numTracks, &timeDivision)) {
        return false;
    }

    if (formatType != 0) {
        logUnsupportedFeature("MIDI file types other than 0");
        return false;
    } else if (formatType == 0 && numTracks != 1) {
        logError("MIDI file '%s' is of type 0, but contains %d tracks", midiSource->sourceName->data, numTracks);
        return false;
    }

    // Determine the time division type: the top bit of the division word
    // selects SMPTE frames/second; otherwise the value is ticks per beat.
    if (!(timeDivision & 0x8000)) {
        extraData->divisionType = TIME_DIVISION_TYPE_TICKS_PER_BEAT;
    } else {
        extraData->divisionType = TIME_DIVISION_TYPE_FRAMES_PER_SECOND;
        logUnsupportedFeature("MIDI file with time division in frames/second");
        return false;
    }

    logDebug("MIDI file is type %d, has %d tracks, and time division %d (type %d)",
             formatType, numTracks, timeDivision, extraData->divisionType);

    for (track = 0; track < numTracks; track++) {
        if (!_readMidiFileTrack(extraData->fileHandle, track, timeDivision, extraData->divisionType, midiSequence)) {
            return false;
        }
    }

    return true;
}
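The _readMidiFileHeader() helper is not shown above. As a point of reference, here is a standalone sketch of the standard MThd chunk layout it presumably consumes: the literal "MThd", a 32-bit big-endian length of 6, then three 16-bit big-endian fields for format, track count, and time division.

#include <stdio.h>
#include <string.h>

static int readMidiHeaderSketch(FILE *fp, unsigned short *formatType,
                                unsigned short *numTracks, unsigned short *division) {
  unsigned char header[14];
  if (fread(header, 1, sizeof(header), fp) != sizeof(header)) {
    return 0;
  }
  // "MThd" followed by a big-endian chunk length, which is always 6
  if (memcmp(header, "MThd", 4) != 0 ||
      header[4] != 0 || header[5] != 0 || header[6] != 0 || header[7] != 6) {
    return 0;
  }
  *formatType = (unsigned short)((header[8] << 8) | header[9]);
  *numTracks = (unsigned short)((header[10] << 8) | header[11]);
  *division = (unsigned short)((header[12] << 8) | header[13]);
  return 1;
}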
Example #4
// Note that this method skips hidden files
LinkedList listDirectory(const CharString directory) {
  LinkedList items = newLinkedList();
  CharString filename;

#if UNIX
  DIR* directoryPtr = opendir(directory->data);
  if(directoryPtr == NULL) {
    freeLinkedList(items);
    return NULL;
  }
  struct dirent* entry;
  while((entry = readdir(directoryPtr)) != NULL) {
    if(entry->d_name[0] != '.') {
      filename = newCharStringWithCString(entry->d_name);
      linkedListAppend(items, filename);
    }
  }
  closedir(directoryPtr);

#elif WINDOWS
  WIN32_FIND_DATAA findData;
  HANDLE findHandle;
  CharString searchString = newCharString();

  snprintf(searchString->data, searchString->length, "%s\\*", directory->data);
  findHandle = FindFirstFileA((LPCSTR)(searchString->data), &findData);
  freeCharString(searchString);
  if(findHandle == INVALID_HANDLE_VALUE) {
    freeLinkedList(items);
    return NULL;
  }
  do {
    if(findData.cFileName[0] != '.') {
      filename = newCharString();
      strncpy(filename->data, findData.cFileName, filename->length);
      linkedListAppend(items, filename);
    }
  } while(FindNextFileA(findHandle, &findData) != 0);

  FindClose(findHandle);

#else
  logUnsupportedFeature("List directory contents");
#endif

  return items;
}
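A hedged usage sketch follows, reusing only the list helpers that appear elsewhere in these examples (foreachItemInList, freeLinkedList); the (item, userData) callback shape is an assumption based on how _removeOutputFile and _copyArgumentToArray are invoked in Example #16.

// Hypothetical callback; its signature is assumed from Example #16's usage
static void _printDirectoryItem(void *item, void *userData) {
  CharString filename = (CharString)item;
  printf("%s\n", filename->data);
}

static void printDirectoryContents(const CharString directory) {
  LinkedList items = listDirectory(directory);
  if (items != NULL) {
    foreachItemInList(items, _printDirectoryItem, NULL);
    // The CharString items appended by listDirectory() may also need freeing,
    // depending on the semantics of freeLinkedList()
    freeLinkedList(items);
  }
}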
Example #5
static boolByte _writeBlockToOggFile(void* sampleSourcePtr, const SampleBuffer sampleBuffer) {
  logUnsupportedFeature("Ogg file I/O");
  return false;
}
Example #6
static boolByte _readBlockFromOggFile(void* sampleSourcePtr, SampleBuffer sampleBuffer) {
  logUnsupportedFeature("Ogg file I/O");
  return false;
}
Example #7
static boolByte _openSampleSourceOgg(void* sampleSourcePtr, const SampleSourceOpenAs openAs) {
  logUnsupportedFeature("Ogg file I/O");
  return false;
}
Example #8
static boolByte _readWaveFileInfo(const char *filename, SampleSourcePcmData extraData)
{
    int chunkOffset = 0;
    RiffChunk chunk = newRiffChunk();
    boolByte dataChunkFound = false;
    char format[4];
    size_t itemsRead;
    unsigned int audioFormat;
    unsigned int byteRate;
    unsigned int expectedByteRate;
    unsigned int blockAlign;
    unsigned int expectedBlockAlign;

    if (riffChunkReadNext(chunk, extraData->fileHandle, false)) {
        if (!riffChunkIsIdEqualTo(chunk, "RIFF")) {
            logFileError(filename, "Invalid RIFF chunk descriptor");
            freeRiffChunk(chunk);
            return false;
        }

        // The WAVE file format has two sub-chunks, with the size of both calculated in the size field. Before
        // either of the subchunks, there are an extra 4 bytes which indicate the format type. We need to read
        // that before either of the subchunks can be parsed.
        itemsRead = fread(format, sizeof(byte), 4, extraData->fileHandle);

        if (itemsRead != 4 || strncmp(format, "WAVE", 4)) {
            logFileError(filename, "Invalid format description");
            freeRiffChunk(chunk);
            return false;
        }
    } else {
        logFileError(filename, "No chunks following descriptor");
        freeRiffChunk(chunk);
        return false;
    }

    if (riffChunkReadNext(chunk, extraData->fileHandle, true)) {
        if (!riffChunkIsIdEqualTo(chunk, "fmt ")) {
            logFileError(filename, "Invalid format chunk header");
            freeRiffChunk(chunk);
            return false;
        }

        audioFormat = convertByteArrayToUnsignedShort(chunk->data + chunkOffset);
        chunkOffset += 2;

        if (audioFormat != 1) {
            logError("WAVE file with audio format %d is not supported", audioFormat);
            freeRiffChunk(chunk);
            return false;
        }

        extraData->numChannels = convertByteArrayToUnsignedShort(chunk->data + chunkOffset);
        chunkOffset += 2;
        setNumChannels(extraData->numChannels);

        extraData->sampleRate = convertByteArrayToUnsignedInt(chunk->data + chunkOffset);
        chunkOffset += 4;
        setSampleRate(extraData->sampleRate);

        byteRate = convertByteArrayToUnsignedInt(chunk->data + chunkOffset);
        chunkOffset += 4;

        blockAlign = convertByteArrayToUnsignedShort(chunk->data + chunkOffset);
        chunkOffset += 2;

        extraData->bitDepth = (BitDepth) convertByteArrayToUnsignedShort(chunk->data + chunkOffset);

        if (extraData->bitDepth != kBitDepth16Bit) {
            logUnsupportedFeature("Non-16-bit files with internal WAVE file support (build with audiofile instead!)");
            freeRiffChunk(chunk);
            return false;
        }

        expectedByteRate = (unsigned int)(extraData->sampleRate) *
                           extraData->numChannels * extraData->bitDepth / 8;

        if (expectedByteRate != byteRate) {
            logWarn("Possibly invalid bitrate %d, expected %d", byteRate, expectedByteRate);
        }

        expectedBlockAlign = (unsigned int)(extraData->numChannels * extraData->bitDepth / 8);

        if (expectedBlockAlign != blockAlign) {
            logWarn("Possibly invalid block align %d, expected %d", blockAlign, expectedBlockAlign);
        }
    } else {
        logFileError(filename, "WAVE file has no chunks following format");
        freeRiffChunk(chunk);
        return false;
    }

    // We don't need the format data anymore, so free and re-alloc the chunk to avoid a small memory leak
    freeRiffChunk(chunk);
    chunk = newRiffChunk();

    // FFMpeg (and possibly other programs) have extra sections between the fmt and data chunks. They
    // can be safely ignored. We just need to find the data chunk. See also:
    // http://forum.videohelp.com/threads/359689-ffmpeg-Override-Set-ISFT-Metadata
    while (!dataChunkFound) {
        if (riffChunkReadNext(chunk, extraData->fileHandle, false)) {
            if (riffChunkIsIdEqualTo(chunk, "data")) {
                logDebug("WAVE file has %d bytes", chunk->size);
                dataChunkFound = true;
            } else {
                fseek(extraData->fileHandle, (long) chunk->size, SEEK_CUR);
            }
        } else {
            break;
        }
    }

    if (!dataChunkFound) {
        logFileError(filename, "Could not find a data chunk. Possibly malformed WAVE file.");
        freeRiffChunk(chunk);
        return false;
    }

    freeRiffChunk(chunk);
    return true;
}
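The convertByteArrayToUnsignedShort() and convertByteArrayToUnsignedInt() helpers are not shown above. The standalone sketches below assume the little-endian byte order used by the WAVE format; the project's actual helpers may be implemented differently.

static unsigned short bytesToUnsignedShortLE(const unsigned char *bytes) {
  // Assemble two little-endian bytes into a 16-bit value
  return (unsigned short)(bytes[0] | (bytes[1] << 8));
}

static unsigned int bytesToUnsignedIntLE(const unsigned char *bytes) {
  // Assemble four little-endian bytes into a 32-bit value
  return (unsigned int)bytes[0] | ((unsigned int)bytes[1] << 8) |
         ((unsigned int)bytes[2] << 16) | ((unsigned int)bytes[3] << 24);
}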
Example #9
static boolByte _writeWaveFileInfo(SampleSourcePcmData extraData)
{
    RiffChunk chunk = newRiffChunk();
    unsigned short audioFormat = 1;
    unsigned int byteRate = (unsigned int)(extraData->sampleRate) * extraData->numChannels * extraData->bitDepth / 8;
    unsigned short blockAlign = (unsigned short)(extraData->numChannels * extraData->bitDepth / 8);
    unsigned int extraParams = 0;

    memcpy(chunk->id, "RIFF", 4);

    if (fwrite(chunk->id, sizeof(byte), 4, extraData->fileHandle) != 4) {
        logError("Could not write RIFF header");
        freeRiffChunk(chunk);
        return false;
    }

    // Write the size, but this will need to be set again when the file is finished writing
    if (fwrite(&(chunk->size), sizeof(unsigned int), 1, extraData->fileHandle) != 1) {
        logError("Could not write RIFF chunk size");
        freeRiffChunk(chunk);
        return false;
    }

    memcpy(chunk->id, "WAVE", 4);

    if (fwrite(chunk->id, sizeof(byte), 4, extraData->fileHandle) != 4) {
        logError("Could not WAVE format");
        freeRiffChunk(chunk);
        return false;
    }

    // Write the format header
    memcpy(chunk->id, "fmt ", 4);
    chunk->size = 20;

    if (fwrite(chunk->id, sizeof(byte), 4, extraData->fileHandle) != 4) {
        logError("Could not write format header");
        freeRiffChunk(chunk);
        return false;
    }

    if (fwrite(&(chunk->size), sizeof(unsigned int), 1, extraData->fileHandle) != 1) {
        logError("Could not write format chunk size");
        freeRiffChunk(chunk);
        return false;
    }

    if (!platformInfoIsLittleEndian()) {
        logUnsupportedFeature("WAVE files on big-endian platforms");
        freeRiffChunk(chunk);
        return false;
    }

    if (fwrite(&audioFormat, sizeof(unsigned short), 1, extraData->fileHandle) != 1) {
        logError("Could not write audio format");
        freeRiffChunk(chunk);
        return false;
    }

    if (fwrite(&(extraData->numChannels), sizeof(unsigned short), 1, extraData->fileHandle) != 1) {
        logError("Could not write channel count");
        freeRiffChunk(chunk);
        return false;
    }

    unsigned int sampleRateAsUInt = (unsigned int)extraData->sampleRate;
    if (fwrite(&(sampleRateAsUInt), sizeof(unsigned int), 1, extraData->fileHandle) != 1) {
        logError("Could not write sample rate");
        freeRiffChunk(chunk);
        return false;
    }

    if (fwrite(&(byteRate), sizeof(unsigned int), 1, extraData->fileHandle) != 1) {
        logError("Could not write byte rate");
        freeRiffChunk(chunk);
        return false;
    }

    if (fwrite(&(blockAlign), sizeof(unsigned short), 1, extraData->fileHandle) != 1) {
        logError("Could not write block align");
        freeRiffChunk(chunk);
        return false;
    }

    if (fwrite(&(extraData->bitDepth), sizeof(unsigned short), 1, extraData->fileHandle) != 1) {
        logError("Could not write bits per sample");
        freeRiffChunk(chunk);
        return false;
    }

    if (fwrite(&(extraParams), sizeof(byte), 4, extraData->fileHandle) != 4) {
        logError("Could not write extra PCM parameters");
        freeRiffChunk(chunk);
        return false;
    }

    memcpy(chunk->id, "data", 4);

    if (fwrite(chunk->id, sizeof(byte), 4, extraData->fileHandle) != 4) {
        logError("Could not write format header");
        freeRiffChunk(chunk);
        return false;
    }

    if (fwrite(&(chunk->size), sizeof(unsigned int), 1, extraData->fileHandle) != 1) {
        logError("Could not write data chunk size");
        freeRiffChunk(chunk);
        return false;
    }

    freeRiffChunk(chunk);
    return true;
}
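The comment at the RIFF size write notes that the sizes must be patched once writing has finished, but the corresponding close routine is not shown. The sketch below is only an illustration of how that patch-up could look, assuming the exact 48-byte header layout written above and a little-endian platform (which _writeWaveFileInfo already requires); it is not the project's actual finalizer.

static boolByte _rewriteWaveFileSizesSketch(FILE *fileHandle) {
  long fileLength;
  unsigned int riffSize, dataSize;

  if (fseek(fileHandle, 0, SEEK_END) != 0 || (fileLength = ftell(fileHandle)) < 48) {
    return false;
  }
  riffSize = (unsigned int)(fileLength - 8);   // Everything after "RIFF" + size field
  dataSize = (unsigned int)(fileLength - 48);  // Everything after the data chunk header

  // The RIFF size lives at byte offset 4, the data chunk size at offset 44
  if (fseek(fileHandle, 4, SEEK_SET) != 0 ||
      fwrite(&riffSize, sizeof(unsigned int), 1, fileHandle) != 1) {
    return false;
  }
  if (fseek(fileHandle, 44, SEEK_SET) != 0 ||
      fwrite(&dataSize, sizeof(unsigned int), 1, fileHandle) != 1) {
    return false;
  }
  return true;
}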
Example #10
int
notifyServiceReady (void) {
  logUnsupportedFeature("service ready notification");
  return 0;
}
Example #11
static boolByte _readWaveFileInfo(const char* filename, SampleSourcePcmData extraData) {
  int chunkOffset = 0;
  RiffChunk chunk = newRiffChunk();
  char format[4];
  size_t itemsRead;
  unsigned int audioFormat;
  unsigned int byteRate;
  unsigned int expectedByteRate;
  unsigned int blockAlign;
  unsigned int expectedBlockAlign;

  if(riffChunkReadNext(chunk, extraData->fileHandle, false)) {
    if(!riffChunkIsIdEqualTo(chunk, "RIFF")) {
      logFileError(filename, "Invalid RIFF chunk descriptor");
      freeRiffChunk(chunk);
      return false;
    }

    // The WAVE file format has two sub-chunks, with the size of both calculated in the size field. Before
    // either of the subchunks, there are an extra 4 bytes which indicate the format type. We need to read
    // that before either of the subchunks can be parsed.
    itemsRead = fread(format, sizeof(byte), 4, extraData->fileHandle);
    if(itemsRead != 4 || strncmp(format, "WAVE", 4)) {
      logFileError(filename, "Invalid format description");
      freeRiffChunk(chunk);
      return false;
    }
  }
  else {
    logFileError(filename, "No chunks following descriptor");
    freeRiffChunk(chunk);
    return false;
  }

  if(riffChunkReadNext(chunk, extraData->fileHandle, true)) {
    if(!riffChunkIsIdEqualTo(chunk, "fmt ")) {
      logFileError(filename, "Invalid format chunk header");
      freeRiffChunk(chunk);
      return false;
    }

    audioFormat = convertByteArrayToUnsignedShort(chunk->data + chunkOffset);
    chunkOffset += 2;
    if(audioFormat != 1) {
      logUnsupportedFeature("Compressed WAVE files");
      freeRiffChunk(chunk);
      return false;
    }

    extraData->numChannels = convertByteArrayToUnsignedShort(chunk->data + chunkOffset);
    chunkOffset += 2;
    setNumChannels(extraData->numChannels);

    extraData->sampleRate = convertByteArrayToUnsignedInt(chunk->data + chunkOffset);
    chunkOffset += 4;
    setSampleRate(extraData->sampleRate);

    byteRate = convertByteArrayToUnsignedInt(chunk->data + chunkOffset);
    chunkOffset += 4;

    blockAlign = convertByteArrayToUnsignedShort(chunk->data + chunkOffset);
    chunkOffset += 2;

    extraData->bitsPerSample = convertByteArrayToUnsignedShort(chunk->data + chunkOffset);
    if(extraData->bitsPerSample > 16) {
      logUnsupportedFeature("Bitrates greater than 16");
      freeRiffChunk(chunk);
      return false;
    }
    else if(extraData->bitsPerSample < 16) {
      logUnsupportedFeature("Bitrates lower than 16");
      freeRiffChunk(chunk);
      return false;
    }

    expectedByteRate = extraData->sampleRate * extraData->numChannels * extraData->bitsPerSample / 8;
    if(expectedByteRate != byteRate) {
      logWarn("Possibly invalid bitrate %d, expected %d", byteRate, expectedByteRate);
    }

    expectedBlockAlign = extraData->numChannels * extraData->bitsPerSample / 8;
    if(expectedBlockAlign != blockAlign) {
      logWarn("Possibly invalid block align %d, expected %d", blockAlign, expectedBlockAlign);
    }
  }
  else {
    logFileError(filename, "WAVE file has no chunks following format");
    freeRiffChunk(chunk);
    return false;
  }

  // We don't need the format data anymore, so free and re-alloc the chunk to avoid a small memory leak
  freeRiffChunk(chunk);
  chunk = newRiffChunk();

  if(riffChunkReadNext(chunk, extraData->fileHandle, false)) {
    if(!riffChunkIsIdEqualTo(chunk, "data")) {
      logFileError(filename, "WAVE file has invalid data chunk header");
      freeRiffChunk(chunk);
      return false;
    }

    logDebug("WAVE file has %d bytes", chunk->size);
  }

  freeRiffChunk(chunk);
  return true;
}
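To make the byte rate and block align sanity checks concrete, here is the arithmetic for a typical 44.1 kHz, stereo, 16-bit file as a standalone example.

#include <assert.h>

int main(void) {
  unsigned int sampleRate = 44100;
  unsigned short numChannels = 2;
  unsigned short bitsPerSample = 16;

  unsigned int expectedByteRate = sampleRate * numChannels * bitsPerSample / 8;
  unsigned int expectedBlockAlign = numChannels * bitsPerSample / 8;

  assert(expectedByteRate == 176400); // Bytes of audio data per second
  assert(expectedBlockAlign == 4);    // Bytes per sample frame (all channels)
  return 0;
}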
Example #12
static boolByte _readMidiFileTrack(FILE *midiFile, const int trackNumber,
                                   const int timeDivision, const MidiFileTimeDivisionType divisionType,
                                   MidiSequence midiSequence)
{
    unsigned int numBytesBuffer;
    byte *trackData, *currentByte, *endByte;
    size_t itemsRead, numBytes;
    unsigned long currentTimeInSampleFrames = 0;
    unsigned long unpackedVariableLength;
    MidiEvent midiEvent = NULL;
    unsigned int i;

    if (!_readMidiFileChunkHeader(midiFile, "MTrk")) {
        return false;
    }

    itemsRead = fread(&numBytesBuffer, sizeof(unsigned int), 1, midiFile);

    if (itemsRead < 1) {
        logError("Short read of MIDI file (at track %d header, num items)", trackNumber);
        return false;
    }

    // Read in the entire track in one pass and parse the events from the buffer data. Much easier
    // than having to call fread() for each event.
    numBytes = (size_t)convertBigEndianIntToPlatform(numBytesBuffer);
    trackData = (byte *)malloc(numBytes);
    itemsRead = fread(trackData, 1, numBytes, midiFile);

    if (itemsRead != numBytes) {
        logError("Short read of MIDI file (at track %d)", trackNumber);
        free(trackData);
        return false;
    }

    currentByte = trackData;
    endByte = trackData + numBytes;

    while (currentByte < endByte) {
        // Unpack variable length timestamp
        unpackedVariableLength = *currentByte;

        if (unpackedVariableLength & 0x80) {
            unpackedVariableLength &= 0x7f;

            do {
                unpackedVariableLength = (unpackedVariableLength << 7) + (*(++currentByte) & 0x7f);
            } while (*currentByte & 0x80);
        }

        currentByte++;
        freeMidiEvent(midiEvent);
        midiEvent = newMidiEvent();

        switch (*currentByte) {
        case 0xff:
            midiEvent->eventType = MIDI_TYPE_META;
            currentByte++;
            midiEvent->status = *(currentByte++);
            numBytes = *(currentByte++);
            midiEvent->extraData = (byte *)malloc(numBytes);

            for (i = 0; i < numBytes; i++) {
                midiEvent->extraData[i] = *(currentByte++);
            }

            break;

        case 0xf0:
        case 0xf7:
            // Sysex events in a Standard MIDI File are introduced by the
            // status bytes 0xf0 or 0xf7
            logUnsupportedFeature("MIDI files containing sysex events");
            free(trackData);
            freeMidiEvent(midiEvent);
            return false;

        default:
            midiEvent->eventType = MIDI_TYPE_REGULAR;
            midiEvent->status = *currentByte++;
            midiEvent->data1 = *currentByte++;

            // All regular MIDI events have 3 bytes except for program change and channel aftertouch
            if (!((midiEvent->status & 0xf0) == 0xc0 || (midiEvent->status & 0xf0) == 0xd0)) {
                midiEvent->data2 = *currentByte++;
            }

            break;
        }

        switch (divisionType) {
        case TIME_DIVISION_TYPE_TICKS_PER_BEAT: {
            double ticksPerSecond = (double)timeDivision * getTempo() / 60.0;
            double sampleFramesPerTick = getSampleRate() / ticksPerSecond;
            currentTimeInSampleFrames += (long)(unpackedVariableLength * sampleFramesPerTick);
        }
        break;

        case TIME_DIVISION_TYPE_FRAMES_PER_SECOND:
            // Actually, this should be caught when parsing the file type
            logUnsupportedFeature("Time division frames/sec");
            free(trackData);
            freeMidiEvent(midiEvent);
            return false;

        case TIME_DIVISION_TYPE_INVALID:
        default:
            logInternalError("Invalid time division type");
            free(trackData);
            freeMidiEvent(midiEvent);
            return false;
        }

        midiEvent->timestamp = currentTimeInSampleFrames;

        if (midiEvent->eventType == MIDI_TYPE_META) {
            switch (midiEvent->status) {
            case MIDI_META_TYPE_TEXT:
            case MIDI_META_TYPE_COPYRIGHT:
            case MIDI_META_TYPE_SEQUENCE_NAME:
            case MIDI_META_TYPE_INSTRUMENT:
            case MIDI_META_TYPE_LYRIC:
            case MIDI_META_TYPE_MARKER:
            case MIDI_META_TYPE_CUE_POINT:

            // This event type could theoretically be supported, as long as the
            // plugin supports it
            case MIDI_META_TYPE_PROGRAM_NAME:
            case MIDI_META_TYPE_DEVICE_NAME:
            case MIDI_META_TYPE_KEY_SIGNATURE:
            case MIDI_META_TYPE_PROPRIETARY:
                logDebug("Ignoring MIDI meta event of type 0x%x at %ld", midiEvent->status, midiEvent->timestamp);
                break;

            case MIDI_META_TYPE_TEMPO:
            case MIDI_META_TYPE_TIME_SIGNATURE:
            case MIDI_META_TYPE_TRACK_END:
                logDebug("Parsed MIDI meta event of type 0x%02x at %ld", midiEvent->status, midiEvent->timestamp);
                appendMidiEventToSequence(midiSequence, midiEvent);
                midiEvent = NULL;
                break;

            default:
                logWarn("Ignoring MIDI meta event of type 0x%x at %ld", midiEvent->status, midiEvent->timestamp);
                break;
            }
        } else {
            logDebug("MIDI event of type 0x%02x parsed at %ld", midiEvent->status, midiEvent->timestamp);
            appendMidiEventToSequence(midiSequence, midiEvent);
            midiEvent = NULL;
        }
    }

    free(trackData);
    freeMidiEvent(midiEvent);
    return true;
}
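The variable-length timestamp unpacking above is the standard MIDI variable-length quantity scheme: each byte contributes seven bits, and a set high bit means another byte follows. Here is the same logic as a standalone helper with a small self-check.

#include <stdio.h>

static unsigned long readVariableLengthQuantity(const unsigned char **cursor) {
  unsigned long value = **cursor;
  if (value & 0x80) {
    unsigned char c;
    value &= 0x7f;
    do {
      c = *(++(*cursor));
      value = (value << 7) + (c & 0x7f);
    } while (c & 0x80);
  }
  (*cursor)++; // Advance past the final byte of the quantity
  return value;
}

int main(void) {
  // 0x81 0x48 encodes 200 ((0x01 << 7) | 0x48); 0x40 encodes 64
  const unsigned char bytes[] = {0x81, 0x48, 0x40};
  const unsigned char *cursor = bytes;
  printf("%lu\n", readVariableLengthQuantity(&cursor)); // 200
  printf("%lu\n", readVariableLengthQuantity(&cursor)); // 64
  return 0;
}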
Example #13
VstIntPtr VSTCALLBACK pluginVst2xHostCallback(AEffect *effect, VstInt32 opcode, VstInt32 index, VstIntPtr value, void *dataPtr, float opt) {
  // This string is used in a bunch of logging calls below
  PluginVst2xId pluginId;
  if(effect != NULL) {
    pluginId = newPluginVst2xIdWithId(effect->uniqueID);
  }
  else {
    // During plugin initialization, the dispatcher can be called without a
    // valid plugin instance, as the AEffect* struct is still not fully constructed
    // at that point.
    pluginId = newPluginVst2xId();
  }
  const char* pluginIdString = pluginId->idString->data;
  VstIntPtr result = 0;

  logDebug("Plugin '%s' called host dispatcher with %d, %d, %d", pluginIdString, opcode, index, value);
  switch(opcode) {
    case audioMasterAutomate:
      // The plugin will call this if a parameter has changed via MIDI or the GUI, so the host can update
      // itself accordingly. We don't care about this (for the time being), and as we don't support either
      // GUI's or live MIDI, this opcode can be ignored.
      break;
    case audioMasterVersion:
      // We are a VST 2.4 compatible host
      result = 2400;
      break;
    case audioMasterCurrentId:
      // Use the current plugin ID, needed by VST shell plugins to determine which sub-plugin to load
      result = currentPluginUniqueId;
      break;
    case audioMasterIdle:
      // Ignore
      result = 1;
      break;
    case audioMasterPinConnected:
      logDeprecated("audioMasterPinConnected", pluginIdString);
      break;
    case audioMasterWantMidi:
      // This (deprecated) call is sometimes made by VST2.3 instruments to tell
      // the host that it is an instrument. We can safely ignore it.
      result = 1;
      break;
    case audioMasterGetTime: {
      AudioClock audioClock = getAudioClock();

      // These values are always valid
      vstTimeInfo.samplePos = audioClock->currentFrame;
      vstTimeInfo.sampleRate = getSampleRate();

      // Set flags for transport state
      vstTimeInfo.flags = 0;
      vstTimeInfo.flags |= audioClock->transportChanged ? kVstTransportChanged : 0;
      vstTimeInfo.flags |= audioClock->isPlaying ? kVstTransportPlaying : 0;

      // Fill values based on other flags which may have been requested
      if(value & kVstNanosValid) {
        // It doesn't make sense to return this value, as the plugin may try to calculate
        // something based on the current system time. As we are running offline, anything
        // the plugin calculates here will probably be wrong given the way we are running.
        // However, for realtime mode, this flag should be implemented in that case.
        logWarn("Plugin '%s' asked for time in nanoseconds (unsupported)", pluginIdString);
      }
      if(value & kVstPpqPosValid) {
        // TODO: Move calculations to AudioClock
        double samplesPerBeat = (60.0 / getTempo()) * getSampleRate();
        // Musical time starts with 1, not 0
        vstTimeInfo.ppqPos = (vstTimeInfo.samplePos / samplesPerBeat) + 1.0;
        logDebug("Current PPQ position is %g", vstTimeInfo.ppqPos);
        vstTimeInfo.flags |= kVstPpqPosValid;
      }
      if(value & kVstTempoValid) {
        vstTimeInfo.tempo = getTempo();
        vstTimeInfo.flags |= kVstTempoValid;
      }
      if(value & kVstBarsValid) {
        if(!(value & kVstPpqPosValid)) {
          logError("Plugin requested position in bars, but not PPQ");
        }
        // TODO: Move calculations to AudioClock
        double currentBarPos = floor(vstTimeInfo.ppqPos / (double)getTimeSignatureBeatsPerMeasure());
        vstTimeInfo.barStartPos = currentBarPos * (double)getTimeSignatureBeatsPerMeasure() + 1.0;
        logDebug("Current bar is %g", vstTimeInfo.barStartPos);
        vstTimeInfo.flags |= kVstBarsValid;
      }
      if(value & kVstCyclePosValid) {
        // We don't support cycling, so this is always 0
      }
      if(value & kVstTimeSigValid) {
        vstTimeInfo.timeSigNumerator = getTimeSignatureBeatsPerMeasure();
        vstTimeInfo.timeSigDenominator = getTimeSignatureNoteValue();
        vstTimeInfo.flags |= kVstTimeSigValid;
      }
      if(value & kVstSmpteValid) {
        logUnsupportedFeature("Current time in SMPTE format");
      }
      if(value & kVstClockValid) {
        logUnsupportedFeature("Sample frames until next clock");
      }

      result = (VstIntPtr)&vstTimeInfo;
      break;
    }
    case audioMasterProcessEvents:
      logUnsupportedFeature("VST master opcode audioMasterProcessEvents");
      break;
    case audioMasterSetTime:
      logDeprecated("audioMasterSetTime", pluginIdString);
      break;
    case audioMasterTempoAt:
      logDeprecated("audioMasterTempoAt", pluginIdString);
      break;
    case audioMasterGetNumAutomatableParameters:
      logDeprecated("audioMasterGetNumAutomatableParameters", pluginIdString);
      break;
    case audioMasterGetParameterQuantization:
      logDeprecated("audioMasterGetParameterQuantization", pluginIdString);
      break;
    case audioMasterIOChanged: {
      PluginChain pluginChain = getPluginChain();
      logDebug("Number of inputs: %d", effect->numInputs);
      logDebug("Number of outputs: %d", effect->numOutputs);
      logDebug("Number of parameters: %d", effect->numParams);
      logDebug("Initial Delay: %d", effect->initialDelay);
      result = -1;
      for(unsigned int i = 0; i < pluginChain->numPlugins; ++i) {
        if((unsigned long)effect->uniqueID == pluginVst2xGetUniqueId(pluginChain->plugins[i])) {
          logDebug("Updating plugin");
          pluginVst2xAudioMasterIOChanged(pluginChain->plugins[i], effect);
          result = 0;
          break; // Only one plugin will match anyway.
        }
      }
      break;
    }
    case audioMasterNeedIdle:
      logDeprecated("audioMasterNeedIdle", pluginIdString);
      break;
    case audioMasterSizeWindow:
      logWarn("Plugin '%s' asked us to resize window (unsupported)", pluginIdString);
      break;
    case audioMasterGetSampleRate:
      result = (int)getSampleRate();
      break;
    case audioMasterGetBlockSize:
      result = getBlocksize();
      break;
    case audioMasterGetInputLatency:
      // Input latency is not used, and is always 0
      result = 0;
      break;
    case audioMasterGetOutputLatency:
      // Output latency is not used, and is always 0
      result = 0;
      break;
    case audioMasterGetPreviousPlug:
      logDeprecated("audioMasterGetPreviousPlug", pluginIdString);
      break;
    case audioMasterGetNextPlug:
      logDeprecated("audioMasterGetNextPlug", pluginIdString);
      break;
    case audioMasterWillReplaceOrAccumulate:
      logDeprecated("audioMasterWillReplaceOrAccumulate", pluginIdString);
      break;
    case audioMasterGetCurrentProcessLevel:
      // We are not a multithreaded app and have no GUI, so this is unsupported.
      result = kVstProcessLevelUnknown;
      break;
    case audioMasterGetAutomationState:
      // Automation is also not supported (for now)
      result = kVstAutomationUnsupported;
      break;
    case audioMasterOfflineStart:
      logWarn("Plugin '%s' asked us to start offline processing (unsupported)", pluginIdString);
      break;
    case audioMasterOfflineRead:
      logWarn("Plugin '%s' asked to read offline data (unsupported)", pluginIdString);
      break;
    case audioMasterOfflineWrite:
      logWarn("Plugin '%s' asked to write offline data (unsupported)", pluginIdString);
      break;
    case audioMasterOfflineGetCurrentPass:
      logWarn("Plugin '%s' asked for current offline pass (unsupported)", pluginIdString);
      break;
    case audioMasterOfflineGetCurrentMetaPass:
      logWarn("Plugin '%s' asked for current offline meta pass (unsupported)", pluginIdString);
      break;
    case audioMasterSetOutputSampleRate:
      logDeprecated("audioMasterSetOutputSampleRate", pluginIdString);
      break;
    case audioMasterGetOutputSpeakerArrangement:
      logDeprecated("audioMasterGetOutputSpeakerArrangement", pluginIdString);
      break;
    case audioMasterGetVendorString:
      strncpy((char*)dataPtr, VENDOR_NAME, kVstMaxVendorStrLen);
      result = 1;
      break;
    case audioMasterGetProductString:
      strncpy((char*)dataPtr, PROGRAM_NAME, kVstMaxProductStrLen);
      result = 1;
      break;
    case audioMasterGetVendorVersion:
      // Return our version as a single integer in the form ABCC, which corresponds to version A.B.CC.
      // The patch number can often reach double digits, so it gets two digits.
      result = VERSION_MAJOR * 1000 + VERSION_MINOR * 100 + VERSION_PATCH;
      break;
    case audioMasterVendorSpecific:
      logWarn("Plugin '%s' made a vendor specific call (unsupported). Arguments: %d, %d, %f", pluginIdString, index, value, opt);
      break;
    case audioMasterCanDo:
      result = _canHostDo(pluginIdString, (char*)dataPtr);
      break;
    case audioMasterSetIcon:
      logDeprecated("audioMasterSetIcon", pluginIdString);
      break;
    case audioMasterGetLanguage:
      result = kVstLangEnglish;
      break;
    case audioMasterOpenWindow:
      logDeprecated("audioMasterOpenWindow", pluginIdString);
      break;
    case audioMasterCloseWindow:
      logDeprecated("audioMasterCloseWindow", pluginIdString);
      break;
    case audioMasterGetDirectory:
      logWarn("Plugin '%s' asked for directory pointer (unsupported)", pluginIdString);
      break;
    case audioMasterUpdateDisplay:
      // Ignore
      break;
    case audioMasterBeginEdit:
      logWarn("Plugin '%s' asked to begin parameter automation (unsupported)", pluginIdString);
      break;
    case audioMasterEndEdit:
      logWarn("Plugin '%s' asked to end parameter automation (unsupported)", pluginIdString);
      break;
    case audioMasterOpenFileSelector:
      logWarn("Plugin '%s' asked us to open file selector (unsupported)", pluginIdString);
      break;
    case audioMasterCloseFileSelector:
      logWarn("Plugin '%s' asked us to close file selector (unsupported)", pluginIdString);
      break;
    case audioMasterEditFile:
      logDeprecated("audioMasterEditFile", pluginIdString);
      break;
    case audioMasterGetChunkFile:
      logDeprecated("audioMasterGetChunkFile", pluginIdString);
      break;
    case audioMasterGetInputSpeakerArrangement:
      logDeprecated("audioMasterGetInputSpeakerArrangement", pluginIdString);
      break;
    default:
      logWarn("Plugin '%s' asked if host can do unknown opcode %d", pluginIdString, opcode);
      break;
  }

  freePluginVst2xId(pluginId);
  return result;
}
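The musical-time math in the audioMasterGetTime handler can be checked with concrete numbers. The standalone example below reproduces the PPQ and bar-start calculations for 120 BPM, 44.1 kHz, and 4/4 time, instead of reading them from the host's AudioClock.

#include <math.h>
#include <stdio.h>

int main(void) {
  double tempo = 120.0;        // Beats per minute
  double sampleRate = 44100.0; // Sample frames per second
  int beatsPerMeasure = 4;
  double samplePos = 88200.0;  // Two seconds into the render

  double samplesPerBeat = (60.0 / tempo) * sampleRate;  // 22050
  double ppqPos = (samplePos / samplesPerBeat) + 1.0;   // 5.0 (musical time starts at 1)
  double barStartPos =
      floor(ppqPos / (double)beatsPerMeasure) * (double)beatsPerMeasure + 1.0; // 5.0 (start of bar 2)

  printf("ppqPos=%g barStartPos=%g\n", ppqPos, barStartPos);
  return 0;
}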
Example #14
int
removeService (const char *name) {
  logUnsupportedFeature("service removal");
  return 0;
}
Example #15
int
installService (const char *name, const char *description) {
  logUnsupportedFeature("service installation");
  return 0;
}
Example #16
void runApplicationTest(char *applicationPath, const char *testName, LinkedList testArguments, ReturnCodes expectedResultCode, boolByte analyzeOutput) {
  char** applicationArguments;
  ArgumentsCopyData argumentsCopyData;
  int resultCode = -1;
  LinkedList defaultArguments = getDefaultArguments(testName);
  LinkedList arguments = _appendLinkedLists(defaultArguments, testArguments);
  CharString failedAnalysisFunctionName = newCharString();
  unsigned long failedAnalysisSample;

  // Remove any output files which may have been left from previous tests
  foreachItemInList(defaultArguments, _removeOutputFile, NULL);

#if WINDOWS
#else
  mkdir("out", 0755);
#endif

#if WINDOWS
  logUnsupportedFeature("Application testing");
#else
  int numArgs = numItemsInList(arguments);
  // Add two extra items to the array, one for the application path and another for a NULL object.
  // These are required for the calls to the execv* functions.
  applicationArguments = (char**)malloc(sizeof(char*) * (numArgs + 2));
  applicationArguments[0] = applicationPath;
  applicationArguments[numArgs + 1] = NULL;
  argumentsCopyData.currentIndex = 1;
  argumentsCopyData.outArray = applicationArguments;
  foreachItemInList(arguments, _copyArgumentToArray, &argumentsCopyData);
  printf("  %s: ", testName);

  pid_t forkedPid = fork();
  if(forkedPid == 0) {
    resultCode = execvp(applicationPath, applicationArguments);
    exit(resultCode);
  }
  else {
    int statusLoc;
    waitpid(forkedPid, &statusLoc, 0);
    if(WIFEXITED(statusLoc)) {
      resultCode = WEXITSTATUS(statusLoc);
    }
  }
#endif

  if(resultCode == expectedResultCode) {
    if(analyzeOutput) {
      if(analyzeFile(_getTestOutputFilename(testName, "pcm"), failedAnalysisFunctionName, &failedAnalysisSample)) {
        testsPassed++;
        foreachItemInList(defaultArguments, _removeOutputFile, NULL);
        printTestSuccess();
      }
      else {
        printTestFail();
        printf("    in test '%s', while analyzing output for %s at sample %lu.\n",
          testName, failedAnalysisFunctionName->data, failedAnalysisSample);
        testsFailed++;
      }
    }
    else {
      testsPassed++;
      foreachItemInList(defaultArguments, _removeOutputFile, NULL);
      printTestSuccess();
    }
  }
  else {
    printTestFail();
    printf("    in %s. Expected result code %d, got %d.\n", testName, expectedResultCode, resultCode);
    testsFailed++;
  }

  freeLinkedList(defaultArguments);
  freeLinkedList(testArguments);
  freeLinkedList(arguments);
}
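A hypothetical invocation sketch follows; RETURN_CODE_SUCCESS and the argument strings are placeholders rather than the project's actual enum values and flags, and note that runApplicationTest() frees the testArguments list itself.

static void _runExampleTest(char *applicationPath) {
  // Placeholder arguments; the real test suites build these differently
  LinkedList testArguments = newLinkedList();
  linkedListAppend(testArguments, newCharStringWithCString("--quiet"));
  runApplicationTest(applicationPath, "Example test", testArguments,
                     RETURN_CODE_SUCCESS, true);
  // No freeLinkedList(testArguments) here: runApplicationTest() frees it
}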