AudioInjector* AudioInjector::playSound(const QString& soundUrl, const float volume,
                                        const float stretchFactor, const glm::vec3 position) {
    if (soundUrl.isEmpty()) {
        return NULL;
    }
    auto soundCache = DependencyManager::get<SoundCache>();
    if (soundCache.isNull()) {
        return NULL;
    }
    SharedSoundPointer sound = soundCache->getSound(QUrl(soundUrl));
    if (sound.isNull() || !sound->isReady()) {
        return NULL;
    }

    AudioInjectorOptions options;
    options.stereo = sound->isStereo();
    options.position = position;
    options.volume = volume;

    QByteArray samples = sound->getByteArray();
    if (stretchFactor == 1.0f) {
        return playSound(samples, options, NULL);
    }

    // Stretch the cached audio by resampling it with libsoxr before injection.
    soxr_io_spec_t spec = soxr_io_spec(SOXR_INT16_I, SOXR_INT16_I);
    soxr_quality_spec_t qualitySpec = soxr_quality_spec(SOXR_MQ, 0);
    const int channelCount = sound->isStereo() ? 2 : 1;
    const int standardRate = AudioConstants::SAMPLE_RATE;
    const int resampledRate = standardRate * stretchFactor;
    const int nInputSamples = samples.size() / sizeof(int16_t);
    const int nOutputSamples = nInputSamples * stretchFactor;

    QByteArray resampled(nOutputSamples * sizeof(int16_t), '\0');
    const int16_t* receivedSamples = reinterpret_cast<const int16_t*>(samples.data());

    soxr_error_t soxError = soxr_oneshot(standardRate, resampledRate, channelCount,
                                         receivedSamples, nInputSamples, NULL,
                                         reinterpret_cast<int16_t*>(resampled.data()), nOutputSamples, NULL,
                                         &spec, &qualitySpec, 0);
    if (soxError) {
        // Fall back to the unstretched samples if the resampler fails.
        qCDebug(audio) << "Unable to resample" << soundUrl << "from" << nInputSamples << "@" << standardRate
                       << "to" << nOutputSamples << "@" << resampledRate;
        resampled = samples;
    }

    return playSound(resampled, options, NULL);
}
AudioInjector* AudioInjector::playSound(const QString& soundUrl, const float volume,
                                        const float stretchFactor, const glm::vec3 position) {
    if (soundUrl.isEmpty()) {
        return NULL;
    }
    auto soundCache = DependencyManager::get<SoundCache>();
    if (soundCache.isNull()) {
        return NULL;
    }
    SharedSoundPointer sound = soundCache->getSound(QUrl(soundUrl));
    if (sound.isNull() || !sound->isReady()) {
        return NULL;
    }

    AudioInjectorOptions options;
    options.stereo = sound->isStereo();
    options.position = position;
    options.volume = volume;

    QByteArray samples = sound->getByteArray();
    if (stretchFactor == 1.0f) {
        return playSoundAndDelete(samples, options, NULL);
    }

    // Stretch the cached audio by resampling it with AudioSRC before injection.
    const int standardRate = AudioConstants::SAMPLE_RATE;
    const int resampledRate = standardRate * stretchFactor;
    const int channelCount = sound->isStereo() ? 2 : 1;

    AudioSRC resampler(standardRate, resampledRate, channelCount);

    const int nInputFrames = samples.size() / (channelCount * sizeof(int16_t));
    const int maxOutputFrames = resampler.getMaxOutput(nInputFrames);
    QByteArray resampled(maxOutputFrames * channelCount * sizeof(int16_t), '\0');

    int nOutputFrames = resampler.render(reinterpret_cast<const int16_t*>(samples.data()),
                                         reinterpret_cast<int16_t*>(resampled.data()),
                                         nInputFrames);
    Q_UNUSED(nOutputFrames);

    return playSoundAndDelete(resampled, options, NULL);
}
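// Hypothetical call site for the overload above, assuming playSound is a static factory
// as the code suggests. The URL, volume, stretch factor and position are illustrative
// only and not taken from the original code.
void playClickAtOrigin() {
    AudioInjector* injector = AudioInjector::playSound(QStringLiteral("https://example.com/sounds/click.wav"),
                                                       1.0f,                         // volume
                                                       0.5f,                         // stretchFactor handed to the resampler
                                                       glm::vec3(0.0f, 0.0f, 0.0f)); // position
    if (injector == NULL) {
        // NULL comes back when the URL is empty, the SoundCache is unavailable,
        // or the sound has not finished loading.
        qDebug() << "playSound failed or sound not ready";
    }
}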
void Gesture::resample(int n) {
    assert(points.size());

    // Resample the stroke into n roughly evenly spaced points along its length:
    // I is the desired spacing, D accumulates distance walked since the last output point.
    float I = length() / (n - 1);
    float D = 0;

    resampled_points.clear();
    resampled_points.push_back(points[0]);

    for (int i = 1; i < (int)points.size(); ++i) {
        Vec2 curr = points[i];
        Vec2 prev = points[i - 1];
        Vec2 dir = prev - curr;
        float d = dir.length();

        if ((D + d) >= I) {
            // Interpolate a new point at distance I from the last output point and insert
            // it into the path so it becomes the start of the next segment.
            float qx = prev.x + ((I - D) / d) * (curr.x - prev.x);
            float qy = prev.y + ((I - D) / d) * (curr.y - prev.y);
            Vec2 resampled(qx, qy);
            resampled_points.push_back(resampled);
            points.insert(points.begin() + i, resampled);
            D = 0.0f;
        } else {
            D += d;
        }
    }

    // Rounding errors can leave us short of n points; pad with the last point.
    while ((int)resampled_points.size() <= (n - 1)) {
        resampled_points.push_back(points.back());
    }

    // Likewise they can produce an extra point; keep only the first n.
    // (The original erased the first n points, which would discard the resampled path.)
    if ((int)resampled_points.size() > n) {
        resampled_points.erase(resampled_points.begin() + n, resampled_points.end());
    }
}
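// The resample() above relies on a Vec2 type that supports subtraction and a length()
// method. A minimal sketch of such a type, purely for self-containment; the original
// project presumably supplies its own vector class with more functionality.
#include <cmath>

struct Vec2 {
    float x = 0.0f;
    float y = 0.0f;
    Vec2() = default;
    Vec2(float x_, float y_) : x(x_), y(y_) {}
    Vec2 operator-(const Vec2& other) const { return Vec2(x - other.x, y - other.y); }
    float length() const { return std::sqrt(x * x + y * y); }  // Euclidean norm
};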
void ConsensusMapNormalizerAlgorithmQuantile::normalizeMaps(ConsensusMap& map)
{
  //extract feature intensities
  vector<vector<double> > feature_ints;
  extractIntensityVectors(map, feature_ints);
  Size number_of_maps = feature_ints.size();

  //determine largest number of features in any map
  Size largest_number_of_features = 0;
  for (Size i = 0; i < number_of_maps; ++i)
  {
    if (feature_ints[i].size() > largest_number_of_features)
    {
      largest_number_of_features = feature_ints[i].size();
    }
  }

  //resample n data points from each sorted intensity distribution (from the different maps),
  //n = maximum number of features in any map
  vector<vector<double> > resampled_sorted_data;
  for (Size i = 0; i < number_of_maps; ++i)
  {
    vector<double> sorted = feature_ints[i];
    std::sort(sorted.begin(), sorted.end());
    vector<double> resampled(largest_number_of_features);
    resample(sorted, resampled, static_cast<UInt>(largest_number_of_features));
    resampled_sorted_data.push_back(resampled);
  }

  //compute reference distribution from all resampled distributions
  vector<double> reference_distribution(largest_number_of_features);
  for (Size i = 0; i < number_of_maps; ++i)
  {
    for (Size j = 0; j < largest_number_of_features; ++j)
    {
      reference_distribution[j] += (resampled_sorted_data[i][j] / (double)number_of_maps);
    }
  }

  //for each map: resample from the reference distribution down to the respective original size again
  vector<vector<double> > normalized_sorted_ints(number_of_maps);
  for (Size i = 0; i < number_of_maps; ++i)
  {
    vector<double> ints;
    resample(reference_distribution, ints, static_cast<UInt>(feature_ints[i].size()));
    normalized_sorted_ints[i] = ints;
  }

  //set the intensities of feature_ints to the normalized intensities
  for (Size i = 0; i < number_of_maps; ++i)
  {
    // We do not want to change the order in feature_ints[i] but normalized_sorted_ints
    // comes sorted, so we transfer the values in feature_ints[i] into pairs that store
    // the value and the index in feature_ints[i]. Then we sort the vector of pair and as
    // a result store the indexes of feature_ints[i] in a sorted order in sort_indices.
    std::vector<std::pair<double, UInt> > sort_pairs;
    sort_pairs.reserve(feature_ints[i].size());
    for (Size j = 0; j < feature_ints[i].size(); ++j)
    {
      sort_pairs.push_back(std::make_pair(feature_ints[i][j], j));
    }
    std::sort(sort_pairs.begin(), sort_pairs.end());
    vector<Size> sort_indices;
    sort_indices.reserve(sort_pairs.size());
    for (Size j = 0; j < sort_pairs.size(); ++j)
    {
      sort_indices.push_back(sort_pairs.at(j).second);
    }
    Size k = 0;
    for (Size j = 0; j < sort_indices.size(); ++j)
    {
      Size idx = sort_indices[j];
      feature_ints[i][idx] = normalized_sorted_ints[i][k++];
    }
  }

  //write new feature intensities to the consensus map
  setNormalizedIntensityValues(feature_ints, map);
}
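// normalizeMaps() above calls a resample() helper that maps a sorted intensity vector
// onto a target number of points. Its actual OpenMS implementation is not shown here;
// the following stand-in, which assumes plain linear interpolation over the sorted
// values, is an illustrative sketch only.
#include <cstddef>
#include <vector>

static void resampleLinearSketch(const std::vector<double>& in, std::vector<double>& out, std::size_t target_size)
{
  out.assign(target_size, 0.0);
  if (in.empty() || target_size == 0)
  {
    return;
  }
  if (in.size() == 1 || target_size == 1)
  {
    out.assign(target_size, in.front());
    return;
  }
  // Walk target_size positions evenly spread over [0, in.size() - 1] and interpolate
  // between the two neighboring input values at each position.
  const double step = static_cast<double>(in.size() - 1) / static_cast<double>(target_size - 1);
  for (std::size_t j = 0; j < target_size; ++j)
  {
    const double pos = j * step;
    const std::size_t lo = static_cast<std::size_t>(pos);
    const std::size_t hi = (lo + 1 < in.size()) ? lo + 1 : lo;
    const double frac = pos - static_cast<double>(lo);
    out[j] = (1.0 - frac) * in[lo] + frac * in[hi];
  }
}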