Example #1
// Transforms the sequence: dispatches on element type and output precision.
SequenceDataPtr TransposeTransformer::Transform(SequenceDataPtr sequence)
{
    auto inputSequence = dynamic_cast<ImageSequenceData*>(sequence.get());
    if (inputSequence == nullptr)
        RuntimeError("Currently Transpose transform only works with images.");

    DataType elementType = m_inputStream.m_elementType != DataType::Unknown ?
        m_inputStream.m_elementType :
        sequence->m_elementType;

    // Dispatch on the runtime element type and the configured output precision.
    // If the precision is neither float nor double, the cases fall through to
    // the error in the default branch.
    switch (elementType)
    {
    case DataType::Double:
        if (m_precision == DataType::Float)
            return m_floatTransform.Apply<double>(inputSequence);
        if (m_precision == DataType::Double)
            return m_doubleTransform.Apply<double>(inputSequence);
    case DataType::Float:
        if (m_precision == DataType::Double)
            return m_doubleTransform.Apply<float>(inputSequence);
        if (m_precision == DataType::Float)
            return m_floatTransform.Apply<float>(inputSequence);
    case DataType::UChar:
        if (m_precision == DataType::Double)
            return m_doubleTransform.Apply<unsigned char>(inputSequence);
        if (m_precision == DataType::Float)
            return m_floatTransform.Apply<unsigned char>(inputSequence);
    default:
        RuntimeError("Unsupported type. Please apply a cast transform with 'double' or 'float' precision.");
    }
    return nullptr; // Make compiler happy
}
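The switch above is a common way to bridge a run-time element type to a compile-time template parameter: each case picks the template instantiation that matches the tag. Below is a minimal standalone sketch of that dispatch idea; ElemType, FirstElementAsDouble, and ReadFirstElement are hypothetical names used only for illustration and are not part of CNTK.

#include <cstdio>

// Runtime tag standing in for a data-type enum (illustrative only).
enum class ElemType { Float, Double, UChar };

// Read the first element of a type-erased buffer, interpreting it as T.
template <typename T>
double FirstElementAsDouble(const void* data)
{
    return static_cast<double>(static_cast<const T*>(data)[0]);
}

// Switch on the runtime tag to select the template instantiation,
// mirroring the element-type dispatch in Transform() above.
double ReadFirstElement(ElemType type, const void* data)
{
    switch (type)
    {
    case ElemType::Double: return FirstElementAsDouble<double>(data);
    case ElemType::Float:  return FirstElementAsDouble<float>(data);
    default:               return FirstElementAsDouble<unsigned char>(data);
    }
}

int main()
{
    float samples[] = { 0.25f, 0.75f };
    std::printf("%g\n", ReadFirstElement(ElemType::Float, samples)); // prints 0.25
    return 0;
}
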
// Transposes one dense sample from HWC to CHW layout for the given element type.
template <class TElemType>
SequenceDataPtr TransposeTransformer::TypedApply(SequenceDataPtr sequence,
                                                 const StreamDescription &inputStream,
                                                 const StreamDescription &outputStream)
{
    assert(inputStream.m_storageType == StorageType::dense);
    auto& inputSequence = static_cast<DenseSequenceData&>(*sequence);
    assert(inputSequence.m_numberOfSamples == 1);
    assert(inputStream.m_sampleLayout->GetNumElements() == outputStream.m_sampleLayout->GetNumElements());

    size_t count = inputStream.m_sampleLayout->GetNumElements() * GetSizeByType(inputStream.m_elementType);

    auto result = std::make_shared<DenseSequenceWithBuffer>();
    result->m_buffer.resize(count);

    ImageDimensions dimensions(*inputStream.m_sampleLayout, ImageLayoutKind::HWC);
    size_t rowCount = dimensions.m_height * dimensions.m_width;
    size_t channelCount = dimensions.m_numChannels;

    auto src = reinterpret_cast<TElemType*>(inputSequence.m_data);
    auto dst = reinterpret_cast<TElemType*>(result->m_buffer.data());

    // Reorder from interleaved HWC (channels adjacent per pixel) to planar CHW.
    for (size_t irow = 0; irow < rowCount; irow++)
    {
        for (size_t icol = 0; icol < channelCount; icol++)
        {
            dst[icol * rowCount + irow] = src[irow * channelCount + icol];
        }
    }

    result->m_sampleLayout = outputStream.m_sampleLayout;
    result->m_data = result->m_buffer.data();
    result->m_numberOfSamples = inputSequence.m_numberOfSamples;
    return result;
}
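For reference, the copy loop in TypedApply maps interleaved HWC storage (all channels of one pixel stored next to each other) to planar CHW storage (one contiguous plane per channel). The following self-contained sketch reproduces the same index arithmetic on a tiny buffer; HwcToChw and the sample values are hypothetical and only for illustration.

#include <cstdio>
#include <vector>

// HWC -> CHW transpose using the same indexing as the loop above:
// dst[channel * pixelCount + pixel] = src[pixel * channelCount + channel].
std::vector<float> HwcToChw(const std::vector<float>& src, size_t pixelCount, size_t channelCount)
{
    std::vector<float> dst(src.size());
    for (size_t p = 0; p < pixelCount; ++p)       // pixel index = row * width + col
        for (size_t c = 0; c < channelCount; ++c) // channel index
            dst[c * pixelCount + p] = src[p * channelCount + c];
    return dst;
}

int main()
{
    // A 1x2 image with 3 channels, stored interleaved (HWC): R0 G0 B0 R1 G1 B1.
    std::vector<float> hwc = { 1, 2, 3, 4, 5, 6 };
    std::vector<float> chw = HwcToChw(hwc, /*pixelCount=*/2, /*channelCount=*/3);
    // Planar (CHW) result: R0 R1 G0 G1 B0 B1  ->  1 4 2 5 3 6
    for (float v : chw)
        std::printf("%g ", v);
    std::printf("\n");
    return 0;
}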