Code Example #1
File: NDMask.cpp  Project: DanielMerget/CNTK
 size_t NDMask::MaskedCount() const
 {
     auto maskMatrix = GetMatrix();
     std::unique_ptr<char[]> maskData(maskMatrix->CopyToArray());
     return std::count_if(maskData.get(), maskData.get() + maskMatrix->GetNumElements(), [](const char& val) {
         return val == 0;
     });
 }
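The function above copies the mask matrix into a heap-allocated char buffer and counts the entries equal to 0. A minimal standalone sketch of the same std::count_if idiom, with illustrative names that are not part of the CNTK API:

#include <algorithm>
#include <cstddef>
#include <memory>

// Count the zero-valued entries in a raw char buffer of known length.
// countMaskedEntries and its parameters are illustrative names only.
size_t countMaskedEntries(const std::unique_ptr<char[]>& maskData, size_t numElements)
{
    return static_cast<size_t>(std::count_if(maskData.get(), maskData.get() + numElements,
                                             [](char val) { return val == 0; }));
}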
Code Example #2
void EnginioBackendConnection::ping()
{
    if (_sentCloseFrame)
        return;

    // The WebSocket server should accept ping frames without payload according to
    // the specification, but ours does not, so let's add a dummy payload.
    QByteArray dummy;
    dummy.append(QStringLiteral("Ping.").toUtf8());
    QByteArray maskingKey = generateMaskingKey();
    QByteArray message = constructFrameHeader(/*isFinalFragment*/ true, PingOp, dummy.size(), maskingKey);
    Q_ASSERT(!message.isEmpty());

    maskData(dummy, maskingKey);
    message.append(dummy);
    _tcpSocket->write(message);
}
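ping() builds a masked ping frame: constructFrameHeader writes the header with the MASK bit set, and maskData applies the client-to-server masking that RFC 6455 requires before the payload is appended. The implementation of maskData is not shown in this listing; under RFC 6455, masking is a byte-wise XOR with the 4-byte masking key, so a sketch along these lines is a reasonable assumption, not the library's actual code:

#include <QByteArray>

// Assumed sketch of RFC 6455 payload masking: XOR each payload byte with the
// corresponding byte of the 4-byte masking key (the key repeats every 4 bytes).
// This mirrors what a maskData(payload, maskingKey) helper typically does.
static void maskDataSketch(QByteArray &data, const QByteArray &maskingKey)
{
    for (int i = 0; i < data.size(); ++i)
        data[i] = char(data[i] ^ maskingKey[i % 4]);
}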
Code Example #3
void EnginioBackendConnection::close(WebSocketCloseStatus closeStatus)
{
    if (_sentCloseFrame)
        return;

    _sentCloseFrame = true;
    _keepAliveTimer.stop();

    QByteArray payload;
    quint16 closeStatusBigEndian = qToBigEndian<quint16>(closeStatus);
    payload.append(reinterpret_cast<char*>(&closeStatusBigEndian), DefaultHeaderLength);

    QByteArray maskingKey = generateMaskingKey();
    QByteArray message = constructFrameHeader(/*isFinalFragment*/ true, ConnectionCloseOp, payload.size(), maskingKey);
    Q_ASSERT(!message.isEmpty());

    maskData(payload, maskingKey);
    message.append(payload);
    _tcpSocket->write(message);
}
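close() sends a Connection Close frame whose payload is the 2-byte status code in network byte order. Note that the code reuses DefaultHeaderLength as the number of bytes to append, which presumably equals 2 (sizeof(quint16)) here. A hedged sketch of just that payload-building step, with an illustrative helper name:

#include <QByteArray>
#include <QtEndian>

// Assumed sketch: serialize a close status code as 2 bytes in network
// (big-endian) byte order, the payload format RFC 6455 prescribes for
// Connection Close frames. buildCloseStatusPayload is an illustrative name.
static QByteArray buildCloseStatusPayload(quint16 closeStatus)
{
    quint16 statusBigEndian = qToBigEndian<quint16>(closeStatus);
    return QByteArray(reinterpret_cast<const char*>(&statusBigEndian),
                      sizeof(statusBigEndian));
}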
Code Example #4
void Deconvolution::InitializeDeconvolutionAlgorithm(const ImagingTable& groupTable, PolarizationEnum psfPolarization, class ImageBufferAllocator* imageAllocator, size_t imgWidth, size_t imgHeight, double pixelScaleX, double pixelScaleY, size_t outputChannels, double beamSize, size_t threadCount)
{
	_imageAllocator = imageAllocator;
	_imgWidth = imgWidth;
	_imgHeight = imgHeight;
	_psfPolarization = psfPolarization;
	FreeDeconvolutionAlgorithms();
	
	_summedCount = groupTable.SquaredGroupCount();
	if(_summedCount == 0)
		throw std::runtime_error("Nothing to clean");
	ImagingTable firstSquaredGroup = groupTable.GetSquaredGroup(0);
	_squaredCount = firstSquaredGroup.EntryCount();
	_polarizations.clear();
	for(size_t p=0; p!=_squaredCount; ++p)
	{
		if(_polarizations.count(firstSquaredGroup[p].polarization) != 0)
			throw std::runtime_error("Two equal polarizations were given to deconvolution algorithm within a single olarized group");
		else
			_polarizations.insert(firstSquaredGroup[p].polarization);
	}
	
	if(_useMoreSane)
	{
		_cleanAlgorithm.reset(new MoreSane(_moreSaneLocation, _moreSaneArgs, _moreSaneSigmaLevels, _prefixName));
	}
	else if(_useIUWT)
	{
		_cleanAlgorithm.reset(new IUWTDeconvolution());
	}
	else if(_multiscale)
	{
		_cleanAlgorithm.reset(new MultiScaleDeconvolution(*_imageAllocator, beamSize, pixelScaleX, pixelScaleY));
	}
	else if(_squaredCount != 1)
	{
		if(_squaredCount != 2 && _squaredCount != 4)
			throw std::runtime_error("Joined polarization cleaning was requested, but can't find a compatible set of 2 or 4 pols to clean");
		bool hasXY = _polarizations.count(Polarization::XY)!=0;
		bool hasYX = _polarizations.count(Polarization::YX)!=0;
		if((hasXY && !hasYX) || (hasYX && !hasXY))
			throw std::runtime_error("Cannot jointly clean polarization XY or YX without cleaning both.");
			
		if(_summedCount != 1)
		{
			if(_fastMultiscale)
			{
				if(_squaredCount == 4)
				{
					_cleanAlgorithm.reset(new FastMultiScaleClean<deconvolution::MultiImageSet<deconvolution::PolarizedImageSet<4>>>(beamSize, pixelScaleX, pixelScaleY));
				}
				else {
					_cleanAlgorithm.reset(new FastMultiScaleClean<deconvolution::MultiImageSet<deconvolution::PolarizedImageSet<2>>>(beamSize, pixelScaleX, pixelScaleY));
				}
			}
			else {
				if(_squaredCount == 4)
					_cleanAlgorithm.reset(new JoinedClean<deconvolution::MultiImageSet<deconvolution::PolarizedImageSet<4>>>());
				else
					_cleanAlgorithm.reset(new JoinedClean<deconvolution::MultiImageSet<deconvolution::PolarizedImageSet<2>>>());
			}
		}
		else {
			if(_fastMultiscale)
			{
				if(_squaredCount == 4)
					_cleanAlgorithm.reset(new FastMultiScaleClean<deconvolution::PolarizedImageSet<4>>(beamSize, pixelScaleX, pixelScaleY));
				else
					_cleanAlgorithm.reset(new FastMultiScaleClean<deconvolution::PolarizedImageSet<2>>(beamSize, pixelScaleX, pixelScaleY));
			}
			else
			{
				if(_squaredCount == 4)
					_cleanAlgorithm.reset(new JoinedClean<deconvolution::PolarizedImageSet<4>>());
				else
					_cleanAlgorithm.reset(new JoinedClean<deconvolution::PolarizedImageSet<2>>());
			}
		}
	}
	else { // squaredCount == 1
		if(_summedCount != 1)
		{
			if(_fastMultiscale)
				_cleanAlgorithm.reset(new FastMultiScaleClean<deconvolution::MultiImageSet<deconvolution::SingleImageSet>>(beamSize, pixelScaleX, pixelScaleY));
			else
				_cleanAlgorithm.reset(new JoinedClean<deconvolution::MultiImageSet<deconvolution::SingleImageSet>>());
		}
		else {
			if(_fastMultiscale)
				_cleanAlgorithm.reset(new FastMultiScaleClean<deconvolution::SingleImageSet>(beamSize, pixelScaleX, pixelScaleY));
			else
				_cleanAlgorithm.reset(new SimpleClean());
		}
	}
	
	_cleanAlgorithm->SetMaxNIter(_nIter);
	_cleanAlgorithm->SetThreshold(_threshold);
	_cleanAlgorithm->SetSubtractionGain(_gain);
	_cleanAlgorithm->SetStopGain(_mGain);
	_cleanAlgorithm->SetCleanBorderRatio(_cleanBorderRatio);
	_cleanAlgorithm->SetAllowNegativeComponents(_allowNegative);
	_cleanAlgorithm->SetStopOnNegativeComponents(_stopOnNegative);
	_cleanAlgorithm->SetThreadCount(threadCount);
	_cleanAlgorithm->SetMultiscaleScaleBias(_multiscaleScaleBias);
	_cleanAlgorithm->SetMultiscaleThresholdBias(_multiscaleThresholdBias);
	
	if(!_fitsMask.empty())
	{
		if(_cleanMask.empty())
		{
			std::cout << "Reading mask '" << _fitsMask << "'...\n";
			FitsReader maskReader(_fitsMask);
			if(maskReader.ImageWidth() != _imgWidth || maskReader.ImageHeight() != _imgHeight)
				throw std::runtime_error("Specified Fits file mask did not have same dimensions as output image!");
			ao::uvector<float> maskData(_imgWidth*_imgHeight);
			maskReader.Read(maskData.data());
			_cleanMask.assign(_imgWidth*_imgHeight, false);
			for(size_t i=0; i!=_imgWidth*_imgHeight; ++i)
				_cleanMask[i] = maskData[i]!=0.0;
		}
		_cleanAlgorithm->SetCleanMask(_cleanMask.data());
	}
	else if(!_casaMask.empty())
	{
		if(_cleanMask.empty())
		{
			std::cout << "Reading CASA mask '" << _casaMask << "'...\n";
			_cleanMask.assign(_imgWidth*_imgHeight, false);
			CasaMaskReader maskReader(_casaMask);
			if(maskReader.Width() != _imgWidth || maskReader.Height() != _imgHeight)
				throw std::runtime_error("Specified CASA mask did not have same dimensions as output image!");
			maskReader.Read(_cleanMask.data());
		}
		_cleanAlgorithm->SetCleanMask(_cleanMask.data());
	}
}
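The FITS-mask branch above converts the float mask image into a boolean clean mask, where any non-zero pixel counts as masked. A minimal sketch of that conversion step using standard containers (the original uses ao::uvector<float> and a member vector; toCleanMask is an illustrative helper, not part of the class):

#include <cstddef>
#include <vector>

// Convert a float mask image into a boolean clean mask: non-zero pixels
// become true, zero pixels stay false.
std::vector<bool> toCleanMask(const std::vector<float>& maskData)
{
    std::vector<bool> cleanMask(maskData.size(), false);
    for (size_t i = 0; i != maskData.size(); ++i)
        cleanMask[i] = (maskData[i] != 0.0f);
    return cleanMask;
}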
Code Example #5
void EnginioBackendConnection::onSocketReadyRead()
{
    //     WebSocket Protocol (RFC6455)
    //     Base Framing Protocol
    //     http://tools.ietf.org/html/rfc6455#section-5.2
    //
    //      0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
    //     +-+-+-+-+-------+-+-------------+-------------------------------+
    //     |F|R|R|R| opcode|M| Payload len |    Extended payload length    |
    //     |I|S|S|S|  (4)  |A|     (7)     |             (16/64)           |
    //     |N|V|V|V|       |S|             |   (if payload len==126/127)   |
    //     | |1|2|3|       |K|             |                               |
    //     +-+-+-+-+-------+-+-------------+ - - - - - - - - - - - - - - - +
    //     |     Extended payload length continued, if payload len == 127  |
    //     + - - - - - - - - - - - - - - - +-------------------------------+
    //     |                               |Masking-key, if MASK set to 1  |
    //     +-------------------------------+-------------------------------+
    //     | Masking-key (continued)       |          Payload Data         |
    //     +-------------------------------- - - - - - - - - - - - - - - - +
    //     :                     Payload Data continued ...                :
    //     + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +
    //     |                     Payload Data continued ...                |
    //     +---------------------------------------------------------------+

    while (_tcpSocket->bytesAvailable()) {
        switch (_protocolDecodeState) {
        case HandshakePending: {
            // The response is terminated by a CRLF line on its own, i.e. it ends with two consecutive newlines.
            while (_handshakeReply.isEmpty()
                   || (!_handshakeReply.endsWith(QString(CRLF % CRLF).toUtf8())
                   // According to documentation QIODevice::readLine replaces newline characters on
                   // Windows with '\n', so just to be on the safe side:
                   && !_handshakeReply.endsWith(QByteArrayLiteral("\n\n")))) {

                if (!_tcpSocket->bytesAvailable())
                    return;

                _handshakeReply.append(_tcpSocket->readLine());
            }

            QString response = QString::fromUtf8(_handshakeReply);
            _handshakeReply.clear();

            int statusCode = extractResponseStatus(response);
            QString secWebSocketAccept = extractResponseHeader(SecWebSocketAcceptHeader, response, /* ignoreCase */ false);
            bool hasValidKey = secWebSocketAccept == gBase64EncodedSha1VerificationKey;

            if (statusCode != 101 || !hasValidKey
                    || extractResponseHeader(UpgradeHeader, response) != QStringLiteral("websocket")
                    || extractResponseHeader(ConnectionHeader, response) != QStringLiteral("upgrade")
                    )
                return protocolError("Handshake failed!");

            _keepAliveTimer.start(TwoMinutes, this);
            _protocolDecodeState = FrameHeaderPending;
            emit stateChanged(ConnectedState);
        } // Fall-through.

        case FrameHeaderPending: {
            if (quint64(_tcpSocket->bytesAvailable()) < DefaultHeaderLength)
                return;

            // Large payload.
            if (_payloadLength == LargePayloadMarker) {
                if (quint64(_tcpSocket->bytesAvailable()) < LargePayloadHeaderLength)
                    return;

                char data[LargePayloadHeaderLength];
                if (quint64(_tcpSocket->read(data, LargePayloadHeaderLength)) != LargePayloadHeaderLength)
                    return protocolError("Reading large payload length failed!");

                if (data[0] & MSB)
                    return protocolError("The most significant bit of a large payload length must be 0!", MessageTooBigCloseStatus);

                // 8 bytes interpreted as a 64-bit unsigned integer
                _payloadLength = qFromBigEndian<quint64>(reinterpret_cast<uchar*>(data));
                _protocolDecodeState = PayloadDataPending;

                break;
            }

            char data[DefaultHeaderLength];
            if (quint64(_tcpSocket->read(data, DefaultHeaderLength)) != DefaultHeaderLength)
                return protocolError("Reading header failed!");

            if (!_payloadLength) {
                // This is the initial frame header data.
                _isFinalFragment = (data[0] & FIN);
                _protocolOpcode = static_cast<WebSocketOpcode>(data[0] & OPC);
                _isPayloadMasked = (data[1] & MSK);
                _payloadLength = (data[1] & LEN);

                if (_isPayloadMasked)
                    return protocolError("Invalid masked frame received from server.");

                // For data length 0-125 LEN is the payload length.
                if (_payloadLength < NormalPayloadMarker)
                    _protocolDecodeState = PayloadDataPending;

            } else {
                Q_ASSERT(_payloadLength == NormalPayloadMarker);
                // Normal sized payload: 2 bytes interpreted as the payload
                // length expressed in network byte order (i.e. big endian).
                _payloadLength = qFromBigEndian<quint16>(reinterpret_cast<uchar*>(data));
                _protocolDecodeState = PayloadDataPending;
            }

            break;
        }

        case PayloadDataPending: {
            if (static_cast<quint64>(_tcpSocket->bytesAvailable()) < _payloadLength)
                return;

            if (_protocolOpcode == ConnectionCloseOp) {
                WebSocketCloseStatus closeStatus = UnknownCloseStatus;
                if (_payloadLength >= DefaultHeaderLength) {
                    char data[DefaultHeaderLength];
                    if (quint64(_tcpSocket->read(data, DefaultHeaderLength)) != DefaultHeaderLength)
                        return protocolError("Reading connection close status failed!");

                    closeStatus = static_cast<WebSocketCloseStatus>(qFromBigEndian<quint16>(reinterpret_cast<uchar*>(data)));

                    // The body may contain UTF-8-encoded data giving a /reason/, but the
                    // specification does not define how that data is to be interpreted,
                    // nor is it guaranteed to be human readable, so it is safe to simply
                    // discard the rest of the message at this point.
                }

                qDebug() << "Connection closed by the server with status:" << closeStatus;

                QJsonObject data;
                data[EnginioString::messageType] = QStringLiteral("close");
                data[EnginioString::status] = closeStatus;
                emit dataReceived(data);

                close(closeStatus);

                _tcpSocket->close();
                return;
            }

            // We received data from the server so restart the timer.
            _keepAliveTimer.start(TwoMinutes, this);

            _applicationData.append(_tcpSocket->read(_payloadLength));
            _protocolDecodeState = FrameHeaderPending;
            _payloadLength = 0;

            if (!_isFinalFragment)
                break;

            switch (_protocolOpcode) {
            case TextFrameOp: {
                QJsonObject data = QJsonDocument::fromJson(_applicationData).object();
                data[EnginioString::messageType] = QStringLiteral("data");
                emit dataReceived(data);
                break;
            }
            case PingOp: {
                // We must send back identical application data as found in the message.
                QByteArray payload = _applicationData;
                QByteArray maskingKey = generateMaskingKey();
                QByteArray message = constructFrameHeader(/*isFinalFragment*/ true, PongOp, payload.size(), maskingKey);
                Q_ASSERT(!message.isEmpty());
                maskData(payload, maskingKey);
                message.append(payload);
                _tcpSocket->write(message);
                break;
            }
            case PongOp:
                _pingTimeoutTimer.stop();
                emit pong();
                break;
            default:
                protocolError("WebSocketOpcode not yet supported.", UnsupportedDataTypeCloseStatus);
                qWarning() << "\t\t->" << _protocolOpcode;
            }

            _applicationData.clear();

            break;
        }
        }
    }
}
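onSocketReadyRead() decodes the base frame header with the named constants FIN, OPC, MSK, LEN, NormalPayloadMarker and LargePayloadMarker, whose definitions are not part of this listing. Going by the RFC 6455 frame layout in the comment, they presumably correspond to the bit masks and marker values below; this sketch only illustrates that decoding step and is not the file's actual definitions:

#include <cstdint>

// Assumed values, derived from the RFC 6455 base framing layout; the real
// constants live elsewhere in the EnginioBackendConnection sources.
enum : uint8_t {
    FIN = 0x80,                // final-fragment flag in the first header byte
    OPC = 0x0f,                // opcode bits in the first header byte
    MSK = 0x80,                // mask flag in the second header byte
    LEN = 0x7f,                // 7-bit payload length in the second header byte
    NormalPayloadMarker = 126, // LEN == 126: a 2-byte extended payload length follows
    LargePayloadMarker = 127   // LEN == 127: an 8-byte extended payload length follows
};

// Decode the two-byte base header into its fields (illustrative helper).
struct FrameHeader { bool fin; uint8_t opcode; bool masked; uint8_t len7; };

static FrameHeader decodeBaseHeader(const char data[2])
{
    FrameHeader h;
    h.fin = (data[0] & FIN) != 0;
    h.opcode = uint8_t(data[0] & OPC);
    h.masked = (data[1] & MSK) != 0;
    h.len7 = uint8_t(data[1] & LEN);
    return h;
}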