Example #1: PaymentServer::processPaymentRequest (Bitcoin Core Qt wallet, BIP70 payment request validation)
bool PaymentServer::processPaymentRequest(const PaymentRequestPlus& request, SendCoinsRecipient& recipient)
{
    if (!optionsModel)
        return false;

    if (request.IsInitialized()) {
        // Payment request network matches client network?
        if (!verifyNetwork(request.getDetails())) {
            Q_EMIT message(tr("Payment request rejected"), tr("Payment request network doesn't match client network."),
                CClientUIInterface::MSG_ERROR);

            return false;
        }

        // Make sure any payment requests involved are still valid.
        // This is re-checked just before sending coins in WalletModel::sendCoins().
        if (verifyExpired(request.getDetails())) {
            Q_EMIT message(tr("Payment request rejected"), tr("Payment request expired."),
                CClientUIInterface::MSG_ERROR);

            return false;
        }
    } else {
        Q_EMIT message(tr("Payment request error"), tr("Payment request is not initialized."),
            CClientUIInterface::MSG_ERROR);

        return false;
    }

    recipient.paymentRequest = request;
    recipient.message = GUIUtil::HtmlEscape(request.getDetails().memo());

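    // Resolve the merchant identity: getMerchant() checks the request's PKI signature
    // against the local root certificate store and fills in authenticatedMerchant on
    // success, leaving it empty for unauthenticated requests.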
    request.getMerchant(certStore.get(), recipient.authenticatedMerchant);

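    // A BIP70 payment request may carry several outputs; collect their destination
    // addresses and accumulate the total requested amount below.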
    QList<std::pair<CScript, CAmount> > sendingTos = request.getPayTo();
    QStringList addresses;

    for (const std::pair<CScript, CAmount>& sendingTo : sendingTos) {
        // Extract and check destination addresses
        CTxDestination dest;
        if (ExtractDestination(sendingTo.first, dest)) {
            // Append destination address
            addresses.append(QString::fromStdString(EncodeDestination(dest)));
        }
        else if (!recipient.authenticatedMerchant.isEmpty()) {
            // Unauthenticated payment requests to custom bitcoin addresses are not supported
            // (there is no good way to tell the user where they are paying in a way they'd
            // have a chance of understanding).
            Q_EMIT message(tr("Payment request rejected"),
                tr("Unverified payment requests to custom payment scripts are unsupported."),
                CClientUIInterface::MSG_ERROR);
            return false;
        }

        // Bitcoin amounts are stored as (optional) uint64 in the protobuf messages (see paymentrequest.proto),
        // but CAmount is defined as int64_t. Because of that we need to verify that amounts are in a valid range
        // and no overflow has happened.
        if (!verifyAmount(sendingTo.second)) {
            Q_EMIT message(tr("Payment request rejected"), tr("Invalid payment request."), CClientUIInterface::MSG_ERROR);
            return false;
        }

        // Extract and check amounts
        CTxOut txOut(sendingTo.second, sendingTo.first);
        if (IsDust(txOut, ::dustRelayFee)) {
            Q_EMIT message(tr("Payment request error"), tr("Requested payment amount of %1 is too small (considered dust).")
                .arg(BitcoinUnits::formatWithUnit(optionsModel->getDisplayUnit(), sendingTo.second)),
                CClientUIInterface::MSG_ERROR);

            return false;
        }

        recipient.amount += sendingTo.second;
        // Also verify that the final amount is still in a valid range after adding additional amounts.
        if (!verifyAmount(recipient.amount)) {
            Q_EMIT message(tr("Payment request rejected"), tr("Invalid payment request."), CClientUIInterface::MSG_ERROR);
            return false;
        }
    }
    // Store addresses and format them to fit nicely into the GUI
    recipient.address = addresses.join("<br />");

    if (!recipient.authenticatedMerchant.isEmpty()) {
        qDebug() << "PaymentServer::processPaymentRequest: Secure payment request from " << recipient.authenticatedMerchant;
    }
    else {
        qDebug() << "PaymentServer::processPaymentRequest: Insecure payment request to " << addresses.join(", ");
    }

    return true;
}
Example #2: training a LeNet-5-style convolutional network on MNIST with the clneural library (OpenCL)
// Standard headers required by this snippet; the project-specific headers that
// declare ImageDataset, the clneural layer classes and OpenCLInterface are not
// shown in the original example and are assumed to be included elsewhere.
#include <cstdint>
#include <ctime>
#include <iostream>
#include <list>
#include <memory>
#include <utility>
#include <vector>

int main(int argc, char **argv) {
	ImageDataset d;
	d.loadImagesFromFile("train-images-idx3-ubyte");
	d.loadLabelsFromFile("train-labels-idx1-ubyte");
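	// Sigmoid activation for the convolutional and fully connected layers,
	// linear activation for the subsampling (pooling) layers.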
	std::shared_ptr<clneural::ActivationFunction> act(new clneural::SigmoidActivationFunction());
	std::shared_ptr<clneural::ActivationFunction> act2(new clneural::LinearActivationFunction());
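	// C1: convolutional layer over the 32x32 input image, 5x5 kernels, 6 feature
	// maps, each connected to the single input plane (map 0); output maps are 28x28.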
	std::vector<std::list<unsigned int>> C1_connections(6, std::list<unsigned int>({0}));
	clneural::ConvolutionalLayer::Dimension C1_input;
	clneural::ConvolutionalLayer::Dimension C1_filter;
	float training_speed = 0.7f;
	C1_input.width = 32;
	C1_input.height = 32;
	C1_filter.width = 5;
	C1_filter.height = 5;
	std::shared_ptr<clneural::NeuralNetworkLayer> C1(new clneural::ConvolutionalLayer(C1_input, C1_filter, C1_connections, act, training_speed));
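	// S2: 2x2 subsampling (pooling) of the six 28x28 C1 maps, giving 14x14 maps.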
	clneural::SubsamplingLayer::Dimension S2_input;
	clneural::SubsamplingLayer::Dimension S2_filter;
	S2_input.width = 28;
	S2_input.height = 28;
	S2_filter.width = 2;
	S2_filter.height = 2;
	std::shared_ptr<clneural::NeuralNetworkLayer> S2(new clneural::SubsamplingLayer(S2_input, S2_filter, 6, act2, training_speed));
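	// C3: 16 feature maps over the six S2 maps, using the sparse connection table
	// from LeNet-5, so each C3 map only sees a subset of the S2 maps.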
	std::vector<std::list<unsigned int>> C3_connections(16);
	C3_connections[0] = std::list<unsigned int>({0,1,2});
	C3_connections[1] = std::list<unsigned int>({1,2,3});
	C3_connections[2] = std::list<unsigned int>({2,3,4});
	C3_connections[3] = std::list<unsigned int>({3,4,5});
	C3_connections[4] = std::list<unsigned int>({4,5,0});
	C3_connections[5] = std::list<unsigned int>({5,0,1});
	C3_connections[6] = std::list<unsigned int>({0,1,2,3});
	C3_connections[7] = std::list<unsigned int>({1,2,3,4});
	C3_connections[8] = std::list<unsigned int>({2,3,4,5});
	C3_connections[9] = std::list<unsigned int>({3,4,5,0});
	C3_connections[10] = std::list<unsigned int>({4,5,0,1});
	C3_connections[11] = std::list<unsigned int>({5,0,1,2});
	C3_connections[12] = std::list<unsigned int>({0,1,3,4});
	C3_connections[13] = std::list<unsigned int>({1,2,4,5});
	C3_connections[14] = std::list<unsigned int>({0,2,3,5});
	C3_connections[15] = std::list<unsigned int>({0,1,2,3,4,5});
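	// C3 operates on the 14x14 S2 output with 5x5 kernels, producing 10x10 maps.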
	clneural::ConvolutionalLayer::Dimension C3_input;
	clneural::ConvolutionalLayer::Dimension C3_filter;
	C3_input.width = 14;
	C3_input.height = 14;
	C3_filter.width = 5;
	C3_filter.height = 5;
	std::shared_ptr<clneural::NeuralNetworkLayer> C3(new clneural::ConvolutionalLayer(C3_input, C3_filter, C3_connections, act, training_speed));
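	// S4: 2x2 subsampling of the sixteen 10x10 C3 maps, giving 5x5 maps
	// (16 * 5 * 5 = 400 values feeding the fully connected part).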
	clneural::SubsamplingLayer::Dimension S4_input;
	clneural::SubsamplingLayer::Dimension S4_filter;
	S4_input.width = 10;
	S4_input.height = 10;
	S4_filter.width = 2;
	S4_filter.height = 2;
	std::shared_ptr<clneural::NeuralNetworkLayer> S4(new clneural::SubsamplingLayer(S4_input, S4_filter, 16, act2, training_speed));
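	// Fully connected layers: 400 -> 84 -> 10, one output unit per digit class.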
	std::shared_ptr<clneural::NeuralNetworkLayer> N1(new clneural::FullFeedforwardLayer(400, 84, act, training_speed));
	std::shared_ptr<clneural::NeuralNetworkLayer> N2(new clneural::FullFeedforwardLayer(84, 10, act, training_speed));
	clneural::NeuralNetwork n;
	n.addLayer(C1);
	n.addLayer(S2);
	n.addLayer(C3);
	n.addLayer(S4);
	n.addLayer(N1);
	n.addLayer(N2);


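	// Run all layers on the CPU device through OpenCL.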
	std::shared_ptr<OpenCLInterface> ocl = OpenCLInterface::getInstance();
	ocl->initialize(CL_DEVICE_TYPE_CPU);

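	// Online training over 60000 randomly drawn samples; dist accumulates the
	// per-sample error returned by trainNetwork() and is reported below as MDIST,
	// averaged over blocks of 1000 steps.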
	float dist = 0.0f;
	for (unsigned int i = 0; i < 60000; i++) {
		std::pair<std::vector<float>, uint8_t> trainelem = d.popRandomElementWithLabel();
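		// Build a one-hot target vector for the sample's digit label.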
		std::vector<float> desired(10, 0.0f);
		desired[trainelem.second] = 1.0f;
		dist += n.trainNetwork(trainelem.first, desired);
		std::vector<float> nout = n.getLastOutput();
		if ((i % 1000) == 0) {
			std::cout << "TIME: " << ((float) clock())/CLOCKS_PER_SEC << ", STEP:" << (i + 1) << ", MDIST: " << dist/1000.0f << ", OUT: (" << nout[0];
			for (unsigned int j = 1; j < nout.size(); j++) std::cout << "," << nout[j];
			std::cout << "), DESIRED: (" << desired[0];
			for (unsigned int j = 1; j < desired.size(); j++) std::cout << "," << desired[j];
			std::cout << ")" << std::endl;
			dist = 0.0f;
		}
	}
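	// Save the trained weights; verifyNetwork() is presumably an evaluation helper
	// defined elsewhere in the original source (not shown in this snippet).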
	n.saveToFile("conv_images1.net");
	verifyNetwork(n);
	return 0;
}