Example #1
0
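// Scan the config string for layer blocks delimited by '$' ... '&' and build
// the hidden-layer configurations and the softmax settings from them.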
void Config::get_layers_config(string &str,
		SoftMax &SMR) {
	std::vector<string> layers;
	if (str.empty())
		return;
	size_t head = 0;
	size_t tail = 0;
	while (1) {
		if (head == str.length())
			break;
		if (str[head] == '$') {
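			// Advance tail to the terminating '&', or to the end of the string.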
			tail = head + 1;
			while (1) {
				if (tail == str.length())
					break;
				if (str[tail] == '&')
					break;
				++tail;
			}
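			// Copy the block including its '$' and '&' delimiters.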
			string sub = str.substr(head, tail - head + 1);
			if (sub[sub.length() - 1] == '&') {
				// Strip the trailing '&' and the leading '$'.
				sub.erase(sub.end() - 1);
				sub.erase(sub.begin());
				layers.push_back(sub);
			}
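			// Remove the consumed block from the config string.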
			str.erase(head, tail - head + 1);
		} else
			++head;
	}
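	// Interpret each extracted block as either a hidden layer or the softmax layer.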
	for (size_t i = 0; i < layers.size(); i++) {
		int type = get_word_type(layers[i], "LAYER");
		switch (type) {
		case 0: { // hidden layer
			int nn = get_word_int(layers[i], "NUM_HIDDEN_NEURONS");
			double wd = get_word_float(layers[i], "WEIGHT_DECAY");
			double dr = get_word_float(layers[i], "DROPOUT_RATE");
			HiddenConfigs.push_back(HiddenConfig(nn, wd, dr));
			break;
		}
		case 1: { // softmax layer
			int classnum = get_word_int(layers[i], "NUM_CLASSES");
			double wd = get_word_float(layers[i], "WEIGHT_DECAY");
			SMR.set_NumClasses(classnum);
			SMR.set_WeightDecay(wd);
			break;
		}
		}
	}
}
Example #2
0
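// Load the configuration file at `path`: strip comments and whitespace, parse
// the per-layer blocks and the global settings, then echo what was parsed.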
void Config::init(string path,
		SoftMax &SMR) {
	m_configStr = read_2_string(path);
	deleteComment();
	deleteSpace();
	get_layers_config(m_configStr, SMR);
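	// Read the global training settings.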
	use_log = get_word_bool(m_configStr, "USE_LOG");
	batch_size = get_word_int(m_configStr, "BATCH_SIZE");
	non_linearity = get_word_type(m_configStr, "NON_LINEARITY");
	training_epochs = get_word_int(m_configStr, "TRAINING_EPOCHS");
	lrate_w = get_word_float(m_configStr, "LRATE_W");
	lrate_b = get_word_float(m_configStr, "LRATE_B");
	iter_per_epo = get_word_int(m_configStr, "ITER_PER_EPO");
	ngram = get_word_int(m_configStr, "NGRAM");
	training_percent = get_word_float(m_configStr, "TRAINING_PERCENT");
	cout
			<< "****************************************************************************"
			<< endl
			<< "**                    READ CONFIG FILE COMPLETE                             "
			<< endl
			<< "****************************************************************************"
			<< endl << endl;

	for (size_t i = 0; i < HiddenConfigs.size(); i++) {
		cout << "***** hidden layer: " << i << " *****" << endl;
		cout << "NumHiddenNeurons = " << HiddenConfigs[i].get_NeuronNum()
				<< endl;
		cout << "WeightDecay = " << HiddenConfigs[i].get_WeightDecay() << endl;
		cout << "DropoutRate = " << HiddenConfigs[i].get_DropoutRate() << endl << endl;
	}
	cout << "***** softmax layer: *****" << endl;
	//    cout<<"NumClasses = "<<softmaxConfig.NumClasses<<endl;
	cout << "WeightDecay = " << SMR.get_WeightDecay() << endl << endl;
	cout << "***** general config *****" << endl;
	cout << "use_log = " << use_log << endl;
	cout << "batch size = " << batch_size << endl;
	cout << "non-linearity method = " << non_linearity << endl;
	cout << "training epochs = " << training_epochs << endl;
	cout << "learning rate for weight matrices = " << lrate_w << endl;
	cout << "learning rate for bias = " << lrate_b << endl;
	cout << "iteration per epoch = " << iter_per_epo << endl;
	cout << "ngram = " << ngram << endl;
	cout << "training percent = " << training_percent << endl;
	cout << endl;
}
Example #3
0
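// Add backed-off features for the word at `index`: the word id is paired with
// the rule id and with the rule's head symbol; unless simple_ is set, the same
// pairs are also emitted for the word's tag.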
void FeatureGenBackoff::backed_off_features(const Sentence &sentence,
                                            const AppliedRule &rule,
                                            int index,
                                            int extra,
                                            FeatureState *state) const {
    int word = get_word_int(lexicon_, sentence, index);

    int rule_ = rule.rule;
    int X = grammar_->head_symbol[rule.rule];

    // Word-level back-off features: (word, rule id) and (word, head symbol).
    state->inc(word, rule_, extra);
    state->inc(word, X, extra);

    // Unless running in simple mode, add the same features keyed by the tag.
    if (!simple_) {
        int tag = get_tag_int(lexicon_, sentence, index);
        state->inc(tag, rule_, extra);
        state->inc(tag, X, extra);
    }
}