Example no. 1
0
Mat pwd(Mat a, int b) {
	Mat r;
	r.init();
	for (; b; b >>= 1, a = a * a)
		if (b & 1)
			r = r * a;
	return r;
}
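The loop above is binary (square-and-multiply) exponentiation: it halves the exponent each iteration and multiplies the accumulator by the current power of a whenever the low bit is set, so it needs O(log b) multiplications. It assumes r.init() produces the identity matrix and that operator* is matrix multiplication. A minimal self-contained sketch of the same technique, using a hypothetical 2x2 integer matrix type (Mat2 and mat_pow are illustrative names, not part of the original code):

#include <array>

// Hypothetical 2x2 integer matrix, only to illustrate the technique.
struct Mat2 {
	std::array<std::array<long long, 2>, 2> v{};  // zero-initialized

	static Mat2 identity() {
		Mat2 m;
		m.v[0][0] = m.v[1][1] = 1;
		return m;
	}

	Mat2 operator*(const Mat2 &o) const {
		Mat2 r;
		for (int i = 0; i < 2; ++i)
			for (int j = 0; j < 2; ++j)
				for (int k = 0; k < 2; ++k)
					r.v[i][j] += v[i][k] * o.v[k][j];
		return r;
	}
};

// Square-and-multiply: O(log b) multiplications instead of b - 1.
Mat2 mat_pow(Mat2 a, int b) {
	Mat2 r = Mat2::identity();
	for (; b; b >>= 1, a = a * a)
		if (b & 1)
			r = r * a;
	return r;
}

As in the original loop, mat_pow(a, 0) returns the identity matrix.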
Example no. 2
0
void Mat::Sum(size_t dim, Mat &vect) const {
  
  if (dim == 1) {
    vect.init(1, mat_.size2(), 0);
    for (size_t i = 0; i < mat_.size1(); ++i) {
      for (size_t j = 0; j < mat_.size2(); ++j) {
        vect(0, j) += mat_(i, j);
      }
    }    
  } else if (dim == 2) {    
    vect.init(mat_.size1(), 1, 0);
    for (size_t i = 0; i < mat_.size1(); ++i) {
      for (size_t j = 0; j < mat_.size2(); ++j) {
        vect(i, 0) += mat_(i, j);
      }     
    }    
  } else {
    mexAssert(false, "In Mat::Sum the dimension parameter must be either 1 or 2");
  }  
}
Example no. 3
0
Mat operator ^ (Mat a, long long k) {
	Mat c;
	c.init(1);
	while (k)
	{
		if (k & 1)
			c = a * c;
		a = a*a;
		k >>= 1;
	}
	return c;
}
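This is the same square-and-multiply loop as Example no. 1, written as an operator and taking a long long exponent, so it also works for exponents beyond int range; c.init(1) presumably builds the identity matrix. A classic use of such an operator is computing Fibonacci numbers from powers of the companion matrix [[1, 1], [1, 0]]; a small sketch reusing the hypothetical Mat2/mat_pow from the block after Example no. 1:

// F(n) is the (0,1) entry of [[1,1],[1,0]]^n (hypothetical Mat2/mat_pow from above).
long long fibonacci(int n) {
	Mat2 q;
	q.v[0][0] = q.v[0][1] = q.v[1][0] = 1;  // companion matrix [[1,1],[1,0]]
	q.v[1][1] = 0;
	return mat_pow(q, n).v[0][1];           // e.g. fibonacci(10) == 55
}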
Example no. 4
0
Mat Sum(const Mat &a, size_t dim) {
  
  Mat vect;
  if (dim == 1) {    
    vect.init(1, a.size2_, 0);
    for (size_t i = 0; i < a.size1_; ++i) {
      for (size_t j = 0; j < a.size2_; ++j) {
        vect(0, j) += a(i, j);        
      }
    }    
  } else if (dim == 2) {    
    vect.init(a.size1_, 1, 0);
    for (size_t i = 0; i < a.size1_; ++i) {
      for (size_t j = 0; j < a.size2_; ++j) {
        vect(i, 0) += a(i, j);        
      }     
    }    
  } else {
    mexAssert(false, "In Mat Sum(a, dim) the dimension parameter must be either 1 or 2");
  }
  return vect;
}
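Examples no. 2 and no. 4 implement the same sum-along-dimension convention (dim == 1 collapses the rows into a single 1 x columns vector, dim == 2 collapses the columns into a single rows x 1 vector), once as a member filling an output argument and once as a free function returning the result. A minimal sketch of the same convention on plain nested vectors, with hypothetical names:

#include <cstddef>
#include <stdexcept>
#include <vector>

using Matrix = std::vector<std::vector<double>>;

// dim == 1: sum over rows -> 1 x cols; dim == 2: sum over columns -> rows x 1.
Matrix sum_dim(const Matrix &a, std::size_t dim) {
	const std::size_t rows = a.size();
	const std::size_t cols = rows ? a[0].size() : 0;
	if (dim == 1) {
		Matrix v(1, std::vector<double>(cols, 0.0));
		for (std::size_t i = 0; i < rows; ++i)
			for (std::size_t j = 0; j < cols; ++j)
				v[0][j] += a[i][j];
		return v;
	}
	if (dim == 2) {
		Matrix v(rows, std::vector<double>(1, 0.0));
		for (std::size_t i = 0; i < rows; ++i)
			for (std::size_t j = 0; j < cols; ++j)
				v[i][0] += a[i][j];
		return v;
	}
	throw std::invalid_argument("dim must be 1 or 2");
}

For a 2 x 3 matrix, sum_dim(a, 1) yields a 1 x 3 row of column sums and sum_dim(a, 2) a 2 x 1 column of row sums.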
Example no. 5
0
bool SpeechRec::ProcessOffline(data_format inpf, data_format outpf, void *inpSig, int sigNBytes, Mat<float> *inpMat, Mat<float> *outMat)
{
	assert((int)inpf < (int)outpf);
	assert(outMat || outpf == dfStrings);
	assert(inpMat || inpf == dfWaveform);

	Mat<float> *paramMat = 0;
	Mat<float> *posteriorsMat = 0;

	// waveform -> parameters
	int nFrames;
	if(inpf == dfWaveform)
	{
		if(!ConvertWaveformFormat(waveFormat, inpSig, sigNBytes, &waveform, &waveformLen))
			return false;

		nFrames = (waveformLen > actualParams->GetVectorSize() ? (waveformLen - actualParams->GetVectorSize()) / actualParams->GetStep() + 1 : 1);
				
		actualParams->AddWaveform(waveform, waveformLen);
			
		if(outpf == dfParams)
		{
			paramMat = outMat;
		}
		else
		{
			paramMat = new Mat<float>; 
			if(!paramMat)
			{
				MERROR("Insufficient memory\n");
				return false;
			}
		}
		if(actualParams->GetNParams() != paramMat->columns() || nFrames != paramMat->rows())
		   paramMat->init(nFrames, actualParams->GetNParams());

		int fr = 0;
		while(actualParams->GetFeatures(params))
		{				
			FrameBasedNormalization(params, actualParams->GetNParams());
			paramMat->set(fr, fr, 0, actualParams->GetNParams() - 1, params);
			fr++;
		}

		if(outpf == dfParams)
			return true;
	}

	// sentence based normalization
	if(inpf == dfWaveform || inpf == dfParams)
	{
		if(inpf == dfParams)
			paramMat = inpMat;

		if(paramMat->columns() < actualParams->GetNParams())
		{
			MERROR("Invalid dimensionality of parameter vectors\n");
			return false;
		}
		else if(paramMat->columns() > actualParams->GetNParams())
		{
			Mat<float> *tmpMat = new Mat<float>;
			tmpMat->init(paramMat->rows(), actualParams->GetNParams());
			tmpMat->copy(*paramMat, 0, paramMat->rows() - 1, 0, actualParams->GetNParams() - 1, 
                                                   0, paramMat->rows() - 1, 0, actualParams->GetNParams() - 1);
			delete paramMat;
			paramMat = tmpMat;
			inpMat = paramMat;
		}

		SentenceBasedNormalization(paramMat);
	}

	// parameters -> posteriors
	if(outpf == dfPosteriors && !mTrapsEnabled)
	{
		MERROR("The 'traps' module has to be enabled for generating posteriors\n");
		return false;
	}

	if((inpf == dfWaveform || inpf == dfParams) && mTrapsEnabled)
	{
		if(inpf == dfParams)
			paramMat = inpMat;

		if(outpf == dfPosteriors)
		{
			posteriorsMat = outMat;
		}
		else
		{
			posteriorsMat = new Mat<float>;
			if(!posteriorsMat)
			{
				if(inpf != dfParams)
					delete paramMat;
				MERROR("Insufficient memory\n");
				return false;
			}
		}

		nFrames = paramMat->rows();

		if(TR.GetNumOuts() != posteriorsMat->columns() || nFrames != posteriorsMat->rows())
			posteriorsMat->init(nFrames, TR.GetNumOuts());

		// first part - initialization
		int i;
		int trapShift = TR.GetTrapShift();
		int nparams = actualParams->GetNParams();
		if(nFrames >= trapShift)
		{
			TR.CalcFeaturesBunched((float *)paramMat->getMem(), posteriors, trapShift, false);
		}
		else
		{
			sCopy(nFrames * paramMat->columns(), params, (float *)paramMat->getMem());
			for(i = nFrames; i < TR.GetTrapShift(); i++)
				paramMat->extr(nFrames - 1, nFrames - 1, 0, nparams - 1, params + i * nparams);
			TR.CalcFeaturesBunched(params, posteriors, trapShift, false);
		}

		// second part - main block
		if(nFrames > trapShift)
			TR.CalcFeaturesBunched((float *)paramMat->getMem() + trapShift * nparams, (float *)posteriorsMat->getMem(), nFrames - trapShift);

		// last part - termination
		int n = (nFrames > trapShift ? trapShift : nFrames);
		for(i = 0; i < n; i++)
			paramMat->extr(nFrames - 1, nFrames - 1, 0, nparams - 1, params + i * nparams);
		TR.CalcFeaturesBunched(params, (float *)posteriorsMat->getMem() + (nFrames - n) * posteriorsMat->columns(), n);

		// softening function: posteriors -> posteriors/log. posteriors
		int nPost = posteriorsMat->columns();
		for(i = 0; i < nFrames; i++)
		{
			posteriorsMat->extr(i, i, 0, nPost - 1, posteriors);
			int j;
			for(j = 0; j < nPost; j++)
				posteriors[j] = (*postSoftFunc)(posteriors[j], postSoftArg1, postSoftArg2, postSoftArg3);
			posteriorsMat->set(i, i, 0, nPost - 1, posteriors);
		}

		if(inpf != dfParams)
			delete paramMat;

		if(outpf == dfPosteriors)
			return true;
	}

	// posteriors -> strings
	if(inpf == dfWaveform || inpf == dfParams || inpf == dfPosteriors)
	{
		if(inpf == dfPosteriors || (inpf == dfParams && !mTrapsEnabled))
			posteriorsMat = inpMat;

		nFrames = posteriorsMat->rows();
		int nPost = posteriorsMat->columns(); // TR.GetNumOuts()

		// softening function: posteriors -> log. posteriors
		int i;
		for(i = 0; i < nFrames; i++)
		{
			posteriorsMat->extr(i, i, 0, nPost - 1, posteriors);
			int j;
			for(j = 0; j < nPost; j++)
				posteriors[j] = (*decSoftFunc)(posteriors[j], decSoftArg1, decSoftArg2, decSoftArg3);
			posteriorsMat->set(i, i, 0, nPost - 1, posteriors);
		}

		// log posteriors -> strings
		for(i = 0; i <  nFrames; i++)
		{
			posteriorsMat->extr(i, i, 0, nPost - 1, posteriors);
			DE->ProcessFrame(posteriors);
		}

		if(inpf != dfPosteriors)
			delete posteriorsMat;
	}

	return true;
}
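ProcessOffline chains the stages waveform -> parameters -> posteriors -> strings, entering at whichever stage inpf selects and stopping once the requested outpf has been produced; intermediate matrices are allocated only when they are not the caller's output matrix. Two of its building blocks are easy to isolate: the sliding-window frame count used on the waveform stage and the per-frame application of a softening function to the posteriors. A small sketch with hypothetical names (the real postSoftFunc/decSoftFunc take three extra arguments):

#include <cstddef>
#include <vector>

// Sliding-window frame count: at least one frame is always produced.
int frame_count(int samples, int windowSize, int step) {
	return samples > windowSize ? (samples - windowSize) / step + 1 : 1;
}

// Apply a softening function to every posterior of one frame, as the
// postSoftFunc/decSoftFunc loops above do row by row.
void soften(std::vector<float> &posteriors, float (*f)(float)) {
	for (std::size_t i = 0; i < posteriors.size(); ++i)
		posteriors[i] = f(posteriors[i]);
}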
Example no. 6
0
void SpeechRec::SentenceBasedNormalization(Mat<float> *mat)
{
//        mat->saveAscii("c:\\before");

	// sentence mean and variance normalization
	bool mean_norm = C.GetBool("offlinenorm", "sent_mean_norm");
	bool var_norm = C.GetBool("offlinenorm", "sent_var_norm");

	if(mean_norm || var_norm)
	{
		Mat<float> mean;

		// mean calculation
		mat->sumColumns(mean);
		mean.div((float)mat->rows());

		// mean norm
		int i, j;
		for(i = 0; i < mat->columns(); i++)
			mat->sub(0, mat->rows() - 1, i, i, mean.get(0, i));

		if(var_norm)
		{
			// variance calculation
			Mat<float> var;
			var.init(mean.rows(), mean.columns());
			var.set(0.0f);
			for(i = 0; i < mat->columns(); i++)
			{
				for(j = 0; j < mat->rows(); j++)
				{
					float v = mat->get(j, i);
					var.add(0, i, v * v);
				}
			}
			var.div((float)mat->rows());
			var.sqrt();

			// lower threshold
			float lowerThr = C.GetFloat("melbanks", "sent_std_thr");
			var.lowerLimit(lowerThr);

			// variance norm
			for(i = 0; i < mat->columns(); i++)
				mat->mul(0, mat->rows() - 1, i, i, 1.0f / var.get(0, i));

			// add mean if not mean norm
			if(!mean_norm)
			{
				for(i = 0; i < mat->columns(); i++)
					mat->add(0, mat->rows() - 1, i, i, mean.get(0, i));
			}
		}
	}

	// sentence maximum normalization
	bool max_norm = C.GetBool("offlinenorm", "sent_max_norm");
	bool channel_max_norm = C.GetBool("offlinenorm", "sent_chmax_norm");

	if(max_norm || channel_max_norm)
	{
		Mat<float> max;
		max.init(1, mat->columns());
		max.set(-9999.9f);
		int i, j;

		for(i = 0; i < mat->columns(); i++)
		{
			for(j = 0; j < mat->rows(); j++)
			{
				float v = mat->get(j, i);
				if(v > max.get(0, i))
				{
				   max.set(0, i, v);
				}
			}
		}

		// global sentence maximum normalization
		if(max_norm)
		{
			float global_max = -9999.9f;
			for(i = 0; i < max.columns(); i++)
			{
				if(max.get(0, i) > global_max)
				{
					global_max = max.get(0, i);
				}
			}
			// set every column to the global maximum only after the scan is complete
			max.set(global_max);
		}

		for(i = 0; i < mat->columns(); i++)
		{
			for(j = 0; j < mat->rows(); j++)
			{
				mat->set(j, i, mat->get(j, i) - max.get(0, i));
			}
		}
	}
}
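SentenceBasedNormalization subtracts each column's per-sentence mean and, optionally, divides by its standard deviation clamped from below by sent_std_thr; because the mean has already been subtracted, summing v * v directly yields the centred second moment. A minimal sketch of that mean/variance step on a single feature column, with hypothetical names:

#include <algorithm>
#include <cmath>
#include <vector>

// Mean/variance-normalize one feature column in place; stdThr is the lower
// bound applied to the standard deviation, mirroring sent_std_thr above.
void normalize_column(std::vector<float> &col, float stdThr) {
	if (col.empty()) return;
	float mean = 0.0f;
	for (float v : col) mean += v;
	mean /= col.size();

	float var = 0.0f;
	for (float &v : col) { v -= mean; var += v * v; }  // centre, then accumulate
	var /= col.size();
	float sd = std::max(std::sqrt(var), stdThr);

	for (float &v : col) v /= sd;
}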