Example #1
bool
CVML::outputSections(CFile *file)
{
  reset();

  if (! outputHeader(file)) {
    std::cerr << "Not a valid VML file" << std::endl;
    return false;
  }

  if (! outputStringTable(file)) {
    std::cerr << "Bad String Table" << std::endl;
    return false;
  }

  if (! outputDebug(file)) {
    std::cerr << "Bad Debug Section" << std::endl;
    return false;
  }

  if (! outputData(file)) {
    std::cerr << "Bad Data Section" << std::endl;
    return false;
  }

  if (! outputInstructions(file)) {
    std::cerr << "Bad Code Section" << std::endl;
    return false;
  }

  return true;
}
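
The next two snippets appear to come from a waifu2x AviSynth filter and call an outputDebug overload that takes a lambda filling a std::ostringstream. That helper is not shown on this page; the following is only a minimal sketch of what it might look like, assuming it simply forwards the formatted text to the debugger output (OutputDebugStringA on Windows, stderr elsewhere):

#include <cstdio>
#include <functional>
#include <sstream>
#ifdef _WIN32
#include <windows.h>
#endif

// Hypothetical helper matching the call sites below: the caller fills an
// ostringstream and the helper forwards the resulting text to the debug output.
static void outputDebug(const std::function<void(std::ostringstream&)>& build) {
	std::ostringstream s;
	build(s);
#ifdef _WIN32
	OutputDebugStringA(s.str().c_str());
#else
	std::fputs(s.str().c_str(), stderr);
#endif
}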
bool filterWithModels(std::vector<std::unique_ptr<w2xc::Model>>& models, cv::Mat& srcImgY, cv::Mat& dstImgY) {
	// Ping-pong buffers: each model reads from inputPlanes and writes to outputPlanes.
	auto inputPlanes  = std::make_unique<std::vector<cv::Mat>>();
	auto outputPlanes = std::make_unique<std::vector<cv::Mat>>();

	inputPlanes->push_back(srcImgY);

	for (std::size_t index = 0; index < models.size(); index++) {
		outputDebug([&](std::ostringstream& s) {
			s << "Waifu2x Iteration #" << (index + 1) << "..." << std::endl;
		});

		if (!models[index]->filter(*inputPlanes, *outputPlanes)) {
			return false;
		}

		// The output of this model becomes the input of the next one.
		if (index != models.size() - 1) {
			inputPlanes = std::move(outputPlanes);
			outputPlanes = std::make_unique<std::vector<cv::Mat>>();
		}
	}

	outputPlanes->at(0).copyTo(dstImgY);
	return true;
}
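
A hypothetical call site for filterWithModels, assuming the model vector has already been loaded elsewhere (the loading API is not part of these snippets) and that the input plane is single-channel CV_32F in the [0, 1] range:

#include <iostream>
#include <memory>
#include <vector>
#include <opencv2/opencv.hpp>

std::vector<std::unique_ptr<w2xc::Model>> modelsNR;  // assumed to be loaded elsewhere

void runNoiseReduction(const char *inPath, const char *outPath) {
	cv::Mat srcY = cv::imread(inPath, cv::IMREAD_GRAYSCALE);
	srcY.convertTo(srcY, CV_32F, 1.0 / 255.0);       // models expect float planes in [0, 1]

	cv::Mat dstY;
	if (!filterWithModels(modelsNR, srcY, dstY)) {
		std::cerr << "filterWithModels failed" << std::endl;
		return;
	}

	dstY.convertTo(dstY, CV_8U, 255.0);              // back to 8-bit for writing
	cv::imwrite(outPath, dstY);
}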
Example #3
void CTalkMasterConsoleDlg::releaseCurrentSettings()
{
	struct _iComQueueList *list = m_pIcomQueueList;
	struct _itemData *item = m_pItemData;

	outputDebug(_T("releaseCurrentSettings: Deleting Calls Waiting Information"));

	if( m_pCallWaitingItem )							// Is one already selected?
	{
		if( m_pCallWaitingItem->hCallQueue )			// If we have an archive open
			audioArchive.Close(m_pCallWaitingItem->hCallQueue);

//		m_pCallWaitingItem->hCallQueue = NULL;			// Should we be clearing this out???

		m_pCallWaitingItem = NULL;						// No more item
	}

	clearCallsWaitingItemData();
	m_listCallsWaiting.DeleteAllItems();

	outputDebug(_T("releaseCurrentSettings: Deleting Queue Information"));

	while(list)										// Get rid of the queue information
	{
		m_pIcomQueueList = list->next;
		free(list);
		list = m_pIcomQueueList;
	}

	outputDebug(_T("releaseCurrentSettings: Deleting Intercom Information"));

	Sleep(200);										// Give everything a chance to settle down

	deleteIcoms(TRUE);								// Clear the list

	while(item)
	{
		m_pItemData = item->next;
		free(item);
		item = m_pItemData;
	}

	outputDebug(_T("releaseCurrentSettings: Deleting Server Group Information"));

	deleteGroups();
	deleteMessages();
}
PVideoFrame Waifu2xVideoFilter::GetFrame(int n, IScriptEnvironment* env) {
	int percent = (int)((n / (double)vi.num_frames) * 100);
	outputDebug([&](std::ostringstream& s) {
		s << "Waifu2x GetFrame Starting: " << n << "/" << vi.num_frames << "(" << percent << "%)";
	});

	PVideoFrame src = child->GetFrame(n, env);

	// Assume YV12, YV16 or YV24 (planar formats with chroma).
	// Process the Y plane first.
	cv::Mat yImg(src->GetHeight(PLANAR_Y), src->GetRowSize(PLANAR_Y), CV_8U, (void *)src->GetReadPtr(PLANAR_Y), src->GetPitch(PLANAR_Y));
	yImg.convertTo(yImg, CV_32F, 1.0 / 255.0);

	if (this->nrLevel > 0) {
		OutputDebugStringA("Waifu2x NR Start.");

		if (!filterWithModels(this->modelsNR, yImg, yImg)) {
			env->ThrowError("Waifu2x NR Failed.");
			return src;
		}

		OutputDebugStringA("Waifu2x NR Finished.");
	}

	if (this->enableScaling) {
		OutputDebugStringA("Waifu2x Scaling Start.");

		int curRowSize = src->GetRowSize(PLANAR_Y);
		int curHeight = src->GetHeight(PLANAR_Y);
		for (int i = 0; i < iterTimesTwiceScaling; i++) {
			curRowSize *= 2;
			curHeight *= 2;

			cv::resize(yImg, yImg, cv::Size(curRowSize, curHeight), 0, 0, cv::INTER_NEAREST);

			if (!filterWithModels(this->modelsScale, yImg, yImg)) {
				env->ThrowError("Waifu2x filtering failed.");
				return src;
			}
		}

		OutputDebugStringA("Waifu2x Scaling Finished.");
	}
	yImg.convertTo(yImg, CV_8U, 255.0);

	// Finally process U, V
	cv::Mat uImg(src->GetHeight(PLANAR_U), src->GetRowSize(PLANAR_U), CV_8U, (void *)src->GetReadPtr(PLANAR_U), src->GetPitch(PLANAR_U));
	cv::Mat vImg(src->GetHeight(PLANAR_V), src->GetRowSize(PLANAR_V), CV_8U, (void *)src->GetReadPtr(PLANAR_V), src->GetPitch(PLANAR_V));
	if (this->enableScaling) {
		// U and V are only resized with INTER_CUBIC (no model filtering).
		cv::resize(uImg, uImg, cv::Size(uImg.cols * this->scaleRatioAdjusted, uImg.rows * this->scaleRatioAdjusted), 0, 0, cv::INTER_CUBIC);
		cv::resize(vImg, vImg, cv::Size(vImg.cols * this->scaleRatioAdjusted, vImg.rows * this->scaleRatioAdjusted), 0, 0, cv::INTER_CUBIC);
	}

	auto dst = env->NewVideoFrame(vi);
	env->BitBlt(dst->GetWritePtr(PLANAR_Y), dst->GetPitch(PLANAR_Y),
		yImg.data, yImg.step, yImg.cols, yImg.rows);
	env->BitBlt(dst->GetWritePtr(PLANAR_U), dst->GetPitch(PLANAR_U),
		uImg.data, uImg.step, uImg.cols, uImg.rows);
	env->BitBlt(dst->GetWritePtr(PLANAR_V), dst->GetPitch(PLANAR_V),
		vImg.data, vImg.step, vImg.cols, vImg.rows);

	OutputDebugStringA("Waifu2x GetFrame Finished.");

	return dst;
}
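
GetFrame relies on two members that are not shown here: iterTimesTwiceScaling (how many times the Y plane is doubled) and scaleRatioAdjusted (the chroma resize factor). Purely as an assumption, since the filter's constructor is not part of this snippet, one plausible way to derive both from a user-supplied scale ratio is to round it up to the next power of two:

#include <cmath>

// Hypothetical derivation: 2x-model passes can only double the image, so a
// requested ratio is rounded up to the nearest power of two and the Y plane
// is doubled that many times; chroma is resized once by the adjusted ratio.
void computeScalingSteps(double scaleRatio, int &iterTimesTwiceScaling, double &scaleRatioAdjusted) {
	iterTimesTwiceScaling = (int)std::ceil(std::log2(scaleRatio));
	scaleRatioAdjusted = std::pow(2.0, iterTimesTwiceScaling);
}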