Example #1
0
#include <stdio.h>

int maxNum(int a, int b);	/* assumed helper: returns the larger value (see the sketch below) */

int main(){
	int a;
	int b;
	if (scanf("%d,%d", &a, &b) != 2)
		return 1;
	int c = maxNum(a, b);
	printf("max is %d\n", c);
	return 0;
}
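Examples #1 and #2 call a two-argument maxNum() that is never defined in the snippets (the maxNum in Examples #3 through #6 are unrelated uses of the same name). A minimal sketch, assuming it simply returns the larger of two ints:

int maxNum(int a, int b)
{
	/* hypothetical helper assumed by Examples #1 and #2: return the larger value */
	return (a > b) ? a : b;
}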
Example #2
0
#define MAX 100000	/* assumed upper bound on the length of the input string */

/* maxNum() is assumed to return the larger of two ints (see the sketch after Example #1). */
int longestValidParentheses(char* s)
{
	int stack[MAX];	/* indices of the '(' that are still unmatched */
	int i;
	int top;
	int maxLen;
	int curLen;
	int lastUnmatched;	/* index of the most recent unmatched ')' */

	if(NULL==s)
	{
		return 0;
	}
	
	i=0;
	top=0;
	curLen=0;
	maxLen=0;
	lastUnmatched=-1;
	while(s[i]!='\0')
	{
		if(s[i]=='(')
		{
			stack[top++] = i;	/* push the index of the '(' */
		}
		else
		{
			if(top>0)
			{
				top--;	/* this ')' matches the '(' on top of the stack */
				if(top==0)
				{
					/* everything after the last unmatched ')' is valid */
					curLen = i-lastUnmatched;
				}
				else
				{
					/* the valid run starts just after the deepest remaining '(' */
					curLen = i-stack[top-1];
				}
				
				maxLen = maxNum(maxLen,curLen);
			}
			else
			{
				lastUnmatched = i;	/* unmatched ')': a valid run can only start after it */
			}
		}
		i++;
	}
	
	return maxLen;	
}
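A small driver for Example #2, assuming the maxNum() sketch above and the MAX bound added with the function; for the fixed version the expected outputs are 2, 4, and 0:

#include <stdio.h>

int main(void)
{
	printf("%d\n", longestValidParentheses("(()"));	/* 2 */
	printf("%d\n", longestValidParentheses(")()())"));	/* 4 */
	printf("%d\n", longestValidParentheses(""));	/* 0 */
	return 0;
}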
Example #3
0
#include <iostream>
#include <string>
using namespace std;

string add1(string num);	// assumed helper: adds 1 to a decimal string (see the sketch below)

// Big-number version: the counter is stored as a decimal string.
int printMaxN(int n)
{
	if (n <= 0)
	{
		return 0;
	}
	
	string maxNum(n, ' ');	// start the counter at 0 (blank digits stand for leading zeros)
	maxNum[n - 1] = '1';	// the first number printed is 1
	while (maxNum != string(n, '9'))	// stop once every digit is 9
	{
		cout << maxNum << endl;
		maxNum = add1(maxNum);
	}
	
	cout << maxNum << endl;	// print the largest n-digit number, 99...9
	
	return 0;
}
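Example #3 relies on an add1() helper that is not shown. A minimal sketch, assuming the digits are stored most-significant-first and blanks stand for leading zeros (as in the initialization above); this body is a guess, not the original implementation:

#include <string>
using std::string;

// hypothetical helper for Example #3: add 1 to a decimal string whose
// leading zero digits are stored as blanks
string add1(string num)
{
	int i = (int)num.size() - 1;
	while (i >= 0)
	{
		char digit = (num[i] == ' ') ? '0' : num[i];	// treat a blank as 0
		if (digit < '9')
		{
			num[i] = digit + 1;	// no carry needed, done
			return num;
		}
		num[i] = '0';	// 9 + 1: write 0 and carry into the next digit
		i--;
	}
	return num;	// every digit was 9; printMaxN stops before this happens
}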
Example #4
0
int maxNum(int max, int min)
{
	int n=min;	/* remember the original smaller value; it is added to the recursive result */
	if(max==1&&min==1)
		return 0;
	if(max==0 || min==0)
		return 0;
	if(max-2>min)
		max=max-2;
	else
	{
		int temp=max;
		max=min;
		min=temp-2;
	}
	//printf("max :%d ; min :%d\n",max,min);
	return n+maxNum(max,min);
}
Example #5
0
#include <stdio.h>

/* driver for the recursive maxNum(max, min) defined in Example #4 */
int main()
{
	int m,n;
	int max,min;

	scanf("%d%d",&m,&n);
	if(m>n)
	{
		max=m;
		min=n;
	}
	else
	{
		max=n;
		min=m;
	}

	printf("%d\n",maxNum(max,min));
	return 0;
}
Example #6
0
// add new numbers at the head of the list
#include <stdio.h>

// NodePtr, makeNode(), printList(), and the list version of maxNum() are assumed
// to be defined elsewhere; a sketch of these helpers follows this example.
int main(){
  void printList(NodePtr);
  NodePtr makeNode(int);
  int n;
  NodePtr top, np;

  top = NULL;
  if (scanf("%d", &n) != 1) n = 0;
  while (n != 0){
    np = makeNode(n);                // create a new node containing n
    np -> next = top;                // set link of new node to first node
    top = np;                        // set top to point to new node
    if (scanf("%d", &n) != 1) n = 0;
  }
  printf("Below is the linked list in reverse order\n");
  printList(top);
  printf("This is the end of the list\n"); 
  printf("The max value of the list is: %d\n", maxNum(max));  
  
  
}
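Example #6 leaves the list type and its helpers undefined. A minimal sketch of NodePtr, makeNode(), printList(), and the single-argument maxNum() it calls; the struct layout and the field name num are assumptions, only next appears in the original:

#include <stdio.h>
#include <stdlib.h>

typedef struct node {
  int num;                           // stored value (field name assumed)
  struct node *next;
} Node, *NodePtr;

NodePtr makeNode(int n) {            // allocate a node holding n
  NodePtr np = malloc(sizeof(Node));
  if (np == NULL) exit(1);
  np -> num = n;
  np -> next = NULL;
  return np;
}

void printList(NodePtr top) {        // print each value, head first
  NodePtr p;
  for (p = top; p != NULL; p = p -> next)
    printf("%d\n", p -> num);
}

int maxNum(NodePtr top) {            // largest value in the list (0 if empty)
  int max = (top != NULL) ? top -> num : 0;
  NodePtr p;
  for (p = top; p != NULL; p = p -> next)
    if (p -> num > max) max = p -> num;
  return max;
}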
float UCBVHaarSingleStumpLearner::run()
{
    if ( UCBVHaarSingleStumpLearner::_numOfCalling == 0 ) {
        init();
    }

    UCBVHaarSingleStumpLearner::_numOfCalling++;
    //cout << "Num of iter:\t" << UCBVHaarSingleStumpLearner::_numOfCalling << " " << this->getTthSeriesElement( UCBVHaarSingleStumpLearner::_numOfCalling ) << flush << endl;
    const int numClasses = _pTrainingData->getNumClasses();

    // set the smoothing value to avoid numerical problems
    // when theta=0.
    setSmoothingVal( 1.0 / (float)_pTrainingData->getNumExamples() * 0.01 );

    vector<sRates> mu(numClasses); // The class-wise rates. See BaseLearner::sRates for more info.
    vector<float> tmpV(numClasses); // The class-wise votes/abstentions

    float tmpThreshold;
    float tmpAlpha;

    float bestEnergy = numeric_limits<float>::max();
    float tmpEnergy;

    HaarData* pHaarData = static_cast<HaarData*>(_pTrainingData);

    // get the whole data matrix
    //const vector<int*>& intImages = pHaarData->getIntImageVector();

    // The data matrix transformed into the feature's space
    vector< pair<int, float> > processedHaarData(_pTrainingData->getNumExamples());

    // I need to prepare both types of sampling

    StumpAlgorithm<float> sAlgo(numClasses);
    sAlgo.initSearchLoop(_pTrainingData);

    float halfTheta;
    if ( _abstention == ABST_REAL || _abstention == ABST_CLASSWISE )
        halfTheta = _theta/2.0;
    else
        halfTheta = 0;

    // The declared feature types
    vector<HaarFeature*>& loadedFeatures = pHaarData->getLoadedFeatures();

    // for every feature type
    vector<HaarFeature*>::iterator ftIt;
    //vector<HaarFeature*>::iterator maxftIt;

    vector<float> maxV( loadedFeatures.size() );
    vector<int> maxKey( loadedFeatures.size() );
    vector<int> maxNum( loadedFeatures.size() );

    // calculate the Bk,s,t of the randomly chosen features
    int key = getKeyOfMaximalElement();
    int featureIdx = (int) (key / 10);
    int featureType = (key % 10);

    //for (i = 0, ftIt = loadedFeatures.begin(); ftIt != loadedFeatures.end(); i++ ++ftIt)
    //*ftIt = loadedFeatures[ featureType ];

    // just for readability
    //HaarFeature* pCurrFeature = *ftIt;
    HaarFeature* pCurrFeature = loadedFeatures[ featureType ];
    if (_samplingType != ST_NO_SAMPLING)
        pCurrFeature->setAccessType(AT_RANDOM_SAMPLING);

    // Reset the iterator on the configurations. For random sampling
    // this clears the visited list
    pCurrFeature->loadConfigByNum( featureIdx );


    if (_verbose > 1)
        cout << "Learning type " << pCurrFeature->getName() << ".." << flush;

    // transform the data from intImages to the feature's space
    pCurrFeature->fillHaarData( _pTrainingData->getExamples(), processedHaarData );
    //pCurrFeature->fillHaarData(intImages, processedHaarData);

    // sort the examples in the new space by their coordinate
    sort( processedHaarData.begin(), processedHaarData.end(),
          nor_utils::comparePair<2, int, float, less<float> >() );

    // find the optimal threshold
    tmpThreshold = sAlgo.findSingleThresholdWithInit(processedHaarData.begin(),
                   processedHaarData.end(),
                   _pTrainingData, halfTheta, &mu, &tmpV);

    tmpEnergy = getEnergy(mu, tmpAlpha, tmpV);


    // Store it in the current weak hypothesis.
    // note: I don't really like having so many temp variables
    // but the alternative would be a structure, which would need
    // to be inheritable to make things more consistent. But this would
    // make it less flexible. Therefore, I am still undecided. This
    // might change!
    _alpha = tmpAlpha;
    _v = tmpV;

    // I need to save the configuration because it changes within the object
    _selectedConfig = pCurrFeature->getCurrentConfig();
    // I save the object because it contains the information about the type,
    // the name, etc.
    _pSelectedFeature = pCurrFeature;
    _threshold = tmpThreshold;

    bestEnergy = tmpEnergy;

    float edge = 0.0;
    for( vector<sRates>::iterator itR = mu.begin(); itR != mu.end(); itR++ ) edge += ( itR->rPls - itR->rMin );
    //need to set the X value
    updateKeys( key, edge * edge );

    if (!_pSelectedFeature)
    {
        cerr << "ERROR: No Haar Feature found. Something must be wrong!" << endl;
        exit(1);
    }
    else
    {
        if (_verbose > 1)
            cout << "Selected type: " << _pSelectedFeature->getName() << endl;
    }

    return bestEnergy;
}