// Attempt to move keyboard focus to a child of 'root', honoring TabIndex order,
// tab direction (forward/reverse) and the KeyboardNavigationMode of 'root'.
// Returns true if something was focused on this visual level, false otherwise.
bool TabNavigationWalker::FocusChild ()
{
	UIElement *child;
	bool child_is_control;
	int current_index = -1;

	// Add each branch of the visual tree to the array and then sort them
	// based on the TabIndex of the first Control in each branch
	VisualTreeWalker child_walker (root);
	while ((child = child_walker.Step ()))
		g_ptr_array_add (tab_sorted, child);

	if (tab_sorted->len > 1) {
		Sort (tab_sorted, types);
		if (ReverseTab ()) {
			// Reverse-tabbing walks the sorted children backwards; build a
			// reversed copy so the index arithmetic below stays forward-only.
			GPtrArray *reverse = g_ptr_array_sized_new (tab_sorted->len);
			for (int i = tab_sorted->len - 1; i >= 0; i--)
				g_ptr_array_add (reverse, tab_sorted->pdata [i]);
			g_ptr_array_free (tab_sorted, true);
			tab_sorted = reverse;
		}
	}

	// Find the index of the currently selected node so we know which node to
	// tab to next
	for (uint i = 0; i < tab_sorted->len; i++)
		if (tab_sorted->pdata [i] == current)
			current_index = i;

	// If a child of the root element is Focused and we're forward-tabbing, it means we should
	// skip the entire subtree and go to the next 'root'. If we're reverse-tabbing it means we
	// should Focus the root.
	if (current_index != -1 && GetActiveNavigationMode (root, types) == KeyboardNavigationModeOnce) {
		// If we're tabbing backwards and a child of this control is currently Focused, we
		// should focus this control.
		if (ReverseTab () && types->IsSubclassOf (root->GetObjectType (), Type::CONTROL))
			return TabTo ((Control *)root);
		return false;
	}

	if (tab_sorted->len > 0) {
		// If the currently selected element was found at index 'i' we need to tab
		// to the *next* index. If the currently selected item was not here, we
		// need to start at index 0.
		for (unsigned int i = 0; i < tab_sorted->len; i++) {
			// If we are not cycling, it means we've tabbed to the last element of
			// this node and so should stop and let the caller try the next level up.
			if ((i + current_index + 1) == tab_sorted->len && GetActiveNavigationMode (root, types) != KeyboardNavigationModeCycle)
				break;

			// Wrap around with the modulo so a Cycle-mode container loops back to index 0.
			child = (UIElement *) tab_sorted->pdata [(i + current_index + 1) % tab_sorted->len];
			child_is_control = types->IsSubclassOf (child->GetObjectType (), Type::CONTROL);
			if (child_is_control && !((Control *)child)->GetIsEnabled ())
				continue;

			// When tabbing backwards, we recurse all children *before* attempting to select this node
			if (ReverseTab () && WalkChildren (child))
				return true;
			if (child_is_control && TabTo ((Control *)child))
				return true;
			if (ForwardTab () && WalkChildren (child))
				return true;
		}
	}

	// If we're tabbing backwards and a child of this control is currently Focused, we
	// should focus this control.
	if (current_index != -1 && ReverseTab ()) {
		if (types->IsSubclassOf (root->GetObjectType (), Type::CONTROL))
			return TabTo ((Control *)root);
	}

	// Nothing could be tabbed to on this visual level
	return false;
}
// NOTE(review): stray closing brace — presumably ends a definition that begins
// before this chunk; left untouched.
}

// Build a test array (size scaled by setSizeModifier), run the currently
// configured sortMethod on it, and return {original, sorted} rendered as strings.
array<String^>^ Manager::runSort(int setSizeModifier)
{
	array<int>^ originalSet = af->MakeArray(setSizeModifier);
	// NOTE(review): "orginal" typo is preserved — it is a local name only.
	String^ orginalSetStr = arrayToString(originalSet);
	array<int>^ sortedSet = sortMethod(originalSet);
	String^ sortedSetStr = arrayToString(sortedSet);
	array<String^>^ result = { orginalSetStr, sortedSetStr };
	return result;
}

// Run the benchmark suite: for each sort algorithm, write a section header to
// the stats file and execute the test harness with a delegate bound to it.
// NOTE(review): this definition is truncated in the visible chunk.
void Manager::RunTests(String^ fileName)
{
	Statistics^ stats = gcnew Statistics();
	Sort^ sorter = gcnew Sort(stats);
	stats->OpenFile(fileName);
	stats->WriteToFile("Bubble Sort");
	SortMethod^ bubble = gcnew SortMethod(sorter, &Sort::Bubble);
	RunTest(bubble, stats);
	stats->WriteToFile("Insertion Sort");
	SortMethod^ insert = gcnew SortMethod(sorter, &Sort::Insertion);
	RunTest(insert, stats);
	stats->WriteToFile("Merge Sort");
	SortMethod^ merge = gcnew SortMethod(sorter, &Sort::Merge);
	RunTest(merge, stats);
	stats->WriteToFile("Selection Sort");
// Re-sort the technique entries in place; the ordering criterion lives in
// CompareTechniqueEntries (declared elsewhere — confirm its sort key there).
void Material::SortTechniques() { Sort(techniques_.Begin(), techniques_.End(), CompareTechniqueEntries); }
/* Sort an Rlist numerically (real-valued item comparison).
   Delegates to the generic linked-list sorter; no comparator context needed. */
Rlist *RealSortRListNames(Rlist *list)
{
    Rlist *sorted = Sort(list, RlistItemRealLess, RlistGetNext, RlistPutNext, NULL);
    return sorted;
}
/* Sort an Rlist by MAC-address ordering.
   Thin wrapper over the generic list sorter; no comparator context needed. */
Rlist *MACSortRListNames(Rlist *list)
{
    Rlist *sorted = Sort(list, RlistItemMACLess, RlistGetNext, RlistPutNext, NULL);
    return sorted;
}
/* Sort an Item list by counter using the "More" predicate (i.e. highest
   counters first). Delegates to the generic list sorter. */
Item *SortItemListCounters(Item *list)
{
    Item *sorted = Sort(list, ItemCounterMore, ItemGetNext, ItemPutNext, NULL);
    return sorted;
}
/* Sort an Rlist with a caller-supplied comparison function.
   CompareItems is forwarded as the context argument so that
   RlistCustomItemLess can invoke it for each pair. */
Rlist *SortRlist(Rlist *list, int (*CompareItems) ())
{
    Rlist *sorted = Sort(list, RlistCustomItemLess, RlistGetNext, RlistPutNext, CompareItems);
    return sorted;
}
int KTgaManager::FormateSaveData() { int nResult = false; int nRetCode = false; KTgaBlock *pBlock = NULL; KTGABLOCKSAVEDATA SaveData; int nCurrentSaveIndex = 0; int nSaveTgaWidth = 0; int nSaveTgaHeight = 0; RECT rcSaveBuffer; ClearSaveData(); Sort(SORT_TO_SAVE); GetSaveArea(rcSaveBuffer); nSaveTgaWidth = rcSaveBuffer.right; nSaveTgaHeight = rcSaveBuffer.bottom; KSaveManager::GetSelf().GetCorrectWidthAndHeight(nSaveTgaWidth, nSaveTgaHeight); m_SaveTga.Create(nSaveTgaWidth, nSaveTgaHeight); for (KTgaBlockArray::iterator it = m_aTgaBlock.begin(); it != m_aTgaBlock.end(); ++it) { pBlock = (*it); ASSERT(pBlock); if (!(pBlock->IsIndexChangeable())) { int nIndex = pBlock->GetOriginalIndex(); ASSERT(nIndex >= nCurrentSaveIndex); for (; nCurrentSaveIndex < nIndex; ++nCurrentSaveIndex) { SaveData.szName[0] = _T('\0'); SaveData.nLeft = 0; SaveData.nTop = 0; SaveData.nWidth = 0; SaveData.nHeight = 0; m_aSaveData.push_back(SaveData); } if (pBlock->IsPosHasBeenSeted()) { pBlock->GetName(SaveData.szName, sizeof(SaveData.szName) / sizeof(TCHAR)); SaveData.nLeft = pBlock->GetLeft() - rcSaveBuffer.left; ASSERT(SaveData.nLeft >= 0); SaveData.nTop = pBlock->GetTop() - rcSaveBuffer.top; ASSERT(SaveData.nTop >= 0); SaveData.nWidth = pBlock->GetWidth(); SaveData.nHeight = pBlock->GetHeight(); m_aSaveData.push_back(SaveData); nRetCode = AppendSaveTgaData(pBlock, SaveData.nLeft, SaveData.nTop); KG_PROCESS_ERROR(nRetCode); } else { SaveData.szName[0] = _T('\0'); SaveData.nLeft = 0; SaveData.nTop = 0; SaveData.nWidth = 0; SaveData.nHeight = 0; m_aSaveData.push_back(SaveData); } ++nCurrentSaveIndex; continue; } if (pBlock->IsPosHasBeenSeted()) { pBlock->GetName(SaveData.szName, sizeof(SaveData.szName) / sizeof(TCHAR)); SaveData.nLeft = pBlock->GetLeft() - rcSaveBuffer.left; ASSERT(SaveData.nLeft >= 0); SaveData.nTop = pBlock->GetTop() - rcSaveBuffer.top; ASSERT(SaveData.nTop >= 0); SaveData.nWidth = pBlock->GetWidth(); SaveData.nHeight = pBlock->GetHeight(); m_aSaveData.push_back(SaveData); 
nRetCode = AppendSaveTgaData(pBlock, SaveData.nLeft, SaveData.nTop); KG_PROCESS_ERROR(nRetCode); ++nCurrentSaveIndex; } } m_nSaveWidth = rcSaveBuffer.right; m_nSaveHeight = rcSaveBuffer.bottom; nResult = true; Exit0: return true; }
// Auto-place every unpositioned block on the desired canvas without overlaps:
// blocks are scanned top-to-bottom, and on collision the candidate slides
// right past the colliding block (restarting the overlap scan each time).
// Returns true if every block could be placed, false otherwise.
int KTgaManager::AutoMakeupNothingDanger()
{
	int nResult = false;
	KTgaBlock *pBlockSelf = NULL;
	KTgaBlock *pBlockPosed = NULL;
	RECT rcSelf;
	RECT rcPosed;
	int nSelfWidth = 0;
	int nSelfHeight = 0;
	int nCount = (int)m_aTgaBlock.size();
	int nDesiredWidth = KShow::GetSelf().GetDesiredWidth();
	int nDesiredHeight = KShow::GetSelf().GetDesiredHeight();

	Sort(SORT_TO_AUTO_MAKE_SIZE);
	for (int nIndex = 0; nIndex < nCount; ++nIndex)
	{
		pBlockSelf = m_aTgaBlock[nIndex];
		ASSERT(pBlockSelf);
		nSelfWidth = pBlockSelf->GetWidth();
		nSelfHeight = pBlockSelf->GetHeight();

		// Already placed (e.g. fixed beforehand) — skip.
		if (pBlockSelf->IsPosHasBeenSeted())
			continue;

		pBlockSelf->SetLeft(0);
		// Try each vertical position (snapped by g_GetCorrectPosition) in turn.
		for (int i = 0; i <= nDesiredHeight - nSelfHeight; ++i)
		{
			i = g_GetCorrectPosition(i);
			pBlockSelf->SetTop(i);
			//pBlockSelf->GetByRect(rcSelf);
			for (int nIndexSeted = 0; nIndexSeted < nCount; ++nIndexSeted)
			{
				pBlockPosed = m_aTgaBlock[nIndexSeted];
				ASSERT(pBlockPosed);
				// Blocks are sorted so the first unplaced one marks the end of
				// the placed set: no more collisions possible -> placement OK.
				if (!pBlockPosed->IsPosHasBeenSeted())
				{
					pBlockSelf->SetPosHasBeenSeted();
					break;
				}
				pBlockSelf->GetByRect(rcSelf);
				pBlockPosed->GetByRect(rcPosed);
				if (!g_IntersectRect(rcSelf, rcPosed))
					continue;
				// Collision: if sliding right would overflow the canvas width,
				// reset to the left edge and try the next row.
				if (rcPosed.right + FRAME_DISTANCE * 2 + nSelfWidth > nDesiredWidth)
				{
					pBlockSelf->SetLeft(0);
					break;
				}
				pBlockSelf->SetLeft(g_GetCorrectPosition(rcPosed.right + FRAME_DISTANCE * 2));
				// Moved — restart the overlap scan from the first block
				// (loop increment brings nIndexSeted back to 0).
				nIndexSeted = -1;
			}
			if (pBlockSelf->IsPosHasBeenSeted())
				break;
		}
		// Placement failed for this block -> bail out with nResult == false.
		KG_PROCESS_ERROR(pBlockSelf->IsPosHasBeenSeted());
	}

	nResult = true;
Exit0:
	return nResult;
}
//-------------------------------------------------------------------------------------- //! Update the list of devices when a device changes //-------------------------------------------------------------------------------------- void tNDP2kDevicePage::OnDeviceChanged(const tN2kName &N2kName) { tNDP2kDevice *pDevice; if(tGlobal<tNDP2k>::Instance()->DeviceManager().NameToDevice(N2kName, &pDevice)) { if(pDevice->IsAlive()) { if (m_DeviceList.contains(pDevice)) { // Search for device in table for (int row=0; row < m_pTableWidget->rowCount(); ++row) { if (m_pTableWidget->item(row, m_NameColumn)->text() == pDevice->HexName()) { rowChanged(row, pDevice); if (m_SortOption >= 0) { if ( m_pTableWidget->IsScrolling() == false ) { Sort(); } } break; } } } else { // Add the device if missing from the list m_DeviceList.prepend(pDevice); // Insert device at the beginning of the table (they are sorted anyhow) insertDevice(pDevice, 0); if( pDevice->ProductCode() == N2K_PRESSURE_ID ) { tPressureSensorProxy proxy( pDevice->N2kName() ); proxy.RequestPressureConfiguration( ); } if ( m_pTableWidget->IsScrolling() == false ) { Sort(); //always update the selection, even if the sortOption is < 0 } } } else // Device IsALive == false { if (m_DeviceList.contains(pDevice)) // Device Lost { // Search for device in table for (int row=0; row < m_pTableWidget->rowCount(); ++row) { if (m_pTableWidget->item(row, m_NameColumn)->text() == pDevice->HexName()) { m_pTableWidget->removeRow(row); m_DeviceList.removeOne(pDevice); break; } } } } } }
/**
 * @brief Load arrays with all directories & files in specified dir
 * @param [in]  sDir           Directory to enumerate.
 * @param [out] dirs           Receives the subdirectories found, sorted.
 * @param [out] files          Receives the files found, sorted.
 * @param [in]  casesensitive  Whether the sort comparison is case-sensitive.
 */
void LoadAndSortFiles(LPCTSTR sDir, DirItemArray * dirs, DirItemArray * files, bool casesensitive)
{
	LoadFiles(sDir, dirs, files);
	Sort(dirs, casesensitive);
	Sort(files, casesensitive);
}
// Menu/command handler: sort the playlist with key 2 — presumably the
// duration column; verify the key mapping inside Sort().
void Playlist::OnSortDuration() { Sort(2); }
// Menu/command handler: sort the playlist with key 1 — presumably the
// title column; verify the key mapping inside Sort().
void Playlist::OnSortTitle() { Sort(1); }
// Menu/command handler: sort the playlist with key 0 — presumably the
// file-name column; verify the key mapping inside Sort().
void Playlist::OnSortFile() { Sort(0); }
/* Sort an Item list alphabetically by name (ascending, "Less" predicate).
   Thin wrapper over the generic linked-list sorter. */
Item *SortItemListNames(Item *list)
{
    Item *sorted = Sort(list, ItemNameLess, ItemGetNext, ItemPutNext, NULL);
    return sorted;
}
// Insert a new score: overwrite the last slot, re-sort so it lands in rank
// order, and mark the table dirty for write-back.
// NOTE(review): assumes `scores` holds exactly 10 entries and that slot 9 is
// the lowest-ranked one after Sort() — confirm against the declaration.
void ScoreFile::Insert(const string& name, int score) { scores[9] = ScoreEntry(name.c_str(), score); Sort(); needsWrite = true; }
/* Sort an Item list by class membership (ascending, "Less" predicate);
   delegates to the generic linked-list sorter with no comparator context. */
Item *SortItemListClasses(Item *list) { return Sort(list, &ItemClassesLess, &ItemGetNext, &ItemPutNext, NULL); }
/* Implementation *************************************************************/
// Track OFDM symbol timing from the channel estimate: maintain an averaged
// power-delay profile (PDP), locate the first path (peak or max-energy
// window), apply proportional timing control, estimate the sample-rate
// offset from the PDP drift, and estimate the delay-spread begin/end which
// are returned through rLenPDS_fxp / rOffsPDS_fxp.
void CTimeSyncTrack::Process(CParameter& Parameter, CComplexVector& veccChanEst, int iNewTiCorr, FXP& rLenPDS_fxp, FXP& rOffsPDS_fxp)
{
	int i, j;
	int iIntShiftVal;
	int iFirstPathDelay;
	CFReal rPeakBound_fxp;
	CFReal rPropGain_fxp;
	long long rCurEnergy_fxp;
	long long rWinEnergy_fxp;
	long long rMaxWinEnergy_fxp;
	_BOOLEAN bDelayFound; //ok
	_BOOLEAN bPDSResultFound; //ok

	/* Rotate the averaged PDP to follow the time shifts -------------------- */
	/* Update timing correction history (shift register) */
	vecTiCorrHist.AddEnd(iNewTiCorr);

	/* Calculate the actual shift of the timing correction. Since we do the
	   timing correction at the sound card sample rate (48 kHz) and the
	   estimated impulse response has a different sample rate (since the
	   spectrum is only one little part of the sound card frequency range)
	   we have to correct the timing correction by a certain bandwidth factor */
	const CFReal rActShiftTiCor_fxp = rFracPartTiCor_fxp - (_FREAL) vecTiCorrHist[0] * iNumCarrier / iDFTSize;

	/* Integer part of shift */
	const int iIntPartTiCorr = (int) Round(rActShiftTiCor_fxp);

	/* Extract the fractional part since we can only correct integer timing shifts */
	rFracPartTiCor_fxp = rActShiftTiCor_fxp - iIntPartTiCorr;

	/* Shift the values in the vector storing the averaged impulse response.
	   We have to consider two cases for shifting (left and right shift) */
	if (rActShiftTiCor_fxp < 0)
		iIntShiftVal = iIntPartTiCorr + iNumIntpFreqPil;
	else
		iIntShiftVal = iIntPartTiCorr;

	/* If new correction is out of range, do not apply rotation */
	if ((iIntShiftVal > 0) && (iIntShiftVal < iNumIntpFreqPil))
	{
		/* Actual rotation of vector */
		vecrAvPoDeSp.Merge(vecrAvPoDeSp(iIntShiftVal + 1, iNumIntpFreqPil), vecrAvPoDeSp(1, iIntShiftVal));
	}

	/* New estimate for impulse response ------------------------------------ */
	/* Apply hamming window, Eq (15) */
	veccPilots = veccChanEst * vecrHammingWindow;

	/* Transform in time-domain to get an estimate for the delay power profile, Eq (15) */
	veccPilots = Ifft(veccPilots, FftPlan);

	/* Average result, Eq (16) (Should be a moving average function, for
	   simplicity we have chosen an IIR filter here) */
	IIR1(vecrAvPoDeSp, SqMag(veccPilots), rLamAvPDS);

	/* Rotate the averaged result vector to put the earlier peaks (which can
	   also detected in a certain amount) at the beginning of the vector */
	vecrAvPoDeSpRot.Merge(vecrAvPoDeSp(iStPoRot, iNumIntpFreqPil), vecrAvPoDeSp(1, iStPoRot - 1));

	/* Different timing algorithms ------------------------------------------ */
	switch (TypeTiSyncTrac)
	{
	case TSFIRSTPEAK:
		/* Detect first peak algorithm proposed by Baoguo Yang */
		/* Lower and higher bound */
		rBoundHigher = Max(vecrAvPoDeSpRot) * rConst1; //ok
		rBoundLower = Min(vecrAvPoDeSpRot) * rConst2; //ok

		/* Calculate the peak bound, Eq (19) */
		rPeakBound_fxp = FXP (Max(rBoundHigher, rBoundLower));

		/* Get final estimate, Eq (18) */
		bDelayFound = FALSE; /* Init flag */
		for (i = 0; i < iNumIntpFreqPil - 1; i++)
		{
			/* We are only interested in the first peak */
			if (bDelayFound == FALSE)
			{
				if ((vecrAvPoDeSpRot[i] > vecrAvPoDeSpRot[i + 1]) && (FXP (vecrAvPoDeSpRot[i]) > rPeakBound_fxp))
				{
					/* The first peak was found, store index */
					iFirstPathDelay = i;

					/* Set the flag */
					bDelayFound = TRUE;
				}
			}
		}
		break;

	case TSENERGY:
		/* Determin position of window with maximum energy in guard-interval.
		   A window with the size of the guard-interval is moved over the
		   entire profile and the energy inside this window is calculated.
		   The window position which maximises this energy is taken as the
		   new timing position */
		rMaxWinEnergy_fxp = 0;
		iFirstPathDelay = 0;
		for (i = 0; i < iNumIntpFreqPil - 1 - rGuardSizeFFT; i++)
		{
			rWinEnergy_fxp = 0;

			/* Energy IN the guard-interval */
			for (j = 0; j < rGuardSizeFFT; j++)
				rWinEnergy_fxp += (long long)(vecrAvPoDeSpRot[i + j]*(1<<FXP_TIME_SYNC_TRACK_SCALE));

			/* Get maximum */
			if (rWinEnergy_fxp > rMaxWinEnergy_fxp)
			{
				rMaxWinEnergy_fxp = rWinEnergy_fxp;
				iFirstPathDelay = i;
			}
		}

		/* We always have a valid measurement, set flag */
		bDelayFound = TRUE;
		break;
	}

	/* Only apply timing correction if search was successful and tracking is activated */
	if ((bDelayFound == TRUE) && (bTiSyncTracking == TRUE))
	{
		/* Consider the rotation introduced for earlier peaks in path delay.
		   Since the "iStPoRot" is the position of the beginning of the block
		   at the end for cutting out, "iNumIntpFreqPil" must be substracted.
		   (Actually, a part of the following line should be look like this:
		   "iStPoRot - 1 - iNumIntpFreqPil + 1" but the "- 1 + 1" compensate
		   each other) */
		iFirstPathDelay += iStPoRot - iNumIntpFreqPil - iTargetTimingPos - 1;

		/* Correct timing offset -------------------------------------------- */
		/* Final offset is target position in comparision to the estimated
		   first path delay. Since we have a delay from the channel
		   estimation, the previous correction is subtracted
		   "- vecrNewMeasHist[0]". If the "real" correction arrives after the
		   delay, this correction is compensated. The length of the history
		   buffer (vecrNewMeasHist) must be equal to the delay of the channel
		   estimation. The corrections must be quantized to the upsampled
		   output sample rate ("* iDFTSize / iNumCarrier") */
		/* NOTE(review): the next statement has no effect (value is computed
		   and discarded) — looks like a leftover from an edit; candidate for
		   removal. */
		iDFTSize / iNumCarrier - veciNewMeasHist[0];
		const CFReal rTiOffset_fxp = (CFReal) -iFirstPathDelay * iDFTSize / iNumCarrier - veciNewMeasHist[0];

		/* Different controlling parameters for different types of tracking */
		switch (TypeTiSyncTrac)
		{
		case TSFIRSTPEAK:
			/* Adapt the linear control parameter to the region, where the peak
			   was found. The region left of the desired timing position is
			   critical, because we immediately get ISI if a peak appers here.
			   Therefore we apply fast correction here. At the other positions,
			   we smooth the controlling to improve the immunity against false
			   peaks */
			if (rTiOffset_fxp > 0)
				rPropGain_fxp = CONT_PROP_BEFORE_GUARD_INT;
			else
				rPropGain_fxp = CONT_PROP_IN_GUARD_INT;
			break;

		case TSENERGY:
			rPropGain_fxp = CONT_PROP_ENERGY_METHOD;
			break;
		}

		/* In case of sample rate offset acquisition phase, use faster timing corrections */
		if (bSamRaOffsAcqu == TRUE)
			rPropGain_fxp *= 2;

		/* Apply proportional control and fix result to sample grid */
		const CFReal rCurCorrValue_fxp = rTiOffset_fxp * rPropGain_fxp + rFracPartContr_fxp;
		const int iContrTiOffs = (int) Fix(rCurCorrValue_fxp);

		/* Calculate new fractional part of controlling */
		rFracPartContr_fxp = rCurCorrValue_fxp - iContrTiOffs;

		/* Manage correction history */
		veciNewMeasHist.AddEnd(0);
		for (i = 0; i < iSymDelay - 1; i++)
			veciNewMeasHist[i] += iContrTiOffs;

		/* Apply correction */
		Parameter.iTimingOffsTrack = -iContrTiOffs;
	}

	/* Sample rate offset estimation ---------------------------------------- */
	/* This sample rate offset estimation is based on the movement of the
	   estimated PDS with time. The movement per symbol (or a number of
	   symbols) is proportional to the sample rate offset. It has to be
	   considered the PDS shiftings of the timing correction unit
	   ("rActShiftTiCor" can be used for that). The procedere is to detect
	   the maximum peak in the PDS and use this as a reference, assuming tha
	   delay of this peak is not changing. The problem is when another peak
	   get higher than this due to fading. In this case we must adjust the
	   history to this new peak (the new reference) */
	int iMaxInd;
	CReal rMax;
	//int rMax_fxp;

	/* Find index of maximum peak in PDS estimation. This is our reference
	   for this estimation method */
	Max(rMax, iMaxInd, vecrAvPoDeSpRot);

	/* Integration of timing corrections
	   FIXME: Check for overrun of "iIntegTiCorrections" variable! */
	iIntegTiCorrections += (long long) iIntPartTiCorr;

	/* We need to consider the timing corrections done by the timing unit.
	   What we want to estimate is only the real movement of the detected
	   maximum peak */
	const long long iCurRes = iIntegTiCorrections + iMaxInd;
	veciSRTiCorrHist.AddEnd(iCurRes);

	/* We assumed that the detected peak is always the same peak in the
	   actual PDS. But due to fading the maximum can change to a different
	   peak. In this case the estimation would be wrong. We try to detect
	   the detection of a different peak by defining a maximum sample rate
	   change. The sample rate offset is very likely to be very constant
	   since usually crystal oscialltors are used. Thus, if a larger change
	   of sample rate offset happens, we assume that the maximum peak has
	   changed */
	const long long iNewDiff = veciSRTiCorrHist[iLenCorrectionHist - 2] - iCurRes;

	/* If change is larger than 2, it is most likely that a new peak was
	   chosen by the maximum function. Also, if the sign has changed of the
	   difference (and it is not zero), we also say that a new peak was
	   selected */
	if ((llabs(iNewDiff) > 2) || ((Sign(iOldNonZeroDiff) != Sign(iNewDiff)) && (iNewDiff != 0)))
	{
		/* Correct the complete history to the new reference peak.
		   Reference peak was already added, therefore do not use last element */
		for (i = 0; i < iLenCorrectionHist - 1; i++)
			veciSRTiCorrHist[i] -= iNewDiff;
	}

	/* Store old difference if it is not zero */
	if (iNewDiff != 0)
		iOldNonZeroDiff = iNewDiff;

	/* Check, if we are in acquisition phase */
	if (iResOffsetAcquCnt > 0)
	{
		/* Acquisition phase */
		iResOffsetAcquCnt--;
	}
	else
	{
		/* Apply the result from acquisition only once */
		if (bSamRaOffsAcqu == TRUE)
		{
			/* End of acquisition phase */
			bSamRaOffsAcqu = FALSE;

			/* Set sample rate offset to initial estimate. We consider the
			   initialization phase of channel estimation by "iSymDelay" */
			/*rInitSamOffset = FXP GetSamOffHz(iCurRes - veciSRTiCorrHist[ iLenCorrectionHist - (iResOffAcqCntMax - iSymDelay)], iResOffAcqCntMax - iSymDelay - 1); */
			CFReal rInitSamOffset = FXP (GetSamOffHz(iCurRes - veciSRTiCorrHist[ iLenCorrectionHist - (iResOffAcqCntMax - iSymDelay)], iResOffAcqCntMax - iSymDelay - 1));

#ifndef USE_SAMOFFS_TRACK_FRE_PIL
			/* Apply initial sample rate offset estimation */
			// (Parameter.rResampleOffset) -= rInitSamOffset;
			FXP (Parameter.rResampleOffset) -= rInitSamOffset;
#endif

			/* Reset estimation history (init with zeros) since the sample
			   rate offset was changed */
			veciSRTiCorrHist.Init(iLenCorrectionHist, 0);
			iIntegTiCorrections = 0;
		}
		else
		{
			/* Tracking phase */
			/* Get actual sample rate offset in Hertz */
			/* const CReal rSamOffset = GetSamOffHz(iCurRes - veciSRTiCorrHist[0], iLenCorrectionHist - 1); */
			const CFReal rSamOffset = FXP (GetSamOffHz(iCurRes - veciSRTiCorrHist[0], iLenCorrectionHist - 1));

#ifndef USE_SAMOFFS_TRACK_FRE_PIL
			/* Apply result from sample rate offset estimation */
			//Parameter.rResampleOffset -= CONTR_SAMP_OFF_INT_FTI * rSamOffset;
			FXP (Parameter.rResampleOffset) -= CONTR_SAMP_OFF_INT_FTI * rSamOffset;
#endif
		}
	}

	/* Delay spread length estimation --------------------------------------- */
	/* Estimate the noise energy using the minimum statistic. We assume that
	   the noise variance is equal on all samples of the impulse response.
	   Therefore we subtract the variance on each sample. The total
	   estimated signal energy is the total energy minus the noise energy */
	/* Calculate total energy */
	const CFReal rTotEgy = Sum(vecrAvPoDeSpRot);

	/* Sort the values of the PDS to get the smallest values */
	CRealVector rSortAvPoDeSpRot(Sort(vecrAvPoDeSpRot));

	/* Average the result of smallest values and overestimate result */
	const long long rSigmaNoise_fxp = (long long)(Sum(rSortAvPoDeSpRot(1, NUM_SAM_IR_FOR_MIN_STAT - 1)) / NUM_SAM_IR_FOR_MIN_STAT * OVER_EST_FACT_MIN_STAT*(1<<FXP_TIME_SYNC_TRACK_SCALE));

	/* Calculate signal energy by subtracting the noise energy from total
	   energy (energy cannot by negative -> bound at zero) */
	const long long rSigEnergyBound_fxp = (long long)((double)Max(rTotEgy - ((double)rSigmaNoise_fxp/(1<<FXP_TIME_SYNC_TRACK_SCALE)) * iNumIntpFreqPil, (FXP)0)*(1<<FXP_TIME_SYNC_TRACK_SCALE));

	/* From left to the right -> search for end of PDS */
	rEstPDSEnd_fxp = (FXP) (iNumIntpFreqPil - 1);
	rCurEnergy_fxp = 0;
	bPDSResultFound = FALSE;
	for (i = 0; i < iNumIntpFreqPil; i++)
	{
		if (bPDSResultFound == FALSE)
		{
			if (rCurEnergy_fxp > rSigEnergyBound_fxp)
			{
				/* Delay index */
				rEstPDSEnd_fxp = (CReal) i;
				bPDSResultFound = TRUE;
			}

			/* Accumulate signal energy, subtract noise on each sample */
			rCurEnergy_fxp += (long long)(vecrAvPoDeSpRot[i]*(1<<FXP_TIME_SYNC_TRACK_SCALE)) - rSigmaNoise_fxp; //slu2 change
		}
	}

	/* From right to the left -> search for beginning of PDS */
	rEstPDSBegin_fxp = 0;
	rCurEnergy_fxp = 0;
	bPDSResultFound = FALSE;
	for (i = iNumIntpFreqPil - 1; i >= 0; i--)
	{
		if (bPDSResultFound == FALSE)
		{
			if (rCurEnergy_fxp > rSigEnergyBound_fxp)
			{
				/* Delay index */
				rEstPDSBegin_fxp = (CFReal) i;
				bPDSResultFound = TRUE;
			}

			/* Accumulate signal energy, subtract noise on each sample */
			rCurEnergy_fxp += (long long)(vecrAvPoDeSpRot[i]*(1<<FXP_TIME_SYNC_TRACK_SCALE)) - rSigmaNoise_fxp; //slu2 change
		}
	}

	/* If the signal energy is too low it can happen that the estimated
	   beginning of the impulse response is before the end -> correct */
	if (rEstPDSBegin_fxp > rEstPDSEnd_fxp)
	{
		/* Set beginning and end to their maximum (minimum) value */
		//rEstPDSBegin = (CReal) 0.0;
		rEstPDSBegin_fxp = 0;
		rEstPDSEnd_fxp = (CFReal) (iNumIntpFreqPil - 1);
	}

	/* Correct estimates of begin and end of PDS by the rotation */
	const CReal rPDSLenCorrection = iNumIntpFreqPil - iStPoRot + 1; /* slu2: dont' have to change here */
	rEstPDSBegin_fxp -= FXP (rPDSLenCorrection);
	rEstPDSEnd_fxp -= FXP (rPDSLenCorrection);

	/* Set return parameters */
	rLenPDS_fxp = rEstPDSEnd_fxp - rEstPDSBegin_fxp;
	rOffsPDS_fxp= rEstPDSBegin_fxp;
}
/* Sort an Item list by time using the "More" predicate (i.e. newest first);
   delegates to the generic linked-list sorter with no comparator context. */
Item *SortItemListTimes(Item *list) { return Sort(list, &ItemTimeMore, &ItemGetNext, &ItemPutNext, NULL); }
/***********************************************************************/
/*  Convert this array's values to the new type k, reallocating the    */
/*  value block. vp optionally carries a date format when converting   */
/*  STRING -> DATE (in which case the array is re-sorted on internal   */
/*  date values). Returns the new type, or TYPE_ERROR on failure.      */
/***********************************************************************/
int ARRAY::Convert(PGLOBAL g, int k, PVAL vp)
{
	int   i, prec = 0;
	bool  b = FALSE;
	PMBV  ovblk = Valblk;
	PVBLK ovblp = Vblp;

	Type = k;                    // k is the new type
	Valblk = new(g) MBVALS;

	switch (Type) {
		case TYPE_DOUBLE:
			prec = 2;
			// fall through: all numeric/date targets share Len = 1
		case TYPE_SHORT:
		case TYPE_INT:
		case TYPE_DATE:
			Len = 1;
			break;
		default:
			sprintf(g->Message, MSG(BAD_CONV_TYPE), Type);
			return TYPE_ERROR;
	} // endswitch k

	Size = Nval;
	Nval = 0;
	Vblp = Valblk->Allocate(g, Type, Len, prec, Size);

	if (!Valblk->GetMemp())
		// The error message was built by PlgDBalloc
		return TYPE_ERROR;
	else
		Value = AllocateValue(g, Type, Len, prec);

	/*********************************************************************/
	/*  Converting STRING to DATE can be done according to date format.  */
	/*********************************************************************/
	// Note: the else binds to the inner if (dangling-else) — b is set only
	// when SetFormat succeeds.
	if (Type == TYPE_DATE && ovblp->GetType() == TYPE_STRING && vp)
		if (((DTVAL*)Value)->SetFormat(g, vp))
			return TYPE_ERROR;
		else
			b = TRUE;  // Sort the new array on date internal values

	/*********************************************************************/
	/*  Do the actual conversion.                                        */
	/*********************************************************************/
	for (i = 0; i < Size; i++) {
		Value->SetValue_pvblk(ovblp, i);

		if (AddValue(g, Value))
			return TYPE_ERROR;
	} // endfor i

	/*********************************************************************/
	/*  For sorted arrays, get the initial find values.                  */
	/*********************************************************************/
	if (b)
		Sort(g);

	ovblk->Free();
	return Type;
} // end of Convert
/* Sort an Rlist alphabetically (default item comparison, ascending).
   Thin wrapper over the generic linked-list sorter. */
Rlist *AlphaSortRListNames(Rlist *list)
{
    Rlist *sorted = Sort(list, RlistItemLess, RlistGetNext, RlistPutNext, NULL);
    return sorted;
}
// Scan every rendered face and register a dynamic light for each face whose
// base or overlay texture is a light emitter (per IsLight), then sort the
// collected light sources. Skybox faces are skipped.
void CLightManager::AddGeometryLights (void)
{
	int nFace, nSegment, nSide, nTexture, nLight;
	CSegFace* faceP;
	tFaceColor* colorP;

#if 0
for (nTexture = 0; nTexture < 910; nTexture++)
	nLight = IsLight (nTexture);
#endif
gameStates.render.bHaveDynLights = 1;
#if 0
if (gameStates.app.bD1Mission)
	gameData.render.fAttScale [0] *= 2;
#endif
ogl.m_states.fLightRange = fLightRanges [IsMultiGame ? 1 : extraGameInfo [IsMultiGame].nLightRange];
m_headlights.Init ();
if (gameStates.render.nLightingMethod)
	gameData.render.color.vertices.Clear ();
m_data.Init ();
// Walk all faces in the face buffer.
for (nFace = gameData.segs.nFaces, faceP = FACES.faces.Buffer (); nFace; nFace--, faceP++) {
	nSegment = faceP->nSegment;
	if (SEGMENTS [nSegment].m_nType == SEGMENT_IS_SKYBOX)
		continue;
#if DBG
	if (nSegment == nDbgSeg)
		nDbgSeg = nDbgSeg;
#endif
	nSide = faceP->nSide;
#if DBG
	if ((nSegment == nDbgSeg) && ((nDbgSide < 0) || (nSide == nDbgSide)))
		nDbgSeg = nDbgSeg;
#endif
	// Base texture light check.
	nTexture = faceP->nBaseTex;
	if ((nTexture < 0) || (nTexture >= MAX_WALL_TEXTURES))
		continue;
	colorP = gameData.render.color.textures + nTexture;
	if ((nLight = IsLight (nTexture)))
		Add (faceP, &colorP->color, nLight, (short) nSegment, (short) nSide, -1, nTexture, NULL, 1);
	// Overlay texture light check (note: no trailing argument here, unlike
	// the base-texture Add above — presumably a defaulted parameter; verify
	// against Add's declaration).
	faceP->nOvlTex = SEGMENTS [faceP->nSegment].Side (faceP->nSide)->m_nOvlTex;
	nTexture = faceP->nOvlTex;
#if 0//DBG
	if (gameStates.app.bD1Mission && (nTexture == 289)) //empty, light
		continue;
#endif
	if ((nTexture > 0) && (nTexture < MAX_WALL_TEXTURES) && (nLight = IsLight (nTexture)) /*gameData.pig.tex.info.fBrightness [nTexture]*/) {
		colorP = gameData.render.color.textures + nTexture;
		Add (faceP, &colorP->color, nLight, (short) nSegment, (short) nSide, -1, nTexture, NULL);
		}
	//if (m_data.nLights [0])
	//	return;
	if (!gameStates.render.bHaveDynLights) {
		Reset ();
		return;
		}
	}
Sort ();
#if 0
PrintLog ("light sources:\n");
CDynLight* pl = m_data.lights [0];
for (int i = m_data.nLights [0]; i; i--, pl++)
	PrintLog ("%d,%d,%d,%d\n", pl->info.nSegment, pl->info.nSide, pl->info.nObject, pl->info.bVariable);
PrintLog ("\n");
#endif
}
/* Sort an Rlist using IP-address ordering; delegates to the generic
   linked-list sorter with no comparator context. */
Rlist *IPSortRListNames(Rlist *list) { return Sort(list, &RlistItemIPLess, &RlistGetNext, &RlistPutNext, NULL); }
// Sort the array with the default case-sensitive C-string comparator;
// nsnull = no user data passed to the comparator.
void nsCStringArray::Sort(void) { Sort(CompareCString, nsnull); }
// Rebuild the navigator's item (nitem) and group (gitem) lists from the code
// base according to the current search text:
//  - a pure number becomes a "Go to line" entry;
//  - an empty search (unsorted) lists the current file's items by line;
//  - otherwise a global multi-pass search matches nest and/or name
//    (pass -1 exact, pass 0 prefix, pass 1 case-insensitive substring).
// Finally repopulates the scope list, preserving scroll/cursor when possible.
void Navigator::Search()
{
	sortitems.Check(sorting);
	int sc = scope.GetScroll();
	String key = scope.GetKey();
	String s = TrimBoth(~search);
	String search_name, search_nest;
	bool wholeclass = false;
	bool both = false;
	navigator_global = false;
	// Split "nest.name" searches; a trailing '.' means "whole nest only".
	if(s.Find('.') >= 0) {
		Vector<String> h = Split((String)~search, '.');
		if(*s.Last() == '.')
			search_nest = Join(h, "::");
		else {
			search_name = h.Pop();
			if(h.GetCount())
				search_nest = Join(h, "::");
		}
		wholeclass = *s == '.' && search_nest.GetCount();
	}
	else {
		search_name = search_nest = ~search;
		both = true;
	}
	s = Join(Split(s, '.'), "::") + (s.EndsWith(".") ? "::" : "");
	int lineno = StrInt(s);
	gitem.Clear();
	nitem.Clear();
	if(IsNull(theide->editfile))
		return;
	int fileii = GetSourceFileIndex(theide->editfile);
	if(!IsNull(lineno)) {
		// Numeric search: single "go to line" entry.
		NavItem& m = nitem.Add();
		m.type = "Go to line " + AsString(lineno);
		m.kind = KIND_LINE;
		m.line = lineno;
		gitem.Add(Null).Add(&m);
	}
	else
	if(IsNull(s) && !sorting) {
		// Empty search: list all items of the current file, ordered by line.
		const CppBase& b = CodeBase();
		for(int i = 0; i < b.GetCount(); i++) {
			String nest = b.GetKey(i);
			const Array<CppItem>& ci = b[i];
			for(int j = 0; j < ci.GetCount(); j++) {
				const CppItem& m = ci[j];
				if(m.file == fileii) {
					NavItem& n = nitem.Add();
					n.Set(m);
					n.nest = nest;
					n.decl_line = m.line;
					n.decl_file = m.file;
					n.decl = !m.impl;
					NavLine& l = n.linefo.Add();
					l.impl = m.impl;
					l.file = m.file;
					l.line = m.line;
				}
			}
		}
		Sort(nitem, FieldRelation(&NavItem::line, StdLess<int>()));
		NavGroup(true);
	}
	else {
		// Global search over the whole code base.
		navigator_global = true;
		const CppBase& b = CodeBase();
		String usearch_nest = ToUpper(search_nest);
		String usearch_name = ToUpper(search_name);
		ArrayMap<String, NavItem> imap;
		bool local = sorting && IsNull(s);
		VectorMap<String, int> nest_pass;
		// pass -1: exact; pass 0: prefix; pass 1: case-insensitive substring.
		for(int pass = -1; pass < 2; pass++) {
			for(int i = 0; i < b.GetCount(); i++) {
				String nest = b.GetKey(i);
				bool foundnest = (wholeclass ? pass < 0 ? false
				                                        : pass ? ToUpper(nest) == usearch_nest
				                                               : nest == search_nest
				                             : pass < 0 ? nest == search_nest
				                                        : (pass ? ToUpper(nest).Find(usearch_nest) >= 0
				                                                : nest.StartsWith(search_nest)))
				                 && nest.Find('@') < 0;
				if(local || foundnest || both) {
					const Array<CppItem>& ci = b[i];
					for(int j = 0; j < ci.GetCount(); j++) {
						const CppItem& m = ci[j];
						if(local ? m.file == fileii
						         : m.uname.Find('@') < 0
						           && (pass < 0 ? m.name == search_name
						                        : pass ? m.uname.Find(usearch_name) >= 0
						                               : m.name.StartsWith(search_name))
						           || both && foundnest) {
							String key = nest + '\1' + m.qitem;
							int q = nest_pass.Find(nest);
							int p = pass;
							if(q < 0) // We do not want classes to be split based on pass
								nest_pass.Add(nest, pass);
							else
								p = nest_pass[q];
							q = imap.Find(key);
							if(q < 0) {
								NavItem& ni = imap.Add(key);
								ni.Set(m);
								ni.nest = nest;
								ni.decl_line = ni.line;
								ni.decl_file = ni.file;
								ni.decl = !ni.impl;
								ni.pass = p;
								NavLine& l = ni.linefo.Add();
								l.impl = m.impl;
								l.file = m.file;
								l.line = m.line;
							}
							else {
								// Merge duplicate hits; prefer the best declaration.
								NavItem& mm = imap[q];
								if(!m.impl && (!mm.decl || CombineCompare(mm.decl_file, m.file)(mm.decl_line, m.line) < 0)) {
									mm.decl = true;
									mm.decl_line = m.line;
									mm.decl_file = m.file;
									mm.natural = m.natural;
								}
								NavLine& l = mm.linefo.Add();
								l.impl = m.impl;
								l.file = m.file;
								l.line = m.line;
							}
						}
					}
				}
			}
		}
		nitem = imap.PickValues();
		NavGroup(false);
		SortByKey(gitem);
		Vector<String> keys = gitem.PickKeys();
		Vector<Vector<NavItem *> > values = gitem.PickValues();
		IndexSort(keys, values);
		// Strip the sort-priority prefix character added by NavGroup.
		for(int i = 0; i < keys.GetCount(); i++)
			keys[i].Remove(0);
		VectorMap<String, Vector<NavItem *> > h(pick(keys), pick(values));
		gitem = pick(h);
		for(int i = 0; i < gitem.GetCount(); i++)
			Sort(gitem[i], sorting ? SortByNames : SortByLines);
	}
	// Rebuild the scope list (unique group keys), restoring scroll & cursor.
	scope.Clear();
	scope.Add(Null);
	Index<String> done;
	for(int i = 0; i < gitem.GetCount(); i++) {
		String s = gitem.GetKey(i);
		if(done.Find(s) < 0) {
			done.Add(s);
			scope.Add(s);
		}
	}
	scope.ScrollTo(sc);
	if(!navigator_global || !scope.FindSetCursor(key))
		scope.GoBegin();
}
// Sort the array with the case-insensitive C-string comparator;
// nsnull = no user data passed to the comparator.
void nsCStringArray::SortIgnoreCase(void) { Sort(CompareCStringIgnoreCase, nsnull); }
NAMESPACE_UPP

// recalculates relative toolbar's positions
// needed after adding or removing a toolbar
// Normalizes the stored relative positions into dense row indices, lays the
// toolbars of each row out left-to-right (or top-to-bottom for vertical
// frames) with a 2px gap, fills posMapper (row start -> {pos -> toolbar
// index}) and computes the total frame size.
void XMLToolBarFrame::Reposition(void)
{
	// clears position mapper
	posMapper.Clear();

	// setup a dummy frame size, just in case frame is empty
	frameSize = 3;

	// if no toolbars inside, just do nothing else
	if(!toolBars.GetCount())
		return;

	// fixup 'vertical' positions, i.e. rows on which toolbars belong
	Array<int>vPos;
	for(int iPos = 0; iPos < relativePositions.GetCount(); iPos++)
	{
		Size const &sz = relativePositions[iPos];
		vPos.Add(sz.cy);
	}
	Sort(vPos);
	// Map each distinct cy value to a dense 0-based row index.
	VectorMap<int, int>vMap;
	int v = 0;
	for(int iPos = 0; iPos < vPos.GetCount(); iPos++)
	{
		if(vMap.Find(vPos[iPos]) < 0)
		{
			vMap.Add(vPos[iPos], v);
			v ++;
		}
	}
	for(int iPos = 0; iPos < relativePositions.GetCount(); iPos++)
	{
		Size &sz = relativePositions[iPos];
		sz.cy = vMap.Get(sz.cy);
	}

	// now, scans each row, fixing up positions inside it and
	// building true toolbars positions
	int curVPos = 0;
	for(int iRow = 0; iRow < vMap.GetCount(); iRow++)
	{
		Vector<int>ps, idx;
		int rowHeight = 0;
		// Collect the toolbars belonging to this row.
		for(int iPos = 0; iPos < relativePositions.GetCount(); iPos++)
		{
			Size &sz = relativePositions[iPos];
			if(sz.cy == iRow)
			{
				ps.Add(sz.cx);
				idx.Add(iPos);
			}
		}
		IndexSort(ps, idx);
		// Push overlapping toolbars right and accumulate the row extent.
		int minNextPos = 0;
		for(int i = 0; i < ps.GetCount(); i++)
		{
			if(ps[i] < minNextPos)
				ps[i] = minNextPos;
			XMLToolBarCtrl &tb = *toolBars[idx[i]];
			Size sz;
			switch(toolBarState)
			{
				case TOOLBAR_LEFT :
				case TOOLBAR_RIGHT :
					sz = tb.GetVertSize();
					rowHeight = max(rowHeight, sz.cx);
					minNextPos = ps[i] + sz.cy + 2; // 2 pixels gap between columns
					break;
				case TOOLBAR_TOP :
				case TOOLBAR_BOTTOM :
					sz = tb.GetHorzSize();
					rowHeight = max(rowHeight, sz.cy);
					minNextPos = ps[i] + sz.cx + 2; // 2 pixels gap between columns
					break;
				default:
					NEVER();
			}
		}
		VectorMap<int, int> &rowMap = posMapper.Add(curVPos);
		for(int i = 0; i < ps.GetCount(); i++)
			rowMap.Add(ps[i], idx[i]);
		curVPos += rowHeight;
	}

	// store total frame size
	frameSize = curVPos;

	// if reversed direction, fixup cols
	if(toolBarState == TOOLBAR_RIGHT || toolBarState == TOOLBAR_BOTTOM)
	{
		int lastPos = posMapper.GetKey(posMapper.GetCount()-1);
		for(int iRow = 0; iRow < posMapper.GetCount(); iRow++)
			posMapper.SetKey(iRow, lastPos - posMapper.GetKey(iRow));
	}
}
void CServerBrowser::Update(bool ForceResort) { int64 Timeout = time_freq(); int64 Now = time_get(); int Count; CServerEntry *pEntry, *pNext; // do server list requests if(m_NeedRefresh && !m_pMasterServer->IsRefreshing()) { NETADDR Addr; CNetChunk Packet; int i; m_NeedRefresh = 0; mem_zero(&Packet, sizeof(Packet)); Packet.m_ClientID = -1; Packet.m_Flags = NETSENDFLAG_CONNLESS; Packet.m_DataSize = sizeof(SERVERBROWSE_GETLIST); Packet.m_pData = SERVERBROWSE_GETLIST; for(i = 0; i < IMasterServer::MAX_MASTERSERVERS; i++) { if(!m_pMasterServer->IsValid(i)) continue; Addr = m_pMasterServer->GetAddr(i); Packet.m_Address = Addr; m_pNetClient->Send(&Packet); } if(g_Config.m_Debug) m_pConsole->Print(IConsole::OUTPUT_LEVEL_DEBUG, "client_srvbrowse", "requesting server list"); } // do timeouts pEntry = m_pFirstReqServer; while(1) { if(!pEntry) // no more entries break; pNext = pEntry->m_pNextReq; if(pEntry->m_RequestTime && pEntry->m_RequestTime+Timeout < Now) { // timeout RemoveRequest(pEntry); } pEntry = pNext; } // do timeouts pEntry = m_pFirstReqServer; Count = 0; while(1) { if(!pEntry) // no more entries break; // no more then 10 concurrent requests if(Count == g_Config.m_BrMaxRequests) break; if(pEntry->m_RequestTime == 0) RequestImpl(pEntry->m_Addr, pEntry); Count++; pEntry = pEntry->m_pNextReq; } // check if we need to resort if(m_Sorthash != SortHash() || ForceResort) Sort(); }
int FileIconCollection::ReadDirectory(ieIDirectoryEnumerator *&pEnum, PCTCHAR pcszPath, muiSize whIcon_, IconSortFunc *pSortFunc) { if (bReadingImages || bSavingCache) { if (bReadingImages) { bCancelReads = true; tg.cancel(); } tg.wait(); bReadingImages = bSavingCache = false; } bool bGotPath = pcszPath && *pcszPath; bool bSamePath = bGotPath && (_tcsicmp(pcszPath, szPath) == 0); FileIcon *pOldIcons = nullptr; if (pFirstIcon && bSamePath) { // Same path -> Save a copy of previous icons that we can retrieve the new icons from for all files that hasn't changed pOldIcons = pFirstIcon; pFirstIcon = pLastIcon = nullptr; } else { FreeIcons(); } if (!bGotPath) return 0; // Store new path int nPathLen = _tcslen(pcszPath); memcpy(szPath, pcszPath, (nPathLen+1)*sizeof(*pcszPath)); bInclSubDirs = false; bCancelReads = false; // Read icon cache file (if present) tg.run([&] { Cache.Load(szPath, whIcon_); }); // Create a list of icons for all files in the directory int iNumIcons = 0; for (;;) { // (repeat until we've got a list where the directory hasn't changed during the enumeration) ieIDirectoryEnumerator *pOldEnum = pEnum; pEnum = g_ieFM.DirCache.CreateEnumerator(szPath); if (pOldEnum) pOldEnum->Release(); const ieDirectoryFileInfo *pDFI; if (((nPathLen == 2) || (nPathLen == 3)) && (szPath[1] == ':')) { // Special case for root path: then add drive letters int iCurDrive = _totupper(szPath[0]) - 'A'; DWORD dwDriveMap = GetLogicalDrives(); TCHAR szFile[4] = _T("X:\\"); for (int i = 32; i--; ) { if (!(dwDriveMap & (1<<i))) continue; if (i == iCurDrive) continue; szFile[0] = 'A' + i; AddIcon(szFile, 0, 0, true); iNumIcons++; } } if (!pEnum) break; // No files? // Enumerate files for (PCTCHAR pcszName = nullptr; pEnum->IsStillValid() && (pcszName = pEnum->NextFile(pcszName, &pDFI)) != nullptr;) { // Ignore hidden files if (pDFI->bHiddenFile) continue; // Ignore . 
directory int nNameLen = _tcslen(pcszName); if ((nNameLen == 1) && (pcszName[0] == '.') && (pcszName[1] == 0)) continue; // Ignore .iei and .iea files if (!pDFI->bSubDirectory) { if ((nNameLen > 4) && (pcszName[nNameLen-4] == '.') && ((pcszName[nNameLen-3] == 'i') || (pcszName[nNameLen-3] == 'I')) && ((pcszName[nNameLen-2] == 'e') || (pcszName[nNameLen-2] == 'E')) && ((pcszName[nNameLen-1] == 'i') || (pcszName[nNameLen-1] == 'I') || (pcszName[nNameLen-1] == 'A') || (pcszName[nNameLen-1] == 'A'))) continue; } // Expand to full file path TCHAR szFile[MAX_PATH]; int nFileLen = nPathLen; memcpy(szFile, szPath, nFileLen*sizeof(*szPath)); szFile[nFileLen++] = '\\'; memcpy(szFile+nFileLen, pcszName, (nNameLen+1)*sizeof(*pcszName)); nFileLen += nNameLen; // See if we can re-use any old icon... FileIcon *pThis = pOldIcons; while (pThis) { if ( (pThis->GetFileSize() == pDFI->qwSize) // Must have same file size && (pThis->GetFileTime() == pDFI->ftFileTime) // ... and time stampe && (pThis->GetFileStrLength() == nFileLen) // ... and length && !memcmp(pThis->GetFileStr(), szFile, nFileLen*sizeof(*szFile)) // ... and name && ((pThis->GetType() != fitImage) || pThis->HasImage()) ) // ... and if it's an image, we must've read the image thumbnail... break; pThis = pThis->NextIcon(); } if (pThis) { // Yes, unlink from old chain and re-use it in the new chain if (pThis == pOldIcons) pOldIcons = pThis->NextIcon(); pThis->UnlinkIcon(); if (!pFirstIcon) pFirstIcon = pThis; pThis->LinkIcon(pLastIcon, nullptr); pLastIcon = pThis; } else { // No, so create new icon AddIcon(szFile, pDFI->qwSize, pDFI->ftFileTime, pDFI->bSubDirectory); } iNumIcons++; } if (pEnum->IsStillValid()) { // Success! 
break; } // Directory has changed during the enumeration -> Try again (save any icons read as old icons that can be re-used) if (pFirstIcon) { pLastIcon->LinkIcon(pLastIcon->PrevIcon(), pOldIcons); pOldIcons = pFirstIcon; } pFirstIcon = pLastIcon = nullptr; iNumIcons = 0; } // Free any remaining old icons for (;;) { FileIcon *pThis = pOldIcons; if (!pThis) break; pOldIcons = pThis->NextIcon(); delete pThis; } // Sort new icons? if (pSortFunc && (pSortFunc != ByImageSize)) { Sort(pSortFunc); } // Wait for cache file to load tg.wait(); whIcon = whIcon_; if (!whIcon.w) whIcon.w = pCfg->idx.iIconX; if (!whIcon.h) whIcon.h = pCfg->idx.iIconY; // Return the number of FileIcon's created return iNumIcons; }
void CServerBrowser::Set(const NETADDR &Addr, int Type, int Token, const CServerInfo *pInfo) { CServerEntry *pEntry = 0; if(Type == IServerBrowser::SET_MASTER_ADD) { if(m_ServerlistType != IServerBrowser::TYPE_INTERNET) return; if(!Find(Addr)) { pEntry = Add(Addr); QueueRequest(pEntry); } } else if(Type == IServerBrowser::SET_FAV_ADD) { if(m_ServerlistType != IServerBrowser::TYPE_FAVORITES) return; if(!Find(Addr)) { pEntry = Add(Addr); QueueRequest(pEntry); } } else if(Type == IServerBrowser::SET_TOKEN) { if(Token != m_CurrentToken) return; pEntry = Find(Addr); if(!pEntry) pEntry = Add(Addr); if(pEntry) { SetInfo(pEntry, *pInfo); if(m_ServerlistType == IServerBrowser::TYPE_LAN) pEntry->m_Info.m_Latency = (time_get()-m_BroadcastTime)*1000/time_freq(); else pEntry->m_Info.m_Latency = (time_get()-pEntry->m_RequestTime)*1000/time_freq(); RemoveRequest(pEntry); } } else if(Type == IServerBrowser::SET_OLD_INTERNET) { pEntry = Find(Addr); if(pEntry) { SetInfo(pEntry, *pInfo); if(m_ServerlistType == IServerBrowser::TYPE_LAN) pEntry->m_Info.m_Latency = (time_get()-m_BroadcastTime)*1000/time_freq(); else pEntry->m_Info.m_Latency = (time_get()-pEntry->m_RequestTime)*1000/time_freq(); RemoveRequest(pEntry); } } else if(Type == IServerBrowser::SET_OLD_LAN) { pEntry = Find(Addr); if(pEntry) if(!pEntry) pEntry = Add(Addr); if(pEntry) SetInfo(pEntry, *pInfo); } Sort(); }