Example #1
// Update our composite status and deal with things if it's changed
void CCachedDirectory::UpdateCurrentStatus()
{
	git_wc_status_kind newStatus = CalculateRecursiveStatus();
	CTraceToOutputDebugString::Instance()(_T(__FUNCTION__) _T(": UpdateCurrentStatus %s new:%d old: %d\n"),
		m_directoryPath.GetWinPath(),
		newStatus, m_currentFullStatus);

	if (newStatus != m_currentFullStatus && m_ownStatus.IsDirectory())
	{
		// Our status has changed - tell the shell
		CTraceToOutputDebugString::Instance()(_T(__FUNCTION__) _T(": Dir %s, status change from %d to %d\n"), m_directoryPath.GetWinPath(), m_currentFullStatus, newStatus);
		m_currentFullStatus = newStatus;
		CGitStatusCache::Instance().UpdateShell(m_directoryPath);
	}
	// And tell our parent, if we've got one...
	// We *always* tell our parent about our status, even if it hasn't
	// changed. This makes sure that the parent really has our current
	// status - the parent can decide for itself whether our status has
	// changed or not.
	CTGitPath parentPath = m_directoryPath.GetContainingDirectory();
	if(!parentPath.IsEmpty())
	{
		// We have a parent.
		// Only version-controlled directories need to be cached.
		CString root1, root2;
		if (parentPath.HasAdminDir(&root1) && (CGitStatusCache::Instance().IsRecurseSubmodules() || m_directoryPath.HasAdminDir(&root2) && root1 == root2))
		{
			CCachedDirectory * cachedDir = CGitStatusCache::Instance().GetDirectoryCacheEntry(parentPath);
			if (cachedDir)
				cachedDir->UpdateChildDirectoryStatus(m_directoryPath, m_currentFullStatus);
		}
	}
}
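A parent directory is only notified when it shares a working-tree root with this directory, or when submodule recursion is enabled. A minimal sketch of that condition as a standalone helper (hypothetical, not part of TortoiseGit; it only reuses calls that appear in the example above):
// Hypothetical helper sketching the notify-parent condition used above
// (assumes the same TortoiseGit headers as the example).
static bool ShouldNotifyParent(const CTGitPath& dir, const CTGitPath& parent)
{
	CString parentRoot, dirRoot;
	if (!parent.HasAdminDir(&parentRoot))
		return false;	// the parent is not inside any working tree
	if (CGitStatusCache::Instance().IsRecurseSubmodules())
		return true;	// submodule recursion: always propagate upwards
	// otherwise both paths must belong to the same working tree
	return dir.HasAdminDir(&dirRoot) && parentRoot == dirRoot;
}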
Example #2
LRESULT CAddDlg::OnFileDropped(WPARAM, LPARAM lParam)
{
	BringWindowToTop();
	SetForegroundWindow();
	SetActiveWindow();
	// if multiple files/folders are dropped
	// this handler is called for every single item
	// separately.
	// To avoid creating multiple refresh threads and
	// causing crashes, we only add the items to the
	// list control and start a timer.
	// When the timer expires, we start the refresh thread,
	// but only if it isn't already running - otherwise we
	// restart the timer.
	CTGitPath path;
	path.SetFromWin((LPCTSTR)lParam);

	// check whether the dropped file belongs to the very same repository
	CString projectDir;
	if (!path.HasAdminDir(&projectDir) || !CTGitPath::ArePathStringsEqual(g_Git.m_CurrentDir, projectDir))
		return 0;

	if (!m_addListCtrl.HasPath(path))
	{
		if (m_pathList.AreAllPathsFiles())
		{
			m_pathList.AddPath(path);
			m_pathList.RemoveDuplicates();
		}
		else
		{
			// if the path list contains folders, we have to check whether
			// our just (maybe) added path is a child of one of those. If it is
			// a child of a folder already in the list, we must not add it. Otherwise
			// that path could show up twice in the list.
			bool bHasParentInList = false;
			for (int i=0; i<m_pathList.GetCount(); ++i)
			{
				if (m_pathList[i].IsAncestorOf(path))
				{
					bHasParentInList = true;
					break;
				}
			}
			if (!bHasParentInList)
			{
				m_pathList.AddPath(path);
				m_pathList.RemoveDuplicates();
			}
		}
	}
	m_addListCtrl.ResetChecked(path);

	// Always start the timer, since the status of an existing item might have changed
	SetTimer(REFRESHTIMER, 200, nullptr);
	CTraceToOutputDebugString::Instance()(_T(__FUNCTION__) L": Item %s dropped, timer started\n", path.GetWinPath());
	return 0;
}
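The early-out at the top of the handler accepts a dropped path only if it lives in the same repository the dialog was opened for. A minimal sketch of that check as a standalone predicate (hypothetical helper, using only CTGitPath::HasAdminDir and CTGitPath::ArePathStringsEqual as in the handler above):
// Hypothetical helper: does a dropped path belong to the repository the dialog targets?
static bool BelongsToCurrentRepo(const CTGitPath& dropped, const CString& currentRepoRoot)
{
	CString droppedRoot;
	if (!dropped.HasAdminDir(&droppedRoot))
		return false;	// not inside any git working tree
	return CTGitPath::ArePathStringsEqual(currentRepoRoot, droppedRoot);
}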
Example #3
bool CTGitPath::PredLeftSameWCPathAsRight(const CTGitPath& left, const CTGitPath& right)
{
	if (left.IsAdminDir() && right.IsAdminDir())
	{
		CTGitPath l = left;
		CTGitPath r = right;
		do
		{
			l = l.GetContainingDirectory();
		} while(l.HasAdminDir());
		do
		{
			r = r.GetContainingDirectory();
		} while(r.HasAdminDir());
		return l.GetContainingDirectory().IsEquivalentTo(r.GetContainingDirectory());
	}
	return left.GetDirectory().IsEquivalentTo(right.GetDirectory());
}
Example #4
int _tmain(int argc, _TCHAR* argv[])
{
	CTGitPath path;
	CString root;
	TCHAR buff[256];
	
	GetCurrentDirectory(256,buff);
	path.SetFromWin(buff);

	if(!path.HasAdminDir(&root))
	{
		printf("not in git repository\n");
		return -1;
	}

	CGitIndexList list;
	list.ReadIndex(root+_T("\\.git\\index"));

	CGitHeadFileList filelist;
	filelist.ReadHeadHash(buff);
	_tprintf(_T("update %d\n"), filelist.CheckHeadUpdate());

	git_init();
//	filelist.ReadTree();

	WIN32_FIND_DATA data;
	CString str(buff);
	str+=_T("\\*.*");
	GitStatus status;

	HANDLE handle = FindFirstFile(str, &data);
	if (handle == INVALID_HANDLE_VALUE)
		return -1;
	while (FindNextFile(handle, &data))
	{
		if (_tcsnccmp(data.cFileName, _T(".."), 2) == 0)
			continue;
		if (_tcsnccmp(data.cFileName, _T("."), 1) == 0)
			continue;

		CString spath(buff);
		spath += _T("\\");
		spath += data.cFileName;
		CTGitPath path(spath);

		TCHAR name[100];
		int t1,t2;
		t1 = ::GetCurrentTime();
		status.GetStatusString(status.GetAllStatus(path), 100,name);
		t2 = ::GetCurrentTime();

		_tprintf(_T("%s - %s - %d\n"),data.cFileName, name, t2-t1);
		
	}
	FindClose(handle);

	return 0;
}
void GitStatus::GetStatus(const CTGitPath& path, bool /*update*/ /* = false */, bool noignore /* = false */, bool /*noexternals*/ /* = false */)
{
	// NOTE: unlike the SVN version this one does not cache the enumerated files, because in practice no code in all of
	//       Tortoise uses this, all places that call GetStatus create a temp GitStatus object which gets destroyed right
	//       after the call again

	CString sProjectRoot;
	if ( !path.HasAdminDir(&sProjectRoot) )
		return;

	bool isfull = ((DWORD)CRegStdDWORD(_T("Software\\TortoiseGit\\CacheType"),
				GetSystemMetrics(SM_REMOTESESSION) ? ShellCache::dll : ShellCache::exe) == ShellCache::dllFull);

	int err = 0;

	{
		LPCTSTR lpszSubPath = NULL;
		CString sSubPath;
		CString s = path.GetWinPathString();
		if (s.GetLength() > sProjectRoot.GetLength())
		{
			sSubPath = s.Right(s.GetLength() - sProjectRoot.GetLength());
			lpszSubPath = sSubPath;
			// skip initial slash if necessary
			if (*lpszSubPath == _T('\\'))
				++lpszSubPath;
		}

		m_status.prop_status = m_status.text_status = git_wc_status_none;
		m_status.assumeValid = false;
		m_status.skipWorktree = false;

		if(path.IsDirectory())
		{
			err = GetDirStatus(sProjectRoot,lpszSubPath,&m_status.text_status , isfull, false,!noignore, NULL, NULL);
			if (m_status.text_status == git_wc_status_added || m_status.text_status == git_wc_status_deleted) // fix for issue #1769; a folder is either modified, conflicted or normal
				m_status.text_status = git_wc_status_modified;
		}
		else
		{
			err = GetFileStatus(sProjectRoot, lpszSubPath, &m_status.text_status ,isfull, false,!noignore, NULL,NULL, &m_status.assumeValid, &m_status.skipWorktree);
		}
	}

	// An error here means the path is not under version control
	if (err)
	{
		status = NULL;
		return;
	}

	status = &m_status;
}
// static method
git_wc_status_kind GitStatus::GetAllStatus(const CTGitPath& path, git_depth_t depth)
{
	git_wc_status_kind			statuskind;
//	git_client_ctx_t * 			ctx;

//	apr_pool_t *				pool;
//	git_error_t *				err;
	BOOL						err;
	BOOL						isDir;
	CString						sProjectRoot;

	isDir = path.IsDirectory();
	if (!path.HasAdminDir(&sProjectRoot))
		return git_wc_status_none;

//	pool = git_pool_create (NULL);				// create the memory pool

//	git_error_clear(git_client_create_context(&ctx, pool));

//	git_revnum_t youngest = Git_INVALID_REVNUM;
//	git_opt_revision_t rev;
//	rev.kind = git_opt_revision_unspecified;
	statuskind = git_wc_status_none;

	const BOOL bIsRecursive = (depth == git_depth_infinity || depth == git_depth_unknown); // taken from SVN source

	CString sSubPath;
	CString s = path.GetWinPathString();
	if (s.GetLength() > sProjectRoot.GetLength())
	{
		if (sProjectRoot.GetLength() == 3 && sProjectRoot[1] == _T(':'))
			sSubPath = s.Right(s.GetLength() - sProjectRoot.GetLength());
		else
			sSubPath = s.Right(s.GetLength() - sProjectRoot.GetLength() - 1/*otherwise it gets initial slash*/);
	}

	bool isfull = ((DWORD)CRegStdDWORD(_T("Software\\TortoiseGit\\CacheType"),
				GetSystemMetrics(SM_REMOTESESSION) ? ShellCache::dll : ShellCache::exe) == ShellCache::dllFull);

	if(isDir)
	{
		err = GetDirStatus(sProjectRoot,sSubPath,&statuskind, isfull,bIsRecursive,isfull,NULL, NULL);

	}
	else
	{
		err = GetFileStatus(sProjectRoot,sSubPath,&statuskind,isfull, false,isfull, NULL,NULL);
	}

	return statuskind;
}
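GetAllStatus() above strips the project root returned by HasAdminDir() to get a repository-relative sub path, special-casing drive roots such as "C:\" whose string already ends with a backslash. A minimal sketch of that pattern as a standalone helper (hypothetical, names are mine):
// Hypothetical helper: compute the sub path relative to the project root, as above.
static CString GetRepoRelativePath(const CTGitPath& path, const CString& sProjectRoot)
{
	CString s = path.GetWinPathString();
	if (s.GetLength() <= sProjectRoot.GetLength())
		return CString();	// the path is the project root itself
	if (sProjectRoot.GetLength() == 3 && sProjectRoot[1] == _T(':'))
		return s.Right(s.GetLength() - sProjectRoot.GetLength());	// "C:\" already ends with a backslash
	return s.Right(s.GetLength() - sProjectRoot.GetLength() - 1);	// skip the separating backslash
}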
// Update our composite status and deal with things if it's changed
void CCachedDirectory::UpdateCurrentStatus()
{
	git_wc_status_kind newStatus = CalculateRecursiveStatus();
	ATLTRACE(_T("UpdateCurrentStatus %s new:%d old: %d\n"),
		m_directoryPath.GetWinPath(),
		newStatus, m_currentFullStatus);

	if ( this->m_ownStatus.GetEffectiveStatus() < git_wc_status_normal )
	{
		if (::PathFileExists(this->m_directoryPath.GetWinPathString()+_T("\\.git")))
		{
			// the project root must have at least 'normal' status.
			ATLTRACE(_T("force update project root directory as normal status\n"));
			this->m_ownStatus.ForceStatus(git_wc_status_normal);
		}
	}

	if ((newStatus != m_currentFullStatus) && m_ownStatus.IsVersioned())
	{
		if ((m_currentFullStatus != git_wc_status_none)&&(m_ownStatus.GetEffectiveStatus() != git_wc_status_missing))
		{
			// Our status has changed - tell the shell
			ATLTRACE(_T("Dir %s, status change from %d to %d, send shell notification\n"), m_directoryPath.GetWinPath(), m_currentFullStatus, newStatus);
			CGitStatusCache::Instance().UpdateShell(m_directoryPath);
		}
		if (m_ownStatus.GetEffectiveStatus() != git_wc_status_missing)
			m_currentFullStatus = newStatus;
		else
			m_currentFullStatus = git_wc_status_missing;
	}
	// And tell our parent, if we've got one...
	// We *always* tell our parent about our status, even if it hasn't
	// changed. This makes sure that the parent really has our current
	// status - the parent can decide for itself whether our status has
	// changed or not.
	CTGitPath parentPath = m_directoryPath.GetContainingDirectory();
	if(!parentPath.IsEmpty())
	{
		// We have a parent.
		// Only version-controlled directories need to be cached.
		CString root1, root2;
		if(parentPath.HasAdminDir(&root1) && m_directoryPath.HasAdminDir(&root2) && root1 == root2)
		{
			CCachedDirectory * cachedDir = CGitStatusCache::Instance().GetDirectoryCacheEntry(parentPath);
			if (cachedDir)
				cachedDir->UpdateChildDirectoryStatus(m_directoryPath, m_currentFullStatus);
		}
	}
}
Example #8
CStatusCacheEntry CCachedDirectory::GetStatusForMember(const CTGitPath& path, bool bRecursive,  bool bFetch /* = true */)
{
	CString sProjectRoot;
	bool bIsVersionedPath;

	bool bRequestForSelf = false;
	if(path.IsEquivalentToWithoutCase(m_directoryPath))
	{
		bRequestForSelf = true;
		AutoLocker lock(m_critSec);
		// HasAdminDir might modify m_directoryPath, so we need to do it synchronized
		bIsVersionedPath = m_directoryPath.HasAdminDir(&sProjectRoot);
	}
	else
		bIsVersionedPath = path.HasAdminDir(&sProjectRoot);

	// In almost all circumstances, we ask for the status of a member of this directory.
	ATLASSERT(m_directoryPath.IsEquivalentToWithoutCase(path.GetContainingDirectory()) || bRequestForSelf);

	// If this is not a version-controlled path
	if (!bIsVersionedPath)
	{
		CTraceToOutputDebugString::Instance()(_T(__FUNCTION__) _T(": %s is not under version control\n"), path.GetWinPath());
		return CStatusCacheEntry();
	}

	// We've not got this item in the cache - let's add it
	// We never bother asking SVN for the status of just one file, always for its containing directory

	if (GitAdminDir::IsAdminDirPath(path.GetWinPathString()))
	{
		// We're being asked for the status of a .git directory
		// It's not worth asking for this
		return CStatusCacheEntry();
	}


	if(bFetch)
	{
		return GetStatusFromGit(path, sProjectRoot);
	}
	else
	{
		return GetStatusFromCache(path, bRecursive);
	}
}
// static method
git_wc_status_kind GitStatus::GetAllStatus(const CTGitPath& path, git_depth_t depth, bool * assumeValid, bool * skipWorktree)
{
	git_wc_status_kind			statuskind;
	BOOL						err;
	BOOL						isDir;
	CString						sProjectRoot;

	isDir = path.IsDirectory();
	if (!path.HasAdminDir(&sProjectRoot))
		return git_wc_status_none;

//	rev.kind = git_opt_revision_unspecified;
	statuskind = git_wc_status_none;

	const BOOL bIsRecursive = (depth == git_depth_infinity || depth == git_depth_unknown); // taken from SVN source

	CString sSubPath;
	CString s = path.GetWinPathString();
	if (s.GetLength() > sProjectRoot.GetLength())
	{
		if (sProjectRoot.GetLength() == 3 && sProjectRoot[1] == _T(':'))
			sSubPath = s.Right(s.GetLength() - sProjectRoot.GetLength());
		else
			sSubPath = s.Right(s.GetLength() - sProjectRoot.GetLength() - 1/*otherwise it gets initial slash*/);
	}

	bool isfull = ((DWORD)CRegStdDWORD(_T("Software\\TortoiseGit\\CacheType"),
				GetSystemMetrics(SM_REMOTESESSION) ? ShellCache::dll : ShellCache::exe) == ShellCache::dllFull);

	if(isDir)
	{
		err = GetDirStatus(sProjectRoot,sSubPath,&statuskind, isfull,bIsRecursive,isfull,NULL, NULL);
		// folders must not be displayed as added or deleted only as modified (this is for Shell Overlay-Modes)
		if (statuskind == git_wc_status_unversioned && sSubPath.IsEmpty())
			statuskind = git_wc_status_normal;
		else if (statuskind == git_wc_status_deleted || statuskind == git_wc_status_added)
			statuskind = git_wc_status_modified;
	}
	else
	{
		err = GetFileStatus(sProjectRoot, sSubPath, &statuskind, isfull, false, isfull, NULL, NULL, assumeValid, skipWorktree);
	}

	return statuskind;
}
Example #10
CStatusCacheEntry CCachedDirectory::GetStatusForMember(const CTGitPath& path, bool bRecursive,  bool bFetch /* = true */)
{
	CString strCacheKey;
	bool bRequestForSelf = false;
	if(path.IsEquivalentToWithoutCase(m_directoryPath))
	{
		bRequestForSelf = true;
	}
//OutputDebugStringA("GetStatusForMember: ");OutputDebugStringW(path.GetWinPathString());OutputDebugStringA("\r\n");
	// In almost all circumstances, we ask for the status of a member of this directory.
	ATLASSERT(m_directoryPath.IsEquivalentToWithoutCase(path.GetContainingDirectory()) || bRequestForSelf);

	CString sProjectRoot;
	const BOOL bIsVersionedPath = path.HasAdminDir(&sProjectRoot);

	// If this is not a version-controlled path
	if (!bIsVersionedPath)
	{
		ATLTRACE(_T("%s is not under version control\n"), path.GetWinPath());
		return CStatusCacheEntry();
	}

	// We've not got this item in the cache - let's add it
	// We never bother asking SVN for the status of just one file, always for its containing directory

	if (g_GitAdminDir.IsAdminDirPath(path.GetWinPathString()))
	{
	// We're being asked for the status of a .git directory
		// It's not worth asking for this
		return CStatusCacheEntry();
	}


	if(bFetch)
	{
		return GetStatusFromGit(path, sProjectRoot);
	}
	else
	{
		return GetStatusFromCache(path, bRecursive);
	}
}
bool ImportPatchCommand::Execute()
{
	CImportPatchDlg dlg;
//	dlg.m_bIsTag=TRUE;
	CString cmd;
	CString output;

	if (!orgPathList.IsEmpty() && !orgPathList[0].HasAdminDir())
	{
		CString str=CAppUtils::ChooseRepository(NULL);
		if(str.IsEmpty())
			return FALSE;

		CTGitPath path;
		path.SetFromWin(str);

		if(!path.HasAdminDir())
		{
			CString err;
			err.Format(IDS_ERR_NOT_REPOSITORY, (LPCTSTR)str);
			CMessageBox::Show(NULL,err,_T("TortoiseGit"),MB_OK|MB_ICONERROR);
			return FALSE;
		}
		g_Git.m_CurrentDir=str;
	}

	for(int i = 0 ; i < this->orgPathList.GetCount(); ++i)
	{
		if(!orgPathList[i].IsDirectory())
		{
			dlg.m_PathList.AddPath(orgPathList[i]);
		}
	}

	if(dlg.DoModal()==IDOK)
	{
		return TRUE;
	}

	return FALSE;
}
Example #12
// static method
git_wc_status_kind GitStatus::GetAllStatus(const CTGitPath& path, git_depth_t depth)
{
	git_wc_status_kind			statuskind;
//	git_client_ctx_t * 			ctx;
	
//	apr_pool_t *				pool;
//	git_error_t *				err;
	BOOL						err;
	BOOL						isDir;
	CString						sProjectRoot;

	isDir = path.IsDirectory();
	if (!path.HasAdminDir(&sProjectRoot))
		return git_wc_status_none;

//	pool = git_pool_create (NULL);				// create the memory pool

//	git_error_clear(git_client_create_context(&ctx, pool));

//	git_revnum_t youngest = Git_INVALID_REVNUM;
//	git_opt_revision_t rev;
//	rev.kind = git_opt_revision_unspecified;
	statuskind = git_wc_status_none;

	const BOOL bIsRecursive = (depth == git_depth_infinity || depth == git_depth_unknown); // taken from SVN source

#ifdef _TORTOISESHELL
	if (g_ShellCache.GetCacheType() == ShellCache::dll)
#else
	if ((DWORD)CRegStdWORD(_T("Software\\TortoiseGit\\CacheType"), GetSystemMetrics(SM_REMOTESESSION) ? 2 : 1) == 2)
#endif
	{
		// gitindex.h based status

		CString sSubPath;
		CString s = path.GetWinPathString();
		if (s.GetLength() > sProjectRoot.GetLength())
		{
			sSubPath = CStringA(s.Right(s.GetLength() - sProjectRoot.GetLength() - 1/*otherwise it gets initial slash*/));
		}

		err = g_IndexFileMap.GetFileStatus(sProjectRoot,sSubPath,&statuskind);
	}
	else
	{
		LPCTSTR lpszSubPath = NULL;
		CString sSubPath;
		CString s = path.GetWinPathString();
		if (s.GetLength() > sProjectRoot.GetLength())
		{
			sSubPath = s.Right(s.GetLength() - sProjectRoot.GetLength() - 1/*otherwise it gets initial slash*/);
			lpszSubPath = sSubPath;
		}

#if 1
		// when recursion enabled, let wingit determine the recursive status for folders instead of enumerating all files here
		UINT nFlags = WGEFF_SingleFile;
		if (!bIsRecursive)
			nFlags |= WGEFF_NoRecurse;
		if (!lpszSubPath)
			// report root dir as normal (otherwise it could be considered git_wc_status_unversioned, which would be wrong?)
			nFlags |= WGEFF_EmptyAsNormal;
#else
		// enumerate all files, recursively if requested
		UINT nFlags = 0;
		if (!bIsRecursive)
			nFlags |= WGEFF_NoRecurse;
#endif

		err = !wgEnumFiles(sProjectRoot, lpszSubPath, nFlags, &getallstatus, &statuskind);

		/*err = git_client_status4 (&youngest,
							path.GetSVNApiPath(pool),
							&rev,
							getallstatus,
							&statuskind,
							depth,
							TRUE,		//getall
							FALSE,		//update
							TRUE,		//noignore
							FALSE,		//ignore externals
							NULL,
							ctx,
							pool);*/
	}

	// Error present
	if (err != NULL)
	{
//		git_error_clear(err);
//		git_pool_destroy (pool);				//free allocated memory
		return git_wc_status_none;	
	}

//	git_pool_destroy (pool);				//free allocated memory

	return statuskind;
}
Example #13
void CDirectoryWatcher::WorkerThread()
{
	DWORD numBytes;
	CDirWatchInfo * pdi = NULL;
	LPOVERLAPPED lpOverlapped;
	WCHAR buf[READ_DIR_CHANGE_BUFFER_SIZE] = {0};
	WCHAR * pFound = NULL;
	while (m_bRunning)
	{
		CleanupWatchInfo();
		if (watchedPaths.GetCount())
		{
			// Any incoming notifications?

			pdi = NULL;
			numBytes = 0;
			InterlockedExchange(&m_bCleaned, FALSE);
			if ((!m_hCompPort)
				|| !GetQueuedCompletionStatus(m_hCompPort,
											  &numBytes,
											  (PULONG_PTR) &pdi,
											  &lpOverlapped,
											  600000 /*10 minutes*/))
			{
				// No. Still trying?

				if (!m_bRunning)
					return;

				ATLTRACE(_T(": restarting watcher\n"));
				m_hCompPort.CloseHandle();

				// We must sync the whole section because other threads may
				// receive "AddPath" calls that will delete the completion
				// port *while* we are adding references to it.

				AutoLocker lock(m_critSec);

				// Clear the list of watched objects and recreate that list.
				// This will also delete the old completion port

				ClearInfoMap();
				CleanupWatchInfo();

				for (int i=0; i<watchedPaths.GetCount(); ++i)
				{
					CTGitPath watchedPath = watchedPaths[i];

					CAutoFile hDir = CreateFile(watchedPath.GetWinPath(),
											FILE_LIST_DIRECTORY,
											FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
											NULL, //security attributes
											OPEN_EXISTING,
											FILE_FLAG_BACKUP_SEMANTICS | //required privileges: SE_BACKUP_NAME and SE_RESTORE_NAME.
											FILE_FLAG_OVERLAPPED,
											NULL);
					if (!hDir)
					{
						// this could happen if a watched folder has been removed/renamed
						ATLTRACE(_T("CDirectoryWatcher: CreateFile failed. Can't watch directory %s\n"), watchedPaths[i].GetWinPath());
						watchedPaths.RemovePath(watchedPath);
						break;
					}

					DEV_BROADCAST_HANDLE NotificationFilter;
					SecureZeroMemory(&NotificationFilter, sizeof(NotificationFilter));
					NotificationFilter.dbch_size = sizeof(DEV_BROADCAST_HANDLE);
					NotificationFilter.dbch_devicetype = DBT_DEVTYP_HANDLE;
					NotificationFilter.dbch_handle = hDir;
					// RegisterDeviceNotification sends a message to the UI thread:
					// make sure we *can* send it and that the UI thread isn't waiting on a lock
					int numPaths = watchedPaths.GetCount();
					size_t numWatch = watchInfoMap.size();
					lock.Unlock();
					NotificationFilter.dbch_hdevnotify = RegisterDeviceNotification(hWnd, &NotificationFilter, DEVICE_NOTIFY_WINDOW_HANDLE);
					lock.Lock();
					// since we released the lock to prevent a deadlock with the UI thread,
					// it could happen that new paths were added to watch, or another thread
					// could have cleared our info map.
					// if that happened, we have to restart watching all paths again.
					if ((numPaths != watchedPaths.GetCount()) || (numWatch != watchInfoMap.size()))
					{
						ClearInfoMap();
						CleanupWatchInfo();
						Sleep(200);
						break;
					}

					CDirWatchInfo * pDirInfo = new CDirWatchInfo(hDir, watchedPath);
					hDir.Detach();  // the new CDirWatchInfo object owns the handle now
					pDirInfo->m_hDevNotify = NotificationFilter.dbch_hdevnotify;


					HANDLE port = CreateIoCompletionPort(pDirInfo->m_hDir, m_hCompPort, (ULONG_PTR)pDirInfo, 0);
					if (port == NULL)
					{
						ATLTRACE(_T("CDirectoryWatcher: CreateIoCompletionPort failed. Can't watch directory %s\n"), watchedPath.GetWinPath());

						// we must close the directory handle to allow ClearInfoMap()
						// to close the completion port properly
						pDirInfo->CloseDirectoryHandle();

						ClearInfoMap();
						CleanupWatchInfo();
						delete pDirInfo;
						pDirInfo = NULL;

						watchedPaths.RemovePath(watchedPath);
						break;
					}
					m_hCompPort = port;

					if (!ReadDirectoryChangesW(pDirInfo->m_hDir,
												pDirInfo->m_Buffer,
												READ_DIR_CHANGE_BUFFER_SIZE,
												TRUE,
												FILE_NOTIFY_CHANGE_FILE_NAME | FILE_NOTIFY_CHANGE_DIR_NAME | FILE_NOTIFY_CHANGE_LAST_WRITE,
												&numBytes,// not used
												&pDirInfo->m_Overlapped,
												NULL))  //no completion routine!
					{
						ATLTRACE(_T("CDirectoryWatcher: ReadDirectoryChangesW failed. Can't watch directory %s\n"), watchedPath.GetWinPath());

						// we must close the directory handle to allow ClearInfoMap()
						// to close the completion port properly
						pDirInfo->CloseDirectoryHandle();

						ClearInfoMap();
						CleanupWatchInfo();
						delete pDirInfo;
						pDirInfo = NULL;
						watchedPaths.RemovePath(watchedPath);
						break;
					}

					ATLTRACE(_T("watching path %s\n"), pDirInfo->m_DirName.GetWinPath());
					watchInfoMap[pDirInfo->m_hDir] = pDirInfo;
				}
			}
			else
			{
				if (!m_bRunning)
					return;
				if (watchInfoMap.empty())
					continue;

				// NOTE: the longer this code takes to execute until ReadDirectoryChangesW
				// is called again, the higher the chance that we miss some
				// changes in the file system!
				if (pdi)
				{
					BOOL bRet = false;
					std::list<CTGitPath> notifyPaths;
					{
						AutoLocker lock(m_critSec);
						// in case the CDirectoryWatcher objects have been cleaned,
						// the m_bCleaned variable will be set to true here. If the
						// objects haven't been cleared, we can access them here.
						if (InterlockedExchange(&m_bCleaned, FALSE))
							continue;
						if (   (!pdi->m_hDir) || watchInfoMap.empty()
							|| (watchInfoMap.find(pdi->m_hDir) == watchInfoMap.end()))
						{
							continue;
						}
						PFILE_NOTIFY_INFORMATION pnotify = (PFILE_NOTIFY_INFORMATION)pdi->m_Buffer;
						DWORD nOffset = 0;

						do
						{
							pnotify = (PFILE_NOTIFY_INFORMATION)((LPBYTE)pnotify + nOffset);

							if ((ULONG_PTR)pnotify - (ULONG_PTR)pdi->m_Buffer > READ_DIR_CHANGE_BUFFER_SIZE)
								break;

							nOffset = pnotify->NextEntryOffset;

							if (pnotify->FileNameLength >= (READ_DIR_CHANGE_BUFFER_SIZE*sizeof(TCHAR)))
								continue;

							SecureZeroMemory(buf, READ_DIR_CHANGE_BUFFER_SIZE*sizeof(TCHAR));
							_tcsncpy_s(buf, pdi->m_DirPath, _countof(buf) - 1);
							errno_t err = _tcsncat_s(buf + pdi->m_DirPath.GetLength(), READ_DIR_CHANGE_BUFFER_SIZE-pdi->m_DirPath.GetLength(), pnotify->FileName, min(READ_DIR_CHANGE_BUFFER_SIZE-pdi->m_DirPath.GetLength(), pnotify->FileNameLength/sizeof(TCHAR)));
							if (err == STRUNCATE)
							{
								continue;
							}
							buf[(pnotify->FileNameLength/sizeof(TCHAR))+pdi->m_DirPath.GetLength()] = 0;

							if (m_FolderCrawler)
							{
								if ((pFound = wcsstr(buf, L"\\tmp")) != NULL)
								{
									pFound += 4;
									if (((*pFound)=='\\')||((*pFound)=='\0'))
									{
										continue;
									}
								}
								if ((pFound = wcsstr(buf, L":\\RECYCLER\\")) != NULL)
								{
									if ((pFound-buf) < 5)
									{
										// a notification for the recycle bin - ignore it
										continue;
									}
								}
								if ((pFound = wcsstr(buf, L":\\$Recycle.Bin\\")) != NULL)
								{
									if ((pFound-buf) < 5)
									{
										// a notification for the recycle bin - ignore it
										continue;
									}
								}
								if (wcsstr(buf, L".tmp") != NULL)
								{
									// assume files with a .tmp extension are neither versioned nor interesting,
									// so ignore them.
									continue;
								}

								CTGitPath path;
								bool isIndex = false;
								if ((pFound = wcsstr(buf, L".git")) != NULL)
								{
									// ignore changes to repository data, except for .git/index(.lock) and .git/HEAD(.lock) files
									if ((ULONG_PTR)pnotify - (ULONG_PTR)pdi->m_Buffer > READ_DIR_CHANGE_BUFFER_SIZE)
										break;

									path = g_AdminDirMap.GetWorkingCopy(CTGitPath(buf).GetContainingDirectory().GetWinPathString());

									if ((wcsstr(pFound, L"index.lock") != NULL || wcsstr(pFound, L"HEAD.lock") != NULL) && pnotify->Action == FILE_ACTION_ADDED)
									{
										CGitStatusCache::Instance().BlockPath(path);
										continue;
									}
									else if (((wcsstr(pFound, L"index.lock") != NULL || wcsstr(pFound, L"HEAD.lock") != NULL) && pnotify->Action == FILE_ACTION_REMOVED) || (((wcsstr(pFound, L"index") != NULL && wcsstr(pFound, L"index.lock") == NULL) || (wcsstr(pFound, L"HEAD") != NULL && wcsstr(pFound, L"HEAD.lock") != NULL)) && pnotify->Action == FILE_ACTION_MODIFIED) || ((wcsstr(pFound, L"index.lock") == NULL || wcsstr(pFound, L"HEAD.lock") != NULL) && pnotify->Action == FILE_ACTION_RENAMED_NEW_NAME))
									{
										isIndex = true;
										CGitStatusCache::Instance().BlockPath(path, 1);
									}
									else
									{
										continue;
									}
								}
								else
									path.SetFromUnknown(buf);

								if(!path.HasAdminDir() && !isIndex)
									continue;

								ATLTRACE(_T("change notification: %s\n"), buf);
								notifyPaths.push_back(path);
							}
						} while ((nOffset > 0)&&(nOffset < READ_DIR_CHANGE_BUFFER_SIZE));

						// setup next notification cycle
						SecureZeroMemory (pdi->m_Buffer, sizeof(pdi->m_Buffer));
						SecureZeroMemory (&pdi->m_Overlapped, sizeof(OVERLAPPED));
						bRet = ReadDirectoryChangesW(pdi->m_hDir,
							pdi->m_Buffer,
							READ_DIR_CHANGE_BUFFER_SIZE,
							TRUE,
							FILE_NOTIFY_CHANGE_FILE_NAME | FILE_NOTIFY_CHANGE_DIR_NAME | FILE_NOTIFY_CHANGE_LAST_WRITE,
							&numBytes,// not used
							&pdi->m_Overlapped,
							NULL); //no completion routine!
					}
					if (!notifyPaths.empty())
					{
						for (std::list<CTGitPath>::const_iterator nit = notifyPaths.begin(); nit != notifyPaths.end(); ++nit)
						{
							m_FolderCrawler->AddPathForUpdate(*nit);
						}
					}

					// any clean-up to do?

					CleanupWatchInfo();

					if (!bRet)
					{
						// Since the call to ReadDirectoryChangesW failed, just
						// wait a while. We don't want to have this thread
						// running using 100% CPU if something goes completely
						// wrong.
						Sleep(200);
					}
				}
			}
		}// if (watchedPaths.GetCount())
		else
			Sleep(200);
	}// while (m_bRunning)
}
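Before handing a change notification to the folder crawler, the worker thread above discards paths that cannot affect any overlay: temp folders, recycle-bin entries and .tmp files. A minimal sketch of those string checks collected into one predicate (hypothetical helper; plain wide-string checks only):
// Hypothetical helper mirroring the path filtering done in the worker thread above.
static bool IsIgnorableNotification(const wchar_t* buf)
{
	const wchar_t* pFound = wcsstr(buf, L"\\tmp");
	if (pFound && (pFound[4] == L'\\' || pFound[4] == L'\0'))
		return true;	// a "tmp" folder
	if ((pFound = wcsstr(buf, L":\\RECYCLER\\")) != NULL && (pFound - buf) < 5)
		return true;	// recycle bin (pre-Vista)
	if ((pFound = wcsstr(buf, L":\\$Recycle.Bin\\")) != NULL && (pFound - buf) < 5)
		return true;	// recycle bin (Vista and later)
	if (wcsstr(buf, L".tmp") != NULL)
		return true;	// .tmp files are neither versioned nor interesting
	return false;
}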
void CDirectoryWatcher::WorkerThread()
{
	DWORD numBytes;
	CDirWatchInfo * pdi = NULL;
	LPOVERLAPPED lpOverlapped;
	WCHAR buf[READ_DIR_CHANGE_BUFFER_SIZE] = {0};
	WCHAR * pFound = NULL;
	CTGitPath path;

	while (m_bRunning)
	{
		if (watchedPaths.GetCount())
		{
			if (!GetQueuedCompletionStatus(m_hCompPort,
											&numBytes,
											(PULONG_PTR) &pdi,
											&lpOverlapped,
											INFINITE))
			{
				// Error retrieving changes
				// Clear the list of watched objects and recreate that list
				if (!m_bRunning)
					return;
				{
					AutoLocker lock(m_critSec);
					ClearInfoMap();
				}
				DWORD lasterr = GetLastError();
				if ((m_hCompPort != INVALID_HANDLE_VALUE)&&(lasterr!=ERROR_SUCCESS)&&(lasterr!=ERROR_INVALID_HANDLE))
				{
					CloseHandle(m_hCompPort);
					m_hCompPort = INVALID_HANDLE_VALUE;
				}
				// Since we pass m_hCompPort to CreateIoCompletionPort, we
				// have to set this to NULL to have that API create a new
				// handle.
				m_hCompPort = NULL;
				for (int i=0; i<watchedPaths.GetCount(); ++i)
				{
					CTGitPath watchedPath = watchedPaths[i];

					HANDLE hDir = CreateFile(watchedPath.GetWinPath(),
											FILE_LIST_DIRECTORY,
											FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
											NULL, //security attributes
											OPEN_EXISTING,
											FILE_FLAG_BACKUP_SEMANTICS | //required privileges: SE_BACKUP_NAME and SE_RESTORE_NAME.
											FILE_FLAG_OVERLAPPED,
											NULL);
					if (hDir == INVALID_HANDLE_VALUE)
					{
						// this could happen if a watched folder has been removed/renamed
						ATLTRACE(_T("CDirectoryWatcher: CreateFile failed. Can't watch directory %s\n"), watchedPaths[i].GetWinPath());
						CloseHandle(m_hCompPort);
						m_hCompPort = INVALID_HANDLE_VALUE;
						AutoLocker lock(m_critSec);
						watchedPaths.RemovePath(watchedPath);
						i--; if (i<0) i=0;
						break;
					}

					DEV_BROADCAST_HANDLE NotificationFilter;
					SecureZeroMemory(&NotificationFilter, sizeof(NotificationFilter));
					NotificationFilter.dbch_size = sizeof(DEV_BROADCAST_HANDLE);
					NotificationFilter.dbch_devicetype = DBT_DEVTYP_HANDLE;
					NotificationFilter.dbch_handle = hDir;
					NotificationFilter.dbch_hdevnotify = RegisterDeviceNotification(hWnd, &NotificationFilter, DEVICE_NOTIFY_WINDOW_HANDLE);

					CDirWatchInfo * pDirInfo = new CDirWatchInfo(hDir, watchedPath);
					pDirInfo->m_hDevNotify = NotificationFilter.dbch_hdevnotify;
					m_hCompPort = CreateIoCompletionPort(hDir, m_hCompPort, (ULONG_PTR)pDirInfo, 0);
					if (m_hCompPort == NULL)
					{
						ATLTRACE(_T("CDirectoryWatcher: CreateIoCompletionPort failed. Can't watch directory %s\n"), watchedPath.GetWinPath());
						AutoLocker lock(m_critSec);
						ClearInfoMap();
						delete pDirInfo;
						pDirInfo = NULL;
						watchedPaths.RemovePath(watchedPath);
						i--; if (i<0) i=0;
						break;
					}
					if (!ReadDirectoryChangesW(pDirInfo->m_hDir,
												pDirInfo->m_Buffer,
												READ_DIR_CHANGE_BUFFER_SIZE,
												TRUE,
												FILE_NOTIFY_CHANGE_FILE_NAME | FILE_NOTIFY_CHANGE_DIR_NAME | FILE_NOTIFY_CHANGE_LAST_WRITE,
												&numBytes,// not used
												&pDirInfo->m_Overlapped,
												NULL))	//no completion routine!
					{
						ATLTRACE(_T("CDirectoryWatcher: ReadDirectoryChangesW failed. Can't watch directory %s\n"), watchedPath.GetWinPath());
						AutoLocker lock(m_critSec);
						ClearInfoMap();
						delete pDirInfo;
						pDirInfo = NULL;
						watchedPaths.RemovePath(watchedPath);
						i--; if (i<0) i=0;
						break;
					}
					AutoLocker lock(m_critSec);
					watchInfoMap[pDirInfo->m_hDir] = pDirInfo;
					ATLTRACE(_T("watching path %s\n"), pDirInfo->m_DirName.GetWinPath());
				}
			}
			else
			{
				if (!m_bRunning)
					return;
				// NOTE: the longer this code takes to execute until ReadDirectoryChangesW
				// is called again, the higher the chance that we miss some
				// changes in the file system!
				if (pdi)
				{
					if (numBytes == 0)
					{
						goto continuewatching;
					}
					PFILE_NOTIFY_INFORMATION pnotify = (PFILE_NOTIFY_INFORMATION)pdi->m_Buffer;
					if ((ULONG_PTR)pnotify - (ULONG_PTR)pdi->m_Buffer > READ_DIR_CHANGE_BUFFER_SIZE)
						goto continuewatching;
					DWORD nOffset = pnotify->NextEntryOffset;

					do
					{
						nOffset = pnotify->NextEntryOffset;
						if (pnotify->FileNameLength >= (READ_DIR_CHANGE_BUFFER_SIZE*sizeof(TCHAR)))
							continue;
						SecureZeroMemory(buf, READ_DIR_CHANGE_BUFFER_SIZE*sizeof(TCHAR));
						_tcsncpy_s(buf, READ_DIR_CHANGE_BUFFER_SIZE, pdi->m_DirPath, READ_DIR_CHANGE_BUFFER_SIZE);
						errno_t err = _tcsncat_s(buf+pdi->m_DirPath.GetLength(), READ_DIR_CHANGE_BUFFER_SIZE-pdi->m_DirPath.GetLength(), pnotify->FileName, _TRUNCATE);
						if (err == STRUNCATE)
						{
							pnotify = (PFILE_NOTIFY_INFORMATION)((LPBYTE)pnotify + nOffset);
							continue;
						}
						buf[(pnotify->FileNameLength/sizeof(TCHAR))+pdi->m_DirPath.GetLength()] = 0;
						pnotify = (PFILE_NOTIFY_INFORMATION)((LPBYTE)pnotify + nOffset);
						if (m_FolderCrawler)
						{
							if ((pFound = wcsstr(buf, L"\\tmp"))!=NULL)
							{
								pFound += 4;
								if (((*pFound)=='\\')||((*pFound)=='\0'))
								{
									if ((ULONG_PTR)pnotify - (ULONG_PTR)pdi->m_Buffer > READ_DIR_CHANGE_BUFFER_SIZE)
										break;
									continue;
								}
							}
							if ((pFound = wcsstr(buf, L":\\RECYCLER\\"))!=NULL)
							{
								if ((pFound-buf) < 5)
								{
									// a notification for the recycle bin - ignore it
									if ((ULONG_PTR)pnotify - (ULONG_PTR)pdi->m_Buffer > READ_DIR_CHANGE_BUFFER_SIZE)
										break;
									continue;
								}
							}
							if ((pFound = wcsstr(buf, L":\\$Recycle.Bin\\"))!=NULL)
							{
								if ((pFound-buf) < 5)
								{
									// a notification for the recycle bin - ignore it
									if ((ULONG_PTR)pnotify - (ULONG_PTR)pdi->m_Buffer > READ_DIR_CHANGE_BUFFER_SIZE)
										break;
									continue;
								}
							}
							if ((pFound = wcsstr(buf, L".tmp"))!=NULL)
							{
								// assume files with a .tmp extension are neither versioned nor interesting,
								// so ignore them.
								if ((ULONG_PTR)pnotify - (ULONG_PTR)pdi->m_Buffer > READ_DIR_CHANGE_BUFFER_SIZE)
									break;
								continue;
							}
							bool isIndex = false;
							if ((pFound = wcsstr(buf, L".git"))!=NULL)
							{
								// ignore changes to repository data, except for .git/index(.lock) and .git/HEAD(.lock) files
								if ((ULONG_PTR)pnotify - (ULONG_PTR)pdi->m_Buffer > READ_DIR_CHANGE_BUFFER_SIZE)
									break;

								if ((wcsstr(pFound, L"index.lock") != NULL && wcsstr(pFound, L"HEAD.lock") != NULL) && pnotify->Action == FILE_ACTION_ADDED)
								{
									m_FolderCrawler->BlockPath(CTGitPath(buf).GetContainingDirectory().GetContainingDirectory()); // optimize here, and use general BlockPath with priorities
									continue;
								}
								else if ((wcsstr(pFound, L"index.lock") != NULL && wcsstr(pFound, L"HEAD.lock") != NULL) && pnotify->Action == FILE_ACTION_REMOVED)
								{
									isIndex = true;
									m_FolderCrawler->BlockPath(CTGitPath(buf).GetContainingDirectory().GetContainingDirectory(), 1);
								}
								else
								{
									continue;
								}
							}

							path.SetFromWin(buf);
							if(!path.HasAdminDir() && !isIndex)
								continue;

							ATLTRACE(_T("change notification: %s\n"), buf);
							m_FolderCrawler->AddPathForUpdate(CTGitPath(buf));
						}
						if ((ULONG_PTR)pnotify - (ULONG_PTR)pdi->m_Buffer > READ_DIR_CHANGE_BUFFER_SIZE)
							break;
					} while (nOffset);
continuewatching:
					SecureZeroMemory(pdi->m_Buffer, sizeof(pdi->m_Buffer));
					SecureZeroMemory(&pdi->m_Overlapped, sizeof(OVERLAPPED));
					if (!ReadDirectoryChangesW(pdi->m_hDir,
												pdi->m_Buffer,
												READ_DIR_CHANGE_BUFFER_SIZE,
												TRUE,
												FILE_NOTIFY_CHANGE_FILE_NAME | FILE_NOTIFY_CHANGE_DIR_NAME | FILE_NOTIFY_CHANGE_LAST_WRITE,
												&numBytes,// not used
												&pdi->m_Overlapped,
												NULL))	//no completion routine!
					{
						// Since the call to ReadDirectoryChangesW failed, just
						// wait a while. We don't want to have this thread
						// running using 100% CPU if something goes completely
						// wrong.
						Sleep(200);
					}
				}
			}
		} // if (watchedPaths.GetCount())
		else
			Sleep(200);
	} // while (m_bRunning)
}
Example #15
int CCachedDirectory::EnumFiles(const CTGitPath &path , bool IsFull)
{
	CString sProjectRoot;
	path.HasAdminDir(&sProjectRoot);

	CTraceToOutputDebugString::Instance()(_T(__FUNCTION__) _T(": EnumFiles %s\n"), path.GetWinPath());

	ATLASSERT( !m_directoryPath.IsEmpty() );

	CString sSubPath;

	CString s = path.GetWinPath();

	if (s.GetLength() > sProjectRoot.GetLength())
	{
		// skip initial slash if necessary
		if(s[sProjectRoot.GetLength()] == _T('\\'))
			sSubPath = s.Right(s.GetLength() - sProjectRoot.GetLength() -1);
		else
			sSubPath = s.Right(s.GetLength() - sProjectRoot.GetLength() );
	}

	// strip "\" at the end, otherwise cache lookups for drives do not work correctly
	sProjectRoot.TrimRight(_T("\\"));

	GitStatus *pStatus = &CGitStatusCache::Instance().m_GitStatus;
	UNREFERENCED_PARAMETER(pStatus);
	git_wc_status_kind status = git_wc_status_none;

	if (!path.IsDirectory())
	{
		bool assumeValid = false;
		bool skipWorktree = false;
		pStatus->GetFileStatus(sProjectRoot, sSubPath, &status, IsFull, false, true, GetStatusCallback, this, &assumeValid, &skipWorktree);
		if (status < m_mostImportantFileStatus)
			RefreshMostImportant();
	}
	else
	{
		bool isSelf = path == m_directoryPath;
		if (isSelf)
		{
			AutoLocker lock(m_critSec);
			// clear subdirectory status cache
			m_childDirectories.clear();
			// build new files status cache
			m_entryCache_tmp.clear();
		}

		m_mostImportantFileStatus = git_wc_status_none;
		pStatus->EnumDirStatus(sProjectRoot, sSubPath, &status, IsFull, false, true, GetStatusCallback,this);
		m_mostImportantFileStatus = GitStatus::GetMoreImportant(m_mostImportantFileStatus, status);

		if (isSelf)
		{
			AutoLocker lock(m_critSec);
			// use a tmp files status cache so that we can still use the old cached values
			// for deciding whether we have to issue a shell notify
			m_entryCache = m_entryCache_tmp;
			m_entryCache_tmp.clear();
		}

		// need to set/construct m_ownStatus (only unversioned and normal are valid values)
		m_ownStatus = git_wc_status_unversioned;
		m_ownStatus.SetKind(git_node_dir);
		if (m_mostImportantFileStatus > git_wc_status_unversioned)
		{
			git_wc_status2_t status2;
			status2.text_status = status2.prop_status = git_wc_status_normal;
			m_ownStatus.SetStatus(&status2);
		}
		else
		{
			if (::PathFileExists(m_directoryPath.GetWinPathString() + _T("\\.git"))) {
				git_wc_status2_t status2;
				status2.text_status = status2.prop_status = git_wc_status_normal;
				m_ownStatus.SetStatus(&status2);
			}
			else
			{
				git_wc_status2_t status2;
				status2.text_status = status2.prop_status = CalculateRecursiveStatus();
				m_ownStatus.SetStatus(&status2);
			}
		}
	}

	return 0;
}
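The own-status construction at the end of EnumFiles() boils down to a three-way decision. A minimal sketch (hypothetical helper; the parameters correspond to m_mostImportantFileStatus, the .git existence check and CalculateRecursiveStatus() used above):
// Hypothetical helper summarizing how EnumFiles() picks a directory's own status.
static git_wc_status_kind OwnStatusFor(git_wc_status_kind mostImportantFileStatus,
                                       bool hasDotGit,
                                       git_wc_status_kind recursiveStatus)
{
	if (mostImportantFileStatus > git_wc_status_unversioned)
		return git_wc_status_normal;	// at least one versioned entry -> the folder itself is versioned
	if (hasDotGit)
		return git_wc_status_normal;	// a working-tree root is at least 'normal'
	return recursiveStatus;	// otherwise use the children's combined status
}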
void CFolderCrawler::WorkerThread()
{
	HANDLE hWaitHandles[2];
	hWaitHandles[0] = m_hTerminationEvent;
	hWaitHandles[1] = m_hWakeEvent;
	CTGitPath workingPath;
	ULONGLONG currentTicks = 0;

	for(;;)
	{
		bool bRecursive = !!(DWORD)CRegStdDWORD(L"Software\\TortoiseGit\\RecursiveOverlay", TRUE);

		SetThreadPriority(GetCurrentThread(), THREAD_MODE_BACKGROUND_END);

		DWORD waitResult = WaitForMultipleObjects(_countof(hWaitHandles), hWaitHandles, FALSE, INFINITE);

		// exit event/working loop if the first event (m_hTerminationEvent)
		// has been signaled or if one of the events has been abandoned
		// (i.e. ~CFolderCrawler() is being executed)
		if(m_bRun == false || waitResult == WAIT_OBJECT_0 || waitResult == WAIT_ABANDONED_0 || waitResult == WAIT_ABANDONED_0+1)
		{
			// Termination event
			break;
		}

		SetThreadPriority(GetCurrentThread(), THREAD_MODE_BACKGROUND_BEGIN);

		// If we get here, we've been woken up by something being added to the queue.
		// However, it's important that we don't do our crawling while
		// the shell is still asking for items
		bool bFirstRunAfterWakeup = true;
		for(;;)
		{
			if (!m_bRun)
				break;
			// Any locks today?
			if (CGitStatusCache::Instance().m_bClearMemory)
			{
				CAutoWriteLock writeLock(CGitStatusCache::Instance().GetGuard());
				CGitStatusCache::Instance().ClearCache();
				CGitStatusCache::Instance().m_bClearMemory = false;
			}
			if(m_lCrawlInhibitSet > 0)
			{
				// We're in crawl hold-off
				CTraceToOutputDebugString::Instance()(_T(__FUNCTION__) L": Crawl hold-off\n");
				Sleep(50);
				continue;
			}
			if (bFirstRunAfterWakeup)
			{
				Sleep(20);
				CTraceToOutputDebugString::Instance()(_T(__FUNCTION__) L": Crawl bFirstRunAfterWakeup\n");
				bFirstRunAfterWakeup = false;
				continue;
			}
			if ((m_blockReleasesAt < GetTickCount64()) && (!m_blockedPath.IsEmpty()))
			{
				CTraceToOutputDebugString::Instance()(_T(__FUNCTION__) L": Crawl stop blocking path %s\n", m_blockedPath.GetWinPath());
				m_blockedPath.Reset();
			}
			CGitStatusCache::Instance().RemoveTimedoutBlocks();

			while (!m_pathsToRelease.empty())
			{
				AutoLocker lock(m_critSec);
				CTGitPath path = m_pathsToRelease.Pop();
				GitStatus::ReleasePath(path.GetWinPathString());
			}

			if (m_foldersToUpdate.empty() && m_pathsToUpdate.empty())
			{
				// Nothing left to do
				break;
			}
			currentTicks = GetTickCount64();
			if (!m_pathsToUpdate.empty())
			{
				{
					AutoLocker lock(m_critSec);

					m_bPathsAddedSinceLastCrawl = false;

					workingPath = m_pathsToUpdate.Pop();
					if ((!m_blockedPath.IsEmpty()) && (m_blockedPath.IsAncestorOf(workingPath)))
					{
						// move the path to the end of the list
						m_pathsToUpdate.Push(workingPath);
						if (m_pathsToUpdate.size() < 3)
							Sleep(50);
						continue;
					}
				}

				// don't crawl paths that are excluded
				if (!CGitStatusCache::Instance().IsPathAllowed(workingPath))
					continue;
				// check if the changed path is inside an .git folder
				CString projectroot;
				if ((workingPath.HasAdminDir(&projectroot)&&workingPath.IsDirectory()) || workingPath.IsAdminDir())
				{
					// we don't crawl for paths changed in a tmp folder inside an .git folder.
					// Because we also get notifications for those even if we just ask for the status!
					// And changes there don't affect the file status at all, so it's safe
					// to ignore notifications on those paths.
					if (workingPath.IsAdminDir())
					{
						// TODO: add git specific filters here. is there really any change besides index file in .git
						//       that is relevant for overlays?
						/*CString lowerpath = workingPath.GetWinPathString();
						lowerpath.MakeLower();
						if (lowerpath.Find(L"\\tmp\\") > 0)
							continue;
						if (CStringUtils::EndsWith(lowerpath, L"\\tmp"))
							continue;
						if (lowerpath.Find(L"\\log") > 0)
							continue;*/
						// Here's a little problem:
						// the lock file is also created for fetching the status
						// and not just when committing.
						// If we could find out why the lock file was changed
						// we could decide to crawl the folder again or not.
						// But for now, we have to crawl the parent folder
						// no matter what.

						//if (lowerpath.Find(L"\\lock") > 0)
						//	continue;
						// only go back to wc root if we are in .git-dir
						do
						{
							workingPath = workingPath.GetContainingDirectory();
						} while(workingPath.IsAdminDir());
					}
					else if (!workingPath.Exists())
					{
						CAutoWriteLock writeLock(CGitStatusCache::Instance().GetGuard());
						CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
						continue;
					}

					if (!CGitStatusCache::Instance().IsPathGood(workingPath))
					{
						AutoLocker lock(m_critSec);
						// move the path, the root of the repository, to the end of the list
						if (projectroot.IsEmpty())
							m_pathsToUpdate.Push(workingPath);
						else
							m_pathsToUpdate.Push(CTGitPath(projectroot));
						if (m_pathsToUpdate.size() < 3)
							Sleep(50);
						continue;
					}

					CTraceToOutputDebugString::Instance()(_T(__FUNCTION__) L": Invalidating and refreshing folder: %s\n", workingPath.GetWinPath());
					{
						AutoLocker print(critSec);
						_sntprintf_s(szCurrentCrawledPath[nCurrentCrawledpathIndex], MAX_CRAWLEDPATHSLEN, _TRUNCATE, L"Invalidating and refreshing folder: %s", workingPath.GetWinPath());
						++nCurrentCrawledpathIndex;
						if (nCurrentCrawledpathIndex >= MAX_CRAWLEDPATHS)
							nCurrentCrawledpathIndex = 0;
					}
					InvalidateRect(hWndHidden, nullptr, FALSE);
					{
						CAutoReadLock readLock(CGitStatusCache::Instance().GetGuard());
						// Invalidate the cache of this folder, to make sure its status is fetched again.
						CCachedDirectory * pCachedDir = CGitStatusCache::Instance().GetDirectoryCacheEntry(workingPath);
						if (pCachedDir)
						{
							git_wc_status_kind status = pCachedDir->GetCurrentFullStatus();
							pCachedDir->Invalidate();
							if (workingPath.Exists())
							{
								pCachedDir->RefreshStatus(bRecursive);
								// if the previous status wasn't normal and now it is, then
								// send a notification too.
								// We do this here because GetCurrentFullStatus() doesn't send
								// notifications for 'normal' status - if it would, we'd get tons
								// of notifications when crawling a working copy not yet in the cache.
								if ((status != git_wc_status_normal) && (pCachedDir->GetCurrentFullStatus() != status))
								{
									CGitStatusCache::Instance().UpdateShell(workingPath);
									CTraceToOutputDebugString::Instance()(_T(__FUNCTION__) L": shell update in crawler for %s\n", workingPath.GetWinPath());
								}
							}
							else
							{
								CAutoWriteLock writeLock(CGitStatusCache::Instance().GetGuard());
								CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
							}
						}
					}
					// In case svn_client_stat() modified a file and we got
					// a notification about that in the directory watcher,
					// remove that entry here again - this is to prevent an endless loop
					AutoLocker lock(m_critSec);
					m_pathsToUpdate.erase(workingPath);
				}
				else if (workingPath.HasAdminDir())
				{
					if (!workingPath.Exists())
					{
						CAutoWriteLock writeLock(CGitStatusCache::Instance().GetGuard());
						CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
						if (!workingPath.GetContainingDirectory().Exists())
							continue;
						else
							workingPath = workingPath.GetContainingDirectory();
					}
					CTraceToOutputDebugString::Instance()(_T(__FUNCTION__) L": Updating path: %s\n", workingPath.GetWinPath());
					{
						AutoLocker print(critSec);
						_sntprintf_s(szCurrentCrawledPath[nCurrentCrawledpathIndex], MAX_CRAWLEDPATHSLEN, _TRUNCATE, L"Updating path: %s", workingPath.GetWinPath());
						++nCurrentCrawledpathIndex;
						if (nCurrentCrawledpathIndex >= MAX_CRAWLEDPATHS)
							nCurrentCrawledpathIndex = 0;
					}
					InvalidateRect(hWndHidden, nullptr, FALSE);
					{
						CAutoReadLock readLock(CGitStatusCache::Instance().GetGuard());
						// Invalidate the cache of folders manually. The cache of files is invalidated
						// automatically if the status is asked for it and the file times don't match
						// anymore, so we don't need to manually invalidate those.
						CCachedDirectory* cachedDir = CGitStatusCache::Instance().GetDirectoryCacheEntry(workingPath.GetDirectory());
						if (cachedDir && workingPath.IsDirectory())
							cachedDir->Invalidate();
						if (cachedDir && cachedDir->GetStatusForMember(workingPath, bRecursive).GetEffectiveStatus() > git_wc_status_unversioned)
							CGitStatusCache::Instance().UpdateShell(workingPath);
					}
					AutoLocker lock(m_critSec);
					m_pathsToUpdate.erase(workingPath);
				}
				else
				{
					if (!workingPath.Exists())
					{
						CAutoWriteLock writeLock(CGitStatusCache::Instance().GetGuard());
						CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
					}
				}
			}
			if (!m_foldersToUpdate.empty())
			{
				{
					AutoLocker lock(m_critSec);
					m_bItemsAddedSinceLastCrawl = false;

					// create a new CTGitPath object to make sure the cached flags are requested again.
					// without this, a missing file/folder is still treated as missing even if it is available
					// now when crawling.
					workingPath = CTGitPath(m_foldersToUpdate.Pop().GetWinPath());

					if ((!m_blockedPath.IsEmpty())&&(m_blockedPath.IsAncestorOf(workingPath)))
					{
						// move the path to the end of the list
						m_foldersToUpdate.Push(workingPath);
						if (m_foldersToUpdate.size() < 3)
							Sleep(50);
						continue;
					}
				}
				if ((!m_blockedPath.IsEmpty())&&(m_blockedPath.IsAncestorOf(workingPath)))
					continue;
				if (!CGitStatusCache::Instance().IsPathAllowed(workingPath))
					continue;
				if (!CGitStatusCache::Instance().IsPathGood(workingPath))
					continue;

				CTraceToOutputDebugString::Instance()(_T(__FUNCTION__) L": Crawling folder: %s\n", workingPath.GetWinPath());
				{
					AutoLocker print(critSec);
					_sntprintf_s(szCurrentCrawledPath[nCurrentCrawledpathIndex], MAX_CRAWLEDPATHSLEN, _TRUNCATE, L"Crawling folder: %s", workingPath.GetWinPath());
					++nCurrentCrawledpathIndex;
					if (nCurrentCrawledpathIndex >= MAX_CRAWLEDPATHS)
						nCurrentCrawledpathIndex = 0;
				}
				InvalidateRect(hWndHidden, nullptr, FALSE);
				{
					CAutoReadLock readLock(CGitStatusCache::Instance().GetGuard());
					// Now, we need to visit this folder, to make sure that we know its 'most important' status
					CCachedDirectory * cachedDir = CGitStatusCache::Instance().GetDirectoryCacheEntry(workingPath.GetDirectory());
					// check if the path is monitored by the watcher. If it isn't, then we have to invalidate the cache
					// for that path and add it to the watcher.
					if (!CGitStatusCache::Instance().IsPathWatched(workingPath))
					{
						if (workingPath.HasAdminDir())
						{
							CTraceToOutputDebugString::Instance()(_T(__FUNCTION__) L": Add watch path %s\n", workingPath.GetWinPath());
							CGitStatusCache::Instance().AddPathToWatch(workingPath);
						}
						if (cachedDir)
							cachedDir->Invalidate();
						else
						{
							CAutoWriteLock writeLock(CGitStatusCache::Instance().GetGuard());
							CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
							// now cachedDir is invalid because it got deleted in the RemoveCacheForPath() call above.
							cachedDir = nullptr;
						}
					}
					if (cachedDir)
						cachedDir->RefreshStatus(bRecursive);
				}

				// While refreshing the status, we could get another crawl request for the same folder.
				// This can happen if the crawled folder has a lower status than one of the child folders
				// (recursively). To avoid double crawlings, remove such a crawl request here
				AutoLocker lock(m_critSec);
				if (m_bItemsAddedSinceLastCrawl)
				{
					m_foldersToUpdate.erase(workingPath);
				}
			}
		}
	}
	_endthread();
}
Example #17
git_error_t * CCachedDirectory::GetStatusCallback(void *baton, const char *path, git_wc_status2_t *status)
{
	CCachedDirectory* pThis = (CCachedDirectory*)baton;

	if (path == NULL)
		return 0;

	CTGitPath svnPath;

	if(status->entry)
	{
		if ((status->text_status != git_wc_status_none)&&(status->text_status != git_wc_status_missing))
			svnPath.SetFromSVN(path, (status->entry->kind == svn_node_dir));
		else
			svnPath.SetFromSVN(path);

		if(svnPath.IsDirectory())
		{
			if(!svnPath.IsEquivalentToWithoutCase(pThis->m_directoryPath))
			{
				if (pThis->m_bRecursive)
				{
					// Add any versioned directory, which is not our 'self' entry, to the list for having its status updated
					CGitStatusCache::Instance().AddFolderForCrawling(svnPath);
				}

				// Make sure we know about this child directory
				// This initial status value is likely to be overwritten from below at some point
				git_wc_status_kind s = GitStatus::GetMoreImportant(status->text_status, status->prop_status);
				CCachedDirectory * cdir = CGitStatusCache::Instance().GetDirectoryCacheEntryNoCreate(svnPath);
				if (cdir)
				{
					// This child directory is already in our cache!
					// So ask this dir about its recursive status
					git_wc_status_kind st = GitStatus::GetMoreImportant(s, cdir->GetCurrentFullStatus());
					AutoLocker lock(pThis->m_critSec);
					pThis->m_childDirectories[svnPath] = st;
				}
				else
				{
					// the child directory is not in the cache. Create a new entry for it in the cache which is
					// initially 'unversioned'. But we added that directory to the crawling list above, which
					// means the cache will be updated soon.
					CGitStatusCache::Instance().GetDirectoryCacheEntry(svnPath);
					AutoLocker lock(pThis->m_critSec);
					pThis->m_childDirectories[svnPath] = s;
				}
			}
		}
		else
		{
			// Keep track of the most important status of all the files in this directory
			// Don't include subdirectories in this figure, because they need to provide their
			// own 'most important' value
			pThis->m_mostImportantFileStatus = GitStatus::GetMoreImportant(pThis->m_mostImportantFileStatus, status->text_status);
			pThis->m_mostImportantFileStatus = GitStatus::GetMoreImportant(pThis->m_mostImportantFileStatus, status->prop_status);
			if (((status->text_status == git_wc_status_unversioned)||(status->text_status == git_wc_status_none))
				&&(CGitStatusCache::Instance().IsUnversionedAsModified()))
			{
				// treat unversioned files as modified
				if (pThis->m_mostImportantFileStatus != git_wc_status_added)
					pThis->m_mostImportantFileStatus = GitStatus::GetMoreImportant(pThis->m_mostImportantFileStatus, git_wc_status_modified);
			}
		}
	}
	else
	{
		svnPath.SetFromSVN(path);
		// Subversion returns no 'entry' field for versioned folders if they're
		// part of another working copy (nested layouts).
		// So we have to make sure that such an 'unversioned' folder really
		// is unversioned.
		if (((status->text_status == git_wc_status_unversioned)||(status->text_status == git_wc_status_missing))&&(!svnPath.IsEquivalentToWithoutCase(pThis->m_directoryPath))&&(svnPath.IsDirectory()))
		{
			if (svnPath.HasAdminDir())
			{
				CGitStatusCache::Instance().AddFolderForCrawling(svnPath);
				// Mark the directory as 'versioned' (status 'normal' for now).
				// This initial value will be overwritten from below some time later
				{
					AutoLocker lock(pThis->m_critSec);
					pThis->m_childDirectories[svnPath] = git_wc_status_normal;
				}
				// Make sure the entry is also in the cache
				CGitStatusCache::Instance().GetDirectoryCacheEntry(svnPath);
				// also mark the status in the status object as normal
				status->text_status = git_wc_status_normal;
			}
		}
		else if (status->text_status == git_wc_status_external)
		{
			CGitStatusCache::Instance().AddFolderForCrawling(svnPath);
			// Mark the directory as 'versioned' (status 'normal' for now).
			// This initial value will be overwritten from below some time later
			{
				AutoLocker lock(pThis->m_critSec);
				pThis->m_childDirectories[svnPath] = git_wc_status_normal;
			}
			// we have added a directory to the child-directory list of this
			// directory. We now must make sure that this directory also has
			// an entry in the cache.
			CGitStatusCache::Instance().GetDirectoryCacheEntry(svnPath);
			// also mark the status in the status object as normal
			status->text_status = git_wc_status_normal;
		}
		else
		{
			if (svnPath.IsDirectory())
			{
				AutoLocker lock(pThis->m_critSec);
				pThis->m_childDirectories[svnPath] = GitStatus::GetMoreImportant(status->text_status, status->prop_status);
			}
			else if ((CGitStatusCache::Instance().IsUnversionedAsModified())&&(status->text_status != git_wc_status_missing))
			{
				// make this unversioned item change the most important status of this
				// folder to modified if it doesn't already have another status
				if (pThis->m_mostImportantFileStatus != git_wc_status_added)
					pThis->m_mostImportantFileStatus = GitStatus::GetMoreImportant(pThis->m_mostImportantFileStatus, git_wc_status_modified);
			}
		}
	}

	pThis->AddEntry(svnPath, status);

	return 0;
}
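For plain files, the callback above folds every reported status into the directory's 'most important' value and optionally promotes unversioned entries to modified. A minimal sketch of that folding step (hypothetical helper; GitStatus::GetMoreImportant is used exactly as in the callback):
// Hypothetical helper: fold one file's status into the running "most important" value.
static git_wc_status_kind FoldFileStatus(git_wc_status_kind current,
                                         const git_wc_status2_t* status,
                                         bool unversionedAsModified)
{
	current = GitStatus::GetMoreImportant(current, status->text_status);
	current = GitStatus::GetMoreImportant(current, status->prop_status);
	const bool unversioned = (status->text_status == git_wc_status_unversioned
	                       || status->text_status == git_wc_status_none);
	if (unversionedAsModified && unversioned && current != git_wc_status_added)
		current = GitStatus::GetMoreImportant(current, git_wc_status_modified);	// treat unversioned as modified
	return current;
}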
void CFolderCrawler::WorkerThread()
{
	HANDLE hWaitHandles[2];
	hWaitHandles[0] = m_hTerminationEvent;
	hWaitHandles[1] = m_hWakeEvent;
	CTGitPath workingPath;
	bool bFirstRunAfterWakeup = false;
	DWORD currentTicks = 0;

	// Quick check if we're on Vista
	OSVERSIONINFOEX inf;
	SecureZeroMemory(&inf, sizeof(OSVERSIONINFOEX));
	inf.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
	GetVersionEx((OSVERSIONINFO *)&inf);
	WORD fullver = MAKEWORD(inf.dwMinorVersion, inf.dwMajorVersion);
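	// MAKEWORD(minor, major) packs the version so that 0x0600 corresponds to Windows Vista;
	// the background thread priority mode toggled below is only available from Vista onwards.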

	for(;;)
	{
		bool bRecursive = !!(DWORD)CRegStdDWORD(_T("Software\\TortoiseGit\\RecursiveOverlay"), TRUE);

		if (fullver >= 0x0600)
		{
			SetThreadPriority(GetCurrentThread(), THREAD_MODE_BACKGROUND_END);
		}
		DWORD waitResult = WaitForMultipleObjects(_countof(hWaitHandles), hWaitHandles, FALSE, INFINITE);

		// exit event/working loop if the first event (m_hTerminationEvent)
		// has been signaled or if one of the events has been abandoned
		// (i.e. ~CFolderCrawler() is being executed)
		if(m_bRun == false || waitResult == WAIT_OBJECT_0 || waitResult == WAIT_ABANDONED_0 || waitResult == WAIT_ABANDONED_0+1)
		{
			// Termination event
			break;
		}

		if (fullver >= 0x0600)
		{
			SetThreadPriority(GetCurrentThread(), THREAD_MODE_BACKGROUND_BEGIN);
		}

		// If we get here, we've been woken up by something being added to the queue.
		// However, it's important that we don't do our crawling while
		// the shell is still asking for items
		bFirstRunAfterWakeup = true;
		for(;;)
		{
			if (!m_bRun)
				break;
			// Any locks today?
			if (CGitStatusCache::Instance().m_bClearMemory)
			{
				CGitStatusCache::Instance().WaitToWrite();
				CGitStatusCache::Instance().ClearCache();
				CGitStatusCache::Instance().Done();
				CGitStatusCache::Instance().m_bClearMemory = false;
			}
			if(m_lCrawlInhibitSet > 0)
			{
				// We're in crawl hold-off
				ATLTRACE("Crawl hold-off\n");
				Sleep(50);
				continue;
			}
			if (bFirstRunAfterWakeup)
			{
				Sleep(20);
				ATLTRACE("Crawl bFirstRunAfterWakeup\n");
				bFirstRunAfterWakeup = false;
				continue;
			}
			if ((m_blockReleasesAt < GetTickCount())&&(!m_blockedPath.IsEmpty()))
			{
				ATLTRACE(_T("Crawl stop blocking path %s\n"), m_blockedPath.GetWinPath());
				m_blockedPath.Reset();
			}

			if ((m_foldersToUpdate.empty())&&(m_pathsToUpdate.empty()))
			{
				// Nothing left to do
				break;
			}
			currentTicks = GetTickCount();
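			// Each queued path carries, in its custom data, the earliest tick count at which it
			// may be crawled; entries that are not yet due, or that lie below the currently
			// blocked path, are moved to the end of the queue below and retried later.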
			if (!m_pathsToUpdate.empty())
			{
				{
					AutoLocker lock(m_critSec);

					m_bPathsAddedSinceLastCrawl = false;

					workingPath = m_pathsToUpdate.front();
					//m_pathsToUpdateUnique.erase (workingPath);
					m_pathsToUpdate.pop_front();
					if ((DWORD(workingPath.GetCustomData()) >= currentTicks) ||
						((!m_blockedPath.IsEmpty())&&(m_blockedPath.IsAncestorOf(workingPath))))
					{
						// move the path to the end of the list
						//m_pathsToUpdateUnique.insert (workingPath);
						m_pathsToUpdate.push_back(workingPath);
						if (m_pathsToUpdate.size() < 3)
							Sleep(50);
						continue;
					}
				}

				// don't crawl paths that are excluded
				if (!CGitStatusCache::Instance().IsPathAllowed(workingPath))
					continue;
				// check if the changed path is inside a .git folder
				CString projectroot;
				if ((workingPath.HasAdminDir(&projectroot)&&workingPath.IsDirectory()) || workingPath.IsAdminDir())
				{
					// We don't crawl paths that changed in a tmp folder inside a .git folder,
					// because we also get notifications for those even if we just ask for the status!
					// Changes there don't affect the file status at all, so it's safe
					// to ignore notifications on those paths.
					if (workingPath.IsAdminDir())
					{
						// TODO: add git specific filters here. is there really any change besides index file in .git
						//       that is relevant for overlays?
						/*CString lowerpath = workingPath.GetWinPathString();
						lowerpath.MakeLower();
						if (lowerpath.Find(_T("\\tmp\\"))>0)
							continue;
						if (lowerpath.Find(_T("\\tmp")) == (lowerpath.GetLength()-4))
							continue;
						if (lowerpath.Find(_T("\\log"))>0)
							continue;*/
						// Here's a little problem:
						// the lock file is also created for fetching the status
						// and not just when committing.
						// If we could find out why the lock file was changed
						// we could decide to crawl the folder again or not.
						// But for now, we have to crawl the parent folder
						// no matter what.

						//if (lowerpath.Find(_T("\\lock"))>0)
						//	continue;
					}
					else if (!workingPath.Exists())
					{
						CGitStatusCache::Instance().WaitToWrite();
						CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
						CGitStatusCache::Instance().Done();
						continue;
					}

					do
					{
						workingPath = workingPath.GetContainingDirectory();
					} while(workingPath.IsAdminDir());
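					// workingPath now points to the first parent folder that is not part of the
					// .git directory, i.e. the working-tree folder whose status we want to refresh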

					ATLTRACE(_T("Invalidating and refreshing folder: %s\n"), workingPath.GetWinPath());
					{
						AutoLocker print(critSec);
						_stprintf_s(szCurrentCrawledPath[nCurrentCrawledpathIndex], MAX_CRAWLEDPATHSLEN, _T("Invalidating and refreshing folder: %s"), workingPath.GetWinPath());
						nCurrentCrawledpathIndex++;
						if (nCurrentCrawledpathIndex >= MAX_CRAWLEDPATHS)
							nCurrentCrawledpathIndex = 0;
					}
					InvalidateRect(hWnd, NULL, FALSE);
					CGitStatusCache::Instance().WaitToRead();
					// Invalidate the cache of this folder, to make sure its status is fetched again.
					CCachedDirectory * pCachedDir = CGitStatusCache::Instance().GetDirectoryCacheEntry(workingPath);
					if (pCachedDir)
					{
						git_wc_status_kind status = pCachedDir->GetCurrentFullStatus();
						pCachedDir->Invalidate();
						if (workingPath.Exists())
						{
							pCachedDir->RefreshStatus(bRecursive);
							// if the previous status wasn't normal and now it is, then
							// send a notification too.
							// We do this here because GetCurrentFullStatus() doesn't send
							// notifications for 'normal' status - if it would, we'd get tons
							// of notifications when crawling a working copy not yet in the cache.
							if ((status != git_wc_status_normal)&&(pCachedDir->GetCurrentFullStatus() != status))
							{
								CGitStatusCache::Instance().UpdateShell(workingPath);
								ATLTRACE(_T("shell update in crawler for %s\n"), workingPath.GetWinPath());
							}
						}
						else
						{
							CGitStatusCache::Instance().Done();
							CGitStatusCache::Instance().WaitToWrite();
							CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
						}
					}
					CGitStatusCache::Instance().Done();
					// In case fetching the status itself modified a file and we got
					// a notification about that from the directory watcher,
					// remove that path here again - this prevents an endless loop
					AutoLocker lock(m_critSec);
					m_pathsToUpdate.erase(std::remove(m_pathsToUpdate.begin(), m_pathsToUpdate.end(), workingPath), m_pathsToUpdate.end());
				}
				else if (workingPath.HasAdminDir())
				{
					if (!workingPath.Exists())
					{
						CGitStatusCache::Instance().WaitToWrite();
						CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
						CGitStatusCache::Instance().Done();
						continue;
					}
					ATLTRACE(_T("Updating path: %s\n"), workingPath.GetWinPath());
					{
						AutoLocker print(critSec);
						_stprintf_s(szCurrentCrawledPath[nCurrentCrawledpathIndex], MAX_CRAWLEDPATHSLEN, _T("Updating path: %s"), workingPath.GetWinPath());
						nCurrentCrawledpathIndex++;
						if (nCurrentCrawledpathIndex >= MAX_CRAWLEDPATHS)
							nCurrentCrawledpathIndex = 0;
					}
					InvalidateRect(hWnd, NULL, FALSE);
					// HasAdminDir() already checks if the path points to a dir
					DWORD flags = TGITCACHE_FLAGS_FOLDERISKNOWN;
					flags |= (workingPath.IsDirectory() ? TGITCACHE_FLAGS_ISFOLDER : 0);
					flags |= (bRecursive ? TGITCACHE_FLAGS_RECUSIVE_STATUS : 0);
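					// the flags tell the status cache that we already know whether this path is a
					// folder and whether a recursive status is wanted for it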
					CGitStatusCache::Instance().WaitToRead();
					// Invalidate the cache of folders manually. The cache of files is invalidated
					// automatically if the status is asked for it and the file times don't match
					// anymore, so we don't need to manually invalidate those.
					if (workingPath.IsDirectory())
					{
						CCachedDirectory * cachedDir = CGitStatusCache::Instance().GetDirectoryCacheEntry(workingPath);
						if (cachedDir)
							cachedDir->Invalidate();
					}
					CStatusCacheEntry ce = CGitStatusCache::Instance().GetStatusForPath(workingPath, flags);
					if (ce.GetEffectiveStatus() > git_wc_status_unversioned)
					{
						CGitStatusCache::Instance().UpdateShell(workingPath);
						ATLTRACE(_T("shell update in folder crawler for %s\n"), workingPath.GetWinPath());
					}
					CGitStatusCache::Instance().Done();
					AutoLocker lock(m_critSec);
					m_pathsToUpdate.erase(std::remove(m_pathsToUpdate.begin(), m_pathsToUpdate.end(), workingPath), m_pathsToUpdate.end());
				}
				else
				{
					if (!workingPath.Exists())
					{
						CGitStatusCache::Instance().WaitToWrite();
						CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
						CGitStatusCache::Instance().Done();
					}
				}
			}
			else if (!m_foldersToUpdate.empty())
			{
				{
					AutoLocker lock(m_critSec);
					m_bItemsAddedSinceLastCrawl = false;

					// Create a new CTGitPath object to make sure the cached flags are requested again.
					// Without this, a missing file/folder would still be treated as missing even if it
					// is available again by the time we crawl.
					CTGitPath& folderToUpdate = m_foldersToUpdate.front();
					workingPath = CTGitPath(folderToUpdate.GetWinPath());
					workingPath.SetCustomData(folderToUpdate.GetCustomData());
					m_foldersToUpdate.pop_front();

					if ((DWORD(workingPath.GetCustomData()) >= currentTicks) ||
						((!m_blockedPath.IsEmpty())&&(m_blockedPath.IsAncestorOf(workingPath))))
					{
						// move the path to the end of the list
						m_foldersToUpdate.push_back (workingPath);
						if (m_foldersToUpdate.size() < 3)
							Sleep(50);
						continue;
					}
				}
				if (DWORD(workingPath.GetCustomData()) >= currentTicks)
				{
					Sleep(50);
					continue;
				}
				if ((!m_blockedPath.IsEmpty())&&(m_blockedPath.IsAncestorOf(workingPath)))
					continue;
				if (!CGitStatusCache::Instance().IsPathAllowed(workingPath))
					continue;

				ATLTRACE(_T("Crawling folder: %s\n"), workingPath.GetWinPath());
				{
					AutoLocker print(critSec);
					_stprintf_s(szCurrentCrawledPath[nCurrentCrawledpathIndex], MAX_CRAWLEDPATHSLEN, _T("Crawling folder: %s"), workingPath.GetWinPath());
					nCurrentCrawledpathIndex++;
					if (nCurrentCrawledpathIndex >= MAX_CRAWLEDPATHS)
						nCurrentCrawledpathIndex = 0;
				}
				InvalidateRect(hWnd, NULL, FALSE);
				CGitStatusCache::Instance().WaitToRead();
				// Now, we need to visit this folder, to make sure that we know its 'most important' status
				CCachedDirectory * cachedDir = CGitStatusCache::Instance().GetDirectoryCacheEntry(workingPath.GetDirectory());
				// check if the path is monitored by the watcher. If it isn't, then we have to invalidate the cache
				// for that path and add it to the watcher.
				if (!CGitStatusCache::Instance().IsPathWatched(workingPath))
				{
					if (workingPath.HasAdminDir())
					{
						ATLTRACE(_T("Add watch path %s\n"), workingPath.GetWinPath());
						CGitStatusCache::Instance().AddPathToWatch(workingPath);
					}
					if (cachedDir)
						cachedDir->Invalidate();
					else
					{
						CGitStatusCache::Instance().Done();
						CGitStatusCache::Instance().WaitToWrite();
						CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
					}
				}
				if (cachedDir)
					cachedDir->RefreshStatus(bRecursive);

#if 0
				// While refreshing the status, we could get another crawl request for the same folder.
				// This can happen if the crawled folder has a lower status than one of its child folders
				// (recursively). To avoid crawling the same folder twice, remove such a request here
				AutoLocker lock(m_critSec);
				if (m_bItemsAddedSinceLastCrawl)
				{
					if (m_foldersToUpdate.back().IsEquivalentToWithoutCase(workingPath))
					{
						m_foldersToUpdate.pop_back();
						m_bItemsAddedSinceLastCrawl = false;
					}
				}
#endif
				CGitStatusCache::Instance().Done();
			}
		}
	}
	_endthread();
}
Example #19
git_revnum_t GitStatus::GetStatus(const CTGitPath& path, bool update /* = false */, bool noignore /* = false */, bool noexternals /* = false */)
{
	// NOTE: unlike the SVN version, this one does not cache the enumerated files, because in practice no code in
	//       TortoiseGit uses that cache; every caller of GetStatus creates a temporary GitStatus object which is
	//       destroyed right after the call.

//	apr_hash_t *				statushash;
//	apr_hash_t *				exthash;
//	apr_array_header_t *		statusarray;
//	const sort_item*			item;
	
//	git_error_clear(m_err);
//	statushash = apr_hash_make(m_pool);
//	exthash = apr_hash_make(m_pool);
	git_revnum_t youngest = GIT_INVALID_REVNUM;
//	git_opt_revision_t rev;
//	rev.kind = git_opt_revision_unspecified;

	CString sProjectRoot;
	if ( !path.HasAdminDir(&sProjectRoot) )
		return youngest;

	struct hashbaton_t hashbaton;
//	hashbaton.hash = statushash;
//	hashbaton.exthash = exthash;
	hashbaton.pThis = this;

#ifdef _TORTOISESHELL
	if (g_ShellCache.GetCacheType() == ShellCache::dll)
#else
	if ((DWORD)CRegStdWORD(_T("Software\\TortoiseGit\\CacheType"), GetSystemMetrics(SM_REMOTESESSION) ? 2 : 1) == 2)
#endif
	{
		// gitindex.h based status

		CString sSubPath;
		CString s = path.GetWinPathString();
		if (s.GetLength() > sProjectRoot.GetLength())
		{
			sSubPath = CString(s.Right(s.GetLength() - sProjectRoot.GetLength() - 1/*otherwise it gets initial slash*/));
		}

		m_status.prop_status = m_status.text_status = git_wc_status_none;

		m_err = g_IndexFileMap.GetFileStatus(sProjectRoot,sSubPath,&m_status.text_status);
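		// with the dll cache type the status is read straight from the mapped index file
		// (see gitindex.h) for the single sub path computed above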
	}
	else
	{
		LPCTSTR lpszSubPath = NULL;
		CString sSubPath;
		CString s = path.GetWinPathString();
		if (s.GetLength() > sProjectRoot.GetLength())
		{
			sSubPath = s.Right(s.GetLength() - sProjectRoot.GetLength() - 1/*otherwise it gets initial slash*/);
			lpszSubPath = sSubPath;
		}

		// when recursion enabled, let wingit determine the recursive status for folders instead of enumerating all files here
		UINT nFlags = WGEFF_SingleFile | WGEFF_NoRecurse;
		if (!lpszSubPath)
			// report root dir as normal (otherwise it could be considered git_wc_status_unversioned, which would be wrong?)
			nFlags |= WGEFF_EmptyAsNormal;

		m_status.prop_status = m_status.text_status = git_wc_status_none;

		// NOTE: currently wgEnumFiles will not enumerate a file if it isn't versioned (so the status will be git_wc_status_none)
		m_err = !wgEnumFiles(sProjectRoot, lpszSubPath, nFlags, &getstatus, &m_status);
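		// wgEnumFiles reports success with a non-zero return value, so the result is inverted
		// here to match the usual 'non-zero m_err means failure' convention checked below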

		/*m_err = git_client_status4 (&youngest,
							path.GetGitApiPath(m_pool),
							&rev,
							getstatushash,
							&hashbaton,
							git_depth_empty,		//depth
							TRUE,		//getall
							update,		//update
							noignore,		//noignore
							noexternals,
							NULL,
							ctx,
							m_pool);*/
	}

	// m_err is set if the path is not under version control
	if (m_err) /*|| (apr_hash_count(statushash) == 0)*/
	{
		status = NULL;
//		return -2;	
		return GIT_INVALID_REVNUM;
	}

	// Convert the unordered hash to an ordered, sorted array
	/*statusarray = sort_hash (statushash,
							  sort_compare_items_as_paths,
							  m_pool);*/

	// only the first entry is needed (no recurse)
//	item = &APR_ARRAY_IDX (statusarray, 0, const sort_item);
	
//	status = (git_wc_status2_t *) item->value;
	status = &m_status;

	if (update)
	{
		// done to match the TSVN functionality of this function (not sure if any code uses the return value)
		// if TGit does not need this, then change the return type of the function
		youngest = g_Git.GetHash(CString(_T("HEAD")));
	}

	return youngest;
}
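// A minimal, hypothetical usage sketch (not taken from the TortoiseGit sources): as the NOTE at
// the top of GetStatus() says, callers create a short-lived GitStatus object, call GetStatus()
// once and then read the 'status' pointer, which refers to the internal m_status on success and
// is NULL when the path is not under version control. The path and the helper's name are made
// up, and this assumes the 'status' member is accessible to the caller.
static void QueryStatusExample()
{
	GitStatus gitStatus;
	gitStatus.GetStatus(CTGitPath(_T("D:\\repos\\project\\src\\file.cpp")));
	if (gitStatus.status)
	{
		// combine text and property status the same way the cache code above does
		git_wc_status_kind kind = GitStatus::GetMoreImportant(gitStatus.status->text_status, gitStatus.status->prop_status);
		ATLTRACE(_T("combined status: %d\n"), kind);
	}
}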
Example #20
git_revnum_t GitStatus::GetStatus(const CTGitPath& path, bool update /* = false */, bool noignore /* = false */, bool /*noexternals*/ /* = false */)
{
	// NOTE: unlike the SVN version, this one does not cache the enumerated files, because in practice no code in
	//       TortoiseGit uses that cache; every caller of GetStatus creates a temporary GitStatus object which is
	//       destroyed right after the call.

//	apr_hash_t *				statushash;
//	apr_hash_t *				exthash;
//	apr_array_header_t *		statusarray;
//	const sort_item*			item;

//	git_error_clear(m_err);
//	statushash = apr_hash_make(m_pool);
//	exthash = apr_hash_make(m_pool);
	git_revnum_t youngest = GIT_INVALID_REVNUM;
//	git_opt_revision_t rev;
//	rev.kind = git_opt_revision_unspecified;

	CString sProjectRoot;
	if ( !path.HasAdminDir(&sProjectRoot) )
		return youngest;

	struct hashbaton_t hashbaton;
//	hashbaton.hash = statushash;
//	hashbaton.exthash = exthash;
	hashbaton.pThis = this;

	bool isfull = ((DWORD)CRegStdDWORD(_T("Software\\TortoiseGit\\CacheType"),
				GetSystemMetrics(SM_REMOTESESSION) ? ShellCache::dll : ShellCache::exe) == ShellCache::dllFull);
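	// isfull is true only when the 'dllFull' cache type is configured; it is passed on to the
	// status helpers below so they know whether the full status should be determined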

	{
		LPCTSTR lpszSubPath = NULL;
		CString sSubPath;
		CString s = path.GetWinPathString();
		if (s.GetLength() > sProjectRoot.GetLength())
		{
			sSubPath = s.Right(s.GetLength() - sProjectRoot.GetLength());
			lpszSubPath = sSubPath;
			// skip initial slash if necessary
			if (*lpszSubPath == _T('\\'))
				lpszSubPath++;
		}

		m_status.prop_status = m_status.text_status = git_wc_status_none;

		if (path.IsDirectory())
		{
			m_err = GetDirStatus(sProjectRoot, CString(lpszSubPath), &m_status.text_status, isfull, false, !noignore, NULL, NULL);
		}
		else
		{
			m_err = GetFileStatus(sProjectRoot, CString(lpszSubPath), &m_status.text_status, isfull, false, !noignore, NULL, NULL);
		}
	}

	// m_err is set if the path is not under version control
	if (m_err) /*|| (apr_hash_count(statushash) == 0)*/
	{
		status = NULL;
		return GIT_INVALID_REVNUM;
	}

	// Convert the unordered hash to an ordered, sorted array
	/*statusarray = sort_hash (statushash,
							  sort_compare_items_as_paths,
							  m_pool);*/

	// only the first entry is needed (no recurse)
//	item = &APR_ARRAY_IDX (statusarray, 0, const sort_item);

//	status = (git_wc_status2_t *) item->value;
	status = &m_status;

	if (update)
	{
		// done to match the TSVN functionality of this function (not sure if any code uses the return value)
		// if TGit does not need this, then change the return type of the function
		youngest = g_Git.GetHash(_T("HEAD"));
	}

	return youngest;
}
BOOL ProjectProperties::ReadProps(CTGitPath path)
{
	CString sPropVal;

	GetStringProps(this->sLabel,BUGTRAQPROPNAME_LABEL);
	GetStringProps(this->sMessage,BUGTRAQPROPNAME_MESSAGE);
	GetStringProps(this->sUrl,BUGTRAQPROPNAME_URL);

	GetBOOLProps(this->bWarnIfNoIssue,BUGTRAQPROPNAME_WARNIFNOISSUE);
	GetBOOLProps(this->bNumber,BUGTRAQPROPNAME_NUMBER);
	GetBOOLProps(this->bAppend,BUGTRAQPROPNAME_APPEND);

	GetStringProps(sPropVal,BUGTRAQPROPNAME_LOGREGEX,false);
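	// The log regex property may hold two regular expressions separated by a newline: the first
	// (sCheckRe) finds bug-ID references in a log message, the second (sBugIDRe) extracts the
	// actual ID from such a reference.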

	sCheckRe = sPropVal;
	if (sCheckRe.Find('\n')>=0)
	{
		sBugIDRe = sCheckRe.Mid(sCheckRe.Find('\n')).Trim();
		sCheckRe = sCheckRe.Left(sCheckRe.Find('\n')).Trim();
	}
	if (!sCheckRe.IsEmpty())
	{
		sCheckRe = sCheckRe.Trim();
	}
	return TRUE;


#if 0
	BOOL bFoundBugtraqLabel = FALSE;
	BOOL bFoundBugtraqMessage = FALSE;
	BOOL bFoundBugtraqNumber = FALSE;
	BOOL bFoundBugtraqLogRe = FALSE;
	BOOL bFoundBugtraqURL = FALSE;
	BOOL bFoundBugtraqWarnIssue = FALSE;
	BOOL bFoundBugtraqAppend = FALSE;
	BOOL bFoundLogWidth = FALSE;
	BOOL bFoundLogTemplate = FALSE;
	BOOL bFoundMinLogSize = FALSE;
	BOOL bFoundMinLockMsgSize = FALSE;
	BOOL bFoundFileListEnglish = FALSE;
	BOOL bFoundProjectLanguage = FALSE;
	BOOL bFoundUserFileProps = FALSE;
	BOOL bFoundUserDirProps = FALSE;
	BOOL bFoundWebViewRev = FALSE;
	BOOL bFoundWebViewPathRev = FALSE;
	BOOL bFoundAutoProps = FALSE;
	BOOL bFoundLogSummary = FALSE;

	if (!path.IsDirectory())
		path = path.GetContainingDirectory();

	for (;;)
	{
		GitProperties props(path, GitRev::REV_WC, false);
		for (int i=0; i<props.GetCount(); ++i)
		{
			CString sPropName = props.GetItemName(i).c_str();
			CString sPropVal = CUnicodeUtils::GetUnicode(((char *)props.GetItemValue(i).c_str()));
			if ((!bFoundBugtraqLabel)&&(sPropName.Compare(BUGTRAQPROPNAME_LABEL)==0))
			{
				sLabel = sPropVal;
				bFoundBugtraqLabel = TRUE;
			}
			if ((!bFoundBugtraqMessage)&&(sPropName.Compare(BUGTRAQPROPNAME_MESSAGE)==0))
			{
				sMessage = sPropVal;
				bFoundBugtraqMessage = TRUE;
			}
			if ((!bFoundBugtraqNumber)&&(sPropName.Compare(BUGTRAQPROPNAME_NUMBER)==0))
			{
				CString val;
				val = sPropVal;
				val = val.Trim(_T(" \n\r\t"));
				if ((val.CompareNoCase(_T("false"))==0)||(val.CompareNoCase(_T("no"))==0))
					bNumber = FALSE;
				else
					bNumber = TRUE;
				bFoundBugtraqNumber = TRUE;
			}
			if ((!bFoundBugtraqLogRe)&&(sPropName.Compare(BUGTRAQPROPNAME_LOGREGEX)==0))
			{
				sCheckRe = sPropVal;
				if (sCheckRe.Find('\n')>=0)
				{
					sBugIDRe = sCheckRe.Mid(sCheckRe.Find('\n')).Trim();
					sCheckRe = sCheckRe.Left(sCheckRe.Find('\n')).Trim();
				}
				if (!sCheckRe.IsEmpty())
				{
					sCheckRe = sCheckRe.Trim();
				}
				bFoundBugtraqLogRe = TRUE;
			}
			if ((!bFoundBugtraqURL)&&(sPropName.Compare(BUGTRAQPROPNAME_URL)==0))
			{
				sUrl = sPropVal;
				bFoundBugtraqURL = TRUE;
			}
			if ((!bFoundBugtraqWarnIssue)&&(sPropName.Compare(BUGTRAQPROPNAME_WARNIFNOISSUE)==0))
			{
				CString val;
				val = sPropVal;
				val = val.Trim(_T(" \n\r\t"));
				if ((val.CompareNoCase(_T("true"))==0)||(val.CompareNoCase(_T("yes"))==0))
					bWarnIfNoIssue = TRUE;
				else
					bWarnIfNoIssue = FALSE;
				bFoundBugtraqWarnIssue = TRUE;
			}
			if ((!bFoundBugtraqAppend)&&(sPropName.Compare(BUGTRAQPROPNAME_APPEND)==0))
			{
				CString val;
				val = sPropVal;
				val = val.Trim(_T(" \n\r\t"));
				if ((val.CompareNoCase(_T("true"))==0)||(val.CompareNoCase(_T("yes"))==0))
					bAppend = TRUE;
				else
					bAppend = FALSE;
				bFoundBugtraqAppend = TRUE;
			}
			if ((!bFoundLogWidth)&&(sPropName.Compare(PROJECTPROPNAME_LOGWIDTHLINE)==0))
			{
				CString val;
				val = sPropVal;
				if (!val.IsEmpty())
				{
					nLogWidthMarker = _ttoi(val);
				}
				bFoundLogWidth = TRUE;
			}
			if ((!bFoundLogTemplate)&&(sPropName.Compare(PROJECTPROPNAME_LOGTEMPLATE)==0))
			{
				sLogTemplate = sPropVal;
				sLogTemplate.Remove('\r');
				sLogTemplate.Replace(_T("\n"), _T("\r\n"));
				bFoundLogTemplate = TRUE;
			}
			if ((!bFoundMinLogSize)&&(sPropName.Compare(PROJECTPROPNAME_LOGMINSIZE)==0))
			{
				CString val;
				val = sPropVal;
				if (!val.IsEmpty())
				{
					nMinLogSize = _ttoi(val);
				}
				bFoundMinLogSize = TRUE;
			}
			if ((!bFoundMinLockMsgSize)&&(sPropName.Compare(PROJECTPROPNAME_LOCKMSGMINSIZE)==0))
			{
				CString val;
				val = sPropVal;
				if (!val.IsEmpty())
				{
					nMinLockMsgSize = _ttoi(val);
				}
				bFoundMinLockMsgSize = TRUE;
			}
			if ((!bFoundFileListEnglish)&&(sPropName.Compare(PROJECTPROPNAME_LOGFILELISTLANG)==0))
			{
				CString val;
				val = sPropVal;
				val = val.Trim(_T(" \n\r\t"));
				if ((val.CompareNoCase(_T("false"))==0)||(val.CompareNoCase(_T("no"))==0))
					bFileListInEnglish = TRUE;
				else
					bFileListInEnglish = FALSE;
				bFoundFileListEnglish = TRUE;
			}
			if ((!bFoundProjectLanguage)&&(sPropName.Compare(PROJECTPROPNAME_PROJECTLANGUAGE)==0))
			{
				CString val;
				val = sPropVal;
				if (!val.IsEmpty())
				{
					LPTSTR strEnd;
					lProjectLanguage = _tcstol(val, &strEnd, 0);
				}
				bFoundProjectLanguage = TRUE;
			}
			if ((!bFoundUserFileProps)&&(sPropName.Compare(PROJECTPROPNAME_USERFILEPROPERTY)==0))
			{
				sFPPath = sPropVal;
				sFPPath.Replace(_T("\r\n"), _T("\n"));
				bFoundUserFileProps = TRUE;
			}
			if ((!bFoundUserDirProps)&&(sPropName.Compare(PROJECTPROPNAME_USERDIRPROPERTY)==0))
			{
				sDPPath = sPropVal;
				sDPPath.Replace(_T("\r\n"), _T("\n"));
				bFoundUserDirProps = TRUE;
			}
			if ((!bFoundAutoProps)&&(sPropName.Compare(PROJECTPROPNAME_AUTOPROPS)==0))
			{
				sAutoProps = sPropVal;
				sAutoProps.Replace(_T("\r\n"), _T("\n"));
				bFoundAutoProps = TRUE;
			}
			if ((!bFoundWebViewRev)&&(sPropName.Compare(PROJECTPROPNAME_WEBVIEWER_REV)==0))
			{
				sWebViewerRev = sPropVal;
				bFoundWebViewRev = TRUE;
			}
			if ((!bFoundWebViewPathRev)&&(sPropName.Compare(PROJECTPROPNAME_WEBVIEWER_PATHREV)==0))
			{
				sWebViewerPathRev = sPropVal;
				bFoundWebViewPathRev = TRUE;
			}
			if ((!bFoundLogSummary)&&(sPropName.Compare(PROJECTPROPNAME_LOGSUMMARY)==0))
			{
				sLogSummaryRe = sPropVal;
				bFoundLogSummary = TRUE;
			}
		}
		if (PathIsRoot(path.GetWinPath()))
			return FALSE;
		propsPath = path;
		path = path.GetContainingDirectory();
		if ((!path.HasAdminDir())||(path.IsEmpty()))
		{
			if (bFoundBugtraqLabel | bFoundBugtraqMessage | bFoundBugtraqNumber
				| bFoundBugtraqURL | bFoundBugtraqWarnIssue | bFoundLogWidth
				| bFoundLogTemplate | bFoundBugtraqLogRe | bFoundMinLockMsgSize
				| bFoundUserFileProps | bFoundUserDirProps | bFoundAutoProps
				| bFoundWebViewRev | bFoundWebViewPathRev | bFoundLogSummary)
			{
				return TRUE;
			}
			propsPath.Reset();
			return FALSE;
		}
	}
#endif
	return FALSE;		//never reached
}
Example #22
void CShellUpdater::WorkerThread()
{
	HANDLE hWaitHandles[2];
	hWaitHandles[0] = m_hTerminationEvent;
	hWaitHandles[1] = m_hWakeEvent;

	for(;;)
	{
		DWORD waitResult = WaitForMultipleObjects(_countof(hWaitHandles), hWaitHandles, FALSE, INFINITE);

		// exit event/working loop if the first event (m_hTerminationEvent)
		// has been signaled or if one of the events has been abandoned
		// (i.e. ~CShellUpdater() is being executed)
		if(waitResult == WAIT_OBJECT_0 || waitResult == WAIT_ABANDONED_0 || waitResult == WAIT_ABANDONED_0+1)
		{
			// Termination event
			break;
		}
		// wait some time before we notify the shell
		Sleep(50);
		for(;;)
		{
			CTGitPath workingPath;
			if (!m_bRunning)
				return;
			Sleep(0);
			{
				AutoLocker lock(m_critSec);
				if(m_pathsToUpdate.empty())
				{
					// Nothing left to do
					break;
				}

				if(m_bItemsAddedSinceLastUpdate)
				{
					m_pathsToUpdate.erase(std::unique(m_pathsToUpdate.begin(), m_pathsToUpdate.end(), &CTGitPath::PredLeftEquivalentToRight), m_pathsToUpdate.end());
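					// std::unique only collapses adjacent equivalent paths, so the queue is
					// deduplicated in place without being reordered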
					m_bItemsAddedSinceLastUpdate = false;
				}

				workingPath = m_pathsToUpdate.front();
				m_pathsToUpdate.pop_front();
			}
			if (workingPath.IsEmpty())
				continue;
			ATLTRACE(_T("Update notifications for: %s\n"), workingPath.GetWinPath());
			if (workingPath.IsDirectory())
			{
				// check if the path is monitored by the watcher. If it isn't, then we have to invalidate the cache
				// for that path and add it to the watcher.
				if (!CGitStatusCache::Instance().IsPathWatched(workingPath))
				{
					if (workingPath.HasAdminDir())
						CGitStatusCache::Instance().AddPathToWatch(workingPath);
				}
				// first send a notification about a sub folder change, so explorer doesn't discard
				// the folder notification. Since we only know for sure that the git admin
				// dir is present, we send a notification for that folder.
				CString admindir = workingPath.GetWinPathString() + _T("\\") + g_GitAdminDir.GetAdminDirName();
				if(::PathFileExists(admindir))
					SHChangeNotify(SHCNE_UPDATEITEM, SHCNF_PATH | SHCNF_FLUSHNOWAIT, (LPCTSTR)admindir, NULL);

				SHChangeNotify(SHCNE_UPDATEITEM, SHCNF_PATH | SHCNF_FLUSHNOWAIT, workingPath.GetWinPath(), NULL);
				// Sending an UPDATEDIR notification somehow overwrites/deletes the UPDATEITEM message. And without
				// that message, the folder overlays in the current view don't get updated without hitting F5.
				// Drawback is, without UPDATEDIR, the left tree view isn't always updated...

				SHChangeNotify(SHCNE_UPDATEDIR, SHCNF_PATH | SHCNF_FLUSHNOWAIT, workingPath.GetWinPath(), NULL);
			}
			else
				SHChangeNotify(SHCNE_UPDATEITEM, SHCNF_PATH | SHCNF_FLUSHNOWAIT, workingPath.GetWinPath(), NULL);
		}
	}
	_endthread();
}