template <class _Tp, class _Alloc>
_VECTOR_IMPL<_Tp, _Alloc>&
_VECTOR_IMPL<_Tp, _Alloc>::operator=(const _VECTOR_IMPL<_Tp, _Alloc>& __x) {
  if (&__x != this) {
    const size_type __xlen = __x.size();
    if (__xlen > capacity()) {
      // Source does not fit in the current allocation: copy into a fresh
      // buffer, then destroy and release the old one.
      pointer __tmp = _M_allocate_and_copy(__xlen,
                                           __CONST_CAST(const_pointer, __x._M_start) + 0,
                                           __CONST_CAST(const_pointer, __x._M_finish) + 0);
      _M_clear();
      this->_M_start = __tmp;
      this->_M_end_of_storage._M_data = this->_M_start + __xlen;
    }
    else if (size() >= __xlen) {
      // We already have at least as many constructed elements: assign over
      // them and destroy the surplus tail.
      pointer __i = __copy_ptrs(__CONST_CAST(const_pointer, __x._M_start) + 0,
                                __CONST_CAST(const_pointer, __x._M_finish) + 0,
                                this->_M_start, _TrivialAss());
      _STLP_STD::_Destroy_Range(__i, this->_M_finish);
    }
    else {
      // Enough capacity but too few constructed elements: assign over the
      // existing ones, then copy-construct the remainder into raw storage.
      __copy_ptrs(__CONST_CAST(const_pointer, __x._M_start),
                  __CONST_CAST(const_pointer, __x._M_start) + size(),
                  this->_M_start, _TrivialAss());
      __uninitialized_copy(__CONST_CAST(const_pointer, __x._M_start) + size(),
                           __CONST_CAST(const_pointer, __x._M_finish) + 0,
                           this->_M_finish, _TrivialUCpy());
    }
    this->_M_finish = this->_M_start + __xlen;
  }
  return *this;
}
// Variant spelling of the same operator=: identical three-branch logic, but
// using C-style casts and the _Destroy/_IsPODType names for the helpers.
template <class _Tp, class _Alloc>
__vector__<_Tp, _Alloc>&
__vector__<_Tp, _Alloc>::operator=(const __vector__<_Tp, _Alloc>& __x) {
  if (&__x != this) {
    const size_type __xlen = __x.size();
    if (__xlen > capacity()) {
      pointer __tmp = _M_allocate_and_copy(__xlen,
                                           (const_pointer)__x._M_start + 0,
                                           (const_pointer)__x._M_finish + 0);
      _M_clear();
      this->_M_start = __tmp;
      this->_M_end_of_storage._M_data = this->_M_start + __xlen;
    }
    else if (size() >= __xlen) {
      pointer __i = __copy_ptrs((const_pointer)__x._M_start + 0,
                                (const_pointer)__x._M_finish + 0,
                                (pointer)this->_M_start, _TrivialAss());
      _STLP_STD::_Destroy(__i, this->_M_finish);
    }
    else {
      __copy_ptrs((const_pointer)__x._M_start,
                  (const_pointer)__x._M_start + size(),
                  (pointer)this->_M_start, _TrivialAss());
      __uninitialized_copy((const_pointer)__x._M_start + size(),
                           (const_pointer)__x._M_finish + 0,
                           this->_M_finish, _IsPODType());
    }
    this->_M_finish = this->_M_start + __xlen;
  }
  return *this;
}
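// ---------------------------------------------------------------------------
// A minimal standalone sketch (not STLport code) of the same three-branch
// copy-assignment strategy, for illustration only.  `MiniVec`, its member
// names, and the use of std::allocator are assumptions made for this sketch;
// it omits the exception cleanup that _M_allocate_and_copy performs and the
// trivial-type dispatch done by __copy_ptrs/__uninitialized_copy.
// Requires C++17 for std::destroy.
#include <algorithm>
#include <cstddef>
#include <memory>

template <class T>
class MiniVec {
  T* start_   = nullptr;   // first constructed element
  T* finish_  = nullptr;   // one past the last constructed element
  T* end_cap_ = nullptr;   // one past the end of the allocation
  std::allocator<T> alloc_;

public:
  MiniVec() = default;
  MiniVec(std::size_t n, const T& value) {
    start_   = alloc_.allocate(n);
    std::uninitialized_fill_n(start_, n, value);
    finish_  = start_ + n;
    end_cap_ = start_ + n;
  }
  MiniVec(const MiniVec&) = delete;   // keep the sketch focused on operator=
  ~MiniVec() {
    std::destroy(start_, finish_);
    if (start_) alloc_.deallocate(start_, capacity());
  }

  std::size_t size() const     { return static_cast<std::size_t>(finish_ - start_); }
  std::size_t capacity() const { return static_cast<std::size_t>(end_cap_ - start_); }

  MiniVec& operator=(const MiniVec& x) {
    if (&x != this) {
      const std::size_t xlen = x.size();
      if (xlen > capacity()) {
        // Branch 1: source does not fit -- copy into a fresh buffer, then
        // destroy and release the old one.
        T* tmp = alloc_.allocate(xlen);
        std::uninitialized_copy(x.start_, x.finish_, tmp);
        std::destroy(start_, finish_);
        if (start_) alloc_.deallocate(start_, capacity());
        start_   = tmp;
        end_cap_ = start_ + xlen;
      } else if (size() >= xlen) {
        // Branch 2: assign over existing elements, destroy the surplus tail.
        T* i = std::copy(x.start_, x.finish_, start_);
        std::destroy(i, finish_);
      } else {
        // Branch 3: assign over what already exists, then copy-construct the
        // remainder into the raw storage between finish_ and end_cap_.
        std::copy(x.start_, x.start_ + size(), start_);
        std::uninitialized_copy(x.start_ + size(), x.finish_, finish_);
      }
      finish_ = start_ + xlen;
    }
    return *this;
  }
};

// Usage sketch: assigning a smaller vector to a larger one takes branch 2 and
// reuses the destination's allocation rather than reallocating.
//   MiniVec<int> a(10, 1), b(3, 2);
//   a = b;   // a.size() == 3, a.capacity() still 10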