// Forward-iterator overload of assign(): the element count is known up
// front, so we can choose the cheapest of three strategies.
void __vector__<_Tp, _Alloc>::_M_assign_aux(_ForwardIter __first, _ForwardIter __last,
                                            forward_iterator_tag) {
  size_type __n = 0;
  distance(__first, __last, __n);   // old-style SGI distance with out-parameter

  if (__n > capacity()) {
    // Not enough room: build a fresh buffer first, then tear down the old one.
    iterator __new_start = _M_allocate_and_copy(__n, __first, __last);
    _Destroy(_M_start, _M_finish);
    _M_end_of_storage.deallocate(_M_start, _M_end_of_storage._M_data - _M_start);
    _M_start = __new_start;
    _M_end_of_storage._M_data = _M_finish = _M_start + __n;
  }
  else if (size() >= __n) {
    // Shrinking (or equal size): overwrite in place, destroy the stale tail.
    iterator __cut = copy(__first, __last, _M_start);
    _Destroy(__cut, _M_finish);
    _M_finish = __cut;
  }
  else {
    // Growing within capacity: assign over the live elements, then
    // construct the remainder in the raw storage past _M_finish.
    _ForwardIter __split = __first;
    advance(__split, size());
    copy(__first, __split, _M_start);
    _M_finish = uninitialized_copy(__split, __last, _M_finish);
  }
}
// Grow capacity to at least __n. Never shrinks; a no-op (and therefore
// never invalidates anything) when the request is already satisfied.
void __vector__<_Tp, _Alloc>::reserve(size_type __n) {
  if (capacity() >= __n)
    return;

  const size_type __sz = size();
  pointer __new_start;
  if (this->_M_start) {
    // Relocate the existing elements into the bigger buffer, then
    // release the old storage (done inside _M_clear()).
    __new_start = _M_allocate_and_copy(__n, this->_M_start, this->_M_finish);
    _M_clear();
  }
  else {
    // Empty vector: a plain allocation is all that is needed.
    __new_start = this->_M_end_of_storage.allocate(__n);
  }
  _M_set(__new_start, __new_start + __sz, __new_start + __n);
}
// Grow capacity to at least __n, throwing length_error when the request
// exceeds max_size(). Never shrinks; no-op if already satisfied.
void _VECTOR_IMPL<_Tp, _Alloc>::reserve(size_type __n) {
  if (capacity() >= __n)
    return;
  if (max_size() < __n) {
    this->_M_throw_length_error();
  }

  const size_type __sz = size();
  pointer __new_start;
  if (this->_M_start) {
    // Relocate the existing elements, then release the old storage
    // (done inside _M_clear()).
    __new_start = _M_allocate_and_copy(__n, this->_M_start, this->_M_finish);
    _M_clear();
  }
  else {
    // Empty vector: a plain allocation suffices.
    __new_start = this->_M_end_of_storage.allocate(__n);
  }
  _M_set(__new_start, __new_start + __sz, __new_start + __n);
}
// Grow capacity to at least __n. This variant tears the old buffer down
// inline (destroy + deallocate) instead of delegating to a helper.
void __vector__<_Tp, _Alloc>::reserve(__size_type__ __n) {
  if (capacity() >= __n)
    return;

  const size_type __sz = size();
  pointer __new_start;
  if (_M_start) {
    // Copy into the new buffer before destroying the old elements, so a
    // throwing copy leaves the vector untouched.
    __new_start = _M_allocate_and_copy(__n, _M_start, _M_finish);
    _Destroy(_M_start, _M_finish);
    _M_end_of_storage.deallocate(_M_start, _M_end_of_storage._M_data - _M_start);
  }
  else {
    __new_start = _M_end_of_storage.allocate(__n);
  }
  _M_start = __new_start;
  _M_finish = __new_start + __sz;
  _M_end_of_storage._M_data = _M_start + __n;
}
// Copy assignment. Guards against self-assignment, then picks the same
// three-way strategy as assign(): reallocate / overwrite-and-truncate /
// overwrite-and-append.
_VECTOR_IMPL<_Tp, _Alloc>&
_VECTOR_IMPL<_Tp, _Alloc>::operator=(const _VECTOR_IMPL<_Tp, _Alloc>& __x) {
  if (&__x != this) {
    // Resolve the source range to raw const pointers once; the "+ 0"
    // mirrors the original code's decay of any debug-iterator wrapper.
    const_pointer __src     = __CONST_CAST(const_pointer, __x._M_start) + 0;
    const_pointer __src_end = __CONST_CAST(const_pointer, __x._M_finish) + 0;
    const size_type __xlen = __x.size();

    if (__xlen > capacity()) {
      // Source does not fit: copy into fresh storage, then drop ours.
      pointer __buf = _M_allocate_and_copy(__xlen, __src, __src_end);
      _M_clear();
      this->_M_start = __buf;
      this->_M_end_of_storage._M_data = this->_M_start + __xlen;
    }
    else if (size() >= __xlen) {
      // We hold at least as many elements: assign over them and destroy
      // whatever is left past the copied range.
      pointer __tail = __copy_ptrs(__src, __src_end, this->_M_start, _TrivialAss());
      _STLP_STD::_Destroy_Range(__tail, this->_M_finish);
    }
    else {
      // Assign over the live prefix, construct the rest in raw storage.
      __copy_ptrs(__src, __src + size(), this->_M_start, _TrivialAss());
      __uninitialized_copy(__src + size(), __src_end, this->_M_finish, _TrivialUCpy());
    }
    this->_M_finish = this->_M_start + __xlen;
  }
  return *this;
}
// Copy assignment for this library variant. Same self-assignment guard and
// three-way strategy as the _VECTOR_IMPL version, but dispatches the raw
// construction on _IsPODType() and uses plain casts.
__vector__<_Tp, _Alloc>&
__vector__<_Tp, _Alloc>::operator=(const __vector__<_Tp, _Alloc>& __x) {
  if (&__x != this) {
    // Resolve the source range to raw const pointers once; "+ 0" keeps the
    // original code's forced decay of the pointer expression.
    const_pointer __src     = (const_pointer)__x._M_start + 0;
    const_pointer __src_end = (const_pointer)__x._M_finish + 0;
    const size_type __xlen = __x.size();

    if (__xlen > capacity()) {
      // Source does not fit: copy into fresh storage, then drop ours.
      pointer __buf = _M_allocate_and_copy(__xlen, __src, __src_end);
      _M_clear();
      this->_M_start = __buf;
      this->_M_end_of_storage._M_data = this->_M_start + __xlen;
    }
    else if (size() >= __xlen) {
      // Shrinking (or equal size): assign over the live elements, destroy
      // the leftover tail.
      pointer __tail = __copy_ptrs(__src, __src_end, (pointer)this->_M_start, _TrivialAss());
      _STLP_STD::_Destroy(__tail, this->_M_finish);
    }
    else {
      // Assign over the live prefix, construct the rest in raw storage.
      __copy_ptrs(__src, __src + size(), (pointer)this->_M_start, _TrivialAss());
      __uninitialized_copy(__src + size(), __src_end, this->_M_finish, _IsPODType());
    }
    this->_M_finish = this->_M_start + __xlen;
  }
  return *this;
}