long item_stack::amount_can_fit( const item &it ) const { // Without stacking charges, would we violate the count limit? const bool violates_count = size() >= ( size_t )count_limit(); const item *here = it.count_by_charges() ? stacks_with( it ) : nullptr; if( violates_count && !here ) { return 0l; } // Call max because a tile may have been overfilled to begin with (e.g. #14115) long ret = std::max( 0l, it.charges_per_volume( free_volume() ) ); return it.count_by_charges() ? std::min( ret, it.charges ) : ret; }
// @todo Move it into some 'item_stack' class. std::vector<std::list<item *>> restack_items( const std::list<item>::const_iterator &from, const std::list<item>::const_iterator &to ) { std::vector<std::list<item *>> res; for( auto it = from; it != to; ++it ) { auto match = std::find_if( res.begin(), res.end(), [ &it ]( const std::list<item *> &e ) { return it->stacks_with( *const_cast<item *>( e.back() ) ); } ); if( match != res.end() ) { match->push_back( const_cast<item *>( &*it ) ); } else { res.emplace_back( 1, const_cast<item *>( &*it ) ); } } return res; }
// Converts a filtered collection of items into inventory-UI data structures:
// stacks matching items together, assigns inventory letters, and builds an
// indexed slice for display.
// NOTE(review): Collection/Filter look like template parameters — the
// template<...> header is presumably just above this span; confirm.
// @param item_stacks   out: one std::list<item> per display stack (items are copied in)
// @param result_slice  out: one (stack pointer, index) entry per stack; indices start
//                      at INT_MIN + 1 to avoid INT_MIN, which means "no item at all"
// @param selectables   out: pointer to the first original item of each new stack
// @param cur_invlet    in/out: next inventory letter to hand out; advances per new stack
void pseudo_inv_to_slice( Collection here, Filter filter,
                          pseudo_inventory &item_stacks, indexed_invslice &result_slice,
                          std::vector<item *> &selectables, char &cur_invlet )
{
    for( auto candidate = here.begin(); candidate != here.end(); ++candidate ) {
        if( filter( *candidate ) ) {
            // Check if we can stack the item with an existing one
            bool stacks = false;
            for( auto &elem : item_stacks ) {
                if( candidate->stacks_with( elem.back() ) ) {
                    stacks = true;
                    elem.push_back( *candidate );
                    break;
                }
            }
            if( !stacks ) {
                // New stack: copy the item in and give it an inventory letter,
                // or a blank one once we run past last_invlet.
                item_stacks.push_back( std::list<item>( 1, *candidate ) );
                if( cur_invlet <= last_invlet ) {
                    item_stacks.back().front().invlet = cur_invlet;
                    cur_invlet++;
                } else {
                    item_stacks.back().front().invlet = ' ';
                }
                selectables.push_back( &*candidate );
            }
        }
    }
    for( size_t a = 0; a < item_stacks.size(); a++ ) {
        // avoid INT_MIN, as it can be confused with "no item at all"
        result_slice.push_back( indexed_invslice::value_type( &item_stacks[a], INT_MIN + a + 1 ) );
    }
}
// Builds the starting-item list for a character of this profession.
// Pipeline: legacy item lists -> item-group rolls -> trait bonus items ->
// trait substitutions -> VARSIZE fitting -> charge merging -> sort by layer.
// @param male   selects the gendered legacy lists / item groups
// @param traits used for bonus items and item substitutions
// @return items sorted by clothing layer (innermost first)
std::list<item> profession::items( bool male, const std::vector<trait_id> &traits ) const
{
    std::list<item> result;
    // Helper: instantiate each legacy itype entry (with optional snippet),
    // put it in its container, and append it to result.
    auto add_legacy_items = [&result]( const itypedecvec & vec ) {
        for( const itypedec &elem : vec ) {
            item it( elem.type_id, 0, item::default_charges_tag {} );
            if( !elem.snippet_id.empty() ) {
                it.set_snippet( elem.snippet_id );
            }
            it = it.in_its_container();
            result.push_back( it );
        }
    };

    add_legacy_items( legacy_starting_items );
    add_legacy_items( male ? legacy_starting_items_male : legacy_starting_items_female );

    // Roll the common and gender-specific item groups and prepend them.
    const std::vector<item> group_both = item_group::items_from( _starting_items );
    const std::vector<item> group_gender = item_group::items_from( male ? _starting_items_male :
                                           _starting_items_female );
    result.insert( result.begin(), group_both.begin(), group_both.end() );
    result.insert( result.begin(), group_gender.begin(), group_gender.end() );

    // Trait-granted bonus items; no_bonus is the "nothing" sentinel.
    std::vector<itype_id> bonus = item_substitutions.get_bonus_items( traits );
    for( const itype_id &elem : bonus ) {
        if( elem != no_bonus ) {
            result.push_back( item( elem, 0, item::default_charges_tag {} ) );
        }
    }

    // Replace items that have a trait-based substitution. Substitutes are
    // inserted at the front (std::list insert/erase leave other iterators
    // valid, so iteration can continue safely past the erased element).
    for( auto iter = result.begin(); iter != result.end(); ) {
        const auto sub = item_substitutions.get_substitution( *iter, traits );
        if( !sub.empty() ) {
            result.insert( result.begin(), sub.begin(), sub.end() );
            iter = result.erase( iter );
        } else {
            ++iter;
        }
    }

    // Resizable clothing starts out fitted.
    for( item &it : result ) {
        if( it.has_flag( "VARSIZE" ) ) {
            it.item_tags.insert( "FIT" );
        }
    }

    if( result.empty() ) {
        // No need to do the below stuff. Plus it would cause said below stuff to crash
        return result;
    }

    // Merge charges for items that stack with each other
    for( auto outer = result.begin(); outer != result.end(); ++outer ) {
        if( !outer->count_by_charges() ) {
            continue;
        }
        for( auto inner = std::next( outer ); inner != result.end(); ) {
            if( outer->stacks_with( *inner ) ) {
                outer->merge_charges( *inner );
                inner = result.erase( inner );
            } else {
                ++inner;
            }
        }
    }

    // Innermost clothing layer first; list::sort is stable, so the insertion
    // order above is preserved within a layer.
    result.sort( []( const item & first, const item & second ) {
        return first.get_layer() < second.get_layer();
    } );
    return result;
}