stack.h

00001 /***************************************************************************
00002  *  include/stxxl/bits/containers/stack.h
00003  *
00004  *  Part of the STXXL. See http://stxxl.sourceforge.net
00005  *
00006  *  Copyright (C) 2003-2004 Roman Dementiev <[email protected]>
00007  *
00008  *  Distributed under the Boost Software License, Version 1.0.
00009  *  (See accompanying file LICENSE_1_0.txt or copy at
00010  *  http://www.boost.org/LICENSE_1_0.txt)
00011  **************************************************************************/
00012 
00013 #ifndef STXXL_STACK_HEADER
00014 #define STXXL_STACK_HEADER
00015 
00016 #include <stack>
00017 #include <vector>
00018 
00019 #include <stxxl/bits/mng/mng.h>
00020 #include <stxxl/bits/common/simple_vector.h>
00021 #include <stxxl/bits/common/tmeta.h>
00022 #include <stxxl/bits/mng/write_pool.h>
00023 #include <stxxl/bits/mng/prefetch_pool.h>
00024 
00025 
00026 __STXXL_BEGIN_NAMESPACE
00027 
00030 
00031 template <class ValTp,
00032           unsigned BlocksPerPage = 4,
00033           unsigned BlkSz = STXXL_DEFAULT_BLOCK_SIZE(ValTp),
00034           class AllocStr = STXXL_DEFAULT_ALLOC_STRATEGY,
00035           class SzTp = stxxl::int64>
00036 struct stack_config_generator
00037 {
00038     typedef ValTp value_type;
00039     enum { blocks_per_page = BlocksPerPage };
00040     typedef AllocStr alloc_strategy;
00041     enum { block_size = BlkSz };
00042     typedef SzTp size_type;
00043 };
00044 
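The config struct above only bundles its template parameters into named typedefs and enums; nothing is computed. A minimal sketch of spelling such a config out by hand, assuming the public wrapper header <stxxl/stack> (which pulls in this file) is included; the name int_stack_cfg is illustrative and is reused in the sketches further down:

    // Illustrative config: stack of ints, 4 blocks per page, library defaults
    // for block size, disk allocation strategy and size type.
    typedef stxxl::stack_config_generator<
        int,                                // value_type
        4,                                  // blocks_per_page
        STXXL_DEFAULT_BLOCK_SIZE(int),      // block_size in bytes
        STXXL_DEFAULT_ALLOC_STRATEGY,       // alloc_strategy
        stxxl::int64                        // size_type
        > int_stack_cfg;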
00045 
00047 
00053 template <class Config_>
00054 class normal_stack : private noncopyable
00055 {
00056 public:
00057     typedef Config_ cfg;
00058     typedef typename cfg::value_type value_type;
00059     typedef typename cfg::alloc_strategy alloc_strategy;
00060     typedef typename cfg::size_type size_type;
00061     enum {
00062         blocks_per_page = cfg::blocks_per_page,
00063         block_size = cfg::block_size
00064     };
00065 
00066     typedef typed_block<block_size, value_type> block_type;
00067     typedef BID<block_size> bid_type;
00068 
00069 private:
00070     size_type size_;
00071     unsigned_type cache_offset;
00072     value_type * current_element;
00073     simple_vector<block_type> cache;
00074     typename simple_vector<block_type>::iterator front_page;
00075     typename simple_vector<block_type>::iterator back_page;
00076     std::vector<bid_type> bids;
00077     alloc_strategy alloc_strategy_;
00078 
00079 public:
00080     normal_stack() :
00081         size_(0),
00082         cache_offset(0),
00083         current_element(NULL),
00084         cache(blocks_per_page * 2),
00085         front_page(cache.begin() + blocks_per_page),
00086         back_page(cache.begin()),
00087         bids(0)
00088     {
00089         bids.reserve(blocks_per_page);
00090     }
00091 
00092     void swap(normal_stack & obj)
00093     {
00094         std::swap(size_, obj.size_);
00095         std::swap(cache_offset, obj.cache_offset);
00096         std::swap(current_element, obj.current_element);
00097         std::swap(cache, obj.cache);
00098         std::swap(front_page, obj.front_page);
00099         std::swap(back_page, obj.back_page);
00100         std::swap(bids, obj.bids);
00101         std::swap(alloc_strategy_, obj.alloc_strategy_);
00102     }
00103 
00107     template <class stack_type>
00108     normal_stack(const stack_type & stack_) :
00109         size_(0),
00110         cache_offset(0),
00111         current_element(NULL),
00112         cache(blocks_per_page * 2),
00113         front_page(cache.begin() + blocks_per_page),
00114         back_page(cache.begin()),
00115         bids(0)
00116     {
00117         bids.reserve(blocks_per_page);
00118 
00119         stack_type stack_copy = stack_;
00120         const size_type sz = stack_copy.size();
00121         size_type i;
00122 
00123         std::vector<value_type> tmp(sz);
00124 
00125         for (i = 0; i < sz; ++i)
00126         {
00127             tmp[sz - i - 1] = stack_copy.top();
00128             stack_copy.pop();
00129         }
00130         for (i = 0; i < sz; ++i)
00131             this->push(tmp[i]);
00132     }
00133     virtual ~normal_stack()
00134     {
00135         STXXL_VERBOSE(STXXL_PRETTY_FUNCTION_NAME);
00136         block_manager::get_instance()->delete_blocks(bids.begin(), bids.end());
00137     }
00138     size_type size() const
00139     {
00140         return size_;
00141     }
00142     bool empty() const
00143     {
00144         return (!size_);
00145     }
00146     value_type & top()
00147     {
00148         assert(size_ > 0);
00149         return (*current_element);
00150     }
00151     const value_type & top() const
00152     {
00153         assert(size_ > 0);
00154         return (*current_element);
00155     }
00156     void push(const value_type & val)
00157     {
00158         assert(cache_offset <= 2 * blocks_per_page * block_type::size);
00159         //assert(cache_offset >= 0);
00160 
00161         if (cache_offset == 2 * blocks_per_page * block_type::size) // cache overflow
00162         {
00163             STXXL_VERBOSE2("growing, size: " << size_);
00164 
00165             bids.resize(bids.size() + blocks_per_page);
00166             typename std::vector<bid_type>::iterator cur_bid = bids.end() - blocks_per_page;
00167             block_manager::get_instance()->new_blocks(
00168                 offset_allocator<alloc_strategy>(cur_bid - bids.begin(), alloc_strategy_), cur_bid, bids.end());
00169 
00170             simple_vector<request_ptr> requests(blocks_per_page);
00171 
00172             for (int i = 0; i < blocks_per_page; ++i, ++cur_bid)
00173             {
00174                 requests[i] = (back_page + i)->write(*cur_bid);
00175             }
00176 
00177 
00178             std::swap(back_page, front_page);
00179 
00180             bids.reserve(bids.size() + blocks_per_page);
00181 
00182             cache_offset = blocks_per_page * block_type::size + 1;
00183             current_element = &((*front_page)[0]);
00184             ++size_;
00185 
00186             wait_all(requests.begin(), blocks_per_page);
00187 
00188             *current_element = val;
00189 
00190             return;
00191         }
00192 
00193         current_element = element(cache_offset);
00194         *current_element = val;
00195         ++size_;
00196         ++cache_offset;
00197     }
00198     void pop()
00199     {
00200         assert(cache_offset <= 2 * blocks_per_page * block_type::size);
00201         assert(cache_offset > 0);
00202         assert(size_ > 0);
00203 
00204         if (cache_offset == 1 && bids.size() >= blocks_per_page)
00205         {
00206             STXXL_VERBOSE2("shrinking, size: " << size_);
00207 
00208             simple_vector<request_ptr> requests(blocks_per_page);
00209 
00210             {
00211                 typename std::vector<bid_type>::const_iterator cur_bid = bids.end();
00212                 for (int i = blocks_per_page - 1; i >= 0; --i)
00213                 {
00214                     requests[i] = (front_page + i)->read(*(--cur_bid));
00215                 }
00216             }
00217 
00218             std::swap(front_page, back_page);
00219 
00220             cache_offset = blocks_per_page * block_type::size;
00221             --size_;
00222             current_element = &((*(back_page + (blocks_per_page - 1)))[block_type::size - 1]);
00223 
00224             wait_all(requests.begin(), blocks_per_page);
00225 
00226             block_manager::get_instance()->delete_blocks(bids.end() - blocks_per_page, bids.end());
00227             bids.resize(bids.size() - blocks_per_page);
00228 
00229             return;
00230         }
00231 
00232         --size_;
00233 
00234         current_element = element((--cache_offset) - 1);
00235     }
00236 
00237 private:
00238     value_type * element(unsigned_type offset)
00239     {
00240         if (offset < blocks_per_page * block_type::size)
00241             return &((*(back_page + offset / block_type::size))[offset % block_type::size]);
00242 
00243 
00244         unsigned_type unbiased_offset = offset - blocks_per_page * block_type::size;
00245         return &((*(front_page + unbiased_offset / block_type::size))[unbiased_offset % block_type::size]);
00246     }
00247 };
00248 
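normal_stack keeps a cache of 2 * blocks_per_page blocks in memory, organized as a back page and a front page; a page of blocks is written out only when both pages are full, and read back only when the cache underflows. A usage sketch, reusing the illustrative int_stack_cfg typedef from above (the element counts are arbitrary):

    typedef stxxl::normal_stack<int_stack_cfg> stack_type;

    stack_type s;
    for (int i = 0; i < 1000000; ++i)
        s.push(i);              // full pages spill to external memory

    while (!s.empty())
    {
        int x = s.top();        // reference into the in-memory cache page
        s.pop();                // may read a page of blocks back from disk
        (void)x;                // silence unused-variable warnings in this sketch
    }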
00249 
00251 
00255 template <class Config_>
00256 class grow_shrink_stack : private noncopyable
00257 {
00258 public:
00259     typedef Config_ cfg;
00260     typedef typename cfg::value_type value_type;
00261     typedef typename cfg::alloc_strategy alloc_strategy;
00262     typedef typename cfg::size_type size_type;
00263     enum {
00264         blocks_per_page = cfg::blocks_per_page,
00265         block_size = cfg::block_size
00266     };
00267 
00268     typedef typed_block<block_size, value_type> block_type;
00269     typedef BID<block_size> bid_type;
00270 
00271 private:
00272     size_type size_;
00273     unsigned_type cache_offset;
00274     value_type * current_element;
00275     simple_vector<block_type> cache;
00276     typename simple_vector<block_type>::iterator cache_buffers;
00277     typename simple_vector<block_type>::iterator overlap_buffers;
00278     simple_vector<request_ptr> requests;
00279     std::vector<bid_type> bids;
00280     alloc_strategy alloc_strategy_;
00281 
00282 public:
00283     grow_shrink_stack() :
00284         size_(0),
00285         cache_offset(0),
00286         current_element(NULL),
00287         cache(blocks_per_page * 2),
00288         cache_buffers(cache.begin()),
00289         overlap_buffers(cache.begin() + blocks_per_page),
00290         requests(blocks_per_page),
00291         bids(0)
00292     {
00293         bids.reserve(blocks_per_page);
00294     }
00295 
00296     void swap(grow_shrink_stack & obj)
00297     {
00298         std::swap(size_, obj.size_);
00299         std::swap(cache_offset, obj.cache_offset);
00300         std::swap(current_element, obj.current_element);
00301         std::swap(cache, obj.cache);
00302         std::swap(cache_buffers, obj.cache_buffers);
00303         std::swap(overlap_buffers, obj.overlap_buffers);
00304         std::swap(requests, obj.requests);
00305         std::swap(bids, obj.bids);
00306         std::swap(alloc_strategy_, obj.alloc_strategy_);
00307     }
00308 
00312     template <class stack_type>
00313     grow_shrink_stack(const stack_type & stack_) :
00314         size_(0),
00315         cache_offset(0),
00316         current_element(NULL),
00317         cache(blocks_per_page * 2),
00318         cache_buffers(cache.begin()),
00319         overlap_buffers(cache.begin() + blocks_per_page),
00320         requests(blocks_per_page),
00321         bids(0)
00322     {
00323         bids.reserve(blocks_per_page);
00324 
00325         stack_type stack_copy = stack_;
00326         const size_type sz = stack_copy.size();
00327         size_type i;
00328 
00329         std::vector<value_type> tmp(sz);
00330 
00331         for (i = 0; i < sz; ++i)
00332         {
00333             tmp[sz - i - 1] = stack_copy.top();
00334             stack_copy.pop();
00335         }
00336         for (i = 0; i < sz; ++i)
00337             this->push(tmp[i]);
00338     }
00339     virtual ~grow_shrink_stack()
00340     {
00341         STXXL_VERBOSE(STXXL_PRETTY_FUNCTION_NAME);
00342         try
00343         {
00344             if (requests[0].get())
00345                 wait_all(requests.begin(), blocks_per_page);
00346         }
00347         catch (const io_error & ex)
00348         { }
00349         block_manager::get_instance()->delete_blocks(bids.begin(), bids.end());
00350     }
00351     size_type size() const
00352     {
00353         return size_;
00354     }
00355     bool empty() const
00356     {
00357         return (!size_);
00358     }
00359     value_type & top()
00360     {
00361         assert(size_ > 0);
00362         return (*current_element);
00363     }
00364     const value_type & top() const
00365     {
00366         assert(size_ > 0);
00367         return (*current_element);
00368     }
00369     void push(const value_type & val)
00370     {
00371         assert(cache_offset <= blocks_per_page * block_type::size);
00372         //assert(cache_offset >= 0);
00373 
00374         if (cache_offset == blocks_per_page * block_type::size) // cache overflow
00375         {
00376             STXXL_VERBOSE2("growing, size: " << size_);
00377 
00378             bids.resize(bids.size() + blocks_per_page);
00379             typename std::vector<bid_type>::iterator cur_bid = bids.end() - blocks_per_page;
00380             block_manager::get_instance()->new_blocks(
00381                 offset_allocator<alloc_strategy>(cur_bid - bids.begin(), alloc_strategy_), cur_bid, bids.end());
00382 
00383             for (int i = 0; i < blocks_per_page; ++i, ++cur_bid)
00384             {
00385                 if (requests[i].get())
00386                     requests[i]->wait();
00387 
00388                 requests[i] = (cache_buffers + i)->write(*cur_bid);
00389             }
00390 
00391             std::swap(cache_buffers, overlap_buffers);
00392 
00393             bids.reserve(bids.size() + blocks_per_page);
00394 
00395             cache_offset = 1;
00396             current_element = &((*cache_buffers)[0]);
00397             ++size_;
00398 
00399             *current_element = val;
00400 
00401             return;
00402         }
00403 
00404         current_element = &((*(cache_buffers + cache_offset / block_type::size))[cache_offset % block_type::size]);
00405         *current_element = val;
00406         ++size_;
00407         ++cache_offset;
00408     }
00409     void pop()
00410     {
00411         assert(cache_offset <= blocks_per_page * block_type::size);
00412         assert(cache_offset > 0);
00413         assert(size_ > 0);
00414 
00415         if (cache_offset == 1 && bids.size() >= blocks_per_page)
00416         {
00417             STXXL_VERBOSE2("shrinking, size: " << size_);
00418 
00419             if (requests[0].get())
00420                 wait_all(requests.begin(), blocks_per_page);
00421 
00422 
00423             std::swap(cache_buffers, overlap_buffers);
00424 
00425             if (bids.size() > blocks_per_page)
00426             {
00427                 STXXL_VERBOSE2("prefetching, size: " << size_);
00428                 typename std::vector<bid_type>::const_iterator cur_bid = bids.end() - blocks_per_page;
00429                 for (int i = blocks_per_page - 1; i >= 0; --i)
00430                     requests[i] = (overlap_buffers + i)->read(*(--cur_bid));
00431             }
00432 
00433             block_manager::get_instance()->delete_blocks(bids.end() - blocks_per_page, bids.end());
00434             bids.resize(bids.size() - blocks_per_page);
00435 
00436             cache_offset = blocks_per_page * block_type::size;
00437             --size_;
00438             current_element = &((*(cache_buffers + (blocks_per_page - 1)))[block_type::size - 1]);
00439 
00440             return;
00441         }
00442 
00443         --size_;
00444         unsigned_type cur_offset = (--cache_offset) - 1;
00445         current_element = &((*(cache_buffers + cur_offset / block_type::size))[cur_offset % block_type::size]);
00446     }
00447 };
00448 
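In contrast to normal_stack, grow_shrink_stack overlaps I/O with computation: on overflow the write requests for the filled page are issued and only awaited at the next overflow, and on underflow the page below is read back into the overlap buffers while the other page is still being consumed. It therefore pays off when the stack grows for a long phase and then shrinks, rather than oscillating around a page boundary. A sketch under the same assumptions as above:

    typedef stxxl::grow_shrink_stack<int_stack_cfg> gs_stack_type;

    gs_stack_type s;
    for (int i = 0; i < 1000000; ++i)   // growing phase: writes overlap with pushes
        s.push(i);
    while (!s.empty())                  // shrinking phase: reads overlap with pops
        s.pop();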
00450 template <class Config_>
00451 class grow_shrink_stack2 : private noncopyable
00452 {
00453 public:
00454     typedef Config_ cfg;
00455     typedef typename cfg::value_type value_type;
00456     typedef typename cfg::alloc_strategy alloc_strategy;
00457     typedef typename cfg::size_type size_type;
00458     enum {
00459         blocks_per_page = cfg::blocks_per_page,     // stack of this type has only one page
00460         block_size = cfg::block_size
00461     };
00462 
00463     typedef typed_block<block_size, value_type> block_type;
00464     typedef BID<block_size> bid_type;
00465 
00466 private:
00467     size_type size_;
00468     unsigned_type cache_offset;
00469     block_type * cache;
00470     value_type current;
00471     std::vector<bid_type> bids;
00472     alloc_strategy alloc_strategy_;
00473     unsigned_type pref_aggr;
00474     prefetch_pool<block_type> & p_pool;
00475     write_pool<block_type> & w_pool;
00476 
00477 public:
00482     grow_shrink_stack2(
00483         prefetch_pool<block_type> & p_pool_,
00484         write_pool<block_type> & w_pool_,
00485         unsigned_type prefetch_aggressiveness = 0) :
00486         size_(0),
00487         cache_offset(0),
00488         cache(new block_type),
00489         pref_aggr(prefetch_aggressiveness),
00490         p_pool(p_pool_),
00491         w_pool(w_pool_)
00492     {
00493         STXXL_VERBOSE2("grow_shrink_stack2::grow_shrink_stack2(...)");
00494     }
00495 
00496     void swap(grow_shrink_stack2 & obj)
00497     {
00498         std::swap(size_, obj.size_);
00499         std::swap(cache_offset, obj.cache_offset);
00500         std::swap(cache, obj.cache);
00501         std::swap(current, obj.current);
00502         std::swap(bids, obj.bids);
00503         std::swap(alloc_strategy_, obj.alloc_strategy_);
00504         std::swap(pref_aggr, obj.pref_aggr);
00505         //std::swap(p_pool,obj.p_pool);
00506         //std::swap(w_pool,obj.w_pool);
00507     }
00508 
00509     virtual ~grow_shrink_stack2()
00510     {
00511         try
00512         {
00513             STXXL_VERBOSE2("grow_shrink_stack2::~grow_shrink_stack2()");
00514             const int_type bids_size = bids.size();
00515             const int_type last_pref = STXXL_MAX(int_type(bids_size) - int_type(pref_aggr), (int_type)0);
00516             int_type i;
00517             for (i = bids_size - 1; i >= last_pref; --i)
00518             {
00519                 if (p_pool.in_prefetching(bids[i]))
00520                     p_pool.read(cache, bids[i])->wait();
00521                 // clean the prefetch buffer
00522             }
00523             typename std::vector<bid_type>::iterator cur = bids.begin();
00524             typename std::vector<bid_type>::const_iterator end = bids.end();
00525             for ( ; cur != end; ++cur)
00526             {
00527                 block_type * b = w_pool.steal(*cur);
00528                 if (b)
00529                 {
00530                     w_pool.add(cache); // return buffer
00531                     cache = b;
00532                 }
00533             }
00534             delete cache;
00535         }
00536         catch (const io_error & ex)
00537         { }
00538         block_manager::get_instance()->delete_blocks(bids.begin(), bids.end());
00539     }
00540     size_type size() const { return size_; }
00541 
00542     bool empty() const
00543     {
00544         return (!size_);
00545     }
00546 
00547     void push(const value_type & val)
00548     {
00549         STXXL_VERBOSE3("grow_shrink_stack2::push(" << val << ")");
00550         assert(cache_offset <= block_type::size);
00551 
00552         if (cache_offset == block_type::size)
00553         {
00554             STXXL_VERBOSE2("grow_shrink_stack2::push(" << val << ") growing, size: " << size_);
00555 
00556             bids.resize(bids.size() + 1);
00557             typename std::vector<bid_type>::iterator cur_bid = bids.end() - 1;
00558             block_manager::get_instance()->new_blocks(
00559                 offset_allocator<alloc_strategy>(cur_bid - bids.begin(), alloc_strategy_), cur_bid, bids.end());
00560             w_pool.write(cache, bids.back());
00561             cache = w_pool.steal();
00562             const int_type bids_size = bids.size();
00563             const int_type last_pref = STXXL_MAX(int_type(bids_size) - int_type(pref_aggr) - 1, (int_type)0);
00564             for (int_type i = bids_size - 2; i >= last_pref; --i)
00565             {
00566                 if (p_pool.in_prefetching(bids[i]))
00567                     p_pool.read(cache, bids[i])->wait();
00568                 //  clean prefetch buffers
00569             }
00570             cache_offset = 0;
00571         }
00572         current = val;
00573         (*cache)[cache_offset] = val;
00574         ++size_;
00575         ++cache_offset;
00576 
00577         assert(cache_offset > 0);
00578         assert(cache_offset <= block_type::size);
00579     }
00580     value_type & top()
00581     {
00582         assert(size_ > 0);
00583         assert(cache_offset > 0);
00584         assert(cache_offset <= block_type::size);
00585         return current;
00586     }
00587     const value_type & top() const
00588     {
00589         assert(size_ > 0);
00590         assert(cache_offset > 0);
00591         assert(cache_offset <= block_type::size);
00592         return current;
00593     }
00594     void pop()
00595     {
00596         STXXL_VERBOSE3("grow_shrink_stack2::pop()");
00597         assert(size_ > 0);
00598         assert(cache_offset > 0);
00599         assert(cache_offset <= block_type::size);
00600         if (cache_offset == 1 && (!bids.empty()))
00601         {
00602             STXXL_VERBOSE2("grow_shrink_stack2::pop() shrinking, size = " << size_);
00603 
00604             bid_type last_block = bids.back();
00605             bids.pop_back();
00606             /*block_type * b = w_pool.steal(last_block);
00607                if(b)
00608                {
00609                STXXL_VERBOSE2("grow_shrink_stack2::pop() block is still in write buffer");
00610                w_pool.add(cache);
00611                cache = b;
00612                }
00613                else*/
00614             {
00615                 //STXXL_VERBOSE2("grow_shrink_stack2::pop() block is no longer in write buffer"
00616                 //  ", reading from prefetch/read pool");
00617                 p_pool.read(cache, last_block)->wait();
00618             }
00619             block_manager::get_instance()->delete_block(last_block);
00620             const int_type bids_size = bids.size();
00621             const int_type last_pref = STXXL_MAX(int_type(bids_size) - int_type(pref_aggr), (int_type)0);
00622             for (int_type i = bids_size - 1; i >= last_pref; --i)
00623             {
00624                 p_pool.hint(bids[i]); // prefetch
00625             }
00626             cache_offset = block_type::size + 1;
00627         }
00628 
00629         --cache_offset;
00630         if (cache_offset > 0)
00631             current = (*cache)[cache_offset - 1];
00632 
00633         --size_;
00634     }
00638     void set_prefetch_aggr(unsigned_type new_p)
00639     {
00640         if (pref_aggr > new_p)
00641         {
00642             const int_type bids_size = bids.size();
00643             const int_type last_pref = STXXL_MAX(int_type(bids_size) - int_type(pref_aggr), (int_type)0);
00644             for (int_type i = bids_size - new_p - 1; i >= last_pref; --i)
00645             {
00646                 if (p_pool.in_prefetching(bids[i]))
00647                     p_pool.read(cache, bids[i])->wait();
00648                 //  clean prefetch buffers
00649             }
00650         }
00651         else if (pref_aggr < new_p)
00652         {
00653             const int_type bids_size = bids.size();
00654             const int_type last_pref = STXXL_MAX(int_type(bids_size) - int_type(new_p), (int_type)0);
00655             for (int_type i = bids_size - 1; i >= last_pref; --i)
00656             {
00657                 p_pool.hint(bids[i]); // prefetch
00658             }
00659         }
00660         pref_aggr = new_p;
00661     }
00663     unsigned_type get_prefetch_aggr() const
00664     {
00665         return pref_aggr;
00666     }
00667 };
00668 
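grow_shrink_stack2 caches only a single block of its own and takes its remaining buffers from externally supplied prefetch and write pools, so several such stacks can share the same buffer memory; the third constructor argument sets how many of the topmost blocks are hinted to the prefetch pool. A sketch, assuming the pool constructors take an initial number of buffer blocks (the counts and the reuse of int_stack_cfg are illustrative):

    typedef stxxl::grow_shrink_stack2<int_stack_cfg> gs2_stack_type;
    typedef gs2_stack_type::block_type block_type;

    stxxl::prefetch_pool<block_type> p_pool(4);   // assumed ctor: 4 prefetch buffers
    stxxl::write_pool<block_type>    w_pool(4);   // assumed ctor: 4 write buffers

    gs2_stack_type s(p_pool, w_pool, 2);          // prefetch aggressiveness: 2 blocks
    for (int i = 0; i < 1000000; ++i)
        s.push(i);
    s.set_prefetch_aggr(4);                       // hint more blocks before the pop phase
    while (!s.empty())
        s.pop();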
00669 
00671 
00673 template <unsigned_type CritSize, class ExternalStack, class InternalStack>
00674 class migrating_stack : private noncopyable
00675 {
00676 public:
00677     typedef typename ExternalStack::cfg cfg;
00678     typedef typename cfg::value_type value_type;
00679     typedef typename cfg::alloc_strategy alloc_strategy;
00680     typedef typename cfg::size_type size_type;
00681     enum {
00682         blocks_per_page = cfg::blocks_per_page,
00683         block_size = cfg::block_size
00684     };
00685 
00686 
00687     typedef InternalStack int_stack_type;
00688     typedef ExternalStack ext_stack_type;
00689 
00690 private:
00691     enum { critical_size = CritSize };
00692 
00693     int_stack_type * int_impl;
00694     ext_stack_type * ext_impl;
00695 
00696     // not implemented yet
00697     template <class stack_type>
00698     migrating_stack(const stack_type & stack_);
00699 
00700 public:
00701     migrating_stack() : int_impl(new int_stack_type()), ext_impl(NULL) { }
00702 
00703     void swap(migrating_stack & obj)
00704     {
00705         std::swap(int_impl, obj.int_impl);
00706         std::swap(ext_impl, obj.ext_impl);
00707     }
00708 
00710     bool internal() const
00711     {
00712         assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
00713         return int_impl;
00714     }
00716     bool external() const
00717     {
00718         assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
00719         return ext_impl;
00720     }
00721 
00722     bool empty() const
00723     {
00724         assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
00725 
00726         if (int_impl)
00727             return int_impl->empty();
00728 
00729 
00730         return ext_impl->empty();
00731     }
00732     size_type size() const
00733     {
00734         assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
00735 
00736         if (int_impl)
00737             return int_impl->size();
00738 
00739 
00740         return ext_impl->size();
00741     }
00742     value_type & top()
00743     {
00744         assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
00745 
00746         if (int_impl)
00747             return int_impl->top();
00748 
00749 
00750         return ext_impl->top();
00751     }
00752     const value_type & top() const
00753     {
00754         assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
00755 
00756         if (int_impl)
00757             return int_impl->top();
00758 
00759 
00760         return ext_impl->top();
00761     }
00762     void push(const value_type & val)
00763     {
00764         assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
00765 
00766         if (int_impl)
00767         {
00768             int_impl->push(val);
00769             if (int_impl->size() == critical_size)
00770             {
00771                 // migrate to external stack
00772                 ext_impl = new ext_stack_type(*int_impl);
00773                 delete int_impl;
00774                 int_impl = NULL;
00775             }
00776         }
00777         else
00778             ext_impl->push(val);
00779     }
00780     void pop()
00781     {
00782         assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
00783 
00784         if (int_impl)
00785             int_impl->pop();
00786 
00787         else
00788             ext_impl->pop();
00789     }
00790     virtual ~migrating_stack()
00791     {
00792         assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
00793 
00794         if (int_impl)
00795             delete int_impl;
00796 
00797         else
00798             delete ext_impl;
00799     }
00800 };
00801 
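migrating_stack starts with a purely internal stack and, when a push raises the element count to CritSize, copies the contents into a newly constructed external stack and discards the internal one; internal() and external() report which representation is currently active. A sketch with illustrative parameters (the critical size of 16384 elements and the typedef names are assumptions, and <cassert> is taken to be included):

    // Internal std::stack until 16384 elements, then migrate to a normal_stack.
    typedef stxxl::migrating_stack<
        16384,
        stxxl::normal_stack<int_stack_cfg>,
        std::stack<int>
        > mig_stack_type;

    mig_stack_type s;
    for (int i = 0; i < 100000; ++i)
        s.push(i);          // migration happens during the 16384th push
    assert(s.external());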
00803 
00804 
00807 
00808 enum stack_externality { external, migrating, internal };
00809 enum stack_behaviour { normal, grow_shrink, grow_shrink2 };
00810 
00812 
00848 template <
00849     class ValTp,
00850     stack_externality Externality = external,
00851     stack_behaviour Behaviour = normal,
00852     unsigned BlocksPerPage = 4,
00853     unsigned BlkSz = STXXL_DEFAULT_BLOCK_SIZE(ValTp),
00854 
00855     class IntStackTp = std::stack<ValTp>,
00856     unsigned_type MigrCritSize = (2 * BlocksPerPage * BlkSz),
00857 
00858     class AllocStr = STXXL_DEFAULT_ALLOC_STRATEGY,
00859     class SzTp = stxxl::int64
00860     >
00861 class STACK_GENERATOR
00862 {
00863     typedef stack_config_generator<ValTp, BlocksPerPage, BlkSz, AllocStr, SzTp> cfg;
00864 
00865     typedef typename IF<Behaviour == grow_shrink,
00866                         grow_shrink_stack<cfg>,
00867                         grow_shrink_stack2<cfg> >::result GrShrTp;
00868     typedef typename IF<Behaviour == normal, normal_stack<cfg>, GrShrTp>::result ExtStackTp;
00869     typedef typename IF<Externality == migrating,
00870                         migrating_stack<MigrCritSize, ExtStackTp, IntStackTp>, ExtStackTp>::result MigrOrNotStackTp;
00871 
00872 public:
00873     typedef typename IF<Externality == internal, IntStackTp, MigrOrNotStackTp>::result result;
00874 };
00875 
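STACK_GENERATOR is the intended front end to all of the above: the IF metafunctions select one of the external implementations according to Behaviour, wrap it in migrating_stack if Externality is migrating, or fall back to the internal stack type. Typical instantiations, with illustrative parameter choices:

    // Fully external stack of doubles, grow_shrink behaviour,
    // 8 blocks per page, 2 MiB blocks.
    typedef stxxl::STACK_GENERATOR<double, stxxl::external, stxxl::grow_shrink,
                                   8, 2 * 1024 * 1024>::result ext_stack_type;

    // Migrating stack with all remaining parameters left at their defaults.
    typedef stxxl::STACK_GENERATOR<double, stxxl::migrating>::result mig_stack_type;

    ext_stack_type s;
    s.push(3.14);
    s.pop();

Note that a grow_shrink2 result obtained this way still has to be constructed with the prefetch and write pools shown in the earlier sketch; only the other variants are default constructible.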
00877 
00878 __STXXL_END_NAMESPACE
00879 
00880 
00881 namespace std
00882 {
00883     template <class Config_>
00884     void swap(stxxl::normal_stack<Config_> & a,
00885               stxxl::normal_stack<Config_> & b)
00886     {
00887         a.swap(b);
00888     }
00889 
00890     template <class Config_>
00891     void swap(stxxl::grow_shrink_stack<Config_> & a,
00892               stxxl::grow_shrink_stack<Config_> & b)
00893     {
00894         a.swap(b);
00895     }
00896 
00897     template <class Config_>
00898     void swap(stxxl::grow_shrink_stack2<Config_> & a,
00899               stxxl::grow_shrink_stack2<Config_> & b)
00900     {
00901         a.swap(b);
00902     }
00903 
00904     template <stxxl::unsigned_type CritSize, class ExternalStack, class InternalStack>
00905     void swap(stxxl::migrating_stack<CritSize, ExternalStack, InternalStack> & a,
00906               stxxl::migrating_stack<CritSize, ExternalStack, InternalStack> & b)
00907     {
00908         a.swap(b);
00909     }
00910 }
00911 
00912 #endif // !STXXL_STACK_HEADER
