# HG changeset patch
# User Peter Kovacs <kpeter@inf.elte.hu>
# Date 1250023955 -7200
# Node ID 8642452f583c94b3f0bbb9a023ae7f402c11a196
# Parent 00dd2f3ccfa9844f9a7c98b86948f9e56462a06b
Simplify comparisons in min mean cycle classes (#179)
using extreme INF values instead of bool flags.
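The core of the change is visible in each class below: an INF constant is defined as std::numeric_limits<LargeValue>::infinity() when the value type reports has_infinity, and std::numeric_limits<LargeValue>::max() otherwise; path data is then initialized to INF, so "not reached yet" is encoded in the distance itself and the separate bool found flag can be dropped. A minimal standalone sketch of this idiom (PathEntry and infValue are illustrative names, not LEMON code):

    #include <iostream>
    #include <limits>

    // Illustrative sketch of the INF idiom used by the patch: use a real
    // infinity when the value type has one, otherwise the largest finite value.
    template <typename Value>
    Value infValue() {
      return std::numeric_limits<Value>::has_infinity ?
             std::numeric_limits<Value>::infinity() :
             std::numeric_limits<Value>::max();
    }

    // Path entry without a separate "found" flag: dist == INF means unreached.
    template <typename Value>
    struct PathEntry {
      Value dist;
      explicit PathEntry(Value d) : dist(d) {}
    };

    int main() {
      const double INF = infValue<double>();
      PathEntry<double> entry(INF);      // starts unreached
      double candidate = 3.5;
      if (candidate < entry.dist)        // also covers the "unreached" case
        entry.dist = candidate;
      std::cout << entry.dist << '\n';   // prints 3.5
      return 0;
    }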
diff --git a/lemon/hartmann_orlin.h b/lemon/hartmann_orlin.h
a | b |
|
25 | 25 | /// \brief Hartmann-Orlin's algorithm for finding a minimum mean cycle. |
26 | 26 | |
27 | 27 | #include <vector> |
| 28 | #include <limits> |
28 | 29 | #include <lemon/core.h> |
29 | 30 | #include <lemon/path.h> |
30 | 31 | #include <lemon/tolerance.h> |
… |
… |
|
149 | 150 | // Data sturcture for path data |
150 | 151 | struct PathData |
151 | 152 | { |
152 | | bool found; |
153 | 153 | LargeValue dist; |
154 | 154 | Arc pred; |
155 | | PathData(bool f = false, LargeValue d = 0, Arc p = INVALID) : |
156 | | found(f), dist(d), pred(p) {} |
| 155 | PathData(LargeValue d, Arc p = INVALID) : |
| 156 | dist(d), pred(p) {} |
157 | 157 | }; |
158 | 158 | |
159 | 159 | typedef typename Digraph::template NodeMap<std::vector<PathData> > |
… |
… |
|
190 | 190 | |
191 | 191 | Tolerance _tolerance; |
192 | 192 | |
| 193 | // Infinite constant |
| 194 | const LargeValue INF; |
| 195 | |
193 | 196 | public: |
194 | 197 | |
195 | 198 | /// \name Named Template Parameters |
… |
… |
|
244 | 247 | const LengthMap &length ) : |
245 | 248 | _gr(digraph), _length(length), _comp(digraph), _out_arcs(digraph), |
246 | 249 | _best_found(false), _best_length(0), _best_size(1), |
247 | | _cycle_path(NULL), _local_path(false), _data(digraph) |
| 250 | _cycle_path(NULL), _local_path(false), _data(digraph), |
| 251 | INF(std::numeric_limits<LargeValue>::has_infinity ? |
| 252 | std::numeric_limits<LargeValue>::infinity() : |
| 253 | std::numeric_limits<LargeValue>::max()) |
248 | 254 | {} |
249 | 255 | |
250 | 256 | /// Destructor. |
… |
… |
|
471 | 477 | return false; |
472 | 478 | } |
473 | 479 | for (int i = 0; i < n; ++i) { |
474 | | _data[(*_nodes)[i]].resize(n + 1); |
| 480 | _data[(*_nodes)[i]].resize(n + 1, PathData(INF)); |
475 | 481 | } |
476 | 482 | return true; |
477 | 483 | } |
… |
… |
|
481 | 487 | // node to node v containing exactly k arcs. |
482 | 488 | void processRounds() { |
483 | 489 | Node start = (*_nodes)[0]; |
484 | | _data[start][0] = PathData(true, 0); |
| 490 | _data[start][0] = PathData(0); |
485 | 491 | _process.clear(); |
486 | 492 | _process.push_back(start); |
487 | 493 | |
… |
… |
|
516 | 522 | e = _out_arcs[u][j]; |
517 | 523 | v = _gr.target(e); |
518 | 524 | d = _data[u][k-1].dist + _length[e]; |
519 | | if (!_data[v][k].found) { |
520 | | next.push_back(v); |
521 | | _data[v][k] = PathData(true, _data[u][k-1].dist + _length[e], e); |
522 | | } |
523 | | else if (_tolerance.less(d, _data[v][k].dist)) { |
524 | | _data[v][k] = PathData(true, d, e); |
| 525 | if (_tolerance.less(d, _data[v][k].dist)) { |
| 526 | if (_data[v][k].dist == INF) next.push_back(v); |
| 527 | _data[v][k] = PathData(d, e); |
525 | 528 | } |
526 | 529 | } |
527 | 530 | } |
… |
… |
|
539 | 542 | e = _out_arcs[u][j]; |
540 | 543 | v = _gr.target(e); |
541 | 544 | d = _data[u][k-1].dist + _length[e]; |
542 | | if (!_data[v][k].found || _tolerance.less(d, _data[v][k].dist)) { |
543 | | _data[v][k] = PathData(true, d, e); |
| 545 | if (_tolerance.less(d, _data[v][k].dist)) { |
| 546 | _data[v][k] = PathData(d, e); |
544 | 547 | } |
545 | 548 | } |
546 | 549 | } |
… |
… |
|
560 | 563 | _curr_found = false; |
561 | 564 | for (int i = 0; i < n; ++i) { |
562 | 565 | u = (*_nodes)[i]; |
563 | | if (!_data[u][k].found) continue; |
| 566 | if (_data[u][k].dist == INF) continue; |
564 | 567 | for (int j = k; j >= 0; --j) { |
565 | 568 | if (level[u].first == i && level[u].second > 0) { |
566 | 569 | // A cycle is found |
… |
… |
|
585 | 588 | // Find node potentials |
586 | 589 | for (int i = 0; i < n; ++i) { |
587 | 590 | u = (*_nodes)[i]; |
588 | | pi[u] = std::numeric_limits<LargeValue>::max(); |
| 591 | pi[u] = INF; |
589 | 592 | for (int j = 0; j <= k; ++j) { |
590 | | d = _data[u][j].dist * _curr_size - j * _curr_length; |
591 | | if (_data[u][j].found && _tolerance.less(d, pi[u])) { |
592 | | pi[u] = d; |
| 593 | if (_data[u][j].dist < INF) { |
| 594 | d = _data[u][j].dist * _curr_size - j * _curr_length; |
| 595 | if (_tolerance.less(d, pi[u])) pi[u] = d; |
593 | 596 | } |
594 | 597 | } |
595 | 598 | } |
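The relaxation hunks above show what the INF encoding buys: the old two-branch update (push and set if the node was not yet found, otherwise set if the new distance is smaller) collapses into a single tolerance comparison, and dist == INF now detects the first time a node is reached and must be queued. A rough standalone sketch of the same pattern with plain arrays instead of the LEMON maps (the names and the tiny graph are illustrative):

    #include <iostream>
    #include <limits>
    #include <vector>

    int main() {
      const long INF = std::numeric_limits<long>::max();
      std::vector<long> dist(3, INF);   // every node starts unreached
      std::vector<int> next;            // nodes reached for the first time

      struct Edge { int u, v; long len; };
      std::vector<Edge> edges = {{0, 1, 2}, {0, 2, 5}, {1, 2, 1}};

      dist[0] = 0;                      // start node
      for (const Edge &e : edges) {
        if (dist[e.u] == INF) continue;               // arc source unreached
        long d = dist[e.u] + e.len;
        if (d < dist[e.v]) {                          // single comparison suffices
          if (dist[e.v] == INF) next.push_back(e.v);  // first improvement: queue it
          dist[e.v] = d;
        }
      }
      std::cout << dist[2] << '\n';     // prints 3 (better than the direct arc)
      return 0;
    }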
diff --git a/lemon/howard.h b/lemon/howard.h
a | b |
|
25 | 25 | /// \brief Howard's algorithm for finding a minimum mean cycle. |
26 | 26 | |
27 | 27 | #include <vector> |
| 28 | #include <limits> |
28 | 29 | #include <lemon/core.h> |
29 | 30 | #include <lemon/path.h> |
30 | 31 | #include <lemon/tolerance.h> |
… |
… |
|
177 | 178 | |
178 | 179 | Tolerance _tolerance; |
179 | 180 | |
| 181 | // Infinite constant |
| 182 | const LargeValue INF; |
| 183 | |
180 | 184 | public: |
181 | 185 | |
182 | 186 | /// \name Named Template Parameters |
… |
… |
|
229 | 233 | /// \param length The lengths (costs) of the arcs. |
230 | 234 | Howard( const Digraph &digraph, |
231 | 235 | const LengthMap &length ) : |
232 | | _gr(digraph), _length(length), _cycle_path(NULL), _local_path(false), |
| 236 | _gr(digraph), _length(length), _best_found(false), |
| 237 | _best_length(0), _best_size(1), _cycle_path(NULL), _local_path(false), |
233 | 238 | _policy(digraph), _reached(digraph), _level(digraph), _dist(digraph), |
234 | | _comp(digraph), _in_arcs(digraph) |
| 239 | _comp(digraph), _in_arcs(digraph), |
| 240 | INF(std::numeric_limits<LargeValue>::has_infinity ? |
| 241 | std::numeric_limits<LargeValue>::infinity() : |
| 242 | std::numeric_limits<LargeValue>::max()) |
235 | 243 | {} |
236 | 244 | |
237 | 245 | /// Destructor. |
… |
… |
|
306 | 314 | if (!computeNodeDistances()) break; |
307 | 315 | } |
308 | 316 | // Update the best cycle (global minimum mean cycle) |
309 | | if ( !_best_found || (_curr_found && |
| 317 | if ( _curr_found && (!_best_found || |
310 | 318 | _curr_length * _best_size < _best_length * _curr_size) ) { |
311 | 319 | _best_found = true; |
312 | 320 | _best_length = _curr_length; |
… |
… |
|
445 | 453 | return false; |
446 | 454 | } |
447 | 455 | for (int i = 0; i < int(_nodes->size()); ++i) { |
448 | | _dist[(*_nodes)[i]] = std::numeric_limits<LargeValue>::max(); |
| 456 | _dist[(*_nodes)[i]] = INF; |
449 | 457 | } |
450 | 458 | Node u, v; |
451 | 459 | Arc e; |
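One more detail worth noting in the howard.h hunk above: the best-cycle update is now guarded by _curr_found first, and the two mean values are compared by cross-multiplication, curr_length * best_size < best_length * curr_size, which avoids division and stays exact for integer length types. A tiny illustration with made-up numbers:

    #include <iostream>

    int main() {
      // Hypothetical cycles: best has mean 7/3 ~ 2.33, current has mean 5/2 = 2.5.
      long best_length = 7, curr_length = 5;
      int  best_size   = 3, curr_size   = 2;
      bool better = curr_length * best_size < best_length * curr_size;  // 15 < 14
      std::cout << (better ? "update best" : "keep best") << '\n';      // keep best
      return 0;
    }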
diff --git a/lemon/karp.h b/lemon/karp.h
a | b |
|
25 | 25 | /// \brief Karp's algorithm for finding a minimum mean cycle. |
26 | 26 | |
27 | 27 | #include <vector> |
| 28 | #include <limits> |
28 | 29 | #include <lemon/core.h> |
29 | 30 | #include <lemon/path.h> |
30 | 31 | #include <lemon/tolerance.h> |
… |
… |
|
147 | 148 | // Data sturcture for path data |
148 | 149 | struct PathData |
149 | 150 | { |
150 | | bool found; |
151 | 151 | LargeValue dist; |
152 | 152 | Arc pred; |
153 | | PathData(bool f = false, LargeValue d = 0, Arc p = INVALID) : |
154 | | found(f), dist(d), pred(p) {} |
| 153 | PathData(LargeValue d, Arc p = INVALID) : |
| 154 | dist(d), pred(p) {} |
155 | 155 | }; |
156 | 156 | |
157 | 157 | typedef typename Digraph::template NodeMap<std::vector<PathData> > |
… |
… |
|
185 | 185 | std::vector<Node> _process; |
186 | 186 | |
187 | 187 | Tolerance _tolerance; |
| 188 | |
| 189 | // Infinite constant |
| 190 | const LargeValue INF; |
188 | 191 | |
189 | 192 | public: |
190 | 193 | |
… |
… |
|
240 | 243 | const LengthMap &length ) : |
241 | 244 | _gr(digraph), _length(length), _comp(digraph), _out_arcs(digraph), |
242 | 245 | _cycle_length(0), _cycle_size(1), _cycle_node(INVALID), |
243 | | _cycle_path(NULL), _local_path(false), _data(digraph) |
| 246 | _cycle_path(NULL), _local_path(false), _data(digraph), |
| 247 | INF(std::numeric_limits<LargeValue>::has_infinity ? |
| 248 | std::numeric_limits<LargeValue>::infinity() : |
| 249 | std::numeric_limits<LargeValue>::max()) |
244 | 250 | {} |
245 | 251 | |
246 | 252 | /// Destructor. |
… |
… |
|
457 | 463 | return false; |
458 | 464 | } |
459 | 465 | for (int i = 0; i < n; ++i) { |
460 | | _data[(*_nodes)[i]].resize(n + 1); |
| 466 | _data[(*_nodes)[i]].resize(n + 1, PathData(INF)); |
461 | 467 | } |
462 | 468 | return true; |
463 | 469 | } |
… |
… |
|
467 | 473 | // node to node v containing exactly k arcs. |
468 | 474 | void processRounds() { |
469 | 475 | Node start = (*_nodes)[0]; |
470 | | _data[start][0] = PathData(true, 0); |
| 476 | _data[start][0] = PathData(0); |
471 | 477 | _process.clear(); |
472 | 478 | _process.push_back(start); |
473 | 479 | |
… |
… |
|
492 | 498 | e = _out_arcs[u][j]; |
493 | 499 | v = _gr.target(e); |
494 | 500 | d = _data[u][k-1].dist + _length[e]; |
495 | | if (!_data[v][k].found) { |
496 | | next.push_back(v); |
497 | | _data[v][k] = PathData(true, _data[u][k-1].dist + _length[e], e); |
498 | | } |
499 | | else if (_tolerance.less(d, _data[v][k].dist)) { |
500 | | _data[v][k] = PathData(true, d, e); |
| 501 | if (_tolerance.less(d, _data[v][k].dist)) { |
| 502 | if (_data[v][k].dist == INF) next.push_back(v); |
| 503 | _data[v][k] = PathData(d, e); |
501 | 504 | } |
502 | 505 | } |
503 | 506 | } |
… |
… |
|
515 | 518 | e = _out_arcs[u][j]; |
516 | 519 | v = _gr.target(e); |
517 | 520 | d = _data[u][k-1].dist + _length[e]; |
518 | | if (!_data[v][k].found || _tolerance.less(d, _data[v][k].dist)) { |
519 | | _data[v][k] = PathData(true, d, e); |
| 521 | if (_tolerance.less(d, _data[v][k].dist)) { |
| 522 | _data[v][k] = PathData(d, e); |
520 | 523 | } |
521 | 524 | } |
522 | 525 | } |
… |
… |
|
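The final hunk below evaluates Karp's formula: with dist[u][k] the length of the shortest walk of exactly k arcs from the start node to u (INF when no such walk exists), the minimum cycle mean equals the minimum over u of the maximum over 0 <= k < n of (dist[u][n] - dist[u][k]) / (n - k); the rewritten INF checks replace the old found tests when skipping unusable entries. A rough self-contained sketch of that evaluation on a hand-filled table (illustrative only; the real class fills the table in processRounds):

    #include <iostream>
    #include <limits>
    #include <vector>

    int main() {
      const long INF = std::numeric_limits<long>::max();
      int n = 3;  // number of nodes
      // dist[u][k], k = 0..n, for the 3-cycle 0 -> 1 -> 2 -> 0 with arc length 2.
      std::vector<std::vector<long>> dist = {
        {0,   INF, INF, 6},    // node 0
        {INF, 2,   INF, INF},  // node 1
        {INF, INF, 4,   INF},  // node 2
      };

      bool found = false;
      long best_len = 0; int best_size = 1;
      for (int u = 0; u < n; ++u) {
        if (dist[u][n] == INF) continue;            // no walk of n arcs ends at u
        long max_len = 0; int max_size = 1; bool found_u = false;
        for (int k = 0; k < n; ++k) {
          if (dist[u][k] == INF) continue;
          long len = dist[u][n] - dist[u][k];
          int size = n - k;
          if (!found_u || len * max_size > max_len * size) {
            max_len = len; max_size = size; found_u = true;
          }
        }
        if (found_u && (!found || max_len * best_size < best_len * max_size)) {
          best_len = max_len; best_size = max_size; found = true;
        }
      }
      if (found) std::cout << best_len << "/" << best_size << '\n';  // prints 6/3
      return 0;
    }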
527 | 530 | int n = _nodes->size(); |
528 | 531 | for (int i = 0; i < n; ++i) { |
529 | 532 | Node u = (*_nodes)[i]; |
530 | | if (!_data[u][n].found) continue; |
| 533 | if (_data[u][n].dist == INF) continue; |
531 | 534 | LargeValue length, max_length = 0; |
532 | 535 | int size, max_size = 1; |
533 | 536 | bool found_curr = false; |
534 | 537 | for (int k = 0; k < n; ++k) { |
535 | | if (!_data[u][k].found) continue; |
| 538 | if (_data[u][k].dist == INF) continue; |
536 | 539 | length = _data[u][n].dist - _data[u][k].dist; |
537 | 540 | size = n - k; |
538 | 541 | if (!found_curr || length * max_size > max_length * size) { |