// // libsemigroups - C++ library for semigroups and monoids // Copyright (C) 2019 James D. Mitchell // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. //
// This file contains the implementation of the class KnuthBendixImpl, which is // the main implementation for the class KnuthBendix.
#include <algorithm>    // for max, min
#include <atomic>       // for atomic
#include <cinttypes>    // for int64_t
#include <cstddef>      // for size_t
#include <limits>       // for numeric_limits
#include <list>         // for list, list<>::iterator
#include <ostream>      // for string
#include <set>          // for set
#include <stack>        // for stack
#include <string>       // for operator!=, basic_strin...
#include <type_traits>  // for swap
#include <utility>      // for pair
#include <vector>       // for vector

#ifdef LIBSEMIGROUPS_VERBOSE
#include <unordered_set>  // for unordered_set
#endif

#include "libsemigroups/config.hpp"        // for LIBSEMIGROUPS_DEBUG
#include "libsemigroups/constants.hpp"     // for POSITIVE_INFINITY
#include "libsemigroups/debug.hpp"         // for LIBSEMIGROUPS_ASSERT
#include "libsemigroups/knuth-bendix.hpp"  // for KnuthBendix, KnuthBendi...
#include "libsemigroups/order.hpp"         // for shortlex_compare
#include "libsemigroups/report.hpp"        // for REPORT
#include "libsemigroups/string.hpp"        // for detail::is_suffix, maximum_comm...
#include "libsemigroups/timer.hpp"         // for detail::Timer
#include "libsemigroups/types.hpp"         // for word_type
// Rule and RuleLookup classes class Rule { public: // Construct from KnuthBendix with new but empty internal_string_type's explicit Rule(KnuthBendixImpl const* kbimpl, int64_t id)
: _kbimpl(kbimpl),
_lhs(new internal_string_type()),
_rhs(new internal_string_type()),
_id(-1 * id) {
LIBSEMIGROUPS_ASSERT(_id < 0);
}
// The Rule class does not support an assignment constructor to avoid // accidental copying.
Rule& operator=(Rule const& copy) = delete;
// The Rule class does not support a copy constructor to avoid // accidental copying.
Rule(Rule const& copy) = delete;
// Destructor, deletes pointers used to create the rule.
~Rule() { delete _lhs; delete _rhs;
}
// Returns the left hand side of the rule, which is guaranteed to be // greater than its right hand side according to the reduction ordering // of the KnuthBendix used to construct this.
internal_string_type* lhs() const { return _lhs;
}
// Returns the right hand side of the rule, which is guaranteed to be // less than its left hand side according to the reduction ordering of // the KnuthBendix used to construct this.
internal_string_type* rhs() const { return _rhs;
}
void rewrite() {
LIBSEMIGROUPS_ASSERT(_id != 0);
_kbimpl->internal_rewrite(_lhs);
_kbimpl->internal_rewrite(_rhs); // reorder if necessary if (shortlex_compare(_lhs, _rhs)) {
std::swap(_lhs, _rhs);
}
}
// This implements reverse lex comparison of this and that, which // satisfies the requirement of std::set that equivalent items be // incomparable, so, for example bcbc and abcbc are considered // equivalent, but abcba and bcbc are not. booloperator<(RuleLookup const& that) const { auto it_this = _last - 1; auto it_that = that._last - 1; while (it_this > _first && it_that > that._first
&& *it_this == *it_that) {
--it_that;
--it_this;
} return *it_this < *it_that;
}
////////////////////////////////////////////////////////////////////////// // KnuthBendixImpl - other methods - public //////////////////////////////////////////////////////////////////////////
// TEST_2 from Sims, p76 void clear_stack() { while (!_stack.empty() && !_kb->stopped()) { #ifdef LIBSEMIGROUPS_VERBOSE
_max_stack_depth = std::max(_max_stack_depth, _stack.size()); #endif
Rule* rule1 = _stack.top();
_stack.pop();
LIBSEMIGROUPS_ASSERT(!rule1->active());
LIBSEMIGROUPS_ASSERT(*rule1->lhs() != *rule1->rhs()); // Rewrite both sides and reorder if necessary . . .
rule1->rewrite();
if (*rule1->lhs() != *rule1->rhs()) {
internal_string_type const* lhs = rule1->lhs(); for (auto it = _active_rules.begin(); it != _active_rules.end();) {
Rule* rule2 = const_cast<Rule*>(*it); if (rule2->lhs()->find(*lhs) != external_string_type::npos) {
it = remove_rule(it);
LIBSEMIGROUPS_ASSERT(*rule2->lhs() != *rule2->rhs()); // rule2 is added to _inactive_rules by clear_stack
_stack.emplace(rule2);
} else { if (rule2->rhs()->find(*lhs) != external_string_type::npos) {
internal_rewrite(rule2->rhs());
}
++it;
}
}
add_rule(rule1); // rule1 is activated, we do this after removing rules that rule1 // makes redundant to avoid failing to insert rule1 in _set_rules
} else {
_inactive_rules.push_back(rule1);
} if (_kb->report()) {
REPORT_DEFAULT( "active rules = %d, inactive rules = %d, rules defined = " "%d\n",
_active_rules.size(),
_inactive_rules.size(),
_total_rules);
REPORT_VERBOSE_DEFAULT("max stack depth = %d\n" "max word length = %d\n" "max active word length = %d\n" "max active rules = %d\n" "number of unique lhs = %d\n",
_max_stack_depth,
_max_word_length,
max_active_word_length(),
_max_active_rules,
_unique_lhs_rules.size());
}
}
} // FIXME(later) there is a possibly infinite loop here clear_stack -> // push_stack -> clear_stack and so on void push_stack(Rule* rule) {
LIBSEMIGROUPS_ASSERT(!rule->active()); if (*rule->lhs() != *rule->rhs()) {
_stack.emplace(rule);
clear_stack();
} else {
_inactive_rules.push_back(rule);
}
}
// OVERLAP_2 from Sims, p77 void overlap(Rule const* u, Rule const* v) {
LIBSEMIGROUPS_ASSERT(u->active() && v->active()); auto limit
= u->lhs()->cend() - std::min(u->lhs()->size(), v->lhs()->size());
int64_t u_id = u->id();
int64_t v_id = v->id(); for (auto it = u->lhs()->cend() - 1;
it > limit && u_id == u->id() && v_id == v->id() && !_kb->stopped()
&& (_kb->_settings._max_overlap == POSITIVE_INFINITY
|| (*_overlap_measure)(u, v, it)
<= _kb->_settings._max_overlap);
--it) { // Check if B = [it, u->lhs()->cend()) is a prefix of v->lhs() if (detail::is_prefix(
v->lhs()->cbegin(), v->lhs()->cend(), it, u->lhs()->cend())) { // u = P_i = AB -> Q_i and v = P_j = BC -> Q_j // This version of new_rule does not reorder
Rule* rule = new_rule(u->lhs()->cbegin(),
it,
u->rhs()->cbegin(),
u->rhs()->cend()); // rule = A -> Q_i
rule->_lhs->append(*v->rhs()); // rule = AQ_j -> Q_i
rule->_rhs->append(v->lhs()->cbegin() + (u->lhs()->cend() - it),
v->lhs()->cend()); // rule = AQ_j -> Q_iC // rule is reordered during rewriting in clear_stack
push_stack(rule); // It can be that the iterator `it` is invalidated by the call to // push_stack (i.e. if `u` is deactivated, then rewritten, actually // changed, and reactivated) and that is the reason for the checks // in the for-loop above. If this is the case, then we should stop // considering the overlaps of u and v here, and note that they will // be considered later, because when the rule `u` is reactivated it // is added to the end of the active rules list.
}
}
}
public: ////////////////////////////////////////////////////////////////////////// // KnuthBendixImpl - main methods - public //////////////////////////////////////////////////////////////////////////
// NOTE(review): the lines below are the interior of a confluence check
// (presumably KnuthBendixImpl::confluent()); the function signature and
// the declarations of `word1`, `word2` and `seen` were lost in the
// line-collapsed source, so the code is left byte-identical here —
// reconstruct the opening before attempting to compile. TODO confirm
// against the upstream file.
//
// Outer loop: each active rule in turn, unless the runner was stopped.
for (auto it1 = _active_rules.cbegin();
it1 != _active_rules.cend()
&& (!_kb->running() || !_kb->stopped());
++it1) {
Rule const* rule1 = *it1; // Seems to be much faster to do this in reverse. for (auto it2 = _active_rules.crbegin();
it2 != _active_rules.crend()
&& (!_kb->running() || !_kb->stopped());
++it2) {
// `seen` counts the pairs of rules whose overlaps were examined (used
// only for reporting below).
seen++;
Rule const* rule2 = *it2; for (auto it = rule1->lhs()->cend() - 1;
it >= rule1->lhs()->cbegin()
&& (!_kb->running() || !_kb->stopped());
--it) { // Find longest common prefix of suffix B of rule1.lhs() defined // by it and R = rule2.lhs() auto prefix
= detail::maximum_common_prefix(it,
rule1->lhs()->cend(),
rule2->lhs()->cbegin(),
rule2->lhs()->cend()); if (prefix.first == rule1->lhs()->cend()
|| prefix.second == rule2->lhs()->cend()) {
// A genuine overlap: build the two sides of the critical pair
// (comments A, S, D, Q, E label the factors of the words).
word1.clear();
word1.append(rule1->lhs()->cbegin(), it); // A
word1.append(*rule2->rhs()); // S
word1.append(prefix.first, rule1->lhs()->cend()); // D
word2.clear();
word2.append(*rule1->rhs()); // Q
word2.append(prefix.second, rule2->lhs()->cend()); // E
if (word1 != word2) {
// Reduce both sides; if they still differ, the system is not
// confluent and we can stop immediately.
internal_rewrite(&word1);
internal_rewrite(&word2); if (word1 != word2) {
_confluent = false; return _confluent;
}
}
}
}
} if (_kb->report()) {
REPORT_DEFAULT("checked %llu pairs of overlaps out of %llu\n",
uint64_t(seen),
uint64_t(_active_rules.size())
* uint64_t(_active_rules.size()));
}
} if (_kb->running() && _kb->stopped()) {
// The check was interrupted, so the (non-)confluence established above
// cannot be trusted.
_confluence_known = false;
}
} return _confluent;
}
// KBS_2 from Sims, p77-78 bool knuth_bendix() {
detail::Timer timer; if (_stack.empty() && confluent() && !_kb->stopped()) { // _stack can be non-empty if non-reduced rules were used to define // the KnuthBendix. If _stack is non-empty, then it means that the // rules in _active_rules might not define the system.
REPORT_DEFAULT("the system is confluent already\n"); returntrue;
} elseif (_active_rules.size() >= _kb->_settings._max_rules) {
REPORT_DEFAULT("too many rules\n"); returnfalse;
} // Reduce the rules
_next_rule_it1 = _active_rules.begin(); while (_next_rule_it1 != _active_rules.end() && !_kb->stopped()) { // Copy *_next_rule_it1 and push_stack so that it is not modified by // the call to clear_stack.
LIBSEMIGROUPS_ASSERT((*_next_rule_it1)->lhs()
!= (*_next_rule_it1)->rhs());
push_stack(new_rule(*_next_rule_it1));
++_next_rule_it1;
}
_next_rule_it1 = _active_rules.begin();
size_t nr = 0; while (_next_rule_it1 != _active_rules.cend()
&& _active_rules.size() < _kb->_settings._max_rules
&& !_kb->stopped()) {
Rule const* rule1 = *_next_rule_it1;
_next_rule_it2 = _next_rule_it1;
++_next_rule_it1;
overlap(rule1, rule1); while (_next_rule_it2 != _active_rules.begin() && rule1->active()) {
--_next_rule_it2;
Rule const* rule2 = *_next_rule_it2;
overlap(rule1, rule2);
++nr; if (rule1->active() && rule2->active()) {
++nr;
overlap(rule2, rule1);
}
} if (nr > _kb->_settings._check_confluence_interval) { if (confluent()) { break;
}
nr = 0;
} if (_next_rule_it1 == _active_rules.cend()) {
clear_stack();
}
} // LIBSEMIGROUPS_ASSERT(_stack.empty()); // Seems that the stack can be non-empty here in KnuthBendix 12, 14, 16 // and maybe more bool ret; if (_kb->_settings._max_overlap == POSITIVE_INFINITY
&& _kb->_settings._max_rules == POSITIVE_INFINITY
&& !_kb->stopped()) {
_confluence_known = true;
_confluent = true; for (Rule* rule : _inactive_rules) { delete rule;
}
_inactive_rules.clear();
ret = true;
} else {
ret = false;
}
REPORT_DEFAULT("stopping with active rules = %d, inactive rules = %d, " "rules defined = %d\n",
_active_rules.size(),
_inactive_rules.size(),
_total_rules);
REPORT_VERBOSE_DEFAULT("max stack depth = %d", _max_stack_depth);
REPORT_TIME(timer); return ret;
}
// NOTE(review): the following trailing text is website boilerplate (in
// German) that was accidentally appended to this source file and is not
// part of the program. Translated for reference; it should be removed:
// "The information on this website has been carefully compiled to the
// best of our knowledge. However, neither completeness, nor correctness,
// nor quality of the information provided is guaranteed.
// Remark: the coloured syntax display is still experimental."