Mirror of https://github.com/Jaxan/hybrid-ads.git (synced 2025-04-27 15:07:45 +02:00)
generalised a bit and uses trie now

parent b6033eec4c
commit 69942cd683

5 changed files with 89 additions and 51 deletions
@@ -8,11 +8,8 @@
 using namespace std;
 
 adaptive_distinguishing_sequence::adaptive_distinguishing_sequence(size_t N, size_t d)
-    : CI(N)
-    , depth(d)
-{
-    for(size_t i = 0; i < N; ++i)
-        CI[i] = {i, i};
+    : CI(N), depth(d) {
+    for (size_t i = 0; i < N; ++i) CI[i] = {i, i};
 }
 
 adaptive_distinguishing_sequence create_adaptive_distinguishing_sequence(const result & splitting_tree){
@@ -1,4 +1,5 @@
 #include "seperating_family.hpp"
+#include "trie.hpp"
 
 #include <boost/range/algorithm.hpp>
 #include <boost/range/algorithm_ext/erase.hpp>
@@ -9,48 +10,73 @@
 
 using namespace std;
 
-seperating_family create_seperating_family(const adaptive_distinguishing_sequence & sequence, const seperating_matrix & all_pair_seperating_sequences){
-    seperating_family seperating_family(all_pair_seperating_sequences.size());
+characterization_family create_seperating_family(const adaptive_distinguishing_sequence & sequence,
+                                                 const seperating_matrix & sep_matrix) {
+    const auto N = sequence.CI.size();
+
+    // all words (global/local) for all states
+    vector<trie> suffixes(N);
+
+    // all global separating sequences, which we will add to a state in the end ...
+    trie all_global_separating_words;
+
+    // ... to these particular states
+    vector<bool> state_needs_global_suffixes(N, false);
 
+    // First we accumulate the kind-of-UIOs and the separating words we need. We will do this with a
+    // breadth first search.
     stack<pair<word, reference_wrapper<const adaptive_distinguishing_sequence>>> work;
     work.push({{}, sequence});
 
-    while(!work.empty()){
+    while (!work.empty()) {
         auto word = work.top().first;
         const adaptive_distinguishing_sequence & node = work.top().second;
         work.pop();
 
-        if(node.children.empty()){
-            // add sequence to this leave
-            for(auto && p : node.CI){
+        // On a leaf, we need to add the accumulated word as suffix (this is more or less a UIO).
+        // And, if needed, we also need to augment the set of suffixes (for all pairs).
+        if (node.children.empty()) {
+            for (auto && p : node.CI) {
                 const auto state = p.second;
-                seperating_family[state].push_back(word);
+                suffixes[state].insert(word);
             }
 
             // if the leaf is not a singleton, we need the all_pair seperating seqs
-            for(auto && p : node.CI){
-                for(auto && q : node.CI){
+            for (auto && p : node.CI) {
+                for (auto && q : node.CI) {
                     const auto s = p.second;
                     const auto t = q.second;
-                    if(s == t) continue;
-                    seperating_family[s].push_back(all_pair_seperating_sequences[s][t]);
+                    if (s == t) continue;
+
+                    const auto & sep_word = sep_matrix[s][t];
+
+                    suffixes[s].insert(sep_word);
+                    all_global_separating_words.insert(sep_word);
+                    state_needs_global_suffixes[s] = true;
                 }
             }
 
             continue;
         }
 
-        for(auto && i : node.word)
-            word.push_back(i);
-
-        for(auto && c : node.children)
-            work.push({word, c});
+        // add some work
+        for (auto && i : node.word) word.push_back(i); // extend the word
+        for (auto && c : node.children) work.push({word, c}); // and visit the children with word
     }
 
-    // Remove duplicates
-    for(auto & vec : seperating_family){
-        boost::erase(vec, boost::unique<boost::return_found_end>(boost::sort(vec)));
-    }
+    // Then we flatten them into a characterization family.
+    characterization_family ret(N);
+
+    for (state s = 0; s < N; ++s) {
+        auto & current_suffixes = suffixes[s];
+        ret[s].local_suffixes = flatten(current_suffixes);
+
+        if (state_needs_global_suffixes[s]) {
+            all_global_separating_words.for_each(
+                [&current_suffixes](auto w) { current_suffixes.insert(w); });
+        }
+
+        ret[s].global_suffixes = flatten(current_suffixes);
+    }
 
-    return seperating_family;
+    return ret;
 }
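The comments added in this hunk describe the traversal: walk the adaptive distinguishing sequence from the root, extending the accumulated input word at every inner node, and at a leaf record that word as a (kind-of-UIO) suffix for each state in the leaf; a non-singleton leaf is where the pairwise separating sequences come in. The following standalone sketch shows just that accumulation pattern. The simplified node struct and the example tree are illustrative stand-ins, not the repository's actual types.

#include <cstddef>
#include <iostream>
#include <stack>
#include <utility>
#include <vector>

// Hypothetical, simplified stand-in for adaptive_distinguishing_sequence:
// each node carries an input word and the states it (still) distinguishes.
struct node {
    std::vector<std::size_t> word;    // inputs applied at this node
    std::vector<std::size_t> states;  // states reaching this node
    std::vector<node> children;
};

int main() {
    // incomplete "ADS": input 0 splits {0,1} from {2,3}; input 1 then splits 0 from 1;
    // the leaf {2,3} is not a singleton, so those states would also need pairwise suffixes
    const node root{{0}, {0, 1, 2, 3},
                    {node{{1}, {0, 1}, {node{{}, {0}, {}}, node{{}, {1}, {}}}},
                     node{{}, {2, 3}, {}}}};

    // pair the word accumulated on the path from the root with the node to visit
    std::stack<std::pair<std::vector<std::size_t>, const node *>> work;
    work.push({{}, &root});

    while (!work.empty()) {
        auto word = work.top().first;
        const node * n = work.top().second;
        work.pop();

        if (n->children.empty()) {
            // leaf: the accumulated word is the suffix for every state in this leaf
            for (std::size_t s : n->states) {
                std::cout << "state " << s << " gets suffix:";
                for (std::size_t i : word) std::cout << ' ' << i;
                std::cout << '\n';
            }
            continue;
        }

        // extend the word with this node's inputs and visit the children with it
        for (std::size_t i : n->word) word.push_back(i);
        for (const node & c : n->children) work.push({word, &c});
    }
}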
@@ -4,14 +4,26 @@
 #include "seperating_matrix.hpp"
 #include "types.hpp"
 
-/*
- * Given an (incomplete) adaptive distinguishing sequence and all pair
- * seperating sequences, we can construct a seperating family (as defined
- * in Lee & Yannakakis). If the adaptive distinguishing sequence is complete,
- * then the all pair seperating sequences are not needed.
- */
+/// \brief From the LY algorithm we generate characterization sets (as in the Chow framework)
+/// If the adaptive distinguishing sequence is complete, then we do not need to augment the LY
+/// result. This results in a separating family, which is stronger than a characterization set.
+/// However, if it is not complete, we augment it with sequences from the Wp-method.
 
-using seperating_set = std::vector<word>;
-using seperating_family = std::vector<seperating_set>;
+/// \brief A set (belonging to some state) of characterizing sequences
+/// It contains global_suffixes which should be used for testing whether the state is correct. Once
+/// we know the states make sense, we can test the transitions with the smaller set local_suffixes.
+/// There is some redundancy in this struct, but we have plenty of memory at our disposal.
+/// Note that even the global_suffixes may rely on the state (because of the adaptiveness of the
+/// LY distinguishing sequence).
+struct characterization_set {
+    std::vector<word> global_suffixes;
+    std::vector<word> local_suffixes;
+};
 
-seperating_family create_seperating_family(adaptive_distinguishing_sequence const & sequence, seperating_matrix const & all_pair_seperating_sequences);
+/// \brief A family (indexed by states) of characterizations
+using characterization_family = std::vector<characterization_set>;
+
+/// \brief Creates the characterization family from the results of the LY algorithm
+/// If the sequence is complete, we do not need the separating_matrix
+characterization_family create_seperating_family(const adaptive_distinguishing_sequence & sequence,
+                                                 const seperating_matrix & sep_matrix);
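The doc comments above say how the two suffix sets are meant to be used: global_suffixes to check that a reached state is the right one, and the smaller local_suffixes once the states themselves have been verified (Wp-style). Below is a schematic sketch of that split; tests_for_state, concat, the access_words and middles parameters and the verified flag are illustrative assumptions, not functions from this repository.

#include <cstddef>
#include <vector>

using word = std::vector<std::size_t>;

// Shapes as declared in this header (sketch).
struct characterization_set {
    std::vector<word> global_suffixes;
    std::vector<word> local_suffixes;
};
using characterization_family = std::vector<characterization_set>;

// Hypothetical helper: concatenate prefix ++ middle ++ suffix into one test word.
static word concat(word w, const word & m, const word & s) {
    w.insert(w.end(), m.begin(), m.end());
    w.insert(w.end(), s.begin(), s.end());
    return w;
}

// Schematic Wp-style split: global suffixes identify the reached state; once the
// states have been verified, the smaller local suffixes suffice for the transitions.
std::vector<word> tests_for_state(std::size_t s,
                                  const std::vector<word> & access_words, // assumed transfer sequences
                                  const std::vector<word> & middles,      // e.g. all words up to some length
                                  const characterization_family & fam,
                                  bool states_already_verified) {
    std::vector<word> tests;
    const auto & suffixes = states_already_verified ? fam[s].local_suffixes : fam[s].global_suffixes;
    for (const word & m : middles)
        for (const word & suffix : suffixes)
            tests.push_back(concat(access_words[s], m, suffix));
    return tests;
}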
lib/trie.hpp
@@ -26,13 +26,13 @@
 struct trie {
     /// \brief Inserts a word (given by iterators \p begin and \p end)
     /// \returns true if the element was inserted, false if already there
-    template <typename Iterator> bool insert(Iterator&& begin, Iterator&& end) {
+    template <typename Iterator> bool insert(Iterator && begin, Iterator && end) {
         if (begin == end) return false;
 
         size_t i = *begin++;
         if (i >= branches.size()) branches.resize(i + 1);
 
-        auto& b = branches[i];
+        auto & b = branches[i];
         if (b) return b->insert(begin, end);
 
         b = trie();
@@ -43,27 +43,30 @@ struct trie {
 
     /// \brief Inserts a word given as range \p r
     /// \returns true if the element was inserted, false if already there
-    template <typename Range> bool insert(Range const& r) {
-        return insert(begin(r), end(r));
-    }
+    template <typename Range> bool insert(Range const & r) { return insert(begin(r), end(r)); }
 
-    /// \p function is applied to all words (not to the prefixes)
-    template <typename Fun> void for_each(Fun&& function) const {
+    /// \brief Applies \p function to all words (not to the prefixes)
+    template <typename Fun> void for_each(Fun && function) const {
         std::vector<size_t> word;
         return for_each_impl(std::forward<Fun>(function), word);
     }
 
-private:
-    template <typename Fun>
-    void for_each_impl(Fun&& function, std::vector<size_t>& word) const {
+    /// \brief Empties the complete set
+    void clear() {
+        count = 0;
+        branches.clear();
+    }
+
+private:
+    template <typename Fun> void for_each_impl(Fun && function, std::vector<size_t> & word) const {
         if (count == 0) {
-            const auto& cword = word;
+            const auto & cword = word;
             function(cword); // we don't want function to modify word
             return;
         }
 
         for (size_t i = 0; i < branches.size(); ++i) {
-            auto const& b = branches[i];
+            auto const & b = branches[i];
             if (b) {
                 word.push_back(i);
                 b->for_each_impl(function, word);
@@ -78,4 +81,4 @@ struct trie {
 
 /// \brief Flattens a trie \p t
 /// \returns an array of words (without the prefixes)
-std::vector<std::vector<size_t>> flatten(trie const& t);
+std::vector<std::vector<size_t>> flatten(trie const & t);
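A short usage sketch of the trie interface shown above (insert, for_each, clear and the free function flatten), assuming lib/trie.hpp from this commit is on the include path; the example words are arbitrary.

#include <cstddef>
#include <iostream>
#include <vector>

#include "trie.hpp" // the header changed in this commit

int main() {
    trie t;

    std::vector<size_t> w1 = {1, 2, 3};
    std::vector<size_t> w2 = {1, 2};    // a strict prefix of w1
    std::vector<size_t> w3 = {1, 2, 3}; // duplicate of w1

    t.insert(w1); // newly inserted -> true
    t.insert(w3); // same word again -> false, so duplicates are detected for free
    t.insert(w2); // with the insert shown in this hunk, a prefix of an existing word also reports false

    // for_each visits the maximal words only (not their prefixes)
    t.for_each([](const std::vector<size_t> & word) {
        for (size_t i : word) std::cout << i << ' ';
        std::cout << '\n';
    });

    // flatten gives the same words as a vector of vectors
    std::vector<std::vector<size_t>> words = flatten(t);
    std::cout << words.size() << " word(s) stored\n";

    t.clear(); // new in this commit: empty the trie for reuse
}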
@@ -183,7 +183,7 @@ int main(int argc, char *argv[]) try {
     for(state s = 0; s < machine.graph_size; ++s){
         const auto prefix = transfer_sequences[s];
 
-        for(auto && suffix : seperating_family[s]){
+        for(auto && suffix : seperating_family[s].local_suffixes){
             for(auto && r : all_sequences){
                 print_word(prefix);
                 print_word(r);
@@ -226,7 +226,7 @@ int main(int argc, char *argv[]) try {
     }
 
     using params = uniform_int_distribution<size_t>::param_type;
-    const auto & suffixes = seperating_family[current_state];
+    const auto & suffixes = seperating_family[current_state].local_suffixes;
    const auto & s = suffixes[suffix_selection(generator, params{0, suffixes.size()-1})];
 
     print_word(p);
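The random test loop above reuses one uniform_int_distribution and passes the bounds per call via param_type, since each state has a different number of local suffixes. A minimal standalone sketch of that pattern, with made-up suffix counts standing in for seperating_family[current_state].local_suffixes.size():

#include <cstddef>
#include <iostream>
#include <random>
#include <vector>

int main() {
    std::mt19937 generator(std::random_device{}());
    std::uniform_int_distribution<std::size_t> selection; // bounds supplied per call

    using params = std::uniform_int_distribution<std::size_t>::param_type;

    // hypothetical per-state suffix counts, just for illustration
    std::vector<std::size_t> suffix_counts = {3, 1, 7};

    for (std::size_t state = 0; state < suffix_counts.size(); ++state) {
        // draw an index in [0, count - 1] without constructing a new distribution
        std::size_t index = selection(generator, params{0, suffix_counts[state] - 1});
        std::cout << "state " << state << " -> suffix " << index << '\n';
    }
}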