Mirror of https://github.com/Jaxan/hybrid-ads.git, synced 2025-04-28 07:27:45 +02:00
Replaces the poly-lambdas with typed lambdas :(

parent 598e72b880
commit 795b79e85d

4 changed files with 9 additions and 6 deletions
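The commit replaces C++14 generic ("polymorphic") lambdas by lambdas with explicitly typed parameters, presumably so the code also builds with compilers that only support C++11. A minimal sketch of the transformation follows; it is not taken from the repository, and the container and element types are illustrative only.

#include <cstddef>
#include <list>
#include <numeric>
#include <vector>

int main() {
    // Two "blocks" of three elements in total, standing in for blocks of states.
    std::vector<std::list<int>> blocks = {{1, 2}, {3}};

    // C++14 generic lambda (the old style): parameter types are deduced.
    //   [](auto && l, auto && r) { return l + r.size(); }

    // C++11 typed lambda (the new style): the same callable with the types written out.
    std::size_t total = std::accumulate(
        blocks.begin(), blocks.end(), std::size_t(0),
        [](std::size_t l, const std::list<int> & r) { return l + r.size(); });

    return total == 3 ? 0 : 1;
}

Note that the changed assert calls below keep the literal 0 as the initial value of accumulate, so the accumulation itself still happens in int; the typed lambda only pins down the parameter types of the binary operation.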
@@ -33,6 +33,6 @@ auto partition_(Iterator b, Iterator e, Fun && function, size_t output_size) {
         ar->splice(ar->end(), elements, current);
     }
 
-    assert(s == accumulate(begin(blocks), end(blocks), 0, [](auto && l, auto && r) { return l + r.size(); }));
+    assert(s == accumulate(begin(blocks), end(blocks), 0, [](size_t l, const list<T> & r) { return l + r.size(); }));
     return blocks;
 }

@@ -49,7 +49,7 @@ result create_splitting_tree(const mealy& g, options opt){
     mt19937 generator(rd());
 
     // Some lambda functions capturing some state, makes the code a bit easier :)
-    const auto add_push_new_block = [&work](auto new_blocks, auto & boom) {
+    const auto add_push_new_block = [&work](list<list<state>> const & new_blocks, splitting_tree& boom) {
         boom.children.assign(new_blocks.size(), splitting_tree(0, boom.depth + 1));
 
         auto i = 0;

@@ -61,9 +61,9 @@ result create_splitting_tree(const mealy& g, options opt){
             work.push(c);
         }
 
-        assert(boom.states.size() == accumulate(begin(boom.children), end(boom.children), 0, [](auto l, auto r) { return l + r.states.size(); }));
+        assert(boom.states.size() == accumulate(begin(boom.children), end(boom.children), 0, [](size_t l, const splitting_tree & r) { return l + r.states.size(); }));
     };
-    const auto is_valid = [N, opt, &g](auto blocks, auto symbol){
+    const auto is_valid = [N, opt, &g](list<list<state>> const & blocks, input symbol){
         if(!opt.check_validity) return true;
 
         for(auto && block : blocks) {

@@ -37,7 +37,7 @@ void write_adaptive_distinguishing_sequence_to_dot(const adaptive_distinguishing
             out << node.word;
         } else {
             vector<state> I(node.CI.size());
-            transform(begin(node.CI), end(node.CI), begin(I), [](auto p){ return p.second; });
+            transform(begin(node.CI), end(node.CI), begin(I), [](const pair<state, state> p){ return p.second; });
             out << "I = " << I;
         }
     }, out);

@@ -121,10 +121,12 @@ int main(int argc, char *argv[]) try {
     const auto transfer_sequences = transfer_sequences_fut.get();
     const auto inputs = inputs_fut.get();
 
-    const auto print_word = [&](auto w){
+    const auto print_word = [&](vector<input> w){
         for(auto && x : w) cout << inputs[x] << ' ';
     };
 
+// This part is commented out, as the polymorphic lambdas are kinda important
+#if 0
     if(statistics){
         const auto adder = [](auto const & x){
             return [&x](auto const & l, auto const & r) { return l + x(r); };

@@ -168,6 +170,7 @@ int main(int argc, char *argv[]) try {
             length += 1;
         }
     }
+#endif
 
     if(streaming){
         time_logger t("outputting all preset tests");
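The statistics block is only disabled with #if 0 rather than retyped, because its adder helper is a lambda that returns another generic lambda, for which no single typed signature fits all call sites. A hypothetical C++11-compatible alternative would be a small function-object template; the names, types and example data below are assumptions for illustration, not code from the repository.

#include <cstddef>
#include <numeric>
#include <vector>

// sum_of wraps a projection and acts as the binary operation for std::accumulate:
// it adds proj(item) to the running total.
template <typename Proj>
struct sum_of {
    Proj proj;

    template <typename T>
    std::size_t operator()(std::size_t acc, const T & item) const {
        return acc + proj(item);
    }
};

// Helper so the projection's type can be deduced (C++11 has no class template
// argument deduction).
template <typename Proj>
sum_of<Proj> make_sum_of(Proj proj) {
    return sum_of<Proj>{proj};
}

int main() {
    // Stand-in for a test suite: two words, five inputs in total.
    std::vector<std::vector<int>> tests = {{1, 2, 3}, {4, 5}};

    // Total number of inputs, using "length of one word" as the projection.
    std::size_t total = std::accumulate(
        tests.begin(), tests.end(), std::size_t(0),
        make_sum_of([](const std::vector<int> & w) { return w.size(); }));

    return total == 5 ? 0 : 1;
}

The call operator pins the accumulator to size_t but stays templated in the element type, which recovers most of the flexibility the generic lambda provided.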