Tags: c++, serialization, boost, graph, stack-overflow

Boost::serialize stack overflow error when storing structure


We built a graph structure, and it works.

We want to use the graph structure in other ways, so we serialize it.

The problem is that when we serialize our graph structure, an error occurs. The error says stack overflow.

We tried to find out what the problem is, but we failed.

Can someone tell us what the problem is, or how we can fix it?

Our code is on GitHub. Please help us! https://github.com/parkkihyeon/17Capstone_project/tree/develop_ssl

void SaveTestData(Adjcency_grpah *i, char *fileName) {
    Adjcency_grpah g(i);
    std::ofstream ofs(fileName);
    boost::archive::text_oarchive oa(ofs);
    oa & BOOST_SERIALIZATION_NVP(g); // <-- problem line!
}

// Reload the saved file
Adjcency_grpah LoadTestData(char *fileName) {
    Adjcency_grpah g;
    std::ifstream ifs(fileName);
    boost::archive::text_iarchive ia(ifs);
    ia & BOOST_SERIALIZATION_NVP(g);

    return g;
}

int main()
{
    clock_t t = clock();

    Adjcency_grpah *g = new Adjcency_grpah();
    vector<State_node*> state;
    vector<Play*> play;
    Insert_Gibo(play);

    try {
        for (int i = 0; i < play.size(); i++) {
            state.clear();
            Play_to_Statenode(play, state, i);
            g->Insert(state);
        }
    }
    catch (exception &e) {
        std::cerr << e.what();
    }

    SaveTestData(g, "G");
    clock_t end_t = clock();

    return 0;
}
#include <boost/serialization/serialization.hpp>
#include <boost/archive/text_iarchive.hpp> // to read archives in text form
#include <boost/archive/text_oarchive.hpp> // to write archives in text form
#include <boost/serialization/vector.hpp> // to serialize std::vector
#include <boost/serialization/deque.hpp> // needed to serialize std::stack (which adapts std::deque)
#include <boost/serialization/stack.hpp> // to serialize std::stack
#include <boost/serialization/map.hpp>
#include <boost/serialization/utility.hpp>

#define WIDTH_SIZE 10
#define HEIGHT_SIZE 11

using namespace std ;

typedef pair<int, int> Cha_pos;
typedef pair<int, int> Pho_pos;

class State_node
{
private:
    vector<State_node*>* next;
    vector<State_node*>* prev;
    pair<Cha_pos, Pho_pos> sum_of_horsepos;

    friend class boost::serialization::access;
    template <typename Archive>
    void serialize(Archive &ar, const unsigned int ver) {
        ar & BOOST_SERIALIZATION_NVP(sum_of_horsepos);
        ar & BOOST_SERIALIZATION_NVP(next);
        ar & BOOST_SERIALIZATION_NVP(prev);
    }
};

State_node::State_node(char data[HEIGHT_SIZE][WIDTH_SIZE]) {
    for (int i = 0; i < WIDTH_SIZE; i++)
        arr[0][i] = NULL;
    for (int i = 0; i < HEIGHT_SIZE; i++)
        arr[i][0] = NULL;
    for (int i = 1; i < HEIGHT_SIZE; i++)
        for (int j = 1; j < WIDTH_SIZE; j++)
            arr[i][j] = data[i][j];

    next = new vector<State_node*>();
    prev = new vector<State_node*>();
};
State_node::State_node() {
    for (int i = 0; i < HEIGHT_SIZE; i++)
        for (int j = 0; j < WIDTH_SIZE; j++)
            arr[i][j] = NULL;
    next = new vector<State_node*>();
    prev = new vector<State_node*>();
};
// Create a child of this node.
void State_node::Addlist_Child(State_node *add_state) {
    this->next->push_back(add_state);
    this->num_of_next++;
};
void State_node::Connect_Parent(State_node *parent_state) {
    this->prev->push_back(parent_state);
    this->num_of_prev++;
};
// Return the n-th child
State_node* State_node::NthCheck_Childnode(int n) {
    return next->at(n);
};
State_node* State_node::NthCheck_Parentnode(int n) {
    return prev->at(n);
};

#define WIDTH_SIZE 10
#define HEIGHT_SIZE 11
#define NUMUNIT 17

using namespace std ;

typedef pair<Cha_pos, Pho_pos> pair_key;
typedef multimap<pair_key, State_node*> hash_4d; // 4-dimensional hash

class Adjcency_grpah
{
private :
    State_node *root ;
    State_node *leaf ;
    hash_4d* hashstate_list[NUMUNIT][NUMUNIT] ;
    stack<State_node *> state_stack ;
    int count = 0;
    friend class boost::serialization::access;
    template <typename Archive>
    void serialize(Archive &ar, const unsigned int ver) {
        ar & BOOST_SERIALIZATION_NVP(root);
        ar & BOOST_SERIALIZATION_NVP(leaf);
        ar & BOOST_SERIALIZATION_NVP(state_stack);
        ar & BOOST_SERIALIZATION_NVP(hashstate_list);
    }
};
Adjcency_grpah::Adjcency_grpah(){

    char Init_jannggi[HEIGHT_SIZE][WIDTH_SIZE] ;
    for(int i=0 ; i< HEIGHT_SIZE ; i++){
        memset(Init_jannggi[i], 'X', sizeof(char)*WIDTH_SIZE);
    }

    root = new State_node(Init_jannggi) ;
    root->Set_numUnit(0,0) ;

    //node_list.push_back(root) ;
    Init_hashtable();
    PushList_Hashtable(root) ;

    leaf = NULL ;
}
// Needs fixing --------------------------------------------------------------------------------
// Deep-copy constructor for serialization
Adjcency_grpah::Adjcency_grpah(Adjcency_grpah &graph) {
    root = graph.root;
    memcpy(hashstate_list, graph.hashstate_list, sizeof(graph.hashstate_list));
    memcpy(&state_stack, &graph.state_stack, sizeof(graph.state_stack));
    leaf = graph.leaf;
}
// Deep-copy constructor for serialization
Adjcency_grpah::Adjcency_grpah(Adjcency_grpah *graph) {
    root = graph->root;
    memcpy(hashstate_list, graph->hashstate_list, sizeof(graph->hashstate_list));
    memcpy(&state_stack, &graph->state_stack, sizeof(graph->state_stack));
    leaf = graph->leaf;
}
//----------------------------------------------------------------------------------------
void Adjcency_grpah::Init_hashtable() {
    for(int i=0 ; i < NUMUNIT ; i++){
        for (int j = 0; j < NUMUNIT; j++) {
            hashstate_list[i][j] = new hash_4d();
        }
    }
}

void Adjcency_grpah::PushList_Hashtable(State_node* state){
    int cho = state->Getcho();
    int han = state->Gethan();
    int cha_y ; int cha_x ; int pho_y ; int pho_x ;
    Set_4Dhashdata(cha_y, cha_x, pho_y, pho_x, state);
    hashstate_list[cho][han]->insert(hash_4d::value_type(pair_key(Cha_pos(cha_y,cha_x), Pho_pos(pho_y, pho_x)), state));
}


void Adjcency_grpah::Insert(vector<State_node*> state){
    State_node *now_state = root ;

    for(int index = 0 ; index < state.size() ; index++){
        State_node* add_state = state.at(index) ;

        int childnode = Is_Have_childnode(now_state,add_state) ;

        // If an identical child already exists, just move to it.
        if(childnode >= 0){
            now_state = now_state->NthCheck_Childnode(childnode) ;
        }
        else {
            // If no identical child exists, but the node already exists somewhere in the graph, link that node as next.
            State_node* check_node = Is_In_The_List_State(add_state) ;
            if(check_node){
                now_state->Addlist_Child(check_node) ;
                check_node->Connect_Parent(now_state) ;
                now_state = check_node ;
            }
            else {
                PushList_Hashtable(add_state) ;
                add_state->Set_sequence_node(count++);
                now_state->Addlist_Child(add_state) ;
                add_state->Connect_Parent(now_state) ;
                add_state->Set_Stateorder(now_state->Getnext()->size()) ;
                now_state = add_state ;
            }
        }
        state_stack.push(now_state) ;
    }
    leaf = now_state ;
}
void Adjcency_grpah::Set_4Dhashdata(int &cha_y, int &cha_x, int &pho_y, int &pho_x, State_node* state) {
    cha_y = state->GetHorse_pos().first.first;
    cha_x = state->GetHorse_pos().first.second;
    pho_y = state->GetHorse_pos().second.first;
    pho_x = state->GetHorse_pos().second.second;
}

State_node* Adjcency_grpah::getRoot(){
    return root ;
}

State_node* Adjcency_grpah::getLeaf(){
    return leaf ;
}

// Check whether any child of the current node is identical to the state to add.
int Adjcency_grpah::Is_Have_childnode(State_node* sub_root, State_node* state) {
    for (int i = 0; i<sub_root->Getnumnext(); i++)
        if (!Diff_State(sub_root->NthCheck_Childnode(i), state))
            return i;
    return -1;
}

int Adjcency_grpah::Direction_parentnode(State_node* sub_node) {
    State_node* temp = state_stack.top();
    state_stack.pop();
    for (int i = 0; i<sub_node->Getnumprev(); i++) {
        if (!Diff_State(sub_node->NthCheck_Parentnode(i), temp))
            return i;
    }
    return -1;
}

// Check whether this state already exists in the graph
State_node* Adjcency_grpah::Is_In_The_List_State(State_node *state){

    int cho = state->Getcho();
    int han = state->Gethan();
    int cha_y;  int cha_x;  int pho_y;  int pho_x;
    Set_4Dhashdata(cha_y, cha_x, pho_y, pho_x, state);

    hash_4d *m = hashstate_list[cho][han];
    hash_4d::iterator itCur;
    pair<hash_4d::iterator, hash_4d::iterator> it_pair;
    it_pair = m->equal_range(pair_key(Cha_pos(cha_y, cha_x), Pho_pos(pho_y, pho_x)));
    //cout << horse_x << " " << horse_y << endl;
    for (itCur = it_pair.first ; itCur != it_pair.second ; itCur++) {
        if (!Diff_State(itCur->second, state))
            return itCur->second;
    }

    return NULL ;
}

// Check whether two states are different.
bool Adjcency_grpah::Diff_State(State_node *stateA, State_node *stateB){
    for(int i=1 ; i< HEIGHT_SIZE; i++)
        for(int j=1 ; j< WIDTH_SIZE ; j++){
            if ( stateA->arr[i][j] != stateB->arr[i][j] )
                return true ;
        }
    return false ;
}

More detailed code is in our GitHub repository. Thanks!


Solution

  • This problem is caused by the fact that you are using a general-purpose object serializer on a deep graph.

    The way the general purpose traversal happens to be implemented is essentially (tree) recursive, meaning that it may run out of stack on very deep graphs.

    Cycles are not actually a problem for Boost Serialization (because of Object Tracking).

    You can get around the uncontrollable recursion by serializing a flat list of e.g. all graph nodes (vertices) first, and the relationships only later. That way you control how the relations are traversed and can prevent undue recursion (a sketch of this idea follows at the end of this answer).

    Another approach might be to replace the graph traversal logic in Boost Serialization with something that allocates the nesting stack on the free store, rather than implicitly on the call stack (see e.g. http://blog.moertel.com/posts/2013-05-11-recursive-to-iterative.html, and the second sketch below).

    You could file it as a PR. For the reason you showed, explicit stack allocation is strictly better for general purpose traversal.

    I'd probably go for the workaround at first, myself.
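
    Below is a minimal sketch of the flat-list workaround. It is not the Adjcency_grpah API from the question: Node and Graph here are hypothetical stand-ins, and the assumption is that every vertex is reachable from one flat node table. Each node's serialize writes only flat per-node data; the edges are written afterwards as index pairs, so the archive never recurses pointer-to-pointer through the graph.

    #include <boost/archive/text_iarchive.hpp>
    #include <boost/archive/text_oarchive.hpp>
    #include <boost/serialization/serialization.hpp>
    #include <boost/serialization/split_member.hpp>
    #include <boost/serialization/utility.hpp>
    #include <boost/serialization/vector.hpp>
    #include <map>
    #include <utility>
    #include <vector>

    struct Node {
        int payload = 0;
        std::vector<Node*> next;               // edges: deliberately NOT serialized here
        template <class Archive>
        void serialize(Archive& ar, const unsigned int) {
            ar & payload;                      // flat per-node data only, no recursion
        }
    };

    struct Graph {
        std::vector<Node*> nodes;              // phase 1: the flat list of all vertices

        template <class Archive>
        void save(Archive& ar, const unsigned int) const {
            ar & nodes;                        // pointers are tracked, each Node written once
            // phase 2: edges as (from, to) indices into `nodes`
            std::map<const Node*, std::size_t> index;
            for (std::size_t i = 0; i < nodes.size(); ++i) index[nodes[i]] = i;
            std::vector<std::pair<std::size_t, std::size_t>> edges;
            for (std::size_t i = 0; i < nodes.size(); ++i)
                for (const Node* to : nodes[i]->next)
                    edges.emplace_back(i, index.at(to));
            ar & edges;
        }
        template <class Archive>
        void load(Archive& ar, const unsigned int) {
            ar & nodes;                        // recreate all vertices first
            std::vector<std::pair<std::size_t, std::size_t>> edges;
            ar & edges;
            for (const auto& e : edges)        // then rebuild the pointer links
                nodes[e.first]->next.push_back(nodes[e.second]);
        }
        BOOST_SERIALIZATION_SPLIT_MEMBER()
    };

    Applied to the code in the question, this would mean keeping next, prev and hashstate_list out of State_node::serialize, serializing the nodes from one flat container, and writing the parent/child links as index pairs in Adjcency_grpah's own save/load.

    The second sketch only illustrates the recursion-to-iteration transformation from the linked article on a plain depth-first visit; it is not Boost Serialization's actual traversal code, just the general technique.

    #include <stack>
    #include <unordered_set>
    #include <vector>

    struct Vertex { std::vector<Vertex*> next; };

    void visit_iteratively(Vertex* root) {
        std::stack<Vertex*> pending;               // nesting state lives on the free store
        std::unordered_set<Vertex*> seen;          // guards against cycles and shared nodes
        pending.push(root);
        while (!pending.empty()) {
            Vertex* v = pending.top();
            pending.pop();
            if (!seen.insert(v).second) continue;  // already visited
            // ... process v here ...
            for (Vertex* child : v->next) pending.push(child);
        }
    }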