void PatternConstraint::LoadBin( lem::Stream &bin )
{
 bin.read( &from_marker, sizeof(from_marker) );
 bin.read( &to_marker, sizeof(to_marker) );
 from_coord_id = bin.read_int();
 to_coord_id = bin.read_int();
 bin.read( &constraint_func, sizeof(constraint_func) );
 return;
}
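A serializer producing this layout would have to write the same fields in the same order the loader reads them. The following is only a sketch: it assumes lem::Stream exposes symmetric write() and write_int() methods, which do not appear in this listing.

// Hypothetical counterpart to the loader above; field order mirrors LoadBin.
void PatternConstraint::SaveBin( lem::Stream &bin ) const
{
 bin.write( &from_marker, sizeof(from_marker) );
 bin.write( &to_marker, sizeof(to_marker) );
 bin.write_int( from_coord_id );
 bin.write_int( to_coord_id );
 bin.write( &constraint_func, sizeof(constraint_func) );
}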
Example #2
void SG_calibrator::LoadBin(lem::Stream &bin)
{
    freq_type = bin.read_int();

    if (!bin.eof())
    {
        bin.read(&word, sizeof(word));
        id_class = bin.read_int();
        coords.LoadBin(bin);
        freq = bin.read_int();
    }

    return;
}
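The eof() guard makes everything after freq_type optional on disk: an older, shorter record that stops right after freq_type still loads. A writer for the current, full layout could look like the sketch below; the SaveBin names and the lem::Stream write API are assumptions, not shown in this listing.

// Hypothetical writer for the full SG_calibrator record; the eof() check in
// LoadBin above keeps shorter legacy records readable.
void SG_calibrator::SaveBin(lem::Stream &bin) const
{
    bin.write_int(freq_type);
    bin.write(&word, sizeof(word));
    bin.write_int(id_class);
    coords.SaveBin(bin);
    bin.write_int(freq);
}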
Example #3
void TreeDimension::LoadBin( lem::Stream &bin )
{
    bin.read( &name, sizeof(name) );
    const int n = bin.read_int();
    for( int i=0; i<n; ++i )
        nodes.push_back( new Tree_Node(bin) );

    return;
}
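Because the node count is read back as the loop bound, a matching writer has to store that count before the nodes themselves. A minimal sketch under the same assumed write API (Tree_Node::SaveBin is likewise an assumption):

// Hypothetical mirror of TreeDimension::LoadBin: name, node count, then the nodes.
void TreeDimension::SaveBin( lem::Stream &bin ) const
{
    bin.write( &name, sizeof(name) );
    bin.write_int( (int)nodes.size() );
    for( lem::Container::size_type i=0; i<nodes.size(); ++i )
        nodes[i]->SaveBin(bin);
}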
Example #4
/*****************************************************
 Loads the contents from the specified binary stream.
******************************************************/
void Word_Form::LoadBin( lem::Stream &bin )
{
 bool p=bin.read_bool();
 
 if( p )
  {
   Lexem *m = new Lexem;
   m->LoadBin(bin);
   name = RC_Lexem(m);
  }

 p=bin.read_bool();
 if( p )
  {
   Lexem *m2 = new Lexem;
   m2->LoadBin(bin);
   normalized = RC_Lexem(m2);
  }

// lexem_owner.LoadBin(bin);
// e_list.LoadBin(bin);
 pair.LoadBin(bin);

// bin.read( &tfield,     sizeof(tfield)     );
 bin.read( &entry_key,  sizeof(entry_key)  );
 bin.read( &val,        sizeof(val)        );
 bin.read( &score,      sizeof(score)      );
// bin.read( &icenter,    sizeof(icenter)    );
 bin.read( &origin_pos, sizeof(origin_pos) );

 const int n = bin.read_int();
 alt.reserve(n);
 for( int i=0; i<n; ++i )
  {
   alt.push_back( new Word_Form );
   alt.back()->LoadBin(bin);
  }

 return;
}
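The corresponding writer would emit a presence flag before each optional Lexem and the alternative count before the recursive block. The sketch below mirrors the read order; every name that does not appear in the loader (write_bool, write_int, write, NotNull, the members' SaveBin methods) is an assumption about the lem API.

// Hypothetical mirror of Word_Form::LoadBin; writes fields in the order they are read.
void Word_Form::SaveBin( lem::Stream &bin ) const
{
 bin.write_bool( name.NotNull() );      // presence flag; NotNull() is assumed
 if( name.NotNull() )
  name->SaveBin(bin);

 bin.write_bool( normalized.NotNull() );
 if( normalized.NotNull() )
  normalized->SaveBin(bin);

 pair.SaveBin(bin);

 bin.write( &entry_key,  sizeof(entry_key)  );
 bin.write( &val,        sizeof(val)        );
 bin.write( &score,      sizeof(score)      );
 bin.write( &origin_pos, sizeof(origin_pos) );

 bin.write_int( (int)alt.size() );
 for( lem::Container::size_type i=0; i<alt.size(); ++i )
  alt[i]->SaveBin(bin);
}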
void WordEntries_File::LoadBin(lem::Stream &bin)
{
    ml_ref.LoadBin(bin);

    Delete_Entries();

    entry_pos.clear();

    coord_ref.LoadBin(bin);

    /*
     LEM_CHECKIT_Z( group.empty() );
     const int n_group = bin.read_int();
     for( int i=0; i<n_group; ++i )
      {
       SG_EntryGroup *g = new SG_EntryGroup(bin);
       group.insert( std::make_pair( g->GetKey(), g ) );
      }
    */
    lookup_table.LoadBin(bin);

    /*
    // #if LEM_DEBUGGING==1
     lem::mout->eol();
     for( int kkk=0; kkk<lookup_table.lexem_slot.size(); ++kkk )
      {
       WordEntries_LookUpItem xxx = lookup_table.lexem_slot[kkk];
       lem::mout->printf( "x[%d]=%d,%d ", kkk, xxx.start_index, xxx.len );
      }
     lem::mout->eol();
    // #endif
    */

    ientry.LoadBin(bin);

    u_entry.LoadBin(bin);

    ML_entry_key_list.LoadBin(bin);
    ML_entry_lex_list.LoadBin(bin);
    ML_entry_pre_list.LoadBin(bin);

    bin.read(&max_ml_len, sizeof(max_ml_len));
    bin.read(predefined_entry, sizeof(predefined_entry));

    const int n_entry = bin.read_int();
    entry.resize(n_entry);

    // All mandatory data has been loaded; what follows is optional loading.
    lem::Stream::pos_type epos_begin_pos = 0;
    lem::Stream::pos_type entries_begin_pos = 0;
    lem::Stream::pos_type end_pos = 0;

    bin.read(&epos_begin_pos, sizeof(epos_begin_pos));
    bin.read(&entries_begin_pos, sizeof(entries_begin_pos));
    bin.read(&end_pos, sizeof(end_pos));

    if (lazy_load)
    {
        // Do not load the entries yet.

        entry.Nullify();

        // Read only the list of start positions of each entry.
        bin.seekp(epos_begin_pos);
        entry_pos.LoadBin(bin);

        lem::MCollect< std::pair<int, int> > key_to_index_tmp;
        key_to_index_tmp.LoadBin(bin);
        for (lem::Container::size_type i = 0; i < key_to_index_tmp.size(); ++i)
            key_to_index.insert(key_to_index_tmp[i]);

        lem::MCollect<Lexem> ML_tmp;
        ML_tmp.LoadBin(bin);
        for (lem::Container::size_type i = 0; i < ML_tmp.size(); ++i)
            ML_fronts.insert(ML_tmp[i]);

        ML_tmp.LoadBin(bin);
        for (lem::Container::size_type i = 0; i < ML_tmp.size(); ++i)
            ML_ML.insert(ML_tmp[i]);
    }
    else
    {
        bin.seekp(entries_begin_pos);

        entry_pos.clear();

        // Load the whole list of dictionary entries into memory at once.
        for (int i0 = 0; i0 < n_entry; i0++)
        {
            entry[i0] = new SG_Entry;
            entry[i0]->LoadBin(bin);

            if (entry[i0] != bogus_entry)
                entry[i0]->ReattachToRefs(*sg);
        }

        for (lem::Container::size_type ie = 0; ie < entry.size(); ie++)
        {
            key_to_index.insert(std::make_pair(entry[ientry[ie]]->GetKey(), ie));
        }

        coord_ref.AllAttached();

#if defined SOL_LOADTXT && defined SOL_COMPILER
        for (lem::Container::size_type k = 0; k < entry.size(); ++k)
        {
            const SG_Entry & e = GetWordEntry(k);
            entry_class_lookup.insert(std::make_pair(std::make_pair(lem::to_upper(e.GetName()), e.GetClass()), e.GetKey()));
        }
#endif

        for (lem::Container::size_type i = 0; i < ML_entry_key_list.size(); ++i)
        {
            const int ik = ML_entry_key_list[i];
            const SG_Entry &e = GetEntryByKey(ik);

            for (lem::Container::size_type j = 0; j < e.forms().size(); ++j)
            {
                const Lexem & form = e.forms()[j].name();

                if (form.Count_Lexems() > 1)
                {
                    UCStringSet list;
                    form.Split(list);
                    ML_fronts.insert(list.front());
                    ML_ML.insert(form);
                }
            }
        }
    }

    bin.seekp(end_pos);

    return;
}
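In the lazy_load branch the entries themselves are nullified and only entry_pos (the per-entry stream offsets) plus the key and multi-lexem indexes are kept, so individual entries must be materialized later on demand. A hypothetical helper illustrating that step; its name, the reuse of the open stream, and the exact offset semantics of entry_pos are assumptions, not taken from this listing.

// Hypothetical on-demand loader used after a lazy LoadBin: seeks to the stored
// offset of entry i, loads it, re-attaches it to the grammar, and caches the result.
SG_Entry * WordEntries_File::LazyLoadEntry( int i, lem::Stream &bin )
{
    if( entry[i] == nullptr )
    {
        bin.seekp( entry_pos[i] );
        entry[i] = new SG_Entry;
        entry[i]->LoadBin(bin);
        entry[i]->ReattachToRefs(*sg);
    }
    return entry[i];
}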