Esempio n. 1
0
/**
 * Runs the Locate operation: applies the compiled grammar 'fst2_name' to the
 * tokenized text 'text_cod', writing the matches into <dynamicDir>concord.ind
 * and the match statistics into <dynamicDir>concord.n.
 *
 * Returns 1 on success, 0 on failure. Every error path releases all the
 * resources acquired up to that point.
 */
int locate_pattern(const char* text_cod,const char* tokens,const char* fst2_name,const char* dlf,const char* dlc,const char* err,
                   const char* alphabet,MatchPolicy match_policy,OutputPolicy output_policy,
                   Encoding encoding_output,int bom_output,int mask_encoding_compatibility_input,
                   const char* dynamicDir,TokenizationPolicy tokenization_policy,
                   SpacePolicy space_policy,int search_limit,const char* morpho_dic_list,
                   AmbiguousOutputPolicy ambiguous_output_policy,
                   VariableErrorPolicy variable_error_policy,int protect_dic_chars,
                   int is_korean,int max_count_call,int max_count_call_warning,
                   char* arabic_rules,int tilde_negation_operator,int useLocateCache,int allow_trace) {

    U_FILE* out;
    U_FILE* info;
    struct locate_parameters* p=new_locate_parameters();
    /* The .cod text is memory-mapped and viewed as an array of token indices */
    p->text_cod=af_open_mapfile(text_cod,MAPFILE_OPTION_READ,0);
    p->buffer=(int*)af_get_mapfile_pointer(p->text_cod);
    long text_size=(long)af_get_mapfile_size(p->text_cod)/sizeof(int);
    p->buffer_size=(int)text_size;
    p->tilde_negation_operator=tilde_negation_operator;
    p->useLocateCache=useLocateCache;
    /* -1 means "no explicit limit": default both counters to the text size */
    if (max_count_call == -1) {
        max_count_call = (int)text_size;
    }
    if (max_count_call_warning == -1) {
        max_count_call_warning = (int)text_size;
    }
    p->match_policy=match_policy;
    p->tokenization_policy=tokenization_policy;
    p->space_policy=space_policy;
    p->output_policy=output_policy;
    p->search_limit=search_limit;
    p->ambiguous_output_policy=ambiguous_output_policy;
    p->variable_error_policy=variable_error_policy;
    p->protect_dic_chars=protect_dic_chars;
    p->mask_encoding_compatibility_input = mask_encoding_compatibility_input;
    p->max_count_call = max_count_call;
    p->max_count_call_warning = max_count_call_warning;
    p->token_filename = tokens;
    /* Output and auxiliary file names are built inside the dynamic directory.
     * NOTE(review): strcpy/strcat assume dynamicDir plus the suffix fits in
     * FILENAME_MAX — confirm callers guarantee this. */
    char concord[FILENAME_MAX];
    char concord_info[FILENAME_MAX];

    strcpy(concord,dynamicDir);
    strcat(concord,"concord.ind");

    strcpy(concord_info,dynamicDir);
    strcat(concord_info,"concord.n");

    char morpho_bin[FILENAME_MAX];
    strcpy(morpho_bin,dynamicDir);
    strcat(morpho_bin,"morpho.bin");
    if (arabic_rules!=NULL && arabic_rules[0]!='\0') {
        load_arabic_typo_rules(arabic_rules,&(p->arabic));
    }
    out=u_fopen_versatile_encoding(encoding_output,bom_output,mask_encoding_compatibility_input,concord,U_WRITE);
    if (out==NULL) {
        error("Cannot write %s\n",concord);
        af_release_mapfile_pointer(p->text_cod,p->buffer);
        af_close_mapfile(p->text_cod);
        free_stack_unichar(p->stack);
        free_locate_parameters(p);
        /* FIX: do not call u_fclose(out) here — 'out' is NULL on this path */
        return 0;
    }
    /* A missing concord.n is only a warning: the run continues without it */
    info=u_fopen_versatile_encoding(encoding_output,bom_output,mask_encoding_compatibility_input,concord_info,U_WRITE);
    if (info==NULL) {
        error("Cannot write %s\n",concord_info);
    }
    /* First line of concord.ind encodes the output policy used */
    switch(output_policy) {
    case IGNORE_OUTPUTS:
        u_fprintf(out,"#I\n");
        break;
    case MERGE_OUTPUTS:
        u_fprintf(out,"#M\n");
        break;
    case REPLACE_OUTPUTS:
        u_fprintf(out,"#R\n");
        break;
    }
    if (alphabet!=NULL && alphabet[0]!='\0') {
        u_printf("Loading alphabet...\n");
        p->alphabet=load_alphabet(alphabet,is_korean);
        if (p->alphabet==NULL) {
            error("Cannot load alphabet file %s\n",alphabet);
            af_release_mapfile_pointer(p->text_cod,p->buffer);
            af_close_mapfile(p->text_cod);
            free_stack_unichar(p->stack);
            free_locate_parameters(p);
            if (info!=NULL) u_fclose(info);
            u_fclose(out);
            return 0;
        }
    }
    /* Semantic codes are collected from the simple (dlf) and compound (dlc)
     * word dictionaries */
    struct string_hash* semantic_codes=new_string_hash();
    extract_semantic_codes(dlf,semantic_codes);
    extract_semantic_codes(dlc,semantic_codes);

    if (is_cancelling_requested() != 0) {
        error("user cancel request.\n");
        free_alphabet(p->alphabet);
        free_string_hash(semantic_codes);
        af_release_mapfile_pointer(p->text_cod,p->buffer);
        af_close_mapfile(p->text_cod);
        free_stack_unichar(p->stack);
        free_locate_parameters(p);
        if (info!=NULL) u_fclose(info);
        u_fclose(out);
        return 0;
    }

    u_printf("Loading fst2...\n");
    struct FST2_free_info fst2load_free;
    Fst2* fst2load=load_abstract_fst2(fst2_name,1,&fst2load_free);
    if (fst2load==NULL) {
        error("Cannot load grammar %s\n",fst2_name);
        free_alphabet(p->alphabet);
        free_string_hash(semantic_codes);
        af_release_mapfile_pointer(p->text_cod,p->buffer);
        af_close_mapfile(p->text_cod);
        free_stack_unichar(p->stack);
        free_locate_parameters(p);
        if (info!=NULL) u_fclose(info);
        u_fclose(out);
        return 0;
    }

    Abstract_allocator locate_abstract_allocator=create_abstract_allocator("locate_pattern",AllocatorCreationFlagAutoFreePrefered);


    /* The fst2 is cloned into the dedicated allocator so the loaded image can
     * be released immediately */
    p->fst2=new_Fst2_clone(fst2load,locate_abstract_allocator);
    free_abstract_Fst2(fst2load,&fst2load_free);

    if (is_cancelling_requested() != 0) {
        error("User cancel request..\n");
        free_alphabet(p->alphabet);
        free_string_hash(semantic_codes);
        free_Fst2(p->fst2,locate_abstract_allocator);
        close_abstract_allocator(locate_abstract_allocator);
        af_release_mapfile_pointer(p->text_cod,p->buffer);
        af_close_mapfile(p->text_cod);
        free_stack_unichar(p->stack);
        free_locate_parameters(p);
        if (info!=NULL) u_fclose(info);
        u_fclose(out);
        return 0;
    }

    p->tags=p->fst2->tags;
#ifdef TRE_WCHAR
    p->filters=new_FilterSet(p->fst2,p->alphabet);
    if (p->filters==NULL) {
        error("Cannot compile filter(s)\n");
        free_alphabet(p->alphabet);
        free_string_hash(semantic_codes);
        free_Fst2(p->fst2,locate_abstract_allocator);
        close_abstract_allocator(locate_abstract_allocator);
        free_stack_unichar(p->stack);
        free_locate_parameters(p);
        af_release_mapfile_pointer(p->text_cod,p->buffer);
        af_close_mapfile(p->text_cod);
        if (info!=NULL) u_fclose(info);
        u_fclose(out);
        return 0;
    }
#endif
    u_printf("Loading token list...\n");
    int n_text_tokens=0;

    p->tokens=load_text_tokens_hash(tokens,mask_encoding_compatibility_input,&(p->SENTENCE),&(p->STOP),&n_text_tokens);
    if (p->tokens==NULL) {
        error("Cannot load token list %s\n",tokens);
        free_alphabet(p->alphabet);
        free_string_hash(semantic_codes);
        free_Fst2(p->fst2,locate_abstract_allocator);
        close_abstract_allocator(locate_abstract_allocator);
        free_locate_parameters(p);
        af_release_mapfile_pointer(p->text_cod,p->buffer);
        af_close_mapfile(p->text_cod);
        if (info!=NULL) u_fclose(info);
        u_fclose(out);
        return 0;
    }
    Abstract_allocator locate_work_abstract_allocator = locate_abstract_allocator;

    /* One cache slot per token.
     * FIX: check the allocation BEFORE memset — memset(NULL,...) is undefined
     * behavior when malloc_cb fails (the original cleared the buffer first). */
    p->match_cache=(LocateCache*)malloc_cb(p->tokens->size * sizeof(LocateCache),locate_work_abstract_allocator);
    if (p->match_cache==NULL) {
        fatal_alloc_error("locate_pattern");
    }
    memset(p->match_cache,0,p->tokens->size * sizeof(LocateCache));

#ifdef TRE_WCHAR
    p->filter_match_index=new_FilterMatchIndex(p->filters,p->tokens);
    if (p->filter_match_index==NULL) {
        error("Cannot optimize filter(s)\n");
        free_alphabet(p->alphabet);
        free_string_hash(semantic_codes);
        free_string_hash(p->tokens);
        close_abstract_allocator(locate_abstract_allocator);
        free_locate_parameters(p);
        af_release_mapfile_pointer(p->text_cod,p->buffer);
        af_close_mapfile(p->text_cod);
        if (info!=NULL) u_fclose(info);
        u_fclose(out);
        return 0;
    }
#endif

    if (allow_trace!=0) {
        open_locate_trace(p,&p->fnc_locate_trace_step,&p->private_param_locate_trace);
    }
    extract_semantic_codes_from_tokens(p->tokens,semantic_codes,locate_abstract_allocator);
    u_printf("Loading morphological dictionaries...\n");
    load_morphological_dictionaries(morpho_dic_list,p,morpho_bin);
    extract_semantic_codes_from_morpho_dics(p->morpho_dic_inf,p->n_morpho_dics,semantic_codes,locate_abstract_allocator);
    p->token_control=(unsigned char*)malloc(n_text_tokens*sizeof(unsigned char));
    if (p->token_control==NULL) {
        fatal_alloc_error("locate_pattern");
    }
    p->matching_patterns=(struct bit_array**)malloc(n_text_tokens*sizeof(struct bit_array*));
    if (p->matching_patterns==NULL) {
        fatal_alloc_error("locate_pattern");
    }
    for (int i=0; i<n_text_tokens; i++) {
        p->token_control[i]=0;
        p->matching_patterns[i]=NULL;
    }
    compute_token_controls(p->alphabet,err,p);
    int number_of_patterns,is_DIC,is_CDIC,is_SDIC;
    p->pattern_tree_root=new_pattern_node(locate_abstract_allocator);
    u_printf("Computing fst2 tags...\n");
    process_tags(&number_of_patterns,semantic_codes,&is_DIC,&is_CDIC,&is_SDIC,p,locate_abstract_allocator);
    p->current_compound_pattern=number_of_patterns;
    p->DLC_tree=new_DLC_tree(p->tokens->size);
    struct lemma_node* root=new_lemma_node();
    u_printf("Loading dlf...\n");
    load_dic_for_locate(dlf,mask_encoding_compatibility_input,p->alphabet,number_of_patterns,is_DIC,is_CDIC,root,p);
    u_printf("Loading dlc...\n");
    load_dic_for_locate(dlc,mask_encoding_compatibility_input,p->alphabet,number_of_patterns,is_DIC,is_CDIC,root,p);
    /* We look if tag tokens like "{today,.ADV}" verify some patterns */
    check_patterns_for_tag_tokens(p->alphabet,number_of_patterns,root,p,locate_abstract_allocator);
    u_printf("Optimizing fst2 pattern tags...\n");
    optimize_pattern_tags(p->alphabet,root,p,locate_abstract_allocator);
    u_printf("Optimizing compound word dictionary...\n");
    optimize_DLC(p->DLC_tree);
    free_string_hash(semantic_codes);
    int nb_input_variable=0;
    p->input_variables=new_Variables(p->fst2->input_variables,&nb_input_variable);
    p->output_variables=new_OutputVariables(p->fst2->output_variables,&p->nb_output_variables);


    /* A second allocator tuned for objects that are allocated/freed very
     * often during the match loop */
    Abstract_allocator locate_recycle_abstract_allocator=NULL;
    locate_recycle_abstract_allocator=create_abstract_allocator("locate_pattern_recycle",
                                      AllocatorFreeOnlyAtAllocatorDelete|AllocatorTipOftenRecycledObject,
                                      get_prefered_allocator_item_size_for_nb_variable(nb_input_variable));

    u_printf("Optimizing fst2...\n");
    p->optimized_states=build_optimized_fst2_states(p->input_variables,p->output_variables,p->fst2,locate_abstract_allocator);
    if (is_korean) {
        p->korean=new Korean(p->alphabet);
        p->jamo_tags=create_jamo_tags(p->korean,p->tokens);
    }
    p->failfast=new_bit_array(n_text_tokens,ONE_BIT);

    u_printf("Working...\n");
    p->prv_alloc=locate_work_abstract_allocator;
    p->prv_alloc_recycle=locate_recycle_abstract_allocator;
    /* The actual pattern matching happens here */
    launch_locate(out,text_size,info,p);
    if (allow_trace!=0) {
        close_locate_trace(p,p->fnc_locate_trace_step,p->private_param_locate_trace);
    }
    free_bit_array(p->failfast);
    free_Variables(p->input_variables);
    free_OutputVariables(p->output_variables);
    af_release_mapfile_pointer(p->text_cod,p->buffer);
    af_close_mapfile(p->text_cod);
    if (info!=NULL) u_fclose(info);
    u_fclose(out);

    if (p->match_cache!=NULL) {
        for (int i=0; i<p->tokens->size; i++) {
            free_LocateCache(p->match_cache[i],locate_work_abstract_allocator);
        }
        free_cb(p->match_cache,locate_work_abstract_allocator);
    }
    /* When the allocator auto-frees its items on close, the per-item frees
     * below are skipped */
    int free_abstract_allocator_item=(get_allocator_cb_flag(locate_abstract_allocator) & AllocatorGetFlagAutoFreePresent) ? 0 : 1;

    if (free_abstract_allocator_item) {
        free_optimized_states(p->optimized_states,p->fst2->number_of_states,locate_abstract_allocator);
    }
    free_stack_unichar(p->stack);
    /** Too long to free the DLC tree if it is big
     * free_DLC_tree(p->DLC_tree);
     */
    if (free_abstract_allocator_item) {
        free_pattern_node(p->pattern_tree_root,locate_abstract_allocator);
        free_Fst2(p->fst2,locate_abstract_allocator);
        free_list_int(p->tag_token_list,locate_abstract_allocator);
    }
    close_abstract_allocator(locate_abstract_allocator);
    close_abstract_allocator(locate_recycle_abstract_allocator);
    locate_recycle_abstract_allocator=locate_abstract_allocator=NULL;

    /* We don't free 'parameters->tags' because it was just a link on 'parameters->fst2->tags' */
    free_alphabet(p->alphabet);
    if (p->korean!=NULL) {
        delete p->korean;
    }
    if (p->jamo_tags!=NULL) {
        /* jamo tags must be freed before tokens, because we need to know how
         * many jamo tags there are, and this number is the number of tokens */
        for (int i=0; i<p->tokens->size; i++) {
            free(p->jamo_tags[i]);
        }
        free(p->jamo_tags);
    }
    free_string_hash(p->tokens);
    free_lemma_node(root);
    free(p->token_control);
    for (int i=0; i<n_text_tokens; i++) {
        free_bit_array(p->matching_patterns[i]);
    }
    free(p->matching_patterns);
#ifdef TRE_WCHAR
    free_FilterSet(p->filters);
    free_FilterMatchIndex(p->filter_match_index);
#endif
    for (int i=0; i<p->n_morpho_dics; i++) {
        free_abstract_INF(p->morpho_dic_inf[i],&(p->morpho_dic_inf_free[i]));
        free_abstract_BIN(p->morpho_dic_bin[i],&(p->morpho_dic_bin_free[i]));
    }
    free(p->morpho_dic_inf);
    free(p->morpho_dic_inf_free);
    free(p->morpho_dic_bin);
    free(p->morpho_dic_bin_free);
#if (defined(UNITEX_LIBRARY) || defined(UNITEX_RELEASE_MEMORY_AT_EXIT))
    free_DLC_tree(p->DLC_tree);
#endif
    free_locate_parameters(p);
    u_printf("Done.\n");
    return 1;
}
Esempio n. 2
0
//! Read a Damsel-format file into MOAB: attaches and populates a damsel
//! model for 'filename', pulls the entity/collection/tag/container infos,
//! creates MOAB-side tags, then instantiates entities (vertices first, then
//! non-vertices). The parallel-partition path and collection processing are
//! currently commented out.
//! NOTE(review): file_set, subset_list and file_id_tag are accepted but not
//! used by the visible code — confirm whether that is intentional.
ErrorCode ReadDamsel::load_file( const char* filename, 
                                 const EntityHandle* file_set, 
                                 const FileOptions& opts,
                                 const ReaderIface::SubsetList* subset_list,
                                 const Tag* file_id_tag )
{
  ErrorCode rval;
 
  rval = parse_options(opts, nativeParallel);
  if (MB_SUCCESS != rval)
    return rval;

    // initialize damsel
  dU.dmslLib = DMSLlib_init();
  
    // create a damsel model
    // handle width must match MOAB's EntityHandle so file-side handles map 1:1
  dU.dmslModel = DMSLmodel_create(sizeof(EntityHandle) == 8 ? DAMSEL_HANDLE_TYPE_HANDLE64 : 
                                  DAMSEL_HANDLE_TYPE_HANDLE32);
  
    // model attach - need model id from make model, filename
#ifdef USE_MPI
  MPI_Comm comm = MPI_COMM_WORLD;
  if (nativeParallel) {
    comm = myPcomm->proc_config().proc_comm();
  }
#endif

    // NOTE(review): 'comm' is only declared when USE_MPI is defined, yet it is
    // used unconditionally below — this cannot compile in a serial (non-MPI)
    // build. Confirm whether this translation unit is MPI-only.
  damsel_err_t err;
  err = DMSLmodel_attach(dU.dmslModel, filename, comm, NULL);
  CHK_DMSL_ERR(err, "DMSLmodel_attach failed.");
  err = DMSLmodel_populate(dU.dmslModel);
  CHK_DMSL_ERR(err, "DMSLmodel_populate failed.");
  
    // STEP 0: GET COLLECTION, TAG, ENTITY INFOS FOR GLOBAL MODEL
  int num_containers = 0, num_tag_infos = 0, num_ent_infos = 0;
  DMSLmodel_get_tuple_count(dU.dmslModel, &num_containers, &num_tag_infos);
  num_ent_infos = DMSLmodel_get_entity_count(dU.dmslModel);
  int num_coll_infos = DMSLmodel_get_collection_count(dU.dmslModel);
    // NOTE(review): 'err' was last set by DMSLmodel_populate above;
    // DMSLmodel_get_collection_count returns its result directly, so this
    // macro re-checks a stale value. The -1 check below is the real guard,
    // but it does not cover num_coll_infos — verify against the damsel API.
  CHK_DMSL_ERR(err, "DMSLmodel_get_collection_count failed.");
  if (-1 == num_containers || -1 == num_tag_infos || -1 == num_ent_infos) 
    CHK_MB_ERR(MB_FAILURE, "Bad count for containers/tags/ents.");

    // NOTE(review): &vec[0] on an empty vector is undefined — assumes all
    // four counts are > 0 for a valid file; confirm.
  std::vector<damsel_entity_buf_type> ent_infos(num_ent_infos);
  std::vector<damsel_collection_buf_type> coll_infos(num_coll_infos);
  std::vector<damsel_tag_buf_type> tag_infos(num_tag_infos);
  std::vector<damsel_container_buf_type> cont_infos(num_containers);
  err = DMSLmodel_get_entity_infos(dU.dmslModel, &ent_infos[0]);
  CHK_DMSL_ERR(err, "Failure getting entity infos.");
  err = DMSLmodel_get_collection_infos(dU.dmslModel, &coll_infos[0]);
  CHK_DMSL_ERR(err, "Failure getting collection infos.");
  err = DMSLmodel_get_tag_infos(dU.dmslModel, &tag_infos[0]);
  CHK_DMSL_ERR(err, "Failure getting tag infos.");
  err = DMSLmodel_get_container_infos(dU.dmslModel, &cont_infos[0]);
  CHK_DMSL_ERR(err, "Failure getting container infos.");

    // create MOAB-side tags for all damsel tags except pre-defined ones
  rval = process_tags(tag_infos);
  CHK_MB_ERR(rval, "Error processing tags.");
  
/*
  
  if (nativeParallel) {
      // STEP 1: GET COLLECTION(S) REPRESENTING PARTITION: 
      // input: tag name, optionally value; 
      // output: container with file-side handles of collections satisfying those criteria
      // - get all handles/values for tag
      // - select handles matching criteria for tag value (will be collection handles)
    std::string partn_tag_name("PARALLEL_PARTITION");
    damsel_handle partn_tag = DMSLselect_tag_by_name(dU.dmslModel, partn_tag_name.c_str());
      // get all the parts with that tag regardless of value
    damsel_container part_handles = DMSLselect_handles_with_values(dU.dmslModel, partn_tag);

      // STEP 2: GET HANDLES FOR TAGS WE NEED FOR THIS READER:
      // - "SET_CHARACTERISTIC"
    damsel_handle setchar_tag = DMSLselect_tag_by_name(dU.dmslModel, "SET_CHARACTERISTIC");
      // - "PARENT_LIST"
      //damsel_handle plist_tag = DMSLselect_tag_by_name(dU.dmslModel, "PARENT_LIST");
      // - "CHILD_LIST"
      //damsel_handle clist_tag = DMSLselect_tag_by_name(dU.dmslModel, "CHILD_LIST");

      // STEP 3: GET VALUES FOR "SET_CHARACTERISTIC" TAG ON PARTITION COLLECTIONS,
      //         GET VECTOR- OR SET-TYPE FLAGS FOR PARTITION COLLECTIONS
      // (gives tracking flag for moab)
    int num_handles = DMSLcontainer_count(part_handles);
    std::vector<unsigned> char_tagvals(num_handles);
      // map the set chars
    err = DMSLmodel_map_tag(&char_tagvals[0], part_handles, &setchar_tag);
    CHK_DMSL_ERR(err, "Problem calling DMSLmodel_map_tag");
    
      // execute the transfer
    err = DMSLmodel_transfer_sync(dU.dmslModel, DAMSEL_TRANSFER_TYPE_READ);
    CHK_DMSL_ERR(err, "Problem calling DMSLmodel_transfer_sync");

      // STEP 4: READ/PROCESS PARTITION COLLECTION(S)
      // decide the handles I am responsible using round-robin for now
    // - GET TYPE, CONTENTS OF COLLECTION CONTENTS CONTAINER
    // - allocate moab-side container (using count from container)
    // - MAP storage TO CONTAINER 
    // - EXECUTE
    // ==> have list of all handles (entities + collections) represented on this proc

    int tmp_num = num_handles / proc_size, extra = num_handles % proc_size;
    if (extra) tmp_num++;
    int my_num_handles = tmp_num;
    if (proc_rank >= extra) my_num_handles--;
    int first_ind = std::min(proc_rank,extra) * tmp_num + 
        std::max(proc_rank-extra,0) * (tmp_num-1);

      // - create moab entity sets for partition collection(s)
    EntityHandle start_handle;
    rval = readMeshIface->create_entity_sets(my_num_handles, &char_tagvals[first_ind], 0, start_handle);
    CHK_MB_ERR(rval, "Problem creating entity sets.");
  }
  else {

*/
      // initialize just by entity; each call to process_ent_info will:
      // a. create moab-side representation to read into
      // b. map those handles to damsel handles
      // c. map coords / connectivity storage to damsel equivalent
      // d. for each tag, map moab storage to damsel storage
    std::vector<damsel_entity_buf_type>::iterator eiit;

      // process verts info first    
    for (eiit =  ent_infos.begin(); eiit != ent_infos.end(); eiit++) {
      if ((*eiit).entity_type == DAMSEL_ENTITY_TYPE_VERTEX) {
        rval = process_ent_info(*eiit);
        CHK_MB_ERR(rval, " ");
      }
    }

      // then everything that is not a vertex (connectivity may reference the
      // vertices created above)
    for (eiit =  ent_infos.begin(); eiit != ent_infos.end(); eiit++) {
      if ((*eiit).entity_type != DAMSEL_ENTITY_TYPE_VERTEX) {
        rval = process_ent_info(*eiit);
        CHK_MB_ERR(rval, " ");
      }
    }
    
/*
  }

    // process collections
  rval = process_coll_infos(coll_infos); 
  CHK_MB_ERR(rval, " ");
  
    // STEP 5: process into list of local info structs, each represents file-side struct and
    // portion of that struct
    // ASSUMPTION: each local info struct represents single entity type & # vertices or collection
  
    // STEP 6: For each local info struct:

    // STEP 6b: READ CONTAINER INTO LOCAL BUFFER
    // STEP 6c: create app representation of entities/vertices/collection, and damsel container for them,
    //    and MAP APP HANDLE CONTAINER TO DAMSEL CONTAINER
    // STEP 6d: process vertices/entities/collection
    //    6d1: if vertices, continue
    //    6d2: if entities:
    //    - MAP LOCAL CONNECTIVITY REP'N TO DAMSEL (might be tag, don't know yet)
    //    6d3: if collection:
    //    - (everything in STEP 4 for this collection except for EXECUTE)
    //    - might need to filter out things not represented on this rank
    //    6d4: if sparse tag:
    //    - create app-side representation of sparse tag
    //    - READ CONTAINER OF MODEL HANDLES INTO LOCAL BUFFER
    //    - allocate app-side storage for tag values
    //    - MAP APP STORAGE TO MODEL TAG + (implicit?) CONTAINER

    // STEP 6e: process dense tags for the local info struct; for each dense tag:
    //   - get app tag handle for model tag handle
    //   - get app storage for app tag handle + container
    //   - MAP APP STORAGE TO MODEL TAG + CONTAINER

    // STEP 7: EXECUTE
    //   - assign all mapped data
    //   - translate all damsel handles to app handles
    // uninit damsel

    */

  return MB_SUCCESS;
}
Esempio n. 3
0
/*
 * Loads a PSF file (and, recursively, its "_lib"/"_libN" dependencies):
 * parses the 16-byte header, reads the optional [TAG] block, verifies the
 * CRC32 of the compressed program area, inflates it with zlib (growing the
 * output buffer on Z_BUF_ERROR/Z_MEM_ERROR) and hands the decompressed
 * program plus the reserved area to state->load_target.
 *
 * Returns the PSF version byte (header_buffer[3]) on success, -1 on any
 * error. Recursion depth is bounded by max_recursion_depth.
 */
static int psf_load_internal( psf_load_state * state, const char * file_name )
{
    psf_tag * tags = NULL;
    psf_tag * tag;

    char * full_path;

    void * file;

    long file_size, tag_size;

    int n;

    uint8_t header_buffer[16];

    uint8_t * exe_compressed_buffer = NULL;
    uint8_t * exe_decompressed_buffer = NULL;
    uint8_t * reserved_buffer = NULL;
    char * tag_buffer = NULL;

    uint32_t exe_compressed_size, exe_crc32, reserved_size;
    uLong exe_decompressed_size, try_exe_decompressed_size;

    int zerr;

    size_t full_path_size;

    if ( ++state->depth > max_recursion_depth ) return -1;

    /* base_path + file_name + NUL terminator */
    full_path_size = strlen(state->base_path) + strlen(file_name) + 1;
    full_path = (char *) malloc( full_path_size );
    if ( !full_path ) return -1;

#if _MSC_VER >= 1300
    strcpy_s( full_path, full_path_size, state->base_path );
    strcat_s( full_path, full_path_size, file_name );
#else
    strcpy( full_path, state->base_path );
    strcat( full_path, file_name );
#endif

    file = state->file_callbacks->fopen( full_path );

    free( full_path );

    if ( !file ) return -1;

    if ( state->file_callbacks->fread( header_buffer, 1, 16, file ) < 16 ) goto error_close_file;

    /* "PSF" magic, then one version byte */
    if ( memcmp( header_buffer, "PSF", 3 ) ) goto error_close_file;

    if ( state->allowed_version && ( header_buffer[ 3 ] != state->allowed_version ) ) goto error_close_file;

    /* Little-endian fields at offsets 4 (reserved size), 8 (compressed exe
     * size) and 12 (exe CRC32) */
    reserved_size = header_buffer[ 4 ] | ( header_buffer[ 5 ] << 8 ) | ( header_buffer[ 6 ] << 16 ) | ( header_buffer[ 7 ] << 24 );
    exe_compressed_size = header_buffer[ 8 ] | ( header_buffer[ 9 ] << 8 ) | ( header_buffer[ 10 ] << 16 ) | ( header_buffer[ 11 ] << 24 );
    exe_crc32 = header_buffer[ 12 ] | ( header_buffer[ 13 ] << 8 ) | ( header_buffer[ 14 ] << 16 ) | ( header_buffer[ 15 ] << 24 );

    if ( state->file_callbacks->fseek( file, 0, SEEK_END ) ) goto error_close_file;

    file_size = state->file_callbacks->ftell( file );

    if ( file_size <= 0 ) goto error_close_file;

    /* A [TAG] block ("[TAG]" marker = 5 bytes) may follow the data areas.
     * NOTE(review): the sum below uses untrusted header fields and could wrap
     * on 32-bit unsigned long — confirm callers only feed trusted files. */
    if ( (unsigned long)file_size >= 16 + reserved_size + exe_compressed_size + 5 )
    {
        tag_size = file_size - ( 16 + reserved_size + exe_compressed_size );
        if ( state->file_callbacks->fseek( file, -tag_size, SEEK_CUR ) ) goto error_close_file;
        tag_buffer = (char *) malloc( tag_size + 1 );
        if ( !tag_buffer ) goto error_close_file;
        if ( state->file_callbacks->fread( tag_buffer, 1, tag_size, file ) < (size_t)tag_size ) goto error_free_buffers;
        tag_buffer[ tag_size ] = 0;
        if ( !memcmp( tag_buffer, "[TAG]", 5 ) ) tags = process_tags( tag_buffer + 5 );
        free( tag_buffer );
        tag_buffer = NULL;

        /* Tags are stored newest-first; walk to the end and report them in
         * file order */
        if ( tags && state->info_target && ( state->depth == 1 || state->info_want_nested_tags ) )
        {
            tag = tags;
            while ( tag->next ) tag = tag->next;
            while ( tag )
            {
                state->info_target( state->info_context, tag->name, tag->value );
                tag = tag->prev;
            }
        }
    }

    if ( !state->load_target ) goto done;

    /* The "_lib" dependency must be loaded before this file's own program */
    tag = find_tag( tags, "_lib" );
    if ( tag )
    {
        if ( psf_load_internal( state, tag->value ) < 0 ) goto error_free_tags;
    }

    /* FIX: malloc(0) may legally return NULL, which the original code treated
     * as an allocation failure; request at least 1 byte so files with an
     * empty reserved or compressed area are not spuriously rejected. */
    reserved_buffer = (uint8_t *) malloc( reserved_size ? reserved_size : 1 );
    if ( !reserved_buffer ) goto error_free_tags;
    exe_compressed_buffer = (uint8_t *) malloc( exe_compressed_size ? exe_compressed_size : 1 );
    if ( !exe_compressed_buffer ) goto error_free_tags;

    if ( state->file_callbacks->fseek( file, 16, SEEK_SET ) ) goto error_free_tags;
    if ( reserved_size && state->file_callbacks->fread( reserved_buffer, 1, reserved_size, file ) < reserved_size ) goto error_free_tags;
    if ( exe_compressed_size && state->file_callbacks->fread( exe_compressed_buffer, 1, exe_compressed_size, file ) < exe_compressed_size ) goto error_free_tags;
    state->file_callbacks->fclose( file );
    file = NULL;

    if ( exe_compressed_size )
    {
        /* Verify the program area before inflating it */
        if ( exe_crc32 != crc32(crc32(0L, Z_NULL, 0), exe_compressed_buffer, exe_compressed_size) ) goto error_free_tags;

        /* Start with a 3x estimate and grow until uncompress() succeeds */
        exe_decompressed_size = try_exe_decompressed_size = exe_compressed_size * 3;
        exe_decompressed_buffer = (uint8_t *) malloc( exe_decompressed_size );
        if ( !exe_decompressed_buffer ) goto error_free_tags;

        while ( Z_OK != ( zerr = uncompress( exe_decompressed_buffer, &exe_decompressed_size, exe_compressed_buffer, exe_compressed_size ) ) )
        {
            void * try_exe_decompressed_buffer;

            if ( Z_MEM_ERROR != zerr && Z_BUF_ERROR != zerr ) goto error_free_tags;

            /* Grow linearly up to 1 MiB, then double */
            if ( try_exe_decompressed_size < 1 * 1024 * 1024 )
                try_exe_decompressed_size += 1 * 1024 * 1024;
            else
                try_exe_decompressed_size += try_exe_decompressed_size;

            exe_decompressed_size = try_exe_decompressed_size;

            try_exe_decompressed_buffer = realloc( exe_decompressed_buffer, exe_decompressed_size );
            if ( !try_exe_decompressed_buffer ) goto error_free_tags;

            exe_decompressed_buffer = (uint8_t *) try_exe_decompressed_buffer;
        }
    }
    else
    {
        exe_decompressed_size = 0;
        /* FIX: same malloc(0) issue as above — allocate 1 byte so a zero-size
         * program area does not look like an out-of-memory failure */
        exe_decompressed_buffer = (uint8_t *) malloc( 1 );
        if ( !exe_decompressed_buffer ) goto error_free_tags;
    }

    free( exe_compressed_buffer );
    exe_compressed_buffer = NULL;

    if ( state->load_target( state->load_context, exe_decompressed_buffer, exe_decompressed_size, reserved_buffer, reserved_size ) ) goto error_free_tags;

    free( reserved_buffer );
    reserved_buffer = NULL;

    free( exe_decompressed_buffer );
    exe_decompressed_buffer = NULL;

    /* Additional libraries are named _lib2, _lib3, ... and are loaded after
     * the main program */
    n = 2;
    snprintf( state->lib_name_temp, 31, "_lib%u", n );
    state->lib_name_temp[ 31 ] = '\0';
    tag = find_tag( tags, state->lib_name_temp );
    while ( tag )
    {
        if ( psf_load_internal( state, tag->value ) < 0 ) goto error_free_tags;
        ++n;
        snprintf( state->lib_name_temp, 31, "_lib%u", n );
        state->lib_name_temp[ 31 ] = '\0';
        tag = find_tag( tags, state->lib_name_temp );
    }

done:
    if ( file ) state->file_callbacks->fclose( file );

    free_tags( tags );

    --state->depth;

    return header_buffer[ 3 ];

    /* Cleanup ladder: each label frees what later stages may have acquired */
error_free_tags:
    free_tags( tags );
error_free_buffers:
    free( exe_compressed_buffer );
    free( exe_decompressed_buffer );
    free( reserved_buffer );
    free( tag_buffer );
error_close_file:
    if ( file ) state->file_callbacks->fclose( file );
    return -1;
}
}