/* Split a 'key=value' pair found in src[0..src_len): everything before the
 * first KEY_VALUE_DELIMETER ('=') is the key, the rest is the value; both
 * halves are whitespace-stripped via strip_all().
 *
 * src, src_len : input slice (NOT necessarily NUL-terminated) and its length
 * key, key_len : out - pointer to stripped key and its stripped length
 * val, val_len : out - pointer to stripped value and its stripped length
 *
 * Returns 0 if a pair was extracted, -1 if no delimiter lies inside the slice.
 */
int extract_key_value( const char* src, int src_len,
                       char** key, uint16_t* key_len,
                       char** val, uint16_t* val_len )
{
    /* Use memchr, not strchr: src is a length-bounded sub-slice of a larger
     * buffer and may have no NUL inside it, so strchr could scan past
     * src_len and match a '=' belonging to the following token.  memchr is
     * bounded by src_len, which also removes the old off-by-one boundary
     * test (delim > src+src_len) that accepted a match one byte past the
     * slice and produced a negative value length. */
    const char* delim = memchr(src, KEY_VALUE_DELIMETER, src_len);
    if ( delim == NULL )
        return -1; /* not a valid pair */

    /* keylen is the raw (unstripped) key length; *key_len and *val_len
     * receive the stripped lengths from strip_all(). */
    int keylen = delim - src;
    *key = (char*)strip_all(src, keylen, key_len);
    *val = (char*)strip_all(delim+1, src_len-keylen-1, val_len);
    return 0;
}
/* Parse a text section of comma/newline separated 'key=value' tokens into
 * records.  A record is complete once key_list->count distinct known keys
 * have been parsed; '#'-style comments (COMMENT_CHAR) run to end of line.
 * Implemented as a per-character state machine:
 *   EStProcessing - accumulating a token [lex_cursor, lex_cursor+lex_length)
 *   EStComment    - skipping comment text
 *   EStToken      - a finished token is ready to be handled
 * Returns `records` filled in (function always returns `records`; the
 * "NULL if error" note below appears aspirational - no NULL path exists
 * in this body).
 *
 * records  : out - destination array of parsed records, count is reset to 0
 * text,len : input buffer and its length (need not be NUL-terminated)
 * key_list : the set of keys a complete record must provide
 */
struct ParsedRecords* get_parsed_records(struct ParsedRecords* records, const char* text, int len, struct KeyList* key_list){
    assert(records); assert(text); assert(key_list);
    int lex_cursor = 0;                 /* start index of the token being accumulated */
    int lex_length = 0;                 /* length of the token being accumulated */
    int parsed_params_count=0;          /* how many of the current record's keys were parsed */
    struct internal_parse_data key_val_parse;
    struct internal_parse_data temp_keys_parsed[NVRAM_MAX_KEYS_COUNT_IN_RECORD];
    memset(&temp_keys_parsed, '\0', sizeof(temp_keys_parsed));
    enum ParsingStatus st = EStProcessing;
    /* st_new holds the state to restore after an EStToken step completes */
    enum ParsingStatus st_new = st;
    int new_record_flag=0;              /* set on '\n': partial record data is discarded */
#ifdef PARSER_DEBUG_LOG
    ZRT_LOG(L_INFO, P_TEXT, "parsing");
#endif
    records->count=0;
    int cursor=0;
    do {
        /* enter comment state when a comment marker is found */
        if ( text[cursor] == COMMENT_CHAR ){
            if ( st == EStProcessing ){
                /* flush data accumulated before the comment began:
                 * handle it as a token now, then resume in comment state */
                st_new = EStComment;
                st = EStToken;
            }
            else{
                /* just a comment, nothing was being accumulated */
                st = EStComment;
            }
#ifdef PARSER_DEBUG_LOG
            ZRT_LOG(L_EXTRA, "cursor=%d EStComment", cursor);
#endif
        }
        /* A comma outside a comment, or any newline, terminates the
         * current token (if one is being accumulated) or starts a fresh
         * accumulation right after the separator. */
        else if ( (st != EStComment && ( text[cursor] == ',' )) || text[cursor] == '\n' ){
            if ( st == EStProcessing ){
                /* data was being accumulated - hand it over as a token */
#ifdef PARSER_DEBUG_LOG
                ZRT_LOG(L_EXTRA, "cursor=%d EStToken", cursor);
#endif
                st = EStToken;
                if ( text[cursor] == '\n' )
                    new_record_flag = 1;   /* line break: record boundary */
                else
                    st_new = EStProcessing;
            }
            else{
                /* start accumulating significant data after the separator */
#ifdef PARSER_DEBUG_LOG
                ZRT_LOG(L_EXTRA, "cursor=%d EStProcessing", cursor);
#endif
                st = EStProcessing;
                /* remember where the new unprocessed data begins */
                lex_cursor = cursor+1;
                /* set lex_length to -1: it will be incremented in the
                 * EStProcessing case of the switch below, making it valid */
                lex_length=-1;
            }
        }
        /* right boundary of the input reached */
        else if (cursor == len-1){
            /* extend the current token up to the last char */
            ++lex_length;
            st = EStToken;
        }
        ++cursor;
        switch(st){
        case EStProcessing:
            ++lex_length;
            break;
        case EStComment:
            lex_cursor = -1;
            lex_length=0;
            break;
        /* a token is now ready to retrieve and check */
        case EStToken:{
#ifdef PARSER_DEBUG_LOG
            ZRT_LOG(L_INFO, "swicth EStToken: lex_cursor=%d, lex_length=%d, pointer=%p", lex_cursor, lex_length, &text[lex_cursor]);
            /* log the non-stripped lexeme */
            ZRT_LOG(L_EXTRA, "lex= '%s'", GET_STRING(&text[lex_cursor], lex_length));
#endif
            uint16_t striped_token_len=0;
            const char* striped_token = strip_all(&text[lex_cursor], lex_length, &striped_token_len );
#ifdef PARSER_DEBUG_LOG
            ZRT_LOG(L_INFO, "swicth EStToken: striped token len=%d, pointer=%p", striped_token_len, striped_token);
#endif
            /* if the token has data, try to extract a key and value */
            if ( striped_token_len > 0 ){
#ifdef PARSER_DEBUG_LOG
                /* log the stripped token */
                ZRT_LOG(L_INFO, "token= '%s'", GET_STRING(striped_token, striped_token_len));
#endif
                /* parse the pair 'key=value', stripping spaces */
                int parsed_key_index = -1;
                if ( !extract_key_value( striped_token, striped_token_len, &key_val_parse.key, &key_val_parse.keylen, &key_val_parse.val, &key_val_parse.vallen ) ) {
                    /* map the parsed key onto its index in key_list */
                    parsed_key_index = key_list->find(key_list, key_val_parse.key, key_val_parse.keylen);
#ifdef PARSER_DEBUG_LOG
                    ZRT_LOG(L_INFO, "key (len=%d,index=%d) '%s' found, keyval=%s,", key_val_parse.keylen, parsed_key_index, GET_STRING(key_val_parse.key, key_val_parse.keylen), GET_STRING(key_val_parse.val, key_val_parse.vallen));
#endif
                    if ( parsed_key_index >= 0 ){
                        if ( temp_keys_parsed[parsed_key_index].key != NULL ){
                            /* an item with the same key was already saved;
                             * this duplicate is ignored */
#ifdef PARSER_DEBUG_LOG
                            ZRT_LOG(L_ERROR, P_TEXT, "last key duplicated, ");
#endif
                        }
                        else{
                            ZRT_LOG(L_INFO, "parsed key(len=%d)=%s saved", key_val_parse.vallen, GET_STRING(key_val_parse.val, key_val_parse.vallen) );
                            /* save the parsed key,value pair */
                            temp_keys_parsed[parsed_key_index] = key_val_parse;
                            /* one more record parameter was parsed */
                            ++parsed_params_count;
                        }
                    }
                    else{
                        /* key not present in key_list */
                        ZRT_LOG(L_ERROR, "wrong key, key=%s", GET_STRING(key_val_parse.key, key_val_parse.keylen));
                    }
                }
                else{
#ifdef PARSER_DEBUG_LOG
                    ZRT_LOG(L_ERROR, P_TEXT, "last token parsing error");
#endif
                }
                /* if the expected count of record parameters was reached */
                if ( key_list->count == parsed_params_count ){
#ifdef PARSER_DEBUG_LOG
                    ZRT_LOG(L_INFO, "key_list->count =%d", parsed_params_count);
#endif
                    /* enough parameters parsed to save them as a single
                     * record; add it to the parsed records array.
                     * NOTE(review): records->count is not checked against
                     * the records array capacity here - confirm callers
                     * guarantee enough room. */
                    struct ParsedRecord record;
                    if ( get_parsed_record(&record, key_list, temp_keys_parsed, parsed_params_count) ){
                        /* record parsed OK */
#ifdef PARSER_DEBUG_LOG
                        ZRT_LOG(L_INFO, "save record #%d OK", records->count);
#endif
                        records->records[records->count++] = record;
                    }
                    /* current record done: reset counters so a new record
                     * can be parsed */
                    parsed_params_count=0;
                    memset(&temp_keys_parsed, '\0', sizeof(temp_keys_parsed) );
                }
            }
            if ( new_record_flag ){
                new_record_flag=0;
                /* drop previously parsed partial results: a new record starts */
                parsed_params_count=0;
                memset(&temp_keys_parsed, '\0', sizeof(temp_keys_parsed) );
            }
            /* restore the pre-token processing state */
            st = st_new;
#ifdef PARSER_DEBUG_LOG
            ZRT_LOG(L_INFO, P_TEXT, "restore previous parsing state");
#endif
            /* remember where the next chunk of unprocessed data begins */
            lex_cursor = cursor;
            lex_length=0;
            break;
        }
        default:
            break;
        }
    } while( cursor < len );
#ifdef PARSER_DEBUG_LOG
    ZRT_LOG(L_INFO, P_TEXT, "Section parsed");
#endif
    return records; /* complete if OK, or NULL if error */
}
int main(int argc, char *argv[]) { extern int optind; int c; int usage = 0; gp_init(); /* initalize */ verbose = false; state.strip_debug = false; state.preserve_dates = false; state.strip_all = false; state.strip_unneeded = false; state.discard_all = false; state.output_file = NULL; state.symbol_keep = push_symbol_table(NULL, false); state.symbol_remove = push_symbol_table(NULL, false); state.section_remove = push_symbol_table(NULL, false); while ((c = GETOPT_FUNC) != EOF) { switch (c) { case '?': case 'h': usage = 1; break; case 'g': state.strip_debug = true; break; case 'k': add_name(state.symbol_keep, optarg); break; case 'n': add_name(state.symbol_remove, optarg); break; case 'o': state.output_file = optarg; break; case 'p': state.preserve_dates = true; break; case 'r': add_name(state.section_remove, optarg); break; case 's': state.strip_all = true; break; case 'u': state.strip_unneeded = true; break; case 'x': state.discard_all = true; break; case 'V': verbose = true; break; case 'v': fprintf(stderr, "%s\n", GPSTRIP_VERSION_STRING); exit(0); } if (usage) break; } if ((optind == argc) || (usage)) { show_usage(); } for ( ; optind < argc; optind++) { state.input_file = argv[optind]; if (gp_identify_coff_file(state.input_file) != object_file_v2 && gp_identify_coff_file(state.input_file) != object_file) { gp_error("\"%s\" is not a valid object file", state.input_file); exit(1); } state.object = gp_read_coff(state.input_file); if (state.object) { remove_sections(); remove_symbols(); if (state.strip_all) { strip_all(); } if (state.strip_debug) { if (state.strip_all) { gp_message("strip debug ignored"); } else { strip_debug(); } } if (state.strip_unneeded) { if (state.strip_all) { gp_message("strip unneeded ignored"); } else { strip_unneeded(); } } if (state.discard_all) { if (state.strip_all) { gp_message("discard all ignored"); } else { discard_all(); } } if (state.output_file) { state.object->filename = state.output_file; } if (!state.preserve_dates) { /* FIXME: 
need to update the output file dates */ state.object->time = (long)time(NULL); } if (gp_num_errors == 0) { /* no errors have occured so write the file */ if (gp_write_coff(state.object, 0)) gp_error("system error while writing object file"); } else if (state.output_file) { /* a new file is being written, but errors have occurred, delete the file if it exists */ unlink(state.output_file); } /* FIXME: free state.output_file */ } } if (gp_num_errors) return EXIT_FAILURE; else return EXIT_SUCCESS; }