// Top-level MIME parse entry point: dispatches one input line either to
// the header parser or to the appropriate body parser, driven by
// global_state.  Returns PARSE_OK on success, PARSE_ERROR on a null
// line or an inconsistent state.
inline int MIMEParser<ParserConfig>::parse(const char* line, size_t length)
{
    bool simple_body = false;
    if(!line) {
        global_state = GS_ERROR;
        return PARSE_ERROR;
    }
    int ret_code = PARSE_OK;
    // A "simple" body has no multipart boundary and no (pending or
    // active) partial parser, so it needs no MIME framing.
    simple_body = !(content_type.boundary || start_partial_parser || partial_parser);
    if(global_state == GS_HEADERS) {
        // parse_headers() returning PARSE_ERROR marks the end of the
        // header section: switch to body mode and feed it this line.
        // NOTE(review): the return value of parse_body()/parse_body_simple()
        // is discarded on this transition — confirm that is intentional.
        if(parse_headers(line, length) == PARSE_ERROR) {
            global_state = GS_BODY;
            if(simple_body)
                parse_body_simple(line, length);
            else
                parse_body(line, length);
        }
    }
    else if(global_state == GS_BODY && !simple_body)
        ret_code = parse_body(line, length);
    else if(global_state == GS_BODY_EPILOGUE || simple_body)
        ret_code = parse_body_simple(line, length);
    else
        ret_code = PARSE_ERROR;
    return ret_code;
}
/* Reads a text n-gram model from stdin: the first line is the head
 * line, the remainder is the body.  Exits with ENODATA on empty input
 * or a malformed head line. */
int main(int argc, char * argv[])
{
    FILE * input = stdin;
    FILE * output = stdout;

    taglib_init();

    /* scratch containers shared with the taglib line reader */
    values = g_ptr_array_new();
    required = g_hash_table_new(g_str_hash, g_str_equal);

    ssize_t result = my_getline(input);
    if ( result == -1 ) {
        fprintf(stderr, "empty file input.\n");
        exit(ENODATA);
    }

    if (!parse_headline(input, output))
        exit(ENODATA);

    result = my_getline(input);
    if ( result != -1 )
        parse_body(input, output);

    taglib_fini();
    return 0;
}
/* Imports a k-mixture n-gram model in text form (read from stdin) into
 * the on-disk k-mixture model database named by
 * k_mixture_model_filename.  Exits with EINVAL on bad options, ENOENT
 * on missing tables/files, and ENODATA on empty or malformed input. */
int main(int argc, char * argv[])
{
    FILE * input = stdin;

    setlocale(LC_ALL, "");
    GError * error = NULL;
    GOptionContext * context;
    context = g_option_context_new("- import k mixture model");
    g_option_context_add_main_entries(context, entries, NULL);
    if (!g_option_context_parse(context, &argc, &argv, &error)) {
        g_print("option parsing failed:%s\n", error->message);
        exit(EINVAL);
    }

    /* load the system table configuration */
    SystemTableInfo2 system_table_info;
    bool retval = system_table_info.load(SYSTEM_TABLE_INFO);
    if (!retval) {
        fprintf(stderr, "load table.conf failed.\n");
        exit(ENOENT);
    }

    PhraseLargeTable3 phrase_table;
    phrase_table.attach(SYSTEM_PHRASE_INDEX, ATTACH_READONLY);

    FacadePhraseIndex phrase_index;
    const pinyin_table_info_t * phrase_files = system_table_info.get_default_tables();
    if (!load_phrase_index(phrase_files, &phrase_index))
        exit(ENOENT);

    /* open (or create) the target k-mixture model database */
    KMixtureModelBigram bigram(K_MIXTURE_MODEL_MAGIC_NUMBER);
    bigram.attach(k_mixture_model_filename, ATTACH_READWRITE|ATTACH_CREATE);

    taglib_init();

    /* prepare to read n-gram model */
    values = g_ptr_array_new();
    required = g_hash_table_new(g_str_hash, g_str_equal);

    ssize_t result = my_getline(input);
    if ( result == -1 ) {
        fprintf(stderr, "empty file input.\n");
        exit(ENODATA);
    }

    if (!parse_headline(&bigram))
        exit(ENODATA);

    result = my_getline(input);
    if ( result != -1 )
        parse_body(input, &phrase_table, &phrase_index, &bigram);

    taglib_fini();
    return 0;
}
// Entry point for a billing HTTP request.  Only POST is accepted; the
// request body is parsed into a Message which is handed off to the
// session manager.  The task deletes itself before returning on every
// path.
void BillingTask::run()
{
  // Reject anything that is not a POST.
  if (_req.method() != htp_method_POST)
  {
    send_http_reply(405);
    delete this;
    return;
  }

  bool timer_interim = false;
  if (_req.param(TIMER_INTERIM_PARAM) == "true")
  {
    // Request was triggered by an interim timer pop rather than a live
    // transaction; record the association and event in SAS.
    timer_interim = true;
    SAS::Marker cid_assoc(trail(), MARKER_ID_SIP_CALL_ID, 0);
    cid_assoc.add_var_param(call_id());
    SAS::report_marker(cid_assoc);
    SAS::Event timer_pop(trail(), SASEvent::INTERIM_TIMER_POPPED, 0);
    SAS::report_event(timer_pop);
  }

  Message* msg = NULL;
  HTTPCode rc = parse_body(call_id(), timer_interim, _req.get_rx_body(), &msg, trail());

  if (rc != HTTP_OK)
  {
    SAS::Event rejected(trail(), SASEvent::REQUEST_REJECTED_INVALID_JSON, 0);
    SAS::report_event(rejected);
    send_http_reply(rc);
  }
  else
  {
    if (msg != NULL)
    {
      TRC_DEBUG("Handle the received message");
      // The session manager takes ownership of the message object and is
      // responsible for deleting it.
      _sess_mgr->handle(msg);
      msg = NULL;
    }
    // The HTTP reply won't be sent until after we leave this function, so by
    // putting this last we ensure that the load monitor will get a sensible
    // value for the latency
    send_http_reply(rc);
  }
  delete this;
}
// Loads a sector definition from `filename` into `sector`.
// Throws std::runtime_error if the file is not a windstille-sector file.
SectorBuilder::SectorBuilder(const Pathname& filename, Sector& sector) :
  m_filename(filename),
  m_sector(sector),
  id_table(),
  parent_table()
{
  FileReader reader = FileReader::parse(m_filename);

  // Guard: refuse anything that is not a windstille-sector document.
  if (reader.get_name() != "windstille-sector")
  {
    std::ostringstream err;
    err << "'" << m_filename << "' is not a windstille-sector file";
    throw std::runtime_error(err.str());
  }

  parse_body(reader);
}
/* atomic expressions like literals, but also if-then-else, (<expr>*).
 *
 * Parses the current token as an atomic expression: integer/string
 * literals, a parenthesised sub-expression, an identifier, a
 * let-expression (TOK_DEF) or a begin/end body (TOK_BEGIN).
 * Returns a newly created AST node, or NULL on an unexpected token. */
static ASTNode *parse_atomic(Parser *p)
{
    ASTNode *tmp;
    int i;
    char *s;

    switch (peek_id(p)) {
    /* literals */
    case TOK_INTEGER:
        i = peek(p)->ival;
        accept(p);
        return ast_create_integer(i);
    case TOK_STRING:
        /* copy the lexeme: the token buffer is invalidated by accept() */
        s = strdup(peek(p)->sval);
        accept(p);
        tmp = ast_create_string(s);
        free(s);
        return tmp;
    /* grouping by parentheses */
    case TOK_LPAR:
        /* term = '(' expr ')' */
        accept(p);
        ignore_eols(p);
        tmp = parse_expr(p);
        ignore_eols(p);
        expect(p, TOK_RPAR, "expected closing parenthesis");
        return tmp;
    /* identifier */
    case TOK_IDENTIFIER:
        s = strdup(peek(p)->sval);
        accept(p);
        tmp = ast_create_identifier(s);
        free(s);
        return tmp;
    /* complex expressions */
    case TOK_DEF:
        return parse_let_expression(p);
    case TOK_BEGIN:
        return parse_body(p, TOK_BEGIN, TOK_END, -1);
    default:
        assert(0);
        /* BUGFIX: the function previously fell off the end of a non-void
         * function here when NDEBUG compiles assert() away (undefined
         * behavior).  Fail explicitly instead. */
        return NULL;
    }
}
void Stencil::parse() { std::ifstream infile (source_filename()); if (infile.is_open()) { parse_header(infile); // Both stencil dimensions must be odd assert(rows() == columns()); assert(rows() % 2 != 0); assert(columns() % 2 != 0); kernel = std::make_shared<Array2d>(rows(), columns()); parse_body(infile); infile.close(); normalize(); } else{ std::cerr << "Unable to open file " << source_filename() << "\n"; } }
/* Parses a vbucket stream: reads the header first (if not yet seen),
 * then — for non-cluster connections — consumes the payload body until
 * it is complete.  Returns the status of the last parse step. */
static lcb_error_t htvb_parse(struct htvb_st *vbs, lcb_type_t btype)
{
    lcb_error_t rc = LCB_ERROR;
    int finished = 0;

    if (vbs->header.nused == 0) {
        rc = parse_header(vbs, btype);
        if (rc != LCB_SUCCESS) {
            return rc; /* BUSY or otherwise */
        }
    }

    lcb_assert(vbs->header.nused);

    if (btype == LCB_TYPE_CLUSTER) {
        /* Do not parse payload for cluster connection type */
        return LCB_SUCCESS;
    }

    do {
        rc = parse_body(vbs, &finished);
    } while (!finished);

    return rc;
}
/* Splits a raw colon-separated record in place and feeds each segment
 * to its sub-parser: segment 0 is the title, segment 1 the header, and
 * whatever follows the last ':' is the body. */
static void parse_sound(char *raw, song_t* parse_song)
{
    uint8_t section = 0;
    char *seg_begin = raw;

    for (char *cur = raw; *cur != 0; cur++) {
        if (*cur != ':') {
            continue;
        }
        /* terminate the current segment and dispatch it */
        *cur = 0;
        if (section == 0) {
            parse_title(seg_begin, parse_song);
        } else if (section == 1) {
            parse_header(seg_begin, parse_song);
        }
        seg_begin = cur + 1;
        section++;
    }

    parse_body(seg_begin, parse_song);
}
// Constructs a response from raw header+body text, then parses and
// validates it; the result is recorded in m_is_valid.
HttpResponse::HttpResponse(const std::string& header_lines)
    : m_header_body(header_lines)
{
    m_is_valid = parse_body();
}
// Constructs a request from the raw bytes of an I/O buffer, then
// parses and validates it; the result is recorded in m_is_valid.
HttpRequest::HttpRequest(const IOBuffer& io_buf)
{
    m_body.assign(io_buf.data(), io_buf.size());
    m_is_valid = parse_body();
}
/* Imports an interpolation n-gram model in text form (stdin) into the
 * bigram database "bigram.db" and updates the on-disk phrase index.
 * Exits with ENOENT on missing files and ENODATA on bad input. */
int main(int argc, char * argv[])
{
    FILE * input = stdin;
    const char * bigram_filename = "bigram.db";

    /* load the phrase table from its serialized chunk */
    PhraseLargeTable phrases;
    MemoryChunk * chunk = new MemoryChunk;
    bool retval = chunk->load("phrase_index.bin");
    if (!retval) {
        fprintf(stderr, "open phrase_index.bin failed!\n");
        exit(ENOENT);
    }
    phrases.load(chunk);

    FacadePhraseIndex phrase_index;
    if (!load_phrase_index(&phrase_index))
        exit(ENOENT);

    /* open (or create) the target bigram database */
    Bigram bigram;
    retval = bigram.attach(bigram_filename, ATTACH_CREATE|ATTACH_READWRITE);
    if (!retval) {
        fprintf(stderr, "open %s failed!\n", bigram_filename);
        exit(ENOENT);
    }

    taglib_init();

    /* scratch containers shared with the taglib line reader */
    values = g_ptr_array_new();
    required = g_hash_table_new(g_str_hash, g_str_equal);

    //enter "\data" line
    assert(taglib_add_tag(BEGIN_LINE, "\\data", 0, "model", ""));

    ssize_t result = my_getline(input);
    if ( result == -1 ) {
        fprintf(stderr, "empty file input.\n");
        exit(ENODATA);
    }

    //read "\data" line
    if ( !taglib_read(linebuf, line_type, values, required) ) {
        fprintf(stderr, "error: interpolation model expected.\n");
        exit(ENODATA);
    }
    assert(line_type == BEGIN_LINE);

    /* the "\data" line must declare an interpolation model */
    char * value = NULL;
    assert(g_hash_table_lookup_extended(required, "model", NULL, (gpointer *)&value));
    if ( !( strcmp("interpolation", value) == 0 ) ) {
        fprintf(stderr, "error: interpolation model expected.\n");
        exit(ENODATA);
    }

    result = my_getline(input);
    if ( result != -1 )
        parse_body(input, &phrases, &phrase_index, &bigram);

    taglib_fini();

    if (!save_phrase_index(&phrase_index))
        exit(ENOENT);
    return 0;
}
// Constructs a response from the raw bytes of an I/O buffer, then
// parses and validates it; the result is recorded in m_is_valid.
HttpResponse::HttpResponse(const IOBuffer& io_buf)
{
    m_header_body.assign(io_buf.data(), io_buf.size());
    m_is_valid = parse_body();
}
/**
 * A subparser function for compiler_build() that looks at the current token,
 * checks for a 'function' token. If found, it proceeds to evaluate the function
 * declaration, make note of the number of arguments, and store the index where
 * the function will begin in the byte code. This function then dispatches
 * subparsers that define stack variables, evaluate logical blocks (if, else,
 * while, etc), and evaluate straight code.
 * c: an instance of Compiler.
 * l: an instance of lexer.
 * returns: false if the current token is not the start of a function declaration
 * ('function'), and true if it is. If an error occurs, function returns true,
 * but c->err is set to a relevant error code.
 */
static bool parse_function_definitions(Compiler * c, Lexer * l) {
  /*
   * A Function definition looks like so:
   *
   * function [EXPORTED] [NAME] ( [ARG1], [ARG2], ... ) {
   *   [code]
   * }
   *
   * The code below parses these tokens and then dispatches the straight code
   * parsers to handle the function body.
   */
  bool exported = false;
  size_t len;
  LexerType type;
  char * token = lexer_current_token(l, &type, &len);
  char * name;
  size_t nameLen;
  int numArgs;
  int numVars;

  /* check that this is a function declaration token */
  if(!tokens_equal(token, len, LANG_FUNCTION, LANG_FUNCTION_LEN)) {
    c->err = COMPILERERR_UNEXPECTED_TOKEN;
    return false;
  }

  /* advance to next token. if it is EXPORTED, take note for later */
  token = lexer_next(l, &type, &len);
  if(tokens_equal(token, len, LANG_EXPORTED, LANG_EXPORTED_LEN)) {
    exported = true;
    token = lexer_next(l, &type, &len);
  }

  /* this is the name token, store it and check for correct type */
  name = token;
  nameLen = len;
  if(type != LEXERTYPE_KEYVAR || is_keyword(name, nameLen)) {
    c->err = COMPILERERR_EXPECTED_FNAME;
    return true;
  }

  /* check if name is too long */
  if(nameLen > GS_MAX_FUNCTION_NAME_LEN) {
    c->err = COMPILERERR_FUNCTION_NAME_TOO_LONG;
    return true;
  }

  /* check for the open parenthesis */
  token = lexer_next(l, &type, &len);
  if(!tokens_equal(token, len, LANG_OPARENTH, LANG_OPARENTH_LEN)) {
    c->err = COMPILERERR_EXPECTED_OPARENTH;
    return true;
  }

  /* we're going down a level. push new symbol table to stack */
  if(!symtblstk_push(c)) {
    c->err = COMPILERERR_ALLOC_FAILED;
    return true;
  }

  /* parse the arguments, return if the process fails */
  if((numArgs = parse_arguments(c, l)) == -1) {
    return true;
  }

  /* check for open brace defining start of function "{" */
  token = lexer_next(l, &type, &len);
  if(!tokens_equal(token, len, LANG_OBRACKET, LANG_OBRACKET_LEN)) {
    c->err = COMPILERERR_EXPECTED_OBRACKET;
    return true;
  }
  token = lexer_next(l, &type, &len);

  /****************************** Do function body ****************************/

  /* handle variable declarations */
  /* NOTE(review): every other error path in this function returns true with
   * c->err set; this one returns false — confirm whether that is intended. */
  if((numVars = define_variables(c, l)) == -1) {
    return false;
  }

  /* retrieve next token (it was modified by define_variable */
  token = lexer_current_token(l, &type, &len);

  /* store the function name, location in the output, and # of args and vars */
  if(!function_store_definition(c, name, nameLen, numArgs, numVars, exported)) {
    return true;
  }

  if(!parse_body(c, l)) {
    return true;
  }

  /* retrieve current token (it was modified by parse_body) */
  token = lexer_current_token(l, &type, &len);

  /****************************** End function body ***************************/

  /* check for closing brace defining end of body "}" */
  if(!tokens_equal(token, len, LANG_CBRACKET, LANG_CBRACKET_LEN)) {
    c->err = COMPILERERR_EXPECTED_CBRACKET;
    return true;
  }

  /* push default return value. if no other return is given, this value is
   * returned */
  buffer_append_char(vm_buffer(c->vm), OP_NULL_PUSH);

  /* pop function frame and return to calling function */
  buffer_append_char(vm_buffer(c->vm), OP_FRM_POP);
  token = lexer_next(l, &type, &len);

  /* we're done here! pop the symbol table for this function off the stack. */
  ht_free(symtblstk_pop(c));
  return true;
}
/* Reads the next record from an OVSDB log.  Each record is a one-line
 * header giving the data length and its SHA-1, followed by that many
 * bytes of JSON.  On success stores the parsed JSON in '*jsonp' and
 * returns NULL; at clean EOF returns NULL with '*jsonp' left NULL; on
 * failure returns an error, which is also cached so that subsequent
 * reads fail the same way. */
struct ovsdb_error *
ovsdb_log_read(struct ovsdb_log *file, struct json **jsonp)
{
    uint8_t expected_sha1[SHA1_DIGEST_SIZE];
    uint8_t actual_sha1[SHA1_DIGEST_SIZE];
    struct ovsdb_error *error;
    off_t data_offset;
    unsigned long data_length;
    struct json *json;
    char header[128];

    *jsonp = json = NULL;

    /* A previous read error is sticky: keep returning a clone of it. */
    if (file->read_error) {
        return ovsdb_error_clone(file->read_error);
    } else if (file->mode == OVSDB_LOG_WRITE) {
        return OVSDB_BUG("reading file in write mode");
    }

    if (!fgets(header, sizeof header, file->stream)) {
        if (feof(file->stream)) {
            /* Clean end of log: neither data nor error. */
            error = NULL;
        } else {
            error = ovsdb_io_error(errno, "%s: read failed", file->name);
        }
        goto error;
    }

    if (!parse_header(header, &data_length, expected_sha1)) {
        error = ovsdb_syntax_error(NULL, NULL, "%s: parse error at offset "
                                   "%lld in header line \"%.*s\"",
                                   file->name, (long long int) file->offset,
                                   (int) strcspn(header, "\n"), header);
        goto error;
    }

    data_offset = file->offset + strlen(header);
    error = parse_body(file, data_offset, data_length, actual_sha1, &json);
    if (error) {
        goto error;
    }

    /* Verify the body's integrity against the hash from the header. */
    if (memcmp(expected_sha1, actual_sha1, SHA1_DIGEST_SIZE)) {
        error = ovsdb_syntax_error(NULL, NULL, "%s: %lu bytes starting at "
                                   "offset %lld have SHA-1 hash "SHA1_FMT" "
                                   "but should have hash "SHA1_FMT,
                                   file->name, data_length,
                                   (long long int) data_offset,
                                   SHA1_ARGS(actual_sha1),
                                   SHA1_ARGS(expected_sha1));
        goto error;
    }

    /* A JSON_STRING result here is how the parser reports its errors. */
    if (json->type == JSON_STRING) {
        error = ovsdb_syntax_error(NULL, NULL, "%s: %lu bytes starting at "
                                   "offset %lld are not valid JSON (%s)",
                                   file->name, data_length,
                                   (long long int) data_offset,
                                   json->u.string);
        goto error;
    }

    /* Record offsets so the caller can seek back to this record. */
    file->prev_offset = file->offset;
    file->offset = data_offset + data_length;
    *jsonp = json;
    return NULL;

error:
    file->read_error = ovsdb_error_clone(error);
    json_destroy(json);
    return error;
}
bool Parser::parse() { skip_newline(false); while (1) { ptr<Token> token = cur(); if (!token->type()) { break; } eat(); if (*token == '\n') { continue; } if (*token == TOKEN_INCLUDE) { bool old_skip_newline = skip_newline(); skip_newline(false); token = cur(); if (*token == TOKEN_CONST_STRING && look()->is_eol()) { eat(); eat(); skip_newline(old_skip_newline); ptr<Path> path = object<Path>(token->text()); if (_input.is_root()) { _symbols.exportSymbol(object<IncludeTree>(path)); } _input.load(path); } else { log_expect(token->loc(), "string eol"); } continue; } SegmentToken *seg = nullptr; if (token->type() == TOKEN_SEGMENT) { seg = static_cast<SegmentToken*>(token.get()); if (!seg->name()) { bool old_skip_newline = skip_newline(); skip_newline(false); if (!look()->is_eol()) { log_expect(token->loc(), "eol"); } skip_newline(old_skip_newline); eat(); } else { seg = nullptr; } } switch (_phase) { case PARSE_PHASE_HEAD: if (seg) { _phase = PARSE_PHASE_BODY; continue; } skip_newline(false); parse_head(token); break; case PARSE_PHASE_BODY: if (seg) { _phase = PARSE_PHASE_TAIL; continue; } skip_newline(true); parse_body(token); break; case PARSE_PHASE_TAIL: if (seg) { log_error(token->loc(), "too more segment declear."); } skip_newline(false); parse_tail(token); break; } } return true; }
/* libev read callback for a domain's HTTP socket: accumulates response
 * bytes into the domain buffer, then either follows 301/302 redirects,
 * checks a 200 body via parse_body(), retries the next search path, or
 * frees the domain on hard errors. */
static void recv_handler(struct ev_loop *loop, struct ev_io *watcher, int events) {
    //debug("recv_handler");
    size_t i;
    domain_t *domain = (domain_t *) watcher;
    ev_io_stop(loop, &domain->io);
    ev_timer_stop(loop, &domain->tw);
    //debug("recv_header %s -- data buffer:%p; data len: %d", domain->domain, domain->data.buffer + domain->data.len, domain->data.len);
    ssize_t len = readn(domain->io.fd, domain->data.buffer + domain->data.len, sizeof (domain->data.buffer) - domain->data.len);
    if (len <= 0) {
        if (EAGAIN == errno) {
            /* socket temporarily unavailable (busy, buffer drained,
             * etc.) — rearm the watchers and wait for more data */
            //err_ret("error read socket %s: ", domain->domain);
            ev_io_start(loop, &domain->io);
            ev_timer_start(loop, &domain->tw);
            return;
        } else {
            /* hard read error — give up on this domain */
            err_ret("error read socket %s: ", domain->domain);
            free_domain(domain);
            return;
        }
    } else {
        domain->data.len += len;
        int pret = parse_response(domain);
        debug("parse_response %s:%d", domain->domain, pret);
        if (pret > 0) {
            switch (domain->http.status) {
                case 301:
                case 302:
                {
                    /* redirect: locate the Location header and follow it */
                    for (i = 0; i != domain->http.num_headers; ++i) {
                        if (NULL != memmem(domain->http.headers[i].name, domain->http.headers[i].name_len, "Location", 8)) {
                            follow_location(domain, domain->http.headers[i].value, domain->http.headers[i].value_len);
                            return;
                            //break;
                        }
                    }
                    break;
                }
                case 200:
                {
                    /* pret is the parsed header length, so the body
                     * starts at buffer[pret] */
                    if (true == parse_body(&domain->data.buffer[pret], domain->data.len - pret)) {
                        success_checked(domain);
                    } else {
                        /* body check failed: issue a request for the
                         * next search path, if any remain */
                        if (++domain->index_search < (sizeof (search_path) / sizeof (search_path[0]))) {
                            //ares_gethostbyname(domain->options->ares.channel, domain->domain, AF_INET, ev_ares_dns_callback, (void *) domain);
                            http_request(domain);
                            return;
                        }
                    }
                    break;
                }
            }
        } else {
            error_parse(domain);
        }
    }
    debug("-- %s %d checked", domain->domain, domain->http.status);
    free_domain(domain);
}
/**
 * Parses the client request. The request is parsed to properly consume
 * the request data, identifying GET and HEAD HTTP methods. Most of the
 * data read is actually discarded, in the current implementation.
 *
 * The switch below deliberately falls through from one state to the
 * next so a single call makes as much progress as the buffered data
 * allows; each sub-parser returns PARSING_DONE when its part is
 * complete, or breaks out to wait for more data.
 */
void parse_request(struct parser *p) {
    int r;
    if(!read_socket(p)) return;
    switch (p->state) {
    case PARSING_START:
        p->mark = 0;
        p->state = PARSING_METHOD;
        debug("parse started");
        /* fall through */
    case PARSING_METHOD:
        r = parse_request_method(p);
        if (r != PARSING_DONE) break;
        p->state = PARSING_URI;
        debug("parsed method: %d", p->request.method);
        /* fall through */
    case PARSING_URI:
        r = parse_request_uri(p);
        if (r != PARSING_DONE) break;
        p->state = PARSING_VERSION;
        debug("parsed uri");
        /* fall through */
    case PARSING_VERSION:
        r = parse_http_version(p);
        if (r != PARSING_DONE) break;
        p->state = PARSING_HEADERS;
        debug("parsed http version");
        /* fall through */
    case PARSING_HEADERS:
    case PARSING_HEADER_NAME:
    case PARSING_HEADER_NAME_ANY:
    case PARSING_HEADER_VALUE:
    case PARSING_HEADER_CONTENT_LENGTH:
        r = parse_headers(p);
        if (r != PARSING_DONE) break;
        p->state = PARSING_BODY;
        debug("parsed headers");
        debug("content-length: %ld", p->request.content_length);
        /* fall through */
    case PARSING_BODY:
        r = parse_body(p);
        if (r != PARSING_DONE) break;
        p->state = PARSING_DONE;
        debug("parsed body");
        return;
    default:
        debug("illegal parser state: %d", p->state);
        p->state = PARSING_ERROR;
        p->error = E_PARSE;
        return;
    }
    /* a sub-parser bailed out early: record the error if it failed */
    if (r == PARSING_ERROR) {
        p->state = PARSING_ERROR;
        if (p->error == E_NONE) p->error = E_PARSE;
    }
    return;
}
void http_server::http_request_state::update(http_server* server)
// Continues processing the request.  Deactivates our state when the
// request is finished.  Driven by m_request_state: IDLE waits for new
// data, PARSE_* states consume it, PARSE_DONE dispatches the request.
{
    if (! is_alive()) {
        return;
    }

    int bytes_in;
    static const int MAX_LINE_BYTES = 32768;  // very generous, but not insane, max line size.
    static const float CONNECTION_TIMEOUT = 300.0f;  // How long to leave the socket open.

    if (m_request_state == IDLE) {
        // Watch for the start of a new request.
        if (m_req.m_sock->is_readable() == false) {
            uint64 now = tu_timer::get_ticks();
            if (tu_timer::ticks_to_seconds(now - m_last_activity) > CONNECTION_TIMEOUT) {
                // Timed out; close the socket.
                deactivate();
                printf("socket timed out, deactivating.\n");//xxxxxx
            }
            // Idle.
            return;
        } else {
            // We have some data on the socket; start parsing it.
            m_request_state = PARSE_START_LINE;
            m_last_activity = tu_timer::get_ticks();
            // Fall through and start processing.
            //printf("socket is now readable\n");//xxxx
        }
    }

    if (m_req.m_sock->is_open() == false) {
        // The connection closed on us -- abort the current request!
        deactivate();
        printf("socket closed, deactivating.\n");//xxxxxx
        return;
    }

    switch (m_request_state) {
    default:
        // Invalid state.
        assert(0);
        deactivate();
        break;

    case PARSE_START_LINE:
    case PARSE_HEADER:
        // wait for a whole line, parse it.
        bytes_in = m_req.m_sock->read_line(&m_line_buffer, MAX_LINE_BYTES - m_line_buffer.length(), 0.010f);
        if (m_line_buffer.length() >= 2 && m_line_buffer[m_line_buffer.length() - 1] == '\n') {
            //printf("req got header line: %s", m_line_buffer.c_str());//xxxxxx
            // We have the whole line.  Parse and continue.
            m_req.m_status = parse_message_line(m_line_buffer.c_str());
            if (m_req.m_status >= 400) {
                m_line_buffer.clear();
                m_request_state = PARSE_DONE;
            }
            // else we're either still in the header, or
            // process_header changed our parse state.
            m_line_buffer.clear();
        } else if (m_line_buffer.length() >= MAX_LINE_BYTES) {
            printf("req invalid header line length\n");//xxxxxx
            // Invalid line.
            m_line_buffer.clear();
            m_req.m_status = HTTP_BAD_REQUEST;
            m_request_state = PARSE_DONE;
        }
        break;

    case PARSE_BODY_IDENTITY:
    case PARSE_BODY_CHUNKED_CHUNK:
    case PARSE_BODY_CHUNKED_TRAILER:
        // Consume body data according to the transfer encoding.
        m_req.m_status = parse_body();
        if (m_req.m_status >= 400) {
            // Something bad happened.
            m_request_state = PARSE_DONE;
        }
        break;

    case PARSE_DONE:
        // Respond to the request.
        server->dispatch_request(&m_req);

        // Leave the connection open, but go idle, waiting for
        // another request.
        m_request_state = IDLE;
        m_last_activity = tu_timer::get_ticks();
        clear();
        break;
    }
}
/*
 * detect and read the header of the incoming message from the gimbal
 *
 * Byte-oriented state machine driven by _step:
 *   0 waits for the '>' start marker, 1 reads the command id,
 *   2 reads the payload size, 3 verifies the header checksum,
 *   4 accumulates the payload, 5 verifies the body checksum and
 *   dispatches to parse_body().
 */
void AP_Mount_Alexmos::read_incoming()
{
    uint8_t data;
    int16_t numc;

    numc = _port->available();
    if (numc < 0 ){
        return;
    }

    for (int16_t i = 0; i < numc; i++) {        // Process bytes received
        data = _port->read();
        switch (_step) {
            case 0:
                if ( '>' == data) {
                    _step = 1;
                    _checksum = 0; //reset checksum accumulator
                    _last_command_confirmed = false;
                }
                break;

            case 1: // command ID
                _checksum = data;    // first checksummed byte: assignment, not +=
                _command_id = data;
                _step++;
                break;

            case 2: // Size of the body of the message
                _checksum += data;
                _payload_length = data;
                _step++;
                break;

            case 3: // checksum of the header
                if (_checksum != data ) {
                    _step = 0;
                    _checksum = 0;
                    // checksum error
                    break;
                }
                _step++;
                _checksum = 0;
                _payload_counter = 0; // prepare to receive payload
                break;

            case 4: // parsing body
                _checksum += data;
                // guard against overrunning the buffer on oversized payloads
                if (_payload_counter < sizeof(_buffer)) {
                    _buffer[_payload_counter] = data;
                }
                if (++_payload_counter == _payload_length)
                    _step++;
                break;

            case 5: // body checksum
                _step = 0;
                if (_checksum != data) {
                    break;
                }
                parse_body();
        }
    }
}
http_status http_server::http_request_state::process_header()
// Call this after finishing parsing the header.  Sets the following
// parse state.  Returns an HTTP status code.
{
    assert(m_request_state == PARSE_HEADER);
    assert(m_req.m_body.length() == 0);

    // Set up path/file vars: split the URI at '?' into path and query.
    const char* pathend = strchr(m_req.m_uri.c_str(), '?');
    const char* query = NULL;
    if (pathend) {
        query = pathend + 1;
    } else {
        pathend = m_req.m_uri.c_str() + m_req.m_uri.length();
    }
    m_req.m_path = tu_string(m_req.m_uri.c_str(), pathend - m_req.m_uri.c_str());
    unescape_url_component(&m_req.m_path);

    // The file component is everything after the last '/' in the path.
    const char* filestart = strrchr(m_req.m_path.c_str(), '/');
    if (filestart) {
        m_req.m_file = (filestart + 1);
        unescape_url_component(&m_req.m_file);
    }

    // Parse params in the request string.
    parse_query_string(query);

    // -1 means "no content-length header seen".
    m_content_length = -1;
    tu_string content_length_str;
    if (m_req.m_header.get("content-length", &content_length_str)) {
        m_content_length = atol(content_length_str.c_str());
        if (m_content_length > MAX_CONTENT_LENGTH_ACCEPTED) {
            m_request_state = PARSE_DONE;
            return HTTP_REQUEST_ENTITY_TOO_LARGE;
        }
    }

    tu_string transfer_encoding;
    if (m_req.m_header.get("transfer-encoding", &transfer_encoding)) {
        if (transfer_encoding.to_tu_stringi() == "chunked") {
            // We're required to ignore the content-length header.
            m_content_length = -1;
            m_request_state = PARSE_BODY_CHUNKED_CHUNK;
            return parse_body();
        } else if (transfer_encoding.to_tu_stringi() == "identity") {
            // This is OK; basically a no-op.
        } else {
            // A transfer encoding we don't know how to handle.
            // Reject it.
            m_request_state = PARSE_DONE;
            return HTTP_NOT_IMPLEMENTED;
        }
    }

    // If there's a body section, then we need to read it & parse it.
    if (m_content_length >= 0) {
        m_request_state = PARSE_BODY_IDENTITY;
        return parse_body();
    } else {
        m_request_state = PARSE_DONE;
    }

    return HTTP_OK;
}
// Constructs a request from raw request text, then parses and
// validates it; the result is recorded in m_is_valid.
HttpRequest::HttpRequest(const std::string& request_lines)
    : m_body(request_lines)
{
    m_is_valid = parse_body();
}