/* Reads exactly one object from the unpacker's buffer and returns it.
 * Raises a Ruby exception (via raise_unpacker_error) on any read failure,
 * including EOF. */
static VALUE Unpacker_read(VALUE self)
{
    UNPACKER(self, uk);

    int ret = msgpack_unpacker_read(uk, 0);
    if(ret < 0) {
        raise_unpacker_error(ret);
    }

    return msgpack_unpacker_get_last_object(uk);
}
/* MessagePack.unpack(src) -- deserializes exactly one object from `src`.
 *
 * `src` may be a String (unpacked in place, by reference where possible)
 * or an IO-like object (read through the buffer's io support).
 * Raises ArgumentError for wrong arity, an unpacker error for malformed
 * input, and eMalformedFormatError if any bytes remain after the object.
 */
VALUE MessagePack_unpack(int argc, VALUE* argv)
{
    VALUE src;
    switch(argc) {
    case 1:
        src = argv[0];
        break;
    default:
        rb_raise(rb_eArgError, "wrong number of arguments (%d for 1)", argc);
    }

    /* Anything that is not a String is treated as an IO source. */
    VALUE io = Qnil;
    if(rb_type(src) != T_STRING) {
        io = src;
        src = Qnil;
    }

    /* A fresh unpacker per call keeps this function thread-safe. */
    VALUE self = Unpacker_alloc(cMessagePack_Unpacker);
    UNPACKER(self, uk);

    /* Prefer referencing the source string over copying it. */
    msgpack_buffer_set_write_reference_threshold(UNPACKER_BUFFER_(uk), 0);

    if(io != Qnil) {
        MessagePack_Buffer_initialize(UNPACKER_BUFFER_(uk), io, Qnil);
    }

    if(src != Qnil) {
        /* prefer reference than copying; see MessagePack_Unpacker_module_init */
        msgpack_buffer_append_string(UNPACKER_BUFFER_(uk), src);
    }

    int r = msgpack_unpacker_read(uk, 0);
    if(r < 0) {
        raise_unpacker_error(r);
    }

    /* raise if extra bytes follow */
    if(msgpack_buffer_top_readable_size(UNPACKER_BUFFER_(uk)) > 0) {
        rb_raise(eMalformedFormatError, "extra bytes follow after a deserialized object");
    }

#ifdef RB_GC_GUARD
    /* This prevents compilers from optimizing out the `self` variable
     * from stack. Otherwise GC free()s it. */
    RB_GC_GUARD(self);
#endif

    return msgpack_unpacker_get_last_object(uk);
}
/* MessagePack.unpack(src) -- deserializes exactly one object from `src`.
 *
 * `src` may be a String (appended to the buffer, by reference where
 * configured) or an IO-like object.
 *
 * NOTE(review): this variant reuses the file-static `s_unpacker`, so it is
 * NOT thread-safe (the TODO below acknowledges this) and is not reentrant:
 * a Ruby exception raised mid-unpack can leave `s_unpacker` holding stale
 * state until the next reset. Confirm callers serialize access.
 *
 * Raises ArgumentError for wrong arity, an unpacker error for malformed
 * input, and eMalformedFormatError if any bytes remain after the object.
 */
VALUE MessagePack_unpack(int argc, VALUE* argv) { VALUE src; switch(argc) { case 1: src = argv[0]; break; default: rb_raise(rb_eArgError, "wrong number of arguments (%d for 1)", argc); } VALUE io = Qnil; if(rb_type(src) != T_STRING) { io = src; src = Qnil; } // TODO create an instance if io is set for thread safety? //VALUE self = Unpacker_alloc(cMessagePack_Unpacker); //UNPACKER(self, uk); msgpack_unpacker_reset(s_unpacker); msgpack_buffer_reset_io(UNPACKER_BUFFER_(s_unpacker)); if(io != Qnil) { MessagePack_Buffer_initialize(UNPACKER_BUFFER_(s_unpacker), io, Qnil); } if(src != Qnil) { /* prefer reference than copying; see MessagePack_Unpacker_module_init */ msgpack_buffer_append_string(UNPACKER_BUFFER_(s_unpacker), src); } int r = msgpack_unpacker_read(s_unpacker, 0); if(r < 0) { raise_unpacker_error(r); } /* raise if extra bytes follow */ if(msgpack_buffer_top_readable_size(UNPACKER_BUFFER_(s_unpacker)) > 0) { rb_raise(eMalformedFormatError, "extra bytes follow after a deserialized object"); } return msgpack_unpacker_get_last_object(s_unpacker); }
/* MessagePack.unpack(src [, options]) -- deserializes exactly one object.
 *
 * `src` may be a String or an IO-like object; any trailing arguments are
 * forwarded to the default factory's unpacker constructor.
 * Raises ArgumentError for wrong arity, an unpacker error for malformed
 * input, and eMalformedFormatError if any bytes remain after the object.
 */
VALUE MessagePack_unpack(int argc, VALUE* argv)
{
    VALUE src;
    VALUE self;

    /* Require at least the source argument: with argc == 0, reading
     * argv[0] below would be out of bounds. */
    if (argc < 1 || argc > 2) {
        rb_raise(rb_eArgError, "wrong number of arguments (%d for 1..2)", argc);
    }

    src = argv[0];

    if(rb_type(src) == T_STRING) {
        /* String source: build the unpacker from the remaining options
         * and feed it the string directly. */
        self = MessagePack_Factory_unpacker(argc - 1, argv + 1, cMessagePack_DefaultFactory);
        UNPACKER(self, uk);
        msgpack_buffer_append_string(UNPACKER_BUFFER_(uk), src);
    } else {
        /* IO-like source: the factory consumes all arguments. */
        self = MessagePack_Factory_unpacker(argc, argv, cMessagePack_DefaultFactory);
    }

    UNPACKER(self, uk);

    /* prefer reference than copying; see MessagePack_Unpacker_module_init */
    msgpack_buffer_set_write_reference_threshold(UNPACKER_BUFFER_(uk), 0);

    int r = msgpack_unpacker_read(uk, 0);
    if(r < 0) {
        raise_unpacker_error(r);
    }

    /* raise if extra bytes follow */
    size_t extra = msgpack_buffer_top_readable_size(UNPACKER_BUFFER_(uk));
    if(extra > 0) {
        /* %zu matches size_t; %zd would be for the signed ssize_t. */
        rb_raise(eMalformedFormatError, "%zu extra bytes after the deserialized object", extra);
    }

#ifdef RB_GC_GUARD
    /* This prevents compilers from optimizing out the `self` variable
     * from stack. Otherwise GC free()s it. */
    RB_GC_GUARD(self);
#endif

    return msgpack_unpacker_get_last_object(uk);
}
/* Yields each deserialized object to the attached Ruby block until the
 * buffer is exhausted. Returns nil on clean EOF; any other read failure
 * raises via raise_unpacker_error. */
static VALUE Unpacker_each_impl(VALUE self)
{
    UNPACKER(self, uk);

    for(;;) {
        int ret = msgpack_unpacker_read(uk, 0);
        if(ret < 0) {
            if(ret == PRIMITIVE_EOF) {
                /* End of input: iteration finished normally. */
                return Qnil;
            }
            raise_unpacker_error(ret);
        }

        VALUE obj = msgpack_unpacker_get_last_object(uk);
#ifdef JRUBY
        /* TODO JRuby's rb_yield behaves differently from Ruby 1.9.3 or Rubinius. */
        if(rb_type(obj) == T_ARRAY) {
            obj = rb_ary_new3(1, obj);
        }
#endif
        rb_yield(obj);
    }
}