Example #1
static GstFlowReturn
gst_test_reverse_negotiation_sink_buffer_alloc (GstBaseSink * bsink,
    guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf)
{
  GstTestReverseNegotiationSink *sink =
      GST_TEST_REVERSE_NEGOTIATION_SINK (bsink);
  GstVideoFormat fmt;
  gint width, height;

  fail_unless (gst_video_format_parse_caps (caps, &fmt, &width, &height));

  if (sink->nbuffers < 2) {
    *buf =
        gst_buffer_new_and_alloc (gst_video_format_get_size (fmt, width,
            height));
    gst_buffer_set_caps (*buf, caps);
  } else {
    /* from the third buffer on, hand upstream a buffer carrying new
     * 512x128 caps to force reverse (upstream) renegotiation */
    gint fps_n, fps_d;

    fail_unless (gst_video_parse_caps_framerate (caps, &fps_n, &fps_d));

    width = 512;
    height = 128;
    *buf =
        gst_buffer_new_and_alloc (gst_video_format_get_size (fmt, width,
            height));
    caps = gst_video_format_new_caps (fmt, width, height, fps_n, fps_d, 1, 1);
    gst_buffer_set_caps (*buf, caps);
    gst_caps_unref (caps);
  }

  return GST_FLOW_OK;
}
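A minimal sketch of how such a buffer_alloc override is typically installed in GStreamer 0.10; the class struct name follows the usual GObject boilerplate and is assumed here:

/* Sketch (GStreamer 0.10): install the buffer_alloc vmethod in class_init.
 * GstTestReverseNegotiationSinkClass is the assumed boilerplate class name. */
static void
gst_test_reverse_negotiation_sink_class_init (GstTestReverseNegotiationSinkClass * klass)
{
  GstBaseSinkClass *gstbasesink_class = GST_BASE_SINK_CLASS (klass);

  gstbasesink_class->buffer_alloc =
      GST_DEBUG_FUNCPTR (gst_test_reverse_negotiation_sink_buffer_alloc);
}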
Example #2
static GstCaps *
gst_dshowvideosrc_getcaps_from_enum_mediatypes (GstDshowVideoSrc * src, IPin * pin)
{
  GstCaps *caps = NULL;
  IEnumMediaTypes *enum_mediatypes = NULL;
  HRESULT hres = S_OK;
  GstCapturePinMediaType *pin_mediatype = NULL;

  hres = pin->EnumMediaTypes (&enum_mediatypes);
  if (FAILED (hres)) {
    GST_ERROR ("Failed to retrieve IEnumMediaTypes (error=0x%x)", hres);
    return NULL;
  }

  caps = gst_caps_new_empty ();

  while ((pin_mediatype = gst_dshow_new_pin_mediatype_from_enum_mediatypes (pin, enum_mediatypes)) != NULL) {

    GstCaps *mediacaps = NULL;
    GstVideoFormat video_format = gst_dshow_guid_to_gst_video_format (pin_mediatype->mediatype);

    if (video_format != GST_VIDEO_FORMAT_UNKNOWN)
      mediacaps = gst_video_format_new_caps (video_format, 
          pin_mediatype->defaultWidth, pin_mediatype->defaultHeight,
          pin_mediatype->defaultFPS, 1, 1, 1);

    if (mediacaps) {
      src->pins_mediatypes =
          g_list_append (src->pins_mediatypes, pin_mediatype);
      gst_caps_append (caps, mediacaps);
    } else {
      /* failed to convert dshow caps */
      gst_dshow_free_pin_mediatype (pin_mediatype);
    }
  }

  enum_mediatypes->Release ();

  if (caps && gst_caps_is_empty (caps)) {
    gst_caps_unref (caps);
    caps = NULL;
  }

  return caps;
}
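A hedged sketch of how the aggregated caps might be surfaced through the source pad's getcaps function in GStreamer 0.10; src->video_pin is an assumed field standing in for wherever the element keeps its DirectShow capture pin:

/* Hypothetical getcaps wiring (GStreamer 0.10); src->video_pin is an
 * assumed field holding the capture IPin. */
static GstCaps *
gst_dshowvideosrc_getcaps (GstPad * pad)
{
  GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (gst_pad_get_parent (pad));
  GstCaps *caps = NULL;

  if (src->video_pin)
    caps = gst_dshowvideosrc_getcaps_from_enum_mediatypes (src, src->video_pin);

  if (!caps)  /* fall back to the pad template caps */
    caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));

  gst_object_unref (src);
  return caps;
}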
Example #3
void
gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder)
{
  GstCaps *caps;
  GstVideoState *state = &base_video_decoder->state;

  if (base_video_decoder->have_src_caps)
    return;

  caps = gst_video_format_new_caps (state->format,
      state->width, state->height,
      state->fps_n, state->fps_d, state->par_n, state->par_d);
  gst_caps_set_simple (caps, "interlaced",
      G_TYPE_BOOLEAN, state->interlaced, NULL);

  GST_DEBUG ("setting caps %" GST_PTR_FORMAT, caps);

  gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), caps);

  base_video_decoder->have_src_caps = TRUE;
}
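A hypothetical caller sketch: fill in the decoder's GstVideoState before negotiating source caps (all field values are illustrative):

/* Sketch: populate state, then negotiate. Values are illustrative only. */
static void
example_configure_decoder (GstBaseVideoDecoder * dec)
{
  GstVideoState *state = &dec->state;

  state->format = GST_VIDEO_FORMAT_I420;
  state->width = 1280;
  state->height = 720;
  state->fps_n = 30;
  state->fps_d = 1;
  state->par_n = 1;
  state->par_d = 1;
  state->interlaced = FALSE;

  dec->have_src_caps = FALSE;   /* force caps to be (re)set */
  gst_base_video_decoder_set_src_caps (dec);
}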
Example #4
static GstFlowReturn
gst_jasper_dec_negotiate (GstJasperDec * dec, jas_image_t * image)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  gint width, height, channels;
  gint i, j;
  gboolean negotiate = FALSE;
  jas_clrspc_t clrspc;
  GstCaps *allowed_caps, *caps;

  width = jas_image_width (image);
  height = jas_image_height (image);
  channels = jas_image_numcmpts (image);

  GST_LOG_OBJECT (dec, "%d x %d, %d components", width, height, channels);

  /* the jp2c bitstream has no real colour space info (it is kept in the
   * container), so the decoder may only pretend to know when it really
   * does not */
  if (!jas_clrspc_isunknown (dec->clrspc)) {
    clrspc = dec->clrspc;
    GST_DEBUG_OBJECT (dec, "forcing container supplied colour space %d",
        clrspc);
    jas_image_setclrspc (image, clrspc);
  } else
    clrspc = jas_image_clrspc (image);

  if (!width || !height || !channels || jas_clrspc_isunknown (clrspc))
    goto fail_image;

  if (dec->width != width || dec->height != height ||
      dec->channels != channels || dec->clrspc != clrspc)
    negotiate = TRUE;

  if (channels != 3)
    goto not_supported;

  for (i = 0; i < channels; i++) {
    gint cheight, cwidth, depth, sgnd;

    cheight = jas_image_cmptheight (image, i);
    cwidth = jas_image_cmptwidth (image, i);
    depth = jas_image_cmptprec (image, i);
    sgnd = jas_image_cmptsgnd (image, i);

    GST_LOG_OBJECT (dec, "image component %d, %dx%d, depth %d, sgnd %d", i,
        cwidth, cheight, depth, sgnd);

    if (depth != 8 || sgnd)
      goto not_supported;

    if (dec->cheight[i] != cheight || dec->cwidth[i] != cwidth) {
      dec->cheight[i] = cheight;
      dec->cwidth[i] = cwidth;
      negotiate = TRUE;
    }
  }

  if (!negotiate && dec->format != GST_VIDEO_FORMAT_UNKNOWN)
    goto done;

  /* clear and refresh to new state */
  flow_ret = GST_FLOW_NOT_NEGOTIATED;
  dec->format = GST_VIDEO_FORMAT_UNKNOWN;
  dec->width = width;
  dec->height = height;
  dec->channels = channels;

  /* retrieve allowed caps, and find the first one that reasonably maps
   * to the parameters of the colourspace */
  caps = gst_pad_get_allowed_caps (dec->srcpad);
  if (!caps) {
    GST_DEBUG_OBJECT (dec, "... but no peer, using template caps");
    /* need to copy because get_allowed_caps returns a ref,
       and get_pad_template_caps doesn't */
    caps = gst_caps_copy (gst_pad_get_pad_template_caps (dec->srcpad));
  }
  /* avoid lists of fourcc, etc */
  allowed_caps = gst_caps_normalize (caps);
  caps = NULL;
  GST_LOG_OBJECT (dec, "allowed source caps %" GST_PTR_FORMAT, allowed_caps);

  for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
    GstVideoFormat format;
    gboolean ok;

    if (caps)
      gst_caps_unref (caps);
    caps = gst_caps_copy_nth (allowed_caps, i);
    /* sigh, ds and _parse_caps need fixed caps for parsing, fixate */
    gst_pad_fixate_caps (dec->srcpad, caps);
    GST_LOG_OBJECT (dec, "checking caps %" GST_PTR_FORMAT, caps);
    if (!gst_video_format_parse_caps (caps, &format, NULL, NULL))
      continue;
    if (gst_video_format_is_rgb (format) &&
        jas_clrspc_fam (clrspc) == JAS_CLRSPC_FAM_RGB) {
      GST_DEBUG_OBJECT (dec, "trying RGB");
      if ((dec->cmpt[0] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_R))) < 0 ||
          (dec->cmpt[1] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_G))) < 0 ||
          (dec->cmpt[2] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_B))) < 0) {
        GST_DEBUG_OBJECT (dec, "missing RGB color component");
        continue;
      }
    } else if (gst_video_format_is_yuv (format) &&
        jas_clrspc_fam (clrspc) == JAS_CLRSPC_FAM_YCBCR) {
      GST_DEBUG_OBJECT (dec, "trying YUV");
      if ((dec->cmpt[0] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_Y))) < 0 ||
          (dec->cmpt[1] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_CB))) < 0 ||
          (dec->cmpt[2] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_CR))) < 0) {
        GST_DEBUG_OBJECT (dec, "missing YUV color component");
        continue;
      }
    } else
      continue;
    /* match format with validity checks */
    ok = TRUE;
    for (j = 0; j < channels; j++) {
      gint cmpt;

      cmpt = dec->cmpt[j];
      if (dec->cwidth[cmpt] != gst_video_format_get_component_width (format, j,
              width) ||
          dec->cheight[cmpt] != gst_video_format_get_component_height (format,
              j, height))
        ok = FALSE;
    }
    /* commit to this format */
    if (ok) {
      dec->format = format;
      break;
    }
  }

  if (caps)
    gst_caps_unref (caps);
  gst_caps_unref (allowed_caps);

  if (dec->format != GST_VIDEO_FORMAT_UNKNOWN) {
    /* cache some video format properties */
    for (j = 0; j < channels; ++j) {
      dec->offset[j] = gst_video_format_get_component_offset (dec->format, j,
          dec->width, dec->height);
      dec->inc[j] = gst_video_format_get_pixel_stride (dec->format, j);
      dec->stride[j] = gst_video_format_get_row_stride (dec->format, j,
          dec->width);
    }
    dec->image_size = gst_video_format_get_size (dec->format, width, height);
    dec->alpha = gst_video_format_has_alpha (dec->format);

    g_free (dec->buf);          /* g_free() is NULL-safe */
    dec->buf = g_new0 (glong, dec->width);

    caps = gst_video_format_new_caps (dec->format, dec->width, dec->height,
        dec->framerate_numerator, dec->framerate_denominator, 1, 1);

    GST_DEBUG_OBJECT (dec, "Set format to %d, size to %dx%d", dec->format,
        dec->width, dec->height);

    if (!gst_pad_set_caps (dec->srcpad, caps))
      flow_ret = GST_FLOW_NOT_NEGOTIATED;
    else
      flow_ret = GST_FLOW_OK;

    gst_caps_unref (caps);
  }

done:
  return flow_ret;

  /* ERRORS */
fail_image:
  {
    GST_DEBUG_OBJECT (dec, "Failed to process decoded image.");
    flow_ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
not_supported:
  {
    GST_DEBUG_OBJECT (dec, "Decoded image has unsupported colour space.");
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), ("Unsupported colorspace"));
    flow_ret = GST_FLOW_ERROR;
    goto done;
  }
}
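The validity loop above matches component geometry against GStreamer 0.10's per-component helpers; a small sketch of the values it expects for 4:2:0 subsampling:

/* Sketch: component geometry for I420 at 640x480, as compared by the
 * validity loop above (component 0 = luma, 1 and 2 = chroma, subsampled
 * 2x2 in GStreamer 0.10). */
static void
check_i420_component_geometry (void)
{
  g_assert (gst_video_format_get_component_width  (GST_VIDEO_FORMAT_I420, 0, 640) == 640);
  g_assert (gst_video_format_get_component_width  (GST_VIDEO_FORMAT_I420, 1, 640) == 320);
  g_assert (gst_video_format_get_component_height (GST_VIDEO_FORMAT_I420, 2, 480) == 240);
}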
Example #5
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
                                    double fps, CvSize frameSize, bool is_color )
{
    CV_FUNCNAME("CvVideoWriter_GStreamer::open");

    __BEGIN__;
    // this backend does not really honour the fourcc parameter yet and
    // encodes an AVI with jpegenc by default; we need a common API across
    // backends to support fourcc for AVI, and to choose codec and container
    // format (ogg, dirac, matroska) in a uniform way

    // check arguments

    assert (filename);
    assert (fps > 0);
    assert (frameSize.width > 0  &&  frameSize.height > 0);
    std::map<int,char*>::iterator encit;
    encit=encs.find(fourcc);
    if (encit==encs.end())
        CV_ERROR( CV_StsUnsupportedFormat,"Gstreamer Opencv backend doesn't support this codec acutally.");
    if(!isInited) {
        gst_init (NULL, NULL);
        isInited = true;
    }
    close();
    source=gst_element_factory_make("appsrc",NULL);
    file=gst_element_factory_make("filesink", NULL);
    enc=gst_element_factory_make(encit->second, NULL);
    mux=gst_element_factory_make("avimux", NULL);
    color = gst_element_factory_make("ffmpegcolorspace", NULL);
    if (!enc)
        CV_ERROR( CV_StsUnsupportedFormat, "Your version of Gstreamer doesn't support this codec acutally or needed plugin missing.");
    g_object_set(G_OBJECT(file), "location", filename, NULL);
    pipeline = gst_pipeline_new (NULL);
    GstCaps* caps;
    if (is_color) {
        input_pix_fmt=1;
        // express fps as the fraction (fps*1000)/1000 to keep fractional rates
        caps= gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
                                        frameSize.width,
                                        frameSize.height,
                                        (int) (fps * 1000),
                                        1000,
                                        1,
                                        1);
    }
    else  {
        input_pix_fmt=0;
        caps= gst_caps_new_simple("video/x-raw-gray",
                                  "width", G_TYPE_INT, frameSize.width,
                                  "height", G_TYPE_INT, frameSize.height,
                                  "framerate", GST_TYPE_FRACTION, int(fps),1,
                                  "bpp",G_TYPE_INT,8,
                                  "depth",G_TYPE_INT,8,
                                  NULL);
    }
    gst_app_src_set_caps(GST_APP_SRC(source), caps);
    // the encoder element must be added to the bin before it can be linked
    gst_bin_add_many(GST_BIN(pipeline), source, color, enc, mux, file, NULL);
    if(!gst_element_link_many(source, color, enc, mux, file, NULL)) {
        CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
    }


    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
            GST_STATE_CHANGE_FAILURE) {
        CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
    }
    __END__;
    return true;
}
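A hedged usage sketch through the OpenCV C API; whether a given fourcc is accepted depends on the encs map above, so MJPG is an assumption here:

/* Hypothetical usage (values illustrative): write BGR frames into an AVI. */
CvVideoWriter *writer = cvCreateVideoWriter ("out.avi",
    CV_FOURCC ('M', 'J', 'P', 'G'), 25.0, cvSize (640, 480), 1);
/* ... call cvWriteFrame (writer, frame) per frame,
 * then cvReleaseVideoWriter (&writer) when done. */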
Example #6
/*!
 * \brief CvVideoWriter_GStreamer::open
 * \param filename filename to output to
 * \param fourcc desired codec fourcc
 * \param fps desired framerate
 * \param frameSize the size of the expected frames
 * \param is_color color or grayscale
 * \return success
 *
 * We support 2 modes of operation. Either the user enters a filename and a fourcc
 * code, or enters a manual pipeline description like in CvVideoCapture_Gstreamer.
 * In the latter case, we just push frames on the appsrc with appropriate caps.
 * In the former case, we try to deduce the correct container from the filename,
 * and the correct encoder from the fourcc code.
 *
 * If the file extension is not recognized, an AVI container is used
 * (see the usage sketch after this function).
 */
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
                                    double fps, CvSize frameSize, bool is_color )
{
    CV_FUNCNAME("CvVideoWriter_GStreamer::open");

    // check arguments
    assert (filename);
    assert (fps > 0);
    assert (frameSize.width > 0  &&  frameSize.height > 0);

    // init gstreamer
    gst_initializer::init();

    // init vars
    bool manualpipeline = true;
    int  bufsize = 0;
    GError *err = NULL;
    const char* mime = NULL;
    GstStateChangeReturn stateret;

    GstCaps* caps = NULL;
    GstCaps* videocaps = NULL;

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
    GstCaps* containercaps = NULL;
    GstEncodingContainerProfile* containerprofile = NULL;
    GstEncodingVideoProfile* videoprofile = NULL;
#endif

    GstIterator* it = NULL;
    gboolean done = FALSE;
    GstElement *element = NULL;
    gchar* name = NULL;
    GstElement* splitter = NULL;
    GstElement* combiner = NULL;

    // we first try to construct a pipeline from the given string.
    // if that fails, we assume it is an ordinary filename

    __BEGIN__;

    encodebin = gst_parse_launch(filename, &err);
    manualpipeline = (encodebin != NULL);

    if(manualpipeline)
    {
#if GST_VERSION_MAJOR == 0
        it = gst_bin_iterate_sources(GST_BIN(encodebin));
        if(gst_iterator_next(it, (gpointer *)&source) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#else
        it = gst_bin_iterate_sources (GST_BIN(encodebin));
        GValue value = G_VALUE_INIT;

        while (!done) {
          switch (gst_iterator_next (it, &value)) {
            case GST_ITERATOR_OK:
              element = GST_ELEMENT (g_value_get_object (&value));
              name = gst_element_get_name(element);
              if (name){
                if(strstr(name, "opencvsrc") != NULL || strstr(name, "appsrc") != NULL) {
                  source = GST_ELEMENT ( gst_object_ref (element) );
                  done = TRUE;
                }
                g_free(name);
              }
              g_value_unset (&value);

              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);

        if (!source){
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsrc in manual pipeline\n");
            return false;
        }
#endif
        pipeline = encodebin;
    }
    else
    {
        pipeline = gst_pipeline_new (NULL);

        // we just got a filename and a fourcc code.
        // first, try to guess the container from the filename
        //encodebin = gst_element_factory_make("encodebin", NULL);

        //map legacy fourcc ids: these were used in previous OpenCV versions,
        //but no longer exist in GStreamer
        if (fourcc == CV_FOURCC('M','P','1','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'1');
        if (fourcc == CV_FOURCC('M','P','2','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'2');
        if (fourcc == CV_FOURCC('D','R','A','C')) fourcc = CV_FOURCC('d', 'r', 'a' ,'c');


        //create encoder caps from fourcc

        videocaps = gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL);
        if (!videocaps){
            CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this codec.");
        }

        //create container caps from file extension
        mime = filenameToMimetype(filename);
        if (!mime) {
            CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this file type.");
        }

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        containercaps = gst_caps_from_string(mime);

        //create encodebin profile
        containerprofile = gst_encoding_container_profile_new("container", "container", containercaps, NULL);
        videoprofile = gst_encoding_video_profile_new(videocaps, NULL, NULL, 1);
        gst_encoding_container_profile_add_profile(containerprofile, (GstEncodingProfile *) videoprofile);
#endif

        //create pipeline elements
        encodebin = gst_element_factory_make("encodebin", NULL);

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        g_object_set(G_OBJECT(encodebin), "profile", containerprofile, NULL);
#endif
        source = gst_element_factory_make("appsrc", NULL);
        file = gst_element_factory_make("filesink", NULL);
        g_object_set(G_OBJECT(file), "location", filename, NULL);
    }

    if (is_color)
    {
        input_pix_fmt = GST_VIDEO_FORMAT_BGR;
        bufsize = frameSize.width * frameSize.height * 3;

#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "BGR",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);

#endif

    }
    else
    {
#if FULL_GST_VERSION >= VERSION_NUM(0,10,29)
        input_pix_fmt = GST_VIDEO_FORMAT_GRAY8;
        bufsize = frameSize.width * frameSize.height;

#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "GRAY8",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);
#endif
#else
        CV_Assert(!"Gstreamer 0.10.29 or newer is required for grayscale input");
#endif
    }

    gst_app_src_set_caps(GST_APP_SRC(source), caps);
    gst_app_src_set_stream_type(GST_APP_SRC(source), GST_APP_STREAM_TYPE_STREAM);
    gst_app_src_set_size (GST_APP_SRC(source), -1);

    g_object_set(G_OBJECT(source), "format", GST_FORMAT_TIME, NULL);
    g_object_set(G_OBJECT(source), "block", 1, NULL);
    g_object_set(G_OBJECT(source), "is-live", 0, NULL);


    if(!manualpipeline)
    {
        g_object_set(G_OBJECT(file), "buffer-size", bufsize, NULL);
        gst_bin_add_many(GST_BIN(pipeline), source, encodebin, file, NULL);
        if(!gst_element_link_many(source, encodebin, file, NULL)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
        }
    }

#if GST_VERSION_MAJOR == 0
    // HACK: remove streamsplitter and streamcombiner from
    // encodebin pipeline to prevent early EOF event handling.
    // We always fetch BGR or gray-scale frames, so the
    // combiner->splitter edge in the graph is useless.
    it = gst_bin_iterate_recurse (GST_BIN(encodebin));
    while (!done) {
      switch (gst_iterator_next (it, (void**)&element)) {
        case GST_ITERATOR_OK:
          name = gst_element_get_name(element);
          if (strstr(name, "streamsplitter"))
            splitter = element;
          else if (strstr(name, "streamcombiner"))
            combiner = element;
          break;
        case GST_ITERATOR_RESYNC:
          gst_iterator_resync (it);
          break;
        case GST_ITERATOR_ERROR:
          done = true;
          break;
        case GST_ITERATOR_DONE:
          done = true;
          break;
      }
    }

    gst_iterator_free (it);

    if (splitter && combiner)
    {
        gst_element_unlink(splitter, combiner);

        GstPad* src  = gst_element_get_pad(combiner, "src");
        GstPad* sink = gst_element_get_pad(combiner, "encodingsink");

        GstPad* srcPeer = gst_pad_get_peer(src);
        GstPad* sinkPeer = gst_pad_get_peer(sink);

        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);

        gst_pad_link(sinkPeer, srcPeer);

        src = gst_element_get_pad(splitter, "encodingsrc");
        sink = gst_element_get_pad(splitter, "sink");

        srcPeer = gst_pad_get_peer(src);
        sinkPeer = gst_pad_get_peer(sink);

        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);

        gst_pad_link(sinkPeer, srcPeer);
    }
#endif

    stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    if(stateret  == GST_STATE_CHANGE_FAILURE) {
        handleMessage(pipeline);
        CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
    }

    framerate = fps;
    num_frames = 0;

    handleMessage(pipeline);

    __END__;

    return true;
}
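A hedged sketch of the two open() modes described in the header comment; the fourcc, sizes, and pipeline string (GStreamer 1.x element names) are all illustrative:

/* Mode 1 (hypothetical values): filename + fourcc; the container is
 * deduced from the ".mkv" extension, the encoder from the fourcc. */
CvVideoWriter *w1 = cvCreateVideoWriter ("out.mkv",
    CV_FOURCC ('H', '2', '6', '4'), 30.0, cvSize (1280, 720), 1);

/* Mode 2 (illustrative pipeline, fourcc ignored): a manual description
 * containing an appsrc that this writer pushes BGR frames into. */
CvVideoWriter *w2 = cvCreateVideoWriter (
    "appsrc ! videoconvert ! x264enc ! matroskamux ! filesink location=out.mkv",
    0, 30.0, cvSize (1280, 720), 1);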