Example #1
int CFfmpeg::Seek(int64_t pos)
{
	int ret;
	double rate;
	int64_t seek_target;
	pthread_mutex_lock(&iolock);
	if(transcode)
	{
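		/* Map the requested byte offset to a timestamp: treat the offset as a
		 * fraction of the file size, scale the container duration by that
		 * fraction, then rescale from AV_TIME_BASE units into the default
		 * stream's time_base before calling av_seek_frame(). */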
		rate = ((double)pos)/((double)filesize);
		seek_target = infmt_ctx->duration * rate;
		FFMPEG_DEBUG("rate=%f, target = %lld, duration=%lld", rate, seek_target, infmt_ctx->duration);
		int defaultStreamIndex = av_find_default_stream_index(infmt_ctx);
		seek_target = av_rescale_q(seek_target, AV_TIME_BASE_Q, infmt_ctx->streams[defaultStreamIndex]->time_base);
		ret = av_seek_frame(infmt_ctx, defaultStreamIndex, seek_target, AVSEEK_FLAG_ANY);
		FFMPEG_DEBUG("ret = %s", ret_str(ret));
		eof = 0;
		if(curpos > pos)
		{
			if(vst)
				vdts_base = vst->cur_dts;
			if(ast1)
				a1dts_base = ast1->cur_dts;
			if(ast2)
				a2dts_base = ast2->cur_dts;
		}
		outputringbuffer.data_ptr = outputringbuffer.buffer_base;
		outputringbuffer.data_size = 0;
	}
	else
	{
		ret = fseek(m_pFp, pos, SEEK_SET);
		if(ret == 0)
		{
			curpos = pos;
		}
		FFMPEG_DEBUG("pos=%lld, m_nCurpos=%lld", pos, curpos);
	}
	pthread_mutex_unlock(&iolock);
	return 0;
}
Example #2
void CFfmpeg::check_transcode(void)
{
	int i;
	AVStream *stream;
	FFMPEG_DEBUG("probed type: %s", infmt_ctx->iformat->name);
	if((strcmp(infmt_ctx->iformat->name,"mpegts") == 0)
		|| (strcmp(infmt_ctx->iformat->name,"avi") == 0)
		|| (strcmp(infmt_ctx->iformat->name,"mpeg") == 0)
		|| (strcmp(infmt_ctx->iformat->name,"mp3") == 0))
	{
		transcode = 0;
	}
/*	else if((strcmp(infmt_ctx->iformat->name,"mov,mp4,m4a,3gp,3g2,mj2") == 0)
		|| (strcmp(infmt_ctx->iformat->name,"matroska,webm") == 0)
		|| (strcmp(infmt_ctx->iformat->name,"flv") == 0))
	{
		transcode = 1;
	}*/
	else
	{
		//MULTIMEDIA_WARN("unknown container format");
		transcode = 1;
	}

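	/* Independently of the container check above, streams carrying AC-3, DTS
	 * or raw PCM audio force the transcode path (they are re-encoded to MP2
	 * in CFfmpeg::Open()). */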
	for(i=0; i<infmt_ctx->nb_streams; i++)	 
	{	
		stream = infmt_ctx->streams[i];
		if(stream->codec->codec_type == AVMEDIA_TYPE_AUDIO) // audio stream
		{
			switch (stream->codec->codec_id)
			{
				/*case CODEC_ID_MP2:
				case CODEC_ID_MP3:
				case CODEC_ID_AAC:
					transcode = 0;
					break;*/
				case CODEC_ID_AC3:
				case CODEC_ID_DTS:
				case CODEC_ID_PCM_S16BE:
				case CODEC_ID_PCM_S16LE:
					transcode = 1;
					break;
				default:
					break;
			}
		}
	}
}
C_RESULT ffmpeg_stage_decoding_transform(ffmpeg_stage_decoding_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
  static const int        sws_flags = SWS_FAST_BILINEAR;
  AVCodecContext  *pCodecCtxMP4 = cfg->pCodecCtxMP4;
  AVCodecContext  *pCodecCtxH264 = cfg->pCodecCtxH264;
  AVFrame         *pFrame = cfg->pFrame;
  AVFrame         *pFrameOutput = cfg->pFrameOutput;
  static AVPacket packet;
  int	frameFinished = 0;
    
  bool_t frameDimChanged = FALSE;
  static parrot_video_encapsulation_t PaVE, prevPaVE;
    
#if WAIT_FOR_I_FRAME
  static bool_t waitForIFrame = TRUE;
#endif
    
#ifdef NUM_SAMPLES
  static struct timeval start_time, start_time2;
  static int numsamples = 0;
#endif	
    
  if (0 == in->size) // No frame
    {
      FFMPEG_DEBUG ("in->size is zero, don't do anything");
      return C_OK;
    }
  
  vp_os_mutex_lock( &out->lock );
  
  if(out->status == VP_API_STATUS_INIT) // Init only code
    {		
      out->numBuffers   = 1;
      out->buffers      = cfg->bufferArray;
      out->buffers[0]   = NULL;
      out->indexBuffer  = 0;
      out->lineSize     = 0;
        
      av_init_packet(&packet);
 
        
#if __FFMPEG_DEBUG_ENABLED
#else
      av_log_set_callback (&empty_av_log_callback);
#endif
    }
 
  if (! check_and_copy_PaVE(&PaVE, in, &prevPaVE, &frameDimChanged))
    {
      FFMPEG_DEBUG("Received a frame without PaVE informations");
      vp_os_mutex_unlock( &out->lock );
      return C_FAIL;
    }
    
  if ((out->status == VP_API_STATUS_INIT) || frameDimChanged) // Init and "new frame dimensions" code
    {
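      /* (Re)program both decoder contexts, the output picture buffer and the
       * software scaler from the dimensions announced in the PaVE header. */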
      pCodecCtxMP4->width = PaVE.encoded_stream_width;
      pCodecCtxMP4->height = PaVE.encoded_stream_height;
      pCodecCtxH264->width = PaVE.encoded_stream_width;
      pCodecCtxH264->height = PaVE.encoded_stream_height;
		
      cfg->src_picture.width = PaVE.display_width;
      cfg->src_picture.height = PaVE.display_height;
      cfg->src_picture.format = pCodecCtxH264->pix_fmt;
      cfg->dst_picture.width = PaVE.display_width;
      cfg->dst_picture.height = PaVE.display_height;
		
      out->size = avpicture_get_size(cfg->dst_picture.format, cfg->dst_picture.width, cfg->dst_picture.height);
      cfg->buffer = (uint8_t *)av_realloc(cfg->buffer, out->size * sizeof(uint8_t));
      out->buffers[0] = cfg->buffer;
		
      avpicture_fill((AVPicture *)pFrameOutput, (uint8_t*)out->buffers[out->indexBuffer], cfg->dst_picture.format,
                     cfg->dst_picture.width, cfg->dst_picture.height);
		
        
      cfg->img_convert_ctx = sws_getCachedContext(cfg->img_convert_ctx, PaVE.display_width, PaVE.display_height,
                                             pCodecCtxH264->pix_fmt, PaVE.display_width, PaVE.display_height,
                                             cfg->dst_picture.format, sws_flags, NULL, NULL, NULL);

      if (out->status == VP_API_STATUS_INIT)
        {
#ifdef NUM_SAMPLES
          gettimeofday(&start_time, NULL);
#endif		
          out->status = VP_API_STATUS_PROCESSING;
          FFMPEG_DEBUG("End of init");
        }
    }

#if	WAIT_FOR_I_FRAME
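  /* If the PaVE frame/slice numbering is not contiguous, a frame was lost on
   * the way: drop P frames and wait for the next I/IDR frame before resuming
   * decoding. */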
  if ( (PaVE.frame_number != (prevPaVE.frame_number +1)) 
        && 
        ( PaVE.frame_number != prevPaVE.frame_number || PaVE.slice_index != (prevPaVE.slice_index+1) )   )
    {
      FFMPEG_DEBUG ("Missed a frame :\nPrevious was %d of type %d\nNew is %d of type %d", prevPaVE.frame_number, prevPaVE.frame_type,
                    PaVE.frame_number, PaVE.frame_type);
      waitForIFrame = TRUE;  
    }
    
#if DISPLAY_DROPPED_FRAMES
  if (waitForIFrame && PaVE.frame_type == FRAME_TYPE_P_FRAME)
    {
      FFMPEG_DEBUG ("Dropped a P frame\n");
      dropped_frames++;
    }
#endif
    
  if(out->status == VP_API_STATUS_PROCESSING && (!waitForIFrame || (PaVE.frame_type == FRAME_TYPE_IDR_FRAME) || (PaVE.frame_type == FRAME_TYPE_I_FRAME))) // Processing code
    {
      waitForIFrame = FALSE;
#else
      if(out->status == VP_API_STATUS_PROCESSING) // Processing code  
        {
#endif
          /* The 'check_and_copy_PaVE' function already removed the PaVE from the 'in' buffer */
          packet.data = ((unsigned char*)in->buffers[in->indexBuffer]);
          packet.size = in->size;
          FFMPEG_DEBUG("Size : %d", packet.size);
        
#ifdef NUM_SAMPLES
          struct timeval end_time;
          static float32_t frame_decoded_time = 0;

          gettimeofday(&start_time2, NULL);
#endif
          // Decode video frame
          if (PaVE.video_codec == CODEC_MPEG4_VISUAL)
            {
              avcodec_decode_video2 (pCodecCtxMP4, pFrame, &frameFinished, &packet);
            }
          else if (PaVE.video_codec == CODEC_MPEG4_AVC)
            {
              avcodec_decode_video2 (pCodecCtxH264, pFrame, &frameFinished, &packet);
            }
        
          // Did we get a video frame?
          if(frameFinished)
            {
              pFrameOutput->data[0] = (uint8_t*)out->buffers[out->indexBuffer];
              sws_scale(cfg->img_convert_ctx, (const uint8_t *const*)pFrame->data, 
                        pFrame->linesize, 0, 
                        PaVE.display_height,
                        pFrameOutput->data, pFrameOutput->linesize);
				
              cfg->num_picture_decoded++;

#ifdef NUM_SAMPLES
              gettimeofday(&end_time, NULL);
              frame_decoded_time += ((end_time.tv_sec * 1000.0 + end_time.tv_usec / 1000.0) - (start_time2.tv_sec * 1000.0 + start_time2.tv_usec / 1000.0));

              if(numsamples++ > NUM_SAMPLES)
                {
                  float32_t value = ((end_time.tv_sec * 1000.0 + end_time.tv_usec / 1000.0) - (start_time.tv_sec * 1000.0 + start_time.tv_usec / 1000.0));
					
                  printf("Frames decoded in average %f fps, received and decoded in average %f fps\n", (1000.0 / (frame_decoded_time / (float32_t)NUM_SAMPLES)), 1000.0 / (value / (float32_t)NUM_SAMPLES));
                  gettimeofday(&start_time, NULL);
                  frame_decoded_time = 0;
                  numsamples = 0;
                }					
#endif
            }
          else
            {
              /* Skip frames are usually 7 bytes long and make FFMPEG return an
               * error. It is however normal to get skip frames from the drone. */
              if (7 != PaVE.payload_size)
                {
                  printf ("Decoding failed for a %s\n", (PaVE.frame_type == FRAME_TYPE_P_FRAME) ? "P Frame" : "I Frame");
                }
            }
        
#if DISPLAY_DROPPED_FRAMES
          if ((PaVE.frame_type == FRAME_TYPE_IDR_FRAME) || (PaVE.frame_type == FRAME_TYPE_I_FRAME))
            {
              if (previous_ok_frame != 0)
                {
                  static int globalMiss = 0, globalDrop = 0, globalFrames = 0;
                  globalMiss += missed_frames;
                  globalDrop += dropped_frames;
                  int globalMissDrop = globalMiss + globalDrop;
                  int total_miss = missed_frames + dropped_frames;
                  int total_frames = PaVE.frame_number - previous_ok_frame;
                  globalFrames += total_frames;
                  float missPercent = (100.0 * missed_frames) / (1.0 * total_frames);
                  float dropPercent = (100.0 * dropped_frames) / (1.0 * total_frames);
                  float totalPercent = (100.0 * total_miss) / (1.0 * total_frames);
                  float missMean = (100.0 * globalMiss) / (1.0 * globalFrames);
                  float dropMean = (100.0 * globalDrop) / (1.0 * globalFrames);
                  float totalMean = (100.0 * globalMissDrop) / (1.0 * globalFrames);
                  printf ("LAST %4d F => M %4d (%4.1f%%) / D %4d (%4.1f%%) / T %4d (%4.1f%%) <=> ALL %4d F => M %4d (%4.1f%%) / D %4d (%4.1f%%) / T %4d (%4.1f%%)\n", total_frames, missed_frames, missPercent, dropped_frames, dropPercent, total_miss, totalPercent, globalFrames, globalMiss, missMean, globalDrop, dropMean, globalMissDrop, totalMean);
                }
              missed_frames = 0; dropped_frames = 0;
              previous_ok_frame = PaVE.frame_number;
            }
#endif
        
	}
	
      vp_os_mutex_unlock( &out->lock );
	
      return C_OK;
    }

#define FFMPEG_CHECK_AND_FREE(pointer, freeFunc)        \
  do                                                    \
    {                                                   \
      if (NULL != pointer)                              \
        {                                               \
          freeFunc (pointer);                           \
          pointer = NULL;                               \
        }                                               \
    } while (0)

#define FFMPEG_CHECK_AND_FREE_WITH_CALL(pointer, func, freeFunc)        \
  do                                                                    \
    {                                                                   \
      if (NULL != pointer)                                              \
        {                                                               \
          func (pointer);                                               \
          freeFunc (pointer);                                           \
          pointer = NULL;                                               \
        }                                                               \
    } while (0)
  

C_RESULT ffmpeg_stage_decoding_close(ffmpeg_stage_decoding_config_t *cfg)
{
  FFMPEG_CHECK_AND_FREE_WITH_CALL(cfg->pCodecCtxMP4, avcodec_close, av_free);
  FFMPEG_CHECK_AND_FREE_WITH_CALL(cfg->pCodecCtxH264, avcodec_close, av_free);
  FFMPEG_CHECK_AND_FREE(cfg->pFrame, av_free);
  FFMPEG_CHECK_AND_FREE(cfg->pFrameOutput, av_free);
  FFMPEG_CHECK_AND_FREE(cfg->bufferArray, vp_os_free);
  FFMPEG_CHECK_AND_FREE(cfg->buffer, av_free);
  FFMPEG_CHECK_AND_FREE(cfg->img_convert_ctx, sws_freeContext);
  return C_OK;
}
static inline bool_t check_and_copy_PaVE (parrot_video_encapsulation_t *PaVE, vp_api_io_data_t *data, parrot_video_encapsulation_t *prevPaVE, bool_t *dimChanged)
{
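  /* Every video buffer coming from the drone is expected to start with a PaVE
   * (Parrot Video Encapsulation) header: validate its signature, keep a copy
   * of the previous one to detect changes, and strip the header so only the
   * codec payload is left in the buffer. */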
  parrot_video_encapsulation_t *localPaVE = (parrot_video_encapsulation_t *)data->buffers[data->indexBuffer];
  if (localPaVE->signature[0] == 'P' &&
      localPaVE->signature[1] == 'a' &&
      localPaVE->signature[2] == 'V' &&
      localPaVE->signature[3] == 'E')
    {
      //FFMPEG_DEBUG("Found a PaVE");
      vp_os_memcpy (prevPaVE, PaVE, sizeof (parrot_video_encapsulation_t)); // Make a backup of previous PaVE so we can check if things have changed
      
      vp_os_memcpy (PaVE, localPaVE, sizeof (parrot_video_encapsulation_t)); // Copy PaVE to our local one
      


#if __FFMPEG_DEBUG_ENABLED
      printf ("------------------------------------\n");
      printf ("PREV : ");
      ffmpeg_decoder_dumpPave (prevPaVE);
      printf ("CURR : ");
      ffmpeg_decoder_dumpPave (PaVE);
      printf ("------------------------------------\n");
      
      
      
#endif
      if (prevPaVE->encoded_stream_width  != PaVE->encoded_stream_width   ||
          prevPaVE->encoded_stream_height != PaVE->encoded_stream_height  ||
          prevPaVE->display_width         != PaVE->display_width          ||
          prevPaVE->display_height        != PaVE->display_height         ||
          prevPaVE->stream_id             != PaVE->stream_id                )
        {
          *dimChanged = TRUE;
        }
      else
        {
          *dimChanged = FALSE;
        }
      data->size = localPaVE->payload_size;
      memmove(data->buffers[data->indexBuffer], &(data->buffers[data->indexBuffer])[localPaVE->header_size], data->size);
#if DISPLAY_DROPPED_FRAMES
      missed_frames += PaVE->frame_number - prevPaVE->frame_number - 1;
#endif
      return TRUE;
    }
  else
    {    
      FFMPEG_DEBUG("No PaVE, signature was [%c][%c][%c][%c]",
                   localPaVE->signature[0],
                   localPaVE->signature[1],
                   localPaVE->signature[2],
                   localPaVE->signature[3]);
#if FAKE_PaVE
      PaVE->encoded_stream_width = 640;
      PaVE->encoded_stream_height = 368;
      PaVE->display_width = 640;
      PaVE->display_height = 360;
      PaVE->video_codec = FAKE_PaVE_CODEC;
      PaVE->frame_type = FRAME_TYPE_I_FRAME;
      vp_os_memcpy (prevPaVE, PaVE, sizeof (parrot_video_encapsulation_t));
      *dimChanged = FALSE;
      return TRUE;
#else
      return FALSE;
#endif
    }
}
Example #5
int CFfmpeg::Open(const char* pszUrl)
{
	unsigned int i;

	m_sUrl = pszUrl;
	m_sUrl.erase(0, strlen("ffmpeg://"));
	
	//+
	infmt_ctx = avformat_alloc_context();
	//ring_buffer_write(&cbuffer.ringbuffer, inputbuffer, sizeof(inputbuffer));
	//unsigned char* inputbuffer = NULL;
	//inputbuffer = (unsigned char*)malloc(MAIN_BUFFER_SIZE);
	init_put_byte(&inputpb, inputbuffer, MAIN_BUFFER_SIZE, 0, &cbuffer, i_read_data, NULL, i_seek_data );
	//inputpb.buf_end = inputpb.buf_ptr;
	infmt_ctx->pb = &inputpb;
	//av_read_frame(infmt_ctx, &pkt);
	//+
	avformat_open_input(&infmt_ctx, m_sUrl.c_str(), NULL, NULL);
	if(!infmt_ctx)
	{
		FFMPEG_ERROR("unknown url: %s", pszUrl);
		return -1;
	}
	
	av_find_stream_info(infmt_ctx);
	av_dump_format(infmt_ctx, 0, m_sUrl.c_str(), 0);
	
	filesize = avio_size(infmt_ctx->pb);
	printf("filesize = %d\n", filesize);
	
	check_transcode();

	if(!transcode)
	{
		if(infmt_ctx)
		{
			av_close_input_file(infmt_ctx);
			infmt_ctx = NULL;
		}
		m_pFp = fopen(m_sUrl.c_str(), "rb");
		if(!m_pFp)
		{
			//perror("fopen");
			FFMPEG_ERROR("error fopen: %s", strerror(errno));
			return -1;
		}
	}
	else
	{
		FFMPEG_DEBUG("transcode or remux");
		avformat_alloc_output_context2(&oc, NULL, "mpegts", NULL);

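		/* Build the MPEG-TS output: take the first video stream and up to two
		 * audio streams from the input, copying codec parameters where possible
		 * and setting up an MP2 re-encode for AC-3/DTS/PCM audio. */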
		unsigned int pid = 0x100;
		for(i=0; i<infmt_ctx->nb_streams; i++)
		{
			AVStream *stream = infmt_ctx->streams[i];
			if(stream->codec->codec_type==AVMEDIA_TYPE_VIDEO && video==-1)
			{
				video = i;
				FFMPEG_DEBUG("video index: %d, pid: 0x%x", i, pid++);
				vst = av_new_stream(oc, 0);
				avcodec_copy_context(vst->codec, infmt_ctx->streams[video]->codec); 
				//vst->codec->time_base = infmt_ctx->streams[video]->time_base;
				vst->codec->sample_aspect_ratio = vst->sample_aspect_ratio = infmt_ctx->streams[video]->codec->sample_aspect_ratio;
				vst->stream_copy = 1;
				vst->avg_frame_rate = infmt_ctx->streams[video]->avg_frame_rate;
				vst->discard = AVDISCARD_NONE;
				vst->disposition = infmt_ctx->streams[video]->disposition;
				vst->duration = infmt_ctx->streams[video]->duration;
				vst->first_dts = infmt_ctx->streams[video]->first_dts;
				vst->r_frame_rate = infmt_ctx->streams[video]->r_frame_rate;
				vst->time_base = infmt_ctx->streams[video]->time_base;
				vst->quality = infmt_ctx->streams[video]->quality;
				vst->start_time = infmt_ctx->streams[video]->start_time;
			}
			else if(stream->codec->codec_type==AVMEDIA_TYPE_AUDIO && audio1==-1)
			{
				audio1 = i;
				FFMPEG_DEBUG("audio1 index: %d, pid: 0x%x", i, pid++);
				ast1 = av_new_stream(oc, 0);
				if(stream->codec->codec_id == CODEC_ID_AC3
					|| stream->codec->codec_id == CODEC_ID_DTS
					|| stream->codec->codec_id == CODEC_ID_PCM_S16BE
					|| stream->codec->codec_id == CODEC_ID_PCM_S16LE)
				{
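					/* For AC-3/DTS/PCM audio the stream is not copied: open a decoder
					 * for the source stream and an MP2 encoder (128 kbit/s, stereo,
					 * S16) for the output stream. */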
					acodec1 = stream->codec;
					AVCodec *inAcodec = avcodec_find_decoder(stream->codec->codec_id);     
					avcodec_open(stream->codec, inAcodec);     
					AVCodec *outAcodec = avcodec_find_encoder(CODEC_ID_MP2);
					//ast1->codec = avcodec_alloc_context3(outAcodec);
					ast1->codec->bit_rate = 128000;
					ast1->codec->sample_rate = stream->codec->sample_rate;
					if(stream->codec->channels > 2)
					{
						stream->codec->request_channels = 2;
					}
					ast1->codec->channels = 2;
					ast1->codec->sample_fmt = AV_SAMPLE_FMT_S16;
					avcodec_open(ast1->codec, outAcodec);
					ast1->codec->time_base = infmt_ctx->streams[audio1]->time_base;
					ring_buffer_init(&adecrbuffer1, 524288);
				}
				else
				{
					avcodec_copy_context(ast1->codec, infmt_ctx->streams[audio1]->codec);
					//ast1->codec->time_base = infmt_ctx->streams[audio1]->time_base;
					ast1->stream_copy = 1;
					ast1->first_dts = infmt_ctx->streams[audio1]->first_dts;
					ast1->r_frame_rate = infmt_ctx->streams[audio1]->r_frame_rate;
					ast1->time_base = infmt_ctx->streams[audio1]->time_base;
					ast1->quality = infmt_ctx->streams[audio1]->quality;
					ast1->start_time = infmt_ctx->streams[audio1]->start_time;
					ast1->duration = infmt_ctx->streams[audio1]->duration;
				}
			}
			else if(stream->codec->codec_type==AVMEDIA_TYPE_AUDIO && audio1!=i && audio2==-1)
			{
				audio2 = i;
				FFMPEG_DEBUG("audio2 index: %d, pid: 0x%x", i, pid++);
				ast2 = av_new_stream(oc, 0);
				if(stream->codec->codec_id == CODEC_ID_AC3
					|| stream->codec->codec_id == CODEC_ID_DTS
					|| stream->codec->codec_id == CODEC_ID_PCM_S16BE
					|| stream->codec->codec_id == CODEC_ID_PCM_S16LE)
				{
					acodec2 = stream->codec;
					AVCodec *inAcodec = avcodec_find_decoder(stream->codec->codec_id);     
					avcodec_open(stream->codec, inAcodec);     
					AVCodec *outAcodec = avcodec_find_encoder(CODEC_ID_MP2);
					//ast2->codec = avcodec_alloc_context3(outAcodec);
					ast2->codec->bit_rate = 128000;
					ast2->codec->sample_rate = stream->codec->sample_rate;
					if(stream->codec->channels > 2)
					{
						stream->codec->request_channels = 2;
					}
					ast2->codec->channels = 2;
					ast2->codec->sample_fmt = AV_SAMPLE_FMT_S16;
					avcodec_open(ast2->codec, outAcodec);
					ast2->codec->time_base = infmt_ctx->streams[audio2]->time_base;
					ring_buffer_init(&adecrbuffer2, 524288);
				}
				else
				{
					avcodec_copy_context(ast2->codec, infmt_ctx->streams[audio2]->codec);
					//ast2->codec->time_base = infmt_ctx->streams[audio2]->time_base;
					ast2->stream_copy = 1;
					ast2->first_dts = infmt_ctx->streams[audio2]->first_dts;
					ast2->r_frame_rate = infmt_ctx->streams[audio2]->r_frame_rate;
					ast2->time_base = infmt_ctx->streams[audio2]->time_base;
					ast2->quality = infmt_ctx->streams[audio2]->quality;
					ast2->start_time = infmt_ctx->streams[audio2]->start_time;
					ast2->duration = infmt_ctx->streams[audio2]->duration;
				}
			}
		}
		
		init_put_byte(&outputpb, outputbuffer, MAIN_BUFFER_SIZE, 1, &outputringbuffer, NULL, write_data, NULL );
		oc->pb = &outputpb;
		avformat_write_header(oc, NULL);
		//av_dump_format(oc, 0, "output.ts", 1);

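		/* MPEG-TS expects Annex-B H.264; the h264_mp4toannexb bitstream filter
		 * rewrites length-prefixed NAL units (as stored in MP4/MKV) into
		 * start-code-prefixed ones. */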
		if(infmt_ctx->streams[video]->codec->codec_id == CODEC_ID_H264)
		{
			FFMPEG_DEBUG("open h264_mp4toannexb filter");
			bsfc = av_bitstream_filter_init("h264_mp4toannexb");
			if (!bsfc)
			{
				FFMPEG_ERROR("Cannot open the h264_mp4toannexb BSF!");
				return -1;
			}
		}
	}
	return 0;
}