void BleX264Encoder::fini() { BleFree(m_pictureIn); if (m_x264Encoder) { x264_encoder_close(m_x264Encoder); } BleFree(m_x264Param); }
// Drain the capture queue under the queue mutex, deleting every pending image.
// NOTE(review): BleAutoLocker(m_mutex) is assumed to be a scope-locking macro
// (same pattern used elsewhere in this file, e.g. BleRtmpSendThread::service);
// if it were a plain class, this statement would create a temporary that
// unlocks immediately — confirm against its definition.
void BleImageCaptureThread::fini() { BleAutoLocker(m_mutex); while (!m_queue.isEmpty()) { BleImage *image = m_queue.dequeue(); BleFree(image); } }
// Stop audio capture: close the RtAudio stream (if one exists) and release
// the grab engine. Always reports success.
int BleAudioCapture::stopCapture()
{
    if (m_grabEngine) {
        m_grabEngine->closeStream();
    }
    BleFree(m_grabEngine);

    return BLE_SUCESS;
}
// Pump encoded A/V packets from the shared queue into the RTMP muxer until
// m_stop is set. Sequence headers are sent first; each packet is then routed
// by type to addH264/addAAC while byte/frame counters are updated.
// NOTE(review): after a send error the inner loop breaks but the outer
// while (!m_stop) keeps running, so sending resumes on the next batch —
// presumably a deliberate keep-trying policy; confirm before changing.
int BleRtmpSendThread::service(BleRtmpMuxer & muxer)
{
    int ret = BLE_SUCESS;

    // Video/audio sequence headers (SPS/PPS, AudioSpecificConfig) must be
    // delivered before any frame data.
    if ((ret = sendVideoSh(muxer)) != BLE_SUCESS) {
        return ret;
    }
    if ((ret = sendAudioSh(muxer)) != BLE_SUCESS) {
        return ret;
    }

    while (!m_stop) {
        QQueue<BleAVPacket *> pkts = BleAVQueue::instance()->dequeue();
        if (pkts.isEmpty()) {
            msleep(50);     // nothing queued yet; back off briefly
            continue;
        }

        BleAutoLocker(m_mutex);
        while (!pkts.empty()) {
            BleAVPacket *pkt = pkts.dequeue();
            // pkt is freed automatically when this loop iteration's scope
            // ends, so it is still valid for the sends below.
            BleAutoFree(BleAVPacket, pkt);

            MStream &data = pkt->data;
            if (pkt->pktType == Packet_Type_Video) {
                if (muxer.addH264(data, pkt->dts) != TRUE ) {
                    ret = BLE_RTMPSEND_ERROR;
                    break;
                }
                // +11: per-packet tag header overhead counted into the rate —
                // TODO confirm the exact framing this accounts for.
                m_videoKbps += (data.size() + 11);
                m_fps += 1;
            } else if (pkt->pktType == Packet_Type_Audio) {
                if (muxer.addAAC(data, pkt->dts) != TRUE ) {
                    ret = BLE_RTMPSEND_ERROR;
                    break;
                }
                m_audioKbps += (data.size() + 11);
            }
            m_sendDataCount += (data.size() + 11);
        }

        // if send failed, then pkts may has some pkt
        // we should delete it.
        for (int i = 0; i < pkts.size(); ++i) {
            BleAVPacket *pkt = pkts.at(i);
            BleFree(pkt);
        }
    }

    return ret;
}
int BleAudioCapture::startCapture(int bitrate, int sampleRate, int channels, int deviceID) { m_bitrate = bitrate; m_sampleRate = sampleRate; m_channels = channels; QString audioFormat = MOption::instance()->option("format", "audio").toString(); if (audioFormat == "AAC") { m_audioEncoder = new BleAudioEncoder_AAC; } else if (audioFormat == "MP3") { // TODO impl } BleAssert(m_audioEncoder); if (!m_audioEncoder->init(m_sampleRate, m_channels, m_bitrate)) { log_error("audio encoder error"); return BLE_AUDIO_INIT_ERROR; } m_grabEngine = new RtAudio; unsigned int bufferFrames = 2048; RtAudio::StreamParameters params; if (deviceID == -1) { deviceID = m_grabEngine->getDefaultInputDevice(); } params.deviceId = deviceID; params.nChannels = m_channels; params.firstChannel = 0; try { m_grabEngine->openStream(NULL, ¶ms, m_bitDepth, m_sampleRate, &bufferFrames, &handleAudioData, this); m_grabEngine->startStream(); } catch (RtError& e) { e.printMessage(); BleFree(m_grabEngine); return BLE_AUDIO_DEVICE_OPEN_ERROR; } BleAVQueue::instance()->timestampBuilder()-> setAudioCaptureInternal(m_audioEncoder->getFrameDuration()); start(); return BLE_SUCESS; }
void BleEncoderThread::run() { BleImageProcessThread * imageProcessThread = dynamic_cast<BleImageProcessThread *> (m_imageProcessThread); BleAssert(imageProcessThread); while (!m_stop) { QQueue<BleImage*> images = BleAVContext::instance()->captureThread->getQueue(); // if can't get image, then sleep 50 ms. if (images.isEmpty()) { msleep(5); continue; } while (!images.empty()) { BleImage * image = images.dequeue(); BleAutoFree(BleImage, image); if (image->dataSize <= 0) continue; IplImage* imgYUV = cvCreateImage(cvSize(image->width, image->height * 3 / 2), IPL_DEPTH_8U, 1); IplImage *cvImage = cvCreateImageHeader(cvSize(image->width, image->height), IPL_DEPTH_8U, 3); cvImage->imageData = image->data; cvImage->imageDataOrigin = image->data; cvCvtColor(cvImage, imgYUV, CV_BGR2YUV_I420); m_x264Encoder->encode((uchar*)imgYUV->imageData, image->pts, image->opaque); cvReleaseImageHeader(&cvImage); cvReleaseImage(&imgYUV); if (m_stop) break; } // do clean for (int i = 0; i > images.size(); ++i) { BleImage *img = images.at(i); BleFree(img); } } log_trace("BleEncoderThread exit normally."); }
// Release the x264 encoder owned by this thread.
void BleEncoderThread::fini()
{
    BleFree(m_x264Encoder);
}
// Drop every cached packet held by the context: the video and audio
// sequence headers and the SEI packet.
void BleContext::fini()
{
    BleFree(videoSH);
    BleFree(audioSH);
    BleFree(seiPkt);
}
// Replace the cached SEI packet. Ownership of pkt transfers to the context;
// the previously cached packet (if any) is deleted.
void BleContext::setSei(BleAVPacket *pkt)
{
    BleFree(seiPkt);    // discard the old packet first
    seiPkt = pkt;
}
// Replace the cached audio sequence header. Ownership of pkt transfers to
// the context; the previously cached packet (if any) is deleted.
void BleContext::setAudioSh(BleAVPacket *pkt)
{
    BleFree(audioSH);   // discard the old header first
    audioSH = pkt;
}
// Replace the cached video sequence header. Ownership of pkt transfers to
// the context; the previously cached packet (if any) is deleted.
void BleContext::setVideoSh(BleAVPacket *pkt)
{
    BleFree(videoSH);   // discard the old header first
    videoSH = pkt;
}