Example #1
//--------------------------------------------------------------
void ofApp::setup(){
    ofBackground(0,0,0);
    ofEnableSmoothing();
    
    ofSetFrameRate(60);
    videoWidth =  640;
    videoHeight = 480;
    
    
    setVideoSize();
    if(MOVIE_DEBUG_MODE){
        // Load and play the debug movie instead of the live camera
        movie.loadMovie("debugmov1.mov");
        movie.play();
    }else {
        // Set up the webcam
        cam.setVerbose(true);
        cam.setDeviceID(USE_CAMERA_ID);
        cam.initGrabber(actVideoWidth, actVideoHeight);
    }
    
    // Set up the GUI
    int guiMargin = 10;
    float guiW = 300;
    float guiH = ofGetHeight() - guiMargin*2;
    ofxGuiSetDefaultWidth(guiW);
    ofxGuiSetDefaultHeight(18);
    
    gui.setup();
    gui.add(minRadius.setup("MinRadius", 1, 10, 50));
    gui.add(maxRadius.setup("MaxRadius", 100, 10, 500));
    gui.add(minArea.setup("minArea",10,0,50));
    gui.add(maxArea.setup("maxArea",5500,40,5500));
    gui.add(mThreshold.setup("Threshold", 15, 0, 255));
    gui.setPosition(guiMargin, guiMargin);
    
    
    contourFinder.setMinAreaRadius(minRadius);
    contourFinder.setMaxAreaRadius(maxRadius);
    contourFinder.setThreshold(mThreshold);
    // keep a lost blob for up to 15 frames before forgetting it
    contourFinder.getTracker().setPersistence(15);
    // an object can move up to 32 pixels per frame
    contourFinder.getTracker().setMaximumDistance(32);
    
    showLabels = true;
    
    
    // Connect to the OSC server at the specified IP address and port
    sender.setup( HOST, PORT );
}
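
setVideoSize() is called from setup() and update() but its body is not part of this listing. Below is a minimal sketch of what such a helper might look like, assuming actVideoWidth/actVideoHeight are ofApp members that hold the capture size scaled to the window height; the scaling rule itself is an assumption, not taken from the original project.

//--------------------------------------------------------------
// Hypothetical helper: scale the requested 640x480 capture size so it fills
// the window height while keeping the aspect ratio. The real implementation
// is not shown in the original listing.
void ofApp::setVideoSize(){
    float scale = ofGetHeight() / (float)videoHeight;
    actVideoWidth  = videoWidth  * scale;
    actVideoHeight = videoHeight * scale;
}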
Example #2
WMediaPlayer::WMediaPlayer(MediaType mediaType, WContainerWidget *parent)
  : WCompositeWidget(parent),
    mediaType_(mediaType),
    videoWidth_(0),
    videoHeight_(0),
    gui_(this),
    boundSignals_(0)
{
  for (unsigned i = 0; i < 11; ++i)
    control_[i] = 0;

  for (unsigned i = 0; i < 3; ++i)
    display_[i] = 0;

  for (unsigned i = 0; i < 2; ++i)
    progressBar_[i] = 0;

  WTemplate *impl = new WMediaPlayerImpl(this, tr("Wt.WMediaPlayer.template"));
  impl->bindString("gui", std::string());

  setImplementation(impl);

  WApplication *app = WApplication::instance();

  LOAD_JAVASCRIPT(app, "js/WMediaPlayer.js", "WMediaPlayer", wtjs1);

  std::string res = WApplication::relativeResourcesUrl() + "jPlayer/";

  if (!app->environment().ajax())
    app->require(res + "jquery.min.js");

  if (app->require(res + "jquery.jplayer.min.js"))
    app->useStyleSheet(res + "skin/jplayer.blue.monday.css");

  if (mediaType_ == Video)
    setVideoSize(480, 270);

#ifndef WT_TARGET_JAVA
  implementJavaScript(&WMediaPlayer::play,
		      jsPlayerRef() + ".jPlayer('play');");
  implementJavaScript(&WMediaPlayer::pause,
		      jsPlayerRef() + ".jPlayer('pause');");
  implementJavaScript(&WMediaPlayer::stop,
		      jsPlayerRef() + ".jPlayer('stop');");
#endif
}
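
For context, a minimal usage sketch for the constructor above. addSource() and the M4V encoding constant are part of the Wt 3 WMediaPlayer API as far as I know; "video.m4v" is a placeholder link, and the snippet assumes it runs inside a Wt application with "using namespace Wt;".

// Sketch: embed a video player in the application root and register one source.
WMediaPlayer *player = new WMediaPlayer(WMediaPlayer::Video, root());
player->addSource(WMediaPlayer::M4V, "video.m4v");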
Example #3
bool CvCaptureCAM_CMU::setProperty( int property_id, double value )
{
    bool retval = false;
    int ival = cvRound(value);
    C1394Camera* cmucam = camera();
    if( !cmucam )
        return false;

    switch (property_id) {
        case CV_CAP_PROP_FRAME_WIDTH:
        case CV_CAP_PROP_FRAME_HEIGHT:
            {
                int width, height;
                if (property_id == CV_CAP_PROP_FRAME_WIDTH)
                {
                    width = ival;
                    height = width*3/4;
                }
                else {
                    height = ival;
                    width = height*4/3;
                }
                retval = setVideoSize(width, height);
            }
            break;
        case CV_CAP_PROP_FPS:
            retval = setFrameRate(ival);
            break;
        case CV_CAP_PROP_MODE:
            retval = setMode(ival);
            break;
        case CV_CAP_PROP_FORMAT:
            retval = setFormat(ival);
            break;
    }

    // resize the image if it's not the right size anymore
    CvSize size = getSize();
    if( !image || image->width != size.width || image->height != size.height )
    {
        cvReleaseImage( &image );
        image = cvCreateImage( size, 8, 3 );
    }
    return retval;
}
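
The setter above is normally reached through OpenCV's generic capture API rather than called directly. A short sketch using the legacy C interface (CV_CAP_CMU1394 selects the CMU 1394 backend; the 640x480 request is arbitrary):

#include <opencv2/highgui/highgui_c.h>   // legacy C capture API

// Sketch: open a CMU 1394 camera and request a frame size, which ends up in
// CvCaptureCAM_CMU::setProperty() -> setVideoSize().
void cmuCaptureDemo()
{
    CvCapture* cap = cvCreateCameraCapture(CV_CAP_CMU1394);
    if (!cap)
        return;
    cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH, 640);
    cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT, 480);
    IplImage* frame = cvQueryFrame(cap);   // owned by the capture, do not release
    (void)frame;                           // ... process the frame here ...
    cvReleaseCapture(&cap);
}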
Example #4
status_t BnMediaRecorder::onTransact(
                                     uint32_t code, const Parcel& data, Parcel* reply,
                                     uint32_t flags)
{
    switch (code) {
        case RELEASE: {
            ALOGV("RELEASE");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            reply->writeInt32(release());
            return NO_ERROR;
        } break;
        case INIT: {
            ALOGV("INIT");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            reply->writeInt32(init());
            return NO_ERROR;
        } break;
        case CLOSE: {
            ALOGV("CLOSE");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            reply->writeInt32(close());
            return NO_ERROR;
        } break;
        case RESET: {
            ALOGV("RESET");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            reply->writeInt32(reset());
            return NO_ERROR;
        } break;
        case STOP: {
            ALOGV("STOP");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            reply->writeInt32(stop());
            return NO_ERROR;
        } break;
        case START: {
            ALOGV("START");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            reply->writeInt32(start());
            return NO_ERROR;
        } break;
        case PREPARE: {
            ALOGV("PREPARE");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            reply->writeInt32(prepare());
            return NO_ERROR;
        } break;
        case GET_MAX_AMPLITUDE: {
            ALOGV("GET_MAX_AMPLITUDE");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            int max = 0;
            status_t ret = getMaxAmplitude(&max);
            reply->writeInt32(max);
            reply->writeInt32(ret);
            return NO_ERROR;
        } break;
        case SET_VIDEO_SOURCE: {
            ALOGV("SET_VIDEO_SOURCE");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            int vs = data.readInt32();
            reply->writeInt32(setVideoSource(vs));
            return NO_ERROR;
        } break;
        case SET_AUDIO_SOURCE: {
            ALOGV("SET_AUDIO_SOURCE");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            int as = data.readInt32();
            reply->writeInt32(setAudioSource(as));
            return NO_ERROR;
        } break;
        case SET_OUTPUT_FORMAT: {
            ALOGV("SET_OUTPUT_FORMAT");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            int of = data.readInt32();
            reply->writeInt32(setOutputFormat(of));
            return NO_ERROR;
        } break;
        case SET_VIDEO_ENCODER: {
            ALOGV("SET_VIDEO_ENCODER");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            int ve = data.readInt32();
            reply->writeInt32(setVideoEncoder(ve));
            return NO_ERROR;
        } break;
        case SET_AUDIO_ENCODER: {
            ALOGV("SET_AUDIO_ENCODER");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            int ae = data.readInt32();
            reply->writeInt32(setAudioEncoder(ae));
            return NO_ERROR;

        } break;
        case SET_OUTPUT_FILE_FD: {
            ALOGV("SET_OUTPUT_FILE_FD");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            int fd = dup(data.readFileDescriptor());
            int64_t offset = data.readInt64();
            int64_t length = data.readInt64();
            reply->writeInt32(setOutputFile(fd, offset, length));
            ::close(fd);
            return NO_ERROR;
        } break;
        case SET_VIDEO_SIZE: {
            ALOGV("SET_VIDEO_SIZE");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            int width = data.readInt32();
            int height = data.readInt32();
            reply->writeInt32(setVideoSize(width, height));
            return NO_ERROR;
        } break;
        case SET_VIDEO_FRAMERATE: {
            ALOGV("SET_VIDEO_FRAMERATE");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            int frames_per_second = data.readInt32();
            reply->writeInt32(setVideoFrameRate(frames_per_second));
            return NO_ERROR;
        } break;
        case SET_PARAMETERS: {
            ALOGV("SET_PARAMETER");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            reply->writeInt32(setParameters(data.readString8()));
            return NO_ERROR;
        } break;
        case SET_LISTENER: {
            ALOGV("SET_LISTENER");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            sp<IMediaRecorderClient> listener =
                interface_cast<IMediaRecorderClient>(data.readStrongBinder());
            reply->writeInt32(setListener(listener));
            return NO_ERROR;
        } break;
        case SET_CLIENT_NAME: {
            ALOGV("SET_CLIENT_NAME");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            reply->writeInt32(setClientName(data.readString16()));
            return NO_ERROR;
        } break;
        case SET_PREVIEW_SURFACE: {
            ALOGV("SET_PREVIEW_SURFACE");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            sp<IGraphicBufferProducer> surface = interface_cast<IGraphicBufferProducer>(
                    data.readStrongBinder());
            reply->writeInt32(setPreviewSurface(surface));
            return NO_ERROR;
        } break;
        case SET_CAMERA: {
            ALOGV("SET_CAMERA");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            sp<ICamera> camera = interface_cast<ICamera>(data.readStrongBinder());
            sp<ICameraRecordingProxy> proxy =
                interface_cast<ICameraRecordingProxy>(data.readStrongBinder());
            reply->writeInt32(setCamera(camera, proxy));
            return NO_ERROR;
        } break;
        case SET_INPUT_SURFACE: {
            ALOGV("SET_INPUT_SURFACE");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            sp<IGraphicBufferConsumer> surface = interface_cast<IGraphicBufferConsumer>(
                    data.readStrongBinder());
            reply->writeInt32(setInputSurface(surface));
            return NO_ERROR;
        } break;
        case QUERY_SURFACE_MEDIASOURCE: {
            ALOGV("QUERY_SURFACE_MEDIASOURCE");
            CHECK_INTERFACE(IMediaRecorder, data, reply);
            // call the mediaserver side to create
            // a surfacemediasource
            sp<IGraphicBufferProducer> surfaceMediaSource = querySurfaceMediaSource();
            // The mediaserver might have failed to create a source
            int returnedNull = (surfaceMediaSource == NULL) ? 1 : 0;
            reply->writeInt32(returnedNull);
            if (!returnedNull) {
                reply->writeStrongBinder(IInterface::asBinder(surfaceMediaSource));
            }
            return NO_ERROR;
        } break;
        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
}
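
For reference, the proxy side that drives the SET_VIDEO_SIZE branch above looks roughly like this; it is a sketch of the usual Bp* marshalling pattern, not a copy of the original file.

// Sketch: client-side counterpart of the SET_VIDEO_SIZE case. Marshal the
// interface token plus width/height, transact, and return the remote status.
status_t BpMediaRecorder::setVideoSize(int width, int height)
{
    Parcel data, reply;
    data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
    data.writeInt32(width);
    data.writeInt32(height);
    remote()->transact(SET_VIDEO_SIZE, data, &reply);
    return reply.readInt32();
}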
Example #5
status_t BnMediaRecorder::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
    case RELEASE: {
        LOGV("RELEASE");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        reply->writeInt32(release());
        return NO_ERROR;
    }
    break;
    case INIT: {
        LOGV("INIT");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        reply->writeInt32(init());
        return NO_ERROR;
    }
    break;
    case CLOSE: {
        LOGV("CLOSE");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        reply->writeInt32(close());
        return NO_ERROR;
    }
    break;
    case RESET: {
        LOGV("RESET");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        reply->writeInt32(reset());
        return NO_ERROR;
    }
    break;
    case STOP: {
        LOGV("STOP");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        reply->writeInt32(stop());
        return NO_ERROR;
    }
    break;
    case START: {
        LOGV("START");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        reply->writeInt32(start());
        return NO_ERROR;
    }
    break;
    case PREPARE: {
        LOGV("PREPARE");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        reply->writeInt32(prepare());
        return NO_ERROR;
    }
    break;
    case GET_MAX_AMPLITUDE: {
        LOGV("GET_MAX_AMPLITUDE");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        int max = 0;
        status_t ret = getMaxAmplitude(&max);
        reply->writeInt32(max);
        reply->writeInt32(ret);
        return NO_ERROR;
    }
    break;
    case SET_VIDEO_SOURCE: {
        LOGV("SET_VIDEO_SOURCE");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        int vs = data.readInt32();
        reply->writeInt32(setVideoSource(vs));
        return NO_ERROR;
    }
    break;
    case SET_AUDIO_SOURCE: {
        LOGV("SET_AUDIO_SOURCE");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        int as = data.readInt32();
        reply->writeInt32(setAudioSource(as));
        return NO_ERROR;
    }
    break;
    case SET_OUTPUT_FORMAT: {
        LOGV("SET_OUTPUT_FORMAT");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        int of = data.readInt32();
        reply->writeInt32(setOutputFormat(of));
        return NO_ERROR;
    }
    break;
    case SET_VIDEO_ENCODER: {
        LOGV("SET_VIDEO_ENCODER");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        int ve = data.readInt32();
        reply->writeInt32(setVideoEncoder(ve));
        return NO_ERROR;
    }
    break;
    case SET_AUDIO_ENCODER: {
        LOGV("SET_AUDIO_ENCODER");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        int ae = data.readInt32();
        reply->writeInt32(setAudioEncoder(ae));
        return NO_ERROR;

    }
    break;
    case SET_OUTPUT_FILE_PATH: {
        LOGV("SET_OUTPUT_FILE_PATH");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        const char* path = data.readCString();
        reply->writeInt32(setOutputFile(path));
        return NO_ERROR;
    }
    break;
    case SET_OUTPUT_FILE_FD: {
        LOGV("SET_OUTPUT_FILE_FD");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        int fd = dup(data.readFileDescriptor());
        int64_t offset = data.readInt64();
        int64_t length = data.readInt64();
        reply->writeInt32(setOutputFile(fd, offset, length));
        return NO_ERROR;
    }
    break;
    case SET_VIDEO_SIZE: {
        LOGV("SET_VIDEO_SIZE");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        int width = data.readInt32();
        int height = data.readInt32();
        reply->writeInt32(setVideoSize(width, height));
        return NO_ERROR;
    }
    break;
    case SET_VIDEO_FRAMERATE: {
        LOGV("SET_VIDEO_FRAMERATE");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        int frames_per_second = data.readInt32();
        reply->writeInt32(setVideoFrameRate(frames_per_second));
        return NO_ERROR;
    }
    break;
    case SET_PARAMETERS: {
        LOGV("SET_PARAMETER");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        reply->writeInt32(setParameters(data.readString8()));
        return NO_ERROR;
    }
    break;
    case SET_LISTENER: {
        LOGV("SET_LISTENER");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        sp<IMediaPlayerClient> listener =
            interface_cast<IMediaPlayerClient>(data.readStrongBinder());
        reply->writeInt32(setListener(listener));
        return NO_ERROR;
    }
    break;
    case SET_PREVIEW_SURFACE: {
        LOGV("SET_PREVIEW_SURFACE");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
        reply->writeInt32(setPreviewSurface(surface));
        return NO_ERROR;
    }
    break;
    case SET_CAMERA: {
        LOGV("SET_CAMERA");
        CHECK_INTERFACE(IMediaRecorder, data, reply);
        sp<ICamera> camera = interface_cast<ICamera>(data.readStrongBinder());
        reply->writeInt32(setCamera(camera));
        return NO_ERROR;
    }
    break;
    default:
        return BBinder::onTransact(code, data, reply, flags);
    }
}
Example #6
//--------------------------------------------------------------
void ofApp::update(){
    contourFinder.setMinAreaRadius(minRadius);
    contourFinder.setMaxAreaRadius(maxRadius);
    contourFinder.setMinArea(minArea);
    contourFinder.setMaxArea(maxArea);
    contourFinder.setThreshold(mThreshold);
    
    // Adjust the video size
    setVideoSize();
    
    if(MOVIE_DEBUG_MODE){
        movie.update();
        if(movie.isFrameNew()){
            ofxCv::blur(movie,10);
            contourFinder.findContours(movie);
        }
    }else {
        cam.update();
        if(cam.isFrameNew()){
            ofxCv::blur(cam,10);
            contourFinder.findContours(cam);
        }
    }
    
    // Collision detection
    if(showLabels){
        // Check whether any two detected blobs are touching
        std::vector<float> objRads(contourFinder.size());
        std::vector<float> objPosX(contourFinder.size());
        std::vector<float> objPosY(contourFinder.size());
        
        for(int i = 0;i<contourFinder.size();i++){
            cv::Rect targetRect = contourFinder.getBoundingRect(i);
            float targetW = targetRect.width;
            float targetH = targetRect.height;
            float targetF = max(targetW,targetH);
            objRads[i] = targetF;
            objPosX[i] = contourFinder.getCenter(i).x;
            objPosY[i] = contourFinder.getCenter(i).y;
        }
        
        for(int i = 0;i < contourFinder.size();i++){
            for (int n = i+1; n<contourFinder.size(); n++) {
                if(objRads[n]){
                    float distance = ofDist(objPosX[i], objPosY[i], objPosX[n], objPosY[n]);
                    if(objRads[i] + objRads[n] + collisionDist > distance){
                        std::cout << "Collision!!" << endl;
                        std::cout << distance << endl;
                        
                        
                        //OSCメッセージの準備
                        ofxOscMessage m;
                        //OSCアドレスの指定
                        m.setAddress( "/collision/flag" );
                        m.addIntArg(0);
                        //メッセージを送信
                        sender.sendMessage( m );
                        
                    }
                }
            }
        }
            
    }
}
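
update() only sends the /collision/flag message; the receiving end is not part of this listing. A minimal sketch using ofxOscReceiver (the standard counterpart of ofxOscSender); PORT must match the sender's setup, and what to do with the flag is left open.

// Sketch: poll a receiver for the collision flag sent above. In practice the
// receiver would be an ofApp member and this loop would run inside update().
ofxOscReceiver receiver;          // receiver.setup(PORT); somewhere in setup()

void pollCollisions(){
    while(receiver.hasWaitingMessages()){
        ofxOscMessage m;
        receiver.getNextMessage(m);   // older ofxOsc versions take a pointer: getNextMessage(&m)
        if(m.getAddress() == "/collision/flag"){
            int flag = m.getArgAsInt32(0);
            ofLogNotice() << "collision flag: " << flag;
        }
    }
}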
Example #7
void DIA_encodingBase::refresh(void)
{
          uint32_t time=clock.getElapsedMS();
          if(time>_nextUpdate)
          {
                uint32_t deltaTime=time-_lastClock;
                uint32_t deltaFrame=_currentFrameCount-_lastFrameCount;
                uint64_t deltaDts=_currentDts-_lastDts;
                if(sampleIndex>ADM_ENCODING_SAMPLE)
                {
                    uint32_t qSum=0;
                    for(int i=0;i<ADM_ENCODING_SAMPLE;i++)
                            qSum+=samples[i].qz;
                    qSum/=ADM_ENCODING_SAMPLE;
                    aprintf("Q:%d\n",qSum);
                    setAverageQz(qSum);
                }

                if(sampleIndex>ADM_ENCODING_SAMPLE)
                {
                    int start=sampleIndex%ADM_ENCODING_SAMPLE;
                    int end=(sampleIndex+ADM_ENCODING_SAMPLE-1)%ADM_ENCODING_SAMPLE;
                    uint64_t deltaTime=samples[end].sampleTime-samples[start].sampleTime;
                    uint64_t deltaSize=samples[end].size-samples[start].size;
                    aprintf("dTime:%d dSize:%d\n",deltaTime,deltaSize);
                    if(deltaTime>1000)
                    {
                        float delta;
                        delta=deltaSize;
                        delta/=deltaTime;
                        delta*=8; // byte -> bit
                        delta*=1000; // b/us -> kb/s
                        aprintf("br:%d\n",(int)delta);
                        setAverageBitrateKbits((uint32_t)delta);
                    }
                }
                if(deltaFrame)
                {
                    float thisAverage;
                    //printf("**********************************DFrame=%d, DTime=%d\n",(int)deltaFrame,(int)deltaTime);
                    thisAverage=((float)deltaFrame);
                    thisAverage/=deltaTime;
                    thisAverage*=1000;
                    _fps_average=_fps_average*0.5+0.5*thisAverage;
                    //printf("************** Fps:%d\n",(int)_fps_average);
                    setFps(_fps_average);
                    float percent=(float)_currentDts/(float)_totalDurationUs;
                    if(percent>1.0) percent=1.0;
                    percent*=100;
                    setPercent((uint32_t)percent);
                    setFrameCount(_currentFrameCount);
                    setElapsedTimeMs(time);
                }
                if(deltaDts )
                {
                    float dtsPerSec=deltaDts;
                    dtsPerSec/=deltaTime;
                    dtsPerSec/=1000.;  // dts advance per second
                    float leftDts=_totalDurationUs-_currentDts;
                    //printf("***************%u to encoding\n",(int)(leftDts/1000000));
                    //printf("Advanc=%d ms/sec\n",(int)(dtsPerSec*1000));
                    if(dtsPerSec>0.01)
                    {
                        leftDts=leftDts/dtsPerSec;
                        _remainingTimeUs=(_remainingTimeUs/2)+(leftDts/2);
                        leftDts=_remainingTimeUs;
                        leftDts/=1000.; // us -> ms
                        //printf("***************%u s left\n",(int)(leftDts/1000));
                        setRemainingTimeMS((uint32_t)leftDts);
                    }
                    
                }
                _nextUpdate=time+GUI_UPDATE_RATE;
                setAudioSize(_audioSize);
                setVideoSize(_videoSize);
                setTotalSize(_audioSize+_videoSize);
                _lastFrameCount=_currentFrameCount;
                _lastDts=_currentDts;
                _lastClock=time;
           
          }
          UI_purge();
}
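
The bitrate shown above is a windowed estimate: bytes written between the oldest and newest ring samples divided by the elapsed time, converted to kbit/s. Below is a standalone sketch of the same arithmetic; the struct and field names here are assumptions, and sampleTime is taken to be in microseconds, matching the "b/us" comment in the original.

#include <stdint.h>

// Sketch: average bitrate in kbit/s between two samples of a cumulative byte
// counter. bits per microsecond * 1000 = bits per millisecond = kbit/s.
struct EncSample { uint64_t sampleTime; uint64_t size; };

static uint32_t bitrateKbits(const EncSample &oldest, const EncSample &newest)
{
    uint64_t dTimeUs = newest.sampleTime - oldest.sampleTime;
    uint64_t dBytes  = newest.size - oldest.size;
    if (dTimeUs == 0) return 0;
    double bits = (double)dBytes * 8.0;
    return (uint32_t)(bits * 1000.0 / dTimeUs);
}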