bool GPUImage_UYVY422::setupShader(const char* VS, const char* FS) {
  /* Compile and link the UYVY422 conversion shader and resolve its uniforms.
     Returns false when any required uniform cannot be found (e.g. it was
     optimized out because the shader does not use it). */
 prog = rx_create_shader(VS, FS, vert_id, frag_id);
  /* Attribute locations must be bound before glLinkProgram() to take effect. */
  glBindAttribLocation(prog, 0, "a_pos");
  glBindAttribLocation(prog, 1, "a_tex");
  glLinkProgram(prog);
  glUseProgram(prog);

  /* glGetUniformLocation() returns -1 for unknown/inactive uniforms. */
  u_tex = glGetUniformLocation(prog, "u_tex");
  if(u_tex < 0) {
    RX_ERROR("Error while trying to get the u_tex uniform; not used?");
    return false;
  }

  u_pm = glGetUniformLocation(prog, "u_pm");
  if(u_pm < 0) {
    RX_ERROR("Error while trying to get the u_pm uniform; not used?");
    return false;
  }

  u_mm = glGetUniformLocation(prog, "u_mm");
  if(u_mm < 0) {
    RX_ERROR("Error while trying to get the u_mm uniform; not used?");
    return false;
  }

  /* The sampler always reads from texture unit 0. */
  glUniform1i(u_tex, 0);

  return true;
}
// Exemplo n.º 2
// 0
static void on_loaded(mos::ImageTask* img, void* user) {

  /* Image-loader callback: copy the loaded pixels into the shared `pixels`
     buffer (growing it when needed) and flag the render side to update —
     or recreate, when the dimensions changed — the texture. */

  if (0 == img->nbytes) {
    RX_ERROR("The loaded image has no bytes?"); /* fixed "laoded" typo */
    return;
  }

  if (NULL == pixels) {
    pixels = (unsigned char*)malloc(img->nbytes);
    if (NULL == pixels) {
      /* malloc can fail; previously this was not checked and the
         memcpy below would have crashed. */
      RX_ERROR("Cannot allocate the pixel buffer");
      return;
    }
    loaded_bytes = img->nbytes;
  }
  else if (img->nbytes > loaded_bytes) {
    /* Grow the buffer only when the new image needs more space. */
    unsigned char* tmp = (unsigned char*)realloc(pixels, img->nbytes);
    if (tmp == NULL) {
      RX_ERROR("Cannot realloc");
      return;
    }
    pixels = tmp;
    loaded_bytes = img->nbytes;
  }

  if (img->width != tex_width || img->height != tex_height) {
    /* Size changed: the texture must be recreated, not just updated. */
    must_recreate = true;
    tex_width = img->width;
    tex_height = img->height;
    tex_channels = img->channels;
  }

  memcpy(pixels, img->pixels, img->nbytes);
  must_update = true;
}
bool YouTubeUploadStart::parse() {

  if(!http_body.size()) {
    RX_ERROR("The received response is empty; cannot parse result of upload start action");
    return false;
  }
  
  if(http_code == 0) {
    RX_ERROR("We can only start parsing the http result when we got a valid http status code. make sure that you called start() before trying to parse the result");
    return false;
  }
  else if(http_code == 200) {
    RX_VERBOSE("Need to parse/handle 200 in upload start");
  }
  else if(http_code >= 400) {
    std::vector<YouTubeError> errors;
    if(!youtube_parse_errors(http_body, errors)) {
      RX_ERROR("Cannot parse the error json in the upload start");
      return false;
    }

    for(std::vector<YouTubeError>::iterator it = errors.begin(); it != errors.end(); ++it) {
      (*it).print();
    }
  }
  
  return true;
}
bool VideoCaptureDirectShow2::closeDevice() {

  /* Stop the running capture graph and release every DirectShow COM object
     we hold. Returns false when no device is open or stopping fails. */
  if(!media_control) {
    RX_ERROR("Cannot close the device because it's not setup or is already closed");
    return false;
  }

  /* StopWhenReady() lets in-flight samples drain before stopping. */
  HRESULT hr = media_control->StopWhenReady();
  if(FAILED(hr)) {
    RX_ERROR("Failed to stop the capture stream");
    return false;
  }

  /* Release all interfaces; safeReleaseDirectShow() also NULLs the pointer,
     so the verbose dump below is expected to print only null pointers. */
  safeReleaseDirectShow(&null_renderer_filter);
  safeReleaseDirectShow(&device_filter);
  safeReleaseDirectShow(&sample_grabber);
  safeReleaseDirectShow(&sample_grabber_filter);
  safeReleaseDirectShow(&media_control);
  safeReleaseDirectShow(&media_event);
  safeReleaseDirectShow(&capture_graph_builder);
  safeReleaseDirectShow(&graph_builder);

  /* The sample-grabber callback is plain C++ (not COM-refcounted): delete it. */
  if(capture_cb) {
    delete capture_cb;
    capture_cb = NULL;
  }

  RX_VERBOSE("%p, %p, %p, %p", null_renderer_filter, device_filter, sample_grabber, sample_grabber_filter);
  RX_VERBOSE("%p, %p, %p, %p", media_control, media_event, graph_builder, capture_graph_builder);
  return true;
}
// Exemplo n.º 5
// 0
bool AVEncoderSettings::validateAudio() {
  /* Validate the audio-related settings. A configuration that does not use
     audio is considered valid; otherwise only mono MP3 with a non-zero
     sample rate and bit rate is accepted. */
  if(!useAudio()) {
    return true;
  }

  if(audio_codec != AV_CODEC_ID_MP3) {
    RX_ERROR(ERR_AV_INVALID_AUDIO_CODEC_ID);
    return false;
  }

  if(!sample_rate) {
    RX_ERROR(ERR_AV_INVALID_SAMPLE_RATE);
    return false;
  }
  
  /* Only mono is supported at the moment. */
  if(num_channels != 1) {
    RX_ERROR(ERR_AV_INVALID_NUM_CHANNELS);
    return false;
  }

  if(!audio_bit_rate) {
    RX_ERROR(ERR_AV_INVALID_AUDIO_BIT_RATE);
    return false;
  }

  return true;
}
// Create + setup an device, IGraphBuilder and ICaptureGraphBuilder = user must realease all
bool VideoCaptureDirectShow2::initCaptureGraphBuilderForDevice(int device, IBaseFilter** deviceFilter, IGraphBuilder** graphBuilder, ICaptureGraphBuilder2** captureBuilder) {

  // Setup the ICaptureGraphBuilder2 + IGraphBuilder
  if(!initCaptureGraphBuilder(graphBuilder, captureBuilder)) {
    RX_ERROR("Cannot initialize the capture graph builder for a device");
    return false;
  }

  // Get the device filter that we add to the graph
  // (on failure we must release what initCaptureGraphBuilder created)
  if(!createDeviceFilter(device, deviceFilter)) {
    RX_ERROR("Cannot find device to setup a graph");
    safeReleaseDirectShow(graphBuilder);
    safeReleaseDirectShow(captureBuilder);
    return false;
  }

  // Add the device to the graph.
  // On failure, every interface created so far is released so the caller
  // never receives half-initialized out-parameters.
  HRESULT hr = (*graphBuilder)->AddFilter(*deviceFilter, L"Video Capture Device");
  if(FAILED(hr)) {
    RX_ERROR("Error while trying to add the capture device to the graph");
    safeReleaseDirectShow(graphBuilder);
    safeReleaseDirectShow(captureBuilder);
    safeReleaseDirectShow(deviceFilter);
    return false;
  }

  return true;
}
bool HTTPConnection::connect(httpconnection_event_callback eventCB,  /* gets called when a socket event occurs */
                             void* eventUser)                        /* gets passed into eventCB */
{
  /* Initialize the TCP handle and start an async DNS lookup of `host`;
     httpconnection_on_resolved continues the connection once resolved. */
  int r = uv_tcp_init(loop, sock);
  if(r) {
    RX_ERROR("Cannot init socket");
    return false;
  }

  cb_event = eventCB;
  cb_event_user = eventUser;

  /* Zero-initialize the hints struct: getaddrinfo() requires the members we
     do not set explicitly (ai_addrlen, ai_canonname, ...) to be zero, and
     the previous code left them indeterminate. */
  struct addrinfo hints = {};
  hints.ai_family = PF_INET;
  hints.ai_socktype = SOCK_STREAM;
  hints.ai_protocol = IPPROTO_TCP;
  hints.ai_flags = 0;

  RX_VERBOSE("Connecting to: %s", host.c_str());
  r = uv_getaddrinfo(loop, &resolver_req, httpconnection_on_resolved, 
                     host.c_str(), port.c_str(), &hints);

  if(r) {
    /* the old message misreported this failure as uv_tcp_init() */
    RX_ERROR("cannot uv_getaddrinfo(): %s", uv_strerror(uv_last_error(loop)));
    return false;
  }
    
  return true;
}
// Exemplo n.º 8
// 0
bool MP3Writer::end() {
  /* Finalize the MP3 stream: flush lame's internal buffers, hand the last
     bytes to the data callback, close the encoder and notify via cb_close. */
  if(!is_setup) {
    RX_ERROR(MP3_WRERR_NOT_SETUP);
    return false;
  }

  if(!is_started) {
    RX_ERROR(MP3_WRERR_NOT_STARTED);
    return false;
  }
  
  /* Flush any samples still buffered inside the encoder. A flush failure is
     logged but shutdown continues so the encoder is still closed below. */
  int written = lame_encode_flush(lame_flags, (unsigned char*)mp3_buffer, MP3_WRITER_BUFFER_SIZE);
  if(written < 0) {
    RX_ERROR(MP3_WRERR_CANNOT_FLUSH);
  }
  else if(config.cb_data) {
    config.cb_data((const char*)mp3_buffer, written, config.user);
  }

  lame_close(lame_flags);
  
  /* NOTE(review): cb_close is invoked unconditionally while cb_data is
     checked above — assumes cb_close is always set; confirm. */
  config.cb_close(config.user);
  
  lame_flags = NULL;
  is_started = false;

  return true;
}
void httpconnection_on_connect(uv_connect_t* req, int status) {
  /* libuv connect callback: start reading from the socket, kick off the SSL
     handshake when this is a https connection, then flush pending output. */
  HTTPConnection* c = static_cast<HTTPConnection*>(req->data);
  if(status == -1) {
    RX_ERROR("> cannot connect: %s:", uv_strerror(uv_last_error(c->loop)));
    RX_ERROR("@ todo should be `delete` the connection here?");
    return;
  }


  int r = uv_read_start((uv_stream_t*)c->sock, httpconnection_on_alloc, httpconnection_on_read);
  if(r) {
    RX_ERROR("> uv_read_start() failed: %s", uv_strerror(uv_last_error(c->loop)));
    RX_ERROR("@ todo should be `delete` the connection here?");
    return;
  }

  /* For TLS connections: act as the client side and begin the handshake;
     buffer->update() pushes the handshake bytes out. */
  if(c->ssl) {
    SSL_set_connect_state(c->ssl);
    SSL_do_handshake(c->ssl);
    c->buffer->update();
   }

  // trigger the output buffer 
  c->buffer->flushOutputBuffer();
}
void httpconnection_on_read(uv_stream_t* handle, ssize_t nread, uv_buf_t buf) {

  /* libuv read callback. nread < 0 signals error/EOF; otherwise `nread`
     bytes were read into buf.base, which this callback owns and must free. */
  HTTPConnection* c = static_cast<HTTPConnection*>(handle->data);

  if(nread < 0) {
    int r = uv_read_stop(handle);
    if(r) {
      RX_ERROR("> error uv_read_stop: %s", uv_strerror(uv_last_error(handle->loop)));
      
    }
    /* The success path below frees buf.base with delete[], so this branch
       must match: the old plain `delete` on a new[]-allocated buffer was
       undefined behavior. */
    if(buf.base) {
      delete[] buf.base;
      buf.base = NULL;
    }

    /* EOF is a normal close; anything else is reported. */
    uv_err_t err = uv_last_error(handle->loop);
    if(err.code != UV_EOF) {
      RX_ERROR("> disconnected from server but not correctly: %s",uv_strerror(uv_last_error(handle->loop))) ;
    }

    r = uv_shutdown(&c->shutdown_req, handle, httpconnection_on_shutdown);
    if(r) {
      RX_ERROR("> error shutting down client: %s", uv_strerror(uv_last_error(handle->loop)));
      RX_ERROR("@ todo should be `delete` the connection here?");
    }

    return;
  }

  c->addToInputBuffer(buf.base, nread);

  if(buf.base) {
    delete[] buf.base;
    buf.base = NULL;
  }
}
// Exemplo n.º 11
// 0
static void on_font_load_clicked(int id, void* user) {

  KankerApp* app = static_cast<KankerApp*>(user);
  if (NULL == app) {
    RX_ERROR("error: cannot cast to KankerApp* in on_file_selected().");
    return;
  }

  std::vector<std::string> files;
  if (0 != app->getFontFiles(files)) {
    RX_ERROR("error: cannot load font, file not found.");
    return;
  }

  if (app->selected_font_dx >= files.size()) {
    RX_ERROR("error: selected font dx is too big: %d, files.size() = %lu.", app->selected_font_dx, files.size());
    return;
  }

  std::string filepath = rx_to_data_path("fonts/" +files[app->selected_font_dx]);
  if (!rx_file_exists(filepath)) {
    RX_ERROR("error: cannot load file; file seems to be removed?");
    return;
  }
  
  if (0 != app->kanker_font.load(filepath)) {
    RX_ERROR("error: font failed to load: %s\n", filepath.c_str());
    return;
  }

  app->font_filename = files[app->selected_font_dx];

  app->switchState(KSTATE_CHAR_OVERVIEW);
}
bool VideoCaptureDirectShow2::setSampleGrabberCallback(ISampleGrabber* sampleGrabber, ISampleGrabberCB* sampleGrabberCB) {

  /* Configure the sample grabber for continuous capture (buffered samples,
     no one-shot) and install the callback that receives the frames. */
  if(!sampleGrabber) {
    RX_ERROR("Cannot set a ISampleGrabberCB on an invalid ISampleGrabber*"); /* fixed "invalide" typo */
    return false;
  }
  if(!sampleGrabberCB) {
    RX_ERROR("Cannot set a ISampleGrabberCB when its invalid");
    return false;
  }
  
  HRESULT hr = sampleGrabber->SetBufferSamples(TRUE);
  if(FAILED(hr)) {
    RX_ERROR("SetBufferSamples() failed"); /* message named the wrong API before */
    return false;
  }

  /* FALSE: keep capturing instead of stopping after the first sample. */
  hr = sampleGrabber->SetOneShot(FALSE);
  if(FAILED(hr)) {
    RX_ERROR("SetOneShot() failed");
    return false;
  }

  /* 1 selects the BufferCB variant of the callback (per ISampleGrabber::SetCallback). */
  hr = sampleGrabber->SetCallback(sampleGrabberCB,1);
  if(FAILED(hr)) {
    RX_ERROR("SetCallback() failed"); /* was logged with the wrong casing */
    return false;
  }

  return true;
}
bool VideoCaptureDirectShow2::setSampleGrabberMediaType(ISampleGrabber* sampleGrabber, AVPixelFormat pixelFormat) {
  /* Tell the sample grabber which media (sub)type to accept, translating the
     libav pixel format into the corresponding DirectShow subtype GUID. */
  if(!sampleGrabber) {
    RX_ERROR("Cannot set the media type for an uninitialize sample grabber");
    return false;
  }

  GUID subtype = libavPixelFormatToMediaSubTypeGUID(pixelFormat);
  if(subtype == GUID_NULL) {
    std::string str_fmt = rx_libav_pixel_format_to_string(pixelFormat);
    RX_ERROR("Cannot find a matching MediaSubType for AVPixelFormat: %s", str_fmt.c_str());
    return false;
  }

  /* Only majortype/subtype/formattype are filled in; the rest stays zeroed
     so DirectShow treats them as "don't care" when matching. */
  AM_MEDIA_TYPE mt;
  ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
  mt.majortype = MEDIATYPE_Video;
  mt.subtype = subtype;
  mt.formattype = FORMAT_VideoInfo;

  HRESULT hr = sampleGrabber->SetMediaType(&mt);
  if(FAILED(hr)) {
    RX_ERROR("Error while trying to set the media type for the ISampleGrabber");
    return false;
  }

  return true;
}
// Returns the IBaseFilter* for the given device index. - caller must Release() the object
bool VideoCaptureDirectShow2::createDeviceFilter(int device, IBaseFilter** deviceFilter) {

  /* Enumerate the capture devices and bind the one at index `device` to an
     IBaseFilter. Returns false when the index does not exist or binding fails. */
  IEnumMoniker* enum_moniker = NULL;
  if(!createDeviceMoniker(&enum_moniker)) {
    RX_ERROR("Cannot create IEnumMoniker so also not the device");
    return false;
  }
  
  // Walk the monikers until we reach the requested device index.
  IMoniker* moniker = NULL;
  int count = 0;
  bool found = false;
  while(enum_moniker->Next(1, &moniker, NULL) == S_OK) {

    if(count == device) {
      HRESULT hr = moniker->BindToObject(0, 0, IID_IBaseFilter, (void**)deviceFilter);
      if(FAILED(hr)) {
        RX_ERROR("Error while trying to bind the device to the IBaseFilter");
        break;
      }
      found = true;
      (*deviceFilter)->AddRef(); // @todo - do we really need this here?
      break;
    }

    ++count;
    moniker->Release();
  }

  safeReleaseDirectShow(&enum_moniker);
  safeReleaseDirectShow(&moniker);

  /* Bug fix: this used to `return true` unconditionally — `found` was computed
     but never used, so a missing device or failed BindToObject still reported
     success and the caller dereferenced an unset *deviceFilter. */
  return found;
}
// Exemplo n.º 15
// 0
void YouTubeModel::setAccessToken(std::string atoken, uint64_t timeout) {
  /* Persist the OAuth access token and its expiry timestamp in the `state`
     table; orReplace() turns each insert into an upsert keyed on `name`.
     Failures are only logged — callers get no error indication. */
  if(!db.insert("state").use("name", "access_token").use("value", atoken).orReplace().execute()) {
    RX_ERROR("Cannot update/replace the access token");
  }

  if(!db.insert("state").use("name", "token_timeout").use("value", timeout).orReplace().execute()) {
    RX_ERROR("Cannot update/replace the token timeout");
  }
}
// Exemplo n.º 16
// 0
// decompress from memory stream
bool JPG::load(unsigned char* compressed, size_t nbytes) {
  struct jpeg_error_mgr jerr;
  struct jpeg_decompress_struct cinfo;
  
  cinfo.err = jpeg_std_error(&jerr);
  jpeg_create_decompress(&cinfo);
  jpeg_mem_src(&cinfo, compressed, nbytes);

  int rc = jpeg_read_header(&cinfo, TRUE);
  if(rc != 1) {
    RX_ERROR("Error while reading the jpeg header");
    return false;
  }

  bool need_alloc = false;
  jpeg_start_decompress(&cinfo);

  if(cinfo.output_width != width) {
    width = cinfo.output_width;
    need_alloc = true;
  }
  if(cinfo.output_height != height) {
    height = cinfo.output_height;
    need_alloc = true;
  }
  if(cinfo.output_components != num_channels) {
    num_channels = cinfo.output_components;
    need_alloc = true;
  }

  if(!width || !height) {
    RX_ERROR("Read incorrect jpg size: %d x %d", width, height);
    jpeg_finish_decompress(&cinfo);
    jpeg_destroy_decompress(&cinfo);
    return false;
  }

  // only allocate when the sizes or num channels change.
  if(need_alloc) {
    bit_depth = 8;
    num_bytes = width * height * num_channels;
    stride = width * num_channels;
    pixels = new unsigned char[num_bytes];
  }

  size_t dest_row = 0;
  while(cinfo.output_scanline < cinfo.output_height) {
    unsigned char* buffer_array[1];
    buffer_array[0] = pixels + cinfo.output_scanline * stride;
    jpeg_read_scanlines(&cinfo, buffer_array, 1);
  }

  jpeg_finish_decompress(&cinfo);
  jpeg_destroy_decompress(&cinfo);
 
  return true;
}
// Exemplo n.º 17
// 0
  bool VerticalBlur::setup(int w, int h) { 
    /* Build the vertical-blur shader and upload the precomputed Gaussian
       weights/offsets as uniform arrays. `h` is the render-target height
       used by the shader to convert offsets into texture coordinates. */

    // CREATE SHADER
    if(!shader.create(VERTICAL_BLUR_FILTER_VS, VERTICAL_BLUR_FILTER_FS)) {
      RX_ERROR(ERR_GL_VBLUR_SHADER);
      return false;
    }

    /* Bind attribute locations before linking. */
    shader.bindAttribLocation("a_pos", 0);
    shader.bindAttribLocation("a_tex", 1);

    if(!shader.link()) {
      RX_ERROR(ERR_GL_VBLUR_SHADER);
      return false;
    }

    shader.use();
    u_tex = shader.getUniformLocation("u_tex");
    u_height = shader.getUniformLocation("u_height");

    glUniform1f(u_height, h); 


    // SET BLUR VALUES
    /* Sample a one-sided Gaussian: weight[0] is the center tap, every other
       tap is counted twice in the normalization sum (mirrored above/below). */
    const int num_weights = 10;
    char weight_uniform_name[50];
    char offset_uniform_name[50];
    float weights[num_weights];
    float sum;
    float sigma2 = blur_amount;

    weights[0] = gauss(0.0f, sigma2);
    sum = weights[0];
    for(int i = 1; i < num_weights; ++i) {
      weights[i] = gauss(i, sigma2);
      sum += 2 * weights[i];
    }

    /* Upload normalized weights and pixel offsets per array element; the
       uniform names are addressed as "u_weight[i]" / "u_pix_offset[i]". */
    for(int i = 0; i < num_weights; ++i) {
      sprintf(weight_uniform_name, "u_weight[%d]", i);
      GLint weight_uniform_loc = glGetUniformLocation(shader.prog, weight_uniform_name);
      float val = weights[i] / sum;
      glUniform1f(weight_uniform_loc, val);

      sprintf(offset_uniform_name, "u_pix_offset[%d]", i);
      GLint offset_uniform_loc = glGetUniformLocation(shader.prog, offset_uniform_name);
      glUniform1f(offset_uniform_loc, 2.5f * i);

      //printf("> weight: %s, %d offset: %s, %d, value: %f\n", weight_uniform_name, weight_uniform_loc, offset_uniform_name, offset_uniform_loc, val);
    }

    return true;
  }
// Exemplo n.º 18
// 0
  void InteractiveGrid::update() {
    /* Map each tracked blob onto a grid cell and fire on_activate() for
       cells that are not in their cooldown period. */

    if (NULL == tracker) {
      RX_ERROR("Tracker is invalid.");
      return;
    }
    if (NULL == on_activate) {
      RX_ERROR("No on_activate set so no use to execute the update function.");
      return;
    }

    uint64_t now = rx_hrtime();
    /* Precompute reciprocal webcam dimensions to normalize blob positions. */
    float iv_w = 1.0f / mos::config.webcam_width;
    float iv_h = 1.0f / mos::config.webcam_height;

    /* get the detected blobs. */
    for (size_t i = 0; i < tracker->blobs.blobs.size(); ++i) {

      Blob& blob = tracker->blobs.blobs[i];

#if 0
      if (false == blob.matched) {
        continue;
      }
      
      if (15 > blob.trail.size()) {
        continue;
      }
#endif
      
      /* convert the position of the blob to a cell index. */
      cv::Point& pt = blob.position;
      float px = float(pt.x) * iv_w;
      float py = float(pt.y) * iv_h;
      
      int col = px * fex::config.cols;
      int row = py * fex::config.rows;
      int dx = row * fex::config.cols + col;

      if (dx >= cells.size()) {
        RX_ERROR("Not supposed to happen, but the calculated index is bigger then the total number of cells, col: %d, row: %d, dx: %d", col, row, dx);
        continue;
      }

      InteractiveCell& cell = cells[dx];
      /* A cell fires only when it has no timeout yet or the cooldown passed. */
      if (0 == cell.timeout || (0 != cell.timeout && now > cell.timeout)) {
        /* new cell, make active. */
        //RX_VERBOSE("Activated: %d x %d, timeout: %llu", col, row, cell.timeout);
        on_activate(col, row, user);
        cell.timeout = now + 1e9; /* this cell can be reused after X seconds (Xe9) */
      }
    }
  }
// Exemplo n.º 19
// 0
static void on_abb_load_settings_clicked(int id, void* user) {

  KankerApp* app = static_cast<KankerApp*>(user);
  if (NULL == app) {
    RX_ERROR("Failed to cast to KankerApp");
    return;
  }

  if (0 != app->kanker_abb.loadSettings(rx_to_data_path("abb_settings.xml"))) {
    RX_ERROR("Failed to load the settings.");
  }
}
// Exemplo n.º 20
// 0
bool JPG::load(std::string filename, bool datapath) {
  if(datapath) {
    filename = rx_to_data_path(filename);
  }

  struct jpeg_error_mgr jerr;
  struct jpeg_decompress_struct cinfo;
  FILE* fp;
  JSAMPARRAY buffer;
  
  if( (fp = fopen(filename.c_str(), "rb")) == NULL ) {
    RX_ERROR(ERR_JPG_FILE_NOT_OPENED, filename.c_str());
    return false;
  }

  cinfo.err = jpeg_std_error(&jerr);

  jpeg_create_decompress(&cinfo);
  jpeg_stdio_src(&cinfo, fp);
  jpeg_read_header(&cinfo, TRUE);
  jpeg_start_decompress(&cinfo);

  stride = cinfo.output_width * cinfo.output_components;
  num_channels = cinfo.output_components;
  width = cinfo.output_width;
  height = cinfo.output_height;
  bit_depth = 8;
  num_bytes = width * height * num_channels;

  pixels = new unsigned char[num_bytes];
  if(!pixels) {
    RX_ERROR(ERR_JPG_CANNOT_ALLOC);
    jpeg_finish_decompress(&cinfo);
    jpeg_destroy_decompress(&cinfo);
    fclose(fp);
    return false;
  }

  size_t dest_row = 0;
  buffer = (*cinfo.mem->alloc_sarray)((j_common_ptr) &cinfo, JPOOL_IMAGE, stride, 1);
  while(cinfo.output_scanline < cinfo.output_height) {
    jpeg_read_scanlines(&cinfo, buffer, 1);
    memcpy(pixels + (dest_row * stride), buffer[0], stride);
    dest_row++;
  }

  jpeg_finish_decompress(&cinfo);
  jpeg_destroy_decompress(&cinfo);
  fclose(fp);
  
  return true;
}
// Exemplo n.º 21
// 0
// create the request string
bool HTTPRequest::toString(std::string& result) {
  /* Serialize the full HTTP request (request line + headers + body) into
     `result`. Returns false when the body cannot be created. */

  // create the content string
  std::string http_body;
  if(!createBody(http_body)) {
    RX_ERROR("Cannot create request body");
    return false;
  }

  // create the headers.
  // Content-Length must match the body we just built.
  addDefaultHTTPHeaders();
  addHeader(HTTPHeader("Content-Length", http_body.size()));

  // construct the request
  result = getHTTPString() +"\r\n";

  result += headers.join();
  result += "\r\n";

  /* Debug dump: prints the request line/headers plus the first ~40 bytes of
     the body as characters and as hex. Compiled in while #if 1. */
#if 1
  printf("%s", result.c_str());
  for(size_t i = 0; i < http_body.size(); ++i) {
    if(i > 40) {
      break;
    }
    printf("%c", http_body[i]);
  }
  printf("\n");
  for(size_t i = 0; i < http_body.size(); ++i) {
    if(i > 40) {
      break;
    }
    printf("%02X ", (unsigned char)http_body[i]);
  }
  printf("\n");
#endif

  result += http_body;

  /* Optional debug: write the raw request to data/out.raw (disabled). */
#if 0 
  std::ofstream ofs(rx_to_data_path("out.raw").c_str(), std::ios::binary | std::ios::out);
  if(!ofs.is_open()) {
    RX_ERROR("Cannot open output file");
  }
  else {
    ofs.write(result.c_str(), result.size());
    ofs.close();
  }
#endif

  return true;
}
// Allocates the ISampleGrabber interface
bool VideoCaptureDirectShow2::createSampleGrabber(IBaseFilter* grabberFilter, ISampleGrabber** sampleGrabber) {

  /* Query the sample-grabber COM interface from the given filter. */
  if(NULL == grabberFilter) {
    RX_ERROR("The given grabberFilter is invalid");
    return false;
  }

  HRESULT result = grabberFilter->QueryInterface(IID_ISampleGrabber, (void**)sampleGrabber);
  if(FAILED(result)) {
    RX_ERROR("Cannot find the IID_ISampleGrabber interface");
    return false;
  }

  return true;
}
// Exemplo n.º 23
// 0
static void on_dir_change(std::string dir, std::string filename, void* user) {

  /* get a handle */
  mos::ImageLoader* loader = static_cast<mos::ImageLoader*>(user);
  if (NULL == loader) {
    RX_ERROR("Invalid user pointer.");
    return;
  }
  
  if (0 != loader->load(dir +"/" +filename)) {
    RX_ERROR("Failed to load file.");
  }
}
// Exemplo n.º 24
// 0
static void on_abb_send_message_to_robot_clicked(int id, void* user) {

  KankerApp* app = static_cast<KankerApp*>(user);
  if (NULL == app) {
    RX_ERROR("Failed to cast to KankerApp");
    return;
  }
  
  if (0 != app->controller.writeText(12, app->test_message)) {
    RX_ERROR("Failed to write the text to the ABB.");
    return;
  }
}
bool VideoCaptureDirectShow2::stopCapture() {
  if(!media_control) {
    RX_ERROR("Cannot stop the capture because the capture filter hasn't been setup");
    return false;
  }

  HRESULT hr = media_control->Stop();
  if(FAILED(hr)) {
    RX_ERROR("Erro while trying to stop the capture");
    return false;
  }

  return true;
}
// Exemplo n.º 26
// 0
bool JPG::setPixels(unsigned char* pix, int w, int h, J_COLOR_SPACE type) {

  /* Replace the current image with a copy of `pix` (w x h in `type`).
     Only the RGB color space is supported. */
  if(NULL == pix) {
    RX_ERROR("Invalid pixels given");
    return false;
  }
  if(0 == w) {
    RX_ERROR("Invalid width given");
    return false;
  }
  if(0 == h) {
    RX_ERROR("invalid height given");
    return false;
  }

  /* Drop the previous buffer before taking over the new dimensions
     (delete[] on NULL is a no-op). */
  delete[] pixels;
  pixels = NULL;

  color_space = type;
  width = w;
  height = h;

  if(JCS_RGB == color_space) {
    num_channels = 3;
    stride = width * 3;
    num_bytes = width * height * 3;
    bit_depth = 8;
  }
  else {
    RX_ERROR("Unhandled color space: %d", type);
    return false;
  }

  pixels = new unsigned char[num_bytes];
  if(NULL == pixels) {
    RX_ERROR("Cannot allocate %ld bytes for the jpg", num_bytes);
    return false;
  }

  memcpy((char*)pixels, (char*)pix, num_bytes);
  return true;
}
bool VideoEncoderService::setup() {

  if(!rx_file_exists(avconv)) {
    RX_ERROR("Cannot find avconv at: %s", avconv.c_str());
    return false;
  }

  if(!encoder.setup(avconv)) {
    RX_ERROR("Cannot setup the encoder.");
    return false;
  }

  return true;
}
// Create IMediaControl - user must call Release()
bool VideoCaptureDirectShow2::createMediaControl(IGraphBuilder* graphBuilder, IMediaControl** mediaControl) {

  /* Query the media-control interface from an existing graph builder. */
  if(NULL == graphBuilder) {
    RX_ERROR("Before calling createMediaControl, make sure you created the graphBuilder");
    return false;
  }

  HRESULT result = graphBuilder->QueryInterface(IID_IMediaControl, (void**)mediaControl);
  if(FAILED(result)) {
    RX_ERROR("Cannot create IMediaControl");
    return false;
  }

  return true;
}
// Exemplo n.º 29
// 0
int main() {

  /* Load the OAuth credentials from data/twitter.json. */
  Jansson config;
  if(!config.load("twitter.json", true)) {
    RX_ERROR("Cannot load the twitter.json file with the config tokens");
    ::exit(EXIT_FAILURE);
  }

  std::string access_token;
  std::string access_token_secret;
  std::string consumer_key;
  std::string consumer_secret;

  /* Short-circuits: later keys are not read once one lookup fails. */
  bool have_tokens = config.getString("/access_token", access_token)
    && config.getString("/access_token_secret", access_token_secret)
    && config.getString("/consumer_key", consumer_key)
    && config.getString("/consumer_secret", consumer_secret);

  if(!have_tokens) {
    RX_ERROR("Cannot find the correct values in the config file");
    ::exit(EXIT_FAILURE);
  }

  Twitter tw;
  if(!tw.setup(access_token, access_token_secret, consumer_key, consumer_secret)) {
    RX_ERROR("Cannot setup the twitter obj");
    ::exit(EXIT_FAILURE);
  }

#if 1
  TwitterStatusesFilter tw_filter;
  tw_filter.track("love,openframeworks"); // comma separated list of keywords to track
  tw.apiStatusesFilter(tw_filter, twitter_filter_cb, NULL);
#endif

#if 0
  // Follow users timeline
  TwitterStatusesUserTimeline tl;
  tl.count = 10;
  tw.apiStatusesUserTimeline(tl, twitter_usertimeline_cb, NULL);
#endif

  /* Poll the stream forever. */
  while(true) {
    tw.update();
  }

  return 0;
}
// UTILS 
// --------------------------------------------------------------------------------------
void VideoCaptureDirectShow2::printConnectedMediaTypeForSampleGrabber(ISampleGrabber* sampleGrabber) {
  /* Debug helper: dump the media type currently negotiated on the sample
     grabber. Only meaningful after the stream has been rendered. */
  if(!sampleGrabber) {
    RX_ERROR("Cannot print the connected media type because the given ISampleGrabber* is invalid");
    return;
  }

  AM_MEDIA_TYPE mt;
  HRESULT hr = sampleGrabber->GetConnectedMediaType(&mt);
  if(FAILED(hr)) {
    RX_ERROR("Error while trying to get the connected media type from the ISampleGrabber* - did you call ICaptureGraphBuilder2->RenderStream()? make sure you did before calling this function"); /* fixed "conneted" typo */
    return;
  }

  /* NOTE(review): per the DirectShow docs GetConnectedMediaType() allocates
     mt.pbFormat; it looks like it should be freed (FreeMediaType) after
     printMediaType() — confirm and fix separately if so. */
  printMediaType(&mt);
}