void VideoCaptureBase::printSupportedPixelFormats(int device) {

  std::vector<AVCapability> caps = getCapabilities(device);
  if(!caps.size()) {
    RX_VERBOSE("No capabilities found for device: %d", device);
    return;
  }

  RX_VERBOSE("Supported pixel formats for device: %d", device);
  RX_VERBOSE("--------------------------------------------------------------------");

  std::set<enum AVPixelFormat> unique_fmts;
  for(std::vector<AVCapability>::iterator it = caps.begin();
      it != caps.end();
      ++it)
    {
      AVCapability cap = *it;
      unique_fmts.insert(cap.pixel_format);
    }

  for(std::set<enum AVPixelFormat>::iterator it = unique_fmts.begin();
      it != unique_fmts.end();
      ++it) 
    {
     
      enum AVPixelFormat pix_fmt = *it;
      RX_VERBOSE("\t%s", rx_libav_pixel_format_to_string(pix_fmt).c_str());
    }
}
bool VideoCaptureDirectShow2::closeDevice() {

  if(!media_control) {
    RX_ERROR("Cannot close the device because it's not setup or is already closed");
    return false;
  }

  HRESULT hr = media_control->StopWhenReady();
  if(FAILED(hr)) {
    RX_ERROR("Failed to stop the capture stream");
    return false;
  }

  safeReleaseDirectShow(&null_renderer_filter);
  safeReleaseDirectShow(&device_filter);
  safeReleaseDirectShow(&sample_grabber);
  safeReleaseDirectShow(&sample_grabber_filter);
  safeReleaseDirectShow(&media_control);
  safeReleaseDirectShow(&media_event);
  safeReleaseDirectShow(&capture_graph_builder);
  safeReleaseDirectShow(&graph_builder);

  if(capture_cb) {
    delete capture_cb;
    capture_cb = NULL;
  }

  RX_VERBOSE("%p, %p, %p, %p", null_renderer_filter, device_filter, sample_grabber, sample_grabber_filter);
  RX_VERBOSE("%p, %p, %p, %p", media_control, media_event, graph_builder, capture_graph_builder);
  return true;
}
void VideoCaptureBase::printSupportedSizes(int device) { 

  std::vector<AVSize> sizes = getSupportedSizes(device);
  if(!sizes.size()) {
    RX_VERBOSE("No supported sizes found");
    return;
  }

  RX_VERBOSE("Supported sizes for device: %d", device);
  RX_VERBOSE("--------------------------------------------------------------------");
  for(std::vector<AVSize>::iterator it = sizes.begin(); it != sizes.end(); ++it) {
    AVSize& s = *it;
    RX_VERBOSE("\t%dx%d", s.width, s.height);
  }
}
void VideoCaptureBase::printSupportedPixelFormats(int device, int width, int height) {

  std::vector<enum AVPixelFormat> formats = getSupportedPixelFormats(device, width, height);
  if(!formats.size()) {
    RX_VERBOSE("No supported pixel formats for: %dx%d", width, height);
    return;
  }

  RX_VERBOSE("Supported pixel formats for device: %d and size: %dx%d", device, width, height)
  RX_VERBOSE("--------------------------------------------------------------------");
  for(std::vector<enum AVPixelFormat>::iterator it = formats.begin(); it != formats.end(); ++it) {
    enum AVPixelFormat f = *it;
    std::string fmt_name = rx_libav_pixel_format_to_string(f);
    RX_VERBOSE("\t%s", fmt_name.c_str());
  }
}
void VideoEncoderClientIPC::customCommand(VideoEncoderEncodeTask task) {
  RX_VERBOSE("Sending /cmd command");

  Buffer b;
  task.pack(b);
  client.call("/cmd", b.ptr(), b.size());
}
void VideoEncoderClientIPC::addAudio(VideoEncoderEncodeTask task) {
  RX_VERBOSE("Sending /add_audio command");

  Buffer b;
  task.pack(b);
  client.call("/add_audio", b.ptr(), b.size());
}
bool YouTubeUploadStart::parse() {

  if(!http_body.size()) {
    RX_ERROR("The received response is empty; cannot parse result of upload start action");
    return false;
  }
  
  if(http_code == 0) {
    RX_ERROR("We can only start parsing the http result when we got a valid http status code. make sure that you called start() before trying to parse the result");
    return false;
  }
  else if(http_code == 200) {
    RX_VERBOSE("Need to parse/handle 200 in upload start");
  }
  else if(http_code >= 400) {
    std::vector<YouTubeError> errors;
    if(!youtube_parse_errors(http_body, errors)) {
      RX_ERROR("Cannot parse the error json in the upload start");
      return false;
    }

    for(std::vector<YouTubeError>::iterator it = errors.begin(); it != errors.end(); ++it) {
      (*it).print();
    }
  }
  
  return true;
}
bool HTTPConnection::connect(httpconnection_event_callback eventCB,  /* gets called when a socket event occurs */
                             void* eventUser)                        /* gets passed into eventCB */
{

  int r = uv_tcp_init(loop, sock);
  if(r) {
    RX_ERROR("Cannot init socket");
    return false;
  }

  cb_event = eventCB;
  cb_event_user = eventUser;

  struct addrinfo hints;
  memset(&hints, 0, sizeof(hints));  /* getaddrinfo expects the unused addrinfo fields to be zeroed */
  hints.ai_family = PF_INET;
  hints.ai_socktype = SOCK_STREAM;
  hints.ai_protocol = IPPROTO_TCP;
  hints.ai_flags = 0;
 
  RX_VERBOSE("Connecting to: %s", host.c_str());
  r = uv_getaddrinfo(loop, &resolver_req, httpconnection_on_resolved, 
                     host.c_str(), port.c_str(), &hints);

  if(r) {
    RX_ERROR("cannot uv_tcp_init(): %s", uv_strerror(uv_last_error(loop)));
    return false;
  }
    
  return true;
}
void KankerApp::onMouseRelease(double x, double y, int bt, int mods) {

  is_mouse_pressed = false;

  if (NULL != gui) {
    gui->onMouseRelease(x, y, bt, mods);
  }

  switch (state) {
    case KSTATE_CHAR_INPUT_DRAWING: {
      kanker_glyph->onEndLine();
      break;
    }
    case KSTATE_CHAR_EDIT: {
      if (is_mouse_inside_char) {
        if (kanker_glyph) {
          kanker_glyph->translate(char_offset_x, char_offset_y);
        }
        is_mouse_inside_char = false;
        char_offset_x = 0.0f;
        char_offset_y = 0.0f;
      }

      if (kanker_glyph) {
        kanker_glyph->advance_x = advance_x - gui_width;
        RX_VERBOSE("advance_x: %f", kanker_glyph->advance_x);
      }
      break;
    }
    default: { 
      break;
    }
  }
}
static int on_mouse(const char* path, const char* types, lo_arg** argv, int argc, void* data, void* user) {
  RX_VERBOSE("on_mouse: %d, %d", argv[0]->i, argv[1]->i);

  Simulation* sim = static_cast<Simulation*>(user);
  sim->points.push_back(Vec2(argv[0]->i, argv[1]->i));

  return 0;
}
void VideoCaptureDirectShow2::printMediaType(AM_MEDIA_TYPE* mt) {
  RX_VERBOSE("AM_MEDIA_TYPE.majortype: %s",              mediaFormatMajorTypeToString(mt->majortype).c_str());
  RX_VERBOSE("AM_MEDIA_TYPE.subtype: %s",                mediaFormatSubTypeToString(mt->subtype).c_str());
  RX_VERBOSE("AM_MEDIA_TYPE.bFixedSizeSamples: %c",      (mt->bFixedSizeSamples == TRUE) ? 'y' : 'n');
  RX_VERBOSE("AM_MEDIA_TYPE.bTemporalCompression: %c",   (mt->bTemporalCompression == TRUE) ? 'y' : 'n');
  RX_VERBOSE("AM_MEDIA_TYPE.lSampleSize: %Iu",           mt->lSampleSize);
  RX_VERBOSE("AM_MEDIA_TYPE.formattype: %s",             mediaFormatFormatTypeToString(mt->formattype).c_str());
  RX_VERBOSE("AM_MEDIA_TYPE.cbFormat: %Iu",              mt->cbFormat);

  if(mt->formattype == FORMAT_VideoInfo && mt->cbFormat >= sizeof(VIDEOINFOHEADER)) {
    VIDEOINFOHEADER* ih = (VIDEOINFOHEADER*)mt->pbFormat;
    double fps = 10000000.0 / double(ih->AvgTimePerFrame); /* AvgTimePerFrame is in 100-nanosecond units */

    RX_VERBOSE("VIDEOINFOHEADER - width: %ld",        LONG(ih->bmiHeader.biWidth));
    RX_VERBOSE("VIDEOINFOHEADER - height: %ld",       LONG(ih->bmiHeader.biHeight));
    RX_VERBOSE("VIDEOINFOHEADER - fps: %f", fps);
  }
}
int main() {

  // get necessary config
  // -----------------------------------------------
  Jansson j;
  if(!j.load("youtube.cfg", true)) {
    RX_ERROR("Make sure that you created the youtube.cfg; we're using that config to authorize ourself");
    return EXIT_FAILURE;
  }
  
  std::string client_id;
  std::string client_secret;
  std::string auth_code;

  j.getString("/client_id", client_id);
  j.getString("/client_secret", client_secret);
  j.getString("/auth_code", auth_code);

  if(!client_id.size() || !client_secret.size() || !auth_code.size()) {
    RX_ERROR("One of the configuration options is empty! Did you set the auth code? See html/index.html and readme.");
    return EXIT_FAILURE;
  }

  // test an upload with some garbage json 
  // -----------------------------------------------
  YouTube yt;
  if(!yt.setup(client_id, client_secret)) {
    RX_ERROR("Cannot setup the youtube API handler");
    return EXIT_FAILURE;
  }

  if(!yt.hasAccessToken()) {
    RX_VERBOSE("Fetching access token");
    if(!yt.exchangeAuthCode(auth_code)) {
      RX_ERROR("Cannot update the access token");
      return EXIT_FAILURE;
    }
  }

  YouTubeVideo video;
  video.title = "some title";
  video.filename = rx_to_data_path("test.mov");
  video.datapath = false;
  video.video_resource_json = "_invalid_json_"; 
  video.bytes_total = rx_get_file_size(video.filename);

  YouTubeUploadStart uploader;
  if(uploader.start(video, yt.getAccessToken())) {
    RX_ERROR("TEST FAILED, uploader should return false on error");
  }
  else {
    RX_ERROR("TEST SUCCEEDED!");
  }

  return EXIT_SUCCESS;
}
void twitter_usertimeline_cb(HTTPConnection* c, HTTPConnectionEvent event,
                             const char* data, size_t len, void* user)
{
  if(event == HTTP_ON_STATUS) {
    RX_VERBOSE("HTTP status: %d", c->parser.status_code);
  }
  else if(event == HTTP_ON_BODY) {
    std::string str(data, data+len);
    printf("%s\n", str.c_str());
  }
}
// LOGGING SUPPORTED CAPABILITIES 
// --------------------------------------------------------------------------------------
void VideoCaptureBase::printSupportedFrameRates(int device, 
                                                int width, 
                                                int height,
                                                enum AVPixelFormat fmt) 
{

  std::vector<AVRational> rates = getSupportedFrameRates(device, width, height, fmt);
  if(!rates.size()) {
    RX_VERBOSE("No supported framerates for: %dx%d", width, height);
    return;
  }

  RX_VERBOSE("Supported framerate for device: %d and size: %dx%d", device, width, height); 
  RX_VERBOSE("--------------------------------------------------------------------");
  for(std::vector<AVRational>::iterator it = rates.begin(); it != rates.end(); ++it) {
    AVRational r = *it;
    double fps = 1.0 / (double(r.num) / double(r.den));
    RX_VERBOSE("\t %02.02f fps", fps); 
  }

}
WebmScreenRecorder::~WebmScreenRecorder() {
  stop();

  RX_VERBOSE("~WebmScreenRecorder()");
  nbytes_per_video_frame = 0;

#if !defined(WEBM_SCREENREC_USE_PBOS)
  if(pixels) {
    delete[] pixels;
    pixels = NULL;
  }
#endif
}
void youtube_client_ipc_on_uploaded(std::string path, char* data, size_t nbytes, void* user) {
  RX_VERBOSE("Client got a message that the video was uploaded.");

  YouTubeClientIPC* ipc = static_cast<YouTubeClientIPC*>(user);

  if(ipc->cb_upload_ready) {
    Buffer buf(data, nbytes);
    YouTubeVideo video;
    video.unpack(buf);
    ipc->cb_upload_ready(video, ipc->cb_upload_ready_user);
  }

}
void video_encoder_client_ipc_on_cmd_executed(std::string path, char* data, size_t nbytes, void* user) {
  RX_VERBOSE("Command executed  added!");
  if(nbytes > 0) {

    Buffer b(data, nbytes);
    VideoEncoderEncodeTask task;
    task.unpack(b);

    VideoEncoderClientIPC* ipc = static_cast<VideoEncoderClientIPC*>(user);
    if(ipc->cb_cmd_executed) {
      ipc->cb_cmd_executed(task, ipc->cb_user);
    }
  }
}
// gets called when we receive new data
void twitter_filter_cb(HTTPConnection* c, HTTPConnectionEvent event, 
                               const char* data, size_t len, void* user) 
{
  if(event == HTTP_ON_STATUS) {
    RX_VERBOSE("HTTP status: %d", c->parser.status_code);
  }
  else if(event == HTTP_ON_BODY) {
    // parse the tweet.
    std::string str(data, data+len);
    Tweet tweet;
    tweet.parseJSON(str);
    tweet.print();
  }
}
HTTPConnection::~HTTPConnection() {
  RX_VERBOSE("@todo shouldn't we delete the sock here?, chack httpconnection_on_close");
  RX_VERBOSE("@todo WHAT DO WE DO WITH THE SSL BUFFER?");
  RX_VERBOSE("@todo delete buffer object ");

  if(buffer) {
    delete buffer;
    buffer = NULL;
  }

  loop = NULL;
  cb_event = NULL;
  cb_event_user = NULL;
  cb_close = NULL;
  cb_close_user = NULL;

  host.clear();
  port.clear();

  if(ssl) {
    SSL_free(ssl);
  }
}
void VideoCaptureBase::printCapabilities(int device) {

  std::vector<AVCapability> caps = getCapabilities(device);
  if(!caps.size()) {
    RX_VERBOSE("No capabilities found for device: %d", device);
    return;
  }

  int last_width = 0;
  int last_height = 0;
  enum AVPixelFormat last_pix_fmt = AV_PIX_FMT_NONE;
  
  for(std::vector<AVCapability>::iterator it = caps.begin();
      it != caps.end();
      ++it)
    {
      AVCapability cap = *it;
      if(cap.size.width != last_width 
         || cap.size.height != last_height 
         || cap.pixel_format != last_pix_fmt) 
        {
          RX_VERBOSE("");
          RX_VERBOSE("%s, %dx%d", 
                     rx_libav_pixel_format_to_string(cap.pixel_format).c_str(),
                     cap.size.width,
                     cap.size.height);
          RX_VERBOSE("--------------------------------------------------------------------");
          last_width = cap.size.width;
          last_height = cap.size.height;
          last_pix_fmt = cap.pixel_format;
      }
      
      AVRational r = cap.framerate;
      double fps = 1.0 / (double(r.num) / double(r.den));
      RX_VERBOSE("\tFPS %2.02f ", fps);
    }
}
static void on_font_save_clicked(int id, void* user) {

  KankerApp* app = static_cast<KankerApp*>(user);
  if (NULL == app) {
    RX_ERROR("error: cannot cast to KankerApp* in on_save_clicked().");
    return;
  }

  if (0 == app->font_filename.size()) {
    RX_ERROR("No filename entered. Cannot save.");
    return;
  }

  RX_VERBOSE("Saving file: %s, origin_x: %f", app->font_filename.c_str(), app->kanker_font.origin_x);
  
  app->kanker_font.save(rx_to_data_path("fonts/" +app->font_filename));
}
// Capture control
// --------------------------------------------------------------------------------------
int VideoCaptureDirectShow2::listDevices() {
  // Get the device enumerator
  int count = 0;
  HRESULT hr = S_OK;
  IMoniker* moniker = NULL;   /* declared up front so the goto below doesn't jump over an initialization */

  IEnumMoniker* enum_moniker = NULL;
  if(!createDeviceMoniker(&enum_moniker)) {
    RX_ERROR("Cannot create the IEnumMoniker, so we cannot enumerate the capture devices");
    goto done;
  }

  // Get information from the devices
  while(enum_moniker->Next(1, &moniker, NULL) == S_OK) {
    
    // Get the properties bag
    IPropertyBag* property_bag = NULL;
    hr = moniker->BindToStorage(0, 0, IID_PPV_ARGS(&property_bag));
    if(FAILED(hr)) {
      RX_ERROR("Cannot bind property bag to storage");
      moniker->Release();
      continue;
    }

    VARIANT var;
    VariantInit(&var);

    // Get the FriendlyName
    hr = property_bag->Read(L"FriendlyName", &var, 0);
    if(SUCCEEDED(hr)) {
      RX_VERBOSE("[%d] %S", count, var.bstrVal);
      VariantClear(&var);
    }
    else {
      RX_ERROR("Cannot retrieve the FriendlyName");
    }
    
    ++count;
    property_bag->Release();
    moniker->Release();
  }

 done:
  safeReleaseDirectShow(&enum_moniker);
  return count;
}
  int Image::load(std::string filepath) {

    /* validate */
    if (0 == filepath.size()) { 
      RX_ERROR("Error: empty filepath.\n");
      return -1;
    }

    if (false == rx_file_exists(filepath)) { 
      RX_ERROR("File doesn't exist.\n");
      return -2;
    }

    std::string ext = rx_get_file_ext(filepath);
    if (ext == "jpg") {
      type = IMAGE_JPEG;
    }
    else if(ext == "png") {
      type = IMAGE_PNG;
    }
    else {
      RX_ERROR("Unknown extension: %s\n", ext.c_str());
      return -3;
    }

    if (type == IMAGE_PNG) {
      if (!rx_load_png(filepath, &pixels, width, height, channels, &capacity)) {
        RX_ERROR("Cannot load png: %s", filepath.c_str());
        return -4;
      }
    }
    else if(type == IMAGE_JPEG) {
      if (!rx_load_jpg(filepath, &pixels, width, height, channels, &capacity)) {
        RX_ERROR("Cannot load jpg: %s", filepath.c_str());
        return -5;
      }
    }
    else {
      RX_ERROR("Invald image type (shouldn't happen).");
      return -6;
    }

    RX_VERBOSE("Loaded: %s, allocated: %d bytes", filepath.c_str(), capacity);
    return 0;
  }
  /* Creates a texture for the loaded image. */
  int Image::createTexture() {

    if (0 != texid) {
      RX_ERROR("Already created a texture for this image.");
      return -1;
    }

    glGenTextures(1, &texid);
    glBindTexture(GL_TEXTURE_2D, texid);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    RX_VERBOSE("Created texture: %d", texid);

    return 0;
  }
VideoCaptureDirectShow2::VideoCaptureDirectShow2()
  :graph_builder(NULL)
  ,capture_graph_builder(NULL)
  ,device_filter(NULL)
  ,sample_grabber_filter(NULL)
  ,sample_grabber(NULL)
  ,null_renderer_filter(NULL)
  ,media_control(NULL)
  ,media_event(NULL)
  ,capture_cb(NULL)
{
  RX_VERBOSE("VideoCaptureDirectShow()");
  HRESULT hr = CoInitializeEx(0, COINIT_MULTITHREADED);
  if(FAILED(hr)) {
    RX_ERROR("Cannot intialize COM");
    ::exit(EXIT_FAILURE);
  }

}
void KankerApp::switchState(int newstate) {

  if (newstate == state) {
    RX_VERBOSE("warning: trying to switch to the same state? %d", state);
  }

  state = newstate;

  switch (state) {
    case KSTATE_CHAR_INPUT_TITLE: {
      title_font.write("Type a character to record:");  
      break;
    }
    case KSTATE_CHAR_INPUT_DRAWING: {
      info_font.write("Drag with the mouse to add points to the character. "
                      "Press backspace to restart. Space when happy with the character.");
      verbose_font.write((char)kanker_glyph->charcode);
      break;
    }
    case KSTATE_CHAR_EDIT: {
      info_font.write("Position the origin (dot) and set advance-x. Press space when ready.");
      if (kanker_glyph) {

        if (kanker_glyph->advance_x == 0.0f) {
          /* Auto calc advance x. (just the width). */
          advance_x = kanker_glyph->min_x + kanker_glyph->width;
          advance_x = CLAMP(advance_x, gui_width, painter.width() - gui_width);
        }
        else {
          advance_x = gui_width + kanker_glyph->advance_x;
        }

        /* Set the initial (or loaded) advance_x on the glyph. */
        kanker_glyph->advance_x = (advance_x - gui_width);
        kanker_glyph->origin_x = origin_x;
      }
      break;
    }
    case KSTATE_CHAR_PREVIEW: {
      if (kanker_glyph) {

        if (0.0f == kanker_glyph->advance_x) {
          RX_ERROR("The glyph advance_x is 0.0 expect incorrect results..");
        }

        KankerGlyph copy = *kanker_glyph;
        preview_drawer.updateVertices(copy);
      }
      else {
        RX_WARNING("Changing to preview state, but the glyph is NULL.");
      }
      break;
    }
    case KSTATE_CHAR_OVERVIEW: {
      glyph_dx = -1;
      onKeyRelease(GLFW_KEY_RIGHT, 0, 0);
      info_font.write("Press left and right arrows to switch character.");
      break;
    }
    default: {
      break;
    }
  }
}
void sh(int signum) {
  RX_VERBOSE("Received signal; stopping");
  must_run = false;
}
// @todo - we need to construct the correct video resource json
bool YouTubeModel::addVideoToUploadQueue(YouTubeVideo video) {

  if(!video.filename.size()) {
    RX_ERROR("Cannot add a video which has no filename set");
    return false;
  }

  std::string filepath = video.filename;
  if(video.datapath) {
    filepath = rx_to_data_path(video.filename);
  }

  size_t fsize = rx_get_file_size(filepath);
  if(!fsize) {
    RX_ERROR("Filesize of %s returned %ld", filepath.c_str(), fsize);
    return false;
  }

#if USE_JSON_PACK
  // @todo - libjansson is used as a DLL and the caller of json_dumps()
  // is supposed to free() the returned string; this is icky because the
  // memory is allocated inside the DLL, so ideally libjansson itself
  // would free anything it allocates.
  std::string video_json;
  char* video_resource = NULL;
  json_t* body = NULL;
  
  if(!video.video_resource_json.size()) {
    body = json_pack("{ s: {s:s}, s: { s:s, s:i, s:s, s:s } }",
                             "status", "privacyStatus", "private", 
                             "snippet", "tags", video.tags.c_str(), 
                             "categoryId", video.category,  
                             "description", video.description.c_str(), 
                             "title", video.title.c_str());
 
    video_resource = json_dumps(body, JSON_INDENT(0));
    json_decref(body);

    if(!video_resource) {
      RX_ERROR("Cannot create the JSON video resource string");
      return false;
    }

    video_json.assign(video_resource, strlen(video_resource));
  }
  else {
    video_json = video.video_resource_json;
  }
#else
  std::string video_json;

  if(!video.video_resource_json.size()) {
    std::stringstream ss;

    ss << "{ \"status\": { \"privacyStatus\" : \"private\" }, "
       <<   "\"snippet\": {"
       <<       "\"title\":\"" << video.title << "\", "
       <<       "\"tags\":\"" << video.tags << "\", "
       <<       "\"categoryId\":" << video.category << ", "
       <<       "\"description\":\"" << video.description << "\""
       <<    "}"
       << "}";

    video_json = ss.str();
  }
  else {
    video_json = video.video_resource_json;
  }

#endif

  bool r = db.insert("videos")
    .use("filename", video.filename)
    .use("state", YT_VIDEO_STATE_NONE)
    .use("bytes_total", fsize)
    .use("video_resource_json", video_json)
    .use("datapath", (video.datapath) ? 1 : 0)
    .use("title", video.title)
    .use("description", video.description)
    .use("tags", video.tags)
    .use("privacy_status", video.privacy_status)
    .use("category", video.category)
    .execute();

#if USE_JSON_PACK
  if(video_resource) {
    free(video_resource);  
    video_resource = NULL;
  }
#endif

  if(r) {
    RX_VERBOSE("Added video to the queue: %s", video.filename.c_str());
  }
  else {
    RX_ERROR("Error while trying to add: %s to the video upload queue", video.filename.c_str());
  }

  return r;
}
static int on_color(const char* path, const char* types, lo_arg** argv, int argc, void* data, void* user) {
  RX_VERBOSE("on_color: %f, %f, %f", argv[0]->f, argv[1]->f, argv[2]->f);

  return 0;
}
int Socket::close() {

  /* Already closed? */
  if (0 != isConnected()) {
    return 0; 
  }

  /* Close */
  if (-1 != handle) {
    errno = EINTR;
#if defined(_WIN32)      
    int result = -1;
    int err = 0;
    do { 

      result = ::closesocket(handle);
      handle = -1; /* even when closesocket returns < 0, we unset the handle so it can be reused. */

      if (0 != result) {

        err = socket_get_error();

        switch(err) {
          case WSANOTINITIALISED: {
            RX_ERROR("Socket not initialized.");
            return 0;
          }
          case WSAENETDOWN: {
            RX_ERROR("Network is down. Socket closed");
            return 0;
          }
          case WSAENOTSOCK: {
            RX_ERROR("The `sock` member is not a real socket. This is not supposed to happen but occurs when connect() fails.");
            return 0;
          }
          case WSAEINPROGRESS: {
            RX_VERBOSE("We're in the process of closing the socket; continuing.");
            return 0;
          }
          case WSAEINTR: {
            RX_VERBOSE("Call was interrupted.");
            return 0;
          }
          case WSAEWOULDBLOCK: {
            RX_ERROR("WSAWOULDBLOCK");
            return 0;
          }
          default: {
            RX_ERROR("Unhandled error in close(), error: %d", err);
            return 0;
          }
        }
      }
    } while (0 != result);
#else
    while (::close(handle) != 0 && EINTR == errno) { }
#endif
    handle = -1;
  }

  return 0;
}