Example #1
// *************************************************
// *************************************************
//                                                                      CBR
// *************************************************
// *************************************************
uint8_t xvidEncoderCBR::init (uint32_t br, uint32_t fps1000)
{
  UNUSED_ARG (fps1000);
  return initExtented (br, NULL);
}
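
Every snippet on this page relies on an UNUSED_ARG macro to silence unused-parameter warnings, but none of them shows its definition. A minimal sketch of the usual pattern (an assumption, not taken from any of these codebases; note that some projects, as in Examples #5 and #10, bake the trailing semicolon into the macro and call it without one):

/* Hypothetical fallback definition; each project ships its own variant. */
#ifndef UNUSED_ARG
#define UNUSED_ARG(x) ((void)(x))  /* evaluate and discard: marks the parameter as used */
#endif
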
Example #2
static void mdlTerminate(SimStruct *S)
{
    UNUSED_ARG(S); /* unused input argument */
}
Example #3
uint8_t h263Header::setFlag(uint32_t frame,uint32_t flags) {
    UNUSED_ARG(frame);
    UNUSED_ARG(flags);
    return 0;
}
Example #4
static void mdlCheckParameters(SimStruct *S)
{
   UNUSED_ARG(S); 
}
Example #5
	int i;

	QString title("");

	for (i = 1; i < m_dataModel->columnCount(); i++) {
		title += m_dataModel->headerData(i, Qt::Horizontal).toString();
		if (i != m_dataModel->columnCount() - 1) {
			title += ", ";
		}
	}
	setWindowTitle(title);
}

void WatchGeoDataTree::updateView(bool force)
{
	UNUSED_ARG(force)
	/* TODO */
}

void WatchGeoDataTree::closeView()
{
	hide();
	deleteLater();
}

void WatchGeoDataTree::newSelection(const QModelIndex & index)
{
	if (index.isValid()) {
		int dataIdx = index.data(GeoShaderDataModel::IndexRole).toInt();
		if (dataIdx >= 0
				&& !index.data(GeoShaderDataModel::VertexRole).toBool()) {
Example #6
/**
 * @brief Selects super verbose mode on
 *
 * @param arg not used
 */
static void
selfn_super_verbose_mode(const char *arg)
{
        UNUSED_ARG(arg);
        sel_verbose_mode = 2;
}
Example #7
/**
 * @brief Selects showing allocation settings
 *
 * @param arg not used
 */
static void selfn_show_allocation(const char *arg)
{
        UNUSED_ARG(arg);
        sel_show_allocation_config = 1;
}
Example #8
void *netx_new_txbuf(int size)
{
  UNUSED_ARG(size);
  return malloc(sizeof(UDPPacket));
}
Example #9
/* Called when we are done with our buffer. This is done for ports that
 * require the buffer to be freed after a packet is sent. If the stack
 * takes care of this itself, this can do nothing;
 */
void netx_release_txbuf(void * pkt)
{
  UNUSED_ARG(pkt);
/*  delete (UDPPacket*)pkt; */
}
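
The comment above spells out the contract: on a port where the network stack does not reclaim transmit buffers, this hook has to free whatever netx_new_txbuf() allocated (Example #8 allocates it with malloc()). A minimal sketch of such a port-specific variant, purely illustrative and not part of the original source:

#include <stdlib.h>

/* Hypothetical variant for a port that must free its own transmit buffers. */
void netx_release_txbuf(void *pkt)
{
  free(pkt);   /* pairs with the malloc() in netx_new_txbuf() */
}
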
Example #10
/** Handler of the progress notifications. Must be overridden in inherited
    classes. The default handler is provided (doing nothing).
    @param alg :: Pointer to the algorithm sending the notification. Note that
    this can point to a different object than the argument of an observeZZZ(...)
    method, e.g. an observer can be connected to an AlgorithmProxy instance and
    receive notifications from the corresponding Algorithm type object.
    @param p :: Progress reported by the algorithm, 0 <= p <= 1
    @param msg :: Optional message string sent by the algorithm
*/
void AlgorithmObserver::progressHandle(const IAlgorithm *alg, double p,
                                       const std::string &msg) {
  UNUSED_ARG(alg)
  UNUSED_ARG(p)
  UNUSED_ARG(msg)
}

/** Handler of the start notifications. Must be overridden in inherited classes.
    The default handler is provided (doing nothing).
    @param alg :: Shared pointer to the algorithm sending the notification.
*/
void AlgorithmObserver::startingHandle(IAlgorithm_sptr alg) { UNUSED_ARG(alg) }

/** Handler of the start notifications. Must be overridden in inherited classes.
    The default handler is provided (doing nothing).
    @param alg :: Pointer to the algorithm sending the notification. Note that
    this can point to a different object than the argument of an observeZZZ(...) method,
Example #11
void UI_on_key_press(GtkWidget *widget, GdkEventKey* event, gpointer user_data)
{
    UNUSED_ARG(widget);
    UNUSED_ARG(user_data);
	gboolean shift = FALSE;
	gboolean ctrl = FALSE;
	gboolean alt = FALSE;
	Action action;

	//printf("key : %d (%c) \n",event->keyval,event->keyval);
	
	if (event->state & GDK_CONTROL_MASK)
	{
		ctrl = TRUE;
	}
	if (event->state & GDK_SHIFT_MASK)
	{
		shift = TRUE;
	}
	if(event->state & GDK_MOD1_MASK)
	{
		alt = TRUE;
	}
	// ALT+x
	//_____________
	if(alt)
	{
	
		switch(event->keyval)
		{
			case GDK_l:
			case GDK_L: action=ACT_OpenAvi;break;
			case GDK_s:
			case GDK_S: action=ACT_SaveWork;break;
				
			// repeat Keyboard stuff
			case GDK_B:
			
				if(shift)
					action=ACT_GotoMarkA;
				else
					action=ACT_MarkA;
				break;
			case GDK_E:
			
				if(shift)
					action=ACT_GotoMarkB;
				else
					action=ACT_MarkB;
				break;
			default:
				return;		
		}
		HandleAction(action);
		return;
	
	}
	action=ACT_DUMMY;
	// CTRL+x
	//_____________
	if(TRUE==ctrl)
	{
	  switch (event->keyval)
	  {
	  	case GDK_A:
		case GDK_a:
					action=ACT_AppendAvi;break;
					
	  	case GDK_c:
		case GDK_C:	action=ACT_Copy;break;
		
				
	  	case GDK_bracketleft:
					action = ACT_GotoMarkA;break;
		case GDK_bracketright:
					action = ACT_GotoMarkB;break;
					
		case GDK_F:
		case GDK_f:
					action=ACT_VideoParameter;break;					
					
	  	case GDK_G:
		case GDK_g:
					action=ACT_Goto;break;

	  	
		case GDK_M:
		case GDK_m:
					action=ACT_SaveImg;break;
		
		case GDK_O:
		case GDK_o:
					action=ACT_OpenAvi;break;
		case GDK_P:
		case GDK_p:
					action=ACT_DecoderOption;break;					
		
		case GDK_S:
		case GDK_s:
					action=ACT_SaveAvi;break;
		case GDK_u:
		case GDK_U:
					action=ACT_SetMuxParam;break;
		case GDK_v:
		case GDK_V:		action=ACT_Paste;break;		
		
		case GDK_X:
		case GDK_x:
					action=ACT_Delete;break;					
					
		default:
			break;

	  }
	}

	if(action!=ACT_DUMMY)
	{
		HandleAction(action);
		return;
	}

    switch (event->keyval)
	{
	case GDK_F1:
		action=ACT_VideoModeToggle;break;
	case GDK_F2:
		action=ACT_AudioModeToggle;break;
			
	case GDK_space:
		action=ACT_PlayAvi;
		break;
	case GDK_X:
		action=ACT_Delete;
		break;
	
	case GDK_Up:
		action=ACT_NextKFrame;
		break;
	case GDK_Down:
		action=ACT_PreviousKFrame;
		break;
			
		// Position reverse
	case GDK_Left: case GDK_KP_Left:

		// One frame
		if((shift == FALSE) && (ctrl == FALSE))
		{
			action = ACT_PreviousFrame;
		}
		// 50 frames
		else if(ctrl == TRUE)
		{
			action = ACT_Back50Frames;
		}
		// 25 frames
		else
		{
			action = ACT_Back25Frames;
		}
		break;

		// Position advance
	case GDK_Right: case GDK_KP_Right:
		if((shift == FALSE) && (ctrl == FALSE))
		{
			action = ACT_NextFrame;
		}
		else if(ctrl == TRUE)
		{
			action = ACT_Forward50Frames;
		}
		else
		{
			action = ACT_Forward25Frames;
		}
		break;
		
		// Delete A-B section
	case GDK_Delete: case GDK_KP_Delete:
		action = ACT_Delete;
		break;
		
		// Go to first frame
	case GDK_Home: case GDK_KP_Home:
		//case GDK_Page_Up:  case GDK_KP_Page_Up:
		action = ACT_Begin;
		break;

		// Go to last frame
	case GDK_End: case GDK_KP_End:
		//case GDK_Page_Down: case GDK_KP_Page_Down:
		action = ACT_End;
		break;

		// Set A marker
	case GDK_bracketleft:
		action = ACT_MarkA;
		break;

		// Set B marker
	case GDK_bracketright:
		action = ACT_MarkB;
		break;

	default:
		printf("Key : %x not handled\n",event->keyval);
		return;
	}

	HandleAction(action);
}
Example #12
/// Background is the sum of correlation counts, sum of counts is discarded.
double PoldiResidualCorrelationCore::calculateCorrelationBackground(
    double sumOfCorrelationCounts, double sumOfCounts) const {
  UNUSED_ARG(sumOfCounts);

  return sumOfCorrelationCounts;
}
Example #13
uint8_t ADMVideoStabilize::getFrameNumberNoAlloc(uint32_t frame,
				uint32_t *len,
   				ADMImage *data,
				uint32_t *flags)
{
UNUSED_ARG(flags);
uint32_t uvlen;
uint32_t dlen,dflags;

ADMImage	*_next;
ADMImage	*_previous;  	
ADMImage	*_current;
		
		uvlen=    _info.width*_info.height;
		*len=uvlen+(uvlen>>1);
		if(frame> _info.nb_frames-1) return 0;
		_current=vidCache->getImage(frame);
		if(!_current) return 0;
		data->copyInfo(_current);
		if(!frame || (frame==_info.nb_frames-1))
		{

			data->duplicate(_current);
			vidCache->unlockAll();
			return 1;
		}	 
   		_previous=vidCache->getImage(frame-1);		
		if(!_previous)
		{
			vidCache->unlockAll();
			return 0;
		}
   		_next=vidCache->getImage(frame+1);
		if(!_next)
		{
			vidCache->unlockAll();
			return 0;
		}
		
		// for u & v , no action -> copy it as is
		memcpy(UPLANE(data),UPLANE(_current),uvlen>>2);
		memcpy(VPLANE(data),VPLANE(_current),uvlen>>2);

		uint8_t *inprev,*innext,*incur,*zout;

		inprev=YPLANE(_previous)+1+_info.width;
		innext=YPLANE(_next)+1+_info.width;
		incur =YPLANE(_current)+1+_info.width;

		zout=YPLANE(data)+_info.width+1;

		uint8_t *nl,*pl,*nc,*pc;

		uint16_t c,coeff;
		uint32_t x;
		for(uint32_t y= _info.height-1;y>1;y--)
		{
			nl=incur+_info.width;
			pl=incur-_info.width;
			nc=incur+1;
			pc=incur-1;

			for(x= _info.width-1;x>1;x--)
			{
				c=*incur*4;
				coeff=4;
#define PONDERATE(x,p) if(distMatrix[*incur][x]<*_param) { c+=x;coeff++;}
				PONDERATE(*innext,1);
				PONDERATE(*inprev,1);
				PONDERATE(*(pc),1);
				PONDERATE(*(nc),1);
				PONDERATE(*(nl),1);
				PONDERATE(*(pl),1);
				//*zout=(uint8_t)floor(0.49+(c/coeff));
				ADM_assert(coeff);
				ADM_assert(coeff<16);
				*zout=(c*fixMul[coeff])>>16;
				zout++;
				incur++;
				innext++;
				inprev++;
				nl++;pl++;nc++;pc++;
			}
			zout+=2;
			incur+=2;
			innext+=2;
			inprev+=2;
		}
		vidCache->unlockAll();
		return 1;	
}
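
The commented-out line in the inner loop shows the intent in floating point: each output pixel is the rounded average of the centre pixel (weighted four times) and whichever neighbours pass the distMatrix threshold. The shipped code replaces the division by coeff with a multiply by fixMul[coeff] and a right shift by 16, i.e. a 16.16 fixed-point reciprocal. The table itself is not shown in the snippet; a plausible initialization, assuming only the asserted bounds (coeff non-zero and below 16), would be:

#include <stdint.h>

/* Hypothetical 16.16 reciprocal table so that (c * fixMul[coeff]) >> 16 approximates c / coeff. */
static uint32_t fixMul[16];

static void initFixMul(void)
{
	fixMul[0] = 0;                        /* never used: the code asserts coeff != 0 */
	for (uint32_t i = 1; i < 16; i++)
		fixMul[i] = (65536 + i / 2) / i;  /* rounded 65536 / i */
}
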
Example #14
uint8_t xvidEncoderCQ::init (uint32_t q, uint32_t fps1000, uint32_t extra)
{
  UNUSED_ARG (fps1000);
  UNUSED_ARG (extra);
  return initExtented (q, NULL);
}
Example #15
static PyObject *
Util_func_getaddrinfo(PyObject *obj, PyObject *args, PyObject *kwargs)
{
    char *host_str;
    char port_str[6];
    int port, family, socktype, protocol, flags, r;
    struct addrinfo hints;
    uv_getaddrinfo_t* req = NULL;
    Loop *loop;
    PyObject *callback, *host, *idna;

    static char *kwlist[] = {"loop", "callback", "host", "port", "family", "socktype", "protocol", "flags", NULL};

    UNUSED_ARG(obj);
    port = socktype = protocol = flags = 0;
    family = AF_UNSPEC;

    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O!OO|iiiii:getaddrinfo", kwlist, &LoopType, &loop, &host, &callback, &port, &family, &socktype, &protocol, &flags)) {
        return NULL;
    }

    if (host == Py_None) {
        host_str = NULL;
    } else if (PyUnicode_Check(host)) {
        idna = PyObject_CallMethod(host, "encode", "s", "idna");
        if (!idna)
            return NULL;
        host_str = PyBytes_AS_STRING(idna);
    } else if (PyBytes_Check(host)) {
        host_str = PyBytes_AsString(host);
    } else {
        PyErr_SetString(PyExc_TypeError, "getaddrinfo() argument 2 must be string or None");
        return NULL;
    }

    if (!PyCallable_Check(callback)) {
        PyErr_SetString(PyExc_TypeError, "a callable is required");
        return NULL;
    }

    if (port < 0 || port > 65535) {
        PyErr_SetString(PyExc_ValueError, "port must be between 0 and 65535");
        return NULL;
    }
    snprintf(port_str, sizeof(port_str), "%d", port);

    req = PyMem_Malloc(sizeof(uv_getaddrinfo_t));
    if (!req) {
        PyErr_NoMemory();
        goto error;
    }

    Py_INCREF(loop);
    Py_INCREF(callback);
    req->data = (void *)callback;

    memset(&hints, 0, sizeof(hints));
    hints.ai_family = family;
    hints.ai_socktype = socktype;
    hints.ai_protocol = protocol;
    hints.ai_flags = flags;

    r = uv_getaddrinfo(loop->uv_loop, req, &getaddrinfo_cb, host_str, port_str, &hints);
    if (r != 0) {
        RAISE_UV_EXCEPTION(loop->uv_loop, PyExc_UVError);
        goto error;
    }

    Py_RETURN_NONE;

error:
    if (req) {
        PyMem_Free(req);
    }
    return NULL;
}
Example #16
/**
	Load or append a file.
	The file type is determined automatically and the ad-hoc video decoder is spawned


*/
uint8_t ADM_Composer::addFile (char *name, uint8_t mode)
{
  uint8_t    ret =    0;
  aviInfo    info;
  WAVHeader *    _wavinfo;
//  aviHeader *    tmp;
  fileType    type =    Unknown_FileType;

UNUSED_ARG(mode);
	_haveMarkers=0; // by default no markers are present
  ADM_assert (_nb_segment < max_seg);
  ADM_assert (_nb_video < MAX_VIDEO);

  if (!identify (name, &type))
    return 0;


#define OPEN_AS(x,y) case x:\
						_videos[_nb_video]._aviheader=new y; \
						 ret = _videos[_nb_video]._aviheader->open(name); \
						break;
  switch (type)
    {
      case VCodec_FileType:
      		loadVideoCodecConf(name);      		
		return ADM_IGN; // we do it but it will fail, no problem with that
      		break;
      OPEN_AS (Mp4_FileType, mp4Header);
      OPEN_AS (H263_FileType, h263Header);
      
      case ASF_FileType:
              _videos[_nb_video]._aviheader=new asfHeader; 
              ret = _videos[_nb_video]._aviheader->open(name); 
              if(!ret)
              {
                delete _videos[_nb_video]._aviheader;
                printf("Trying mpeg\n"); 
                goto thisIsMpeg; 
              }
              break;
      OPEN_AS (NewMpeg_FileType,dmxHeader);
      // For AVI we first try to open it as openDML
      case AVI_FileType:
      			_videos[_nb_video]._aviheader=new OpenDMLHeader; 
			 ret = _videos[_nb_video]._aviheader->open(name); 			
			break;
      
    case Nuppel_FileType:
	{ // look if the idx exists
	  char *tmpname = (char*)ADM_alloc(strlen(name)+strlen(".idx")+1);
		ADM_assert(tmpname);
		sprintf(tmpname,"%s.idx",name);
		if(addFile(tmpname))
		{
			return 1; // Memleak ?
		}
		ADM_dealloc(tmpname);
		// open .nuv file
		_videos[_nb_video]._aviheader=new nuvHeader;
		ret = _videos[_nb_video]._aviheader->open(name);
		// we store the native .nuv file in the edl
		// the next load of the edl will open .idx instead
		break;
	}
      OPEN_AS (BMP_FileType, picHeader);
      OPEN_AS (Matroska_FileType, mkvHeader);
      OPEN_AS (AvsProxy_FileType, avsHeader);
      OPEN_AS (_3GPP_FileType, _3GPHeader);
       OPEN_AS (Ogg_FileType, oggHeader);

    case Mpeg_FileType:
thisIsMpeg:
    	// look if the idx exists
	char tmpname[256];
	ADM_assert(strlen(name)+5<256);
	strcpy(tmpname,name);
	strcat(tmpname,".idx");
        if(ADM_fileExist(tmpname))
        {
	       return addFile(tmpname);
        }
	/* check for "Read-only file system" */
	{
                int fd = open(tmpname,O_CREAT|O_EXCL|O_WRONLY,S_IRUSR|S_IWUSR);
                if( fd >= 0 )
                {
                    close(fd);
                    unlink(tmpname);
                    printf("Filesystem is writable\n");
		}else if( errno == EROFS ){
		  char *tmpdir = getenv("TMPDIR");
#ifdef CYG_MANGLING
                        printf("Filesystem is not writable, looking for somewhere else\n");
			if( !tmpdir )
				tmpdir = "c:";
			snprintf(tmpname,256,"%s%s.idx",tmpdir,strrchr(name,'\\'));
#else
			if( !tmpdir )
				tmpdir = "/tmp";
			snprintf(tmpname,256,"%s%s.idx",tmpdir,strrchr(name,'/'));
#endif
			tmpname[255] = 0;
                        printf("Storing index in %s\n",tmpname);
                    if(ADM_fileExist(tmpname))
                    {
                        printf("Index present, loading it\n");
                        return addFile(tmpname);
                    }
                }
        }
        if(tryIndexing(name,tmpname))
        {
                return addFile (tmpname);
        }
        return 0;
      break;
	case WorkBench_FileType:

  		return loadWorbench(name);
#if 0
        case Script_FileType:
                return parseScript(name);
#endif
	case ECMAScript_FileType:
                printf("****** This is an ecmascript, run it with avidemux2 --run yourscript *******\n");
                printf("****** This is an ecmascript, run it with avidemux2 --run yourscript *******\n");
                printf("****** This is an ecmascript, run it with avidemux2 --run yourscript *******\n");
                return 0;
		
                
    default:
      if (type == Unknown_FileType)
	{
	  printf ("\n not identified ...\n");
	}
      else
        GUI_Error_HIG(_("File type identified but no loader support detected..."),
                      _("May be related to an old index file."));
      return 0;
    }

   // check opening was successful
   if (ret == 0) {
     char str[512+1];
     snprintf(str,512,_("Attempt to open %s failed!"), name);
      str[512] = '\0';
      GUI_Error_HIG(str,NULL);
      delete _videos[_nb_video]._aviheader;
      return 0;
   }

   /* check for resolution */
   if( _nb_video ){
      /* append operation */
      aviInfo info0, infox;
      _videos[   0     ]._aviheader->getVideoInfo (&info0);
      _videos[_nb_video]._aviheader->getVideoInfo (&infox);
      if( info0.width != infox.width || info0.height != infox.height ){
        char str[512+1];
         str[0] = '\0';
         if( info0.width != infox.width )
            strcpy(str,"width");
         if( info0.height != infox.height )
            snprintf(str+strlen(str),512-strlen(str),
              "%sheight%sdifferent between first and this video stream",
                 (strlen(str)?" and ":""),
                 (strlen(str)?" are ":" is ") );
         str[512] = '\0';
         GUI_Error_HIG(str,_("You cannot mix different video dimensions yet. Using the partial video filter later will not work around this problem. The workaround is:\n1.) \"resize\" / \"add border\" / \"crop\" each stream to the same resolution\n2.) concatenate them together"));
         delete _videos[_nb_video]._aviheader;
         return 0;
      }
   }
 
  // else update info
  _videos[_nb_video]._aviheader->getVideoInfo (&info);
  _videos[_nb_video]._aviheader->setMyName (name);
  // 1st if it is our first video we update postproc
 if(!_nb_video)
 {
        uint32_t type,value;

        if(!prefs->get(DEFAULT_POSTPROC_TYPE,&type)) type=3;
        if(!prefs->get(DEFAULT_POSTPROC_VALUE,&value)) value=3; 	

	deletePostProc(&_pp );
 	initPostProc(&_pp,info.width,info.height);
	_pp.postProcType=type;
	_pp.postProcStrength=value;
	_pp.forcedQuant=0;
	updatePostProc(&_pp);

	if(_imageBuffer) delete _imageBuffer;
	_imageBuffer=new ADMImage(info.width,info.height);
 	_imageBuffer->_qSize= ((info.width+15)>>4)*((info.height+15)>>4);
	_imageBuffer->quant=new uint8_t[_imageBuffer->_qSize];
	_imageBuffer->_qStride=(info.width+15)>>4;
 }
Example #17
static PyObject *
Util_func_get_free_memory(PyObject *obj)
{
    UNUSED_ARG(obj);
    return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG)uv_get_free_memory());
}
Example #18
/**
 * Restore the window from a string saved by saveToSring method.
 * TODO: not implemented.
 */
void TiledWindow::restore(const QStringList& data)
{
  UNUSED_ARG(data);
}
Example #19
/**
 * @brief Sets CAT reset flag
 *
 * @param arg not used
 */
static void selfn_reset_cat(const char *arg)
{
        UNUSED_ARG(arg);
        sel_reset_CAT = 1;
}
Example #20
static PyObject *
Util_func_getaddrinfo(PyObject *obj, PyObject *args, PyObject *kwargs)
{
    char *name;
    char port_str[6];
    int port, family, socktype, protocol, flags, r;
    struct addrinfo hints;
    getaddrinfo_cb_data_t *cb_data = NULL;
    uv_getaddrinfo_t* req = NULL;
    Loop *loop;
    PyObject *callback;

    UNUSED_ARG(obj);

    static char *kwlist[] = {"loop", "callback", "name", "port", "family", "socktype", "protocol", "flags", NULL};

    port = socktype = protocol = flags = 0;
    family = AF_UNSPEC;

    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O!sO|iiiii:getaddrinfo", kwlist, &LoopType, &loop, &name, &callback, &port, &family, &socktype, &protocol, &flags)) {
        return NULL;
    }

    if (!PyCallable_Check(callback)) {
        PyErr_SetString(PyExc_TypeError, "a callable is required");
        return NULL;
    }

    if (port < 0 || port > 65535) {
        PyErr_SetString(PyExc_ValueError, "port must be between 0 and 65535");
        return NULL;
    }
    snprintf(port_str, sizeof(port_str), "%d", port);

    req = PyMem_Malloc(sizeof(uv_getaddrinfo_t));
    if (!req) {
        PyErr_NoMemory();
        goto error;
    }

    cb_data = PyMem_Malloc(sizeof(getaddrinfo_cb_data_t));
    if (!cb_data) {
        PyErr_NoMemory();
        goto error;
    }

    Py_INCREF(loop);
    Py_INCREF(callback);
    cb_data->loop = loop;
    cb_data->cb = callback;
    req->data = (void *)cb_data;

    memset(&hints, 0, sizeof(hints));
    hints.ai_family = family;
    hints.ai_socktype = socktype;
    hints.ai_protocol = protocol;
    hints.ai_flags = flags;

    r = uv_getaddrinfo(loop->uv_loop, req, &getaddrinfo_cb, name, port_str, &hints);
    if (r != 0) {
        RAISE_UV_EXCEPTION(loop->uv_loop, PyExc_UVError);
        goto error;
    }

    Py_RETURN_NONE;

error:
    if (req) {
        PyMem_Free(req);
    }
    return NULL;
}
Example #21
/* Function: mdlInitializeConditions ========================================
 * Abstract:
 *    Initialize states
 */
static void mdlInitializeConditions(SimStruct *S)
{
    UNUSED_ARG(S);
}
Example #22
/*
 * Return the detector ID corresponding to the [x,y] pixel coordinates.
 */
int getDetectorFromPixel(const int &pixel_x, const int &pixel_y,
                         API::MatrixWorkspace_sptr dataWS) {
  UNUSED_ARG(dataWS);
  return 1000000 + 1000 * pixel_x + pixel_y;
}
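
Because the detector ID is simply 1000000 + 1000 * pixel_x + pixel_y, the mapping can be inverted with integer arithmetic. A small hypothetical helper (not part of the Mantid source, assuming 0 <= pixel_y < 1000 so the encoding is unambiguous):

// Hypothetical inverse of getDetectorFromPixel(), assuming 0 <= pixel_y < 1000.
void getPixelFromDetector(const int detector_id, int &pixel_x, int &pixel_y) {
  const int offset = detector_id - 1000000;
  pixel_x = offset / 1000;
  pixel_y = offset % 1000;
}
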
Example #23
static void mdlProcessParameters(SimStruct *S)
{
	/* to do: verify parameters */
	UNUSED_ARG(S); 
}
Example #24
void tickadj(const bool json, const int newtick)
{
#ifndef HAVE_ADJTIMEX
	UNUSED_ARG(json);
	UNUSED_ARG(newtick);
	fputs("ntpfrob: \n", stderr);
	exit(1);
#else
	if (newtick != 0)
	{
#ifdef STRUCT_TIMEX_HAS_TIME_TICK
		if ( (txc.time_tick = newtick) < 1 )
#else
		if ( (txc.tick = newtick) < 1 )
#endif /* STRUCT_TIMEX_HAS_TIME_TICK */
		{
			fprintf(stderr, "ntpfrob: silly value for tick: %d\n", newtick);
			exit(1);
		}
#ifdef MOD_TIMETICK
		txc.modes = MOD_TIMETICK;
#else
#ifdef STRUCT_TIMEX_HAS_MODES
		txc.modes = ADJ_TICK;
#else
		txc.mode = ADJ_TICK;
#endif /* STRUCT_TIMEX_HAS_MODES */
#endif /* MOD_TIMETICK */
	}
	else
	{
#ifdef MOD_TIMETICK
		txc.modes = 0;
#else
#ifdef STRUCT_TIMEX_HAS_MODES
		txc.modes = 0;
#else
		txc.mode = 0;
#endif /* STRUCT_TIMEX_HAS_MODES */
#endif /* MOD_TIMETICK */
	}

	if (ntp_adjtime(&txc) < 0)
	{
		perror("ntp_adjtime");
	}
	else
	{
#ifdef STRUCT_TIMEX_HAS_TIME_TICK
		if (json)
			printf("{\"tick\":%ld,\"tick_adj\":%ld}\n",
			       txc.time_tick, txc.tickadj);
		else
			printf("tick = %ld\ntick_adj = %ld\n",
			       txc.time_tick, txc.tickadj);
#else
		if (json)
			printf("{\"tick\":%ld}\n", txc.tick);
		else
			printf("tick = %ld\n", txc.tick);
#endif /* STRUCT_TIMEX_HAS_TIME_TICK */
	}

#endif /* HAVE_ADJTIMEX */
}
Example #25
static int reopen_server_socket(dtls_listener_relay_server_type* server, evutil_socket_t fd)
{
	UNUSED_ARG(fd);

	if(!server)
		return 0;

	FUNCSTART;

	{
		EVENT_DEL(server->udp_listen_ev);

		if(server->udp_listen_s->fd>=0) {
			socket_closesocket(server->udp_listen_s->fd);
			server->udp_listen_s->fd = -1;
		}

		if (!(server->udp_listen_s)) {
			return create_server_socket(server,1);
		}

		ioa_socket_raw udp_listen_fd = socket(server->addr.ss.sa_family, CLIENT_DGRAM_SOCKET_TYPE, CLIENT_DGRAM_SOCKET_PROTOCOL);
		if (udp_listen_fd < 0) {
			perror("socket");
			FUNCEND;
			return -1;
		}

		server->udp_listen_s->fd = udp_listen_fd;

		/* some UDP sessions may fail due to the race condition here */

		set_socket_options(server->udp_listen_s);

		set_sock_buf_size(udp_listen_fd, UR_SERVER_SOCK_BUF_SIZE);

		if (sock_bind_to_device(udp_listen_fd, (unsigned char*) server->ifname) < 0) {
				TURN_LOG_FUNC(TURN_LOG_LEVEL_INFO,
				"Cannot bind listener socket to device %s\n",
				server->ifname);
		}

		if(addr_bind(udp_listen_fd,&server->addr,1,1,UDP_SOCKET)<0) {
			perror("Cannot bind local socket to addr");
			char saddr[129];
			addr_to_string(&server->addr,(u08bits*)saddr);
			TURN_LOG_FUNC(TURN_LOG_LEVEL_INFO,"Cannot bind listener socket to addr %s\n",saddr);
			return -1;
		}

		server->udp_listen_ev = event_new(server->e->event_base, udp_listen_fd,
				EV_READ | EV_PERSIST, udp_server_input_handler,
				server);

		event_add(server->udp_listen_ev, NULL );
	}

	if (!turn_params.no_udp && !turn_params.no_dtls)
		addr_debug_print(server->verbose, &server->addr,
					"DTLS/UDP listener opened on ");
	else if (!turn_params.no_dtls)
		addr_debug_print(server->verbose, &server->addr,
					"DTLS listener opened on ");
	else if (!turn_params.no_udp)
		addr_debug_print(server->verbose, &server->addr,
				"UDP listener opened on ");

	FUNCEND;

	return 0;
}
Example #26
uint8_t bitsReader::forward(uint32_t nbBits)
{
        UNUSED_ARG(nbBits);
        ADM_assert(0);
        return 0; /* not implemented */
}
Example #27
static SSL* tls_connect(ioa_socket_raw fd, ioa_addr *remote_addr, int *try_again, int connect_cycle)
{

	int ctxtype = (int)(((unsigned long)random())%root_tls_ctx_num);

	SSL *ssl;

	ssl = SSL_NEW(root_tls_ctx[ctxtype]);

#if ALPN_SUPPORTED
	SSL_set_alpn_protos(ssl, kALPNProtos, kALPNProtosLen);
#endif

	if(use_tcp) {
		SSL_set_fd(ssl, fd);
	} else {
#if !DTLS_SUPPORTED
	  UNUSED_ARG(remote_addr);
	  fprintf(stderr,"ERROR: DTLS is not supported.\n");
	  exit(-1);
#else
		/* Create BIO, connect and set to already connected */
		BIO *bio = BIO_new_dgram(fd, BIO_CLOSE);
		//bio = BIO_new_socket(fd, BIO_CLOSE);

		BIO_ctrl(bio, BIO_CTRL_DGRAM_SET_CONNECTED, 0, &remote_addr->ss);

		SSL_set_bio(ssl, bio, bio);

		{
			struct timeval timeout;
			/* Set and activate timeouts */
			timeout.tv_sec = DTLS_MAX_CONNECT_TIMEOUT;
			timeout.tv_usec = 0;
			BIO_ctrl(bio, BIO_CTRL_DGRAM_SET_RECV_TIMEOUT, 0, &timeout);
		}

		set_mtu_df(ssl, fd, remote_addr->ss.sa_family, SOSO_MTU, !use_tcp, clnet_verbose);
#endif
	}

	SSL_set_max_cert_list(ssl, 655350);

	if (clnet_verbose)
		TURN_LOG_FUNC(TURN_LOG_LEVEL_INFO, "call SSL_connect...\n");

	int rc = 0;

	do {
		do {
			rc = SSL_connect(ssl);
		} while (rc < 0 && errno == EINTR);
		int orig_errno = errno;
		if (rc > 0) {
		  TURN_LOG_FUNC(TURN_LOG_LEVEL_INFO,"%s: client session connected with cipher %s, method=%s\n",__FUNCTION__,
				  SSL_get_cipher(ssl),turn_get_ssl_method(ssl,NULL));
		  if(clnet_verbose && SSL_get_peer_certificate(ssl)) {
			  TURN_LOG_FUNC(TURN_LOG_LEVEL_INFO, "------------------------------------------------------------\n");
		  	X509_NAME_print_ex_fp(stdout, X509_get_subject_name(SSL_get_peer_certificate(ssl)), 1,
		  						XN_FLAG_MULTILINE);
		  	TURN_LOG_FUNC(TURN_LOG_LEVEL_INFO, "\n\n Cipher: %s\n", SSL_CIPHER_get_name(SSL_get_current_cipher(ssl)));
		  	TURN_LOG_FUNC(TURN_LOG_LEVEL_INFO, "\n------------------------------------------------------------\n\n");
		  }
		  break;
		} else {
			TURN_LOG_FUNC(TURN_LOG_LEVEL_INFO, "%s: cannot connect: rc=%d, ctx=%d\n",
					__FUNCTION__,rc,ctxtype);

			switch (SSL_get_error(ssl, rc)) {
			case SSL_ERROR_WANT_READ:
			case SSL_ERROR_WANT_WRITE:
				if(!dos) usleep(1000);
				continue;
			default: {
				char buf[1025];
				TURN_LOG_FUNC(TURN_LOG_LEVEL_INFO, "errno=%d, err=%d, %s (%d)\n",orig_errno,
								(int)ERR_get_error(), ERR_error_string(ERR_get_error(), buf), (int)SSL_get_error(ssl, rc));
				if(connect_cycle<MAX_TLS_CYCLES) {
					if(try_again) {
						SSL_FREE(ssl);
						*try_again = 1;
						return NULL;
					}
				}
				exit(-1);
			}
			};
		}
	} while (1);

	if (clnet_verbose && SSL_get_peer_certificate(ssl)) {
		if(use_tcp) {
			TURN_LOG_FUNC(TURN_LOG_LEVEL_INFO,
				"------TLS---------------------------------------------------\n");
		} else {
			TURN_LOG_FUNC(TURN_LOG_LEVEL_INFO,
				"------DTLS---------------------------------------------------\n");
		}
		X509_NAME_print_ex_fp(stdout, X509_get_subject_name(
				SSL_get_peer_certificate(ssl)), 1, XN_FLAG_MULTILINE);
		TURN_LOG_FUNC(TURN_LOG_LEVEL_INFO, "\n\n Cipher: %s\n",
				SSL_CIPHER_get_name(SSL_get_current_cipher(ssl)));
		TURN_LOG_FUNC(TURN_LOG_LEVEL_INFO,
				"\n------------------------------------------------------------\n\n");
	}

	return ssl;
}
Example #28
static PyObject *
Util_func_hrtime(PyObject *obj)
{
    UNUSED_ARG(obj);
    return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG)uv_hrtime());
}
Example #29
uint8_t vidHeader::getFrameSize (uint32_t frame, uint32_t * size)
{
  UNUSED_ARG (frame);
  UNUSED_ARG (size);
  return 0;
}
Example #30
static bool select_alive(const side *vs, const fighter *fig, void *cbdata)
{
    UNUSED_ARG(vs);
    UNUSED_ARG(cbdata);
    return fig->alive > 0;
}