void HX711::begin(byte dout, byte pd_sck, byte gain) { PD_SCK = pd_sck; DOUT = dout; pinMode(PD_SCK, OUTPUT); pinMode(DOUT, INPUT); set_gain(gain); }
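/* Hedged usage sketch (not part of the original source): how begin() above is
 * typically called from an Arduino sketch. The `scale` instance, the pin
 * numbers, and the read() call are illustrative assumptions. */
#include "HX711.h"

HX711 scale;

void setup() {
  // DOUT on pin 3, PD_SCK on pin 2, channel A at gain 128
  scale.begin(3, 2, 128);
}

void loop() {
  long raw = scale.read();  // one 24-bit conversion from the load-cell ADC
  (void)raw;
}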
void StreamSoundSource::update()
{
  ALint processed = 0;
  alGetSourcei(source, AL_BUFFERS_PROCESSED, &processed);
  for(ALint i = 0; i < processed; ++i) {
    ALuint buffer;
    alSourceUnqueueBuffers(source, 1, &buffer);
    SoundManager::check_al_error("Couldn't unqueue audio buffer: ");
    if(fillBufferAndQueue(buffer) == false)
      break;
  }

  if(!playing()) {
    if(processed == 0 || !looping)
      return;

    // we might have to restart the source if we had a buffer underrun
    log_info << "Restarting audio source because of buffer underrun" << std::endl;
    play();
  }

  if(fade_state == FadingOn || fade_state == FadingResume) {
    float time = real_time - fade_start_time;
    if(time >= fade_time) {
      set_gain(1.0);
      fade_state = NoFading;
    } else {
      set_gain(time / fade_time);
    }
  } else if(fade_state == FadingOff || fade_state == FadingPause) {
    float time = real_time - fade_start_time;
    if(time >= fade_time) {
      if(fade_state == FadingOff)
        stop();
      else
        pause();
      fade_state = NoFading;
    } else {
      set_gain( (fade_time-time) / fade_time);
    }
  }
}
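/* Hedged illustration (not part of the original source): the linear fade ramp
 * that update() above applies, factored into a standalone helper. The function
 * name is an assumption; the math mirrors the FadingOn/FadingOff branches. */
static float fade_gain(float elapsed, float fade_time, bool fading_in)
{
  if(elapsed >= fade_time)
    return fading_in ? 1.0f : 0.0f;   // fade finished: full gain or silence
  float t = elapsed / fade_time;      // progress through the fade, 0..1
  return fading_in ? t : 1.0f - t;    // ramp up while fading in, down while fading out
}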
Spc_Emu::Spc_Emu() { set_type( gme_spc_type ); static const char* const names [Snes_Spc::voice_count] = { "DSP 1", "DSP 2", "DSP 3", "DSP 4", "DSP 5", "DSP 6", "DSP 7", "DSP 8" }; set_voice_names( names ); set_gain( 1.4 ); }
static int32_t init_all_setting(void) { uint32_t val; enable_als(0); pStkAlsData->bThreadRunning = 0; set_gain(1); /* x2 */ set_it(1); /* x1 */ val = stk_readb(STK_ALS_CMD_REG); INFO("Init ALS Setting --> CMDREG = 0x%x\n", val); return 0; /* OK */ }
Gbs_Emu::Gbs_Emu()
{
	sound_hardware = sound_gbs;
	enable_clicking( false );
	set_type( gme_gbs_type );
	set_silence_lookahead( 6 );
	set_max_initial_silence( 21 );
	set_gain( 1.2 );

	// kind of midway between headphones and speaker
	static equalizer_t const eq = { -1.0, 120 };
	set_equalizer( eq );
}
Nsf_Emu::Nsf_Emu() { vrc6 = 0; namco = 0; fme7 = 0; set_type( gme_nsf_type ); set_silence_lookahead( 6 ); apu.dmc_reader( pcm_read, this ); Music_Emu::set_equalizer( nes_eq ); set_gain( 1.4 ); memset( unmapped_code, Nes_Cpu::bad_opcode, sizeof unmapped_code ); }
void set_eq_value(float value, int index, int chn) { /* Map the gain and preamp values */ if (index >= 0) { set_gain(index, chn, 2.5220207857061455181125E-01 * exp(8.0178361802353992349168E-02 * value) - 2.5220207852836562523180E-01 , value); } else { /* -12dB .. 12dB mapping */ set_preamp(chn, 9.9999946497217584440165E-01 * exp(6.9314738656671842642609E-02 * value) + 3.7119444716771825623636E-07); } }
LCS::LCS(byte pd_sck, byte dout1, byte dout2, byte dout3, byte gain){ PD_SCK = pd_sck; DOUT1 = dout1; DOUT2 = dout2; DOUT3 = dout3; COUNTER = 0; pinMode(PD_SCK, OUTPUT); pinMode(DOUT1, INPUT); pinMode(DOUT2, INPUT); pinMode(DOUT3, INPUT); set_gain(gain); }
/* Takes camera ID, exptime, gain, amplifier, adcspeed */ int picam_set(lua_State *L) { PicamCameraID id; PicamHandle handle, model; PicamError error; piflt exptime_s = 0.0, adcspeed; PicamAdcQuality amplifier; PicamAdcAnalogGain gain; clock_t tick, tock; pibln committed; tick = clock(); id = lua_table_to_camera(L, 1, &handle); exptime_s = lua_tonumber(L, 2); gain = lua_tointeger(L, 3); amplifier = lua_tointeger(L, 4); adcspeed = lua_tonumber(L, 5); printf("Setting camera %s: exptime %3.1f s, gain %i, amp %i, adcspeed %1.1f MHz\n", id.sensor_name, exptime_s, gain, amplifier, adcspeed); error = PicamAdvanced_GetCameraModel( handle, &model ); if( error != PicamError_None ) { lua_pushstring(L, "Failed to get camera model."); lua_error(L); return 0; } set_exposure_time(model, exptime_s, L); set_gain(model, gain, L); set_amplifier(model, amplifier, L); set_adc_speed(model, adcspeed, L); tock = clock(); printf("set took %f seconds\n", ((float) tock-tick)/CLOCKS_PER_SEC); Picam_AreParametersCommitted(handle, &committed); printf("The camera %s all values commited.\n", (committed ? "has" : "does not have")); lua_pushboolean(L, committed); return 1; }
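/* Hedged registration sketch (not part of the original source): one way the
 * Lua binding above could be exposed to scripts via the standard Lua C API.
 * The Lua-visible name "picam_set" and this helper are assumptions. */
#include <lua.h>

void register_picam_set(lua_State *L)
{
    /* After this, Lua code can call: picam_set(camera, exptime_s, gain, amplifier, adcspeed) */
    lua_register(L, "picam_set", picam_set);
}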
Hes_Emu::Hes_Emu() { timer.raw_load = 0; set_type( gme_hes_type ); static const char* const names [Hes_Apu::osc_count] = { "Wave 1", "Wave 2", "Wave 3", "Wave 4", "Multi 1", "Multi 2" }; set_voice_names( names ); static int const types [Hes_Apu::osc_count] = { wave_type | 0, wave_type | 1, wave_type | 2, wave_type | 3, mixed_type | 0, mixed_type | 1 }; set_voice_types( types ); set_silence_lookahead( 6 ); set_gain( 1.11 ); }
static PyObject * flea_set_auto_gain(PyObject *self, PyObject *args) { int handle = -1; int autoMode = 0; int onOff = 0; float value = 0.0; fleaCamera* cam = NULL; if (!PyArg_ParseTuple(args, "iiif", &handle, &autoMode, &onOff, &value)) return NULL; if (handle >= 0 && handle < NUM_CAMERA_HANDLES && cameras[handle]) { cam = cameras[handle]; set_gain(cam, autoMode, onOff, value); } else { PyErr_SetString(FleaError, "Invalid Handle"); return NULL; } Py_RETURN_NONE; }
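/* Hedged module-table sketch (not part of the original source): how the
 * binding above is typically listed in a CPython extension's method table.
 * The Python-level name "set_auto_gain" and the table name are assumptions. */
static PyMethodDef flea_methods[] = {
    {"set_auto_gain", flea_set_auto_gain, METH_VARARGS,
     "set_auto_gain(handle, auto_mode, on_off, value): configure camera gain."},
    {NULL, NULL, 0, NULL}   /* sentinel */
};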
void NRF24L01::set_rx_mode()
{
    ce->reset();
    set_addr_width(5);
    set_local_addr(0, rx_addr_0);   // write the RX node address
    set_chanal_ack(0, ENABLE);      // enable auto-acknowledge on pipe 0
    set_chanal(0, ENABLE);          // enable the receive address of pipe 0
    set_rf_frq(40);                 // set the RF channel frequency
    set_pload_width(0, 4);          // select the payload width for pipe 0
    set_gain(RF_N_0DB);             // TX parameters: 0 dB gain, low-noise gain enabled
    set_baudrate(_2MBPS);           // 2 Mbps
    set_power(1);                   // PWR_UP
    set_tx_rx_mode(RX_MODE);        // receive mode
    set_crc(1, 1);                  // EN_CRC, 16-bit CRC
    ce->set();
}
static void hdspmmixer_set(t_hdspmmixer *x, t_symbol *s, int argc, t_atom *argv)
{
    int idx, src, dst, val;

    if (argc < 4 || A_FLOAT != argv->a_type || A_FLOAT != (argv+1)->a_type ||
        A_FLOAT != (argv+2)->a_type ) {
        error("hdspmmixer: set <float cardnr> <float source> <float destination> <float value>\n");
        return; /* handler is void, so bail out instead of reading missing atoms */
    }

    idx = atom_getint(argv);
    src = atom_getint(argv+1);
    dst = atom_getint(argv+2);
    val = atom_getint(argv+3);

    val = set_gain(idx, src, dst, val);

    if (val < 0)
        outlet_float(x->x_error, val);
    else
        outlet_float(x->x_obj.ob_outlet, (float) val);

    /* post("gain: %i",set_gain(idx,src,dst,val)); */
}
int TSend::set_state( const QDomNode & node )
{
        Project* project = pm().get_project();
        if (!project) {
                printf("TSend::set_state: Oh boy, no project?? Can't restore state without a project running!!\n");
                return -1;
        }

        QDomElement e = node.toElement();

        m_id = e.attribute("id", "0").toLongLong();
        qint64 busId = e.attribute("bus", "0").toLongLong();
        QString type = e.attribute("type", "");
        QString busName = e.attribute("busname", "No Busname in Project file");

        set_gain(e.attribute("gain", "1.0").toFloat());
        set_pan(e.attribute("pan", "0.00").toFloat());

        if (type == "post") {
                m_type = POSTSEND;
        } else if (type == "pre") {
                m_type = PRESEND;
        } else {
                // default to post send if no type was stored
                m_type = POSTSEND;
        }

        m_bus = project->get_audio_bus(busId);
        if (!m_bus) {
                printf("TSend::set_state: Project didn't return my Bus (%s)!\n", busName.toAscii().data());
                return -1;
        }

        return 1;
}
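/* Hedged counterpart sketch (not part of the original source): a get_state()
 * that serializes the same attributes set_state() above reads back. The
 * element name and the m_bus/get_gain()/get_pan() accessors are assumptions. */
QDomNode TSend::get_state( QDomDocument doc )
{
        QDomElement node = doc.createElement("Send");

        node.setAttribute("id", m_id);
        node.setAttribute("bus", m_bus ? m_bus->get_id() : 0);
        node.setAttribute("busname", m_bus ? m_bus->get_name() : QString());
        node.setAttribute("type", m_type == PRESEND ? "pre" : "post");
        node.setAttribute("gain", get_gain());
        node.setAttribute("pan", get_pan());

        return node;
}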
/* phxlive(ForBrief) Simple live capture application code */
int phxliveFrame(
   etCamConfigLoad eCamConfigLoad,  /* Board number, ie 1, 2, or 0 for next available */
   char *pszConfigFileName,         /* Name of config file */
   double exposure_time,
   int gain,
   unsigned short *frame_out        // added by NC to allow output to another window
)
{
   etStat       eStat = PHX_OK;         /* Status variable */
   etParamValue eParamValue;            /* Parameter for use with PHX_ParameterSet/Get calls */
   tHandle      hCamera  = 0;           /* Camera Handle */
   tPHX         hDisplay = 0;           /* Display handle */
   tPHX         hBuffer1 = 0;           /* First Image buffer handle */
   tPHX         hBuffer2 = 0;           /* Second Image buffer handle */
   //tPhxLive   sPhxLive;               /* User defined Event Context */
   ui32         nBufferReadyLast = 0;   /* Previous BufferReady count value */
   int i, length;

   /* Initialise the user defined Event context structure */
   memset( &sPhxLive, 0, sizeof( tPhxLive ) );

   /* Allocate the board with the config file */
   eStat = PHX_CameraConfigLoad( &hCamera, pszConfigFileName, eCamConfigLoad, PHX_ErrHandlerDefault );
   if ( PHX_OK != eStat ) goto Error;

   /* set camera to live acquisition mode */
   init_camera_internal_trigger(hCamera);
   set_exposure(hCamera, exposure_time);
   set_gain(hCamera, gain);

#ifndef _USE_QT
   // We create our display with a NULL hWnd, this will automatically create an image window.
   eStat = PDL_DisplayCreate( &hDisplay, NULL, hCamera, PHX_ErrHandlerDefault );
   if ( PHX_OK != eStat ) goto Error;

   // We create two display buffers for our double buffering
   eStat = PDL_BufferCreate( &hBuffer1, hDisplay, (etBufferMode)PDL_BUFF_SYSTEM_MEM_DIRECT );
   if ( PHX_OK != eStat ) goto Error;
   eStat = PDL_BufferCreate( &hBuffer2, hDisplay, (etBufferMode)PDL_BUFF_SYSTEM_MEM_DIRECT );
   if ( PHX_OK != eStat ) goto Error;

   // Initialise the display, this associates the display buffers with the display
   eStat = PDL_DisplayInit( hDisplay );
   if ( PHX_OK != eStat ) goto Error;

   // The above code has created 2 display (acquisition) buffers.
   // Therefore ensure that the Phoenix is configured to use 2 buffers, by overwriting
   // the value already loaded from the config file.
   eParamValue = (etParamValue) 2;
   eStat = PHX_ParameterSet( hCamera, PHX_ACQ_NUM_IMAGES, &eParamValue );
   if ( PHX_OK != eStat ) goto Error;
#endif

   /* Enable FIFO Overflow events */
   eParamValue = PHX_INTRPT_FIFO_OVERFLOW;
   eStat = PHX_ParameterSet( hCamera, PHX_INTRPT_SET, &eParamValue );
   if ( PHX_OK != eStat ) goto Error;

   /* Setup our own event context */
   eStat = PHX_ParameterSet( hCamera, PHX_EVENT_CONTEXT, (void *) &sPhxLive );
   if ( PHX_OK != eStat ) goto Error;

   /* Now start our capture, using the callback method */
   eStat = PHX_Acquire( hCamera, PHX_START, (void*) phxliveFrame_callback );
   if ( PHX_OK != eStat ) goto Error;

   /* Continue processing data until the user presses a key in the console window
    * or Phoenix detects a FIFO overflow */
   //printf("Press a key to exit\n");
   /* while(!PhxCommonKbHit() && !sPhxLive.fFifoOverFlow)*/
   while(!sPhxLive.fFifoOverFlow) {
      /* Temporarily sleep, to avoid burning CPU cycles.
       * An alternative method is to wait on a semaphore, which is signalled
       * within the callback function. This approach would ensure that the
       * data processing would only start when there was data to process */
      _PHX_SleepMs(10);

      /* If there are any buffers waiting to display, then process them here */
      if ( nBufferReadyLast != sPhxLive.nBufferReadyCount ) {
         stImageBuff stBuffer;
         int nStaleBufferCount;

         /* If the processing is too slow to keep up with acquisition,
          * then there may be more than 1 buffer ready to process.
          * The application can either be designed to process all buffers
          * knowing that it will catch up, or as here, throw away all but the
          * latest */
         nStaleBufferCount = sPhxLive.nBufferReadyCount - nBufferReadyLast;
         nBufferReadyLast += nStaleBufferCount;

         /* Throw away all but the last image */
         while ( nStaleBufferCount-- > 1 ) {
            eStat = PHX_Acquire( hCamera, PHX_BUFFER_RELEASE, NULL );
            if ( PHX_OK != eStat ) goto Error;
         }

         /* Get the info for the last acquired buffer */
         eStat = PHX_Acquire( hCamera, PHX_BUFFER_GET, &stBuffer );
         if ( PHX_OK != eStat ) goto Error;

         /* Process the newly acquired buffer,
          * which in this simple example is a call to display the data.
          * For our display function we use the pvContext member variable to
          * pass a display buffer handle.
          * Alternatively the actual video data can be accessed at stBuffer.pvAddress */
#ifndef _USE_QT
         PDL_BufferPaint( (tPHX)stBuffer.pvContext );
#elif defined _USE_QT
         // Load a numpy array here!!!!
         for(i = 0; i < 1000000; i++) {
            *(frame_out + i) = *((short unsigned int*)(stBuffer.pvAddress) + i);
         }
         //fflush(stdout);
         //length = sizeof(frame_out);
         //write(1, frame_out, length);
#else
         printf("EventCount = %5d\r", sPhxLive.nBufferReadyCount );
#endif
         /* Having processed the data, release the buffer ready for further image data */
         eStat = PHX_Acquire( hCamera, PHX_BUFFER_RELEASE, NULL );
         if ( PHX_OK != eStat ) goto Error;
      }
   }
   printf("\n");

   /* In this simple example we abort the processing loop on an error condition (FIFO overflow).
    * However handling of this condition is application specific, and generally would involve
    * aborting the current acquisition, and then restarting. */
   if ( sPhxLive.fFifoOverFlow ) {
      printf("FIFO OverFlow detected..Aborting\n");
   }

Error:
   /* Now cease all captures */
   if ( hCamera ) PHX_Acquire( hCamera, PHX_ABORT, NULL );

#if defined _PHX_DISPLAY
   /* Free our display double buffering resources */
   if ( hBuffer1 ) PDL_BufferDestroy( (tPHX*) &hBuffer1 );
   if ( hBuffer2 ) PDL_BufferDestroy( (tPHX*) &hBuffer2 );
   /* Destroy our display */
   if ( hDisplay ) PDL_DisplayDestroy( (tPHX*) &hDisplay );
#endif

   /* Release the Phoenix board */
   if ( hCamera ) PHX_CameraRelease( &hCamera );

   printf("Exiting\n");
   return 0;
}
int main (int argc, char **argv)
{
    unsigned short int range = 0;
    unsigned short int centerforce = 0;
    unsigned short int gain = 0;
    int do_validate_wheel = 0;
    int do_native = 0;
    int do_range = 0;
    int do_autocenter = 0;
    int do_alt_autocenter = 0;
    int do_gain = 0;
    int do_list = 0;
    int rampspeed = -1;
    int do_help = 0;
    int do_reset = 0;
    char device_file_name[128];
    char shortname[255];
    memset(device_file_name, 0, sizeof(device_file_name));
    verbose_flag = 0;

    static struct option long_options[] = {
        {"verbose",       no_argument,       0, 'v'},
        {"help",          no_argument,       0, 'h'},
        {"list",          no_argument,       0, 'l'},
        {"wheel",         required_argument, 0, 'w'},
        {"nativemode",    no_argument,       0, 'n'},
        {"range",         required_argument, 0, 'r'},
        {"altautocenter", required_argument, 0, 'b'},
        {"autocenter",    required_argument, 0, 'a'},
        {"rampspeed",     required_argument, 0, 's'},
        {"gain",          required_argument, 0, 'g'},
        {"device",        required_argument, 0, 'd'},
        {"reset",         no_argument,       0, 'x'},
        {0, 0, 0, 0 }
    };

    while (optind < argc) {
        int index = -1;
        int result = getopt_long (argc, argv, "vhlw:nr:a:g:d:s:b:x", long_options, &index);
        if (result == -1)
            break; /* end of list */
        switch (result) {
            case 'v': verbose_flag++; break;
            case 'n': do_native = 1; break;
            case 'r': range = atoi(optarg); do_range = 1; break;
            case 'a': centerforce = atoi(optarg); do_autocenter = 1; do_alt_autocenter = 0; break;
            case 'b': centerforce = atoi(optarg); do_autocenter = 0; do_alt_autocenter = 1; break;
            case 's': rampspeed = atoi(optarg); break;
            case 'g': gain = atoi(optarg); do_gain = 1; break;
            case 'd': strncpy(device_file_name, optarg, 128); break;
            case 'l': do_list = 1; break;
            case 'w': strncpy(shortname, optarg, 255); do_validate_wheel = 1; break;
            case 'x': do_reset = 1; break;
            case '?':
            default: do_help = 1; break;
        }
    }

    if (argc > 1) {
        usb_init();
        if (verbose_flag > 1)
            usb_set_debug(3);

        int wait_for_udev = 0;
        const wheelstruct* wheel = NULL;

        if (do_help) {
            help();
        } else if (do_list) {
            // list all devices, ignore other options...
            list_devices();
        } else {
            if (do_validate_wheel) {
                int numWheels = sizeof(wheels)/sizeof(wheelstruct);
                int i = 0;
                for (i=0; i < numWheels; i++) {
                    if (strncasecmp(wheels[i].shortname, shortname, 255) == 0) {
                        // found matching wheel
                        wheel = &(wheels[i]);
                        break;
                    }
                }
                if (!wheel) {
                    printf("Wheel \"%s\" not supported. Did you spell the shortname correctly?\n", shortname);
                }
            }

            if (do_reset) {
                if (!wheel) {
                    printf("Please provide --wheel parameter!\n");
                } else {
                    reset_wheel(wheel);
                    wait_for_udev = 1;
                }
            }

            if (do_native) {
                if (!wheel) {
                    printf("Please provide --wheel parameter!\n");
                } else {
                    set_native_mode(wheel);
                    wait_for_udev = 1;
                }
            }

            if (do_range) {
                if (!wheel) {
                    printf("Please provide --wheel parameter!\n");
                } else {
                    set_range(wheel, clamprange(wheel, range));
                    wait_for_udev = 1;
                }
            }

            if (do_autocenter) {
                if (!wheel) {
                    printf("Please provide --wheel parameter!\n");
                } else {
                    if (centerforce == 0) {
                        set_autocenter(wheel, centerforce, 0);
                        wait_for_udev = 1;
                    } else if (rampspeed == -1) {
                        printf("Please provide '--rampspeed' parameter\n");
                    } else {
                        set_autocenter(wheel, centerforce, rampspeed);
                        wait_for_udev = 1;
                    }
                }
            }

            if (do_alt_autocenter) {
                if (strlen(device_file_name)) {
                    alt_set_autocenter(centerforce, device_file_name, wait_for_udev);
                    wait_for_udev = 0;
                } else {
                    printf("Please provide the according event interface for your wheel using '--device' parameter (E.g. '--device /dev/input/event0')\n");
                }
            }

            if (do_gain) {
                if (strlen(device_file_name)) {
                    set_gain(gain, device_file_name, wait_for_udev);
                    wait_for_udev = 0;
                } else {
                    printf("Please provide the according event interface for your wheel using '--device' parameter (E.g. '--device /dev/input/event0')\n");
                }
            }
        }
        // libusb_exit(NULL); < Not provided by libusb-0.1
    } else {
        // display usage information if no arguments given
        help();
    }
    exit(0);
}
static void xmms_eq_config_changed (xmms_object_t * object, xmmsv_t *_data, gpointer userdata) { xmms_config_property_t *val; xmms_equalizer_data_t *priv; const gchar *name; gint value, i, j; g_return_if_fail (object); g_return_if_fail (userdata); val = (xmms_config_property_t *) object; priv = (xmms_equalizer_data_t *) userdata; name = xmms_config_property_get_name (val); value = xmms_config_property_get_int (val); XMMS_DBG ("config value changed! %s => %d", name, value); /* we are passed the full config key, not just the last token, * which makes this code kinda ugly. * fix when bug 97 has been resolved */ name = strrchr (name, '.') + 1; if (!strcmp (name, "enabled")) { priv->enabled = !!value; } else if (!strcmp (name, "extra_filtering")) { priv->extra_filtering = value; } else if (!strcmp (name, "use_legacy")) { gfloat gain; priv->use_legacy = value; if (priv->use_legacy) { for (i=0; i<EQ_BANDS_LEGACY; i++) { gain = xmms_config_property_get_float (priv->legacy[i]); for (j=0; j<EQ_CHANNELS; j++) { set_gain (j, i, xmms_eq_gain_scale (gain, FALSE)); } } } else { for (i=0; i<priv->bands; i++) { gain = xmms_config_property_get_float (priv->gain[i]); for (j=0; j<EQ_CHANNELS; j++) { set_gain (j, i, xmms_eq_gain_scale (gain, FALSE)); } } } } else if (!strcmp (name, "bands")) { if (value != 10 && value != 15 && value != 25 && value != 31) { gchar buf[20]; /* Illegal new value so we restore the old value */ g_snprintf (buf, sizeof (buf), "%d", priv->bands); xmms_config_property_set_data (val, buf); } else { priv->bands = value; for (i=0; i<EQ_MAX_BANDS; i++) { xmms_config_property_set_data (priv->gain[i], "0.0"); if (!priv->use_legacy) { for (j=0; j<EQ_CHANNELS; j++) { set_gain (j, i, xmms_eq_gain_scale (0.0, FALSE)); } } } } } }
static gboolean xmms_eq_init (xmms_xform_t *xform) { xmms_equalizer_data_t *priv; xmms_config_property_t *config; gint i, j, srate; gfloat gain; g_return_val_if_fail (xform, FALSE); priv = g_new0 (xmms_equalizer_data_t, 1); g_return_val_if_fail (priv, FALSE); xmms_xform_private_data_set (xform, priv); config = xmms_xform_config_lookup (xform, "enabled"); g_return_val_if_fail (config, FALSE); xmms_config_property_callback_set (config, xmms_eq_config_changed, priv); priv->enabled = !!xmms_config_property_get_int (config); config = xmms_xform_config_lookup (xform, "bands"); g_return_val_if_fail (config, FALSE); xmms_config_property_callback_set (config, xmms_eq_config_changed, priv); priv->bands = xmms_config_property_get_int (config); config = xmms_xform_config_lookup (xform, "extra_filtering"); g_return_val_if_fail (config, FALSE); xmms_config_property_callback_set (config, xmms_eq_config_changed, priv); priv->extra_filtering = xmms_config_property_get_int (config); config = xmms_xform_config_lookup (xform, "use_legacy"); g_return_val_if_fail (config, FALSE); xmms_config_property_callback_set (config, xmms_eq_config_changed, priv); priv->use_legacy = xmms_config_property_get_int (config); config = xmms_xform_config_lookup (xform, "preamp"); g_return_val_if_fail (config, FALSE); xmms_config_property_callback_set (config, xmms_eq_gain_changed, priv); gain = xmms_config_property_get_float (config); for (i=0; i<EQ_CHANNELS; i++) { set_preamp (i, xmms_eq_gain_scale (gain, TRUE)); } for (i=0; i<EQ_BANDS_LEGACY; i++) { gchar buf[16]; g_snprintf (buf, sizeof (buf), "legacy%d", i); config = xmms_xform_config_lookup (xform, buf); g_return_val_if_fail (config, FALSE); priv->legacy[i] = config; xmms_config_property_callback_set (config, xmms_eq_gain_changed, priv); gain = xmms_config_property_get_float (config); if (priv->use_legacy) { for (j = 0; j < EQ_CHANNELS; j++) { set_gain (i, j, xmms_eq_gain_scale (gain, FALSE)); } } } for (i=0; i<EQ_MAX_BANDS; i++) { gchar buf[16]; g_snprintf (buf, sizeof (buf), "gain%02d", i); config = xmms_xform_config_lookup (xform, buf); g_return_val_if_fail (config, FALSE); priv->gain[i] = config; xmms_config_property_callback_set (config, xmms_eq_gain_changed, priv); gain = xmms_config_property_get_float (config); if (!priv->use_legacy) { for (j = 0; j < EQ_CHANNELS; j++) { set_gain (i, j, xmms_eq_gain_scale (gain, FALSE)); } } } init_iir (); srate = xmms_xform_indata_get_int (xform, XMMS_STREAM_TYPE_FMT_SAMPLERATE); if (priv->use_legacy) { config_iir (srate, EQ_BANDS_LEGACY, 1); } else { config_iir (srate, priv->bands, 0); } xmms_xform_outdata_type_copy (xform); XMMS_DBG ("Equalizer initialized successfully!"); return TRUE; }
int main(int argc, char *argv[]) { int err = 0; int src; int dst; int val; /* int i; for(i=0; i < argc; i++) printf(" %s ", argv[i]); printf("\n"); */ /* if((err =find_cards()) < 0) return err; */ if(argc < 4){ printf("\nusage %s <devnr> <src> <dst> [value] \n\n" " devnr ... ALSA-Device eg. for hw:0 is 0 \n" " src ... inputs 0-63 playback 64-127\n" " dst ... out channel 0-63\n" "\noptional if wanting to set a value:\n\n" " value ... gain 0=0 (mute), 32768=1 (unitGain), 65535 = max\n\n", argv[0]); if(find_cards() < 0) puts("No Hammerfall DSP MADI card found."); puts(""); printf("Version %s, 2003 - IEM, winfried ritsch\n",HDSPMM_VERSION); return -1; } cardid = atoi(argv[1]); snprintf(card_name[cardid], 6, "hw:%i", cardid); src = atoi(argv[2]); dst = atoi(argv[3]); if(argc == 4){ printf(" Get Mixer from %d to %d : %d \n",src,dst, get_gain(cardid,src,dst)); return 0; } /* arg is 5 */ val = atoi(argv[4]); if((err = set_gain(cardid,src,dst,val)) < 0 ) printf("Error: Could not set mixer from %d to %d gain %d:%s \n", src,dst,val,snd_strerror(err)); else printf("Set Mixer from %d to %d : %d \n",src,dst,val); return err; }
void handler_gain(mapper_signal sig, float *pgain) { set_gain(*pgain); }
void init_reverb(Reverb *reverb) { int n = 12; reverb->n = n; /* * 0 is fed into 8 * 1 is fed into 6 etc. */ reverb->perm = (int *)malloc(n*sizeof(int)); reverb->perm[0] = 8; reverb->perm[1] = 6; reverb->perm[2] = 0; reverb->perm[3] = 10; reverb->perm[4] = 11; reverb->perm[5] = 5; reverb->perm[6] = 4; reverb->perm[7] = 1; reverb->perm[8] = 2; reverb->perm[9] = 7; reverb->perm[10] = 3; reverb->perm[11] = 9; /* primes */ reverb->delay_length = (int *)malloc(n*sizeof(int)); reverb->delay_length[0] = 601; reverb->delay_length[1] = 691; reverb->delay_length[2] = 773; reverb->delay_length[3] = 839; reverb->delay_length[4] = 919; reverb->delay_length[5] = 997; reverb->delay_length[6] = 1061; reverb->delay_length[7] = 1093; reverb->delay_length[8] = 1129; reverb->delay_length[9] = 1151; reverb->delay_length[10] = 1171; reverb->delay_length[11] = 1187; reverb->delay_output = (double *)malloc(n*sizeof(double)); reverb->filtered_output = (double *)malloc(n*sizeof(double)); memset(reverb->filtered_output, 0, n*sizeof(double)); reverb->transformed_output = (double *)malloc(n*sizeof(double)); /* * b defines the gain of each copy of the input * fed into delay lines. * Currently just sharing equally so 1/12. */ reverb->b = (double *)malloc(n*sizeof(double)); double in = 1.0/n; for (int i = 0; i < n; ++i) { reverb->b[i] = in; } /* * h controls one pole filter */ reverb->h = (double *)malloc(n*sizeof(double)); set_absorption(reverb, 1/48000.0, 0.5); /* c is how transformed outputs are mixed into * final output. * So no contribution from c to feedback. */ reverb->c = (double *)malloc(n*sizeof(double)); double sign = 1.0; for (int i = 0; i < n; ++i) { reverb->c[i] = sign*in; sign = -sign; } /* * g is the gain of the transformed output * when fed back into delay lines. */ reverb->g = (double *)malloc(n*sizeof(double)); set_gain(reverb, 1/48000.0, 0.9); reverb->delay_line = (double **)malloc(n*sizeof(double *)); reverb->delay_ptr = (int *)malloc(n*sizeof(int)); memset(reverb->delay_ptr, 0, n*sizeof(int)); for (int i = 0; i < n; ++i) { int l = reverb->delay_length[i]; reverb->delay_line[i] = (double *)malloc(l*sizeof(double)); memset(reverb->delay_line[i], 0, l*sizeof(double)); } }
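/* Hedged cleanup sketch (not part of the original source): a hypothetical
 * free_reverb() releasing exactly the buffers init_reverb() above allocates. */
void free_reverb(Reverb *reverb)
{
    for (int i = 0; i < reverb->n; ++i) {
        free(reverb->delay_line[i]);
    }
    free(reverb->delay_line);
    free(reverb->delay_ptr);
    free(reverb->perm);
    free(reverb->delay_length);
    free(reverb->delay_output);
    free(reverb->filtered_output);
    free(reverb->transformed_output);
    free(reverb->b);
    free(reverb->h);
    free(reverb->c);
    free(reverb->g);
}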
Spc_Emu::Spc_Emu() { set_type( gme_spc_type ); set_gain( 1.4 ); }
void camera_server()
{
    size_t count = 0;
    initialise_termination_handler();

    int deviceDescriptor = v4l2_open("/dev/video0", O_RDWR /* required */ | O_NONBLOCK, 0);
    if (deviceDescriptor == -1) {
        throw std::runtime_error("Unable to open device");
    }
    // disable_output_processing(deviceDescriptor);
    if (!isStreamingIOSupported(deviceDescriptor)) {
        throw std::runtime_error("Streaming is not supported");
    }
    setCameraOutputFormat(deviceDescriptor, CAMERA_FRAME_WIDTH, CAMERA_FRAME_HEIGHT, V4L2_PIX_FMT_YUYV);

    std::cout << "Absolute focus supported: " << isControlSupported(deviceDescriptor,V4L2_CID_FOCUS_ABSOLUTE) << std::endl;
    std::cout << "Relative focus supported: " << isControlSupported(deviceDescriptor,V4L2_CID_FOCUS_RELATIVE) << std::endl;

    set_manual_exposure(deviceDescriptor,true);
    printf("Is manual exposure set = %u\n", is_manual_exposure(deviceDescriptor));
    set_absolute_exposure(100,deviceDescriptor);
    set_exposure_auto_priority(deviceDescriptor,false);
    printf("Is exposure auto priority set = %u\n", is_exposure_auto_priority(deviceDescriptor));
    set_auto_white_balance(deviceDescriptor,false);
    printf("Is auto white balance set = %u\n", is_auto_white_balance_set(deviceDescriptor));
    set_gain(deviceDescriptor,1);
    printf("Gain set = %u\n", get_gain(deviceDescriptor));
    printf("Focus value = %u\n", get_focus_variable(deviceDescriptor));
    set_fps(deviceDescriptor,30);

    start_capturing(deviceDescriptor);

    unsigned int counter;
    int announce_socket = socket(AF_INET,SOCK_DGRAM,0);
    if (announce_socket < 0) {
        perror("socket");
        exit(1);
    }
    sockaddr_in announce_address;
    memset(&announce_address,0,sizeof(announce_address));
    announce_address.sin_family = AF_INET;
    announce_address.sin_addr.s_addr = inet_addr(CAMERA_ANNOUNCE_GROUP);
    announce_address.sin_port = htons(CAMERA_ANNOUNCE_PORT);

    while (running != 0) {
        fd_set fds;
        int r;
        FD_ZERO(&fds);
        FD_SET(deviceDescriptor, &fds);
        r = select(deviceDescriptor + 1, &fds, NULL, NULL, NULL);
        if (r > 0) {
            struct v4l2_buffer buf;
            memset(&buf, 0, sizeof(buf));
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_USERPTR;
            if (-1 == xioctl(deviceDescriptor, VIDIOC_DQBUF, &buf)) {
                switch (errno) {
                case EAGAIN:
                    continue;
                case EIO:
                    /* Could ignore EIO, see spec. */
                    /* fall through */
                default:
                    perror("VIDIOC_DQBUF");
                    exit(1);
                }
            }
            if ((buf.flags & V4L2_BUF_FLAG_ERROR) != 0) { // test the flag with bitwise AND; the original OR was always non-zero
                //TODO Investigate the permanent occurrence of the V4L2_BUF_FLAG_ERROR
                // std::cerr << "Frame buffer error" << std::endl;
            }
            printf("Index = %u, seconds = %ld us = %ld\n", buf.index,buf.timestamp.tv_sec,buf.timestamp.tv_usec);
            // printf("Real time: seconds = %ld, us = %ld\n", tp.tv_sec,tp.tv_nsec/1000);
            int ret;
            assert(ptrToSequenceMap.count(buf.m.userptr) != 0);
            size_t sequence_number = ptrToSequenceMap[buf.m.userptr];
            ptrToSequenceMap.erase(buf.m.userptr);
            queueNextFrameBuffer(deviceDescriptor, buf.index, sequence_number, CAMERA_FRAME_WIDTH*CAMERA_FRAME_HEIGHT*2);
            //TODO Investigate why the video streaming fails if the unmap call below is placed before the queueNextFrameBuffer call above.
            //Probably this is because in that case the mmap call returns the same virtual address as the munmap call had just used for the deallocation
            ret = munmap(reinterpret_cast<void*>(buf.m.userptr),buf.length);
            if (ret == -1) {
                perror("munmap");
            }

            BufferReference readyBuffer;
            readyBuffer.index = buf.index;
            readyBuffer.offset = 0;
            readyBuffer.size = buf.bytesused;
            readyBuffer.timestamp_seconds = buf.timestamp.tv_sec;
            readyBuffer.timestamp_microseconds = buf.timestamp.tv_usec;
            readyBuffer.width = CAMERA_FRAME_WIDTH;
            readyBuffer.height = CAMERA_FRAME_HEIGHT;
            readyBuffer.sequence = sequence_number;

            std::array<char,1024> ipc_buffer;
            asn_enc_rval_t encode_result = der_encode_to_buffer(&asn_DEF_BufferReference, &readyBuffer,ipc_buffer.data(),ipc_buffer.size());
            ret = sendto(announce_socket,ipc_buffer.data(),encode_result.encoded,0,(struct sockaddr *) &announce_address,sizeof(announce_address));
            if (ret < 0) {
                perror("sendto");
                exit(1);
            }

            timespec tp;
            clock_gettime(CLOCK_MONOTONIC,&tp);
            std::cout << "Grab frame delay = " << get_milliseconds_delta(buf.timestamp,tp) << " ms" << std::endl;
            count++;
            counter++;
        }
    }

    v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (-1 == xioctl(deviceDescriptor, VIDIOC_STREAMOFF, &type))
        perror("VIDIOC_STREAMOFF");
    if (-1 == close(deviceDescriptor))
        perror("close");
    close(announce_socket);
}
Hes_Emu::Hes_Emu() { set_type( gme_hes_type ); set_silence_lookahead( 6 ); set_gain( 1.11 ); }
/** * Sets the scaling_factor. * */ void Probe::setGain(double gain) { set_gain(gain); }
Sfm_Emu::Sfm_Emu() { set_type( gme_sfm_type ); set_gain( 1.4 ); }