/* Pick the next process ID to hand out: one past the largest ID found in
 * either the runnable or the zombie process list (zombies still own their
 * ID until reaped, so both lists must be consulted). */
uint32_t scheduler_next_pid(void)
{
    uint32_t highest_runnable = max_id_in_ps_list(&runnable_pss);
    uint32_t highest_zombie = max_id_in_ps_list(&zombie_pss);

    return maxu(highest_runnable, highest_zombie) + 1;
}
/* Accumulate a generator into the list: if a generator with the same ID is
 * already present its amount is combined with the new one, otherwise the
 * generator is appended (with its default value added for IDs below 60). */
static void GenModList_accumGen(GenModList *self, const Generator *gen)
{
    Generator *cur = VECTOR_ITER_BEGIN(self->gens);
    Generator *const sentinel = VECTOR_ITER_END(self->gens);

    while(cur != sentinel)
    {
        if(cur->mGenerator == gen->mGenerator)
        {
            if(gen->mGenerator == 43 || gen->mGenerator == 44)
            {
                /* Range generators accumulate by taking the intersection of
                 * the two ranges. */
                ALushort low = maxu(cur->mAmount&0x00ff, gen->mAmount&0x00ff);
                ALushort high = minu(cur->mAmount&0xff00, gen->mAmount&0xff00);
                cur->mAmount = low | high;
            }
            else
                cur->mAmount += gen->mAmount;
            return;
        }
        ++cur;
    }

    /* Not found: append a copy of the generator. */
    if(VECTOR_PUSH_BACK(self->gens, *gen) == AL_FALSE)
    {
        ERR("Failed to insert generator (from %d elements)\n", VECTOR_SIZE(self->gens));
        return;
    }
    /* Generators below ID 60 accumulate on top of their spec default. */
    if(gen->mGenerator < 60)
        VECTOR_BACK(self->gens).mAmount += DefaultGenValue[gen->mGenerator];
}
/* Sorts 'size' unsigned integers in 'data' in place using LSD radix sort
 * with the given 'base' (number of buckets per pass).
 *
 * Returns 0 on success, 1 on invalid arguments or allocation failure.
 *
 * NOTE(review): per-pass state (base, current radix) is communicated to the
 * radix_key callback through the file-global _radix_sortu_internal, so this
 * function is not reentrant or thread-safe.
 */
int radix_sortu(unsigned *data, unsigned size, unsigned base)
{
    unsigned digits;
    unsigned i;
    unsigned *counts = NULL;
    unsigned *output = NULL;

    /* Debug builds trap bad arguments; release builds fall through to the
     * runtime checks below and fail gracefully. */
    assert(data != NULL && size > 0);
    assert(base > 1 && base <= 256);
    if (data == NULL || size == 0)
        return 1;
    if (base <= 1)
        return 1;

    /* Number of passes = digit count of the largest element in 'base'.
     * NOTE(review): this maxu(array, len) overload is a project helper,
     * distinct from the two-argument scalar maxu used elsewhere. */
    digits = get_digits_count(maxu(data, size), base);
    if (digits == 0)
        return 1;

    /* counts: one bucket per digit value; output: scratch for each pass.
     * free(NULL) is a no-op, so a partial failure cleans up safely. */
    if ((counts = calloc(base, sizeof(unsigned))) == NULL ||
        (output = malloc(size * sizeof(unsigned))) == NULL) {
        free(counts);
        free(output);
        return 1;
    }

    _radix_sortu_internal.base = base;
    _radix_sortu_internal.radix = 1;
    for (i = 1; i <= digits; ++i) {
        /* One stable counting-sort pass keyed on the current digit.
         * NOTE(review): presumably counting_sorti copies the sorted result
         * back into 'data' each pass (data is never swapped with 'output'
         * here) — confirm against its definition. */
        counting_sorti((int *)data, size, base, radix_key, counts, (int *)output);
        _radix_sortu_internal.radix *= base;
    }

    free(counts);
    free(output);
    return 0;
}
/* Opens a DirectSound capture device matching 'deviceName' (or the first
 * enumerated device when NULL) and attaches a DSoundCaptureData to
 * device->ExtraData. Returns ALC_NO_ERROR on success, or an ALC error code
 * describing the failure. */
static ALCenum DSoundOpenCapture(ALCdevice *device, const ALCchar *deviceName)
{
    DSoundCaptureData *data = NULL;
    WAVEFORMATEXTENSIBLE InputType;
    DSCBUFFERDESC DSCBDescription;
    LPGUID guid = NULL;
    HRESULT hr, hrcom;
    ALuint samples;

    /* Lazily enumerate capture devices on first open. */
    if(!CaptureDeviceList)
    {
        /* Initialize COM to prevent name truncation */
        hrcom = CoInitialize(NULL);
        hr = DirectSoundCaptureEnumerateA(DSoundEnumCaptureDevices, NULL);
        if(FAILED(hr))
            ERR("Error enumerating DirectSound devices (%#x)!\n", (unsigned int)hr);
        /* Only balance the CoInitialize that we actually performed. */
        if(SUCCEEDED(hrcom))
            CoUninitialize();
    }

    if(!deviceName && NumCaptureDevices > 0)
    {
        /* No name requested: default to the first enumerated device. */
        deviceName = CaptureDeviceList[0].name;
        guid = &CaptureDeviceList[0].guid;
    }
    else
    {
        ALuint i;

        /* Look the requested name up in the enumerated list. */
        for(i = 0;i < NumCaptureDevices;i++)
        {
            if(strcmp(deviceName, CaptureDeviceList[i].name) == 0)
            {
                guid = &CaptureDeviceList[i].guid;
                break;
            }
        }
        if(i == NumCaptureDevices)
            return ALC_INVALID_VALUE;
    }

    /* Reject sample types DirectSound capture can't deliver. */
    switch(device->FmtType)
    {
        case DevFmtByte:
        case DevFmtUShort:
        case DevFmtUInt:
            WARN("%s capture samples not supported\n", DevFmtTypeString(device->FmtType));
            return ALC_INVALID_ENUM;

        case DevFmtUByte:
        case DevFmtShort:
        case DevFmtInt:
        case DevFmtFloat:
            break;
    }

    //Initialise requested device
    data = calloc(1, sizeof(DSoundCaptureData));
    if(!data)
        return ALC_OUT_OF_MEMORY;

    hr = DS_OK;

    //DirectSoundCapture Init code
    if(SUCCEEDED(hr))
        hr = DirectSoundCaptureCreate(guid, &data->DSC, NULL);
    if(SUCCEEDED(hr))
    {
        /* Build the WAVEFORMATEXTENSIBLE describing the requested format. */
        memset(&InputType, 0, sizeof(InputType));

        switch(device->FmtChans)
        {
            case DevFmtMono:
                InputType.dwChannelMask = SPEAKER_FRONT_CENTER;
                break;
            case DevFmtStereo:
                InputType.dwChannelMask = SPEAKER_FRONT_LEFT |
                                          SPEAKER_FRONT_RIGHT;
                break;
            case DevFmtQuad:
                InputType.dwChannelMask = SPEAKER_FRONT_LEFT |
                                          SPEAKER_FRONT_RIGHT |
                                          SPEAKER_BACK_LEFT |
                                          SPEAKER_BACK_RIGHT;
                break;
            case DevFmtX51:
                InputType.dwChannelMask = SPEAKER_FRONT_LEFT |
                                          SPEAKER_FRONT_RIGHT |
                                          SPEAKER_FRONT_CENTER |
                                          SPEAKER_LOW_FREQUENCY |
                                          SPEAKER_BACK_LEFT |
                                          SPEAKER_BACK_RIGHT;
                break;
            case DevFmtX51Side:
                InputType.dwChannelMask = SPEAKER_FRONT_LEFT |
                                          SPEAKER_FRONT_RIGHT |
                                          SPEAKER_FRONT_CENTER |
                                          SPEAKER_LOW_FREQUENCY |
                                          SPEAKER_SIDE_LEFT |
                                          SPEAKER_SIDE_RIGHT;
                break;
            case DevFmtX61:
                InputType.dwChannelMask = SPEAKER_FRONT_LEFT |
                                          SPEAKER_FRONT_RIGHT |
                                          SPEAKER_FRONT_CENTER |
                                          SPEAKER_LOW_FREQUENCY |
                                          SPEAKER_BACK_CENTER |
                                          SPEAKER_SIDE_LEFT |
                                          SPEAKER_SIDE_RIGHT;
                break;
            case DevFmtX71:
                InputType.dwChannelMask = SPEAKER_FRONT_LEFT |
                                          SPEAKER_FRONT_RIGHT |
                                          SPEAKER_FRONT_CENTER |
                                          SPEAKER_LOW_FREQUENCY |
                                          SPEAKER_BACK_LEFT |
                                          SPEAKER_BACK_RIGHT |
                                          SPEAKER_SIDE_LEFT |
                                          SPEAKER_SIDE_RIGHT;
                break;
        }

        InputType.Format.wFormatTag = WAVE_FORMAT_PCM;
        InputType.Format.nChannels = ChannelsFromDevFmt(device->FmtChans);
        InputType.Format.wBitsPerSample = BytesFromDevFmt(device->FmtType) * 8;
        InputType.Format.nBlockAlign = InputType.Format.nChannels*InputType.Format.wBitsPerSample/8;
        InputType.Format.nSamplesPerSec = device->Frequency;
        InputType.Format.nAvgBytesPerSec = InputType.Format.nSamplesPerSec*InputType.Format.nBlockAlign;
        InputType.Format.cbSize = 0;

        /* More than two channels, or float samples, require the EXTENSIBLE
         * format with an explicit SubFormat GUID. */
        if(InputType.Format.nChannels > 2 || device->FmtType == DevFmtFloat)
        {
            InputType.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
            InputType.Format.cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX);
            InputType.Samples.wValidBitsPerSample = InputType.Format.wBitsPerSample;
            if(device->FmtType == DevFmtFloat)
                InputType.SubFormat = KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
            else
                InputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
        }

        /* Size the capture buffer for the requested period setup, but never
         * smaller than 100ms worth of samples. */
        samples = device->UpdateSize * device->NumUpdates;
        samples = maxu(samples, 100 * device->Frequency / 1000);

        memset(&DSCBDescription, 0, sizeof(DSCBUFFERDESC));
        DSCBDescription.dwSize = sizeof(DSCBUFFERDESC);
        DSCBDescription.dwFlags = 0;
        DSCBDescription.dwBufferBytes = samples * InputType.Format.nBlockAlign;
        DSCBDescription.lpwfxFormat = &InputType.Format;

        hr = IDirectSoundCapture_CreateCaptureBuffer(data->DSC, &DSCBDescription, &data->DSCbuffer, NULL);
    }
    if(SUCCEEDED(hr))
    {
        /* Intermediate ring buffer the mixer reads captured frames from. */
        data->Ring = CreateRingBuffer(InputType.Format.nBlockAlign, device->UpdateSize * device->NumUpdates);
        if(data->Ring == NULL)
            hr = DSERR_OUTOFMEMORY;
    }

    if(FAILED(hr))
    {
        ERR("Device init failed: 0x%08lx\n", hr);

        /* Tear down in reverse order of construction. */
        DestroyRingBuffer(data->Ring);
        data->Ring = NULL;
        if(data->DSCbuffer != NULL)
            IDirectSoundCaptureBuffer_Release(data->DSCbuffer);
        data->DSCbuffer = NULL;
        if(data->DSC)
            IDirectSoundCapture_Release(data->DSC);
        data->DSC = NULL;

        free(data);
        return ALC_INVALID_VALUE;
    }

    data->BufferBytes = DSCBDescription.dwBufferBytes;
    SetDefaultWFXChannelOrder(device);

    device->DeviceName = strdup(deviceName);
    device->ExtraData = data;

    return ALC_NO_ERROR;
}
/* (Re)configures the PulseAudio playback stream for the device's current
 * format, negotiates buffer attributes with the server, and starts the
 * mixer thread. Returns ALC_TRUE on success, ALC_FALSE on failure.
 * Must be called with the stream not yet connected; the threaded mainloop
 * lock is held for the whole negotiation. */
static ALCboolean pulse_reset_playback(ALCdevice *device) //{{{
{
    pulse_data *data = device->ExtraData;
    pa_stream_flags_t flags = 0;
    pa_channel_map chanmap;

    pa_threaded_mainloop_lock(data->loop);

    /* If the app didn't request a channel layout, query the sink's own
     * configuration and adopt it (sink_info_callback updates the device). */
    if(!(device->Flags&DEVICE_CHANNELS_REQUEST))
    {
        pa_operation *o;
        o = pa_context_get_sink_info_by_name(data->context, data->device_name, sink_info_callback, device);
        while(pa_operation_get_state(o) == PA_OPERATION_RUNNING)
            pa_threaded_mainloop_wait(data->loop);
        pa_operation_unref(o);
    }
    /* No requested rate: let the server pick one (FIX_RATE). */
    if(!(device->Flags&DEVICE_FREQUENCY_REQUEST))
        flags |= PA_STREAM_FIX_RATE;

    /* Request buffer attributes matching UpdateSize/NumUpdates (at least
     * two periods); -1 means "server default" for the other fields. */
    data->frame_size = FrameSizeFromDevFmt(device->FmtChans, device->FmtType);
    data->attr.prebuf = -1;
    data->attr.fragsize = -1;
    data->attr.minreq = device->UpdateSize * data->frame_size;
    data->attr.tlength = data->attr.minreq * maxu(device->NumUpdates, 2);
    data->attr.maxlength = -1;
    flags |= PA_STREAM_EARLY_REQUESTS;
    flags |= PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE;

    /* Map the device sample type to a Pulse sample format. Signed 8-bit and
     * unsigned 16-bit aren't supported by Pulse, so coerce them to the
     * closest supported type (note the device's FmtType is rewritten).
     * NOTE(review): DevFmtInt/DevFmtUInt are not handled here — presumably
     * this backend predates those types or they're filtered earlier;
     * confirm against the enum definition. */
    switch(device->FmtType)
    {
        case DevFmtByte:
            device->FmtType = DevFmtUByte;
            /* fall-through */
        case DevFmtUByte:
            data->spec.format = PA_SAMPLE_U8;
            break;
        case DevFmtUShort:
            device->FmtType = DevFmtShort;
            /* fall-through */
        case DevFmtShort:
            data->spec.format = PA_SAMPLE_S16NE;
            break;
        case DevFmtFloat:
            data->spec.format = PA_SAMPLE_FLOAT32NE;
            break;
    }
    data->spec.rate = device->Frequency;
    data->spec.channels = ChannelsFromDevFmt(device->FmtChans);

    if(pa_sample_spec_valid(&data->spec) == 0)
    {
        ERR("Invalid sample format\n");
        pa_threaded_mainloop_unlock(data->loop);
        return ALC_FALSE;
    }

    if(!pa_channel_map_init_auto(&chanmap, data->spec.channels, PA_CHANNEL_MAP_WAVEEX))
    {
        ERR("Couldn't build map for channel count (%d)!\n", data->spec.channels);
        pa_threaded_mainloop_unlock(data->loop);
        return ALC_FALSE;
    }
    SetDefaultWFXChannelOrder(device);

    data->stream = connect_playback_stream(device, flags, &data->attr, &data->spec, &chanmap);
    if(!data->stream)
    {
        pa_threaded_mainloop_unlock(data->loop);
        return ALC_FALSE;
    }
    pa_stream_set_state_callback(data->stream, stream_state_callback2, device);

    /* The server may have overridden our sample rate (FIX_RATE). */
    data->spec = *(pa_stream_get_sample_spec(data->stream));
    if(device->Frequency != data->spec.rate)
    {
        pa_operation *o;

        if((device->Flags&DEVICE_FREQUENCY_REQUEST))
            ERR("Failed to set frequency %dhz, got %dhz instead\n", device->Frequency, data->spec.rate);
        device->Flags &= ~DEVICE_FREQUENCY_REQUEST;

        /* Server updated our playback rate, so modify the buffer attribs
         * accordingly. */
        data->attr.minreq = (ALuint64)(data->attr.minreq/data->frame_size) *
                            data->spec.rate / device->Frequency * data->frame_size;
        data->attr.tlength = data->attr.minreq * maxu(device->NumUpdates, 2);

        o = pa_stream_set_buffer_attr(data->stream, &data->attr, stream_success_callback, device);
        while(pa_operation_get_state(o) == PA_OPERATION_RUNNING)
            pa_threaded_mainloop_wait(data->loop);
        pa_operation_unref(o);

        device->Frequency = data->spec.rate;
    }

#if PA_CHECK_VERSION(0,9,15)
    /* Buffer-attr change notification is only available in newer libpulse;
     * the symbol is resolved dynamically, hence the pointer check. */
    if(pa_stream_set_buffer_attr_callback)
        pa_stream_set_buffer_attr_callback(data->stream, stream_buffer_attr_callback, device);
#endif
    pa_stream_set_moved_callback(data->stream, stream_device_callback, device);
    pa_stream_set_write_callback(data->stream, stream_write_callback, device);
    pa_stream_set_underflow_callback(data->stream, stream_signal_callback, device);

    /* Adopt whatever attributes the server actually granted. */
    data->attr = *(pa_stream_get_buffer_attr(data->stream));
    ERR("PulseAudio returned minreq=%d, tlength=%d\n", data->attr.minreq, data->attr.tlength);

    device->UpdateSize = data->attr.minreq / data->frame_size;
    device->NumUpdates = (data->attr.tlength/data->frame_size) / device->UpdateSize;
    /* If the granted minreq leaves fewer than ~2 periods, grow tlength and
     * renegotiate until the period count is workable. */
    while(device->NumUpdates <= 2)
    {
        pa_operation *o;

        ERR("minreq too high - expect lag or break up\n");

        /* Server gave a comparatively large minreq, so modify the tlength. */
        device->NumUpdates = 2;
        data->attr.tlength = data->attr.minreq * device->NumUpdates;

        o = pa_stream_set_buffer_attr(data->stream, &data->attr, stream_success_callback, device);
        while(pa_operation_get_state(o) == PA_OPERATION_RUNNING)
            pa_threaded_mainloop_wait(data->loop);
        pa_operation_unref(o);

        data->attr = *(pa_stream_get_buffer_attr(data->stream));
        ERR("PulseAudio returned minreq=%d, tlength=%d", data->attr.minreq, data->attr.tlength);
        device->UpdateSize = data->attr.minreq / data->frame_size;
        device->NumUpdates = (data->attr.tlength/data->frame_size) / device->UpdateSize;
    }

    data->thread = StartThread(PulseProc, device);
    if(!data->thread)
    {
        /* Thread creation failed: detach all callbacks before tearing the
         * stream down so none fire into a dead device. */
#if PA_CHECK_VERSION(0,9,15)
        if(pa_stream_set_buffer_attr_callback)
            pa_stream_set_buffer_attr_callback(data->stream, NULL, NULL);
#endif
        pa_stream_set_moved_callback(data->stream, NULL, NULL);
        pa_stream_set_write_callback(data->stream, NULL, NULL);
        pa_stream_set_underflow_callback(data->stream, NULL, NULL);
        pa_stream_disconnect(data->stream);
        pa_stream_unref(data->stream);
        data->stream = NULL;

        pa_threaded_mainloop_unlock(data->loop);
        return ALC_FALSE;
    }

    pa_threaded_mainloop_unlock(data->loop);
    return ALC_TRUE;
} //}}}
/* Opens a PulseAudio capture stream for the device, allocating a ring
 * buffer the mixer drains, and waits until the stream reports READY.
 * Returns ALC_NO_ERROR on success or ALC_INVALID_VALUE on any failure
 * (pulse_close() undoes the partial setup via the fail label). */
static ALCenum pulse_open_capture(ALCdevice *device, const ALCchar *device_name) //{{{
{
    char *pulse_name = NULL;
    pulse_data *data;
    pa_stream_flags_t flags = 0;
    pa_stream_state_t state;
    pa_channel_map chanmap;

    if(!allCaptureDevNameMap)
        probe_devices(AL_TRUE);

    /* NULL means the default Pulse device; otherwise resolve the friendly
     * name to the server-side source name. */
    if(!device_name)
        device_name = pulse_device;
    else if(strcmp(device_name, pulse_device) != 0)
    {
        ALuint i;

        for(i = 0;i < numCaptureDevNames;i++)
        {
            if(strcmp(device_name, allCaptureDevNameMap[i].name) == 0)
            {
                pulse_name = allCaptureDevNameMap[i].device_name;
                break;
            }
        }
        if(i == numCaptureDevNames)
            return ALC_INVALID_VALUE;
    }

    /* Establish the context/mainloop; fills device->ExtraData. */
    if(pulse_open(device, device_name) == ALC_FALSE)
        return ALC_INVALID_VALUE;

    data = device->ExtraData;
    pa_threaded_mainloop_lock(data->loop);

    /* Ring buffer holds the requested period total, but at least 100ms. */
    data->samples = device->UpdateSize * device->NumUpdates;
    data->frame_size = FrameSizeFromDevFmt(device->FmtChans, device->FmtType);
    data->samples = maxu(data->samples, 100 * device->Frequency / 1000);

    if(!(data->ring = CreateRingBuffer(data->frame_size, data->samples)))
    {
        pa_threaded_mainloop_unlock(data->loop);
        goto fail;
    }

    /* fragsize caps per-callback latency at <=50ms; -1 = server default. */
    data->attr.minreq = -1;
    data->attr.prebuf = -1;
    data->attr.maxlength = data->samples * data->frame_size;
    data->attr.tlength = -1;
    data->attr.fragsize = minu(data->samples, 50*device->Frequency/1000) * data->frame_size;

    data->spec.rate = device->Frequency;
    data->spec.channels = ChannelsFromDevFmt(device->FmtChans);

    /* Pulse has no signed-8-bit or unsigned-16-bit capture formats. */
    switch(device->FmtType)
    {
        case DevFmtUByte:
            data->spec.format = PA_SAMPLE_U8;
            break;
        case DevFmtShort:
            data->spec.format = PA_SAMPLE_S16NE;
            break;
        case DevFmtFloat:
            data->spec.format = PA_SAMPLE_FLOAT32NE;
            break;
        case DevFmtByte:
        case DevFmtUShort:
            ERR("Capture format type %#x capture not supported on PulseAudio\n", device->FmtType);
            pa_threaded_mainloop_unlock(data->loop);
            goto fail;
    }

    if(pa_sample_spec_valid(&data->spec) == 0)
    {
        ERR("Invalid sample format\n");
        pa_threaded_mainloop_unlock(data->loop);
        goto fail;
    }

    if(!pa_channel_map_init_auto(&chanmap, data->spec.channels, PA_CHANNEL_MAP_WAVEEX))
    {
        ERR("Couldn't build map for channel count (%d)!\n", data->spec.channels);
        pa_threaded_mainloop_unlock(data->loop);
        goto fail;
    }

    data->stream = pa_stream_new(data->context, "Capture Stream", &data->spec, &chanmap);
    if(!data->stream)
    {
        ERR("pa_stream_new() failed: %s\n", pa_strerror(pa_context_errno(data->context)));
        pa_threaded_mainloop_unlock(data->loop);
        goto fail;
    }
    /* Temporary state callback only signals the mainloop while we wait for
     * READY below; replaced with the real handler afterwards. */
    pa_stream_set_state_callback(data->stream, stream_state_callback, data->loop);

    /* Start corked: capture begins on ALC start, not on connect. */
    flags |= PA_STREAM_START_CORKED|PA_STREAM_ADJUST_LATENCY;
    if(pa_stream_connect_record(data->stream, pulse_name, &data->attr, flags) < 0)
    {
        ERR("Stream did not connect: %s\n", pa_strerror(pa_context_errno(data->context)));
        pa_stream_unref(data->stream);
        data->stream = NULL;
        pa_threaded_mainloop_unlock(data->loop);
        goto fail;
    }

    /* Block (releasing the lock in wait) until READY or a failure state. */
    while((state=pa_stream_get_state(data->stream)) != PA_STREAM_READY)
    {
        if(!PA_STREAM_IS_GOOD(state))
        {
            ERR("Stream did not get ready: %s\n", pa_strerror(pa_context_errno(data->context)));
            pa_stream_unref(data->stream);
            data->stream = NULL;
            pa_threaded_mainloop_unlock(data->loop);
            goto fail;
        }
        pa_threaded_mainloop_wait(data->loop);
    }
    pa_stream_set_state_callback(data->stream, stream_state_callback2, device);

    pa_threaded_mainloop_unlock(data->loop);
    return ALC_NO_ERROR;

fail:
    pulse_close(device);
    return ALC_INVALID_VALUE;
} //}}}
/* Opens an ALSA capture PCM for the device, configuring hardware params to
 * match the requested format/rate/period setup. If the hardware can't
 * provide a large enough buffer, an intermediate ring buffer is used
 * instead. Returns ALC_NO_ERROR on success or an ALC error code.
 *
 * Fix: the calloc() result was previously used unchecked by
 * snd_pcm_open(&data->pcmHandle, ...), dereferencing NULL on allocation
 * failure; it now returns ALC_OUT_OF_MEMORY. */
static ALCenum alsa_open_capture(ALCdevice *Device, const ALCchar *deviceName)
{
    const char *driver = NULL;
    snd_pcm_hw_params_t *hp;
    snd_pcm_uframes_t bufferSizeInFrames;
    snd_pcm_uframes_t periodSizeInFrames;
    ALboolean needring = AL_FALSE;
    snd_pcm_format_t format;
    const char *funcerr;
    alsa_data *data;
    int err;

    if(deviceName)
    {
        size_t idx;

        /* Resolve the friendly name to an ALSA device string. */
        if(!allCaptureDevNameMap)
            allCaptureDevNameMap = probe_devices(SND_PCM_STREAM_CAPTURE, &numCaptureDevNames);

        for(idx = 0;idx < numCaptureDevNames;idx++)
        {
            if(strcmp(deviceName, allCaptureDevNameMap[idx].name) == 0)
            {
                driver = allCaptureDevNameMap[idx].device;
                break;
            }
        }
        if(idx == numCaptureDevNames)
            return ALC_INVALID_VALUE;
    }
    else
    {
        deviceName = alsaDevice;
        driver = GetConfigValue("alsa", "capture", "default");
    }

    data = (alsa_data*)calloc(1, sizeof(alsa_data));
    if(!data)
        return ALC_OUT_OF_MEMORY;

    err = snd_pcm_open(&data->pcmHandle, driver, SND_PCM_STREAM_CAPTURE, SND_PCM_NONBLOCK);
    if(err < 0)
    {
        ERR("Could not open capture device '%s': %s\n", driver, snd_strerror(err));
        free(data);
        return ALC_INVALID_VALUE;
    }

    /* Map the device sample type to the matching ALSA PCM format. */
    format = -1;
    switch(Device->FmtType)
    {
        case DevFmtByte:
            format = SND_PCM_FORMAT_S8;
            break;
        case DevFmtUByte:
            format = SND_PCM_FORMAT_U8;
            break;
        case DevFmtShort:
            format = SND_PCM_FORMAT_S16;
            break;
        case DevFmtUShort:
            format = SND_PCM_FORMAT_U16;
            break;
        case DevFmtInt:
            format = SND_PCM_FORMAT_S32;
            break;
        case DevFmtUInt:
            format = SND_PCM_FORMAT_U32;
            break;
        case DevFmtFloat:
            format = SND_PCM_FORMAT_FLOAT;
            break;
    }

    funcerr = NULL;
    /* Request the full period total (at least 100ms) as the hardware
     * buffer, with periods capped at 25ms. */
    bufferSizeInFrames = maxu(Device->UpdateSize*Device->NumUpdates,
                              100*Device->Frequency/1000);
    periodSizeInFrames = minu(bufferSizeInFrames, 25*Device->Frequency/1000);

    snd_pcm_hw_params_malloc(&hp);
#define CHECK(x) if((funcerr=#x),(err=(x)) < 0) goto error
    CHECK(snd_pcm_hw_params_any(data->pcmHandle, hp));
    /* set interleaved access */
    CHECK(snd_pcm_hw_params_set_access(data->pcmHandle, hp, SND_PCM_ACCESS_RW_INTERLEAVED));
    /* set format (implicitly sets sample bits) */
    CHECK(snd_pcm_hw_params_set_format(data->pcmHandle, hp, format));
    /* set channels (implicitly sets frame bits) */
    CHECK(snd_pcm_hw_params_set_channels(data->pcmHandle, hp, ChannelsFromDevFmt(Device->FmtChans)));
    /* set rate (implicitly constrains period/buffer parameters) */
    CHECK(snd_pcm_hw_params_set_rate(data->pcmHandle, hp, Device->Frequency, 0));
    /* set buffer size in frame units (implicitly sets period size/bytes/time and buffer time/bytes) */
    if(snd_pcm_hw_params_set_buffer_size_min(data->pcmHandle, hp, &bufferSizeInFrames) < 0)
    {
        /* Hardware buffer can't cover the request; fall back to whatever it
         * offers plus a software ring buffer to make up the difference. */
        TRACE("Buffer too large, using intermediate ring buffer\n");
        needring = AL_TRUE;
        CHECK(snd_pcm_hw_params_set_buffer_size_near(data->pcmHandle, hp, &bufferSizeInFrames));
    }
    /* set buffer size in frame units (implicitly sets period size/bytes/time and buffer time/bytes) */
    CHECK(snd_pcm_hw_params_set_period_size_near(data->pcmHandle, hp, &periodSizeInFrames, NULL));
    /* install and prepare hardware configuration */
    CHECK(snd_pcm_hw_params(data->pcmHandle, hp));
    /* retrieve configuration info */
    CHECK(snd_pcm_hw_params_get_period_size(hp, &periodSizeInFrames, NULL));
#undef CHECK
    snd_pcm_hw_params_free(hp);
    hp = NULL;

    if(needring)
    {
        data->ring = CreateRingBuffer(FrameSizeFromDevFmt(Device->FmtChans, Device->FmtType),
                                      Device->UpdateSize*Device->NumUpdates);
        if(!data->ring)
        {
            ERR("ring buffer create failed\n");
            goto error2;
        }

        /* Staging buffer sized for one hardware period's worth of bytes. */
        data->size = snd_pcm_frames_to_bytes(data->pcmHandle, periodSizeInFrames);
        data->buffer = malloc(data->size);
        if(!data->buffer)
        {
            ERR("buffer malloc failed\n");
            goto error2;
        }
    }

    Device->DeviceName = strdup(deviceName);
    Device->ExtraData = data;
    return ALC_NO_ERROR;

error:
    ERR("%s failed: %s\n", funcerr, snd_strerror(err));
    if(hp) snd_pcm_hw_params_free(hp);

error2:
    /* free(NULL)/DestroyRingBuffer(NULL) are safe on partial setups. */
    free(data->buffer);
    DestroyRingBuffer(data->ring);
    snd_pcm_close(data->pcmHandle);

    free(data);
    Device->ExtraData = NULL;
    return ALC_INVALID_VALUE;
}
/* Renders a model into the target's color texture and z-buffer.
 *
 * Pipeline: transform vertices to clip space (also recording per-vertex
 * camera directions for specular), cull faces with any vertex outside the
 * frustum, do the perspective divide + viewport transform, then rasterize
 * each face group per render_mode: barycentric fill with optional Gouraud
 * lighting (SRM_SHADED) and texturing (SRM_TEXTURED), plus optional
 * wireframe overlay (SRM_WIREFRAME). All scratch memory comes from 'pool'.
 * NOTE(review): old_hi_ptr captures the pool watermark but is never used
 * to pop the scratch allocations before returning — confirm whether the
 * caller resets the pool. */
SWR_FN void swr_render_model(swr_render_target *target, u32 render_mode, model *model,
                             vec3 cam_pos, mat4 model_mat, mat4 viewproj_mat,
                             mat4 screen_mat, vec3 sun_direction, col4 sun_col,
                             float ambient_intencity, mem_pool *pool) {
  tex2d target_tex = *target->texture;
  float *z_buffer = target->z_buffer;
  u8 *old_hi_ptr = pool->hi;
  /* Scratch arrays: clip-space positions and normalized camera->vertex
   * directions (world space), one entry per model vertex. */
  vec4 *vertices = (vec4 *)mem_push_back(pool, model->nvertices * sizeof(*vertices));
  vec3 *cam_directions = (vec3 *)mem_push_back(pool, model->nvertices * sizeof(*cam_directions));
  for (u32 i = 0, e = model->nvertices; i < e; ++i) {
    vec3 v = mul_m4v4(model_mat, v3_to_v4(model->vertices[i], 1.0f)).xyz;
    cam_directions[i] = norm_v3(sub_v3(v, cam_pos));
    vertices[i] = mul_m4v4(viewproj_mat, v3_to_v4(v, 1.0f));
  }
  /* Per face-group lists of faces that survive frustum culling. */
  face **culled_faces = (face **)mem_push_back(pool, model->nface_groups * sizeof(*culled_faces));
  u32 *nculled_faces = (u32 *)mem_push_back(pool, model->nface_groups * sizeof(*nculled_faces));
  for (u32 face_group = 0; face_group < model->nface_groups; ++face_group) {
    face *src_faces = model->face_groups[face_group].faces;
    u32 nsrc_faces = model->face_groups[face_group].nfaces;
    face *faces = culled_faces[face_group] = (face *)mem_push_back(pool, nsrc_faces * sizeof(*faces));
    u32 nfaces = 0;
    for (u32 i = 0; i < nsrc_faces; ++i) {
      face face = src_faces[i];
      b32 inside_frustrum = true;
      /* Conservative cull: drop the whole face if any vertex lies outside
       * the clip volume (|x|,|y|,|z| > w) or has w == 0 (would divide by
       * zero later). No clipping is performed. */
      for (u32 j = 0; j < 3; ++j) {
        vec4 vertex = vertices[face.v[j]];
        if (vertex.x > vertex.w || vertex.x < -vertex.w ||
            vertex.y > vertex.w || vertex.y < -vertex.w ||
            vertex.z > vertex.w || vertex.z < -vertex.w ||
            vertex.w == 0) {
          inside_frustrum = false;
          break;
        }
      }
      if (inside_frustrum)
        faces[nfaces++] = face;
    }
    nculled_faces[face_group] = nfaces;
  }
  // TODO: avoid computing irrelevant data (?)
  /* Perspective divide and viewport transform (clip -> screen space). */
  for (u32 i = 0; i < model->nvertices; ++i) {
    vec4 vertex = vertices[i];
    vertex = div_v4f(vertex, vertex.w);
    vertex = mul_m4v4(screen_mat, vertex);
    vertices[i] = vertex;
  }
  for (u32 face_group = 0; face_group < model->nface_groups; ++face_group) {
    face *faces = culled_faces[face_group];
    u32 nfaces = nculled_faces[face_group];
    material *material = model->face_groups[face_group].material;
    for (u32 i = 0; i < nfaces; ++i) {
      face face = faces[i];
      vec4 verts[] = {vertices[face.v[0]], vertices[face.v[1]], vertices[face.v[2]]};
      u32 x1 = (u32)verts[0].x;
      u32 y1 = (u32)verts[0].y;
      u32 x2 = (u32)verts[1].x;
      u32 y2 = (u32)verts[1].y;
      u32 x3 = (u32)verts[2].x;
      u32 y3 = (u32)verts[2].y;
      if (render_mode & (SRM_SHADED | SRM_TEXTURED)) {
        /* Screen-space bounding box of the triangle. */
        u32 minX = minu(x1, minu(x2, x3));
        u32 minY = minu(y1, minu(y2, y3));
        u32 maxX = maxu(x1, maxu(x2, x3)) + 1;
        u32 maxY = maxu(y1, maxu(y2, y3)) + 1;
        vec3 norms[3];
        float lum[3];
        if (render_mode & SRM_SHADED) {
          /* Per-vertex Blinn-Phong: ambient + Lambert diffuse + specular
           * (exponent 32), interpolated across the face (Gouraud). */
          // TODO: apply reverse transformations to normales
          norms[0] = model->normales[face.n[0]];
          norms[1] = model->normales[face.n[1]];
          norms[2] = model->normales[face.n[2]];
          float diffuse[3];
          for (u32 j = 0; j < 3; ++j)
            diffuse[j] = clamp(dot_v3(norms[j], sun_direction), 0, 1.0f);
          vec3 L = neg_v3(sun_direction);
          float specular[3] = {0};
          for (u32 j = 0; j < 3; ++j) {
            /* Specular only where the surface actually faces the light. */
            if (diffuse[j]) {
              vec3 V = cam_directions[face.v[j]];
              vec3 H = norm_v3(add_v3(V, L));
              specular[j] = (float)pow(dot_v3(H, norms[j]), 32);
            }
          }
          for (u32 j = 0; j < 3; ++j)
            lum[j] = ambient_intencity + diffuse[j] + specular[j];
        }
        vec2 face_uvs[3];
        face_uvs[0] = model->uvs[face.uv[0]];
        face_uvs[1] = model->uvs[face.uv[1]];
        face_uvs[2] = model->uvs[face.uv[2]];
        /* Edge vectors for the barycentric solve below. */
        vec2 a = {(float)x1, (float)y1};
        vec2 b = {(float)x2, (float)y2};
        vec2 c = {(float)x3, (float)y3};
        vec2 v0 = sub_v2(b, a);
        vec2 v1 = sub_v2(c, a);
        for (u32 x = minX; x < maxX; ++x) {
          for (u32 y = minY; y < maxY; ++y) {
            // calculate barycentric coords...
            vec2 p = {(float)x, (float)y};
            vec2 v2 = sub_v2(p, a);
            float d00 = dot_v2(v0, v0);
            float d01 = dot_v2(v0, v1);
            float d11 = dot_v2(v1, v1);
            float d20 = dot_v2(v2, v0);
            float d21 = dot_v2(v2, v1);
            float denom = d00 * d11 - d01 * d01;
            float v = (d11 * d20 - d01 * d21) / denom;
            float w = (d00 * d21 - d01 * d20) / denom;
            float u = 1.0f - v - w;
            /* Small negative tolerance keeps pixels on shared edges. */
            if (!(v >= -0.001 && w >= -0.001 && u >= -0.001))
              continue;
            u32 z_buff_idx = y * target_tex.width + x;
            float z = verts[1].z * v + verts[2].z * w + verts[0].z * u;
            /* Depth test: smaller z wins (closer to the camera). */
            if (z_buffer[z_buff_idx] > z) {
              float l = 1.0f;
              if (render_mode & SRM_SHADED) {
                l = lum[1] * v + lum[2] * w + lum[0] * u;
              }
              col4 texel = {255, 255, 255, 255};
              if ((render_mode & SRM_TEXTURED) && material->diffuse) {
                /* Interpolate UVs and scale into texel coordinates. */
                float tu = face_uvs[1].x * v + face_uvs[2].x * w + face_uvs[0].x * u;
                float tv = face_uvs[1].y * v + face_uvs[2].y * w + face_uvs[0].y * u;
                tu *= material->diffuse->width;
                tv *= material->diffuse->height;
                texel = *sample_t2d(*material->diffuse, (u32)tu, (u32)tv);
              }
              /* Modulate texel by sun color and luminance; alpha forced
               * opaque. */
              col4 fragment_col = { .e[3] = 255 };
              for (u32 j = 0; j < 3; ++j) {
                float cl = sun_col.e[j] * l / 255.0f;
                fragment_col.e[j] = (u8)clamp(texel.e[j] * cl, 0, 255.0f);
              }
              *sample_t2d(target_tex, x, y) = fragment_col;
              z_buffer[z_buff_idx] = z;
            }
          }
        }
      }
    }
    /* Optional wireframe pass over the same culled faces (no depth test). */
    if (render_mode & SRM_WIREFRAME) {
      for (u32 i = 0; i < nfaces; ++i) {
        const col4 model_col = {255, 255, 255, 255};
        vec4 v1 = vertices[faces[i].v[0]];
        vec4 v2 = vertices[faces[i].v[1]];
        vec4 v3 = vertices[faces[i].v[2]];
        s32 x1 = (s32)v1.x;
        s32 y1 = (s32)v1.y;
        s32 x2 = (s32)v2.x;
        s32 y2 = (s32)v2.y;
        s32 x3 = (s32)v3.x;
        s32 y3 = (s32)v3.y;
        swr_line(x1, y1, x2, y2, model_col, target_tex);
        swr_line(x2, y2, x3, y3, model_col, target_tex);
        swr_line(x3, y3, x1, y1, model_col, target_tex);
      }
    }
  }
}
/* (Re)configures the PulseAudio playback stream: tears down any existing
 * stream, rebuilds the sample spec / buffer attributes from the device's
 * current format, connects a new stream (started corked), and adopts the
 * server-granted attributes back into UpdateSize/NumUpdates.
 * Returns ALC_TRUE on success, ALC_FALSE on failure. Holds the threaded
 * mainloop lock throughout. */
static ALCboolean pulse_reset_playback(ALCdevice *device)
{
    pulse_data *data = device->ExtraData;
    pa_stream_flags_t flags = 0;
    pa_channel_map chanmap;

    pa_threaded_mainloop_lock(data->loop);

    /* Drop any previous stream (and its callback) before reconnecting. */
    if(data->stream)
    {
#if PA_CHECK_VERSION(0,9,15)
        if(pa_stream_set_buffer_attr_callback)
            pa_stream_set_buffer_attr_callback(data->stream, NULL, NULL);
#endif
        pa_stream_disconnect(data->stream);
        pa_stream_unref(data->stream);
        data->stream = NULL;
    }

    /* No requested channel layout: adopt the sink's configuration
     * (sink_info_callback updates the device). */
    if(!(device->Flags&DEVICE_CHANNELS_REQUEST))
    {
        pa_operation *o;
        o = pa_context_get_sink_info_by_name(data->context, data->device_name, sink_info_callback, device);
        WAIT_FOR_OPERATION(o, data->loop);
    }
    /* No requested rate: let the server pick one. */
    if(!(device->Flags&DEVICE_FREQUENCY_REQUEST))
        flags |= PA_STREAM_FIX_RATE;

    flags |= PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE;
    flags |= PA_STREAM_ADJUST_LATENCY;
    flags |= PA_STREAM_START_CORKED;
    flags |= PA_STREAM_DONT_MOVE;

    /* Map the device sample type to a Pulse format; signed 8-bit and
     * unsigned 16/32-bit have no Pulse equivalent, so the device's FmtType
     * is coerced to the closest supported type. */
    switch(device->FmtType)
    {
        case DevFmtByte:
            device->FmtType = DevFmtUByte;
            /* fall-through */
        case DevFmtUByte:
            data->spec.format = PA_SAMPLE_U8;
            break;
        case DevFmtUShort:
            device->FmtType = DevFmtShort;
            /* fall-through */
        case DevFmtShort:
            data->spec.format = PA_SAMPLE_S16NE;
            break;
        case DevFmtUInt:
            device->FmtType = DevFmtInt;
            /* fall-through */
        case DevFmtInt:
            data->spec.format = PA_SAMPLE_S32NE;
            break;
        case DevFmtFloat:
            data->spec.format = PA_SAMPLE_FLOAT32NE;
            break;
    }
    data->spec.rate = device->Frequency;
    data->spec.channels = ChannelsFromDevFmt(device->FmtChans);

    if(pa_sample_spec_valid(&data->spec) == 0)
    {
        ERR("Invalid sample format\n");
        pa_threaded_mainloop_unlock(data->loop);
        return ALC_FALSE;
    }

    if(!pa_channel_map_init_auto(&chanmap, data->spec.channels, PA_CHANNEL_MAP_WAVEEX))
    {
        ERR("Couldn't build map for channel count (%d)!\n", data->spec.channels);
        pa_threaded_mainloop_unlock(data->loop);
        return ALC_FALSE;
    }
    SetDefaultWFXChannelOrder(device);

    /* Request attributes matching UpdateSize/NumUpdates (at least two
     * periods); prebuf=0 starts playback immediately on uncork. */
    data->attr.fragsize = -1;
    data->attr.prebuf = 0;
    data->attr.minreq = device->UpdateSize * pa_frame_size(&data->spec);
    data->attr.tlength = data->attr.minreq * maxu(device->NumUpdates, 2);
    data->attr.maxlength = -1;

    data->stream = connect_playback_stream(data->device_name, data->loop, data->context, flags, &data->attr, &data->spec, &chanmap);
    if(!data->stream)
    {
        pa_threaded_mainloop_unlock(data->loop);
        return ALC_FALSE;
    }
    pa_stream_set_state_callback(data->stream, stream_state_callback2, device);

    /* The server may have overridden the sample rate (FIX_RATE). */
    data->spec = *(pa_stream_get_sample_spec(data->stream));
    if(device->Frequency != data->spec.rate)
    {
        pa_operation *o;

        /* Server updated our playback rate, so modify the buffer attribs
         * accordingly. */
        data->attr.minreq = (ALuint64)device->UpdateSize * data->spec.rate /
                            device->Frequency * pa_frame_size(&data->spec);
        data->attr.tlength = data->attr.minreq * maxu(device->NumUpdates, 2);
        data->attr.prebuf = 0;

        o = pa_stream_set_buffer_attr(data->stream, &data->attr, stream_success_callback, device);
        WAIT_FOR_OPERATION(o, data->loop);

        device->Frequency = data->spec.rate;
    }

#if PA_CHECK_VERSION(0,9,15)
    /* Dynamically-resolved symbol; only present in newer libpulse. */
    if(pa_stream_set_buffer_attr_callback)
        pa_stream_set_buffer_attr_callback(data->stream, stream_buffer_attr_callback, device);
#endif
    /* Pull the granted attributes once by hand (older servers won't fire
     * the callback on their own). */
    stream_buffer_attr_callback(data->stream, device);

    /* Re-derive the period setup from the granted minreq, preserving the
     * total buffered amount and keeping at least two periods. */
    device->NumUpdates = device->UpdateSize*device->NumUpdates /
                         (data->attr.minreq/pa_frame_size(&data->spec));
    device->NumUpdates = maxu(device->NumUpdates, 2);
    device->UpdateSize = data->attr.minreq / pa_frame_size(&data->spec);

    pa_threaded_mainloop_unlock(data->loop);
    return ALC_TRUE;
}
/* Opens a PulseAudio capture (record) stream for the device. Resolves the
 * friendly device name, builds the sample spec and buffer attributes, and
 * connects the stream (started corked). Returns ALC_NO_ERROR on success,
 * ALC_INVALID_VALUE otherwise (pulse_close() undoes partial setup). */
static ALCenum pulse_open_capture(ALCdevice *device, const ALCchar *device_name)
{
    const char *pulse_name = NULL;
    pa_stream_flags_t flags = 0;
    pa_channel_map chanmap;
    pulse_data *data;
    pa_operation *o;
    ALuint samples;

    if(device_name)
    {
        ALuint i;

        /* Resolve the friendly name to the server-side source name. */
        if(!allCaptureDevNameMap)
            probe_devices(AL_TRUE);

        for(i = 0;i < numCaptureDevNames;i++)
        {
            if(strcmp(device_name, allCaptureDevNameMap[i].name) == 0)
            {
                pulse_name = allCaptureDevNameMap[i].device_name;
                break;
            }
        }
        if(i == numCaptureDevNames)
            return ALC_INVALID_VALUE;
    }

    /* Establish the context/mainloop; fills device->ExtraData. */
    if(pulse_open(device) == ALC_FALSE)
        return ALC_INVALID_VALUE;

    data = device->ExtraData;
    pa_threaded_mainloop_lock(data->loop);

    data->spec.rate = device->Frequency;
    data->spec.channels = ChannelsFromDevFmt(device->FmtChans);

    /* Pulse has no signed-8-bit or unsigned-16/32-bit capture formats. */
    switch(device->FmtType)
    {
        case DevFmtUByte:
            data->spec.format = PA_SAMPLE_U8;
            break;
        case DevFmtShort:
            data->spec.format = PA_SAMPLE_S16NE;
            break;
        case DevFmtInt:
            data->spec.format = PA_SAMPLE_S32NE;
            break;
        case DevFmtFloat:
            data->spec.format = PA_SAMPLE_FLOAT32NE;
            break;
        case DevFmtByte:
        case DevFmtUShort:
        case DevFmtUInt:
            ERR("%s capture samples not supported\n", DevFmtTypeString(device->FmtType));
            pa_threaded_mainloop_unlock(data->loop);
            goto fail;
    }

    if(pa_sample_spec_valid(&data->spec) == 0)
    {
        ERR("Invalid sample format\n");
        pa_threaded_mainloop_unlock(data->loop);
        goto fail;
    }

    if(!pa_channel_map_init_auto(&chanmap, data->spec.channels, PA_CHANNEL_MAP_WAVEEX))
    {
        ERR("Couldn't build map for channel count (%d)!\n", data->spec.channels);
        pa_threaded_mainloop_unlock(data->loop);
        goto fail;
    }

    /* Server-side buffer holds the requested period total (at least 100ms);
     * fragsize caps per-callback latency at <=50ms. -1 = server default. */
    samples = device->UpdateSize * device->NumUpdates;
    samples = maxu(samples, 100 * device->Frequency / 1000);

    data->attr.minreq = -1;
    data->attr.prebuf = -1;
    data->attr.maxlength = samples * pa_frame_size(&data->spec);
    data->attr.tlength = -1;
    data->attr.fragsize = minu(samples, 50*device->Frequency/1000) *
                          pa_frame_size(&data->spec);

    /* Start corked: capture begins on ALC start, not on connect. */
    flags |= PA_STREAM_DONT_MOVE;
    flags |= PA_STREAM_START_CORKED|PA_STREAM_ADJUST_LATENCY;
    data->stream = connect_record_stream(pulse_name, data->loop, data->context, flags, &data->attr, &data->spec, &chanmap);
    if(!data->stream)
    {
        pa_threaded_mainloop_unlock(data->loop);
        goto fail;
    }
    pa_stream_set_state_callback(data->stream, stream_state_callback2, device);

    /* Record which source we actually landed on, then fetch its info
     * (source_name_callback updates the device). */
    data->device_name = strdup(pa_stream_get_device_name(data->stream));
    o = pa_context_get_source_info_by_name(data->context, data->device_name, source_name_callback, device);
    WAIT_FOR_OPERATION(o, data->loop);

    pa_threaded_mainloop_unlock(data->loop);
    return ALC_NO_ERROR;

fail:
    pulse_close(device);
    return ALC_INVALID_VALUE;
}
static ALCboolean ALCopenslPlayback_reset(ALCopenslPlayback *self) { ALCdevice *device = STATIC_CAST(ALCbackend,self)->mDevice; SLDataLocator_AndroidSimpleBufferQueue loc_bufq; SLDataLocator_OutputMix loc_outmix; SLDataSource audioSrc; SLDataSink audioSnk; ALuint sampleRate; SLInterfaceID ids[2]; SLboolean reqs[2]; SLresult result; JNIEnv *env; if(self->mBufferQueueObj != NULL) VCALL0(self->mBufferQueueObj,Destroy)(); self->mBufferQueueObj = NULL; sampleRate = device->Frequency; if(!(device->Flags&DEVICE_FREQUENCY_REQUEST) && (env=Android_GetJNIEnv()) != NULL) { /* FIXME: Disabled until I figure out how to get the Context needed for * the getSystemService call. */ #if 0 /* Get necessary stuff for using java.lang.Integer, * android.content.Context, and android.media.AudioManager. */ jclass int_cls = JCALL(env,FindClass)("java/lang/Integer"); jmethodID int_parseint = JCALL(env,GetStaticMethodID)(int_cls, "parseInt", "(Ljava/lang/String;)I" ); TRACE("Integer: %p, parseInt: %p\n", int_cls, int_parseint); jclass ctx_cls = JCALL(env,FindClass)("android/content/Context"); jfieldID ctx_audsvc = JCALL(env,GetStaticFieldID)(ctx_cls, "AUDIO_SERVICE", "Ljava/lang/String;" ); jmethodID ctx_getSysSvc = JCALL(env,GetMethodID)(ctx_cls, "getSystemService", "(Ljava/lang/String;)Ljava/lang/Object;" ); TRACE("Context: %p, AUDIO_SERVICE: %p, getSystemService: %p\n", ctx_cls, ctx_audsvc, ctx_getSysSvc); jclass audmgr_cls = JCALL(env,FindClass)("android/media/AudioManager"); jfieldID audmgr_prop_out_srate = JCALL(env,GetStaticFieldID)(audmgr_cls, "PROPERTY_OUTPUT_SAMPLE_RATE", "Ljava/lang/String;" ); jmethodID audmgr_getproperty = JCALL(env,GetMethodID)(audmgr_cls, "getProperty", "(Ljava/lang/String;)Ljava/lang/String;" ); TRACE("AudioManager: %p, PROPERTY_OUTPUT_SAMPLE_RATE: %p, getProperty: %p\n", audmgr_cls, audmgr_prop_out_srate, audmgr_getproperty); const char *strchars; jstring strobj; /* Now make the calls. 
*/ //AudioManager audMgr = (AudioManager)getSystemService(Context.AUDIO_SERVICE); strobj = JCALL(env,GetStaticObjectField)(ctx_cls, ctx_audsvc); jobject audMgr = JCALL(env,CallObjectMethod)(ctx_cls, ctx_getSysSvc, strobj); strchars = JCALL(env,GetStringUTFChars)(strobj, NULL); TRACE("Context.getSystemService(%s) = %p\n", strchars, audMgr); JCALL(env,ReleaseStringUTFChars)(strobj, strchars); //String srateStr = audMgr.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE); strobj = JCALL(env,GetStaticObjectField)(audmgr_cls, audmgr_prop_out_srate); jstring srateStr = JCALL(env,CallObjectMethod)(audMgr, audmgr_getproperty, strobj); strchars = JCALL(env,GetStringUTFChars)(strobj, NULL); TRACE("audMgr.getProperty(%s) = %p\n", strchars, srateStr); JCALL(env,ReleaseStringUTFChars)(strobj, strchars); //int sampleRate = Integer.parseInt(srateStr); sampleRate = JCALL(env,CallStaticIntMethod)(int_cls, int_parseint, srateStr); strchars = JCALL(env,GetStringUTFChars)(srateStr, NULL); TRACE("Got system sample rate %uhz (%s)\n", sampleRate, strchars); JCALL(env,ReleaseStringUTFChars)(srateStr, strchars); if(!sampleRate) sampleRate = device->Frequency; else sampleRate = maxu(sampleRate, MIN_OUTPUT_RATE); #endif } if(sampleRate != device->Frequency) { device->NumUpdates = (device->NumUpdates*sampleRate + (device->Frequency>>1)) / device->Frequency; device->NumUpdates = maxu(device->NumUpdates, 2); device->Frequency = sampleRate; }
void MixDirect_Hrtf(const DirectParams *params, const ALfloat *restrict data, ALuint srcchan, ALuint OutPos, ALuint SamplesToDo, ALuint BufferSize) { ALfloat (*restrict DryBuffer)[BUFFERSIZE] = params->OutBuffer; ALfloat *restrict ClickRemoval = params->ClickRemoval; ALfloat *restrict PendingClicks = params->PendingClicks; const ALuint IrSize = params->Hrtf.Params.IrSize; const ALint *restrict DelayStep = params->Hrtf.Params.DelayStep; const ALfloat (*restrict CoeffStep)[2] = params->Hrtf.Params.CoeffStep; const ALfloat (*restrict TargetCoeffs)[2] = params->Hrtf.Params.Coeffs[srcchan]; const ALuint *restrict TargetDelay = params->Hrtf.Params.Delay[srcchan]; ALfloat *restrict History = params->Hrtf.State->History[srcchan]; ALfloat (*restrict Values)[2] = params->Hrtf.State->Values[srcchan]; ALint Counter = maxu(params->Hrtf.State->Counter, OutPos) - OutPos; ALuint Offset = params->Hrtf.State->Offset + OutPos; ALIGN(16) ALfloat Coeffs[HRIR_LENGTH][2]; ALuint Delay[2]; ALfloat left, right; ALuint pos; ALuint c; pos = 0; for(c = 0;c < IrSize;c++) { Coeffs[c][0] = TargetCoeffs[c][0] - (CoeffStep[c][0]*Counter); Coeffs[c][1] = TargetCoeffs[c][1] - (CoeffStep[c][1]*Counter); } Delay[0] = TargetDelay[0] - (DelayStep[0]*Counter);
/* Opens an ALSA capture device matching the requested name (or the first
 * probed device when deviceName is NULL), configures its hardware parameters
 * from the ALCdevice's format, and allocates the ring buffer plus the staging
 * buffer used when draining the PCM.
 *
 * Returns ALC_NO_ERROR on success; ALC_INVALID_VALUE for an unknown device
 * name, an empty device list, or any ALSA setup failure; ALC_OUT_OF_MEMORY
 * when the backend data cannot be allocated.
 */
static ALCenum alsa_open_capture(ALCdevice *pDevice, const ALCchar *deviceName)
{
    const char *driver = "default";
    snd_pcm_hw_params_t *p;
    snd_pcm_uframes_t bufferSizeInFrames;
    snd_pcm_uframes_t periodSizeInFrames;
    snd_pcm_format_t format;
    ALuint frameSize;
    alsa_data *data;
    char str[128];
    const char *err; /* points at string literals only */
    int i;

    /* Allow the config file to override the default capture device string. */
    ConfigValueStr("alsa", "capture", &driver);

    if(!allCaptureDevNameMap)
        allCaptureDevNameMap = probe_devices(SND_PCM_STREAM_CAPTURE, &numCaptureDevNames);

    if(!deviceName)
    {
        /* FIX: don't dereference the device list when probing found nothing. */
        if(!allCaptureDevNameMap || numCaptureDevNames == 0)
            return ALC_INVALID_VALUE;
        deviceName = allCaptureDevNameMap[0].name;
    }
    else
    {
        size_t idx;

        for(idx = 0;idx < numCaptureDevNames;idx++)
        {
            if(allCaptureDevNameMap[idx].name &&
               strcmp(deviceName, allCaptureDevNameMap[idx].name) == 0)
            {
                /* Entry 0 is the default device; others are addressed by
                 * their card/device pair. */
                if(idx > 0)
                {
                    snprintf(str, sizeof(str), "%sCARD=%s,DEV=%d", capture_prefix,
                             allCaptureDevNameMap[idx].card, allCaptureDevNameMap[idx].dev);
                    driver = str;
                }
                break;
            }
        }
        if(idx == numCaptureDevNames)
            return ALC_INVALID_VALUE;
    }

    /* FIX: check the allocation; snd_pcm_open would write through NULL. */
    data = calloc(1, sizeof(*data));
    if(!data)
        return ALC_OUT_OF_MEMORY;

    i = snd_pcm_open(&data->pcmHandle, driver, SND_PCM_STREAM_CAPTURE, SND_PCM_NONBLOCK);
    if(i < 0)
    {
        ERR("Could not open capture device '%s': %s\n", driver, snd_strerror(i));
        free(data);
        return ALC_INVALID_VALUE;
    }

    /* Map the requested sample type to an ALSA format. An unmapped type
     * leaves format at -1, which snd_pcm_hw_params_set_format rejects. */
    format = -1;
    switch(pDevice->FmtType)
    {
        case DevFmtByte: format = SND_PCM_FORMAT_S8; break;
        case DevFmtUByte: format = SND_PCM_FORMAT_U8; break;
        case DevFmtShort: format = SND_PCM_FORMAT_S16; break;
        case DevFmtUShort: format = SND_PCM_FORMAT_U16; break;
        /* FIX: 32-bit integer formats were missing from this switch and
         * always failed later with an opaque "set format" error. */
        case DevFmtInt: format = SND_PCM_FORMAT_S32; break;
        case DevFmtUInt: format = SND_PCM_FORMAT_U32; break;
        case DevFmtFloat: format = SND_PCM_FORMAT_FLOAT; break;
    }

    err = NULL;
    /* Request at least 100ms of total buffering, in ~50ms periods. */
    bufferSizeInFrames = maxu(pDevice->UpdateSize*pDevice->NumUpdates,
                              100*pDevice->Frequency/1000);
    periodSizeInFrames = minu(bufferSizeInFrames, 50*pDevice->Frequency/1000);
    snd_pcm_hw_params_malloc(&p);

    if((i=snd_pcm_hw_params_any(data->pcmHandle, p)) < 0)
        err = "any";
    /* set interleaved access */
    if(i >= 0 && (i=snd_pcm_hw_params_set_access(data->pcmHandle, p, SND_PCM_ACCESS_RW_INTERLEAVED)) < 0)
        err = "set access";
    /* set format (implicitly sets sample bits) */
    if(i >= 0 && (i=snd_pcm_hw_params_set_format(data->pcmHandle, p, format)) < 0)
        err = "set format";
    /* set channels (implicitly sets frame bits) */
    if(i >= 0 && (i=snd_pcm_hw_params_set_channels(data->pcmHandle, p, ChannelsFromDevFmt(pDevice->FmtChans))) < 0)
        err = "set channels";
    /* set rate (implicitly constrains period/buffer parameters) */
    if(i >= 0 && (i=snd_pcm_hw_params_set_rate(data->pcmHandle, p, pDevice->Frequency, 0)) < 0)
        err = "set rate near";
    /* set buffer size in frame units (implicitly sets period size/bytes/time
     * and buffer time/bytes) */
    if(i >= 0 && (i=snd_pcm_hw_params_set_buffer_size_near(data->pcmHandle, p, &bufferSizeInFrames)) < 0)
        err = "set buffer size near";
    /* set period size in frame units */
    if(i >= 0 && (i=snd_pcm_hw_params_set_period_size_near(data->pcmHandle, p, &periodSizeInFrames, NULL)) < 0)
        err = "set period size near";
    /* install and prepare hardware configuration */
    if(i >= 0 && (i=snd_pcm_hw_params(data->pcmHandle, p)) < 0)
        err = "set params";
    if(i < 0)
    {
        ERR("%s failed: %s\n", err, snd_strerror(i));
        snd_pcm_hw_params_free(p);
        goto error;
    }

    /* Reuses bufferSizeInFrames to hold the granted *period* size; the
     * staging buffer below holds one period's worth of frames. */
    if((i=snd_pcm_hw_params_get_period_size(p, &bufferSizeInFrames, NULL)) < 0)
    {
        ERR("get size failed: %s\n", snd_strerror(i));
        snd_pcm_hw_params_free(p);
        goto error;
    }

    snd_pcm_hw_params_free(p);

    frameSize = FrameSizeFromDevFmt(pDevice->FmtChans, pDevice->FmtType);

    data->ring = CreateRingBuffer(frameSize, pDevice->UpdateSize*pDevice->NumUpdates);
    if(!data->ring)
    {
        ERR("ring buffer create failed\n");
        goto error;
    }

    data->size = snd_pcm_frames_to_bytes(data->pcmHandle, bufferSizeInFrames);
    data->buffer = malloc(data->size);
    if(!data->buffer)
    {
        ERR("buffer malloc failed\n");
        goto error;
    }

    pDevice->szDeviceName = strdup(deviceName);

    pDevice->ExtraData = data;
    return ALC_NO_ERROR;

error:
    /* data->buffer/ring may be NULL here; free(NULL) and the destroy helper
     * are reached only for resources acquired after snd_pcm_open. */
    free(data->buffer);
    DestroyRingBuffer(data->ring);
    snd_pcm_close(data->pcmHandle);
    free(data);

    pDevice->ExtraData = NULL;
    return ALC_INVALID_VALUE;
}