/*
 * Tune the RadioTrack II card to the requested frequency, then restore
 * the current mute control value.  Always returns 0.
 */
static int rtrack2_s_frequency(struct radio_isa_card *isa, u32 freq)
{
	int bit;

	/* Convert the frequency to the value the tuner hardware expects */
	freq = freq / 200 + 856;

	outb_p(0xc8, isa->io);
	outb_p(0xc9, isa->io);
	outb_p(0xc9, isa->io);

	/* Ten zero bits are clocked out before the tuning word */
	for (bit = 0; bit < 10; bit++)
		zero(isa);

	/* Clock out the 15-bit tuning word, most significant bit first */
	for (bit = 14; bit >= 0; bit--) {
		if (freq & (1 << bit))
			one(isa);
		else
			zero(isa);
	}

	outb_p(0xc8, isa->io);
	/* Re-apply the user's mute setting */
	outb_p(v4l2_ctrl_g_ctrl(isa->mute), isa->io);
	return 0;
}
/*
 * Set up and start an encoder capture for the given cx18 stream.
 *
 * Creates a firmware capture task, programs the capture channel type,
 * configures encoder parameters for non-TS captures, sets up the MDLs,
 * and issues CX18_CPU_CAPTURE_START.  On a start failure the capture is
 * stopped, the MDLs released and the task destroyed before returning
 * -EINVAL.  Returns 0 on success.
 */
int cx18_start_v4l2_encode_stream(struct cx18_stream *s)
{
	u32 data[MAX_MB_ARGUMENTS];
	struct cx18 *cx = s->cx;
	int captype = 0;
	struct cx18_stream *s_idx;

	if (!cx18_stream_enabled(s))
		return -EINVAL;

	CX18_DEBUG_INFO("Start encoder stream %s\n", s->name);

	/* Map the stream type to a firmware capture channel type and do
	   any per-stream-type state initialization */
	switch (s->type) {
	case CX18_ENC_STREAM_TYPE_MPG:
		captype = CAPTURE_CHANNEL_TYPE_MPEG;
		cx->mpg_data_received = cx->vbi_data_inserted = 0;
		cx->dualwatch_jiffies = jiffies;
		cx->dualwatch_stereo_mode =
			v4l2_ctrl_g_ctrl(cx->cxhdl.audio_mode);
		cx->search_pack_header = 0;
		break;

	case CX18_ENC_STREAM_TYPE_IDX:
		captype = CAPTURE_CHANNEL_TYPE_INDEX;
		break;
	case CX18_ENC_STREAM_TYPE_TS:
		captype = CAPTURE_CHANNEL_TYPE_TS;
		break;
	case CX18_ENC_STREAM_TYPE_YUV:
		captype = CAPTURE_CHANNEL_TYPE_YUV;
		break;
	case CX18_ENC_STREAM_TYPE_PCM:
		captype = CAPTURE_CHANNEL_TYPE_PCM;
		break;
	case CX18_ENC_STREAM_TYPE_VBI:
#ifdef CX18_ENCODER_PARSES_SLICED
		captype = cx18_raw_vbi(cx) ?
			CAPTURE_CHANNEL_TYPE_VBI :
			CAPTURE_CHANNEL_TYPE_SLICED_VBI;
#else
		/*
		 * Currently we set things up so that Sliced VBI from the
		 * digitizer is handled as Raw VBI by the encoder
		 */
		captype = CAPTURE_CHANNEL_TYPE_VBI;
#endif
		cx->vbi.frame = 0;
		cx->vbi.inserted_frame = 0;
		memset(cx->vbi.sliced_mpeg_size,
			0, sizeof(cx->vbi.sliced_mpeg_size));
		break;
	default:
		return -EINVAL;
	}

	/* Clear Streamoff flags in case left from last capture */
	clear_bit(CX18_F_S_STREAMOFF, &s->s_flags);

	/* Create the firmware task and record its handle for later calls */
	cx18_vapi_result(cx, data, CX18_CREATE_TASK, 1, CPU_CMD_MASK_CAPTURE);
	s->handle = data[0];
	cx18_vapi(cx, CX18_CPU_SET_CHANNEL_TYPE, 2, s->handle, captype);

	/*
	 * For everything but CAPTURE_CHANNEL_TYPE_TS, play it safe and
	 * set up all the parameters, as it is not obvious which parameters the
	 * firmware shares across capture channel types and which it does not.
	 *
	 * Some of the cx18_vapi() calls below apply to only certain capture
	 * channel types.  We're hoping there's no harm in calling most of them
	 * anyway, as long as the values are all consistent.  Setting some
	 * shared parameters will have no effect once an analog capture channel
	 * has started streaming.
	 */
	if (captype != CAPTURE_CHANNEL_TYPE_TS) {
		cx18_vapi(cx, CX18_CPU_SET_VER_CROP_LINE, 2, s->handle, 0);
		cx18_vapi(cx, CX18_CPU_SET_MISC_PARAMETERS, 3, s->handle, 3, 1);
		cx18_vapi(cx, CX18_CPU_SET_MISC_PARAMETERS, 3, s->handle, 8, 0);
		cx18_vapi(cx, CX18_CPU_SET_MISC_PARAMETERS, 3, s->handle, 4, 1);

		/*
		 * Audio related reset according to
		 * Documentation/video4linux/cx2341x/fw-encoder-api.txt
		 */
		if (atomic_read(&cx->ana_capturing) == 0)
			cx18_vapi(cx, CX18_CPU_SET_MISC_PARAMETERS, 2,
				  s->handle, 12);

		/*
		 * Number of lines for Field 1 & Field 2 according to
		 * Documentation/video4linux/cx2341x/fw-encoder-api.txt
		 * Field 1 is 312 for 625 line systems in BT.656
		 * Field 2 is 313 for 625 line systems in BT.656
		 */
		cx18_vapi(cx, CX18_CPU_SET_CAPTURE_LINE_NO, 3,
			  s->handle, 312, 313);

		if (cx->v4l2_cap & V4L2_CAP_VBI_CAPTURE)
			cx18_vbi_setup(s);

		/*
		 * Select to receive I, P, and B frame index entries, if the
		 * index stream is enabled.  Otherwise disable index entry
		 * generation.
		 */
		s_idx = &cx->streams[CX18_ENC_STREAM_TYPE_IDX];
		cx18_vapi_result(cx, data, CX18_CPU_SET_INDEXTABLE, 2,
				 s->handle,
				 cx18_stream_enabled(s_idx) ? 7 : 0);

		/* Call out to the common CX2341x API setup for user controls */
		cx->cxhdl.priv = s;
		cx2341x_handler_setup(&cx->cxhdl);

		/*
		 * When starting a capture and we're set for radio,
		 * ensure the video is muted, despite the user control.
		 */
		if (!cx->cxhdl.video_mute &&
		    test_bit(CX18_F_I_RADIO_USER, &cx->i_flags))
			cx18_vapi(cx, CX18_CPU_SET_VIDEO_MUTE, 2, s->handle,
			  (v4l2_ctrl_g_ctrl(cx->cxhdl.video_mute_yuv) << 8) | 1);

		/*
		 * Enable the Video Format Converter for UYVY 4:2:2 support,
		 * rather than the default HM12 Macroblock 4:2:0 support.
		 */
		if (captype == CAPTURE_CHANNEL_TYPE_YUV) {
			if (s->pixelformat == V4L2_PIX_FMT_UYVY)
				cx18_vapi(cx, CX18_CPU_SET_VFC_PARAM, 2,
					  s->handle, 1);
			else
				/* If in doubt, default to HM12 */
				cx18_vapi(cx, CX18_CPU_SET_VFC_PARAM, 2,
					  s->handle, 0);
		}
	}

	/* First capture to start: mark busy and enable DSP interrupts */
	if (atomic_read(&cx->tot_capturing) == 0) {
		cx2341x_handler_set_busy(&cx->cxhdl, 1);
		clear_bit(CX18_F_I_EOS, &cx->i_flags);
		cx18_write_reg(cx, 7, CX18_DSP0_INTERRUPT_MASK);
	}

	/* Tell the firmware where to ack MDLs for this stream type
	   (offsets are relative to the encoder memory window) */
	cx18_vapi(cx, CX18_CPU_DE_SET_MDL_ACK, 3, s->handle,
		(void __iomem *)&cx->scb->cpu_mdl_ack[s->type][0] - cx->enc_mem,
		(void __iomem *)&cx->scb->cpu_mdl_ack[s->type][1] - cx->enc_mem);

	/* Init all the cpu_mdls for this stream */
	cx18_stream_configure_mdls(s);
	_cx18_stream_load_fw_queue(s);

	/* begin_capture */
	if (cx18_vapi(cx, CX18_CPU_CAPTURE_START, 1, s->handle)) {
		CX18_DEBUG_WARN("Error starting capture!\n");
		/* Ensure we're really not capturing before releasing MDLs */
		set_bit(CX18_F_S_STOPPING, &s->s_flags);
		if (s->type == CX18_ENC_STREAM_TYPE_MPG)
			cx18_vapi(cx, CX18_CPU_CAPTURE_STOP, 2, s->handle, 1);
		else
			cx18_vapi(cx, CX18_CPU_CAPTURE_STOP, 1, s->handle);
		clear_bit(CX18_F_S_STREAMING, &s->s_flags);
		/* FIXME - CX18_F_S_STREAMOFF as well? */
		cx18_vapi(cx, CX18_CPU_DE_RELEASE_MDL, 1, s->handle);
		cx18_vapi(cx, CX18_DESTROY_TASK, 1, s->handle);
		s->handle = CX18_INVALID_TASK_HANDLE;
		clear_bit(CX18_F_S_STOPPING, &s->s_flags);
		if (atomic_read(&cx->tot_capturing) == 0) {
			set_bit(CX18_F_I_EOS, &cx->i_flags);
			cx18_write_reg(cx, 5, CX18_DSP0_INTERRUPT_MASK);
		}
		return -EINVAL;
	}

	/* you're live! sit back and await interrupts :) */
	if (captype != CAPTURE_CHANNEL_TYPE_TS)
		atomic_inc(&cx->ana_capturing);
	atomic_inc(&cx->tot_capturing);
	return 0;
}
static void set_formatting(struct go7007 *go) { if (go->format == V4L2_PIX_FMT_MJPEG) { go->pali = 0; go->aspect_ratio = GO7007_RATIO_1_1; go->gop_size = 0; go->ipb = 0; go->closed_gop = 0; go->repeat_seqhead = 0; go->seq_header_enable = 0; go->gop_header_enable = 0; go->dvd_mode = 0; return; } switch (go->format) { case V4L2_PIX_FMT_MPEG1: go->pali = 0; break; default: case V4L2_PIX_FMT_MPEG2: go->pali = 0x48; break; case V4L2_PIX_FMT_MPEG4: /* For future reference: this is the list of MPEG4 * profiles that are available, although they are * untested: * * Profile pali * -------------- ---- * PROFILE_S_L0 0x08 * PROFILE_S_L1 0x01 * PROFILE_S_L2 0x02 * PROFILE_S_L3 0x03 * PROFILE_ARTS_L1 0x91 * PROFILE_ARTS_L2 0x92 * PROFILE_ARTS_L3 0x93 * PROFILE_ARTS_L4 0x94 * PROFILE_AS_L0 0xf0 * PROFILE_AS_L1 0xf1 * PROFILE_AS_L2 0xf2 * PROFILE_AS_L3 0xf3 * PROFILE_AS_L4 0xf4 * PROFILE_AS_L5 0xf5 */ go->pali = 0xf5; break; } go->gop_size = v4l2_ctrl_g_ctrl(go->mpeg_video_gop_size); go->closed_gop = v4l2_ctrl_g_ctrl(go->mpeg_video_gop_closure); go->ipb = v4l2_ctrl_g_ctrl(go->mpeg_video_b_frames) != 0; go->bitrate = v4l2_ctrl_g_ctrl(go->mpeg_video_bitrate); go->repeat_seqhead = v4l2_ctrl_g_ctrl(go->mpeg_video_rep_seqheader); go->gop_header_enable = 1; go->dvd_mode = 0; if (go->format == V4L2_PIX_FMT_MPEG2) go->dvd_mode = go->bitrate == 9800000 && go->gop_size == 15 && go->ipb == 0 && go->repeat_seqhead == 1 && go->closed_gop; switch (v4l2_ctrl_g_ctrl(go->mpeg_video_aspect_ratio)) { default: case V4L2_MPEG_VIDEO_ASPECT_1x1: go->aspect_ratio = GO7007_RATIO_1_1; break; case V4L2_MPEG_VIDEO_ASPECT_4x3: go->aspect_ratio = GO7007_RATIO_4_3; break; case V4L2_MPEG_VIDEO_ASPECT_16x9: go->aspect_ratio = GO7007_RATIO_16_9; break; } }
/*
 * Fill one capture buffer with test pattern data (or with looped output
 * data when video loopback is active) and overlay the OSD text lines
 * (time/sequence, format and control values) according to dev->osd_mode.
 * Also sets the buffer's field, sequence number and timestamp.
 */
static void vivid_fillbuff(struct vivid_dev *dev, struct vivid_buffer *buf)
{
	/* With a single-field (TOP or BOTTOM) format each buffer holds half
	   the lines, so halve the OSD text line height as well */
	unsigned factor = V4L2_FIELD_HAS_T_OR_B(dev->field_cap) ? 2 : 1;
	unsigned line_height = 16 / factor;
	bool is_tv = vivid_is_sdtv_cap(dev);
	bool is_60hz = is_tv && (dev->std_cap & V4L2_STD_525_60);
	unsigned p;
	int line = 1;
	u8 *basep[TPG_MAX_PLANES][2];
	unsigned ms;
	char str[100];
	s32 gain;
	bool is_loop = false;

	/* Loop the output video back only when enabled, supported, and the
	   S-Video or HDMI input is carrying a valid signal */
	if (dev->loop_video && dev->can_loop_video &&
	    ((vivid_is_svid_cap(dev) &&
	      !VIVID_INVALID_SIGNAL(dev->std_signal_mode)) ||
	     (vivid_is_hdmi_cap(dev) &&
	      !VIVID_INVALID_SIGNAL(dev->dv_timings_signal_mode))))
		is_loop = true;

	buf->vb.v4l2_buf.sequence = dev->vid_cap_seq_count;
	/*
	 * Take the timestamp now if the timestamp source is set to
	 * "Start of Exposure".
	 */
	if (dev->tstamp_src_is_soe)
		v4l2_get_timestamp(&buf->vb.v4l2_buf.timestamp);
	if (dev->field_cap == V4L2_FIELD_ALTERNATE) {
		/*
		 * 60 Hz standards start with the bottom field, 50 Hz standards
		 * with the top field. So if the 0-based seq_count is even,
		 * then the field is TOP for 50 Hz and BOTTOM for 60 Hz
		 * standards.
		 */
		buf->vb.v4l2_buf.field =
			((dev->vid_cap_seq_count & 1) ^ is_60hz) ?
			V4L2_FIELD_TOP : V4L2_FIELD_BOTTOM;
		/*
		 * The sequence counter counts frames, not fields. So divide
		 * by two.
		 */
		buf->vb.v4l2_buf.sequence /= 2;
	} else {
		buf->vb.v4l2_buf.field = dev->field_cap;
	}
	tpg_s_field(&dev->tpg, buf->vb.v4l2_buf.field);
	tpg_s_perc_fill_blank(&dev->tpg,
			      dev->must_blank[buf->vb.v4l2_buf.index]);

	vivid_precalc_copy_rects(dev);

	for (p = 0; p < tpg_g_planes(&dev->tpg); p++) {
		void *vbuf = vb2_plane_vaddr(&buf->vb, p);

		/*
		 * The first plane of a multiplanar format has a non-zero
		 * data_offset. This helps testing whether the application
		 * correctly supports non-zero data offsets.
		 */
		if (dev->fmt_cap->data_offset[p]) {
			memset(vbuf, dev->fmt_cap->data_offset[p] & 0xff,
			       dev->fmt_cap->data_offset[p]);
			vbuf += dev->fmt_cap->data_offset[p];
		}
		tpg_calc_text_basep(&dev->tpg, basep, p, vbuf);
		/* When looping, fall back to the test pattern if the copy
		   from the output buffer did not succeed */
		if (!is_loop || vivid_copy_buffer(dev, p, vbuf, buf))
			tpg_fillbuffer(&dev->tpg, vivid_get_std_cap(dev),
				       p, vbuf);
	}
	dev->must_blank[buf->vb.v4l2_buf.index] = false;

	/* Updates stream time, only update at the start of a new frame. */
	if (dev->field_cap != V4L2_FIELD_ALTERNATE ||
			(buf->vb.v4l2_buf.sequence & 1) == 0)
		dev->ms_vid_cap =
			jiffies_to_msecs(jiffies - dev->jiffies_vid_cap);

	ms = dev->ms_vid_cap;
	/* osd_mode 0 or 1: show the running time and sequence number */
	if (dev->osd_mode <= 1) {
		snprintf(str, sizeof(str), " %02d:%02d:%02d:%03d %u%s",
				(ms / (60 * 60 * 1000)) % 24,
				(ms / (60 * 1000)) % 60,
				(ms / 1000) % 60,
				ms % 1000,
				buf->vb.v4l2_buf.sequence,
				(dev->field_cap == V4L2_FIELD_ALTERNATE) ?
					(buf->vb.v4l2_buf.field ==
					 V4L2_FIELD_TOP ?
					 " top" : " bottom") : "");
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
	}
	/* osd_mode 0: additionally show format and all control values.
	   Hold the relevant control handler lock while reading ->cur.val */
	if (dev->osd_mode == 0) {
		snprintf(str, sizeof(str), " %dx%d, input %d ",
				dev->src_rect.width, dev->src_rect.height,
				dev->input);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);

		gain = v4l2_ctrl_g_ctrl(dev->gain);
		mutex_lock(dev->ctrl_hdl_user_vid.lock);
		snprintf(str, sizeof(str),
			" brightness %3d, contrast %3d, saturation %3d, hue %d ",
			dev->brightness->cur.val,
			dev->contrast->cur.val,
			dev->saturation->cur.val,
			dev->hue->cur.val);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
		snprintf(str, sizeof(str),
			" autogain %d, gain %3d, alpha 0x%02x ",
			dev->autogain->cur.val, gain, dev->alpha->cur.val);
		mutex_unlock(dev->ctrl_hdl_user_vid.lock);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
		mutex_lock(dev->ctrl_hdl_user_aud.lock);
		snprintf(str, sizeof(str),
			" volume %3d, mute %d ",
			dev->volume->cur.val, dev->mute->cur.val);
		mutex_unlock(dev->ctrl_hdl_user_aud.lock);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
		mutex_lock(dev->ctrl_hdl_user_gen.lock);
		snprintf(str, sizeof(str),
			" int32 %d, int64 %lld, bitmask %08x ",
			dev->int32->cur.val,
			*dev->int64->p_cur.p_s64,
			dev->bitmask->cur.val);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
		snprintf(str, sizeof(str),
			" boolean %d, menu %s, string \"%s\" ",
			dev->boolean->cur.val,
			dev->menu->qmenu[dev->menu->cur.val],
			dev->string->p_cur.p_char);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
		snprintf(str, sizeof(str),
			" integer_menu %lld, value %d ",
			dev->int_menu->qmenu_int[dev->int_menu->cur.val],
			dev->int_menu->cur.val);
		mutex_unlock(dev->ctrl_hdl_user_gen.lock);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
		/* Show a notice for a few frames after the button control
		   was pressed */
		if (dev->button_pressed) {
			dev->button_pressed--;
			snprintf(str, sizeof(str), " button pressed!");
			tpg_gen_text(&dev->tpg, basep, line++ * line_height,
				     16, str);
		}
	}

	/*
	 * If "End of Frame" is specified at the timestamp source, then take
	 * the timestamp now.
	 */
	if (!dev->tstamp_src_is_soe)
		v4l2_get_timestamp(&buf->vb.v4l2_buf.timestamp);
	buf->vb.v4l2_buf.timestamp.tv_sec += dev->time_wrap_offset;
}
/*
 * Set up and start an encoder capture for the given ivtv stream.
 *
 * Maps the stream type to firmware capture type/subtype, performs the
 * one-time encoder initialization when this is the first active capture,
 * and issues CX2341X_ENC_START_CAPTURE.  Returns 0 on success or -EINVAL
 * on failure.
 */
int ivtv_start_v4l2_encode_stream(struct ivtv_stream *s)
{
	u32 data[CX2341X_MBOX_MAX_DATA];
	struct ivtv *itv = s->itv;
	int captype = 0, subtype = 0;
	int enable_passthrough = 0;

	if (s->vdev == NULL)
		return -EINVAL;

	IVTV_DEBUG_INFO("Start encoder stream %s\n", s->name);

	switch (s->type) {
	case IVTV_ENC_STREAM_TYPE_MPG:
		captype = 0;
		subtype = 3;

		/* Stop Passthrough */
		if (itv->output_mode == OUT_PASSTHROUGH) {
			ivtv_passthrough_mode(itv, 0);
			enable_passthrough = 1;
		}
		itv->mpg_data_received = itv->vbi_data_inserted = 0;
		itv->dualwatch_jiffies = jiffies;
		itv->dualwatch_stereo_mode =
			v4l2_ctrl_g_ctrl(itv->cxhdl.audio_mode);
		itv->search_pack_header = 0;
		break;

	case IVTV_ENC_STREAM_TYPE_YUV:
		if (itv->output_mode == OUT_PASSTHROUGH) {
			captype = 2;
			subtype = 11;	/* video+audio+decoder */
			break;
		}
		captype = 1;
		subtype = 1;
		break;
	case IVTV_ENC_STREAM_TYPE_PCM:
		captype = 1;
		subtype = 2;
		break;
	case IVTV_ENC_STREAM_TYPE_VBI:
		captype = 1;
		subtype = 4;

		itv->vbi.frame = 0;
		itv->vbi.inserted_frame = 0;
		memset(itv->vbi.sliced_mpeg_size,
			0, sizeof(itv->vbi.sliced_mpeg_size));
		break;
	default:
		return -EINVAL;
	}
	s->subtype = subtype;
	s->buffers_stolen = 0;

	/* Clear Streamoff flags in case left from last capture */
	clear_bit(IVTV_F_S_STREAMOFF, &s->s_flags);

	if (atomic_read(&itv->capturing) == 0) {
		int digitizer;

		/* Always use frame based mode. Experiments have demonstrated
		   that byte stream based mode results in dropped frames and
		   corruption. Not often, but occasionally. Many thanks go to
		   Leonard Orb who spent a lot of effort and time trying to
		   trace the cause of the drop outs. */
		/* 1 frame per DMA */
		/*ivtv_vapi(itv, CX2341X_ENC_SET_DMA_BLOCK_SIZE, 2, 128, 0); */
		ivtv_vapi(itv, CX2341X_ENC_SET_DMA_BLOCK_SIZE, 2, 1, 1);

		/* Stuff from Windows, we don't know what it is */
		ivtv_vapi(itv, CX2341X_ENC_SET_VERT_CROP_LINE, 1, 0);
		/* According to the docs, this should be correct. However, this is
		   untested. I don't dare enable this without having tested it.
		   Only very few old cards actually have this hardware combination.
		ivtv_vapi(itv, CX2341X_ENC_SET_VERT_CROP_LINE, 1,
			((itv->hw_flags & IVTV_HW_SAA7114) && itv->is_60hz) ? 10001 : 0);
		*/
		ivtv_vapi(itv, CX2341X_ENC_MISC, 2, 3, !itv->has_cx23415);
		ivtv_vapi(itv, CX2341X_ENC_MISC, 2, 8, 0);
		ivtv_vapi(itv, CX2341X_ENC_MISC, 2, 4, 1);
		ivtv_vapi(itv, CX2341X_ENC_MISC, 1, 12);

		/* assign placeholder */
		ivtv_vapi(itv, CX2341X_ENC_SET_PLACEHOLDER, 12,
			0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);

		/* Pick the vsync line count based on the digitizer chip */
		if (itv->card->hw_all & (IVTV_HW_SAA7115 | IVTV_HW_SAA717X))
			digitizer = 0xF1;
		else if (itv->card->hw_all & IVTV_HW_SAA7114)
			digitizer = 0xEF;
		else /* cx25840 */
			digitizer = 0x140;

		ivtv_vapi(itv, CX2341X_ENC_SET_NUM_VSYNC_LINES, 2,
			  digitizer, digitizer);

		/* Setup VBI */
		if (itv->v4l2_cap & V4L2_CAP_VBI_CAPTURE) {
			ivtv_vbi_setup(itv);
		}

		/* assign program index info. Mask 7: select I/P/B, Num_req: 400 max */
		ivtv_vapi_result(itv, data,
				 CX2341X_ENC_SET_PGM_INDEX_INFO, 2, 7, 400);
		itv->pgm_info_offset = data[0];
		itv->pgm_info_num = data[1];
		itv->pgm_info_write_idx = 0;
		itv->pgm_info_read_idx = 0;

		IVTV_DEBUG_INFO("PGM Index at 0x%08x with %d elements\n",
				itv->pgm_info_offset, itv->pgm_info_num);

		/* Setup API for Stream */
		cx2341x_handler_setup(&itv->cxhdl);

		/* mute if capturing radio */
		if (test_bit(IVTV_F_I_RADIO_USER, &itv->i_flags))
			ivtv_vapi(itv, CX2341X_ENC_MUTE_VIDEO, 1,
				1 | (v4l2_ctrl_g_ctrl(itv->cxhdl.video_mute_yuv) << 8));
	}

	/* Vsync Setup */
	if (itv->has_cx23415 &&
	    !test_and_set_bit(IVTV_F_I_DIG_RST, &itv->i_flags)) {
		/* event notification (on) */
		ivtv_vapi(itv, CX2341X_ENC_SET_EVENT_NOTIFICATION, 4,
			  0, 1, IVTV_IRQ_ENC_VIM_RST, -1);
		ivtv_clear_irq_mask(itv, IVTV_IRQ_ENC_VIM_RST);
	}

	if (atomic_read(&itv->capturing) == 0) {
		/* Clear all Pending Interrupts */
		ivtv_set_irq_mask(itv, IVTV_IRQ_MASK_CAPTURE);

		clear_bit(IVTV_F_I_EOS, &itv->i_flags);

		cx2341x_handler_set_busy(&itv->cxhdl, 1);

		/* Initialize Digitizer for Capture */
		/* Avoid tinny audio problem - ensure audio clocks are going */
		v4l2_subdev_call(itv->sd_audio, audio, s_stream, 1);
		/* Avoid unpredictable PCI bus hang - disable video clocks */
		v4l2_subdev_call(itv->sd_video, video, s_stream, 0);
		ivtv_msleep_timeout(300, 0);
		ivtv_vapi(itv, CX2341X_ENC_INITIALIZE_INPUT, 0);
		v4l2_subdev_call(itv->sd_video, video, s_stream, 1);
	}

	/* begin_capture */
	if (ivtv_vapi(itv, CX2341X_ENC_START_CAPTURE, 2, captype, subtype)) {
		IVTV_DEBUG_WARN("Error starting capture!\n");
		return -EINVAL;
	}

	/* Start Passthrough */
	if (enable_passthrough) {
		ivtv_passthrough_mode(itv, 1);
	}

	if (s->type == IVTV_ENC_STREAM_TYPE_VBI)
		ivtv_clear_irq_mask(itv, IVTV_IRQ_ENC_VBI_CAP);
	else
		ivtv_clear_irq_mask(itv, IVTV_IRQ_MASK_CAPTURE);

	/* you're live! sit back and await interrupts :) */
	atomic_inc(&itv->capturing);
	return 0;
}
/* auto gain and exposure algorithm based on the knee algorithm described here: http://ytse.tricolour.net/docs/LowLightOptimization.html Returns 0 if no changes were made, 1 if the gain and or exposure settings where changed. */ int gspca_expo_autogain( struct gspca_dev *gspca_dev, int avg_lum, int desired_avg_lum, int deadzone, int gain_knee, int exposure_knee) { s32 gain, orig_gain, exposure, orig_exposure; int i, steps, retval = 0; if (v4l2_ctrl_g_ctrl(gspca_dev->autogain) == 0) return 0; orig_gain = gain = v4l2_ctrl_g_ctrl(gspca_dev->gain); orig_exposure = exposure = v4l2_ctrl_g_ctrl(gspca_dev->exposure); /* If we are of a multiple of deadzone, do multiple steps to reach the desired lumination fast (with the risc of a slight overshoot) */ steps = abs(desired_avg_lum - avg_lum) / deadzone; PDEBUG(D_FRAM, "autogain: lum: %d, desired: %d, steps: %d", avg_lum, desired_avg_lum, steps); for (i = 0; i < steps; i++) { if (avg_lum > desired_avg_lum) { if (gain > gain_knee) gain--; else if (exposure > exposure_knee) exposure--; else if (gain > gspca_dev->gain->default_value) gain--; else if (exposure > gspca_dev->exposure->minimum) exposure--; else if (gain > gspca_dev->gain->minimum) gain--; else break; } else { if (gain < gspca_dev->gain->default_value) gain++; else if (exposure < exposure_knee) exposure++; else if (gain < gain_knee) gain++; else if (exposure < gspca_dev->exposure->maximum) exposure++; else if (gain < gspca_dev->gain->maximum) gain++; else break; } } if (gain != orig_gain) { v4l2_ctrl_s_ctrl(gspca_dev->gain, gain); retval = 1; } if (exposure != orig_exposure) { v4l2_ctrl_s_ctrl(gspca_dev->exposure, exposure); retval = 1; } if (retval) PDEBUG(D_FRAM, "autogain: changed gain: %d, expo: %d", gain, exposure); return retval; }
/* Autogain + exposure algorithm for cameras with a coarse exposure control
   (usually this means we can only control the clockdiv to change exposure)
   As changing the clockdiv so that the fps drops from 30 to 15 fps for
   example, will lead to a huge exposure change (it effectively doubles),
   this algorithm normally tries to only adjust the gain (between 40 and
   80 %) and if that does not help, only then changes exposure. This leads
   to a much more stable image than using the knee algorithm which at
   certain points of the knee graph will only try to adjust exposure,
   which leads to oscillating as one exposure step is huge.

   Returns 0 if no changes were made, 1 if the gain and/or exposure
   settings were changed. */
int gspca_coarse_grained_expo_autogain(
			struct gspca_dev *gspca_dev,
			int avg_lum,
			int desired_avg_lum,
			int deadzone)
{
	s32 gain_low, gain_high, gain, orig_gain, exposure, orig_exposure;
	int steps, retval = 0;

	if (v4l2_ctrl_g_ctrl(gspca_dev->autogain) == 0)
		return 0;

	orig_gain = gain = v4l2_ctrl_g_ctrl(gspca_dev->gain);
	orig_exposure = exposure = v4l2_ctrl_g_ctrl(gspca_dev->exposure);

	/* Preferred gain operating band: 40% to 80% of the gain range */
	gain_low = (gspca_dev->gain->maximum - gspca_dev->gain->minimum) /
		   5 * 2 + gspca_dev->gain->minimum;
	gain_high = (gspca_dev->gain->maximum - gspca_dev->gain->minimum) /
		    5 * 4 + gspca_dev->gain->minimum;

	/* If we are off by a multiple of deadzone, do multiple steps to reach
	   the desired luminance fast (with the risk of a slight overshoot) */
	steps = (desired_avg_lum - avg_lum) / deadzone;

	PDEBUG(D_FRAM, "autogain: lum: %d, desired: %d, steps: %d",
	       avg_lum, desired_avg_lum, steps);

	if ((gain + steps) > gain_high &&
	    exposure < gspca_dev->exposure->maximum) {
		/* Gain would leave the band upwards: clamp it and count
		   towards raising exposure instead */
		gain = gain_high;
		gspca_dev->exp_too_low_cnt++;
		gspca_dev->exp_too_high_cnt = 0;
	} else if ((gain + steps) < gain_low &&
		   exposure > gspca_dev->exposure->minimum) {
		/* Gain would leave the band downwards: clamp it and count
		   towards lowering exposure instead */
		gain = gain_low;
		gspca_dev->exp_too_high_cnt++;
		gspca_dev->exp_too_low_cnt = 0;
	} else {
		/* Adjust gain only, clamped to its full hardware range */
		gain += steps;
		if (gain > gspca_dev->gain->maximum)
			gain = gspca_dev->gain->maximum;
		else if (gain < gspca_dev->gain->minimum)
			gain = gspca_dev->gain->minimum;
		gspca_dev->exp_too_high_cnt = 0;
		gspca_dev->exp_too_low_cnt = 0;
	}

	/* Only touch exposure after gain alone has been insufficient for
	   more than 3 consecutive calls */
	if (gspca_dev->exp_too_high_cnt > 3) {
		exposure--;
		gspca_dev->exp_too_high_cnt = 0;
	} else if (gspca_dev->exp_too_low_cnt > 3) {
		exposure++;
		gspca_dev->exp_too_low_cnt = 0;
	}

	if (gain != orig_gain) {
		v4l2_ctrl_s_ctrl(gspca_dev->gain, gain);
		retval = 1;
	}
	if (exposure != orig_exposure) {
		v4l2_ctrl_s_ctrl(gspca_dev->exposure, exposure);
		retval = 1;
	}

	if (retval)
		PDEBUG(D_FRAM, "autogain: changed gain: %d, expo: %d",
		       gain, exposure);
	return retval;
}
/*
 * Release an ivtv file handle.
 *
 * If this was the last user of the radio device, switches the hardware
 * back to TV mode.  Then stops any decoding or capturing this handle had
 * claimed and frees the open id.  Always returns 0.
 */
int ivtv_v4l2_close(struct file *filp)
{
	struct v4l2_fh *fh = filp->private_data;
	struct ivtv_open_id *id = fh2id(fh);
	struct ivtv *itv = id->itv;
	struct ivtv_stream *s = &itv->streams[id->type];

	IVTV_DEBUG_FILE("close %s\n", s->name);

	mutex_lock(&itv->serialize_lock);

	/* Stop radio */
	if (id->type == IVTV_ENC_STREAM_TYPE_RAD &&
			v4l2_fh_is_singular_file(filp)) {
		/* Closing radio device, return to TV mode */
		ivtv_mute(itv);
		/* Mark that the radio is no longer in use */
		clear_bit(IVTV_F_I_RADIO_USER, &itv->i_flags);
		/* Switch tuner to TV */
		ivtv_call_all(itv, video, s_std, itv->std);
		/* Select correct audio input (i.e. TV tuner or Line in) */
		ivtv_audio_set_io(itv);
		if (itv->hw_flags & IVTV_HW_SAA711X) {
			ivtv_call_hw(itv, IVTV_HW_SAA711X, video,
				s_crystal_freq, SAA7115_FREQ_32_11_MHZ, 0);
		}
		if (atomic_read(&itv->capturing) > 0) {
			/* Undo video mute */
			ivtv_vapi(itv, CX2341X_ENC_MUTE_VIDEO, 1,
				v4l2_ctrl_g_ctrl(itv->cxhdl.video_mute) |
				(v4l2_ctrl_g_ctrl(itv->cxhdl.video_mute_yuv) << 8));
		}
		/* Done! Unmute and continue. */
		ivtv_unmute(itv);
	}

	v4l2_fh_del(fh);
	v4l2_fh_exit(fh);

	/* Easy case first: this stream was never claimed by us */
	if (s->fh != &id->fh)
		goto close_done;

	/* 'Unclaim' this stream */

	if (s->type >= IVTV_DEC_STREAM_TYPE_MPG) {
		struct ivtv_stream *s_vout =
			&itv->streams[IVTV_DEC_STREAM_TYPE_VOUT];

		ivtv_stop_decoding(id, V4L2_DEC_CMD_STOP_TO_BLACK |
				       V4L2_DEC_CMD_STOP_IMMEDIATELY, 0);

		/* If all output streams are closed, and if the user doesn't
		   have IVTV_DEC_STREAM_TYPE_VOUT open, then disable CC on
		   TV-out. */
		if (itv->output_mode == OUT_NONE &&
		    !test_bit(IVTV_F_S_APPL_IO, &s_vout->s_flags)) {
			/* disable CC on TV-out */
			ivtv_disable_cc(itv);
		}
	} else {
		ivtv_stop_capture(id, 0);
	}
close_done:
	kfree(id);
	mutex_unlock(&itv->serialize_lock);
	return 0;
}
/*
 * Set up and start an encoder capture for the given cx18 stream.
 *
 * Creates a firmware capture task, programs the capture channel type,
 * configures encoder parameters for non-TS captures, sets up the MDLs,
 * and issues CX18_CPU_CAPTURE_START.  On a start failure the capture is
 * stopped, the MDLs released and the task destroyed before returning
 * -EINVAL.  Returns 0 on success.
 */
int cx18_start_v4l2_encode_stream(struct cx18_stream *s)
{
	u32 data[MAX_MB_ARGUMENTS];
	struct cx18 *cx = s->cx;
	int captype = 0;
	struct cx18_stream *s_idx;

	if (!cx18_stream_enabled(s))
		return -EINVAL;

	CX18_DEBUG_INFO("Start encoder stream %s\n", s->name);

	/* Map the stream type to a firmware capture channel type and do
	   any per-stream-type state initialization */
	switch (s->type) {
	case CX18_ENC_STREAM_TYPE_MPG:
		captype = CAPTURE_CHANNEL_TYPE_MPEG;
		cx->mpg_data_received = cx->vbi_data_inserted = 0;
		cx->dualwatch_jiffies = jiffies;
		cx->dualwatch_stereo_mode =
			v4l2_ctrl_g_ctrl(cx->cxhdl.audio_mode);
		cx->search_pack_header = 0;
		break;

	case CX18_ENC_STREAM_TYPE_IDX:
		captype = CAPTURE_CHANNEL_TYPE_INDEX;
		break;
	case CX18_ENC_STREAM_TYPE_TS:
		captype = CAPTURE_CHANNEL_TYPE_TS;
		break;
	case CX18_ENC_STREAM_TYPE_YUV:
		captype = CAPTURE_CHANNEL_TYPE_YUV;
		break;
	case CX18_ENC_STREAM_TYPE_PCM:
		captype = CAPTURE_CHANNEL_TYPE_PCM;
		break;
	case CX18_ENC_STREAM_TYPE_VBI:
#ifdef CX18_ENCODER_PARSES_SLICED
		captype = cx18_raw_vbi(cx) ?
			CAPTURE_CHANNEL_TYPE_VBI :
			CAPTURE_CHANNEL_TYPE_SLICED_VBI;
#else
		/* Sliced VBI from the digitizer is handled as Raw VBI by
		   the encoder in this configuration */
		captype = CAPTURE_CHANNEL_TYPE_VBI;
#endif
		cx->vbi.frame = 0;
		cx->vbi.inserted_frame = 0;
		memset(cx->vbi.sliced_mpeg_size,
			0, sizeof(cx->vbi.sliced_mpeg_size));
		break;
	default:
		return -EINVAL;
	}

	/* Clear Streamoff flag in case it was left from a prior capture */
	clear_bit(CX18_F_S_STREAMOFF, &s->s_flags);

	/* Create the firmware task and record its handle for later calls */
	cx18_vapi_result(cx, data, CX18_CREATE_TASK, 1, CPU_CMD_MASK_CAPTURE);
	s->handle = data[0];
	cx18_vapi(cx, CX18_CPU_SET_CHANNEL_TYPE, 2, s->handle, captype);

	/* For everything but TS captures, set up all encoder parameters;
	   it is not obvious which parameters the firmware shares across
	   capture channel types, so set them all consistently */
	if (captype != CAPTURE_CHANNEL_TYPE_TS) {
		cx18_vapi(cx, CX18_CPU_SET_VER_CROP_LINE, 2, s->handle, 0);
		cx18_vapi(cx, CX18_CPU_SET_MISC_PARAMETERS, 3, s->handle, 3, 1);
		cx18_vapi(cx, CX18_CPU_SET_MISC_PARAMETERS, 3, s->handle, 8, 0);
		cx18_vapi(cx, CX18_CPU_SET_MISC_PARAMETERS, 3, s->handle, 4, 1);

		/* Audio related reset, only before the first analog capture */
		if (atomic_read(&cx->ana_capturing) == 0)
			cx18_vapi(cx, CX18_CPU_SET_MISC_PARAMETERS, 2,
				  s->handle, 12);

		/* Number of lines for Field 1 (312) & Field 2 (313) */
		cx18_vapi(cx, CX18_CPU_SET_CAPTURE_LINE_NO, 3,
			  s->handle, 312, 313);

		if (cx->v4l2_cap & V4L2_CAP_VBI_CAPTURE)
			cx18_vbi_setup(s);

		/* Receive I, P, and B frame index entries (7) if the index
		   stream is enabled, otherwise disable index generation (0) */
		s_idx = &cx->streams[CX18_ENC_STREAM_TYPE_IDX];
		cx18_vapi_result(cx, data,
				 CX18_CPU_SET_INDEXTABLE, 2, s->handle,
				 cx18_stream_enabled(s_idx) ? 7 : 0);

		/* Call out to the common CX2341x API setup for user controls */
		cx->cxhdl.priv = s;
		cx2341x_handler_setup(&cx->cxhdl);

		/* When set for radio, ensure the video is muted at capture
		   start, despite the user control */
		if (!cx->cxhdl.video_mute &&
		    test_bit(CX18_F_I_RADIO_USER, &cx->i_flags))
			cx18_vapi(cx, CX18_CPU_SET_VIDEO_MUTE, 2, s->handle,
			  (v4l2_ctrl_g_ctrl(cx->cxhdl.video_mute_yuv) << 8) | 1);

		/* Enable the Video Format Converter for UYVY support (1),
		   otherwise keep the default HM12 support (0) */
		if (captype == CAPTURE_CHANNEL_TYPE_YUV) {
			if (s->pixelformat == V4L2_PIX_FMT_UYVY)
				cx18_vapi(cx, CX18_CPU_SET_VFC_PARAM, 2,
					  s->handle, 1);
			else
				cx18_vapi(cx, CX18_CPU_SET_VFC_PARAM, 2,
					  s->handle, 0);
		}
	}

	/* First capture to start: mark busy and enable DSP interrupts */
	if (atomic_read(&cx->tot_capturing) == 0) {
		cx2341x_handler_set_busy(&cx->cxhdl, 1);
		clear_bit(CX18_F_I_EOS, &cx->i_flags);
		cx18_write_reg(cx, 7, CX18_DSP0_INTERRUPT_MASK);
	}

	/* Tell the firmware where to ack MDLs for this stream type
	   (offsets are relative to the encoder memory window) */
	cx18_vapi(cx, CX18_CPU_DE_SET_MDL_ACK, 3, s->handle,
		(void __iomem *)&cx->scb->cpu_mdl_ack[s->type][0] - cx->enc_mem,
		(void __iomem *)&cx->scb->cpu_mdl_ack[s->type][1] - cx->enc_mem);

	/* Init all the cpu_mdls for this stream */
	cx18_stream_configure_mdls(s);
	_cx18_stream_load_fw_queue(s);

	/* begin_capture */
	if (cx18_vapi(cx, CX18_CPU_CAPTURE_START, 1, s->handle)) {
		CX18_DEBUG_WARN("Error starting capture!\n");
		/* Ensure we're really not capturing before releasing MDLs */
		set_bit(CX18_F_S_STOPPING, &s->s_flags);
		if (s->type == CX18_ENC_STREAM_TYPE_MPG)
			cx18_vapi(cx, CX18_CPU_CAPTURE_STOP, 2, s->handle, 1);
		else
			cx18_vapi(cx, CX18_CPU_CAPTURE_STOP, 1, s->handle);
		clear_bit(CX18_F_S_STREAMING, &s->s_flags);
		cx18_vapi(cx, CX18_CPU_DE_RELEASE_MDL, 1, s->handle);
		cx18_vapi(cx, CX18_DESTROY_TASK, 1, s->handle);
		s->handle = CX18_INVALID_TASK_HANDLE;
		clear_bit(CX18_F_S_STOPPING, &s->s_flags);
		if (atomic_read(&cx->tot_capturing) == 0) {
			set_bit(CX18_F_I_EOS, &cx->i_flags);
			cx18_write_reg(cx, 5, CX18_DSP0_INTERRUPT_MASK);
		}
		return -EINVAL;
	}

	/* Capture is running; bump the capture counters */
	if (captype != CAPTURE_CHANNEL_TYPE_TS)
		atomic_inc(&cx->ana_capturing);
	atomic_inc(&cx->tot_capturing);
	return 0;
}
/*
 * Set up and start an encoder capture for the given ivtv stream.
 *
 * Maps the stream type to firmware capture type/subtype, performs the
 * one-time encoder initialization when this is the first active capture,
 * and issues CX2341X_ENC_START_CAPTURE.  Returns 0 on success or -EINVAL
 * on failure.
 */
int ivtv_start_v4l2_encode_stream(struct ivtv_stream *s)
{
	u32 data[CX2341X_MBOX_MAX_DATA];
	struct ivtv *itv = s->itv;
	int captype = 0, subtype = 0;
	int enable_passthrough = 0;

	if (s->vdev == NULL)
		return -EINVAL;

	IVTV_DEBUG_INFO("Start encoder stream %s\n", s->name);

	switch (s->type) {
	case IVTV_ENC_STREAM_TYPE_MPG:
		captype = 0;
		subtype = 3;

		/* Stop passthrough mode; it is re-enabled after the capture
		   has been started */
		if (itv->output_mode == OUT_PASSTHROUGH) {
			ivtv_passthrough_mode(itv, 0);
			enable_passthrough = 1;
		}
		itv->mpg_data_received = itv->vbi_data_inserted = 0;
		itv->dualwatch_jiffies = jiffies;
		itv->dualwatch_stereo_mode =
			v4l2_ctrl_g_ctrl(itv->cxhdl.audio_mode);
		itv->search_pack_header = 0;
		break;

	case IVTV_ENC_STREAM_TYPE_YUV:
		if (itv->output_mode == OUT_PASSTHROUGH) {
			captype = 2;
			subtype = 11;	/* video+audio+decoder */
			break;
		}
		captype = 1;
		subtype = 1;
		break;
	case IVTV_ENC_STREAM_TYPE_PCM:
		captype = 1;
		subtype = 2;
		break;
	case IVTV_ENC_STREAM_TYPE_VBI:
		captype = 1;
		subtype = 4;

		itv->vbi.frame = 0;
		itv->vbi.inserted_frame = 0;
		memset(itv->vbi.sliced_mpeg_size,
			0, sizeof(itv->vbi.sliced_mpeg_size));
		break;
	default:
		return -EINVAL;
	}
	s->subtype = subtype;
	s->buffers_stolen = 0;

	/* Clear Streamoff flag in case it was left from a prior capture */
	clear_bit(IVTV_F_S_STREAMOFF, &s->s_flags);

	if (atomic_read(&itv->capturing) == 0) {
		int digitizer;

		/* Use frame based DMA mode (1 frame per DMA) rather than
		   byte stream based mode */
		ivtv_vapi(itv, CX2341X_ENC_SET_DMA_BLOCK_SIZE, 2, 1, 1);

		/* NOTE(review): parameter values below mirror the reference
		   Windows driver; their exact meaning is not documented */
		ivtv_vapi(itv, CX2341X_ENC_SET_VERT_CROP_LINE, 1, 0);

		ivtv_vapi(itv, CX2341X_ENC_MISC, 2, 3, !itv->has_cx23415);
		ivtv_vapi(itv, CX2341X_ENC_MISC, 2, 8, 0);
		ivtv_vapi(itv, CX2341X_ENC_MISC, 2, 4, 1);
		ivtv_vapi(itv, CX2341X_ENC_MISC, 1, 12);

		/* assign placeholder */
		ivtv_vapi(itv, CX2341X_ENC_SET_PLACEHOLDER, 12,
			0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);

		/* Pick the vsync line count based on the digitizer chip */
		if (itv->card->hw_all & (IVTV_HW_SAA7115 | IVTV_HW_SAA717X))
			digitizer = 0xF1;
		else if (itv->card->hw_all & IVTV_HW_SAA7114)
			digitizer = 0xEF;
		else /* cx25840 */
			digitizer = 0x140;

		ivtv_vapi(itv, CX2341X_ENC_SET_NUM_VSYNC_LINES, 2,
			  digitizer, digitizer);

		/* Setup VBI */
		if (itv->v4l2_cap & V4L2_CAP_VBI_CAPTURE) {
			ivtv_vbi_setup(itv);
		}

		/* Assign program index info: mask 7 selects I/P/B frames,
		   400 is the maximum number of requested entries */
		ivtv_vapi_result(itv, data,
				 CX2341X_ENC_SET_PGM_INDEX_INFO, 2, 7, 400);
		itv->pgm_info_offset = data[0];
		itv->pgm_info_num = data[1];
		itv->pgm_info_write_idx = 0;
		itv->pgm_info_read_idx = 0;

		IVTV_DEBUG_INFO("PGM Index at 0x%08x with %d elements\n",
				itv->pgm_info_offset, itv->pgm_info_num);

		/* Call out to the common CX2341x API setup for user controls */
		cx2341x_handler_setup(&itv->cxhdl);

		/* Mute video if capturing radio */
		if (test_bit(IVTV_F_I_RADIO_USER, &itv->i_flags))
			ivtv_vapi(itv, CX2341X_ENC_MUTE_VIDEO, 1,
				1 | (v4l2_ctrl_g_ctrl(itv->cxhdl.video_mute_yuv) << 8));
	}

	/* Vsync setup: enable VIM reset event notification (cx23415 only,
	   and only once) */
	if (itv->has_cx23415 &&
	    !test_and_set_bit(IVTV_F_I_DIG_RST, &itv->i_flags)) {
		ivtv_vapi(itv, CX2341X_ENC_SET_EVENT_NOTIFICATION, 4,
			  0, 1, IVTV_IRQ_ENC_VIM_RST, -1);
		ivtv_clear_irq_mask(itv, IVTV_IRQ_ENC_VIM_RST);
	}

	if (atomic_read(&itv->capturing) == 0) {
		/* Mask capture interrupts while initializing */
		ivtv_set_irq_mask(itv, IVTV_IRQ_MASK_CAPTURE);

		clear_bit(IVTV_F_I_EOS, &itv->i_flags);

		cx2341x_handler_set_busy(&itv->cxhdl, 1);

		/* Initialize the digitizer for capture: start the audio
		   subdevice, stop the video subdevice, initialize the
		   encoder input, then restart the video subdevice */
		v4l2_subdev_call(itv->sd_audio, audio, s_stream, 1);
		v4l2_subdev_call(itv->sd_video, video, s_stream, 0);
		ivtv_msleep_timeout(300, 0);
		ivtv_vapi(itv, CX2341X_ENC_INITIALIZE_INPUT, 0);
		v4l2_subdev_call(itv->sd_video, video, s_stream, 1);
	}

	/* begin_capture */
	if (ivtv_vapi(itv, CX2341X_ENC_START_CAPTURE, 2, captype, subtype)) {
		IVTV_DEBUG_WARN("Error starting capture!\n");
		return -EINVAL;
	}

	/* Re-enable passthrough mode if it was stopped above */
	if (enable_passthrough) {
		ivtv_passthrough_mode(itv, 1);
	}

	/* Unmask the interrupts relevant to this capture type */
	if (s->type == IVTV_ENC_STREAM_TYPE_VBI)
		ivtv_clear_irq_mask(itv, IVTV_IRQ_ENC_VBI_CAP);
	else
		ivtv_clear_irq_mask(itv, IVTV_IRQ_MASK_CAPTURE);

	/* Capture is running; bump the capture counter */
	atomic_inc(&itv->capturing);
	return 0;
}
/*
 * Program the W9968CF bridge registers for the currently selected
 * resolution and pixel format (UYVY or JPEG), including the crop window,
 * frame buffer strides, transfer size, JPEG header/quantization tables
 * and the video capture control register.
 */
static void w9968cf_mode_init_regs(struct sd *sd)
{
	int val, vs_polarity, hs_polarity;

	w9968cf_set_crop_window(sd);

	reg_w(sd, 0x14, sd->gspca_dev.pixfmt.width);
	reg_w(sd, 0x15, sd->gspca_dev.pixfmt.height);

	/* JPEG width & height */
	reg_w(sd, 0x30, sd->gspca_dev.pixfmt.width);
	reg_w(sd, 0x31, sd->gspca_dev.pixfmt.height);

	/* Y & UV frame buffer strides (in WORD) */
	if (w9968cf_vga_mode[sd->gspca_dev.curr_mode].pixelformat ==
	    V4L2_PIX_FMT_JPEG) {
		reg_w(sd, 0x2c, sd->gspca_dev.pixfmt.width / 2);
		reg_w(sd, 0x2d, sd->gspca_dev.pixfmt.width / 4);
	} else
		reg_w(sd, 0x2c, sd->gspca_dev.pixfmt.width);

	reg_w(sd, 0x00, 0xbf17);	/* reset everything */
	reg_w(sd, 0x00, 0xbf10);	/* normal operation */

	/* Transfer size in WORDS (for UYVY format only) */
	val = sd->gspca_dev.pixfmt.width * sd->gspca_dev.pixfmt.height;
	reg_w(sd, 0x3d, val & 0xffff);	/* low bits */
	reg_w(sd, 0x3e, val >> 16);	/* high bits */

	if (w9968cf_vga_mode[sd->gspca_dev.curr_mode].pixelformat ==
	    V4L2_PIX_FMT_JPEG) {
		/* We may get called multiple times (usb isoc bw negotiat.) */
		jpeg_define(sd->jpeg_hdr, sd->gspca_dev.pixfmt.height,
			    sd->gspca_dev.pixfmt.width, 0x22); /* JPEG 420 */
		jpeg_set_qual(sd->jpeg_hdr, v4l2_ctrl_g_ctrl(sd->jpegqual));
		w9968cf_upload_quantizationtables(sd);
		/* Lock the quality control while streaming in JPEG mode */
		v4l2_ctrl_grab(sd->jpegqual, true);
	}

	/* Video Capture Control Register */
	if (sd->sensor == SEN_OV7620) {
		/* Seems to work around a bug in the image sensor */
		vs_polarity = 1;
		hs_polarity = 1;
	} else {
		vs_polarity = 1;
		hs_polarity = 0;
	}

	val = (vs_polarity << 12) | (hs_polarity << 11);

	/* NOTE: We may not have enough memory to do double buffering while
	   doing compression (amount of memory differs per model cam).
	   So we use the second image buffer also as jpeg stream buffer
	   (see w9968cf_init), and disable double buffering. */
	if (w9968cf_vga_mode[sd->gspca_dev.curr_mode].pixelformat ==
	    V4L2_PIX_FMT_JPEG) {
		/* val |= 0x0002; YUV422P */
		val |= 0x0003;	/* YUV420P */
	} else
		val |= 0x0080;	/* Enable HW double buffering */

	/* val |= 0x0020; enable clamping */
	/* val |= 0x0008; enable (1-2-1) filter */
	/* val |= 0x000c; enable (2-3-6-3-2) filter */

	val |= 0x8000;		/* capt. enable */
	reg_w(sd, 0x16, val);

	sd->gspca_dev.empty_packet = 0;
}