Example #1
File: picture.c Project: M3nace/Prelude
void put_picture(struct context *cnt, char *file, unsigned char *image, int ftype)
{
    FILE *picture;

    picture = myfopen(file, "w");
    if (!picture) {
        /* Report to syslog - suggest solution if the problem is access rights to target dir */
        if (errno ==  EACCES) {
            motion_log(LOG_ERR, 1,
                       "Can't write picture to file %s - check access rights to target directory", file);
            motion_log(LOG_ERR, 1, "Thread is going to finish due to this fatal error");
            cnt->finish = 1;
            cnt->restart = 0;
            return;
        } else {
            /* If target dir is temporarily unavailable we may survive */
            motion_log(LOG_ERR, 1, "Can't write picture to file %s", file);
            return;
        }
    }

    put_picture_fd(cnt, picture, image, cnt->conf.quality);
    fclose(picture);
    event(cnt, EVENT_FILECREATE, NULL, file, (void *)(unsigned long)ftype, NULL);
}
Example #2
static int set_input(struct video_dev *viddev, unsigned short input)
{
    int actport;
    int portdata[] = { METEOR_INPUT_DEV0, METEOR_INPUT_DEV1,
                       METEOR_INPUT_DEV2, METEOR_INPUT_DEV3,
                       METEOR_INPUT_DEV_SVIDEO};

    if (input >= array_elem(portdata)) {
        motion_log(LOG_INFO, 0, "%s: Channel Port %d out of range (0-4)", __FUNCTION__, input);
        return -1;
    }

    actport = portdata[input];
    if (ioctl(viddev->fd_bktr, METEORSINPUT, &actport) < 0) {
        if (input != IN_DEFAULT) {
            motion_log(LOG_INFO, 1, "%s: METEORSINPUT %d invalid - Trying default %d", 
                       __FUNCTION__, input, IN_DEFAULT);
            input = IN_DEFAULT;
            actport = portdata[input];
            if (ioctl(viddev->fd_bktr, METEORSINPUT, &actport) < 0) {
                motion_log(LOG_ERR, 1, "%s: METEORSINPUT %d init", __FUNCTION__, input);
                return -1;
            }
        } else {
            motion_log(LOG_ERR, 1, "%s: METEORSINPUT %d init", __FUNCTION__, input);
            return -1;
        }
    }

    if (debug_level >= CAMERA_VIDEO)
        motion_log(-1, 0, "%s: to [%d]", __FUNCTION__, input);
    
    return input;
}
Example #3
static int set_freq(struct video_dev *viddev, unsigned long freq)
{
    int tuner_fd = viddev->fd_tuner;
    int old_audio;

    motion_log(LOG_DEBUG, 0, "%s: Not implemented", __FUNCTION__);
    
    return 0; 
    
    /* HACK: may not be needed, but it seems we have to mute before changing the frequency */

    if (ioctl(tuner_fd, BT848_GAUDIO, &old_audio) < 0) {
        motion_log(LOG_ERR, 1, "%s: BT848_GAUDIO", __FUNCTION__);
        return -1;
    }
    
    if (ioctl(tuner_fd, TVTUNER_SETFREQ, &freq) < 0) {
        motion_log(LOG_ERR, 1, "%s: Tuning (TVTUNER_SETFREQ) failed , freq [%lu]", __FUNCTION__, freq);
        return -1;
    }

    old_audio &= AUDIO_MUTE;
    if (old_audio) {
        old_audio = AUDIO_MUTE;
        if (ioctl(tuner_fd , BT848_SAUDIO, &old_audio) < 0) {
            motion_log(LOG_ERR, 1, "%s: BT848_SAUDIO %i", __FUNCTION__, old_audio);
            return -1;
        }
    }
    
    return 0;
}
Example #4
/* This function sets up a TCP/IP socket for incoming requests. It is called only during
 * initialisation of Motion from the function webcam_init.
 * The function sets up a socket on the port number given by _port_.
 * If the parameter _local_ is not zero the socket is set up to only accept connections from localhost.
 * Otherwise any client IP address is accepted. The function returns an integer representing the socket.
 */
int http_bindsock(int port, int local)
{
    int sl, optval = 1;
    struct sockaddr_in sin;

    if ((sl = socket(PF_INET, SOCK_STREAM, 0)) < 0) {
        motion_log(LOG_ERR, 1, "socket()");
        return -1;
    }

    memset(&sin, 0, sizeof(struct sockaddr_in));
    sin.sin_family=AF_INET;
    sin.sin_port=htons(port);
    
    if (local)
        sin.sin_addr.s_addr=htonl(INADDR_LOOPBACK);
    else
        sin.sin_addr.s_addr=htonl(INADDR_ANY);

    setsockopt(sl, SOL_SOCKET, SO_REUSEADDR, &optval, sizeof(optval));

    if (bind(sl, (struct sockaddr *)&sin, sizeof(struct sockaddr_in)) == -1) {
        motion_log(LOG_ERR, 1, "bind()");
        close(sl);
        return -1;
    }

    if (listen(sl, DEF_MAXWEBQUEUE) == -1) {
        motion_log(LOG_ERR, 1, "listen()");
        close(sl);
        return -1;
    }

    return sl;
}
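
A minimal sketch of how the returned listen socket is meant to be consumed; the wrapper function, the loop and the combination with http_acceptsock and webcam_add_client (both shown in later examples) are illustrative assumptions, not code taken from webcam_init.

static int webcam_listen_sketch(struct context *cnt, int port, int local)
{
    int sl = http_bindsock(port, local);   /* bind once at startup */

    if (sl < 0)
        return -1;

    for (;;) {
        int sc = http_acceptsock(sl);      /* non-blocking client socket, or -1 */

        if (sc >= 0)
            webcam_add_client(&cnt->webcam, sc);
    }
}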
Example #5
static int set_input_format(struct video_dev *viddev, unsigned short newformat) 
{
    int input_format[] = { NORM_PAL_NEW, NORM_NTSC_NEW, NORM_SECAM_NEW, NORM_DEFAULT_NEW};
    int format;
 
    if (newformat >= array_elem( input_format )) {
        motion_log(LOG_WARNING, 0, "%s: Input format %d out of range (0-2)", __FUNCTION__, newformat);
        return -1;
    } 

    format = input_format[newformat]; 

    if (ioctl( viddev->fd_bktr, BT848SFMT, &format) < 0) {
        motion_log(LOG_ERR, 1, "%s: BT848SFMT, Couldn't set the input format , try again with default",
                   __FUNCTION__);
        format = NORM_DEFAULT_NEW;
        newformat = 3;
        
        if (ioctl(viddev->fd_bktr, BT848SFMT, &format) < 0) {
            motion_log(LOG_ERR, 1, "%s: BT848SFMT, Couldn't set the input format either default", 
                       __FUNCTION__);
            return -1;
        }
    }

    if (debug_level >= CAMERA_VIDEO)
        motion_log(-1, 0, "%s: to %d", __FUNCTION__, newformat);
        
    return newformat;
}
Example #6
/* This function is called from the motion_loop when it ends
 * and motion is terminated or restarted
 */
void webcam_stop(struct context *cnt)
{    
    struct webcam *list;
    struct webcam *next = cnt->webcam.next;

    if (cnt->conf.setup_mode)
        motion_log(-1, 0, "Closing webcam listen socket");
    
    close(cnt->webcam.socket);
    cnt->webcam.socket = -1;
    
    if (cnt->conf.setup_mode)
        motion_log(LOG_INFO, 0, "Closing active webcam sockets");

    while (next) {
        list = next;
        next = list->next;
        
        if (list->tmpbuffer) {
            free(list->tmpbuffer->ptr);
            free(list->tmpbuffer);
        }
        
        close(list->socket);
        free(list);
    }
}
Example #7
static unsigned short int stepper_center(struct context *cnt, int x_offset, int y_offset)
{
    struct termios adtio;

    if (cnt->track.dev < 0) {
        motion_log(LOG_INFO, 0, "Try to open serial device %s", cnt->track.port);
        
        if ((cnt->track.dev=open(cnt->track.port, O_RDWR | O_NOCTTY)) < 0) {
            motion_log(LOG_ERR, 1, "Unable to open serial device %s", cnt->track.port);
            return 0;
        }

        bzero (&adtio, sizeof(adtio));
        adtio.c_cflag = STEPPER_BAUDRATE | CS8 | CLOCAL | CREAD;
        adtio.c_iflag = IGNPAR;
        adtio.c_oflag = 0;
        adtio.c_lflag = 0;    /* non-canon, no echo */
        adtio.c_cc[VTIME] = 0;    /* timer unused */
        adtio.c_cc[VMIN] = 0;    /* non-blocking read, return immediately */
        tcflush (cnt->track.dev, TCIFLUSH);

        if (tcsetattr(cnt->track.dev, TCSANOW, &adtio) < 0) {
            motion_log(LOG_ERR, 1, "Unable to initialize serial device %s", cnt->track.port);
            return 0;
        }
        motion_log(LOG_INFO, 0, "Opened serial device %s and initialize, fd %i", 
                   cnt->track.port, cnt->track.dev);
    }

    /* x-axis */
    
    stepper_command(cnt, cnt->track.motorx, STEPPER_COMMAND_SPEED, cnt->track.speed);
    stepper_command(cnt, cnt->track.motorx, STEPPER_COMMAND_LEFT_N, cnt->track.maxx);

    while (stepper_status(cnt, cnt->track.motorx) & STEPPER_STATUS_LEFT);

    stepper_command(cnt, cnt->track.motorx, STEPPER_COMMAND_RIGHT_N,
                    cnt->track.maxx / 2 + x_offset * cnt->track.stepsize);

    while (stepper_status(cnt, cnt->track.motorx) & STEPPER_STATUS_RIGHT);

    /* y-axis */

    stepper_command(cnt, cnt->track.motory, STEPPER_COMMAND_SPEED, cnt->track.speed);
    stepper_command(cnt, cnt->track.motory, STEPPER_COMMAND_UP_N, cnt->track.maxy);

    while (stepper_status(cnt, cnt->track.motory) & STEPPER_STATUS_UP);
    
    stepper_command(cnt, cnt->track.motory, STEPPER_COMMAND_DOWN_N,
                    cnt->track.maxy / 2 + y_offset * cnt->track.stepsize);
        
    while (stepper_status(cnt, cnt->track.motory) & STEPPER_STATUS_DOWN);
    
    return cnt->track.move_wait;
}
Example #8
static unsigned short int stepper_move(struct context *cnt, struct coord *cent, 
                                       struct images *imgs)
{
    unsigned short int command = 0, data = 0;

    if (cnt->track.dev < 0) {
        motion_log(LOG_INFO, 0, "No device %s started yet , trying stepper_center()", cnt->track.port);    
        if (!stepper_center(cnt, 0, 0)){
            motion_log(LOG_ERR, 1, "Stepper_center() failed to initialize stepper device on %s , fd [%i].", 
                                    cnt->track.port, cnt->track.dev);    
            return 0;
        }
        motion_log(LOG_INFO, 0, "stepper_center() succeed , device started %s , fd [%i]", 
                   cnt->track.port, cnt->track.dev);    
    }

    /* x-axis */
    
    if (cent->x < imgs->width / 2) {
        command = STEPPER_COMMAND_LEFT_N;
        data = imgs->width / 2 - cent->x;
    }

    if (cent->x > imgs->width / 2) {
        command = STEPPER_COMMAND_RIGHT_N;
        data = cent->x - imgs->width / 2;
    }

    data = data * cnt->track.stepsize / imgs->width;

    if (data) 
        stepper_command(cnt, cnt->track.motorx, command, data);

    /* y-axis */

    if (cent->y < imgs->height / 2) {
        command = STEPPER_COMMAND_UP_N;
        data = imgs->height / 2 - cent->y;
    }

    if (cent->y > imgs->height / 2) {
        command = STEPPER_COMMAND_DOWN_N;
        data = cent->y - imgs->height / 2;
    }
    
    data = data * cnt->track.stepsize / imgs->height;

    if (data) 
        stepper_command(cnt, cnt->track.motory, command, data);    
    
    
    return cnt->track.move_wait;
}
Example #9
static int set_hue(int viddev, int new_hue)
{
    signed char ioctlval = new_hue;

    if (ioctl(viddev, METEORSHUE, &ioctlval) < 0) {
                motion_log(LOG_ERR, 1, "%s: METEORSHUE Error setting hue [%d]", __FUNCTION__, new_hue);
                return -1;
        }

    if (debug_level >= CAMERA_VIDEO)
        motion_log(-1, 0, "%s: to [%d]", __FUNCTION__, ioctlval);

    return ioctlval;
}
Example #10
static int set_brightness(int viddev, int new_bright)
{
    unsigned char ioctlval = new_bright;

    if (ioctl(viddev, METEORSBRIG, &ioctlval) < 0) {
        motion_log(LOG_ERR, 1, "%s: METEORSBRIG  brightness [%d]", __FUNCTION__, new_bright);
        return -1;
    }

    if (debug_level >= CAMERA_VIDEO)
        motion_log(-1, 0, "%s: to [%d]", __FUNCTION__, ioctlval);
    
    return ioctlval;
}
Example #11
static int set_contrast(int viddev, int new_contrast) 
{
    unsigned char ioctlval = new_contrast;

    if (ioctl(viddev, METEORSCONT, &ioctlval) < 0) {
        motion_log(LOG_ERR, 1, "%s: METEORSCONT Error setting contrast [%d]", 
                   __FUNCTION__, new_contrast);
        return 0;
    }

    if (debug_level >= CAMERA_VIDEO)
        motion_log(-1, 0, "%s: to [%d]", __FUNCTION__, ioctlval);

    return ioctlval;
}
Example #12
static int get_contrast(int viddev, int *contrast)
{
    unsigned char ioctlval;

    if (ioctl(viddev, METEORGCONT, &ioctlval) < 0) {
        motion_log(LOG_ERR, 1, "%s: METEORGCONT Error getting contrast", __FUNCTION__);
        return -1;
    }

    if (debug_level >= CAMERA_VIDEO)
        motion_log(-1, 0, "%s: to [%d]", __FUNCTION__, ioctlval);
    
    *contrast = ioctlval; 
    return ioctlval;
}
Example #13
static int get_brightness(int viddev, int *brightness)
{
    unsigned char ioctlval;

    if (ioctl(viddev, METEORGBRIG, &ioctlval) < 0) {
        motion_log(LOG_ERR, 1, "%s: METEORGBRIG  getting brightness", __FUNCTION__);
        return -1;
    }

    if (debug_level >= CAMERA_VIDEO)
        motion_log(-1, 0, "%s: to [%d]", __FUNCTION__, ioctlval);
    
    *brightness = ioctlval;
    return ioctlval;
}
Example #14
static int set_saturation(int viddev, int new_saturation) 
{
    unsigned char ioctlval= new_saturation;

    if (ioctl(viddev, METEORSCSAT, &ioctlval) < 0) {
        motion_log(LOG_ERR, 1, "%s: METEORSCSAT Error setting saturation [%d]", 
                   __FUNCTION__, new_saturation);
        return -1;
    }

    if (debug_level >= CAMERA_VIDEO)
        motion_log(-1, 0, "%s: to [%d]", __FUNCTION__, ioctlval);

    return ioctlval;
}
Example #15
static int get_hue(int viddev , int *hue)
{
    signed char ioctlval;

    if (ioctl(viddev, METEORGHUE, &ioctlval) < 0) {
        motion_log(LOG_ERR, 1, "%s: METEORGHUE Error getting hue", __FUNCTION__);
        return -1;
    }

    if (debug_level >= CAMERA_VIDEO)
        motion_log(-1, 0, "%s: to [%d]", __FUNCTION__, ioctlval);
    
    *hue = ioctlval; 
    return ioctlval;
}
Example #16
static unsigned short int lqos_center(struct context *cnt, int dev, int x_angle, int y_angle)
{
    int reset = 3;
    struct pwc_mpt_angles pma;
    struct pwc_mpt_range pmr;

    if (cnt->track.dev == -1) {

        if (ioctl(dev, VIDIOCPWCMPTRESET, &reset) == -1) {
            motion_log(LOG_ERR, 1, "Failed to reset pwc camera to starting position! Reason");
            return 0;
        }

        SLEEP(6,0)

        if (ioctl(dev, VIDIOCPWCMPTGRANGE, &pmr) == -1) {
            motion_log(LOG_ERR, 1, "failed VIDIOCPWCMPTGRANGE");
            return 0;
        }

        cnt->track.dev = dev;
        cnt->track.minmaxfound = 1;
        cnt->track.panmin = pmr.pan_min;
        cnt->track.panmax = pmr.pan_max;
        cnt->track.tiltmin = pmr.tilt_min;
        cnt->track.tiltmax = pmr.tilt_max;
    }

    if (ioctl(dev, VIDIOCPWCMPTGANGLE, &pma) == -1)
        motion_log(LOG_ERR, 1, "ioctl VIDIOCPWCMPTGANGLE");
    
    pma.absolute = 1;

    if (x_angle * 100 < cnt->track.panmax && x_angle * 100 > cnt->track.panmin)
        pma.pan = x_angle * 100;

    if (y_angle * 100 < cnt->track.tiltmax && y_angle * 100 > cnt->track.tiltmin)
        pma.tilt = y_angle * 100;

    if (ioctl(dev, VIDIOCPWCMPTSANGLE, &pma) == -1) {
        motion_log(LOG_ERR, 1, "Failed to pan/tilt pwc camera! Reason");
        return 0;
    }

    motion_log(LOG_INFO, 0, "lqos_center succeed");

    return cnt->track.move_wait;
}
Example #17
File: ffmpeg.c Project: Winddoing/motion
/** ffmpeg_deinterlace
 *      Make the image suitable for deinterlacing using ffmpeg, then deinterlace the picture.
 * 
 * Parameters
 *      img     image in YUV420P format
 *      width   image width in pixels
 *      height  image height in pixels
 *
 * Returns
 *      Function returns nothing.
 *      img     contains deinterlaced image
 */
void ffmpeg_deinterlace(unsigned char *img, int width, int height)
{
    AVFrame *picture;
    int width2 = width / 2;
    
    picture = avcodec_alloc_frame();
    if (!picture) {
        motion_log(LOG_ERR, 1, "Could not alloc frame");
        return;
    }
    
    picture->data[0] = img;
    picture->data[1] = img+width*height;
    picture->data[2] = picture->data[1]+(width*height)/4;
    picture->linesize[0] = width;
    picture->linesize[1] = width2;
    picture->linesize[2] = width2;
    
    /* We assume using 'PIX_FMT_YUV420P' always */
    avpicture_deinterlace((AVPicture *)picture, (AVPicture *)picture, PIX_FMT_YUV420P, width, height);
    
    av_free(picture);
    
    return;
}
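
A minimal usage sketch, assuming a planar YUV420P buffer of width * height * 3 / 2 bytes; the buffer name and how it gets filled are hypothetical. The image is deinterlaced in place.

unsigned char *frame = mymalloc(width * height * 3 / 2);  /* YUV420P: Y plane plus quarter-size U and V planes */

/* ... capture or copy a YUV420P image into frame ... */

ffmpeg_deinterlace(frame, width, height);                 /* frame now holds the deinterlaced image */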
Example #18
static boolean empty_output_buffer(j_compress_ptr cinfo)
{
    /*FIXME: */
    motion_log(LOG_ERR, 0, "%s: Given jpeg buffer was too small", __FUNCTION__);
    ERREXIT (cinfo, JERR_BUFFER_SIZE);	/* shouldn't be FILE_WRITE but BUFFER_OVERRUN! */
    return TRUE;
}
Example #19
static void webcam_add_client(struct webcam *list, int sc)
{
    struct webcam *_new = (struct webcam*)mymalloc(sizeof(struct webcam));
    static const char header[] = "HTTP/1.0 200 OK\r\n"
            "Server: Motion/"VERSION"\r\n"
            "Connection: close\r\n"
            "Max-Age: 0\r\n"
            "Expires: 0\r\n"
            "Cache-Control: no-cache, private\r\n"
            "Pragma: no-cache\r\n"
            "Content-Type: multipart/x-mixed-replace; boundary=--BoundaryString\r\n\r\n";

    memset(_new, 0, sizeof(struct webcam));
    _new->socket = sc;
    
    if ((_new->tmpbuffer = webcam_tmpbuffer(sizeof(header))) == NULL) {
        motion_log(LOG_ERR, 1, "Error creating tmpbuffer in webcam_add_client");
    } else {
        memcpy(_new->tmpbuffer->ptr, header, sizeof(header)-1);
        _new->tmpbuffer->size = sizeof(header)-1;
    }
    
    _new->prev = list;
    _new->next = list->next;
    
    if (_new->next)
        _new->next->prev=_new;
    
    list->next = _new;
}
Example #20
static void add_huff_table(j_decompress_ptr dinfo, JHUFF_TBL **htblptr, 
                           const UINT8 *bits, const UINT8 *val)
/* Define a Huffman table */
{
    int nsymbols, len;

    if (*htblptr == NULL)
        *htblptr = jpeg_alloc_huff_table((j_common_ptr) dinfo);

    /* Copy the number-of-symbols-of-each-code-length counts */
    memcpy((*htblptr)->bits, bits, sizeof((*htblptr)->bits));

    /* Validate the counts.  We do this here mainly so we can copy the right
     * number of symbols from the val[] array, without risking marching off
     * the end of memory.  jchuff.c will do a more thorough test later.
     */
    nsymbols = 0;

    for (len = 1; len <= 16; len++)
        nsymbols += bits[len];

    if (nsymbols < 1 || nsymbols > 256)
        motion_log(LOG_ERR, 0, "%s: Given jpeg buffer was too small",
                   __FUNCTION__);

    memcpy((*htblptr)->huffval, val, nsymbols * sizeof(UINT8));
}
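
A sketch of how add_huff_table is typically invoked to install the standard JPEG DC luminance table into a decompressor that has none; the table data is the standard data from the JPEG specification, while the wrapper name std_huff_tables is hypothetical.

static const UINT8 bits_dc_luminance[17] =
    { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
static const UINT8 val_dc_luminance[] =
    { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };

static void std_huff_tables(j_decompress_ptr dinfo)
{
    add_huff_table(dinfo, &dinfo->dc_huff_tbl_ptrs[0],
                   bits_dc_luminance, val_dc_luminance);
    /* the AC luminance and the chrominance tables are added the same way */
}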
Example #21
File: ffmpeg.c Project: Winddoing/motion
/* Allocates and prepares a picture frame by setting up the Y, U and V pointers in
 * the frame according to the passed pointers.
 *
 * Returns NULL if the allocation fails.
 *
 * The returned AVFrame pointer must be freed after use.
 */
AVFrame *ffmpeg_prepare_frame(struct ffmpeg *ffmpeg, unsigned char *y,
                              unsigned char *u, unsigned char *v)
{
    AVFrame *picture;

    picture = avcodec_alloc_frame();
    if (!picture) {
        motion_log(LOG_ERR, 1, "Could not alloc frame");
        return NULL;
    }

    /* take care of variable bitrate setting */
    if (ffmpeg->vbr) 
        picture->quality = ffmpeg->vbr;
    
    
    /* setup pointers and line widths */
    picture->data[0] = y;
    picture->data[1] = u;
    picture->data[2] = v;
    picture->linesize[0] = ffmpeg->c->width;
    picture->linesize[1] = ffmpeg->c->width / 2;
    picture->linesize[2] = ffmpeg->c->width / 2;

    return picture;
}
Example #22
static void iomojo_setspeed(struct context *cnt, unsigned short int speed)
{
    char command[3];
    
    command[0] = IOMOJO_SETSPEED_CMD;
    command[1] = cnt->track.iomojo_id;
    command[2] = speed;
    
    if (iomojo_command(cnt, command, 3, 1) != IOMOJO_SETSPEED_RET)
        motion_log(LOG_ERR, 1, "Unable to set camera speed");
}
Example #23
File: ffmpeg.c Project: Winddoing/motion
/* Encodes and writes a video frame using the av_write_frame API. This is
 * a helper function for ffmpeg_put_image and ffmpeg_put_other_image. 
 */
void ffmpeg_put_frame(struct ffmpeg *ffmpeg, AVFrame *pic)
{
    int out_size, ret;
#ifdef FFMPEG_AVWRITEFRAME_NEWAPI
    AVPacket pkt;

    av_init_packet(&pkt); /* init static structure */
    pkt.stream_index = ffmpeg->video_st->index;
#endif /* FFMPEG_AVWRITEFRAME_NEWAPI */

    if (ffmpeg->oc->oformat->flags & AVFMT_RAWPICTURE) {
        /* raw video case. The API will change slightly in the near future for that */
#ifdef FFMPEG_AVWRITEFRAME_NEWAPI
        pkt.flags |= PKT_FLAG_KEY;
        pkt.data = (uint8_t *)pic;
        pkt.size = sizeof(AVPicture);
        ret = av_write_frame(ffmpeg->oc, &pkt);
#else
        ret = av_write_frame(ffmpeg->oc, ffmpeg->video_st->index,
            (uint8_t *)pic, sizeof(AVPicture));
#endif /* FFMPEG_AVWRITEFRAME_NEWAPI */
    } else {
        /* encode the image */
        out_size = avcodec_encode_video(AVSTREAM_CODEC_PTR(ffmpeg->video_st),
                                        ffmpeg->video_outbuf, 
                                        ffmpeg->video_outbuf_size, pic);

        /* if zero size, it means the image was buffered */
        if (out_size != 0) {
            /* write the compressed frame in the media file */
            /* XXX: in case of B frames, the pts is not yet valid */
#ifdef FFMPEG_AVWRITEFRAME_NEWAPI
            pkt.pts = AVSTREAM_CODEC_PTR(ffmpeg->video_st)->coded_frame->pts;
            if (AVSTREAM_CODEC_PTR(ffmpeg->video_st)->coded_frame->key_frame) {
                pkt.flags |= PKT_FLAG_KEY;
            }
            pkt.data = ffmpeg->video_outbuf;
            pkt.size = out_size;
            ret = av_write_frame(ffmpeg->oc, &pkt);
#else
            ret = av_write_frame(ffmpeg->oc, ffmpeg->video_st->index, 
                                 ffmpeg->video_outbuf, out_size);
#endif /* FFMPEG_AVWRITEFRAME_NEWAPI */
        } else {
            ret = 0;
        }
    }
    
    if (ret != 0) {
        motion_log(LOG_ERR, 1, "Error while writing video frame");
        return;
    }
}
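
A minimal sketch of how ffmpeg_prepare_frame (Example #21) and ffmpeg_put_frame combine; the plane pointer names are hypothetical, and the caller releases the frame afterwards as required by Example #21.

AVFrame *pic = ffmpeg_prepare_frame(ffmpeg, y_plane, u_plane, v_plane);

if (pic) {
    ffmpeg_put_frame(ffmpeg, pic);   /* encode and write one video frame */
    av_free(pic);                    /* the prepared frame must be freed by the caller */
}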
Example #24
/**
 * v4l_next fetches a video frame from a v4l device
 * Parameters:
 *     viddev     Pointer to struct containing video device handle
 *     map        Pointer to the buffer in which the function puts the new image
 *     width      Width of image in pixels
 *     height     Height of image in pixels
 *
 * Returns
 *     0          Success
 *    -1          Fatal error
 *     1          Non fatal error (not implemented)
 */
static int v4l_next(struct video_dev *viddev, unsigned char *map, int width, int height)
{
    int dev_bktr = viddev->fd_bktr;
    unsigned char *cap_map = NULL;
    int single = METEOR_CAP_SINGLE;
    sigset_t set, old;


    /* ONLY MMAP method is used to Capture */

    /* Allocate a new mmap buffer */
    /* Block signals during IOCTL */
    sigemptyset (&set);
    sigaddset (&set, SIGCHLD);
    sigaddset (&set, SIGALRM);
    sigaddset (&set, SIGUSR1);
    sigaddset (&set, SIGTERM);
    sigaddset (&set, SIGHUP);
    pthread_sigmask(SIG_BLOCK, &set, &old);
    cap_map = viddev->v4l_buffers[viddev->v4l_curbuffer];

    viddev->v4l_curbuffer++;
    if (viddev->v4l_curbuffer >= viddev->v4l_maxbuffer)
        viddev->v4l_curbuffer = 0;

    /* capture */
    
    if (viddev->capture_method == METEOR_CAP_CONTINOUS) {
        if (bktr_frame_waiting) 
            bktr_frame_waiting = 0;    
            
    } else if (ioctl(dev_bktr, METEORCAPTUR, &single) < 0) {
        motion_log(LOG_ERR, 1, "%s: Error capturing using single method", __FUNCTION__);
        sigprocmask(SIG_UNBLOCK, &old, NULL);
        return -1;
    }

    /*undo the signal blocking*/
    pthread_sigmask(SIG_UNBLOCK, &old, NULL);
    
    switch (viddev->v4l_fmt) {
    case VIDEO_PALETTE_RGB24:
        rgb24toyuv420p(map, cap_map, width, height);
        break;
    case VIDEO_PALETTE_YUV422:
        yuv422to420p(map, cap_map, width, height);
        break;
    default:
        memcpy(map, cap_map, viddev->v4l_bufsize);
    }
    
    return 0;
}
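
A minimal sketch (the caller and the recovery action are assumptions) of how the return codes documented above are meant to be handled:

if (v4l_next(viddev, map, width, height) == -1) {
    /* -1 is a fatal capture error: a real caller would close
     * viddev->fd_bktr and mark the device as lost here */
    return -1;
}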
Example #25
static int set_geometry(struct video_dev *viddev, int width, int height)
{
    struct meteor_geomet geom;
    int h_max;

    geom.columns = width;
    geom.rows = height;

    geom.oformat = METEOR_GEO_YUV_422 | METEOR_GEO_YUV_12;


    switch (viddev->norm) {
    case PAL:   
        h_max = PAL_HEIGHT;  
        break;
    case NTSC:  
        h_max = NTSC_HEIGHT; 
        break;
    case SECAM: 
        h_max = SECAM_HEIGHT;
        break;
    default:    
        h_max = PAL_HEIGHT;
    }

    if (height <= h_max / 2) 
        geom.oformat |= METEOR_GEO_EVEN_ONLY;

    geom.frames = 1;

    if (ioctl(viddev->fd_bktr, METEORSETGEO, &geom) < 0) {
        motion_log(LOG_ERR, 1, "%s: Couldn't set the geometry", __FUNCTION__);
        return -1;
    }

    if (debug_level >= CAMERA_VIDEO)
        motion_log(-1, 0, "%s: to [%d/%d] Norm %d", __FUNCTION__, width, height, viddev->norm);        
    
    return 0;
}
Example #26
File: picture.c Project: M3nace/Prelude
/* If a mask file is asked for but does not exist this function
 * creates an empty mask file in the right binary pgm format and
 * the right size - easy to edit with Gimp or similar tool.
 */
void put_fixed_mask(struct context *cnt, const char *file)
{
    FILE *picture;

    picture = myfopen(file, "w");
    
    if (!picture) {
        /* Report to syslog - suggest solution if the problem is access rights to target dir */
        if (errno ==  EACCES) {
            motion_log(LOG_ERR, 1,
                       "can't write mask file %s - check access rights to target directory", file);
        } else {
            /* If target dir is temporarily unavailable we may survive */
            motion_log(LOG_ERR, 1, "can't write mask file %s", file);
        }
        return;
    }

    memset(cnt->imgs.out, 255, cnt->imgs.motionsize); /* initialize to unset */
    
    /* Write pgm-header */
    fprintf(picture, "P5\n");
    fprintf(picture, "%d %d\n", cnt->conf.width, cnt->conf.height);
    fprintf(picture, "%d\n", 255);
    
    /* write pgm image data at once */
    if ((int)fwrite(cnt->imgs.out, cnt->conf.width, cnt->conf.height, picture) != cnt->conf.height) {
        motion_log(LOG_ERR, 1, "Failed writing default mask as pgm file");
        return;
    }
    
    fclose(picture);

    motion_log(LOG_ERR, 0, "Creating empty mask %s",cnt->conf.mask_file);
    motion_log(LOG_ERR, 0, "Please edit this file and re-run motion to enable mask feature");
}
Example #27
File: picture.c Project: M3nace/Prelude
/* put_picture_mem is used for the webcam feature. Depending on the image type
 * (colour YUV420P or greyscale) the corresponding put_jpeg_X_memory function is called.
 * Inputs:
 * - cnt is the global context struct and only cnt->imgs.type is used.
 * - image_size is the size of the input image buffer
 * - *image points to the image buffer that contains the YUV420P or Grayscale image about to be put
 * - quality is the jpeg quality setting from the config file.
 * Output:
 * - dest_image points to the destination buffer in which the converted image is put
 * The function returns the size of the converted image if successful. Otherwise 0.
 */
int put_picture_memory(struct context *cnt, unsigned char* dest_image, int image_size,
                       unsigned char *image, int quality)
{
    switch (cnt->imgs.type) {
    case VIDEO_PALETTE_YUV420P:
        return put_jpeg_yuv420p_memory(dest_image, image_size, image,
                                       cnt->imgs.width, cnt->imgs.height, quality);
    case VIDEO_PALETTE_GREY:
        return put_jpeg_grey_memory(dest_image, image_size, image,
                                    cnt->imgs.width, cnt->imgs.height, quality);
    default:
        motion_log(LOG_ERR, 0, "Unknow image type %d", cnt->imgs.type);            
    }

    return 0;
}
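
A minimal usage sketch; the choice of cnt->imgs.size as the destination buffer size and the variable names are assumptions, not taken from the webcam code.

unsigned char *dest_image = mymalloc(cnt->imgs.size);
int jpeg_size = put_picture_memory(cnt, dest_image, cnt->imgs.size,
                                   image, cnt->conf.quality);

if (jpeg_size > 0) {
    /* dest_image now holds a jpeg of jpeg_size bytes, ready to be
     * sent to a webcam client */
}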
Example #28
static int http_acceptsock(int sl)
{
    int sc;
    unsigned long i;
    struct sockaddr_in sin;
    socklen_t addrlen = sizeof(struct sockaddr_in);

    if ((sc = accept(sl, (struct sockaddr *)&sin, &addrlen)) >= 0) {
        i = 1;
        ioctl(sc, FIONBIO, &i);
        return sc;
    }
    
    motion_log(LOG_ERR, 1, "accept()");

    return -1;
}
Example #29
File: picture.c Project: M3nace/Prelude
void put_picture_fd(struct context *cnt, FILE *picture, unsigned char *image, int quality)
{
    if (cnt->conf.ppm) {
        put_ppm_bgr24_file(picture, image, cnt->imgs.width, cnt->imgs.height);
    } else {
        switch (cnt->imgs.type) {
        case VIDEO_PALETTE_YUV420P:
            put_jpeg_yuv420p_file(picture, image, cnt->imgs.width, cnt->imgs.height, quality);
            break;
        case VIDEO_PALETTE_GREY:
            put_jpeg_grey_file(picture, image, cnt->imgs.width, cnt->imgs.height, quality);
            break;
        default :
            motion_log(LOG_ERR, 0, "Unknow image type %d", cnt->imgs.type);
        }
    }
}
Example #30
int vid_start(struct context *cnt)
{
    struct config *conf = &cnt->conf;
    int fd_bktr = -1;

    if (conf->netcam_url) {
        fd_bktr = netcam_start(cnt);
        if (fd_bktr < 0) {
            netcam_cleanup(cnt->netcam, 1);
            cnt->netcam = NULL;
        }
    }    
#ifdef WITHOUT_V4L
    else 
        motion_log(LOG_ERR, 0, "%s: You must setup netcam_url", __FUNCTION__);    
#else
    else {