Example #1
bool obs_reset_video(struct obs_video_info *ovi)
{
	if (!obs) return false;

	/* don't allow changing of video settings if active. */
	if (obs->video.video && video_output_active(obs->video.video))
		return false;

	/* passing NULL means shut down the current video output */
	if (!ovi) {
		stop_video();
		obs_free_video();
		obs_free_graphics();
		return true;
	}

	struct obs_core_video *video = &obs->video;

	/* align to multiple-of-two and SSE alignment sizes */
	ovi->output_width  &= 0xFFFFFFFC;
	ovi->output_height &= 0xFFFFFFFE;

	stop_video();
	obs_free_video();

	if (!video->graphics && !obs_init_graphics(ovi))
		return false;

	return obs_init_video(ovi);
}
Example #2
void obs_shutdown(void)
{
	if (!obs)
		return;

	da_free(obs->input_types);
	da_free(obs->filter_types);
	da_free(obs->encoder_types);
	da_free(obs->transition_types);
	da_free(obs->output_types);
	da_free(obs->service_types);
	da_free(obs->modal_ui_callbacks);
	da_free(obs->modeless_ui_callbacks);

	stop_video();

	obs_free_data();
	obs_free_video();
	obs_free_graphics();
	obs_free_audio();
	proc_handler_destroy(obs->procs);
	signal_handler_destroy(obs->signals);

	for (size_t i = 0; i < obs->modules.num; i++)
		free_module(obs->modules.array+i);
	da_free(obs->modules);

	bfree(obs->locale);
	bfree(obs);
	obs = NULL;
}
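For context, obs_shutdown() above is the teardown counterpart of libobs initialization: it frees the registered type lists, stops video, and releases graphics and audio before freeing the core object. A minimal pairing sketch follows; it assumes the early single-argument obs_startup(locale) that matches the vintage of this snippet (later libobs versions take extra arguments), so treat it as illustrative rather than version-exact.

#include "obs.h"

int main(void)
{
	/* assumption: early libobs obs_startup() took only a locale string;
	 * newer releases add a module config path and a profiler store */
	if (!obs_startup("en-US"))
		return 1;

	/* ... load modules, reset video/audio, run the application ... */

	/* mirror image of startup: frees types, video, graphics and audio */
	obs_shutdown();
	return 0;
}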
Example #3
int obs_reset_video(struct obs_video_info *ovi)
{
	if (!obs) return OBS_VIDEO_FAIL;

	/* don't allow changing of video settings if active. */
	if (obs->video.video && video_output_active(obs->video.video))
		return OBS_VIDEO_CURRENTLY_ACTIVE;

	/* passing NULL means shut down the current video output */
	if (!ovi) {
		stop_video();
		obs_free_video();
		obs_free_graphics();
		return OBS_VIDEO_SUCCESS;
	}

	if (!size_valid(ovi->output_width, ovi->output_height) ||
	    !size_valid(ovi->base_width,   ovi->base_height))
		return OBS_VIDEO_INVALID_PARAM;

	struct obs_core_video *video = &obs->video;

	stop_video();
	obs_free_video();

	/* align to multiple-of-two and SSE alignment sizes */
	ovi->output_width  &= 0xFFFFFFFC;
	ovi->output_height &= 0xFFFFFFFE;

	if (!video->graphics) {
		int errorcode = obs_init_graphics(ovi);
		if (errorcode != OBS_VIDEO_SUCCESS) {
			obs_free_graphics();
			return errorcode;
		}
	}

	blog(LOG_INFO, "video settings reset:\n"
	               "\tbase resolution:   %dx%d\n"
	               "\toutput resolution: %dx%d\n"
	               "\tfps:               %d/%d",
	               ovi->base_width, ovi->base_height,
	               ovi->output_width, ovi->output_height,
	               ovi->fps_num, ovi->fps_den);

	return obs_init_video(ovi);
}
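The return codes in this variant (OBS_VIDEO_SUCCESS, OBS_VIDEO_CURRENTLY_ACTIVE, OBS_VIDEO_INVALID_PARAM, OBS_VIDEO_FAIL) let a caller distinguish the failure modes. Below is a minimal caller sketch; it fills in only the obs_video_info fields this example actually reads, and a real caller would also have to set fields not shown here (such as the graphics module to load), so take it as an assumption-laden illustration rather than a complete setup.

#include "obs.h"

static void reset_video_example(void)
{
	struct obs_video_info ovi = {0};

	/* only the fields read by obs_reset_video() above; other required
	 * fields (e.g. the graphics module name) are omitted in this sketch */
	ovi.base_width    = 1920;
	ovi.base_height   = 1080;
	ovi.output_width  = 1280;
	ovi.output_height = 720;
	ovi.fps_num       = 30;
	ovi.fps_den       = 1;

	switch (obs_reset_video(&ovi)) {
	case OBS_VIDEO_SUCCESS:
		break;
	case OBS_VIDEO_CURRENTLY_ACTIVE:
		blog(LOG_WARNING, "video still active, cannot reset");
		break;
	case OBS_VIDEO_INVALID_PARAM:
		blog(LOG_WARNING, "invalid output/base resolution");
		break;
	default:
		blog(LOG_ERROR, "video reset failed");
	}
}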
Example #4
void obs_shutdown(void)
{
	struct obs_module *module;

	if (!obs)
		return;

	da_free(obs->input_types);
	da_free(obs->filter_types);
	da_free(obs->encoder_types);
	da_free(obs->transition_types);
	da_free(obs->output_types);
	da_free(obs->service_types);
	da_free(obs->modal_ui_callbacks);
	da_free(obs->modeless_ui_callbacks);

	stop_video();

	obs_free_data();
	obs_free_video();
	obs_free_graphics();
	obs_free_audio();
	proc_handler_destroy(obs->procs);
	signal_handler_destroy(obs->signals);

	module = obs->first_module;
	while (module) {
		struct obs_module *next = module->next;
		free_module(module);
		module = next;
	}
	obs->first_module = NULL;

	for (size_t i = 0; i < obs->module_paths.num; i++)
		free_module_path(obs->module_paths.array+i);
	da_free(obs->module_paths);

	bfree(obs->locale);
	bfree(obs);
	obs = NULL;
}
Example #5
void
on_click (ClutterActor *actor,
          ClutterEvent *event,
          gpointer data)
{
  HandCar *all = (HandCar *)data;
  gint x, y;
  gint64 pos = -1;

  clutter_event_get_coords (event, &x, &y);
  if (coord_within_actor (all->btn_actor_play, x, y))
    {
      if (!all->playing)
        {
          play_video (all);
          clutter_texture_set_pixbuf (CLUTTER_TEXTURE(all->btn_actor_play), all->stop, NULL);
          all->playing = TRUE;
        }
      else
        {
          stop_video (all);
          clutter_texture_set_pixbuf (CLUTTER_TEXTURE(all->btn_actor_play), all->play, NULL);
          all->playing = FALSE;
        }
    }
  else if ((coord_within_actor (all->btn_actor_next, x, y) && (all->playing)))
    {
      if (!gst_element_query_position (all->player, &all->format, &pos))
        pos = 0;

      gst_element_seek_simple (all->player, all->format, GST_SEEK_FLAG_FLUSH, pos + 5 * GST_SECOND);
    }
  else if ((coord_within_actor (all->btn_actor_previous, x, y) && (all->playing)))
    {
      if (!gst_element_query_position (all->player, &all->format, &pos))
        pos = 5 * GST_SECOND;

      gst_element_seek_simple (all->player, all->format, GST_SEEK_FLAG_FLUSH, pos - 5 * GST_SECOND);
    }

}
Example #6
void AVPlayer::decode_proc()
{
	while (bRun)
	{
		AVPacket packet;
		av_init_packet(&packet);
		std::chrono::milliseconds time_out(10);
		/* read frames from the video */
		while (av_read_frame(pFormatCtx, &packet) >= 0)
		{
// 			int queue_size = vDecoder.video_queue.size();
// 			if ( queue_size >= vDecoder.max_queue_size)
// 			{
// 				Sleep(10);
// 				continue;
// 			}

			int stream_index = packet.stream_index;
			if (stream_index == video_stream_index)
			{
				/* video packet: queue it for the video decoder */
				vDecoder.video_queue.push(packet, time_out);
			}
			else if (stream_index == audio_stream_index)
			{
				/* audio packet: decode it right away */
				aDecoder.audio_decode(&packet);
			}
			else
			{
				av_free_packet(&packet);
			}
		}

		stop_video();
	}
}
Example #7
JNIEXPORT void JNICALL Java_cn_edu_hust_buildingtalkback_jni_NativeInterface_stopVideo(
		JNIEnv *env, jclass clazz)
{
	LOG_FUNC();
	stop_video();
}
Example #8
int command_dispatcher(int command_id, char * args, char * result_str)
{
    int result = 0;
    int arg_param;
 
    switch(command_id)
    {
        case TS_PREVIEW_START:
            result = start_preview();
            break;

        case TS_PREVIEW_STOP:
            result = stop_preview();
            break;

        case TS_VIDEO_START:
            result = start_video();
            break;

        case TS_VIDEO_STOP:
            result = stop_video();
            break;

        case TS_SNAPSHOT_YUV_PICTURE:
            result = take_picture(ACTION_TAKE_YUV_PICTURE);
            break;

        case TS_SNAPSHOT_JPEG_PICTURE:
            result = take_picture(ACTION_TAKE_JPEG_PICTURE);
            break;

        case TS_SNAPSHOT_RAW_PICTURE:
            result = take_raw_picture();
            break;

        case TS_SNAPSHOT_STOP:
            result = testsuite_snapshot_stop();
            break;

        case TS_SYSTEM_INIT:
            result = system_init();
            break;

        case TS_SYSTEM_DESTROY:
            result = system_destroy();
            break;

        case TS_PRINT_MAXZOOM:
            result = print_maxzoom();
            break;

        case TS_PRINT_ZOOMRATIOS:
            result = print_zoomratios();
            break;

        case TS_ZOOM_INCREASE:
            result = zoom_increase(1);
            break;

        case TS_ZOOM_DECREASE:
            result = zoom_decrease(1);
            break;

        case TS_ZOOM_STEP_INCREASE:
            result = zoom_increase(0);
            break;

        case TS_ZOOM_STEP_DECREASE:
            result = zoom_decrease(0);
            break;

        case TS_CONTRAST_INCREASE:
            result = increase_contrast();
            break;

        case TS_CONTRAST_DECREASE:
            result = decrease_contrast();
            break;

        case TS_SATURATION_INCREASE:
            result = increase_saturation();
            break;

        case TS_SATURATION_DECREASE:
            result = decrease_saturation();
            break;

        case TS_SPECIAL_EFFECT:
            result = SpecialEffect();
            break;

        case TS_BRIGHTNESS_INCREASE:
            result = increase_brightness();
            break;

        case TS_BRIGHTNESS_DECREASE:
            result = decrease_brightness();
            break;

        case TS_EV_INCREASE:
            result = increase_EV();
            break;

        case TS_EV_DECREASE:
            result = decrease_EV();
            break;

        case TS_ANTI_BANDING:
            result = set_antibanding();
            break;

        case TS_SET_WHITE_BALANCE:
            result = set_whitebalance();
            break;

        case TS_AEC_MODE:
            result = AEC_mode_change();
            break;

        case TS_ISO_INCREASE:
            result = increase_ISO();
            break;

        case TS_ISO_DECREASE:
            result = decrease_ISO();
            break;

        case TS_SHARPNESS_INCREASE:
            result = increase_sharpness();
            break;

        case TS_SHARPNESS_DECREASE:
            result = decrease_sharpness();
            break;

        case TS_SET_AUTO_FOCUS:
            result = set_auto_focus();
            break;

        case TS_SET_HJR:
            result = set_hjr();
            break;

        case TS_SET_LENS_SHADING:
            result = LensShading();
            break;

        case TS_SET_LED_MODE:
            result = LED_mode_change();
            break;

        case TS_GET_SHARPNESS_AF:
            result = set_sharpness_AF();
            break;

        case TS_SNAPSHOT_RESOLUTION:
            arg_param = atoi(args);
            result = snapshot_resolution(arg_param);
            break;

        case TS_PREVIEW_RESOLUTION:
            arg_param = atoi(args);
            result = preview_video_resolution (arg_param);
            break;

        case TS_MOTION_ISO:
            result = set_MotionIso();
            break;

        case TS_TOGGLE_HUE:
            result = toggle_hue();
            break;

        case TS_CANCEL_AUTO_FOCUS:
            result = cancel_af();
            break;

        case TS_GET_AF_STEP:
            result = get_af_step();
            break;

        case TS_SET_AF_STEP:
            result = set_af_step();
            break;

        case TS_ENABLE_AFD:
            result = enable_afd();
            break;

        case TEST_VIDIOC_G_FMT:
            result = msm_v4l2_vidioc_g_fmt();
            break;

        case TEST_VIDIOC_S_FMT:
            result = msm_v4l2_vidioc_s_fmt();
            break;

        case TEST_VIDIOC_CROPCAP:
            result = msm_v4l2_vidioc_cropcap();
            break;

        case TEST_VIDIOC_G_CROP:
            result = msm_v4l2_vidioc_g_crop();
            break;

        case TEST_VIDIOC_S_CROP:
            result = msm_v4l2_vidioc_s_crop();
            break;

        case TEST_VIDIOC_QUERYMENU:
            result = msm_v4l2_vidioc_querymenu(args);
            break;

        case TEST_VIDIOC_QUERYCTRL:
            result = msm_v4l2_vidioc_queryctrl(NULL);
            break;

        case TEST_VIDIOC_S_CTRL:
            result = msm_v4l2_vidioc_s_ctrl(args);
            break;

        case TEST_VIDIOC_G_CTRL:
            result = msm_v4l2_vidioc_g_ctrl(args);
            break;

        default:
            break;
    }
    return result;
}
Example #9
int main (int argc, char* argv[]) {
   monitor();
   int i, fd, length;
   int watchdog = 0, watchdog_errors = 0;
   int onesec_check = 0;
   time_t last_pv_time = 0, pv_time;
   char readbuf[MAX_COMMAND_LEN];

   bcm_host_init();
   //
   // read arguments
   //
   for(i=1; i<argc; i++) {
      if(strcmp(argv[i], "--version") == 0) {
         printf("RaspiMJPEG Version %s\n", VERSION);
         exit(0);
      }
      else if(strcmp(argv[i], "-md") == 0) {
         cfg_val[c_motion_detection] = 1;
      }
   }

   //default base media path
   asprintf(&cfg_stru[c_media_path], "%s", "/var/www/media");
   
   //
   // read configs and init
   //
   read_config("/etc/raspimjpeg", 1);
   if (cfg_stru[c_user_config] != 0)
      read_config(cfg_stru[c_user_config], 0);

   createPath(cfg_stru[c_log_file], cfg_stru[c_base_path]);
   if (cfg_stru[c_boxing_path] != NULL) {
      char *bpath;
      asprintf(&bpath, "%s/temp", cfg_stru[c_boxing_path]);
      createPath(bpath, cfg_stru[c_base_path]);
      free(bpath);
   }
   
   printLog("RaspiMJPEG Version %s\n", VERSION);
   
   if(cfg_val[c_autostart]) start_all(0);

   //
   // run
   //
   if(cfg_val[c_autostart]) {
      if(cfg_stru[c_control_file] != 0){
         printLog("MJPEG streaming, ready to receive commands\n");
         //kick off motion detection at start if required.
         if(cfg_val[c_motion_detection] && cfg_val[c_motion_external]) {
            printLog("Autostart external motion kill any runnng motion\n");
            if(system("killall motion") == -1) error("Could not stop external motion", 1);
            sleep(1);
            printLog("Autostart external motion start external motion\n");
            if(system("motion") == -1) error("Could not start external motion", 1);
         }
      } else {
         printLog("MJPEG streaming\n");
      }
   }
   else {
      if(cfg_stru[c_control_file] != 0) printLog("MJPEG idle, ready to receive commands\n");
      else printLog("MJPEG idle\n");
   }

   updateStatus();
 
   struct sigaction action;
   memset(&action, 0, sizeof(struct sigaction));
   action.sa_handler = term;
   sigaction(SIGTERM, &action, NULL);
   sigaction(SIGINT, &action, NULL);
   
   //Clear out anything in FIFO first
   do {
      fd = open(cfg_stru[c_control_file], O_RDONLY | O_NONBLOCK);
      if(fd < 0) error("Could not open PIPE", 1);
      fcntl(fd, F_SETFL, 0);
      length = read(fd, readbuf, 60);
      close(fd);
   } while (length != 0); 
  
  //Send restart signal to scheduler
  send_schedulecmd("9");
   // Main forever loop
   while(running) {
      if(cfg_stru[c_control_file] != 0) {

         fd = open(cfg_stru[c_control_file], O_RDONLY | O_NONBLOCK);
         if(fd < 0) error("Could not open PIPE", 1);
         fcntl(fd, F_SETFL, 0);
         length = read(fd, readbuf, MAX_COMMAND_LEN -2);
         close(fd);

         if(length) {
            process_cmd(readbuf, length);
         }

      }
      if(timelapse) {
         tl_cnt++;
         if(tl_cnt >= cfg_val[c_tl_interval]) {
            if(i_capturing == 0) {
               capt_img();
               tl_cnt = 0;
            }
         }
      }
      // check to see if image preview changing
      if (!idle && cfg_val[c_watchdog_interval] > 0) {
         if(watchdog++ > cfg_val[c_watchdog_interval]) {
            watchdog = 0;
            pv_time = get_mtime(cfg_stru[c_preview_path]);
            if (pv_time == 0) {
               watchdog_errors++;
            } else {
               if (pv_time > last_pv_time) {
                  watchdog_errors = 0;
               } else {
                  watchdog_errors++;
               }
               last_pv_time = pv_time;
            }
            if (watchdog_errors >= cfg_val[c_watchdog_errors]) {
               printLog("Watchdog detected problem. Stopping");
               running = 0;
            }
         }
      } else {
         watchdog_errors = 0;
      }
      if (++onesec_check >= 10) {
         //run check on background boxing every 10 ticks and check for video timer if capturing
         onesec_check = 0;
         check_box_files();
         if (v_capturing && video_stoptime > 0) {
            if (time(NULL) >= video_stoptime) {
               printLog("Stopping video from timer\n");
               stop_video(0);
            }
         }
      }
      usleep(100000);
   }
   if(system("killall motion") == -1) error("Could not stop external motion", 1);
  
   printLog("SIGINT/SIGTERM received, stopping\n");
   //
   // tidy up
   //
   if(!idle) stop_all();
   return 0;
}
Example #10
void process_cmd(char *readbuf, int length) {
   typedef enum pipe_cmd_type{ca,im,tl,px,bo,tv,an,as,at,ac,ab,sh,co,br,sa,is,vs,rl,ec,em,wb,mm,ie,ce,ro,fl,ri,ss,qu,pv,bi,ru,md,sc,rs,bu,mn,mt,mi,mb,me,mx,mf,vm,vp,wd,sy,cn,st} pipe_cmd_type;
   char pipe_cmds[] = "ca,im,tl,px,bo,tv,an,as,at,ac,ab,sh,co,br,sa,is,vs,rl,ec,em,wb,mm,ie,ce,ro,fl,ri,ss,qu,pv,bi,ru,md,sc,rs,bu,mn,mt,mi,mb,me,mx,mf,vm,vp,wd,sy,cn,st";
   pipe_cmd_type pipe_cmd;
   int parcount;
   char pars[128][10];
   long int par0;
   char cmd[3];
   char par[MAX_COMMAND_LEN];
   char *parstring=0, *temp;
   int key = -1;
   
   if (length < 2 || length > (MAX_COMMAND_LEN - 2)) return;
   
   //Get cmd and null terminate it before searching the command list
   strncpy(cmd, readbuf, 2);
   cmd[2] = 0;
   //find 2 letter command and translate into enum
   temp = strstr(pipe_cmds, cmd);
   if (temp == NULL) return;
   pipe_cmd = (pipe_cmd_type)((temp - pipe_cmds) / 3);
  
   if(length > 3) {
      strcpy(par, readbuf + 3);
      par[length-3] = 0;
      //extract space separated numeric parameters
      // and make separate string parameter (strtok changes the original)
      asprintf(&parstring, "%s", par);
      parcount = 0;
      temp = strtok(par, " ");
      while(parcount<10 && temp != NULL) {
         strcpy(pars[parcount], temp);
         parcount++;
         temp = strtok(NULL, " ");
      }
      par0 = strtol(pars[0], NULL, 10);
   } else {
      par0 = 0;
   }
   
   switch(pipe_cmd) {
      case ca:
         if(par0 == 1) {
            if (parcount > 1) {
               long vtime = strtol(pars[1], NULL, 10);
               video_stoptime = time(NULL) + vtime;
               printLog("Capturing %ld seconds\n", vtime);
            }
            start_video(0);
         }  else {
            stop_video(0);
         }
         break;
      case im:
         capt_img();
         break;
      case tl:
         if(par0) {
            timelapse = 1;
            lapse_cnt = 1;
            updateStatus();
            printLog("Timelapse started\n");
         }
         else {
            image2_cnt++;
            timelapse = 0;
            updateStatus();
            printLog("Timelapse stopped\n");
         }
         break;
      case px:
         stop_all();
         addUserValue(c_video_width, pars[0]);
         addUserValue(c_video_height, pars[1]);
         addUserValue(c_video_fps, pars[2]);
         addUserValue(c_MP4Box_fps, pars[3]);
         addUserValue(c_image_width, pars[4]);
         addUserValue(c_image_height, pars[5]);
         start_all(0);
         break;
      case bo:
         addUserValue(c_MP4Box, pars[0]);
         break;
      case tv:
         addUserValue(c_tl_interval, pars[0]);
         break;
      case an:
         addUserValue(c_annotation, parstring);
         break;
      case as:
         addUserValue(c_anno_text_size, pars[0]);
         break;
      case at:
         addUserValue(c_anno3_custom_text_colour, pars[0]);
         addUserValue(c_anno3_custom_text_Y, pars[1]);
         addUserValue(c_anno3_custom_text_U, pars[2]);
         addUserValue(c_anno3_custom_text_V, pars[3]);
         break;
      case ac:
         addUserValue(c_anno3_custom_background_colour, pars[0]);
         addUserValue(c_anno3_custom_background_Y, pars[1]);
         addUserValue(c_anno3_custom_background_U, pars[2]);
         addUserValue(c_anno3_custom_background_V, pars[3]);
         break;
      case ab:
         addUserValue(c_anno_background, pars[0]);
         break;
      case sh:
         key = c_sharpness;
         break;
      case co:
         key = c_contrast;
         break;
      case br:
         key = c_brightness;
         break;
      case sa:
         key = c_saturation;
         break;
      case is:
         key = c_iso;
         break;
      case vs:
         key = c_video_stabilisation;
         break;
      case rl:
         key = c_raw_layer;
         break;
      case ec:
         key = 1000 + c_exposure_compensation;
         break;
      case em:
         key = 1000 + c_exposure_mode;
         break;
      case wb:
         key = 1000 + c_white_balance;
         break;
      case mm:
         key = 1000 + c_metering_mode;
         break;
      case ie:
         key = 1000 + c_image_effect;
         break;
      case ce:
         addUserValue(c_colour_effect_u, pars[1]);
         addUserValue(c_colour_effect_v, pars[2]);
         key = c_colour_effect_en;
         break;
      case ro:
         key = c_rotation;
         break;
      case fl:
         if(par0 & 1) addUserValue(c_hflip, "1"); else addUserValue(c_hflip, "0"); 
         if((par0 >> 1) & 1) addUserValue(c_vflip, "1"); else addUserValue(c_vflip, "0"); 
         cam_set(c_hflip);
         break;
      case ri:
         addUserValue(c_sensor_region_y, pars[1]);
         addUserValue(c_sensor_region_w, pars[2]);
         addUserValue(c_sensor_region_h, pars[3]);
         key = c_sensor_region_x;
         break;
      case ss:
         addUserValue(c_shutter_speed, pars[0]);
         key = c_shutter_speed;
         break;
      case qu:
         key = c_image_quality;
         break;
      case pv:
         stop_all();
         addUserValue(c_quality, pars[0]);
         addUserValue(c_width, pars[1]);
         addUserValue(c_divider, pars[2]);
         start_all(0);
         break;
      case bi:
         stop_all();
         addUserValue(c_video_bitrate, pars[0]);
         start_all(0);
         break;
      case st:
         stop_all();
         addUserValue(c_stat_pass, pars[0]);
         start_all(0);
         break;
      case wd:
         addUserValue(c_watchdog_interval, pars[0]);
         addUserValue(c_watchdog_errors, pars[1]);
         break;
      case ru:
         if (par0 == 0) {
            stop_all();
            idle = 1;
            printLog("Stream halted\n");
         } else {
            start_all(1);
            idle = 0;
            printLog("Stream continued\n");
         }
         updateStatus();
         break;
      case mx:
         key = c_motion_external;
         //If switching to internal with motion detection on then try to kill external motion
         if (cfg_val[c_motion_detection] != 0 && !par0) {
            if(system("killall motion") == -1) error("Could not stop external motion", 1);
            printLog("External motion detection stopped\n");
         }
         break;
      case md:
         exec_macro(cfg_stru[c_do_cmd], readbuf);
         stop_all();
         if (cfg_val[c_motion_external]) {
            if(par0 == 0) {
               if(system("killall motion") == -1) error("Could not stop external motion", 1);
               printLog("External motion detection stopped\n");
            }
            else {
               if (cfg_val[c_motion_detection] == 0) {
                  if(system("motion") == -1) error("Could not start external motion", 1);
                  printLog("External motion detection started\n");
               } else {
                  printLog("Motion already running. md 1 ignored\n");
               }
            }
         } else {
            if(par0 == 0) {
               printLog("Internal motion detection stopped\n");
            }
            else {
               printLog("Internal motion detection started\n");
            }
         }
         cfg_val[c_motion_detection] = par0?1:0;
         start_all(0);
         updateStatus();
         break;
      case sc:
         set_counts();
         printLog("Scan for highest count\n");
         break;
      case rs:
         printLog("Reset settings to defaults\n");
         stop_all();
         read_config("/etc/raspimjpeg", 1);
         saveUserConfig(cfg_stru[c_user_config]);
         start_all(0);
         break;
      case bu:
         key = c_video_buffer;
         break;
      case vp:
         stop_all();
         addUserValue(c_vector_preview, pars[0]);
         start_all(0);
         break;
      case mn:
         key = c_motion_noise;
         break;
      case mt:
         key = c_motion_threshold;
         break;
      case mi:
         key = c_motion_image + 1000;
         break;
      case mb:
         key = c_motion_startframes;
         break;
      case me:
         key = c_motion_stopframes;
         break;
      case mf:
         key = c_motion_file;
         break;
      case vm:
         key = c_vector_mode;
         break;
      case sy:
         exec_macro(parstring, NULL);
         break;
      case cn:
         stop_all();
         addUserValue(c_camera_num, pars[0]);
         start_all(0);
         break;
      default:
         printLog("Unrecognised pipe command\n");
         break;
   }