Example #1
void encode_open()
{
	encode_mmap();
	encode_alloc();
	encode_init();
	encode_spspps();
}
Example #2
Task_t* Debounce(void) {
	encode_init();
	static Task_t debounce;
	debounce.canRun = TimedTask_canRun;
	debounce.run = Debounce_run;
	debounce.runTime = 0;
	return &debounce;
}
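The task returned here is a statically allocated Task_t whose canRun/run function pointers are meant to be polled by a cooperative scheduler. Below is a minimal, hypothetical polling loop: the Task_t layout is inferred from the fields used above, while the function-pointer signatures and the millis() tick source are assumptions, not part of the original code.

#include <stdint.h>

/* Assumed task shape, inferred from the fields used in the example above. */
typedef struct Task Task_t;
struct Task {
    uint8_t  (*canRun)(Task_t *t, uint32_t now);  /* assumed signature */
    void     (*run)(Task_t *t, uint32_t now);     /* assumed signature */
    uint32_t runTime;
};

extern Task_t *Debounce(void);   /* from the example above */
extern uint32_t millis(void);    /* assumed millisecond tick source */

void scheduler_loop(void)
{
    Task_t *task = Debounce();   /* one-time setup; returns the static task */

    for (;;) {
        uint32_t now = millis();
        if (task->canRun(task, now))
            task->run(task, now);
    }
}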
Example #3
void X264_init()
{
	g_ImgWidth = opt.width;
	g_ImgHeight = opt.height;
	YUVframe = (uint8_t *) malloc(sizeof(uint8_t) * g_ImgWidth * g_ImgHeight * 2);
	encode_init(&g_X264Encoder, g_ImgWidth, g_ImgHeight);
	init_file();
}
Example #4
static void set_up(void) {
  if (p == NULL) {
    p = permanent_pool = make_sub_pool(NULL);
  }

#ifdef PR_USE_NLS
  encode_init();
#endif /* PR_USE_NLS */

  if (getenv("TEST_VERBOSE") != NULL) {
    pr_trace_set_levels("encode", 1, 20);
  }
}
Example #5
int pr_encode_set_charset_encoding(const char *charset, const char *codeset) {
#ifdef HAVE_ICONV_H
  int res;

  if (charset == NULL ||
      codeset == NULL) {
    errno = EINVAL;
    return -1;
  }

  if (local_charset) {
    pr_trace_msg(trace_channel, 5,
      "attempting to switch local charset from %s to %s", local_charset,
      charset);

  } else {
    pr_trace_msg(trace_channel, 5, "attempting to use %s as local charset",
      charset);
  }

  if (encoding) {
    pr_trace_msg(trace_channel, 5,
      "attempting to switch encoding from %s to %s", encoding, codeset);

  } else {
    pr_trace_msg(trace_channel, 5, "attempting to use %s encoding", codeset);
  }

  (void) encode_free();

  local_charset = pstrdup(permanent_pool, charset);
  encoding = pstrdup(permanent_pool, codeset);

  res = encode_init();
  if (res < 0) {
    pr_trace_msg(trace_channel, 1,
      "failed to initialize encoding for local charset %s, encoding %s, "
      "disabling encoding", charset, codeset);
    local_charset = NULL;
    encoding = NULL;
  }

  return res;

#else
  errno = ENOSYS;
  return -1;
#endif /* !HAVE_ICONV_H */
}
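A minimal caller sketch for the function above, relying only on the signature and the 0 / -1-with-errno convention visible here; the charset names and the surrounding helper are illustrative, not taken from ProFTPD.

#include <errno.h>
#include <stdio.h>
#include <string.h>

extern int pr_encode_set_charset_encoding(const char *charset, const char *codeset);

static void switch_to_latin1(void) {
    if (pr_encode_set_charset_encoding("ISO-8859-1", "ISO-8859-1") < 0) {
        /* ENOSYS indicates a build without iconv support (the #else branch above). */
        fprintf(stderr, "charset switch failed: %s\n", strerror(errno));
    }
}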
Example #6
/* Enables runtime use of encoding using the specified character set (assuming
 * NLS is supported).  Note that "UTF8", "utf8", "utf-8", and "UTF-8" are
 * accepted "character set" designations.
 */
int pr_encode_enable_encoding(const char *codeset) {
#ifdef HAVE_ICONV_H
  int res;

  if (codeset == NULL) {
    errno = EINVAL;
    return -1;
  }

  if (encoding != NULL &&
      strcasecmp(encoding, codeset) == 0) {
    pr_trace_msg(trace_channel, 5, "'%s' encoding already being used", codeset);
    return 0;
  }

  if (encoding) {
    pr_trace_msg(trace_channel, 5,
      "attempting to switch encoding from %s to %s", encoding, codeset);

  } else {
    pr_trace_msg(trace_channel, 5, "attempting to enable %s encoding", codeset);
  }

  (void) encode_free();
  encoding = pstrdup(permanent_pool, codeset);

  res = encode_init();
  if (res < 0) {
    pr_trace_msg(trace_channel, 1,
      "failed to initialize encoding for %s, disabling encoding: %s", codeset,
      strerror(errno));
    encoding = NULL;
  }

  return res;

#else
  errno = ENOSYS;
  return -1;
#endif /* !HAVE_ICONV_H */
}
Example #7
// Returns 0 if successful
// Returns -1 if unsuccessful
static int init_frame_info(twolame_options *glopts)
/* interpret data in hdr str to fields in frame */
{
	frame_info *frame = &glopts->frame;
	frame_header *header = &glopts->header;
	
	frame->actual_mode = header->mode;
	frame->nch = (header->mode == TWOLAME_MONO) ? 1 : 2;
	
	//frame->sblimit = pick_table ( glopts );
	/* MFC FIX this up */
	// Select table number and sblimit
	encode_init( glopts );
	
	if (glopts->mode == TWOLAME_JOINT_STEREO)
		frame->jsbound = get_js_bound(header->mode_ext);
	else
		frame->jsbound = frame->sblimit;
	/* alloc, tab_num set in pick_table */
	
	return 0;
}
Example #8
int main(void)
{
    int8_t enc = 0;
    unsigned int c;
    uint8_t state = ST_DATA;

    /* start-up */
    lcd_init();
    uart_init(UART_BAUD_SELECT(UART_BAUD_RATE, F_CPU));
    encode_init();
    pwm_set(0xFF);

    sei();

    lcd_clrscr();
    lcd_puts_P("I2C demo 0.1");

    /* loop forever */
    for (;;) {
        c = uart_getc();

        if (!(c & 0xFF00)) {
            switch (state) {
                case ST_DATA:
                    if (c == 0x00FE)
                        state = ST_CMD;
                    else if (c == 0x00FD)
                        state = ST_BL;
                    else if (c == 0x00FC)
                        state = ST_ESC;
                    else
                        lcd_data(c);
                    break;
                case ST_CMD:
                    if (c == 0x01)
                        lcd_clrscr();
                    else
                        lcd_command(c);
                    state = ST_DATA;
                    break;
                case ST_BL:
                    pwm_set(c);
                    state = ST_DATA;
                    break;
                default:
                    lcd_data(c);
                    state = ST_DATA;
            }
        }

        enc = encode_read2();   /* Adapt this to your encoder type */
        if (enc > 0) {
            uart_putc(0xFE);
            uart_putc(0x01);
        }
        else if (enc < 0) {
            uart_putc(0xFE);
            uart_putc(0x02);
        }

        if (key_press > 0) {
            uart_putc(0xFE);
            uart_putc(0x04);
            key_press = 0;
        }
    } /* end for() */
}
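The UART loop above implements a small byte protocol: 0xFE introduces an LCD command (0x01 clears the screen), 0xFD carries a backlight value, and 0xFC escapes the next byte so it is passed through as data. A hypothetical host-side counterpart, assuming only that protocol plus an assumed serial_write() transmit routine:

#include <stdint.h>

extern void serial_write(uint8_t byte);   /* assumed UART transmit routine */

static void lcd_remote_command(uint8_t cmd) {
    serial_write(0xFE);
    serial_write(cmd);                    /* 0x01 clears the screen */
}

static void lcd_remote_backlight(uint8_t level) {
    serial_write(0xFD);
    serial_write(level);
}

static void lcd_remote_putc(uint8_t c) {
    if (c >= 0xFC)                        /* escape bytes that collide with the markers */
        serial_write(0xFC);
    serial_write(c);
}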
Example #9
int main(int argc, char **argv)
{
    char logpath[FILENAME_MAX];
    int log;

    if (argc != 2) 
    {
        fprintf(stderr, PACKAGE_STRING "\n"
                "  (c) Copyright 2001-2004 The IceS Development Team <*****@*****.**>\n"
                "        Michael Smith <*****@*****.**>\n"
                "        Karl Heyes    <*****@*****.**>\n"
                "        and others\n"
                "\n"
                "Usage: \"ices config.xml\"\n");
        return 1;
    }

    config_initialize();

    if (config_read(argv[1]) <= 0) 
    {
        fprintf(stderr, "Failed to read config file \"%s\"\n", argv[1]);
        goto fail;
    }
	
    if (ices_config->background)
    {
#ifndef _WIN32		
        int ret = 0;
        /* Start up new session, to lose old session and process group */
        switch (fork())
        {
        case 0: break; /* child continues */
        case -1: perror ("fork"); ret = -1;
        default:
            exit (ret);
        }

        /* Disassociate process group and controlling terminal */ 
        setsid();

        /* Become a NON-session leader so that a */
        /* control terminal can't be reacquired */
        switch (fork())
        {
        case 0: break; /* child continues */
        case -1: perror ("fork"); ret = -1;
        default:
            exit (ret);
        }
#else
        FreeConsole();
#endif    		
    }

    log_initialize();
    thread_initialize();
    shout_init();
    encode_init();
#ifndef _WIN32	
    signals_setup();
#endif

    snprintf(logpath, FILENAME_MAX, "%s/%s", ices_config->logpath, 
            ices_config->logfile);
    if(ices_config->log_stderr)
        log = log_open_file(stderr);
    else
    {
        log = log_open(logpath);
        if (log < 0)
            fprintf (stderr, "unable to open log %s\n", logpath);
        log_set_trigger (log, ices_config->logsize);
    }
    /* Set the log level, if requested - defaults to 2 (WARN) otherwise */
    if (ices_config->loglevel)
        log_set_level(log, ices_config->loglevel);

    ices_config->log_id = log;

    LOG_INFO0(PACKAGE_STRING " started...");
    if (ices_config->pidfile != NULL)
    {
        FILE *f = fopen (ices_config->pidfile, "w");
        if (f)
        {
            fprintf (f, "%i", getpid());
            fclose (f);
        }
        else
        {
            LOG_WARN1("pidfile \"%s\" cannot be written to", ices_config->pidfile);
            xmlFree (ices_config->pidfile);
            ices_config->pidfile = NULL;
        }
    }

    /* Start the core streaming loop */
    input_loop();

    if (ices_config->pidfile)
        remove (ices_config->pidfile);

    LOG_INFO0("Shutdown complete");

    log_close(log);

 fail:
    encode_close();
    shout_shutdown();
    config_shutdown();
    thread_shutdown();
    log_shutdown();

    return 0;
}
Example #10
int main( int argc, char **argv )
{
	//int ret = 0;
	PTZControlInit();
	demo_setting * ext_gSettings = NULL;
	
	// Allocate the "global" settings
	ext_gSettings = (demo_setting*)malloc( sizeof( demo_setting ) );
	if ( NULL == ext_gSettings ) {
		printf( "main::out of memory!\n" );
		return -1;
	}
	
	sig_init();
    atexit(appExit);
	//init the setting struct
	Settings_Initialize( ext_gSettings );

	read_Parse(ext_gSettings);
	//printf("video type = %d \n", ext_gSettings->video_types);
	//...do your job

	//close the led
	setled_off();
	//init dma memory
	akuio_pmem_init();
	encode_init();
	printf("encode_init ok\n");
	//open camera
	camera_open(ext_gSettings->width, ext_gSettings->height);
	printf("camera_open ok\n");

	//encode_open
	T_ENC_INPUT encInput;
	encInput.width = ext_gSettings->width;			// actual encoded image width, must be divisible by 4
	encInput.height = ext_gSettings->height;			// actual encoded image height, must be divisible by 2
	encInput.kbpsmode = ext_gSettings->kbpsmode;
	encInput.qpHdr = ext_gSettings->qpHdr;			// initial QP value
	encInput.iqpHdr = ext_gSettings->iqpHdr;			// initial QP value
	encInput.bitPerSecond = ext_gSettings->bitPerSecond;	// target bitrate (bps)
	encInput.minQp = ext_gSettings->minQp;
	encInput.maxQp = ext_gSettings->maxQp;
	encInput.framePerSecond = ext_gSettings->framePerSecond;
	encInput.video_tytes = ext_gSettings->video_types;
	encode_open(&encInput);
	printf("encode_open ok\n");

	//set mux
	mux_input.rec_path = ext_gSettings->rec_path;
	mux_input.m_MediaRecType = MEDIALIB_REC_AVI_NORMAL;

	if (ext_gSettings->bhasAudio)
	{
		bHasAudio = 1;
		//mux_input.m_bCaptureAudio = 1;
	}
	else
	{
		bHasAudio = 0;
		//mux_input.m_bCaptureAudio = 0;
	}
	mux_input.m_bCaptureAudio = 1;
	//mux video
	if(parse.format2 == 0)
	{
		mux_input.m_eVideoType = MEDIALIB_VIDEO_H264;
	}
	else if(parse.format2 == 1)
	{
		mux_input.m_eVideoType = MEDIALIB_VIDEO_MJPEG;
	}
	mux_input.m_nWidth = parse.width2;
	mux_input.m_nHeight = parse.height2;
	
	//mux audio
	mux_input.m_eAudioType = MEDIALIB_AUDIO_AAC;
	mux_input.m_nSampleRate = 8000;
	//mux_input.abitsrate = ext_gSettings->abitsrate;

	printf("mux_open ok\n");

	//if (ext_gSettings->bhasAudio)
	{
		T_AUDIO_INPUT audioInput;
		audioInput.enc_type = (AUDIO_ENCODE_TYPE_CC)ext_gSettings->audioType;
		audioInput.nBitsRate = ext_gSettings->abitsrate;
		audioInput.nBitsPerSample = 16;
		audioInput.nChannels = 1;
		audioInput.nSampleRate = ext_gSettings->aSamplerate;
		audio_open(&audioInput);
		printf("audio_open ok\n");
		audio_start();
	}

	//start ftp server
	//startFTPSrv();

	Init_photograph();
	//PTZControlInit();
	//start video process
	video_process_start();
	InitMotionDetect();
	DemuxForLiveSetCallBack();
	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
	env = BasicUsageEnvironment::createNew(*scheduler);
	UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
	// To implement client access control to the RTSP server, do the following:
	authDB = new UserAuthenticationDatabase;
	authDB->addUserRecord("username1", "password1"); // replace these with real strings
	// Repeat the above with each <username>, <password> that you wish to allow
	// access to the server.
#endif
       
	// Create the RTSP server:
	RTSPServer* rtspServer = AKRTSPServer::createNew(*env, RTSPPORT, authDB);
	if (rtspServer == NULL) 
	{
		*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
		appExit();
		exit(1);
	}

	char const* descriptionString = "Session streamed by \"testOnDemandRTSPServer\"";

	// Set up each of the possible streams that can be served by the
	// RTSP server.  Each such stream is implemented using a
	// "ServerMediaSession" object, plus one or more
	// "ServerMediaSubsession" objects for each audio/video substream.

	int vsIndex = 0;
	VIDEO_MODE vm[2] = {VIDEO_MODE_VGA,VIDEO_MODE_VGA};
	const char* streamName1 = "vs1";
	const char* streamName2 = "vs2";
	((AKRTSPServer*)rtspServer)->SetStreamName(streamName1, streamName2);	
	
	if(ext_gSettings->video_types == 1)
	{
		if(ext_gSettings->width == 640)
		{
			vm[0] = VIDEO_MODE_VGA;
		}
		else if(ext_gSettings->width == 320)
		{
			vm[0] = VIDEO_MODE_QVGA;
		}
		else if(ext_gSettings->width == 720)
		{
			vm[0] = VIDEO_MODE_D1;
		}
		
		AKIPCMJPEGFramedSource* ipcMJPEGSourcecam = NULL;
		ServerMediaSession* smsMJPEGcam = ServerMediaSession::createNew(*env, streamName1, 0, descriptionString);
		AKIPCMJPEGOnDemandMediaSubsession* subsMJPEGcam = AKIPCMJPEGOnDemandMediaSubsession::createNew(*env,ipcMJPEGSourcecam, ext_gSettings->width, ext_gSettings->height, vsIndex);
		smsMJPEGcam->addSubsession(subsMJPEGcam); 
		subsMJPEGcam->getframefunc = video_process_get_buf;
		subsMJPEGcam->setledstart = setled_view_start;
		subsMJPEGcam->setledexit = setled_view_stop;
		
		if(bHasAudio)
			smsMJPEGcam->addSubsession(AKIPCAACAudioOnDemandMediaSubsession::createNew(*env,True,getAACBuf, vsIndex));

		rtspServer->addServerMediaSession(smsMJPEGcam);
		char* url1 = rtspServer->rtspURL(smsMJPEGcam);
		*env << "using url \"" << url1 <<"\"\n";
		delete[] url1;
	}
	else if(ext_gSettings->video_types == 0)
	{
		if(ext_gSettings->width == 1280)
		{
			vm[0] = VIDEO_MODE_720P;
		}
		else if(ext_gSettings->width == 640)
		{
			vm[0] = VIDEO_MODE_VGA;
		}
		else if(ext_gSettings->width == 320)
		{
			vm[0] = VIDEO_MODE_QVGA;
		}
		else if(ext_gSettings->width == 720)
		{
			vm[0] = VIDEO_MODE_D1;
		}
		
		AKIPCH264FramedSource* ipcSourcecam = NULL;
		ServerMediaSession* smscam = ServerMediaSession::createNew(*env, streamName1, 0, descriptionString);
		AKIPCH264OnDemandMediaSubsession* subscam = AKIPCH264OnDemandMediaSubsession::createNew(*env,ipcSourcecam, 0, vsIndex);
		smscam->addSubsession(subscam);
		if(bHasAudio)
			smscam->addSubsession(AKIPCAACAudioOnDemandMediaSubsession::createNew(*env,True,getAACBuf, vsIndex));
	
		subscam->getframefunc = video_process_get_buf;
		subscam->setledstart = setled_view_start;
		subscam->setledexit = setled_view_stop;

		rtspServer->addServerMediaSession(smscam);
		char* url1 = rtspServer->rtspURL(smscam);
		*env << "using url \"" << url1 <<"\"\n";
		delete[] url1;
	}

	vsIndex = 1;
	
	if(parse.format2 == 0)//264
	{
		if(parse.width2 == 1280)
		{
			vm[1] = VIDEO_MODE_720P;
		}
		else if(parse.width2 == 640)
		{
			vm[1] = VIDEO_MODE_VGA;
		}
		else if(parse.width2 == 320)
		{
			vm[1] = VIDEO_MODE_QVGA;
		}
		else if(parse.width2 == 720)
		{
			vm[1] = VIDEO_MODE_D1;
		}
		
		AKIPCH264FramedSource* ipcSourcecam = NULL;
		ServerMediaSession* smscam = ServerMediaSession::createNew(*env, streamName2, 0, descriptionString);
		AKIPCH264OnDemandMediaSubsession* subscam = AKIPCH264OnDemandMediaSubsession::createNew(*env,ipcSourcecam, 0, vsIndex);
		smscam->addSubsession(subscam);
		if(bHasAudio)
			smscam->addSubsession(AKIPCAACAudioOnDemandMediaSubsession::createNew(*env,True,getAACBuf, vsIndex));
	
		subscam->getframefunc = video_process_get_buf;
		subscam->setledstart = setled_view_start;
		subscam->setledexit = setled_view_stop;

		rtspServer->addServerMediaSession(smscam);
		char* url2 = rtspServer->rtspURL(smscam);
		*env << "using url \"" << url2 <<"\"\n";
		delete[] url2;
	}
	else if(parse.format2 == 1)//mjpeg
	{
		if(parse.width2 == 640)
		{
			vm[1] = VIDEO_MODE_VGA;
		}
		else if(parse.width2 == 320)
		{
			vm[1] = VIDEO_MODE_QVGA;
		}
		else if(parse.width2 == 720)
		{
			vm[1] = VIDEO_MODE_D1;
		}
		
		AKIPCMJPEGFramedSource* ipcMJPEGSourcecam = NULL;
		ServerMediaSession* smsMJPEGcam = ServerMediaSession::createNew(*env, streamName2, 0, descriptionString);
		AKIPCMJPEGOnDemandMediaSubsession* subsMJPEGcam = AKIPCMJPEGOnDemandMediaSubsession::createNew(*env,ipcMJPEGSourcecam, parse.width2, parse.height2, vsIndex);
		smsMJPEGcam->addSubsession(subsMJPEGcam); 
		subsMJPEGcam->getframefunc = video_process_get_buf;
		subsMJPEGcam->setledstart = setled_view_start;
		subsMJPEGcam->setledexit = setled_view_stop;
		
		if(bHasAudio)
			smsMJPEGcam->addSubsession(AKIPCAACAudioOnDemandMediaSubsession::createNew(*env,True,getAACBuf, vsIndex));

		rtspServer->addServerMediaSession(smsMJPEGcam);
		char* url2 = rtspServer->rtspURL(smsMJPEGcam);
		*env << "using url \"" << url2 <<"\"\n";
		delete[] url2;
	}
#if 0
	if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) 
	{
		*env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
	}
	else 
	{
		*env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
	}
#endif

	//printf("streamName:%s,Port:%d\n", streamName1, RTSPPORT);
	
	
	NetCtlSrvPar ncsp;
	memset(&ncsp, 0, sizeof(ncsp));
	getDeviceID(ncsp.strDeviceID);
	printf("device id:**%s**\n", ncsp.strDeviceID);
	strcpy(ncsp.strStreamName1, streamName1);
	strcpy(ncsp.strStreamName2, streamName2);
	ncsp.vm1 = vm[0];
	ncsp.vm2 = vm[1];
	ncsp.nRtspPort = RTSPPORT;
	ncsp.nMainFps = parse.fps1;
	ncsp.nSubFps = parse.fps2;
	//start net command server
	startNetCtlServer(&ncsp);

    printf("[##]start record...\n");
    auto_record_file();
    printf("[##]auto_record_file() called..\n");

	//at last,start rtsp loop
	env->taskScheduler().doEventLoop(); // does not return

	return 0;
}
Example #11
int twolame_init_params(twolame_options * glopts)
{

    if (glopts->twolame_init) {
        fprintf(stderr, "Already called twolame_init_params() once.\n");
        return 1;
    }
    // Check the number of channels
    if (glopts->num_channels_in != 1 && glopts->num_channels_in != 2) {
        fprintf(stderr,
                "twolame_init_params(): must specify number of input channels using twolame_set_num_channels().\n");
        return -1;
    }
    // If no output samplerate has been set, then set it to the input sample rate
    if (glopts->samplerate_out < 1) {
        glopts->samplerate_out = glopts->samplerate_in;
    }
    // If the MPEG version has not been set, then choose automatically
    if (glopts->version == -1) {
        // Get the MPEG version for the chosen samplerate
        glopts->version = twolame_get_version_for_samplerate(glopts->samplerate_out);
        if (glopts->version < 0) {
            fprintf(stderr, "twolame_init_params(): invalid samplerate: %i\n",
                    glopts->samplerate_out);
            return -1;
        } else if (glopts->verbosity >= 3) {
            fprintf(stderr, "Chosen version '%s' for samplerate of %d Hz.\n",
                    twolame_mpeg_version_name(glopts->version), glopts->samplerate_out);
        }
    }
    // Choose mode (if none chosen)
    if (glopts->mode == TWOLAME_AUTO_MODE) {
        if (glopts->num_channels_in == 2)
            glopts->mode = TWOLAME_STEREO;
        else
            glopts->mode = TWOLAME_MONO;
        if (glopts->verbosity >= 3) {
            fprintf(stderr, "Chosen mode to be '%s' because of %d input channels.\n",
                    twolame_get_mode_name(glopts), glopts->num_channels_in);
        }
    }
    // Choose the bitrate (if none chosen)
    if (glopts->bitrate <= 0) {
        if (glopts->mode == TWOLAME_MONO) {
            switch (glopts->samplerate_out) {
            case 48000:
                glopts->bitrate = 96;
                break;          // (LAME=64)
            case 44100:
                glopts->bitrate = 96;
                break;          // (LAME=64)
            case 32000:
                glopts->bitrate = 80;
                break;          // (LAME=48)
            case 24000:
                glopts->bitrate = 48;
                break;          // (LAME=32)
            case 22050:
                glopts->bitrate = 48;
                break;          // (LAME=32)
            case 16000:
                glopts->bitrate = 32;
                break;          // (LAME=24)
            }
        } else {
            switch (glopts->samplerate_out) {
            case 48000:
                glopts->bitrate = 192;
                break;          // (LAME=128)
            case 44100:
                glopts->bitrate = 192;
                break;          // (LAME=128)
            case 32000:
                glopts->bitrate = 160;
                break;          // (LAME=96)
            case 24000:
                glopts->bitrate = 96;
                break;          // (LAME=64)
            case 22050:
                glopts->bitrate = 96;
                break;          // (LAME=64)
            case 16000:
                glopts->bitrate = 64;
                break;          // (LAME=48)
            }
        }
        if (glopts->verbosity >= 3) {
            fprintf(stderr, "Chosen bitrate of %dkbps for samplerate of %d Hz.\n",
                    glopts->bitrate, glopts->samplerate_out);
        }
    }

    /* Can't do DAB and energy-level extensions at the same time, because both of them think they're
       the only ones inserting information into the ancillary section of the frame */
    if (glopts->do_dab && glopts->do_energy_levels) {
        fprintf(stderr, "Error: Can't do DAB and Energy Levels at the same time\n");
        return -1;
    }

    /* Set the number of ancillary bits automatically, if none set */
    if (glopts->num_ancillary_bits < 0) {
        if (glopts->do_energy_levels) {
            glopts->num_ancillary_bits = get_required_energy_bits(glopts);
        } else {
            glopts->num_ancillary_bits = 0;
        }
    }

    /* Check that if we're doing energy levels, that there's enough space to put the information */
    if (glopts->do_energy_levels) {
        int required = get_required_energy_bits(glopts);
        if (glopts->num_ancillary_bits < required) {
            fprintf(stderr, "Warning: Too few ancillary bits to store energy levels: %i<%i\n",
                    glopts->num_ancillary_bits, required);
            return -1;
        }
    }

    /* 
     * MFC Feb 2003: in VBR mode, joint
     * stereo doesn't make any sense at
     * the moment, as there are no noisy
     * subbands according to
     * bits_for_nonoise in vbr mode
     */
    if (glopts->vbr && glopts->mode == TWOLAME_JOINT_STEREO) {
        fprintf(stderr, "Warning: Can't do Joint Stereo with VBR, switching to normal stereo.\n");

        // force stereo mode
        twolame_set_mode(glopts, TWOLAME_STEREO);
    }

    /* Can't do padding and VBR at same time */
    if (glopts->vbr && glopts->padding == TRUE) {
        fprintf(stderr, "Error: Can't do padding and VBR at same time\n");
        return -1;
    }
    // Set the Number of output channels
    glopts->num_channels_out = (glopts->mode == TWOLAME_MONO) ? 1 : 2;



    // build mpeg header from parameters
    if (init_header_info(glopts) < 0) {
        return -1;
    }
    // Select table number and sblimit
    if (encode_init(glopts) < 0) {
        return -1;
    }
    // initialise bitrate allocation
    if (init_bit_allocation(glopts) < 0) {
        return -1;
    }
    // Check input samplerate is same as output samplerate
    if (glopts->samplerate_out != glopts->samplerate_in) {
        fprintf(stderr,
                "twolame_init_params(): sorry, twolame doesn't support resampling (yet).\n");
        return -1;
    }

    // Initialise internal variables
    glopts->samples_in_buffer = 0;
    glopts->psycount = 0;


    // Allocate memory to larger buffers 
    glopts->subband = (subband_t *) TWOLAME_MALLOC(sizeof(subband_t));
    glopts->j_sample = (jsb_sample_t *) TWOLAME_MALLOC(sizeof(jsb_sample_t));
    glopts->sb_sample = (sb_sample_t *) TWOLAME_MALLOC(sizeof(sb_sample_t));

    // clear buffers
    memset((char *) glopts->buffer, 0, sizeof(glopts->buffer));
    memset((char *) glopts->bit_alloc, 0, sizeof(glopts->bit_alloc));
    memset((char *) glopts->scfsi, 0, sizeof(glopts->scfsi));
    memset((char *) glopts->scalar, 0, sizeof(glopts->scalar));
    memset((char *) glopts->j_scale, 0, sizeof(glopts->j_scale));
    memset((char *) glopts->smrdef, 0, sizeof(glopts->smrdef));
    memset((char *) glopts->smr, 0, sizeof(glopts->smr));
    memset((char *) glopts->max_sc, 0, sizeof(glopts->max_sc));

    // Initialise subband windowfilter
    if (init_subband(&glopts->smem) < 0) {
        return -1;
    }
    // All initialised now :)
    glopts->twolame_init++;

    return (0);
}
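For context, the usual call order around twolame_init_params() is: allocate the options, set the input parameters, then initialise before encoding. A minimal sketch assuming the public TwoLAME API from <twolame.h> (twolame_init, twolame_set_num_channels, twolame_set_in_samplerate, twolame_set_bitrate, twolame_encode_buffer_interleaved, twolame_close); the buffer size is illustrative.

#include <stdio.h>
#include <twolame.h>

int encode_interleaved_block(const short *pcm, int samples_per_channel)
{
    unsigned char mp2buf[16384];          /* illustrative output buffer size */
    twolame_options *opts = twolame_init();
    if (opts == NULL)
        return -1;

    twolame_set_num_channels(opts, 2);
    twolame_set_in_samplerate(opts, 44100);
    twolame_set_bitrate(opts, 192);

    if (twolame_init_params(opts) != 0) { /* runs the checks shown above */
        twolame_close(&opts);
        return -1;
    }

    int n = twolame_encode_buffer_interleaved(opts, pcm, samples_per_channel,
                                              mp2buf, sizeof(mp2buf));
    if (n < 0)
        fprintf(stderr, "encode failed: %d\n", n);

    twolame_close(&opts);
    return n;
}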
Example #12
static int read_cwdb(char *filename, char *gameid)
{
	int pos=0;
	char *p;
	char cw_buf[300];
	u32 address,val;
	t_mem_table	t;
	PspFile pf;

	if(openfile(filename, &pf)==0) return 1;
	
	if(gameid!=NULL){
		char codename[11];
		mips_memcpy(codename,gameid,10);
		codename[10]=0;
		do{
			pos=read_sect(pos,&pf);
			p=strstr(pf.buf,codename);
			if(p) break;
		}
		while(pos);
	}
	else{
		read_sect(pos,&pf);
		p=pf.buf;
		p=read_line(p,cw_buf);
	}
	
	if(p==0) {
		closefile(&pf);
		return 1;
	}

#ifdef BIG5_ENCODE_TEXT
	t_encodepack pack;
	char *big5buf = malloc(41688);
	if(big5buf==NULL) return 1;
	if(big5_init(big5buf,&pack)==0 && encode_init(&pack)==0)
	{
		charsets_big5_conv(p,&pack);
		encode_free(&pack);
	}
	free(big5buf);
#endif
	
	p=read_line(p,cw_buf);
	mips_memcpy(ui_get_gamename()+12,cw_buf+3,0x40);
	
	int repeat=0;
	int lock =0;
	char namebuf[80];
	char *namep;
	char nullcode=0;
	while(1){
		// cheat code section
		p=read_line(p,cw_buf);

		if(cw_buf[0]=='_'){
			if(cw_buf[1]=='C'){
				if(nullcode==1) {
					t.addr=0x8800000;
					t.value=0;
					t.type=0;
					t.lock=0;
					if(mem_table_add(&t)<0) goto READOUT;
				}
				repeat=0;
				namep = namebuf;
				mips_memcpy(namebuf,cw_buf+4,70);
				lock = strtoul(cw_buf+2,NULL,16);
				namep = read_name(namep, t.name, 10);
				mips_memcpy(t.name,namebuf,30);
				t.name[30]=0;
				t.name[31]=0;
				nullcode=1;
			}
			else if(cw_buf[1]=='L'){
				nullcode=0;
				if(repeat<5){
					if(repeat==0) {
						// first _L line keeps the name set by the _C line
					}
					else{
						t.name[0] = '+';
						namep = read_name(namep, t.name+1, 9);
					}
					repeat++;
				}
				else{ //strcpy(t.name,"+");
					t.name[0]='+';
					t.name[1]=0;
				}
				char *tempptr;
				address=strtoul(cw_buf+2,&tempptr,16)+0x08800000;
				val=strtoul(tempptr,NULL,16);
				t.addr=address;
				t.value=val;
				t.type=0;
				t.lock=lock;
				if(mem_table_add(&t)<0) goto READOUT;
			}
			else if(cw_buf[1]=='S'){
				break;
			}
		}
		if(p==0) break;
		if(p[0]=='_' && p[1]=='S') break;
	}
	
READOUT:
	closefile(&pf);
	return 0;
}
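The parser above walks a CWCheat-style text database. A short illustration of the line structure it expects, inferred from the prefixes matched in the loop (_C with a lock digit and a name, _L with hex address/value pairs, _S ending a block) and the +3 offset used for the game-name line; the IDs and values themselves are invented.

/* Illustrative database fragment; only the line structure is inferred from
 * the parser above, the concrete IDs and values are made up. */
static const char example_cwdb[] =
    "_S ULUS-00000\n"
    "_G Example Game\n"
    "_C0 Infinite Health\n"
    "_L 0x001234F0 0x00000063\n"
    "_L 0x001234F4 0x00000063\n"
    "_S ULUS-00001\n";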
Example #13
static int encode_test(jpeg_test_input_t *p_input)
{
  int rc = 0;
  mm_jpeg_intf_test_t jpeg_obj;
  uint32_t i = 0;

  memset(&jpeg_obj, 0x0, sizeof(jpeg_obj));
  rc = encode_init(p_input, &jpeg_obj);
  if (rc) {
    CDBG_ERROR("%s:%d] Error",__func__, __LINE__);
    return -1;
  }

  mm_dimension pic_size;
  memset(&pic_size, 0, sizeof(mm_dimension));
  pic_size.w = (uint32_t)p_input->width;
  pic_size.h = (uint32_t)p_input->height;

  jpeg_obj.handle = jpeg_open(&jpeg_obj.ops, pic_size);
  if (jpeg_obj.handle == 0) {
    CDBG_ERROR("%s:%d] Error",__func__, __LINE__);
    goto end;
  }

  rc = jpeg_obj.ops.create_session(jpeg_obj.handle, &jpeg_obj.params,
    &jpeg_obj.job.encode_job.session_id);
  if (jpeg_obj.job.encode_job.session_id == 0) {
    CDBG_ERROR("%s:%d] Error",__func__, __LINE__);
    goto end;
  }

  for (i = 0; i < jpeg_obj.num_bufs; i++) {
    jpeg_obj.job.job_type = JPEG_JOB_TYPE_ENCODE;
    jpeg_obj.job.encode_job.src_index = (int32_t) i;
    jpeg_obj.job.encode_job.dst_index = (int32_t) i;
    jpeg_obj.job.encode_job.thumb_index = (uint32_t) i;

    if (jpeg_obj.params.burst_mode && jpeg_obj.min_out_bufs) {
      jpeg_obj.job.encode_job.dst_index = -1;
    }

    rc = jpeg_obj.ops.start_job(&jpeg_obj.job, &jpeg_obj.job_id[i]);

    if (rc) {
      CDBG_ERROR("%s:%d] Error",__func__, __LINE__);
      goto end;
    }
  }
  jpeg_obj.job_id[i] = 0;

  /*
  usleep(5);
  jpeg_obj.ops.abort_job(jpeg_obj.job_id[0]);
  */
  pthread_mutex_lock(&jpeg_obj.lock);
  pthread_cond_wait(&jpeg_obj.cond, &jpeg_obj.lock);
  pthread_mutex_unlock(&jpeg_obj.lock);


  jpeg_obj.ops.destroy_session(jpeg_obj.job.encode_job.session_id);
  jpeg_obj.ops.close(jpeg_obj.handle);

end:
  for (i = 0; i < jpeg_obj.num_bufs; i++) {
    if (!jpeg_obj.min_out_bufs) {
      // Save output files
      CDBG_ERROR("%s:%d] Saving file%s addr %p len %zu",
          __func__, __LINE__,jpeg_obj.out_filename[i],
          jpeg_obj.output[i].addr, jpeg_obj.buf_filled_len[i]);

      DUMP_TO_FILE(jpeg_obj.out_filename[i], jpeg_obj.output[i].addr,
        jpeg_obj.buf_filled_len[i]);
    }
    mm_jpeg_test_free(&jpeg_obj.input[i]);
    mm_jpeg_test_free(&jpeg_obj.output[i]);
  }
  return 0;
}