Example #1
HICON CSIM_ext::createIcon(const char *name)
{
    ICON_MAP::iterator it = m_icons.find(name);
    if (it != m_icons.end())
        return (*it).second;
    string cmd = "ICON ";
    cmd += name;
    CComBSTR in(cmd.c_str());
    CComBSTR out;
    if (!ProcessStr || !ProcessStr(in, &out))
        return NULL;
    size_t size = WideCharToMultiByte(CP_ACP, 0, out, wcslen(out), 0, 0, NULL, NULL);
    char *res = new char[size + 1];
    size = WideCharToMultiByte(CP_ACP, 0, out, wcslen(out), res, size, NULL, NULL);
    res[size] = 0;
    if (res[0] != '>') {
        delete[] res;   // avoid leaking the conversion buffer on failure
        return NULL;
    }
    string r = res + 1;
    delete[] res;
    ICONINFO info;
    info.fIcon    = TRUE;
    info.xHotspot = 8;
    info.yHotspot = 8;
    info.hbmMask  = createBitmap(r);
    info.hbmColor = createBitmap(r);
    HICON hIcon = CreateIconIndirect(&info);
    m_icons.insert(ICON_MAP::value_type(name, hIcon));
    DeleteObject(info.hbmMask);
    DeleteObject(info.hbmColor);
    return hIcon;
}
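The snippet relies on a per-extension icon cache and a string-based createBitmap helper that are not shown. A minimal sketch of the declarations it assumes (hypothetical names mirroring the usage above, not taken from the source project):

#include <windows.h>
#include <map>
#include <string>

// Hypothetical declarations matching the usage in the example above.
typedef std::map<std::string, HICON> ICON_MAP;        // icon name -> cached handle

class CSIM_ext {
    ICON_MAP m_icons;                                 // built icons are cached here
    HBITMAP createBitmap(const std::string& data);    // turns the ">..." payload into a bitmap
public:
    HICON createIcon(const char* name);
};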
Example #2
// unused
static void TestNWayCanvasStateConsistency(
    skiatest::Reporter* reporter,
    const TestData& d,
    CanvasTestStep* testStep,
    const SkCanvas& referenceCanvas) {

    SkBitmap indirectStore1;
    createBitmap(&indirectStore1, 0xFFFFFFFF);
    SkCanvas indirectCanvas1(indirectStore1);

    SkBitmap indirectStore2;
    createBitmap(&indirectStore2, 0xFFFFFFFF);
    SkCanvas indirectCanvas2(indirectStore2);

    SkISize canvasSize = referenceCanvas.getDeviceSize();
    SkNWayCanvas nWayCanvas(canvasSize.width(), canvasSize.height());
    nWayCanvas.addCanvas(&indirectCanvas1);
    nWayCanvas.addCanvas(&indirectCanvas2);

    testStep->setAssertMessageFormat(kNWayDrawAssertMessageFormat);
    testStep->draw(&nWayCanvas, d, reporter);
    // Verify that the SkNWayCanvas reports consistent state
    testStep->setAssertMessageFormat(kNWayStateAssertMessageFormat);
    AssertCanvasStatesEqual(reporter, d, &nWayCanvas, &referenceCanvas, testStep);
    // Verify that the indirect canvases report consistent state
    testStep->setAssertMessageFormat(kNWayIndirect1StateAssertMessageFormat);
    AssertCanvasStatesEqual(reporter, d, &indirectCanvas1, &referenceCanvas, testStep);
    testStep->setAssertMessageFormat(kNWayIndirect2StateAssertMessageFormat);
    AssertCanvasStatesEqual(reporter, d, &indirectCanvas2, &referenceCanvas, testStep);
}
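SkNWayCanvas works by fanning every canvas call out to each canvas registered with addCanvas(), which is why both indirect canvases are expected to end up in the same state as the reference. A simplified sketch of the fan-out idea (illustrative only, not Skia's implementation):

#include "SkCanvas.h"
#include <vector>

// Illustrative fan-out: forward each draw call to every registered canvas.
class FanOutSketch {
public:
    void addCanvas(SkCanvas* canvas) { fTargets.push_back(canvas); }
    void drawRect(const SkRect& rect, const SkPaint& paint) {
        for (SkCanvas* c : fTargets)
            c->drawRect(rect, paint);   // same call, every target
    }
private:
    std::vector<SkCanvas*> fTargets;
};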
Example #3
/*
 * This sub-test verifies that the test step passes when executed
 * with SkCanvas and with classes derived from SkCanvas. It also verifies
 * that all canvas derivatives report the same state as an SkCanvas
 * after having executed the test step.
 */
static void TestOverrideStateConsistency(skiatest::Reporter* reporter, const TestData& d,
                                         CanvasTestStep* testStep) {
    SkBitmap referenceStore;
    createBitmap(&referenceStore, 0xFFFFFFFF);
    SkCanvas referenceCanvas(referenceStore);
    testStep->setAssertMessageFormat(kCanvasDrawAssertMessageFormat);
    testStep->draw(&referenceCanvas, d, reporter);

    SkDeferredCanvasTester::TestDeferredCanvasStateConsistency(reporter, d, testStep, referenceCanvas, false);

    SkDeferredCanvasTester::TestDeferredCanvasStateConsistency(reporter, d, testStep, referenceCanvas, true);

    // The following test code is disabled because SkNWayCanvas does not
    // report correct clipping and device bounds information
    // Issue: http://code.google.com/p/skia/issues/detail?id=501

    if (false) { // avoid bit rot, suppress warning
        TestNWayCanvasStateConsistency(reporter, d, testStep, referenceCanvas);
    }

    if (false) { // avoid bit rot, suppress warning
        test_clipVisitor(reporter, &referenceCanvas);
    }
    test_clipstack(reporter);
}
Example #4
int WINAPI WinMain(HINSTANCE instance, HINSTANCE, LPSTR cmdLine, int)
{
	int argc = 0;
	int nc = 0;
	parseCommandLine(0,0,&argc,&nc);
	char** argv = (char**)malloc((argc*sizeof(char*))+(nc*sizeof(char)));
	parseCommandLine(argv,((char*)argv)+(argc*sizeof(char*)),&argc,&nc);
	argc--;

	InitCommonControls();
	lines = (void**)calloc(MAX_LINES,sizeof (void*));

	if (argc > 1)
	{
		for (int i = 1; i < argc; i++)
			readFile(argv[i]);
	}
	else
		promptReadFiles();

	createWindow(instance);
	createBitmap();
	msgLoop();
	return 0;
}
Example #5

jint naSetup(JNIEnv *pEnv, jobject pObj, int pWidth, int pHeight) {
	width = pWidth;
	height = pHeight;
	//create a bitmap as the buffer for frameRGBA
	bitmap = createBitmap(pEnv, pWidth, pHeight);
	if (AndroidBitmap_lockPixels(pEnv, bitmap, &buffer) < 0)
		return -1;
	//get the scaling context
	sws_ctx = sws_getContext (
	        codecCtx->width,
	        codecCtx->height,
	        codecCtx->pix_fmt,
	        pWidth,
	        pHeight,
	        AV_PIX_FMT_RGBA,
	        SWS_BILINEAR,
	        NULL,
	        NULL,
	        NULL
	);
	// Assign appropriate parts of bitmap to image planes in pFrameRGBA
	// Note that pFrameRGBA is an AVFrame, but AVFrame is a superset
	// of AVPicture
	avpicture_fill((AVPicture *)frameRGBA, buffer, AV_PIX_FMT_RGBA,
			pWidth, pHeight);
	return 0;
}
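avpicture_fill() only computes plane pointers and line sizes into the locked bitmap buffer; no pixels move until sws_scale() writes into those planes. The call is deprecated in modern FFmpeg, where the equivalent (an assumption to verify against your FFmpeg version) is:

#include <libavutil/imgutils.h>

// Modern replacement for avpicture_fill(): point frameRGBA's data/linesize
// at the locked Android bitmap buffer, without copying any pixels.
av_image_fill_arrays(frameRGBA->data, frameRGBA->linesize,
                     (const uint8_t*)buffer, AV_PIX_FMT_RGBA,
                     pWidth, pHeight, 1);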
Example #6

bool DeferredImageDecoder::createFrameAtIndex(size_t index, SkBitmap* bitmap)
{
    prepareLazyDecodedFrames();
    if (index < m_frameData.size()) {
        // ImageFrameGenerator has the latest known alpha state. There will
        // be a performance boost if this frame is opaque.
        *bitmap = createBitmap(index);
        if (m_frameGenerator->hasAlpha(index)) {
            m_frameData[index].m_hasAlpha = true;
            bitmap->setAlphaType(kPremul_SkAlphaType);
        } else {
            m_frameData[index].m_hasAlpha = false;
            bitmap->setAlphaType(kOpaque_SkAlphaType);
        }
        m_frameData[index].m_frameBytes = m_size.area() *  sizeof(ImageFrame::PixelData);
        return true;
    }
    if (m_actualDecoder) {
        ImageFrame* buffer = m_actualDecoder->frameBufferAtIndex(index);
        if (!buffer || buffer->status() == ImageFrame::FrameEmpty)
            return false;
        *bitmap = buffer->bitmap();
        return true;
    }
    return false;
}
Example #7
/*
 * This sub-test verifies that the test step passes when executed
 * with SkCanvas and with classes derived from SkCanvas. It also verifies
 * that all canvas derivatives report the same state as an SkCanvas
 * after having executed the test step.
 */
static void TestOverrideStateConsistency(skiatest::Reporter* reporter, const TestData& d,
                                         CanvasTestStep* testStep) {
    SkBitmap referenceStore;
    createBitmap(&referenceStore, 0xFFFFFFFF);
    SkCanvas referenceCanvas(referenceStore);
    testStep->setAssertMessageFormat(kCanvasDrawAssertMessageFormat);
    testStep->draw(&referenceCanvas, d, reporter);
}
Example #8
void DeferredImageDecoder::prepareLazyDecodedFrames()
{
    if (!s_enabled
        || !m_actualDecoder
        || !m_actualDecoder->isSizeAvailable()
        || m_actualDecoder->filenameExtension() == "ico")
        return;

    activateLazyDecoding();

    const size_t previousSize = m_lazyDecodedFrames.size();
    m_lazyDecodedFrames.resize(m_actualDecoder->frameCount());

    // We have encountered a broken image file. Simply bail.
    if (m_lazyDecodedFrames.size() < previousSize)
        return;

    for (size_t i = previousSize; i < m_lazyDecodedFrames.size(); ++i) {
        OwnPtr<ImageFrame> frame(adoptPtr(new ImageFrame()));
        frame->setSkBitmap(createBitmap(i));
        frame->setDuration(m_actualDecoder->frameDurationAtIndex(i));
        frame->setStatus(m_actualDecoder->frameIsCompleteAtIndex(i) ? ImageFrame::FrameComplete : ImageFrame::FramePartial);
        m_lazyDecodedFrames[i] = frame.release();
    }

    // The last lazy decoded frame created by the previous call might be
    // incomplete, so update its state.
    if (previousSize) {
        const size_t lastFrame = previousSize - 1;
        m_lazyDecodedFrames[lastFrame]->setStatus(m_actualDecoder->frameIsCompleteAtIndex(lastFrame) ? ImageFrame::FrameComplete : ImageFrame::FramePartial);

        // If data has changed then create a new bitmap. This forces
        // Skia to decode again.
        if (m_dataChanged) {
            m_dataChanged = false;
            m_lazyDecodedFrames[lastFrame]->setSkBitmap(createBitmap(lastFrame));
        }
    }

    if (m_allDataReceived) {
        m_repetitionCount = m_actualDecoder->repetitionCount();
        m_actualDecoder.clear();
        m_data = nullptr;
    }
}
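Blink's OwnPtr/adoptPtr idiom used above corresponds to std::unique_ptr in standard C++; a sketch of the same frame-creation step in that style (illustrative only):

#include <memory>

// Standard-library equivalent of the OwnPtr/adoptPtr pattern above.
std::unique_ptr<ImageFrame> frame(new ImageFrame());
frame->setSkBitmap(createBitmap(i));
frame->setDuration(m_actualDecoder->frameDurationAtIndex(i));
m_lazyDecodedFrames[i] = frame.release();   // container takes ownership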
Example #9
File: revtrie.c  Project: peper/pizza
revtrie createRevTrie(uint *string, lztrie trie, uint *emptybmap, uint n)
 { 
    revtrie T;
    uint i;
    T        = malloc(sizeof(struct srevtrie));
    T->data  = string;
    T->pdata = createParentheses(string,2*n,false,true);
    T->B     = createBitmap(emptybmap, n,false);
    T->n     = n;
    T->trie  = trie;
    return T;
 }
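Here createBitmap wraps an already-filled bit array of n bits in a rank/select-capable bitmap; the exact meaning of the boolean flag is project-specific (plausibly whether the input words are copied or owned — an assumption worth checking against the project's bitmap.h). A hedged sketch of preparing the emptybmap argument, with W the bits-per-word constant used throughout this codebase and nodeIsEmpty a hypothetical predicate:

// Illustrative only: building the empty-node bit vector for createRevTrie.
uint n = 1024;                                        // example node count
uint *emptybmap = (uint*)calloc((n + W - 1) / W, sizeof(uint));
for (uint i = 0; i < n; i++)
    if (nodeIsEmpty(i))                               // hypothetical predicate
        emptybmap[i / W] |= 1u << (i % W);            // same layout as bitset()
revtrie T = createRevTrie(string, trie, emptybmap, n);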
Example #10
jobject
Java_org_giac_xcaspad_Calculator_getBitmap(JNIEnv* env, jobject thiz, jint windowsize, jint fontsize, jdouble r, jdouble g, jdouble b, jstring operation){

    AndroidBitmapInfo  info;
    void*              pixels;
    int                ret;

    const char *compute = env->GetStringUTFChars(operation, 0);

    giac::gen gen(compute, contextptr);

    xcas::PrettyPrint prettyprint((int)windowsize, (int)fontsize, gen);

    int width = prettyprint.getWidth();
    int height = prettyprint.getHeight();

    jobject bitmap = createBitmap(env, width, height, "ARGB_8888");

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed: error=%d", ret);
        return NULL;
    }

    if (info.format != ANDROID_BITMAP_FORMAT_RGB_565 && info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE("Bitmap format is not RGB_565 or ARGB_8888");
        return NULL;
    }

    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed: error=%d", ret);
        return NULL;
    }

    cairo_surface_t *cs = NULL;

    if (info.format == ANDROID_BITMAP_FORMAT_RGB_565) {
        cs = cairo_image_surface_create_for_data((unsigned char*)pixels, CAIRO_FORMAT_RGB16_565, info.width, info.height, info.stride);
    } else if (info.format == ANDROID_BITMAP_FORMAT_RGBA_8888) {
        cs = cairo_image_surface_create_for_data((unsigned char*)pixels, CAIRO_FORMAT_RGB24, info.width, info.height, info.stride);
    }

    try{

        prettyprint.draw(cs, (double)r, (double)g, (double)b);

    }catch (std::runtime_error & err){
        env->ThrowNew(env->FindClass("java/lang/Exception"), err.what());
    }

    cairo_surface_destroy(cs);

    AndroidBitmap_unlockPixels(env, bitmap);

    return bitmap;
}
Example #11
//--------------------------------------------------------------
// set dimensions of surface
void BitmapSurface::setSurfaceSize(
  int width,
  int height)
{
  if (width == m_bitmapWidth && height == m_bitmapHeight)
    return;

  mgGenSurface::setSurfaceSize(width, height);
  deleteBitmap();
  createBitmap(width, height);
  m_bitmapWidth = width;
  m_bitmapHeight = height;
}
Example #12
File: revtrie.c  Project: peper/pizza
revtrie createRevTrie(uint *string, uint *id, lztrie trie, uint *emptybmap, uint n)
 { 
    revtrie T;
    uint i;
    T        = malloc(sizeof(struct srevtrie));
    T->data  = string;
    T->pdata = createParentheses(string,2*n,false,true);
    T->B     = createBitmap(emptybmap, n,false);
    T->n     = n;
    T->nbits = bits(trie->n-1);
    T->rids  = createPerm(id, trie->n, PARAMETER_T_RIDS);
    T->trie  = trie;
    return T;
 }
Example #13
//--------------------------------------------------------------
// set map size
void FlatWorld::resize(
  int width,
  int height)
{
  if (width == m_mapWidth && height == m_mapHeight)
    return;

  m_mapWidth = width;
  m_mapHeight = height;

  destroyBitmap();
  createBitmap();

  rebuildTerrain(0, 0, m_mapWidth, m_mapHeight);
}
Example #14
void Head::addParts()
{
	//should use for loop
	dictionary.insert(cratePair("fairy", createBitmap("Fairy_0.png")));
	dictionary.insert(cratePair("frankenstein", createBitmap("Frankenstein_0.png")));
	dictionary.insert(cratePair("skeleton", createBitmap("Skeleton_0.png")));
	dictionary.insert(cratePair("vampire", createBitmap("Vampire_0.png")));
	dictionary.insert(cratePair("werewolf", createBitmap("Werewolf_0.png")));
	dictionary.insert(cratePair("witch", createBitmap("Witch_0.png")));
}
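Following the author's own "should use for loop" note, the repetition could collapse into a table-driven loop (a sketch assuming dictionary, cratePair, and createBitmap behave as used above):

// Possible loop form hinted at by the note in the example.
const char* parts[][2] = {
    {"fairy", "Fairy_0.png"},       {"frankenstein", "Frankenstein_0.png"},
    {"skeleton", "Skeleton_0.png"}, {"vampire", "Vampire_0.png"},
    {"werewolf", "Werewolf_0.png"}, {"witch", "Witch_0.png"},
};
for (const auto& p : parts)
    dictionary.insert(cratePair(p[0], createBitmap(p[1])));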
Example #15
bool GiGdipImage::draw(GiCanvasWin& graph, const Box2d& rectW, bool fast) const
{
    bool ret = false;

    if (getWidth() > 0)
    {
        if (graph.getCanvasType() == 2)
        {
            GiCanvasGdip* gs = (GiCanvasGdip*)(&graph);
            ret = gs->drawGdipImage(
                getHmWidth(), getHmHeight(), m_impl->bmp, rectW, fast);
        }
        else
        {
            HBITMAP hBmp = createBitmap(graph.getBkColor());
            ret = graph.drawImage(getHmWidth(), getHmHeight(),
                hBmp, rectW, fast);
            ::DeleteObject(hBmp);
        }
    }

    return ret;
}
Example #16
revtrie loadRevTrie(FILE *f, lztrie trie)
 { 
    revtrie T;
    uint *emptybmap;
    unsigned long long aux;
    T = malloc(sizeof(struct srevtrie));
    if (fread(&T->n,sizeof(uint),1,f) != 1) {
       fprintf(stderr,"Error: Cannot read RevTrie from file\n");
       exit(1);
    }
    aux = (2*(unsigned long long)T->n+W-1)/W;
    T->data = malloc(aux*sizeof(uint));
    if (fread(T->data,sizeof(uint),aux,f) != aux) {
       fprintf (stderr,"Error: Cannot read RevTrie from file\n");
       exit(1);
    }                                                                       
    T->pdata = createParentheses(T->data,2*T->n,false,true);
    // loads the bitstring indicating the empty nodes
    emptybmap = malloc(((T->n+W-1)/W)*sizeof(uint));
    if (fread(emptybmap,sizeof(uint),(T->n+W-1)/W,f) != (T->n+W-1)/W) {
       fprintf(stderr,"Error: Cannot read RevTrie from file\n");
       exit(1);
    }
    // creates, from the above bitstring, the bitmap indicating the empty nodes
    T->B = createBitmap(emptybmap, T->n, false);
    T->nbits = bits(trie->n-1);
    aux = (((unsigned long long)trie->n)*trie->nbits+W-1)/W;
    T->rids_1 = malloc(aux*sizeof(uint));
    if (fread(T->rids_1,sizeof(uint),aux,f)!= aux) {
       fprintf(stderr,"Error: Cannot read RevTrie from file\n");
       exit(1);
    }
    
    T->trie = trie;
    return T;
 }
Example #17
 void init() {
     if (fOnce.once()) {
         fBitmap = createBitmap(64, 64);
     }
 }
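fOnce.once() is Skia's one-time-initialization guard; the same pattern in standard C++ uses std::call_once. An equivalent sketch (fOnceFlag is a hypothetical std::once_flag member standing in for Skia's fOnce):

#include <mutex>

void init() {
    // Run the bitmap creation at most once, even across threads.
    std::call_once(fOnceFlag, [this] { fBitmap = createBitmap(64, 64); });
}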
Example #18
File: perm.c  Project: peper/pizza
perm createPerm(uint *elems, uint nelems, uint t)
 {
    perm P;
    uint *b, *baux, nextelem, i, j, bptr, 
         aux, antbptr,nbwdptrs, elem,nbits, firstelem, cyclesize;
    auxbwd *auxbwdptr;

    P = malloc(sizeof(struct sperm));
    P->elems  = elems;
    P->nelems = nelems;
    nbits = P->nbits  = bits(nelems-1);
    P->t = t;
    if (t==1) {
       P->bwdptrs = malloc((((unsigned long long)nelems*nbits+W-1)/W)*sizeof(uint));
       P->nbwdptrs = nelems;
       for (i=0; i<nelems; i++)
          bitput(P->bwdptrs, bitget(elems, i*nbits, nbits)*nbits, nbits, i);
       P->bmap = NULL;  
    }
    else {
       auxbwdptr = malloc(sizeof(auxbwd)*(t+((int)ceil((double)nelems/t))));
       b = calloc(((nelems+W-1)/W), sizeof(uint));
       baux = calloc(((nelems+W-1)/W), sizeof(uint));
       nbwdptrs = 0; 
       for (i = 0; i < nelems; i++) {
          if (bitget1(baux,i) == 0) {
             nextelem = j = bptr = antbptr = i; 
             aux = 0;
             bitset(baux, j);
             cyclesize = 0;
             firstelem = j;
             while ((elem=bitget(elems,j*nbits,nbits)) != nextelem) {
                j = elem;
                bitset(baux, j);
                aux++;
                if (aux >= t) {
                   auxbwdptr[nbwdptrs].key = j;
                   auxbwdptr[nbwdptrs++].pointer = bptr;
                   antbptr = bptr;
                   bptr    = j;
                   aux     = 0;
                   bitset(b, j);
                }
                cyclesize++;
             }
             if (cyclesize >= t) {
                auxbwdptr[nbwdptrs].key = nextelem;
                auxbwdptr[nbwdptrs++].pointer = bptr;
                bitset(b, nextelem);
             }
          }
       }
       qsort(auxbwdptr, nbwdptrs, sizeof(auxbwd), &compare);
       aux = ((unsigned long long)nbwdptrs*P->nbits+W-1)/W;
       P->bwdptrs = malloc(sizeof(uint)*aux); 
       P->nbwdptrs = nbwdptrs;
       for (i = 0; i < nbwdptrs; i++) 
          bitput(P->bwdptrs, i*nbits, nbits, auxbwdptr[i].pointer); 
       P->bmap = createBitmap(b, nelems, false);
       free(baux);
       free(auxbwdptr);
    }
#ifdef QUERYREPORT
    P->cont_invperm = 0;
    P->cont_perm = 0;
#endif    
    return P;
 }
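The t-spaced entries written to bwdptrs (with bmap marking which positions carry one) exist to cap inverse lookups at t forward steps. Without them, inverting the permutation means walking a whole cycle, as this self-contained illustration shows:

#include <cstdio>

// Naive permutation inversion by cycle walking - the operation that the
// sampled backward pointers in createPerm() accelerate (illustrative only).
unsigned naiveInverse(const unsigned* elems, unsigned v) {
    unsigned j = v;
    while (elems[j] != v)   // follow the cycle containing v
        j = elems[j];
    return j;               // elems[j] == v, so j is the preimage of v
}

int main() {
    unsigned P[] = {2, 0, 3, 1};         // P(0)=2, P(1)=0, P(2)=3, P(3)=1
    printf("%u\n", naiveInverse(P, 3));  // prints 2, since P(2)=3
    return 0;
}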
Example #19
jint naMain(JNIEnv *pEnv, jobject pObj, jobject pMainAct, jstring pFileName, jint pNumOfFrames) {
	AVFormatContext *pFormatCtx = NULL;
	int             i, videoStream;
	AVCodecContext  *pCodecCtx = NULL;
	AVCodec         *pCodec = NULL;
	AVFrame         *pFrame = NULL;
	AVFrame         *pFrameRGBA = NULL;
	AVPacket        packet;
	int             frameFinished;
	jobject			bitmap;
	void* 			buffer;

	AVDictionary    *optionsDict = NULL;
	struct SwsContext      *sws_ctx = NULL;
	char *videoFileName;

	// Register all formats and codecs
	av_register_all();

	//get C string from JNI jstring
	videoFileName = (char *)(*pEnv)->GetStringUTFChars(pEnv, pFileName, NULL);

	// Open video file
	if(avformat_open_input(&pFormatCtx, videoFileName, NULL, NULL)!=0)
		return -1; // Couldn't open file

	// Retrieve stream information
	if(avformat_find_stream_info(pFormatCtx, NULL)<0)
		return -1; // Couldn't find stream information

	// Dump information about file onto standard error
	av_dump_format(pFormatCtx, 0, videoFileName, 0);

	// Find the first video stream
	videoStream=-1;
	for(i=0; i<pFormatCtx->nb_streams; i++) {
		if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
			videoStream=i;
			break;
		}
	}
	if(videoStream==-1)
		return -1; // Didn't find a video stream

	// Get a pointer to the codec context for the video stream
	pCodecCtx=pFormatCtx->streams[videoStream]->codec;

	// Find the decoder for the video stream
	pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
	if(pCodec==NULL) {
		fprintf(stderr, "Unsupported codec!\n");
		return -1; // Codec not found
	}
	// Open codec
	if(avcodec_open2(pCodecCtx, pCodec, &optionsDict)<0)
		return -1; // Could not open codec

	// Allocate video frame
	pFrame=avcodec_alloc_frame();

	// Allocate an AVFrame structure
	pFrameRGBA=avcodec_alloc_frame();
	if(pFrameRGBA==NULL)
		return -1;

	//create a bitmap as the buffer for pFrameRGBA
	bitmap = createBitmap(pEnv, pCodecCtx->width, pCodecCtx->height);
	if (AndroidBitmap_lockPixels(pEnv, bitmap, &buffer) < 0)
		return -1;
	//get the scaling context
	sws_ctx = sws_getContext
    (
        pCodecCtx->width,
        pCodecCtx->height,
        pCodecCtx->pix_fmt,
        pCodecCtx->width,
        pCodecCtx->height,
        AV_PIX_FMT_RGBA,
        SWS_BILINEAR,
        NULL,
        NULL,
        NULL
    );

	// Assign appropriate parts of bitmap to image planes in pFrameRGBA
	// Note that pFrameRGBA is an AVFrame, but AVFrame is a superset
	// of AVPicture
	avpicture_fill((AVPicture *)pFrameRGBA, buffer, AV_PIX_FMT_RGBA,
		 pCodecCtx->width, pCodecCtx->height);

	// Read frames and save first five frames to disk
	i=0;
	while(av_read_frame(pFormatCtx, &packet)>=0) {
		// Is this a packet from the video stream?
		if(packet.stream_index==videoStream) {
			// Decode video frame
			avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished,
			   &packet);
			// Did we get a video frame?
			if(frameFinished) {
				// Convert the image from its native format to RGBA
				sws_scale
				(
					sws_ctx,
					(uint8_t const * const *)pFrame->data,
					pFrame->linesize,
					0,
					pCodecCtx->height,
					pFrameRGBA->data,
					pFrameRGBA->linesize
				);

				// Save the frame to disk
				if(++i<=pNumOfFrames) {
					SaveFrame(pEnv, pMainAct, bitmap, pCodecCtx->width, pCodecCtx->height, i);
					LOGI("save frame %d", i);
				}
			}
		}
		// Free the packet that was allocated by av_read_frame
		av_free_packet(&packet);
	}

	//unlock the bitmap
	AndroidBitmap_unlockPixels(pEnv, bitmap);

	// Free the RGB image
	av_free(pFrameRGBA);

	// Free the YUV frame
	av_free(pFrame);

	// Close the codec
	avcodec_close(pCodecCtx);

	// Close the video file
	avformat_close_input(&pFormatCtx);

	return 0;
}
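avcodec_alloc_frame() was deprecated in FFmpeg 2.x; on current FFmpeg the frame allocations above would read (equivalent sketch):

#include <libavutil/frame.h>

// Modern replacements for the deprecated avcodec_alloc_frame():
AVFrame *pFrame     = av_frame_alloc();
AVFrame *pFrameRGBA = av_frame_alloc();
// ...and av_frame_free(&pFrame) instead of av_free(pFrame) when done.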
Example #20
 MipMapView() {
     fBitmap = createBitmap(N);
     
     fWidth = N;
 }
Example #21

static SkBitmap testBitmap() {
    SkBitmap bitmap;
    createBitmap(&bitmap, SkBitmap::kARGB_8888_Config, 0x05060708);
    return bitmap;
}
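SkBitmap::kARGB_8888_Config comes from Skia's legacy config enum; later Skia replaced SkBitmap::Config with SkColorType, so the modern equivalent of this helper is roughly the following (a sketch against newer Skia; the 64x64 size is assumed, as the original allocation is inside createBitmap):

#include "SkBitmap.h"
#include "SkImageInfo.h"

// Same 32-bit premultiplied bitmap on post-Config Skia (size assumed).
static SkBitmap testBitmapModern() {
    SkBitmap bitmap;
    bitmap.allocPixels(SkImageInfo::MakeN32Premul(64, 64));
    bitmap.eraseColor(0x05060708);   // matches the fill color used above
    return bitmap;
}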
Example #22
// start the player
int Java_info_sodapanda_sodaplayer_FFmpegVideoView_openfile(JNIEnv* env,jobject obj,jstring file,jlong ptr){
	playInstance * instance = (playInstance *)ptr;
	// initialize the packet queues
	instance->queue = malloc(sizeof(struct threadqueue));
	thread_queue_init(instance->queue);
	instance->video_queue = malloc(sizeof(struct threadqueue));
	thread_queue_init(instance->video_queue);
	instance->audio_queue = malloc(sizeof(struct threadqueue));
	thread_queue_init(instance->audio_queue);

	instance->stop=0;
	instance->timeout_flag = 0;
	instance->vs=av_malloc(sizeof (VideoState));

	LOGE("开始执行openfile\n");
	jboolean isfilenameCopy;
	const char *filename = (*env)-> GetStringUTFChars(env, file, &isfilenameCopy);
	jclass cls = (*env)->GetObjectClass(env,obj);
	instance->initAdudioTrack = (*env)->GetMethodID(env,cls,"initAdudioTrack","(I)[B");
	instance->onNativeConnected = (*env)->GetMethodID(env,cls,"onNativeConnected","()V");
	instance->finishplay = (*env)->GetMethodID(env,cls,"finishplay","()V");

	(*env)->GetJavaVM(env,&(instance->gJavaVm));
	instance->gJavaobj = (*env)->NewGlobalRef(env,obj);

	//video
	AVFormatContext *pFormatCtx =NULL;
	AVCodecContext *pCodecCtx=NULL;
	AVCodec *pCodec=NULL;
	AVFrame *pFrame =NULL;
	int videoStream;
	AVDictionary *videoOptionsDict= NULL;
	struct SwsContext *sws_ctx =NULL;
	void* buffer;
	jobject bitmap;

	//audio
	AVCodecContext *aCodecCtx=NULL;
	AVCodec *aCodec=NULL;
	int audioStream;
	AVDictionary *audioOptionsDict = NULL;
	AVFrame *audio_frame;
	audio_frame = avcodec_alloc_frame();

	av_register_all();	// register codecs, formats, etc.
	avformat_network_init();	// initialize networking
	pFormatCtx= avformat_alloc_context();
	pFormatCtx->max_analyze_duration=1000000;// maximum stream-analysis duration, in microseconds
	pFormatCtx->interrupt_callback.callback = call_back;// set the interrupt callback
	pFormatCtx->interrupt_callback.opaque = instance;// argument passed to the interrupt callback

	// start the read thread early, so a timeout while opening the file can be caught
	pthread_t rtid;
	pthread_create(&rtid,NULL,getPacket,instance);

	// open the video file
	if(avformat_open_input(&pFormatCtx,filename, NULL, NULL)!=0){
		if(instance->stop){
			return 0;
		}
		LOGE("无法打开文件\n");
		return -1; // 无法打开视频文件
	}
	if(instance->stop){
		return 0;
	}

	// retrieve stream information
	if(avformat_find_stream_info(pFormatCtx, NULL)<0){
		LOGE("无法找到流信息\n");
		return -1;
	}

	av_dump_format(pFormatCtx, 0, filename, 0);// dump the parsed stream information

	videoStream = -1;
	audioStream = -1;

	int i =0;
	for (i=0;i<pFormatCtx->nb_streams;i++){// scan for the audio and video streams
		if(videoStream<0 && pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO){
			videoStream = i;
			instance->vs->video_time_base = av_q2d(pFormatCtx->streams[videoStream]->time_base);
			LOGE("videostream is %d\n",videoStream);
		}
		if(audioStream<0 && pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO){
			audioStream = i;
			LOGE("audiostream is %d\n",audioStream);
			instance->vs->audio_time_base = av_q2d(pFormatCtx->streams[audioStream]->time_base);
			instance->vs->sample_rate_src = pFormatCtx->streams[i]->codec->sample_rate;
			instance->vs->sample_fmt = pFormatCtx->streams[i]->codec->sample_fmt;
			instance->vs->sample_layout = pFormatCtx->streams[i]->codec->channel_layout;
			if(instance->vs->sample_rate_src <= 0){
				LOGE("Audio Sample Rate is wrong");
				return -1;
			}else{
				jbyteArray aarray = (jbyteArray)((*env)->CallObjectMethod(env,obj,instance->initAdudioTrack,instance->vs->sample_rate_src));
				instance->global_aarray = (*env)->NewGlobalRef(env,aarray);
				LOGE("initAdudioTrack返回\n");
			}
		}
	}

	if(videoStream==-1){
		LOGE("无法找到视频流");
		return -1;
	}

	// open the audio decoder
	if(audioStream != -1 && instance->vs->sample_rate_src>0){
		aCodecCtx = pFormatCtx->streams[audioStream]->codec;
		aCodec= avcodec_find_decoder(aCodecCtx->codec_id);

		if(avcodec_open2(aCodecCtx,aCodec,&audioOptionsDict)<0){
			LOGE("无法打开Audio解码器");
			return -1;
		}
	}

	// open the video decoder
	if(videoStream != -1){
		pCodecCtx=pFormatCtx->streams[videoStream]->codec;
		pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
		if(avcodec_open2(pCodecCtx,pCodec,&videoOptionsDict)<0){
			LOGE("无法打开视频解码器\n");
			return -1;
		}
	}

	if(instance->display_height == 0 ){
		instance->display_width = pCodecCtx->width;
		instance->display_height = pCodecCtx->height;
		setAndroidWindowPix(pCodecCtx->width,pCodecCtx->height,instance);
	}

	pFrame = avcodec_alloc_frame();

	// video scaling/conversion context
	sws_ctx = sws_getContext(
		pCodecCtx->width,
		pCodecCtx->height,
		pCodecCtx->pix_fmt,
		instance->display_width,
		instance->display_height,
		AV_PIX_FMT_RGBA,
		SWS_BILINEAR,
		NULL,
		NULL,
		NULL
	);

	// create the bitmap
	bitmap = createBitmap(env, instance->display_width, instance->display_height);
	AndroidBitmap_lockPixels(env, bitmap, &buffer);
	AVFrame *RGBAFrame;
	RGBAFrame = avcodec_alloc_frame();
	avpicture_fill((AVPicture *) RGBAFrame, buffer, AV_PIX_FMT_RGBA, instance->display_width, instance->display_height);

	// raw audio resampling setup
	struct SwrContext *swr_ctx;
	swr_ctx = swr_alloc();

	av_opt_set_int(swr_ctx, "in_sample_fmt", instance->vs->sample_fmt, 0);
	av_opt_set_int(swr_ctx, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0);
	av_opt_set_int(swr_ctx, "in_channel_layout", instance->vs->sample_layout, 0);
	av_opt_set_int(swr_ctx, "out_channel_layout", AV_CH_LAYOUT_MONO, 0);

	swr_init(swr_ctx);

	instance->vs->RGBAFrame=RGBAFrame;
	instance->vs->buffer=buffer;
	instance->vs->pCodecCtx=pCodecCtx;
	instance->vs->pFrame=pFrame;
	instance->vs->sws_ctx=sws_ctx;
	instance->vs->videoStream=videoStream;
	instance->vs->aCodecCtx=aCodecCtx;
	instance->vs->audioStream=audioStream;
	instance->vs->audio_decode_frame=audio_frame;
	instance->vs->swr_ctx=swr_ctx;

	// video thread
	pthread_t video_tid;
	if(videoStream!=-1){
		pthread_create(&video_tid,NULL,video_thread,instance);
	}

	// audio thread
	pthread_t audio_tid;
	if(audioStream!=-1 && instance->vs->sample_rate_src >0){
		pthread_create(&audio_tid,NULL,audio_thread,instance);
	}

	// notify Android that we are connected to the RTMP server
	(*env)->CallVoidMethod(env,obj,instance->onNativeConnected);

	while(1){
		if(instance->stop){// shut down the threads
			// enqueueing an empty entry signals the end
			thread_queue_add(instance->queue,NULL,-1);
			break;
		}

		AVPacket *packet_p = malloc(sizeof(AVPacket));
		// enqueue the packet
		if(av_read_frame(pFormatCtx,packet_p)<0){// network dropped or playback stopped
			thread_queue_add(instance->queue,NULL,-1);
			break;
		}

		thread_queue_add(instance->queue,packet_p,1);
	}

	LOGE("native主循环退出\n");
	thread_queue_add(instance->queue,NULL,-1);//让get线程停止
	pthread_join(rtid,NULL);
	pthread_join(video_tid,NULL);
	if(audioStream!=-1 && instance->vs->sample_rate_src >0){// join only if the audio thread was started
		pthread_join(audio_tid,NULL);
	}

	LOGE("getpacket线程环退出\n");
	thread_queue_cleanup(instance->queue,1);
	thread_queue_cleanup(instance->video_queue,1);
	thread_queue_cleanup(instance->audio_queue,1);

    av_free(instance->vs);
    av_free(RGBAFrame);
    av_free(pFrame);
    avcodec_close(pCodecCtx);
    avcodec_close(aCodecCtx);
    avformat_close_input(&pFormatCtx);
    AndroidBitmap_unlockPixels(env,bitmap);
    LOGE("清理退出\n");
    if(instance->stop){
    	return 0;
    }
    if(instance->timeout_flag){
    	return -1;
    }else{
        LOGE("执行到finishplay");
    	(*env)->CallVoidMethod(env,obj,instance->finishplay);
    	return 0;
    }
}
Example #23

jobject Java_tv_cjump_jni_NativeBitmapFactory_createBitmap19(JNIEnv * env ,jobject  obj,jint w,jint h,SkBitmap::Config config,jboolean hasAlpha)
{
    return createBitmap(env,obj,w,h,config,hasAlpha,0x3);
}
Example #24
//--------------------------------------------------------------
// Process windows messages
LRESULT CALLBACK WndProc(
  HWND  hWnd,                     // Handle For This Window
  UINT  uMsg,                     // Message For This Window
  WPARAM  wParam,                 // Additional Message Information
  LPARAM  lParam)                 // Additional Message Information
{
  /*
    Windows input events are translated into GUI events (see uiInputEvents.h) and
    passed to the SampleUI instance.  If SampleUI does not want an event (mouse or
    keyboard), we can pass it to the application (movementKeyDown, etc.)

    There's nothing in the GUI that decides when an event should go to the GUI
    vs. the application.  This policy is implemented in SampleUI.
  */

  switch (uMsg)                 
  {
    case WM_CLOSE: 
    {
      PostQuitMessage(0); 
      return 0; 
    }

    case WM_SIZE:             
    {
      if (m_window != NULL)
      {
        RECT rect;
        GetClientRect(m_window, &rect);
        m_windowWidth = rect.right - rect.left;
        m_windowHeight = rect.bottom - rect.top;

        // resize the ui
        if (m_ui != NULL)
          m_ui->resize(m_windowWidth, m_windowHeight);

        // resize the terrain browser
        if (m_flatWorld != NULL)
          m_flatWorld->resize(m_windowWidth, m_windowHeight);

        // recreate output bitmap
        destroyBitmap();
        createBitmap();
      }

      return 0;
    }

    case WM_MOVE:
    {
      m_windowX = LOWORD(lParam);
      m_windowY = HIWORD(lParam);
      return 0;
    }

    case WM_PAINT:
    {
      PAINTSTRUCT ps;
      BeginPaint(m_window, &ps);

      int copyLeft = ps.rcPaint.left;
      int copyTop = ps.rcPaint.top;
      int copyWidth = ps.rcPaint.right - ps.rcPaint.left;
      int copyHeight = ps.rcPaint.bottom - ps.rcPaint.top;
      
      if (m_flatWorld != NULL)
        m_flatWorld->update(m_outputDC, ps.rcPaint);

      // if the UI has been created
      if (m_surface != NULL)
      {
        // if UI has been damaged, merge bitmap into window
        if (copyWidth > 0 && copyHeight > 0)
        {
          BLENDFUNCTION blend;
          blend.BlendFlags = 0;
          blend.BlendOp = AC_SRC_OVER;
          blend.SourceConstantAlpha = 255;
          blend.AlphaFormat = AC_SRC_ALPHA;

          AlphaBlend(m_outputDC, copyLeft, copyTop, copyWidth, copyHeight,
                     m_surface->m_bitmapDC, copyLeft, copyTop, 
                     copyWidth, copyHeight, blend);
        }
      }
      BitBlt(ps.hdc, copyLeft, copyTop, copyWidth, copyHeight, m_outputDC, copyLeft, copyTop, SRCCOPY);
      EndPaint(m_window, &ps);
      return 0;
    }

    case WM_CHAR:
    {
      if (m_ui != NULL)
        m_ui->m_top->surfaceKeyChar((int) wParam, 0);  // =-= modifiers

      return 0;
    }

    case WM_KEYDOWN:
    {
      // set autorepeat flag
      int repeat = (lParam & (1 << 30)) != 0 ? MG_EVENT_KEYREPEAT : 0;
     
      if (wParam == VK_SHIFT)
        m_eventFlags |= MG_EVENT_SHIFT_DOWN;
      else if (wParam == VK_CONTROL)
        m_eventFlags |= MG_EVENT_CNTL_DOWN;
      else if (wParam == VK_MENU)
        m_eventFlags |= MG_EVENT_ALT_DOWN;

      // translate special keys
      int key = translateKey((int) wParam);

      if (m_ui != NULL)
      {
        // handle the help key and console key
        if (key == '`' || key == '~')
          m_ui->toggleConsole();

        else if (key == MG_EVENT_KEY_F1)
          m_ui->toggleHelp();
    
        // if ui is taking input
        else if (m_ui->hasKeyFocus())
          m_ui->m_top->surfaceKeyDown(key, m_eventFlags | repeat);
  
        // otherwise, process as application key
        else movementKeyDown(wParam);
      }

      return 0;
    }

    case WM_KEYUP:
    {
      if (m_ui != NULL && m_ui->hasKeyFocus())
      {
        if (wParam == VK_SHIFT)
          m_eventFlags &= ~MG_EVENT_SHIFT_DOWN;
        else if (wParam == VK_CONTROL)
          m_eventFlags &= ~MG_EVENT_CNTL_DOWN;
        else if (wParam == VK_MENU)
          m_eventFlags &= ~MG_EVENT_ALT_DOWN;

        // translate special keys
        int key = translateKey((int) wParam);

        m_ui->m_top->surfaceKeyUp(key, m_eventFlags);
      }
      else movementKeyUp(wParam);
      return 0;
    }

    case WM_LBUTTONDOWN:
    case WM_MBUTTONDOWN:
    case WM_RBUTTONDOWN:
    {
      int x = LOWORD(lParam);
      int y = HIWORD(lParam);
        
      int pressed = 0;
      if (uMsg == WM_LBUTTONDOWN)
        pressed = MG_EVENT_MOUSE1_DOWN;
      else if (uMsg == WM_RBUTTONDOWN)
        pressed = MG_EVENT_MOUSE2_DOWN;
      else if (uMsg == WM_MBUTTONDOWN)
        pressed = MG_EVENT_MOUSE3_DOWN;

      m_eventFlags |= pressed;

      if (m_ui != NULL && m_ui->useMousePoint(x, y))
      {
        // =-= if another button already down, should this be a dragged, not moved?
        m_ui->m_top->surfaceMouseMove(x, y, m_eventFlags);
        m_ui->m_top->surfaceMouseDown(x, y, m_eventFlags, pressed);
      }
      else movementPress(x, y);

      m_lastMouseX = x;
      m_lastMouseY = y;
      return 0;
    }

    case WM_LBUTTONUP:
    case WM_MBUTTONUP:
    case WM_RBUTTONUP:
    {
      int x = LOWORD(lParam);
      int y = HIWORD(lParam);
      
      int released = 0;
      if (uMsg == WM_LBUTTONUP)
        released = MG_EVENT_MOUSE1_DOWN;
      else if (uMsg == WM_RBUTTONUP)
        released = MG_EVENT_MOUSE2_DOWN;
      else if (uMsg == WM_MBUTTONUP)
        released = MG_EVENT_MOUSE3_DOWN;

      m_eventFlags &= ~released;

      if (m_ui != NULL && m_ui->hasMouseFocus())
      {

        // if button released away from last coords, it was dragged
        if (x != m_lastMouseX || y != m_lastMouseY)
          m_ui->m_top->surfaceMouseDrag(x, y, m_eventFlags);

        m_ui->m_top->surfaceMouseUp(x, y, m_eventFlags, released);
      }
      else movementRelease(x, y);

      m_lastMouseX = x;
      m_lastMouseY = y;
      return 0;
    }

    case WM_MOUSEMOVE:
    {
      int x = LOWORD(lParam);
      int y = HIWORD(lParam);

      if (m_ui != NULL && (m_ui->useMousePoint(x, y) || m_ui->hasMouseFocus()))
      {
        if ((m_eventFlags & MG_EVENT_MOUSE_BUTTONS) != 0)
          m_ui->m_top->surfaceMouseDrag(x, y, m_eventFlags);
        else m_ui->m_top->surfaceMouseMove(x, y, m_eventFlags);
      }
      else if ((m_eventFlags & MG_EVENT_MOUSE_BUTTONS) != 0)
        movementDrag(x, y);

      m_lastMouseX = x;
      m_lastMouseY = y;
      return 0;
    }

    case WM_TIMER:
    {
      // do UI animation
      if (m_ui != NULL)
        m_ui->m_top->animate();
      return 0;
    }
  }

  return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
Example #25
jobject GraphicsJNI::createBitmap(JNIEnv* env, SkBitmap* bitmap, bool isMutable,
                            jbyteArray ninepatch, int density)
{
    return createBitmap(env, bitmap, NULL, isMutable, ninepatch, density);
}
Example #26
//--------------------------------------------------------------
// main entry
int WINAPI WinMain( 
  HINSTANCE hInstance,            // Instance
  HINSTANCE hPrevInstance,        // Previous Instance
  LPSTR lpCmdLine,                // Command Line Parameters
  int nCmdShow)                   // Window Show State
{
#ifdef DEBUG_MEMORY
  mgDebugMemoryInit();
#endif

  mgDebugReset();         // reset trace file
  mgOSInitTimer();       // performance timer
  
  // initialize random numbers
  time_t seed;
  time(&seed);
  srand(12123123); // srand(seed & 0xFFFF);

  mgOSFindWD("docs");

  // handle utility error messages
  m_errorTable = new mgUtilErrorTable();

  try
  {
    initWindow();
    createWindow();
    createBitmap();
    createUI();

    // create the terrain
    m_flatWorld = new FlatWorld();
    m_flatWorld->resize(m_windowWidth, m_windowHeight);
    m_ui->setValue(m_flatWorld->m_playerX, m_flatWorld->m_playerY);

    // check for screen update every 25 ms
    SetTimer(m_window, 123, 25, NULL);

    while (true)
    {
      MSG msg;      

      // if there is no input pending
      if (!PeekMessage(&msg, NULL, 0, 0, PM_NOREMOVE)) 
      {
        // update any movement keys still down
        updateMovement();

        // if the ui needs an update
        if (m_surface != NULL && m_surface->isDamaged())
        {
          // redraw ui at damaged area
          mgRectangle bounds;
          m_surface->getDamage(bounds);
          m_ui->m_top->surfacePaint(bounds);

          // copy bits from surface into bitmap
          m_surface->repair(bounds);

          // tell windows to redraw the updated area
          RECT rect;
          rect.left = bounds.m_x;
          rect.right = rect.left + bounds.m_width;
          rect.top = bounds.m_y;
          rect.bottom = rect.top + bounds.m_height;
          InvalidateRect(m_window, &rect, false);
        }
      }

      GetMessage(&msg, NULL, 0, 0);     

      // end on quit
      if (msg.message == WM_QUIT)       
        break;

      TranslateMessage(&msg);
      DispatchMessage(&msg);
    }

    // shutdown
    destroyUI();
    destroyBitmap();
    destroyWindow();
    termWindow();
  }
  catch (mgErrorMsg* e)
  {
    mgString msg;
    m_errorTable->msgText(msg, e);
    mgDebug("%s", (const char*) msg);

    MessageBox(m_window, msg, "Error", MB_OK | MB_ICONINFORMATION);
    return 0;
  }
  catch (mgException* e)
  {
    mgDebug("%s", (const char*) e->m_message);

    MessageBox(m_window, e->m_message, "Error", MB_OK | MB_ICONINFORMATION);
    return 0;
  }

  delete m_errorTable;
  m_errorTable = NULL;

#ifdef DEBUG_MEMORY
  // display all memory leaks
  mgDebugMemory();
#endif

  return 0;
}
Example #27
 static SkBitmap TestBitmap() {
     SkBitmap bitmap;
     createBitmap(&bitmap, 0x05060708);
     return bitmap;
 }
Example #28

jint perfetch_start(JNIEnv *pEnv, jobject pObj, jobject pMainAct, jstring pFileName, jint video_fps) {
	AVFormatContext *pFormatCtx = NULL;
	int             i, videoStream;
	AVCodecContext  *pCodecCtx = NULL;
	AVCodec         *pCodec = NULL;
	AVFrame         *pFrame = NULL;
	AVFrame         *pFrameRGBA = NULL;
	AVPacket        packet;
	int             frameFinished;
	jobject			bitmap;
	void* 			buffer;

	AVDictionary    *optionsDict = NULL;
	struct SwsContext      *sws_ctx = NULL;
	char *videoFileName;

	// Register all formats and codecs
	av_register_all();

	//get C string from JNI jstring
	videoFileName = (char *)(*pEnv)->GetStringUTFChars(pEnv, pFileName, NULL);

	// Open video file
	if(avformat_open_input(&pFormatCtx, videoFileName, NULL, NULL)!=0)
		return -1; // Couldn't open file

	// Retrieve stream information
	if(avformat_find_stream_info(pFormatCtx, NULL)<0)
		return -1; // Couldn't find stream information

	// Dump information about file onto standard error
	av_dump_format(pFormatCtx, 0, videoFileName, 0);

	// Find the first video stream
	videoStream=-1;
	for(i=0; i<pFormatCtx->nb_streams; i++) {
		if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
			videoStream=i;
			break;
		}
	}
	if(videoStream==-1)
		return -1; // Didn't find a video stream

	// Get a pointer to the codec context for the video stream
	pCodecCtx=pFormatCtx->streams[videoStream]->codec;

	// Find the decoder for the video stream
	pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
	if(pCodec==NULL) {
		fprintf(stderr, "Unsupported codec!\n");
		return -1; // Codec not found
	}
	// Open codec
	if(avcodec_open2(pCodecCtx, pCodec, &optionsDict)<0)
		return -1; // Could not open codec

	// Allocate video frame
	pFrame=avcodec_alloc_frame();

	// Allocate an AVFrame structure
	pFrameRGBA=avcodec_alloc_frame();
	if(pFrameRGBA==NULL)
		return -1;

	//create a bitmap as the buffer for pFrameRGBA
	bitmap = createBitmap(pEnv, pCodecCtx->width, pCodecCtx->height);
	if (AndroidBitmap_lockPixels(pEnv, bitmap, &buffer) < 0)
		return -1;
	//get the scaling context
	sws_ctx = sws_getContext
    (
        pCodecCtx->width,
        pCodecCtx->height,
        pCodecCtx->pix_fmt,
        pCodecCtx->width,
        pCodecCtx->height,
        AV_PIX_FMT_RGBA,
        SWS_BILINEAR,
        NULL,
        NULL,
        NULL
    );

	// Assign appropriate parts of bitmap to image planes in pFrameRGBA
	// Note that pFrameRGBA is an AVFrame, but AVFrame is a superset
	// of AVPicture
	avpicture_fill((AVPicture *)pFrameRGBA, buffer, AV_PIX_FMT_RGBA,
		 pCodecCtx->width, pCodecCtx->height);

	// Read frames and save first five frames to disk
	i=0;
	int ret;
	int fps = 0;
	int previous_pts = 0;
	int current_pts = 0;
	int prefetch_frame_index = 100;
	finish = 0;
	while(finish == 0) {
//		LOGI("av_read_frame start");
		ret = av_read_frame(pFormatCtx, &packet);
//		LOGI("av_read_frame end");
		if (ret <0){
			av_free_packet(&packet);
			break;
		}
		// Is this a packet from the video stream?
		if(packet.stream_index==videoStream) {

			// Decode video frame
			LOGI("avcodec_decode_video2 start");
			avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished,
					&packet);
			LOGI("avcodec_decode_video2 end");
			// Did we get a video frame?
			if(frameFinished) {
				if (fps == 0){
					if (i == 0){
						previous_pts = av_frame_get_best_effort_timestamp ( pFrame );
					}else if (i == 8){
						current_pts = av_frame_get_best_effort_timestamp ( pFrame );
						//fps = 800000/(current_pts - previous_pts);
						fps = video_fps;
						LOGI("video fps %d", fps);
						prefetch_frame_index = fps*16/60;
						LOGI("prefetch_frame_index %d", prefetch_frame_index);
					}
				}


				if (i++%prefetch_frame_index == 0 && i < 1500){
					// Convert the image from its native format to RGBA
					sws_scale
					(
							sws_ctx,
							(uint8_t const * const *)pFrame->data,
							pFrame->linesize,
							0,
							pCodecCtx->height,
							pFrameRGBA->data,
							pFrameRGBA->linesize
					);

					// return the frame to java layer
					int64_t pts = av_frame_get_best_effort_timestamp ( pFrame );
					pts = av_rescale_q ( pts,  pFormatCtx->streams[videoStream]->time_base, AV_TIME_BASE_Q );
					LOGI("save frame %d, pts: %d", i, (int)pts);
					SaveFrame(pEnv, pMainAct, bitmap, pCodecCtx->width, pCodecCtx->height, i, pts);
					int got_packet_ptr;
					AVCodecContext *c= NULL;
					c = avcodec_alloc_context3(avcodec_find_encoder(pCodecCtx->codec_id));
					if (!c) {
							LOGI("Could not allocate video codec context\n");
					        return 0;
					}
					av_free_packet(&packet);
				}
			}
		}
		// Free the packet that was allocated by av_read_frame
		av_free_packet(&packet);
	}
	LOGI("final frame %d", i);
	//unlock the bitmap
	AndroidBitmap_unlockPixels(pEnv, bitmap);

	// Free the RGB image
	av_free(pFrameRGBA);

	// Free the YUV frame
	av_free(pFrame);

	// Close the codec
	avcodec_close(pCodecCtx);

	// Close the video file
	avformat_close_input(&pFormatCtx);

	return 0;
}
Example #29

 AARectView() {
     fBitmap = createBitmap(N);
     
     fWidth = N;
 }
Example #30
File: icsa.c  Project: bojifengyu/uiHRDC
//ticsa *createIntegerCSA(uint **aintVector, uint textSize, char *build_options) {
int buildIntIndex (uint *aintVector, uint n, char *build_options, void **index ){
	uint textSize=n;	
	intVector = aintVector;  //global variable
	ticsa *myicsa;
	myicsa = (ticsa *) malloc (sizeof (ticsa));
	uint *Psi, *SAI, *C, vocSize;
	register uint i, j;
	uint nsHUFF;

	parametersCSA(myicsa, build_options);
	
	nsHUFF=myicsa->tempNSHUFF;
	
	// Store the values of some parameters
	myicsa->suffixArraySize = textSize;
	myicsa->D = (uint*) malloc (sizeof(uint) * ((textSize+31)/32));	
	
	myicsa->samplesASize = (textSize + myicsa->T_A - 1) / myicsa->T_A;// + 1;
	myicsa->samplesA = (uint *)malloc(sizeof(int) * myicsa->samplesASize);
	myicsa->BA = (uint*) malloc (sizeof(uint) * ((textSize+31)/32));
	myicsa->samplesAInvSize = (textSize + myicsa->T_AInv - 1) / myicsa->T_AInv;
	myicsa->samplesAInv = (uint *)malloc(sizeof(int) * myicsa->samplesAInvSize);

	// Allocate space for the vectors
	Psi = (uint *) malloc (sizeof(uint) * textSize);

	// BUILD THE C FUNCTION
	vocSize = 0;
	for(i=0;i<textSize;i++) if(intVector[i]>vocSize) vocSize = intVector[i];
	C = (uint *) malloc(sizeof(uint) * (vocSize + 1));	// to account for the 0 terminator
	for(i=0;i<=vocSize;i++) C[i] = 0;
	for(i=0;i<textSize;i++) C[intVector[i]]++;
	for (i=0,j=0;i<=vocSize;i++) {
		j = j + C[i];
		C[i] = j;
	}
	for(i=vocSize;i>0;i--) C[i] = C[i-1];
	C[0] = 0;

	// Build the suffix array (in Psi) - with quicksort
	printf("\n\t *BUILDING THE SUFFIX ARRAY over %d integers... (with qsort)", textSize);fflush(stdout);
	for(i=0; i<textSize; i++) Psi[i]=i;
	
	qsort(Psi, textSize, sizeof(uint), suffixCmp);
	
	
	printf("\n\t ...... ended.");


	// BUILD THE INVERSE OF THE SUFFIX ARRAY
	SAI = (uint *) malloc (sizeof(uint) * (textSize + 1));	// +1 to repeat at the last position; avoids an if
	for(i=0;i<textSize;i++) SAI[Psi[i]] = i;
	SAI[textSize] = SAI[0];

	// STORE THE SUFFIX ARRAY SAMPLES
	for(i=0;i<((textSize+31)/32);i++) myicsa->BA[i] = 0;
	for(i=0; i<textSize; i+=myicsa->T_A) bitset(myicsa->BA, SAI[i]);
	bitset(myicsa->BA, SAI[textSize-1]);			// the last position is always sampled
	//printf("TextSize = %d\n", textSize);
	myicsa->bBA = createBitmap(myicsa->BA, textSize);
	for(i=0,j=0; i<textSize; i++) if(bitget(myicsa->BA, i)) myicsa->samplesA[j++] = Psi[i];
	
	// STORE THE SAMPLES OF THE INVERSE SUFFIX ARRAY
	for(i=0,j=0;i<textSize;i+=myicsa->T_AInv) myicsa->samplesAInv[j++] = SAI[i];
	
	// BUILD AND COMPRESS PSI
	printf("\n\t Creating compressed Psi...");
	for(i=0;i<textSize;i++) Psi[i] = SAI[Psi[i]+1];
	
	//FILE *ff = fopen("psi.log","w");
	//for (i=0;i<textSize;i++) fprintf(ff,"%d::%u",i,Psi[i]);
	//fclose(ff);	
	
	free(SAI);
	#ifdef PSI_HUFFMANRLE	
	myicsa->hcPsi = huffmanCompressPsi(Psi,textSize,myicsa->T_Psi,nsHUFF);
	#endif				
	#ifdef PSI_GONZALO	
	myicsa->gcPsi = gonzaloCompressPsi(Psi,textSize,myicsa->T_Psi,nsHUFF);
	#endif			
	#ifdef PSI_DELTACODES
	myicsa->dcPsi = deltaCompressPsi(Psi,textSize,myicsa->T_Psi);		
	#endif
  	free(Psi);	
	
	// Build D
	for(i=0;i<((textSize+31)/32);i++) myicsa->D[i] = 0;	
	for(i=0;i<=vocSize;i++) bitset(myicsa->D, C[i]);
	myicsa->bD = createBitmap(myicsa->D,textSize);
	free(C);

	// GLOBAL VARIABLE THAT STORES THE DISPLAY STATE (IMPORTANT FOR SEQUENTIAL DISPLAY)
	// It stores the last suffix-array position shown by display or displayNext.
	// When displayNext is requested, Psi is applied to this position to obtain the
	// next one, from which the requested symbol is recovered, and displayState is updated.
	myicsa->displayCSAState = 0;
	myicsa->displayCSAPrevPosition = -2;  //needed by DisplayCSA(position)
	
	aintVector = intVector;
	// Free the space that is no longer needed

	*index = myicsa;
	//return (myicsa);
	return 0;
}
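The three loops that build C form the classic counting-sort prefix: afterwards C[s] holds the number of symbols strictly smaller than s, which is what a CSA needs to delimit each symbol's suffix range. A tiny self-contained check of that invariant:

#include <stdio.h>

int main() {
    // Mirror of the C-array construction above, on a toy vector {2,0,2,1}.
    unsigned text[] = {2, 0, 2, 1}, n = 4, voc = 2;
    unsigned C[3] = {0, 0, 0};
    unsigned i, j;
    for (i = 0; i < n; i++) C[text[i]]++;                    // symbol counts
    for (i = 0, j = 0; i <= voc; i++) { j += C[i]; C[i] = j; } // prefix sums
    for (i = voc; i > 0; i--) C[i] = C[i-1];                 // shift right
    C[0] = 0;
    for (i = 0; i <= voc; i++)
        printf("C[%u] = %u\n", i, C[i]);                     // prints 0, 1, 2
    return 0;
}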