Example #1
int main(int argc, char *argv[])
{
    int frames, currentFrame;
    faacEncHandle hEncoder;
    pcmfile_t *infile = NULL;

    unsigned long samplesInput, maxBytesOutput, totalBytesWritten=0;

    faacEncConfigurationPtr myFormat;
    unsigned int mpegVersion = MPEG2;
    unsigned int objectType = LOW;
    unsigned int useMidSide = 1;
    static unsigned int useTns = DEFAULT_TNS;
    enum container_format container = NO_CONTAINER;
    int optimizeFlag = 0;
    enum stream_format stream = ADTS_STREAM;
    int cutOff = -1;
    int bitRate = 0;
    unsigned long quantqual = 0;
    int chanC = 3;
    int chanLF = 4;

    char *audioFileName = NULL;
    char *aacFileName = NULL;
    char *aacFileExt = NULL;
    int aacFileNameGiven = 0;

    float *pcmbuf;
    int *chanmap = NULL;

    unsigned char *bitbuf;
    int samplesRead = 0;
    const char *dieMessage = NULL;

    int rawChans = 0; // disabled by default
    int rawBits = 16;
    int rawRate = 44100;
    int rawEndian = 1;

    int shortctl = SHORTCTL_NORMAL;

    FILE *outfile = NULL;

#ifdef HAVE_LIBMP4V2
    MP4FileHandle MP4hFile = MP4_INVALID_FILE_HANDLE;
    MP4TrackId MP4track = 0;
    unsigned int ntracks = 0, trackno = 0;
    unsigned int ndiscs = 0, discno = 0;
    u_int8_t compilation = 0;
    const char *artist = NULL, *title = NULL, *album = NULL, *year = NULL,
      *genre = NULL, *comment = NULL, *writer = NULL;
    u_int8_t *art = NULL;
    u_int64_t artSize = 0;
    u_int64_t total_samples = 0;
    u_int64_t encoded_samples = 0;
    unsigned int delay_samples;
    unsigned int frameSize;
#endif
    char *faac_id_string;
    char *faac_copyright_string;

#ifndef _WIN32
    // install signal handler
    signal(SIGINT, signal_handler);
    signal(SIGTERM, signal_handler);
#endif

    // get faac version
    if (faacEncGetVersion(&faac_id_string, &faac_copyright_string) == FAAC_CFG_VERSION)
    {
        fprintf(stderr, "Freeware Advanced Audio Coder\nFAAC %s\n\n", faac_id_string);
    }
    else
    {
        fprintf(stderr, __FILE__ "(%d): wrong libfaac version\n", __LINE__);
        return 1;
    }

    /* begin process command line */
    progName = argv[0];
    while (1) {
        static struct option long_options[] = {
            { "help", 0, 0, 'h'},
            { "long-help", 0, 0, 'H'},
            { "raw", 0, 0, 'r'},
            { "no-midside", 0, 0, NO_MIDSIDE_FLAG},
            { "cutoff", 1, 0, 'c'},
            { "quality", 1, 0, 'q'},
            { "pcmraw", 0, 0, 'P'},
            { "pcmsamplerate", 1, 0, 'R'},
            { "pcmsamplebits", 1, 0, 'B'},
            { "pcmchannels", 1, 0, 'C'},
            { "shortctl", 1, 0, SHORTCTL_FLAG},
            { "tns", 0, 0, TNS_FLAG},
            { "no-tns", 0, 0, NO_TNS_FLAG},
            { "mpeg-version", 1, 0, MPEGVERS_FLAG},
            { "obj-type", 1, 0, OBJTYPE_FLAG},
            { "license", 0, 0, 'L'},
#ifdef HAVE_LIBMP4V2
            { "createmp4", 0, 0, 'w'},
            { "optimize", 0, 0, 's'},
            { "artist", 1, 0, ARTIST_FLAG},
            { "title", 1, 0, TITLE_FLAG},
            { "album", 1, 0, ALBUM_FLAG},
            { "track", 1, 0, TRACK_FLAG},
            { "disc", 1, 0, DISC_FLAG},
            { "genre", 1, 0, GENRE_FLAG},
            { "year", 1, 0, YEAR_FLAG},
            { "cover-art", 1, 0, COVER_ART_FLAG},
            { "comment", 1, 0, COMMENT_FLAG},
        { "writer", 1, 0, WRITER_FLAG},
        { "compilation", 0, 0, COMPILATION_FLAG},
#endif
        { "pcmswapbytes", 0, 0, 'X'},
            { 0, 0, 0, 0}
        };
        int c = -1;
        int option_index = 0;

        c = getopt_long(argc, argv, "Hhb:m:o:rnc:q:PR:B:C:I:X"
#ifdef HAVE_LIBMP4V2
                        "ws"
#endif
            ,long_options, &option_index);

        if (c == -1)
            break;

        if (!c)
        {
          dieMessage = usage;
          break;
        }

        switch (c) {
        case 'o': {
            int l = strlen(optarg);
            aacFileName = malloc(l+1);
            memcpy(aacFileName, optarg, l);
            aacFileName[l] = '\0';
            aacFileNameGiven = 1;
            break;
        }
        case 'r': {
            stream = RAW_STREAM;
            break;
        }
        case NO_MIDSIDE_FLAG: {
            useMidSide = 0;
            break;
        }
        case 'c': {
            unsigned int i;
            if (sscanf(optarg, "%u", &i) > 0) {
                cutOff = i;
            }
            break;
        }
        case 'b': {
            unsigned int i;
            if (sscanf(optarg, "%u", &i) > 0)
            {
                bitRate = 1000 * i;
            }
            break;
        }
        case 'q':
        {
            unsigned int i;
            if (sscanf(optarg, "%u", &i) > 0)
            {
                if (i > 0 && i < 1000)
                    quantqual = i;
            }
            break;
        }
        case 'I':
            sscanf(optarg, "%d,%d", &chanC, &chanLF);
            break;
        case 'P':
            rawChans = 2; // enable raw input
            break;
        case 'R':
        {
            unsigned int i;
            if (sscanf(optarg, "%u", &i) > 0)
            {
                rawRate = i;
                rawChans = (rawChans > 0) ? rawChans : 2;
            }
            break;
        }
        case 'B':
        {
            unsigned int i;
            if (sscanf(optarg, "%u", &i) > 0)
            {
                if (i > 32)
                    i = 32;
                if (i < 8)
                    i = 8;
                rawBits = i;
                rawChans = (rawChans > 0) ? rawChans : 2;
            }
            break;
        }
        case 'C':
        {
            unsigned int i;
            if (sscanf(optarg, "%u", &i) > 0)
                rawChans = i;
            break;
        }
#ifdef HAVE_LIBMP4V2
        case 'w':
            container = MP4_CONTAINER;
            break;
        case 's':
            optimizeFlag = 1;
            break;
        case ARTIST_FLAG:
            artist = optarg;
            break;
        case WRITER_FLAG:
            writer = optarg;
            break;
        case TITLE_FLAG:
            title = optarg;
            break;
        case ALBUM_FLAG:
            album = optarg;
            break;
        case TRACK_FLAG:
            sscanf(optarg, "%u/%u", &trackno, &ntracks);
            break;
        case DISC_FLAG:
            sscanf(optarg, "%u/%u", &discno, &ndiscs);
            break;
        case COMPILATION_FLAG:
            compilation = 0x1;
            break;
        case GENRE_FLAG:
            genre = optarg;
            break;
        case YEAR_FLAG:
            year = optarg;
            break;
        case COMMENT_FLAG:
            comment = optarg;
            break;
        case COVER_ART_FLAG: {
            FILE *artFile = fopen(optarg, "rb");

            if (artFile) {
                u_int64_t r;

                fseek(artFile, 0, SEEK_END);
                artSize = ftell(artFile);

                art = malloc(artSize);

                fseek(artFile, 0, SEEK_SET);
                clearerr(artFile);

                r = fread(art, artSize, 1, artFile);

                if (r != 1) {
                    dieMessage = "Error reading cover art file!\n";
                    free(art);
                    art = NULL;
                } else if (artSize < 12 || !check_image_header(art)) {
                    /* the above expression checks the image signature */
                    dieMessage = "Unsupported cover image file format!\n";
                    free(art);
                    art = NULL;
                }

                fclose(artFile);
            } else {
                dieMessage = "Error opening cover art file!\n";
            }

            break;
        }
#endif
        case SHORTCTL_FLAG:
            shortctl = atoi(optarg);
            break;
        case TNS_FLAG:
            useTns = 1;
            break;
        case NO_TNS_FLAG:
            useTns = 0;
            break;
        case MPEGVERS_FLAG:
            mpegVersion = atoi(optarg);
            switch (mpegVersion)
            {
            case 2:
                mpegVersion = MPEG2;
                break;
            case 4:
                mpegVersion = MPEG4;
                break;
            default:
                dieMessage = "Unrecognised MPEG version!\n";
            }
            break;
#if 0
        case OBJTYPE_FLAG:
            if (!strcasecmp(optarg, "LC"))
                objectType = LOW;
            else if (!strcasecmp(optarg, "Main"))
                objectType = MAIN;
            else if (!strcasecmp(optarg, "LTP")) {
                mpegVersion = MPEG4;
                objectType = LTP;
            } else
                dieMessage = "Unrecognised object type!\n";
            break;
#endif
        case 'L':
            fprintf(stderr, faac_copyright_string);
            dieMessage = license;
            break;
        case 'X':
            rawEndian = 0;
            break;
        case 'H':
            dieMessage = long_help;
            break;
        case 'h':
            dieMessage = short_help;
            break;
        case '?':
        default:
            dieMessage = usage;
            break;
        }
    }

    /* check that we have at least one non-option argument */
    if (!dieMessage && (argc - optind) > 1 && aacFileNameGiven)
        dieMessage = "Cannot encode several input files to one output file.\n";

    if (argc - optind < 1 || dieMessage)
    {
        fprintf(stderr, dieMessage ? dieMessage : usage,
           progName, progName, progName, progName);
        return 1;
    }

    while (argc - optind > 0) {

    /* get the input file name */
    audioFileName = argv[optind++];

    /* generate the output file name, if necessary */
    if (!aacFileNameGiven) {
        char *t = strrchr(audioFileName, '.');
        int l = t ? strlen(audioFileName) - strlen(t) : strlen(audioFileName);

#ifdef HAVE_LIBMP4V2
        aacFileExt = container == MP4_CONTAINER ? ".m4a" : ".aac";
#else
        aacFileExt = ".aac";
#endif

        aacFileName = malloc(l+1+4);
        memcpy(aacFileName, audioFileName, l);
        memcpy(aacFileName + l, aacFileExt, 4);
        aacFileName[l+4] = '\0';
    } else {
        aacFileExt = strrchr(aacFileName, '.');

        if (aacFileExt && (!strcmp(".m4a", aacFileExt) || !strcmp(".m4b", aacFileExt) || !strcmp(".mp4", aacFileExt)))
#ifndef HAVE_LIBMP4V2
            fprintf(stderr, "WARNING: MP4 support unavailable!\n");
#else
            container = MP4_CONTAINER;
#endif
    }

    /* open the audio input file */
    if (rawChans > 0) // use raw input
    {
        infile = wav_open_read(audioFileName, 1);
        if (infile)
        {
            infile->bigendian = rawEndian;
            infile->channels = rawChans;
            infile->samplebytes = rawBits / 8;
            infile->samplerate = rawRate;
            infile->samples /= (infile->channels * infile->samplebytes);
        }
    }
    else // header input
        infile = wav_open_read(audioFileName, 0);

    if (infile == NULL)
    {
        fprintf(stderr, "Couldn't open input file %s\n", audioFileName);
        return 1;
    }


    /* open the encoder library */
    hEncoder = faacEncOpen(infile->samplerate, infile->channels,
        &samplesInput, &maxBytesOutput);

#ifdef HAVE_LIBMP4V2
    if (container != MP4_CONTAINER && (ntracks || trackno || artist ||
                                       title || album || year || art ||
                                       genre || comment || discno || ndiscs ||
                                       writer || compilation))
    {
        fprintf(stderr, "Metadata requires MP4 output!\n");
        return 1;
    }

    if (container == MP4_CONTAINER)
    {
        mpegVersion = MPEG4;
        stream = RAW_STREAM;
    }

    frameSize = samplesInput / infile->channels;
    delay_samples = frameSize; // encoder delay 1024 samples
#endif
    pcmbuf = (float *)malloc(samplesInput*sizeof(float));
    bitbuf = (unsigned char*)malloc(maxBytesOutput*sizeof(unsigned char));
    chanmap = mkChanMap(infile->channels, chanC, chanLF);
    if (chanmap)
    {
        fprintf(stderr, "Remapping input channels: Center=%d, LFE=%d\n",
            chanC, chanLF);
    }

    if (cutOff <= 0)
    {
        if (cutOff < 0) // default
            cutOff = 0;
        else // disabled
            cutOff = infile->samplerate / 2;
    }
    if (cutOff > (infile->samplerate / 2))
        cutOff = infile->samplerate / 2;

    /* put the options in the configuration struct */
    myFormat = faacEncGetCurrentConfiguration(hEncoder);
    myFormat->aacObjectType = objectType;
    myFormat->mpegVersion = mpegVersion;
    myFormat->useTns = useTns;
    switch (shortctl)
    {
    case SHORTCTL_NOSHORT:
      fprintf(stderr, "disabling short blocks\n");
      myFormat->shortctl = shortctl;
      break;
    case SHORTCTL_NOLONG:
      fprintf(stderr, "disabling long blocks\n");
      myFormat->shortctl = shortctl;
      break;
    }
    if (infile->channels >= 6)
        myFormat->useLfe = 1;
    myFormat->allowMidside = useMidSide;
    if (bitRate)
        myFormat->bitRate = bitRate / infile->channels;
    myFormat->bandWidth = cutOff;
    if (quantqual > 0)
        myFormat->quantqual = quantqual;
    myFormat->outputFormat = stream;
    myFormat->inputFormat = FAAC_INPUT_FLOAT;
    if (!faacEncSetConfiguration(hEncoder, myFormat)) {
        fprintf(stderr, "Unsupported output format!\n");
#ifdef HAVE_LIBMP4V2
        if (container == MP4_CONTAINER) MP4Close(MP4hFile);
#endif
        return 1;
    }

#ifdef HAVE_LIBMP4V2
    /* initialize MP4 creation */
    if (container == MP4_CONTAINER) {
        unsigned char *ASC = 0;
        unsigned long ASCLength = 0;
        char *version_string;

#ifdef MP4_CREATE_EXTENSIBLE_FORMAT
        /* hack to compile against libmp4v2 >= 1.0RC3
         * why is there no version identifier in mp4.h? */
        MP4hFile = MP4Create(aacFileName, MP4_DETAILS_ERROR, 0);
#else
        MP4hFile = MP4Create(aacFileName, MP4_DETAILS_ERROR, 0, 0);
#endif
        if (!MP4_IS_VALID_FILE_HANDLE(MP4hFile)) {
            fprintf(stderr, "Couldn't create output file %s\n", aacFileName);
            return 1;
        }

        MP4SetTimeScale(MP4hFile, 90000);
        MP4track = MP4AddAudioTrack(MP4hFile, infile->samplerate, MP4_INVALID_DURATION, MP4_MPEG4_AUDIO_TYPE);
        MP4SetAudioProfileLevel(MP4hFile, 0x0F);
        faacEncGetDecoderSpecificInfo(hEncoder, &ASC, &ASCLength);
        MP4SetTrackESConfiguration(MP4hFile, MP4track, ASC, ASCLength);
        free(ASC);

        /* set metadata */
        version_string = malloc(strlen(faac_id_string) + 6);
        strcpy(version_string, "FAAC ");
        strcpy(version_string + 5, faac_id_string);
        MP4SetMetadataTool(MP4hFile, version_string);
        free(version_string);

        if (artist) MP4SetMetadataArtist(MP4hFile, artist);
        if (writer) MP4SetMetadataWriter(MP4hFile, writer);
        if (title) MP4SetMetadataName(MP4hFile, title);
        if (album) MP4SetMetadataAlbum(MP4hFile, album);
        if (trackno > 0) MP4SetMetadataTrack(MP4hFile, trackno, ntracks);
        if (discno > 0) MP4SetMetadataDisk(MP4hFile, discno, ndiscs);
        if (compilation) MP4SetMetadataCompilation(MP4hFile, compilation);
        if (year) MP4SetMetadataYear(MP4hFile, year);
        if (genre) MP4SetMetadataGenre(MP4hFile, genre);
        if (comment) MP4SetMetadataComment(MP4hFile, comment);
        if (artSize) {
            MP4SetMetadataCoverArt(MP4hFile, art, artSize);
            free(art);
        }
    }
    else
    {
#endif
        /* open the aac output file */
        if (!strcmp(aacFileName, "-"))
        {
            outfile = stdout;
        }
        else
        {
            outfile = fopen(aacFileName, "wb");
        }
        if (!outfile)
        {
            fprintf(stderr, "Couldn't create output file %s\n", aacFileName);
            return 1;
        }
#ifdef HAVE_LIBMP4V2
    }
#endif

    cutOff = myFormat->bandWidth;
    quantqual = myFormat->quantqual;
    bitRate = myFormat->bitRate;
    if (bitRate)
      fprintf(stderr, "Average bitrate: %d kbps\n",
          (bitRate + 500)/1000*infile->channels);
    fprintf(stderr, "Quantization quality: %ld\n", quantqual);
    fprintf(stderr, "Bandwidth: %d Hz\n", cutOff);
    fprintf(stderr, "Object type: ");
    switch(objectType)
    {
    case LOW:
        fprintf(stderr, "Low Complexity");
        break;
    case MAIN:
        fprintf(stderr, "Main");
        break;
    case LTP:
        fprintf(stderr, "LTP");
        break;
    }
    fprintf(stderr, "(MPEG-%d)", (mpegVersion == MPEG4) ? 4 : 2);
    if (myFormat->useTns)
        fprintf(stderr, " + TNS");
    if (myFormat->allowMidside)
        fprintf(stderr, " + M/S");
    fprintf(stderr, "\n");

    fprintf(stderr, "Container format: ");
    switch(container)
    {
    case NO_CONTAINER:
      switch(stream)
    {
    case RAW_STREAM:
      fprintf(stderr, "Headerless AAC (RAW)\n");
      break;
    case ADTS_STREAM:
      fprintf(stderr, "Transport Stream (ADTS)\n");
      break;
    }
        break;
#ifdef HAVE_LIBMP4V2
    case MP4_CONTAINER:
        fprintf(stderr, "MPEG-4 File Format (MP4)\n");
        break;
#endif
    }

    if (outfile
#ifdef HAVE_LIBMP4V2
        || MP4hFile != MP4_INVALID_FILE_HANDLE
#endif
       )
    {
        int showcnt = 0;
#ifdef _WIN32
        long begin = GetTickCount();
#endif
        if (infile->samples)
            frames = ((infile->samples + 1023) / 1024) + 1;
        else
            frames = 0;
        currentFrame = 0;

        fprintf(stderr, "Encoding %s to %s\n", audioFileName, aacFileName);
        if (frames != 0)
            fprintf(stderr, "   frame          | bitrate | elapsed/estim | "
            "play/CPU | ETA\n");
        else
            fprintf(stderr, " frame | elapsed | play/CPU\n");

        /* encoding loop */
#ifdef _WIN32
    for (;;)
#else
        while (running)
#endif
        {
            int bytesWritten;

            samplesRead = wav_read_float32(infile, pcmbuf, samplesInput, chanmap);

#ifdef HAVE_LIBMP4V2
            total_samples += samplesRead / infile->channels;
#endif

            /* call the actual encoding routine */
            bytesWritten = faacEncEncode(hEncoder,
                (int32_t *)pcmbuf,
                samplesRead,
                bitbuf,
                maxBytesOutput);

            if (bytesWritten)
            {
                currentFrame++;
                showcnt--;
                totalBytesWritten += bytesWritten;
            }

            if ((showcnt <= 0) || !bytesWritten)
            {
                double timeused;
#ifdef __unix__
                struct rusage usage;
#endif
#ifdef _WIN32
                char percent[MAX_PATH + 20];
                timeused = (GetTickCount() - begin) * 1e-3;
#else
#ifdef __unix__
                if (getrusage(RUSAGE_SELF, &usage) == 0) {
                    timeused = (double)usage.ru_utime.tv_sec +
                        (double)usage.ru_utime.tv_usec * 1e-6;
                }
                else
                    timeused = 0;
#else
                timeused = (double)clock() * (1.0 / CLOCKS_PER_SEC);
#endif
#endif
                if (currentFrame && (timeused > 0.1))
                {
                    showcnt += 50;

                    if (frames != 0)
                        fprintf(stderr,
                            "\r%5d/%-5d (%3d%%)|  %5.1f  | %6.1f/%-6.1f | %7.2fx | %.1f ",
                            currentFrame, frames, currentFrame*100/frames,
                            ((double)totalBytesWritten * 8.0 / 1000.0) /
                            ((double)infile->samples / infile->samplerate * currentFrame / frames),
                            timeused,
                            timeused * frames / currentFrame,
                            (1024.0 * currentFrame / infile->samplerate) / timeused,
                            timeused  * (frames - currentFrame) / currentFrame);
                    else
                        fprintf(stderr,
                            "\r %5d |  %6.1f | %7.2fx ",
                            currentFrame,
                            timeused,
                            (1024.0 * currentFrame / infile->samplerate) / timeused);

                    fflush(stderr);
#ifdef _WIN32
                    if (frames != 0)
                    {
                        sprintf(percent, "%.2f%% encoding %s",
                            100.0 * currentFrame / frames, audioFileName);
                        SetConsoleTitle(percent);
                    }
#endif
                }
            }

            /* all done, bail out */
            if (!samplesRead && !bytesWritten)
                break ;

            if (bytesWritten < 0)
            {
                fprintf(stderr, "faacEncEncode() failed\n");
                break ;
            }

            if (bytesWritten > 0)
            {
#ifdef HAVE_LIBMP4V2
                u_int64_t samples_left = total_samples - encoded_samples + delay_samples;
                MP4Duration dur = samples_left > frameSize ? frameSize : samples_left;
                MP4Duration ofs = encoded_samples > 0 ? 0 : delay_samples;

                if (container == MP4_CONTAINER)
                {
                    /* write bitstream to mp4 file */
                    MP4WriteSample(MP4hFile, MP4track, bitbuf, bytesWritten, dur, ofs, 1);
                }
                else
                {
#endif
                    /* write bitstream to aac file */
                    fwrite(bitbuf, 1, bytesWritten, outfile);
#ifdef HAVE_LIBMP4V2
                }

                encoded_samples += dur;
#endif
            }
        }

#ifdef HAVE_LIBMP4V2
        /* clean up */
        if (container == MP4_CONTAINER)
        {
            MP4Close(MP4hFile);
            if (optimizeFlag == 1)
            {
                fprintf(stderr, "\n\nMP4 format optimization... ");
                MP4Optimize(aacFileName, NULL, 0);
                fprintf(stderr, "Done!");
            }
        } else
#endif
            fclose(outfile);

        fprintf(stderr, "\n\n");
    }

    faacEncClose(hEncoder);

    wav_close(infile);

    if (pcmbuf) free(pcmbuf);
    if (bitbuf) free(bitbuf);
    if (aacFileNameGiven) free(aacFileName);

    }

    return 0;
}
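The frontend above does a lot of option, metadata, and container handling around a small libfaac core. As a reference, here is a condensed sketch of just that call sequence (open, configure, encode until drained, close), using only the faacEnc* calls that appear in the example; read_float_samples() is a hypothetical stand-in for the frontend's wav_* input layer, and the float samples are assumed to be scaled the way wav_read_float32 produces them.

/* Condensed libfaac encoding loop: open, configure, feed float PCM in
 * samplesInput-sized chunks, flush, close. */
#include <faac.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* hypothetical helper: read up to n raw 32-bit float samples */
static int read_float_samples(FILE *f, float *buf, unsigned long n)
{
    return (int)fread(buf, sizeof(float), n, f);
}

int encode_to_adts(FILE *pcm, FILE *out, unsigned long rate, unsigned int chans)
{
    unsigned long samplesInput, maxBytesOutput;
    faacEncHandle enc = faacEncOpen(rate, chans, &samplesInput, &maxBytesOutput);

    faacEncConfigurationPtr cfg = faacEncGetCurrentConfiguration(enc);
    cfg->inputFormat = FAAC_INPUT_FLOAT;   /* we hand in 32-bit float samples */
    cfg->outputFormat = 1;                 /* 1 = ADTS framing, 0 = raw */
    if (!faacEncSetConfiguration(enc, cfg))
        return -1;

    float *pcmbuf = malloc(samplesInput * sizeof(float));
    unsigned char *bitbuf = malloc(maxBytesOutput);
    int samplesRead, bytesWritten;

    do {
        samplesRead = read_float_samples(pcm, pcmbuf, samplesInput);
        /* a zero-sample call drains the encoder's internal delay */
        bytesWritten = faacEncEncode(enc, (int32_t *)pcmbuf, samplesRead,
                                     bitbuf, maxBytesOutput);
        if (bytesWritten > 0)
            fwrite(bitbuf, 1, bytesWritten, out);
    } while (bytesWritten >= 0 && (samplesRead || bytesWritten));

    free(pcmbuf);
    free(bitbuf);
    faacEncClose(enc);
    return bytesWritten < 0 ? -1 : 0;
}

faacEncEncode keeps returning frames after the input is exhausted (codec delay), which is why the loop only stops once both the read and the encode come back empty, mirroring the !samplesRead && !bytesWritten test in the example.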
Example #2
MP4TrackId Mp4vCreator(MP4FileHandle mp4File, FILE* inFile, bool doEncrypt,
		       bool allowVariableFrameRate)
{
    bool rc;

    u_int8_t sampleBuffer[256 * 1024 * 2];
    u_int8_t* pCurrentSample = sampleBuffer;
    u_int32_t maxSampleSize = sizeof(sampleBuffer) / 2;
    u_int32_t prevSampleSize = 0;

    // the current syntactical object
    // typically 1:1 with a sample
    // but not always, i.e. non-VOP's
    u_int8_t* pObj = pCurrentSample;
    u_int32_t objSize;
    u_int8_t objType;

    // the current sample
    MP4SampleId sampleId = 1;
    MP4Timestamp currentSampleTime = 0;

    // the last reference VOP
    MP4SampleId refVopId = 1;
    MP4Timestamp refVopTime = 0;

    // track configuration info
    u_int8_t videoProfileLevel = MPEG4_SP_L3;
    u_int8_t timeBits = 15;
    u_int16_t timeTicks = 30000;
    u_int16_t frameDuration = 3000;
    u_int16_t frameWidth = 320;
    u_int16_t frameHeight = 240;
    u_int32_t esConfigSize = 0;
    int vopType = 0;
    int prevVopType = 0;
    bool foundVOSH = false, foundVO = false, foundVOL = false;
    u_int32_t lastVopTimeIncrement = 0;
    bool variableFrameRate = false;
    bool lastFrame = false;
    bool haveBframes = false;
    mpeg4_frame_t *head = NULL, *tail = NULL;

    // start reading objects until we get the first VOP
    while (LoadNextObject(inFile, pObj, &objSize, &objType)) {
        // guard against buffer overflow
        if (pObj + objSize >= pCurrentSample + maxSampleSize) {
            fprintf(stderr,
                    "%s: buffer overflow, invalid video stream?\n", ProgName);
            return MP4_INVALID_TRACK_ID;
        }
#ifdef DEBUG_MP4V
        if (Verbosity & MP4_DETAILS_SAMPLE) {
            printf("MP4V type %x size %u\n",
                    objType, objSize);
        }
#endif

        if (objType == MP4AV_MPEG4_VOSH_START) {
            MP4AV_Mpeg4ParseVosh(pObj, objSize,
                    &videoProfileLevel);
            foundVOSH = true;
        } else if (objType == MP4AV_MPEG4_VO_START) {
            foundVO = true;
        } else if (objType == MP4AV_MPEG4_VOL_START) {
            MP4AV_Mpeg4ParseVol(pObj, objSize,
                    &timeBits, &timeTicks, &frameDuration,
                    &frameWidth, &frameHeight);

            foundVOL = true;
#ifdef DEBUG_MP4V
            printf("ParseVol: timeBits %u timeTicks %u frameDuration %u\n",
                    timeBits, timeTicks, frameDuration);
#endif

        } else if (foundVOL == true || objType == MP4AV_MPEG4_VOP_START) {
            esConfigSize = pObj - pCurrentSample;
            // ready to set up mp4 track
            break;
        }
        /* XXX why do we need this if ?
         * It looks like it will remove this object ... XXX */
	// It does.  On Purpose.  wmay 6/2004
        if (objType != MP4AV_MPEG4_USER_DATA_START) {
            pObj += objSize;
        }
    }

    if (foundVOSH == false) {
        fprintf(stderr,
                "%s: no VOSH header found in MPEG-4 video.\n"
                "This can cause problems with players other than mp4player. \n",
                ProgName);
    } else {
        if (VideoProfileLevelSpecified &&
                videoProfileLevel != VideoProfileLevel) {
            fprintf(stderr,
                    "%s: You have specified a different video profile level than was detected in the VOSH header\n"
                    "The level you specified was %d and %d was read from the VOSH\n",
                    ProgName, VideoProfileLevel, videoProfileLevel);
        }
    }
    if (foundVO == false) {
        fprintf(stderr,
                "%s: No VO header found in mpeg-4 video.\n"
                "This can cause problems with players other than mp4player\n",
                ProgName);
    }
    if (foundVOL == false) {
        fprintf(stderr,
                "%s: fatal: No VOL header found in mpeg-4 video stream\n",
                ProgName);
        return MP4_INVALID_TRACK_ID;
    }

    // convert frame duration to canonical time scale
    // note zero value for frame duration signals variable rate video
    if (timeTicks == 0) {
        timeTicks = 1;
    }
    u_int32_t mp4FrameDuration = 0;

    if (VideoFrameRate) {
        mp4FrameDuration = (u_int32_t)(((double)Mp4TimeScale) / VideoFrameRate);
    } else if (frameDuration) {
        VideoFrameRate = frameDuration;
        VideoFrameRate /= timeTicks;
        mp4FrameDuration = (Mp4TimeScale * frameDuration) / timeTicks;
    } else {
        if (allowVariableFrameRate == false) {
            fprintf(stderr,
                    "%s: variable rate video stream signalled,"
                    " please specify average frame rate with -r option\n"
                    " or --variable-frame-rate argument\n",
                    ProgName);
            return MP4_INVALID_TRACK_ID;
        }

        variableFrameRate = true;
    }

    ismacryp_session_id_t ismaCrypSId;
    mp4v2_ismacrypParams *icPp =  (mp4v2_ismacrypParams *) malloc(sizeof(mp4v2_ismacrypParams));
    memset(icPp, 0, sizeof(mp4v2_ismacrypParams));


    // initialize ismacryp session if encrypting
    if (doEncrypt) {

        if (ismacrypInitSession(&ismaCrypSId,KeyTypeVideo) != 0) {
            fprintf(stderr, "%s: could not initialize the ISMAcryp session\n",
                    ProgName);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetScheme(ismaCrypSId, &(icPp->scheme_type)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp scheme type. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetSchemeVersion(ismaCrypSId, &(icPp->scheme_version)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp scheme ver. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetKMSUri(ismaCrypSId, &(icPp->kms_uri)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp kms uri. sid %d\n",
                    ProgName, ismaCrypSId);
            CHECK_AND_FREE(icPp->kms_uri);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if ( ismacrypGetSelectiveEncryption(ismaCrypSId, &(icPp->selective_enc)) != ismacryp_rc_ok ) {
            fprintf(stderr, "%s: could not get ismacryp selec enc. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetKeyIndicatorLength(ismaCrypSId, &(icPp->key_ind_len)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp key ind len. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetIVLength(ismaCrypSId, &(icPp->iv_len)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp iv len. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
    }

    // create the new video track
    MP4TrackId trackId;
    if (doEncrypt) {
        trackId =
            MP4AddEncVideoTrack(
                    mp4File,
                    Mp4TimeScale,
                    mp4FrameDuration,
                    frameWidth,
                    frameHeight,
                    icPp,
                    MP4_MPEG4_VIDEO_TYPE);
    } else {
        trackId =
            MP4AddVideoTrack(
                    mp4File,
                    Mp4TimeScale,
                    mp4FrameDuration,
                    frameWidth,
                    frameHeight,
                    MP4_MPEG4_VIDEO_TYPE);
    }

    if (trackId == MP4_INVALID_TRACK_ID) {
        fprintf(stderr,
                "%s: can't create video track\n", ProgName);
        return MP4_INVALID_TRACK_ID;
    }

    if (VideoProfileLevelSpecified) {
        videoProfileLevel = VideoProfileLevel;
    }
    if (MP4GetNumberOfTracks(mp4File, MP4_VIDEO_TRACK_TYPE) == 1) {
        MP4SetVideoProfileLevel(mp4File, videoProfileLevel);
    }
    printf("es config size is %d\n", esConfigSize);
    if (esConfigSize) {
        MP4SetTrackESConfiguration(mp4File, trackId,
                pCurrentSample, esConfigSize);

        // move past ES config, so it doesn't go into first sample
        pCurrentSample += esConfigSize;
    }
    // Move the current frame to the beginning of the
    // buffer
    memmove(sampleBuffer, pCurrentSample, pObj - pCurrentSample + objSize);
    pObj = sampleBuffer + (pObj - pCurrentSample);
    pCurrentSample = sampleBuffer;
    MP4Timestamp prevFrameTimestamp = 0;

    // now process the rest of the video stream
    while ( true ) {
        if ( objType != MP4AV_MPEG4_VOP_START ) {
            // keep it in the buffer until a VOP comes along
            // Actually, do nothing, since we only want VOP
            // headers in the stream - wmay 6/2004
            //pObj += objSize;

        } else { // we have VOP
            u_int32_t sampleSize = (pObj + objSize) - pCurrentSample;

            vopType = MP4AV_Mpeg4GetVopType(pObj, objSize);

            mpeg4_frame_t *fr = MALLOC_STRUCTURE(mpeg4_frame_t);
            if (head == NULL) {
                head = tail = fr;
            } else {
                tail->next = fr;
                tail = fr;
            }
            fr->vopType = vopType;
            fr->frameTimestamp = currentSampleTime;
            fr->next = NULL;
            if ( variableFrameRate ) {
                // variable frame rate:  recalculate "mp4FrameDuration"
                if ( lastFrame ) {
                    // last frame
                    mp4FrameDuration = Mp4TimeScale / timeTicks;
                } else {
                    // not the last frame
                    u_int32_t vopTimeIncrement;
                    MP4AV_Mpeg4ParseVop(pObj, objSize, &vopType, timeBits, timeTicks, &vopTimeIncrement);
                    u_int32_t vopTime = vopTimeIncrement - lastVopTimeIncrement;
                    mp4FrameDuration = (Mp4TimeScale * vopTime) / timeTicks;
                    lastVopTimeIncrement = vopTimeIncrement % timeTicks;
                }
            }
            if ( prevSampleSize > 0 ) { // not the first time
                // fill sample data & length to write
                u_int8_t* sampleData2Write = NULL;
                u_int32_t sampleLen2Write = 0;
                if ( doEncrypt ) {
                    if ( ismacrypEncryptSampleAddHeader(ismaCrypSId,
                                sampleSize,
                                sampleBuffer,
                                &sampleLen2Write,
                                &sampleData2Write) != 0 ) {
                        fprintf(stderr,
                                "%s: can't encrypt video sample and add header %u\n",
                                ProgName, sampleId);
                    }
                } else {
                    sampleData2Write = sampleBuffer;
                    sampleLen2Write = prevSampleSize;
                }

		
                if (variableFrameRate == false) {
                    double now_calc;
                    now_calc = sampleId;
                    now_calc *= Mp4TimeScale;
                    now_calc /= VideoFrameRate;
                    MP4Timestamp now_ts = (MP4Timestamp)now_calc;
                    mp4FrameDuration = now_ts - prevFrameTimestamp;
                    prevFrameTimestamp = now_ts;
                    currentSampleTime = now_ts;
                }
                // Write the previous sample
                rc = MP4WriteSample(mp4File, trackId,
                        sampleData2Write, sampleLen2Write,
                        mp4FrameDuration, 0, prevVopType == VOP_TYPE_I);

                if ( doEncrypt && sampleData2Write ) {
                    // buffer allocated by encrypt function.
                    // must free it!
                    free(sampleData2Write);
                }

                if ( !rc ) {
                    fprintf(stderr,
                            "%s: can't write video frame %u\n",
                            ProgName, sampleId);
                    MP4DeleteTrack(mp4File, trackId);
                    return MP4_INVALID_TRACK_ID;
                }

                // deal with rendering time offsets
                // that can occur when B frames are being used
                // which is the case for all profiles except Simple Profile
                haveBframes |= (prevVopType == VOP_TYPE_B);

                if ( lastFrame ) {
                    // finished reading frames
                    break;
                }
                sampleId++;
            } // not the first time

            currentSampleTime += mp4FrameDuration;

            // Move the current frame to the beginning of the
            // buffer
            memmove(sampleBuffer, pCurrentSample, sampleSize);
            prevSampleSize = sampleSize;
            prevVopType = vopType;
            // reset pointers
            pObj = pCurrentSample = sampleBuffer + sampleSize;
        } // we have VOP

        // load next object from bitstream
        if (!LoadNextObject(inFile, pObj, &objSize, &objType)) {
            if (objType != MP4AV_MPEG4_VOP_START)
                break;
            lastFrame = true;
            objSize = 0;
            continue;
        }
        // guard against buffer overflow
        if (pObj + objSize >= pCurrentSample + maxSampleSize) {
            fprintf(stderr,
                    "%s: buffer overflow, invalid video stream?\n", ProgName);
            MP4DeleteTrack(mp4File, trackId);
            return MP4_INVALID_TRACK_ID;
        }
#ifdef DEBUG_MP4V
        if (Verbosity & MP4_DETAILS_SAMPLE) {
            printf("MP4V type %x size %u\n",
                    objType, objSize);
        }
#endif
    }
    bool doRenderingOffset = false;
    switch (videoProfileLevel) {
    case MPEG4_SP_L0:
    case MPEG4_SP_L1:
    case MPEG4_SP_L2:
    case MPEG4_SP_L3:
      break;
    default:
      doRenderingOffset = true;
      break;
    }
   
    if (doRenderingOffset && haveBframes) {
        // only generate ctts (with rendering offset for I, P frames) when
        // we need one.  We saved all the frame types and timestamps above -
        // we can't use MP4ReadSample, because the end frames might not have
        // been written
        refVopId = 1;
        refVopTime = 0;
        MP4SampleId maxSamples = MP4GetTrackNumberOfSamples(mp4File, trackId);
        // start with sample 2 - we know the first one is an I frame
        mpeg4_frame_t *fr = head->next; // skip the first one
        for (MP4SampleId ix = 2; ix <= maxSamples; ix++) {
            if (fr->vopType != VOP_TYPE_B) {
#ifdef DEBUG_MP4V_TS
                printf("sample %u %u renderingOffset "U64"\n",
                       refVopId, fr->vopType, fr->frameTimestamp - refVopTime);
#endif
                MP4SetSampleRenderingOffset(mp4File, trackId, refVopId,
                                            fr->frameTimestamp - refVopTime);
                refVopId = ix;
                refVopTime = fr->frameTimestamp;
            }
            fr = fr->next;
        }

#ifdef DEBUG_MP4V_TS
        printf("sample %u %u renderingOffset "U64"\n",
               refVopId, fr->vopType, fr->frameTimestamp - refVopTime);
#endif
        MP4SetSampleRenderingOffset(mp4File, trackId, refVopId,
                                    fr->frameTimestamp - refVopTime);
    }

    while (head != NULL) {
        tail = head->next;
        free(head);
        head = tail;
    }
    // terminate session if encrypting
    if (doEncrypt) {
        if (ismacrypEndSession(ismaCrypSId) != 0) {
            fprintf(stderr,
                    "%s: could not end the ISMAcryp session\n",
                    ProgName);
        }
    }

    return trackId;
}
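Mp4vCreator interleaves bitstream parsing with track setup; stripped of the parsing and ISMAcryp paths, the libmp4v2 calls it relies on reduce to the sketch below. The helper names and the split into two functions are illustrative, not part of mp4creator, and the header name differs between mp4v2 releases (the example above uses the older mp4.h API from mpeg4ip).

/* Minimal libmp4v2 usage behind Mp4vCreator: add an MPEG-4 video track,
 * store the decoder configuration (everything before the first VOP),
 * then write each access unit with its duration and a sync flag on I-VOPs. */
#include <mp4.h>

MP4TrackId add_mpeg4_video_track(MP4FileHandle file,
                                 u_int32_t timeScale, MP4Duration frameDuration,
                                 u_int16_t width, u_int16_t height,
                                 const u_int8_t *esConfig, u_int32_t esConfigSize)
{
    MP4TrackId track = MP4AddVideoTrack(file, timeScale, frameDuration,
                                        width, height, MP4_MPEG4_VIDEO_TYPE);
    if (track == MP4_INVALID_TRACK_ID)
        return MP4_INVALID_TRACK_ID;

    /* the VOSH/VO/VOL headers collected before the first VOP */
    MP4SetTrackESConfiguration(file, track, esConfig, esConfigSize);
    return track;
}

/* one call per parsed VOP; I-VOPs are marked as sync samples */
bool write_vop(MP4FileHandle file, MP4TrackId track,
               const u_int8_t *vop, u_int32_t size,
               MP4Duration duration, bool isIVop)
{
    return MP4WriteSample(file, track, vop, size,
                          duration, 0 /* renderingOffset */, isIVop);
}

Rendering offsets for streams that contain B-VOPs are then added after the fact with MP4SetSampleRenderingOffset(), as the tail of Mp4vCreator does.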
/**
    \fn initAudio
*/
bool muxerMp4v2::initAudio(void)
{
    audioTrackIds=new MP4TrackId[nbAStreams];
    audioPackets=new mp4v2AudioPacket[nbAStreams];
    
    for(int i=0;i<nbAStreams;i++)
    {
        WAVHeader *header=aStreams[i]->getInfo();
        ADM_audioStream*a=aStreams[i];
        audioPackets[i].clock=new audioClock(header->frequency);
        // Preload this track...
        if(false==loadAndToggleAudioSlot(i))
        {
            audioPackets[i].eos=true;
            continue;
        }
        
        switch(header->encoding)
        {
            case WAV_AAC:
                    {
                        uint8_t *extraData=NULL;
                        uint32_t extraDataLen=0;
                        if(!a->getExtraData(&extraDataLen,&extraData))
                            {
                                 GUI_Error_HIG("AAC","Cannot get AAC Extra data\n");
                                 return false;
                            }
                        audioTrackIds[i]=MP4AddAudioTrack(handle,
                                                      header->frequency,
                                                      1024,
                                                      MP4_MPEG4_AUDIO_TYPE);
                        if(MP4_INVALID_TRACK_ID==audioTrackIds[i])
                        {
                            ADM_error("Error adding audio track %i of type 0x%x\n",i,header->encoding);
                            return false;
                        }
                        aprintf("Add Track %d fq %d\n",audioTrackIds[i],header->frequency);
                        MP4SetAudioProfileLevel(handle,0x0f);
                        MP4SetTrackIntegerProperty(handle,audioTrackIds[i],"mdia.minf.stbl.stsd.mp4a.channels",
                                    header->channels);
                        MP4SetTrackESConfiguration(handle,audioTrackIds[i],extraData,extraDataLen);
                    break;
                    }
            case WAV_AC3:
                    if(false==addAc3(i, header))
                    {
                            return false;
                    }
                    break;
            case WAV_MP2:
            case WAV_MP3:
                    audioTrackIds[i]=MP4AddAudioTrack(handle,
                                                      header->frequency,
                                                      audioPackets[i].blocks[0].nbSamples,
                                                      MP4_MPEG2_AUDIO_TYPE);
                    if(MP4_INVALID_TRACK_ID==audioTrackIds[i])
                    {
                        ADM_error("Error adding audio track %i of type 0x%x\n",i,header->encoding);
                        return false;
                    }
                    aprintf("Add Track %d fq %d\n",audioTrackIds[i],header->frequency);
                    MP4SetAudioProfileLevel(handle,0x0f);
                    MP4SetTrackIntegerProperty(handle,audioTrackIds[i],"mdia.minf.stbl.stsd.mp4a.channels",
                                header->channels);
                    break;
            default:
                    ADM_error("Cannot create audio track of type 0x%x\n",header->encoding);
                    return false;
        }
        if(aStreams[i]->isLanguageSet())
        {
                MP4SetTrackLanguage(handle,audioTrackIds[i],aStreams[i]->getLanguage().c_str());
                ADM_info("[MP4v2] Setting language to %s \n",aStreams[i]->getLanguage().c_str());
        }else
            ADM_warning("[MP4v2] Language is undefined\n");

         MP4SetTrackBytesProperty(handle,audioTrackIds[i],"udta.name.value",
                    (const uint8_t*)"Stereo", strlen("Stereo"));

    }
    if(nbAStreams)
         MP4SetTrackIntegerProperty(handle, audioTrackIds[0], "tkhd.flags", 3);
    return true;
}
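For the WAV_AAC branch above, the calls boil down to the sketch below: a fixed 1024-sample frame duration, the stream's extradata (the AudioSpecificConfig) stored as the track's ES configuration, and the channel count written into the mp4a sample description. The function name is illustrative; handle is assumed to be an already-open MP4FileHandle.

/* Minimal sketch of the AAC track setup used by initAudio. */
static MP4TrackId add_aac_track(MP4FileHandle handle, uint32_t sampleRate,
                                uint16_t channels,
                                const uint8_t *asc, uint32_t ascLen)
{
    MP4TrackId track = MP4AddAudioTrack(handle, sampleRate, 1024,
                                        MP4_MPEG4_AUDIO_TYPE);
    if (track == MP4_INVALID_TRACK_ID)
        return MP4_INVALID_TRACK_ID;

    MP4SetAudioProfileLevel(handle, 0x0f);
    MP4SetTrackIntegerProperty(handle, track,
                               "mdia.minf.stbl.stsd.mp4a.channels", channels);
    MP4SetTrackESConfiguration(handle, track, asc, ascLen);
    return track;
}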
Example #4
static switch_status_t mp4_file_open(switch_file_handle_t *handle, const char *path)
{
	mp4_file_context_t *context;
	char *ext;
	unsigned int flags = 0;
	const char *tmp = NULL;

	if ((ext = strrchr(path, '.')) == 0) {
		switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_ERROR, "Invalid Format\n");
		return SWITCH_STATUS_GENERR;
	}
	ext++;

	if ((context = switch_core_alloc(handle->memory_pool, sizeof(mp4_file_context_t))) == 0) {
		return SWITCH_STATUS_MEMERR;
	}

	memset(context, 0, sizeof(mp4_file_context_t));

	context->offset = -100;
	if (handle->params && (tmp = switch_event_get_header(handle->params, "mp4v2_video_offset"))) {
		context->offset = atoi(tmp);
	}

	context->audio_type = MP4_ULAW_AUDIO_TYPE; // default

	if (handle->params && (tmp = switch_event_get_header(handle->params, "mp4v2_audio_codec"))) {
		if (!strcasecmp(tmp, "PCMU")) {
			context->audio_type = MP4_ULAW_AUDIO_TYPE;
		} else if (!strcasecmp(tmp, "MP3")) {
			context->audio_type = MP4_MP3_AUDIO_TYPE;
		} else if (!strcasecmp(tmp, "AAC")) {
			context->audio_type = MP4_MPEG4_AUDIO_TYPE;
		} else if (!strcasecmp(tmp, "L16")) {
			context->audio_type = MP4_PCM16_LITTLE_ENDIAN_AUDIO_TYPE;
		}
	}

	switch_mutex_init(&context->mutex, SWITCH_MUTEX_NESTED, handle->memory_pool);

	if (switch_test_flag(handle, SWITCH_FILE_FLAG_WRITE)) {
		flags |= SWITCH_FOPEN_WRITE | SWITCH_FOPEN_CREATE;
		if (switch_test_flag(handle, SWITCH_FILE_WRITE_APPEND) || switch_test_flag(handle, SWITCH_FILE_WRITE_OVER)) {
			flags |= SWITCH_FOPEN_READ;
		} else {
			flags |= SWITCH_FOPEN_TRUNCATE;
		}
	}

	if (switch_test_flag(handle, SWITCH_FILE_FLAG_READ)) {
		flags |= SWITCH_FOPEN_READ;
	}

	if ((context->fd = MP4CreateEx(path, 0, 1, 1, NULL, 0, NULL, 0)) == MP4_INVALID_FILE_HANDLE) {
		switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_CRIT, "Error opening file %s\n", path);
		return SWITCH_STATUS_GENERR;
	}

	switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_DEBUG, "sample rate: %d, channels: %d\n", handle->samplerate, handle->channels);

	if (context->audio_type == MP4_ULAW_AUDIO_TYPE) {
		context->audio = MP4AddULawAudioTrack(context->fd, handle->samplerate);
		MP4SetTrackIntegerProperty(context->fd, context->audio, "mdia.minf.stbl.stsd.ulaw.channels", handle->channels);
		MP4SetTrackIntegerProperty(context->fd, context->audio, "mdia.minf.stbl.stsd.ulaw.sampleSize", 8);
	} else if (context->audio_type == MP4_MP3_AUDIO_TYPE) {
		// handle->samplerate = 44100;
		context->audio = MP4AddAudioTrack(context->fd, handle->samplerate, handle->samplerate, MP4_MP3_AUDIO_TYPE);
		MP4SetTrackName(context->fd, context->audio, ".mp3");
		MP4SetTrackIntegerProperty(context->fd, context->audio, "mdia.minf.stbl.stsd.mp4a.channels", handle->channels);
		// MP4SetTrackIntegerProperty(context->fd, context->audio, "mdia.minf.stbl.stsd..mp3.channels", handle->channels);
	} else if (context->audio_type == MP4_PCM16_LITTLE_ENDIAN_AUDIO_TYPE) {
		context->audio = MP4AddAudioTrack(context->fd, handle->samplerate, handle->samplerate, MP4_PCM16_LITTLE_ENDIAN_AUDIO_TYPE);
		MP4SetTrackName(context->fd, context->audio, "lpcm");
		MP4SetTrackIntegerProperty(context->fd, context->audio, "mdia.minf.stbl.stsd.mp4a.channels", handle->channels);
		MP4SetTrackIntegerProperty(context->fd, context->audio, "mdia.minf.stbl.stsd.lpcm.channels", handle->channels);
		MP4SetTrackIntegerProperty(context->fd, context->audio, "mdia.minf.stbl.stsd.lpcm.sampleSize", 16);
	} else if (context->audio_type == MP4_MPEG4_AUDIO_TYPE) {
		/* AAC object types */
		#define AAC_MAIN 1
		#define AAC_LOW  2
		#define AAC_SSR  3
		#define AAC_LTP  4

		uint16_t info = 0;

		info |= AAC_LOW << 11; // aacObjectType (5bit)
		info |= get_aac_sample_rate_index(handle->samplerate) << 7; //(4bit)
		info |= handle->channels << 3; //(4bit)
		info = htons(info);

		context->audio = MP4AddAudioTrack(context->fd, handle->samplerate, handle->samplerate, MP4_MPEG4_AUDIO_TYPE);
		MP4SetTrackESConfiguration(context->fd, context->audio, (uint8_t *)&info, sizeof(info));
		MP4SetTrackIntegerProperty(context->fd, context->audio, "mdia.minf.stbl.stsd.mp4a.channels", handle->channels);
	}

	handle->format = 0;
	handle->sections = 0;
	handle->seekable = 0;
	handle->speed = 0;
	handle->pos = 0;
	handle->private_info = context;
	context->pool = handle->memory_pool;

	switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_INFO, "Opening File [%s] %dhz %s\n",
		path, handle->samplerate, switch_test_flag(handle, SWITCH_FILE_FLAG_VIDEO) ? " with VIDEO" : "");

	if (switch_core_codec_init(&context->audio_codec,
							   get_audio_codec_name(context->audio_type),
							   NULL,
							   NULL,
							   handle->samplerate,
							   20,//ms
							   handle->channels, SWITCH_CODEC_FLAG_ENCODE,
							   NULL, handle->memory_pool) == SWITCH_STATUS_SUCCESS) {
		switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_DEBUG, "Audio Codec Activation Success\n");
	} else {
		switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_ERROR, "Audio Codec Activation Fail\n");
		goto end;
	}

	if (switch_test_flag(handle, SWITCH_FILE_FLAG_VIDEO)) {
		if (switch_core_codec_init(&context->video_codec,
								   "H264",
								   NULL,
								   NULL,
								   90000,
								   0,//ms
								   1, SWITCH_CODEC_FLAG_ENCODE,
								   NULL, handle->memory_pool) == SWITCH_STATUS_SUCCESS) {
			switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_DEBUG, "Video Codec H264 Activation Success\n");
		} else {
			switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_ERROR, "Video Codec H264 Activation Fail\n");
			goto end;
		}
	}

	if (switch_test_flag(handle, SWITCH_FILE_FLAG_WRITE)) {
		MP4SetAudioProfileLevel(context->fd, 0x7F);
	}

	switch_buffer_create_dynamic(&context->buf, 512, 512, 1024000);

	return SWITCH_STATUS_SUCCESS;

end:
	if (context->fd) {
		MP4Close(context->fd, 0);
		context->fd = NULL;
	}
	return SWITCH_STATUS_FALSE;
}
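The MP4_MPEG4_AUDIO_TYPE branch above builds the two-byte AudioSpecificConfig by hand instead of asking an encoder for it. Below is a worked instance of that bit packing, assuming AAC-LC at 44100 Hz (sample-rate index 4 in the ISO/IEC 14496-3 table, which is what get_aac_sample_rate_index() is presumed to return for 44100) and two channels; the function name is hypothetical and htons() comes from the POSIX socket headers.

#include <stdint.h>
#include <arpa/inet.h>   /* htons */

/* 5 bits audioObjectType + 4 bits samplingFrequencyIndex +
 * 4 bits channelConfiguration + 3 zero GASpecificConfig bits = 16 bits */
static uint16_t make_aac_lc_asc_44100_stereo(void)
{
    uint16_t info = 0;

    info |= 2 << 11;    /* audioObjectType: 2 = AAC LC (AAC_LOW above) */
    info |= 4 << 7;     /* samplingFrequencyIndex: 4 = 44100 Hz */
    info |= 2 << 3;     /* channelConfiguration: 2 = stereo */
                        /* remaining 3 GASpecificConfig bits stay 0 */

    return htons(info); /* 0x1210 -> bytes 0x12 0x10 in stream order */
}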
Example #5
int encoder_write(unsigned long id, void* rbuffer, unsigned long bsize)
{	
	int             bytesencoded = 0;
	double         *sbuffer = (double*) rbuffer;
	unsigned char  *bbuffer = (unsigned char*) rbuffer;
	unsigned int    framesize = 0;

	if(pestreams[id].firstwrite)
	{
		int v = 0;

		pestreams[id].enchandle = faacEncOpen(pestreams[id].cfrequency, pestreams[id].cchannels,
											  &pestreams[id].samplesin, &pestreams[id].maxbytesout);

		pestreams[id].obuffer     = (unsigned char*)sys_mem_alloc(pestreams[id].maxbytesout);
		pestreams[id].cachebuffer = (unsigned char*)sys_mem_alloc(pestreams[id].samplesin * 10);
		pestreams[id].fb_size     = 0;
		pestreams[id].floatbuffer = 0;

		pestreams[id].econfig = faacEncGetCurrentConfiguration(pestreams[id].enchandle);

		pestreams[id].econfig->inputFormat = FAAC_INPUT_FLOAT;

		pestreams[id].econfig->aacObjectType = LOW; //objectType;

		if(pestreams[id].ismp4)
		{
			pestreams[id].econfig->mpegVersion   = MPEG4;
			pestreams[id].econfig->outputFormat  = 0;
			pestreams[id].econfig->allowMidside  = 1;
			pestreams[id].econfig->shortctl      = SHORTCTL_NORMAL;

			if(fsettings.plugin_settings_getnum("aac", "bitrate", &v, 0, 0))v = 0xa;
			pestreams[id].econfig->bitRate       = (ibitrates[v] * 1000) / pestreams[id].cchannels;

			v = 0x2;
			if(fsettings.plugin_settings_getnum("aac", "quality", &v, 0, 0))v = 0x2;
			pestreams[id].econfig->quantqual     = iqualities[v];

			pestreams[id].econfig->bandWidth     = pestreams[id].cfrequency / 2;

		}else{

			pestreams[id].econfig->mpegVersion   = MPEG2;

			pestreams[id].econfig->useTns        = 0;
			pestreams[id].econfig->shortctl      = SHORTCTL_NORMAL;
			pestreams[id].econfig->useLfe        = 0;
			pestreams[id].econfig->allowMidside  = 1;

			if(fsettings.plugin_settings_getnum("aac", "bitrate", &v, 0, 0))v = 0xa;
			pestreams[id].econfig->bitRate       = (ibitrates[v] * 1000) / pestreams[id].cchannels;

			v = 0x2;
			if(fsettings.plugin_settings_getnum("aac", "quality", &v, 0, 0))v = 0x2;
			pestreams[id].econfig->quantqual     = iqualities[v];

			pestreams[id].econfig->bandWidth     = 0;
		}


		faacEncSetConfiguration(pestreams[id].enchandle, pestreams[id].econfig);

		/* mp4 stuff */

		if(pestreams[id].ismp4)
		{
			unsigned char *ASC       = 0;
			unsigned long  ASCLength = 0;
			char           afname[v_sys_maxpath];
			BOOL           usedef = 1;

			WideCharToMultiByte(CP_ACP, 0, pestreams[id].filepath, -1, afname, sizeof(afname), "?", &usedef);

#			ifdef MP4_CREATE_EXTENSIBLE_FORMAT

				pestreams[id].mp4file = MP4Create(afname, 0, 0);
#			else

				pestreams[id].mp4file = MP4Create(afname, 0, 0, 0);
#			endif


			MP4SetTimeScale(pestreams[id].mp4file, 90000);
			pestreams[id].mp4track = MP4AddAudioTrack(pestreams[id].mp4file, pestreams[id].cfrequency, MP4_INVALID_DURATION, MP4_MPEG4_AUDIO_TYPE);
			MP4SetAudioProfileLevel(pestreams[id].mp4file, 0x0F);

			faacEncGetDecoderSpecificInfo(pestreams[id].enchandle, &ASC, &ASCLength);

			MP4SetTrackESConfiguration(pestreams[id].mp4file, pestreams[id].mp4track, ASC, ASCLength);


			free(ASC);

			MP4SetMetadataTool(pestreams[id].mp4file, "Fennec Player 1.1 (libfaac)");
			
		}



		pestreams[id].totalsamples   = 0;
		pestreams[id].encodedsamples = 0;
		pestreams[id].cachesize      = 0;
		pestreams[id].firstwrite     = 0;
	}

	
	/* if(pestreams[id].cbitspersample == 64) standard */
	{
		unsigned int i = 0;
		unsigned int sbytes = pestreams[id].cbitspersample / 8;


		framesize = pestreams[id].samplesin / pestreams[id].cchannels;

		if(pestreams[id].ismp4)
			pestreams[id].totalsamples += bsize / sbytes;

		while(1)
		{
			if((int)(bsize / sbytes) - (int)i < (int)pestreams[id].samplesin)
			{
				if((int)(bsize / sbytes) - (int)i > 0)
				{
					memcpy(pestreams[id].cachebuffer + pestreams[id].cachesize, sbuffer + i, (bsize / sbytes) - i);
					pestreams[id].cachesize += (bsize / sbytes) - i;
				}
				break;
			}

			if(pestreams[id].cachesize)
			{
				memcpy(pestreams[id].cachebuffer + pestreams[id].cachesize, sbuffer + i, (pestreams[id].samplesin * sbytes) - pestreams[id].cachesize);
				
				local_write_doublebuffer(id, (double*)pestreams[id].cachebuffer, pestreams[id].samplesin, bytesencoded, framesize);


				pestreams[id].cachesize = 0;

				i += pestreams[id].samplesin - pestreams[id].cachesize;

			}else{

				local_write_doublebuffer(id, (double*)(sbuffer + i), pestreams[id].samplesin, bytesencoded, framesize);

				i += pestreams[id].samplesin;
			}
		}
	}

	return 1;
}
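Example #1 and this plugin wire libfaac and libmp4v2 together the same way: faacEncGetDecoderSpecificInfo() produces the AudioSpecificConfig, which is stored as the track's ES configuration. A minimal sketch of that hand-off, assuming the encoder has already been configured for MPEG-4 with raw (headerless) output as done above; the function name is illustrative.

#include <faac.h>
#include <mp4.h>      /* older mp4v2 header, as used in the examples above */
#include <stdlib.h>

static MP4TrackId attach_faac_track(MP4FileHandle mp4, faacEncHandle enc,
                                    unsigned long sampleRate)
{
    unsigned char *ASC = NULL;
    unsigned long ASCLength = 0;

    MP4TrackId track = MP4AddAudioTrack(mp4, sampleRate, MP4_INVALID_DURATION,
                                        MP4_MPEG4_AUDIO_TYPE);
    MP4SetAudioProfileLevel(mp4, 0x0F);

    faacEncGetDecoderSpecificInfo(enc, &ASC, &ASCLength);
    MP4SetTrackESConfiguration(mp4, track, ASC, ASCLength);
    free(ASC);   /* libfaac allocates the buffer; the caller frees it */

    return track;
}

Because the track is created with MP4_INVALID_DURATION, each block returned by faacEncEncode() is then written with MP4WriteSample() passing an explicit per-sample duration, as Example #1 does with its dur/ofs bookkeeping.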