Example No. 1
//finds the intersections of all the lines
//returns intersection coordinates via the array c
void f_intersect(double *px, double *py, int pn,
                 double *wx, double *wy, int wn, double *c) {
  int i, j;
  int counter = 0; /* index into the output array c; not declared in the original excerpt */
  double yprime, xprime, wslope[100], pslope[100], c1, c2;
  getslope(px, py, pn, pslope);
  getslope(wx, wy, wn, wslope);

  for (i = 0; i < wn; i++) {
    for (j = 0; j < pn; j++) {
      c1 = py[j] - pslope[j] * px[j]; // y = mx + c  ->  c = y - mx
      c2 = wy[i] - wslope[i] * wx[i];

      if ((pslope[j] - wslope[i]) == 0) { // parallel lines: prevent division by 0
        printf("Slope division = 0; ERROR!\n");
      } else {
        xprime = (c2 - c1) / (pslope[j] - wslope[i]); // x coordinate of the intersection
        yprime = pslope[j] * xprime + c1; // y coordinate of the intersection

        if (pointincontainer(px[j], py[j], px[looper(j, pn)], py[looper(j, pn)],
                             xprime, yprime) == 1 &&
            pointincontainer(wx[i], wy[i], wx[looper(i, wn)], wy[looper(i, wn)],
                             xprime, yprime ) == 1) {
          c[counter] = xprime; c[counter + 1] = yprime;
          counter += 2;
        }
      }
    }
  }
}
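The excerpt above depends on three helpers that are not shown: getslope(), looper() and pointincontainer(). The sketch below is only a guess at what they might look like (wrap-around vertex index, per-edge slopes, bounding-box containment); it is not the original implementation.

// Hypothetical helpers assumed by the excerpt above (not from the original source).
static int looper(int i, int n) {
  return (i + 1) % n; // assumed: index of the next vertex, wrapping around
}

static void getslope(double *x, double *y, int n, double *slope) {
  for (int i = 0; i < n; i++) {
    int j = looper(i, n);
    slope[i] = (y[j] - y[i]) / (x[j] - x[i]); // vertical edges are not handled here
  }
}

static int pointincontainer(double x1, double y1, double x2, double y2,
                            double x, double y) {
  // assumed: returns 1 if (x, y) lies inside the bounding box of segment (x1, y1)-(x2, y2)
  double xmin = x1 < x2 ? x1 : x2, xmax = x1 < x2 ? x2 : x1;
  double ymin = y1 < y2 ? y1 : y2, ymax = y1 < y2 ? y2 : y1;
  return x >= xmin && x <= xmax && y >= ymin && y <= ymax;
}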
Example No. 2
int main() {
	signal(SIGINT, sig_handler);
	intro();
	//myinfo();
	looper();

	return 0;
}
static int trekking( struct RE *rexp ){
  struct RE track;
  for( int result = FALSE, iCatch; tracker( rexp, &track ); ){
    switch( track.type ){
    case HOOK:
      openCatch( &iCatch );
      result = loopGroup( &track );
      if( result ) closeCatch( iCatch );
      break;
    case GROUP: case PATH:
      result = loopGroup( &track );
      break;
    case SET:
      if( track.ptr[0] == '^' ){
        cutRexp( &track, 1 );
        track.mods |=  MOD_NEGATIVE;
      }
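      /* no break here: a (possibly negated) set falls through to looper() below */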
    case BACKREF: case META: case RANGEAB: case POINT: case SIMPLE:
      result = looper( &track );
    }

    if( result == FALSE ) return FALSE;
  }

  return TRUE;
}
Example No. 4
static long
loopoput(Loop *lb, Link *link, Block *volatile bp)
{
	long n;

	n = BLEN(bp);

	/* make it a single block with space for the loopback timing header */
	if(waserror()){
		freeb(bp);
		nexterror();
	}
	bp = padblock(bp, Tmsize);
	if(bp->next)
		bp = concatblock(bp);
	if(BLEN(bp) < lb->minmtu)
		bp = adjustblock(bp, lb->minmtu);
	poperror();
	ptime(bp->rp, todget(nil));

	link->packets++;
	link->bytes += n;

	qbwrite(link->oq, bp);

	looper(lb);
	return n;
}
Example No. 5
void NetPrefsServerView::SetNetworkData(BMessage* msg)
{
	// this shouldn't theoretically be able to happen but better safe than sorry
	BLooper* looper(Looper());
	if (looper == NULL) return;

	BAutolock lock(Looper());
	if (!lock.IsLocked()) return;
	// clear previous servers (if any)
	while (fServerList->CountRows() > 0) {
		BRow* row(fServerList->RowAt(0));
		fServerList->RemoveRow(row);
		delete row;
	}

	BString netString(S_PREFSERVER_SEL_STRING);
	netString += msg->FindString("name");
	netString += ":";
	type_code type;
	int32 count;
	ssize_t size;
	const ServerData* data;
	msg->GetInfo("server", &type, &count);
	for (int32 i = 0; i < count; i++) {
		msg->FindData("server", B_ANY_TYPE, i, reinterpret_cast<const void**>(&data), &size);
		AddServer(data);
	}
	fActiveNetwork = msg;
	fSelectTitleString->SetText(netString.String());
	fSelectTitleString->ResizeToPreferred();
}
void NuPlayer::Decoder::configure(const sp<AMessage> &format) {
    CHECK(mCodec == NULL);

    AString mime;
    CHECK(format->findString("mime", &mime));

    sp<AMessage> notifyMsg =
        new AMessage(kWhatCodecNotify, id());

    mCSDIndex = 0;
    for (size_t i = 0;; ++i) {
        sp<ABuffer> csd;
        if (!format->findBuffer(StringPrintf("csd-%d", i).c_str(), &csd)) {
            break;
        }

        mCSD.push(csd);
    }

#ifdef QCOM_HARDWARE
    sp<ABuffer> extendedCSD = ExtendedCodec::getRawCodecSpecificData(format);
    if (extendedCSD != NULL) {
        ALOGV("pushing extended CSD of size %d", extendedCSD->size());
        mCSD.push(extendedCSD);
    }

    sp<ABuffer> aacCSD = ExtendedCodec::getAacCodecSpecificData(format);
    if (aacCSD != NULL) {
        ALOGV("pushing AAC CSD of size %d", aacCSD->size());
        mCSD.push(aacCSD);
    }
#endif

    if (mNativeWindow != NULL) {
        format->setObject("native-window", mNativeWindow);
    }

    // Current video decoders do not return from OMX_FillThisBuffer
    // quickly, violating the OpenMAX specs, until that is remedied
    // we need to invest in an extra looper to free the main event
    // queue.
    bool needDedicatedLooper = !strncasecmp(mime.c_str(), "video/", 6);

    mFormat = format;
    mCodec = new ACodec;

    if (needDedicatedLooper && mCodecLooper == NULL) {
        mCodecLooper = new ALooper;
        mCodecLooper->setName("NuPlayerDecoder");
        mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
    }

    (needDedicatedLooper ? mCodecLooper : looper())->registerHandler(mCodec);

    mCodec->setNotificationMessage(notifyMsg);
    mCodec->initiateSetup(format);
}
Example No. 7
void MkDataPack::Clear(void)
{
	MkHashMapLooper<MkHashStr, MkDataUnitInterface*> looper(m_UnitTable);
	MK_STL_LOOP(looper)
	{
		delete looper.GetCurrentField();
	}
	m_UnitTable.Clear();
}
Example No. 8
int
x_cmd(File *f, Cmd *cp)
{
	if(cp->re)
		looper(f, cp, cp->cmdc=='x');
	else
		linelooper(f, cp);
	return TRUE;
}
Example No. 9
File: xec.c Project: deadpixi/sam
bool
x_cmd(File *f, Cmd *cp)
{
    if(cp->re)
        looper(f, cp, cp->cmdc=='x');
    else
        linelooper(f, cp);
    return true;
}
status_t ARTPSession::setup(const sp<ASessionDescription> &desc) {
    CHECK_EQ(mInitCheck, (status_t)NO_INIT);

    mDesc = desc;

    mRTPConn = new ARTPConnection(ARTPConnection::kRegularlyRequestFIR);

    looper()->registerHandler(mRTPConn);

    for (size_t i = 1; i < mDesc->countTracks(); ++i) {
        AString connection;
        if (!mDesc->findAttribute(i, "c=", &connection)) {
            // No per-stream connection information, try global fallback.
            if (!mDesc->findAttribute(0, "c=", &connection)) {
                ALOGE("Unable to find connection attribute.");
                return mInitCheck;
            }
        }
        if (!(connection == "IN IP4 127.0.0.1")) {
            ALOGE("We only support localhost connections for now.");
            return mInitCheck;
        }

        unsigned port;
        if (!validateMediaFormat(i, &port) || (port & 1) != 0) {
            ALOGE("Invalid media format.");
            return mInitCheck;
        }

        sp<APacketSource> source = new APacketSource(mDesc, i);
        if (source->initCheck() != OK) {
            ALOGE("Unsupported format.");
            return mInitCheck;
        }

        int rtpSocket = MakeUDPSocket(port);
        int rtcpSocket = MakeUDPSocket(port + 1);

        mTracks.push(TrackInfo());
        TrackInfo *info = &mTracks.editItemAt(mTracks.size() - 1);
        info->mRTPSocket = rtpSocket;
        info->mRTCPSocket = rtcpSocket;

        sp<AMessage> notify = new AMessage(kWhatAccessUnitComplete, id());
        notify->setSize("track-index", mTracks.size() - 1);

        mRTPConn->addStream(
                rtpSocket, rtcpSocket, mDesc, i, notify, false /* injected */);

        info->mPacketSource = source;
    }

    mInitCheck = OK;

    return OK;
}
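MakeUDPSocket() is not part of this excerpt. A minimal sketch, under the assumption that it only creates a UDP socket bound to the given local port, could look like this:

// Hypothetical sketch of MakeUDPSocket (assumed behavior: bind a UDP socket to 'port').
#include <arpa/inet.h>
#include <netinet/in.h>
#include <string.h>
#include <sys/socket.h>
#include <unistd.h>

static int MakeUDPSocket(unsigned port) {
    int s = socket(AF_INET, SOCK_DGRAM, 0);
    if (s < 0) {
        return -1;
    }

    struct sockaddr_in addr;
    memset(&addr, 0, sizeof(addr));
    addr.sin_family = AF_INET;
    addr.sin_addr.s_addr = htonl(INADDR_ANY);
    addr.sin_port = htons(port);

    if (bind(s, (const struct sockaddr *)&addr, sizeof(addr)) < 0) {
        close(s);
        return -1;
    }
    return s;
}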
Example No. 11
void GameTroopState::UpdateTroopState(void)
{
	m_HitPointGage.UpdateGage();

	MkHashMapLooper<MkHashStr, IntStateType> looper(m_StateTable);
	MK_STL_LOOP(looper)
	{
		looper.GetCurrentField().UpdateState();
	}
}
Example No. 12
    void onDrawContent(SkCanvas* canvas) override {
        SkPaint paint;
        paint.setAntiAlias(true);
        paint.setTextSize(SkIntToScalar(24));
        SkAutoTUnref<SkBlurDrawLooper> looper(
            SkBlurDrawLooper::Create(SK_ColorBLUE,
                                     SkBlurMask::ConvertRadiusToSigma(SkIntToScalar(2)),
                                     0, 0));
        paint.setLooper(looper);
        SkScalar height = paint.getFontMetrics(nullptr);
        if (!fDecodeSucceeded) {
            SkString failure;
            if (fResPath.size() == 0) {
                failure.printf("resource path is required!");
            } else {
                failure.printf("Failed to decode %s", fCurrFile.c_str());
            }
            canvas->drawText(failure.c_str(), failure.size(), 0, height, paint);
            return;
        }

        // Name, size of the file, and whether or not it is premultiplied.
        SkString header(SkOSPath::Basename(fCurrFile.c_str()));
        header.appendf("     [%dx%d]     %s", fBitmap.width(), fBitmap.height(),
                       (fPremul ? "premultiplied" : "unpremultiplied"));
        canvas->drawText(header.c_str(), header.size(), 0, height, paint);
        canvas->translate(0, height);

        // Help messages
        header.printf("Press '%c' to move to the next image.'", fNextImageChar);
        canvas->drawText(header.c_str(), header.size(), 0, height, paint);
        canvas->translate(0, height);

        header.printf("Press '%c' to toggle premultiplied decode.", fTogglePremulChar);
        canvas->drawText(header.c_str(), header.size(), 0, height, paint);

        // Now draw the image itself.
        canvas->translate(height * 2, height * 2);
        if (!fPremul) {
            // An unpremultiplied bitmap cannot currently be drawn.
            SkAutoLockPixels alp(fBitmap);
            // Copy it to a bitmap which can be drawn, converting
            // to premultiplied:
            SkBitmap bm;
            bm.allocN32Pixels(fBitmap.width(), fBitmap.height());
            for (int i = 0; i < fBitmap.width(); ++i) {
                for (int j = 0; j < fBitmap.height(); ++j) {
                    *bm.getAddr32(i, j) = premultiply_unpmcolor(*fBitmap.getAddr32(i, j));
                }
            }
            canvas->drawBitmap(bm, 0, 0);
        } else {
            canvas->drawBitmap(fBitmap, 0, 0);
        }
    }
Example No. 13
void TimeSyncer::notifyError(status_t err) {
    if (mNotify == NULL) {
        looper()->stop();
        return;
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatError);
    notify->setInt32("err", err);
    notify->post();
}
DashPlayer::Decoder::~Decoder() {
    ALooper::handler_id id = 0;
    if (mCodec != NULL) {
        id = mCodec->id();
    }
    if (id != 0) {
        if (mCodecLooper != NULL) {
            mCodecLooper->stop();
            mCodecLooper->unregisterHandler(id);
        }
        looper()->unregisterHandler(id);
    }
}
Example No. 15
void DirectRenderer::internalSetFormat(
        size_t trackIndex, const sp<AMessage> &format) {
    CHECK_LT(trackIndex, 2u);

    CHECK(mDecoderContext[trackIndex] == NULL);

    sp<AMessage> notify = new AMessage(kWhatDecoderNotify, id());
    notify->setSize("trackIndex", trackIndex);

    mDecoderContext[trackIndex] = new DecoderContext(notify);
    looper()->registerHandler(mDecoderContext[trackIndex]);

    CHECK_EQ((status_t)OK,
             mDecoderContext[trackIndex]->init(
                 format, trackIndex == 0 ? mSurfaceTex : NULL));

    if (trackIndex == 1) {
        // Audio
        mAudioRenderer = new AudioRenderer(mDecoderContext[1]);
        looper()->registerHandler(mAudioRenderer);
    }
}
Example No. 16
File: Project3.c Project: domen1c/c
int main ( void )
{
    // declare and initialize local array
    char color[MAX_ELEMENTS] = { 'M', 'R', 'O', 'Y', 'L', 'G', 'T', 'B', 'N', 'P', 'K' };
    int today[MAX_ELEMENTS] = { 130, 120, 115, 105, 93, 83, 77, 70, 65, 60, 50 };
    int group[MAX_ELEMENTS] = { 125, 115, 95, 90, 75, 70, 65, 60, 55, 50, 40 };
    int single[MAX_ELEMENTS] = { 125, 127, 110, 100, 88, 78, 72, 65, 60, 55, 45 };

    // declare and initialize local variables
    char runAgain, tempColor, tempType;
    int mainLoop = 0, tempAmount = 0, subTotal = 0, totalTickets = 0, totalPrice = 0;

    // welcome user
    greetingsEarthlings();

    while ( mainLoop == 0 )
    {
        runAgain = looper();
        if ( runAgain == 'Y' )
        {
            tempColor = getColor( color );
            if ( tempColor != 'Q' )
            {
                tempType = getType();
                tempAmount = getAmount( tempType );
                subTotal = confirmPurchase( tempColor, tempType, tempAmount,
                                            today, group, single );
                if ( subTotal != 0 )
                {
                    totalPrice += subTotal;
                    totalTickets += tempAmount;
                    displayCurrent( &totalTickets, &totalPrice );
                }
                else
                    printf ( "Purchase canceled, returning to main menu.\n\n" );
            }
            else
                mainLoop = 1;
        }
        else if ( runAgain == 'N' )
        {
            mainLoop = 1;
        }
    }// end mainLoop

    printf ( "\nTicket Novice closed by user.\n" );
    closeOut( &totalTickets, &totalPrice );

    system( "PAUSE" );
    return 0;
}// end main
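The main loop above relies on a looper() helper that returns 'Y' or 'N'; it is not included in the excerpt. A minimal sketch, assuming it simply prompts the user until a valid answer is entered, might be:

// Hypothetical sketch of looper(): prompt until the user answers Y or N.
#include <ctype.h>
#include <stdio.h>

char looper( void )
{
    char answer = '\0';
    while ( answer != 'Y' && answer != 'N' )
    {
        printf ( "Would you like to make a purchase? (Y/N): " );
        if ( scanf ( " %c", &answer ) == 1 )
            answer = (char)toupper( (unsigned char)answer );
    }
    return answer;
}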
Example No. 17
void run_basicLoop_data()
{

  //the problem with this structure is that it doesn't parallelize
  //parallelization would work well on dellcmscornell

  TString computername = gSystem->Getenv("HOST");
  TString dir = "/cu1/joshmt/"; //default is dellcmscornell
  if (computername =="JoshPC") {
    dir="~/data/";
  }

  dir += "BasicNtuples/";
  dir += version; dir+="/";
  TChain dummy("dummy");
  TString dirs = dir; dirs+="*";
  dummy.Add(dirs);
  TObjArray* dirlist = dummy.GetListOfFiles();
  int nfiles=dirlist->GetEntries();

  unsigned int currentfileindex=1;
  for (int ifile=0; ifile<nfiles; ifile++) {
    TString samplefiles = dirlist->At(ifile)->GetTitle();
    samplefiles+="/*.root";

    cout<<"About to start on files: "<<samplefiles<<endl;

    //    if (!samplefiles.Contains("LM13")) continue; //hack to skip some samples

    TChain ch("BasicTreeMaker/tree");
    TChain info("BasicTreeMaker/infotree");
    ch.Add(samplefiles);
    info.Add(samplefiles);
    basicLoop looper(&ch,&info);
    //important! this is where cuts are defined
    looper.setCutScheme(basicLoop::kBaseline0);
    looper.setMETType(basicLoop::kpfMET);
    looper.setMETRange(basicLoop::kWide); //get more events for plotting
    looper.setJetType(basicLoop::kPF);
    looper.setLeptonType(basicLoop::kPFLeptons);
    looper.setDPType(basicLoop::kminDP); //apply the CU minDeltaPhi cut

    looper.setIgnoredCut("cut3Jets"); //N-1

    looper.setBCut(0);  //no b tagging cut so that plots can apply it selectively

    looper.Loop(currentfileindex);  //go!
    currentfileindex++;
  }

}
Example No. 18
void run_cutflowPlotter()
{

  //TString libname="basicLoop_C.so";
  TString computername = gSystem->Getenv("HOST");
  TString dir = "/cu1/joshmt/";
  if (computername =="JoshPC") {
    dir="~/data/";
  }
  //could also add CASTOR

  dir += "BasicNtuples/";
  if (extrapath!="") { dir += extrapath; dir += "/";}
  dir += version; dir+="/";
  TChain dummy("dummy");
  TString dirs = dir; dirs+="*";
  dummy.Add(dirs);
  TObjArray* dirlist = dummy.GetListOfFiles();
  int nfiles=dirlist->GetEntries();

  for (int ifile=0; ifile<nfiles; ifile++) {
    TString samplefiles = dirlist->At(ifile)->GetTitle();
    samplefiles+="/*.root";

    cout<<"About to start on files: "<<samplefiles<<endl;

    if (samplefiles.Contains("DATA")) continue; //skip data (use run_cutflowPlotter_data.C)

    if (!(samplefiles.Contains("TTbarJ") )) continue; //hack to skip some samples
    
    TChain ch("BasicTreeMaker/tree");
    TChain info("BasicTreeMaker/infotree");
    ch.Add(samplefiles);
    info.Add(samplefiles);
    basicLoop looper(&ch,&info);

    looper.setCutScheme(basicLoop::kBaseline0);
    looper.setMETType(basicLoop::kpfMET);
    looper.setMETRange(basicLoop::kHigh); //signal region
    looper.setJetType(basicLoop::kPF);
    looper.setLeptonType(basicLoop::kPFLeptons);
    looper.setDPType(basicLoop::kminDP);
    looper.setCleaningType(basicLoop::kMuonCleaning);

    looper.setBCut(3); //require 3 b tags so that we run the full cut flow table
    looper.cutflowPlotter();
  }


}
void DashPlayer::Decoder::configure(const sp<MetaData> &meta) {
    CHECK(mCodec == NULL);

    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    ALOGV("@@@@:: Decoder::configure :: mime is --- %s ---",mime);

    sp<AMessage> notifyMsg =
        new AMessage(kWhatCodecNotify, id());

    sp<AMessage> format = makeFormat(meta);

    if (mNativeWindow != NULL) {
        format->setObject("native-window", mNativeWindow);
    }

    // Current video decoders do not return from OMX_FillThisBuffer
    // quickly, violating the OpenMAX specs, until that is remedied
    // we need to invest in an extra looper to free the main event
    // queue.
    bool isVideo = !strncasecmp(mime, "video/", 6);

    if(!isVideo) {
        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));
    }

    ALOGV("@@@@:: DashCodec created ");
    mCodec = new DashCodec;

    bool needDedicatedLooper = false;

    if (isVideo){
        needDedicatedLooper = true;
        if(mCodecLooper == NULL) {
            ALOGV("@@@@:: Creating Looper for %s",(isVideo?"Video":"Audio"));
            mCodecLooper = new ALooper;
            mCodecLooper->setName("DashPlayerDecoder");
            mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
        }
    }

    (needDedicatedLooper ? mCodecLooper : looper())->registerHandler(mCodec);
    mCodec->setNotificationMessage(notifyMsg);
    mCodec->initiateSetup(format);
}
Example No. 20
void WifiDisplaySource::disconnectClient2() {
    ALOGV("disconnectClient2");

    if (mClientInfo.mPlaybackSession != NULL) {
        looper()->unregisterHandler(mClientInfo.mPlaybackSession->id());
        mClientInfo.mPlaybackSession.clear();
    }

    if (mClientSessionID != 0) {
        mNetSession->destroySession(mClientSessionID);
        mClientSessionID = 0;
    }

    mClient->onDisplayDisconnected();

    finishStopAfterDisconnectingClient();
}
Example No. 21
bool MkWindowThemedNode::_UpdateRegion(void)
{
	const MkWindowThemeFormData* formData = MK_STATIC_RES.GetWindowThemeSet().GetFormData(m_ThemeName, m_ComponentType, m_CustomFormName);
	bool ok = (formData != NULL);
	if (ok)
	{
		MkFloatRect oldWinRect = m_WindowRect;
		MkFloatRect oldCliRect = m_ClientRect;
		
		formData->SetClientSizeToForm(this, m_ClientRect.size, m_ClientRect.position, m_WindowRect.size);

		if (m_WindowRect != oldWinRect)
		{
			SetAlignmentCommand(); // if the window rect changed, this node's alignment must also be updated
		}

		// if the client rect changed, order the visual-pattern-derived child objects to realign
		if ((!m_ChildrenNode.Empty()) && (m_ClientRect != oldCliRect))
		{
			MkHashMapLooper<MkHashStr, MkSceneNode*> looper(m_ChildrenNode);
			MK_STL_LOOP(looper)
			{
				if (looper.GetCurrentField()->IsDerivedFrom(ePA_SNT_VisualPatternNode))
				{
					// shadow node: synchronize its client size
					if (looper.GetCurrentField()->GetNodeName() == ShadowNodeName)
					{
						MkWindowThemedNode* shadowNode = dynamic_cast<MkWindowThemedNode*>(looper.GetCurrentField());
						if (shadowNode != NULL)
						{
							shadowNode->SetClientSize(m_ClientRect.size);
						}
					}
					// etc
					else
					{
						MkVisualPatternNode* etcNode = dynamic_cast<MkVisualPatternNode*>(looper.GetCurrentField());
						if (etcNode != NULL)
						{
							etcNode->SetAlignmentCommand();
						}
					}
				}
			}
		}
	}

	return ok; // not present in the excerpt; assumed from the bool return type
}
Example No. 22
void DCCConnect::UpdateBar(const BMessenger& msgr, int readSize, int cps, uint32 size, bool update)
{
	BMessage msg(B_UPDATE_STATUS_BAR);

	if (update) {
		char text[128];

		sprintf(text, "%.1f", size / 1024.0);
		msg.AddString("trailing_text", text);

		sprintf(text, "%d", cps);
		msg.AddString("text", text);
	}

	msg.AddFloat("delta", readSize);
	BLooper* looper(NULL);
	DCCConnect* transferView((DCCConnect*)msgr.Target(&looper));
	if ((looper != NULL) && (transferView != NULL)) looper->PostMessage(&msg, transferView->fBar);
}
Example No. 23
int run_sample_osx(const long num_events = -1)
{
    // load relevant libraries
    gSystem->Load("$CMSSW_BASE/lib/$SCRAM_ARCH/libPackagesLooperTools.dylib");
    gSystem->Load("$CMSSW_BASE/lib/$SCRAM_ARCH/libAnalysisExampleCMS2Looper.dylib");

    // simple style
    gROOT->SetStyle("Plain");
    gStyle->SetOptStat(111111);

    LoadFWLite();
    TChain chain("Events");
    chain.Add("/nfs-7/userdata/rwkelley/cms2/dyjets_ntuple_slim_10k_53X_v2.root");

    CMS2Looper looper("output/dy_plots.root");
    looper.SetRunList("json/Merged_190456-208686_8TeV_PromptReReco_Collisions12_goodruns.txt");
    std::cout << "running cms2 looper..." << std::endl;
    looper.ScanChain(chain, num_events);

    return 0;
}
void WifiDisplaySource::disconnectClient2() {
    ALOGV("disconnectClient2");

    if (mClientInfo.mPlaybackSession != NULL) {
        looper()->unregisterHandler(mClientInfo.mPlaybackSession->id());
        mClientInfo.mPlaybackSession.clear();
    }

    if (mClientSessionID != 0) {
        mNetSession->destroySession(mClientSessionID);
        mClientSessionID = 0;
    }

    mClient->onDisplayDisconnected();

    if (0 != property_set("wfd.enable", "0")) {
        ALOGE("DisplayDisconnected set wfd.enable property fail.");
    }
    ALOGI("DisplayDisconnected set wfd.enable property to 0.");

    finishStopAfterDisconnectingClient();
}
Example No. 25
static void
loopbackclose(Chan *c)
{
	Loop *lb;
	int ref, chan;

	lb = c->aux;

	qlock(lb);

	/*
	 * closing either side hangs up the stream
	 */
	if((c->flag & COPEN) && TYPE(c->qid.path) == Qdata){
		chan = ID(c->qid.path);
		if(--lb->link[chan].ref == 0){
			qhangup(lb->link[chan ^ 1].oq, nil);
			looper(lb);
		}
	}


	/*
	 *  if both sides are closed, they are reusable
	 */
	if(lb->link[0].ref == 0 && lb->link[1].ref == 0){
		for(chan = 0; chan < 2; chan++){
			closelink(&lb->link[chan], 0);
			qreopen(lb->link[chan].iq);
			qreopen(lb->link[chan].oq);
			qsetlimit(lb->link[chan].oq, lb->link[chan].limit);
			qsetlimit(lb->link[chan].iq, lb->link[chan].limit);
		}
	}
	ref = --lb->ref;
	if(ref == 0)
		freelb(lb);
	qunlock(lb);
}
Example No. 26
static void test_mixed(skiatest::Reporter* reporter) {
    SkLayerDrawLooper::Builder looperBuilder;
    SkLayerDrawLooper::LayerInfo layerInfo;

    // Add the back layer, with the defaults.
    (void)looperBuilder.addLayer(layerInfo);

    // Add the front layer, with some layer info set.
    layerInfo.fOffset.set(10.0f, 20.0f);
    layerInfo.fPaintBits |= SkLayerDrawLooper::kXfermode_Bit;
    SkPaint* layerPaint = looperBuilder.addLayerOnTop(layerInfo);
    layerPaint->setBlendMode(SkBlendMode::kSrc);

    FakeDevice device;
    SkCanvas canvas(&device);
    SkPaint paint;
    sk_sp<SkDrawLooper> looper(looperBuilder.detach());
    SkArenaAlloc alloc{48};
    SkDrawLooper::Context* context = looper->makeContext(&canvas, &alloc);

    // The back layer should come first.
    REPORTER_ASSERT(reporter, context->next(&canvas, &paint));
    REPORTER_ASSERT(reporter, paint.getBlendMode() == SkBlendMode::kSrcOver);
    canvas.drawRect(SkRect::MakeWH(50.0f, 50.0f), paint);
    REPORTER_ASSERT(reporter, 0.0f == device.fLastMatrix.getTranslateX());
    REPORTER_ASSERT(reporter, 0.0f == device.fLastMatrix.getTranslateY());
    paint.reset();

    // Then the front layer.
    REPORTER_ASSERT(reporter, context->next(&canvas, &paint));
    REPORTER_ASSERT(reporter, paint.getBlendMode() == SkBlendMode::kSrc);
    canvas.drawRect(SkRect::MakeWH(50.0f, 50.0f), paint);
    REPORTER_ASSERT(reporter, 10.0f == device.fLastMatrix.getTranslateX());
    REPORTER_ASSERT(reporter, 20.0f == device.fLastMatrix.getTranslateY());

    // Only two layers were added, so that should be the end.
    REPORTER_ASSERT(reporter, !context->next(&canvas, &paint));
}
Example No. 27
status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
    if (*decoder != NULL) {
        return OK;
    }

    sp<MetaData> meta = mSource->getFormat(audio);

    if (meta == NULL) {
        return -EWOULDBLOCK;
    }

    if (!audio) {
        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));
        mVideoIsAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime);
    }

    sp<AMessage> notify =
        new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
                     id());

    *decoder = audio ? new Decoder(notify) :
                       new Decoder(notify, mNativeWindow);
    looper()->registerHandler(*decoder);

    (*decoder)->configure(meta);

    int64_t durationUs;
    if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
        sp<NuPlayerDriver> driver = mDriver.promote();
        if (driver != NULL) {
            driver->notifyDuration(durationUs);
        }
    }

    return OK;
}
status_t SensorEventQueue::waitForEvent() const
{
    const int fd = getFd();
    sp<Looper> looper(getLooper());

    int events;
    int32_t result;
    do {
        result = looper->pollOnce(-1, NULL, &events, NULL);
        if (result == ALOOPER_POLL_ERROR) {
            ALOGE("SensorEventQueue::waitForEvent error (errno=%d)", errno);
            result = -EPIPE; // unknown error, so we make up one
            break;
        }
        if (events & ALOOPER_EVENT_HANGUP) {
            // the other-side has died
            ALOGE("SensorEventQueue::waitForEvent error HANGUP");
            result = -EPIPE; // unknown error, so we make up one
            break;
        }
    } while (result != fd);

    return  (result == fd) ? status_t(NO_ERROR) : result;
}
Example No. 29
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetDataSource:
        {
            ALOGV("kWhatSetDataSource");

            CHECK(mSource == NULL);

            sp<RefBase> obj;
            CHECK(msg->findObject("source", &obj));

            mSource = static_cast<Source *>(obj.get());

            looper()->registerHandler(mSource);

            CHECK(mDriver != NULL);
            sp<NuPlayerDriver> driver = mDriver.promote();
            if (driver != NULL) {
                driver->notifySetDataSourceCompleted(OK);
            }
            break;
        }

        case kWhatPrepare:
        {
            mSource->prepareAsync();
            break;
        }

        case kWhatGetTrackInfo:
        {
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            status_t err = INVALID_OPERATION;
            if (mSource != NULL) {
                Parcel* reply;
                CHECK(msg->findPointer("reply", (void**)&reply));
                err = mSource->getTrackInfo(reply);
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            response->postReply(replyID);
            break;
        }

        case kWhatSelectTrack:
        {
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            status_t err = INVALID_OPERATION;
            if (mSource != NULL) {
                size_t trackIndex;
                int32_t select;
                CHECK(msg->findSize("trackIndex", &trackIndex));
                CHECK(msg->findInt32("select", &select));
                err = mSource->selectTrack(trackIndex, select);
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            response->postReply(replyID);
            break;
        }

        case kWhatPollDuration:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mPollDurationGeneration) {
                // stale
                break;
            }

            int64_t durationUs;
            if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
                sp<NuPlayerDriver> driver = mDriver.promote();
                if (driver != NULL) {
                    driver->notifyDuration(durationUs);
                }
            }

            msg->post(1000000ll);  // poll again in a second.
            break;
        }

        case kWhatSetVideoNativeWindow:
        {
            ALOGV("kWhatSetVideoNativeWindow");

            mDeferredActions.push_back(
                    new ShutdownDecoderAction(
                        false /* audio */, true /* video */));

            sp<RefBase> obj;
            CHECK(msg->findObject("native-window", &obj));

            mDeferredActions.push_back(
                    new SetSurfaceAction(
                        static_cast<NativeWindowWrapper *>(obj.get())));

            if (obj != NULL) {
                // If there is a new surface texture, instantiate decoders
                // again if possible.
                mDeferredActions.push_back(
                        new SimpleAction(&NuPlayer::performScanSources));
            }

            processDeferredActions();
            break;
        }

        case kWhatSetAudioSink:
        {
            ALOGV("kWhatSetAudioSink");

            sp<RefBase> obj;
            CHECK(msg->findObject("sink", &obj));

            mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get());
            break;
        }

        case kWhatStart:
        {
            ALOGV("kWhatStart");

            mVideoIsAVC = false;
            mAudioEOS = false;
            mVideoEOS = false;
            mSkipRenderingAudioUntilMediaTimeUs = -1;
            mSkipRenderingVideoUntilMediaTimeUs = -1;
            mVideoLateByUs = 0;
            mNumFramesTotal = 0;
            mNumFramesDropped = 0;
            mStarted = true;

            mSource->start();

            uint32_t flags = 0;

            if (mSource->isRealTime()) {
                flags |= Renderer::FLAG_REAL_TIME;
            }

            mRenderer = new Renderer(
                    mAudioSink,
                    new AMessage(kWhatRendererNotify, id()),
                    flags);

            looper()->registerHandler(mRenderer);

            postScanSources();
            break;
        }

        case kWhatScanSources:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mScanSourcesGeneration) {
                // Drop obsolete msg.
                break;
            }

            mScanSourcesPending = false;

            ALOGV("scanning sources haveAudio=%d, haveVideo=%d",
                 mAudioDecoder != NULL, mVideoDecoder != NULL);

            bool mHadAnySourcesBefore =
                (mAudioDecoder != NULL) || (mVideoDecoder != NULL);

            if (mNativeWindow != NULL) {
                instantiateDecoder(false, &mVideoDecoder);
            }

            if (mAudioSink != NULL) {
                instantiateDecoder(true, &mAudioDecoder);
            }

            if (!mHadAnySourcesBefore
                    && (mAudioDecoder != NULL || mVideoDecoder != NULL)) {
                // This is the first time we've found anything playable.

                if (mSourceFlags & Source::FLAG_DYNAMIC_DURATION) {
                    schedulePollDuration();
                }
            }

            status_t err;
            if ((err = mSource->feedMoreTSData()) != OK) {
                if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
                    // We're not currently decoding anything (no audio or
                    // video tracks found) and we just ran out of input data.

                    if (err == ERROR_END_OF_STREAM) {
                        notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                    } else {
                        notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
                    }
                }
                break;
            }

            if ((mAudioDecoder == NULL && mAudioSink != NULL)
                    || (mVideoDecoder == NULL && mNativeWindow != NULL)) {
                msg->post(100000ll);
                mScanSourcesPending = true;
            }
            break;
        }

        case kWhatVideoNotify:
        case kWhatAudioNotify:
        {
            bool audio = msg->what() == kWhatAudioNotify;

            sp<AMessage> codecRequest;
            CHECK(msg->findMessage("codec-request", &codecRequest));

            int32_t what;
            CHECK(codecRequest->findInt32("what", &what));

            if (what == ACodec::kWhatFillThisBuffer) {
                status_t err = feedDecoderInputData(
                        audio, codecRequest);

                if (err == -EWOULDBLOCK) {
                    if (mSource->feedMoreTSData() == OK) {
                        msg->post(10000ll);
                    }
                }
            } else if (what == ACodec::kWhatEOS) {
                int32_t err;
                CHECK(codecRequest->findInt32("err", &err));

                if (err == ERROR_END_OF_STREAM) {
                    ALOGV("got %s decoder EOS", audio ? "audio" : "video");
                } else {
                    ALOGV("got %s decoder EOS w/ error %d",
                         audio ? "audio" : "video",
                         err);
                }

                mRenderer->queueEOS(audio, err);
            } else if (what == ACodec::kWhatFlushCompleted) {
                bool needShutdown;

                if (audio) {
                    CHECK(IsFlushingState(mFlushingAudio, &needShutdown));
                    mFlushingAudio = FLUSHED;
                } else {
                    CHECK(IsFlushingState(mFlushingVideo, &needShutdown));
                    mFlushingVideo = FLUSHED;

                    mVideoLateByUs = 0;
                }

                ALOGV("decoder %s flush completed", audio ? "audio" : "video");

                if (needShutdown) {
                    ALOGV("initiating %s decoder shutdown",
                         audio ? "audio" : "video");

                    (audio ? mAudioDecoder : mVideoDecoder)->initiateShutdown();

                    if (audio) {
                        mFlushingAudio = SHUTTING_DOWN_DECODER;
                    } else {
                        mFlushingVideo = SHUTTING_DOWN_DECODER;
                    }
                }

                finishFlushIfPossible();
            } else if (what == ACodec::kWhatOutputFormatChanged) {
                if (audio) {
                    int32_t numChannels;
                    CHECK(codecRequest->findInt32(
                                "channel-count", &numChannels));

                    int32_t sampleRate;
                    CHECK(codecRequest->findInt32("sample-rate", &sampleRate));

                    ALOGV("Audio output format changed to %d Hz, %d channels",
                         sampleRate, numChannels);

                    mAudioSink->close();

                    audio_output_flags_t flags;
                    int64_t durationUs;
                    // FIXME: we should handle the case where the video decoder
                    // is created after we receive the format change indication.
                    // Current code will just make that we select deep buffer
                    // with video which should not be a problem as it should
                    // not prevent from keeping A/V sync.
                    if (mVideoDecoder == NULL &&
                            mSource->getDuration(&durationUs) == OK &&
                            durationUs
                                > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
                        flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
                    } else {
                        flags = AUDIO_OUTPUT_FLAG_NONE;
                    }

                    int32_t channelMask;
                    if (!codecRequest->findInt32("channel-mask", &channelMask)) {
                        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
                    }

                    CHECK_EQ(mAudioSink->open(
                                sampleRate,
                                numChannels,
                                (audio_channel_mask_t)channelMask,
                                AUDIO_FORMAT_PCM_16_BIT,
                                8 /* bufferCount */,
                                NULL,
                                NULL,
                                flags),
                             (status_t)OK);
                    mAudioSink->start();

                    mRenderer->signalAudioSinkChanged();
                } else {
                    // video

                    int32_t width, height;
                    CHECK(codecRequest->findInt32("width", &width));
                    CHECK(codecRequest->findInt32("height", &height));

                    int32_t cropLeft, cropTop, cropRight, cropBottom;
                    CHECK(codecRequest->findRect(
                                "crop",
                                &cropLeft, &cropTop, &cropRight, &cropBottom));

                    int32_t displayWidth = cropRight - cropLeft + 1;
                    int32_t displayHeight = cropBottom - cropTop + 1;

                    ALOGV("Video output format changed to %d x %d "
                         "(crop: %d x %d @ (%d, %d))",
                         width, height,
                         displayWidth,
                         displayHeight,
                         cropLeft, cropTop);

                    sp<AMessage> videoInputFormat =
                        mSource->getFormat(false /* audio */);

                    // Take into account sample aspect ratio if necessary:
                    int32_t sarWidth, sarHeight;
                    if (videoInputFormat->findInt32("sar-width", &sarWidth)
                            && videoInputFormat->findInt32(
                                "sar-height", &sarHeight)) {
                        ALOGV("Sample aspect ratio %d : %d",
                              sarWidth, sarHeight);

                        displayWidth = (displayWidth * sarWidth) / sarHeight;

                        ALOGV("display dimensions %d x %d",
                              displayWidth, displayHeight);
                    }

                    notifyListener(
                            MEDIA_SET_VIDEO_SIZE, displayWidth, displayHeight);
                }
            } else if (what == ACodec::kWhatShutdownCompleted) {
                ALOGV("%s shutdown completed", audio ? "audio" : "video");
                if (audio) {
                    mAudioDecoder.clear();

                    CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
                    mFlushingAudio = SHUT_DOWN;
                } else {
                    mVideoDecoder.clear();

                    CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER);
                    mFlushingVideo = SHUT_DOWN;
                }

                finishFlushIfPossible();
            } else if (what == ACodec::kWhatError) {
                ALOGE("Received error from %s decoder, aborting playback.",
                     audio ? "audio" : "video");

                mRenderer->queueEOS(audio, UNKNOWN_ERROR);
            } else if (what == ACodec::kWhatDrainThisBuffer) {
                renderBuffer(audio, codecRequest);
            } else if (what != ACodec::kWhatComponentAllocated
                    && what != ACodec::kWhatComponentConfigured
                    && what != ACodec::kWhatBuffersAllocated) {
                ALOGV("Unhandled codec notification %d '%c%c%c%c'.",
                      what,
                      what >> 24,
                      (what >> 16) & 0xff,
                      (what >> 8) & 0xff,
                      what & 0xff);
            }

            break;
        }

        case kWhatRendererNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            if (what == Renderer::kWhatEOS) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                int32_t finalResult;
                CHECK(msg->findInt32("finalResult", &finalResult));

                if (audio) {
                    mAudioEOS = true;
                } else {
                    mVideoEOS = true;
                }

                if (finalResult == ERROR_END_OF_STREAM) {
                    ALOGV("reached %s EOS", audio ? "audio" : "video");
                } else {
                    ALOGE("%s track encountered an error (%d)",
                         audio ? "audio" : "video", finalResult);

                    notifyListener(
                            MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, finalResult);
                }

                if ((mAudioEOS || mAudioDecoder == NULL)
                        && (mVideoEOS || mVideoDecoder == NULL)) {
                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                }
            } else if (what == Renderer::kWhatPosition) {
                int64_t positionUs;
                CHECK(msg->findInt64("positionUs", &positionUs));

                CHECK(msg->findInt64("videoLateByUs", &mVideoLateByUs));

                if (mDriver != NULL) {
                    sp<NuPlayerDriver> driver = mDriver.promote();
                    if (driver != NULL) {
                        driver->notifyPosition(positionUs);

                        driver->notifyFrameStats(
                                mNumFramesTotal, mNumFramesDropped);
                    }
                }
            } else if (what == Renderer::kWhatFlushComplete) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
            } else if (what == Renderer::kWhatVideoRenderingStart) {
                notifyListener(MEDIA_INFO, MEDIA_INFO_RENDERING_START, 0);
            } else if (what == Renderer::kWhatMediaRenderingStart) {
                ALOGV("media rendering started");
                notifyListener(MEDIA_STARTED, 0, 0);
            }
            break;
        }

        case kWhatMoreDataQueued:
        {
            break;
        }

        case kWhatReset:
        {
            ALOGV("kWhatReset");

            mDeferredActions.push_back(
                    new ShutdownDecoderAction(
                        true /* audio */, true /* video */));

            mDeferredActions.push_back(
                    new SimpleAction(&NuPlayer::performReset));

            processDeferredActions();
            break;
        }

        case kWhatSeek:
        {
            int64_t seekTimeUs;
            CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));

            ALOGV("kWhatSeek seekTimeUs=%lld us", seekTimeUs);

            mDeferredActions.push_back(
                    new SimpleAction(&NuPlayer::performDecoderFlush));

            mDeferredActions.push_back(new SeekAction(seekTimeUs));

            processDeferredActions();
            break;
        }

        case kWhatPause:
        {
            CHECK(mRenderer != NULL);
            mSource->pause();
            mRenderer->pause();
            break;
        }

        case kWhatResume:
        {
            CHECK(mRenderer != NULL);
            mSource->resume();
            mRenderer->resume();
            break;
        }

        case kWhatSourceNotify:
        {
            onSourceNotify(msg);
            break;
        }

        default:
            TRESPASS();
            break;
    }
}
Example No. 30
status_t WifiDisplaySource::onSetupRequest(
        int32_t sessionID,
        int32_t cseq,
        const sp<ParsedMessage> &data) {
    CHECK_EQ(sessionID, mClientSessionID);
    if (mClientInfo.mPlaybackSessionID != -1) {
        // We only support a single playback session per client.
        // This is due to the reversed keep-alive design in the wfd specs...
        sendErrorResponse(sessionID, "400 Bad Request", cseq);
        return ERROR_MALFORMED;
    }

    AString transport;
    if (!data->findString("transport", &transport)) {
        sendErrorResponse(sessionID, "400 Bad Request", cseq);
        return ERROR_MALFORMED;
    }

    RTPSender::TransportMode rtpMode = RTPSender::TRANSPORT_UDP;

    int clientRtp, clientRtcp;
    if (transport.startsWith("RTP/AVP/TCP;")) {
        AString interleaved;
        if (ParsedMessage::GetAttribute(
                    transport.c_str(), "interleaved", &interleaved)
                && sscanf(interleaved.c_str(), "%d-%d",
                          &clientRtp, &clientRtcp) == 2) {
            rtpMode = RTPSender::TRANSPORT_TCP_INTERLEAVED;
        } else {
            bool badRequest = false;

            AString clientPort;
            if (!ParsedMessage::GetAttribute(
                        transport.c_str(), "client_port", &clientPort)) {
                badRequest = true;
            } else if (sscanf(clientPort.c_str(), "%d-%d",
                              &clientRtp, &clientRtcp) == 2) {
            } else if (sscanf(clientPort.c_str(), "%d", &clientRtp) == 1) {
                // No RTCP.
                clientRtcp = -1;
            } else {
                badRequest = true;
            }

            if (badRequest) {
                sendErrorResponse(sessionID, "400 Bad Request", cseq);
                return ERROR_MALFORMED;
            }

            rtpMode = RTPSender::TRANSPORT_TCP;
        }
    } else if (transport.startsWith("RTP/AVP;unicast;")
            || transport.startsWith("RTP/AVP/UDP;unicast;")) {
        bool badRequest = false;

        AString clientPort;
        if (!ParsedMessage::GetAttribute(
                    transport.c_str(), "client_port", &clientPort)) {
            badRequest = true;
        } else if (sscanf(clientPort.c_str(), "%d-%d",
                          &clientRtp, &clientRtcp) == 2) {
        } else if (sscanf(clientPort.c_str(), "%d", &clientRtp) == 1) {
            // No RTCP.
            clientRtcp = -1;
        } else {
            badRequest = true;
        }

        if (badRequest) {
            sendErrorResponse(sessionID, "400 Bad Request", cseq);
            return ERROR_MALFORMED;
        }
#if 1
    // The older LG dongles don't specify client_port=xxx, apparently.
    } else if (transport == "RTP/AVP/UDP;unicast") {
        clientRtp = 19000;
        clientRtcp = -1;
#endif
    } else {
        sendErrorResponse(sessionID, "461 Unsupported Transport", cseq);
        return ERROR_UNSUPPORTED;
    }

    int32_t playbackSessionID = makeUniquePlaybackSessionID();

    sp<AMessage> notify = new AMessage(kWhatPlaybackSessionNotify, this);
    notify->setInt32("playbackSessionID", playbackSessionID);
    notify->setInt32("sessionID", sessionID);

    sp<PlaybackSession> playbackSession =
        new PlaybackSession(
                mOpPackageName, mNetSession, notify, mInterfaceAddr, mHDCP, mMediaPath.c_str());

    looper()->registerHandler(playbackSession);

    AString uri;
    data->getRequestField(1, &uri);

    if (strncasecmp("rtsp://", uri.c_str(), 7)) {
        sendErrorResponse(sessionID, "400 Bad Request", cseq);
        return ERROR_MALFORMED;
    }

    if (!(uri.startsWith("rtsp://") && uri.endsWith("/wfd1.0/streamid=0"))) {
        sendErrorResponse(sessionID, "404 Not found", cseq);
        return ERROR_MALFORMED;
    }

    RTPSender::TransportMode rtcpMode = RTPSender::TRANSPORT_UDP;
    if (clientRtcp < 0) {
        rtcpMode = RTPSender::TRANSPORT_NONE;
    }

    status_t err = playbackSession->init(
            mClientInfo.mRemoteIP.c_str(),
            clientRtp,
            rtpMode,
            clientRtcp,
            rtcpMode,
            mSinkSupportsAudio,
            mUsingPCMAudio,
            mSinkSupportsVideo,
            mChosenVideoResolutionType,
            mChosenVideoResolutionIndex,
            mChosenVideoProfile,
            mChosenVideoLevel);

    if (err != OK) {
        looper()->unregisterHandler(playbackSession->id());
        playbackSession.clear();
    }

    switch (err) {
        case OK:
            break;
        case -ENOENT:
            sendErrorResponse(sessionID, "404 Not Found", cseq);
            return err;
        default:
            sendErrorResponse(sessionID, "403 Forbidden", cseq);
            return err;
    }

    mClientInfo.mPlaybackSessionID = playbackSessionID;
    mClientInfo.mPlaybackSession = playbackSession;

    AString response = "RTSP/1.0 200 OK\r\n";
    AppendCommonResponse(&response, cseq, playbackSessionID);

    if (rtpMode == RTPSender::TRANSPORT_TCP_INTERLEAVED) {
        response.append(
                AStringPrintf(
                    "Transport: RTP/AVP/TCP;interleaved=%d-%d;",
                    clientRtp, clientRtcp));
    } else {
        int32_t serverRtp = playbackSession->getRTPPort();

        AString transportString = "UDP";
        if (rtpMode == RTPSender::TRANSPORT_TCP) {
            transportString = "TCP";
        }

        if (clientRtcp >= 0) {
            response.append(
                    AStringPrintf(
                        "Transport: RTP/AVP/%s;unicast;client_port=%d-%d;"
                        "server_port=%d-%d\r\n",
                        transportString.c_str(),
                        clientRtp, clientRtcp, serverRtp, serverRtp + 1));
        } else {
            response.append(
                    AStringPrintf(
                        "Transport: RTP/AVP/%s;unicast;client_port=%d;"
                        "server_port=%d\r\n",
                        transportString.c_str(),
                        clientRtp, serverRtp));
        }
    }

    response.append("\r\n");

    err = mNetSession->sendRequest(sessionID, response.c_str());

    if (err != OK) {
        return err;
    }

    mState = AWAITING_CLIENT_PLAY;

    scheduleReaper();
    scheduleKeepAlive(sessionID);

    return OK;
}