status_t CameraDeviceClient::dumpClient(int fd, const Vector<String16>& args) {
    String8 result;
    result.appendFormat("CameraDeviceClient[%d] (%p) dump:\n",
            mCameraId,
            (getRemoteCallback() != NULL ?
                    IInterface::asBinder(getRemoteCallback()).get() : NULL));
    result.appendFormat("  Current client UID %u\n", mClientUid);

    result.append("  State:\n");
    result.appendFormat("    Request ID counter: %d\n", mRequestIdCounter);
    if (mInputStream.configured) {
        result.appendFormat("    Current input stream ID: %d\n", mInputStream.id);
    } else {
        result.append("    No input stream configured.\n");
    }
    if (!mStreamMap.isEmpty()) {
        result.append("    Current output stream IDs:\n");
        for (size_t i = 0; i < mStreamMap.size(); i++) {
            result.appendFormat("      Stream %d\n", mStreamMap.valueAt(i));
        }
    } else {
        result.append("    No output streams configured.\n");
    }
    write(fd, result.string(), result.size());

    // TODO: print dynamic/request section from most recent requests
    mFrameProcessor->dump(fd, args);

    return dumpDevice(fd, args);
}
// ----------------------------------------------------------------------------
void BackupBufDumper::dump(String8 &result, const char* prefix) {
    // dump status to log buffer first
    result.appendFormat("%s*BackupBufDumper mRingBuffer(size=%u, count=%u)\n",
            prefix, mRingBuffer.getSize(), mRingBuffer.getCount());
    for (size_t i = 0; i < mRingBuffer.getValidSize(); i++) {
        const sp<BackupBuffer>& buffer = getItem(i);
        // %zu matches the size_t loop index (the original %u relied on
        // matching widths and is undefined on LP64 builds)
        result.appendFormat("%s    [%02zu] handle(source=%p, backup=%p)\n",
                prefix, i, buffer->mSourceHandle, buffer->mGraphicBuffer->handle);
    }

    // start buffer dump check and process
    String8 name;
    String8 name_prefix;
    BufferQueueDump::getDumpFileName(name, mName);
    for (size_t i = 0; i < mRingBuffer.getValidSize(); i++) {
        const sp<BackupBuffer>& buffer = getItem(i);
        name_prefix = String8::format("[%s](Backup%02zu_H%p_ts%" PRId64 ")",
                name.string(), i, buffer->mSourceHandle, ns2ms(buffer->mTimeStamp));
        getGraphicBufferUtil().dump(buffer->mGraphicBuffer,
                name_prefix.string(), DUMP_FILE_PATH);
        BQD_LOGI("[dump] Backup:%02zu, handle(source=%p, backup=%p)",
                i, buffer->mSourceHandle, buffer->mGraphicBuffer->handle);
    }
}
std::string RecentEventLogger::dump() const {
    std::lock_guard<std::mutex> lk(mLock);

    //TODO: replace String8 with std::string completely in this function
    String8 buffer;
    buffer.appendFormat("last %zu events\n", mRecentEvents.size());
    int j = 0;
    for (int i = mRecentEvents.size() - 1; i >= 0; --i) {
        const auto& ev = mRecentEvents[i];
        struct tm * timeinfo = localtime(&(ev.mWallTime.tv_sec));
        buffer.appendFormat("\t%2d (ts=%.9f, wall=%02d:%02d:%02d.%03d) ",
                ++j, ev.mEvent.timestamp / 1e9,
                timeinfo->tm_hour, timeinfo->tm_min, timeinfo->tm_sec,
                (int) ns2ms(ev.mWallTime.tv_nsec));

        // data
        if (mSensorType == SENSOR_TYPE_STEP_COUNTER) {
            buffer.appendFormat("%" PRIu64 ", ", ev.mEvent.u64.step_counter);
        } else {
            for (size_t k = 0; k < mEventSize; ++k) {
                buffer.appendFormat("%.2f, ", ev.mEvent.data[k]);
            }
        }
        buffer.append("\n");
    }
    return std::string(buffer.string());
}
void SensorService::SensorEventConnection::dump(String8& result) {
    Mutex::Autolock _l(mConnectionLock);
    result.appendFormat("\tOperating Mode: %s\n",
            mDataInjectionMode ? "DATA_INJECTION" : "NORMAL");
    result.appendFormat("\t %s | WakeLockRefCount %d | uid %d | cache size %d | "
            "max cache size %d\n",
            mPackageName.string(), mWakeLockRefCount, mUid, mCacheSize, mMaxCacheSize);
    for (size_t i = 0; i < mSensorInfo.size(); ++i) {
        const FlushInfo& flushInfo = mSensorInfo.valueAt(i);
        result.appendFormat("\t %s 0x%08x | status: %s | pending flush events %d \n",
                mService->getSensorName(mSensorInfo.keyAt(i)).string(),
                mSensorInfo.keyAt(i),
                flushInfo.mFirstFlushPending ? "First flush pending" : "active",
                flushInfo.mPendingFlushEventsToSend);
    }
#if DEBUG_CONNECTIONS
    result.appendFormat("\t events recvd: %d | sent %d | cache %d | dropped %d |"
            " total_acks_needed %d | total_acks_recvd %d\n",
            mEventsReceived, mEventsSent, mEventsSentFromCache,
            mEventsReceived - (mEventsSentFromCache + mEventsSent + mCacheSize),
            mTotalAcksNeeded, mTotalAcksReceived);
#endif
}
void RenderEngine::dump(String8& result) {
    const GLExtensions& extensions(GLExtensions::getInstance());
    result.appendFormat("GLES: %s, %s, %s\n",
            extensions.getVendor(),
            extensions.getRenderer(),
            extensions.getVersion());
    result.appendFormat("%s\n", extensions.getExtension());
}
static bool find(String8& result,
        const String8& pattern, const char* const search, bool exact) {
    // in the emulator case, we just return the hardcoded name
    // of the software renderer.
    if (checkGlesEmulationStatus() == 0) {
        ALOGD("Emulator without GPU support detected. "
              "Fallback to software renderer.");
#if defined(__LP64__)
        result.setTo("/system/lib64/egl/libGLES_android.so");
#else
        result.setTo("/system/lib/egl/libGLES_android.so");
#endif
        return true;
    }

    if (exact) {
        String8 absolutePath;
        absolutePath.appendFormat("%s/%s.so", search, pattern.string());
        if (!access(absolutePath.string(), R_OK)) {
            result = absolutePath;
            return true;
        }
        return false;
    }

    DIR* d = opendir(search);
    if (d != NULL) {
        struct dirent cur;
        struct dirent* e;
        while (readdir_r(d, &cur, &e) == 0 && e) {
            if (e->d_type == DT_DIR) {
                continue;
            }
            if (!strcmp(e->d_name, "libGLES_android.so")) {
                // always skip the software renderer
                continue;
            }
            if (strstr(e->d_name, pattern.string()) == e->d_name) {
                if (!strcmp(e->d_name + strlen(e->d_name) - 3, ".so")) {
                    result.clear();
                    result.appendFormat("%s/%s", search, e->d_name);
                    closedir(d);
                    return true;
                }
            }
        }
        closedir(d);
    }
    return false;
}
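A hedged sketch of how a loader might drive find() above; the library names and search paths are illustrative assumptions, not the actual EGL loader logic:

// Hypothetical caller: try an exact vendor name first, then fall back to
// scanning the system EGL directory for any matching prefix.
static String8 locateGlesDriver() {
    String8 path;
    if (find(path, String8("libGLES_mali"), "/vendor/lib/egl", true)) {
        return path;  // exact hit: /vendor/lib/egl/libGLES_mali.so
    }
    if (find(path, String8("libGLES"), "/system/lib/egl", false)) {
        return path;  // first directory entry whose name starts with "libGLES"
    }
    return String8();  // empty path signals failure to the caller
}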
void Camera3ZslStream::dump(int fd, const Vector<String16> &args) const {
    (void) args;

    String8 lines;
    lines.appendFormat("    Stream[%d]: ZSL\n", mId);
    write(fd, lines.string(), lines.size());

    Camera3IOStreamBase::dump(fd, args);

    lines = String8();
    lines.appendFormat("      Input buffers pending: %zu, in flight %zu\n",
            mInputBufferQueue.size(), mBuffersInFlight.size());
    write(fd, lines.string(), lines.size());
}
void SensorDevice::dump(String8& result) {
    if (!mSensorModule) return;
    sensor_t const* list;
    ssize_t count = mSensorModule->get_sensors_list(mSensorModule, &list);

    result.appendFormat("halVersion 0x%08x\n", getHalDeviceVersion());
    result.appendFormat("%d h/w sensors:\n", int(count));

    Mutex::Autolock _l(mLock);
    for (size_t i = 0; i < size_t(count); i++) {
        const Info& info = mActivationCount.valueFor(list[i].handle);
        if (info.batchParams.isEmpty()) continue;
        result.appendFormat("handle=0x%08x, active-count=%zu, batch_period(ms)={ ",
                list[i].handle, info.batchParams.size());
        for (size_t j = 0; j < info.batchParams.size(); j++) {
            const BatchParams& params = info.batchParams.valueAt(j);
            result.appendFormat("%4.1f%s", params.batchDelay / 1e6f,
                    j < info.batchParams.size() - 1 ? ", " : "");
        }
        result.appendFormat(" }, selected=%4.1f ms\n",
                info.bestBatchParams.batchDelay / 1e6f);

        result.appendFormat("handle=0x%08x, active-count=%zu, batch_timeout(ms)={ ",
                list[i].handle, info.batchParams.size());
        for (size_t j = 0; j < info.batchParams.size(); j++) {
            BatchParams params = info.batchParams.valueAt(j);
            result.appendFormat("%4.1f%s", params.batchTimeout / 1e6f,
                    j < info.batchParams.size() - 1 ? ", " : "");
        }
        result.appendFormat(" }, selected=%4.1f ms\n",
                info.bestBatchParams.batchTimeout / 1e6f);
    }
}
static void InternalSetAudioRoutesGB(SwitchState aState) {
    audio_io_handle_t handle =
        AudioSystem::getOutput((AudioSystem::stream_type)AudioSystem::SYSTEM);
    String8 cmd;

    if (aState == SWITCH_STATE_HEADSET || aState == SWITCH_STATE_HEADPHONE) {
        cmd.appendFormat("routing=%d", GetRoutingMode(nsIAudioManager::FORCE_HEADPHONES));
    } else if (aState == SWITCH_STATE_OFF) {
        cmd.appendFormat("routing=%d", GetRoutingMode(nsIAudioManager::FORCE_SPEAKER));
    }

    AudioSystem::setParameters(handle, cmd);
}
String8 Status::toString8() const {
    String8 ret;
    if (mException == EX_NONE) {
        ret.append("No error");
    } else {
        ret.appendFormat("Status(%d): '", mException);
        if (mException == EX_SERVICE_SPECIFIC ||
            mException == EX_TRANSACTION_FAILED) {
            ret.appendFormat("%d: ", mErrorCode);
        }
        ret.append(String8(mMessage));
        ret.append("'");
    }
    return ret;
}
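A brief usage sketch of toString8(); the logging helper is hypothetical, and the exact EX_* exception numbers are not asserted here:

#include <binder/Status.h>
#include <utils/Log.h>

// Hypothetical logging helper built on toString8().
void logStatus(const android::binder::Status& status) {
    // Status::ok() renders as "No error"; a service-specific failure with
    // error code 7 and message "busy" renders along the lines of
    // "Status(<EX_SERVICE_SPECIFIC>): '7: busy'".
    ALOGD("binder call returned: %s", status.toString8().string());
}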
void HDMIAudioCaps::getRatesForAF(String8& rates) {
    Mutex::Autolock _l(mLock);
    rates.clear();

    // If the sink does not support basic audio, then it supports no audio.
    if (!mBasicAudioSupported) return;

    // Basic audio always supports from 32k through 48k.
    uint32_t tmp = kSR_32000 | kSR_44100 | kSR_48000;

    // To keep things simple, only report mode information for the PCM mode
    // which supports the maximum number of channels.
    ssize_t ndx = getMaxChModeNdx_l();
    if (ndx >= 0) tmp |= mModes[ndx].sr_bitmask;

    bool first = true;
    for (uint32_t i = 1; tmp; i <<= 1) {
        if (i & tmp) {
            rates.appendFormat(first ? "%d" : "|%d", srMaskToSR(i));
            first = false;
            tmp &= ~i;
        }
    }
}
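The bit-walk above is a compact idiom for rendering a mask as a '|'-separated list. A minimal self-contained sketch of the same idiom, with stand-in constants and a stand-in srMaskToSR (the real helpers live in the HDMI caps code, and the bit values here are assumed for illustration):

#include <cstdint>
#include <cstdio>
#include <cstring>

// Stand-ins for the kSR_* bits used above.
enum : uint32_t { kSR_32000 = 1u << 0, kSR_44100 = 1u << 1, kSR_48000 = 1u << 2 };

static int srMaskToSR(uint32_t bit) {  // stand-in for the real lookup
    switch (bit) {
        case kSR_32000: return 32000;
        case kSR_44100: return 44100;
        case kSR_48000: return 48000;
        default:        return 0;
    }
}

int main() {
    uint32_t tmp = kSR_32000 | kSR_44100 | kSR_48000;
    char rates[64] = "";
    bool first = true;
    // Visit each set bit; clearing it as we go lets "tmp" double as the
    // loop condition, so the scan stops after the highest set bit.
    for (uint32_t i = 1; tmp; i <<= 1) {
        if (i & tmp) {
            size_t len = strlen(rates);
            snprintf(rates + len, sizeof(rates) - len,
                     first ? "%d" : "|%d", srMaskToSR(i));
            first = false;
            tmp &= ~i;
        }
    }
    printf("%s\n", rates);  // prints: 32000|44100|48000
    return 0;
}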
void HwcDebug::logHwcProps(uint32_t listFlags) {
    static int hwcModuleCompType = -1;
    static int sMdpCompMaxLayers = 0;
    static String8 hwcModuleCompTypeLog("");
    if (-1 == hwcModuleCompType) {
        // One time stuff
        char mdpCompPropStr[PROPERTY_VALUE_MAX];
        if (property_get("debug.mdpcomp.maxlayer", mdpCompPropStr, NULL) > 0) {
            sMdpCompMaxLayers = atoi(mdpCompPropStr);
        }
        hwcModuleCompType =
            qdutils::QCCompositionType::getInstance().getCompositionType();
        hwcModuleCompTypeLog.appendFormat("%s%s%s%s%s%s",
            // Is hwc module composition type now a bit-field?!
            (hwcModuleCompType == qdutils::COMPOSITION_TYPE_GPU) ? "[GPU]" : "",
            (hwcModuleCompType & qdutils::COMPOSITION_TYPE_MDP) ? "[MDP]" : "",
            (hwcModuleCompType & qdutils::COMPOSITION_TYPE_C2D) ? "[C2D]" : "",
            (hwcModuleCompType & qdutils::COMPOSITION_TYPE_CPU) ? "[CPU]" : "",
            (hwcModuleCompType & qdutils::COMPOSITION_TYPE_DYN) ? "[DYN]" : "",
            (hwcModuleCompType >= (qdutils::COMPOSITION_TYPE_DYN << 1)) ? "[???]" : "");
    }
    ALOGI("Display[%s] Layer[*] %s-HwcModuleCompType, %d-layer MdpComp %s",
        mDisplayName, hwcModuleCompTypeLog.string(), sMdpCompMaxLayers,
        (listFlags & HWC_GEOMETRY_CHANGED) ? "[HwcList Geometry Changed]" : "");
}
static void throwExceptionWithRowCol(JNIEnv* env, jint row, jint column) {
    String8 msg;
    msg.appendFormat("Couldn't read row %d, col %d from CursorWindow. "
            "Make sure the Cursor is initialized correctly before accessing data from it.",
            row, column);
    jniThrowException(env, "java/lang/IllegalStateException", msg.string());
}
nsresult AudioManager::Observe(nsISupports* aSubject,
                               const char* aTopic,
                               const PRUnichar* aData) {
    if (!strcmp(aTopic, BLUETOOTH_SCO_STATUS_CHANGED)) {
        if (aData) {
            String8 cmd;
            cmd.appendFormat("bt_samplerate=%d", kBtSampleRate);
            AudioSystem::setParameters(0, cmd);
            // Keep the converted string alive for the duration of the calls
            // below; calling .get() on a temporary would leave a dangling
            // pointer.
            NS_ConvertUTF16toUTF8 address(nsDependentString(aData));
            AudioSystem::setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
                                                  AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
                                                  address.get());
            AudioSystem::setDeviceConnectionState(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET,
                                                  AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
                                                  address.get());
        } else {
            AudioSystem::setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
                                                  AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
                                                  "");
            AudioSystem::setDeviceConnectionState(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET,
                                                  AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
                                                  "");
        }
        return NS_OK;
    }
    return NS_ERROR_UNEXPECTED;
}
static String8 GetSessionIdString(const Vector<uint8_t> &sessionId) {
    String8 sessionIdStr;
    for (size_t i = 0; i < sessionId.size(); ++i) {
        sessionIdStr.appendFormat("%u ", sessionId[i]);
    }
    return sessionIdStr;
}
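For illustration, a hedged sketch of what the helper above produces; the caller and byte values are invented:

#include <utils/String8.h>
#include <utils/Vector.h>

// Hypothetical caller of GetSessionIdString() in the same file.
void logSessionId() {
    android::Vector<uint8_t> sessionId;
    sessionId.push_back(1);
    sessionId.push_back(127);
    sessionId.push_back(255);
    android::String8 s = GetSessionIdString(sessionId);
    // s == "1 127 255 " -- each byte rendered as decimal with a trailing
    // space, per the "%u " format above.
}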
static jlong nativeInit(JNIEnv* env, jclass clazz, jobject receiverWeak,
        jobject inputChannelObj, jobject messageQueueObj) {
    sp<InputChannel> inputChannel =
            android_view_InputChannel_getInputChannel(env, inputChannelObj);
    if (inputChannel == NULL) {
        jniThrowRuntimeException(env, "InputChannel is not initialized.");
        return 0;
    }

    sp<MessageQueue> messageQueue =
            android_os_MessageQueue_getMessageQueue(env, messageQueueObj);
    if (messageQueue == NULL) {
        jniThrowRuntimeException(env, "MessageQueue is not initialized.");
        return 0;
    }

    sp<NativeInputEventReceiver> receiver = new NativeInputEventReceiver(env,
            receiverWeak, inputChannel, messageQueue);
    status_t status = receiver->initialize();
    if (status) {
        String8 message;
        message.appendFormat("Failed to initialize input event receiver. status=%d", status);
        jniThrowRuntimeException(env, message.string());
        return 0;
    }

    receiver->incStrong(gInputEventReceiverClassInfo.clazz); // retain a reference for the object
    return reinterpret_cast<jlong>(receiver.get());
}
void EventThread::dump(String8& result) const {
    Mutex::Autolock _l(mLock);
    result.appendFormat("VSYNC state: %s\n",
            mDebugVsyncEnabled ? "enabled" : "disabled");
    result.appendFormat("  soft-vsync: %s\n",
            mUseSoftwareVSync ? "enabled" : "disabled");
    result.appendFormat("  numListeners=%zu,\n  events-delivered: %u\n",
            mDisplayEventConnections.size(),
            mVSyncEvent[DisplayDevice::DISPLAY_PRIMARY].vsync.count);
    for (size_t i = 0; i < mDisplayEventConnections.size(); i++) {
        sp<Connection> connection =
                mDisplayEventConnections.itemAt(i).promote();
        result.appendFormat("    %p: count=%d\n", connection.get(),
                connection != NULL ? connection->count : 0);
    }
}
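The promote() call above is the standard wp<> idiom: take a temporary strong reference, or get NULL if the object has already been destroyed. A minimal sketch of the idiom; Connection here is a stand-in for any RefBase subclass, not the EventThread type:

#include <utils/RefBase.h>

using android::sp;
using android::wp;

struct Connection : public android::RefBase {
    int count = 0;
};

void touch(const wp<Connection>& weak) {
    sp<Connection> strong = weak.promote();  // NULL if the target died
    if (strong != NULL) {
        strong->count++;  // safe: we hold a strong reference in this scope
    }
}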
void GonkConsumerBase::dumpLocked(String8& result, const char* prefix) const {
    result.appendFormat("%smAbandoned=%d\n", prefix, int(mAbandoned));
    if (!mAbandoned) {
        mConsumer->dump(result, prefix);
    }
}
nsresult AudioManager::Observe(nsISupports* aSubject,
                               const char* aTopic,
                               const PRUnichar* aData) {
    if (!strcmp(aTopic, BLUETOOTH_SCO_STATUS_CHANGED)) {
        if (aData) {
            String8 cmd;
            cmd.appendFormat("bt_samplerate=%d", kBtSampleRate);
            AudioSystem::setParameters(0, cmd);
            // Keep the converted string alive for the duration of the calls
            // below; calling .get() on a temporary would leave a dangling
            // pointer.
            NS_ConvertUTF16toUTF8 address(nsDependentString(aData));
            AudioSystem::setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
                                                  AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
                                                  address.get());
            AudioSystem::setDeviceConnectionState(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET,
                                                  AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
                                                  address.get());
            SetForceForUse(nsIAudioManager::USE_COMMUNICATION,
                           nsIAudioManager::FORCE_BT_SCO);
        } else {
            AudioSystem::setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
                                                  AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
                                                  "");
            AudioSystem::setDeviceConnectionState(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET,
                                                  AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
                                                  "");
            // only force to none if the current force setting is bt_sco
            int32_t force;
            GetForceForUse(nsIAudioManager::USE_COMMUNICATION, &force);
            if (force == nsIAudioManager::FORCE_BT_SCO) {
                SetForceForUse(nsIAudioManager::USE_COMMUNICATION,
                               nsIAudioManager::FORCE_NONE);
            }
        }
        return NS_OK;
    }
    return NS_ERROR_UNEXPECTED;
}
void Layer::dump(String8& result, Colorizer& colorizer) const {
    const Layer::State& s(getDrawingState());

    colorizer.colorize(result, Colorizer::GREEN);
    result.appendFormat("+ %s %p (%s)\n", getTypeId(), this, getName().string());
    colorizer.reset(result);

    s.activeTransparentRegion.dump(result, "transparentRegion");
    visibleRegion.dump(result, "visibleRegion");
    sp<Client> client(mClientRef.promote());

    result.appendFormat("      "
            "layerStack=%4d, z=%9d, pos=(%g,%g), size=(%4d,%4d), "
            "crop=(%4d,%4d,%4d,%4d), "
            "isOpaque=%1d, invalidate=%1d, "
            "alpha=0x%02x, flags=0x%08x, tr=[%.2f, %.2f][%.2f, %.2f]\n"
            "      client=%p\n",
            s.layerStack, s.z, s.transform.tx(), s.transform.ty(),
            s.active.w, s.active.h,
            s.active.crop.left, s.active.crop.top,
            s.active.crop.right, s.active.crop.bottom,
            isOpaque(), contentDirty,
            s.alpha, s.flags,
            s.transform[0][0], s.transform[0][1],
            s.transform[1][0], s.transform[1][1],
            client.get());

    sp<const GraphicBuffer> buf0(mActiveBuffer);
    uint32_t w0 = 0, h0 = 0, s0 = 0, f0 = 0;
    if (buf0 != 0) {
        w0 = buf0->getWidth();
        h0 = buf0->getHeight();
        s0 = buf0->getStride();
        f0 = buf0->format;
    }
    result.appendFormat("      "
            "format=%2d, activeBuffer=[%4ux%4u:%4u,%3X],"
            " queued-frames=%d, mRefreshPending=%d\n",
            mFormat, w0, h0, s0, f0,
            mQueuedFrames, mRefreshPending);

    if (mSurfaceFlingerConsumer != 0) {
        mSurfaceFlingerConsumer->dump(result, "            ");
    }
}
int NativeInputEventReceiver::handleEvent(int receiveFd, int events, void* data) {
    if (events & (ALOOPER_EVENT_ERROR | ALOOPER_EVENT_HANGUP)) {
        // This error typically occurs when the publisher has closed the input channel
        // as part of removing a window or finishing an IME session, in which case
        // the consumer will soon be disposed as well.
        if (kDebugDispatchCycle) {
            ALOGD("channel '%s' ~ Publisher closed input channel or an error occurred. "
                    "events=0x%x", getInputChannelName(), events);
        }
        return 0; // remove the callback
    }

    if (events & ALOOPER_EVENT_INPUT) {
        JNIEnv* env = AndroidRuntime::getJNIEnv();
        status_t status = consumeEvents(env, false /*consumeBatches*/, -1, NULL);
        mMessageQueue->raiseAndClearException(env, "handleReceiveCallback");
        return status == OK || status == NO_MEMORY ? 1 : 0;
    }

    if (events & ALOOPER_EVENT_OUTPUT) {
        for (size_t i = 0; i < mFinishQueue.size(); i++) {
            const Finish& finish = mFinishQueue.itemAt(i);
            status_t status = mInputConsumer.sendFinishedSignal(finish.seq, finish.handled);
            if (status) {
                // Drop the events we have already sent, keeping the rest queued.
                mFinishQueue.removeItemsAt(0, i);

                if (status == WOULD_BLOCK) {
                    if (kDebugDispatchCycle) {
                        ALOGD("channel '%s' ~ Sent %zu queued finish events; %zu left.",
                                getInputChannelName(), i, mFinishQueue.size());
                    }
                    return 1; // keep the callback, try again later
                }

                ALOGW("Failed to send finished signal on channel '%s'. status=%d",
                        getInputChannelName(), status);
                if (status != DEAD_OBJECT) {
                    JNIEnv* env = AndroidRuntime::getJNIEnv();
                    String8 message;
                    message.appendFormat("Failed to finish input event. status=%d", status);
                    jniThrowRuntimeException(env, message.string());
                    mMessageQueue->raiseAndClearException(env, "finishInputEvent");
                }
                return 0; // remove the callback
            }
        }
        if (kDebugDispatchCycle) {
            ALOGD("channel '%s' ~ Sent %zu queued finish events; none left.",
                    getInputChannelName(), mFinishQueue.size());
        }
        mFinishQueue.clear();
        setFdEvents(ALOOPER_EVENT_INPUT);
        return 1;
    }

    ALOGW("channel '%s' ~ Received spurious callback for unhandled poll event. "
            "events=0x%x", getInputChannelName(), events);
    return 1;
}
void Camera3InputStream::dump(int fd, const Vector<String16> &args) const {
    (void) args;
    String8 lines;
    lines.appendFormat("    Stream[%d]: Input\n", mId);
    write(fd, lines.string(), lines.size());
    Camera3IOStreamBase::dump(fd, args);
}
nsresult AudioManager::Observe(nsISupports* aSubject,
                               const char* aTopic,
                               const PRUnichar* aData) {
    if ((strcmp(aTopic, BLUETOOTH_SCO_STATUS_CHANGED_ID) == 0) ||
        (strcmp(aTopic, BLUETOOTH_A2DP_STATUS_CHANGED_ID) == 0)) {
        nsresult rv;
        int status = NS_ConvertUTF16toUTF8(aData).ToInteger(&rv);
        if (NS_FAILED(rv) || status > 1 || status < 0) {
            NS_WARNING(nsPrintfCString("Wrong data value of %s", aTopic).get());
            return NS_ERROR_FAILURE;
        }

        nsAutoString tmp_address;
        BluetoothProfileManagerBase* profile =
            static_cast<BluetoothProfileManagerBase*>(aSubject);
        profile->GetAddress(tmp_address);
        nsAutoCString address = NS_ConvertUTF16toUTF8(tmp_address);

        audio_policy_dev_state_t audioState = status ?
            AUDIO_POLICY_DEVICE_STATE_AVAILABLE :
            AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;

        if (!strcmp(aTopic, BLUETOOTH_SCO_STATUS_CHANGED_ID)) {
            AudioSystem::setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
                                                  audioState, address.get());
            AudioSystem::setDeviceConnectionState(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET,
                                                  audioState, address.get());
            if (audioState == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
                String8 cmd;
                cmd.appendFormat("bt_samplerate=%d", kBtSampleRate);
                AudioSystem::setParameters(0, cmd);
                SetForceForUse(nsIAudioManager::USE_COMMUNICATION,
                               nsIAudioManager::FORCE_BT_SCO);
            } else {
                // only force to none if the current force setting is bt_sco
                int32_t force;
                GetForceForUse(nsIAudioManager::USE_COMMUNICATION, &force);
                if (force == nsIAudioManager::FORCE_BT_SCO) {
                    SetForceForUse(nsIAudioManager::USE_COMMUNICATION,
                                   nsIAudioManager::FORCE_NONE);
                }
            }
        } else if (!strcmp(aTopic, BLUETOOTH_A2DP_STATUS_CHANGED_ID)) {
            AudioSystem::setDeviceConnectionState(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
                                                  audioState, address.get());
            if (audioState == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
                String8 cmd("bluetooth_enabled=true");
                AudioSystem::setParameters(0, cmd);
                cmd.setTo("A2dpSuspended=false");
                AudioSystem::setParameters(0, cmd);
            }
        }
    } else {
        NS_WARNING("Unexpected topic in AudioManager");
        return NS_ERROR_FAILURE;
    }
    return NS_OK;
}
static jobjectArray Image_createSurfacePlanes(JNIEnv* env, jobject thiz,
        int numPlanes, int readerFormat) {
    ALOGV("%s: create SurfacePlane array with size %d", __FUNCTION__, numPlanes);
    int rowStride = 0;
    int pixelStride = 0;
    uint8_t *pData = NULL;
    uint32_t dataSize = 0;
    jobject byteBuffer = NULL;

    PublicFormat publicReaderFormat = static_cast<PublicFormat>(readerFormat);
    int halReaderFormat = android_view_Surface_mapPublicFormatToHalFormat(
            publicReaderFormat);

    if (isFormatOpaque(halReaderFormat) && numPlanes > 0) {
        String8 msg;
        msg.appendFormat("Format 0x%x is opaque, thus not writable, the number of planes (%d)"
                " must be 0", halReaderFormat, numPlanes);
        jniThrowException(env, "java/lang/IllegalArgumentException", msg.string());
        return NULL;
    }

    jobjectArray surfacePlanes = env->NewObjectArray(numPlanes,
            gSurfacePlaneClassInfo.clazz, /*initial_element*/NULL);
    if (surfacePlanes == NULL) {
        jniThrowRuntimeException(env, "Failed to create SurfacePlane arrays,"
                " probably out of memory");
        return NULL;
    }
    if (isFormatOpaque(halReaderFormat)) {
        // Return 0 element surface array.
        return surfacePlanes;
    }

    LockedImage lockedImg = LockedImage();
    Image_getLockedImage(env, thiz, &lockedImg);
    if (env->ExceptionCheck()) {
        return NULL;
    }
    // Create all SurfacePlanes
    for (int i = 0; i < numPlanes; i++) {
        Image_getLockedImageInfo(env, &lockedImg, i, halReaderFormat,
                &pData, &dataSize, &pixelStride, &rowStride);
        byteBuffer = env->NewDirectByteBuffer(pData, dataSize);
        if ((byteBuffer == NULL) && (env->ExceptionCheck() == false)) {
            jniThrowException(env, "java/lang/IllegalStateException",
                    "Failed to allocate ByteBuffer");
            return NULL;
        }
        // Finally, create this SurfacePlane.
        jobject surfacePlane = env->NewObject(gSurfacePlaneClassInfo.clazz,
                gSurfacePlaneClassInfo.ctor, thiz, rowStride, pixelStride, byteBuffer);
        env->SetObjectArrayElement(surfacePlanes, i, surfacePlane);
    }

    return surfacePlanes;
}
void EventHub::dump(String8& dump) {
    dump.append("Event Hub State:\n");

    { // acquire lock
        AutoMutex _l(mLock);

        dump.appendFormat(INDENT "HaveFirstKeyboard: %s\n", toString(mHaveFirstKeyboard));
        dump.appendFormat(INDENT "FirstKeyboardId: 0x%x\n", mFirstKeyboardId);

        dump.append(INDENT "Devices:\n");

        for (int i = 0; i < mNumDevicesById; i++) {
            const device_t* device = mDevicesById[i].device;
            if (device) {
                if (mFirstKeyboardId == device->id) {
                    dump.appendFormat(INDENT2 "0x%x: %s (aka device 0 - first keyboard)\n",
                            device->id, device->name.string());
                } else {
                    dump.appendFormat(INDENT2 "0x%x: %s\n",
                            device->id, device->name.string());
                }
                dump.appendFormat(INDENT3 "Classes: 0x%08x\n", device->classes);
                dump.appendFormat(INDENT3 "Path: %s\n", device->path.string());
                dump.appendFormat(INDENT3 "KeyLayoutFile: %s\n",
                        device->keylayoutFilename.string());
            }
        }
    } // release lock
}
void PatchCache::dumpFreeBlocks(const char* prefix) {
    String8 dump;
    BufferBlock* block = mFreeBlocks;
    while (block) {
        dump.appendFormat("->(%d, %d)", block->offset, block->size);
        block = block->next;
    }
    ALOGD("%s: Free blocks%s", prefix, dump.string());
}
void Layer::dumpStats(String8& result, char* buffer, size_t SIZE) const {
    LayerBaseClient::dumpStats(result, buffer, SIZE);
    const size_t o = mFrameLatencyOffset;
    const nsecs_t period =
            mFlinger->getHwComposer().getRefreshPeriod(HWC_DISPLAY_PRIMARY);
    result.appendFormat("%lld\n", period);
    for (size_t i = 0; i < 128; i++) {
        const size_t index = (o + i) % 128;
        const nsecs_t time_app   = mFrameStats[index].timestamp;
        const nsecs_t time_set   = mFrameStats[index].set;
        const nsecs_t time_vsync = mFrameStats[index].vsync;
        result.appendFormat("%lld\t%lld\t%lld\n",
                time_app, time_vsync, time_set);
    }
    result.append("\n");
}
static void nativeScheduleVsync(JNIEnv* env, jclass clazz, jint receiverPtr) {
    sp<NativeDisplayEventReceiver> receiver =
            reinterpret_cast<NativeDisplayEventReceiver*>(receiverPtr);
    status_t status = receiver->scheduleVsync();
    if (status) {
        String8 message;
        message.appendFormat("Failed to schedule next vertical sync pulse. status=%d",
                status);
        jniThrowRuntimeException(env, message.string());
    }
}
void Layer::dumpStats(String8& result, char* buffer, size_t SIZE) const {
    LayerBaseClient::dumpStats(result, buffer, SIZE);
    const size_t o = mFrameLatencyOffset;
    const DisplayHardware& hw(graphicPlane(0).displayHardware());
    const nsecs_t period = hw.getRefreshPeriod();
    result.appendFormat("%lld\n", period);
    for (size_t i = 0; i < 128; i++) {
        const size_t index = (o + i) % 128;
        const nsecs_t time_app   = mFrameStats[index].timestamp;
        const nsecs_t time_set   = mFrameStats[index].set;
        const nsecs_t time_vsync = mFrameStats[index].vsync;
        result.appendFormat("%lld\t%lld\t%lld\n",
                time_app, time_vsync, time_set);
    }
    result.append("\n");
}
TaskManager::TaskManager() {
    // Get the number of available CPUs. This value does not change over time.
    int cpuCount = sysconf(_SC_NPROCESSORS_ONLN);

    for (int i = 0; i < cpuCount / 2; i++) {
        String8 name;
        name.appendFormat("hwuiTask%d", i + 1);
        mThreads.add(new WorkerThread(name));
    }
}
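A hedged sketch of the naming loop above in isolation, showing the thread names it generates; WorkerThread and mThreads are omitted, with printf standing in:

#include <unistd.h>
#include <cstdio>
#include <utils/String8.h>

int main() {
    int cpuCount = sysconf(_SC_NPROCESSORS_ONLN);
    // One worker per two cores; on a 4-core device this prints
    // "hwuiTask1" and "hwuiTask2".
    for (int i = 0; i < cpuCount / 2; i++) {
        android::String8 name;
        name.appendFormat("hwuiTask%d", i + 1);
        printf("%s\n", name.string());
    }
    return 0;
}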