void OTHER_VALUE_STREAM_CLASS::SetFileTransferSyntax(UID_CLASS& transfer_syntax)

//  DESCRIPTION     : Store the given file transfer syntax and cache the
//                    matching transfer syntax code.
//  PRECONDITIONS   :
//  POSTCONDITIONS  : file_transfer_syntaxM and fs_codeM updated.
//  EXCEPTIONS      :
//  NOTES           :
//<<===========================================================================
{
	// remember the file transfer syntax uid
	file_transfer_syntaxM = transfer_syntax;

	// cache the code form of the same uid
	fs_codeM = transferSyntaxUidToCode(transfer_syntax);
}
void OTHER_VALUE_STREAM_CLASS::SetMemoryTransferSyntax(UID_CLASS& transfer_syntax)

//  DESCRIPTION     : Store the given memory transfer syntax and cache the
//                    matching transfer syntax code.
//  PRECONDITIONS   :
//  POSTCONDITIONS  : memory_transfer_syntaxM and ms_codeM updated.
//  EXCEPTIONS      :
//  NOTES           :
//<<===========================================================================
{
	// remember the memory transfer syntax uid
	memory_transfer_syntaxM = transfer_syntax;

	// cache the code form of the same uid
	ms_codeM = transferSyntaxUidToCode(transfer_syntax);
}
bool SNIFFER_PDUS_CLASS::getCommand(DCM_COMMAND_CLASS **command_ptr_ptr)

//  DESCRIPTION     : Get a command from the PDU File Stream.
//  PRECONDITIONS   :
//  POSTCONDITIONS  : On success *command_ptr_ptr points to a newly allocated
//                    command - ownership passes to the caller.
//  EXCEPTIONS      :
//  NOTES           : BUG FIX - the command object is now deleted when the
//                    decode fails; it was previously leaked.
//<<===========================================================================
{
	// initialise the data transfer decode
	if (!networkTransferM.initialiseDecode(false)) return false;

	BYTE receivePcId = networkTransferM.getPresentationContextId();

	// log maximum length received
	if (loggerM_ptr)
	{
		// log the P-DATA-TF pdu details received
		loggerM_ptr->text(LOG_INFO, 2, "Maximum length of Command DATA-TF PDU received (with pcId %d) is 0x%X=%d", receivePcId, networkTransferM.getMaxRxLength(), networkTransferM.getMaxRxLength());
	}

	// set the Transfer Syntax Code to use for decode
	// - command sets are always encoded in Implicit VR Little Endian
	UID_CLASS transferSyntaxUid(IMPLICIT_VR_LITTLE_ENDIAN);
	networkTransferM.setTsCode(transferSyntaxUidToCode(transferSyntaxUid), (char*) transferSyntaxUid.get());

	// allocate a new command object
	DCM_COMMAND_CLASS *command_ptr = new DCM_COMMAND_CLASS();

	// cascade the logger
	command_ptr->setLogger(loggerM_ptr);

	// decode the command over the association - network transfer
	bool result = command_ptr->decode(networkTransferM);
	if (result)
	{
		// save return address - caller takes ownership
		*command_ptr_ptr = command_ptr;
	}
	else
	{
		// decode failed - free the command to avoid leaking it
		delete command_ptr;
	}

	// terminate the data transfer decode
	networkTransferM.terminateDecode();

	// return result
	return result;
}
bool FILETAIL_CLASS::write(bool autoCreateDirectory) // DESCRIPTION : Method to stream the dataset trailing padding into the // given file. // PRECONDITIONS : // POSTCONDITIONS : // EXCEPTIONS : // NOTES : //<<=========================================================================== { BASE_WAREHOUSE_ITEM_DATA_CLASS *wid_ptr = NULL; // check if we should write the trailing padding if (!trailingPaddingM) return true; // check the an absolute pathname has been given if (!isAbsolutePath(filenameM)) { // see if a results root is defined if (loggerM_ptr) { // see if the path has been defined string pathname = loggerM_ptr->getStorageRoot(); string filename = filenameM; if (pathname.length()) { // set up filename by including path filenameM = pathname; if (pathname[pathname.length()-1] != '\\') filenameM += "\\"; filenameM += filename; } } } if (loggerM_ptr) { loggerM_ptr->text(LOG_DEBUG, 1, "About to append FILE_TAIL to \"%s\"", filenameM.c_str()); } if (autoCreateDirectory) //autocreate stuff should be here { createDirectory(filenameM); } // set up the file transfer - append dataset trailing padding FILE_TF_CLASS *fileTf_ptr = new FILE_TF_CLASS(filenameM, "ab"); // cascade the logger fileTf_ptr->setLogger(loggerM_ptr); // try to retrive the file head in order to get the transfer syntax to use when // encoding this trailing padding if ((wid_ptr = WAREHOUSE->retrieve("", WID_FILEHEAD)) == NULL) { if (loggerM_ptr) { loggerM_ptr->text(LOG_DEBUG, 1, "Can't find %s in Data Warehouse", WIDName(WID_FILEHEAD)); } return false; } FILEHEAD_CLASS *filehead_ptr = static_cast<FILEHEAD_CLASS*>(wid_ptr); // set the required transfer syntax fileTf_ptr->setTsCode(transferSyntaxUidToCode(filehead_ptr->getTransferSyntaxUid()), (char*) filehead_ptr->getTransferSyntaxUid().get()); // check the file is open if (!fileTf_ptr->isOpen()) { if (loggerM_ptr) { loggerM_ptr->text(LOG_ERROR, 1, "Can't append FILE_TAIL to \"%s\"", filenameM.c_str()); loggerM_ptr->text(LOG_NONE, 1, "Check directory 
path exists."); } delete fileTf_ptr; return false; } // compute the length of the trailing padding // - need to pad file length to multiple of sector size INT length = fileTf_ptr->getLength(); length = ((length / sectorSizeM) * sectorSizeM) - length + sectorSizeM; bool result = true; // don't add padding if the file length is a multiple of the sectorSize if (length != (INT) sectorSizeM) { // - modify length by dataset trailing padding tag (4), [vr (2), padding (2)] & length (4) if (fileTf_ptr->getTsCode() & TS_EXPLICIT_VR) { // check if the remaining sector length can include at least the tag, etc if (length < 12) length += sectorSizeM; // subtract length required for tag, etc length -= 12; } else { // check if the remaining sector length can include at least the tag, etc if (length < 8) length += sectorSizeM; // subtract length required for tag, etc length -= 8; } // add the attribute with value (void) setOBValue(TAG_DATASET_TRAILING_PADDING, 1, length, paddingValueM); // encode the media header attributes result = DCM_ATTRIBUTE_GROUP_CLASS::encode(*fileTf_ptr); } // clean up the file transfer delete fileTf_ptr; // return result return result; }
bool FILE_DATASET_CLASS::write(DCM_DATASET_CLASS *dataset_ptr, bool autoCreateDirectory)

//  DESCRIPTION     : Method to stream the DICOM object to file using the defined
//                    transfer syntax. The dataset is APPENDED to the file
//                    (opened "ab"). FMI datasets (Group 0002) are written in
//                    Explicit VR Little Endian; others use the transfer syntax
//                    from the FILEHEAD in the Data Warehouse, falling back to
//                    Explicit VR Little Endian when no FILEHEAD is found.
//  PRECONDITIONS   : dataset_ptr != NULL.
//  POSTCONDITIONS  : filenameM may be rewritten to an absolute path using the
//                    logger's storage root.
//  EXCEPTIONS      :
//  NOTES           :
//<<===========================================================================
{
	BASE_WAREHOUSE_ITEM_DATA_CLASS *wid_ptr = NULL;

	// check for valid dataset to write
	if (dataset_ptr == NULL) return false;

	// check that an absolute pathname has been given
	if (!isAbsolutePath(filenameM))
	{
		// see if a results root is defined
		if (loggerM_ptr)
		{
			// see if the path has been defined
			string pathname = loggerM_ptr->getStorageRoot();
			string filename = filenameM;
			if (pathname.length())
			{
				// set up filename by including path
				// - backslash separator: assumes Windows-style paths - TODO confirm
				filenameM = pathname;
				if (pathname[pathname.length()-1] != '\\') filenameM += "\\";
				filenameM += filename;
			}
		}
	}

	if (loggerM_ptr)
	{
		loggerM_ptr->text(LOG_DEBUG, 1, "About to append Dataset to \"%s\"", filenameM.c_str());
	}

	if (autoCreateDirectory)
	{
		// make sure the target directory exists before opening the file
		createDirectory(filenameM);
	}

	// set up the file transfer - append mode
	FILE_TF_CLASS *fileTf_ptr = new FILE_TF_CLASS(filenameM, "ab");

	// cascade the logger - taken from the dataset, not this object
	fileTf_ptr->setLogger(dataset_ptr->getLogger());

	// check the file is open
	if (!fileTf_ptr->isOpen())
	{
		if (loggerM_ptr)
		{
			loggerM_ptr->text(LOG_ERROR, 1, "Can't append Dataset to \"%s\"", filenameM.c_str());
			loggerM_ptr->text(LOG_NONE, 1, "Check directory path exists.");
		}
		delete fileTf_ptr;
		return false;
	}

	// to check if the dataset is the FMI - see if the attribute tags are from Group 0002
	if (dataset_ptr->containsAttributesFromGroup(GROUP_TWO))
	{
		// force the Group 0002 length
		dataset_ptr->setDefineGroupLengths(true);

		// set the required transfer syntax - FMI is always Explicit VR Little Endian
		fileTf_ptr->setTsCode(TS_EXPLICIT_VR | TS_LITTLE_ENDIAN, EXPLICIT_VR_LITTLE_ENDIAN);
	}
	else
	{
		// try to retrive the file head in order to get the transfer syntax to use when
		// encoding this dataset
		if ((wid_ptr = WAREHOUSE->retrieve("", WID_FILEHEAD)) == NULL)
		{
			if (loggerM_ptr)
			{
				loggerM_ptr->text(LOG_DEBUG, 1, "Can't find %s in Data Warehouse in order to get Transfer Syntax for Dataset", WIDName(WID_FILEHEAD));
				loggerM_ptr->text(LOG_DEBUG, 1, " - Going to set Transfer Syntax to Explicit VR Little Endian as default for Media");
			}

			// do our best - set the transfer syntax as dicom default for media
			fileTf_ptr->setTsCode(TS_EXPLICIT_VR | TS_LITTLE_ENDIAN, EXPLICIT_VR_LITTLE_ENDIAN);
		}
		else
		{
			FILEHEAD_CLASS *filehead_ptr = static_cast<FILEHEAD_CLASS*>(wid_ptr);

			// set the required transfer syntax - taken from the stored file head
			fileTf_ptr->setTsCode(transferSyntaxUidToCode(filehead_ptr->getTransferSyntaxUid()), (char*) filehead_ptr->getTransferSyntaxUid().get());
		}
	}

	// remove any Dataset Trailing Padding - it is re-generated separately
	dataset_ptr->removeTrailingPadding();

	// encode the dataset
	bool result = dataset_ptr->encode(*fileTf_ptr);

	// clean up the file transfer
	delete fileTf_ptr;

	// return result
	return result;
}
bool FILE_DATASET_CLASS::read() // DESCRIPTION : Method to stream the DICOM object from the file using the defined // transfer syntax. // PRECONDITIONS : // POSTCONDITIONS : // EXCEPTIONS : // NOTES : //<<=========================================================================== { string mediaStorageSopClassUid; string transferSyntax = EXPLICIT_VR_LITTLE_ENDIAN; TS_CODE tsCode = transferSyntaxUidToCode(transferSyntax); // set up the file transfer fileTfM_ptr = new FILE_TF_CLASS(filenameM, "rb"); // set the storage mode fileTfM_ptr->setStorageMode(storageModeM); // cascade the logger fileTfM_ptr->setLogger(loggerM_ptr); // check the file is open if (!fileTfM_ptr->isOpen()) return false; // check if the file only contains a command or dataset if ((fileContentTypeM == MFC_COMMANDSET) || (fileContentTypeM == MFC_DATASET)) { // use the transfer syntax given in the class constructor transferSyntax = transferSyntaxM; // set the required transfer syntax fileTfM_ptr->setTsCode(tsCode, transferSyntax); } else { // set the required transfer syntax fileTfM_ptr->setTsCode(tsCode, transferSyntax); // check if we have the File Meta Information first fmiM_ptr = readFileMetaInformation(fileTfM_ptr); if (fmiM_ptr == NULL) { // try to deduce the transfer syntax from the file contents if (!deduceTransferSyntax(fileTfM_ptr, transferSyntax)) { if (loggerM_ptr) { loggerM_ptr->text(LOG_DEBUG, 1, "Can't deduce Transfer Syntax by reading Dataset from \"%s\". Will assume Explicit VR Little Endian...", filenameM.c_str()); } } } else { // extract the sop class uid from the fmi // - if the Media Storage SOP Class is not present - we can't tell if the media file is a DICOMDIR // - the wrong validation will be done if so - this will be picked up in the validation results. 
(void) fmiM_ptr->getUIValue(TAG_MEDIA_STORAGE_SOP_CLASS_UID, mediaStorageSopClassUid); // extract transfer syntax for dataset from fmi if (!fmiM_ptr->getUIValue(TAG_TRANSFER_SYNTAX_UID, transferSyntax)) { loggerM_ptr->text(LOG_ERROR, 1, "Failed to retrieve Transfer Syntax from File Meta Information: \"%s\"", filenameM.c_str()); return false; } } } // now get the tsCode from the transfer syntax tsCode = transferSyntaxUidToCode(transferSyntax); // set the required transfer syntax fileTfM_ptr->setTsCode(tsCode, transferSyntax); transferSyntaxM = transferSyntax; bool result = false; // check if we have a DICOMDIR stored if (mediaStorageSopClassUid == MEDIA_STORAGE_DIRECTORY_SOP_CLASS_UID) { // create dicom dir dataset object and decode the stored dataset dicomdirDatasetM_ptr = new DCM_DIR_DATASET_CLASS(); dicomdirDatasetM_ptr->setLogger(loggerM_ptr); // set the UN VR definition look-up flag dicomdirDatasetM_ptr->setUnVrDefinitionLookUp(unVrDefinitionLookUpM); // set the EnsureEvenAttributeValueLength flag dicomdirDatasetM_ptr->setEnsureEvenAttributeValueLength(ensureEvenAttributeValueLengthM); // decode up to the first directory record sequence result = dicomdirDatasetM_ptr->decodeToFirstRecord(*fileTfM_ptr); if ((result == false) && (loggerM_ptr)) { loggerM_ptr->text(LOG_ERROR, 1, "Failed to decode the DICOMDIR in \"%s\"", filenameM.c_str()); } } else { // create dataset object and decode the stored dataset datasetM_ptr = new DCM_DATASET_CLASS(); datasetM_ptr->setLogger(loggerM_ptr); // set the UN VR definition look-up flag datasetM_ptr->setUnVrDefinitionLookUp(unVrDefinitionLookUpM); // set the EnsureEvenAttributeValueLength flag datasetM_ptr->setEnsureEvenAttributeValueLength(ensureEvenAttributeValueLengthM); // decode the whole dataset result = datasetM_ptr->decode(*fileTfM_ptr); if ((result == false) && (loggerM_ptr)) { loggerM_ptr->text(LOG_ERROR, 1, "Failed to decode the Dataset in \"%s\"", filenameM.c_str()); } // clean up the file transfer delete fileTfM_ptr; 
fileTfM_ptr = NULL; } // return result return result; }
bool FILE_DATASET_CLASS::read(DCM_DATASET_CLASS *dataset_ptr)

//  DESCRIPTION     : Method to stream the DICOM object from the file using the defined
//                    transfer syntax, decoding into the caller-supplied dataset.
//  PRECONDITIONS   : dataset_ptr != NULL.
//  POSTCONDITIONS  : transferSyntaxM updated to the transfer syntax used.
//  EXCEPTIONS      :
//  NOTES           : BUG FIX - the file transfer object is now deleted when
//                    the file cannot be opened; it was previously leaked.
//<<===========================================================================
{
	string transferSyntax = EXPLICIT_VR_LITTLE_ENDIAN;

	// check for valid dataset to read
	if (!dataset_ptr) return false;

	// set up the file transfer
	FILE_TF_CLASS *fileTf_ptr = new FILE_TF_CLASS(filenameM, "rb");

	// set the storage mode
	fileTf_ptr->setStorageMode(storageModeM);

	// cascade the logger
	fileTf_ptr->setLogger(loggerM_ptr);

	// check the file is open
	if (!fileTf_ptr->isOpen())
	{
		// BUG FIX: release the file transfer before bailing out (was leaked)
		delete fileTf_ptr;
		return false;
	}

	// check if we have the File Meta Information first
	if (!isFileMetaInformation(fileTf_ptr, transferSyntax))
	{
		// try to deduce the transfer syntax from the file contents
		if (!deduceTransferSyntax(fileTf_ptr, transferSyntax))
		{
			if (loggerM_ptr)
			{
				loggerM_ptr->text(LOG_DEBUG, 1, "Can't deduce Transfer Syntax by reading Dataset from \"%s\". Will assume Explicit VR Little Endian...", filenameM.c_str());
			}
		}
	}

	// now get the tsCode from the transfer syntax
	TS_CODE tsCode = transferSyntaxUidToCode(transferSyntax);

	// set the required transfer syntax
	fileTf_ptr->setTsCode(tsCode, transferSyntax);
	transferSyntaxM = transferSyntax;

	// decode the dataset
	if (dataset_ptr->getLogger() == NULL)
	{
		// use this logger if none already set
		dataset_ptr->setLogger(loggerM_ptr);
	}

	// set the UN VR definition look-up flag
	dataset_ptr->setUnVrDefinitionLookUp(unVrDefinitionLookUpM);

	// set the EnsureEvenAttributeValueLength flag
	dataset_ptr->setEnsureEvenAttributeValueLength(ensureEvenAttributeValueLengthM);

	bool result = dataset_ptr->decode(*fileTf_ptr);

	// clean up the file transfer
	delete fileTf_ptr;

	// return result
	return result;
}
bool FILE_DATASET_CLASS::write(bool autoCreateDirectory)

//  DESCRIPTION     : Method to write the FMI and Dataset to file. The FMI is
//                    encoded in Explicit VR Little Endian; the dataset is then
//                    encoded using the transfer syntax carried in the FMI's
//                    Transfer Syntax UID (0002,0010).
//  PRECONDITIONS   : fmiM_ptr and datasetM_ptr are both set.
//  POSTCONDITIONS  : The file at filenameM is (re)written ("wb" mode).
//  EXCEPTIONS      :
//  NOTES           :
//<<===========================================================================
{
	// check for valid FMI & Dataset to write
	MEDIA_FILE_HEADER_CLASS *fmi_ptr = fmiM_ptr;
	DCM_DATASET_CLASS *dataset_ptr = datasetM_ptr;
	if ((fmi_ptr == NULL) || (dataset_ptr == NULL))
	{
		return false;
	}

	if (autoCreateDirectory)
	{
		// make sure the target directory exists before opening the file
		createDirectory(filenameM);
	}

	// Set group length property
	if(getAddGroupLength())
		dataset_ptr->setDefineGroupLengths(true);

	// set up the file transfer - overwrite mode
	FILE_TF_CLASS *fileTf_ptr = new FILE_TF_CLASS(filenameM, "wb");

	// cascade the logger
	fileTf_ptr->setLogger(loggerM_ptr);

	// check the file is open
	if (!fileTf_ptr->isOpen())
	{
		if (loggerM_ptr)
		{
			loggerM_ptr->text(LOG_ERROR, 1, "Can't write Media Dataset to \"%s\"", filenameM.c_str());
		}
		delete fileTf_ptr;
		return false;
	}

	// the dataset transfer syntax comes from the FMI
	string transferSyntaxUid;
	if (!fmi_ptr->getUIValue(TAG_TRANSFER_SYNTAX_UID, transferSyntaxUid))
	{
		if (loggerM_ptr)
		{
			loggerM_ptr->text(LOG_ERROR, 1, "Can't get Transfer Syntax UID from FMI to write \"%s\"", filenameM.c_str());
		}
		delete fileTf_ptr;
		return false;
	}

	// write the FMI to file
	// set the required transfer syntax - FMI is always Explicit VR Little Endian
	fileTf_ptr->setTsCode(TS_EXPLICIT_VR | TS_LITTLE_ENDIAN, EXPLICIT_VR_LITTLE_ENDIAN);
	if (!fmi_ptr->write(fileTf_ptr))
	{
		if (loggerM_ptr)
		{
			loggerM_ptr->text(LOG_ERROR, 1, "Can't encode the FMI to write \"%s\"", filenameM.c_str());
		}
		delete fileTf_ptr;
		return false;
	}

	// set the required transfer syntax for the dataset
	fileTf_ptr->setTsCode(transferSyntaxUidToCode(transferSyntaxUid), transferSyntaxUid.c_str());

	// encode the dataset
	bool result = dataset_ptr->encode(*fileTf_ptr);

	// clean up the file transfer
	delete fileTf_ptr;

	// return result
	return result;
}
void DCM_ATTRIBUTE_GROUP_CLASS::computeItemOffsets(string transferSyntax)

//  DESCRIPTION     : Compute any item offsets - for DICOMDIR.
//  PRECONDITIONS   :
//  POSTCONDITIONS  : offsetM advanced by the encoded length of this group.
//  EXCEPTIONS      :
//  NOTES           : FIXES - the transfer syntax code is now computed once
//                    before the loop instead of once per attribute, and the
//                    misleading "= 4 bytes" comment on the 2-byte explicit VR
//                    length field has been corrected.
//<<===========================================================================
{
	// the transfer syntax does not change per attribute - compute its code once
	bool implicitVr = (transferSyntaxUidToCode(transferSyntax) & TS_IMPLICIT_VR) ? true : false;

	// loop computing the offsets for each attribute
	for (int i = 0; i < GetNrAttributes(); i++)
	{
		// get the Attribute Data
		DCM_ATTRIBUTE_CLASS *attribute_ptr = GetAttribute(i);
		ATTR_VR_ENUM vr = attribute_ptr->GetVR();
		ATTR_TYPE_ENUM type = attribute_ptr->GetType();
		UINT32 length = attribute_ptr->getPaddedLength();

		if ((vr != ATTR_VR_SQ) && (length == 0) && ((type == ATTR_TYPE_1C) || (type == ATTR_TYPE_2C)))
		{
			// do not encode zero length Type1C, Type2C
			continue;
		}

		// count length of the group & element = 4 bytes
		offsetM += 4;

		// count the [VR] length
		if (implicitVr)
		{
			// implicit VR - 32 bit length used = 4 bytes
			offsetM += 4;
		}
		else
		{
			// explicit VR = 2 bytes
			offsetM += 2;

			// check for special OB, OF, OW, OL, OD, SQ, UN, UR, UC & UT encoding
			if ((vr == ATTR_VR_OB) ||
				(vr == ATTR_VR_OF) ||
				(vr == ATTR_VR_OW) ||
				(vr == ATTR_VR_OL) ||
				(vr == ATTR_VR_OD) ||
				(vr == ATTR_VR_SQ) ||
				(vr == ATTR_VR_UN) ||
				(vr == ATTR_VR_UR) ||
				(vr == ATTR_VR_UC) ||
				(vr == ATTR_VR_UT))
			{
				// 16 bit padding & 32 bit length used = 6 bytes
				offsetM += 6;
			}
			else
			{
				// 16 bit length used = 2 bytes
				offsetM += 2;
			}
		}

		// handle the SQs separately
		if (vr == ATTR_VR_SQ)
		{
			// only interested if one SQ value available
			if (attribute_ptr->GetNrValues() == 1)
			{
				// get SQ value
				DCM_VALUE_SQ_CLASS *sqValue_ptr = static_cast<DCM_VALUE_SQ_CLASS*>(attribute_ptr->GetValue(0));

				// run through items in sequence
				sqValue_ptr->computeItemOffsets(transferSyntax, &offsetM);
			}
		}
		else
		{
			// count standard attribute length
			offsetM += length;
		}
	}
}
bool SNIFFER_PDUS_CLASS::getDataset(DCM_DATASET_CLASS **dataset_ptr_ptr)

//  DESCRIPTION     : Get a dataset from the PDU File Stream.
//  PRECONDITIONS   :
//  POSTCONDITIONS  : On success *dataset_ptr_ptr points to a newly allocated
//                    dataset - ownership passes to the caller.
//  EXCEPTIONS      :
//  NOTES           : BUG FIX - the dataset object is now deleted when the
//                    decode fails; it was previously leaked.
//<<===========================================================================
{
	// check to see if any PDU data is left from the Command receive
	if (networkTransferM.isTherePduData())
	{
		// initialise the data transfer decode
		if (!networkTransferM.initialiseDecode(true)) return false;

		// check if we already have the last PDV
		bool isLast;
		bool remainingPdvData = networkTransferM.isRemainingPdvDataInPdu(&isLast);
		if (!remainingPdvData) return false;

		// if we don't have the last PDV - we need to read more PDUs
		if (!isLast)
		{
			// we have some remaining data in the current PDU but not
			// all required data
			// - get remaining PDU(s)
			// - on success check that we have the correct PDU
			// get a PDU from the file stream
			PDU_CLASS *pdu_ptr = getPdu();
			if (pdu_ptr == NULL) return false;
			if (pdu_ptr->getType() != PDU_PDATA)
			{
				// failed to read the Dataset P-DATA-TF PDU(s)
				delete pdu_ptr;
				return false;
			}
			delete pdu_ptr;
		}
	}
	else
	{
		// get Dataset P-DATA-TF PDU(s)
		// - on success check that we have the correct PDU
		// get a PDU from the file stream
		PDU_CLASS *pdu_ptr = getPdu();
		if (pdu_ptr == NULL) return false;
		if (pdu_ptr->getType() != PDU_PDATA)
		{
			// failed to read the Dataset P-DATA-TF PDU(s)
			delete pdu_ptr;
			return false;
		}
		delete pdu_ptr;

		// initialise the data transfer decode
		if (!networkTransferM.initialiseDecode(true)) return false;
	}

	// get the current Presentation Context Id
	BYTE pcId = networkTransferM.getPresentationContextId();

	// log maximum length received
	if (loggerM_ptr)
	{
		// log the P-DATA-TF pdu details received
		loggerM_ptr->text(LOG_INFO, 1, "Maximum length of Dataset DATA-TF PDU received (with pcId %d) is 0x%X=%d", pcId, networkTransferM.getMaxRxLength(), networkTransferM.getMaxRxLength());
	}

	// check if Presentation Context is accepted
	bool result = false;
	UID_CLASS transferSyntaxUid(IMPLICIT_VR_LITTLE_ENDIAN);
	if (acceptedPCM.getTransferSyntaxUid(pcId, transferSyntaxUid))
	{
		// set the Transfer Syntax Code to use for decode
		networkTransferM.setTsCode(transferSyntaxUidToCode(transferSyntaxUid), (char*) transferSyntaxUid.get());

		// check if we should serialise the dataset
		if (networkTransferM.getStorageMode() != SM_NO_STORAGE)
		{
			string filename;
			bool appendToFile;

			// check storage mode to see if we should include a media header or not
			if (networkTransferM.getStorageMode() == SM_AS_MEDIA)
			{
				// set up the media header
				MEDIA_FILE_HEADER_CLASS *mediaHeader_ptr = new MEDIA_FILE_HEADER_CLASS(networkTransferM.getSessionId(), sopClassUidM, sopInstanceUidM, (char*) transferSyntaxUid.get(), loggerM_ptr);

				// write the media header
				if (mediaHeader_ptr->write())
				{
					if (loggerM_ptr)
					{
						loggerM_ptr->text(LOG_DEBUG, 1, "Generating Media Storage File: - %s", mediaHeader_ptr->getFilename());
						loggerM_ptr->text(LOG_MEDIA_FILENAME, 1, "%s", mediaHeader_ptr->getFilename());
					}
				}
				else
				{
					if (loggerM_ptr)
					{
						loggerM_ptr->text(LOG_INFO, 1, "Failed to generate Media Storage File: - %s", mediaHeader_ptr->getFilename());
					}
				}

				// append the dataset to the header
				appendToFile = true;
				filename = mediaHeader_ptr->getFilename();

				// cleanup
				delete mediaHeader_ptr;
			}
			else
			{
				// generate a filename for the raw storage
				appendToFile = false;
				string storageRoot;
				if (loggerM_ptr)
				{
					// get the storage root
					storageRoot = loggerM_ptr->getStorageRoot();
				}
				getStorageFilename(storageRoot, networkTransferM.getSessionId(), filename, SFE_DOT_RAW);
				if (loggerM_ptr)
				{
					// log filename used for RAW dataset storage
					loggerM_ptr->text(LOG_INFO, 1, "Generating Storage Dataset File: - %s", filename.c_str());
				}
			}

			// serialise the dataset
			if (!networkTransferM.serialise(filename, appendToFile))
			{
				if (loggerM_ptr)
				{
					loggerM_ptr->text(LOG_ERROR, 1, "Failed to store Dataset in File: - %s", filename.c_str());
				}

				// NOTE(review): the decode initialised above is not terminated
				// on this error path - confirm whether terminateDecode() is
				// required here before changing behaviour
				// return error
				return false;
			}
		}

		// allocate a new dataset object
		DCM_DATASET_CLASS *dataset_ptr = new DCM_DATASET_CLASS();

		// cascade the logger
		dataset_ptr->setLogger(loggerM_ptr);

		// decode the dataset over the association - network transfer
		result = dataset_ptr->decode(networkTransferM);
		if (result)
		{
			// save return address - caller takes ownership
			*dataset_ptr_ptr = dataset_ptr;
		}
		else
		{
			// decode failed - free the dataset to avoid leaking it
			delete dataset_ptr;
		}

		// terminate the data transfer decode
		networkTransferM.terminateDecode();
	}
	else
	{
		// error - can't find Presentation Context Id in accepted list
		if (loggerM_ptr)
		{
			loggerM_ptr->text(LOG_ERROR, 1, "Cannot find Presentation Context ID of %d in Accepted list", pcId);
		}
	}

	// return result
	return result;
}