// Execute the command on the given NetSchedule server (with retry) and parse
// the raw response as a structured-output object.
CJsonNode CExecAndParseStructuredOutput::ExecOn(CNetServer server)
{
    // Keep the raw response alive in m_NSOutput; m_Ch walks over it.
    const string& raw = (m_NSOutput = server.ExecWithRetry(m_Cmd).response);
    m_Ch = raw.c_str();
    return ParseObject(true);
}
// Parse a complete JSON document (a single top-level array or object).
// Raises INVALID_FORMAT_ERROR() on anything else, including trailing garbage.
CJsonNode CNetScheduleStructuredOutputParser::ParseJSON(const string& json)
{
    // Copy the input so the cursor stays valid for the whole parse.
    m_NSOutput = json;
    m_Ch = m_NSOutput.c_str();

    // Skip leading whitespace before the top-level value.
    while (isspace((unsigned char) *m_Ch))
        ++m_Ch;

    CJsonNode root;

    if (*m_Ch == '[') {
        ++m_Ch;
        root = ParseArray(']');
    } else if (*m_Ch == '{') {
        ++m_Ch;
        root = ParseObject('}');
    } else {
        INVALID_FORMAT_ERROR();
    }

    // Only whitespace may follow the top-level value.
    while (isspace((unsigned char) *m_Ch))
        ++m_Ch;

    if (*m_Ch != '\0') {
        INVALID_FORMAT_ERROR();
    }

    return root;
}
//------------------------------------------------------------------------------- bool json::Reader::Read(const char* doc, int64_t length, ConfigValue& root) { _cur = _begin = doc; _end = doc + length; SkipSpaces(); if(*_cur == '{') return ParseObject(root); // Assume root is an object root.SetEmptyObject(); std::string name; while(1) { SkipSpaces(); if(_cur == _end) break; name = ""; if(!ParseString(name)) { Error("Failed to parse string"); return false; } SkipSpaces(); if(*_cur != '=' && *_cur != ':') { Error("Expected '=' or ':'"); return false; } _cur++; ConfigValue& elem = root[name.c_str()]; if(!ParseValue(elem)) { return false; // Failed to parse value } SkipSpaces(); char c = *_cur; if(c == ',') // Separator between elements (Optional) { _cur++; continue; } } return true; }
/*
 * Convert a parsed yajl JSON node into the corresponding R object (SEXP).
 *
 * node   - yajl value to convert.
 * bigint - nonzero: integers outside the exact-double range (+/-2^53) are
 *          returned as strings instead of (lossy) doubles.
 *
 * Objects and arrays recurse into ParseObject()/ParseArray(); unknown node
 * types raise an R error (error() does not return).
 */
SEXP ParseValue(yajl_val node, int bigint){
  if(YAJL_IS_NULL(node)){
    return R_NilValue;
  }
  if(YAJL_IS_STRING(node)){
    /* Strings are always marked as UTF-8. */
    SEXP tmp = PROTECT(allocVector(STRSXP, 1));
    SET_STRING_ELT(tmp, 0, mkCharCE(YAJL_GET_STRING(node), CE_UTF8));
    UNPROTECT(1);
    return tmp;
  }
  if(YAJL_IS_INTEGER(node)){
    long long int val = YAJL_GET_INTEGER(node);
    /* 2^53 is highest int stored as double without loss */
    if(bigint && (val > 9007199254740992 || val < -9007199254740992)){
      char buf[32];
      /* MSVC runtime uses the %I64d length modifier for 64-bit ints. */
      #ifdef _WIN32
      snprintf(buf, 32, "%I64d", val);
      #else
      snprintf(buf, 32, "%lld", val);
      #endif
      return mkString(buf);
    /* see .Machine$integer.max in R */
    } else if(val > 2147483647 || val < -2147483647){
      /* Too large for R's 32-bit integer: return as double. */
      return ScalarReal(val);
    } else {
      return ScalarInteger(val);
    }
  }
  if(YAJL_IS_DOUBLE(node)){
    return(ScalarReal(YAJL_GET_DOUBLE(node)));
  }
  if(YAJL_IS_NUMBER(node)){
    /* A number that is not int or double (very rare) */
    /* This seems to correctly round to Inf/0/-Inf */
    return(ScalarReal(YAJL_GET_DOUBLE(node)));
  }
  if(YAJL_IS_TRUE(node)){
    return(ScalarLogical(1));
  }
  if(YAJL_IS_FALSE(node)){
    return(ScalarLogical(0));
  }
  if(YAJL_IS_OBJECT(node)){
    return(ParseObject(node, bigint));
  }
  if(YAJL_IS_ARRAY(node)){
    return(ParseArray(node, bigint));
  }
  /* error() performs a non-local exit; no return needed here. */
  error("Invalid YAJL node type.");
}
// Advance to the next character, classify it, and dispatch to the matching
// value parser. Returns NULL for null values and for unrecognized input.
Json* Json::Parser::ParsePrimary(){
    NextChar();
    Kind kind = KindPreview(ch);
    Json* result = NULL;

    if (kind == kString)      result = ParseString();
    else if (kind == kNumber) result = ParseNumber();
    else if (kind == kFalse)  result = ParseFalse();
    else if (kind == kTrue)   result = ParseTrue();
    else if (kind == kArray)  result = ParseArray();
    else if (kind == kObject) result = ParseObject();
    // kNull and anything unrecognized: leave result as NULL.

    return result;
}
// Parse the next value at m_Ch in NetSchedule structured output:
// arrays, objects, quoted strings, unsigned integers, and boolean keywords
// (false/no/true/yes variants). Returns a null CJsonNode when the current
// character starts none of these; m_Ch is left unchanged in that case.
CJsonNode CExecAndParseStructuredOutput::ParseNode()
{
    // Composite and string values are delegated to dedicated parsers.
    switch (*m_Ch) {
    case '[':
        ++m_Ch;
        return ParseArray();
    case '{':
        ++m_Ch;
        return ParseObject(false);
    case '\'':
    case '"':
        return CJsonNode::NewStringNode(ParseString());
    }

    // Scalar tokens: scan forward without running past the buffer.
    size_t max_len = GetRemainder();
    size_t len = 1;

    switch (*m_Ch) {
    // Unsigned integer literal (no sign/float handling here).
    case '0': case '1': case '2': case '3': case '4':
    case '5': case '6': case '7': case '8': case '9':
        while (len <= max_len && isdigit(m_Ch[len]))
            ++len;
        {
            CJsonNode::TNumber val(NStr::StringToInt8(CTempString(m_Ch, len)));
            m_Ch += len;
            return CJsonNode::NewNumberNode(val);
        }
    // Boolean keywords: False/No/True/Yes in either case; the full word is
    // consumed and validated by NStr::StringToBool.
    case 'F': case 'f': case 'N': case 'n':
    case 'T': case 't': case 'Y': case 'y':
        while (len <= max_len && isalpha(m_Ch[len]))
            ++len;
        {
            bool val(NStr::StringToBool(CTempString(m_Ch, len)));
            m_Ch += len;
            return CJsonNode::NewBooleanNode(val);
        }
    }

    // Unrecognized input: return a null node.
    return CJsonNode();
}
/** * @brief Parse a JSON value and fills it with data from the lexer. * * This function is a bit different from the other two process functions ParseArray() and * ParseObject(), because it takes its value parameter by reference. This is because when * entering the function it is not clear yet which type of value the current lexer token is, so a * new instance has to be created and stored in the pointer. */ bool JsonProcessor::ParseValue ( Lexer::iterator& ct, Lexer::iterator& end, JsonValue*& value ) { // proper usage of this function is to hand over a null pointer to a json value, which will be // assigned to a newly created value instance depending on the token type, so check for this // here. we don't want to overwrite existing values! assert (value == nullptr); // check all possible valid lexer token types and turn them into json values if (ct->IsSymbol()) { // the lexer only returns null, true or false as symbols, so this is safe if (ct->value().compare("null") == 0) { value = new JsonValueNull(); } else { value = new JsonValueBool(ct->value()); } ++ct; return true; } if (ct->IsNumber()) { value = new JsonValueNumber(ct->value()); ++ct; return true; } if (ct->IsString()) { value = new JsonValueString(ct->value()); ++ct; return true; } if (ct->IsBracket("[")) { value = new JsonValueArray(); return ParseArray (ct, end, JsonValueToArray(value)); } if (ct->IsBracket("{")) { value = new JsonValueObject(); return ParseObject (ct, end, JsonValueToObject(value)); } // if the lexer token is not a fitting json value, we have an error LOG_WARN << "JSON value contains invalid characters at " + ct->at() + ": '" + ct->value() + "'."; return false; }
/**
 * @brief Takes a string containing a JSON document and parses its contents into a JsonDocument.
 *
 * The document must be a single JSON object (it has to start with '{'), and
 * nothing but whitespace-free tokens already consumed may follow it.
 *
 * Returns true iff successful; failures are reported via LOG_INFO/LOG_WARN.
 */
bool JsonProcessor::FromString (const std::string& json, JsonDocument& document)
{
    // do stepwise lexing
    JsonLexer lexer;
    lexer.ProcessString(json, true);

    if (lexer.empty()) {
        LOG_INFO << "JSON document is empty.";
        return false;
    }
    // On a lexing error the last token carries the position and message.
    if (lexer.HasError()) {
        LOG_WARN << "Lexing error at " << lexer.back().at()
                 << " with message: " << lexer.back().value();
        return false;
    }
    if (!lexer.cbegin()->IsBracket("{")) {
        LOG_WARN << "JSON document does not start with JSON object opener '{'.";
        return false;
    }

    // a json document is also a json object, so we start parsing the doc as such.
    // the begin iterator will be incremented with every token being processed.
    document.clear();
    Lexer::iterator begin = lexer.begin();
    Lexer::iterator end = lexer.end();

    // delete tailing tokens immediately, produce tokens in time (needed for stepwise lexing).
    begin.ConsumeWithTail(0);
    begin.ProduceWithHead(0);

    if (!ParseObject(begin, end, &document)) {
        return false;
    }

    // after processing, the begin iterator will point to the lexer token that comes after
    // the one being processed last. if the document is well-formatted, this token is also
    // the end pointer of the iterator.
    if (begin != end) {
        LOG_WARN << "JSON document contains more information after the closing bracket.";
        return false;
    }

    return true;
}
// Parse a JSON source buffer of the given length into a Js::Var.
// Throws a syntax error if any token remains after the top-level value.
Js::Var JSONParser::Parse(LPCWSTR str, int length)
{
    // Lazily create the temporary guest arena; only inputs longer than
    // MIN_CACHE_LENGTH use the allocator-backed caching machinery.
    if (length > MIN_CACHE_LENGTH)
    {
        if (this->arenaAllocatorObject == nullptr)
        {
            this->arenaAllocatorObject = scriptContext->GetTemporaryGuestAllocator(_u("JSONParse"));
            this->arenaAllocator = arenaAllocatorObject->GetAllocator();
        }
    }

    m_scanner.Init(str, length, &m_token, scriptContext, str, this->arenaAllocator);

    // Read the first token, parse exactly one value, and require EOF after it.
    Scan();
    Js::Var result = ParseObject();
    if (m_token.tk != tkEOF)
    {
        m_scanner.ThrowSyntaxError(JSERR_JsonSyntax);
    }
    return result;
}
/* Parse the next JSON value from the parser's token stream into *node_p,
 * allocating the node if the caller passed a NULL slot. Container sizes come
 * from the matched token (token->size), so the item tables can be allocated
 * up front before recursing into ParseObject()/ParseArray().
 *
 * NOTE(review): as visible here the function only handles '{' and '[' and the
 * definition appears truncated (no closing brace, no scalar-token handling) --
 * confirm against the full source file. */
bool ParseValue(ParserT *parser, JsonNodeT **node_p) {
  TokenT *token;
  JsonNodeT *node = *node_p;

  /* Allocate the destination node on demand. */
  if (!node)
    (*node_p) = node = NewInstance(JsonNodeT);

  if ((token = ParserMatch(parser, TOK_LBRACE))) {
    /* Object: pre-size the pair table from the token's element count. */
    node->type = JSON_OBJECT;
    node->u.object.num = token->size;
    if (token->size)
      node->u.object.item = NewTable(JsonPairT, token->size);
    return ParseObject(parser, node);
  } else if ((token = ParserMatch(parser, TOK_LBRACKET))) {
    /* Array: pre-size the element table likewise. */
    node->type = JSON_ARRAY;
    node->u.array.num = token->size;
    if (token->size)
      node->u.array.item = NewTable(JsonNodeT *, token->size);
    return ParseArray(parser, node);
  }
/* Parse one expression, dispatching on the current token:
 * read builtins, array/object literals, function definitions, or --
 * by default -- an arithmetical expression. */
AST* ParseExpression(ParserState* parser)
{
    if (parser->currentToken.type == TOKEN_INTREAD) {
        Match(parser, TOKEN_INTREAD);
        return CreateASTNode(SEM_INTREAD, VALUE_EMPTY);
    }
    if (parser->currentToken.type == TOKEN_READ) {
        Match(parser, TOKEN_READ);
        return CreateASTNode(SEM_READ, VALUE_EMPTY);
    }
    if (parser->currentToken.type == TOKEN_ARRAY) {
        return ParseArray(parser);
    }
    if (parser->currentToken.type == TOKEN_OBJECT ||
        parser->currentToken.type == TOKEN_NEW) {
        return ParseObject(parser);
    }
    if (parser->currentToken.type == TOKEN_FUNCTION) {
        return ParseFunctionDefinition(parser);
    }
    /* Anything else is the start of an arithmetical expression. */
    return ParseArithmeticalExpression(parser);
}
bool StateParser::ParseState(const char * stateFile, std::string stateId, std::vector<Object*>* pvObject, std::vector<std::string> *pvTextureId) { tinyxml2::XMLDocument xmlDoc; if (xmlDoc.LoadFile(stateFile) !=tinyxml2::XML_SUCCESS) { std::cerr << "XML File Load Error.\n"; return false; } tinyxml2::XMLElement *pRoot = xmlDoc.RootElement(); tinyxml2::XMLElement *pState, *pObject, *pTexture; pState = pRoot->FirstChildElement(stateId.c_str()); pTexture = pState->FirstChildElement("TEXTURES"); pObject = pState->FirstChildElement("OBJECTS"); ParseTexture(pTexture, pvTextureId); ParseObject(pObject, pvObject); return true; }
//top level parse function sgdm::StackGuard<JsonValue>&& JsonParser::ParsePrimary(){ switch(currentTok){ case tok_endl: return std::move(Error("primary fail")); case tok_identifier: return std::move(ParseName()); case tok_integer: return std::move(ParseInteger()); case tok_double: return std::move(ParseDouble()); case '[': return std::move(ParseArray(alloc)); case '{': return std::move(ParseObject(alloc)); default: return std::move(Error("unknown token found near place" + std::to_string(indexCount))); } sgdm::StackGuard<JsonValue>&& JsonParser::Parse(){ getNextTok(); return std::move(ParsePrimary(alloc)); }
// Parse a PDF array body: collect objects until the array terminator.
// A stray dictionary terminator ('>>') is logged and ends the array early.
// The caller owns the returned array.
wxPdfArray* wxPdfParser::ParseArray()
{
    wxPdfArray* result = new wxPdfArray();
    for (;;)
    {
        wxPdfObject* element = ParseObject();
        // Terminator tokens are encoded as negated token types.
        int tokenType = -element->GetType();
        if (tokenType == TOKEN_END_ARRAY)
        {
            delete element;
            break;
        }
        if (tokenType == TOKEN_END_DICTIONARY)
        {
            wxLogError(_("wxPdfParser::ParseArray: Unexpected '>>'."));
            delete element;
            break;
        }
        result->Add(element);
    }
    return result;
}
// Parse a PDF dictionary body: alternating name keys and value objects until
// the dictionary terminator. Malformed entries are logged and end the loop;
// the (possibly partial) dictionary is still returned and owned by the caller.
wxPdfDictionary* wxPdfParser::ParseDictionary()
{
    wxPdfDictionary* dic = new wxPdfDictionary();
    while (true)
    {
        m_tokens->NextValidToken();
        if (m_tokens->GetTokenType() == TOKEN_END_DICTIONARY)
            break;
        if (m_tokens->GetTokenType() != TOKEN_NAME)
        {
            wxLogError(_("wxPdfParser::ParseDictionary: Dictionary key is not a name."));
            break;
        }
        wxPdfName* name = new wxPdfName(m_tokens->GetStringValue());
        wxPdfObject* obj = ParseObject();
        // Terminator tokens are encoded as negated token types.
        int type = obj->GetType();
        if (-type == TOKEN_END_DICTIONARY)
        {
            wxLogError(_("wxPdfParser::ParseDictionary: Unexpected '>>'."));
            delete obj;
            delete name;
            break;
        }
        if (-type == TOKEN_END_ARRAY)
        {
            wxLogError(_("wxPdfParser::ParseDictionary: Unexpected ']'."));
            delete obj;
            delete name;
            break;
        }
        // NOTE(review): 'name' is deleted immediately after Put(); this
        // assumes Put() copies the key rather than retaining the pointer --
        // confirm against wxPdfDictionary::Put's ownership contract.
        dic->Put(name, obj);
        delete name;
    }
    return dic;
}
// Dispatch on the first non-whitespace character to the matching JSON value
// parser. Returns the position after the parsed value, or NULL if the
// character cannot start any JSON value.
static const char *ParseValue(ParseArgs& args, const char *data)
{
    data = SkipWS(data);
    const char c = *data;

    if (c == '"')
        return ParseString(args, data);
    if (c == '-' || (c >= '0' && c <= '9'))
        return ParseNumber(args, data);
    if (c == '{')
        return ParseObject(args, data);
    if (c == '[')
        return ParseArray(args, data);
    if (c == 't')
        return ParseKeyword(args, data, "true", Type_Bool);
    if (c == 'f')
        return ParseKeyword(args, data, "false", Type_Bool);
    if (c == 'n')
        return ParseKeyword(args, data, "null", Type_Null);

    return NULL;
}
// Fill _object_layer from a TMX <objectgroup> element: name, dimensions,
// opacity, visibility, and every child <object>. Returns false if any child
// object fails to parse.
bool TilemapLoader::ParseObjectLayer(const tinyxml2::XMLElement* _element, ObjectLayer& _object_layer)
{
    // fix: tinyxml2::XMLElement::Attribute() returns NULL when the attribute
    // is absent; constructing std::string from NULL is undefined behaviour.
    const char* name_attr = _element->Attribute("name");
    string name = name_attr ? name_attr : "";
    _object_layer.name = name;
    cout << "Loading object layer: " << name << endl;

    // Missing numeric attributes simply yield 0 from the typed accessors.
    int width = _element->IntAttribute("width");
    int height = _element->IntAttribute("height");
    _object_layer.layer_dimensions = sf::Vector2i(width, height);

    float opacity = _element->FloatAttribute("opacity");
    _object_layer.opacity = opacity;

    bool visible = static_cast<bool>(_element->IntAttribute("visible"));
    _object_layer.visible = visible;

    // Parse every <object> child; a single failure aborts the whole layer.
    const XMLElement* object = _element->FirstChildElement("object");
    while (object)
    {
        Object temp_object;
        if (!ParseObject(object, temp_object))
        {
            cout << "Failed to parse object" << endl;
            return false;
        }
        else
            _object_layer.objects.push_back(temp_object);

        object = object->NextSiblingElement("object");
    }

    return true;
}
// Parse one JSON value at the current token and return it as a Js::Var.
// Despite the name this handles every value kind: numbers, strings,
// booleans, null, arrays and objects (arrays/objects recurse back here for
// their members). Object parsing optionally uses a per-parser type cache so
// that repeated objects with the same property order take a fast path for
// type transitions. Syntax errors are thrown via the scanner (non-local exit).
Js::Var JSONParser::ParseObject()
{
    PROBE_STACK(scriptContext, Js::Constants::MinStackDefault);

    Js::Var retVal;

    switch (m_token.tk)
    {
    case tkFltCon:
        retVal = Js::JavascriptNumber::ToVarIntCheck(m_token.GetDouble(), scriptContext);
        Scan();
        return retVal;

    case tkStrCon:
        {
            // will auto-null-terminate the string (as length=len+1)
            uint len = m_scanner.GetCurrentStringLen();
            retVal = Js::JavascriptString::NewCopyBuffer(m_scanner.GetCurrentString(), len, scriptContext);
            Scan();
            return retVal;
        }

    case tkTRUE:
        retVal = scriptContext->GetLibrary()->GetTrue();
        Scan();
        return retVal;

    case tkFALSE:
        retVal = scriptContext->GetLibrary()->GetFalse();
        Scan();
        return retVal;

    case tkNULL:
        retVal = scriptContext->GetLibrary()->GetNull();
        Scan();
        return retVal;

    case tkSub:
        // unary minus: only valid immediately before a number literal.
        // ThrowSyntaxError performs a non-local exit, so the missing break
        // here cannot fall through at runtime.
        if (Scan() == tkFltCon)
        {
            retVal = Js::JavascriptNumber::ToVarIntCheck(-m_token.GetDouble(), scriptContext);
            Scan();
            return retVal;
        }
        else
        {
            m_scanner.ThrowSyntaxError(JSERR_JsonBadNumber);
        }

    case tkLBrack:
        {
            Js::JavascriptArray* arrayObj = scriptContext->GetLibrary()->CreateArray(0);
            //skip '['
            Scan();
            //iterate over the array members, get JSON objects and add them in the pArrayMemberList
            uint k = 0;
            while (true)
            {
                if(tkRBrack == m_token.tk)
                {
                    break;
                }
                Js::Var value = ParseObject();
                arrayObj->SetItem(k++, value, Js::PropertyOperation_None);
                // if next token is not a comma consider the end of the array member list.
                if (tkComma != m_token.tk)
                    break;
                Scan();
                // A ']' directly after a comma is a trailing comma: illegal.
                if(tkRBrack == m_token.tk)
                {
                    m_scanner.ThrowSyntaxError(JSERR_JsonIllegalChar);
                }
            }
            //check and consume the ending ']'
            CheckCurrentToken(tkRBrack, JSERR_JsonNoRbrack);
            return arrayObj;
        }

    case tkLCurly:
        {
            // Parse an object, "{"name1" : ObjMember1, "name2" : ObjMember2, ...} "
            if(IsCaching())
            {
                if(!typeCacheList)
                {
                    typeCacheList = Anew(this->arenaAllocator, JsonTypeCacheList, this->arenaAllocator, 8);
                }
            }

            // first, create the object
            Js::DynamicObject* object = scriptContext->GetLibrary()->CreateObject();
            JS_ETW(EventWriteJSCRIPT_RECYCLER_ALLOCATE_OBJECT(object));
#if ENABLE_DEBUG_CONFIG_OPTIONS
            if (Js::Configuration::Global.flags.IsEnabled(Js::autoProxyFlag))
            {
                object = DynamicObject::FromVar(JavascriptProxy::AutoProxyWrapper(object));
            }
#endif
            //next token after '{'
            Scan();

            //if empty object "{}" return;
            if(tkRCurly == m_token.tk)
            {
                Scan();
                return object;
            }

            JsonTypeCache* previousCache = nullptr;
            JsonTypeCache* currentCache = nullptr;
            //parse the list of members
            while(true)
            {
                // parse a list member: "name" : ObjMember
                // and add it to the object.

                //pick "name"
                if(tkStrCon != m_token.tk)
                {
                    m_scanner.ThrowSyntaxError(JSERR_JsonIllegalChar);
                }

                // currentStrLength = length w/o null-termination
                WCHAR* currentStr = m_scanner.GetCurrentString();
                uint currentStrLength = m_scanner.GetCurrentStringLen();

                DynamicType* typeWithoutProperty = object->GetDynamicType();
                if(IsCaching())
                {
                    if(!previousCache)
                    {
                        // This is the first property in the list - see if we have an existing cache for it.
                        currentCache = typeCacheList->LookupWithKey(Js::HashedCharacterBuffer<WCHAR>(currentStr, currentStrLength), nullptr);
                    }
                    // Fast path: the cache entry matches both the current type
                    // and the property name.
                    if(currentCache && currentCache->typeWithoutProperty == typeWithoutProperty && currentCache->propertyRecord->Equals(JsUtil::CharacterBuffer<WCHAR>(currentStr, currentStrLength)))
                    {
                        //check and consume ":"
                        if(Scan() != tkColon )
                        {
                            m_scanner.ThrowSyntaxError(JSERR_JsonNoColon);
                        }
                        Scan();

                        // Cache all values from currentCache as there is a chance that ParseObject might change the cache
                        DynamicType* typeWithProperty = currentCache->typeWithProperty;
                        PropertyId propertyId = currentCache->propertyRecord->GetPropertyId();
                        PropertyIndex propertyIndex = currentCache->propertyIndex;
                        previousCache = currentCache;
                        currentCache = currentCache->next;

                        // fast path for type transition and property set
                        object->EnsureSlots(typeWithoutProperty->GetTypeHandler()->GetSlotCapacity(), typeWithProperty->GetTypeHandler()->GetSlotCapacity(), scriptContext, typeWithProperty->GetTypeHandler());
                        object->ReplaceType(typeWithProperty);
                        Js::Var value = ParseObject();
                        object->SetSlot(SetSlotArguments(propertyId, propertyIndex, value));

                        // if the next token is not a comma consider the list of members done.
                        if (tkComma != m_token.tk)
                            break;
                        Scan();
                        continue;
                    }
                }

                // slow path
                Js::PropertyRecord const * propertyRecord;
                scriptContext->GetOrAddPropertyRecord(currentStr, currentStrLength, &propertyRecord);
                //check and consume ":"
                if(Scan() != tkColon )
                {
                    m_scanner.ThrowSyntaxError(JSERR_JsonNoColon);
                }
                Scan();
                Js::Var value = ParseObject();
                PropertyValueInfo info;
                object->SetProperty(propertyRecord->GetPropertyId(), value, PropertyOperation_None, &info);

                // Only cache cacheable, shared, path-type transitions.
                DynamicType* typeWithProperty = object->GetDynamicType();
                if(IsCaching() && !propertyRecord->IsNumeric() && !info.IsNoCache() && typeWithProperty->GetIsShared() && typeWithProperty->GetTypeHandler()->IsPathTypeHandler())
                {
                    PropertyIndex propertyIndex = info.GetPropertyIndex();
                    if(!previousCache)
                    {
                        // This is the first property in the set add it to the dictionary.
                        currentCache = JsonTypeCache::New(this->arenaAllocator, propertyRecord, typeWithoutProperty, typeWithProperty, propertyIndex);
                        typeCacheList->AddNew(propertyRecord, currentCache);
                    }
                    else if(!currentCache)
                    {
                        // Extend the cache chain for this new property position.
                        currentCache = JsonTypeCache::New(this->arenaAllocator, propertyRecord, typeWithoutProperty, typeWithProperty, propertyIndex);
                        previousCache->next = currentCache;
                    }
                    else
                    {
                        // cache miss!!
                        currentCache->Update(propertyRecord, typeWithoutProperty, typeWithProperty, propertyIndex);
                    }
                    previousCache = currentCache;
                    currentCache = currentCache->next;
                }

                // if the next token is not a comma consider the list of members done.
                if (tkComma != m_token.tk)
                    break;
                Scan();
            }

            // check and consume the ending '}"
            CheckCurrentToken(tkRCurly, JSERR_JsonNoRcurly);
            return object;
        }

    default:
        m_scanner.ThrowSyntaxError(JSERR_JsonSyntax);
    }
}
// Parse a single JSON value at the cursor into [value]: object, array,
// string, number, or one of the literals true/false/null. Returns false on
// any mismatch; Error() is used for reporting where a message is available.
bool json::Reader::ParseValue(ConfigValue& value)
{
    SkipSpaces();

    bool b = true;
    char c = *_cur;
    switch(c)
    {
    case '{':
        b = ParseObject(value);
        break;
    case '[':
        b = ParseArray(value);
        break;
    case '"':
        {
            std::string str;
            b = ParseString(str);
            if(b)
                value.SetString(str.c_str());
            else
                Error("Failed to parse string");
        }
        break;
    case '0': case '1': case '2': case '3': case '4':
    case '5': case '6': case '7': case '8': case '9':
    case '-':
        b = ParseNumber(value);
        break;
    // For the literals, each *(++_cur) checks the next character, with
    // short-circuiting stopping at the first mismatch.
    // NOTE(review): these increments assume the buffer does not end in the
    // middle of a literal (e.g. input "tr" would read past _end) -- the
    // buffer is presumably NUL-terminated; confirm.
    case 't': // true
        if(*(++_cur) != 'r' || *(++_cur) != 'u' || *(++_cur) != 'e')
        {
            Error("Expected \"true\"");
            return false;
        }
        ++_cur;
        value.SetBool(true);
        break;
    case 'f': // false
        if(*(++_cur) != 'a' || *(++_cur) != 'l' || *(++_cur) != 's' || *(++_cur) != 'e')
        {
            Error("Expected \"false\"");
            return false;
        }
        ++_cur;
        value.SetBool(false);
        break;
    case 'n': // null
        if(*(++_cur) != 'u' || *(++_cur) != 'l' || *(++_cur) != 'l')
        {
            Error("Expected \"null\"");
            return false;
        }
        ++_cur;
        value.SetNull();
        break;
    default:
        // Not the start of any JSON value.
        b = false;
    };
    return b;
}
bool wxPdfParser::ParseXRefStream(int ptr, bool setTrailer) { int idx, k; m_tokens->Seek(ptr); int streamRef = 0; if (!m_tokens->NextToken()) { return false; } if (m_tokens->GetTokenType() != TOKEN_NUMBER) { return false; } streamRef = m_tokens->GetIntValue(); if (!m_tokens->NextToken() || m_tokens->GetTokenType() != TOKEN_NUMBER) { return false; } if (!m_tokens->NextToken() || m_tokens->GetStringValue() != _T("obj")) { return false; } wxPdfObject* object = ParseObject(); wxPdfStream* stm = NULL; if (object->GetType() == OBJTYPE_STREAM) { stm = (wxPdfStream*) object; if (((wxPdfName*) stm->Get(_T("/Type")))->GetName() != _T("/XRef")) { delete object; return false; } } int size = ((wxPdfNumber*) stm->Get(_T("/Size")))->GetInt(); bool indexAllocated = false; wxPdfArray* index; wxPdfObject* obj = stm->Get(_T("/Index")); if (obj == NULL) { indexAllocated = true; index = new wxPdfArray(); index->Add(0); index->Add(size); } else { index = (wxPdfArray*) obj; } wxPdfArray* w = (wxPdfArray*) stm->Get(_T("/W")); int prev = -1; obj = stm->Get(_T("/Prev")); if (obj != NULL) { prev = ((wxPdfNumber* )obj)->GetInt(); } // Each xref pair is a position // type 0 -> -1, 0 // type 1 -> offset, 0 // type 2 -> index, obj num ReserveXRef(size); GetStreamBytes(stm); wxMemoryOutputStream* streamBuffer = stm->GetBuffer(); wxMemoryInputStream streamBytes(*streamBuffer); size_t inLength = streamBytes.GetSize(); char* buffer = new char[inLength]; streamBytes.Read(buffer, inLength); int bptr = 0; int wc[3]; for (k = 0; k < 3; ++k) { wc[k] = ((wxPdfNumber*) (w->Get(k)))->GetInt(); } for (idx = 0; (size_t) idx < index->GetSize(); idx += 2) { int start = ((wxPdfNumber*) (index->Get(idx)))->GetInt(); int length = ((wxPdfNumber*) (index->Get(idx + 1)))->GetInt(); ReserveXRef(start+length); while (length-- > 0) { wxPdfXRefEntry& xrefEntry = m_xref[start]; int type = 1; if (wc[0] > 0) { type = 0; for (k = 0; k < wc[0]; ++k) { type = (type << 8) + (buffer[bptr++] & 0xff); } } int field2 = 0; for (k = 
0; k < wc[1]; ++k) { field2 = (field2 << 8) + (buffer[bptr++] & 0xff); } int field3 = 0; for (k = 0; k < wc[2]; ++k) { field3 = (field3 << 8) + (buffer[bptr++] & 0xff); } if (xrefEntry.m_ofs_idx == 0 && xrefEntry.m_gen_ref == 0) { switch (type) { case 0: xrefEntry.m_type = 0; xrefEntry.m_ofs_idx = -1; xrefEntry.m_gen_ref = 0; break; case 1: xrefEntry.m_type = 1; xrefEntry.m_ofs_idx = field2; xrefEntry.m_gen_ref = field3; break; case 2: xrefEntry.m_type = 2; xrefEntry.m_ofs_idx = field3; xrefEntry.m_gen_ref = field2; break; } } start++; } } delete [] buffer; if ((size_t) streamRef < m_xref.GetCount()) { m_xref[streamRef].m_ofs_idx = -1; } if (indexAllocated) { delete index; } // Set the first xref stream dictionary as the trailer dictionary if (setTrailer && m_trailer == NULL) { m_trailer = stm->GetDictionary(); stm->SetDictionary(NULL); } delete stm; if (prev == -1) { return true; } return ParseXRefStream(prev, false); }
// Parse one media sample carrying DVB subtitle data: optionally unwrap a PES
// header (extracting the PTS), append the payload to the internal buffer, and
// then consume every complete subtitle segment (page/region/CLUT/object/
// display/end-of-display) found in the buffer.
HRESULT CDVBSub::ParseSample(IMediaSample* pSample)
{
    CheckPointer(pSample, E_POINTER);

    HRESULT hr;
    BYTE* pData = nullptr;
    int nSize;
    DVB_SEGMENT_TYPE nCurSegment;

    hr = pSample->GetPointer(&pData);
    if (FAILED(hr) || pData == nullptr) {
        return hr;
    }
    nSize = pSample->GetActualDataLength();

    // 0xBD010000 is the little-endian read of the private-stream-1 PES start
    // code (00 00 01 BD): parse the PES header and extract the PTS.
    if (*((LONG*)pData) == 0xBD010000) {
        CGolombBuffer gb(pData, nSize);
        gb.SkipBytes(4);
        WORD wLength = (WORD)gb.BitRead(16);
        UNREFERENCED_PARAMETER(wLength);
        if (gb.BitRead(2) != 2) {
            return E_FAIL; // type
        }
        gb.BitRead(2);  // scrambling
        gb.BitRead(1);  // priority
        gb.BitRead(1);  // alignment
        gb.BitRead(1);  // copyright
        gb.BitRead(1);  // original
        BYTE fpts = (BYTE)gb.BitRead(1); // fpts
        BYTE fdts = (BYTE)gb.BitRead(1); // fdts
        gb.BitRead(1);  // escr
        gb.BitRead(1);  // esrate
        gb.BitRead(1);  // dsmtrickmode
        gb.BitRead(1);  // morecopyright
        gb.BitRead(1);  // crc
        gb.BitRead(1);  // extension
        gb.BitRead(8);  // hdrlen
        if (fpts) {
            // The 4-bit prefix must match the PTS/DTS flag combination.
            BYTE b = (BYTE)gb.BitRead(4);
            if (!(fdts && b == 3 || !fdts && b == 2)) {
                ASSERT(0);
                return E_FAIL;
            }
            // 33-bit PTS split 3/15/15 with marker bits between the parts;
            // convert from 90 kHz ticks to 100 ns units.
            REFERENCE_TIME pts = 0;
            pts |= gb.BitRead(3) << 30;
            MARKER; // 32..30
            pts |= gb.BitRead(15) << 15;
            MARKER; // 29..15
            pts |= gb.BitRead(15);
            MARKER; // 14..0
            pts = 10000 * pts / 90;
            m_rtStart = pts;
            m_rtStop = pts + 1;
        } else {
            m_rtStart = INVALID_TIME;
            m_rtStop = INVALID_TIME;
        }
        // Skip past the fixed part of the PES header.
        nSize -= 14;
        pData += 14;
        // NOTE(review): these overwrite the PTS just computed above with the
        // sample's own timestamps -- confirm this precedence is intended.
        pSample->GetTime(&m_rtStart, &m_rtStop);
        pSample->GetMediaTime(&m_rtStart, &m_rtStop);
    } else if (SUCCEEDED(pSample->GetTime(&m_rtStart, &m_rtStop))) {
        pSample->SetTime(&m_rtStart, &m_rtStop);
    }

    if (AddToBuffer(pData, nSize) == S_OK) {
        CGolombBuffer gb(m_pBuffer + m_nBufferReadPos, m_nBufferWritePos - m_nBufferReadPos);
        int nLastPos = 0;

        // Each segment starts with sync byte 0x0F, type, page id and length.
        while (gb.RemainingSize() >= 6) { // Ensure there is enough data to parse the entire segment header
            if (gb.ReadByte() == 0x0F) {
                TRACE_DVB(_T("DVB - ParseSample\n"));

                WORD wPageId;
                WORD wSegLength;

                nCurSegment = (DVB_SEGMENT_TYPE)gb.ReadByte();
                wPageId = gb.ReadShort();
                wSegLength = gb.ReadShort();

                // Incomplete segment: keep the remainder for the next sample.
                if (gb.RemainingSize() < wSegLength) {
                    hr = S_FALSE;
                    break;
                }

                switch (nCurSegment) {
                    case PAGE: {
                        // A new page while one is displayed forces its end.
                        if (m_pCurrentPage != nullptr) {
                            TRACE_DVB(_T("DVB - Force End display"));
                            EnqueuePage(m_rtStart);
                        }
                        UpdateTimeStamp(m_rtStart);

                        CAutoPtr<DVB_PAGE> pPage;
                        ParsePage(gb, wSegLength, pPage);

                        if (pPage->pageState == DPS_ACQUISITION || pPage->pageState == DPS_MODE_CHANGE) {
                            m_pCurrentPage = pPage;
                            m_pCurrentPage->rtStart = m_rtStart;
                            m_pCurrentPage->rtStop = m_pCurrentPage->rtStart + m_pCurrentPage->pageTimeOut * 10000000;

                            TRACE_DVB(_T("DVB - Page started [pageState = %d] %s, TimeOut = %ds\n"), m_pCurrentPage->pageState, ReftimeToString(m_rtStart), m_pCurrentPage->pageTimeOut);
                        } else if (!m_Pages.IsEmpty()) {
                            // Page update: start from the previous page's state.
                            m_pCurrentPage = pPage;
                            m_pCurrentPage->rtStart = m_rtStart;
                            m_pCurrentPage->rtStop = m_pCurrentPage->rtStart + m_pCurrentPage->pageTimeOut * 10000000;

                            // Copy data from the previous page
                            DVB_PAGE* pPrevPage = m_Pages.GetTail();
                            memcpy(m_pCurrentPage->regions, pPrevPage->regions, sizeof(m_pCurrentPage->regions));
                            for (POSITION pos = pPrevPage->objects.GetHeadPosition(); pos;) {
                                m_pCurrentPage->objects.AddTail(pPrevPage->objects.GetNext(pos)->Copy());
                            }
                            for (POSITION pos = pPrevPage->CLUTs.GetHeadPosition(); pos;) {
                                m_pCurrentPage->CLUTs.AddTail(DEBUG_NEW DVB_CLUT(*pPrevPage->CLUTs.GetNext(pos)));
                            }
                            TRACE_DVB(_T("DVB - Page started [update] %s, TimeOut = %ds\n"), ReftimeToString(m_rtStart), m_pCurrentPage->pageTimeOut);
                        } else {
                            TRACE_DVB(_T("DVB - Page update ignored %s\n"), ReftimeToString(m_rtStart));
                        }
                    }
                    break;
                    case REGION:
                        ParseRegion(gb, wSegLength);
                        TRACE_DVB(_T("DVB - Region\n"));
                        break;
                    case CLUT:
                        ParseClut(gb, wSegLength);
                        TRACE_DVB(_T("DVB - Clut\n"));
                        break;
                    case OBJECT:
                        ParseObject(gb, wSegLength);
                        TRACE_DVB(_T("DVB - Object\n"));
                        break;
                    case DISPLAY:
                        ParseDisplay(gb, wSegLength);
                        TRACE_DVB(_T("DVB - Display\n"));
                        break;
                    case END_OF_DISPLAY:
                        if (m_pCurrentPage == nullptr) {
                            TRACE_DVB(_T("DVB - Ignored End display %s: no current page\n"), ReftimeToString(m_rtStart));
                        } else if (m_pCurrentPage->rtStart < m_rtStart) {
                            TRACE_DVB(_T("DVB - End display"));
                            EnqueuePage(m_rtStart);
                        } else {
                            TRACE_DVB(_T("DVB - Ignored End display %s: no information on page duration\n"), ReftimeToString(m_rtStart));
                        }
                        break;
                    default:
                        break;
                }
                nLastPos = gb.GetPos();
            }
        }
        // Only fully consumed segments advance the read position.
        m_nBufferReadPos += nLastPos;
    }
    return hr;
}
// Extract the [idx]-th compressed object from a PDF object stream (ObjStm).
// The stream's header lists "objnum offset" pairs; offsets are relative to
// /First. When m_cacheObjects is set, all offsets are read and memoized on
// the stream so later extractions can seek directly. The tokenizer,
// encryption flag and raw-stream flag are saved and restored around the
// temporary in-memory parse. Returns NULL on failure; caller owns the result.
wxPdfObject* wxPdfParser::ParseObjectStream(wxPdfStream* objStm, int idx)
{
    wxPdfObject* obj = NULL;
    wxPdfNumber* firstNumber = (wxPdfNumber*) ResolveObject(objStm->Get(_T("/First")));
    int first = firstNumber->GetInt();

    // Decode the stream payload once, with raw-stream mode disabled.
    if (objStm->GetBuffer() == NULL)
    {
        bool saveUseRawStream = m_useRawStream;
        m_useRawStream = false;
        GetStreamBytes(objStm);
        m_useRawStream = saveUseRawStream;
    }

    // Objects inside an ObjStm are never encrypted themselves; parse them
    // from an in-memory tokenizer, keeping the file tokenizer untouched.
    bool saveEncrypted = m_encrypted;
    m_encrypted = false;
    wxPdfTokenizer* saveTokens = m_tokens;
    wxMemoryInputStream objStream(*(objStm->GetBuffer()));
    m_tokens = new wxPdfTokenizer(&objStream);

    int address = 0;
    bool ok = true;
    if (!objStm->HasObjOffsets())
    {
        // Read object offsets
        wxArrayInt* objOffsets = objStm->GetObjOffsets();
        // Without caching, only read pairs up to the requested index.
        int objCount = idx + 1;
        if (m_cacheObjects)
        {
            wxPdfNumber* objCountNumber = (wxPdfNumber*) ResolveObject(objStm->Get(_T("/N")));
            objCount = objCountNumber->GetInt();
        }
        int offset;
        int k;
        for (k = 0; k < objCount; ++k)
        {
            // Each pair is: object number, then offset (relative to /First).
            ok = m_tokens->NextToken();
            if (!ok) break;
            if (m_tokens->GetTokenType() != TOKEN_NUMBER)
            {
                ok = false;
                break;
            }
            ok = m_tokens->NextToken();
            if (!ok) break;
            if (m_tokens->GetTokenType() != TOKEN_NUMBER)
            {
                ok = false;
                break;
            }
            offset = m_tokens->GetIntValue() + first;
            if (m_cacheObjects)
            {
                objOffsets->Add(offset);
            }
            if (k == idx)
            {
                address = offset;
            }
        }
        if (ok)
        {
            // Offsets are only complete (and thus reusable) when cached.
            objStm->SetHasObjOffsets(m_cacheObjects);
        }
    }
    else
    {
        // Offsets already memoized: seek directly.
        address = objStm->GetObjOffset(idx);
        ok = (address > 0);
    }
    if (ok)
    {
        m_tokens->Seek(address);
        obj = ParseObject();
    }
    else
    {
        wxLogError(_T("wxPdfParser::ParseOneObjStm: Error reading ObjStm."));
    }

    // Restore the parser state for the enclosing file parse.
    delete m_tokens;
    m_tokens = saveTokens;
    m_encrypted = saveEncrypted;
    return obj;
}
// Parse the object with cross-reference index [k] directly from the file.
// Free entries (type 0) yield NULL. Type 2 entries live inside an object
// stream, which is located (or fetched from the ObjStm cache) first and then
// unpacked via ParseObjectStream. Returns NULL on any parse failure; the
// caller owns the returned object.
wxPdfObject* wxPdfParser::ParseDirectObject(int k)
{
    int objIndex = 0;
    int objStreamIndex = 0;
    bool isCached = false;
    wxPdfObject* obj = NULL;

    // Check for free object
    if (m_xref[k].m_type == 0)
    {
        return NULL;
    }

    int pos = m_xref[k].m_ofs_idx;
    if (m_xref[k].m_type == 2)
    {
        // Compressed object: m_gen_ref holds the containing stream's number.
        objIndex = m_xref[k].m_gen_ref;
        wxPdfObjStmMap::iterator objStm = m_objStmCache->find(objIndex);
        if (objStm != m_objStmCache->end())
        {
            obj = objStm->second;
            isCached = true;
        }
        else
        {
            objStreamIndex = m_xref[k].m_gen_ref;
            pos = m_xref[objStreamIndex].m_ofs_idx;
        }
    }

    if (!isCached)
    {
        // Expect the "<num> <gen> obj" header at the entry's file offset.
        m_tokens->Seek(pos);
        m_tokens->NextValidToken();
        if (m_tokens->GetTokenType() != TOKEN_NUMBER)
        {
            wxLogError(_T("wxPdfParser::ParseSingleObject: Invalid object number."));
            return NULL;
        }
        m_objNum = m_tokens->GetIntValue();
        m_tokens->NextValidToken();
        if (m_tokens->GetTokenType() != TOKEN_NUMBER)
        {
            wxLogError(_T("wxPdfParser::ParseSingleObject: Invalid generation number."));
            return NULL;
        }
        m_objGen = m_tokens->GetIntValue();
        m_tokens->NextValidToken();
        if (m_tokens->GetStringValue() != _T("obj"))
        {
            wxLogError(_T("wxPdfParser::ParseSingleObject: Token 'obj' expected."));
            return NULL;
        }
        obj = ParseObject();
    }

    // TODO: Check for valid 'endstream'

    if (m_xref[k].m_type == 2)
    {
        // Unpack the requested object out of the object stream.
        m_objNum = k;
        m_objGen = 0;
        wxPdfStream* objStream = (wxPdfStream*) obj;
        obj = ParseObjectStream((wxPdfStream*) obj, m_xref[k].m_ofs_idx);
        if (m_cacheObjects)
        {
            if (!isCached)
            {
                (*m_objStmCache)[objIndex] = objStream;
            }
        }
        else
        {
            delete objStream;
        }
    }

    // fix: obj->GetType() was previously called outside the NULL check,
    // dereferencing NULL whenever parsing (or stream extraction) failed.
    if (obj != NULL)
    {
        obj->SetObjNum(m_objNum, m_objGen);
        if (obj->GetType() == OBJTYPE_STREAM)
        {
            GetStreamBytes((wxPdfStream*) obj);
        }
    }
    return obj;
}
/*************************************************************************
 *
 *  ParseDefine() - Starting point for definition parsing. Called
 *    after the 'define' keyword is encountered.
 *
 *  Parses "define <newname> <surface|object>", executes the parsed
 *  statements to build the surface/object, and registers the result in the
 *  local symbol table under <newname>. Returns 1 on success (including the
 *  case where the defined thing is silently skipped), 0 when the identifier
 *  is not new.
 *
 *************************************************************************/
int ParseDefine(void)
{
  char nameid[TOKEN_SIZE_MAX];
  int token;
  Stmt *stmt;

  /* Get new identifier name... TK_UNKNOWN_ID means the name is not yet
   * defined, which is exactly what a definition requires. */
  if ((token = GetNewIdentifier()) == TK_UNKNOWN_ID)
  {
    /* Save id name now. */
    strcpy(nameid, token_buffer);
    /* Get the object being defined... */
    token = GetToken();
    if (token == TK_SURFACE || token == DECL_SURFACE)
    {
      /* 'surface' or <surface name> */
      stmt = (token == DECL_SURFACE) ? ParseDeclSurfaceStmt((Surface *)cur_token->data) : ParseSurfaceStmt();
      if (stmt != NULL)
      {
        if (!error_count)
        {
          /* Create the surface and store it in the symbol table. */
          parse_declsurfflag = 1;
          ExecStatements(stmt);
          if (parse_declsurf != NULL)
          {
            if (!Symbol_AddLocal(nameid, DECL_SURFACE, 0, (void *)parse_declsurf))
              LogMemError("surface definition");
            parse_declsurf = NULL;
          }
          parse_declsurfflag = 0;
        }
        /* The statement tree is only needed to build the surface. */
        DeleteStatements(stmt);
      }
    }
    else if (cur_token->flags & TKFLAG_OBJECT)
    {
      /* <object type> or <object name> */
      Object *declobj = NULL;
      if (token == DECL_OBJECT)
        declobj = (Object *)cur_token->data;
      stmt = ParseObject(token, declobj);
      if (stmt != NULL)
      {
        if (!error_count)
        {
          Object *obj;
          /* Create the object and store it in the symbol table. */
          ExecStatements(stmt);
          /* Executing the statements appended the object to the scene;
           * take it back so the definition owns it. */
          obj = ScnBuild_RemoveLastObject();
          if (!Symbol_AddLocal(nameid, DECL_OBJECT, TKFLAG_OBJECT, (void *)obj))
            LogMemError("object definition");
        }
        DeleteStatements(stmt);
      }
    }
    /* NOTE(review): a token that is neither a surface nor object-flagged is
     * silently ignored here and 1 is returned -- confirm that is intended. */
  }
  else
  {
    ErrUnknown(token, "identifier name", "define");
    /* SkipStatement() */
    return 0;
  }
  return 1;
}
// Parse one media sample carrying HDMV (Blu-ray PGS) subtitle data.
// Segments may span sample boundaries: a per-parser segment buffer
// (m_pSegBuffer) is filled until the declared segment size is reached, and
// only then is the segment decoded.
HRESULT CHdmvSub::ParseSample(IMediaSample* pSample)
{
    CheckPointer (pSample, E_POINTER);

    HRESULT hr;
    REFERENCE_TIME rtStart = INVALID_TIME, rtStop = INVALID_TIME;
    BYTE* pData = NULL;
    int lSampleLen;

    hr = pSample->GetPointer(&pData);
    if(FAILED(hr) || pData == NULL) {
        return hr;
    }
    lSampleLen = pSample->GetActualDataLength();

    pSample->GetTime(&rtStart, &rtStop);
    if (pData) {
        CGolombBuffer SampleBuffer (pData, lSampleLen);

        while (!SampleBuffer.IsEOF()) {
            if (m_nCurSegment == NO_SEGMENT) {
                // Segment header: 1 byte type + 2 byte payload size.
                HDMV_SEGMENT_TYPE nSegType = (HDMV_SEGMENT_TYPE)SampleBuffer.ReadByte();
                USHORT nUnitSize = SampleBuffer.ReadShort();
                lSampleLen -=3;

                switch (nSegType) {
                    case PALETTE :
                    case OBJECT :
                    case PRESENTATION_SEG :
                    case END_OF_DISPLAY :
                        // Segment types we decode: start buffering it.
                        m_nCurSegment = nSegType;
                        AllocSegment (nUnitSize);
                        break;
                    case WINDOW_DEF :
                    case INTERACTIVE_SEG :
                    case HDMV_SUB1 :
                    case HDMV_SUB2 :
                        // Ignored stuff...
                        SampleBuffer.SkipBytes(nUnitSize);
                        break;
                    default :
                        return VFW_E_SAMPLE_REJECTED;
                }
            }

            if (m_nCurSegment != NO_SEGMENT) {
                // Accumulate payload bytes until the segment is complete.
                if (m_nSegBufferPos < m_nSegSize) {
                    int nSize = min (m_nSegSize-m_nSegBufferPos, lSampleLen);
                    SampleBuffer.ReadBuffer (m_pSegBuffer+m_nSegBufferPos, nSize);
                    m_nSegBufferPos += nSize;
                }
                if (m_nSegBufferPos >= m_nSegSize) {
                    // The segment is complete: decode it.
                    CGolombBuffer SegmentBuffer (m_pSegBuffer, m_nSegSize);
                    switch (m_nCurSegment) {
                        case PALETTE :
                            TRACE_HDMVSUB ("CHdmvSub:PALETTE rtStart=%10I64d\n", rtStart);
                            ParsePalette(&SegmentBuffer, m_nSegSize);
                            break;
                        case OBJECT :
                            //TRACE_HDMVSUB ("CHdmvSub:OBJECT %S\n", ReftimeToString(rtStart));
                            ParseObject(&SegmentBuffer, m_nSegSize);
                            break;
                        case PRESENTATION_SEG :
                            TRACE_HDMVSUB ("CHdmvSub:PRESENTATION_SEG %S (size=%d)\n", ReftimeToString(rtStart), m_nSegSize);
                            // A new presentation closes the current object.
                            if (m_pCurrentObject) {
                                m_pCurrentObject->m_rtStop = rtStart;
                                m_pObjects.AddTail (m_pCurrentObject);
                                TRACE_HDMVSUB ("CHdmvSub:HDMV : %S => %S\n", ReftimeToString (m_pCurrentObject->m_rtStart), ReftimeToString(rtStart));
                                m_pCurrentObject = NULL;
                            }
                            // NOTE(review): this assumes a positive return
                            // means ParsePresentationSegment() allocated
                            // m_pCurrentObject -- confirm, otherwise this is
                            // a NULL dereference.
                            if (ParsePresentationSegment(&SegmentBuffer) > 0) {
                                m_pCurrentObject->m_rtStart = rtStart;
                                m_pCurrentObject->m_rtStop = _I64_MAX;
                            }
                            break;
                        case WINDOW_DEF :
                            // TRACE_HDMVSUB ("CHdmvSub:WINDOW_DEF %S\n", ReftimeToString(rtStart));
                            break;
                        case END_OF_DISPLAY :
                            // TRACE_HDMVSUB ("CHdmvSub:END_OF_DISPLAY %S\n", ReftimeToString(rtStart));
                            break;
                        default :
                            TRACE_HDMVSUB ("CHdmvSub:UNKNOWN Seg %d rtStart=0x%10dd\n", m_nCurSegment, rtStart);
                    }
                    // Ready for the next segment header.
                    m_nCurSegment = NO_SEGMENT;
                }
            }
        }
    }
    return hr;
}
// Parses one classic (non-stream) cross-reference section: the "xref"
// keyword, one or more subsections of "start count" entry groups, then the
// trailer dictionary. Fills m_xref (first definition of an object wins) and
// follows an /XRefStm hybrid-file pointer when present.
//
// Returns the trailer dictionary, or NULL on any parse error (an error has
// already been logged in that case).
wxPdfDictionary* wxPdfParser::ParseXRefSection()
{
  m_tokens->NextValidToken();
  if (m_tokens->GetStringValue() != _T("xref"))
  {
    wxLogError(_("wxPdfParser::ParseXRefSection: xref subsection not found."));
    return NULL;
  }
  int start = 0;
  int end   = 0;
  int pos   = 0;
  int gen   = 0;
  while (true)
  {
    m_tokens->NextValidToken();
    if (m_tokens->GetStringValue() == _T("trailer")) break;
    if (m_tokens->GetTokenType() != TOKEN_NUMBER)
    {
      wxLogError(_("wxPdfParser::ParseXRefSection: Object number of the first object in this xref subsection not found."));
      return NULL;
    }
    start = m_tokens->GetIntValue();
    m_tokens->NextValidToken();
    if (m_tokens->GetTokenType() != TOKEN_NUMBER)
    {
      wxLogError(_("wxPdfParser::ParseXRefSection: Number of entries in this xref subsection not found."));
      return NULL;
    }
    end = m_tokens->GetIntValue() + start;
    if (start == 1)
    {
      // Work around broken writers that number the free-list head entry
      // (offset 0, generation 65535) as object 1 instead of 0: peek ahead
      // and shift the subsection down by one if that pattern is found.
      int back = m_tokens->Tell();
      m_tokens->NextValidToken();
      pos = m_tokens->GetIntValue();
      m_tokens->NextValidToken();
      gen = m_tokens->GetIntValue();
      if (pos == 0 && gen == 65535)
      {
        --start;
        --end;
      }
      m_tokens->Seek(back);
    }
    ReserveXRef(end);
    int k;
    for (k = start; k < end; ++k)
    {
      // Each entry is "offset generation n|f".
      wxPdfXRefEntry& xrefEntry = m_xref[k];
      m_tokens->NextValidToken();
      pos = m_tokens->GetIntValue();
      m_tokens->NextValidToken();
      gen = m_tokens->GetIntValue();
      m_tokens->NextValidToken();
      if (m_tokens->GetStringValue() == _T("n"))
      {
        // Only record the first definition seen for an object number
        // (earlier xref sections take precedence over later ones).
        if (xrefEntry.m_ofs_idx == 0 && xrefEntry.m_gen_ref == 0)
        {
          // TODO: if (pos == 0)
          //   wxLogError(_T("File position 0 cross-reference entry in this xref subsection"));
          xrefEntry.m_ofs_idx = pos;
          xrefEntry.m_gen_ref = gen;
          xrefEntry.m_type    = 1;
        }
      }
      else if (m_tokens->GetStringValue() == _T("f"))
      {
        // Free entry: marked with offset -1.
        if (xrefEntry.m_ofs_idx == 0 && xrefEntry.m_gen_ref == 0)
        {
          xrefEntry.m_ofs_idx = -1;
          xrefEntry.m_gen_ref = 0;
          xrefEntry.m_type    = 0;
        }
      }
      else
      {
        wxLogError(_("wxPdfParser::ParseXRefSection: Invalid cross-reference entry in this xref subsection."));
        return NULL;
      }
    }
  }
  wxPdfDictionary* trailer = (wxPdfDictionary*) ParseObject();
  // Guard against a missing/unparsable trailer: every other failure path of
  // this method returns NULL, so callers must already cope with that.
  if (trailer == NULL)
  {
    wxLogError(_("wxPdfParser::ParseXRefSection: Trailer dictionary not found."));
    return NULL;
  }
  // /Size may be absent in damaged files; only grow the table when present.
  wxPdfNumber* xrefSize = (wxPdfNumber*) trailer->Get(_T("/Size"));
  if (xrefSize != NULL)
  {
    ReserveXRef(xrefSize->GetInt());
  }
  // Hybrid-reference file: /XRefStm points at an additional xref stream.
  wxPdfObject* xrs = trailer->Get(_T("/XRefStm"));
  if (xrs != NULL && xrs->GetType() == OBJTYPE_NUMBER)
  {
    int loc = ((wxPdfNumber*) xrs)->GetInt();
    ParseXRefStream(loc, false);
  }
  return trailer;
}
// Consumes one buffer of HDMV (Blu-ray PGS) subtitle data.
// Segments may span multiple buffers, so partially received segments are
// accumulated in m_pSegBuffer across calls (m_nCurSegment / m_nSegBufferPos
// are persistent member state). rtStop is accepted for interface symmetry
// but not used here. Returns S_OK, or VFW_E_SAMPLE_REJECTED for an unknown
// segment type.
HRESULT CHdmvSub::ParseSample(BYTE* pData, int lSampleLen, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT hr = S_OK;

    if (pData) {
        CGolombBuffer SampleBuffer (pData, lSampleLen);

        while (!SampleBuffer.IsEOF()) {
            // Not in the middle of a segment: read a new 3-byte segment
            // header (1 byte type + 2 bytes size).
            if (m_nCurSegment == NO_SEGMENT) {
                HDMV_SEGMENT_TYPE nSegType = (HDMV_SEGMENT_TYPE)SampleBuffer.ReadByte();
                USHORT nUnitSize = SampleBuffer.ReadShort();
                lSampleLen -=3;

                switch (nSegType) {
                    case PALETTE :
                    case OBJECT :
                    case PRESENTATION_SEG :
                    case END_OF_DISPLAY :
                        // Segment types we decode: start accumulating.
                        m_nCurSegment = nSegType;
                        AllocSegment(nUnitSize);
                        break;
                    case WINDOW_DEF :
                    case INTERACTIVE_SEG :
                    case HDMV_SUB1 :
                    case HDMV_SUB2 :
                        // Ignored stuff...
                        SampleBuffer.SkipBytes(nUnitSize);
                        break;
                    default :
                        return VFW_E_SAMPLE_REJECTED;
                }
            }

            if (m_nCurSegment != NO_SEGMENT) {
                // Copy as much segment payload as this buffer provides.
                if (m_nSegBufferPos < m_nSegSize) {
                    int nSize = min(m_nSegSize - m_nSegBufferPos, lSampleLen);
                    SampleBuffer.ReadBuffer(m_pSegBuffer + m_nSegBufferPos, nSize);
                    m_nSegBufferPos += nSize;
                }

                // Segment complete: dispatch it to the per-type parser.
                if (m_nSegBufferPos >= m_nSegSize) {
                    CGolombBuffer SegmentBuffer(m_pSegBuffer, m_nSegSize);

                    switch (m_nCurSegment) {
                        case PALETTE :
                            TRACE_HDMVSUB(_T("CHdmvSub::ParseSample() : PALETTE\n"));
                            ParsePalette(&SegmentBuffer, m_nSegSize);
                            break;
                        case OBJECT :
                            TRACE_HDMVSUB(_T("CHdmvSub::ParseSample() : OBJECT\n"));
                            ParseObject(&SegmentBuffer, m_nSegSize);
                            break;
                        case PRESENTATION_SEG :
                            TRACE_HDMVSUB(_T("CHdmvSub::ParseSample() : PRESENTATION_SEG = [%10I64d], %s, size = %d\n"),
                                          rtStart, ReftimeToString(rtStart), m_nSegSize);
                            // Timing bookkeeping is delegated entirely to
                            // ParsePresentationSegment in this variant.
                            ParsePresentationSegment(&SegmentBuffer, rtStart);
                            break;
                        case WINDOW_DEF :
                            //TRACE_HDMVSUB(_T("CHdmvSub::ParseSample() : WINDOW_DEF = %10I64d, %S\n"), rtStart, ReftimeToString(rtStart));
                            break;
                        case END_OF_DISPLAY :
                            //TRACE_HDMVSUB(_T("CHdmvSub::ParseSample() : END_OF_DISPLAY = %10I64d, %S\n"), rtStart, ReftimeToString(rtStart));
                            break;
                        default :
                            TRACE_HDMVSUB(_T("CHdmvSub::ParseSample() : UNKNOWN Seg [%d] = [%10I64d], %s\n"),
                                          m_nCurSegment, rtStart, ReftimeToString(rtStart));
                    }

                    // Segment fully handled; look for the next header.
                    m_nCurSegment = NO_SEGMENT;
                }
            }
        }
    }

    return hr;
}
// Consumes one buffer of PGS (Blu-ray) subtitle data under the object lock.
// Segments may span multiple buffers, so partially received segments are
// accumulated in m_pSegBuffer across calls (m_nCurSegment / m_nSegBufferPos
// are persistent member state). rtStop is accepted for interface symmetry
// but not used here. Returns S_OK, or VFW_E_SAMPLE_REJECTED for an unknown
// segment type.
HRESULT CPGSSub::ParseSample(REFERENCE_TIME rtStart, REFERENCE_TIME rtStop, BYTE* pData, size_t nLen)
{
    CheckPointer(pData, E_POINTER);

    CAutoLock cAutoLock(&m_csCritSec);

    CGolombBuffer sampleBuffer(pData, nLen);

    while (!sampleBuffer.IsEOF()) {
        // Not in the middle of a segment: read a new 3-byte segment header
        // (1 byte type + 2 bytes size).
        if (m_nCurSegment == NO_SEGMENT) {
            HDMV_SEGMENT_TYPE nSegType = (HDMV_SEGMENT_TYPE)sampleBuffer.ReadByte();
            unsigned short nUnitSize = sampleBuffer.ReadShort();
            nLen -= 3;

            switch (nSegType) {
                case PALETTE:
                case OBJECT:
                case PRESENTATION_SEG:
                case END_OF_DISPLAY:
                    // Segment types we decode: start accumulating.
                    m_nCurSegment = nSegType;
                    AllocSegment(nUnitSize);
                    break;
                case WINDOW_DEF:
                case INTERACTIVE_SEG:
                case HDMV_SUB1:
                case HDMV_SUB2:
                    // Ignored stuff...
                    sampleBuffer.SkipBytes(nUnitSize);
                    break;
                default:
                    return VFW_E_SAMPLE_REJECTED;
            }
        }

        if (m_nCurSegment != NO_SEGMENT) {
            // Copy as much segment payload as this buffer provides.
            if (m_nSegBufferPos < m_nSegSize) {
                size_t nSize = std::min(m_nSegSize - m_nSegBufferPos, nLen);
                sampleBuffer.ReadBuffer(m_pSegBuffer + m_nSegBufferPos, nSize);
                m_nSegBufferPos += nSize;
            }

            // Segment complete: dispatch it to the per-type parser.
            if (m_nSegBufferPos >= m_nSegSize) {
                CGolombBuffer SegmentBuffer(m_pSegBuffer, m_nSegSize);

                switch (m_nCurSegment) {
                    case PALETTE:
                        TRACE_PGSSUB(_T("CPGSSub:PALETTE %s\n"), ReftimeToString(rtStart));
                        ParsePalette(&SegmentBuffer, m_nSegSize);
                        break;
                    case OBJECT:
                        TRACE_PGSSUB(_T("CPGSSub:OBJECT %s\n"), ReftimeToString(rtStart));
                        ParseObject(&SegmentBuffer, m_nSegSize);
                        break;
                    case PRESENTATION_SEG:
                        TRACE_PGSSUB(_T("CPGSSub:PRESENTATION_SEG %s (size=%d)\n"),
                                     ReftimeToString(rtStart), m_nSegSize);

                        // Presentation segments without a usable timestamp
                        // cannot be scheduled; drop them.
                        if (rtStart == INVALID_TIME) {
                            break;
                        }

                        // Update the timestamp for the previous segment
                        UpdateTimeStamp(rtStart);

                        // Parse the new presentation segment
                        ParsePresentationSegment(rtStart, &SegmentBuffer);
                        break;
                    case WINDOW_DEF:
                        //TRACE_PGSSUB(_T("CPGSSub:WINDOW_DEF %s\n"), ReftimeToString(rtStart));
                        break;
                    case END_OF_DISPLAY:
                        TRACE_PGSSUB(_T("CPGSSub:END_OF_DISPLAY %s\n"), ReftimeToString(rtStart));
                        // Enqueue the current presentation segment if any
                        EnqueuePresentationSegment();
                        break;
                    default:
                        TRACE_PGSSUB(_T("CPGSSub:UNKNOWN Seg %d %s\n"), m_nCurSegment, ReftimeToString(rtStart));
                }

                // Segment fully handled; look for the next header.
                m_nCurSegment = NO_SEGMENT;
            }
        }
    }

    return S_OK;
}
// Consumes one buffer of HDMV (Blu-ray PGS) subtitle data.
// Segments may span multiple buffers, so partially received segments are
// accumulated in m_pSegBuffer across calls (m_nCurSegment / m_nSegBufferPos
// are persistent member state). rtStop is accepted for interface symmetry
// but not used here. Returns S_OK, or VFW_E_SAMPLE_REJECTED for an unknown
// segment type.
// Note: TRACE_HDMVSUB takes its argument list double-parenthesized here —
// presumably a variadic-macro workaround; keep that form when editing.
HRESULT CHdmvSub::ParseSample(BYTE* pData, int lSampleLen, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT hr = S_OK;

    if (pData) {
        CGolombBuffer SampleBuffer(pData, lSampleLen);

        while (!SampleBuffer.IsEOF()) {
            // Not in the middle of a segment: read a new 3-byte segment
            // header (1 byte type + 2 bytes size).
            if (m_nCurSegment == NO_SEGMENT) {
                HDMV_SEGMENT_TYPE nSegType = (HDMV_SEGMENT_TYPE)SampleBuffer.ReadByte();
                unsigned short nUnitSize = SampleBuffer.ReadShort();
                lSampleLen -= 3;

                switch (nSegType) {
                    case PALETTE:
                    case OBJECT:
                    case PRESENTATION_SEG:
                    case END_OF_DISPLAY:
                        // Segment types we decode: start accumulating.
                        m_nCurSegment = nSegType;
                        AllocSegment(nUnitSize);
                        break;
                    case WINDOW_DEF:
                    case INTERACTIVE_SEG:
                    case HDMV_SUB1:
                    case HDMV_SUB2:
                        // Ignored stuff...
                        SampleBuffer.SkipBytes(nUnitSize);
                        break;
                    default:
                        return VFW_E_SAMPLE_REJECTED;
                }
            }

            if (m_nCurSegment != NO_SEGMENT) {
                // Copy as much segment payload as this buffer provides.
                if (m_nSegBufferPos < m_nSegSize) {
                    int nSize = min(m_nSegSize - m_nSegBufferPos, lSampleLen);
                    SampleBuffer.ReadBuffer(m_pSegBuffer + m_nSegBufferPos, nSize);
                    m_nSegBufferPos += nSize;
                }

                // Segment complete: dispatch it to the per-type parser.
                if (m_nSegBufferPos >= m_nSegSize) {
                    CGolombBuffer SegmentBuffer(m_pSegBuffer, m_nSegSize);

                    switch (m_nCurSegment) {
                        case PALETTE:
                            TRACE_HDMVSUB( (_T("CHdmvSub:PALETTE rtStart=%10I64d\n"), rtStart) );
                            ParsePalette(&SegmentBuffer, m_nSegSize);
                            break;
                        case OBJECT:
                            TRACE_HDMVSUB( (_T("CHdmvSub:OBJECT %lS\n"), ReftimeToCString(rtStart)) );
                            ParseObject(&SegmentBuffer, m_nSegSize);
                            break;
                        case PRESENTATION_SEG:
                            TRACE_HDMVSUB( (_T("CHdmvSub:PRESENTATION_SEG %lS (size=%d)\n"),
                                            ReftimeToCString(rtStart), m_nSegSize) );

                            // Enqueue the current presentation segment if any
                            EnqueuePresentationSegment(rtStart);
                            // Parse the new presentation segment
                            ParsePresentationSegment(rtStart, &SegmentBuffer);
                            break;
                        case WINDOW_DEF:
                            //TRACE_HDMVSUB( (_T("CHdmvSub:WINDOW_DEF %lS\n"), ReftimeToCString(rtStart)) );
                            break;
                        case END_OF_DISPLAY:
                            //TRACE_HDMVSUB( (_T("CHdmvSub:END_OF_DISPLAY %lS\n"), ReftimeToCString(rtStart)) );
                            break;
                        default:
                            TRACE_HDMVSUB( (_T("CHdmvSub:UNKNOWN Seg %d rtStart=0x%10dd\n"), m_nCurSegment, rtStart) );
                    }

                    // Segment fully handled; look for the next header.
                    m_nCurSegment = NO_SEGMENT;
                }
            }
        }
    }
    return hr;
}
// Consumes one media sample of DVB subtitle data.
// If the sample begins with a PES start code (little-endian 0xBD010000 ==
// bytes 00 00 01 BD, the private_stream_1 PES header), the 14-byte header is
// stripped and a PTS is extracted; otherwise the sample's own timestamps are
// used. Payload bytes are appended to the internal ring buffer and then
// scanned for 0x0F-sync'ed DVB subtitling segments, which are dispatched to
// the per-type parsers. Completed pages are collected in m_Pages.
// Returns the HRESULT of GetPointer(), or S_FALSE when a segment is still
// incomplete (it will be finished by a later sample).
HRESULT CDVBSub::ParseSample(IMediaSample* pSample)
{
    CheckPointer(pSample, E_POINTER);

    HRESULT hr;
    BYTE* pData = NULL;
    int nSize;
    DVB_SEGMENT_TYPE nCurSegment;

    hr = pSample->GetPointer(&pData);
    if (FAILED(hr) || pData == NULL) {
        return hr;
    }
    nSize = pSample->GetActualDataLength();

    if (*((LONG*)pData) == 0xBD010000) {
        CGolombBuffer gb(pData, nSize);

        gb.SkipBytes(4);
        WORD wLength = (WORD)gb.BitRead(16);
        UNREFERENCED_PARAMETER(wLength);

        if (gb.BitRead(2) != 2) {
            return E_FAIL;    // type
        }

        gb.BitRead(2);    // scrambling
        gb.BitRead(1);    // priority
        gb.BitRead(1);    // alignment
        gb.BitRead(1);    // copyright
        gb.BitRead(1);    // original
        BYTE fpts = (BYTE)gb.BitRead(1);    // fpts
        BYTE fdts = (BYTE)gb.BitRead(1);    // fdts
        gb.BitRead(1);    // escr
        gb.BitRead(1);    // esrate
        gb.BitRead(1);    // dsmtrickmode
        gb.BitRead(1);    // morecopyright
        gb.BitRead(1);    // crc
        gb.BitRead(1);    // extension
        gb.BitRead(8);    // hdrlen

        if (fpts) {
            // The 4-bit prefix must be '0011' when both PTS and DTS are
            // flagged, '0010' when only PTS is.
            BYTE b = (BYTE)gb.BitRead(4);
            if (!(fdts && b == 3 || !fdts && b == 2)) {
                ASSERT(0);
                return E_FAIL;
            }

            // 33-bit PTS split 3/15/15 with marker bits between the fields.
            REFERENCE_TIME pts = 0;
            pts |= gb.BitRead(3) << 30;
            MARKER;    // 32..30
            pts |= gb.BitRead(15) << 15;
            MARKER;    // 29..15
            pts |= gb.BitRead(15);
            MARKER;    // 14..0
            pts = 10000 * pts / 90;    // 90 kHz ticks -> 100 ns REFERENCE_TIME

            m_rtStart = pts;
            m_rtStop = pts + 1;
        } else {
            m_rtStart = INVALID_TIME;
            m_rtStop = INVALID_TIME;
        }

        nSize -= 14;
        pData += 14;
        // NOTE(review): these two calls overwrite the PTS just parsed above,
        // making the PES timestamp computation dead code — confirm whether
        // the sample/media times or the in-band PTS should win.
        pSample->GetTime(&m_rtStart, &m_rtStop);
        pSample->GetMediaTime(&m_rtStart, &m_rtStop);
    } else if (SUCCEEDED(pSample->GetTime(&m_rtStart, &m_rtStop))) {
        pSample->SetTime(&m_rtStart, &m_rtStop);
    }

    //FILE* hFile = fopen("D:\\Sources\\mpc-hc\\A garder\\TestSubRip\\dvbsub.dat", "ab");
    //if (hFile != NULL)
    //{
    //  //BYTE Buff[5] = {48};
    //  //*((DWORD*)(Buff+1)) = lSampleLen;
    //  //fwrite(Buff, 1, sizeof(Buff), hFile);
    //  fwrite(pData, 1, lSampleLen, hFile);
    //  fclose(hFile);
    //}

    if (AddToBuffer(pData, nSize) == S_OK) {
        CGolombBuffer gb(m_pBuffer + m_nBufferReadPos, m_nBufferWritePos - m_nBufferReadPos);
        int nLastPos = 0;    // read position after the last fully parsed segment

        while (!gb.IsEOF()) {
            // Every DVB subtitling segment starts with sync byte 0x0F.
            if (gb.ReadByte() == 0x0F) {
                TRACE_DVB("DVB - ParseSample\n");

                WORD wPageId;
                WORD wSegLength;

                nCurSegment = (DVB_SEGMENT_TYPE) gb.ReadByte();
                wPageId = gb.ReadShort();
                wSegLength = gb.ReadShort();

                // Segment not fully buffered yet: stop here and wait for
                // more data; nLastPos keeps the resume point.
                if (gb.RemainingSize() < wSegLength) {
                    hr = S_FALSE;
                    break;
                }

                switch (nCurSegment) {
                    case PAGE: {
                        CAutoPtr<DVB_PAGE> pPage;
                        ParsePage(gb, wSegLength, pPage);
                        // NOTE(review): pPage is dereferenced without a NULL
                        // check — confirm ParsePage always allocates it.

                        if (pPage->PageState == DPS_ACQUISITION) {
                            // New acquisition point: finish the previous page
                            // and start a fresh current page.
                            if (m_pCurrentPage != NULL) {
                                m_pCurrentPage->rtStop = max(m_pCurrentPage->rtStop, m_rtStart);
                                m_Pages.AddTail(m_pCurrentPage.Detach());
                            }
                            UpdateTimeStamp(m_rtStart);

                            m_pCurrentPage = pPage;
                            m_pCurrentPage->rtStart = m_rtStart;
                            m_pCurrentPage->rtStop = m_pCurrentPage->rtStart + m_pCurrentPage->PageTimeOut * 1000000;

                            TRACE_DVB("DVB - Page started %S, TimeOut = %d\n", ReftimeToString(m_rtStart), m_pCurrentPage->PageTimeOut);
                        } else {
                            TRACE_DVB("DVB - Page update\n");

                            // A page update only replaces the current page if
                            // that page has no regions yet (i.e. is empty).
                            if (m_pCurrentPage && !m_pCurrentPage->RegionCount) {
                                m_pCurrentPage = pPage;
                                m_pCurrentPage->rtStart = m_rtStart;
                                m_pCurrentPage->rtStop = m_pCurrentPage->rtStart + m_pCurrentPage->PageTimeOut * 1000000;

                                TRACE_DVB("DVB - Page started[update] %S, TimeOut = %d\n", ReftimeToString(m_rtStart), m_pCurrentPage->PageTimeOut);
                            }
                        }
                    }
                    break;
                    case REGION:
                        ParseRegion(gb, wSegLength);
                        TRACE_DVB("DVB - Region\n");
                        break;
                    case CLUT:
                        ParseClut(gb, wSegLength);
                        TRACE_DVB("DVB - Clut\n");
                        break;
                    case OBJECT:
                        ParseObject(gb, wSegLength);
                        TRACE_DVB("DVB - Object\n");
                        break;
                    case DISPLAY:
                        ParseDisplay(gb, wSegLength);
                        TRACE_DVB("DVB - Display\n");
                        break;
                    case END_OF_DISPLAY:
                        // Close the current page unless it started at this
                        // very timestamp (nothing was displayed yet).
                        if (m_pCurrentPage != NULL && (m_pCurrentPage->rtStart != m_rtStart)) {
                            m_pCurrentPage->rtStop = max(m_pCurrentPage->rtStop, m_rtStart);
                            TRACE_DVB("DVB - End display %S - %S\n",
                                      ReftimeToString(m_pCurrentPage->rtStart),
                                      ReftimeToString(m_pCurrentPage->rtStop));
                            m_Pages.AddTail(m_pCurrentPage.Detach());
                        }
                        break;
                    default:
                        break;
                }
                nLastPos = gb.GetPos();
            }
        }
        // Consume everything up to the last complete segment; any trailing
        // partial segment stays buffered for the next call.
        m_nBufferReadPos += nLastPos;
    }

    return hr;
}