Example #1
// Copies the body of a [GLSL]...[/GLSL] block into sourceCode, stopping when endtag is reached.
void Shader::processShader(StreamReader* reader, const char* endtag, ByteArray* sourceCode)
{
	while (!reader->endOfStream())
	{
		ByteArray* line = reader->readLineToBytes();

		if (line)
		{
			line->trim();

			if (line->equals(endtag))
			{
				return;
			}
			else if (line->equals("[GLSL]"))
			{
				while (!reader->endOfStream())
				{
					ByteArray* glslLine = reader->readLineToBytes();

					if (glslLine)
					{
						glslLine->trim();

						if (glslLine->equals("[/GLSL]"))
						{
							break;
						}
						else
						{
							sourceCode->append(glslLine->cstr());
						}
					}
				}				
			}
		}
	}
}
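
For reference, here is a minimal sketch of the tag-based source format this parser appears to consume. The tag names come directly from the code above; the GLSL body itself is purely illustrative:

[VERTEXSHADER]
[GLSL]
void main()
{
	gl_Position = vec4(0.0);
}
[/GLSL]
[/VERTEXSHADER]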
Example #2
void Shader::load(StreamReader* reader)
{
	DebugLog::print("Parsing shader...");

	reader->retain();
	while (!reader->endOfStream())
	{
		ByteArray* line = reader->readLineToBytes();

		if (line)
		{
			line->trim();

			if (line->equals("[EFFECT]"))
			{
				processEffectInfo(reader);
			}
			else if (line->equals("[VERTEXSHADER]"))
			{
				m_strVertexShader = ByteArray::create(false);
				processShader(reader, "[/VERTEXSHADER]", m_strVertexShader);
			}
			else if (line->equals("[FRAGMENTSHADER]"))
			{
				m_strFragmentShader = ByteArray::create(false);
				processShader(reader, "[/FRAGMENTSHADER]", m_strFragmentShader);
			}
		}		
	}

	DebugLog::print("Compiling shader...");
	build();
	reader->release();

	DebugLog::print("Done!");
}
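
A minimal usage sketch. Neither Shader construction nor StreamReader creation is shown in this codebase, so both calls below are hypothetical stand-ins:

// Hypothetical setup; substitute however readers and shaders are created in this engine.
StreamReader* reader = StreamReader::createFromFile("effects/diffuse.shader"); // assumed API
Shader* shader = new Shader(); // assumed constructor
shader->load(reader); // parses the [EFFECT] and shader blocks, then compiles via build()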
Example #3
void Shader::processEffectInfo(StreamReader* reader)
{
	while (!reader->endOfStream())
	{
		ByteArray* line = reader->readLineToBytes();

		if (line)
		{
			line->trim();

			if (line->equals("[/EFFECT]"))
			{
				return;
			}
			else
			{
				List* params = line->split(NULL, 0);

				if (params->count() >= 2)
				{
					ByteArray* param0 = (ByteArray*)params->get(0);
					ByteArray* param1 = (ByteArray*)params->get(1);
					param0->toUpper();					

					// Name
					if (param0->equals("NAME"))
					{
						m_name = param1;
						m_name->retain();
					}
					// Depth sorting
					else if (param0->equals("RENDERQUEUE"))
					{
						m_renderQueue = parseRenderQueueFromString(param1);
					}
					// Z-Writing
					else if (param0->equals("ZWRITE"))
					{
						m_isZWriting = param1->equalsIgnoreCase("On");
					}
					// Face Culling
					else if (param0->equals("FACECULL"))
					{
						m_faceCullingMode = cullModeFromString(param1);						
					}
					else if (param0->equals("ZTEST"))
					{
						if (param1->equalsIgnoreCase("Off"))
						{
							m_depthFunc = GL_ALWAYS;
						}
						else
						{
							m_depthFunc = depthFuncFromString(param1);
						}
					}
					else if (params->count() >= 3)
					{
						ByteArray* param2 = (ByteArray*)params->get(2);

						if (param0->equals("BLENDING"))
						{
							m_isAlphaBlending = true;							
							m_blendingSource = alphaFactorFromString(param1);
							m_blendingDest = alphaFactorFromString(param2);
						}
						else if (param0->equals("ATTRIBUTE") || param0->equals("UNIFORM"))
						{
							SPVRTPFXUniform uniform;

							// Variable name
							uniform.sValueName = param1;
							param1->retain();

							// Strip the trailing semantic index digit, if present
							ByteArray* temp = ByteArray::create(param2->cstr());
							if (ByteArray::isNumeric(temp->cstr()[param2->length() - 1]))
							{
								temp->subString(0, param2->length() - 1);
							}

							int len = 0;
							const SPVRTPFXUniformSemantic* semantics = PVRTPFXSemanticsGetSemanticList();
							uniform.nSemantic = 0;
							for (int i = 0; i < ePVRTPFX_NumSemantics; ++i)
							{
								if (temp->equalsIgnoreCase(semantics[i].p))
								{
									len = (int)strlen(semantics[i].p);
									uniform.nSemantic = i;
									break;
								}								
							}		
	
							uniform.nIdx = len < param2->length() ? param2->cstr()[len] - '0' : 0;

							// Location to be retrieved when the shader is compiled
							uniform.nLocation = 0;

							m_uniforms->add(uniform);
						}
					}
				}
			}
		}
	}
}
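
For reference, a sketch of an [EFFECT] block this parser would accept. The keywords are taken from the code above, but the value spellings (queue names, depth functions, cull modes, blend factors, semantic names) depend on helpers not shown here, so treat them as illustrative:

[EFFECT]
NAME        SimpleDiffuse
RENDERQUEUE Geometry
ZWRITE      On
ZTEST       LEqual
FACECULL    Back
BLENDING    SrcAlpha OneMinusSrcAlpha
ATTRIBUTE   myVertex    POSITION
UNIFORM     myMVPMatrix WORLDVIEWPROJECTION
UNIFORM     sTexture    TEXTURE0
[/EFFECT]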
Example #4
/// Builds an index with payloads in the given Directory and runs a series of
/// checks to verify the payload encoding
static void encodingTest(const DirectoryPtr& dir) {
    PayloadAnalyzerPtr analyzer = newLucene<PayloadAnalyzer>();
    IndexWriterPtr writer = newLucene<IndexWriter>(dir, analyzer, true, IndexWriter::MaxFieldLengthLIMITED);

    // should be kept in sync with the value in TermInfosWriter
    int32_t skipInterval = 16;

    int32_t numTerms = 5;
    String fieldName = L"f1";

    int32_t numDocs = skipInterval + 1;
    // create content for the test documents with just a few terms
    Collection<TermPtr> terms = generateTerms(fieldName, numTerms);
    StringStream sb;
    for (Collection<TermPtr>::iterator term = terms.begin(); term != terms.end(); ++term) {
        sb << (*term)->text() << L" ";
    }
    String content = sb.str();

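    // Expected total payload bytes: the first loop below adds 2 * numDocs docs whose
    // numTerms payloads are 1 byte each; the second adds numDocs docs where doc i
    // carries numTerms payloads of i bytes each (summing to numDocs * (numDocs - 1) / 2)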
    int32_t payloadDataLength = numTerms * numDocs * 2 + numTerms * numDocs * (numDocs - 1) / 2;
    ByteArray payloadData = generateRandomData(payloadDataLength);

    DocumentPtr d = newLucene<Document>();
    d->add(newLucene<Field>(fieldName, content, Field::STORE_NO, Field::INDEX_ANALYZED));

    // add the same document multiple times to have the same payload lengths for all
    // occurrences within two consecutive skip intervals
    int32_t offset = 0;
    for (int32_t i = 0; i < 2 * numDocs; ++i) {
        analyzer->setPayloadData(fieldName, payloadData, offset, 1);
        offset += numTerms;
        writer->addDocument(d);
    }

    // make sure we create more than one segment to test merging
    writer->commit();

    for (int32_t i = 0; i < numDocs; ++i) {
        analyzer->setPayloadData(fieldName, payloadData, offset, i);
        offset += i * numTerms;
        writer->addDocument(d);
    }

    writer->optimize();
    // flush
    writer->close();

    // Verify the index
    IndexReaderPtr reader = IndexReader::open(dir, true);

    ByteArray verifyPayloadData(ByteArray::newInstance(payloadDataLength));
    offset = 0;
    Collection<TermPositionsPtr> tps = Collection<TermPositionsPtr>::newInstance(numTerms);
    for (int32_t i = 0; i < numTerms; ++i) {
        tps[i] = reader->termPositions(terms[i]);
    }

    while (tps[0]->next()) {
        for (int32_t i = 1; i < numTerms; ++i) {
            tps[i]->next();
        }
        int32_t freq = tps[0]->freq();

        for (int32_t i = 0; i < freq; ++i) {
            for (int32_t j = 0; j < numTerms; ++j) {
                tps[j]->nextPosition();
                tps[j]->getPayload(verifyPayloadData, offset);
                offset += tps[j]->getPayloadLength();
            }
        }
    }

    for (int32_t i = 0; i < numTerms; ++i) {
        tps[i]->close();
    }

    EXPECT_TRUE(payloadData.equals(verifyPayloadData));

    // test lazy skipping
    TermPositionsPtr tp = reader->termPositions(terms[0]);
    tp->next();
    tp->nextPosition();
    // now we don't read this payload
    tp->nextPosition();
    EXPECT_EQ(1, tp->getPayloadLength());
    ByteArray payload = tp->getPayload(ByteArray(), 0);
    EXPECT_EQ(payload[0], payloadData[numTerms]);
    tp->nextPosition();

    // we don't read this payload and skip to a different document
    tp->skipTo(5);
    tp->nextPosition();
    EXPECT_EQ(1, tp->getPayloadLength());
    payload = tp->getPayload(ByteArray(), 0);
    EXPECT_EQ(payload[0], payloadData[5 * numTerms]);

    // Test different lengths at skip points
    tp->seek(terms[1]);
    tp->next();
    tp->nextPosition();
    EXPECT_EQ(1, tp->getPayloadLength());
    tp->skipTo(skipInterval - 1);
    tp->nextPosition();
    EXPECT_EQ(1, tp->getPayloadLength());
    tp->skipTo(2 * skipInterval - 1);
    tp->nextPosition();
    EXPECT_EQ(1, tp->getPayloadLength());
    tp->skipTo(3 * skipInterval - 1);
    tp->nextPosition();
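    // docs added by the second loop are numbered 2 * numDocs + i and carry payloads
    // of length i, so the payloads of doc (3 * skipInterval - 1) have length
    // 3 * skipInterval - 2 * numDocs - 1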
    EXPECT_EQ(3 * skipInterval - 2 * numDocs - 1, tp->getPayloadLength());

    // Test multiple call of getPayload()
    tp->getPayload(ByteArray(), 0);

    // it is forbidden to call getPayload() more than once without calling nextPosition()
    try {
        tp->getPayload(ByteArray(), 0);
    } catch (IOException& e) {
        EXPECT_TRUE(check_exception(LuceneException::IO)(e));
    }

    reader->close();

    // test long payload
    analyzer = newLucene<PayloadAnalyzer>();
    writer = newLucene<IndexWriter>(dir, analyzer, true, IndexWriter::MaxFieldLengthLIMITED);
    String singleTerm = L"lucene";

    d = newLucene<Document>();
    d->add(newLucene<Field>(fieldName, singleTerm, Field::STORE_NO, Field::INDEX_ANALYZED));
    // add a payload whose length is greater than the buffer size of BufferedIndexOutput
    payloadData = generateRandomData(2000);
    analyzer->setPayloadData(fieldName, payloadData, 100, 1500);
    writer->addDocument(d);

    writer->optimize();
    // flush
    writer->close();

    reader = IndexReader::open(dir, true);
    tp = reader->termPositions(newLucene<Term>(fieldName, singleTerm));
    tp->next();
    tp->nextPosition();

    verifyPayloadData.resize(tp->getPayloadLength());
    tp->getPayload(verifyPayloadData, 0);
    ByteArray portion(ByteArray::newInstance(1500));
    MiscUtils::arrayCopy(payloadData.get(), 100, portion.get(), 0, 1500);

    EXPECT_TRUE(portion.equals(verifyPayloadData));

    reader->close();
}
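
The helpers generateTerms() and generateRandomData() are test fixtures that are not shown above. Below is a hypothetical sketch of what they might look like, inferred only from their call sites; the real fixtures may differ:

// Hypothetical sketches inferred from the call sites above.
static Collection<TermPtr> generateTerms(const String& fieldName, int32_t n) {
    // n distinct terms in the given field
    Collection<TermPtr> terms = Collection<TermPtr>::newInstance(n);
    for (int32_t i = 0; i < n; ++i) {
        terms[i] = newLucene<Term>(fieldName, L"term" + StringUtils::toString(i));
    }
    return terms;
}

static ByteArray generateRandomData(int32_t n) {
    // n random payload bytes
    ByteArray data(ByteArray::newInstance(n));
    RandomPtr random = newLucene<Random>();
    for (int32_t i = 0; i < n; ++i) {
        data[i] = (uint8_t)random->nextInt(256);
    }
    return data;
}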