Example #1
EXPORT_C TBool TBtreeInlineIndexOrg::Update(TAny *aNode,TInt aPos,const TDesC8& anEntry) const
	{
	__ASSERT_DEBUG(anEntry.Size()<=KeySize(),Panic(EBadEntrySize));
	__ASSERT_DEBUG(aPos<Node(aNode)->iHead.iCount,Panic(EBadEntryPos));
	Mem::Copy(Entry(Node(aNode),aPos)->iKey,anEntry.Ptr(),KeySize());
	return ETrue;
	}
Example #2
void IOBus::ElementRealKey( const void* localKey, const int &localKeySize, char *globalkey, int &globalkeySize )
{
	globalkeySize = 0;
	// Compose the global key as "<bus key>::<local key>".
	if ( DataType::IsValue(m_elementKeyType)
		|| DataType::stream == m_elementKeyType )
	{
		// Value/stream element keys: append the local key bytes after this bus's key.
		memcpy( &globalkey[globalkeySize], Key(), KeySize() );
		globalkeySize += KeySize();
		memcpy( &globalkey[globalkeySize], "::", 2 );
		globalkeySize += 2;
		memcpy( &globalkey[globalkeySize], (char*)&localKey, localKeySize );
		globalkeySize += localKeySize;
		globalkey[globalkeySize] = 0;
	}
	else
	{
		// Otherwise the local key identifies another IOBus: append that bus's own key.
		IOBus *p = (IOBus*)localKey;
		memcpy( &globalkey[globalkeySize], Key(), KeySize() );
		globalkeySize += KeySize();
		memcpy( &globalkey[globalkeySize], "::", 2 );
		globalkeySize += 2;
		memcpy( &globalkey[globalkeySize], p->Key(), p->KeySize() );
		globalkeySize += p->KeySize();
		globalkey[globalkeySize] = 0;
	}
}
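ElementRealKey builds a composite key of the form "<bus key>::<local key>", so the caller's globalkey buffer needs at least KeySize() + 2 + the local key size + 1 bytes (the trailing NUL). Below is a minimal, self-contained sketch of the same concatenation pattern; BuildGlobalKey and its parameters are hypothetical stand-ins and not part of IOBus.

#include <cstring>
#include <cstdio>

// Mirrors the memcpy sequence in IOBus::ElementRealKey for a byte-wise local key.
static int BuildGlobalKey(const char* busKey, int busKeySize,
                          const char* localKey, int localKeySize,
                          char* out)
{
    int n = 0;
    std::memcpy(out + n, busKey, busKeySize);     // parent/bus key
    n += busKeySize;
    std::memcpy(out + n, "::", 2);                // separator
    n += 2;
    std::memcpy(out + n, localKey, localKeySize); // element's own key
    n += localKeySize;
    out[n] = 0;                                   // NUL terminator, as in the original
    return n;                                     // returned size excludes the terminator
}

int main()
{
    char buf[64];
    int size = BuildGlobalKey("bus0", 4, "temp", 4, buf);
    std::printf("%s (%d bytes)\n", buf, size);    // prints: bus0::temp (10 bytes)
    return 0;
}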
Example #3
TBtreeInlineIndexOrg::SNode* TBtreeInlineIndexOrg::DoRedistribute(TAny *aLeftNode,TAny *aRightNode,const TDesC8& aPivot,TBtreePivot& aNewPivot,TInt aInsertPos) const
//
// Even out the distribution of entries in LeftNode and RightNode.
// If aInsertPos>=0 we want to insert at this cumulative position, so take it into account for redistribution.
// If the total number of entries is <=iMaxEntries or >iMaxEntries*2 then return 0.
// Otherwise return the node into which insertion should take place.
// If the insertion should be promoted, the insert position is beyond the end of the left node; otherwise
// the new pivot is copied to aNewPivot.
//
	{
	SNode* const pl=Node(aLeftNode);
	SNode* const pr=Node(aRightNode);
	SNode *insertNode=pr;
	TInt lCount=pl->iHead.iCount;
	TInt rCount=pr->iHead.iCount;
	TInt total=lCount+rCount+1;			// including pivot entry
	TInt left=total>>1;
	if (aInsertPos>=0)
		{	// call from InsertOverflow
		__ASSERT_DEBUG(aInsertPos<=total,Panic(EBadEntryPos));
		if (total>iMaxEntries<<1)
			return NULL;		// no space to insert
		if (aInsertPos<=left)
			{
			if (aInsertPos<left)
				--left;
			insertNode=pl;
			}
		}
	else
		{ // from Redistribute
		if (total<=iMaxEntries)
			return NULL;		// underflow state
		}
	pl->iHead.iCount=left;
	pr->iHead.iCount=total-left-1;		// pivot not included
	TInt pSize=aPivot.Size();
	__ASSERT_DEBUG(pSize<=KeySize(),Panic(EBadEntrySize));
	if (lCount>left)
		{ // move right
		TInt move=lCount-left;
		Mem::Copy(Entry(pr,move),pr->iEntries,rCount*iEntrySize+sizeof(TPageRef));
		TUint8 *pp=Mem::Copy(pr->iEntries,Entry(pl,left+1),(move-1)*iEntrySize+sizeof(TPageRef));
		Mem::Copy(pp,aPivot.Ptr(),pSize);
		aNewPivot.Copy(Entry(pl,left)->iKey,KeySize());		// new pivot
		}
	else if (lCount<left)
		{ // move left
		TInt move=left-lCount-1;
		TUint8 *pp=Mem::Copy(Entry(pl,lCount)->iKey,aPivot.Ptr(),pSize);
		Mem::Copy(pp,pr->iEntries,move*iEntrySize+sizeof(TPageRef));
		aNewPivot.Copy(Entry(pr,move)->iKey,KeySize());
		Mem::Copy(pr->iEntries,Entry(pr,move+1),(rCount-move-1)*iEntrySize+sizeof(TPageRef));
		}
	else
		{	// should we ever get here?	(lCount==left)
		aNewPivot=aPivot;
		}
	return insertNode;
	}
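The balancing arithmetic treats the pivot as one extra entry: total = lCount + rCount + 1, the left node keeps total/2 of them (one fewer when the pending insert falls strictly left of that midpoint), and the function returns NULL when the pair is too full to absorb an insert or too empty to be worth redistributing. The sketch below captures just that arithmetic, ignoring the entry copying; PickTarget and Target are hypothetical names, not part of TBtreeInlineIndexOrg.

enum class Target { None, Left, Right };

// lCount, rCount, insertPos and maxEntries mirror the locals above;
// newLeft receives the number of entries the left node will keep.
static Target PickTarget(int lCount, int rCount, int insertPos, int maxEntries, int& newLeft)
{
    const int total = lCount + rCount + 1;   // the pivot counts as an entry
    newLeft = total >> 1;
    if (insertPos >= 0) {                    // InsertOverflow path
        if (total > maxEntries * 2)
            return Target::None;             // no space to insert: caller must split
        if (insertPos <= newLeft) {
            if (insertPos < newLeft)
                --newLeft;                   // leave room for the pending insert
            return Target::Left;
        }
        return Target::Right;
    }
    // Redistribute path: below iMaxEntries the pair is in an underflow state.
    return total <= maxEntries ? Target::None : Target::Right;
}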
Example #4
EXPORT_C void TBtreeInlineIndexOrg::InsertSplit(TAny *aLeftNode,TAny *aRightNode,TInt aPos,const TDesC8& anEntry,TPageRef aChild,TBtreePivot& aPromote) const
//
// Part of the contract is not to use aPromote before anEntry.
// We know that aLeftNode is full, so prepare the right node and use insert-overflow.
//
	{
	__ASSERT_DEBUG(Node(aLeftNode)->iHead.iCount==iMaxEntries,Panic(EIllegalSplit));
	__ASSERT_DEBUG(Node(aRightNode)->iHead.iCount==0,Panic(EIllegalSplit));
	SNode* const pl=Node(aLeftNode);
	SNode* const pr=Node(aRightNode);
	SEntry *pe=Entry(pl,pl->iHead.iCount);
	--pl->iHead.iCount;
	Entry(pr,0)->iChild=pe->iChild;
	TPtrC8 pivot((TUint8*)pe-KeySize(),KeySize());
	InsertOverflow(aLeftNode,aRightNode,aPos,ETrue,anEntry,aChild,pivot,aPromote);
	}
Example #5
EXPORT_C TBool TBtreeInlineIndexOrg::Insert(TAny *aNode,TInt aPos,const TDesC8& anEntry,TPageRef aChild) const
	{
	SNode* const pn=Node(aNode);
	__ASSERT_DEBUG(aPos<=pn->iHead.iCount,Panic(EBadEntryPos));
	if (pn->iHead.iCount==iMaxEntries)
		return EFalse;
	TUint8* pe=Entry(pn,aPos)->iKey;
	Mem::Copy(pe+iEntrySize,pe,iEntrySize*(pn->iHead.iCount-aPos));
	TInt size=anEntry.Size();
	__ASSERT_ALWAYS(size<=KeySize(),Panic(EBadEntrySize));
	*(TPageRef*)Mem::Copy(pe,anEntry.Ptr(),size)=aChild;
	++pn->iHead.iCount;
	return ETrue;
	}
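The copies in these examples move multiples of iEntrySize plus one extra sizeof(TPageRef), and Insert writes a key immediately followed by a child reference. That suggests (an inference, not stated in the source) each inline entry is a TPageRef followed by KeySize() key bytes, laid out as child_0 | key_0 | child_1 | key_1 | ... | key_{n-1} | child_n, so a node with n keys carries n+1 children. The offset sketch below works under that assumption; InlineIndexLayout and PageRef are hypothetical stand-ins.

#include <cstddef>
#include <cstdint>

using PageRef = std::uint32_t;            // stand-in for TPageRef

struct InlineIndexLayout {
    std::size_t keySize;                  // KeySize() of the index organization

    // One entry = child reference + inline key bytes.
    std::size_t entrySize() const { return sizeof(PageRef) + keySize; }
    // Entry i starts i*entrySize() bytes into the entry array.
    std::size_t entryOffset(int i) const { return i * entrySize(); }
    // Its key sits right after its child reference.
    std::size_t keyOffset(int i) const { return entryOffset(i) + sizeof(PageRef); }
    // n keys plus the trailing child reference: the "+ sizeof(TPageRef)"
    // that appears in the bulk copies above.
    std::size_t bytesForKeys(int n) const { return n * entrySize() + sizeof(PageRef); }
};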
Example #6
EXPORT_C void TBtreeInlineIndexOrg::Concatenate(TAny *aLeftNode,const TAny *aRightNode,const TDesC8& aPivot) const
//
// Join LeftNode and RightNode together in LeftNode.
// The contract says that it will fit.
//
	{
	SNode* const pl=Node(aLeftNode);
	const SNode* const pr=Node(aRightNode);
	TInt rCount=pr->iHead.iCount;
	TInt lCount=pl->iHead.iCount;
	__ASSERT_DEBUG(lCount+rCount+1<=iMaxEntries,Panic(ECannotConcatenate));
	TInt pSize=aPivot.Size();
	__ASSERT_DEBUG(pSize<=KeySize(),Panic(EBadEntrySize));
	TUint8* pp=Mem::Copy(Entry(pl,lCount)->iKey,aPivot.Ptr(),pSize);
	Mem::Copy(pp,pr->iEntries,rCount*iEntrySize+sizeof(TPageRef));
	pl->iHead.iCount+=rCount+1;
	}
Example #7
void GrStyle::WriteKey(uint32_t *key, const GrStyle &style, Apply apply, SkScalar scale,
                       uint32_t flags) {
    SkASSERT(key);
    SkASSERT(KeySize(style, apply) >= 0);
    GR_STATIC_ASSERT(sizeof(uint32_t) == sizeof(SkScalar));

    int i = 0;
    // The scale can influence both the path effect and stroking. We want to preserve the
    // property that the following two are equal:
    // 1. WriteKey with apply == kPathEffectAndStrokeRec
    // 2. WriteKey with apply == kPathEffectOnly followed by WriteKey of a GrStyle made
    //    from SkStrokeRec output by the path effect (and no additional path effect).
    // Since the scale can affect both parts of 2 we write it into the key twice.
    if (style.isDashed()) {
        GR_STATIC_ASSERT(sizeof(style.dashPhase()) == sizeof(uint32_t));
        SkScalar phase = style.dashPhase();
        memcpy(&key[i++], &scale, sizeof(SkScalar));
        memcpy(&key[i++], &phase, sizeof(SkScalar));

        int32_t count = style.dashIntervalCnt();
        // Dash count should always be even.
        SkASSERT(0 == (count & 0x1));
        const SkScalar *intervals = style.dashIntervals();
        int intervalByteCnt = count * sizeof(SkScalar);
        memcpy(&key[i], intervals, intervalByteCnt);
        i += count;
    } else {
        SkASSERT(!style.pathEffect());
    }

    if (Apply::kPathEffectAndStrokeRec == apply && style.strokeRec().needToApply()) {
        memcpy(&key[i++], &scale, sizeof(SkScalar));
        enum {
            kStyleBits = 2,
            kJoinBits = 2,
            kCapBits = 32 - kStyleBits - kJoinBits,

            kJoinShift = kStyleBits,
            kCapShift = kJoinShift + kJoinBits,
        };
        GR_STATIC_ASSERT(SkStrokeRec::kStyleCount <= (1 << kStyleBits));
        GR_STATIC_ASSERT(SkPaint::kJoinCount <= (1 << kJoinBits));
        GR_STATIC_ASSERT(SkPaint::kCapCount <= (1 << kCapBits));
        // The cap type only matters for unclosed shapes. However, a path effect could unclose
        // the shape before it is stroked.
        SkPaint::Cap cap;
        if ((flags & kClosed_KeyFlag) && !style.pathEffect()) {
            cap = SkPaint::kButt_Cap;
        } else {
            cap = style.strokeRec().getCap();
        }
        key[i++] = style.strokeRec().getStyle() |
                   style.strokeRec().getJoin() << kJoinShift |
                   cap << kCapShift;

        SkScalar scalar;
        // Miter limit only affects miter joins
        scalar = SkPaint::kMiter_Join == style.strokeRec().getJoin()
                 ? style.strokeRec().getMiter()
                 : -1.f;
        memcpy(&key[i++], &scalar, sizeof(scalar));

        scalar = style.strokeRec().getWidth();
        memcpy(&key[i++], &scalar, sizeof(scalar));
    }
    SkASSERT(KeySize(style, apply) == i);
}
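WriteKey ends by asserting that the number of uint32_t words it wrote equals KeySize(style, apply), so any key-size computation has to mirror the branches above: two words plus one per dash interval when dashed, plus four more when the stroke settings are applied and actually need applying. The mirror below is a hypothetical sketch, not necessarily Skia's real GrStyle::KeySize; the -1 return for a non-dash path effect is an assumption drawn from the SkASSERT in the else branch.

static int CountKeyWords(const GrStyle& style, GrStyle::Apply apply) {
    int words = 0;
    if (style.isDashed()) {
        words += 2;                        // scale + dash phase
        words += style.dashIntervalCnt();  // one word per dash interval
    } else if (style.pathEffect()) {
        return -1;                         // assumption: such styles cannot be keyed
    }
    if (GrStyle::Apply::kPathEffectAndStrokeRec == apply &&
        style.strokeRec().needToApply()) {
        words += 1;                        // scale, written a second time
        words += 1;                        // packed style | join | cap word
        words += 2;                        // miter limit + stroke width
    }
    return words;                          // matches the final index i in WriteKey
}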
Example #8
std::uint32_t LegacySymmetricProvider::IVSize(const Mode Mode)
{
    return KeySize(Mode);
}
Example #9
EXPORT_C TPtrC8 TBtreeInlineIndexOrg::Entry(const TAny* aNode,TInt aPos) const
	{
	return TPtrC8((const TUint8*)EntryPtr(aNode,aPos),KeySize());
	}
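Entry() wraps the KeySize() bytes of one key in a non-owning TPtrC8, so callers can compare or copy a key without knowing the node layout. A hedged usage sketch follows; CompareAt, aOrg, aNode, aPos and aSearchKey are hypothetical, while TDesC8::Compare is the standard Symbian descriptor comparison.

TInt CompareAt(const TBtreeInlineIndexOrg& aOrg, const TAny* aNode, TInt aPos, const TDesC8& aSearchKey)
	{
	TPtrC8 key = aOrg.Entry(aNode, aPos);	// KeySize() bytes, no copy made
	return aSearchKey.Compare(key);			// <0, 0 or >0, as for any TDesC8
	}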