ot_u8 vworm_write(vaddr addr, ot_u16 data) {
#if ((VWORM_SIZE > 0) && (OT_FEATURE_VLNVWRITE == ENABLED))
    /// Direct-mapped vworm write: translate the virtual address into a
    /// physical halfword pointer and mark it, bypassing any X2 block
    /// indirection.  Returns the result of vworm_mark_physical().
    ///
    /// Fix: the original used ("VLC_" __LINE__), which does not compile --
    /// __LINE__ expands to an integer constant, not a string literal, so it
    /// cannot be pasted onto a string.  It must go through the standard
    /// two-level stringification idiom first.
#   ifndef VLC_STR
#   define VLC_STR2(x)  #x
#   define VLC_STR(x)   VLC_STR2(x)
#   endif
    SEGFAULT_CHECK(addr, in_vworm, 7, "VLC_" VLC_STR(__LINE__));

    // NOTE(review): this function has the same name as the logical vworm_write
    // later in the file; one of them is presumably meant to be vworm_mark.
    // Name kept unchanged to preserve the external interface.
    return vworm_mark_physical( (ot_u16*)(((ot_u32)addr)+VWORM_BASE_PHYSICAL), data );
#else
    return 0;
#endif
}
ot_u8 vworm_save( ) {
#if ((VWORM_SIZE > 0) && (OT_FEATURE_VLNVWRITE == ENABLED))
    /// @note init & save processes have not been tested enough.
    /// Saves the state of the vworm onto the last physical block, which may
    /// require recombination before being able to be used.
    ///
    /// Layout of the save image (written into the last physical page):
    ///   [num primary blocks][num fallow blocks][raw X2table contents]
    /// The first two fields are currently disabled (commented out below) but
    /// their slots are still reserved via the s_ptr++ increments.
    ///
    /// Returns the OR of all vworm_mark_physical() results (0 on success,
    /// assuming vworm_mark_physical follows that convention -- TODO confirm).
    ot_u8 test = 0;
    ot_int i;
    ot_u16* b_ptr;
    // s_ptr: physical address of the last page (primary + fallow region end)
    ot_u16* s_ptr = (ot_u16*)(VWORM_BASE_PHYSICAL +
                    (VWORM_PAGESIZE*(VWORM_PRIMARY_PAGES+VWORM_FALLOW_PAGES-1)));

    FLASH_Unlock();

    /// 1. look through used blocks to see if the last physical block is
    ///    somewhere inside.  In this case, we need to recombine it.
    //  (recombination erases the old pages and pushes them onto the fallow
    //  list, freeing the save page -- see sub_recombine_block)
    for (i=0; i<VWORM_PRIMARY_PAGES; i++) {
        if ( (X2table.block[i].primary == s_ptr) ||
             (X2table.block[i].ancillary == s_ptr) ) {
            sub_recombine_block(&X2table.block[i], 0, 0);
            break;
        }
    }

    /// 2. The last block is now in the list of fallows, either because it was
    ///    already there or because it was just erased and fallowed following
    ///    recombination.

    // 2a. Write [Number of Primary blocks]
    //     (write disabled, but the slot is still skipped so the layout of the
    //      rest of the image is unchanged)
    //test |= vworm_mark_physical(s_ptr, VWORM_PRIMARY_PAGES);
    s_ptr++;

    // 2b. Write [Number of Fallow blocks]  (same: slot reserved, write disabled)
    //test |= vworm_mark_physical(s_ptr, VWORM_FALLOW_PAGES);
    s_ptr++;

    /// 2c. Write each Primary & Ancillar Base Pointer to the save
    /// 2d. Write each Fallow Pointer to the save
    /// @todo optimize the block & fallow data
    //  The whole X2table struct is dumped verbatim, one halfword at a time.
    b_ptr = (ot_u16*)&X2table;
    for (i=0; i<(sizeof(X2_struct)/2); i++) {
        test |= vworm_mark_physical(s_ptr, b_ptr[i]);
        s_ptr++;
    }

    FLASH_Lock();
    return test;
#else
    return 0;
#endif
}
ot_u16* sub_recombine_block(block_ptr* block_in, ot_int skip, ot_int span) {
/// Recombine a primary/ancillary page pair into a single fresh (fallow) page,
/// erase the two old pages, and push them onto the fallow list.
///
/// @param block_in  Block whose primary+ancillary pages are to be merged.
/// @param skip      Byte offset of a region to leave unwritten in the new page.
/// @param span      Byte length of that region (0 to copy everything).
/// @return          Physical pointer to the skipped offset inside the new page.
///
/// The stored value of each halfword is reconstructed as ~(primary ^ ancillary)
/// -- the inverse-XOR encoding used throughout the X2 scheme (see vworm_write).
    ot_u8   test = 0;   // Fix: was uninitialized -- the |= below read an
                        // indeterminate value (undefined behavior, CERT EXP33-C).
                        // NOTE(review): the result is still never consulted;
                        // consider checking/propagating it.
    ot_int  i;
    ot_u16* new_ptr;
    ot_u16* f_ptr;
    ot_u16* p_ptr;
    ot_u16* a_ptr;

    /// 1. Assign pointers.  The newest fallow page (last slot) becomes the
    ///    destination of the merge.
    p_ptr   = block_in->primary;
    a_ptr   = block_in->ancillary;
    new_ptr = X2table.fallow[(VWORM_FALLOW_PAGES-1)];
    f_ptr   = new_ptr;

    /// 2. Combine the old blocks into the fallow block, copying every halfword
    ///    except those in [skip, skip+span).  i counts bytes (steps of 2) while
    ///    the pointers advance one halfword per iteration, so skip/span are
    ///    byte offsets -- TODO confirm against callers (vworm_write passes 2).
    span += skip;
    for (i=0; i<OTF_VWORM_PAGESIZE; i+=2) {
        if ((i<skip) || (i>=span)) {
            test |= vworm_mark_physical(f_ptr, ~(*p_ptr ^ *a_ptr));
        }
        f_ptr++;
        p_ptr++;
        a_ptr++;
    }

    /// 3. Erase the old blocks
    NAND_erase_page( block_in->primary );
    NAND_erase_page( block_in->ancillary );

    /// 4. Make the two erased blocks fallow blocks.  If we are in this function,
    ///    we can deduce that there is at least one ancillary and one fallow, so we
    ///    stop when NULL is discovered or when at the end of the fallows.
    ///    (NOTE(review): the loop assumes a NULL entry exists below the top
    ///    slot; if every slot were non-NULL, i would walk past index 0 --
    ///    presumably impossible by invariant, but worth confirming.)
#   if (VWORM_FALLOW_PAGES >= 2)
        for (i=(VWORM_FALLOW_PAGES-1); X2table.fallow[i] != NULL; i--) {
            X2table.fallow[i] = X2table.fallow[i-1];
        }
        X2table.fallow[i+1] = block_in->primary;
        X2table.fallow[i]   = block_in->ancillary;
#   else
        X2table.fallow[1]   = block_in->primary;
        X2table.fallow[0]   = block_in->ancillary;
#   endif

    /// 5. Set the primary block to its new position, and ancillary to NULL
    block_in->ancillary = NULL;
    block_in->primary   = new_ptr;

    /// 6. return the (physical) skip address
    return PTR_OFFSET(new_ptr, skip);
}
ot_u8 vworm_write(vaddr addr, ot_u16 data) {
#if ((VWORM_SIZE > 0) && (OT_FEATURE_VLNVWRITE == ENABLED))
/// Logical halfword write into virtual WORM memory.
///
/// Flash bits can only be programmed 1->0; a 0->1 transition requires an
/// erase.  The X2 scheme fakes rewritability by encoding each logical value
/// across two pages (primary P, ancillary A) as:  value = ~(P ^ A).
/// A logical bit can therefore be changed by flipping a 1->0 in whichever
/// plane still has a 1 available.  Only the combination "logical 1->0 while
/// both planes already read 0" is impossible without an erase -- that case
/// falls through to a full block recombination.
///
/// @param addr  virtual address (halfword-aligned, within vworm space)
/// @param data  new 16-bit logical value
/// @return      OR of the underlying vworm_mark_physical() results
///              (0 on success -- presumably; TODO confirm convention)
    ot_int  index;
    ot_int  offset;
    ot_u16  wrtest;
    ot_u16* p_ptr;
    ot_u16* a_ptr;

    // NOTE(review): "VLC_445" is a hand-baked line number (see the //__LINE__
    // marker) and is likely stale; left as-is because it is runtime text.
    SEGFAULT_CHECK(addr, in_vworm, 7, "VLC_445"); //__LINE__

    /// 1. Resolve the vaddr directly: page index + byte offset within page.
    offset  = addr & (VWORM_PAGESIZE-1);
    index   = (addr-VWORM_BASE_VADDR) >> VWORM_PAGESHIFT;
    p_ptr   = PTR_OFFSET(X2table.block[index].primary, offset);

    /// 2. No ancillary block, but try a write anyway
    if (X2table.block[index].ancillary == NULL) {
        /// 2a. If no 0->1 write requirement, then we're good to go
        //  (data & ~*p_ptr) has a bit set exactly where data wants a 1 that
        //  the primary currently stores as 0 -- impossible in plain flash.
        if ((data & ~(*p_ptr)) == 0) {
            return vworm_mark_physical(p_ptr, data);
        }

        /// 2b. Attach a fallow to this bitch (it becomes ancillary)
        sub_attach_fallow(&X2table.block[index]);
    }

    /// 3. There is ancillary block, so go through the logical write process,
    ///    which is designed to shake out a write out of whatever it can get.
    ///    The only bit combination that cannot be managed is [1->0 via 0,0]
    a_ptr   = PTR_OFFSET(X2table.block[index].ancillary, offset);
    //  wrtest flags bits where data=0 while P=0 and A=0: the unmanageable case.
    wrtest  = ~data & ~(*p_ptr) & ~(*a_ptr);

    if (wrtest == 0) {
        ot_u8 test = 0;
        /// 3a. Adjust cases where [1->0 via 1,1] or [0->1 via 1,0]
        //  Both are handled by clearing the offending bit in the PRIMARY plane
        //  (P: 1->0 flips the decoded value either way, since value = ~(P^A)).
        wrtest  = ~data & *p_ptr & *a_ptr;
        wrtest |= data & *p_ptr & ~(*a_ptr);
        if (wrtest != 0) {
            test |= vworm_mark_physical(p_ptr, *p_ptr ^ wrtest);
        }

        /// 3b. Adjust cases where [0->1 via 0,1]
        //  Handled by clearing the bit in the ANCILLARY plane instead.
        wrtest = data & ~(*p_ptr) & *a_ptr;
        if (wrtest != 0) {
            test |= vworm_mark_physical(a_ptr, *a_ptr ^ wrtest);
        }
        return test;
    }

    /// 4. Recombine this block, with the exception of the given addr offset,
    ///    which we will then write-to
    //  (span=2 skips exactly this halfword, leaving it erased/writable)
    else {
        p_ptr = sub_recombine_block(&X2table.block[index], offset, 2);
        return vworm_mark_physical(p_ptr, data);
    }
#else
    return 0;
#endif
}