// Applies an in-place update as delete + re-insert under the current
// transaction: each input position is marked deleted in the store, the old
// row is copied into the delta, and the copy is patched with the new field
// values from _raw_data. On a TID conflict the whole transaction is rolled
// back and the operation aborts with an exception.
void PosUpdateScan::executePlanOperation() {
  auto c_pc = checked_pointer_cast<const PointerCalculator>(input.getTable(0));
  auto c_store = checked_pointer_cast<const storage::Store>(c_pc->getActualTable());

  // Cast the constness away
  auto store = std::const_pointer_cast<storage::Store>(c_store);

  // Get the current maximum size
  const auto& beforSize = store->size();

  // Get the offset for inserts into the delta and the size of the delta that
  // we need to increase by the positions we are inserting
  auto writeArea = store->appendToDelta(c_pc->getPositions()->size());

  // Get the modification record for the current transaction
  auto& txmgr = tx::TransactionManager::getInstance();
  auto& modRecord = txmgr[_txContext.tid];

  // Functor we use for updating the data
  set_json_value_functor fun(store->getDeltaTable());
  storage::type_switch<hyrise_basic_types> ts;

  size_t counter = 0;
  for(const auto& p : *(c_pc->getPositions())) {
    // First delete the old record; markForDeletion fails (returns a code
    // other than TX_OK) when another transaction's TID already claims the row.
    bool deleteOk = store->markForDeletion(p, _txContext.tid) == hyrise::tx::TX_CODE::TX_OK;
    if(!deleteOk) {
      txmgr.rollbackTransaction(_txContext);
      throw std::runtime_error("Aborted TX because TID of other TX found");
    }
    modRecord.deletePos(store, p);
    //store->setTid(p, _txContext.tid);

    // Copy the old row from the main
    store->copyRowToDelta(store, p, writeArea.first+counter, _txContext.tid);

    // Update all the necessary values: resolve the column index from the
    // JSON key, stage the new value, then dispatch on the column type to
    // perform the typed write into the delta.
    for(const auto& kv : _raw_data) {
      const auto& fld = store->numberOfColumn(kv.first);
      fun.set(fld, writeArea.first+counter, kv.second);
      ts(store->typeOfColumn(fld), fun);
    }

    // Insert the new one. NOTE(review): this records the new row at
    // beforSize+counter while the row was written at delta offset
    // writeArea.first+counter — presumably store->size() before the append
    // equals main size + old delta size, making the two consistent; verify
    // against Store::appendToDelta.
    modRecord.insertPos(store, beforSize+counter);
    ++counter;
  }

  // Update affected rows
  auto rsp = getResponseTask();
  if (rsp != nullptr)
    rsp->incAffectedRows(counter);

  addResult(c_store);
}
// Merges exactly two input tables — a compressed main (input_tables[0]) and
// an uncompressed RawTable delta (input_tables[1]) — into merged_table.
// For each mapped column it builds a merged dictionary, rewrites the main
// part's value ids through the old->new mapping, and then maps the raw delta
// values onto the merged dictionaries. Valid vectors are not supported.
void SimpleStoreMerger::mergeValues(const std::vector<hyrise::storage::c_atable_ptr_t > &input_tables,
                                    hyrise::storage::atable_ptr_t merged_table,
                                    const column_mapping_t &column_mapping,
                                    const uint64_t newSize,
                                    bool useValid,
                                    const std::vector<bool>& valid) {
  if (useValid)
    throw std::runtime_error("SimpleStoreMerger does not support valid vectors");
  if(input_tables.size() != 2)
    throw std::runtime_error("SimpleStoreMerger does not support more than two tables");

  auto delta = std::dynamic_pointer_cast<const RawTable>(input_tables[1]);
  auto main = input_tables[0];

  // Prepare type handling
  MergeDictFunctor fun;
  type_switch<hyrise_basic_types> ts;

  // One merge result (merged dictionary + old-valueId-to-new mapping) per
  // source column, indexed by the source column id.
  std::vector<MergeDictFunctor::result> mergedDictionaries(column_mapping.size());

  // Extract unique values for delta: merge main's dictionary with the
  // delta's values for each column and install the result on the target.
  for(const auto& kv : column_mapping) {
    const auto& col = kv.first;
    const auto& dst = kv.second;
    fun.prepare(main, delta, col);
    auto result = ts(main->typeOfColumn(col), fun);
    merged_table->setDictionaryAt(result.dict, dst);
    mergedDictionaries[col] = result;
  }

  // Update the values of the new Table: rewrite each main value id through
  // the per-column mapping into the merged dictionary's id space.
  merged_table->resize(newSize);
  size_t tabSize = main->size();
  for(size_t row=0; row < tabSize; ++row) {
    for( const auto& kv : column_mapping) {
      const auto& col = kv.first;
      const auto& dst = kv.second;
      merged_table->setValueId(dst, row, ValueId{mergedDictionaries[col].mapping[main->getValueId(col, row).valueId], 0});
    }
  }

  // Map the values for the values in the uncompressed delta
  MapValueForValueId map;
  for( const auto& kv : column_mapping) {
    const auto& col = kv.first;
    const auto& dst = kv.second;
    map.prepare(merged_table, dst, mergedDictionaries[col].dict, col, delta);
    ts(merged_table->typeOfColumn(dst), map);
  }
}
// Materializes the parsed JSON payload in _raw_data into a fresh modifiable
// table that mirrors the input table's structure, filling it cell by cell.
storage::atable_ptr_t InsertScan::buildFromJson() {
  auto newTable = input.getTable()->copy_structure_modifiable();

  const size_t numRows = _raw_data.size();
  newTable->resize(numRows);

  set_value_functor setter(newTable);
  storage::type_switch<hyrise_basic_types> typeSwitch;
  const auto numColumns = input.getTable()->columnCount();

  for (size_t row = 0; row < numRows; ++row) {
    for (size_t column = 0; column < numColumns; ++column) {
      // Stage the raw value on the functor, then dispatch on the column's
      // type so the write happens with the correct value type.
      setter.set(column, row, _raw_data[row][column]);
      typeSwitch(newTable->typeOfColumn(column), setter);
    }
  }

  return newTable;
}
void SimpleRawTableScan::executePlanOperation() { auto table = std::dynamic_pointer_cast<const storage::RawTable>(input.getTable(0)); if (!table) throw std::runtime_error("Input table is no uncompressed raw table"); // Prepare a result that contains the result semi-compressed, and only row-wise storage::metadata_list meta(table->columnCount()); for(size_t i=0; i<table->columnCount(); ++i) meta[i] = table->metadataAt(i); auto result = std::make_shared<storage::Table>(&meta, nullptr, 1, /* initial size */ false, /* sorted */ false /* compressed */); // Prepare the copy operator storage::copy_value_functor_raw_table fun(result, table); storage::type_switch<hyrise_basic_types> ts; auto positions = new storage::pos_list_t; size_t tabSize = table->size(); for(size_t row=0; row < tabSize; ++row) { if ((*_comparator)(row)) { if (_materializing) { result->resize(result->size() + 1); for(size_t i=0; i < result->columnCount(); ++i) { fun.setValues(result->size() - 1, row, i); ts(table->typeOfColumn(i), fun); } } else { positions->push_back(row); } } } if (_materializing) { addResult(result); } else { addResult(storage::PointerCalculator::create(table, positions)); } }
// Verifies that loading with a reference table overrides the declared column
// types: the header claims column 2 is INTEGER, but with setReferenceTable(t)
// the loaded result must take its type (STRING) from the reference table.
TEST_F(StringLoaderTests, load_test_typesafe) {
  // Reference table: column 2 ("employee_name") is declared STRING.
  hyrise::storage::atable_ptr_t t = Loader::load(
      Loader::params()
      .setHeader(StringHeader("employee_id|employee_company_id|employee_name\n"
                              "INTEGER|INTEGER|STRING\n"
                              "0_C | 0_C | 0_C"))
      .setInput(CSVInput("test/tables/employees.data"))
  );

  EmptyInput input;
  // Deliberately declares column 2 as INTEGER to provoke the type conflict.
  StringHeader header("employee_id|employee_company_id|employee_name\n"
                      "INTEGER|INTEGER|INTEGER\n"
                      "0_C | 0_C | 0_C");
  Loader::params p;
  p.setInput(input).setHeader(header).setReturnsMutableVerticalTable(true).setReferenceTable(t);
  auto res = Loader::load(p);

  // The reference table's type must win over the header's declaration.
  ASSERT_EQ(t->typeOfColumn(2), res->typeOfColumn(2));
}