TEST_F(TestFetchDocumentCallback, Basics)
{
    // Define a schema that is nothing but a $ref to an external document;
    // resolving it forces the parser through the fetch-document callback.
    rapidjson::Document schemaDocument;
    RapidJsonAdapter schemaDocumentAdapter(schemaDocument);
    schemaDocument.SetObject();
    // Use the document's own allocator so the member's lifetime is tied to
    // the document itself (safer than sharing a fixture-level allocator).
    schemaDocument.AddMember("$ref", "test#/", schemaDocument.GetAllocator());

    // Parse the schema, supplying fetchDocument so the $ref can be resolved.
    Schema schema;
    SchemaParser schemaParser;
    schemaParser.populateSchema(schemaDocumentAdapter, schema,
            boost::make_optional<FetchDocumentFunction>(fetchDocument));

    // A document whose "test" member is a string must validate.
    rapidjson::Document validDocument;
    validDocument.SetObject();
    validDocument.AddMember("test", "valid", validDocument.GetAllocator());
    Validator validator;
    EXPECT_TRUE(validator.validate(schema, RapidJsonAdapter(validDocument), nullptr));

    // A document whose "test" member is a number must be rejected.
    rapidjson::Document invalidDocument;
    invalidDocument.SetObject();
    invalidDocument.AddMember("test", 123, invalidDocument.GetAllocator());
    EXPECT_FALSE(validator.validate(schema, RapidJsonAdapter(invalidDocument), nullptr));
}
int main(int argc, char* argv[]) { if (argc != 2) { std::cerr << "usage: " << argv[0] << " <schema file>" << std::endl; return -1; } std::ifstream file; file.open(argv[1]); if(!file.is_open()) { std::cerr << "unable to open input file" << std::endl; return -1; } SchemaParser p; try { std::vector<RelationSchema> schema = p.parse(file); for(auto relationSchema : schema) { std::cout << relationSchema << std::endl; } } catch (ParserError& e) { file.close(); std::cerr << e.what() << std::endl; return -1; } file.close(); return 0; }
int main(int argc, char *argv[]) { if (argc != 3) { cerr << "Usage: " << argv[0] << " <schema document> <test/target document>" << endl; return 1; } // Load the document containing the schema rapidjson::Document schemaDocument; if (!valijson::utils::loadDocument(argv[1], schemaDocument)) { cerr << "Failed to load schema document." << endl; return 1; } // Load the document that is to be validated rapidjson::Document targetDocument; if (!valijson::utils::loadDocument(argv[2], targetDocument)) { cerr << "Failed to load target document." << endl; return 1; } // Parse the json schema into an internal schema format Schema schema; SchemaParser parser; RapidJsonAdapter schemaDocumentAdapter(schemaDocument); try { parser.populateSchema(schemaDocumentAdapter, schema); } catch (std::exception &e) { cerr << "Failed to parse schema: " << e.what() << endl; return 1; } // Perform validation Validator validator(schema); validator.setStrict(false); ValidationResults results; RapidJsonAdapter targetDocumentAdapter(targetDocument); if (!validator.validate(targetDocumentAdapter, &results)) { std::cerr << "Validation failed." << endl; ValidationResults::Error error; unsigned int errorNum = 1; while (results.popError(error)) { std::string context; std::vector<std::string>::iterator itr = error.context.begin(); for (; itr != error.context.end(); itr++) { context += *itr; } cerr << "Error #" << errorNum << std::endl << " context: " << context << endl << " desc: " << error.description << endl; ++errorNum; } return 1; } return 0; }
QString PgSQLType::getCodeDefinition(unsigned def_type, QString ref_type)
{
	// For SQL output the definition is simply the textual form of the type
	if(def_type==SchemaParser::SQL_DEFINITION)
		return(*(*this));

	SchemaParser schparser;
	attribs_map attribs;

	// Start with every optional attribute empty; only the facets that apply
	// to the current type configuration are filled below
	attribs[ParsersAttributes::LENGTH]=QString();
	attribs[ParsersAttributes::DIMENSION]=QString();
	attribs[ParsersAttributes::PRECISION]=QString();
	attribs[ParsersAttributes::WITH_TIMEZONE]=QString();
	attribs[ParsersAttributes::INTERVAL_TYPE]=QString();
	attribs[ParsersAttributes::SPATIAL_TYPE]=QString();
	attribs[ParsersAttributes::VARIATION]=QString();
	attribs[ParsersAttributes::SRID]=QString();

	attribs[ParsersAttributes::REF_TYPE]=ref_type;
	attribs[ParsersAttributes::NAME]=(~(*this));

	if(length > 1)
		attribs[ParsersAttributes::LENGTH]=QString::number(this->length);

	if(dimension > 0)
		attribs[ParsersAttributes::DIMENSION]=QString::number(this->dimension);

	if(precision >= 0)
		attribs[ParsersAttributes::PRECISION]=QString::number(this->precision);

	if(interval_type != BaseType::null)
		attribs[ParsersAttributes::INTERVAL_TYPE]=(~interval_type);

	if(isGiSType())
	{
		// Spatial (PostGIS) types also carry a subtype, variation and SRID
		attribs[ParsersAttributes::SPATIAL_TYPE]=(~spatial_type);
		attribs[ParsersAttributes::VARIATION]=QString::number(spatial_type.getVariation());
		attribs[ParsersAttributes::SRID]=QString::number(spatial_type.getSRID());
	}

	if(with_timezone)
		attribs[ParsersAttributes::WITH_TIMEZONE]=ParsersAttributes::_TRUE_;

	return(schparser.getCodeDefinition(ParsersAttributes::PGSQL_BASE_TYPE, attribs, def_type));
}
QString BaseObject::getAlterDefinition(QString sch_name, attribs_map &attribs, bool ignore_ukn_attribs, bool ignore_empty_attribs)
{
	try
	{
		// Resolve the alter-commands schema file for the requested object:
		// <schemas root>/<alter dir>/<sch_name><schema ext>
		QString sch_path=GlobalAttributes::SCHEMAS_ROOT_DIR + GlobalAttributes::DIR_SEPARATOR +
						 GlobalAttributes::ALTER_SCHEMA_DIR + GlobalAttributes::DIR_SEPARATOR +
						 sch_name + GlobalAttributes::SCHEMA_EXT;
		SchemaParser schparser;

		// Configure the parser before evaluating the schema file
		schparser.setPgSQLVersion(BaseObject::pgsql_ver);
		schparser.ignoreUnkownAttributes(ignore_ukn_attribs);
		schparser.ignoreEmptyAttributes(ignore_empty_attribs);

		return(schparser.getCodeDefinition(sch_path, attribs));
	}
	catch(Exception &e)
	{
		// Re-raise with this call site appended to the exception chain
		throw Exception(e.getErrorMessage(),e.getErrorType(),__PRETTY_FUNCTION__,__FILE__,__LINE__,&e);
	}
}
QString Reference::getXMLDefinition(void)
{
	SchemaParser schparser;
	attribs_map attribs;

	// Table and column attributes are emitted empty when not configured
	attribs[ParsersAttributes::TABLE]=(table ? table->getName(true) : QString());
	attribs[ParsersAttributes::COLUMN]=(column ? column->getName() : QString());
	attribs[ParsersAttributes::EXPRESSION]=expression;
	attribs[ParsersAttributes::ALIAS]=alias;
	attribs[ParsersAttributes::COLUMN_ALIAS]=column_alias;

	return(schparser.getCodeDefinition(ParsersAttributes::REFERENCE, attribs, SchemaParser::XML_DEFINITION));
}
/// Validates a snippet's attributes: unique id, id format, non-empty label
/// and contents, and (for parsable snippets) that the buffer parses cleanly.
/// Shows a message box describing the first failure; returns true when valid.
bool SnippetsConfigWidget::isSnippetValid(attribs_map &attribs, const QString &orig_id)
{
	Messagebox msg_box;
	QString snip_id=attribs.at(ParsersAttributes::ID), err_msg;

	if(!orig_id.isEmpty() && snip_id!=orig_id && config_params.count(snip_id)!=0)
		err_msg=trUtf8("Duplicated snippet id <strong>%1</strong> detected. Please, specify a different one!").arg(snip_id);
	else if(!ID_FORMAT_REGEXP.exactMatch(snip_id))
		// Fixed user-facing typo: "at leat" -> "at least"
		err_msg=trUtf8("Invalid ID pattern detected <strong>%1</strong>. This one must start with at least one letter and be composed by letters, numbers and/or underscore!").arg(snip_id);
	else if(attribs[ParsersAttributes::LABEL].isEmpty())
		err_msg=trUtf8("Empty label for snippet <strong>%1</strong>. Please, specify a value for it!").arg(snip_id);
	else if(attribs[ParsersAttributes::CONTENTS].isEmpty())
		err_msg=trUtf8("Empty code for snippet <strong>%1</strong>. Please, specify a value for it!").arg(snip_id);
	else if(attribs[ParsersAttributes::PARSABLE]==ParsersAttributes::_TRUE_)
	{
		try
		{
			// Dry-run the snippet through the schema parser to detect
			// syntax errors; the resulting definition is discarded.
			QString buf=snippet_txt->toPlainText();
			// Renamed: previously shadowed the 'attribs' parameter.
			attribs_map parse_attribs;
			SchemaParser schparser;

			schparser.loadBuffer(buf);
			schparser.ignoreEmptyAttributes(true);
			schparser.ignoreUnkownAttributes(true);
			schparser.getCodeDefinition(parse_attribs);
		}
		catch(Exception &e)
		{
			err_msg=trUtf8("The dynamic snippet contains syntax error(s). Additional info: <br/><em>%1</em>").arg(e.getErrorMessage());
		}
	}

	if(!err_msg.isEmpty())
	{
		msg_box.show(err_msg, Messagebox::ERROR_ICON, Messagebox::OK_BUTTON);
		return(false);
	}

	return(true);
}
QString SnippetsConfigWidget::parseSnippet(attribs_map snippet, attribs_map attribs)
{
	SchemaParser schparser;
	QString code=snippet[ParsersAttributes::CONTENTS];

	// Non-parsable snippets are returned verbatim
	if(snippet[ParsersAttributes::PARSABLE]!=ParsersAttributes::_TRUE_)
		return(code);

	try
	{
		schparser.loadBuffer(code);

		if(snippet[ParsersAttributes::PLACEHOLDERS]==ParsersAttributes::_TRUE_)
		{
			// Attributes without a supplied value get a "{attr}" placeholder
			// so they remain visible in the generated output
			for(QString attr : schparser.extractAttributes())
			{
				if(attribs[attr].isEmpty())
					attribs[attr]=QString("{%1}").arg(attr);
			}
		}

		schparser.ignoreEmptyAttributes(true);
		schparser.ignoreUnkownAttributes(true);

		return(schparser.getCodeDefinition(attribs));
	}
	catch(Exception &e)
	{
		// Re-raise with this call site appended to the exception chain
		throw Exception(e.getErrorMessage(),e.getErrorType(),__PRETTY_FUNCTION__,__FILE__,__LINE__,&e);
	}
}
void SchemaParserTest::testExpressionEvaluationWithCasts(void)
{
	SchemaParser schparser;
	attribs_map attribs;

	// Buffer exercising the float-cast comparison operator ">f" inside an
	// %if expression: {ver} (10.0) compared against "9.3"
	QString buffer = "%set {ver} 10.0\n"
					 "\n%if ({ver} >f \"9.3\") %then"
					 "\n 10.0"
					 "\n%else"
					 "\n 9.3"
					 "\n%end";

	try
	{
		schparser.loadBuffer(buffer);
		// 10.0 >f 9.3 must hold, so the %then branch ("10.0") is emitted
		QCOMPARE(schparser.getCodeDefinition(attribs) == "10.0", true);
	}
	catch(Exception &e)
	{
		QFAIL(e.getExceptionsText().toStdString().c_str());
	}
}
/* Consolidates the collected diff infos into a single SQL diff script
 * (stored in diff_def): DROP, CREATE, ALTER and TRUNCATE commands are
 * accumulated per object id and then injected into the "diff" schema file.
 * Emits s_progressUpdated along the way; on error, base types have their
 * function parameters restored before the exception is re-thrown. */
void ModelsDiffHelper::processDiffInfos(void)
{
	BaseObject *object=nullptr;
	Relationship *rel=nullptr;
	map<unsigned, QString> drop_objs, create_objs, alter_objs, truncate_tabs;
	vector<BaseObject *> drop_vect, create_vect, drop_cols;
	unsigned diff_type, schema_id=0, idx=0;
	ObjectType obj_type;
	map<unsigned, QString>::reverse_iterator ritr, ritr_end;
	attribs_map attribs;
	QString alter_def, no_inherit_def, inherit_def, set_perms, unset_perms, fk_defs, col_drop_def;
	SchemaParser schparser;
	Type *type=nullptr;
	vector<Type *> types;
	Constraint *constr=nullptr;
	Column *col=nullptr, *aux_col=nullptr;
	Table *parent_tab=nullptr;
	bool skip_obj=false;
	QStringList sch_names;

	try
	{
		if(!diff_infos.empty())
			emit s_progressUpdated(0, trUtf8("Processing diff infos..."));

		//Reuniting the schema names to inject a SET search_path command
		for(auto &schema : *imported_model->getObjectList(OBJ_SCHEMA))
			sch_names.push_back(schema->getName(true));

		/* Separating the base types: their function parameters are converted
		 * here and restored (convertFunctionParameters(true)) after the shell
		 * type declarations are generated further below. */
		for(ObjectsDiffInfo diff : diff_infos)
		{
			type=dynamic_cast<Type *>(diff.getObject());

			if(type && type->getConfiguration()==Type::BASE_TYPE)
			{
				type->convertFunctionParameters();
				types.push_back(type);
			}
		}

		for(ObjectsDiffInfo diff : diff_infos)
		{
			diff_type=diff.getDiffType();
			object=diff.getObject();
			obj_type=object->getObjectType();
			rel=dynamic_cast<Relationship *>(object);
			constr=dynamic_cast<Constraint *>(object);
			col=dynamic_cast<Column *>(object);

			emit s_progressUpdated((idx++/static_cast<float>(diff_infos.size())) * 100,
								   trUtf8("Processing `%1' info for object `%2' (%3)...")
								   .arg(diff.getDiffTypeString()).arg(object->getSignature()).arg(object->getTypeName()),
								   obj_type);

			/* Preliminary verification for check constraints: there is the need to
			 * check if the constraint is added by generalization or if it already
			 * exists in an ancestor table of its parent. This avoids generating
			 * commands that create or drop an inherited constraint, which would
			 * raise errors when exporting the diff. */
			if(constr && constr->getConstraintType()==ConstraintType::check)
			{
				parent_tab=dynamic_cast<Table *>(constr->getParentTable());
				skip_obj=constr->isAddedByGeneralization();

				//Scan the ancestor tables for a constraint with the same name
				for(unsigned i=0; i < parent_tab->getAncestorTableCount() && !skip_obj; i++)
					skip_obj=(parent_tab->getAncestorTable(i)->getConstraint(constr->getName())!=nullptr);

				if(skip_obj) continue;
			}
			//Ignoring any operation done over inherited columns
			else if(col)
			{
				parent_tab=dynamic_cast<Table *>(col->getParentTable());
				skip_obj=col->isAddedByGeneralization();

				//A column is inherited when an ancestor has one of same name/type
				for(unsigned i=0; i < parent_tab->getAncestorTableCount() && !skip_obj; i++)
				{
					aux_col=parent_tab->getAncestorTable(i)->getColumn(col->getName());
					skip_obj=(aux_col && aux_col->getType().getAliasType()==col->getType());
				}

				if(skip_obj) continue;
			}

			//Generating the DROP commands
			if(diff_type==ObjectsDiffInfo::DROP_OBJECT)
			{
				if(rel)
					//Undoing inheritances
					no_inherit_def+=rel->getInheritDefinition(true);
				else if(obj_type==OBJ_PERMISSION)
					//Unsetting permissions
					unset_perms+=object->getDropDefinition(diff_opts[OPT_CASCADE_MODE]);
				else
				{
					//Ordinary drop commands for any object except columns
					if(obj_type!=OBJ_COLUMN)
						drop_objs[object->getObjectId()]=getCodeDefinition(object, true);
					else
					{
						/* Special case for columns: due to cases like inheritance
						 * there is the need to drop the columns in the normal order
						 * of creation to avoid errors like 'drop inherited column'
						 * or wrong propagation of drop on all child tables. */
						drop_cols.push_back(object);
					}
				}
			}
			//Generating the CREATE commands
			else if(diff_type==ObjectsDiffInfo::CREATE_OBJECT)
			{
				if(rel)
					//Creating inheritances
					inherit_def+=rel->getInheritDefinition(false);
				else if(obj_type==OBJ_PERMISSION)
					//Setting permissions
					set_perms+=object->getCodeDefinition(SchemaParser::SQL_DEFINITION);
				else
				{
					/* Generating fks definitions in a separated variable in order
					 * to append them at the end of the create commands map */
					if(object->getObjectType()==OBJ_CONSTRAINT && dynamic_cast<Constraint *>(object)->getConstraintType()==ConstraintType::foreign_key)
						fk_defs+=getCodeDefinition(object, false);
					else
					{
						create_objs[object->getObjectId()]=getCodeDefinition(object, false);

						//Newly created schemas must also enter the search_path
						if(obj_type==OBJ_SCHEMA)
							sch_names.push_back(object->getName(true));
					}
				}
			}
			//Generating the ALTER commands
			else if(diff_type==ObjectsDiffInfo::ALTER_OBJECT)
			{
				//Recreating the object instead of generating an ALTER command for it
				if((diff_opts[OPT_FORCE_RECREATION] && obj_type!=OBJ_DATABASE) &&
				   (!diff_opts[OPT_RECREATE_UNCHANGEBLE] ||
					(diff_opts[OPT_RECREATE_UNCHANGEBLE] && !object->acceptsAlterCommand())))
				{
					recreateObject(object, drop_vect, create_vect);

					//Generating the drop for the object's references
					for(auto &obj : drop_vect)
						drop_objs[obj->getObjectId()]=getCodeDefinition(obj, true);

					//Generating the create for the object's references
					for(auto &obj : create_vect)
					{
						//Skip references that already have an ALTER info registered
						if(!isDiffInfoExists(ObjectsDiffInfo::ALTER_OBJECT, nullptr, obj, false))
							create_objs[obj->getObjectId()]=getCodeDefinition(obj, false);
					}

					drop_vect.clear();
					create_vect.clear();
				}
				else
				{
					if(diff.getOldObject())
						alter_def=diff.getOldObject()->getAlterDefinition(object);

					if(!alter_def.isEmpty())
					{
						/* Commenting out the ALTER DATABASE ... RENAME TO ...
						 * command if the user chooses to preserve the original
						 * database name */
						if(obj_type==OBJ_DATABASE && alter_def.contains("RENAME") && diff_opts[OPT_PRESERVE_DB_NAME])
							alter_def.prepend(QString("-- "));

						alter_objs[object->getObjectId()]=alter_def;

						/* If the object is a column, checks if the types of the
						 * columns differ, generating a TRUNCATE TABLE for the
						 * parent table */
						if(obj_type==OBJ_COLUMN && diff_opts[OPT_TRUCANTE_TABLES])
						{
							Column *src_col=dynamic_cast<Column *>(object),
								   *imp_col=dynamic_cast<Column *>(diff.getOldObject());
							Table *tab=dynamic_cast<Table *>(src_col->getParentTable());

							//If the truncate was not generated previously
							if((*src_col->getType())!=(*imp_col->getType()) && truncate_tabs.count(tab->getObjectId())==0)
								truncate_tabs[tab->getObjectId()]=tab->getTruncateDefinition(diff_opts[OPT_CASCADE_MODE]);
						}
					}
				}
			}
		}

		/* Creating the shell types declaration right below on the DDL that
		 * creates their schemas */
		for(Type *type : types)
		{
			schema_id=type->getSchema()->getObjectId();

			if(create_objs.count(schema_id)!=0)
				create_objs[schema_id]+=type->getCodeDefinition(SchemaParser::SQL_DEFINITION, true);
			else
				attribs[ParsersAttributes::CREATE_CMDS]+=type->getCodeDefinition(SchemaParser::SQL_DEFINITION, true);

			//Restore the function parameters converted earlier
			type->convertFunctionParameters(true);
		}

		//Generating the drop command for columns (in creation order, see above)
		for(BaseObject *col : drop_cols)
			col_drop_def+=getCodeDefinition(col, true);

		diff_def.clear();

		if(!drop_objs.empty() || !create_objs.empty() || !alter_objs.empty() || !inherit_def.isEmpty() ||
		   !no_inherit_def.isEmpty() || !set_perms.isEmpty() || !fk_defs.isEmpty() || !col_drop_def.isEmpty())
		{
			sch_names.removeDuplicates();

			//Attributes used on the diff schema file
			attribs[ParsersAttributes::HAS_CHANGES]=ParsersAttributes::_TRUE_;
			attribs[ParsersAttributes::PGMODELER_VERSION]=GlobalAttributes::PGMODELER_VERSION;
			attribs[ParsersAttributes::CHANGE]=QString::number(alter_objs.size());
			attribs[ParsersAttributes::CREATE]=QString::number(create_objs.size());
			attribs[ParsersAttributes::DROP]=QString::number(drop_objs.size());
			attribs[ParsersAttributes::TRUNCATE]=QString::number(truncate_tabs.size());
			attribs[ParsersAttributes::SEARCH_PATH]=sch_names.join(',');
			attribs[ParsersAttributes::ALTER_CMDS]=QString();
			attribs[ParsersAttributes::DROP_CMDS]=QString();
			/* NOTE(review): this reset appears to discard any shell-type
			 * definitions appended to CREATE_CMDS in the types loop above
			 * (the branch taken when the type's schema is not in create_objs)
			 * — confirm this is intended. */
			attribs[ParsersAttributes::CREATE_CMDS]=QString();
			attribs[ParsersAttributes::TRUNCATE_CMDS]=QString();
			attribs[ParsersAttributes::UNSET_PERMS]=unset_perms;
			attribs[ParsersAttributes::SET_PERMS]=set_perms;
			attribs[ParsersAttributes::FUNCTION]=(source_model->getObjectCount(OBJ_FUNCTION)!=0 ? ParsersAttributes::_TRUE_ : QString());

			/* Drops are emitted in reverse object-id order (dependents first);
			 * inheritance removal comes first, column drops last */
			ritr=drop_objs.rbegin();
			ritr_end=drop_objs.rend();

			attribs[ParsersAttributes::DROP_CMDS]+=no_inherit_def;

			while(ritr!=ritr_end)
			{
				attribs[ParsersAttributes::DROP_CMDS]+=ritr->second;
				ritr++;
			}

			attribs[ParsersAttributes::DROP_CMDS]+=col_drop_def;

			//Creates are emitted in ascending object-id order, then fks, then inheritances
			for(auto &itr : create_objs)
				attribs[ParsersAttributes::CREATE_CMDS]+=itr.second;

			attribs[ParsersAttributes::CREATE_CMDS]+=fk_defs;
			attribs[ParsersAttributes::CREATE_CMDS]+=inherit_def;

			for(auto &itr : truncate_tabs)
				attribs[ParsersAttributes::TRUNCATE_CMDS]+=itr.second;

			for(auto &itr : alter_objs)
				attribs[ParsersAttributes::ALTER_CMDS]+=itr.second;

			//Generating the whole diff buffer
			schparser.setPgSQLVersion(pgsql_version);
			diff_def=schparser.getCodeDefinition(GlobalAttributes::SCHEMAS_ROOT_DIR + GlobalAttributes::DIR_SEPARATOR +
												 GlobalAttributes::ALTER_SCHEMA_DIR + GlobalAttributes::DIR_SEPARATOR +
												 ParsersAttributes::DIFF + GlobalAttributes::SCHEMA_EXT, attribs);
		}

		emit s_progressUpdated(100, trUtf8("Comparison between model and database finished."));
	}
	catch(Exception &e)
	{
		//Restore the base types' function parameters before propagating
		for(Type *type : types)
			type->convertFunctionParameters(true);

		throw Exception(e.getErrorMessage(),e.getErrorType(),__PRETTY_FUNCTION__,__FILE__,__LINE__,&e);
	}
}
/* Validates an array of doubles against an allOf(integers, numbers) schema
 * and asserts the exact sequence of errors produced: one type error plus one
 * "failed to validate item" error per element, then the allOf child-schema
 * failure, in FIFO pop order. */
TEST_F(TestValidationErrors, AllOfConstraintFailure)
{
	// Load schema document
	rapidjson::Document schemaDocument;
	ASSERT_TRUE( loadDocument(TEST_DATA_DIR "/schemas/allof_integers_and_numbers.schema.json", schemaDocument) );
	RapidJsonAdapter schemaAdapter(schemaDocument);

	// Parse schema document
	Schema schema;
	SchemaParser schemaParser;
	ASSERT_NO_THROW( schemaParser.populateSchema(schemaAdapter, schema) );

	// Load test document
	rapidjson::Document testDocument;
	ASSERT_TRUE( loadDocument(TEST_DATA_DIR "/documents/array_doubles_1_2_3.json", testDocument) );
	RapidJsonAdapter testAdapter(testDocument);

	Validator validator;
	ValidationResults results;
	EXPECT_FALSE( validator.validate(schema, testAdapter, &results) );

	ValidationResults::Error error;

	// Item #0: type error at <root>/[0], then the per-item failure at <root>
	EXPECT_TRUE( results.popError(error) );
	EXPECT_EQ( size_t(2), error.context.size() );
	EXPECT_EQ( "<root>", error.context[0] );
	EXPECT_EQ( "[0]", error.context[1] );
	EXPECT_EQ( "Value type not permitted by 'type' constraint.", error.description );

	EXPECT_TRUE( results.popError(error) );
	EXPECT_EQ( size_t(1), error.context.size() );
	EXPECT_EQ( "<root>", error.context[0] );
	EXPECT_EQ( "Failed to validate item #0 in array.", error.description );

	// Item #1: same pair of errors
	EXPECT_TRUE( results.popError(error) );
	EXPECT_EQ( size_t(2), error.context.size() );
	EXPECT_EQ( "<root>", error.context[0] );
	EXPECT_EQ( "[1]", error.context[1] );
	EXPECT_EQ( "Value type not permitted by 'type' constraint.", error.description );

	EXPECT_TRUE( results.popError(error) );
	EXPECT_EQ( size_t(1), error.context.size() );
	EXPECT_EQ( "<root>", error.context[0] );
	EXPECT_EQ( "Failed to validate item #1 in array.", error.description );

	// Item #2: same pair of errors
	EXPECT_TRUE( results.popError(error) );
	EXPECT_EQ( size_t(2), error.context.size() );
	EXPECT_EQ( "<root>", error.context[0] );
	EXPECT_EQ( "[2]", error.context[1] );
	EXPECT_EQ( "Value type not permitted by 'type' constraint.", error.description );

	EXPECT_TRUE( results.popError(error) );
	EXPECT_EQ( size_t(1), error.context.size() );
	EXPECT_EQ( "<root>", error.context[0] );
	EXPECT_EQ( "Failed to validate item #2 in array.", error.description );

	// Final error: the allOf child schema (#0, the integers schema) failed
	EXPECT_TRUE( results.popError(error) );
	EXPECT_EQ( size_t(1), error.context.size() );
	EXPECT_EQ( "<root>", error.context[0] );
	EXPECT_EQ( "Failed to validate against child schema #0.", error.description );

	// No further errors expected
	EXPECT_FALSE( results.popError(error) );

	/* NOTE(review): this loop is effectively dead — popError just returned
	 * false above — and looks like leftover debugging output; consider
	 * removing it. */
	while (results.popError(error)) {
		//std::cerr << error.context << std::endl;
		std::cerr << error.description << std::endl;
	}
}