bool
NewClassAdJobLogConsumer::SetAttribute(const char *key, const char *name, const char *value)
{
    classad::ClassAd *ad = m_collection.GetClassAd(key);
    if (!ad) {
        dprintf(D_ALWAYS,
                "error reading %s: no such ad in collection: %s\n",
                m_reader ? m_reader->GetClassAdLogFileName() : "(null)", key);
        // The schedd has been known to set attributes for bogus job ids.
        // Ignore them and continue processing the log.
        return true;
    }

    classad::ExprTree *expr;
    ParseClassAdRvalExpr(value, expr, NULL);
    if (!expr) {
        dprintf(D_ALWAYS,
                "error reading %s: failed to parse expression: %s\n",
                m_reader ? m_reader->GetClassAdLogFileName() : "(null)", value);
        // If the schedd writes a bad attribute value, ignore it and keep
        // processing the log.
        return true;
    }

    ad->Insert(name, expr);

    return true;
}
static int set_print_mask_from_stream(
    AttrListPrintMask & print_mask,
    std::string & constraint,
    const char * streamid,
    bool is_filename)
{
    StringList attrs; // used for projection, which we don't currently do.
    std::string messages;
    printmask_aggregation_t aggregation;
    printmask_headerfooter_t headFoot = STD_HEADFOOT;
    std::vector<GroupByKeyInfo> group_by_keys;
    SimpleInputStream * pstream = NULL;
    FILE *file = NULL;

    if (MATCH == strcmp("-", streamid)) {
        pstream = new SimpleFileInputStream(stdin, false);
    } else if (is_filename) {
        file = safe_fopen_wrapper_follow(streamid, "r");
        if (file == NULL) {
            fprintf(stderr, "Can't open select file: %s\n", streamid);
            return -1;
        }
        pstream = new SimpleFileInputStream(file, true);
    } else {
        pstream = new StringLiteralInputStream(streamid);
    }
    ASSERT(pstream);

    int err = SetAttrListPrintMaskFromStream(
                    *pstream,
                    LocalPrintFormatsTable,
                    print_mask,
                    headFoot,
                    aggregation,
                    group_by_keys,
                    constraint,
                    attrs,
                    messages);
    delete pstream; pstream = NULL;

    if ( ! err) {
        customFormat = true;
        if ( ! constraint.empty()) {
            ExprTree *constraintExpr = NULL;
            // ParseClassAdRvalExpr returns 0 on success, non-zero on a parse error.
            if (ParseClassAdRvalExpr(constraint.c_str(), constraintExpr)) {
                formatstr_cat(messages, "WHERE expression is not valid: %s\n", constraint.c_str());
                err = -1;
            } else {
                delete constraintExpr;
            }
        }
        if (aggregation) {
            formatstr_cat(messages, "AUTOCLUSTER or UNIQUE aggregation is not supported.\n");
            err = -1;
        }
    }
    if ( ! messages.empty()) { fprintf(stderr, "%s", messages.c_str()); }
    return err;
}
void Defrag::validateExpr(char const *constraint, char const *constraint_source)
{
    ExprTree *requirements = NULL;

    if( ParseClassAdRvalExpr( constraint, requirements )!=0 || requirements==NULL ) {
        EXCEPT("Invalid expression for %s: %s\n",
               constraint_source, constraint);
    }
    delete requirements;
}
bool EvalBool(compat_classad::ClassAd *ad, const char *constraint)
{
    static classad::ExprTree *tree = NULL;
    static char * saved_constraint = NULL;
    classad::Value result;
    bool constraint_changed = true;
    double doubleVal;
    long long intVal;
    bool boolVal;

    if ( saved_constraint ) {
        if ( strcmp(saved_constraint,constraint) == 0 ) {
            constraint_changed = false;
        }
    }

    if ( constraint_changed ) {
        // constraint has changed, or saved_constraint is NULL
        if ( saved_constraint ) {
            free(saved_constraint);
            saved_constraint = NULL;
        }
        if ( tree ) {
            delete tree;
            tree = NULL;
        }
        classad::ExprTree *tmp_tree = NULL;
        if ( ParseClassAdRvalExpr( constraint, tmp_tree ) != 0 ) {
            dprintf( D_ALWAYS, "can't parse constraint: %s\n", constraint );
            return false;
        }
        tree = compat_classad::RemoveExplicitTargetRefs( tmp_tree );
        delete tmp_tree;
        saved_constraint = strdup( constraint );
    }

    // Evaluate constraint with ad in the target scope so that constraints
    // have the same semantics as the collector queries. --RR
    if ( !EvalExprTree( tree, ad, NULL, result ) ) {
        dprintf( D_ALWAYS, "can't evaluate constraint: %s\n", constraint );
        return false;
    }
    if( result.IsBooleanValue( boolVal ) ) {
        return boolVal;
    } else if( result.IsIntegerValue( intVal ) ) {
        return intVal != 0;
    } else if( result.IsRealValue( doubleVal ) ) {
        return IS_DOUBLE_TRUE(doubleVal);
    }
    dprintf( D_FULLDEBUG, "constraint (%s) does not evaluate to bool\n", constraint );
    return false;
}
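// ---------------------------------------------------------------------------
// Hedged sketch (added for illustration, not part of HTCondor): the
// parse-once, evaluate-many pattern used by EvalBool above can be reproduced
// with the stand-alone ClassAd library alone. classad::ClassAdParser,
// ClassAd::EvaluateExpr and classad::Value are real public API; the caching
// scheme and the name eval_bool_cached are illustrative, and the coercion of
// real values to bool is simplified relative to IS_DOUBLE_TRUE.
// ---------------------------------------------------------------------------
#include <string>
#include "classad/classad_distribution.h"

static bool eval_bool_cached(classad::ClassAd &ad, const std::string &constraint)
{
    static std::string cached_text;                 // last constraint text we parsed
    static classad::ExprTree *cached_tree = NULL;   // its parsed form, reused across calls

    if (cached_tree == NULL || constraint != cached_text) {
        delete cached_tree;
        classad::ClassAdParser parser;
        cached_tree = parser.ParseExpression(constraint, true);   // NULL on a syntax error
        cached_text = constraint;
        if (cached_tree == NULL) return false;
    }

    classad::Value result;
    if (!ad.EvaluateExpr(cached_tree, result)) return false;

    // Coerce boolean, integer and real results to bool, as EvalBool does.
    bool b; long long i; double d;
    if (result.IsBooleanValue(b)) return b;
    if (result.IsIntegerValue(i)) return i != 0;
    if (result.IsRealValue(d))    return d != 0.0;
    return false;   // UNDEFINED, ERROR, strings, etc. are treated as false
}
// Usage (assuming 'ad' was parsed or built elsewhere):
//   if (eval_bool_cached(ad, "JobStatus == 2 && Owner == \"smith\"")) { ... }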
// check to see that a classad expression is valid, and optionally return the
// names of the attributes that it references
bool IsValidClassAdExpression(const char * str, classad::References * attrs /*=NULL*/, classad::References *scopes /*=NULL*/)
{
    if ( ! str || ! str[0]) return false;

    classad::ExprTree * expr = NULL;
    int rval = ParseClassAdRvalExpr(str, expr);
    if (0 == rval) {
        if (attrs) {
            GetAttrsAndScopes(expr, attrs, scopes);
        }
        delete expr;
    }
    return rval == 0;
}
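// ---------------------------------------------------------------------------
// Hedged sketch (illustration only): the same validate-and-collect-references
// idea using nothing but the public ClassAd library. ParseExpression and
// ClassAd::GetExternalReferences are real library calls; collecting references
// against an empty ad so that every attribute shows up as "external" is the
// illustrative trick here, and valid_expression is a made-up name.
// ---------------------------------------------------------------------------
#include <string>
#include "classad/classad_distribution.h"

static bool valid_expression(const std::string &text, classad::References *attrs)
{
    classad::ClassAdParser parser;
    classad::ExprTree *expr = parser.ParseExpression(text, true);
    if (expr == NULL) return false;        // syntax error

    if (attrs) {
        // Every attribute the expression reads is "external" to an empty ad.
        classad::ClassAd empty;
        empty.GetExternalReferences(expr, *attrs, false);
    }
    delete expr;
    return true;
}
// e.g. valid_expression("Owner == \"smith\" && JobStatus == 2", &attrs)
// returns true and leaves {JobStatus, Owner} in attrs.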
int GenericQuery::
makeQuery (ExprTree *&tree)
{
    MyString req;

    int status = makeQuery(req);
    if (status != Q_OK) return status;

    // If there are no constraints, then we match everything.
    if (req.empty()) req = "TRUE";

    // parse constraints and insert into query ad
    if (ParseClassAdRvalExpr (req.Value(), tree) > 0) return Q_PARSE_ERROR;

    return Q_OK;
}
LogSetAttribute::LogSetAttribute(const char *k, const char *n, const char *val, bool dirty)
{
    op_type = CondorLogOp_SetAttribute;
    key = strdup(k);
    name = strdup(n);
    value_expr = NULL;
    if (val && strlen(val) && !blankline(val) &&
        !ParseClassAdRvalExpr(val, value_expr))
    {
        value = strdup(val);
    } else {
        if (value_expr) delete value_expr;
        value_expr = NULL;
        value = strdup("UNDEFINED");
    }
    is_dirty = dirty;
}
int LogSetAttribute::ReadBody(FILE* fp)
{
    int rval, rval1;

    free(key);
    rval1 = readword(fp, key);
    if (rval1 < 0) {
        return rval1;
    }

    free(name);
    rval = readword(fp, name);
    if (rval < 0) {
        return rval;
    }
    rval1 += rval;

    free(value);
    rval = readline(fp, value);
    if (rval < 0) {
        return rval;
    }

    if (value_expr) delete value_expr;
    value_expr = NULL;
    if (ParseClassAdRvalExpr(value, value_expr)) {
        if (value_expr) delete value_expr;
        value_expr = NULL;
        if (param_boolean("CLASSAD_LOG_STRICT_PARSING", true)) {
            return -1;
        } else {
            dprintf(D_ALWAYS, "WARNING: strict classad parsing failed for expression: \"%s\"\n", value);
        }
    }
    return rval + rval1;
}
int main(int argc, char *argv[])
{
    has_proc = false;
    MyString constraint;
    Qmgr_connection *q;
    int nextarg = 1, cluster=0, proc=0;
    bool UseConstraint = false;
    MyString schedd_name;
    MyString pool_name;
    ExprTree* value_expr;

    myDistro->Init( argc, argv );
    config();

#if !defined(WIN32)
    install_sig_handler(SIGPIPE, SIG_IGN );
#endif

    if (argc < 2) {
        usage(argv[0]);
    }

    // if -debug is present, it must be first. sigh.
    if (argv[nextarg][0] == '-' && argv[nextarg][1] == 'd') {
        // output dprintf messages to stderr at TOOL_DEBUG level
        dprintf_set_tool_debug("TOOL", 0);
        nextarg++;
    }

    // if it is present, it must be first after debug.
    if (argv[nextarg][0] == '-' && argv[nextarg][1] == 'n') {
        nextarg++;
        // use the given name as the schedd name to connect to
        if (argc <= nextarg) {
            fprintf(stderr, "%s: -n requires another argument\n", argv[0]);
            exit(1);
        }
        schedd_name = argv[nextarg];
        nextarg++;
    }

    if (argc <= nextarg) {
        usage(argv[0]);
    }

    // if it is present, it must be just after -n flag
    if (argv[nextarg][0] == '-' && argv[nextarg][1] == 'p') {
        nextarg++;
        if (argc <= nextarg) {
            fprintf(stderr, "%s: -pool requires another argument\n", argv[0]);
            exit(1);
        }
        pool_name = argv[nextarg];
        nextarg++;
    }

    DCSchedd schedd((schedd_name.Length() == 0) ? NULL : schedd_name.Value(),
                    (pool_name.Length() == 0) ? NULL : pool_name.Value());

    if ( schedd.locate() == false ) {
        if (schedd_name == "") {
            fprintf( stderr, "%s: ERROR: Can't find address of local schedd\n", argv[0] );
            exit(1);
        }
        if (pool_name == "") {
            fprintf( stderr, "%s: No such schedd named %s in local pool\n",
                     argv[0], schedd_name.Value() );
        } else {
            fprintf( stderr, "%s: No such schedd named %s in "
                     "pool %s\n", argv[0], schedd_name.Value(), pool_name.Value() );
        }
        exit(1);
    }

    // Open job queue
    q = ConnectQ( schedd.addr(), 0, false, NULL, NULL, schedd.version() );
    if( !q ) {
        fprintf( stderr, "Failed to connect to queue manager %s\n", schedd.addr() );
        exit(1);
    }

    if (argc <= nextarg) {
        usage(argv[0]);
    }

    if (isdigit(argv[nextarg][0])) {
        char *tmp;
        cluster = strtol(argv[nextarg], &tmp, 10);
        if (cluster <= 0) {
            fprintf( stderr, "Invalid cluster # from %s.\n", argv[nextarg]);
            exit(1);
        }
        if (*tmp == '.') {
            proc = strtol(tmp + 1, &tmp, 10);
            if (proc < 0) {
                fprintf( stderr, "Invalid proc # from %s.\n", argv[nextarg]);
                exit(1);
            }
            UseConstraint = false;
            has_proc = true;
        } else {
            constraint.formatstr("(%s == %d)", ATTR_CLUSTER_ID, cluster);
            UseConstraint = true;
        }
        nextarg++;
    } else if (!match_prefix(argv[nextarg], "-constraint")) {
        constraint.formatstr("(%s == \"%s\")", ATTR_OWNER, argv[nextarg]);
        nextarg++;
        UseConstraint = true;
    }

    if (argc <= nextarg) {
        usage(argv[0]);
    }

    while (match_prefix(argv[nextarg], "-constraint")) {
        if ( has_proc ){
            fprintf(stderr, "condor_qedit: proc_id specified. Ignoring constraint option\n");
            nextarg += 2;
            continue;
        }
        nextarg++;
        if (argc <= nextarg) {
            usage(argv[0]);
        }
        if ( !UseConstraint ){
            constraint = argv[nextarg];
        } else {
            constraint = "( " + constraint + " ) && " + argv[nextarg];
        }
        nextarg++;
        UseConstraint = true;
    }

    if (argc <= nextarg) {
        usage(argv[0]);
    }

    for (; nextarg < argc; nextarg += 2) {
        if (argc <= nextarg+1) {
            usage(argv[0]);
        }

        if (ProtectedAttribute(argv[nextarg])) {
            fprintf(stderr, "Update of attribute \"%s\" is not allowed.\n", argv[nextarg]);
            fprintf(stderr, "Transaction failed. No attributes were set.\n");
            exit(1);
        }

        // Check validity of attribute-name
        if ( blankline(argv[nextarg]) || !IsValidAttrName(argv[nextarg]) ) {
            fprintf(stderr, "Update aborted, illegal attribute-name specified for attribute \"%s\".\n",
                    argv[nextarg]);
            fprintf(stderr, "Transaction failed. No attributes were set.\n");
            exit(1);
        }

        // Check validity of attribute-value
        value_expr = NULL;
        if ( blankline(argv[nextarg+1]) || !IsValidAttrValue(argv[nextarg+1]) ||
             ParseClassAdRvalExpr(argv[nextarg+1], value_expr) ) {
            fprintf(stderr, "Update aborted, illegal attribute-value specified for attribute \"%s\".\n",
                    argv[nextarg]);
            fprintf(stderr, "Transaction failed. No attributes were set.\n");
            exit(1);
        }
        if (value_expr) delete value_expr;

        if (UseConstraint) {
            // Try to communicate with the newer protocol first
            if (SetAttributeByConstraint(constraint.Value(), argv[nextarg],
                                         argv[nextarg+1], SETDIRTY) < 0) {
                if (SetAttributeByConstraint(constraint.Value(), argv[nextarg],
                                             argv[nextarg+1]) < 0) {
                    fprintf(stderr, "Failed to set attribute \"%s\" by constraint: %s\n",
                            argv[nextarg], constraint.Value());
                    fprintf(stderr, "Transaction failed. No attributes were set.\n");
                    exit(1);
                }
            }
        } else {
            if (SetAttribute(cluster, proc, argv[nextarg], argv[nextarg+1], SETDIRTY) < 0) {
                fprintf(stderr, "Failed to set attribute \"%s\" for job %d.%d.\n",
                        argv[nextarg], cluster, proc);
                fprintf(stderr, "Transaction failed. No attributes were set.\n");
                exit(1);
            }
        }
        printf("Set attribute \"%s\".\n", argv[nextarg]);
    }

    if (!DisconnectQ(q)) {
        fprintf(stderr, "Queue transaction failed. No attributes were set.\n");
        exit(1);
    }

    return 0;
}
void
OfflineCollectorPlugin::configure ()
{
    dprintf (
        D_FULLDEBUG,
        "In OfflineCollectorPlugin::configure ()\n" );

    /**** Handle ABSENT_REQUIREMENTS PARAM ****/
    char *tmp;

    if (AbsentReq) delete AbsentReq;
    AbsentReq = NULL;
    tmp = param("ABSENT_REQUIREMENTS");
    if( tmp ) {
        if( ParseClassAdRvalExpr(tmp, AbsentReq) ) {
            EXCEPT ("Error parsing ABSENT_REQUIREMENTS expression: %s", tmp);
        }
#if defined(ADD_TARGET_SCOPING)
        if(AbsentReq){
            ExprTree *tmp_expr = AddTargetRefs( AbsentReq, TargetMachineAttrs );
            delete AbsentReq;
            AbsentReq = tmp_expr;
        }
#endif
        dprintf (D_ALWAYS,"ABSENT_REQUIREMENTS = %s\n", tmp);
        free( tmp );
        tmp = NULL;
    } else {
        dprintf (D_ALWAYS,"ABSENT_REQUIREMENTS = None\n");
    }

    /**** Handle COLLECTOR_PERSISTENT_AD_LOG PARAM ****/
    if ( _persistent_store ) {
        /* was param()'d so we must use free() */
        free ( _persistent_store );
        _persistent_store = NULL;
    }

    _persistent_store = param("COLLECTOR_PERSISTENT_AD_LOG");
    // if not found, try the deprecated name OFFLINE_LOG
    if ( ! _persistent_store ) {
        _persistent_store = param ( "OFFLINE_LOG" );
    }

    if ( _persistent_store ) {
        dprintf (
            D_ALWAYS,
            "OfflineCollectorPlugin::configure: off-line ad "
            "persistent store: '%s'.\n",
            _persistent_store );

        if ( _ads ) {
            delete _ads;
            _ads = NULL;
        }

        _ads = new ClassAdCollection (NULL, _persistent_store, 2 );
        ASSERT ( _ads );
    } else {
        dprintf (
            D_ALWAYS,
            "OfflineCollectorPlugin::configure: no persistent store "
            "was defined for off-line ads.\n" );
    }
}
QuillErrCode HistorySnapshot::printResults(SQLQuery *queryhor,
                                           SQLQuery *queryver,
                                           bool longformat, bool fileformat,
                                           bool custForm,
                                           AttrListPrintMask *pmask,
                                           const char *constraint /* = "" */)
{
    AttrList *ad = 0;
    QuillErrCode st = QUILL_SUCCESS;

    // initialize index variables
    off_t offset = 0, last_line = 0;
    cur_historyads_hor_index = 0;
    cur_historyads_ver_index = 0;

    if (!longformat && !custForm) {
        short_header();
    }

    ExprTree *tree = NULL;
    if (constraint) {
        ParseClassAdRvalExpr(constraint, tree);
    }

    while(1) {
        st = getNextAd_Hor(ad, queryhor);
        if(st != QUILL_SUCCESS) break;

        if (longformat || constraint) {
            st = getNextAd_Ver(ad, queryver);

            if (constraint && EvalBool(ad, tree) == FALSE) {
                continue;
            }

            // in the case of vertical, we dont want to quit if we run
            // out of tuples because 1) we want to display whats in the ad
            // and 2) the horizontal cursor will correctly determine when
            // to stop - this is because in all cases, we only pull out those
            // tuples from vertical which join with a horizontal tuple
            if(st != QUILL_SUCCESS && st != DONE_HISTORY_VER_CURSOR) break;

            if (fileformat) {
                // Print out the job ads in history file format, i.e., print the *** delimiters
                MyString owner, ad_str, temp;
                int compl_date;

                ad->sPrint(ad_str);
                if (!ad->LookupString(ATTR_OWNER, owner)) owner = "NULL";
                if (!ad->LookupInteger(ATTR_COMPLETION_DATE, compl_date)) compl_date = 0;
                temp.formatstr("*** Offset = %ld ClusterId = %d ProcId = %d Owner = \"%s\" CompletionDate = %d\n",
                               offset - last_line, curClusterId_hor, curProcId_hor, owner.Value(), compl_date);
                offset += ad_str.Length() + temp.Length();
                last_line = temp.Length();
                fprintf(stdout, "%s", ad_str.Value());
                fprintf(stdout, "%s", temp.Value());
            } else if (longformat) {
                ad->fPrint(stdout);
                printf("\n");
            }
        }

        if (!longformat) {
            if (custForm == true) {
                ASSERT(pmask != NULL);
                pmask->display(stdout, ad);
            } else {
                displayJobShort(ad);
            }
        }
    }

    if(ad != NULL) {
        delete ad;
        ad = NULL;
    }

    if(st == FAILURE_QUERY_HISTORYADS_HOR || st == FAILURE_QUERY_HISTORYADS_VER)
        return st;

    return QUILL_SUCCESS;
}
int main(int argc, char* argv[])
{
    Collectors = NULL;

#ifdef HAVE_EXT_POSTGRESQL
    HistorySnapshot *historySnapshot;
    SQLQuery queryhor;
    SQLQuery queryver;
    QuillErrCode st;
    bool remotequill=false;
    char *quillName=NULL;
    AttrList *ad=0;
    int flag = 1;
    void **parameters;
    char *dbconn=NULL;
    char *completedsince = NULL;
    char *dbIpAddr=NULL, *dbName=NULL, *queryPassword=NULL;
    bool remoteread = false;
#endif /* HAVE_EXT_POSTGRESQL */

    const char *owner=NULL;
    bool readfromfile = true;
    bool fileisuserlog = false;

    char* JobHistoryFileName=NULL;
    const char * pcolon=NULL;

    GenericQuery constraint; // used to build a complex constraint.
    ExprTree *constraintExpr=NULL;

    std::string tmp;

    int i;
    myDistro->Init( argc, argv );

    config();

#ifdef HAVE_EXT_POSTGRESQL
    parameters = (void **) malloc(NUM_PARAMETERS * sizeof(void *));
    queryhor.setQuery(HISTORY_ALL_HOR, NULL);
    queryver.setQuery(HISTORY_ALL_VER, NULL);
#endif /* HAVE_EXT_POSTGRESQL */

    for(i=1; i<argc; i++) {
        if (is_dash_arg_prefix(argv[i],"long",1)) {
            longformat=TRUE;
        }
        else if (is_dash_arg_prefix(argv[i],"xml",3)) {
            use_xml = true;
            longformat = true;
        }
        else if (is_dash_arg_prefix(argv[i],"backwards",1)) {
            backwards=TRUE;
        }
        // must be at least -forw to avoid conflict with -f (for file) and -format
        else if (is_dash_arg_prefix(argv[i],"nobackwards",3) ||
                 is_dash_arg_prefix(argv[i],"forwards",4)) {
            backwards=FALSE;
        }
        else if (is_dash_arg_colon_prefix(argv[i],"wide", &pcolon, 1)) {
            wide_format=TRUE;
            if (pcolon) {
                wide_format_width = atoi(++pcolon);
                if ( ! mask.IsEmpty()) mask.SetOverallWidth(getDisplayWidth()-1);
                if (wide_format_width <= 80) wide_format = FALSE;
            }
        }
        else if (is_dash_arg_prefix(argv[i],"match",1) || is_dash_arg_prefix(argv[i],"limit",3)) {
            i++;
            if (argc <= i) {
                fprintf(stderr,
                        "Error: Argument -match requires a number value "
                        " as a parameter.\n");
                exit(1);
            }
            specifiedMatch = atoi(argv[i]);
        }
#ifdef HAVE_EXT_POSTGRESQL
        else if(is_dash_arg_prefix(argv[i], "dbname",1)) {
            i++;
            if (argc <= i) {
                fprintf( stderr,
                         "Error: Argument -dbname requires the name of a quilld as a parameter\n" );
                exit(1);
            }

            /*
            if( !(quillName = get_daemon_name(argv[i])) ) {
                fprintf( stderr, "Error: unknown host %s\n",
                         get_host_part(argv[i]) );
                printf("\n");
                print_wrapped_text("Extra Info: The name given with the -dbname "
                                   "should be the name of a condor_quilld process. "
                                   "Normally it is either a hostname, or "
                                   "\"name@hostname\". "
                                   "In either case, the hostname should be the "
                                   "Internet host name, but it appears that it "
                                   "wasn't.",
                                   stderr);
                exit(1);
            }
            sprintf (tmp, "%s == \"%s\"", ATTR_NAME, quillName);
            quillQuery.addORConstraint (tmp);
            */
            quillName = argv[i];

            sprintf (tmp, "%s == \"%s\"", ATTR_SCHEDD_NAME, quillName);
            quillQuery.addORConstraint (tmp.c_str());

            remotequill = false;
            readfromfile = false;
        }
#endif /* HAVE_EXT_POSTGRESQL */
        else if (is_dash_arg_prefix(argv[i],"file",2)) {
            if (i+1==argc || JobHistoryFileName) break;
            i++;
            JobHistoryFileName=argv[i];
            readfromfile = true;
        }
        else if (is_dash_arg_prefix(argv[i],"userlog",1)) {
            if (i+1==argc || JobHistoryFileName) break;
            i++;
            JobHistoryFileName=argv[i];
            readfromfile = true;
            fileisuserlog = true;
        }
        else if (is_dash_arg_prefix(argv[i],"help",1)) {
            Usage(argv[0],0);
        }
        else if (is_dash_arg_prefix(argv[i],"format",1)) {
            if (argc <= i + 2) {
                fprintf(stderr,
                        "Error: Argument -format requires a spec and "
                        "classad attribute name as parameters.\n");
                fprintf(stderr,
                        "\t\te.g. condor_history -format '%%d' ClusterId\n");
                exit(1);
            }
            mask.registerFormat(argv[i + 1], argv[i + 2]);
            customFormat = true;
            i += 2;
        }
        else if (*(argv[i]) == '-' &&
                 (is_arg_colon_prefix(argv[i]+1,"af", &pcolon, 2) ||
                  is_arg_colon_prefix(argv[i]+1,"autoformat", &pcolon, 5))) {
            // make sure we have at least one argument to autoformat
            if (argc <= i+1 || *(argv[i+1]) == '-') {
                fprintf (stderr, "Error: Argument %s requires at least one attribute parameter\n", argv[i]);
                fprintf(stderr, "\t\te.g. condor_history %s ClusterId\n", argv[i]);
                exit(1);
            }
            if (pcolon) ++pcolon; // if there are options, skip over the colon to the options.
            int ixNext = parse_autoformat_args(argc, argv, i+1, pcolon, mask, diagnostic);
            if (ixNext > i)
                i = ixNext-1;
            customFormat = true;
        }
        else if (is_dash_arg_colon_prefix(argv[i], "print-format", &pcolon, 2)) {
            if ( (argc <= i+1) || (*(argv[i+1]) == '-' && (argv[i+1])[1] != 0)) {
                fprintf( stderr, "Error: Argument -print-format requires a filename argument\n");
                exit( 1 );
            }
            // hack allow -pr ! to disable use of user-default print format files.
            if (MATCH == strcmp(argv[i+1], "!")) {
                ++i;
                disable_user_print_files = true;
                continue;
            }
            if ( ! wide_format) mask.SetOverallWidth(getDisplayWidth()-1);
            customFormat = true;
            ++i;
            std::string where_expr;
            if (set_print_mask_from_stream(mask, where_expr, argv[i], true) < 0) {
                fprintf(stderr, "Error: cannot execute print-format file %s\n", argv[i]);
                exit (1);
            }
            if ( ! where_expr.empty()) {
                constraint.addCustomAND(where_expr.c_str());
            }
        }
        else if (is_dash_arg_prefix(argv[i],"constraint",1)) {
            // make sure we have at least one more argument
            if (argc <= i+1) {
                fprintf( stderr, "Error: Argument %s requires another parameter\n", argv[i]);
                exit(1);
            }
            i++;
            constraint.addCustomAND(argv[i]);
        }
#ifdef HAVE_EXT_POSTGRESQL
        else if (is_dash_arg_prefix(argv[i],"completedsince",3)) {
            i++;
            if (argc <= i) {
                fprintf(stderr,
                        "Error: Argument -completedsince requires a date and "
                        "optional timestamp as a parameter.\n");
                fprintf(stderr,
                        "\t\te.g. condor_history -completedsince \"2004-10-19 10:23:54\"\n");
                exit(1);
            }

            if (constraint!="") break;
            completedsince = strdup(argv[i]);
            parameters[0] = completedsince;
            queryhor.setQuery(HISTORY_COMPLETEDSINCE_HOR,parameters);
            queryver.setQuery(HISTORY_COMPLETEDSINCE_VER,parameters);
        }
#endif /* HAVE_EXT_POSTGRESQL */
        else if (sscanf (argv[i], "%d.%d", &cluster, &proc) == 2) {
            std::string jobconst;
            formatstr (jobconst, "%s == %d && %s == %d",
                       ATTR_CLUSTER_ID, cluster, ATTR_PROC_ID, proc);
            constraint.addCustomOR(jobconst.c_str());
#ifdef HAVE_EXT_POSTGRESQL
            parameters[0] = &cluster;
            parameters[1] = &proc;
            queryhor.setQuery(HISTORY_CLUSTER_PROC_HOR, parameters);
            queryver.setQuery(HISTORY_CLUSTER_PROC_VER, parameters);
#endif /* HAVE_EXT_POSTGRESQL */
        }
        else if (sscanf (argv[i], "%d", &cluster) == 1) {
            std::string jobconst;
            formatstr (jobconst, "%s == %d", ATTR_CLUSTER_ID, cluster);
            constraint.addCustomOR(jobconst.c_str());
#ifdef HAVE_EXT_POSTGRESQL
            parameters[0] = &cluster;
            queryhor.setQuery(HISTORY_CLUSTER_HOR, parameters);
            queryver.setQuery(HISTORY_CLUSTER_VER, parameters);
#endif /* HAVE_EXT_POSTGRESQL */
        }
        else if (is_dash_arg_prefix(argv[i],"debug",1)) {
            // dprintf to console
            dprintf_set_tool_debug("TOOL", 0);
        }
        else if (is_dash_arg_prefix(argv[i],"diagnostic",4)) {
            // dprintf to console
            diagnostic = true;
        }
        else if (is_dash_arg_prefix(argv[i], "name", 1)) {
            i++;
            if (argc <= i) {
                fprintf(stderr, "Error: Argument -name requires name of a remote schedd\n");
                fprintf(stderr, "\t\te.g. condor_history -name submit.example.com \n");
                exit(1);
            }
            g_name = argv[i];
            readfromfile = false;
#ifdef HAVE_EXT_POSTGRESQL
            remoteread = true;
#endif
        }
        else if (is_dash_arg_prefix(argv[i], "pool", 1)) {
            i++;
            if (argc <= i) {
                fprintf(stderr, "Error: Argument -name requires name of a remote schedd\n");
                fprintf(stderr, "\t\te.g. condor_history -name submit.example.com \n");
                exit(1);
            }
            g_pool = argv[i];
            readfromfile = false;
#ifdef HAVE_EXT_POSTGRESQL
            remoteread = true;
#endif
        }
        else {
            std::string ownerconst;
            owner = argv[i];
            formatstr(ownerconst, "%s == \"%s\"", ATTR_OWNER, owner);
            constraint.addCustomOR(ownerconst.c_str());
#ifdef HAVE_EXT_POSTGRESQL
            parameters[0] = owner;
            queryhor.setQuery(HISTORY_OWNER_HOR, parameters);
            queryver.setQuery(HISTORY_OWNER_VER, parameters);
#endif /* HAVE_EXT_POSTGRESQL */
        }
    }
    if (i<argc) Usage(argv[0]);

    MyString my_constraint;
    constraint.makeQuery(my_constraint);
    if (diagnostic) {
        fprintf(stderr, "Using effective constraint: %s\n", my_constraint.c_str());
    }
    if ( ! my_constraint.empty() && ParseClassAdRvalExpr( my_constraint.c_str(), constraintExpr ) ) {
        fprintf( stderr, "Error: could not parse constraint %s\n", my_constraint.c_str() );
        exit( 1 );
    }

#ifdef HAVE_EXT_POSTGRESQL
    /* This call must happen AFTER config() is called */
    if (checkDBconfig() == true && !readfromfile) {
        readfromfile = false;
    } else {
        readfromfile = true;
    }
#endif /* HAVE_EXT_POSTGRESQL */

#ifdef HAVE_EXT_POSTGRESQL
    if(!readfromfile && !remoteread) {
        if(remotequill) {
            if (Collectors == NULL) {
                Collectors = CollectorList::create();
                if(Collectors == NULL ) {
                    printf("Error: Unable to get list of known collectors\n");
                    exit(1);
                }
            }
            result = Collectors->query ( quillQuery, quillList );
            if(result != Q_OK) {
                printf("Fatal Error querying collectors\n");
                exit(1);
            }

            if(quillList.MyLength() == 0) {
                printf("Error: Unknown quill server %s\n", quillName);
                exit(1);
            }

            quillList.Open();
            while ((ad = quillList.Next())) {
                // get the address of the database
                dbIpAddr = dbName = queryPassword = NULL;
                if (!ad->LookupString(ATTR_QUILL_DB_IP_ADDR, &dbIpAddr) ||
                    !ad->LookupString(ATTR_QUILL_DB_NAME, &dbName) ||
                    !ad->LookupString(ATTR_QUILL_DB_QUERY_PASSWORD, &queryPassword) ||
                    (ad->LookupBool(ATTR_QUILL_IS_REMOTELY_QUERYABLE,flag) && !flag)) {
                    printf("Error: The quill daemon \"%s\" is not set up "
                           "for database queries\n", quillName);
                    exit(1);
                }
            }
        } else {
            // they just typed 'condor_history' on the command line and want
            // to use quill, so get the schedd ad for the local machine if
            // we can, figure out the name of the schedd and the
            // jobqueuebirthdate
            Daemon schedd( DT_SCHEDD, 0, 0 );

            if ( schedd.locate(Daemon::LOCATE_FULL) ) {
                char *scheddname = quillName;

                if (scheddname == NULL) {
                    // none set explicitly, look it up in the daemon ad
                    scheddname = schedd.name();
                    ClassAd *daemonAd = schedd.daemonAd();
                    int scheddbirthdate;
                    if(daemonAd) {
                        if(daemonAd->LookupInteger( ATTR_JOB_QUEUE_BIRTHDATE, scheddbirthdate) ) {
                            queryhor.setJobqueuebirthdate( (time_t)scheddbirthdate);
                            queryver.setJobqueuebirthdate( (time_t)scheddbirthdate);
                        }
                    }
                } else {
                    queryhor.setJobqueuebirthdate(0);
                    queryver.setJobqueuebirthdate(0);
                }
                queryhor.setScheddname(scheddname);
                queryver.setScheddname(scheddname);
            }
        }
        dbconn = getDBConnStr(quillName,dbIpAddr,dbName,queryPassword);
        historySnapshot = new HistorySnapshot(dbconn);
        if (!customFormat) {
            printf ("\n\n-- Quill: %s : %s : %s\n", quillName, dbIpAddr, dbName);
        }

        queryhor.prepareQuery();  // create the query strings before sending off to historySnapshot
        queryver.prepareQuery();

        st = historySnapshot->sendQuery(&queryhor, &queryver, longformat,
                                        false, customFormat, &mask, constraint.c_str());

        // if there's a failure here and if we're not posing a query on a
        // remote quill daemon, we should instead query the local file
        if(st == QUILL_FAILURE) {
            printf( "-- Database at %s not reachable\n", dbIpAddr);
            if(!remotequill) {
                char *tmp_hist = param("HISTORY");
                if (!customFormat) {
                    printf( "--Failing over to the history file at %s instead --\n",
                            tmp_hist ? tmp_hist : "(null)" );
                }
                if(tmp_hist) {
                    free(tmp_hist);
                }
                readfromfile = true;
            }
        }
        // query history table
        if (historySnapshot->isHistoryEmpty()) {
            printf("No historical jobs in the database match your query\n");
        }
        historySnapshot->release();
        delete(historySnapshot);
    }
#endif /* HAVE_EXT_POSTGRESQL */

    if(readfromfile == true) {
        readHistoryFromFiles(fileisuserlog, JobHistoryFileName, my_constraint.c_str(), constraintExpr);
    } else {
        readHistoryRemote(constraintExpr);
    }

#ifdef HAVE_EXT_POSTGRESQL
    if(completedsince) free(completedsince);
    if(parameters) free(parameters);
    if(dbIpAddr) free(dbIpAddr);
    if(dbName) free(dbName);
    if(queryPassword) free(queryPassword);
    if(dbconn) free(dbconn);
#endif

    return 0;
}
void LiveJobImpl::Set ( const char *_name, const char *_value )
{
    if ( strcasecmp ( _name, ATTR_JOB_SUBMISSION ) == 0 ) {
        std::string val = TrimQuotes( _value );
        // TODO: grab the cluster from our key
        PROC_ID id = getProcByString(m_job->GetKey());
        if (m_job) {
            m_job->SetSubmission ( val.c_str(), id.cluster );
        }
    }

    // our status is changing...decrement for old one
    if ( strcasecmp ( _name, ATTR_JOB_STATUS ) == 0 ) {
        if ( m_job ) {
            m_job->SetStatus(this->GetStatus());
            m_job->DecrementSubmission ();
        }
    }

    if ( strcasecmp ( _name, ATTR_OWNER ) == 0 ) {
        // need to leave an owner for this job
        // to be picked up soon
        // if we are in here, we don't have m_submission
        PROC_ID id = getProcByString(m_job->GetKey());
        std::string val = TrimQuotes( _value );
        g_ownerless_clusters[id.cluster] = val;
        m_job->UpdateSubmission(id.cluster,val.c_str());
    }

    // parse the type
    ExprTree *expr;
    if ( ParseClassAdRvalExpr ( _value, expr ) ) {
        dprintf ( D_ALWAYS, "error: parsing %s[%s] = %s, skipping\n",
                  m_job->GetKey(), _name, _value );
        return;
    }
    // add this value to the classad
    classad::Value value;
    expr->Evaluate(value);
    switch ( value.GetType() ) {
        case classad::Value::INTEGER_VALUE:
            int i;
            from_string<int> ( i, std::string ( _value ), std::dec );
            m_full_ad->Assign ( _name, i );
            break;
        case classad::Value::REAL_VALUE:
            float f;
            from_string<float> ( f, std::string ( _value ), std::dec );
            m_full_ad->Assign ( _name, f );
            break;
        case classad::Value::STRING_VALUE:
            m_full_ad->Assign ( _name, _value );
            break;
        default:
            m_full_ad->AssignExpr ( _name, _value );
            break;
    }
    delete expr; expr = NULL;

    // our status has changed...increment for new one
    if ( strcasecmp ( _name, ATTR_JOB_STATUS ) == 0 ) {
        if ( m_job ) {
            m_job->SetStatus(this->GetStatus());
            m_job->IncrementSubmission ();
        }
    }
}
// make query
int GenericQuery::
makeQuery (ExprTree *&tree)
{
    int      i, value;
    char     *item;
    float    fvalue;
    MyString req = "";

    tree = NULL;

    // construct query requirement expression
    bool firstCategory = true;

    // add string constraints
    for (i = 0; i < stringThreshold; i++) {
        stringConstraints [i].Rewind ();
        if (!stringConstraints [i].AtEnd ()) {
            bool firstTime = true;
            req += firstCategory ? "(" : " && (";
            while ((item = stringConstraints [i].Next ())) {
                req.sprintf_cat ("%s(%s == \"%s\")",
                                 firstTime ? " " : " || ",
                                 stringKeywordList [i], item);
                firstTime = false;
                firstCategory = false;
            }
            req += " )";
        }
    }

    // add integer constraints
    for (i = 0; i < integerThreshold; i++) {
        integerConstraints [i].Rewind ();
        if (!integerConstraints [i].AtEnd ()) {
            bool firstTime = true;
            req += firstCategory ? "(" : " && (";
            while (integerConstraints [i].Next (value)) {
                req.sprintf_cat ("%s(%s == %d)",
                                 firstTime ? " " : " || ",
                                 integerKeywordList [i], value);
                firstTime = false;
                firstCategory = false;
            }
            req += " )";
        }
    }

    // add float constraints
    for (i = 0; i < floatThreshold; i++) {
        floatConstraints [i].Rewind ();
        if (!floatConstraints [i].AtEnd ()) {
            bool firstTime = true;
            req += firstCategory ? "(" : " && (";
            while (floatConstraints [i].Next (fvalue)) {
                req.sprintf_cat ("%s(%s == %f)",
                                 firstTime ? " " : " || ",
                                 floatKeywordList [i], fvalue);
                firstTime = false;
                firstCategory = false;
            }
            req += " )";
        }
    }

    // add custom AND constraints
    customANDConstraints.Rewind ();
    if (!customANDConstraints.AtEnd ()) {
        bool firstTime = true;
        req += firstCategory ? "(" : " && (";
        while ((item = customANDConstraints.Next ())) {
            req.sprintf_cat ("%s(%s)",
                             firstTime ? " " : " && ",
                             item);
            firstTime = false;
            firstCategory = false;
        }
        req += " )";
    }

    // add custom OR constraints
    customORConstraints.Rewind ();
    if (!customORConstraints.AtEnd ()) {
        bool firstTime = true;
        req += firstCategory ? "(" : " && (";
        while ((item = customORConstraints.Next ())) {
            req.sprintf_cat ("%s(%s)",
                             firstTime ? " " : " || ",
                             item);
            firstTime = false;
            firstCategory = false;
        }
        req += " )";
    }

    // absolutely no constraints at all
    if (firstCategory) {
        req += "TRUE";
    }

    // parse constraints and insert into query ad
    if (ParseClassAdRvalExpr (req.Value(), tree) > 0) return Q_PARSE_ERROR;

    return Q_OK;
}
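// ---------------------------------------------------------------------------
// Hedged sketch (illustration only, not HTCondor's GenericQuery API): the same
// build-then-parse idea using only the public ClassAd library and the standard
// library. Values within one category are ORed, categories are ANDed, and the
// finished string is parsed once. make_query_tree and its parameters are
// made-up names; ClassAdParser::ParseExpression is real library API.
// ---------------------------------------------------------------------------
#include <sstream>
#include <string>
#include <vector>
#include "classad/classad_distribution.h"

static classad::ExprTree *make_query_tree(
    const std::string &string_attr, const std::vector<std::string> &string_vals,
    const std::string &int_attr, const std::vector<int> &int_vals)
{
    std::ostringstream req;
    bool first_category = true;

    if (!string_vals.empty()) {
        req << "(";
        for (size_t i = 0; i < string_vals.size(); ++i) {
            if (i) req << " || ";
            req << string_attr << " == \"" << string_vals[i] << "\"";
        }
        req << ")";
        first_category = false;
    }
    if (!int_vals.empty()) {
        req << (first_category ? "(" : " && (");
        for (size_t i = 0; i < int_vals.size(); ++i) {
            if (i) req << " || ";
            req << int_attr << " == " << int_vals[i];
        }
        req << ")";
        first_category = false;
    }
    if (first_category) req << "TRUE";   // no constraints at all: match everything

    classad::ClassAdParser parser;
    return parser.ParseExpression(req.str(), true);   // NULL on parse error
}
// e.g. with string_vals {"smith","jones"} and int_vals {1,2} this parses
// (Owner == "smith" || Owner == "jones") && (JobStatus == 1 || JobStatus == 2).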
void Rooster::poll()
{
    dprintf(D_FULLDEBUG,"Cock-a-doodle-doo! (Time to look for machines to wake up.)\n");

    ClassAdList startdAds;
    CondorQuery unhibernateQuery(STARTD_AD);
    ExprTree *requirements = NULL;

    if( ParseClassAdRvalExpr( m_unhibernate_constraint.Value(), requirements )!=0 || requirements==NULL ) {
        EXCEPT("Invalid expression for ROOSTER_UNHIBERNATE: %s\n",
               m_unhibernate_constraint.Value());
    }

    unhibernateQuery.addANDConstraint(m_unhibernate_constraint.Value());

    CollectorList* collects = daemonCore->getCollectorList();
    ASSERT( collects );

    QueryResult result;
    result = collects->query(unhibernateQuery,startdAds);
    if( result != Q_OK ) {
        dprintf(D_ALWAYS,
                "Couldn't fetch startd ads using constraint "
                "ROOSTER_UNHIBERNATE=%s: %s\n",
                m_unhibernate_constraint.Value(), getStrQueryResult(result));
        return;
    }

    dprintf(D_FULLDEBUG,"Got %d startd ads matching ROOSTER_UNHIBERNATE=%s\n",
            startdAds.MyLength(), m_unhibernate_constraint.Value());

    startdAds.Sort(StartdSortFunc,&m_rank_ad);

    startdAds.Open();
    int num_woken = 0;
    ClassAd *startd_ad;
    HashTable<MyString,bool> machines_done(MyStringHash);
    while( (startd_ad=startdAds.Next()) ) {
        MyString machine;
        MyString name;
        startd_ad->LookupString(ATTR_MACHINE,machine);
        startd_ad->LookupString(ATTR_NAME,name);

        if( machines_done.exists(machine)==0 ) {
            dprintf(D_FULLDEBUG,
                    "Skipping %s: already attempted to wake up %s in this cycle.\n",
                    name.Value(),machine.Value());
            continue;
        }

        // in case the unhibernate expression is time-sensitive,
        // re-evaluate it now to make sure it still passes
        if( !EvalBool(startd_ad,requirements) ) {
            dprintf(D_ALWAYS,
                    "Skipping %s: ROOSTER_UNHIBERNATE is no longer true.\n",
                    name.Value());
            continue;
        }

        if( wakeUp(startd_ad) ) {
            machines_done.insert(machine,true);

            if( ++num_woken >= m_max_unhibernate && m_max_unhibernate > 0 ) {
                dprintf(D_ALWAYS,
                        "Reached ROOSTER_MAX_UNHIBERNATE=%d in this cycle.\n",
                        m_max_unhibernate);
                break;
            }
        }
    }
    startdAds.Close();

    delete requirements;
    requirements = NULL;

    if( startdAds.MyLength() ) {
        dprintf(D_FULLDEBUG,"Done sending wakeup calls.\n");
    }
}