//++++++++++++++++++++++++++++++++++ void view_edit_search::init_layout(){ // _vbox = new QVBoxLayout(); setLayout(_vbox); // _hbox_top = new QHBoxLayout(); _hbox_mid = new QHBoxLayout(); _hbox_low = new QHBoxLayout(); _vbox->addLayout( _hbox_top ); _vbox->addLayout( _hbox_mid ); _vbox->addLayout( _hbox_low ); // Top. // _list_attributes = new QListWidget(); _list_cats = new QListWidget(); connect( _list_attributes, SIGNAL( itemClicked(QListWidgetItem*) ), this, SLOT( write_categories(QListWidgetItem*) ) ); connect( _list_cats, SIGNAL( itemClicked(QListWidgetItem*) ), this, SLOT( append_value_to_query(QListWidgetItem*) ) ); _hbox_top->addWidget( _list_attributes ); _hbox_top->addWidget( _list_cats ); // Mid. // _text_query = new QTextEdit(); _hbox_mid->addWidget( _text_query ); // Low. // _button_help = new QPushButton( "Help" ); _button_submit = new QPushButton( "Submit" ); _button_cancel = new QPushButton( "Cancel" ); connect(_button_help, SIGNAL(clicked()), this, SLOT(do_help()) ); connect(_button_submit, SIGNAL(clicked()), this, SLOT(do_submit()) ); connect(_button_cancel, SIGNAL(clicked()), this, SLOT(close()) ); _hbox_low->addWidget( _button_help ); _hbox_low->addWidget( _button_submit ); _hbox_low->addWidget( _button_cancel ); }
//-------------------------------------------------------------------------
// Submit one DAG node's job via condor_submit.
//
// Builds a condor_submit command line that decorates the node job's
// classad with DAG bookkeeping attributes (node name, parent node
// names, the parent DAGMan's cluster ID, the workflow log and its
// event mask, DAG_STATUS / FAILED_COUNT, any per-node VARS), runs it
// from the node's working directory, and captures the resulting job
// ID in condorID.
//
// Parameters:
//   dm                 - DAGMan state (submit exe path, job ID, options).
//   cmdFile            - the node's submit description file.
//   condorID           - out: the submitted job's Condor ID (filled by
//                        do_submit).
//   DAGNodeName        - this node's name in the DAG.
//   DAGParentNodeNames - space-separated names of the node's parents.
//   vars               - VARS entries from the DAG file for this node.
//   retry              - current retry count, substituted for $(RETRY).
//   directory          - node working directory to cd into first.
//   workflowLogFile    - DAGMan's default workflow log (must be non-NULL).
//   hold_claim         - if true, ask the job to keep its claim idle.
//
// Returns true iff condor_submit succeeded AND we could cd back to the
// original directory.
bool condor_submit( const Dagman &dm, const char* cmdFile, CondorID& condorID,
			const char* DAGNodeName, MyString &DAGParentNodeNames,
			List<Job::NodeVar> *vars, int retry,
			const char* directory, const char *workflowLogFile,
			bool hold_claim )
{
	TmpDir		tmpDir;
	MyString	errMsg;
		// Run condor_submit from the node's own directory; we cd back
		// at the end (see Cd2MainDir below).
	if ( !tmpDir.Cd2TmpDir( directory, errMsg ) ) {
		debug_printf( DEBUG_QUIET,
				"Could not change to node directory %s: %s\n",
				directory, errMsg.Value() );
		return false;
	}

	ArgList args;

		// construct arguments to condor_submit to add attributes to the
		// job classad which identify the job's node name in the DAG, the
		// node names of its parents in the DAG, and the job ID of DAGMan
		// itself; then, define submit_event_notes to print the job's node
		// name inside the submit event in the userlog

		// NOTE: we specify the job ID of DAGMan using only its cluster ID
		// so that it may be referenced by jobs in their priority
		// attribute (which needs an int, not a string).  Doing so allows
		// users to effectively "batch" jobs by DAG so that when they
		// submit many DAGs to the same schedd, all the ready jobs from
		// one DAG complete before any jobs from another begin.
	args.AppendArg( dm.condorSubmitExe );

	args.AppendArg( "-a" );
	MyString nodeName = MyString(ATTR_DAG_NODE_NAME_ALT) + " = " + DAGNodeName;
	args.AppendArg( nodeName.Value() );

		// append a line adding the parent DAGMan's cluster ID to the job ad
	args.AppendArg( "-a" );
	MyString dagJobId = MyString( "+" ) + ATTR_DAGMAN_JOB_ID + " = " +
				dm.DAGManJobId._cluster;
	args.AppendArg( dagJobId.Value() );

		// now we append a line setting the same thing as a submit-file macro
		// (this is necessary so the user can reference it in the priority)
	args.AppendArg( "-a" );
	MyString dagJobIdMacro = MyString( "" ) + ATTR_DAGMAN_JOB_ID + " = " +
				dm.DAGManJobId._cluster;
	args.AppendArg( dagJobIdMacro.Value() );

		// annotate the userlog submit event with the node name
	args.AppendArg( "-a" );
	MyString submitEventNotes = MyString(
				"submit_event_notes = DAG Node: " ) + DAGNodeName;
	args.AppendArg( submitEventNotes.Value() );

	ASSERT( workflowLogFile );

		// We need to append the DAGman default log file to
		// the log file list
	args.AppendArg( "-a" );
	std::string dlog( "dagman_log = " );
	dlog += workflowLogFile;
	args.AppendArg( dlog.c_str() );
	debug_printf( DEBUG_VERBOSE, "Adding a DAGMan workflow log %s\n",
				workflowLogFile );

		// Now append the mask
	debug_printf( DEBUG_VERBOSE, "Masking the events recorded in the DAGMAN workflow log\n" );
	args.AppendArg( "-a" );
	std::string dmask("+");
	dmask += ATTR_DAGMAN_WORKFLOW_MASK;
	dmask += " = \"";
	const char *eventMask = getEventMask();
	debug_printf( DEBUG_VERBOSE, "Mask for workflow log is %s\n",
				eventMask );
	dmask += eventMask;
	dmask += "\"";
	args.AppendArg( dmask.c_str() );

		// Suppress the job's log file if that option is enabled.
	if ( dm._suppressJobLogs ) {
		debug_printf( DEBUG_VERBOSE, "Suppressing node job log file\n" );
		args.AppendArg( "-a" );
		args.AppendArg( "log = ''" );
	}

		// The parent-names attribute is kept in a separate ArgList so we
		// can measure its length and drop it if the command line would
		// exceed _POSIX_ARG_MAX (see below).
	ArgList parentNameArgs;
	parentNameArgs.AppendArg( "-a" );
	MyString parentNodeNames = MyString( "+DAGParentNodeNames = " ) +
				"\"" + DAGParentNodeNames + "\"";
	parentNameArgs.AppendArg( parentNodeNames.Value() );

		// set any VARS specified in the DAG file
	MyString anotherLine;
	ListIterator<Job::NodeVar> varsIter(*vars);
	Job::NodeVar nodeVar;
	while ( varsIter.Next(nodeVar) ) {
			// Substitute the node retry count if necessary.  Note that
			// we can't do this in Job::ResolveVarsInterpolations()
			// because that's only called at parse time.
		MyString value = nodeVar._value;
		MyString retryStr( retry );
		value.replaceString( "$(RETRY)", retryStr.Value() );
		MyString varStr = nodeVar._name + " = " + value;
		args.AppendArg( "-a" );
		args.AppendArg( varStr.Value() );
	}

		// Set the special DAG_STATUS variable (mainly for use by
		// "final" nodes).
	args.AppendArg( "-a" );
	MyString var = "DAG_STATUS = ";
	var += dm.dag->_dagStatus;
	args.AppendArg( var.Value() );

		// Set the special FAILED_COUNT variable (mainly for use by
		// "final" nodes).
	args.AppendArg( "-a" );
	var = "FAILED_COUNT = ";
	var += dm.dag->NumNodesFailed();
	args.AppendArg( var.Value() );

		// how big is the command line so far
	MyString display;
	args.GetArgsStringForDisplay( &display );
	int cmdLineSize = display.Length();

	parentNameArgs.GetArgsStringForDisplay( &display );
	int DAGParentNodeNamesLen = display.Length();

		// how many additional chars must we still add to command line
		// NOTE: according to the POSIX spec, the args +
		// environ given to exec() cannot exceed
		// _POSIX_ARG_MAX, so we also need to calculate & add
		// the size of environ** to reserveNeeded
	int reserveNeeded = strlen( cmdFile );
	int maxCmdLine = _POSIX_ARG_MAX;

		// if we don't have room for DAGParentNodeNames, leave it unset
	if( cmdLineSize + reserveNeeded + DAGParentNodeNamesLen > maxCmdLine ) {
		debug_printf( DEBUG_NORMAL, "Warning: node %s has too many parents "
					"to list in its classad; leaving its DAGParentNodeNames "
					"attribute undefined\n", DAGNodeName );
		check_warning_strictness( DAG_STRICT_3 );
	} else {
		args.AppendArgsFromArgList( parentNameArgs );
	}

		// Optionally ask the schedd to keep the claim idle between
		// node jobs (see dm._claim_hold_time for how long).
	if( hold_claim ){
		args.AppendArg( "-a" );
		MyString holdit = MyString("+") + MyString(ATTR_JOB_KEEP_CLAIM_IDLE) +
					" = " + dm._claim_hold_time;
		args.AppendArg( holdit.Value() );
	}

		// Suppress email notification for the node job if requested.
	if (dm._submitDagDeepOpts.suppress_notification) {
		args.AppendArg( "-a" );
		MyString notify = MyString("notification = never");
		args.AppendArg( notify.Value() );
	}

		// The submit file itself is the final argument.
	args.AppendArg( cmdFile );

	bool success = do_submit( args, condorID, dm.prohibitMultiJobs );

		// Always try to cd back, even if the submit failed; failing to
		// get back to the main directory is itself a failure.
	if ( !tmpDir.Cd2MainDir( errMsg ) ) {
		debug_printf( DEBUG_QUIET,
				"Could not change to original directory: %s\n",
				errMsg.Value() );
		success = false;
	}

	return success;
}
// Slot invoked when the scrobble-submission network request completes.
// Inspects the transport status and the server's textual response,
// then either retries (after a fresh handshake), reports failure, or
// marks the submitted entries as sent and continues with any that
// remain unsent.
void Submit::senddata_finished(QNetworkReply *reply)
{
	//emit add_log(LOG_INFO, "Submit::senddata_finished");

		// Transport-level failure: tear down the request and report.
	if ( reply->error() != QNetworkReply::NoError ) {
		nr_submit->disconnect(SIGNAL(uploadProgress(qint64, qint64)));
		nr_submit->deleteLater();
		emit finished(false, tr("SUBMIT %1: Request failed, %2")
			.arg(SITE_NAME[index], reply->errorString()));
		return;
	}

	emit add_log(LOG_INFO, QString("SUBMIT: %1 Request succeeded")
		.arg(SITE_NAME[index]));
	QString result(reply->readAll());
	emit add_log(LOG_INFO, QString("%1: Server response: %2")
		.arg(SITE_NAME[index], result));
	if (result != "") {
		if (result.contains("no POST parameters")) {
			// Server problem - http://www.last.fm/forum/21716/_/201367
			// "FAILED Plugin bug: Not all request variables are set - no POST parameters"
			// This if statement can be removed if/when fixed
			// NOTE(review): the retry branches below do not disconnect or
			// deleteLater nr_submit before calling do_submit() — presumably
			// do_submit() replaces it; confirm there is no reply leak here.
			need_handshake = true;
			reset_tracks();
			do_submit();
			return;
		} else if (result.contains("BADSESSION")) {
				// Session expired: redo the handshake and resubmit.
			need_handshake = true;
			reset_tracks();
			do_submit();
			return;
		} else if (result.contains("FAILED")) {
				// Hard server-side failure: give up on this submission.
			emit add_log(LOG_INFO, QString("%1: Submission FAILED").arg(SITE_NAME[index]));
			nr_submit->disconnect(SIGNAL(uploadProgress(qint64, qint64)));
			nr_submit->deleteLater();
			emit finished(false, tr("%1: Server returned an error after sending data")
				.arg(SITE_NAME[index]));
			return;
		} else if (result.contains("OK")) {
				// Success: mark every entry in this batch as sent, then
				// count how many entries for this site are still unsent.
				// The shared entry list is guarded by context.mutex.
			context.mutex->lock();
			int i;
			int count = 0;
			for ( i = 0; i < entry_index.size(); i++ ) {
				int entry_num = entry_index.value(i);
				scrob_entry tmp = context.entries->at(entry_num);
				tmp.sent[index] = SENT_SUCCESS;
				context.entries->replace(entry_num, tmp);
			}
			for (i = 0; i < context.entries->size(); i++) {
				if (SENT_UNSENT == context.entries->at(i).sent[index])
					count++;
			}
			context.mutex->unlock();
			if (count > 0)
					// More entries to go: send the next batch.
				do_submit();
			else {
					// Everything sent: clean up both network replies.
				emit add_log(LOG_DEBUG, QString("%1: Submission complete")
					.arg(SITE_NAME[index]));
				nr_submit->disconnect(SIGNAL(uploadProgress(qint64, qint64)));
				nr_submit->deleteLater();
				nr_handshake->deleteLater();
				submission_ok = true;
				emit finished(true, "");
			}
		}
		// NOTE(review): a non-empty response matching none of the four
		// patterns above falls through silently with no cleanup or
		// finished() signal — verify that is intentional.
	} else {
			// Empty body despite NoError: treat as a failed submission.
		nr_submit->disconnect(SIGNAL(uploadProgress(qint64, qint64)));
		nr_submit->deleteLater();
		emit finished(false, tr("%1: Empty result from server")
			.arg(SITE_NAME[index]));
		return;
	}
}