Code example #1
File: libkviurl.cpp  Project: kartagis/KVIrc
bool urllist_module_event_onUrl(KviKvsModuleEventCall * c)
{
	KviKvsVariant * vUrl = c->firstParam();
	QString szUrl;
	if(vUrl)vUrl->asString(szUrl);

	if (check_url(c->window(),szUrl) == 0)
	{

		KviUrl *tmp = new KviUrl;
		QString tmpTimestamp;
		QDate d = QDate::currentDate();
		QString date;
		date.sprintf("%d-%d%d-%d%d",d.year(),d.month() / 10,d.month() % 10,d.day() / 10,d.day() % 10);
		tmpTimestamp = "["+date+"]"+" [";
		tmpTimestamp += QTime::currentTime().toString()+"]";
		tmp->url = szUrl;
		tmp->window = c->window()->plainTextCaption();
		tmp->count = 1;
		tmp->timestamp = tmpTimestamp;

		g_pList->append(tmp);
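		// Update every open URL list dialog with the newly recorded entry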
		for (UrlDlgList *tmpitem=g_pUrlDlgList->first();tmpitem;tmpitem=g_pUrlDlgList->next()) {
			if (tmpitem->dlg) {
				QString tmpCount;
				tmpCount.setNum(tmp->count);
				tmpitem->dlg->addUrl(QString(tmp->url), QString(tmp->window), tmpCount, QString(tmp->timestamp));
				tmpitem->dlg->windowListItem()->highlight(false);
			}
		}
	}
	return true;
}
Code example #2
File: gwget_data.c  Project: nayanranjankapri/gwget
static void
gwget_download_playlist_items(gchar *filename)
{
	FILE *f;
	gchar line[1024];
	GwgetData *gwgetdata;
	
	f = g_fopen(filename, "r");
	if (f!=NULL) {
		while (fgets(line, 1024, f)!=NULL) {
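			/* Only lines recognized as http:// or ftp:// URLs are queued for download */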
			if (check_url("http://", line) || check_url("ftp://", line)) {
				gwgetdata = gwget_data_new (g_strstrip(line));
				gwget_data_add_download(gwgetdata);
				gwget_data_start_download(gwgetdata);
			}
		}
		fclose(f);
	}
}
Code example #3
File: sm_net.cpp  Project: repinel/SimpleMan
const bool SM_Net::create ( const std::string &new_url )
{
	if( ! check_url( new_url ) )
	{
		SM_Util::msg_err( "Error: could not identify URL." );
		show_error( "URL Check", "Could not identify URL." , false );
		return false;
	}
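	/* Identify the protocol from the URL prefix: 0 = HTTP, 1 = FTP, 2 = unsupported */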
	int find_return = new_url.find( "http://" );

		/* http */
	if( find_return == 0 )
		sm_protocol = 0;
	else
	{
		find_return = new_url.find( "ftp://" );

			/* ftp */
		if( find_return == 0 )
			sm_protocol = 1;
		else
		{
			SM_Util::msg_err( "Error: network protocol not supported." );
			show_error( "URL Check", "Network protocol\nnot supported." , false );
			sm_protocol = 2;
			return false;
		}
	}
	if ( ! set_url( new_url ) )
	{
		SM_Util::msg_err( "Error: could not define url." );
		show_error( "URL Check", "Could not define url." , false );
		return false;
	}
	if ( ! set_port( new_url ) )
	{
		SM_Util::msg_err( "Error: could not define port." );
		show_error( "Port Check", "Could not define port." , false );
		return false;
	}
	if ( ! set_address( new_url ) )
	{
		SM_Util::msg_err( "Error: could not define address." );
		show_error( "Address Check", "Could not define address." , false );
		return false;
	}
	if ( ! set_fileName( new_url ) )
	{
		SM_Util::msg_err( "Error: could not define file name." );
		show_error( "URL File Check", "Could not define\nfile name." , false );
		return false;
	}
	return true;
}
Code example #4
File: parser.cpp  Project: Kapral42/pdc2015_Yandex
size_t parser::find_herfs()
{
	size_t iter = 0;
	while (true) {
		iter = content.get()->find(" href=\"", iter);
		if (iter == std::string::npos)
			break;
		iter += 7;
		size_t substr_size = content.get()->find('\"', iter) - iter;
		// Extract the href value; keeping it on the stack avoids leaking the
		// heap string that the original 'new std::string' never released.
		std::string sub_s = content.get()->substr(iter, substr_size);
		if (check_url(sub_s)) {
			found_urls.push_back(std::make_shared<std::string>(sub_s));
		}
	}
	return found_urls.size();
}
Code example #5
File: httplint.c  Project: krishnaneeraja/PDS_Demo
/**
 * Main entry point.
 */
int main(int argc, char *argv[])
{
  int i = 1;

  if (argc < 2)
    die("Usage: httplint [--html] url [url ...]");

  init();

  if (1 < argc && strcmp(argv[1], "--html") == 0) {
    html = true;
    i++;
  }

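  /* Lint each URL supplied on the command line */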
  for (; i != argc; i++)
    check_url(argv[i]);

  curl_global_cleanup();

  return 0;
}
Code example #6
File: aggregator.cpp  Project: idaunis/binarytiers
void template_preprocess_aggregator_feed_source(map <string, string> &variables) 
{
	map <string, string> feed = variables;
	  
	variables["source_icon"] = theme("feed_icon", feed);
	variables["source_image"] = feed["image"];
	variables["source_description"] = aggregator_filter_xss(feed["description"]);
	
	map <string, string> url_options;
	url_options["absolute"] = _TRUE;
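	// Link back to the feed source as an absolute URL, passed through check_url()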
	variables["source_url"] = check_url(url( feed["link"], &url_options ));

	if ( isset(feed["checked"]) ) {
		variables["last_checked"] = format_interval( time() - intval(feed["checked"]) ) + " ago";
	}
	else {
		variables["last_checked"] = "never";
	}

	if ( user_access("administer news feeds") ) {
		variables["last_checked"] = "<a href=\""+ url("admin/content/aggregator") +"\">"+ variables["last_checked"] + "</a>";
	}
}
Code example #7
File: aggregator.cpp  Project: idaunis/binarytiers
void template_preprocess_aggregator_item( map <string, string> &variables ) 
{
	map <string, string> item = variables;

	variables["feed_url"] = check_url( item["link"] );
	variables["feed_title"] = check_plain( item["title"] );
	variables["content"] = aggregator_filter_xss( item["description"] );

	variables["source_url"] = "";
	variables["source_title"] = "";
	if ( isset(item["ftitle"]) && isset(item["fid"]) ) 
	{
		variables["source_url"] = url("aggregator/sources/"+item["fid"]);
		variables["source_title"] = check_plain(item["ftitle"]);
	}
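	// Items posted today get a relative age; older items get a formatted date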
	if ( date("Ymd", item["timestamp"]) == date("Ymd") ) 
	{
		variables["source_date"] = format_interval(time() - intval(item["timestamp"]) ) + " ago";
	}
	else 
	{
		variables["source_date"] = format_date( item["timestamp"] );
	}
}
Code example #8
File: aggregator.cpp  Project: idaunis/binarytiers
void aggregator_refresh(map <string, string> &feed)
{
	// Generate conditional GET headers.
	map <string, string> headers;
  
	if ( isset(feed["etag"]) ) 
	{
		headers["If-None-Match"] = feed["etag"];
	}
	if ( isset(feed["modified"]) ) 
	{
		headers["If-Modified-Since"] = gmdate("D, d M Y H:i:s", feed["modified"]) + " GMT";
	}
	
	map <string, string> result;
  
	// Request feed.
	http_request( result, feed["url"], headers );

	// Process HTTP response code.
	switch ( intval( result["response_code"] ) ) 
	{
		case 304:
			if(DB_TYPE==1)
				redis_command("HSET aggregator_feed:%d checked %d", intval(feed["fid"]), time() );
			if(DB_TYPE==2)
				db_querya("UPDATE aggregator_feed SET checked = %d WHERE fid = %d", str( time() ).c_str(), feed["fid"].c_str() );

			set_page_message( "There is no new syndicated content from " + feed["title"] );
			break;
		case 301:
			feed["url"] = result["redirect_url"];
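			// Deliberate fall-through: process the response at the new URL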
		case 200:
		case 302:
		case 307:
			// Filter the input data:
			if ( aggregator_parse_feed( result["data"], feed ) ) 
			{
				string modified = !isset2(result,"Last-Modified") ? "0" : strtotime( result["Last-Modified"] );

				// Prepare the channel data.
				for( map <string, string>::iterator curr = channel.begin(), end = channel.end();  curr != end;  curr++ )
				{
					trim( curr->second );
					channel[curr->first] = curr->second;
				}

				// Prepare the image data (if any).
				for( map <string, string>::iterator curr = image.begin(), end = image.end();  curr != end;  curr++ )
				{
					trim( curr->second );
					image[curr->first] = curr->second;
				}

				string str_image;
				if (isset(image["LINK"]) && isset(image["URL"]) && isset(image["TITLE"]) ) 
				{
					// Note: we should really use theme_image() here, but that only works with local images; it won't work with images fetched from a URL unless PHP version > 5
					str_image = "<a href=\""+ check_url(image["LINK"]) +"\" class=\"feed-image\"><img src=\"" + check_url(image["URL"]) + "\" alt=\"" + check_plain(image["TITLE"]) +"\" /></a>";
				}
				else {
					str_image = "";
				}

				string etag = !isset(result["ETag"]) ? "" : result["ETag"];
				// Update the feed data.

				if(DB_TYPE==1)
				{
					redis_command_fields(redis_arg("HMSET aggregator_feed:%d", intval(feed["fid"]) ), "", 
						"url,checked,link,description,image,etag,modified", "%s,%d,%s,%s,%s,%s,%d", 
						feed["url"].c_str(), 
						time(), 
						channel["LINK"].c_str(), 
						channel["DESCRIPTION"].c_str(), 
						str_image.c_str(), 
						etag.c_str(), 
						intval( modified ) );
				}
				if(DB_TYPE==2)
				{
					db_querya("UPDATE aggregator_feed SET url = '%s', checked = %d, link = '%s', description = '%s', image = '%s', etag = '%s', modified = %d WHERE fid = %d", 
						feed["url"].c_str(), 
						str(time()).c_str(), 
						channel["LINK"].c_str(), 
						channel["DESCRIPTION"].c_str(), 
						str_image.c_str(), 
						etag.c_str(), 
						modified.c_str(), 
						feed["fid"].c_str() );
				}

				set_page_message( "There is new syndicated content from " + feed["title"] );
				break;
			}
			result["error"] = "feed not parseable";
		default:
			set_page_message( "The feed from "+feed["title"]+" seems to be broken, because of error \""+result["response_code"]+"\". "+ result["error"] );
	}
}
Code example #9
File: resume.c  Project: 4383/dirb
void resume(void) {
  FILE *desc;
  char *home=NULL;
  char *dumppath=NULL;
  char *optionspath=NULL;
  char *wordlistpath=NULL;
  char *dirlistpath=NULL;

  resuming=1;
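  // Build the dump file paths under the user's home directory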
  home = getenv("HOME");
  asprintf(&dumppath,"%s/%s",home,DUMP_DIR);
  asprintf(&optionspath, "%s/%s",home,OPTIONS_DUMP);
  asprintf(&wordlistpath, "%s/%s",home,WORDLIST_DUMP);
  asprintf(&dirlistpath, "%s/%s",home,DIRLIST_DUMP);

  // Restore the options structure

  if((desc=fopen(optionspath, "r"))==0) {
    printf("\n(!) FATAL: Error opening input file: %s\n", optionspath);
    exit(-1);
    }
  fread(&options, sizeof(struct opciones), 1, desc);
  fclose(desc);


  // Initialize

  check_url(options.current_dir);
  get_options();
  init_exts();
  IMPRIME("CURRENT_BASE: %s\n", options.current_dir);
  IMPRIME("CURRENT_WORD: %s\n", options.current_word);
  IMPRIME("\n-----------------\n\n");


  // Mutations

  /*
  if(options.mutations_file) {
    muts_base=crea_wordlist_fich(options.mutation_file);
    } else if(options.mutations_list) {
    muts_base=crea_extslist(options.mutation_list);
    }
  */

  // Restore the dirlist

  dirlist_current=crea_wordlist_fich(dirlistpath);
  dirlist_base=dirlist_current;
  while(dirlist_current->siguiente!=0) { dirlist_current=dirlist_current->siguiente; }
  dirlist_final=dirlist_current;
  dirlist_current=dirlist_base;


  // Restore the wordlist

  wordlist_current=crea_wordlist_fich(wordlistpath);
  wordlist_base=wordlist_current;
  while(wordlist_current->siguiente!=0) { wordlist_current=wordlist_current->siguiente; }
  wordlist_final=wordlist_current;
  wordlist_current=wordlist_base;
  while(wordlist_current->siguiente!=0 && strncmp(wordlist_current->word, options.current_word, STRING_SIZE-1)!=0) { wordlist_current=wordlist_current->siguiente; }
  
  // Advance the wordlist by one entry
  wordlist_current=wordlist_current->siguiente;


  // Relaunch the scan

  lanza_ataque(options.current_dir, wordlist_current);


  // Finish up

  cierre();
  exit(0);

}
Code example #10
File: saldl.c  Project: saldl/saldl
void saldl(saldl_params *params_ptr) {
  /* Definitions */
  info_s info = DEF_INFO_S;
  info.params = params_ptr;

  /* Handle signals */
  info_global = &info;
  saldl_handle_signals();

  /* Need to be set as early as possible */
  set_color(&params_ptr->no_color);
  set_verbosity(&params_ptr->verbosity, &params_ptr->libcurl_verbosity);

  /* Check if loaded libcurl is recent enough */
  info.curl_info = curl_version_info(CURLVERSION_NOW);
  check_libcurl(info.curl_info);

  /* Library initializations, should run only once */
  SALDL_ASSERT(!curl_global_init(CURL_GLOBAL_ALL));
  SALDL_ASSERT(!evthread_use_pthreads());

  /* get/set initial info */
  main_msg("URL", "%s", params_ptr->start_url);
  check_url(params_ptr->start_url);
  get_info(&info);
  set_info(&info);
  check_remote_file_size(&info);

  /* initialize chunks early for extra_resume() */
  chunks_init(&info);

  if (params_ptr->resume) {
    check_resume(&info);
  }

  print_chunk_info(&info);
  global_progress_init(&info);

  /* exit here if dry_run was set */
  if ( params_ptr->dry_run  ) {
    saldl_free_all(&info);
    finish_msg_and_exit("Dry-run done.");
  }

  check_files_and_dirs(&info);

  /* Check if download was interrupted after all data was merged */
  if (info.already_finished) {
    goto saldl_all_data_merged;
  }

  /* threads, needed by set_modes() */
  info.threads = saldl_calloc(params_ptr->num_connections, sizeof(thread_s));
  set_modes(&info);

  /* 1st iteration */
  for (size_t counter = 0; counter < params_ptr->num_connections; counter++) {
    queue_next_chunk(&info, counter, 1);
  }

  /* Create event pthreads */
  saldl_pthread_create(&info.trigger_events_pth, NULL, events_trigger_thread, &info);

  if (!params_ptr->read_only && !params_ptr->to_stdout) {
    saldl_pthread_create(&info.sync_ctrl_pth, NULL, sync_ctrl, &info);
  }

  if (info.chunk_count != 1) {
    saldl_pthread_create(&info.status_display_pth, NULL, status_display, &info);
    saldl_pthread_create(&info.queue_next_pth, NULL, queue_next_thread, &info);
    saldl_pthread_create(&info.merger_pth, NULL, merger_thread, &info);
  }

  /* Now that everything is initialized */
  info.session_status = SESSION_IN_PROGRESS;

  /* Avoid race in joining event threads if the session was interrupted, or finishing without downloading if single_mode */
  do {
    usleep(100000);
  } while (params_ptr->single_mode ? info.chunks[0].progress != PRG_FINISHED : info.global_progress.complete_size != info.file_size);

  /* Join event pthreads */
  if (!params_ptr->read_only && !params_ptr->to_stdout) {
    join_event_pth(&info.ev_ctrl ,&info.sync_ctrl_pth);
  }

  if (info.chunk_count !=1) {
    join_event_pth(&info.ev_status, &info.status_display_pth);
    join_event_pth(&info.ev_queue, &info.queue_next_pth);
    join_event_pth(&info.ev_merge, &info.merger_pth);
  }

  info.events_queue_done = true;
  event_queue(&info.ev_trigger, NULL);
  join_event_pth(&info.ev_trigger ,&info.trigger_events_pth);

saldl_all_data_merged:

  /* Remove tmp_dirname */
  if (!params_ptr->read_only && !params_ptr->mem_bufs && !params_ptr->single_mode) {
    if ( rmdir(info.tmp_dirname) ) {
      err_msg(FN, "Failed to delete %s: %s", info.tmp_dirname, strerror(errno) );
    }
  }

  /*** Final Steps ***/

  /* One last check  */
  if (info.file_size && !params_ptr->no_remote_info &&
      !params_ptr->read_only && !params_ptr->to_stdout &&
      (!info.remote_info.content_encoded || params_ptr->no_decompress)) {
    off_t saved_file_size = saldl_fsizeo(info.part_filename, info.file);
    if (saved_file_size != info.file_size) {
      pre_fatal(FN, "Unexpected saved file size (%"SAL_JU"!=%"SAL_JU").", saved_file_size, info.file_size);
      pre_fatal(FN, "This could happen if you're downloading from a dynamic site.");
      pre_fatal(FN, "If that's the case and the download is small, retry with --no-remote-info");
      fatal(FN, "If you think that's a bug in saldl, report it: https://github.com/saldl/saldl/issues");
    }
  }
  else {
    debug_msg(FN, "Strict check for finished file size skipped.");
  }

  if (!params_ptr->read_only && !params_ptr->to_stdout) {
    saldl_fclose(info.part_filename, info.file);
    if (rename(info.part_filename, params_ptr->filename) ) {
      err_msg(FN, "Failed to rename now-complete %s to %s: %s", info.part_filename, params_ptr->filename, strerror(errno));
    }

    saldl_fclose(info.ctrl_filename, info.ctrl_file);
    if ( remove(info.ctrl_filename) ) {
      err_msg(FN, "Failed to remove %s: %s", info.ctrl_filename, strerror(errno));
    }
  }

  /* cleanups */
  curl_cleanup(&info);
  saldl_free_all(&info);

  finish_msg_and_exit("Download Finished.");
}
Code example #11
File: dirb.c  Project: meldridge/dirb
int main(int argc, char **argv) {
  struct words *palabras;
  int c=0;

  banner();

  // Global initializations

  memset(&options, 0, sizeof(struct opciones));

  options.exitonwarn=1;
  options.ignore_nec=0;
  options.default_nec=404;
  options.lasting_bar=1;
  options.speed=0;
  options.add_header=0;

  encontradas=0;
  descargadas=0;
  listable=0;
  exts_num=0;

  strncpy(options.agente, AGENT_STRING, STRING_SIZE-1);

  dirlist_current=(struct words *)malloc(sizeof(struct words));
  memset(dirlist_current, 0, sizeof(struct words));
  dirlist_base=dirlist_current;
  dirlist_final=dirlist_current;

  curl=curl_easy_init();

  // Parse command-line parameters

  if(argc<2) {
    ayuda();
    exit(-1);
    }

  if(strncmp(argv[1], "-resume", 7)==0) {
    printf("(!) RESUMING...\n\n");
    resume();
    }

  strncpy(options.url_inicial, argv[1], STRING_SIZE-1);

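  // Fall back to the default wordlist if no wordlist argument was given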
  if(argc==2 || strncmp(argv[2], "-", 1)==0) {
    strncpy(options.mfile, DEFAULT_WORDLIST, STRING_SIZE-1);
    optind+=1;
    } else {
    strncpy(options.mfile, argv[2], STRING_SIZE-1);
    optind+=2;
    }

  while((c = getopt(argc,argv,"a:c:d:fgh:H:ilm:M:n:N:o:p:P:rRsSvwx:X:u:tz:"))!= -1){
    switch(c) {
      case 'a':
        options.use_agent=1;
        strncpy(options.agente, optarg, STRING_SIZE-1);
        break;
      case 'c':
        options.use_cookie=1;
        strncpy(options.cookie, optarg, STRING_SIZE-1);
        break;
      case 'd':
        options.debuging=atoi(optarg);
        break;
      case 'f':
        options.finetunning=1;
        break;
      case 'g':
        options.save_found=1;
        break;
      case 'h':
        options.use_vhost=1;
        strncpy(options.vhost, optarg, STRING_SIZE-1);
        break;
      case 'H':
        if(options.add_header) {
          strcat(options.header_string, "\n");
          strncat(options.header_string, optarg, STRING_SIZE-strlen(options.header_string)-2);
          } else {
  	      strncpy(options.header_string, optarg, STRING_SIZE-1);
	      }
        options.add_header++;
        break;
      case 'i':
        options.insensitive=1;
        break;
      case 'l':
        options.print_location=1;
        break;
      case 'm':
        options.mutations_file=1;
        strncpy(options.mutation_file, optarg, STRING_SIZE-1);
        break;
      case 'M':
        options.mutations_list=1;
        strncpy(options.mutation_list, optarg, STRING_SIZE-1);
        break;
      case 'N':
        options.ignore_nec=atoi(optarg);
        break;
      case 'o':
        options.saveoutput=1;
        strncpy(options.savefile, optarg, STRING_SIZE-1);
        break;
      case 'p':
        options.use_proxy=1;
        strncpy(options.proxy, optarg, STRING_SIZE-1);
        break;
      case 'P':
        options.use_proxypass=1;
        strncpy(options.proxypass_string, optarg, STRING_SIZE-1);
        break;
      case 'r':
        options.dont_recurse=1;
        break;
      case 'R':
        options.interactive=1;
        break;
      case 's':
        options.verify_ssl=1;
        break;
      case 'S':
        options.silent_mode=1;
        break;
      case 't':
        options.lasting_bar=0;
        break;
      case 'u':
        options.use_pass=1;
        strncpy(options.pass_string, optarg, STRING_SIZE-1);
        break;
      case 'v':
        options.nothide=1;
        break;
      case 'w':
        options.exitonwarn=0;
        break;
      case 'x':
        options.extensions_file=1;
        strncpy(options.exts_file, optarg, STRING_SIZE-1);
        break;
      case 'X':
        options.extensions_list=1;
        strncpy(options.exts_list, optarg, STRING_SIZE-1);
        break;
      case 'z':
        options.speed=(atoi(optarg)<=0)?0:atoi(optarg);
        break;
      default:
        printf("\n(!) FATAL: Incorrect parameter\n");
        exit(-1);
        break;
        }
      }

  // Clean up the input

  limpia_url(options.url_inicial);

  if(options.lasting_bar && !strchr(options.url_inicial, '?')) barra(options.url_inicial);

  check_url(options.url_inicial);

  limpia_url(options.mfile);

  // Initial checks

  get_options();

  init_exts();

  IMPRIME("\n-----------------\n\n");

  // Create the word list

  palabras=crea_wordlist(options.mfile);

  // Open the mutations file and create the list

  /*

  if(options.mutations_file) {
    muts_base=crea_wordlist_fich(options.mutation_file);
    } else if(options.mutations_list) {
    muts_base=crea_extslist(options.mutation_list);
    }
  */

  // Launch the download loop

  lanza_ataque(options.url_inicial, palabras);

  // Finish up

  cierre();
  exit(0);

}