/*
 * bibl_read() - parse a bibliography from an open stream into 'b'.
 *
 * b        - destination bibliography (must be non-NULL)
 * fp       - open input stream (must be non-NULL)
 * filename - name used for diagnostics / reference ids
 * p        - caller-supplied read parameters (must be non-NULL)
 *
 * Returns BIBL_OK on success, or BIBL_ERR_BADINPUT when a required
 * argument is missing or p->readformat is not a legal input mode.
 */
int bibl_read( bibl *b, FILE *fp, char *filename, param *p )
{
	convert_rules conv;
	param readparams;
	bibl staging;

	if ( !b || !fp || !p ) return BIBL_ERR_BADINPUT;
	if ( bibl_illegalinmode( p->readformat ) ) return BIBL_ERR_BADINPUT;

	bibl_setreadparams( &readparams, p );
	bibl_init( &staging );
	rules_init( &conv, p->readformat );

	/* Parse raw references into the staging bibliography first. */
	read_ref( fp, &staging, filename, &conv, &readparams );

	/* Character-set conversion applies unless raw output was requested
	 * without the char-convert flag. */
	if ( !readparams.output_raw ||
	     ( readparams.output_raw & BIBL_RAW_WITHCHARCONVERT ) )
		bibl_fixcharsets( &staging, &readparams );

	if ( !readparams.output_raw ) {
		convert_ref( &staging, filename, b, &conv, &readparams );
	} else {
		if ( p->verbose > 1 ) bibl_verbose0( &staging );
		bibl_copy( b, &staging );
	}

	if ( !readparams.output_raw ||
	     ( readparams.output_raw & BIBL_RAW_WITHMAKEREFID ) )
		bibl_checkrefid( b, &readparams );

	bibl_free( &staging );

	return BIBL_OK;
}
int main (int argc, char **argv) { s_rules rr; if (argv[argc]) err(1, "bad argument list"); if (argc != 3) usage(argv[0]); if (strcmp(argv[1], "-c")) usage(argv[0]); openlog(argv[0], LOG_PID, LOG_AUTH); log_args("NEW", argc, (const char **)argv); init_package(); { const char *env_auth_id = getenv(ENV_AUTH_ID); t_sym id = sympackage_intern(&g_sympkg, env_auth_id ? env_auth_id : ""); s_symtable cmd; cmd_init(&cmd, id, argv[2]); rules_init(&rr); rules_read(&rr, "/etc/git-auth.conf"); { int auth_ok = auth(&rr, &cmd); rules_free(&rr); log_rule(auth_ok ? "ALLOW" : "DENY", &cmd); if (auth_ok) { exec_cmd(&cmd); // never reached } } log_rule("DENY", &cmd); } cleanup(); return 1; }
/*
 * Bring up all configuration subsystems.  The call order is significant:
 * hardware first, then settings/devices/gui which presumably build on it,
 * and the registry last — do not reorder.  Event rules are only compiled
 * in when EVENTS is defined.
 */
void config_init() {
	logprintf(LOG_STACK, "%s(...)", __FUNCTION__);
	hardware_init();
	settings_init();
	devices_init();
	gui_init();
#ifdef EVENTS
	rules_init();
#endif
	registry_init();
}
/*
 * bibl_write() - serialize bibliography 'b' to 'fp' in the given output
 * mode, using write parameters from 'p' (which may be NULL for defaults).
 *
 * fp may legitimately be NULL when p->singlerefperfile is set, because
 * output then goes to one file per reference instead of the stream.
 *
 * Returns BIBL_OK, or BIBL_ERR_BADINPUT for a NULL bibliography, an
 * illegal output mode, or a missing stream without single-ref-per-file.
 */
int bibl_write( bibl *b, FILE *fp, int mode, param *p )
{
	convert_rules conv;
	param writeparams;

	if ( !b || bibl_illegaloutmode( mode ) ) return BIBL_ERR_BADINPUT;
	if ( !fp ) {
		if ( !p || !p->singlerefperfile ) return BIBL_ERR_BADINPUT;
	}

	rules_init( &conv, mode );
	bibl_setwriteparams( &writeparams, p, mode );

	/* Convert character sets in place, then emit. */
	bibl_fixcharsets( b, &writeparams );
	output_bibl( fp, b, &conv, &writeparams, mode );

	return BIBL_OK;
}
/*
 * Prepare "single crack" mode: load its rule subsection, allocate the
 * per-salt candidate-key buffers, and initialize status reporting,
 * crash recovery, and the cracker core.  Operates on file-scope state
 * (single_db, length, key_count, rule_ctx, guessed_keys, ...).
 */
static void single_init(void)
{
	struct db_salt *salt;

	log_event("Proceeding with \"single crack\" mode");

	progress = 0;

	/* Candidate length limit comes from the hash format. */
	length = single_db->format->params.plaintext_length;

	/* Buffer at least SINGLE_HASH_MIN keys per salt even for formats
	 * with a tiny min_keys_per_crypt. */
	key_count = single_db->format->params.min_keys_per_crypt;
	if (key_count < SINGLE_HASH_MIN)
		key_count = SINGLE_HASH_MIN;

	/* Abort with a message if the config has no single-mode rules. */
	if (rpp_init(rule_ctx, SUBSECTION_SINGLE)) {
		log_event("! No \"single crack\" mode rules found");
		fprintf(stderr, "No \"single crack\" mode rules found in %s\n",
			cfg_name);
		error();
	}

	rules_init(length);
	rec_rule = rule_number = 0;
	rule_count = rules_count(rule_ctx, 0);
	log_event("- %d preprocessed word mangling rules", rule_count);

	status_init(get_progress, 0);

	/* Crash recovery: restore a previous session if any, then register
	 * the state-saving callback for this one. */
	rec_restore_mode(restore_state);
	rec_init(single_db, save_state);

	/* One key buffer per salt; the salt list is assumed non-empty
	 * (do/while dereferences the first entry unconditionally). */
	salt = single_db->salts;
	do {
		single_alloc_keys(&salt->keys);
	} while ((salt = salt->next));

	if (key_count > 1)
	log_event("- Allocated %d buffer%s of %d candidate passwords%s",
		single_db->salt_count,
		single_db->salt_count != 1 ? "s" : "",
		key_count,
		single_db->salt_count != 1 ? " each" : "");

	/* Separate buffer for successful guesses, handed to the cracker. */
	guessed_keys = NULL;
	single_alloc_keys(&guessed_keys);
	crk_init(single_db, NULL, guessed_keys);
}
/*
 * Wordlist cracking mode: read candidate words from 'name' (or stdin when
 * name is NULL), optionally run each through every word-mangling rule, and
 * feed the results to the cracker core.  Supports distributing work across
 * nodes (by rule, then by word) and resuming interrupted sessions via the
 * rec_* crash-recovery callbacks.  Uses file-scope state (word_file,
 * length, rule_ctx, rule_count, rule_number, line_number, progress, ...).
 */
void do_wordlist_crack(struct db_main *db, char *name, int rules)
{
	/* Two line buffers; the union with ARCH_WORD forces word alignment. */
	union {
		char buffer[2][LINE_BUFFER_SIZE + CACHE_BANK_SHIFT];
		ARCH_WORD dummy;
	} aligned;
	char *line = aligned.buffer[0], *last = aligned.buffer[1];
	struct rpp_context ctx;
	char *prerule, *rule, *word;
	char *(*apply)(char *word, char *rule, int split, char *last);
	int dist_rules, dist_switch;
	unsigned long my_words, their_words, my_words_left;

	log_event("Proceeding with wordlist mode");

	if (name) {
		if (!(word_file = fopen(path_expand(name), "r")))
			pexit("fopen: %s", path_expand(name));
		log_event("- Wordlist file: %.100s", path_expand(name));
	} else {
		word_file = stdin;
		log_event("- Reading candidate passwords from stdin");
	}

	length = db->format->params.plaintext_length;

	if (rules) {
		if (rpp_init(rule_ctx = &ctx, SUBSECTION_WORDLIST)) {
			log_event("! No wordlist mode rules found");
			if (john_main_process)
				fprintf(stderr,
				    "No wordlist mode rules found in %s\n",
				    cfg_name);
			error();
		}
		rules_init(length);
		rule_count = rules_count(&ctx, -1);
		log_event("- %d preprocessed word mangling rules", rule_count);
		apply = rules_apply;
	} else {
		/* No rules: identity transform, a single pseudo-rule. */
		rule_ctx = NULL;
		rule_count = 1;
		log_event("- No word mangling rules");
		apply = dummy_rules_apply;
	}

	rule_number = 0;
	line_number = 0;

	status_init(get_progress, 0);

	/* May overwrite rule_number/line_number when resuming a session. */
	rec_restore_mode(restore_state);
	rec_init(db, save_state);

	crk_init(db, fix_state, NULL);

	prerule = rule = "";
	if (rules)
		prerule = rpp_next(&ctx);

	/* Seed 'last' with a string that can't be produced by fgetl(), so
	 * the first candidate never falsely matches the "previous word". */
	last[0] = '\n';
	last[1] = 0;

	dist_rules = 0;
	dist_switch = rule_count; /* never */
	my_words = ~0UL; /* all */
	their_words = 0;

	if (options.node_count) {
		int rule_rem = rule_count % options.node_count;
		const char *now, *later = "";
		dist_switch = rule_count - rule_rem;
		if (!rule_rem || rule_number < dist_switch) {
			/* Rules divide evenly (or we're still in the evenly
			 * divisible prefix): distribute by rule. */
			dist_rules = 1;
			now = "rules";
			if (rule_rem)
				later = ", then switch to distributing words";
		} else {
			/* Past the even prefix: distribute by word. */
			dist_switch = rule_count; /* never */
			my_words = options.node_max - options.node_min + 1;
			their_words = options.node_count - my_words;
			now = "words";
		}
		log_event("- Will distribute %s across nodes%s", now, later);
	}

	my_words_left = my_words;
	if (their_words) {
		if (line_number) {
			/* Restored session: line_number is right after a word
			 * we've actually used. */
			int for_node = line_number % options.node_count + 1;
			if (for_node < options.node_min ||
			    for_node > options.node_max) {
				/* Assume line_number sits at the start of
				 * other nodes' block; skip it.  Only treat a
				 * failure as fatal on a real stream error —
				 * a mere EOF means "next rule" (the loop
				 * below sees the EOF again and advances). */
				if (skip_lines(their_words, line) &&
				    ferror(word_file))
					prerule = NULL;
			} else {
				my_words_left =
				    options.node_max - for_node + 1;
			}
		} else {
			/* New session: skip lower-numbered nodes' lines. */
			if (skip_lines(options.node_min - 1, line))
				prerule = NULL;
		}
	}

	if (prerule)
	do {
		if (rules) {
			if (dist_rules) {
				/* Not our rule; try the next one. */
				int for_node =
				    rule_number % options.node_count + 1;
				if (for_node < options.node_min ||
				    for_node > options.node_max)
					goto next_rule;
			}
			if ((rule = rules_reject(prerule, -1, last, db))) {
				if (strcmp(prerule, rule))
					log_event("- Rule #%d: '%.100s'"
						" accepted as '%.100s'",
						rule_number + 1, prerule,
						rule);
				else
					log_event("- Rule #%d: '%.100s'"
						" accepted",
						rule_number + 1, prerule);
			} else {
				log_event("- Rule #%d: '%.100s' rejected",
					rule_number + 1, prerule);
				goto next_rule;
			}
		}

		/* Inner loop: one pass over the wordlist for this rule. */
		while (fgetl(line, LINE_BUFFER_SIZE, word_file)) {
			line_number++;

			if (line[0] != '#') {
process_word:
				if ((word = apply(line, rule, -1, last))) {
					last = word;
					/* A true crk_process_key() means a
					 * session abort request: clear
					 * 'rules' so the do/while exits. */
					if (ext_filter(word))
					if (crk_process_key(word)) {
						rules = 0;
						break;
					}
				}
next_word:
				/* Word-distribution bookkeeping: after our
				 * share, skip the other nodes' block. */
				if (--my_words_left)
					continue;
				if (skip_lines(their_words, line))
					break;
				my_words_left = my_words;
				continue;
			}

			/* Lines starting with '#' are only skipped when they
			 * are "#!comment" markers. */
			if (strncmp(line, "#!comment", 9))
				goto process_word;
			goto next_word;
		}

		if (ferror(word_file))
			break;

		if (rules) {
next_rule:
			if (!(rule = rpp_next(&ctx)))
				break;
			rule_number++;

			if (rule_number >= dist_switch) {
				log_event("- Switching to distributing words");
				dist_rules = 0;
				dist_switch = rule_count; /* not anymore */
				my_words =
				    options.node_max - options.node_min + 1;
				their_words = options.node_count - my_words;
			}

			/* Rewind the wordlist for the next rule. */
			line_number = 0;
			if (fseek(word_file, 0, SEEK_SET))
				pexit("fseek");

			if (their_words &&
			    skip_lines(options.node_min - 1, line))
				break;
		}

		my_words_left = my_words;
	} while (rules);

	crk_done();
	rec_done(event_abort || (status.pass && db->salts));

	if (ferror(word_file))
		pexit("fgets");

	if (name) {
		if (event_abort)
			progress = get_progress();
		else
			progress = 100;

		if (fclose(word_file))
			pexit("fclose");
		word_file = NULL;
	}
}
/*
 * Prepare "single crack" mode (configurable-rules variant): read tunables
 * from the config, clamp the buffered key count to what the short-based
 * key indexing supports, load the active single-mode rules, and set up
 * per-salt buffers, status reporting, crash recovery, and the cracker.
 * Operates on file-scope state (single_db, length, key_count, rule_ctx,
 * words_pair_max, guessed_keys, ...).
 */
static void single_init(void)
{
	struct db_salt *salt;

	log_event("Proceeding with \"single crack\" mode");

	/* Pairing limit from config; negative/absent falls back to the
	 * compile-time default. */
	if ((words_pair_max = cfg_get_int(SECTION_OPTIONS, NULL,
	    "SingleWordsPairMax")) < 0)
		words_pair_max = SINGLE_WORDS_PAIR_MAX;

	progress = 0;

	length = single_db->format->params.plaintext_length;
	/* Honor a user-forced maximum length if it is tighter. */
	if (options.force_maxlength && options.force_maxlength < length)
		length = options.force_maxlength;

	key_count = single_db->format->params.min_keys_per_crypt;
	if (key_count < SINGLE_HASH_MIN)
		key_count = SINGLE_HASH_MIN;
/*
 * We use "short" for buffered key indices and "unsigned short" for buffered
 * key offsets - make sure these don't overflow.
 */
	if (key_count > 0x8000)
		key_count = 0x8000;
	while (key_count > 0xffff / length + 1)
		key_count >>= 1;

	/* Abort with a message if the active single-mode ruleset is absent. */
	if (rpp_init(rule_ctx, pers_opts.activesinglerules)) {
		log_event("! No \"%s\" mode rules found",
			pers_opts.activesinglerules);
		if (john_main_process)
			fprintf(stderr, "No \"%s\" mode rules found in %s\n",
				pers_opts.activesinglerules, cfg_name);
		error();
	}

	rules_init(length);
	rec_rule = rule_number = 0;
	rule_count = rules_count(rule_ctx, 0);
	log_event("- %d preprocessed word mangling rules", rule_count);

	status_init(get_progress, 0);

	/* Crash recovery: restore a previous session if any, then register
	 * the state-saving callback for this one. */
	rec_restore_mode(restore_state);
	rec_init(single_db, save_state);

	/* One key buffer per salt; the salt list is assumed non-empty
	 * (do/while dereferences the first entry unconditionally). */
	salt = single_db->salts;
	do {
		single_alloc_keys(&salt->keys);
	} while ((salt = salt->next));

	if (key_count > 1)
	log_event("- Allocated %d buffer%s of %d candidate passwords%s",
		single_db->salt_count,
		single_db->salt_count != 1 ? "s" : "",
		key_count,
		single_db->salt_count != 1 ? " each" : "");

	/* Separate buffer for successful guesses, handed to the cracker. */
	guessed_keys = NULL;
	single_alloc_keys(&guessed_keys);
	crk_init(single_db, NULL, guessed_keys);
}
static STANDARDIZER * CreateStd(char *lextab, char *gaztab, char *rultab) { STANDARDIZER *std; LEXICON *lex; LEXICON *gaz; RULES *rules; int err; int SPIcode; DBG("Enter: CreateStd"); SPIcode = SPI_connect(); if (SPIcode != SPI_OK_CONNECT) { elog(ERROR, "CreateStd: couldn't open a connection to SPI"); } std = std_init(); if (!std) elog(ERROR, "CreateStd: could not allocate memory (std)"); lex = lex_init(std->err_p); if (!lex) { std_free(std); SPI_finish(); elog(ERROR, "CreateStd: could not allocate memory (lex)"); } err = load_lex(lex, lextab); if (err == -1) { lex_free(lex); std_free(std); SPI_finish(); elog(ERROR, "CreateStd: failed to load '%s' for lexicon", lextab); } gaz = lex_init(std->err_p); if (!gaz) { lex_free(lex); std_free(std); SPI_finish(); elog(ERROR, "CreateStd: could not allocate memory (gaz)"); } err = load_lex(gaz, gaztab); if (err == -1) { lex_free(gaz); lex_free(lex); std_free(std); SPI_finish(); elog(ERROR, "CreateStd: failed to load '%s' for gazeteer", gaztab); } rules = rules_init(std->err_p); if (!rules) { lex_free(gaz); lex_free(lex); std_free(std); SPI_finish(); elog(ERROR, "CreateStd: could not allocate memory (rules)"); } err = load_rules(rules, rultab); if (err == -1) { rules_free(rules); lex_free(gaz); lex_free(lex); std_free(std); SPI_finish(); elog(ERROR, "CreateStd: failed to load '%s' for rules", rultab); } std_use_lex(std, lex); std_use_gaz(std, gaz); std_use_rules(std, rules); std_ready_standardizer(std); SPI_finish(); return std; }
int main(int argc, char *argv[]) { STANDARDIZER *std; LEXICON *lex; LEXICON *gaz; RULES *rules; char buf[1024]; int seq; char input_str[ 4096 ] ; char word[512]; char stdword[512]; int token; int nr; int rule[RULESIZE]; int err; int cnt; int option = 0; FILE *in; if (argc == 3 && !strcmp(argv[1], "-o")) { option = strtol(argv[2], NULL, 10); argc -= 2; argv += 2; } else if (argc != 1) Usage(); std = std_init(); assert(std); lex = lex_init(std->err_p); assert(lex); in = fopen(LEXIN, "rb"); assert(in); cnt = 0; while (!feof(in) && fgets(buf, 1024, in)) { cnt++; /* parse into fields */ if (parse_csv(buf, &seq, word, stdword, &token)) { /* add the record to the lexicon */ err = lex_add_entry(lex, seq, word, stdword, token); if (err != 1) printf("lex: Failed: %d: %s", cnt, buf); } else { printf("lex: Skipping: %d: %s", cnt, buf); } } fclose(in); if (option & 1) { printf("------------ address lexicon --------------\n"); print_lexicon(lex->hash_table); printf("\n"); } gaz = lex_init(std->err_p); assert(gaz); in = fopen(GAZIN, "rb"); assert(in); cnt = 0; while (!feof(in) && fgets(buf, 1024, in)) { cnt++; /* parse into fields */ if (parse_csv(buf, &seq, word, stdword, &token)) { /* add the record to the lexicon */ err = lex_add_entry(gaz, seq, word, stdword, token); if (err != 1) printf("gaz: Failed: %d: %s", cnt, buf); } else { printf("gaz: Skipping: %d: %s", cnt, buf); } } fclose(in); if (option & 2) { printf("------------ gazeteer lexicon --------------\n"); print_lexicon(gaz->hash_table); printf("\n"); } rules = rules_init(std->err_p); assert(rules); rules -> r_p -> collect_statistics = TRUE ; /* ************ RULES **************** */ in = fopen(RULESIN, "rb"); assert(in); cnt = 0; while (!feof(in) && fgets(buf, 1024, in)) { cnt++; /* parse into fields */ nr = parse_rule(buf, rule); /* add the record to the rules */ err = rules_add_rule(rules, nr, rule); if (err != 0) printf("rules: Failed: %d (%d): %s", cnt, err, buf); } err = rules_ready(rules); if (err != 0) 
printf("rules: Failed: err=%d\n", err); fclose(in); std_use_lex(std, lex); std_use_gaz(std, gaz); std_use_rules(std, rules); std_ready_standardizer(std); printf( "Standardization test. Type \"exit\" to quit:\n" ) ; fflush( stdout ) ; while ( TRUE ) { err = standardize_command_line( std, input_str, option ) ; if ( err == FAIL ) { break ; } } printf( "OK\n" ) ; fflush( stdout ) ; std_free(std); /* these were freed when we bound them with std_use_*() rules_free(rules); lex_free(gaz); lex_free(lex); */ return 0; }