/*
 * Compute a Spritz hash of `msg` into `out`.
 *
 * The requested digest length is itself absorbed (after a stop marker)
 * so that different-length digests of the same message never collide.
 * Returns 0 on success, -1 when outlen does not fit in a single byte.
 */
int
spritz_hash(unsigned char *out, size_t outlen,
            const unsigned char *msg, size_t msglen)
{
    State         st;
    unsigned char digest_len;

    if (outlen > 255) {
        return -1;
    }
    digest_len = (unsigned char) outlen;

    initialize_state(&st);
    absorb(&st, msg, msglen);
    absorb_stop(&st);
    absorb(&st, &digest_len, 1U);
    squeeze(&st, out, outlen);

    /* Wipe the sponge state before returning. */
    memzero(&st, sizeof st);

    return 0;
}
/*
 * Sponge-style hash: absorb `hash_input` into a 16-word state in 8-word
 * blocks, then copy the first 8 state words into `output`.
 *
 * The final block is always absorbed separately with a distinct tag
 * (0xFFFFFFFF ^ block_number); when the input is an exact multiple of
 * 8 words the main loop is shortened by one block so a full 8-word tail
 * remains for that final call.
 *
 * Fix: removed the unused local `index`.
 *
 * NOTE(review): absorb()/copy()/zero_out() are project-specific; the
 * comments describe only the call pattern visible here.
 */
void hash_function(WORDSIZE* hash_input, WORDSIZE input_length, WORDSIZE* output){
    WORDSIZE state[16] __attribute__((aligned(16)));
    unsigned long block_number = 0, number_of_blocks;

    zero_out(state, 16);

    /* Reserve one full block for the tail when input is a multiple of 8. */
    number_of_blocks = input_length / 8;
    if (input_length % 8 == 0){
        if (number_of_blocks >= 1){
            number_of_blocks -= 1;
        }
    }

    /* Absorb all full blocks except the reserved final one. */
    if (number_of_blocks > 0){
        for (block_number = 0; block_number < number_of_blocks; block_number++){
            absorb(state, hash_input, 8, (block_number * 8), block_number);
        }
    }

    /* Absorb the final (possibly partial) block with an inverted tag. */
    int amount = input_length % 8;
    if (amount == 0){
        amount = 8;
    }
    absorb(state, hash_input, amount, (block_number * 8), (0xFFFFFFFF ^ block_number));

    /* Emit the first 8 state words as the digest. */
    copy(output, state, 8, 0, 0);
}
/*
 * Perform one compression step.
 * Call sequence is preserved exactly: For0StrictShort() is only consulted
 * when For1StrictLong() succeeds, and absorb() only when both do.
 */
static void compressOne() {
    if (!For1StrictLong()) {
        ForTravel0();
        return;
    }
    if (!For0StrictShort()) {
        ForTravel1();
        return;
    }
    if (absorb()) {
        ForTravel1();
    } else {
        ForTravel0();
    }
}
/*
 * Compute a keyed Spritz MAC over `msg` into `out`.
 *
 * The key is absorbed first (via key_setup), separated from the message
 * by a stop marker, and the tag length itself is absorbed so that tags
 * of different lengths never collide.
 * Returns 0 on success, -1 when outlen does not fit in a single byte.
 */
int
spritz_auth(unsigned char *out, size_t outlen,
            const unsigned char *msg, size_t msglen,
            const unsigned char *key, size_t keylen)
{
    State         st;
    unsigned char tag_len;

    if (outlen > 255) {
        return -1;
    }
    tag_len = (unsigned char) outlen;

    key_setup(&st, key, keylen);
    absorb_stop(&st);
    absorb(&st, msg, msglen);
    absorb_stop(&st);
    absorb(&st, &tag_len, 1U);
    squeeze(&st, out, outlen);

    /* Wipe key-dependent sponge state. */
    memzero(&st, sizeof st);

    return 0;
}
/*
 * Generate `outlen` bytes of Spritz keystream for `key` into `out`.
 * Always returns 0.
 */
int
spritz_stream(unsigned char *out, size_t outlen,
              const unsigned char *key, size_t keylen)
{
    State st;

    initialize_state(&st);
    absorb(&st, key, keylen);
    squeeze(&st, out, outlen);

    /* Wipe key-dependent sponge state. */
    memzero(&st, sizeof st);

    return 0;
}
/* int main() { printf("Running Test on [-1]: %s %d\n", run_test(-1) ==0 ? "PASS": "******", run_test(-1)); printf("Running Test on [0]: %s %d\n", run_test (0) ==0 ? "PASS": "******", run_test(0)); printf("Running Test on [1]: %s %d\n", run_test (1) ==0 ? "PASS": "******", run_test(1)); return 0; } */

/*
 * Hash 243 trits all equal to `in` (-1, 0, or 1) with the Curl sponge and
 * compare the formatted digest against the Java reference implementation
 * (`java CurlReference <in>`), read via popen().
 * Returns 0 on match, non-zero on mismatch, -1 on invalid input.
 *
 * Fixes: `ref[strlen(ref)-1] = 0` wrote before the buffer when the piped
 * output was empty; `sprintf` into sysCall replaced with bounded snprintf.
 */
int run_test(int in) {
    if (in != 0 && in != -1 && in != 1) {
        fprintf(stderr, "Invalid input\n");
        return -1;
    }

    struct Curl c;
    setup_Curl(&c);

    int input[243];
    int i, j;
    for (j = 0; j < 243; j++) {
        input[j] = in;
    }

    int output[256];
    char final_output[1024];
    memset(final_output, 0, sizeof final_output);
    for (j = 0; j < 256; j++) {
        output[j] = 0;
    }

    absorb(&c, input, 0, 243);
    squeeze(&c, output, 0);

    /* Format the digest as "[v0, v1, ..., vN]". */
    char temp[16];
    strcpy(final_output, "[");
    for (i = 0; i < c.HASH_SIZE - 1; i++) {
        sprintf(temp, "%d, ", output[i]);
        strcat(final_output, temp);
    }
    sprintf(temp, "%d]", output[c.HASH_SIZE - 1]);
    strcat(final_output, temp);

    /* Obtain the reference digest from the Java implementation. */
    FILE *fp;
    char ref[1024];
    memset(ref, 0, sizeof ref);
    char sysCall[64];
    snprintf(sysCall, sizeof sysCall, "java CurlReference %d", in);
    fp = popen(sysCall, "r");
    if (fp == NULL) {
        fprintf(stderr, "Failed to run Reference Java Code\n");
        exit(1);
    }
    /* Keep only the last line of output. */
    while (fgets(ref, sizeof(ref) - 1, fp) != NULL)
        ;
    /* Strip the trailing '\n' from Java only when output is non-empty;
     * the old code indexed ref[-1] on empty output. */
    size_t ref_len = strlen(ref);
    if (ref_len > 0 && ref[ref_len - 1] == '\n') {
        ref[ref_len - 1] = 0;
    }
    pclose(fp);

    return strcmp(final_output, ref);
}
/* Output hash digest: absorb the digest length after a stop marker,
 * then squeeze digestLen bytes out of the sponge. */
void spritz_hash_final(spritz_ctx *hash_ctx, uint8_t *digest, uint8_t digestLen)
{
    uint8_t  remaining = digestLen;
    uint8_t *p = digest;

    absorbStop(hash_ctx);
    absorb(hash_ctx, digestLen);

    /* Inlined squeeze(): shuffle first if `a` is non-zero, then drip. */
    if (hash_ctx->a) {
        shuffle(hash_ctx);
    }
    while (remaining--) {
        *p++ = drip(hash_ctx);
    }
}
/*
 * Decrypt `clen` bytes of ciphertext `c` into `out` under `key`/`nonce`:
 * the key is absorbed via key_setup, the nonce after a stop marker, and
 * each plaintext byte is the ciphertext byte minus one keystream byte.
 * Always returns 0.
 */
int
spritz_decrypt(unsigned char *out, const unsigned char *c, size_t clen,
               const unsigned char *nonce, size_t noncelen,
               const unsigned char *key, size_t keylen)
{
    State  st;
    size_t i;

    key_setup(&st, key, keylen);
    absorb_stop(&st);
    absorb(&st, nonce, noncelen);

    for (i = 0; i < clen; i++) {
        out[i] = c[i] - drip(&st);
    }

    /* Wipe key-dependent sponge state. */
    memzero(&st, sizeof st);

    return 0;
}
/*
 * Run proof-of-work on a transaction given as trytes and return the
 * completed transaction as a freshly allocated trytes string, or NULL
 * when no nonce was found (status != PD_FOUND).  Caller frees the result.
 *
 * Flow: convert to trits, absorb everything except the trailing
 * HASH_LENGTH trits into a curl sponge, then search for a nonce with the
 * OpenCL PearlDiver (pearcl_search) when available, falling back to the
 * CPU searcher (pd_search) otherwise.
 *
 * NOTE(review): `base`, `offset` and `loop_count` are file-scope state
 * not visible here; their semantics are assumed from usage — confirm.
 * NOTE(review): the wait loop below busy-spins while a node is still
 * PD_SEARCHING and never sleeps; if the last node stays in PD_SEARCHING
 * with no `next`, this spins forever — verify that is intended.
 */
EXPORT char* ccurl_pow(char* trytes, int minWeightMagnitude) {
    char* buf = NULL; //= malloc(sizeof(char)*TRYTE_LENGTH);
    size_t len = strnlen(trytes, TRANSACTION_LENGTH/3);
    char* trits = trits_from_trytes(trytes, len);
    pdcl_node_t* pd_node = &base;
    ccurl_pow_node_init(pd_node);
    /* Walk to a node that is not currently busy searching. */
    while (pd_node->pdcl->pd.status == PD_SEARCHING) {
        if (pd_node->next != NULL) {
            pd_node = pd_node->next;
        }
    }
    curl_t curl;
    init_curl(&curl);
    /* Absorb the transaction body; the final HASH_LENGTH trits (the
     * nonce area) are copied raw into the sponge state instead. */
    absorb(&curl, trits, TRANSACTION_LENGTH - HASH_LENGTH);
    memcpy(&curl.state, &trits[TRANSACTION_LENGTH - HASH_LENGTH], HASH_LENGTH * sizeof(char));
    /* Preferred path: OpenCL search (init returns 0 on success). */
    if (ccurl_pow_node_init(pd_node) == 0) {
        if (pd_node->pdcl->loop_count < 1) {
            pd_node->pdcl->loop_count = loop_count;
        }
#ifdef DEBUG
        fprintf(stderr, "OpenCL Hashing with %lu loops...\n", pd_node->pdcl->loop_count);
#endif
        pearcl_search(pd_node->pdcl, &curl, offset, minWeightMagnitude);
    }
    /* Fallback: CPU thread search when OpenCL did not conclude. */
    if (pd_node->pdcl->pd.status != PD_FOUND && pd_node->pdcl->pd.status != PD_INVALID && pd_node->pdcl->pd.status != PD_INTERRUPTED) {
#ifdef DEBUG
        fprintf(stderr, "Thread Hashing...\n");
#endif
        pd_search(&(pd_node->pdcl->pd), &curl, minWeightMagnitude, -1);
    }
    /* On success, write the found nonce back into the trits and encode. */
    if (pd_node->pdcl->pd.status == PD_FOUND) {
        memcpy(&trits[TRANSACTION_LENGTH - HASH_LENGTH], &curl.state, HASH_LENGTH * sizeof(char));
        buf = trytes_from_trits(trits, 0, TRANSACTION_LENGTH);
    }
    free(trits);
    pd_node->pdcl->pd.status = PD_FINISHED;
    return buf;
}
static void test_search(void) { PearCLDiver pdcl; Curl curl; clock_t start, diff; int nonce_size = 13; char *digest, *trans; char *mytrits, hash_trits[HASH_LENGTH]; if (init_pearcl(&pdcl) != 0) { CU_FAIL("E: Could not initialize opencl\n"); return; } init_curl(&curl); mytrits = trits_from_trytes(real_transaction, TRYTE_LENGTH); // puts(trytes_from_trits(mytrits+TRANSACTION_LENGTH-HASH_LENGTH, 0, // HASH_LENGTH)); while (nonce_size < 19) { fprintf(stderr, "Testing mwm of %d: ", nonce_size); start = clock(); pearcl_search(&pdcl, mytrits, TRANSACTION_LENGTH, nonce_size, -1); diff = clock() - start; // printf("I took this many seconds: %ld", diff / CLOCKS_PER_SEC); trans = trytes_from_trits(mytrits, 0, TRANSACTION_LENGTH); // hash = trytes_from_trits(mytrits + TRANSACTION_LENGTH - HASH_LENGTH, 0, // HASH_LENGTH); absorb(&curl, mytrits, 0, TRANSACTION_LENGTH); squeeze(&curl, hash_trits, 0, HASH_LENGTH); reset(&curl); digest = trytes_from_trits(hash_trits, 0, HASH_LENGTH); // puts(trans); puts(digest); CU_ASSERT_FATAL(test_last_n_nines(digest, HASH_LENGTH / 3, nonce_size / 3)); nonce_size++; } free(mytrits); free(digest); }
/* ----------------------------------------------------------------------------- * Handeling Commands * ---------------------------------------------------------------------------*/ char * handleCommand(char * request) { fprintf(stderr, "Recv from Socket: %s", request); char *tokens[5]; char *token; int i = 0; while ((token = nextToken(&request))) { tokens[i] = token; i++; } char * response = "Some arbitrary response back to the socket\0"; if (!strcmp(tokens[0], "dimensions")) dimensions(tokens[1], tokens[2]); if (!strcmp(tokens[0], "absorb")) absorb(tokens[1], tokens[2]); if (!strcmp(tokens[0], "containerize")) containerize(); else if (!strcmp(tokens[0], "focus")) focus(tokens[1], tokens[2]); else if (!strcmp(tokens[0], "get")) response = get(tokens[1]); else if(!strcmp(tokens[0], "jump")) jump(tokens[1]); else if (!strcmp(tokens[0], "kill")) kill(); else if (!strcmp(tokens[0], "layout")) layout(tokens[1]); else if (!strcmp(tokens[0], "mark")) mark(tokens[1]); else if (!strcmp(tokens[0], "set")) set(tokens[1], tokens[2]); else if (!strcmp(tokens[0], "shift")) shift(tokens[1], atoi(tokens[2])); else if (!strcmp(tokens[0], "swap")) swap(tokens[1], tokens[2]); else if (!strcmp(tokens[0], "zoom")) zoom(atoi(tokens[1])); XFlush(display); return response; }
int CondorFileBuffer::write(off_t offset, const char *data, int length) { CondorChunk *c=0; c = new CondorChunk(offset,length,buffer_block_size); memcpy(c->data,data,length); c->dirty = 1; c->last_used = time++; head = absorb( head, c ); trim(); if((offset+length)>get_size()) { size = offset+length; } return length; }
/*
 * Called once all pulse triplets have been loaded: walk the triplet list
 * in mid-bin order and greedily group consecutive triplets into pulse
 * trains, emitting each finished train via clusterDone().
 *
 * NOTE(review): absorb() presumably tests whether a triplet extends the
 * current cluster (adding it when it does) — confirm against its
 * definition before relying on this description.
 */
void PulseClusterer::allHitsLoaded() {
    if (!isComplete()) {
        // scan multi-map in mid-bin order
        Train cluster;
        bool first = true;
        TripletList::iterator i;
        // cout << (dec) << tripletList.size() << " triplets" << endl;
        lock();
        for (i = tripletList.begin(); i!= tripletList.end(); i++) {
            // Start a fresh cluster for the first triplet, or whenever the
            // current triplet cannot be absorbed into the running one.
            bool switchClusters = false;
            if (first)
                switchClusters = true;
            else if (!absorb(cluster, *i))
                switchClusters = true;
            if (switchClusters) {
                if (!first)
                    clusterDone(cluster);   // flush the finished cluster
                first = false;
                // start new cluster
                cluster.pulses.clear();
                cluster.histogram.clear();
                // Seed the period histogram from the spectrum gap between
                // the triplet's first two pulses.
                int period = (*i).second.pulses[1].spectrum - (*i).second.pulses[0].spectrum;
                cluster.histogram[period].val += 1;
                for (int j=0; j<TSZ; j++)
                    cluster.addPulse((*i).second.pulses[j]);
                cluster.loBin = cluster.hiBin = (*i).first;  // map key = bin
            }
        }
        if (!first)
            clusterDone(cluster);   // flush the last open cluster
        ChildClusterer::allHitsLoaded();
        unlock();
    }
}
/*
 * Remove too-small sources (TOOSMALL) from the global donelist, merging
 * each into a nearby "buddy" source when findbuddy() finds one, or
 * absorbing it outright otherwise.
 *
 * A dummy list head makes front-of-list deletion need no special case.
 * After unlinking a node, `this` is reset to `last` so the loop's
 * `this = this->next` increment lands on the unlinked node's successor.
 */
extern void absorb_specks(void)     /* eliminate too-small sources */
{
    struct source head, *buddy;
    register struct source *last, *this;

    if (verbose)
        fprintf(stderr, "%s: absorbing small sources...\n", progname);
    head.next = donelist;
    last = &head;
    for (this = head.next; this != NULL; this = this->next)
        if (TOOSMALL(this)) {
            last->next = this->next;        /* unlink the speck */
            buddy = findbuddy(this, head.next);
            if (buddy != NULL)
                mergesource(buddy, this);   /* fold into its buddy */
            else
                absorb(this);               /* no buddy: absorb it */
            this = last;                    /* step back so ->next is valid */
        } else
            last = this;
    donelist = head.next;
}
// Advance every underlying iterator in the tuple in lock-step, passing the
// pre-incremented iterators to absorb() via pack expansion over `Is`.
// NOTE(review): before C++17, the evaluation order of the ++ expressions
// inside the absorb() call is unspecified — confirm absorb() is
// insensitive to argument order.
Iterator& operator++() { absorb(++std::get<Is>(this->iters)...); return *this; }
/*
 * Buffered read: copy up to `length` bytes starting at `offset` into
 * `data`, serving from cached chunks where possible, zero-filling
 * virtual holes (past the underlying file's end but within the logical
 * size), and reading missing ranges from the underlying file into new
 * cached chunks.
 * Returns the number of bytes copied, 0 at end-of-file, or -1 when the
 * first underlying read fails before anything was copied.
 */
int CondorFileBuffer::read(off_t offset, char *data, int length)
{
    CondorChunk *c=0;
    off_t piece=0;
    int bytes_read=0;
    off_t hole_top;

    // If the user is attempting to read past the end
    // of the file, chop off that access here.
    if((offset+length)>size) {
        length = size-offset;
    }

    while(length>0) {
        // hole_top keeps track of the lowest starting data point
        // in case we have created a virtual hole
        hole_top = MIN( size, offset+length );

        // Scan through all the data chunks.
        // If one overlaps with the beginning of the
        // request, then copy that data.
        for( c=head; c; c=c->next ) {
            if( contains(c,offset) ) {
                piece = MIN(c->begin+c->size-offset,length);
                memcpy(data,&c->data[offset-c->begin],piece);
                offset += piece;
                data += piece;
                length -= piece;
                bytes_read += piece;
                c->last_used = time++;  // mark chunk recently used for trim()
                break;
            } else {
                // Chunk starts inside the request: it bounds the hole.
                if((c->begin<hole_top)&&(c->begin>offset)) {
                    hole_top = c->begin;
                }
            }
        }

        // If that worked, try it again.
        if(c) continue;

        // Now, consider the logical size of the buffer file
        // and the size of the actual file. If we are less
        // than the former, but greater than the latter, simply
        // fill the hole with zeroes and continue above.
        piece = hole_top-offset;
        if( offset<size && offset>=original->get_size() ) {
            memset(data,0,piece);
            offset += piece;
            data += piece;
            length -= piece;
            bytes_read += piece;
            continue;
        }

        // Otherwise, make a new chunk. Try to read a whole block.
        c = new CondorChunk(offset,buffer_block_size,buffer_block_size);
        piece = original->read(offset,c->data,c->size);
        if(piece<0) {
            // Underlying read failed: report -1 only when nothing has
            // been copied yet; otherwise return the partial count.
            delete c;
            if(bytes_read==0) bytes_read=-1;
            break;
        } else if(piece==0) {
            // End-of-file on the underlying file.
            delete c;
            break;
        } else {
            // Keep only the bytes actually read and cache the chunk.
            c->size = piece;
            head = absorb( head, c );
        }
    }
    trim();
    return bytes_read;
}