Example #1
/* The helpers used below (retfl, retdbl, fl2dbl, obvod, fl2i, prumer, max,
   arrsum, vsphere, pytha, vecsum, vecsize) and the variable pi are presumably
   defined elsewhere in the same source file; this excerpt shows only main(). */
#include <stdio.h>   /* printf */
#include <math.h>    /* M_PI */

int main()
{
	printf("\n\n\nretfl: %f\n", retfl((float)pi));	
	printf("retdbl: %f\n", retdbl(pi));	
	printf("fl2dbl: %f\n", fl2dbl((float)pi));	

	printf("\nobvod: %f\n", obvod(5, M_PI));
	printf("fl2i: %d, %d, %d\n", fl2i(3.14), fl2i(3.5), fl2i(3.51));
	float floatnums[] = {1.1, 2.2, 3.9, -2, 1, 9, 10, 88, 9, 10, -6, 12};

	printf("\navg: %f\n", prumer(floatnums, sizeof(floatnums)/sizeof(*floatnums)));
	printf("\nmax: %f\n", max(floatnums, sizeof(floatnums)/sizeof(*floatnums)));
	printf("\narrsum: %f\n", arrsum(floatnums, sizeof(floatnums)/sizeof(*floatnums)));

	printf("\n--------------------------------\n");

	printf("\nvsphere: %f\n", vsphere(5, M_PI));
	printf("pytha(3, 4): %f\n", pytha(3.0, 4.0));
	double veca[] = {3.0, 1.0, 5.0};
	double vecb[] = {2.0, -2.0, 1.0};
	int veclen = 3;

	vecsum(veca, vecb, veclen);
	printf("vecsum:  ");
	for(int i=0; i<veclen; i++)
		printf("%f   ", veca[i]);
	printf("\nvecsize: %f\n", vecsize(veca, veclen));


}
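
Note on the last call: vecsum(veca, vecb, veclen) is followed by printing veca, so this particular vecsum evidently adds the second vector into the first in place. A minimal sketch of such a helper, with the signature inferred from the call site rather than taken from the original source:

/* Hypothetical in-place vector addition: a[i] += b[i].
 * Inferred from vecsum(veca, vecb, veclen) above, where the
 * result is read back out of veca afterwards. */
void vecsum(double *a, const double *b, int n)
{
	for (int i = 0; i < n; i++)
		a[i] += b[i];
}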
Example #2
static int vecsum_normal_loop(int pass, const struct libhdfs_data *ldata,
            const struct options *opts)
{
    double sum = 0.0;

    while (1) {
        int res = hdfsReadFully(ldata->fs, ldata->file, ldata->buf,
                NORMAL_READ_CHUNK_SIZE);
        if (res == 0) // EOF
            break;
        if (res < 0) {
            int err = errno;
            fprintf(stderr, "hdfsRead failed with error %d (%s)\n",
                err, strerror(err));
            return err;
        }
        if (res < NORMAL_READ_CHUNK_SIZE) {
            fprintf(stderr, "hdfsRead got a partial read of "
                "length %d\n", res);
            return EINVAL;
        }
        sum += vecsum(ldata->buf,
                  NORMAL_READ_CHUNK_SIZE / sizeof(double));
    }
    printf("finished normal pass %d.  sum = %g\n", pass, sum);
    return 0;
}
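
In this benchmark, vecsum() is a pure reduction: it is called on a freshly read buffer of doubles and its return value is accumulated into sum. A plausible definition consistent with the call site (an assumption; the real benchmark may use an unrolled or vectorized variant):

/* Hypothetical scalar reduction matching
 * sum += vecsum(ldata->buf, NORMAL_READ_CHUNK_SIZE / sizeof(double)). */
static double vecsum(const double *buf, int num_doubles)
{
    int i;
    double sum = 0.0;

    for (i = 0; i < num_doubles; i++)
        sum += buf[i];
    return sum;
}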
Example #3
void crf1dc_alpha_score(crf1d_context_t* ctx)
{
    int i, t;
    floatval_t sum, *cur = NULL;
    floatval_t *scale = &ctx->scale_factor[0];
    const floatval_t *prev = NULL, *trans = NULL, *state = NULL;
    const int T = ctx->num_items;
    const int L = ctx->num_labels;

    /* Compute the alpha scores on nodes (0, *).
        alpha[0][j] = state[0][j]
     */
    cur = ALPHA_SCORE(ctx, 0);
    state = EXP_STATE_SCORE(ctx, 0);
    veccopy(cur, state, L);
    sum = vecsum(cur, L);
    *scale = (sum != 0.) ? 1. / sum : 1.;
    vecscale(cur, *scale, L);
    ++scale;

    /* Compute the alpha scores on nodes (t, *).
        alpha[t][j] = state[t][j] * \sum_{i} alpha[t-1][i] * trans[i][j]
     */
    for (t = 1;t < T;++t) {
        prev = ALPHA_SCORE(ctx, t-1);
        cur = ALPHA_SCORE(ctx, t);
        state = EXP_STATE_SCORE(ctx, t);

        veczero(cur, L);
        for (i = 0;i < L;++i) {
            trans = EXP_TRANS_SCORE(ctx, i);
            vecaadd(cur, prev[i], trans, L);
        }
        vecmul(cur, state, L);
        sum = vecsum(cur, L);
        *scale = (sum != 0.) ? 1. / sum : 1.;
        vecscale(cur, *scale, L);
        ++scale;
    }

    /* Compute the logarithm of the normalization factor here.
        norm = 1. / (C[0] * C[1] ... * C[T-1])
        log(norm) = - \sum_{t = 0}^{T-1} log(C[t]).
     */
    ctx->log_norm = -vecsumlog(ctx->scale_factor, T);
}
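
The vector primitives used in this and the following CRF examples (veccopy, veczero, vecsum, vecscale, vecaadd, vecmul, vecsumlog, plus vecadd and vecinv in Example #7) are not part of the excerpt. Minimal sketches inferred from the call sites, assuming floatval_t is the library's floating-point typedef (double by default); the actual implementations may differ, e.g. be vectorized:

#include <math.h>

typedef double floatval_t;  /* assumed default typedef */

static void veccopy(floatval_t *y, const floatval_t *x, int n)
    { for (int i = 0; i < n; ++i) y[i] = x[i]; }
static void veczero(floatval_t *y, int n)
    { for (int i = 0; i < n; ++i) y[i] = 0.; }
static void vecadd(floatval_t *y, const floatval_t *x, int n)
    { for (int i = 0; i < n; ++i) y[i] += x[i]; }        /* y += x */
static void vecaadd(floatval_t *y, floatval_t a, const floatval_t *x, int n)
    { for (int i = 0; i < n; ++i) y[i] += a * x[i]; }    /* y += a * x */
static void vecmul(floatval_t *y, const floatval_t *x, int n)
    { for (int i = 0; i < n; ++i) y[i] *= x[i]; }        /* element-wise */
static void vecscale(floatval_t *y, floatval_t a, int n)
    { for (int i = 0; i < n; ++i) y[i] *= a; }
static void vecinv(floatval_t *y, int n)
    { for (int i = 0; i < n; ++i) y[i] = 1. / y[i]; }
static floatval_t vecsum(const floatval_t *x, int n)
    { floatval_t s = 0.; for (int i = 0; i < n; ++i) s += x[i]; return s; }
static floatval_t vecsumlog(const floatval_t *x, int n)
    { floatval_t s = 0.; for (int i = 0; i < n; ++i) s += log(x[i]); return s; }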
Example #4
static void vecsum_local(struct local_data *cdata, const struct options *opts)
{
    int pass;

    for (pass = 0; pass < opts->passes; pass++) {
        double sum = vecsum(cdata->mmap, cdata->length / sizeof(double));
        printf("finished vecsum_local pass %d.  sum = %g\n", pass, sum);
    }
}
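
vecsum_local() assumes cdata->mmap already points at a memory-mapped file of doubles and cdata->length holds its size in bytes. A sketch of how such a mapping might be set up on a POSIX system (the struct layout and the setup function are assumptions; only the two fields used above come from the call site):

#include <fcntl.h>     /* open */
#include <sys/mman.h>  /* mmap */
#include <sys/stat.h>  /* fstat */
#include <unistd.h>    /* close */

struct local_data {
    double *mmap;   /* mapped file contents */
    size_t length;  /* mapping length in bytes */
};

/* Hypothetical setup: map an existing file of doubles read-only. */
static int local_data_init(struct local_data *cdata, const char *path)
{
    struct stat st;
    int fd = open(path, O_RDONLY);

    if (fd < 0)
        return -1;
    if (fstat(fd, &st) < 0) {
        close(fd);
        return -1;
    }
    cdata->length = (size_t)st.st_size;
    cdata->mmap = mmap(NULL, cdata->length, PROT_READ, MAP_PRIVATE, fd, 0);
    close(fd);  /* the mapping remains valid after the fd is closed */
    return (cdata->mmap == MAP_FAILED) ? -1 : 0;
}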
Example #5
#include <assert.h>  /* assert */
#include <stdio.h>   /* scanf, printf */
#include <stdlib.h>  /* malloc, free */

/* vecsum() is declared elsewhere in this program; see the note below. */
int main(void)
{
  int n;
  int *v;
  int i;
  int sum, start, end;

  while (scanf("%d", &n) == 1 && n > 0) {
    v = (int *)malloc(n*sizeof(int));
    assert(v);
    for (i = 0; i < n; i++) {
      scanf("%d", &(v[i]));
    }
    sum = vecsum(v, n, &start, &end);
    printf("Maximum sum %d from %d to %d.\n", sum, start, end);
    free(v);
  }

  return 0;
}
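
The output format shows that this vecsum() solves the maximum contiguous subarray problem: it returns the best sum and reports the window bounds through start and end. A sketch using Kadane's algorithm (the signature is inferred from the call site; the 0-based bounds convention is an assumption):

/* Hypothetical maximum-subarray routine matching the call
 * vecsum(v, n, &start, &end); requires n >= 1. */
int vecsum(const int *v, int n, int *start, int *end)
{
  int best = v[0], cur = v[0];
  int best_s = 0, best_e = 0, cur_s = 0;
  int i;

  for (i = 1; i < n; i++) {
    if (cur < 0) {        /* a negative prefix never helps: restart here */
      cur = v[i];
      cur_s = i;
    } else {
      cur += v[i];
    }
    if (cur > best) {     /* record the best window seen so far */
      best = cur;
      best_s = cur_s;
      best_e = i;
    }
  }
  *start = best_s;
  *end = best_e;
  return best;
}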
Example #6
static int vecsum_zcr_loop(int pass, struct libhdfs_data *ldata,
        struct hadoopRzOptions *zopts,
        const struct options *opts)
{
    int32_t len;
    double sum = 0.0;
    const double *buf;
    struct hadoopRzBuffer *rzbuf = NULL;
    int ret;

    while (1) {
        rzbuf = hadoopReadZero(ldata->file, zopts, ZCR_READ_CHUNK_SIZE);
        if (!rzbuf) {
            ret = errno;
            fprintf(stderr, "hadoopReadZero failed with error "
                "code %d (%s)\n", ret, strerror(ret));
            goto done;
        }
        buf = hadoopRzBufferGet(rzbuf);
        if (!buf) break;
        len = hadoopRzBufferLength(rzbuf);
        if (len < ZCR_READ_CHUNK_SIZE) {
            fprintf(stderr, "hadoopReadZero got a partial read "
                "of length %d\n", len);
            ret = EINVAL;
            goto done;
        }
        sum += vecsum(buf,
            ZCR_READ_CHUNK_SIZE / sizeof(double));
        hadoopRzBufferFree(ldata->file, rzbuf);
    }
    printf("finished zcr pass %d.  sum = %g\n", pass, sum);
    ret = 0;

done:
    if (rzbuf)
        hadoopRzBufferFree(ldata->file, rzbuf);
    return ret;
}
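
A note on the cleanup pattern here: each buffer is released with hadoopRzBufferFree() as soon as its contents have been accumulated, and the done: label releases whatever buffer is still outstanding on the EOF and partial-read paths. When hadoopReadZero() itself fails it returns NULL, so the guard at done: never frees a stale pointer.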
Example #7
void crf1dc_marginal_without_beta(crf1d_context_t* ctx)
{
    int i, t;
    floatval_t *prob = NULL;
    floatval_t *row = ctx->row;
    const floatval_t *fwd = NULL;
    const int T = ctx->num_items;
    const int L = ctx->num_labels;

    /*
        Compute marginal probabilities of states at T-1
            p(T-1,j) = fwd'[T-1][j]
     */
    fwd = ALPHA_SCORE(ctx, T-1);
    prob = STATE_MEXP(ctx, T-1);
    veccopy(prob, fwd, L);
    
    /*
        Repeat the following computation for t = T-1,T-2, ..., 1.
            1) Compute p(t-1,i,t,j) using p(t,j)
            2) Compute p(t,i) using p(t-1,i,t,j)
     */
    for (t = T-1;0 < t;--t) {
        fwd = ALPHA_SCORE(ctx, t-1);
        prob = STATE_MEXP(ctx, t);

        veczero(ctx->adj, L*L);
        veczero(row, L);

        /*
            Compute adj[i][j] and row[j].
                adj[i][j] = fwd'[t-1][i] * edge[i][j]
                row[j] = \sum_{i} adj[i][j]
         */
        for (i = 0;i < L;++i) {
            floatval_t *adj = ADJACENCY(ctx, i);
            floatval_t *edge = EXP_TRANS_SCORE(ctx, i);
            vecaadd(adj, fwd[i], edge, L);
            vecadd(row, adj, L);
        }

        /*
            Find z such that z * \sum_{i} adj[i][j] = p(t,j).
            Thus, z = p(t,j) / row[j]; we overwrite row with z.
         */
        vecinv(row, L);
        vecmul(row, prob, L);

        /*
            Apply the partition factor z (row[j]) to adj[i][j].
         */
        for (i = 0;i < L;++i) {
            floatval_t *adj = ADJACENCY(ctx, i);
            vecmul(adj, row, L);
        }

        /*
            Now that adj[i][j] represents p(t-1,i,t,j),
            accumulate model expectations of transitions.
         */
        for (i = 0;i < L;++i) {
            floatval_t *adj = ADJACENCY(ctx, i);
            floatval_t *prob = TRANS_MEXP(ctx, i);
            vecadd(prob, adj, L);
        }

        /*
            Compute the marginal probability of states at t-1.
                p(t-1,i) = \sum_{j} p(t-1,i,t,j)
         */
        prob = STATE_MEXP(ctx, t-1);
        for (i = 0;i < L;++i) {
            floatval_t *adj = ADJACENCY(ctx, i);
            prob[i] = vecsum(adj, L);
        }
    }
}
Example #8
void crf1dc_partial_alpha_score(crf1d_context_t* ctx, int *mask)
{
    int i, j, t;
    int *prev_mask, *curr_mask;
    floatval_t sum, *cur = NULL;
    floatval_t *scale = &ctx->partial_scale_factor[0];
    const floatval_t *prev = NULL, *trans = NULL, *state = NULL;
    const int T = ctx->num_items;
    const int L = ctx->num_labels;

    /* Compute the alpha scores on nodes (0, *).
        alpha[0][j] = state[0][j]
     */
    cur = PARTIAL_ALPHA_SCORE(ctx, 0);
    veczero(cur, L);
    state = EXP_STATE_SCORE(ctx, 0);
    curr_mask = &mask[0];
    for (i = 0; i < L; ++ i) {
      if (curr_mask[i]) {
        cur[i] = state[i];
      }
    }

    sum = vecsum(cur, L);
    /* scale is a temporary structure */
    *scale = (sum != 0.) ? 1. / sum : 1.;
    vecscale(cur, *scale, L);
    ++scale;

    /* Compute the alpha scores on nodes (t, *).
        alpha[t][j] = state[t][j] * \sum_{i} alpha[t-1][i] * trans[i][j]
     */
    for (t = 1;t < T;++t) {
        prev = PARTIAL_ALPHA_SCORE(ctx, t-1);
        cur = PARTIAL_ALPHA_SCORE(ctx, t);
        state = EXP_STATE_SCORE(ctx, t);
        prev_mask = &mask[(t-1) * L];
        curr_mask = &mask[t * L];

        veczero(cur, L);
        for (i = 0; i < L; ++ i) {
          if (prev_mask[i]) {
            trans = EXP_TRANS_SCORE(ctx, i);
            for (j = 0; j < L; ++ j) {
              if (curr_mask[j]) {
                cur[j] += prev[i] * trans[j];
              }
            }
          }
        }

        for (j = 0; j < L; ++ j) {
          if (curr_mask[j]) {
            cur[j] *= state[j];
          }
        }

        sum = vecsum(cur, L);
        *scale = (sum != 0.) ? 1. / sum : 1.;
        vecscale(cur, *scale, L);
        ++scale;
    }

    /* Compute the logarithm of the normalization factor here.
        norm = 1. / (C[0] * C[1] ... * C[T-1])
        log(norm) = - \sum_{t = 0}^{T-1} log(C[t]).
     */
    ctx->partial_log_norm = -vecsumlog(ctx->partial_scale_factor, T);
}