Example #1
void
Trace::erase(TraceTree::const_iterator& rit)
{
  /// @todo merge data for erased point?
  if (rit == trace_tree.end())
    return;

  TraceTree::const_iterator it_prev = find_prev(*rit);
  TraceTree::const_iterator it_next = find_next(*rit);

  // don't erase if last or first point in tree
  if ((it_prev == trace_tree.end()) || (it_next == trace_tree.end()))
    return;

  // copy the next point; the erased point's data is merged into it
  TracePoint tp_next = *it_next;
  tp_next.last_time = it_prev->time;

  // remove erased point from the delta map
  delta_map.erase(rit->time);

  // remove the erased point and the next point (to be re-inserted with updated data)
  trace_tree.erase(rit);
  trace_tree.erase(it_next);

  // insert point replacement
  it_next = trace_tree.insert(tp_next);

  // recompute data for previous and replacement point
  update_delta(find_prev(*it_prev), it_prev, it_next);
  update_delta(it_prev, it_next, find_next(*it_next));
}
Example #2
void
Trace::erase(TraceTree::const_iterator& rit)
{
  /// @todo merge data for erased point?
  if (rit == trace_tree.end())
    return;

  TraceTree::const_iterator it_prev = find_prev(*rit);
  TraceTree::const_iterator it_next = find_next(*rit);

  if ((it_prev == trace_tree.end()) || (it_next == trace_tree.end()))
    return;

  TracePoint tp_next = *it_next;
  tp_next.last_time = it_prev->time;

  distance_delta_map.erase(rit->time);
  time_delta_map.erase(rit->time);

  trace_tree.erase(rit);
  trace_tree.erase(it_next);

  it_next = trace_tree.insert(tp_next);

  update_delta(find_prev(*it_prev), it_prev, it_next);
  update_delta(it_prev, it_next, find_next(*it_next));
}
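
Neither erase() variant shows the body of update_delta itself, only how it is called with the previous, current and next points. A minimal, hypothetical sketch of what such a three-point update might compute (FlatPoint, PointDelta and compute_delta are illustrative names, not the project's API; the real TracePoint and delta maps are richer):

#include <cmath>

struct FlatPoint {
  double x, y;    // projected (flat) coordinates
  unsigned time;  // seconds
};

struct PointDelta {
  double distance;  // extra path length contributed by this point
  unsigned dt;      // time span between its neighbours
};

// Recompute the delta of `curr`, which lies between `prev` and `next`:
// how much longer the path is for going via `curr`, and how much time
// the point covers. This is the role update_delta(prev, curr, next)
// plays in the erase() examples above.
static PointDelta compute_delta(const FlatPoint &prev, const FlatPoint &curr,
                                const FlatPoint &next)
{
  const double via = std::hypot(curr.x - prev.x, curr.y - prev.y) +
                     std::hypot(next.x - curr.x, next.y - curr.y);
  const double direct = std::hypot(next.x - prev.x, next.y - prev.y);
  return PointDelta{via - direct, next.time - prev.time};
}

Whatever the exact metric, the pattern in both erase() variants is the same: after removing a point, the deltas of its former neighbours are stale and must be recomputed from their new neighbours.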
Example #3
void
Trace::append(const AIRCRAFT_STATE& state)
{
  if (empty()) {
    task_projection.reset(state.get_location());
    task_projection.update_fast();
    m_last_point.time = null_time;
  } else if (trace_tree.size() > 0 && state.Time < fixed(m_last_point.time)) {
    clear();
    return;
  }

  TracePoint tp(state);
  if ((tp.time - m_last_point.time) < 2)
    return;

  tp.project(task_projection);
  tp.last_time = m_last_point.time;
  TraceTree::const_iterator it_this = trace_tree.insert(tp);
  m_last_point = tp;

  // update deltas.  Last point is always high delta
  delta_map[tp.time].distance = null_delta;
  delta_map[tp.time].time = null_time;

  TraceTree::const_iterator it_prev = find_prev(tp);
  if (it_prev != end())
    update_delta(find_prev(*it_prev), it_prev, it_this);
}
Example #4
void
Trace::append(const AircraftState& state)
{
  assert(cached_size == delta_list.size());
  assert(cached_size == chronological_list.Count());

  if (empty()) {
    // first point determines origin for flat projection
    task_projection.reset(state.location);
    task_projection.update_fast();
  } else if (state.time < fixed(back().time)) {
    // gone back in time, must reset. (shouldn't get here!)
    assert(false);
    clear();
    return;
  } else if ((unsigned)state.time - back().time < 2)
    // only add one item per two seconds
    return;

  TracePoint tp(state);
  tp.project(task_projection);

  TraceDelta &td = insert(tp);
  td.InsertBefore(chronological_list);

  ++cached_size;

  if (!chronological_list.IsFirst(td))
    update_delta(td.GetPrevious());
}
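
Both append() variants follow the same bookkeeping: throttle input to one point per two seconds, give the newest point a sentinel ("null"/maximum) delta because it has no successor yet, and recompute the delta of the previous point, which now has neighbours on both sides. A self-contained sketch of that pattern, assuming a plain std::deque instead of the project's trace_tree / chronological_list (Sample, SimpleTrace, Append and UpdateDelta are illustrative names):

#include <cstddef>
#include <deque>
#include <limits>

struct Sample {
  double x, y;    // projected coordinates
  unsigned time;  // seconds
  double delta = std::numeric_limits<double>::max();  // sentinel: edge points are never pruned
};

class SimpleTrace {
  std::deque<Sample> points;

  static double Distance2(const Sample &a, const Sample &b) {
    const double dx = a.x - b.x, dy = a.y - b.y;
    return dx * dx + dy * dy;
  }

  // Re-derive the delta of an interior point from its two neighbours.
  void UpdateDelta(std::size_t i) {
    if (i == 0 || i + 1 >= points.size())
      return;  // edge points keep the sentinel value
    points[i].delta = Distance2(points[i - 1], points[i]) +
                      Distance2(points[i], points[i + 1]);
  }

public:
  // Assumes samples arrive in non-decreasing time order; the real code
  // clears the whole trace when time goes backwards.
  void Append(const Sample &s) {
    // only add one item per two seconds, as in the examples above
    if (!points.empty() && s.time - points.back().time < 2)
      return;

    points.push_back(s);  // the newest point keeps its sentinel delta
    if (points.size() >= 2)
      UpdateDelta(points.size() - 2);  // previous point now has both neighbours
  }
};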
Example #5
void
Trace::erase_inside(TraceDelta::iterator it)
{
  assert(cached_size > 0);
  assert(cached_size == delta_list.size());
  assert(cached_size == chronological_list.Count());
  assert(it != delta_list.end());

  const TraceDelta &td = *it;
  assert(!td.IsEdge());

  TraceDelta &previous = const_cast<TraceDelta &>(td.GetPrevious());
  TraceDelta &next = const_cast<TraceDelta &>(td.GetNext());

  // now delete the item
  td.RemoveConst();
  delta_list.erase(it);
  --cached_size;

  // and update the deltas
  update_delta(previous);
  update_delta(next);
}
Example #6
void SPF::learn() {
    double old_likelihood, delta_likelihood, likelihood = -1e10;
    int likelihood_decreasing_count = 0;
    time_t start_time, end_time;

    int iteration = 0;
    char iter_as_str[16];  // large enough for "%04d" plus the NUL terminator
    bool converged = false;
    bool on_final_pass = false;

    while (!converged) {
        time(&start_time);
        iteration++;
        printf("iteration %d\n", iteration);

        reset_helper_params();

        // update rate for user preferences
        b_theta.each_col() += sum(beta, 1);

        set<int> items;
        int user = -1, item, rating;
        for (int i = 0; i < settings->sample_size; i++) {
            if (on_final_pass && settings->final_pass_test) {
                user++;
                while (data->test_users.count(user)==0) {
                    user++;
                }
            } else if (settings->svi) {
                user = gsl_rng_uniform_int(rand_gen, data->user_count());
            } else {
                user = i;
            }

            bool user_converged = false;
            int user_iters = 0;
            while (!user_converged) {
                user_iters++;
                a_beta_user.zeros();
                a_delta_user.zeros();

                // look at all the user's items
                for (int j = 0; j < data->item_count(user); j++) {
                    item = data->get_item(user, j);
                    items.insert(item);
                    rating = 1;
                    //TODO: rating = data->get_train_rating(i);
                    update_shape(user, item, rating);
                }

                // update per-user parameters
                double user_change = 0;
                if (!settings->factor_only && !settings->fix_influence)
                    user_change += update_tau(user);
                if (!settings->social_only)
                    user_change += update_theta(user);
                if (!settings->social_only && !settings->factor_only && !settings->fix_influence) {
                    user_change /= 2;

                    // if the updates are less than 1% change, the local params have converged
                    if (user_change < 0.01)
                        user_converged = true;

                } else {
                    // if we're only looking at social or factor (not combined)
                    // then the user parameters will always have converged with
                    // a single pass (since there's nothing to balance against)
                    user_converged = true;
                }
            }
            if (settings->verbose)
                printf("%d\tuser %d took %d iters to converge\n", iteration, user, user_iters);
            a_beta += a_beta_user;
            a_delta += a_delta_user;
        }

        if (!settings->social_only) {
            // update rate for item attributes
            b_beta.each_col() += sum(theta, 1);

            // update per-item parameters
            set<int>::iterator it;
            for (it = items.begin(); it != items.end(); it++) {
                item = *it;
                if (iter_count[item] == 0)
                    iter_count[item] = 0;
                iter_count[item]++;
                update_beta(item);
                if (settings->item_bias)
                    update_delta(item);
            }
        } else if (settings->item_bias) {
            set<int>::iterator it;
            for (it = items.begin(); it != items.end(); it++) {
                item = *it;
                if (iter_count[item] == 0)
                    iter_count[item] = 0;
                iter_count[item]++;
                if (settings->item_bias)
                    update_delta(item);
            }
        }


        // check for convergence
        if (on_final_pass) {
            printf("Final pass complete\n");
            converged = true;

            old_likelihood = likelihood;
            likelihood = get_ave_log_likelihood();
            delta_likelihood = abs((old_likelihood - likelihood) /
                                   old_likelihood);
            log_convergence(iteration, likelihood, delta_likelihood);
        } else if (iteration >= settings->max_iter) {
            printf("Reached maximum number of iterations.\n");
            converged = true;

            old_likelihood = likelihood;
            likelihood = get_ave_log_likelihood();
            delta_likelihood = abs((old_likelihood - likelihood) /
                                   old_likelihood);
            log_convergence(iteration, likelihood, delta_likelihood);
        } else if (iteration % settings->conv_freq == 0) {
            old_likelihood = likelihood;
            likelihood = get_ave_log_likelihood();

            if (likelihood < old_likelihood)
                likelihood_decreasing_count += 1;
            else
                likelihood_decreasing_count = 0;
            delta_likelihood = abs((old_likelihood - likelihood) /
                                   old_likelihood);
            log_convergence(iteration, likelihood, delta_likelihood);
            if (settings->verbose) {
                printf("delta: %f\n", delta_likelihood);
                printf("old:   %f\n", old_likelihood);
                printf("new:   %f\n", likelihood);
            }
            if (iteration >= settings->min_iter &&
                    delta_likelihood < settings->likelihood_delta) {
                printf("Model converged.\n");
                converged = true;
            } else if (iteration >= settings->min_iter &&
                       likelihood_decreasing_count >= 2) {
                printf("Likelihood decreasing.\n");
                converged = true;
            }
        }

        // save intermediate results
        if (!converged && settings->save_freq > 0 &&
                iteration % settings->save_freq == 0) {
            printf(" saving\n");
            sprintf(iter_as_str, "%04d", iteration);
            save_parameters(iter_as_str);
        }

        // intermediate evaluation
        if (!converged && settings->eval_freq > 0 &&
                iteration % settings->eval_freq == 0) {
            sprintf(iter_as_str, "%04d", iteration);
            evaluate(iter_as_str);
        }

        time(&end_time);
        log_time(iteration, difftime(end_time, start_time));

        if (converged && !on_final_pass &&
                (settings->final_pass || settings->final_pass_test)) {
            printf("final pass on all users.\n");
            on_final_pass = true;
            converged = false;

            // we need to modify some settings for the final pass
            // things should look exactly like batch for all users
            if (settings->final_pass) {
                settings->set_stochastic_inference(false);
                settings->set_sample_size(data->user_count());
                scale = 1;
            } else {
                settings->set_sample_size(data->test_users.size());
                scale = data->user_count() / settings->sample_size;
            }
        }
    }

    save_parameters("final");
}
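
The three convergence branches above repeat the same likelihood bookkeeping: compare the new average log likelihood with the previous one, count consecutive decreases, and stop once the relative change is small. A minimal sketch of that test in isolation (ConvergenceCheck and Update are illustrative names, not part of the SPF code; the min_iter / max_iter and final-pass guards are left out):

#include <cmath>

struct ConvergenceCheck {
  double last_likelihood = -1e10;
  int decreasing_count = 0;

  // Returns true once the relative change falls below `tolerance`
  // or the likelihood has decreased on two consecutive checks.
  bool Update(double likelihood, double tolerance) {
    const double delta =
        std::fabs((last_likelihood - likelihood) / last_likelihood);

    if (likelihood < last_likelihood)
      ++decreasing_count;
    else
      decreasing_count = 0;

    last_likelihood = likelihood;
    return delta < tolerance || decreasing_count >= 2;
  }
};

Using std::fabs keeps the relative change as a double; plain abs() can silently truncate it to an integer when the floating-point overload is not in scope.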