Example #1
vector<DepthMinMax> getDistancesFromSOM(const NGHolder &g_orig) {
    // We operate on a temporary copy of the original graph here, so we don't
    // have to mutate the original.
    NGHolder g;
    ue2::unordered_map<NFAVertex, NFAVertex> vmap; // vertex in g_orig to vertex in g
    cloneHolder(g, g_orig, &vmap);

    vector<NFAVertex> vstarts;
    for (auto v : vertices_range(g)) {
        if (is_virtual_start(v, g)) {
            vstarts.push_back(v);
        }
    }
    vstarts.push_back(g.startDs);

    // wire the successors of every virtual start or startDs to g.start.
    for (auto v : vstarts) {
        wireSuccessorsToStart(g, v);
    }

    // drop the in-edges of every virtual start so that they don't participate
    // in the depth calculation.
    for (auto v : vstarts) {
        clear_in_edges(v, g);
    }

    //dumpGraph("som_depth.dot", g.g);

    vector<DepthMinMax> temp_depths; // numbered by vertex index in g
    calcDepthsFrom(g, g.start, temp_depths);

    // Transfer depths, indexed by vertex index in g_orig.
    vector<DepthMinMax> depths(num_vertices(g_orig));

    for (auto v_orig : vertices_range(g_orig)) {
        assert(contains(vmap, v_orig));
        NFAVertex v_new = vmap[v_orig];

        u32 orig_idx = g_orig[v_orig].index;

        DepthMinMax &d = depths.at(orig_idx);

        if (v_orig == g_orig.startDs || is_virtual_start(v_orig, g_orig)) {
            // StartDs and virtual starts always have zero depth.
            d = DepthMinMax(0, 0);
        } else {
            u32 new_idx = g[v_new].index;
            d = temp_depths.at(new_idx);
        }
    }

    return depths;
}
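getDistancesFromSOM rewires the successors of every virtual start (and startDs) onto g.start, drops the virtual starts' in-edges, and then hands the cloned graph to calcDepthsFrom. The following is a minimal, self-contained sketch of the underlying depth computation on a plain adjacency list; the names minDepthsFrom and UNREACHABLE are assumptions for illustration only (not Hyperscan API), and only the minimum depth is computed here (a BFS), not the min/max pair that DepthMinMax carries.

// Sketch only: assumed names, not Hyperscan's calcDepthsFrom.
#include <cstdint>
#include <iostream>
#include <limits>
#include <queue>
#include <vector>

static constexpr uint32_t UNREACHABLE = std::numeric_limits<uint32_t>::max();

// Returns, for each vertex, the minimum number of edges from `start`,
// or UNREACHABLE if the vertex cannot be reached.
std::vector<uint32_t> minDepthsFrom(const std::vector<std::vector<size_t>> &adj,
                                    size_t start) {
    std::vector<uint32_t> depth(adj.size(), UNREACHABLE);
    std::queue<size_t> q;
    depth[start] = 0;
    q.push(start);
    while (!q.empty()) {
        size_t v = q.front();
        q.pop();
        for (size_t w : adj[v]) {
            if (depth[w] == UNREACHABLE) { // first visit gives the min depth
                depth[w] = depth[v] + 1;
                q.push(w);
            }
        }
    }
    return depth;
}

int main() {
    // start -> a, start -> b, a -> b
    std::vector<std::vector<size_t>> adj = {{1, 2}, {2}, {}};
    auto d = minDepthsFrom(adj, 0);
    for (size_t i = 0; i < d.size(); ++i) {
        std::cout << "vertex " << i << ": depth " << d[i] << "\n";
    }
    return 0;
}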
Example #2
bool somMayGoBackwards(NFAVertex u, const NGHolder &g,
                       const ue2::unordered_map<NFAVertex, u32> &region_map,
                       smgb_cache &cache) {
    /* Need to ensure all matches of the graph g up to u contain no infixes
     * which are also matches of the graph to u.
     *
     * This is basically the same as firstMatchIsFirst except that g is not
     * always a dag. As we haven't gotten around to writing an execute_graph
     * that operates on general graphs, we take some (hopefully) conservative
     * shortcuts.
     *
     * Note: if u can be jumped, we take jump edges into account as a
     * possibility of SOM going backwards.
     *
     * TODO: write a generalised ng_execute_graph/make this less hacky
     */
    assert(&g == &cache.g);
    if (contains(cache.smgb, u)) {
        return cache.smgb[u];
    }

    DEBUG_PRINTF("checking if som can go backwards on %u\n",
                  g[u].index);

    set<NFAEdge> be;
    BackEdges<set<NFAEdge>> backEdgeVisitor(be);
    depth_first_search(
        g.g, visitor(backEdgeVisitor)
                 .root_vertex(g.start)
                 .vertex_index_map(get(&NFAGraphVertexProps::index, g.g)));

    bool rv;
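    /* Note: the block below is skipped on normal fall-through and is only
     * entered via 'goto exit'; it caches the computed result before
     * returning. */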
    if (0) {
    exit:
        DEBUG_PRINTF("using cached result\n");
        cache.smgb[u] = rv;
        return rv;
    }

    assert(contains(region_map, u));
    const u32 u_region = region_map.at(u);

    for (const auto &e : be) {
        NFAVertex s = source(e, g);
        NFAVertex t = target(e, g);
        /* only need to worry about big cycles including/before u */
        DEBUG_PRINTF("back edge %u %u\n", g[s].index,
                      g[t].index);
        if (s != t && region_map.at(s) <= u_region) {
            DEBUG_PRINTF("eek big cycle\n");
            rv = true; /* big cycle -> eek */
            goto exit;
        }
    }

    ue2::unordered_map<NFAVertex, NFAVertex> orig_to_copy;
    NGHolder c_g;
    cloneHolder(c_g, g, &orig_to_copy);

    for (NFAVertex v : vertices_range(g)) {
        if (!is_virtual_start(v, g)) {
            continue;
        }
        NFAVertex c_v = orig_to_copy[v];
        orig_to_copy[v] = c_g.startDs;
        for (NFAVertex c_w : adjacent_vertices_range(c_v, c_g)) {
            add_edge_if_not_present(c_g.startDs, c_w, c_g);
        }
        clear_vertex(c_v, c_g);
    }

    NFAVertex c_u = orig_to_copy[u];
    clear_in_edges(c_g.acceptEod, c_g);
    add_edge(c_g.accept, c_g.acceptEod, c_g);
    clear_in_edges(c_g.accept, c_g);
    clear_out_edges(c_u, c_g);
    if (hasSelfLoop(u, g)) {
        add_edge(c_u, c_u, c_g);
    }
    add_edge(c_u, c_g.accept, c_g);

    set<NFAVertex> u_succ;
    insert(&u_succ, adjacent_vertices(u, g));
    u_succ.erase(u);

    for (auto t : inv_adjacent_vertices_range(u, g)) {
        if (t == u) {
            continue;
        }
        for (auto v : adjacent_vertices_range(t, g)) {
            if (contains(u_succ, v)) {
                add_edge(orig_to_copy[t], c_g.accept, c_g);
                break;
            }
        }
    }

    pruneUseless(c_g);

    be.clear();
    depth_first_search(c_g.g, visitor(backEdgeVisitor).root_vertex(c_g.start).
                       vertex_index_map(get(&NFAGraphVertexProps::index, c_g.g)));

    for (const auto &e : be) {
        NFAVertex s = source(e, c_g);
        NFAVertex t = target(e, c_g);
        DEBUG_PRINTF("back edge %u %u\n", c_g[s].index, c_g[t].index);
        if (s != t) {
            assert(0);
            DEBUG_PRINTF("eek big cycle\n");
            rv = true; /* big cycle -> eek */
            goto exit;
        }
    }

    DEBUG_PRINTF("checking acyclic+selfloop graph\n");

    rv = !firstMatchIsFirst(c_g);
    DEBUG_PRINTF("som may regress? %d\n", (int)rv);
    goto exit;
}
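somMayGoBackwards twice collects back edges with a depth_first_search visitor and treats any back edge that is not a self-loop as evidence of a "big" cycle. Below is a minimal, self-contained sketch of that idiom on a plain Boost adjacency_list; the visitor name BackEdgeCollector and the example graph are assumptions for illustration, and the s != t test mirrors the check used above.

// Sketch only: assumed names, not Hyperscan's BackEdges visitor.
#include <iostream>
#include <set>
#include <utility>
#include <boost/graph/adjacency_list.hpp>
#include <boost/graph/depth_first_search.hpp>

using Graph = boost::adjacency_list<boost::vecS, boost::vecS, boost::directedS>;
using Edge = boost::graph_traits<Graph>::edge_descriptor;
using Vertex = boost::graph_traits<Graph>::vertex_descriptor;

// Records every back edge encountered during the DFS.
struct BackEdgeCollector : boost::default_dfs_visitor {
    explicit BackEdgeCollector(std::set<std::pair<Vertex, Vertex>> &out)
        : out(out) {}
    void back_edge(Edge e, const Graph &g) {
        out.insert({boost::source(e, g), boost::target(e, g)});
    }
    std::set<std::pair<Vertex, Vertex>> &out;
};

int main() {
    Graph g(4);
    boost::add_edge(0, 1, g);
    boost::add_edge(1, 2, g);
    boost::add_edge(2, 1, g); // cycle 1 -> 2 -> 1
    boost::add_edge(2, 2, g); // self-loop
    boost::add_edge(2, 3, g);

    std::set<std::pair<Vertex, Vertex>> back_edges;
    boost::depth_first_search(g, boost::visitor(BackEdgeCollector(back_edges)));

    for (const auto &e : back_edges) {
        // A back edge that is not a self-loop indicates a larger cycle.
        if (e.first != e.second) {
            std::cout << "big cycle via back edge " << e.first << " -> "
                      << e.second << "\n";
        }
    }
    return 0;
}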
Example #3
void clear_in_edges(NFAVertex v, NGHolder &h) {
    h.isValidNumEdges = false;
    clear_in_edges(v, h.g);
}
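This clear_in_edges overload is one of several NGHolder mutators that invalidate a cached edge count before forwarding to the underlying Boost graph operation. A minimal sketch of that cache-invalidation pattern follows; the Holder type and its members are assumptions for illustration, not the real NGHolder.

// Sketch only: assumed toy wrapper, not Hyperscan's NGHolder.
#include <cstddef>
#include <iostream>
#include <boost/graph/adjacency_list.hpp>

// Caches its edge count and recomputes it lazily once a mutator has
// flagged the cache as stale.
struct Holder {
    using Graph = boost::adjacency_list<boost::vecS, boost::vecS,
                                        boost::bidirectionalS>;
    using Vertex = boost::graph_traits<Graph>::vertex_descriptor;
    Graph g;
    mutable bool isValidNumEdges = false;
    mutable std::size_t numEdges = 0;

    std::size_t num_edges() const {
        if (!isValidNumEdges) {
            numEdges = boost::num_edges(g); // recount lazily
            isValidNumEdges = true;
        }
        return numEdges;
    }
};

// Mutating helper in the style above: mark the cached count stale, then
// forward to the underlying graph operation.
void clear_in_edges(Holder::Vertex v, Holder &h) {
    h.isValidNumEdges = false;
    boost::clear_in_edges(v, h.g);
}

int main() {
    Holder h;
    Holder::Vertex a = boost::add_vertex(h.g);
    Holder::Vertex b = boost::add_vertex(h.g);
    boost::add_edge(a, b, h.g); // a real wrapper would invalidate here too
    std::cout << "edges before clear: " << h.num_edges() << "\n"; // 1
    clear_in_edges(b, h);
    std::cout << "edges after clear:  " << h.num_edges() << "\n"; // 0
    return 0;
}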