int rt_comb_export4( struct bu_external *ep, const struct rt_db_internal *ip, double UNUSED(local2mm), const struct db_i *dbip, struct resource *resp) { struct rt_comb_internal *comb; size_t node_count; size_t actual_count; struct rt_tree_array *rt_tree_array; union tree *tp; union record *rp; size_t j; char *endp; struct bu_vls tmp_vls = BU_VLS_INIT_ZERO; RT_CK_DB_INTERNAL(ip); if (dbip) RT_CK_DBI(dbip); RT_CK_RESOURCE(resp); if (ip->idb_type != ID_COMBINATION) bu_bomb("rt_comb_export4() type not ID_COMBINATION"); comb = (struct rt_comb_internal *)ip->idb_ptr; RT_CK_COMB(comb); if (comb->tree && db_ck_v4gift_tree(comb->tree) < 0) { db_non_union_push(comb->tree, resp); if (db_ck_v4gift_tree(comb->tree) < 0) { /* Need to further modify tree */ bu_log("rt_comb_export4() Unable to V4-ify tree, aborting.\n"); rt_pr_tree(comb->tree, 0); return -1; } } /* Count # leaves in tree -- that's how many Member records needed. */ node_count = db_tree_nleaves(comb->tree); if (node_count > 0) { rt_tree_array = (struct rt_tree_array *)bu_calloc(node_count, sizeof(struct rt_tree_array), "rt_tree_array"); /* Convert tree into array form */ actual_count = db_flatten_tree(rt_tree_array, comb->tree, OP_UNION, 1, resp) - rt_tree_array; BU_ASSERT_SIZE_T(actual_count, ==, node_count); comb->tree = TREE_NULL; } else {
/** * Warning: This function uses recursion rather than iteration and a * stack, to preserve simplicity. On machines with limited stack * space, such as the Gould, this subroutine may overwhelm the stack * on complex expressions. */ void rt_pr_tree(register const union tree *tp, int lvl) /* recursion level */ { register int i; RT_CK_TREE(tp); bu_log("%p ", (void *)tp); for (i=lvl; i>0; i--) bu_log(" "); if (tp == TREE_NULL) { bu_log("Null???\n"); return; } switch (tp->tr_op) { case OP_NOP: bu_log("NOP\n"); return; case OP_SOLID: bu_log("SOLID %s (bit %ld)\n", tp->tr_a.tu_stp->st_dp->d_namep, tp->tr_a.tu_stp->st_bit); return; case OP_REGION: bu_log("REGION ctsp=%p\n", (void *)tp->tr_c.tc_ctsp); db_pr_combined_tree_state(tp->tr_c.tc_ctsp); return; case OP_DB_LEAF: bu_log("DB_LEAF %s%s\n", tp->tr_l.tl_name, tp->tr_l.tl_mat ? " (matrix)" : ""); return; default: bu_log("Unknown op=x%x\n", tp->tr_op); return; case OP_UNION: bu_log("UNION\n"); break; case OP_INTERSECT: bu_log("INTERSECT\n"); break; case OP_SUBTRACT: bu_log("MINUS\n"); break; case OP_XOR: bu_log("XOR\n"); break; case OP_NOT: bu_log("NOT\n"); break; } switch (tp->tr_op) { case OP_UNION: case OP_INTERSECT: case OP_SUBTRACT: case OP_XOR: /* BINARY type */ rt_pr_tree(tp->tr_b.tb_left, lvl+1); rt_pr_tree(tp->tr_b.tb_right, lvl+1); break; case OP_NOT: case OP_GUARD: case OP_XNOP: /* UNARY tree */ rt_pr_tree(tp->tr_b.tb_left, lvl+1); break; } }
/** * When performing "ev" on a region, consider whether to process the * whole subtree recursively. * * Normally, say "yes" to all regions by returning 0. * * Check for special case: a region of one solid, which can be * directly drawn as polygons without going through NMGs. If we draw * it here, then return -1 to signal caller to ignore further * processing of this region. A hack to view polygonal models * (converted from FASTGEN) more rapidly. */ static int draw_nmg_region_start(struct db_tree_state *tsp, const struct db_full_path *pathp, const struct rt_comb_internal *combp, void *client_data) { union tree *tp; struct directory *dp; struct rt_db_internal intern; mat_t xform; matp_t matp; struct bu_list vhead; struct _ged_client_data *dgcdp = (struct _ged_client_data *)client_data; if (RT_G_DEBUG&DEBUG_TREEWALK) { char *sofar = db_path_to_string(pathp); bu_vls_printf(dgcdp->gedp->ged_result_str, "nmg_region_start(%s)\n", sofar); bu_free((void *)sofar, "path string"); rt_pr_tree(combp->tree, 1); db_pr_tree_state(tsp); } RT_CK_DBI(tsp->ts_dbip); RT_CK_RESOURCE(tsp->ts_resp); BU_LIST_INIT(&vhead); RT_CK_COMB(combp); tp = combp->tree; if (!tp) return -1; RT_CK_TREE(tp); if (tp->tr_l.tl_op != OP_DB_LEAF) return 0; /* proceed as usual */ /* The subtree is a single node. 
It may be a combination, though */ /* Fetch by name, check to see if it's an easy type */ dp = db_lookup(tsp->ts_dbip, tp->tr_l.tl_name, LOOKUP_NOISY); if (!dp) return 0; /* proceed as usual */ if (!bn_mat_is_identity(tsp->ts_mat)) { if (tp->tr_l.tl_mat) { matp = xform; bn_mat_mul(xform, tsp->ts_mat, tp->tr_l.tl_mat); } else { matp = tsp->ts_mat; } } else { if (tp->tr_l.tl_mat) { matp = tp->tr_l.tl_mat; } else { matp = (matp_t)NULL; } } if (rt_db_get_internal(&intern, dp, tsp->ts_dbip, matp, &rt_uniresource) < 0) return 0; /* proceed as usual */ switch (intern.idb_type) { case ID_POLY: { if (RT_G_DEBUG&DEBUG_TREEWALK) { bu_log("fastpath draw ID_POLY %s\n", dp->d_namep); } if (dgcdp->draw_wireframes) { (void)rt_pg_plot(&vhead, &intern, tsp->ts_ttol, tsp->ts_tol, NULL); } else { (void)rt_pg_plot_poly(&vhead, &intern, tsp->ts_ttol, tsp->ts_tol); } } goto out; case ID_BOT: { if (RT_G_DEBUG&DEBUG_TREEWALK) { bu_log("fastpath draw ID_BOT %s\n", dp->d_namep); } if (dgcdp->draw_wireframes) { (void)rt_bot_plot(&vhead, &intern, tsp->ts_ttol, tsp->ts_tol, NULL); } else { (void)rt_bot_plot_poly(&vhead, &intern, tsp->ts_ttol, tsp->ts_tol); } } goto out; case ID_BREP: { if (RT_G_DEBUG&DEBUG_TREEWALK) { bu_log("fastpath draw ID_BREP %s\n", dp->d_namep); } if (dgcdp->draw_wireframes) { (void)rt_brep_plot(&vhead, &intern, tsp->ts_ttol, tsp->ts_tol, NULL); } else { (void)rt_brep_plot_poly(&vhead, pathp, &intern, tsp->ts_ttol, tsp->ts_tol, NULL); } } goto out; case ID_COMBINATION: default: break; } rt_db_free_internal(&intern); return 0; out: { struct db_full_path pp; db_full_path_init(&pp); db_dup_full_path(&pp, pathp); /* Successful fastpath drawing of this solid */ db_add_node_to_full_path(&pp, dp); _ged_drawH_part2(0, &vhead, &pp, tsp, dgcdp); db_free_full_path(&pp); } rt_db_free_internal(&intern); dgcdp->fastpath_count++; return -1; /* SKIP THIS REGION */ }
/**
 * R T _ G E T T R E E _ R E G I O N _ E N D
 *
 * This routine will be called by db_walk_tree() once all the solids
 * in this region have been visited.
 *
 * This routine must be prepared to run in parallel.  As a result,
 * note that the details of the solids pointed to by the soltab
 * pointers in the tree may not be filled in when this routine is
 * called (due to the way multiple instances of solids are handled).
 * Therefore, everything which referred to the tree has been moved out
 * into the serial section.  (rt_tree_region_assign, rt_bound_tree)
 *
 * Returns TREE_NULL to indicate 'curtree' has been taken over by the
 * newly-created region, or 'curtree' itself for an empty (OP_NOP)
 * region that the caller should keep.
 */
HIDDEN union tree *rt_gettree_region_end(register struct db_tree_state *tsp, struct db_full_path *pathp, union tree *curtree, genptr_t client_data)
{
    struct region *rp;
    struct directory *dp;
    int shader_len=0;
    struct rt_i *rtip;
    int i;
    Tcl_HashTable *tbl = (Tcl_HashTable *)client_data;	/* ORCA component table, may be NULL */
    Tcl_HashEntry *entry;
    matp_t inv_mat;

    RT_CK_DBI(tsp->ts_dbip);
    RT_CK_FULL_PATH(pathp);
    RT_CK_TREE(curtree);
    rtip = tsp->ts_rtip;
    RT_CK_RTI(rtip);
    RT_CK_RESOURCE(tsp->ts_resp);

    if ( curtree->tr_op == OP_NOP ) {
	/* Ignore empty regions */
	return curtree;
    }

    /* Build a new region from the accumulated tree-walk state. */
    BU_GETSTRUCT( rp, region );
    rp->l.magic = RT_REGION_MAGIC;
    rp->reg_regionid = tsp->ts_regionid;
    rp->reg_is_fastgen = tsp->ts_is_fastgen;
    rp->reg_aircode = tsp->ts_aircode;
    rp->reg_gmater = tsp->ts_gmater;
    rp->reg_los = tsp->ts_los;

    /* Copy any inherited attribute/value pairs into a NULL-terminated
     * array of per-region string objects. */
    if ( tsp->ts_attrs.count && tsp->ts_attrs.avp ) {
	rp->attr_values = (struct bu_mro **)bu_calloc( tsp->ts_attrs.count+1, sizeof( struct bu_mro *), "regp->attr_values" );
	for ( i=0; i<tsp->ts_attrs.count; i++ ) {
	    rp->attr_values[i] = bu_malloc( sizeof( struct bu_mro ), "rp->attr_values[i]" );
	    bu_mro_init_with_string( rp->attr_values[i], tsp->ts_attrs.avp[i].value );
	}
    } else {
	rp->attr_values = (struct bu_mro **)NULL;
    }

    rp->reg_mater = tsp->ts_mater;	/* struct copy */
    /* The struct copy above only duplicated the shader pointer;
     * deep-copy the string so this region owns its own copy. */
    if ( tsp->ts_mater.ma_shader )
	shader_len = strlen( tsp->ts_mater.ma_shader );
    if ( shader_len ) {
	rp->reg_mater.ma_shader = bu_strdup( tsp->ts_mater.ma_shader );
    } else
	rp->reg_mater.ma_shader = (char *)NULL;

    rp->reg_name = db_path_to_string( pathp );

    dp = (struct directory *)DB_FULL_PATH_CUR_DIR(pathp);
    if (RT_G_DEBUG&DEBUG_TREEWALK) {
	bu_log("rt_gettree_region_end() %s\n", rp->reg_name );
	rt_pr_tree( curtree, 0 );
    }

    rp->reg_treetop = curtree;
    rp->reg_all_unions = db_is_tree_all_unions( curtree );

    /* Determine material properties */
    rp->reg_mfuncs = (char *)0;
    rp->reg_udata = (char *)0;
    if ( rp->reg_mater.ma_color_valid == 0 )
	rt_region_color_map(rp);

    /* enter critical section -- instance number, list insertion, and
     * bit-vector position assignment must be atomic in parallel runs */
    bu_semaphore_acquire( RT_SEM_RESULTS );

    rp->reg_instnum = dp->d_uses++;

    /*
     * Add the region to the linked list of regions.
     * Positions in the region bit vector are established at this time.
     */
    BU_LIST_INSERT( &(rtip->HeadRegion), &rp->l );

    /* Assign bit vector pos. */
    rp->reg_bit = rtip->nregions++;

    /* leave critical section */
    bu_semaphore_release( RT_SEM_RESULTS );

    /* For ORCA components, stash the inverse of the accumulated
     * matrix in the hash table, keyed by region bit number.  NOTE:
     * the inv_mat allocation's ownership passes to the table; it is
     * presumably freed when the table is torn down -- confirm at the
     * table's teardown site. */
    if ( tbl && bu_avs_get( &tsp->ts_attrs, "ORCA_Comp" ) ) {
	int newentry;
	long int reg_bit = rp->reg_bit;

	inv_mat = (matp_t)bu_calloc( 16, sizeof( fastf_t ), "inv_mat" );
	if ( tsp->ts_mat )
	    bn_mat_inv( inv_mat, tsp->ts_mat );
	else
	    MAT_IDN( inv_mat );

	/* enter critical section -- Tcl hash table is shared state */
	bu_semaphore_acquire( RT_SEM_RESULTS );

	/* one-word hash key: the region bit number cast to (char *) */
	entry = Tcl_CreateHashEntry(tbl, (char *)reg_bit, &newentry);
	Tcl_SetHashValue( entry, (ClientData)inv_mat );

	/* leave critical section */
	bu_semaphore_release( RT_SEM_RESULTS );
    }

    if ( RT_G_DEBUG & DEBUG_REGIONS ) {
	bu_log("Add Region %s instnum %d\n",
	       rp->reg_name, rp->reg_instnum);
    }

    /* Indicate that we have swiped 'curtree' */
    return(TREE_NULL);
}