/*
 * Command-line test driver for bn_tabdata_blend3().
 *
 * Expected argv layout (argc must be 10):
 *   argv[2] = table definition
 *   argv[3..5] = three input tabdata sets (on that table)
 *   argv[6..8] = three blend scale factors
 *   argv[9] = expected blended result
 *
 * Returns 0 when the computed blend matches the expected tabdata,
 * non-zero otherwise (suitable as a test exit status).
 */
static int
test_bn_tabdata_blend3(int argc, char *argv[])
{
    struct bn_table *tab_in;
    struct bn_tabdata *td_in1;
    struct bn_tabdata *td_in2;
    struct bn_tabdata *td_in3;
    struct bn_tabdata *expected;
    struct bn_tabdata *actual;
    fastf_t scale_1, scale_2, scale_3;

    if (argc != 10) {
	bu_exit(1, "<args> format: table tabdata1 tabdata2 tabdata3 scale1 scale2 scale3 expected_result [%s]\n", argv[0]);
    }

    scan_tab_args(argv[2], &tab_in);
    scan_tabdata_args(argv[3], &td_in1, tab_in);
    scan_tabdata_args(argv[4], &td_in2, tab_in);
    scan_tabdata_args(argv[5], &td_in3, tab_in);

    /* Bug fix: the sscanf results were previously unchecked, so a
     * malformed scale argument would leave the corresponding fastf_t
     * uninitialized (undefined behavior when read below).  Validate
     * each conversion and bail out with a usage error on failure. */
    if (sscanf(argv[6], "%lg", &scale_1) != 1
	|| sscanf(argv[7], "%lg", &scale_2) != 1
	|| sscanf(argv[8], "%lg", &scale_3) != 1)
    {
	bu_exit(1, "Malformed scale argument(s) [%s]\n", argv[0]);
    }

    scan_tabdata_args(argv[9], &expected, tab_in);

    /* Allocate the output tabdata on the same table, blend, and print
     * the result for diagnostic purposes. */
    BN_GET_TABDATA(actual, tab_in);
    bn_tabdata_blend3(actual, scale_1, td_in1, scale_2, td_in2, scale_3, td_in3);
    bn_pr_tabdata("Result", actual);

    /* tabdata_equal() is truthy on match; invert for exit-status use. */
    return !tabdata_equal(expected, actual);
}
/*
 * R T _ S P E C T _ X Y Z _ T O _ C U R V E
 *
 * Build a spectral curve from a CIE XYZ tristimulus value by blending
 * the three CIE matching curves, each weighted by its corresponding
 * tristimulus component.
 *
 * Values of the curve will be normalized to 0..1 range; caller must
 * scale into meaningful units.
 *
 * Convenience routine.
 * XXX This routine is probably wrong.  Or at least, it needs different
 * curves.  XXX Converting rgb to a curve, directly, should be easy.
 */
void
spect_xyz_to_curve(struct bn_tabdata *tabp, const fastf_t *xyz, const struct bn_tabdata *cie_x, const struct bn_tabdata *cie_y, const struct bn_tabdata *cie_z)
{
    /* Pull out the per-channel weights for readability. */
    const fastf_t weight_x = xyz[X];
    const fastf_t weight_y = xyz[Y];
    const fastf_t weight_z = xyz[Z];

    /* tabp = weight_x*cie_x + weight_y*cie_y + weight_z*cie_z */
    bn_tabdata_blend3(tabp, weight_x, cie_x, weight_y, cie_y, weight_z, cie_z);
}
/*
 * R T _ S P E C T _ R G B _ T O _ C U R V E
 *
 * Using the "Representative set of camera taking sensitivities" for a
 * NTSC television camera, from Benson "Television Engineering Handbook"
 * page 4.58, convert an RGB value in range 0..1 to a spectral curve
 * also in range 0..1.
 *
 * XXX This is completely wrong, don't do this.
 */
void
spect_rgb_to_curve(struct bn_tabdata *tabp, const fastf_t *rgb, const struct bn_tabdata *ntsc_r, const struct bn_tabdata *ntsc_g, const struct bn_tabdata *ntsc_b)
{
    /* Pull out the per-channel weights for readability. */
    const fastf_t weight_r = rgb[0];
    const fastf_t weight_g = rgb[1];
    const fastf_t weight_b = rgb[2];

    /* tabp = weight_r*ntsc_r + weight_g*ntsc_g + weight_b*ntsc_b */
    bn_tabdata_blend3(tabp, weight_r, ntsc_r, weight_g, ntsc_g, weight_b, ntsc_b);
}