// // COPY_VALUE_Debug: C // // The implementation of COPY_VALUE_CORE is designed to be fairly optimal // (since it is being called in lieu of what would have been a memcpy() or // plain assignment). It is left in its raw form as an inline function to // to help convey that it is nearly as efficient as an assignment. // // This adds some verbose checking in the debug build to help debug cases // where the relative information bits are incorrect. // void COPY_VALUE_Debug( REBVAL *dest, const RELVAL *src, REBCTX *specifier ) { assert(!IS_END(src)); assert(!IS_TRASH_DEBUG(src)); #ifdef __cplusplus Assert_Cell_Writable(dest, __FILE__, __LINE__); #endif if (IS_RELATIVE(src)) { assert(ANY_WORD(src) || ANY_ARRAY(src)); if (specifier == SPECIFIED) { Debug_Fmt("Internal Error: Relative item used with SPECIFIED"); PROBE_MSG(src, "word or array"); PROBE_MSG(FUNC_VALUE(VAL_RELATIVE(src)), "func"); assert(FALSE); } else if ( VAL_RELATIVE(src) != VAL_FUNC(CTX_FRAME_FUNC_VALUE(specifier)) ) { Debug_Fmt("Internal Error: Function mismatch in specific binding"); PROBE_MSG(src, "word or array"); PROBE_MSG(FUNC_VALUE(VAL_RELATIVE(src)), "expected func"); PROBE_MSG(CTX_FRAME_FUNC_VALUE(specifier), "actual func"); assert(FALSE); } } COPY_VALUE_CORE(dest, src, specifier); }
// // Do_Breakpoint_Throws: C // // A call to Do_Breakpoint_Throws does delegation to a hook in the host, which // (if registered) will generally start an interactive session for probing the // environment at the break. The `resume` native cooperates by being able to // give back a value (or give back code to run to produce a value) that the // call to breakpoint returns. // // RESUME has another feature, which is to be able to actually unwind and // simulate a return /AT a function *further up the stack*. (This may be // switched to a feature of a "STEP OUT" command at some point.) // REBOOL Do_Breakpoint_Throws( REBVAL *out, REBOOL interrupted, // Ctrl-C (as opposed to a BREAKPOINT) const REBVAL *default_value, REBOOL do_default ) { REBVAL *target = NONE_VALUE; REBVAL temp; VAL_INIT_WRITABLE_DEBUG(&temp); if (!PG_Breakpoint_Quitting_Hook) { // // Host did not register any breakpoint handler, so raise an error // about this as early as possible. // fail (Error(RE_HOST_NO_BREAKPOINT)); } // We call the breakpoint hook in a loop, in order to keep running if any // inadvertent FAILs or THROWs occur during the interactive session. // Only a conscious call of RESUME speaks the protocol to break the loop. // while (TRUE) { struct Reb_State state; REBCTX *error; push_trap: PUSH_TRAP(&error, &state); // The host may return a block of code to execute, but cannot // while evaluating do a THROW or a FAIL that causes an effective // "resumption". Halt is the exception, hence we PUSH_TRAP and // not PUSH_UNHALTABLE_TRAP. QUIT is also an exception, but a // desire to quit is indicated by the return value of the breakpoint // hook (which may or may not decide to request a quit based on the // QUIT command being run). // // The core doesn't want to get involved in presenting UI, so if // an error makes it here and wasn't trapped by the host first that // is a bug in the host. It should have done its own PUSH_TRAP. 
// if (error) { #if !defined(NDEBUG) REBVAL error_value; VAL_INIT_WRITABLE_DEBUG(&error_value); Val_Init_Error(&error_value, error); PROBE_MSG(&error_value, "Error not trapped during breakpoint:"); Panic_Array(CTX_VARLIST(error)); #endif // In release builds, if an error managed to leak out of the // host's breakpoint hook somehow...just re-push the trap state // and try it again. // goto push_trap; } // Call the host's breakpoint hook. // if (PG_Breakpoint_Quitting_Hook(&temp, interrupted)) { // // If a breakpoint hook returns TRUE that means it wants to quit. // The value should be the /WITH value (as in QUIT/WITH) // assert(!THROWN(&temp)); *out = *ROOT_QUIT_NATIVE; CONVERT_NAME_TO_THROWN(out, &temp, FALSE); return TRUE; // TRUE = threw } // If a breakpoint handler returns FALSE, then it should have passed // back a "resume instruction" triggered by a call like: // // resume/do [fail "This is how to fail from a breakpoint"] // // So now that the handler is done, we will allow any code handed back // to do whatever FAIL it likes vs. trapping that here in a loop. // DROP_TRAP_SAME_STACKLEVEL_AS_PUSH(&state); // Decode and process the "resume instruction" { struct Reb_Frame *frame; REBVAL *mode; REBVAL *payload; assert(IS_GROUP(&temp)); assert(VAL_LEN_HEAD(&temp) == RESUME_INST_MAX); mode = VAL_ARRAY_AT_HEAD(&temp, RESUME_INST_MODE); payload = VAL_ARRAY_AT_HEAD(&temp, RESUME_INST_PAYLOAD); target = VAL_ARRAY_AT_HEAD(&temp, RESUME_INST_TARGET); // The first thing we need to do is determine if the target we // want to return to has another breakpoint sandbox blocking // us. If so, what we need to do is actually retransmit the // resume instruction so it can break that wall, vs. transform // it into an EXIT/FROM that would just get intercepted. 
// if (!IS_NONE(target)) { #if !defined(NDEBUG) REBOOL found = FALSE; #endif for (frame = FS_TOP; frame != NULL; frame = frame->prior) { if (frame->mode != CALL_MODE_FUNCTION) continue; if ( frame != FS_TOP && FUNC_CLASS(frame->func) == FUNC_CLASS_NATIVE && ( FUNC_CODE(frame->func) == &N_pause || FUNC_CODE(frame->func) == &N_breakpoint ) ) { // We hit a breakpoint (that wasn't this call to // breakpoint, at the current FS_TOP) before finding // the sought after target. Retransmit the resume // instruction so that level will get it instead. // *out = *ROOT_RESUME_NATIVE; CONVERT_NAME_TO_THROWN(out, &temp, FALSE); return TRUE; // TRUE = thrown } if (IS_FRAME(target)) { if (NOT(frame->flags & DO_FLAG_FRAME_CONTEXT)) continue; if ( VAL_CONTEXT(target) == AS_CONTEXT(frame->data.context) ) { // Found a closure matching the target before we // reached a breakpoint, no need to retransmit. // #if !defined(NDEBUG) found = TRUE; #endif break; } } else { assert(IS_FUNCTION(target)); if (frame->flags & DO_FLAG_FRAME_CONTEXT) continue; if (VAL_FUNC(target) == frame->func) { // // Found a function matching the target before we // reached a breakpoint, no need to retransmit. // #if !defined(NDEBUG) found = TRUE; #endif break; } } } // RESUME should not have been willing to use a target that // is not on the stack. // #if !defined(NDEBUG) assert(found); #endif } if (IS_NONE(mode)) { // // If the resume instruction had no /DO or /WITH of its own, // then it doesn't override whatever the breakpoint provided // as a default. (If neither the breakpoint nor the resume // provided a /DO or a /WITH, result will be UNSET.) // goto return_default; // heeds `target` } assert(IS_LOGIC(mode)); if (VAL_LOGIC(mode)) { if (DO_VAL_ARRAY_AT_THROWS(&temp, payload)) { // // Throwing is not compatible with /AT currently. 
// if (!IS_NONE(target)) fail (Error_No_Catch_For_Throw(&temp)); // Just act as if the BREAKPOINT call itself threw // *out = temp; return TRUE; // TRUE = thrown } // Ordinary evaluation result... } else temp = *payload; } // The resume instruction will be GC'd. // goto return_temp; } DEAD_END; return_default: if (do_default) { if (DO_VAL_ARRAY_AT_THROWS(&temp, default_value)) { // // If the code throws, we're no longer in the sandbox...so we // bubble it up. Note that breakpoint runs this code at its // level... so even if you request a higher target, any throws // will be processed as if they originated at the BREAKPOINT // frame. To do otherwise would require the EXIT/FROM protocol // to add support for DO-ing at the receiving point. // *out = temp; return TRUE; // TRUE = thrown } } else temp = *default_value; // generally UNSET! if no /WITH return_temp: // The easy case is that we just want to return from breakpoint // directly, signaled by the target being NONE!. // if (IS_NONE(target)) { *out = temp; return FALSE; // FALSE = not thrown } // If the target is a function, then we're looking to simulate a return // from something up the stack. This uses the same mechanic as // definitional returns--a throw named by the function or closure frame. // // !!! There is a weak spot in definitional returns for FUNCTION! that // they can only return to the most recent invocation; which is a weak // spot of FUNCTION! in general with stack relative variables. Also, // natives do not currently respond to definitional returns...though // they can do so just as well as FUNCTION! can. // *out = *target; CONVERT_NAME_TO_THROWN(out, &temp, TRUE); return TRUE; // TRUE = thrown }
// // Frame_For_Stack_Level: C // // Level can be an UNSET!, an INTEGER!, an ANY-FUNCTION!, or a FRAME!. If // level is UNSET! then it means give whatever the first call found is. // // Returns NULL if the given level number does not correspond to a running // function on the stack. // // Can optionally give back the index number of the stack level (counting // where the most recently pushed stack level is the lowest #) // // !!! Unfortunate repetition of logic inside of BACKTRACE; find a way to // unify the logic for omitting things like breakpoint frames, or either // considering pending frames or not... // struct Reb_Frame *Frame_For_Stack_Level( REBCNT *number_out, const REBVAL *level, REBOOL skip_current ) { struct Reb_Frame *frame = FS_TOP; REBOOL first = TRUE; REBINT num = 0; if (IS_INTEGER(level)) { if (VAL_INT32(level) < 0) { // // !!! fail() here, or just return NULL? // return NULL; } } // We may need to skip some number of frames, if there have been stack // levels added since the numeric reference point that "level" was // supposed to refer to has changed. For now that's only allowed to // be one level, because it's rather fuzzy which stack levels to // omit otherwise (pending? parens?) // if (skip_current) frame = frame->prior; for (; frame != NULL; frame = frame->prior) { if (frame->mode != CALL_MODE_FUNCTION) { // // Don't consider pending calls, or GROUP!, or any non-invoked // function as a candidate to target. // // !!! The inability to target a GROUP! by number is an artifact // of implementation, in that there's no hook in Do_Core() at // the point of group evaluation to process the return. The // matter is different with a pending function call, because its // arguments are only partially processed--hence something // like a RESUME/AT or an EXIT/FROM would not know which array // index to pick up running from. 
// continue; } if (first) { if ( IS_FUNCTION_AND(FUNC_VALUE(frame->func), FUNC_CLASS_NATIVE) && ( FUNC_CODE(frame->func) == &N_pause || FUNC_CODE(frame->func) == N_breakpoint ) ) { // this is considered the "0". Return it only if 0 was requested // specifically (you don't "count down to it"); // if (IS_INTEGER(level) && num == VAL_INT32(level)) goto return_maybe_set_number_out; else { first = FALSE; continue; } } else { ++num; // bump up from 0 } } if (IS_INTEGER(level) && num == VAL_INT32(level)) goto return_maybe_set_number_out; first = FALSE; if (frame->mode != CALL_MODE_FUNCTION) { // // Pending frames don't get numbered // continue; } if (IS_UNSET(level) || IS_NONE(level)) { // // Take first actual frame if unset or none // goto return_maybe_set_number_out; } else if (IS_INTEGER(level)) { ++num; if (num == VAL_INT32(level)) goto return_maybe_set_number_out; } else if (IS_FRAME(level)) { if ( (frame->flags & DO_FLAG_FRAME_CONTEXT) && frame->data.context == VAL_CONTEXT(level) ) { goto return_maybe_set_number_out; } } else { assert(IS_FUNCTION(level)); if (VAL_FUNC(level) == frame->func) goto return_maybe_set_number_out; } } // Didn't find it... // return NULL; return_maybe_set_number_out: if (number_out) *number_out = num; return frame; }
/* Accumulate pair-potential contributions to energy, stress, pressure,
 * bulk modulus, and elastic constants (c11..c66) for all atom pairs
 * between cell p and cell q.
 *
 * p, q:  the two cells (p == q handles intra-cell pairs, j starting at i+1
 *        so each pair is visited once)
 * pbc:   periodic-boundary shift vector applied to the distance
 *
 * Per-pair results are added both to per-atom arrays (p->stress, p->elco,
 * ...) and to the global accumulators (epot, sigma, c, press, bulkm,
 * dbulkm_dp).  Global contributions carry a factor 2.0 since each per-atom
 * term is stored on both partners.
 *
 * NOTE: statement order matters below--several `tmp +=` / `tmp -=` lines
 * deliberately reuse the previous tmp value (e.g. c66 builds on c12's term,
 * c25 on c46's, c14 on c56's).  Do not reorder.
 */
void do_elco_pair(cell *p, cell *q, vektor pbc)
{
  int i, j;
  vektor d;                    /* interatomic distance vector */
  int p_typ, q_typ, col1, inc = ntypes * ntypes, is_short = 0;
  real r2, tmp, phi, dphi, ddphi, dddphi;  /* phi and its r2-derivatives */
#ifdef EAM
  real rho_h;                  /* host electron density contribution */
  int col2;                    /* transposed type column (q,p) */
  real rho_i_strich, rho_i_zweistrich, rho_i_dreistrich;
  real rho_j_strich, rho_j_zweistrich, rho_j_dreistrich;
#endif

  /* For each atom in first cell */
  for (i=0; i<p->n; ++i)
    /* For each atom in neighbouring cell */
    for (j=((p==q) ? i+1 : 0); j<q->n; ++j) {

      /* Calculate distance */
      d.x = q->ort[j].x - p->ort[i].x + pbc.x;
      d.y = q->ort[j].y - p->ort[i].y + pbc.y;
#ifndef TWOD
      d.z = q->ort[j].z - p->ort[i].z + pbc.z;
#endif

      r2 = SPROD(d,d);
      p_typ = p->sorte[i];
      q_typ = q->sorte[j];
      col1  = p_typ * ntypes + q_typ;   /* potential-table column for (p,q) */

      if ( r2 <= pair_pot.end[col1] ) {

        /* Interpolate potential and first three derivatives at r2 */
        PAIR_INT4(phi, dphi, ddphi, dddphi, pair_pot, col1, inc, r2, is_short)

        /* Compute potential energy */
        epot += phi;

        /* Compute stress and elastic constants */
        tmp = d.x * d.x * dphi;
        p->stress[i].xx += tmp;
        q->stress[j].xx += tmp;
        sigma.xx        += 2.0 * tmp;
        tmp = d.x * d.y * dphi;
        p->stress[i].xy += tmp;
        q->stress[j].xy += tmp;
        sigma.xy        += 2.0 * tmp;
        tmp = d.y * d.y * dphi;
        p->stress[i].yy += tmp;
        q->stress[j].yy += tmp;
        sigma.yy        += 2.0 * tmp;
#ifndef TWOD
        tmp = d.y * d.z * dphi;
        p->stress[i].yz += tmp;
        q->stress[j].yz += tmp;
        sigma.yz        += 2.0 * tmp;
        tmp = d.z * d.z * dphi;
        p->stress[i].zz += tmp;
        q->stress[j].zz += tmp;
        sigma.zz        += 2.0 * tmp;
        tmp = d.z * d.x * dphi;
        p->stress[i].zx += tmp;
        q->stress[j].zx += tmp;
        sigma.zx        += 2.0 * tmp;
#endif

        /* Elastic constants: Born terms 2 d_a d_b d_c d_d phi'' plus
           dphi corrections per component */
        tmp = 2.0 * d.x * d.x * d.x * d.x * ddphi + d.x * d.x * dphi;
        p->elco[i].c11 += tmp;
        q->elco[j].c11 += tmp;
        c.c11          += 2.0 * tmp;
        tmp = 2.0 * d.x * d.x * d.y * d.y * ddphi;
        p->elco[i].c12 += tmp;
        q->elco[j].c12 += tmp;
        c.c12          += 2.0 * tmp;
        /* c66 reuses c12's Born term (tmp +=) */
        tmp += 0.5 * ( d.x * d.x + d.y * d.y ) * dphi;
        p->elco[i].c66 += tmp;
        q->elco[j].c66 += tmp;
        c.c66          += 2.0 * tmp;
        tmp = 2.0 * d.y * d.y * d.y * d.y * ddphi + d.y * d.y * dphi;
        p->elco[i].c22 += tmp;
        q->elco[j].c22 += tmp;
        c.c22          += 2.0 * tmp;
#ifndef TWOD
        tmp = 2.0 * d.x * d.x * d.z * d.z * ddphi;
        p->elco[i].c13 += tmp;
        q->elco[j].c13 += tmp;
        c.c13          += 2.0 * tmp;
        /* c55 reuses c13's Born term */
        tmp += 0.5 * ( d.x * d.x + d.z * d.z ) * dphi;
        p->elco[i].c55 += tmp;
        q->elco[j].c55 += tmp;
        c.c55          += 2.0 * tmp;
        tmp = 2.0 * d.y * d.y * d.z * d.z * ddphi;
        p->elco[i].c23 += tmp;
        q->elco[j].c23 += tmp;
        c.c23          += 2.0 * tmp;
        /* c44 reuses c23's Born term */
        tmp += 0.5 * ( d.y * d.y + d.z * d.z ) * dphi;
        p->elco[i].c44 += tmp;
        q->elco[j].c44 += tmp;
        c.c44          += 2.0 * tmp;
        tmp = 2.0 * d.z * d.z * d.z * d.z * ddphi + d.z * d.z * dphi;
        p->elco[i].c33 += tmp;
        q->elco[j].c33 += tmp;
        c.c33          += 2.0 * tmp;
        tmp = 2.0 * d.x * d.y * d.z * d.z * ddphi
              + 0.25 * d.x * d.y * dphi;
        p->elco[i].c45 += tmp;
        q->elco[j].c45 += tmp;
        c.c45          += 2.0 * tmp;
        tmp = 2.0 * d.x * d.y * d.y * d.z * ddphi
              + 0.25 * d.x * d.z * dphi;
        p->elco[i].c46 += tmp;
        q->elco[j].c46 += tmp;
        c.c46          += 2.0 * tmp;
        /* c25 is c46's term without the dphi correction (tmp -=) */
        tmp -= 0.25 * d.x * d.z * dphi;
        p->elco[i].c25 += tmp;
        q->elco[j].c25 += tmp;
        c.c25          += 2.0 * tmp;
        tmp = 2.0 * d.x * d.x * d.y * d.z * ddphi
              + 0.25 * d.y * d.z * dphi;
        p->elco[i].c56 += tmp;
        q->elco[j].c56 += tmp;
        c.c56          += 2.0 * tmp;
        /* c14 is c56's term without the dphi correction */
        tmp -= 0.25 * d.y * d.z * dphi;
        p->elco[i].c14 += tmp;
        q->elco[j].c14 += tmp;
        c.c14          += 2.0 * tmp;
        tmp = 2.0 * d.x * d.x * d.x * d.z * ddphi
              + 0.5 * d.x * d.z * dphi;
        p->elco[i].c15 += tmp;
        q->elco[j].c15 += tmp;
        c.c15          += 2.0 * tmp;
#endif
        tmp = 2.0 * d.x * d.x * d.x * d.y * ddphi
              + 0.5 * d.x * d.y * dphi;
        p->elco[i].c16 += tmp;
        q->elco[j].c16 += tmp;
        c.c16          += 2.0 * tmp;
        tmp = 2.0 * d.x * d.y * d.y * d.y * ddphi
              + 0.5 * d.x * d.y * dphi;
        p->elco[i].c26 += tmp;
        q->elco[j].c26 += tmp;
        c.c26          += 2.0 * tmp;
#ifndef TWOD
        tmp = 2.0 * d.y * d.y * d.y * d.z * ddphi
              + 0.5 * d.y * d.z * dphi;
        p->elco[i].c24 += tmp;
        q->elco[j].c24 += tmp;
        c.c24          += 2.0 * tmp;
        tmp = 2.0 * d.y * d.z * d.z * d.z * ddphi
              + 0.5 * d.y * d.z * dphi;
        p->elco[i].c34 += tmp;
        q->elco[j].c34 += tmp;
        c.c34          += 2.0 * tmp;
        tmp = 2.0 * d.x * d.z * d.z * d.z * ddphi
              + 0.5 * d.x * d.z * dphi;
        p->elco[i].c35 += tmp;
        q->elco[j].c35 += tmp;
        c.c35          += 2.0 * tmp;
        tmp = 2.0 * d.x * d.y * d.z * d.z * ddphi;
        p->elco[i].c36 += tmp;
        q->elco[j].c36 += tmp;
        c.c36          += 2.0 * tmp;
#endif

        /* Pressure and bulk-modulus accumulators (virial-like sums) */
        press     += 2.0 * dphi * r2;
        bulkm     += ( 2.0 * ddphi * r2 + dphi ) * 2.0 * r2;
        dbulkm_dp += ( 2.0 * dddphi * r2 + 3.0 * ddphi ) * 4.0 * r2 * r2;

      }

#ifdef EAM
      col2 = q_typ * ntypes + p_typ;   /* transposed column for (q,p) */

      /* compute host electron density */
      if ( r2 < rho_h_tab.end[col1] ) {
        VAL_FUNC(rho_h, rho_h_tab, col1, inc, r2, is_short);
        EAM_RHO(p,i) += rho_h;
      }
      if ( p_typ == q_typ ) {
        /* same species: col1 == col2, so reuse rho_h from above */
        if ( r2 < rho_h_tab.end[col1] )
          EAM_RHO(q,j) += rho_h;
      } else {
        if ( r2 < rho_h_tab.end[col2] ) {
          VAL_FUNC(rho_h, rho_h_tab, col2, inc, r2, is_short);
          EAM_RHO(q,j) += rho_h;
        }
      }

      /* Compute stress for EAM potential */
      /* NOTE(review): the guard is an OR over col1 and col2 ranges, but
         DERIV_FUNC is then evaluated for BOTH columns (col2 here, col1
         below when col1 != col2) -- presumably the interpolation handles
         r2 past table end via is_short; confirm against DERIV_FUNC. */
      if ( (r2 < rho_h_tab.end[col1]) || (r2 < rho_h_tab.end[col2]) ) {

        DERIV_FUNC(rho_i_strich, rho_i_zweistrich, rho_i_dreistrich,
                   rho_h_tab, col2, inc, r2, is_short);

        q->eam_stress[j].xx += 2.0 * rho_i_strich * d.x * d.x;
        q->eam_stress[j].xy += 2.0 * rho_i_strich * d.x * d.y;
        q->eam_stress[j].yy += 2.0 * rho_i_strich * d.y * d.y;
#ifndef TWOD
        q->eam_stress[j].yz += 2.0 * rho_i_strich * d.y * d.z;
        q->eam_stress[j].zz += 2.0 * rho_i_strich * d.z * d.z;
        q->eam_stress[j].zx += 2.0 * rho_i_strich * d.z * d.x;
#endif
        q->eam_press[j]  += 2.0 * rho_i_strich * r2;
        q->eam_bulkm[j]  += ( 2.0 * rho_i_zweistrich * r2
                              + rho_i_strich ) * 2.0 * r2;
        q->eam_dbulkm[j] += ( 2.0 * rho_i_dreistrich * r2
                              + 3.0 * rho_i_zweistrich ) * 4.0 * r2 * r2;

        if ( col1 == col2 ) {
          /* symmetric pair: derivatives are identical, skip recompute */
          rho_j_strich     = rho_i_strich;
          rho_j_zweistrich = rho_i_zweistrich;
          rho_j_dreistrich = rho_i_dreistrich;
        } else {
          DERIV_FUNC(rho_j_strich, rho_j_zweistrich, rho_j_dreistrich,
                     rho_h_tab, col1, inc, r2, is_short);
        }

        p->eam_stress[i].xx += 2.0 * rho_j_strich * d.x * d.x;
        p->eam_stress[i].xy += 2.0 * rho_j_strich * d.x * d.y;
        p->eam_stress[i].yy += 2.0 * rho_j_strich * d.y * d.y;
#ifndef TWOD
        p->eam_stress[i].yz += 2.0 * rho_j_strich * d.y * d.z;
        p->eam_stress[i].zz += 2.0 * rho_j_strich * d.z * d.z;
        p->eam_stress[i].zx += 2.0 * rho_j_strich * d.z * d.x;
#endif
        p->eam_press[i]  += 2.0 * rho_j_strich * r2;
        p->eam_bulkm[i]  += ( 2.0 * rho_j_zweistrich * r2
                              + rho_j_strich ) * 2.0 * r2;
        p->eam_dbulkm[i] += ( 2.0 * rho_j_dreistrich * r2
                              + 3.0 * rho_j_zweistrich ) * 4.0 * r2 * r2;
      }
#endif /* EAM */

    }
}
// // Do_Path_Throws_Core: C // // Evaluate an ANY_PATH! REBVAL, starting from the index position of that // path value and continuing to the end. // // The evaluator may throw because GROUP! is evaluated, e.g. `foo/(throw 1020)` // // If label_sym is passed in as being non-null, then the caller is implying // readiness to process a path which may be a function with refinements. // These refinements will be left in order on the data stack in the case // that `out` comes back as IS_FUNCTION(). // // If `opt_setval` is given, the path operation will be done as a "SET-PATH!" // if the path evaluation did not throw or error. HOWEVER the set value // is NOT put into `out`. This provides more flexibility on performance in // the evaluator, which may already have the `val` where it wants it, and // so the extra assignment would just be overhead. // // !!! Path evaluation is one of the parts of R3-Alpha that has not been // vetted very heavily by Ren-C, and needs a review and overhaul. // REBOOL Do_Path_Throws_Core( REBVAL *out, REBSTR **label_out, const RELVAL *path, REBCTX *specifier, REBVAL *opt_setval ) { REBPVS pvs; REBDSP dsp_orig = DSP; assert(ANY_PATH(path)); // !!! There is a bug in the dispatch such that if you are running a // set path, it does not always assign the output, because it "thinks you // aren't going to look at it". This presumably originated from before // parens were allowed in paths, and neglects cases like: // // foo/(throw 1020): value // // We always have to check to see if a throw occurred. Until this is // streamlined, we have to at minimum set it to something that is *not* // thrown so that we aren't testing uninitialized memory. A safe trash // will do, which is unset in release builds. // if (opt_setval) SET_TRASH_SAFE(out); // None of the values passed in can live on the data stack, because // they might be relocated during the path evaluation process. 
// assert(!IN_DATA_STACK_DEBUG(out)); assert(!IN_DATA_STACK_DEBUG(path)); assert(!opt_setval || !IN_DATA_STACK_DEBUG(opt_setval)); // Not currently robust for reusing passed in path or value as the output assert(out != path && out != opt_setval); assert(!opt_setval || !THROWN(opt_setval)); // Initialize REBPVS -- see notes in %sys-do.h // pvs.opt_setval = opt_setval; pvs.store = out; pvs.orig = path; pvs.item = VAL_ARRAY_AT(pvs.orig); // may not be starting at head of PATH! // The path value that's coming in may be relative (in which case it // needs to use the specifier passed in). Or it may be specific already, // in which case we should use the specifier in the value to process // its array contents. // if (IS_RELATIVE(path)) { #if !defined(NDEBUG) assert(specifier != SPECIFIED); if (VAL_RELATIVE(path) != VAL_FUNC(CTX_FRAME_FUNC_VALUE(specifier))) { Debug_Fmt("Specificity mismatch found in path dispatch"); PROBE_MSG(path, "the path being evaluated"); PROBE_MSG(FUNC_VALUE(VAL_RELATIVE(path)), "expected func"); PROBE_MSG(CTX_FRAME_FUNC_VALUE(specifier), "actual func"); assert(FALSE); } #endif pvs.item_specifier = specifier; } else pvs.item_specifier = VAL_SPECIFIER(const_KNOWN(path)); // Seed the path evaluation process by looking up the first item (to // get a datatype to dispatch on for the later path items) // if (IS_WORD(pvs.item)) { pvs.value = GET_MUTABLE_VAR_MAY_FAIL(pvs.item, pvs.item_specifier); pvs.value_specifier = SPECIFIED; if (IS_VOID(pvs.value)) fail (Error_No_Value_Core(pvs.item, pvs.item_specifier)); } else { // !!! Ideally there would be some way to deal with writes to // temporary locations, like this pvs.value...if a set-path sets // it, then it will be discarded. 
COPY_VALUE(pvs.store, VAL_ARRAY_AT(pvs.orig), pvs.item_specifier); pvs.value = pvs.store; pvs.value_specifier = SPECIFIED; } // Start evaluation of path: if (IS_END(pvs.item + 1)) { // If it was a single element path, return the value rather than // try to dispatch it (would cause a crash at time of writing) // // !!! Is this the desired behavior, or should it be an error? } else if (Path_Dispatch[VAL_TYPE(pvs.value)]) { REBOOL threw = Next_Path_Throws(&pvs); // !!! See comments about why the initialization of out is necessary. // Without it this assertion can change on some things: // // t: now // t/time: 10:20:03 // // (It thinks pvs.value has its THROWN bit set when it completed // successfully. It was a PE_USE_STORE case where pvs.value was reset to // pvs.store, and pvs.store has its thrown bit set. Valgrind does not // catch any uninitialized variables.) // // There are other cases that do trip valgrind when omitting the // initialization, though not as clearly reproducible. // assert(threw == THROWN(pvs.value)); if (threw) return TRUE; // Check for errors: if (NOT_END(pvs.item + 1) && !IS_FUNCTION(pvs.value)) { // // Only function refinements should get by this line: REBVAL specified_orig; COPY_VALUE(&specified_orig, pvs.orig, specifier); REBVAL specified_item; COPY_VALUE(&specified_item, pvs.item, specifier); fail (Error(RE_INVALID_PATH, &specified_orig, &specified_item)); } } else if (!IS_FUNCTION(pvs.value)) { REBVAL specified; COPY_VALUE(&specified, pvs.orig, specifier); fail (Error(RE_BAD_PATH_TYPE, &specified, Type_Of(pvs.value))); } if (opt_setval) { // If SET then we don't return anything assert(IS_END(pvs.item) + 1); return FALSE; } // If storage was not used, then copy final value back to it: if (pvs.value != pvs.store) COPY_VALUE(pvs.store, pvs.value, pvs.value_specifier); assert(!THROWN(out)); // Return 0 if not function or is :path/word... 
if (!IS_FUNCTION(pvs.value)) { assert(IS_END(pvs.item) + 1); return FALSE; } if (label_out) { REBVAL refinement; // When a function is hit, path processing stops as soon as the // processed sub-path resolves to a function. The path is still sitting // on the position of the last component of that sub-path. Usually, // this last component in the sub-path is a word naming the function. // if (IS_WORD(pvs.item)) { *label_out = VAL_WORD_SPELLING(pvs.item); } else { // In rarer cases, the final component (completing the sub-path to // the function to call) is not a word. Such as when you use a path // to pick by index out of a block of functions: // // functions: reduce [:add :subtract] // functions/1 10 20 // // Or when you have an immediate function value in a path with a // refinement. Tricky to make, but possible: // // do reduce [ // to-path reduce [:append 'only] [a] [b] // ] // // !!! When a function was not invoked through looking up a word // (or a word in a path) to use as a label, there were once three // different alternate labels used. One was SYM__APPLY_, another // was ROOT_NONAME, and another was to be the type of the function // being executed. None are fantastic, we do the type for now. *label_out = Canon(SYM_FROM_KIND(VAL_TYPE(pvs.value))); } // Move on to the refinements (if any) ++pvs.item; // !!! Currently, the mainline path evaluation "punts" on refinements. // When it finds a function, it stops the path evaluation and leaves // the position pvs.path before the list of refinements. // // A more elegant solution would be able to process and notice (for // instance) that `:APPEND/ONLY` should yield a function value that // has been specialized with a refinement. Path chaining should thus // be able to effectively do this and give the refined function object // back to the evaluator or other client. // // If a label_sym is passed in, we recognize that a function dispatch // is going to be happening. 
We do not want to pay to generate the // new series that would be needed to make a temporary function that // will be invoked and immediately GC'd So we gather the refinements // on the data stack. // // This code simulates that path-processing-to-data-stack, but it // should really be something in dispatch iself. In any case, we put // refinements on the data stack...and caller knows refinements are // from dsp_orig to DSP (thanks to accounting, all other operations // should balance!) for (; NOT_END(pvs.item); ++pvs.item) { // "the refinements" if (IS_VOID(pvs.item)) continue; if (IS_GROUP(pvs.item)) { // // Note it is not legal to use the data stack directly as the // output location for a DO (might be resized) if (Do_At_Throws( &refinement, VAL_ARRAY(pvs.item), VAL_INDEX(pvs.item), IS_RELATIVE(pvs.item) ? pvs.item_specifier // if relative, use parent's : VAL_SPECIFIER(const_KNOWN(pvs.item)) // else embedded )) { *out = refinement; DS_DROP_TO(dsp_orig); return TRUE; } if (IS_VOID(&refinement)) continue; DS_PUSH(&refinement); } else if (IS_GET_WORD(pvs.item)) { DS_PUSH_TRASH; *DS_TOP = *GET_OPT_VAR_MAY_FAIL(pvs.item, pvs.item_specifier); if (IS_VOID(DS_TOP)) { DS_DROP; continue; } } else DS_PUSH_RELVAL(pvs.item, pvs.item_specifier); // Whatever we were trying to use as a refinement should now be // on the top of the data stack, and only words are legal ATM // if (!IS_WORD(DS_TOP)) { fail (Error(RE_BAD_REFINE, DS_TOP)); } // Go ahead and canonize the word symbol so we don't have to // do it each time in order to get a case-insenstive compare // INIT_WORD_SPELLING(DS_TOP, VAL_WORD_CANON(DS_TOP)); } // To make things easier for processing, reverse the refinements on // the data stack (we needed to evaluate them in forward order). // This way we can just pop them as we go, and know if they weren't // all consumed if it doesn't get back to `dsp_orig` by the end. 
if (dsp_orig != DSP) { REBVAL *bottom = DS_AT(dsp_orig + 1); REBVAL *top = DS_TOP; while (top > bottom) { refinement = *bottom; *bottom = *top; *top = refinement; top--; bottom++; } } } else { // !!! Historically this just ignores a result indicating this is a // function with refinements, e.g. ':append/only'. However that // ignoring seems unwise. It should presumably create a modified // function in that case which acts as if it has the refinement. // // If the caller did not pass in a label pointer we assume they are // likely not ready to process any refinements. // if (NOT_END(pvs.item + 1)) fail (Error(RE_TOO_LONG)); // !!! Better error or add feature } return FALSE; }