4 ////////////////////////////////////////////////////////////////////////
5 // Debugging stuff. ////////////////////////////////////////////////////
6 ////////////////////////////////////////////////////////////////////////
8 #if !defined(DEBUG_NASAL)
9 # define DBG(expr) /* noop */
11 # define DBG(expr) expr
15 char* opStringDEBUG(int op);
16 void printOpDEBUG(int ip, int op);
17 void printStackDEBUG(struct Context* ctx);
18 ////////////////////////////////////////////////////////////////////////
20 struct Globals* globals = 0;
22 static naRef bindFunction(struct Context* ctx, struct Frame* f, naRef code);
// Raise a fatal Nasal runtime error.  Unwinds via longjmp() to the
// handler on the context (set up with setjmp() in naCall()), so this
// never returns to its caller.
24 #define ERR(c, msg) naRuntimeError((c),(msg))
25 void naRuntimeError(struct Context* c, char* msg)
28 longjmp(c->jumpHandle, 1);
// Coerce a naRef to a C truth value: numbers test against zero, nil is
// false, and anything non-scalar is a runtime error.
31 static int boolify(struct Context* ctx, naRef r)
33 if(IS_NUM(r)) return r.num != 0;
34 if(IS_NIL(r)) return 0;
// String case: empty strings are false; a string that parses as a
// number takes that number's truth value.
37 if(naStr_len(r) == 0) return 0;
38 if(naStr_tonum(r, &d)) return d != 0;
41 ERR(ctx, "non-scalar used in boolean context");
// Coerce a naRef to a double.  Numbers pass through, numeric strings
// are parsed; nil, non-scalars and non-numeric strings raise errors
// (ERR longjmps, so the error branches do not return).
45 static double numify(struct Context* ctx, naRef o)
48 if(IS_NUM(o)) return o.num;
49 else if(IS_NIL(o)) ERR(ctx, "nil used in numeric context");
50 else if(!IS_STR(o)) ERR(ctx, "non-scalar in numeric context");
51 else if(naStr_tonum(o, &n)) return n;
52 else ERR(ctx, "non-numeric string in numeric context");
// Coerce a scalar naRef to a string.  Strings pass through untouched;
// numbers are formatted into a freshly allocated string object (note:
// this can trigger GC).  Non-scalars raise a runtime error.
56 static naRef stringify(struct Context* ctx, naRef r)
58 if(IS_STR(r)) return r;
59 if(IS_NUM(r)) return naStr_fromnum(naNewString(ctx), r.num);
60 ERR(ctx, "non-scalar in string context");
// Normalize and bounds-check a vector index.  Negative indices count
// from the end of the vector; anything still out of range errors out.
// Returns the validated non-negative index.
64 static int checkVec(struct Context* ctx, naRef vec, naRef idx)
66 int i = (int)numify(ctx, idx);
67 if(i < 0) i += naVec_size(vec);
68 if(i < 0 || i >= naVec_size(vec)) ERR(ctx, "vector index out of bounds");
// Normalize and bounds-check a string (byte) index, mirroring
// checkVec(): negative indices count from the end of the string.
72 static int checkStr(struct Context* ctx, naRef str, naRef idx)
74 int i = (int)numify(ctx, idx);
75 if(i < 0) i += naStr_len(str);
76 if(i < 0 || i >= naStr_len(str)) ERR(ctx, "string index out of bounds");
// Generic indexed read (the box[key] operation): hash lookup, vector
// element, or single byte of a string returned as a number.  The key
// must be a scalar; indexing a non-container is an error.
80 static naRef containerGet(struct Context* ctx, naRef box, naRef key)
82 naRef result = naNil();
83 if(!IS_SCALAR(key)) ERR(ctx, "container index not scalar");
85 if(!naHash_get(box, key, &result))
86 ERR(ctx, "undefined value in container");
87 } else if(IS_VEC(box)) {
88 result = naVec_get(box, checkVec(ctx, box, key));
89 } else if(IS_STR(box)) {
// String indexing yields the byte value as a number; the cast to
// unsigned char avoids sign-extension of high bytes.
90 result = naNum((unsigned char)naStr_data(box)[checkStr(ctx, box, key)]);
92 ERR(ctx, "extract from non-container");
// Generic indexed write (box[key] = val): hash insert, vector element
// assignment, or in-place byte store into a mutable string.
97 static void containerSet(struct Context* ctx, naRef box, naRef key, naRef val)
99 if(!IS_SCALAR(key)) ERR(ctx, "container index not scalar");
100 else if(IS_HASH(box)) naHash_set(box, key, val);
101 else if(IS_VEC(box)) naVec_set(box, checkVec(ctx, box, key), val);
102 else if(IS_STR(box)) {
// A string with a non-zero cached hashcode is treated as immutable —
// presumably because mutating it would stale the cached hash; confirm
// against the naStr implementation.
103 if(box.ref.ptr.str->hashcode)
104 ERR(ctx, "cannot change immutable string");
105 naStr_data(box)[checkStr(ctx, box, key)] = (char)numify(ctx, val);
106 } else ERR(ctx, "insert into non-container");
// Allocate the context's "temps" array (c->tempsz slots of naObj*),
// which holds objects registered via naTempSave() so the GC can see
// them before they are reachable from a stack (see naCall()).
109 static void initTemps(struct Context* c)
112 c->temps = naAlloc(c->tempsz * sizeof(struct naObj*));
// Reset a context's interpreter state: empty frame stack, operand
// stack and mark stack, then loop over every Nasal object type for
// per-type setup (loop body not visible in this view).
116 static void initContext(struct Context* c)
119 c->fTop = c->opTop = c->markTop = 0;
120 for(i=0; i<NUM_NASAL_TYPES; i++)
// One-time setup of the process-wide interpreter state: the zeroed
// Globals struct, threading primitives, per-type GC pools, the
// dead-block list, context lists, the symbol/save tables and the
// interned symbols used on every method call.
134 static void initGlobals()
138 globals = (struct Globals*)naAlloc(sizeof(struct Globals));
139 naBZero(globals, sizeof(struct Globals));
141 globals->sem = naNewSem();
142 globals->lock = naNewLock();
144 globals->allocCount = 256; // reasonable starting value
146 for(i=0; i<NUM_NASAL_TYPES; i++)
146 naGC_init(&(globals->pools[i]), i);
147 globals->deadsz = 256;
149 globals->deadBlocks = naAlloc(sizeof(void*) * globals->deadsz);
151 // Initialize a single context
152 globals->freeContexts = 0;
153 globals->allContexts = 0;
// symbols interns symbol strings; save protects objects registered
// via naSave() from collection.
156 globals->symbols = naNewHash(c);
157 globals->save = naNewVector(c);
159 // Cache pre-calculated "me", "arg" and "parents" scalars
160 globals->meRef = naInternSymbol(naStr_fromdata(naNewString(c), "me", 2));
161 globals->argRef = naInternSymbol(naStr_fromdata(naNewString(c), "arg", 3));
162 globals->parentsRef = naInternSymbol(naStr_fromdata(naNewString(c), "parents", 7));
// Create an interpreter context.  Prefer recycling one from the
// global free list; otherwise allocate a fresh one and link it onto
// the allContexts list so it is known to the global state.
167 struct Context* naNewContext()
174 c = globals->freeContexts;
176 globals->freeContexts = c->nextFree;
182 c = (struct Context*)naAlloc(sizeof(struct Context));
186 c->nextAll = globals->allContexts;
188 globals->allContexts = c;
// Return a context to the global free list for reuse by
// naNewContext().  The memory itself is not released here.
194 void naFreeContext(struct Context* c)
198 c->nextFree = globals->freeContexts;
199 globals->freeContexts = c;
205 * This is the original code which might not work properly on all
206 * platforms since it allows one to work on the same variable in one
207 * statement without prior knowledge of how it will behave.
209 * e.g. ctx->opStack[ctx->opTop++] = ctx->opStack[ctx->opTop-1];
// PUSH(): bounds-checked push onto the operand stack.  The macro
// variant expands inline; the out-of-line _PUSH() variant evaluates
// its argument exactly once before opTop is modified, avoiding the
// same-variable hazard described in the comment above.
212 # define PUSH(r) do { \
213 if(ctx->opTop >= MAX_STACK_DEPTH) ERR(ctx, "stack overflow"); \
214 ctx->opStack[ctx->opTop++] = r; \
219 # define PUSH(r) _PUSH((ctx), (r))
220 void _PUSH(struct Context* ctx, naRef r) {
221 if(ctx->opTop >= MAX_STACK_DEPTH) ERR(ctx, "stack overflow");
222 ctx->opStack[ctx->opTop++] = r;
// Bind call arguments into the frame's local namespace: required args
// first, then optional args (defaults come from the constant pool),
// then any leftovers collected into a vector under c->restArgSym.
226 static void setupArgs(naContext ctx, struct Frame* f, naRef* args, int nargs)
229 struct naCode* c = f->func.ref.ptr.func->code.ref.ptr.code;
231 // Set the argument symbols, and put any remaining args in a vector
232 if(nargs < c->nArgs) ERR(ctx, "not enough arguments to function call");
233 for(i=0; i<c->nArgs; i++)
234 naHash_newsym(f->locals.ref.ptr.hash,
235 &c->constants[c->argSyms[i]], &args[i]);
238 for(i=0; i<c->nOptArgs; i++, nargs--) {
// Missing optional args fall back to their compiled default value.
239 naRef val = nargs > 0 ? args[i] : c->constants[c->optArgVals[i]];
// NOTE(review): a default that is a bare code object is bound against
// fStack[fTop-2] — presumably the calling frame's scope; confirm.
241 val = bindFunction(ctx, &ctx->fStack[ctx->fTop-2], val);
242 naHash_newsym(f->locals.ref.ptr.hash, &c->constants[c->optArgSyms[i]],
// Surplus arguments (or an explicitly requested arg vector) land in a
// freshly allocated vector bound to the rest-argument symbol.
246 if(c->needArgVector || nargs > 0) {
247 naRef argsv = naNewVector(ctx);
248 naVec_setsize(argsv, nargs > 0 ? nargs : 0);
249 for(i=0; i<nargs; i++)
250 argsv.ref.ptr.vec->rec->array[i] = *args++;
251 naHash_newsym(f->locals.ref.ptr.hash, &c->restArgSym, &argsv);
// Prepare a function/method call whose callee and arguments sit on the
// operand stack (callee at frame[0], args above it, and for method
// calls the object at frame[-1]).  C-coded functions are invoked
// immediately; Nasal functions get a new interpreter frame.  Returns
// the frame the interpreter loop should continue in.
255 struct Frame* setupFuncall(struct Context* ctx, int nargs, int mcall, int tail)
260 DBG(printf("setupFuncall(nargs:%d, mcall:%d)\n", nargs, mcall);)
262 frame = &ctx->opStack[ctx->opTop - nargs - 1];
263 if(!IS_FUNC(frame[0]))
264 ERR(ctx, "function/method call invoked on uncallable object");
266 // Just do native calls right here, and don't touch the stack
267 // frames; return the current one (unless it's a tail call!).
268 if(frame[0].ref.ptr.func->code.ref.ptr.obj->type == T_CCODE) {
269 naRef obj = mcall ? frame[-1] : naNil();
270 naCFunction fp = frame[0].ref.ptr.func->code.ref.ptr.ccode->fptr;
271 naRef result = (*fp)(ctx, obj, nargs, frame + 1);
// Pop the callee, the args, and (for method calls) the object.
272 ctx->opTop -= nargs + 1 + mcall;
274 return &(ctx->fStack[ctx->fTop-1]);
// Tail calls reuse the caller's frame slot instead of growing fStack.
277 if(tail) ctx->fTop--;
278 else if(ctx->fTop >= MAX_RECURSION) ERR(ctx, "call stack overflow");
280 // Note: assign nil first, otherwise the naNew() can cause a GC,
281 // which will now (after fTop++) see the *old* reference as a
283 f = &(ctx->fStack[ctx->fTop++]);
284 f->locals = f->func = naNil();
285 f->locals = naNewHash(ctx);
288 f->bp = ctx->opTop - (nargs + 1 + mcall);
// Method call: bind the receiver object to the "me" symbol.
291 naHash_set(f->locals, globals->meRef, frame[-1]);
293 setupArgs(ctx, f, frame+1, nargs);
295 ctx->opTop = f->bp; // Pop the stack last, to avoid GC lossage
296 DBG(printf("Entering frame %d with %d args\n", ctx->fTop-1, nargs);)
// Combine two already-evaluated operands for OP_AND/OP_OR into a 0/1
// number.  Both sides are boolified unconditionally here — any
// short-circuiting happens earlier, in the generated bytecode.
300 static naRef evalAndOr(struct Context* ctx, int op, naRef ra, naRef rb)
302 int a = boolify(ctx, ra);
303 int b = boolify(ctx, rb);
305 if(op == OP_AND) result = a && b ? 1 : 0;
306 else result = a || b ? 1 : 0;
307 return naNum(result);
// Implement OP_EQ/OP_NEQ: delegate to naEqual() and invert the result
// for the not-equal opcode.  Returns a 0/1 number.
310 static naRef evalEquality(int op, naRef ra, naRef rb)
312 int result = naEqual(ra, rb);
313 return naNum((op==OP_EQ) ? result : !result);
316 // When a code object comes out of the constant pool and shows up on
317 // the stack, it needs to be bound with the lexical context.
318 static naRef bindFunction(struct Context* ctx, struct Frame* f, naRef code)
320 naRef result = naNewFunc(ctx, code);
// Capture the enclosing frame's locals as the closure namespace and
// chain to the enclosing function (getClosure/setClosure walk ->next).
321 result.ref.ptr.func->namespace = f->locals;
322 result.ref.ptr.func->next = f->func;
// Walk the closure chain looking up sym in each captured namespace.
// Writes the value through *result and returns 1 on success.
326 static int getClosure(struct naFunc* c, naRef sym, naRef* result)
329 if(naHash_get(c->namespace, sym, result)) return 1;
330 c = c->next.ref.ptr.func;
// Slow-path symbol lookup via the generic naHash_get(): frame locals
// first, then the closure chain.  An unresolved symbol is fatal.
335 static naRef getLocal2(struct Context* ctx, struct Frame* f, naRef sym)
338 if(!naHash_get(f->locals, sym, &result))
339 if(!getClosure(f->func.ref.ptr.func, sym, &result))
340 ERR(ctx, "undefined symbol");
// Fast-path symbol lookup used by the interpreter: try the interned
// symbol (naHash_sym) against the frame locals, then walk the closure
// chain, falling back to the generic lookup only when that fails.
344 static void getLocal(struct Context* ctx, struct Frame* f,
345 naRef* sym, naRef* out)
348 struct naStr* str = sym->ref.ptr.str;
349 if(naHash_sym(f->locals.ref.ptr.hash, str, out))
351 func = f->func.ref.ptr.func;
352 while(func && func->namespace.ref.ptr.hash) {
353 if(naHash_sym(func->namespace.ref.ptr.hash, str, out))
355 func = func->next.ref.ptr.func;
357 // Now do it again using the more general naHash_get(). This will
358 // only be necessary if something has created the value in the
359 // namespace using the more generic hash syntax
360 // (e.g. namespace["symbol"] and not namespace.symbol).
361 *out = getLocal2(ctx, f, *sym);
// Try to assign sym in the closure chain of func, recursing outward
// through ->next.  Returns 1 if an existing binding was updated,
// 0 once the chain is exhausted (caller then creates a new local).
364 static int setClosure(naRef func, naRef sym, naRef val)
366 struct naFunc* c = func.ref.ptr.func;
367 if(c == 0) { return 0; }
368 else if(naHash_tryset(c->namespace, sym, val)) { return 1; }
369 else { return setClosure(c->next, sym, val); }
// Assign a value to a symbol with Nasal's scoping rule, as described
// below: innermost existing binding wins, else a new local is made.
372 static naRef setSymbol(struct Frame* f, naRef sym, naRef val)
374 // Try the locals first, if not already there try the closures in
375 // order. Finally put it in the locals if nothing matched.
376 if(!naHash_tryset(f->locals, sym, val))
377 if(!setClosure(f->func, sym, val))
378 naHash_set(f->locals, sym, val);
382 // Recursively descend into the parents lists
383 static int getMember(struct Context* ctx, naRef obj, naRef fld,
384 naRef* result, int count)
// The depth counter bounds the recursion, guarding against cyclic or
// absurdly deep "parents" chains (callers start it at 64).
387 if(--count < 0) ERR(ctx, "too many parents");
388 if(!IS_HASH(obj)) ERR(ctx, "non-objects have no members");
389 if(naHash_get(obj, fld, result)) {
// Field not found directly: search each entry of the object's
// "parents" vector, depth-first, in order.
391 } else if(naHash_get(obj, globals->parentsRef, &p)) {
394 struct VecRec* v = p.ref.ptr.vec->rec;
395 for(i=0; i<v->size; i++)
396 if(getMember(ctx, v->array[i], fld, result, count))
399 ERR(ctx, "parents field not vector");
404 // OP_EACH works like a vector get, except that it leaves the vector
405 // and index on the stack, increments the index after use, and pops
406 // the arguments and pushes a nil if the index is beyond the end.
407 static void evalEach(struct Context* ctx, int useIndex)
409 int idx = (int)(ctx->opStack[ctx->opTop-1].num);
410 naRef vec = ctx->opStack[ctx->opTop-2];
411 if(!IS_VEC(vec)) naRuntimeError(ctx, "foreach enumeration of non-vector");
// End of vector (or empty backing record): pop vector+index and bail.
412 if(!vec.ref.ptr.vec->rec || idx >= vec.ref.ptr.vec->rec->size) {
413 ctx->opTop -= 2; // pop two values
417 ctx->opStack[ctx->opTop-1].num = idx+1; // modify in place
// useIndex selects forindex (push the index) vs foreach (push the
// element) behavior.
418 PUSH(useIndex ? naNum(idx) : naVec_get(vec, idx));
// Helper macros for the run() dispatch loop below:
// ARG()      - consume the next bytecode operand at the frame's ip.
421 #define ARG() cd->byteCode[f->ip++]
// CONSTARG() - consume an operand and use it as a constant-pool index.
422 #define CONSTARG() cd->constants[ARG()]
// POP()      - remove and yield the top operand-stack slot.
423 #define POP() ctx->opStack[--ctx->opTop]
// STK(n)     - reference the n-th slot from the top (1 == top).
424 #define STK(n) (ctx->opStack[ctx->opTop-(n)])
// FIXFRAME() - re-cache the current frame and its code object after a
// call or return changed fTop.
425 #define FIXFRAME() f = &(ctx->fStack[ctx->fTop-1]); \
426 cd = f->func.ref.ptr.func->code.ref.ptr.code;
// The bytecode interpreter main loop: fetch an opcode from the current
// frame's code object and dispatch on it, manipulating the operand
// stack (opStack), frame stack (fStack) and mark stack.  Only exits
// via the OP_RETURN path or a runtime error (longjmp).
427 static naRef run(struct Context* ctx)
437 op = cd->byteCode[f->ip++];
438 DBG(printf("Stack Depth: %d\n", ctx->opTop));
439 DBG(printOpDEBUG(f->ip-1, op));
// Stack duplication: copy of the top slot, then of the top pair.
445 PUSH(ctx->opStack[ctx->opTop-1]);
448 PUSH(ctx->opStack[ctx->opTop-2]);
449 PUSH(ctx->opStack[ctx->opTop-2]);
// Swap the top two stack slots.
452 a = STK(1); STK(1) = STK(2); STK(2) = a;
// BINOP: numeric binary op on the top two slots; the result is written
// into STK(2) in place (reftag cleared so the slot reads as a plain
// number) and one slot is popped.
455 #define BINOP(expr) do { \
456 double l = IS_NUM(STK(2)) ? STK(2).num : numify(ctx, STK(2)); \
457 double r = IS_NUM(STK(1)) ? STK(1).num : numify(ctx, STK(1)); \
458 STK(2).ref.reftag = ~NASAL_REFTAG; \
460 ctx->opTop--; } while(0)
462 case OP_PLUS: BINOP(l + r); break;
463 case OP_MINUS: BINOP(l - r); break;
464 case OP_MUL: BINOP(l * r); break;
465 case OP_DIV: BINOP(l / r); break;
466 case OP_LT: BINOP(l < r ? 1 : 0); break;
467 case OP_LTE: BINOP(l <= r ? 1 : 0); break;
468 case OP_GT: BINOP(l > r ? 1 : 0); break;
469 case OP_GTE: BINOP(l >= r ? 1 : 0); break;
473 case OP_EQ: case OP_NEQ:
474 STK(2) = evalEquality(op, STK(2), STK(1));
477 case OP_AND: case OP_OR:
478 STK(2) = evalAndOr(ctx, op, STK(2), STK(1));
482 // stringify can call the GC, so don't take stuff off the stack!
483 a = stringify(ctx, ctx->opStack[ctx->opTop-1]);
484 b = stringify(ctx, ctx->opStack[ctx->opTop-2]);
485 c = naStr_concat(naNewString(ctx), b, a);
// Unary negate and boolean not.
490 STK(1) = naNum(-numify(ctx, STK(1)));
493 STK(1) = naNum(boolify(ctx, STK(1)) ? 0 : 1);
// Constants from the pool: bare code objects must first be bound to
// the current lexical scope (see bindFunction()).
497 if(IS_CODE(a)) a = bindFunction(ctx, f, a);
// Literal vector/hash construction and element append/insert.
510 PUSH(naNewVector(ctx));
513 naVec_append(STK(2), STK(1));
517 PUSH(naNewHash(ctx));
520 naHash_set(STK(3), STK(2), STK(1));
// Symbol load/store in the current scope.
525 getLocal(ctx, f, &a, &b);
529 STK(2) = setSymbol(f, STK(2), STK(1));
533 naHash_set(f->locals, STK(2), STK(1));
534 STK(2) = STK(1); // FIXME: reverse order of arguments instead!
// Member access with "parents" inheritance lookup (depth cap 64).
538 if(!getMember(ctx, STK(1), CONSTARG(), &STK(1), 64))
539 ERR(ctx, "no such member");
542 if(!IS_HASH(STK(3))) ERR(ctx, "non-objects have no members");
543 naHash_set(STK(3), STK(2), STK(1));
544 STK(3) = STK(1); // FIXME: fix arg order instead
// Generic container subscript store/load.
548 containerSet(ctx, STK(3), STK(2), STK(1));
549 STK(3) = STK(1); // FIXME: codegen order again...
553 STK(2) = containerGet(ctx, STK(2), STK(1));
557 // Identical to JMP, except for locking
559 f->ip = cd->byteCode[f->ip];
560 DBG(printf(" [Jump to: %d]\n", f->ip);)
563 f->ip = cd->byteCode[f->ip];
564 DBG(printf(" [Jump to: %d]\n", f->ip);)
569 ctx->opTop--; // Pops **ONLY** if it's nil!
571 DBG(printf(" [Jump to: %d]\n", f->ip);)
// Conditional jump: falls through when the popped value is true.
576 if(!boolify(ctx, POP())) {
// The four call forms: (function|method) x (normal|tail).  Each swaps
// in the callee's frame and re-caches its code object.
582 f = setupFuncall(ctx, ARG(), 0, 0);
583 cd = f->func.ref.ptr.func->code.ref.ptr.code;
586 f = setupFuncall(ctx, ARG(), 0, 1);
587 cd = f->func.ref.ptr.func->code.ref.ptr.code;
590 f = setupFuncall(ctx, ARG(), 1, 0);
591 cd = f->func.ref.ptr.func->code.ref.ptr.code;
594 f = setupFuncall(ctx, ARG(), 1, 1);
595 cd = f->func.ref.ptr.func->code.ref.ptr.code;
// Return: popping the last frame leaves the loop with the result.
599 if(--ctx->fTop <= 0) return a;
600 ctx->opTop = f->bp + 1; // restore the correct opstack frame!
610 case OP_MARK: // save stack state (e.g. "setjmp")
611 if(ctx->markTop >= MAX_MARK_DEPTH)
612 naRuntimeError(ctx, "mark stack overflow");
613 ctx->markStack[ctx->markTop++] = ctx->opTop;
615 case OP_UNMARK: // pop stack state set by mark
618 case OP_BREAK: // restore stack state (FOLLOW WITH JMP!)
619 ctx->opTop = ctx->markStack[--ctx->markTop];
622 ERR(ctx, "BUG: bad opcode");
624 ctx->ntemps = 0; // reset GC temp vector
625 DBG(printStackDEBUG(ctx);)
627 return naNil(); // unreachable
// Permanently protect obj from garbage collection by appending it to
// the global save vector (itself a GC root).
634 void naSave(struct Context* ctx, naRef obj)
636 naVec_append(globals->save, obj);
639 // FIXME: handle ctx->callParent
// Report the number of active Nasal call frames on this context.
640 int naStackDepth(struct Context* ctx)
645 // FIXME: handle ctx->callParent
// Return the source line for the given stack frame (0 == innermost)
// by scanning the code object's ip/line table backwards.
646 int naGetLine(struct Context* ctx, int frame)
648 struct Frame* f = &ctx->fStack[ctx->fTop-1-frame];
649 naRef func = f->func;
651 if(IS_FUNC(func) && IS_CODE(func.ref.ptr.func->code)) {
652 struct naCode* c = func.ref.ptr.func->code.ref.ptr.code;
// lineIps appears to be a flat table of (ip, line) pairs — the scan
// steps by 2 from the last pair to the last entry with ip <= the
// frame's ip.  TODO confirm layout against the naCode definition.
653 unsigned short* p = c->lineIps + c->nLines - 2;
654 while(p >= c->lineIps && p[0] > ip)
661 // FIXME: handle ctx->callParent
// Return the source-file name (a Nasal string ref) recorded in the
// code object of the given stack frame (0 == innermost).
662 naRef naGetSourceFile(struct Context* ctx, int frame)
664 naRef f = ctx->fStack[ctx->fTop-1-frame].func;
665 f = f.ref.ptr.func->code;
666 return f.ref.ptr.code->srcFile;
// Return the pending error message for this context.  When the script
// die()'d with a string, that string's data is returned directly (it
// points into the Nasal string object — do not free it).
669 char* naGetError(struct Context* ctx)
671 if(IS_STR(ctx->dieArg))
672 return (char*)ctx->dieArg.ref.ptr.str->data;
// Public API: wrap a code object in a function bound to an explicit
// closure namespace, with no outer closure chain (->next is nil).
676 naRef naBindFunction(naContext ctx, naRef code, naRef closure)
678 naRef func = naNewFunc(ctx, code);
679 func.ref.ptr.func->namespace = closure;
680 func.ref.ptr.func->next = naNil();
// Public API: wrap a code object in a function bound to the context's
// current (innermost) frame — same binding rule as bindFunction().
684 naRef naBindToContext(naContext ctx, naRef code)
686 naRef func = naNewFunc(ctx, code);
687 struct Frame* f = &ctx->fStack[ctx->fTop-1];
688 func.ref.ptr.func->namespace = f->locals;
689 func.ref.ptr.func->next = f->func;
// Public entry point: invoke a Nasal (or C-coded) function from C.
// Registers all C-space arguments as GC temps, dispatches C functions
// directly, and otherwise sets up frame 0 and establishes the
// setjmp() error handler that naRuntimeError() longjmps back to.
693 naRef naCall(naContext ctx, naRef func, int argc, naRef* args,
694 naRef obj, naRef locals)
// Only the outermost call takes the module lock; nested calls made
// from within a running script (callParent set) already hold it.
698 if(!ctx->callParent) naModLock(ctx);
700 // We might have to allocate objects, which can call the GC. But
701 // the call isn't on the Nasal stack yet, so the GC won't find our
702 // C-space arguments.
703 naTempSave(ctx, func);
704 for(i=0; i<argc; i++)
705 naTempSave(ctx, args[i]);
706 naTempSave(ctx, obj);
707 naTempSave(ctx, locals);
// C-coded functions bypass the interpreter entirely.
709 if(IS_CCODE(func.ref.ptr.func->code)) {
710 naCFunction fp = func.ref.ptr.func->code.ref.ptr.ccode->fptr;
711 result = (*fp)(ctx, obj, argc, args);
712 if(!ctx->callParent) naModUnlock(ctx);
717 locals = naNewHash(ctx);
719 func = naNewFunc(ctx, func); // bind bare code objects
// Method-style invocation: expose the receiver as "me".
721 naHash_set(locals, globals->meRef, obj);
// Reset interpreter state and build the bottom stack frame.
723 ctx->dieArg = naNil();
725 ctx->opTop = ctx->markTop = 0;
727 ctx->fStack[0].func = func;
728 ctx->fStack[0].locals = locals;
729 ctx->fStack[0].ip = 0;
730 ctx->fStack[0].bp = ctx->opTop;
732 setupArgs(ctx, ctx->fStack, args, argc);
734 // Return early if an error occurred. It will be visible to the
735 // caller via naGetError().
737 if(setjmp(ctx->jumpHandle)) {
738 if(!ctx->callParent) naModUnlock(ctx);
743 if(!ctx->callParent) naModUnlock(ctx);