4 ////////////////////////////////////////////////////////////////////////
5 // Debugging stuff. ////////////////////////////////////////////////////
6 ////////////////////////////////////////////////////////////////////////
// DBG(expr) executes its argument only in debug builds: it expands to
// nothing unless DEBUG_NASAL is defined, in which case it expands to the
// expression verbatim.
// NOTE(review): the #else and #endif lines (and others) appear to be
// missing from this extract -- confirm against the full source before
// treating the two #define lines below as a duplicate definition.
7 #if !defined(DEBUG_NASAL)
8 # define DBG(expr) /* noop */
10 # define DBG(expr) expr
// Debug helpers (implemented elsewhere): opcode name lookup, and
// opcode / naRef / operand-stack dump routines.
14 char* opStringDEBUG(int op);
15 void printOpDEBUG(int ip, int op);
16 void printRefDEBUG(naRef r);
17 void printStackDEBUG(struct Context* ctx);
18 ////////////////////////////////////////////////////////////////////////
20 // FIXME: need to store a list of all contexts
// The single interpreter context; naNewContext() and naGarbageCollect()
// below both hard-code this instance (see their FIXME comments).
21 struct Context globalContext;
// Raise a Nasal runtime error: abandon the interpreter via longjmp back
// to the setjmp(ctx->jumpHandle) in run().  ERR() is shorthand used
// throughout this file.  Never returns.
// NOTE(review): lines that presumably store msg into the context (for
// naGetError) are missing from this extract -- confirm.
23 #define ERR(c, msg) naRuntimeError((c),(msg))
24 void naRuntimeError(struct Context* c, char* msg)
27 longjmp(c->jumpHandle, 1);
// Coerce a Nasal scalar to a C boolean: nil is false, a number tests
// against zero, and any string is true.  Longjmps out via ERR() for
// non-scalar values.
30 int boolify(struct Context* ctx, naRef r)
32 if(IS_NIL(r)) return 0;
33 if(IS_NUM(r)) return r.num != 0;
34 if(IS_STR(r)) return 1;
35 ERR(ctx, "non-scalar used in boolean context");
// Coerce a Nasal scalar to a double.  Numbers pass through unchanged;
// strings are parsed with naStr_tonum(); nil, non-scalars, and
// non-numeric strings raise runtime errors (ERR longjmps, so the
// error branches never fall through).
39 static double numify(struct Context* ctx, naRef o)
42 if(IS_NUM(o)) return o.num;
43 else if(IS_NIL(o)) ERR(ctx, "nil used in numeric context");
44 else if(!IS_STR(o)) ERR(ctx, "non-scalar in numeric context");
45 else if(naStr_tonum(o, &n)) return n;
46 else ERR(ctx, "non-numeric string in numeric context");
// Coerce a Nasal scalar to a string.  Strings pass through; numbers are
// formatted into a freshly allocated string (naNewString can trigger
// GC); anything else is a runtime error.
50 static naRef stringify(struct Context* ctx, naRef r)
52 if(IS_STR(r)) return r;
53 if(IS_NUM(r)) return naStr_fromnum(naNewString(ctx), r.num);
54 ERR(ctx, "non-scalar in string context");
// Convert idx to an integer and bounds-check it against vec's size,
// raising a runtime error when out of range.
// NOTE(review): the line returning the validated index appears to be
// missing from this extract -- callers use the return value.
58 static int checkVec(struct Context* ctx, naRef vec, naRef idx)
60 int i = (int)numify(ctx, idx);
61 if(i < 0 || i >= vec.ref.ptr.vec->size)
62 ERR(ctx, "vector index out of bounds");
// Generic "box[key]" read used by OP_EXTRACT: hash lookup (error if the
// key is absent) or bounds-checked vector index.  The key must be a
// scalar; non-container boxes are a runtime error.
// NOTE(review): the IS_HASH(box) branch header is missing from this
// extract (the dangling '}' below implies it) -- confirm.
66 static naRef containerGet(struct Context* ctx, naRef box, naRef key)
68 naRef result = naNil();
69 if(!IS_SCALAR(key)) ERR(ctx, "container index not scalar");
71 if(!naHash_get(box, key, &result))
72 ERR(ctx, "undefined value in container");
73 } else if(IS_VEC(box)) {
74 result = naVec_get(box, checkVec(ctx, box, key));
76 ERR(ctx, "extract from non-container");
// Generic "box[key] = val" write used by OP_INSERT: hash set or
// bounds-checked vector set.  Scalar keys only; non-container boxes are
// a runtime error.
81 static void containerSet(struct Context* ctx, naRef box, naRef key, naRef val)
83 if(!IS_SCALAR(key)) ERR(ctx, "container index not scalar");
84 else if(IS_HASH(box)) naHash_set(box, key, val);
85 else if(IS_VEC(box)) naVec_set(box, checkVec(ctx, box, key), val);
86 else ERR(ctx, "insert into non-container");
// One-time setup of an interpreter context: initialize the per-type GC
// pools, reset all stack tops, zero the frame and operand stacks, nil
// out the per-frame args vectors, create the temporaries vector, and
// cache frequently used string constants ("me", "arg", "parents").
89 static void initContext(struct Context* c)
92 for(i=0; i<NUM_NASAL_TYPES; i++)
93 naGC_init(&(c->pools[i]), i);
// Empty the function, operand, and mark stacks.
95 c->fTop = c->opTop = c->markTop = 0;
97 naBZero(c->fStack, MAX_RECURSION * sizeof(struct Frame));
98 naBZero(c->opStack, MAX_STACK_DEPTH * sizeof(naRef));
100 // Make sure the args vectors (which are static with the context)
101 // are initialized to nil.
102 for(i=0; i<MAX_RECURSION; i++)
103 c->fStack[i].args = naNil();
105 // Note we can't use naNewVector() for this; it requires that
106 // temps exist first.
107 c->temps = naObj(T_VEC, naGC_get(&(c->pools[T_VEC])));
111 // Cache pre-calculated "me", "arg" and "parents" scalars
112 c->meRef = naStr_fromdata(naNewString(c), "me", 2);
113 c->argRef = naStr_fromdata(naNewString(c), "arg", 3);
114 c->parentsRef = naStr_fromdata(naNewString(c), "parents", 7);
// Public constructor for an interpreter context.  Currently just hands
// out the single global instance (see the FIXME).
// NOTE(review): lines between the declaration and any return are missing
// from this extract (presumably initContext(c) and "return c").
117 struct Context* naNewContext()
119 // FIXME: need more than one!
120 struct Context* c = &globalContext;
// Mark-and-reap garbage collection.  Marks everything reachable from
// the interpreter: live frames (func + locals), every frame's args
// vector (even above fTop -- they are statically allocated with the
// context), the live operand stack, and the cached string constants;
// then reaps all unmarked objects from every pool.
// NOTE(review): some mark calls (e.g. temps, meRef) appear to be
// missing from this extract -- confirm against the full source.
125 void naGarbageCollect()
128 struct Context* c = &globalContext; // FIXME: more than one!
130 for(i=0; i < c->fTop; i++) {
131 naGC_mark(c->fStack[i].func);
132 naGC_mark(c->fStack[i].locals);
134 for(i=0; i < MAX_RECURSION; i++)
135 naGC_mark(c->fStack[i].args); // collect *all* the argument lists
136 for(i=0; i < c->opTop; i++)
137 naGC_mark(c->opStack[i]);
143 naGC_mark(c->argRef);
144 naGC_mark(c->parentsRef);
146 // Finally collect all the freed objects
147 for(i=0; i<NUM_NASAL_TYPES; i++)
148 naGC_reap(&(c->pools[i]));
// Push a new call frame for invoking func with args.  C functions
// (IS_CCODE) and bytecode functions (IS_CODE) are set up differently;
// bytecode frames get a fresh locals hash with "arg" pre-bound to the
// argument vector.  Anything else is uncallable and raises an error.
// NOTE(review): the frame-field initialization lines between these are
// missing from this extract -- confirm.
151 void setupFuncall(struct Context* ctx, naRef func, naRef args)
154 f = &(ctx->fStack[ctx->fTop++]);
160 DBG(printf("Entering frame %d\n", ctx->fTop-1);)
163 ERR(ctx, "function/method call invoked on uncallable object");
166 if(IS_CCODE(func.ref.ptr.func->code)) {
168 } else if(IS_CODE(func.ref.ptr.func->code)) {
169 f->locals = naNewHash(ctx);
170 naHash_set(f->locals, ctx->argRef, args);
172 ERR(ctx, "function/method call invoked on uncallable object");
// Logical AND/OR for OP_AND / OP_OR.  Both operands are boolified
// eagerly (no short-circuit at this level) and the result is returned
// as the number 1 or 0.
176 static naRef evalAndOr(struct Context* ctx, int op, naRef ra, naRef rb)
178 int a = boolify(ctx, ra);
179 int b = boolify(ctx, rb);
181 if(op == OP_AND) result = a && b ? 1 : 0;
182 else result = a || b ? 1 : 0;
183 return naNum(result);
// Equality test for OP_EQ / OP_NEQ, delegating to naEqual() and
// inverting the result for the not-equal opcode.  Returns 1 or 0 as a
// Nasal number.
186 static naRef evalEquality(int op, naRef ra, naRef rb)
188 int result = naEqual(ra, rb);
189 return naNum((op==OP_EQ) ? result : !result);
// Binary arithmetic and numeric comparison.  Both operands are coerced
// with numify() (which errors on non-numerics); comparisons yield the
// numbers 1 or 0.  Note OP_DIV performs no divide-by-zero check (IEEE
// double division semantics apply).
192 static naRef evalBinaryNumeric(struct Context* ctx, int op, naRef ra, naRef rb)
194 double a = numify(ctx, ra), b = numify(ctx, rb);
196 case OP_PLUS: return naNum(a + b);
197 case OP_MINUS: return naNum(a - b);
198 case OP_MUL: return naNum(a * b);
199 case OP_DIV: return naNum(a / b);
200 case OP_LT: return naNum(a < b ? 1 : 0);
201 case OP_LTE: return naNum(a <= b ? 1 : 0);
202 case OP_GT: return naNum(a > b ? 1 : 0);
203 case OP_GTE: return naNum(a >= b ? 1 : 0);
208 // When a code object comes out of the constant pool and shows up on
209 // the stack, it needs to be bound with the lexical context.
// Builds a new closure whose namespace is the current frame's locals,
// chained onto the enclosing function's closure list, and wraps the
// code object in a fresh function carrying that closure.
210 static naRef bindFunction(struct Context* ctx, struct Frame* f, naRef code)
212 naRef next = f->func.ref.ptr.func->closure;
213 naRef closure = naNewClosure(ctx, f->locals, next);
214 naRef result = naNewFunc(ctx, code);
215 result.ref.ptr.func->closure = closure;
// Walk the closure chain looking up sym in each namespace; on a hit,
// store the value through result and return 1.
// NOTE(review): the loop header and the final "return 0" appear to be
// missing from this extract.
219 static int getClosure(struct naClosure* c, naRef sym, naRef* result)
222 if(naHash_get(c->namespace, sym, result)) return 1;
223 c = c->next.ref.ptr.closure;
228 // Get a local symbol, or check the closure list if it isn't there
// Raises "undefined symbol" when the name is found in neither the
// frame's locals nor any enclosing closure namespace.
229 static naRef getLocal(struct Context* ctx, struct Frame* f, naRef sym)
232 if(!naHash_get(f->locals, sym, &result)) {
233 naRef c = f->func.ref.ptr.func->closure;
234 if(!getClosure(c.ref.ptr.closure, sym, &result))
235 ERR(ctx, "undefined symbol");
// Try to assign sym=val into the closure chain: succeeds (returns 1)
// only in a namespace where sym already exists (naHash_tryset);
// recurses outward and returns 0 when the chain is exhausted.
240 static int setClosure(naRef closure, naRef sym, naRef val)
242 struct naClosure* c = closure.ref.ptr.closure;
243 if(c == 0) { return 0; }
244 else if(naHash_tryset(c->namespace, sym, val)) { return 1; }
245 else { return setClosure(c->next, sym, val); }
// Assign sym=val with Nasal scoping: update the frame's locals if the
// symbol exists there, otherwise the innermost closure namespace that
// has it; otherwise create it in the locals.
248 static naRef setLocal(struct Frame* f, naRef sym, naRef val)
250 // Try the locals first, if not already there try the closures in
251 // order. Finally put it in the locals if nothing matched.
252 if(!naHash_tryset(f->locals, sym, val))
253 if(!setClosure(f->func.ref.ptr.func->closure, sym, val))
254 naHash_set(f->locals, sym, val);
258 // Recursively descend into the parents lists
// OO field lookup: check the hash itself, then recursively search each
// entry of its "parents" vector (Nasal's inheritance mechanism).
// Returns nonzero on success with the value stored through result.
// Only hashes have members; a non-vector "parents" field is an error.
// NOTE(review): no cycle/depth guard is visible in this extract -- a
// parents cycle would presumably recurse without bound; confirm.
259 static int getMember(struct Context* ctx, naRef obj, naRef fld, naRef* result)
262 if(!IS_HASH(obj)) ERR(ctx, "non-objects have no members");
263 if(naHash_get(obj, fld, result)) {
265 } else if(naHash_get(obj, ctx->parentsRef, &p)) {
267 if(!IS_VEC(p)) ERR(ctx, "parents field not vector");
268 for(i=0; i<p.ref.ptr.vec->size; i++)
269 if(getMember(ctx, p.ref.ptr.vec->array[i], fld, result))
// Push r onto the operand stack, raising a runtime error on overflow.
275 static void PUSH(struct Context* ctx, naRef r)
277 if(ctx->opTop >= MAX_STACK_DEPTH) ERR(ctx, "stack overflow");
278 ctx->opStack[ctx->opTop++] = r;
// Pop and return the top operand; underflow indicates an interpreter
// bug (the compiler should never emit code that underflows).
281 static naRef POP(struct Context* ctx)
283 if(ctx->opTop == 0) ERR(ctx, "BUG: stack underflow");
284 return ctx->opStack[--ctx->opTop];
// Return the top operand without popping it; underflow is a bug.
287 static naRef TOP(struct Context* ctx)
289 if(ctx->opTop == 0) ERR(ctx, "BUG: stack underflow");
290 return ctx->opStack[ctx->opTop-1];
// Read a big-endian 16-bit operand from the bytecode at the frame's
// instruction pointer.
// NOTE(review): the lines advancing f->ip past the operand and
// returning arg are missing from this extract -- confirm.
293 static int ARG16(unsigned char* byteCode, struct Frame* f)
295 int arg = byteCode[f->ip]<<8 | byteCode[f->ip+1];
300 // OP_EACH works like a vector get, except that it leaves the vector
301 // and index on the stack, increments the index after use, and pops
302 // the arguments and pushes a nil if the index is beyond the end.
// Stack on entry: [... vector index]; on each step the current element
// (or nil at the end) is pushed and the index slot is bumped in place.
303 static void evalEach(struct Context* ctx)
305 int idx = (int)(ctx->opStack[ctx->opTop-1].num);
306 naRef vec = ctx->opStack[ctx->opTop-2];
307 if(idx >= vec.ref.ptr.vec->size) {
308 ctx->opTop -= 2; // pop two values
// (end-of-vector branch: lines pushing the nil terminator are missing
// from this extract)
312 ctx->opStack[ctx->opTop-1].num = idx+1; // modify in place
313 PUSH(ctx, naVec_get(vec, idx));
// Execute a single bytecode instruction of the frame's code object.
// This is the core dispatch of the interpreter; frame completion (ip
// past the end) pops the frame.  Scratch naRefs a/b/c hold popped
// operands; comments in the original mark their meaning per opcode.
// NOTE(review): many case labels, break statements, and the enclosing
// switch are missing from this extract -- the grouping comments below
// are inferred from the surviving statements and should be confirmed
// against the full source.
316 static void run1(struct Context* ctx, struct Frame* f, naRef code)
319 struct naCode* cd = code.ref.ptr.code;
// Frame exhausted: tear it down and return to the caller's frame.
322 if(f->ip >= cd->nBytes) {
323 DBG(printf("Done with frame %d\n", ctx->fTop-1);)
// Fetch the next opcode and trace it in debug builds.
330 op = cd->byteCode[f->ip++];
331 DBG(printf("Stack Depth: %d\n", ctx->opTop));
332 DBG(printOpDEBUG(f->ip-1, op));
// Duplicate the top of the operand stack (case label missing here).
338 PUSH(ctx, ctx->opStack[ctx->opTop-1]);
// Exchange the top two operands (case label missing here).
341 a = POP(ctx); b = POP(ctx);
342 PUSH(ctx, a); PUSH(ctx, b);
// Binary numeric ops: note operands are passed (b, a) so that the
// second-popped value is the left-hand side.
344 case OP_PLUS: case OP_MUL: case OP_DIV: case OP_MINUS:
345 case OP_LT: case OP_LTE: case OP_GT: case OP_GTE:
346 a = POP(ctx); b = POP(ctx);
347 PUSH(ctx, evalBinaryNumeric(ctx, op, b, a));
349 case OP_EQ: case OP_NEQ:
350 a = POP(ctx); b = POP(ctx);
351 PUSH(ctx, evalEquality(op, b, a));
353 case OP_AND: case OP_OR:
354 a = POP(ctx); b = POP(ctx);
355 PUSH(ctx, evalAndOr(ctx, op, a, b));
// String concatenation: operands stay on the stack while stringify()
// runs so a GC pass can still see them.
358 // stringify can call the GC, so don't take stuff of the stack!
359 if(ctx->opTop <= 1) ERR(ctx, "BUG: stack underflow");
360 a = stringify(ctx, ctx->opStack[ctx->opTop-1]);
361 b = stringify(ctx, ctx->opStack[ctx->opTop-2]);
362 c = naStr_concat(naNewString(ctx), b, a);
// Unary negate and logical not (case labels and the POP of the operand
// are missing from this extract).
368 PUSH(ctx, naNum(-numify(ctx, a)));
372 PUSH(ctx, naNum(boolify(ctx, a) ? 0 : 1));
// Push a constant; code objects get bound to the lexical scope first.
375 a = cd->constants[ARG16(cd->byteCode, f)];
376 if(IS_CODE(a)) a = bindFunction(ctx, f, a);
// New empty vector / append (b=value, a=vector left on stack).
389 PUSH(ctx, naNewVector(ctx));
392 b = POP(ctx); a = TOP(ctx);
// New empty hash / insert key-value pair.
396 PUSH(ctx, naNewHash(ctx));
399 c = POP(ctx); b = POP(ctx); a = TOP(ctx); // a,b,c: hash, key, val
// Local-variable read and write.
403 a = getLocal(ctx, f, POP(ctx));
407 a = POP(ctx); b = POP(ctx);
408 PUSH(ctx, setLocal(f, b, a));
// Member read (with parents-chain lookup) and member write.
411 a = POP(ctx); b = POP(ctx);
412 if(!getMember(ctx, b, a, &c))
413 ERR(ctx, "no such member");
417 c = POP(ctx); b = POP(ctx); a = POP(ctx); // a,b,c: hash, key, val
418 if(!IS_HASH(a)) ERR(ctx, "non-objects have no members");
// Container insert/extract via the generic box helpers.
423 c = POP(ctx); b = POP(ctx); a = POP(ctx); // a,b,c: box, key, val
424 containerSet(ctx, a, b, c);
428 b = POP(ctx); a = POP(ctx); // a,b: box, key
429 PUSH(ctx, containerGet(ctx, a, b));
// Unconditional jump.
432 f->ip = ARG16(cd->byteCode, f);
433 DBG(printf(" [Jump to: %d]\n", f->ip);)
// Conditional jumps: jump-if-nil (pops only when nil) and
// jump-if-false (always pops).
436 arg = ARG16(cd->byteCode, f);
439 POP(ctx); // Pops **ONLY** if it's nil!
441 DBG(printf(" [Jump to: %d]\n", f->ip);)
445 arg = ARG16(cd->byteCode, f);
446 if(!boolify(ctx, POP(ctx))) {
448 DBG(printf(" [Jump to: %d]\n", f->ip);)
// Function call: (func, args) popped, new frame pushed.
452 b = POP(ctx); a = POP(ctx); // a,b = func, args
453 setupFuncall(ctx, a, b);
// Method call: the object is parked in temps (GC protection) and bound
// as "me" in the new frame's locals.
456 c = POP(ctx); b = POP(ctx); a = POP(ctx); // a,b,c = obj, func, args
457 naVec_append(ctx->temps, a);
458 setupFuncall(ctx, b, c);
459 naHash_set(ctx->fStack[ctx->fTop-1].locals, ctx->meRef, a);
// Return: unwind the operand stack to the frame's base pointer and
// clear the (statically allocated) args vector of the popped frame.
463 ctx->opTop = f->bp; // restore the correct stack frame!
465 ctx->fStack[ctx->fTop].args.ref.ptr.vec->size = 0;
// Record the current source line for error reporting.
469 f->line = ARG16(cd->byteCode, f);
474 case OP_MARK: // save stack state (e.g. "setjmp")
475 ctx->markStack[ctx->markTop++] = ctx->opTop;
477 case OP_UNMARK: // pop stack state set by mark
480 case OP_BREAK: // restore stack state (FOLLOW WITH JMP!)
481 ctx->opTop = ctx->markStack[--ctx->markTop];
// Unknown opcode: interpreter/compiler bug.
484 ERR(ctx, "BUG: bad opcode");
// Invoke a C-extension function frame: call the stored function pointer
// with the frame's args, then clear the popped frame's args vector so
// the GC doesn't keep the arguments alive.
// NOTE(review): lines popping the frame and pushing the result are
// missing from this extract -- confirm.
491 static void nativeCall(struct Context* ctx, struct Frame* f, naRef ccode)
493 naCFunction fptr = ccode.ref.ptr.ccode->fptr;
494 naRef result = (*fptr)(ctx, f->args);
496 ctx->fStack[ctx->fTop].args.ref.ptr.vec->size = 0;
// Public API: protect obj from garbage collection for the context's
// lifetime (body missing from this extract -- presumably appends to a
// context-owned save list; confirm).
500 void naSave(struct Context* ctx, naRef obj)
// Public API: number of active call frames (body missing from this
// extract -- presumably returns ctx->fTop; confirm).
505 int naStackDepth(struct Context* ctx)
// Public API: source line recorded for the given stack frame; frame 0
// is the innermost (top) frame, counting outward.
510 int naGetLine(struct Context* ctx, int frame)
512 return ctx->fStack[ctx->fTop-1-frame].line;
// Public API: source-file string of the code object executing in the
// given stack frame (0 = innermost).
515 naRef naGetSourceFile(struct Context* ctx, int frame)
517 naRef f = ctx->fStack[ctx->fTop-1-frame].func;
518 f = f.ref.ptr.func->code;
519 return f.ref.ptr.code->srcFile;
// Public API: message of the last runtime error raised via
// naRuntimeError() (body missing from this extract -- confirm the
// stored-message field it reads).
522 char* naGetError(struct Context* ctx)
// Main interpreter loop.  setjmp() establishes the landing point for
// naRuntimeError()'s longjmp; on error the function returns early and
// the caller inspects naGetError().  Otherwise it repeatedly executes
// the innermost frame (native C code or one bytecode step), clearing
// the temporaries vector between steps, until the frame stack empties,
// then returns the value left on top of the operand stack.
527 static naRef run(naContext ctx)
529 // Return early if an error occurred. It will be visible to the
530 // caller via naGetError().
532 if(setjmp(ctx->jumpHandle))
537 struct Frame* f = &(ctx->fStack[ctx->fTop-1]);
538 naRef code = f->func.ref.ptr.func->code;
539 if(IS_CCODE(code)) nativeCall(ctx, f, code);
540 else run1(ctx, f, code);
542 ctx->temps.ref.ptr.vec->size = 0; // Reset the temporaries
543 DBG(printStackDEBUG(ctx);)
546 DBG(printStackDEBUG(ctx);)
547 return ctx->opStack[--ctx->opTop];
// Public API: wrap a code object in a function bound to the given
// namespace hash as its (sole) closure scope.
// NOTE(review): the "return func" line appears to be missing from this
// extract.
550 naRef naBindFunction(naContext ctx, naRef code, naRef closure)
552 naRef func = naNewFunc(ctx, code);
553 func.ref.ptr.func->closure = naNewClosure(ctx, closure, naNil());
// Public API: call a Nasal function from C.  The C-side arguments are
// first parked in the temps vector so a GC triggered by the allocations
// below can still see them; nil args/locals are replaced with fresh
// defaults, bare code objects get a no-op closure, obj (if given) is
// bound as "me", and the stacks are reset before the call is set up.
// NOTE(review): this definition continues past the end of this extract
// (the conditional guards around the defaulting lines and the actual
// run() invocation are not visible); the missing lines are not
// documented here.
557 naRef naCall(naContext ctx, naRef func, naRef args, naRef obj, naRef locals)
559 // We might have to allocate objects, which can call the GC. But
560 // the call isn't on the Nasal stack yet, so the GC won't find our
561 // C-space arguments.
562 naVec_append(ctx->temps, func);
563 naVec_append(ctx->temps, args);
564 naVec_append(ctx->temps, obj);
565 naVec_append(ctx->temps, locals);
// Default an omitted argument vector / locals hash.
568 args = naNewVector(ctx);
570 locals = naNewHash(ctx);
572 // Generate a noop closure for bare code objects
574 func = naNewFunc(ctx, code);
575 func.ref.ptr.func->closure = naNewClosure(ctx, locals, naNil());
// Bind the method receiver as "me" in the call's locals.
578 naHash_set(locals, ctx->meRef, obj);
// Fresh stacks, then push the initial frame and install our locals.
580 ctx->fTop = ctx->opTop = ctx->markTop = 0;
581 setupFuncall(ctx, func, args);
582 ctx->fStack[ctx->fTop-1].locals = locals;