4 ////////////////////////////////////////////////////////////////////////
5 // Debugging stuff. ////////////////////////////////////////////////////
6 ////////////////////////////////////////////////////////////////////////
7 #if !defined(DEBUG_NASAL)
8 # define DBG(expr) /* noop */
// NOTE(review): the #else/#endif lines of this conditional fall on
// lines elided from this listing.  DBG(expr) compiles to nothing in
// normal builds and to the bare expression when DEBUG_NASAL is set.
10 # define DBG(expr) expr
// Debug helpers defined elsewhere: opcode-name lookup, opcode trace,
// naRef dump, and operand-stack dump.
14 char* opStringDEBUG(int op);
15 void printOpDEBUG(int ip, int op);
16 void printRefDEBUG(naRef r);
17 void printStackDEBUG(struct Context* ctx);
18 ////////////////////////////////////////////////////////////////////////
20 // FIXME: need to store a list of all contexts
// Single static interpreter context; multi-context support is not
// implemented yet (see FIXME above and naNewContext()).
21 struct Context globalContext;
// All internal error reporting funnels through naRuntimeError(),
// which longjmp()s out and never returns.
23 #define ERR(c, msg) naRuntimeError((c),(msg))
// Raise a fatal runtime error on context c.  Performs a non-local
// jump back to the setjmp(c->jumpHandle) in run(); never returns.
// (NOTE(review): the lines between the signature and the longjmp are
// elided here -- presumably msg is stored in the context so
// naGetError() can report it; TODO confirm.)
24 void naRuntimeError(struct Context* c, char* msg)
27 longjmp(c->jumpHandle, 1);
// Coerce r to a C boolean for conditional contexts:
// nil -> 0, number -> (num != 0), string -> 1 (any string is true).
// Non-scalars raise a runtime error (ERR longjmp()s, no return).
30 int boolify(struct Context* ctx, naRef r)
32 if(IS_NIL(r)) return 0;
33 if(IS_NUM(r)) return r.num != 0;
34 if(IS_STR(r)) return 1;
35 ERR(ctx, "non-scalar used in boolean context");
// Coerce o to a double for numeric contexts.  Numbers pass through;
// strings are parsed via naStr_tonum().  Nil, other non-scalars, and
// unparseable strings raise runtime errors (ERR does not return).
// (The declaration of n falls on an elided line of this listing.)
39 static double numify(struct Context* ctx, naRef o)
42 if(IS_NUM(o)) return o.num;
43 else if(IS_NIL(o)) ERR(ctx, "nil used in numeric context");
44 else if(!IS_STR(o)) ERR(ctx, "non-scalar in numeric context");
45 else if(naStr_tonum(o, &n)) return n;
46 else ERR(ctx, "non-numeric string in numeric context");
// Coerce r to a Nasal string: strings pass through unchanged, numbers
// are formatted into a freshly allocated string (may trigger GC).
// Non-scalars raise a runtime error.
50 static naRef stringify(struct Context* ctx, naRef r)
52 if(IS_STR(r)) return r;
53 if(IS_NUM(r)) return naStr_fromnum(naNewString(ctx), r.num);
54 ERR(ctx, "non-scalar in string context");
// Numify idx and bounds-check it against vec's element count,
// raising a runtime error when out of range.  (The `return i;` for
// the validated index presumably follows on an elided line -- callers
// such as containerGet/containerSet use the result as the index.)
58 static int checkVec(struct Context* ctx, naRef vec, naRef idx)
60 int i = (int)numify(ctx, idx);
61 if(i < 0 || i >= vec.ref.ptr.vec->size)
62 ERR(ctx, "vector index out of bounds");
// Generic indexed read from a container.  Scalar keys only.  Hashes:
// lookup, error if the key is absent.  Vectors: bounds-checked
// element read.  Anything else: "extract from non-container".
// (NOTE(review): the opening `if(IS_HASH(box)) {` test and the final
// `return result;` fall on elided lines of this listing.)
66 static naRef containerGet(struct Context* ctx, naRef box, naRef key)
68 naRef result = naNil();
69 if(!IS_SCALAR(key)) ERR(ctx, "container index not scalar");
71 if(!naHash_get(box, key, &result))
72 ERR(ctx, "undefined value in container");
73 } else if(IS_VEC(box)) {
74 result = naVec_get(box, checkVec(ctx, box, key));
76 ERR(ctx, "extract from non-container");
// Generic indexed write into a container: hash set for hashes,
// bounds-checked element store for vectors.  Scalar keys only;
// writing into anything else raises a runtime error.
81 static void containerSet(struct Context* ctx, naRef box, naRef key, naRef val)
83 if(!IS_SCALAR(key)) ERR(ctx, "container index not scalar");
84 else if(IS_HASH(box)) naHash_set(box, key, val);
85 else if(IS_VEC(box)) naVec_set(box, checkVec(ctx, box, key), val);
86 else ERR(ctx, "insert into non-container");
// One-time setup of an interpreter context: initialize the per-type
// GC pools, zero the frame/operand stacks and their tops, pre-set
// every frame's args slot to nil (the args vectors live with the
// context and are reused), create the recycled-argument pool and the
// temporaries vector, and intern the "me"/"arg"/"parents" strings.
// (The declaration of i falls on an elided line of this listing.)
89 static void initContext(struct Context* c)
92 for(i=0; i<NUM_NASAL_TYPES; i++)
93 naGC_init(&(c->pools[i]), i);
95 c->fTop = c->opTop = c->markTop = 0;
97 naBZero(c->fStack, MAX_RECURSION * sizeof(struct Frame));
98 naBZero(c->opStack, MAX_STACK_DEPTH * sizeof(naRef));
100 // Make sure the args vectors (which are static with the context)
101 // are initialized to nil.
102 for(i=0; i<MAX_RECURSION; i++)
103 c->fStack[i].args = naNil();
105 c->argPool = naNewVector(c);
107 // Note we can't use naNewVector() for this; it requires that
108 // temps exist first.
109 c->temps = naObj(T_VEC, naGC_get(&(c->pools[T_VEC])));
113 // Cache pre-calculated "me", "arg" and "parents" scalars
114 c->meRef = naStr_fromdata(naNewString(c), "me", 2);
115 c->argRef = naStr_fromdata(naNewString(c), "arg", 3);
116 c->parentsRef = naStr_fromdata(naNewString(c), "parents", 7);
// Public constructor.  Currently always hands back the one static
// globalContext (see FIXME); real multi-context support is pending.
// (Initialization/return lines are elided from this listing --
// presumably initContext(c) is called before returning; TODO confirm.)
119 struct Context* naNewContext()
121 // FIXME: need more than one!
122 struct Context* c = &globalContext;
// Mark/reap garbage collection pass over the (single, global)
// context.  Marks everything reachable from the active call frames,
// *every* per-frame args vector (even inactive frames -- the vectors
// are reused), the live portion of the operand stack, the argument
// pool, and the cached interned strings; then reaps unmarked objects
// from each type pool.  (NOTE(review): several mark calls -- e.g. for
// c->temps and c->meRef -- fall on lines elided from this listing.)
127 void naGarbageCollect()
130 struct Context* c = &globalContext; // FIXME: more than one!
132 for(i=0; i < c->fTop; i++) {
133 naGC_mark(c->fStack[i].func);
134 naGC_mark(c->fStack[i].locals);
136 for(i=0; i < MAX_RECURSION; i++)
137 naGC_mark(c->fStack[i].args); // collect *all* the argument lists
138 for(i=0; i < c->opTop; i++)
139 naGC_mark(c->opStack[i]);
141 naGC_mark(c->argPool);
146 naGC_mark(c->argRef);
147 naGC_mark(c->parentsRef);
149 // Finally collect all the freed objects
150 for(i=0; i<NUM_NASAL_TYPES; i++)
151 naGC_reap(&(c->pools[i]));
// Push a new call frame for invoking func with the given args vector.
// Bytecode functions get a fresh locals hash with "arg" bound to the
// argument vector; C functions are handled in the IS_CCODE branch
// (its body is elided here).  Non-callable objects raise a runtime
// error.  (NOTE(review): frame-field initialization such as ip/bp and
// recursion-depth checking falls on elided lines; TODO confirm.)
154 void setupFuncall(struct Context* ctx, naRef func, naRef args)
157 f = &(ctx->fStack[ctx->fTop++]);
163 DBG(printf("Entering frame %d\n", ctx->fTop-1);)
166 ERR(ctx, "function/method call invoked on uncallable object");
169 if(IS_CCODE(func.ref.ptr.func->code)) {
171 } else if(IS_CODE(func.ref.ptr.func->code)) {
172 f->locals = naNewHash(ctx);
173 naHash_set(f->locals, ctx->argRef, args);
175 ERR(ctx, "function/method call invoked on uncallable object");
// Logical AND/OR over two already-evaluated operands.  Both sides are
// boolified eagerly here; the result is returned as a Nasal number
// (1 or 0).  (The declaration of `result` falls on an elided line.)
179 static naRef evalAndOr(struct Context* ctx, int op, naRef ra, naRef rb)
181 int a = boolify(ctx, ra);
182 int b = boolify(ctx, rb);
184 if(op == OP_AND) result = a && b ? 1 : 0;
185 else result = a || b ? 1 : 0;
186 return naNum(result);
// Equality test via naEqual(): OP_EQ returns the comparison result,
// OP_NEQ its negation, both as Nasal numbers (1/0).
189 static naRef evalEquality(int op, naRef ra, naRef rb)
191 int result = naEqual(ra, rb);
192 return naNum((op==OP_EQ) ? result : !result);
// Dispatch a binary arithmetic or ordered-comparison opcode over two
// operands coerced to doubles (numify raises on non-numerics).
// Comparisons yield Nasal 1/0.  (The `switch(op) {` line and the
// closing default/brace are elided from this listing.)
195 static naRef evalBinaryNumeric(struct Context* ctx, int op, naRef ra, naRef rb)
197 double a = numify(ctx, ra), b = numify(ctx, rb);
199 case OP_PLUS: return naNum(a + b);
200 case OP_MINUS: return naNum(a - b);
201 case OP_MUL: return naNum(a * b);
202 case OP_DIV: return naNum(a / b);
203 case OP_LT: return naNum(a < b ? 1 : 0);
204 case OP_LTE: return naNum(a <= b ? 1 : 0);
205 case OP_GT: return naNum(a > b ? 1 : 0);
206 case OP_GTE: return naNum(a >= b ? 1 : 0);
211 // When a code object comes out of the constant pool and shows up on
212 // the stack, it needs to be bound with the lexical context.
// Builds a new function wrapping `code` whose closure chain is the
// current frame's locals prepended to the enclosing function's
// closure chain -- i.e. standard lexical scoping for nested
// functions.  (The `return result;` line is elided here.)
213 static naRef bindFunction(struct Context* ctx, struct Frame* f, naRef code)
215 naRef next = f->func.ref.ptr.func->closure;
216 naRef closure = naNewClosure(ctx, f->locals, next);
217 naRef result = naNewFunc(ctx, code);
218 result.ref.ptr.func->closure = closure;
// Walk the closure chain looking sym up in each namespace; on a hit,
// store the value through result and report success (1).  (The loop
// header / nil-termination test and the failure `return 0;` fall on
// elided lines of this listing.)
222 static int getClosure(struct naClosure* c, naRef sym, naRef* result)
225 if(naHash_get(c->namespace, sym, result)) return 1;
226 c = c->next.ref.ptr.closure;
231 // Get a local symbol, or check the closure list if it isn't there
// Symbol read: frame locals first, then the function's closure chain;
// an unresolved symbol is a runtime error.  (Declaration of `result`
// and the final `return result;` are elided from this listing.)
232 static naRef getLocal(struct Context* ctx, struct Frame* f, naRef sym)
235 if(!naHash_get(f->locals, sym, &result)) {
236 naRef c = f->func.ref.ptr.func->closure;
237 if(!getClosure(c.ref.ptr.closure, sym, &result))
238 ERR(ctx, "undefined symbol");
// Recursively try to update an *existing* binding of sym somewhere in
// the closure chain (naHash_tryset only writes if the key is already
// present).  Returns 1 if a binding was updated, 0 if the chain was
// exhausted.
243 static int setClosure(naRef closure, naRef sym, naRef val)
245 struct naClosure* c = closure.ref.ptr.closure;
246 if(c == 0) { return 0; }
247 else if(naHash_tryset(c->namespace, sym, val)) { return 1; }
248 else { return setClosure(c->next, sym, val); }
// Symbol write with lexical-scope semantics: update an existing
// binding in the locals or, failing that, in the closure chain; only
// create a brand-new binding (in the locals) if neither had one.
// (The `return val;` presumably follows on an elided line.)
251 static naRef setLocal(struct Frame* f, naRef sym, naRef val)
253 // Try the locals first, if not already there try the closures in
254 // order. Finally put it in the locals if nothing matched.
255 if(!naHash_tryset(f->locals, sym, val))
256 if(!setClosure(f->func.ref.ptr.func->closure, sym, val))
257 naHash_set(f->locals, sym, val);
261 // Recursively descend into the parents lists
// OO field lookup: check the object's own hash, then recursively
// search each entry of its "parents" vector (prototype inheritance).
// Returns nonzero on success with the value in *result.  Non-hash
// objects and a non-vector "parents" field are runtime errors.
// (Declarations of p/i, the success `return 1`, and the failure
// return fall on elided lines of this listing.)
262 static int getMember(struct Context* ctx, naRef obj, naRef fld, naRef* result)
265 if(!IS_HASH(obj)) ERR(ctx, "non-objects have no members");
266 if(naHash_get(obj, fld, result)) {
268 } else if(naHash_get(obj, ctx->parentsRef, &p)) {
270 if(!IS_VEC(p)) ERR(ctx, "parents field not vector");
271 for(i=0; i<p.ref.ptr.vec->size; i++)
272 if(getMember(ctx, p.ref.ptr.vec->array[i], fld, result))
// Push r onto the operand stack, erroring out on overflow.
278 static void PUSH(struct Context* ctx, naRef r)
280 if(ctx->opTop >= MAX_STACK_DEPTH) ERR(ctx, "stack overflow");
281 ctx->opStack[ctx->opTop++] = r;
// Pop and return the top operand; underflow indicates a VM bug.
284 static naRef POP(struct Context* ctx)
286 if(ctx->opTop == 0) ERR(ctx, "BUG: stack underflow");
287 return ctx->opStack[--ctx->opTop];
// Peek at the top operand without popping; underflow is a VM bug.
290 static naRef TOP(struct Context* ctx)
292 if(ctx->opTop == 0) ERR(ctx, "BUG: stack underflow");
293 return ctx->opStack[ctx->opTop-1];
// Decode the big-endian 16-bit immediate operand at the frame's
// current instruction pointer.  (The ip advance past the operand and
// the `return arg;` fall on elided lines of this listing.)
296 static int ARG16(unsigned char* byteCode, struct Frame* f)
298 int arg = byteCode[f->ip]<<8 | byteCode[f->ip+1];
303 // OP_EACH works like a vector get, except that it leaves the vector
304 // and index on the stack, increments the index after use, and pops
305 // the arguments and pushes a nil if the index is beyond the end.
// Stack on entry: [... vec idx]; on exit either [... vec idx+1 elem]
// or, past the end, [... nil].  (The push of nil in the exhausted
// branch falls on an elided line of this listing.)
306 static void evalEach(struct Context* ctx)
308 int idx = (int)(ctx->opStack[ctx->opTop-1].num);
309 naRef vec = ctx->opStack[ctx->opTop-2];
310 if(idx >= vec.ref.ptr.vec->size) {
311 ctx->opTop -= 2; // pop two values
315 ctx->opStack[ctx->opTop-1].num = idx+1; // modify in place
316 PUSH(ctx, naVec_get(vec, idx));
// Core bytecode dispatch: execute instructions of the current frame f
// (fetch opcode at f->ip, decode any 16-bit operands via ARG16, and
// manipulate the operand stack) until the instruction pointer runs
// off the end of the code object or an error longjmp()s out.  Binary
// ops pop operands in reverse (a=rhs popped first, pushed back as
// (b, a) operand order).  NOTE(review): this listing is heavily
// elided -- case labels, break statements, PUSHes and whole opcode
// bodies are missing between the visible lines, so per-opcode
// comments below describe only what is visible.
319 static void run1(struct Context* ctx, struct Frame* f, naRef code)
322 struct naCode* cd = code.ref.ptr.code;
// Frame exhausted its bytecode: return to the caller frame.
325 if(f->ip >= cd->nBytes) {
326 DBG(printf("Done with frame %d\n", ctx->fTop-1);)
333 op = cd->byteCode[f->ip++];
334 DBG(printf("Stack Depth: %d\n", ctx->opTop));
335 DBG(printOpDEBUG(f->ip-1, op));
341 PUSH(ctx, ctx->opStack[ctx->opTop-1]);
344 a = POP(ctx); b = POP(ctx);
345 PUSH(ctx, a); PUSH(ctx, b);
347 case OP_PLUS: case OP_MUL: case OP_DIV: case OP_MINUS:
348 case OP_LT: case OP_LTE: case OP_GT: case OP_GTE:
349 a = POP(ctx); b = POP(ctx);
350 PUSH(ctx, evalBinaryNumeric(ctx, op, b, a));
352 case OP_EQ: case OP_NEQ:
353 a = POP(ctx); b = POP(ctx);
354 PUSH(ctx, evalEquality(op, b, a));
356 case OP_AND: case OP_OR:
357 a = POP(ctx); b = POP(ctx);
358 PUSH(ctx, evalAndOr(ctx, op, a, b));
// String concatenation: stringify() may allocate and trigger GC, so
// the operands must stay on (GC-visible) stack until done.
361 // stringify can call the GC, so don't take stuff of the stack!
362 if(ctx->opTop <= 1) ERR(ctx, "BUG: stack underflow");
363 a = stringify(ctx, ctx->opStack[ctx->opTop-1]);
364 b = stringify(ctx, ctx->opStack[ctx->opTop-2]);
365 c = naStr_concat(naNewString(ctx), b, a);
371 PUSH(ctx, naNum(-numify(ctx, a)));
375 PUSH(ctx, naNum(boolify(ctx, a) ? 0 : 1));
// Constant-pool load; code objects get bound to the lexical scope.
378 a = cd->constants[ARG16(cd->byteCode, f)];
379 if(IS_CODE(a)) a = bindFunction(ctx, f, a);
392 PUSH(ctx, naNewVector(ctx));
395 b = POP(ctx); a = TOP(ctx);
399 PUSH(ctx, naNewHash(ctx));
402 c = POP(ctx); b = POP(ctx); a = TOP(ctx); // a,b,c: hash, key, val
406 a = getLocal(ctx, f, POP(ctx));
410 a = POP(ctx); b = POP(ctx);
411 PUSH(ctx, setLocal(f, b, a));
414 a = POP(ctx); b = POP(ctx);
415 if(!getMember(ctx, b, a, &c))
416 ERR(ctx, "no such member");
420 c = POP(ctx); b = POP(ctx); a = POP(ctx); // a,b,c: hash, key, val
421 if(!IS_HASH(a)) ERR(ctx, "non-objects have no members");
426 c = POP(ctx); b = POP(ctx); a = POP(ctx); // a,b,c: box, key, val
427 containerSet(ctx, a, b, c);
431 b = POP(ctx); a = POP(ctx); // a,b: box, key
432 PUSH(ctx, containerGet(ctx, a, b));
// Unconditional jump: ip operand is an absolute bytecode offset.
435 f->ip = ARG16(cd->byteCode, f);
436 DBG(printf(" [Jump to: %d]\n", f->ip);)
439 arg = ARG16(cd->byteCode, f);
442 POP(ctx); // Pops **ONLY** if it's nil!
444 DBG(printf(" [Jump to: %d]\n", f->ip);)
448 arg = ARG16(cd->byteCode, f);
449 if(!boolify(ctx, POP(ctx))) {
451 DBG(printf(" [Jump to: %d]\n", f->ip);)
455 b = POP(ctx); a = POP(ctx); // a,b = func, args
456 setupFuncall(ctx, a, b);
// Method call: bind "me" to the object in the callee's locals.
459 c = POP(ctx); b = POP(ctx); a = POP(ctx); // a,b,c = obj, func, args
460 setupFuncall(ctx, b, c);
461 naHash_set(ctx->fStack[ctx->fTop-1].locals, ctx->meRef, a);
// Function return: unwind the operand stack to the caller's base
// pointer and recycle the finished frame's args vector.
465 ctx->opTop = f->bp; // restore the correct stack frame!
467 ctx->fStack[ctx->fTop].args.ref.ptr.vec->size = 0;
468 naVec_append(ctx->argPool, ctx->fStack[ctx->fTop].args);
472 f->line = ARG16(cd->byteCode, f);
477 case OP_MARK: // save stack state (e.g. "setjmp")
478 ctx->markStack[ctx->markTop++] = ctx->opTop;
480 case OP_UNMARK: // pop stack state set by mark
483 case OP_BREAK: // restore stack state (FOLLOW WITH JMP!)
484 ctx->opTop = ctx->markStack[--ctx->markTop];
486 case OP_NEWARGS: // push a new function arg vector
// Reuse a recycled args vector from the pool when available.
487 PUSH(ctx, (naVec_size(ctx->argPool) ?
488 naVec_removelast(ctx->argPool) : naNewVector(ctx)));
491 ERR(ctx, "BUG: bad opcode");
// Invoke a built-in C function: call the function pointer with the
// frame's args vector, then recycle the args vector (its size is
// reset to zero for reuse).  (NOTE(review): the frame pop and the
// push of `result` fall on elided lines of this listing -- the
// fStack[ctx->fTop] access implies fTop was already decremented;
// TODO confirm.)
498 static void nativeCall(struct Context* ctx, struct Frame* f, naRef ccode)
500 naCFunction fptr = ccode.ref.ptr.ccode->fptr;
501 naRef result = (*fptr)(ctx, f->args);
503 ctx->fStack[ctx->fTop].args.ref.ptr.vec->size = 0;
// Protect obj from garbage collection across calls.  (The entire body
// is elided from this listing -- presumably it appends obj to a
// GC-marked save list on the context; TODO confirm against the full
// source.)
507 void naSave(struct Context* ctx, naRef obj)
// Report the current call-stack depth.  (Body elided from this
// listing -- presumably returns ctx->fTop, consistent with the
// fTop-1-frame indexing in naGetLine()/naGetSourceFile(); TODO
// confirm.)
512 int naStackDepth(struct Context* ctx)
// Source line of the given stack frame; frame 0 is the innermost
// (top) frame, counting outward.
517 int naGetLine(struct Context* ctx, int frame)
519 return ctx->fStack[ctx->fTop-1-frame].line;
// Source-file name (a Nasal string ref) of the given stack frame;
// same frame numbering as naGetLine() (0 = innermost).  Reaches
// through the frame's function object to its code object's srcFile.
522 naRef naGetSourceFile(struct Context* ctx, int frame)
524 naRef f = ctx->fStack[ctx->fTop-1-frame].func;
525 f = f.ref.ptr.func->code;
526 return f.ref.ptr.code->srcFile;
// Fetch the message of the last runtime error raised on this context
// (set via naRuntimeError before the longjmp).  (Body elided from
// this listing; TODO confirm where the message is stored.)
529 char* naGetError(struct Context* ctx)
// Main interpreter loop.  Installs the error handler via setjmp (a
// naRuntimeError longjmp lands here and returns early; the error is
// then visible through naGetError()), then repeatedly executes the
// topmost frame -- nativeCall() for C built-ins, run1() for bytecode
// -- clearing the temporaries vector after each step.  Returns the
// value left on top of the operand stack.  (The loop header and its
// termination condition fall on elided lines of this listing.)
534 static naRef run(naContext ctx)
536 // Return early if an error occurred. It will be visible to the
537 // caller via naGetError().
539 if(setjmp(ctx->jumpHandle))
544 struct Frame* f = &(ctx->fStack[ctx->fTop-1]);
545 naRef code = f->func.ref.ptr.func->code;
546 if(IS_CCODE(code)) nativeCall(ctx, f, code);
547 else run1(ctx, f, code);
549 ctx->temps.ref.ptr.vec->size = 0; // Reset the temporaries
550 DBG(printStackDEBUG(ctx);)
553 DBG(printStackDEBUG(ctx);)
554 return ctx->opStack[--ctx->opTop];
// Public API: wrap a code object in a callable function whose closure
// chain is a single level containing the given namespace hash.
// (The `return func;` falls on an elided line of this listing.)
557 naRef naBindFunction(naContext ctx, naRef code, naRef closure)
559 naRef func = naNewFunc(ctx, code);
560 func.ref.ptr.func->closure = naNewClosure(ctx, closure, naNil());
564 naRef naCall(naContext ctx, naRef func, naRef args, naRef obj, naRef locals)
566 // We might have to allocate objects, which can call the GC. But
567 // the call isn't on the Nasal stack yet, so the GC won't find our
568 // C-space arguments.
569 naVec_append(ctx->temps, func);
570 naVec_append(ctx->temps, args);
571 naVec_append(ctx->temps, obj);
572 naVec_append(ctx->temps, locals);
575 args = naNewVector(ctx);
577 locals = naNewHash(ctx);
579 // Generate a noop closure for bare code objects
581 func = naNewFunc(ctx, code);
582 func.ref.ptr.func->closure = naNewClosure(ctx, locals, naNil());
585 naHash_set(locals, ctx->meRef, obj);
587 ctx->fTop = ctx->opTop = ctx->markTop = 0;
588 setupFuncall(ctx, func, args);
589 ctx->fStack[ctx->fTop-1].locals = locals;