4 ////////////////////////////////////////////////////////////////////////
5 // Debugging stuff. ////////////////////////////////////////////////////
6 ////////////////////////////////////////////////////////////////////////
7 #if !defined(DEBUG_NASAL)
8 # define DBG(expr) /* noop */
// NOTE(review): the #else between the two DBG definitions is elided in
// this excerpt; with DEBUG_NASAL defined, DBG(expr) expands to expr.
10 # define DBG(expr) expr
// Debug helpers (implemented elsewhere): opcode name lookup and
// opcode/ref/stack pretty-printers.
14 char* opStringDEBUG(int op);
15 void printOpDEBUG(int ip, int op);
16 void printRefDEBUG(naRef r);
17 void printStackDEBUG(struct Context* ctx);
18 ////////////////////////////////////////////////////////////////////////
20 // FIXME: need to store a list of all contexts
21 struct Context globalContext;
// Raise a Nasal runtime error: stash msg and longjmp back to the
// setjmp in run(), abandoning the interpreter loop.  ERR() is the
// shorthand used throughout this file.
23 #define ERR(c, msg) naRuntimeError((c),(msg))
24 void naRuntimeError(struct Context* c, char* msg)
// (body partially elided — presumably msg is recorded on the context
// for naGetError() before the jump; confirm against the full source)
27 longjmp(c->jumpHandle, 1);
// Coerce a scalar naRef to a C boolean: numbers are true when nonzero,
// nil is false, strings are parsed as numbers (empty string is false).
// Non-scalars raise a runtime error (ERR longjmps; no normal return).
30 static int boolify(struct Context* ctx, naRef r)
32 if(IS_NUM(r)) return r.num != 0;
33 if(IS_NIL(r)) return 0;
// (string branch; the IS_STR guard and the declaration of d are elided
// from this excerpt)
36 if(naStr_len(r) == 0) return 0;
37 if(naStr_tonum(r, &d)) return d != 0;
// Falls through for non-numeric strings and non-scalar types.
40 ERR(ctx, "non-scalar used in boolean context");
// Coerce a scalar naRef to a double.  Numbers pass through, strings
// are parsed; nil, non-scalars, and unparsable strings raise a runtime
// error (ERR longjmps, so the error branches never return).
44 static double numify(struct Context* ctx, naRef o)
47 if(IS_NUM(o)) return o.num;
48 else if(IS_NIL(o)) ERR(ctx, "nil used in numeric context");
49 else if(!IS_STR(o)) ERR(ctx, "non-scalar in numeric context");
50 else if(naStr_tonum(o, &n)) return n;
51 else ERR(ctx, "non-numeric string in numeric context");
// Coerce a scalar naRef to a string: strings pass through untouched,
// numbers are formatted into a freshly allocated string.  Anything
// else raises a runtime error.
55 static naRef stringify(struct Context* ctx, naRef r)
57 if(IS_STR(r)) return r;
58 if(IS_NUM(r)) return naStr_fromnum(naNewString(ctx), r.num);
59 ERR(ctx, "non-scalar in string context");
// Numify idx and bounds-check it against vec's size; raises a runtime
// error when out of range.  (The return of the validated index is
// elided from this excerpt but implied by the callers below.)
63 static int checkVec(struct Context* ctx, naRef vec, naRef idx)
65 int i = (int)numify(ctx, idx);
66 if(i < 0 || i >= vec.ref.ptr.vec->size)
67 ERR(ctx, "vector index out of bounds");
// Generic indexed read, shared by the hash and vector cases of the
// extract opcode.  Keys must be scalar; missing hash keys and
// non-container boxes are runtime errors.
71 static naRef containerGet(struct Context* ctx, naRef box, naRef key)
73 naRef result = naNil();
74 if(!IS_SCALAR(key)) ERR(ctx, "container index not scalar");
// (the IS_HASH(box) test opening this branch is elided in the excerpt)
76 if(!naHash_get(box, key, &result))
77 ERR(ctx, "undefined value in container");
78 } else if(IS_VEC(box)) {
// checkVec numifies and bounds-checks the key before the get.
79 result = naVec_get(box, checkVec(ctx, box, key));
81 ERR(ctx, "extract from non-container");
// Generic indexed write: scalar keys only; hashes take any scalar key,
// vectors require an in-range numeric index (checkVec), and anything
// else is a runtime error.
86 static void containerSet(struct Context* ctx, naRef box, naRef key, naRef val)
88 if(!IS_SCALAR(key)) ERR(ctx, "container index not scalar");
89 else if(IS_HASH(box)) naHash_set(box, key, val);
90 else if(IS_VEC(box)) naVec_set(box, checkVec(ctx, box, key), val);
91 else ERR(ctx, "insert into non-container");
// One-time setup of an interpreter context: GC pools, zeroed call and
// operand stacks, the temporaries vector used to root C-side objects,
// and cached interned strings for "me"/"arg"/"parents".
94 static void initContext(struct Context* c)
97 for(i=0; i<NUM_NASAL_TYPES; i++)
98 naGC_init(&(c->pools[i]), i);
100 c->fTop = c->opTop = c->markTop = 0;
102 naBZero(c->fStack, MAX_RECURSION * sizeof(struct Frame));
103 naBZero(c->opStack, MAX_STACK_DEPTH * sizeof(naRef));
105 // Make sure the args vectors (which are static with the context)
106 // are initialized to nil.
107 for(i=0; i<MAX_RECURSION; i++)
108 c->fStack[i].args = naNil();
110 // Note we can't use naNewVector() for this; it requires that
111 // temps exist first.
112 c->temps = naObj(T_VEC, naGC_get(&(c->pools[T_VEC])));
116 // Cache pre-calculated "me", "arg" and "parents" scalars
117 c->meRef = naStr_fromdata(naNewString(c), "me", 2);
118 c->argRef = naStr_fromdata(naNewString(c), "arg", 3);
119 c->parentsRef = naStr_fromdata(naNewString(c), "parents", 7);
// Public constructor.  Currently there is only the single static
// globalContext (see FIXME); the initContext()/return lines are elided
// from this excerpt.
122 struct Context* naNewContext()
124 // FIXME: need more than one!
125 struct Context* c = &globalContext;
// Mark-and-sweep entry point: mark everything reachable from the
// context (call frames, *all* static args vectors, operand stack,
// cached refs), then reap each type pool.
130 void naGarbageCollect()
133 struct Context* c = &globalContext; // FIXME: more than one!
135 for(i=0; i < c->fTop; i++) {
136 naGC_mark(c->fStack[i].func);
137 naGC_mark(c->fStack[i].locals);
// Args vectors persist beyond fTop (they are reused in place), so the
// full MAX_RECURSION range is marked, not just the live frames.
139 for(i=0; i < MAX_RECURSION; i++)
140 naGC_mark(c->fStack[i].args); // collect *all* the argument lists
141 for(i=0; i < c->opTop; i++)
142 naGC_mark(c->opStack[i]);
// (marking of temps and meRef is elided in this excerpt)
148 naGC_mark(c->argRef);
149 naGC_mark(c->parentsRef);
151 // Finally collect all the freed objects
152 for(i=0; i<NUM_NASAL_TYPES; i++)
153 naGC_reap(&(c->pools[i]));
// Push a new call frame for func with the given args vector.  The
// callee must be a function object wrapping either C code or Nasal
// bytecode.  For bytecode, a fresh locals hash is created and "arg"
// is bound to the argument list.
// NOTE(review): no recursion-depth check against MAX_RECURSION is
// visible in this excerpt — confirm it exists in the elided lines.
156 void setupFuncall(struct Context* ctx, naRef func, naRef args)
160 !(IS_CCODE(func.ref.ptr.func->code) ||
161 IS_CODE(func.ref.ptr.func->code)))
163 ERR(ctx, "function/method call invoked on uncallable object");
166 f = &(ctx->fStack[ctx->fTop++]);
172 DBG(printf("Entering frame %d\n", ctx->fTop-1);)
// C-code frames need no locals hash; bytecode frames get one plus the
// "arg" binding.
175 if(IS_CCODE(func.ref.ptr.func->code)) {
177 } else if(IS_CODE(func.ref.ptr.func->code)) {
178 f->locals = naNewHash(ctx);
179 naHash_set(f->locals, ctx->argRef, args);
// Logical and/or over two already-evaluated operands.  Both sides are
// boolified unconditionally here — any short-circuiting would have to
// happen in the bytecode before this opcode runs.  Returns naNum(0|1).
183 static naRef evalAndOr(struct Context* ctx, int op, naRef ra, naRef rb)
185 int a = boolify(ctx, ra);
186 int b = boolify(ctx, rb);
188 if(op == OP_AND) result = a && b ? 1 : 0;
189 else result = a || b ? 1 : 0;
190 return naNum(result);
// == / != via naEqual(); result is returned as naNum(0|1), inverted
// for OP_NEQ.
193 static naRef evalEquality(int op, naRef ra, naRef rb)
195 int result = naEqual(ra, rb);
196 return naNum((op==OP_EQ) ? result : !result);
// Arithmetic and ordered comparisons.  Both operands are numified
// first (so strings coerce, and non-scalars error out before the
// switch).  Comparisons return naNum(0|1).
// Note: OP_DIV performs IEEE double division — division by zero
// yields inf/nan rather than an error.
199 static naRef evalBinaryNumeric(struct Context* ctx, int op, naRef ra, naRef rb)
201 double a = numify(ctx, ra), b = numify(ctx, rb);
203 case OP_PLUS: return naNum(a + b);
204 case OP_MINUS: return naNum(a - b);
205 case OP_MUL: return naNum(a * b);
206 case OP_DIV: return naNum(a / b);
207 case OP_LT: return naNum(a < b ? 1 : 0);
208 case OP_LTE: return naNum(a <= b ? 1 : 0);
209 case OP_GT: return naNum(a > b ? 1 : 0);
210 case OP_GTE: return naNum(a >= b ? 1 : 0);
215 // When a code object comes out of the constant pool and shows up on
216 // the stack, it needs to be bound with the lexical context.
// Builds a new function object around `code`, giving it a closure
// chain whose head is the current frame's locals and whose tail is
// the enclosing function's closure chain.
217 static naRef bindFunction(struct Context* ctx, struct Frame* f, naRef code)
219 naRef next = f->func.ref.ptr.func->closure;
220 naRef closure = naNewClosure(ctx, f->locals, next);
221 naRef result = naNewFunc(ctx, code);
222 result.ref.ptr.func->closure = closure;
// Walk the closure chain looking up sym; on a hit stores the value in
// *result and returns 1.  (The loop construct and the final
// not-found return are elided in this excerpt.)
226 static int getClosure(struct naClosure* c, naRef sym, naRef* result)
229 if(naHash_get(c->namespace, sym, result)) return 1;
230 c = c->next.ref.ptr.closure;
235 // Get a local symbol, or check the closure list if it isn't there
// Raises "undefined symbol" when neither the frame's locals nor any
// closure namespace defines sym.
236 static naRef getLocal(struct Context* ctx, struct Frame* f, naRef sym)
239 if(!naHash_get(f->locals, sym, &result)) {
240 naRef c = f->func.ref.ptr.func->closure;
241 if(!getClosure(c.ref.ptr.closure, sym, &result))
242 ERR(ctx, "undefined symbol");
// Recursively try to overwrite an *existing* binding of sym anywhere
// in the closure chain (naHash_tryset only updates, never inserts).
// Returns 1 on success, 0 when no chain level defines sym.
247 static int setClosure(naRef closure, naRef sym, naRef val)
249 struct naClosure* c = closure.ref.ptr.closure;
250 if(c == 0) { return 0; }
251 else if(naHash_tryset(c->namespace, sym, val)) { return 1; }
252 else { return setClosure(c->next, sym, val); }
// Assignment semantics: update an existing binding if one exists in
// the locals or any enclosing closure; otherwise create a fresh
// binding in the current frame's locals.
255 static naRef setLocal(struct Frame* f, naRef sym, naRef val)
257 // Try the locals first, if not already there try the closures in
258 // order. Finally put it in the locals if nothing matched.
259 if(!naHash_tryset(f->locals, sym, val))
260 if(!setClosure(f->func.ref.ptr.func->closure, sym, val))
261 naHash_set(f->locals, sym, val);
265 // Recursively descend into the parents lists
// Field lookup with prototype-style inheritance: check obj's own hash
// first, then each entry of its "parents" vector, depth-first.
// Returns nonzero on a hit, filling *result.
// NOTE(review): no recursion-depth or cycle guard is visible here —
// a cyclic parents graph would recurse without bound; confirm whether
// the full source protects against that.
266 static int getMember(struct Context* ctx, naRef obj, naRef fld, naRef* result)
269 if(!IS_HASH(obj)) ERR(ctx, "non-objects have no members");
270 if(naHash_get(obj, fld, result)) {
272 } else if(naHash_get(obj, ctx->parentsRef, &p)) {
274 if(!IS_VEC(p)) ERR(ctx, "parents field not vector");
275 for(i=0; i<p.ref.ptr.vec->size; i++)
276 if(getMember(ctx, p.ref.ptr.vec->array[i], fld, result))
// Operand-stack primitives.  All bounds violations are reported via
// ERR (longjmp), so callers never see an invalid index.
282 static void PUSH(struct Context* ctx, naRef r)
284 if(ctx->opTop >= MAX_STACK_DEPTH) ERR(ctx, "stack overflow");
285 ctx->opStack[ctx->opTop++] = r;
// Pop and return the top of the operand stack.
288 static naRef POP(struct Context* ctx)
290 if(ctx->opTop == 0) ERR(ctx, "BUG: stack underflow");
291 return ctx->opStack[--ctx->opTop];
// Peek at the top of the operand stack without popping.
294 static naRef TOP(struct Context* ctx)
296 if(ctx->opTop == 0) ERR(ctx, "BUG: stack underflow");
297 return ctx->opStack[ctx->opTop-1];
// Read a big-endian 16-bit operand at the frame's ip.  (The ip
// advance and return are elided from this excerpt.)
300 static int ARG16(unsigned char* byteCode, struct Frame* f)
302 int arg = byteCode[f->ip]<<8 | byteCode[f->ip+1];
307 // OP_EACH works like a vector get, except that it leaves the vector
308 // and index on the stack, increments the index after use, and pops
309 // the arguments and pushes a nil if the index is beyond the end.
310 static void evalEach(struct Context* ctx)
// Stack layout on entry: [... vector, index(top)].
312 int idx = (int)(ctx->opStack[ctx->opTop-1].num);
313 naRef vec = ctx->opStack[ctx->opTop-2];
314 if(idx >= vec.ref.ptr.vec->size) {
// Iteration exhausted: drop vector+index (the nil push is elided
// from this excerpt).
315 ctx->opTop -= 2; // pop two values
// Bump the index in place so the next OP_EACH sees idx+1, then push
// the current element.
319 ctx->opStack[ctx->opTop-1].num = idx+1; // modify in place
320 PUSH(ctx, naVec_get(vec, idx));
// Execute one bytecode instruction of the frame f.  Dispatches on the
// opcode fetched at f->ip; most handlers pop operands, compute, and
// push the result.  Runtime errors escape via ERR/longjmp.
// NOTE(review): many case labels and the switch/locals declarations
// are elided from this excerpt; handler identifications below are
// inferred from the visible statements.
323 static void run1(struct Context* ctx, struct Frame* f, naRef code)
326 struct naCode* cd = code.ref.ptr.code;
// Frame finished when ip runs off the end of the bytecode.
329 if(f->ip >= cd->nBytes) {
330 DBG(printf("Done with frame %d\n", ctx->fTop-1);)
337 op = cd->byteCode[f->ip++];
338 DBG(printf("Stack Depth: %d\n", ctx->opTop));
339 DBG(printOpDEBUG(f->ip-1, op));
// Duplicate the top of stack.
345 PUSH(ctx, ctx->opStack[ctx->opTop-1]);
// Exchange the top two stack values.
348 a = POP(ctx); b = POP(ctx);
349 PUSH(ctx, a); PUSH(ctx, b);
// Binary numeric ops: note operand order (b is the earlier-pushed,
// left-hand operand).
351 case OP_PLUS: case OP_MUL: case OP_DIV: case OP_MINUS:
352 case OP_LT: case OP_LTE: case OP_GT: case OP_GTE:
353 a = POP(ctx); b = POP(ctx);
354 PUSH(ctx, evalBinaryNumeric(ctx, op, b, a));
356 case OP_EQ: case OP_NEQ:
357 a = POP(ctx); b = POP(ctx);
358 PUSH(ctx, evalEquality(op, b, a));
360 case OP_AND: case OP_OR:
361 a = POP(ctx); b = POP(ctx);
362 PUSH(ctx, evalAndOr(ctx, op, a, b));
// String concatenation.
365 // stringify can call the GC, so don't take stuff off the stack!
366 if(ctx->opTop <= 1) ERR(ctx, "BUG: stack underflow");
367 a = stringify(ctx, ctx->opStack[ctx->opTop-1]);
368 b = stringify(ctx, ctx->opStack[ctx->opTop-2]);
369 c = naStr_concat(naNewString(ctx), b, a);
// Unary negation and logical not.
375 PUSH(ctx, naNum(-numify(ctx, a)));
379 PUSH(ctx, naNum(boolify(ctx, a) ? 0 : 1));
// Load a constant; code objects from the pool must be bound to the
// current lexical scope (see bindFunction).
382 a = cd->constants[ARG16(cd->byteCode, f)];
383 if(IS_CODE(a)) a = bindFunction(ctx, f, a);
// Literal vector/hash construction and append/insert.
396 PUSH(ctx, naNewVector(ctx));
399 b = POP(ctx); a = TOP(ctx);
403 PUSH(ctx, naNewHash(ctx));
406 c = POP(ctx); b = POP(ctx); a = TOP(ctx); // a,b,c: hash, key, val
// Symbol load/store through locals + closures.
410 a = getLocal(ctx, f, POP(ctx));
414 a = POP(ctx); b = POP(ctx);
415 PUSH(ctx, setLocal(f, b, a));
// Member read (with parents traversal) and member write.
418 a = POP(ctx); b = POP(ctx);
419 if(!getMember(ctx, b, a, &c))
420 ERR(ctx, "no such member");
424 c = POP(ctx); b = POP(ctx); a = POP(ctx); // a,b,c: hash, key, val
425 if(!IS_HASH(a)) ERR(ctx, "non-objects have no members");
// Generic container (vector/hash) set and get.
430 c = POP(ctx); b = POP(ctx); a = POP(ctx); // a,b,c: box, key, val
431 containerSet(ctx, a, b, c);
435 b = POP(ctx); a = POP(ctx); // a,b: box, key
436 PUSH(ctx, containerGet(ctx, a, b));
// Unconditional jump.
439 f->ip = ARG16(cd->byteCode, f);
440 DBG(printf(" [Jump to: %d]\n", f->ip);)
// Conditional jump used by OP_EACH-style loops: consumes the top
// value only when it is nil.
443 arg = ARG16(cd->byteCode, f);
446 POP(ctx); // Pops **ONLY** if it's nil!
448 DBG(printf(" [Jump to: %d]\n", f->ip);)
// Jump-if-false: always consumes the condition.
452 arg = ARG16(cd->byteCode, f);
453 if(!boolify(ctx, POP(ctx))) {
455 DBG(printf(" [Jump to: %d]\n", f->ip);)
// Plain function call: stack holds [func, args].
459 b = POP(ctx); a = POP(ctx); // a,b = func, args
460 setupFuncall(ctx, a, b);
// Method call: the object is rooted in temps (setupFuncall can GC)
// and then bound as "me" in the callee's locals.
463 c = POP(ctx); b = POP(ctx); a = POP(ctx); // a,b,c = obj, func, args
464 naVec_append(ctx->temps, a);
465 setupFuncall(ctx, b, c);
466 naHash_set(ctx->fStack[ctx->fTop-1].locals, ctx->meRef, a);
// Return: restore the caller's operand stack base and clear the
// popped frame's args vector (the vector object itself is reused).
470 ctx->opTop = f->bp; // restore the correct stack frame!
472 ctx->fStack[ctx->fTop].args.ref.ptr.vec->size = 0;
// Record the current source line for error reporting.
476 f->line = ARG16(cd->byteCode, f);
481 case OP_MARK: // save stack state (e.g. "setjmp")
482 ctx->markStack[ctx->markTop++] = ctx->opTop;
484 case OP_UNMARK: // pop stack state set by mark
487 case OP_BREAK: // restore stack state (FOLLOW WITH JMP!)
488 ctx->opTop = ctx->markStack[--ctx->markTop];
491 ERR(ctx, "BUG: bad opcode");
// Invoke a C-implemented function for the top frame and reset the
// (reused) args vector afterward.  (The frame pop and result push are
// elided from this excerpt.)
498 static void nativeCall(struct Context* ctx, struct Frame* f, naRef ccode)
500 naCFunction fptr = ccode.ref.ptr.ccode->fptr;
501 naRef result = (*fptr)(ctx, f->args);
503 ctx->fStack[ctx->fTop].args.ref.ptr.vec->size = 0;
// Public introspection/utility API.  Bodies of naSave, naStackDepth
// and naGetError are elided from this excerpt; only the signatures
// (and naGetLine/naGetSourceFile bodies) are visible.
507 void naSave(struct Context* ctx, naRef obj)
512 int naStackDepth(struct Context* ctx)
// frame is counted from the top of the call stack (0 = innermost).
517 int naGetLine(struct Context* ctx, int frame)
519 return ctx->fStack[ctx->fTop-1-frame].line;
// Source file name recorded in the frame's code object.
// NOTE(review): no IS_CCODE guard visible — calling this on a native
// frame would misread the union; confirm callers avoid that.
522 naRef naGetSourceFile(struct Context* ctx, int frame)
524 naRef f = ctx->fStack[ctx->fTop-1-frame].func;
525 f = f.ref.ptr.func->code;
526 return f.ref.ptr.code->srcFile;
529 char* naGetError(struct Context* ctx)
// Main interpreter driver: setjmp target for ERR(), then repeatedly
// step the top frame (native or bytecode) until the call stack
// empties, resetting the GC temporaries each iteration.  Returns the
// final value left on the operand stack.
534 static naRef run(naContext ctx)
536 // Return early if an error occurred. It will be visible to the
537 // caller via naGetError().
539 if(setjmp(ctx->jumpHandle))
544 struct Frame* f = &(ctx->fStack[ctx->fTop-1]);
545 naRef code = f->func.ref.ptr.func->code;
546 if(IS_CCODE(code)) nativeCall(ctx, f, code);
547 else run1(ctx, f, code);
549 ctx->temps.ref.ptr.vec->size = 0; // Reset the temporaries
550 DBG(printStackDEBUG(ctx);)
553 DBG(printStackDEBUG(ctx);)
554 return ctx->opStack[--ctx->opTop];
// Public helper: wrap a code object in a function whose closure chain
// is a single level containing the given namespace.  (The return of
// func is elided from this excerpt.)
557 naRef naBindFunction(naContext ctx, naRef code, naRef closure)
559 naRef func = naNewFunc(ctx, code);
560 func.ref.ptr.func->closure = naNewClosure(ctx, closure, naNil());
// Public call entry from C: roots the C-side arguments in temps
// (allocation below may trigger GC before they are reachable from the
// Nasal stack), defaults nil args/locals, wraps bare code objects,
// binds "me", resets the stacks, and pushes the initial frame.
// NOTE(review): this definition continues past the end of the visible
// excerpt (presumably into run()); do not assume the tail.
564 naRef naCall(naContext ctx, naRef func, naRef args, naRef obj, naRef locals)
566 // We might have to allocate objects, which can call the GC. But
567 // the call isn't on the Nasal stack yet, so the GC won't find our
568 // C-space arguments.
569 naVec_append(ctx->temps, func);
570 naVec_append(ctx->temps, args);
571 naVec_append(ctx->temps, obj);
572 naVec_append(ctx->temps, locals);
// Default an omitted args vector / locals hash (nil checks elided).
575 args = naNewVector(ctx);
577 locals = naNewHash(ctx);
579 // Generate a noop closure for bare code objects
581 func = naNewFunc(ctx, code);
582 func.ref.ptr.func->closure = naNewClosure(ctx, locals, naNil());
// Bind "me" when an object was supplied.
585 naHash_set(locals, ctx->meRef, obj);
// Fresh stacks for the new top-level invocation, then install the
// caller-provided locals into the frame setupFuncall just pushed.
587 ctx->fTop = ctx->opTop = ctx->markTop = 0;
588 setupFuncall(ctx, func, args);
589 ctx->fStack[ctx->fTop-1].locals = locals;