////////////////////////////////////////////////////////////////////////
// Debugging stuff. ////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////
+//#define DEBUG_NASAL
#if !defined(DEBUG_NASAL)
# define DBG(expr) /* noop */
#else
#endif
char* opStringDEBUG(int op);
void printOpDEBUG(int ip, int op);
-void printRefDEBUG(naRef r);
void printStackDEBUG(struct Context* ctx);
////////////////////////////////////////////////////////////////////////
-// FIXME: need to store a list of all contexts
-struct Context globalContext;
+struct Globals* globals = 0;
+
+static naRef bindFunction(struct Context* ctx, struct Frame* f, naRef code);
#define ERR(c, msg) naRuntimeError((c),(msg))
void naRuntimeError(struct Context* c, char* msg)
longjmp(c->jumpHandle, 1);
}
-int boolify(struct Context* ctx, naRef r)
+static int boolify(struct Context* ctx, naRef r)
{
- if(IS_NIL(r)) return 0;
if(IS_NUM(r)) return r.num != 0;
- if(IS_STR(r)) return 1;
+ if(IS_NIL(r)) return 0;
+ if(IS_STR(r)) {
+ double d;
+ if(naStr_len(r) == 0) return 0;
+ if(naStr_tonum(r, &d)) return d != 0;
+ else return 1;
+ }
ERR(ctx, "non-scalar used in boolean context");
return 0;
}
// Validate a vector index, supporting negative indices that count from
// the end of the vector.  Raises a runtime error when out of bounds;
// returns the normalized non-negative index.
static int checkVec(struct Context* ctx, naRef vec, naRef idx)
{
    int i = (int)numify(ctx, idx);
    if(i < 0) i += naVec_size(vec);
    if(i < 0 || i >= naVec_size(vec)) ERR(ctx, "vector index out of bounds");
    return i;
}
+
+static int checkStr(struct Context* ctx, naRef str, naRef idx)
+{
+ int i = (int)numify(ctx, idx);
+ if(i < 0) i += naStr_len(str);
+ if(i < 0 || i >= naStr_len(str)) ERR(ctx, "string index out of bounds");
return i;
}
ERR(ctx, "undefined value in container");
} else if(IS_VEC(box)) {
result = naVec_get(box, checkVec(ctx, box, key));
+ } else if(IS_STR(box)) {
+ result = naNum((unsigned char)naStr_data(box)[checkStr(ctx, box, key)]);
} else {
ERR(ctx, "extract from non-container");
}
if(!IS_SCALAR(key)) ERR(ctx, "container index not scalar");
else if(IS_HASH(box)) naHash_set(box, key, val);
else if(IS_VEC(box)) naVec_set(box, checkVec(ctx, box, key), val);
- else ERR(ctx, "insert into non-container");
+ else if(IS_STR(box)) {
+ if(box.ref.ptr.str->hashcode)
+ ERR(ctx, "cannot change immutable string");
+ naStr_data(box)[checkStr(ctx, box, key)] = (char)numify(ctx, val);
+ } else ERR(ctx, "insert into non-container");
+}
+
+static void initTemps(struct Context* c)
+{
+ c->tempsz = 4;
+ c->temps = naAlloc(c->tempsz * sizeof(struct naObj*));
+ c->ntemps = 0;
}
static void initContext(struct Context* c)
{
int i;
+ c->fTop = c->opTop = c->markTop = 0;
for(i=0; i<NUM_NASAL_TYPES; i++)
- naGC_init(&(c->pools[i]), i);
+ c->nfree[i] = 0;
- c->fTop = c->opTop = c->markTop = 0;
+ if(c->tempsz > 32) {
+ naFree(c->temps);
+ initTemps(c);
+ }
- naBZero(c->fStack, MAX_RECURSION * sizeof(struct Frame));
- naBZero(c->opStack, MAX_STACK_DEPTH * sizeof(naRef));
+ c->callParent = 0;
+ c->callChild = 0;
+ c->dieArg = naNil();
+ c->error = 0;
+}
- // Make sure the args vectors (which are static with the context)
- // are initialized to nil.
- for(i=0; i<MAX_RECURSION; i++)
- c->fStack[i].args = naNil();
+static void initGlobals()
+{
+ int i;
+ struct Context* c;
+ globals = (struct Globals*)naAlloc(sizeof(struct Globals));
+ naBZero(globals, sizeof(struct Globals));
- c->argPool = naNewVector(c);
+ globals->sem = naNewSem();
+ globals->lock = naNewLock();
- // Note we can't use naNewVector() for this; it requires that
- // temps exist first.
- c->temps = naObj(T_VEC, naGC_get(&(c->pools[T_VEC])));
+ globals->allocCount = 256; // reasonable starting value
+ for(i=0; i<NUM_NASAL_TYPES; i++)
+ naGC_init(&(globals->pools[i]), i);
+ globals->deadsz = 256;
+ globals->ndead = 0;
+ globals->deadBlocks = naAlloc(sizeof(void*) * globals->deadsz);
+
+ // Initialize a single context
+ globals->freeContexts = 0;
+ globals->allContexts = 0;
+ c = naNewContext();
- c->save = naNil();
+ globals->symbols = naNewHash(c);
+ globals->save = naNewVector(c);
// Cache pre-calculated "me", "arg" and "parents" scalars
- c->meRef = naStr_fromdata(naNewString(c), "me", 2);
- c->argRef = naStr_fromdata(naNewString(c), "arg", 3);
- c->parentsRef = naStr_fromdata(naNewString(c), "parents", 7);
+ globals->meRef = naInternSymbol(naStr_fromdata(naNewString(c), "me", 2));
+ globals->argRef = naInternSymbol(naStr_fromdata(naNewString(c), "arg", 3));
+ globals->parentsRef = naInternSymbol(naStr_fromdata(naNewString(c), "parents", 7));
+
+ naFreeContext(c);
}
struct Context* naNewContext()
{
- // FIXME: need more than one!
- struct Context* c = &globalContext;
- initContext(c);
+ struct Context* c;
+ if(globals == 0)
+ initGlobals();
+
+ LOCK();
+ c = globals->freeContexts;
+ if(c) {
+ globals->freeContexts = c->nextFree;
+ c->nextFree = 0;
+ UNLOCK();
+ initContext(c);
+ } else {
+ UNLOCK();
+ c = (struct Context*)naAlloc(sizeof(struct Context));
+ initTemps(c);
+ initContext(c);
+ LOCK();
+ c->nextAll = globals->allContexts;
+ c->nextFree = 0;
+ globals->allContexts = c;
+ UNLOCK();
+ }
return c;
}
-void naGarbageCollect()
+void naFreeContext(struct Context* c)
{
- int i;
- struct Context* c = &globalContext; // FIXME: more than one!
-
- for(i=0; i < c->fTop; i++) {
- naGC_mark(c->fStack[i].func);
- naGC_mark(c->fStack[i].locals);
- }
- for(i=0; i < MAX_RECURSION; i++)
- naGC_mark(c->fStack[i].args); // collect *all* the argument lists
- for(i=0; i < c->opTop; i++)
- naGC_mark(c->opStack[i]);
-
- naGC_mark(c->argPool);
- naGC_mark(c->temps);
- naGC_mark(c->save);
+ c->ntemps = 0;
+ LOCK();
+ c->nextFree = globals->freeContexts;
+ globals->freeContexts = c;
+ UNLOCK();
+}
// Push a value onto the operand stack, raising a runtime error on
// overflow.  Assumes a local named "ctx" is in scope.
#define PUSH(r) do { \
    if(ctx->opTop >= MAX_STACK_DEPTH) ERR(ctx, "stack overflow"); \
    ctx->opStack[ctx->opTop++] = r; \
    } while(0)
- // Finally collect all the freed objects
- for(i=0; i<NUM_NASAL_TYPES; i++)
- naGC_reap(&(c->pools[i]));
+static void setupArgs(naContext ctx, struct Frame* f, naRef* args, int nargs)
+{
+ int i;
+ struct naCode* c = f->func.ref.ptr.func->code.ref.ptr.code;
+
+ // Set the argument symbols, and put any remaining args in a vector
+ if(nargs < c->nArgs) ERR(ctx, "not enough arguments to function call");
+ for(i=0; i<c->nArgs; i++)
+ naHash_newsym(f->locals.ref.ptr.hash,
+ &c->constants[c->argSyms[i]], &args[i]);
+ args += c->nArgs;
+ nargs -= c->nArgs;
+ for(i=0; i<c->nOptArgs; i++, nargs--) {
+ naRef val = nargs > 0 ? args[i] : c->constants[c->optArgVals[i]];
+ if(IS_CODE(val))
+ val = bindFunction(ctx, &ctx->fStack[ctx->fTop-2], val);
+ naHash_newsym(f->locals.ref.ptr.hash, &c->constants[c->optArgSyms[i]],
+ &val);
+ }
+ args += c->nOptArgs;
+ if(c->needArgVector || nargs > 0) {
+ naRef argsv = naNewVector(ctx);
+ naVec_setsize(argsv, nargs > 0 ? nargs : 0);
+ for(i=0; i<nargs; i++)
+ argsv.ref.ptr.vec->rec->array[i] = *args++;
+ naHash_newsym(f->locals.ref.ptr.hash, &c->restArgSym, &argsv);
+ }
}
-void setupFuncall(struct Context* ctx, naRef func, naRef args)
+struct Frame* setupFuncall(struct Context* ctx, int nargs, int mcall, int tail)
{
+ naRef *frame;
struct Frame* f;
+
+ DBG(printf("setupFuncall(nargs:%d, mcall:%d)\n", nargs, mcall);)
+
+ frame = &ctx->opStack[ctx->opTop - nargs - 1];
+ if(!IS_FUNC(frame[0]))
+ ERR(ctx, "function/method call invoked on uncallable object");
+
+ // Just do native calls right here, and don't touch the stack
+ // frames; return the current one (unless it's a tail call!).
+ if(frame[0].ref.ptr.func->code.ref.ptr.obj->type == T_CCODE) {
+ naRef obj = mcall ? frame[-1] : naNil();
+ naCFunction fp = frame[0].ref.ptr.func->code.ref.ptr.ccode->fptr;
+ naRef result = (*fp)(ctx, obj, nargs, frame + 1);
+ ctx->opTop -= nargs + 1 + mcall;
+ PUSH(result);
+ return &(ctx->fStack[ctx->fTop-1]);
+ }
+
+ if(tail) ctx->fTop--;
+ else if(ctx->fTop >= MAX_RECURSION) ERR(ctx, "call stack overflow");
+
+ // Note: assign nil first, otherwise the naNew() can cause a GC,
+ // which will now (after fTop++) see the *old* reference as a
+ // markable value!
f = &(ctx->fStack[ctx->fTop++]);
- f->func = func;
+ f->locals = f->func = naNil();
+ f->locals = naNewHash(ctx);
+ f->func = frame[0];
f->ip = 0;
- f->bp = ctx->opTop;
- f->line = 0;
+ f->bp = ctx->opTop - (nargs + 1 + mcall);
- DBG(printf("Entering frame %d\n", ctx->fTop-1);)
+ if(mcall)
+ naHash_set(f->locals, globals->meRef, frame[-1]);
- if(!IS_REF(func))
- ERR(ctx, "function/method call invoked on uncallable object");
+ setupArgs(ctx, f, frame+1, nargs);
- f->args = args;
- if(IS_CCODE(func.ref.ptr.func->code)) {
- f->locals = naNil();
- } else if(IS_CODE(func.ref.ptr.func->code)) {
- f->locals = naNewHash(ctx);
- naHash_set(f->locals, ctx->argRef, args);
- } else {
- ERR(ctx, "function/method call invoked on uncallable object");
- }
+ ctx->opTop = f->bp; // Pop the stack last, to avoid GC lossage
+ DBG(printf("Entering frame %d with %d args\n", ctx->fTop-1, nargs);)
+ return f;
}
static naRef evalAndOr(struct Context* ctx, int op, naRef ra, naRef rb)
return naNum((op==OP_EQ) ? result : !result);
}
-static naRef evalBinaryNumeric(struct Context* ctx, int op, naRef ra, naRef rb)
-{
- double a = numify(ctx, ra), b = numify(ctx, rb);
- switch(op) {
- case OP_PLUS: return naNum(a + b);
- case OP_MINUS: return naNum(a - b);
- case OP_MUL: return naNum(a * b);
- case OP_DIV: return naNum(a / b);
- case OP_LT: return naNum(a < b ? 1 : 0);
- case OP_LTE: return naNum(a <= b ? 1 : 0);
- case OP_GT: return naNum(a > b ? 1 : 0);
- case OP_GTE: return naNum(a >= b ? 1 : 0);
- }
- return naNil();
-}
-
// When a code object comes out of the constant pool and shows up on
// the stack, it needs to be bound with the lexical context.
static naRef bindFunction(struct Context* ctx, struct Frame* f, naRef code)
{
- naRef next = f->func.ref.ptr.func->closure;
- naRef closure = naNewClosure(ctx, f->locals, next);
naRef result = naNewFunc(ctx, code);
- result.ref.ptr.func->closure = closure;
+ result.ref.ptr.func->namespace = f->locals;
+ result.ref.ptr.func->next = f->func;
return result;
}
-static int getClosure(struct naClosure* c, naRef sym, naRef* result)
+static int getClosure(struct naFunc* c, naRef sym, naRef* result)
{
while(c) {
if(naHash_get(c->namespace, sym, result)) return 1;
- c = c->next.ref.ptr.closure;
+ c = c->next.ref.ptr.func;
}
return 0;
}
-// Get a local symbol, or check the closure list if it isn't there
-static naRef getLocal(struct Context* ctx, struct Frame* f, naRef sym)
+static naRef getLocal2(struct Context* ctx, struct Frame* f, naRef sym)
{
naRef result;
- if(!naHash_get(f->locals, sym, &result)) {
- naRef c = f->func.ref.ptr.func->closure;
- if(!getClosure(c.ref.ptr.closure, sym, &result))
+ if(!naHash_get(f->locals, sym, &result))
+ if(!getClosure(f->func.ref.ptr.func, sym, &result))
ERR(ctx, "undefined symbol");
- }
return result;
}
-static int setClosure(naRef closure, naRef sym, naRef val)
+static void getLocal(struct Context* ctx, struct Frame* f,
+ naRef* sym, naRef* out)
{
- struct naClosure* c = closure.ref.ptr.closure;
+ struct naFunc* func;
+ struct naStr* str = sym->ref.ptr.str;
+ if(naHash_sym(f->locals.ref.ptr.hash, str, out))
+ return;
+ func = f->func.ref.ptr.func;
+ while(func && func->namespace.ref.ptr.hash) {
+ if(naHash_sym(func->namespace.ref.ptr.hash, str, out))
+ return;
+ func = func->next.ref.ptr.func;
+ }
+ // Now do it again using the more general naHash_get(). This will
+ // only be necessary if something has created the value in the
+ // namespace using the more generic hash syntax
+ // (e.g. namespace["symbol"] and not namespace.symbol).
+ *out = getLocal2(ctx, f, *sym);
+}
+
+static int setClosure(naRef func, naRef sym, naRef val)
+{
+ struct naFunc* c = func.ref.ptr.func;
if(c == 0) { return 0; }
else if(naHash_tryset(c->namespace, sym, val)) { return 1; }
else { return setClosure(c->next, sym, val); }
}
-static naRef setLocal(struct Frame* f, naRef sym, naRef val)
+static naRef setSymbol(struct Frame* f, naRef sym, naRef val)
{
// Try the locals first, if not already there try the closures in
// order. Finally put it in the locals if nothing matched.
if(!naHash_tryset(f->locals, sym, val))
- if(!setClosure(f->func.ref.ptr.func->closure, sym, val))
+ if(!setClosure(f->func, sym, val))
naHash_set(f->locals, sym, val);
return val;
}
// Recursively descend into the parents lists
-static int getMember(struct Context* ctx, naRef obj, naRef fld, naRef* result)
+static int getMember(struct Context* ctx, naRef obj, naRef fld,
+ naRef* result, int count)
{
naRef p;
+ if(--count < 0) ERR(ctx, "too many parents");
if(!IS_HASH(obj)) ERR(ctx, "non-objects have no members");
if(naHash_get(obj, fld, result)) {
return 1;
- } else if(naHash_get(obj, ctx->parentsRef, &p)) {
- int i;
- if(!IS_VEC(p)) ERR(ctx, "parents field not vector");
- for(i=0; i<p.ref.ptr.vec->size; i++)
- if(getMember(ctx, p.ref.ptr.vec->array[i], fld, result))
- return 1;
+ } else if(naHash_get(obj, globals->parentsRef, &p)) {
+ if(IS_VEC(p)) {
+ int i;
+ struct VecRec* v = p.ref.ptr.vec->rec;
+ for(i=0; i<v->size; i++)
+ if(getMember(ctx, v->array[i], fld, result, count))
+ return 1;
+ } else
+ ERR(ctx, "parents field not vector");
}
return 0;
}
-static void PUSH(struct Context* ctx, naRef r)
-{
- if(ctx->opTop >= MAX_STACK_DEPTH) ERR(ctx, "stack overflow");
- ctx->opStack[ctx->opTop++] = r;
-}
-
-static naRef POP(struct Context* ctx)
-{
- if(ctx->opTop == 0) ERR(ctx, "BUG: stack underflow");
- return ctx->opStack[--ctx->opTop];
-}
-
-static naRef TOP(struct Context* ctx)
-{
- if(ctx->opTop == 0) ERR(ctx, "BUG: stack underflow");
- return ctx->opStack[ctx->opTop-1];
-}
-
-static int ARG16(unsigned char* byteCode, struct Frame* f)
-{
- int arg = byteCode[f->ip]<<8 | byteCode[f->ip+1];
- f->ip += 2;
- return arg;
-}
-
// OP_EACH works like a vector get, except that it leaves the vector
// and index on the stack, increments the index after use, and pops
// the arguments and pushes a nil if the index is beyond the end.
-static void evalEach(struct Context* ctx)
+static void evalEach(struct Context* ctx, int useIndex)
{
int idx = (int)(ctx->opStack[ctx->opTop-1].num);
naRef vec = ctx->opStack[ctx->opTop-2];
- if(idx >= vec.ref.ptr.vec->size) {
+ if(!IS_VEC(vec)) naRuntimeError(ctx, "foreach enumeration of non-vector");
+ if(!vec.ref.ptr.vec->rec || idx >= vec.ref.ptr.vec->rec->size) {
ctx->opTop -= 2; // pop two values
- PUSH(ctx, naNil());
+ PUSH(naNil());
return;
}
ctx->opStack[ctx->opTop-1].num = idx+1; // modify in place
- PUSH(ctx, naVec_get(vec, idx));
+ PUSH(useIndex ? naNum(idx) : naVec_get(vec, idx));
}
-static void run1(struct Context* ctx, struct Frame* f, naRef code)
+#define ARG() cd->byteCode[f->ip++]
+#define CONSTARG() cd->constants[ARG()]
+#define POP() ctx->opStack[--ctx->opTop]
+#define STK(n) (ctx->opStack[ctx->opTop-(n)])
+#define FIXFRAME() f = &(ctx->fStack[ctx->fTop-1]); \
+ cd = f->func.ref.ptr.func->code.ref.ptr.code;
+static naRef run(struct Context* ctx)
{
- naRef a, b, c;
- struct naCode* cd = code.ref.ptr.code;
+ struct Frame* f;
+ struct naCode* cd;
int op, arg;
+ naRef a, b, c;
- if(f->ip >= cd->nBytes) {
- DBG(printf("Done with frame %d\n", ctx->fTop-1);)
- ctx->fTop--;
- if(ctx->fTop <= 0)
- ctx->done = 1;
- return;
- }
-
- op = cd->byteCode[f->ip++];
- DBG(printf("Stack Depth: %d\n", ctx->opTop));
- DBG(printOpDEBUG(f->ip-1, op));
- switch(op) {
- case OP_POP:
- POP(ctx);
- break;
- case OP_DUP:
- PUSH(ctx, ctx->opStack[ctx->opTop-1]);
- break;
- case OP_XCHG:
- a = POP(ctx); b = POP(ctx);
- PUSH(ctx, a); PUSH(ctx, b);
- break;
- case OP_PLUS: case OP_MUL: case OP_DIV: case OP_MINUS:
- case OP_LT: case OP_LTE: case OP_GT: case OP_GTE:
- a = POP(ctx); b = POP(ctx);
- PUSH(ctx, evalBinaryNumeric(ctx, op, b, a));
- break;
- case OP_EQ: case OP_NEQ:
- a = POP(ctx); b = POP(ctx);
- PUSH(ctx, evalEquality(op, b, a));
- break;
- case OP_AND: case OP_OR:
- a = POP(ctx); b = POP(ctx);
- PUSH(ctx, evalAndOr(ctx, op, a, b));
- break;
- case OP_CAT:
- a = stringify(ctx, POP(ctx)); b = stringify(ctx, POP(ctx));
- c = naStr_concat(naNewString(ctx), b, a);
- PUSH(ctx, c);
- break;
- case OP_NEG:
- a = POP(ctx);
- PUSH(ctx, naNum(-numify(ctx, a)));
- break;
- case OP_NOT:
- a = POP(ctx);
- PUSH(ctx, naNum(boolify(ctx, a) ? 0 : 1));
- break;
- case OP_PUSHCONST:
- a = cd->constants[ARG16(cd->byteCode, f)];
- if(IS_CODE(a)) a = bindFunction(ctx, f, a);
- PUSH(ctx, a);
- break;
- case OP_PUSHONE:
- PUSH(ctx, naNum(1));
- break;
- case OP_PUSHZERO:
- PUSH(ctx, naNum(0));
- break;
- case OP_PUSHNIL:
- PUSH(ctx, naNil());
- break;
- case OP_NEWVEC:
- PUSH(ctx, naNewVector(ctx));
- break;
- case OP_VAPPEND:
- b = POP(ctx); a = TOP(ctx);
- naVec_append(a, b);
- break;
- case OP_NEWHASH:
- PUSH(ctx, naNewHash(ctx));
- break;
- case OP_HAPPEND:
- c = POP(ctx); b = POP(ctx); a = TOP(ctx); // a,b,c: hash, key, val
- naHash_set(a, b, c);
- break;
- case OP_LOCAL:
- a = getLocal(ctx, f, POP(ctx));
- PUSH(ctx, a);
- break;
- case OP_SETLOCAL:
- a = POP(ctx); b = POP(ctx);
- PUSH(ctx, setLocal(f, b, a));
- break;
- case OP_MEMBER:
- a = POP(ctx); b = POP(ctx);
- if(!getMember(ctx, b, a, &c))
- ERR(ctx, "no such member");
- PUSH(ctx, c);
- break;
- case OP_SETMEMBER:
- c = POP(ctx); b = POP(ctx); a = POP(ctx); // a,b,c: hash, key, val
- if(!IS_HASH(a)) ERR(ctx, "non-objects have no members");
- naHash_set(a, b, c);
- PUSH(ctx, c);
- break;
- case OP_INSERT:
- c = POP(ctx); b = POP(ctx); a = POP(ctx); // a,b,c: box, key, val
- containerSet(ctx, a, b, c);
- PUSH(ctx, c);
- break;
- case OP_EXTRACT:
- b = POP(ctx); a = POP(ctx); // a,b: box, key
- PUSH(ctx, containerGet(ctx, a, b));
- break;
- case OP_JMP:
- f->ip = ARG16(cd->byteCode, f);
- DBG(printf(" [Jump to: %d]\n", f->ip);)
- break;
- case OP_JIFNIL:
- arg = ARG16(cd->byteCode, f);
- a = TOP(ctx);
- if(IS_NIL(a)) {
- POP(ctx); // Pops **ONLY** if it's nil!
- f->ip = arg;
+ FIXFRAME();
+
+ while(1) {
+ op = cd->byteCode[f->ip++];
+ DBG(printf("Stack Depth: %d\n", ctx->opTop));
+ DBG(printOpDEBUG(f->ip-1, op));
+ switch(op) {
+ case OP_POP:
+ ctx->opTop--;
+ break;
+ case OP_DUP:
+ PUSH(ctx->opStack[ctx->opTop-1]);
+ break;
+ case OP_DUP2:
+ PUSH(ctx->opStack[ctx->opTop-2]);
+ PUSH(ctx->opStack[ctx->opTop-2]);
+ break;
+ case OP_XCHG:
+ a = STK(1); STK(1) = STK(2); STK(2) = a;
+ break;
+
+#define BINOP(expr) do { \
+ double l = IS_NUM(STK(2)) ? STK(2).num : numify(ctx, STK(2)); \
+ double r = IS_NUM(STK(1)) ? STK(1).num : numify(ctx, STK(1)); \
+ STK(2).ref.reftag = ~NASAL_REFTAG; \
+ STK(2).num = expr; \
+ ctx->opTop--; } while(0)
+
+ case OP_PLUS: BINOP(l + r); break;
+ case OP_MINUS: BINOP(l - r); break;
+ case OP_MUL: BINOP(l * r); break;
+ case OP_DIV: BINOP(l / r); break;
+ case OP_LT: BINOP(l < r ? 1 : 0); break;
+ case OP_LTE: BINOP(l <= r ? 1 : 0); break;
+ case OP_GT: BINOP(l > r ? 1 : 0); break;
+ case OP_GTE: BINOP(l >= r ? 1 : 0); break;
+
+#undef BINOP
+
+ case OP_EQ: case OP_NEQ:
+ STK(2) = evalEquality(op, STK(2), STK(1));
+ ctx->opTop--;
+ break;
+ case OP_AND: case OP_OR:
+ STK(2) = evalAndOr(ctx, op, STK(2), STK(1));
+ ctx->opTop--;
+ break;
+ case OP_CAT:
+ // stringify can call the GC, so don't take stuff of the stack!
+ a = stringify(ctx, ctx->opStack[ctx->opTop-1]);
+ b = stringify(ctx, ctx->opStack[ctx->opTop-2]);
+ c = naStr_concat(naNewString(ctx), b, a);
+ ctx->opTop -= 2;
+ PUSH(c);
+ break;
+ case OP_NEG:
+ STK(1) = naNum(-numify(ctx, STK(1)));
+ break;
+ case OP_NOT:
+ STK(1) = naNum(boolify(ctx, STK(1)) ? 0 : 1);
+ break;
+ case OP_PUSHCONST:
+ a = CONSTARG();
+ if(IS_CODE(a)) a = bindFunction(ctx, f, a);
+ PUSH(a);
+ break;
+ case OP_PUSHONE:
+ PUSH(naNum(1));
+ break;
+ case OP_PUSHZERO:
+ PUSH(naNum(0));
+ break;
+ case OP_PUSHNIL:
+ PUSH(naNil());
+ break;
+ case OP_NEWVEC:
+ PUSH(naNewVector(ctx));
+ break;
+ case OP_VAPPEND:
+ naVec_append(STK(2), STK(1));
+ ctx->opTop--;
+ break;
+ case OP_NEWHASH:
+ PUSH(naNewHash(ctx));
+ break;
+ case OP_HAPPEND:
+ naHash_set(STK(3), STK(2), STK(1));
+ ctx->opTop -= 2;
+ break;
+ case OP_LOCAL:
+ a = CONSTARG();
+ getLocal(ctx, f, &a, &b);
+ PUSH(b);
+ break;
+ case OP_SETSYM:
+ STK(2) = setSymbol(f, STK(2), STK(1));
+ ctx->opTop--;
+ break;
+ case OP_SETLOCAL:
+ naHash_set(f->locals, STK(2), STK(1));
+ STK(2) = STK(1); // FIXME: reverse order of arguments instead!
+ ctx->opTop--;
+ break;
+ case OP_MEMBER:
+ if(!getMember(ctx, STK(1), CONSTARG(), &STK(1), 64))
+ ERR(ctx, "no such member");
+ break;
+ case OP_SETMEMBER:
+ if(!IS_HASH(STK(3))) ERR(ctx, "non-objects have no members");
+ naHash_set(STK(3), STK(2), STK(1));
+ STK(3) = STK(1); // FIXME: fix arg order instead
+ ctx->opTop -= 2;
+ break;
+ case OP_INSERT:
+ containerSet(ctx, STK(3), STK(2), STK(1));
+ STK(3) = STK(1); // FIXME: codegen order again...
+ ctx->opTop -= 2;
+ break;
+ case OP_EXTRACT:
+ STK(2) = containerGet(ctx, STK(2), STK(1));
+ ctx->opTop--;
+ break;
+ case OP_JMPLOOP:
+ // Identical to JMP, except for locking
+ naCheckBottleneck();
+ f->ip = cd->byteCode[f->ip];
DBG(printf(" [Jump to: %d]\n", f->ip);)
- }
- break;
- case OP_JIFNOT:
- arg = ARG16(cd->byteCode, f);
- if(!boolify(ctx, POP(ctx))) {
- f->ip = arg;
+ break;
+ case OP_JMP:
+ f->ip = cd->byteCode[f->ip];
DBG(printf(" [Jump to: %d]\n", f->ip);)
+ break;
+ case OP_JIFNIL:
+ arg = ARG();
+ if(IS_NIL(STK(1))) {
+ ctx->opTop--; // Pops **ONLY** if it's nil!
+ f->ip = arg;
+ DBG(printf(" [Jump to: %d]\n", f->ip);)
+ }
+ break;
+ case OP_JIFNOT:
+ arg = ARG();
+ if(!boolify(ctx, POP())) {
+ f->ip = arg;
+ DBG(printf(" [Jump to: %d]\n", f->ip);)
+ }
+ break;
+ case OP_FCALL:
+ f = setupFuncall(ctx, ARG(), 0, 0);
+ cd = f->func.ref.ptr.func->code.ref.ptr.code;
+ break;
+ case OP_FTAIL:
+ f = setupFuncall(ctx, ARG(), 0, 1);
+ cd = f->func.ref.ptr.func->code.ref.ptr.code;
+ break;
+ case OP_MCALL:
+ f = setupFuncall(ctx, ARG(), 1, 0);
+ cd = f->func.ref.ptr.func->code.ref.ptr.code;
+ break;
+ case OP_MTAIL:
+ f = setupFuncall(ctx, ARG(), 1, 1);
+ cd = f->func.ref.ptr.func->code.ref.ptr.code;
+ break;
+ case OP_RETURN:
+ a = STK(1);
+ if(--ctx->fTop <= 0) return a;
+ ctx->opTop = f->bp + 1; // restore the correct opstack frame!
+ STK(1) = a;
+ FIXFRAME();
+ break;
+ case OP_EACH:
+ evalEach(ctx, 0);
+ break;
+ case OP_INDEX:
+ evalEach(ctx, 1);
+ break;
+ case OP_MARK: // save stack state (e.g. "setjmp")
+ if(ctx->markTop >= MAX_MARK_DEPTH)
+ naRuntimeError(ctx, "mark stack overflow");
+ ctx->markStack[ctx->markTop++] = ctx->opTop;
+ break;
+ case OP_UNMARK: // pop stack state set by mark
+ ctx->markTop--;
+ break;
+ case OP_BREAK: // restore stack state (FOLLOW WITH JMP!)
+ ctx->opTop = ctx->markStack[--ctx->markTop];
+ break;
+ default:
+ ERR(ctx, "BUG: bad opcode");
}
- break;
- case OP_FCALL:
- b = POP(ctx); a = POP(ctx); // a,b = func, args
- setupFuncall(ctx, a, b);
- break;
- case OP_MCALL:
- c = POP(ctx); b = POP(ctx); a = POP(ctx); // a,b,c = obj, func, args
- setupFuncall(ctx, b, c);
- naHash_set(ctx->fStack[ctx->fTop-1].locals, ctx->meRef, a);
- break;
- case OP_RETURN:
- a = POP(ctx);
- ctx->opTop = f->bp; // restore the correct stack frame!
- ctx->fTop--;
- ctx->fStack[ctx->fTop].args.ref.ptr.vec->size = 0;
- naVec_append(ctx->argPool, ctx->fStack[ctx->fTop].args);
- PUSH(ctx, a);
- break;
- case OP_LINE:
- f->line = ARG16(cd->byteCode, f);
- break;
- case OP_EACH:
- evalEach(ctx);
- break;
- case OP_MARK: // save stack state (e.g. "setjmp")
- ctx->markStack[ctx->markTop++] = ctx->opTop;
- break;
- case OP_UNMARK: // pop stack state set by mark
- ctx->markTop--;
- break;
- case OP_BREAK: // restore stack state (FOLLOW WITH JMP!)
- ctx->opTop = ctx->markStack[--ctx->markTop];
- break;
- case OP_NEWARGS: // push a new function arg vector
- PUSH(ctx, (naVec_size(ctx->argPool) ?
- naVec_removelast(ctx->argPool) : naNewVector(ctx)));
- break;
- default:
- ERR(ctx, "BUG: bad opcode");
+ ctx->ntemps = 0; // reset GC temp vector
+ DBG(printStackDEBUG(ctx);)
}
-
- if(ctx->fTop <= 0)
- ctx->done = 1;
-}
-
-static void nativeCall(struct Context* ctx, struct Frame* f, naRef ccode)
-{
- naCFunction fptr = ccode.ref.ptr.ccode->fptr;
- naRef result = (*fptr)(ctx, f->args);
- ctx->fTop--;
- ctx->fStack[ctx->fTop].args.ref.ptr.vec->size = 0;
- PUSH(ctx, result);
+ return naNil(); // unreachable
}
+#undef POP
+#undef CONSTARG
+#undef STK
+#undef FIXFRAME
// Protect an object from garbage collection for the lifetime of the
// interpreter by appending it to the global save vector.
void naSave(struct Context* ctx, naRef obj)
{
    naVec_append(globals->save, obj);
}
+// FIXME: handle ctx->callParent
int naStackDepth(struct Context* ctx)
{
return ctx->fTop;
}
+// FIXME: handle ctx->callParent
int naGetLine(struct Context* ctx, int frame)
{
- return ctx->fStack[ctx->fTop-1-frame].line;
+ struct Frame* f = &ctx->fStack[ctx->fTop-1-frame];
+ naRef func = f->func;
+ int ip = f->ip;
+ if(IS_FUNC(func) && IS_CODE(func.ref.ptr.func->code)) {
+ struct naCode* c = func.ref.ptr.func->code.ref.ptr.code;
+ unsigned short* p = c->lineIps + c->nLines - 2;
+ while(p >= c->lineIps && p[0] > ip)
+ p -= 2;
+ return p[1];
+ }
+ return -1;
}
+// FIXME: handle ctx->callParent
naRef naGetSourceFile(struct Context* ctx, int frame)
{
naRef f = ctx->fStack[ctx->fTop-1-frame].func;
// Current error string for the context: a string passed to die() takes
// precedence over the internal runtime error message.
char* naGetError(struct Context* ctx)
{
    if(IS_STR(ctx->dieArg))
        return (char*)ctx->dieArg.ref.ptr.str->data;
    return ctx->error;
}
-static naRef run(naContext ctx)
+naRef naBindFunction(naContext ctx, naRef code, naRef closure)
{
- // Return early if an error occurred. It will be visible to the
- // caller via naGetError().
- if(setjmp(ctx->jumpHandle))
- return naNil();
-
- ctx->done = 0;
- while(!ctx->done) {
- struct Frame* f = &(ctx->fStack[ctx->fTop-1]);
- naRef code = f->func.ref.ptr.func->code;
- if(IS_CCODE(code)) nativeCall(ctx, f, code);
- else run1(ctx, f, code);
-
- ctx->temps.ref.ptr.vec->size = 0; // Reset the temporaries
- DBG(printStackDEBUG(ctx);)
- }
-
- DBG(printStackDEBUG(ctx);)
- return ctx->opStack[--ctx->opTop];
+ naRef func = naNewFunc(ctx, code);
+ func.ref.ptr.func->namespace = closure;
+ func.ref.ptr.func->next = naNil();
+ return func;
}
-naRef naBindFunction(naContext ctx, naRef code, naRef closure)
+naRef naBindToContext(naContext ctx, naRef code)
{
naRef func = naNewFunc(ctx, code);
- func.ref.ptr.func->closure = naNewClosure(ctx, closure, naNil());
+ struct Frame* f = &ctx->fStack[ctx->fTop-1];
+ func.ref.ptr.func->namespace = f->locals;
+ func.ref.ptr.func->next = f->func;
return func;
}
-naRef naCall(naContext ctx, naRef func, naRef args, naRef obj, naRef locals)
+naRef naCall(naContext ctx, naRef func, int argc, naRef* args,
+ naRef obj, naRef locals)
{
+ int i;
+ naRef result;
+ if(!ctx->callParent) naModLock(ctx);
+
// We might have to allocate objects, which can call the GC. But
// the call isn't on the Nasal stack yet, so the GC won't find our
// C-space arguments.
- naVec_append(ctx->temps, func);
- naVec_append(ctx->temps, args);
- naVec_append(ctx->temps, obj);
- naVec_append(ctx->temps, locals);
+ naTempSave(ctx, func);
+ for(i=0; i<argc; i++)
+ naTempSave(ctx, args[i]);
+ naTempSave(ctx, obj);
+ naTempSave(ctx, locals);
+
+ if(IS_CCODE(func.ref.ptr.func->code)) {
+ naCFunction fp = func.ref.ptr.func->code.ref.ptr.ccode->fptr;
+ result = (*fp)(ctx, obj, argc, args);
+ if(!ctx->callParent) naModUnlock(ctx);
+ return result;
+ }
- if(IS_NIL(args))
- args = naNewVector(ctx);
if(IS_NIL(locals))
locals = naNewHash(ctx);
- if(!IS_FUNC(func)) {
- // Generate a noop closure for bare code objects
- naRef code = func;
- func = naNewFunc(ctx, code);
- func.ref.ptr.func->closure = naNewClosure(ctx, locals, naNil());
- }
+ if(!IS_FUNC(func))
+ func = naNewFunc(ctx, func); // bind bare code objects
if(!IS_NIL(obj))
- naHash_set(locals, ctx->meRef, obj);
+ naHash_set(locals, globals->meRef, obj);
+
+ ctx->dieArg = naNil();
+
+ ctx->opTop = ctx->markTop = 0;
+ ctx->fTop = 1;
+ ctx->fStack[0].func = func;
+ ctx->fStack[0].locals = locals;
+ ctx->fStack[0].ip = 0;
+ ctx->fStack[0].bp = ctx->opTop;
- ctx->fTop = ctx->opTop = ctx->markTop = 0;
- setupFuncall(ctx, func, args);
- ctx->fStack[ctx->fTop-1].locals = locals;
+ setupArgs(ctx, ctx->fStack, args, argc);
- return run(ctx);
+ // Return early if an error occurred. It will be visible to the
+ // caller via naGetError().
+ ctx->error = 0;
+ if(setjmp(ctx->jumpHandle)) {
+ if(!ctx->callParent) naModUnlock(ctx);
+ return naNil();
+ }
+
+ result = run(ctx);
+ if(!ctx->callParent) naModUnlock(ctx);
+ return result;
}