___________________________________________________________
/*
* Vector of jump to op_code handlers
*/
static const VM_OpCodeHandlers VM_OpCodeJmp[] = {
  VM_DECLARE_OPCODE_REF( 0, OP_MOVE),      /* A B    R(A) := R(B) */
  VM_DECLARE_OPCODE_REF( 1, OP_LOADK),     /* A Bx   R(A) := Kst(Bx) */
  VM_DECLARE_OPCODE_REF( 2, OP_LOADBOOL),  /* A B C  R(A) := (Bool)B; if (C) pc++ */
  VM_DECLARE_OPCODE_REF( 3, OP_LOADNIL),   /* A B    R(A) := ... := R(B) := nil */
  VM_DECLARE_OPCODE_REF( 4, OP_GETUPVAL),  /* A B    R(A) := UpValue[B] */
  VM_DECLARE_OPCODE_REF( 5, OP_GETGLOBAL), /* A Bx   R(A) := Gbl[Kst(Bx)] */
  VM_DECLARE_OPCODE_REF( 6, OP_GETTABLE),  /* A B C  R(A) := R(B)[RK(C)] */
  VM_DECLARE_OPCODE_REF( 7, OP_SETGLOBAL), /* A Bx   Gbl[Kst(Bx)] := R(A) */
  VM_DECLARE_OPCODE_REF( 8, OP_SETUPVAL),  /* A B    UpValue[B] := R(A) */
  VM_DECLARE_OPCODE_REF( 9, OP_SETTABLE),  /* A B C  R(A)[RK(B)] := RK(C) */
  VM_DECLARE_OPCODE_REF(10, OP_NEWTABLE),  /* A B C  R(A) := {} (size = B,C) */
  VM_DECLARE_OPCODE_REF(11, OP_SELF),      /* A B C  R(A+1) := R(B); R(A) := R(B)[RK(C)] */
  VM_DECLARE_OPCODE_REF(12, OP_ADD),       /* A B C  R(A) := RK(B) + RK(C) */
  VM_DECLARE_OPCODE_REF(13, OP_SUB),       /* A B C  R(A) := RK(B) - RK(C) */
  VM_DECLARE_OPCODE_REF(14, OP_MUL),       /* A B C  R(A) := RK(B) * RK(C) */
  VM_DECLARE_OPCODE_REF(15, OP_DIV),       /* A B C  R(A) := RK(B) / RK(C) */
  VM_DECLARE_OPCODE_REF(16, OP_MOD),       /* A B C  R(A) := RK(B) % RK(C) */
  VM_DECLARE_OPCODE_REF(17, OP_POW),       /* A B C  R(A) := RK(B) ^ RK(C) */
  VM_DECLARE_OPCODE_REF(18, OP_UNM),       /* A B    R(A) := -R(B) */
  VM_DECLARE_OPCODE_REF(19, OP_NOT),       /* A B    R(A) := not R(B) */
  VM_DECLARE_OPCODE_REF(20, OP_LEN),       /* A B    R(A) := length of R(B) */
  VM_DECLARE_OPCODE_REF(21, OP_CONCAT),    /* A B C  R(A) := R(B).. ... ..R(C) */
  VM_DECLARE_OPCODE_REF(22, OP_JMP),       /* sBx    pc += sBx */
  VM_DECLARE_OPCODE_REF(23, OP_EQ),        /* A B C  if ((RK(B) == RK(C)) ~= A) then pc++ */
  VM_DECLARE_OPCODE_REF(24, OP_LT),        /* A B C  if ((RK(B) <  RK(C)) ~= A) then pc++ */
  VM_DECLARE_OPCODE_REF(25, OP_LE),        /* A B C  if ((RK(B) <= RK(C)) ~= A) then pc++ */
  VM_DECLARE_OPCODE_REF(26, OP_TEST),      /* A C    if not (R(A) <=> C) then pc++ */
  VM_DECLARE_OPCODE_REF(27, OP_TESTSET),   /* A B C  if (R(B) <=> C) then R(A) := R(B) else pc++ */
  VM_DECLARE_OPCODE_REF(28, OP_CALL),      /* A B C  R(A), ... ,R(A+C-2) := R(A)(R(A+1), ... ,R(A+B-1)) */
  VM_DECLARE_OPCODE_REF(29, OP_TAILCALL),  /* A B C  return R(A)(R(A+1), ... ,R(A+B-1)) */
  VM_DECLARE_OPCODE_REF(30, OP_RETURN),    /* A B    return R(A), ... ,R(A+B-2) (see note) */
  VM_DECLARE_OPCODE_REF(31, OP_FORLOOP),   /* A sBx  R(A) += R(A+2); if R(A) <?= R(A+1) then { pc += sBx; R(A+3) = R(A) } */
  VM_DECLARE_OPCODE_REF(32, OP_FORPREP),   /* A sBx  R(A) -= R(A+2); pc += sBx */
  VM_DECLARE_OPCODE_REF(33, OP_TFORLOOP),  /* A C    R(A+3), ... ,R(A+2+C) := R(A)(R(A+1), R(A+2)); if R(A+3) ~= nil then R(A+2) = R(A+3) else pc++ */
  VM_DECLARE_OPCODE_REF(34, OP_SETLIST),   /* A B C  R(A)[(C-1)*FPF+i] := R(A+i), 1 <= i <= B */
  VM_DECLARE_OPCODE_REF(35, OP_CLOSE),     /* A      close all variables in the stack up to (>=) R(A) */
  VM_DECLARE_OPCODE_REF(36, OP_CLOSURE),   /* A Bx   R(A) := closure(KPROTO[Bx], R(A), ... ,R(A+n)) */
  VM_DECLARE_OPCODE_REF(37, OP_VARARG),    /* A B    R(A), R(A+1), ..., R(A+B-1) = vararg */
  NULL
};
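The table relies on a handler typedef, result codes, and the VM_DECLARE_OPCODE_REF macro that are defined elsewhere in the patch and are not shown above. A plausible sketch of what they could look like follows; every definition in it is an assumption for illustration, not the actual patch code:

/* Hypothetical sketch only: the patch defines these elsewhere, the exact
** shapes here are guesses for illustration.                               */
struct lua_State;                             /* opaque to this sketch     */
typedef struct VM_state_mach VM_state_mach;   /* cached pc/base/cl/k live here */

/* every handler receives the cached VM "registers" and the lua_State and
** returns a code telling the main interpreter loop what to do next        */
typedef int (*VM_OpCodeHandlers)(VM_state_mach *vm, struct lua_State *L);

enum {
  VM_STATE_CONTINUE,   /* fall through: fetch the next instruction */
  VM_STATE_VM_RET,     /* luaV_execute must return                 */
  VM_STATE_REENTRY     /* jump back to the reentry: label          */
};

/* a table slot is just the handler's address; the numeric argument only
** documents which opcode the position in the initializer stands for       */
#define VM_DECLARE_OPCODE_REF(idx, name)  vm_handler_##name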
//__________________________________________________
LUAI_FUNC_S void __fastcall luaV_execute (lua_State *__Lu_St__, int _nexeccalls)
{
  /* VM INIT */
  //~ const Instruction* pc;
  //~ StkId base;
  //~ LClosure* cl;
  //~ TValue* k;
  VM_state_mach VM_BUF;
  VM_state_mach* const __VM__ = &VM_BUF;
  __reg_call_lvl__ = _nexeccalls;
 reentry:  /* ENTRY POINT */
  /* VM STATE INIT */
  lua_assert( isLua( LUA_LS_2(__Lu_St__, ci) ) );
  __reg_pc__ = LUA_LS_SAVED_PC(__Lu_St__);      //~ pc = LUA_LS_SAVED_PC(__Lu_St__);
  __reg_base__ = LUA_LS_SAVED_BASE(__Lu_St__);  //~ base = LUA_LS_SAVED_BASE(__Lu_St__);
  __reg_cl__ = &clvalue( LUA_LS_2(__Lu_St__, ci)->func )->l;
  __reg_k__ = __reg_cl__->p->k;                 //~ k = cl->p->k;
  for (;;)  /* MAIN LOOP OF INTERPRETER */
  {
    __reg_instruction__ = *__reg_pc__;
    ++__reg_pc__;
    /* VM CONTEXT TEST */
    if ( ( LUA_MASKLINE | LUA_MASKCOUNT ) & LUA_LS_2(__Lu_St__, hookmask) ) {
      --LUA_LS_2(__Lu_St__, hookcount);
      if ( LUA_MASKLINE & LUA_LS_2(__Lu_St__, hookmask)
           || 0 == LUA_LS_2(__Lu_St__, hookcount) ) {
        traceexec(__Lu_St__, __reg_pc__);
        if (LUA_LS_2(__Lu_St__, status) == LUA_YIELD) {  /* did hook yield? */
          LUA_LS_SAVED_PC(__Lu_St__) = __reg_pc__ - 1;
          return;
        }
        __reg_base__ = LUA_LS_SAVED_BASE(__Lu_St__);
      }
    }
    lua_assert(__reg_base__ == LUA_LS_SAVED_BASE(__Lu_St__)
               && LUA_LS_SAVED_BASE(__Lu_St__) == LUA_LS_2(__Lu_St__, ci)->__reg_base__);
    lua_assert(__reg_base__ <= LUA_LS_TOP(__Lu_St__)
               && LUA_LS_TOP(__Lu_St__) <= LUA_LS_2(__Lu_St__, stack) + LUA_LS_2(__Lu_St__, stacksize));
    lua_assert( LUA_LS_TOP(__Lu_St__) == LUA_LS_2(__Lu_St__, ci)->top
                || luaG_checkopenop( __reg_instruction__ ) );
    /* VM CALL OPCODE HANDLERS */
    if ( (sizeof( VM_OpCodeJmp )/sizeof(VM_OpCodeHandlers) - 1) <= LUA_E__OP() )
      return;
    /* warning: several calls may realloc the stack and invalidate `R(A)'.
    ** R(A) is recovered from the saved instruction (__VM__->vm_reg_curr_instruction),
    ** and that field is changed only by this module; if a C function ever
    ** destroys it, we are in big trouble. */
    switch ( VM_OpCodeJmp[ LUA_E__OP() ]( __VM__, __Lu_St__ ) ) {
      case VM_STATE_VM_RET:
        return;
      case VM_STATE_REENTRY:
        goto reentry;
    }
  }
}
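To see the control flow of this table dispatch in isolation, here is a small self-contained toy that mirrors the idea (handlers looked up by opcode, with the return code deciding whether the loop continues or returns); none of its names come from the patch, it compiles and prints 5:

#include <stdio.h>

enum { ST_CONTINUE, ST_RETURN };

typedef struct { int acc; const unsigned char *pc; } ToyVM;
typedef int (*ToyHandler)(ToyVM *vm);

static int op_inc (ToyVM *vm) { vm->acc += 1; return ST_CONTINUE; }
static int op_dbl (ToyVM *vm) { vm->acc *= 2; return ST_CONTINUE; }
static int op_end (ToyVM *vm) { (void)vm;     return ST_RETURN;   }

/* jump table: the opcode value is the index, exactly as in VM_OpCodeJmp */
static const ToyHandler handlers[] = { op_inc, op_dbl, op_end };

static int run (ToyVM *vm) {
  for (;;) {
    unsigned char op = *vm->pc++;                     /* fetch */
    if (op >= sizeof handlers / sizeof handlers[0])
      return -1;                                      /* unknown opcode */
    if (handlers[op](vm) == ST_RETURN)                /* dispatch, act on result */
      return vm->acc;
  }
}

int main (void) {
  static const unsigned char code[] = { 0, 0, 1, 0, 2 };  /* inc inc dbl inc end */
  ToyVM vm = { 0, code };
  printf("%d\n", run(&vm));                           /* prints 5 */
  return 0;
}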
___________________________________________________________
___________________________________________________________
Maybe the authors of Lua will change strategy and use this method?
___________________________________________________________
Time test:

function test()
  local c = os.clock()
  local sum = 0
  local prod = 1
  io.write("\n")
  for i = 1, 1000005 do
    if (i % 20000) == 0 then
      io.write(i / 20000)
    end
    if (i % 20001) == 0 then
      sum = sum + (i / 20000)
    end
    if (i % 20002) == 0 then
      prod = prod + (i / 20000)
    end
  end
  io.write("\n")
  local t = os.clock() - c
  print(t)
end
[old: 0.563 / 0.562]
[new: 0.407 / 0.406]
___________________________________________________________
I am sending the changed Lua VM; consider it experimental.