Skip to content

Commit

Permalink
Merge pull request torch#225 from colesbury/lua52
Browse files Browse the repository at this point in the history
Add support for Lua 5.2
  • Loading branch information
andresy committed Jun 10, 2015
2 parents 8d3154a + 9cf1907 commit 8a9a35d
Show file tree
Hide file tree
Showing 22 changed files with 186 additions and 58 deletions.
2 changes: 1 addition & 1 deletion DiskFile.c
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,6 @@ void torch_DiskFile_init(lua_State *L)
luaT_newmetatable(L, "torch.DiskFile", "torch.File",
torch_DiskFile_new, torch_DiskFile_free, NULL);

luaL_register(L, NULL, torch_DiskFile__);
luaT_setfuncs(L, torch_DiskFile__, 0);
lua_pop(L, 1);
}
6 changes: 2 additions & 4 deletions FFI.lua
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
if jit then

local ffi = require 'ffi'

local ok, ffi = pcall(require, 'ffi')
if ok then
local Real2real = {
Byte='unsigned char',
Char='char',
Expand Down
2 changes: 1 addition & 1 deletion File.c
Original file line number Diff line number Diff line change
Expand Up @@ -199,6 +199,6 @@ static const struct luaL_Reg torch_File__ [] = {
/* Register the "torch.File" metatable and its method table.
 *
 * luaT_newmetatable leaves the freshly created metatable on top of the
 * Lua stack.  luaT_setfuncs installs the torch_File__ luaL_Reg array into
 * it; unlike luaL_register (which was removed in Lua 5.2), luaT_setfuncs
 * works on both Lua 5.1 and 5.2.  The trailing pop rebalances the stack.
 *
 * Fix: the rendered diff left both the obsolete luaL_register call and
 * its luaT_setfuncs replacement in place, registering the methods twice
 * (and failing to compile against Lua 5.2); only the portable call stays. */
void torch_File_init(lua_State *L)
{
  luaT_newmetatable(L, "torch.File", NULL, NULL, NULL, NULL);
  luaT_setfuncs(L, torch_File__, 0);
  lua_pop(L, 1);
}
37 changes: 24 additions & 13 deletions File.lua
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,11 @@ local TYPE_TABLE = 3
local TYPE_TORCH = 4
local TYPE_BOOLEAN = 5
local TYPE_FUNCTION = 6
local TYPE_RECUR_FUNCTION = 7
local TYPE_RECUR_FUNCTION = 8
local LEGACY_TYPE_RECUR_FUNCTION = 7

-- Lua 5.2 compatibility
local loadstring = loadstring or load

function File:isWritableObject(object)
local typename = type(object)
Expand Down Expand Up @@ -138,7 +142,8 @@ function File:writeObject(object)
counter = counter + 1
local name,value = debug.getupvalue(object, counter)
if not name then break end
table.insert(upvalues, value)
if name == '_ENV' then value = nil end
table.insert(upvalues, {name=name, value=value})
end
local dumped = string.dump(object)
local stringStorage = torch.CharStorage():string(dumped)
Expand Down Expand Up @@ -214,7 +219,7 @@ function File:readObject()
debug.setupvalue(func, index, upvalue)
end
return func
elseif typeidx == TYPE_TABLE or typeidx == TYPE_TORCH or typeidx == TYPE_RECUR_FUNCTION then
elseif typeidx == TYPE_TABLE or typeidx == TYPE_TORCH or typeidx == TYPE_RECUR_FUNCTION or typeidx == LEGACY_TYPE_RECUR_FUNCTION then
-- read the index
local index = self:readInt()

Expand All @@ -225,16 +230,22 @@ function File:readObject()
end

-- otherwise read it
if typeidx == TYPE_RECUR_FUNCTION then
local size = self:readInt()
local dumped = self:readChar(size):string()
local func = loadstring(dumped)
objects[index] = func
local upvalues = self:readObject()
for index,upvalue in ipairs(upvalues) do
debug.setupvalue(func, index, upvalue)
end
return func
if typeidx == TYPE_RECUR_FUNCTION or typeidx == LEGACY_TYPE_RECUR_FUNCTION then
local size = self:readInt()
local dumped = self:readChar(size):string()
local func = loadstring(dumped)
objects[index] = func
local upvalues = self:readObject()
for index,upvalue in ipairs(upvalues) do
if typeidx == LEGACY_TYPE_RECUR_FUNCTION then
debug.setupvalue(func, index, upvalue)
elseif upvalue.name == '_ENV' then
debug.setupvalue(func, index, _ENV)
else
debug.setupvalue(func, index, upvalue.value)
end
end
return func
elseif typeidx == TYPE_TORCH then
local version, className, versionNumber
version = self:readChar(self:readInt()):string()
Expand Down
2 changes: 1 addition & 1 deletion Generator.c
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,6 @@ void torch_Generator_init(lua_State *L)
{
luaT_newmetatable(L, torch_Generator, NULL,
torch_Generator_new, torch_Generator_free, torch_Generator_factory);
luaL_register(L, NULL, torch_Generator_table_);
luaT_setfuncs(L, torch_Generator_table_, 0);
lua_pop(L, 1);
}
2 changes: 1 addition & 1 deletion MemoryFile.c
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,6 @@ void torch_MemoryFile_init(lua_State *L)
{
luaT_newmetatable(L, "torch.MemoryFile", "torch.File",
torch_MemoryFile_new, torch_MemoryFile_free, NULL);
luaL_register(L, NULL, torch_MemoryFile__);
luaT_setfuncs(L, torch_MemoryFile__, 0);
lua_pop(L, 1);
}
2 changes: 1 addition & 1 deletion PipeFile.c
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,6 @@ void torch_PipeFile_init(lua_State *L)
{
luaT_newmetatable(L, "torch.PipeFile", "torch.DiskFile",
torch_PipeFile_new, torch_PipeFile_free, NULL);
luaL_register(L, NULL, torch_PipeFile__);
luaT_setfuncs(L, torch_PipeFile__, 0);
lua_pop(L, 1);
}
7 changes: 5 additions & 2 deletions Tensor.lua
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ local Tensor = {}
-- types
local types = {'Byte', 'Char', 'Short', 'Int', 'Long', 'Float', 'Double'}

-- Lua 5.2 compatibility
local log10 = math.log10 or function(x) return math.log(x, 10) end

-- tostring() functions for Tensor and Storage
local function Storage__printformat(self)
if self:size() == 0 then
Expand All @@ -25,13 +28,13 @@ local function Storage__printformat(self)
local tensor = torch.DoubleTensor(torch.DoubleStorage(self:size()):copy(self), 1, self:size()):abs()
local expMin = tensor:min()
if expMin ~= 0 then
expMin = math.floor(math.log10(expMin)) + 1
expMin = math.floor(log10(expMin)) + 1
else
expMin = 1
end
local expMax = tensor:max()
if expMax ~= 0 then
expMax = math.floor(math.log10(expMax)) + 1
expMax = math.floor(log10(expMax)) + 1
else
expMax = 1
end
Expand Down
9 changes: 5 additions & 4 deletions TensorMath.lua
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,8 @@ local function wrap(...)
end
end
end
method:wrap(unpack(args))
local unpack = unpack or table.unpack
method:wrap(unpack(args))
end

local reals = {ByteTensor='unsigned char',
Expand Down Expand Up @@ -1133,12 +1134,12 @@ static void torch_TensorMath_init(lua_State *L)
luaT_pushmetatable(L, "torch.Tensor");
/* register methods */
luaL_register(L, NULL, m_torch_TensorMath__);
luaT_setfuncs(L, m_torch_TensorMath__, 0);
/* register functions into the "torch" field of the tensor metaclass */
lua_pushstring(L, "torch");
lua_newtable(L);
luaL_register(L, NULL, torch_TensorMath__);
luaT_setfuncs(L, torch_TensorMath__, 0);
lua_rawset(L, -3);
lua_pop(L, 1);
}
Expand All @@ -1157,7 +1158,7 @@ void torch_TensorMath_init(lua_State *L)
torch_LongTensorMath_init(L);
torch_FloatTensorMath_init(L);
torch_DoubleTensorMath_init(L);
luaL_register(L, NULL, torch_TensorMath__);
luaT_setfuncs(L, torch_TensorMath__, 0);
}
]])

Expand Down
2 changes: 1 addition & 1 deletion Timer.c
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,6 @@ static const struct luaL_Reg torch_Timer__ [] = {
/* Register the "torch.Timer" metatable with its constructor/destructor
 * and method table.
 *
 * luaT_newmetatable leaves the metatable on the stack; luaT_setfuncs
 * (portable across Lua 5.1/5.2, unlike the 5.1-only luaL_register)
 * installs the torch_Timer__ methods into it, and the pop restores the
 * stack.
 *
 * Fix: the rendered diff kept both the removed luaL_register line and
 * its luaT_setfuncs replacement, so the methods were registered twice;
 * only the portable call is retained. */
void torch_Timer_init(lua_State *L)
{
  luaT_newmetatable(L, "torch.Timer", NULL, torch_Timer_new, torch_Timer_free, NULL);
  luaT_setfuncs(L, torch_Timer__, 0);
  lua_pop(L, 1);
}
2 changes: 1 addition & 1 deletion generic/Storage.c
Original file line number Diff line number Diff line change
Expand Up @@ -273,7 +273,7 @@ void torch_Storage_(init)(lua_State *L)
{
luaT_newmetatable(L, torch_Storage, NULL,
torch_Storage_(new), torch_Storage_(free), torch_Storage_(factory));
luaL_register(L, NULL, torch_Storage_(_));
luaT_setfuncs(L, torch_Storage_(_), 0);
lua_pop(L, 1);
}

Expand Down
2 changes: 1 addition & 1 deletion generic/Tensor.c
Original file line number Diff line number Diff line change
Expand Up @@ -1278,7 +1278,7 @@ void torch_Tensor_(init)(lua_State *L)
{
luaT_newmetatable(L, torch_Tensor, NULL,
torch_Tensor_(new), torch_Tensor_(free), torch_Tensor_(factory));
luaL_register(L, NULL, torch_Tensor_(_));
luaT_setfuncs(L, torch_Tensor_(_), 0);
lua_pop(L, 1);
}

Expand Down
2 changes: 1 addition & 1 deletion generic/TensorOperator.c
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ static const struct luaL_Reg torch_TensorOperator_(_) [] = {
/* Install the arithmetic-operator metamethods for the current Tensor
 * type (this file is instantiated once per real type via the generic/
 * macro scheme).
 *
 * luaT_pushmetatable pushes the existing torch_Tensor metatable;
 * luaT_setfuncs (the Lua 5.1/5.2-portable replacement for the 5.1-only
 * luaL_register) merges the torch_TensorOperator_(_) luaL_Reg array into
 * it, and the pop rebalances the stack.
 *
 * Fix: the rendered diff left both the obsolete luaL_register call and
 * its luaT_setfuncs replacement, registering the operators twice; only
 * the portable call remains. */
void torch_TensorOperator_(init)(lua_State *L)
{
  luaT_pushmetatable(L, torch_Tensor);
  luaT_setfuncs(L, torch_TensorOperator_(_), 0);
  lua_pop(L, 1);
}

Expand Down
2 changes: 1 addition & 1 deletion init.c
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ int luaopen_libtorch(lua_State *L)

lua_newtable(L);
lua_pushvalue(L, -1);
lua_setfield(L, LUA_GLOBALSINDEX, "torch");
lua_setglobal(L, "torch");

torch_File_init(L);

Expand Down
3 changes: 3 additions & 0 deletions init.lua
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,9 @@
-- Cross-version compatibility shims:
--  * Lua 5.1 renamed string.gfind to string.gmatch; keep the old name
--    alive for code that still calls gfind.
--  * Lua 5.2 moved the global unpack into table.unpack; provide it on
--    5.1 so callers can use the 5.2 spelling everywhere.
-- Each alias is only filled in when the running Lua lacks it.
string.gfind = string.gfind or string.gmatch
table.unpack = table.unpack or unpack

require "paths"
paths.require "libtorch"
Expand Down
Loading

0 comments on commit 8a9a35d

Please sign in to comment.