[fluksod] use the new table.create to eliminate the rehashes in json_encode

Bart Van Der Meerssche 2011-05-01 17:15:47 +02:00
parent 18897a307b
commit ab393a2c21
2 changed files with 17 additions and 4 deletions

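The idea behind the change: table.create(narr, nhash) returns a table whose array and hash parts are pre-sized, so appending up to narr elements never forces an internal resize and rehash. It is not part of stock Lua 5.1 (LuaJIT offers the equivalent table.new via require("table.new")), so it is presumably provided by the firmware's patched Lua build, as the commit message's "new table.create" suggests. A minimal sketch of the difference, with the element count chosen purely for illustration:

-- Sketch only: table.create(narr, nhash) is assumed to behave like LuaJIT's
-- table.new, i.e. pre-allocate narr array slots and nhash hash slots.

-- Growing a table one append at a time reallocates and rehashes the array
-- part each time its current capacity is exceeded:
local grown = {}
for i = 1, 1000 do
	grown[#grown + 1] = i
end

-- Pre-sizing the array part up front avoids all of those resizes:
local hinted = table.create(1000, 0)	-- 1000 array slots, no hash slots
for i = 1, 1000 do
	hinted[#hinted + 1] = i
end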

@@ -113,19 +113,32 @@ function polish(M, now, cutoff)
 	end
 end
-function json_encode(M)
+function json_encode(M, entries)
 	local J = {}
+	if entries then
+		arr_size = 5*entries + 2
+	else
+		arr_size = 0
+	end
 	for sensor, T in pairs(M) do
 		local H = timestamps(T)
-		local SB = {'['} -- use a string buffer for building up the JSON string
+		local SB = table.create(arr_size, 0)
+		SB[1] = '[' -- use a string buffer for building up the JSON string
 		for k, timestamp in ipairs(H) do
-			SB[#SB+1] = '[' .. timestamp .. ',' .. T[timestamp] .. '],'
+			SB[#SB+1] = '['
+			SB[#SB+1] = timestamp
+			SB[#SB+1] = ','
+			SB[#SB+1] = T[timestamp]
+			SB[#SB+1] = '],'
 		end
 		SB[#SB] = SB[#SB]:sub(1, -2) -- remove the trailing comma from the last entry
 		SB[#SB+1] = ']'
 		J[sensor] = table.concat(SB)
 	end

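For reference, a standalone sketch of what the rewritten buffer logic produces, using a plain table literal instead of table.create so it runs on stock Lua; the sensor readings and the sorted timestamp list (what timestamps() presumably returns) are made up for illustration:

local T = { [1304262000] = 100, [1304262001] = 105 }	-- timestamp -> reading
local H = { 1304262000, 1304262001 }			-- sorted timestamps

local SB = { '[' }
for _, timestamp in ipairs(H) do
	SB[#SB + 1] = '['
	SB[#SB + 1] = timestamp
	SB[#SB + 1] = ','
	SB[#SB + 1] = T[timestamp]
	SB[#SB + 1] = '],'
end
SB[#SB] = SB[#SB]:sub(1, -2)	-- strip the trailing comma of the last entry
SB[#SB + 1] = ']'
print(table.concat(SB))		-- [[1304262000,100],[1304262001,105]]

Each reading contributes five buffer slots, plus one slot each for the opening and closing bracket, which is where the 5*entries + 2 size hint in the diff comes from.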

@@ -321,7 +321,7 @@ function publish(child)
 	while true do
 		measurements:polish(os.time(), LAN_POLISH_CUTOFF)
-		local measurements_json = measurements:json_encode()
+		local measurements_json = measurements:json_encode(LAN_POLISH_CUTOFF)
 		for sensor_id, json in pairs(measurements_json) do
 			local file = LAN_PUBLISH_PATH .. '/' .. sensor_id
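
The call site now passes LAN_POLISH_CUTOFF as the entries hint. The assumption, not spelled out in the diff, appears to be that after polish() a sensor holds at most LAN_POLISH_CUTOFF readings, so a buffer sized for that many entries never needs to grow. A back-of-the-envelope check with an illustrative cutoff value:

local LAN_POLISH_CUTOFF = 60			-- illustrative value, not taken from this diff
local arr_size = 5 * LAN_POLISH_CUTOFF + 2
print(arr_size)					-- 302 array slots pre-allocated per sensor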