23.12.2022, 00:54
Hi
We have a customer that wants kWh values delivered to another system once an hour.
The file is to be sent as a CSV file attached to an e-mail.
The script below is run once an hour to save the value, and I want to send the mail every hour with the values from the last 24 hours, deleting the oldest entry each time.
A better solution might be to trend the object and read the values from the trend, if possible? (See the sketch right after my questions.)
Below I have set up the data table and I run this once an hour.
But I have some issues and questions:
1: When I log the data below, the columns are not sorted right. I tried to sort by time, but that doesn't help?
2: I don't understand how to build the CSV from this data and send it by e-mail?
3: The old CSV file should then be deleted.
Can you help with this?
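For the trend alternative: a rough, untested sketch of reading the last 24 hours back from an existing trend. 'kWh trend' is a placeholder name, and I am assuming trends.fetch accepts a resolution in seconds:

require('trends')

-- fetch window: from 24 hours ago until now
dates = {}
dates['start'] = os.date('*t', os.time() - 24 * 60 * 60)
dates['end'] = os.date('*t')

-- one value per hour (3600 second resolution), 'kWh trend' is a placeholder
values = trends.fetch('kWh trend', dates, 3600)
log(values)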
data = storage.get('myobjectlogs', {})
-- data = {} -- uncomment to reset the stored log

-- add the current reading
table.insert(data, {
  ['time'] = os.date('%Y-%m-%d %X'),
  ['id'] = 'byggmax',
  ['verdi'] = grp.getvalue('2/1/0'),
})

-- sort rows by timestamp (oldest first)
table.sort(data, function(a, b)
  return a.time < b.time
end)

storage.set('myobjectlogs', data)
--log(storage.get('myobjectlogs'))
log(data)
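-- note on question 1: log() prints the keys inside each row (time/id/verdi)
-- in whatever order Lua stores them, because table keys are unordered;
-- table.sort above orders the rows by time, not the columns in the log view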
ftpfile = string.format('/home/ftp/%s.csv', os.date('%Y-%m-%d_%H-%M'))
--[[ get past hour data (3600 seconds)
logtime = os.time() - 60 * 60

-- list of objects by id
objects = {}

-- objects with logging enabled
query = 'SELECT address, datatype, name FROM objects WHERE disablelog=0'
for _, object in ipairs(db:getall(query)) do
  objects[ tonumber(object.address) ] = {
    datatype = tonumber(object.datatype),
    name = tostring(object.name or ''),
  }
end

-- csv buffer
buffer = { '"date","address","name","value"' }

-- get object logs
query = 'SELECT src, address, datahex, logtime, eventtype FROM objectlog WHERE logtime >= ? ORDER BY id DESC'
for _, row in ipairs(db:getall(query, logtime)) do
  object = objects[ tonumber(row.address) ]
  -- found matching object and event type is group write
  if object and row.eventtype == 'write' then
    datatype = object.datatype
    -- check that object datatype is set
    if datatype then
      -- decode data
      data = knxdatatype.decode(row.datahex, datatype)
      -- remove null chars from char/string datatype
      if datatype == dt.char or datatype == dt.string then
        data = data:gsub('%z+', '')
      -- date to DD.MM.YYYY
      elseif datatype == dt.date then
        data = string.format('%.2d.%.2d.%.2d', data.day, data.month, data.year)
      -- time to HH:MM:SS
      elseif datatype == dt.time then
        data = string.format('%.2d:%.2d:%.2d', data.hour, data.minute, data.second)
      end
    else
      data = ''
    end
    -- format csv row
    logdate = os.date('%Y.%m.%d %H:%M:%S', row.logtime)
    csv = string.format('%q,%q,%q,%q', logdate, knxlib.decodega(row.address), object.name, tostring(data))
    -- add to buffer
    table.insert(buffer, csv)
  end
end]]
-- this is where it fails: data holds tables, not strings, so table.concat errors out
data2 = table.concat(data)
io.writefile(ftpfile, data2)
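For questions 2 and 3, something along these lines might work; an untested sketch that reuses data and ftpfile from the script above and sends the mail with the LuaSocket smtp/mime/ltn12 modules. Server, login and addresses are placeholders, and it uses plain SMTP on port 25 since TLS options depend on the mail server:

-- keep a rolling window: the script runs hourly, so 24 rows = 24 hours
while #data > 24 do
  table.remove(data, 1) -- rows are sorted oldest first, drop the oldest
end
storage.set('myobjectlogs', data)

-- build CSV text from the stored rows
buffer = { '"time","id","verdi"' }
for _, row in ipairs(data) do
  buffer[ #buffer + 1 ] = string.format('%q,%q,%q', row.time, row.id, tostring(row.verdi))
end
csvdata = table.concat(buffer, '\r\n')
io.writefile(ftpfile, csvdata)

-- send the CSV as a mail attachment
smtp = require('socket.smtp')
mime = require('mime')
ltn12 = require('ltn12')

msg = {
  headers = {
    to = 'recipient@example.com', -- placeholder
    subject = 'Hourly kWh report',
  },
  body = {
    [1] = {
      headers = {
        ['content-type'] = 'text/csv; name="report.csv"',
        ['content-disposition'] = 'attachment; filename="report.csv"',
        ['content-transfer-encoding'] = 'BASE64',
      },
      -- stream the CSV through base64 encoding with line wrapping
      body = ltn12.source.chain(
        ltn12.source.string(csvdata),
        ltn12.filter.chain(mime.encode('base64'), mime.wrap())
      ),
    },
  },
}

res, err = smtp.send({
  from = '<sender@example.com>',    -- placeholder
  rcpt = '<recipient@example.com>', -- placeholder
  server = 'smtp.example.com',      -- placeholder
  port = 25,
  user = 'sender@example.com',
  password = 'secret',
  source = smtp.message(msg),
})
log(res, err)

-- question 3: remove the CSV file once the mail has been sent
if res then
  os.remove(ftpfile)
end

As far as I can tell the built-in mail() helper only sends plain text, hence going through socket.smtp directly for the attachment.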