Merge pull request #9 from tanmaykm/tan/misc
optional json format logs, per log msg size limit
tanmaykm authored Aug 21, 2020
2 parents 7a2b014 + 38f2aa6 commit e25e25c
Showing 8 changed files with 379 additions and 30 deletions.
3 changes: 1 addition & 2 deletions .appveyor.yml
@@ -1,10 +1,9 @@
environment:
matrix:
- - julia_version: 1.0
- - julia_version: 1.1
- julia_version: 1.2
- julia_version: 1.3
- julia_version: 1.4
- julia_version: 1.5
- julia_version: nightly

platform:
3 changes: 1 addition & 2 deletions .travis.yml
@@ -5,11 +5,10 @@ os:
- osx

julia:
- - 1.0
- - 1.1
- 1.2
- 1.3
- 1.4
- 1.5
- nightly

# # Uncomment the following lines to allow failures on nightly julia
6 changes: 4 additions & 2 deletions Project.toml
@@ -3,16 +3,18 @@ uuid = "c41e01d8-14e5-11ea-185b-e7eabed7be4b"
keywords = ["log", "rotate", "roller", "logrotate"]
license = "MIT"
authors = ["Tanmay Mohapatra <[email protected]>"]
version = "0.3.0"
version = "0.4.0"

[compat]
julia = "1"
julia = "1.2,1.3,1.4,1.5"
CodecZlib = "0.6,0.7"
JSON = "0.21"

[deps]
Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
Logging = "56ddb016-857b-54e1-b83d-db4d58db5568"
CodecZlib = "944b1d66-785c-5afd-91f1-9de20f533193"
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"

[extras]
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
11 changes: 11 additions & 0 deletions README.md
@@ -54,6 +54,11 @@ Construction parameters:
- `nfiles`: number of rotated files to maintain
- `level`: log level (default: `Logging.Info`)

Constructor keyword parameters (described below):
- `timestamp_identifier`
- `format`
- `entry_size_limit`

## Examples

Using `RollingFileWriter`
@@ -147,3 +152,9 @@ shell> cat /tmp/mylog.log
└ @ Main REPL[3]:2
```

Logs can be formatted as JSON by setting the constructor keyword argument `format` to `:json`. A JSON-formatted log entry is a JSON object with the following keys (unless they are empty):
- `metadata`: event metadata e.g. timestamp, line, filename, ...
- `message`: the log message string
- `keywords`: any keywords provided

The size of each log entry can be limited with the constructor keyword argument `entry_size_limit`, which defaults to 256KB.
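
For illustration, a logger using both options might be constructed as follows. This is a minimal sketch based on the constructor described above; the size limit, rotation count, and keyword values are arbitrary examples.

```julia
using Logging, LogRoller

# rotate at ~10MB, keep 3 rotated files, emit JSON entries capped at 64KB each
logger = RollingLogger("/tmp/mylog.log", 10*1024*1024, 3, Logging.Info;
                       format=:json, entry_size_limit=64*1024)

with_logger(logger) do
    @info("application started", component="web", port=8080)
end
```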
69 changes: 52 additions & 17 deletions src/LogRoller.jl
@@ -4,12 +4,20 @@ using Dates
using Logging
using CodecZlib
using Logging
using JSON
using JSON.Serializations: CommonSerialization, StandardSerialization
using JSON.Writer: StructuralContext
import JSON: show_json

import Logging: shouldlog, min_enabled_level, catch_exceptions, handle_message
import Base: write, close, rawhandle
export RollingLogger, RollingFileWriter, RollingFileWriterTee, postrotate

const BUFFSIZE = 1024*16 # try and read 16K pages when possible
const DEFAULT_MAX_LOG_ENTRY_SIZE = 256*1024

include("limitio.jl")
include("log_utils.jl")

"""
A file writer that implements the `IO` interface, but only provides `write` methods.
@@ -159,18 +167,27 @@ function RollingFileWriterTee(io::RollingFileWriter, logger::AbstractLogger, ass
end

"""
- RollingLogger(filename, sizelimit, nfiles, min_level=Info; timestamp_identifier::Symbol=:time)
RollingLogger(filename, sizelimit, nfiles, min_level=Info; timestamp_identifier::Symbol=:time, format::Symbol=:console, entry_size_limit::Int=DEFAULT_MAX_LOG_ENTRY_SIZE)
Log into a log file. Rotate log file based on file size. Compress rotated logs.
Logs can be formatted as JSON by setting the optional keyword argument `format` to `:json`. A JSON formatted log entry
is a JSON object with the following keys (unless they are empty):
- `metadata`: event metadata e.g. timestamp, line, filename, ...
- `message`: the log message string
- `keywords`: any keywords provided
"""
mutable struct RollingLogger <: AbstractLogger
stream::RollingFileWriter
min_level::LogLevel
message_limits::Dict{Any,Int}
timestamp_identifier::Symbol
format::Symbol
entry_size_limit::Int
end
- function RollingLogger(filename::String, sizelimit::Int, nfiles::Int, level=Logging.Info; timestamp_identifier::Symbol=:time)
function RollingLogger(filename::String, sizelimit::Int, nfiles::Int, level=Logging.Info; timestamp_identifier::Symbol=:time, format::Symbol=:console, entry_size_limit::Int=DEFAULT_MAX_LOG_ENTRY_SIZE)
stream = RollingFileWriter(filename, sizelimit, nfiles)
- RollingLogger(stream, level, Dict{Any,Int}(), timestamp_identifier)
RollingLogger(stream, level, Dict{Any,Int}(), timestamp_identifier, format, entry_size_limit)
end

"""
@@ -215,21 +232,39 @@ function handle_message(logger::RollingLogger, level, message, _module, group, i
logger.message_limits[id] = remaining - 1
remaining > 0 || return
end
- buf = IOBuffer()
- iob = IOContext(buf, logger.stream)
- levelstr = level == Logging.Warn ? "Warning" : string(level)
- timestamp, kwarg_timestamp = get_timestamp(logger, kwargs)
- msglines = split(chomp(string(message)), '\n')
- println(iob, "", levelstr, ": ", timestamp, ": ", msglines[1])
- for i in 2:length(msglines)
- println(iob, "", msglines[i])
- end
- for (key, val) in kwargs
- kwarg_timestamp && (key === logger.timestamp_identifier) && continue
- println(iob, "", key, " = ", val)

if logger.format === :json
timestamp, kwarg_timestamp = get_timestamp(logger, kwargs)
log = (level=level, message=message, _module=_module, group=group, id=id, filepath=filepath, line=line, kwargs=kwargs)
log = merge(log, [logger.timestamp_identifier=>timestamp])
logentry = IndexedLogEntry(log, Symbol[])
write(logger.stream, message_string(logentry, logger.entry_size_limit, true))
else # if logger.format === :console
buf = IOBuffer()
lim = LimitIO(buf, logger.entry_size_limit)
limited = false
try
iob = IOContext(lim, logger.stream)
levelstr = level == Logging.Warn ? "Warning" : string(level)
timestamp, kwarg_timestamp = get_timestamp(logger, kwargs)
msglines = split(chomp(string(message)), '\n')
println(iob, "", levelstr, ": ", timestamp, ": ", msglines[1])
for i in 2:length(msglines)
println(iob, "", msglines[i])
end
for (key, val) in kwargs
kwarg_timestamp && (key === logger.timestamp_identifier) && continue
println(iob, "", key, " = ", val)
end
println(iob, "└ @ ", something(_module, "nothing"), " ", something(filepath, "nothing"), ":", something(line, "nothing"))
catch ex
isa(ex, LimitIOException) || rethrow(ex)
limited = true
end
write(logger.stream, take!(buf))
limited && write(logger.stream, UInt8[0x0a])
end
println(iob, "└ @ ", something(_module, "nothing"), " ", something(filepath, "nothing"), ":", something(line, "nothing"))
write(logger.stream, take!(buf))

nothing
end

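As a usage sketch of the `timestamp_identifier` handling above: if a log record carries a keyword whose name matches `timestamp_identifier`, that value is used as the entry's timestamp (and, in console format, skipped when printing keywords); otherwise `get_timestamp` (not shown in this diff) supplies one. The file path and values below are hypothetical.

```julia
using Dates, Logging, LogRoller

logger = RollingLogger("/tmp/app.log", 1024*1024, 2; timestamp_identifier=:time)

with_logger(logger) do
    # the `time` keyword becomes the entry timestamp instead of a printed keyword
    @info("job finished", time=DateTime(2020, 8, 21, 10, 30), duration_secs=42)
end
```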
15 changes: 15 additions & 0 deletions src/limitio.jl
@@ -0,0 +1,15 @@
# limits the number of bytes written to an IO stream (originally from https://github.com/JuliaDebug/Debugger.jl/blob/master/src/limitio.jl)
# useful to detect messages that would go over the limit when converted to JSON.
mutable struct LimitIO{IO_t <: IO} <: IO
io::IO_t
maxbytes::Int
n::Int # bytes written so far
end
LimitIO(io::IO, maxbytes) = LimitIO(io, maxbytes, 0)

struct LimitIOException <: Exception end

function Base.write(io::LimitIO, v::UInt8)
io.n > io.maxbytes && throw(LimitIOException())
io.n += write(io.io, v)
end
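
A small usage sketch of `LimitIO` (an internal type, so these definitions must be in scope; the 10-byte budget is arbitrary): writes pass through to the wrapped stream until the budget is exceeded, at which point `LimitIOException` is thrown and whatever was already written remains in the underlying buffer.

```julia
buf = IOBuffer()
lim = LimitIO(buf, 10)    # allow roughly 10 bytes

try
    write(lim, "this string is much longer than ten bytes")
catch ex
    ex isa LimitIOException || rethrow()
    # the bytes written before the limit was hit are still in `buf`
end

String(take!(buf))    # truncated prefix of the message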
125 changes: 125 additions & 0 deletions src/log_utils.jl
@@ -0,0 +1,125 @@
"""
Custom JSON serializer for log entries.
Handles Module types for now, more can be added later.
"""
struct LogEntrySerialization <: CommonSerialization end

show_json(io::StructuralContext, ::LogEntrySerialization, m::Module) = show_json(io, LogEntrySerialization(), string(m))
show_json(io::StructuralContext, ::LogEntrySerialization, ptr::Ptr) = show_json(io, LogEntrySerialization(), string(ptr))
show_json(io::StructuralContext, ::LogEntrySerialization, sv::Core.SimpleVector) = show_json(io, LogEntrySerialization(), [sv...])
show_json(io::StructuralContext, ::LogEntrySerialization, typ::DataType) = show_json(io, LogEntrySerialization(), string(typ))

function show_json(io::StructuralContext, ::LogEntrySerialization, level::Logging.LogLevel)
levelstr = (level == Logging.Debug) ? "Debug" :
(level == Logging.Info) ? "Info" :
(level == Logging.Warn) ? "Warn" :
(level == Logging.Error) ? "Error" :
"LogLevel($(level.level))"
show_json(io, LogEntrySerialization(), levelstr)
end

function show_json(io::StructuralContext, ::LogEntrySerialization, exception::Tuple{Exception,Any})
iob = IOBuffer()
Base.show_exception_stack(iob, [exception])
show_json(io, LogEntrySerialization(), String(take!(iob)))
end

as_text(str::String) = str
function as_text(obj)
iob = IOBuffer()
lim = LimitIO(iob, 4*1024) # fixed at 4KB for now; large enough for most use cases
try
show(lim, "text/plain", obj)
catch ex
if isa(ex, LimitIOException)
# ignore and take what was printed
print(iob, "...")
else
rethrow()
end
end

String(take!(iob))
end
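
To illustrate what the serializer methods above do (a sketch; the dictionary contents are arbitrary and `LogEntrySerialization` is internal to the module, so it must be in scope): values that are not JSON-native, such as modules, types, and log levels, are rendered as strings.

```julia
using JSON, Logging

io = IOBuffer()
JSON.show_json(io, LogEntrySerialization(), Dict("module" => Base, "level" => Logging.Warn, "type" => Int))
String(take!(io))    # e.g. {"level":"Warn","module":"Base","type":"Int64"} (key order may vary)
```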

"""
IndexedLogEntry represents a log entry as a dictionary and its
indexable attributes in a form that is useful to a logging sink.
The index part contains metadata that are to be indexed. Event metadata
consists of attributes like level, module, filepath, line, job id,
process id, user id, etc. It also includes application specific
keywords that the originating piece of code wishes to index.
Keywords that should be considered as metadata are indicated via the
`indexable` constructor parameter.
What metadata can be indexed depends on the type of sink and whether
it supports indexing certain types of attributes. Attributes that
the sink cannot index are made part of the message itself for storage.
The message part can contain the following keys unless they are empty:
- `metadata`: event metadata that could not be indexed
- `message`: the log message string
- `keywords`: any keywords provided
Constructor parameters:
- `log`: Named tuple containing args to the handle_message method, e.g.: (level, message, _module, group, id, file, line, kwargs)
- `indexable`: list of names from `log` and `log.kwargs` that should be included in the index
"""
struct IndexedLogEntry
index::Dict{Symbol,Any}
message::Dict{Symbol,Any}
end

function IndexedLogEntry(log, indexable::Vector{Symbol}=[:level, :module, :filepath, :line])
index = Dict{Symbol, Any}()
metadata = Dict{Symbol,Any}()
keywords = Dict(log.kwargs)

log_prop_names = propertynames(log)
for name in log_prop_names
(name === :kwargs) && continue # skip the kwargs, deal with that separately
(name === :message) && continue # skip message, we are dealing with that separately
((name in indexable) ? index : metadata)[name] = getproperty(log, name)
end
for name in keys(keywords)
(name in log_prop_names) && continue # avoid clobbering reserved names
if name in indexable
index[name] = keywords[name]
delete!(keywords, name)
end
end

message = Dict{Symbol,Any}()
messagestr = as_text(log.message)
isempty(messagestr) || (message[:message] = messagestr)
isempty(metadata) || (message[:metadata] = metadata)
isempty(keywords) || (message[:keywords] = keywords)

IndexedLogEntry(index, message)
end

message_string(entry::IndexedLogEntry, size_limit::Int, newline::Bool=false) = message_string(entry.message, size_limit, newline)
function message_string(message::Dict{Symbol,Any}, size_limit::Int, newline::Bool=false)
iob = IOBuffer()
lim = LimitIO(iob, size_limit)
try
JSON.show_json(lim, LogEntrySerialization(), message)
newline && write(lim, '\n')
catch ex
if isa(ex, LimitIOException)
if haskey(message, :keywords)
# strip off keywords (if any) and retry
delete!(message, :keywords)
return message_string(message, size_limit, newline)
elseif haskey(message, :message)
# strip off the message and retry (we retain only the metadata in the worst case)
delete!(message, :message)
return message_string(message, size_limit, newline)
end
end
rethrow(ex)
end
String(take!(iob))
end
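
A sketch of how these pieces fit together (the named tuple mirrors the one `handle_message` assembles; all field values here are made up, and the types are internal to the module): `IndexedLogEntry` splits the record into indexable and non-indexable parts, and `message_string` serializes the non-indexable part as JSON, dropping `keywords` and then `message` if the size limit is exceeded.

```julia
using Logging

log = (level = Logging.Info,
       message = "service started",
       _module = Main,
       group = :app,
       id = :startup,
       filepath = "server.jl",
       line = 42,
       kwargs = Dict(:port => 8080))

entry = IndexedLogEntry(log, [:level, :filepath])
entry.index                      # Dict(:level => Info, :filepath => "server.jl")
message_string(entry, 256*1024)  # JSON object with "metadata", "message" and "keywords"
```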

2 comments on commit e25e25c

@tanmaykm (Member Author)

@JuliaRegistrator

Registration pull request created: JuliaRegistries/General/19913

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the github interface, or via:

git tag -a v0.4.0 -m "<description of version>" e25e25cab933038d0a580f24504b4afd80aa8639
git push origin v0.4.0
