Implement AbstractLogger interface #9

Merged: 7 commits, Apr 10, 2019
Changes from 3 commits
1 change: 0 additions & 1 deletion .travis.yml
@@ -15,4 +15,3 @@ script:
- julia --color=yes -e 'using Pkg; Pkg.activate(); Pkg.instantiate(); Pkg.test()';
after_success:
- julia -e 'using Pkg; cd(Pkg.dir("TensorBoardLogger")); Pkg.add("Coverage"); using Coverage; Codecov.submit(Codecov.process_folder())'
- julia -e 'using Pkg; cd(Pkg.dir("TensorBoardLogger")); Pkg.add("Coverage"); using Coverage; Coveralls.submit(Coveralls.process_folder())';
1 change: 1 addition & 0 deletions Project.toml
@@ -5,6 +5,7 @@ version = "0.1.0"

[deps]
CRC32c = "8bf52ea8-c179-5cab-976a-9e18b702a9bc"
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
ProtoBuf = "3349acd9-ac6a-5e09-bcdb-63829b23a429"
StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"

29 changes: 13 additions & 16 deletions README.md
@@ -12,7 +12,7 @@ and from [TensorBoardX](https://tensorboardx.readthedocs.io/en/latest/).

To use the library you must create a `Logger` object and then log data to it.

- `Logger(dir_path)` creates a logger saving data to the folder `dir_path`
- `TBLogger(dir_path)` creates a logger saving data to the folder `dir_path`
- `log_value(logger, name, val)` logs to `logger` the value `val` under the tag `name`

## Supported values
@@ -25,25 +25,22 @@ At the moment, you can log the following values:

## Example
```
using TensorBoardLogger
using TensorBoardLogger, Logging, Random

lg = Logger("runs/run-12", overwrite=true)
lg=TBLogger()

for step=1:100
ev = log_value(lg, "quan/prova1", step*1.5, step=step)
ev = log_value(lg, "quan/prova2", step*2.5, step=step)
with_logger(lg) do
for i=1:100
x0 = 0.5+i/30; s0 = 0.5/(i/20);
edges = collect(-5:0.1:5)
centers = collect(edges[1:end-1] .+0.05)
histvals = [exp(-((c-x0)/s0)^2) for c=centers]
data_tuple = (edges, histvals)

x0 = 0.5+step/30; s0 = 0.5/(step/20);
edges = collect(-5:0.1:5)
centers = collect(edges[1:end-1] .+0.05)
histvals = [exp(-((c-x0)/s0)^2) for c=centers]
histvals./=sum(histvals)
data_tuple = (edges, histvals)

# Log pre-binned data
log_histogram(lg, "hist/cust", data_tuple, step=step)
# Automatically bin the data
log_histogram(lg, "hist/auto", randn(1000).*s0.+x0, step=step)
@info "test" i=i j=i^2 dd=rand(10).+0.1*i hh=data_tuple
@info "test_2" i=i j=2^i hh=data_tuple delta_step=0
end
end
```

94 changes: 82 additions & 12 deletions src/Logger.jl
@@ -6,7 +6,7 @@ deletes previously created events.
If `purge_step::Int` is passed, every step before `purge_step` will be ignored
by TensorBoard (useful when restarting a crashed computation).
"""
function Logger(logdir; overwrite=false, time=time(), purge_step::Union{Int,Nothing}=nothing)
function TBLogger(logdir; overwrite=false, time=time(), purge_step::Union{Int,Nothing}=nothing, min_level::LogLevel=Info)
if overwrite
rm(logdir; force=true, recursive=true)
end
@@ -33,27 +33,27 @@ function Logger(logdir; overwrite=false, time=time(), purge_step::Union{Int,Noth
write_event(file, ev_0)
end

Logger(realpath(logdir), file, all_files, start_step)
TBLogger(realpath(logdir), file, all_files, start_step, min_level)
end

# normally the logs don't overwrite, but if you've not given a path, you clearly don't care.
Logger() = Logger("tensorboard_logs", overwrite=true)
TBLogger() = TBLogger("tensorboard_logs", overwrite=true)
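As a quick illustration of the constructors above (directory names are made up; the keywords mirror the signature defined earlier, with `min_level` taken from the standard `Logging` levels):

```
using TensorBoardLogger, Logging

# Start a fresh run, wiping anything previously logged in that folder.
lg = TBLogger("runs/experiment-1", overwrite=true)

# Reopen the same folder after a crash; purge_step tells TensorBoard how to
# reconcile events that were already written (see the docstring above).
lg_resumed = TBLogger("runs/experiment-1", purge_step=100)

# Only accept log messages at Warn level or above.
lg_quiet = TBLogger("runs/quiet", min_level=Logging.Warn)
```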

# Accessors
"""
logdir(lg::Logger) -> String

Returns the directory to which Logger `lg` is writing data.
"""
logdir(lg::Logger) = lg.logdir
logdir(lg::TBLogger) = lg.logdir

"""
get_file(lg::Logger) -> IOS

Returns the main `file` IOStream object of Logger `lg`.
"""
get_file(lg::Logger) = lg.file
function add_log_file(lg::Logger, path::String)
get_file(lg::TBLogger) = lg.file
function add_log_file(lg::TBLogger, path::String)
file = open(path, "w")
lg.all_files[path] = file
return file
@@ -65,7 +65,7 @@ end
Returns the `file` IOStream object of Logger `lg` writing to the tag
`tags1/tags2.../tagsN`.
"""
function get_file(lg::Logger, tags::String...)
function get_file(lg::TBLogger, tags::String...)
key = joinpath(logdir(lg), tags...)
if haskey(lg.all_files, key)
return lg.all_files[key]
@@ -80,21 +80,21 @@ end
Sets the iteration counter in the logger to `iter`. This counter is used by the
logger when no value is passed by the user.
"""
set_step(lg::Logger, iter::Int) = lg.global_step = iter
set_step(lg::TBLogger, iter::Int) = lg.global_step = iter

increment_step(lg::TBLogger, iter::Int) = lg.global_step += iter

"""
iteration(lg)
step(lg)

Returns the internal iteration counter of the logger. When no `step` keyword
is provided to the logging functions, this value is used.
"""
step(lg::Logger) = lg.global_step
step(lg::TBLogger) = lg.global_step
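For illustration, a hedged sketch of driving this counter by hand (these helpers are not exported at this point, so they are qualified with the module name; the tag is invented):

```
using TensorBoardLogger

lg = TBLogger("runs/steps", overwrite=true)

TensorBoardLogger.set_step(lg, 0)          # start counting from zero
for epoch in 1:10
    # No step keyword: make_event falls back to the logger's internal counter.
    log_value(lg, "train/loss", 1/epoch)
    TensorBoardLogger.increment_step(lg, 1)
end
TensorBoardLogger.step(lg)                 # now 10
```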


# Additional things

#const default_logging_session = Ref(Logger())

"""
set_tb_logdir(logdir, overwrite=false)
Start a new log in the given directory
@@ -111,3 +111,73 @@ function reset_tb_logs()
logdir = default_logging_session[].logdir
default_logging_session[] = Logger(logdir, overwrite=true)
end


# Implement the AbstractLogger Interface

catch_exceptions(lg::TBLogger) = false

min_enabled_level(lg::TBLogger) = lg.min_level

# For now, log everything that is above the lg.min_level
shouldlog(lg::TBLogger, level, _module, group, id) = true

function handle_message(lg::TBLogger, level, message, _module, group, id, file, line; kwargs...)
# Unpack the message
summ = SummaryCollection()
i_step = 1

if !isempty(kwargs)
for (key,val) in pairs(kwargs)
# special values
if key == :log_step_increment
i_step = val
continue
end

data = Stack{Pair{String,Any}}()
name = message*"/$key"
push!(data, name => val)
while !isempty(data)
name, val = pop!(data)
loggable(val) ? push!(summ.value, summary_impl(name, val)) : preprocess(name, val, data)
end
end
end
iter = increment_step(lg, i_step)
write_event(lg.file, make_event(lg, summ, step=iter))
end
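A hedged sketch of how messages reach `handle_message` once the logger is installed through the standard `Logging` machinery; the tags and values are invented for illustration:

```
using TensorBoardLogger, Logging, Random

lg = TBLogger("runs/abstract-logger-demo", overwrite=true)

with_logger(lg) do
    for i in 1:100
        # Each keyword becomes a tag "<message>/<key>"; loggable values are
        # serialized directly, anything else is routed through preprocess().
        @info "train" loss=1/i noise=randn(32)

        # log_step_increment is intercepted above and only moves the step
        # counter; with 0 this record lands on the same step as the previous one.
        @info "extra" lr=0.01 log_step_increment=0
    end
end
```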

"""
loggable(value) -> Bool

Returns `true` if `value` is a type that can be serialized into a `Summary`
ProtoBuffer, `false` otherwise.

This is defined to be `false` for `::Any`; every supported TensorBoard plugin
should specialize this method for the relevant type and return `true`.
"""
loggable(::Any) = false
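For example, a hypothetical plugin for a scalar wrapper type might specialize it like this (the type `MyScalar` is made up; `scalar_summary` and `summary_impl` are the existing helpers used by `handle_message` above):

```
# Hypothetical wrapper around a single measurement.
struct MyScalar
    value::Float64
end

# Declare it serializable and say how to turn it into a Summary value.
loggable(::MyScalar) = true
summary_impl(name, x::MyScalar) = scalar_summary(name, x.value)
```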

"""
preprocess(name, val, data)

This method takes a tag `name` and a value `val::T` that cannot be serialized
directly into TensorBoard, and pushes onto the stack `data` one or more
name-value pairs `Pair{String,Any}` holding simpler types. Each pair is then
serialized if it is `loggable`, otherwise `preprocess` is called on it
recursively.

For a generic struct, the fallback method below pushes one
`name/fieldname => getproperty(val, fieldname)` pair per field.
"""
function preprocess(name, val::T, data) where T
if isstructtype(T)
fn = fieldnames(T)
for f=fn
prop = getproperty(val, f)
push!(data, name*"/$f" => prop)
end
else
throw(ErrorException("Can't log type $T, but can't preprocess it either.\n You should define preprocess(name, val::$T, data)."))
end
data
end
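A hedged sketch of the specialization the error message asks for, using a made-up result type; rather than relying on the generic field splitting above, it forwards only the pieces the existing plugins can serialize:

```
# Hypothetical user type.
struct MCResult
    estimate::Float64
    samples::Vector{Float64}
    converged::Bool
end

function preprocess(name, val::MCResult, data)
    push!(data, name*"/estimate" => val.estimate)   # logged as a scalar
    push!(data, name*"/samples"  => val.samples)    # logged as a histogram
    # `converged` is deliberately dropped.
    return data
end
```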
17 changes: 14 additions & 3 deletions src/Loggers/LogHistograms.jl
@@ -8,7 +8,7 @@ passed as a tuple holding the `N+1` bin edges and the height of the `N` bins.
You can also pass the raw data, and a binning algorithm from `StatsBase.jl` will
be used to bin the data.
"""
function log_histogram(logger::Logger, name::String, (bins,weights)::Tuple{Vector,Vector};
function log_histogram(logger::TBLogger, name::String, (bins,weights)::Tuple{Vector,Vector};
step=nothing)
summ = SummaryCollection()
push!(summ.value, histogram_summary(name, bins, weights))
@@ -21,7 +21,7 @@ end
Bins the values found in `data` and logs them as a histogram under the tag
`name`.
"""
function log_histogram(logger::Logger, name::String, data::Vector;
function log_histogram(logger::TBLogger, name::String, data::Vector;
step=nothing)
summ = SummaryCollection()
hvals = fit(Histogram, data)
@@ -34,7 +34,7 @@ end

Logs the vector found in `data` as a histogram under the name `name`.
"""
function log_vector(logger::Logger, name::String, data::Vector; step=nothing)
function log_vector(logger::TBLogger, name::String, data::Vector; step=nothing)
summ = SummaryCollection()
push!(summ.value, histogram_summary(name, collect(0:length(data)),data))
write_event(logger.file, make_event(logger, summ, step))
@@ -59,3 +59,14 @@ end
## Backward compatibility
log_histogram(logger, name, value, step) =
log_histogram(logger, name, value; step=step)


# Forward
preprocess(name, val::AbstractVector{T}, data) where T<:Complex = push!(data, name*"/re"=>real.(val), name*"/im"=>imag.(val))
preprocess(name, val::AbstractArray, data) = push!(data, name=>vec(val))

loggable(::AbstractVector{T}) where T<:Real = true
summary_impl(name, val::AbstractVector) = histogram_summary(name, collect(0:length(val)),val)

loggable(::Tuple{Vector,Vector}) = true
summary_impl(name, (bins,weights)::Tuple{Vector,Vector}) = histogram_summary(name, bins, weights)
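With these methods in place, vectors and pre-binned `(edges, weights)` tuples can also travel through the `@info` path; a small hedged sketch (tag names invented):

```
using TensorBoardLogger, Logging, Random

with_logger(TBLogger("runs/hist-demo", overwrite=true)) do
    v  = randn(500)                        # real vector  -> histogram_summary
    cv = randn(500) .+ im .* randn(500)    # complex vector -> split into /re and /im
    @info "dist" raw=v cplx=cv binned=(collect(-3.0:0.5:3.0), rand(12))
end
```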
9 changes: 7 additions & 2 deletions src/Loggers/LogValue.jl
@@ -3,13 +3,13 @@

Logs a floating-point variable with name `name` at step `step`
"""
function log_value(logger::Logger, name::String, value::Real; step=nothing)
function log_value(logger::TBLogger, name::String, value::Real; step=nothing)
summ = SummaryCollection()
push!(summ.value, scalar_summary(name, value))
write_event(logger.file, make_event(logger, summ, step=step))
end

function log_value(logger::Logger, name::String, value::Complex; step=nothing)
function log_value(logger::TBLogger, name::String, value::Complex; step=nothing)
log_value(logger, name*"/re", real(value), step)
log_value(logger, name*"/im", imag(value), step)
end
@@ -21,3 +21,8 @@ end
## Backward compatibility
log_value(logger, name, value, step) =
log_value(logger, name, value, step=step)

# Forward
loggable(::Real) = true
preprocess(name, val::Complex, data) = push!(data, name*"/re"=>real(val), name*"/im"=>imag(val))
summary_impl(name, value::Real) = scalar_summary(name, value)
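And the scalar counterpart: a `Real` keyword is logged directly, while a `Complex` one is split into `/re` and `/im` by the `preprocess` method above (a minimal hedged sketch):

```
using TensorBoardLogger, Logging

with_logger(TBLogger("runs/value-demo", overwrite=true)) do
    @info "metrics" loss=0.25 z=1.0+2.0im
    # -> tags metrics/loss, metrics/z/re and metrics/z/im
end
```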
21 changes: 14 additions & 7 deletions src/TensorBoardLogger.jl
@@ -2,10 +2,19 @@ module TensorBoardLogger

using ProtoBuf
using CRC32c
using DataStructures

#TODO: remove it. Only needed to compute histogram bins.
using StatsBase

# Import Base methods for Logging
using Base.CoreLogging:
global_logger, LogLevel, Info

import Base.CoreLogging:
AbstractLogger, handle_message, shouldlog, min_enabled_level,
catch_exceptions

# Protobuffer definitions for tensorboard
include("protojl/tensorflow.jl")
include("protojl/summary_pb.jl")
@@ -15,11 +24,13 @@ include("protojl/event_pb.jl")
include("utils.jl")

# Logging structures
mutable struct Logger
mutable struct TBLogger <: AbstractLogger
logdir::String
file::IOStream
all_files::Dict{String, IOStream}
global_step::Int

min_level::LogLevel
end

include("logging.jl")
@@ -28,13 +39,9 @@ include("Loggers/LogHistograms.jl")
include("Logger.jl")


#macro tb_log(name)
# :(_tb_log($(esc(string(name))), $(esc(name))))
#end

export log_histogram, log_value, log_vector
export scalar_summary, histogram_summary, make_event
export Logger
export TBLogger

export set_tb_logdir, reset_tb_logs, default_logging_session

4 changes: 2 additions & 2 deletions src/logging.jl
@@ -2,7 +2,7 @@
# protobuf is broken).
const SummaryCollection(;kwargs...) = Summary(value=Base.Vector{Summary_Value}(); kwargs...)

function make_event(logger::Logger, summary::Summary;
function make_event(logger::TBLogger, summary::Summary;
step::Union{Nothing, Int}=nothing)
# If the step is not set explicitly, get it from the logger
if typeof(step) == Nothing
@@ -27,4 +27,4 @@ function write_event(file::IOStream, event::Event)
flush(file)
end

write_event(logger::Logger, event::Event) = write_event(logger.file, event)
write_event(logger::TBLogger, event::Event) = write_event(logger.file, event)
4 changes: 2 additions & 2 deletions test/runtests.jl
@@ -2,7 +2,7 @@ using TensorBoardLogger
using Test

@testset "Scalar Value Logger" begin
logger = Logger("log/")
logger = TBLogger("log/")
@test isdir("log/")
step = 1
log_value(logger, "float32", 1.25f0, step=step)
@@ -17,7 +17,7 @@ using Test
end

@testset "Histogram Value Logger" begin
logger = Logger("log/")
logger = TBLogger("log/")
@test isdir("log/")
step = 1
