Simon Danisch / Nov 18 2018
Julia
Julia
# Install/update the pinned package set this demo was written against
# (Flux 0.6.x era APIs), then precompile everything up front.
pkg"up; add WebSockets WebIO#master JSExpr@0.3.1 Observables CSSUtil ImageFiltering Flux@0.6.8 ImageTransformations CuArrays#master GPUArrays#master NNlib#master Makie#master AbstractPlotting#master FixedPointNumbers"
pkg"precompile"
# Suppress the REPL echo of the last value.
nothing
45.8s
using Flux, Flux.Data.MNIST, Statistics
using Flux: onehotbatch, onecold, crossentropy, throttle
using Base.Iterators: repeated, partition
using CuArrays

# Classify MNIST digits with a convolutional network.
imgs = MNIST.images()
labels = onehotbatch(MNIST.labels(), 0:9)

# Partition into batches of size 1,000; each entry is a (images, labels)
# pair moved to the GPU. Images are stacked along dim 4 (W×H×C×N layout).
train = gpu.([(cat(float.(imgs[i])..., dims = 4), labels[:, i])
              for i in partition(1:60_000, 1000)])

# Prepare test set (first 1,000 images), same layout as the training batches.
tX = gpu(cat(float.(MNIST.images(:test)[1:1000])..., dims = 4))
tY = gpu(onehotbatch(MNIST.labels(:test)[1:1000], 0:9))

# Flatten the conv output to (features, batch) for the Dense layer.
reshape4(x) = reshape(x, :, size(x, 4))
# Named pooling helper so the layer can be dispatched on in visualize_layer.
maxpool2x2(x) = maxpool(x, (2,2))

m = gpu(Chain(
    Conv((2,2), 1=>16, relu),
    maxpool2x2,
    Conv((2,2), 16=>8, relu),
    maxpool2x2,
    reshape4,
    Dense(288, 10),
    softmax,
))

loss(x, y) = crossentropy(m(x), y)
accuracy(x, y) = mean(onecold(m(x)) .== onecold(y))
# Throttled callback: evaluates test accuracy at most once per 10 s.
# NOTE(review): the accuracy value is computed but not printed or stored —
# presumably a `@show` was dropped in the article extraction; confirm.
evalcb = throttle(() -> (accuracy(tX, tY)), 10)
# Flux 0.6-style optimizer construction, bound to the model parameters.
opt = ADAM(Flux.params(m));
# train for i = 1:10 Flux.train!(loss, train, opt, cb = evalcb) end
using ForwardDiff

# A simple polynomial and its hand-written derivative, used to check
# ForwardDiff's result against the analytic gradient.
f(x) = x^2 + 2.0x + 3.0
f_grad(x) = 2x + 2 # manual gradient function

val = 5
# A Dual number carries a value plus one partial derivative through f.
dual = ForwardDiff.Dual(val, 1)
dx = f(dual)
# Value part matches the plain evaluation…
ForwardDiff.value(dx) == f(val)
# …and the partials part matches the analytic derivative.
ForwardDiff.partials(dx)[] == f_grad(val)

using BenchmarkTools
vals = fill(val, 10^6)
duals = ForwardDiff.Dual.(vals, 1)
# NOTE(review): `$`-interpolation is BenchmarkTools syntax and is only valid
# inside `@btime`/`@benchmark`; the macros were lost in the article
# extraction, so they are restored here (benchmarking scalar vs. Dual,
# and broadcast over a million elements of each).
@btime f($val)
@btime f($dual)
@btime f.($vals)
@btime f.($duals)
nothing
# Flatten a W×H×C activation volume into one wide 2D image: concatenate the
# channel slices horizontally, then rotate so it displays upright in Makie.
function d3tod2(input3d)
    slices = (input3d[:, :, c] for c in 1:size(input3d, 3))
    return rotr90(hcat(slices...))
end

# Drop every singleton dimension of `A` (like numpy's squeeze).
function squeeze(A)
    singleton = ((d for d in 1:ndims(A) if size(A, d) == 1)...,)
    return dropdims(A, dims = singleton)
end

# Conv / pooling layers: render all channel activations as one heatmap that
# updates whenever `input` (an Observable) changes.
function visualize_layer(scene, layer::Union{Conv, typeof(maxpool2x2)}, input)
    img = lift(d3tod2 ∘ collect, input)
    heatmap!(scene, 0..16, 0..1, img, colormap = [:black, :white], interpolate = true)
end

# Dense layer: render its weight matrix, a barplot of the 10 class scores,
# and digit labels 0–9 under the bars.
function visualize_layer(scene, layer::Dense, input)
    # The lambda ignores its argument: W is re-read each time `input` fires,
    # so the heatmap tracks the weights as training updates them.
    heatmap!(scene, 0..16, 0..1, lift(x-> collect(layer.W'), input))
    height = lift(input) do x
        # Shift/scale scores into a plottable positive range.
        (collect(x)[:, 1] .+ 15) ./ 15
    end
    xrange = range(1, stop = 15, length = 10)
    barplot!(scene, xrange, height, color = height, colormap = Reverse(:Spectral))
    annotations!(
        scene, string.(0:9),
        Point2f0.(xrange, 0.1),
        textsize = 0.5
    )
end

# Fallback: layers we don't know how to draw (reshape4, softmax, …) are
# silently skipped.
function visualize_layer(scene, layer, input)
end
using Makie, Observables, WebSockets, ImageShow, WebIO
using AbstractPlotting: hbox!

# Build a scene showing the network input on the left and one visualization
# per layer of `m` to its right, all reacting to the `input` Observable.
function visualize(m, input)
    scene = Scene(camera = cam2d!, raw = true, resolution = (650, 250))
    # Draw the raw input digit.
    digit_img = lift(rotr90 ∘ squeeze ∘ collect, input)
    image!(scene, (-8)..(-1), (0.0)..7, digit_img, colorrange = (0, 1))
    drawto!(scene, input, scene[end])
    # Feed the input through the chain layer by layer, visualizing each
    # intermediate activation; async_latest drops stale updates so the
    # renderer never falls behind training.
    activation = async_latest(input)
    for layer in m
        activation = lift(x-> layer(x), activation)
        visualize_layer(scene, layer, activation)
    end
    # Lay out the per-layer plots horizontally (skipping the input image).
    hbox!(scene.plots[3:end])
    center!(scene)
    display(scene)
    scene
end
# Use the first test image (kept 4-D via 1:1 ranges) as the initial input
# and build the live visualization around it.
input_node = Makie.Node(tX[:, :, 1:1, 1:1])
scene = visualize(m, input_node)
# Re-emit the scene whenever the input node fires; `yield` lets the
# renderer run between updates.
lift(x-> (yield(); scene), input_node)
# Throttled callback (at most every 0.01 s): recompute test accuracy and
# poke the input node so the layer visualizations refresh with the newly
# trained weights.
# NOTE(review): the accuracy value is computed but discarded — presumably a
# `@show` was dropped in the article extraction; confirm.
evalcb = throttle(0.01) do
    (accuracy(tX, tY))
    input_node[] = input_node[] # update node
end

for epoch in 1:10
    Flux.train!(loss, train, opt, cb = evalcb)
end
# Embed the demo page in the notebook output. The filename lost its quotes
# (and gained a stray ↩ glyph) in the article extraction — restore the
# string literal so `read` receives a path.
HTML(read("index.html", String))