diff --git a/src/pattern_detection.jl b/src/pattern_detection.jl
index fe5b67a3..01610821 100644
--- a/src/pattern_detection.jl
+++ b/src/pattern_detection.jl
@@ -3,10 +3,11 @@ function slow_filter(img)
     return filtered_data
 end
 
-function fast_filter!(dat_filtered, kernel, dat) #
+
+function fast_filter(kernel, dat) #
     #r = Images.ImageFiltering.ComputationalResources.CPU1(Images.ImageFiltering.FIR())
-    DSP.filt!(dat_filtered, kernel[1].data.parent, dat)
-    return dat_filtered
+    filter_result = DSP.filt(kernel[1].data.parent, dat)
+    return filter_result
 end
 
 function single_chan_pattern_detector(dat, func, evts)
@@ -42,26 +43,26 @@ function mult_chan_pattern_detector_probability(dat, stat_function, evts; n_perm
     d_perm = similar(dat, size(dat, 1), n_permutations)
     @debug "starting permutation loop"
     # We permute data for all events in advance
-    for ch = 1:size(dat, 1)
-        for perm = 1:n_permutations
-
+
+    Threads.@threads for perm = 1:n_permutations
+        for ch = 1:size(dat, 1)
             sortix = shuffle(1:size(dat_filtered, 1))
             d_perm[ch, perm] = stat_function(
-                fast_filter!(dat_filtered, kernel, @view(dat_padded[ch, sortix, :])),
+                fast_filter(kernel, @view(dat_padded[ch, sortix, :])),
             )
             @show ch, perm
         end
     end
 
     mean_d_perm = mean(d_perm, dims = 2)[:, 1]
-    for n in names(evts)
+    Threads.@threads for n in names(evts)
         sortix = sortperm(evts[!, n])
         col = fill(NaN, size(dat, 1))
         for ch = 1:size(dat, 1)
-            fast_filter!(dat_filtered, kernel, @view(dat_padded[ch, sortix, :]))
-            d_emp = stat_function(dat_filtered)
+            col[ch] = abs(stat_function(
+                fast_filter(kernel, @view(dat_padded[ch, sortix, :]))
+            ) - mean_d_perm[ch])
 
-            col[ch] = abs(d_emp - mean_d_perm[ch])
             print(ch, " ")
         end
         println(n)
diff --git a/src/runner.jl b/src/runner.jl
index 19999d11..66d5fa60 100644
--- a/src/runner.jl
+++ b/src/runner.jl
@@ -1,3 +1,7 @@
+# FOR MULTITHREADING:
+# run: >julia -t [n_threads]
+# replace [n_threads] with the desired number of threads (<= number of CPU cores)
+
 include("setup.jl")
 include("pattern_detection.jl")
 
@@ -29,6 +33,13 @@ fid = h5open("data/mult.hdf5", "r")
 dat2 = read(fid["data"]["mult.hdf5"])
 close(fid)
 
+# Data for multiple channels (only fixations)
+# 128 channels x 769 time x 2508 events
+
+fid = h5open("data/data_fixations.hdf5", "r")
+dat_fix = read(fid["data"]["data_fixations.hdf5"])
+close(fid)
+
 # PATTERN DECTECTION 1
 # for single channel data
 
@@ -65,6 +76,12 @@ evts_init = CSV.read("data/events_init.csv", DataFrame)
     evts_d = mult_chan_pattern_detector_probability(dat2[:, :, ix], Images.entropy, evts)
 end
 
+# PATTERN DETECTION 4 (FOR FIXATIONS ONLY)
+# 10 cores: 50 s
+@time begin
+    evts_d = mult_chan_pattern_detector_probability(dat_fix, Images.entropy, evts)
+end
+
 begin
     f = Figure()
     ax = CairoMakie.Axis(f[1, 1], xlabel = "Channels", ylabel = "Sorting event variables")
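
Review note on the permutation loop: below is a minimal, self-contained sketch of the thread-parallel pattern the new code relies on, using toy data and a stand-in statistic rather than the project's Images.entropy and fast_filter. The point it illustrates is that each (ch, perm) iteration writes only its own cell of a preallocated matrix, which is why switching from the in-place fast_filter! (shared dat_filtered scratch buffer) to the allocating fast_filter makes the loop safe under Threads.@threads.

using Random, Statistics

n_chan, n_events, n_time = 4, 50, 100
dat = rand(n_chan, n_events, n_time)      # toy data: channels x events x time
n_permutations = 20
stat_function = x -> mean(abs.(x))        # stand-in for the real statistic

d_perm = zeros(n_chan, n_permutations)    # preallocated result matrix

# Each iteration writes only d_perm[ch, perm], so threads share no mutable
# state; shuffle uses Julia's task-local default RNG (Julia 1.7+), so it is
# safe to call from multiple threads.
Threads.@threads for perm = 1:n_permutations
    for ch = 1:n_chan
        sortix = shuffle(1:n_events)
        d_perm[ch, perm] = stat_function(@view dat[ch, sortix, :])
    end
end

mean_d_perm = mean(d_perm, dims = 2)[:, 1]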
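
A second, smaller note on the runner.jl comment: the "10 cores: 50 s" timing only holds if Julia actually starts with the requested threads, so a one-line check (plain Base API, nothing project-specific) near the top of the script can catch an accidental single-threaded run.

# Prints the thread count the process was started with, e.g. after
# `julia -t 10 src/runner.jl`; a value of 1 means the -t flag was not applied.
println("running with ", Threads.nthreads(), " thread(s)")

Setting the JULIA_NUM_THREADS environment variable before launching Julia is an equivalent alternative to the -t flag.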