Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ version = "0.3.0"
[deps]
ConstraintDomains = "5800fd60-8556-4464-8d61-84ebf7a0bedb"
Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4"
Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7"
Evolutionary = "86b6b26d-c046-49b6-aa0b-5f0f74682bd6"
JuliaFormatter = "98e50ef6-434e-11e9-1051-2b60c6c9e899"
Memoization = "6fafb56a-5788-4b4e-91ca-c0cea6611c73"
Expand All @@ -16,7 +17,7 @@ ThreadSafeDicts = "4239201d-c60e-5e0a-9702-85d713665ba7"
Unrolled = "9602ed7d-8fef-5bc8-8597-8f21381861e8"

[compat]
ConstraintDomains = "0.2"
ConstraintDomains = "0.3"
Dictionaries = "0.3"
Evolutionary = "0.10"
JuliaFormatter = "0.16"
Expand Down
1 change: 1 addition & 0 deletions src/CompositionalNetworks.jl
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ module CompositionalNetworks
# Usings
using ConstraintDomains
using Dictionaries
import Distances
using Evolutionary
using JuliaFormatter
using Memoization
Expand Down
18 changes: 8 additions & 10 deletions src/composition.jl
Original file line number Diff line number Diff line change
Expand Up @@ -53,18 +53,16 @@ function generate(c::Composition, name, ::Val{:Julia})
"""

output = """
function $name(x; X = zeros(length(x), $tr_length), param=nothing, dom_size)
$(CN)tr_in(Tuple($tr), X, x, param)
for i in 1:length(x)
X[i,1] = $ar(@view X[i,:])
end
return $ag(@view X[:, 1]) |> (y -> $co(y; param, dom_size, nvars=length(x)))
end
"""
return documentation * format_text(output, BlueStyle(), pipe_to_function_call = false)
function $name(x; X = zeros(length(x), $tr_length), param=nothing, dom_size)
$(CN)tr_in(Tuple($tr), X, x, param)
X[:, 1] .= 1:length(x) .|> (i -> $ar(@view X[i, 1:$tr_length]))
return $ag(@view X[:, 1]) |> (y -> $co(y; param, dom_size, nvars=length(x)))
end
"""
return documentation * format_text(output, BlueStyle(); pipe_to_function_call=false)
end

function composition_to_file!(c::Composition, path, name, language =:Julia)
function composition_to_file!(c::Composition, path, name, language=:Julia)
output = code(c, language; name)
write(path, output)
return nothing
Expand Down
39 changes: 15 additions & 24 deletions src/genetic.jl
Original file line number Diff line number Diff line change
Expand Up @@ -8,22 +8,6 @@ function generate_population(icn, pop_size)
return population
end

"""
loss(X, X_sols, icn, weigths, metric)
Compute the loss of `icn`.
"""
function loss(solutions, non_sltns, icn, weigths, metric, dom_size, param; samples=nothing)
compo = compose(icn, weigths)
f = composition(compo)
X = if isnothing(samples)
Iterators.flatten((solutions, non_sltns))
else
Iterators.flatten((solutions, rand(non_sltns, samples)))
end
σ = sum(x -> abs(f(x; param, dom_size) - metric(x, solutions)), X) + regularization(icn)
return σ
end

"""
_optimize!(icn, X, X_sols; metric = hamming, pop_size = 200)
Optimize and set the weigths of an ICN with a given set of configuration `X` and solutions `X_sols`.
Expand All @@ -38,13 +22,20 @@ function _optimize!(
pop_size,
iterations;
samples=nothing,
memoize=true,
memoize=false,
)
_metric = memoize ? (@memoize Dict memoize_metric(x, X) = metric(x, X)) : metric
_bias = memoize ? (@memoize Dict memoize_bias(x) = weigths_bias(x)) : weigths_bias
fitness =
w ->
loss(solutions, non_sltns, icn, w, _metric, dom_size, param; samples) + _bias(w)
inplace = zeros(dom_size, max_icn_length())
_non_sltns = isnothing(samples) ? non_sltns : rand(non_sltns, samples)

function fitness(w)
compo = compose(icn, w)
f = composition(compo)
S = Iterators.flatten((solutions, _non_sltns))
return sum(x -> abs(f(x; X=inplace, param, dom_size) - metric(x, solutions)), S) +
regularization(icn) +
weigths_bias(w)
end
_fitness = memoize ? (@memoize Dict memoize_fitness(w) = fitness(w)) : fitness

_icn_ga = GA(;
populationSize=pop_size,
Expand All @@ -57,7 +48,7 @@ function _optimize!(
)

pop = generate_population(icn, pop_size)
r = Evolutionary.optimize(fitness, pop, _icn_ga, Evolutionary.Options(; iterations))
r = Evolutionary.optimize(_fitness, pop, _icn_ga, Evolutionary.Options(; iterations))
return weights!(icn, Evolutionary.minimizer(r))
end

Expand All @@ -76,7 +67,7 @@ function optimize!(
metric,
pop_size;
sampler=nothing,
memoize=true,
memoize=false,
)
results = Dictionary{BitVector,Int}()
aux_results = Vector{BitVector}(undef, global_iter)
Expand Down
23 changes: 14 additions & 9 deletions src/icn.jl
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ mutable struct ICN
co_layer=comparison_layer(param),
)
w = generate_weights([tr_layer, ar_layer, ag_layer, co_layer])
new(tr_layer, ar_layer, ag_layer, co_layer, w)
return new(tr_layer, ar_layer, ag_layer, co_layer, w)
end
end

Expand Down Expand Up @@ -75,7 +75,7 @@ Set the weights of an ICN with a `BitVector`.
function weights!(icn, weigths)
    # Soft diagnostic first: log the ICN and the offending vector before the
    # hard check below aborts, so the mismatch is visible in the log.
    length(weigths) == nbits(icn) || @warn icn weigths
    # Invariant: the weights vector must have exactly one bit per operation
    # slot of the ICN (see `nbits`). NOTE(review): `@assert` may be disabled
    # at higher optimization levels — consider `throw(DimensionMismatch(...))`
    # if callers rely on this check.
    @assert length(weigths) == nbits(icn)
    return icn.weigths = weigths
end

"""
Expand Down Expand Up @@ -107,14 +107,20 @@ function regularization(icn)
return Σop / (Σmax + 1)
end

max_icn_length(icn = ICN(param = true)) = length(icn.transformation)
"""
    max_icn_length(icn = ICN(; param = true))

Return the length of the transformation layer of `icn`, i.e. the maximum
number of transformation operations an ICN composition can use. Defaults to
a freshly-constructed parametric ICN.
"""
max_icn_length(icn=ICN(; param=true)) = length(icn.transformation)

"""
_compose(icn)
Internal function called by `compose` and `show_composition`.
"""
function _compose(icn::ICN)
!is_viable(icn) && (return ((x; param=nothing, dom_size=0) -> typemax(Float64)), [])
!is_viable(icn) && (
return (
(x; X=zeros(length(x), max_icn_length()), param=nothing, dom_size=0) ->
typemax(Float64)
),
[]
)

funcs = Vector{Vector{Function}}()
symbols = Vector{Vector{Symbol}}()
Expand Down Expand Up @@ -148,11 +154,10 @@ function _compose(icn::ICN)

function composition(x; X=zeros(length(x), length(funcs[1])), param=nothing, dom_size)
tr_in(Tuple(funcs[1]), X, x, param)
for i in 1:length(x)
X[i,1] = funcs[2][1](@view X[i,:])
end
funcs[3][1](@view X[:, 1]) |>
(y -> funcs[4][1](y; param, dom_size, nvars=length(x)))
X[:, 1] .= 1:length(x) .|> (i -> funcs[2][1](@view X[i, 1:length(funcs[1])]))
return (y -> funcs[4][1](y; param, dom_size, nvars=length(x)))(
funcs[3][1](@view X[:, 1])
)
end

return composition, symbols
Expand Down
20 changes: 10 additions & 10 deletions src/learn.jl
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ function learn_compose(
dom_size,
param=nothing;
global_iter=Threads.nthreads(),
local_iter=100,
local_iter=64,
metric=:hamming,
pop_size=400,
pop_size=64,
sampler=nothing,
memoize=true,
memoize=false,
)
icn = ICN(; param=!isnothing(param))
_, weigths = optimize!(
Expand Down Expand Up @@ -60,19 +60,19 @@ function explore_learn_compose(
concept,
param=nothing;
global_iter=Threads.nthreads(),
local_iter=100,
local_iter=64,
metric=:hamming,
pop_size=400,
pop_size=64,
search=:flexible,
complete_search_limit=1000,
solutions_limit=100,
sampler=nothing,
configurations=explore(
domains, concept, param; search, complete_search_limit, solutions_limit
),
memoize=true,
memoize=false,
)
dom_size = maximum(domain_size, domains)
dom_size = maximum(length, domains)
solutions, non_sltns = configurations
return learn_compose(
solutions,
Expand Down Expand Up @@ -116,14 +116,14 @@ function compose_to_file!(
param=nothing,
global_iter=Threads.nthreads(),
language=:Julia,
local_iter=100,
local_iter=64,
metric=hamming,
pop_size=400,
pop_size=64,
search=:flexible,
search_limit=1000,
solutions_limit=100,
sampler=nothing,
memoize=true,
memoize=false,
)
compo, icn, _ = explore_learn_compose(
domains,
Expand Down
7 changes: 3 additions & 4 deletions src/metrics.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,18 +2,17 @@
hamming(x, X)
Compute the hamming distance of `x` over a collection of solutions `X`, i.e. the minimal number of variables to switch in `x`to reach a solution.
"""
hamming(x, X) = mapreduce(y -> sum(x .!= y), min, X; init = length(x))
# Minimal Hamming distance from `x` to any solution in `X`.
# `init = length(x)` keeps the reduction well-defined when `X` is empty and is
# behavior-preserving otherwise: a Hamming distance never exceeds `length(x)`,
# so seeding `min` with it cannot change the result for nonempty `X`.
hamming(x, X) = mapreduce(y -> Distances.hamming(x, y), min, X; init=length(x))

"""
    minkowski(x, X, p)

Compute the minimal Minkowski distance (of order `p`) of `x` over a collection
of solutions `X`.
"""
# Bug fix: the previous version passed `Distances.minkowski(x, y, p)` — an
# expression referencing an undefined `y` — instead of an anonymous function,
# so every call raised `UndefVarError`. `mapreduce` needs a callable mapper.
minkowski(x, X, p) = mapreduce(y -> Distances.minkowski(x, y, p), min, X)

"""
manhattan(x, X)
"""
manhattan(x, X) = mapreduce(y -> sum(abs.(x .- y)), min, X; init = typemax(Int))
# manhattan(x, X) = minkowski(x, X, 1)
# Minimal Manhattan (city-block) distance from `x` to any solution in `X`.
# `Base.Fix1` partially applies `x` as the first argument of `cityblock`,
# avoiding an explicit anonymous function; the reduction is unchanged.
manhattan(x, X) = mapreduce(Base.Fix1(Distances.cityblock, x), min, X)

"""
weigths_bias(x)
Expand Down
Loading