From 0edf26d33f5c46bb442b4a2daad8f538dbca1c99 Mon Sep 17 00:00:00 2001 From: "Anthony D. Blaom" Date: Tue, 21 Sep 2021 14:45:56 +1200 Subject: [PATCH 1/4] fix formatting in LatinHypercube doc-string --- src/strategies/latin_hypercube.jl | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/strategies/latin_hypercube.jl b/src/strategies/latin_hypercube.jl index c4a8fc20..fa9b9202 100644 --- a/src/strategies/latin_hypercube.jl +++ b/src/strategies/latin_hypercube.jl @@ -1,12 +1,12 @@ """ -LatinHypercube(gens = 1, - popsize = 100, - ntour = 2, - ptour = 0.8., - interSampleWeight = 1.0, - ae_power = 2, - periodic_ae = false, - rng=Random.GLOBAL_RNG) + LatinHypercube(gens = 1, + popsize = 100, + ntour = 2, + ptour = 0.8, + interSampleWeight = 1.0, + ae_power = 2, + periodic_ae = false, + rng=Random.GLOBAL_RNG) Instantiate grid-based hyperparameter tuning strategy using the library [LatinHypercubeSampling.jl](https://github.com/MrUrq/LatinHypercubeSampling.jl). From 5db06afa5ad2d247a8b7e20b145f98bfd31c873f Mon Sep 17 00:00:00 2001 From: "Anthony D. Blaom" Date: Tue, 21 Sep 2021 18:07:00 +1200 Subject: [PATCH 2/4] bump compat MLJBase = "0.18.19" --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 2699f134..00325a66 100644 --- a/Project.toml +++ b/Project.toml @@ -17,7 +17,7 @@ RecipesBase = "3cdcf5f2-1ef4-517c-9805-6587b60abb01" ComputationalResources = "0.3" Distributions = "0.22,0.23,0.24, 0.25" LatinHypercubeSampling = "1.7.2" -MLJBase = "0.18.15" +MLJBase = "0.18.19" MLJModelInterface = "0.4.1, 1.1.1" ProgressMeter = "1.7.1" RecipesBase = "0.8,0.9,1" From eb77a31b1cf4daa82cdf1d9bc56c399bdd134cbe Mon Sep 17 00:00:00 2001 From: "Anthony D. 
Blaom" Date: Tue, 21 Sep 2021 18:07:20 +1200 Subject: [PATCH 3/4] auto infer operations, allow measures of mixed prediction_type --- src/tuned_models.jl | 15 ++++++++------- test/tuned_models.jl | 16 ++++++++++++++++ 2 files changed, 24 insertions(+), 7 deletions(-) diff --git a/src/tuned_models.jl b/src/tuned_models.jl index d303c933..53b8ff23 100644 --- a/src/tuned_models.jl +++ b/src/tuned_models.jl @@ -69,7 +69,7 @@ const EitherTunedModel{T,M} = range=nothing, measure=nothing, n=default_n(tuning, range), - operation=predict, + operation=nothing, other_options...) Construct a model wrapper for hyper-parameter optimization of a @@ -80,7 +80,7 @@ hyper-parameters are to be mutated. resampling=Holdout(), measure=nothing, n=default_n(tuning, range), - operation=predict, + operation=nothing, other_options...) Construct a wrapper for multiple `models`, for selection of an optimal @@ -185,10 +185,10 @@ plus other key/value pairs specific to the `tuning` strategy. - `repeats=1`: for generating train/test sets multiple times in resampling; see [`evaluate!`](@ref) for details -- `operation=predict`: operation to be applied to each fitted model; - usually `predict` but `predict_mean`, `predict_median` or - `predict_mode` can be used for `Probabilistic` models, if - the specified measures are `Deterministic` +- `operation`/`operations` - One of + $(MLJBase.PREDICT_OPERATIONS_STRING), or a vector of these of the + same length as `measure`/`measures`. Automatically inferred if left + unspecified. 
- `range`: range object; tuning strategy documentation describes supported types @@ -227,7 +227,8 @@ function TunedModel(; model=nothing, measures=nothing, measure=measures, weights=nothing, - operation=predict, + operations=nothing, + operation=operations, ranges=nothing, range=ranges, selection_heuristic=NaiveSelection(), diff --git a/test/tuned_models.jl b/test/tuned_models.jl index 4de3b514..849329f5 100644 --- a/test/tuned_models.jl +++ b/test/tuned_models.jl @@ -294,4 +294,20 @@ end @test all(==(per_folds[1]), per_folds) end +@testset "deterministic metrics for probabilistic models" begin + + # https://github.com/JuliaAI/MLJBase.jl/pull/599 allows mix of + # deterministic and probabilistic metrics: + X, y = MLJBase.make_blobs() + model = DecisionTreeClassifier() + range = MLJBase.range(model, :max_depth, values=[1,2]) + tmodel = TunedModel(model=model, + range=range, + measures=[MisclassificationRate(), + LogLoss()]) + mach = machine(tmodel, X, y) + @test_logs fit!(mach, verbosity=0) + +end + true From 63cc4ed1a69ec3b303fba4461abcfb038116e539 Mon Sep 17 00:00:00 2001 From: "Anthony Blaom, PhD" Date: Tue, 21 Sep 2021 18:20:14 +1200 Subject: [PATCH 4/4] Bump version 0.6.12 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 00325a66..b04c5639 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "MLJTuning" uuid = "03970b2e-30c4-11ea-3135-d1576263f10f" authors = ["Anthony D. Blaom "] -version = "0.6.11" +version = "0.6.12" [deps] ComputationalResources = "ed09eef8-17a6-5b46-8889-db040fac31e3"