Skip to content

Commit

Permalink
Merge pull request #149 from JuliaAI/dev
Browse files Browse the repository at this point in the history
For a 0.6.12 release
  • Loading branch information
ablaom authored Sep 21, 2021
2 parents a819b50 + 63cc4ed commit a47019b
Show file tree
Hide file tree
Showing 4 changed files with 34 additions and 17 deletions.
4 changes: 2 additions & 2 deletions Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "MLJTuning"
uuid = "03970b2e-30c4-11ea-3135-d1576263f10f"
authors = ["Anthony D. Blaom <anthony.blaom@gmail.com>"]
version = "0.6.11"
version = "0.6.12"

[deps]
ComputationalResources = "ed09eef8-17a6-5b46-8889-db040fac31e3"
Expand All @@ -17,7 +17,7 @@ RecipesBase = "3cdcf5f2-1ef4-517c-9805-6587b60abb01"
ComputationalResources = "0.3"
Distributions = "0.22,0.23,0.24, 0.25"
LatinHypercubeSampling = "1.7.2"
MLJBase = "0.18.15"
MLJBase = "0.18.19"
MLJModelInterface = "0.4.1, 1.1.1"
ProgressMeter = "1.7.1"
RecipesBase = "0.8,0.9,1"
Expand Down
16 changes: 8 additions & 8 deletions src/strategies/latin_hypercube.jl
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
"""
LatinHypercube(gens = 1,
popsize = 100,
ntour = 2,
ptour = 0.8.,
interSampleWeight = 1.0,
ae_power = 2,
periodic_ae = false,
rng=Random.GLOBAL_RNG)
LatinHypercube(gens = 1,
popsize = 100,
ntour = 2,
                   ptour = 0.8,
interSampleWeight = 1.0,
ae_power = 2,
periodic_ae = false,
rng=Random.GLOBAL_RNG)
Instantiate grid-based hyperparameter tuning strategy using the
library [LatinHypercubeSampling.jl](https://github.com/MrUrq/LatinHypercubeSampling.jl).
Expand Down
15 changes: 8 additions & 7 deletions src/tuned_models.jl
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ const EitherTunedModel{T,M} =
range=nothing,
measure=nothing,
n=default_n(tuning, range),
operation=predict,
operation=nothing,
other_options...)
Construct a model wrapper for hyper-parameter optimization of a
Expand All @@ -80,7 +80,7 @@ hyper-parameters are to be mutated.
resampling=Holdout(),
measure=nothing,
n=default_n(tuning, range),
operation=predict,
operation=nothing,
other_options...)
Construct a wrapper for multiple `models`, for selection of an optimal
Expand Down Expand Up @@ -185,10 +185,10 @@ plus other key/value pairs specific to the `tuning` strategy.
- `repeats=1`: for generating train/test sets multiple times in
resampling; see [`evaluate!`](@ref) for details
- `operation=predict`: operation to be applied to each fitted model;
usually `predict` but `predict_mean`, `predict_median` or
`predict_mode` can be used for `Probabilistic` models, if
the specified measures are `Deterministic`
- `operation`/`operations` - One of
$(MLJBase.PREDICT_OPERATIONS_STRING), or a vector of these of the
same length as `measure`/`measures`. Automatically inferred if left
unspecified.
- `range`: range object; tuning strategy documentation describes
supported types
Expand Down Expand Up @@ -227,7 +227,8 @@ function TunedModel(; model=nothing,
measures=nothing,
measure=measures,
weights=nothing,
operation=predict,
operations=nothing,
operation=operations,
ranges=nothing,
range=ranges,
selection_heuristic=NaiveSelection(),
Expand Down
16 changes: 16 additions & 0 deletions test/tuned_models.jl
Original file line number Diff line number Diff line change
Expand Up @@ -294,4 +294,20 @@ end
@test all(==(per_folds[1]), per_folds)
end

@testset "deterministic metrics for probabilistic models" begin

    # https://github.com/JuliaAI/MLJBase.jl/pull/599 allows mix of
    # deterministic and probabilistic metrics:
    X, y = MLJBase.make_blobs()
    clf = DecisionTreeClassifier()
    r = MLJBase.range(clf, :max_depth, values=[1, 2])
    # tune over max_depth with one deterministic and one
    # probabilistic measure in the same `measures` vector:
    tuned = TunedModel(model=clf,
                       range=r,
                       measures=[MisclassificationRate(),
                                 LogLoss()])
    # fitting should succeed and emit no log messages:
    @test_logs fit!(machine(tuned, X, y), verbosity=0)

end

true

0 comments on commit a47019b

Please sign in to comment.