This repository was archived by the owner on Apr 26, 2021. It is now read-only.

Commit 6378db6

Merge branch 'master' into compathelper/new_version/2020-03-03-00-12-18-450-1857031776
2 parents 911a030 + a47fa44 commit 6378db6

7 files changed (+39 additions, -31 deletions)

NEWS.md

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+# 0.1.1
+
+* Updated dependencies

Project.toml

Lines changed: 7 additions & 6 deletions
@@ -1,7 +1,7 @@
 name = "JLBoostMLJ"
 uuid = "8b86df2c-1bc3-481d-95df-1c4d5a20ed95"
 authors = ["Dai ZJ <zhuojia.dai@gmail.com>"]
-version = "0.1.0"
+version = "0.1.1"

 [deps]
 DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
@@ -12,15 +12,16 @@ MLJBase = "a7f614a8-145f-11e9-1d2a-a57a1082229d"
 ScientificTypes = "321657f4-b219-11e9-178b-2701a2544e81"

 [compat]
-DataFrames = "0.19, 0.20"
-JLBoost = "^0.1.4"
+DataFrames = "0.20"
+JLBoost = "^0.1.7"
 LossFunctions = "0.5"
-MLJ = "0.6"
-MLJBase = "0.9, 0.11"
-ScientificTypes = "0.3"
+MLJ = "0.9"
+MLJBase = "0.11"
+ScientificTypes = "0.7"
 julia = "1"

 [extras]
+CategoricalArrays = "324d7699-5711-5eae-9e2f-1d82baa6b597"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

 [targets]
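
Since the `[compat]` section now pins each dependency to a single minor series (DataFrames 0.20, MLJ 0.9, MLJBase 0.11, ScientificTypes 0.7), a quick local sanity check is to confirm the bounds can still be resolved together. A minimal sketch, assuming the working directory is a checkout of JLBoostMLJ:

```julia
using Pkg

Pkg.activate(".")   # use this checkout's Project.toml rather than the default environment
Pkg.resolve()       # fails if the [compat] bounds cannot be satisfied simultaneously
Pkg.status()        # shows which versions of DataFrames, MLJ, MLJBase, etc. were resolved
```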

README.jl

Lines changed: 0 additions & 2 deletions
@@ -1,4 +1,3 @@
-
 using RDatasets;
 iris = dataset("datasets", "iris");
 iris[!, :is_setosa] .= iris.Species .== "setosa";
@@ -52,4 +51,3 @@ predict(model, mljmodel.fitresult, X)


 feature_importance(mljmodel.fitresult.treemodel, X, y)
-

README.jmd

Lines changed: 0 additions & 1 deletion
@@ -48,7 +48,6 @@ feature_importance(fitted_params(mljmachine).fitresult, X, y)
 Data preparation: need to convert `y` to categorical

 ```julia
-using CategoricalArrays
 y_cate = categorical(y)
 ```
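
The `using CategoricalArrays` line can be dropped here (and CategoricalArrays moves into `[extras]` above for the tests), presumably because `categorical` is already re-exported by packages the README loads; MLJ/MLJBase and DataFrames both re-export it in these versions. A minimal sketch of the same data-preparation step, using a hypothetical target vector `y`:

```julia
using MLJ                  # re-exports `categorical`, so no direct CategoricalArrays import

y = rand(Bool, 150)        # hypothetical binary target, standing in for iris.is_setosa
y_cate = categorical(y)    # a CategoricalVector, giving y the finite scitype the classifier expects
```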

README.md

Lines changed: 21 additions & 18 deletions
@@ -19,15 +19,16 @@ model = JLBoostClassifier()


 ````
-JLBoostClassifier(loss = JLBoost.LogitLogLoss(),
-nrounds = 1,
-subsample = 1.0,
-eta = 1.0,
-max_depth = 6,
-min_child_weight = 1.0,
-lambda = 0.0,
-gamma = 0.0,
-colsample_bytree = 1,) @ 6…52
+JLBoostClassifier(
+loss = JLBoost.LogitLogLoss(),
+nrounds = 1,
+subsample = 1.0,
+eta = 1.0,
+max_depth = 6,
+min_child_weight = 1.0,
+lambda = 0.0,
+gamma = 0.0,
+colsample_bytree = 1) @ 1…51
 ````


@@ -44,7 +45,7 @@ mljmachine = machine(model, X, y)


 ````
-Machine{JLBoostClassifier} @ 1…93
+Machine{JLBoostClassifier} @ 3…89
 ````


@@ -73,7 +74,7 @@ Choosing a split on SepalLength
 Choosing a split on SepalWidth
 Choosing a split on PetalLength
 Choosing a split on PetalWidth
-Machine{JLBoostClassifier} @ 1…93
+Machine{JLBoostClassifier} @ 3…89
 ````


@@ -139,7 +140,6 @@ feature_importance(fitted_params(mljmachine).fitresult, X, y)
 Data preparation: need to convert `y` to categorical

 ````julia
-using CategoricalArrays
 y_cate = categorical(y)
 ````

@@ -184,10 +184,13 @@ r3 = range(jlb, :eta, lower=0.1, upper=1.0)


 ````
-MLJ.NumericRange(field = :eta,
-lower = 0.1,
-upper = 1.0,
-scale = :linear,) @ 1…03
+NumericRange(
+field = :eta,
+lower = 0.1,
+upper = 1.0,
+origin = 0.55,
+unit = 0.45,
+scale = :linear) @ 1…28
 ````


@@ -202,7 +205,7 @@ m = machine(tm, X, y_cate)


 ````
-Machine{ProbabilisticTunedModel} @ 5…78
+Machine{ProbabilisticTunedModel} @ 1…91
 ````


@@ -216,7 +219,7 @@ fit!(m)


 ````
-Machine{ProbabilisticTunedModel} @ 5…78
+Machine{ProbabilisticTunedModel} @ 1…91
 ````

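
One substantive change in the re-woven output is the `NumericRange` display: the newer MLJ version also prints `origin` and `unit`, and the values shown are consistent with them defaulting to the midpoint and half-width of the bounds. A quick check of that reading:

```julia
lower, upper = 0.1, 1.0
origin = (lower + upper) / 2   # 0.55, matching the printed origin
unit   = (upper - lower) / 2   # 0.45, matching the printed unit
```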

build-readme.jl

Lines changed: 4 additions & 0 deletions
@@ -1,10 +1,14 @@
 # Weave readme
 using Pkg
 Pkg.activate("c:/git/JLBoostMLJ")
+
+Pkg.add("Weave")
 using Weave

 weave("c:/git/JLBoostMLJ/README.jmd", out_path="c:/git/JLBoostMLJ", doctype="github")

+Pkg.rm("Weave")
+

 if false
     tangle("c:/git/JLBoostMLJ/README.jmd")
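
The added `Pkg.add("Weave")` / `Pkg.rm("Weave")` pair installs Weave only for the duration of this script and removes it afterwards, so it never becomes a permanent dependency of JLBoostMLJ. An alternative (not what this commit does, only a sketch that assumes a `docs/Project.toml` containing Weave exists) is to keep Weave in a dedicated docs environment:

```julia
using Pkg

Pkg.activate("docs")   # hypothetical docs/ environment that owns Weave
using Weave
weave("README.jmd", out_path = ".", doctype = "github")   # same call as in the script
```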

src/mlj.jl

Lines changed: 4 additions & 4 deletions
@@ -8,7 +8,7 @@ import MLJBase: input_scitype, target_scitype, docstring, UnivariateFinite
 using ScientificTypes: Continuous, OrderedFactor, Count, Multiclass, Finite

 using LossFunctions: PoissonLoss, L2DistLoss
-using JLBoost: LogitLogLoss, jlboost, AUC, gini, feature_importance
+using JLBoost: LogitLogLoss, jlboost, AUC, gini, feature_importance, predict

 using DataFrames: DataFrame, nrow, levels, categorical

@@ -173,7 +173,7 @@ fit(model::JLBoostClassifier, verbosity::Int, X, y::AbstractVector) = begin
         fitresult = fitresult,
         cache = nothing,
         report = (
-            AUC = abs(AUC(predict(fitresult.treemodel, X), y)),
+            AUC = abs(AUC(JLBoost.predict(fitresult.treemodel, X), y)),
             feature_importance = feature_importance(fitresult.treemodel, df)
         )
     )
@@ -195,15 +195,15 @@ fitted_params(model::JLBoostMLJModel, fitresult) = (fitresult = fitresult.treemo

 # see https://alan-turing-institute.github.io/MLJ.jl/stable/adding_models_for_general_use/#The-predict-method-1
 predict(model::JLBoostClassifier, fitresult, Xnew) = begin
-    res = predict(fitresult.treemodel, Xnew)
+    res = JLBoost.predict(fitresult.treemodel, Xnew)
     p = 1 ./ (1 .+ exp.(-res))
     levels_cate = categorical(fitresult.target_levels)
     [UnivariateFinite(levels_cate, [p, 1-p]) for p in p]
 end


 predict(model::JLBoostMLJModel, fitresult, Xnew) = begin
-    predict(fitresult, Xnew)
+    JLBoost.predict(fitresult, Xnew)
 end

 # see https://alan-turing-institute.github.io/MLJ.jl/stable/adding_models_for_general_use/#Trait-declarations-1
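
All of the `src/mlj.jl` changes qualify the tree-model calls as `JLBoost.predict`. The motivation, as far as the diff shows, is that this file also defines its own MLJ-facing `predict` methods, so the bare name `predict` refers to two different things; qualifying the inner calls makes it unambiguous which one is meant. A toy illustration with made-up names (`FakeJLBoost`, `mlj_predict`):

```julia
# Stand-in for the boosting library: the "real" tree-model predict lives in another module.
module FakeJLBoost
predict(treemodel, X) = "tree-model prediction for $(length(X)) rows"
end

# Stand-in for the MLJ glue method. The qualified call pins the inner prediction
# to the library's function rather than whatever `predict` happens to be in scope.
mlj_predict(model, fitresult, Xnew) = FakeJLBoost.predict(fitresult, Xnew)

mlj_predict(:model, :treemodel, [1.0, 2.0, 3.0])   # "tree-model prediction for 3 rows"
```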
