|
1 | 1 | """
|
2 | 2 | This example illustrates the composition of quantification methods from
|
3 |
| -arbitrary loss functions and feature transformations. It will extend the basic |
| 3 | +arbitrary loss functions and feature representations. It will extend the basic |
4 | 4 | example on the usage of QuaPy with this composition.
|
5 | 5 |
|
6 | 6 | This example requires the installation of qunfold, the back-end of QuaPy's
|
7 | 7 | composition module:
|
8 | 8 |
|
9 | 9 | pip install --upgrade pip setuptools wheel
|
10 | 10 | pip install "jax[cpu]"
|
11 |
| - pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@v0.1.4" |
| 11 | + pip install "qunfold @ git+https://github.com/mirkobunse/qunfold@v0.1.5" |
12 | 12 | """
|
13 | 13 |
|
14 | 14 | import numpy as np
|
|
24 | 24 | training, testing = data.train_test
|
25 | 25 |
|
26 | 26 | # We start by recovering PACC from its building blocks, a LeastSquaresLoss and
|
27 |
| -# a probabilistic ClassTransformer. A 5-fold cross-validation is implemented |
| 27 | +# a probabilistic ClassRepresentation. A 5-fold cross-validation is implemented |
28 | 28 | # through a CVClassifier.
|
29 | 29 |
|
30 | 30 | from quapy.method.composable import (
|
31 | 31 | ComposableQuantifier,
|
32 | 32 | LeastSquaresLoss,
|
33 |
| - ClassTransformer, |
| 33 | + ClassRepresentation, |
34 | 34 | CVClassifier,
|
35 | 35 | )
|
36 | 36 | from sklearn.linear_model import LogisticRegression
|
37 | 37 |
|
38 | 38 | pacc = ComposableQuantifier(
|
39 | 39 | LeastSquaresLoss(),
|
40 |
| - ClassTransformer( |
| 40 | + ClassRepresentation( |
41 | 41 | CVClassifier(LogisticRegression(random_state=0), 5),
|
42 | 42 | is_probabilistic = True
|
43 | 43 | ),
|
|
63 | 63 |
|
64 | 64 | model = ComposableQuantifier(
|
65 | 65 | HellingerSurrogateLoss(), # the loss is different from before
|
66 |
| - ClassTransformer( # we use the same transformer |
| 66 | + ClassRepresentation( # we use the same representation |
67 | 67 | CVClassifier(LogisticRegression(random_state=0), 5),
|
68 | 68 | is_probabilistic = True
|
69 | 69 | ),
|
|
79 | 79 | print(f"MAE = {np.mean(absolute_errors):.4f}+-{np.std(absolute_errors):.4f}")
|
80 | 80 |
|
81 | 81 | # In general, any composed method solves a linear system of equations by
|
82 |
| -# minimizing the loss after transforming the data. Methods of this kind include |
| 82 | +# minimizing the loss after representing the data. Methods of this kind include |
83 | 83 | # ACC, PACC, HDx, HDy, and many other well-known methods, as well as an
|
84 | 84 | # unlimited number of re-combinations of their building blocks.
|
85 | 85 |
|
|
93 | 93 |
|
94 | 94 | model = ComposableQuantifier(
|
95 | 95 | CombinedLoss(HellingerSurrogateLoss(), LeastSquaresLoss()),
|
96 |
| - ClassTransformer( |
| 96 | + ClassRepresentation( |
97 | 97 | CVClassifier(LogisticRegression(random_state=0), 5),
|
98 | 98 | is_probabilistic = True
|
99 | 99 | ),
|
100 | 100 | )
|
101 | 101 |
|
102 |
| -from qunfold.quapy import QuaPyWrapper |
103 |
| -from qunfold import GenericMethod |
| 102 | +from quapy.method.composable import QUnfoldWrapper |
| 103 | +from qunfold import LinearMethod |
104 | 104 |
|
105 |
| -model = QuaPyWrapper(GenericMethod( |
| 105 | +model = QUnfoldWrapper(LinearMethod( |
106 | 106 | CombinedLoss(HellingerSurrogateLoss(), LeastSquaresLoss()),
|
107 |
| - ClassTransformer( |
| 107 | + ClassRepresentation( |
108 | 108 | CVClassifier(LogisticRegression(random_state=0), 5),
|
109 | 109 | is_probabilistic = True
|
110 | 110 | ),
|
|
115 | 115 |
|
116 | 116 | param_grid = {
|
117 | 117 | "loss__weights": [ (w, 1-w) for w in [.1, .5, .9] ],
|
118 |
| - "transformer__classifier__estimator__C": [1e-1, 1e1], |
| 118 | + "representation__classifier__estimator__C": [1e-1, 1e1], |
119 | 119 | }
|
120 | 120 |
|
121 | 121 | grid_search = qp.model_selection.GridSearchQ(
|
|
0 commit comments