Coverage for src/sensai/sklearn/sklearn_classification.py: 80%

35 statements  

coverage.py v7.6.1, created at 2024-08-13 22:17 +0000

import logging
from typing import Union, Optional

import numpy as np
import sklearn.ensemble
import sklearn.linear_model
import sklearn.naive_bayes
import sklearn.neighbors
import sklearn.neural_network
import sklearn.svm
import sklearn.tree
from sklearn.ensemble import RandomForestClassifier
from sklearn.tree import DecisionTreeClassifier

from .sklearn_base import AbstractSkLearnVectorClassificationModel, FeatureImportanceProviderSkLearnClassification

log = logging.getLogger(__name__)


class SkLearnDecisionTreeVectorClassificationModel(AbstractSkLearnVectorClassificationModel):
    def __init__(self, min_samples_leaf=1, random_state=42, **model_args):
        super().__init__(DecisionTreeClassifier,
            min_samples_leaf=min_samples_leaf, random_state=random_state, **model_args)


class SkLearnRandomForestVectorClassificationModel(AbstractSkLearnVectorClassificationModel,
        FeatureImportanceProviderSkLearnClassification):
    def __init__(self, n_estimators=100, min_samples_leaf=1, random_state=42, use_balanced_class_weights=False, **model_args):
        super().__init__(RandomForestClassifier,
            random_state=random_state, min_samples_leaf=min_samples_leaf, n_estimators=n_estimators,
            use_balanced_class_weights=use_balanced_class_weights,
            **model_args)


class SkLearnMLPVectorClassificationModel(AbstractSkLearnVectorClassificationModel):
    def __init__(self, hidden_layer_sizes=(100,), activation: str = "relu",
            solver: str = "adam", batch_size: Union[int, str] = "auto", random_state: Optional[int] = 42,
            max_iter: int = 200, early_stopping: bool = False, n_iter_no_change: int = 10, **model_args):
        """
        :param hidden_layer_sizes: the sequence of hidden layer sizes
        :param activation: {"identity", "logistic", "tanh", "relu"} the activation function to use for the hidden layers
            (the output layer's activation is fixed by MLPClassifier: logistic for binary and softmax for multi-class classification)
        :param solver: {"adam", "lbfgs", "sgd"} the name of the solver to apply
        :param batch_size: the batch size or "auto" for min(200, data set size)
        :param random_state: the random seed for reproducibility; use None if it shall not be specifically defined
        :param max_iter: the number of iterations (gradient steps for L-BFGS, epochs for other solvers)
        :param early_stopping: whether to use early stopping (stop training after n_iter_no_change epochs without improvement)
        :param n_iter_no_change: the number of epochs without improvement to tolerate before stopping early (if early_stopping is enabled)
        :param model_args: additional arguments to pass on to MLPClassifier, see
            https://scikit-learn.org/stable/modules/generated/sklearn.neural_network.MLPClassifier.html
        """
        super().__init__(sklearn.neural_network.MLPClassifier, hidden_layer_sizes=hidden_layer_sizes, activation=activation,
            random_state=random_state, solver=solver, batch_size=batch_size, max_iter=max_iter, early_stopping=early_stopping,
            n_iter_no_change=n_iter_no_change, **model_args)
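
# Illustrative usage sketch (kept as a comment so the module's behaviour is unchanged). It assumes
# the usual sensAI vector-model interface, i.e. that fit() and predict() accept pandas DataFrames;
# the data and variable names below are hypothetical:
#
#   import pandas as pd
#   x = pd.DataFrame({"f1": [0.1, 0.9, 0.2, 0.8], "f2": [1.0, 0.0, 1.0, 0.0]})
#   y = pd.DataFrame({"label": ["neg", "pos", "neg", "pos"]})
#   model = SkLearnMLPVectorClassificationModel(hidden_layer_sizes=(16,), activation="tanh",
#       solver="adam", max_iter=500, early_stopping=True, n_iter_no_change=20)
#   model.fit(x, y)
#   predictions = model.predict(x)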


class SkLearnMultinomialNBVectorClassificationModel(AbstractSkLearnVectorClassificationModel):
    def __init__(self, **model_args):
        super().__init__(sklearn.naive_bayes.MultinomialNB, **model_args)


class SkLearnSVCVectorClassificationModel(AbstractSkLearnVectorClassificationModel):
    def __init__(self, random_state=42, **model_args):
        super().__init__(sklearn.svm.SVC, random_state=random_state, **model_args)


class SkLearnLogisticRegressionVectorClassificationModel(AbstractSkLearnVectorClassificationModel):
    def __init__(self, random_state=42, **model_args):
        super().__init__(sklearn.linear_model.LogisticRegression, random_state=random_state, **model_args)


class SkLearnKNeighborsVectorClassificationModel(AbstractSkLearnVectorClassificationModel):
    def __init__(self, **model_args):
        super().__init__(sklearn.neighbors.KNeighborsClassifier, **model_args)

    def _predict_sklearn(self, input_values):
        # Apply a transformation to work around a bug in sklearn 1.3.0 (and perhaps earlier versions):
        # https://github.com/scikit-learn/scikit-learn/issues/26768
        inputs = np.ascontiguousarray(input_values)

        return super()._predict_sklearn(inputs)
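
# A minimal sketch (comment only) of why np.ascontiguousarray helps above: slicing can yield
# non-contiguous views, and the call returns a C-contiguous copy of the same data, which
# side-steps the linked sklearn issue. The array below is purely illustrative:
#
#   import numpy as np
#   a = np.arange(12).reshape(3, 4)[:, ::2]   # a non-contiguous view
#   assert not a.flags["C_CONTIGUOUS"]
#   b = np.ascontiguousarray(a)               # C-contiguous copy of the same values
#   assert b.flags["C_CONTIGUOUS"]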