LLH committed on
Commit
848fa8d
·
1 Parent(s): 086c1e6

2024/03/07/22:40

Browse files
analysis/model_train/bayes_model.py CHANGED
@@ -41,24 +41,24 @@ class NaiveBayesClassifierParams:
41
 
42
 
43
  # 朴素贝叶斯分类
44
- def naive_bayes_classifier(container, params_list, model=None):
45
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
46
  info = {}
47
 
48
- params_list = transform_params_list(NaiveBayesClassifierParams, params_list, model)
49
 
50
  if model == "MultinomialNB":
51
  naive_bayes_model = MultinomialNB()
52
- params = params_list
53
  elif model == "GaussianNB":
54
  naive_bayes_model = GaussianNB()
55
- params = params_list
56
  elif model == "ComplementNB":
57
  naive_bayes_model = ComplementNB()
58
- params = params_list
59
  else:
60
  naive_bayes_model = GaussianNB()
61
- params = params_list
62
 
63
  if hyper_params_optimize == "grid_search":
64
  best_model = grid_search(params, naive_bayes_model, x_train, y_train)
 
41
 
42
 
43
  # 朴素贝叶斯分类
44
+ def naive_bayes_classifier(container, params, model=None):
45
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
46
  info = {}
47
 
48
+ params = transform_params_list(NaiveBayesClassifierParams, params, model)
49
 
50
  if model == "MultinomialNB":
51
  naive_bayes_model = MultinomialNB()
52
+ params = params
53
  elif model == "GaussianNB":
54
  naive_bayes_model = GaussianNB()
55
+ params = params
56
  elif model == "ComplementNB":
57
  naive_bayes_model = ComplementNB()
58
+ params = params
59
  else:
60
  naive_bayes_model = GaussianNB()
61
+ params = params
62
 
63
  if hyper_params_optimize == "grid_search":
64
  best_model = grid_search(params, naive_bayes_model, x_train, y_train)
analysis/model_train/distance_model.py CHANGED
@@ -28,14 +28,13 @@ class KNNClassifierParams:
28
 
29
 
30
  # KNN分类
31
- def knn_classifier(container, params_list):
32
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
33
  info = {}
34
 
35
- params_list = transform_params_list(KNNClassifierParams, params_list)
36
 
37
  knn_classifier_model = KNeighborsClassifier()
38
- params = params_list
39
 
40
  if hyper_params_optimize == "grid_search":
41
  best_model = grid_search(params, knn_classifier_model, x_train, y_train)
@@ -87,14 +86,13 @@ class KNNRegressionParams:
87
 
88
 
89
  # KNN回归
90
- def knn_regressor(container, params_list):
91
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
92
  info = {}
93
 
94
- params_list = transform_params_list(KNNRegressionParams, params_list)
95
 
96
  knn_regression_model = KNeighborsRegressor()
97
- params = params_list
98
 
99
  if hyper_params_optimize == "grid_search":
100
  best_model = grid_search(params, knn_regression_model, x_train, y_train)
 
28
 
29
 
30
  # KNN分类
31
+ def knn_classifier(container, params):
32
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
33
  info = {}
34
 
35
+ params = transform_params_list(KNNClassifierParams, params)
36
 
37
  knn_classifier_model = KNeighborsClassifier()
 
38
 
39
  if hyper_params_optimize == "grid_search":
40
  best_model = grid_search(params, knn_classifier_model, x_train, y_train)
 
86
 
87
 
88
  # KNN回归
89
+ def knn_regressor(container, params):
90
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
91
  info = {}
92
 
93
+ params = transform_params_list(KNNRegressionParams, params)
94
 
95
  knn_regression_model = KNeighborsRegressor()
 
96
 
97
  if hyper_params_optimize == "grid_search":
98
  best_model = grid_search(params, knn_regression_model, x_train, y_train)
analysis/model_train/gradient_model.py CHANGED
@@ -17,7 +17,6 @@ class GradientBoostingParams:
17
  'max_depth': StaticValue.INT,
18
  'min_samples_split': StaticValue.INT,
19
  'min_samples_leaf': StaticValue.INT,
20
- 'random_state': StaticValue.INT
21
  }
22
 
23
  @classmethod
@@ -28,19 +27,18 @@ class GradientBoostingParams:
28
  'max_depth': [3, 5, 7],
29
  'min_samples_split': [2, 5, 10],
30
  'min_samples_leaf': [1, 2, 4],
31
- 'random_state': [StaticValue.RANDOM_STATE]
32
  }
33
 
34
 
35
  # 梯度提升回归
36
- def gradient_boosting_regressor(container, params_list):
37
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
38
  info = {}
39
 
40
- params_list = transform_params_list(GradientBoostingParams, params_list)
 
41
 
42
  gradient_boosting_regression_model = GradientBoostingRegressor(random_state=StaticValue.RANDOM_STATE)
43
- params = params_list
44
 
45
  if hyper_params_optimize == "grid_search":
46
  best_model = grid_search(params, gradient_boosting_regression_model, x_train, y_train)
 
17
  'max_depth': StaticValue.INT,
18
  'min_samples_split': StaticValue.INT,
19
  'min_samples_leaf': StaticValue.INT,
 
20
  }
21
 
22
  @classmethod
 
27
  'max_depth': [3, 5, 7],
28
  'min_samples_split': [2, 5, 10],
29
  'min_samples_leaf': [1, 2, 4],
 
30
  }
31
 
32
 
33
  # 梯度提升回归
34
+ def gradient_boosting_regressor(container, params):
35
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
36
  info = {}
37
 
38
+ params = transform_params_list(GradientBoostingParams, params)
39
+ params['random_state'] = [StaticValue.RANDOM_STATE]
40
 
41
  gradient_boosting_regression_model = GradientBoostingRegressor(random_state=StaticValue.RANDOM_STATE)
 
42
 
43
  if hyper_params_optimize == "grid_search":
44
  best_model = grid_search(params, gradient_boosting_regression_model, x_train, y_train)
analysis/model_train/kernel_model.py CHANGED
@@ -31,14 +31,13 @@ class SVMRegressionParams:
31
 
32
 
33
  # 支持向量机回归
34
- def svm_regressor(container, params_list):
35
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
36
  info = {}
37
 
38
- params_list = transform_params_list(SVMRegressionParams, params_list)
39
 
40
  svm_regression_model = SVR(kernel='rbf', C=100, gamma=0.1, epsilon=0.1)
41
- params = params_list
42
 
43
  if hyper_params_optimize == "grid_search":
44
  best_model = grid_search(params, svm_regression_model, x_train, y_train)
@@ -79,7 +78,6 @@ class SVMClassifierParams:
79
  "C": StaticValue.FLOAT,
80
  "kernel": StaticValue.STR,
81
  "gamma": StaticValue.FLOAT,
82
- 'random_state': StaticValue.INT
83
  }
84
 
85
  @classmethod
@@ -88,19 +86,18 @@ class SVMClassifierParams:
88
  "C": [0.1, 1, 10, 100],
89
  "kernel": ['linear', 'rbf', 'poly'],
90
  "gamma": [0.1, 1, 10],
91
- 'random_state': [StaticValue.RANDOM_STATE]
92
  }
93
 
94
 
95
  # 支持向量机分类
96
- def svm_classifier(container, params_list):
97
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
98
  info = {}
99
 
100
- params_list = transform_params_list(SVMClassifierParams, params_list)
 
101
 
102
  svm_classifier_model = SVC(kernel="rbf", random_state=StaticValue.RANDOM_STATE)
103
- params = params_list
104
 
105
  if hyper_params_optimize == "grid_search":
106
  best_model = grid_search(params, svm_classifier_model, x_train, y_train)
 
31
 
32
 
33
  # 支持向量机回归
34
+ def svm_regressor(container, params):
35
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
36
  info = {}
37
 
38
+ params = transform_params_list(SVMRegressionParams, params)
39
 
40
  svm_regression_model = SVR(kernel='rbf', C=100, gamma=0.1, epsilon=0.1)
 
41
 
42
  if hyper_params_optimize == "grid_search":
43
  best_model = grid_search(params, svm_regression_model, x_train, y_train)
 
78
  "C": StaticValue.FLOAT,
79
  "kernel": StaticValue.STR,
80
  "gamma": StaticValue.FLOAT,
 
81
  }
82
 
83
  @classmethod
 
86
  "C": [0.1, 1, 10, 100],
87
  "kernel": ['linear', 'rbf', 'poly'],
88
  "gamma": [0.1, 1, 10],
 
89
  }
90
 
91
 
92
  # 支持向量机分类
93
+ def svm_classifier(container, params):
94
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
95
  info = {}
96
 
97
+ params = transform_params_list(SVMClassifierParams, params)
98
+ params['random_state'] = [StaticValue.RANDOM_STATE]
99
 
100
  svm_classifier_model = SVC(kernel="rbf", random_state=StaticValue.RANDOM_STATE)
 
101
 
102
  if hyper_params_optimize == "grid_search":
103
  best_model = grid_search(params, svm_classifier_model, x_train, y_train)
analysis/model_train/linear_model.py CHANGED
@@ -23,7 +23,6 @@ class LinearRegressionParams:
23
  return {
24
  "fit_intercept": StaticValue.BOOL,
25
  "alpha": StaticValue.FLOAT,
26
- "random_state": StaticValue.INT
27
  }
28
  else:
29
  return {
@@ -36,7 +35,6 @@ class LinearRegressionParams:
36
  return {
37
  "fit_intercept": [True, False],
38
  "alpha": [0.001, 0.01, 0.1, 1.0, 10.0],
39
- "random_state": [StaticValue.RANDOM_STATE]
40
  }
41
  else:
42
  return {
@@ -45,27 +43,28 @@ class LinearRegressionParams:
45
 
46
 
47
  # 线性回归
48
- def linear_regressor(container, params_list, model=None):
49
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
50
  info = {}
51
 
52
- input_params = transform_params_list(LinearRegressionParams, params_list, model)
 
53
 
54
  if model == "Lasso":
55
  linear_regression_model = Lasso(alpha=0.1, random_state=StaticValue.RANDOM_STATE)
56
- params = input_params
57
  elif model == "Ridge":
58
  linear_regression_model = Ridge(alpha=0.1, random_state=StaticValue.RANDOM_STATE)
59
- params = input_params
60
  elif model == "ElasticNet":
61
  linear_regression_model = ElasticNet(alpha=0.1, random_state=StaticValue.RANDOM_STATE)
62
- params = input_params
63
  elif model == "LinearRegression":
64
  linear_regression_model = LinearRegression()
65
- params = input_params
66
  else:
67
  linear_regression_model = LinearRegression()
68
- params = input_params
69
 
70
  try:
71
  if hyper_params_optimize == "grid_search":
@@ -126,18 +125,17 @@ class PolynomialRegressionParams:
126
 
127
 
128
  # 多项式回归
129
- def polynomial_regressor(container, params_list):
130
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
131
  info = {}
132
 
133
- params_list = transform_params_list(PolynomialRegressionParams, params_list)
134
 
135
  polynomial_features = PolynomialFeatures(degree=2)
136
  linear_regression_model = LinearRegression()
137
 
138
  polynomial_regression_model = Pipeline([("polynomial_features", polynomial_features),
139
  ("linear_regression_model", linear_regression_model)])
140
- params = params_list
141
 
142
  if hyper_params_optimize == "grid_search":
143
  best_model = grid_search(params, polynomial_regression_model, x_train, y_train)
@@ -186,7 +184,6 @@ class LogisticRegressionParams:
186
  "C": StaticValue.FLOAT,
187
  "max_iter": StaticValue.INT,
188
  "solver": StaticValue.STR,
189
- "random_state": StaticValue.INT
190
  }
191
 
192
  @classmethod
@@ -195,19 +192,18 @@ class LogisticRegressionParams:
195
  "C": [0.001, 0.01, 0.1, 1.0, 10.0],
196
  "max_iter": [100, 200, 300],
197
  "solver": ["liblinear", "lbfgs", "newton-cg", "sag", "saga"],
198
- "random_state": [StaticValue.RANDOM_STATE]
199
  }
200
 
201
 
202
  # 逻辑斯谛分类
203
- def logistic_classifier(container, params_list):
204
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
205
  info = {}
206
 
207
- params_list = transform_params_list(LogisticRegressionParams, params_list)
 
208
 
209
  logistic_regression_model = LogisticRegression(random_state=StaticValue.RANDOM_STATE)
210
- params = params_list
211
 
212
  if hyper_params_optimize == "grid_search":
213
  best_model = grid_search(params, logistic_regression_model, x_train, y_train)
 
23
  return {
24
  "fit_intercept": StaticValue.BOOL,
25
  "alpha": StaticValue.FLOAT,
 
26
  }
27
  else:
28
  return {
 
35
  return {
36
  "fit_intercept": [True, False],
37
  "alpha": [0.001, 0.01, 0.1, 1.0, 10.0],
 
38
  }
39
  else:
40
  return {
 
43
 
44
 
45
  # 线性回归
46
+ def linear_regressor(container, params, model=None):
47
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
48
  info = {}
49
 
50
+ params = transform_params_list(LinearRegressionParams, params, model)
51
+ params['random_state'] = [StaticValue.RANDOM_STATE]
52
 
53
  if model == "Lasso":
54
  linear_regression_model = Lasso(alpha=0.1, random_state=StaticValue.RANDOM_STATE)
55
+ params = params
56
  elif model == "Ridge":
57
  linear_regression_model = Ridge(alpha=0.1, random_state=StaticValue.RANDOM_STATE)
58
+ params = params
59
  elif model == "ElasticNet":
60
  linear_regression_model = ElasticNet(alpha=0.1, random_state=StaticValue.RANDOM_STATE)
61
+ params = params
62
  elif model == "LinearRegression":
63
  linear_regression_model = LinearRegression()
64
+ params = params
65
  else:
66
  linear_regression_model = LinearRegression()
67
+ params = params
68
 
69
  try:
70
  if hyper_params_optimize == "grid_search":
 
125
 
126
 
127
  # 多项式回归
128
+ def polynomial_regressor(container, params):
129
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
130
  info = {}
131
 
132
+ params = transform_params_list(PolynomialRegressionParams, params)
133
 
134
  polynomial_features = PolynomialFeatures(degree=2)
135
  linear_regression_model = LinearRegression()
136
 
137
  polynomial_regression_model = Pipeline([("polynomial_features", polynomial_features),
138
  ("linear_regression_model", linear_regression_model)])
 
139
 
140
  if hyper_params_optimize == "grid_search":
141
  best_model = grid_search(params, polynomial_regression_model, x_train, y_train)
 
184
  "C": StaticValue.FLOAT,
185
  "max_iter": StaticValue.INT,
186
  "solver": StaticValue.STR,
 
187
  }
188
 
189
  @classmethod
 
192
  "C": [0.001, 0.01, 0.1, 1.0, 10.0],
193
  "max_iter": [100, 200, 300],
194
  "solver": ["liblinear", "lbfgs", "newton-cg", "sag", "saga"],
 
195
  }
196
 
197
 
198
  # 逻辑斯谛分类
199
+ def logistic_classifier(container, params):
200
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
201
  info = {}
202
 
203
+ params = transform_params_list(LogisticRegressionParams, params)
204
+ params['random_state'] = [StaticValue.RANDOM_STATE]
205
 
206
  logistic_regression_model = LogisticRegression(random_state=StaticValue.RANDOM_STATE)
 
207
 
208
  if hyper_params_optimize == "grid_search":
209
  best_model = grid_search(params, logistic_regression_model, x_train, y_train)
analysis/model_train/tree_model.py CHANGED
@@ -21,7 +21,6 @@ class RandomForestRegressionParams:
21
  'max_depth': StaticValue.INT,
22
  'min_samples_split': StaticValue.INT,
23
  'min_samples_leaf': StaticValue.INT,
24
- 'random_state': StaticValue.INT
25
  }
26
 
27
  @classmethod
@@ -31,19 +30,18 @@ class RandomForestRegressionParams:
31
  'max_depth': [0, 10, 20, 30],
32
  'min_samples_split': [2, 5, 10],
33
  'min_samples_leaf': [1, 2, 4],
34
- 'random_state': [StaticValue.RANDOM_STATE]
35
  }
36
 
37
 
38
  # 随机森林回归
39
- def random_forest_regressor(container, params_list):
40
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
41
  info = {}
42
 
43
- params_list = transform_params_list(RandomForestRegressionParams, params_list)
 
44
 
45
  random_forest_regression_model = RandomForestRegressor(n_estimators=5, random_state=StaticValue.RANDOM_STATE)
46
- params = params_list
47
 
48
  if hyper_params_optimize == "grid_search":
49
  best_model = grid_search(params, random_forest_regression_model, x_train, y_train)
@@ -86,7 +84,6 @@ class DecisionTreeClassifierParams:
86
  "max_depth": StaticValue.INT,
87
  "min_samples_split": StaticValue.INT,
88
  "min_samples_leaf": StaticValue.INT,
89
- 'random_state': StaticValue.INT
90
  }
91
 
92
  @classmethod
@@ -97,19 +94,18 @@ class DecisionTreeClassifierParams:
97
  "max_depth": [0, 5, 10, 15],
98
  "min_samples_split": [2, 5, 10],
99
  "min_samples_leaf": [1, 2, 4],
100
- 'random_state': [StaticValue.RANDOM_STATE]
101
  }
102
 
103
 
104
  # 决策树分类
105
- def decision_tree_classifier(container, params_list):
106
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
107
  info = {}
108
 
109
- params_list = transform_params_list(DecisionTreeClassifierParams, params_list)
 
110
 
111
  random_forest_regression_model = DecisionTreeClassifier(random_state=StaticValue.RANDOM_STATE)
112
- params = params_list
113
 
114
  if hyper_params_optimize == "grid_search":
115
  best_model = grid_search(params, random_forest_regression_model, x_train, y_train)
@@ -151,7 +147,6 @@ class RandomForestClassifierParams:
151
  "max_depth": StaticValue.INT,
152
  "min_samples_split": StaticValue.INT,
153
  "min_samples_leaf": StaticValue.INT,
154
- "random_state": StaticValue.INT
155
  }
156
 
157
  @classmethod
@@ -162,19 +157,18 @@ class RandomForestClassifierParams:
162
  "max_depth": [0, 5, 10, 15],
163
  "min_samples_split": [2, 5, 10],
164
  "min_samples_leaf": [1, 2, 4],
165
- "random_state": [StaticValue.RANDOM_STATE]
166
  }
167
 
168
 
169
  # 随机森林分类
170
- def random_forest_classifier(container, params_list):
171
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
172
  info = {}
173
 
174
- params_list = transform_params_list(RandomForestClassifierParams, params_list)
 
175
 
176
  random_forest_classifier_model = RandomForestClassifier(n_estimators=5, random_state=StaticValue.RANDOM_STATE)
177
- params = params_list
178
 
179
  if hyper_params_optimize == "grid_search":
180
  best_model = grid_search(params, random_forest_classifier_model, x_train, y_train)
@@ -219,7 +213,6 @@ class XgboostClassifierParams:
219
  "gamma": StaticValue.FLOAT,
220
  "subsample": StaticValue.FLOAT,
221
  "colsample_bytree": StaticValue.FLOAT,
222
- "random_state": StaticValue.INT
223
  }
224
 
225
  @classmethod
@@ -232,19 +225,18 @@ class XgboostClassifierParams:
232
  "gamma": [0, 0.1, 0.2],
233
  "subsample": [0.5, 0.8, 0.9, 1.0],
234
  "colsample_bytree": [0.8, 0.9, 1.0],
235
- "random_state": [StaticValue.RANDOM_STATE]
236
  }
237
 
238
 
239
  # xgboost分类
240
- def xgboost_classifier(container, params_list):
241
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
242
  info = {}
243
 
244
- params_list = transform_params_list(XgboostClassifierParams, params_list)
 
245
 
246
  xgboost_classifier_model = XGBClassifier(random_state=StaticValue.RANDOM_STATE)
247
- params = params_list
248
 
249
  if hyper_params_optimize == "grid_search":
250
  best_model = grid_search(params, xgboost_classifier_model, x_train, y_train)
@@ -285,14 +277,14 @@ class LightGBMClassifierParams:
285
 
286
 
287
  # lightGBM分类
288
- def lightGBM_classifier(container, params_list):
289
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
290
  info = {}
291
 
292
- params_list = transform_params_list(LightGBMClassifierParams, params_list)
 
293
 
294
  lightgbm_classifier_model = lightGBMClassifier
295
- params = params_list
296
 
297
  if hyper_params_optimize == "grid_search":
298
  best_model = grid_search(params, lightgbm_classifier_model, x_train, y_train)
 
21
  'max_depth': StaticValue.INT,
22
  'min_samples_split': StaticValue.INT,
23
  'min_samples_leaf': StaticValue.INT,
 
24
  }
25
 
26
  @classmethod
 
30
  'max_depth': [0, 10, 20, 30],
31
  'min_samples_split': [2, 5, 10],
32
  'min_samples_leaf': [1, 2, 4],
 
33
  }
34
 
35
 
36
  # 随机森林回归
37
+ def random_forest_regressor(container, params):
38
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
39
  info = {}
40
 
41
+ params = transform_params_list(RandomForestRegressionParams, params)
42
+ params['random_state'] = [StaticValue.RANDOM_STATE]
43
 
44
  random_forest_regression_model = RandomForestRegressor(n_estimators=5, random_state=StaticValue.RANDOM_STATE)
 
45
 
46
  if hyper_params_optimize == "grid_search":
47
  best_model = grid_search(params, random_forest_regression_model, x_train, y_train)
 
84
  "max_depth": StaticValue.INT,
85
  "min_samples_split": StaticValue.INT,
86
  "min_samples_leaf": StaticValue.INT,
 
87
  }
88
 
89
  @classmethod
 
94
  "max_depth": [0, 5, 10, 15],
95
  "min_samples_split": [2, 5, 10],
96
  "min_samples_leaf": [1, 2, 4],
 
97
  }
98
 
99
 
100
  # 决策树分类
101
+ def decision_tree_classifier(container, params):
102
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
103
  info = {}
104
 
105
+ params = transform_params_list(DecisionTreeClassifierParams, params)
106
+ params['random_state'] = [StaticValue.RANDOM_STATE]
107
 
108
  random_forest_regression_model = DecisionTreeClassifier(random_state=StaticValue.RANDOM_STATE)
 
109
 
110
  if hyper_params_optimize == "grid_search":
111
  best_model = grid_search(params, random_forest_regression_model, x_train, y_train)
 
147
  "max_depth": StaticValue.INT,
148
  "min_samples_split": StaticValue.INT,
149
  "min_samples_leaf": StaticValue.INT,
 
150
  }
151
 
152
  @classmethod
 
157
  "max_depth": [0, 5, 10, 15],
158
  "min_samples_split": [2, 5, 10],
159
  "min_samples_leaf": [1, 2, 4],
 
160
  }
161
 
162
 
163
  # 随机森林分类
164
+ def random_forest_classifier(container, params):
165
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
166
  info = {}
167
 
168
+ params = transform_params_list(RandomForestClassifierParams, params)
169
+ params['random_state'] = [StaticValue.RANDOM_STATE]
170
 
171
  random_forest_classifier_model = RandomForestClassifier(n_estimators=5, random_state=StaticValue.RANDOM_STATE)
 
172
 
173
  if hyper_params_optimize == "grid_search":
174
  best_model = grid_search(params, random_forest_classifier_model, x_train, y_train)
 
213
  "gamma": StaticValue.FLOAT,
214
  "subsample": StaticValue.FLOAT,
215
  "colsample_bytree": StaticValue.FLOAT,
 
216
  }
217
 
218
  @classmethod
 
225
  "gamma": [0, 0.1, 0.2],
226
  "subsample": [0.5, 0.8, 0.9, 1.0],
227
  "colsample_bytree": [0.8, 0.9, 1.0],
 
228
  }
229
 
230
 
231
  # xgboost分类
232
+ def xgboost_classifier(container, params):
233
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
234
  info = {}
235
 
236
+ params = transform_params_list(XgboostClassifierParams, params)
237
+ params['random_state'] = [StaticValue.RANDOM_STATE]
238
 
239
  xgboost_classifier_model = XGBClassifier(random_state=StaticValue.RANDOM_STATE)
 
240
 
241
  if hyper_params_optimize == "grid_search":
242
  best_model = grid_search(params, xgboost_classifier_model, x_train, y_train)
 
277
 
278
 
279
  # lightGBM分类
280
+ def lightGBM_classifier(container, params):
281
  x_train, y_train, x_test, y_test, hyper_params_optimize = get_values_from_container_class(container)
282
  info = {}
283
 
284
+ params = transform_params_list(LightGBMClassifierParams, params)
285
+ params['random_state'] = [StaticValue.RANDOM_STATE]
286
 
287
  lightgbm_classifier_model = lightGBMClassifier
 
288
 
289
  if hyper_params_optimize == "grid_search":
290
  best_model = grid_search(params, lightgbm_classifier_model, x_train, y_train)