Add parameter selection
app.py
CHANGED
@@ -1,42 +1,52 @@
-import gradio as gr
-
-def greet(name):
-    return "Hello " + name + "!!"
-
-iface = gr.Interface(fn=greet, inputs="text", outputs="text")
-iface.launch()
-
 from sklearn.datasets import make_circles
 from sklearn.model_selection import train_test_split
+from sklearn.decomposition import PCA, KernelPCA
+import matplotlib.pyplot as plt  # needed for plt.subplots() in fit_plot below
+import gradio as gr
 
 X, y = make_circles(n_samples=1_000, factor=0.3, noise=0.05, random_state=0)
 X_train, X_test, y_train, y_test = train_test_split(X, y, stratify=y, random_state=0)
 
-from sklearn.decomposition import PCA, KernelPCA
-
-pca = PCA(n_components=2)
-kernel_pca = KernelPCA(
-    n_components=None, kernel="rbf", gamma=10, fit_inverse_transform=True, alpha=0.1
-)
-
-X_test_pca = pca.fit(X_train).transform(X_test)
-X_test_kernel_pca = kernel_pca.fit(X_train).transform(X_test)
-
-fig, (orig_data_ax, pca_proj_ax, kernel_pca_proj_ax) = plt.subplots(
-    ncols=3, figsize=(14, 4)
-)
-
-orig_data_ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test)
-orig_data_ax.set_ylabel("Feature #1")
-orig_data_ax.set_xlabel("Feature #0")
-orig_data_ax.set_title("Testing data")
-
-pca_proj_ax.scatter(X_test_pca[:, 0], X_test_pca[:, 1], c=y_test)
-pca_proj_ax.set_ylabel("Principal component #1")
-pca_proj_ax.set_xlabel("Principal component #0")
-pca_proj_ax.set_title("Projection of testing data\n using PCA")
-
-kernel_pca_proj_ax.scatter(X_test_kernel_pca[:, 0], X_test_kernel_pca[:, 1], c=y_test)
-kernel_pca_proj_ax.set_ylabel("Principal component #1")
-kernel_pca_proj_ax.set_xlabel("Principal component #0")
-_ = kernel_pca_proj_ax.set_title("Projection of testing data\n using KernelPCA")
+def fit_plot(n_comp, gamma, alpha):
+
+    pca = PCA(n_components=n_comp)
+    kernel_pca = KernelPCA(
+        n_components=None, kernel="rbf", gamma=gamma, fit_inverse_transform=True, alpha=alpha
+    )
+
+    X_test_pca = pca.fit(X_train).transform(X_test)
+    X_test_kernel_pca = kernel_pca.fit(X_train).transform(X_test)
+
+    fig1, (orig_data_ax, pca_proj_ax, kernel_pca_proj_ax) = plt.subplots(
+        ncols=3, figsize=(14, 4)
+    )
+
+    orig_data_ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test)
+    orig_data_ax.set_ylabel("Feature #1")
+    orig_data_ax.set_xlabel("Feature #0")
+    orig_data_ax.set_title("Testing data")
+
+    pca_proj_ax.scatter(X_test_pca[:, 0], X_test_pca[:, 1], c=y_test)
+    pca_proj_ax.set_ylabel("Principal component #1")
+    pca_proj_ax.set_xlabel("Principal component #0")
+    pca_proj_ax.set_title("Projection of testing data\n using PCA")
+
+    kernel_pca_proj_ax.scatter(X_test_kernel_pca[:, 0], X_test_kernel_pca[:, 1], c=y_test)
+    kernel_pca_proj_ax.set_ylabel("Principal component #1")
+    kernel_pca_proj_ax.set_xlabel("Principal component #0")
+    _ = kernel_pca_proj_ax.set_title("Projection of testing data\n using KernelPCA")
+
+    return fig1
+
+
+with gr.Blocks() as demo:
+    gr.Markdown("## PCA vs Kernel PCA")
+    gr.Markdown("Demo is based on the [Kernel PCA](https://scikit-learn.org/stable/auto_examples/decomposition/plot_kernel_pca.html#sphx-glr-auto-examples-decomposition-plot-kernel-pca-py) example from scikit-learn.")
+    with gr.Row():
+        p1 = gr.Slider(0, 10, label="Number of PCs", value=2, step=1)
+        p2 = gr.Slider(0, 10, label="Kernel coefficient", value=2, step=1e-3)
+        p3 = gr.Slider(0, 1, label="Hyperparameter for ridge regression", value=0.1, step=1e-3)
+    btn = gr.Button(value="Submit")
+    btn.click(fit_plot, inputs=[p1, p2, p3], outputs=gr.Plot(label="Projecting data with PCA and Kernel PCA"))
+
+demo.launch()
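A note on the two new sliders: in KernelPCA, gamma is the RBF kernel coefficient and alpha is the hyperparameter of the ridge regression that learns the inverse transform when fit_inverse_transform=True, which is why the app labels them "Kernel coefficient" and "Hyperparameter for ridge regression". The sketch below is not part of the Space; it is a minimal, standalone sanity check (the alpha values tried and the reconstruction-MSE metric are illustrative assumptions, and gamma=2 mirrors the slider default) showing how alpha affects the quality of inverse_transform on the same circles data.

# Standalone sketch, not the app's code: illustrative values only.
# KernelPCA with fit_inverse_transform=True learns an approximate inverse map
# via kernel ridge regression; alpha is that ridge regularization strength.
import numpy as np
from sklearn.datasets import make_circles
from sklearn.decomposition import KernelPCA
from sklearn.model_selection import train_test_split

X, y = make_circles(n_samples=1_000, factor=0.3, noise=0.05, random_state=0)
X_train, X_test, _, _ = train_test_split(X, y, stratify=y, random_state=0)

for alpha in (1e-3, 0.1, 1.0):  # 0.1 is the p3 slider default above
    kpca = KernelPCA(
        n_components=None, kernel="rbf", gamma=2, fit_inverse_transform=True, alpha=alpha
    )
    Z = kpca.fit(X_train).transform(X_test)
    X_back = kpca.inverse_transform(Z)  # map the projection back to feature space
    mse = np.mean((X_test - X_back) ** 2)
    print(f"alpha={alpha:g}: inverse-transform MSE on the test split = {mse:.5f}")

In the app itself these parameters only change the projection that fit_plot draws; the inverse transform is fitted but never plotted.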