sanghan committed
Commit 512ff5b · Parent: 42bfe8f

initial commit
.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+.bin filter=lfs diff=lfs merge=lfs -text
+**/*.bin filter=lfs diff=lfs merge=lfs -text
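Note on the two added patterns: in gitignore-style matching, `.bin` matches only a path component named exactly `.bin`, while `**/*.bin` matches files with a `.bin` extension under any directory (which is what routes the binaries in `bin/` through LFS). A rough illustration using Python's `fnmatch` — a simplification, since git's real attribute matching has additional rules, and `lfs_tracked` is a hypothetical helper for this sketch only:

```python
from fnmatch import fnmatch

# Hypothetical helper to illustrate the pattern semantics; git's actual
# gitattributes matching is more nuanced than fnmatch.
def lfs_tracked(path, patterns=(".bin", "**/*.bin")):
    return any(fnmatch(path, p) for p in patterns)

print(lfs_tracked("bin/genKeys.bin"))  # True, via **/*.bin
print(lfs_tracked(".bin"))             # True, literal name match
```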
.gitignore ADDED
@@ -0,0 +1,16 @@
+# Python-generated files
+__pycache__/
+*.py[oc]
+build/
+dist/
+wheels/
+*.egg-info
+
+# Virtual environments
+.venv
+
+# Ignored Files/Dirs
+**/Server
+**/Keys
+**/Client
+*-emb.txt
.python-version ADDED
@@ -0,0 +1 @@
+3.13
README.md CHANGED
@@ -1,13 +0,0 @@
----
-title: Histopathologic Cancer FHE
-emoji: ⚡
-colorFrom: red
-colorTo: red
-sdk: gradio
-sdk_version: 5.25.2
-app_file: app.py
-pinned: false
-short_description: Binary classification of histopathologic cancer cells
----
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py ADDED
@@ -0,0 +1,277 @@
+import os
+import subprocess
+import time
+
+import gradio as gr
+import numpy as np
+import timm
+import torch
+import torch.nn.functional as F
+from PIL import Image
+
+REF_CANCER_IMG = "./examples/a24ce148f6ffa7ef8eefb4efb12ebffe8dd700da.jpg"
+SECURITYLEVELS = ["128", "196", "256"]
+EXAMPLE_IMAGES = [
+    "./examples/0b820b71670c039dd0a51333d1c919f471a9e940.jpg",
+    "./examples/7f6ccae485af121e0b6ee733022e226ee6b0c65f.jpg",
+    "./examples/8eaaa7a400aa79d36c2440a4aa101cc14256cda4.jpg",
+    "./examples/94fa32b29cc1c00403176c0795fffa3cfaa0f20e.jpg",
+    "./examples/730431efa2f79927156dcc4382819e9a6cc2c5bb.jpg",
+    "./examples/a1c001f6b242c72d3066f15ac6eb059ea72d30ba.jpg",
+    "./examples/a106469bbfda4cdc5a9da7ac0152927bf1b4a92d.jpg",
+    "./examples/c3d660212bf2a11c994e0eadff13770a9927b731.jpg",
+    "./examples/f416de7491a31951f79b3cee75b002f4d1bf0162.jpg",
+]
+
+TRUE_LABELS = {
+    "./examples/0b820b71670c039dd0a51333d1c919f471a9e940.jpg": 1,
+    "./examples/7f6ccae485af121e0b6ee733022e226ee6b0c65f.jpg": 1,
+    "./examples/8eaaa7a400aa79d36c2440a4aa101cc14256cda4.jpg": 0,
+    "./examples/94fa32b29cc1c00403176c0795fffa3cfaa0f20e.jpg": 1,
+    "./examples/730431efa2f79927156dcc4382819e9a6cc2c5bb.jpg": 0,
+    "./examples/a1c001f6b242c72d3066f15ac6eb059ea72d30ba.jpg": 0,
+    "./examples/a106469bbfda4cdc5a9da7ac0152927bf1b4a92d.jpg": 0,
+    "./examples/c3d660212bf2a11c994e0eadff13770a9927b731.jpg": 1,
+    "./examples/f416de7491a31951f79b3cee75b002f4d1bf0162.jpg": 0,
+}
+
+
+def display_image(image):
+    return image
+
+
+def get_selected_image(evt: gr.SelectData):
+    image_path = EXAMPLE_IMAGES[evt.index]
+    return Image.open(image_path).convert("RGB")
+
+
+def runBinFile(*args):
+    binary_path = args[0]
+    if not os.path.isfile(binary_path):
+        return "Error: compiled binary not found."
+    try:
+        os.chmod(binary_path, 0o755)
+        start = time.time()
+        result = subprocess.run(
+            list(args), stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
+        )
+        end = time.time()
+        duration = (end - start) * 1000
+        if "print" in args:
+            return result.stdout
+        elif "styledPrint" in args:
+            return styled_output(result.stdout)
+        elif result.returncode == 0:
+            return True, f"<b>⏱️ Processing Time:</b> {duration:.0f} ms"
+        else:
+            return (
+                False,
+                f"""
+                ❌ <b>Return code:</b> {result.returncode}<br>
+                <b>stdout:</b><br><pre>{result.stdout.strip()}</pre><br>
+                <b>stderr:</b><br><pre style='color:red;'>{result.stderr.strip()}</pre>
+                """,
+            )
+    except Exception as e:
+        return f"Execution failed: {e}"
+
+
+def styled_output(result):
+    if result.strip().lower() == "match":
+        return "<span style='color: green; font-weight: bold;'>✔️ Match</span>"
+    elif result.strip().lower() == "no match":
+        return "<span style='color: red; font-weight: bold;'>❌ No Match</span>"
+    else:
+        return "<span style='color: red; font-weight: bold;'>Error</span>"
+
+
+def extract_emb(image, mode=None):
+    model = timm.create_model(
+        "hf-hub:1aurent/resnet18.tiatoolbox-pcam", pretrained=True, num_classes=0
+    ).eval()
+
+    data_config = timm.data.resolve_model_data_config(model)
+    transforms = timm.data.create_transform(**data_config, is_training=False)
+    features = transforms(image).unsqueeze(0)
+    with torch.no_grad():
+        embs = model(features)
+    embs = F.normalize(embs, dim=1)
+    embs = embs.detach().numpy()
+    embs = embs.squeeze(0)
+    if mode is not None:
+        np.savetxt(
+            f"{mode}-emb.txt",
+            embs.reshape(1, embs.shape[0]),
+            fmt="%.6f",
+            delimiter=",",
+        )
+    return embs
+
+
+def check(security_level, threshold):
+    image_input_auth = Image.open(REF_CANCER_IMG).convert("RGB")
+    _ = extract_emb(image_input_auth, mode="auth")
+    runBinFile("./bin/encProbe.bin", security_level, "encrypt")
+    runBinFile("./bin/recDecision.bin", security_level, "decision", str(threshold))
+    final_output = runBinFile("./bin/decDecision.bin", security_level, "print")
+
+    if final_output.strip().lower() == "match":
+        result = (
+            "<h1><span style='color: green; font-weight: bold;'>Cancerous</span></h1>"
+        )
+    elif final_output.strip().lower() == "no match":
+        result = "<h1><span style='color: green; font-weight: bold;'>Not Cancerous</span></h1>"
+    else:
+        result = "<h1><span style='color: red; font-weight: bold;'>Error!</span></h1>"
+
+    return result
+
+
+if __name__ == "__main__":
+    with gr.Blocks() as demo:
+        gr.HTML(
+            """
+            <h1 align="center">Suraksh.AI</h1>
+            <p align="center">
+                <a href="https://www.suraksh.ai"> https://www.suraksh.ai</a>
+            </p>
+            """
+        )
+        with gr.Row():
+            gr.Markdown("## Setup Phase: 🔐 Generate the FHE public and secret keys.")
+        with gr.Row():
+            with gr.Column():
+                securityLevel = gr.Dropdown(
+                    choices=SECURITYLEVELS, label="Choose a security level"
+                )
+            with gr.Column():
+                key_button = gr.Button("Generate the FHE public and secret keys")
+                key_status = gr.Checkbox(
+                    label="FHE Public and Secret keys generated.", value=False
+                )
+                time_output = gr.HTML()
+                key_button.click(
+                    fn=runBinFile,
+                    inputs=[
+                        gr.State("./bin/genKeys.bin"),
+                        securityLevel,
+                        gr.State("genkeys"),
+                    ],
+                    outputs=[key_status, time_output],
+                )
+        with gr.Row():
+            gr.Markdown("### Step 1: Upload or select a reference cell image")
+        with gr.Row():
+            image_input_enroll = gr.Image(type="pil", visible=False)
+            with gr.Column():
+                image_upload_enroll = gr.Image(
+                    label="Upload a reference cell image.",
+                    type="pil",
+                    sources="upload",
+                )
+                image_upload_enroll.change(
+                    fn=display_image,
+                    inputs=image_upload_enroll,
+                    outputs=image_input_enroll,
+                )
+            with gr.Column():
+                example_gallery = gr.Gallery(value=EXAMPLE_IMAGES, columns=3)
+                example_gallery.select(
+                    fn=get_selected_image, inputs=None, outputs=image_input_enroll
+                )
+            with gr.Column():
+                selectedImage = gr.Image(
+                    type="pil", label="Reference cell image", interactive=False
+                )
+                image_input_enroll.change(
+                    fn=lambda img: img, inputs=image_input_enroll, outputs=selectedImage
+                )
+        with gr.Row():
+            gr.Markdown("### Step 2: Generate reference embedding.")
+        with gr.Row():
+            with gr.Column():
+                example_gallery.select(
+                    fn=get_selected_image, inputs=None, outputs=image_input_enroll
+                )
+                key_button = gr.Button("Generate embedding")
+                enroll_emb_text = gr.JSON(label="Reference embedding")
+                mode = gr.State("enroll")
+                key_button.click(
+                    fn=extract_emb,
+                    inputs=[image_input_enroll, mode],
+                    outputs=enroll_emb_text,
+                )
+        with gr.Row():
+            gr.Markdown("### Step 3: 🔒 Encrypt reference embedding using FHE.")
+        with gr.Row():
+            with gr.Column():
+                key_button = gr.Button("Encrypt")
+                key_status = gr.Checkbox(
+                    label="Reference embedding encrypted.", value=False
+                )
+                time_output = gr.HTML()
+                key_button.click(
+                    fn=runBinFile,
+                    inputs=[
+                        gr.State("./bin/encReference.bin"),
+                        securityLevel,
+                        gr.State("encrypt"),
+                    ],
+                    outputs=[key_status, time_output],
+                )
+
+            with gr.Column():
+                key_button = gr.Button("Display")
+                output_text = gr.Text(
+                    label="Encrypted embedding", lines=3, interactive=False
+                )
+                key_button.click(
+                    fn=runBinFile,
+                    inputs=[
+                        gr.State("./bin/encReference.bin"),
+                        securityLevel,
+                        gr.State("print"),
+                    ],
+                    outputs=output_text,
+                )
+
+        with gr.Row():
+            gr.Markdown(
+                "### Step 4: 🔒 Compute biometric recognition decision using the threshold under FHE."
+            )
+        with gr.Row():
+            gr.Markdown("### Set the recognition threshold.")
+        with gr.Row():
+            slider_threshold = gr.Slider(
+                -512 * 5,
+                512 * 5,
+                step=1,
+                value=133,
+                label="Decision threshold",
+                info="The higher, the stricter.",
+                interactive=True,
+            )
+            number_threshold = gr.Textbox(visible=False, value="133")
+            slider_threshold.change(
+                fn=lambda x: x, inputs=slider_threshold, outputs=number_threshold
+            )
+        with gr.Row():
+            check_button = gr.Button("Check")
+        with gr.Row():
+            with gr.Column(scale=1):
+                final_output = gr.HTML()
+                check_button.click(
+                    fn=check,
+                    inputs=[securityLevel, slider_threshold],
+                    outputs=final_output,
+                )
+
+            with gr.Column(scale=1):
+                image_output_enroll = gr.Image(label="Reference", sources="upload")
+                image_input_enroll.change(
+                    fn=display_image,
+                    inputs=image_input_enroll,
+                    outputs=image_output_enroll,
+                )
+
+    demo.launch()
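For orientation: the UI drives the four LFS-tracked binaries (`genKeys.bin`, `encReference.bin`, `encProbe.bin`, `recDecision.bin`/`decDecision.bin`) around the timm embedding model, with embeddings exchanged via the `*-emb.txt` files that `.gitignore` excludes. A plaintext sanity check of the similarity the pipeline evaluates under encryption might look like the sketch below — an assumption, since the homomorphic decision itself happens inside `recDecision.bin` (the quantized scoring is sketched after the lookup tables further down); `sanity_check.py` is hypothetical and not part of this commit:

```python
# sanity_check.py — hypothetical helper; assumes app.py's definitions are
# importable (its __main__ guard keeps the Gradio demo from launching).
import numpy as np
from PIL import Image

from app import EXAMPLE_IMAGES, REF_CANCER_IMG, extract_emb

ref = extract_emb(Image.open(EXAMPLE_IMAGES[0]).convert("RGB"))
probe = extract_emb(Image.open(REF_CANCER_IMG).convert("RGB"))

# extract_emb L2-normalizes its output, so the inner product is the
# cosine similarity between the two cell images.
print(f"cosine similarity: {float(np.dot(ref, probe)):.4f}")
```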
bin/decDecision.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f7675db4cd1c61743d73b78364ccefa313b408b09713e3b16e19f5a6eee087bb
+size 8045152
bin/encProbe.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3959987099fb521134b6c46c367b2b6f6106bf9bb45eda98108e26710376f078
+size 8030536
bin/encReference.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6705fbd4703ab2bc52d4af5719f5ece369de22d214dda966b124a85803c3a92d
+size 8069184
bin/genKeys.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:30c5e81854404a40c06ac9b727a17907536b1490defb055831921022b25e6e7c
+size 8079376
bin/recDecision.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:97d6a8be345463779f296ae73c70a082ea90e2c41add0f99f6875d75ff753c75
+size 8057936
examples/0b820b71670c039dd0a51333d1c919f471a9e940.jpg ADDED
examples/730431efa2f79927156dcc4382819e9a6cc2c5bb.jpg ADDED
examples/7f6ccae485af121e0b6ee733022e226ee6b0c65f.jpg ADDED
examples/8eaaa7a400aa79d36c2440a4aa101cc14256cda4.jpg ADDED
examples/94fa32b29cc1c00403176c0795fffa3cfaa0f20e.jpg ADDED
examples/a106469bbfda4cdc5a9da7ac0152927bf1b4a92d.jpg ADDED
examples/a1c001f6b242c72d3066f15ac6eb059ea72d30ba.jpg ADDED
examples/a24ce148f6ffa7ef8eefb4efb12ebffe8dd700da.jpg ADDED
examples/c3d660212bf2a11c994e0eadff13770a9927b731.jpg ADDED
examples/f416de7491a31951f79b3cee75b002f4d1bf0162.jpg ADDED
lookupTables/Borders_nB_3_dimF_512.txt ADDED
@@ -0,0 +1 @@
+-0.050880,-0.029846,-0.014102,-0.000000,0.014102,0.029846,0.050880
lookupTables/MFIP_nB_3_dQ_0.001_dimF_512.txt ADDED
@@ -0,0 +1,8 @@
+5,3,2,1,-1,-2,-3,-5
+3,2,1,0,0,-1,-2,-3
+2,1,0,0,0,0,-1,-2
+1,0,0,0,0,0,0,-1
+-1,0,0,0,0,0,0,1
+-2,-1,0,0,0,0,1,2
+-3,-2,-1,0,0,1,2,3
+-5,-3,-2,-1,1,2,3,5
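The file names encode the scheme's parameters: nB=3 bits per feature (2³ = 8 quantization cells), dimF=512 embedding dimensions, and dQ=0.001 as a quantization step. The Borders file holds the 7 cell boundaries, and the MFIP file an 8×8 table of quantized products whose maximum magnitude of 5 over 512 dimensions matches app.py's threshold slider range of ±512·5. A plaintext sketch of how these tables could score two embeddings — an assumption about what the binaries compute homomorphically, not their confirmed implementation:

```python
# Plaintext sketch (assumed scoring scheme): quantize each coordinate into
# one of 8 cells via the Borders file, then sum per-coordinate MFIP entries.
import numpy as np

borders = np.loadtxt("lookupTables/Borders_nB_3_dimF_512.txt", delimiter=",")
mfip = np.loadtxt(
    "lookupTables/MFIP_nB_3_dQ_0.001_dimF_512.txt", delimiter=","
).astype(int)

def quantize(emb: np.ndarray) -> np.ndarray:
    # searchsorted maps each value to a cell index in 0..7
    return np.searchsorted(borders, emb)

def mfip_score(a: np.ndarray, b: np.ndarray) -> int:
    # Sum of table entries over 512 coordinates; the result ranges over
    # [-512*5, 512*5], matching the app's decision-threshold slider.
    return int(mfip[quantize(a), quantize(b)].sum())
```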
pyproject.toml ADDED
@@ -0,0 +1,11 @@
+[project]
+name = "histopathologic-cancer-fhe"
+version = "0.1.0"
+description = "Add your description here"
+readme = "README.md"
+requires-python = ">=3.13"
+dependencies = [
+    "gradio>=5.25.2",
+    "timm>=1.0.15",
+    "torch>=2.6.0",
+]
requirements.txt ADDED
@@ -0,0 +1,217 @@
+# This file was autogenerated by uv via the following command:
+#    uv pip compile pyproject.toml
+aiofiles==24.1.0
+    # via gradio
+annotated-types==0.7.0
+    # via pydantic
+anyio==4.9.0
+    # via
+    #   gradio
+    #   httpx
+    #   starlette
+audioop-lts==0.2.1
+    # via gradio
+certifi==2025.1.31
+    # via
+    #   httpcore
+    #   httpx
+    #   requests
+charset-normalizer==3.4.1
+    # via requests
+click==8.1.8
+    # via
+    #   typer
+    #   uvicorn
+fastapi==0.115.12
+    # via gradio
+ffmpy==0.5.0
+    # via gradio
+filelock==3.18.0
+    # via
+    #   huggingface-hub
+    #   torch
+fsspec==2025.3.2
+    # via
+    #   gradio-client
+    #   huggingface-hub
+    #   torch
+gradio==5.25.2
+    # via histopathologic-cancer-fhe (pyproject.toml)
+gradio-client==1.8.0
+    # via gradio
+groovy==0.1.2
+    # via gradio
+h11==0.14.0
+    # via
+    #   httpcore
+    #   uvicorn
+httpcore==1.0.8
+    # via httpx
+httpx==0.28.1
+    # via
+    #   gradio
+    #   gradio-client
+    #   safehttpx
+huggingface-hub==0.30.2
+    # via
+    #   gradio
+    #   gradio-client
+    #   timm
+idna==3.10
+    # via
+    #   anyio
+    #   httpx
+    #   requests
+jinja2==3.1.6
+    # via
+    #   gradio
+    #   torch
+markdown-it-py==3.0.0
+    # via rich
+markupsafe==3.0.2
+    # via
+    #   gradio
+    #   jinja2
+mdurl==0.1.2
+    # via markdown-it-py
+mpmath==1.3.0
+    # via sympy
+networkx==3.4.2
+    # via torch
+numpy==2.2.5
+    # via
+    #   gradio
+    #   pandas
+    #   torchvision
+nvidia-cublas-cu12==12.4.5.8
+    # via
+    #   nvidia-cudnn-cu12
+    #   nvidia-cusolver-cu12
+    #   torch
+nvidia-cuda-cupti-cu12==12.4.127
+    # via torch
+nvidia-cuda-nvrtc-cu12==12.4.127
+    # via torch
+nvidia-cuda-runtime-cu12==12.4.127
+    # via torch
+nvidia-cudnn-cu12==9.1.0.70
+    # via torch
+nvidia-cufft-cu12==11.2.1.3
+    # via torch
+nvidia-curand-cu12==10.3.5.147
+    # via torch
+nvidia-cusolver-cu12==11.6.1.9
+    # via torch
+nvidia-cusparse-cu12==12.3.1.170
+    # via
+    #   nvidia-cusolver-cu12
+    #   torch
+nvidia-cusparselt-cu12==0.6.2
+    # via torch
+nvidia-nccl-cu12==2.21.5
+    # via torch
+nvidia-nvjitlink-cu12==12.4.127
+    # via
+    #   nvidia-cusolver-cu12
+    #   nvidia-cusparse-cu12
+    #   torch
+nvidia-nvtx-cu12==12.4.127
+    # via torch
+orjson==3.10.16
+    # via gradio
+packaging==25.0
+    # via
+    #   gradio
+    #   gradio-client
+    #   huggingface-hub
+pandas==2.2.3
+    # via gradio
+pillow==11.2.1
+    # via
+    #   gradio
+    #   torchvision
+pydantic==2.11.3
+    # via
+    #   fastapi
+    #   gradio
+pydantic-core==2.33.1
+    # via pydantic
+pydub==0.25.1
+    # via gradio
+pygments==2.19.1
+    # via rich
+python-dateutil==2.9.0.post0
+    # via pandas
+python-multipart==0.0.20
+    # via gradio
+pytz==2025.2
+    # via pandas
+pyyaml==6.0.2
+    # via
+    #   gradio
+    #   huggingface-hub
+    #   timm
+requests==2.32.3
+    # via huggingface-hub
+rich==14.0.0
+    # via typer
+ruff==0.11.6
+    # via gradio
+safehttpx==0.1.6
+    # via gradio
+safetensors==0.5.3
+    # via timm
+semantic-version==2.10.0
+    # via gradio
+setuptools==79.0.0
+    # via torch
+shellingham==1.5.4
+    # via typer
+six==1.17.0
+    # via python-dateutil
+sniffio==1.3.1
+    # via anyio
+starlette==0.46.2
+    # via
+    #   fastapi
+    #   gradio
+sympy==1.13.1
+    # via torch
+timm==1.0.15
+    # via histopathologic-cancer-fhe (pyproject.toml)
+tomlkit==0.13.2
+    # via gradio
+torch==2.6.0
+    # via
+    #   histopathologic-cancer-fhe (pyproject.toml)
+    #   timm
+    #   torchvision
+torchvision==0.21.0
+    # via timm
+tqdm==4.67.1
+    # via huggingface-hub
+triton==3.2.0
+    # via torch
+typer==0.15.2
+    # via gradio
+typing-extensions==4.13.2
+    # via
+    #   fastapi
+    #   gradio
+    #   gradio-client
+    #   huggingface-hub
+    #   pydantic
+    #   pydantic-core
+    #   torch
+    #   typer
+    #   typing-inspection
+typing-inspection==0.4.0
+    # via pydantic
+tzdata==2025.2
+    # via pandas
+urllib3==2.4.0
+    # via requests
+uvicorn==0.34.2
+    # via gradio
+websockets==15.0.1
+    # via gradio-client
uv.lock ADDED
The diff for this file is too large to render. See raw diff