ashiq24 commited on
Commit
e75efae
·
1 Parent(s): d568f14

code for dataloading and visualization

Browse files
Files changed (4) hide show
  1. .gitignore +65 -0
  2. data_vis.ipynb +486 -0
  3. fsi_animation.gif +3 -0
  4. fsi_reader.py +138 -0
.gitignore ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Virtual environment
10
+ venv/
11
+ env/
12
+ .venv/
13
+ .venv-*/
14
+
15
+ # Distribution / packaging
16
+ build/
17
+ dist/
18
+ *.egg-info/
19
+ *.egg
20
+ pip-log.txt
21
+ pip-delete-this-directory.txt
22
+
23
+ # Jupyter Notebook checkpoints
24
+ .ipynb_checkpoints/
25
+
26
+ # PyCharm project files
27
+ .idea/
28
+
29
+ # VS Code settings
30
+ .vscode/
31
+ *.code-workspace
32
+
33
+ # Test coverage and pytest cache
34
+ .coverage
35
+ htmlcov/
36
+ .tox/
37
+ .nox/
38
+ .pytest_cache/
39
+ .cache/
40
+ nosetests.xml
41
+ coverage.xml
42
+ *.cover
43
+ .hypothesis/
44
+
45
+ # Mypy, Pyre type checker
46
+ .mypy_cache/
47
+ .pyre/
48
+
49
+ # Linting tools
50
+ .pylint.d/
51
+ ruff_cache/
52
+
53
+ # Logs and debug files
54
+ logs/
55
+ *.log
56
+ debug.log
57
+
58
+ # MacOS system files
59
+ .DS_Store
60
+
61
+ # Windows system files
62
+ Thumbs.db
63
+
64
+ # Jupyter notebook metadata
65
+ *.ipynb_metadata.json
data_vis.ipynb ADDED
@@ -0,0 +1,486 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "from fsi_reader import FsiDataReader\n",
10
+ "import matplotlib.pyplot as plt\n",
11
+ "import numpy as np\n",
12
+ "from matplotlib.tri import Triangulation\n",
13
+ "from matplotlib.animation import FuncAnimation\n",
14
+ "from scipy.interpolate import griddata"
15
+ ]
16
+ },
17
+ {
18
+ "cell_type": "code",
19
+ "execution_count": null,
20
+ "metadata": {},
21
+ "outputs": [],
22
+ "source": [
23
+ "data = FsiDataReader('./fsi-data/', mu=['1.0'], in_lets_x1=['0.0'])\n",
24
+ "mesh = data.input_mesh\n",
25
+ "print(mesh.shape)"
26
+ ]
27
+ },
28
+ {
29
+ "cell_type": "code",
30
+ "execution_count": null,
31
+ "metadata": {},
32
+ "outputs": [],
33
+ "source": [
34
+ "data_loader = data.get_loader(batch_size=1, shuffle=False)"
35
+ ]
36
+ },
37
+ {
38
+ "cell_type": "code",
39
+ "execution_count": null,
40
+ "metadata": {},
41
+ "outputs": [],
42
+ "source": [
43
+ "def single_plot(data, mesh_points):\n",
44
+ " data = np.squeeze(data) # Shape becomes (1317,)\n",
45
+ " print(data.shape)\n",
46
+ " print(mesh_points.shape)\n",
47
+ " x, y = mesh_points[:, 0], mesh_points[:, 1]\n",
48
+ "\n",
49
+ " # Create figure with subplots\n",
50
+ " fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(16, 6), \n",
51
+ " gridspec_kw={'width_ratios': [1, 1.2]})\n",
52
+ "\n",
53
+ " # Approach 1: Triangulation-based contour plot\n",
54
+ " tri = Triangulation(x, y)\n",
55
+ " contour = ax1.tricontourf(tri, data, levels=40, cmap='viridis')\n",
56
+ " fig.colorbar(contour, ax=ax1, label='Value', shrink=0.3)\n",
57
+ " ax1.set_title('Contour Plot of Field Data')\n",
58
+ " ax1.set_aspect('equal')\n",
59
+ "\n",
60
+ " # Approach 2: Scatter plot with interpolated background\n",
61
+ " grid_x, grid_y = np.mgrid[x.min():x.max():100j, y.min():y.max():100j]\n",
62
+ " grid_z = griddata((x, y), data, (grid_x, grid_y), method='cubic')\n",
63
+ "\n",
64
+ " im = ax2.imshow(grid_z.T, origin='lower', extent=[x.min(), x.max(), \n",
65
+ " y.min(), y.max()], cmap='plasma')\n",
66
+ " ax2.scatter(x, y, c=data, edgecolor='k', lw=0.3, cmap='plasma', s=15)\n",
67
+ " fig.colorbar(im, ax=ax2, label='Interpolated Value', shrink=0.3)\n",
68
+ " ax2.set_title('Interpolated Surface with Sample Points')\n",
69
+ "\n",
70
+ " # Common formatting\n",
71
+ " for ax in (ax1, ax2):\n",
72
+ " ax.set_xlabel('X Coordinate')\n",
73
+ " ax.set_ylabel('Y Coordinate')\n",
74
+ " ax.grid(True, alpha=0.3)\n",
75
+ " \n",
76
+ " plt.tight_layout()\n",
77
+ " plt.show()"
78
+ ]
79
+ },
80
+ {
81
+ "cell_type": "code",
82
+ "execution_count": 17,
83
+ "metadata": {},
84
+ "outputs": [],
85
+ "source": [
86
+ "def create_field_animation(data_frames, mesh_frames, interval=100, save_path=None):\n",
87
+ " \"\"\"\n",
88
+ " Create an animation of time-varying 2D field data on a mesh.\n",
89
+ " \n",
90
+ " Parameters:\n",
91
+ " -----------\n",
92
+ " data_frames : list of arrays\n",
93
+ " List of data arrays for each time frame (each with shape [1, 1317, 1] or similar)\n",
94
+ " mesh_frames : list of arrays or single array\n",
95
+ " Either a list of mesh coordinates for each frame or a single fixed mesh\n",
96
+ " interval : int\n",
97
+ " Delay between animation frames in milliseconds\n",
98
+ " save_path : str, optional\n",
99
+ " Path to save the GIF animation\n",
100
+ " \"\"\"\n",
101
+ " # Determine if mesh is fixed or time-varying\n",
102
+ " mesh_varying = isinstance(mesh_frames, list)\n",
103
+ " \n",
104
+ " # Get initial mesh and data\n",
105
+ " mesh_initial = mesh_frames[0] if mesh_varying else mesh_frames\n",
106
+ " data_initial = np.squeeze(data_frames[0])\n",
107
+ " \n",
108
+ " # Extract coordinates\n",
109
+ " x, y = mesh_initial[:, 0], mesh_initial[:, 1]\n",
110
+ " \n",
111
+ " # Create figure\n",
112
+ " fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(50, 10), \n",
113
+ " gridspec_kw={'width_ratios': [1, 1.2]})\n",
114
+ " \n",
115
+ " # Calculate global min/max for consistent colorbars\n",
116
+ " all_data = np.concatenate([np.squeeze(frame) for frame in data_frames])\n",
117
+ " vmin, vmax = all_data.min(), all_data.max()\n",
118
+ " \n",
119
+ " # Create initial triangulation\n",
120
+ " tri_initial = Triangulation(x, y)\n",
121
+ " \n",
122
+ " # Set up first subplot - contour\n",
123
+ " contour = ax1.tricontourf(tri_initial, data_initial, levels=40, cmap='viridis', \n",
124
+ " vmin=vmin, vmax=vmax)\n",
125
+ " # Add contour lines for better visibility\n",
126
+ " contour_lines = ax1.tricontour(tri_initial, data_initial, levels=15, \n",
127
+ " colors='black', linewidths=0.5, alpha=0.7)\n",
128
+ " \n",
129
+ " fig.colorbar(contour, ax=ax1, label='Value', shrink=0.3)\n",
130
+ " ax1.set_title('Contour Plot of Field Data')\n",
131
+ " ax1.set_aspect('equal')\n",
132
+ " \n",
133
+ " # Set up second subplot - interpolated surface with scatter points\n",
134
+ " grid_x, grid_y = np.mgrid[x.min():x.max():100j, y.min():y.max():100j]\n",
135
+ " grid_z = griddata((x, y), data_initial, (grid_x, grid_y), method='cubic')\n",
136
+ " \n",
137
+ " im = ax2.imshow(grid_z.T, origin='lower', extent=[x.min(), x.max(), \n",
138
+ " y.min(), y.max()], \n",
139
+ " cmap='plasma', vmin=vmin, vmax=vmax)\n",
140
+ " scat = ax2.scatter(x, y, c=data_initial, edgecolor='k', lw=0.3, \n",
141
+ " cmap='plasma', s=15, vmin=vmin, vmax=vmax)\n",
142
+ " \n",
143
+ " fig.colorbar(im, ax=ax2, label='Interpolated Value', shrink=0.3)\n",
144
+ " ax2.set_title('Interpolated Surface with Sample Points')\n",
145
+ " \n",
146
+ " # Common formatting\n",
147
+ " for ax in (ax1, ax2):\n",
148
+ " ax.set_xlabel('X Coordinate')\n",
149
+ " ax.set_ylabel('Y Coordinate')\n",
150
+ " ax.grid(True, alpha=0.3)\n",
151
+ " \n",
152
+ " # Add frame counter\n",
153
+ " time_text = ax1.text(0.02, 0.98, '', transform=ax1.transAxes, \n",
154
+ " fontsize=10, va='top', ha='left')\n",
155
+ " \n",
156
+ " plt.tight_layout()\n",
157
+ " \n",
158
+ " # Update function for animation\n",
159
+ " def update(frame):\n",
160
+ " # Get current data\n",
161
+ " data = np.squeeze(data_frames[frame])\n",
162
+ " \n",
163
+ " # Get current mesh if varying\n",
164
+ " if mesh_varying:\n",
165
+ " mesh = mesh_frames[frame]\n",
166
+ " x, y = mesh[:, 0], mesh[:, 1]\n",
167
+ " tri = Triangulation(x, y)\n",
168
+ " else:\n",
169
+ " mesh = mesh_frames\n",
170
+ " x, y = mesh[:, 0], mesh[:, 1]\n",
171
+ " tri = tri_initial\n",
172
+ " \n",
173
+ " # Update contour plot\n",
174
+ " for c in ax1.collections:\n",
175
+ " c.remove()\n",
176
+ " new_contour = ax1.tricontourf(tri, data, levels=40, cmap='viridis', \n",
177
+ " vmin=vmin, vmax=vmax)\n",
178
+ " new_lines = ax1.tricontour(tri, data, levels=15, colors='black', \n",
179
+ " linewidths=0.5, alpha=0.7)\n",
180
+ " \n",
181
+ " # Update interpolated surface\n",
182
+ " grid_z = griddata((x, y), data, (grid_x, grid_y), method='cubic')\n",
183
+ " im.set_array(grid_z.T)\n",
184
+ " \n",
185
+ " # Update scatter points\n",
186
+ " scat.set_offsets(mesh)\n",
187
+ " scat.set_array(data)\n",
188
+ " \n",
189
+ " # Update frame counter\n",
190
+ " time_text.set_text(f'Frame: {frame+1}/{len(data_frames)}')\n",
191
+ " \n",
192
+ " return [new_contour, new_lines, im, scat, time_text]\n",
193
+ " \n",
194
+ " # Create animation\n",
195
+ " anim = FuncAnimation(fig, update, frames=len(data_frames), \n",
196
+ " interval=interval, blit=False)\n",
197
+ " \n",
198
+ " # Save if path provided\n",
199
+ " if save_path:\n",
200
+ " print(f\"Saving animation to {save_path}...\")\n",
201
+ " if save_path.endswith('.gif'):\n",
202
+ " anim.save(save_path, writer='pillow', dpi=150)\n",
203
+ " else:\n",
204
+ " anim.save(save_path, writer='ffmpeg', dpi=150)\n",
205
+ " \n",
206
+ " return anim"
207
+ ]
208
+ },
209
+ {
210
+ "cell_type": "code",
211
+ "execution_count": 18,
212
+ "metadata": {},
213
+ "outputs": [
214
+ {
215
+ "name": "stdout",
216
+ "output_type": "stream",
217
+ "text": [
218
+ "torch.Size([1, 1317, 5])\n",
219
+ "torch.Size([1, 1317, 5])\n",
220
+ "torch.Size([1, 1317, 5])\n",
221
+ "torch.Size([1, 1317, 5])\n",
222
+ "torch.Size([1, 1317, 5])\n",
223
+ "torch.Size([1, 1317, 5])\n",
224
+ "torch.Size([1, 1317, 5])\n",
225
+ "torch.Size([1, 1317, 5])\n",
226
+ "torch.Size([1, 1317, 5])\n",
227
+ "torch.Size([1, 1317, 5])\n",
228
+ "torch.Size([1, 1317, 5])\n",
229
+ "torch.Size([1, 1317, 5])\n",
230
+ "torch.Size([1, 1317, 5])\n",
231
+ "torch.Size([1, 1317, 5])\n",
232
+ "torch.Size([1, 1317, 5])\n",
233
+ "torch.Size([1, 1317, 5])\n",
234
+ "torch.Size([1, 1317, 5])\n",
235
+ "torch.Size([1, 1317, 5])\n",
236
+ "torch.Size([1, 1317, 5])\n",
237
+ "torch.Size([1, 1317, 5])\n",
238
+ "torch.Size([1, 1317, 5])\n",
239
+ "torch.Size([1, 1317, 5])\n",
240
+ "torch.Size([1, 1317, 5])\n",
241
+ "torch.Size([1, 1317, 5])\n",
242
+ "torch.Size([1, 1317, 5])\n",
243
+ "torch.Size([1, 1317, 5])\n",
244
+ "torch.Size([1, 1317, 5])\n",
245
+ "torch.Size([1, 1317, 5])\n",
246
+ "torch.Size([1, 1317, 5])\n",
247
+ "torch.Size([1, 1317, 5])\n",
248
+ "torch.Size([1, 1317, 5])\n",
249
+ "torch.Size([1, 1317, 5])\n",
250
+ "torch.Size([1, 1317, 5])\n",
251
+ "torch.Size([1, 1317, 5])\n",
252
+ "torch.Size([1, 1317, 5])\n",
253
+ "torch.Size([1, 1317, 5])\n",
254
+ "torch.Size([1, 1317, 5])\n",
255
+ "torch.Size([1, 1317, 5])\n",
256
+ "torch.Size([1, 1317, 5])\n",
257
+ "torch.Size([1, 1317, 5])\n",
258
+ "torch.Size([1, 1317, 5])\n",
259
+ "torch.Size([1, 1317, 5])\n",
260
+ "torch.Size([1, 1317, 5])\n",
261
+ "torch.Size([1, 1317, 5])\n",
262
+ "torch.Size([1, 1317, 5])\n",
263
+ "torch.Size([1, 1317, 5])\n",
264
+ "torch.Size([1, 1317, 5])\n",
265
+ "torch.Size([1, 1317, 5])\n",
266
+ "torch.Size([1, 1317, 5])\n",
267
+ "torch.Size([1, 1317, 5])\n",
268
+ "torch.Size([1, 1317, 5])\n",
269
+ "torch.Size([1, 1317, 5])\n",
270
+ "torch.Size([1, 1317, 5])\n",
271
+ "torch.Size([1, 1317, 5])\n",
272
+ "torch.Size([1, 1317, 5])\n",
273
+ "torch.Size([1, 1317, 5])\n",
274
+ "torch.Size([1, 1317, 5])\n",
275
+ "torch.Size([1, 1317, 5])\n",
276
+ "torch.Size([1, 1317, 5])\n",
277
+ "torch.Size([1, 1317, 5])\n",
278
+ "torch.Size([1, 1317, 5])\n",
279
+ "torch.Size([1, 1317, 5])\n",
280
+ "torch.Size([1, 1317, 5])\n",
281
+ "torch.Size([1, 1317, 5])\n",
282
+ "torch.Size([1, 1317, 5])\n",
283
+ "torch.Size([1, 1317, 5])\n",
284
+ "torch.Size([1, 1317, 5])\n",
285
+ "torch.Size([1, 1317, 5])\n",
286
+ "torch.Size([1, 1317, 5])\n",
287
+ "torch.Size([1, 1317, 5])\n",
288
+ "torch.Size([1, 1317, 5])\n",
289
+ "torch.Size([1, 1317, 5])\n",
290
+ "torch.Size([1, 1317, 5])\n",
291
+ "torch.Size([1, 1317, 5])\n",
292
+ "torch.Size([1, 1317, 5])\n",
293
+ "torch.Size([1, 1317, 5])\n",
294
+ "torch.Size([1, 1317, 5])\n",
295
+ "torch.Size([1, 1317, 5])\n",
296
+ "torch.Size([1, 1317, 5])\n",
297
+ "torch.Size([1, 1317, 5])\n",
298
+ "torch.Size([1, 1317, 5])\n",
299
+ "torch.Size([1, 1317, 5])\n",
300
+ "torch.Size([1, 1317, 5])\n",
301
+ "torch.Size([1, 1317, 5])\n",
302
+ "torch.Size([1, 1317, 5])\n",
303
+ "torch.Size([1, 1317, 5])\n",
304
+ "torch.Size([1, 1317, 5])\n",
305
+ "torch.Size([1, 1317, 5])\n",
306
+ "torch.Size([1, 1317, 5])\n",
307
+ "torch.Size([1, 1317, 5])\n",
308
+ "torch.Size([1, 1317, 5])\n",
309
+ "torch.Size([1, 1317, 5])\n",
310
+ "torch.Size([1, 1317, 5])\n",
311
+ "torch.Size([1, 1317, 5])\n",
312
+ "torch.Size([1, 1317, 5])\n",
313
+ "torch.Size([1, 1317, 5])\n",
314
+ "torch.Size([1, 1317, 5])\n",
315
+ "torch.Size([1, 1317, 5])\n",
316
+ "torch.Size([1, 1317, 5])\n",
317
+ "torch.Size([1, 1317, 5])\n",
318
+ "torch.Size([1, 1317, 5])\n",
319
+ "torch.Size([1, 1317, 5])\n",
320
+ "torch.Size([1, 1317, 5])\n",
321
+ "torch.Size([1, 1317, 5])\n",
322
+ "torch.Size([1, 1317, 5])\n",
323
+ "torch.Size([1, 1317, 5])\n",
324
+ "torch.Size([1, 1317, 5])\n",
325
+ "torch.Size([1, 1317, 5])\n",
326
+ "torch.Size([1, 1317, 5])\n",
327
+ "torch.Size([1, 1317, 5])\n",
328
+ "torch.Size([1, 1317, 5])\n",
329
+ "torch.Size([1, 1317, 5])\n",
330
+ "torch.Size([1, 1317, 5])\n",
331
+ "torch.Size([1, 1317, 5])\n",
332
+ "torch.Size([1, 1317, 5])\n",
333
+ "torch.Size([1, 1317, 5])\n",
334
+ "torch.Size([1, 1317, 5])\n",
335
+ "torch.Size([1, 1317, 5])\n",
336
+ "torch.Size([1, 1317, 5])\n",
337
+ "torch.Size([1, 1317, 5])\n",
338
+ "torch.Size([1, 1317, 5])\n",
339
+ "torch.Size([1, 1317, 5])\n",
340
+ "torch.Size([1, 1317, 5])\n",
341
+ "torch.Size([1, 1317, 5])\n",
342
+ "torch.Size([1, 1317, 5])\n",
343
+ "torch.Size([1, 1317, 5])\n",
344
+ "torch.Size([1, 1317, 5])\n",
345
+ "torch.Size([1, 1317, 5])\n",
346
+ "torch.Size([1, 1317, 5])\n",
347
+ "torch.Size([1, 1317, 5])\n",
348
+ "torch.Size([1, 1317, 5])\n",
349
+ "torch.Size([1, 1317, 5])\n",
350
+ "torch.Size([1, 1317, 5])\n",
351
+ "torch.Size([1, 1317, 5])\n",
352
+ "torch.Size([1, 1317, 5])\n",
353
+ "torch.Size([1, 1317, 5])\n",
354
+ "torch.Size([1, 1317, 5])\n",
355
+ "torch.Size([1, 1317, 5])\n",
356
+ "torch.Size([1, 1317, 5])\n",
357
+ "torch.Size([1, 1317, 5])\n",
358
+ "torch.Size([1, 1317, 5])\n",
359
+ "torch.Size([1, 1317, 5])\n",
360
+ "torch.Size([1, 1317, 5])\n",
361
+ "torch.Size([1, 1317, 5])\n",
362
+ "torch.Size([1, 1317, 5])\n",
363
+ "torch.Size([1, 1317, 5])\n",
364
+ "torch.Size([1, 1317, 5])\n",
365
+ "torch.Size([1, 1317, 5])\n",
366
+ "torch.Size([1, 1317, 5])\n",
367
+ "torch.Size([1, 1317, 5])\n",
368
+ "torch.Size([1, 1317, 5])\n",
369
+ "torch.Size([1, 1317, 5])\n",
370
+ "torch.Size([1, 1317, 5])\n",
371
+ "torch.Size([1, 1317, 5])\n",
372
+ "torch.Size([1, 1317, 5])\n",
373
+ "torch.Size([1, 1317, 5])\n",
374
+ "torch.Size([1, 1317, 5])\n",
375
+ "torch.Size([1, 1317, 5])\n",
376
+ "torch.Size([1, 1317, 5])\n",
377
+ "torch.Size([1, 1317, 5])\n",
378
+ "torch.Size([1, 1317, 5])\n",
379
+ "torch.Size([1, 1317, 5])\n",
380
+ "torch.Size([1, 1317, 5])\n",
381
+ "torch.Size([1, 1317, 5])\n",
382
+ "torch.Size([1, 1317, 5])\n",
383
+ "torch.Size([1, 1317, 5])\n",
384
+ "torch.Size([1, 1317, 5])\n",
385
+ "torch.Size([1, 1317, 5])\n",
386
+ "torch.Size([1, 1317, 5])\n",
387
+ "torch.Size([1, 1317, 5])\n",
388
+ "torch.Size([1, 1317, 5])\n",
389
+ "torch.Size([1, 1317, 5])\n",
390
+ "torch.Size([1, 1317, 5])\n",
391
+ "torch.Size([1, 1317, 5])\n",
392
+ "torch.Size([1, 1317, 5])\n",
393
+ "torch.Size([1, 1317, 5])\n",
394
+ "torch.Size([1, 1317, 5])\n",
395
+ "torch.Size([1, 1317, 5])\n",
396
+ "torch.Size([1, 1317, 5])\n",
397
+ "torch.Size([1, 1317, 5])\n",
398
+ "torch.Size([1, 1317, 5])\n",
399
+ "torch.Size([1, 1317, 5])\n",
400
+ "torch.Size([1, 1317, 5])\n",
401
+ "torch.Size([1, 1317, 5])\n",
402
+ "torch.Size([1, 1317, 5])\n",
403
+ "torch.Size([1, 1317, 5])\n",
404
+ "torch.Size([1, 1317, 5])\n",
405
+ "torch.Size([1, 1317, 5])\n",
406
+ "torch.Size([1, 1317, 5])\n",
407
+ "torch.Size([1, 1317, 5])\n",
408
+ "torch.Size([1, 1317, 5])\n",
409
+ "torch.Size([1, 1317, 5])\n",
410
+ "torch.Size([1, 1317, 5])\n",
411
+ "torch.Size([1, 1317, 5])\n",
412
+ "torch.Size([1, 1317, 5])\n",
413
+ "torch.Size([1, 1317, 5])\n",
414
+ "torch.Size([1, 1317, 5])\n",
415
+ "torch.Size([1, 1317, 5])\n",
416
+ "torch.Size([1, 1317, 5])\n",
417
+ "torch.Size([1, 1317, 5])\n"
418
+ ]
419
+ }
420
+ ],
421
+ "source": [
422
+ "frames = 200\n",
423
+ "data_list = []\n",
424
+ "mesh_list = []\n",
425
+ "for idx, i in enumerate(data_loader):\n",
426
+ " if idx%10 !=0:\n",
427
+ " continue\n",
428
+ " print(i.shape)\n",
429
+ " # single_plot(i[:,:,0].numpy(), mesh.numpy())\n",
430
+ " updated_mesh = mesh + i[0,:,-2:]\n",
431
+ " data_list.append(i[:,:,3].numpy())\n",
432
+ " mesh_list.append(updated_mesh.numpy())\n",
433
+ " frames -= 1\n",
434
+ " if frames == 0:\n",
435
+ " break"
436
+ ]
437
+ },
438
+ {
439
+ "cell_type": "code",
440
+ "execution_count": null,
441
+ "metadata": {},
442
+ "outputs": [
443
+ {
444
+ "name": "stdout",
445
+ "output_type": "stream",
446
+ "text": [
447
+ "Saving animation to fsi_animation.gif...\n"
448
+ ]
449
+ }
450
+ ],
451
+ "source": [
452
+ "create_field_animation(data_list, mesh_list, interval=100, save_path='fsi_animation.gif')"
453
+ ]
454
+ },
455
+ {
456
+ "cell_type": "code",
457
+ "execution_count": null,
458
+ "metadata": {},
459
+ "outputs": [],
460
+ "source": [
461
+ "len(data_list)"
462
+ ]
463
+ }
464
+ ],
465
+ "metadata": {
466
+ "kernelspec": {
467
+ "display_name": "neuralop",
468
+ "language": "python",
469
+ "name": "python3"
470
+ },
471
+ "language_info": {
472
+ "codemirror_mode": {
473
+ "name": "ipython",
474
+ "version": 3
475
+ },
476
+ "file_extension": ".py",
477
+ "mimetype": "text/x-python",
478
+ "name": "python",
479
+ "nbconvert_exporter": "python",
480
+ "pygments_lexer": "ipython3",
481
+ "version": "3.11.9"
482
+ }
483
+ },
484
+ "nbformat": 4,
485
+ "nbformat_minor": 2
486
+ }
fsi_animation.gif ADDED

Git LFS Details

  • SHA256: 864f0a6cbf03103b0831ad068bcdc04e4bb467eaa3f2361bde49af0c72db1419
  • Pointer size: 133 Bytes
  • Size of remote file: 20.4 MB
fsi_reader.py ADDED
@@ -0,0 +1,138 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import h5py
3
+ import numpy as np
4
+ import torch
5
+ import h5py
6
+
7
+
8
class FsiDataReader():
    """Reader for FSI (fluid–structure interaction) simulation snapshots.

    Loads the shared mesh plus per-parameter time series of velocity,
    pressure and displacement fields stored under ``location`` as HDF5
    files (or plain-text files for the mu='0.5' runs), and exposes the
    combined per-node fields as torch tensors / a DataLoader.

    Parameters
    ----------
    location : str
        Dataset root. Must contain ``mesh.h5`` and subdirectories of the
        form ``mu=<mu>/x1=<x1>/x2=<x2>/``.
    mu, in_lets_x1, in_lets_x2 : list of str, optional
        Subsets of the available parameter values (note: values are
        strings, e.g. '1.0'). Defaults to all available values.

    Raises
    ------
    ValueError
        If a requested parameter value is not in the available set.
    """

    def __init__(self,
                 location,
                 mu=None,
                 in_lets_x1=None,
                 in_lets_x2=None,):
        self.location = location
        # Available parameter values. These are strings because they are
        # spliced directly into directory names ('mu=1.0/x1=0.0/...').
        self._x1 = ['-4.0', '-2.0', '0.0', '2.0', '4.0', '6.0']
        self._x2 = ['-4.0', '-2.0', '0', '2.0', '4.0', '6.0']
        self._mu = ['0.1', '0.01', '0.5', '5', '1.0', '10.0']
        # Channel selection from the concatenated [velocity | pressure |
        # displacement] tensor: keep vx, vy, P, dx, dy (drops index 2).
        self.varable_idices = [0, 1, 3, 4, 5]

        # Raise ValueError (not assert, which is stripped under -O) for
        # invalid subsets, consistent with get_data()'s validation.
        if mu is not None:
            if not set(mu).issubset(self._mu):
                raise ValueError(f"mu must be a subset of {self._mu}")
            self._mu = mu
        if in_lets_x1 is not None:
            if not set(in_lets_x1).issubset(self._x1):
                raise ValueError(f"in_lets_x1 must be a subset of {self._x1}")
            self._x1 = in_lets_x1
        if in_lets_x2 is not None:
            if not set(in_lets_x2).issubset(self._x2):
                raise ValueError(f"in_lets_x2 must be a subset of {self._x2}")
            self._x2 = in_lets_x2

        # Context manager closes the file even if reading fails
        # (the original handle was never closed).
        with h5py.File(os.path.join(location, 'mesh.h5'), 'r') as mesh_h:
            mesh = mesh_h['mesh/coordinates'][:]
        self.input_mesh = torch.from_numpy(mesh).type(torch.float)

    def _readh5(self, h5f, dtype=torch.float32):
        """Stack all 'VisualisationVector' datasets of *h5f* in time order.

        Dataset names are integer time indices ('0', '1', ...); the result
        has shape (num_timesteps, *per-step shape).
        """
        dset_keys = list(h5f['VisualisationVector'].keys())
        readings = [None] * len(dset_keys)
        for dset in dset_keys:
            ds_data = h5f['VisualisationVector'][dset]
            # Place by integer index: HDF5 key order is not numeric order.
            readings[int(dset)] = torch.tensor(np.array(ds_data), dtype=dtype)

        readings_tensor = torch.stack(readings, dim=0)
        print(f"Loaded tensor Size: {readings_tensor.shape}")
        return readings_tensor

    def get_data(self, mu, x1, x2):
        """Load one (mu, x1, x2) run from HDF5 files.

        Returns a tensor of shape (T, num_nodes, 5) holding the selected
        channels [vx, vy, P, dx, dy] per timestep.
        """
        if mu not in self._mu:
            raise ValueError(f"Value of mu must be one of {self._mu}")
        if x1 not in self._x1 or x2 not in self._x2:
            # Original referenced nonexistent attributes here
            # (self._ivals3 / self._ivals12), raising AttributeError
            # instead of the intended ValueError.
            raise ValueError(
                f"x1 must be one of {self._x1} and x2 one of {self._x2}")
        path = os.path.join(
            self.location,
            'mu=' + str(mu),
            'x1=' + str(x1),
            'x2=' + str(x2),
            'Visualization')

        # Read each field, closing every file handle deterministically.
        fields = {}
        for field in ('displacement', 'pressure', 'velocity'):
            with h5py.File(os.path.join(path, field + '.h5'), 'r') as h5f:
                fields[field] = self._readh5(h5f)

        combined = torch.cat(
            [fields['velocity'], fields['pressure'], fields['displacement']],
            dim=-1)[..., self.varable_idices]
        return combined

    def get_data_txt(self, mu, x1, x2):
        """Load one (mu, x1, x2) run stored as plain-text column files.

        Returns a tensor of shape (T, 876, 5) with channels
        [vx, vy, P, dx, dy], matching get_data()'s channel order.
        """
        if mu not in self._mu:
            raise ValueError(f"Value of mu must be one of {self._mu}")
        if x1 not in self._x1 or x2 not in self._x2:
            raise ValueError(
                f"x1 must be one of {self._x1} and x2 one of {self._x2}")
        # NOTE(review): original read self.params.super_res_data_location,
        # but no `params` attribute exists anywhere in this class — that was
        # an AttributeError. self.location matches get_data(); confirm the
        # txt runs live under the same root.
        path = os.path.join(
            self.location,
            'mu=' + str(mu),
            'x1=' + str(x1),
            'x2=' + str(x2),
            '1')
        # Each file is a flat column; reshape to (T, 876, 1) — 876 nodes
        # per timestep in the text-format runs.
        def _load(name):
            return torch.tensor(np.loadtxt(os.path.join(path, name))).view(-1, 876, 1)

        velocity_x = _load('vel_x.txt')
        velocity_y = _load('vel_y.txt')
        pressure = _load('pres.txt')
        dis_x = _load('dis_x.txt')
        dis_y = _load('dis_y.txt')

        combined = torch.cat(
            [velocity_x, velocity_y, pressure, dis_x, dis_y], dim=-1)
        return combined

    def get_loader(self, batch_size, shuffle=True):
        """Build a DataLoader over every selected (mu, x1, x2) run.

        Runs whose files are missing are skipped with a message. All runs
        are concatenated along the time axis, so one loader item is one
        timestep tensor of shape (num_nodes, 5).
        """
        data = []
        for mu in self._mu:
            for x1 in self._x1:
                for x2 in self._x2:
                    try:
                        # mu values are strings; the original compared
                        # against the float 0.5, so the txt branch was
                        # unreachable.
                        if mu == '0.5':
                            data.append(self.get_data_txt(mu, x1, x2))
                        else:
                            data.append(self.get_data(mu, x1, x2))
                    except FileNotFoundError:
                        print(f"file not found for mu={mu}, x1={x1}, x2={x2}")
                        continue
        data = torch.cat(data, dim=0)
        print(f"Data shape: {data.shape}")

        data_loader = torch.utils.data.DataLoader(
            data, batch_size=batch_size, shuffle=shuffle)

        return data_loader