Fraser-Greenlee committed on
Commit
cec358e
·
1 Parent(s): 9f271d1

almost able to generate variations with codeT5

Browse files
make_variations/generate_with_codeT5.ipynb ADDED
@@ -0,0 +1,550 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "metadata": {},
7
+ "outputs": [
8
+ {
9
+ "name": "stdout",
10
+ "output_type": "stream",
11
+ "text": [
12
+ "{user.username}\n"
13
+ ]
14
+ }
15
+ ],
16
+ "source": [
17
+ "from transformers import AutoTokenizer, AutoModelForSeq2SeqLM\n",
18
+ "\n",
19
+ "tokenizer = AutoTokenizer.from_pretrained(\"Salesforce/codet5-base\")\n",
20
+ "model = AutoModelForSeq2SeqLM.from_pretrained(\"Salesforce/codet5-base\")\n",
21
+ "\n",
22
+ "text = \"def greet(user): print(f'hello <extra_id_0>!')\"\n",
23
+ "input_ids = tokenizer(text, return_tensors=\"pt\").input_ids\n",
24
+ "\n",
25
+ "# simply generate a single sequence\n",
26
+ "generated_ids = model.generate(input_ids, max_length=8)\n",
27
+ "print(tokenizer.decode(generated_ids[0], skip_special_tokens=True))\n",
28
+ "# this prints \"{user.username}\""
29
+ ]
30
+ },
31
+ {
32
+ "cell_type": "code",
33
+ "execution_count": 18,
34
+ "metadata": {},
35
+ "outputs": [],
36
+ "source": [
37
+ "import ast\n",
38
+ "\n",
39
+ "def filter_codes(codes):\n",
40
+ " codes = list(set(codes))\n",
41
+ " new_codes = []\n",
42
+ " for code in codes:\n",
43
+ " if ';' in code:\n",
44
+ " code = code[code.index(';'):]\n",
45
+ " try:\n",
46
+ " ast.parse(code)\n",
47
+ " except Exception:\n",
48
+ " continue\n",
49
+ " new_codes.append(code)\n",
50
+ " return new_codes"
51
+ ]
52
+ },
53
+ {
54
+ "cell_type": "code",
55
+ "execution_count": 19,
56
+ "metadata": {},
57
+ "outputs": [],
58
+ "source": [
59
+ "def temp_value(value):\n",
60
+ " if value[0] == '[' and value[-1] == ']':\n",
61
+ " return '[<extra_id_0>]'\n",
62
+ " if value[0] == '\"' and value[-1] == '\"':\n",
63
+ " return '\"<extra_id_0>\"'\n",
64
+ " if value[0] == \"'\" and value[-1] == \"'\":\n",
65
+ " return \"'<extra_id_0>'\"\n",
66
+ " if value[0] == '{' and value[-1] == '}':\n",
67
+ " return '{<extra_id_0>}'\n",
68
+ " return '<extra_id_0>'\n",
69
+ "\n",
70
+ "def temp_var(var):\n",
71
+ " value = var[4:]\n",
72
+ " return var[:4] + temp_value(value)"
73
+ ]
74
+ },
75
+ {
76
+ "cell_type": "code",
77
+ "execution_count": 20,
78
+ "metadata": {},
79
+ "outputs": [],
80
+ "source": [
81
+ "def make_code(start, code):\n",
82
+ " return f'def main(): {\"; \".join(start)}; {code}; return {\", \".join([v.split()[0] for v in start])}'"
83
+ ]
84
+ },
85
+ {
86
+ "cell_type": "code",
87
+ "execution_count": 21,
88
+ "metadata": {},
89
+ "outputs": [],
90
+ "source": [
91
+ "import ast\n",
92
+ "\n",
93
+ "def filter_codes(codes):\n",
94
+ " codes = list(set(codes))\n",
95
+ " new_codes = []\n",
96
+ " for code in codes:\n",
97
+ " if ';' in code:\n",
98
+ " code = code[code.index(';'):]\n",
99
+ " try:\n",
100
+ " ast.parse(code)\n",
101
+ " except Exception:\n",
102
+ " continue\n",
103
+ " new_codes.append(code)\n",
104
+ " return new_codes"
105
+ ]
106
+ },
107
+ {
108
+ "cell_type": "code",
109
+ "execution_count": 22,
110
+ "metadata": {},
111
+ "outputs": [],
112
+ "source": [
113
+ "def alt_from_code(code):\n",
114
+ " input_ids = tokenizer(code, return_tensors=\"pt\").input_ids\n",
115
+ " generated_ids = model.generate(input_ids, num_return_sequences=100, max_length=20, do_sample=True, temperature=1.0)\n",
116
+ " return filter_codes(tokenizer.batch_decode(generated_ids, skip_special_tokens=True))"
117
+ ]
118
+ },
119
+ {
120
+ "cell_type": "code",
121
+ "execution_count": 23,
122
+ "metadata": {},
123
+ "outputs": [],
124
+ "source": [
125
+ "import errno\n",
126
+ "import os\n",
127
+ "import signal\n",
128
+ "import functools\n",
129
+ "\n",
130
+ "class TimeoutError(Exception):\n",
131
+ " pass\n",
132
+ "\n",
133
+ "def timeout(seconds=10, error_message=os.strerror(errno.ETIME)):\n",
134
+ " def decorator(func):\n",
135
+ " def _handle_timeout(signum, frame):\n",
136
+ " raise TimeoutError(error_message)\n",
137
+ "\n",
138
+ " @functools.wraps(func)\n",
139
+ " def wrapper(*args, **kwargs):\n",
140
+ " signal.signal(signal.SIGALRM, _handle_timeout)\n",
141
+ " signal.alarm(seconds)\n",
142
+ " try:\n",
143
+ " result = func(*args, **kwargs)\n",
144
+ " finally:\n",
145
+ " signal.alarm(0)\n",
146
+ " return result\n",
147
+ "\n",
148
+ " return wrapper\n",
149
+ "\n",
150
+ " return decorator"
151
+ ]
152
+ },
153
+ {
154
+ "cell_type": "code",
155
+ "execution_count": 37,
156
+ "metadata": {},
157
+ "outputs": [],
158
+ "source": [
159
+ "def state_dict_to_str(state):\n",
160
+ " vals = []\n",
161
+ " for k, v in state.items():\n",
162
+ " vals.append(\n",
163
+ " f'{k} = {v}'\n",
164
+ " )\n",
165
+ " vals = sorted(vals)\n",
166
+ " return '; '.join(vals)"
167
+ ]
168
+ },
169
+ {
170
+ "cell_type": "code",
171
+ "execution_count": 38,
172
+ "metadata": {},
173
+ "outputs": [],
174
+ "source": [
175
+ "def trace_code(start_state: str, code: str):\n",
176
+ " state = {}\n",
177
+ " try:\n",
178
+ " exec(start_state, {}, state)\n",
179
+ " except Exception:\n",
180
+ " return\n",
181
+ " start_state = dict(state)\n",
182
+ " try:\n",
183
+ " exec(code, {}, state)\n",
184
+ " except Exception:\n",
185
+ " return\n",
186
+ " return state_dict_to_str(start_state), code, state_dict_to_str(state)"
187
+ ]
188
+ },
189
+ {
190
+ "cell_type": "code",
191
+ "execution_count": 39,
192
+ "metadata": {},
193
+ "outputs": [
194
+ {
195
+ "data": {
196
+ "text/plain": [
197
+ "[{'start': 'g = 100; i = 1; l = [1, 100, 1]',\n",
198
+ " 'code': 'g += l[i]',\n",
199
+ " 'end': 'g = 200; i = 1; l = [1, 100, 1]'},\n",
200
+ " {'start': 'g = 100; i = 1; l = [1, 1]',\n",
201
+ " 'code': 'g += l[i]',\n",
202
+ " 'end': 'g = 101; i = 1; l = [1, 1]'},\n",
203
+ " {'start': 'g = 100; i = 1; l = [1, 1, 1]',\n",
204
+ " 'code': 'g += l[i]',\n",
205
+ " 'end': 'g = 101; i = 1; l = [1, 1, 1]'},\n",
206
+ " {'start': 'g = 100; i = 1; l = [100, 100]',\n",
207
+ " 'code': 'g += l[i]',\n",
208
+ " 'end': 'g = 200; i = 1; l = [100, 100]'},\n",
209
+ " {'start': 'g = 100; i = 1; l = [50, 50, 50, 40]',\n",
210
+ " 'code': 'g += l[i]',\n",
211
+ " 'end': 'g = 150; i = 1; l = [50, 50, 50, 40]'},\n",
212
+ " {'start': 'g = 100; i = 1; l = [0, 10]',\n",
213
+ " 'code': 'g += l[i]',\n",
214
+ " 'end': 'g = 110; i = 1; l = [0, 10]'},\n",
215
+ " {'start': 'g = 100; i = 1; l = [100, 900, 10, 10]',\n",
216
+ " 'code': 'g += l[i]',\n",
217
+ " 'end': 'g = 1000; i = 1; l = [100, 900, 10, 10]'},\n",
218
+ " {'start': 'g = 100; i = 1; l = [1, 1, 2]',\n",
219
+ " 'code': 'g += l[i]',\n",
220
+ " 'end': 'g = 101; i = 1; l = [1, 1, 2]'},\n",
221
+ " {'start': 'g = 100; i = 1; l = [100, 100, 100, 0, 0]',\n",
222
+ " 'code': 'g += l[i]',\n",
223
+ " 'end': 'g = 200; i = 1; l = [100, 100, 100, 0, 0]'}]"
224
+ ]
225
+ },
226
+ "execution_count": 39,
227
+ "metadata": {},
228
+ "output_type": "execute_result"
229
+ }
230
+ ],
231
+ "source": [
232
+ "def get_working_alts(other_vars, var_alts, code):\n",
233
+ " rows = []\n",
234
+ " for alt in var_alts:\n",
235
+ " start = other_vars + [alt]\n",
236
+ " result = trace_code('; '.join(start), code)\n",
237
+ " if result:\n",
238
+ " rows.append({'start': result[0], 'code': result[1], 'end': result[2]})\n",
239
+ " return rows\n",
240
+ "\n",
241
+ "test_alt_vars = [\n",
242
+ " 'l = [1, 100, 1]',\n",
243
+ " 'l = [1, 1]',\n",
244
+ " 'l = [f]',\n",
245
+ " 'l = [1, 1, 1,]',\n",
246
+ " 'l = [i = 10]',\n",
247
+ " 'l = [100, 100]',\n",
248
+ " 'l = [l[i].max(), l[i].min()]',\n",
249
+ " 'l = [1]',\n",
250
+ " 'l = [50, 50, 50, 40]',\n",
251
+ " 'l = [0, 10]',\n",
252
+ " 'l = [100, 900, 10, 10]',\n",
253
+ " 'l = [i, 1, 2]',\n",
254
+ " 'l = [100, 100, 100, 0, 0]'\n",
255
+ "]\n",
256
+ "get_working_alts(['g = 100', 'i = 1'], test_alt_vars, 'g += l[i]')"
257
+ ]
258
+ },
259
+ {
260
+ "cell_type": "code",
261
+ "execution_count": 46,
262
+ "metadata": {},
263
+ "outputs": [
264
+ {
265
+ "data": {
266
+ "text/plain": [
267
+ "(['g = 100', 'i = 1'],\n",
268
+ " ['l = [1, 2]',\n",
269
+ " 'l = [0,1,2,3,3,4,5,6,9]',\n",
270
+ " 'l = [0.01]',\n",
271
+ " 'l = [5, 6, 8, 12,]',\n",
272
+ " 'l = [g * 2, 1]',\n",
273
+ " 'l = [g / 100.0 + i]',\n",
274
+ " 'l = [100, 100,]',\n",
275
+ " 'l = [0, 1]',\n",
276
+ " 'l = [1]',\n",
277
+ " 'l = [15, 100, 1000100, i,]',\n",
278
+ " 'l = [100, 1, 0]',\n",
279
+ " 'l = [i, m]',\n",
280
+ " 'l = [.1,.2]',\n",
281
+ " 'l = [100100]',\n",
282
+ " 'l = [100, 100,100]',\n",
283
+ " 'l = [1, 2, 3, 4]',\n",
284
+ " 'l = [0.001, 0.001, 0.001, 1.001]',\n",
285
+ " 'l = [100, 100, 100]',\n",
286
+ " 'l = [0.9]',\n",
287
+ " 'l = [1, 2, 3]',\n",
288
+ " 'l = [g / i]',\n",
289
+ " 'l = [g]',\n",
290
+ " 'l = [i - 1]',\n",
291
+ " 'l = [1, 1, 1]',\n",
292
+ " 'l = [10, 20]',\n",
293
+ " 'l = [0, 2, 3]',\n",
294
+ " 'l = [100]',\n",
295
+ " 'l = [1, 1, 2]',\n",
296
+ " 'l = [10109090909090909090909090909090909]',\n",
297
+ " 'l = [g, i]',\n",
298
+ " 'l = [1, 2,2]',\n",
299
+ " 'l = [0, 0]',\n",
300
+ " 'l = [10, 20, 20]',\n",
301
+ " 'l = [i]',\n",
302
+ " 'l = [g, 1]',\n",
303
+ " 'l = [0, 1, 0]',\n",
304
+ " 'l = [100, 90]',\n",
305
+ " 'l = [10, 5, 6]',\n",
306
+ " 'l = [g, g-i, l]',\n",
307
+ " 'l = [0]',\n",
308
+ " 'l = [1,2,3,2,7,5,6,8,9]',\n",
309
+ " 'l = [floa_e_b, floa_e_c, f]',\n",
310
+ " 'l = [100, 100, 1]',\n",
311
+ " 'l = [0.5, 1.5]',\n",
312
+ " 'l = [100, 1001, 1001, 1012, 1015]'])"
313
+ ]
314
+ },
315
+ "execution_count": 46,
316
+ "metadata": {},
317
+ "output_type": "execute_result"
318
+ }
319
+ ],
320
+ "source": [
321
+ "def get_alts_for_var(start_vars, alt_i, code):\n",
322
+ " start_vars[alt_i] = temp_var(start_vars[alt_i])\n",
323
+ " code = make_code(start_vars, row['code'])\n",
324
+ " var_alts = alt_from_code(code)\n",
325
+ " alt_var_temp = start_vars[alt_i]\n",
326
+ " del start_vars[alt_i]\n",
327
+ " return start_vars, [alt_var_temp.replace('<extra_id_0>', alt) for alt in var_alts]\n",
328
+ "\n",
329
+ "alt_start_vars, var_alts = get_alts_for_var(\n",
330
+ " ['g = 100', 'i = 1', 'l = [100, 100, 0, 0, -100, -100]'], 2, 'g += l[i]'\n",
331
+ ")\n",
332
+ "alt_start_vars, var_alts"
333
+ ]
334
+ },
335
+ {
336
+ "cell_type": "code",
337
+ "execution_count": 50,
338
+ "metadata": {},
339
+ "outputs": [
340
+ {
341
+ "data": {
342
+ "text/plain": [
343
+ "(53,\n",
344
+ " [{'start': 'g = 1; i = 1; l = [100, 100, 0, 0, -100, -100]',\n",
345
+ " 'code': 'g += l[i]',\n",
346
+ " 'end': 'g = 101; i = 1; l = [100, 100, 0, 0, -100, -100]'},\n",
347
+ " {'start': 'g = 2; i = 1; l = [100, 100, 0, 0, -100, -100]',\n",
348
+ " 'code': 'g += l[i]',\n",
349
+ " 'end': 'g = 102; i = 1; l = [100, 100, 0, 0, -100, -100]'},\n",
350
+ " {'start': 'g = 3; i = 1; l = [100, 100, 0, 0, -100, -100]',\n",
351
+ " 'code': 'g += l[i]',\n",
352
+ " 'end': 'g = 103; i = 1; l = [100, 100, 0, 0, -100, -100]'}])"
353
+ ]
354
+ },
355
+ "execution_count": 50,
356
+ "metadata": {},
357
+ "output_type": "execute_result"
358
+ }
359
+ ],
360
+ "source": [
361
+ "def make_alternatives(row):\n",
362
+ " start_vars = row['start'].split('; ')\n",
363
+ "\n",
364
+ " alts = []\n",
365
+ " for i in range(len(start_vars)):\n",
366
+ " alt_start_vars, var_alts = get_alts_for_var(list(start_vars), i, row['code'])\n",
367
+ " alts += get_working_alts(alt_start_vars, var_alts, row['code'])\n",
368
+ "\n",
369
+ " return alts\n",
370
+ "\n",
371
+ "alts = make_alternatives(\n",
372
+ " {'start': 'g = 100; i = 1; l = [100, 100, 0, 0, -100, -100]',\n",
373
+ " 'code': 'g += l[i]',\n",
374
+ " 'end': 'g = 200; i = 1; l = [100, 100, 0, 0, -100, -100]'}\n",
375
+ ")\n",
376
+ "len(alts), alts[:3]"
377
+ ]
378
+ },
379
+ {
380
+ "cell_type": "code",
381
+ "execution_count": 4,
382
+ "metadata": {},
383
+ "outputs": [],
384
+ "source": [
385
+ "import json\n",
386
+ "\n",
387
+ "with open('../data.jsonl', 'r', encoding=\"utf-8\") as f:\n",
388
+ " for id_, line in enumerate(f):\n",
389
+ " row = json.loads(line)\n",
390
+ " alts = make_alternatives(row)\n",
391
+ " # TODO: save alts\n",
392
+ " break"
393
+ ]
394
+ },
395
+ {
396
+ "cell_type": "code",
397
+ "execution_count": null,
398
+ "metadata": {},
399
+ "outputs": [],
400
+ "source": []
401
+ },
402
+ {
403
+ "cell_type": "code",
404
+ "execution_count": null,
405
+ "metadata": {},
406
+ "outputs": [],
407
+ "source": []
408
+ },
409
+ {
410
+ "cell_type": "code",
411
+ "execution_count": 3,
412
+ "metadata": {},
413
+ "outputs": [
414
+ {
415
+ "data": {
416
+ "text/plain": [
417
+ "['1, 2',\n",
418
+ " '1, 0',\n",
419
+ " '1, 1, 1, 1',\n",
420
+ " '1, 1',\n",
421
+ " '\"ab\",i,2',\n",
422
+ " '0, 1',\n",
423
+ " '8',\n",
424
+ " '\"s\", \"m\", \"v\", \"r \"',\n",
425
+ " 'g, - p',\n",
426
+ " '1, 1, 1,',\n",
427
+ " '7, 5, 6',\n",
428
+ " 'g, i, l',\n",
429
+ " '1',\n",
430
+ " '1,1,2,3',\n",
431
+ " '1, 2, 2',\n",
432
+ " '\"ab\", \"aa\", \"ab\", \"aa\"',\n",
433
+ " '1, 2, 3, 4',\n",
434
+ " '\"ab\",\"ace\",\"ae\",\"ad\"',\n",
435
+ " 'i, i',\n",
436
+ " '\"ab\", \"a\", \"e\"',\n",
437
+ " '100, 100, 100',\n",
438
+ " '1,3,3,4,5,6,7,9,0',\n",
439
+ " '\" a\"',\n",
440
+ " '0, 1, 2',\n",
441
+ " '0, 1, 1, 1, 0',\n",
442
+ " '\"ab\", \"bal,ca\"',\n",
443
+ " 'g,i, l [ i ]',\n",
444
+ " '1, 3,4, 6',\n",
445
+ " 'a',\n",
446
+ " '1, 2, 3',\n",
447
+ " '9, 9',\n",
448
+ " '( 1)',\n",
449
+ " '2, - 1, - 1',\n",
450
+ " '0 | 1 | 0|0',\n",
451
+ " '{ 1 }',\n",
452
+ " 'i - 1',\n",
453
+ " 'o, l1, o2, l',\n",
454
+ " '\"ab\"',\n",
455
+ " '1, 1, 2',\n",
456
+ " 'g, i',\n",
457
+ " '0, 0',\n",
458
+ " '\"a\"',\n",
459
+ " 'i, l',\n",
460
+ " 'i',\n",
461
+ " '0,0',\n",
462
+ " '- l [ i ]',\n",
463
+ " '1, 2, 3, 1',\n",
464
+ " 'l[ i - 1 ]',\n",
465
+ " '\"1\",\"2\", \"3\",\"4\", \"5\"',\n",
466
+ " 'g, g, i']"
467
+ ]
468
+ },
469
+ "execution_count": 3,
470
+ "metadata": {},
471
+ "output_type": "execute_result"
472
+ }
473
+ ],
474
+ "source": [
475
+ "code ='def main(): g = \"ab\"; i = 1; l = [<extra_id_0>]; g += l[i]; return g, i, l'\n",
476
+ "\n",
477
+ "input_ids = tokenizer(code, return_tensors=\"pt\").input_ids\n",
478
+ "generated_ids = model.generate(input_ids, num_return_sequences=100, max_length=20, do_sample=True, temperature=1.0)\n",
479
+ "filter_codes(tokenizer.batch_decode(generated_ids, skip_special_tokens=True))\n",
480
+ "\n",
481
+ "# 100 samples -> ~8 valid alternatives, 3.1s on macos CPU"
482
+ ]
483
+ },
484
+ {
485
+ "cell_type": "code",
486
+ "execution_count": 54,
487
+ "metadata": {},
488
+ "outputs": [
489
+ {
490
+ "data": {
491
+ "text/plain": [
492
+ "['<pad><s><extra_id_0>5<extra_id_1>g i l [ 0</s><pad><pad>',\n",
493
+ " '<pad><s><extra_id_0>0<extra_id_1>0, 0, 0</s><pad><pad>',\n",
494
+ " '<pad><s><extra_id_0>0<extra_id_1>1 1 2, 1</s><pad><pad>',\n",
495
+ " \"<pad><s><extra_id_0>'<extra_id_1>i</s><pad><pad><pad><pad><pad><pad>\",\n",
496
+ " '<pad><s><extra_id_0>0<extra_id_1>a t</s><pad><pad><pad><pad><pad>',\n",
497
+ " '<pad><s><extra_id_0>0.0<extra_id_1>e. f_i</s>',\n",
498
+ " '<pad><s><extra_id_0>\" \"<extra_id_1>1</s><pad><pad><pad><pad><pad>',\n",
499
+ " '<pad><s><extra_id_0>0<extra_id_1>n = 1 l =</s><pad><pad>',\n",
500
+ " '<pad><s><extra_id_0>0, 0, 1<extra_id_1>1</s><pad><pad>',\n",
501
+ " '<pad><s><extra_id_0>1<extra_id_1>k y y x z</s><pad><pad>']"
502
+ ]
503
+ },
504
+ "execution_count": 54,
505
+ "metadata": {},
506
+ "output_type": "execute_result"
507
+ }
508
+ ],
509
+ "source": [
510
+ "code ='def main(): g = <extra_id_0>; i = 1; l = [<extra_id_1>]; g += l[i]; return g, i, l'\n",
511
+ "\n",
512
+ "input_ids = tokenizer(code, return_tensors=\"pt\").input_ids\n",
513
+ "generated_ids = model.generate(input_ids, num_return_sequences=10, max_length=20, do_sample=True, temperature=1.0)\n",
514
+ "tokenizer.batch_decode(generated_ids)"
515
+ ]
516
+ },
517
+ {
518
+ "cell_type": "code",
519
+ "execution_count": null,
520
+ "metadata": {},
521
+ "outputs": [],
522
+ "source": []
523
+ }
524
+ ],
525
+ "metadata": {
526
+ "interpreter": {
527
+ "hash": "ced6a873299cbeeefe969ab88294103b352f8c83b6537b9e08e8739795321d60"
528
+ },
529
+ "kernelspec": {
530
+ "display_name": "Python 3.9.9 64-bit ('3.9.9': pyenv)",
531
+ "language": "python",
532
+ "name": "python3"
533
+ },
534
+ "language_info": {
535
+ "codemirror_mode": {
536
+ "name": "ipython",
537
+ "version": 3
538
+ },
539
+ "file_extension": ".py",
540
+ "mimetype": "text/x-python",
541
+ "name": "python",
542
+ "nbconvert_exporter": "python",
543
+ "pygments_lexer": "ipython3",
544
+ "version": "3.9.9"
545
+ },
546
+ "orig_nbformat": 4
547
+ },
548
+ "nbformat": 4,
549
+ "nbformat_minor": 2
550
+ }