maomao88 committed on
Commit
77a9363
·
1 Parent(s): 1cf899e

update descriptions text

Browse files
Files changed (2) hide show
  1. __pycache__/utils.cpython-313.pyc +0 -0
  2. app.py +2 -2
__pycache__/utils.cpython-313.pyc CHANGED
Binary files a/__pycache__/utils.cpython-313.pyc and b/__pycache__/utils.cpython-313.pyc differ
 
app.py CHANGED
@@ -247,8 +247,7 @@ function showCrossAttFun(attn_scores, decoder_attn) {
247
  with gr.Blocks(css=css) as demo:
248
  gr.Markdown("""
249
  ## 🕸️ Visualize Attentions in Translated Text (English to Chinese)
250
- Cross attention is a key component in transformers, where a sequence (English Text) can attend to another sequence’s information (Chinese Text).
251
- You can check the cross attentions and self-attentions of the translated text in the lower section of the page.
252
  """)
253
 
254
  with gr.Row():
@@ -274,6 +273,7 @@ with gr.Blocks(css=css) as demo:
274
  gr.Markdown(
275
  """
276
  ## Check Cross Attentions
 
277
  Hover your mouse over an output (Chinese) word/token to see which input (English) word/token it is attending to.
278
  """,
279
  elem_classes="output-html-desc"
 
247
  with gr.Blocks(css=css) as demo:
248
  gr.Markdown("""
249
  ## 🕸️ Visualize Attentions in Translated Text (English to Chinese)
250
+ After translating your English input to Chinese, you can check the cross attentions and self-attentions of the translation in the lower section of the page.
 
251
  """)
252
 
253
  with gr.Row():
 
273
  gr.Markdown(
274
  """
275
  ## Check Cross Attentions
276
+ Cross attention is a key component in transformers, where a sequence (English Text) can attend to another sequence’s information (Chinese Text).
277
  Hover your mouse over an output (Chinese) word/token to see which input (English) word/token it is attending to.
278
  """,
279
  elem_classes="output-html-desc"