{ "cells": [ { "cell_type": "code", "execution_count": 37, "metadata": { "id": "gLqZbLQybHUY" }, "outputs": [], "source": [ "#!pip install transformers\n", "#!pip install wandb" ] }, { "cell_type": "code", "execution_count": 38, "metadata": { "id": "9Era7yOm9WZL" }, "outputs": [], "source": [ "import os\n", "os.environ[\"CUDA_LAUNCH_BLOCKING\"] = \"1\"" ] }, { "cell_type": "code", "execution_count": 39, "metadata": { "id": "ipRsfJJkRy9I" }, "outputs": [], "source": [ "from transformers import AutoTokenizer, BertModel, Wav2Vec2ForCTC, Wav2Vec2Processor\n", "\n", "import torch\n", "import torch.nn as nn\n", "import torchaudio\n", "import torchaudio.functional as F\n", "\n", "import torch.optim as optim\n", "\n", "import re\n", "from torch.utils.data import Dataset, DataLoader, Subset\n", "\n", "import pandas as pd\n", "import numpy as np\n", "\n", "import pickle\n", "\n", "import wandb\n", "\n", "import seaborn as sns\n", "import matplotlib.pyplot as plt\n", "from sklearn.metrics import confusion_matrix, accuracy_score, precision_score, recall_score\n", "\n", "from huggingface_hub import login\n", "\n", "from typing import DefaultDict" ] }, { "cell_type": "code", "execution_count": 40, "metadata": { "id": "IxUZCBuY7D8m" }, "outputs": [], "source": [ "from google.colab import userdata\n", "hf_token = userdata.get('HF_TOKEN')\n", "wandb_token = userdata.get('WAND_TOKEN')" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "20b809e6", "outputId": "a8b9e90e-a3b7-4631-f280-0c9f33ffef59" }, "outputs": [], "source": [ "%env HF_TOKEN_ENV=$hf_token\n", "!wget -nc --header \"Authorization: Bearer ${HF_TOKEN_ENV}\" https://huggingface.co/datasets/asapp/slue/resolve/main/data/voxceleb/dev.tsv\n", "!wget -nc --header \"Authorization: Bearer ${HF_TOKEN_ENV}\" https://huggingface.co/datasets/asapp/slue/resolve/main/data/voxceleb/fine-tune.tsv\n", "!wget -nc --header \"Authorization: Bearer ${HF_TOKEN_ENV}\" https://huggingface.co/datasets/asapp/slue/resolve/main/data/voxceleb/test.tsv\n", "\n", "!wget -nc --header \"Authorization: Bearer ${HF_TOKEN_ENV}\" https://huggingface.co/datasets/asapp/slue/resolve/main/data/voxceleb/audio/dev.zip\n", "!wget -nc --header \"Authorization: Bearer ${HF_TOKEN_ENV}\" https://huggingface.co/datasets/asapp/slue/resolve/main/data/voxceleb/audio/fine-tune.zip\n", "!wget -nc --header \"Authorization: Bearer ${HF_TOKEN_ENV}\" https://huggingface.co/datasets/asapp/slue/resolve/main/data/voxceleb/audio/test.zip" ] }, { "cell_type": "code", "execution_count": 42, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "3248f75f", "outputId": "5c7d7170-aeff-4b13-81af-03d57a695831" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "dev_raw folder already exists. Skipping unzip.\n", "fine-tune_raw folder already exists. Skipping unzip.\n", "test_raw folder already exists. Skipping unzip.\n" ] } ], "source": [ "if not os.path.exists(\"dev_raw\"):\n", " print(\"dev_raw folder not found. Unzipping dev.zip...\")\n", " !unzip -q dev.zip\n", "else:\n", " print(\"dev_raw folder already exists. Skipping unzip.\")\n", "\n", "if not os.path.exists(\"fine-tune_raw\"):\n", " print(\"fine-tune_raw folder not found. Unzipping fine-tune.zip...\")\n", " !unzip -q fine-tune.zip\n", "else:\n", " print(\"fine-tune_raw folder already exists. 
Skipping unzip.\")\n", "\n", "if not os.path.exists(\"test_raw\"):\n", " print(\"test_raw folder not found. Unzipping test.zip...\")\n", " !unzip -q test.zip\n", "else:\n", " print(\"test_raw folder already exists. Skipping unzip.\")" ] }, { "cell_type": "code", "execution_count": 43, "metadata": { "id": "km1H4kNFSJRa" }, "outputs": [], "source": [ "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n", "NUM_EPOCHS = 5\n", "BATCH_SIZE = 16\n", "\n", "SAVED_CUSTOM_BERT_TOKEN_MAX_LEN_PATH = \"max_len.pkl\"\n", "SAVED_CUSTOM_BERT_TOKENIZER_DIR = \"bert_tokenizer_local\"\n", "SAVED_CUSTOM_BERT_MODEL_PATH = \"custom_bert_model.bin\"\n", "SAVED_TARGET_CAT_PATH = \"categories.bin\"\n", "TRAIN_DS_PATH = \"fine-tune.tsv\"\n", "TEST_DS_PATH = \"test.tsv\"\n", "BERT_BASE_MODEL = \"google-bert/bert-base-uncased\"\n", "INTERMEDIATE_CUSTOM_BERT_LAYER_SIZE = 30\n", "\n", "SAVED_AUDIO_MODEL_DIR_PATH = \"wav2vec2_local\"\n", "AUDIO_BASE_MODEL = \"facebook/wav2vec2-base-960h\"\n", "PROCESSOR_NAME = \"preprocessor_config.json\"\n", "MODEL_NAME = \"config.json\"\n", "\n", "SENTIMENT_MODALITIES = [\"Neutral\", \"Positive\", \"Negative\"]" ] }, { "cell_type": "code", "execution_count": 44, "metadata": { "id": "kMZmRkYPUvmy" }, "outputs": [], "source": [ "class CustomBertDataset(Dataset):\n", " def __init__(self, file_path,audio_folder, model_path=BERT_BASE_MODEL,saved_target_cats_path=SAVED_TARGET_CAT_PATH, saved_max_len_path=SAVED_CUSTOM_BERT_TOKEN_MAX_LEN_PATH):\n", " self.model_path = model_path\n", " self.tokenizer = AutoTokenizer.from_pretrained(self.model_path)\n", " self.lines = open(file_path).readlines()\n", " self.lines = np.array([[re.split(r'\\t+', line.replace(\"\\n\",\"\"))[1], re.split(r'\\t+', line.replace(\"\\n\",\"\"))[4], re.split(r'\\t+', line.replace(\"\\n\",\"\"))[0]] for i, line in enumerate(self.lines) if line != \"\\n\" and i != 0])\n", "\n", " self.elem_cats = self.lines[:,1]\n", " self.corpus = self.lines[:,0]\n", " self.audio_files_id = self.lines[:,2]\n", "\n", " # We have to proceed in this order here\n", " self.corpus = [sent.lower() for sent, cat in zip(self.corpus, self.elem_cats) if cat in SENTIMENT_MODALITIES]\n", " self.audio_files = np.array([os.path.join(audio_folder, f\"{file_name}.flac\") for file_name, cat in zip(self.audio_files_id, self.elem_cats) if cat in SENTIMENT_MODALITIES])\n", " self.elem_cats = [cat for cat in self.elem_cats if cat in SENTIMENT_MODALITIES]\n", "\n", " self.unique_cats = sorted(list(set(self.elem_cats)))\n", " self.num_class = len(self.unique_cats)\n", " self.cats_dict = {cat:i for i, cat in enumerate(self.unique_cats)}\n", " self.targets = np.array([self.cats_dict[cat] for cat in self.elem_cats])\n", "\n", " torch.save(self.unique_cats, saved_target_cats_path)\n", " self.tokenizer.save_pretrained(SAVED_CUSTOM_BERT_TOKENIZER_DIR)\n", "\n", " \"\"\"entry_dict = DefaultDict(list)\n", " for i in range(len(self.corpus)):\n", " entry_dict[self.targets[i]].append(self.corpus[i])\n", "\n", " self.final_corpus = []\n", " self.final_targets = []\n", " n=0\n", " while n < len(self.corpus):\n", " for key in entry_dict.keys():\n", " if len(entry_dict[key]) > 0:\n", " self.final_corpus.append(entry_dict[key].pop(0))\n", " self.final_targets.append(key)\n", " n+=1\n", "\n", " self.corpus = np.array(self.final_corpus)\n", " self.targets = np.array(self.final_targets)\"\"\"\n", "\n", " self.max_len = 0\n", " for sent in self.corpus:\n", " input_ids = self.tokenizer.encode(sent, add_special_tokens=True)\n", " self.max_len = 
max(self.max_len, len(input_ids))\n", "\n", " self.max_len = min(self.max_len, 512)\n", " print(f\"Max length : {self.max_len}\")\n", " print(f\"Number of classes : {self.num_class}\")\n", " print(f\"Example targets : {np.unique(self.targets)}\")\n", "\n", " # Save max_len\n", " with open(saved_max_len_path, 'wb') as f:\n", " pickle.dump(self.max_len, f)\n", " print(f\"max_len saved to {saved_max_len_path}\")\n", "\n", " def __len__(self):\n", " return len(self.elem_cats)\n", "\n", " def __getitem__(self, idx):\n", " text = self.corpus[idx]\n", " target = self.targets[idx]\n", "\n", " # Check: the target must be between 0 and num_class - 1\n", " if target < 0 or target >= self.num_class:\n", " raise ValueError(f\"Target out of bounds: {target} not in [0, {self.num_class - 1}]\")\n", "\n", "\n", " encoded_input = self.tokenizer.encode_plus(text, max_length=self.max_len, padding=\"max_length\", truncation=True, return_tensors='pt')\n", " return encoded_input['input_ids'].squeeze(0), encoded_input['attention_mask'].squeeze(0), torch.tensor(target, dtype=torch.long), self.audio_files[idx]\n", " #return np.array(encoded_input), torch.tensor(target, dtype=torch.long)" ] }, { "cell_type": "code", "execution_count": 45, "metadata": { "id": "zSvUMZGXWRzs" }, "outputs": [], "source": [ "class CustomBertModel(nn.Module):\n", " def __init__(self, num_class, model_path=BERT_BASE_MODEL):\n", " super(CustomBertModel, self).__init__()\n", " self.model_path = model_path\n", " self.num_class = num_class\n", "\n", " self.bert = BertModel.from_pretrained(self.model_path)\n", " # Freeze the parameters of this layer for the training process\n", " for param in self.bert.parameters():\n", " param.requires_grad = False\n", " #self.proj_intermediate = nn.Sequential(nn.Linear(self.bert.config.hidden_size, INTERMEDIATE_CUSTOM_BERT_LAYER_SIZE), nn.Linear(INTERMEDIATE_CUSTOM_BERT_LAYER_SIZE, INTERMEDIATE_CUSTOM_BERT_LAYER_SIZE), nn.Linear(INTERMEDIATE_CUSTOM_BERT_LAYER_SIZE, INTERMEDIATE_CUSTOM_BERT_LAYER_SIZE))\n", " self.proj_lin = nn.Linear(self.bert.config.hidden_size, self.num_class)\n", "\n", " def forward(self, input_ids, attention_mask):\n", " x = self.bert(input_ids=input_ids, attention_mask=attention_mask)\n", "\n", " # Use the [CLS] token embedding for classification\n", " x = x.last_hidden_state[:,0,:]\n", " #x = self.proj_intermediate(x)\n", " x = self.proj_lin(x)\n", " return x" ] }, { "cell_type": "code", "execution_count": 46, "metadata": { "id": "zVOKJ1wEZ8YF" }, "outputs": [], "source": [ "def train_step(model, train_dataloader, loss_fn, optimizer):\n", "\n", " num_iterations = len(train_dataloader)\n", "\n", " for i in range(NUM_EPOCHS):\n", " print(f\"Training Epoch n° {i}\")\n", " model.train()\n", "\n", " for j, batch in enumerate(train_dataloader):\n", "\n", " input = batch[:][0]\n", " attention = batch[:][1]\n", " target = batch[:][2]\n", "\n", "\n", " output = model(input.to(device), attention.to(device))\n", "\n", "\n", " loss = loss_fn(output, target.to(device))\n", "\n", " optimizer.zero_grad()\n", " loss.backward()\n", " optimizer.step()\n", "\n", " run.log({\"Training loss\": loss})\n", "\n", " print(f\"Epoch {i+1} | step {j+1} / {num_iterations} | loss : {loss}\")\n", "\n", "\n", " # Save the model\n", " torch.save(model.state_dict(), SAVED_CUSTOM_BERT_MODEL_PATH)\n", " print(f\"Custom BERT Model saved at {SAVED_CUSTOM_BERT_MODEL_PATH}\")" ] }, { "cell_type": "code", "execution_count": 47, "metadata": { "id": "O3JNf-ahaPy0" }, "outputs": [], "source": [ "def eval_step(test_dataloader, loss_fn, 
num_class, saved_model_path=SAVED_CUSTOM_BERT_MODEL_PATH,saved_target_cats_path=SAVED_TARGET_CAT_PATH):\n", "\n", " y_pred = []\n", " y_true = []\n", "\n", " num_iterations = len(test_dataloader)\n", " # Load the saved model\n", " saved_model = CustomBertModel(num_class)\n", " saved_model.load_state_dict(torch.load(saved_model_path, weights_only=False)) # Explicitly set weights_only to False\n", " saved_model = saved_model.to(device)\n", " saved_model.eval() # Set the model to evaluation mode\n", " print(f\"Model loaded from path :{saved_model_path}\")\n", "\n", " with torch.no_grad():\n", " for j, batch in enumerate(test_dataloader):\n", "\n", " input = batch[:][0]\n", " attention = batch[:][1]\n", " target = batch[:][2]\n", "\n", "\n", " output = saved_model(input.to(device), attention.to(device))\n", "\n", " loss = loss_fn(output, target.to(device))\n", "\n", " run.log({\"Eval loss\": loss})\n", " print(f\"Step {j+1} / {num_iterations} | Eval loss : {loss}\")\n", " y_pred.extend(output.cpu().numpy().argmax(axis=1))\n", " y_true.extend(target.cpu().numpy())\n", "\n", " class_labels = torch.load(saved_target_cats_path, weights_only=False)\n", "\n", " true_labels = [class_labels[i] for i in y_true]\n", " pred_labels = [class_labels[i] for i in y_pred]\n", "\n", " print(f\"Accuracy : {accuracy_score(true_labels, pred_labels)}\")\n", "\n", " cm = confusion_matrix(true_labels, pred_labels, labels=class_labels)\n", " df_cm = pd.DataFrame(cm, index=class_labels,columns=class_labels)\n", " sns.heatmap(df_cm, annot=True, fmt='d')\n", " plt.title(\"Confusion Matrix for Sentiment analysis dataset\")\n", " plt.xlabel(\"Predicted Label\")\n", " plt.ylabel(\"True Label\")\n", " plt.show()" ] }, { "cell_type": "code", "execution_count": 48, "metadata": { "id": "lQ-Y9PMx-Oup" }, "outputs": [], "source": [ "def eval_pipeline_step(test_dataloader, loss_fn, num_class, audio_model_dir=SAVED_AUDIO_MODEL_DIR_PATH, audio_model_name=MODEL_NAME, audio_processor_name=PROCESSOR_NAME, saved_model_path=SAVED_CUSTOM_BERT_MODEL_PATH,saved_target_cats_path=SAVED_TARGET_CAT_PATH):\n", "\n", " y_pred = []\n", " y_true = []\n", "\n", " num_iterations = len(test_dataloader)\n", " # Load the saved model\n", " saved_model = CustomBertModel(num_class)\n", " saved_model.load_state_dict(torch.load(saved_model_path, weights_only=False)) # Explicitly set weights_only to False\n", " saved_model = saved_model.to(device)\n", " saved_model.eval() # Set the model to evaluation mode\n", " print(f\"Model loaded from path :{saved_model_path}\")\n", "\n", " audio_processor = None\n", " audio_model = None\n", "\n", " processor_path = os.path.join(audio_model_dir, audio_processor_name) # Check for a key file, like the preprocessor config\n", " model_path = os.path.join(audio_model_dir, audio_model_name) # Check for a key file, like the model config\n", "\n", " if os.path.exists(audio_model_dir) and os.path.exists(processor_path) and os.path.exists(model_path):\n", " print(\"Local Wav2Vec2 processor and model found. Loading from local directory.\")\n", " audio_processor = Wav2Vec2Processor.from_pretrained(audio_model_dir)\n", " audio_model = Wav2Vec2ForCTC.from_pretrained(audio_model_dir)\n", " else:\n", " print(\"Local Wav2Vec2 processor and model not found. 
Downloading from Hugging Face Hub.\")\n", " audio_processor = Wav2Vec2Processor.from_pretrained(AUDIO_BASE_MODEL)\n", " audio_model = Wav2Vec2ForCTC.from_pretrained(AUDIO_BASE_MODEL)\n", "\n", " # Optionally save the downloaded model and processor for future use\n", " audio_processor.save_pretrained(audio_model_dir)\n", " audio_model.save_pretrained(audio_model_dir)\n", " print(f\"Wav2Vec2 processor and model downloaded and saved to {audio_model_dir}\")\n", "\n", " # Move audio model to GPU\n", " audio_model = audio_model.to(device)\n", " audio_model.eval()\n", "\n", " with torch.no_grad():\n", " for j, batch in enumerate(test_dataloader):\n", "\n", " target = batch[:][2]\n", " audio_file_path = batch[:][3]\n", "\n", " encoded_inputs = []\n", " attention_masks = []\n", "\n", " bundle = torchaudio.pipelines.WAV2VEC2_ASR_BASE_960H\n", " sample_rate = bundle.sample_rate\n", "\n", " for audio_file in audio_file_path:\n", " waveform, sr = torchaudio.load(audio_file)\n", " if sr != sample_rate:\n", " print(\"Resampling\")\n", " resampler = torchaudio.transforms.Resample(orig_freq=sr, new_freq=sample_rate)\n", " waveform = resampler(waveform)\n", "\n", " # Move waveform to GPU before processing\n", " input_values = audio_processor(waveform.squeeze().numpy(), sampling_rate=sample_rate, return_tensors=\"pt\").input_values.to(device)\n", "\n", " with torch.no_grad():\n", " logits = audio_model(input_values).logits\n", " predicted_ids_hf = torch.argmax(logits, dim=-1)\n", " transcript_hf = audio_processor.decode(predicted_ids_hf[0].cpu().numpy()) # Move predicted_ids_hf back to CPU for decoding\n", " transcript_hf = transcript_hf.lower() if transcript_hf is not None else None\n", "\n", " encoded_input = test_dataloader.dataset.tokenizer.encode_plus(transcript_hf, max_length=test_dataloader.dataset.max_len, padding=\"max_length\", truncation=True, return_tensors='pt')\n", " encoded_inputs.append(encoded_input['input_ids'].squeeze(0))\n", " attention_masks.append(encoded_input['attention_mask'].squeeze(0))\n", "\n", " text_input = torch.stack(encoded_inputs)\n", " attention = torch.stack(attention_masks)\n", "\n", "\n", " output = saved_model(text_input.to(device), attention.to(device))\n", "\n", " loss = loss_fn(output, target.to(device))\n", "\n", " run.log({\"Pipeline Eval loss\": loss})\n", " print(f\"Step {j+1} / {num_iterations} | Pipeline Eval loss : {loss}\")\n", "\n", " y_pred.extend(output.cpu().numpy().argmax(axis=1))\n", " y_true.extend(target.cpu().numpy())\n", "\n", " class_labels = torch.load(saved_target_cats_path, weights_only=False)\n", "\n", " true_labels = [class_labels[i] for i in y_true]\n", " pred_labels = [class_labels[i] for i in y_pred]\n", "\n", " print(f\"Pipeline Accuracy : {accuracy_score(true_labels, pred_labels)}\")\n", "\n", " cm = confusion_matrix(true_labels, pred_labels, labels=class_labels)\n", " df_cm = pd.DataFrame(cm, index=class_labels,columns=class_labels)\n", " sns.heatmap(df_cm, annot=True, fmt='d')\n", " plt.title(\"Confusion Matrix for Sentiment analysis Pipeline\")\n", " plt.xlabel(\"Predicted Label\")\n", " plt.ylabel(\"True Label\")\n", " plt.show()" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "QVtOhLTh-I3S" }, "outputs": [], "source": [ "def get_audio_sentiment(input_audio_path,num_class=len(SENTIMENT_MODALITIES), audio_model_dir=SAVED_AUDIO_MODEL_DIR_PATH, audio_model_name=MODEL_NAME, audio_processor_name=PROCESSOR_NAME, saved_model_path=SAVED_CUSTOM_BERT_MODEL_PATH,saved_target_cats_path=SAVED_TARGET_CAT_PATH, 
tokenizer_save_directory = SAVED_CUSTOM_BERT_TOKENIZER_DIR, saved_max_len_path=SAVED_CUSTOM_BERT_TOKEN_MAX_LEN_PATH):\n", " # Load the saved model\n", " saved_model = CustomBertModel(num_class)\n", " saved_model.load_state_dict(torch.load(saved_model_path, weights_only=False, map_location=torch.device(device))) # Explicitly set weights_only to False\n", " saved_model = saved_model.to(device)\n", " saved_model.eval() # Set the model to evaluation mode\n", " print(f\"Model loaded from path :{saved_model_path}\")\n", " loaded_tokenizer = AutoTokenizer.from_pretrained(tokenizer_save_directory)\n", " max_len = 0\n", " with open(saved_max_len_path, 'rb') as f:\n", " max_len = pickle.load(f)\n", "\n", " audio_processor = None\n", " audio_model = None\n", "\n", " processor_path = os.path.join(audio_model_dir, audio_processor_name) # Check for a key file, like the preprocessor config\n", " model_path = os.path.join(audio_model_dir, audio_model_name) # Check for a key file, like the model config\n", "\n", " if os.path.exists(audio_model_dir) and os.path.exists(processor_path) and os.path.exists(model_path):\n", " print(\"Local Wav2Vec2 processor and model found. Loading from local directory.\")\n", " audio_processor = Wav2Vec2Processor.from_pretrained(audio_model_dir)\n", " audio_model = Wav2Vec2ForCTC.from_pretrained(audio_model_dir)\n", " else:\n", " print(\"Local Wav2Vec2 processor and model not found. Downloading from Hugging Face Hub.\")\n", " audio_processor = Wav2Vec2Processor.from_pretrained(AUDIO_BASE_MODEL)\n", " audio_model = Wav2Vec2ForCTC.from_pretrained(AUDIO_BASE_MODEL)\n", "\n", " # Optionally save the downloaded model and processor for future use\n", " audio_processor.save_pretrained(audio_model_dir)\n", " audio_model.save_pretrained(audio_model_dir)\n", " print(f\"Wav2Vec2 processor and model downloaded and saved to {audio_model_dir}\")\n", "\n", " # Move audio model to GPU\n", " audio_model = audio_model.to(device)\n", " audio_model.eval()\n", "\n", " with torch.no_grad():\n", " audio_file_path = input_audio_path\n", "\n", " encoded_inputs = []\n", " attention_masks = []\n", "\n", " bundle = torchaudio.pipelines.WAV2VEC2_ASR_BASE_960H\n", " sample_rate = bundle.sample_rate\n", "\n", "\n", " waveform, sr = torchaudio.load(audio_file_path)\n", " if sr != sample_rate:\n", " print(\"Resampling\")\n", " resampler = torchaudio.transforms.Resample(orig_freq=sr, new_freq=sample_rate)\n", " waveform = resampler(waveform)\n", "\n", " # Move waveform to GPU before processing\n", " input_values = audio_processor(waveform.squeeze().numpy(), sampling_rate=sample_rate, return_tensors=\"pt\").input_values.to(device)\n", "\n", " with torch.no_grad():\n", " logits = audio_model(input_values).logits\n", " predicted_ids_hf = torch.argmax(logits, dim=-1)\n", " transcript_hf = audio_processor.decode(predicted_ids_hf[0].cpu().numpy()) # Move predicted_ids_hf back to CPU for decoding\n", " transcript_hf = transcript_hf.lower() if transcript_hf is not None else None\n", "\n", "\n", "\n", " encoded_input = loaded_tokenizer.encode_plus(transcript_hf, max_length=max_len, padding=\"max_length\", truncation=True, return_tensors='pt')\n", " encoded_inputs.append(encoded_input['input_ids'].squeeze(0))\n", " attention_masks.append(encoded_input['attention_mask'].squeeze(0))\n", "\n", " # Stack the lists of tensors before moving to device\n", " text_input = torch.stack(encoded_inputs)\n", " attention = torch.stack(attention_masks)\n", "\n", "\n", " output = saved_model(text_input.to(device), 
attention.to(device))\n", " class_labels = torch.load(saved_target_cats_path, weights_only=False)\n", "\n", " return class_labels[output.cpu().numpy().argmax(axis=1)[0]]" ] }, { "cell_type": "code", "execution_count": 54, "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 1000 }, "id": "a_g1x6SEbjHj", "outputId": "7b4572d3-7bcf-4d64-bc3b-076bdf198e79" }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "\u001b[34m\u001b[1mwandb\u001b[0m: \u001b[33mWARNING\u001b[0m Calling wandb.login() after wandb.init() has no effect.\n" ] }, { "data": { "text/html": [ "Finishing previous runs because reinit is set to 'default'." ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/html": [], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/html": [ " View run honest-serenity-21 at: https://wandb.ai/jrmd-institut-pasteur-de-dakar/DIT-Wav2Vec-Bert-Sentiment-Analysis-project/runs/zcllb8pl
View project at: https://wandb.ai/jrmd-institut-pasteur-de-dakar/DIT-Wav2Vec-Bert-Sentiment-Analysis-project
Synced 5 W&B file(s), 0 media file(s), 0 artifact file(s) and 0 other file(s)" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/html": [ "Find logs at: ./wandb/run-20250713_234542-zcllb8pl/logs" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/html": [ "Tracking run with wandb version 0.21.0" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/html": [ "Run data is saved locally in /content/wandb/run-20250713_234656-giuz48qn" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/html": [ "Syncing run snowy-plant-22 to Weights & Biases (docs)
" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/html": [ " View project at https://wandb.ai/jrmd-institut-pasteur-de-dakar/DIT-Wav2Vec-Bert-Sentiment-Analysis-project" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/html": [ " View run at https://wandb.ai/jrmd-institut-pasteur-de-dakar/DIT-Wav2Vec-Bert-Sentiment-Analysis-project/runs/giuz48qn" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "name": "stdout", "output_type": "stream", "text": [ "Max length : 134\n", "Nombre de classes : 3\n", "Exemples de targets : [0 1 2]\n", "max_len saved to max_len.pkl\n", "Max length : 161\n", "Nombre de classes : 3\n", "Exemples de targets : [0 1 2]\n", "max_len saved to max_len.pkl\n", "Size of bert dataset : 5729\n", "Training Epoch n° 0\n", "Epoch 1 | step 1 / 359 | loss : 1.2239360809326172\n", "Epoch 1 | step 2 / 359 | loss : 0.7781790494918823\n", "Epoch 1 | step 3 / 359 | loss : 0.7769264578819275\n", "Epoch 1 | step 4 / 359 | loss : 0.6504240036010742\n", "Epoch 1 | step 5 / 359 | loss : 0.5884197950363159\n", "Epoch 1 | step 6 / 359 | loss : 0.46859484910964966\n", "Epoch 1 | step 7 / 359 | loss : 0.5255210399627686\n", "Epoch 1 | step 8 / 359 | loss : 0.7808817028999329\n", "Epoch 1 | step 9 / 359 | loss : 0.9569503664970398\n", "Epoch 1 | step 10 / 359 | loss : 1.1509002447128296\n", "Epoch 1 | step 11 / 359 | loss : 0.8167925477027893\n", "Epoch 1 | step 12 / 359 | loss : 0.7020487189292908\n", "Epoch 1 | step 13 / 359 | loss : 0.5900958180427551\n", "Epoch 1 | step 14 / 359 | loss : 0.6116088032722473\n", "Epoch 1 | step 15 / 359 | loss : 0.8662782907485962\n", "Epoch 1 | step 16 / 359 | loss : 0.5392611026763916\n", "Epoch 1 | step 17 / 359 | loss : 0.5323561429977417\n", "Epoch 1 | step 18 / 359 | loss : 0.3145168423652649\n", "Epoch 1 | step 19 / 359 | loss : 0.7632851004600525\n", "Epoch 1 | step 20 / 359 | loss : 0.7354919910430908\n", "Epoch 1 | step 21 / 359 | loss : 1.2063307762145996\n", "Epoch 1 | step 22 / 359 | loss : 0.5908204317092896\n", "Epoch 1 | step 23 / 359 | loss : 1.0740903615951538\n", "Epoch 1 | step 24 / 359 | loss : 0.8128124475479126\n", "Epoch 1 | step 25 / 359 | loss : 0.6890608668327332\n", "Epoch 1 | step 26 / 359 | loss : 0.869840681552887\n", "Epoch 1 | step 27 / 359 | loss : 0.7113715410232544\n", "Epoch 1 | step 28 / 359 | loss : 0.5622650980949402\n", "Epoch 1 | step 29 / 359 | loss : 0.9921711087226868\n", "Epoch 1 | step 30 / 359 | loss : 0.8243380188941956\n", "Epoch 1 | step 31 / 359 | loss : 0.6731699109077454\n", "Epoch 1 | step 32 / 359 | loss : 0.8704314827919006\n", "Epoch 1 | step 33 / 359 | loss : 0.6902273893356323\n", "Epoch 1 | step 34 / 359 | loss : 0.685562252998352\n", "Epoch 1 | step 35 / 359 | loss : 0.6672124862670898\n", "Epoch 1 | step 36 / 359 | loss : 0.7629058361053467\n", "Epoch 1 | step 37 / 359 | loss : 0.5171371698379517\n", "Epoch 1 | step 38 / 359 | loss : 0.8352501392364502\n", "Epoch 1 | step 39 / 359 | loss : 0.427827388048172\n", "Epoch 1 | step 40 / 359 | loss : 0.44383424520492554\n", "Epoch 1 | step 41 / 359 | loss : 0.7249219417572021\n", "Epoch 1 | step 42 / 359 | loss : 1.0801457166671753\n", "Epoch 1 | step 43 / 359 | loss : 0.5163288712501526\n", "Epoch 1 | step 44 / 359 | loss : 0.798217236995697\n", "Epoch 1 | step 45 / 359 | loss : 0.8695089221000671\n", "Epoch 1 | step 46 / 359 | loss : 0.8460457921028137\n", "Epoch 1 | step 47 / 359 | loss : 0.6039737462997437\n", "Epoch 1 | step 48 / 
359 | loss : 0.7451112866401672\n", "Epoch 1 | step 49 / 359 | loss : 0.5277621150016785\n", "Epoch 1 | step 50 / 359 | loss : 0.7562238574028015\n", "Epoch 1 | step 51 / 359 | loss : 0.6880096793174744\n", "Epoch 1 | step 52 / 359 | loss : 0.3799269199371338\n", "Epoch 1 | step 53 / 359 | loss : 0.49801552295684814\n", "Epoch 1 | step 54 / 359 | loss : 0.6689792275428772\n", "Epoch 1 | step 55 / 359 | loss : 0.7728772163391113\n", "Epoch 1 | step 56 / 359 | loss : 0.49119311571121216\n", "Epoch 1 | step 57 / 359 | loss : 0.6129393577575684\n", "Epoch 1 | step 58 / 359 | loss : 0.5274078249931335\n", "Epoch 1 | step 59 / 359 | loss : 0.5157696008682251\n", "Epoch 1 | step 60 / 359 | loss : 0.8469938039779663\n", "Epoch 1 | step 61 / 359 | loss : 0.7215883731842041\n", "Epoch 1 | step 62 / 359 | loss : 0.30823007225990295\n", "Epoch 1 | step 63 / 359 | loss : 1.0006577968597412\n", "Epoch 1 | step 64 / 359 | loss : 0.36621108651161194\n", "Epoch 1 | step 65 / 359 | loss : 0.7098656296730042\n", "Epoch 1 | step 66 / 359 | loss : 1.019273281097412\n", "Epoch 1 | step 67 / 359 | loss : 0.949777364730835\n", "Epoch 1 | step 68 / 359 | loss : 0.6516746878623962\n", "Epoch 1 | step 69 / 359 | loss : 0.5371072888374329\n", "Epoch 1 | step 70 / 359 | loss : 0.5272154211997986\n", "Epoch 1 | step 71 / 359 | loss : 0.5280236601829529\n", "Epoch 1 | step 72 / 359 | loss : 0.8014841675758362\n", "Epoch 1 | step 73 / 359 | loss : 0.5721211433410645\n", "Epoch 1 | step 74 / 359 | loss : 0.6892715692520142\n", "Epoch 1 | step 75 / 359 | loss : 0.7798994183540344\n", "Epoch 1 | step 76 / 359 | loss : 0.704157292842865\n", "Epoch 1 | step 77 / 359 | loss : 0.7520430684089661\n", "Epoch 1 | step 78 / 359 | loss : 0.8178438544273376\n", "Epoch 1 | step 79 / 359 | loss : 0.560861349105835\n", "Epoch 1 | step 80 / 359 | loss : 0.7542012929916382\n", "Epoch 1 | step 81 / 359 | loss : 0.9202328324317932\n", "Epoch 1 | step 82 / 359 | loss : 0.49177563190460205\n", "Epoch 1 | step 83 / 359 | loss : 0.5814316272735596\n", "Epoch 1 | step 84 / 359 | loss : 0.5556803941726685\n", "Epoch 1 | step 85 / 359 | loss : 0.7293121218681335\n", "Epoch 1 | step 86 / 359 | loss : 0.42432543635368347\n", "Epoch 1 | step 87 / 359 | loss : 0.39255252480506897\n", "Epoch 1 | step 88 / 359 | loss : 0.950404167175293\n", "Epoch 1 | step 89 / 359 | loss : 0.3511916697025299\n", "Epoch 1 | step 90 / 359 | loss : 0.4647309184074402\n", "Epoch 1 | step 91 / 359 | loss : 0.7327504754066467\n", "Epoch 1 | step 92 / 359 | loss : 0.6212035417556763\n", "Epoch 1 | step 93 / 359 | loss : 0.6117188930511475\n", "Epoch 1 | step 94 / 359 | loss : 0.6207751631736755\n", "Epoch 1 | step 95 / 359 | loss : 0.6237505078315735\n", "Epoch 1 | step 96 / 359 | loss : 0.5009356141090393\n", "Epoch 1 | step 97 / 359 | loss : 0.3003557026386261\n", "Epoch 1 | step 98 / 359 | loss : 0.7784075736999512\n", "Epoch 1 | step 99 / 359 | loss : 0.8191822171211243\n", "Epoch 1 | step 100 / 359 | loss : 0.2960091531276703\n", "Epoch 1 | step 101 / 359 | loss : 0.5828260183334351\n", "Epoch 1 | step 102 / 359 | loss : 0.7016475200653076\n", "Epoch 1 | step 103 / 359 | loss : 0.9149867296218872\n", "Epoch 1 | step 104 / 359 | loss : 0.5240529775619507\n", "Epoch 1 | step 105 / 359 | loss : 0.6905428171157837\n", "Epoch 1 | step 106 / 359 | loss : 0.6538899540901184\n", "Epoch 1 | step 107 / 359 | loss : 0.49481841921806335\n", "Epoch 1 | step 108 / 359 | loss : 0.4719923436641693\n", "Epoch 1 | step 109 / 359 | loss : 0.46440446376800537\n", "Epoch 1 | step 110 / 359 | 
loss : 1.0554474592208862\n", "Epoch 1 | step 111 / 359 | loss : 0.7591137886047363\n", "Epoch 1 | step 112 / 359 | loss : 0.7504141330718994\n", "Epoch 1 | step 113 / 359 | loss : 0.6725304126739502\n", "Epoch 1 | step 114 / 359 | loss : 0.8056756258010864\n", "Epoch 1 | step 115 / 359 | loss : 0.6666991114616394\n", "Epoch 1 | step 116 / 359 | loss : 0.582266628742218\n", "Epoch 1 | step 117 / 359 | loss : 0.8188720941543579\n", "Epoch 1 | step 118 / 359 | loss : 1.129102110862732\n", "Epoch 1 | step 119 / 359 | loss : 0.45885685086250305\n", "Epoch 1 | step 120 / 359 | loss : 0.5593740940093994\n", "Epoch 1 | step 121 / 359 | loss : 0.8018425703048706\n", "Epoch 1 | step 122 / 359 | loss : 0.6625592112541199\n", "Epoch 1 | step 123 / 359 | loss : 0.6349537372589111\n", "Epoch 1 | step 124 / 359 | loss : 0.6683805584907532\n", "Epoch 1 | step 125 / 359 | loss : 0.5267989635467529\n", "Epoch 1 | step 126 / 359 | loss : 0.7062298655509949\n", "Epoch 1 | step 127 / 359 | loss : 0.450639933347702\n", "Epoch 1 | step 128 / 359 | loss : 0.7860562801361084\n", "Epoch 1 | step 129 / 359 | loss : 0.5087366104125977\n", "Epoch 1 | step 130 / 359 | loss : 0.4271976053714752\n", "Epoch 1 | step 131 / 359 | loss : 0.7225075364112854\n", "Epoch 1 | step 132 / 359 | loss : 0.5826457142829895\n", "Epoch 1 | step 133 / 359 | loss : 0.6383864879608154\n", "Epoch 1 | step 134 / 359 | loss : 0.4803358018398285\n", "Epoch 1 | step 135 / 359 | loss : 0.6588662266731262\n", "Epoch 1 | step 136 / 359 | loss : 0.7276127338409424\n", "Epoch 1 | step 137 / 359 | loss : 0.5295218825340271\n", "Epoch 1 | step 138 / 359 | loss : 0.34616604447364807\n", "Epoch 1 | step 139 / 359 | loss : 1.1637855768203735\n", "Epoch 1 | step 140 / 359 | loss : 0.8757032155990601\n", "Epoch 1 | step 141 / 359 | loss : 0.5685425400733948\n", "Epoch 1 | step 142 / 359 | loss : 0.7358294725418091\n", "Epoch 1 | step 143 / 359 | loss : 0.48710113763809204\n", "Epoch 1 | step 144 / 359 | loss : 0.7419922351837158\n", "Epoch 1 | step 145 / 359 | loss : 0.573459267616272\n", "Epoch 1 | step 146 / 359 | loss : 0.7812228202819824\n", "Epoch 1 | step 147 / 359 | loss : 0.4849877655506134\n", "Epoch 1 | step 148 / 359 | loss : 0.5277436375617981\n", "Epoch 1 | step 149 / 359 | loss : 0.8431311249732971\n", "Epoch 1 | step 150 / 359 | loss : 0.6630666255950928\n", "Epoch 1 | step 151 / 359 | loss : 0.6491422653198242\n", "Epoch 1 | step 152 / 359 | loss : 0.6880113482475281\n", "Epoch 1 | step 153 / 359 | loss : 0.5888128876686096\n", "Epoch 1 | step 154 / 359 | loss : 0.8430927991867065\n", "Epoch 1 | step 155 / 359 | loss : 1.0269807577133179\n", "Epoch 1 | step 156 / 359 | loss : 0.5663043856620789\n", "Epoch 1 | step 157 / 359 | loss : 0.7358954548835754\n", "Epoch 1 | step 158 / 359 | loss : 0.628414511680603\n", "Epoch 1 | step 159 / 359 | loss : 0.43829524517059326\n", "Epoch 1 | step 160 / 359 | loss : 0.5527949929237366\n", "Epoch 1 | step 161 / 359 | loss : 0.961116373538971\n", "Epoch 1 | step 162 / 359 | loss : 0.6491231918334961\n", "Epoch 1 | step 163 / 359 | loss : 0.45700401067733765\n", "Epoch 1 | step 164 / 359 | loss : 0.5936495661735535\n", "Epoch 1 | step 165 / 359 | loss : 0.5856679677963257\n", "Epoch 1 | step 166 / 359 | loss : 0.39677685499191284\n", "Epoch 1 | step 167 / 359 | loss : 0.6376288533210754\n", "Epoch 1 | step 168 / 359 | loss : 0.8651082515716553\n", "Epoch 1 | step 169 / 359 | loss : 0.7944251298904419\n", "Epoch 1 | step 170 / 359 | loss : 0.6771171689033508\n", "Epoch 1 | step 171 / 359 | loss : 
0.35224202275276184\n", "Epoch 1 | step 172 / 359 | loss : 0.6574368476867676\n", "Epoch 1 | step 173 / 359 | loss : 0.43292945623397827\n", "Epoch 1 | step 174 / 359 | loss : 0.5546406507492065\n", "Epoch 1 | step 175 / 359 | loss : 0.4846234619617462\n", "Epoch 1 | step 176 / 359 | loss : 0.689373254776001\n", "Epoch 1 | step 177 / 359 | loss : 0.9037076234817505\n", "Epoch 1 | step 178 / 359 | loss : 0.703309953212738\n", "Epoch 1 | step 179 / 359 | loss : 1.0673630237579346\n", "Epoch 1 | step 180 / 359 | loss : 0.6334757208824158\n", "Epoch 1 | step 181 / 359 | loss : 0.5086771249771118\n", "Epoch 1 | step 182 / 359 | loss : 0.25804322957992554\n", "Epoch 1 | step 183 / 359 | loss : 0.5818949937820435\n", "Epoch 1 | step 184 / 359 | loss : 0.8925405144691467\n", "Epoch 1 | step 185 / 359 | loss : 0.6411100029945374\n", "Epoch 1 | step 186 / 359 | loss : 0.5723517537117004\n", "Epoch 1 | step 187 / 359 | loss : 0.5954481959342957\n", "Epoch 1 | step 188 / 359 | loss : 0.48721998929977417\n", "Epoch 1 | step 189 / 359 | loss : 0.54664146900177\n", "Epoch 1 | step 190 / 359 | loss : 0.8742663264274597\n", "Epoch 1 | step 191 / 359 | loss : 0.42869776487350464\n", "Epoch 1 | step 192 / 359 | loss : 0.7925054430961609\n", "Epoch 1 | step 193 / 359 | loss : 0.9238168597221375\n", "Epoch 1 | step 194 / 359 | loss : 0.5946146845817566\n", "Epoch 1 | step 195 / 359 | loss : 0.5161392688751221\n", "Epoch 1 | step 196 / 359 | loss : 0.45859768986701965\n", "Epoch 1 | step 197 / 359 | loss : 0.889039933681488\n", "Epoch 1 | step 198 / 359 | loss : 0.5095492601394653\n", "Epoch 1 | step 199 / 359 | loss : 0.7949885129928589\n", "Epoch 1 | step 200 / 359 | loss : 0.4921233654022217\n", "Epoch 1 | step 201 / 359 | loss : 0.3488420248031616\n", "Epoch 1 | step 202 / 359 | loss : 0.3814465403556824\n", "Epoch 1 | step 203 / 359 | loss : 0.880484402179718\n", "Epoch 1 | step 204 / 359 | loss : 0.9748294949531555\n", "Epoch 1 | step 205 / 359 | loss : 0.6507573127746582\n", "Epoch 1 | step 206 / 359 | loss : 0.44102632999420166\n", "Epoch 1 | step 207 / 359 | loss : 0.7032008171081543\n", "Epoch 1 | step 208 / 359 | loss : 0.5776005983352661\n", "Epoch 1 | step 209 / 359 | loss : 0.4170357882976532\n", "Epoch 1 | step 210 / 359 | loss : 0.43411242961883545\n", "Epoch 1 | step 211 / 359 | loss : 0.6729763150215149\n", "Epoch 1 | step 212 / 359 | loss : 0.7977082133293152\n", "Epoch 1 | step 213 / 359 | loss : 0.41741424798965454\n", "Epoch 1 | step 214 / 359 | loss : 0.7309755682945251\n", "Epoch 1 | step 215 / 359 | loss : 0.44595402479171753\n", "Epoch 1 | step 216 / 359 | loss : 0.6042097210884094\n", "Epoch 1 | step 217 / 359 | loss : 0.5662139058113098\n", "Epoch 1 | step 218 / 359 | loss : 0.5115290284156799\n", "Epoch 1 | step 219 / 359 | loss : 0.3285876214504242\n", "Epoch 1 | step 220 / 359 | loss : 0.3442113995552063\n", "Epoch 1 | step 221 / 359 | loss : 0.7901559472084045\n", "Epoch 1 | step 222 / 359 | loss : 0.5904033184051514\n", "Epoch 1 | step 223 / 359 | loss : 0.6319628953933716\n", "Epoch 1 | step 224 / 359 | loss : 0.5340222716331482\n", "Epoch 1 | step 225 / 359 | loss : 0.5207698345184326\n", "Epoch 1 | step 226 / 359 | loss : 0.43860089778900146\n", "Epoch 1 | step 227 / 359 | loss : 0.8561108708381653\n", "Epoch 1 | step 228 / 359 | loss : 0.6058894395828247\n", "Epoch 1 | step 229 / 359 | loss : 0.8068221211433411\n", "Epoch 1 | step 230 / 359 | loss : 0.6129536032676697\n", "Epoch 1 | step 231 / 359 | loss : 0.4782792329788208\n", "Epoch 1 | step 232 / 359 | loss : 
0.5855157971382141\n", "Epoch 1 | step 233 / 359 | loss : 0.9014760851860046\n", "Epoch 1 | step 234 / 359 | loss : 0.8639354109764099\n", "Epoch 1 | step 235 / 359 | loss : 0.45864102244377136\n", "Epoch 1 | step 236 / 359 | loss : 0.8283216953277588\n", "Epoch 1 | step 237 / 359 | loss : 0.7266609072685242\n", "Epoch 1 | step 238 / 359 | loss : 0.5021374225616455\n", "Epoch 1 | step 239 / 359 | loss : 0.985302209854126\n", "Epoch 1 | step 240 / 359 | loss : 0.7980105876922607\n", "Epoch 1 | step 241 / 359 | loss : 0.5387177467346191\n", "Epoch 1 | step 242 / 359 | loss : 0.617057740688324\n", "Epoch 1 | step 243 / 359 | loss : 0.5833848714828491\n", "Epoch 1 | step 244 / 359 | loss : 0.3008049428462982\n", "Epoch 1 | step 245 / 359 | loss : 0.6955407857894897\n", "Epoch 1 | step 246 / 359 | loss : 0.30312517285346985\n", "Epoch 1 | step 247 / 359 | loss : 0.4176006615161896\n", "Epoch 1 | step 248 / 359 | loss : 0.9927029609680176\n", "Epoch 1 | step 249 / 359 | loss : 0.4026799201965332\n", "Epoch 1 | step 250 / 359 | loss : 0.48649710416793823\n", "Epoch 1 | step 251 / 359 | loss : 0.28198403120040894\n", "Epoch 1 | step 252 / 359 | loss : 0.6628500819206238\n", "Epoch 1 | step 253 / 359 | loss : 0.6618868708610535\n", "Epoch 1 | step 254 / 359 | loss : 0.7057161331176758\n", "Epoch 1 | step 255 / 359 | loss : 0.4672327935695648\n", "Epoch 1 | step 256 / 359 | loss : 0.36875641345977783\n", "Epoch 1 | step 257 / 359 | loss : 0.5775286555290222\n", "Epoch 1 | step 258 / 359 | loss : 0.4577794373035431\n", "Epoch 1 | step 259 / 359 | loss : 0.7261006832122803\n", "Epoch 1 | step 260 / 359 | loss : 0.517343282699585\n", "Epoch 1 | step 261 / 359 | loss : 0.6604872345924377\n", "Epoch 1 | step 262 / 359 | loss : 0.7485090494155884\n", "Epoch 1 | step 263 / 359 | loss : 0.4134189188480377\n", "Epoch 1 | step 264 / 359 | loss : 0.5467849373817444\n", "Epoch 1 | step 265 / 359 | loss : 0.46603503823280334\n", "Epoch 1 | step 266 / 359 | loss : 0.7908571362495422\n", "Epoch 1 | step 267 / 359 | loss : 0.9639848470687866\n", "Epoch 1 | step 268 / 359 | loss : 0.5169028639793396\n", "Epoch 1 | step 269 / 359 | loss : 0.44020214676856995\n", "Epoch 1 | step 270 / 359 | loss : 0.49927398562431335\n", "Epoch 1 | step 271 / 359 | loss : 0.4012812376022339\n", "Epoch 1 | step 272 / 359 | loss : 0.6189234256744385\n", "Epoch 1 | step 273 / 359 | loss : 0.6977769732475281\n", "Epoch 1 | step 274 / 359 | loss : 0.8794508576393127\n", "Epoch 1 | step 275 / 359 | loss : 0.5181682705879211\n", "Epoch 1 | step 276 / 359 | loss : 0.8176714181900024\n", "Epoch 1 | step 277 / 359 | loss : 0.790208101272583\n", "Epoch 1 | step 278 / 359 | loss : 0.9666171073913574\n", "Epoch 1 | step 279 / 359 | loss : 0.487628310918808\n", "Epoch 1 | step 280 / 359 | loss : 0.7216569185256958\n", "Epoch 1 | step 281 / 359 | loss : 0.663218080997467\n", "Epoch 1 | step 282 / 359 | loss : 0.481402724981308\n", "Epoch 1 | step 283 / 359 | loss : 0.3431940972805023\n", "Epoch 1 | step 284 / 359 | loss : 0.4515286087989807\n", "Epoch 1 | step 285 / 359 | loss : 0.4668726921081543\n", "Epoch 1 | step 286 / 359 | loss : 0.23787550628185272\n", "Epoch 1 | step 287 / 359 | loss : 0.3132522404193878\n", "Epoch 1 | step 288 / 359 | loss : 1.251168131828308\n", "Epoch 1 | step 289 / 359 | loss : 0.8826850652694702\n", "Epoch 1 | step 290 / 359 | loss : 0.43332386016845703\n", "Epoch 1 | step 291 / 359 | loss : 0.7611289024353027\n", "Epoch 1 | step 292 / 359 | loss : 0.6357234716415405\n", "Epoch 1 | step 293 / 359 | loss : 
0.7737550735473633\n", "Epoch 1 | step 294 / 359 | loss : 0.45760709047317505\n", "Epoch 1 | step 295 / 359 | loss : 0.5028179883956909\n", "Epoch 1 | step 296 / 359 | loss : 0.7770479321479797\n", "Epoch 1 | step 297 / 359 | loss : 0.7753926515579224\n", "Epoch 1 | step 298 / 359 | loss : 0.48841679096221924\n", "Epoch 1 | step 299 / 359 | loss : 0.5349805355072021\n", "Epoch 1 | step 300 / 359 | loss : 0.4567863345146179\n", "Epoch 1 | step 301 / 359 | loss : 0.670014500617981\n", "Epoch 1 | step 302 / 359 | loss : 0.38304203748703003\n", "Epoch 1 | step 303 / 359 | loss : 0.6711037755012512\n", "Epoch 1 | step 304 / 359 | loss : 0.6373739242553711\n", "Epoch 1 | step 305 / 359 | loss : 0.7561929225921631\n", "Epoch 1 | step 306 / 359 | loss : 0.9501511454582214\n", "Epoch 1 | step 307 / 359 | loss : 0.5759420990943909\n", "Epoch 1 | step 308 / 359 | loss : 0.9267945289611816\n", "Epoch 1 | step 309 / 359 | loss : 0.4668361246585846\n", "Epoch 1 | step 310 / 359 | loss : 0.8086445927619934\n", "Epoch 1 | step 311 / 359 | loss : 0.8943449854850769\n", "Epoch 1 | step 312 / 359 | loss : 0.6755104660987854\n", "Epoch 1 | step 313 / 359 | loss : 0.9810946583747864\n", "Epoch 1 | step 314 / 359 | loss : 0.8420147895812988\n", "Epoch 1 | step 315 / 359 | loss : 0.7197953462600708\n", "Epoch 1 | step 316 / 359 | loss : 0.6869163513183594\n", "Epoch 1 | step 317 / 359 | loss : 0.5818591713905334\n", "Epoch 1 | step 318 / 359 | loss : 0.5346606969833374\n", "Epoch 1 | step 319 / 359 | loss : 0.45133447647094727\n", "Epoch 1 | step 320 / 359 | loss : 0.5932372212409973\n", "Epoch 1 | step 321 / 359 | loss : 0.6580062508583069\n", "Epoch 1 | step 322 / 359 | loss : 0.5511972904205322\n", "Epoch 1 | step 323 / 359 | loss : 0.9163527488708496\n", "Epoch 1 | step 324 / 359 | loss : 0.40520647168159485\n", "Epoch 1 | step 325 / 359 | loss : 0.5748513340950012\n", "Epoch 1 | step 326 / 359 | loss : 0.585534393787384\n", "Epoch 1 | step 327 / 359 | loss : 0.5518801808357239\n", "Epoch 1 | step 328 / 359 | loss : 0.6216816306114197\n", "Epoch 1 | step 329 / 359 | loss : 0.7924049496650696\n", "Epoch 1 | step 330 / 359 | loss : 0.64686518907547\n", "Epoch 1 | step 331 / 359 | loss : 0.6205539107322693\n", "Epoch 1 | step 332 / 359 | loss : 0.6180996298789978\n", "Epoch 1 | step 333 / 359 | loss : 0.5916785001754761\n", "Epoch 1 | step 334 / 359 | loss : 0.6725429892539978\n", "Epoch 1 | step 335 / 359 | loss : 0.6917330622673035\n", "Epoch 1 | step 336 / 359 | loss : 0.8216856718063354\n", "Epoch 1 | step 337 / 359 | loss : 0.740888774394989\n", "Epoch 1 | step 338 / 359 | loss : 0.8027430176734924\n", "Epoch 1 | step 339 / 359 | loss : 0.5009944438934326\n", "Epoch 1 | step 340 / 359 | loss : 0.6240816712379456\n", "Epoch 1 | step 341 / 359 | loss : 0.8416829109191895\n", "Epoch 1 | step 342 / 359 | loss : 0.45060473680496216\n", "Epoch 1 | step 343 / 359 | loss : 0.7020756006240845\n", "Epoch 1 | step 344 / 359 | loss : 0.8399860262870789\n", "Epoch 1 | step 345 / 359 | loss : 0.7299742102622986\n", "Epoch 1 | step 346 / 359 | loss : 0.43536609411239624\n", "Epoch 1 | step 347 / 359 | loss : 0.6317023038864136\n", "Epoch 1 | step 348 / 359 | loss : 0.6240214705467224\n", "Epoch 1 | step 349 / 359 | loss : 0.6413570046424866\n", "Epoch 1 | step 350 / 359 | loss : 0.4067925810813904\n", "Epoch 1 | step 351 / 359 | loss : 0.893609881401062\n", "Epoch 1 | step 352 / 359 | loss : 0.3025822341442108\n", "Epoch 1 | step 353 / 359 | loss : 0.35098570585250854\n", "Epoch 1 | step 354 / 359 | loss : 
0.526041567325592\n", "Epoch 1 | step 355 / 359 | loss : 0.4771920442581177\n", "Epoch 1 | step 356 / 359 | loss : 0.5898294448852539\n", "Epoch 1 | step 357 / 359 | loss : 0.34690189361572266\n", "Epoch 1 | step 358 / 359 | loss : 0.5914736390113831\n", "Epoch 1 | step 359 / 359 | loss : 0.17744918167591095\n", "Training Epoch n° 1\n", "Epoch 2 | step 1 / 359 | loss : 0.633962869644165\n", "Epoch 2 | step 2 / 359 | loss : 0.3054753243923187\n", "Epoch 2 | step 3 / 359 | loss : 0.6607589721679688\n", "Epoch 2 | step 4 / 359 | loss : 0.9518417716026306\n", "Epoch 2 | step 5 / 359 | loss : 0.5441517233848572\n", "Epoch 2 | step 6 / 359 | loss : 0.30165964365005493\n", "Epoch 2 | step 7 / 359 | loss : 0.2893284559249878\n", "Epoch 2 | step 8 / 359 | loss : 0.48833250999450684\n", "Epoch 2 | step 9 / 359 | loss : 0.8016430735588074\n", "Epoch 2 | step 10 / 359 | loss : 0.5871331095695496\n", "Epoch 2 | step 11 / 359 | loss : 0.3607480823993683\n", "Epoch 2 | step 12 / 359 | loss : 0.6529996395111084\n", "Epoch 2 | step 13 / 359 | loss : 0.4442242980003357\n", "Epoch 2 | step 14 / 359 | loss : 0.39014095067977905\n", "Epoch 2 | step 15 / 359 | loss : 0.7560208439826965\n", "Epoch 2 | step 16 / 359 | loss : 0.38058218359947205\n", "Epoch 2 | step 17 / 359 | loss : 0.562026858329773\n", "Epoch 2 | step 18 / 359 | loss : 1.0161309242248535\n", "Epoch 2 | step 19 / 359 | loss : 0.6237533092498779\n", "Epoch 2 | step 20 / 359 | loss : 0.6799100637435913\n", "Epoch 2 | step 21 / 359 | loss : 0.8194398880004883\n", "Epoch 2 | step 22 / 359 | loss : 0.5420281887054443\n", "Epoch 2 | step 23 / 359 | loss : 0.6585139036178589\n", "Epoch 2 | step 24 / 359 | loss : 0.7121905088424683\n", "Epoch 2 | step 25 / 359 | loss : 0.577256441116333\n", "Epoch 2 | step 26 / 359 | loss : 0.5374178290367126\n", "Epoch 2 | step 27 / 359 | loss : 0.8076707124710083\n", "Epoch 2 | step 28 / 359 | loss : 0.43450668454170227\n", "Epoch 2 | step 29 / 359 | loss : 0.5030489563941956\n", "Epoch 2 | step 30 / 359 | loss : 0.38909488916397095\n", "Epoch 2 | step 31 / 359 | loss : 0.7929323315620422\n", "Epoch 2 | step 32 / 359 | loss : 0.7165305018424988\n", "Epoch 2 | step 33 / 359 | loss : 0.7198590636253357\n", "Epoch 2 | step 34 / 359 | loss : 0.6218193769454956\n", "Epoch 2 | step 35 / 359 | loss : 0.43337205052375793\n", "Epoch 2 | step 36 / 359 | loss : 0.586912989616394\n", "Epoch 2 | step 37 / 359 | loss : 0.36819812655448914\n", "Epoch 2 | step 38 / 359 | loss : 0.8170300126075745\n", "Epoch 2 | step 39 / 359 | loss : 0.9286708831787109\n", "Epoch 2 | step 40 / 359 | loss : 0.676888644695282\n", "Epoch 2 | step 41 / 359 | loss : 0.772739589214325\n", "Epoch 2 | step 42 / 359 | loss : 0.5258831977844238\n", "Epoch 2 | step 43 / 359 | loss : 0.49276211857795715\n", "Epoch 2 | step 44 / 359 | loss : 0.5273029208183289\n", "Epoch 2 | step 45 / 359 | loss : 0.5217918157577515\n", "Epoch 2 | step 46 / 359 | loss : 0.5938214063644409\n", "Epoch 2 | step 47 / 359 | loss : 0.676416277885437\n", "Epoch 2 | step 48 / 359 | loss : 1.0179071426391602\n", "Epoch 2 | step 49 / 359 | loss : 0.5645001530647278\n", "Epoch 2 | step 50 / 359 | loss : 0.6184473037719727\n", "Epoch 2 | step 51 / 359 | loss : 0.4222728908061981\n", "Epoch 2 | step 52 / 359 | loss : 0.5659639835357666\n", "Epoch 2 | step 53 / 359 | loss : 0.6624584197998047\n", "Epoch 2 | step 54 / 359 | loss : 0.8513897657394409\n", "Epoch 2 | step 55 / 359 | loss : 0.46587997674942017\n", "Epoch 2 | step 56 / 359 | loss : 0.281337171792984\n", "Epoch 2 | step 57 / 359 | 
loss : 0.43045487999916077\n", "Epoch 2 | step 58 / 359 | loss : 0.3294815123081207\n", "Epoch 2 | step 59 / 359 | loss : 0.5735496282577515\n", "Epoch 2 | step 60 / 359 | loss : 0.37668102979660034\n", "Epoch 2 | step 61 / 359 | loss : 0.38745009899139404\n", "Epoch 2 | step 62 / 359 | loss : 0.6611344218254089\n", "Epoch 2 | step 63 / 359 | loss : 0.33649933338165283\n", "Epoch 2 | step 64 / 359 | loss : 0.33687716722488403\n", "Epoch 2 | step 65 / 359 | loss : 0.8752702474594116\n", "Epoch 2 | step 66 / 359 | loss : 0.5326871871948242\n", "Epoch 2 | step 67 / 359 | loss : 0.8331062197685242\n", "Epoch 2 | step 68 / 359 | loss : 0.4441353678703308\n", "Epoch 2 | step 69 / 359 | loss : 0.41893503069877625\n", "Epoch 2 | step 70 / 359 | loss : 0.6807630658149719\n", "Epoch 2 | step 71 / 359 | loss : 0.9560554027557373\n", "Epoch 2 | step 72 / 359 | loss : 0.38040441274642944\n", "Epoch 2 | step 73 / 359 | loss : 0.630980372428894\n", "Epoch 2 | step 74 / 359 | loss : 0.7498860359191895\n", "Epoch 2 | step 75 / 359 | loss : 0.6879129409790039\n", "Epoch 2 | step 76 / 359 | loss : 0.5770354866981506\n", "Epoch 2 | step 77 / 359 | loss : 0.5149751305580139\n", "Epoch 2 | step 78 / 359 | loss : 0.5274748206138611\n", "Epoch 2 | step 79 / 359 | loss : 1.0023281574249268\n", "Epoch 2 | step 80 / 359 | loss : 0.5330910682678223\n", "Epoch 2 | step 81 / 359 | loss : 0.5617172718048096\n", "Epoch 2 | step 82 / 359 | loss : 0.6606523394584656\n", "Epoch 2 | step 83 / 359 | loss : 0.6464207768440247\n", "Epoch 2 | step 84 / 359 | loss : 0.7229560613632202\n", "Epoch 2 | step 85 / 359 | loss : 0.37037935853004456\n", "Epoch 2 | step 86 / 359 | loss : 0.7931226491928101\n", "Epoch 2 | step 87 / 359 | loss : 0.484144389629364\n", "Epoch 2 | step 88 / 359 | loss : 0.1759965717792511\n", "Epoch 2 | step 89 / 359 | loss : 0.5871164202690125\n", "Epoch 2 | step 90 / 359 | loss : 0.460847944021225\n", "Epoch 2 | step 91 / 359 | loss : 0.7374564409255981\n", "Epoch 2 | step 92 / 359 | loss : 1.0729384422302246\n", "Epoch 2 | step 93 / 359 | loss : 0.6632082462310791\n", "Epoch 2 | step 94 / 359 | loss : 0.42092540860176086\n", "Epoch 2 | step 95 / 359 | loss : 0.5902599692344666\n", "Epoch 2 | step 96 / 359 | loss : 0.6813877820968628\n", "Epoch 2 | step 97 / 359 | loss : 0.5078868269920349\n", "Epoch 2 | step 98 / 359 | loss : 0.7401432991027832\n", "Epoch 2 | step 99 / 359 | loss : 0.6324419975280762\n", "Epoch 2 | step 100 / 359 | loss : 0.6322566866874695\n", "Epoch 2 | step 101 / 359 | loss : 0.5297607183456421\n", "Epoch 2 | step 102 / 359 | loss : 0.265007346868515\n", "Epoch 2 | step 103 / 359 | loss : 0.5700696110725403\n", "Epoch 2 | step 104 / 359 | loss : 0.3742567002773285\n", "Epoch 2 | step 105 / 359 | loss : 0.729337751865387\n", "Epoch 2 | step 106 / 359 | loss : 0.30449479818344116\n", "Epoch 2 | step 107 / 359 | loss : 0.4680975377559662\n", "Epoch 2 | step 108 / 359 | loss : 0.39841383695602417\n", "Epoch 2 | step 109 / 359 | loss : 0.6513102054595947\n", "Epoch 2 | step 110 / 359 | loss : 0.44508078694343567\n", "Epoch 2 | step 111 / 359 | loss : 1.026611089706421\n", "Epoch 2 | step 112 / 359 | loss : 0.4740903675556183\n", "Epoch 2 | step 113 / 359 | loss : 0.5623096227645874\n", "Epoch 2 | step 114 / 359 | loss : 0.6697084307670593\n", "Epoch 2 | step 115 / 359 | loss : 0.45237141847610474\n", "Epoch 2 | step 116 / 359 | loss : 0.4347166419029236\n", "Epoch 2 | step 117 / 359 | loss : 0.7322158813476562\n", "Epoch 2 | step 118 / 359 | loss : 0.4557601809501648\n", "Epoch 2 | step 119 / 
359 | loss : 0.5860872864723206\n", "Epoch 2 | step 120 / 359 | loss : 0.6772462725639343\n", "Epoch 2 | step 121 / 359 | loss : 0.7387906312942505\n", "Epoch 2 | step 122 / 359 | loss : 0.3356669545173645\n", "Epoch 2 | step 123 / 359 | loss : 0.7657076120376587\n", "Epoch 2 | step 124 / 359 | loss : 0.6926765441894531\n", "Epoch 2 | step 125 / 359 | loss : 0.6191259026527405\n", "Epoch 2 | step 126 / 359 | loss : 0.5855773091316223\n", "Epoch 2 | step 127 / 359 | loss : 0.7406179904937744\n", "Epoch 2 | step 128 / 359 | loss : 0.6981971859931946\n", "Epoch 2 | step 129 / 359 | loss : 0.7557581663131714\n", "Epoch 2 | step 130 / 359 | loss : 0.5450518727302551\n", "Epoch 2 | step 131 / 359 | loss : 0.6208651065826416\n", "Epoch 2 | step 132 / 359 | loss : 0.5678790807723999\n", "Epoch 2 | step 133 / 359 | loss : 0.7818830609321594\n", "Epoch 2 | step 134 / 359 | loss : 0.3245457708835602\n", "Epoch 2 | step 135 / 359 | loss : 0.7390953898429871\n", "Epoch 2 | step 136 / 359 | loss : 0.5431913733482361\n", "Epoch 2 | step 137 / 359 | loss : 0.45542097091674805\n", "Epoch 2 | step 138 / 359 | loss : 0.7669100165367126\n", "Epoch 2 | step 139 / 359 | loss : 0.8333150148391724\n", "Epoch 2 | step 140 / 359 | loss : 0.5373601913452148\n", "Epoch 2 | step 141 / 359 | loss : 0.47603583335876465\n", "Epoch 2 | step 142 / 359 | loss : 0.4389837682247162\n", "Epoch 2 | step 143 / 359 | loss : 0.7029034495353699\n", "Epoch 2 | step 144 / 359 | loss : 0.5745508074760437\n", "Epoch 2 | step 145 / 359 | loss : 0.43390321731567383\n", "Epoch 2 | step 146 / 359 | loss : 0.5012756586074829\n", "Epoch 2 | step 147 / 359 | loss : 0.7040070295333862\n", "Epoch 2 | step 148 / 359 | loss : 0.7722150087356567\n", "Epoch 2 | step 149 / 359 | loss : 0.5407854914665222\n", "Epoch 2 | step 150 / 359 | loss : 0.5914983749389648\n", "Epoch 2 | step 151 / 359 | loss : 0.5761913061141968\n", "Epoch 2 | step 152 / 359 | loss : 0.8303322196006775\n", "Epoch 2 | step 153 / 359 | loss : 0.5024698972702026\n", "Epoch 2 | step 154 / 359 | loss : 0.6382593512535095\n", "Epoch 2 | step 155 / 359 | loss : 0.4763377904891968\n", "Epoch 2 | step 156 / 359 | loss : 0.8325011730194092\n", "Epoch 2 | step 157 / 359 | loss : 0.49250122904777527\n", "Epoch 2 | step 158 / 359 | loss : 0.8957772254943848\n", "Epoch 2 | step 159 / 359 | loss : 0.7966060042381287\n", "Epoch 2 | step 160 / 359 | loss : 0.7229738235473633\n", "Epoch 2 | step 161 / 359 | loss : 0.6465620994567871\n", "Epoch 2 | step 162 / 359 | loss : 0.4325474798679352\n", "Epoch 2 | step 163 / 359 | loss : 0.6943216323852539\n", "Epoch 2 | step 164 / 359 | loss : 0.3318979740142822\n", "Epoch 2 | step 165 / 359 | loss : 0.6471325755119324\n", "Epoch 2 | step 166 / 359 | loss : 0.40608611702919006\n", "Epoch 2 | step 167 / 359 | loss : 0.5986186265945435\n", "Epoch 2 | step 168 / 359 | loss : 0.6221967935562134\n", "Epoch 2 | step 169 / 359 | loss : 0.35112181305885315\n", "Epoch 2 | step 170 / 359 | loss : 1.0979722738265991\n", "Epoch 2 | step 171 / 359 | loss : 0.6665899157524109\n", "Epoch 2 | step 172 / 359 | loss : 0.5733560919761658\n", "Epoch 2 | step 173 / 359 | loss : 0.6441207528114319\n", "Epoch 2 | step 174 / 359 | loss : 0.6317400932312012\n", "Epoch 2 | step 175 / 359 | loss : 0.332592248916626\n", "Epoch 2 | step 176 / 359 | loss : 0.590224027633667\n", "Epoch 2 | step 177 / 359 | loss : 0.3564487397670746\n", "Epoch 2 | step 178 / 359 | loss : 0.5172982215881348\n", "Epoch 2 | step 179 / 359 | loss : 0.6651707291603088\n", "Epoch 2 | step 180 / 359 | loss : 
0.44533446431159973\n", "Epoch 2 | step 181 / 359 | loss : 0.6969414353370667\n", "Epoch 2 | step 182 / 359 | loss : 0.42616334557533264\n", "Epoch 2 | step 183 / 359 | loss : 0.5644264221191406\n", "Epoch 2 | step 184 / 359 | loss : 0.49040040373802185\n", "Epoch 2 | step 185 / 359 | loss : 0.28691840171813965\n", "Epoch 2 | step 186 / 359 | loss : 0.6303470730781555\n", "Epoch 2 | step 187 / 359 | loss : 0.4258257746696472\n", "Epoch 2 | step 188 / 359 | loss : 0.5987871885299683\n", "Epoch 2 | step 189 / 359 | loss : 0.5503125190734863\n", "Epoch 2 | step 190 / 359 | loss : 0.828195333480835\n", "Epoch 2 | step 191 / 359 | loss : 0.5237138271331787\n", "Epoch 2 | step 192 / 359 | loss : 1.1587247848510742\n", "Epoch 2 | step 193 / 359 | loss : 0.5232516527175903\n", "Epoch 2 | step 194 / 359 | loss : 0.5770069360733032\n", "Epoch 2 | step 195 / 359 | loss : 0.880166232585907\n", "Epoch 2 | step 196 / 359 | loss : 0.794714629650116\n", "Epoch 2 | step 197 / 359 | loss : 0.8895128965377808\n", "Epoch 2 | step 198 / 359 | loss : 0.4024942219257355\n", "Epoch 2 | step 199 / 359 | loss : 0.46008560061454773\n", "Epoch 2 | step 200 / 359 | loss : 0.6359156370162964\n", "Epoch 2 | step 201 / 359 | loss : 0.8496811389923096\n", "Epoch 2 | step 202 / 359 | loss : 0.5093135237693787\n", "Epoch 2 | step 203 / 359 | loss : 0.6447819471359253\n", "Epoch 2 | step 204 / 359 | loss : 0.606738269329071\n", "Epoch 2 | step 205 / 359 | loss : 0.6529638171195984\n", "Epoch 2 | step 206 / 359 | loss : 0.4934871792793274\n", "Epoch 2 | step 207 / 359 | loss : 0.6940314173698425\n", "Epoch 2 | step 208 / 359 | loss : 0.6376542448997498\n", "Epoch 2 | step 209 / 359 | loss : 0.5892638564109802\n", "Epoch 2 | step 210 / 359 | loss : 0.5792542695999146\n", "Epoch 2 | step 211 / 359 | loss : 0.6554176211357117\n", "Epoch 2 | step 212 / 359 | loss : 0.6325833797454834\n", "Epoch 2 | step 213 / 359 | loss : 0.9500777721405029\n", "Epoch 2 | step 214 / 359 | loss : 0.8305779695510864\n", "Epoch 2 | step 215 / 359 | loss : 0.7280408143997192\n", "Epoch 2 | step 216 / 359 | loss : 0.5781653523445129\n", "Epoch 2 | step 217 / 359 | loss : 0.7200438380241394\n", "Epoch 2 | step 218 / 359 | loss : 0.41216087341308594\n", "Epoch 2 | step 219 / 359 | loss : 0.45791691541671753\n", "Epoch 2 | step 220 / 359 | loss : 0.9044284224510193\n", "Epoch 2 | step 221 / 359 | loss : 0.5421136021614075\n", "Epoch 2 | step 222 / 359 | loss : 0.9546344876289368\n", "Epoch 2 | step 223 / 359 | loss : 1.0051774978637695\n", "Epoch 2 | step 224 / 359 | loss : 0.7729747891426086\n", "Epoch 2 | step 225 / 359 | loss : 0.4749220907688141\n", "Epoch 2 | step 226 / 359 | loss : 0.47767969965934753\n", "Epoch 2 | step 227 / 359 | loss : 0.6659330129623413\n", "Epoch 2 | step 228 / 359 | loss : 0.3765334188938141\n", "Epoch 2 | step 229 / 359 | loss : 0.41046762466430664\n", "Epoch 2 | step 230 / 359 | loss : 0.9594283103942871\n", "Epoch 2 | step 231 / 359 | loss : 0.6146476864814758\n", "Epoch 2 | step 232 / 359 | loss : 0.39238250255584717\n", "Epoch 2 | step 233 / 359 | loss : 0.4334268271923065\n", "Epoch 2 | step 234 / 359 | loss : 0.4442388415336609\n", "Epoch 2 | step 235 / 359 | loss : 0.6737079620361328\n", "Epoch 2 | step 236 / 359 | loss : 0.7817719578742981\n", "Epoch 2 | step 237 / 359 | loss : 0.5824685096740723\n", "Epoch 2 | step 238 / 359 | loss : 0.7460023164749146\n", "Epoch 2 | step 239 / 359 | loss : 0.6490378975868225\n", "Epoch 2 | step 240 / 359 | loss : 0.42491501569747925\n", "Epoch 2 | step 241 / 359 | loss : 
0.6735126376152039\n", "Epoch 2 | step 242 / 359 | loss : 0.4683500826358795\n", "Epoch 2 | step 243 / 359 | loss : 0.3423594534397125\n", "Epoch 2 | step 244 / 359 | loss : 0.36960846185684204\n", "Epoch 2 | step 245 / 359 | loss : 0.4035419821739197\n", "Epoch 2 | step 246 / 359 | loss : 0.6501013040542603\n", "Epoch 2 | step 247 / 359 | loss : 0.8464272022247314\n", "Epoch 2 | step 248 / 359 | loss : 0.31636863946914673\n", "Epoch 2 | step 249 / 359 | loss : 0.5932666659355164\n", "Epoch 2 | step 250 / 359 | loss : 0.33725056052207947\n", "Epoch 2 | step 251 / 359 | loss : 0.323375940322876\n", "Epoch 2 | step 252 / 359 | loss : 1.1148899793624878\n", "Epoch 2 | step 253 / 359 | loss : 0.5139695405960083\n", "Epoch 2 | step 254 / 359 | loss : 0.4739786386489868\n", "Epoch 2 | step 255 / 359 | loss : 0.6231196522712708\n", "Epoch 2 | step 256 / 359 | loss : 0.555223286151886\n", "Epoch 2 | step 257 / 359 | loss : 0.5524908304214478\n", "Epoch 2 | step 258 / 359 | loss : 0.657410740852356\n", "Epoch 2 | step 259 / 359 | loss : 0.7115008234977722\n", "Epoch 2 | step 260 / 359 | loss : 0.4881207346916199\n", "Epoch 2 | step 261 / 359 | loss : 0.6385228633880615\n", "Epoch 2 | step 262 / 359 | loss : 0.49243026971817017\n", "Epoch 2 | step 263 / 359 | loss : 0.38959506154060364\n", "Epoch 2 | step 264 / 359 | loss : 0.440262109041214\n", "Epoch 2 | step 265 / 359 | loss : 0.5564330220222473\n", "Epoch 2 | step 266 / 359 | loss : 0.5985969305038452\n", "Epoch 2 | step 267 / 359 | loss : 0.3996179401874542\n", "Epoch 2 | step 268 / 359 | loss : 0.6914099454879761\n", "Epoch 2 | step 269 / 359 | loss : 0.32315850257873535\n", "Epoch 2 | step 270 / 359 | loss : 0.708372950553894\n", "Epoch 2 | step 271 / 359 | loss : 0.3052769601345062\n", "Epoch 2 | step 272 / 359 | loss : 0.6982291340827942\n", "Epoch 2 | step 273 / 359 | loss : 0.436003714799881\n", "Epoch 2 | step 274 / 359 | loss : 0.28871604800224304\n", "Epoch 2 | step 275 / 359 | loss : 0.49104517698287964\n", "Epoch 2 | step 276 / 359 | loss : 0.48685985803604126\n", "Epoch 2 | step 277 / 359 | loss : 0.731686532497406\n", "Epoch 2 | step 278 / 359 | loss : 0.49107643961906433\n", "Epoch 2 | step 279 / 359 | loss : 0.7503469586372375\n", "Epoch 2 | step 280 / 359 | loss : 0.438730925321579\n", "Epoch 2 | step 281 / 359 | loss : 0.3579823076725006\n", "Epoch 2 | step 282 / 359 | loss : 0.8086063265800476\n", "Epoch 2 | step 283 / 359 | loss : 0.8255655765533447\n", "Epoch 2 | step 284 / 359 | loss : 0.6347156763076782\n", "Epoch 2 | step 285 / 359 | loss : 0.5948058366775513\n", "Epoch 2 | step 286 / 359 | loss : 0.7629244923591614\n", "Epoch 2 | step 287 / 359 | loss : 0.5086061358451843\n", "Epoch 2 | step 288 / 359 | loss : 0.40740442276000977\n", "Epoch 2 | step 289 / 359 | loss : 0.5721706748008728\n", "Epoch 2 | step 290 / 359 | loss : 0.4877966344356537\n", "Epoch 2 | step 291 / 359 | loss : 0.7966518402099609\n", "Epoch 2 | step 292 / 359 | loss : 0.5162725448608398\n", "Epoch 2 | step 293 / 359 | loss : 0.4720936417579651\n", "Epoch 2 | step 294 / 359 | loss : 0.5351039171218872\n", "Epoch 2 | step 295 / 359 | loss : 0.6956008672714233\n", "Epoch 2 | step 296 / 359 | loss : 0.49350929260253906\n", "Epoch 2 | step 297 / 359 | loss : 0.47316426038742065\n", "Epoch 2 | step 298 / 359 | loss : 0.8731467723846436\n", "Epoch 2 | step 299 / 359 | loss : 0.8846551775932312\n", "Epoch 2 | step 300 / 359 | loss : 0.4272167980670929\n", "Epoch 2 | step 301 / 359 | loss : 0.808382511138916\n", "Epoch 2 | step 302 / 359 | loss : 
0.6883760690689087\n", "Epoch 2 | step 303 / 359 | loss : 0.37449967861175537\n", "Epoch 2 | step 304 / 359 | loss : 0.5402615666389465\n", "Epoch 2 | step 305 / 359 | loss : 0.6297013759613037\n", "Epoch 2 | step 306 / 359 | loss : 0.7948426604270935\n", "Epoch 2 | step 307 / 359 | loss : 0.3096265494823456\n", "Epoch 2 | step 308 / 359 | loss : 0.6381617188453674\n", "Epoch 2 | step 309 / 359 | loss : 0.6753301024436951\n", "Epoch 2 | step 310 / 359 | loss : 0.5228232741355896\n", "Epoch 2 | step 311 / 359 | loss : 0.428478866815567\n", "Epoch 2 | step 312 / 359 | loss : 0.5349342823028564\n", "Epoch 2 | step 313 / 359 | loss : 0.9860405325889587\n", "Epoch 2 | step 314 / 359 | loss : 0.7197734713554382\n", "Epoch 2 | step 315 / 359 | loss : 0.5339714288711548\n", "Epoch 2 | step 316 / 359 | loss : 0.5536119937896729\n", "Epoch 2 | step 317 / 359 | loss : 0.618342399597168\n", "Epoch 2 | step 318 / 359 | loss : 0.508105456829071\n", "Epoch 2 | step 319 / 359 | loss : 0.5951187014579773\n", "Epoch 2 | step 320 / 359 | loss : 0.5082687139511108\n", "Epoch 2 | step 321 / 359 | loss : 0.698558509349823\n", "Epoch 2 | step 322 / 359 | loss : 0.4006327986717224\n", "Epoch 2 | step 323 / 359 | loss : 0.5879320502281189\n", "Epoch 2 | step 324 / 359 | loss : 0.44331735372543335\n", "Epoch 2 | step 325 / 359 | loss : 0.8376948237419128\n", "Epoch 2 | step 326 / 359 | loss : 0.2528882920742035\n", "Epoch 2 | step 327 / 359 | loss : 0.8471085429191589\n", "Epoch 2 | step 328 / 359 | loss : 0.737007737159729\n", "Epoch 2 | step 329 / 359 | loss : 0.9493338465690613\n", "Epoch 2 | step 330 / 359 | loss : 0.33772656321525574\n", "Epoch 2 | step 331 / 359 | loss : 0.4521937370300293\n", "Epoch 2 | step 332 / 359 | loss : 0.6733250617980957\n", "Epoch 2 | step 333 / 359 | loss : 0.5575129985809326\n", "Epoch 2 | step 334 / 359 | loss : 0.6293047070503235\n", "Epoch 2 | step 335 / 359 | loss : 0.7387343645095825\n", "Epoch 2 | step 336 / 359 | loss : 0.4705043137073517\n", "Epoch 2 | step 337 / 359 | loss : 0.7646978497505188\n", "Epoch 2 | step 338 / 359 | loss : 0.4852716326713562\n", "Epoch 2 | step 339 / 359 | loss : 0.6760196089744568\n", "Epoch 2 | step 340 / 359 | loss : 0.4837150573730469\n", "Epoch 2 | step 341 / 359 | loss : 0.8186960220336914\n", "Epoch 2 | step 342 / 359 | loss : 0.4655584692955017\n", "Epoch 2 | step 343 / 359 | loss : 0.48027342557907104\n", "Epoch 2 | step 344 / 359 | loss : 0.5112879276275635\n", "Epoch 2 | step 345 / 359 | loss : 0.5137596130371094\n", "Epoch 2 | step 346 / 359 | loss : 0.6494828462600708\n", "Epoch 2 | step 347 / 359 | loss : 0.4904279410839081\n", "Epoch 2 | step 348 / 359 | loss : 0.859269917011261\n", "Epoch 2 | step 349 / 359 | loss : 0.5791561007499695\n", "Epoch 2 | step 350 / 359 | loss : 0.498825341463089\n", "Epoch 2 | step 351 / 359 | loss : 0.5862367749214172\n", "Epoch 2 | step 352 / 359 | loss : 0.43433678150177\n", "Epoch 2 | step 353 / 359 | loss : 0.30885589122772217\n", "Epoch 2 | step 354 / 359 | loss : 0.5060280561447144\n", "Epoch 2 | step 355 / 359 | loss : 0.812046229839325\n", "Epoch 2 | step 356 / 359 | loss : 0.7270104885101318\n", "Epoch 2 | step 357 / 359 | loss : 0.6961581110954285\n", "Epoch 2 | step 358 / 359 | loss : 0.6174116134643555\n", "Epoch 2 | step 359 / 359 | loss : 0.1321401447057724\n", "Training Epoch n° 2\n", "Epoch 3 | step 1 / 359 | loss : 0.39003878831863403\n", "Epoch 3 | step 2 / 359 | loss : 0.5941234827041626\n", "Epoch 3 | step 3 / 359 | loss : 0.5925105214118958\n", "Epoch 3 | step 4 / 359 | loss : 
0.5777327418327332\n", "Epoch 3 | step 5 / 359 | loss : 0.5395992398262024\n", "Epoch 3 | step 6 / 359 | loss : 0.358142226934433\n", "Epoch 3 | step 7 / 359 | loss : 0.6602321267127991\n", "Epoch 3 | step 8 / 359 | loss : 0.6805344223976135\n", "Epoch 3 | step 9 / 359 | loss : 0.487517386674881\n", "Epoch 3 | step 10 / 359 | loss : 0.413181334733963\n", "Epoch 3 | step 11 / 359 | loss : 0.4090081453323364\n", "Epoch 3 | step 12 / 359 | loss : 0.829048752784729\n", "Epoch 3 | step 13 / 359 | loss : 0.6721126437187195\n", "Epoch 3 | step 14 / 359 | loss : 0.5629092454910278\n", "Epoch 3 | step 15 / 359 | loss : 0.5261154770851135\n", "Epoch 3 | step 16 / 359 | loss : 0.651794970035553\n", "Epoch 3 | step 17 / 359 | loss : 0.8267910480499268\n", "Epoch 3 | step 18 / 359 | loss : 0.4810485243797302\n", "Epoch 3 | step 19 / 359 | loss : 0.5304057598114014\n", "Epoch 3 | step 20 / 359 | loss : 0.6236020922660828\n", "Epoch 3 | step 21 / 359 | loss : 0.5955148935317993\n", "Epoch 3 | step 22 / 359 | loss : 0.7646294832229614\n", "Epoch 3 | step 23 / 359 | loss : 0.45329582691192627\n", "Epoch 3 | step 24 / 359 | loss : 0.28889000415802\n", "Epoch 3 | step 25 / 359 | loss : 0.2472880631685257\n", "Epoch 3 | step 26 / 359 | loss : 0.8967002034187317\n", "Epoch 3 | step 27 / 359 | loss : 0.692566454410553\n", "Epoch 3 | step 28 / 359 | loss : 0.5742484331130981\n", "Epoch 3 | step 29 / 359 | loss : 0.7695298790931702\n", "Epoch 3 | step 30 / 359 | loss : 0.5508667230606079\n", "Epoch 3 | step 31 / 359 | loss : 0.47712424397468567\n", "Epoch 3 | step 32 / 359 | loss : 0.5272340774536133\n", "Epoch 3 | step 33 / 359 | loss : 0.4837336540222168\n", "Epoch 3 | step 34 / 359 | loss : 0.6386617422103882\n", "Epoch 3 | step 35 / 359 | loss : 0.6502728462219238\n", "Epoch 3 | step 36 / 359 | loss : 0.5395351648330688\n", "Epoch 3 | step 37 / 359 | loss : 0.5878652930259705\n", "Epoch 3 | step 38 / 359 | loss : 0.9645787477493286\n", "Epoch 3 | step 39 / 359 | loss : 0.6572265625\n", "Epoch 3 | step 40 / 359 | loss : 0.7335481643676758\n", "Epoch 3 | step 41 / 359 | loss : 0.6036574840545654\n", "Epoch 3 | step 42 / 359 | loss : 0.5315230488777161\n", "Epoch 3 | step 43 / 359 | loss : 0.49783721566200256\n", "Epoch 3 | step 44 / 359 | loss : 0.5730430483818054\n", "Epoch 3 | step 45 / 359 | loss : 0.8045597076416016\n", "Epoch 3 | step 46 / 359 | loss : 0.7683224678039551\n", "Epoch 3 | step 47 / 359 | loss : 0.43406254053115845\n", "Epoch 3 | step 48 / 359 | loss : 0.5317076444625854\n", "Epoch 3 | step 49 / 359 | loss : 0.4315342307090759\n", "Epoch 3 | step 50 / 359 | loss : 0.8795009255409241\n", "Epoch 3 | step 51 / 359 | loss : 0.6435292959213257\n", "Epoch 3 | step 52 / 359 | loss : 0.7391325235366821\n", "Epoch 3 | step 53 / 359 | loss : 0.5607476830482483\n", "Epoch 3 | step 54 / 359 | loss : 0.4962770640850067\n", "Epoch 3 | step 55 / 359 | loss : 0.6353371739387512\n", "Epoch 3 | step 56 / 359 | loss : 0.5881751179695129\n", "Epoch 3 | step 57 / 359 | loss : 0.7389804124832153\n", "Epoch 3 | step 58 / 359 | loss : 0.56746906042099\n", "Epoch 3 | step 59 / 359 | loss : 0.5920646786689758\n", "Epoch 3 | step 60 / 359 | loss : 0.6998968124389648\n", "Epoch 3 | step 61 / 359 | loss : 0.7021005153656006\n", "Epoch 3 | step 62 / 359 | loss : 0.6149020791053772\n", "Epoch 3 | step 63 / 359 | loss : 0.5946621894836426\n", "Epoch 3 | step 64 / 359 | loss : 0.5008553266525269\n", "Epoch 3 | step 65 / 359 | loss : 0.4086427688598633\n", "Epoch 3 | step 66 / 359 | loss : 0.9652490019798279\n", "Epoch 3 | 
step 67 / 359 | loss : 0.7368547320365906\n", "Epoch 3 | step 68 / 359 | loss : 0.4447685480117798\n", "Epoch 3 | step 69 / 359 | loss : 0.6642535328865051\n", "Epoch 3 | step 70 / 359 | loss : 0.8500411510467529\n", "Epoch 3 | step 71 / 359 | loss : 0.7203304171562195\n", "Epoch 3 | step 72 / 359 | loss : 0.729564905166626\n", "Epoch 3 | step 73 / 359 | loss : 0.6008549928665161\n", "Epoch 3 | step 74 / 359 | loss : 0.6007261276245117\n", "Epoch 3 | step 75 / 359 | loss : 0.6009789109230042\n", "Epoch 3 | step 76 / 359 | loss : 0.5183075070381165\n", "Epoch 3 | step 77 / 359 | loss : 0.34114938974380493\n", "Epoch 3 | step 78 / 359 | loss : 0.6134307384490967\n", "Epoch 3 | step 79 / 359 | loss : 0.7434840798377991\n", "Epoch 3 | step 80 / 359 | loss : 0.2701911926269531\n", "Epoch 3 | step 81 / 359 | loss : 0.7402443885803223\n", "Epoch 3 | step 82 / 359 | loss : 0.3729211986064911\n", "Epoch 3 | step 83 / 359 | loss : 0.3350273072719574\n", "Epoch 3 | step 84 / 359 | loss : 0.76374751329422\n", "Epoch 3 | step 85 / 359 | loss : 0.3868831396102905\n", "Epoch 3 | step 86 / 359 | loss : 0.4579334557056427\n", "Epoch 3 | step 87 / 359 | loss : 0.7388073801994324\n", "Epoch 3 | step 88 / 359 | loss : 0.508171796798706\n", "Epoch 3 | step 89 / 359 | loss : 0.8003300428390503\n", "Epoch 3 | step 90 / 359 | loss : 0.5135202407836914\n", "Epoch 3 | step 91 / 359 | loss : 0.5436776280403137\n", "Epoch 3 | step 92 / 359 | loss : 0.6817873120307922\n", "Epoch 3 | step 93 / 359 | loss : 0.31884315609931946\n", "Epoch 3 | step 94 / 359 | loss : 0.699508786201477\n", "Epoch 3 | step 95 / 359 | loss : 0.5054154992103577\n", "Epoch 3 | step 96 / 359 | loss : 0.5382583737373352\n", "Epoch 3 | step 97 / 359 | loss : 0.6824609637260437\n", "Epoch 3 | step 98 / 359 | loss : 0.7373035550117493\n", "Epoch 3 | step 99 / 359 | loss : 0.4665217995643616\n", "Epoch 3 | step 100 / 359 | loss : 0.9271599650382996\n", "Epoch 3 | step 101 / 359 | loss : 0.6850453019142151\n", "Epoch 3 | step 102 / 359 | loss : 0.7969502806663513\n", "Epoch 3 | step 103 / 359 | loss : 0.4792705178260803\n", "Epoch 3 | step 104 / 359 | loss : 0.476526141166687\n", "Epoch 3 | step 105 / 359 | loss : 0.67435622215271\n", "Epoch 3 | step 106 / 359 | loss : 0.24319523572921753\n", "Epoch 3 | step 107 / 359 | loss : 0.31220734119415283\n", "Epoch 3 | step 108 / 359 | loss : 0.2815428376197815\n", "Epoch 3 | step 109 / 359 | loss : 0.5797743797302246\n", "Epoch 3 | step 110 / 359 | loss : 0.5164719820022583\n", "Epoch 3 | step 111 / 359 | loss : 0.3829927146434784\n", "Epoch 3 | step 112 / 359 | loss : 0.6017778515815735\n", "Epoch 3 | step 113 / 359 | loss : 0.6890553832054138\n", "Epoch 3 | step 114 / 359 | loss : 0.6499165296554565\n", "Epoch 3 | step 115 / 359 | loss : 0.4320700764656067\n", "Epoch 3 | step 116 / 359 | loss : 0.42645734548568726\n", "Epoch 3 | step 117 / 359 | loss : 0.6049628853797913\n", "Epoch 3 | step 118 / 359 | loss : 0.31039366126060486\n", "Epoch 3 | step 119 / 359 | loss : 0.7565672993659973\n", "Epoch 3 | step 120 / 359 | loss : 0.6578177213668823\n", "Epoch 3 | step 121 / 359 | loss : 0.5354474782943726\n", "Epoch 3 | step 122 / 359 | loss : 0.6742721199989319\n", "Epoch 3 | step 123 / 359 | loss : 0.8204450011253357\n", "Epoch 3 | step 124 / 359 | loss : 0.7981418371200562\n", "Epoch 3 | step 125 / 359 | loss : 0.5157833099365234\n", "Epoch 3 | step 126 / 359 | loss : 0.5810724496841431\n", "Epoch 3 | step 127 / 359 | loss : 0.42615821957588196\n", "Epoch 3 | step 128 / 359 | loss : 0.5375134944915771\n", 
"Epoch 3 | step 129 / 359 | loss : 0.8329095840454102\n", "Epoch 3 | step 130 / 359 | loss : 0.4822121858596802\n", "Epoch 3 | step 131 / 359 | loss : 0.40876027941703796\n", "Epoch 3 | step 132 / 359 | loss : 0.3062140941619873\n", "Epoch 3 | step 133 / 359 | loss : 0.38889044523239136\n", "Epoch 3 | step 134 / 359 | loss : 0.22066670656204224\n", "Epoch 3 | step 135 / 359 | loss : 0.7220257520675659\n", "Epoch 3 | step 136 / 359 | loss : 0.4620121717453003\n", "Epoch 3 | step 137 / 359 | loss : 0.7553125023841858\n", "Epoch 3 | step 138 / 359 | loss : 1.0966514348983765\n", "Epoch 3 | step 139 / 359 | loss : 0.6255748867988586\n", "Epoch 3 | step 140 / 359 | loss : 0.6991614103317261\n", "Epoch 3 | step 141 / 359 | loss : 0.6000030040740967\n", "Epoch 3 | step 142 / 359 | loss : 0.3267807960510254\n", "Epoch 3 | step 143 / 359 | loss : 0.7994662523269653\n", "Epoch 3 | step 144 / 359 | loss : 0.48146748542785645\n", "Epoch 3 | step 145 / 359 | loss : 0.4818108081817627\n", "Epoch 3 | step 146 / 359 | loss : 0.6886528134346008\n", "Epoch 3 | step 147 / 359 | loss : 0.6375142931938171\n", "Epoch 3 | step 148 / 359 | loss : 0.8527612686157227\n", "Epoch 3 | step 149 / 359 | loss : 0.4900520443916321\n", "Epoch 3 | step 150 / 359 | loss : 0.45373499393463135\n", "Epoch 3 | step 151 / 359 | loss : 0.7730929851531982\n", "Epoch 3 | step 152 / 359 | loss : 0.9158742427825928\n", "Epoch 3 | step 153 / 359 | loss : 0.5820903182029724\n", "Epoch 3 | step 154 / 359 | loss : 0.44902169704437256\n", "Epoch 3 | step 155 / 359 | loss : 0.66640305519104\n", "Epoch 3 | step 156 / 359 | loss : 0.6842275857925415\n", "Epoch 3 | step 157 / 359 | loss : 0.3802758455276489\n", "Epoch 3 | step 158 / 359 | loss : 0.4427416920661926\n", "Epoch 3 | step 159 / 359 | loss : 0.6224071383476257\n", "Epoch 3 | step 160 / 359 | loss : 0.47488638758659363\n", "Epoch 3 | step 161 / 359 | loss : 0.24249988794326782\n", "Epoch 3 | step 162 / 359 | loss : 0.9441826939582825\n", "Epoch 3 | step 163 / 359 | loss : 0.47178915143013\n", "Epoch 3 | step 164 / 359 | loss : 0.7144463062286377\n", "Epoch 3 | step 165 / 359 | loss : 0.35927480459213257\n", "Epoch 3 | step 166 / 359 | loss : 0.5408070087432861\n", "Epoch 3 | step 167 / 359 | loss : 0.5116636753082275\n", "Epoch 3 | step 168 / 359 | loss : 0.6961904764175415\n", "Epoch 3 | step 169 / 359 | loss : 0.9038954973220825\n", "Epoch 3 | step 170 / 359 | loss : 0.6927047967910767\n", "Epoch 3 | step 171 / 359 | loss : 0.6366347670555115\n", "Epoch 3 | step 172 / 359 | loss : 0.5034002065658569\n", "Epoch 3 | step 173 / 359 | loss : 0.6438537836074829\n", "Epoch 3 | step 174 / 359 | loss : 0.7451682090759277\n", "Epoch 3 | step 175 / 359 | loss : 0.68568354845047\n", "Epoch 3 | step 176 / 359 | loss : 0.5486295223236084\n", "Epoch 3 | step 177 / 359 | loss : 0.48730671405792236\n", "Epoch 3 | step 178 / 359 | loss : 0.7287465929985046\n", "Epoch 3 | step 179 / 359 | loss : 0.5958290100097656\n", "Epoch 3 | step 180 / 359 | loss : 0.6371828317642212\n", "Epoch 3 | step 181 / 359 | loss : 0.6420140266418457\n", "Epoch 3 | step 182 / 359 | loss : 0.7382590174674988\n", "Epoch 3 | step 183 / 359 | loss : 0.6480355858802795\n", "Epoch 3 | step 184 / 359 | loss : 0.7921721339225769\n", "Epoch 3 | step 185 / 359 | loss : 0.4635060131549835\n", "Epoch 3 | step 186 / 359 | loss : 0.5381967425346375\n", "Epoch 3 | step 187 / 359 | loss : 0.6797651052474976\n", "Epoch 3 | step 188 / 359 | loss : 0.7316694855690002\n", "Epoch 3 | step 189 / 359 | loss : 0.3658829927444458\n", "Epoch 3 | 
step 190 / 359 | loss : 0.5274326205253601\n", "Epoch 3 | step 191 / 359 | loss : 0.5722898840904236\n", "Epoch 3 | step 192 / 359 | loss : 0.44237974286079407\n", "Epoch 3 | step 193 / 359 | loss : 0.723672091960907\n", "Epoch 3 | step 194 / 359 | loss : 0.30339089035987854\n", "Epoch 3 | step 195 / 359 | loss : 0.47153398394584656\n", "Epoch 3 | step 196 / 359 | loss : 0.7241735458374023\n", "Epoch 3 | step 197 / 359 | loss : 0.47646862268447876\n", "Epoch 3 | step 198 / 359 | loss : 0.5304155945777893\n", "Epoch 3 | step 199 / 359 | loss : 0.3512808680534363\n", "Epoch 3 | step 200 / 359 | loss : 0.5947755575180054\n", "Epoch 3 | step 201 / 359 | loss : 0.2434542030096054\n", "Epoch 3 | step 202 / 359 | loss : 0.17269045114517212\n", "Epoch 3 | step 203 / 359 | loss : 0.33827725052833557\n", "Epoch 3 | step 204 / 359 | loss : 0.7362703680992126\n", "Epoch 3 | step 205 / 359 | loss : 0.4152049124240875\n", "Epoch 3 | step 206 / 359 | loss : 0.6862747073173523\n", "Epoch 3 | step 207 / 359 | loss : 0.5797796845436096\n", "Epoch 3 | step 208 / 359 | loss : 0.4819744825363159\n", "Epoch 3 | step 209 / 359 | loss : 0.3521980941295624\n", "Epoch 3 | step 210 / 359 | loss : 0.3292635977268219\n", "Epoch 3 | step 211 / 359 | loss : 0.9041137099266052\n", "Epoch 3 | step 212 / 359 | loss : 0.7124985456466675\n", "Epoch 3 | step 213 / 359 | loss : 0.703311026096344\n", "Epoch 3 | step 214 / 359 | loss : 0.386079341173172\n", "Epoch 3 | step 215 / 359 | loss : 0.45958802103996277\n", "Epoch 3 | step 216 / 359 | loss : 0.5668870806694031\n", "Epoch 3 | step 217 / 359 | loss : 0.45389047265052795\n", "Epoch 3 | step 218 / 359 | loss : 0.35509487986564636\n", "Epoch 3 | step 219 / 359 | loss : 0.60392826795578\n", "Epoch 3 | step 220 / 359 | loss : 0.18686147034168243\n", "Epoch 3 | step 221 / 359 | loss : 0.3580904006958008\n", "Epoch 3 | step 222 / 359 | loss : 0.6003063321113586\n", "Epoch 3 | step 223 / 359 | loss : 0.3384496569633484\n", "Epoch 3 | step 224 / 359 | loss : 0.46131330728530884\n", "Epoch 3 | step 225 / 359 | loss : 0.8962639570236206\n", "Epoch 3 | step 226 / 359 | loss : 0.49780434370040894\n", "Epoch 3 | step 227 / 359 | loss : 0.7114304900169373\n", "Epoch 3 | step 228 / 359 | loss : 0.6336272358894348\n", "Epoch 3 | step 229 / 359 | loss : 0.5499700903892517\n", "Epoch 3 | step 230 / 359 | loss : 0.575852632522583\n", "Epoch 3 | step 231 / 359 | loss : 0.3725271224975586\n", "Epoch 3 | step 232 / 359 | loss : 0.805372416973114\n", "Epoch 3 | step 233 / 359 | loss : 0.5457251667976379\n", "Epoch 3 | step 234 / 359 | loss : 0.6110445261001587\n", "Epoch 3 | step 235 / 359 | loss : 0.6544880867004395\n", "Epoch 3 | step 236 / 359 | loss : 0.4167264997959137\n", "Epoch 3 | step 237 / 359 | loss : 0.5676006078720093\n", "Epoch 3 | step 238 / 359 | loss : 0.49601277709007263\n", "Epoch 3 | step 239 / 359 | loss : 0.6786152124404907\n", "Epoch 3 | step 240 / 359 | loss : 0.6590061783790588\n", "Epoch 3 | step 241 / 359 | loss : 0.5381757616996765\n", "Epoch 3 | step 242 / 359 | loss : 0.4305076003074646\n", "Epoch 3 | step 243 / 359 | loss : 0.48627761006355286\n", "Epoch 3 | step 244 / 359 | loss : 0.5432324409484863\n", "Epoch 3 | step 245 / 359 | loss : 0.8432349562644958\n", "Epoch 3 | step 246 / 359 | loss : 0.579787015914917\n", "Epoch 3 | step 247 / 359 | loss : 0.4007748067378998\n", "Epoch 3 | step 248 / 359 | loss : 0.5502997636795044\n", "Epoch 3 | step 249 / 359 | loss : 0.3621792197227478\n", "Epoch 3 | step 250 / 359 | loss : 0.5502415895462036\n", "Epoch 3 | step 251 / 
359 | loss : 1.0432645082473755\n", "Epoch 3 | step 252 / 359 | loss : 0.5588504672050476\n", "Epoch 3 | step 253 / 359 | loss : 0.8634796738624573\n", "Epoch 3 | step 254 / 359 | loss : 0.6683022379875183\n", "Epoch 3 | step 255 / 359 | loss : 0.7095800638198853\n", "Epoch 3 | step 256 / 359 | loss : 0.6001671552658081\n", "Epoch 3 | step 257 / 359 | loss : 0.7290136218070984\n", "Epoch 3 | step 258 / 359 | loss : 0.6307818293571472\n", "Epoch 3 | step 259 / 359 | loss : 0.5971916317939758\n", "Epoch 3 | step 260 / 359 | loss : 0.30903103947639465\n", "Epoch 3 | step 261 / 359 | loss : 0.3772878646850586\n", "Epoch 3 | step 262 / 359 | loss : 0.7331608533859253\n", "Epoch 3 | step 263 / 359 | loss : 0.3888910114765167\n", "Epoch 3 | step 264 / 359 | loss : 0.8430558443069458\n", "Epoch 3 | step 265 / 359 | loss : 0.6458213329315186\n", "Epoch 3 | step 266 / 359 | loss : 0.4510352909564972\n", "Epoch 3 | step 267 / 359 | loss : 0.5069302320480347\n", "Epoch 3 | step 268 / 359 | loss : 0.5338892936706543\n", "Epoch 3 | step 269 / 359 | loss : 0.5904382467269897\n", "Epoch 3 | step 270 / 359 | loss : 0.8022188544273376\n", "Epoch 3 | step 271 / 359 | loss : 0.5337827801704407\n", "Epoch 3 | step 272 / 359 | loss : 0.49875083565711975\n", "Epoch 3 | step 273 / 359 | loss : 0.6747299432754517\n", "Epoch 3 | step 274 / 359 | loss : 0.39316099882125854\n", "Epoch 3 | step 275 / 359 | loss : 0.5180034041404724\n", "Epoch 3 | step 276 / 359 | loss : 0.5595631003379822\n", "Epoch 3 | step 277 / 359 | loss : 0.4272802770137787\n", "Epoch 3 | step 278 / 359 | loss : 0.4879480004310608\n", "Epoch 3 | step 279 / 359 | loss : 0.4488792419433594\n", "Epoch 3 | step 280 / 359 | loss : 0.27409154176712036\n", "Epoch 3 | step 281 / 359 | loss : 0.33446723222732544\n", "Epoch 3 | step 282 / 359 | loss : 0.31908783316612244\n", "Epoch 3 | step 283 / 359 | loss : 0.5669617056846619\n", "Epoch 3 | step 284 / 359 | loss : 0.8382391929626465\n", "Epoch 3 | step 285 / 359 | loss : 0.4921998977661133\n", "Epoch 3 | step 286 / 359 | loss : 0.25483638048171997\n", "Epoch 3 | step 287 / 359 | loss : 1.236391544342041\n", "Epoch 3 | step 288 / 359 | loss : 0.4990811347961426\n", "Epoch 3 | step 289 / 359 | loss : 0.863811731338501\n", "Epoch 3 | step 290 / 359 | loss : 0.5347537398338318\n", "Epoch 3 | step 291 / 359 | loss : 0.5802640914916992\n", "Epoch 3 | step 292 / 359 | loss : 0.6832436919212341\n", "Epoch 3 | step 293 / 359 | loss : 0.661620020866394\n", "Epoch 3 | step 294 / 359 | loss : 0.5508796572685242\n", "Epoch 3 | step 295 / 359 | loss : 0.39803609251976013\n", "Epoch 3 | step 296 / 359 | loss : 0.6186118721961975\n", "Epoch 3 | step 297 / 359 | loss : 0.4044573903083801\n", "Epoch 3 | step 298 / 359 | loss : 0.2639743387699127\n", "Epoch 3 | step 299 / 359 | loss : 0.4180637001991272\n", "Epoch 3 | step 300 / 359 | loss : 0.7664673328399658\n", "Epoch 3 | step 301 / 359 | loss : 0.75621497631073\n", "Epoch 3 | step 302 / 359 | loss : 0.5332871079444885\n", "Epoch 3 | step 303 / 359 | loss : 0.3568538427352905\n", "Epoch 3 | step 304 / 359 | loss : 0.4912607669830322\n", "Epoch 3 | step 305 / 359 | loss : 0.8574433326721191\n", "Epoch 3 | step 306 / 359 | loss : 0.4732414782047272\n", "Epoch 3 | step 307 / 359 | loss : 0.49640268087387085\n", "Epoch 3 | step 308 / 359 | loss : 0.3974117636680603\n", "Epoch 3 | step 309 / 359 | loss : 0.39655953645706177\n", "Epoch 3 | step 310 / 359 | loss : 0.4732697904109955\n", "Epoch 3 | step 311 / 359 | loss : 0.3768687844276428\n", "Epoch 3 | step 312 / 359 | loss 
: 0.32515275478363037\n", "Epoch 3 | step 313 / 359 | loss : 0.6175051927566528\n", "Epoch 3 | step 314 / 359 | loss : 0.5764682292938232\n", "Epoch 3 | step 315 / 359 | loss : 0.4426231384277344\n", "Epoch 3 | step 316 / 359 | loss : 0.6414517164230347\n", "Epoch 3 | step 317 / 359 | loss : 0.8550004363059998\n", "Epoch 3 | step 318 / 359 | loss : 0.6493987441062927\n", "Epoch 3 | step 319 / 359 | loss : 0.5938549041748047\n", "Epoch 3 | step 320 / 359 | loss : 0.39559587836265564\n", "Epoch 3 | step 321 / 359 | loss : 0.6976167559623718\n", "Epoch 3 | step 322 / 359 | loss : 0.6971575617790222\n", "Epoch 3 | step 323 / 359 | loss : 0.6154391169548035\n", "Epoch 3 | step 324 / 359 | loss : 0.5544577836990356\n", "Epoch 3 | step 325 / 359 | loss : 1.085427165031433\n", "Epoch 3 | step 326 / 359 | loss : 0.5657405257225037\n", "Epoch 3 | step 327 / 359 | loss : 0.6484676599502563\n", "Epoch 3 | step 328 / 359 | loss : 0.47546297311782837\n", "Epoch 3 | step 329 / 359 | loss : 0.4132961928844452\n", "Epoch 3 | step 330 / 359 | loss : 0.4488992393016815\n", "Epoch 3 | step 331 / 359 | loss : 0.32924512028694153\n", "Epoch 3 | step 332 / 359 | loss : 0.7502052783966064\n", "Epoch 3 | step 333 / 359 | loss : 0.6139392852783203\n", "Epoch 3 | step 334 / 359 | loss : 0.9511435031890869\n", "Epoch 3 | step 335 / 359 | loss : 0.6335725784301758\n", "Epoch 3 | step 336 / 359 | loss : 0.4846392571926117\n", "Epoch 3 | step 337 / 359 | loss : 0.603792130947113\n", "Epoch 3 | step 338 / 359 | loss : 0.6350513696670532\n", "Epoch 3 | step 339 / 359 | loss : 0.45024374127388\n", "Epoch 3 | step 340 / 359 | loss : 0.6770630478858948\n", "Epoch 3 | step 341 / 359 | loss : 0.39416933059692383\n", "Epoch 3 | step 342 / 359 | loss : 0.650143027305603\n", "Epoch 3 | step 343 / 359 | loss : 0.8228369951248169\n", "Epoch 3 | step 344 / 359 | loss : 0.45007073879241943\n", "Epoch 3 | step 345 / 359 | loss : 0.480183482170105\n", "Epoch 3 | step 346 / 359 | loss : 0.30392372608184814\n", "Epoch 3 | step 347 / 359 | loss : 0.44802963733673096\n", "Epoch 3 | step 348 / 359 | loss : 0.26264750957489014\n", "Epoch 3 | step 349 / 359 | loss : 0.3882264792919159\n", "Epoch 3 | step 350 / 359 | loss : 0.449749231338501\n", "Epoch 3 | step 351 / 359 | loss : 0.7744216918945312\n", "Epoch 3 | step 352 / 359 | loss : 0.5345607995986938\n", "Epoch 3 | step 353 / 359 | loss : 0.3972330093383789\n", "Epoch 3 | step 354 / 359 | loss : 0.8812428116798401\n", "Epoch 3 | step 355 / 359 | loss : 0.3004673421382904\n", "Epoch 3 | step 356 / 359 | loss : 0.7468639612197876\n", "Epoch 3 | step 357 / 359 | loss : 0.5752899050712585\n", "Epoch 3 | step 358 / 359 | loss : 0.5472959280014038\n", "Epoch 3 | step 359 / 359 | loss : 2.958794593811035\n", "Training Epoch n° 3\n", "Epoch 4 | step 1 / 359 | loss : 0.8564368486404419\n", "Epoch 4 | step 2 / 359 | loss : 0.6859894394874573\n", "Epoch 4 | step 3 / 359 | loss : 0.7219178080558777\n", "Epoch 4 | step 4 / 359 | loss : 0.7635927796363831\n", "Epoch 4 | step 5 / 359 | loss : 0.6200098991394043\n", "Epoch 4 | step 6 / 359 | loss : 0.560172975063324\n", "Epoch 4 | step 7 / 359 | loss : 0.7071115970611572\n", "Epoch 4 | step 8 / 359 | loss : 0.4798569977283478\n", "Epoch 4 | step 9 / 359 | loss : 0.7737131714820862\n", "Epoch 4 | step 10 / 359 | loss : 0.4846486747264862\n", "Epoch 4 | step 11 / 359 | loss : 0.4485028088092804\n", "Epoch 4 | step 12 / 359 | loss : 0.45044979453086853\n", "Epoch 4 | step 13 / 359 | loss : 0.7076160907745361\n", "Epoch 4 | step 14 / 359 | loss : 
0.7458792328834534\n", "Epoch 4 | step 15 / 359 | loss : 0.5291970372200012\n", "Epoch 4 | step 16 / 359 | loss : 0.5492866039276123\n", "Epoch 4 | step 17 / 359 | loss : 0.486955463886261\n", "Epoch 4 | step 18 / 359 | loss : 0.6392285227775574\n", "Epoch 4 | step 19 / 359 | loss : 0.5431461334228516\n", "Epoch 4 | step 20 / 359 | loss : 0.4957622289657593\n", "Epoch 4 | step 21 / 359 | loss : 0.573685884475708\n", "Epoch 4 | step 22 / 359 | loss : 0.6799619197845459\n", "Epoch 4 | step 23 / 359 | loss : 0.514926552772522\n", "Epoch 4 | step 24 / 359 | loss : 0.5069143176078796\n", "Epoch 4 | step 25 / 359 | loss : 0.7787679433822632\n", "Epoch 4 | step 26 / 359 | loss : 0.470180869102478\n", "Epoch 4 | step 27 / 359 | loss : 0.30924221873283386\n", "Epoch 4 | step 28 / 359 | loss : 0.43635156750679016\n", "Epoch 4 | step 29 / 359 | loss : 0.2881840765476227\n", "Epoch 4 | step 30 / 359 | loss : 0.7513533234596252\n", "Epoch 4 | step 31 / 359 | loss : 0.3463623523712158\n", "Epoch 4 | step 32 / 359 | loss : 0.9752247929573059\n", "Epoch 4 | step 33 / 359 | loss : 0.8124076724052429\n", "Epoch 4 | step 34 / 359 | loss : 0.3932952880859375\n", "Epoch 4 | step 35 / 359 | loss : 0.4053417146205902\n", "Epoch 4 | step 36 / 359 | loss : 0.1781567484140396\n", "Epoch 4 | step 37 / 359 | loss : 0.9727395176887512\n", "Epoch 4 | step 38 / 359 | loss : 0.5629979968070984\n", "Epoch 4 | step 39 / 359 | loss : 0.28248512744903564\n", "Epoch 4 | step 40 / 359 | loss : 0.3472312092781067\n", "Epoch 4 | step 41 / 359 | loss : 0.7433170676231384\n", "Epoch 4 | step 42 / 359 | loss : 0.7384646534919739\n", "Epoch 4 | step 43 / 359 | loss : 0.28494665026664734\n", "Epoch 4 | step 44 / 359 | loss : 0.6902724504470825\n", "Epoch 4 | step 45 / 359 | loss : 0.523247241973877\n", "Epoch 4 | step 46 / 359 | loss : 0.4837402403354645\n", "Epoch 4 | step 47 / 359 | loss : 0.8723412156105042\n", "Epoch 4 | step 48 / 359 | loss : 0.5382542014122009\n", "Epoch 4 | step 49 / 359 | loss : 1.0891611576080322\n", "Epoch 4 | step 50 / 359 | loss : 0.6817523241043091\n", "Epoch 4 | step 51 / 359 | loss : 0.6616396307945251\n", "Epoch 4 | step 52 / 359 | loss : 0.5207712650299072\n", "Epoch 4 | step 53 / 359 | loss : 0.544668436050415\n", "Epoch 4 | step 54 / 359 | loss : 0.4282703399658203\n", "Epoch 4 | step 55 / 359 | loss : 0.44502773880958557\n", "Epoch 4 | step 56 / 359 | loss : 0.6099150776863098\n", "Epoch 4 | step 57 / 359 | loss : 0.5720613598823547\n", "Epoch 4 | step 58 / 359 | loss : 0.781141996383667\n", "Epoch 4 | step 59 / 359 | loss : 0.4801074266433716\n", "Epoch 4 | step 60 / 359 | loss : 0.42590463161468506\n", "Epoch 4 | step 61 / 359 | loss : 0.3848291337490082\n", "Epoch 4 | step 62 / 359 | loss : 0.3169406056404114\n", "Epoch 4 | step 63 / 359 | loss : 0.3877840042114258\n", "Epoch 4 | step 64 / 359 | loss : 0.578086256980896\n", "Epoch 4 | step 65 / 359 | loss : 0.6219249963760376\n", "Epoch 4 | step 66 / 359 | loss : 0.4843847155570984\n", "Epoch 4 | step 67 / 359 | loss : 0.3788127303123474\n", "Epoch 4 | step 68 / 359 | loss : 0.38203105330467224\n", "Epoch 4 | step 69 / 359 | loss : 0.24643558263778687\n", "Epoch 4 | step 70 / 359 | loss : 0.39546751976013184\n", "Epoch 4 | step 71 / 359 | loss : 0.8535398840904236\n", "Epoch 4 | step 72 / 359 | loss : 0.6732137799263\n", "Epoch 4 | step 73 / 359 | loss : 0.5769467353820801\n", "Epoch 4 | step 74 / 359 | loss : 0.6230678558349609\n", "Epoch 4 | step 75 / 359 | loss : 0.28252658247947693\n", "Epoch 4 | step 76 / 359 | loss : 
0.4497382342815399\n", "Epoch 4 | step 77 / 359 | loss : 0.6207828521728516\n", "Epoch 4 | step 78 / 359 | loss : 0.6384775638580322\n", "Epoch 4 | step 79 / 359 | loss : 0.683223307132721\n", "Epoch 4 | step 80 / 359 | loss : 0.6025792360305786\n", "Epoch 4 | step 81 / 359 | loss : 0.472320020198822\n", "Epoch 4 | step 82 / 359 | loss : 0.6880651712417603\n", "Epoch 4 | step 83 / 359 | loss : 0.6545637249946594\n", "Epoch 4 | step 84 / 359 | loss : 0.3757723569869995\n", "Epoch 4 | step 85 / 359 | loss : 0.8189146518707275\n", "Epoch 4 | step 86 / 359 | loss : 0.774932861328125\n", "Epoch 4 | step 87 / 359 | loss : 0.3934170603752136\n", "Epoch 4 | step 88 / 359 | loss : 0.7502649426460266\n", "Epoch 4 | step 89 / 359 | loss : 0.8076132535934448\n", "Epoch 4 | step 90 / 359 | loss : 0.6737825274467468\n", "Epoch 4 | step 91 / 359 | loss : 0.5529383420944214\n", "Epoch 4 | step 92 / 359 | loss : 0.2920679748058319\n", "Epoch 4 | step 93 / 359 | loss : 0.657031774520874\n", "Epoch 4 | step 94 / 359 | loss : 0.8875868320465088\n", "Epoch 4 | step 95 / 359 | loss : 0.35107898712158203\n", "Epoch 4 | step 96 / 359 | loss : 0.5178171396255493\n", "Epoch 4 | step 97 / 359 | loss : 0.25622624158859253\n", "Epoch 4 | step 98 / 359 | loss : 0.26048168540000916\n", "Epoch 4 | step 99 / 359 | loss : 0.9557887315750122\n", "Epoch 4 | step 100 / 359 | loss : 0.5239349603652954\n", "Epoch 4 | step 101 / 359 | loss : 0.5305095314979553\n", "Epoch 4 | step 102 / 359 | loss : 0.5385419130325317\n", "Epoch 4 | step 103 / 359 | loss : 0.5898186564445496\n", "Epoch 4 | step 104 / 359 | loss : 0.8544620275497437\n", "Epoch 4 | step 105 / 359 | loss : 0.4004482626914978\n", "Epoch 4 | step 106 / 359 | loss : 0.4949885606765747\n", "Epoch 4 | step 107 / 359 | loss : 0.4328043460845947\n", "Epoch 4 | step 108 / 359 | loss : 0.5214012265205383\n", "Epoch 4 | step 109 / 359 | loss : 0.8425970673561096\n", "Epoch 4 | step 110 / 359 | loss : 0.4643709063529968\n", "Epoch 4 | step 111 / 359 | loss : 0.4529315233230591\n", "Epoch 4 | step 112 / 359 | loss : 0.9606918692588806\n", "Epoch 4 | step 113 / 359 | loss : 0.5517867207527161\n", "Epoch 4 | step 114 / 359 | loss : 0.7145147323608398\n", "Epoch 4 | step 115 / 359 | loss : 0.5211625099182129\n", "Epoch 4 | step 116 / 359 | loss : 0.4222180247306824\n", "Epoch 4 | step 117 / 359 | loss : 0.945061206817627\n", "Epoch 4 | step 118 / 359 | loss : 0.6914369463920593\n", "Epoch 4 | step 119 / 359 | loss : 0.5512202382087708\n", "Epoch 4 | step 120 / 359 | loss : 0.5296524167060852\n", "Epoch 4 | step 121 / 359 | loss : 0.7735228538513184\n", "Epoch 4 | step 122 / 359 | loss : 0.747725784778595\n", "Epoch 4 | step 123 / 359 | loss : 0.7123962044715881\n", "Epoch 4 | step 124 / 359 | loss : 0.4736109673976898\n", "Epoch 4 | step 125 / 359 | loss : 0.39313116669654846\n", "Epoch 4 | step 126 / 359 | loss : 0.6195414662361145\n", "Epoch 4 | step 127 / 359 | loss : 0.3967725336551666\n", "Epoch 4 | step 128 / 359 | loss : 0.7416632771492004\n", "Epoch 4 | step 129 / 359 | loss : 0.7338702082633972\n", "Epoch 4 | step 130 / 359 | loss : 0.6261587738990784\n", "Epoch 4 | step 131 / 359 | loss : 0.3617550730705261\n", "Epoch 4 | step 132 / 359 | loss : 0.6650050282478333\n", "Epoch 4 | step 133 / 359 | loss : 0.32515448331832886\n", "Epoch 4 | step 134 / 359 | loss : 0.7869561314582825\n", "Epoch 4 | step 135 / 359 | loss : 0.7779080271720886\n", "Epoch 4 | step 136 / 359 | loss : 0.6861831545829773\n", "Epoch 4 | step 137 / 359 | loss : 0.5376986265182495\n", "Epoch 4 | step 
138 / 359 | loss : 0.41467636823654175\n", "Epoch 4 | step 139 / 359 | loss : 0.7792675495147705\n", "Epoch 4 | step 140 / 359 | loss : 0.4949716329574585\n", "Epoch 4 | step 141 / 359 | loss : 0.5390953421592712\n", "Epoch 4 | step 142 / 359 | loss : 0.4229286015033722\n", "Epoch 4 | step 143 / 359 | loss : 0.5062383413314819\n", "Epoch 4 | step 144 / 359 | loss : 0.6705712080001831\n", "Epoch 4 | step 145 / 359 | loss : 0.7091580033302307\n", "Epoch 4 | step 146 / 359 | loss : 0.524224579334259\n", "Epoch 4 | step 147 / 359 | loss : 0.5270421504974365\n", "Epoch 4 | step 148 / 359 | loss : 0.44137322902679443\n", "Epoch 4 | step 149 / 359 | loss : 0.9296683669090271\n", "Epoch 4 | step 150 / 359 | loss : 0.7338294982910156\n", "Epoch 4 | step 151 / 359 | loss : 0.7398310303688049\n", "Epoch 4 | step 152 / 359 | loss : 0.5610384941101074\n", "Epoch 4 | step 153 / 359 | loss : 0.4462992250919342\n", "Epoch 4 | step 154 / 359 | loss : 0.6210212707519531\n", "Epoch 4 | step 155 / 359 | loss : 0.41759514808654785\n", "Epoch 4 | step 156 / 359 | loss : 0.7409286499023438\n", "Epoch 4 | step 157 / 359 | loss : 0.3694286048412323\n", "Epoch 4 | step 158 / 359 | loss : 0.5383358597755432\n", "Epoch 4 | step 159 / 359 | loss : 0.4087238609790802\n", "Epoch 4 | step 160 / 359 | loss : 0.3772364854812622\n", "Epoch 4 | step 161 / 359 | loss : 0.5942056775093079\n", "Epoch 4 | step 162 / 359 | loss : 0.6108125448226929\n", "Epoch 4 | step 163 / 359 | loss : 0.594600260257721\n", "Epoch 4 | step 164 / 359 | loss : 0.3885132074356079\n", "Epoch 4 | step 165 / 359 | loss : 0.672572135925293\n", "Epoch 4 | step 166 / 359 | loss : 0.3020663559436798\n", "Epoch 4 | step 167 / 359 | loss : 0.34674882888793945\n", "Epoch 4 | step 168 / 359 | loss : 0.8513849973678589\n", "Epoch 4 | step 169 / 359 | loss : 0.3370882272720337\n", "Epoch 4 | step 170 / 359 | loss : 0.40099066495895386\n", "Epoch 4 | step 171 / 359 | loss : 0.3824271559715271\n", "Epoch 4 | step 172 / 359 | loss : 0.6406750679016113\n", "Epoch 4 | step 173 / 359 | loss : 0.59336918592453\n", "Epoch 4 | step 174 / 359 | loss : 0.31587257981300354\n", "Epoch 4 | step 175 / 359 | loss : 0.8703002333641052\n", "Epoch 4 | step 176 / 359 | loss : 0.46289536356925964\n", "Epoch 4 | step 177 / 359 | loss : 0.7168917059898376\n", "Epoch 4 | step 178 / 359 | loss : 0.5809172987937927\n", "Epoch 4 | step 179 / 359 | loss : 0.7091209292411804\n", "Epoch 4 | step 180 / 359 | loss : 0.5719928741455078\n", "Epoch 4 | step 181 / 359 | loss : 0.44667118787765503\n", "Epoch 4 | step 182 / 359 | loss : 0.6195563673973083\n", "Epoch 4 | step 183 / 359 | loss : 1.049143671989441\n", "Epoch 4 | step 184 / 359 | loss : 0.5983288884162903\n", "Epoch 4 | step 185 / 359 | loss : 0.643012523651123\n", "Epoch 4 | step 186 / 359 | loss : 0.4290783107280731\n", "Epoch 4 | step 187 / 359 | loss : 0.5943226218223572\n", "Epoch 4 | step 188 / 359 | loss : 0.3686700761318207\n", "Epoch 4 | step 189 / 359 | loss : 0.6467016339302063\n", "Epoch 4 | step 190 / 359 | loss : 0.5025885105133057\n", "Epoch 4 | step 191 / 359 | loss : 0.7057023644447327\n", "Epoch 4 | step 192 / 359 | loss : 0.5198248624801636\n", "Epoch 4 | step 193 / 359 | loss : 0.3642106354236603\n", "Epoch 4 | step 194 / 359 | loss : 0.671090841293335\n", "Epoch 4 | step 195 / 359 | loss : 0.8560749292373657\n", "Epoch 4 | step 196 / 359 | loss : 0.5727582573890686\n", "Epoch 4 | step 197 / 359 | loss : 0.6271953582763672\n", "Epoch 4 | step 198 / 359 | loss : 0.42037978768348694\n", "Epoch 4 | step 199 / 359 | 
loss : 0.8821246027946472\n", "Epoch 4 | step 200 / 359 | loss : 0.4919246435165405\n", "Epoch 4 | step 201 / 359 | loss : 0.44483035802841187\n", "Epoch 4 | step 202 / 359 | loss : 0.8908201456069946\n", "Epoch 4 | step 203 / 359 | loss : 0.41926103830337524\n", "Epoch 4 | step 204 / 359 | loss : 0.5296030640602112\n", "Epoch 4 | step 205 / 359 | loss : 0.41740480065345764\n", "Epoch 4 | step 206 / 359 | loss : 0.9651390910148621\n", "Epoch 4 | step 207 / 359 | loss : 0.8912392854690552\n", "Epoch 4 | step 208 / 359 | loss : 0.4827890694141388\n", "Epoch 4 | step 209 / 359 | loss : 0.6250355839729309\n", "Epoch 4 | step 210 / 359 | loss : 0.519737184047699\n", "Epoch 4 | step 211 / 359 | loss : 0.7050705552101135\n", "Epoch 4 | step 212 / 359 | loss : 0.7109194993972778\n", "Epoch 4 | step 213 / 359 | loss : 0.5690346360206604\n", "Epoch 4 | step 214 / 359 | loss : 0.680954098701477\n", "Epoch 4 | step 215 / 359 | loss : 0.4367704689502716\n", "Epoch 4 | step 216 / 359 | loss : 0.4230548143386841\n", "Epoch 4 | step 217 / 359 | loss : 0.7413887977600098\n", "Epoch 4 | step 218 / 359 | loss : 0.3883676528930664\n", "Epoch 4 | step 219 / 359 | loss : 0.4222516417503357\n", "Epoch 4 | step 220 / 359 | loss : 0.6789976954460144\n", "Epoch 4 | step 221 / 359 | loss : 0.6700400710105896\n", "Epoch 4 | step 222 / 359 | loss : 0.4034160375595093\n", "Epoch 4 | step 223 / 359 | loss : 0.3882797956466675\n", "Epoch 4 | step 224 / 359 | loss : 0.6411839723587036\n", "Epoch 4 | step 225 / 359 | loss : 0.4355905055999756\n", "Epoch 4 | step 226 / 359 | loss : 0.7370401620864868\n", "Epoch 4 | step 227 / 359 | loss : 0.6759117841720581\n", "Epoch 4 | step 228 / 359 | loss : 0.5836606621742249\n", "Epoch 4 | step 229 / 359 | loss : 0.3777658939361572\n", "Epoch 4 | step 230 / 359 | loss : 0.6070016026496887\n", "Epoch 4 | step 231 / 359 | loss : 1.0443676710128784\n", "Epoch 4 | step 232 / 359 | loss : 0.7245497703552246\n", "Epoch 4 | step 233 / 359 | loss : 0.42516767978668213\n", "Epoch 4 | step 234 / 359 | loss : 0.6138558983802795\n", "Epoch 4 | step 235 / 359 | loss : 0.7681633830070496\n", "Epoch 4 | step 236 / 359 | loss : 0.7021509408950806\n", "Epoch 4 | step 237 / 359 | loss : 0.2529008090496063\n", "Epoch 4 | step 238 / 359 | loss : 0.6807279586791992\n", "Epoch 4 | step 239 / 359 | loss : 0.3564892113208771\n", "Epoch 4 | step 240 / 359 | loss : 0.47370797395706177\n", "Epoch 4 | step 241 / 359 | loss : 0.8183282017707825\n", "Epoch 4 | step 242 / 359 | loss : 0.5799499154090881\n", "Epoch 4 | step 243 / 359 | loss : 0.33040112257003784\n", "Epoch 4 | step 244 / 359 | loss : 0.4043756425380707\n", "Epoch 4 | step 245 / 359 | loss : 0.46896809339523315\n", "Epoch 4 | step 246 / 359 | loss : 0.5719637274742126\n", "Epoch 4 | step 247 / 359 | loss : 0.36247119307518005\n", "Epoch 4 | step 248 / 359 | loss : 0.5892225503921509\n", "Epoch 4 | step 249 / 359 | loss : 0.2608709931373596\n", "Epoch 4 | step 250 / 359 | loss : 0.3356388211250305\n", "Epoch 4 | step 251 / 359 | loss : 0.7241940498352051\n", "Epoch 4 | step 252 / 359 | loss : 0.6910727620124817\n", "Epoch 4 | step 253 / 359 | loss : 0.9517636299133301\n", "Epoch 4 | step 254 / 359 | loss : 0.5011162161827087\n", "Epoch 4 | step 255 / 359 | loss : 0.6836770176887512\n", "Epoch 4 | step 256 / 359 | loss : 0.4454158544540405\n", "Epoch 4 | step 257 / 359 | loss : 0.6233155131340027\n", "Epoch 4 | step 258 / 359 | loss : 0.22179347276687622\n", "Epoch 4 | step 259 / 359 | loss : 0.1307835876941681\n", "Epoch 4 | step 260 / 359 | loss : 
0.5185268521308899\n", "Epoch 4 | step 261 / 359 | loss : 0.8403699994087219\n", "Epoch 4 | step 262 / 359 | loss : 0.32493358850479126\n", "Epoch 4 | step 263 / 359 | loss : 0.6876887679100037\n", "Epoch 4 | step 264 / 359 | loss : 0.516236424446106\n", "Epoch 4 | step 265 / 359 | loss : 0.5101617574691772\n", "Epoch 4 | step 266 / 359 | loss : 0.6930872201919556\n", "Epoch 4 | step 267 / 359 | loss : 0.9315861463546753\n", "Epoch 4 | step 268 / 359 | loss : 0.6034566760063171\n", "Epoch 4 | step 269 / 359 | loss : 0.6151376962661743\n", "Epoch 4 | step 270 / 359 | loss : 0.3999011814594269\n", "Epoch 4 | step 271 / 359 | loss : 0.24461492896080017\n", "Epoch 4 | step 272 / 359 | loss : 0.6075186729431152\n", "Epoch 4 | step 273 / 359 | loss : 0.8297305107116699\n", "Epoch 4 | step 274 / 359 | loss : 0.6934260725975037\n", "Epoch 4 | step 275 / 359 | loss : 0.5654253959655762\n", "Epoch 4 | step 276 / 359 | loss : 0.3556724786758423\n", "Epoch 4 | step 277 / 359 | loss : 0.5568978190422058\n", "Epoch 4 | step 278 / 359 | loss : 0.32880398631095886\n", "Epoch 4 | step 279 / 359 | loss : 0.5980675220489502\n", "Epoch 4 | step 280 / 359 | loss : 0.3360569477081299\n", "Epoch 4 | step 281 / 359 | loss : 0.5570515990257263\n", "Epoch 4 | step 282 / 359 | loss : 0.35125964879989624\n", "Epoch 4 | step 283 / 359 | loss : 0.29304996132850647\n", "Epoch 4 | step 284 / 359 | loss : 0.4552541673183441\n", "Epoch 4 | step 285 / 359 | loss : 0.6670404076576233\n", "Epoch 4 | step 286 / 359 | loss : 0.597094714641571\n", "Epoch 4 | step 287 / 359 | loss : 0.47436046600341797\n", "Epoch 4 | step 288 / 359 | loss : 0.4275301396846771\n", "Epoch 4 | step 289 / 359 | loss : 0.35426434874534607\n", "Epoch 4 | step 290 / 359 | loss : 0.5203490257263184\n", "Epoch 4 | step 291 / 359 | loss : 0.5058573484420776\n", "Epoch 4 | step 292 / 359 | loss : 0.43619146943092346\n", "Epoch 4 | step 293 / 359 | loss : 0.5516949892044067\n", "Epoch 4 | step 294 / 359 | loss : 0.8163848519325256\n", "Epoch 4 | step 295 / 359 | loss : 0.7684018015861511\n", "Epoch 4 | step 296 / 359 | loss : 0.423323392868042\n", "Epoch 4 | step 297 / 359 | loss : 0.6323391199111938\n", "Epoch 4 | step 298 / 359 | loss : 0.42869892716407776\n", "Epoch 4 | step 299 / 359 | loss : 0.4813457131385803\n", "Epoch 4 | step 300 / 359 | loss : 0.4196486175060272\n", "Epoch 4 | step 301 / 359 | loss : 0.8601276874542236\n", "Epoch 4 | step 302 / 359 | loss : 0.5828316807746887\n", "Epoch 4 | step 303 / 359 | loss : 0.6386504173278809\n", "Epoch 4 | step 304 / 359 | loss : 0.523444652557373\n", "Epoch 4 | step 305 / 359 | loss : 0.524789571762085\n", "Epoch 4 | step 306 / 359 | loss : 0.4232000410556793\n", "Epoch 4 | step 307 / 359 | loss : 0.5800895690917969\n", "Epoch 4 | step 308 / 359 | loss : 0.8270313739776611\n", "Epoch 4 | step 309 / 359 | loss : 0.738415002822876\n", "Epoch 4 | step 310 / 359 | loss : 0.6773646473884583\n", "Epoch 4 | step 311 / 359 | loss : 0.5123197436332703\n", "Epoch 4 | step 312 / 359 | loss : 0.42228561639785767\n", "Epoch 4 | step 313 / 359 | loss : 0.7067606449127197\n", "Epoch 4 | step 314 / 359 | loss : 0.30932679772377014\n", "Epoch 4 | step 315 / 359 | loss : 0.8496456742286682\n", "Epoch 4 | step 316 / 359 | loss : 0.5295753479003906\n", "Epoch 4 | step 317 / 359 | loss : 0.5178505182266235\n", "Epoch 4 | step 318 / 359 | loss : 0.5403149127960205\n", "Epoch 4 | step 319 / 359 | loss : 0.5357810258865356\n", "Epoch 4 | step 320 / 359 | loss : 0.5265540480613708\n", "Epoch 4 | step 321 / 359 | loss : 
0.6178824305534363\n", "Epoch 4 | step 322 / 359 | loss : 0.42328697443008423\n", "Epoch 4 | step 323 / 359 | loss : 0.44531697034835815\n", "Epoch 4 | step 324 / 359 | loss : 0.4007529616355896\n", "Epoch 4 | step 325 / 359 | loss : 0.1509433090686798\n", "Epoch 4 | step 326 / 359 | loss : 0.1910867691040039\n", "Epoch 4 | step 327 / 359 | loss : 0.38904717564582825\n", "Epoch 4 | step 328 / 359 | loss : 0.35359540581703186\n", "Epoch 4 | step 329 / 359 | loss : 0.5580013990402222\n", "Epoch 4 | step 330 / 359 | loss : 0.3450890779495239\n", "Epoch 4 | step 331 / 359 | loss : 0.7865393161773682\n", "Epoch 4 | step 332 / 359 | loss : 0.6598439812660217\n", "Epoch 4 | step 333 / 359 | loss : 0.3338422179222107\n", "Epoch 4 | step 334 / 359 | loss : 0.5669081211090088\n", "Epoch 4 | step 335 / 359 | loss : 0.3651413023471832\n", "Epoch 4 | step 336 / 359 | loss : 0.39071547985076904\n", "Epoch 4 | step 337 / 359 | loss : 0.8707458972930908\n", "Epoch 4 | step 338 / 359 | loss : 0.5867917537689209\n", "Epoch 4 | step 339 / 359 | loss : 0.7652719616889954\n", "Epoch 4 | step 340 / 359 | loss : 0.45785433053970337\n", "Epoch 4 | step 341 / 359 | loss : 0.29408377408981323\n", "Epoch 4 | step 342 / 359 | loss : 0.6626219153404236\n", "Epoch 4 | step 343 / 359 | loss : 0.5259003639221191\n", "Epoch 4 | step 344 / 359 | loss : 0.5454174280166626\n", "Epoch 4 | step 345 / 359 | loss : 0.2760079503059387\n", "Epoch 4 | step 346 / 359 | loss : 1.0857301950454712\n", "Epoch 4 | step 347 / 359 | loss : 0.46478724479675293\n", "Epoch 4 | step 348 / 359 | loss : 0.8140203952789307\n", "Epoch 4 | step 349 / 359 | loss : 0.2945897579193115\n", "Epoch 4 | step 350 / 359 | loss : 0.5503985285758972\n", "Epoch 4 | step 351 / 359 | loss : 0.8089784979820251\n", "Epoch 4 | step 352 / 359 | loss : 0.7245193719863892\n", "Epoch 4 | step 353 / 359 | loss : 0.4170621335506439\n", "Epoch 4 | step 354 / 359 | loss : 0.36673715710639954\n", "Epoch 4 | step 355 / 359 | loss : 0.6850064396858215\n", "Epoch 4 | step 356 / 359 | loss : 0.7988060116767883\n", "Epoch 4 | step 357 / 359 | loss : 0.6089465022087097\n", "Epoch 4 | step 358 / 359 | loss : 0.5224261283874512\n", "Epoch 4 | step 359 / 359 | loss : 0.13294337689876556\n", "Training Epoch n° 4\n", "Epoch 5 | step 1 / 359 | loss : 0.6890937685966492\n", "Epoch 5 | step 2 / 359 | loss : 0.41773098707199097\n", "Epoch 5 | step 3 / 359 | loss : 0.5810186862945557\n", "Epoch 5 | step 4 / 359 | loss : 0.6069818139076233\n", "Epoch 5 | step 5 / 359 | loss : 0.5630529522895813\n", "Epoch 5 | step 6 / 359 | loss : 0.743416965007782\n", "Epoch 5 | step 7 / 359 | loss : 0.690617024898529\n", "Epoch 5 | step 8 / 359 | loss : 0.471493661403656\n", "Epoch 5 | step 9 / 359 | loss : 0.7940378189086914\n", "Epoch 5 | step 10 / 359 | loss : 0.5831283330917358\n", "Epoch 5 | step 11 / 359 | loss : 0.37270617485046387\n", "Epoch 5 | step 12 / 359 | loss : 0.39503300189971924\n", "Epoch 5 | step 13 / 359 | loss : 0.6828255653381348\n", "Epoch 5 | step 14 / 359 | loss : 0.31552353501319885\n", "Epoch 5 | step 15 / 359 | loss : 0.5014876127243042\n", "Epoch 5 | step 16 / 359 | loss : 0.5301109552383423\n", "Epoch 5 | step 17 / 359 | loss : 0.2793719470500946\n", "Epoch 5 | step 18 / 359 | loss : 0.6716868281364441\n", "Epoch 5 | step 19 / 359 | loss : 0.5706344842910767\n", "Epoch 5 | step 20 / 359 | loss : 0.6320375204086304\n", "Epoch 5 | step 21 / 359 | loss : 0.5166906714439392\n", "Epoch 5 | step 22 / 359 | loss : 0.47423723340034485\n", "Epoch 5 | step 23 / 359 | loss : 
0.24714064598083496\n", "Epoch 5 | step 24 / 359 | loss : 0.3377956748008728\n", "Epoch 5 | step 25 / 359 | loss : 0.5117562413215637\n", "Epoch 5 | step 26 / 359 | loss : 0.6851857304573059\n", "Epoch 5 | step 27 / 359 | loss : 0.29111456871032715\n", "Epoch 5 | step 28 / 359 | loss : 0.7694495320320129\n", "Epoch 5 | step 29 / 359 | loss : 0.6006689667701721\n", "Epoch 5 | step 30 / 359 | loss : 0.36785900592803955\n", "Epoch 5 | step 31 / 359 | loss : 0.6781489253044128\n", "Epoch 5 | step 32 / 359 | loss : 0.52740877866745\n", "Epoch 5 | step 33 / 359 | loss : 0.37350547313690186\n", "Epoch 5 | step 34 / 359 | loss : 0.7707896828651428\n", "Epoch 5 | step 35 / 359 | loss : 0.4284389019012451\n", "Epoch 5 | step 36 / 359 | loss : 0.34838569164276123\n", "Epoch 5 | step 37 / 359 | loss : 0.6159989833831787\n", "Epoch 5 | step 38 / 359 | loss : 0.309887558221817\n", "Epoch 5 | step 39 / 359 | loss : 0.787958025932312\n", "Epoch 5 | step 40 / 359 | loss : 0.7039900422096252\n", "Epoch 5 | step 41 / 359 | loss : 0.8565507531166077\n", "Epoch 5 | step 42 / 359 | loss : 0.5352646112442017\n", "Epoch 5 | step 43 / 359 | loss : 0.6152167320251465\n", "Epoch 5 | step 44 / 359 | loss : 0.36440032720565796\n", "Epoch 5 | step 45 / 359 | loss : 0.5788359642028809\n", "Epoch 5 | step 46 / 359 | loss : 0.4329127371311188\n", "Epoch 5 | step 47 / 359 | loss : 0.6387036442756653\n", "Epoch 5 | step 48 / 359 | loss : 0.5131040215492249\n", "Epoch 5 | step 49 / 359 | loss : 0.3546302914619446\n", "Epoch 5 | step 50 / 359 | loss : 0.5535198450088501\n", "Epoch 5 | step 51 / 359 | loss : 0.4185803532600403\n", "Epoch 5 | step 52 / 359 | loss : 0.5943914651870728\n", "Epoch 5 | step 53 / 359 | loss : 0.6919261813163757\n", "Epoch 5 | step 54 / 359 | loss : 0.5121871829032898\n", "Epoch 5 | step 55 / 359 | loss : 0.6394711136817932\n", "Epoch 5 | step 56 / 359 | loss : 0.4240040183067322\n", "Epoch 5 | step 57 / 359 | loss : 1.0071691274642944\n", "Epoch 5 | step 58 / 359 | loss : 0.5828624963760376\n", "Epoch 5 | step 59 / 359 | loss : 0.7174032926559448\n", "Epoch 5 | step 60 / 359 | loss : 0.49112313985824585\n", "Epoch 5 | step 61 / 359 | loss : 0.595212996006012\n", "Epoch 5 | step 62 / 359 | loss : 0.35469773411750793\n", "Epoch 5 | step 63 / 359 | loss : 0.23237675428390503\n", "Epoch 5 | step 64 / 359 | loss : 0.32630881667137146\n", "Epoch 5 | step 65 / 359 | loss : 0.6025553941726685\n", "Epoch 5 | step 66 / 359 | loss : 0.7556974291801453\n", "Epoch 5 | step 67 / 359 | loss : 0.3733444809913635\n", "Epoch 5 | step 68 / 359 | loss : 0.5636255145072937\n", "Epoch 5 | step 69 / 359 | loss : 0.5397895574569702\n", "Epoch 5 | step 70 / 359 | loss : 0.35626327991485596\n", "Epoch 5 | step 71 / 359 | loss : 0.9965929388999939\n", "Epoch 5 | step 72 / 359 | loss : 0.6860464215278625\n", "Epoch 5 | step 73 / 359 | loss : 0.43470045924186707\n", "Epoch 5 | step 74 / 359 | loss : 0.8957409858703613\n", "Epoch 5 | step 75 / 359 | loss : 0.9535013437271118\n", "Epoch 5 | step 76 / 359 | loss : 0.6921337246894836\n", "Epoch 5 | step 77 / 359 | loss : 0.6739645600318909\n", "Epoch 5 | step 78 / 359 | loss : 0.5825358033180237\n", "Epoch 5 | step 79 / 359 | loss : 0.717427134513855\n", "Epoch 5 | step 80 / 359 | loss : 0.6271607279777527\n", "Epoch 5 | step 81 / 359 | loss : 0.7028568387031555\n", "Epoch 5 | step 82 / 359 | loss : 0.4371265769004822\n", "Epoch 5 | step 83 / 359 | loss : 0.5678001046180725\n", "Epoch 5 | step 84 / 359 | loss : 0.6150687336921692\n", "Epoch 5 | step 85 / 359 | loss : 
0.4137924313545227\n", "Epoch 5 | step 86 / 359 | loss : 0.22421741485595703\n", "Epoch 5 | step 87 / 359 | loss : 0.5553796887397766\n", "Epoch 5 | step 88 / 359 | loss : 0.5579193830490112\n", "Epoch 5 | step 89 / 359 | loss : 0.49640437960624695\n", "Epoch 5 | step 90 / 359 | loss : 0.25191232562065125\n", "Epoch 5 | step 91 / 359 | loss : 0.6718919277191162\n", "Epoch 5 | step 92 / 359 | loss : 0.6365219354629517\n", "Epoch 5 | step 93 / 359 | loss : 0.2365250438451767\n", "Epoch 5 | step 94 / 359 | loss : 0.39772966504096985\n", "Epoch 5 | step 95 / 359 | loss : 0.7137119174003601\n", "Epoch 5 | step 96 / 359 | loss : 0.4325517416000366\n", "Epoch 5 | step 97 / 359 | loss : 1.0982767343521118\n", "Epoch 5 | step 98 / 359 | loss : 0.6929500102996826\n", "Epoch 5 | step 99 / 359 | loss : 0.7032517790794373\n", "Epoch 5 | step 100 / 359 | loss : 0.44335007667541504\n", "Epoch 5 | step 101 / 359 | loss : 0.7671558856964111\n", "Epoch 5 | step 102 / 359 | loss : 0.8702166080474854\n", "Epoch 5 | step 103 / 359 | loss : 0.6650075316429138\n", "Epoch 5 | step 104 / 359 | loss : 0.4287559390068054\n", "Epoch 5 | step 105 / 359 | loss : 0.30514201521873474\n", "Epoch 5 | step 106 / 359 | loss : 0.6226436495780945\n", "Epoch 5 | step 107 / 359 | loss : 0.7104206681251526\n", "Epoch 5 | step 108 / 359 | loss : 0.6430006623268127\n", "Epoch 5 | step 109 / 359 | loss : 0.4889525771141052\n", "Epoch 5 | step 110 / 359 | loss : 0.3193940222263336\n", "Epoch 5 | step 111 / 359 | loss : 0.8222076892852783\n", "Epoch 5 | step 112 / 359 | loss : 0.5555716753005981\n", "Epoch 5 | step 113 / 359 | loss : 0.5604231357574463\n", "Epoch 5 | step 114 / 359 | loss : 0.3180747926235199\n", "Epoch 5 | step 115 / 359 | loss : 0.380447655916214\n", "Epoch 5 | step 116 / 359 | loss : 0.37518975138664246\n", "Epoch 5 | step 117 / 359 | loss : 0.417123407125473\n", "Epoch 5 | step 118 / 359 | loss : 0.3855883777141571\n", "Epoch 5 | step 119 / 359 | loss : 0.737647294998169\n", "Epoch 5 | step 120 / 359 | loss : 0.6985689401626587\n", "Epoch 5 | step 121 / 359 | loss : 0.36497753858566284\n", "Epoch 5 | step 122 / 359 | loss : 0.3750423192977905\n", "Epoch 5 | step 123 / 359 | loss : 0.6454333662986755\n", "Epoch 5 | step 124 / 359 | loss : 0.7226238250732422\n", "Epoch 5 | step 125 / 359 | loss : 0.47227638959884644\n", "Epoch 5 | step 126 / 359 | loss : 0.6074947118759155\n", "Epoch 5 | step 127 / 359 | loss : 0.7100797891616821\n", "Epoch 5 | step 128 / 359 | loss : 0.172258660197258\n", "Epoch 5 | step 129 / 359 | loss : 0.4567517936229706\n", "Epoch 5 | step 130 / 359 | loss : 0.4577532112598419\n", "Epoch 5 | step 131 / 359 | loss : 0.48527365922927856\n", "Epoch 5 | step 132 / 359 | loss : 0.6068864464759827\n", "Epoch 5 | step 133 / 359 | loss : 0.26967960596084595\n", "Epoch 5 | step 134 / 359 | loss : 0.5720950961112976\n", "Epoch 5 | step 135 / 359 | loss : 0.6644923686981201\n", "Epoch 5 | step 136 / 359 | loss : 0.4734307825565338\n", "Epoch 5 | step 137 / 359 | loss : 0.34137392044067383\n", "Epoch 5 | step 138 / 359 | loss : 0.697024941444397\n", "Epoch 5 | step 139 / 359 | loss : 0.2464621663093567\n", "Epoch 5 | step 140 / 359 | loss : 0.6969866752624512\n", "Epoch 5 | step 141 / 359 | loss : 0.9200045466423035\n", "Epoch 5 | step 142 / 359 | loss : 0.6639581322669983\n", "Epoch 5 | step 143 / 359 | loss : 0.2890852093696594\n", "Epoch 5 | step 144 / 359 | loss : 0.7452632784843445\n", "Epoch 5 | step 145 / 359 | loss : 0.5201159119606018\n", "Epoch 5 | step 146 / 359 | loss : 0.5096035003662109\n", 
"Epoch 5 | step 147 / 359 | loss : 0.4933430850505829\n", "Epoch 5 | step 148 / 359 | loss : 0.5394662618637085\n", "Epoch 5 | step 149 / 359 | loss : 0.5954113602638245\n", "Epoch 5 | step 150 / 359 | loss : 0.4741590917110443\n", "Epoch 5 | step 151 / 359 | loss : 0.3819828927516937\n", "Epoch 5 | step 152 / 359 | loss : 1.147247552871704\n", "Epoch 5 | step 153 / 359 | loss : 0.543906033039093\n", "Epoch 5 | step 154 / 359 | loss : 0.2696515619754791\n", "Epoch 5 | step 155 / 359 | loss : 0.8000929951667786\n", "Epoch 5 | step 156 / 359 | loss : 0.33503296971321106\n", "Epoch 5 | step 157 / 359 | loss : 0.47647762298583984\n", "Epoch 5 | step 158 / 359 | loss : 0.5937966108322144\n", "Epoch 5 | step 159 / 359 | loss : 0.7667871713638306\n", "Epoch 5 | step 160 / 359 | loss : 0.7477221488952637\n", "Epoch 5 | step 161 / 359 | loss : 0.5300925970077515\n", "Epoch 5 | step 162 / 359 | loss : 0.7961193323135376\n", "Epoch 5 | step 163 / 359 | loss : 0.6141793131828308\n", "Epoch 5 | step 164 / 359 | loss : 0.7137416005134583\n", "Epoch 5 | step 165 / 359 | loss : 0.4619899392127991\n", "Epoch 5 | step 166 / 359 | loss : 0.660707950592041\n", "Epoch 5 | step 167 / 359 | loss : 0.8214199542999268\n", "Epoch 5 | step 168 / 359 | loss : 0.5749958753585815\n", "Epoch 5 | step 169 / 359 | loss : 0.6679018139839172\n", "Epoch 5 | step 170 / 359 | loss : 0.6316142082214355\n", "Epoch 5 | step 171 / 359 | loss : 0.6020323634147644\n", "Epoch 5 | step 172 / 359 | loss : 0.48769375681877136\n", "Epoch 5 | step 173 / 359 | loss : 0.4953238070011139\n", "Epoch 5 | step 174 / 359 | loss : 0.4439656138420105\n", "Epoch 5 | step 175 / 359 | loss : 0.32572299242019653\n", "Epoch 5 | step 176 / 359 | loss : 0.7198874950408936\n", "Epoch 5 | step 177 / 359 | loss : 0.19425973296165466\n", "Epoch 5 | step 178 / 359 | loss : 0.8211824893951416\n", "Epoch 5 | step 179 / 359 | loss : 0.5186080932617188\n", "Epoch 5 | step 180 / 359 | loss : 0.5318098664283752\n", "Epoch 5 | step 181 / 359 | loss : 0.5451950430870056\n", "Epoch 5 | step 182 / 359 | loss : 0.6354038715362549\n", "Epoch 5 | step 183 / 359 | loss : 0.657254159450531\n", "Epoch 5 | step 184 / 359 | loss : 0.6860841512680054\n", "Epoch 5 | step 185 / 359 | loss : 0.370220422744751\n", "Epoch 5 | step 186 / 359 | loss : 0.8425744771957397\n", "Epoch 5 | step 187 / 359 | loss : 0.3825719356536865\n", "Epoch 5 | step 188 / 359 | loss : 0.6366987824440002\n", "Epoch 5 | step 189 / 359 | loss : 0.7244680523872375\n", "Epoch 5 | step 190 / 359 | loss : 0.548956573009491\n", "Epoch 5 | step 191 / 359 | loss : 0.7736207246780396\n", "Epoch 5 | step 192 / 359 | loss : 0.6857976913452148\n", "Epoch 5 | step 193 / 359 | loss : 0.48584914207458496\n", "Epoch 5 | step 194 / 359 | loss : 0.8651173114776611\n", "Epoch 5 | step 195 / 359 | loss : 0.42054516077041626\n", "Epoch 5 | step 196 / 359 | loss : 0.39980703592300415\n", "Epoch 5 | step 197 / 359 | loss : 0.4385637044906616\n", "Epoch 5 | step 198 / 359 | loss : 0.7236454486846924\n", "Epoch 5 | step 199 / 359 | loss : 0.7375848293304443\n", "Epoch 5 | step 200 / 359 | loss : 0.5197281837463379\n", "Epoch 5 | step 201 / 359 | loss : 0.4482142925262451\n", "Epoch 5 | step 202 / 359 | loss : 0.6413425207138062\n", "Epoch 5 | step 203 / 359 | loss : 0.619426429271698\n", "Epoch 5 | step 204 / 359 | loss : 0.575221598148346\n", "Epoch 5 | step 205 / 359 | loss : 0.9112139344215393\n", "Epoch 5 | step 206 / 359 | loss : 0.5193901062011719\n", "Epoch 5 | step 207 / 359 | loss : 0.4025103449821472\n", "Epoch 5 | step 
208 / 359 | loss : 0.3653140068054199\n", "Epoch 5 | step 209 / 359 | loss : 0.6252807974815369\n", "Epoch 5 | step 210 / 359 | loss : 0.4066406488418579\n", "Epoch 5 | step 211 / 359 | loss : 0.6271090507507324\n", "Epoch 5 | step 212 / 359 | loss : 1.0008506774902344\n", "Epoch 5 | step 213 / 359 | loss : 0.4657682180404663\n", "Epoch 5 | step 214 / 359 | loss : 0.6716334819793701\n", "Epoch 5 | step 215 / 359 | loss : 0.5756651163101196\n", "Epoch 5 | step 216 / 359 | loss : 0.4476677179336548\n", "Epoch 5 | step 217 / 359 | loss : 0.7775046825408936\n", "Epoch 5 | step 218 / 359 | loss : 0.8086909055709839\n", "Epoch 5 | step 219 / 359 | loss : 0.5726574659347534\n", "Epoch 5 | step 220 / 359 | loss : 0.47992217540740967\n", "Epoch 5 | step 221 / 359 | loss : 0.37286368012428284\n", "Epoch 5 | step 222 / 359 | loss : 0.9171247482299805\n", "Epoch 5 | step 223 / 359 | loss : 0.46970877051353455\n", "Epoch 5 | step 224 / 359 | loss : 0.5567471385002136\n", "Epoch 5 | step 225 / 359 | loss : 0.355701744556427\n", "Epoch 5 | step 226 / 359 | loss : 0.34834781289100647\n", "Epoch 5 | step 227 / 359 | loss : 0.38251984119415283\n", "Epoch 5 | step 228 / 359 | loss : 0.5725654363632202\n", "Epoch 5 | step 229 / 359 | loss : 0.5568553805351257\n", "Epoch 5 | step 230 / 359 | loss : 0.6275726556777954\n", "Epoch 5 | step 231 / 359 | loss : 0.4340502619743347\n", "Epoch 5 | step 232 / 359 | loss : 0.6980936527252197\n", "Epoch 5 | step 233 / 359 | loss : 0.7200913429260254\n", "Epoch 5 | step 234 / 359 | loss : 0.5051522850990295\n", "Epoch 5 | step 235 / 359 | loss : 0.5792745351791382\n", "Epoch 5 | step 236 / 359 | loss : 0.5486299395561218\n", "Epoch 5 | step 237 / 359 | loss : 0.4761613607406616\n", "Epoch 5 | step 238 / 359 | loss : 0.7010300159454346\n", "Epoch 5 | step 239 / 359 | loss : 0.7013839483261108\n", "Epoch 5 | step 240 / 359 | loss : 0.3315306603908539\n", "Epoch 5 | step 241 / 359 | loss : 0.6217252016067505\n", "Epoch 5 | step 242 / 359 | loss : 0.49374035000801086\n", "Epoch 5 | step 243 / 359 | loss : 0.5746185183525085\n", "Epoch 5 | step 244 / 359 | loss : 0.9323083758354187\n", "Epoch 5 | step 245 / 359 | loss : 0.4467077851295471\n", "Epoch 5 | step 246 / 359 | loss : 0.5704219937324524\n", "Epoch 5 | step 247 / 359 | loss : 0.3901069462299347\n", "Epoch 5 | step 248 / 359 | loss : 0.3837338089942932\n", "Epoch 5 | step 249 / 359 | loss : 0.4082445204257965\n", "Epoch 5 | step 250 / 359 | loss : 0.8118282556533813\n", "Epoch 5 | step 251 / 359 | loss : 0.49787870049476624\n", "Epoch 5 | step 252 / 359 | loss : 0.5747211575508118\n", "Epoch 5 | step 253 / 359 | loss : 0.5390498638153076\n", "Epoch 5 | step 254 / 359 | loss : 0.8444603085517883\n", "Epoch 5 | step 255 / 359 | loss : 0.5802822709083557\n", "Epoch 5 | step 256 / 359 | loss : 0.5085899829864502\n", "Epoch 5 | step 257 / 359 | loss : 0.875558614730835\n", "Epoch 5 | step 258 / 359 | loss : 0.39608752727508545\n", "Epoch 5 | step 259 / 359 | loss : 0.42947694659233093\n", "Epoch 5 | step 260 / 359 | loss : 0.4010148346424103\n", "Epoch 5 | step 261 / 359 | loss : 0.5200693607330322\n", "Epoch 5 | step 262 / 359 | loss : 0.31202781200408936\n", "Epoch 5 | step 263 / 359 | loss : 0.40361422300338745\n", "Epoch 5 | step 264 / 359 | loss : 0.4561455249786377\n", "Epoch 5 | step 265 / 359 | loss : 0.43284541368484497\n", "Epoch 5 | step 266 / 359 | loss : 0.45928651094436646\n", "Epoch 5 | step 267 / 359 | loss : 0.44300657510757446\n", "Epoch 5 | step 268 / 359 | loss : 0.681822657585144\n", "Epoch 5 | step 269 / 
359 | loss : 0.6477993726730347\n", "Epoch 5 | step 270 / 359 | loss : 0.3806605935096741\n", "Epoch 5 | step 271 / 359 | loss : 0.7229161262512207\n", "Epoch 5 | step 272 / 359 | loss : 0.36386942863464355\n", "Epoch 5 | step 273 / 359 | loss : 0.49419355392456055\n", "Epoch 5 | step 274 / 359 | loss : 0.6384040117263794\n", "Epoch 5 | step 275 / 359 | loss : 0.26484012603759766\n", "Epoch 5 | step 276 / 359 | loss : 0.4088018238544464\n", "Epoch 5 | step 277 / 359 | loss : 0.7537527680397034\n", "Epoch 5 | step 278 / 359 | loss : 0.3987102806568146\n", "Epoch 5 | step 279 / 359 | loss : 0.9682834148406982\n", "Epoch 5 | step 280 / 359 | loss : 0.4449518918991089\n", "Epoch 5 | step 281 / 359 | loss : 0.44708192348480225\n", "Epoch 5 | step 282 / 359 | loss : 0.28126633167266846\n", "Epoch 5 | step 283 / 359 | loss : 0.4916335642337799\n", "Epoch 5 | step 284 / 359 | loss : 0.7257509231567383\n", "Epoch 5 | step 285 / 359 | loss : 0.488421767950058\n", "Epoch 5 | step 286 / 359 | loss : 0.32843250036239624\n", "Epoch 5 | step 287 / 359 | loss : 0.28340545296669006\n", "Epoch 5 | step 288 / 359 | loss : 0.24835504591464996\n", "Epoch 5 | step 289 / 359 | loss : 0.6621408462524414\n", "Epoch 5 | step 290 / 359 | loss : 0.5760281085968018\n", "Epoch 5 | step 291 / 359 | loss : 0.46710628271102905\n", "Epoch 5 | step 292 / 359 | loss : 0.44043973088264465\n", "Epoch 5 | step 293 / 359 | loss : 0.4998098313808441\n", "Epoch 5 | step 294 / 359 | loss : 0.41770628094673157\n", "Epoch 5 | step 295 / 359 | loss : 0.41480550169944763\n", "Epoch 5 | step 296 / 359 | loss : 0.38136938214302063\n", "Epoch 5 | step 297 / 359 | loss : 0.6768234372138977\n", "Epoch 5 | step 298 / 359 | loss : 0.23349380493164062\n", "Epoch 5 | step 299 / 359 | loss : 0.5486599206924438\n", "Epoch 5 | step 300 / 359 | loss : 0.8196735978126526\n", "Epoch 5 | step 301 / 359 | loss : 0.5262584686279297\n", "Epoch 5 | step 302 / 359 | loss : 0.47486937046051025\n", "Epoch 5 | step 303 / 359 | loss : 0.44482630491256714\n", "Epoch 5 | step 304 / 359 | loss : 0.6735386848449707\n", "Epoch 5 | step 305 / 359 | loss : 0.6177557110786438\n", "Epoch 5 | step 306 / 359 | loss : 0.4739852845668793\n", "Epoch 5 | step 307 / 359 | loss : 0.6940737962722778\n", "Epoch 5 | step 308 / 359 | loss : 0.39578208327293396\n", "Epoch 5 | step 309 / 359 | loss : 0.463655948638916\n", "Epoch 5 | step 310 / 359 | loss : 0.6771460175514221\n", "Epoch 5 | step 311 / 359 | loss : 0.6777963638305664\n", "Epoch 5 | step 312 / 359 | loss : 0.3491297662258148\n", "Epoch 5 | step 313 / 359 | loss : 0.546493649482727\n", "Epoch 5 | step 314 / 359 | loss : 0.41079458594322205\n", "Epoch 5 | step 315 / 359 | loss : 0.6999017000198364\n", "Epoch 5 | step 316 / 359 | loss : 0.45699048042297363\n", "Epoch 5 | step 317 / 359 | loss : 0.5934231281280518\n", "Epoch 5 | step 318 / 359 | loss : 0.5284858345985413\n", "Epoch 5 | step 319 / 359 | loss : 0.8978694677352905\n", "Epoch 5 | step 320 / 359 | loss : 0.47603875398635864\n", "Epoch 5 | step 321 / 359 | loss : 0.5330944657325745\n", "Epoch 5 | step 322 / 359 | loss : 0.6395842432975769\n", "Epoch 5 | step 323 / 359 | loss : 0.4547261595726013\n", "Epoch 5 | step 324 / 359 | loss : 0.36656278371810913\n", "Epoch 5 | step 325 / 359 | loss : 0.5807834267616272\n", "Epoch 5 | step 326 / 359 | loss : 0.5919549465179443\n", "Epoch 5 | step 327 / 359 | loss : 0.5825574994087219\n", "Epoch 5 | step 328 / 359 | loss : 0.7745530605316162\n", "Epoch 5 | step 329 / 359 | loss : 0.4457758367061615\n", "Epoch 5 | step 330 
/ 359 | loss : 0.6896244287490845\n", "Epoch 5 | step 331 / 359 | loss : 0.6200140714645386\n", "Epoch 5 | step 332 / 359 | loss : 0.5697098970413208\n", "Epoch 5 | step 333 / 359 | loss : 0.4179951548576355\n", "Epoch 5 | step 334 / 359 | loss : 0.6015208959579468\n", "Epoch 5 | step 335 / 359 | loss : 0.5406888127326965\n", "Epoch 5 | step 336 / 359 | loss : 0.5856372117996216\n", "Epoch 5 | step 337 / 359 | loss : 0.632480263710022\n", "Epoch 5 | step 338 / 359 | loss : 0.3438686728477478\n", "Epoch 5 | step 339 / 359 | loss : 0.2908724844455719\n", "Epoch 5 | step 340 / 359 | loss : 0.7376210689544678\n", "Epoch 5 | step 341 / 359 | loss : 0.5387629866600037\n", "Epoch 5 | step 342 / 359 | loss : 0.33194419741630554\n", "Epoch 5 | step 343 / 359 | loss : 0.5197245478630066\n", "Epoch 5 | step 344 / 359 | loss : 0.5835251212120056\n", "Epoch 5 | step 345 / 359 | loss : 0.4507204294204712\n", "Epoch 5 | step 346 / 359 | loss : 0.5495316982269287\n", "Epoch 5 | step 347 / 359 | loss : 0.34266141057014465\n", "Epoch 5 | step 348 / 359 | loss : 0.7577629089355469\n", "Epoch 5 | step 349 / 359 | loss : 0.587293803691864\n", "Epoch 5 | step 350 / 359 | loss : 0.6759886145591736\n", "Epoch 5 | step 351 / 359 | loss : 0.23549827933311462\n", "Epoch 5 | step 352 / 359 | loss : 0.4909011125564575\n", "Epoch 5 | step 353 / 359 | loss : 0.7525323629379272\n", "Epoch 5 | step 354 / 359 | loss : 0.4407119154930115\n", "Epoch 5 | step 355 / 359 | loss : 0.3086337447166443\n", "Epoch 5 | step 356 / 359 | loss : 0.6504732966423035\n", "Epoch 5 | step 357 / 359 | loss : 0.8242849707603455\n", "Epoch 5 | step 358 / 359 | loss : 0.4627362787723541\n", "Epoch 5 | step 359 / 359 | loss : 0.6060939431190491\n", "Custom BERT Model saved at custom_bert_model.bin\n", "Model loaded from path :custom_bert_model.bin\n", "Step 1 / 215 | Eval loss : 0.6606608629226685\n", "Step 2 / 215 | Eval loss : 0.940995454788208\n", "Step 3 / 215 | Eval loss : 0.4735192656517029\n", "Step 4 / 215 | Eval loss : 0.18805256485939026\n", "Step 5 / 215 | Eval loss : 0.7046387195587158\n", "Step 6 / 215 | Eval loss : 0.6991875767707825\n", "Step 7 / 215 | Eval loss : 0.5379126667976379\n", "Step 8 / 215 | Eval loss : 0.6306701898574829\n", "Step 9 / 215 | Eval loss : 0.6071992516517639\n", "Step 10 / 215 | Eval loss : 0.46820664405822754\n", "Step 11 / 215 | Eval loss : 0.7413480877876282\n", "Step 12 / 215 | Eval loss : 0.8910939693450928\n", "Step 13 / 215 | Eval loss : 0.6558029651641846\n", "Step 14 / 215 | Eval loss : 0.5070111155509949\n", "Step 15 / 215 | Eval loss : 0.5369452238082886\n", "Step 16 / 215 | Eval loss : 1.6437710523605347\n", "Step 17 / 215 | Eval loss : 0.9378615021705627\n", "Step 18 / 215 | Eval loss : 0.3516538739204407\n", "Step 19 / 215 | Eval loss : 0.4339148700237274\n", "Step 20 / 215 | Eval loss : 0.4886070787906647\n", "Step 21 / 215 | Eval loss : 1.1344634294509888\n", "Step 22 / 215 | Eval loss : 0.758878231048584\n", "Step 23 / 215 | Eval loss : 0.7197548747062683\n", "Step 24 / 215 | Eval loss : 0.29678967595100403\n", "Step 25 / 215 | Eval loss : 0.20891933143138885\n", "Step 26 / 215 | Eval loss : 1.1853114366531372\n", "Step 27 / 215 | Eval loss : 0.6912646293640137\n", "Step 28 / 215 | Eval loss : 0.6738522052764893\n", "Step 29 / 215 | Eval loss : 1.1897950172424316\n", "Step 30 / 215 | Eval loss : 0.2901351749897003\n", "Step 31 / 215 | Eval loss : 0.80014568567276\n", "Step 32 / 215 | Eval loss : 0.8643969297409058\n", "Step 33 / 215 | Eval loss : 0.7699019908905029\n", "Step 34 / 215 | Eval 
loss : 0.8179371356964111\n", "Step 35 / 215 | Eval loss : 0.8933157920837402\n", "Step 36 / 215 | Eval loss : 0.9829018115997314\n", "Step 37 / 215 | Eval loss : 0.4167900085449219\n", "Step 38 / 215 | Eval loss : 0.4125189483165741\n", "Step 39 / 215 | Eval loss : 1.01780366897583\n", "Step 40 / 215 | Eval loss : 0.49110254645347595\n", "Step 41 / 215 | Eval loss : 0.9671584367752075\n", "Step 42 / 215 | Eval loss : 0.5981038212776184\n", "Step 43 / 215 | Eval loss : 0.2745092511177063\n", "Step 44 / 215 | Eval loss : 0.4741087853908539\n", "Step 45 / 215 | Eval loss : 0.2900589108467102\n", "Step 46 / 215 | Eval loss : 0.7140235304832458\n", "Step 47 / 215 | Eval loss : 0.5328750014305115\n", "Step 48 / 215 | Eval loss : 0.3689824342727661\n", "Step 49 / 215 | Eval loss : 0.7666400671005249\n", "Step 50 / 215 | Eval loss : 0.3764069080352783\n", "Step 51 / 215 | Eval loss : 0.4676625728607178\n", "Step 52 / 215 | Eval loss : 0.3747645318508148\n", "Step 53 / 215 | Eval loss : 0.2915308177471161\n", "Step 54 / 215 | Eval loss : 0.4269636869430542\n", "Step 55 / 215 | Eval loss : 0.45585086941719055\n", "Step 56 / 215 | Eval loss : 0.8258201479911804\n", "Step 57 / 215 | Eval loss : 0.25589126348495483\n", "Step 58 / 215 | Eval loss : 0.6997331380844116\n", "Step 59 / 215 | Eval loss : 0.6933463215827942\n", "Step 60 / 215 | Eval loss : 1.0180388689041138\n", "Step 61 / 215 | Eval loss : 0.6255486011505127\n", "Step 62 / 215 | Eval loss : 0.3158257305622101\n", "Step 63 / 215 | Eval loss : 0.6153553128242493\n", "Step 64 / 215 | Eval loss : 0.44162753224372864\n", "Step 65 / 215 | Eval loss : 0.559870183467865\n", "Step 66 / 215 | Eval loss : 0.780640721321106\n", "Step 67 / 215 | Eval loss : 0.9667119979858398\n", "Step 68 / 215 | Eval loss : 1.1485122442245483\n", "Step 69 / 215 | Eval loss : 0.7712194323539734\n", "Step 70 / 215 | Eval loss : 0.6650707125663757\n", "Step 71 / 215 | Eval loss : 0.8182984590530396\n", "Step 72 / 215 | Eval loss : 0.9108543395996094\n", "Step 73 / 215 | Eval loss : 1.0534818172454834\n", "Step 74 / 215 | Eval loss : 0.4142661392688751\n", "Step 75 / 215 | Eval loss : 0.7974268794059753\n", "Step 76 / 215 | Eval loss : 0.6505253314971924\n", "Step 77 / 215 | Eval loss : 0.4818551540374756\n", "Step 78 / 215 | Eval loss : 0.5829604268074036\n", "Step 79 / 215 | Eval loss : 0.6951273679733276\n", "Step 80 / 215 | Eval loss : 1.265076756477356\n", "Step 81 / 215 | Eval loss : 0.41435933113098145\n", "Step 82 / 215 | Eval loss : 1.2804782390594482\n", "Step 83 / 215 | Eval loss : 0.47707974910736084\n", "Step 84 / 215 | Eval loss : 0.30460023880004883\n", "Step 85 / 215 | Eval loss : 0.7826817631721497\n", "Step 86 / 215 | Eval loss : 0.4140920639038086\n", "Step 87 / 215 | Eval loss : 0.5698394775390625\n", "Step 88 / 215 | Eval loss : 1.1413559913635254\n", "Step 89 / 215 | Eval loss : 0.4971219003200531\n", "Step 90 / 215 | Eval loss : 0.6268702149391174\n", "Step 91 / 215 | Eval loss : 0.5048957467079163\n", "Step 92 / 215 | Eval loss : 0.44694918394088745\n", "Step 93 / 215 | Eval loss : 0.3200993835926056\n", "Step 94 / 215 | Eval loss : 0.7690590023994446\n", "Step 95 / 215 | Eval loss : 0.6997581124305725\n", "Step 96 / 215 | Eval loss : 0.41971489787101746\n", "Step 97 / 215 | Eval loss : 0.8472976684570312\n", "Step 98 / 215 | Eval loss : 0.4737146198749542\n", "Step 99 / 215 | Eval loss : 0.5324667692184448\n", "Step 100 / 215 | Eval loss : 0.3365022838115692\n", "Step 101 / 215 | Eval loss : 0.41937434673309326\n", "Step 102 / 215 | Eval loss : 
0.6584456562995911\n", "Step 103 / 215 | Eval loss : 0.40995416045188904\n", "Step 104 / 215 | Eval loss : 0.5231168270111084\n", "Step 105 / 215 | Eval loss : 0.7536401748657227\n", "Step 106 / 215 | Eval loss : 0.5795460343360901\n", "Step 107 / 215 | Eval loss : 0.15765082836151123\n", "Step 108 / 215 | Eval loss : 0.16437526047229767\n", "Step 109 / 215 | Eval loss : 0.31037604808807373\n", "Step 110 / 215 | Eval loss : 0.5933796167373657\n", "Step 111 / 215 | Eval loss : 0.12163121998310089\n", "Step 112 / 215 | Eval loss : 0.25451189279556274\n", "Step 113 / 215 | Eval loss : 0.5189600586891174\n", "Step 114 / 215 | Eval loss : 0.8156670331954956\n", "Step 115 / 215 | Eval loss : 0.8359318375587463\n", "Step 116 / 215 | Eval loss : 0.4049523174762726\n", "Step 117 / 215 | Eval loss : 0.4407676160335541\n", "Step 118 / 215 | Eval loss : 0.6089163422584534\n", "Step 119 / 215 | Eval loss : 0.2682640850543976\n", "Step 120 / 215 | Eval loss : 0.3496963083744049\n", "Step 121 / 215 | Eval loss : 0.5874218344688416\n", "Step 122 / 215 | Eval loss : 0.31929585337638855\n", "Step 123 / 215 | Eval loss : 0.3496461808681488\n", "Step 124 / 215 | Eval loss : 0.24946044385433197\n", "Step 125 / 215 | Eval loss : 0.1575804501771927\n", "Step 126 / 215 | Eval loss : 0.2650514245033264\n", "Step 127 / 215 | Eval loss : 0.44977864623069763\n", "Step 128 / 215 | Eval loss : 0.35343241691589355\n", "Step 129 / 215 | Eval loss : 0.3032771944999695\n", "Step 130 / 215 | Eval loss : 0.2734353840351105\n", "Step 131 / 215 | Eval loss : 0.5242516994476318\n", "Step 132 / 215 | Eval loss : 0.21193639934062958\n", "Step 133 / 215 | Eval loss : 0.24046996235847473\n", "Step 134 / 215 | Eval loss : 0.14584071934223175\n", "Step 135 / 215 | Eval loss : 0.3700297176837921\n", "Step 136 / 215 | Eval loss : 0.3315545320510864\n", "Step 137 / 215 | Eval loss : 0.2787961959838867\n", "Step 138 / 215 | Eval loss : 0.3256147801876068\n", "Step 139 / 215 | Eval loss : 0.27142974734306335\n", "Step 140 / 215 | Eval loss : 0.11580347269773483\n", "Step 141 / 215 | Eval loss : 0.20189496874809265\n", "Step 142 / 215 | Eval loss : 0.4195190668106079\n", "Step 143 / 215 | Eval loss : 0.2015504240989685\n", "Step 144 / 215 | Eval loss : 0.2335728108882904\n", "Step 145 / 215 | Eval loss : 0.3954576253890991\n", "Step 146 / 215 | Eval loss : 0.18411581218242645\n", "Step 147 / 215 | Eval loss : 0.6000935435295105\n", "Step 148 / 215 | Eval loss : 0.33632534742355347\n", "Step 149 / 215 | Eval loss : 0.20703820884227753\n", "Step 150 / 215 | Eval loss : 0.14040318131446838\n", "Step 151 / 215 | Eval loss : 0.6776421070098877\n", "Step 152 / 215 | Eval loss : 0.4872628450393677\n", "Step 153 / 215 | Eval loss : 0.44609424471855164\n", "Step 154 / 215 | Eval loss : 0.6998865008354187\n", "Step 155 / 215 | Eval loss : 0.10464746505022049\n", "Step 156 / 215 | Eval loss : 0.5100151896476746\n", "Step 157 / 215 | Eval loss : 0.6910212635993958\n", "Step 158 / 215 | Eval loss : 0.24725009500980377\n", "Step 159 / 215 | Eval loss : 0.2387521117925644\n", "Step 160 / 215 | Eval loss : 0.5562507510185242\n", "Step 161 / 215 | Eval loss : 0.5274513959884644\n", "Step 162 / 215 | Eval loss : 0.6402338743209839\n", "Step 163 / 215 | Eval loss : 0.8098723292350769\n", "Step 164 / 215 | Eval loss : 0.27254608273506165\n", "Step 165 / 215 | Eval loss : 0.6093867421150208\n", "Step 166 / 215 | Eval loss : 0.35692739486694336\n", "Step 167 / 215 | Eval loss : 0.41693294048309326\n", "Step 168 / 215 | Eval loss : 0.62213534116745\n", "Step 169 
/ 215 | Eval loss : 0.4798803925514221\n", "Step 170 / 215 | Eval loss : 0.37978628277778625\n", "Step 171 / 215 | Eval loss : 0.3897610306739807\n", "Step 172 / 215 | Eval loss : 0.46640634536743164\n", "Step 173 / 215 | Eval loss : 0.7077266573905945\n", "Step 174 / 215 | Eval loss : 0.5597200393676758\n", "Step 175 / 215 | Eval loss : 0.5924436450004578\n", "Step 176 / 215 | Eval loss : 0.7516982555389404\n", "Step 177 / 215 | Eval loss : 1.1184933185577393\n", "Step 178 / 215 | Eval loss : 0.4607028365135193\n", "Step 179 / 215 | Eval loss : 0.5655089616775513\n", "Step 180 / 215 | Eval loss : 0.4212472140789032\n", "Step 181 / 215 | Eval loss : 0.5896470546722412\n", "Step 182 / 215 | Eval loss : 0.7037050724029541\n", "Step 183 / 215 | Eval loss : 0.22706402838230133\n", "Step 184 / 215 | Eval loss : 0.2094581127166748\n", "Step 185 / 215 | Eval loss : 0.29174113273620605\n", "Step 186 / 215 | Eval loss : 0.6499775648117065\n", "Step 187 / 215 | Eval loss : 0.6057150363922119\n", "Step 188 / 215 | Eval loss : 0.8856449723243713\n", "Step 189 / 215 | Eval loss : 0.49939483404159546\n", "Step 190 / 215 | Eval loss : 0.607839047908783\n", "Step 191 / 215 | Eval loss : 0.5950604677200317\n", "Step 192 / 215 | Eval loss : 0.8190675973892212\n", "Step 193 / 215 | Eval loss : 0.3950651288032532\n", "Step 194 / 215 | Eval loss : 0.525504469871521\n", "Step 195 / 215 | Eval loss : 0.6782089471817017\n", "Step 196 / 215 | Eval loss : 0.84128737449646\n", "Step 197 / 215 | Eval loss : 0.9220267534255981\n", "Step 198 / 215 | Eval loss : 0.6864020824432373\n", "Step 199 / 215 | Eval loss : 0.39921456575393677\n", "Step 200 / 215 | Eval loss : 0.49840855598449707\n", "Step 201 / 215 | Eval loss : 0.8691364526748657\n", "Step 202 / 215 | Eval loss : 0.6975659728050232\n", "Step 203 / 215 | Eval loss : 0.6259441375732422\n", "Step 204 / 215 | Eval loss : 0.674730658531189\n", "Step 205 / 215 | Eval loss : 0.4781358540058136\n", "Step 206 / 215 | Eval loss : 0.7725555300712585\n", "Step 207 / 215 | Eval loss : 0.7168688774108887\n", "Step 208 / 215 | Eval loss : 0.10718905925750732\n", "Step 209 / 215 | Eval loss : 0.753957211971283\n", "Step 210 / 215 | Eval loss : 0.5338475704193115\n", "Step 211 / 215 | Eval loss : 1.0818021297454834\n", "Step 212 / 215 | Eval loss : 0.4929424226284027\n", "Step 213 / 215 | Eval loss : 0.8976386785507202\n", "Step 214 / 215 | Eval loss : 0.8368238806724548\n", "Step 215 / 215 | Eval loss : 2.204969644546509\n", "Accuracy : 0.7807939287799183\n" ] }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAiQAAAHHCAYAAACPy0PBAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAbUlJREFUeJzt3XdYk9fbB/BvGAlLlmwHIriwuFtF3KK4Z+uuaN3FiVr1556496rWLVq3ddW9FbeodQ+UqiAuQERmzvuHL6kRMICJT8Tvp1euqznPyXnuDOKdsx6ZEEKAiIiISEIGUgdARERExISEiIiIJMeEhIiIiCTHhISIiIgkx4SEiIiIJMeEhIiIiCTHhISIiIgkx4SEiIiIJMeEhIiIiCTHhERH7t69i7p168LKygoymQzbt2/XavsPHz6ETCbDypUrtdru16xGjRqoUaOG1tqLi4tD165d4eTkBJlMhv79+2ut7a8FP2faJ5PJMGbMGJ20re2/gY8dPXoUMpkMR48e1dk56NuVqxOS+/fvo0ePHihcuDBMTExgaWkJHx8fzJkzB+/evdPpuf39/XHt2jVMnDgRa9asQYUKFXR6vi+pU6dOkMlksLS0zPB1vHv3LmQyGWQyGaZPn57t9p8+fYoxY8YgNDRUC9Hm3KRJk7By5Ur06tULa9aswc8//6zT8yUlJWHOnDkoW7YsLC0tYW1tjZIlS6J79+64deuWTs+9bt06zJ49W6fn0KU9e/bo7B950g59e48mTZqk9R+K9JlELrVr1y5hamoqrK2tRd++fcWSJUvE/PnzRZs2bYSxsbHo1q2bzs4dHx8vAIjhw4fr7BxKpVK8e/dOpKSk6OwcmfH39xdGRkbC0NBQbNiwId3x0aNHCxMTEwFATJs2Ldvtnz9/XgAQK1asyNbjEhMTRWJiYrbPl5mKFSsKHx8frbWnSaNGjYShoaHo0KGDWLBggZg9e7bo2bOnyJ8/f7Zfi+xq2LChcHV1TVcu5ecsOwICAsTX8nUGQIwePVonbWv7b+BjR44cEQDEkSNHsv1YfXuPzM3Nhb+/v9Rh0AeMJMyFdCYsLAxt2rSBq6srDh8+DGdnZ9WxgIAA3Lt3D7t379bZ+Z8/fw4AsLa21tk5ZDIZTExMdNa+JgqFAj4+Pli/fj1atWqldmzdunVo2LAhtmzZ8kViiY+Ph5mZGeRyuVbbjYqKgqenp9baS0lJgVKpzDDO8+fPY9euXZg4cSL+97//qR2bP38+oqOjtRZHdkj9OaPs0fbfANEXJXVGpAs9e/YUAMSpU6eyVD85OVmMGzdOFC5cWMjlcuHq6iqGDRsmEhIS1Oq5urqKhg0bihMnTojvv/9eKBQK4ebmJlatWqWqM3r0aAFA7Zb2y9Pf3z/DX6Fpj/nQ/v37hY+Pj7CyshLm5uaiaNGiYtiwYarjYWFhGfYiHDp0SFSpUkWYmZkJKysr0aRJE3Hjxo0Mz3f37l3h7+8vrKyshKWlpejUqZN4+/atxtfL399fmJubi5UrVwqFQiFev36tOnbu3DkBQGzZsiVdD8nLly/FwIEDxXfffSfMzc1Fnjx5RL169URoaKiqTtovsI9vac+zevXqomTJkuLChQuiatWqwtTUVPTr1091rHr16qq2OnbsKBQKRbrnX7duXWFtbS2ePHmS4fPLLIawsDAhhBDPnj0Tv/zyi3BwcBAKhUKUKlVKrFy5Uq2NtPdn2rRpYtasWaJw4cLCwMBAXL58OcNzrl+/XgAQR48e/cQr/5/Hjx+Lzp07CwcHByGXy4Wnp6dYtmxZhs9jw4YNYsKECSJfvnxCoVCIWrVqibt376rqVa9ePdPPbEafs7T3/9GjR6Jhw4bC3NxcuLi4iPnz5wshhLh69aqoWbOmMDMzEwULFhTBwcHp4n/9+rXo16+fyJ8/v5DL5cLd3V1MnjxZpKamZvga/v7776q/zwoVKohz586pxZPR+/Up27dvFw0aNBDOzs5CLpeLwoULi3HjxqXrCUr7vF2/fl3UqFFDmJqaChcXFzFlyhS1eomJiWLkyJGiXLlywtLSUpiZmYkqVaqIw4cPpzs3PughOXz4sAAgtm7dmq5ecHCwACBOnz4thBAiIiJCdOrUSeTLl0/I5XLh5OQkmjRpovpcpsX74d+AEELMnTtXeHp6qnqMy5cvn+F78rF///1XNG3aVJiZmQl7e3vRv39/sXfv3nQ9JMePHxc//vijKFCggJDL5SJ//vyif//+Ij4+XlVH03s0bdo04e3tLWxtbYWJiYkoV66c2LRpU7qYNH0vCiFEQkKCGDVqlHB3d1fFM3jwYLXv84xiYW+J9HJlD8nOnTtRuHBhVK5cOUv1u3btilWrVuHHH3/EwIEDcfbsWQQFBeHmzZvYtm2bWt179+7hxx9/RJcuXeDv74/ly5ejU6dOKF++PEqWLIkWLVrA2toaAwYMQNu2bdGgQQNYWFhkK/7r16+jUaNGKFWqFMaNGweFQoF79+7h1KlTn3zcwYMHUb9+fRQuXBhjxozBu3fvMG/ePPj4+ODSpUsoVKiQWv1WrVrBzc0NQUFBuHTpEv744w84ODhgypQpWYqzRYsW6NmzJ7Zu3YpffvkFwPvekeLFi6NcuXLp6j948ADbt2/HTz/9BDc3Nzx79gy///47qlevjhs3bsDFxQUlSpTAuHHjMGrUKHTv3h1Vq1YFALX38uXLl6hfvz7atGmDDh06wNHRMcP45syZg8OHD8Pf3x8hISEwNDTE77//jv3792PNmjVwcXHJ8HElSpTAmjVrMGDAAOTPnx8DBw4EANjb2+Pdu3eoUaMG7t27h969e8PNzQ2bNm1Cp06dEB0djX79+qm1tWLFCiQkJKB79+5QKBSwtbXN8Jyurq4AgODgYPj4+MDIKPM/zWfPnqFSpUqQyWTo3bs37O3t8ffff6NLly6IjY1NN/l28uTJMDAwwKBBgxATE4OpU6eiffv2OHv2LABg+PDhiImJwePHjzFr1iwA0PiZTU1NRf369VGtWjVMnToVwcHB6N27N8zNzTF8+HC0b98eLVq0wOLFi9GxY0d4e3vDzc0NwPsererVq+PJkyfo0aMHChYsiNOnT2PYsGGIiIhIN5dl3bp1ePPmDXr06AGZTIapU6eiRYsWePDgAYyNjdGjRw88ffoUBw4cwJo1az4Zd5qVK1fCwsICgYGBsLCwwOHDhzFq1CjExsZi2rRpanVfv36NevXqoUWLFmjVqhU2b96MIUOGwMvLC/Xr1wcAxMbG4o8//kDbtm3RrVs3vHnzBsuWLYOfnx/OnTuHMmXKZBhHjRo1UKBAAQQHB6N58+Zqx4KDg+Hu7g5vb28AQMuWLXH9+nX06dMHhQoVQlRUFA4cOIDw8PB0f9tpli5dir59++LHH39Ev379kJCQgKtXr+Ls2bNo165dpq/Pu3fvULt2bYSHh6Nv375wcXHBmjVrcPjw4XR1N23ahPj4ePTq1Qt58+bFuXPnMG/ePDx+/BibNm0CAI3v0Zw5c9CkSRO0b98eSUlJ+PPPP/HTTz9h165daNiwIYCsfS
8qlUo0adIEJ0+eRPfu3VGiRAlcu3YNs2bNwp07d1RzRtasWYOuXbvihx9+QPfu3QEA7u7umb4e9IVInRFpW0xMjAAgmjZtmqX6oaGhAoDo2rWrWvmgQYMEALVfOK6urgKAOH78uKosKipKKBQKMXDgQFXZh7/sPpTVHpJZs2YJAOL58+eZxp3RL9cyZcoIBwcH8fLlS1XZlStXhIGBgejYsWO68/3yyy9qbTZv3lzkzZs303N++DzMzc2FEEL8+OOPonbt2kIIIVJTU4WTk5MYO3Zshq9BQkKC2i/gtOehUCjEuHHjVGWfmkOS9mt+8eLFGR77+Nfhvn37BAAxYcIE8eDBA2FhYSGaNWum8TkK8V+P2Idmz54tAIi1a9eqypKSkoS3t7ewsLAQsbGxqucFQFhaWoqoqCiN51Iqlarn5ujoKNq2bSsWLFggHj16lK5uly5dhLOzs3jx4oVaeZs2bYSVlZXql2laD0mJEiXU5hXMmTNHABDXrl1TlWU2hySzHhIAYtKkSaqy169fC1NTUyGTycSff/6pKr9161a6ORPjx48X5ubm4s6dO2rnGjp0qDA0NBTh4eFq586bN6949eqVqt5ff/0lAIidO3eqyrI7P+HDX+9pevToIczMzNR+Sae9J6tXr1aVJSYmCicnJ9GyZUtVWUpKSrq5G69fvxaOjo7p/s4+fj2GDRsmFAqFiI6OVpVFRUUJIyMjVb3Xr19naU7Wx38DTZs2FSVLlvzkYzKS9jnfuHGjquzt27fCw8MjXQ9JRq9lUFCQkMlkap/fT71HH7eRlJQkvvvuO1GrVi1VWVa+F9esWSMMDAzEiRMn1MoXL16crtecc0j0T65bZRMbGwsAyJMnT5bq79mzBwAQGBioVp72q/jjuSaenp6qX+3A+1/NxYoVw4MHD3Ic88fS5p789ddfUCqVWXpMREQEQkND0alTJ7Vf4aVKlUKdOnVUz/NDPXv2VLtftWpVvHz5UvUaZkW7du1w9OhRREZG4vDhw4iMjMz0l5dCoYCBwfuPXGpqKl6+fAkLCwsUK1YMly5dyvI5FQoFOnfunKW6devWRY8ePTBu3Di0aNECJiYm+P3337N8ro/t2bMHTk5OaNu2rarM2NgYffv2RVxcHI4dO6ZWv2XLlrC3t9fYrkwmw759+zBhwgTY2Nhg/fr1CAgIgKurK1q3bq2aQyKEwJYtW9C4cWMIIfDixQvVzc/PDzExMeley86dO6vNLUj7/H7uZ7Zr166q/7e2tkaxYsVgbm6uNqeoWLFisLa2VjvXpk2bULVqVdjY2KjF7+vri9TUVBw/flztPK1bt4aNjY1W4zc1NVX9/5s3b/DixQtUrVoV8fHx6VY0WVhYoEOHDqr7crkcP/zwg9r5DQ0NVa+xUqnEq1evkJKSggoVKmj8bHfs2BGJiYnYvHmzqmzDhg1ISUlRndfU1BRyuRxHjx7F69evs/w8ra2t8fjxY5w/fz7LjwHef86dnZ3x448/qsrMzMxUvQkf+vC1fPv2LV68eIHKlStDCIHLly9n6XwftvH69WvExMSgatWqaq9dVr4XN23ahBIlSqB48eJqn61atWoBAI4cOZKleEgauS4hsbS0BPD+SyYrHj16BAMDA3h4eKiVOzk5wdraGo8ePVIrL1iwYLo2bGxssvUloUnr1q3h4+ODrl27wtHREW3atMHGjRs/mZykxVmsWLF0x0qUKIEXL17g7du3auUfP5e0L/3sPJcGDRogT5482LBhA4KDg/H999+ney3TKJVKzJo1C0WKFIFCoYCdnR3s7e1x9epVxMTEZPmc+fLly9bkvenTp8PW1hahoaGYO3cuHBwcsvzYjz169AhFihRRJVZpSpQooTr+obRhiqxQKBQYPnw4bt68iadPn2L9+vWoVKkSNm7ciN69ewN4P2E6OjoaS5Ysgb29vdotLUmLiopSa1cb7/PHTExM0iVaVlZWyJ8/P2QyWbryD8919+5d7N27N138vr6+Xyz+69evo3nz5rCysoKlpSXs7e1V//h//FnM6Dll9De/atUqlCpVCiYmJsibNy/s7e2xe/dujZ/t4sWL4/vvv0dwcLCqLDg4GJUqVVL9LSkUCkyZMgV///03HB0dVUNlkZGRn2x7yJAhsLCwwA8//IAiRYogICBA49Av8P5z7OHhke55Z/T9Eh4ervohZGFhAXt7e1SvXh1A+tcyM7t27UKlSpVgYmICW1tb2NvbY9GiRWqPz8r34t27d3H9+vV0n62iRYsCSP/ZIv2S6+aQWFpawsXFBf/880+2HvfxH15mDA0NMywXQuT4HKmpqWr3TU1Ncfz4cRw5cgS7d+/G3r17sWHDBtSqVQv79+/PNIbs+pznkkahUKBFixZYtWoVHjx48Ml9BiZNmoSRI0fil19+wfjx42FrawsDAwP0798/yz1BgPqvqay4fPmy6ovo2rVrar0bupbdWNM4OzujTZs2aNmyJUqWLImNGzdi5cqVqtepQ4cO8Pf3z/CxpUqVUruvjff5Y5m1mZVzKZVK1KlTB7/99luGddP+8chOm9kRHR2N6tWrw9LSEuPGjYO7uztMTExw6dIlDBkyJN1nMSvnX7t2LTp16oRmzZph8ODBcHBwgKGhIYKCgnD//n2NMXXs2BH9+vXD48ePkZiYiDNnzmD+/Plqdfr374/GjRtj+/bt2LdvH0aOHImgoCAcPnwYZcuWzbDdEiVK4Pbt29i1axf27t2LLVu2YOHChRg1ahTGjh2rMS5NUlNTUadOHbx69QpDhgxB8eLFYW5ujidPnqBTp05Z+rs+ceIEmjRpgmrVqmHhwoVwdnaGsbExVqxYgXXr1qnqZeV7UalUwsvLCzNnzszwXAUKFPjs50y6k+sSEgBo1KgRlixZgpCQENWEsMy4urpCqVTi7t27ql+5wPuJg9HR0arJhtpgY2OT4fLNj39VA4CBgQFq166N2rVrY+bMmZg0aRKGDx+OI0eOqH5Jfvw8AOD27dvpjt26dQt2dnYwNzf//CeRgXbt2mH58uUwMDBAmzZtMq23efNm1KxZE8uWLVMrj46Ohp2dnep+VpPDrHj79i06d+4MT09PVK5cGVOnTkXz5s3x/fff56g9V1dXXL16FUqlUq2XJK2bX5ufF+D9cFCpUqVw9+5dvHjxAvb29siTJw9SU1Mz/BzklDZfc03c3d0RFxcnWfxHjx7Fy5cvsXXrVlSrVk1VHhYWluPzb968GYULF8bWrVvVYhk9enSWHt+mTRsEBgZi/fr1ePfuHYyNjdG6det09dzd3TFw4EAMHDgQd+/eRZkyZTBjxgysXbs207bNzc3RunVrtG7dGklJSWjRogUmTpyIYcOGZbqk29XVFf/88w+EEGrP5+Pvl2vXruHOnTtYtWoVOnbsqCo/cOBAujYze4+2bNkCExMT7Nu3DwqFQlW+YsWKdHU1fS+6u7vjypUrqF27tsbPxJf8zFPW5LohGwD47bffYG5ujq5du+LZs2fpjt+/fx9z5swB8H7IAUC6mf1pGXbaDG9tcHd3R0xMDK5evaoqi4iIS
LeS59WrV+kemzZLPzExMcO2nZ2dUaZMGaxatUot6fnnn3+wf/9+1fPUhZo1a2L8+PGYP38+nJycMq1naGiY7lftpk2b8OTJE7WytMRJG3tvDBkyBOHh4Vi1ahVmzpyJQoUKwd/fP9PXUZMGDRogMjISGzZsUJWlpKRg3rx5sLCwUHVVZ9fdu3cRHh6erjw6OhohISGwsbGBvb09DA0N0bJlS2zZsiXDXsC0PXCyy9zcPFvDZp+jVatWCAkJwb59+9Idi46ORkpKSrbbzM5nJq3H48PPYlJSEhYuXJjt836qzbNnzyIkJCRLj7ezs0P9+vWxdu1aBAcHo169empJenx8PBISEtQe4+7ujjx58nzys/zy5Uu1+3K5HJ6enhBCIDk5OdPHNWjQAE+fPlWb1xIfH48lS5ao1cvoeQshVN+vH8rsPTI0NIRMJlPrKX748GG6XVSz8r3YqlUrPHnyBEuXLk1X9927d2rD1ubm5pLt70MZy5U9JO7u7li3bh1at26NEiVKoGPHjvjuu++QlJSE06dPq5ZpAkDp0qXh7++PJUuWqLpyz507h1WrVqFZs2aoWbOm1uJq06YNhgwZgubNm6Nv376Ij4/HokWLULRoUbXJW+PGjcPx48fRsGFDuLq6IioqCgsXLkT+/PlRpUqVTNufNm0a6tevD29vb3Tp0kW17NfKykqnWzYbGBhgxIgRGus1atQI48aNQ+fOnVG5cmVcu3YNwcHBKFy4sFo9d3d3WFtbY/HixciTJw/Mzc1RsWLFbM3HAIDDhw9j4cKFGD16tGoZ8ooVK1CjRg2MHDkSU6dOzVZ7ANC9e3f8/vvv6NSpEy5evIhChQph8+bNOHXqFGbPnp3lydQfu3LlCtq1a4f69eujatWqsLW1xZMnT7Bq1So8ffoUs2fPVn35T548GUeOHEHFihXRrVs3eHp64tWrV7h06RIOHjyY4Re3JuXLl8eGDRsQGBiI77//HhYWFmjcuHGOnosmgwcPxo4dO9CoUSPVkvm3b9/i2rVr2Lx5Mx4+fKj2j3FW4weAvn37ws/PD4aGhpn21lWuXBk2Njbw9/dH3759IZPJsGbNms8awmrUqBG2bt2K5s2bo2HDhggLC8PixYvh6emJuLi4LLXRsWNH1STS8ePHqx27c+cOateujVatWsHT0xNGRkbYtm0bnj179sleybp168LJyQk+Pj5wdHTEzZs3MX/+fDRs2PCTn9Vu3bph/vz56NixIy5evAhnZ2esWbMGZmZmavWKFy8Od3d3DBo0CE+ePIGlpSW2bNmS4fyezN6jhg0bYubMmahXrx7atWuHqKgoLFiwAB4eHmo/3rLyvfjzzz9j48aN6NmzJ44cOQIfHx+kpqbi1q1b2LhxI/bt26e6jEf58uVx8OBBzJw5Ey4uLnBzc0PFihU/9RaRrn3xdT1f0J07d0S3bt1EoUKFhFwuF3ny5BE+Pj5i3rx5akv7kpOTxdixY4Wbm5swNjYWBQoU+OTGaB/7eKldZst+hXi/sc93330n5HK5KFasmFi7dm26Zb+HDh0STZs2FS4uLkIulwsXFxfRtm1btWWSmW2MdvDgQeHj4yNMTU2FpaWlaNy4caYbo328fG7FihVqG4Bl5sNlv5nJbNnvwIEDhbOzszA1NRU+Pj4iJCQkw+W6f/31l/D09BRGRkYZboyWkQ/biY2NFa6urqJcuXIiOTlZrd6AAQOEgYGBCAkJ+eRzyOz9fvbsmejcubOws7MTcrlceHl5pXsfPvUZyMizZ8/E5MmTRfXq1YWzs7MwMjISNjY2olatWmLz5s0Z1g8ICBAFChQQxsbGwsnJSdSuXVssWbJEVSdt2e/HG0xl9NmJi4sT7dq1E9bW1lneGO1jmb03Gb2Ob968EcOGDRMeHh5CLpcLOzs7UblyZTF9+nSRlJSkdu6MXkN8tHQ2JSVF9OnTR9jb2wuZTKZxCfCpU6dEpUqVVBud/fbbb6ol4h8uac3sOX28hF+pVIpJkyYJV1dXoVAoRNmyZcWuXbsyXOr/cexpEhMThY2NjbCyshLv3r1TO/bixQsREBAgihcvLszNzYWVlZWoWLGi2rLctHg//Fv6/fffRbVq1UTevHmFQqEQ7u7uYvDgwSImJuaTr48QQjx69Eg0adJEmJmZCTs7O9GvX78MN0a7ceOG8PX1FRYWFsLOzk5069ZNXLlyJd3n5lPv0bJly0SRIkWEQqEQxYsXFytWrMjR96IQ75cMT5kyRZQsWVIoFAphY2MjypcvL8aOHav2vG/duiWqVasmTE1NuTGanpAJ8Rk/C4iISCtSUlLg4uKCxo0bp5tnRfQtyJVzSIiIvjbbt2/H8+fP1SaHEn1L2ENCRCShs2fP4urVqxg/fjzs7OyytUkgUW7CHhIiIgktWrQIvXr1goODA1avXi11OESSYQ8JERERSY49JERERCQ5JiREREQkOSYkREREJLlcuVOrkTyf1CGQnrE3s5I6BNIjz+O/zDb59HVISXqiudJnSn7xQCvtGNsV1lzpK8UeEiIiIpJcruwhISIi0ivKVM11vnFMSIiIiHRNKKWOQO8xISEiItI1JRMSTTiHhIiIiCTHHhIiIiIdExyy0YgJCRERka5xyEYjDtkQERGR5NhDQkREpGscstGICQkREZGucR8SjThkQ0RERJJjDwkREZGucchGIyYkREREusZVNhpxyIaIiIgkxx4SIiIiHePGaJoxISEiItI1DtloxISEiIhI19hDohHnkBAREZHk2ENCRESka9wYTSMmJERERLrGIRuNOGRDREREkmMPCRERka5xlY1GTEiIiIh0jUM2GnHIhoiIiCTHHhIiIiJd45CNRkxIiIiIdEwILvvVhEM2REREJDn2kBAREekaJ7VqxISEiIhI1ziHRCMmJERERLrGHhKNOIeEiIiIJMceEiIiIl3jxfU0YkJCRESkaxyy0UhvhmxOnDiBDh06wNvbG0+ePAEArFmzBidPnpQ4MiIiItI1vUhItmzZAj8/P5iamuLy5ctITEwEAMTExGDSpEkSR0dERPSZlErt3HIxvUhIJkyYgMWLF2Pp0qUwNjZWlfv4+ODSpUsSRkZERKQFQqmdWy6mFwnJ7du3Ua1atXTlVlZWiI6O/vIBERER0RelFwmJk5MT7t27l6785MmTKFy4sAQRERERaRGHbDTSi4SkW7du6NevH86ePQuZTIanT58iODgYgwYNQq9evaQOj4iI6PMwIdFIL5b9Dh06FEqlErVr10Z8fDyqVasGhUKBQYMGoU+fPlKHR0RERDomE0IIqYNIk5SUhHv37iEuLg6enp6wsLDIUTtG8nxajoy+dvZmVlKHQHrkeXyM1CGQHklJeqLzc7w7vlIr7ZhW66SVdvSRXvSQrF27Fi1atICZmRk8PT2lDoeIiEi7cvlwizboxRySAQMGwMHBAe3atcOePXuQmsotdomIKBfhsl+N9CIhiYiIwJ9//gmZTIZWrVrB2dkZAQEBOH36tNShERER0RegFwmJkZERGjVqhODgYERFRWHWrFl4+PAhatasCXd3
d6nDIyIi+jxcZaORXswh+ZCZmRn8/Pzw+vVrPHr0CDdv3pQ6JCIios+Ty4dbtEEvekgAID4+HsHBwWjQoAHy5cuH2bNno3nz5rh+/brUoREREZGO6UUPSZs2bbBr1y6YmZmhVatWGDlyJLy9vaUOi4iISDty+XCLNuhFQmJoaIiNGzfCz88PhoaGUodDRESkXRyy0UgvEpLg4GCpQyAiIiIJSZaQzJ07F927d4eJiQnmzp37ybp9+/b9QlERERHpAIdsNJJs63g3NzdcuHABefPmhZubW6b1ZDIZHjx4kK22uXU8fYxbx9OHuHU8feiLbB2/e7ZW2jFt2F8r7egjyXpIwsLCMvx/IiIi+vboxbLfcePGIT4+Pl35u3fvMG7cOAkiIiIi0iJuHa+RXlzt19DQEBEREXBwcFArf/nyJRwcHLJ9bRsO2dDHOGRDH+KQDX3oiwzZ7JiulXZMmwzSSjv6SC96SIQQkMlk6cqvXLkCW1tbCSL6+vXq6Y97d84gLvY+Tp/cie8rlJE6JNKBSpXLY9WfC3D55lFERN9AvYa109UpUrQwVq6fj9uPzuL+kwv4+/AG5MvvrDo+ddYYhFzeiwcRl/DPvZNYsW4+PIpkPq+Lvm6jRgYiJemJ2u2fa8ekDiv3Yw+JRpIu+7WxsYFMJoNMJkPRokXVkpLU1FTExcWhZ8+eEkb4dfrppyaYPm00fg0YinPnL6Nvn67YszsYnt9Vw/PnL6UOj7TIzMwMN67dxp9rt2L52nnpjrsWKoDte9di/ZotmB60AG9i41CshAcSEhJVda6GXsfWTTvx+HEEbGysMHBoAP7c+gd+KF0HSq4MyJX+uX4LfvXaqO6npKRIGA3Re5IO2axatQpCCPzyyy+YPXs2rKz+61aXy+UoVKhQjnZs/daHbE6f3InzF66gX/8RAN6vVHr44DwWLFyBqdMWSBydNL6FIZuI6Bvo3L4P9u4+pCpbtGw6UlJS0KfH0Cy3U6JkURw+tR2Vyvjh0cN/dRGq5L7lIZtRIwPRpEk9VPi+rtSh6I0vMmSzbbJW2jFtnvW/5a+NpD0k/v7+AN4vAa5cuTKMjY2lDCdXMDY2RrlypTB56nxVmRAChw6fRKVK5SWMjL40mUwG37rVsXDuMqzfsgTflSqB8EdPMG/WUrWk5UOmZqZo0745Hj38F0+fRH7hiOlLKeLhhvCHF5GQkIgzZy9i+Igg/PvvU6nDyt1y+XCLNujFHJLq1aurkpGEhATExsaq3Sjr7OxsYWRkhKhnL9TKo6Kew8nRXqKoSAp29nlhkcccvft3xZFDJ9GmRTf8vesglq2ZA2+fCmp1/bu0wb3HF/Dg6UXU8q2K1s26Ijk5WaLISZfOnbuMX7oOQMPGHdC7zzC4FSqIo4e3wcLCXOrQ6BunF1vHx8fH47fffsPGjRvx8mX6OQ6fWmWTmJiIxMREtbLMJskSfUsMDN7/DezdcxhLFq4GAFy/dgsVKpbBz51bI+TUBVXdrZt24fiREDg62aFnn85YsnImmvi1R2JikiSxk+7s3XdE9f/Xrt3E2XOX8eDeWfz0Y2OsWPmnhJHlcpyPpZFe9JAMHjwYhw8fxqJFi6BQKPDHH39g7NixcHFxwerVqz/52KCgIFhZWandhPLNF4pc/7x48QopKSlwcLRTK3dwsEfks+cSRUVSePUyGsnJybh7+75a+d3bD9RW2QDAm9g4hD14hDOnL6JbxwHwKOKG+o18v2S4JJGYmFjcufsAHh6FpA4ld1MqtXPLxfQiIdm5cycWLlyIli1bwsjICFWrVsWIESMwadIkjRfeGzZsGGJiYtRuMoM8Xyhy/ZOcnIxLl66iVs0qqjKZTIZaNavgzJmLEkZGX1pycjJCL/0D94+W8Lp7FMLjT8wXkMnef2bkCrmuQyQ9YG5uBvfCroiIiJI6FPrG6cWQzatXr1C4cGEAgKWlJV69egUAqFKlCnr16vXJxyoUCigUCrWyb324ZtacpVixbBYuXrqK8+cvo2+fbjA3N8XKVRukDo20zMzcDG6FC6ruF3TNh5JexRH9OgZPHkdg0bzlWLx8Js6cuoBTJ86hpm8V1KlXAy0bdfr/+vnRtEV9HDt8Ci9fvoaziyN6D+iKdwmJOLT/uETPinRp6uSR2LX7AB6FP4aLsxNGjxqI1FQl/tywXerQcjfp9yDVe3qRkBQuXBhhYWEoWLAgihcvjo0bN+KHH37Azp07YW1tLXV4X51Nm3bA3s4WY0YNgpOTPa5cuY6GjTogKuqF5gfTV6V02ZLYumuV6v7YSe+XBG5Ytw39fx2Ov3cdwpDAsegzoBvGT/kf7t97iK4d++PcmUsA3s/BquhdHt16/Qwrays8j3qBs6cvoknddnj54pUkz4l0K19+Z6xdswB589rg+fNXOHX6HHyqNsYLvt+6lcuHW7RBL7aOnzVrFgwNDdG3b18cPHgQjRs3hhACycnJmDlzJvr165et9r71fUgovW9hHxLKum95HxJK74vsQ7J+tFbaMW07Vivt6CO9mEMyYMAA9O3bFwDg6+uLW7duYd26dbh8+XK2kxEiIiK9I8Gk1qCgIHz//ffIkycPHBwc0KxZM9y+fVutTkJCAgICApA3b15YWFigZcuWePbsmVqd8PBwNGzYEGZmZnBwcMDgwYPT7e579OhRlCtXDgqFAh4eHli5cmW2XyK9SEg+5urqihYtWqBUqVJSh0JERPT5JLiWzbFjxxAQEIAzZ87gwIEDSE5ORt26dfH27VtVnQEDBmDnzp3YtGkTjh07hqdPn6JFixaq46mpqWjYsCGSkpJw+vRprFq1CitXrsSoUaNUdcLCwtCwYUPUrFkToaGh6N+/P7p27Yp9+/ZlK169GLKZO3duhuUymQwmJibw8PBAtWrVYGhomKX2OGRDH+OQDX2IQzb0oS8yZLN6mFbaMe0YlOPHPn/+HA4ODjh27BiqVauGmJgY2NvbY926dfjxxx8BALdu3UKJEiUQEhKCSpUq4e+//0ajRo3w9OlTODo6AgAWL16MIUOG4Pnz55DL5RgyZAh2796Nf/75R3WuNm3aIDo6Gnv37s1yfHoxqXXWrFl4/vw54uPjYWNjAwB4/fo1zMzMYGFhgaioKBQuXBhHjhxBgQIFJI6WiIhIGhltBprRatOMxMS8T8RtbW0BABcvXkRycjJ8ff/bc6h48eIoWLCgKiEJCQmBl5eXKhkBAD8/P/Tq1QvXr19H2bJlERISotZGWp3+/ftn67npxZDNpEmT8P333+Pu3bt4+fIlXr58iTt37qBixYqYM2cOwsPD4eTkhAEDBkgdKhERUfYJoZVbRpuBBgVp7jVRKpXo378/fHx88N133wEAIiMjIZfL061mdXR0RGRkpKrOh8lI2vG0Y5+qExsbi3fv3mX5JdKLHpIRI0Zgy5YtcHd3V5V5eHhg+vTpaNmyJR48eICpU6eiZcuWEkZJRESUQ1pa9jts2DAEBgaqlWWldyQgIAD//PMPTp48qZU4dEEvEpKIiIh0M3YBICUlRZWBubi44M2bb3dLeCIioqwOz3yod+/
e2LVrF44fP478+fOryp2cnJCUlITo6Gi1XpJnz57ByclJVefcuXNq7aWtwvmwzscrc549ewZLS0uYmppmOU69GLKpWbMmevTogcuXL6vKLl++jF69eqFWrVoAgGvXrsHNzS2zJoiIiPSXBMt+hRDo3bs3tm3bhsOHD6f7N7R8+fIwNjbGoUOHVGW3b99GeHg4vL29AQDe3t64du0aoqL+u7TAgQMHYGlpCU9PT1WdD9tIq5PWRlbpRUKybNky2Nraonz58qrsr0KFCrC1tcWyZcsAABYWFpgxY4bEkRIREeWABMt+AwICsHbtWqxbtw558uRBZGQkIiMjVfM6rKys0KVLFwQGBuLIkSO4ePEiOnfuDG9vb1SqVAkAULduXXh6euLnn3/GlStXsG/fPowYMQIBAQGqnpqePXviwYMH+O2333Dr1i0sXLgQGzduzPa8T71Y9pvm1q1buHPnDgCgWLFiKFasWI7a4bJf+hiX/dKHuOyXPvRFlv3+Eai5UhaYdp2Z5bqZXddtxYoV6NSpE4D3G6MNHDgQ69evR2JiIvz8/LBw4ULVcAwAPHr0CL169cLRo0dhbm4Of39/TJ48GUZG/836OHr0KAYMGIAbN24gf/78GDlypOocWY5XnxKSpKQkhIWFwd3dXe2JZhcTEvoYExL6EBMS+tCXSEjil2hnlahZ91laaUcf6cWQTXx8PLp06QIzMzOULFkS4eHhAIA+ffpg8uTJEkdHRET0mSSYQ/K10YuEZNiwYbhy5QqOHj0KExMTVbmvry82bNggYWRERET0JejFst/t27djw4YNqFSpktqYV8mSJXH//n0JIyMiItKCbE5I/RbpRUKStr/+x96+fZvppBwiIqKvhlJvpmvqLb0YsqlQoQJ2796tup+WhPzxxx/ZXsdMRESkdziHRCO96CGZNGkS6tevjxs3biAlJQVz5szBjRs3cPr0aRw7dkzq8IiIiEjH9KKHpEqVKggNDUVKSgq8vLywf/9+ODg4ICQkBOXLl5c6PCIios/DHhKN9KKHBADc3d2xdOlSqcMgIiLSPv3Z8ktvSZqQGBgYaJy0KpPJMrzwHhEREeUekiYk27Zty/RYSEgI5s6dC2Uu76IiIqJvAP8t00jShKRp06bpym7fvo2hQ4di586daN++PcaNGydBZERERFrEZb8a6cWkVgB4+vQpunXrBi8vL6SkpCA0NBSrVq2Cq6ur1KERERGRjkmekMTExGDIkCHw8PDA9evXcejQIezcuRPfffed1KERERFph1Bq55aLSTpkM3XqVEyZMgVOTk5Yv359hkM4REREXz0O2WgkE0K6tUgGBgYwNTWFr68vDA0NM623devWbLVrJM/3uaFRLmNvZiV1CKRHnsfHSB0C6ZGUpCc6P0f8lM5aacdsyAqttKOPJO0h6dixI69VQ0REuZ7gKhuNJE1IVq5cKeXpiYiIvgwO2WikNzu1EhER5Vq5fEKqNki+yoaIiIiIPSRERES6xiEbjZiQEBER6RontWrEIRsiIiKSHHtIiIiIdI1DNhoxISEiItI1rrLRiEM2REREJDn2kBAREekah2w0YkJCRESkY9w6XjMO2RAREZHk2ENCRESkaxyy0YgJCRERka4xIdGICQkREZGucdmvRpxDQkRERJJjDwkREZGucchGIyYkREREOiaYkGjEIRsiIiKSHHtIiIiIdI09JBoxISEiItI17tSqEYdsiIiISHLsISEiItI1DtloxISEiIhI15iQaMQhGyIiIpIce0iIiIh0TAj2kGjChISIiEjXOGSjERMSIiIiXWNCohHnkBAREZHk2ENC34Twe7ukDoH0SJ78NaQOgb4xvJaNZkxIiIiIdI0JiUYcsiEiIiLJsYeEiIhI13gpG42YkBAREekY55BoxiEbIiIikhx7SIiIiHSNPSQaMSEhIiLSNc4h0YhDNkRERCQ59pAQERHpGCe1asaEhIiISNc4ZKMRExIiIiIdYw+JZpxDQkRERJJjDwkREZGucchGIyYkREREOiaYkGjEIRsiIiKSHHtIiIiIdI09JBoxISEiItIxDtloxiEbIiKiXOr48eNo3LgxXFxcIJPJsH37drXjnTp1gkwmU7vVq1dPrc6rV6/Qvn17WFpawtraGl26dEFcXJxanatXr6Jq1aowMTFBgQIFMHXq1GzHyoSEiIhI15RaumXT27dvUbp0aSxYsCDTOvXq1UNERITqtn79erXj7du3x/Xr13HgwAHs2rULx48fR/fu3VXHY2NjUbduXbi6uuLixYuYNm0axowZgyVLlmQrVg7ZEBER6ZhUQzb169dH/fr1P1lHoVDAyckpw2M3b97E3r17cf78eVSoUAEAMG/ePDRo0ADTp0+Hi4sLgoODkZSUhOXLl0Mul6NkyZIIDQ3FzJkz1RIXTdhDQkREpGNCqZ2bLhw9ehQODg4oVqwYevXqhZcvX6qOhYSEwNraWpWMAICvry8MDAxw9uxZVZ1q1apBLper6vj5+eH27dt4/fp1luNgDwkREdFXIjExEYmJiWplCoUCCoUiR+3Vq1cPLVq0gJubG+7fv4///e9/qF+/PkJCQmBoaIjIyEg4ODioPcbIyAi2traIjIwEAERGRsLNzU2tjqOjo+qYjY1NlmJhDwkREZGOaauHJCgoCFZWVmq3oKCgHMfVpk0bNGnSBF5eXmjWrBl27dqF8+fP4+jRo9p78lnEHhIiIiJdEzKtNDNs2DAEBgaqleW0dyQjhQsXhp2dHe7du4fatWvDyckJUVFRanVSUlLw6tUr1bwTJycnPHv2TK1O2v3M5qZkhD0kREREXwmFQgFLS0u1mzYTksePH+Ply5dwdnYGAHh7eyM6OhoXL15U1Tl8+DCUSiUqVqyoqnP8+HEkJyer6hw4cADFihXL8nANwISEiIhI56Sa1BoXF4fQ0FCEhoYCAMLCwhAaGorw8HDExcVh8ODBOHPmDB4+fIhDhw6hadOm8PDwgJ+fHwCgRIkSqFevHrp164Zz587h1KlT6N27N9q0aQMXFxcAQLt27SCXy9GlSxdcv34dGzZswJw5c9L15GjCIRsiIiIdE0rtDNlk14ULF1CzZk3V/bQkwd/fH4sWLcLVq1exatUqREdHw8XFBXXr1sX48ePVel2Cg4PRu3dv1K5dGwYGBmjZsiXmzp2rOm5lZYX9+/cjICAA5cuXh52dHUaNGpWtJb8AIBNCCE2Vrl69muUGS5Uqla0AdMFInk/qEEjPvHt6QuoQSI/kyV9D6hBIjyQkhOv8HBFVamqulAXOJ49opR19lKUekjJlykAmkyGz3CXtmEwmQ2pqqlYDJCIi+trxWjaaZSkhCQsL03UcREREuZbQ0iqb3CxLCYmrq6uu4yAiIqJvWI5W2axZswY+Pj5wcXHBo0ePAACzZ8/GX3/9pdXgiIiIcgN93jpeX2Q7IVm0aBECAwPRoEEDREdHq+aMWFtbY/bs2dqOj4iI6KsnlDKt3HKzbCck8+bNw9KlSzF8+HAYGhqqyitUqIBr165pNTgiIqLcQAjt3HKzbCckYWFhKF
u2bLpyhUKBt2/faiUoIiIi+rZkOyFxc3NT7fj2ob1796JEiRLaiImIiChX4ZCNZtneqTUwMBABAQFISEiAEALnzp3D+vXrERQUhD/++EMXMRIREX3VcnsyoQ3ZTki6du0KU1NTjBgxAvHx8WjXrh1cXFwwZ84ctGnTRhcxEhERUS6Xo2vZtG/fHu3bt0d8fDzi4uLg4OCg7biIiIhyjdw+IVUbcnxxvaioKNy+fRvA+63j7e3ts/X42NjYLNe1tLTMVttERET6hEM2mmU7IXnz5g1+/fVXrF+/Hkrl+11aDA0N0bp1ayxYsABWVlZZasfa2hoy2affIF4fh4iI6NuQozkkly9fxu7du+Ht7Q0ACAkJQb9+/dCjRw/8+eefWWrnyJHce8VCIiKiD/FaNprJRGaX8M2Eubk59u3bhypVqqiVnzhxAvXq1dOLvUiM5PmkDoH0zLunJ6QOgfRInvw1pA6B9EhCQrjOz3HP008r7Xjc2KeVdvRRtntI8ubNm+GwjJWVFWxsbD4rmPj4eISHhyMpKUmtvFSpUp/VLhEREem3bCckI0aMQGBgINasWQMnJycAQGRkJAYPHoyRI0fmKIjnz5+jc+fO+PvvvzM8zjkkRET0NVNyyEajLCUkZcuWVZuAevfuXRQsWBAFCxYEAISHh0OhUOD58+fo0aNHtoPo378/oqOjcfbsWdSoUQPbtm3Ds2fPMGHCBMyYMSPb7REREekTziHRLEsJSbNmzXQaxOHDh/HXX3+hQoUKMDAwgKurK+rUqQNLS0sEBQWhYcOGOj0/ERGRLnHZr2ZZSkhGjx6t0yDevn2r2lzNxsYGz58/R9GiReHl5YVLly7p9NxEREQkvWxfXE8XihUrptpkrXTp0vj999/x5MkTLF68GM7OzhJHR0RE9HmE0M4tN8v2pNbU1FTMmjULGzduzHBFzKtXr7IdRL9+/RAREQHgfW9MvXr1EBwcDLlcjpUrV2a7PSIiIn3CIRvNsp2QjB07Fn/88QcGDhyIESNGYPjw4Xj48CG2b9+OUaNG5SiIDh06qP6/fPnyePToEW7duoWCBQvCzs4uR20SERHR1yPbQzbBwcFYunQpBg4cCCMjI7Rt2xZ//PEHRo0ahTNnzmQ7gOTkZLi7u+PmzZuqMjMzM5QrV47JCBER5QpKIdPKLTfLdkISGRkJLy8vAICFhQViYmIAAI0aNcLu3buzHYCxsTESEhKy/TgiIqKvhRAyrdxys2wnJPnz51fN93B3d8f+/fsBAOfPn4dCochREAEBAZgyZQpSUlJy9HgiIiL6umV7Dknz5s1x6NAhVKxYEX369EGHDh2wbNkyhIeHY8CAATkK4vz58zh06BD2798PLy8vmJubqx3funVrjtolIiLSB7l9hYw2ZDshmTx5sur/W7duDVdXV5w+fRpFihRB48aNcxSEtbU1WrZsmaPHUsZ69fTHwMBecHKyx9WrN9Cv/0icvxAqdVj0GZau3oCDx04h7NFjmCjkKOPliQG9foGba/50dYUQ6DVoFE6euYA5QSNRu1plAMD23QcwYtLMDNs/tms98tpY49KVfzBz0QqEPfoXCQmJcHFywE9NG6Bjm+Y6fX6ke4MG/YoJE4Zi3rxlGDx4LADA0dEeQUHDUatWFeTJY4E7d+5jypT52L4940t5UM7k9vkf2pDthORjlSpVQqVKlRAVFYVJkybhf//7X7bbWLFixeeGQR/46acmmD5tNH4NGIpz5y+jb5+u2LM7GJ7fVcPz5y+lDo9y6ELoNbRt0RjflSiKlNRUzPl9JboPGI6/gn+HmamJWt01G7Yjo6+/er7VUKVSebWy4RNnIjEpCXltrAEApqYmaNeyMYq6u8HU1ASXrl7HuKlzYWqqwE9NG+jo2ZGulS9fCl27tsPVqzfUypctmwUrK0v8+GMXvHz5Gq1bN0Vw8EJUrtwIV65clyha+hZpbWO0iIiIHF9cr1atWoiOjk5XHhsbi1q1an1mZN+eAf264Y9l67Bq9UbcvHkXvwYMRXz8O3Tu1Ebq0Ogz/D5zApo1rAOPwq4oXqQwJg4PRMSzKNy4fVet3q0797Hqzy0Y/7/0Q6gmCgXs8tqqbgYGBjh78QpaNPrv0uglinqgQZ0a8CjsinzOjmjsVwuVfyiPi/zH6atlbm6GlSvn4tdfhyI6OkbtWKVK5bFo0UpcuHAFYWHhmDx5HqKjY1GunJdE0eZOnNSqmV7s1Hr06NF0G6wBQEJCAk6cOCFBRF8vY2NjlCtXCocO//e6CSFw6PBJVProlzF93eLexgMArCzzqMreJSTgt7FTMHxgAOzy2mpsY8feQzA1UaBuzSqZ1rl55x5C/7mJCmX4D9TXas6cCfj778M4fPhkumNnzlzEjz82ho2NFWQyGX76qTFMTBQ4dixEgkhzL+7UqtlnD9l8jqtXr6r+/8aNG4iMjFTdT01Nxd69e5EvXz4pQvtq2dnZwsjICFHPXqiVR0U9R/Fi7hJFRdqmVCoxec7vKFvKE0UKF1KVT527BGW+80Stqt5Zamfrrn1oUKcGTDJYIVe7WQe8io5BaqoSv/7SHj82qaet8OkL+umnxihT5jv4+GQ8x699+1+xdu0CRERcQ3JyMuLj36F162548ODRF440d+McEs0kTUjKlCkDmUwGmUyW4dCMqakp5s2b98k2EhMTkZiYqFYmhIBMxjefcq8JMxbg3oOHWL1ouqrsyIkzOHvxCjavmJ+lNkL/uYkHD/9F0MjBGR5ftXA64t+9w9XrtzBr0QoUzO+CBnVqaCN8+kLy53fG9Olj0LBh+3Tfk2lGjx4IKytL1K/fFi9evEKTJn5Yu3Yhatf+Edev3/7CEdO3LMsJSWBg4CePP3/+PNsnDwsLgxAChQsXxrlz52Bvb686JpfL4eDgAENDw0+2ERQUhLFjx6qVyQwsIDO0zHY8ucGLF6+QkpICB0f1XW4dHOwR+Sz77xHpn4kzFuLY6XNYtWAanBz++5s5ezEU/z6JgHe9H9XqDxg+EeVKl8TK+VPVyrfs3IviRQqjZPEiGZ4nv4sTAKCouxtevorGwmVrmZB8ZcqW9YKjoz3OnNmjKjMyMkKVKhXRq5c/SpWqiV9/7YyyZX1x8+YdAMC1azfh4/MDevb0R58+2V+kQBnL7fM/tCHLCcnly5c11qlWrVq2Tu7q6grgffdzTg0bNixdsmSTt3iO2/vaJScn49Klq6hVswp27NgHAO97oGpWwcJFXM30NRNCYNLMRTh0/DRWzJ+iShjSdP25FVp+NKzS/Ode+K1vd9TwqahWHh//DvsOnUD/np2ydG6lUomk5OTPip++vCNHTqFcOV+1siVLZuDOnfuYPn0hTP9/ddbH38GpqakwMNCLKYa5BodsNMtyQnLkyBGdBbF69epPHu/YsWOmxxQKRbodYr/14ZpZc5ZixbJZuHjpKs6fv4y+fbrB3NwUK1dtkDo0+gwTZizAngNHMXfyKJibmeLFy/dX1rawMFdbPfMxZ0f7dMnL34eOIzU1FY380g+Vrt+yE86O9nBzLQAAuBD6D1au34L2P
zXVwbMiXYqLe4sbN+6olcXHx+Ply9e4ceMOjIyMcO9eGBYsCMLQoRPw6lU0Gjeui9q1q6J5884SRU3fKknnkKTp16+f2v33E6viIZfLYWZm9smEhNLbtGkH7O1sMWbUIDg52ePKleto2KgDoqJeaH4w6a0N295fK6pz7yFq5RP+F4hmDetkq62tu/bBt3plWOaxSHdMqVRi9uKVeBIRCUNDQxTI54wBv/6CVtyDJNdJSUlB06b+mDBhKLZsWQ4LC3Pcv/8QXbsGYt8+3f0I/Rbl8gUyWiETQj8XEt29exe9evXC4MGD4efnp/kBHzCSc2UOqXv3lMvH6T958teQOgTSIwkJ4To/x2ln7exGXjlii1ba0Ud6O0hYpEgRTJ48OV3vCREREeU+ejFkkxkjIyM8ffpU6jCIiIg+C1fZaKYXCcmOHTvU7gshEBERgfnz58PHx0eiqIiIiLQj52tJvx05SkhOnDiB33//Hffv38fmzZuRL18+rFmzBm5ubqhSJfMtqDPTrFkztfsymQz29vaoVasWZsyYkZMQiYiI6CuS7TkkW7ZsgZ+fH0xNTXH58mXV7n8xMTGYNGlSjoJQKpVqt9TUVERGRmLdunVwdnbOUZtERET6QkCmlVtulu2EZMKECVi8eDGWLl0KY2NjVbmPjw8uXbr0WcEkJSXh9u3bSElJ+ax2iIiI9IlSaOeWm2U7Ibl9+3aGO7JaWVkhOjo6R0HEx8fjl19+gZmZGUqWLInw8PdLsPr06YPJkyfnqE0iIiJ9oYRMK7fcLNsJiZOTE+7du5eu/OTJkyhcuHCOghg2bBiuXr2Ko0ePwsTERFXu6+uLDRu4uygREVFul+1Jrd26dUO/fv2wfPlyyGQyPH36FCEhIRg0aBBGjhyZoyC2b9+ODRs2oFKlSmrbvpcsWRL379/PUZtERET6IrfP/9CGbCckQ4cOhVKpRO3atREfH49q1apBoVBg0KBB6NOnT46CeP78ORwcHNKVv3379pu/Lg0REX39uOxXs2wP2chkMgwfPhyvXr3CP//8gzNnzuD58+cYP358joOoUKECdu/erXYOAPjjjz/g7e2d43aJiIjo65DjjdHkcjk8PT21EsSkSZNQv3593LhxAykpKZgzZw5u3LiB06dP49ixY1o5BxERkVQ4ZKNZthOSmjVrfnIY5fDhw9kOokqVKggNDcXkyZPh5eWF/fv3o1y5cggJCYGXl1e22yMiItInHLLRLNsJSZkyZdTuJycnIzQ0FP/88w/8/f1zHIi7uzuWLl2a48cTERHR1yvbCcmsWbMyLB8zZgzi4uKy1ZaBgYHGSasymYwbpRER0VeNPSSaae3ieh06dMAPP/yA6dOnZ/kx27Zty/RYSEgI5s6dC6WSbyMREX3dOIdEM60lJCEhIWqbmmVF06ZN05Xdvn0bQ4cOxc6dO9G+fXuMGzdOWyESERGRnsp2QtKiRQu1+0IIRERE4MKFCzneGA0Anj59itGjR2PVqlXw8/NDaGgovvvuuxy3R0REpC+U7CDRKNsJiZWVldp9AwMDFCtWDOPGjUPdunWzHUDaVYLnzZuHMmXK4NChQ6hatWq22yEiItJXuf06NNqQrYQkNTUVnTt3hpeXF2xsbD775FOnTsWUKVPg5OSE9evXZziEQ0RE9LXL5Rfq1QqZECJbr5OJiQlu3rwJNze3zz65gYEBTE1N4evrC0NDw0zrbd26NVvtGsnzfW5olMu8e3pC6hBIj+TJX0PqEEiPJCSE6/wc253aaaWdZpHrtNKOPsr2kM13332HBw8eaCUh6dixI69VQ0REuR7Xi2qW7YRkwoQJGDRoEMaPH4/y5cvD3Nxc7bilpWWW21q5cmV2T09ERPTVUfLHt0ZZTkjGjRuHgQMHokGDBgCAJk2aqPVuCCEgk8mQmpqq/SiJiIgoV8tyQjJ27Fj07NkTR44c0WU8REREuQ4ntWqW5YQkbe5r9erVdRYMERFRbsQ5JJoZZKcyJ6ASERF9PY4fP47GjRvDxcUFMpkM27dvVzsuhMCoUaPg7OysWvV69+5dtTqvXr1C+/btYWlpCWtra3Tp0iXdteuuXr2KqlWrwsTEBAUKFMDUqVOzHWu2EpKiRYvC1tb2kzciIiJSp5Rp55Zdb9++RenSpbFgwYIMj0+dOhVz587F4sWLcfbsWZibm8PPzw8JCQmqOu3bt8f169dx4MAB7Nq1C8ePH0f37t1Vx2NjY1G3bl24urri4sWLmDZtGsaMGYMlS5ZkK9ZsrbIZO3Zsup1aiYiI6NOk2qm1fv36qF+/fobHhBCYPXs2RowYodqYdPXq1XB0dMT27dvRpk0b3Lx5E3v37sX58+dRoUIFAMC8efPQoEEDTJ8+HS4uLggODkZSUhKWL18OuVyOkiVLIjQ0FDNnzlRLXDTJVkLSpk0bODg4ZOchREREpCWJiYlITExUK1MoFFAoFNluKywsDJGRkfD19VWVWVlZoWLFiggJCUGbNm0QEhICa2trVTICAL6+vjAwMMDZs2fRvHlzhISEoFq1apDL5ao6fn5+mDJlCl6/fp3lnd2zPGTD+SNEREQ5I7R0CwoKgpWVldotKCgoRzFFRkYCABwdHdXKHR0dVcciIyPTdUQYGRnB1tZWrU5GbXx4jqzI9iobIiIiyh5tXe132LBhCAwMVCvLSe+IPspyQqJUctESERFRTmjrX9CcDs9kxMnJCQDw7NkzODs7q8qfPXuGMmXKqOpERUWpPS4lJQWvXr1SPd7JyQnPnj1Tq5N2P61OVmRrlQ0RERHlDm5ubnBycsKhQ4dUZbGxsTh79iy8vb0BAN7e3oiOjsbFixdVdQ4fPgylUomKFSuq6hw/fhzJycmqOgcOHECxYsWyPH8EYEJCRESkc9qaQ5JdcXFxCA0NRWhoKID3E1lDQ0MRHh4OmUyG/v37Y8KECdixYweuXbuGjh07wsXFBc2aNQMAlChRAvXq1UO3bt1w7tw5nDp1Cr1790abNm3g4uICAGjXrh3kcjm6dOmC69evY8OGDZgzZ066oSVNsn1xPSIiIsoebc0hya4LFy6gZs2aqvtpSYK/vz9WrlyJ3377DW/fvkX37t0RHR2NKlWqYO/evTAxMVE9Jjg4GL1790bt2rVhYGCAli1bYu7cuarjVlZW2L9/PwICAlC+fHnY2dlh1KhR2VryCwAykQtnqxrJ80kdAumZd09PSB0C6ZE8+WtIHQLpkYSEcJ2fY1n+Dlppp8vjtVppRx+xh4SIiEjHuCxEMyYkREREOsaERDNOaiUiIiLJsYeEiIhIxwQ3O9eICQkREZGOcchGMw7ZEBERkeTYQ0JERKRj7CHRjAkJERGRjuW6Db90gAkJERGRjkm1U+vXhHNIiIiISHLsISEiItIxziHRjAkJERGRjjEh0YxDNkRERCQ59pAQERHpGFfZaMaEhIiISMe4ykYzDtkQERGR5NhDQkREpGOc1KoZExIiIiId4xwSzThkQ0RERJJjDwkREZGOKdlHohETEvomtC3fX+oQSI84mltLHQJ9YziHRDMmJERERDrG/hHN
OIeEiIiIJMceEiIiIh3jkI1mTEiIiIh0jDu1asYhGyIiIpIce0iIiIh0jMt+NWNCQkREpGNMRzTjkA0RERFJjj0kREREOsZVNpoxISEiItIxziHRjEM2REREJDn2kBAREekY+0c0Y0JCRESkY5xDohkTEiIiIh3jHBLNOIeEiIiIJMceEiIiIh1j/4hmTEiIiIh0jHNINOOQDREREUmOPSREREQ6JjhooxETEiIiIh3jkI1mHLIhIiIiybGHhIiISMe4D4lmTEiIiIh0jOmIZhyyISIiIsmxh4SIiEjHOGSjGRMSIiIiHeMqG82YkBAREekY9yHRjHNIiIiISHLsISEiItIxDtloxoSEiIhIxzhkoxmHbIiIiEhy7CEhIiLSMQ7ZaMaEhIiISMeUgkM2mnDIhoiIiCTHHhIiIiIdY/+IZkxIiIiIdIxbx2vGIRsiIiKSnN4kJCdOnECHDh3g7e2NJ0+eAADWrFmDkydPShwZERHR5xFa+i8304uEZMuWLfDz84OpqSkuX76MxMREAEBMTAwmTZokcXRERESfR6mlW26mFwnJhAkTsHjxYixduhTGxsaqch8fH1y6dEnCyIiIiD6fEkIrt9xMLxKS27dvo1q1aunKraysEB0d/eUDIiIioi9KLxISJycn3Lt3L135yZMnUbhwYQkiIiIi0h7OIdFMLxKSbt26oV+/fjh79ixkMhmePn2K4OBgDBo0CL169ZI6PCIios/COSSa6UVCMnToULRr1w61a9dGXFwcqlWrhq5du6JHjx7o06eP1OERERF9dcaMGQOZTKZ2K168uOp4QkICAgICkDdvXlhYWKBly5Z49uyZWhvh4eFo2LAhzMzM4ODggMGDByMlJUUn8erFxmgymQzDhw/H4MGDce/ePcTFxcHT0xMWFhZSh0ZERPTZhETXsilZsiQOHjyoum9k9N8/+wMGDMDu3buxadMmWFlZoXfv3mjRogVOnToFAEhNTUXDhg3h5OSE06dPIyIiAh07doSxsbFOVsDqRUKydu1atGjRAmZmZvD09JQ6HCIiIq2SaoWMkZERnJyc0pXHxMRg2bJlWLduHWrVqgUAWLFiBUqUKIEzZ86gUqVK2L9/P27cuIGDBw/C0dERZcqUwfjx4zFkyBCMGTMGcrlcq7HqxZDNgAED4ODggHbt2mHPnj1ITU2VOiQiIiK9k5iYiNjYWLVb2t5dGbl79y5cXFxQuHBhtG/fHuHh4QCAixcvIjk5Gb6+vqq6xYsXR8GCBRESEgIACAkJgZeXFxwdHVV1/Pz8EBsbi+vXr2v9uelFQhIREYE///wTMpkMrVq1grOzMwICAnD69GmpQyMiIvps2prUGhQUBCsrK7VbUFBQhuesWLEiVq5cib1792LRokUICwtD1apV8ebNG0RGRkIul8Pa2lrtMY6OjoiMjAQAREZGqiUjacfTjmmbXgzZGBkZoVGjRmjUqBHi4+Oxbds2rFu3DjVr1kT+/Plx//59qUMkIiLKMW0t2R02bBgCAwPVyhQKRYZ169evr/r/UqVKoWLFinB1dcXGjRthamqqlXi0SS8Skg+ZmZnBz88Pr1+/xqNHj3Dz5k2pQyIiItILCoUi0wREE2traxQtWhT37t1DnTp1kJSUhOjoaLVekmfPnqnmnDg5OeHcuXNqbaStwsloXsrn0oshGwCIj49HcHAwGjRogHz58mH27Nlo3ry5TsapiIiIviR92Do+Li4O9+/fh7OzM8qXLw9jY2McOnRIdfz27dsIDw+Ht7c3AMDb2xvXrl1DVFSUqs6BAwdgaWmpkwUoetFD0qZNG+zatQtmZmZo1aoVRo4cqXpBiIiIvnZSLPsdNGgQGjduDFdXVzx9+hSjR4+GoaEh2rZtCysrK3Tp0gWBgYGwtbWFpaUl+vTpA29vb1SqVAkAULduXXh6euLnn3/G1KlTERkZiREjRiAgICDHvTSfohcJiaGhITZu3Ag/Pz8YGhpKHQ4REZFWSbHL6uPHj9G2bVu8fPkS9vb2qFKlCs6cOQN7e3sAwKxZs2BgYICWLVsiMTERfn5+WLhwoerxhoaG2LVrF3r16gVvb2+Ym5vD398f48aN00m8MiHVbi06ZCTPJ3UIpGeaOZeXOgTSI+fjHkodAumRRy+v6vwcfgXqa66UBfv+/Vsr7egjyXpI5s6di+7du8PExARz5879ZN2+fft+oahyj149/TEwsBecnOxx9eoN9Os/EucvhEodFmlZq/5t0WpAW7WyJ/ceo1/tXwEA3Sf9ilJVSsPG0RYJbxNw5+ItrJm8Ek/vP1HV3/xoR7p2Z/WehlM7T+g2eNK6X/t3Qb1GteFexA0J7xJx8XwoJo+djQf3HqrqKBRyjBg/CI2b14NcLsfxI6cxYvAEvHj+CgBQomRR9OrXBd9XKgtbW2s8/vcp1q7YhBVLgiV6VrlDbr8wnjZIlpDMmjUL7du3h4mJCWbNmpVpPZlMxoQkm376qQmmTxuNXwOG4tz5y+jbpyv27A6G53fV8Pz5S6nDIy0Lv/0I49qPVN1PTflvY8EH1+7jxPZjePH0OSysLdCqf1uMXDMOAVW6Qan8rxN5/sDZCD12SXX/bezbLxM8aVXFyhWwetmfuHLpOoyMDPHbiL5Ys3kxfCs3x7v4dwCAkRN/Q606VfHrL4MQG/sG46f8D7+vmoWWDfwBAF6lPfHyxSv07zkMT59EosIPZRA0cxSUylSs+uNPKZ/eV02qnVq/JpIlJGFhYRn+P32+Af264Y9l67Bq9UYAwK8BQ9Ggfm107tQGU6ctkDg60rbUlFREP4/O8NjB9ftU///8cRT+nB6MGfvmwj6/A56F/7ex0dvYt5m2QV8P/1bqV0cf2HskLt85Bq/SnjgXchF58ligdfvm6Nd9KE6feL+cc1CfkTh8ZgfKViiFyxeuYuO67Wpt/PvoCcp9Xxr1GvkyISGd0otlv+PGjUN8fHy68nfv3uls8kxuZWxsjHLlSuHQ4f+624UQOHT4JCpV4jyK3MjZzQVLzq3AghNL0G9OIOxc7DKspzBVoOZPtfEsPBIvI16oHes6vieWX16LoL+mo1Yr3wwfT1+fPJbvL1Aa/ToGAOBVxhNyuTFOHjujqnP/7kM8/vcpylUo9cl20tqgnBFCaOWWm+nFKpuxY8eiZ8+eMDMzUyuPj4/H2LFjMWrUKIki+/rY2dnCyMgIUc/U/8GJinqO4sXcJYqKdOVu6G0sGDgHTx88gbWDDVr1b4PxmyZjQN0+SHj7vove7+f66DCsE0zNTfHk3mOMaz8KKcn/XT78zxnBuHb6KpLeJaJ01TLoOr4nTMxMsGflLqmeFmmBTCbD6Im/4fyZS7hz6x4AwN7BDomJSYiNfaNW98Xzl7B3zDiRLf99aTRq5ofObXrrPObcjEM2mulFQiKEgEwmS1d+5coV2NrafvKxiYmJ6S4slFl7RLnN5aP/zft4dOsh7obewaJTf6Byoyo4vOEAAODE9mO4ciIUNg62aNK9GQIX/oYRLYcgOTEZALB57gZVG2HXH0BhZoImPZozIfnKjZ82HEV
LeODHhp1y3EbR4h5YunYO5kxbjBNHQ7QXHFEGJB2ysbGxga2tLWQyGYoWLQpbW1vVzcrKCnXq1EGrVq0+2UZGFxoSyjeffExu9uLFK6SkpMDho187Dg72iHz2XKKo6EuJj32LiLCncHJ1/q/sTTwiH0bg5rnrmNFrCvK558cPfplvPHg39A7sXOxhJNeL3yuUA+OmDEPtutXQtmlXRD59pip/HvUCCoUclpZ51Orb2efF8496VYsUK4x125Zi/eotmDdj6ReJOzcTWvovN5P0G2f27NkQQuCXX37B2LFjYWVlpToml8tRqFAhjTu2ZnShIZu8xXUS79cgOTkZly5dRa2aVbBjx/sJjTKZDLVqVsHCRSskjo50zcTMBI6uTojeeiTjCrL3nwfjTyQbhTzd8Cb6DVKSUjKtQ/pr3JRh8GtYC62bdMG/4U/Ujl0LvYGkpGT4VK+Iv3ceBAAU9iiE/AVccOnCf3txFCnmjvXb/8CWP3dg2sR5XzT+3EqZy+d/aIOkCYm///tlZm5ubqhcuTKMjY2z3UZGFxr61odrZs1ZihXLZuHipas4f/4y+vbpBnNzU6xctUHzg+mr0nF4Z1w4eA7PnzyHraMtWg1oB2WqEid3HIdDAUf4NK6KK8cvI/ZVDPI626FZr5ZISkjEpSMXAQDla38Pa3tr3Ll0G8mJyShVtQxaBPyEHUu2SfzMKCcmTBuOJi3ro1uHfngb9xb2DnkBALGxcUhMSMSbN3HYELwNI8YPQvTrGLx5E4dxk4fh4rlQXP7/hKRocQ+s3/4Hjh85hT8WrVa1kZqqxKuXryV7bpT7SZaQxMbGwtLSEgBQtmxZvHv3Du/evcuwblo9yppNm3bA3s4WY0YNgpOTPa5cuY6GjTogKuqF5gfTVyWvU170nzcIeawtEfsqBrfO38D/mg1G7KtYGBoZocQPnmj4SxOYW5kj5kU0bp67juEthiD25fsVE6kpqajXsSE6jewCyGSIfBiBVeOX4eD6/RI/M8qJn39pDQDYuFO9N3Rg7xHYvP79Bnjjh0+FUCqxeOXM/98Y7RRGDJ6oqtugSR3Y2duiRavGaNGqsar83/AnqFJWO7uNfovYP6KZZFvHGxoaIiIiAg4ODjAwMMiwVyNtcmpqamoGLWSOW8fTx7h1PH2IW8fTh77E1vE++WpppZ1TTw5rpR19JFkPyeHDh1UraI4cyWS8m4iIKBfgsl/NJEtIqlevnuH/ExER0bdHL3Zq3bt3L06ePKm6v2DBApQpUwbt2rXD69ecREVERF837tSqmV4kJIMHD0ZsbCwA4Nq1awgMDESDBg0QFhaWbkkvERHR10YJoZVbbqYXOx+FhYXB09MTALBlyxY0btwYkyZNwqVLl9CgQQOJoyMiIiJd04seErlcrrq43sGDB1G3bl0AgK2trarnhIiI6GvFnVo104sekipVqiAwMBA+Pj44d+4cNmx4v4HXnTt3kD9/fomjIyIi+jy5ff6HNuhFD8n8+fNhZGSEzZs3Y9GiRciX7/0+In///Tfq1asncXRERESka5JtjKZL3BiNPsaN0ehD3BiNPvQlNkYr51xFK+1cijipudJXSi+GbAAgNTUV27dvx82bNwEAJUuWRJMmTWBoaChxZERERJ8nF/721zq9SEju3buHBg0a4MmTJyhWrBgAICgoCAUKFMDu3bvh7u4ucYRERESkS3oxh6Rv375wd3fHv//+i0uXLuHSpUsIDw+Hm5sb+vbtK3V4REREn4X7kGimFz0kx44dw5kzZ1TXtgGAvHnzYvLkyfDx8ZEwMiIios+X25fsaoNeJCQKhQJv3rxJVx4XFwe5XC5BRERERNqj5BwSjfRiyKZRo0bo3r07zp49q9qv/8yZM+jZsyeaNGkidXhERESkY3qRkMydOxceHh6oXLkyTExMYGJiAh8fH3h4eGDOnDlSh0dERPRZuFOrZpIO2SiVSkybNg07duxAUlISmjVrBn9/f8hkMpQoUQIeHh5ShkdERKQVHLLRTNKEZOLEiRgzZgx8fX1hamqKPXv2wMrKCsuXL5cyLCIiIvrCJB2yWb16NRYuXIh9+/Zh+/bt2LlzJ4KDg6FUKqUMi4iISKs4ZKOZpAlJeHg4GjRooLrv6+sLmUyGp0+fShgVERGRdimF0MotN5M0IUlJSYGJiYlambGxMZKTkyWKiIiIiKQg6RwSIQQ6deoEhUKhKktISEDPnj1hbm6uKtu6dasU4REREWlFbh9u0QZJExJ/f/90ZR06dJAgEiIiIt3J7cMt2iBpQrJixQopT09ERER6Qi+2jiciIsrNOGSjGRMSIiIiHROC21lowoSEiIhIx5TsIdFIL65lQ0RERN829pAQERHpmOAqG42YkBAREekYh2w045ANERERSY49JERERDrGIRvNmJAQERHpGHdq1YxDNkRERCQ59pAQERHpGHdq1YwJCRERkY5xDolmHLIhIiIiybGHhIiISMe4D4lmTEiIiIh0jEM2mjEhISIi0jEu+9WMc0iIiIhIcuwhISIi0jEO2WjGhISIiEjHOKlVMw7ZEBERkeTYQ0JERKRjHLLRjAkJERGRjnGVjWYcsiEiIiLJsYeEiIhIx3hxPc2YkBAREekYh2w045ANERERSY49JERERDrGVTaaMSEhIiLSMc4h0YxDNkRERDomhNDKLScWLFiAQoUKwcTEBBUrVsS5c+e0/Oy0gwkJERFRLrVhwwYEBgZi9OjRuHTpEkqXLg0/Pz9ERUVJHVo6TEiIiIh0TKoekpkzZ6Jbt27o3LkzPD09sXjxYpiZmWH58uU6eJafhwkJERGRjgkt3bIjKSkJFy9ehK+vr6rMwMAAvr6+CAkJ+aznowuc1EpERPSVSExMRGJiolqZQqGAQqFIV/fFixdITU2Fo6OjWrmjoyNu3bql0zhzIlcmJClJT6QOQXKJiYkICgrCsGHDMvyg0reHnwn6ED8PX5a2/l0aM2YMxo4dq1Y2evRojBkzRivtS0kmuDg6V4qNjYWVlRViYmJgaWkpdTikB/iZoA/x8/B1yk4PSVJSEszMzLB582Y0a9ZMVe7v74/o6Gj89ddfug43WziHhIiI6CuhUChgaWmpdsush0sul6N8+fI4dOiQqkypVOLQoUPw9vb+UiFnWa4csiEiIiIgMDAQ/v7+qFChAn744QfMnj0bb9++RefOnaUOLR0mJERERLlU69at8fz5c4waNQqRkZEoU6YM9u7dm26iqz5gQpJLKRQKjB49mpPVSIWfCfoQPw/fjt69e6N3795Sh6ERJ7USERGR5DiplYiIiCTHhISIiIgkx4SEiIiIJMeEhFQKFSqE2bNnSx0GfWWOHj0KmUyG6OhoqUOhLMjq+8XvA/rSmJB8IZ06dYJMJsPkyZPVyrdv3w6ZTPZFY1m5ciWsra3TlZ8/fx7du3f/orHQf77UZ+Thw4eQyWQIDQ3VWpukfWmfB5lMBrlcDg8PD4wbNw4pKSmf1W7lypUREREBKysrAPw+IP3BhOQLMjExwZQpU/D69WupQ8mQvb09zMzMpA7jm6ZPn5GkpC
SpQ/jm1atXDxEREbh79y4GDhyIMWPGYNq0aZ/Vplwuh5OTk8Ykl98H9KUxIfmCfH194eTkhKCgoEzrnDx5ElWrVoWpqSkKFCiAvn374u3bt6rjERERaNiwIUxNTeHm5oZ169al61qdOXMmvLy8YG5ujgIFCuDXX39FXFwcgPfdtZ07d0ZMTIzq11faRZk+bKddu3Zo3bq1WmzJycmws7PD6tWrAbzfgjgoKAhubm4wNTVF6dKlsXnzZi28Ut8ubXxGZDIZtm/frvYYa2trrFy5EgDg5uYGAChbtixkMhlq1KgB4P0v8mbNmmHixIlwcXFBsWLFAABr1qxBhQoVkCdPHjg5OaFdu3aIiorS3pOmTCkUCjg5OcHV1RW9evWCr68vduzYgdevX6Njx46wsbGBmZkZ6tevj7t376oe9+jRIzRu3Bg2NjYwNzdHyZIlsWfPHgDqQzb8PiB9woTkCzI0NMSkSZMwb948PH78ON3x+/fvo169emjZsiWuXr2KDRs24OTJk2ob2nTs2BFPnz7F0aNHsWXLFixZsiTdPw4GBgaYO3curl+/jlWrVuHw4cP47bffALzvrp09ezYsLS0RERGBiIgIDBo0KF0s7du3x86dO1WJDADs27cP8fHxaN68OQAgKCgIq1evxuLFi3H9+nUMGDAAHTp0wLFjx7Tyen2LtPEZ0eTcuXMAgIMHDyIiIgJbt25VHTt06BBu376NAwcOYNeuXQDe/8Mzfvx4XLlyBdu3b8fDhw/RqVOnz3uilCOmpqZISkpCp06dcOHCBezYsQMhISEQQqBBgwZITk4GAAQEBCAxMRHHjx/HtWvXMGXKFFhYWKRrj98HpFcEfRH+/v6iadOmQgghKlWqJH755RchhBDbtm0TaW9Dly5dRPfu3dUed+LECWFgYCDevXsnbt68KQCI8+fPq47fvXtXABCzZs3K9NybNm0SefPmVd1fsWKFsLKySlfP1dVV1U5ycrKws7MTq1evVh1v27ataN26tRBCiISEBGFmZiZOnz6t1kaXLl1E27ZtP/1iUIa08RkRQggAYtu2bWp1rKysxIoVK4QQQoSFhQkA4vLly+nO7+joKBITEz8Z5/nz5wUA8ebNGyGEEEeOHBEAxOvXr7P5jOlTPvw8KJVKceDAAaFQKESzZs0EAHHq1ClV3RcvXghTU1OxceNGIYQQXl5eYsyYMRm2+/H7xe8D0hfcOl4CU6ZMQa1atdL9Erly5QquXr2K4OBgVZkQAkqlEmFhYbhz5w6MjIxQrlw51XEPDw/Y2NiotXPw4EEEBQXh1q1biI2NRUpKChISEhAfH5/lMWEjIyO0atUKwcHB+Pnnn/H27Vv89ddf+PPPPwEA9+7dQ3x8POrUqaP2uKSkJJQtWzZbrwell9PPSIkSJT7rvF5eXpDL5WplFy9exJgxY3DlyhW8fv0aSqUSABAeHg5PT8/POh992q5du2BhYYHk5GQolUq0a9cOLVq0wK5du1CxYkVVvbx586JYsWK4efMmAKBv377o1asX9u/fD19fX7Rs2RKlSpXKcRz8PqAvgQmJBKpVqwY/Pz8MGzZMres7Li4OPXr0QN++fdM9pmDBgrhz547Gth8+fIhGjRqhV69emDhxImxtbXHy5El06dIFSUlJ2Zqk1r59e1SvXh1RUVE4cOAATE1NUa9ePVWsALB7927ky5dP7XG8Nsbny+lnBHg/h0R8dEWItK58TczNzdXuv337Fn5+fvDz80NwcDDs7e0RHh4OPz8/Tnr9AmrWrIlFixZBLpfDxcUFRkZG2LFjh8bHde3aFX5+fti9ezf279+PoKAgzJgxA3369MlxLPw+IF1jQiKRyZMno0yZMqqJgwBQrlw53LhxAx4eHhk+plixYkhJScHly5dRvnx5AO9/mXy4IuPixYtQKpWYMWMGDAzeTxHauHGjWjtyuRypqakaY6xcuTIKFCiADRs24O+//8ZPP/0EY2NjAICnpycUCgXCw8NRvXr17D15ypKcfEaA96sjIiIiVPfv3r2L+Ph41f20HpCsfAZu3bqFly9fYvLkyShQoAAA4MKFC9l+LpQz5ubm6d7rEiVKICUlBWfPnkXlypUBAC9fvsTt27fVeqwKFCiAnj17omfPnhg2bBiWLl2aYULC7wPSF0xIJOLl5YX27dtj7ty5qrIhQ4agUqVK6N27N7p27Qpzc3PcuHEDBw4cwPz581G8eHH4+vqie/fuWLRoEYyNjTFw4ECYmpqqlvB5eHggOTkZ8+bNQ+PGjXHq1CksXrxY7dyFChVCXFwcDh06hNKlS8PMzCzTnpN27dph8eLFuHPnDo4cOaIqz5MnDwYNGoQBAwZAqVSiSpUqiImJwalTp2BpaQl/f38dvGrflpx8RgCgVq1amD9/Pry9vZGamoohQ4ao/uEAAAcHB5iammLv3r3Inz8/TExMVHtSfKxgwYKQy+WYN28eevbsiX/++Qfjx4/X7ROnTypSpAiaNm2Kbt264ffff0eePHkwdOhQ5MuXD02bNgUA9O/fH/Xr10fRokXx+vVrHDlyJNPhPH4fkN6QeA7LN+PDCWppwsLChFwuFx++DefOnRN16tQRFhYWwtzcXJQqVUpMnDhRdfzp06eifv36QqFQCFdXV7Fu3Trh4OAgFi9erKozc+ZM4ezsLExNTYWfn59YvXp1ukmHPXv2FHnz5hUAxOjRo4UQ6pPY0ty4cUMAEK6urkKpVKodUyqVYvbs2aJYsWLC2NhY2NvbCz8/P3Hs2LHPe7G+Udr6jDx58kTUrVtXmJubiyJFiog9e/aoTWoVQoilS5eKAgUKCAMDA1G9evVMzy+EEOvWrROFChUSCoVCeHt7ix07dqhNiuWkVt3I7P0QQohXr16Jn3/+WVhZWan+zu/cuaM63rt3b+Hu7i4UCoWwt7cXP//8s3jx4oUQIuP3i98HpA9kQnw02ExflcePH6NAgQI4ePAgateuLXU4REREOcKE5Ctz+PBhxMXFwcvLCxEREfjtt9/w5MkT3LlzR61bnoiI6GvCOSRfmeTkZPzvf//DgwcPkCdPHlSuXBnBwcFMRoiI6KvGHhIiIiKSHLeOJyIiIskxISEiIiLJMSEhIiIiyTEhISIiIskxISHSA506dUKzZs1U92vUqIH+/ft/8TiOHj0KmUyG6OhonZ3j4+eaE18iTiL6spiQEGWiU6dOkMlkkMlkkMvl8PDwwLhx45CSkqLzc2/dujXLW7R/6X+cCxUqhNmzZ3+RcxHRt4P7kBB9Qr169bBixQokJiZiz549CAgIgLGxMYYNG5aublJSkurCdZ/L1tZWK+0QEX0t2ENC9AkKhQJOTk5wdXVFr1694Ovrq7r8e9rQw8SJE+Hi4qK6Ku+///6LVq1awdraGra2tmjatCkePnyoajM1NRWBgYGwtrZG3rx58dtvv+Hj7YA+HrJJTEzEkCFDUKBAASgUCnh4eGDZsmV4+PAhatasCQCwsbGBTCZDp06dAABKpRJBQUFwc3ODqakpSpcujc2bN6udZ8+ePShatChMTU1Rs2ZNtThzIjU1F
V26dFGds1ixYpgzZ06GdceOHQt7e3tYWlqiZ8+eSEpKUh3LSuxElLuwh4QoG0xNTfHy5UvV/UOHDsHS0hIHDhwA8H4nXT8/P3h7e+PEiRMwMjLChAkTUK9ePVy9ehVyuRwzZszAypUrsXz5cpQoUQIzZszAtm3bUKtWrUzP27FjR4SEhGDu3LkoXbo0wsLC8OLFCxQoUABbtmxBy5Ytcfv2bVhaWsLU1BQAEBQUhLVr12Lx4sUoUqQIjh8/jg4dOsDe3h7Vq1fHv//+ixYtWiAgIADdu3fHhQsXMHDgwM96fZRKJfLnz49NmzYhb968OH36NLp37w5nZ2e0atVK7XUzMTHB0aNH8fDhQ3Tu3Bl58+bFxIkTsxQ7EeVCEl7Yj0ivfXi1VaVSKQ4cOCAUCoUYNGiQ6rijo6NITExUPWbNmjWiWLFialdCTUxMFKampmLfvn1CCCGcnZ3F1KlTVceTk5NF/vz51a7sWr16ddGvXz8hhBC3b98WAMSBAwcyjDOjq7cmJCQIMzMzcfr0abW6Xbp0EW3bthVCCDFs2DDh6empdnzIkCEar9yb0VVgPyUgIEC0bNlSdd/f31/Y2tqKt2/fqsoWLVokLCwsRGpqapZi5xWGiXIf9pAQfcKuXbtgYWGB5ORkKJVKtGvXDmPGjFEd9/LyUps3cuXKFdy7dw958uRRaychIQH3799HTEwMIiIiULFiRdUxIyMjVKhQId2wTZrQ0FAYGhpmq2fg3r17iI+PR506ddTKk5KSULZsWQDAzZs31eIAAG9v7yyfIzMLFizA8uXLER4ejnfv3iEpKQllypRRq1O6dGmYmZmpnTcuLg7//vsv4uLiNMZORLkPExKiT6hZsyYWLVoEuVwOFxcXGBmp/8mYm5ur3Y+Li0P58uURHBycri17e/scxZA2BJMdcXFxAIDdu3cjX758ascUCkWO4siKP//8E4MGDcKMGTPg7e2NPHnyYNq0aTh79myW25AqdiKSFhMSok8wNzeHh4dHluuXK1cOGzZsgIODAywtLTOs4+zsjLNnz6JatWoAgJSUFFy8eBHlypXLsL6XlxeUSiWOHTsGX1/fdMfTemhSU1NVZZ6enlAoFAgPD8+0Z6VEiRKqCbppzpw5o/lJfsKpU6dQuXJl/Prrr6qy+/fvp6t35coVvHv3TpVsnTlzBhYWFihQoABsbW01xk5EuQ9X2RBpUfv27WFnZ4emTZvixIkTCAsLw9GjR9G3b188fvwYANCvXz9MnjwZ27dvx61bt/Drr79+cg+RQoUKwd/fH7/88gu2b9+uanPjxo0AAFdXV8hkMuzatQvPnz9HXFwc8uTJg0GDBmHAgAFYtWoV7t+/j0uXLmHevHlYtWoVAKBnz564e/cuBg8ejNu3b2PdunVYuXJllp7nkydPEBoaqnZ7/fo1ihQpggsXLmDfvn24c+cORo4cifPnz6d7fFJSErp06YIbN25gz549GD16NHr37g0DA4MsxU5EuZDUk1iI9NWHk1qzczwiIkJ07NhR2NnZCYVCIQoXLiy6desmYmJihBDvJ7H269dPWFpaCmtraxEYGCg6duyY6aRWIYR49+6dGDBggHB2dhZyuVx4eHiI5cuXq46PGzdOODk5CZlMJvz9/YUQ7yfizp49WxQrVkwYGxsLe3t74efnJ44dO6Z63M6dO4WHh4dQKBSiatWqYvny5Vma1Aog3W3NmjUiISFBdOrUSVhZWQlra2vRq1cvMXToUFG6dOl0r9uoUaNE3rx5hYWFhejWrZtISEhQ1dEUOye1EuU+MiEymUlHRERE9IVwyIaIiIgkx4SEiIiIJMeEhIiIiCTHhISIiIgkx4SEiIiIJMeEhIiIiCTHhISIiIgkx4SEiIiIJMeEhIiIiCTHhISIiIgkx4SEiIiIJMeEhIiIiCT3f5OZy8AJAol1AAAAAElFTkSuQmCC", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" }, { "name": "stdout", "output_type": "stream", "text": [ "Model loaded from path :custom_bert_model.bin\n", "Local Wav2Vec2 processor and model found. Loading from local directory.\n", "Step 1 / 215 | Pipeline Eval loss : 0.6574205160140991\n", "Step 2 / 215 | Pipeline Eval loss : 1.034090518951416\n", "Step 3 / 215 | Pipeline Eval loss : 0.531771183013916\n", "Step 4 / 215 | Pipeline Eval loss : 0.20735439658164978\n", "Step 5 / 215 | Pipeline Eval loss : 0.7105944156646729\n", "Step 6 / 215 | Pipeline Eval loss : 0.755276083946228\n", "Step 7 / 215 | Pipeline Eval loss : 0.5784748196601868\n", "Step 8 / 215 | Pipeline Eval loss : 0.5784557461738586\n", "Step 9 / 215 | Pipeline Eval loss : 0.6005702018737793\n", "Step 10 / 215 | Pipeline Eval loss : 0.6952868103981018\n", "Step 11 / 215 | Pipeline Eval loss : 0.8656256794929504\n", "Step 12 / 215 | Pipeline Eval loss : 1.0646796226501465\n", "Step 13 / 215 | Pipeline Eval loss : 0.939874529838562\n", "Step 14 / 215 | Pipeline Eval loss : 0.872765064239502\n", "Step 15 / 215 | Pipeline Eval loss : 0.5229593515396118\n", "Step 16 / 215 | Pipeline Eval loss : 1.9176567792892456\n", "Step 17 / 215 | Pipeline Eval loss : 1.1688629388809204\n", "Step 18 / 215 | Pipeline Eval loss : 0.4722476005554199\n", "Step 19 / 215 | Pipeline Eval loss : 0.5088549256324768\n", "Step 20 / 215 | Pipeline Eval loss : 0.5018230080604553\n", "Step 21 / 215 | Pipeline Eval loss : 1.1920435428619385\n", "Step 22 / 215 | Pipeline Eval loss : 0.8868094086647034\n", "Step 23 / 215 | Pipeline Eval loss : 0.6470405459403992\n", "Step 24 / 215 | Pipeline Eval loss : 0.2753985524177551\n", "Step 25 / 215 | Pipeline Eval loss : 0.25323957204818726\n", "Step 26 / 215 | Pipeline Eval loss : 1.0650177001953125\n", "Step 27 / 215 | Pipeline Eval loss : 0.647516131401062\n", "Step 28 / 215 | Pipeline Eval loss : 0.7516441941261292\n", "Step 29 / 215 | Pipeline Eval loss : 1.2095987796783447\n", "Step 30 / 215 | Pipeline Eval loss : 0.2861909568309784\n", "Step 31 / 215 | Pipeline Eval loss : 0.7248450517654419\n", "Step 32 / 215 | Pipeline Eval loss : 0.9189347624778748\n", "Step 33 / 215 | Pipeline Eval loss : 0.7949596047401428\n", "Step 34 / 215 | Pipeline Eval loss : 0.8411239385604858\n", "Step 35 / 215 | Pipeline Eval loss : 0.8913142085075378\n", "Step 36 / 215 | Pipeline Eval loss : 1.1351021528244019\n", "Step 37 / 215 | Pipeline Eval loss : 0.49428656697273254\n", "Step 38 / 215 | Pipeline Eval loss : 0.406154990196228\n", "Step 39 / 215 | Pipeline Eval loss : 1.1077359914779663\n", "Step 40 / 215 | Pipeline Eval loss : 0.5277447700500488\n", "Step 41 / 215 | Pipeline Eval loss : 1.253176212310791\n", "Step 42 / 215 | Pipeline Eval loss : 0.6096060872077942\n", "Step 43 / 215 | Pipeline Eval loss : 0.23254646360874176\n", "Step 44 / 215 | Pipeline Eval loss : 0.6691218018531799\n", "Step 45 / 215 | Pipeline Eval loss : 0.39019834995269775\n", "Step 46 / 215 | Pipeline Eval loss : 0.8474738597869873\n", "Step 47 / 215 | Pipeline Eval loss : 0.7466152906417847\n", "Step 48 / 215 | Pipeline Eval loss : 0.35886526107788086\n", "Step 49 / 215 | Pipeline Eval loss : 0.6998598575592041\n", "Step 50 / 215 | Pipeline Eval loss : 0.5308201909065247\n", "Step 51 / 215 | Pipeline Eval loss : 0.6972887516021729\n", "Step 52 / 215 | Pipeline Eval loss : 0.3023456335067749\n", "Step 53 / 215 | Pipeline Eval loss : 0.44797074794769287\n", "Step 54 / 215 | Pipeline Eval loss : 0.5744849443435669\n", "Step 55 / 215 | Pipeline Eval 
loss : 0.5571881532669067\n", "Step 56 / 215 | Pipeline Eval loss : 1.074060082435608\n", "Step 57 / 215 | Pipeline Eval loss : 0.39769044518470764\n", "Step 58 / 215 | Pipeline Eval loss : 0.8281753659248352\n", "Step 59 / 215 | Pipeline Eval loss : 0.8963945508003235\n", "Step 60 / 215 | Pipeline Eval loss : 1.0808473825454712\n", "Step 61 / 215 | Pipeline Eval loss : 0.7298275828361511\n", "Step 62 / 215 | Pipeline Eval loss : 0.4039141535758972\n", "Step 63 / 215 | Pipeline Eval loss : 0.8202739953994751\n", "Step 64 / 215 | Pipeline Eval loss : 0.750015377998352\n", "Step 65 / 215 | Pipeline Eval loss : 0.6557111740112305\n", "Step 66 / 215 | Pipeline Eval loss : 0.9604915976524353\n", "Step 67 / 215 | Pipeline Eval loss : 1.1664859056472778\n", "Step 68 / 215 | Pipeline Eval loss : 1.3001796007156372\n", "Step 69 / 215 | Pipeline Eval loss : 0.8946617245674133\n", "Step 70 / 215 | Pipeline Eval loss : 0.77407306432724\n", "Step 71 / 215 | Pipeline Eval loss : 0.9749297499656677\n", "Step 72 / 215 | Pipeline Eval loss : 0.9847258925437927\n", "Step 73 / 215 | Pipeline Eval loss : 1.2171512842178345\n", "Step 74 / 215 | Pipeline Eval loss : 0.5846810340881348\n", "Step 75 / 215 | Pipeline Eval loss : 1.3362019062042236\n", "Step 76 / 215 | Pipeline Eval loss : 0.8401086330413818\n", "Step 77 / 215 | Pipeline Eval loss : 0.7154609560966492\n", "Step 78 / 215 | Pipeline Eval loss : 0.6830504536628723\n", "Step 79 / 215 | Pipeline Eval loss : 0.9784758687019348\n", "Step 80 / 215 | Pipeline Eval loss : 1.2047427892684937\n", "Step 81 / 215 | Pipeline Eval loss : 0.34852132201194763\n", "Step 82 / 215 | Pipeline Eval loss : 1.2795438766479492\n", "Step 83 / 215 | Pipeline Eval loss : 0.5367223024368286\n", "Step 84 / 215 | Pipeline Eval loss : 0.4773041605949402\n", "Step 85 / 215 | Pipeline Eval loss : 0.9025859236717224\n", "Step 86 / 215 | Pipeline Eval loss : 0.560878336429596\n", "Step 87 / 215 | Pipeline Eval loss : 0.7861832976341248\n", "Step 88 / 215 | Pipeline Eval loss : 1.2031795978546143\n", "Step 89 / 215 | Pipeline Eval loss : 0.5929140448570251\n", "Step 90 / 215 | Pipeline Eval loss : 0.7738965749740601\n", "Step 91 / 215 | Pipeline Eval loss : 0.49037113785743713\n", "Step 92 / 215 | Pipeline Eval loss : 0.4643455147743225\n", "Step 93 / 215 | Pipeline Eval loss : 0.3992091715335846\n", "Step 94 / 215 | Pipeline Eval loss : 0.8561199307441711\n", "Step 95 / 215 | Pipeline Eval loss : 0.7128015160560608\n", "Step 96 / 215 | Pipeline Eval loss : 0.5250660181045532\n", "Step 97 / 215 | Pipeline Eval loss : 0.9905263185501099\n", "Step 98 / 215 | Pipeline Eval loss : 0.49944809079170227\n", "Step 99 / 215 | Pipeline Eval loss : 0.6214860081672668\n", "Step 100 / 215 | Pipeline Eval loss : 0.33997011184692383\n", "Step 101 / 215 | Pipeline Eval loss : 0.3499006927013397\n", "Step 102 / 215 | Pipeline Eval loss : 0.9855827689170837\n", "Step 103 / 215 | Pipeline Eval loss : 0.6351476907730103\n", "Step 104 / 215 | Pipeline Eval loss : 0.6422473788261414\n", "Step 105 / 215 | Pipeline Eval loss : 0.8129053115844727\n", "Step 106 / 215 | Pipeline Eval loss : 0.6968633532524109\n", "Step 107 / 215 | Pipeline Eval loss : 0.1873641163110733\n", "Step 108 / 215 | Pipeline Eval loss : 0.3135111629962921\n", "Step 109 / 215 | Pipeline Eval loss : 0.2839707136154175\n", "Step 110 / 215 | Pipeline Eval loss : 0.5901329517364502\n", "Step 111 / 215 | Pipeline Eval loss : 0.1304977387189865\n", "Step 112 / 215 | Pipeline Eval loss : 0.24329698085784912\n", "Step 113 / 215 | Pipeline Eval 
loss : 0.7134706377983093\n", "Step 114 / 215 | Pipeline Eval loss : 1.1387730836868286\n", "Step 115 / 215 | Pipeline Eval loss : 0.7494038343429565\n", "Step 116 / 215 | Pipeline Eval loss : 0.5017316341400146\n", "Step 117 / 215 | Pipeline Eval loss : 0.523347020149231\n", "Step 118 / 215 | Pipeline Eval loss : 0.5053303241729736\n", "Step 119 / 215 | Pipeline Eval loss : 0.2911222577095032\n", "Step 120 / 215 | Pipeline Eval loss : 0.3454724848270416\n", "Step 121 / 215 | Pipeline Eval loss : 0.7484281063079834\n", "Step 122 / 215 | Pipeline Eval loss : 0.25852513313293457\n", "Step 123 / 215 | Pipeline Eval loss : 0.2880212664604187\n", "Step 124 / 215 | Pipeline Eval loss : 0.250782310962677\n", "Step 125 / 215 | Pipeline Eval loss : 0.097069650888443\n", "Step 126 / 215 | Pipeline Eval loss : 0.14853698015213013\n", "Step 127 / 215 | Pipeline Eval loss : 0.3724161386489868\n", "Step 128 / 215 | Pipeline Eval loss : 0.273474782705307\n", "Step 129 / 215 | Pipeline Eval loss : 0.3158494830131531\n", "Step 130 / 215 | Pipeline Eval loss : 0.2516258955001831\n", "Step 131 / 215 | Pipeline Eval loss : 0.690243661403656\n", "Step 132 / 215 | Pipeline Eval loss : 0.37732306122779846\n", "Step 133 / 215 | Pipeline Eval loss : 0.23337556421756744\n", "Step 134 / 215 | Pipeline Eval loss : 0.09232647716999054\n", "Step 135 / 215 | Pipeline Eval loss : 0.32356148958206177\n", "Step 136 / 215 | Pipeline Eval loss : 0.309037446975708\n", "Step 137 / 215 | Pipeline Eval loss : 0.2778354585170746\n", "Step 138 / 215 | Pipeline Eval loss : 0.31219035387039185\n", "Step 139 / 215 | Pipeline Eval loss : 0.25231048464775085\n", "Step 140 / 215 | Pipeline Eval loss : 0.08691377937793732\n", "Step 141 / 215 | Pipeline Eval loss : 0.154060959815979\n", "Step 142 / 215 | Pipeline Eval loss : 0.5336939692497253\n", "Step 143 / 215 | Pipeline Eval loss : 0.15721698105335236\n", "Step 144 / 215 | Pipeline Eval loss : 0.18838104605674744\n", "Step 145 / 215 | Pipeline Eval loss : 0.39432621002197266\n", "Step 146 / 215 | Pipeline Eval loss : 0.15996097028255463\n", "Step 147 / 215 | Pipeline Eval loss : 0.7417457103729248\n", "Step 148 / 215 | Pipeline Eval loss : 0.31830883026123047\n", "Step 149 / 215 | Pipeline Eval loss : 0.22150768339633942\n", "Step 150 / 215 | Pipeline Eval loss : 0.10236559808254242\n", "Step 151 / 215 | Pipeline Eval loss : 0.8729762434959412\n", "Step 152 / 215 | Pipeline Eval loss : 0.4436299204826355\n", "Step 153 / 215 | Pipeline Eval loss : 0.5209086537361145\n", "Step 154 / 215 | Pipeline Eval loss : 0.8735697865486145\n", "Step 155 / 215 | Pipeline Eval loss : 0.06759092956781387\n", "Step 156 / 215 | Pipeline Eval loss : 0.540948748588562\n", "Step 157 / 215 | Pipeline Eval loss : 0.7396793365478516\n", "Step 158 / 215 | Pipeline Eval loss : 0.2856099605560303\n", "Step 159 / 215 | Pipeline Eval loss : 0.23114192485809326\n", "Step 160 / 215 | Pipeline Eval loss : 0.5006106495857239\n", "Step 161 / 215 | Pipeline Eval loss : 0.5642840266227722\n", "Step 162 / 215 | Pipeline Eval loss : 0.6494811177253723\n", "Step 163 / 215 | Pipeline Eval loss : 0.8000176548957825\n", "Step 164 / 215 | Pipeline Eval loss : 0.2957736551761627\n", "Step 165 / 215 | Pipeline Eval loss : 0.6262645125389099\n", "Step 166 / 215 | Pipeline Eval loss : 0.3061993420124054\n", "Step 167 / 215 | Pipeline Eval loss : 0.4420517683029175\n", "Step 168 / 215 | Pipeline Eval loss : 0.7103923559188843\n", "Step 169 / 215 | Pipeline Eval loss : 0.6877099871635437\n", "Step 170 / 215 | Pipeline Eval loss : 
0.503110408782959\n", "Step 171 / 215 | Pipeline Eval loss : 0.37333595752716064\n", "Step 172 / 215 | Pipeline Eval loss : 0.6431294679641724\n", "Step 173 / 215 | Pipeline Eval loss : 0.7391048669815063\n", "Step 174 / 215 | Pipeline Eval loss : 0.48019126057624817\n", "Step 175 / 215 | Pipeline Eval loss : 0.5768507122993469\n", "Step 176 / 215 | Pipeline Eval loss : 0.9165944457054138\n", "Step 177 / 215 | Pipeline Eval loss : 1.2352447509765625\n", "Step 178 / 215 | Pipeline Eval loss : 0.5884535312652588\n", "Step 179 / 215 | Pipeline Eval loss : 0.48984605073928833\n", "Step 180 / 215 | Pipeline Eval loss : 0.42352521419525146\n", "Step 181 / 215 | Pipeline Eval loss : 0.5375940799713135\n", "Step 182 / 215 | Pipeline Eval loss : 0.6948469281196594\n", "Step 183 / 215 | Pipeline Eval loss : 0.34012457728385925\n", "Step 184 / 215 | Pipeline Eval loss : 0.3592049479484558\n", "Step 185 / 215 | Pipeline Eval loss : 0.3635694980621338\n", "Step 186 / 215 | Pipeline Eval loss : 0.6891028881072998\n", "Step 187 / 215 | Pipeline Eval loss : 0.573372483253479\n", "Step 188 / 215 | Pipeline Eval loss : 0.9922007322311401\n", "Step 189 / 215 | Pipeline Eval loss : 0.6043763160705566\n", "Step 190 / 215 | Pipeline Eval loss : 0.703070342540741\n", "Step 191 / 215 | Pipeline Eval loss : 0.670422375202179\n", "Step 192 / 215 | Pipeline Eval loss : 0.8413001298904419\n", "Step 193 / 215 | Pipeline Eval loss : 0.3922325074672699\n", "Step 194 / 215 | Pipeline Eval loss : 0.7669886350631714\n", "Step 195 / 215 | Pipeline Eval loss : 0.7236793637275696\n", "Step 196 / 215 | Pipeline Eval loss : 0.8872082829475403\n", "Step 197 / 215 | Pipeline Eval loss : 1.0730468034744263\n", "Step 198 / 215 | Pipeline Eval loss : 0.8801345825195312\n", "Step 199 / 215 | Pipeline Eval loss : 0.4431702494621277\n", "Step 200 / 215 | Pipeline Eval loss : 0.5681254863739014\n", "Step 201 / 215 | Pipeline Eval loss : 0.9366014003753662\n", "Step 202 / 215 | Pipeline Eval loss : 0.9687889814376831\n", "Step 203 / 215 | Pipeline Eval loss : 0.7217267155647278\n", "Step 204 / 215 | Pipeline Eval loss : 0.6611248254776001\n", "Step 205 / 215 | Pipeline Eval loss : 0.4820144474506378\n", "Step 206 / 215 | Pipeline Eval loss : 0.8098682761192322\n", "Step 207 / 215 | Pipeline Eval loss : 0.732722282409668\n", "Step 208 / 215 | Pipeline Eval loss : 0.0728195533156395\n", "Step 209 / 215 | Pipeline Eval loss : 0.7701850533485413\n", "Step 210 / 215 | Pipeline Eval loss : 0.6503212451934814\n", "Step 211 / 215 | Pipeline Eval loss : 1.1696949005126953\n", "Step 212 / 215 | Pipeline Eval loss : 0.5338002443313599\n", "Step 213 / 215 | Pipeline Eval loss : 0.9348071813583374\n", "Step 214 / 215 | Pipeline Eval loss : 0.7644015550613403\n", "Step 215 / 215 | Pipeline Eval loss : 2.028141975402832\n", "Pipeline Accuracy : 0.7580268534734385\n" ] }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAiQAAAHHCAYAAACPy0PBAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAbrpJREFUeJzt3XdYU9f/B/B3WGHJUqYDEVwo7lYRt1ScdbXuCm4tTpzUPXHUvWvrRqt11VWVqmhV3HujolRZLqSIsnJ+f/gjXyNoCCbeiO9XnzxPc+7JuZ+Ea/hw1pUJIQSIiIiIJGQgdQBERERETEiIiIhIckxIiIiISHJMSIiIiEhyTEiIiIhIckxIiIiISHJMSIiIiEhyTEiIiIhIckxIiIiISHJMSLQsMjISjRo1grW1NWQyGXbs2KHV9u/fvw+ZTIbVq1drtd3PWb169VCvXj2ttZecnIyePXvCyckJMpkMgwcP1lrbnwteZ9onk8kwYcIEnbSt7X8D6qxevRoymQz379/X6Xne/cw+1XlJGvkyIbl79y769OmDEiVKwNTUFFZWVvDx8cH8+fPx6tUrnZ7b398fV65cwdSpU7Fu3TpUq1ZNp+f7lAICAiCTyWBlZZXj5xgZGQmZTAaZTIaff/5Z4/ZjYmIwYcIEXLx4UQvR5t20adOwevVq9OvXD+vWrcMPP/yg0/OlpaVh/vz5qFy5MqysrGBjY4Ny5cqhd+/euHnzpk7PvWHDBsybN0+n59ClvXv36uyX/JcoKxHNehgaGqJYsWJo3bq15P8u6Qsg8pndu3cLMzMzYWNjIwYOHCh++eUXsWjRItGhQwdhbGwsevXqpbNzp6SkCABi9OjROjuHQqEQr169EhkZGTo7x/v4+/sLIyMjYWhoKDZt2pTt+Pjx44WpqakAIGbNmqVx+2fOnBEAxKpVqzR6XWpqqkhNTdX4fO9TvXp14ePjo7X21GnevLkwNDQUXbp0EYsXLxbz5s0Tffv2FUWKFNH4s9BUs2bNhKura7ZyKa8zTQQGBorP5WsMgBg/frxO2tbWv4GoqCgBQHTs2FGsW7dOrF69WowcOVJYWVkJuVwuLly4IIQQIiMjQ7x69UooFIqPPueHvPuZfarzkjSMJMyFtC4qKgodOnSAq6srDh06BGdnZ+WxwMBA3LlzB3v27NHZ+R8/fgwAsLGx0dk5ZDIZTE1Ndda+OnK5HD4+Pti4cSPatWuncmzDhg1o1qwZtm7d+kliSUlJgbm5OUxMTLTabkJCAjw9PbXWXkZGBhQKRY5xnjlzBrt378bUqVPx008/qRxbtGgREhMTtRaHJqS+zkgz2v43UKVKFXTp0kX53MfHB99++y2WLl2K5cuXw9DQEIaGhlo9Z25IdV76NPLVkM3MmTORnJyM3377TSUZyeLh4YFBgwYpn2dkZGDy5Mlwd3eHXC5H8eLF8dNPPyE1NVXldcWLF0fz5s1x7NgxfP311zA1NUWJEiWwdu1aZZ0JEybA1dUVADB8+HDIZDIUL14cwJuhjqz/f9uECRMgk8lUysLCwlCrVi3Y2NjA0tISpUuXVvlF9b6x/UOHDqF27dqwsLCAjY0NWrZsiRs3buR4vjt37iAgIAA2NjawtrZGt27dkJKS8v4P9h2dOnXCX3/9pfLL8syZM4iMjESnTp2y1X/27BmGDRsGLy8vWFpawsrKCk2aNMGlS5eUdcLDw/HVV18BALp166bsMs56n/Xq1UP58uVx7tw51KlTB+bm5srP5d3xc39/f5iammZ7/35+frC1tUVMTEyO7ys8PBwymQxRUVHYs2ePMoas8eqEhAT06NEDjo6OMDU1RcWKFbFmzRqVNrJ+Pj///DPmzZunvLauX7+e4znv3r0L4M0X/rsMDQ1RsGBBlbJHjx6he/fucHR0hFwuR7ly5bBy5coc38fmzZsxdepUFClSBKampmjYsCHu3LmjrFevXj3s2bMHDx48UL7XrOs0p+ssICAAlpaWiI6ORvPmzWFpaYnChQtj8eLFAIArV66gQYMGsLCwgKurKzZs2JDtPSUmJmLw4MEoWrQo5HI5PDw8MGPGDCgUihw/w19++UX5GX711Vc4c+aMSjxZ5357mOFD/vzzTzRr1gwuLi6Qy+Vwd3fH5MmTkZmZqVIv63q7fv066tevD3NzcxQuXBgzZ85UqZeWloZx48ahatWqsLa2hoWFBWrXro3Dhw9/MI7Dhw9DJpNh+/bt2Y5t2LABMpkMERERAIC4uDh069YNRYoUgVwuh7OzM1q2bKkyjyKnOSQLFy5EuXLlYG5uDltbW1SrVi3Hn0luNGjQAMCbP/qAnOdyZH1PHjhwAJUqVYKpqSk8PT2xbdu2bO3l5jrIyYfO+6Hv5489L30a+aqHZNeuXShRogRq1qyZq/o9e/bEmjVr8N1332Ho0KE4deoUQkJCcOPGjWxfFHfu3MF3332HHj16wN/fHytXrkRAQACqVq2KcuXKoU2bNrCxscGQIUPQsWNHNG3aFJaWlhrFf+3aNTRv3hwVKlTApEmTIJfLcefOHRw/fvyDr/v777/RpEkTlChRAhMmTMCrV6+wcOFC+Pj44Pz589mSoXbt2sHNzQ0hISE4f/48fv31Vzg4OGDGjBm5irNNmzbo27cvtm3bhu7duwN48yVapkwZVKlSJVv9e/fuYceOHfj+++/h5uaG+Ph4LF++HHXr1sX169fh4uKCsmXLYtKkSRg3bhx69+6N2rVrA4DKz/Lp06do0qQJOnTogC5dusDR0THH+ObPn49Dhw7B398fERERMDQ0xPLly3HgwAGsW7cOLi4uOb6ubNmyWLduHYYMGYIiRYpg6NChAAB7e3u8evUK9erVw507d9C/f3+4ubnhjz/+QEBAABITE1USXQBYtWoVXr9+jd69e0Mul8POzi7Hc2YlsaGhofDx8YGR0fv/ScbHx6NGjRqQyWTo378/7O3t8ddff6FHjx5ISkrKNvl2+vTpMDAwwLBhw/DixQvMnDkTnTt3xqlTpwAAo0ePxosXL/Dw4UPMnTsXANRes5mZmWjSpAnq1KmDmTNnIjQ0FP3794eFhQVGjx6Nzp07o02bNli2bBm6du0Kb29vuLm5AXjTo1W3bl08evQIffr0QbFixXDixAkEBwcjNjY221yWDRs24L///kOfPn0gk8kwc+ZMtGnTBvfu3YOxsTH69OmDmJgYhIWFYd26dR+MO8vq1athaWmJoKAgWFpa4tChQxg3bhySkpIwa9YslbrPnz9H48aN0aZNG7Rr1w5btmzByJEj4eXlhSZNmgAAkpKS8Ouvv6Jjx47o1asX/vvvP/z222/w8/PD6dOnUalSpRzjqFevHooWLYrQ0FC0bt1a5VhoaCjc3d3h7e0NAGjbti2uXbuGAQMGoHjx4khISEBYWBiio6Nz/EMHAFasWIGBAwfiu+++w6BBg/D69WtcvnwZp06dyvGPBnWyEud3E+R3RUZGon379ujbty/8/f2xatUqfP/999i3bx+++eYbAJpfB7mh7vtZV+clLZN6zEhbXrx4IQCIli1b5qr+xY
sXBQDRs2dPlfJhw4YJAOLQoUPKMldXVwFAHD16VFmWkJAg5HK5GDp0qLIsa/z13fkT/v7+OY7Tjx8/XmX8e+7cuQKAePz48XvjzjrH23MLKlWqJBwcHMTTp0+VZZcuXRIGBgaia9eu2c7XvXt3lTZbt24tChYs+N5zvv0+LCwshBBCfPfdd6Jhw4ZCCCEyMzOFk5OTmDhxYo6fwevXr0VmZma29yGXy8WkSZOUZR+aQ1K3bl0BQCxbtizHY3Xr1lUp279/vwAgpkyZIu7duycsLS1Fq1at1L5HId78vJs1a6ZSNm/ePAFArF+/XlmWlpYmvL29haWlpUhKSlK+LwDCyspKJCQkqD2XQqFQvjdHR0fRsWNHsXjxYvHgwYNsdXv06CGcnZ3FkydPVMo7dOggrK2tRUpKihBCiMOHDwsAomzZsirzCubPny8AiCtXrijL3jeHJKfrzN/fXwAQ06ZNU5Y9f/5cmJmZCZlMJn7//Xdl+c2bN7ON/0+ePFlYWFiI27dvq5xr1KhRwtDQUERHR6ucu2DBguLZs2fKen/++acAIHbt2qUs03QOSdZn9LY+ffoIc3Nz8fr1a2VZ1s9k7dq1yrLU1FTh5OQk2rZtqyzLyMjINnfj+fPnwtHRMdu/s3c/j+DgYCGXy0ViYqKyLCEhQRgZGSnrPX/+PFdzst79N9CyZUtRrly5D74mJ1mf/cSJE8Xjx49FXFycCA8PF5UrVxYAxNatW4UQQqxatUoAEFFRUcrXZn1PZtUR4s33srOzs6hcubKyLLfXgRDZP7MPnVfd97Mm5yVp5Jshm6SkJABAgQIFclV/7969AICgoCCV8qy/it+da+Lp6an8qx1481dz6dKlce/evTzH/K6suSd//vlnrrsQY2NjcfHiRQQEBKj8FV6hQgV88803yvf5tr59+6o8r127Np4+far8DHOjU6dOCA8PR1xcHA4dOoS4uLj3/uUll8thYPDmUsvMzMTTp0+Vw1Hnz5/P9Tnlcjm6deuWq7qNGjVCnz59MGnSJLRp0wampqZYvnx5rs/1rr1798LJyQkdO3ZUlhkbG2PgwIFITk7GkSNHVOq3bdsW9vb2atuVyWTYv38/pkyZAltbW2zcuBGBgYFwdXVF+/btlcNiQghs3boVLVq0gBACT548UT78/Pzw4sWLbJ9lt27dVOYWZF2/H3vN9uzZU/n/NjY2KF26NCwsLFTmFJUuXRo2NjYq5/rjjz9Qu3Zt2NraqsTv6+uLzMxMHD16VOU87du3h62trVbjNzMzU/7/f//9hydPnqB27dpISUnJtqLJ0tJSZR6FiYkJvv76a5XzGxoaKj9jhUKBZ8+eISMjA9WqVVN7bXft2hWpqanYsmWLsmzTpk3IyMhQntfMzAwmJiYIDw/H8+fPc/0+bWxs8PDhQ5UhLk2MHz8e9vb2cHJyQr169XD37l3MmDEDbdq0+eDrXFxcVHp8rKys0LVrV1y4cAFxcXEANL8OciM338+6OC9pV75JSKysrAC8+ZLJjQcPHsDAwAAeHh4q5U5OTrCxscGDBw9UyosVK5atDVtbW42+JNRp3749fHx80LNnTzg6OqJDhw7YvHnzB5OTrDhLly6d7VjZsmXx5MkTvHz5UqX83feS9aWvyXtp2rQpChQogE2bNiE0NBRfffVVts8yi0KhwNy5c1GyZEnI5XIUKlQI9vb2uHz5Ml68eJHrcxYuXFijyXs///wz7OzscPHiRSxYsAAODg65fu27Hjx4gJIlSyoTqyxly5ZVHn9b1jBFbsjlcowePRo3btxATEwMNm7ciBo1amDz5s3o378/gDcTphMTE/HLL7/A3t5e5ZGVpCUkJKi0q42f87tMTU2zJVrW1tYoUqRItvkb1tbWKueKjIzEvn37ssXv6+v7yeK/du0aWrduDWtra1hZWcHe3l75y//dazGn95TTv/k1a9agQoUKMDU1RcGCBWFvb489e/aovbbLlCmDr776CqGhocqy0NBQ1KhRQ/lvSS6XY8aMGfjrr7/g6OioHCrL+uX+PiNHjoSlpSW+/vprlCxZEoGBgWqHft/Wu3dvhIWF4eDBgzh37hwSEhIwYsQIta/z8PDI9pmVKlUKAJTzPjS9DnIjN9/PujgvaVe+mUNiZWUFFxcXXL16VaPXqZsEl+V9M7uFEHk+x7sT6czMzHD06FEcPnwYe/bswb59+7Bp0yY0aNAABw4c0Nrs8o95L1nkcjnatGmDNWvW4N69ex/cC2LatGkYO3YsunfvjsmTJ8POzg4GBgYYPHiwRpPJ3v7rNjcuXLig/JK5cuWKSu+GrmkaaxZnZ2d06NABbdu2Rbly5bB582asXr1a+Tl16dIF/v7+Ob62QoUKKs+18XN+1/vazM25FAoFvvnmm/f+Ysv6xaVJm5pITExE3bp1YWVlhUmTJsHd3R2mpqY4f/48Ro4cme1azM35169fj4CAALRq1QrDhw+Hg4MDDA0NERISopx38SFdu3bFoEGD8PDhQ6SmpuLkyZNYtGiRSp3BgwejRYsW2LFjB/bv34+xY8ciJCQEhw4dQuXKlXNst2zZsrh16xZ2796Nffv2YevWrViyZAnGjRuHiRMnqo2rZMmSyl/U2qbpdZAburj+6NPLNwkJADRv3hy//PILIiIilBPC3sfV1RUKhQKRkZHKv3KBNxMHExMTlZMNtcHW1jbH5Zvv/lUNAAYGBmjYsCEaNmyIOXPmYNq0aRg9ejQOHz6c4xdEVpy3bt3KduzmzZsoVKgQLCwsPv5N5KBTp05YuXIlDAwM0KFDh/fW27JlC+rXr4/ffvtNpTwxMRGFChVSPs9tcpgbL1++RLdu3eDp6YmaNWti5syZaN26tXIlj6ZcXV1x+fJlKBQKlV6SrG5+bV4vwJvhoAoVKiAyMhJPnjyBvb09ChQogMzMTK3+otDmZ66Ou7s7kpOTJYs/PDwcT58+xbZt21CnTh1ledbKkbzYsmULSpQogW3btqnEMn78+Fy9vkOHDggKCsLGjRvx6tUrGBsbo3379tnqubu7Y+jQoRg6dCgiIyNRqVIlzJ49G+vXr39v2xYWFmjfvj3at2+PtLQ0tGnTBlOnTkVwcLDOlnTfuXMHQgiVz+L27dsAoJyAq4vrIDekOi/lXr4ZsgGAESNGwMLCAj179kR8fHy243fv3sX8+fMBvBlyAJBtZvWcOXMAAM2aNdNaXO7u7njx4gUuX76sLIuNjc22kufZs2fZXps1S//dpchZnJ2dUalSJaxZs0Yl6bl69SoOHDigfJ+6UL9+fUyePBmLFi2Ck5PTe+sZGhpm+6v2jz/+wKNHj1TKshInbey9MXLkSERHR2PNmjWYM2cOihcvDn9///d+juo0bdoUcXFx2LRpk7IsIyMDCxcuhKWlJerWrZundiMjIxEdHZ2tPDExEREREbC1tYW9vT0MDQ3Rtm1bbN26NcdewKw9cDRlYWGh0bDZx2jXrh0iIiKwf//+bMcSExORkZGhcZuaXDNZf0W/fS2mpaVhyZIlGp/3Q22eOnVKuWRXnUKFCqFJkyZYv349Q
kND0bhxY5UkPSUlBa9fv1Z5jbu7OwoUKPDBa/np06cqz01MTODp6QkhBNLT03MVW17ExMSofK8lJSVh7dq1qFSpkvI7QhfXQW5IdV7KvXzVQ+Lu7o4NGzagffv2KFu2LLp27Yry5csjLS0NJ06cUC7TBICKFSvC398fv/zyi7Ir9/Tp01izZg1atWqF+vXray2uDh06YOTIkWjdujUGDhyIlJQULF26FKVKlVKZ+DZp0iQcPXoUzZo1g6urKxISErBkyRIUKVIEtWrVem/7s2bNQpMmTeDt7Y0ePXool/1aW1vrdFttAwMDjBkzRm295s2bY9KkSejWrRtq1qyJK1euIDQ0FCVKlFCp5+7uDhsbGyxbtgwFChSAhYUFqlevrtF8DODNnixLlizB+PHjlcuQV61ahXr16mHs2LHZ9pLIjd69e2P58uUICAjAuXPnULx4cWzZsgXHjx/HvHnzcj2Z+l2XLl1Cp06d0KRJE9SuXRt2dnZ49OgR1qxZg5iYGMybN0/5S2/69Ok4fPgwqlevjl69esHT0xPPnj3D+fPn8ffff+eY0KpTtWpVbNq0CUFBQfjqq69gaWmJFi1a5Om9qDN8+HDs3LkTzZs3Vy7JfPnyJa5cuYItW7bg/v37Kr+Mcxs/AAwcOBB+fn4wNDR8b29dzZo1YWtrC39/fwwcOBAymQzr1q37qCGs5s2bY9u2bWjdujWaNWuGqKgoLFu2DJ6enkhOTs5VG127dsV3330HAJg8ebLKsdu3b6Nhw4Zo164dPD09YWRkhO3btyM+Pv6DvZKNGjWCk5MTfHx84OjoiBs3bmDRokVo1qxZnq/V3ChVqhR69OiBM2fOwNHREStXrkR8fDxWrVqlrKOL6yA3pDovaUCClT06d/v2bdGrVy9RvHhxYWJiIgoUKCB8fHzEwoULVZb2paeni4kTJwo3NzdhbGwsihYtKoKDg1XqCJHzMlAhsi+1e9+yXyGEOHDggChfvrwwMTERpUuXFuvXr8+27PfgwYOiZcuWwsXFRZiYmAgXFxfRsWNHlWVqOS3HFEKIv//+W/j4+AgzMzNhZWUlWrRoIa5fv65SJ+t87y4rzmkpXU7eXvb7Pu9b9jt06FDh7OwszMzMhI+Pj4iIiMhxue6ff/4pPD09hZGRkcr7rFu37nuXMb7dTlJSknB1dRVVqlQR6enpKvWGDBkiDAwMRERExAffw/t+3vHx8aJbt26iUKFCwsTERHh5eWX7OXzoGshJfHy8mD59uqhbt65wdnYWRkZGwtbWVjRo0EBs2bIlx/qBgYGiaNGiwtjYWDg5OYmGDRuKX375RVkna9nvH3/8kWNsb8ecnJwsOnXqJGxsbAQA5RLg9y37zenn/76fTU6f43///SeCg4OFh4eHMDExEYUKFRI1a9YUP//8s0hLS1M5d06fIXLYSnzAgAHC3t5eyGQytUuAjx8/LmrUqCHMzMyEi4uLGDFihHKJ+OHDh9W+p3eX8CsUCjFt2jTh6uoq5HK5qFy5sti9e3eOS/3fjT1LamqqsLW1FdbW1uLVq1cqx548eSICAwNFmTJlhIWFhbC2thbVq1cXmzdvVqn37r+l5cuXizp16oiCBQsKuVwu3N3dxfDhw8WLFy8++Pnk9vp93/LbZs2aif3794sKFSoIuVwuypQpk+06FCJ314EQuV/2m5vvZ03OS9KQCfERfx4QEdFHycjIgIuLC1q0aJFtntXnpHjx4ihfvjx2794tdSj0mcpXc0iIiD43O3bswOPHj9G1a1epQyGSVL6aQ0JE9Lk4deoULl++jMmTJ6Ny5cp5nhhNlF+wh4SISAJLly5Fv3794ODgkOON4Ii+NJxDQkRERJJjDwkREVE+FBISgq+++goFChSAg4MDWrVqlW0TzXr16kEmk6k83r3fWXR0NJo1awZzc3M4ODhg+PDh2fZtCQ8PR5UqVSCXy+Hh4YHVq1drHC8TEiIionzoyJEjCAwMxMmTJxEWFob09HQ0atQo2/3NevXqhdjYWOXj7b2aMjMz0axZM+V+XmvWrMHq1asxbtw4ZZ2oqCg0a9YM9evXx8WLFzF48GD07Nkzx03oPoRDNkRERF+Ax48fw8HBAUeOHFHePqFevXqoVKlStl3Ls/z1119o3rw5YmJi4OjoCABYtmwZRo4cicePH8PExAQjR47Enj17VHaR7tChAxITE7Fv375cx5cvV9kYmRSWOgTSM44WNlKHQHok/mWi1CGQHslIe6S+0kdKf3JPK+0oChTOdtsAuVwOuVyu9rVZt4mws7NTKQ8NDcX69evh5OSEFi1aYOzYsTA3NwcAREREwMvLS5mMAICfnx/69euHa9euoXLlyoiIiMh2jyA/Pz8MHjxYo/fGIRsiIqLPREhICKytrVUeISEhal+nUCgwePBg+Pj4oHz58sryTp06Yf369Th8+DCCg4Oxbt06dOnSRXk8Li5OJRkBoHweFxf3wTpJSUl49epVrt9bvuwhISIi0iuKTK00ExwcjKCgIJWy3PSOBAYG4urVqzh27JhKee/evZX/7+XlBWdnZzRs2BB3796Fu7u7VmLOLSYkREREuiYUWmkmt8Mzb+vfvz92796No0ePokiRIh+sW716dQDAnTt34O7uDicnJ5w+fVqlTnx8PAAo7+Ds5OSkLHu7jpWVFczMzHIdJ4dsiIiIdE2h0M5DA0II9O/fH9u3b8ehQ4dydef0ixcvAgCcnZ0BAN7e3rhy5QoSEhKUdcLCwmBlZQVPT09lnYMHD6q0ExYWBm9vb43iZUJCRESUDwUGBmL9+vXYsGEDChQogLi4OMTFxSnnddy9exeTJ0/GuXPncP/+fezcuRNdu3ZFnTp1UKFCBQBAo0aN4OnpiR9++AGXLl3C/v37MWbMGAQGBip7avr27Yt79+5hxIgRuHnzJpYsWYLNmzdjyJAhGsWbL5f9cpUNvYurbOhtXGVDb/sUq2zSYq5ppR0Tl3K5riuTyXIsX7VqFQICAvDvv/+iS5cuuHr1Kl6+fImiRYuidevWGDNmDKysrJT1Hzx4gH79+iE8PBwWFhbw9/fH9OnTYWT0v1kf4eHhGDJkCK5fv44iRYpg7NixCAgI0Oi9MSGhLwITEnobExJ62ydJSB5e0Uo7JkW8tNKOPuKQDREREUmOq2yIiIh0TUurbPIzJiRERES6pqV9SPIzDtkQERGR5NhDQkREpGscslGLCQkREZGuabip2ZeIQzZEREQkOfaQEBER6ZjgkI1aTEiIiIh0jUM2ajEhISIi0jX2kKjFOSREREQkOfaQEBER6Ro3RlOLCQkREZGucchGLQ7ZEBERkeTYQ0JERKRrXGWjFhMSIiIiXeOQjVocsiEiIiLJsYeEiIhI1zhkoxYTEiIiIh0Tgst+1eGQDREREUmOPSRERES6xkmtajEhISIi0jXOIVGLCQkREZGusYdELc4hISIiIsmxh4SIiEjXeHM9tZiQEBER6RqHbNTSmyGbf/75B126dIG3tzcePXoEAFi3bh2OHTsmcWRERESka3qRkGzduhV+fn4wMzPDhQsXkJqa
CgB48eIFpk2bJnF0REREH0mh0M4jH9OLhGTKlClYtmwZVqxYAWNjY2W5j48Pzp8/L2FkREREWiAU2nnkY3qRkNy6dQt16tTJVm5tbY3ExMRPHxARERF9UnqRkDg5OeHOnTvZyo8dO4YSJUpIEBEREZEWcchGLb1ISHr16oVBgwbh1KlTkMlkiImJQWhoKIYNG4Z+/fpJHR4REdHHYUKill4s+x01ahQUCgUaNmyIlJQU1KlTB3K5HMOGDcOAAQOkDo+IiIh0TCaEEFIHkSUtLQ137txBcnIyPD09YWlpmad2jEwKazky+tw5WthIHQLpkfiXiVKHQHokI+2Rzs/x6uhqrbRjVidAK+3oI73oIVm/fj3atGkDc3NzeHp6Sh0OERGRduXz4RZt0Is5JEOGDIGDgwM6deqEvXv3IjOTW+wSEVE+wmW/aulFQhIbG4vff/8dMpkM7dq1g7OzMwIDA3HixAmpQyMiIqJPQC8SEiMjIzRv3hyhoaFISEjA3Llzcf/+fdSvXx/u7u5Sh0dERPRxuMpGLb2YQ/I2c3Nz+Pn54fnz53jw4AFu3LghdUhEREQfJ58Pt2iDXvSQAEBKSgpCQ0PRtGlTFC5cGPPmzUPr1q1x7do1qUMjIiIiHdOLHpIOHTpg9+7dMDc3R7t27TB27Fh4e3tLHRYREZF25PPhFm3Qi4TE0NAQmzdvhp+fHwwNDaUOh4iISLs4ZKOWXiQkoaGhUodAREREEpIsIVmwYAF69+4NU1NTLFiw4IN1Bw4c+ImiIiIi0gEO2agl2dbxbm5uOHv2LAoWLAg3N7f31pPJZLh3755GbXPreHoXt46nt3HreHrbJ9k6fs88rbRj1mywVtrRR5L1kERFReX4/0RERPTl0Ytlv5MmTUJKSkq28levXmHSpEkSRERERKRF3DpeLb2426+hoSFiY2Ph4OCgUv706VM4ODhofG8bDtnQuzhkQ2/jkA297ZMM2ez8WSvtmH07TCvt6CO96CERQkAmk2Urv3TpEuzs7CSI6PPXr68/7tw+ieSkuzhxbBe+qlZJ6pBIB6rXrIrVGxfj3PXDePT8GvyaNshWx6NUCazasAg3HpxE5MMz2HNwE1yKOCuP2zsUwoJlIbhw8wgiH57BvvA/0LTFN5/ybdAnxu8HCbCHRC1JExJbW1vY2dlBJpOhVKlSsLOzUz6sra3xzTffoF27dlKG+Fn6/vtv8fOs8Zg8ZQ6+qt4Yly5fx949obC3Lyh1aKRl5uZmuH71FkYPn5LjcdfiRbHjr3W4ExmF75oHwLdWG8z7eRlSX6cq68xfOg0lPNzQrVN/NPRpjb92/Y1lq2ajnFeZT/U26BPi9wPpK0mHbNasWQMhBLp374558+bB2tpaeczExATFixfP046tX/qQzYlju3Dm7CUMGjwGwJuVSvfvncHiJaswc9ZiiaOTxpcwZPPo+TV07zwA+/ceUpYt+W0WMtIzMLBv8Htfd/vfMwgeNglbN+1Sll29exxTJ8zBxnVbdRqzVL7kIRt+P2T3SYZstk/XSjtmrUdppR19JOnGaP7+/gDeLAGuWbMmjI2NpQwnXzA2NkaVKhUwfeYiZZkQAgcPHUONGlUljIw+NZlMhobf1MXSBSsRuuUXlK9QBtEPHmHR3BUqScvZ0xfwbevGOLj/KF68SEKL1o0hl5sg4tgZCaMnXeD3g4Ty+XCLNujFHJK6desqk5HXr18jKSlJ5UG5V6iQHYyMjJAQ/0SlPCHhMZwc7SWKiqRQyL4gLAtYIHBwD4QfPIZObXpj356D+HXdfNSoWU1Zr2+3oTAyMsa1qBOIir+AGXPHo8cPg3A/KlrC6EkX+P1A+kwvto5PSUnBiBEjsHnzZjx9+jTb8Q+tsklNTUVqaqpK2fsmyRJ9SQwM3vwb2P/XYaxYuhYAcO3qTVT7uhJ+6N4eJ0+cBQAMHz0AVtYF0L5ldzx7lgi/pg2wbNVstGnaFTevR0oWP1G+wp1a1dKLHpLhw4fj0KFDWLp0KeRyOX799VdMnDgRLi4uWLt27QdfGxISAmtra5WHUPz3iSLXP0+ePENGRgYcHAuplDs42CMu/rFEUZEUnj1NRHp6OiJv3lUpj7x9D4X/f5WNa/Gi6N67M4YOGINjR0/h+tVbmDtzKS5fuIaAnh2lCJt0iN8PElIotPPIx/QiIdm1axeWLFmCtm3bwsjICLVr18aYMWMwbdo0tTfeCw4OxosXL1QeMoMCnyhy/ZOeno7z5y+jQf1ayjKZTIYG9Wvh5MlzEkZGn1p6ejouXbgK95LFVcpLuLvi4b8xAAAzc1MAgEKhOrc9M1MBmUwvvh5Ii/j9QPpML4Zsnj17hhIlSgAArKys8OzZMwBArVq10K9fvw++Vi6XQy6Xq5R96cM1c+evwKrf5uLc+cs4c+YCBg7oBQsLM6xes0nq0EjLzC3M4eZWTPm8mGsRlCtfBs8TXyDmYSyWLliFpStn4+SJczjxz2nU862FbxrXw3ctugEA7tyOQtTdB5gxdzwmj/0Zz58lonGzBqhT3xv+HX6U6m2RDvH7QSLS70Gq9/QiISlRogSioqJQrFgxlClTBps3b8bXX3+NXbt2wcbGRurwPjt//LET9oXsMGHcMDg52ePSpWto1rwLEhKeqH8xfVYqViqHLbtXK59PmDYSALB5ww4MCRyNfXsOYlTQRAwY0guTpgfj3p376NV1MM6cPA8AyMjIwA/t+iJ4fBBWb1wECwtz3I/6F4N//AmHwv6R4i2RjvH7QSL5fLhFG/Ri6/i5c+fC0NAQAwcOxN9//40WLVpACIH09HTMmTMHgwYN0qi9L30fEsruS9iHhHLvS96HhLL7JPuQbByvlXbMOk7USjv6SC96SIYMGaL8f19fX9y8eRPnzp2Dh4cHKlSoIGFkREREWsAeErX0IiF5l6urK1xdXaUOg4iISDu4MZpaepGQLFiwIMdymUwGU1NTeHh4oE6dOjA0NPzEkREREWkBe0jU0ouEZO7cuXj8+DFSUlJga2sLAHj+/DnMzc1haWmJhIQElChRAocPH0bRokUljpaIiIi0TS82Gpg2bRq++uorREZG4unTp3j69Clu376N6tWrY/78+YiOjoaTk5PKXBMiIqLPhhDaeWggJCQEX331FQoUKAAHBwe0atUKt27dUqnz+vVrBAYGomDBgrC0tETbtm0RHx+vUic6OhrNmjWDubk5HBwcMHz4cGRkZKjUCQ8PR5UqVSCXy+Hh4YHVq1dr/BHpRUIyZswYzJ07F+7u7soyDw8P/PzzzwgODkaRIkUwc+ZMHD9+XMIoiYiI8kiCnVqPHDmCwMBAnDx5EmFhYUhPT0ejRo3w8uVLZZ0hQ4Zg165d+OOPP3DkyBHExMSgTZs2yuOZmZlo1qwZ0tLScOLECaxZswarV6/GuHHjlHWioqLQrFkz1K9fHxcvXsTgwYPRs2dP7N+/X6N49WLZr7m5OY4ePYpq1aqplJ85cwZ169ZFSkoK7t+/j/LlyyM5OVlte1z2S+/isl96G5f90ts+ybLfVSO00o5Zt5l5fu3jx4/h4OC
AI0eOoE6dOnjx4gXs7e2xYcMGfPfddwCAmzdvomzZsoiIiECNGjXw119/oXnz5oiJiYGjoyMAYNmyZRg5ciQeP34MExMTjBw5Env27MHVq1eV5+rQoQMSExOxb9++XMenFz0k9evXR58+fXDhwgVl2YULF9CvXz80aNAAAHDlyhW4ublJFSIREVHeaamHJDU1FUlJSSqPd28w+z4vXrwAANjZ2QEAzp07h/T0dPj6+irrlClTBsWKFUNERAQAICIiAl5eXspkBAD8/PyQlJSEa9euKeu83UZWnaw2cksvEpLffvsNdnZ2qFq1qnIr+GrVqsHOzg6//fYbAMDS0hKzZ8+WOFIiIqI8EAqtPHK6oWxISIja0ysUCgwePBg+Pj4oX748ACAuLg4mJibZdkR3dHREXFycss7byUjW8axjH6qTlJSEV69e5foj0otVNk5OTggLC8PNmzdx+/ZtAEDp0qVRunRpZZ369etLFR4REZFeCA4ORlBQkErZu/dzy0lgYCCuXr2KY8eO6Sq0j6YXCUmWEiVKQCaTwd3dHUZGehUaERFRngmFdqZr5nRDWXX69++P3bt34+jRoyhSpIiy3MnJCWlpaUhMTFTpJYmPj4eTk5OyzunTp1Xay1qF83add1fmxMfHw8rKCmZmZrmOUy+GbFJSUtCjRw+Ym5ujXLlyiI6OBgAMGDAA06dPlzg6IiKijyTBKhshBPr374/t27fj0KFD2eZhVq1aFcbGxjh48KCy7NatW4iOjoa3tzcAwNvbG1euXEFCQoKyTlhYGKysrODp6ams83YbWXWy2sgtvUhIgoODcenSJYSHh8PU1FRZ7uvri02beEtsIiIiTQUGBmL9+vXYsGEDChQogLi4OMTFxSnndVhbW6NHjx4ICgrC4cOHce7cOXTr1g3e3t6oUaMGAKBRo0bw9PTEDz/8gEuXLmH//v0YM2YMAgMDlT01ffv2xb179zBixAjcvHkTS5YswebNmzXeO0wvxkV27NiBTZs2oUaNGpDJZMrycuXK4e7duxJGRkREpAUS3Mtm6dKlAIB69eqplK9atQoBAQEA3uyUbmBggLZt2yI1NRV+fn5YsmSJsq6hoSF2796Nfv36wdvbGxYWFvD398ekSZOUddzc3LBnzx4MGTIE8+fPR5EiRfDrr7/Cz89Po3j1IiHJWhv9rpcvX6okKERERJ8lLc0h0URuthkzNTXF4sWLsXjx4vfWcXV1xd69ez/YTr169VS27sgLvRiyqVatGvbs2aN8npWE/PrrrxqPQREREekdCeaQfG70oodk2rRpaNKkCa5fv46MjAzMnz8f169fx4kTJ3DkyBGpwyMiIiId04seklq1auHixYvIyMiAl5cXDhw4AAcHB0RERKBq1apSh0dERPRx2EOill70kACAu7s7VqxYIXUYRERE2if9beP0nqQJiYGBgdpJqzKZLNttjomIiCh/kTQh2b59+3uPRUREYMGCBVDk8y4qIiL6AvB3mVqSJiQtW7bMVnbr1i2MGjUKu3btQufOnVXWOhMREX2WJFj2+7nRi0mtABATE4NevXrBy8sLGRkZuHjxItasWQNXV1epQyMiIiIdkzwhefHiBUaOHAkPDw9cu3YNBw8exK5du5S3RyYiIvrsCYV2HvmYpEM2M2fOxIwZM+Dk5ISNGzfmOIRDRET02eOQjVoykZu9ZXXEwMAAZmZm8PX1haGh4Xvrbdu2TaN2jUwKf2xolM84WthIHQLpkfiXiVKHQHokI+2Rzs+RMqObVtoxH7lKK+3oI0l7SLp27cp71RARUb4nuMpGLUkTktWrV0t5eiIiok+DQzZq6c1OrURERPlWPp+Qqg2Sr7IhIiIiYg8JERGRrnHIRi0mJERERLrGSa1qcciGiIiIJMceEiIiIl3jkI1aTEiIiIh0jats1OKQDREREUmOPSRERES6xiEbtZiQEBER6Ri3jlePQzZEREQkOfaQEBER6RqHbNRiQkJERKRrTEjUYkJCRESka1z2qxbnkBAREZHk2ENCRESkaxyyUYsJCRERkY4JJiRqcciGiIiIJMceEiIiIl1jD4laTEiIiIh0jTu1qsUhGyIiIpIce0iIiIh0jUM2ajEhISIi0jUmJGpxyIaIiIgkxx4SIiIiHROCPSTqMCEhIiLSNQ7ZqMWEhIiISNeYkKjFOSREREQkOfaQ0BfhfuQuqUMgPWLuUlvqEOgLw3vZqMeEhIiISNeYkKjFIRsiIiKSHHtIiIiIdI23slGLCQkREZGOcQ6JehyyISIiIsmxh4SIiEjX2EOiFhMSIiIiXeMcErU4ZENERESSYw8JERGRjnFSq3pMSIiIiHSNQzZqMSEhIiLSMfaQqMc5JERERCQ59pAQERHpGods1GJCQkREpGOCCYlaHLIhIiIiybGHhIiISNfYQ6IWExIiIiId45CNehyyISIiIsmxh4SIiEjX2EOiFhMSIiIiHeOQjXocsiEiItIxodDOQ1NHjx5FixYt4OLiAplMhh07dqgcDwgIgEwmU3k0btxYpc6zZ8/QuXNnWFlZwcbGBj169EBycrJKncuXL6N27dowNTVF0aJFMXPmTI1jZUJCRESUT718+RIVK1bE4sWL31uncePGiI2NVT42btyocrxz5864du0awsLCsHv3bhw9ehS9e/dWHk9KSkKjRo3g6uqKc+fOYdasWZgwYQJ++eUXjWLlkA0REZGOSTVk06RJEzRp0uSDdeRyOZycnHI8duPGDezbtw9nzpxBtWrVAAALFy5E06ZN8fPPP8PFxQWhoaFIS0vDypUrYWJignLlyuHixYuYM2eOSuKiDntIiIiIdE3ItPPQgfDwcDg4OKB06dLo168fnj59qjwWEREBGxsbZTICAL6+vjAwMMCpU6eUderUqQMTExNlHT8/P9y6dQvPnz/PdRzsISEiIvpMpKamIjU1VaVMLpdDLpfnqb3GjRujTZs2cHNzw927d/HTTz+hSZMmiIiIgKGhIeLi4uDg4KDyGiMjI9jZ2SEuLg4AEBcXBzc3N5U6jo6OymO2tra5ioU9JERERDqmrUmtISEhsLa2VnmEhITkOa4OHTrg22+/hZeXF1q1aoXdu3fjzJkzCA8P196bzyX2kBAREemYUGhnuCU4OBhBQUEqZXntHclJiRIlUKhQIdy5cwcNGzaEk5MTEhISVOpkZGTg2bNnynknTk5OiI+PV6mT9fx9c1NykquE5PLly7lusEKFCrmuS0RERLn3McMzufHw4UM8ffoUzs7OAABvb28kJibi3LlzqFq1KgDg0KFDUCgUqF69urLO6NGjkZ6eDmNjYwBAWFgYSpcunevhGiCXCUmlSpUgk8kghMjxeNYxmUyGzMzMXJ+ciIjoSyDVKpvk5GTcuXNH+TwqKgoXL16EnZ0d7OzsMHHiRLRt2xZOTk64e/cuRowYAQ8PD/j5+QEAypYti8aNG6NXr15YtmwZ0tPT0b9/f3To0AEuLi4AgE6dOmHixIno0aMHRo4ciatXr2L+/PmYO3euRrHmKiGJiorSqFEiIiL6H6GjFTLqnD
17FvXr11c+zxru8ff3x9KlS3H58mWsWbMGiYmJcHFxQaNGjTB58mSVXpjQ0FD0798fDRs2hIGBAdq2bYsFCxYoj1tbW+PAgQMIDAxE1apVUahQIYwbN06jJb8AIBPv6/b4jBmZFJY6BNIzr2L+kToE0iPmLrWlDoH0SHraI52f45F3A620UzjikFba0Ud5WmWzbt06+Pj4wMXFBQ8ePAAAzJs3D3/++adWgyMiIsoPpNo6/nOicUKydOlSBAUFoWnTpkhMTFTOGbGxscG8efO0HR8REdFnTyhkWnnkZxonJAsXLsSKFSswevRoGBoaKsurVauGK1euaDU4IiKi/EAI7TzyM40TkqioKFSuXDlbuVwux8uXL7USFBEREX1ZNE5I3NzccPHixWzl+/btQ9myZbURExERUb7CIRv1NN6pNSgoCIGBgXj9+jWEEDh9+jQ2btyIkJAQ/Prrr7qIkYiI6LOW35MJbdA4IenZsyfMzMwwZswYpKSkoFOnTnBxccH8+fPRoUMHXcRIRERE+Vye7mXTuXNndO7cGSkpKUhOTs52J0AiIiL6n/w+IVUb8nxzvYSEBNy6dQvAm63j7e3tNXp9UlJSrutaWVlp1DYREZE+4ZCNehonJP/99x9+/PFHbNy4EQrFm11aDA0N0b59eyxevBjW1ta5asfGxgYy2Yd/QLw/DhER0ZchT3NILly4gD179sDb2xsAEBERgUGDBqFPnz74/fffc9XO4cOHNT01ERHRZ0mqe9l8TjS+l42FhQX279+PWrVqqZT/888/aNy4sV7sRcJ72dC7eC8behvvZUNv+xT3srnj6aeVdjyu79dKO/pI4x6SggUL5jgsY21tDVtb248KJiUlBdHR0UhLS1Mpr1Chwke1S0RERPpN44RkzJgxCAoKwrp16+Dk5AQAiIuLw/DhwzF27Ng8BfH48WN069YNf/31V47HOYeEiIg+ZwoO2aiVq4SkcuXKKhNQIyMjUaxYMRQrVgwAEB0dDblcjsePH6NPnz4aBzF48GAkJibi1KlTqFevHrZv3474+HhMmTIFs2fP1rg9IiIifcI5JOrlKiFp1aqVToM4dOgQ/vzzT1SrVg0GBgZwdXXFN998AysrK4SEhKBZs2Y6PT8REZEucdmverlKSMaPH6/TIF6+fKncXM3W1haPHz9GqVKl4OXlhfPnz+v03ERERCQ9jW+upwulS5dWbrJWsWJFLF++HI8ePcKyZcvg7OwscXREREQfRwjtPPIzjSe1ZmZmYu7cudi8eXOOK2KePXumcRCDBg1CbGwsgDe9MY0bN0ZoaChMTEywevVqjdsjIiLSJxyyUU/jhGTixIn49ddfMXToUIwZMwajR4/G/fv3sWPHDowbNy5PQXTp0kX5/1WrVsWDBw9w8+ZNFCtWDIUKFcpTm0RERPT50HjIJjQ0FCtWrMDQoUNhZGSEjh074tdff8W4ceNw8uRJjQNIT0+Hu7s7bty4oSwzNzdHlSpVmIwQEVG+oBAyrTzyM40Tkri4OHh5eQEALC0t8eLFCwBA8+bNsWfPHo0DMDY2xuvXrzV+HRER0edCCJlWHvmZxglJkSJFlPM93N3dceDAAQDAmTNnIJfL8xREYGAgZsyYgYyMjDy9noiIiD5vGs8had26NQ4ePIjq1atjwIAB6NKlC3777TdER0djyJAheQrizJkzOHjwIA4cOAAvLy9YWFioHN+2bVue2iUiItIH+X2FjDZonJBMnz5d+f/t27eHq6srTpw4gZIlS6JFixZ5CsLGxgZt27bN02spZ/36+mNoUD84Odnj8uXrGDR4LM6cvSh1WPQRVqzdhL+PHEfUg4cwlZugkpcnhvTrDjfXIso6Af1H4OyFKyqv+75lU4wfMUD5PDYuAZN+XoQz5y/D3MwU3zbxxeC+3WBkZKiss3v/IazcsAXR/8bA0tIctWpUw7DAnrCxttL9GyWtqlWrOoYO7Ycqlb3g4uKEtt91x86d/7tB29ixQWjXriWKFnFBWloazp+/gnHjZuD0mQsSRp3/5Pf5H9qgcULyrho1aqBGjRpISEjAtGnT8NNPP2ncxqpVqz42DHrL999/i59njcePgaNw+swFDBzQE3v3hMKzfB08fvxU6vAoj85evIKObVqgfNlSyMjMxPzlq9F7yGj8Gboc5mamynrffdsY/Xv+oHxuavq/odTMzEz8OHw8CtrZYv2y2Xj89Bl+mvIzjIyMMLhvAADg/OVr+GnKbIwY2Bv1fKoj4fETTJq1COOnz8f8kLzdr4qkY2FhjsuXr2P16t+x5Y/fsh2PjLyHQYPGICrqAczMTDFoYC/s3bsBZcr64MkTzbdxIMorrW2MFhsbm+eb6zVo0ACJiYnZypOSktCgQYOPjOzLM2RQL/z62wasWbsZN25E4sfAUUhJeYVuAR2kDo0+wvI5U9Cq2TfwKOGKMiVLYOroIMTGJ+D6rUiVeqZyOQoVtFM+LN8aAj1x+jzu3o/G9PHDUaaUO2p7f4X+Pbvi9227kJ6eDgC4dPUGXJwc0OX7liji4oQqFcvj+5ZNcPXGrU/6fkk79u8/jPHjZ+LPP/flePz333fg0KF/EBUVjevXb2PY8ImwtraCl5fnJ440f+OkVvX0YqfW8PDwbBusAcDr16/xzz//SBDR58vY2BhVqlTAwUP/+9yEEDh46Bhq1KgqYWSkbckvUwAA1lYFVMr3hB1Grabt0apLX8xdugqv3lrFdunqDZQsURyF7GyVZT7VqyL5ZQruRD0AAFQsXxZxCU9w9MRpCCHw5NlzhIUfQ23vrz7BuyIpGRsbo2fPzkhMfIHLl69JHU6+wp1a1fvoIZuPcfnyZeX/X79+HXFxccrnmZmZ2LdvHwoXLixFaJ+tQoXsYGRkhIT4JyrlCQmPUaa0u0RRkbYpFApMn78clSt4omSJ4sryZt/Ug4uTI+wL2eH2nSjMXboS96MfKodanjx7joJ2NiptZT1/8vQ5AKBKhXKYMX4Eho2bjrS0NGRkZqKeT3WMHhr4Kd4aSaBpU1+Erl8Cc3MzxMbGo0mTjnj6/9cDaQfnkKgnaUJSqVIlyGQyyGSyHIdmzMzMsHDhwg+2kZqaitTUVJUyIQRkMv7wKf+aMnsx7ty7j7VLf1Yp/75lU+X/l3J3g30hO/QYGIzohzEoVsQlV23fjXqA6fOWoW+3TvCpXhVPnj7Dz4t/xaRZCzE5OG8r6Ui/hYcfR7WvGqFQQTv06NEJGzYsg0+t5pxzRp9UrhOSoKCgDx5//PixxiePioqCEAIlSpTA6dOnYW9vrzxmYmICBwcHGBoafqAFICQkBBMnTlQpkxlYQmb4Za4GePLkGTIyMuDgqLrLrYODPeLiNf8Zkf6ZOnsJjpw4jTWLZ8HJwf6Ddb08ywAA/n0Ui2JFXFDIzhZXrt9WqfP0WSIAoFDBN8M4K9ZtRuUKnuje+TsAQGkPN5iZytH1x+EY2Msf9oXstPyOSGopKa9w9+593L17H6dOn8f1a8fQrVtHzJy5SOrQ8o38Pv9DG3KdkFy4oH4JWJ06dTQ6uaurK4A33c95F
RwcnC1Zsi1YJs/tfe7S09Nx/vxlNKhfS7m0TyaToUH9WliylKuZPmdCCEybsxQHj57AqkUzUMTFSe1rbkbeBQAUKvgmiahYvix+WbsJT58noqCtDQAg4sx5WFqYw714MQDA69ep2f4QMPj/5yK/D2ITAMDAQAa53ETqMPIVDtmol+uE5PDhwzoLYu3atR883rVr1/cek8vl2XaI/dKHa+bOX4FVv83FufOXcebMBQwc0AsWFmZYvWaT1KHRR5gyezH2hoVjwfRxsDA3w5Onb5ZkWlpawFQuR/TDGOwNC0dt769gY22F23eiMGPBclSrVB6lPdwAADW/rgL34sUQPGkWgn7sgafPnmPhL2vRoU0LmJi8+QVUz6c6JsyYj9+374bP11Xx+OkzzJi/HF6epeFgX1Cy9095Y2FhDo////kDgFvxYqhYsRyePXuOp0+fIzh4EHbvOoDYuHgUKmiHfv0CULiwE7Zu3S1h1PQlkgk9+JPH1tZW5Xl6ejpSUlJgYmICc3NzPHum2Vp4IxNOhP2xX4ByY7RLl65h8JBxX/RGR69iPv/VWuV9muRYPuWnILRq9g1i4x8jeNJMRN57gFevX8PJwR4N69REn4AOKkt/Y+LiMXnWIpy5cAVmZnJ828QXQ/p2V9kYLfSPP7F5x148io1HAUsLfF21IoJ+7A5H+/xxw0tzl9pSh/DJ1KnjjYN/b8lWvnbtZvwYOArr1i3C119VRqFCdnj69DnOnruEkGnzcfbcJQmilUZ62iOdn+OkSxuttFMjJv/uXK4XCUlOIiMj0a9fPwwfPhx+fn4avZYJCb0rPyQkpD1fUkJC6n2KhOSEs3Z2I68Zu1Ur7egjvdiHJCclS5bE9OnTMWjQIKlDISIiIh2TdNmvOkZGRoiJiZE6DCIioo/CVTbq6UVCsnPnTpXnQgjExsZi0aJF8PHxkSgqIiIi7cj7WtIvR54Skn/++QfLly/H3bt3sWXLFhQuXBjr1q2Dm5sbatWqpXF7rVq1Unkuk8lgb2+PBg0aYPbs2XkJkYiIiD4jGs8h2bp1K/z8/GBmZoYLFy4od0l98eIFpk2blqcgFAqFyiMzMxNxcXHYsGEDnJ2d89QmERGRvhCQaeWRn2mckEyZMgXLli3DihUrYGxsrCz38fHB+fPnPyqYtLQ03Lp1CxkZGR/VDhERkT5RCO088jONE5Jbt27luCOrtbU1EhMT8xRESkoKunfvDnNzc5QrVw7R0dEAgAEDBmD69Ol5apOIiEhfKCDTyiM/0zghcXJywp07d7KVHzt2DCVKlMhTEMHBwbh8+TLCw8NhamqqLPf19cWmTdxdlIiIKL/TeFJrr169MGjQIKxcuRIymQwxMTGIiIjAsGHDMHbs2DwFsWPHDmzatAk1atRQ2fa9XLlyuHv3bp7aJCIi0hf5ff6HNmickIwaNQoKhQINGzZESkoK6tSpA7lcjmHDhmHAgAF5CuLx48dwcHDIVv7y5csv/r40RET0+eOyX/U0HrKRyWQYPXo0nj17hqtXr+LkyZN4/PgxJk+enOcgqlWrhj179qicAwB+/fVXeHt757ldIiIi+jzkeWM0ExMTeHp6aiWIadOmoUmTJrh+/ToyMjIwf/58XL9+HSdOnMCRI0e0cg4iIiKpcMhGPY0Tkvr1639wGOXQoUMaB1GrVi1cvHgR06dPh5eXFw4cOIAqVaogIiICXl5eGrdHRESkTzhko57GCUmlSpVUnqenp+PixYu4evUq/P398xyIu7s7VqxYkefXExER0edL44Rk7ty5OZZPmDABycnJGrVlYGCgdtKqTCbjRmlERPRZYw+Jelq7uV6XLl3w9ddf4+eff871a7Zv3/7eYxEREViwYAEUCv4YiYjo88Y5JOppLSGJiIhQ2dQsN1q2bJmt7NatWxg1ahR27dqFzp07Y9KkSdoKkYiIiPSUxglJmzZtVJ4LIRAbG4uzZ8/meWM0AIiJicH48eOxZs0a+Pn54eLFiyhfvnye2yMiItIXCnaQqKVxQmJtba3y3MDAAKVLl8akSZPQqFEjjQPIukvwwoULUalSJRw8eBC1a9fWuB0iIiJ9ld/vQ6MNGiUkmZmZ6NatG7y8vGBra/vRJ585cyZmzJgBJycnbNy4McchHCIios9dPr9Rr1bIhBAafU6mpqa4ceMG3NzcPvrkBgYGMDMzg6+vLwwNDd9bb9u2bRq1a2RS+GNDo3zmVcw/UodAesTchb2w9D/paY90fo4dTp200k6ruA1aaUcfaTxkU758edy7d08rCUnXrl15rxoiIsr3uF5UPY0TkilTpmDYsGGYPHkyqlatCgsLC5XjVlZWuW5r9erVmp6eiIjos6PgH99q5TohmTRpEoYOHYqmTZsCAL799luV3g0hBGQyGTIzM7UfJREREeVruU5IJk6ciL59++Lw4cO6jIeIiCjf4aRW9XKdkGTNfa1bt67OgiEiIsqPOIdEPQNNKnMCKhEREemCRpNaS5UqpTYpefbs2UcFRERElN9wp1b1NEpIJk6cmG2nViIiIvowqXZqPXr0KGbNmoVz584hNjYW27dvR6tWrZTHhRAYP348VqxYgcTERPj4+GDp0qUoWbKkss6zZ88wYMAA7Nq1CwYGBmjbti3mz58PS0tLZZ3Lly8jMDAQZ86cgb29PQYMGIARI0ZoFKtGCUmHDh3g4OCg0QmIiIhIGi9fvkTFihXRvXv3bPeiA97smL5gwQKsWbMGbm5uGDt2LPz8/HD9+nXlDXM7d+6M2NhYhIWFIT09Hd26dUPv3r2xYcObTdqSkpLQqFEj+Pr6YtmyZbhy5Qq6d+8OGxsb9O7dO9ex5joh4fwRIiKivJFqlU2TJk3QpEmTHI8JITBv3jyMGTNGeeuWtWvXwtHRETt27ECHDh1w48YN7Nu3D2fOnEG1atUAAAsXLkTTpk3x888/w8XFBaGhoUhLS8PKlSthYmKCcuXK4eLFi5gzZ45GCUmuJ7VquMM8ERER/T+FTDuP1NRUJCUlqTxSU1PzFFNUVBTi4uLg6+urLLO2tkb16tUREREBAIiIiICNjY0yGQEAX19fGBgY4NSpU8o6derUgYmJibKOn58fbt26hefPn+c6nlwnJAqFgsM1REREeaDQ0iMkJATW1tYqj5CQkDzFFBcXBwBwdHRUKXd0dFQei4uLy/a738jICHZ2dip1cmrj7XPkhsZbxxMREZE0goODERQUpFIml8slika7mJAQERHpmLYmPcjlcq0lIE5OTgCA+Ph4ODs7K8vj4+NRqVIlZZ2EhASV12VkZODZs2fK1zs5OSE+Pl6lTtbzrDq5odHGaERERKQ5bc0h0SY3Nzc4OTnh4MGDyrKkpCScOnUK3t7eAABvb28kJibi3LlzyjqHDh2CQqFA9erVlXWOHj2K9PR0ZZ2wsDCULl0atra2uY6HCQkREVE+lZycjIsXL+LixYsA3kxkvXjxIqKjoyGTyTB48GBMmTIFO3fuxJUrV9C1a1e4uLgo9yopW7YsGjdujF69euH06dM4fvw4+vfvjw4dOsDFxQUA0KlTJ5iY
mKBHjx64du0aNm3ahPnz52cbWlKHQzZEREQ6JtW9bM6ePYv69esrn2clCf7+/li9ejVGjBiBly9fonfv3khMTEStWrWwb98+5R4kABAaGor+/fujYcOGyo3RFixYoDxubW2NAwcOIDAwEFWrVkWhQoUwbtw4jZb8AoBM5MP1vEYmhaUOgfTMq5h/pA6B9Ii5S22pQyA9kp72SOfnWF6ki1ba6fNwvVba0UccsiEiIiLJcciGiIhIxwQ3O1eLCQkREZGOSTWH5HPCIRsiIiKSHHtIiIiIdIw9JOoxISEiItKxfLecVQeYkBAREemYtndZzY84h4SIiIgkxx4SIiIiHeMcEvWYkBAREekYExL1OGRDREREkmMPCRERkY5xlY16TEiIiIh0jKts1OOQDREREUmOPSREREQ6xkmt6jEhISIi0jHOIVGPQzZEREQkOfaQEBER6ZiCfSRqMSGhL8KP1UZKHQLpEXMTU6lDoC8M55Cox4SEiIhIx9g/oh7nkBAREZHk2ENCRESkYxyyUY8JCRERkY5xp1b1OGRDREREkmMPCRERkY5x2a96TEiIiIh0jOmIehyyISIiIsmxh4SIiEjHuMpGPSYkREREOsY5JOpxyIaIiIgkxx4SIiIiHWP/iHpMSIiIiHSMc0jUY0JCRESkY5xDoh7nkBAREZHk2ENCRESkY+wfUY8JCRERkY5xDol6HLIhIiIiybGHhIiISMcEB23UYkJCRESkYxyyUY9DNkRERCQ59pAQERHpGPchUY8JCRERkY4xHVGPQzZEREQkOfaQEBER6RiHbNRjQkJERKRjXGWjHhMSIiIiHeM+JOpxDgkRERFJjj0kREREOsYhG/WYkBAREekYh2zU45ANERERSY49JERERDrGIRv1mJAQERHpmEJwyEYdDtkQERGR5NhDQkREpGPsH1GPCQkREZGOcet49ThkQ0RERJLTm4Tkn3/+QZcuXeDt7Y1Hjx4BANatW4djx45JHBkREdHHEVr6Lz/Ti4Rk69at8PPzg5mZGS5cuIDU1FQAwIsXLzBt2jSJoyMiIvo4Ci098jO9SEimTJmCZcuWYcWKFTA2NlaW+/j44Pz58xJGRkRE9PEUEFp55Gd6kZDcunULderUyVZubW2NxMTETx8QERERfVJ6kZA4OTnhzp072cqPHTuGEiVKSBARERGR9nAOiXp6kZD06tULgwYNwqlTpyCTyRATE4PQ0FAMGzYM/fr1kzo8IiKij8I5JOrpxT4ko0aNgkKhQMOGDZGSkoI6depALpdj2LBhGDBggNThERERkY7pRQ+JTCbD6NGj8ezZM1y9ehUnT57E48ePMXnyZKlDIyIi+mhCCK08NDFhwgTIZDKVR5kyZZTHX79+jcDAQBQsWBCWlpZo27Yt4uPjVdqIjo5Gs2bNYG5uDgcHBwwfPhwZGRla+UzepRc9JOvXr0ebNm1gbm4OT09PqcMhIiLSKqlWyJQrVw5///238rmR0f9+7Q8ZMgR79uzBH3/8AWtra/Tv3x9t2rTB8ePHAQCZmZlo1qwZnJyccOLECcTGxqJr164wNjbWyZYcetFDMmTIEDg4OKBTp07Yu3cvMjMzpQ6JiIjos2dkZAQnJyflo1ChQgDe7PP122+/Yc6cOWjQoAGqVq2KVatW4cSJEzh58iQA4MCBA7h+/TrWr1+PSpUqoUmTJpg8eTIWL16MtLQ0rceqFwlJbGwsfv/9d8hkMrRr1w7Ozs4IDAzEiRMnpA6NiIjoo2lrUmtqaiqSkpJUHlmbieYkMjISLi4uKFGiBDp37ozo6GgAwLlz55Ceng5fX19l3TJlyqBYsWKIiIgAAERERMDLywuOjo7KOn5+fkhKSsK1a9e08rm8TS8SEiMjIzRv3hyhoaFISEjA3Llzcf/+fdSvXx/u7u5Sh0dERPRRtLXsNyQkBNbW1iqPkJCQHM9ZvXp1rF69Gvv27cPSpUsRFRWF2rVr47///kNcXBxMTExgY2Oj8hpHR0fExcUBAOLi4lSSkazjWce0TS/mkLzN3Nwcfn5+eP78OR48eIAbN25IHRIREZFeCA4ORlBQkEqZXC7PsW6TJk2U/1+hQgVUr14drq6u2Lx5M8zMzHQaZ17oRQ8JAKSkpCA0NBRNmzZF4cKFMW/ePLRu3Von3UJERESfkra2jpfL5bCyslJ5vC8heZeNjQ1KlSqFO3fuwMnJCWlpadl2Q4+Pj4eTkxOAN5uWvrvqJut5Vh1t0ouEpEOHDnBwcMCQIUNQokQJhIeH486dO5g8ebLKEiUiIqLPkRTLft+VnJyMu3fvwtnZGVWrVoWxsTEOHjyoPH7r1i1ER0fD29sbAODt7Y0rV64gISFBWScsLAxWVlY6WRGrF0M2hoaG2Lx5M/z8/GBoaCh1OERERFolxS6rw4YNQ4sWLeDq6oqYmBiMHz8ehoaG6NixI6ytrdGjRw8EBQXBzs4OVlZWGDBgALy9vVGjRg0AQKNGjeDp6YkffvgBM2fORFxcHMaMGYPAwMBc98poQi8SktDQUKlDICIiylcePnyIjh074unTp7C3t0etWrVw8uRJ2NvbAwDmzp0LAwMDtG3bFqmpqfDz88OSJUuUrzc0NMTu3bvRr18/eHt7w8LCAv7+/pg0aZJO4pWJj+0DyqMFCxagd+/eMDU1xYIFCz5Yd+DAgRq1bWRS+GNCyxf69fXH0KB+cHKyx+XL1zFo8FicOXtR6rAk082lptQh6ISNox3ajuqM8vUqw8RMjoT7cVg9fDEeXLmXrW6Xqb1Qt3Mj/D5pFQ6u3Kssd3Rzxnc//QD3qqVhZGyEhzej8eec33ErIv/O39r85LzUIXwSBgYGCB49CO3bt4SDoz3iYuMRGroNs2YsUtaxdyiIiZNGokHDWrC2tsKJ42cwfNhE3Lt7X7rAP7EXyXd1fo5GRRtrpZ0D/+7TSjv6SLIekrlz56Jz584wNTXF3Llz31tPJpNpnJB86b7//lv8PGs8fgwchdNnLmDggJ7YuycUnuXr4PHjp1KHR1pibmWBkVsn41bENcwPmIbkp0lwcHNCyouX2epW9vsaJSqXwvO4Z9mODfhtFOLvx2F2p4lIf50G3+7NMOC3Ufip7gAkPU78BO+EdGVIUB/06NkJfXsPx80bkahcxQuLl85AUtJ/WL50DQBgw8ZlSM/IQKf2fZD0XzL6D+iBP3etRfVqfkhJeSXxO8g/pNqp9XMiWUISFRWV4//TxxsyqBd+/W0D1qzdDAD4MXAUmjZpiG4BHTBz1mKJoyNtadyvFZ7HPMXq4f/rYn3yMCFbPRtHO3Sc0B3zuk7BgFXBKscsbQvAsYQL1oxcikc332yYtHVGKOp3bYzCpYoyIfnMfV29Cvbu/hsH9ocDAKKjH+G771ugatUKAAB3j+L4unoVVP+qMW7eiAQADBk0FpH3TuG771tg7ZrNUoVOXyC9WGUzadIkpKSkZCt/9eqVzsaq8itjY2NUqVIBBw/9oywTQuDgoWOoUaOqhJGRtlX0rYb7V+6iz+IgzD77K8bumYnaHRqq1JHJZOgxdwD2/7ITMZE
Ps7WR/Pw/xN59hBpt6sLETA4DQwPU7fQNkh4n5jjsQ5+X06fOo069mnD3KA4AKF++DGp4V0PYgSMAALncBACQ+vp/O30KIZCamoYa3tU+ebz5mT6sstF3epGQTJw4EcnJydnKU1JSMHHiRAki+nwVKmQHIyMjJMQ/USlPSHgMJ0d7iaIiXbAv5oB6XRoh4X4s5vlPQfj6A+gwoTu829ZV1mncryUyMzJxcNXe97Yzt/MkFCvnhoXX1mLJrQ34pmdzzAuYipSk7EM/9HmZM3sZtm3ZjbPnw/Dk+U38c2IXli5ehT827wQA3L51D9HRjzB+4jDY2FjB2NgYg4f0RpEiznBy4veFNmlrH5L8TC9W2QghIJPJspVfunQJdnZ2H3xtampqtn3839ceUX4ikxng/pW72D5rIwDg32v3UbhUUdTt3AgRW4+gWPkSaNitGSY3G/HBdjpN7on/nr7AzO/HIf11Gmp1aIgBv47C1G9H4QWHbD5rbdo2w/ftW6Jn9yG4ceM2vLw8MX3GGMTGJmDjhm3IyMjAD51+xMIlIXjw8AIyMjIQfvgEDuwP53cofXKSJiS2traQyWSQyWQoVaqUyj+AzMxMJCcno2/fvh9sIyQkJFsviszAEjJDK53ErO+ePHmGjIwMODgWUil3cLBHXPxjiaIiXXiR8Byx7wzDxN59hCpN3uwhUPLrMihQ0AozTixVHjc0MkS70f7w7d4MwbUCUaZmeVRoUBWDKgbgdfKbCYwbxv4Kz1oV4P1dPexbuuOTvR/SvklTRmHunGXYumU3AOD6tdsoWqwwgob1xcYN2wAAFy9eRe2aLWBlZQljExM8ffIMBw9vxYULV6QMPd8R+bx3QxskTUjmzZsHIQS6d++OiRMnwtraWnnMxMQExYsXV+4Y9z457etvW/DL3d01PT0d589fRoP6tbBz534Ab+YRNKhfC0uWrpI4OtKmO+duwamEi0qZo5sznj56k3ie3HYUN46p/lIZvHYMTm4/iuN/HAYAmJi92dxIKFS/LIVCwIB/IX/2zM1MIRSqW3IpMjNhIMs+Wp+U9GbYvIR7cVSu4oWpk9+/+pE0p8jn8z+0QdKExN/fHwDg5uaGmjVrwtjYWOM25HJ5th3jvvSuxrnzV2DVb3Nx7vxlnDlzAQMH9IKFhRlWr9kkdWikRX//thsjt05B0x9b48yeCLhV9ECdjr5YF7wcAPAyMRkvE1XnZmVmZODF4+eIvxcDALh3/jZevkhGt9mB2L1gC9Jfp6F2B18UKuqAy4e/jL068rO//jqEocN/xL//xuDmjUhUqFgOgQO6Y/3aLco6rVo3wZMnz/Dw3xh4liuN6TPHYs/uMBw6dEzCyOlLJFlCkpSUBCurN8MqlStXxqtXr/DqVc5r3rPqUe788cdO2Beyw4Rxw+DkZI9Ll66hWfMuSEh4ov7F9Nm4f/kulvaZhdYjOqP5oO/w5N8EbJq0Gqf+zP0vkuTn/2G+/1S0Ht4RQzeMh6GRIWIiH2Jx7xl4eOOBDqOnT2HEsIkYPXYIZs+dBHv7goiLjceqlb9jRshCZR1HJwdMDRkNB4eCiIt7jN83bsfM6Ys+0CrlBftH1JNsp1ZDQ0PExsbCwcEBBgYGOfZqZE1OzczM1Kht7tRK78qvO7VS3nwpO7VS7nyKnVp9CjfQSjvHHx3SSjv6SLIekkOHDilX0Bw+fFiqMIiIiHQuvy/Z1QbJEpK6devm+P9ERET05dGLjdH27duHY8f+N+69ePFiVKpUCZ06dcLz588ljIyIiOjjcadW9fQiIRk+fDiSkpIAAFeuXEFQUBCaNm2KqKiobEt6iYiIPjfcqVU9vdipNSoqCp6engCArVu3okWLFpg2bRrOnz+Ppk2bShwdERER6Zpe9JCYmJgob673999/o1GjRgAAOzs7Zc8JERHR50po6b/8TC96SGrVqoWgoCD4+Pjg9OnT2LTpzQZet2/fRpEiRSSOjoiI6OPk9/kf2qAXPSSLFi2CkZERtmzZgqVLl6Jw4Tf7iPz1119o3LixxNERERGRrulFD0mxYsWwe/fubOVz5/JeCkRE9PnL7xNStUEvEhLgzd19d+zYgRs3bgAAypUrh2+//RaGhoYSR0ZERPRxOGSjnl4kJHfu3EHTpk3x6NEjlC5dGgAQEhKCokWLYs+ePXB3d5c4QiIiItIlvZhDMnDgQLi7u+Pff//F+fPncf78eURHR8PNzQ0DBw6UOjwiIqKPwn1I1NOLHpIjR47g5MmTynvbAEDBggUxffp0+Pj4SBgZERHRx8vvS3a1QS8SErlcjv/++y9beXJyMkxMTCSIiIiISHsUnEOill4M2TRv3hy9e/fGqVOnlPv1nzx5En379sW3334rdXhERESkY3qRkCxYsAAeHh6oWbMmTE1NYWpqCh8fH3h4eGD+/PlSh0dERPRRuFOrepIO2SgUCsyaNQs7d+5EWloaWrVqBX9/f8hkMpQtWxYeHh5ShkdERKQVHLJRT9KEZOrUqZgwYQJ8fX1hZmaGvXv3wtraGitXrpQyLCIiIvrEJB2yWbt2LZYsWYL9+/djx44d2LVrF0JDQ6FQKKQMi4iISKs4ZKOepAlJdHQ0mjZtqnzu6+sLmUyGmJgYCaMiIiLSLoUQWnnkZ5ImJBkZGTA1NVUpMzY2Rnp6ukQRERERkRQknUMihEBAQADkcrmy7PXr1+jbty8sLCyUZdu2bZMiPCIiIq3I78Mt2iBpQuLv75+trEuXLhJEQkREpDv5fbhFGyRNSFatWiXl6YmIiEhP6MXW8URERPkZh2zUY0JCRESkY0JwOwt1mJAQERHpmII9JGrpxb1siIiI6MvGHhIiIiIdE1xloxYTEiIiIh3jkI16HLIhIiIiybGHhIiISMc4ZKMeExIiIiId406t6nHIhoiIiCTHHhIiIiId406t6jEhISIi0jHOIVGPQzZEREQkOfaQEBER6Rj3IVGPCQkREZGOcchGPSYkREREOsZlv+pxDgkRERFJjj0kREREOsYhG/WYkBAREekYJ7WqxyEbIiIikhx7SIiIiHSMQzbqMSEhIiLSMa6yUY9DNkRERCQ59pAQERHpGG+upx4TEiIiIh3jkI16HLIhIiIiybGHhIiISMe4ykY9JiREREQ6xjkk6nHIhoiISMeEEFp55MXixYtRvHhxmJqaonr16jh9+rSW3512MCEhIiLKpzZt2oSgoCCMHz8e58+fR8WKFeHn54eEhASpQ8uGCQkREZGOSdVDMmfOHPTq1QvdunWDp6cnli1bBnNzc6xcuVIH7/LjMCEhIiLSMaGlhybS0tJw7tw5+Pr6KssMDAzg6+uLiIiIj3o/usBJrURERJ+J1NRUpKamqpTJ5XLI5fJsdZ88eYLMzEw4OjqqlDs6OuLmzZs6jTMv8mVCkpH2SOoQJJeamoqQkBAEBwfneKHSl4fXxP+skDoAPcDr4dPS1u+lCRMmYOLEiSpl48ePx4QJE7TSvpRkgouj86WkpCRYW1vjxYsXsLKykjoc0gO8JuhtvB4+T5r0kKSlpcHc3BxbtmxBq1atlO
X+/v5ITEzEn3/+qetwNcI5JERERJ8JuVwOKysrlcf7erhMTExQtWpVHDx4UFmmUChw8OBBeHt7f6qQcy1fDtkQEREREBQUBH9/f1SrVg1ff/015s2bh5cvX6Jbt25Sh5YNExIiIqJ8qn379nj8+DHGjRuHuLg4VKpUCfv27cs20VUfMCHJp+RyOcaPH8/JaqTEa4Lexuvhy9G/f3/0799f6jDU4qRWIiIikhwntRIREZHkmJAQERGR5JiQEBERkeSYkJBS8eLFMW/ePKnDoM9MeHg4ZDIZEhMTpQ6FciG3Py9+H9CnxoTkEwkICIBMJsP06dNVynfs2AGZTPZJY1m9ejVsbGyylZ85cwa9e/f+pLHQ/3yqa+T+/fuQyWS4ePGi1tok7cu6HmQyGUxMTODh4YFJkyYhIyPjo9qtWbMmYmNjYW1tDYDfB6Q/mJB8QqamppgxYwaeP38udSg5sre3h7m5udRhfNH06RpJS0uTOoQvXuPGjREbG4vIyEgMHToUEyZMwKxZsz6qTRMTEzg5OalNcvl9QJ8aE5JPyNfXF05OTggJCXlvnWPHjqF27dowMzND0aJFMXDgQLx8+VJ5PDY2Fs2aNYOZmRnc3NywYcOGbF2rc+bMgZeXFywsLFC0aFH8+OOPSE5OBvCmu7Zbt2548eKF8q+vrJsyvd1Op06d0L59e5XY0tPTUahQIaxduxbAmy2IQ0JC4ObmBjMzM1SsWBFbtmzRwif15dLGNSKTybBjxw6V19jY2GD16tUAADc3NwBA5cqVIZPJUK9ePQBv/iJv1aoVpk6dChcXF5QuXRoAsG7dOlSrVg0FChSAk5MTOnXqhISEBO29aXovuVwOJycnuLq6ol+/fvD19cXOnTvx/PlzdO3aFba2tjA3N0eTJk0QGRmpfN2DBw/QokUL2NrawsLCAuXKlcPevXsBqA7Z8PuA9AkTkk/I0NAQ06ZNw8KFC/Hw4cNsx+/evYvGjRujbdu2uHz5MjZt2oRjx46pbGjTtWtXxMTEIDw8HFu3bsUvv/yS7ZeDgYEBFixYgGvXrmHNmjU4dOgQRowYAeBNd+28efNgZWWF2NhYxMbGYtiwYdli6dy5M3bt2qVMZABg//79SElJQevWrQEAISEhWLt2LZYtW4Zr165hyJAh6NKlC44cOaKVz+tLpI1rRJ3Tp08DAP7++2/ExsZi27ZtymMHDx7ErVu3EBYWht27dwN484tn8uTJuHTpEnbs2IH79+8jICDg494o5YmZmRnS0tIQEBCAs2fPYufOnYiIiIAQAk2bNkV6ejoAIDAwEKmpqTh69CiuXLmCGTNmwNLSMlt7/D4gvSLok/D39xctW7YUQghRo0YN0b17dyGEENu3bxdZP4YePXqI3r17q7zun3/+EQYGBuLVq1fixo0bAoA4c+aM8nhkZKQAIObOnfvec//xxx+iYMGCyuerVq0S1tbW2eq5uroq20lPTxeFChUSa9euVR7v2LGjaN++vRBCiNevXwtzc3Nx4sQJlTZ69OghOnbs+OEPg3KkjWtECCEAiO3bt6vUsba2FqtWrRJCCBEVFSUAiAsXLmQ7v6Ojo0hNTf1gnGfOnBEAxH///SeEEOLw4cMCgHj+/LmG75g+5O3rQaFQiLCwMCGXy0WrVq0EAHH8+HFl3SdPnggzMzOxefNmIYQQXl5eYsKECTm2++7Pi98HpC+4dbwEZsyYgQYNGmT7S+TSpUu4fPkyQkNDlWVCCCgUCkRFReH27dswMjJClSpVlMc9PDxga2ur0s7ff/+NkJAQ3Lx5E0lJScjIyMDr16+RkpKS6zFhIyMjtGvXDqGhofjhhx/w8uVL/Pnnn/j9998BAHfu3EFKSgq++eYbldelpaWhcuXKGn0elF1er5GyZct+1Hm9vLxgYmKiUnbu3DlMmDABly5dwvPnz6FQKAAA0dHR8PT0/Kjz0Yft3r0blpaWSE9Ph0KhQKdOndCmTRvs3r0b1atXV9YrWLAgSpcujRs3bgAABg4ciH79+uHAgQPw9fVF27ZtUaFChTzHwe8D+hSYkEigTp068PPzQ3BwsErXd3JyMvr06YOBAwdme02xYsVw+/ZttW3fv38fzZs3R79+/TB16lTY2dnh2LFj6NGjB9LS0jSapNa5c2fUrVsXCQkJCAsLg5mZGRo3bqyMFQD27NmDwoULq7yO98b4eHm9RoA3c0jEO3eEyOrKV8fCwkLl+cuXL+Hn5wc/Pz+EhobC3t4e0dHR8PPz46TXT6B+/fpYunQpTExM4OLiAiMjI+zcuVPt63r27Ak/Pz/s2bMHBw4cQEhICGbPno0BAwbkORZ+H5CuMSGRyPTp01GpUiXlxEEAqFKlCq5fvw4PD48cX1O6dGlkZGTgwoULqFq1KoA3f5m8vSLj3LlzUCgUmD17NgwM3kwR2rx5s0o7JiYmyMzMVBtjzZo1UbRoUWzatAl//fUXvv/+exgbGwMAPD09IZfLER0djbp162r25ilX8nKNAG9WR8TGxiqfR0ZGIiUlRfk8qwckN9fAzZs38fTpU0yfPh1FixYFAJw9e1bj90J5Y2Fhke1nXbZsWWRkZODUqVOoWbMmAODp06e4deuWSo9V0aJF0bdvX/Tt2xfBwcFYsWJFjgkJvw9IXzAhkYiXlxc6d+6MBQsWKMtGjhyJGjVqoH///ujZsycsLCxw/fp1hIWFYdGiRShTpgx8fX3Ru3dvLF26FMbGxhg6dCjMzMyUS/g8PDyQnp6OhQsXokWLFjh+/DiWLVumcu7ixYsjOTkZBw8eRMWKFWFubv7enpNOnTph2bJluH37Ng4fPqwsL1CgAIYNG4YhQ4ZAoVCgVq1aePHiBY4fPw4rKyv4+/vr4FP7suTlGgGABg0aYNGiRfD29kZmZiZGjhyp/MUBAA4ODjAzM8O+fftQpEgRmJqaKvekeFexYsVgYmKChQsXom/fvrh69SomT56s2zdOH1SyZEm0bNkSvXr1wvLly1GgQAGMGjUKhQsXRsuWLQEAgwcPRpMmTVCqVCk8f/4chw8ffu9wHr8PSG9IPIfli/H2BLUsUVFRwsTERLz9Yzh9+rT45ptvhKWlpbCwsBAVKlQQU6dOVR6PiYkRTZo0EXK5XLi6uooNGzYIBwcHsWzZMmWdOXPmCGdnZ2FmZib8/PzE2rVrs0067Nu3ryhYsKAAIMaPHy+EUJ3EluX69esCgHB1dRUKhULlmEKhEPPmzROlS5cWxsbGwt7eXvj5+YkjR4583If1hdLWNfLo0SPRqFEjYWFhIUqWLCn27t2rMqlVCCFWrFghihYtKgwMDETdunXfe34hhNiwYYMoXry4kMvlwtvbW+zcuVNlUiwnterG+34eQgjx7Nkz8cMPPwhra2vlv/Pbt28rj/fv31+4u7sLuVwu7O3txQ8//CCePHkihMj558XvA9IHMiHeGWymz8rDhw9RtGhR/P3332jYsKHU4RAREeUJE5LPzKFDh5CcnAwvLy/ExsZixIgRePToEW7fvq3SLU9ERPQ54RySz0x6ejp++ukn3Lt3DwUKFEDNmjURG
hrKZISIiD5r7CEhIiIiyXHreCIiIpIcExIiIiKSHBMSIiIikhwTEiIiIpIcExIiPRAQEIBWrVopn9erVw+DBw/+5HGEh4dDJpMhMTFRZ+d4973mxaeIk4g+LSYkRO8REBAAmUwGmUwGExMTeHh4YNKkScjIyND5ubdt25brLdo/9S/n4sWLY968eZ/kXET05eA+JEQf0LhxY6xatQqpqanYu3cvAgMDYWxsjODg4Gx109LSlDeu+1h2dnZaaYeI6HPBHhKiD5DL5XBycoKrqyv69esHX19f5e3fs4Yepk6dChcXF+Vdef/991+0a9cONjY2sLOzQ8uWLXH//n1lm5mZmQgKCoKNjQ0KFiyIESNG4N3tgN4dsklNTcXIkSNRtGhRyOVyeHh44LfffsP9+/dRv359AICtrS1kMhkCAgIAAAqFAiEhIXBzc4OZmRkqVqyILVu2qJxn7969KFWqFMzMzFC/fn2VOPMiMzMTPXr0UJ6zdOnSmD9/fo51J06cCHt7e1hZWaFv375IS0tTHstN7ESUv7CHhEgDZmZmePr0qfL5wYMHYWVlhbCwMABvdtL18/ODt7c3/vnnHxgZGWHKlClo3LgxLl++DBMTE8yePRurV6/GypUrUbZsWcyePRvbt29HgwYN3nverl27IiIiAgsWLEDFihURFRWFJ0+eoGjRoti6dSvatm2LW7duwcrKCmZmZgCAkJAQrF+/HsuWLUPJkiVx9OhRdOnSBfb29qhbty7+/fdftGnTBoGBgejduzfOnj2LoUOHftTno1AoUKRIEfzxxx8oWLAgTpw4gd69e8PZ2Rnt2rVT+dxMTU0RHh6O+/fvo1u3bihYsCCmTp2aq9iJKB+S8MZ+RHrt7butKhQKERYWJuRyuRg2bJjyuKOjo0hNTVW+Zt26daJ06dIqd0JNTU0VZmZmYv/+/UIIIZydncXMmTOVx9PT00WRIkVU7uxat25dMWjQICGEELdu3RIARFhYWI5x5nT31tevXwtzc3Nx4sQJlbo9evQQHTt2FEIIERwcLDw9PVWOjxw5Uu2de3O6C+yHBAYGirZt2yqf+/v7Czs7O/Hy5Utl2dKlS4WlpaXIzMzMVey8wzBR/sMeEqIP2L17NywtLZGeng6FQoFOnTphwoQJyuNeXl4q80YuXbqEO3fuoECBAirtvH79Gnfv3sWLFy8QGxuL6tWrK48ZGRmhWrVq2YZtsly8eBGGhoYa9QzcuXMHKSkp+Oabb1TK09LSULlyZQDAjRs3VOIAAG9v71yf430WL16MlStXIjo6Gq9evUJaWhoqVaqkUqdixYowNzdXOW9ycjL+/fdfJCcnq42diPIfJiREH1C/fn0sXboUJiYmcHFxgZGR6j8ZCwsLlefJycmoWrUqQkNDs7Vlb2+fpxiyhmA0kZycDADYs2cPChcurHJMLpfnKY7c+P333zFs2DDMnj0b3t7eKFCgAGbNmoVTp07lug2pYiciaTEhIfoACwsLeHh45Lp+lSpVsGnTJjg4OMDKyirHOs7Ozjh16hTq1KkDAMjIyMC5c+dQpUqVHOt7eXlBoVDgyJEj8PX1zXY8q4cmMzNTWebp6Qm5XI7o6Oj39qyULVtWOUE3y8mTJ9W/yQ84fvw4atasiR9//FFZdvfu3Wz1Ll26hFevXimTrZMnT8LS0hJFixaFnZ2d2tiJKP/hKhsiLercuTMKFSqEli1b4p9//kFUVBTCw8MxcOBAPHz4EAAwaNAgTJ8+HTt27MDNmzfx448/fnAPkeLFi8Pf3x/du3fHjh07lG1u3rwZAODq6gqZTIbdu3fj8ePHSE5ORoECBTBs2DAMGTIEa9aswd27d3H+/HksXLgQa9asAQD07dsXkZGRGD58OG7duoUNGzZg9erVuXqfjx49wsWLF1Uez58/R8mSJXH27Fns378ft2/fxtixY3HmzJlsr09LS0OPHj1w/fp17N27F+PHj0f//v1hYGCQq9iJKB+SehILkb56e1KrJsdjY2NF165dRaFChYRcLhclSpQQvXr1Ei9evBBCvJnEOmjQIGFlZSVsbGxEUFCQ6Nq163sntQohxKtXr8SQIUOEs7OzMDExER4eHmLlypXK45MmTRJOTk5CJpMJf39/IcSbibjz5s0TpUuXFsbGxsLe3l74+fmJI0eOKF+3a9cu4eHhIeRyuahdu7ZYuXJlria1Asj2WLdunXj9+rUICAgQ1tbWwsbGRvTr10+MGjVKVKxYMdvnNm7cOFGwYEFhaWkpevXqJV6/fq2soy52Tmolyn9kQrxnJh0RERHRJ8IhGyIiIpIcExIiIiKSHBMSIiIikhwTEiIiIpIcExIiIiKSHBMSIiIikhwTEiIiIpIcExIiIiKSHBMSIiIikhwTEiIiIpIcExIiIiKSHBMSIiIiktz/AYzT3E0JAvs9AAAAAElFTkSuQmCC", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" }, { "name": "stdout", "output_type": "stream", "text": [ "Model loaded from path :custom_bert_model.bin\n", "Local Wav2Vec2 processor and model found. Loading from local directory.\n", "Neutral\n" ] } ], "source": [ "if __name__ == \"__main__\":\n", "\n", " wandb.login(key=wandb_token)\n", " run = wandb.init(project=\"DIT-Wav2Vec-Bert-Sentiment-Analysis-project\")\n", " bert_train_dataset = CustomBertDataset(TRAIN_DS_PATH, \"fine-tune_raw\")\n", " bert_test_dataset = CustomBertDataset(TEST_DS_PATH, \"test_raw\")\n", " print(f\"Size of bert dataset : {len(bert_train_dataset)}\")\n", " \"\"\"train_dataset = Subset(our_bert_dataset, range(int(len(our_bert_dataset)*0.8)))\n", " test_dataset = Subset(our_bert_dataset, range(int(len(our_bert_dataset)*0.8), len(our_bert_dataset)))\"\"\"\n", "\n", " train_dataloader = DataLoader(bert_train_dataset, batch_size=BATCH_SIZE, shuffle=True)\n", " test_dataloader = DataLoader(bert_test_dataset, batch_size=BATCH_SIZE, shuffle=False)\n", "\n", " our_bert_model = CustomBertModel(bert_train_dataset.num_class)\n", " our_bert_model = our_bert_model.to(device)\n", "\n", " loss_fn = nn.CrossEntropyLoss()\n", " optimizer = optim.SGD(filter(lambda p: p.requires_grad, our_bert_model.parameters()), lr=0.01)\n", "\n", " train_step(our_bert_model, train_dataloader, loss_fn, optimizer)\n", " eval_step(test_dataloader, loss_fn, bert_train_dataset.num_class)\n", " eval_pipeline_step(test_dataloader, loss_fn, bert_train_dataset.num_class)\n", "\n", " test_inference_audio_path = \"/content/dev_raw/id10012_0AXjxNXiEzo_00001.flac\"\n", " print(get_audio_sentiment(test_inference_audio_path))" ] } ], "metadata": { "accelerator": "GPU", "colab": { "gpuType": "T4", "provenance": [] }, "kernelspec": { "display_name": "DIT_DL2", "language": "python", "name": "python3" }, "language_info": { "name": "python", "version": "3.13.2" } }, "nbformat": 4, "nbformat_minor": 0 }