refactor(Modify.py): restructure the code and improve the model training pipeline

- Restructured the code and cleaned up the import order and formatting
- Improved the training pipeline by adding early stopping and a learning-rate scheduler
- Added code for model testing and visualization
- Refined the implementation of the quantum convolutional layers and models
- Adjusted the training parameters and data preprocessing
fly6516 2025-06-25 15:07:08 +08:00
parent a6c92a4031
commit 9266859f0a
11 changed files with 1448 additions and 1253 deletions


Modify.py (613 lines changed)

@@ -1,291 +1,508 @@
#%%
# First we import all the packages we need:
import os
import random
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
import torch.optim as optim
import torchvision.transforms as transforms
from tqdm import tqdm
from torch.utils.data import DataLoader
from torchvision.datasets import FashionMNIST
import deepquantum as dq
import matplotlib.pyplot as plt

def seed_torch(seed=1024):
    """
    Set random seeds for reproducibility.
    Args:
        seed (int): Random seed number to use. Default is 1024.
    """
    random.seed(seed)
    os.environ['PYTHONHASHSEED'] = str(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    # Seed all GPUs with the same seed if using multi-GPU
    torch.cuda.manual_seed_all(seed)
    torch.backends.cudnn.benchmark = False
    torch.backends.cudnn.deterministic = True

seed_torch(42)  # use a more common seed value

#%%
def calculate_score(y_true, y_preds):
    # Convert the model outputs to a probability distribution
    preds_prob = torch.softmax(y_preds, dim=1)
    # Take the predicted class (the one with the highest probability)
    preds_class = torch.argmax(preds_prob, dim=1)
    # Compute the accuracy
    correct = (preds_class == y_true).float()
    accuracy = correct.sum() / len(correct)
    return accuracy.cpu().numpy()
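#%%
# Quick sanity check of calculate_score (illustrative sketch; the names below are
# examples, not part of the training pipeline): with random logits for a batch of
# 8 samples over 10 classes, the returned accuracy should hover around 0.1.
example_logits = torch.randn(8, 10)
example_labels = torch.randint(0, 10, (8,))
print(calculate_score(example_labels, example_logits))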
#%%
def train_model(model, criterion, optimizer, train_loader, valid_loader, num_epochs, device):
    """
    Train and validate the model.

    Args:
        model (torch.nn.Module): model to train
        criterion (torch.nn.Module): loss function
        optimizer (torch.optim.Optimizer): optimizer
        train_loader (torch.utils.data.DataLoader): training data loader
        valid_loader (torch.utils.data.DataLoader): validation data loader
        num_epochs (int): number of training epochs
    Returns:
        model (torch.nn.Module): the trained model
    """
    train_loss_list = []
    valid_loss_list = []
    train_acc_list = []
    valid_acc_list = []
    best_valid_acc = 0.0
    patience = 10  # early-stopping patience
    counter = 0    # epochs without improvement
    scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='max', factor=0.5, patience=10)
    with tqdm(total=num_epochs) as pbar:
        for epoch in range(num_epochs):
            # Training phase (re-enable train mode every epoch, since the
            # validation phase below switches the model to eval mode)
            model.train()
            train_loss = 0.0
            train_acc = 0.0
            for images, labels in train_loader:
                images = images.to(device)
                labels = labels.to(device)
                optimizer.zero_grad()
                outputs = model(images)
                loss = criterion(outputs, labels)
                loss.backward()
                optimizer.step()
                train_loss += loss.item()
                train_acc += calculate_score(labels, outputs)
            train_loss /= len(train_loader)
            train_acc /= len(train_loader)

            # Validation phase
            model.eval()
            valid_loss = 0.0
            valid_acc = 0.0
            with torch.no_grad():
                for images, labels in valid_loader:
                    images = images.to(device)
                    labels = labels.to(device)
                    outputs = model(images)
                    loss = criterion(outputs, labels)
                    valid_loss += loss.item()
                    valid_acc += calculate_score(labels, outputs)
            valid_loss /= len(valid_loader)
            valid_acc /= len(valid_loader)

            # Learning-rate scheduler update (driven by validation accuracy)
            scheduler.step(valid_acc)

            # Early stopping
            if valid_acc > best_valid_acc:
                best_valid_acc = valid_acc
                torch.save(model.state_dict(), './data/notebook2/best_model.pt')
                counter = 0
            else:
                counter += 1
                if counter >= patience:
                    print(f'Early stopping at epoch {epoch+1} due to no improvement in validation accuracy.')
                    break
            pbar.set_description(f"Train loss: {train_loss:.3f} Valid Acc: {valid_acc:.3f}")
            pbar.update()
            train_loss_list.append(train_loss)
            valid_loss_list.append(valid_loss)
            train_acc_list.append(train_acc)
            valid_acc_list.append(valid_acc)

    # Reload the best weights found during training
    if os.path.exists('./data/notebook2/best_model.pt'):
        model.load_state_dict(torch.load('./data/notebook2/best_model.pt'))

    # Build metrics this way so all arrays have the same length
    metrics = {
        'epoch': list(range(1, len(train_loss_list) + 1)),
        'train_acc': train_acc_list,
        'valid_acc': valid_acc_list,
        'train_loss': train_loss_list,
        'valid_loss': valid_loss_list
    }
    return model, metrics
#%%
def test_model(model, test_loader, device):
    model.eval()
    test_acc = 0.0
    with torch.no_grad():
        for images, labels in test_loader:
            images = images.to(device)
            labels = labels.to(device)
            outputs = model(images)
            test_acc += calculate_score(labels, outputs)
    test_acc /= len(test_loader)
    print(f'Test Acc: {test_acc:.3f}')
    return test_acc
#%%
# Image transforms
trans1 = transforms.Compose([
    transforms.RandomHorizontalFlip(),                     # random horizontal flip
    transforms.RandomRotation(10),                         # random rotation within ±10 degrees
    transforms.ColorJitter(brightness=0.2, contrast=0.2),  # color jitter
    transforms.Resize((18, 18)),                           # resize to 18x18
    transforms.ToTensor(),                                 # convert to tensor
    transforms.Normalize((0.5,), (0.5,))                   # normalize to [-1, 1]
])
trans2 = transforms.Compose([                              # 16x16 variant (unused below)
    transforms.RandomHorizontalFlip(),                     # random horizontal flip
    transforms.RandomRotation(10),                         # random rotation within ±10 degrees
    transforms.ColorJitter(brightness=0.2, contrast=0.2),  # color jitter
    transforms.Resize((16, 16)),                           # resize to 16x16
    transforms.ToTensor(),                                 # convert to tensor
    transforms.Normalize((0.5,), (0.5,))                   # normalize to [-1, 1]
])
# NOTE: both datasets use train=False, i.e. they are built from the 10k-image test split
train_dataset = FashionMNIST(root='./data/notebook2', train=False, transform=trans1, download=True)
test_dataset = FashionMNIST(root='./data/notebook2', train=False, transform=trans1, download=True)

# Train/validation split ratios
train_ratio = 0.8  # 80% for training, 20% for validation
valid_ratio = 0.2
total_samples = len(train_dataset)
train_size = int(train_ratio * total_samples)
valid_size = int(valid_ratio * total_samples)

# Split into training and validation sets
train_dataset, valid_dataset = torch.utils.data.random_split(train_dataset, [train_size, valid_size])

# Data loaders
train_loader = DataLoader(train_dataset, batch_size=64, shuffle=True, drop_last=True)
valid_loader = DataLoader(valid_dataset, batch_size=64, shuffle=False, drop_last=True)
test_loader = DataLoader(test_dataset, batch_size=64, shuffle=False, drop_last=True)

#%%
singlegate_list = ['rx', 'ry', 'rz', 's', 't', 'p', 'u3']
doublegate_list = ['rxx', 'ryy', 'rzz', 'swap', 'cnot', 'cp', 'ch', 'cu', 'ct', 'cz']
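#%%
# Shape check (illustrative sketch): the transforms above resize FashionMNIST to
# 18x18 grayscale, so each batch should come out as [64, 1, 18, 18] with 64 labels.
sample_images, sample_labels = next(iter(train_loader))
print(sample_images.shape, sample_labels.shape)  # torch.Size([64, 1, 18, 18]) torch.Size([64])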
#%%
# Random quantum convolutional layer
class RandomQuantumConvolutionalLayer(nn.Module):
    def __init__(self, nqubit, num_circuits, seed: int = 1024):
        super(RandomQuantumConvolutionalLayer, self).__init__()
        random.seed(seed)
        self.nqubit = nqubit
        self.cirs = nn.ModuleList([self.circuit(nqubit) for _ in range(num_circuits)])

    def circuit(self, nqubit):
        cir = dq.QubitCircuit(nqubit)
        cir.rxlayer(encode=True)  # data-encoding layer; it does not affect the circuit structure of the original paper
        cir.barrier()
        for _ in range(3):
            # one random single-qubit gate per qubit
            for i in range(nqubit):
                singlegate = random.choice(singlegate_list)
                getattr(cir, singlegate)(i)
            # one random two-qubit gate (note: the wires are sampled from the first nqubit - 1 qubits)
            control_bit, target_bit = random.sample(range(0, nqubit - 1), 2)
            doublegate = random.choice(doublegate_list)
            if doublegate[0] in ['r', 's']:  # rxx/ryy/rzz/swap take a wire list
                getattr(cir, doublegate)([control_bit, target_bit])
            else:                            # controlled gates take (control, target)
                getattr(cir, doublegate)(control_bit, target_bit)
            cir.barrier()
        cir.observable(0)
        return cir

    def forward(self, x):
        kernel_size, stride = 2, 2
        # [64, 1, 18, 18] -> [64, 1, 9, 18, 2] -> [64, 1, 9, 9, 2, 2]
        x_unfold = x.unfold(2, kernel_size, stride).unfold(3, kernel_size, stride)
        w = int((x.shape[-1] - kernel_size) / stride + 1)
        x_reshape = x_unfold.reshape(-1, self.nqubit)
        exps = []
        for cir in self.cirs:  # one circuit per output channel
            cir(x_reshape)
            exp = cir.expectation()
            exps.append(exp)
        exps = torch.stack(exps, dim=1)
        exps = exps.reshape(x.shape[0], len(self.cirs), w, w)
        return exps
#%%
net = RandomQuantumConvolutionalLayer(nqubit=4, num_circuits=3, seed=1024)
net.cirs[0].draw()

#%%
# Hybrid model built on the random quantum convolutional layer
class RandomQCCNN(nn.Module):
    def __init__(self):
        super(RandomQCCNN, self).__init__()
        self.conv = nn.Sequential(
            RandomQuantumConvolutionalLayer(nqubit=4, num_circuits=3, seed=1024),  # num_circuits=3: the quanv1 layer uses only 3 quantum kernels
            nn.BatchNorm2d(3),  # batch normalization
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=1),
            nn.Conv2d(3, 6, kernel_size=2, stride=1),
            nn.BatchNorm2d(6),  # batch normalization
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=1)
        )
        self.fc = nn.Sequential(
            nn.Linear(6 * 6 * 6, 1024),
            nn.BatchNorm1d(1024),  # batch normalization
            nn.Dropout(0.5),       # increased dropout rate
            nn.ReLU(),
            nn.Linear(1024, 10)
        )

    def forward(self, x):
        x = self.conv(x)
        x = x.reshape(x.size(0), -1)
        x = self.fc(x)
        return x
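#%%
# Shape walk-through for the RandomQCCNN feature extractor (illustrative sketch),
# which is where the 6 * 6 * 6 input of the first Linear layer comes from:
#   [B, 1, 18, 18] --quanv (k=2, s=2)--> [B, 3, 9, 9]
#   --MaxPool2d(2, 1)--> [B, 3, 8, 8] --Conv2d(3, 6, k=2, s=1)--> [B, 6, 7, 7]
#   --MaxPool2d(2, 1)--> [B, 6, 6, 6] --flatten--> [B, 216]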
#%%
# Training parameters for the RandomQCCNN model
num_epochs = 300
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print(device)
seed_torch(42)  # same seed value as above
model = RandomQCCNN()
model.to(device)
criterion = nn.CrossEntropyLoss()
optimizer = optim.AdamW(model.parameters(), lr=3e-4, weight_decay=1e-5)  # AdamW optimizer with a modest weight decay
optim_model, metrics = train_model(model, criterion, optimizer, train_loader, valid_loader, num_epochs, device)
torch.save(optim_model.state_dict(), './data/notebook2/random_qccnn_weights.pt')  # save the trained weights for later inference/testing
pd.DataFrame(metrics).to_csv('./data/notebook2/random_qccnn_metrics.csv', index='None')  # save the training history for later plotting
# (note: index='None' is a truthy string, so pandas still writes the row index as the first CSV column)

#%%
state_dict = torch.load('./data/notebook2/random_qccnn_weights.pt', map_location=device)
random_qccnn_model = RandomQCCNN()
random_qccnn_model.load_state_dict(state_dict)
random_qccnn_model.to(device)
test_acc = test_model(random_qccnn_model, test_loader, device)

#%%
data = pd.read_csv('./data/notebook2/random_qccnn_metrics.csv')
epoch = data['epoch']
train_loss = data['train_loss']
valid_loss = data['valid_loss']
train_acc = data['train_acc']
valid_acc = data['valid_acc']

# Create the figure and Axes objects
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))

# Plot the loss curves
ax1.plot(epoch, train_loss, label='Train Loss')
ax1.plot(epoch, valid_loss, label='Valid Loss')
ax1.set_title('Training Loss Curve')
ax1.set_xlabel('Epoch')
ax1.set_ylabel('Loss')
ax1.legend()

# Plot the accuracy curves
ax2.plot(epoch, train_acc, label='Train Accuracy')
ax2.plot(epoch, valid_acc, label='Valid Accuracy')
ax2.set_title('Training Accuracy Curve')
ax2.set_xlabel('Epoch')
ax2.set_ylabel('Accuracy')
ax2.legend()
plt.show()
#%%
class ParameterizedQuantumConvolutionalLayer(nn.Module):
    def __init__(self, nqubit, num_circuits):
        super().__init__()
        self.nqubit = nqubit
        self.cirs = nn.ModuleList([self.circuit(nqubit) for _ in range(num_circuits)])

    def circuit(self, nqubit):
        cir = dq.QubitCircuit(nqubit)
        cir.rxlayer(encode=True)  # data-encoding layer; it does not affect the circuit structure of the original paper
        cir.barrier()
        for _ in range(4):  # depth 4 per kernel circuit as in the original paper, 16 trainable parameters in total
            cir.rylayer()
            cir.cnot_ring()
            cir.barrier()
        cir.observable(0)
        return cir

    def forward(self, x):
        kernel_size, stride = 2, 2
        # [64, 1, 18, 18] -> [64, 1, 9, 18, 2] -> [64, 1, 9, 9, 2, 2]
        x_unfold = x.unfold(2, kernel_size, stride).unfold(3, kernel_size, stride)
        w = int((x.shape[-1] - kernel_size) / stride + 1)
        x_reshape = x_unfold.reshape(-1, self.nqubit)
        exps = []
        for cir in self.cirs:  # one circuit per output channel
            cir(x_reshape)
            exp = cir.expectation()
            exps.append(exp)
        exps = torch.stack(exps, dim=1)
        exps = exps.reshape(x.shape[0], len(self.cirs), w, w)
        return exps
#%%
# Visualize the circuit of one of the quantum kernels:
net = ParameterizedQuantumConvolutionalLayer(nqubit=4, num_circuits=3)
net.cirs[0].draw()

#%%
# Overall QCCNN architecture
class QCCNN(nn.Module):
    def __init__(self):
        super(QCCNN, self).__init__()
        self.conv = nn.Sequential(
            ParameterizedQuantumConvolutionalLayer(nqubit=4, num_circuits=3),
            nn.BatchNorm2d(3),  # batch normalization
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=1)
        )
        self.fc = nn.Sequential(
            nn.Linear(8 * 8 * 3, 128),
            nn.BatchNorm1d(128),  # batch normalization
            nn.Dropout(0.5),      # increased dropout rate
            nn.ReLU(),
            nn.Linear(128, 10)
        )

    def forward(self, x):
        x = self.conv(x)
        x = x.reshape(x.size(0), -1)
        x = self.fc(x)
        return x
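#%%
# Shape walk-through for QCCNN (illustrative sketch), which is where the 8 * 8 * 3
# input of the classifier head comes from:
#   [B, 1, 18, 18] --quanv (k=2, s=2)--> [B, 3, 9, 9]
#   --MaxPool2d(2, 1)--> [B, 3, 8, 8] --flatten--> [B, 192] = [B, 8 * 8 * 3]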
#%%
# Training parameters for the QCCNN model
num_epochs = 300
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = QCCNN()
model.to(device)
criterion = nn.CrossEntropyLoss()
optimizer = optim.AdamW(model.parameters(), lr=3e-4, weight_decay=1e-5)  # AdamW optimizer with a modest weight decay
optim_model, metrics = train_model(model, criterion, optimizer, train_loader, valid_loader, num_epochs, device)
torch.save(optim_model.state_dict(), './data/notebook2/qccnn_weights.pt')  # save the trained weights for later inference/testing
pd.DataFrame(metrics).to_csv('./data/notebook2/qccnn_metrics.csv', index='None')  # save the training history for later plotting

#%%
state_dict = torch.load('./data/notebook2/qccnn_weights.pt', map_location=device)
qccnn_model = QCCNN()
qccnn_model.load_state_dict(state_dict)
qccnn_model.to(device)
test_acc = test_model(qccnn_model, test_loader, device)
#%%
def vgg_block(in_channel, out_channel, num_convs):
    layers = nn.ModuleList()
    assert num_convs >= 1
    layers.append(nn.Conv2d(in_channel, out_channel, kernel_size=3, padding=1))
    layers.append(nn.ReLU())
    for _ in range(num_convs - 1):
        layers.append(nn.Conv2d(out_channel, out_channel, kernel_size=3, padding=1))
        layers.append(nn.ReLU())
    layers.append(nn.MaxPool2d(kernel_size=2, stride=2))
    return nn.Sequential(*layers)

VGG = nn.Sequential(
    vgg_block(1, 32, 2),         # more channels, adjusted number of conv layers
    vgg_block(32, 64, 2),
    nn.Flatten(),
    nn.Linear(64 * 4 * 4, 256),  # adjusted fully connected layer size
    nn.BatchNorm1d(256),         # batch normalization
    nn.ReLU(),
    nn.Dropout(0.5),             # increased dropout rate
    nn.Linear(256, 128),
    nn.BatchNorm1d(128),         # batch normalization
    nn.ReLU(),
    nn.Dropout(0.5),
    nn.Linear(128, 10),
    nn.Softmax(dim=-1)
)
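#%%
# Shape walk-through for the VGG baseline (illustrative sketch), which is where the
# 64 * 4 * 4 input of the first Linear layer comes from, plus a quick runnable check:
#   [B, 1, 18, 18] --vgg_block(1, 32, 2)--> [B, 32, 9, 9]
#   --vgg_block(32, 64, 2)--> [B, 64, 4, 4] --Flatten--> [B, 1024]
with torch.no_grad():
    print(VGG(torch.randn(2, 1, 18, 18)).shape)  # torch.Size([2, 10])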
#%%
# 修改VGG模型的训练参数
num_epochs = 300
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
#%% 主入口 vgg_model = VGG
if __name__ == '__main__': vgg_model.to(device)
freeze_support() criterion = nn.CrossEntropyLoss()
optimizer = optim.AdamW(vgg_model.parameters(), lr=3e-4, weight_decay=1e-5) # 使用AdamW优化器和适当的权重衰减
vgg_model, metrics = train_model(vgg_model, criterion, optimizer, train_loader, valid_loader, num_epochs, device)
torch.save(vgg_model.state_dict(), './data/notebook2/vgg_weights.pt') # 保存训练好的模型参数,用于后续的推理或测试
pd.DataFrame(metrics).to_csv('./data/notebook2/vgg_metrics.csv', index='None') # 保存模型训练过程,用于后续图标展示
#%%
state_dict = torch.load('./data/notebook2/vgg_weights.pt', map_location=device)
vgg_model = VGG
vgg_model.load_state_dict(state_dict)
vgg_model.to(device)
# 数据增广与加载 vgg_test_acc = test_model(vgg_model, test_loader, device)
train_transform = transforms.Compose([ #%%
transforms.Resize((18, 18)), vgg_data = pd.read_csv('./data/notebook2/vgg_metrics.csv')
transforms.RandomRotation(15), qccnn_data = pd.read_csv('./data/notebook2/qccnn_metrics.csv')
transforms.RandomHorizontalFlip(), vgg_epoch = vgg_data['epoch']
transforms.RandomVerticalFlip(0.3), vgg_train_loss = vgg_data['train_loss']
transforms.ToTensor(), vgg_valid_loss = vgg_data['valid_loss']
transforms.Normalize((0.5,), (0.5,)) vgg_train_acc = vgg_data['train_acc']
]) vgg_valid_acc = vgg_data['valid_acc']
eval_transform = transforms.Compose([
transforms.Resize((18, 18)),
transforms.ToTensor(),
transforms.Normalize((0.5,), (0.5,))
])
full_train = FashionMNIST(root='./data/notebook2', train=True, transform=train_transform, download=True) qccnn_epoch = qccnn_data['epoch']
test_dataset = FashionMNIST(root='./data/notebook2', train=False, transform=eval_transform, download=True) qccnn_train_loss = qccnn_data['train_loss']
train_size = int(0.8 * len(full_train)) qccnn_valid_loss = qccnn_data['valid_loss']
valid_size = len(full_train) - train_size qccnn_train_acc = qccnn_data['train_acc']
train_ds, valid_ds = torch.utils.data.random_split(full_train, [train_size, valid_size]) qccnn_valid_acc = qccnn_data['valid_acc']
valid_ds.dataset.transform = eval_transform
batch_size = 128 # 创建图和Axes对象
train_loader = DataLoader(train_ds, batch_size=batch_size, shuffle=True, drop_last=True, num_workers=4) fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))
valid_loader = DataLoader(valid_ds, batch_size=batch_size, shuffle=False, drop_last=True, num_workers=4)
test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False, drop_last=False, num_workers=4)
# 三种模型配置 # 绘制训练损失曲线
device = torch.device("cuda" if torch.cuda.is_available() else "cpu") ax1.plot(vgg_epoch, vgg_train_loss, label='VGG Train Loss')
models = { ax1.plot(vgg_epoch, vgg_valid_loss, label='VGG Valid Loss')
'random_qccnn': (RandomQCCNN(), 1e-3, './data/notebook2/random_qccnn_best.pt'), ax1.plot(qccnn_epoch, qccnn_train_loss, label='QCCNN Valid Loss')
'qccnn': (QCCNN(), 1e-4, './data/notebook2/qccnn_best.pt'), ax1.plot(qccnn_epoch, qccnn_valid_loss, label='QCCNN Valid Loss')
'vgg': (VGG, 1e-4, './data/notebook2/vgg_best.pt') ax1.set_title('Training Loss Curve')
} ax1.set_xlabel('Epoch')
ax1.set_ylabel('Loss')
ax1.legend()
all_metrics = {} # 绘制训练准确率曲线
for name, (model, lr, save_path) in models.items(): ax2.plot(vgg_epoch, vgg_train_acc, label='VGG Train Accuracy')
seed_torch(1024) ax2.plot(vgg_epoch, vgg_valid_acc, label='VGG Valid Accuracy')
model = model.to(device) ax2.plot(qccnn_epoch, qccnn_train_acc, label='QCCNN Train Accuracy')
criterion = nn.CrossEntropyLoss() ax2.plot(qccnn_epoch, qccnn_valid_acc, label='QCCNN Valid Accuracy')
optimizer = optim.AdamW(model.parameters(), lr=lr, weight_decay=1e-4) ax2.set_title('Training Accuracy Curve')
scheduler = optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=50) ax2.set_xlabel('Epoch')
ax2.set_ylabel('Accuracy')
ax2.legend()
print(f"\n=== Training {name} ===") plt.show()
_, metrics = train_model( #%%
model, criterion, optimizer, scheduler, # 这里我们对比不同模型之间可训练参数量的区别
train_loader, valid_loader,
num_epochs=50, device=device, save_path=save_path
)
all_metrics[name] = metrics
pd.DataFrame(metrics).to_csv(f'./data/notebook2/{name}_metrics.csv', index=False)
# 测试与可视化 def count_parameters(model):
plt.figure(figsize=(12,5)) """
for i,(name,metrics) in enumerate(all_metrics.items(),1): 计算模型的参数数量
model, _, save_path = models[name] """
best_model = model.to(device) return sum(p.numel() for p in model.parameters() if p.requires_grad)
best_model.load_state_dict(torch.load(save_path))
print(f"\n--- Testing {name} ---")
test_model(best_model, test_loader, device)
plt.subplot(1,3,i) number_params_VGG = count_parameters(VGG)
plt.plot(metrics['epoch'], metrics['valid_acc'], label=f'{name} Val Acc') number_params_QCCNN = count_parameters(QCCNN())
plt.xlabel('Epoch'); plt.ylabel('Valid Acc') print(f'VGG 模型可训练参数量:{number_params_VGG}\t QCCNN模型可训练参数量{number_params_QCCNN}')
plt.title(name); plt.legend()
plt.tight_layout(); plt.show()
# 参数量统计
def count_parameters(m):
return sum(p.numel() for p in m.parameters() if p.requires_grad)
print("\nParameter Counts:")
for name,(model,_,_) in models.items():
print(f"{name}: {count_parameters(model)}")

(metrics CSV)

@@ -1,301 +1,33 @@
,epoch,train_acc,valid_acc,train_loss,valid_loss
0,1,0.59325,0.6854838709677419,1.1784187927246095,0.8956879992638865
1,2,0.713,0.7127016129032258,0.788702152967453,0.7735376723351017
2,3,0.755,0.7389112903225806,0.7006268813610077,0.7271041427889178
3,4,0.763625,0.7273185483870968,0.668900458574295,0.7277685692233424
4,5,0.76275,0.748991935483871,0.6469288661479949,0.672559670863613
5,6,0.773625,0.7434475806451613,0.6203210880756378,0.6748099394382969
6,7,0.771625,0.7494959677419355,0.6234635796546936,0.6763338706185741
7,8,0.784125,0.7580645161290323,0.5965014040470124,0.6313219945276937
8,9,0.781,0.7419354838709677,0.5893729448318481,0.6552943125847848
9,10,0.785375,0.7600806451612904,0.582163923740387,0.6227685501498561
10,11,0.790625,0.7605846774193549,0.5674120993614197,0.6145914264263646
11,12,0.7995,0.765625,0.5617314722537995,0.6158498919779255
12,13,0.7965,0.7883064516129032,0.5522617139816284,0.581030644716755
13,14,0.803125,0.7817540322580645,0.5367116575241089,0.5911272004727395
14,15,0.80025,0.7752016129032258,0.5398472018241882,0.5930967292478008
15,16,0.808125,0.7620967741935484,0.5326456694602967,0.6306540206555398
16,17,0.8145,0.7772177419354839,0.5097175936698913,0.5843374094655437
17,18,0.817375,0.7918346774193549,0.5045695571899415,0.5643920071663395
18,19,0.81175,0.7681451612903226,0.5022978014945984,0.593349628871487
19,20,0.824375,0.7928427419354839,0.4974315061569214,0.5595071181174247
20,21,0.818,0.7762096774193549,0.49526858401298524,0.5555580258369446
21,22,0.815625,0.7862903225806451,0.4976941578388214,0.5627694274148634
22,23,0.825875,0.7908266129032258,0.4906779823303223,0.5514134312829664
23,24,0.823875,0.7953629032258065,0.4869117760658264,0.5559701804191836
24,25,0.823,0.7807459677419355,0.48527450203895567,0.5541293746040713
25,26,0.824625,0.795866935483871,0.4811685870885849,0.5384060884675672
26,27,0.827875,0.7938508064516129,0.48096660792827606,0.5493545234203339
27,28,0.826875,0.7998991935483871,0.477142098903656,0.5478304624557495
28,29,0.82725,0.7948588709677419,0.47373487854003904,0.547797841410483
29,30,0.825375,0.7847782258064516,0.47746446084976196,0.5636355328944421
30,31,0.828875,0.7757056451612904,0.4754366238117218,0.5671144060550197
31,32,0.8315,0.7908266129032258,0.4769072663784027,0.5523799340571126

154 155 0.751375 0.7404233870967742 0.6693622040748596 0.6791631608240066
155 156 0.7495 0.7384072580645161 0.6677224984169007 0.6791054318028111
156 157 0.751125 0.7530241935483871 0.6669107344150543 0.6757311301846658
157 158 0.750375 0.7454637096774194 0.6689134707450867 0.675748364579293
158 159 0.749875 0.7394153225806451 0.665742442369461 0.6785702628474082
159 160 0.755125 0.7439516129032258 0.6639340476989746 0.6909028849294109
160 161 0.754375 0.7484879032258065 0.6655936982631684 0.6800686274805376
161 162 0.753 0.7444556451612904 0.6626871073246002 0.6847033404534862
162 163 0.754375 0.7394153225806451 0.6636988418102264 0.678314108041025
163 164 0.756 0.7439516129032258 0.6631928851604462 0.679127223068668
164 165 0.7535 0.7449596774193549 0.6635311431884766 0.6755695458381407
165 166 0.75425 0.7439516129032258 0.6608122355937958 0.6740436525114121
166 167 0.7535 0.751008064516129 0.6623840556144714 0.6725323296362354
167 168 0.753375 0.7474798387096774 0.6615988018512726 0.6734585973524279
168 169 0.7555 0.7404233870967742 0.658470029592514 0.6751049209025598
169 170 0.75275 0.748991935483871 0.6596595213413239 0.6707770478340888
170 171 0.75475 0.7464717741935484 0.6594919128417969 0.6760938773232121
171 172 0.752375 0.7404233870967742 0.6587064244747162 0.6735088594498173
172 173 0.754875 0.7419354838709677 0.6581688785552978 0.6718735867930997
173 174 0.755625 0.7444556451612904 0.6594367871284484 0.672063180515843
174 175 0.75675 0.7484879032258065 0.6572930109500885 0.6681920395743463
175 176 0.7565 0.7555443548387096 0.6567469036579132 0.666823229482097
176 177 0.75625 0.7242943548387096 0.6563848433494568 0.69633928614278
177 178 0.7545 0.7520161290322581 0.6568190479278564 0.6688290171084865
178 179 0.758125 0.7464717741935484 0.6569823319911957 0.668416089588596
179 180 0.756125 0.7474798387096774 0.6567688817977906 0.6825112623553122
180 181 0.75825 0.7384072580645161 0.6541404497623443 0.6793540994967183
181 182 0.754125 0.7484879032258065 0.6573447947502136 0.6673893765095742
182 183 0.7575 0.7459677419354839 0.6545488471984864 0.669147877923904
183 184 0.757625 0.7520161290322581 0.653099508523941 0.6698833357903266
184 185 0.7575 0.7459677419354839 0.652852609872818 0.6746915771115211
185 186 0.75775 0.7469758064516129 0.6531596164703369 0.6692096014176646
186 187 0.75625 0.7429435483870968 0.65095947265625 0.6785491474213139
187 188 0.759875 0.748991935483871 0.6528274257183075 0.6676096195174802
188 189 0.75825 0.751008064516129 0.6515383367538452 0.6677034651079485
189 190 0.761 0.7429435483870968 0.6507666997909546 0.6711848301272239
190 191 0.756 0.7505040322580645 0.6502130575180054 0.6673150216379473
191 192 0.755625 0.7520161290322581 0.6508474731445313 0.6693628420752864
192 193 0.758375 0.7449596774193549 0.6497379775047303 0.6839032778816838
193 194 0.760125 0.7449596774193549 0.6500401992797852 0.6711471715281087
194 195 0.7595 0.751008064516129 0.6485152614116668 0.6667885655356992
195 196 0.761 0.751008064516129 0.6486285464763641 0.6671426277006826
196 197 0.76 0.7515120967741935 0.6487596340179443 0.6690003919985986
197 198 0.759375 0.7494959677419355 0.6480154263973236 0.6674725115299225
198 199 0.761125 0.7494959677419355 0.6471124620437622 0.6651709810380013
199 200 0.7585 0.7389112903225806 0.6480610625743866 0.6811543289692171
200 201 0.75975 0.7379032258064516 0.6461314346790313 0.6756981101728254
201 202 0.759375 0.7459677419354839 0.6457764523029328 0.6665134122294765
202 203 0.75775 0.7550403225806451 0.6469809007644653 0.661889091614754
203 204 0.761125 0.7479838709677419 0.6435232808589936 0.6644216324052503
204 205 0.760375 0.7439516129032258 0.6471346819400787 0.6682568625096352
205 206 0.762 0.7414314516129032 0.6459955296516419 0.6739240298348088
206 207 0.75925 0.7540322580645161 0.6436120250225067 0.6613490379625752
207 208 0.764875 0.7484879032258065 0.6442920634746552 0.665196894638
208 209 0.7595 0.7439516129032258 0.6450260980129242 0.66756044376281
209 210 0.762375 0.7525201612903226 0.6450640928745269 0.6618116507607121
210 211 0.764125 0.7479838709677419 0.6432662625312805 0.6611761902609179
211 212 0.76175 0.7464717741935484 0.6430184645652771 0.6601478543973738
212 213 0.7635 0.7540322580645161 0.6424563028812409 0.6675388928382627
213 214 0.76075 0.7464717741935484 0.6431009001731872 0.6693989961378036
214 215 0.763375 0.7545362903225806 0.6424487299919128 0.6593958552806608
215 216 0.759875 0.7550403225806451 0.6421677529811859 0.6666595695480224
216 217 0.760625 0.7464717741935484 0.6414295537471771 0.6686101180891837
217 218 0.76525 0.7545362903225806 0.641946748971939 0.663229928862664
218 219 0.759875 0.751008064516129 0.6410156943798065 0.6790805785886703
219 220 0.76 0.7484879032258065 0.6424972369670868 0.6616327435739578
220 221 0.7605 0.751008064516129 0.6389381773471833 0.6645574127474139
221 222 0.763125 0.7515120967741935 0.6390037200450898 0.6623315830384532
222 223 0.761 0.7570564516129032 0.6397264943122863 0.6680223576484188
223 224 0.76425 0.7520161290322581 0.6378453469276428 0.6630678580653283
224 225 0.763875 0.7520161290322581 0.6382321789264679 0.662065879952523
225 226 0.762 0.7479838709677419 0.639366191625595 0.661988039170542
226 227 0.765 0.7525201612903226 0.6383702094554902 0.6641764746558282
227 228 0.763125 0.751008064516129 0.6380119869709014 0.6607372049362429
228 229 0.759375 0.7520161290322581 0.6383262400627137 0.6545132811992399
229 230 0.764375 0.7555443548387096 0.6372324013710022 0.6568373855083219
230 231 0.761625 0.7484879032258065 0.6373445687294006 0.6696269560244775
231 232 0.76475 0.7515120967741935 0.6373440618515015 0.6574311083362948
232 233 0.768 0.7555443548387096 0.6352594494819641 0.6599096617391033
233 234 0.7635 0.7464717741935484 0.6361170980930329 0.6795821391767071
234 235 0.76175 0.75 0.6369703621864319 0.6554409640450631
235 236 0.765 0.7535282258064516 0.6348738882541657 0.6552178427096336
236 237 0.762625 0.7520161290322581 0.6365966260433197 0.6692352737149885
237 238 0.7645 0.7449596774193549 0.6336214027404785 0.688425297698667
238 239 0.76525 0.7560483870967742 0.6345989825725555 0.6607512629801228
239 240 0.7625 0.7479838709677419 0.6337947535514832 0.65921801136386
240 241 0.764 0.7515120967741935 0.6340638983249665 0.6573365686401245
241 242 0.767 0.7535282258064516 0.6342987337112427 0.6604504883289337
242 243 0.768 0.7444556451612904 0.6332927324771881 0.671097691020658
243 244 0.76475 0.7520161290322581 0.6348397693634034 0.6558002243118901
244 245 0.76675 0.7474798387096774 0.632552001953125 0.6722479366487072
245 246 0.768 0.748991935483871 0.6314238250255585 0.6594607532024384
246 247 0.764875 0.7444556451612904 0.6325469689369202 0.6605463912410121
247 248 0.7655 0.7525201612903226 0.6310685338973999 0.6538672639477637
248 249 0.766 0.7570564516129032 0.6316399626731872 0.6516222578863944
249 250 0.766125 0.7515120967741935 0.6315728986263275 0.6539699367938503
250 251 0.764625 0.7479838709677419 0.6300767264366149 0.6660763275238776
251 252 0.768375 0.7525201612903226 0.6311213612556458 0.6618637481043416
252 253 0.764375 0.75 0.6319745321273804 0.6553429893908962
253 254 0.765125 0.7484879032258065 0.6321416351795196 0.6639243306652192
254 255 0.765125 0.7424395161290323 0.6311585788726807 0.6792801503212221
255 256 0.766125 0.7575604838709677 0.6310848982334137 0.6525870225121898
256 257 0.767625 0.7520161290322581 0.6288257777690888 0.6681103860178301
257 258 0.7655 0.7550403225806451 0.6306543848514556 0.659768590042668
258 259 0.764 0.7555443548387096 0.6302573113441468 0.6550669179808709
259 260 0.769125 0.7580645161290323 0.6280254762172699 0.6525000554900016
260 261 0.76875 0.7540322580645161 0.6282578265666962 0.6584180526195034
261 262 0.76775 0.7560483870967742 0.6287466604709625 0.6528177463239239
262 263 0.770375 0.7409274193548387 0.6289410617351532 0.6700733111750695
263 264 0.76825 0.7515120967741935 0.628407103061676 0.6540230772187633
264 265 0.7675 0.7520161290322581 0.630276577949524 0.6641475250644069
265 266 0.767875 0.7530241935483871 0.6273887372016906 0.6583489189224858
266 267 0.76625 0.7535282258064516 0.6282406325340271 0.6604514862260511
267 268 0.76925 0.7535282258064516 0.6271086316108704 0.6536620384262454
268 269 0.76825 0.7469758064516129 0.6269893596172332 0.6587162335072795
269 270 0.767125 0.7479838709677419 0.6261151025295257 0.6642194390296936
270 271 0.768 0.7545362903225806 0.62709636926651 0.659077537636603
271 272 0.767875 0.7575604838709677 0.626769639968872 0.6581989363316567
272 273 0.768625 0.7570564516129032 0.6237564489841462 0.6504895312170829
273 274 0.767875 0.7540322580645161 0.6259858434200287 0.6550556219393208
274 275 0.76925 0.7565524193548387 0.6264045915603638 0.6529571308243659
275 276 0.768375 0.7605846774193549 0.6264821493625641 0.6480650353816247
276 277 0.767375 0.7585685483870968 0.6253943295478821 0.6529869091126227
277 278 0.768 0.7585685483870968 0.6250380325317383 0.6509808676858102
278 279 0.77225 0.7560483870967742 0.6249497833251954 0.652328065326137
279 280 0.768375 0.7610887096774194 0.6253566663265229 0.6481388095886477
280 281 0.769 0.7631048387096774 0.6269658114910126 0.6511666505567489
281 282 0.77225 0.7615927419354839 0.6219663026332856 0.6503843261349586
282 283 0.768375 0.7530241935483871 0.6239040396213531 0.6559025366460124
283 284 0.773125 0.7565524193548387 0.6230207705497741 0.6521498032154576
284 285 0.77225 0.7575604838709677 0.6224600894451141 0.6498911640336437
285 286 0.772375 0.7535282258064516 0.6230463716983795 0.6594464038648913
286 287 0.772125 0.7454637096774194 0.623299633026123 0.6596774737681111
287 288 0.7705 0.7575604838709677 0.6226675305366516 0.6503997896948168
288 289 0.771875 0.7615927419354839 0.6228007516860962 0.6582970378860351
289 290 0.769 0.7610887096774194 0.6228819963932037 0.6468615512694081
290 291 0.77075 0.7560483870967742 0.6219233012199402 0.6543603664444338
291 292 0.770125 0.7570564516129032 0.6231648538112641 0.6516289316838787
292 293 0.772625 0.7555443548387096 0.6210650091171265 0.6526086061231552
293 294 0.7735 0.7580645161290323 0.6205819368362426 0.650269466061746
294 295 0.76975 0.7600806451612904 0.6209871962070465 0.6479213949172727
295 296 0.770125 0.7565524193548387 0.6204864876270294 0.6524337001385228
296 297 0.76875 0.7464717741935484 0.622816385269165 0.6591307309366041
297 298 0.771375 0.7585685483870968 0.620723108291626 0.647264729584417
298 299 0.768625 0.7580645161290323 0.621616400718689 0.6502204921937758
299 300 0.7725 0.7555443548387096 0.6198663051128388 0.6479197202190277
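
The space-separated table above is the rendered view of a per-epoch metrics log; the columns appear to follow the ,epoch,train_acc,valid_acc,train_loss,valid_loss schema used by the CSV logs later in this diff. A minimal sketch for loading and plotting such a log (the filename is hypothetical; point it at the actual CSV artifact):

import pandas as pd
import matplotlib.pyplot as plt

# Hypothetical path; substitute the metrics CSV written by the training run.
df = pd.read_csv("metrics.csv", index_col=0)

fig, (ax_acc, ax_loss) = plt.subplots(1, 2, figsize=(10, 4))
ax_acc.plot(df["epoch"], df["train_acc"], label="train_acc")
ax_acc.plot(df["epoch"], df["valid_acc"], label="valid_acc")
ax_acc.set_xlabel("epoch")
ax_acc.set_ylabel("accuracy")
ax_acc.legend()
ax_loss.plot(df["epoch"], df["train_loss"], label="train_loss")
ax_loss.plot(df["epoch"], df["valid_loss"], label="valid_loss")
ax_loss.set_xlabel("epoch")
ax_loss.set_ylabel("loss")
ax_loss.legend()
plt.tight_layout()
plt.show()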

Binary file not shown.

@@ -1,51 +1,49 @@
epoch,train_acc,valid_acc,train_loss,valid_loss ,epoch,train_acc,valid_acc,train_loss,valid_loss
1,0.3112916666666667,0.5076444892473119,2.1129255460103353,1.7863053237238238 0,1,0.391375,0.5882056451612904,1.7793689422607422,1.3356874796652025
2,0.46210416666666665,0.5547715053763441,1.5605127541224162,1.3499009532313193 1,2,0.617125,0.6365927419354839,1.059556163787842,0.9690603344671188
3,0.5200833333333333,0.5892137096774194,1.298180630683899,1.1617528520604616 2,3,0.663125,0.6577620967741935,0.9102170023918151,0.9078671547674364
4,0.5641666666666667,0.6311323924731183,1.161390997727712,1.0419180438082705 3,4,0.69,0.6819556451612904,0.8537670917510987,0.8649420180628377
5,0.6055833333333334,0.6598622311827957,1.062113331158956,0.9560088053826363 4,5,0.71125,0.7056451612903226,0.8021836678981781,0.8317571128568342
6,0.6316666666666667,0.6804435483870968,0.9944041889508565,0.8958990977656457 5,6,0.719375,0.6844758064516129,0.7773117737770081,0.8293971457789021
7,0.6513958333333333,0.6928763440860215,0.9395476338068645,0.8504296951396491 6,7,0.733,0.7247983870967742,0.7432277894020081,0.7570091389840649
8,0.6677708333333333,0.7007728494623656,0.8985106336275737,0.8148919709267155 7,8,0.727125,0.7051411290322581,0.7392586808204651,0.7708722833664187
9,0.6809375,0.7095934139784946,0.8673048818906148,0.7897684433126962 8,9,0.740375,0.7091733870967742,0.716008551120758,0.7547545788749572
10,0.6876666666666666,0.7185819892473119,0.8408271819750468,0.7671139176173877 9,10,0.74775,0.7091733870967742,0.6988923935890198,0.7634256799374858
11,0.697625,0.7247143817204301,0.8202435002326965,0.7502269674372929 10,11,0.75025,0.7328629032258065,0.6836859595775604,0.7220065324537216
12,0.7030625,0.7303427419354839,0.8024592914581299,0.7343912464316174 11,12,0.75525,0.7313508064516129,0.6790840411186219,0.7320531125991575
13,0.7118958333333333,0.7316868279569892,0.7869214735031128,0.7222210591839205 12,13,0.755875,0.734375,0.6686462206840516,0.7067360877990723
14,0.7163958333333333,0.7404233870967742,0.7718908907572428,0.7093277560767307 13,14,0.755875,0.7338709677419355,0.659578008890152,0.6924379564100697
15,0.7211041666666667,0.7437836021505376,0.7589795832633972,0.6981546148177116 14,15,0.758625,0.7217741935483871,0.6591809678077698,0.7092515037905786
16,0.724875,0.748067876344086,0.7483940289815267,0.6881407947950465 15,16,0.770125,0.7474798387096774,0.6366513178348542,0.6689942498360911
17,0.7293958333333334,0.75,0.7373875479698181,0.6788085179944192 16,17,0.76975,0.75,0.64143789935112,0.6781372427940369
18,0.7345,0.7517641129032258,0.7301613558133443,0.6724013622089099 17,18,0.766625,0.7469758064516129,0.6327295961380005,0.6749444469328849
19,0.7350416666666667,0.7550403225806451,0.7210039290587107,0.6651292238184201 18,19,0.772125,0.7298387096774194,0.6213552060127259,0.6950428524324971
20,0.7391458333333333,0.758736559139785,0.711990216255188,0.6584682823509298 19,20,0.771625,0.7560483870967742,0.619529750585556,0.6620656597998834
21,0.7433958333333334,0.7608366935483871,0.7047773049672444,0.6513981505106854 20,21,0.77425,0.7459677419354839,0.6170263500213623,0.6944894117693747
22,0.7442916666666667,0.7634408602150538,0.6997552577654521,0.6454382868864204 21,22,0.77375,0.7379032258064516,0.610548350572586,0.698592597438443
23,0.7481458333333333,0.7653729838709677,0.6937046423753103,0.6401395256160408 22,23,0.774125,0.7494959677419355,0.6073116481304168,0.6649176555295144
24,0.7498333333333334,0.7683131720430108,0.6873725473880767,0.6349253362865859 23,24,0.78,0.75,0.601892656326294,0.6502023070089279
25,0.7529166666666667,0.769573252688172,0.6828897857666015,0.6308066611007977 24,25,0.777875,0.7671370967741935,0.5965238115787506,0.6416350141648324
26,0.7547291666666667,0.7708333333333334,0.6747500312328338,0.6260690948655528 25,26,0.7875,0.751008064516129,0.584319313287735,0.6557927843063108
27,0.7547291666666667,0.7722614247311828,0.6727884339491527,0.6230032514500362 26,27,0.784625,0.7651209677419355,0.5858220131397247,0.6265111680953733
28,0.7580208333333334,0.7736055107526881,0.6705719958146413,0.6201616948650729 27,28,0.77975,0.7610887096774194,0.5928270950317382,0.6353864400617538
29,0.757125,0.7748655913978495,0.6678872625033061,0.6165956912502166 28,29,0.780625,0.7520161290322581,0.5824392430782318,0.6569667564284417
30,0.76025,0.7759576612903226,0.6641453323364258,0.6141596103227267 29,30,0.78475,0.7686491935483871,0.5810435285568237,0.6306867743692091
31,0.7609583333333333,0.7767137096774194,0.6619020007451375,0.6114715427480718 30,31,0.789,0.7706653225806451,0.5672282783985138,0.6261125274242894
32,0.7618333333333334,0.776377688172043,0.6576849890549977,0.609050533463878 31,32,0.78525,0.7560483870967742,0.5757509377002716,0.6505500414679127
33,0.7629583333333333,0.7783938172043011,0.6566993356545766,0.6069687149857962 32,33,0.792,0.7681451612903226,0.5613697295188904,0.629849144527989
34,0.7631666666666667,0.7791498655913979,0.6553838003476461,0.6052348261238426 33,34,0.793875,0.7620967741935484,0.5625830183029175,0.6189906856706066
35,0.7626666666666667,0.7785618279569892,0.6523663277626037,0.6037159392269709 34,35,0.791875,0.7681451612903226,0.5602230775356293,0.6212261732547514
36,0.7653333333333333,0.780325940860215,0.6526273953914642,0.6023694485105494 35,36,0.794625,0.7615927419354839,0.5545250961780548,0.619811047469416
37,0.7639791666666667,0.7810819892473119,0.6524330813884736,0.6012957778669172 36,37,0.794625,0.7701612903225806,0.5573954427242279,0.6205428954093687
38,0.7673958333333334,0.7799059139784946,0.6500039516290029,0.6002496516191831 37,38,0.79425,0.764616935483871,0.5514744529724122,0.628905875067557
39,0.764875,0.7815860215053764,0.6476710465749105,0.5992967845291219 38,39,0.792375,0.7842741935483871,0.5618353080749512,0.5954909113145643
40,0.7661041666666667,0.782258064516129,0.6484741485118866,0.5987250285763894 39,40,0.796375,0.7711693548387096,0.5491654114723206,0.6137347759739045
41,0.7668333333333334,0.7818380376344086,0.64447620677948,0.5979425240588444 40,41,0.799,0.7641129032258065,0.5372960684299469,0.6470560056547965
42,0.7662291666666666,0.7817540322580645,0.6458657967249553,0.5976644568545844 41,42,0.800375,0.7671370967741935,0.5395989503860473,0.6211921880322118
43,0.76675,0.7824260752688172,0.6449048202037811,0.5971484975789183 42,43,0.806,0.7671370967741935,0.5370515692234039,0.6075864828401997
44,0.7677916666666667,0.7825100806451613,0.6437487976551056,0.5968063899906733 43,44,0.801875,0.7605846774193549,0.5388010408878326,0.5891174308715328
45,0.7677083333333333,0.7825940860215054,0.6432626353104909,0.5964810960395361 44,45,0.800875,0.766633064516129,0.539761929512024,0.610026998865989
46,0.7666041666666666,0.7826780913978495,0.6452286802132925,0.5963560240243071 45,46,0.802375,0.780241935483871,0.5270701496601105,0.6000283591208919
47,0.768875,0.7827620967741935,0.6432473388512929,0.5962550809947393 46,47,0.799875,0.7772177419354839,0.5320828959941865,0.5864833074231302
48,0.7674583333333334,0.7826780913978495,0.6433716455300649,0.5961945524779699 47,48,0.807,0.7837701612903226,0.5309389193058014,0.5761975809451072
49,0.7678333333333334,0.7827620967741935,0.6440556212266286,0.596176440036425
50,0.7677083333333333,0.7825940860215054,0.6438165396849315,0.5961719805835396

Binary file not shown.

@@ -1,51 +1,34 @@
epoch,train_acc,valid_acc,train_loss,valid_loss ,epoch,train_acc,valid_acc,train_loss,valid_loss
1,0.6377291666666667,0.7426075268817204,1.0222952149709066,0.6658745436899124 0,1,0.561125,0.6476814516129032,1.271985122203827,0.9911825291572078
2,0.7588541666666667,0.7799059139784946,0.6436424539883931,0.5876969707909451 1,2,0.676375,0.6668346774193549,0.9134431419372558,0.8937955517922679
3,0.7829791666666667,0.7955309139784946,0.5888768948713938,0.5508138095178912 2,3,0.699125,0.6824596774193549,0.8243759956359863,0.8520721235582905
4,0.7927916666666667,0.8009912634408602,0.5641531114578247,0.5473136597423143 3,4,0.716625,0.6995967741935484,0.7818560593128204,0.8005917687569896
5,0.7997291666666667,0.8065356182795699,0.5444657148520152,0.515406842834206 4,5,0.725125,0.7101814516129032,0.745261854171753,0.8037946531849522
6,0.8043541666666667,0.8140120967741935,0.5316140202681223,0.5052012169873843 5,6,0.733375,0.7061491935483871,0.7344184167385102,0.7732808570707997
7,0.80975,0.8156922043010753,0.521293937365214,0.4983856466508681 6,7,0.737875,0.7253024193548387,0.7150477197170257,0.74149090051651
8,0.8155208333333334,0.8162802419354839,0.5079634555180867,0.49958501611986467 7,8,0.74325,0.7253024193548387,0.7022454526424408,0.7516527022084882
9,0.8161875,0.8159442204301075,0.5036936206022898,0.4907228536503289 8,9,0.742625,0.7298387096774194,0.7041031284332275,0.7247120468847214
10,0.816875,0.8160282258064516,0.497530157327652,0.4895895427914076 9,10,0.747875,0.7116935483870968,0.6772661633491516,0.7338270487323884
11,0.8210416666666667,0.8269489247311828,0.4916797243754069,0.47517218673101036 10,11,0.757875,0.7212701612903226,0.6562292041778565,0.7387931039256435
12,0.8210416666666667,0.8266129032258065,0.4892559293905894,0.468310099135163 11,12,0.761625,0.7328629032258065,0.6542983632087708,0.725050816612859
13,0.8241041666666666,0.8211525537634409,0.4825246704419454,0.4856182368852759 12,13,0.75925,0.7318548387096774,0.6493379819393158,0.7001086867624714
14,0.8239791666666667,0.8264448924731183,0.48390908201535543,0.46895075997998636 13,14,0.767375,0.7379032258064516,0.6460576868057251,0.6988139988914612
15,0.8245,0.8230846774193549,0.4795244421164195,0.47176380727880746 14,15,0.765875,0.7288306451612904,0.6339491362571716,0.7128441218406923
16,0.8270416666666667,0.829133064516129,0.47462198424339297,0.4671747069205007 15,16,0.763125,0.7368951612903226,0.6262373595237732,0.714297366719092
17,0.8273958333333333,0.8297211021505376,0.4733834793567657,0.4658442559421703 16,17,0.76925,0.7227822580645161,0.6279029569625855,0.7181522269402781
18,0.828125,0.8337533602150538,0.47195579528808596,0.46035799896845253 17,18,0.77025,0.7449596774193549,0.6159816448688507,0.6757595616002237
19,0.8294583333333333,0.829133064516129,0.4668528276284536,0.46208705472689804 18,19,0.7745,0.7283266129032258,0.6136245548725128,0.6998546681096477
20,0.830625,0.8303931451612904,0.46666145197550457,0.4616392642580053 19,20,0.775375,0.7469758064516129,0.6000997524261474,0.6749713065162781
21,0.8320416666666667,0.8280409946236559,0.46406093080838523,0.4690516353935324 20,21,0.7805,0.748991935483871,0.5928600332736969,0.6656959902855658
22,0.8315833333333333,0.8293010752688172,0.4628266294002533,0.4590829178210228 21,22,0.77525,0.7444556451612904,0.599046837568283,0.6857193170055267
23,0.8315416666666666,0.8298051075268817,0.46202420779069264,0.4582436110383721 22,23,0.785,0.7384072580645161,0.5875316572189331,0.6785462142959717
24,0.8328541666666667,0.8282090053763441,0.4594616918563843,0.46520241454083433 23,24,0.778375,0.765625,0.588378502368927,0.627939272311426
25,0.8321458333333334,0.8313172043010753,0.45899707794189454,0.4611524093535639 24,25,0.78575,0.7459677419354839,0.5700427904129028,0.6502643679418871
26,0.835,0.8311491935483871,0.4561348853111267,0.460902286793596 25,26,0.7805,0.75,0.5785817315578461,0.6712307555060233
27,0.8342708333333333,0.8335013440860215,0.45571692689259846,0.4524733103731627 26,27,0.78675,0.7474798387096774,0.5676946561336518,0.6555941143343526
28,0.83475,0.831989247311828,0.45314634958902994,0.4535403392648184 27,28,0.787875,0.7505040322580645,0.575938116312027,0.6507430134281036
29,0.8353958333333333,0.8333333333333334,0.4508692183494568,0.4509869323622796 28,29,0.789,0.7605846774193549,0.5655779435634612,0.6520830373610219
30,0.8358958333333333,0.8314012096774194,0.4508490650653839,0.4559444804345408 29,30,0.790625,0.7434475806451613,0.5578647639751434,0.6789213486256138
31,0.8379791666666667,0.8347614247311828,0.44810916344324747,0.45132114586009775 30,31,0.787375,0.7565524193548387,0.5687701859474182,0.649224087115257
32,0.8369375,0.832997311827957,0.4488534228801727,0.4531173186917459 31,32,0.79575,0.7560483870967742,0.5432633152008056,0.6388471222692921
33,0.8375208333333334,0.8366935483870968,0.4460577855904897,0.4518213977095901 32,33,0.7895,0.7595766129032258,0.5557173223495483,0.6391933618053314
34,0.8372708333333333,0.8314852150537635,0.4453533016045888,0.4571616124081355
35,0.8381458333333334,0.8351814516129032,0.4446527805328369,0.4513627043975297
36,0.8392916666666667,0.8373655913978495,0.4431237142880758,0.44869983228304056
37,0.838375,0.8347614247311828,0.4424218458334605,0.4486082660895522
38,0.8397916666666667,0.8355174731182796,0.4412206415732702,0.44703892129723743
39,0.8401875,0.8365255376344086,0.4396123299598694,0.4470836206149029
40,0.8399791666666667,0.8345934139784946,0.43971687642733254,0.4466231196157394
41,0.8410833333333333,0.8373655913978495,0.43810279977321626,0.4461495735312021
42,0.8408958333333333,0.8372815860215054,0.4371747978528341,0.4456534408113008
43,0.8414583333333333,0.8363575268817204,0.43774009283383686,0.4454879440287108
44,0.841375,0.8358534946236559,0.4363077602386475,0.4460145914426414
45,0.8408958333333333,0.8363575268817204,0.43665687982241314,0.44518788110825325
46,0.841875,0.8371135752688172,0.43598757874965666,0.44520101848468985
47,0.8422083333333333,0.836945564516129,0.4349166991710663,0.44520143988311933
48,0.8421041666666667,0.8360215053763441,0.43520630804697674,0.44510498354511874
49,0.8416875,0.836861559139785,0.4354708949327469,0.44491737311886204
50,0.8408125,0.836861559139785,0.43524290529886883,0.4449239748139535

Binary file not shown.

@@ -1,51 +1,86 @@
epoch,train_acc,valid_acc,train_loss,valid_loss ,epoch,train_acc,valid_acc,train_loss,valid_loss
1,0.2535833333333333,0.41952284946236557,2.274448096593221,2.2001695325297694 0,1,0.575125,0.7011088709677419,2.031563010215759,1.8535375249001287
2,0.4691875,0.5073084677419355,2.116311640103658,2.051948335862929 1,2,0.723875,0.7515120967741935,1.7430778923034669,1.713461822079074
3,0.5270416666666666,0.5266297043010753,2.009436710357666,1.9777893276624783 2,3,0.76425,0.7525201612903226,1.6991203842163085,1.7051894049490652
4,0.5353541666666667,0.535114247311828,1.9530798486073813,1.9356736265203005 3,4,0.779875,0.7716733870967742,1.6807003259658813,1.690080665772961
5,0.5935416666666666,0.6175235215053764,1.9118746633529664,1.891348486305565 4,5,0.79525,0.7696572580645161,1.6654855661392212,1.6926762519344207
6,0.636625,0.6625504032258065,1.8629151741663614,1.847881397893352 5,6,0.80175,0.7888104838709677,1.6598792200088501,1.6711357831954956
7,0.686625,0.7302587365591398,1.8228577140172322,1.8057919330494379 6,7,0.80875,0.7721774193548387,1.6529147624969482,1.6847506146277151
8,0.7415416666666667,0.7511760752688172,1.7803055089314779,1.7647134245082896 7,8,0.812375,0.8004032258064516,1.649049828529358,1.6597587523921844
9,0.7542291666666666,0.7582325268817204,1.7502256110509236,1.7423059594246648 8,9,0.815375,0.7893145161290323,1.6458229179382324,1.6707303101016628
10,0.7607708333333333,0.764616935483871,1.7317150996526083,1.724781782396378 9,10,0.825,0.811491935483871,1.6370085287094116,1.6518691432091497
11,0.7677291666666667,0.7719254032258065,1.7192926041285197,1.713816902970755 10,11,0.832375,0.795866935483871,1.630222158432007,1.6616583139665666
12,0.7704791666666667,0.7731014784946236,1.71117463239034,1.7070557173862253 11,12,0.83225,0.8145161290322581,1.6293829317092896,1.6454557026586225
13,0.771625,0.7736055107526881,1.7053865385055542,1.7027398668309695 12,13,0.834875,0.8125,1.6268642024993896,1.6500283018235238
14,0.7747708333333333,0.7749495967741935,1.7004834445317587,1.6978983327906618 13,14,0.840875,0.8104838709677419,1.6199358901977539,1.652194688397069
15,0.7763333333333333,0.7751176075268817,1.6967166414260864,1.6960316857983988 14,15,0.83975,0.8069556451612904,1.6210605192184449,1.6520756982987927
16,0.7790416666666666,0.7767977150537635,1.6929608987172444,1.692371742699736 15,16,0.84175,0.8004032258064516,1.6195698108673096,1.6582853217278757
17,0.780125,0.7801579301075269,1.6910814901987712,1.6917471321680213 16,17,0.838625,0.8049395161290323,1.6228710346221924,1.656198097813514
18,0.7817916666666667,0.7827620967741935,1.6882368882497152,1.6861866725388395 17,18,0.83775,0.8140120967741935,1.6226149969100951,1.6462942131104008
19,0.7827291666666667,0.7815860215053764,1.6857908573150635,1.6868788478195027 18,19,0.845375,0.8251008064516129,1.6150365982055663,1.6358921681680987
20,0.7827083333333333,0.7840221774193549,1.6843910643259685,1.6850647259784002 19,20,0.85125,0.8296370967741935,1.6097205333709717,1.6333312757553593
21,0.7848541666666666,0.7857862903225806,1.6823169787724812,1.6813292862266622 20,21,0.851875,0.8210685483870968,1.609424132347107,1.6388044895664338
22,0.7876666666666666,0.7848622311827957,1.6801685171127319,1.6815461574062225 21,22,0.850375,0.8210685483870968,1.6101643466949462,1.6369191561975787
23,0.7879791666666667,0.7848622311827957,1.6788572374979656,1.6818229216401295 22,23,0.858125,0.8185483870967742,1.6031983375549317,1.6421872877305554
24,0.7890833333333334,0.7859543010752689,1.6776897859573365,1.6799169009731663 23,24,0.857125,0.8361895161290323,1.6038059520721435,1.62519553015309
25,0.7902708333333334,0.7883904569892473,1.6763870159784953,1.6777271570697907 24,25,0.854,0.8301411290322581,1.606280044555664,1.6290803609355804
26,0.791,0.789986559139785,1.6751194190979004,1.6777111407249206 25,26,0.858375,0.8119959677419355,1.6011180095672608,1.648349738890125
27,0.7918541666666666,0.7886424731182796,1.674490571975708,1.6776156912567795 26,27,0.861375,0.8371975806451613,1.6000273780822754,1.6237776010267195
28,0.7936666666666666,0.7905745967741935,1.6728278172810873,1.6744540147883917 27,28,0.8705,0.8351814516129032,1.5907322645187378,1.6240986316434798
29,0.793375,0.7876344086021505,1.671903525352478,1.6776354030896259 28,29,0.87175,0.828125,1.5896439476013184,1.6316479982868317
30,0.7945833333333333,0.7901545698924731,1.6710031661987306,1.6751063805754467 29,30,0.867375,0.8266129032258065,1.5936143741607667,1.632056209348863
31,0.7959166666666667,0.7932627688172043,1.6700964994430543,1.6719800926023913 30,31,0.86575,0.8377016129032258,1.5949567575454713,1.6240303862479426
32,0.7955833333333333,0.7933467741935484,1.6697869466145834,1.6723043623790945 31,32,0.86975,0.8336693548387096,1.5907692136764526,1.6251203167823054
33,0.7971458333333333,0.792002688172043,1.668815224647522,1.6722341519530102 32,33,0.86975,0.8397177419354839,1.5919580411911012,1.6214879328204739
34,0.7974375,0.7931787634408602,1.6682115157445272,1.6723163063808153 33,34,0.872375,0.8422379032258065,1.5886561880111694,1.6194973222671016
35,0.7979791666666667,0.7947748655913979,1.6676976483662924,1.670254216399244 34,35,0.876,0.8331653225806451,1.585767653465271,1.6247131862948019
36,0.7984583333333334,0.7950268817204301,1.6669070380528768,1.6704239499184392 35,36,0.871875,0.8392137096774194,1.5895709590911866,1.6215527211466143
37,0.799,0.795866935483871,1.6664897476832072,1.6691849411174815 36,37,0.871125,0.8245967741935484,1.590462643623352,1.6370407227546937
38,0.7991875,0.7948588709677419,1.6660230029424032,1.6700230785595473 37,38,0.87325,0.8301411290322581,1.5876847248077393,1.6306157150576193
39,0.800125,0.795950940860215,1.6655609397888183,1.6688298884258475 38,39,0.8705,0.8240927419354839,1.590844289779663,1.634965923524672
40,0.8006458333333333,0.7956989247311828,1.6652250595092772,1.6690674840763051 39,40,0.88125,0.8402217741935484,1.5803495874404907,1.6220719968118975
41,0.8006875,0.7962869623655914,1.664971160888672,1.6688041135828982 40,41,0.881875,0.8462701612903226,1.5788077726364136,1.614263488400367
42,0.8015416666666667,0.795866935483871,1.6647087678909303,1.668453808753721 41,42,0.87625,0.8487903225806451,1.5843929862976074,1.6131737078389814
43,0.8013958333333333,0.7962029569892473,1.6643381754557292,1.6682484457569737 42,43,0.88175,0.842741935483871,1.5789586782455445,1.6184405088424683
44,0.8016458333333333,0.7965389784946236,1.6641663211186728,1.668392535178892 43,44,0.880375,0.8417338709677419,1.5802030544281005,1.6189708786626016
45,0.8017708333333333,0.7957829301075269,1.6640429185231527,1.6681856429705055 44,45,0.880375,0.8492943548387096,1.5803771505355835,1.610948174230514
46,0.8021041666666666,0.7965389784946236,1.663849822362264,1.6680005634984663 45,46,0.877625,0.8523185483870968,1.5828620948791503,1.6089561793111986
47,0.8024583333333334,0.7966229838709677,1.663770879427592,1.6679569816076627 46,47,0.882125,0.8442540322580645,1.578727219581604,1.6160847948443504
48,0.8025833333333333,0.7962029569892473,1.6636734215418498,1.6680130330465173 47,48,0.872875,0.8392137096774194,1.5874832077026366,1.6228746021947553
49,0.8024583333333334,0.7970430107526881,1.6635978577931723,1.6679544077124646 48,49,0.884125,0.8371975806451613,1.5771635084152222,1.6237398655183855
50,0.8025,0.7970430107526881,1.6635685895284016,1.6679527682642783 49,50,0.88975,0.8341733870967742,1.5712118272781372,1.6256768280459988
50,51,0.876,0.8432459677419355,1.585065812110901,1.6188401445265739
51,52,0.883,0.844758064516129,1.5785351629257203,1.6169025359615203
52,53,0.889125,0.8548387096774194,1.5723771095275878,1.607426397262081
53,54,0.88625,0.8518145161290323,1.5749757404327394,1.6076742141477522
54,55,0.891125,0.8452620967741935,1.5695797872543336,1.6156867473356185
55,56,0.891625,0.8497983870967742,1.5691107511520386,1.6118658050414054
56,57,0.883375,0.8508064516129032,1.577266471862793,1.6087883749315817
57,58,0.891625,0.8422379032258065,1.5702907581329346,1.6168161553721274
58,59,0.89375,0.8553427419354839,1.5679762859344482,1.6071604336461713
59,60,0.885875,0.8482862903225806,1.5749410953521727,1.6108381056016492
60,61,0.891875,0.8417338709677419,1.5696327953338622,1.6187968907817718
61,62,0.894,0.8392137096774194,1.5671770153045654,1.6208721822307957
62,63,0.891125,0.8392137096774194,1.569726734161377,1.6224797733368412
63,64,0.89325,0.8518145161290323,1.568245210647583,1.606977516605008
64,65,0.895125,0.8462701612903226,1.5658181638717652,1.6140611633177726
65,66,0.89175,0.8568548387096774,1.5686321334838866,1.6044510756769488
66,67,0.896375,0.8412298387096774,1.5650024271011354,1.6208048520549652
67,68,0.8895,0.8477822580645161,1.571241024017334,1.6136699338113107
68,69,0.901625,0.8472782258064516,1.5600450325012207,1.612250724146443
69,70,0.89025,0.8306451612903226,1.5705311574935914,1.630635638390818
70,71,0.89675,0.8538306451612904,1.5637341051101685,1.6056317244806597
71,72,0.897,0.8412298387096774,1.5633089447021484,1.6192954970944313
72,73,0.900375,0.8503024193548387,1.560647201538086,1.6097132775091356
73,74,0.895625,0.8533266129032258,1.565330421447754,1.6075265484471475
74,75,0.895125,0.8543346774193549,1.565690894126892,1.6060891266792052
75,76,0.900625,0.8608870967741935,1.560530616760254,1.6011101891917567
76,77,0.897125,0.8563508064516129,1.563725378036499,1.6037284789546844
77,78,0.89175,0.8482862903225806,1.5693257989883422,1.6122698245509979
78,79,0.90225,0.8487903225806451,1.5589981718063355,1.6111104180735927
79,80,0.893375,0.8442540322580645,1.5669814805984497,1.6161983090062295
80,81,0.897125,0.8573588709677419,1.56400607585907,1.6040064134905416
81,82,0.898,0.8568548387096774,1.5621892538070679,1.6044225615839804
82,83,0.89075,0.8503024193548387,1.5697740039825439,1.6114465228972896
83,84,0.900625,0.8598790322580645,1.5606124105453492,1.6002429416102748
84,85,0.89825,0.8553427419354839,1.5628600664138794,1.603792409743032
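
Each hunk above pairs a row from the previous run's log (five columns, no index) with the corresponding row from the new run's log (six columns, with a leading index). A quick way to summarize any of the new-format logs, assuming the six-column schema shown in the headers (the filename is hypothetical):

import pandas as pd

# Hypothetical path; the six-column logs above all share this schema.
df = pd.read_csv("metrics_new.csv", index_col=0)
best = df.loc[df["valid_acc"].idxmax()]
print(f"best valid_acc {best['valid_acc']:.4f} at epoch {int(best['epoch'])} "
      f"(train_acc {best['train_acc']:.4f}, valid_loss {best['valid_loss']:.4f})")

For the last log above, this would report a peak validation accuracy of about 0.8609 at epoch 76.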

83 81 82 0.898 0.8568548387096774 1.5621892538070679 1.6044225615839804
84 82 83 0.89075 0.8503024193548387 1.5697740039825439 1.6114465228972896
85 83 84 0.900625 0.8598790322580645 1.5606124105453492 1.6002429416102748
86 84 85 0.89825 0.8553427419354839 1.5628600664138794 1.603792409743032
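A minimal sketch of how curves like the ones logged above could be plotted from the `metrics` dict built in `train_model`; the dict keys and the column interpretation of the log are assumptions, and `plot_metrics` is a hypothetical helper, not part of the committed code.

#%% Plotting sketch (assumes the metrics dict recorded in train_model)
import matplotlib.pyplot as plt

def plot_metrics(metrics, save_path=None):
    """Plot accuracy and loss curves from a metrics dict with keys
    'epoch', 'train_acc', 'valid_acc', 'train_loss', 'valid_loss'."""
    fig, (ax_acc, ax_loss) = plt.subplots(1, 2, figsize=(10, 4))

    # Accuracy: training vs. validation per epoch
    ax_acc.plot(metrics['epoch'], metrics['train_acc'], label='train')
    ax_acc.plot(metrics['epoch'], metrics['valid_acc'], label='valid')
    ax_acc.set_xlabel('epoch')
    ax_acc.set_ylabel('accuracy')
    ax_acc.legend()

    # Loss: training vs. validation per epoch
    ax_loss.plot(metrics['epoch'], metrics['train_loss'], label='train')
    ax_loss.plot(metrics['epoch'], metrics['valid_loss'], label='valid')
    ax_loss.set_xlabel('epoch')
    ax_loss.set_ylabel('loss')
    ax_loss.legend()

    fig.tight_layout()
    if save_path is not None:
        fig.savefig(save_path)  # e.g. 'training_curves.png' (hypothetical path)
    plt.show()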

Binary file not shown.