Hello, everyone.
I have a problem with the triplet loss in gluon.loss.
This code uses two data points — [0,0,0] and [1,1,1] — and the loss does not decrease.
I cannot find what is wrong. Please help me.
Thank you.
# -*- coding: utf-8 -*-
from mxnet import ndarray as F
from mxnet import autograd
from mxnet import cpu, autograd, nd
from mxnet.gluon import Trainer, Block, nn
from mxnet.gluon.loss import TripletLoss
import random
import numpy as np
class Model(Block):
    """Tiny embedding network: 3-d input -> 4-unit tanh hidden layer -> 2-d output."""

    def __init__(self, **kwargs):
        super(Model, self).__init__(**kwargs)
        with self.name_scope():
            # Hidden layer (tanh is applied in forward) and linear embedding head.
            self.dense1 = nn.Dense(4)
            self.dense2 = nn.Dense(2)

    def forward(self, x):
        # Non-linearity on the hidden layer; the raw linear output of
        # dense2 is used directly as the embedding.
        hidden = F.tanh(self.dense1(x))
        return self.dense2(hidden)
# Two fixed 3-d data points; the network should learn to embed them apart.
X = [[1,1,1]]
Y = [[0,0,0]]
model = Model()
# NOTE(review): passing four CPU contexts replicates parameters across them;
# a single cpu(0) context would suffice for this toy example — confirm intent.
model.initialize(ctx=[cpu(0),cpu(1),cpu(2),cpu(3)])
trainer = Trainer(model.collect_params(),'adam')
loss_func = TripletLoss()
def get_one_triplet():
    """Return one (anchor, positive, negative) triple of raw input vectors.

    Anchor and positive are the same point (X or Y, picked with equal
    probability); the negative is the other point.
    """
    pick_x = random.random() < 0.5
    same, other = (X[0], Y[0]) if pick_x else (Y[0], X[0])
    return (same, same, other)
print('start training...')
batch_size = 15
iterations = 1000000
log_interval = 100
logs = []
for iteration in range(1, iterations + 1):
    # Assemble one mini-batch of triplets as parallel anchor/positive/negative lists.
    anchor = []
    positive = []
    negative = []
    for batch in range(batch_size):
        triplet = get_one_triplet()
        anchor.append(triplet[0])
        positive.append(triplet[1])
        negative.append(triplet[2])
    anchor = nd.array(anchor)
    positive = nd.array(positive)
    negative = nd.array(negative)
    # Record the forward pass so backward() can compute gradients.
    with autograd.record():
        output1 = model(anchor)
        output2 = model(positive)
        output3 = model(negative)
        loss = loss_func(output1, output2, output3)
    logs.append(np.mean(loss.asnumpy()))
    loss.backward()
    # BUG FIX: the original code never called trainer.step(), so gradients
    # were computed but the parameters were never updated — which is why
    # the loss could not decrease. step() applies one optimizer update,
    # normalized by the batch size.
    trainer.step(batch_size)
    if log_interval == len(logs):
        ll = np.mean(logs)
        print('%d iteration loss=%f...' % (iteration, ll))
        logs = []