Removed grad clip

Vasilis Valatsos 2023-12-10 11:59:27 +01:00
parent 939a90dd0f
commit 8e2664533f


@@ -102,22 +102,22 @@ class Agent:
         self.critic.optimizer.zero_grad()
         self.total_loss.backward()
-        T.nn.utils.clip_grad_norm_(
-            self.actor.parameters(), max_norm=2)
-
-        T.nn.utils.clip_grad_norm_(
-            self.critic.parameters(), max_norm=2)
-
-        # Calculate the gradient norms for both networks
-        actor_grad_norm = T.nn.utils.clip_grad_norm_(
-            self.actor.parameters(), max_norm=2)
-
-        critic_grad_norm = T.nn.utils.clip_grad_norm_(
-            self.critic.parameters(), max_norm=2)
-
-        # Log or print the gradient norms
-        print(f"Actor Gradient Norm: {actor_grad_norm}")
-        print(f"Critic Gradient Norm: {critic_grad_norm}")
+        # T.nn.utils.clip_grad_norm_(
+        #     self.actor.parameters(), max_norm=2)
+        #
+        # T.nn.utils.clip_grad_norm_(
+        #     self.critic.parameters(), max_norm=2)
+        #
+        # # Calculate the gradient norms for both networks
+        # actor_grad_norm = T.nn.utils.clip_grad_norm_(
+        #     self.actor.parameters(), max_norm=2)
+        #
+        # critic_grad_norm = T.nn.utils.clip_grad_norm_(
+        #     self.critic.parameters(), max_norm=2)
+        #
+        # # Log or print the gradient norms
+        # print(f"Actor Gradient Norm: {actor_grad_norm}")
+        # print(f"Critic Gradient Norm: {critic_grad_norm}")
         self.actor.optimizer.step()
         self.critic.optimizer.step()
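
For reference, below is a minimal, self-contained sketch of the pattern this commit disables: clipping gradients with torch.nn.utils.clip_grad_norm_ and logging the pre-clip norms it returns before stepping the optimizers. Only the clip/log/step sequence and max_norm=2 come from the diff; the stand-in Linear networks, optimizer settings, and dummy loss are illustrative assumptions.

import torch as T
import torch.nn as nn

actor = nn.Linear(8, 4)    # stand-in for the actor network (assumed shape)
critic = nn.Linear(8, 1)   # stand-in for the critic network (assumed shape)
actor_opt = T.optim.Adam(actor.parameters(), lr=3e-4)
critic_opt = T.optim.Adam(critic.parameters(), lr=3e-4)

obs = T.randn(16, 8)
# Dummy combined loss standing in for self.total_loss in the diff.
total_loss = actor(obs).pow(2).mean() + critic(obs).pow(2).mean()

actor_opt.zero_grad()
critic_opt.zero_grad()
total_loss.backward()

# clip_grad_norm_ rescales the gradients in place so their global norm is
# at most max_norm, and returns the norm measured *before* clipping, which
# makes it useful for logging even when clipping rarely triggers.
actor_grad_norm = T.nn.utils.clip_grad_norm_(actor.parameters(), max_norm=2)
critic_grad_norm = T.nn.utils.clip_grad_norm_(critic.parameters(), max_norm=2)
print(f"Actor Gradient Norm: {actor_grad_norm.item():.4f}")
print(f"Critic Gradient Norm: {critic_grad_norm.item():.4f}")

actor_opt.step()
critic_opt.step()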