Removed grad clip
parent 939a90dd0f
commit 8e2664533f

1 changed file with 16 additions and 16 deletions
@@ -102,22 +102,22 @@ class Agent:
         self.critic.optimizer.zero_grad()
         self.total_loss.backward()
 
-        T.nn.utils.clip_grad_norm_(
-            self.actor.parameters(), max_norm=2)
-
-        T.nn.utils.clip_grad_norm_(
-            self.critic.parameters(), max_norm=2)
-
-        # Calculate the gradient norms for both networks
-        actor_grad_norm = T.nn.utils.clip_grad_norm_(
-            self.actor.parameters(), max_norm=2)
-
-        critic_grad_norm = T.nn.utils.clip_grad_norm_(
-            self.critic.parameters(), max_norm=2)
-
-        # Log or print the gradient norms
-        print(f"Actor Gradient Norm: {actor_grad_norm}")
-        print(f"Critic Gradient Norm: {critic_grad_norm}")
+        # T.nn.utils.clip_grad_norm_(
+        #     self.actor.parameters(), max_norm=2)
+        #
+        # T.nn.utils.clip_grad_norm_(
+        #     self.critic.parameters(), max_norm=2)
+        #
+        # # Calculate the gradient norms for both networks
+        # actor_grad_norm = T.nn.utils.clip_grad_norm_(
+        #     self.actor.parameters(), max_norm=2)
+        #
+        # critic_grad_norm = T.nn.utils.clip_grad_norm_(
+        #     self.critic.parameters(), max_norm=2)
+        #
+        # # Log or print the gradient norms
+        # print(f"Actor Gradient Norm: {actor_grad_norm}")
+        # print(f"Critic Gradient Norm: {critic_grad_norm}")
 
         self.actor.optimizer.step()
         self.critic.optimizer.step()
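A side note for anyone revisiting this change: T.nn.utils.clip_grad_norm_ returns the total norm computed before clipping, so the removed code clipped each network twice per step (once in the bare call, again in the norm-capturing call, which therefore reported the already-clipped norm). A single call per network both clips and measures, and passing max_norm=float('inf') measures without modifying any gradients. Below is a minimal sketch of that pattern, not code from this repo: the backward_and_log helper and its agent argument are hypothetical stand-ins for the Agent method shown in the diff, and T is assumed to be the usual import torch as T alias.

    import torch as T

    def backward_and_log(agent, max_norm=None):
        # Hypothetical helper mirroring the Agent method in the diff:
        # backward pass, optional clipping, norm logging, optimizer steps.
        agent.actor.optimizer.zero_grad()
        agent.critic.optimizer.zero_grad()
        agent.total_loss.backward()

        # clip_grad_norm_ returns the total norm measured before clipping,
        # so a single call both clips (when the limit is finite) and reports;
        # an infinite limit leaves the gradients untouched.
        limit = float('inf') if max_norm is None else max_norm
        actor_grad_norm = T.nn.utils.clip_grad_norm_(
            agent.actor.parameters(), max_norm=limit)
        critic_grad_norm = T.nn.utils.clip_grad_norm_(
            agent.critic.parameters(), max_norm=limit)

        print(f"Actor Gradient Norm: {actor_grad_norm}")
        print(f"Critic Gradient Norm: {critic_grad_norm}")

        agent.actor.optimizer.step()
        agent.critic.optimizer.step()

Called with max_norm=None this only monitors the norms, matching the effect of this commit with the logging re-enabled; max_norm=2 restores the clipping behavior without the double clip.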