Removed grad clip
parent 939a90dd0f
commit 8e2664533f
1 changed file with 16 additions and 16 deletions
@@ -102,22 +102,22 @@ class Agent:
         self.critic.optimizer.zero_grad()
         self.total_loss.backward()
 
-        T.nn.utils.clip_grad_norm_(
-            self.actor.parameters(), max_norm=2)
-
-        T.nn.utils.clip_grad_norm_(
-            self.critic.parameters(), max_norm=2)
-
-        # Calculate the gradient norms for both networks
-        actor_grad_norm = T.nn.utils.clip_grad_norm_(
-            self.actor.parameters(), max_norm=2)
-
-        critic_grad_norm = T.nn.utils.clip_grad_norm_(
-            self.critic.parameters(), max_norm=2)
-
-        # Log or print the gradient norms
-        print(f"Actor Gradient Norm: {actor_grad_norm}")
-        print(f"Critic Gradient Norm: {critic_grad_norm}")
+        # T.nn.utils.clip_grad_norm_(
+        #     self.actor.parameters(), max_norm=2)
+        #
+        # T.nn.utils.clip_grad_norm_(
+        #     self.critic.parameters(), max_norm=2)
+        #
+        # # Calculate the gradient norms for both networks
+        # actor_grad_norm = T.nn.utils.clip_grad_norm_(
+        #     self.actor.parameters(), max_norm=2)
+        #
+        # critic_grad_norm = T.nn.utils.clip_grad_norm_(
+        #     self.critic.parameters(), max_norm=2)
+        #
+        # # Log or print the gradient norms
+        # print(f"Actor Gradient Norm: {actor_grad_norm}")
+        # print(f"Critic Gradient Norm: {critic_grad_norm}")
 
         self.actor.optimizer.step()
         self.critic.optimizer.step()
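For context, below is a minimal runnable sketch of the update step as it behaved before this commit. It assumes `T` is PyTorch imported as `import torch as T` and that each network carries its optimizer as an `.optimizer` attribute, as the diff suggests; the surrounding `Agent` class and its constructor are hypothetical reconstructions, not code from this repository.

import torch as T
import torch.nn as nn


class Agent:
    """Hypothetical shell around the update step shown in the diff."""

    def __init__(self, actor: nn.Module, critic: nn.Module, lr: float = 1e-3):
        self.actor = actor
        self.critic = critic
        # Attaching optimizers to the networks mirrors the
        # `self.actor.optimizer` / `self.critic.optimizer` calls in the diff.
        self.actor.optimizer = T.optim.Adam(self.actor.parameters(), lr=lr)
        self.critic.optimizer = T.optim.Adam(self.critic.parameters(), lr=lr)
        # Assumed to be computed elsewhere from the actor and critic losses.
        self.total_loss = None

    def learn(self):
        # Clear stale gradients before the new backward pass.
        self.actor.optimizer.zero_grad()
        self.critic.optimizer.zero_grad()

        self.total_loss.backward()

        # Pre-commit behaviour: rescale each network's gradients in place so
        # their global L2 norm is at most 2. clip_grad_norm_ returns the total
        # norm measured before clipping, which is what the removed print
        # statements were logging.
        actor_grad_norm = T.nn.utils.clip_grad_norm_(
            self.actor.parameters(), max_norm=2)
        critic_grad_norm = T.nn.utils.clip_grad_norm_(
            self.critic.parameters(), max_norm=2)
        print(f"Actor Gradient Norm: {actor_grad_norm}")
        print(f"Critic Gradient Norm: {critic_grad_norm}")

        self.actor.optimizer.step()
        self.critic.optimizer.step()

Note that the pre-commit code called `clip_grad_norm_` twice per network, once bare and once to capture the norm; the single call per network in the sketch is equivalent, since the function clips in place and returns the measured norm. After this commit, everything between `backward()` and `step()` is commented out, so the gradients reach the optimizers unclipped.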