I want to use the agent I trained with your rl-agents instead of the stable-baselines3 implementation. How can I use it in code like this:
import warnings
import gym
import highway_env
from stable_baselines3 import DQN
import json
import os
import cv2
import numpy as np

# Labels of the discrete meta-actions used by highway-env
ACTIONS_ALL = {
    0: 'LANE_LEFT',
    1: 'IDLE',
    2: 'LANE_RIGHT',
    3: 'FASTER',
    4: 'SLOWER'
}

# Unnormalized kinematic observation of the surrounding vehicles
config = {
    "observation": {
        "type": "Kinematics",
        "features": ["presence", "x", "y", "vx", "vy"],
        "normalize": False
    }
}

env = gym.make("highway-fast-v0")
env.configure(config)
env.reset()

# Raw string, otherwise the backslashes are interpreted as escape sequences
model = DQN.load(
    r"C:\Users\davin\Documents\Studium\Bachelorarbeit\davin-holten-bachelor\highway_dqn\highway_dqn\DQNFast2\rl_model_500000_steps.zip")

basic_traces_folder = "basic_traces"
simb_traces_folderRL = "simb_tracesRL"
if not os.path.exists(basic_traces_folder):
    os.makedirs(basic_traces_folder)
if not os.path.exists(simb_traces_folderRL):
    os.makedirs(simb_traces_folderRL)

fileCounter = 1
Crash = False
critical_distance = False
critical_distanceY = False

for i in range(1, 50):
    frames = []
    steps = 0
    done = truncated = False
    obs = env.reset()
    dest_state = obs.tolist()
    Crash = False
    critical_distance = False
    critical_distanceY = False
    videoCounter = i
    while not (done or truncated):
        action, _states = model.predict(obs, deterministic=True)
        obs, reward, done, info = env.step(int(action))
        dest_state = obs.tolist()
        check = False
        checkY = False
        steps += 1
        # Trace Creation
I would like to generate traces to track the performance of the agents. That's why I want to use it inside another program.
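(For context, a minimal sketch of how such a per-episode trace could be written out as JSON; the helper name and file layout below are assumptions for illustration, not taken from the original script.)

import json
import os

# Hypothetical helper: dump the states recorded during one episode to a JSON trace file.
def write_trace(folder, episode_index, states):
    os.makedirs(folder, exist_ok=True)
    path = os.path.join(folder, "trace_%d.json" % episode_index)
    with open(path, "w") as f:
        json.dump({"episode": episode_index, "states": states}, f, indent=2)

# e.g. at the end of each episode, with episode_states being a list of observations
# accumulated during the while loop:
# write_trace(simb_traces_folderRL, videoCounter, episode_states)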
Please see e.g. this colab
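For reference, a rough sketch of the swap along the lines of that colab, assuming an rl-agents DQN agent and a saved checkpoint; the configuration values, class path, and checkpoint path below are placeholders, not taken from this issue.

from rl_agents.agents.common.factory import agent_factory

# Recreate the agent for the already-configured env from its rl-agents configuration.
# These entries are placeholders; the '__class__' and hyperparameters must match
# the JSON config the agent was actually trained with.
agent_config = {
    "__class__": "<class 'rl_agents.agents.deep_q_network.dqn.DQNAgent'>",
    "gamma": 0.8,
}
agent = agent_factory(env, agent_config)

# Assumption: the agent exposes load() for the checkpoint written during training
# (the path below is a placeholder for your own run directory).
agent.load("out/HighwayEnv/DQNAgent/run_xyz/checkpoint-final.tar")

# In the evaluation loop, replace the stable-baselines3 call
#   action, _states = model.predict(obs, deterministic=True)
# with the rl-agents interface:
action = agent.act(obs)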