
Commit 205024a

Merge pull request #60 from aws-deepracer-community/log-new-field
Add fix to the new obstacle_crash_counter field in log file
2 parents 03daf30 + dd557ff commit 205024a

3 files changed (+668, -672 lines)

deepracer/logs/log.py

Lines changed: 7 additions & 7 deletions
@@ -39,8 +39,8 @@ class DeepRacerLog:
         "episode_status",
         "pause_duration"
     ]
-    # TODO Column names as a workaround for an excess comma in the CSV file
-    _COL_NAMES_WORKAROUND = [
+    # Additional obstacle_crash_counter column is added to the CSV file.
+    _COL_NAMES_NEW = [
         "episode",
         "steps",
         "x",
@@ -49,7 +49,6 @@ class DeepRacerLog:
         "steering_angle",
         "speed",
         "action",
-        "action_b",
         "reward",
         "done",
         "all_wheels_on_track",
@@ -58,7 +57,8 @@ class DeepRacerLog:
         "track_len",
         "tstamp",
         "episode_status",
-        "pause_duration"
+        "pause_duration",
+        "obstacle_crash_counter"
     ]
     _HYPERPARAM_KEYS = [
         "batch_size",
@@ -120,10 +120,10 @@ def __init__(self, model_folder: str = None, filehandler: FileHandler = None,
     def _read_csv(self, path: str, splitRegex, type: LogType = LogType.TRAINING):
         try:
             csv_bytes = self.fh.get_file(path)
-            # TODO: this is a workaround and should be removed when logs are fixed
+            # Work also with a new column
             df = pd.read_csv(BytesIO(csv_bytes), encoding='utf8',
-                             names=self._COL_NAMES_WORKAROUND, header=0)
-            df = df.drop("action_b", axis=1)
+                             names=self._COL_NAMES_NEW, header=0)
+            df = df.drop("obstacle_crash_counter", axis=1)
         except pd.errors.ParserError:
             try:
                 df = pd.read_csv(BytesIO(csv_bytes), names=self._COL_NAMES, header=0)
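For context, the pattern in the _read_csv change above is: give pandas the full set of column names, including the trailing obstacle_crash_counter that newer logs emit, then drop that column so downstream code sees the same frame layout as before. A minimal, self-contained sketch of the same idea (the shortened column list and the sample CSV bytes are illustrative only, not taken from a real log):

from io import BytesIO

import pandas as pd

# Shortened, illustrative version of the _COL_NAMES_NEW list from the diff above.
COL_NAMES_NEW = ["episode", "steps", "reward", "pause_duration", "obstacle_crash_counter"]

# Hypothetical CSV bytes in the new log format (header row plus one data row).
csv_bytes = b"episode,steps,reward,pause_duration,obstacle_crash_counter\n0,1,0.5,0.0,0\n"

# names= supplies the column labels; header=0 tells pandas to skip the file's own header row.
df = pd.read_csv(BytesIO(csv_bytes), encoding="utf8", names=COL_NAMES_NEW, header=0)

# Drop the new counter so callers keep the pre-change schema.
df = df.drop("obstacle_crash_counter", axis=1)

print(df.columns.tolist())  # ['episode', 'steps', 'reward', 'pause_duration']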

deepracer/logs/log_utils.py

Lines changed: 10 additions & 14 deletions
@@ -143,10 +143,6 @@ def convert_to_pandas(data, episodes_per_iteration=20, stream=None):

        for d in data[:]:
            parts = d.rstrip().split(",")
-           # TODO: this is a workaround and should be removed when logs are fixed
-           parts_workaround = 0
-           if len(parts) > 17:
-               parts_workaround = 1
            episode = int(parts[0])
            steps = int(parts[1])
            x = float(parts[2])
@@ -158,16 +154,16 @@ def convert_to_pandas(data, episodes_per_iteration=20, stream=None):
                action = int(parts[7])
            except ValueError as e:
                action = -1
-           reward = float(parts[8+parts_workaround])
-           done = 0 if 'False' in parts[9+parts_workaround] else 1
-           all_wheels_on_track = parts[10+parts_workaround]
-           progress = float(parts[11+parts_workaround])
-           closest_waypoint = int(parts[12+parts_workaround])
-           track_len = float(parts[13+parts_workaround])
-           tstamp = Decimal(parts[14+parts_workaround])
-           episode_status = parts[15+parts_workaround]
-           if len(parts) > 16+parts_workaround:
-               pause_duration = float(parts[16+parts_workaround])
+           reward = float(parts[8])
+           done = 0 if 'False' in parts[9] else 1
+           all_wheels_on_track = parts[10]
+           progress = float(parts[11])
+           closest_waypoint = int(parts[12])
+           track_len = float(parts[13])
+           tstamp = Decimal(parts[14])
+           episode_status = parts[15]
+           if len(parts) > 16:
+               pause_duration = float(parts[16])
            else:
                pause_duration = 0.0
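The simplification in log_utils.py works because, as the log.py diff suggests, obstacle_crash_counter is appended after pause_duration at the end of the row: the fixed indices in convert_to_pandas stay valid, any trailing extra field is simply ignored, and the old parts_workaround offset (which compensated for an excess column after action) can be dropped. A minimal sketch of that fixed-index parsing; the sample trace line, its values, and the field order (taken from the column list implied by the diffs) are illustrative only, and only a few fields are extracted:

from decimal import Decimal

# Hypothetical comma-separated trace line with illustrative values, ordered as
# episode, steps, x, y, heading, steering_angle, speed, action, reward, done,
# all_wheels_on_track, progress, closest_waypoint, track_len, tstamp,
# episode_status[, pause_duration, ...].
line = "12,34,2.5,0.7,1.2,15.0,1.0,3,0.85,False,True,42.5,57,17.6,1614711234.56,in_progress,0.0"

parts = line.rstrip().split(",")
episode = int(parts[0])
steps = int(parts[1])
x = float(parts[2])
try:
    action = int(parts[7])
except ValueError:
    action = -1  # fall back when the action field is not an integer
reward = float(parts[8])
done = 0 if 'False' in parts[9] else 1
tstamp = Decimal(parts[14])
episode_status = parts[15]
# pause_duration only exists in newer logs; any trailing field beyond it
# (such as obstacle_crash_counter) is ignored by the fixed indices.
pause_duration = float(parts[16]) if len(parts) > 16 else 0.0

print(episode, steps, action, reward, done, episode_status, pause_duration)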
