@@ -0,0 +1,18 @@
{
    "MonoBehaviour": {
        "Version": 4,
        "EnableBurstCompilation": true,
        "EnableOptimisations": true,
        "EnableSafetyChecks": false,
        "EnableDebugInAllBuilds": false,
        "DebugDataKind": 1,
        "EnableArmv9SecurityFeatures": false,
        "CpuMinTargetX32": 0,
        "CpuMaxTargetX32": 0,
        "CpuMinTargetX64": 0,
        "CpuMaxTargetX64": 0,
        "CpuTargetsX32": 6,
        "CpuTargetsX64": 72,
        "OptimizeFor": 0
    }
}
@@ -0,0 +1,31 @@
import os
import shutil

# Source directory where files are located
source_dir = os.path.join(os.getcwd(), "results")

# Destination directory where files will be copied
destination_dir = os.path.join(os.getcwd(), "tensorboard_data_copy_01_results")

# Find all files in the source directory starting with "events"
files = []
for root, dirs, filenames in os.walk(source_dir):
    for filename in filenames:
        if filename.startswith('events'):
            files.append(os.path.join(root, filename))

# Loop through each file found
for file_path in files:
    # Get the directory where the file is located relative to the source directory
    relative_dir = os.path.relpath(os.path.dirname(file_path), source_dir)

    # Create the corresponding directory structure in the destination directory
    os.makedirs(os.path.join(destination_dir, relative_dir), exist_ok=True)

    # Copy the file to the destination directory, preserving the directory structure
    shutil.copy(file_path, os.path.join(destination_dir, relative_dir))

    # Print out the copied files
    print(f"Copied {file_path} to {os.path.join(destination_dir, relative_dir)}")

print("Copy operation completed.")
@@ -0,0 +1,64 @@
import os
import json
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator


def scalar_event_list_to_dict_list(scalar_event_list):
    dict_list = []
    for scalar_event in scalar_event_list:
        # Keep step and value, drop wall_time
        dict_list.append({'step': scalar_event.step, 'value': scalar_event.value})
    return dict_list


def convert_to_json(events_file_path, output_json_path):
    accumulator = EventAccumulator(events_file_path)
    accumulator.Reload()  # loads events from the file

    # Get a List[ScalarEvent] for the cumulative-reward tag
    scalar_events = accumulator.Scalars('Environment/Cumulative Reward')

    # Convert the List[ScalarEvent] to List[Dict]
    dict_list = scalar_event_list_to_dict_list(scalar_events)

    # Dump the List[Dict] as JSON and write it to a file
    with open(output_json_path, "w") as json_file:
        json.dump(dict_list, json_file)


if __name__ == "__main__":

    # Source directory where files are located
    source_dir = os.path.join(os.getcwd(), "tensorboard_data_copy_01_results")

    # Destination directory where the JSON files will be written
    destination_dir = os.path.join(os.getcwd(), "tensorboard_data_json_02_results")

    # Find all files in the source directory starting with "events"
    files = []
    for root, dirs, filenames in os.walk(source_dir):
        for filename in filenames:
            if filename.startswith('events'):
                files.append(os.path.join(root, filename))

    # Loop through each file found
    for file_path in files:
        # Get the directory where the file is located relative to the source directory
        relative_dir = os.path.relpath(os.path.dirname(file_path), source_dir)

        # Create the corresponding directory structure in the destination directory
        os.makedirs(os.path.join(destination_dir, relative_dir), exist_ok=True)

        # Define the new filename with '.json' extension
        json_filename = os.path.splitext(os.path.basename(file_path))[0] + '.json'

        # Define the new path of the .json file
        json_filepath = os.path.join(destination_dir, relative_dir, json_filename)

        # Convert the TensorBoard events file to JSON
        convert_to_json(file_path, json_filepath)

        # Print out the converted file
        print(f"Convert {file_path} to {json_filepath}")

    print("Convert operation completed.")
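The converter assumes every run logged the ML-Agents scalar tag 'Environment/Cumulative Reward'. If a run is missing it, a small helper like the sketch below (the function name is illustrative) can list the scalar tags an events file actually contains via EventAccumulator.Tags().

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

def list_scalar_tags(events_file_path):
    # Tags() returns a dict of tag lists keyed by type; the 'scalars' entry
    # holds the scalar tag names available in this events file.
    accumulator = EventAccumulator(events_file_path)
    accumulator.Reload()
    return accumulator.Tags()['scalars']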
@@ -0,0 +1,58 @@
import os
import json
import numpy as np


def convert_to_npy(json_file_path, output_npy_path):
    # Load the JSON file
    with open(json_file_path, 'r') as f:
        json_data = json.load(f)

    # Extract 'step' and 'value' from the list of dictionaries
    steps = np.array([d['step'] for d in json_data])
    values = np.array([d['value'] for d in json_data])

    # Combine 'steps' and 'values' into a single 2 x N array
    combined_array = np.array([steps, values])

    # Save the combined array to a single .npy file
    np.save(output_npy_path, combined_array)


if __name__ == "__main__":

    # Source directory where files are located
    source_dir = os.path.join(os.getcwd(), "tensorboard_data_json_02_results")

    # Destination directory where the .npy files will be written
    destination_dir = os.path.join(os.getcwd(), "tensorboard_data_numpy_03_results")

    # Find all files in the source directory starting with "events"
    files = []
    for root, dirs, filenames in os.walk(source_dir):
        for filename in filenames:
            if filename.startswith('events'):
                files.append(os.path.join(root, filename))

    # Loop through each file found
    for file_path in files:
        # Get the directory where the file is located relative to the source directory
        relative_dir = os.path.relpath(os.path.dirname(file_path), source_dir)

        # Create the corresponding directory structure in the destination directory
        os.makedirs(os.path.join(destination_dir, relative_dir), exist_ok=True)

        # Define the new filename with '.npy' extension
        npy_filename = os.path.splitext(os.path.basename(file_path))[0] + '.npy'

        # Define the new path of the .npy file
        npy_filepath = os.path.join(destination_dir, relative_dir, npy_filename)

        # Convert the JSON file to a .npy file
        convert_to_npy(file_path, npy_filepath)

        # Print out the converted file
        print(f"Convert {file_path} to {npy_filepath}")

    print("Convert operation completed.")
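To sanity-check one of the outputs, the saved array can be loaded back; the path below is only a placeholder for whatever file the script produced.

import numpy as np

# The saved array is 2 x N: row 0 holds the steps, row 1 the reward values.
data = np.load("tensorboard_data_numpy_03_results/some_run/events_example.npy")  # placeholder path
steps, values = data[0], data[1]
print(steps.shape, values.shape)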
@@ -0,0 +1,66 @@
import os
import numpy as np


if __name__ == "__main__":

    # Source directory where files are located
    source_dir = os.path.join(os.getcwd(), "tensorboard_data_numpy_03_results")

    # Destination directory where the merged files will be written
    destination_dir = os.path.join(os.getcwd(), "tensorboard_data_merged_numpy_04_results")

    # Find all files starting with "events" whose run directory belongs to
    # version v5.0.1, v5.1.0 or v5.2.0
    files = []
    for root, dirs, filenames in os.walk(source_dir):
        normal_root = os.path.normpath(root)
        # The run directory one level up carries the version number, e.g. va.b.c-d
        version = os.path.split(normal_root)[0]
        for filename in filenames:
            if filename.startswith('events') and any(sub in version for sub in ('v5.0.1', 'v5.1.0', 'v5.2.0')):
                files.append(os.path.join(root, filename))

    # Running sum of the 'value' rows for the current group of seeds
    values = 0

    # Loop through each file found. The merge assumes every three consecutive
    # files are the three seeds of one version and share the same step axis.
    for num, file_path in enumerate(files):
        # Get the directory where the file is located relative to the source directory
        relative_dir = os.path.relpath(os.path.dirname(file_path), source_dir)

        # Split off the directory specifying the version number, va.b.c-d
        first_level_dir = os.path.split(relative_dir.rstrip(os.sep))[0]

        # Split the version number to get va.b.c
        version_number = first_level_dir.split('-')[0]

        # Load the data
        data = np.load(file_path)

        # Extract steps and accumulate values
        steps = data[0]
        values = values + data[1]

        # Merge the results from the three seeds of one version
        if num % 3 == 2:
            values = values / 3

            # Create the corresponding directory structure in the destination directory
            os.makedirs(os.path.join(destination_dir, relative_dir), exist_ok=True)

            # Define the new filename with '.npy' extension
            npy_filename = os.path.splitext(os.path.basename(file_path))[0] + '.npy'

            # Define the new path of the .npy file
            npy_filepath = os.path.join(destination_dir, relative_dir, npy_filename)

            # Combine 'steps' and the averaged 'values' into a single 2 x N array
            combined_array = np.array([steps, values])

            # Save the merged array to an .npy file
            np.save(npy_filepath, combined_array)

            print(f"Save merged results for {version_number}")

            # Reset the running sum before the next group of seeds
            values = 0

        print(f"Add {file_path}")

    print("Convert operation completed.")
@@ -0,0 +1,81 @@
import os
import numpy as np
import matplotlib.pyplot as plt


def smooth(scalars, weight):  # Weight between 0 and 1
    last = scalars[0]  # First value in the plot (first timestep)
    smoothed = list()
    for point in scalars:
        smoothed_val = last * weight + (1 - weight) * point  # Calculate smoothed value
        smoothed.append(smoothed_val)  # Save it
        last = smoothed_val  # Anchor the last smoothed value

    return smoothed


if __name__ == "__main__":

    # Source directory where files are located
    source_dir = os.path.join(os.getcwd(), "tensorboard_data_merged_numpy_04_results")

    # Destination directory where the plot will be saved
    destination_dir = os.path.join(os.getcwd(), "tensorboard_data_seaborn_05_results")

    # Find all files in the source directory starting with "events"
    files = []
    for root, dirs, filenames in os.walk(source_dir):
        for filename in filenames:
            if filename.startswith('events'):
                files.append(os.path.join(root, filename))

    # Legend labels
    legend_labels = {'v5.0.1': 'Pos+CNN', 'v5.1.0': 'Pos', 'v5.2.0': 'CNN'}

    # Create a line plot
    plt.figure(figsize=(10, 8))

    # Loop through each file found
    for num, file_path in enumerate(files):
        # Get the directory where the file is located relative to the source directory
        relative_dir = os.path.relpath(os.path.dirname(file_path), source_dir)

        # Split off the directory specifying the version number, va.b.c-d
        first_level_dir = os.path.split(relative_dir.rstrip(os.sep))[0]

        # Split the version number to get va.b.c
        version_number = first_level_dir.split('-')[0]

        # Read the .npy file into an array
        data = np.load(file_path)

        # Extract steps and values
        steps = data[0]
        values = data[1]

        # Smooth values
        smoothed_values = smooth(values, 0.95)

        plt.plot(steps, smoothed_values, linestyle='-', label=legend_labels[version_number])
        print(f"Add {file_path}")

    # Add labels and title
    plt.xlabel('Step')
    plt.ylabel('Cumulative Reward')
    plt.title('Line Plot of Step vs Cumulative Reward')

    # Show grid
    plt.grid(True)

    # Add legend
    plt.legend()

    # Create the destination directory
    os.makedirs(destination_dir, exist_ok=True)

    # Save the plot as a JPG file
    plt.savefig(os.path.join(destination_dir, 'line_plot.jpg'), format='jpg')

    print("Plot operation completed.")
@@ -0,0 +1,29 @@
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import pandas as pd

# Generate data
x = np.arange(5)

y_a = np.random.uniform(size=5)
y_b = np.random.uniform(size=5)

label_a = np.full(x.shape, fill_value='a')
label_b = np.full(x.shape, fill_value='b')

data_a = pd.DataFrame(np.concatenate((x[:, None], label_a[:, None], y_a[:, None]), axis=1),
                      columns=['x', 'label', 'y'])
data_b = pd.DataFrame(np.concatenate((x[:, None], label_b[:, None], y_b[:, None]), axis=1),
                      columns=['x', 'label', 'y'])
data = pd.concat([data_a, data_b], axis=0)

# The x and y columns must be numeric, but the concatenation above turns them
# into object dtype, so convert them back
data[['x', 'y']] = data[['x', 'y']].apply(pd.to_numeric)
print(data)

# Set the style
sns.set_theme(context='paper', style='darkgrid')
fig = plt.figure()
plt.title('multi lines')

# Plot the mean of 'y' per 'x' with a band of one standard deviation
sns.lineplot(x="x", y="y", data=data, errorbar=('sd', 1))
plt.show()
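Without a hue, lineplot aggregates both labelled series into one mean line with the ±1 sd band. To draw them as two separate lines instead, the call can be varied as below (this reuses data, sns and plt from the script above):

sns.lineplot(x="x", y="y", hue="label", data=data)
plt.show()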