Workaround for duplicate models for pit_crew
Signed-off-by: methylDragon <[email protected]>
methylDragon committed Apr 4, 2024
1 parent a83d0e8 commit efa3ace
Showing 1 changed file with 34 additions and 27 deletions: rmf_building_map_tools/pit_crew/pit_crew.py
@@ -63,7 +63,7 @@
"build_and_update_cache",
"init_logging",
"PitCrewFormatter",
"ModelNames",
"ModelTuple",
"download_model_fuel_tools",
"sync_sdf"
]
@@ -72,7 +72,8 @@
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())

ModelNames = namedtuple("ModelNames", ["model_name", "author_name"])
ModelTuple = namedtuple(
"ModelTuple", ["model_name", "author_name", "upload_date"])


def remove_spaces(original):
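For reference, the new three-field ModelTuple behaves like any namedtuple; a minimal sketch (the model name, author, and upload date below are invented):

from collections import namedtuple

ModelTuple = namedtuple(
    "ModelTuple", ["model_name", "author_name", "upload_date"])

chair = ModelTuple("officechair", "openrobotics", "2024-04-01T00:00:00Z")
print(chair.model_name)         # field access by name
name, author, uploaded = chair  # still unpacks like a plain tuple
print(chair[:2])                # ('officechair', 'openrobotics')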
@@ -113,8 +114,8 @@ def get_missing_models(model_names, model_path=None,
Args:
model_names (iterable): Iterable of model names to classify.
Also supports ModelNames tuples, or unnamed tuples of
(model_name, author_name)!
Also supports ModelTuple tuples, or unnamed tuples of
(model_name, author_name) or (model_name, author_name, upload_time)!
model_path (str, optional): Overall path to model directory.
Defaults to None. If None, function will use "~/.gazebo/models" or
"~/.ignition/fuel" depending on the value of ign.
@@ -144,9 +145,11 @@
- Missing models are models that are not in your local directory
and also missing from Fuel.
"""
for key, model_name in enumerate(model_names):
if isinstance(model_name, ModelNames) or isinstance(model_name, tuple):
assert len(model_name) == 2, \
# Enumerate prevents a lone tuple from being interpreted as a list of three
# singles.
for _, model_name in enumerate(model_names):
if isinstance(model_name, ModelTuple) or isinstance(model_name, tuple):
assert len(model_name) == 2 or len(model_name) == 3, \
"Invalid model name tuple given: %s!" % model_name

if update_cache:
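The comment above appears to guard against a common Python pitfall: iterating over a lone tuple yields its fields one by one, so a single (model_name, author_name, upload_date) tuple passed without a wrapping list would look like three separate entries. A small sketch of that behaviour (names invented):

lone = ("officechair", "openrobotics", "2024-01-01T00:00:00Z")
for entry in lone:
    print(entry)      # prints three strings, as if they were three models

for entry in [lone]:
    print(entry)      # wrapping in a list keeps it as one model entry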
@@ -192,7 +195,7 @@ def get_missing_models(model_names, model_path=None,
elif isinstance(model, tuple):
model_name = model[0]
author_name = model[1]
elif isinstance(model, ModelNames):
elif isinstance(model, ModelTuple):
model_name = model.model_name
author_name = model.author_name
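A simplified sketch of the dispatch above, written as a standalone helper for illustration only; note that a ModelTuple is itself a tuple, so the indexing branch covers named and unnamed tuples alike:

def split_model_entry(model, default_author=""):
    """Illustrative only: reduce any supported entry to (model_name, author_name)."""
    if isinstance(model, str):
        return model, default_author
    return model[0], model[1]  # plain tuples and ModelTuple both support indexing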

@@ -243,7 +246,7 @@ def get_local_model_name_tuples(path=None, config_file="model.config",
default_author_name="", lower=True,
use_dir_as_name=False, ign=False):
"""
Gets all ModelNames tuples from a given overall local model path.
Gets all ModelTuple tuples from a given overall local model path.
Args:
path (str, optional): Overall path to model directory.
@@ -261,7 +264,7 @@
following Ignition's directory structure. Defaults to False.
Returns:
dictionary (ModelNames: str): Dictionary where ModelNames tuples are
dictionary (ModelTuple: str): Dictionary where ModelTuple tuples are
keys, and the model path is the value. Each name will be lower-case
only unless lower is False.
"""
@@ -313,7 +316,7 @@ def get_local_model_name_tuples(path=None, config_file="model.config",
if model_path.endswith("/"):
model_path = model_path[:-1]

name_tuple = ModelNames(os.path.basename(model_path),
name_tuple = ModelTuple(os.path.basename(model_path),
name_tuple.author_name)

output[name_tuple] = latest_ver_path
@@ -339,7 +342,7 @@ def get_model_name_tuple(config_file_path, config_file="model.config",
Defaults to True.
Returns:
(str, str): ModelNames tuple of (model_name, author_name). Each name
(str, str): ModelTuple tuple of (model_name, author_name). Each name
will be lower-case only unless lower is False.
Warnings:
@@ -372,17 +375,17 @@
author_name = default_author_name

if lower:
return ModelNames(model_name.lower(), author_name.lower())
return ModelTuple(model_name.lower(), author_name.lower())
else:
return ModelNames(model_name, author_name)
return ModelTuple(model_name, author_name)
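For orientation, a minimal self-contained sketch of pulling the two names out of a model.config; the real pit_crew parser may differ in details such as error handling and space removal:

import xml.etree.ElementTree as ET

def sketch_model_name_tuple(config_file_path, default_author_name="", lower=True):
    root = ET.parse(config_file_path).getroot()
    model_name = (root.findtext("name") or "").strip()
    author_name = (root.findtext("author/name") or default_author_name).strip()
    if lower:
        model_name, author_name = model_name.lower(), author_name.lower()
    return model_name, author_name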


def get_author_to_model_dict(model_name_tuples, lower=True):
"""
Get a dictionary of author names mapped to model names.
Args:
model_name_tuples (iterable): An iterable of ModelNames or unnamed
model_name_tuples (iterable): An iterable of ModelTuple or unnamed
tuples of (model_name, author_name).
lower (bool, optional): Make all output names lower-case.
Defaults to True.
@@ -393,7 +396,7 @@ def get_author_to_model_dict(model_name_tuples, lower=True):
"""
output = {}

for (model_name, author_name) in model_name_tuples:
for (model_name, author_name, _) in model_name_tuples:
if lower:
model_name = model_name.lower()
author_name = author_name.lower()
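An equivalent sketch of the grouping, assuming each author maps to a list of model names as the docstring suggests; the trailing underscore simply discards the new upload_date field:

from collections import defaultdict

def sketch_author_to_model_dict(model_name_tuples, lower=True):
    output = defaultdict(list)
    for model_name, author_name, _ in model_name_tuples:
        if lower:
            model_name, author_name = model_name.lower(), author_name.lower()
        output[author_name].append(model_name)
    return dict(output)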
@@ -411,7 +414,7 @@ def get_model_to_author_dict(model_name_tuples, lower=True):
Get a dictionary of model names mapped to author names.
Args:
model_name_tuples (list or set): An iterable of ModelNames tuples of
model_name_tuples (list or set): An iterable of ModelTuple tuples of
(model_name, author_name). Each name will be lower-case only.
lower (bool, optional): Make all output names lower-case.
Defaults to True.
@@ -422,7 +425,7 @@
"""
output = {}

for (model_name, author_name) in model_name_tuples:
for (model_name, author_name, _) in model_name_tuples:
if lower:
model_name = model_name.lower()
author_name = author_name.lower()
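And a usage sketch in the other direction (names invented; the exact value type returned per model follows the real implementation, which is not fully shown here):

model_to_author = get_model_to_author_dict([
    ("officechair", "openrobotics", "2024-01-01T00:00:00Z"),
    ("cubicle", "osrf", "2023-06-01T00:00:00Z"),
])
print(model_to_author.get("officechair"))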
@@ -447,8 +450,8 @@ def get_fuel_authors(model_name, cache_file_path=None, update_cache=True):
else:
cache = load_cache(cache_file_path)

if isinstance(model_name, ModelNames) or isinstance(model_name, tuple):
assert len(model_name) == 2, "Invalid model name tuple given: %s!" \
if isinstance(model_name, ModelTuple) or isinstance(model_name, tuple):
assert len(model_name) == 2 or len(model_name) == 3, "Invalid model name tuple given: %s!" \
% model_name
model_name = model_name[0]
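So get_fuel_authors accepts a bare model name or any of the tuple shapes, and only the model name itself is used for the lookup; a hedged call sketch with invented names:

print(get_fuel_authors("officechair"))
print(get_fuel_authors(("officechair", "openrobotics")))
print(get_fuel_authors(("officechair", "openrobotics", "2024-01-01T00:00:00Z")))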

@@ -713,6 +716,8 @@ def download_model_fuel_tools(model_name, author_name,
(model_name, export_path))

return True
except KeyError as e:
return False
except Exception as e:
logger.error("Could not download model '%s'! %s" % (model_name, e))
return False
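With the new branch, the function now returns False on a KeyError (presumably a missing field in the Fuel metadata) instead of raising. A hedged caller sketch (model and author invented; remaining arguments assumed to keep their defaults):

ok = download_model_fuel_tools("officechair", "openrobotics")
if not ok:
    print("Download failed; check the model name, author, and Fuel cache.")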
@@ -745,7 +750,7 @@ def load_cache(cache_file_path=None, lower=True):
Returns:
dict: Cache dict, with keys 'model_cache' and 'fuel_cache'.
model_cache will contain ModelNames tuples of
model_cache will contain ModelTuple tuples of
(model_name, author_name).
Whereas fuel_cache will contain JSON responses from Fuel.
@@ -765,12 +770,12 @@

if lower:
model_cache = set(
ModelNames(*[name.lower() for name in x])
ModelTuple(*[name.lower() for name in x])
for x in loaded_cache.get("model_cache")
)
else:
model_cache = set(
ModelNames(*x) for x in loaded_cache.get("model_cache")
ModelTuple(*x) for x in loaded_cache.get("model_cache")
)

logger.info("Load success!\n")
@@ -844,25 +849,27 @@ def build_and_update_cache(cache_file_path=None, write_to_cache=True,
while status == 200 and not break_flag:
logger.info("Fetching page: %d" % page)

resp = requests.get("%s?page=%d" % (url_base, page))
resp = requests.get("%s?page=%d&per_page=100" % (url_base, page))
status = resp.status_code
page += 1

if status == 200:
for model in json.loads(resp.text):
model_name = model.get("name", "")
author_name = model.get("owner", "")
upload_date = model.get("upload_date", "")

# If a cached model was found, halt
if (model_name, author_name) in old_cache['model_cache']:
logger.info("Cached model found! "
if (model_name, author_name, upload_date) in old_cache['model_cache']:
logger.info("Cached model found! ", (model_name, author_name, upload_date),
"Halting Fuel traversal...")
break_flag = True
break
# Otherwise, add it to the cache
else:
new_cache_count += 1
old_cache['model_cache'].add((model_name, author_name))
old_cache['model_cache'].add(
(model_name, author_name, upload_date))
old_cache['fuel_cache'].append(model)
else:
break
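A standalone sketch of the page-by-page crawl above; the Fuel endpoint URL is an assumption (the real url_base is defined elsewhere in pit_crew), and per_page=100 mirrors the new request:

import json
import requests

url_base = "https://fuel.gazebosim.org/1.0/models"  # assumed endpoint
page, status = 1, 200
while status == 200:
    resp = requests.get("%s?page=%d&per_page=100" % (url_base, page))
    status = resp.status_code
    if status != 200:
        break
    models = json.loads(resp.text)
    if not models:
        break
    for model in models:
        print(model.get("owner", ""), model.get("name", ""),
              model.get("upload_date", ""))
    page += 1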