diff --git a/backend/btrixcloud/colls.py b/backend/btrixcloud/colls.py
index a44a8eadf3..54de17d981 100644
--- a/backend/btrixcloud/colls.py
+++ b/backend/btrixcloud/colls.py
@@ -794,13 +794,12 @@ async def list_page_snapshots_in_collection(
         crawl_ids = await self.get_collection_crawl_ids(coll_id)
 
         match_query: dict[str, object] = {"crawl_id": {"$in": crawl_ids}}
-        sort_query: dict[str, int] = {"isSeed": -1, "url": 1, "ts": 1}
+        sort_query: dict[str, int] = {"isSeed": -1, "ts": 1, "url": 1}
 
         if url_prefix:
             url_prefix = urllib.parse.unquote(url_prefix)
             regex_pattern = f"^{re.escape(url_prefix)}"
             match_query["url"] = {"$regex": regex_pattern, "$options": "i"}
-            # sort_query = {"ts": 1}
 
         aggregate: List[Dict[str, Union[int, object]]] = [
             {"$match": match_query},
diff --git a/backend/btrixcloud/pages.py b/backend/btrixcloud/pages.py
index 6a17fd135b..df52275fed 100644
--- a/backend/btrixcloud/pages.py
+++ b/backend/btrixcloud/pages.py
@@ -78,8 +78,8 @@ async def init_index(self):
             [
                 ("crawl_id", pymongo.HASHED),
                 ("isSeed", pymongo.DESCENDING),
-                ("url", pymongo.ASCENDING),
                 ("ts", pymongo.ASCENDING),
+                ("url", pymongo.ASCENDING),
             ]
         )
         await self.pages.create_index(
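
Note (not part of the patch): the change keeps the compound index created in pages.py aligned with the sort used by list_page_snapshots_in_collection in colls.py, with isSeed descending, then ts, then url. Below is a minimal standalone sketch of how the two pieces line up, using a synchronous PyMongo client and placeholder connection, database, and crawl-id values rather than the app's async setup.

import pymongo

# Placeholder connection and database names; the app itself uses an async client.
client = pymongo.MongoClient("mongodb://localhost:27017")
pages = client["btrix_example"]["pages"]

# Compound index whose field order mirrors the sort below:
# seed pages first, then oldest snapshot first, then URL.
pages.create_index(
    [
        ("crawl_id", pymongo.HASHED),
        ("isSeed", pymongo.DESCENDING),
        ("ts", pymongo.ASCENDING),
        ("url", pymongo.ASCENDING),
    ]
)

# Same shape as the aggregation's $match / $sort stages in colls.py,
# with made-up crawl ids standing in for get_collection_crawl_ids().
match_query = {"crawl_id": {"$in": ["crawl-1", "crawl-2"]}}
sort_query = [("isSeed", -1), ("ts", 1), ("url", 1)]
cursor = pages.find(match_query).sort(sort_query)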