From 0b34e1ed66146fae9563f1539e66a512397bce29 Mon Sep 17 00:00:00 2001 From: actionless Date: Fri, 14 Jun 2024 20:47:58 +0200 Subject: [PATCH] feat: implement lookup by `provides` field (fixes #402) --- pikaur/aur.py | 137 +++++++++++++++++++++++++++++++-- pikaur/aur_deps.py | 57 +++++++++++--- pikaur/build.py | 18 ++++- pikaur/install_cli.py | 58 ++++++++++---- pikaur/install_info_fetcher.py | 32 ++++++-- pikaur/search_cli.py | 4 +- 6 files changed, 262 insertions(+), 44 deletions(-) diff --git a/pikaur/aur.py b/pikaur/aur.py index 6e4ce9328..5db965178 100644 --- a/pikaur/aur.py +++ b/pikaur/aur.py @@ -8,6 +8,7 @@ from .config import PikaurConfig from .core import DataType from .exceptions import AURError +from .logging import create_logger from .progressbar import ThreadSafeProgressBar from .urllib_helper import get_gzip_from_url, get_json_from_url @@ -20,6 +21,16 @@ MAX_URL_LENGTH: "Final" = 8177 # default value in many web servers +logger = create_logger("aur_module") + + +class NotFound: + pass + + +NOT_FOUND: "Final[NotFound]" = NotFound() + + class AurRPCErrors: ERROR_KEY: "Final" = "error" TOO_MANY_RESULTS: "Final" = "Too many package results." 
@@ -137,12 +148,14 @@ def strip_aur_repo_name(pkg_name: str) -> str: return pkg_name -def aur_rpc_search_name_desc(search_query: str) -> list[AURPackageInfo]: +def aur_rpc_search( + search_query: str, search_by: str = "name-desc", +) -> list[AURPackageInfo]: url = construct_aur_rpc_url_from_params({ "v": 5, "type": "search", "arg": strip_aur_repo_name(search_query), - "by": "name-desc", + "by": search_by, }) result_json = get_json_from_url(url) if AurRPCErrors.ERROR_KEY in result_json: @@ -156,6 +169,10 @@ def aur_rpc_search_name_desc(search_query: str) -> list[AURPackageInfo]: ] +def aur_rpc_search_provider(package_name: str) -> tuple[str, list[AURPackageInfo]]: + return package_name, aur_rpc_search(search_query=package_name, search_by="provides") + + def _get_aur_rpc_info_url(search_queries: list[str]) -> str: uri = parse.urlencode({ "v": 5, @@ -181,7 +198,8 @@ def aur_rpc_info(search_queries: list[str]) -> list[AURPackageInfo]: def aur_rpc_info_with_progress( - search_queries: list[str], *, progressbar_length: int, with_progressbar: bool, + search_queries: list[str], + *, progressbar_length: int, with_progressbar: bool, ) -> list[AURPackageInfo]: result = aur_rpc_info(search_queries) if with_progressbar: @@ -193,6 +211,20 @@ def aur_rpc_info_with_progress( return result +def aur_rpc_search_provider_with_progress( + package_name: str, + *, progressbar_length: int, with_progressbar: bool, +) -> tuple[str, list[AURPackageInfo]]: + result = aur_rpc_search_provider(package_name=package_name) + if with_progressbar: + progressbar = ThreadSafeProgressBar.get( + progressbar_length=progressbar_length, + progressbar_id="aur_provides_search", + ) + progressbar.update() + return result + + class AurPackageListCache: cache: ClassVar[list[str]] = [] @@ -206,17 +238,34 @@ def get(cls) -> list[str]: class AurPackageSearchCache: - cache: ClassVar[dict[str, AURPackageInfo]] = {} + cache: ClassVar[dict[str, AURPackageInfo | NotFound]] = {} @classmethod def put(cls, pkg: 
AURPackageInfo) -> None: cls.cache[pkg.name] = pkg @classmethod - def get(cls, pkg_name: str) -> AURPackageInfo | None: + def put_not_found(cls, pkg_name: str) -> None: + cls.cache[pkg_name] = NOT_FOUND + + @classmethod + def get(cls, pkg_name: str) -> AURPackageInfo | NotFound | None: return cls.cache.get(pkg_name) +class AurProvidedPackageSearchCache: + + cache: ClassVar[dict[str, list[AURPackageInfo]]] = {} + + @classmethod + def put(cls, provides: str, pkgs: list[AURPackageInfo]) -> None: + cls.cache[provides] = pkgs + + @classmethod + def get(cls, provides: str) -> list[AURPackageInfo] | None: + return cls.cache.get(provides) + + def get_all_aur_names() -> list[str]: return AurPackageListCache.get() @@ -243,12 +292,20 @@ def find_aur_packages( # @TODO: return only packages for the current architecture package_names = [strip_aur_repo_name(name) for name in package_names] num_packages = len(package_names) - json_results = [] + json_results: list[AURPackageInfo] = [] + cached_not_found_pkgs: list[str] = [] for package_name in package_names[:]: aur_pkg = AurPackageSearchCache.get(package_name) - if aur_pkg: + if aur_pkg is NOT_FOUND: + package_names.remove(package_name) + cached_not_found_pkgs.append(package_name) + logger.debug("find_aur_packages: {} cached as not found", package_name) + elif isinstance(aur_pkg, AURPackageInfo): json_results.append(aur_pkg) package_names.remove(package_name) + logger.debug("find_aur_packages: {} cached", package_name) + else: + logger.debug("find_aur_packages: {} uncached", package_name) if package_names: with ThreadPool() as pool: @@ -272,6 +329,7 @@ def find_aur_packages( found_aur_packages = [ result.name for result in json_results + if isinstance(result, AURPackageInfo) ] not_found_packages: list[str] = ( [] if num_packages == len(found_aur_packages) @@ -280,9 +338,74 @@ def find_aur_packages( if package not in found_aur_packages ] ) + for not_found_pkgname in not_found_packages: + 
AurPackageSearchCache.put_not_found(not_found_pkgname) + not_found_packages += cached_not_found_pkgs return json_results, not_found_packages +def find_aur_provided_deps( + package_names: list[str], *, with_progressbar: bool = False, +) -> tuple[list[AURPackageInfo], list[str]]: + + # @TODO: return only packages for the current architecture + package_names = [strip_aur_repo_name(name) for name in package_names] + num_packages = len(package_names) + json_results = [] + cached_not_found_pkgs: list[str] = [] + for package_name in package_names[:]: + aur_pkgs = AurProvidedPackageSearchCache.get(package_name) + if aur_pkgs is None: + logger.debug("find_aur_provided_deps: {} not cached", package_name) + elif len(aur_pkgs) == 0: + package_names.remove(package_name) + cached_not_found_pkgs.append(package_name) + logger.debug("find_aur_provided_deps: {} cached as not found", package_name) + else: + # @TODO: dynamically select package provider + json_results.append(aur_pkgs[0]) + package_names.remove(package_name) + logger.debug("find_aur_provided_deps: {} cached", package_name) + + if package_names: + with ThreadPool() as pool: + requests = [ + pool.apply_async(aur_rpc_search_provider_with_progress, [], { + "package_name": package_name, + "progressbar_length": len(package_names), + "with_progressbar": with_progressbar, + }) + for package_name in package_names + ] + pool.close() + results = [request.get() for request in requests] + pool.join() + for provided_pkg_name, aur_pkgs in results: + if not aur_pkgs: + continue + AurProvidedPackageSearchCache.put(pkgs=aur_pkgs, provides=provided_pkg_name) + for aur_pkg in aur_pkgs: + if provided_pkg_name in package_names: + # @TODO: dynamically select package provider + json_results += [aur_pkg] + break + + found_aur_packages = [ + result.name for result in json_results + ] + not_found_packages: list[str] = ( + [] if num_packages == len(found_aur_packages) + else [ + package for package in package_names + if package not in
found_aur_packages + ] + ) + result_names = list({pkg.name for pkg in json_results}) + full_pkg_infos, _ = find_aur_packages(result_names) + not_found_packages += cached_not_found_pkgs + return full_pkg_infos, not_found_packages + + def get_repo_url(package_base_name: str) -> str: return f"{AurBaseUrl.get()}/{package_base_name}.git" diff --git a/pikaur/aur_deps.py b/pikaur/aur_deps.py index 468ea0abb..0807c505b 100644 --- a/pikaur/aur_deps.py +++ b/pikaur/aur_deps.py @@ -3,7 +3,7 @@ from multiprocessing.pool import ThreadPool from typing import TYPE_CHECKING -from .aur import find_aur_packages +from .aur import find_aur_packages, find_aur_provided_deps from .core import PackageSource from .exceptions import ( DependencyVersionMismatchError, @@ -11,6 +11,7 @@ PackagesNotFoundInRepoError, ) from .i18n import translate +from .logging import create_logger from .pacman import PackageDB from .pprint import print_error from .version import VersionMatcher @@ -19,6 +20,9 @@ from .aur import AURPackageInfo +logger = create_logger("aur_deps") + + def check_deps_versions( deps_pkg_names: list[str], version_matchers: dict[str, VersionMatcher], @@ -245,7 +249,11 @@ def find_missing_deps_for_aur_pkg( aur_deps_info, not_found_aur_deps = find_aur_packages( not_found_repo_pkgs, ) - # @TODO: find packages Provided by AUR packages + provided_aur_deps_info, not_found_aur_deps = find_aur_provided_deps( + not_found_aur_deps, + ) + aur_deps_info += provided_aur_deps_info + handle_not_found_aur_pkgs( aur_pkg_name=aur_pkg_name, aur_pkgs_info=aur_pkgs_info, @@ -254,17 +262,31 @@ def find_missing_deps_for_aur_pkg( ) # check versions of found AUR packages: + logger.debug("version_matchers={}", version_matchers) for aur_dep_info in aur_deps_info: aur_dep_name = aur_dep_info.name - version_matcher = version_matchers[aur_dep_name] - if not version_matcher(aur_dep_info.version): - raise DependencyVersionMismatchError( - version_found=aur_dep_info.version, - dependency_line=version_matcher.line, - 
who_depends=aur_pkg_name, - depends_on=aur_dep_name, - location=PackageSource.AUR, - ) + version_matcher = version_matchers.get( + aur_dep_name, + ) + pkg_version_matchers: list[VersionMatcher] = [] + if version_matcher: + pkg_version_matchers = [version_matcher] + else: + for provide in aur_dep_info.provides: + version_matcher = version_matchers.get(VersionMatcher(provide).pkg_name) + if version_matcher is not None: + pkg_version_matchers.append(version_matcher) + logger.debug("{} pkg version_matchers={}", aur_dep_name, pkg_version_matchers) + for version_matcher in pkg_version_matchers: + if not version_matcher(aur_dep_info.version): + raise DependencyVersionMismatchError( + version_found=aur_dep_info.version, + dependency_line=version_matcher.line, + who_depends=aur_pkg_name, + depends_on=aur_dep_name, + location=PackageSource.AUR, + ) + # not_found_repo_pkgs.remove(version_matcher.pkg_name) return not_found_repo_pkgs @@ -280,6 +302,7 @@ def find_aur_deps( # pylint: disable=too-many-branches aur_pkg.name for aur_pkg in aur_pkgs_infos ] + logger.debug("find_aur_deps: package_names={}", package_names) result_aur_deps: dict[str, list[str]] = {} initial_pkg_infos = initial_pkg_infos2_todo = aur_pkgs_infos[:] # @TODO: var name @@ -292,7 +315,12 @@ def find_aur_deps( # pylint: disable=too-many-branches initial_pkg_infos = [] else: aur_pkgs_info, not_found_aur_pkgs = find_aur_packages(iter_package_names) + provided_aur_deps_info, not_found_aur_pkgs = find_aur_provided_deps( + not_found_aur_pkgs, + ) + aur_pkgs_info += provided_aur_deps_info if not_found_aur_pkgs: + logger.debug("not_found_aur_pkgs={}", not_found_aur_pkgs) raise PackagesNotFoundInAURError(packages=not_found_aur_pkgs) for aur_pkg in aur_pkgs_info: aur_pkg_deps = get_aur_pkg_deps_and_version_matchers( @@ -320,7 +348,11 @@ def find_aur_deps( # pylint: disable=too-many-branches for aur_pkg_name, request in all_requests.items(): try: results = request.get() - except Exception: + except Exception as exc: + 
logger.debug( + "exception during aur search: {}: {}", + exc.__class__.__name__, exc, + ) print_error(translate( "Can't resolve dependencies for AUR package '{pkg}':", ).format(pkg=aur_pkg_name)) @@ -336,6 +368,7 @@ def find_aur_deps( # pylint: disable=too-many-branches new_aur_deps.append(pkg_name) iter_package_names.append(pkg_name) + logger.debug("find_aur_deps: result_aur_deps={}", result_aur_deps) return result_aur_deps diff --git a/pikaur/build.py b/pikaur/build.py index 8d27d708c..c1eceec6b 100644 --- a/pikaur/build.py +++ b/pikaur/build.py @@ -131,6 +131,7 @@ class PackageBuild(DataType): # noqa: PLR0904 package_base: str package_names: list[str] + provides: list[str] repo_path: Path pkgbuild_path: Path @@ -180,12 +181,15 @@ def __init__( # pylint: disable=super-init-not-called if pkgbase and srcinfo.pkgnames: self.package_names = package_names or srcinfo.pkgnames self.package_base = pkgbase + self.provides = srcinfo.get_values("provides") else: no_pkgname_error = translate("Can't get package name from PKGBUILD") raise BuildError(message=no_pkgname_error, build=self) elif package_names: self.package_names = package_names - self.package_base = find_aur_packages([package_names[0]])[0][0].packagebase + aur_pkg = find_aur_packages([package_names[0]])[0][0] + self.package_base = aur_pkg.packagebase + self.provides = aur_pkg.provides self.repo_path = AurReposCachePath()() / self.package_base self.pkgbuild_path = self.repo_path / DEFAULT_PKGBUILD_BASENAME else: @@ -436,6 +440,7 @@ def _filter_built_deps( ) -> None: def _mark_dep_resolved(dep: str) -> None: + logger.debug("_mark_dep_resolved: {}", dep) if dep in self.new_make_deps_to_install: self.new_make_deps_to_install.remove(dep) if dep in self.new_deps_to_install: @@ -447,12 +452,21 @@ def _mark_dep_resolved(dep: str) -> None: srcinfo = SrcInfo( pkgbuild_path=pkg_build.pkgbuild_path, package_name=pkg_name, ) + stripped_pkg_name = VersionMatcher(pkg_name).pkg_name all_provided_pkgnames.update( - 
dict.fromkeys([pkg_name, *srcinfo.get_values("provides")], pkg_name), + dict.fromkeys( + [stripped_pkg_name, *( + VersionMatcher(name).pkg_name + for name in srcinfo.get_values("provides") + )], + stripped_pkg_name, + ), ) self.built_deps_to_install = {} + logger.debug("self.all_deps_to_install={}", self.all_deps_to_install) + logger.debug("all_provided_pkgnames={}", all_provided_pkgnames) for dep in self.all_deps_to_install: dep_name = VersionMatcher(dep).pkg_name if dep_name not in all_provided_pkgnames: diff --git a/pikaur/install_cli.py b/pikaur/install_cli.py index 925dc7af2..b2438a253 100644 --- a/pikaur/install_cli.py +++ b/pikaur/install_cli.py @@ -72,7 +72,7 @@ ) from .srcinfo import SrcInfo from .updates import is_devel_pkg -from .version import compare_versions +from .version import VersionMatcher, compare_versions if TYPE_CHECKING: import pyalpm @@ -167,7 +167,8 @@ def __init__(self) -> None: self.not_found_repo_pkgs_names = [] self.repo_packages_by_name = {} - self.package_builds_by_name = {} + self.package_builds_by_name: dict[str, PackageBuild] = {} + self.package_builds_by_provides: dict[str, PackageBuild] = {} self.found_conflicts = {} self.transactions = {} @@ -261,12 +262,18 @@ def get_info_from_pkgbuilds(self) -> None: self.args.positional or ["PKGBUILD"] } + def _get_pkgbuild_for_name_or_provided(self, pkg_name: str) -> PackageBuild: + return ( + self.package_builds_by_name.get(pkg_name) + or self.package_builds_by_provides[pkg_name] + ) + def edit_pkgbuild_during_the_build(self, pkg_name: str) -> None: updated_pkgbuilds = self._clone_aur_repos([pkg_name]) if not updated_pkgbuilds: return self.package_builds_by_name.update(updated_pkgbuilds) - pkg_build = self.package_builds_by_name[pkg_name] + pkg_build = self._get_pkgbuild_for_name_or_provided(pkg_name) if not edit_file( pkg_build.pkgbuild_path, ): @@ -349,6 +356,10 @@ def get_all_packages_info(self) -> None: # pylint:disable=too-many-branches,too 
skip_checkdeps_for_pkgnames=self.skip_checkfunc_for_pkgnames, ) except PackagesNotFoundInAURError as exc: + logger.debug( + "exception during install info fetch: {}: {}", + exc.__class__.__name__, exc, + ) if exc.wanted_by: print_error(bold_line( translate("Dependencies missing for {}").format(", ".join(exc.wanted_by)), @@ -711,9 +722,10 @@ def _clone_aur_repos( # pylint: disable=too-many-branches def get_package_builds(self) -> None: while self.all_aur_packages_names: - clone_names = [] + clone_infos = [] pkgbuilds_by_base: dict[str, PackageBuild] = {} pkgbuilds_by_name = {} + pkgbuilds_by_provides = {} for info in self.install_info.aur_install_info: if info.pkgbuild_path: if not isinstance(info.package, AURPackageInfo): @@ -730,11 +742,19 @@ def get_package_builds(self) -> None: package_names=package_names, ) pkgbuilds_by_name[info.name] = pkgbuilds_by_base[pkg_base] + for provided_str in info.package.provides: + provided_name = VersionMatcher(provided_str).pkg_name + pkgbuilds_by_provides[provided_name] = pkgbuilds_by_base[pkg_base] else: - clone_names.append(info.name) - cloned_pkgbuilds = self._clone_aur_repos(clone_names) + clone_infos.append(info) + cloned_pkgbuilds = self._clone_aur_repos([info.name for info in clone_infos]) if cloned_pkgbuilds: + logger.debug("cloned_pkgbuilds={}", cloned_pkgbuilds) pkgbuilds_by_name.update(cloned_pkgbuilds) + for info in clone_infos: + for provided_str in info.package.provides: + provided_name = VersionMatcher(provided_str).pkg_name + pkgbuilds_by_provides[provided_name] = cloned_pkgbuilds[info.package.name] for pkg_list in (self.aur_packages_names, self.aur_deps_names): self._find_extra_aur_build_deps( all_package_builds={ @@ -744,7 +764,10 @@ def get_package_builds(self) -> None: }, ) self.package_builds_by_name = pkgbuilds_by_name + self.package_builds_by_provides = pkgbuilds_by_provides break + logger.debug("self.package_builds_by_name={}", self.package_builds_by_name) + 
logger.debug("self.package_builds_by_provides={}", self.package_builds_by_provides) def ask_about_package_conflicts(self) -> None: if self.aur_packages_names or self.aur_deps_names: @@ -1035,7 +1058,7 @@ def build_packages(self) -> None: # pylint: disable=too-many-branches index = 0 pkg_name = packages_to_be_built[index] - pkg_build = self.package_builds_by_name[pkg_name] + pkg_build = self._get_pkgbuild_for_name_or_provided(pkg_name) pkg_base = pkg_build.package_base if ( pkg_base in self.built_package_bases @@ -1087,12 +1110,16 @@ def build_packages(self) -> None: # pylint: disable=too-many-branches self.prompt_dependency_cycle(_pkg_name) else: logger.debug( - "Build done for packages {}, removing from queue", + "Build done for packages {}, removing from queue {}", pkg_build.package_names, + packages_to_be_built, ) self.built_package_bases.append(pkg_base) - for _pkg_name in pkg_build.package_names: - if _pkg_name not in self.manually_excluded_packages_names: + for _pkg_name in pkg_build.package_names + pkg_build.provides: + if ( + (_pkg_name not in self.manually_excluded_packages_names) + and (_pkg_name in packages_to_be_built) + ): packages_to_be_built.remove(_pkg_name) self.failed_to_build_package_names = failed_to_build_package_names @@ -1154,10 +1181,11 @@ def install_repo_packages(self) -> None: ) def install_new_aur_deps(self) -> None: - new_aur_deps_to_install = { - pkg_name: self.package_builds_by_name[pkg_name].built_packages_paths[pkg_name] - for pkg_name in self.aur_deps_names - } + new_aur_deps_to_install = {} + for pkg_name in self.aur_deps_names: + pkg_build = self._get_pkgbuild_for_name_or_provided(pkg_name) + for name in pkg_build.package_names: + new_aur_deps_to_install[name] = pkg_build.built_packages_paths[name] try: install_built_deps( deps_names_and_paths=new_aur_deps_to_install, @@ -1175,7 +1203,7 @@ def install_new_aur_deps(self) -> None: def install_aur_packages(self) -> None: aur_packages_to_install = {} for pkg_name in 
self.aur_packages_names: - pkg_build = self.package_builds_by_name.get(pkg_name) + pkg_build = self._get_pkgbuild_for_name_or_provided(pkg_name) if pkg_build: path = pkg_build.built_packages_paths.get(pkg_name) if path: diff --git a/pikaur/install_info_fetcher.py b/pikaur/install_info_fetcher.py index 53608c56b..ca6ba0ecb 100644 --- a/pikaur/install_info_fetcher.py +++ b/pikaur/install_info_fetcher.py @@ -6,7 +6,7 @@ from typing import TYPE_CHECKING from .args import parse_args, reconstruct_args -from .aur import AURPackageInfo, find_aur_packages, strip_aur_repo_name +from .aur import AURPackageInfo, find_aur_packages, find_aur_provided_deps, strip_aur_repo_name from .aur_deps import find_aur_deps, find_repo_deps_of_aur_pkgs from .core import AURInstallInfo, ComparableType, PackageSource, RepoInstallInfo from .exceptions import DependencyError, DependencyVersionMismatchError, SysExit @@ -502,6 +502,7 @@ def get_aur_pkgs_info( # pylint: disable=too-many-branches else: not_found_aur_pkgs.append(aur_packages_names_to_versions[aur_pkg.name].line) if not_found_aur_pkgs: + logger.debug("error code: 3fh7n834fh7n") print_not_found_packages(sorted(not_found_aur_pkgs)) raise SysExit(6) aur_updates_install_info_by_name: dict[str, AURInstallInfo] = {} @@ -509,6 +510,7 @@ def get_aur_pkgs_info( # pylint: disable=too-many-branches self._all_aur_updates_raw, not_found_aur_pkgs = find_aur_updates() self.exclude_ignored_packages(not_found_aur_pkgs, print_packages=False) if not_found_aur_pkgs: + logger.debug("error code: 789sdfgh789sd6") print_not_found_packages(sorted(not_found_aur_pkgs)) aur_updates_install_info_by_name = { upd.name: upd for upd in self._all_aur_updates_raw @@ -599,24 +601,42 @@ def get_aur_deps_info(self) -> None: self.install_package_names.append(pkg_name) self.get_all_packages_info() return + # prepare install info (InstallInfo objects) # for all the AUR packages which gonna be built: - aur_pkgs = { - aur_pkg.name: aur_pkg - for aur_pkg in 
find_aur_packages(self.aur_deps_names)[0] - } + # aur_pkgs = { + # aur_pkg.name: aur_pkg + # for aur_pkg in find_aur_packages(self.aur_deps_names)[0] + # } + aur_pkgs = {} + aur_pkgs_infos, not_found_aur_pkgs = find_aur_packages(self.aur_deps_names) + provided_aur_deps_infos, not_found_aur_pkgs = find_aur_provided_deps( + not_found_aur_pkgs, + ) + for aur_pkg_info in aur_pkgs_infos: + aur_pkgs[aur_pkg_info.name] = aur_pkg_info + for aur_pkg_info in provided_aur_deps_infos: + for provided_pkg_name in aur_pkg_info.provides: + aur_pkgs[VersionMatcher(provided_pkg_name).pkg_name] = aur_pkg_info + logger.debug("get_aur_deps_info: aur_pkgs={}", aur_pkgs) + local_pkgs = PackageDB.get_local_dict() + + added_pkg_names: list[str] = [] for pkg_name in self.aur_deps_names: aur_pkg = aur_pkgs[pkg_name] + if aur_pkg.name in added_pkg_names: + continue local_pkg = local_pkgs.get(pkg_name) self.aur_deps_install_info.append(AURInstallInfo( - name=pkg_name, + name=aur_pkg.name, current_version=local_pkg.version if local_pkg else " ", new_version=aur_pkg.version, description=aur_pkg.desc, maintainer=aur_pkg.maintainer, package=aur_pkg, )) + added_pkg_names.append(aur_pkg.name) def mark_dependent(self) -> None: """Update packages' install info to show deps in prompt.""" diff --git a/pikaur/search_cli.py b/pikaur/search_cli.py index 68926b1bb..5f8919da4 100644 --- a/pikaur/search_cli.py +++ b/pikaur/search_cli.py @@ -8,7 +8,7 @@ from .aur import ( AURPackageInfo, AurRPCErrors, - aur_rpc_search_name_desc, + aur_rpc_search, get_all_aur_names, get_all_aur_packages, ) @@ -72,7 +72,7 @@ def package_search_thread_aur( # pylint: disable=too-many-branches with ThreadPool() as pool: requests = {} for query in queries: - requests[query] = pool.apply_async(aur_rpc_search_name_desc, (query, )) + requests[query] = pool.apply_async(aur_rpc_search, (query, )) pool.close() for query, request in requests.items(): try: