diff --git a/Crawler.php b/Crawler.php index 48b52a3..6cbb5a8 100644 --- a/Crawler.php +++ b/Crawler.php @@ -57,6 +57,9 @@ public function crawl($root, $local, $pattern, $recursive, $titlefile) { $path = $root . $local; + // do not descend into wiki or data directories + if(Path::isWikiControlled($path)) return []; + if (($dir = opendir($path)) === false) return []; $result = []; while (($file = readdir($dir)) !== false) { diff --git a/Path.php b/Path.php index 82944dc..de2f757 100644 --- a/Path.php +++ b/Path.php @@ -149,4 +149,26 @@ public static function realpath($path) } return implode('/', $output); } + + /** + * Check if the given path is within the data or dokuwiki dir + * + * This would prevent accidental or deliberate circumvention of the ACLs + * + * @param string $path an already cleaned path + * @return bool + */ + public static function isWikiControlled($path) + { + global $conf; + $dataPath = self::cleanPath($conf['savedir']); + if (str_starts_with($path, $dataPath)) { + return true; + } + $wikiDir = self::cleanPath(DOKU_INC); + if (str_starts_with($path, $wikiDir)) { + return true; + } + return false; + } } diff --git a/file.php b/file.php index e3f5b9d..ecd7740 100644 --- a/file.php +++ b/file.php @@ -19,6 +19,10 @@ try { $pathInfo = $pathUtil->getPathInfo($path, false); + if ($pathUtil::isWikiControlled($pathInfo['path'])) { + throw new Exception('Access to wiki files is not allowed'); + } + if (!is_readable($pathInfo['path'])) { header('Content-Type: text/plain'); http_status(404);