From e82754c523db139b3cfd3d76d5bc8843b9e25ac0 Mon Sep 17 00:00:00 2001
From: Andreas Gohr
Date: Wed, 3 Apr 2024 14:23:53 +0200
Subject: [PATCH] prevent access to wiki files

This prevents the crawler from descending into the wiki directory or data
directory, even when for some reason a higher-up directory was made
accessible. This should prevent the circumvention of ACLs and block access
to sensitive data such as user password hashes.
---
 Crawler.php |  3 +++
 Path.php    | 22 ++++++++++++++++++++++
 file.php    |  4 ++++
 3 files changed, 29 insertions(+)

diff --git a/Crawler.php b/Crawler.php
index 48b52a3..6cbb5a8 100644
--- a/Crawler.php
+++ b/Crawler.php
@@ -57,6 +57,9 @@ class Crawler
     {
         $path = $root . $local;
 
+        // do not descend into wiki or data directories
+        if(Path::isWikiControlled($path)) return [];
+
         if (($dir = opendir($path)) === false) return [];
         $result = [];
         while (($file = readdir($dir)) !== false) {
diff --git a/Path.php b/Path.php
index 82944dc..de2f757 100644
--- a/Path.php
+++ b/Path.php
@@ -149,4 +149,26 @@ class Path
         }
         return implode('/', $output);
     }
+
+    /**
+     * Check if the given path is within the data or dokuwiki dir
+     *
+     * This should prevent accidental or deliberate circumvention of the ACLs
+     *
+     * @param string $path an already cleaned path
+     * @return bool
+     */
+    public static function isWikiControlled($path)
+    {
+        global $conf;
+        $dataPath = self::cleanPath($conf['savedir']);
+        if (str_starts_with($path, $dataPath)) {
+            return true;
+        }
+        $wikiDir = self::cleanPath(DOKU_INC);
+        if (str_starts_with($path, $wikiDir)) {
+            return true;
+        }
+        return false;
+    }
 }
diff --git a/file.php b/file.php
index e3f5b9d..ecd7740 100644
--- a/file.php
+++ b/file.php
@@ -19,6 +19,10 @@ $path = $INPUT->str('root') . $INPUT->str('file');
 
 try {
     $pathInfo = $pathUtil->getPathInfo($path, false);
+    if ($pathUtil::isWikiControlled($pathInfo['path'])) {
+        throw new Exception('Access to wiki files is not allowed');
+    }
+
     if (!is_readable($pathInfo['path'])) {
         header('Content-Type: text/plain');
         http_status(404);
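
Below the patch, a standalone sketch of what the new Path::isWikiControlled()
guard does: any path under the data dir ($conf['savedir']) or the wiki install
dir (DOKU_INC) is treated as off limits. The directory locations and example
paths are assumptions, and Path::cleanPath() is approximated with a plain
rtrim(), so this only illustrates the prefix check rather than reproducing the
plugin's code. Note that str_starts_with() requires PHP 8.

<?php
// Standalone sketch of the prefix check added in Path::isWikiControlled().
// DOKU_INC and $conf['savedir'] are hypothetical example values; the real
// patch normalizes both sides with Path::cleanPath() before comparing.

define('DOKU_INC', '/var/www/dokuwiki/');        // assumed wiki install dir
$conf = ['savedir' => '/var/www/dokuwiki/data']; // assumed data dir

function isWikiControlled(string $path, array $conf): bool
{
    // A path is "wiki controlled" when it lies under the data dir or the install dir.
    return str_starts_with($path, rtrim($conf['savedir'], '/'))
        || str_starts_with($path, rtrim(DOKU_INC, '/'));
}

// Paths inside the wiki tree are rejected before any filesystem access:
var_dump(isWikiControlled('/var/www/dokuwiki/conf/users.auth.php', $conf));  // bool(true)
var_dump(isWikiControlled('/var/www/dokuwiki/data/pages/start.txt', $conf)); // bool(true)
// Paths outside the wiki remain browsable:
var_dump(isWikiControlled('/srv/shared/docs/report.pdf', $conf));            // bool(false)

The patch applies this check in two places: Crawler.php stops directory
listings from descending into these trees, and file.php refuses to serve
individual files from them, so both the listing and the download paths honor
the guard.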