prevent access to wiki files

This prevents the crawler from descending into the wiki directory or the
data directory, even when a higher-up directory was for some reason made
accessible. This should prevent circumvention of the ACLs and block
access to sensitive data such as user password hashes.
Andreas Gohr
2024-04-03 14:23:53 +02:00
parent 55e6f8f9aa
commit e82754c523
3 changed files with 29 additions and 0 deletions
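
To make the scenario concrete, here is a hypothetical layout in which the browsable root sits one level above the wiki installation; all paths below are made up for illustration and do not come from this commit:

<?php
// Hypothetical layout: the configured root is a parent directory
// of the wiki installation itself.
$root  = '/srv/shared';                   // browsable root (assumption)
$local = '/dokuwiki/conf/users.auth.php'; // stores user password hashes

// Without the guard, a crawler starting at $root could reach this file.
// With it, Path::isWikiControlled($root . $local) is true and the
// wiki's directories are skipped.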

@@ -57,6 +57,9 @@ class Crawler
    {
        $path = $root . $local;
        // do not descend into wiki or data directories
        if (Path::isWikiControlled($path)) return [];
        if (($dir = opendir($path)) === false) return [];
        $result = [];
        while (($file = readdir($dir)) !== false) {
@@ -149,4 +149,26 @@ class Path
        }
        return implode('/', $output);
    }

    /**
     * Check if the given path is within the data or dokuwiki dir
     *
     * This should prevent accidental or deliberate circumvention of the ACLs
     *
     * @param string $path an already cleaned path
     * @return bool
     */
    public static function isWikiControlled($path)
    {
        global $conf;
        $dataPath = self::cleanPath($conf['savedir']);
        if (str_starts_with($path, $dataPath)) {
            return true;
        }
        $wikiDir = self::cleanPath(DOKU_INC);
        if (str_starts_with($path, $wikiDir)) {
            return true;
        }
        return false;
    }
}
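
A quick sketch of the expected behaviour, using hypothetical installation paths (the real values come from the wiki configuration; note that str_starts_with() requires PHP 8.0 or a polyfill):

<?php
// Hypothetical values for illustration only.
global $conf;
$conf['savedir'] = '/srv/dokuwiki/data';
define('DOKU_INC', '/srv/dokuwiki/');

var_dump(Path::isWikiControlled('/srv/dokuwiki/data/pages/start.txt')); // true: inside savedir
var_dump(Path::isWikiControlled('/srv/dokuwiki/inc/auth.php'));         // true: inside DOKU_INC
var_dump(Path::isWikiControlled('/srv/shared/manuals/intro.txt'));      // false: outside both

Because this is a plain prefix comparison, a sibling directory that merely shares the prefix (e.g. /srv/dokuwiki/data-backup) would also be treated as wiki-controlled, which errs on the side of denying access.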

@@ -19,6 +19,10 @@ $path = $INPUT->str('root') . $INPUT->str('file');
try {
    $pathInfo = $pathUtil->getPathInfo($path, false);
    if ($pathUtil::isWikiControlled($pathInfo['path'])) {
        throw new Exception('Access to wiki files is not allowed');
    }
    if (!is_readable($pathInfo['path'])) {
        header('Content-Type: text/plain');
        http_status(404);
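
The exception is presumably caught further down in the same script; the catch block is outside this hunk, so the following handler is only a hedged sketch, with the 403 status being an assumption:

<?php
// Hypothetical handler; the real catch block is not part of this diff.
// $pathUtil is the Path helper instance used in the hunk above.
try {
    $pathInfo = $pathUtil->getPathInfo($path, false);
    if ($pathUtil::isWikiControlled($pathInfo['path'])) {
        throw new Exception('Access to wiki files is not allowed');
    }
    // ... readability check and file delivery as shown above ...
} catch (Exception $e) {
    header('Content-Type: text/plain');
    http_status(403); // assumed status; deny rather than serve the file
    echo $e->getMessage();
}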