prevent access to wiki files
This prevents the crawler from descending into the wiki directory or the data directory, even when a higher-level directory has for some reason been made accessible. It should stop circumvention of the ACLs and block access to sensitive data such as user password hashes.
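In essence the new check is a path-prefix comparison against the two wiki-controlled trees. A minimal, self-contained sketch of that idea (directory values and the helper name here are hypothetical; the actual implementation is the Path::isWikiControlled() method added in the diff below):

<?php

// Hypothetical locations of the two trees that must never be served.
$wikiDir = '/var/www/dokuwiki/';      // stands in for DOKU_INC
$dataDir = '/var/www/dokuwiki/data/'; // stands in for $conf['savedir']

/**
 * True if $path lies inside one of the forbidden prefixes.
 * Assumes $path has already been normalized (no "..", consistent slashes).
 */
function isForbidden(string $path, array $prefixes): bool
{
    foreach ($prefixes as $prefix) {
        if (str_starts_with($path, $prefix)) {
            return true;
        }
    }
    return false;
}

var_dump(isForbidden('/var/www/dokuwiki/conf/users.auth.php', [$wikiDir, $dataDir])); // true: inside the wiki install
var_dump(isForbidden('/srv/share/manual.pdf', [$wikiDir, $dataDir]));                 // false: outside both trees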
@@ -57,6 +57,9 @@ class Crawler
     {
         $path = $root . $local;
 
+        // do not descend into wiki or data directories
+        if (Path::isWikiControlled($path)) return [];
+
         if (($dir = opendir($path)) === false) return [];
         $result = [];
         while (($file = readdir($dir)) !== false) {
Path.php (+22)
@@ -149,4 +149,26 @@ class Path
         }
         return implode('/', $output);
     }
+
+    /**
+     * Check if the given path is within the data or dokuwiki dir
+     *
+     * This would prevent accidental or deliberate circumvention of the ACLs
+     *
+     * @param string $path an already cleaned path
+     * @return bool
+     */
+    public static function isWikiControlled($path)
+    {
+        global $conf;
+        $dataPath = self::cleanPath($conf['savedir']);
+        if (str_starts_with($path, $dataPath)) {
+            return true;
+        }
+        $wikiDir = self::cleanPath(DOKU_INC);
+        if (str_starts_with($path, $wikiDir)) {
+            return true;
+        }
+        return false;
+    }
 }
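Note the docblock's requirement that the argument is an already cleaned path: the check is a plain string-prefix comparison, so an uncleaned path containing ../ segments would not match the prefix. A hedged illustration, assuming a hypothetical install where $conf['savedir'] is /var/www/dokuwiki/data, and assuming Path::cleanPath() is callable from outside the class and resolves .. segments (as the segment handling visible in the context above suggests):

<?php
// Hypothetical path that physically points into the data directory
// but does not share its string prefix until it is cleaned.
$raw = '/srv/share/../../var/www/dokuwiki/data/pages/start.txt';

var_dump(Path::isWikiControlled($raw));                  // false: raw string does not match the prefix
var_dump(Path::isWikiControlled(Path::cleanPath($raw))); // true: cleaned path is inside savedir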
file.php (+4)
@@ -19,6 +19,10 @@ $path = $INPUT->str('root') . $INPUT->str('file');
 
 try {
     $pathInfo = $pathUtil->getPathInfo($path, false);
+    if ($pathUtil::isWikiControlled($pathInfo['path'])) {
+        throw new Exception('Access to wiki files is not allowed');
+    }
+
     if (!is_readable($pathInfo['path'])) {
         header('Content-Type: text/plain');
         http_status(404);
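For the endpoint the effect is that a request whose root and file inputs concatenate to a path inside the wiki can no longer be served. A hedged sketch with hypothetical request values; the real code first runs the path through $pathUtil->getPathInfo(), and the catch block that turns the exception into a response lies outside the visible hunk:

<?php
// Hypothetical request: file.php?root=/var/www/dokuwiki/&file=conf/users.auth.php
// conf/users.auth.php holds the user password hashes mentioned in the
// commit message; the concatenated path falls under DOKU_INC, so the
// new guard throws before the file is ever read.
$root = '/var/www/dokuwiki/';   // hypothetical 'root' input
$file = 'conf/users.auth.php';  // hypothetical 'file' input
$path = $root . $file;

if (Path::isWikiControlled($path)) {
    throw new Exception('Access to wiki files is not allowed');
}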