prevent access of wiki files
This prevents the crawler from descending into the wiki directory or data directory, even when a higher-up directory was for some reason made accessible. This should prevent the circumvention of ACLs and prevent access to sensitive data like user password hashes etc.
This commit is contained in:
22
Path.php
22
Path.php
@@ -149,4 +149,26 @@ class Path
|
||||
}
|
||||
return implode('/', $output);
|
||||
}
|
||||
|
||||
/**
 * Check if the given path is within the data or dokuwiki dir
 *
 * This would prevent accidental or deliberate circumvention of the ACLs
 *
 * @param string $path an already cleaned path
 * @return bool true if the path lies inside the data dir or the wiki install dir
 */
public static function isWikiControlled($path)
{
    global $conf;

    // Both the data directory (savedir) and the wiki install directory
    // (DOKU_INC) are off limits. A bare prefix check would also match
    // sibling directories (e.g. "/var/www/data2" for "/var/www/data"),
    // so require the match to end exactly at a directory boundary.
    foreach ([$conf['savedir'], DOKU_INC] as $dir) {
        // rtrim guards against a trailing slash from cleanPath (DOKU_INC
        // conventionally ends with '/') which would break the boundary check
        $controlled = rtrim(self::cleanPath($dir), '/');
        if ($path === $controlled || str_starts_with($path, $controlled . '/')) {
            return true;
        }
    }

    return false;
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user