From 5581143bee602dbd5417f532f2b483e58d0a4269 Mon Sep 17 00:00:00 2001
From: Brion Vibber
Date: Wed, 28 Oct 2009 15:29:20 -0400
Subject: Rebuilt HTTPClient class as an extension of PEAR HTTP_Request2
 package, adding redirect handling and convenience functions. Caching support
 will be added in future work after unit tests have been added.

* extlib: add PEAR HTTP_Request2 0.4.1 alpha
* extlib: update PEAR Net_URL2 to 0.3.0 beta for HTTP_Request2 compatibility
* moved direct usage of CURL and file_get_contents to HTTPClient class, excluding external-sourced libraries
* adapted GeonamesPlugin for new HTTPResponse interface

Note some plugins haven't been fully tested yet.
---
 classes/File_redirection.php | 68 +++++++++++++++++++++++---------------------
 1 file changed, 36 insertions(+), 32 deletions(-)

(limited to 'classes')

diff --git a/classes/File_redirection.php b/classes/File_redirection.php
index 79052bf7d..08a6e8d8b 100644
--- a/classes/File_redirection.php
+++ b/classes/File_redirection.php
@@ -47,18 +47,15 @@ class File_redirection extends Memcached_DataObject
     /* the code above is auto generated do not remove the tag below */
     ###END_AUTOCODE

-    function _commonCurl($url, $redirs) {
-        $curlh = curl_init();
-        curl_setopt($curlh, CURLOPT_URL, $url);
-        curl_setopt($curlh, CURLOPT_AUTOREFERER, true); // # setup referer header when folowing redirects
-        curl_setopt($curlh, CURLOPT_CONNECTTIMEOUT, 10); // # seconds to wait
-        curl_setopt($curlh, CURLOPT_MAXREDIRS, $redirs); // # max number of http redirections to follow
-        curl_setopt($curlh, CURLOPT_USERAGENT, USER_AGENT);
-        curl_setopt($curlh, CURLOPT_FOLLOWLOCATION, true); // Follow redirects
-        curl_setopt($curlh, CURLOPT_RETURNTRANSFER, true);
-        curl_setopt($curlh, CURLOPT_FILETIME, true);
-        curl_setopt($curlh, CURLOPT_HEADER, true); // Include header in output
-        return $curlh;
+    static function _commonHttp($url, $redirs) {
+        $request = new HTTPClient($url);
+        $request->setConfig(array(
+            'connect_timeout' => 10, // # seconds to wait
+            'max_redirs' => $redirs, // # max number of http redirections to follow
+            'follow_redirects' => true, // Follow redirects
+            'store_body' => false, // We won't need body content here.
+        ));
+        return $request;
     }

     function _redirectWhere_imp($short_url, $redirs = 10, $protected = false) {
@@ -82,32 +79,39 @@ class File_redirection extends Memcached_DataObject
         if(strpos($short_url,'://') === false){
             return $short_url;
         }
-        $curlh = File_redirection::_commonCurl($short_url, $redirs);
-        // Don't include body in output
-        curl_setopt($curlh, CURLOPT_NOBODY, true);
-        curl_exec($curlh);
-        $info = curl_getinfo($curlh);
-        curl_close($curlh);
-
-        if (405 == $info['http_code']) {
-            $curlh = File_redirection::_commonCurl($short_url, $redirs);
-            curl_exec($curlh);
-            $info = curl_getinfo($curlh);
-            curl_close($curlh);
+        try {
+            $request = self::_commonHttp($short_url, $redirs);
+            // Don't include body in output
+            $request->setMethod(HTTP_Request2::METHOD_HEAD);
+            $response = $request->send();
+
+            if (405 == $response->getStatus()) {
+                // Server doesn't support HEAD method? Can this really happen?
+                // We'll try again as a GET and ignore the response data.
+                $request = self::_commonHttp($short_url, $redirs);
+                $response = $request->send();
+            }
+        } catch (Exception $e) {
+            // Invalid URL or failure to reach server
+            return $short_url;
         }

-        if (!empty($info['redirect_count']) && File::isProtected($info['url'])) {
-            return File_redirection::_redirectWhere_imp($short_url, $info['redirect_count'] - 1, true);
+        if ($response->getRedirectCount() && File::isProtected($response->getUrl())) {
+            // Bump back up the redirect chain until we find a non-protected URL
+            return self::_redirectWhere_imp($short_url, $response->getRedirectCount() - 1, true);
         }

-        $ret = array('code' => $info['http_code']
-                     , 'redirects' => $info['redirect_count']
-                     , 'url' => $info['url']);
+        $ret = array('code' => $response->getStatus()
+                     , 'redirects' => $response->getRedirectCount()
+                     , 'url' => $response->getUrl());

-        if (!empty($info['content_type'])) $ret['type'] = $info['content_type'];
+        $type = $response->getHeader('Content-Type');
+        if ($type) $ret['type'] = $type;
         if ($protected) $ret['protected'] = true;
-        if (!empty($info['download_content_length'])) $ret['size'] = $info['download_content_length'];
-        if (isset($info['filetime']) && ($info['filetime'] > 0)) $ret['time'] = $info['filetime'];
+        $size = $response->getHeader('Content-Length'); // @fixme bytes?
+        if ($size) $ret['size'] = $size;
+        $time = $response->getHeader('Last-Modified');
+        if ($time) $ret['time'] = strtotime($time);

         return $ret;
     }
--
cgit v1.2.3-54-g00ecf

From 15d0055c6f2e3b7007a82df40502e15cf5c32a13 Mon Sep 17 00:00:00 2001
From: Craig Andrews
Date: Mon, 2 Nov 2009 15:18:04 -0500
Subject: allowed_nickname blocks top level url router names

---
 classes/User.php | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

(limited to 'classes')

diff --git a/classes/User.php b/classes/User.php
index 3fa9cc152..530ece1ba 100644
--- a/classes/User.php
+++ b/classes/User.php
@@ -118,7 +118,7 @@ class User extends Memcached_DataObject
     {
         // XXX: should already be validated for size, content, etc.

-        $blacklist = array();
+        $blacklist = common_config('nickname', 'blacklist');

         //all directory and file names should be blacklisted
         $d = dir(INSTALLDIR);
@@ -126,8 +126,15 @@ class User extends Memcached_DataObject
             $blacklist[]=$entry;
         }
         $d->close();
-        $merged = array_merge($blacklist, common_config('nickname', 'blacklist'));
-        return !in_array($nickname, $merged);
+
+        //all top level names in the router should be blacklisted
+        $router = Router::get();
+        foreach(array_keys($router->m->getPaths()) as $path){
+            if(preg_match('/^\/(.*?)[\/\?]/',$path,$matches)){
+                $blacklist[]=$matches[1];
+            }
+        }
+        return !in_array($nickname, $blacklist);
     }

     function getCurrentNotice($dt=null)
--
cgit v1.2.3-54-g00ecf
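For context, a minimal sketch of the request pattern the first patch introduces (a cheap HEAD probe with a GET retry on 405). Only HTTPClient, setConfig(), setMethod(), send(), getStatus(), getUrl() and getHeader() are taken from the diff above; the function names, config values and return shape are illustrative assumptions, not part of the patch.

<?php
// Sketch only -- not part of the patches above. probe_url() and
// make_probe_request() are hypothetical names; the HTTPClient/HTTP_Request2
// calls mirror File_redirection::_redirectWhere_imp() as rewritten in the diff.

function make_probe_request($url)
{
    $request = new HTTPClient($url);
    $request->setConfig(array(
        'connect_timeout'  => 10,    // seconds to wait for a connection
        'max_redirs'       => 10,    // maximum redirections to follow
        'follow_redirects' => true,
        'store_body'       => false, // headers are enough; skip the body
    ));
    return $request;
}

function probe_url($url)
{
    try {
        // HEAD first; some servers answer 405 to HEAD, in which case
        // we build a fresh request and retry with the default GET method.
        $request = make_probe_request($url);
        $request->setMethod(HTTP_Request2::METHOD_HEAD);
        $response = $request->send();

        if (405 == $response->getStatus()) {
            $request = make_probe_request($url);
            $response = $request->send();
        }
    } catch (Exception $e) {
        return false; // invalid URL or unreachable server
    }

    return array(
        'status' => $response->getStatus(),
        'url'    => $response->getUrl(), // final URL after redirects
        'type'   => $response->getHeader('Content-Type'),
    );
}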
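The second patch blacklists the first path segment of every route the router knows about, so route names can never be claimed as nicknames. A small illustration of what its preg_match() captures; the sample paths are invented, the pattern is the one from the diff.

<?php
// Sketch only: shows what '/^\/(.*?)[\/\?]/' extracts from a routed path.
$paths = array('/main/login', '/settings/profile', '/doc/about?lang=en');

$blacklist = array();
foreach ($paths as $path) {
    if (preg_match('/^\/(.*?)[\/\?]/', $path, $matches)) {
        $blacklist[] = $matches[1]; // first path segment, e.g. 'main'
    }
}

echo implode(', ', $blacklist); // prints: main, settings, doc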