path: root/classes/File.php
author     Brion Vibber <brion@pobox.com>    2010-05-27 14:54:43 -0700
committer  Brion Vibber <brion@pobox.com>    2010-05-27 14:54:43 -0700
commit     2f2fa100715f9c09b2b91a2d1afddacf7b7f16db (patch)
tree       883a6642614484db322a3b6f17511c2ab5f46376 /classes/File.php
parent     87b8a89aa7b96e63586bdd3b1afb9d43355fd48b (diff)
parent     2b318a3420fe3e979defc7eacfb1eb2d9321d807 (diff)
Merge branch 'master' of gitorious.org:statusnet/mainline into testingtesting
Diffstat (limited to 'classes/File.php')
-rw-r--r--    classes/File.php    22
1 file changed, 19 insertions, 3 deletions
diff --git a/classes/File.php b/classes/File.php
index 33273bbdc..8297e5091 100644
--- a/classes/File.php
+++ b/classes/File.php
@@ -116,7 +116,11 @@ class File extends Memcached_DataObject
         return false;
     }

-    function processNew($given_url, $notice_id=null) {
+    /**
+     * @fixme refactor this mess, it's gotten pretty scary.
+     * @param bool $followRedirects
+     */
+    function processNew($given_url, $notice_id=null, $followRedirects=true) {
         if (empty($given_url)) return -1; // error, no url to process
         $given_url = File_redirection::_canonUrl($given_url);
         if (empty($given_url)) return -1; // error, no url to process
@@ -124,6 +128,10 @@ class File extends Memcached_DataObject
         if (empty($file)) {
             $file_redir = File_redirection::staticGet('url', $given_url);
             if (empty($file_redir)) {
+                // @fixme for new URLs this also looks up non-redirect data
+                // such as target content type, size, etc, which we need
+                // for File::saveNew(); so we call it even if not following
+                // new redirects.
                 $redir_data = File_redirection::where($given_url);
                 if (is_array($redir_data)) {
                     $redir_url = $redir_data['url'];
@@ -134,11 +142,19 @@ class File extends Memcached_DataObject
                     throw new ServerException("Can't process url '$given_url'");
                 }
                 // TODO: max field length
-                if ($redir_url === $given_url || strlen($redir_url) > 255) {
+                if ($redir_url === $given_url || strlen($redir_url) > 255 || !$followRedirects) {
                     $x = File::saveNew($redir_data, $given_url);
                     $file_id = $x->id;
                 } else {
-                    $x = File::processNew($redir_url, $notice_id);
+                    // This seems kind of messed up... for now skipping this part
+                    // if we're already under a redirect, so we don't go into
+                    // horrible infinite loops if we've been given an unstable
+                    // redirect (where the final destination of the first request
+                    // doesn't match what we get when we ask for it again).
+                    //
+                    // Seen in the wild with clojure.org, which redirects through
+                    // wikispaces for auth and appends session data in the URL params.
+                    $x = File::processNew($redir_url, $notice_id, /*followRedirects*/false);
                     $file_id = $x->id;
                     File_redirection::saveNew($redir_data, $file_id, $given_url);
                 }
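
The change above caps redirect resolution at one hop: the outer call resolves the redirect and recurses once with $followRedirects set to false, and that inner call then saves whatever URL it was handed rather than chasing further redirects. Below is a minimal, self-contained sketch of the same guard, not the StatusNet code itself; resolveOnce() is a hypothetical stand-in for File_redirection::where() that simulates an unstable redirect whose target changes on every request (the clojure.org case mentioned in the commit).

<?php
// Hypothetical stand-in for File_redirection::where(): every lookup of the
// same URL yields a different target, simulating an unstable redirect.
function resolveOnce($url) {
    return $url . '?session=' . uniqid();
}

// Sketch of the one-hop guard added in this commit (names simplified).
function processUrlSketch($givenUrl, $followRedirects = true) {
    $target = resolveOnce($givenUrl);

    // Stop if the URL resolves to itself, the target is too long to store,
    // or we have already followed one redirect hop.
    if ($target === $givenUrl || strlen($target) > 255 || !$followRedirects) {
        return $givenUrl; // save the URL we already have
    }

    // Follow exactly one hop; the recursive call will not follow another.
    return processUrlSketch($target, false);
}

var_dump(processUrlSketch('http://clojure.org/'));

Against the simulated unstable redirect this returns after two calls; without the !$followRedirects check the recursion would never terminate, because the resolved target never equals the URL that was requested.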