author    Dan McGee <dan@archlinux.org>  2011-10-24 13:49:14 -0500
committer Dan McGee <dan@archlinux.org>  2011-10-24 13:49:14 -0500
commit    5853025137e64411d7be745a55936ec3e8868212 (patch)
tree      c8931910a62079263d5dc224137acb0eeb02f736
parent    d6e3446e70bcda9c0f28d80110d588106daa4b5c (diff)
Add more logging to download code
This adds a logger to the CURLE_OK case so we always know the return code when it is >= 400, and debug-log it regardless. Also adjust another logger to use the cURL error message directly, and use fstat() rather than stat() when we have an open file handle.

Signed-off-by: Dan McGee <dan@archlinux.org>
-rw-r--r--  lib/libalpm/dload.c  |  14
1 file changed, 11 insertions(+), 3 deletions(-)
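
The commit message mentions always logging the HTTP/FTP response code and reusing libcurl's own wording instead of hardcoded strings. The following is a minimal standalone sketch of that pattern, not part of the patch: it assumes a libcurl development environment, and the URL is a placeholder.

#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
	CURL *curl;
	CURLcode ret;
	long respcode = 0;

	curl_global_init(CURL_GLOBAL_DEFAULT);
	curl = curl_easy_init();
	if(!curl) {
		return 1;
	}

	/* placeholder URL, HEAD request only */
	curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/core.db");
	curl_easy_setopt(curl, CURLOPT_NOBODY, 1L);

	ret = curl_easy_perform(curl);
	if(ret == CURLE_OK) {
		/* same call the CURLE_OK branch below logs at debug level */
		curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &respcode);
		printf("response code: %ld\n", respcode);
		if(respcode >= 400) {
			/* mirrors libcurl's own wording for HTTP errors */
			printf("The requested URL returned error: %ld\n", respcode);
		}
	} else {
		/* let libcurl describe the failure instead of a hardcoded string */
		printf("curl error: %s\n", curl_easy_strerror(ret));
	}

	curl_easy_cleanup(curl);
	curl_global_cleanup();
	return 0;
}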
diff --git a/lib/libalpm/dload.c b/lib/libalpm/dload.c
index cd2857c3..9d919b0a 100644
--- a/lib/libalpm/dload.c
+++ b/lib/libalpm/dload.c
@@ -392,8 +392,15 @@ static int curl_download_internal(struct dload_payload *payload,
case CURLE_OK:
/* get http/ftp response code */
curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &respcode);
+ _alpm_log(handle, ALPM_LOG_DEBUG, "response code: %ld\n", respcode);
if(respcode >= 400) {
payload->unlink_on_fail = 1;
+ /* non-translated message is same as libcurl */
+ snprintf(error_buffer, sizeof(error_buffer),
+ "The requested URL returned error: %ld", respcode);
+ _alpm_log(handle, ALPM_LOG_ERROR,
+ _("failed retrieving file '%s' from %s : %s\n"),
+ payload->remote_name, hostname, error_buffer);
goto cleanup;
}
break;
@@ -402,15 +409,16 @@ static int curl_download_internal(struct dload_payload *payload,
if(dload_interrupted == ABORT_OVER_MAXFILESIZE) {
payload->curlerr = CURLE_FILESIZE_EXCEEDED;
handle->pm_errno = ALPM_ERR_LIBCURL;
- /* the hardcoded 'size exceeded' message is same as libcurl's normal */
+ /* use the 'size exceeded' message from libcurl */
_alpm_log(handle, ALPM_LOG_ERROR,
_("failed retrieving file '%s' from %s : %s\n"),
- payload->remote_name, hostname, "Maximum file size exceeded");
+ payload->remote_name, hostname,
+ curl_easy_strerror(CURLE_FILESIZE_EXCEEDED));
}
goto cleanup;
default:
/* delete zero length downloads */
- if(stat(payload->tempfile_name, &st) == 0 && st.st_size == 0) {
+ if(fstat(fileno(localf), &st) == 0 && st.st_size == 0) {
payload->unlink_on_fail = 1;
}
if(!payload->errors_ok) {
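
The default case above switches from stat() on the temp file path to fstat() on the already-open handle, so the check operates on the exact file being written rather than re-resolving the path. A minimal sketch of that pattern follows; it is not taken from dload.c, and the filename is a placeholder.

#include <stdio.h>
#include <sys/stat.h>

/* Return 1 if the already-open file is zero length, 0 otherwise. */
static int is_zero_length(FILE *localf)
{
	struct stat st;
	/* fstat() uses the open descriptor directly, like the patched code */
	return fstat(fileno(localf), &st) == 0 && st.st_size == 0;
}

int main(void)
{
	FILE *fp = fopen("example.part", "w");
	if(!fp) {
		return 1;
	}
	printf("zero length: %d\n", is_zero_length(fp));
	fclose(fp);
	return 0;
}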