author | Lennart Poettering <lennart@poettering.net> | 2015-01-20 15:06:34 +0100
---|---|---
committer | Lennart Poettering <lennart@poettering.net> | 2015-01-20 15:06:58 +0100
commit | 85dbc41dc67ff49fd8a843dbac5b8b5cb0b61155 (patch) |
tree | 27cd179d2f98ef32ed27cb66589e44751135513e /src/import/import-job.c |
parent | 88a1aadc48e5bbefd2e689db099569ec4c3c1e4b (diff) |
import: add a simple scheme for validating the SHA256 sums of downloaded raw files
Diffstat (limited to 'src/import/import-job.c')
-rw-r--r-- | src/import/import-job.c | 38 |
1 file changed, 35 insertions, 3 deletions
diff --git a/src/import/import-job.c b/src/import/import-job.c
index 37f8ef76e4..6de32686c5 100644
--- a/src/import/import-job.c
+++ b/src/import/import-job.c
@@ -38,10 +38,14 @@ ImportJob* import_job_unref(ImportJob *j) {
         else if (j->compressed == IMPORT_JOB_GZIP)
                 inflateEnd(&j->gzip);
 
+        if (j->hash_context)
+                gcry_md_close(j->hash_context);
+
         free(j->url);
         free(j->etag);
         strv_free(j->old_etags);
         free(j->payload);
+        free(j->sha256);
 
         free(j);
@@ -94,6 +98,7 @@ void import_job_curl_on_finished(CurlGlue *g, CURL *curl, CURLcode result) {
                 goto finish;
         } else if (status == 304) {
                 log_info("Image already downloaded. Skipping download.");
+                j->etag_exists = true;
                 r = 0;
                 goto finish;
         } else if (status >= 300) {
@@ -119,6 +124,25 @@ void import_job_curl_on_finished(CurlGlue *g, CURL *curl, CURLcode result) {
                 goto finish;
         }
 
+        if (j->hash_context) {
+                uint8_t *k;
+
+                k = gcry_md_read(j->hash_context, GCRY_MD_SHA256);
+                if (!k) {
+                        log_error("Failed to get checksum.");
+                        r = -EIO;
+                        goto finish;
+                }
+
+                j->sha256 = hexmem(k, gcry_md_get_algo_dlen(GCRY_MD_SHA256));
+                if (!j->sha256) {
+                        r = log_oom();
+                        goto finish;
+                }
+
+                log_debug("SHA256 of %s is %s.", j->url, j->sha256);
+        }
+
         if (j->disk_fd >= 0 && j->allow_sparse) {
                 /* Make sure the file size is right, in case the file was
                  * sparse and we just seeked for the last part */
@@ -151,14 +175,12 @@ finish:
         import_job_finish(j, r);
 }
 
-
 static int import_job_write_uncompressed(ImportJob *j, void *p, size_t sz) {
         ssize_t n;
 
         assert(j);
         assert(p);
         assert(sz > 0);
-        assert(j->disk_fd >= 0);
 
         if (j->written_uncompressed + sz < j->written_uncompressed) {
                 log_error("File too large, overflow");
@@ -204,7 +226,6 @@ static int import_job_write_compressed(ImportJob *j, void *p, size_t sz) {
         assert(j);
         assert(p);
         assert(sz > 0);
-        assert(j->disk_fd >= 0);
 
         if (j->written_compressed + sz < j->written_compressed) {
                 log_error("File too large, overflow");
@@ -222,6 +243,9 @@ static int import_job_write_compressed(ImportJob *j, void *p, size_t sz) {
                 return -EFBIG;
         }
 
+        if (j->hash_context)
+                gcry_md_write(j->hash_context, p, sz);
+
         switch (j->compressed) {
 
         case IMPORT_JOB_UNCOMPRESSED:
@@ -311,6 +335,13 @@ static int import_job_open_disk(ImportJob *j) {
                 }
         }
 
+        if (j->calc_hash) {
+                if (gcry_md_open(&j->hash_context, GCRY_MD_SHA256, 0) != 0) {
+                        log_error("Failed to initialize hash context.");
+                        return -EIO;
+                }
+        }
+
         return 0;
 }
 
@@ -459,6 +490,7 @@ static size_t import_job_header_callback(void *contents, size_t size, size_t nme
 
                 if (strv_contains(j->old_etags, j->etag)) {
                         log_info("Image already downloaded. Skipping download.");
+                        j->etag_exists = true;
                         import_job_finish(j, 0);
                         return sz;
                 }
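
For readers unfamiliar with the libgcrypt calls above, here is a minimal standalone sketch (not part of the commit) of the same streaming-digest pattern: open a SHA256 context, feed it each chunk as it arrives, then read back the binary digest and hex-format it, which is roughly what hexmem() does for j->sha256 in the patch. The file name example.raw and the fread() loop are placeholders standing in for the curl write path; error handling is reduced to the essentials.

```c
/* Sketch only: mirrors the gcry_md_* usage in the patch, not systemd code.
 * Build with: cc sha256-sketch.c -lgcrypt */
#include <gcrypt.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
        gcry_md_hd_t hash_context = NULL;
        uint8_t buf[64 * 1024];
        const uint8_t *k;
        size_t n, dlen, i;
        FILE *f;

        /* libgcrypt expects a version check before first use. */
        gcry_check_version(NULL);

        /* Same call as in import_job_open_disk(). */
        if (gcry_md_open(&hash_context, GCRY_MD_SHA256, 0) != 0) {
                fprintf(stderr, "Failed to initialize hash context.\n");
                return 1;
        }

        f = fopen("example.raw", "rb");   /* placeholder for the download */
        if (!f) {
                perror("fopen");
                gcry_md_close(hash_context);
                return 1;
        }

        /* The patch does this per chunk in import_job_write_compressed(),
         * so the digest covers the bytes exactly as they were received. */
        while ((n = fread(buf, 1, sizeof(buf), f)) > 0)
                gcry_md_write(hash_context, buf, n);
        fclose(f);

        /* Finalize: gcry_md_read() returns the raw digest, which the patch
         * converts to a hex string and stores in j->sha256. */
        k = gcry_md_read(hash_context, GCRY_MD_SHA256);
        dlen = gcry_md_get_algo_dlen(GCRY_MD_SHA256);
        for (i = 0; i < dlen; i++)
                printf("%02x", k[i]);
        putchar('\n');

        gcry_md_close(hash_context);
        return 0;
}
```

Note that the diff shown here (limited to import-job.c) only computes and stores the digest; comparing it against an expected SHA256 sum is left to the code that consumes j->sha256, per the commit message's validation scheme.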