From 96fe15caf6761c5746f1848fbdfd4351189462f7 Mon Sep 17 00:00:00 2001
From: Armin Kuster
Date: Sat, 17 Sep 2016 22:33:07 -0700
Subject: wget: Security fix CVE-2016-4971

affects wget < 1.18.0

(From OE-Core rev: f4ea85d9c33a18f9e18e789a3399cf2d5c4f8164)

Signed-off-by: Armin Kuster
Signed-off-by: Richard Purdie
---
 .../recipes-extended/wget/wget/CVE-2016-4971.patch | 295 +++++++++++++++++++++
 meta/recipes-extended/wget/wget_1.17.1.bb          |   1 +
 2 files changed, 296 insertions(+)
 create mode 100644 meta/recipes-extended/wget/wget/CVE-2016-4971.patch

(limited to 'meta/recipes-extended/wget')

diff --git a/meta/recipes-extended/wget/wget/CVE-2016-4971.patch b/meta/recipes-extended/wget/wget/CVE-2016-4971.patch
new file mode 100644
index 0000000000..e8b22f4d8d
--- /dev/null
+++ b/meta/recipes-extended/wget/wget/CVE-2016-4971.patch
@@ -0,0 +1,295 @@
+From e996e322ffd42aaa051602da182d03178d0f13e1 Mon Sep 17 00:00:00 2001
+From: Giuseppe Scrivano
+Date: Mon, 6 Jun 2016 21:20:24 +0200
+Subject: [PATCH] ftp: understand --trust-server-names on a HTTP->FTP redirect
+
+If not --trust-server-names is used, FTP will also get the destination
+file name from the original url specified by the user instead of the
+redirected url. Closes CVE-2016-4971.
+
+* src/ftp.c (ftp_get_listing): Add argument original_url.
+(getftp): Likewise.
+(ftp_loop_internal): Likewise. Use original_url to generate the
+file name if --trust-server-names is not provided.
+(ftp_retrieve_glob): Likewise.
+(ftp_loop): Likewise.
+
+Signed-off-by: Giuseppe Scrivano
+
+Upstream-Status: Backport
+CVE: CVE-2016-4971
+Signed-off-by: Armin Kuster
+
+---
+ src/ftp.c | 71 +++++++++++++++++++++++++++++++++++++-------------------------
+ src/ftp.h | 3 ++-
+ src/retr.c | 3 ++-
+ 3 files changed, 47 insertions(+), 30 deletions(-)
+
+Index: wget-1.17.1/src/ftp.c
+===================================================================
+--- wget-1.17.1.orig/src/ftp.c
++++ wget-1.17.1/src/ftp.c
+@@ -236,7 +236,7 @@ print_length (wgint size, wgint start, b
+   logputs (LOG_VERBOSE, !authoritative ? _(" (unauthoritative)\n") : "\n");
+ }
+
+-static uerr_t ftp_get_listing (struct url *, ccon *, struct fileinfo **);
++static uerr_t ftp_get_listing (struct url *, struct url *, ccon *, struct fileinfo **);
+
+ static uerr_t
+ get_ftp_greeting(int csock, ccon *con)
+@@ -315,7 +315,8 @@ init_control_ssl_connection (int csock,
+    and closes the control connection in case of error. If warc_tmp
+    is non-NULL, the downloaded data will be written there as well. */
+ static uerr_t
+-getftp (struct url *u, wgint passed_expected_bytes, wgint *qtyread,
++getftp (struct url *u, struct url *original_url,
++        wgint passed_expected_bytes, wgint *qtyread,
+         wgint restval, ccon *con, int count, wgint *last_expected_bytes,
+         FILE *warc_tmp)
+ {
+@@ -1189,7 +1190,7 @@ Error in server response, closing contro
+         {
+           bool exists = false;
+           struct fileinfo *f;
+-          uerr_t _res = ftp_get_listing (u, con, &f);
++          uerr_t _res = ftp_get_listing (u, original_url, con, &f);
+           /* Set the DO_RETR command flag again, because it gets unset when
+              calling ftp_get_listing() and would otherwise cause an assertion
+              failure earlier on when this function gets repeatedly called
+@@ -1780,8 +1781,8 @@ exit_error:
+    This loop either gets commands from con, or (if ON_YOUR_OWN is
+    set), makes them up to retrieve the file given by the URL. */
+ static uerr_t
+-ftp_loop_internal (struct url *u, struct fileinfo *f, ccon *con, char **local_file,
+-                   bool force_full_retrieve)
++ftp_loop_internal (struct url *u, struct url *original_url, struct fileinfo *f,
++                   ccon *con, char **local_file, bool force_full_retrieve)
+ {
+   int count, orig_lp;
+   wgint restval, len = 0, qtyread = 0;
+@@ -1806,7 +1807,7 @@ ftp_loop_internal (struct url *u, struct
+     {
+       /* URL-derived file. Consider "-O file" name. */
+       xfree (con->target);
+-      con->target = url_file_name (u, NULL);
++      con->target = url_file_name (opt.trustservernames || !original_url ? u : original_url, NULL);
+       if (!opt.output_document)
+         locf = con->target;
+       else
+@@ -1924,8 +1925,8 @@ ftp_loop_internal (struct url *u, struct
+
+       /* If we are working on a WARC record, getftp should also write
+          to the warc_tmp file. */
+-      err = getftp (u, len, &qtyread, restval, con, count, &last_expected_bytes,
+-                    warc_tmp);
++      err = getftp (u, original_url, len, &qtyread, restval, con, count,
++                    &last_expected_bytes, warc_tmp);
+
+       if (con->csock == -1)
+         con->st &= ~DONE_CWD;
+@@ -2093,7 +2094,8 @@ Removing file due to --delete-after in f
+ /* Return the directory listing in a reusable format. The directory
+    is specifed in u->dir. */
+ static uerr_t
+-ftp_get_listing (struct url *u, ccon *con, struct fileinfo **f)
++ftp_get_listing (struct url *u, struct url *original_url, ccon *con,
++                 struct fileinfo **f)
+ {
+   uerr_t err;
+   char *uf; /* url file name */
+@@ -2114,7 +2116,7 @@ ftp_get_listing (struct url *u, ccon *co
+
+   con->target = xstrdup (lf);
+   xfree (lf);
+-  err = ftp_loop_internal (u, NULL, con, NULL, false);
++  err = ftp_loop_internal (u, original_url, NULL, con, NULL, false);
+   lf = xstrdup (con->target);
+   xfree (con->target);
+   con->target = old_target;
+@@ -2137,8 +2139,9 @@ ftp_get_listing (struct url *u, ccon *co
+   return err;
+ }
+
+-static uerr_t ftp_retrieve_dirs (struct url *, struct fileinfo *, ccon *);
+-static uerr_t ftp_retrieve_glob (struct url *, ccon *, int);
++static uerr_t ftp_retrieve_dirs (struct url *, struct url *,
++                                 struct fileinfo *, ccon *);
++static uerr_t ftp_retrieve_glob (struct url *, struct url *, ccon *, int);
+ static struct fileinfo *delelement (struct fileinfo *, struct fileinfo **);
+ static void freefileinfo (struct fileinfo *f);
+
+@@ -2150,7 +2153,8 @@ static void freefileinfo (struct fileinf
+    If opt.recursive is set, after all files have been retrieved,
+    ftp_retrieve_dirs will be called to retrieve the directories. */
+ static uerr_t
+-ftp_retrieve_list (struct url *u, struct fileinfo *f, ccon *con)
++ftp_retrieve_list (struct url *u, struct url *original_url,
++                   struct fileinfo *f, ccon *con)
+ {
+   static int depth = 0;
+   uerr_t err;
+@@ -2311,7 +2315,10 @@ Already have correct symlink %s -> %s\n\
+           else                /* opt.retr_symlinks */
+             {
+               if (dlthis)
+-                err = ftp_loop_internal (u, f, con, NULL, force_full_retrieve);
++                {
++                  err = ftp_loop_internal (u, original_url, f, con, NULL,
++                                           force_full_retrieve);
++                }
+             } /* opt.retr_symlinks */
+           break;
+         case FT_DIRECTORY:
+@@ -2322,7 +2329,10 @@ Already have correct symlink %s -> %s\n\
+         case FT_PLAINFILE:
+           /* Call the retrieve loop. */
+           if (dlthis)
+-            err = ftp_loop_internal (u, f, con, NULL, force_full_retrieve);
++            {
++              err = ftp_loop_internal (u, original_url, f, con, NULL,
++                                       force_full_retrieve);
++            }
+           break;
+         case FT_UNKNOWN:
+           logprintf (LOG_NOTQUIET, _("%s: unknown/unsupported file type.\n"),
+@@ -2387,7 +2397,7 @@ Already have correct symlink %s -> %s\n\
+   /* We do not want to call ftp_retrieve_dirs here */
+   if (opt.recursive &&
+       !(opt.reclevel != INFINITE_RECURSION && depth >= opt.reclevel))
+-    err = ftp_retrieve_dirs (u, orig, con);
++    err = ftp_retrieve_dirs (u, original_url, orig, con);
+   else if (opt.recursive)
+     DEBUGP ((_("Will not retrieve dirs since depth is %d (max %d).\n"),
+             depth, opt.reclevel));
+@@ -2400,7 +2410,8 @@ Already have correct symlink %s -> %s\n\
+    ftp_retrieve_glob on each directory entry. The function knows
+    about excluded directories. */
+ static uerr_t
+-ftp_retrieve_dirs (struct url *u, struct fileinfo *f, ccon *con)
++ftp_retrieve_dirs (struct url *u, struct url *original_url,
++                   struct fileinfo *f, ccon *con)
+ {
+   char *container = NULL;
+   int container_size = 0;
+@@ -2450,7 +2461,7 @@ Not descending to %s as it is excluded/n
+       odir = xstrdup (u->dir); /* because url_set_dir will free
+                                   u->dir. */
+       url_set_dir (u, newdir);
+-      ftp_retrieve_glob (u, con, GLOB_GETALL);
++      ftp_retrieve_glob (u, original_url, con, GLOB_GETALL);
+       url_set_dir (u, odir);
+       xfree (odir);
+
+@@ -2509,14 +2520,15 @@ is_invalid_entry (struct fileinfo *f)
+    GLOB_GLOBALL, use globbing; if it's GLOB_GETALL, download the whole
+    directory. */
+ static uerr_t
+-ftp_retrieve_glob (struct url *u, ccon *con, int action)
++ftp_retrieve_glob (struct url *u, struct url *original_url,
++                   ccon *con, int action)
+ {
+   struct fileinfo *f, *start;
+   uerr_t res;
+
+   con->cmd |= LEAVE_PENDING;
+
+-  res = ftp_get_listing (u, con, &start);
++  res = ftp_get_listing (u, original_url, con, &start);
+   if (res != RETROK)
+     return res;
+   /* First: weed out that do not conform the global rules given in
+@@ -2612,7 +2624,7 @@ ftp_retrieve_glob (struct url *u, ccon *
+   if (start)
+     {
+       /* Just get everything. */
+-      res = ftp_retrieve_list (u, start, con);
++      res = ftp_retrieve_list (u, original_url, start, con);
+     }
+   else
+     {
+@@ -2628,7 +2640,7 @@ ftp_retrieve_glob (struct url *u, ccon *
+         {
+           /* Let's try retrieving it anyway. */
+           con->st |= ON_YOUR_OWN;
+-          res = ftp_loop_internal (u, NULL, con, NULL, false);
++          res = ftp_loop_internal (u, original_url, NULL, con, NULL, false);
+           return res;
+         }
+
+@@ -2648,8 +2660,8 @@ ftp_retrieve_glob (struct url *u, ccon *
+    of URL. Inherently, its capabilities are limited on what can be
+    encoded into a URL. */
+ uerr_t
+-ftp_loop (struct url *u, char **local_file, int *dt, struct url *proxy,
+-          bool recursive, bool glob)
++ftp_loop (struct url *u, struct url *original_url, char **local_file, int *dt,
++          struct url *proxy, bool recursive, bool glob)
+ {
+   ccon con; /* FTP connection */
+   uerr_t res;
+@@ -2670,16 +2682,17 @@ ftp_loop (struct url *u, char **local_fi
+   if (!*u->file && !recursive)
+     {
+       struct fileinfo *f;
+-      res = ftp_get_listing (u, &con, &f);
++      res = ftp_get_listing (u, original_url, &con, &f);
+
+       if (res == RETROK)
+         {
+           if (opt.htmlify && !opt.spider)
+             {
++              struct url *url_file = opt.trustservernames ? u : original_url;
+               char *filename = (opt.output_document
+                                 ? xstrdup (opt.output_document)
+                                 : (con.target ? xstrdup (con.target)
+-                                   : url_file_name (u, NULL)));
++                                   : url_file_name (url_file, NULL)));
+               res = ftp_index (filename, u, f);
+               if (res == FTPOK && opt.verbose)
+                 {
+@@ -2724,11 +2737,13 @@ ftp_loop (struct url *u, char **local_fi
+           /* ftp_retrieve_glob is a catch-all function that gets called
+              if we need globbing, time-stamping, recursion or preserve
+              permissions. Its third argument is just what we really need. */
+-          res = ftp_retrieve_glob (u, &con,
++          res = ftp_retrieve_glob (u, original_url, &con,
+                                    ispattern ? GLOB_GLOBALL : GLOB_GETONE);
+         }
+       else
+-        res = ftp_loop_internal (u, NULL, &con, local_file, false);
++        {
++          res = ftp_loop_internal (u, original_url, NULL, &con, local_file, false);
++        }
+     }
+   if (res == FTPOK)
+     res = RETROK;
+Index: wget-1.17.1/src/ftp.h
+===================================================================
+--- wget-1.17.1.orig/src/ftp.h
++++ wget-1.17.1/src/ftp.h
+@@ -169,7 +169,8 @@ enum wget_ftp_fstatus
+ };
+
+ struct fileinfo *ftp_parse_ls (const char *, const enum stype);
+-uerr_t ftp_loop (struct url *, char **, int *, struct url *, bool, bool);
++uerr_t ftp_loop (struct url *, struct url *, char **, int *, struct url *,
++                 bool, bool);
+
+ uerr_t ftp_index (const char *, struct url *, struct fileinfo *);
+
+Index: wget-1.17.1/src/retr.c
+===================================================================
+--- wget-1.17.1.orig/src/retr.c
++++ wget-1.17.1/src/retr.c
+@@ -830,7 +830,8 @@ retrieve_url (struct url * orig_parsed,
+       if (redirection_count)
+         oldrec = glob = false;
+
+-      result = ftp_loop (u, &local_file, dt, proxy_url, recursive, glob);
++      result = ftp_loop (u, orig_parsed, &local_file, dt, proxy_url,
++                         recursive, glob);
+       recursive = oldrec;
+
+       /* There is a possibility of having HTTP being redirected to
diff --git a/meta/recipes-extended/wget/wget_1.17.1.bb b/meta/recipes-extended/wget/wget_1.17.1.bb
index dca5d1f712..eac8abf377 100644
--- a/meta/recipes-extended/wget/wget_1.17.1.bb
+++ b/meta/recipes-extended/wget/wget_1.17.1.bb
@@ -1,5 +1,6 @@
 SRC_URI = "${GNU_MIRROR}/wget/wget-${PV}.tar.gz \
            file://0001-Unset-need_charset_alias-when-building-for-musl.patch \
+           file://CVE-2016-4971.patch \
           "
 
 SRC_URI[md5sum] = "a6a908c9ae0e6a4194c628974cc3f05a"
-- 
cgit v1.2.3-54-g00ecf
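For readers following the change itself: the heart of the backport is the choice of which parsed URL the local file name is derived from. After the patch, the server-supplied (redirected) FTP URL only names the file when --trust-server-names is given, or when no redirect happened at all. The fragment below is a minimal, self-contained sketch of that decision, not wget's real code; the struct url layout, choose_local_name() and the example file names are illustrative stand-ins.

/* Sketch of the file-name decision the patch adds to ftp_loop_internal():
   the redirected URL may only name the local file when the user passed
   --trust-server-names, or when there was no redirect at all.  */
#include <stdbool.h>
#include <stdio.h>

struct url { const char *file; };   /* stand-in for wget's parsed URL */

static const char *
choose_local_name (const struct url *u,            /* redirected (FTP) URL   */
                   const struct url *original_url, /* URL the user requested */
                   bool trust_server_names)        /* --trust-server-names   */
{
  const struct url *source =
    (trust_server_names || original_url == NULL) ? u : original_url;
  return source->file;
}

int
main (void)
{
  struct url requested  = { "update.tar.gz" };  /* what the user asked for         */
  struct url redirected = { ".bash_profile" };  /* name pushed by a hostile server */

  /* Pre-patch behaviour was equivalent to always trusting the server: */
  printf ("trusted:   %s\n", choose_local_name (&redirected, &requested, true));
  /* Post-patch default keeps the name the user asked for: */
  printf ("untrusted: %s\n", choose_local_name (&redirected, &requested, false));
  return 0;
}

With the default (untrusted) behaviour the file keeps the name the user requested, so a hostile HTTP server that redirects to FTP can no longer choose a dangerous name such as a dotfile; that is the hardening CVE-2016-4971 asked for.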