Fix CVE-2016-8863:

https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-8863
https://sourceforge.net/p/pupnp/bugs/133/

Patch copied from upstream source repository:

https://sourceforge.net/p/pupnp/code/ci/9c099c2923ab4d98530ab5204af1738be5bddba7/

From 9c099c2923ab4d98530ab5204af1738be5bddba7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Uwe=20Kleine-K=C3=B6nig?= <ukleinek@debian.org>
Date: Thu, 8 Dec 2016 17:11:53 +0100
Subject: [PATCH] Fix out-of-bound access in create_url_list() (CVE-2016-8863)

If there is an invalid URL in URLS->buff after a valid one, parse_uri is
called with out pointing past the allocated memory. As parse_uri writes
to *out before returning an error, the loop in create_url_list must be
stopped early to prevent an out-of-bounds access.

Bug: https://sourceforge.net/p/pupnp/bugs/133/
Bug-CVE: https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-8863
Bug-Debian: https://bugs.debian.org/842093
Bug-Redhat: https://bugzilla.redhat.com/show_bug.cgi?id=1388771
(cherry picked from commit a0f6e719bc03c4d2fe6a4a42ef6b8761446f520b)
---
 upnp/src/gena/gena_device.c | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)

diff --git a/upnp/src/gena/gena_device.c b/upnp/src/gena/gena_device.c
index fb04a29..245c56b 100644
--- a/upnp/src/gena/gena_device.c
+++ b/upnp/src/gena/gena_device.c
@@ -1113,7 +1113,7 @@ static int create_url_list(
 	/*! [out] . */
 	URL_list *out)
 {
-    size_t URLcount = 0;
+    size_t URLcount = 0, URLcount2 = 0;
     size_t i;
     int return_code = 0;
     uri_type temp;
@@ -1155,16 +1155,23 @@ static int create_url_list(
         }
         memcpy( out->URLs, URLS->buff, URLS->size );
         out->URLs[URLS->size] = 0;
-        URLcount = 0;
         for( i = 0; i < URLS->size; i++ ) {
             if( ( URLS->buff[i] == '<' ) && ( i + 1 < URLS->size ) ) {
                 if( ( ( return_code =
                         parse_uri( &out->URLs[i + 1], URLS->size - i + 1,
-                                   &out->parsedURLs[URLcount] ) ) ==
+                                   &out->parsedURLs[URLcount2] ) ) ==
                       HTTP_SUCCESS )
-                    && ( out->parsedURLs[URLcount].hostport.text.size !=
+                    && ( out->parsedURLs[URLcount2].hostport.text.size !=
                          0 ) ) {
-                    URLcount++;
+                    URLcount2++;
+                    if (URLcount2 >= URLcount)
+                        /*
+                         * break early here in case there is a bogus URL that
+                         * was skipped above. This prevents to access
+                         * out->parsedURLs[URLcount] which is beyond the
+                         * allocation.
+                         */
+                        break;
                 } else {
                     if( return_code == UPNP_E_OUTOF_MEMORY ) {
                         free( out->URLs );
-- 
2.11.0
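
The standalone sketch below (hypothetical names, not libupnp code) illustrates
the overrun pattern that the added break guards against: a first pass counts
the entries that validate, the output array is allocated to exactly that
count, and a second pass re-parses into it. Because the parser, like
parse_uri(), writes to its output argument before it can report failure, the
second pass has to stop as soon as the array is full; otherwise a trailing
invalid entry is parsed into the slot just past the allocation, which is the
shape of CVE-2016-8863. The functions parse_entry() and build_list() are
illustrative stand-ins only.

/*
 * Two-pass build with the same full-array break that the patch adds to
 * create_url_list(). Compile with any C99 compiler.
 */
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-in for parse_uri(): writes to *out first, validates second. */
static int parse_entry(const char *s, int *out)
{
    *out = atoi(s);              /* the write happens even when parsing fails */
    return *out > 0 ? 0 : -1;    /* 0 = success, -1 = invalid entry */
}

static int *build_list(const char *entries[], size_t n, size_t *n_valid)
{
    size_t count = 0, filled = 0, i;
    int tmp, *list;

    /* Pass 1: count the entries that parse successfully. */
    for (i = 0; i < n; i++)
        if (parse_entry(entries[i], &tmp) == 0)
            count++;

    if (count == 0) {            /* nothing valid: no allocation, no second pass */
        *n_valid = 0;
        return NULL;
    }

    list = malloc(count * sizeof(*list));
    if (list == NULL)
        return NULL;

    /* Pass 2: re-parse into the allocation, breaking as soon as it is full.
     * Without the break, an invalid entry after the last valid one would make
     * parse_entry() write to list[count], one element past the allocation. */
    for (i = 0; i < n; i++) {
        if (parse_entry(entries[i], &list[filled]) == 0) {
            filled++;
            if (filled >= count)
                break;
        }
    }

    *n_valid = filled;
    return list;
}

int main(void)
{
    const char *entries[] = { "42", "7", "not-a-number" };
    size_t n_valid = 0;
    int *list = build_list(entries, 3, &n_valid);

    if (list != NULL) {
        for (size_t i = 0; i < n_valid; i++)
            printf("%d\n", list[i]);
        free(list);
    }
    return 0;
}

Run as-is, this prints 42 and 7. Removing the break sends the write for the
invalid third entry to list[2], one past the two-element allocation, which is
the same overrun the upstream break prevents in out->parsedURLs.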