util/token.h: fix possible out-of-bounds read

The 'WHITESPACE' case of the '_calc_len' method wrongly accessed the
character before checking the upper bound of the token. The problem is
fixed by swapping the order of the two conditions. The quoted-string
case ('_quoted_string_len') showed the same pattern and is adjusted
accordingly.

Fixes #3756
Norman Feske 2020-05-07 21:23:07 +02:00
parent 894c7411e7
commit 48b2456845
11 changed files with 122 additions and 4 deletions
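
For illustration only, here is a minimal stand-alone sketch of the
condition-order issue, using a hypothetical 'count_spaces' helper rather
than the actual token.h code: with the bound check placed second, the
character at index 'len' is read before the loop terminates, whereas
placing the bound check first lets '&&' short-circuit before the access.

    #include <cstddef>

    /* hypothetical helper, only to illustrate the order of the loop conditions */
    static std::size_t count_spaces(char const *buf, std::size_t len)
    {
        std::size_t i = 0;

        /* buggy order: 'buf[i]' is evaluated for i == len before the bound check */
        /*   for (; buf[i] == ' ' && i < len; i++); */

        /* fixed order: the bound check comes first, 'buf[i]' is never read out of bounds */
        for (; i < len && buf[i] == ' '; i++);

        return i;
    }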


@@ -179,15 +179,19 @@ class Genode::Token
     size_t _quoted_string_len(size_t max_len) const
     {
+        /*
+         * The 'end_of_quote' function examines two 'char' values.
+         * Hence, the upper bound of the index is max_len - 2.
+         */
         unsigned i = 0;
-        for (; !end_of_quote(&_start[i]) && i < max_len; i++)
+        for (; i + 1 < max_len && !end_of_quote(&_start[i]); i++)

             /* string ends without final quotation mark? too bad! */
             if (!_start[i]) return 0;

         /* exceeded maximum token length */
-        if (i == max_len) return 0;
+        if (i + 1 == max_len)
+            return 0;

         /*
          * We stopped our search at the character before the
@@ -234,7 +238,7 @@ class Genode::Token
     case WHITESPACE:
         {
             unsigned i = 0;
-            for (; is_whitespace(_start[i]) && i < max_len; i++);
+            for (; i < max_len && is_whitespace(_start[i]); i++);
             return i;
         }
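
The quoted-string hunk above follows the same reasoning, with one extra
subtlety: the predicate looks at two consecutive characters. Below is a
sketch with a stand-in predicate (the exact definition of 'end_of_quote'
is not shown in this diff; the two-character shape used here is an
assumption for illustration): since two characters are dereferenced, the
index may only advance while 'i + 1 < max_len', i.e. up to 'max_len - 2'.

    #include <cstddef>

    /* stand-in for a predicate that inspects two consecutive characters (assumed shape) */
    static bool two_char_predicate(char const *s)
    {
        return s[0] != '\\' && s[1] == '"';
    }

    static std::size_t quoted_len_sketch(char const *buf, std::size_t max_len)
    {
        std::size_t i = 0;

        /* 'i + 1 < max_len' keeps both buf[i] and buf[i + 1] inside the buffer */
        for (; i + 1 < max_len && !two_char_predicate(&buf[i]); i++)
            if (!buf[i]) return 0;   /* input ends without closing quote */

        /* the loop stopped because the bound was hit, not because a quote ended */
        if (i + 1 == max_len)
            return 0;

        return i;
    }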


@@ -0,0 +1 @@
Scenario that tests the util/token.h utility


@@ -0,0 +1,2 @@
_/src/init
_/src/test-token


@@ -0,0 +1 @@
2020-04-23 42b5b14a84c37b36077cd9c304057c7e3d18163e


@@ -0,0 +1,29 @@
<runtime ram="32M" caps="1000" binary="init">

    <events>
        <timeout meaning="failed" sec="20" />
        <log meaning="succeeded">finished token test</log>
    </events>

    <content>
        <rom label="ld.lib.so"/>
        <rom label="test-token"/>
    </content>

    <config>
        <parent-provides>
            <service name="ROM"/>
            <service name="CPU"/>
            <service name="RM"/>
            <service name="PD"/>
            <service name="LOG"/>
        </parent-provides>
        <default-route>
            <any-service> <parent/> </any-service>
        </default-route>
        <default caps="100"/>
        <start name="test-token">
            <resource name="RAM" quantum="10M"/>
        </start>
    </config>
</runtime>


@@ -0,0 +1,2 @@
SRC_DIR = src/test/token
include $(GENODE_DIR)/repos/base/recipes/src/content.inc


@@ -0,0 +1 @@
2020-04-23 0e735b0b95eaeb3f55c36b56e06c8a0b17f3c44b


@@ -0,0 +1 @@
base


@@ -0,0 +1,73 @@
/*
 * \brief  Tokenizer test
 * \author Norman Feske
 * \date   2020-05-08
 */

/*
 * Copyright (C) 2020 Genode Labs GmbH
 *
 * This file is part of the Genode OS framework, which is distributed
 * under the terms of the GNU Affero General Public License version 3.
 */

#include <util/string.h>
#include <base/log.h>
#include <base/component.h>
#include <base/attached_dataspace.h>
#include <base/attached_ram_dataspace.h>
#include <rm_session/connection.h>
#include <region_map/client.h>

using namespace Genode;


/**
 * Regression test for issue #3756
 */
static void test_out_of_bounds_access(Env &env)
{
    enum { PAGE_SIZE   = 4096U,
           SUB_RM_SIZE = PAGE_SIZE*2,
           BUF_SIZE    = PAGE_SIZE };

    Rm_connection rm(env);
    Region_map_client sub_rm(rm.create(SUB_RM_SIZE));

    /* allocate physical page of memory as buffer */
    Attached_ram_dataspace buf_ds(env.ram(), env.rm(), BUF_SIZE);

    /* attach buffer at start of managed dataspace, leave 2nd page as guard */
    sub_rm.attach_at(buf_ds.cap(), 0);

    /* locally attach managed dataspace */
    char * const buf_ptr = env.rm().attach(sub_rm.dataspace());

    auto tokenize_two_tokens_at_end_of_buffer = [&] (char const * const input)
    {
        log("tokenize: '", input, "'");

        size_t const input_len = strlen(input);
        char * const token_ptr = buf_ptr + BUF_SIZE - input_len;

        memcpy(token_ptr, input, input_len);

        typedef ::Genode::Token<Scanner_policy_identifier_with_underline> Token;

        Token t(token_ptr, input_len);
        t = t.next();
    };

    /* 'x' followed by a whitespace character as last byte before the guard page */
    tokenize_two_tokens_at_end_of_buffer("x ");

    /* 'x' followed by a quote character as last byte before the guard page */
    tokenize_two_tokens_at_end_of_buffer("x\"");
}


void Component::construct(Env &env)
{
    log("--- token test ---");

    test_out_of_bounds_access(env);

    log("--- finished token test ---");
}
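
The test catches a recurrence of the bug by construction: the input is
copied so that it ends exactly at the last byte of a mapped page, and the
second page of the managed dataspace stays unattached as a guard, so any
read past the token faults immediately instead of going unnoticed. The
same guard-page idea can be sketched outside of Genode with POSIX
mmap/mprotect (this snippet is not part of the commit and omits error
handling):

    #include <sys/mman.h>
    #include <cstddef>
    #include <cstring>
    #include <cstdio>

    int main()
    {
        /* reserve two pages and make the second one inaccessible as a guard */
        std::size_t const page = 4096;
        char *buf = static_cast<char *>(mmap(nullptr, 2*page, PROT_READ | PROT_WRITE,
                                             MAP_PRIVATE | MAP_ANONYMOUS, -1, 0));
        mprotect(buf + page, page, PROT_NONE);

        /* place a two-byte input so that it ends exactly at the guard page */
        char *token = buf + page - 2;
        std::memcpy(token, "x ", 2);

        /* any read of token[2], one byte past the input, now faults immediately */
        std::puts("buffer with guard page prepared");
        return 0;
    }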


@@ -0,0 +1,3 @@
TARGET = test-token
SRC_CC = main.cc
LIBS = base


@@ -723,6 +723,7 @@ set default_test_pkgs {
     test-terminal_crosslink
     test-timer
     test-tls
+    test-token
     test-trace
     test-trace_logger
     test-utf8