------------------------------------------------------------ revno: 13438 revision-id: kinkie@squid-cache.org-20140602175231-2y3z8punsih3i83g parent: kinkie@squid-cache.org-20140602150419-av9zc2kw7fmjgeyg committer: Francesco Chemolli branch nick: trunk timestamp: Mon 2014-06-02 19:52:31 +0200 message: Marked const variables in Tokenizer methods as such ------------------------------------------------------------ # Bazaar merge directive format 2 (Bazaar 0.90) # revision_id: kinkie@squid-cache.org-20140602175231-2y3z8punsih3i83g # target_branch: http://bzr.squid-cache.org/bzr/squid3/trunk/ # testament_sha1: 402f61831737fe4deac38076ad02c08cbce324c1 # timestamp: 2014-06-02 17:53:53 +0000 # source_branch: http://bzr.squid-cache.org/bzr/squid3/trunk/ # base_revision_id: kinkie@squid-cache.org-20140602150419-\ # av9zc2kw7fmjgeyg # # Begin patch === modified file 'src/parser/Tokenizer.cc' --- src/parser/Tokenizer.cc 2014-06-02 07:19:35 +0000 +++ src/parser/Tokenizer.cc 2014-06-02 17:52:31 +0000 @@ -31,15 +31,15 @@ bool Parser::Tokenizer::token(SBuf &returnedToken, const CharacterSet &delimiters) { - SBuf savebuf(buf_); + const SBuf savebuf(buf_); skip(delimiters); - SBuf::size_type tokenLen = buf_.findFirstOf(delimiters); // not found = npos => consume to end + const SBuf::size_type tokenLen = buf_.findFirstOf(delimiters); // not found = npos => consume to end if (tokenLen == SBuf::npos && !delimiters['\0']) { // no delimiter found, nor is NUL/EOS/npos acceptible as one buf_ = savebuf; return false; } - SBuf retval = buf_.consume(tokenLen); + const SBuf retval = buf_.consume(tokenLen); skip(delimiters); returnedToken = retval; return true; @@ -48,7 +48,7 @@ bool Parser::Tokenizer::prefix(SBuf &returnedToken, const CharacterSet &tokenChars, const SBuf::size_type limit) { - SBuf::size_type prefixLen = buf_.substr(0,limit).findFirstNotOf(tokenChars); + const SBuf::size_type prefixLen = buf_.substr(0,limit).findFirstNotOf(tokenChars); if (prefixLen == 0) return false; 
returnedToken = buf_.consume(prefixLen); @@ -58,7 +58,7 @@ bool Parser::Tokenizer::skip(const CharacterSet &tokenChars) { - SBuf::size_type prefixLen = buf_.findFirstNotOf(tokenChars); + const SBuf::size_type prefixLen = buf_.findFirstNotOf(tokenChars); if (prefixLen == 0) return false; buf_.consume(prefixLen); @@ -122,7 +122,7 @@ uint64_t cutoff; cutoff = neg ? -static_cast<uint64_t>(INT64_MIN) : INT64_MAX; - int cutlim = cutoff % static_cast<uint64_t>(base); + const int cutlim = cutoff % static_cast<uint64_t>(base); cutoff /= static_cast<uint64_t>(base); int any = 0, c;