Cleanup: Make string constants in net/ more const-correct.

Review URL: https://codereview.chromium.org/696033002

Cr-Commit-Position: refs/heads/master@{#312985}
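
The pattern applied throughout the diff below: string constants declared as
mutable pointers become const char arrays, and tables of strings (including
pointer members inside lookup structs such as MagicNumber::mime_type and
MagicNumber::magic in net/base/mime_sniffer.cc) gain a top-level const on the
pointer. A minimal sketch of the idea, with illustrative names only:

  // Before: only the characters are const; the pointer itself is a
  // writable global that can be reassigned and typically needs a
  // load-time relocation.
  const char* kName = "value";
  static const char* kTokens[] = { "a", "b" };

  // After: both the data and the pointers are immutable, so the
  // objects can live in read-only storage.
  const char kName[] = "value";
  static const char* const kTokens[] = { "a", "b" };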
diff --git a/net/base/address_list_unittest.cc b/net/base/address_list_unittest.cc
index e559788..4f70b67 100644
--- a/net/base/address_list_unittest.cc
+++ b/net/base/address_list_unittest.cc
@@ -13,7 +13,7 @@
 namespace net {
 namespace {
 
-static const char* kCanonicalHostname = "canonical.bar.com";
+const char kCanonicalHostname[] = "canonical.bar.com";
 
 TEST(AddressListTest, Canonical) {
   // Create an addrinfo with a canonical name.
diff --git a/net/base/mime_sniffer.cc b/net/base/mime_sniffer.cc
index ad4212dc28..0d1f1169 100644
--- a/net/base/mime_sniffer.cc
+++ b/net/base/mime_sniffer.cc
@@ -110,11 +110,11 @@
 static const size_t kBytesRequiredForMagic = 42;
 
 struct MagicNumber {
-  const char* mime_type;
-  const char* magic;
+  const char* const mime_type;
+  const char* const magic;
   size_t magic_len;
   bool is_string;
-  const char* mask;  // if set, must have same length as |magic|
+  const char* const mask;  // if set, must have same length as |magic|
 };
 
 #define MAGIC_NUMBER(mime_type, magic) \
@@ -209,7 +209,7 @@
 
 struct OfficeExtensionType {
   OfficeDocType doc_type;
-  const char* extension;
+  const char* const extension;
   size_t extension_len;
 };
 
@@ -724,7 +724,7 @@
 static bool IsUnknownMimeType(const std::string& mime_type) {
   // TODO(tc): Maybe reuse some code in net/http/http_response_headers.* here.
   // If we do, please be careful not to alter the semantics at all.
-  static const char* kUnknownMimeTypes[] = {
+  static const char* const kUnknownMimeTypes[] = {
     // Empty mime types are as unknown as they get.
     "",
     // The unknown/unknown type is popular and uninformative
@@ -819,7 +819,7 @@
     return false;
   }
 
-  static const char* kSniffableTypes[] = {
+  static const char* const kSniffableTypes[] = {
     // Many web servers are misconfigured to send text/plain for many
     // different types of content.
     "text/plain",
diff --git a/net/base/mime_util.cc b/net/base/mime_util.cc
index d1b67d58..a7fe9f3 100644
--- a/net/base/mime_util.cc
+++ b/net/base/mime_util.cc
@@ -174,8 +174,8 @@
     LAZY_INSTANCE_INITIALIZER;
 
 struct MimeInfo {
-  const char* mime_type;
-  const char* extensions;  // comma separated list
+  const char* const mime_type;
+  const char* const extensions;  // comma separated list
 };
 
 static const MimeInfo primary_mappings[] = {
@@ -385,7 +385,7 @@
 
 // Dictionary of cryptographic file mime types.
 struct CertificateMimeTypeInfo {
-  const char* mime_type;
+  const char* const mime_type;
   CertificateMimeType cert_type;
 };
 
@@ -498,8 +498,8 @@
 #endif
 
 struct MediaFormatStrict {
-  const char* mime_type;
-  const char* codecs_list;
+  const char* const mime_type;
+  const char* const codecs_list;
 };
 
 // Following is the list of RFC 6381 compliant codecs:
@@ -799,7 +799,7 @@
 }
 
 // See http://www.iana.org/assignments/media-types/media-types.xhtml
-static const char* legal_top_level_types[] = {
+static const char* const legal_top_level_types[] = {
   "application",
   "audio",
   "example",
@@ -1223,7 +1223,7 @@
 };
 
 struct StandardType {
-  const char* leading_mime_type;
+  const char* const leading_mime_type;
   const char* const* standard_types;
   size_t standard_types_len;
 };
diff --git a/net/base/mime_util_unittest.cc b/net/base/mime_util_unittest.cc
index a3d9516e..a28c8b69 100644
--- a/net/base/mime_util_unittest.cc
+++ b/net/base/mime_util_unittest.cc
@@ -17,7 +17,7 @@
 TEST(MimeUtilTest, ExtensionTest) {
   const struct {
     const base::FilePath::CharType* extension;
-    const char* mime_type;
+    const char* const mime_type;
     bool valid;
   } tests[] = {
     { FILE_PATH_LITERAL("png"), "image/png", true },
@@ -44,7 +44,7 @@
 TEST(MimeUtilTest, FileTest) {
   const struct {
     const base::FilePath::CharType* file_path;
-    const char* mime_type;
+    const char* const mime_type;
     bool valid;
   } tests[] = {
     { FILE_PATH_LITERAL("c:\\foo\\bar.css"), "text/css", true },
@@ -258,9 +258,9 @@
 // results' length to 2.
 TEST(MimeUtilTest, ParseCodecString) {
   const struct {
-    const char* original;
+    const char* const original;
     size_t expected_size;
-    const char* results[2];
+    const char* const results[2];
   } tests[] = {
     { "\"bogus\"",                  1, { "bogus" }            },
     { "0",                          1, { "0" }                },
@@ -365,9 +365,9 @@
 
 TEST(MimeUtilTest, TestGetExtensionsForMimeType) {
   const struct {
-    const char* mime_type;
+    const char* const mime_type;
     size_t min_expected_size;
-    const char* contained_result;
+    const char* const contained_result;
   } tests[] = {
     { "text/plain", 2, "txt" },
     { "*",          0, NULL  },
@@ -427,12 +427,13 @@
 }
 
 TEST(MimeUtilTest, TestAddMultipartValueForUpload) {
-  const char* ref_output = "--boundary\r\nContent-Disposition: form-data;"
-                           " name=\"value name\"\r\nContent-Type: content type"
-                           "\r\n\r\nvalue\r\n"
-                           "--boundary\r\nContent-Disposition: form-data;"
-                           " name=\"value name\"\r\n\r\nvalue\r\n"
-                           "--boundary--\r\n";
+  const char ref_output[] =
+      "--boundary\r\nContent-Disposition: form-data;"
+      " name=\"value name\"\r\nContent-Type: content type"
+      "\r\n\r\nvalue\r\n"
+      "--boundary\r\nContent-Disposition: form-data;"
+      " name=\"value name\"\r\n\r\nvalue\r\n"
+      "--boundary--\r\n";
   std::string post_data;
   AddMultipartValueForUpload("value name", "value", "boundary",
                              "content type", &post_data);
diff --git a/net/base/net_string_util_icu_alternatives_android.cc b/net/base/net_string_util_icu_alternatives_android.cc
index ca0e7f0..f4ef1ef 100644
--- a/net/base/net_string_util_icu_alternatives_android.cc
+++ b/net/base/net_string_util_icu_alternatives_android.cc
@@ -66,7 +66,7 @@
 
 }  // namespace
 
-const char* const kCharsetLatin1 = "ISO-8859-1";
+const char kCharsetLatin1[] = "ISO-8859-1";
 
 bool ConvertToUtf8(const std::string& text, const char* charset,
                    std::string* output) {
diff --git a/net/base/net_util_icu.cc b/net/base/net_util_icu.cc
index 8526edaf..176826c4 100644
--- a/net/base/net_util_icu.cc
+++ b/net/base/net_util_icu.cc
@@ -670,9 +670,9 @@
 
   // Special handling for view-source:.  Don't use content::kViewSourceScheme
   // because this library shouldn't depend on chrome.
-  const char* const kViewSource = "view-source";
+  const char kViewSource[] = "view-source";
   // Reject "view-source:view-source:..." to avoid deep recursion.
-  const char* const kViewSourceTwice = "view-source:view-source:";
+  const char kViewSourceTwice[] = "view-source:view-source:";
   if (url.SchemeIs(kViewSource) &&
       !StartsWithASCII(url.possibly_invalid_spec(), kViewSourceTwice, false)) {
     return FormatViewSourceUrl(url, languages, format_types,
diff --git a/net/base/net_util_icu_unittest.cc b/net/base/net_util_icu_unittest.cc
index d92b9d1..9af0e2a 100644
--- a/net/base/net_util_icu_unittest.cc
+++ b/net/base/net_util_icu_unittest.cc
@@ -22,9 +22,9 @@
 
 namespace {
 
-static const size_t kNpos = base::string16::npos;
+const size_t kNpos = base::string16::npos;
 
-const char* kLanguages[] = {
+const char* const kLanguages[] = {
   "",      "en",    "zh-CN",    "ja",    "ko",
   "he",    "ar",    "ru",       "el",    "fr",
   "de",    "pt",    "sv",       "th",    "hi",
@@ -33,7 +33,7 @@
 };
 
 struct IDNTestCase {
-  const char* input;
+  const char* const input;
   const wchar_t* unicode_output;
   const bool unicode_allowed[arraysize(kLanguages)];
 };
@@ -354,9 +354,9 @@
 };
 
 struct UrlTestData {
-  const char* description;
-  const char* input;
-  const char* languages;
+  const char* const description;
+  const char* const input;
+  const char* const languages;
   FormatUrlTypes format_types;
   UnescapeRule::Type escape_rules;
   const wchar_t* output;  // Use |wchar_t| to handle Unicode constants easily.
@@ -455,11 +455,11 @@
 
 struct GetDirectoryListingEntryCase {
   const wchar_t* name;
-  const char* raw_bytes;
+  const char* const raw_bytes;
   bool is_dir;
   int64 filesize;
   base::Time time;
-  const char* expected;
+  const char* const expected;
 };
 
 }  // namespace
@@ -930,7 +930,7 @@
 TEST(NetUtilTest, FormatUrlRoundTripQueryEscaped) {
   // A full list of characters which FormatURL should unescape and GURL should
   // not escape again, when they appear in a query string.
-  const char* kUnescapedCharacters =
+  const char kUnescapedCharacters[] =
       "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_~";
   for (unsigned char test_char = 0; test_char < 128; ++test_char) {
     std::string original_url("http://www.google.com/?");
diff --git a/net/base/net_util_unittest.cc b/net/base/net_util_unittest.cc
index cb9ec1ffc..60fae68 100644
--- a/net/base/net_util_unittest.cc
+++ b/net/base/net_util_unittest.cc
@@ -61,8 +61,8 @@
 namespace {
 
 struct HeaderCase {
-  const char* header_name;
-  const char* expected;
+  const char* const header_name;
+  const char* const expected;
 };
 
 // Fills in sockaddr for the given 32-bit address (IPv4.)
@@ -154,9 +154,9 @@
 
 TEST(NetUtilTest, GetIdentityFromURL) {
   struct {
-    const char* input_url;
-    const char* expected_username;
-    const char* expected_password;
+    const char* const input_url;
+    const char* const expected_username;
+    const char* const expected_password;
   } tests[] = {
     {
       "http://username:password@google.com",
@@ -224,7 +224,7 @@
 }
 
 // Just a bunch of fake headers.
-const char* google_headers =
+const char google_headers[] =
     "HTTP/1.1 200 OK\n"
     "Content-TYPE: text/html; charset=utf-8\n"
     "Content-disposition: attachment; filename=\"download.pdf\"\n"
@@ -271,7 +271,7 @@
 
 TEST(NetUtilTest, CompliantHost) {
   struct CompliantHostCase {
-    const char* host;
+    const char* const host;
     bool expected_output;
   };
 
@@ -308,9 +308,9 @@
 
 TEST(NetUtilTest, ParseHostAndPort) {
   const struct {
-    const char* input;
+    const char* const input;
     bool success;
-    const char* expected_host;
+    const char* const expected_host;
     int expected_port;
   } tests[] = {
     // Valid inputs:
@@ -367,7 +367,7 @@
 TEST(NetUtilTest, GetHostAndPort) {
   const struct {
     GURL url;
-    const char* expected_host_and_port;
+    const char* const expected_host_and_port;
   } tests[] = {
     { GURL("http://www.foo.com/x"), "www.foo.com:80"},
     { GURL("http://www.foo.com:21/x"), "www.foo.com:21"},
@@ -385,7 +385,7 @@
 TEST(NetUtilTest, GetHostAndOptionalPort) {
   const struct {
     GURL url;
-    const char* expected_host_and_port;
+    const char* const expected_host_and_port;
   } tests[] = {
     { GURL("http://www.foo.com/x"), "www.foo.com"},
     { GURL("http://www.foo.com:21/x"), "www.foo.com:21"},
@@ -427,7 +427,7 @@
 TEST(NetUtilTest, NetAddressToString_IPv4) {
   const struct {
     uint8 addr[4];
-    const char* result;
+    const char* const result;
   } tests[] = {
     {{0, 0, 0, 0}, "0.0.0.0"},
     {{127, 0, 0, 1}, "127.0.0.1"},
@@ -445,7 +445,7 @@
 TEST(NetUtilTest, NetAddressToString_IPv6) {
   const struct {
     uint8 addr[16];
-    const char* result;
+    const char* const result;
   } tests[] = {
     {{0xFE, 0xDC, 0xBA, 0x98, 0x76, 0x54, 0x32, 0x10, 0xFE, 0xDC, 0xBA,
       0x98, 0x76, 0x54, 0x32, 0x10},
@@ -494,8 +494,8 @@
 
 TEST(NetUtilTest, SimplifyUrlForRequest) {
   struct {
-    const char* input_url;
-    const char* expected_simplified_url;
+    const char* const input_url;
+    const char* const expected_simplified_url;
   } tests[] = {
     {
       // Reference section should be stripped.
@@ -661,7 +661,7 @@
 
 // Test parsing invalid CIDR notation literals.
 TEST(NetUtilTest, ParseCIDRBlock_Invalid) {
-  const char* bad_literals[] = {
+  const char* const bad_literals[] = {
       "foobar",
       "",
       "192.168.0.1",
@@ -702,8 +702,8 @@
 
 TEST(NetUtilTest, IPNumberMatchesPrefix) {
   struct {
-    const char* cidr_literal;
-    const char* ip_literal;
+    const char* const cidr_literal;
+    const char* const ip_literal;
     bool expected_to_match;
   } tests[] = {
     // IPv4 prefix with IPv4 inputs.
@@ -1417,7 +1417,7 @@
 
 struct NonUniqueNameTestData {
   bool is_unique;
-  const char* hostname;
+  const char* const hostname;
 };
 
 // Google Test pretty-printer.
diff --git a/net/base/network_change_notifier.cc b/net/base/network_change_notifier.cc
index 34ec592..2e573dc 100644
--- a/net/base/network_change_notifier.cc
+++ b/net/base/network_change_notifier.cc
@@ -555,7 +555,7 @@
 // static
 const char* NetworkChangeNotifier::ConnectionTypeToString(
     ConnectionType type) {
-  static const char* kConnectionTypeNames[] = {
+  static const char* const kConnectionTypeNames[] = {
     "CONNECTION_UNKNOWN",
     "CONNECTION_ETHERNET",
     "CONNECTION_WIFI",
diff --git a/net/base/registry_controlled_domains/registry_controlled_domain_unittest.cc b/net/base/registry_controlled_domains/registry_controlled_domain_unittest.cc
index 22bd963..42af173 100644
--- a/net/base/registry_controlled_domains/registry_controlled_domain_unittest.cc
+++ b/net/base/registry_controlled_domains/registry_controlled_domain_unittest.cc
@@ -372,13 +372,13 @@
   // is about 100 bytes and a one byte offset can at most add 64 bytes to
   // previous offset. Thus the paths must go over two byte offsets.
 
-  const char* key0 =
+  const char key0[] =
       "a.b.6____________________________________________________"
       "________________________________________________6";
-  const char* key1 =
+  const char key1[] =
       "a.b.7____________________________________________________"
       "________________________________________________7";
-  const char* key2 =
+  const char key2[] =
       "a.b.a____________________________________________________"
       "________________________________________________8";
 
@@ -405,13 +405,13 @@
   // probability at least one of the tested paths has go over a three byte
   // offset.
 
-  const char* key0 =
+  const char key0[] =
       "a.b.Z6___________________________________________________"
       "_________________________________________________Z6";
-  const char* key1 =
+  const char key1[] =
       "a.b.Z7___________________________________________________"
       "_________________________________________________Z7";
-  const char* key2 =
+  const char key2[] =
       "a.b.Za___________________________________________________"
       "_________________________________________________Z8";
 
@@ -431,14 +431,14 @@
   // suffixes. The DAFSA will then form a trie with the implicit source node
   // as root.
 
-  const char* key0 = "a.b.ai";
-  const char* key1 = "a.b.bj";
-  const char* key2 = "a.b.aak";
-  const char* key3 = "a.b.bbl";
-  const char* key4 = "a.b.aaa";
-  const char* key5 = "a.b.bbb";
-  const char* key6 = "a.b.aaaam";
-  const char* key7 = "a.b.bbbbn";
+  const char key0[] = "a.b.ai";
+  const char key1[] = "a.b.bj";
+  const char key2[] = "a.b.aak";
+  const char key3[] = "a.b.bbl";
+  const char key4[] = "a.b.aaa";
+  const char key5[] = "a.b.bbb";
+  const char key6[] = "a.b.aaaam";
+  const char key7[] = "a.b.bbbbn";
 
   EXPECT_EQ(2U, GetRegistryLengthFromHost(key0, EXCLUDE_UNKNOWN_REGISTRIES));
   EXPECT_EQ(0U, GetRegistryLengthFromHost(key1, EXCLUDE_UNKNOWN_REGISTRIES));
@@ -468,14 +468,14 @@
   // prefixes. The DAFSA will then form a trie with the implicit sink node as
   // root.
 
-  const char* key0 = "a.b.ia";
-  const char* key1 = "a.b.jb";
-  const char* key2 = "a.b.kaa";
-  const char* key3 = "a.b.lbb";
-  const char* key4 = "a.b.aaa";
-  const char* key5 = "a.b.bbb";
-  const char* key6 = "a.b.maaaa";
-  const char* key7 = "a.b.nbbbb";
+  const char key0[] = "a.b.ia";
+  const char key1[] = "a.b.jb";
+  const char key2[] = "a.b.kaa";
+  const char key3[] = "a.b.lbb";
+  const char key4[] = "a.b.aaa";
+  const char key5[] = "a.b.bbb";
+  const char key6[] = "a.b.maaaa";
+  const char key7[] = "a.b.nbbbb";
 
   EXPECT_EQ(2U, GetRegistryLengthFromHost(key0, EXCLUDE_UNKNOWN_REGISTRIES));
   EXPECT_EQ(0U, GetRegistryLengthFromHost(key1, EXCLUDE_UNKNOWN_REGISTRIES));
diff --git a/net/cert/x509_cert_types_win.cc b/net/cert/x509_cert_types_win.cc
index 8a456f3b..f5f426d 100644
--- a/net/cert/x509_cert_types_win.cc
+++ b/net/cert/x509_cert_types_win.cc
@@ -21,7 +21,7 @@
 
 // A list of OIDs to decode. Any OID not on this list will be ignored for
 // purposes of parsing.
-const char* kOIDs[] = {
+const char* const kOIDs[] = {
   szOID_COMMON_NAME,
   szOID_LOCALITY_NAME,
   szOID_STATE_OR_PROVINCE_NAME,
diff --git a/net/cookies/cookie_constants_unittest.cc b/net/cookies/cookie_constants_unittest.cc
index f14f11e1..87fa1c1b 100644
--- a/net/cookies/cookie_constants_unittest.cc
+++ b/net/cookies/cookie_constants_unittest.cc
@@ -31,7 +31,8 @@
   EXPECT_LT(COOKIE_PRIORITY_MEDIUM, COOKIE_PRIORITY_HIGH);
 
   // Unrecognized tokens are interpreted as COOKIE_PRIORITY_DEFAULT.
-  const char* bad_tokens[] = {"", "lo", "lowerest", "high ", " high", "0"};
+  const char* const bad_tokens[] = {
+    "", "lo", "lowerest", "high ", " high", "0"};
   for (size_t i = 0; i < arraysize(bad_tokens); ++i) {
     EXPECT_EQ(COOKIE_PRIORITY_DEFAULT, StringToCookiePriority(bad_tokens[i]));
   }
diff --git a/net/cookies/cookie_monster_perftest.cc b/net/cookies/cookie_monster_perftest.cc
index 6e799ad..40dc9463 100644
--- a/net/cookies/cookie_monster_perftest.cc
+++ b/net/cookies/cookie_monster_perftest.cc
@@ -186,7 +186,7 @@
   scoped_refptr<CookieMonster> cm(new CookieMonster(NULL, NULL));
   GetCookiesCallback getCookiesCallback;
   SetCookieCallback setCookieCallback;
-  const char* domain_cookie_format_tree = "a=b; domain=%s";
+  const char domain_cookie_format_tree[] = "a=b; domain=%s";
   const std::string domain_base("top.com");
 
   std::vector<std::string> domain_list;
@@ -256,7 +256,7 @@
   domain_list.push_back("b.a.b.a.top.com");
   EXPECT_EQ(4u, domain_list.size());
 
-  const char* domain_cookie_format_line = "a%03d=b; domain=%s";
+  const char domain_cookie_format_line[] = "a%03d=b; domain=%s";
   for (int i = 0; i < 8; i++) {
     for (std::vector<std::string>::const_iterator it = domain_list.begin();
          it != domain_list.end(); it++) {
@@ -331,7 +331,7 @@
   SetCookieCallback setCookieCallback;
 
   const struct TestCase {
-    const char* name;
+    const char* const name;
     size_t num_cookies;
     size_t num_old_cookies;
   } test_cases[] = {
diff --git a/net/cookies/cookie_monster_unittest.cc b/net/cookies/cookie_monster_unittest.cc
index 633bca59..d5067ae7 100644
--- a/net/cookies/cookie_monster_unittest.cc
+++ b/net/cookies/cookie_monster_unittest.cc
@@ -61,12 +61,12 @@
   virtual ~NewMockPersistentCookieStore() {}
 };
 
-const char* kTopLevelDomainPlus1 = "http://www.harvard.edu";
-const char* kTopLevelDomainPlus2 = "http://www.math.harvard.edu";
-const char* kTopLevelDomainPlus2Secure = "https://www.math.harvard.edu";
-const char* kTopLevelDomainPlus3 =
+const char kTopLevelDomainPlus1[] = "http://www.harvard.edu";
+const char kTopLevelDomainPlus2[] = "http://www.math.harvard.edu";
+const char kTopLevelDomainPlus2Secure[] = "https://www.math.harvard.edu";
+const char kTopLevelDomainPlus3[] =
     "http://www.bourbaki.math.harvard.edu";
-const char* kOtherDomain = "http://www.mit.edu";
+const char kOtherDomain[] = "http://www.mit.edu";
 const char kUrlGoogleSpecific[] = "http://www.gmail.google.izzle";
 
 class GetCookieListCallback : public CookieCallback {
@@ -1252,7 +1252,7 @@
   scoped_refptr<CookieMonster> cm_foo(new CookieMonster(NULL, NULL));
 
   // Only cm_foo should allow foo:// cookies.
-  const char* kSchemes[] = {"foo"};
+  const char* const kSchemes[] = {"foo"};
   cm_foo->SetCookieableSchemes(kSchemes, 1);
 
   GURL foo_url("foo://host/path");
diff --git a/net/cookies/cookie_util.cc b/net/cookies/cookie_util.cc
index 9588018..075ad73c 100644
--- a/net/cookies/cookie_util.cc
+++ b/net/cookies/cookie_util.cc
@@ -92,8 +92,9 @@
 // An average cookie expiration will look something like this:
 //   Sat, 15-Apr-17 21:01:22 GMT
 base::Time ParseCookieTime(const std::string& time_string) {
-  static const char* kMonths[] = { "jan", "feb", "mar", "apr", "may", "jun",
-                                   "jul", "aug", "sep", "oct", "nov", "dec" };
+  static const char* const kMonths[] = {
+    "jan", "feb", "mar", "apr", "may", "jun",
+    "jul", "aug", "sep", "oct", "nov", "dec" };
   static const int kMonthsLen = arraysize(kMonths);
   // We want to be pretty liberal, and support most non-ascii and non-digit
   // characters as a delimiter.  We can't treat : as a delimiter, because it
@@ -102,7 +103,7 @@
   // If the cookie attribute came in in quotes (ex expires="XXX"), the quotes
   // will be preserved, and we will get them here.  So we make sure to include
   // quote characters, and also \ for anything that was internally escaped.
-  static const char* kDelimiters = "\t !\"#$%&'()*+,-./;<=>?@[\\]^_`{|}~";
+  static const char kDelimiters[] = "\t !\"#$%&'()*+,-./;<=>?@[\\]^_`{|}~";
 
   base::Time::Exploded exploded = {0};
 
diff --git a/net/cookies/parsed_cookie_unittest.cc b/net/cookies/parsed_cookie_unittest.cc
index 0f571551..30d41eb 100644
--- a/net/cookies/parsed_cookie_unittest.cc
+++ b/net/cookies/parsed_cookie_unittest.cc
@@ -38,7 +38,7 @@
   // handle differently.  I've tested Internet Explorer 6, Opera 9.6,
   // Firefox 3, and Safari Windows 3.2.1.  We originally tried to match
   // Firefox closely, however we now match Internet Explorer and Safari.
-  const char* values[] = {
+  const char* const values[] = {
     // Trailing whitespace after a quoted value.  The whitespace after
     // the quote is stripped in all browsers.
     "\"zzz \"  ",              "\"zzz \"",
@@ -471,14 +471,14 @@
 TEST(ParsedCookieTest, ValidNonAlphanumericChars) {
   // Note that some of these words are pasted backwords thanks to poor vim bidi
   // support. This should not affect the tests, however.
-  const char* pc1_literal = "name=العربية";
-  const char* pc2_literal = "name=普通話";
-  const char* pc3_literal = "name=ภาษาไทย";
-  const char* pc4_literal = "name=עִבְרִית";
-  const char* pc5_literal = "العربية=value";
-  const char* pc6_literal = "普通話=value";
-  const char* pc7_literal = "ภาษาไทย=value";
-  const char* pc8_literal = "עִבְרִית=value";
+  const char pc1_literal[] = "name=العربية";
+  const char pc2_literal[] = "name=普通話";
+  const char pc3_literal[] = "name=ภาษาไทย";
+  const char pc4_literal[] = "name=עִבְרִית";
+  const char pc5_literal[] = "العربية=value";
+  const char pc6_literal[] = "普通話=value";
+  const char pc7_literal[] = "ภาษาไทย=value";
+  const char pc8_literal[] = "עִבְרִית=value";
   ParsedCookie pc1(pc1_literal);
   ParsedCookie pc2(pc2_literal);
   ParsedCookie pc3(pc3_literal);
diff --git a/net/disk_cache/backend_unittest.cc b/net/disk_cache/backend_unittest.cc
index 3b1f669..9c06839 100644
--- a/net/disk_cache/backend_unittest.cc
+++ b/net/disk_cache/backend_unittest.cc
@@ -349,8 +349,8 @@
 
 void DiskCacheBackendTest::BackendKeying() {
   InitCache();
-  const char* kName1 = "the first key";
-  const char* kName2 = "the first Key";
+  const char kName1[] = "the first key";
+  const char kName2[] = "the first Key";
   disk_cache::Entry *entry1, *entry2;
   ASSERT_EQ(net::OK, CreateEntry(kName1, &entry1));
 
@@ -3231,7 +3231,7 @@
   SetSimpleCacheMode();
   InitCache();
 
-  const char* key = "the first key";
+  const char key[] = "the first key";
   disk_cache::Entry* entry = NULL;
 
   ASSERT_EQ(net::OK, CreateEntry(key, &entry));
@@ -3267,7 +3267,7 @@
   SetSimpleCacheMode();
   InitCache();
 
-  const char* key = "the first key";
+  const char key[] = "the first key";
   disk_cache::Entry* entry = NULL;
 
   ASSERT_EQ(net::OK, CreateEntry(key, &entry));
diff --git a/net/disk_cache/blockfile/backend_impl.cc b/net/disk_cache/blockfile/backend_impl.cc
index bf76842c..980249bf 100644
--- a/net/disk_cache/blockfile/backend_impl.cc
+++ b/net/disk_cache/blockfile/backend_impl.cc
@@ -41,7 +41,7 @@
 
 namespace {
 
-const char* kIndexName = "index";
+const char kIndexName[] = "index";
 
 // Seems like ~240 MB correspond to less than 50k entries for 99% of the people.
 // Note that the actual target is to keep the index table load factor under 55%
diff --git a/net/disk_cache/blockfile/backend_impl_v3.cc b/net/disk_cache/blockfile/backend_impl_v3.cc
index 0b0e2326..6d73d07 100644
--- a/net/disk_cache/blockfile/backend_impl_v3.cc
+++ b/net/disk_cache/blockfile/backend_impl_v3.cc
@@ -230,7 +230,8 @@
 }
 
 std::string BackendImplV3::HistogramName(const char* name) const {
-  static const char* names[] = { "Http", "", "Media", "AppCache", "Shader" };
+  static const char* const names[] = {
+    "Http", "", "Media", "AppCache", "Shader" };
   DCHECK_NE(cache_type_, net::MEMORY_CACHE);
   return base::StringPrintf("DiskCache3.%s_%s", name, names[cache_type_]);
 }
diff --git a/net/disk_cache/blockfile/backend_worker_v3.cc b/net/disk_cache/blockfile/backend_worker_v3.cc
index 1d89de6..b0f3250 100644
--- a/net/disk_cache/blockfile/backend_worker_v3.cc
+++ b/net/disk_cache/blockfile/backend_worker_v3.cc
@@ -26,7 +26,7 @@
 
 #if defined(V3_NOT_JUST_YET_READY)
 
-const char* kIndexName = "index";
+const char kIndexName[] = "index";
 
 // Seems like ~240 MB correspond to less than 50k entries for 99% of the people.
 // Note that the actual target is to keep the index table load factor under 55%
diff --git a/net/disk_cache/blockfile/block_files.cc b/net/disk_cache/blockfile/block_files.cc
index 525141b..9aa8f0f1 100644
--- a/net/disk_cache/blockfile/block_files.cc
+++ b/net/disk_cache/blockfile/block_files.cc
@@ -20,7 +20,7 @@
 
 namespace {
 
-const char* kBlockName = "data_";
+const char kBlockName[] = "data_";
 
 // This array is used to perform a fast lookup of the nibble bit pattern to the
 // type of entry that can be stored there (number of consecutive blocks).
diff --git a/net/disk_cache/blockfile/stats.cc b/net/disk_cache/blockfile/stats.cc
index fae006d..e6b10b8 100644
--- a/net/disk_cache/blockfile/stats.cc
+++ b/net/disk_cache/blockfile/stats.cc
@@ -43,7 +43,7 @@
 }
 
 // WARNING: Add new stats only at the end, or change LoadStats().
-static const char* kCounterNames[] = {
+const char* const kCounterNames[] = {
   "Open miss",
   "Open hit",
   "Create miss",
diff --git a/net/dns/address_sorter_posix_unittest.cc b/net/dns/address_sorter_posix_unittest.cc
index 5dcfe709..88a135fe 100644
--- a/net/dns/address_sorter_posix_unittest.cc
+++ b/net/dns/address_sorter_posix_unittest.cc
@@ -151,9 +151,9 @@
 
   // Verify that NULL-terminated |addresses| matches (-1)-terminated |order|
   // after sorting.
-  void Verify(const char* addresses[], const int order[]) {
+  void Verify(const char* const addresses[], const int order[]) {
     AddressList list;
-    for (const char** addr = addresses; *addr != NULL; ++addr)
+    for (const char* const* addr = addresses; *addr != NULL; ++addr)
       list.push_back(IPEndPoint(ParseIP(*addr), 80));
     for (size_t i = 0; order[i] >= 0; ++i)
       CHECK_LT(order[i], static_cast<int>(list.size()));
@@ -181,7 +181,7 @@
 // Rule 1: Avoid unusable destinations.
 TEST_F(AddressSorterPosixTest, Rule1) {
   AddMapping("10.0.0.231", "10.0.0.1");
-  const char* addresses[] = { "::1", "10.0.0.231", "127.0.0.1", NULL };
+  const char* const addresses[] = { "::1", "10.0.0.231", "127.0.0.1", NULL };
   const int order[] = { 1, -1 };
   Verify(addresses, order);
 }
@@ -196,11 +196,11 @@
   AddMapping("8.0.0.1", "169.254.0.10");  // global vs. link-local
   // In all three cases, matching scope is preferred.
   const int order[] = { 1, 0, -1 };
-  const char* addresses1[] = { "3002::2", "3002::1", NULL };
+  const char* const addresses1[] = { "3002::2", "3002::1", NULL };
   Verify(addresses1, order);
-  const char* addresses2[] = { "fec1::2", "ff32::1", NULL };
+  const char* const addresses2[] = { "fec1::2", "ff32::1", NULL };
   Verify(addresses2, order);
-  const char* addresses3[] = { "8.0.0.1", "fec1::1", NULL };
+  const char* const addresses3[] = { "8.0.0.1", "fec1::1", NULL };
   Verify(addresses3, order);
 }
 
@@ -210,7 +210,7 @@
   AddMapping("3002::1", "4000::10");
   GetSourceInfo("4000::10")->deprecated = true;
   AddMapping("3002::2", "4000::20");
-  const char* addresses[] = { "3002::1", "3002::2", NULL };
+  const char* const addresses[] = { "3002::1", "3002::2", NULL };
   const int order[] = { 1, 0, -1 };
   Verify(addresses, order);
 }
@@ -220,7 +220,7 @@
   AddMapping("3002::1", "4000::10");
   AddMapping("3002::2", "4000::20");
   GetSourceInfo("4000::20")->home = true;
-  const char* addresses[] = { "3002::1", "3002::2", NULL };
+  const char* const addresses[] = { "3002::1", "3002::2", NULL };
   const int order[] = { 1, 0, -1 };
   Verify(addresses, order);
 }
@@ -233,11 +233,11 @@
   AddMapping("2002::1", "2001::10");              // 6to4 vs. Teredo
   const int order[] = { 1, 0, -1 };
   {
-    const char* addresses[] = { "2001::1", "::1", NULL };
+    const char* const addresses[] = { "2001::1", "::1", NULL };
     Verify(addresses, order);
   }
   {
-    const char* addresses[] = { "2002::1", "::ffff:1234:1", NULL };
+    const char* const addresses[] = { "2002::1", "::ffff:1234:1", NULL };
     Verify(addresses, order);
   }
 }
@@ -248,8 +248,8 @@
   AddMapping("ff32::1", "fe81::10");              // multicast
   AddMapping("::ffff:1234:1", "::ffff:1234:10");  // IPv4-mapped
   AddMapping("2001::1", "2001::10");              // Teredo
-  const char* addresses[] = { "2001::1", "::ffff:1234:1", "ff32::1", "::1",
-                              NULL };
+  const char* const addresses[] = { "2001::1", "::ffff:1234:1", "ff32::1",
+    "::1", NULL };
   const int order[] = { 3, 2, 1, 0, -1 };
   Verify(addresses, order);
 }
@@ -259,7 +259,7 @@
   AddMapping("3002::1", "4000::10");
   AddMapping("3002::2", "4000::20");
   GetSourceInfo("4000::20")->native = true;
-  const char* addresses[] = { "3002::1", "3002::2", NULL };
+  const char* const addresses[] = { "3002::1", "3002::2", NULL };
   const int order[] = { 1, 0, -1 };
   Verify(addresses, order);
 }
@@ -273,8 +273,8 @@
   AddMapping("ff32::1", "4000::10");  // link-local
   AddMapping("ff35::1", "4000::10");  // site-local
   AddMapping("ff38::1", "4000::10");  // org-local
-  const char* addresses[] = { "ff38::1", "3000::1", "ff35::1", "ff32::1",
-                              "fe81::1", NULL };
+  const char* const addresses[] = { "ff38::1", "3000::1", "ff35::1", "ff32::1",
+                                    "fe81::1", NULL };
   const int order[] = { 4, 1, 3, 2, 0, -1 };
   Verify(addresses, order);
 }
@@ -287,8 +287,8 @@
   GetSourceInfo("4000::10")->prefix_length = 15;
   AddMapping("4002::1", "4000::10");       // 14 bit match
   AddMapping("4080::1", "4000::10");       // 8 bit match
-  const char* addresses[] = { "4080::1", "4002::1", "4000::1", "3000::1",
-                              NULL };
+  const char* const addresses[] = { "4080::1", "4002::1", "4000::1", "3000::1",
+                                    NULL };
   const int order[] = { 3, 2, 1, 0, -1 };
   Verify(addresses, order);
 }
@@ -298,7 +298,7 @@
   AddMapping("4000::1", "4000::10");
   AddMapping("4000::2", "4000::10");
   AddMapping("4000::3", "4000::10");
-  const char* addresses[] = { "4000::1", "4000::2", "4000::3", NULL };
+  const char* const addresses[] = { "4000::1", "4000::2", "4000::3", NULL };
   const int order[] = { 0, 1, 2, -1 };
   Verify(addresses, order);
 }
@@ -310,8 +310,8 @@
   AddMapping("4000::1", "4000::10");  // global unicast
   AddMapping("ff32::2", "fe81::20");  // deprecated link-local multicast
   GetSourceInfo("fe81::20")->deprecated = true;
-  const char* addresses[] = { "ff3e::1", "ff32::2", "4000::1", "ff32::1", "::1",
-                              "8.0.0.1", NULL };
+  const char* const addresses[] = { "ff3e::1", "ff32::2", "4000::1", "ff32::1",
+                                    "::1", "8.0.0.1", NULL };
   const int order[] = { 4, 3, 0, 2, 1, -1 };
   Verify(addresses, order);
 }
diff --git a/net/dns/dns_config_service_posix_unittest.cc b/net/dns/dns_config_service_posix_unittest.cc
index 1bfbe089..b208ffd7 100644
--- a/net/dns/dns_config_service_posix_unittest.cc
+++ b/net/dns/dns_config_service_posix_unittest.cc
@@ -15,7 +15,7 @@
 namespace {
 
 // MAXNS is normally 3, but let's test 4 if possible.
-const char* kNameserversIPv4[] = {
+const char* const kNameserversIPv4[] = {
     "8.8.8.8",
     "192.168.1.1",
     "63.1.2.4",
@@ -23,7 +23,7 @@
 };
 
 #if defined(OS_LINUX)
-const char* kNameserversIPv6[] = {
+const char* const kNameserversIPv6[] = {
     NULL,
     "2001:DB8:0::42",
     NULL,
diff --git a/net/ftp/ftp_directory_listing_parser.cc b/net/ftp/ftp_directory_listing_parser.cc
index fa6e8a5386..31cb462 100644
--- a/net/ftp/ftp_directory_listing_parser.cc
+++ b/net/ftp/ftp_directory_listing_parser.cc
@@ -95,7 +95,7 @@
                    const base::Time& current_time,
                    std::vector<FtpDirectoryListingEntry>* entries,
                    FtpServerType* server_type) {
-  const char* kNewlineSeparators[] = { "\n", "\r\n" };
+  const char* const kNewlineSeparators[] = { "\n", "\r\n" };
 
   std::vector<std::string> encodings;
   if (!base::DetectAllEncodings(text, &encodings))
diff --git a/net/ftp/ftp_directory_listing_parser_ls_unittest.cc b/net/ftp/ftp_directory_listing_parser_ls_unittest.cc
index 313ef664..6657de9b 100644
--- a/net/ftp/ftp_directory_listing_parser_ls_unittest.cc
+++ b/net/ftp/ftp_directory_listing_parser_ls_unittest.cc
@@ -167,7 +167,7 @@
 }
 
 TEST_F(FtpDirectoryListingParserLsTest, Ignored) {
-  const char* ignored_cases[] = {
+  const char* const ignored_cases[] = {
     "drwxr-xr-x 2 0 0 4096 Mar 18  2007  ",  // http://crbug.com/60065
 
     "ftpd: .: Permission denied",
@@ -194,7 +194,7 @@
 }
 
 TEST_F(FtpDirectoryListingParserLsTest, Bad) {
-  const char* bad_cases[] = {
+  const char* const bad_cases[] = {
     " foo",
     "garbage",
     "-rw-r--r-- ftp ftp",
diff --git a/net/ftp/ftp_directory_listing_parser_netware_unittest.cc b/net/ftp/ftp_directory_listing_parser_netware_unittest.cc
index 102552c..11c695f 100644
--- a/net/ftp/ftp_directory_listing_parser_netware_unittest.cc
+++ b/net/ftp/ftp_directory_listing_parser_netware_unittest.cc
@@ -47,7 +47,7 @@
 }
 
 TEST_F(FtpDirectoryListingParserNetwareTest, Bad) {
-  const char* bad_cases[] = {
+  const char* const bad_cases[] = {
     " foo",
     "garbage",
     "d [] ftpadmin 512 Jan 29  2004 pub",
diff --git a/net/ftp/ftp_directory_listing_parser_os2_unittest.cc b/net/ftp/ftp_directory_listing_parser_os2_unittest.cc
index 7302f4c..3288a22 100644
--- a/net/ftp/ftp_directory_listing_parser_os2_unittest.cc
+++ b/net/ftp/ftp_directory_listing_parser_os2_unittest.cc
@@ -64,7 +64,7 @@
 }
 
 TEST_F(FtpDirectoryListingParserOS2Test, Ignored) {
-  const char* ignored_cases[] = {
+  const char* const ignored_cases[] = {
     "1234 A 12-07-10  12:05",
     "0 DIR 11-02-09  05:32",
   };
@@ -81,7 +81,7 @@
 }
 
 TEST_F(FtpDirectoryListingParserOS2Test, Bad) {
-  const char* bad_cases[] = {
+  const char* const bad_cases[] = {
     "garbage",
     "0 GARBAGE 11-02-09  05:32",
     "0 GARBAGE 11-02-09  05:32       NT",
diff --git a/net/ftp/ftp_directory_listing_parser_unittest.cc b/net/ftp/ftp_directory_listing_parser_unittest.cc
index 70795e5..79af5c3 100644
--- a/net/ftp/ftp_directory_listing_parser_unittest.cc
+++ b/net/ftp/ftp_directory_listing_parser_unittest.cc
@@ -104,7 +104,7 @@
   }
 }
 
-const char* kTestFiles[] = {
+const char* const kTestFiles[] = {
   "dir-listing-ls-1",
   "dir-listing-ls-1-utf8",
   "dir-listing-ls-2",
diff --git a/net/ftp/ftp_directory_listing_parser_vms.cc b/net/ftp/ftp_directory_listing_parser_vms.cc
index c568c0a..05e687f 100644
--- a/net/ftp/ftp_directory_listing_parser_vms.cc
+++ b/net/ftp/ftp_directory_listing_parser_vms.cc
@@ -133,7 +133,7 @@
 }
 
 bool LooksLikeVMSError(const base::string16& text) {
-  static const char* kPermissionDeniedMessages[] = {
+  static const char* const kPermissionDeniedMessages[] = {
     "%RMS-E-FNF",  // File not found.
     "%RMS-E-PRV",  // Access denied.
     "%SYSTEM-F-NOPRIV",
diff --git a/net/ftp/ftp_directory_listing_parser_vms_unittest.cc b/net/ftp/ftp_directory_listing_parser_vms_unittest.cc
index cef86c0..08ee9e1 100644
--- a/net/ftp/ftp_directory_listing_parser_vms_unittest.cc
+++ b/net/ftp/ftp_directory_listing_parser_vms_unittest.cc
@@ -75,7 +75,7 @@
 }
 
 TEST_F(FtpDirectoryListingParserVmsTest, Bad) {
-  const char* bad_cases[] = {
+  const char* const bad_cases[] = {
     "garbage",
 
     // Missing file version number.
@@ -128,7 +128,7 @@
 }
 
 TEST_F(FtpDirectoryListingParserVmsTest, BadDataAfterFooter) {
-  const char* bad_cases[] = {
+  const char* const bad_cases[] = {
     "garbage",
     "Total of 1 file, 2 blocks.",
     "Directory ANYNYMOUS_ROOT:[000000]",
diff --git a/net/ftp/ftp_directory_listing_parser_windows_unittest.cc b/net/ftp/ftp_directory_listing_parser_windows_unittest.cc
index d3d47da..1c6234e 100644
--- a/net/ftp/ftp_directory_listing_parser_windows_unittest.cc
+++ b/net/ftp/ftp_directory_listing_parser_windows_unittest.cc
@@ -70,7 +70,7 @@
 }
 
 TEST_F(FtpDirectoryListingParserWindowsTest, Ignored) {
-  const char* ignored_cases[] = {
+  const char* const ignored_cases[] = {
     "12-07-10  12:05AM       <DIR>    ",  // http://crbug.com/66097
     "12-07-10  12:05AM       1234    ",
     "11-02-09  05:32         <DIR>",
@@ -89,7 +89,7 @@
 }
 
 TEST_F(FtpDirectoryListingParserWindowsTest, Bad) {
-  const char* bad_cases[] = {
+  const char* const bad_cases[] = {
     "garbage",
     "11-02-09  05:32PM       <GARBAGE>",
     "11-02-09  05:32PM       <GARBAGE>      NT",
diff --git a/net/ftp/ftp_network_transaction_unittest.cc b/net/ftp/ftp_network_transaction_unittest.cc
index 4ee812d..ad748c9d 100644
--- a/net/ftp/ftp_network_transaction_unittest.cc
+++ b/net/ftp/ftp_network_transaction_unittest.cc
@@ -72,8 +72,9 @@
                       "331 Password needed\r\n");
       case PRE_PASSWD:
         {
-          const char* response_one = "230 Welcome\r\n";
-          const char* response_multi = "230- One\r\n230- Two\r\n230 Three\r\n";
+          static const char response_one[] = "230 Welcome\r\n";
+          static const char response_multi[] =
+              "230- One\r\n230- Two\r\n230 Three\r\n";
           return Verify("PASS chrome@example.com\r\n", data, PRE_SYST,
                         multiline_welcome_ ? response_multi : response_one);
         }
diff --git a/net/http/http_auth_cache_unittest.cc b/net/http/http_auth_cache_unittest.cc
index 3565b0c..18866eaa 100644
--- a/net/http/http_auth_cache_unittest.cc
+++ b/net/http/http_auth_cache_unittest.cc
@@ -55,11 +55,11 @@
   ~MockAuthHandler() override {}
 };
 
-const char* kRealm1 = "Realm1";
-const char* kRealm2 = "Realm2";
-const char* kRealm3 = "Realm3";
-const char* kRealm4 = "Realm4";
-const char* kRealm5 = "Realm5";
+const char kRealm1[] = "Realm1";
+const char kRealm2[] = "Realm2";
+const char kRealm3[] = "Realm3";
+const char kRealm4[] = "Realm4";
+const char kRealm5[] = "Realm5";
 const base::string16 k123(ASCIIToUTF16("123"));
 const base::string16 k1234(ASCIIToUTF16("1234"));
 const base::string16 kAdmin(ASCIIToUTF16("admin"));
diff --git a/net/http/http_cache_unittest.cc b/net/http/http_cache_unittest.cc
index 16a5523..cee0c4a5 100644
--- a/net/http/http_cache_unittest.cc
+++ b/net/http/http_cache_unittest.cc
@@ -2323,7 +2323,7 @@
   MockHttpCache cache;
 
   // The URL we will be requesting.
-  const char* kUrl = "http://foobar.com/main.css";
+  const char kUrl[] = "http://foobar.com/main.css";
 
   // Junk network response.
   static const Response kUnexpectedResponse = {
@@ -2434,7 +2434,7 @@
     "body2"
   };
 
-  const char* extra_headers =
+  const char extra_headers[] =
       "If-Modified-Since: Wed, 06 Feb 2008 22:38:21 GMT\r\n";
 
   ConditionalizedRequestUpdatesCacheHelper(
@@ -2462,7 +2462,7 @@
     "body2"
   };
 
-  const char* extra_headers = "If-None-Match: \"ETAG1\"\r\n";
+  const char extra_headers[] = "If-None-Match: \"ETAG1\"\r\n";
 
   ConditionalizedRequestUpdatesCacheHelper(
       kNetResponse1, kNetResponse2, kNetResponse2, extra_headers);
@@ -2498,7 +2498,7 @@
     "body1"
   };
 
-  const char* extra_headers =
+  const char extra_headers[] =
       "If-Modified-Since: Wed, 06 Feb 2008 22:38:21 GMT\r\n";
 
   ConditionalizedRequestUpdatesCacheHelper(
@@ -2511,7 +2511,7 @@
 TEST(HttpCache, ConditionalizedRequestUpdatesCache4) {
   MockHttpCache cache;
 
-  const char* kUrl = "http://foobar.com/main.css";
+  const char kUrl[] = "http://foobar.com/main.css";
 
   static const Response kNetResponse = {
     "HTTP/1.1 304 Not Modified",
@@ -2520,7 +2520,7 @@
     ""
   };
 
-  const char* kExtraRequestHeaders =
+  const char kExtraRequestHeaders[] =
       "If-Modified-Since: Wed, 06 Feb 2008 22:38:21 GMT\r\n";
 
   // We will control the network layer's responses for |kUrl| using
@@ -2555,7 +2555,7 @@
 TEST(HttpCache, ConditionalizedRequestUpdatesCache5) {
   MockHttpCache cache;
 
-  const char* kUrl = "http://foobar.com/main.css";
+  const char kUrl[] = "http://foobar.com/main.css";
 
   static const Response kNetResponse = {
     "HTTP/1.1 200 OK",
@@ -2564,7 +2564,7 @@
     "foobar!!!"
   };
 
-  const char* kExtraRequestHeaders =
+  const char kExtraRequestHeaders[] =
       "If-Modified-Since: Wed, 06 Feb 2008 22:38:21 GMT\r\n";
 
   // We will control the network layer's responses for |kUrl| using
@@ -2617,7 +2617,7 @@
 
   // This is two days in the future from the original response's last-modified
   // date!
-  const char* kExtraRequestHeaders =
+  const char kExtraRequestHeaders[] =
       "If-Modified-Since: Fri, 08 Feb 2008 22:38:21 GMT\r\n";
 
   ConditionalizedRequestUpdatesCacheHelper(
@@ -2646,7 +2646,7 @@
   };
 
   // Different etag from original response.
-  const char* kExtraRequestHeaders = "If-None-Match: \"Foo2\"\r\n";
+  const char kExtraRequestHeaders[] = "If-None-Match: \"Foo2\"\r\n";
 
   ConditionalizedRequestUpdatesCacheHelper(
       kNetResponse1, kNetResponse2, kNetResponse1, kExtraRequestHeaders);
@@ -2672,7 +2672,7 @@
     "body2"
   };
 
-  const char* kExtraRequestHeaders =
+  const char kExtraRequestHeaders[] =
       "If-Modified-Since: Wed, 06 Feb 2008 22:38:21 GMT\r\n"
       "If-None-Match: \"Foo1\"\r\n";
 
@@ -2701,7 +2701,7 @@
   };
 
   // The etag doesn't match what we have stored.
-  const char* kExtraRequestHeaders =
+  const char kExtraRequestHeaders[] =
       "If-Modified-Since: Wed, 06 Feb 2008 22:38:21 GMT\r\n"
       "If-None-Match: \"Foo2\"\r\n";
 
@@ -2730,7 +2730,7 @@
   };
 
   // The modification date doesn't match what we have stored.
-  const char* kExtraRequestHeaders =
+  const char kExtraRequestHeaders[] =
       "If-Modified-Since: Fri, 08 Feb 2008 22:38:21 GMT\r\n"
       "If-None-Match: \"Foo1\"\r\n";
 
@@ -6366,8 +6366,8 @@
 TEST(HttpCache, UpdatesRequestResponseTimeOn304) {
   MockHttpCache cache;
 
-  const char* kUrl = "http://foobar";
-  const char* kData = "body";
+  const char kUrl[] = "http://foobar";
+  const char kData[] = "body";
 
   MockTransaction mock_network_response = { 0 };
   mock_network_response.url = kUrl;
diff --git a/net/http/http_chunked_decoder_unittest.cc b/net/http/http_chunked_decoder_unittest.cc
index f3e8300..db32c6f 100644
--- a/net/http/http_chunked_decoder_unittest.cc
+++ b/net/http/http_chunked_decoder_unittest.cc
@@ -15,7 +15,8 @@
 
 typedef testing::Test HttpChunkedDecoderTest;
 
-void RunTest(const char* inputs[], size_t num_inputs,
+void RunTest(const char* const inputs[],
+             size_t num_inputs,
              const char* expected_output,
              bool expected_eof,
              int bytes_after_eof) {
@@ -38,7 +39,7 @@
 }
 
 // Feed the inputs to the decoder, until it returns an error.
-void RunTestUntilFailure(const char* inputs[],
+void RunTestUntilFailure(const char* const inputs[],
                          size_t num_inputs,
                          size_t fail_index) {
   HttpChunkedDecoder decoder;
@@ -57,21 +58,21 @@
 }
 
 TEST(HttpChunkedDecoderTest, Basic) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "B\r\nhello hello\r\n0\r\n\r\n"
   };
   RunTest(inputs, arraysize(inputs), "hello hello", true, 0);
 }
 
 TEST(HttpChunkedDecoderTest, OneChunk) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5\r\nhello\r\n"
   };
   RunTest(inputs, arraysize(inputs), "hello", false, 0);
 }
 
 TEST(HttpChunkedDecoderTest, Typical) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5\r\nhello\r\n",
     "1\r\n \r\n",
     "5\r\nworld\r\n",
@@ -81,7 +82,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, Incremental) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5",
     "\r",
     "\n",
@@ -99,7 +100,7 @@
 
 // Same as above, but group carriage returns with previous input.
 TEST(HttpChunkedDecoderTest, Incremental2) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5\r",
     "\n",
     "hello\r",
@@ -115,7 +116,7 @@
   // Compatibility: [RFC 2616 - Invalid]
   // {Firefox3} - Valid
   // {IE7, Safari3.1, Opera9.51} - Invalid
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5\nhello\n",
     "1\n \n",
     "5\nworld\n",
@@ -125,7 +126,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, Extensions) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5;x=0\r\nhello\r\n",
     "0;y=\"2 \"\r\n\r\n"
   };
@@ -133,7 +134,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, Trailers) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5\r\nhello\r\n",
     "0\r\n",
     "Foo: 1\r\n",
@@ -144,7 +145,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, TrailersUnfinished) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5\r\nhello\r\n",
     "0\r\n",
     "Foo: 1\r\n"
@@ -153,7 +154,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, InvalidChunkSize_TooBig) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     // This chunked body is not terminated.
     // However we will fail decoding because the chunk-size
     // number is larger than we can handle.
@@ -164,7 +165,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, InvalidChunkSize_0X) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     // Compatibility [RFC 2616 - Invalid]:
     // {Safari3.1, IE7} - Invalid
     // {Firefox3, Opera 9.51} - Valid
@@ -175,7 +176,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, ChunkSize_TrailingSpace) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     // Compatibility [RFC 2616 - Invalid]:
     // {IE7, Safari3.1, Firefox3, Opera 9.51} - Valid
     //
@@ -187,7 +188,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, InvalidChunkSize_TrailingTab) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     // Compatibility [RFC 2616 - Invalid]:
     // {IE7, Safari3.1, Firefox3, Opera 9.51} - Valid
     "5\t\r\nhello\r\n",
@@ -197,7 +198,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, InvalidChunkSize_TrailingFormFeed) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     // Compatibility [RFC 2616- Invalid]:
     // {Safari3.1} - Invalid
     // {IE7, Firefox3, Opera 9.51} - Valid
@@ -208,7 +209,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, InvalidChunkSize_TrailingVerticalTab) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     // Compatibility [RFC 2616 - Invalid]:
     // {Safari 3.1} - Invalid
     // {IE7, Firefox3, Opera 9.51} - Valid
@@ -219,7 +220,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, InvalidChunkSize_TrailingNonHexDigit) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     // Compatibility [RFC 2616 - Invalid]:
     // {Safari 3.1} - Invalid
     // {IE7, Firefox3, Opera 9.51} - Valid
@@ -230,7 +231,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, InvalidChunkSize_LeadingSpace) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     // Compatibility [RFC 2616 - Invalid]:
     // {IE7} - Invalid
     // {Safari 3.1, Firefox3, Opera 9.51} - Valid
@@ -241,7 +242,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, InvalidLeadingSeparator) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "\r\n5\r\nhello\r\n",
     "0\r\n\r\n"
   };
@@ -249,7 +250,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, InvalidChunkSize_NoSeparator) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5\r\nhello",
     "1\r\n \r\n",
     "0\r\n\r\n"
@@ -258,7 +259,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, InvalidChunkSize_Negative) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "8\r\n12345678\r\n-5\r\nhello\r\n",
     "0\r\n\r\n"
   };
@@ -266,7 +267,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, InvalidChunkSize_Plus) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     // Compatibility [RFC 2616 - Invalid]:
     // {IE7, Safari 3.1} - Invalid
     // {Firefox3, Opera 9.51} - Valid
@@ -277,7 +278,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, InvalidConsecutiveCRLFs) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5\r\nhello\r\n",
     "\r\n\r\n\r\n\r\n",
     "0\r\n\r\n"
@@ -286,21 +287,21 @@
 }
 
 TEST(HttpChunkedDecoderTest, ExcessiveChunkLen) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "c0000000\r\nhello\r\n"
   };
   RunTestUntilFailure(inputs, arraysize(inputs), 0);
 }
 
 TEST(HttpChunkedDecoderTest, BasicExtraData) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5\r\nhello\r\n0\r\n\r\nextra bytes"
   };
   RunTest(inputs, arraysize(inputs), "hello", true, 11);
 }
 
 TEST(HttpChunkedDecoderTest, IncrementalExtraData) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5",
     "\r",
     "\n",
@@ -317,7 +318,7 @@
 }
 
 TEST(HttpChunkedDecoderTest, MultipleExtraDataBlocks) {
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5\r\nhello\r\n0\r\n\r\nextra",
     " bytes"
   };
@@ -330,7 +331,7 @@
   scoped_ptr<char[]> big_chunk(new char[big_chunk_length + 1]);
   memset(big_chunk.get(), '0', big_chunk_length);
   big_chunk[big_chunk_length] = 0;
-  const char* inputs[] = {
+  const char* const inputs[] = {
     big_chunk.get(),
     "5"
   };
@@ -344,7 +345,7 @@
   scoped_ptr<char[]> big_chunk(new char[big_chunk_length + 1]);
   memset(big_chunk.get(), '0', big_chunk_length);
   big_chunk[big_chunk_length] = 0;
-  const char* inputs[] = {
+  const char* const inputs[] = {
     "5;",
     big_chunk.get()
   };
diff --git a/net/http/http_network_transaction_unittest.cc b/net/http/http_network_transaction_unittest.cc
index a85e6ea..3812c60 100644
--- a/net/http/http_network_transaction_unittest.cc
+++ b/net/http/http_network_transaction_unittest.cc
@@ -477,7 +477,7 @@
 
 // Fill |str| with a long header list that consumes >= |size| bytes.
 void FillLargeHeadersString(std::string* str, int size) {
-  const char* row =
+  const char row[] =
       "SomeHeaderName: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\r\n";
   const int sizeof_row = strlen(row);
   const int num_rows = static_cast<int>(
@@ -1313,7 +1313,7 @@
   StaticSocketDataProvider data2(data2_reads, arraysize(data2_reads), NULL, 0);
   session_deps_.socket_factory->AddSocketDataProvider(&data2);
 
-  const char* kExpectedResponseData[] = {
+  const char* const kExpectedResponseData[] = {
     "hello", "world"
   };
 
@@ -3425,7 +3425,7 @@
   scoped_ptr<SpdyFrame> connect(spdy_util_.ConstructSpdyConnect(
       NULL, 0, 1, LOWEST, HostPortPair("www.google.com", 443)));
   // fetch https://www.google.com/ via SPDY
-  const char* const kMyUrl = "https://www.google.com/";
+  const char kMyUrl[] = "https://www.google.com/";
   scoped_ptr<SpdyFrame> get(
       spdy_util_.ConstructSpdyGet(kMyUrl, false, 1, LOWEST));
   scoped_ptr<SpdyFrame> wrapped_get(
@@ -5150,7 +5150,7 @@
   session_deps_.socket_factory->AddSocketDataProvider(&data1);
   session_deps_.socket_factory->AddSocketDataProvider(&data2);
 
-  const char* kExpectedResponseData[] = {
+  const char* const kExpectedResponseData[] = {
     "hello world", "welcome"
   };
 
@@ -6498,7 +6498,7 @@
 
   // The proxy responds to the connect with a 407, using a persistent
   // connection.
-  const char* const kAuthStatus = "407";
+  const char kAuthStatus[] = "407";
   const char* const kAuthChallenge[] = {
     "proxy-authenticate", "Basic realm=\"MyRealm1\"",
   };
@@ -9549,10 +9549,10 @@
   static const int kNoSSL = 500;
 
   struct TestConfig {
-    const char* proxy_url;
+    const char* const proxy_url;
     AuthTiming proxy_auth_timing;
     int proxy_auth_rv;
-    const char* server_url;
+    const char* const server_url;
     AuthTiming server_auth_timing;
     int server_auth_rv;
     int num_auth_rounds;
@@ -9934,7 +9934,7 @@
                                          writes, arraysize(writes));
   session_deps_.socket_factory->AddSocketDataProvider(&data_provider);
 
-  const char* const kSocketGroup = "www.example.com:80";
+  const char kSocketGroup[] = "www.example.com:80";
 
   // First round of authentication.
   auth_handler->SetGenerateExpectation(false, OK);
diff --git a/net/http/http_response_headers.cc b/net/http/http_response_headers.cc
index c3fd958..3aef42a6 100644
--- a/net/http/http_response_headers.cc
+++ b/net/http/http_response_headers.cc
@@ -1214,9 +1214,10 @@
   // NOTE: It is perhaps risky to assume that a Proxy-Connection header is
   // meaningful when we don't know that this response was from a proxy, but
   // Mozilla also does this, so we'll do the same.
-  static const char* kConnectionHeaders[] = {"connection", "proxy-connection"};
+  static const char* const kConnectionHeaders[] = {
+    "connection", "proxy-connection"};
   struct KeepAliveToken {
-    const char* token;
+    const char* const token;
     bool keep_alive;
   };
   static const KeepAliveToken kKeepAliveTokens[] = {{"keep-alive", true},
diff --git a/net/http/http_util.cc b/net/http/http_util.cc
index 9bdc045..5e342270 100644
--- a/net/http/http_util.cc
+++ b/net/http/http_util.cc
@@ -405,7 +405,7 @@
                                      std::string::const_iterator name_end) {
   // NOTE: "set-cookie2" headers do not support expires attributes, so we don't
   // have to list them here.
-  const char* kNonCoalescingHeaders[] = {
+  const char* const kNonCoalescingHeaders[] = {
     "date",
     "expires",
     "last-modified",
diff --git a/net/http/http_util_unittest.cc b/net/http/http_util_unittest.cc
index e20bbdce..ee501c7 100644
--- a/net/http/http_util_unittest.cc
+++ b/net/http/http_util_unittest.cc
@@ -16,7 +16,7 @@
 }
 
 TEST(HttpUtilTest, IsSafeHeader) {
-  static const char* unsafe_headers[] = {
+  static const char* const unsafe_headers[] = {
     "sec-",
     "sEc-",
     "sec-foo",
@@ -53,7 +53,7 @@
     EXPECT_FALSE(HttpUtil::IsSafeHeader(StringToUpperASCII(std::string(
         unsafe_headers[i])))) << unsafe_headers[i];
   }
-  static const char* safe_headers[] = {
+  static const char* const safe_headers[] = {
     "foo",
     "x-",
     "x-foo",
@@ -102,8 +102,8 @@
 
 TEST(HttpUtilTest, HasHeader) {
   static const struct {
-    const char* headers;
-    const char* name;
+    const char* const headers;
+    const char* const name;
     bool expected_result;
   } tests[] = {
     { "", "foo", false },
@@ -121,7 +121,7 @@
 }
 
 TEST(HttpUtilTest, StripHeaders) {
-  static const char* headers =
+  static const char* const headers =
       "Origin: origin\r\n"
       "Content-Type: text/plain\r\n"
       "Cookies: foo1\r\n"
@@ -130,11 +130,11 @@
       "Server: Apache\r\n"
       "OrIGin: origin2\r\n";
 
-  static const char* header_names[] = {
+  static const char* const header_names[] = {
     "origin", "content-type", "cookies"
   };
 
-  static const char* expected_stripped_headers =
+  static const char* const expected_stripped_headers =
       "Custom: baz\r\n"
       "Server: Apache\r\n";
 
@@ -262,7 +262,7 @@
 
 TEST(HttpUtilTest, LocateEndOfHeaders) {
   struct {
-    const char* input;
+    const char* const input;
     int expected_result;
   } tests[] = {
     { "foo\r\nbar\r\n\r\n", 12 },
@@ -281,8 +281,8 @@
 
 TEST(HttpUtilTest, AssembleRawHeaders) {
   struct {
-    const char* input;  // with '|' representing '\0'
-    const char* expected_result;  // with '\0' changed to '|'
+    const char* const input;  // with '|' representing '\0'
+    const char* const expected_result;  // with '\0' changed to '|'
   } tests[] = {
     { "HTTP/1.0 200 OK\r\nFoo: 1\r\nBar: 2\r\n\r\n",
       "HTTP/1.0 200 OK|Foo: 1|Bar: 2||" },
@@ -594,9 +594,9 @@
 // Test SpecForRequest() and PathForRequest().
 TEST(HttpUtilTest, RequestUrlSanitize) {
   struct {
-    const char* url;
-    const char* expected_spec;
-    const char* expected_path;
+    const char* const url;
+    const char* const expected_spec;
+    const char* const expected_path;
   } tests[] = {
     { // Check that #hash is removed.
       "http://www.google.com:78/foobar?query=1#hash",
@@ -657,11 +657,11 @@
 // HttpResponseHeadersTest.GetMimeType also tests ParseContentType.
 TEST(HttpUtilTest, ParseContentType) {
   const struct {
-    const char* content_type;
-    const char* expected_mime_type;
-    const char* expected_charset;
+    const char* const content_type;
+    const char* const expected_mime_type;
+    const char* const expected_charset;
     const bool expected_had_charset;
-    const char* expected_boundary;
+    const char* const expected_boundary;
   } tests[] = {
     { "text/html; charset=utf-8",
       "text/html",
@@ -741,7 +741,7 @@
 
 TEST(HttpUtilTest, ParseRanges) {
   const struct {
-    const char* headers;
+    const char* const headers;
     bool expected_return_value;
     size_t expected_ranges_size;
     const struct {
diff --git a/net/http/http_vary_data_unittest.cc b/net/http/http_vary_data_unittest.cc
index cffa2d29..19dbbd77 100644
--- a/net/http/http_vary_data_unittest.cc
+++ b/net/http/http_vary_data_unittest.cc
@@ -32,7 +32,7 @@
 
 TEST(HttpVaryDataTest, IsInvalid) {
   // All of these responses should result in an invalid vary data object.
-  const char* kTestResponses[] = {
+  const char* const kTestResponses[] = {
     "HTTP/1.1 200 OK\n\n",
     "HTTP/1.1 200 OK\nVary: *\n\n",
     "HTTP/1.1 200 OK\nVary: cookie, *, bar\n\n",
diff --git a/net/http/transport_security_state_unittest.cc b/net/http/transport_security_state_unittest.cc
index 44fc2d6..2e734087 100644
--- a/net/http/transport_security_state_unittest.cc
+++ b/net/http/transport_security_state_unittest.cc
@@ -786,7 +786,7 @@
 
 TEST_F(TransportSecurityStateTest, PinValidationWithoutRejectedCerts) {
   // kGoodPath is blog.torproject.org.
-  static const char* kGoodPath[] = {
+  static const char* const kGoodPath[] = {
     "sha1/m9lHYJYke9k0GtVZ+bXSQYE8nDI=",
     "sha1/o5OZxATDsgmwgcIfIWIneMJ0jkw=",
     "sha1/wHqYaI2J+6sFZAwRfap9ZbjKzE4=",
@@ -795,7 +795,7 @@
 
   // kBadPath is plus.google.com via Trustcenter, which is utterly wrong for
   // torproject.org.
-  static const char* kBadPath[] = {
+  static const char* const kBadPath[] = {
     "sha1/4BjDjn8v2lWeUFQnqSs0BgbIcrU=",
     "sha1/gzuEEAB/bkqdQS3EIjk2by7lW+k=",
     "sha1/SOZo+SvSspXXR9gjIBBPM5iQn9Q=",
diff --git a/net/http/url_security_manager_unittest.cc b/net/http/url_security_manager_unittest.cc
index cf072e5..8e6d8c5 100644
--- a/net/http/url_security_manager_unittest.cc
+++ b/net/http/url_security_manager_unittest.cc
@@ -15,12 +15,12 @@
 namespace {
 
 struct TestData {
-  const char* url;
+  const char* const url;
   bool succeds_in_windows_default;
   bool succeeds_in_whitelist;
 };
 
-const char* kTestAuthWhitelist = "*example.com,*foobar.com,baz";
+const char kTestAuthWhitelist[] = "*example.com,*foobar.com,baz";
 
 // Under Windows the following will be allowed by default:
 //    localhost
diff --git a/net/proxy/dhcp_proxy_script_adapter_fetcher_win_unittest.cc b/net/proxy/dhcp_proxy_script_adapter_fetcher_win_unittest.cc
index dc0d1b4f..af858d63 100644
--- a/net/proxy/dhcp_proxy_script_adapter_fetcher_win_unittest.cc
+++ b/net/proxy/dhcp_proxy_script_adapter_fetcher_win_unittest.cc
@@ -21,7 +21,7 @@
 
 namespace {
 
-const char* const kPacUrl = "http://pacserver/script.pac";
+const char kPacUrl[] = "http://pacserver/script.pac";
 
 // In net/proxy/dhcp_proxy_script_fetcher_win_unittest.cc there are a few
 // tests that exercise DhcpProxyScriptAdapterFetcher end-to-end along with
diff --git a/net/proxy/proxy_config_source.cc b/net/proxy/proxy_config_source.cc
index 80e2735..f3137ff 100644
--- a/net/proxy/proxy_config_source.cc
+++ b/net/proxy/proxy_config_source.cc
@@ -11,7 +11,7 @@
 
 namespace {
 
-const char* kSourceNames[] = {
+const char* const kSourceNames[] = {
   "UNKNOWN",
   "SYSTEM",
   "SYSTEM FAILED",
diff --git a/net/proxy/proxy_server_unittest.cc b/net/proxy/proxy_server_unittest.cc
index 2471b59..0a596e47 100644
--- a/net/proxy/proxy_server_unittest.cc
+++ b/net/proxy/proxy_server_unittest.cc
@@ -11,12 +11,12 @@
 // was labelled correctly, and the accessors all give the right data.
 TEST(ProxyServerTest, FromURI) {
   const struct {
-    const char* input_uri;
-    const char* expected_uri;
+    const char* const input_uri;
+    const char* const expected_uri;
     net::ProxyServer::Scheme expected_scheme;
-    const char* expected_host;
+    const char* const expected_host;
     int expected_port;
-    const char* expected_pac_string;
+    const char* const expected_pac_string;
   } tests[] = {
     // HTTP proxy URIs:
     {
@@ -191,7 +191,7 @@
 
 // Test parsing some invalid inputs.
 TEST(ProxyServerTest, Invalid) {
-  const char* tests[] = {
+  const char* const tests[] = {
     "",
     "   ",
     "dddf:",   // not a valid port
@@ -214,7 +214,7 @@
 
 // Test that LWS (SP | HT) is disregarded from the ends.
 TEST(ProxyServerTest, Whitespace) {
-  const char* tests[] = {
+  const char* const tests[] = {
     "  foopy:80",
     "foopy:80   \t",
     "  \tfoopy:80  ",
@@ -230,8 +230,8 @@
 // Test parsing a ProxyServer from a PAC representation.
 TEST(ProxyServerTest, FromPACString) {
   const struct {
-    const char* input_pac;
-    const char* expected_uri;
+    const char* const input_pac;
+    const char* const expected_uri;
   } tests[] = {
     {
        "PROXY foopy:10",
@@ -288,7 +288,7 @@
 
 // Test parsing a ProxyServer from an invalid PAC representation.
 TEST(ProxyServerTest, FromPACStringInvalid) {
-  const char* tests[] = {
+  const char* const tests[] = {
     "PROXY",  // missing host/port.
     "HTTPS",  // missing host/port.
     "SOCKS",  // missing host/port.
@@ -304,8 +304,8 @@
 TEST(ProxyServerTest, ComparatorAndEquality) {
   struct {
     // Inputs.
-    const char* server1;
-    const char* server2;
+    const char* const server1;
+    const char* const server2;
 
     // Expectation.
     //   -1 means server1 is less than server2
diff --git a/net/quic/crypto/crypto_server_test.cc b/net/quic/crypto/crypto_server_test.cc
index bd78850..b140e23 100644
--- a/net/quic/crypto/crypto_server_test.cc
+++ b/net/quic/crypto/crypto_server_test.cc
@@ -302,7 +302,7 @@
                         ::testing::ValuesIn(GetTestParams()));
 
 TEST_P(CryptoServerTest, BadSNI) {
-  static const char* kBadSNIs[] = {
+  static const char* const kBadSNIs[] = {
     "",
     "foo",
     "#00",
@@ -368,7 +368,7 @@
 
 TEST_P(CryptoServerTest, BadSourceAddressToken) {
   // Invalid source-address tokens should be ignored.
-  static const char* kBadSourceAddressTokens[] = {
+  static const char* const kBadSourceAddressTokens[] = {
     "",
     "foo",
     "#0000",
@@ -390,7 +390,7 @@
 
 TEST_P(CryptoServerTest, BadClientNonce) {
   // Invalid nonces should be ignored.
-  static const char* kBadNonces[] = {
+  static const char* const kBadNonces[] = {
     "",
     "#0000",
     "#0000000000000000000000000000000000000000",
diff --git a/net/quic/quic_fec_group_test.cc b/net/quic/quic_fec_group_test.cc
index b9420dd..a918aeb 100644
--- a/net/quic/quic_fec_group_test.cc
+++ b/net/quic/quic_fec_group_test.cc
@@ -19,7 +19,7 @@
 
 namespace {
 
-const char* kData[] = {
+const char* const kData[] = {
   "abc12345678",
   "987defg",
   "ghi12345",
diff --git a/net/server/http_server_unittest.cc b/net/server/http_server_unittest.cc
index c25c03e..aaad7b8 100644
--- a/net/server/http_server_unittest.cc
+++ b/net/server/http_server_unittest.cc
@@ -278,7 +278,7 @@
 TEST_F(HttpServerTest, RequestWithHeaders) {
   TestHttpClient client;
   ASSERT_EQ(OK, client.ConnectAndWait(server_address_));
-  const char* kHeaders[][3] = {
+  const char* const kHeaders[][3] = {
       {"Header", ": ", "1"},
       {"HeaderWithNoWhitespace", ":", "1"},
       {"HeaderWithWhitespace", "   :  \t   ", "1 1 1 \t  "},
@@ -308,7 +308,7 @@
 TEST_F(HttpServerTest, RequestWithDuplicateHeaders) {
   TestHttpClient client;
   ASSERT_EQ(OK, client.ConnectAndWait(server_address_));
-  const char* kHeaders[][3] = {
+  const char* const kHeaders[][3] = {
       {"FirstHeader", ": ", "1"},
       {"DuplicateHeader", ": ", "2"},
       {"MiddleHeader", ": ", "3"},
@@ -336,7 +336,7 @@
 TEST_F(HttpServerTest, HasHeaderValueTest) {
   TestHttpClient client;
   ASSERT_EQ(OK, client.ConnectAndWait(server_address_));
-  const char* kHeaders[] = {
+  const char* const kHeaders[] = {
       "Header: Abcd",
       "HeaderWithNoWhitespace:E",
       "HeaderWithWhitespace   :  \t   f \t  ",
diff --git a/net/socket/ssl_client_socket_unittest.cc b/net/socket/ssl_client_socket_unittest.cc
index 2fc6523..287dbb8 100644
--- a/net/socket/ssl_client_socket_unittest.cc
+++ b/net/socket/ssl_client_socket_unittest.cc
@@ -2322,15 +2322,15 @@
   EXPECT_TRUE(sock->IsConnected());
 
   const int kKeyingMaterialSize = 32;
-  const char* kKeyingLabel1 = "client-socket-test-1";
-  const char* kKeyingContext = "";
+  const char kKeyingLabel1[] = "client-socket-test-1";
+  const char kKeyingContext[] = "";
   unsigned char client_out1[kKeyingMaterialSize];
   memset(client_out1, 0, sizeof(client_out1));
   rv = sock->ExportKeyingMaterial(
       kKeyingLabel1, false, kKeyingContext, client_out1, sizeof(client_out1));
   EXPECT_EQ(rv, OK);
 
-  const char* kKeyingLabel2 = "client-socket-test-2";
+  const char kKeyingLabel2[] = "client-socket-test-2";
   unsigned char client_out2[kKeyingMaterialSize];
   memset(client_out2, 0, sizeof(client_out2));
   rv = sock->ExportKeyingMaterial(
diff --git a/net/socket/ssl_server_socket_unittest.cc b/net/socket/ssl_server_socket_unittest.cc
index 5fabdd2e..d13dba8 100644
--- a/net/socket/ssl_server_socket_unittest.cc
+++ b/net/socket/ssl_server_socket_unittest.cc
@@ -535,8 +535,8 @@
   }
 
   const int kKeyingMaterialSize = 32;
-  const char* kKeyingLabel = "EXPERIMENTAL-server-socket-test";
-  const char* kKeyingContext = "";
+  const char kKeyingLabel[] = "EXPERIMENTAL-server-socket-test";
+  const char kKeyingContext[] = "";
   unsigned char server_out[kKeyingMaterialSize];
   int rv = server_socket_->ExportKeyingMaterial(kKeyingLabel,
                                                 false, kKeyingContext,
@@ -550,7 +550,7 @@
   ASSERT_EQ(OK, rv);
   EXPECT_EQ(0, memcmp(server_out, client_out, sizeof(server_out)));
 
-  const char* kKeyingLabelBad = "EXPERIMENTAL-server-socket-test-bad";
+  const char kKeyingLabelBad[] = "EXPERIMENTAL-server-socket-test-bad";
   unsigned char client_bad[kKeyingMaterialSize];
   rv = client_socket_->ExportKeyingMaterial(kKeyingLabelBad,
                                             false, kKeyingContext,
diff --git a/net/spdy/spdy_session.cc b/net/spdy/spdy_session.cc
index 575cee2..a6478df 100644
--- a/net/spdy/spdy_session.cc
+++ b/net/spdy/spdy_session.cc
@@ -450,8 +450,8 @@
         to_insert = request_headers;
     } else {
       const char* host = protocol_version >= SPDY4 ? ":authority" : ":host";
-      static const char* scheme = ":scheme";
-      static const char* path = ":path";
+      static const char scheme[] = ":scheme";
+      static const char path[] = ":path";
       if (it->first == host || it->first == scheme || it->first == path)
         to_insert = request_headers;
     }
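
The comparisons in the hunk above behave identically after the change, assuming the header-block keys are std::string as the comparison suggests: an array constant decays to const char* at the use site, and std::string compares by contents either way. A standalone sketch with made-up values:

  #include <cassert>
  #include <string>

  static const char kScheme[] = ":scheme";   // new array form
  static const char* const kPath = ":path";  // old pointer form

  int main() {
    std::string key(":scheme");
    assert(key == kScheme);  // array decays to const char*; contents compared.
    assert(key != kPath);
    return 0;
  }
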
diff --git a/net/tools/crash_cache/crash_cache.cc b/net/tools/crash_cache/crash_cache.cc
index 60d7158..34eaafa 100644
--- a/net/tools/crash_cache/crash_cache.cc
+++ b/net/tools/crash_cache/crash_cache.cc
@@ -84,12 +84,12 @@
 NET_EXPORT_PRIVATE extern RankCrashes g_rankings_crash;
 }
 
-const char* kCrashEntryName = "the first key";
+const char kCrashEntryName[] = "the first key";
 
 // Creates the destination folder for this run, and returns it on full_path.

 bool CreateTargetFolder(const base::FilePath& path, RankCrashes action,
                         base::FilePath* full_path) {
-  const char* folders[] = {
+  const char* const folders[] = {
     "",
     "insert_empty1",
     "insert_empty2",
diff --git a/net/tools/quic/end_to_end_test.cc b/net/tools/quic/end_to_end_test.cc
index 1104528..0a5010d 100644
--- a/net/tools/quic/end_to_end_test.cc
+++ b/net/tools/quic/end_to_end_test.cc
@@ -70,8 +70,8 @@
 namespace test {
 namespace {
 
-const char* kFooResponseBody = "Artichoke hearts make me happy.";
-const char* kBarResponseBody = "Palm hearts are pretty delicious, also.";
+const char kFooResponseBody[] = "Artichoke hearts make me happy.";
+const char kBarResponseBody[] = "Palm hearts are pretty delicious, also.";
 
 // Run all tests with the cross products of all versions.
 struct TestParams {
diff --git a/net/tools/quic/spdy_utils.cc b/net/tools/quic/spdy_utils.cc
index c2b6174..2cfd381 100644
--- a/net/tools/quic/spdy_utils.cc
+++ b/net/tools/quic/spdy_utils.cc
@@ -24,14 +24,14 @@
 namespace net {
 namespace tools {
 
-const char* const kV4Host = ":authority";
+const char kV4Host[] = ":authority";
 
-const char* const kV3Host = ":host";
-const char* const kV3Path = ":path";
-const char* const kV3Scheme = ":scheme";
-const char* const kV3Status = ":status";
-const char* const kV3Method = ":method";
-const char* const kV3Version = ":version";
+const char kV3Host[] = ":host";
+const char kV3Path[] = ":path";
+const char kV3Scheme[] = ":scheme";
+const char kV3Status[] = ":status";
+const char kV3Method[] = ":method";
+const char kV3Version[] = ":version";
 
 void PopulateSpdyHeaderBlock(const BalsaHeaders& headers,
                              SpdyHeaderBlock* block,
diff --git a/net/tools/quic/test_tools/http_message.cc b/net/tools/quic/test_tools/http_message.cc
index 9bd3cffc..cb32255 100644
--- a/net/tools/quic/test_tools/http_message.cc
+++ b/net/tools/quic/test_tools/http_message.cc
@@ -20,20 +20,20 @@
 
 namespace {
 
-//const char* kContentEncoding = "content-encoding";
-const char* kContentLength = "content-length";
-const char* kTransferCoding = "transfer-encoding";
+//const char kContentEncoding[] = "content-encoding";
+const char kContentLength[] = "content-length";
+const char kTransferCoding[] = "transfer-encoding";
 
 // Both kHTTPVersionString and kMethodString arrays are constructed to match
 // the enum values defined in Version and Method of HTTPMessage.
-const char* kHTTPVersionString[] = {
+const char* const kHTTPVersionString[] = {
   "",
   "HTTP/0.9",
   "HTTP/1.0",
   "HTTP/1.1"
 };
 
-const char* kMethodString[] = {
+const char* const kMethodString[] = {
   "",
   "OPTIONS",
   "GET",
diff --git a/net/udp/udp_socket_unittest.cc b/net/udp/udp_socket_unittest.cc
index c92bf2b..221be01f 100644
--- a/net/udp/udp_socket_unittest.cc
+++ b/net/udp/udp_socket_unittest.cc
@@ -541,7 +541,7 @@
 
 TEST_F(UDPSocketTest, MAYBE_JoinMulticastGroup) {
   const uint16 kPort = 9999;
-  const char* const kGroup = "237.132.100.17";
+  const char kGroup[] = "237.132.100.17";
 
   IPEndPoint bind_address;
   CreateUDPAddress("0.0.0.0", kPort, &bind_address);
diff --git a/net/url_request/sdch_dictionary_fetcher_unittest.cc b/net/url_request/sdch_dictionary_fetcher_unittest.cc
index f79efc6..de4ac81 100644
--- a/net/url_request/sdch_dictionary_fetcher_unittest.cc
+++ b/net/url_request/sdch_dictionary_fetcher_unittest.cc
@@ -21,8 +21,8 @@
 
 namespace net {
 
-static const char* kSampleBufferContext = "This is a sample buffer.";
-static const char* kTestDomain = "top.domain.test";
+static const char kSampleBufferContext[] = "This is a sample buffer.";
+static const char kTestDomain[] = "top.domain.test";
 
 class URLRequestSpecifiedResponseJob : public URLRequestSimpleJob {
  public:
diff --git a/net/websockets/websocket_extension_parser_test.cc b/net/websockets/websocket_extension_parser_test.cc
index dc7dc85..c19460cb 100644
--- a/net/websockets/websocket_extension_parser_test.cc
+++ b/net/websockets/websocket_extension_parser_test.cc
@@ -65,7 +65,7 @@
 }
 
 TEST(WebSocketExtensionParserTest, InvalidPatterns) {
-  const char* patterns[] = {
+  const char* const patterns[] = {
     "fo\ao",  // control in extension name
     "fo\x01o",  // control in extension name
     "fo<o",  // separator in extension name
diff --git a/net/websockets/websocket_handshake_constants.cc b/net/websockets/websocket_handshake_constants.cc
index dc670aa..bb031e21 100644
--- a/net/websockets/websocket_handshake_constants.cc
+++ b/net/websockets/websocket_handshake_constants.cc
@@ -7,32 +7,32 @@
 namespace net {
 namespace websockets {
 
-const char* const kHttpProtocolVersion = "HTTP/1.1";
+const char kHttpProtocolVersion[] = "HTTP/1.1";
 
 const size_t kRawChallengeLength = 16;
 
-const char* const kSecWebSocketProtocol = "Sec-WebSocket-Protocol";
-const char* const kSecWebSocketExtensions = "Sec-WebSocket-Extensions";
-const char* const kSecWebSocketKey = "Sec-WebSocket-Key";
-const char* const kSecWebSocketAccept = "Sec-WebSocket-Accept";
-const char* const kSecWebSocketVersion = "Sec-WebSocket-Version";
+const char kSecWebSocketProtocol[] = "Sec-WebSocket-Protocol";
+const char kSecWebSocketExtensions[] = "Sec-WebSocket-Extensions";
+const char kSecWebSocketKey[] = "Sec-WebSocket-Key";
+const char kSecWebSocketAccept[] = "Sec-WebSocket-Accept";
+const char kSecWebSocketVersion[] = "Sec-WebSocket-Version";
 
-const char* const kSupportedVersion = "13";
+const char kSupportedVersion[] = "13";
 
-const char* const kUpgrade = "Upgrade";
-const char* const kWebSocketGuid = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
+const char kUpgrade[] = "Upgrade";
+const char kWebSocketGuid[] = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
 
-const char* const kSecWebSocketProtocolSpdy3 = ":sec-websocket-protocol";
-const char* const kSecWebSocketExtensionsSpdy3 = ":sec-websocket-extensions";
+const char kSecWebSocketProtocolSpdy3[] = ":sec-websocket-protocol";
+const char kSecWebSocketExtensionsSpdy3[] = ":sec-websocket-extensions";
 
 const char* const kSecWebSocketProtocolLowercase =
     kSecWebSocketProtocolSpdy3 + 1;
 const char* const kSecWebSocketExtensionsLowercase =
     kSecWebSocketExtensionsSpdy3 + 1;
-const char* const kSecWebSocketKeyLowercase = "sec-websocket-key";
-const char* const kSecWebSocketVersionLowercase = "sec-websocket-version";
-const char* const kUpgradeLowercase = "upgrade";
-const char* const kWebSocketLowercase = "websocket";
+const char kSecWebSocketKeyLowercase[] = "sec-websocket-key";
+const char kSecWebSocketVersionLowercase[] = "sec-websocket-version";
+const char kUpgradeLowercase[] = "upgrade";
+const char kWebSocketLowercase[] = "websocket";
 
 }  // namespace websockets
 }  // namespace net
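
The two SPDY3-derived lowercase names above deliberately keep the const char* const form: they are defined as an offset into another constant (skipping the leading ':'), and an array cannot be initialized from a pointer expression. A standalone sketch of the same trick, with shortened hypothetical names:

  #include <cassert>
  #include <cstring>

  const char kProtocolSpdy3[] = ":sec-websocket-protocol";

  // Skipping the leading ':' reuses the same characters instead of storing a
  // second copy. The initializer is a pointer expression, so this constant
  // must remain a pointer; it cannot be rewritten in the array form.
  const char* const kProtocolLowercase = kProtocolSpdy3 + 1;

  int main() {
    assert(std::strcmp(kProtocolLowercase, "sec-websocket-protocol") == 0);
    return 0;
  }
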
diff --git a/net/websockets/websocket_handshake_constants.h b/net/websockets/websocket_handshake_constants.h
index 43d3efd..d68a28a 100644
--- a/net/websockets/websocket_handshake_constants.h
+++ b/net/websockets/websocket_handshake_constants.h
@@ -22,45 +22,45 @@
 // "HTTP/1.1"
 // RFC6455 only requires HTTP/1.1 "or better" but in practice an HTTP version
 // other than 1.1 should not occur in a WebSocket handshake.
-extern const char* const kHttpProtocolVersion;
+extern const char kHttpProtocolVersion[];
 
 // The Sec-WebSocket-Key challenge is 16 random bytes, base64 encoded.
 extern const size_t kRawChallengeLength;
 
 // "Sec-WebSocket-Protocol"
-extern const char* const kSecWebSocketProtocol;
+extern const char kSecWebSocketProtocol[];
 
 // "Sec-WebSocket-Extensions"
-extern const char* const kSecWebSocketExtensions;
+extern const char kSecWebSocketExtensions[];
 
 // "Sec-WebSocket-Key"
-extern const char* const kSecWebSocketKey;
+extern const char kSecWebSocketKey[];
 
 // "Sec-WebSocket-Accept"
-extern const char* const kSecWebSocketAccept;
+extern const char kSecWebSocketAccept[];
 
 // "Sec-WebSocket-Version"
-extern const char* const kSecWebSocketVersion;
+extern const char kSecWebSocketVersion[];
 
 // This implementation only supports one version of the WebSocket protocol,
 // "13", as specified in RFC6455. If support for multiple versions is added in
 // future, it will probably no longer be worth having a constant for this.
-extern const char* const kSupportedVersion;
+extern const char kSupportedVersion[];
 
 // "Upgrade"
-extern const char* const kUpgrade;
+extern const char kUpgrade[];
 
 // "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" as defined in section 4.1 of
 // RFC6455.
-extern const char* const kWebSocketGuid;
+extern const char kWebSocketGuid[];
 
 // Colon-prefixed lowercase headers for SPDY3.
 
 // ":sec-websocket-protocol"
-extern const char* const kSecWebSocketProtocolSpdy3;
+extern const char kSecWebSocketProtocolSpdy3[];
 
 // ":sec-websocket-extensions"
-extern const char* const kSecWebSocketExtensionsSpdy3;
+extern const char kSecWebSocketExtensionsSpdy3[];
 
 // Some parts of the code require lowercase versions of the header names in
 // order to do case-insensitive comparisons, or because of SPDY.
@@ -71,17 +71,17 @@
 extern const char* const kSecWebSocketExtensionsLowercase;
 
 // "sec-websocket-key"
-extern const char* const kSecWebSocketKeyLowercase;
+extern const char kSecWebSocketKeyLowercase[];
 
 // "sec-websocket-version"
-extern const char* const kSecWebSocketVersionLowercase;
+extern const char kSecWebSocketVersionLowercase[];
 
 // "upgrade"
-extern const char* const kUpgradeLowercase;
+extern const char kUpgradeLowercase[];
 
 // "websocket", as used in the "Upgrade:" header. This is always lowercase
 // (except in obsolete versions of the protocol).
-extern const char* const kWebSocketLowercase;
+extern const char kWebSocketLowercase[];
 
 }  // namespace websockets
 }  // namespace net
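
A note on the header declarations above: extern const char kName[] declares an array of unknown bound, so client files can use the constant anywhere a const char* is expected but cannot apply sizeof to it; the definition in the .cc file completes the type. A single-file sketch with a hypothetical name:

  #include <cstdio>
  #include <string>

  // What the header declares: an array of unknown bound.
  extern const char kUpgradeExample[];

  // What the .cc file defines, completing the type and fixing the size.
  const char kUpgradeExample[] = "Upgrade";

  int main() {
    // Decays to const char* at the use site, just like the previous
    // extern const char* const declarations did.
    std::string line = std::string(kUpgradeExample) + ": websocket";
    std::printf("%s\n", line.c_str());
    return 0;
  }
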