#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import binascii
import hashlib
import logging
import os
import subprocess
import sys
import tempfile
import time
import unittest
import urllib

ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, ROOT_DIR)

import isolateserver


# Ensure that the testing machine has access to this server.
ISOLATE_SERVER = 'https://isolateserver.appspot.com/'

# The directory containing the test data files.
TEST_DATA_DIR = os.path.join(ROOT_DIR, 'tests', 'isolateserver')


# TODO(vadimsh): This test is a bit frankensteinish now. It uses the new
# /content-gs protocol for uploads via 'isolateserver.py archive', but the old
# /content protocol for validity checks and fetches.
class IsolateServerArchiveSmokeTest(unittest.TestCase):
  def setUp(self):
    # The namespace must end in '-gzip' since all files are now compressed
    # before being uploaded. long(time.time()) is already integral, so no
    # further truncation is needed.
    self.namespace = 'temporary' + str(long(time.time())) + '-gzip'
    url = ISOLATE_SERVER + 'content/get_token?from_smoke_test=1'
    self.token = urllib.quote(isolateserver.net.url_read(url))
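
  def _example_gzip_payload(self, path):
    """Illustrative sketch (not used by the tests): prepares an upload for a
    '-gzip' namespace.

    Assumption based on isolateserver.py's behavior, not a verified server
    contract: the content key is the sha1 of the *uncompressed* bytes, while
    the uploaded body is the zlib-compressed bytes.
    """
    import zlib  # Kept local so the sketch stays self-contained.
    data = open(path, 'rb').read()
    return hashlib.sha1(data).hexdigest(), zlib.compress(data)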

  def _archive_given_files(self, files):
    """Archives the given files with isolateserver.py, then verifies they are
    all present on the server."""
    args = [
        sys.executable,
        os.path.join(ROOT_DIR, 'isolateserver.py'),
        'archive',
        '--isolate-server', ISOLATE_SERVER,
        '--namespace', self.namespace,
    ]
    if '-v' in sys.argv:
      args.append('--verbose')
    args.extend(os.path.join(TEST_DATA_DIR, filename) for filename in files)
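    # For reference, the assembled command is equivalent to (the namespace
    # value varies per run):
    #   python isolateserver.py archive \
    #       --isolate-server https://isolateserver.appspot.com/ \
    #       --namespace temporary1234567890-gzip \
    #       tests/isolateserver/small_file.txt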
    self.assertEqual(0, subprocess.call(args))

    # Try to download the files from the server.
    file_hashes = [
        isolateserver.hash_file(os.path.join(TEST_DATA_DIR, f), hashlib.sha1)
        for f in files
    ]
    for i in range(len(files)):
      download_url = '%scontent/retrieve/%s/%s' % (
          ISOLATE_SERVER, self.namespace, file_hashes[i])
      downloaded_file = isolateserver.net.url_read(download_url, retry_404=True)
      self.assertTrue(
          downloaded_file is not None,
          'File %s was missing from the server' % files[i])

    # Ensure the files are listed as present on the server; see
    # _example_decode_contains() below for the wire format.
    contains_hash_url = '%scontent/contains/%s?token=%s&from_smoke_test=1' % (
        ISOLATE_SERVER, self.namespace, self.token)
    body = ''.join(binascii.unhexlify(h) for h in file_hashes)
    expected = chr(1) * len(files)
    MAX_ATTEMPTS = 10
    for i in xrange(MAX_ATTEMPTS):
      # AppEngine's datastore is eventually consistent and the isolate server
      # does not use transactions for performance reasons, so even if one
      # request was able to retrieve a file, a subsequent request may not see
      # it yet. Retry a few times until the datastore becomes consistent with
      # regard to these entities.
      response = isolateserver.net.url_read(
          contains_hash_url,
          data=body,
          content_type='application/octet-stream')
      if response == expected:
        break
      # GAE is exposing its internal data inconsistency.
      if i != (MAX_ATTEMPTS - 1):
        print('Visible datastore inconsistency, retrying.')
        time.sleep(0.1)
    self.assertEqual(expected, response)
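
  def _example_decode_contains(self, file_hashes, response):
    """Illustrative sketch (not used by the tests): decodes a
    /content/contains response.

    The request body is the concatenation of the raw 20-byte sha1 digests and
    the response carries one byte per digest, chr(1) meaning 'present'; this
    mirrors how 'body' and 'expected' are built above. The helper name is
    ours, not part of the server API.
    """
    return dict(
        (h, byte == chr(1)) for h, byte in zip(file_hashes, response))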

  def test_archive_empty_file(self):
    self._archive_given_files(['empty_file.txt'])

  def test_archive_small_file(self):
    self._archive_given_files(['small_file.txt'])

  def disabled_test_archive_huge_file(self):
    # Creates a file over 2 GiB and archives it.
    # TODO(maruel): Temporarily disabled until the server is fixed.
    handle = None
    filepath = None
    try:
      try:
        handle, filepath = tempfile.mkstemp(prefix='isolateserver')
        # Write 2.1 GiB (2150 chunks of 1 MiB each).
        chunk = chr(0) + chr(57) + chr(128) + chr(255)
        chunk1mb = chunk * (1024 * 1024 / len(chunk))
        for _ in xrange(2150):
          os.write(handle, chunk1mb)
      finally:
        if handle is not None:
          os.close(handle)
      self._archive_given_files([filepath])
    finally:
      if filepath:
        os.remove(filepath)
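
  def _example_streaming_hash(self, path):
    """Illustrative sketch (not used by the tests): hashes a huge file in
    fixed-size chunks so a >2 GiB file never has to fit in memory.

    This shows the general streaming technique; isolateserver.hash_file() is
    the real implementation used by the tests above.
    """
    digest = hashlib.sha1()
    with open(path, 'rb') as f:
      while True:
        chunk = f.read(1024 * 1024)
        if not chunk:
          break
        digest.update(chunk)
    return digest.hexdigest()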


if __name__ == '__main__':
  if len(sys.argv) > 1 and sys.argv[1].startswith('http'):
    ISOLATE_SERVER = sys.argv.pop(1).rstrip('/') + '/'
  logging.basicConfig(
      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
  unittest.main()