Skip to content

Commit 0673a65

Browse files
authored
CI: Index verification (#10)
* Index tests * Some refactoring * New flake8 settings * Metadata checks also * Better build stages
1 parent 4a81d88 commit 0673a65

File tree

5 files changed

+206
-12
lines changed

5 files changed

+206
-12
lines changed

.flake8

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -2,10 +2,8 @@
22
max-line-length = 120
33
max-complexity = 10
44
ignore =
5-
E126,
6-
E501,
7-
E722,
8-
E741,
9-
F401,
10-
F811,
11-
C901
5+
E501, # line too long, it is covered by pylint
6+
E722, # bare except, bad practice, to be removed in the future
7+
F401, # imported but unused, too many violations, to be removed in the future
8+
F811, # redefinition of unused, to be removed in the future
9+
C901 # code flow is too complex, too many violations, to be removed in the future

.github/PULL_REQUEST_TEMPLATE.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,4 +4,5 @@ This checklist is used to make sure that common guidelines for a pull request ar
44

55
### General Guidelines
66

7-
- [ ] Have you run `./scripts/ci/test_static.sh` locally? (`pip install pylint flake8` required)
7+
- [ ] If you modified extension source code, have you run `./scripts/ci/test_static.sh` locally? (`pip install pylint flake8` required)
8+
- [ ] If you modified the index, have you run `python scripts/ci/test_index.py -q` locally?

.travis.yml

Lines changed: 11 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,14 +2,22 @@ dist: trusty
22
sudo: off
33
language: python
44
install:
5-
- pip install pylint flake8
5+
- pip install pylint flake8 requests
66
jobs:
77
include:
88
- stage: verify
9+
env: PURPOSE='SourceStatic'
910
script: ./scripts/ci/test_static.sh
10-
env: PURPOSE='VerifySource-StaticCheck'
1111
python: 3.6
1212
- stage: verify
13+
env: PURPOSE='SourceStatic'
1314
script: ./scripts/ci/test_static.sh
14-
env: PURPOSE='VerifySource-StaticCheck'
15+
python: 2.7
16+
- stage: verify
17+
env: PURPOSE='IndexVerify'
18+
script: python ./scripts/ci/test_index.py -v
19+
python: 3.6
20+
- stage: verify
21+
env: PURPOSE='IndexVerify'
22+
script: python ./scripts/ci/test_index.py -v
1523
python: 2.7

scripts/ci/test_index.py

Lines changed: 187 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,187 @@
1+
# --------------------------------------------------------------------------------------------
2+
# Copyright (c) Microsoft Corporation. All rights reserved.
3+
# Licensed under the MIT License. See License.txt in the project root for license information.
4+
# --------------------------------------------------------------------------------------------
5+
6+
from __future__ import print_function
7+
8+
import os
9+
import json
10+
import tempfile
11+
import unittest
12+
import zipfile
13+
import hashlib
14+
import shutil
15+
from util import get_repo_root
16+
from wheel.install import WHEEL_INFO_RE
17+
18+
# Absolute path to the extension index that every test below validates.
INDEX_PATH = os.path.join(get_repo_root(), 'src', 'index.json')
19+
20+
21+
def catch_dup_keys(pairs):
    """object_pairs_hook for json.load that rejects duplicate keys.

    Plain json.load silently keeps the last value for a repeated key;
    here a repeated key raises ValueError so the index stays unambiguous.
    """
    result = {}
    for key, value in pairs:
        if key in result:
            raise ValueError("duplicate key {}".format(key))
        result[key] = value
    return result
28+
29+
30+
def get_index_data():
    """Parse the extension index, turning malformed JSON into a test failure.

    Duplicate keys anywhere in the document count as invalid JSON
    (see catch_dup_keys).
    """
    try:
        with open(INDEX_PATH) as index_file:
            raw = index_file.read()
        return json.loads(raw, object_pairs_hook=catch_dup_keys)
    except ValueError as err:
        raise AssertionError("Invalid JSON in {}: {}".format(INDEX_PATH, err))
36+
37+
38+
def get_whl_from_url(url, filename, tmp_dir, whl_cache):
    """Download the wheel at url into tmp_dir and return its local path.

    Downloads are memoized per-URL in whl_cache so each wheel is
    fetched at most once per test run.
    """
    if url in whl_cache:
        return whl_cache[url]
    import requests
    response = requests.get(url, stream=True)
    assert response.status_code == 200, "Request to {} failed with {}".format(url, response.status_code)
    ext_file = os.path.join(tmp_dir, filename)
    with open(ext_file, 'wb') as whl_file:
        for chunk in response.iter_content(chunk_size=1024):
            # skip keep-alive chunks, which arrive empty
            if chunk:
                whl_file.write(chunk)
    whl_cache[url] = ext_file
    return ext_file
51+
52+
53+
def get_sha256sum(a_file):
    """Return the hex SHA-256 digest of a_file.

    Hashes in fixed-size chunks so large wheels are not read into
    memory all at once (the original read the whole file in one call).
    """
    sha256 = hashlib.sha256()
    with open(a_file, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            sha256.update(chunk)
    return sha256.hexdigest()
58+
59+
60+
def get_extension_modname(ext_dir):
    """Return the name of the single 'azext_*' module directory in ext_dir.

    Raises AssertionError with a clear message when zero or multiple
    candidate module directories exist (the original indexed [0] and
    produced a bare IndexError on an empty list).

    Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L153
    """
    EXTENSIONS_MOD_PREFIX = 'azext_'
    pos_mods = [n for n in os.listdir(ext_dir)
                if n.startswith(EXTENSIONS_MOD_PREFIX) and os.path.isdir(os.path.join(ext_dir, n))]
    if len(pos_mods) != 1:
        raise AssertionError("Expected 1 module starting with '{}' in {}, found {}".format(
            EXTENSIONS_MOD_PREFIX, ext_dir, pos_mods))
    return pos_mods[0]
66+
67+
68+
def get_azext_metadata(ext_dir):
    """Load azext_metadata.json from the extension module, or None if absent.

    Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L109
    """
    AZEXT_METADATA_FILENAME = 'azext_metadata.json'
    modname = get_extension_modname(ext_dir=ext_dir)
    metadata_path = os.path.join(ext_dir, modname, AZEXT_METADATA_FILENAME)
    if not os.path.isfile(metadata_path):
        return None
    with open(metadata_path) as metadata_file:
        return json.load(metadata_file)
78+
79+
80+
def get_ext_metadata(ext_dir, ext_file, ext_name):
    """Extract wheel ext_file into ext_dir and return its merged metadata.

    Merges the optional azext_metadata.json with the wheel's dist-info
    metadata.json for the dist-info directory whose parsed name matches
    ext_name.

    Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L89
    """
    WHL_METADATA_FILENAME = 'metadata.json'
    # context manager guarantees the archive handle is closed even if
    # extractall raises (the original leaked it on that path)
    with zipfile.ZipFile(ext_file, 'r') as zip_ref:
        zip_ref.extractall(ext_dir)
    metadata = {}
    dist_info_dirs = [f for f in os.listdir(ext_dir) if f.endswith('.dist-info')]
    azext_metadata = get_azext_metadata(ext_dir)
    if azext_metadata:
        metadata.update(azext_metadata)
    for dist_info_dirname in dist_info_dirs:
        parsed_dist_info_dir = WHEEL_INFO_RE(dist_info_dirname)
        if parsed_dist_info_dir and parsed_dist_info_dir.groupdict().get('name') == ext_name:
            whl_metadata_filepath = os.path.join(ext_dir, dist_info_dirname, WHL_METADATA_FILENAME)
            if os.path.isfile(whl_metadata_filepath):
                with open(whl_metadata_filepath) as f:
                    metadata.update(json.load(f))
    return metadata
99+
100+
101+
class TestIndex(unittest.TestCase):
    """Sanity checks for the extension index (src/index.json)."""

    @classmethod
    def setUpClass(cls):
        cls.longMessage = True  # append custom messages to default assertion output
        cls.index = get_index_data()
        # wheels downloaded by the CI-only tests are cached here for the whole run
        cls.whl_cache_dir = tempfile.mkdtemp()
        cls.whl_cache = {}

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(cls.whl_cache_dir)

    def test_format_version(self):
        self.assertEqual(self.index['formatVersion'], '1')

    def test_format_extensions_key(self):
        self.assertIn('extensions', self.index)

    def test_format_extensions_value(self):
        self.assertIsInstance(self.index['extensions'], dict)

    def test_extension_filenames(self):
        for ext_name, exts in self.index['extensions'].items():
            for item in exts:
                self.assertTrue(item['filename'].endswith('.whl'),
                                "Filename {} must end with .whl".format(item['filename']))
                self.assertTrue(item['filename'].startswith(ext_name),
                                "Filename {} must start with {}".format(item['filename'], ext_name))
                parsed_filename = WHEEL_INFO_RE(item['filename'])
                # fail cleanly on an unparseable filename instead of an
                # AttributeError from groupdict() on None
                self.assertTrue(parsed_filename,
                                "Can't parse wheel filename {}".format(item['filename']))
                p = parsed_filename.groupdict()
                self.assertTrue(p.get('name'), "Can't get name for {}".format(item['filename']))
                universal_wheel = p.get('pyver') == 'py2.py3' and p.get('abi') == 'none' and p.get('plat') == 'any'
                self.assertTrue(universal_wheel,
                                "{} of {} not universal (platform independent) wheel. "
                                "It should end in py2.py3-none-any.whl".format(item['filename'], ext_name))

    def test_extension_url_filename(self):
        for exts in self.index['extensions'].values():
            for item in exts:
                self.assertEqual(os.path.basename(item['downloadUrl']), item['filename'],
                                 "Filename must match last segment of downloadUrl")

    def test_filename_duplicates(self):
        filenames = []
        for exts in self.index['extensions'].values():
            for item in exts:
                filenames.append(item['filename'])
        filename_seen = set()
        dups = []
        for f in filenames:
            if f in filename_seen:
                dups.append(f)
            filename_seen.add(f)
        self.assertFalse(dups, "Duplicate filenames found {}".format(dups))

    @unittest.skipUnless(os.getenv('CI'), 'Skipped as not running on CI')
    def test_checksums(self):
        for exts in self.index['extensions'].values():
            for item in exts:
                ext_file = get_whl_from_url(item['downloadUrl'], item['filename'],
                                            self.whl_cache_dir, self.whl_cache)
                computed_hash = get_sha256sum(ext_file)
                self.assertEqual(computed_hash, item['sha256Digest'],
                                 "Computed {} but found {} in index for {}".format(computed_hash,
                                                                                   item['sha256Digest'],
                                                                                   item['filename']))

    @unittest.skipUnless(os.getenv('CI'), 'Skipped as not running on CI')
    def test_metadata(self):
        self.maxDiff = None
        extensions_dir = tempfile.mkdtemp()
        try:
            for ext_name, exts in self.index['extensions'].items():
                for item in exts:
                    ext_dir = tempfile.mkdtemp(dir=extensions_dir)
                    ext_file = get_whl_from_url(item['downloadUrl'], item['filename'],
                                                self.whl_cache_dir, self.whl_cache)
                    metadata = get_ext_metadata(ext_dir, ext_file, ext_name)
                    self.assertDictEqual(metadata, item['metadata'],
                                         "Metadata for {} in index doesn't match the expected of: \n"
                                         "{}".format(item['filename'], json.dumps(metadata, indent=2, sort_keys=True,
                                                                                  separators=(',', ': '))))
        finally:
            # remove the extracted wheels even when an assertion above fails;
            # the original only cleaned up on success and leaked the temp tree
            shutil.rmtree(extensions_dir)
184+
185+
186+
# Allow running directly, e.g.: python scripts/ci/test_index.py -v
if __name__ == '__main__':
    unittest.main()

src/index.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
"contacts": [
1515
{
1616
"email": "[email protected]",
17-
"name": "Microsoft Corporation.",
17+
"name": "Microsoft Corporation",
1818
"role": "author"
1919
}
2020
],

0 commit comments

Comments
 (0)