Skip to content

Commit 42fedc1

Browse files
committed
Add blob check
1 parent 14c4de1 commit 42fedc1

File tree

3 files changed

+65
-31
lines changed

3 files changed

+65
-31
lines changed

cli/blobconverter/__init__.py

Lines changed: 58 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import hashlib
22
import json
33
import os
4+
import struct
45
import sys
56
import tempfile
67
import urllib
@@ -152,6 +153,23 @@ def show_progress(curr, max):
152153
sys.stdout.flush()
153154

154155

def is_valid_blob(blob_path):
    """Check that a compiled .blob file exists on disk and is complete.

    The blob header stores the expected total file size as a little-endian
    unsigned 32-bit integer at byte offset 56; an interrupted or corrupted
    download leaves a file whose actual size differs from that field.
    (NOTE(review): offset 56 is taken from the original code — confirm
    against the MyriadX blob format specification.)

    Args:
        blob_path: path (str or Path) to the blob file to validate.

    Returns:
        True if the file exists and its on-disk size matches the size
        recorded in its header, False otherwise.
    """
    converted_path = Path(blob_path)
    if not converted_path.exists():
        return False

    try:
        # Read-only is sufficient for validation; the original 'rb+' would
        # also fail on read-only filesystems for no benefit.
        with converted_path.open('rb') as f:
            f.seek(56)
            # `<` = little-endian, `I` = unsigned 4-byte int
            expected_size = struct.unpack("<I", f.read(4))[0]
            f.seek(0, os.SEEK_END)
            actual_size = f.tell()

        return expected_size == actual_size
    except (OSError, struct.error):
        # Truncated header (file shorter than 60 bytes) or an unreadable
        # file means the blob is invalid; never swallow other exceptions
        # (the original bare `except:` hid even KeyboardInterrupt).
        return False
172+
155173
# https://stackoverflow.com/a/54745657/5494277
156174
class __S3ProgressPercentage:
157175
def __init__(self, o_s3bucket, key_name):
@@ -225,8 +243,10 @@ def compile_blob(blob_name, version=None, shaves=None, req_data=None, req_files=
225243
url_params = {
226244
'version': version,
227245
'no_cache': not use_cache,
228-
'dry': dry,
229246
}
247+
if dry:
248+
url_params["dry"] = True
249+
230250
data = {
231251
"myriad_shaves": str(shaves),
232252
"myriad_params_advanced": ' '.join(compile_params),
@@ -236,41 +256,49 @@ def compile_blob(blob_name, version=None, shaves=None, req_data=None, req_files=
236256
**req_data,
237257
}
238258

239-
hash_obj = hashlib.sha256(json.dumps({**url_params, **data}).encode())
240-
for file_path in req_files.values():
241-
with open(file_path, 'rb') as f:
242-
hash_obj.update(f.read())
243-
req_hash = hash_obj.hexdigest()
259+
if not dry:
260+
hash_obj = hashlib.sha256(json.dumps({**url_params, **data}).encode())
261+
for file_path in req_files.values():
262+
with open(file_path, 'rb') as f:
263+
hash_obj.update(f.read())
264+
req_hash = hash_obj.hexdigest()
244265

245-
new_cache_config = {
246-
**cache_config,
247-
req_hash: str(blob_path),
248-
}
266+
new_cache_config = {
267+
**cache_config,
268+
req_hash: str(blob_path),
269+
}
270+
271+
if use_cache:
272+
cached_path = None
273+
if req_hash in cache_config:
274+
cached_path = cache_config[req_hash]
249275

250-
if use_cache:
251-
if req_hash in cache_config:
252-
return cache_config[req_hash]
276+
if blob_path.exists():
277+
cached_path = str(blob_path)
253278

254-
if blob_path.exists():
255-
return blob_path
279+
if cached_path is not None:
280+
if is_valid_blob(cached_path):
281+
return cached_path
282+
else:
283+
print("Cached blob is invalid, will download a new one from API.")
256284

257-
cache_config_path.parent.mkdir(parents=True, exist_ok=True)
258-
with cache_config_path.open('w') as f:
259-
json.dump(new_cache_config, f)
285+
cache_config_path.parent.mkdir(parents=True, exist_ok=True)
286+
with cache_config_path.open('w') as f:
287+
json.dump(new_cache_config, f)
260288

261-
if not __defaults["silent"]:
262-
print("Downloading {}...".format(blob_path))
289+
if not __defaults["silent"]:
290+
print("Downloading {}...".format(blob_path))
263291

264-
if None in (s3, bucket):
265-
__init_s3()
292+
if None in (s3, bucket):
293+
__init_s3()
266294

267-
try:
268-
if not download_ir:
269-
__download_from_s3_bucket("{}.blob".format(req_hash), blob_path)
270-
return blob_path
271-
except botocore.exceptions.ClientError as ex:
272-
if ex.response['Error']['Code'] not in ('NoSuchKey', '404'):
273-
raise ex
295+
try:
296+
if not download_ir:
297+
__download_from_s3_bucket("{}.blob".format(req_hash), blob_path)
298+
return blob_path
299+
except botocore.exceptions.ClientError as ex:
300+
if ex.response['Error']['Code'] not in ('NoSuchKey', '404'):
301+
raise ex
274302

275303
files = {
276304
name: open(path, 'rb') for name, path in req_files.items()
@@ -280,7 +308,7 @@ def compile_blob(blob_name, version=None, shaves=None, req_data=None, req_files=
280308
"{}/compile?{}".format(url, urllib.parse.urlencode(url_params)),
281309
data=data,
282310
files=files,
283-
stream=True,
311+
stream=not dry,
284312
)
285313
if response.status_code == 400:
286314
try:

cli/blobconverter/test.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,12 @@
1212
result = blobconverter.from_zoo(name="mobilenet-ssd", use_cache=use_cache, dry=True)
1313
print(result)
1414

15+
result = blobconverter.from_zoo(name="mobilenet-ssd", use_cache=use_cache, dry=False)
16+
print(result)
17+
18+
result = blobconverter.from_zoo(name="mobilenet-ssd", use_cache=True, dry=False)
19+
print(result)
20+
1521
result = blobconverter.from_openvino(
1622
xml="../../face-detection-retail-0004.xml", # get from https://storage.openvinotoolkit.org/repositories/open_model_zoo/2021.2/models_bin/3/face-detection-retail-0004/FP16/face-detection-retail-0004.xml
1723
bin="../../face-detection-retail-0004.bin", # get from https://storage.openvinotoolkit.org/repositories/open_model_zoo/2021.2/models_bin/3/face-detection-retail-0004/FP16/face-detection-retail-0004.bin

cli/setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
setup(
66
name='blobconverter',
7-
version='1.2.5',
7+
version='1.2.6',
88
description='The tool that allows you to convert neural networks to MyriadX blob',
99
long_description=io.open("README.md", encoding="utf-8").read(),
1010
long_description_content_type="text/markdown",

0 commit comments

Comments (0)