Skip to content

Commit b8d937e

Browse files
author
Nikhil Thorat
authored
Add error messaging when the model.json parsing fails. (tensorflow#1599)
UX: If it has a .pb extension, send a nice error message about how to convert to json. Also: - Removes the ability to load binary files. - Removes the string[] option to `io.browserHTTPRequest` On a request to a .pb file: ![image](https://user-images.githubusercontent.com/1100749/53658725-9e601980-3c27-11e9-9c47-0e79d7cd3d0f.png) An invalid request with OK status: ![image](https://user-images.githubusercontent.com/1100749/53658776-c780aa00-3c27-11e9-9315-2ab89a230d72.png) OLD request to url with invalid status error: ![image](https://user-images.githubusercontent.com/1100749/53659067-8ccb4180-3c28-11e9-8655-7265a22613ed.png) NEW request to url with invalid status error: ![image](https://user-images.githubusercontent.com/1100749/53660193-46c3ad00-3c2b-11e9-8336-b95ac09cc7a2.png) Verified we can load models from GCP by linking this to union.
1 parent 5cdd7f5 commit b8d937e

File tree

3 files changed

+40
-433
lines changed

3 files changed

+40
-433
lines changed

src/io/browser_http.ts

Lines changed: 33 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -32,10 +32,10 @@ const OCTET_STREAM_MIME_TYPE = 'application/octet-stream';
3232
const JSON_TYPE = 'application/json';
3333

3434
export class BrowserHTTPRequest implements IOHandler {
35-
protected readonly path: string|string[];
35+
protected readonly path: string;
3636
protected readonly requestInit: RequestInit;
3737

38-
private readonly fetchFunc: Function;
38+
private readonly fetchFunc: (path: string, init?: RequestInit) => Response;
3939

4040
readonly DEFAULT_METHOD = 'POST';
4141

@@ -44,7 +44,7 @@ export class BrowserHTTPRequest implements IOHandler {
4444
private readonly weightPathPrefix: string;
4545
private readonly onProgress: OnProgressCallback;
4646

47-
constructor(path: string|string[], loadOptions?: LoadOptions) {
47+
constructor(path: string, loadOptions?: LoadOptions) {
4848
if (loadOptions == null) {
4949
loadOptions = {};
5050
}
@@ -134,7 +134,7 @@ export class BrowserHTTPRequest implements IOHandler {
134134
'model.weights.bin');
135135
}
136136

137-
const response = await this.getFetchFunc()(this.path as string, init);
137+
const response = await this.getFetchFunc()(this.path, init);
138138

139139
if (response.ok) {
140140
return {
@@ -157,57 +157,36 @@ export class BrowserHTTPRequest implements IOHandler {
157157
* @returns The loaded model artifacts (if loading succeeds).
158158
*/
159159
async load(): Promise<ModelArtifacts> {
160-
return Array.isArray(this.path) ? this.loadBinaryModel() :
161-
this.loadJSONModel();
162-
}
163-
164-
/**
165-
* Loads the model topology file and build the in memory graph of the model.
166-
*/
167-
private async loadBinaryTopology(): Promise<ArrayBuffer> {
168-
const response = await this.getFetchFunc()(this.path[0], this.requestInit);
169-
170-
if (!response.ok) {
171-
throw new Error(`Request to ${this.path[0]} failed with error: ${
172-
response.statusText}`);
173-
}
174-
return await response.arrayBuffer();
175-
}
176-
177-
protected async loadBinaryModel(): Promise<ModelArtifacts> {
178-
const graphPromise = this.loadBinaryTopology();
179-
const manifestPromise =
180-
await this.getFetchFunc()(this.path[1], this.requestInit);
181-
if (!manifestPromise.ok) {
182-
throw new Error(`Request to ${this.path[1]} failed with error: ${
183-
manifestPromise.statusText}`);
184-
}
185-
186-
const results = await Promise.all([graphPromise, manifestPromise]);
187-
const [modelTopology, weightsManifestResponse] = results;
188-
189-
const weightsManifest =
190-
await weightsManifestResponse.json() as WeightsManifestConfig;
191-
192-
let weightSpecs: WeightsManifestEntry[];
193-
let weightData: ArrayBuffer;
194-
if (weightsManifest != null) {
195-
const results = await this.loadWeights(weightsManifest);
196-
[weightSpecs, weightData] = results;
197-
}
198-
199-
return {modelTopology, weightSpecs, weightData};
200-
}
201-
202-
protected async loadJSONModel(): Promise<ModelArtifacts> {
203160
const modelConfigRequest =
204-
await this.getFetchFunc()(this.path as string, this.requestInit);
161+
await this.getFetchFunc()(this.path, this.requestInit);
205162

206163
if (!modelConfigRequest.ok) {
207-
throw new Error(`Request to ${this.path} failed with error: ${
208-
modelConfigRequest.statusText}`);
164+
throw new Error(
165+
`Request to ${this.path} failed with status code ` +
166+
`${modelConfigRequest.status}. Please verify this URL points to ` +
167+
`the model JSON of the model to load.`);
168+
}
169+
let modelConfig;
170+
try {
171+
modelConfig = await modelConfigRequest.json();
172+
} catch (e) {
173+
let message = `Failed to parse model JSON of response from ${this.path}.`;
174+
// TODO(nsthorat): Remove this after some time when we're comfortable that
175+
// .pb files are mostly gone.
176+
if (this.path.endsWith('.pb')) {
177+
message += ' Your path contains a .pb file extension. ' +
178+
'Support for .pb models has been removed in TensorFlow.js 1.0 ' +
179+
'in favor of .json models. You can re-convert your Python ' +
180+
'TensorFlow model using the TensorFlow.js 1.0 conversion scripts ' +
181+
'or you can convert your .pb models with the \'pb2json\' ' +
182+
'NPM script in the tensorflow/tfjs-converter repository.';
183+
} else {
184+
message += ' Please make sure the server is serving valid ' +
185+
'JSON for this request.';
186+
}
187+
throw new Error(message);
209188
}
210-
const modelConfig = await modelConfigRequest.json();
189+
211190
const modelTopology = modelConfig['modelTopology'];
212191
const weightsManifest = modelConfig['weightsManifest'];
213192

@@ -292,7 +271,7 @@ export function isHTTPScheme(url: string): boolean {
292271
}
293272

294273
export const httpRequestRouter: IORouter =
295-
(url: string|string[], onProgress?: OnProgressCallback) => {
274+
(url: string, onProgress?: OnProgressCallback) => {
296275
if (typeof fetch === 'undefined') {
297276
// browserHTTPRequest uses `fetch`, if one wants to use it in node.js
298277
// they have to setup a global fetch polyfill.
@@ -433,9 +412,7 @@ IORouterRegistry.registerLoadRouter(httpRequestRouter);
433412
* main()
434413
* ```
435414
*
436-
* @param path A single URL path or an Array of URL paths.
437-
* Currently, only a single URL path is supported. Array input is reserved
438-
* for future development.
415+
* @param path A URL path to the model.
439416
* Can be an absolute HTTP path (e.g.,
440417
* 'http://localhost:8000/model-upload)') or a relative path (e.g.,
441418
* './model-upload').
@@ -459,6 +436,6 @@ IORouterRegistry.registerLoadRouter(httpRequestRouter);
459436
* @returns An instance of `IOHandler`.
460437
*/
461438
export function browserHTTPRequest(
462-
path: string|string[], loadOptions?: LoadOptions): IOHandler {
439+
path: string, loadOptions?: LoadOptions): IOHandler {
463440
return new BrowserHTTPRequest(path, loadOptions);
464441
}

0 commit comments

Comments
 (0)