blob: 66e0e126a67e89ad32c1ff788018bdb1bcc99ddb [file] [log] [blame]
<!DOCTYPE html>
<html>
<head>
<!-- Image Capture Browser Test -->
</head>
<body>
<script type="text/javascript" src="webrtc_test_utilities.js"></script>
<script>
// Maximum capture width requested from getUserMedia().
const WIDTH = 640;
// getUserMedia() video constraints shared by all tests below.
// (was `var`; `const` matches WIDTH above and it is never reassigned)
const CONSTRAINTS = { width: { max: WIDTH } };
// Returns a Promise resolved with |object| after a delay of |delayInMs|.
// Returns a Promise resolved with |object| after a delay of |delayInMs|.
function delayedResolver(object, delayInMs) {
  // setTimeout forwards extra arguments to its callback, so |object| is
  // handed straight to resolve after the delay.
  return new Promise((resolve) => setTimeout(resolve, delayInMs, object));
}
// Runs an ImageCapture.getPhotoCapabilities() and expects it to succeed.
// Runs an ImageCapture.getPhotoCapabilities() and expects it to succeed.
function testCreateAndGetPhotoCapabilitiesSucceeds() {
  (async () => {
    try {
      const stream =
          await navigator.mediaDevices.getUserMedia({"video": CONSTRAINTS});
      const [videoTrack] = stream.getVideoTracks();
      assertEquals('video', videoTrack.kind);
      const capturer = new ImageCapture(videoTrack);
      // There's nothing to inspect in the result since it varies per device;
      // resolving at all is the success condition.
      await capturer.getPhotoCapabilities();
      reportTestSuccess();
    } catch (err) {
      failTest(err.toString());
    }
  })();
}
// Runs an ImageCapture.getPhotoSettings() and expects it to succeed.
// Runs an ImageCapture.getPhotoSettings() and expects it to succeed.
function testCreateAndGetPhotoSettingsSucceeds() {
  (async () => {
    try {
      const stream =
          await navigator.mediaDevices.getUserMedia({"video": CONSTRAINTS});
      const [videoTrack] = stream.getVideoTracks();
      assertEquals('video', videoTrack.kind);
      const capturer = new ImageCapture(videoTrack);
      // The settings themselves vary per device, so there is nothing to
      // check beyond the promise resolving.
      await capturer.getPhotoSettings();
      reportTestSuccess();
    } catch (err) {
      failTest(err.toString());
    }
  })();
}
// Runs an ImageCapture.takePhoto() and expects it to succeed.
// Runs an ImageCapture.takePhoto() and expects it to succeed.
function testCreateAndTakePhotoSucceeds() {
  (async () => {
    try {
      const stream =
          await navigator.mediaDevices.getUserMedia({"video": CONSTRAINTS});
      const [videoTrack] = stream.getVideoTracks();
      assertEquals('video', videoTrack.kind);
      const photo = await new ImageCapture(videoTrack).takePhoto();
      // A captured photo must carry a MIME type and a non-empty payload.
      assertTrue(photo.type != undefined);
      assertNotEquals(0, photo.size);
      reportTestSuccess();
    } catch (err) {
      failTest(err.toString());
    }
  })();
}
// Runs an ImageCapture.takePhoto() and expects it to get rejected.
// Runs an ImageCapture.takePhoto() and expects it to get rejected.
// Intended for configurations where photo capture must fail.
function testCreateAndTakePhotoIsRejected() {
navigator.mediaDevices.getUserMedia({"video" : CONSTRAINTS})
.then(stream => {
assertEquals('video', stream.getVideoTracks()[0].kind);
let imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
return imageCapturer.takePhoto();
})
// Two-argument then(): the rejection handler is the SUCCESS path here.
// NOTE(review): a getUserMedia() or assert failure above also lands in the
// rejection handler and reports success — the test cannot tell a takePhoto()
// rejection apart from an earlier failure; presumably acceptable here.
.then(blob => {
failTest('Expected promise to get rejected but it was fulfilled');
}, err => {
reportTestSuccess();
})
// Catches exceptions thrown inside the two handlers above.
.catch(err => {
return failTest(err.toString());
});
}
// Runs an ImageCapture.grabFrame() and expects it to succeed.
// Runs an ImageCapture.grabFrame() and expects it to succeed.
function testCreateAndGrabFrameSucceeds() {
  (async () => {
    try {
      const stream =
          await navigator.mediaDevices.getUserMedia({"video": CONSTRAINTS});
      const [videoTrack] = stream.getVideoTracks();
      assertEquals('video', videoTrack.kind);
      const frame = await new ImageCapture(videoTrack).grabFrame();
      // On Android, depending on the device orientation, the grabbed frame
      // might appear rotated, so WIDTH may show up as either dimension.
      assertTrue(frame.width == WIDTH || frame.height == WIDTH);
      reportTestSuccess();
    } catch (err) {
      failTest(err.toString());
    }
  })();
}
// Runs a MediaStreamTrack.getCapabilities().
// Runs a MediaStreamTrack.getCapabilities().
function testCreateAndGetTrackCapabilities() {
  (async () => {
    try {
      const stream =
          await navigator.mediaDevices.getUserMedia({"video": CONSTRAINTS});
      const [videoTrack] = stream.getVideoTracks();
      assertEquals('video', videoTrack.kind);
      const capturer = new ImageCapture(videoTrack);
      // TODO(mcasas): Before accessing synchronous track APIs we need a delay;
      // use instead a round trip of capabilities: https://crbug.com/711524.
      await capturer.getPhotoCapabilities();
      // There's nothing to check in the result since it varies per device.
      capturer.track.getCapabilities();
      reportTestSuccess();
    } catch (err) {
      failTest(err.toString());
    }
  })();
}
// Runs an MediaStreamTrack.getSettings().
// Runs an MediaStreamTrack.getSettings().
function testCreateAndGetTrackSettings() {
  (async () => {
    try {
      const stream =
          await navigator.mediaDevices.getUserMedia({"video": CONSTRAINTS});
      const [videoTrack] = stream.getVideoTracks();
      assertEquals('video', videoTrack.kind);
      const capturer = new ImageCapture(videoTrack);
      // TODO(mcasas): Before accessing synchronous track APIs we need a delay;
      // use instead a round trip of capabilities: https://crbug.com/711524.
      await capturer.getPhotoCapabilities();
      // There's nothing to check in the result since it varies per device.
      capturer.track.getSettings();
      reportTestSuccess();
    } catch (err) {
      failTest(err.toString());
    }
  })();
}
// Tries to read, set and read back the zoom capability if available.
// Tries to read, set and read back the zoom capability if available.
function testManipulateZoom() {
  (async () => {
    try {
      const stream =
          await navigator.mediaDevices.getUserMedia({"video": CONSTRAINTS});
      assertEquals('video', stream.getVideoTracks()[0].kind);
      const imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
      // TODO(mcasas): Before accessing synchronous track APIs we need a delay,
      // use instead a round trip of capabilities: https://crbug.com/711524.
      await imageCapturer.getPhotoCapabilities();

      const trackCapabilities = imageCapturer.track.getCapabilities();
      if (trackCapabilities.zoom === undefined) {
        console.log('zoom not supported, skipping test');
        reportTestSuccess();
        // BUGFIX: the old promise chain fell through to the assertions after
        // reporting success, dereferencing getConstraints().advanced[0] (which
        // is undefined when nothing was applied) and calling failTest() on a
        // test that had already passed. Returning here truly short-circuits.
        return;
      }
      const currentZoom = imageCapturer.track.getSettings().zoom;
      // Bump zoom by one step, clamped to the supported maximum.
      const newZoom = Math.min(currentZoom + trackCapabilities.zoom.step,
                               trackCapabilities.zoom.max);
      console.log("Setting zoom from " + currentZoom + " to " + newZoom);
      const zoomTolerance = trackCapabilities.zoom.step / 10;
      await imageCapturer.track.applyConstraints(
          {advanced: [{zoom: newZoom}]});
      // The applied constraint must be reported back verbatim...
      assertEquals(newZoom,
                   imageCapturer.track.getConstraints().advanced[0].zoom);
      // ...and the effective setting must be within a fraction of one step.
      assertTrue(Math.abs(newZoom - imageCapturer.track.getSettings().zoom) <
                 zoomTolerance);
      reportTestSuccess();
    } catch (err) {
      failTest(err.toString());
    }
  })();
}
// Tries to read, set and read back the exposureTime capability if available.
// Tries to read, set and read back the exposureTime capability if available.
function testManipulateExposureTime() {
  (async () => {
    try {
      const stream =
          await navigator.mediaDevices.getUserMedia({"video": CONSTRAINTS});
      assertEquals('video', stream.getVideoTracks()[0].kind);
      const imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
      // TODO(mcasas): Before accessing synchronous track APIs we need a delay,
      // use instead a round trip of capabilities: https://crbug.com/711524.
      await imageCapturer.getPhotoCapabilities();

      const trackCapabilities = imageCapturer.track.getCapabilities();
      if (trackCapabilities.exposureTime === undefined) {
        console.log('exposureTime not supported, skipping test');
        reportTestSuccess();
        // BUGFIX: the old chain fell through to the assertions after the
        // skip, dereferencing getConstraints().advanced[0] (undefined) and
        // failing a test that had already reported success.
        return;
      }
      const currentExposureTime =
          imageCapturer.track.getSettings().exposureTime;
      // Bump exposureTime by one step, clamped to the supported maximum.
      const newExposureTime =
          Math.min(currentExposureTime + trackCapabilities.exposureTime.step,
                   trackCapabilities.exposureTime.max);
      console.log("Setting exposureTime from " + currentExposureTime +
                  " to " + newExposureTime);
      const exposureTimeTolerance = trackCapabilities.exposureTime.step / 10;
      // Debug logging only (previously these leaked as implicit globals:
      // |currentExposureMode| and |exposureModeCapabilities| had no
      // declaration).
      console.log(" exposureMode == " +
                  imageCapturer.track.getSettings().exposureMode);
      console.log(" exposureModeCapabilities == " +
                  trackCapabilities.exposureMode);
      // Apply manual mode together with the new exposureTime.
      await imageCapturer.track.applyConstraints({
        advanced: [{exposureMode: "manual", exposureTime: newExposureTime}]
      });
      // The applied constraint must be reported back verbatim...
      assertEquals(
          newExposureTime,
          imageCapturer.track.getConstraints().advanced[0].exposureTime);
      // ...and the effective setting must be within a fraction of one step.
      assertTrue(
          Math.abs(newExposureTime -
                   imageCapturer.track.getSettings().exposureTime) <
          exposureTimeTolerance);
      reportTestSuccess();
    } catch (err) {
      failTest(err.toString());
    }
  })();
}
// Tries to read, set and read back the focusDistance capability if available.
// Tries to read, set and read back the focusDistance capability if available.
function testManipulateFocusDistance() {
  (async () => {
    try {
      const stream =
          await navigator.mediaDevices.getUserMedia({"video": CONSTRAINTS});
      assertEquals('video', stream.getVideoTracks()[0].kind);
      const imageCapturer = new ImageCapture(stream.getVideoTracks()[0]);
      // TODO(mcasas): Before accessing synchronous track APIs we need a delay,
      // use instead a round trip of capabilities: https://crbug.com/711524.
      await imageCapturer.getPhotoCapabilities();

      const trackCapabilities = imageCapturer.track.getCapabilities();
      if (trackCapabilities.focusDistance === undefined) {
        console.log('focusDistance not supported, skipping test');
        reportTestSuccess();
        // BUGFIX: the old chain fell through to the assertions after the
        // skip, dereferencing getConstraints().advanced[0] (undefined) and
        // failing a test that had already reported success.
        return;
      }
      const currentFocusDistance =
          imageCapturer.track.getSettings().focusDistance;
      // Bump focusDistance by one step, clamped to the supported maximum.
      const newFocusDistance =
          Math.min(currentFocusDistance + trackCapabilities.focusDistance.step,
                   trackCapabilities.focusDistance.max);
      console.log("Setting focusDistance from " + currentFocusDistance +
                  " to " + newFocusDistance);
      const focusDistanceTolerance = trackCapabilities.focusDistance.step / 10;
      // Debug logging only (previously these leaked as implicit globals:
      // |currentFocusMode| and |focusModeCapabilities| had no declaration).
      console.log(" focusMode == " +
                  imageCapturer.track.getSettings().focusMode);
      console.log(" focusModeCapabilities == " +
                  trackCapabilities.focusMode);
      // Apply manual mode together with the new focusDistance.
      await imageCapturer.track.applyConstraints({
        advanced: [{focusMode: "manual", focusDistance: newFocusDistance}]
      });
      // The applied constraint must be reported back verbatim...
      assertEquals(
          newFocusDistance,
          imageCapturer.track.getConstraints().advanced[0].focusDistance);
      // ...and the effective setting must be within a fraction of one step.
      assertTrue(
          Math.abs(newFocusDistance -
                   imageCapturer.track.getSettings().focusDistance) <
          focusDistanceTolerance);
      reportTestSuccess();
    } catch (err) {
      failTest(err.toString());
    }
  })();
}
</script>
</body>
</html>