gecko/content/canvas/test/webgl/conformance/invalid-UTF-16.html

<!--
Copyright (c) 2010 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<link rel="stylesheet" href="../resources/js-test-style.css">
<script src="../resources/js-test-pre.js"></script>
<script src="resources/webgl-test.js"></script>
</head>
<body>
<p id="description"></p>
<div id="console"></div>
<script>
if (window.initNonKhronosFramework) {
window.initNonKhronosFramework(false);
}
</script>
<script>
description('This test verifies that the internal conversion from UTF-16 to UTF-8 is robust to invalid inputs. Any DOM entry point that converts an incoming string to UTF-8 could be used for this test.');
var array = [];
array.push(String.fromCharCode(0x48)); // H
array.push(String.fromCharCode(0x69)); // i
array.push(String.fromCharCode(0xd87e)); // Bogus: unpaired lead (high) surrogate
var string = array.join('');
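// Illustrative aside (not part of the original test): a well-formed
// surrogate pair combines a lead unit with a trail unit, e.g.
//   String.fromCharCode(0xd87e, 0xdc04) // '\ud87e\udc04', i.e. U+2F804
// The lone 0xd87e above has no trail unit, so the string is
// ill-formed UTF-16.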
// To make this test independent of WebGL, the following approaches
// were attempted:
// - Send a string to console.log
// - Submit a mailto: form containing a text input with the bogus
//   string
// The first code path does not perform a UTF-8 conversion of the
// incoming string unless Console::shouldPrintExceptions() returns
// true. The second seems to sanitize the form's input before
// converting it to a UTF-8 string.
var gl = create3DContext(null);
var program = gl.createProgram();
// Passing the malformed string through a GL entry point exercises the
// internal UTF-16 to UTF-8 conversion; the test passes if this does
// not crash.
gl.bindAttribLocation(program, 0, string);
testPassed("bindAttribLocation with invalid UTF-16 did not crash");
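// As the description notes, any string-taking entry point could
// exercise the same conversion path; for example,
//   gl.getAttribLocation(program, string)
// would be an equally plausible target (a hedged suggestion, not part
// of the original test).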
successfullyParsed = true;
</script>
<script src="../resources/js-test-post.js"></script>
</body>
</html>