Diffstat (limited to 'cli/tests/integration_tests.rs')
-rw-r--r--  cli/tests/integration_tests.rs  246
1 file changed, 0 insertions, 246 deletions
diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs
index 61893ab21..5830f8f20 100644
--- a/cli/tests/integration_tests.rs
+++ b/cli/tests/integration_tests.rs
@@ -6,12 +6,9 @@ use deno_core::url;
use deno_runtime::deno_fetch::reqwest;
use deno_runtime::deno_websocket::tokio_tungstenite;
use std::io::{BufRead, Write};
-use std::path::Path;
-use std::path::PathBuf;
use std::process::Command;
use tempfile::TempDir;
use test_util as util;
-use walkdir::WalkDir;
macro_rules! itest(
($name:ident {$( $key:ident: $value:expr,)*}) => {
@@ -5193,249 +5190,6 @@ fn denort_direct_use_error() {
assert!(!status.success());
}
-fn concat_bundle(
- files: Vec<(PathBuf, String)>,
- bundle_path: &Path,
- init: String,
-) -> String {
- let bundle_url = url::Url::from_file_path(bundle_path).unwrap().to_string();
-
- let mut bundle = init.clone();
- let mut bundle_line_count = init.lines().count() as u32;
- let mut source_map = sourcemap::SourceMapBuilder::new(Some(&bundle_url));
-
- // In classic workers, `importScripts()` performs an actual import.
- // However, we don't implement that function in Deno as we want to enforce
- // the use of ES6 modules.
- // To work around this, we:
- // 1. Define `importScripts()` as a no-op (code below)
- // 2. Capture its parameter from the source code and add it to the list of
- // files to concatenate. (see `web_platform_tests()`)
- bundle.push_str("function importScripts() {}\n");
- bundle_line_count += 1;
-
- for (path, text) in files {
- let path = std::fs::canonicalize(path).unwrap();
- let url = url::Url::from_file_path(path).unwrap().to_string();
- let src_id = source_map.add_source(&url);
- source_map.set_source_contents(src_id, Some(&text));
-
- for (line_index, line) in text.lines().enumerate() {
- bundle.push_str(line);
- bundle.push('\n');
- source_map.add_raw(
- bundle_line_count,
- 0,
- line_index as u32,
- 0,
- Some(src_id),
- None,
- );
-
- bundle_line_count += 1;
- }
- bundle.push('\n');
- bundle_line_count += 1;
- }
-
- let mut source_map_buf: Vec<u8> = vec![];
- source_map
- .into_sourcemap()
- .to_writer(&mut source_map_buf)
- .unwrap();
-
- bundle.push_str("//# sourceMappingURL=data:application/json;base64,");
- let encoded_map = base64::encode(source_map_buf);
- bundle.push_str(&encoded_map);
-
- bundle
-}
-
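// Not part of this diff: a minimal, hypothetical sketch of how a bundle
// produced by `concat_bundle` above could be inspected, assuming the same
// `base64` and `sourcemap` crates the removed code uses. The bundle ends with
// an inline `//# sourceMappingURL=data:application/json;base64,<payload>`
// comment, so the map can be recovered by stripping that prefix from the last
// line and decoding the payload.
fn decode_inline_source_map(bundle: &str) -> Option<sourcemap::SourceMap> {
  let prefix = "//# sourceMappingURL=data:application/json;base64,";
  // The source map comment is the last line of the bundle; search from the end.
  let encoded = bundle.rsplit('\n').find_map(|line| line.strip_prefix(prefix))?;
  let bytes = base64::decode(encoded).ok()?;
  sourcemap::SourceMap::from_slice(&bytes).ok()
}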
-// TODO(lucacasonato): DRY with tsc_config.rs
-/// Convert a jsonc library's `JsonValue` to a serde `Value`.
-fn jsonc_to_serde(j: jsonc_parser::JsonValue) -> serde_json::Value {
- use jsonc_parser::JsonValue;
- use serde_json::Value;
- use std::str::FromStr;
- match j {
- JsonValue::Array(arr) => {
- let vec = arr.into_iter().map(jsonc_to_serde).collect();
- Value::Array(vec)
- }
- JsonValue::Boolean(bool) => Value::Bool(bool),
- JsonValue::Null => Value::Null,
- JsonValue::Number(num) => {
- let number =
- serde_json::Number::from_str(&num).expect("could not parse number");
- Value::Number(number)
- }
- JsonValue::Object(obj) => {
- let mut map = serde_json::map::Map::new();
- for (key, json_value) in obj.into_iter() {
- map.insert(key, jsonc_to_serde(json_value));
- }
- Value::Object(map)
- }
- JsonValue::String(str) => Value::String(str),
- }
-}
-
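// Not part of this diff: a minimal sketch of the jsonc-to-serde round trip the
// removed test performs below, reusing `jsonc_to_serde` and the same
// `jsonc_parser::parse_to_value` call (which returns
// `Result<Option<JsonValue>, _>` in the version used here). `parse_jsonc` is a
// hypothetical helper name.
fn parse_jsonc(text: &str) -> serde_json::Value {
  let jsonc = jsonc_parser::parse_to_value(text)
    .expect("invalid jsonc")
    .expect("empty jsonc document");
  jsonc_to_serde(jsonc)
}

// e.g. parse_jsonc(r#"{ /* comments allowed */ "streams": ["readable-streams"] }"#)
// yields the plain JSON object {"streams": ["readable-streams"]}.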
-#[test]
-fn web_platform_tests() {
- use deno_core::serde::Deserialize;
-
- #[derive(Deserialize)]
- #[serde(untagged)]
- enum WptConfig {
- Simple(String),
- #[serde(rename_all = "camelCase")]
- Options {
- name: String,
- expect_fail: Vec<String>,
- },
- }
-
- let text =
- std::fs::read_to_string(util::tests_path().join("wpt.jsonc")).unwrap();
- let jsonc = jsonc_parser::parse_to_value(&text).unwrap().unwrap();
- let config: std::collections::HashMap<String, Vec<WptConfig>> =
- deno_core::serde_json::from_value(jsonc_to_serde(jsonc)).unwrap();
-
- for (suite_name, includes) in config.into_iter() {
- let suite_path = util::wpt_path().join(suite_name);
- let dir = WalkDir::new(&suite_path)
- .into_iter()
- .filter_map(Result::ok)
- .filter(|e| e.file_type().is_file())
- .filter(|f| {
- let filename = f.file_name().to_str().unwrap();
- filename.ends_with(".any.js")
- || filename.ends_with(".window.js")
- || filename.ends_with(".worker.js")
- })
- .filter_map(|f| {
- let path = f
- .path()
- .strip_prefix(&suite_path)
- .unwrap()
- .to_str()
- .unwrap();
- for cfg in &includes {
- match cfg {
- WptConfig::Simple(name) if path.starts_with(name) => {
- return Some((f.path().to_owned(), vec![]))
- }
- WptConfig::Options { name, expect_fail }
- if path.starts_with(name) =>
- {
- return Some((f.path().to_owned(), expect_fail.to_vec()))
- }
- _ => {}
- }
- }
- None
- });
-
- let testharness_path = util::wpt_path().join("resources/testharness.js");
- let testharness_text = std::fs::read_to_string(&testharness_path)
- .unwrap()
- .replace("output:true", "output:false");
- let testharnessreporter_path =
- util::tests_path().join("wpt_testharnessconsolereporter.js");
- let testharnessreporter_text =
- std::fs::read_to_string(&testharnessreporter_path).unwrap();
-
- for (test_file_path, expect_fail) in dir {
- let test_file_text = std::fs::read_to_string(&test_file_path).unwrap();
- let imports: Vec<(PathBuf, String)> = test_file_text
- .split('\n')
- .into_iter()
- .filter_map(|t| {
- // Hack: we don't implement `importScripts()`, and instead capture the
- // parameter in source code; see `concat_bundle()` for more details.
- if let Some(rest_import_scripts) = t.strip_prefix("importScripts(\"")
- {
- if let Some(import_path) = rest_import_scripts.strip_suffix("\");")
- {
- // testharness.js is already included at the top of the bundle (with its output silenced), so skip re-importing it.
- if import_path != "/resources/testharness.js" {
- return Some(import_path);
- }
- }
- }
- t.strip_prefix("// META: script=")
- })
- .map(|s| {
- let s = if s == "/resources/WebIDLParser.js" {
- "/resources/webidl2/lib/webidl2.js"
- } else {
- s
- };
- if s.starts_with('/') {
- util::wpt_path().join(format!(".{}", s))
- } else {
- test_file_path.parent().unwrap().join(s)
- }
- })
- .map(|path| {
- let text = std::fs::read_to_string(&path).unwrap();
- (path, text)
- })
- .collect();
-
- let mut variants: Vec<&str> = test_file_text
- .split('\n')
- .into_iter()
- .filter_map(|t| t.strip_prefix("// META: variant="))
- .collect();
-
- if variants.is_empty() {
- variants.push("");
- }
-
- for variant in variants {
- let mut files = Vec::with_capacity(3 + imports.len());
- files.push((testharness_path.clone(), testharness_text.clone()));
- files.push((
- testharnessreporter_path.clone(),
- testharnessreporter_text.clone(),
- ));
- files.extend(imports.clone());
- files.push((test_file_path.clone(), test_file_text.clone()));
-
- let mut file = tempfile::Builder::new()
- .prefix("wpt-bundle-")
- .suffix(".js")
- .rand_bytes(5)
- .tempfile()
- .unwrap();
-
- let bundle = concat_bundle(files, file.path(), "".to_string());
- file.write_all(bundle.as_bytes()).unwrap();
-
- let child = util::deno_cmd()
- .current_dir(test_file_path.parent().unwrap())
- .arg("run")
- .arg("--location")
- .arg(&format!("http://web-platform-tests/?{}", variant))
- .arg("-A")
- .arg(file.path())
- .arg(deno_core::serde_json::to_string(&expect_fail).unwrap())
- .arg("--quiet")
- .stdin(std::process::Stdio::piped())
- .spawn()
- .unwrap();
-
- let output = child.wait_with_output().unwrap();
- if !output.status.success() {
- file.keep().unwrap();
- }
- assert!(output.status.success());
- }
- }
- }
-}
-
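// Not part of this diff: a minimal, self-contained sketch of how the untagged
// `WptConfig` entries from wpt.jsonc deserialize, using the plain `serde` and
// `serde_json` crates rather than the deno_core re-exports the removed test
// uses. A bare string maps to `Simple`; an object with `name` and `expectFail`
// (camelCase, per the rename_all attribute) maps to `Options`. The suite names
// and failure entry are illustrative only.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum WptConfig {
  Simple(String),
  #[serde(rename_all = "camelCase")]
  Options { name: String, expect_fail: Vec<String> },
}

fn main() {
  let json = r#"["hr-time", { "name": "streams", "expectFail": ["some-known-failure"] }]"#;
  let includes: Vec<WptConfig> = serde_json::from_str(json).unwrap();
  // Prints both variants, e.g. [Simple("hr-time"), Options { name: "streams", .. }]
  println!("{:?}", includes);
}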
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_resolve_dns() {
use std::collections::BTreeMap;