Diffstat (limited to 'cli')
-rw-r--r--cli/Cargo.toml73
-rw-r--r--cli/build.rs28
-rw-r--r--cli/colors.rs88
-rw-r--r--cli/compilers/js.rs25
-rw-r--r--cli/compilers/json.rs26
-rw-r--r--cli/compilers/mod.rs20
-rw-r--r--cli/compilers/ts.rs804
-rw-r--r--cli/deno_dir.rs43
-rw-r--r--cli/deno_error.rs513
-rw-r--r--cli/diagnostics.rs584
-rw-r--r--cli/disk_cache.rs208
-rw-r--r--cli/file_fetcher.rs1519
-rw-r--r--cli/flags.rs1799
-rw-r--r--cli/fmt_errors.rs315
-rw-r--r--cli/fs.rs192
-rw-r--r--cli/global_timer.rs50
-rw-r--r--cli/http_body.rs89
-rw-r--r--cli/http_util.rs217
-rw-r--r--cli/import_map.rs2076
-rw-r--r--cli/js.rs57
-rw-r--r--cli/js/base64.ts150
-rw-r--r--cli/js/blob.ts178
-rw-r--r--cli/js/blob_test.ts62
-rw-r--r--cli/js/body.ts272
-rw-r--r--cli/js/body_test.ts68
-rw-r--r--cli/js/buffer.ts294
-rw-r--r--cli/js/buffer_test.ts277
-rw-r--r--cli/js/build.ts27
-rw-r--r--cli/js/build_test.ts10
-rw-r--r--cli/js/chmod.ts20
-rw-r--r--cli/js/chmod_test.ts142
-rw-r--r--cli/js/chown.ts27
-rw-r--r--cli/js/chown_test.ts145
-rw-r--r--cli/js/colors.ts40
-rw-r--r--cli/js/compiler.ts667
-rw-r--r--cli/js/console.ts790
-rw-r--r--cli/js/console_table.ts94
-rw-r--r--cli/js/console_test.ts698
-rw-r--r--cli/js/copy_file.ts30
-rw-r--r--cli/js/copy_file_test.ts163
-rw-r--r--cli/js/core.ts6
-rw-r--r--cli/js/custom_event.ts48
-rw-r--r--cli/js/custom_event_test.ts27
-rw-r--r--cli/js/deno.ts119
-rw-r--r--cli/js/diagnostics.ts217
-rw-r--r--cli/js/dir.ts22
-rw-r--r--cli/js/dir_test.ts54
-rw-r--r--cli/js/dispatch.ts110
-rw-r--r--cli/js/dispatch_json.ts86
-rw-r--r--cli/js/dispatch_json_test.ts19
-rw-r--r--cli/js/dispatch_minimal.ts80
-rw-r--r--cli/js/dom_file.ts24
-rw-r--r--cli/js/dom_types.ts625
-rw-r--r--cli/js/dom_util.ts85
-rw-r--r--cli/js/error_stack.ts273
-rw-r--r--cli/js/error_stack_test.ts108
-rw-r--r--cli/js/errors.ts79
-rw-r--r--cli/js/event.ts348
-rw-r--r--cli/js/event_target.ts503
-rw-r--r--cli/js/event_target_test.ts142
-rw-r--r--cli/js/event_test.ts95
-rw-r--r--cli/js/fetch.ts478
-rw-r--r--cli/js/fetch_test.ts357
-rw-r--r--cli/js/file_info.ts91
-rw-r--r--cli/js/file_test.ts103
-rw-r--r--cli/js/files.ts235
-rw-r--r--cli/js/files_test.ts329
-rw-r--r--cli/js/form_data.ts149
-rw-r--r--cli/js/form_data_test.ts179
-rw-r--r--cli/js/format_error.ts9
-rw-r--r--cli/js/get_random_values.ts31
-rw-r--r--cli/js/get_random_values_test.ts51
-rw-r--r--cli/js/globals.ts207
-rw-r--r--cli/js/globals_test.ts104
-rw-r--r--cli/js/headers.ts139
-rw-r--r--cli/js/headers_test.ts331
-rw-r--r--cli/js/io.ts170
-rw-r--r--cli/js/lib.deno_runtime.d.ts2800
-rw-r--r--cli/js/lib.web_assembly.d.ts173
-rw-r--r--cli/js/link.ts19
-rw-r--r--cli/js/link_test.ts115
-rw-r--r--cli/js/location.ts52
-rw-r--r--cli/js/location_test.ts8
-rw-r--r--cli/js/main.ts41
-rw-r--r--cli/js/make_temp_dir.ts35
-rw-r--r--cli/js/make_temp_dir_test.ts66
-rw-r--r--cli/js/metrics.ts28
-rw-r--r--cli/js/metrics_test.ts46
-rw-r--r--cli/js/mixins/dom_iterable.ts82
-rw-r--r--cli/js/mixins/dom_iterable_test.ts79
-rw-r--r--cli/js/mkdir.ts33
-rw-r--r--cli/js/mkdir_test.ts66
-rw-r--r--cli/js/mock_builtin.js2
-rw-r--r--cli/js/net.ts205
-rw-r--r--cli/js/net_test.ts229
-rw-r--r--cli/js/os.ts151
-rw-r--r--cli/js/os_test.ts165
-rw-r--r--cli/js/performance.ts22
-rw-r--r--cli/js/performance_test.ts10
-rw-r--r--cli/js/permissions.ts39
-rw-r--r--cli/js/permissions_test.ts28
-rw-r--r--cli/js/process.ts307
-rw-r--r--cli/js/process_test.ts377
-rw-r--r--cli/js/read_dir.ts34
-rw-r--r--cli/js/read_dir_test.ts84
-rw-r--r--cli/js/read_file.ts29
-rw-r--r--cli/js/read_file_test.ts57
-rw-r--r--cli/js/read_link.ts19
-rw-r--r--cli/js/read_link_test.ts69
-rw-r--r--cli/js/remove.ts32
-rw-r--r--cli/js/remove_test.ts335
-rw-r--r--cli/js/rename.ts24
-rw-r--r--cli/js/rename_test.ts74
-rw-r--r--cli/js/repl.ts197
-rw-r--r--cli/js/request.ts151
-rw-r--r--cli/js/request_test.ts17
-rw-r--r--cli/js/resources.ts19
-rw-r--r--cli/js/resources_test.ts48
-rw-r--r--cli/js/stat.ts73
-rw-r--r--cli/js/stat_test.ts172
-rw-r--r--cli/js/symlink.ts39
-rw-r--r--cli/js/symlink_test.ts80
-rw-r--r--cli/js/test_util.ts262
-rw-r--r--cli/js/text_encoding.ts554
-rw-r--r--cli/js/text_encoding_test.ts193
-rw-r--r--cli/js/timers.ts280
-rw-r--r--cli/js/timers_test.ts291
-rw-r--r--cli/js/tls.ts21
-rw-r--r--cli/js/tls_test.ts25
-rw-r--r--cli/js/truncate.ts34
-rw-r--r--cli/js/truncate_test.ts74
-rw-r--r--cli/js/ts_global.d.ts19
-rw-r--r--cli/js/type_directives.ts91
-rw-r--r--cli/js/types.ts2
-rwxr-xr-xcli/js/unit_test_runner.ts107
-rw-r--r--cli/js/unit_tests.ts65
-rw-r--r--cli/js/url.ts376
-rw-r--r--cli/js/url_search_params.ts297
-rw-r--r--cli/js/url_search_params_test.ts238
-rw-r--r--cli/js/url_test.ts181
-rw-r--r--cli/js/util.ts225
-rw-r--r--cli/js/utime.ts45
-rw-r--r--cli/js/utime_test.ts181
-rw-r--r--cli/js/version.ts28
-rw-r--r--cli/js/version_test.ts8
-rw-r--r--cli/js/window.ts9
-rw-r--r--cli/js/workers.ts193
-rw-r--r--cli/js/write_file.ts76
-rw-r--r--cli/js/write_file_test.ts219
-rw-r--r--cli/lib.rs409
-rw-r--r--cli/main.rs5
-rw-r--r--cli/msg.rs85
-rw-r--r--cli/ops/compiler.rs99
-rw-r--r--cli/ops/dispatch_json.rs111
-rw-r--r--cli/ops/dispatch_minimal.rs110
-rw-r--r--cli/ops/errors.rs56
-rw-r--r--cli/ops/fetch.rs73
-rw-r--r--cli/ops/files.rs136
-rw-r--r--cli/ops/fs.rs525
-rw-r--r--cli/ops/io.rs46
-rw-r--r--cli/ops/metrics.rs21
-rw-r--r--cli/ops/mod.rs25
-rw-r--r--cli/ops/net.rs151
-rw-r--r--cli/ops/os.rs157
-rw-r--r--cli/ops/performance.rs30
-rw-r--r--cli/ops/permissions.rs44
-rw-r--r--cli/ops/process.rs157
-rw-r--r--cli/ops/random.rs24
-rw-r--r--cli/ops/repl.rs50
-rw-r--r--cli/ops/resources.rs14
-rw-r--r--cli/ops/timers.rs42
-rw-r--r--cli/ops/tls.rs76
-rw-r--r--cli/ops/workers.rs227
-rw-r--r--cli/permissions.rs637
-rw-r--r--cli/progress.rs168
-rw-r--r--cli/repl.rs128
-rw-r--r--cli/resolve_addr.rs132
-rw-r--r--cli/resources.rs585
-rw-r--r--cli/shell.rs491
-rw-r--r--cli/signal.rs16
-rw-r--r--cli/source_maps.rs458
-rw-r--r--cli/startup_data.rs59
-rw-r--r--cli/state.rs433
-rw-r--r--cli/test_util.rs77
-rw-r--r--cli/tests/001_hello.js1
-rw-r--r--cli/tests/001_hello.js.out1
-rw-r--r--cli/tests/002_hello.ts1
-rw-r--r--cli/tests/002_hello.ts.out1
-rw-r--r--cli/tests/003_relative_import.ts3
-rw-r--r--cli/tests/003_relative_import.ts.out1
-rw-r--r--cli/tests/004_set_timeout.ts11
-rw-r--r--cli/tests/004_set_timeout.ts.out2
-rw-r--r--cli/tests/005_more_imports.ts11
-rw-r--r--cli/tests/005_more_imports.ts.out1
-rw-r--r--cli/tests/006_url_imports.ts3
-rw-r--r--cli/tests/006_url_imports.ts.out2
-rw-r--r--cli/tests/012_async.ts13
-rw-r--r--cli/tests/012_async.ts.out3
-rw-r--r--cli/tests/013_dynamic_import.ts15
-rw-r--r--cli/tests/013_dynamic_import.ts.out1
-rw-r--r--cli/tests/014_duplicate_import.ts9
-rw-r--r--cli/tests/014_duplicate_import.ts.out1
-rw-r--r--cli/tests/015_duplicate_parallel_import.js20
-rw-r--r--cli/tests/015_duplicate_parallel_import.js.out1
-rw-r--r--cli/tests/016_double_await.ts8
-rw-r--r--cli/tests/016_double_await.ts.out2
-rw-r--r--cli/tests/017_import_redirect.ts4
-rw-r--r--cli/tests/017_import_redirect.ts.out1
-rw-r--r--cli/tests/018_async_catch.ts14
-rw-r--r--cli/tests/018_async_catch.ts.out3
-rw-r--r--cli/tests/019_media_types.ts24
-rw-r--r--cli/tests/019_media_types.ts.out1
-rw-r--r--cli/tests/020_json_modules.ts2
-rw-r--r--cli/tests/020_json_modules.ts.out1
-rw-r--r--cli/tests/021_mjs_modules.ts2
-rw-r--r--cli/tests/021_mjs_modules.ts.out1
-rw-r--r--cli/tests/022_info_flag_script.out14
-rw-r--r--cli/tests/023_no_ext_with_headers1
-rw-r--r--cli/tests/023_no_ext_with_headers.headers.json1
-rw-r--r--cli/tests/023_no_ext_with_headers.out1
-rw-r--r--cli/tests/024_import_no_ext_with_headers.ts1
-rw-r--r--cli/tests/024_import_no_ext_with_headers.ts.out1
-rw-r--r--cli/tests/025_hrtime.ts3
-rw-r--r--cli/tests/025_hrtime.ts.out2
-rw-r--r--cli/tests/025_reload_js_type_error.js5
-rw-r--r--cli/tests/025_reload_js_type_error.js.out1
-rw-r--r--cli/tests/026_redirect_javascript.js2
-rw-r--r--cli/tests/026_redirect_javascript.js.out1
-rw-r--r--cli/tests/026_workers.ts14
-rw-r--r--cli/tests/026_workers.ts.out4
-rw-r--r--cli/tests/027_redirect_typescript.ts2
-rw-r--r--cli/tests/027_redirect_typescript.ts.out1
-rw-r--r--cli/tests/028_args.ts5
-rw-r--r--cli/tests/028_args.ts.out7
-rw-r--r--cli/tests/029_eval.out1
-rw-r--r--cli/tests/030_xeval.out3
-rw-r--r--cli/tests/031_xeval_replvar.out3
-rw-r--r--cli/tests/032_xeval_delim.out3
-rw-r--r--cli/tests/033_import_map.out7
-rw-r--r--cli/tests/034_onload.out11
-rw-r--r--cli/tests/034_onload/imported.ts11
-rw-r--r--cli/tests/034_onload/main.ts23
-rw-r--r--cli/tests/034_onload/nest_imported.ts10
-rw-r--r--cli/tests/035_no_fetch_flag.out1
-rw-r--r--cli/tests/036_import_map_fetch.out0
-rw-r--r--cli/tests/038_checkjs.js6
-rw-r--r--cli/tests/038_checkjs.js.out15
-rw-r--r--cli/tests/038_checkjs.tsconfig.json5
-rw-r--r--cli/tests/039_worker_deno_ns.ts25
-rw-r--r--cli/tests/039_worker_deno_ns.ts.out4
-rw-r--r--cli/tests/039_worker_deno_ns/has_ns.ts10
-rw-r--r--cli/tests/039_worker_deno_ns/maybe_ns.ts1
-rw-r--r--cli/tests/039_worker_deno_ns/no_ns.ts10
-rw-r--r--cli/tests/040_worker_blob.ts6
-rw-r--r--cli/tests/040_worker_blob.ts.out1
-rw-r--r--cli/tests/041_dyn_import_eval.out1
-rw-r--r--cli/tests/041_info_flag.out3
-rw-r--r--cli/tests/042_dyn_import_evalcontext.ts4
-rw-r--r--cli/tests/042_dyn_import_evalcontext.ts.out1
-rw-r--r--cli/tests/044_bad_resource.ts7
-rw-r--r--cli/tests/044_bad_resource.ts.out6
-rw-r--r--cli/tests/045_proxy_client.ts7
-rw-r--r--cli/tests/045_proxy_test.ts72
-rw-r--r--cli/tests/045_proxy_test.ts.out3
-rw-r--r--cli/tests/046_jsx_test.tsx9
-rw-r--r--cli/tests/046_jsx_test.tsx.out1
-rw-r--r--cli/tests/047_jsx_test.jsx9
-rw-r--r--cli/tests/047_jsx_test.jsx.out1
-rw-r--r--cli/tests/README.md7
-rw-r--r--cli/tests/async_error.ts8
-rw-r--r--cli/tests/async_error.ts.out11
-rw-r--r--cli/tests/badly_formatted.js4
-rw-r--r--cli/tests/badly_formatted_fixed.js1
-rw-r--r--cli/tests/cat.ts11
-rw-r--r--cli/tests/circular1.js2
-rw-r--r--cli/tests/circular1.js.out2
-rw-r--r--cli/tests/circular2.js2
-rw-r--r--cli/tests/config.ts5
-rw-r--r--cli/tests/config.ts.out10
-rw-r--r--cli/tests/config.tsconfig.json7
-rw-r--r--cli/tests/echo_server.ts12
-rw-r--r--cli/tests/error_001.ts9
-rw-r--r--cli/tests/error_001.ts.out9
-rw-r--r--cli/tests/error_002.ts7
-rw-r--r--cli/tests/error_002.ts.out9
-rw-r--r--cli/tests/error_003_typescript.ts20
-rw-r--r--cli/tests/error_003_typescript.ts.out16
-rw-r--r--cli/tests/error_004_missing_module.ts2
-rw-r--r--cli/tests/error_004_missing_module.ts.out5
-rw-r--r--cli/tests/error_005_missing_dynamic_import.ts4
-rw-r--r--cli/tests/error_005_missing_dynamic_import.ts.out5
-rw-r--r--cli/tests/error_006_import_ext_failure.ts1
-rw-r--r--cli/tests/error_006_import_ext_failure.ts.out5
-rw-r--r--cli/tests/error_007_any.ts1
-rw-r--r--cli/tests/error_007_any.ts.out1
-rw-r--r--cli/tests/error_008_checkjs.js6
-rw-r--r--cli/tests/error_008_checkjs.js.out7
-rw-r--r--cli/tests/error_009_missing_js_module.disabled4
-rw-r--r--cli/tests/error_009_missing_js_module.js1
-rw-r--r--cli/tests/error_009_missing_js_module.js.out1
-rw-r--r--cli/tests/error_010_nonexistent_arg.disabled4
-rw-r--r--cli/tests/error_010_nonexistent_arg.out1
-rw-r--r--cli/tests/error_011_bad_module_specifier.ts2
-rw-r--r--cli/tests/error_011_bad_module_specifier.ts.out5
-rw-r--r--cli/tests/error_012_bad_dynamic_import_specifier.ts4
-rw-r--r--cli/tests/error_012_bad_dynamic_import_specifier.ts.out5
-rw-r--r--cli/tests/error_013_missing_script.out1
-rw-r--r--cli/tests/error_014_catch_dynamic_import_error.js31
-rw-r--r--cli/tests/error_014_catch_dynamic_import_error.js.out12
-rw-r--r--cli/tests/error_015_dynamic_import_permissions.js3
-rw-r--r--cli/tests/error_015_dynamic_import_permissions.out1
-rw-r--r--cli/tests/error_016_dynamic_import_permissions2.js5
-rw-r--r--cli/tests/error_016_dynamic_import_permissions2.out2
-rw-r--r--cli/tests/error_stack.ts10
-rw-r--r--cli/tests/error_stack.ts.out6
-rw-r--r--cli/tests/error_syntax.js3
-rw-r--r--cli/tests/error_syntax.js.out6
-rw-r--r--cli/tests/error_type_definitions.ts5
-rw-r--r--cli/tests/error_type_definitions.ts.out5
-rw-r--r--cli/tests/error_worker_dynamic.ts3
-rw-r--r--cli/tests/error_worker_dynamic.ts.out3
-rw-r--r--cli/tests/esm_imports_a.js3
-rw-r--r--cli/tests/esm_imports_b.js3
-rw-r--r--cli/tests/exec_path.ts1
-rw-r--r--cli/tests/exit_error42.ts3
-rw-r--r--cli/tests/exit_error42.ts.out1
-rw-r--r--cli/tests/fetch_deps.ts14
-rw-r--r--cli/tests/hello.txt1
-rw-r--r--cli/tests/https_import.ts5
-rw-r--r--cli/tests/https_import.ts.out1
-rw-r--r--cli/tests/if_main.ts7
-rw-r--r--cli/tests/if_main.ts.out1
-rw-r--r--cli/tests/import_meta.ts3
-rw-r--r--cli/tests/import_meta.ts.out2
-rw-r--r--cli/tests/import_meta2.ts1
-rw-r--r--cli/tests/importmaps/import_map.json14
-rw-r--r--cli/tests/importmaps/lodash/lodash.ts1
-rw-r--r--cli/tests/importmaps/lodash/other_file.ts1
-rw-r--r--cli/tests/importmaps/moment/moment.ts1
-rw-r--r--cli/tests/importmaps/moment/other_file.ts1
-rw-r--r--cli/tests/importmaps/scope/scoped.ts2
-rw-r--r--cli/tests/importmaps/scoped_moment.ts1
-rw-r--r--cli/tests/importmaps/test.ts6
-rw-r--r--cli/tests/importmaps/vue.ts1
-rw-r--r--cli/tests/integration_tests.rs578
-rw-r--r--cli/tests/is_tty.ts1
-rw-r--r--cli/tests/no_color.js1
-rw-r--r--cli/tests/seed_random.js11
-rw-r--r--cli/tests/seed_random.js.out12
-rw-r--r--cli/tests/subdir/auto_print_hello.ts2
-rw-r--r--cli/tests/subdir/bench_worker.ts20
-rw-r--r--cli/tests/subdir/config.json6
-rw-r--r--cli/tests/subdir/evil_remote_import.js4
-rw-r--r--cli/tests/subdir/form_urlencoded.txt1
-rw-r--r--cli/tests/subdir/indirect_import_error.js1
-rw-r--r--cli/tests/subdir/indirect_throws.js1
-rw-r--r--cli/tests/subdir/mismatch_ext.ts1
-rw-r--r--cli/tests/subdir/mod1.ts17
-rw-r--r--cli/tests/subdir/mod2.ts1
-rw-r--r--cli/tests/subdir/mod3.js1
-rw-r--r--cli/tests/subdir/mod4.js1
-rw-r--r--cli/tests/subdir/mod5.mjs1
-rw-r--r--cli/tests/subdir/mt_application_ecmascript.j2.js1
-rw-r--r--cli/tests/subdir/mt_application_x_javascript.j4.js1
-rw-r--r--cli/tests/subdir/mt_application_x_typescript.t4.ts1
-rw-r--r--cli/tests/subdir/mt_javascript.js1
-rw-r--r--cli/tests/subdir/mt_text_ecmascript.j3.js1
-rw-r--r--cli/tests/subdir/mt_text_javascript.j1.js1
-rw-r--r--cli/tests/subdir/mt_text_typescript.t1.ts1
-rw-r--r--cli/tests/subdir/mt_video_mp2t.t3.ts1
-rw-r--r--cli/tests/subdir/mt_video_vdn.t2.ts1
-rw-r--r--cli/tests/subdir/no_ext1
-rw-r--r--cli/tests/subdir/print_hello.ts3
-rw-r--r--cli/tests/subdir/redirects/redirect1.js1
-rw-r--r--cli/tests/subdir/redirects/redirect1.ts1
-rw-r--r--cli/tests/subdir/redirects/redirect2.js1
-rw-r--r--cli/tests/subdir/redirects/redirect3.js2
-rw-r--r--cli/tests/subdir/redirects/redirect4.ts2
-rw-r--r--cli/tests/subdir/subdir2/mod2.ts9
-rw-r--r--cli/tests/subdir/test_worker.js7
-rw-r--r--cli/tests/subdir/test_worker.ts7
-rw-r--r--cli/tests/subdir/throws.js5
-rw-r--r--cli/tests/subdir/unknown_ext.deno1
-rw-r--r--cli/tests/top_level_await.js3
-rw-r--r--cli/tests/top_level_await.out3
-rw-r--r--cli/tests/top_level_await.ts3
-rw-r--r--cli/tests/tty_tests.rs18
-rw-r--r--cli/tests/type_definitions.ts10
-rw-r--r--cli/tests/type_definitions.ts.out3
-rw-r--r--cli/tests/type_definitions/bar.d.ts7
-rw-r--r--cli/tests/type_definitions/fizz.d.ts2
-rw-r--r--cli/tests/type_definitions/fizz.js1
-rw-r--r--cli/tests/type_definitions/foo.d.ts2
-rw-r--r--cli/tests/type_definitions/foo.js1
-rw-r--r--cli/tests/type_definitions/qat.ts1
-rw-r--r--cli/tests/types.out14
-rw-r--r--cli/tests/unbuffered_stderr.ts3
-rw-r--r--cli/tests/unbuffered_stderr.ts.out2
-rw-r--r--cli/tests/unbuffered_stdout.ts3
-rw-r--r--cli/tests/unbuffered_stdout.ts.out1
-rw-r--r--cli/tests/util/mod.rs218
-rw-r--r--cli/tests/v8_flags.js1
-rw-r--r--cli/tests/v8_flags.js.out1
-rw-r--r--cli/tests/v8_help.out3
-rw-r--r--cli/tests/version.out3
-rw-r--r--cli/tests/wasm.ts15
-rw-r--r--cli/tests/wasm.ts.out1
-rw-r--r--cli/tests/wasm_async.js27
-rw-r--r--cli/tests/wasm_async.out1
-rw-r--r--cli/tests/workers_round_robin_bench.ts79
-rw-r--r--cli/tests/workers_startup_bench.ts27
-rw-r--r--cli/tokio_read.rs64
-rw-r--r--cli/tokio_util.rs168
-rw-r--r--cli/tokio_write.rs62
-rw-r--r--cli/version.rs7
-rw-r--r--cli/worker.rs613
416 files changed, 38567 insertions, 0 deletions
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
new file mode 100644
index 000000000..00079b180
--- /dev/null
+++ b/cli/Cargo.toml
@@ -0,0 +1,73 @@
+# Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+[package]
+name = "deno_cli"
+version = "0.20.0"
+license = "MIT"
+authors = ["the Deno authors"]
+edition = "2018"
+description = "Provides the deno executable"
+repository = "https://github.com/denoland/deno"
+default-run = "deno"
+
+[lib]
+name = "deno_cli"
+path = "lib.rs"
+
+[[bin]]
+name = "deno"
+path = "main.rs"
+
+[build-dependencies]
+deno_typescript = { path = "../deno_typescript", version = "0.20.0" }
+
+[dependencies]
+deno = { path = "../core", version = "0.20.0" }
+deno_typescript = { path = "../deno_typescript", version = "0.20.0" }
+
+ansi_term = "0.12.1"
+atty = "0.2.13"
+clap = "2.33.0"
+dirs = "2.0.2"
+futures = "0.1.29"
+http = "0.1.18"
+hyper = "0.12.34"
+hyper-rustls = "0.17.1"
+indexmap = "1.2.0"
+lazy_static = "1.4.0"
+libc = "0.2.62"
+log = "0.4.8"
+rand = "0.7.0"
+regex = "1.3.1"
+remove_dir_all = "0.5.2"
+reqwest = { version = "0.9.20", default-features = false, features = ["rustls-tls"] }
+ring = "0.16.9"
+rustyline = "5.0.2"
+serde = { version = "1.0.100", features = ["derive"] }
+serde_derive = "1.0.100"
+serde_json = { version = "1.0.40", features = [ "preserve_order" ] }
+source-map-mappings = "0.5.0"
+sys-info = "0.5.8"
+tempfile = "3.1.0"
+termcolor = "1.0.5"
+tokio = "0.1.22"
+tokio-executor = "0.1.8"
+tokio-fs = "0.1.6"
+tokio-io = "0.1.12"
+tokio-process = "0.2.4"
+tokio-rustls = "0.10.0"
+tokio-threadpool = "0.1.15"
+url = "1.7.2"
+utime = "0.2.1"
+webpki = "0.21.0"
+webpki-roots = "0.17.0"
+
+[target.'cfg(windows)'.dependencies]
+winapi = "0.3.8"
+fwdansi = "1.0.1"
+
+[target.'cfg(unix)'.dependencies]
+nix = "0.14.1"
+
+[dev-dependencies]
+os_pipe = "0.8.2"
diff --git a/cli/build.rs b/cli/build.rs
new file mode 100644
index 000000000..365e01aa8
--- /dev/null
+++ b/cli/build.rs
@@ -0,0 +1,28 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use std::env;
+use std::path::PathBuf;
+
+fn main() {
+ // To debug snapshot issues uncomment:
+ // deno_typescript::trace_serializer();
+
+ println!(
+ "cargo:rustc-env=TS_VERSION={}",
+ deno_typescript::ts_version()
+ );
+
+ let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
+ let o = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+
+ let root_names = vec![c.join("js/main.ts")];
+ let bundle = o.join("CLI_SNAPSHOT.js");
+ let state = deno_typescript::compile_bundle(&bundle, root_names).unwrap();
+ assert!(bundle.exists());
+ deno_typescript::mksnapshot_bundle(&bundle, state).unwrap();
+
+ let root_names = vec![c.join("js/compiler.ts")];
+ let bundle = o.join("COMPILER_SNAPSHOT.js");
+ let state = deno_typescript::compile_bundle(&bundle, root_names).unwrap();
+ assert!(bundle.exists());
+ deno_typescript::mksnapshot_bundle_ts(&bundle, state).unwrap();
+}
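Note on the build script above: the cargo:rustc-env line makes TS_VERSION available to the crate at compile time. A minimal sketch of reading it follows; the function name is illustrative only and not part of this commit.

    // Reads the TS_VERSION value emitted by build.rs via cargo:rustc-env.
    pub fn ts_version() -> &'static str {
      env!("TS_VERSION")
    }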
diff --git a/cli/colors.rs b/cli/colors.rs
new file mode 100644
index 000000000..9c2c7a401
--- /dev/null
+++ b/cli/colors.rs
@@ -0,0 +1,88 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+// TODO(ry) Replace ansi_term with termcolor.
+use ansi_term::Color::Black;
+use ansi_term::Color::Fixed;
+use ansi_term::Color::Red;
+use ansi_term::Color::White;
+use ansi_term::Style;
+use regex::Regex;
+use std::env;
+use std::fmt;
+
+lazy_static! {
+ // STRIP_ANSI_RE and strip_ansi_codes are lifted from the "console" crate.
+ // Copyright 2017 Armin Ronacher <armin.ronacher@active-4.com>. MIT License.
+ static ref STRIP_ANSI_RE: Regex = Regex::new(
+ r"[\x1b\x9b][\[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]"
+ ).unwrap();
+ static ref NO_COLOR: bool = {
+ env::var_os("NO_COLOR").is_some()
+ };
+}
+
+/// Helper function to strip ansi codes.
+pub fn strip_ansi_codes(s: &str) -> std::borrow::Cow<str> {
+ STRIP_ANSI_RE.replace_all(s, "")
+}
+
+pub fn use_color() -> bool {
+ !(*NO_COLOR)
+}
+
+pub fn red_bold(s: String) -> impl fmt::Display {
+ let mut style = Style::new();
+ if use_color() {
+ style = style.bold().fg(Red);
+ }
+ style.paint(s)
+}
+
+pub fn italic_bold(s: String) -> impl fmt::Display {
+ let mut style = Style::new();
+ if use_color() {
+ style = style.italic().bold();
+ }
+ style.paint(s)
+}
+
+pub fn black_on_white(s: String) -> impl fmt::Display {
+ let mut style = Style::new();
+ if use_color() {
+ style = style.on(White).fg(Black);
+ }
+ style.paint(s)
+}
+
+pub fn yellow(s: String) -> impl fmt::Display {
+ let mut style = Style::new();
+ if use_color() {
+ // matches TypeScript's ForegroundColorEscapeSequences.Yellow
+ style = style.fg(Fixed(11));
+ }
+ style.paint(s)
+}
+
+pub fn cyan(s: String) -> impl fmt::Display {
+ let mut style = Style::new();
+ if use_color() {
+ // matches TypeScript's ForegroundColorEscapeSequences.Cyan
+ style = style.fg(Fixed(14));
+ }
+ style.paint(s)
+}
+
+pub fn red(s: String) -> impl fmt::Display {
+ let mut style = Style::new();
+ if use_color() {
+ style = style.fg(Red);
+ }
+ style.paint(s)
+}
+
+pub fn bold(s: String) -> impl fmt::Display {
+ let mut style = Style::new();
+ if use_color() {
+ style = style.bold();
+ }
+ style.paint(s)
+}
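A minimal usage sketch for the color helpers above, assuming they are called from another module as crate::colors; the message text and function name are illustrative.

    use crate::colors;

    fn print_warning(msg: &str) {
      // Each helper takes an owned String and returns an `impl fmt::Display`
      // that is styled only when use_color() is true (i.e. NO_COLOR is unset).
      eprintln!("{} {}", colors::yellow("warning:".to_string()), msg);
    }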
diff --git a/cli/compilers/js.rs b/cli/compilers/js.rs
new file mode 100644
index 000000000..56c9b672e
--- /dev/null
+++ b/cli/compilers/js.rs
@@ -0,0 +1,25 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::compilers::CompiledModule;
+use crate::compilers::CompiledModuleFuture;
+use crate::file_fetcher::SourceFile;
+use crate::state::ThreadSafeState;
+use std::str;
+
+pub struct JsCompiler {}
+
+impl JsCompiler {
+ pub fn compile_async(
+ self: &Self,
+ _state: ThreadSafeState,
+ source_file: &SourceFile,
+ ) -> Box<CompiledModuleFuture> {
+ let module = CompiledModule {
+ code: str::from_utf8(&source_file.source_code)
+ .unwrap()
+ .to_string(),
+ name: source_file.url.to_string(),
+ };
+
+ Box::new(futures::future::ok(module))
+ }
+}
diff --git a/cli/compilers/json.rs b/cli/compilers/json.rs
new file mode 100644
index 000000000..57e44d354
--- /dev/null
+++ b/cli/compilers/json.rs
@@ -0,0 +1,26 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::compilers::CompiledModule;
+use crate::compilers::CompiledModuleFuture;
+use crate::file_fetcher::SourceFile;
+use crate::state::ThreadSafeState;
+use std::str;
+
+pub struct JsonCompiler {}
+
+impl JsonCompiler {
+ pub fn compile_async(
+ self: &Self,
+ _state: ThreadSafeState,
+ source_file: &SourceFile,
+ ) -> Box<CompiledModuleFuture> {
+ let module = CompiledModule {
+ code: format!(
+ "export default {};",
+ str::from_utf8(&source_file.source_code).unwrap()
+ ),
+ name: source_file.url.to_string(),
+ };
+
+ Box::new(futures::future::ok(module))
+ }
+}
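To illustrate the transform JsonCompiler performs above: the JSON text is wrapped verbatim in a default export. A small sketch under that assumption, with an input chosen purely for illustration:

    #[test]
    fn json_module_wrapping() {
      // Mirrors the format! call in JsonCompiler::compile_async above.
      let code = format!("export default {};", r#"{"foo": 1}"#);
      assert_eq!(code, r#"export default {"foo": 1};"#);
    }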
diff --git a/cli/compilers/mod.rs b/cli/compilers/mod.rs
new file mode 100644
index 000000000..fdc18d2bc
--- /dev/null
+++ b/cli/compilers/mod.rs
@@ -0,0 +1,20 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use deno::ErrBox;
+use futures::Future;
+
+mod js;
+mod json;
+mod ts;
+
+pub use js::JsCompiler;
+pub use json::JsonCompiler;
+pub use ts::TsCompiler;
+
+#[derive(Debug, Clone)]
+pub struct CompiledModule {
+ pub code: String,
+ pub name: String,
+}
+
+pub type CompiledModuleFuture =
+ dyn Future<Item = CompiledModule, Error = ErrBox> + Send;
diff --git a/cli/compilers/ts.rs b/cli/compilers/ts.rs
new file mode 100644
index 000000000..9cbaaae09
--- /dev/null
+++ b/cli/compilers/ts.rs
@@ -0,0 +1,804 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::compilers::CompiledModule;
+use crate::compilers::CompiledModuleFuture;
+use crate::diagnostics::Diagnostic;
+use crate::disk_cache::DiskCache;
+use crate::file_fetcher::SourceFile;
+use crate::file_fetcher::SourceFileFetcher;
+use crate::msg;
+use crate::resources;
+use crate::source_maps::SourceMapGetter;
+use crate::startup_data;
+use crate::state::*;
+use crate::version;
+use crate::worker::Worker;
+use deno::Buf;
+use deno::ErrBox;
+use deno::ModuleSpecifier;
+use futures::Future;
+use futures::Stream;
+use regex::Regex;
+use ring;
+use std::collections::HashSet;
+use std::fmt::Write;
+use std::fs;
+use std::io;
+use std::path::PathBuf;
+use std::str;
+use std::sync::atomic::Ordering;
+use std::sync::Mutex;
+use url::Url;
+
+lazy_static! {
+ static ref CHECK_JS_RE: Regex =
+ Regex::new(r#""checkJs"\s*?:\s*?true"#).unwrap();
+}
+
+/// Represents the state of the compiler configuration: the canonical path of
+/// the configuration file, the raw bytes of its contents, the hash of those
+/// contents, and whether JavaScript files should be compiled as well.
+#[derive(Clone)]
+pub struct CompilerConfig {
+ pub path: Option<PathBuf>,
+ pub content: Option<Vec<u8>>,
+ pub hash: Vec<u8>,
+ pub compile_js: bool,
+}
+
+impl CompilerConfig {
+ /// Take the passed flag and resolve the file name relative to the cwd.
+ pub fn load(config_path: Option<String>) -> Result<Self, ErrBox> {
+ let config_file = match &config_path {
+ Some(config_file_name) => {
+ debug!("Compiler config file: {}", config_file_name);
+ let cwd = std::env::current_dir().unwrap();
+ Some(cwd.join(config_file_name))
+ }
+ _ => None,
+ };
+
+ // Convert the PathBuf to a canonicalized string. This is needed by the
+ // compiler to properly deal with the configuration.
+ let config_path = match &config_file {
+ Some(config_file) => Some(
+ config_file
+ .canonicalize()
+ .map_err(|_| {
+ io::Error::new(
+ io::ErrorKind::InvalidInput,
+ format!(
+ "Could not find the config file: {}",
+ config_file.to_string_lossy()
+ ),
+ )
+ })?
+ .to_owned(),
+ ),
+ _ => None,
+ };
+
+ // Load the contents of the configuration file
+ let config = match &config_file {
+ Some(config_file) => {
+ debug!("Attempt to load config: {}", config_file.to_str().unwrap());
+ let config = fs::read(&config_file)?;
+ Some(config)
+ }
+ _ => None,
+ };
+
+ let config_hash = match &config {
+ Some(bytes) => bytes.clone(),
+ _ => b"".to_vec(),
+ };
+
+ // If `checkJs` is set to true in `compilerOptions` then JavaScript files
+ // will be compiled as well.
+ let compile_js = if let Some(config_content) = config.clone() {
+ let config_str = std::str::from_utf8(&config_content)?;
+ CHECK_JS_RE.is_match(config_str)
+ } else {
+ false
+ };
+
+ let ts_config = Self {
+ path: config_path,
+ content: config,
+ hash: config_hash,
+ compile_js,
+ };
+
+ Ok(ts_config)
+ }
+}
+
+/// Information associated with a compiled file in the cache.
+/// Includes the source code path and a state hash.
+/// `version_hash` is used to validate the version of the file
+/// and could be used to remove stale files from the cache.
+pub struct CompiledFileMetadata {
+ pub source_path: PathBuf,
+ pub version_hash: String,
+}
+
+static SOURCE_PATH: &str = "source_path";
+static VERSION_HASH: &str = "version_hash";
+
+impl CompiledFileMetadata {
+ pub fn from_json_string(metadata_string: String) -> Option<Self> {
+ // TODO: use serde for deserialization
+ let maybe_metadata_json: serde_json::Result<serde_json::Value> =
+ serde_json::from_str(&metadata_string);
+
+ if let Ok(metadata_json) = maybe_metadata_json {
+ let source_path = metadata_json[SOURCE_PATH].as_str().map(PathBuf::from);
+ let version_hash = metadata_json[VERSION_HASH].as_str().map(String::from);
+
+ if source_path.is_none() || version_hash.is_none() {
+ return None;
+ }
+
+ return Some(CompiledFileMetadata {
+ source_path: source_path.unwrap(),
+ version_hash: version_hash.unwrap(),
+ });
+ }
+
+ None
+ }
+
+ pub fn to_json_string(self: &Self) -> Result<String, serde_json::Error> {
+ let mut value_map = serde_json::map::Map::new();
+
+ value_map.insert(SOURCE_PATH.to_owned(), json!(&self.source_path));
+ value_map.insert(VERSION_HASH.to_string(), json!(&self.version_hash));
+ serde_json::to_string(&value_map)
+ }
+}
+/// Creates the JSON message sent to compiler.ts's onmessage.
+fn req(
+ root_names: Vec<String>,
+ compiler_config: CompilerConfig,
+ bundle: Option<String>,
+) -> Buf {
+ let j = match (compiler_config.path, compiler_config.content) {
+ (Some(config_path), Some(config_data)) => json!({
+ "rootNames": root_names,
+ "bundle": bundle,
+ "configPath": config_path,
+ "config": str::from_utf8(&config_data).unwrap(),
+ }),
+ _ => json!({
+ "rootNames": root_names,
+ "bundle": bundle,
+ }),
+ };
+
+ j.to_string().into_boxed_str().into_boxed_bytes()
+}
+
+fn gen_hash(v: Vec<&[u8]>) -> String {
+ let mut ctx = ring::digest::Context::new(&ring::digest::SHA256);
+ for src in v.iter() {
+ ctx.update(src);
+ }
+ let digest = ctx.finish();
+ let mut out = String::new();
+ // TODO There must be a better way to do this...
+ for byte in digest.as_ref() {
+ write!(&mut out, "{:02x}", byte).unwrap();
+ }
+ out
+}
+
+/// Emit a SHA256 hash based on source code, deno version and TS config.
+/// Used to check if a recompilation for source code is needed.
+pub fn source_code_version_hash(
+ source_code: &[u8],
+ version: &str,
+ config_hash: &[u8],
+) -> String {
+ gen_hash(vec![source_code, version.as_bytes(), config_hash])
+}
+
+pub struct TsCompiler {
+ pub file_fetcher: SourceFileFetcher,
+ pub config: CompilerConfig,
+ pub disk_cache: DiskCache,
+ /// Set of all URLs that have been compiled. This prevents double
+ /// compilation of a module.
+ pub compiled: Mutex<HashSet<Url>>,
+ /// This setting is controlled by the `--reload` flag. Unless the flag
+ /// is provided, the disk cache is used.
+ pub use_disk_cache: bool,
+ /// This setting is controlled by `compilerOptions.checkJs`
+ pub compile_js: bool,
+}
+
+impl TsCompiler {
+ pub fn new(
+ file_fetcher: SourceFileFetcher,
+ disk_cache: DiskCache,
+ use_disk_cache: bool,
+ config_path: Option<String>,
+ ) -> Result<Self, ErrBox> {
+ let config = CompilerConfig::load(config_path)?;
+
+ let compiler = Self {
+ file_fetcher,
+ disk_cache,
+ compile_js: config.compile_js,
+ config,
+ compiled: Mutex::new(HashSet::new()),
+ use_disk_cache,
+ };
+
+ Ok(compiler)
+ }
+
+ /// Create a new V8 worker with a snapshot of the TS compiler and set up the compiler's runtime.
+ fn setup_worker(state: ThreadSafeState) -> Worker {
+ // Count how many times we start the compiler worker.
+ state.metrics.compiler_starts.fetch_add(1, Ordering::SeqCst);
+
+ let mut worker = Worker::new(
+ "TS".to_string(),
+ startup_data::compiler_isolate_init(),
+ // TODO(ry) Maybe we should use a separate state for the compiler.
+ // as was done previously.
+ state.clone(),
+ );
+ worker.execute("denoMain()").unwrap();
+ worker.execute("workerMain()").unwrap();
+ worker.execute("compilerMain()").unwrap();
+ worker
+ }
+
+ pub fn bundle_async(
+ self: &Self,
+ state: ThreadSafeState,
+ module_name: String,
+ out_file: String,
+ ) -> impl Future<Item = (), Error = ErrBox> {
+ debug!(
+ "Invoking the compiler to bundle. module_name: {}",
+ module_name
+ );
+
+ let root_names = vec![module_name.clone()];
+ let req_msg = req(root_names, self.config.clone(), Some(out_file));
+
+ let worker = TsCompiler::setup_worker(state.clone());
+ let resource = worker.state.resource.clone();
+ let compiler_rid = resource.rid;
+ let first_msg_fut =
+ resources::post_message_to_worker(compiler_rid, req_msg)
+ .then(move |_| worker)
+ .then(move |result| {
+ if let Err(err) = result {
+ // TODO(ry) Need to forward the error instead of exiting.
+ eprintln!("{}", err.to_string());
+ std::process::exit(1);
+ }
+ debug!("Sent message to worker");
+ let stream_future =
+ resources::get_message_stream_from_worker(compiler_rid)
+ .into_future();
+ stream_future.map(|(f, _rest)| f).map_err(|(f, _rest)| f)
+ });
+
+ first_msg_fut.map_err(|_| panic!("not handled")).and_then(
+ move |maybe_msg: Option<Buf>| {
+ debug!("Received message from worker");
+
+ if let Some(msg) = maybe_msg {
+ let json_str = std::str::from_utf8(&msg).unwrap();
+ debug!("Message: {}", json_str);
+ if let Some(diagnostics) = Diagnostic::from_emit_result(json_str) {
+ return Err(ErrBox::from(diagnostics));
+ }
+ }
+
+ Ok(())
+ },
+ )
+ }
+
+ /// Mark the given module URL as compiled to avoid multiple compilations of the same
+ /// module in a single run.
+ fn mark_compiled(&self, url: &Url) {
+ let mut c = self.compiled.lock().unwrap();
+ c.insert(url.clone());
+ }
+
+ /// Check if the given module URL has already been compiled and can be fetched directly from disk.
+ fn has_compiled(&self, url: &Url) -> bool {
+ let c = self.compiled.lock().unwrap();
+ c.contains(url)
+ }
+
+ /// Asynchronously compile a module and all of its dependencies.
+ ///
+ /// This method compiles every module at most once.
+ ///
+ /// If the `--reload` flag was provided, the compiler skips the on-disk cache and forces recompilation.
+ ///
+ /// If compilation is required, a new V8 worker is spawned with a fresh TS compiler.
+ pub fn compile_async(
+ self: &Self,
+ state: ThreadSafeState,
+ source_file: &SourceFile,
+ ) -> Box<CompiledModuleFuture> {
+ if self.has_compiled(&source_file.url) {
+ return match self.get_compiled_module(&source_file.url) {
+ Ok(compiled) => Box::new(futures::future::ok(compiled)),
+ Err(err) => Box::new(futures::future::err(err)),
+ };
+ }
+
+ if self.use_disk_cache {
+ // Try to load cached version:
+ // 1. check if there's 'meta' file
+ if let Some(metadata) = self.get_metadata(&source_file.url) {
+ // 2. compare version hashes
+ // TODO: it would probably be a good idea to make this a method implemented on SourceFile
+ let version_hash_to_validate = source_code_version_hash(
+ &source_file.source_code,
+ version::DENO,
+ &self.config.hash,
+ );
+
+ if metadata.version_hash == version_hash_to_validate {
+ debug!("load_cache metadata version hash match");
+ if let Ok(compiled_module) =
+ self.get_compiled_module(&source_file.url)
+ {
+ self.mark_compiled(&source_file.url);
+ return Box::new(futures::future::ok(compiled_module));
+ }
+ }
+ }
+ }
+
+ let source_file_ = source_file.clone();
+
+ debug!(">>>>> compile_sync START");
+ let module_url = source_file.url.clone();
+
+ debug!(
+ "Running rust part of compile_sync, module specifier: {}",
+ &source_file.url
+ );
+
+ let root_names = vec![module_url.to_string()];
+ let req_msg = req(root_names, self.config.clone(), None);
+
+ let worker = TsCompiler::setup_worker(state.clone());
+ let compiling_job = state.progress.add("Compile", &module_url.to_string());
+ let state_ = state.clone();
+
+ let resource = worker.state.resource.clone();
+ let compiler_rid = resource.rid;
+ let first_msg_fut =
+ resources::post_message_to_worker(compiler_rid, req_msg)
+ .then(move |_| worker)
+ .then(move |result| {
+ if let Err(err) = result {
+ // TODO(ry) Need to forward the error instead of exiting.
+ eprintln!("{}", err.to_string());
+ std::process::exit(1);
+ }
+ debug!("Sent message to worker");
+ let stream_future =
+ resources::get_message_stream_from_worker(compiler_rid)
+ .into_future();
+ stream_future.map(|(f, _rest)| f).map_err(|(f, _rest)| f)
+ });
+
+ let fut = first_msg_fut
+ .map_err(|_| panic!("not handled"))
+ .and_then(move |maybe_msg: Option<Buf>| {
+ debug!("Received message from worker");
+
+ if let Some(msg) = maybe_msg {
+ let json_str = std::str::from_utf8(&msg).unwrap();
+ debug!("Message: {}", json_str);
+ if let Some(diagnostics) = Diagnostic::from_emit_result(json_str) {
+ return Err(ErrBox::from(diagnostics));
+ }
+ }
+
+ Ok(())
+ })
+ .and_then(move |_| {
+ // If we got this far it means compilation was successful and we can
+ // load the compiled file from disk.
+ state_
+ .ts_compiler
+ .get_compiled_module(&source_file_.url)
+ .map_err(|e| {
+ // TODO: this situation shouldn't happen
+ panic!("Expected to find compiled file: {} {}", e, source_file_.url)
+ })
+ })
+ .and_then(move |compiled_module| {
+ // Explicit drop to keep reference alive until future completes.
+ drop(compiling_job);
+
+ Ok(compiled_module)
+ })
+ .then(move |r| {
+ debug!(">>>>> compile_sync END");
+ // TODO(ry) do this in worker's destructor.
+ // resource.close();
+ r
+ });
+
+ Box::new(fut)
+ }
+
+ /// Get associated `CompiledFileMetadata` for given module if it exists.
+ pub fn get_metadata(self: &Self, url: &Url) -> Option<CompiledFileMetadata> {
+ // Try to load cached version:
+ // 1. check if there's 'meta' file
+ let cache_key = self
+ .disk_cache
+ .get_cache_filename_with_extension(url, "meta");
+ if let Ok(metadata_bytes) = self.disk_cache.get(&cache_key) {
+ if let Ok(metadata) = std::str::from_utf8(&metadata_bytes) {
+ if let Some(read_metadata) =
+ CompiledFileMetadata::from_json_string(metadata.to_string())
+ {
+ return Some(read_metadata);
+ }
+ }
+ }
+
+ None
+ }
+
+ pub fn get_compiled_module(
+ self: &Self,
+ module_url: &Url,
+ ) -> Result<CompiledModule, ErrBox> {
+ let compiled_source_file = self.get_compiled_source_file(module_url)?;
+
+ let compiled_module = CompiledModule {
+ code: str::from_utf8(&compiled_source_file.source_code)
+ .unwrap()
+ .to_string(),
+ name: module_url.to_string(),
+ };
+
+ Ok(compiled_module)
+ }
+
+ /// Return compiled JS file for given TS module.
+ // TODO: ideally we shouldn't construct SourceFile by hand, but it should be delegated to
+ // SourceFileFetcher
+ pub fn get_compiled_source_file(
+ self: &Self,
+ module_url: &Url,
+ ) -> Result<SourceFile, ErrBox> {
+ let cache_key = self
+ .disk_cache
+ .get_cache_filename_with_extension(&module_url, "js");
+ let compiled_code = self.disk_cache.get(&cache_key)?;
+ let compiled_code_filename = self.disk_cache.location.join(cache_key);
+ debug!("compiled filename: {:?}", compiled_code_filename);
+
+ let compiled_module = SourceFile {
+ url: module_url.clone(),
+ filename: compiled_code_filename,
+ media_type: msg::MediaType::JavaScript,
+ source_code: compiled_code,
+ };
+
+ Ok(compiled_module)
+ }
+
+ /// Save compiled JS file for given TS module to on-disk cache.
+ ///
+ /// Along with the compiled file, a special metadata file is saved containing
+ /// a hash that can be validated to avoid unnecessary recompilation.
+ fn cache_compiled_file(
+ self: &Self,
+ module_specifier: &ModuleSpecifier,
+ contents: &str,
+ ) -> std::io::Result<()> {
+ let js_key = self
+ .disk_cache
+ .get_cache_filename_with_extension(module_specifier.as_url(), "js");
+ self
+ .disk_cache
+ .set(&js_key, contents.as_bytes())
+ .and_then(|_| {
+ self.mark_compiled(module_specifier.as_url());
+
+ let source_file = self
+ .file_fetcher
+ .fetch_source_file(&module_specifier)
+ .expect("Source file not found");
+
+ let version_hash = source_code_version_hash(
+ &source_file.source_code,
+ version::DENO,
+ &self.config.hash,
+ );
+
+ let compiled_file_metadata = CompiledFileMetadata {
+ source_path: source_file.filename.to_owned(),
+ version_hash,
+ };
+ let meta_key = self
+ .disk_cache
+ .get_cache_filename_with_extension(module_specifier.as_url(), "meta");
+ self.disk_cache.set(
+ &meta_key,
+ compiled_file_metadata.to_json_string()?.as_bytes(),
+ )
+ })
+ }
+
+ /// Return associated source map file for given TS module.
+ // TODO: ideally we shouldn't construct SourceFile by hand, but it should be delegated to
+ // SourceFileFetcher
+ pub fn get_source_map_file(
+ self: &Self,
+ module_specifier: &ModuleSpecifier,
+ ) -> Result<SourceFile, ErrBox> {
+ let cache_key = self
+ .disk_cache
+ .get_cache_filename_with_extension(module_specifier.as_url(), "js.map");
+ let source_code = self.disk_cache.get(&cache_key)?;
+ let source_map_filename = self.disk_cache.location.join(cache_key);
+ debug!("source map filename: {:?}", source_map_filename);
+
+ let source_map_file = SourceFile {
+ url: module_specifier.as_url().to_owned(),
+ filename: source_map_filename,
+ media_type: msg::MediaType::JavaScript,
+ source_code,
+ };
+
+ Ok(source_map_file)
+ }
+
+ /// Save source map file for given TS module to on-disk cache.
+ fn cache_source_map(
+ self: &Self,
+ module_specifier: &ModuleSpecifier,
+ contents: &str,
+ ) -> std::io::Result<()> {
+ let source_map_key = self
+ .disk_cache
+ .get_cache_filename_with_extension(module_specifier.as_url(), "js.map");
+ self.disk_cache.set(&source_map_key, contents.as_bytes())
+ }
+
+ /// This method is called by TS compiler via an "op".
+ pub fn cache_compiler_output(
+ self: &Self,
+ module_specifier: &ModuleSpecifier,
+ extension: &str,
+ contents: &str,
+ ) -> std::io::Result<()> {
+ match extension {
+ ".map" => self.cache_source_map(module_specifier, contents),
+ ".js" => self.cache_compiled_file(module_specifier, contents),
+ _ => unreachable!(),
+ }
+ }
+}
+
+impl SourceMapGetter for TsCompiler {
+ fn get_source_map(&self, script_name: &str) -> Option<Vec<u8>> {
+ self
+ .try_to_resolve_and_get_source_map(script_name)
+ .and_then(|out| Some(out.source_code))
+ }
+
+ fn get_source_line(&self, script_name: &str, line: usize) -> Option<String> {
+ self
+ .try_resolve_and_get_source_file(script_name)
+ .and_then(|out| {
+ str::from_utf8(&out.source_code).ok().and_then(|v| {
+ let lines: Vec<&str> = v.lines().collect();
+ assert!(lines.len() > line);
+ Some(lines[line].to_string())
+ })
+ })
+ }
+}
+
+// `SourceMapGetter` related methods
+impl TsCompiler {
+ fn try_to_resolve(self: &Self, script_name: &str) -> Option<ModuleSpecifier> {
+ // If `script_name` can't be resolved to a ModuleSpecifier it's probably an
+ // internal script (like `gen/cli/bundle/compiler.js`), so we won't be
+ // able to get the source for it anyway.
+ ModuleSpecifier::resolve_url(script_name).ok()
+ }
+
+ fn try_resolve_and_get_source_file(
+ &self,
+ script_name: &str,
+ ) -> Option<SourceFile> {
+ if let Some(module_specifier) = self.try_to_resolve(script_name) {
+ return match self.file_fetcher.fetch_source_file(&module_specifier) {
+ Ok(out) => Some(out),
+ Err(_) => None,
+ };
+ }
+
+ None
+ }
+
+ fn try_to_resolve_and_get_source_map(
+ &self,
+ script_name: &str,
+ ) -> Option<SourceFile> {
+ if let Some(module_specifier) = self.try_to_resolve(script_name) {
+ return match self.get_source_map_file(&module_specifier) {
+ Ok(out) => Some(out),
+ Err(_) => None,
+ };
+ }
+
+ None
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::fs as deno_fs;
+ use crate::tokio_util;
+ use deno::ModuleSpecifier;
+ use futures::future::lazy;
+ use std::path::PathBuf;
+ use tempfile::TempDir;
+
+ #[test]
+ fn test_compile_async() {
+ let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+ .parent()
+ .unwrap()
+ .join("tests/002_hello.ts")
+ .to_owned();
+ let specifier =
+ ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap()).unwrap();
+
+ let out = SourceFile {
+ url: specifier.as_url().clone(),
+ filename: PathBuf::from(p.to_str().unwrap().to_string()),
+ media_type: msg::MediaType::TypeScript,
+ source_code: include_bytes!("../tests/002_hello.ts").to_vec(),
+ };
+
+ let mock_state = ThreadSafeState::mock(vec![
+ String::from("deno"),
+ String::from("hello.js"),
+ ]);
+
+ tokio_util::run(lazy(move || {
+ mock_state
+ .ts_compiler
+ .compile_async(mock_state.clone(), &out)
+ .then(|result| {
+ assert!(result.is_ok());
+ assert!(result
+ .unwrap()
+ .code
+ .as_bytes()
+ .starts_with("console.log(\"Hello World\");".as_bytes()));
+ Ok(())
+ })
+ }))
+ }
+
+ #[test]
+ fn test_bundle_async() {
+ let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+ .parent()
+ .unwrap()
+ .join("tests/002_hello.ts")
+ .to_owned();
+ use deno::ModuleSpecifier;
+ let module_name = ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap())
+ .unwrap()
+ .to_string();
+
+ let state = ThreadSafeState::mock(vec![
+ String::from("deno"),
+ p.to_string_lossy().into(),
+ String::from("$deno$/bundle.js"),
+ ]);
+
+ tokio_util::run(lazy(move || {
+ state
+ .ts_compiler
+ .bundle_async(
+ state.clone(),
+ module_name,
+ String::from("$deno$/bundle.js"),
+ )
+ .then(|result| {
+ assert!(result.is_ok());
+ Ok(())
+ })
+ }))
+ }
+
+ #[test]
+ fn test_source_code_version_hash() {
+ assert_eq!(
+ "0185b42de0686b4c93c314daaa8dee159f768a9e9a336c2a5e3d5b8ca6c4208c",
+ source_code_version_hash(b"1+2", "0.4.0", b"{}")
+ );
+ // Different source_code should result in different hash.
+ assert_eq!(
+ "e58631f1b6b6ce2b300b133ec2ad16a8a5ba6b7ecf812a8c06e59056638571ac",
+ source_code_version_hash(b"1", "0.4.0", b"{}")
+ );
+ // Different version should result in different hash.
+ assert_eq!(
+ "307e6200347a88dbbada453102deb91c12939c65494e987d2d8978f6609b5633",
+ source_code_version_hash(b"1", "0.1.0", b"{}")
+ );
+ // Different config should result in different hash.
+ assert_eq!(
+ "195eaf104a591d1d7f69fc169c60a41959c2b7a21373cd23a8f675f877ec385f",
+ source_code_version_hash(b"1", "0.4.0", b"{\"compilerOptions\": {}}")
+ );
+ }
+
+ #[test]
+ fn test_compile_js() {
+ let temp_dir = TempDir::new().expect("tempdir fail");
+ let temp_dir_path = temp_dir.path();
+
+ let test_cases = vec![
+ // valid JSON
+ (
+ r#"{ "compilerOptions": { "checkJs": true } } "#,
+ true,
+ ),
+ // JSON with comment
+ (
+ r#"{ "compilerOptions": { // force .js file compilation by Deno "checkJs": true } } "#,
+ true,
+ ),
+ // invalid JSON
+ (
+ r#"{ "compilerOptions": { "checkJs": true },{ } "#,
+ true,
+ ),
+ // without content
+ (
+ "",
+ false,
+ ),
+ ];
+
+ let path = temp_dir_path.join("tsconfig.json");
+ let path_str = path.to_str().unwrap().to_string();
+
+ for (json_str, expected) in test_cases {
+ deno_fs::write_file(&path, json_str.as_bytes(), 0o666).unwrap();
+ let config = CompilerConfig::load(Some(path_str.clone())).unwrap();
+ assert_eq!(config.compile_js, expected);
+ }
+ }
+
+ #[test]
+ fn test_compiler_config_load() {
+ let temp_dir = TempDir::new().expect("tempdir fail");
+ let temp_dir_path = temp_dir.path();
+ let path = temp_dir_path.join("doesnotexist.json");
+ let path_str = path.to_str().unwrap().to_string();
+ let res = CompilerConfig::load(Some(path_str.clone()));
+ assert!(res.is_err());
+ }
+}
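For orientation on the caching scheme above: each compiled file is stored next to a .meta entry whose version hash combines the source bytes, the Deno version and the TS config. A hedged sketch of producing such metadata, assuming the items defined above are in scope; the path and source text are illustrative.

    fn metadata_example() -> serde_json::Result<String> {
      // The hash combines the source bytes, the Deno version string and the config bytes.
      let version_hash =
        source_code_version_hash(b"console.log(1);", version::DENO, b"");
      let metadata = CompiledFileMetadata {
        source_path: std::path::PathBuf::from("/some/module.ts"),
        version_hash,
      };
      // Serializes to JSON with "source_path" and "version_hash" keys.
      metadata.to_json_string()
    }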
diff --git a/cli/deno_dir.rs b/cli/deno_dir.rs
new file mode 100644
index 000000000..ac35922eb
--- /dev/null
+++ b/cli/deno_dir.rs
@@ -0,0 +1,43 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::disk_cache::DiskCache;
+use dirs;
+use std;
+use std::path::PathBuf;
+
+/// `DenoDir` serves as a coordinator for multiple `DiskCache`s, containing them
+/// in a single directory that can be controlled with the `$DENO_DIR` env variable.
+#[derive(Clone)]
+pub struct DenoDir {
+ // Example: /Users/rld/.deno/
+ pub root: PathBuf,
+ /// Used by SourceFileFetcher to cache remote modules.
+ pub deps_cache: DiskCache,
+ /// Used by TsCompiler to cache compiler output.
+ pub gen_cache: DiskCache,
+}
+
+impl DenoDir {
+ pub fn new(custom_root: Option<PathBuf>) -> std::io::Result<Self> {
+ // Only setup once.
+ let home_dir = dirs::home_dir().expect("Could not get home directory.");
+ let fallback = home_dir.join(".deno");
+ // We use the OS cache dir because all files deno writes are cache files.
+ // Once that changes we need to start using different roots if DENO_DIR
+ // is not set, and keep a single one if it is.
+ let default = dirs::cache_dir()
+ .map(|d| d.join("deno"))
+ .unwrap_or(fallback);
+
+ let root: PathBuf = custom_root.unwrap_or(default);
+ let deps_path = root.join("deps");
+ let gen_path = root.join("gen");
+
+ let deno_dir = Self {
+ root,
+ deps_cache: DiskCache::new(&deps_path),
+ gen_cache: DiskCache::new(&gen_path),
+ };
+
+ Ok(deno_dir)
+ }
+}
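A minimal sketch of constructing the layout described above; the custom root path is illustrative, and passing None falls back to the OS cache dir or ~/.deno.

    fn deno_dir_example() -> std::io::Result<()> {
      let deno_dir =
        DenoDir::new(Some(std::path::PathBuf::from("/tmp/custom_deno_dir")))?;
      // Remote modules are cached under <root>/deps, compiler output under <root>/gen.
      assert_eq!(deno_dir.root, std::path::PathBuf::from("/tmp/custom_deno_dir"));
      Ok(())
    }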
diff --git a/cli/deno_error.rs b/cli/deno_error.rs
new file mode 100644
index 000000000..551547e26
--- /dev/null
+++ b/cli/deno_error.rs
@@ -0,0 +1,513 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::diagnostics::Diagnostic;
+use crate::fmt_errors::JSError;
+use crate::import_map::ImportMapError;
+pub use crate::msg::ErrorKind;
+use deno::AnyError;
+use deno::ErrBox;
+use deno::ModuleResolutionError;
+use http::uri;
+use hyper;
+use reqwest;
+use rustyline::error::ReadlineError;
+use std;
+use std::env::VarError;
+use std::error::Error;
+use std::fmt;
+use std::io;
+use url;
+
+#[derive(Debug)]
+pub struct DenoError {
+ kind: ErrorKind,
+ msg: String,
+}
+
+pub fn print_err_and_exit(err: ErrBox) {
+ eprintln!("{}", err.to_string());
+ std::process::exit(1);
+}
+
+pub fn js_check(r: Result<(), ErrBox>) {
+ if let Err(err) = r {
+ print_err_and_exit(err);
+ }
+}
+
+impl DenoError {
+ pub fn new(kind: ErrorKind, msg: String) -> Self {
+ Self { kind, msg }
+ }
+}
+
+impl Error for DenoError {}
+
+impl fmt::Display for DenoError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.pad(self.msg.as_str())
+ }
+}
+
+#[derive(Debug)]
+struct StaticError(ErrorKind, &'static str);
+
+impl Error for StaticError {}
+
+impl fmt::Display for StaticError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.pad(self.1)
+ }
+}
+
+pub fn bad_resource() -> ErrBox {
+ StaticError(ErrorKind::BadResource, "bad resource id").into()
+}
+
+pub fn permission_denied() -> ErrBox {
+ StaticError(ErrorKind::PermissionDenied, "permission denied").into()
+}
+
+pub fn op_not_implemented() -> ErrBox {
+ StaticError(ErrorKind::OpNotAvailable, "op not implemented").into()
+}
+
+pub fn no_buffer_specified() -> ErrBox {
+ StaticError(ErrorKind::InvalidInput, "no buffer specified").into()
+}
+
+pub fn no_async_support() -> ErrBox {
+ StaticError(ErrorKind::NoAsyncSupport, "op doesn't support async calls")
+ .into()
+}
+
+pub fn no_sync_support() -> ErrBox {
+ StaticError(ErrorKind::NoSyncSupport, "op doesn't support sync calls").into()
+}
+
+pub fn invalid_address_syntax() -> ErrBox {
+ StaticError(ErrorKind::InvalidInput, "invalid address syntax").into()
+}
+
+pub fn too_many_redirects() -> ErrBox {
+ StaticError(ErrorKind::TooManyRedirects, "too many redirects").into()
+}
+
+pub trait GetErrorKind {
+ fn kind(&self) -> ErrorKind;
+}
+
+impl GetErrorKind for DenoError {
+ fn kind(&self) -> ErrorKind {
+ self.kind
+ }
+}
+
+impl GetErrorKind for StaticError {
+ fn kind(&self) -> ErrorKind {
+ self.0
+ }
+}
+
+impl GetErrorKind for JSError {
+ fn kind(&self) -> ErrorKind {
+ ErrorKind::JSError
+ }
+}
+
+impl GetErrorKind for Diagnostic {
+ fn kind(&self) -> ErrorKind {
+ ErrorKind::Diagnostic
+ }
+}
+
+impl GetErrorKind for ImportMapError {
+ fn kind(&self) -> ErrorKind {
+ ErrorKind::ImportMapError
+ }
+}
+
+impl GetErrorKind for ModuleResolutionError {
+ fn kind(&self) -> ErrorKind {
+ use ModuleResolutionError::*;
+ match self {
+ InvalidUrl(ref err) | InvalidBaseUrl(ref err) => err.kind(),
+ InvalidPath(_) => ErrorKind::InvalidPath,
+ ImportPrefixMissing(_) => ErrorKind::ImportPrefixMissing,
+ }
+ }
+}
+
+impl GetErrorKind for VarError {
+ fn kind(&self) -> ErrorKind {
+ use VarError::*;
+ match self {
+ NotPresent => ErrorKind::NotFound,
+ NotUnicode(..) => ErrorKind::InvalidData,
+ }
+ }
+}
+
+impl GetErrorKind for io::Error {
+ fn kind(&self) -> ErrorKind {
+ use io::ErrorKind::*;
+ match self.kind() {
+ NotFound => ErrorKind::NotFound,
+ PermissionDenied => ErrorKind::PermissionDenied,
+ ConnectionRefused => ErrorKind::ConnectionRefused,
+ ConnectionReset => ErrorKind::ConnectionReset,
+ ConnectionAborted => ErrorKind::ConnectionAborted,
+ NotConnected => ErrorKind::NotConnected,
+ AddrInUse => ErrorKind::AddrInUse,
+ AddrNotAvailable => ErrorKind::AddrNotAvailable,
+ BrokenPipe => ErrorKind::BrokenPipe,
+ AlreadyExists => ErrorKind::AlreadyExists,
+ WouldBlock => ErrorKind::WouldBlock,
+ InvalidInput => ErrorKind::InvalidInput,
+ InvalidData => ErrorKind::InvalidData,
+ TimedOut => ErrorKind::TimedOut,
+ Interrupted => ErrorKind::Interrupted,
+ WriteZero => ErrorKind::WriteZero,
+ UnexpectedEof => ErrorKind::UnexpectedEof,
+ _ => ErrorKind::Other,
+ }
+ }
+}
+
+impl GetErrorKind for uri::InvalidUri {
+ fn kind(&self) -> ErrorKind {
+ // The http::uri::ErrorKind exists and is similar to url::ParseError.
+ // However it is also private, so we can't get any details out.
+ ErrorKind::InvalidUri
+ }
+}
+
+impl GetErrorKind for url::ParseError {
+ fn kind(&self) -> ErrorKind {
+ use url::ParseError::*;
+ match self {
+ EmptyHost => ErrorKind::EmptyHost,
+ IdnaError => ErrorKind::IdnaError,
+ InvalidDomainCharacter => ErrorKind::InvalidDomainCharacter,
+ InvalidIpv4Address => ErrorKind::InvalidIpv4Address,
+ InvalidIpv6Address => ErrorKind::InvalidIpv6Address,
+ InvalidPort => ErrorKind::InvalidPort,
+ Overflow => ErrorKind::Overflow,
+ RelativeUrlWithCannotBeABaseBase => {
+ ErrorKind::RelativeUrlWithCannotBeABaseBase
+ }
+ RelativeUrlWithoutBase => ErrorKind::RelativeUrlWithoutBase,
+ SetHostOnCannotBeABaseUrl => ErrorKind::SetHostOnCannotBeABaseUrl,
+ }
+ }
+}
+
+impl GetErrorKind for hyper::Error {
+ fn kind(&self) -> ErrorKind {
+ match self {
+ e if e.is_canceled() => ErrorKind::HttpCanceled,
+ e if e.is_closed() => ErrorKind::HttpClosed,
+ e if e.is_parse() => ErrorKind::HttpParse,
+ e if e.is_user() => ErrorKind::HttpUser,
+ _ => ErrorKind::HttpOther,
+ }
+ }
+}
+
+impl GetErrorKind for reqwest::Error {
+ fn kind(&self) -> ErrorKind {
+ use self::GetErrorKind as Get;
+
+ match self.get_ref() {
+ Some(err_ref) => None
+ .or_else(|| err_ref.downcast_ref::<hyper::Error>().map(Get::kind))
+ .or_else(|| err_ref.downcast_ref::<url::ParseError>().map(Get::kind))
+ .or_else(|| err_ref.downcast_ref::<io::Error>().map(Get::kind))
+ .or_else(|| {
+ err_ref
+ .downcast_ref::<serde_json::error::Error>()
+ .map(Get::kind)
+ })
+ .unwrap_or_else(|| ErrorKind::HttpOther),
+ _ => ErrorKind::HttpOther,
+ }
+ }
+}
+
+impl GetErrorKind for ReadlineError {
+ fn kind(&self) -> ErrorKind {
+ use ReadlineError::*;
+ match self {
+ Io(err) => GetErrorKind::kind(err),
+ Eof => ErrorKind::UnexpectedEof,
+ Interrupted => ErrorKind::Interrupted,
+ #[cfg(unix)]
+ Errno(err) => err.kind(),
+ _ => unimplemented!(),
+ }
+ }
+}
+
+impl GetErrorKind for serde_json::error::Error {
+ fn kind(&self) -> ErrorKind {
+ use serde_json::error::*;
+ match self.classify() {
+ Category::Io => ErrorKind::InvalidInput,
+ Category::Syntax => ErrorKind::InvalidInput,
+ Category::Data => ErrorKind::InvalidData,
+ Category::Eof => ErrorKind::UnexpectedEof,
+ }
+ }
+}
+
+#[cfg(unix)]
+mod unix {
+ use super::{ErrorKind, GetErrorKind};
+ use nix::errno::Errno::*;
+ pub use nix::Error;
+ use nix::Error::Sys;
+
+ impl GetErrorKind for Error {
+ fn kind(&self) -> ErrorKind {
+ match self {
+ Sys(EPERM) => ErrorKind::PermissionDenied,
+ Sys(EINVAL) => ErrorKind::InvalidInput,
+ Sys(ENOENT) => ErrorKind::NotFound,
+ Sys(_) => ErrorKind::UnixError,
+ _ => ErrorKind::Other,
+ }
+ }
+ }
+}
+
+impl GetErrorKind for dyn AnyError {
+ fn kind(&self) -> ErrorKind {
+ use self::GetErrorKind as Get;
+
+ #[cfg(unix)]
+ fn unix_error_kind(err: &dyn AnyError) -> Option<ErrorKind> {
+ err.downcast_ref::<unix::Error>().map(Get::kind)
+ }
+
+ #[cfg(not(unix))]
+ fn unix_error_kind(_: &dyn AnyError) -> Option<ErrorKind> {
+ None
+ }
+
+ None
+ .or_else(|| self.downcast_ref::<DenoError>().map(Get::kind))
+ .or_else(|| self.downcast_ref::<Diagnostic>().map(Get::kind))
+ .or_else(|| self.downcast_ref::<hyper::Error>().map(Get::kind))
+ .or_else(|| self.downcast_ref::<reqwest::Error>().map(Get::kind))
+ .or_else(|| self.downcast_ref::<ImportMapError>().map(Get::kind))
+ .or_else(|| self.downcast_ref::<io::Error>().map(Get::kind))
+ .or_else(|| self.downcast_ref::<JSError>().map(Get::kind))
+ .or_else(|| self.downcast_ref::<ModuleResolutionError>().map(Get::kind))
+ .or_else(|| self.downcast_ref::<StaticError>().map(Get::kind))
+ .or_else(|| self.downcast_ref::<uri::InvalidUri>().map(Get::kind))
+ .or_else(|| self.downcast_ref::<url::ParseError>().map(Get::kind))
+ .or_else(|| self.downcast_ref::<VarError>().map(Get::kind))
+ .or_else(|| self.downcast_ref::<ReadlineError>().map(Get::kind))
+ .or_else(|| {
+ self
+ .downcast_ref::<serde_json::error::Error>()
+ .map(Get::kind)
+ })
+ .or_else(|| unix_error_kind(self))
+ .unwrap_or_else(|| {
+ panic!("Can't get ErrorKind for {:?}", self);
+ })
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::colors::strip_ansi_codes;
+ use crate::diagnostics::Diagnostic;
+ use crate::diagnostics::DiagnosticCategory;
+ use crate::diagnostics::DiagnosticItem;
+ use deno::ErrBox;
+ use deno::StackFrame;
+ use deno::V8Exception;
+
+ fn js_error() -> JSError {
+ JSError::new(V8Exception {
+ message: "Error: foo bar".to_string(),
+ source_line: None,
+ script_resource_name: None,
+ line_number: None,
+ start_position: None,
+ end_position: None,
+ error_level: None,
+ start_column: None,
+ end_column: None,
+ frames: vec![
+ StackFrame {
+ line: 4,
+ column: 16,
+ script_name: "foo_bar.ts".to_string(),
+ function_name: "foo".to_string(),
+ is_eval: false,
+ is_constructor: false,
+ is_wasm: false,
+ },
+ StackFrame {
+ line: 5,
+ column: 20,
+ script_name: "bar_baz.ts".to_string(),
+ function_name: "qat".to_string(),
+ is_eval: false,
+ is_constructor: false,
+ is_wasm: false,
+ },
+ StackFrame {
+ line: 1,
+ column: 1,
+ script_name: "deno_main.js".to_string(),
+ function_name: "".to_string(),
+ is_eval: false,
+ is_constructor: false,
+ is_wasm: false,
+ },
+ ],
+ })
+ }
+
+ fn diagnostic() -> Diagnostic {
+ Diagnostic {
+ items: vec![
+ DiagnosticItem {
+ message: "Example 1".to_string(),
+ message_chain: None,
+ code: 2322,
+ category: DiagnosticCategory::Error,
+ start_position: Some(267),
+ end_position: Some(273),
+ source_line: Some(" values: o => [".to_string()),
+ line_number: Some(18),
+ script_resource_name: Some(
+ "deno/tests/complex_diagnostics.ts".to_string(),
+ ),
+ start_column: Some(2),
+ end_column: Some(8),
+ related_information: None,
+ },
+ DiagnosticItem {
+ message: "Example 2".to_string(),
+ message_chain: None,
+ code: 2000,
+ category: DiagnosticCategory::Error,
+ start_position: Some(2),
+ end_position: Some(2),
+ source_line: Some(" values: undefined,".to_string()),
+ line_number: Some(128),
+ script_resource_name: Some("/foo/bar.ts".to_string()),
+ start_column: Some(2),
+ end_column: Some(8),
+ related_information: None,
+ },
+ ],
+ }
+ }
+
+ fn io_error() -> io::Error {
+ io::Error::from(io::ErrorKind::NotFound)
+ }
+
+ fn url_error() -> url::ParseError {
+ url::ParseError::EmptyHost
+ }
+
+ fn import_map_error() -> ImportMapError {
+ ImportMapError {
+ msg: "an import map error".to_string(),
+ }
+ }
+
+ #[test]
+ fn test_simple_error() {
+ let err =
+ ErrBox::from(DenoError::new(ErrorKind::NoError, "foo".to_string()));
+ assert_eq!(err.kind(), ErrorKind::NoError);
+ assert_eq!(err.to_string(), "foo");
+ }
+
+ #[test]
+ fn test_io_error() {
+ let err = ErrBox::from(io_error());
+ assert_eq!(err.kind(), ErrorKind::NotFound);
+ assert_eq!(err.to_string(), "entity not found");
+ }
+
+ #[test]
+ fn test_url_error() {
+ let err = ErrBox::from(url_error());
+ assert_eq!(err.kind(), ErrorKind::EmptyHost);
+ assert_eq!(err.to_string(), "empty host");
+ }
+
+ // TODO find a way to easily test tokio errors and unix errors
+
+ #[test]
+ fn test_diagnostic() {
+ let err = ErrBox::from(diagnostic());
+ assert_eq!(err.kind(), ErrorKind::Diagnostic);
+ assert_eq!(strip_ansi_codes(&err.to_string()), "error TS2322: Example 1\n\n► deno/tests/complex_diagnostics.ts:19:3\n\n19 values: o => [\n ~~~~~~\n\nerror TS2000: Example 2\n\n► /foo/bar.ts:129:3\n\n129 values: undefined,\n ~~~~~~\n\n\nFound 2 errors.\n");
+ }
+
+ #[test]
+ fn test_js_error() {
+ let err = ErrBox::from(js_error());
+ assert_eq!(err.kind(), ErrorKind::JSError);
+ assert_eq!(strip_ansi_codes(&err.to_string()), "error: Error: foo bar\n at foo (foo_bar.ts:5:17)\n at qat (bar_baz.ts:6:21)\n at deno_main.js:2:2");
+ }
+
+ #[test]
+ fn test_import_map_error() {
+ let err = ErrBox::from(import_map_error());
+ assert_eq!(err.kind(), ErrorKind::ImportMapError);
+ assert_eq!(err.to_string(), "an import map error");
+ }
+
+ #[test]
+ fn test_bad_resource() {
+ let err = bad_resource();
+ assert_eq!(err.kind(), ErrorKind::BadResource);
+ assert_eq!(err.to_string(), "bad resource id");
+ }
+
+ #[test]
+ fn test_permission_denied() {
+ let err = permission_denied();
+ assert_eq!(err.kind(), ErrorKind::PermissionDenied);
+ assert_eq!(err.to_string(), "permission denied");
+ }
+
+ #[test]
+ fn test_op_not_implemented() {
+ let err = op_not_implemented();
+ assert_eq!(err.kind(), ErrorKind::OpNotAvailable);
+ assert_eq!(err.to_string(), "op not implemented");
+ }
+
+ #[test]
+ fn test_no_buffer_specified() {
+ let err = no_buffer_specified();
+ assert_eq!(err.kind(), ErrorKind::InvalidInput);
+ assert_eq!(err.to_string(), "no buffer specified");
+ }
+
+ #[test]
+ fn test_no_async_support() {
+ let err = no_async_support();
+ assert_eq!(err.kind(), ErrorKind::NoAsyncSupport);
+ assert_eq!(err.to_string(), "op doesn't support async calls");
+ }
+
+ #[test]
+ fn test_no_sync_support() {
+ let err = no_sync_support();
+ assert_eq!(err.kind(), ErrorKind::NoSyncSupport);
+ assert_eq!(err.to_string(), "op doesn't support sync calls");
+ }
+}
diff --git a/cli/diagnostics.rs b/cli/diagnostics.rs
new file mode 100644
index 000000000..57fe56b6f
--- /dev/null
+++ b/cli/diagnostics.rs
@@ -0,0 +1,584 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+//! This module encodes TypeScript errors (diagnostics) into Rust structs and
+//! contains code for printing them to the console.
+use crate::colors;
+use crate::fmt_errors::format_maybe_source_line;
+use crate::fmt_errors::format_maybe_source_name;
+use crate::fmt_errors::DisplayFormatter;
+use serde_json;
+use serde_json::value::Value;
+use std::error::Error;
+use std::fmt;
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct Diagnostic {
+ pub items: Vec<DiagnosticItem>,
+}
+
+impl Diagnostic {
+  /// Take a JSON value and attempt to map it to a `Diagnostic`.
+ pub fn from_json_value(v: &serde_json::Value) -> Option<Self> {
+ if !v.is_object() {
+ return None;
+ }
+ let obj = v.as_object().unwrap();
+
+ let mut items = Vec::<DiagnosticItem>::new();
+ let items_v = &obj["items"];
+ if items_v.is_array() {
+ let items_values = items_v.as_array().unwrap();
+
+ for item_v in items_values {
+ items.push(DiagnosticItem::from_json_value(item_v));
+ }
+ }
+
+ Some(Self { items })
+ }
+
+ pub fn from_emit_result(json_str: &str) -> Option<Self> {
+ let v = serde_json::from_str::<serde_json::Value>(json_str)
+ .expect("Error decoding JSON string.");
+ let diagnostics_o = v.get("diagnostics");
+ if let Some(diagnostics_v) = diagnostics_o {
+ return Self::from_json_value(diagnostics_v);
+ }
+
+ None
+ }
+}
+
+impl fmt::Display for Diagnostic {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut i = 0;
+ for item in &self.items {
+ if i > 0 {
+ writeln!(f)?;
+ }
+ write!(f, "{}", item.to_string())?;
+ i += 1;
+ }
+
+ if i > 1 {
+ write!(f, "\n\nFound {} errors.\n", i)?;
+ }
+
+ Ok(())
+ }
+}
+
+impl Error for Diagnostic {
+ fn description(&self) -> &str {
+ &self.items[0].message
+ }
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct DiagnosticItem {
+ /// The top level message relating to the diagnostic item.
+ pub message: String,
+
+ /// A chain of messages, code, and categories of messages which indicate the
+ /// full diagnostic information.
+ pub message_chain: Option<DiagnosticMessageChain>,
+
+  /// Other diagnostic items related to the diagnostic; usually these are
+  /// suggestions of why an error occurred.
+ pub related_information: Option<Vec<DiagnosticItem>>,
+
+ /// The source line the diagnostic is in reference to.
+ pub source_line: Option<String>,
+
+ /// Zero-based index to the line number of the error.
+ pub line_number: Option<i64>,
+
+ /// The resource name provided to the TypeScript compiler.
+ pub script_resource_name: Option<String>,
+
+ /// Zero-based index to the start position in the entire script resource.
+ pub start_position: Option<i64>,
+
+ /// Zero-based index to the end position in the entire script resource.
+ pub end_position: Option<i64>,
+ pub category: DiagnosticCategory,
+
+ /// This is defined in TypeScript and can be referenced via
+ /// [diagnosticMessages.json](https://github.com/microsoft/TypeScript/blob/master/src/compiler/diagnosticMessages.json).
+ pub code: i64,
+
+ /// Zero-based index to the start column on `line_number`.
+ pub start_column: Option<i64>,
+
+ /// Zero-based index to the end column on `line_number`.
+ pub end_column: Option<i64>,
+}
+
+impl DiagnosticItem {
+ pub fn from_json_value(v: &serde_json::Value) -> Self {
+ let obj = v.as_object().unwrap();
+
+ // required attributes
+ let message = obj
+ .get("message")
+ .and_then(|v| v.as_str().map(String::from))
+ .unwrap();
+ let category = DiagnosticCategory::from(
+ obj.get("category").and_then(Value::as_i64).unwrap(),
+ );
+ let code = obj.get("code").and_then(Value::as_i64).unwrap();
+
+ // optional attributes
+ let source_line = obj
+ .get("sourceLine")
+ .and_then(|v| v.as_str().map(String::from));
+ let script_resource_name = obj
+ .get("scriptResourceName")
+ .and_then(|v| v.as_str().map(String::from));
+ let line_number = obj.get("lineNumber").and_then(Value::as_i64);
+ let start_position = obj.get("startPosition").and_then(Value::as_i64);
+ let end_position = obj.get("endPosition").and_then(Value::as_i64);
+ let start_column = obj.get("startColumn").and_then(Value::as_i64);
+ let end_column = obj.get("endColumn").and_then(Value::as_i64);
+
+ let message_chain_v = obj.get("messageChain");
+ let message_chain = match message_chain_v {
+ Some(v) => DiagnosticMessageChain::from_json_value(v),
+ _ => None,
+ };
+
+ let related_information_v = obj.get("relatedInformation");
+ let related_information = match related_information_v {
+ Some(r) => {
+ let mut related_information = Vec::<DiagnosticItem>::new();
+ let related_info_values = r.as_array().unwrap();
+
+ for related_info_v in related_info_values {
+ related_information
+ .push(DiagnosticItem::from_json_value(related_info_v));
+ }
+
+ Some(related_information)
+ }
+ _ => None,
+ };
+
+ Self {
+ message,
+ message_chain,
+ related_information,
+ code,
+ source_line,
+ script_resource_name,
+ line_number,
+ start_position,
+ end_position,
+ category,
+ start_column,
+ end_column,
+ }
+ }
+}
+
+impl DisplayFormatter for DiagnosticItem {
+ fn format_category_and_code(&self) -> String {
+ let category = match self.category {
+ DiagnosticCategory::Error => {
+ format!("{}", colors::red_bold("error".to_string()))
+ }
+ DiagnosticCategory::Warning => "warn".to_string(),
+ DiagnosticCategory::Debug => "debug".to_string(),
+ DiagnosticCategory::Info => "info".to_string(),
+ _ => "".to_string(),
+ };
+
+ let code =
+ colors::bold(format!(" TS{}", self.code.to_string())).to_string();
+
+ format!("{}{}: ", category, code)
+ }
+
+ fn format_message(&self, level: usize) -> String {
+ debug!("format_message");
+ if self.message_chain.is_none() {
+ return format!("{:indent$}{}", "", self.message, indent = level);
+ }
+
+ let mut s = self.message_chain.clone().unwrap().format_message(level);
+ s.pop();
+
+ s
+ }
+
+ fn format_related_info(&self) -> String {
+ if self.related_information.is_none() {
+ return "".to_string();
+ }
+
+ let mut s = String::new();
+ let related_information = self.related_information.clone().unwrap();
+ for related_diagnostic in related_information {
+ let rd = &related_diagnostic;
+ s.push_str(&format!(
+ "\n{}\n\n ► {}{}\n",
+ rd.format_message(2),
+ rd.format_source_name(),
+ rd.format_source_line(4),
+ ));
+ }
+
+ s
+ }
+
+ fn format_source_line(&self, level: usize) -> String {
+ format_maybe_source_line(
+ self.source_line.clone(),
+ self.line_number,
+ self.start_column,
+ self.end_column,
+ match self.category {
+ DiagnosticCategory::Error => true,
+ _ => false,
+ },
+ level,
+ )
+ }
+
+ fn format_source_name(&self) -> String {
+ format_maybe_source_name(
+ self.script_resource_name.clone(),
+ self.line_number,
+ self.start_column,
+ )
+ }
+}
+
+impl fmt::Display for DiagnosticItem {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(
+ f,
+ "{}{}\n\n► {}{}{}",
+ self.format_category_and_code(),
+ self.format_message(0),
+ self.format_source_name(),
+ self.format_source_line(0),
+ self.format_related_info(),
+ )
+ }
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct DiagnosticMessageChain {
+ pub message: String,
+ pub code: i64,
+ pub category: DiagnosticCategory,
+ pub next: Option<Vec<DiagnosticMessageChain>>,
+}
+
+impl DiagnosticMessageChain {
+ fn from_value(v: &serde_json::Value) -> Self {
+ let obj = v.as_object().unwrap();
+ let message = obj
+ .get("message")
+ .and_then(|v| v.as_str().map(String::from))
+ .unwrap();
+ let code = obj.get("code").and_then(Value::as_i64).unwrap();
+ let category = DiagnosticCategory::from(
+ obj.get("category").and_then(Value::as_i64).unwrap(),
+ );
+
+ let next_v = obj.get("next");
+ let next = match next_v {
+ Some(n) => DiagnosticMessageChain::from_next_array(n),
+ _ => None,
+ };
+
+ Self {
+ message,
+ code,
+ category,
+ next,
+ }
+ }
+
+ fn from_next_array(v: &serde_json::Value) -> Option<Vec<Self>> {
+ if !v.is_array() {
+ return None;
+ }
+
+ let vec = v
+ .as_array()
+ .unwrap()
+ .iter()
+ .map(|item| Self::from_value(&item))
+ .collect::<Vec<Self>>();
+
+ Some(vec)
+ }
+
+ pub fn from_json_value(v: &serde_json::Value) -> Option<Self> {
+ if !v.is_object() {
+ return None;
+ }
+
+ Some(Self::from_value(v))
+ }
+
+ pub fn format_message(&self, level: usize) -> String {
+ let mut s = String::new();
+
+ s.push_str(&std::iter::repeat(" ").take(level * 2).collect::<String>());
+ s.push_str(&self.message);
+ s.push('\n');
+ if self.next.is_some() {
+ let arr = self.next.clone().unwrap();
+ for dm in arr {
+ s.push_str(&dm.format_message(level + 1));
+ }
+ }
+
+ s
+ }
+}
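+
+// A minimal illustrative sketch of `format_message`: each nesting level of a
+// message chain indents its message by two more spaces (the messages and codes
+// below are placeholders).
+#[cfg(test)]
+#[test]
+fn message_chain_indentation_sketch() {
+  let chain = DiagnosticMessageChain {
+    message: "outer".to_string(),
+    code: 1,
+    category: DiagnosticCategory::Error,
+    next: Some(vec![DiagnosticMessageChain {
+      message: "inner".to_string(),
+      code: 2,
+      category: DiagnosticCategory::Error,
+      next: None,
+    }]),
+  };
+  assert_eq!(chain.format_message(0), "outer\n  inner\n");
+}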
+
+#[derive(Debug, PartialEq, Clone)]
+pub enum DiagnosticCategory {
+ Log, // 0
+ Debug, // 1
+ Info, // 2
+ Error, // 3
+ Warning, // 4
+ Suggestion, // 5
+}
+
+impl From<i64> for DiagnosticCategory {
+ fn from(value: i64) -> Self {
+ match value {
+ 0 => DiagnosticCategory::Log,
+ 1 => DiagnosticCategory::Debug,
+ 2 => DiagnosticCategory::Info,
+ 3 => DiagnosticCategory::Error,
+ 4 => DiagnosticCategory::Warning,
+ 5 => DiagnosticCategory::Suggestion,
+ _ => panic!("Unknown value: {}", value),
+ }
+ }
+}
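+
+// A minimal illustrative sketch of the integer-to-category mapping used when
+// decoding compiler JSON output (values outside 0-5 panic above).
+#[cfg(test)]
+#[test]
+fn diagnostic_category_from_i64_sketch() {
+  assert_eq!(DiagnosticCategory::from(3), DiagnosticCategory::Error);
+  assert_eq!(DiagnosticCategory::from(5), DiagnosticCategory::Suggestion);
+}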
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::colors::strip_ansi_codes;
+
+ fn diagnostic1() -> Diagnostic {
+ Diagnostic {
+ items: vec![
+ DiagnosticItem {
+ message: "Type '(o: T) => { v: any; f: (x: B) => string; }[]' is not assignable to type '(r: B) => Value<B>[]'.".to_string(),
+ message_chain: Some(DiagnosticMessageChain {
+ message: "Type '(o: T) => { v: any; f: (x: B) => string; }[]' is not assignable to type '(r: B) => Value<B>[]'.".to_string(),
+ code: 2322,
+ category: DiagnosticCategory::Error,
+ next: Some(vec![DiagnosticMessageChain {
+ message: "Types of parameters 'o' and 'r' are incompatible.".to_string(),
+ code: 2328,
+ category: DiagnosticCategory::Error,
+ next: Some(vec![DiagnosticMessageChain {
+ message: "Type 'B' is not assignable to type 'T'.".to_string(),
+ code: 2322,
+ category: DiagnosticCategory::Error,
+ next: None,
+ }]),
+ }]),
+ }),
+ code: 2322,
+ category: DiagnosticCategory::Error,
+ start_position: Some(267),
+ end_position: Some(273),
+ source_line: Some(" values: o => [".to_string()),
+ line_number: Some(18),
+ script_resource_name: Some("deno/tests/complex_diagnostics.ts".to_string()),
+ start_column: Some(2),
+ end_column: Some(8),
+ related_information: Some(vec![
+ DiagnosticItem {
+ message: "The expected type comes from property 'values' which is declared here on type 'SettingsInterface<B>'".to_string(),
+ message_chain: None,
+ related_information: None,
+ code: 6500,
+ source_line: Some(" values?: (r: T) => Array<Value<T>>;".to_string()),
+ script_resource_name: Some("deno/tests/complex_diagnostics.ts".to_string()),
+ line_number: Some(6),
+ start_position: Some(94),
+ end_position: Some(100),
+ category: DiagnosticCategory::Info,
+ start_column: Some(2),
+ end_column: Some(8),
+ }
+ ])
+ }
+ ]
+ }
+ }
+
+ fn diagnostic2() -> Diagnostic {
+ Diagnostic {
+ items: vec![
+ DiagnosticItem {
+ message: "Example 1".to_string(),
+ message_chain: None,
+ code: 2322,
+ category: DiagnosticCategory::Error,
+ start_position: Some(267),
+ end_position: Some(273),
+ source_line: Some(" values: o => [".to_string()),
+ line_number: Some(18),
+ script_resource_name: Some(
+ "deno/tests/complex_diagnostics.ts".to_string(),
+ ),
+ start_column: Some(2),
+ end_column: Some(8),
+ related_information: None,
+ },
+ DiagnosticItem {
+ message: "Example 2".to_string(),
+ message_chain: None,
+ code: 2000,
+ category: DiagnosticCategory::Error,
+ start_position: Some(2),
+ end_position: Some(2),
+ source_line: Some(" values: undefined,".to_string()),
+ line_number: Some(128),
+ script_resource_name: Some("/foo/bar.ts".to_string()),
+ start_column: Some(2),
+ end_column: Some(8),
+ related_information: None,
+ },
+ ],
+ }
+ }
+
+ #[test]
+ fn from_json() {
+ let v = serde_json::from_str::<serde_json::Value>(
+ &r#"{
+ "items": [
+ {
+ "message": "Type '{ a(): { b: number; }; }' is not assignable to type '{ a(): { b: string; }; }'.",
+ "messageChain": {
+ "message": "Type '{ a(): { b: number; }; }' is not assignable to type '{ a(): { b: string; }; }'.",
+ "code": 2322,
+ "category": 3,
+ "next": [
+ {
+ "message": "Types of property 'a' are incompatible.",
+ "code": 2326,
+ "category": 3
+ }
+ ]
+ },
+ "code": 2322,
+ "category": 3,
+ "startPosition": 352,
+ "endPosition": 353,
+ "sourceLine": "x = y;",
+ "lineNumber": 29,
+ "scriptResourceName": "/deno/tests/error_003_typescript.ts",
+ "startColumn": 0,
+ "endColumn": 1
+ }
+ ]
+ }"#,
+ ).unwrap();
+ let r = Diagnostic::from_json_value(&v);
+ let expected = Some(
+ Diagnostic {
+ items: vec![
+ DiagnosticItem {
+ message: "Type \'{ a(): { b: number; }; }\' is not assignable to type \'{ a(): { b: string; }; }\'.".to_string(),
+ message_chain: Some(
+ DiagnosticMessageChain {
+ message: "Type \'{ a(): { b: number; }; }\' is not assignable to type \'{ a(): { b: string; }; }\'.".to_string(),
+ code: 2322,
+ category: DiagnosticCategory::Error,
+ next: Some(vec![
+ DiagnosticMessageChain {
+ message: "Types of property \'a\' are incompatible.".to_string(),
+ code: 2326,
+ category: DiagnosticCategory::Error,
+ next: None,
+ }
+ ])
+ }
+ ),
+ related_information: None,
+ source_line: Some("x = y;".to_string()),
+ line_number: Some(29),
+ script_resource_name: Some("/deno/tests/error_003_typescript.ts".to_string()),
+ start_position: Some(352),
+ end_position: Some(353),
+ category: DiagnosticCategory::Error,
+ code: 2322,
+ start_column: Some(0),
+ end_column: Some(1)
+ }
+ ]
+ }
+ );
+ assert_eq!(expected, r);
+ }
+
+ #[test]
+ fn from_emit_result() {
+ let r = Diagnostic::from_emit_result(
+ &r#"{
+ "emitSkipped": false,
+ "diagnostics": {
+ "items": [
+ {
+ "message": "foo bar",
+ "code": 9999,
+ "category": 3
+ }
+ ]
+ }
+ }"#,
+ );
+ let expected = Some(Diagnostic {
+ items: vec![DiagnosticItem {
+ message: "foo bar".to_string(),
+ message_chain: None,
+ related_information: None,
+ source_line: None,
+ line_number: None,
+ script_resource_name: None,
+ start_position: None,
+ end_position: None,
+ category: DiagnosticCategory::Error,
+ code: 9999,
+ start_column: None,
+ end_column: None,
+ }],
+ });
+ assert_eq!(expected, r);
+ }
+
+ #[test]
+ fn from_emit_result_none() {
+ let r = &r#"{"emitSkipped":false}"#;
+ assert!(Diagnostic::from_emit_result(r).is_none());
+ }
+
+ #[test]
+ fn diagnostic_to_string1() {
+ let d = diagnostic1();
+ let expected = "error TS2322: Type \'(o: T) => { v: any; f: (x: B) => string; }[]\' is not assignable to type \'(r: B) => Value<B>[]\'.\n Types of parameters \'o\' and \'r\' are incompatible.\n Type \'B\' is not assignable to type \'T\'.\n\n► deno/tests/complex_diagnostics.ts:19:3\n\n19 values: o => [\n ~~~~~~\n\n The expected type comes from property \'values\' which is declared here on type \'SettingsInterface<B>\'\n\n ► deno/tests/complex_diagnostics.ts:7:3\n\n 7 values?: (r: T) => Array<Value<T>>;\n ~~~~~~\n\n";
+ assert_eq!(expected, strip_ansi_codes(&d.to_string()));
+ }
+
+ #[test]
+ fn diagnostic_to_string2() {
+ let d = diagnostic2();
+ let expected = "error TS2322: Example 1\n\n► deno/tests/complex_diagnostics.ts:19:3\n\n19 values: o => [\n ~~~~~~\n\nerror TS2000: Example 2\n\n► /foo/bar.ts:129:3\n\n129 values: undefined,\n ~~~~~~\n\n\nFound 2 errors.\n";
+ assert_eq!(expected, strip_ansi_codes(&d.to_string()));
+ }
+}
diff --git a/cli/disk_cache.rs b/cli/disk_cache.rs
new file mode 100644
index 000000000..975a31f45
--- /dev/null
+++ b/cli/disk_cache.rs
@@ -0,0 +1,208 @@
+use crate::fs as deno_fs;
+use std::ffi::OsStr;
+use std::fs;
+use std::path::Component;
+use std::path::Path;
+use std::path::PathBuf;
+use std::path::Prefix;
+use std::str;
+use url::Url;
+
+#[derive(Clone)]
+pub struct DiskCache {
+ pub location: PathBuf,
+}
+
+impl DiskCache {
+ pub fn new(location: &Path) -> Self {
+ // TODO: ensure that 'location' is a directory
+ Self {
+ location: location.to_owned(),
+ }
+ }
+
+ pub fn get_cache_filename(self: &Self, url: &Url) -> PathBuf {
+ let mut out = PathBuf::new();
+
+ let scheme = url.scheme();
+ out.push(scheme);
+
+ match scheme {
+ "http" | "https" => {
+ let host = url.host_str().unwrap();
+ let host_port = match url.port() {
+ // Windows doesn't support ":" in filenames, so we represent port using a
+ // special string.
+ Some(port) => format!("{}_PORT{}", host, port),
+ None => host.to_string(),
+ };
+ out.push(host_port);
+
+ for path_seg in url.path_segments().unwrap() {
+ out.push(path_seg);
+ }
+ }
+ "file" => {
+ let path = url.to_file_path().unwrap();
+ let mut path_components = path.components();
+
+ if cfg!(target_os = "windows") {
+ if let Some(Component::Prefix(prefix_component)) =
+ path_components.next()
+ {
+ // Windows doesn't support ":" in filenames, so we need to extract disk prefix
+ // Example: file:///C:/deno/js/unit_test_runner.ts
+ // it should produce: file\c\deno\js\unit_test_runner.ts
+ match prefix_component.kind() {
+ Prefix::Disk(disk_byte) | Prefix::VerbatimDisk(disk_byte) => {
+ let disk = (disk_byte as char).to_string();
+ out.push(disk);
+ }
+ _ => unreachable!(),
+ }
+ }
+ }
+
+ // Must be relative, so strip forward slash
+ let mut remaining_components = path_components.as_path();
+ if let Ok(stripped) = remaining_components.strip_prefix("/") {
+ remaining_components = stripped;
+ };
+
+ out = out.join(remaining_components);
+ }
+ scheme => {
+ unimplemented!(
+ "Don't know how to create cache name for scheme: {}",
+ scheme
+ );
+ }
+ };
+
+ out
+ }
+
+ pub fn get_cache_filename_with_extension(
+ self: &Self,
+ url: &Url,
+ extension: &str,
+ ) -> PathBuf {
+ let base = self.get_cache_filename(url);
+
+ match base.extension() {
+ None => base.with_extension(extension),
+ Some(ext) => {
+ let original_extension = OsStr::to_str(ext).unwrap();
+ let final_extension = format!("{}.{}", original_extension, extension);
+ base.with_extension(final_extension)
+ }
+ }
+ }
+
+ pub fn get(self: &Self, filename: &Path) -> std::io::Result<Vec<u8>> {
+ let path = self.location.join(filename);
+ fs::read(&path)
+ }
+
+ pub fn set(self: &Self, filename: &Path, data: &[u8]) -> std::io::Result<()> {
+ let path = self.location.join(filename);
+ match path.parent() {
+ Some(ref parent) => fs::create_dir_all(parent),
+ None => Ok(()),
+ }?;
+ deno_fs::write_file(&path, data, 0o666)
+ }
+
+ pub fn remove(self: &Self, filename: &Path) -> std::io::Result<()> {
+ let path = self.location.join(filename);
+ fs::remove_file(path)
+ }
+}
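+
+// A minimal round-trip sketch for `set`/`get` (illustrative only; it assumes
+// the `tempfile` dev-dependency already used by other tests in this crate and
+// uses a placeholder cache key).
+#[cfg(test)]
+#[test]
+fn disk_cache_roundtrip_sketch() {
+  let dir = tempfile::TempDir::new().unwrap();
+  let cache = DiskCache::new(dir.path());
+  let key = PathBuf::from("http/example.com/mod.ts");
+  // `set` creates any missing parent directories under the cache location.
+  cache.set(&key, b"export {};").unwrap();
+  assert_eq!(cache.get(&key).unwrap(), b"export {};".to_vec());
+}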
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_get_cache_filename() {
+ let cache_location = if cfg!(target_os = "windows") {
+ PathBuf::from(r"C:\deno_dir\")
+ } else {
+ PathBuf::from("/deno_dir/")
+ };
+
+ let cache = DiskCache::new(&cache_location);
+
+ let mut test_cases = vec![
+ (
+ "http://deno.land/std/http/file_server.ts",
+ "http/deno.land/std/http/file_server.ts",
+ ),
+ (
+ "http://localhost:8000/std/http/file_server.ts",
+ "http/localhost_PORT8000/std/http/file_server.ts",
+ ),
+ (
+ "https://deno.land/std/http/file_server.ts",
+ "https/deno.land/std/http/file_server.ts",
+ ),
+ ];
+
+ if cfg!(target_os = "windows") {
+ test_cases.push(("file:///D:/a/1/s/format.ts", "file/D/a/1/s/format.ts"));
+ } else {
+ test_cases.push((
+ "file:///std/http/file_server.ts",
+ "file/std/http/file_server.ts",
+ ));
+ }
+
+ for test_case in &test_cases {
+ let cache_filename =
+ cache.get_cache_filename(&Url::parse(test_case.0).unwrap());
+ assert_eq!(cache_filename, PathBuf::from(test_case.1));
+ }
+ }
+
+ #[test]
+ fn test_get_cache_filename_with_extension() {
+ let cache = DiskCache::new(&PathBuf::from("foo"));
+
+ let mut test_cases = vec![
+ (
+ "http://deno.land/std/http/file_server.ts",
+ "js",
+ "http/deno.land/std/http/file_server.ts.js",
+ ),
+ (
+ "http://deno.land/std/http/file_server.ts",
+ "js.map",
+ "http/deno.land/std/http/file_server.ts.js.map",
+ ),
+ ];
+
+ if cfg!(target_os = "windows") {
+ test_cases.push((
+ "file:///D:/std/http/file_server",
+ "js",
+ "file/D/std/http/file_server.js",
+ ));
+ } else {
+ test_cases.push((
+ "file:///std/http/file_server",
+ "js",
+ "file/std/http/file_server.js",
+ ));
+ }
+
+ for test_case in &test_cases {
+ assert_eq!(
+ cache.get_cache_filename_with_extension(
+ &Url::parse(test_case.0).unwrap(),
+ test_case.1
+ ),
+ PathBuf::from(test_case.2)
+ )
+ }
+ }
+}
diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs
new file mode 100644
index 000000000..e865bf945
--- /dev/null
+++ b/cli/file_fetcher.rs
@@ -0,0 +1,1519 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::deno_error::too_many_redirects;
+use crate::deno_error::DenoError;
+use crate::deno_error::ErrorKind;
+use crate::deno_error::GetErrorKind;
+use crate::disk_cache::DiskCache;
+use crate::http_util;
+use crate::http_util::FetchOnceResult;
+use crate::msg;
+use crate::progress::Progress;
+use crate::tokio_util;
+use deno::ErrBox;
+use deno::ModuleSpecifier;
+use futures::future::Either;
+use futures::Future;
+use serde_json;
+use std;
+use std::collections::HashMap;
+use std::fs;
+use std::path::Path;
+use std::path::PathBuf;
+use std::result::Result;
+use std::str;
+use std::sync::Arc;
+use std::sync::Mutex;
+use url;
+use url::Url;
+
+/// Structure representing local or remote file.
+///
+/// In the case of a remote file, `url` might be different from the originally requested URL; if so,
+/// `redirect_source_url` will contain the original URL and `url` will be equal to the final location.
+#[derive(Debug, Clone)]
+pub struct SourceFile {
+ pub url: Url,
+ pub filename: PathBuf,
+ pub media_type: msg::MediaType,
+ pub source_code: Vec<u8>,
+}
+
+pub type SourceFileFuture =
+ dyn Future<Item = SourceFile, Error = ErrBox> + Send;
+
+/// Simple struct implementing in-process caching to prevent multiple
+/// fs reads/net fetches for the same file.
+#[derive(Clone, Default)]
+pub struct SourceFileCache(Arc<Mutex<HashMap<String, SourceFile>>>);
+
+impl SourceFileCache {
+ pub fn set(&self, key: String, source_file: SourceFile) {
+ let mut c = self.0.lock().unwrap();
+ c.insert(key, source_file);
+ }
+
+ pub fn get(&self, key: String) -> Option<SourceFile> {
+ let c = self.0.lock().unwrap();
+ match c.get(&key) {
+ Some(source_file) => Some(source_file.clone()),
+ None => None,
+ }
+ }
+}
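+
+// A minimal illustrative sketch of the in-process cache: entries are keyed by
+// the resolved specifier string and cloned on read (the URL and path below are
+// placeholders).
+#[cfg(test)]
+#[test]
+fn source_file_cache_sketch() {
+  let cache = SourceFileCache::default();
+  let url = Url::parse("http://example.com/mod.ts").unwrap();
+  let source_file = SourceFile {
+    url: url.clone(),
+    filename: PathBuf::from("/deps/http/example.com/mod.ts"),
+    media_type: msg::MediaType::TypeScript,
+    source_code: b"export {};".to_vec(),
+  };
+  cache.set(url.to_string(), source_file);
+  assert_eq!(
+    cache.get(url.to_string()).unwrap().source_code,
+    b"export {};".to_vec()
+  );
+}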
+
+const SUPPORTED_URL_SCHEMES: [&str; 3] = ["http", "https", "file"];
+
+/// `DenoDir` serves as coordinator for multiple `DiskCache`s, keeping them
+/// in a single directory that can be controlled with the `$DENO_DIR` env variable.
+#[derive(Clone)]
+pub struct SourceFileFetcher {
+ deps_cache: DiskCache,
+ progress: Progress,
+ source_file_cache: SourceFileCache,
+ use_disk_cache: bool,
+ no_remote_fetch: bool,
+}
+
+impl SourceFileFetcher {
+ pub fn new(
+ deps_cache: DiskCache,
+ progress: Progress,
+ use_disk_cache: bool,
+ no_remote_fetch: bool,
+ ) -> std::io::Result<Self> {
+ let file_fetcher = Self {
+ deps_cache,
+ progress,
+ source_file_cache: SourceFileCache::default(),
+ use_disk_cache,
+ no_remote_fetch,
+ };
+
+ Ok(file_fetcher)
+ }
+
+ fn check_if_supported_scheme(url: &Url) -> Result<(), ErrBox> {
+ if !SUPPORTED_URL_SCHEMES.contains(&url.scheme()) {
+ return Err(
+ DenoError::new(
+ ErrorKind::UnsupportedFetchScheme,
+ format!("Unsupported scheme \"{}\" for module \"{}\". Supported schemes: {:#?}", url.scheme(), url, SUPPORTED_URL_SCHEMES),
+ ).into()
+ );
+ }
+
+ Ok(())
+ }
+
+ /// Required for TS compiler.
+ pub fn fetch_source_file(
+ self: &Self,
+ specifier: &ModuleSpecifier,
+ ) -> Result<SourceFile, ErrBox> {
+ tokio_util::block_on(self.fetch_source_file_async(specifier))
+ }
+
+ pub fn fetch_source_file_async(
+ self: &Self,
+ specifier: &ModuleSpecifier,
+ ) -> Box<SourceFileFuture> {
+ let module_url = specifier.as_url().to_owned();
+ debug!("fetch_source_file. specifier {} ", &module_url);
+
+ // Check if this file was already fetched and can be retrieved from in-process cache.
+ if let Some(source_file) = self.source_file_cache.get(specifier.to_string())
+ {
+ return Box::new(futures::future::ok(source_file));
+ }
+
+ let source_file_cache = self.source_file_cache.clone();
+ let specifier_ = specifier.clone();
+
+ let fut = self
+ .get_source_file_async(
+ &module_url,
+ self.use_disk_cache,
+ self.no_remote_fetch,
+ )
+ .then(move |result| {
+ let mut out = result.map_err(|err| {
+ if err.kind() == ErrorKind::NotFound {
+ // For NotFound, change the message to something better.
+ DenoError::new(
+ ErrorKind::NotFound,
+ format!("Cannot resolve module \"{}\"", module_url.to_string()),
+ )
+ .into()
+ } else {
+ err
+ }
+ })?;
+
+ // TODO: move somewhere?
+ if out.source_code.starts_with(b"#!") {
+ out.source_code = filter_shebang(out.source_code);
+ }
+
+ // Cache in-process for subsequent access.
+ source_file_cache.set(specifier_.to_string(), out.clone());
+
+ Ok(out)
+ });
+
+ Box::new(fut)
+ }
+
+  /// This is the main method responsible for fetching local or remote files.
+ ///
+ /// If this is a remote module, and it has not yet been cached, the resulting
+ /// download will be cached on disk for subsequent access.
+ ///
+ /// If `use_disk_cache` is true then remote files are fetched from disk cache.
+ ///
+  /// If `no_remote_fetch` is true and the remote file is not found in the disk
+  /// cache, this method will fail.
+ fn get_source_file_async(
+ self: &Self,
+ module_url: &Url,
+ use_disk_cache: bool,
+ no_remote_fetch: bool,
+ ) -> impl Future<Item = SourceFile, Error = ErrBox> {
+ let url_scheme = module_url.scheme();
+ let is_local_file = url_scheme == "file";
+
+ if let Err(err) = SourceFileFetcher::check_if_supported_scheme(&module_url)
+ {
+ return Either::A(futures::future::err(err));
+ }
+
+ // Local files are always fetched from disk bypassing cache entirely.
+ if is_local_file {
+ match self.fetch_local_file(&module_url) {
+ Ok(source_file) => {
+ return Either::A(futures::future::ok(source_file));
+ }
+ Err(err) => {
+ return Either::A(futures::future::err(err));
+ }
+ }
+ }
+
+ // Fetch remote file and cache on-disk for subsequent access
+ Either::B(self.fetch_remote_source_async(
+ &module_url,
+ use_disk_cache,
+ no_remote_fetch,
+ 10,
+ ))
+ }
+
+ /// Fetch local source file.
+ fn fetch_local_file(
+ self: &Self,
+ module_url: &Url,
+ ) -> Result<SourceFile, ErrBox> {
+ let filepath = module_url.to_file_path().map_err(|()| {
+ ErrBox::from(DenoError::new(
+ ErrorKind::InvalidPath,
+ "File URL contains invalid path".to_owned(),
+ ))
+ })?;
+
+ let source_code = match fs::read(filepath.clone()) {
+ Ok(c) => c,
+ Err(e) => return Err(e.into()),
+ };
+
+ let media_type = map_content_type(&filepath, None);
+ Ok(SourceFile {
+ url: module_url.clone(),
+ filename: filepath,
+ media_type,
+ source_code,
+ })
+ }
+
+ /// Fetch cached remote file.
+ ///
+ /// This is a recursive operation if source file has redirections.
+ ///
+ /// It will keep reading <filename>.headers.json for information about redirection.
+ /// `module_initial_source_name` would be None on first call,
+ /// and becomes the name of the very first module that initiates the call
+ /// in subsequent recursions.
+ ///
+ /// AKA if redirection occurs, module_initial_source_name is the source path
+ /// that user provides, and the final module_name is the resolved path
+ /// after following all redirections.
+ fn fetch_cached_remote_source(
+ self: &Self,
+ module_url: &Url,
+ ) -> Result<Option<SourceFile>, ErrBox> {
+ let source_code_headers = self.get_source_code_headers(&module_url);
+    // If the source code headers say that it would redirect elsewhere
+    // (meaning that the source file might not exist; only .headers.json is present),
+    // abort reading attempts to the cached source file and follow the redirect.
+ if let Some(redirect_to) = source_code_headers.redirect_to {
+ // E.g.
+ // module_name https://import-meta.now.sh/redirect.js
+ // filename /Users/kun/Library/Caches/deno/deps/https/import-meta.now.sh/redirect.js
+ // redirect_to https://import-meta.now.sh/sub/final1.js
+ // real_filename /Users/kun/Library/Caches/deno/deps/https/import-meta.now.sh/sub/final1.js
+ // real_module_name = https://import-meta.now.sh/sub/final1.js
+ let redirect_url = Url::parse(&redirect_to).expect("Should be valid URL");
+
+ // Recurse.
+ // TODO(bartlomieju): I'm pretty sure we should call `fetch_remote_source_async` here.
+ // Should we expect that all redirects are cached?
+ return self.fetch_cached_remote_source(&redirect_url);
+ }
+
+ // No redirect needed or end of redirects.
+ // We can try read the file
+ let filepath = self
+ .deps_cache
+ .location
+ .join(self.deps_cache.get_cache_filename(&module_url));
+ let source_code = match fs::read(filepath.clone()) {
+ Err(e) => {
+ if e.kind() == std::io::ErrorKind::NotFound {
+ return Ok(None);
+ } else {
+ return Err(e.into());
+ }
+ }
+ Ok(c) => c,
+ };
+ let media_type = map_content_type(
+ &filepath,
+ source_code_headers.mime_type.as_ref().map(String::as_str),
+ );
+ Ok(Some(SourceFile {
+ url: module_url.clone(),
+ filename: filepath,
+ media_type,
+ source_code,
+ }))
+ }
+
+ /// Asynchronously fetch remote source file specified by the URL following redirects.
+ fn fetch_remote_source_async(
+ self: &Self,
+ module_url: &Url,
+ use_disk_cache: bool,
+ no_remote_fetch: bool,
+ redirect_limit: i64,
+ ) -> Box<SourceFileFuture> {
+ if redirect_limit < 0 {
+ return Box::new(futures::future::err(too_many_redirects()));
+ }
+
+ // First try local cache
+ if use_disk_cache {
+ match self.fetch_cached_remote_source(&module_url) {
+ Ok(Some(source_file)) => {
+ return Box::new(futures::future::ok(source_file));
+ }
+ Ok(None) => {
+ // there's no cached version
+ }
+ Err(err) => {
+ return Box::new(futures::future::err(err));
+ }
+ }
+ }
+
+ // If file wasn't found in cache check if we can fetch it
+ if no_remote_fetch {
+ // We can't fetch remote file - bail out
+ return Box::new(futures::future::err(
+ std::io::Error::new(
+ std::io::ErrorKind::NotFound,
+ format!(
+ "cannot find remote file '{}' in cache",
+ module_url.to_string()
+ ),
+ )
+ .into(),
+ ));
+ }
+
+ let download_job = self.progress.add("Download", &module_url.to_string());
+ let dir = self.clone();
+ let module_url = module_url.clone();
+
+ // Single pass fetch, either yields code or yields redirect.
+ let f = http_util::fetch_string_once(&module_url).and_then(move |r| {
+ match r {
+ FetchOnceResult::Redirect(new_module_url) => {
+ // If redirects, update module_name and filename for next looped call.
+ dir
+ .save_source_code_headers(
+ &module_url,
+ None,
+ Some(new_module_url.to_string()),
+ )
+ .unwrap();
+
+ // Explicit drop to keep reference alive until future completes.
+ drop(download_job);
+
+ // Recurse
+ Either::A(dir.fetch_remote_source_async(
+ &new_module_url,
+ use_disk_cache,
+ no_remote_fetch,
+ redirect_limit - 1,
+ ))
+ }
+ FetchOnceResult::Code(source, maybe_content_type) => {
+ // We land on the code.
+ dir
+ .save_source_code_headers(
+ &module_url,
+ maybe_content_type.clone(),
+ None,
+ )
+ .unwrap();
+
+ dir.save_source_code(&module_url, &source).unwrap();
+
+ let filepath = dir
+ .deps_cache
+ .location
+ .join(dir.deps_cache.get_cache_filename(&module_url));
+
+ let media_type = map_content_type(
+ &filepath,
+ maybe_content_type.as_ref().map(String::as_str),
+ );
+
+ let source_file = SourceFile {
+ url: module_url.clone(),
+ filename: filepath,
+ media_type,
+ source_code: source.as_bytes().to_owned(),
+ };
+
+ // Explicit drop to keep reference alive until future completes.
+ drop(download_job);
+
+ Either::B(futures::future::ok(source_file))
+ }
+ }
+ });
+
+ Box::new(f)
+ }
+
+ /// Get header metadata associated with a remote file.
+ ///
+ /// NOTE: chances are that the source file was downloaded due to redirects.
+ /// In this case, the headers file provides info about where we should go and get
+  /// the file that the redirect eventually points to.
+ fn get_source_code_headers(self: &Self, url: &Url) -> SourceCodeHeaders {
+ let cache_key = self
+ .deps_cache
+ .get_cache_filename_with_extension(url, "headers.json");
+
+ if let Ok(bytes) = self.deps_cache.get(&cache_key) {
+ if let Ok(json_string) = std::str::from_utf8(&bytes) {
+ return SourceCodeHeaders::from_json_string(json_string.to_string());
+ }
+ }
+
+ SourceCodeHeaders::default()
+ }
+
+ /// Save contents of downloaded remote file in on-disk cache for subsequent access.
+ fn save_source_code(
+ self: &Self,
+ url: &Url,
+ source: &str,
+ ) -> std::io::Result<()> {
+ let cache_key = self.deps_cache.get_cache_filename(url);
+
+ // May not exist. DON'T unwrap.
+ let _ = self.deps_cache.remove(&cache_key);
+
+ self.deps_cache.set(&cache_key, source.as_bytes())
+ }
+
+ /// Save headers related to source file to {filename}.headers.json file,
+ /// only when there is actually something necessary to save.
+ ///
+ /// For example, if the extension ".js" already mean JS file and we have
+ /// content type of "text/javascript", then we would not save the mime type.
+ ///
+ /// If nothing needs to be saved, the headers file is not created.
+ fn save_source_code_headers(
+ self: &Self,
+ url: &Url,
+ mime_type: Option<String>,
+ redirect_to: Option<String>,
+ ) -> std::io::Result<()> {
+ let cache_key = self
+ .deps_cache
+ .get_cache_filename_with_extension(url, "headers.json");
+
+ // Remove possibly existing stale .headers.json file.
+ // May not exist. DON'T unwrap.
+ let _ = self.deps_cache.remove(&cache_key);
+
+ let headers = SourceCodeHeaders {
+ mime_type,
+ redirect_to,
+ };
+
+ let cache_filename = self.deps_cache.get_cache_filename(url);
+ if let Ok(maybe_json_string) = headers.to_json_string(&cache_filename) {
+ if let Some(json_string) = maybe_json_string {
+ return self.deps_cache.set(&cache_key, json_string.as_bytes());
+ }
+ }
+
+ Ok(())
+ }
+}
+
+fn map_file_extension(path: &Path) -> msg::MediaType {
+ match path.extension() {
+ None => msg::MediaType::Unknown,
+ Some(os_str) => match os_str.to_str() {
+ Some("ts") => msg::MediaType::TypeScript,
+ Some("tsx") => msg::MediaType::TSX,
+ Some("js") => msg::MediaType::JavaScript,
+ Some("jsx") => msg::MediaType::JSX,
+ Some("mjs") => msg::MediaType::JavaScript,
+ Some("json") => msg::MediaType::Json,
+ _ => msg::MediaType::Unknown,
+ },
+ }
+}
+
+// convert a ContentType string into an enumerated MediaType
+fn map_content_type(path: &Path, content_type: Option<&str>) -> msg::MediaType {
+ match content_type {
+ Some(content_type) => {
+      // sometimes there is additional data after the media type in
+      // Content-Type, so we have to do a bit of manipulation to make sure
+      // we are only dealing with the actual media type
+ let ct_vector: Vec<&str> = content_type.split(';').collect();
+ let ct: &str = ct_vector.first().unwrap();
+ match ct.to_lowercase().as_ref() {
+ "application/typescript"
+ | "text/typescript"
+ | "video/vnd.dlna.mpeg-tts"
+ | "video/mp2t"
+ | "application/x-typescript" => msg::MediaType::TypeScript,
+ "application/javascript"
+ | "text/javascript"
+ | "application/ecmascript"
+ | "text/ecmascript"
+ | "application/x-javascript" => msg::MediaType::JavaScript,
+ "application/json" | "text/json" => msg::MediaType::Json,
+ "text/plain" => map_file_extension(path),
+ _ => {
+ debug!("unknown content type: {}", content_type);
+ msg::MediaType::Unknown
+ }
+ }
+ }
+ None => map_file_extension(path),
+ }
+}
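+
+// A minimal illustrative sketch: parameters such as "charset" are ignored and
+// "text/plain" falls back to the file extension (the paths below are placeholders).
+#[cfg(test)]
+#[test]
+fn map_content_type_sketch() {
+  assert_eq!(
+    map_content_type(
+      Path::new("any_name"),
+      Some("application/typescript; charset=utf-8")
+    ),
+    msg::MediaType::TypeScript
+  );
+  assert_eq!(
+    map_content_type(Path::new("mod.ts"), Some("text/plain")),
+    msg::MediaType::TypeScript
+  );
+}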
+
+fn filter_shebang(bytes: Vec<u8>) -> Vec<u8> {
+ let string = str::from_utf8(&bytes).unwrap();
+ if let Some(i) = string.find('\n') {
+ let (_, rest) = string.split_at(i);
+ rest.as_bytes().to_owned()
+ } else {
+ Vec::new()
+ }
+}
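+
+// A minimal illustrative sketch: the shebang line is dropped but its terminating
+// newline is kept, so line numbers of the remaining source are preserved.
+#[cfg(test)]
+#[test]
+fn filter_shebang_sketch() {
+  let code = b"#!/usr/bin/env deno\nconsole.log(1);\n".to_vec();
+  assert_eq!(filter_shebang(code), b"\nconsole.log(1);\n".to_vec());
+}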
+
+#[derive(Debug, Default)]
+/// Header metadata associated with a particular "symbolic" source code file.
+/// (the associated source code file might not be cached, while remaining
+/// a user-accessible entity through imports (due to redirects)).
+pub struct SourceCodeHeaders {
+ /// MIME type of the source code.
+ pub mime_type: Option<String>,
+ /// Where should we actually look for source code.
+ /// This should be an absolute path!
+ pub redirect_to: Option<String>,
+}
+
+static MIME_TYPE: &str = "mime_type";
+static REDIRECT_TO: &str = "redirect_to";
+
+impl SourceCodeHeaders {
+ pub fn from_json_string(headers_string: String) -> Self {
+ // TODO: use serde for deserialization
+ let maybe_headers_json: serde_json::Result<serde_json::Value> =
+ serde_json::from_str(&headers_string);
+
+ if let Ok(headers_json) = maybe_headers_json {
+ let mime_type = headers_json[MIME_TYPE].as_str().map(String::from);
+ let redirect_to = headers_json[REDIRECT_TO].as_str().map(String::from);
+
+ return SourceCodeHeaders {
+ mime_type,
+ redirect_to,
+ };
+ }
+
+ SourceCodeHeaders::default()
+ }
+
+ // TODO: remove this nonsense `cache_filename` param, this should be
+ // done when instantiating SourceCodeHeaders
+ pub fn to_json_string(
+ self: &Self,
+ cache_filename: &Path,
+ ) -> Result<Option<String>, serde_json::Error> {
+    // TODO(kevinkassimo): consider introducing serde::Deserialize to make things simpler.
+    // This is super ugly at the moment...
+    // Had trouble making serde_derive work: I'm unable to build proc-macro2.
+ let mut value_map = serde_json::map::Map::new();
+
+ if let Some(mime_type) = &self.mime_type {
+ let resolved_mime_type =
+ map_content_type(Path::new(""), Some(mime_type.clone().as_str()));
+
+ // TODO: fix this
+ let ext_based_mime_type = map_file_extension(cache_filename);
+
+ // Add mime to headers only when content type is different from extension.
+ if ext_based_mime_type == msg::MediaType::Unknown
+ || resolved_mime_type != ext_based_mime_type
+ {
+ value_map.insert(MIME_TYPE.to_string(), json!(mime_type));
+ }
+ }
+
+ if let Some(redirect_to) = &self.redirect_to {
+ value_map.insert(REDIRECT_TO.to_string(), json!(redirect_to));
+ }
+
+ if value_map.is_empty() {
+ return Ok(None);
+ }
+
+ serde_json::to_string(&value_map)
+ .and_then(|serialized| Ok(Some(serialized)))
+ }
+}
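+
+// A minimal illustrative sketch: a mime type already implied by the cached
+// file's extension is not serialized, so in that case no ".headers.json"
+// content needs to be written (the path below is a placeholder).
+#[cfg(test)]
+#[test]
+fn source_code_headers_to_json_sketch() {
+  let headers = SourceCodeHeaders {
+    mime_type: Some("application/typescript".to_string()),
+    redirect_to: None,
+  };
+  let serialized = headers.to_json_string(Path::new("mod.ts")).unwrap();
+  assert!(serialized.is_none());
+}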
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::fs as deno_fs;
+ use tempfile::TempDir;
+
+ fn setup_file_fetcher(dir_path: &Path) -> SourceFileFetcher {
+ SourceFileFetcher::new(
+ DiskCache::new(&dir_path.to_path_buf().join("deps")),
+ Progress::new(),
+ true,
+ false,
+ )
+ .expect("setup fail")
+ }
+
+ fn test_setup() -> (TempDir, SourceFileFetcher) {
+ let temp_dir = TempDir::new().expect("tempdir fail");
+ let fetcher = setup_file_fetcher(temp_dir.path());
+ (temp_dir, fetcher)
+ }
+
+ macro_rules! file_url {
+ ($path:expr) => {
+ if cfg!(target_os = "windows") {
+ concat!("file:///C:", $path)
+ } else {
+ concat!("file://", $path)
+ }
+ };
+ }
+
+ #[test]
+ fn test_source_code_headers_get_and_save() {
+ let (_temp_dir, fetcher) = test_setup();
+ let url = Url::parse("http://example.com/f.js").unwrap();
+ let headers_filepath = fetcher.deps_cache.location.join(
+ fetcher
+ .deps_cache
+ .get_cache_filename_with_extension(&url, "headers.json"),
+ );
+
+ if let Some(ref parent) = headers_filepath.parent() {
+ fs::create_dir_all(parent).unwrap();
+ };
+
+ let _ = deno_fs::write_file(
+ headers_filepath.as_path(),
+ "{\"mime_type\":\"text/javascript\",\"redirect_to\":\"http://example.com/a.js\"}",
+ 0o666
+ );
+ let headers = fetcher.get_source_code_headers(&url);
+
+ assert_eq!(headers.mime_type.clone().unwrap(), "text/javascript");
+ assert_eq!(
+ headers.redirect_to.clone().unwrap(),
+ "http://example.com/a.js"
+ );
+
+ let _ = fetcher.save_source_code_headers(
+ &url,
+ Some("text/typescript".to_owned()),
+ Some("http://deno.land/a.js".to_owned()),
+ );
+ let headers2 = fetcher.get_source_code_headers(&url);
+ assert_eq!(headers2.mime_type.clone().unwrap(), "text/typescript");
+ assert_eq!(
+ headers2.redirect_to.clone().unwrap(),
+ "http://deno.land/a.js"
+ );
+ }
+
+ #[test]
+ fn test_fetch_local_file_no_panic() {
+ let (_temp_dir, fetcher) = test_setup();
+ if cfg!(windows) {
+ // Should fail: missing drive letter.
+ let u = Url::parse("file:///etc/passwd").unwrap();
+ fetcher.fetch_local_file(&u).unwrap_err();
+ } else {
+ // Should fail: local network paths are not supported on unix.
+ let u = Url::parse("file://server/etc/passwd").unwrap();
+ fetcher.fetch_local_file(&u).unwrap_err();
+ }
+ }
+
+ #[test]
+ fn test_get_source_code_1() {
+ let http_server_guard = crate::test_util::http_server();
+ let (temp_dir, fetcher) = test_setup();
+ let fetcher_1 = fetcher.clone();
+ let fetcher_2 = fetcher.clone();
+ let module_url =
+ Url::parse("http://localhost:4545/tests/subdir/mod2.ts").unwrap();
+ let module_url_1 = module_url.clone();
+ let module_url_2 = module_url.clone();
+ let headers_file_name = fetcher.deps_cache.location.join(
+ fetcher
+ .deps_cache
+ .get_cache_filename_with_extension(&module_url, "headers.json"),
+ );
+ let headers_file_name_1 = headers_file_name.clone();
+ let headers_file_name_2 = headers_file_name.clone();
+ let headers_file_name_3 = headers_file_name.clone();
+
+ let fut = fetcher
+ .get_source_file_async(&module_url, true, false)
+ .then(move |result| {
+ assert!(result.is_ok());
+ let r = result.unwrap();
+ assert_eq!(
+ r.source_code,
+ "export { printHello } from \"./print_hello.ts\";\n".as_bytes()
+ );
+ assert_eq!(&(r.media_type), &msg::MediaType::TypeScript);
+ // Should not create .headers.json file due to matching ext
+ assert!(fs::read_to_string(&headers_file_name_1).is_err());
+
+        // Modify .headers.json: write it with fs::write and read it back via get_source_code_headers
+ let _ = fs::write(
+ &headers_file_name_1,
+ "{ \"mime_type\": \"text/javascript\" }",
+ );
+ fetcher_1.get_source_file_async(&module_url, true, false)
+ })
+ .then(move |result2| {
+ assert!(result2.is_ok());
+ let r2 = result2.unwrap();
+ assert_eq!(
+ r2.source_code,
+ "export { printHello } from \"./print_hello.ts\";\n".as_bytes()
+ );
+ // If get_source_file_async does not call remote, this should be JavaScript
+ // as we modified before! (we do not overwrite .headers.json due to no http fetch)
+ assert_eq!(&(r2.media_type), &msg::MediaType::JavaScript);
+ assert_eq!(
+ fetcher_2
+ .get_source_code_headers(&module_url_1)
+ .mime_type
+ .unwrap(),
+ "text/javascript"
+ );
+
+ // Modify .headers.json again, but the other way around
+ let _ = fetcher_2.save_source_code_headers(
+ &module_url_1,
+ Some("application/json".to_owned()),
+ None,
+ );
+ fetcher_2.get_source_file_async(&module_url_1, true, false)
+ })
+ .then(move |result3| {
+ assert!(result3.is_ok());
+ let r3 = result3.unwrap();
+ assert_eq!(
+ r3.source_code,
+ "export { printHello } from \"./print_hello.ts\";\n".as_bytes()
+ );
+ // If get_source_file_async does not call remote, this should be JavaScript
+ // as we modified before! (we do not overwrite .headers.json due to no http fetch)
+ assert_eq!(&(r3.media_type), &msg::MediaType::Json);
+ assert!(fs::read_to_string(&headers_file_name_2)
+ .unwrap()
+ .contains("application/json"));
+
+        // let's create a fresh instance of DenoDir (simulating another fresh Deno process)
+        // and don't use the cache
+ let fetcher = setup_file_fetcher(temp_dir.path());
+ fetcher.get_source_file_async(&module_url_2, false, false)
+ })
+ .then(move |result4| {
+ assert!(result4.is_ok());
+ let r4 = result4.unwrap();
+ let expected4 =
+ "export { printHello } from \"./print_hello.ts\";\n".as_bytes();
+ assert_eq!(r4.source_code, expected4);
+ // Now the old .headers.json file should have gone! Resolved back to TypeScript
+ assert_eq!(&(r4.media_type), &msg::MediaType::TypeScript);
+ assert!(fs::read_to_string(&headers_file_name_3).is_err());
+ Ok(())
+ });
+
+ // http_util::fetch_sync_string requires tokio
+ tokio_util::run(fut);
+ drop(http_server_guard);
+ }
+
+ #[test]
+ fn test_get_source_code_2() {
+ let http_server_guard = crate::test_util::http_server();
+ let (temp_dir, fetcher) = test_setup();
+ let fetcher_1 = fetcher.clone();
+ let module_url =
+ Url::parse("http://localhost:4545/tests/subdir/mismatch_ext.ts").unwrap();
+ let module_url_1 = module_url.clone();
+ let module_url_2 = module_url.clone();
+ let headers_file_name = fetcher.deps_cache.location.join(
+ fetcher
+ .deps_cache
+ .get_cache_filename_with_extension(&module_url, "headers.json"),
+ );
+
+ let fut = fetcher
+ .get_source_file_async(&module_url, true, false)
+ .then(move |result| {
+ assert!(result.is_ok());
+ let r = result.unwrap();
+ let expected = "export const loaded = true;\n".as_bytes();
+ assert_eq!(r.source_code, expected);
+ // Mismatch ext with content type, create .headers.json
+ assert_eq!(&(r.media_type), &msg::MediaType::JavaScript);
+ assert_eq!(
+ fetcher
+ .get_source_code_headers(&module_url)
+ .mime_type
+ .unwrap(),
+ "text/javascript"
+ );
+
+ // Modify .headers.json
+ let _ = fetcher.save_source_code_headers(
+ &module_url,
+ Some("text/typescript".to_owned()),
+ None,
+ );
+ fetcher.get_source_file_async(&module_url, true, false)
+ })
+ .then(move |result2| {
+ assert!(result2.is_ok());
+ let r2 = result2.unwrap();
+ let expected2 = "export const loaded = true;\n".as_bytes();
+ assert_eq!(r2.source_code, expected2);
+ // If get_source_file_async does not call remote, this should be TypeScript
+ // as we modified before! (we do not overwrite .headers.json due to no http fetch)
+ assert_eq!(&(r2.media_type), &msg::MediaType::TypeScript);
+ assert!(fs::read_to_string(&headers_file_name).is_err());
+
+        // let's create a fresh instance of DenoDir (simulating another fresh Deno process)
+        // and don't use the cache
+ let fetcher = setup_file_fetcher(temp_dir.path());
+ fetcher.get_source_file_async(&module_url_1, false, false)
+ })
+ .then(move |result3| {
+ assert!(result3.is_ok());
+ let r3 = result3.unwrap();
+ let expected3 = "export const loaded = true;\n".as_bytes();
+ assert_eq!(r3.source_code, expected3);
+ // Now the old .headers.json file should be overwritten back to JavaScript!
+ // (due to http fetch)
+ assert_eq!(&(r3.media_type), &msg::MediaType::JavaScript);
+ assert_eq!(
+ fetcher_1
+ .get_source_code_headers(&module_url_2)
+ .mime_type
+ .unwrap(),
+ "text/javascript"
+ );
+ Ok(())
+ });
+
+ tokio_util::run(fut);
+ drop(http_server_guard);
+ }
+
+ #[test]
+ fn test_get_source_code_multiple_downloads_of_same_file() {
+ let http_server_guard = crate::test_util::http_server();
+ let (_temp_dir, fetcher) = test_setup();
+ // http_util::fetch_sync_string requires tokio
+ tokio_util::init(|| {
+ let specifier = ModuleSpecifier::resolve_url(
+ "http://localhost:4545/tests/subdir/mismatch_ext.ts",
+ )
+ .unwrap();
+ let headers_file_name = fetcher.deps_cache.location.join(
+ fetcher.deps_cache.get_cache_filename_with_extension(
+ specifier.as_url(),
+ "headers.json",
+ ),
+ );
+
+ // first download
+ let result = fetcher.fetch_source_file(&specifier);
+ assert!(result.is_ok());
+
+ let result = fs::File::open(&headers_file_name);
+ assert!(result.is_ok());
+ let headers_file = result.unwrap();
+ // save modified timestamp for headers file
+ let headers_file_metadata = headers_file.metadata().unwrap();
+ let headers_file_modified = headers_file_metadata.modified().unwrap();
+
+      // download the file again; it should use the already fetched file even though
+      // `use_disk_cache` is set to false. This can be verified using the source header
+      // file creation timestamp (it should be the same as after the first download)
+ let result = fetcher.fetch_source_file(&specifier);
+ assert!(result.is_ok());
+
+ let result = fs::File::open(&headers_file_name);
+ assert!(result.is_ok());
+ let headers_file_2 = result.unwrap();
+ // save modified timestamp for headers file
+ let headers_file_metadata_2 = headers_file_2.metadata().unwrap();
+ let headers_file_modified_2 = headers_file_metadata_2.modified().unwrap();
+
+ assert_eq!(headers_file_modified, headers_file_modified_2);
+ });
+ drop(http_server_guard);
+ }
+
+ #[test]
+ fn test_get_source_code_3() {
+ let http_server_guard = crate::test_util::http_server();
+ let (_temp_dir, fetcher) = test_setup();
+
+ let redirect_module_url =
+ Url::parse("http://localhost:4546/tests/subdir/redirects/redirect1.js")
+ .unwrap();
+ let redirect_source_filepath = fetcher
+ .deps_cache
+ .location
+ .join("http/localhost_PORT4546/tests/subdir/redirects/redirect1.js");
+ let redirect_source_filename =
+ redirect_source_filepath.to_str().unwrap().to_string();
+ let target_module_url =
+ Url::parse("http://localhost:4545/tests/subdir/redirects/redirect1.js")
+ .unwrap();
+ let redirect_target_filepath = fetcher
+ .deps_cache
+ .location
+ .join("http/localhost_PORT4545/tests/subdir/redirects/redirect1.js");
+ let redirect_target_filename =
+ redirect_target_filepath.to_str().unwrap().to_string();
+
+ // Test basic follow and headers recording
+ let fut = fetcher
+ .get_source_file_async(&redirect_module_url, true, false)
+ .then(move |result| {
+ assert!(result.is_ok());
+ let mod_meta = result.unwrap();
+ // File that requires redirection is not downloaded.
+ assert!(fs::read_to_string(&redirect_source_filename).is_err());
+ // ... but its .headers.json is created.
+ let redirect_source_headers =
+ fetcher.get_source_code_headers(&redirect_module_url);
+ assert_eq!(
+ redirect_source_headers.redirect_to.unwrap(),
+ "http://localhost:4545/tests/subdir/redirects/redirect1.js"
+ );
+ // The target of redirection is downloaded instead.
+ assert_eq!(
+ fs::read_to_string(&redirect_target_filename).unwrap(),
+ "export const redirect = 1;\n"
+ );
+ let redirect_target_headers =
+ fetcher.get_source_code_headers(&target_module_url);
+ assert!(redirect_target_headers.redirect_to.is_none());
+
+ // Examine the meta result.
+ assert_eq!(mod_meta.url.clone(), target_module_url);
+ Ok(())
+ });
+
+ tokio_util::run(fut);
+ drop(http_server_guard);
+ }
+
+ #[test]
+ fn test_get_source_code_4() {
+ let http_server_guard = crate::test_util::http_server();
+ let (_temp_dir, fetcher) = test_setup();
+ let double_redirect_url =
+ Url::parse("http://localhost:4548/tests/subdir/redirects/redirect1.js")
+ .unwrap();
+ let double_redirect_path = fetcher
+ .deps_cache
+ .location
+ .join("http/localhost_PORT4548/tests/subdir/redirects/redirect1.js");
+
+ let redirect_url =
+ Url::parse("http://localhost:4546/tests/subdir/redirects/redirect1.js")
+ .unwrap();
+ let redirect_path = fetcher
+ .deps_cache
+ .location
+ .join("http/localhost_PORT4546/tests/subdir/redirects/redirect1.js");
+
+ let target_url =
+ Url::parse("http://localhost:4545/tests/subdir/redirects/redirect1.js")
+ .unwrap();
+ let target_path = fetcher
+ .deps_cache
+ .location
+ .join("http/localhost_PORT4545/tests/subdir/redirects/redirect1.js");
+
+ // Test double redirects and headers recording
+ let fut = fetcher
+ .get_source_file_async(&double_redirect_url, true, false)
+ .then(move |result| {
+ assert!(result.is_ok());
+ let mod_meta = result.unwrap();
+ assert!(fs::read_to_string(&double_redirect_path).is_err());
+ assert!(fs::read_to_string(&redirect_path).is_err());
+
+ let double_redirect_headers =
+ fetcher.get_source_code_headers(&double_redirect_url);
+ assert_eq!(
+ double_redirect_headers.redirect_to.unwrap(),
+ redirect_url.to_string()
+ );
+ let redirect_headers = fetcher.get_source_code_headers(&redirect_url);
+ assert_eq!(
+ redirect_headers.redirect_to.unwrap(),
+ target_url.to_string()
+ );
+
+ // The target of redirection is downloaded instead.
+ assert_eq!(
+ fs::read_to_string(&target_path).unwrap(),
+ "export const redirect = 1;\n"
+ );
+ let redirect_target_headers =
+ fetcher.get_source_code_headers(&target_url);
+ assert!(redirect_target_headers.redirect_to.is_none());
+
+ // Examine the meta result.
+ assert_eq!(mod_meta.url.clone(), target_url);
+ Ok(())
+ });
+
+ tokio_util::run(fut);
+ drop(http_server_guard);
+ }
+
+ #[test]
+ fn test_get_source_code_5() {
+ let http_server_guard = crate::test_util::http_server();
+ let (_temp_dir, fetcher) = test_setup();
+
+ let double_redirect_url =
+ Url::parse("http://localhost:4548/tests/subdir/redirects/redirect1.js")
+ .unwrap();
+
+ let redirect_url =
+ Url::parse("http://localhost:4546/tests/subdir/redirects/redirect1.js")
+ .unwrap();
+
+ let target_path = fetcher
+ .deps_cache
+ .location
+ .join("http/localhost_PORT4545/tests/subdir/redirects/redirect1.js");
+ let target_path_ = target_path.clone();
+
+ // Test that redirect target is not downloaded twice for different redirect source.
+ let fut = fetcher
+ .get_source_file_async(&double_redirect_url, true, false)
+ .then(move |result| {
+ assert!(result.is_ok());
+ let result = fs::File::open(&target_path);
+ assert!(result.is_ok());
+ let file = result.unwrap();
+        // save the modified timestamp of the redirect target file
+ let file_metadata = file.metadata().unwrap();
+ let file_modified = file_metadata.modified().unwrap();
+
+        // When another file is fetched that also points to the redirect target, the redirect target
+ // shouldn't be downloaded again. It can be verified using source header file creation
+ // timestamp (should be the same as after first `get_source_file`)
+ fetcher
+ .get_source_file_async(&redirect_url, true, false)
+ .map(move |r| (r, file_modified))
+ })
+ .then(move |result| {
+ assert!(result.is_ok());
+ let (_, file_modified) = result.unwrap();
+ let result = fs::File::open(&target_path_);
+ assert!(result.is_ok());
+ let file_2 = result.unwrap();
+      // Save the modification timestamp of the cached file again.
+ let file_metadata_2 = file_2.metadata().unwrap();
+ let file_modified_2 = file_metadata_2.modified().unwrap();
+
+ assert_eq!(file_modified, file_modified_2);
+ Ok(())
+ });
+
+ tokio_util::run(fut);
+ drop(http_server_guard);
+ }
+
+ #[test]
+ fn test_get_source_code_6() {
+ let http_server_guard = crate::test_util::http_server();
+ let (_temp_dir, fetcher) = test_setup();
+ let double_redirect_url =
+ Url::parse("http://localhost:4548/tests/subdir/redirects/redirect1.js")
+ .unwrap();
+
+ // Test that redirections can be limited
+ let fut = fetcher
+ .fetch_remote_source_async(&double_redirect_url, false, false, 2)
+ .then(move |result| {
+ assert!(result.is_ok());
+ fetcher.fetch_remote_source_async(&double_redirect_url, false, false, 1)
+ })
+ .then(move |result| {
+ assert!(result.is_err());
+ let err = result.err().unwrap();
+ assert_eq!(err.kind(), ErrorKind::TooManyRedirects);
+ Ok(())
+ });
+
+ tokio_util::run(fut);
+ drop(http_server_guard);
+ }
+
+ #[test]
+ fn test_get_source_code_no_fetch() {
+ let http_server_guard = crate::test_util::http_server();
+ let (_temp_dir, fetcher) = test_setup();
+ let fetcher_1 = fetcher.clone();
+ let fetcher_2 = fetcher.clone();
+ let module_url =
+ Url::parse("http://localhost:4545/tests/002_hello.ts").unwrap();
+ let module_url_1 = module_url.clone();
+ let module_url_2 = module_url.clone();
+ // file hasn't been cached before and remote downloads are not allowed
+ let fut = fetcher
+ .get_source_file_async(&module_url, true, true)
+ .then(move |result| {
+ assert!(result.is_err());
+ let err = result.err().unwrap();
+ assert_eq!(err.kind(), ErrorKind::NotFound);
+
+ // download and cache file
+ fetcher_1.get_source_file_async(&module_url_1, true, false)
+ })
+ .then(move |result| {
+ assert!(result.is_ok());
+ // module is already cached, should be ok even with `no_remote_fetch`
+ fetcher_2.get_source_file_async(&module_url_2, true, true)
+ })
+ .then(move |result| {
+ assert!(result.is_ok());
+ Ok(())
+ });
+
+ tokio_util::run(fut);
+ drop(http_server_guard);
+ }
+
+ #[test]
+ fn test_fetch_source_async_1() {
+ let http_server_guard = crate::test_util::http_server();
+ let (_temp_dir, fetcher) = test_setup();
+ let module_url =
+ Url::parse("http://127.0.0.1:4545/tests/subdir/mt_video_mp2t.t3.ts")
+ .unwrap();
+ let headers_file_name = fetcher.deps_cache.location.join(
+ fetcher
+ .deps_cache
+ .get_cache_filename_with_extension(&module_url, "headers.json"),
+ );
+
+ let fut = fetcher
+ .fetch_remote_source_async(&module_url, false, false, 10)
+ .then(move |result| {
+ assert!(result.is_ok());
+ let r = result.unwrap();
+ assert_eq!(r.source_code, b"export const loaded = true;\n");
+ assert_eq!(&(r.media_type), &msg::MediaType::TypeScript);
+ // matching ext, no .headers.json file created
+ assert!(fs::read_to_string(&headers_file_name).is_err());
+      // Modify .headers.json and make sure the next read comes from the local cache.
+ let _ = fetcher.save_source_code_headers(
+ &module_url,
+ Some("text/javascript".to_owned()),
+ None,
+ );
+ let result2 = fetcher.fetch_cached_remote_source(&module_url);
+ assert!(result2.is_ok());
+ let r2 = result2.unwrap().unwrap();
+ assert_eq!(r2.source_code, b"export const loaded = true;\n");
+ // Not MediaType::TypeScript due to .headers.json modification
+ assert_eq!(&(r2.media_type), &msg::MediaType::JavaScript);
+ Ok(())
+ });
+
+ tokio_util::run(fut);
+ drop(http_server_guard);
+ }
+
+ #[test]
+ fn test_fetch_source_1() {
+ let http_server_guard = crate::test_util::http_server();
+
+ let (_temp_dir, fetcher) = test_setup();
+ let module_url =
+ Url::parse("http://localhost:4545/tests/subdir/mt_video_mp2t.t3.ts")
+ .unwrap();
+ let headers_file_name = fetcher.deps_cache.location.join(
+ fetcher
+ .deps_cache
+ .get_cache_filename_with_extension(&module_url, "headers.json"),
+ );
+
+ let fut = fetcher
+ .fetch_remote_source_async(&module_url, false, false, 10)
+ .then(move |result| {
+ assert!(result.is_ok());
+ let r = result.unwrap();
+ assert_eq!(r.source_code, "export const loaded = true;\n".as_bytes());
+ assert_eq!(&(r.media_type), &msg::MediaType::TypeScript);
+ // matching ext, no .headers.json file created
+ assert!(fs::read_to_string(&headers_file_name).is_err());
+
+      // Modify .headers.json and make sure the next read comes from the local cache.
+ let _ = fetcher.save_source_code_headers(
+ &module_url,
+ Some("text/javascript".to_owned()),
+ None,
+ );
+ let result2 = fetcher.fetch_cached_remote_source(&module_url);
+ assert!(result2.is_ok());
+ let r2 = result2.unwrap().unwrap();
+ assert_eq!(r2.source_code, "export const loaded = true;\n".as_bytes());
+ // Not MediaType::TypeScript due to .headers.json modification
+ assert_eq!(&(r2.media_type), &msg::MediaType::JavaScript);
+ Ok(())
+ });
+
+ tokio_util::run(fut);
+ drop(http_server_guard);
+ }
+
+ #[test]
+ fn test_fetch_source_2() {
+ let http_server_guard = crate::test_util::http_server();
+ let (_temp_dir, fetcher) = test_setup();
+ let fetcher_1 = fetcher.clone();
+ let fetcher_2 = fetcher.clone();
+ let fetcher_3 = fetcher.clone();
+ let module_url =
+ Url::parse("http://localhost:4545/tests/subdir/no_ext").unwrap();
+ let module_url_2 =
+ Url::parse("http://localhost:4545/tests/subdir/mismatch_ext.ts").unwrap();
+ let module_url_2_ = module_url_2.clone();
+ let module_url_3 =
+ Url::parse("http://localhost:4545/tests/subdir/unknown_ext.deno")
+ .unwrap();
+ let module_url_3_ = module_url_3.clone();
+
+ let fut = fetcher
+ .fetch_remote_source_async(&module_url, false, false, 10)
+ .then(move |result| {
+ assert!(result.is_ok());
+ let r = result.unwrap();
+ assert_eq!(r.source_code, "export const loaded = true;\n".as_bytes());
+ assert_eq!(&(r.media_type), &msg::MediaType::TypeScript);
+ // no ext, should create .headers.json file
+ assert_eq!(
+ fetcher_1
+ .get_source_code_headers(&module_url)
+ .mime_type
+ .unwrap(),
+ "text/typescript"
+ );
+ fetcher_1.fetch_remote_source_async(&module_url_2, false, false, 10)
+ })
+ .then(move |result| {
+ assert!(result.is_ok());
+ let r2 = result.unwrap();
+ assert_eq!(r2.source_code, "export const loaded = true;\n".as_bytes());
+ assert_eq!(&(r2.media_type), &msg::MediaType::JavaScript);
+ // mismatch ext, should create .headers.json file
+ assert_eq!(
+ fetcher_2
+ .get_source_code_headers(&module_url_2_)
+ .mime_type
+ .unwrap(),
+ "text/javascript"
+ );
+ // test unknown extension
+ fetcher_2.fetch_remote_source_async(&module_url_3, false, false, 10)
+ })
+ .then(move |result| {
+ assert!(result.is_ok());
+ let r3 = result.unwrap();
+ assert_eq!(r3.source_code, "export const loaded = true;\n".as_bytes());
+ assert_eq!(&(r3.media_type), &msg::MediaType::TypeScript);
+ // unknown ext, should create .headers.json file
+ assert_eq!(
+ fetcher_3
+ .get_source_code_headers(&module_url_3_)
+ .mime_type
+ .unwrap(),
+ "text/typescript"
+ );
+ futures::future::ok(())
+ });
+
+ tokio_util::run(fut);
+ drop(http_server_guard);
+ }
+
+ #[test]
+ fn test_fetch_source_file() {
+ let (_temp_dir, fetcher) = test_setup();
+
+ tokio_util::init(|| {
+ // Test failure case.
+ let specifier =
+ ModuleSpecifier::resolve_url(file_url!("/baddir/hello.ts")).unwrap();
+ let r = fetcher.fetch_source_file(&specifier);
+ assert!(r.is_err());
+
+ let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+ .join("js/main.ts")
+ .to_owned();
+ let specifier =
+ ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap()).unwrap();
+ let r = fetcher.fetch_source_file(&specifier);
+ assert!(r.is_ok());
+ })
+ }
+
+ #[test]
+ fn test_fetch_source_file_1() {
+ /*recompile ts file*/
+ let (_temp_dir, fetcher) = test_setup();
+
+ tokio_util::init(|| {
+ // Test failure case.
+ let specifier =
+ ModuleSpecifier::resolve_url(file_url!("/baddir/hello.ts")).unwrap();
+ let r = fetcher.fetch_source_file(&specifier);
+ assert!(r.is_err());
+
+ let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+ .join("js/main.ts")
+ .to_owned();
+ let specifier =
+ ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap()).unwrap();
+ let r = fetcher.fetch_source_file(&specifier);
+ assert!(r.is_ok());
+ })
+ }
+
+ #[test]
+ fn test_resolve_module_3() {
+ // unsupported schemes
+ let test_cases = [
+ "ftp://localhost:4545/testdata/subdir/print_hello.ts",
+ "blob:https://whatwg.org/d0360e2f-caee-469f-9a2f-87d5b0456f6f",
+ ];
+
+ for &test in test_cases.iter() {
+ let url = Url::parse(test).unwrap();
+ assert_eq!(
+ SourceFileFetcher::check_if_supported_scheme(&url)
+ .unwrap_err()
+ .kind(),
+ ErrorKind::UnsupportedFetchScheme
+ );
+ }
+ }
+
+ #[test]
+ fn test_map_file_extension() {
+ assert_eq!(
+ map_file_extension(Path::new("foo/bar.ts")),
+ msg::MediaType::TypeScript
+ );
+ assert_eq!(
+ map_file_extension(Path::new("foo/bar.tsx")),
+ msg::MediaType::TSX
+ );
+ assert_eq!(
+ map_file_extension(Path::new("foo/bar.d.ts")),
+ msg::MediaType::TypeScript
+ );
+ assert_eq!(
+ map_file_extension(Path::new("foo/bar.js")),
+ msg::MediaType::JavaScript
+ );
+ assert_eq!(
+ map_file_extension(Path::new("foo/bar.jsx")),
+ msg::MediaType::JSX
+ );
+ assert_eq!(
+ map_file_extension(Path::new("foo/bar.json")),
+ msg::MediaType::Json
+ );
+ assert_eq!(
+ map_file_extension(Path::new("foo/bar.txt")),
+ msg::MediaType::Unknown
+ );
+ assert_eq!(
+ map_file_extension(Path::new("foo/bar")),
+ msg::MediaType::Unknown
+ );
+ }
+
+ #[test]
+ fn test_map_content_type() {
+ // Extension only
+ assert_eq!(
+ map_content_type(Path::new("foo/bar.ts"), None),
+ msg::MediaType::TypeScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar.tsx"), None),
+ msg::MediaType::TSX
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar.d.ts"), None),
+ msg::MediaType::TypeScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar.js"), None),
+ msg::MediaType::JavaScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar.jsx"), None),
+ msg::MediaType::JSX
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar.json"), None),
+ msg::MediaType::Json
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar.txt"), None),
+ msg::MediaType::Unknown
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar"), None),
+ msg::MediaType::Unknown
+ );
+
+ // Media Type
+ assert_eq!(
+ map_content_type(Path::new("foo/bar"), Some("application/typescript")),
+ msg::MediaType::TypeScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar"), Some("text/typescript")),
+ msg::MediaType::TypeScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar"), Some("video/vnd.dlna.mpeg-tts")),
+ msg::MediaType::TypeScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar"), Some("video/mp2t")),
+ msg::MediaType::TypeScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar"), Some("application/x-typescript")),
+ msg::MediaType::TypeScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar"), Some("application/javascript")),
+ msg::MediaType::JavaScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar"), Some("text/javascript")),
+ msg::MediaType::JavaScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar"), Some("application/ecmascript")),
+ msg::MediaType::JavaScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar"), Some("text/ecmascript")),
+ msg::MediaType::JavaScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar"), Some("application/x-javascript")),
+ msg::MediaType::JavaScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar"), Some("application/json")),
+ msg::MediaType::Json
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar"), Some("text/json")),
+ msg::MediaType::Json
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar.ts"), Some("text/plain")),
+ msg::MediaType::TypeScript
+ );
+ assert_eq!(
+ map_content_type(Path::new("foo/bar.ts"), Some("foo/bar")),
+ msg::MediaType::Unknown
+ );
+ }
+
+ #[test]
+ fn test_filter_shebang() {
+ assert_eq!(filter_shebang(b"#!"[..].to_owned()), b"");
+ assert_eq!(
+ filter_shebang("#!\n\n".as_bytes().to_owned()),
+ "\n\n".as_bytes()
+ );
+ let code = "#!/usr/bin/env deno\nconsole.log('hello');\n"
+ .as_bytes()
+ .to_owned();
+ assert_eq!(filter_shebang(code), "\nconsole.log('hello');\n".as_bytes());
+ }
+}
diff --git a/cli/flags.rs b/cli/flags.rs
new file mode 100644
index 000000000..24d331bfa
--- /dev/null
+++ b/cli/flags.rs
@@ -0,0 +1,1799 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::fs as deno_fs;
+use clap::App;
+use clap::AppSettings;
+use clap::Arg;
+use clap::ArgMatches;
+use clap::Shell;
+use clap::SubCommand;
+use deno::ModuleSpecifier;
+use log::Level;
+use std;
+use std::str;
+use std::str::FromStr;
+
+macro_rules! std_url {
+ ($x:expr) => {
+ concat!("https://deno.land/std@8c90bd9/", $x)
+ };
+}
+
+/// Used for `deno fmt <files>...` subcommand
+const PRETTIER_URL: &str = std_url!("prettier/main.ts");
+/// Used for `deno install...` subcommand
+const INSTALLER_URL: &str = std_url!("installer/mod.ts");
+/// Used for `deno test...` subcommand
+const TEST_RUNNER_URL: &str = std_url!("testing/runner.ts");
+/// Used for `deno xeval...` subcommand
+const XEVAL_URL: &str = std_url!("xeval/mod.ts");
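+// For reference, `std_url!("prettier/main.ts")` expands at compile time to the
+// pinned URL "https://deno.land/std@8c90bd9/prettier/main.ts"; the constants
+// above are all built this way.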
+
+// Creates a vector of strings, `Vec<String>`, from string-like values.
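+// Illustrative usage: `svec!["a", "b"]` expands to
+// `vec!["a".to_string(), "b".to_string()]`.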
+macro_rules! svec {
+ ($($x:expr),*) => (vec![$($x.to_string()),*]);
+}
+
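+/// Command line flags, resolved into a single structure.
+///
+/// A minimal sketch of how this is typically constructed (mirrors the unit
+/// tests at the bottom of this file); illustrative only, not compiled here:
+///
+/// ```ignore
+/// let flags = DenoFlags {
+///   reload: true,
+///   allow_net: true,
+///   ..DenoFlags::default()
+/// };
+/// ```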
+#[derive(Clone, Debug, PartialEq, Default)]
+pub struct DenoFlags {
+ pub log_level: Option<Level>,
+ pub version: bool,
+ pub reload: bool,
+  /// When the `--config`/`-c` flag is used to pass the name, this will be set
+  /// to the path passed on the command line; otherwise `None`.
+ pub config_path: Option<String>,
+  /// When the `--importmap` flag is used to pass the name, this will be set
+  /// to the path passed on the command line; otherwise `None`.
+ pub import_map_path: Option<String>,
+ pub allow_read: bool,
+ pub read_whitelist: Vec<String>,
+ pub allow_write: bool,
+ pub write_whitelist: Vec<String>,
+ pub allow_net: bool,
+ pub net_whitelist: Vec<String>,
+ pub allow_env: bool,
+ pub allow_run: bool,
+ pub allow_hrtime: bool,
+ pub no_prompts: bool,
+ pub no_fetch: bool,
+ pub seed: Option<u64>,
+ pub v8_flags: Option<Vec<String>>,
+ // Use tokio::runtime::current_thread
+ pub current_thread: bool,
+}
+
+static ENV_VARIABLES_HELP: &str = "ENVIRONMENT VARIABLES:
+ DENO_DIR Set deno's base directory
+ NO_COLOR Set to disable color
+ HTTP_PROXY Set proxy address for HTTP requests (module downloads, fetch)
+ HTTPS_PROXY Set proxy address for HTTPS requests (module downloads, fetch)";
+
+fn add_run_args<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> {
+ app
+ .arg(
+ Arg::with_name("allow-read")
+ .long("allow-read")
+ .min_values(0)
+ .takes_value(true)
+ .use_delimiter(true)
+ .require_equals(true)
+ .help("Allow file system read access"),
+ )
+ .arg(
+ Arg::with_name("allow-write")
+ .long("allow-write")
+ .min_values(0)
+ .takes_value(true)
+ .use_delimiter(true)
+ .require_equals(true)
+ .help("Allow file system write access"),
+ )
+ .arg(
+ Arg::with_name("allow-net")
+ .long("allow-net")
+ .min_values(0)
+ .takes_value(true)
+ .use_delimiter(true)
+ .require_equals(true)
+ .help("Allow network access"),
+ )
+ .arg(
+ Arg::with_name("allow-env")
+ .long("allow-env")
+ .help("Allow environment access"),
+ )
+ .arg(
+ Arg::with_name("allow-run")
+ .long("allow-run")
+ .help("Allow running subprocesses"),
+ )
+ .arg(
+ Arg::with_name("allow-hrtime")
+ .long("allow-hrtime")
+ .help("Allow high resolution time measurement"),
+ )
+ .arg(
+ Arg::with_name("allow-all")
+ .short("A")
+ .long("allow-all")
+ .help("Allow all permissions"),
+ )
+ .arg(
+ Arg::with_name("no-prompt")
+ .long("no-prompt")
+ .help("Do not use prompts"),
+ )
+ .arg(
+ Arg::with_name("no-fetch")
+ .long("no-fetch")
+ .help("Do not download remote modules"),
+ )
+}
+
+pub fn create_cli_app<'a, 'b>() -> App<'a, 'b> {
+ add_run_args(App::new("deno"))
+ .bin_name("deno")
+ .global_settings(&[AppSettings::ColorNever, AppSettings::UnifiedHelpMessage, AppSettings::DisableVersion])
+ .settings(&[AppSettings::AllowExternalSubcommands])
+ .after_help(ENV_VARIABLES_HELP)
+ .long_about("A secure runtime for JavaScript and TypeScript built with V8, Rust, and Tokio.
+
+Docs: https://deno.land/manual.html
+Modules: https://github.com/denoland/deno_std
+Bugs: https://github.com/denoland/deno/issues
+
+To run the REPL:
+
+ deno
+
+To execute a sandboxed script:
+
+ deno https://deno.land/welcome.ts
+
+To evaluate code from the command line:
+
+ deno eval \"console.log(30933 + 404)\"
+
+To get help on a particular subcommand (run in this case):
+
+ deno help run")
+ .arg(
+ Arg::with_name("version")
+ .short("v")
+ .long("version")
+ .help("Print the version"),
+ )
+ .arg(
+ Arg::with_name("log-level")
+ .short("L")
+ .long("log-level")
+ .help("Set log level")
+ .takes_value(true)
+ .possible_values(&["debug", "info"])
+ .global(true),
+ ).arg(
+ Arg::with_name("reload")
+ .short("r")
+ .long("reload")
+ .help("Reload source code cache (recompile TypeScript)")
+ .global(true),
+ ).arg(
+ Arg::with_name("config")
+ .short("c")
+ .long("config")
+ .value_name("FILE")
+ .help("Load compiler configuration file")
+ .takes_value(true)
+ .global(true),
+ )
+ .arg(
+ Arg::with_name("current-thread")
+ .long("current-thread")
+ .global(true)
+ .help("Use tokio::runtime::current_thread"),
+ ).arg(
+ Arg::with_name("importmap")
+ .long("importmap")
+ .value_name("FILE")
+ .help("Load import map file")
+ .long_help(
+ "Load import map file
+Specification: https://wicg.github.io/import-maps/
+Examples: https://github.com/WICG/import-maps#the-import-map",
+ )
+ .takes_value(true)
+ .global(true),
+ ).arg(
+ Arg::with_name("seed")
+ .long("seed")
+ .value_name("NUMBER")
+ .help("Seed Math.random()")
+ .takes_value(true)
+ .validator(|val: String| {
+ match val.parse::<u64>() {
+ Ok(_) => Ok(()),
+ Err(_) => Err("Seed should be a number".to_string())
+ }
+ })
+ .global(true),
+ ).arg(
+ Arg::with_name("v8-options")
+ .long("v8-options")
+ .help("Print V8 command line options")
+ .global(true),
+ ).arg(
+ Arg::with_name("v8-flags")
+ .long("v8-flags")
+ .takes_value(true)
+ .use_delimiter(true)
+ .require_equals(true)
+ .help("Set V8 command line options")
+ .global(true),
+ ).subcommand(
+ SubCommand::with_name("version")
+ .about("Print the version")
+ .long_about("Print current version of Deno.
+
+Includes versions of Deno, V8 JavaScript Engine, and the TypeScript
+compiler.",
+ ),
+ ).subcommand(
+ SubCommand::with_name("bundle")
+ .about("Bundle module and dependencies into single file")
+ .long_about(
+ "Output a single JavaScript file with all dependencies
+
+Example:
+
+ deno bundle https://deno.land/std/examples/colors.ts"
+ )
+ .arg(Arg::with_name("source_file").takes_value(true).required(true))
+ .arg(Arg::with_name("out_file").takes_value(true).required(false)),
+ ).subcommand(
+ SubCommand::with_name("fetch")
+ .about("Fetch the dependencies")
+ .long_about(
+ "Fetch and compile remote dependencies recursively.
+
+Downloads all statically imported scripts and saves them in the local
+cache, without running the code. No further network requests for these
+imports will be made unless --reload is specified.
+
+ # Downloads all dependencies
+ deno fetch https://deno.land/std/http/file_server.ts
+
+ # Once cached, static imports no longer send network requests
+ deno run -A https://deno.land/std/http/file_server.ts",
+ ).arg(Arg::with_name("file").takes_value(true).required(true)),
+ ).subcommand(
+ SubCommand::with_name("types")
+ .about("Print runtime TypeScript declarations")
+ .long_about("Print runtime TypeScript declarations.
+
+ deno types > lib.deno_runtime.d.ts
+
+The declaration file could be saved and used for typing information.",
+ ),
+ ).subcommand(
+ SubCommand::with_name("info")
+ .about("Show info about cache or info related to source file")
+ .long_about("Show info about cache or info related to source file.
+
+ deno info
+
+The following information is shown:
+
+ DENO_DIR: location of directory containing Deno-related files
+ Remote modules cache: location of directory containing remote modules
+ TypeScript compiler cache: location of directory containing TS compiler output
+
+
+ deno info https://deno.land/std@v0.11/http/file_server.ts
+
+The following information is shown:
+
+ local: Local path of the file.
+ type: JavaScript, TypeScript, or JSON.
+  compiled: TypeScript only; local path of the compiled source code.
+  map: TypeScript only; local path of the source map.
+ deps: Dependency tree of the source file.",
+ ).arg(Arg::with_name("file").takes_value(true).required(false)),
+ ).subcommand(
+ SubCommand::with_name("eval")
+ .about("Eval script")
+ .long_about(
+ "Evaluate provided script.
+
+This command has implicit access to all permissions (equivalent to deno run --allow-all)
+
+ deno eval \"console.log('hello world')\"",
+ ).arg(Arg::with_name("code").takes_value(true).required(true)),
+ ).subcommand(
+ SubCommand::with_name("fmt")
+ .about("Format files")
+ .long_about(
+"Auto-format JavaScript/TypeScript source code using Prettier
+
+Automatically downloads Prettier dependencies on first run.
+
+ deno fmt myfile1.ts myfile2.ts",
+ ).arg(
+ Arg::with_name("stdout")
+ .long("stdout")
+ .help("Output formated code to stdout")
+ .takes_value(false),
+ ).arg(
+ Arg::with_name("files")
+ .takes_value(true)
+ .multiple(true)
+ .required(true),
+ ),
+ ).subcommand(
+ add_run_args(SubCommand::with_name("test"))
+ .about("Run tests")
+ .long_about(
+"Run tests using test runner
+
+Automatically downloads test runner on first run.
+
+ deno test **/*_test.ts **/test.ts",
+ ).arg(
+ Arg::with_name("failfast")
+ .short("f")
+ .long("failfast")
+ .help("Stop on first error")
+ .takes_value(false),
+ ).arg(
+ Arg::with_name("quiet")
+ .short("q")
+ .long("quiet")
+ .help("Don't show output from test cases")
+ .takes_value(false)
+ ).arg(
+ Arg::with_name("exclude")
+ .short("e")
+ .long("exclude")
+ .help("List of file names to exclude from run")
+ .takes_value(true)
+ .multiple(true)
+ ).arg(
+ Arg::with_name("files")
+ .help("List of file names to run")
+ .takes_value(true)
+ .multiple(true)
+ ),
+ ).subcommand(
+ add_run_args(SubCommand::with_name("run"))
+ .settings(&[
+ AppSettings::AllowExternalSubcommands,
+ AppSettings::DisableHelpSubcommand,
+ AppSettings::SubcommandRequired,
+ ]).about("Run a program given a filename or url to the source code")
+ .long_about(
+ "Run a program given a filename or url to the source code.
+
+By default all programs are run in a sandbox, without access to the disk or
+network and without the ability to spawn subprocesses.
+
+ deno run https://deno.land/welcome.ts
+
+ # run program with permission to read from disk and listen to network
+ deno run --allow-net --allow-read https://deno.land/std/http/file_server.ts
+
+ # run program with permission to read whitelist files from disk and listen to network
+ deno run --allow-net --allow-read=$(pwd) https://deno.land/std/http/file_server.ts
+
+ # run program with all permissions
+ deno run -A https://deno.land/std/http/file_server.ts",
+ ).subcommand(
+ // this is a fake subcommand - it's used in conjunction with
+        // AppSettings::AllowExternalSubcommands to treat it as an
+ // entry point script
+ SubCommand::with_name("[SCRIPT]").about("Script to run"),
+ ),
+ ).subcommand(
+ SubCommand::with_name("xeval")
+ .about("Eval a script on text segments from stdin")
+ .long_about(
+ "Eval a script on lines from stdin
+
+Read from standard input and eval the code on each delimited chunk of
+input (newline-delimited by default).
+
+-I/--replvar optionally sets the variable name for the input to be used in the eval.
+Otherwise '$' is used as the default variable name.
+
+This command has implicit access to all permissions (equivalent to deno run --allow-all)
+
+Print all the usernames in /etc/passwd:
+
+ cat /etc/passwd | deno xeval \"a = $.split(':'); if (a) console.log(a[0])\"
+
+A complicated way to print the current git branch:
+
+ git branch | deno xeval -I 'line' \"if (line.startsWith('*')) console.log(line.slice(2))\"
+
+Demonstrates breaking the input up by space delimiter instead of by lines:
+
+ cat LICENSE | deno xeval -d \" \" \"if ($ === 'MIT') console.log('MIT licensed')\"",
+ ).arg(
+ Arg::with_name("replvar")
+ .long("replvar")
+ .short("I")
+ .help("Set variable name to be used in eval, defaults to $")
+ .takes_value(true),
+ ).arg(
+ Arg::with_name("delim")
+ .long("delim")
+ .short("d")
+ .help("Set delimiter, defaults to newline")
+ .takes_value(true),
+ ).arg(Arg::with_name("code").takes_value(true).required(true)),
+ ).subcommand(
+ SubCommand::with_name("install")
+ .settings(&[
+ AppSettings::DisableHelpSubcommand,
+ AppSettings::AllowExternalSubcommands,
+ AppSettings::SubcommandRequired,
+ ])
+ .about("Install script as executable")
+ .long_about(
+"Automatically downloads deno_installer dependencies on first run.
+
+The default installation directory is $HOME/.deno/bin and it must be added to the PATH manually.
+
+ deno install file_server https://deno.land/std/http/file_server.ts --allow-net --allow-read
+
+ deno install colors https://deno.land/std/examples/colors.ts
+
+To change the installation directory use the -d/--dir flag:
+
+ deno install -d /usr/local/bin file_server https://deno.land/std/http/file_server.ts --allow-net --allow-read",
+ ).arg(
+ Arg::with_name("dir")
+ .long("dir")
+ .short("d")
+ .help("Installation directory (defaults to $HOME/.deno/bin)")
+ .takes_value(true)
+ ).arg(
+ Arg::with_name("exe_name")
+ .help("Executable name")
+ .required(true),
+ ).subcommand(
+ // this is a fake subcommand - it's used in conjunction with
+        // AppSettings::AllowExternalSubcommands to treat it as an
+ // entry point script
+ SubCommand::with_name("[SCRIPT]").about("Script URL"),
+ ),
+ ).subcommand(
+ SubCommand::with_name("completions")
+ .settings(&[
+ AppSettings::DisableHelpSubcommand,
+ ]).about("Generate shell completions")
+ .long_about(
+"Output shell completion script to standard output.
+
+Example:
+
+ deno completions bash > /usr/local/etc/bash_completion.d/deno.bash
+ source /usr/local/etc/bash_completion.d/deno.bash")
+ .arg(
+ Arg::with_name("shell")
+ .possible_values(&Shell::variants())
+ .required(true),
+ ),
+ ).subcommand(
+ // this is a fake subcommand - it's used in conjunction with
+    // AppSettings::AllowExternalSubcommands to treat it as an
+ // entry point script
+ SubCommand::with_name("[SCRIPT]").about("Script to run"),
+ )
+}
+
+/// Convert the supplied paths into full (absolute) paths.
+/// If a path is invalid, we print a warning
+/// and omit that path from the output.
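+///
+/// A minimal sketch of the intended behavior, assuming the process cwd is
+/// `/home/alice` (illustrative only, not compiled):
+///
+/// ```ignore
+/// // Relative paths are resolved against the cwd; a trailing slash is dropped.
+/// assert_eq!(resolve_paths(vec!["src/".to_string()]), vec!["/home/alice/src"]);
+/// ```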
+fn resolve_paths(paths: Vec<String>) -> Vec<String> {
+ let mut out: Vec<String> = vec![];
+ for pathstr in paths.iter() {
+ let result = deno_fs::resolve_from_cwd(pathstr);
+ if result.is_err() {
+ eprintln!("Unrecognized path to whitelist: {}", pathstr);
+ continue;
+ }
+ let mut full_path = result.unwrap().1;
+ // Remove trailing slash.
+ if full_path.len() > 1 && full_path.ends_with('/') {
+ full_path.pop();
+ }
+ out.push(full_path);
+ }
+ out
+}
+
+/// Parse `ArgMatches` into the internal `DenoFlags` structure.
+/// This method should not have any side effects.
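+///
+/// A minimal sketch of typical usage (illustrative only, not compiled; the
+/// `flags_from_vec` tests at the bottom of this file are the authoritative cases):
+///
+/// ```ignore
+/// let matches = create_cli_app().get_matches_from(svec!["deno", "--reload"]);
+/// let flags = parse_flags(&matches, None);
+/// assert!(flags.reload);
+/// ```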
+pub fn parse_flags(
+ matches: &ArgMatches,
+ maybe_flags: Option<DenoFlags>,
+) -> DenoFlags {
+ let mut flags = maybe_flags.unwrap_or_default();
+
+ if matches.is_present("current-thread") {
+ flags.current_thread = true;
+ }
+ if matches.is_present("log-level") {
+ flags.log_level = match matches.value_of("log-level").unwrap() {
+ "debug" => Some(Level::Debug),
+ "info" => Some(Level::Info),
+ _ => unreachable!(),
+ };
+ }
+ if matches.is_present("version") {
+ flags.version = true;
+ }
+ if matches.is_present("reload") {
+ flags.reload = true;
+ }
+ flags.config_path = matches.value_of("config").map(ToOwned::to_owned);
+ if matches.is_present("v8-options") {
+ let v8_flags = svec!["deno", "--help"];
+ flags.v8_flags = Some(v8_flags);
+ }
+ if matches.is_present("v8-flags") {
+ let mut v8_flags: Vec<String> = matches
+ .values_of("v8-flags")
+ .unwrap()
+ .map(String::from)
+ .collect();
+
+ v8_flags.insert(0, "deno".to_string());
+ flags.v8_flags = Some(v8_flags);
+ }
+ if matches.is_present("seed") {
+ let seed_string = matches.value_of("seed").unwrap();
+ let seed = seed_string.parse::<u64>().unwrap();
+ flags.seed = Some(seed);
+
+ let v8_seed_flag = format!("--random-seed={}", seed);
+
+ match flags.v8_flags {
+ Some(ref mut v8_flags) => {
+ v8_flags.push(v8_seed_flag);
+ }
+ None => {
+ flags.v8_flags = Some(svec!["deno", v8_seed_flag]);
+ }
+ }
+ }
+
+ flags = parse_run_args(flags, matches);
+ // flags specific to "run" subcommand
+ if let Some(run_matches) = matches.subcommand_matches("run") {
+ flags = parse_run_args(flags.clone(), run_matches);
+ }
+ // flags specific to "test" subcommand
+ if let Some(test_matches) = matches.subcommand_matches("test") {
+ flags = parse_run_args(flags.clone(), test_matches);
+ }
+
+ flags
+}
+
+/// Parse permission-specific matches and assign them to `DenoFlags`.
+/// This method is required because multiple subcommands use the permission args.
+fn parse_run_args(mut flags: DenoFlags, matches: &ArgMatches) -> DenoFlags {
+ if matches.is_present("allow-read") {
+ if matches.value_of("allow-read").is_some() {
+ let read_wl = matches.values_of("allow-read").unwrap();
+ let raw_read_whitelist: Vec<String> =
+ read_wl.map(std::string::ToString::to_string).collect();
+ flags.read_whitelist = resolve_paths(raw_read_whitelist);
+ debug!("read whitelist: {:#?}", &flags.read_whitelist);
+ } else {
+ flags.allow_read = true;
+ }
+ }
+ if matches.is_present("allow-write") {
+ if matches.value_of("allow-write").is_some() {
+ let write_wl = matches.values_of("allow-write").unwrap();
+ let raw_write_whitelist =
+ write_wl.map(std::string::ToString::to_string).collect();
+ flags.write_whitelist = resolve_paths(raw_write_whitelist);
+ debug!("write whitelist: {:#?}", &flags.write_whitelist);
+ } else {
+ flags.allow_write = true;
+ }
+ }
+ if matches.is_present("allow-net") {
+ if matches.value_of("allow-net").is_some() {
+ let net_wl = matches.values_of("allow-net").unwrap();
+ flags.net_whitelist =
+ net_wl.map(std::string::ToString::to_string).collect();
+ debug!("net whitelist: {:#?}", &flags.net_whitelist);
+ } else {
+ flags.allow_net = true;
+ }
+ }
+ if matches.is_present("allow-env") {
+ flags.allow_env = true;
+ }
+ if matches.is_present("allow-run") {
+ flags.allow_run = true;
+ }
+ if matches.is_present("allow-hrtime") {
+ flags.allow_hrtime = true;
+ }
+ if matches.is_present("allow-all") {
+ flags.allow_read = true;
+ flags.allow_env = true;
+ flags.allow_net = true;
+ flags.allow_run = true;
+ flags.allow_write = true;
+ flags.allow_hrtime = true;
+ }
+ if matches.is_present("no-prompt") {
+ flags.no_prompts = true;
+ }
+ if matches.is_present("no-fetch") {
+ flags.no_fetch = true;
+ }
+ flags.import_map_path = matches.value_of("importmap").map(ToOwned::to_owned);
+
+ flags
+}
+
+/// Parse a vector of arguments as DenoFlags.
+///
+/// This is a very specialized utility that parses the arguments passed after the script URL.
+///
+/// Only dash (e.g. `-r`) and double-dash (e.g. `--reload`) arguments are supported.
+/// Arguments recognized as DenoFlags will be consumed ("eaten").
+/// Parsing stops after a double-dash `--` argument.
+///
+/// NOTE: this method ignores `-h/--help` and `-v/--version` flags.
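+///
+/// A minimal sketch of the behavior described above (illustrative only, not
+/// compiled; see the `flags_from_vec` tests below for the authoritative cases):
+///
+/// ```ignore
+/// let (argv, flags) =
+///   parse_script_args(svec!["--allow-net", "input.txt", "--", "-r"], DenoFlags::default());
+/// // "--allow-net" is recognized and eaten; everything from "--" on is forwarded.
+/// assert!(flags.allow_net);
+/// assert_eq!(argv, svec!["input.txt", "--", "-r"]);
+/// ```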
+fn parse_script_args(
+ args: Vec<String>,
+ mut flags: DenoFlags,
+) -> (Vec<String>, DenoFlags) {
+ let mut argv = vec![];
+ let mut seen_double_dash = false;
+
+  // We have to iterate and parse the arguments one by one because clap returns an
+  // error on any unrecognized argument.
+ for arg in args.iter() {
+ if seen_double_dash {
+ argv.push(arg.to_string());
+ continue;
+ }
+
+ if arg == "--" {
+ seen_double_dash = true;
+ argv.push(arg.to_string());
+ continue;
+ }
+
+ if !arg.starts_with('-') || arg == "-" {
+ argv.push(arg.to_string());
+ continue;
+ }
+
+ let cli_app = create_cli_app();
+ // `get_matches_from_safe` returns error for `-h/-v` flags
+ let matches =
+ cli_app.get_matches_from_safe(vec!["deno".to_string(), arg.to_string()]);
+
+ if let Ok(m) = matches {
+ flags = parse_flags(&m, Some(flags));
+ } else {
+ argv.push(arg.to_string());
+ }
+ }
+
+ (argv, flags)
+}
+
+/// The subcommands that are currently handled.
+/// There is no "Help" subcommand because it is handled by `clap::App` itself.
+#[derive(Debug, PartialEq)]
+pub enum DenoSubcommand {
+ Bundle,
+ Completions,
+ Eval,
+ Fetch,
+ Info,
+ Repl,
+ Run,
+ Types,
+ Version,
+}
+
+fn get_default_bundle_filename(source_file: &str) -> String {
+ let specifier = ModuleSpecifier::resolve_url_or_path(source_file).unwrap();
+ let path_segments = specifier.as_url().path_segments().unwrap();
+ let file_name = path_segments.filter(|s| !s.is_empty()).last().unwrap();
+ let file_stem = file_name.trim_end_matches(".ts").trim_end_matches(".js");
+ format!("{}.bundle.js", file_stem)
+}
+
+#[test]
+fn test_get_default_bundle_filename() {
+ assert_eq!(get_default_bundle_filename("blah.ts"), "blah.bundle.js");
+ assert_eq!(
+ get_default_bundle_filename("http://example.com/blah.ts"),
+ "blah.bundle.js"
+ );
+ assert_eq!(get_default_bundle_filename("blah.js"), "blah.bundle.js");
+ assert_eq!(
+ get_default_bundle_filename("http://example.com/blah.js"),
+ "blah.bundle.js"
+ );
+ assert_eq!(
+ get_default_bundle_filename("http://zombo.com/stuff/"),
+ "stuff.bundle.js"
+ );
+}
+
+pub fn flags_from_vec(
+ args: Vec<String>,
+) -> (DenoFlags, DenoSubcommand, Vec<String>) {
+ let cli_app = create_cli_app();
+ let matches = cli_app.get_matches_from(args);
+ let mut argv: Vec<String> = vec!["deno".to_string()];
+ let mut flags = parse_flags(&matches.clone(), None);
+
+ if flags.version {
+ return (flags, DenoSubcommand::Version, argv);
+ }
+
+ let subcommand = match matches.subcommand() {
+ ("bundle", Some(bundle_match)) => {
+ flags.allow_write = true;
+ let source_file: &str = bundle_match.value_of("source_file").unwrap();
+ let out_file = bundle_match
+ .value_of("out_file")
+ .map(String::from)
+ .unwrap_or_else(|| get_default_bundle_filename(source_file));
+ argv.extend(vec![source_file.to_string(), out_file.to_string()]);
+ DenoSubcommand::Bundle
+ }
+ ("completions", Some(completions_match)) => {
+ let shell: &str = completions_match.value_of("shell").unwrap();
+ let mut buf: Vec<u8> = vec![];
+ create_cli_app().gen_completions_to(
+ "deno",
+ Shell::from_str(shell).unwrap(),
+ &mut buf,
+ );
+ print!("{}", std::str::from_utf8(&buf).unwrap());
+ DenoSubcommand::Completions
+ }
+ ("eval", Some(eval_match)) => {
+ flags.allow_net = true;
+ flags.allow_env = true;
+ flags.allow_run = true;
+ flags.allow_read = true;
+ flags.allow_write = true;
+ flags.allow_hrtime = true;
+ let code: &str = eval_match.value_of("code").unwrap();
+ argv.extend(vec![code.to_string()]);
+ DenoSubcommand::Eval
+ }
+ ("fetch", Some(fetch_match)) => {
+ let file: &str = fetch_match.value_of("file").unwrap();
+ argv.extend(vec![file.to_string()]);
+ DenoSubcommand::Fetch
+ }
+ ("fmt", Some(fmt_match)) => {
+ flags.allow_read = true;
+ flags.allow_write = true;
+ argv.push(PRETTIER_URL.to_string());
+
+ let files: Vec<String> = fmt_match
+ .values_of("files")
+ .unwrap()
+ .map(String::from)
+ .collect();
+ argv.extend(files);
+
+ if !fmt_match.is_present("stdout") {
+ // `deno fmt` writes to the files by default
+ argv.push("--write".to_string());
+ }
+
+ DenoSubcommand::Run
+ }
+ ("info", Some(info_match)) => {
+ if info_match.is_present("file") {
+ argv.push(info_match.value_of("file").unwrap().to_string());
+ }
+ DenoSubcommand::Info
+ }
+ ("install", Some(install_match)) => {
+ flags.allow_read = true;
+ flags.allow_write = true;
+ flags.allow_net = true;
+ flags.allow_env = true;
+ flags.allow_run = true;
+ argv.push(INSTALLER_URL.to_string());
+
+ if install_match.is_present("dir") {
+ let install_dir = install_match.value_of("dir").unwrap();
+ argv.push("--dir".to_string());
+ argv.push(install_dir.to_string());
+ }
+
+ let exe_name: &str = install_match.value_of("exe_name").unwrap();
+ argv.push(exe_name.to_string());
+
+ match install_match.subcommand() {
+ (script_url, Some(script_match)) => {
+ argv.push(script_url.to_string());
+ if script_match.is_present("") {
+ let flags: Vec<String> = script_match
+ .values_of("")
+ .unwrap()
+ .map(String::from)
+ .collect();
+ argv.extend(flags);
+ }
+ DenoSubcommand::Run
+ }
+ _ => unreachable!(),
+ }
+ }
+ ("test", Some(test_match)) => {
+ flags.allow_read = true;
+ argv.push(TEST_RUNNER_URL.to_string());
+
+ if test_match.is_present("quiet") {
+ argv.push("--quiet".to_string());
+ }
+
+ if test_match.is_present("failfast") {
+ argv.push("--failfast".to_string());
+ }
+
+ if test_match.is_present("exclude") {
+ argv.push("--exclude".to_string());
+ let exclude: Vec<String> = test_match
+ .values_of("exclude")
+ .unwrap()
+ .map(String::from)
+ .collect();
+ argv.extend(exclude);
+ }
+
+ if test_match.is_present("files") {
+ argv.push("--".to_string());
+ let files: Vec<String> = test_match
+ .values_of("files")
+ .unwrap()
+ .map(String::from)
+ .collect();
+ argv.extend(files);
+ }
+
+ DenoSubcommand::Run
+ }
+ ("types", Some(_)) => DenoSubcommand::Types,
+ ("run", Some(run_match)) => {
+ match run_match.subcommand() {
+ (script, Some(script_match)) => {
+ argv.extend(vec![script.to_string()]);
+ // check if there are any extra arguments that should
+ // be passed to script
+ if script_match.is_present("") {
+ let script_args: Vec<String> = script_match
+ .values_of("")
+ .unwrap()
+ .map(String::from)
+ .collect();
+
+ let (script_args, flags_) = parse_script_args(script_args, flags);
+ flags = flags_;
+ argv.extend(script_args);
+ }
+ DenoSubcommand::Run
+ }
+ _ => unreachable!(),
+ }
+ }
+ ("xeval", Some(xeval_match)) => {
+ flags.allow_net = true;
+ flags.allow_env = true;
+ flags.allow_run = true;
+ flags.allow_read = true;
+ flags.allow_write = true;
+ flags.allow_hrtime = true;
+ argv.push(XEVAL_URL.to_string());
+
+ if xeval_match.is_present("delim") {
+ let delim = xeval_match.value_of("delim").unwrap();
+ argv.push("--delim".to_string());
+ argv.push(delim.to_string());
+ }
+
+ if xeval_match.is_present("replvar") {
+ let replvar = xeval_match.value_of("replvar").unwrap();
+ argv.push("--replvar".to_string());
+ argv.push(replvar.to_string());
+ }
+
+ let code: &str = xeval_match.value_of("code").unwrap();
+ argv.push(code.to_string());
+
+ DenoSubcommand::Run
+ }
+ (script, Some(script_match)) => {
+ argv.extend(vec![script.to_string()]);
+ // check if there are any extra arguments that should
+ // be passed to script
+ if script_match.is_present("") {
+ let script_args: Vec<String> = script_match
+ .values_of("")
+ .unwrap()
+ .map(String::from)
+ .collect();
+
+ let (script_args, flags_) = parse_script_args(script_args, flags);
+ flags = flags_;
+ argv.extend(script_args);
+ }
+ DenoSubcommand::Run
+ }
+ _ => {
+ flags.allow_net = true;
+ flags.allow_env = true;
+ flags.allow_run = true;
+ flags.allow_read = true;
+ flags.allow_write = true;
+ flags.allow_hrtime = true;
+ DenoSubcommand::Repl
+ }
+ };
+
+ (flags, subcommand, argv)
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_flags_from_vec_1() {
+ let (flags, subcommand, argv) = flags_from_vec(svec!["deno", "version"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ version: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Version);
+ assert_eq!(argv, svec!["deno"]);
+
+ let (flags, subcommand, argv) = flags_from_vec(svec!["deno", "--version"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ version: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Version);
+ assert_eq!(argv, svec!["deno"]);
+
+ let (flags, subcommand, argv) = flags_from_vec(svec!["deno", "-v"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ version: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Version);
+ assert_eq!(argv, svec!["deno"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_2() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "-r", "run", "script.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ reload: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_3() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "run", "-r", "--allow-write", "script.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ reload: true,
+ allow_write: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_4() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "-r", "run", "--allow-write", "script.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ reload: true,
+ allow_write: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_5() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "--v8-options", "run", "script.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ v8_flags: Some(svec!["deno", "--help"]),
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "--v8-flags=--expose-gc,--gc-stats=1",
+ "run",
+ "script.ts"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ v8_flags: Some(svec!["deno", "--expose-gc", "--gc-stats=1"]),
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_6() {
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "run",
+ "--allow-net",
+ "gist.ts",
+ "--title",
+ "X"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_net: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "gist.ts", "--title", "X"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_7() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "run", "--allow-all", "gist.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_net: true,
+ allow_env: true,
+ allow_run: true,
+ allow_read: true,
+ allow_write: true,
+ allow_hrtime: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "gist.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_8() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "run", "--allow-read", "gist.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_read: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "gist.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_9() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "run", "--allow-hrtime", "script.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_hrtime: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_10() {
+    // Note that flags passed after the double dash are not
+    // parsed into DenoFlags but are instead forwarded to the
+    // script as Deno.args.
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "run",
+ "--allow-write",
+ "script.ts",
+ "--",
+ "-D",
+ "--allow-net"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_write: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts", "--", "-D", "--allow-net"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_11() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "fmt", "script_1.ts", "script_2.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_write: true,
+ allow_read: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(
+ argv,
+ svec![
+ "deno",
+ PRETTIER_URL,
+ "script_1.ts",
+ "script_2.ts",
+ "--write"
+ ]
+ );
+ }
+
+ #[test]
+ fn test_flags_from_vec_12() {
+ let (flags, subcommand, argv) = flags_from_vec(svec!["deno", "types"]);
+ assert_eq!(flags, DenoFlags::default());
+ assert_eq!(subcommand, DenoSubcommand::Types);
+ assert_eq!(argv, svec!["deno"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_13() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "fetch", "script.ts"]);
+ assert_eq!(flags, DenoFlags::default());
+ assert_eq!(subcommand, DenoSubcommand::Fetch);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_14() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "info", "script.ts"]);
+ assert_eq!(flags, DenoFlags::default());
+ assert_eq!(subcommand, DenoSubcommand::Info);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+
+ let (flags, subcommand, argv) = flags_from_vec(svec!["deno", "info"]);
+ assert_eq!(flags, DenoFlags::default());
+ assert_eq!(subcommand, DenoSubcommand::Info);
+ assert_eq!(argv, svec!["deno"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_15() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "run", "-c", "tsconfig.json", "script.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ config_path: Some("tsconfig.json".to_owned()),
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_16() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "eval", "'console.log(\"hello\")'"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_net: true,
+ allow_env: true,
+ allow_run: true,
+ allow_read: true,
+ allow_write: true,
+ allow_hrtime: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Eval);
+ assert_eq!(argv, svec!["deno", "'console.log(\"hello\")'"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_17() {
+ let (flags, subcommand, argv) = flags_from_vec(svec!["deno"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_net: true,
+ allow_env: true,
+ allow_run: true,
+ allow_read: true,
+ allow_write: true,
+ allow_hrtime: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Repl);
+ assert_eq!(argv, svec!["deno"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_18() {
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "xeval",
+ "-I",
+ "val",
+ "-d",
+ " ",
+ "console.log(val)"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_net: true,
+ allow_env: true,
+ allow_run: true,
+ allow_read: true,
+ allow_write: true,
+ allow_hrtime: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(
+ argv,
+ svec![
+ "deno",
+ XEVAL_URL,
+ "--delim",
+ " ",
+ "--replvar",
+ "val",
+ "console.log(val)"
+ ]
+ );
+ }
+
+ #[test]
+ fn test_flags_from_vec_19() {
+ use tempfile::TempDir;
+ let temp_dir = TempDir::new().expect("tempdir fail");
+ let (_, temp_dir_path) =
+ deno_fs::resolve_from_cwd(temp_dir.path().to_str().unwrap()).unwrap();
+
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "run",
+ format!("--allow-read={}", &temp_dir_path),
+ "script.ts"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_read: false,
+ read_whitelist: svec![&temp_dir_path],
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_20() {
+ use tempfile::TempDir;
+ let temp_dir = TempDir::new().expect("tempdir fail");
+ let (_, temp_dir_path) =
+ deno_fs::resolve_from_cwd(temp_dir.path().to_str().unwrap()).unwrap();
+
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "run",
+ format!("--allow-write={}", &temp_dir_path),
+ "script.ts"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_write: false,
+ write_whitelist: svec![&temp_dir_path],
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_21() {
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "run",
+ "--allow-net=127.0.0.1",
+ "script.ts"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_net: false,
+ net_whitelist: svec!["127.0.0.1"],
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_22() {
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "fmt",
+ "--stdout",
+ "script_1.ts",
+ "script_2.ts"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_write: true,
+ allow_read: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(
+ argv,
+ svec!["deno", PRETTIER_URL, "script_1.ts", "script_2.ts"]
+ );
+ }
+
+ #[test]
+ fn test_flags_from_vec_23() {
+ let (flags, subcommand, argv) = flags_from_vec(svec!["deno", "script.ts"]);
+ assert_eq!(flags, DenoFlags::default());
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_24() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "--allow-net", "--allow-read", "script.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_net: true,
+ allow_read: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_25() {
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "-r",
+ "--allow-net",
+ "run",
+ "--allow-read",
+ "script.ts"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ reload: true,
+ allow_net: true,
+ allow_read: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_26() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "bundle", "source.ts", "bundle.js"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_write: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Bundle);
+ assert_eq!(argv, svec!["deno", "source.ts", "bundle.js"])
+ }
+
+ #[test]
+ fn test_flags_from_vec_27() {
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "run",
+ "--importmap=importmap.json",
+ "script.ts"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ import_map_path: Some("importmap.json".to_owned()),
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "--importmap=importmap.json", "script.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ import_map_path: Some("importmap.json".to_owned()),
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "fetch",
+ "--importmap=importmap.json",
+ "script.ts"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ import_map_path: Some("importmap.json".to_owned()),
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Fetch);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_28() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "--seed", "250", "run", "script.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ seed: Some(250 as u64),
+ v8_flags: Some(svec!["deno", "--random-seed=250"]),
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_29() {
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "--seed",
+ "250",
+ "--v8-flags=--expose-gc",
+ "run",
+ "script.ts"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ seed: Some(250 as u64),
+ v8_flags: Some(svec!["deno", "--expose-gc", "--random-seed=250"]),
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_30() {
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "install",
+ "deno_colors",
+ "https://deno.land/std/examples/colors.ts"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_write: true,
+ allow_net: true,
+ allow_read: true,
+ allow_env: true,
+ allow_run: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(
+ argv,
+ svec![
+ "deno",
+ INSTALLER_URL,
+ "deno_colors",
+ "https://deno.land/std/examples/colors.ts"
+ ]
+ );
+
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "install",
+ "file_server",
+ "https://deno.land/std/http/file_server.ts",
+ "--allow-net",
+ "--allow-read"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_write: true,
+ allow_net: true,
+ allow_read: true,
+ allow_env: true,
+ allow_run: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(
+ argv,
+ svec![
+ "deno",
+ INSTALLER_URL,
+ "file_server",
+ "https://deno.land/std/http/file_server.ts",
+ "--allow-net",
+ "--allow-read"
+ ]
+ );
+
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "install",
+ "-d",
+ "/usr/local/bin",
+ "file_server",
+ "https://deno.land/std/http/file_server.ts",
+ "--allow-net",
+ "--allow-read"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_write: true,
+ allow_net: true,
+ allow_read: true,
+ allow_env: true,
+ allow_run: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(
+ argv,
+ svec![
+ "deno",
+ INSTALLER_URL,
+ "--dir",
+ "/usr/local/bin",
+ "file_server",
+ "https://deno.land/std/http/file_server.ts",
+ "--allow-net",
+ "--allow-read"
+ ]
+ );
+ }
+
+ #[test]
+ fn test_flags_from_vec_31() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "--log-level=debug", "script.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ log_level: Some(Level::Debug),
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"])
+ }
+
+ #[test]
+ fn test_flags_from_vec_32() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "completions", "bash"]);
+ assert_eq!(flags, DenoFlags::default());
+ assert_eq!(subcommand, DenoSubcommand::Completions);
+ assert_eq!(argv, svec!["deno"])
+ }
+
+ #[test]
+ fn test_flags_from_vec_33() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "script.ts", "--allow-read", "--allow-net"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_net: true,
+ allow_read: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"]);
+
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "--allow-read",
+ "run",
+ "script.ts",
+ "--allow-net",
+ "-r",
+ "--help",
+ "--foo",
+ "bar"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_net: true,
+ allow_read: true,
+ reload: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts", "--help", "--foo", "bar"]);
+
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "script.ts", "foo", "bar"]);
+ assert_eq!(flags, DenoFlags::default());
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts", "foo", "bar"]);
+
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "script.ts", "-"]);
+ assert_eq!(flags, DenoFlags::default());
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts", "-"]);
+
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "script.ts", "-", "foo", "bar"]);
+ assert_eq!(flags, DenoFlags::default());
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts", "-", "foo", "bar"]);
+ }
+
+ #[test]
+ fn test_flags_from_vec_34() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "--no-fetch", "script.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ no_fetch: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"])
+ }
+
+ #[test]
+ fn test_flags_from_vec_35() {
+ let (flags, subcommand, argv) =
+ flags_from_vec(svec!["deno", "--current-thread", "script.ts"]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ current_thread: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(argv, svec!["deno", "script.ts"])
+ }
+
+ #[test]
+ fn test_flags_from_vec_36() {
+ let (flags, subcommand, argv) = flags_from_vec(svec![
+ "deno",
+ "test",
+ "--exclude",
+ "some_dir/",
+ "**/*_test.ts"
+ ]);
+ assert_eq!(
+ flags,
+ DenoFlags {
+ allow_read: true,
+ ..DenoFlags::default()
+ }
+ );
+ assert_eq!(subcommand, DenoSubcommand::Run);
+ assert_eq!(
+ argv,
+ svec![
+ "deno",
+ TEST_RUNNER_URL,
+ "--exclude",
+ "some_dir/",
+ "**/*_test.ts"
+ ]
+ )
+ }
+}
diff --git a/cli/fmt_errors.rs b/cli/fmt_errors.rs
new file mode 100644
index 000000000..84fcf5b43
--- /dev/null
+++ b/cli/fmt_errors.rs
@@ -0,0 +1,315 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+//! This mod provides pretty, source-mapped formatting of errors for display.
+use crate::colors;
+use crate::source_maps::apply_source_map;
+use crate::source_maps::SourceMapGetter;
+use deno::ErrBox;
+use deno::StackFrame;
+use deno::V8Exception;
+use std::error::Error;
+use std::fmt;
+
+/// A trait which specifies the parts a diagnostic-like item needs to be able
+/// to generate, so that its display conforms to other diagnostic-like items.
+pub trait DisplayFormatter {
+ fn format_category_and_code(&self) -> String;
+ fn format_message(&self, level: usize) -> String;
+ fn format_related_info(&self) -> String;
+ fn format_source_line(&self, level: usize) -> String;
+ fn format_source_name(&self) -> String;
+}
+
+fn format_source_name(script_name: String, line: i64, column: i64) -> String {
+ let script_name_c = colors::cyan(script_name);
+ let line_c = colors::yellow((1 + line).to_string());
+ let column_c = colors::yellow((1 + column).to_string());
+ format!("{}:{}:{}", script_name_c, line_c, column_c,)
+}
+
+/// Formats optional source, line and column into a single string.
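+///
+/// A small illustration (mirroring the unit test below; the input line/column
+/// are 0-based and rendered 1-based, colors elided):
+///
+/// ```ignore
+/// let s = format_maybe_source_name(
+///   Some("file://foo/bar.ts".to_string()), Some(1), Some(2));
+/// // strip_ansi_codes(&s) == "file://foo/bar.ts:2:3"
+/// ```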
+pub fn format_maybe_source_name(
+ script_name: Option<String>,
+ line: Option<i64>,
+ column: Option<i64>,
+) -> String {
+ if script_name.is_none() {
+ return "".to_string();
+ }
+
+ assert!(line.is_some());
+ assert!(column.is_some());
+ format_source_name(script_name.unwrap(), line.unwrap(), column.unwrap())
+}
+
+/// Take an optional source line and associated information to format it into
+/// a pretty printed version of that line.
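+///
+/// A sketch of the output (mirroring the unit test below, colors elided):
+///
+/// ```ignore
+/// let s = format_maybe_source_line(
+///   Some("console.log('foo');".to_string()), Some(8), Some(8), Some(11), true, 0);
+/// // Renders the 1-based line number "9" in a gutter, the source line, and a
+/// // `~~~` underline beneath columns 8..11.
+/// ```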
+pub fn format_maybe_source_line(
+ source_line: Option<String>,
+ line_number: Option<i64>,
+ start_column: Option<i64>,
+ end_column: Option<i64>,
+ is_error: bool,
+ level: usize,
+) -> String {
+ if source_line.is_none() || line_number.is_none() {
+ return "".to_string();
+ }
+
+ let source_line = source_line.as_ref().unwrap();
+  // Sometimes source_line gets set to an empty string, which would render as
+  // an empty source line, so we just short-circuit here.
+ if source_line.is_empty() {
+ return "".to_string();
+ }
+
+ assert!(start_column.is_some());
+ assert!(end_column.is_some());
+ let line = (1 + line_number.unwrap()).to_string();
+ let line_color = colors::black_on_white(line.to_string());
+ let line_len = line.clone().len();
+ let line_padding =
+ colors::black_on_white(format!("{:indent$}", "", indent = line_len))
+ .to_string();
+ let mut s = String::new();
+ let start_column = start_column.unwrap();
+ let end_column = end_column.unwrap();
+  // TypeScript always uses `~`, while V8 always uses `^`, even for ranges.
+  // Here, if the marked range is at most one character wide (very common with
+  // V8 errors) we use `^` instead.
+ let underline_char = if (end_column - start_column) <= 1 {
+ '^'
+ } else {
+ '~'
+ };
+ for i in 0..end_column {
+ if i >= start_column {
+ s.push(underline_char);
+ } else {
+ s.push(' ');
+ }
+ }
+ let color_underline = if is_error {
+ colors::red(s).to_string()
+ } else {
+ colors::cyan(s).to_string()
+ };
+
+ let indent = format!("{:indent$}", "", indent = level);
+
+ format!(
+ "\n\n{}{} {}\n{}{} {}\n",
+ indent, line_color, source_line, indent, line_padding, color_underline
+ )
+}
+
+/// Format a message to preface with `error: ` with ansi codes for red.
+pub fn format_error_message(msg: String) -> String {
+ let preamble = colors::red("error:".to_string());
+ format!("{} {}", preamble, msg)
+}
+
+fn format_stack_frame(frame: &StackFrame) -> String {
+ // Note when we print to string, we change from 0-indexed to 1-indexed.
+ let function_name = colors::italic_bold(frame.function_name.clone());
+ let source_loc =
+ format_source_name(frame.script_name.clone(), frame.line, frame.column);
+
+ if !frame.function_name.is_empty() {
+ format!(" at {} ({})", function_name, source_loc)
+ } else if frame.is_eval {
+ format!(" at eval ({})", source_loc)
+ } else {
+ format!(" at {}", source_loc)
+ }
+}
+
+/// Wrapper around V8Exception which provides a colorized `to_string()`.
+#[derive(Debug)]
+pub struct JSError(V8Exception);
+
+impl JSError {
+ pub fn new(v8_exception: V8Exception) -> Self {
+ Self(v8_exception)
+ }
+
+ pub fn from_json(
+ json_str: &str,
+ source_map_getter: &impl SourceMapGetter,
+ ) -> ErrBox {
+ let unmapped_exception = V8Exception::from_json(json_str).unwrap();
+ Self::from_v8_exception(unmapped_exception, source_map_getter)
+ }
+
+ pub fn from_v8_exception(
+ unmapped_exception: V8Exception,
+ source_map_getter: &impl SourceMapGetter,
+ ) -> ErrBox {
+ let mapped_exception =
+ apply_source_map(&unmapped_exception, source_map_getter);
+ let js_error = Self(mapped_exception);
+ ErrBox::from(js_error)
+ }
+}
+
+impl DisplayFormatter for JSError {
+ fn format_category_and_code(&self) -> String {
+ "".to_string()
+ }
+
+ fn format_message(&self, _level: usize) -> String {
+ format!(
+ "{}{}",
+ colors::red_bold("error: ".to_string()),
+ self.0.message.clone()
+ )
+ }
+
+ fn format_related_info(&self) -> String {
+ "".to_string()
+ }
+
+ fn format_source_line(&self, level: usize) -> String {
+ format_maybe_source_line(
+ self.0.source_line.clone(),
+ self.0.line_number,
+ self.0.start_column,
+ self.0.end_column,
+ true,
+ level,
+ )
+ }
+
+ fn format_source_name(&self) -> String {
+ let e = &self.0;
+ if e.script_resource_name.is_none() {
+ return "".to_string();
+ }
+
+ format!(
+ "\n► {}",
+ format_maybe_source_name(
+ e.script_resource_name.clone(),
+ e.line_number,
+ e.start_column,
+ )
+ )
+ }
+}
+
+impl fmt::Display for JSError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(
+ f,
+ "{}{}{}",
+ self.format_message(0),
+ self.format_source_name(),
+ self.format_source_line(0),
+ )?;
+
+ for frame in &self.0.frames {
+ write!(f, "\n{}", format_stack_frame(&frame))?;
+ }
+ Ok(())
+ }
+}
+
+impl Error for JSError {}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::colors::strip_ansi_codes;
+
+ fn error1() -> V8Exception {
+ V8Exception {
+ message: "Error: foo bar".to_string(),
+ source_line: None,
+ script_resource_name: None,
+ line_number: None,
+ start_position: None,
+ end_position: None,
+ error_level: None,
+ start_column: None,
+ end_column: None,
+ frames: vec![
+ StackFrame {
+ line: 4,
+ column: 16,
+ script_name: "foo_bar.ts".to_string(),
+ function_name: "foo".to_string(),
+ is_eval: false,
+ is_constructor: false,
+ is_wasm: false,
+ },
+ StackFrame {
+ line: 5,
+ column: 20,
+ script_name: "bar_baz.ts".to_string(),
+ function_name: "qat".to_string(),
+ is_eval: false,
+ is_constructor: false,
+ is_wasm: false,
+ },
+ StackFrame {
+ line: 1,
+ column: 1,
+ script_name: "deno_main.js".to_string(),
+ function_name: "".to_string(),
+ is_eval: false,
+ is_constructor: false,
+ is_wasm: false,
+ },
+ ],
+ }
+ }
+
+ #[test]
+ fn js_error_to_string() {
+ let e = error1();
+ assert_eq!("error: Error: foo bar\n at foo (foo_bar.ts:5:17)\n at qat (bar_baz.ts:6:21)\n at deno_main.js:2:2", strip_ansi_codes(&JSError(e).to_string()));
+ }
+
+ #[test]
+ fn test_format_none_source_name() {
+ let actual = format_maybe_source_name(None, None, None);
+ assert_eq!(actual, "");
+ }
+
+ #[test]
+ fn test_format_some_source_name() {
+ let actual = format_maybe_source_name(
+ Some("file://foo/bar.ts".to_string()),
+ Some(1),
+ Some(2),
+ );
+ assert_eq!(strip_ansi_codes(&actual), "file://foo/bar.ts:2:3");
+ }
+
+ #[test]
+ fn test_format_none_source_line() {
+ let actual = format_maybe_source_line(None, None, None, None, false, 0);
+ assert_eq!(actual, "");
+ }
+
+ #[test]
+ fn test_format_some_source_line() {
+ let actual = format_maybe_source_line(
+ Some("console.log('foo');".to_string()),
+ Some(8),
+ Some(8),
+ Some(11),
+ true,
+ 0,
+ );
+ assert_eq!(
+ strip_ansi_codes(&actual),
+ "\n\n9 console.log(\'foo\');\n ~~~\n"
+ );
+ }
+
+ #[test]
+ fn test_format_error_message() {
+ let actual = format_error_message("foo".to_string());
+ assert_eq!(strip_ansi_codes(&actual), "error: foo");
+ }
+}
diff --git a/cli/fs.rs b/cli/fs.rs
new file mode 100644
index 000000000..19c76415c
--- /dev/null
+++ b/cli/fs.rs
@@ -0,0 +1,192 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use std;
+use std::fs::{create_dir, DirBuilder, File, OpenOptions};
+use std::io::ErrorKind;
+use std::io::Write;
+use std::path::{Path, PathBuf};
+
+use deno::ErrBox;
+use rand;
+use rand::Rng;
+use url::Url;
+
+#[cfg(unix)]
+use nix::unistd::{chown as unix_chown, Gid, Uid};
+#[cfg(any(unix))]
+use std::os::unix::fs::DirBuilderExt;
+#[cfg(any(unix))]
+use std::os::unix::fs::PermissionsExt;
+
+pub fn write_file<T: AsRef<[u8]>>(
+ filename: &Path,
+ data: T,
+ perm: u32,
+) -> std::io::Result<()> {
+ write_file_2(filename, data, true, perm, true, false)
+}
+
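+/// Like `write_file`, but with explicit control over whether the file's
+/// permissions are updated (`update_perm`/`perm`), whether the file may be
+/// created (`is_create`), and whether data is appended instead of truncating
+/// (`is_append`).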
+pub fn write_file_2<T: AsRef<[u8]>>(
+ filename: &Path,
+ data: T,
+ update_perm: bool,
+ perm: u32,
+ is_create: bool,
+ is_append: bool,
+) -> std::io::Result<()> {
+ let mut file = OpenOptions::new()
+ .read(false)
+ .write(true)
+ .append(is_append)
+ .truncate(!is_append)
+ .create(is_create)
+ .open(filename)?;
+
+ if update_perm {
+ set_permissions(&mut file, perm)?;
+ }
+
+ file.write_all(data.as_ref())
+}
+
+#[cfg(any(unix))]
+fn set_permissions(file: &mut File, perm: u32) -> std::io::Result<()> {
+ debug!("set file perm to {}", perm);
+ file.set_permissions(PermissionsExt::from_mode(perm & 0o777))
+}
+#[cfg(not(any(unix)))]
+fn set_permissions(_file: &mut File, _perm: u32) -> std::io::Result<()> {
+ // NOOP on windows
+ Ok(())
+}
+
+pub fn make_temp_dir(
+ dir: Option<&Path>,
+ prefix: Option<&str>,
+ suffix: Option<&str>,
+) -> std::io::Result<PathBuf> {
+ let prefix_ = prefix.unwrap_or("");
+ let suffix_ = suffix.unwrap_or("");
+ let mut buf: PathBuf = match dir {
+ Some(ref p) => p.to_path_buf(),
+ None => std::env::temp_dir(),
+ }
+ .join("_");
+ let mut rng = rand::thread_rng();
+ loop {
+ let unique = rng.gen::<u32>();
+ buf.set_file_name(format!("{}{:08x}{}", prefix_, unique, suffix_));
+ // TODO: on posix, set mode flags to 0o700.
+ let r = create_dir(buf.as_path());
+ match r {
+ Err(ref e) if e.kind() == ErrorKind::AlreadyExists => continue,
+ Ok(_) => return Ok(buf),
+ Err(e) => return Err(e),
+ }
+ }
+}
+
+pub fn mkdir(path: &Path, perm: u32, recursive: bool) -> std::io::Result<()> {
+ debug!("mkdir -p {}", path.display());
+ let mut builder = DirBuilder::new();
+ builder.recursive(recursive);
+ set_dir_permission(&mut builder, perm);
+ builder.create(path)
+}
+
+#[cfg(any(unix))]
+fn set_dir_permission(builder: &mut DirBuilder, perm: u32) {
+ debug!("set dir perm to {}", perm);
+ builder.mode(perm & 0o777);
+}
+
+#[cfg(not(any(unix)))]
+fn set_dir_permission(_builder: &mut DirBuilder, _perm: u32) {
+ // NOOP on windows
+}
+
+pub fn normalize_path(path: &Path) -> String {
+ let s = String::from(path.to_str().unwrap());
+ if cfg!(windows) {
+    // TODO This isn't correct. Probably should iterate over components.
+ s.replace("\\", "/")
+ } else {
+ s
+ }
+}
+
+#[cfg(unix)]
+pub fn chown(path: &str, uid: u32, gid: u32) -> Result<(), ErrBox> {
+ let nix_uid = Uid::from_raw(uid);
+ let nix_gid = Gid::from_raw(gid);
+ unix_chown(path, Option::Some(nix_uid), Option::Some(nix_gid))
+ .map_err(ErrBox::from)
+}
+
+#[cfg(not(unix))]
+pub fn chown(_path: &str, _uid: u32, _gid: u32) -> Result<(), ErrBox> {
+ // Noop
+ // TODO: implement chown for Windows
+ Err(crate::deno_error::op_not_implemented())
+}
+
+pub fn resolve_from_cwd(path: &str) -> Result<(PathBuf, String), ErrBox> {
+ let candidate_path = Path::new(path);
+
+ let resolved_path = if candidate_path.is_absolute() {
+ candidate_path.to_owned()
+ } else {
+ let cwd = std::env::current_dir().unwrap();
+ cwd.join(path)
+ };
+
+ // HACK: `Url::parse` is used here because it normalizes the path.
+  // Joining "/dev/deno/" with "./tests" using `PathBuf` yields "/dev/deno/./tests/".
+  // On the other hand, joining "/dev/deno/" with "./tests" using `Url` yields
+  // "/dev/deno/tests" - and that's what we want.
+ // There exists similar method on `PathBuf` - `PathBuf.canonicalize`, but the problem
+ // is `canonicalize` resolves symlinks and we don't want that.
+ // We just want to normalize the path...
+ // This only works on absolute paths - not worth extracting as a public utility.
+ let resolved_url =
+ Url::from_file_path(resolved_path).expect("Path should be absolute");
+ let normalized_url = Url::parse(resolved_url.as_str())
+ .expect("String from a URL should parse to a URL");
+ let normalized_path = normalized_url
+ .to_file_path()
+ .expect("URL from a path should contain a valid path");
+
+ let path_string = normalized_path.to_str().unwrap().to_string();
+
+ Ok((normalized_path, path_string))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn resolve_from_cwd_child() {
+ let cwd = std::env::current_dir().unwrap();
+ assert_eq!(resolve_from_cwd("a").unwrap().0, cwd.join("a"));
+ }
+
+ #[test]
+ fn resolve_from_cwd_dot() {
+ let cwd = std::env::current_dir().unwrap();
+ assert_eq!(resolve_from_cwd(".").unwrap().0, cwd);
+ }
+
+ #[test]
+ fn resolve_from_cwd_parent() {
+ let cwd = std::env::current_dir().unwrap();
+ assert_eq!(resolve_from_cwd("a/..").unwrap().0, cwd);
+ }
+
+ // TODO: Get a good expected value here for Windows.
+ #[cfg(not(windows))]
+ #[test]
+ fn resolve_from_cwd_absolute() {
+ let expected = Path::new("/a");
+ assert_eq!(resolve_from_cwd("/a").unwrap().0, expected);
+ }
+}
diff --git a/cli/global_timer.rs b/cli/global_timer.rs
new file mode 100644
index 000000000..d3ca52f46
--- /dev/null
+++ b/cli/global_timer.rs
@@ -0,0 +1,50 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+//! This module helps deno implement timers.
+//!
+//! As an optimization, we want to avoid expensive calls into Rust for every
+//! setTimeout in JavaScript. Thus in //js/timers.ts a data structure is
+//! implemented that calls into Rust for only the smallest timeout. Thus we
+//! only need to be able to start and cancel a single timer (or Delay, as Tokio
+//! calls it) for an entire Isolate. This is what is implemented here.
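+//!
+//! A rough usage sketch (the op layer is assumed to drive this; `Delay` and
+//! the oneshot channel come from tokio 0.1):
+//!
+//! ```ignore
+//! let mut timer = GlobalTimer::new();
+//! let deadline = std::time::Instant::now() + std::time::Duration::from_millis(100);
+//! // Resolves once the deadline passes, or earlier if `timer.cancel()` is called.
+//! let fut = timer.new_timeout(deadline);
+//! ```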
+
+use crate::tokio_util::panic_on_error;
+use futures::Future;
+use std::time::Instant;
+use tokio::sync::oneshot;
+use tokio::timer::Delay;
+
+#[derive(Default)]
+pub struct GlobalTimer {
+ tx: Option<oneshot::Sender<()>>,
+}
+
+impl GlobalTimer {
+ pub fn new() -> Self {
+ Self { tx: None }
+ }
+
+ pub fn cancel(&mut self) {
+ if let Some(tx) = self.tx.take() {
+ tx.send(()).ok();
+ }
+ }
+
+ pub fn new_timeout(
+ &mut self,
+ deadline: Instant,
+ ) -> impl Future<Item = (), Error = ()> {
+ if self.tx.is_some() {
+ self.cancel();
+ }
+ assert!(self.tx.is_none());
+
+ let (tx, rx) = oneshot::channel();
+ self.tx = Some(tx);
+
+ let delay = panic_on_error(Delay::new(deadline));
+ let rx = panic_on_error(rx);
+
+ delay.select(rx).then(|_| Ok(()))
+ }
+}
diff --git a/cli/http_body.rs b/cli/http_body.rs
new file mode 100644
index 000000000..c03dfd637
--- /dev/null
+++ b/cli/http_body.rs
@@ -0,0 +1,89 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+use futures::stream::Stream;
+use futures::Async;
+use futures::Poll;
+use reqwest::r#async::Chunk;
+use reqwest::r#async::Decoder;
+use std::cmp::min;
+use std::io;
+use std::io::Read;
+use tokio::io::AsyncRead;
+
+/// Wraps `reqwest::Decoder` so that it can be exposed as an `AsyncRead` and integrated
+/// into resources more easily.
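+///
+/// A minimal sketch of the intended wiring (assumes an async `reqwest`
+/// response named `response`, and assumes `into_body()` yields the `Decoder`):
+///
+/// ```ignore
+/// let body = HttpBody::from(response.into_body());
+/// // `body` now implements `tokio::io::AsyncRead` and can be added as a resource.
+/// ```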
+pub struct HttpBody {
+ decoder: Decoder,
+ chunk: Option<Chunk>,
+ pos: usize,
+}
+
+impl HttpBody {
+ pub fn from(body: Decoder) -> Self {
+ Self {
+ decoder: body,
+ chunk: None,
+ pos: 0,
+ }
+ }
+}
+
+impl Read for HttpBody {
+ fn read(&mut self, _buf: &mut [u8]) -> io::Result<usize> {
+ unimplemented!();
+ }
+}
+
+impl AsyncRead for HttpBody {
+ fn poll_read(&mut self, buf: &mut [u8]) -> Poll<usize, io::Error> {
+ if let Some(chunk) = self.chunk.take() {
+ debug!(
+ "HttpBody Fake Read buf {} chunk {} pos {}",
+ buf.len(),
+ chunk.len(),
+ self.pos
+ );
+ let n = min(buf.len(), chunk.len() - self.pos);
+ {
+ let rest = &chunk[self.pos..];
+ buf[..n].clone_from_slice(&rest[..n]);
+ }
+ self.pos += n;
+ if self.pos == chunk.len() {
+ self.pos = 0;
+ } else {
+ self.chunk = Some(chunk);
+ }
+ return Ok(Async::Ready(n));
+ } else {
+ assert_eq!(self.pos, 0);
+ }
+
+ let p = self.decoder.poll();
+ match p {
+ Err(e) => Err(
+ // TODO Need to map hyper::Error into std::io::Error.
+ io::Error::new(io::ErrorKind::Other, e),
+ ),
+ Ok(Async::NotReady) => Ok(Async::NotReady),
+ Ok(Async::Ready(maybe_chunk)) => match maybe_chunk {
+ None => Ok(Async::Ready(0)),
+ Some(chunk) => {
+ debug!(
+ "HttpBody Real Read buf {} chunk {} pos {}",
+ buf.len(),
+ chunk.len(),
+ self.pos
+ );
+ let n = min(buf.len(), chunk.len());
+ buf[..n].clone_from_slice(&chunk[..n]);
+ if buf.len() < chunk.len() {
+ self.pos = n;
+ self.chunk = Some(chunk);
+ }
+ Ok(Async::Ready(n))
+ }
+ },
+ }
+ }
+}
diff --git a/cli/http_util.rs b/cli/http_util.rs
new file mode 100644
index 000000000..754cd60d2
--- /dev/null
+++ b/cli/http_util.rs
@@ -0,0 +1,217 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::deno_error;
+use crate::deno_error::DenoError;
+use crate::version;
+use deno::ErrBox;
+use futures::{future, Future};
+use reqwest;
+use reqwest::header::HeaderMap;
+use reqwest::header::CONTENT_TYPE;
+use reqwest::header::LOCATION;
+use reqwest::header::USER_AGENT;
+use reqwest::r#async::Client;
+use reqwest::RedirectPolicy;
+use url::Url;
+
+/// Create new instance of async reqwest::Client. This client supports
+/// proxies and doesn't follow redirects.
+pub fn get_client() -> Client {
+ let mut headers = HeaderMap::new();
+ headers.insert(
+ USER_AGENT,
+ format!("Deno/{}", version::DENO).parse().unwrap(),
+ );
+ Client::builder()
+ .redirect(RedirectPolicy::none())
+ .default_headers(headers)
+ .use_sys_proxy()
+ .build()
+ .unwrap()
+}
+
+/// Construct the next uri based on the base uri and the location header fragment.
+/// See <https://tools.ietf.org/html/rfc3986#section-4.2>
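+///
+/// For example (mirroring the unit tests below):
+/// - base `http://deno.land/x` + location `/y` resolves to `http://deno.land/y`
+/// - base `http://deno.land/x` + location `z` resolves to `http://deno.land/z`
+/// - base `http://deno.land/x` + location `//rust-lang.org/en-US` resolves to
+///   `http://rust-lang.org/en-US`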
+fn resolve_url_from_location(base_url: &Url, location: &str) -> Url {
+ if location.starts_with("http://") || location.starts_with("https://") {
+ // absolute uri
+ Url::parse(location).expect("provided redirect url should be a valid url")
+ } else if location.starts_with("//") {
+ // "//" authority path-abempty
+ Url::parse(&format!("{}:{}", base_url.scheme(), location))
+ .expect("provided redirect url should be a valid url")
+ } else if location.starts_with('/') {
+ // path-absolute
+ base_url
+ .join(location)
+ .expect("provided redirect url should be a valid url")
+ } else {
+ // assuming path-noscheme | path-empty
+ let base_url_path_str = base_url.path().to_owned();
+    // Pop the last part of the url (after the last slash).
+ let segs: Vec<&str> = base_url_path_str.rsplitn(2, '/').collect();
+ let new_path = format!("{}/{}", segs.last().unwrap_or(&""), location);
+ base_url
+ .join(&new_path)
+ .expect("provided redirect url should be a valid url")
+ }
+}
+
+#[derive(Debug, PartialEq)]
+pub enum FetchOnceResult {
+ // (code, maybe_content_type)
+ Code(String, Option<String>),
+ Redirect(Url),
+}
+
+/// Asynchronously fetches the given HTTP URL in a single pass only.
+/// If no redirect is present and no error occurs,
+/// yields Code(code, maybe_content_type).
+/// If a redirect occurs, it is not followed and
+/// Redirect(url) is yielded.
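+///
+/// A rough usage sketch (the unit tests below show concrete cases):
+///
+/// ```ignore
+/// let fut = fetch_string_once(&url).then(|result| match result {
+///   Ok(FetchOnceResult::Code(code, maybe_content_type)) => {
+///     // use the fetched source
+///     Ok(())
+///   }
+///   Ok(FetchOnceResult::Redirect(new_url)) => {
+///     // the caller decides whether to fetch again from `new_url`
+///     Ok(())
+///   }
+///   Err(err) => Err(err),
+/// });
+/// ```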
+pub fn fetch_string_once(
+ url: &Url,
+) -> impl Future<Item = FetchOnceResult, Error = ErrBox> {
+ type FetchAttempt = (Option<String>, Option<String>, Option<FetchOnceResult>);
+
+ let url = url.clone();
+ let client = get_client();
+
+ client
+ .get(url.clone())
+ .send()
+ .map_err(ErrBox::from)
+ .and_then(
+ move |mut response| -> Box<
+ dyn Future<Item = FetchAttempt, Error = ErrBox> + Send,
+ > {
+ if response.status().is_redirection() {
+ let location_string = response.headers()
+ .get(LOCATION)
+ .expect("url redirection should provide 'location' header")
+ .to_str()
+ .unwrap();
+
+ debug!("Redirecting to {:?}...", &location_string);
+ let new_url = resolve_url_from_location(&url, location_string);
+          // A boxed trait object lets both branches, which build different
+          // future types, return the same result type.
+ return Box::new(future::ok(None).join3(
+ future::ok(None),
+ future::ok(Some(FetchOnceResult::Redirect(new_url))),
+ ));
+ }
+
+ if response.status().is_client_error() || response.status().is_server_error() {
+ return Box::new(future::err(DenoError::new(
+ deno_error::ErrorKind::Other,
+ format!("Import '{}' failed: {}", &url, response.status()),
+ ).into()));
+ }
+
+ let content_type = response
+ .headers()
+ .get(CONTENT_TYPE)
+ .map(|content_type| content_type.to_str().unwrap().to_owned());
+
+ let body = response
+ .text()
+ .map_err(ErrBox::from);
+
+ Box::new(
+ Some(body).join3(future::ok(content_type), future::ok(None))
+ )
+ }
+ )
+ .and_then(move |(maybe_code, maybe_content_type, maybe_redirect)| {
+ if let Some(redirect) = maybe_redirect {
+ future::ok(redirect)
+ } else {
+ // maybe_code should always contain code here!
+ future::ok(FetchOnceResult::Code(
+ maybe_code.unwrap(),
+ maybe_content_type,
+ ))
+ }
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tokio_util;
+
+ #[test]
+ fn test_fetch_sync_string() {
+ let http_server_guard = crate::test_util::http_server();
+ // Relies on external http server. See tools/http_server.py
+ let url = Url::parse("http://127.0.0.1:4545/package.json").unwrap();
+
+ let fut = fetch_string_once(&url).then(|result| match result {
+ Ok(FetchOnceResult::Code(code, maybe_content_type)) => {
+ assert!(!code.is_empty());
+ assert_eq!(maybe_content_type, Some("application/json".to_string()));
+ Ok(())
+ }
+ _ => panic!(),
+ });
+
+ tokio_util::run(fut);
+ drop(http_server_guard);
+ }
+
+ #[test]
+ fn test_fetch_string_once_with_redirect() {
+ let http_server_guard = crate::test_util::http_server();
+ // Relies on external http server. See tools/http_server.py
+ let url = Url::parse("http://127.0.0.1:4546/package.json").unwrap();
+ // Dns resolver substitutes `127.0.0.1` with `localhost`
+ let target_url = Url::parse("http://localhost:4545/package.json").unwrap();
+ let fut = fetch_string_once(&url).then(move |result| match result {
+ Ok(FetchOnceResult::Redirect(url)) => {
+ assert_eq!(url, target_url);
+ Ok(())
+ }
+ _ => panic!(),
+ });
+
+ tokio_util::run(fut);
+ drop(http_server_guard);
+ }
+
+ #[test]
+ fn test_resolve_url_from_location_full_1() {
+ let url = "http://deno.land".parse::<Url>().unwrap();
+ let new_uri = resolve_url_from_location(&url, "http://golang.org");
+ assert_eq!(new_uri.host_str().unwrap(), "golang.org");
+ }
+
+ #[test]
+ fn test_resolve_url_from_location_full_2() {
+ let url = "https://deno.land".parse::<Url>().unwrap();
+ let new_uri = resolve_url_from_location(&url, "https://golang.org");
+ assert_eq!(new_uri.host_str().unwrap(), "golang.org");
+ }
+
+ #[test]
+ fn test_resolve_url_from_location_relative_1() {
+ let url = "http://deno.land/x".parse::<Url>().unwrap();
+ let new_uri = resolve_url_from_location(&url, "//rust-lang.org/en-US");
+ assert_eq!(new_uri.host_str().unwrap(), "rust-lang.org");
+ assert_eq!(new_uri.path(), "/en-US");
+ }
+
+ #[test]
+ fn test_resolve_url_from_location_relative_2() {
+ let url = "http://deno.land/x".parse::<Url>().unwrap();
+ let new_uri = resolve_url_from_location(&url, "/y");
+ assert_eq!(new_uri.host_str().unwrap(), "deno.land");
+ assert_eq!(new_uri.path(), "/y");
+ }
+
+ #[test]
+ fn test_resolve_url_from_location_relative_3() {
+ let url = "http://deno.land/x".parse::<Url>().unwrap();
+ let new_uri = resolve_url_from_location(&url, "z");
+ assert_eq!(new_uri.host_str().unwrap(), "deno.land");
+ assert_eq!(new_uri.path(), "/z");
+ }
+}
diff --git a/cli/import_map.rs b/cli/import_map.rs
new file mode 100644
index 000000000..d2916c198
--- /dev/null
+++ b/cli/import_map.rs
@@ -0,0 +1,2076 @@
+use deno::ErrBox;
+use deno::ModuleSpecifier;
+use indexmap::IndexMap;
+use serde_json::Map;
+use serde_json::Value;
+use std::cmp::Ordering;
+use std::error::Error;
+use std::fmt;
+use std::fs;
+use std::io;
+use url::Url;
+
+#[derive(Debug)]
+pub struct ImportMapError {
+ pub msg: String,
+}
+
+impl ImportMapError {
+ pub fn new(msg: &str) -> Self {
+ ImportMapError {
+ msg: msg.to_string(),
+ }
+ }
+}
+
+impl fmt::Display for ImportMapError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.pad(&self.msg)
+ }
+}
+
+impl Error for ImportMapError {}
+
+// NOTE: this is a difference between deno and the reference implementation - deno
+// currently can't resolve URLs with other schemes (eg. data:, about:, blob:)
+const SUPPORTED_FETCH_SCHEMES: [&str; 3] = ["http", "https", "file"];
+
+type SpecifierMap = IndexMap<String, Vec<ModuleSpecifier>>;
+type ScopesMap = IndexMap<String, SpecifierMap>;
+
+#[derive(Debug)]
+pub struct ImportMap {
+ base_url: String,
+ imports: SpecifierMap,
+ scopes: ScopesMap,
+}
+
+impl ImportMap {
+ pub fn load(file_path: &str) -> Result<Self, ErrBox> {
+ let file_url = ModuleSpecifier::resolve_url_or_path(file_path)?.to_string();
+ let resolved_path = std::env::current_dir().unwrap().join(file_path);
+ debug!(
+ "Attempt to load import map: {}",
+ resolved_path.to_str().unwrap()
+ );
+
+ // Load the contents of import map
+ let json_string = fs::read_to_string(&resolved_path).map_err(|err| {
+ io::Error::new(
+ io::ErrorKind::InvalidInput,
+ format!(
+ "Error retrieving import map file at \"{}\": {}",
+ resolved_path.to_str().unwrap(),
+ err.to_string()
+ )
+ .as_str(),
+ )
+ })?;
+ // The URL of the import map is the base URL for its values.
+ ImportMap::from_json(&file_url, &json_string).map_err(ErrBox::from)
+ }
+
+ pub fn from_json(
+ base_url: &str,
+ json_string: &str,
+ ) -> Result<Self, ImportMapError> {
+ let v: Value = match serde_json::from_str(json_string) {
+ Ok(v) => v,
+ Err(_) => {
+ return Err(ImportMapError::new("Unable to parse import map JSON"));
+ }
+ };
+
+ match v {
+ Value::Object(_) => {}
+ _ => {
+ return Err(ImportMapError::new("Import map JSON must be an object"));
+ }
+ }
+
+ let normalized_imports = match &v.get("imports") {
+ Some(imports_map) => {
+ if !imports_map.is_object() {
+ return Err(ImportMapError::new(
+ "Import map's 'imports' must be an object",
+ ));
+ }
+
+ let imports_map = imports_map.as_object().unwrap();
+ ImportMap::parse_specifier_map(imports_map, base_url)
+ }
+ None => IndexMap::new(),
+ };
+
+ let normalized_scopes = match &v.get("scopes") {
+ Some(scope_map) => {
+ if !scope_map.is_object() {
+ return Err(ImportMapError::new(
+ "Import map's 'scopes' must be an object",
+ ));
+ }
+
+ let scope_map = scope_map.as_object().unwrap();
+ ImportMap::parse_scope_map(scope_map, base_url)?
+ }
+ None => IndexMap::new(),
+ };
+
+ let import_map = ImportMap {
+ base_url: base_url.to_string(),
+ imports: normalized_imports,
+ scopes: normalized_scopes,
+ };
+
+ Ok(import_map)
+ }
+
+ fn try_url_like_specifier(specifier: &str, base: &str) -> Option<Url> {
+ // this should never fail
+ if specifier.starts_with('/')
+ || specifier.starts_with("./")
+ || specifier.starts_with("../")
+ {
+ let base_url = Url::parse(base).unwrap();
+ let url = base_url.join(specifier).unwrap();
+ return Some(url);
+ }
+
+ if let Ok(url) = Url::parse(specifier) {
+ if SUPPORTED_FETCH_SCHEMES.contains(&url.scheme()) {
+ return Some(url);
+ }
+ }
+
+ None
+ }
+
+ /// Parse provided key as import map specifier.
+ ///
+ /// Specifiers must be valid URLs (eg. "https://deno.land/x/std/testing/mod.ts")
+ /// or "bare" specifiers (eg. "moment").
+ // TODO: add proper error handling: https://github.com/WICG/import-maps/issues/100
+ fn normalize_specifier_key(
+ specifier_key: &str,
+ base_url: &str,
+ ) -> Option<String> {
+ // ignore empty keys
+ if specifier_key.is_empty() {
+ return None;
+ }
+
+ if let Some(url) =
+ ImportMap::try_url_like_specifier(specifier_key, base_url)
+ {
+ return Some(url.to_string());
+ }
+
+ // "bare" specifier
+ Some(specifier_key.to_string())
+ }
+
+ /// Parse provided addresses as valid URLs.
+ ///
+ /// Non-valid addresses are skipped.
+ fn normalize_addresses(
+ specifier_key: &str,
+ base_url: &str,
+ potential_addresses: Vec<String>,
+ ) -> Vec<ModuleSpecifier> {
+ let mut normalized_addresses: Vec<ModuleSpecifier> = vec![];
+
+ for potential_address in potential_addresses {
+ let url =
+ match ImportMap::try_url_like_specifier(&potential_address, base_url) {
+ Some(url) => url,
+ None => continue,
+ };
+
+ let url_string = url.to_string();
+ if specifier_key.ends_with('/') && !url_string.ends_with('/') {
+ eprintln!(
+          "Invalid target address {:?} for package specifier {:?}. \
+ Package address targets must end with \"/\".",
+ url_string, specifier_key
+ );
+ continue;
+ }
+
+ normalized_addresses.push(url.into());
+ }
+
+ normalized_addresses
+ }
+
+ /// Convert provided JSON map to valid SpecifierMap.
+ ///
+ /// From specification:
+ /// - order of iteration must be retained
+  /// - SpecifierMap's keys are sorted longest first, then in alphabetic order
+ fn parse_specifier_map(
+ json_map: &Map<String, Value>,
+ base_url: &str,
+ ) -> SpecifierMap {
+ let mut normalized_map: SpecifierMap = SpecifierMap::new();
+
+ // Order is preserved because of "preserve_order" feature of "serde_json".
+ for (specifier_key, value) in json_map.iter() {
+ let normalized_specifier_key =
+ match ImportMap::normalize_specifier_key(specifier_key, base_url) {
+ Some(s) => s,
+ None => continue,
+ };
+
+ let potential_addresses: Vec<String> = match value {
+ Value::String(address) => vec![address.to_string()],
+ Value::Array(address_array) => {
+ let mut string_addresses: Vec<String> = vec![];
+
+ for address in address_array {
+ match address {
+ Value::String(address) => {
+ string_addresses.push(address.to_string())
+ }
+ _ => continue,
+ }
+ }
+
+ string_addresses
+ }
+ Value::Null => vec![],
+ _ => vec![],
+ };
+
+ let normalized_address_array = ImportMap::normalize_addresses(
+ &normalized_specifier_key,
+ base_url,
+ potential_addresses,
+ );
+
+ debug!(
+ "normalized specifier {:?}; {:?}",
+ normalized_specifier_key, normalized_address_array
+ );
+ normalized_map.insert(normalized_specifier_key, normalized_address_array);
+ }
+
+ // Sort in longest and alphabetical order.
+ normalized_map.sort_by(|k1, _v1, k2, _v2| {
+ if k1.len() > k2.len() {
+ return Ordering::Less;
+ } else if k2.len() > k1.len() {
+ return Ordering::Greater;
+ }
+
+ k2.cmp(k1)
+ });
+
+ normalized_map
+ }
+
+ /// Convert provided JSON map to valid ScopeMap.
+ ///
+ /// From specification:
+ /// - order of iteration must be retained
+  /// - ScopeMap's keys are sorted longest first, then in alphabetic order
+ fn parse_scope_map(
+ scope_map: &Map<String, Value>,
+ base_url: &str,
+ ) -> Result<ScopesMap, ImportMapError> {
+ let mut normalized_map: ScopesMap = ScopesMap::new();
+
+ // Order is preserved because of "preserve_order" feature of "serde_json".
+ for (scope_prefix, potential_specifier_map) in scope_map.iter() {
+ if !potential_specifier_map.is_object() {
+ return Err(ImportMapError::new(&format!(
+ "The value for the {:?} scope prefix must be an object",
+ scope_prefix
+ )));
+ }
+
+ let potential_specifier_map =
+ potential_specifier_map.as_object().unwrap();
+
+ let scope_prefix_url =
+ match Url::parse(base_url).unwrap().join(scope_prefix) {
+ Ok(url) => {
+ if !SUPPORTED_FETCH_SCHEMES.contains(&url.scheme()) {
+ eprintln!(
+ "Invalid scope {:?}. Scope URLs must have a valid fetch scheme.",
+ url.to_string()
+ );
+ continue;
+ }
+ url.to_string()
+ }
+ _ => continue,
+ };
+
+ let norm_map =
+ ImportMap::parse_specifier_map(potential_specifier_map, base_url);
+
+ normalized_map.insert(scope_prefix_url, norm_map);
+ }
+
+ // Sort in longest and alphabetical order.
+ normalized_map.sort_by(|k1, _v1, k2, _v2| {
+ if k1.len() > k2.len() {
+ return Ordering::Less;
+ } else if k2.len() > k1.len() {
+ return Ordering::Greater;
+ }
+
+ k2.cmp(k1)
+ });
+
+ Ok(normalized_map)
+ }
+
+ pub fn resolve_scopes_match(
+ scopes: &ScopesMap,
+ normalized_specifier: &str,
+ referrer: &str,
+ ) -> Result<Option<ModuleSpecifier>, ImportMapError> {
+ // exact-match
+ if let Some(scope_imports) = scopes.get(referrer) {
+ if let Ok(scope_match) =
+ ImportMap::resolve_imports_match(scope_imports, normalized_specifier)
+ {
+ // Return only if there was actual match (not None).
+ if scope_match.is_some() {
+ return Ok(scope_match);
+ }
+ }
+ }
+
+ for (normalized_scope_key, scope_imports) in scopes.iter() {
+ if normalized_scope_key.ends_with('/')
+ && referrer.starts_with(normalized_scope_key)
+ {
+ if let Ok(scope_match) =
+ ImportMap::resolve_imports_match(scope_imports, normalized_specifier)
+ {
+ // Return only if there was actual match (not None).
+ if scope_match.is_some() {
+ return Ok(scope_match);
+ }
+ }
+ }
+ }
+
+ Ok(None)
+ }
+
+ // TODO: https://github.com/WICG/import-maps/issues/73#issuecomment-439327758
+ // for some more optimized candidate implementations.
+ pub fn resolve_imports_match(
+ imports: &SpecifierMap,
+ normalized_specifier: &str,
+ ) -> Result<Option<ModuleSpecifier>, ImportMapError> {
+ // exact-match
+ if let Some(address_vec) = imports.get(normalized_specifier) {
+ if address_vec.is_empty() {
+ return Err(ImportMapError::new(&format!(
+ "Specifier {:?} was mapped to no addresses.",
+ normalized_specifier
+ )));
+ } else if address_vec.len() == 1 {
+ let address = address_vec.first().unwrap();
+ debug!(
+ "Specifier {:?} was mapped to {:?}.",
+ normalized_specifier, address
+ );
+ return Ok(Some(address.clone()));
+ } else {
+ return Err(ImportMapError::new(
+ "Multi-address mappings are not yet supported",
+ ));
+ }
+ }
+
+ // package-prefix match
+ // "most-specific wins", i.e. when there are multiple matching keys,
+ // choose the longest.
+ // https://github.com/WICG/import-maps/issues/102
+ for (specifier_key, address_vec) in imports.iter() {
+ if specifier_key.ends_with('/')
+ && normalized_specifier.starts_with(specifier_key)
+ {
+ if address_vec.is_empty() {
+ return Err(ImportMapError::new(&format!("Specifier {:?} was mapped to no addresses (via prefix specifier key {:?}).", normalized_specifier, specifier_key)));
+ } else if address_vec.len() == 1 {
+ let address = address_vec.first().unwrap();
+ let after_prefix = &normalized_specifier[specifier_key.len()..];
+
+ let base_url = address.as_url();
+ if let Ok(url) = base_url.join(after_prefix) {
+ debug!("Specifier {:?} was mapped to {:?} (via prefix specifier key {:?}).", normalized_specifier, url, address);
+ return Ok(Some(ModuleSpecifier::from(url)));
+ }
+
+ unreachable!();
+ } else {
+ return Err(ImportMapError::new(
+ "Multi-address mappings are not yet supported",
+ ));
+ }
+ }
+ }
+
+ debug!(
+ "Specifier {:?} was not mapped in import map.",
+ normalized_specifier
+ );
+
+ Ok(None)
+ }
+
+ // TODO: add support for built-in modules
+ /// Currently we support two types of specifiers: URL (http://, https://, file://)
+ /// and "bare" (moment, jquery, lodash)
+ ///
+ /// Scenarios:
+ /// 1. import resolved using import map -> String
+ /// 2. import restricted by import map -> ImportMapError
+ /// 3. import not mapped -> None
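+  ///
+  /// A minimal sketch (the mapping below is hypothetical; it illustrates
+  /// scenario 1 for a bare specifier):
+  ///
+  /// ```ignore
+  /// let import_map = ImportMap::from_json(
+  ///   "https://example.com/app/main.ts",
+  ///   r#"{ "imports": { "moment": "https://example.com/vendor/moment.js" } }"#,
+  /// )?;
+  /// let resolved =
+  ///   import_map.resolve("moment", "https://example.com/app/main.ts")?;
+  /// // resolved == Some("https://example.com/vendor/moment.js")
+  /// ```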
+ pub fn resolve(
+ &self,
+ specifier: &str,
+ referrer: &str,
+ ) -> Result<Option<ModuleSpecifier>, ImportMapError> {
+ let resolved_url: Option<Url> =
+ ImportMap::try_url_like_specifier(specifier, referrer);
+ let normalized_specifier = match &resolved_url {
+ Some(url) => url.to_string(),
+ None => specifier.to_string(),
+ };
+
+ let scopes_match = ImportMap::resolve_scopes_match(
+ &self.scopes,
+ &normalized_specifier,
+ &referrer.to_string(),
+ )?;
+
+ // match found in scopes map
+ if scopes_match.is_some() {
+ return Ok(scopes_match);
+ }
+
+ let imports_match =
+ ImportMap::resolve_imports_match(&self.imports, &normalized_specifier)?;
+
+ // match found in import map
+ if imports_match.is_some() {
+ return Ok(imports_match);
+ }
+
+ // no match in import map but we got resolvable URL
+ if let Some(resolved_url) = resolved_url {
+ return Ok(Some(ModuleSpecifier::from(resolved_url)));
+ }
+
+ Err(ImportMapError::new(&format!(
+ "Unmapped bare specifier {:?}",
+ normalized_specifier
+ )))
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn load_nonexistent() {
+ let file_path = "nonexistent_import_map.json";
+ assert!(ImportMap::load(file_path).is_err());
+ }
+
+ #[test]
+ fn from_json_1() {
+ let base_url = "https://deno.land";
+
+ // empty JSON
+ assert!(ImportMap::from_json(base_url, "{}").is_ok());
+
+ let non_object_strings = vec!["null", "true", "1", "\"foo\"", "[]"];
+
+ // invalid JSON
+ for non_object in non_object_strings.to_vec() {
+ assert!(ImportMap::from_json(base_url, non_object).is_err());
+ }
+
+ // invalid schema: 'imports' is non-object
+ for non_object in non_object_strings.to_vec() {
+ assert!(ImportMap::from_json(
+ base_url,
+ &format!("{{\"imports\": {}}}", non_object),
+ )
+ .is_err());
+ }
+
+ // invalid schema: 'scopes' is non-object
+ for non_object in non_object_strings.to_vec() {
+ assert!(ImportMap::from_json(
+ base_url,
+ &format!("{{\"scopes\": {}}}", non_object),
+ )
+ .is_err());
+ }
+ }
+
+ #[test]
+ fn from_json_2() {
+ let json_map = r#"{
+ "imports": {
+ "foo": "https://example.com/1",
+ "bar": ["https://example.com/2"],
+ "fizz": null
+ }
+ }"#;
+ let result = ImportMap::from_json("https://deno.land", json_map);
+ assert!(result.is_ok());
+ }
+
+ #[test]
+ fn parse_specifier_keys_relative() {
+ // Should absolutize strings prefixed with ./, ../, or / into the corresponding URLs..
+ let json_map = r#"{
+ "imports": {
+ "./foo": "/dotslash",
+ "../foo": "/dotdotslash",
+ "/foo": "/slash"
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+ assert_eq!(
+ import_map
+ .imports
+ .get("https://base.example/path1/path2/foo")
+ .unwrap()[0],
+ "https://base.example/dotslash".to_string()
+ );
+ assert_eq!(
+ import_map
+ .imports
+ .get("https://base.example/path1/foo")
+ .unwrap()[0],
+ "https://base.example/dotdotslash".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("https://base.example/foo").unwrap()[0],
+ "https://base.example/slash".to_string()
+ );
+
+ // Should absolutize the literal strings ./, ../, or / with no suffix..
+ let json_map = r#"{
+ "imports": {
+ "./": "/dotslash/",
+ "../": "/dotdotslash/",
+ "/": "/slash/"
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+ assert_eq!(
+ import_map
+ .imports
+ .get("https://base.example/path1/path2/")
+ .unwrap()[0],
+ "https://base.example/dotslash/".to_string()
+ );
+ assert_eq!(
+ import_map
+ .imports
+ .get("https://base.example/path1/")
+ .unwrap()[0],
+ "https://base.example/dotdotslash/".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("https://base.example/").unwrap()[0],
+ "https://base.example/slash/".to_string()
+ );
+
+ // Should treat percent-encoded variants of ./, ../, or / as bare specifiers..
+ let json_map = r#"{
+ "imports": {
+ "%2E/": "/dotSlash1/",
+ "%2E%2E/": "/dotDotSlash1/",
+ ".%2F": "/dotSlash2",
+ "..%2F": "/dotDotSlash2",
+ "%2F": "/slash2",
+ "%2E%2F": "/dotSlash3",
+ "%2E%2E%2F": "/dotDotSlash3"
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+ assert_eq!(
+ import_map.imports.get("%2E/").unwrap()[0],
+ "https://base.example/dotSlash1/".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("%2E%2E/").unwrap()[0],
+ "https://base.example/dotDotSlash1/".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get(".%2F").unwrap()[0],
+ "https://base.example/dotSlash2".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("..%2F").unwrap()[0],
+ "https://base.example/dotDotSlash2".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("%2F").unwrap()[0],
+ "https://base.example/slash2".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("%2E%2F").unwrap()[0],
+ "https://base.example/dotSlash3".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("%2E%2E%2F").unwrap()[0],
+ "https://base.example/dotDotSlash3".to_string()
+ );
+ }
+
+ #[test]
+ fn parse_specifier_keys_absolute() {
+    // Should only accept absolute URL specifier keys with fetch schemes,
+    // treating others as bare specifiers.
+ let json_map = r#"{
+ "imports": {
+ "file:///good": "/file",
+ "http://good/": "/http/",
+ "https://good/": "/https/",
+ "about:bad": "/about",
+ "blob:bad": "/blob",
+ "data:bad": "/data",
+ "filesystem:bad": "/filesystem",
+ "ftp://bad/": "/ftp/",
+ "import:bad": "/import",
+ "mailto:bad": "/mailto",
+ "javascript:bad": "/javascript",
+ "wss:bad": "/wss"
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+ assert_eq!(
+ import_map.imports.get("http://good/").unwrap()[0],
+ "https://base.example/http/".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("https://good/").unwrap()[0],
+ "https://base.example/https/".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("file:///good").unwrap()[0],
+ "https://base.example/file".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("http://good/").unwrap()[0],
+ "https://base.example/http/".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("import:bad").unwrap()[0],
+ "https://base.example/import".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("mailto:bad").unwrap()[0],
+ "https://base.example/mailto".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("javascript:bad").unwrap()[0],
+ "https://base.example/javascript".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("wss:bad").unwrap()[0],
+ "https://base.example/wss".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("about:bad").unwrap()[0],
+ "https://base.example/about".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("blob:bad").unwrap()[0],
+ "https://base.example/blob".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("data:bad").unwrap()[0],
+ "https://base.example/data".to_string()
+ );
+
+ // Should parse absolute URLs, treating unparseable ones as bare specifiers..
+ let json_map = r#"{
+ "imports": {
+ "https://ex ample.org/": "/unparseable1/",
+ "https://example.com:demo": "/unparseable2",
+ "http://[www.example.com]/": "/unparseable3/",
+ "https:example.org": "/invalidButParseable1/",
+ "https://///example.com///": "/invalidButParseable2/",
+ "https://example.net": "/prettyNormal/",
+ "https://ex%41mple.com/": "/percentDecoding/",
+ "https://example.com/%41": "/noPercentDecoding"
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+ assert_eq!(
+ import_map.imports.get("https://ex ample.org/").unwrap()[0],
+ "https://base.example/unparseable1/".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("https://example.com:demo").unwrap()[0],
+ "https://base.example/unparseable2".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("http://[www.example.com]/").unwrap()[0],
+ "https://base.example/unparseable3/".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("https://example.org/").unwrap()[0],
+ "https://base.example/invalidButParseable1/".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("https://example.com///").unwrap()[0],
+ "https://base.example/invalidButParseable2/".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("https://example.net/").unwrap()[0],
+ "https://base.example/prettyNormal/".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("https://example.com/").unwrap()[0],
+ "https://base.example/percentDecoding/".to_string()
+ );
+ assert_eq!(
+ import_map.imports.get("https://example.com/%41").unwrap()[0],
+ "https://base.example/noPercentDecoding".to_string()
+ );
+ }
+
+ #[test]
+ fn parse_scope_keys_relative() {
+ // Should work with no prefix..
+ let json_map = r#"{
+ "scopes": {
+ "foo": {}
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+ assert!(import_map
+ .scopes
+ .contains_key("https://base.example/path1/path2/foo"));
+
+ // Should work with ./, ../, and / prefixes..
+ let json_map = r#"{
+ "scopes": {
+ "./foo": {},
+ "../foo": {},
+ "/foo": {}
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+ assert!(import_map
+ .scopes
+ .contains_key("https://base.example/path1/path2/foo"));
+ assert!(import_map
+ .scopes
+ .contains_key("https://base.example/path1/foo"));
+ assert!(import_map.scopes.contains_key("https://base.example/foo"));
+
+ // Should work with /s, ?s, and #s..
+ let json_map = r#"{
+ "scopes": {
+ "foo/bar?baz#qux": {}
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+ assert!(import_map
+ .scopes
+ .contains_key("https://base.example/path1/path2/foo/bar?baz#qux"));
+
+ // Should work with an empty string scope key..
+ let json_map = r#"{
+ "scopes": {
+ "": {}
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+ assert!(import_map
+ .scopes
+ .contains_key("https://base.example/path1/path2/path3"));
+
+ // Should work with / suffixes..
+ let json_map = r#"{
+ "scopes": {
+ "foo/": {},
+ "./foo/": {},
+ "../foo/": {},
+ "/foo/": {},
+ "/foo//": {}
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+ assert!(import_map
+ .scopes
+ .contains_key("https://base.example/path1/path2/foo/"));
+ assert!(import_map
+ .scopes
+ .contains_key("https://base.example/path1/path2/foo/"));
+ assert!(import_map
+ .scopes
+ .contains_key("https://base.example/path1/foo/"));
+ assert!(import_map.scopes.contains_key("https://base.example/foo/"));
+ assert!(import_map.scopes.contains_key("https://base.example/foo//"));
+
+ // Should deduplicate based on URL parsing rules..
+ let json_map = r#"{
+ "scopes": {
+ "foo/\\": {},
+ "foo//": {},
+ "foo\\\\": {}
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+ assert!(import_map
+ .scopes
+ .contains_key("https://base.example/path1/path2/foo//"));
+ assert_eq!(import_map.scopes.len(), 1);
+ }
+
+ #[test]
+ fn parse_scope_keys_absolute() {
+ // Should only accept absolute URL scope keys with fetch schemes..
+ let json_map = r#"{
+ "scopes": {
+ "http://good/": {},
+ "https://good/": {},
+ "file:///good": {},
+ "about:bad": {},
+ "blob:bad": {},
+ "data:bad": {},
+ "filesystem:bad": {},
+ "ftp://bad/": {},
+ "import:bad": {},
+ "mailto:bad": {},
+ "javascript:bad": {},
+ "wss:bad": {}
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+ assert!(import_map.scopes.contains_key("http://good/"));
+ assert!(import_map.scopes.contains_key("https://good/"));
+ assert!(import_map.scopes.contains_key("file:///good"));
+ assert_eq!(import_map.scopes.len(), 3);
+
+ // Should parse absolute URL scope keys, ignoring unparseable ones..
+ let json_map = r#"{
+ "scopes": {
+ "https://ex ample.org/": {},
+ "https://example.com:demo": {},
+ "http://[www.example.com]/": {},
+ "https:example.org": {},
+ "https://///example.com///": {},
+ "https://example.net": {},
+ "https://ex%41mple.com/foo/": {},
+ "https://example.com/%41": {}
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+ // tricky case! remember we have a base URL
+ assert!(import_map
+ .scopes
+ .contains_key("https://base.example/path1/path2/example.org"));
+ assert!(import_map.scopes.contains_key("https://example.com///"));
+ assert!(import_map.scopes.contains_key("https://example.net/"));
+ assert!(import_map.scopes.contains_key("https://example.com/foo/"));
+ assert!(import_map.scopes.contains_key("https://example.com/%41"));
+ assert_eq!(import_map.scopes.len(), 5);
+ }
+
+ #[test]
+ fn parse_addresses_relative_url_like() {
+ // Should accept strings prefixed with ./, ../, or /..
+ let json_map = r#"{
+ "imports": {
+ "dotSlash": "./foo",
+ "dotDotSlash": "../foo",
+ "slash": "/foo"
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+
+ assert_eq!(
+ import_map.imports.get("dotSlash").unwrap(),
+ &vec!["https://base.example/path1/path2/foo".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("dotDotSlash").unwrap(),
+ &vec!["https://base.example/path1/foo".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("slash").unwrap(),
+ &vec!["https://base.example/foo".to_string()]
+ );
+
+ // Should accept the literal strings ./, ../, or / with no suffix..
+ let json_map = r#"{
+ "imports": {
+ "dotSlash": "./",
+ "dotDotSlash": "../",
+ "slash": "/"
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+
+ assert_eq!(
+ import_map.imports.get("dotSlash").unwrap(),
+ &vec!["https://base.example/path1/path2/".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("dotDotSlash").unwrap(),
+ &vec!["https://base.example/path1/".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("slash").unwrap(),
+ &vec!["https://base.example/".to_string()]
+ );
+
+ // Should ignore percent-encoded variants of ./, ../, or /..
+ let json_map = r#"{
+ "imports": {
+ "dotSlash1": "%2E/",
+ "dotDotSlash1": "%2E%2E/",
+ "dotSlash2": ".%2F",
+ "dotDotSlash2": "..%2F",
+ "slash2": "%2F",
+ "dotSlash3": "%2E%2F",
+ "dotDotSlash3": "%2E%2E%2F"
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+
+ assert!(import_map.imports.get("dotSlash1").unwrap().is_empty());
+ assert!(import_map.imports.get("dotDotSlash1").unwrap().is_empty());
+ assert!(import_map.imports.get("dotSlash2").unwrap().is_empty());
+ assert!(import_map.imports.get("dotDotSlash2").unwrap().is_empty());
+ assert!(import_map.imports.get("slash2").unwrap().is_empty());
+ assert!(import_map.imports.get("dotSlash3").unwrap().is_empty());
+ assert!(import_map.imports.get("dotDotSlash3").unwrap().is_empty());
+ }
+
+ #[test]
+ fn parse_addresses_absolute_with_fetch_schemes() {
+ // Should only accept absolute URL addresses with fetch schemes..
+ let json_map = r#"{
+ "imports": {
+ "http": "http://good/",
+ "https": "https://good/",
+ "file": "file:///good",
+ "about": "about:bad",
+ "blob": "blob:bad",
+ "data": "data:bad",
+ "filesystem": "filesystem:bad",
+ "ftp": "ftp://good/",
+ "import": "import:bad",
+ "mailto": "mailto:bad",
+ "javascript": "javascript:bad",
+ "wss": "wss:bad"
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+
+ assert_eq!(
+ import_map.imports.get("file").unwrap(),
+ &vec!["file:///good".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("http").unwrap(),
+ &vec!["http://good/".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("https").unwrap(),
+ &vec!["https://good/".to_string()]
+ );
+
+ assert!(import_map.imports.get("about").unwrap().is_empty());
+ assert!(import_map.imports.get("blob").unwrap().is_empty());
+ assert!(import_map.imports.get("data").unwrap().is_empty());
+ assert!(import_map.imports.get("filesystem").unwrap().is_empty());
+ assert!(import_map.imports.get("ftp").unwrap().is_empty());
+ assert!(import_map.imports.get("import").unwrap().is_empty());
+ assert!(import_map.imports.get("mailto").unwrap().is_empty());
+ assert!(import_map.imports.get("javascript").unwrap().is_empty());
+ assert!(import_map.imports.get("wss").unwrap().is_empty());
+ }
+
+ #[test]
+ fn parse_addresses_absolute_with_fetch_schemes_arrays() {
+ // Should only accept absolute URL addresses with fetch schemes inside arrays..
+ let json_map = r#"{
+ "imports": {
+ "http": ["http://good/"],
+ "https": ["https://good/"],
+ "file": ["file:///good"],
+ "about": ["about:bad"],
+ "blob": ["blob:bad"],
+ "data": ["data:bad"],
+ "filesystem": ["filesystem:bad"],
+ "ftp": ["ftp://good/"],
+ "import": ["import:bad"],
+ "mailto": ["mailto:bad"],
+ "javascript": ["javascript:bad"],
+ "wss": ["wss:bad"]
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+
+ assert_eq!(
+ import_map.imports.get("file").unwrap(),
+ &vec!["file:///good".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("http").unwrap(),
+ &vec!["http://good/".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("https").unwrap(),
+ &vec!["https://good/".to_string()]
+ );
+
+ assert!(import_map.imports.get("about").unwrap().is_empty());
+ assert!(import_map.imports.get("blob").unwrap().is_empty());
+ assert!(import_map.imports.get("data").unwrap().is_empty());
+ assert!(import_map.imports.get("filesystem").unwrap().is_empty());
+ assert!(import_map.imports.get("ftp").unwrap().is_empty());
+ assert!(import_map.imports.get("import").unwrap().is_empty());
+ assert!(import_map.imports.get("mailto").unwrap().is_empty());
+ assert!(import_map.imports.get("javascript").unwrap().is_empty());
+ assert!(import_map.imports.get("wss").unwrap().is_empty());
+ }
+
+ #[test]
+ fn parse_addresses_unparseable() {
+ // Should parse absolute URLs, ignoring unparseable ones..
+ let json_map = r#"{
+ "imports": {
+ "unparseable1": "https://ex ample.org/",
+ "unparseable2": "https://example.com:demo",
+ "unparseable3": "http://[www.example.com]/",
+ "invalidButParseable1": "https:example.org",
+ "invalidButParseable2": "https://///example.com///",
+ "prettyNormal": "https://example.net",
+ "percentDecoding": "https://ex%41mple.com/",
+ "noPercentDecoding": "https://example.com/%41"
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+
+ assert_eq!(
+ import_map.imports.get("invalidButParseable1").unwrap(),
+ &vec!["https://example.org/".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("invalidButParseable2").unwrap(),
+ &vec!["https://example.com///".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("prettyNormal").unwrap(),
+ &vec!["https://example.net/".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("percentDecoding").unwrap(),
+ &vec!["https://example.com/".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("noPercentDecoding").unwrap(),
+ &vec!["https://example.com/%41".to_string()]
+ );
+
+ assert!(import_map.imports.get("unparseable1").unwrap().is_empty());
+ assert!(import_map.imports.get("unparseable2").unwrap().is_empty());
+ assert!(import_map.imports.get("unparseable3").unwrap().is_empty());
+ }
+
+ #[test]
+ fn parse_addresses_unparseable_arrays() {
+ // Should parse absolute URLs, ignoring unparseable ones inside arrays..
+ let json_map = r#"{
+ "imports": {
+ "unparseable1": ["https://ex ample.org/"],
+ "unparseable2": ["https://example.com:demo"],
+ "unparseable3": ["http://[www.example.com]/"],
+ "invalidButParseable1": ["https:example.org"],
+ "invalidButParseable2": ["https://///example.com///"],
+ "prettyNormal": ["https://example.net"],
+ "percentDecoding": ["https://ex%41mple.com/"],
+ "noPercentDecoding": ["https://example.com/%41"]
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+
+ assert_eq!(
+ import_map.imports.get("invalidButParseable1").unwrap(),
+ &vec!["https://example.org/".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("invalidButParseable2").unwrap(),
+ &vec!["https://example.com///".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("prettyNormal").unwrap(),
+ &vec!["https://example.net/".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("percentDecoding").unwrap(),
+ &vec!["https://example.com/".to_string()]
+ );
+ assert_eq!(
+ import_map.imports.get("noPercentDecoding").unwrap(),
+ &vec!["https://example.com/%41".to_string()]
+ );
+
+ assert!(import_map.imports.get("unparseable1").unwrap().is_empty());
+ assert!(import_map.imports.get("unparseable2").unwrap().is_empty());
+ assert!(import_map.imports.get("unparseable3").unwrap().is_empty());
+ }
+
+ #[test]
+ fn parse_addresses_mismatched_trailing_slashes() {
+    // Should ignore a mapping whose address lacks a trailing slash when the key has one.
+ let json_map = r#"{
+ "imports": {
+ "trailer/": "/notrailer"
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+
+ assert!(import_map.imports.get("trailer/").unwrap().is_empty());
+    // TODO: it'd be good to assert that a warning was shown
+ }
+
+ #[test]
+ fn parse_addresses_mismatched_trailing_slashes_array() {
+    // Should warn for a mismatch alone in an array.
+ let json_map = r#"{
+ "imports": {
+ "trailer/": ["/notrailer"]
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+
+ assert!(import_map.imports.get("trailer/").unwrap().is_empty());
+    // TODO: it'd be good to assert that a warning was shown
+ }
+
+ #[test]
+ fn parse_addresses_mismatched_trailing_slashes_with_nonmismatched_array() {
+    // Should warn for the mismatched address but keep the non-mismatched one in the array.
+ let json_map = r#"{
+ "imports": {
+ "trailer/": ["/atrailer/", "/notrailer"]
+ }
+ }"#;
+ let import_map =
+ ImportMap::from_json("https://base.example/path1/path2/path3", json_map)
+ .unwrap();
+
+ assert_eq!(
+ import_map.imports.get("trailer/").unwrap(),
+ &vec!["https://base.example/atrailer/".to_string()]
+ );
+    // TODO: it'd be good to assert that a warning was shown
+ }
+
+ #[test]
+ fn parse_addresses_other_invalid() {
+ // Should ignore unprefixed strings that are not absolute URLs.
+ for bad in &["bar", "\\bar", "~bar", "#bar", "?bar"] {
+ let json_map = json!({
+ "imports": {
+ "foo": bad
+ }
+ });
+ let import_map = ImportMap::from_json(
+ "https://base.example/path1/path2/path3",
+ &json_map.to_string(),
+ )
+ .unwrap();
+
+ assert!(import_map.imports.get("foo").unwrap().is_empty());
+ }
+ }
+
+ fn get_empty_import_map() -> ImportMap {
+ ImportMap {
+ base_url: "https://example.com/app/main.ts".to_string(),
+ imports: IndexMap::new(),
+ scopes: IndexMap::new(),
+ }
+ }
+
+ fn assert_resolve(
+ result: Result<Option<ModuleSpecifier>, ImportMapError>,
+ expected_url: &str,
+ ) {
+ let maybe_url = result
+ .unwrap_or_else(|err| panic!("ImportMap::resolve failed: {:?}", err));
+ let resolved_url =
+ maybe_url.unwrap_or_else(|| panic!("Unexpected None resolved URL"));
+ assert_eq!(resolved_url, expected_url.to_string());
+ }
+
+ #[test]
+ fn resolve_unmapped_relative_specifiers() {
+ let referrer_url = "https://example.com/js/script.ts";
+ let import_map = get_empty_import_map();
+
+ // Should resolve ./ specifiers as URLs.
+ assert_resolve(
+ import_map.resolve("./foo", referrer_url),
+ "https://example.com/js/foo",
+ );
+ assert_resolve(
+ import_map.resolve("./foo/bar", referrer_url),
+ "https://example.com/js/foo/bar",
+ );
+ assert_resolve(
+ import_map.resolve("./foo/../bar", referrer_url),
+ "https://example.com/js/bar",
+ );
+ assert_resolve(
+ import_map.resolve("./foo/../../bar", referrer_url),
+ "https://example.com/bar",
+ );
+
+ // Should resolve ../ specifiers as URLs.
+ assert_resolve(
+ import_map.resolve("../foo", referrer_url),
+ "https://example.com/foo",
+ );
+ assert_resolve(
+ import_map.resolve("../foo/bar", referrer_url),
+ "https://example.com/foo/bar",
+ );
+ assert_resolve(
+ import_map.resolve("../../../foo/bar", referrer_url),
+ "https://example.com/foo/bar",
+ );
+ }
+
+ #[test]
+ fn resolve_unmapped_absolute_specifiers() {
+ let referrer_url = "https://example.com/js/script.ts";
+ let import_map = get_empty_import_map();
+
+ // Should resolve / specifiers as URLs.
+ assert_resolve(
+ import_map.resolve("/foo", referrer_url),
+ "https://example.com/foo",
+ );
+ assert_resolve(
+ import_map.resolve("/foo/bar", referrer_url),
+ "https://example.com/foo/bar",
+ );
+ assert_resolve(
+ import_map.resolve("../../foo/bar", referrer_url),
+ "https://example.com/foo/bar",
+ );
+ assert_resolve(
+ import_map.resolve("/../foo/../bar", referrer_url),
+ "https://example.com/bar",
+ );
+
+ // Should parse absolute fetch-scheme URLs.
+ assert_resolve(
+ import_map.resolve("https://example.net", referrer_url),
+ "https://example.net/",
+ );
+ assert_resolve(
+ import_map.resolve("https://ex%41mple.com/", referrer_url),
+ "https://example.com/",
+ );
+ assert_resolve(
+ import_map.resolve("https:example.org", referrer_url),
+ "https://example.org/",
+ );
+ assert_resolve(
+ import_map.resolve("https://///example.com///", referrer_url),
+ "https://example.com///",
+ );
+ }
+
+ #[test]
+ fn resolve_unmapped_bad_specifiers() {
+ let referrer_url = "https://example.com/js/script.ts";
+ let import_map = get_empty_import_map();
+
+ // Should fail for absolute non-fetch-scheme URLs.
+ assert!(import_map.resolve("about:good", referrer_url).is_err());
+ assert!(import_map.resolve("mailto:bad", referrer_url).is_err());
+ assert!(import_map.resolve("import:bad", referrer_url).is_err());
+ assert!(import_map.resolve("javascript:bad", referrer_url).is_err());
+ assert!(import_map.resolve("wss:bad", referrer_url).is_err());
+
+    // Should fail for strings not parseable as absolute URLs and not starting with ./, ../ or /.
+ assert!(import_map.resolve("foo", referrer_url).is_err());
+ assert!(import_map.resolve("\\foo", referrer_url).is_err());
+ assert!(import_map.resolve(":foo", referrer_url).is_err());
+ assert!(import_map.resolve("@foo", referrer_url).is_err());
+ assert!(import_map.resolve("%2E/foo", referrer_url).is_err());
+ assert!(import_map.resolve("%2E%2Efoo", referrer_url).is_err());
+ assert!(import_map.resolve(".%2Efoo", referrer_url).is_err());
+ assert!(import_map
+ .resolve("https://ex ample.org", referrer_url)
+ .is_err());
+ assert!(import_map
+ .resolve("https://example.org:deno", referrer_url)
+ .is_err());
+ assert!(import_map
+ .resolve("https://[example.org]", referrer_url)
+ .is_err());
+ }
+
+ #[test]
+ fn resolve_imports_mapped() {
+ let base_url = "https://example.com/app/main.ts";
+ let referrer_url = "https://example.com/js/script.ts";
+
+ // Should fail when mapping is to an empty array.
+ let json_map = r#"{
+ "imports": {
+ "moment": null,
+ "lodash": []
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+
+ assert!(import_map.resolve("moment", referrer_url).is_err());
+ assert!(import_map.resolve("lodash", referrer_url).is_err());
+ }
+
+ #[test]
+ fn resolve_imports_package_like_modules() {
+ let base_url = "https://example.com/app/main.ts";
+ let referrer_url = "https://example.com/js/script.ts";
+
+ let json_map = r#"{
+ "imports": {
+ "moment": "/deps/moment/src/moment.js",
+ "moment/": "/deps/moment/src/",
+ "lodash-dot": "./deps/lodash-es/lodash.js",
+ "lodash-dot/": "./deps/lodash-es/",
+ "lodash-dotdot": "../deps/lodash-es/lodash.js",
+ "lodash-dotdot/": "../deps/lodash-es/",
+ "nowhere/": []
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+
+ // Should work for package main modules.
+ assert_resolve(
+ import_map.resolve("moment", referrer_url),
+ "https://example.com/deps/moment/src/moment.js",
+ );
+ assert_resolve(
+ import_map.resolve("lodash-dot", referrer_url),
+ "https://example.com/app/deps/lodash-es/lodash.js",
+ );
+ assert_resolve(
+ import_map.resolve("lodash-dotdot", referrer_url),
+ "https://example.com/deps/lodash-es/lodash.js",
+ );
+
+ // Should work for package submodules.
+ assert_resolve(
+ import_map.resolve("moment/foo", referrer_url),
+ "https://example.com/deps/moment/src/foo",
+ );
+ assert_resolve(
+ import_map.resolve("lodash-dot/foo", referrer_url),
+ "https://example.com/app/deps/lodash-es/foo",
+ );
+ assert_resolve(
+ import_map.resolve("lodash-dotdot/foo", referrer_url),
+ "https://example.com/deps/lodash-es/foo",
+ );
+
+ // Should work for package names that end in a slash.
+ assert_resolve(
+ import_map.resolve("moment/", referrer_url),
+ "https://example.com/deps/moment/src/",
+ );
+
+ // Should fail for package modules that are not declared.
+ assert!(import_map.resolve("underscore/", referrer_url).is_err());
+ assert!(import_map.resolve("underscore/foo", referrer_url).is_err());
+
+ // Should fail for package submodules that map to nowhere.
+ assert!(import_map.resolve("nowhere/foo", referrer_url).is_err());
+ }
+
+ #[test]
+ fn resolve_imports_tricky_specifiers() {
+ let base_url = "https://example.com/app/main.ts";
+ let referrer_url = "https://example.com/js/script.ts";
+
+ let json_map = r#"{
+ "imports": {
+ "package/withslash": "/deps/package-with-slash/index.mjs",
+ "not-a-package": "/lib/not-a-package.mjs",
+ ".": "/lib/dot.mjs",
+ "..": "/lib/dotdot.mjs",
+ "..\\\\": "/lib/dotdotbackslash.mjs",
+ "%2E": "/lib/percent2e.mjs",
+ "%2F": "/lib/percent2f.mjs"
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+
+ // Should work for explicitly-mapped specifiers that happen to have a slash.
+ assert_resolve(
+ import_map.resolve("package/withslash", referrer_url),
+ "https://example.com/deps/package-with-slash/index.mjs",
+ );
+
+ // Should work when the specifier has punctuation.
+ assert_resolve(
+ import_map.resolve(".", referrer_url),
+ "https://example.com/lib/dot.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("..", referrer_url),
+ "https://example.com/lib/dotdot.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("..\\\\", referrer_url),
+ "https://example.com/lib/dotdotbackslash.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("%2E", referrer_url),
+ "https://example.com/lib/percent2e.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("%2F", referrer_url),
+ "https://example.com/lib/percent2f.mjs",
+ );
+
+ // Should fail for attempting to get a submodule of something not declared with a trailing slash.
+ assert!(import_map
+ .resolve("not-a-package/foo", referrer_url)
+ .is_err());
+ }
+
+ #[test]
+ fn resolve_imports_url_like_specifier() {
+ let base_url = "https://example.com/app/main.ts";
+ let referrer_url = "https://example.com/js/script.ts";
+
+ let json_map = r#"{
+ "imports": {
+ "/node_modules/als-polyfill/index.mjs": "std:kv-storage",
+ "/lib/foo.mjs": "./more/bar.mjs",
+ "./dotrelative/foo.mjs": "/lib/dot.mjs",
+ "../dotdotrelative/foo.mjs": "/lib/dotdot.mjs",
+ "/lib/no.mjs": null,
+ "./dotrelative/no.mjs": [],
+ "/": "/lib/slash-only/",
+ "./": "/lib/dotslash-only/",
+ "/test/": "/lib/url-trailing-slash/",
+ "./test/": "/lib/url-trailing-slash-dot/",
+ "/test": "/lib/test1.mjs",
+ "../test": "/lib/test2.mjs"
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+
+ // Should remap to other URLs.
+ assert_resolve(
+ import_map.resolve("https://example.com/lib/foo.mjs", referrer_url),
+ "https://example.com/app/more/bar.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("https://///example.com/lib/foo.mjs", referrer_url),
+ "https://example.com/app/more/bar.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("/lib/foo.mjs", referrer_url),
+ "https://example.com/app/more/bar.mjs",
+ );
+ assert_resolve(
+ import_map
+ .resolve("https://example.com/app/dotrelative/foo.mjs", referrer_url),
+ "https://example.com/lib/dot.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("../app/dotrelative/foo.mjs", referrer_url),
+ "https://example.com/lib/dot.mjs",
+ );
+ assert_resolve(
+ import_map
+ .resolve("https://example.com/dotdotrelative/foo.mjs", referrer_url),
+ "https://example.com/lib/dotdot.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("../dotdotrelative/foo.mjs", referrer_url),
+ "https://example.com/lib/dotdot.mjs",
+ );
+
+ // Should fail for URLs that remap to empty arrays.
+ assert!(import_map
+ .resolve("https://example.com/lib/no.mjs", referrer_url)
+ .is_err());
+ assert!(import_map.resolve("/lib/no.mjs", referrer_url).is_err());
+ assert!(import_map.resolve("../lib/no.mjs", referrer_url).is_err());
+ assert!(import_map
+ .resolve("https://example.com/app/dotrelative/no.mjs", referrer_url)
+ .is_err());
+ assert!(import_map
+ .resolve("/app/dotrelative/no.mjs", referrer_url)
+ .is_err());
+ assert!(import_map
+ .resolve("../app/dotrelative/no.mjs", referrer_url)
+ .is_err());
+
+ // Should remap URLs that are just composed from / and ..
+ assert_resolve(
+ import_map.resolve("https://example.com/", referrer_url),
+ "https://example.com/lib/slash-only/",
+ );
+ assert_resolve(
+ import_map.resolve("/", referrer_url),
+ "https://example.com/lib/slash-only/",
+ );
+ assert_resolve(
+ import_map.resolve("../", referrer_url),
+ "https://example.com/lib/slash-only/",
+ );
+ assert_resolve(
+ import_map.resolve("https://example.com/app/", referrer_url),
+ "https://example.com/lib/dotslash-only/",
+ );
+ assert_resolve(
+ import_map.resolve("/app/", referrer_url),
+ "https://example.com/lib/dotslash-only/",
+ );
+ assert_resolve(
+ import_map.resolve("../app/", referrer_url),
+ "https://example.com/lib/dotslash-only/",
+ );
+
+ // Should remap URLs that are prefix-matched by keys with trailing slashes.
+ assert_resolve(
+ import_map.resolve("/test/foo.mjs", referrer_url),
+ "https://example.com/lib/url-trailing-slash/foo.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("https://example.com/app/test/foo.mjs", referrer_url),
+ "https://example.com/lib/url-trailing-slash-dot/foo.mjs",
+ );
+
+ // Should use the last entry's address when URL-like specifiers parse to the same absolute URL.
+ //
+    // NOTE: this works properly because of the "preserve_order" feature flag of the "serde_json" crate.
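+    // Both "/test" and "../test" normalize to https://example.com/test, so the
+    // entry that appears last in the JSON ("../test" -> "/lib/test2.mjs") wins.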
+ assert_resolve(
+ import_map.resolve("/test", referrer_url),
+ "https://example.com/lib/test2.mjs",
+ );
+ }
+
+ #[test]
+ fn resolve_imports_overlapping_entities_with_trailing_slashes() {
+ let base_url = "https://example.com/app/main.ts";
+ let referrer_url = "https://example.com/js/script.ts";
+
+ // Should favor the most-specific key (no empty arrays).
+ {
+ let json_map = r#"{
+ "imports": {
+ "a": "/1",
+ "a/": "/2/",
+ "a/b": "/3",
+ "a/b/": "/4/"
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+
+ assert_resolve(
+ import_map.resolve("a", referrer_url),
+ "https://example.com/1",
+ );
+ assert_resolve(
+ import_map.resolve("a/", referrer_url),
+ "https://example.com/2/",
+ );
+ assert_resolve(
+ import_map.resolve("a/b", referrer_url),
+ "https://example.com/3",
+ );
+ assert_resolve(
+ import_map.resolve("a/b/", referrer_url),
+ "https://example.com/4/",
+ );
+ assert_resolve(
+ import_map.resolve("a/b/c", referrer_url),
+ "https://example.com/4/c",
+ );
+ }
+
+ // Should favor the most-specific key when empty arrays are involved for less-specific keys.
+ {
+ let json_map = r#"{
+ "imports": {
+ "a": [],
+ "a/": [],
+ "a/b": "/3",
+ "a/b/": "/4/"
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+
+ assert!(import_map.resolve("a", referrer_url).is_err());
+ assert!(import_map.resolve("a/", referrer_url).is_err());
+ assert!(import_map.resolve("a/x", referrer_url).is_err());
+ assert_resolve(
+ import_map.resolve("a/b", referrer_url),
+ "https://example.com/3",
+ );
+ assert_resolve(
+ import_map.resolve("a/b/", referrer_url),
+ "https://example.com/4/",
+ );
+ assert_resolve(
+ import_map.resolve("a/b/c", referrer_url),
+ "https://example.com/4/c",
+ );
+ assert!(import_map.resolve("a/x/c", referrer_url).is_err());
+ }
+ }
+
+ #[test]
+ fn resolve_scopes_map_to_empty_array() {
+ let base_url = "https://example.com/app/main.ts";
+ let referrer_url = "https://example.com/js";
+
+ let json_map = r#"{
+ "scopes": {
+ "/js/": {
+ "moment": "null",
+ "lodash": []
+ }
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+
+ assert!(import_map.resolve("moment", referrer_url).is_err());
+ assert!(import_map.resolve("lodash", referrer_url).is_err());
+ }
+
+ #[test]
+ fn resolve_scopes_exact_vs_prefix_matching() {
+ let base_url = "https://example.com/app/main.ts";
+
+ let json_map = r#"{
+ "scopes": {
+ "/js": {
+ "moment": "/only-triggered-by-exact/moment",
+ "moment/": "/only-triggered-by-exact/moment/"
+ },
+ "/js/": {
+ "moment": "/triggered-by-any-subpath/moment",
+ "moment/": "/triggered-by-any-subpath/moment/"
+ }
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+
+ let js_non_dir = "https://example.com/js";
+ let js_in_dir = "https://example.com/js/app.mjs";
+ let with_js_prefix = "https://example.com/jsiscool";
+
+ assert_resolve(
+ import_map.resolve("moment", js_non_dir),
+ "https://example.com/only-triggered-by-exact/moment",
+ );
+ assert_resolve(
+ import_map.resolve("moment/foo", js_non_dir),
+ "https://example.com/only-triggered-by-exact/moment/foo",
+ );
+ assert_resolve(
+ import_map.resolve("moment", js_in_dir),
+ "https://example.com/triggered-by-any-subpath/moment",
+ );
+ assert_resolve(
+ import_map.resolve("moment/foo", js_in_dir),
+ "https://example.com/triggered-by-any-subpath/moment/foo",
+ );
+ assert!(import_map.resolve("moment", with_js_prefix).is_err());
+ assert!(import_map.resolve("moment/foo", with_js_prefix).is_err());
+ }
+
+ #[test]
+ fn resolve_scopes_only_exact_in_map() {
+ let base_url = "https://example.com/app/main.ts";
+
+ let json_map = r#"{
+ "scopes": {
+ "/js": {
+ "moment": "/only-triggered-by-exact/moment",
+ "moment/": "/only-triggered-by-exact/moment/"
+ }
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+
+ // Should match correctly when only an exact match is in the map.
+ let js_non_dir = "https://example.com/js";
+ let js_in_dir = "https://example.com/js/app.mjs";
+ let with_js_prefix = "https://example.com/jsiscool";
+
+ assert_resolve(
+ import_map.resolve("moment", js_non_dir),
+ "https://example.com/only-triggered-by-exact/moment",
+ );
+ assert_resolve(
+ import_map.resolve("moment/foo", js_non_dir),
+ "https://example.com/only-triggered-by-exact/moment/foo",
+ );
+ assert!(import_map.resolve("moment", js_in_dir).is_err());
+ assert!(import_map.resolve("moment/foo", js_in_dir).is_err());
+ assert!(import_map.resolve("moment", with_js_prefix).is_err());
+ assert!(import_map.resolve("moment/foo", with_js_prefix).is_err());
+ }
+
+ #[test]
+ fn resolve_scopes_only_prefix_in_map() {
+ let base_url = "https://example.com/app/main.ts";
+
+ let json_map = r#"{
+ "scopes": {
+ "/js/": {
+ "moment": "/triggered-by-any-subpath/moment",
+ "moment/": "/triggered-by-any-subpath/moment/"
+ }
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+
+ // Should match correctly when only a prefix match is in the map.
+ let js_non_dir = "https://example.com/js";
+ let js_in_dir = "https://example.com/js/app.mjs";
+ let with_js_prefix = "https://example.com/jsiscool";
+
+ assert!(import_map.resolve("moment", js_non_dir).is_err());
+ assert!(import_map.resolve("moment/foo", js_non_dir).is_err());
+ assert_resolve(
+ import_map.resolve("moment", js_in_dir),
+ "https://example.com/triggered-by-any-subpath/moment",
+ );
+ assert_resolve(
+ import_map.resolve("moment/foo", js_in_dir),
+ "https://example.com/triggered-by-any-subpath/moment/foo",
+ );
+ assert!(import_map.resolve("moment", with_js_prefix).is_err());
+ assert!(import_map.resolve("moment/foo", with_js_prefix).is_err());
+ }
+
+ #[test]
+ fn resolve_scopes_package_like() {
+ let base_url = "https://example.com/app/main.ts";
+
+ let json_map = r#"{
+ "imports": {
+ "moment": "/node_modules/moment/src/moment.js",
+ "moment/": "/node_modules/moment/src/",
+ "lodash-dot": "./node_modules/lodash-es/lodash.js",
+ "lodash-dot/": "./node_modules/lodash-es/",
+ "lodash-dotdot": "../node_modules/lodash-es/lodash.js",
+ "lodash-dotdot/": "../node_modules/lodash-es/"
+ },
+ "scopes": {
+ "/": {
+ "moment": "/node_modules_3/moment/src/moment.js",
+ "vue": "/node_modules_3/vue/dist/vue.runtime.esm.js"
+ },
+ "/js/": {
+ "lodash-dot": "./node_modules_2/lodash-es/lodash.js",
+ "lodash-dot/": "./node_modules_2/lodash-es/",
+ "lodash-dotdot": "../node_modules_2/lodash-es/lodash.js",
+ "lodash-dotdot/": "../node_modules_2/lodash-es/"
+ }
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+
+    // Referrers: one inside the /js/ scope and one at the top level.
+ let js_in_dir = "https://example.com/js/app.mjs";
+ let top_level = "https://example.com/app.mjs";
+
+ // Should resolve scoped.
+ assert_resolve(
+ import_map.resolve("lodash-dot", js_in_dir),
+ "https://example.com/app/node_modules_2/lodash-es/lodash.js",
+ );
+ assert_resolve(
+ import_map.resolve("lodash-dotdot", js_in_dir),
+ "https://example.com/node_modules_2/lodash-es/lodash.js",
+ );
+ assert_resolve(
+ import_map.resolve("lodash-dot/foo", js_in_dir),
+ "https://example.com/app/node_modules_2/lodash-es/foo",
+ );
+ assert_resolve(
+ import_map.resolve("lodash-dotdot/foo", js_in_dir),
+ "https://example.com/node_modules_2/lodash-es/foo",
+ );
+
+ // Should apply best scope match.
+ assert_resolve(
+ import_map.resolve("moment", top_level),
+ "https://example.com/node_modules_3/moment/src/moment.js",
+ );
+ assert_resolve(
+ import_map.resolve("moment", js_in_dir),
+ "https://example.com/node_modules_3/moment/src/moment.js",
+ );
+ assert_resolve(
+ import_map.resolve("vue", js_in_dir),
+ "https://example.com/node_modules_3/vue/dist/vue.runtime.esm.js",
+ );
+
+    // Should fall back to "imports".
+ assert_resolve(
+ import_map.resolve("moment/foo", top_level),
+ "https://example.com/node_modules/moment/src/foo",
+ );
+ assert_resolve(
+ import_map.resolve("moment/foo", js_in_dir),
+ "https://example.com/node_modules/moment/src/foo",
+ );
+ assert_resolve(
+ import_map.resolve("lodash-dot", top_level),
+ "https://example.com/app/node_modules/lodash-es/lodash.js",
+ );
+ assert_resolve(
+ import_map.resolve("lodash-dotdot", top_level),
+ "https://example.com/node_modules/lodash-es/lodash.js",
+ );
+ assert_resolve(
+ import_map.resolve("lodash-dot/foo", top_level),
+ "https://example.com/app/node_modules/lodash-es/foo",
+ );
+ assert_resolve(
+ import_map.resolve("lodash-dotdot/foo", top_level),
+ "https://example.com/node_modules/lodash-es/foo",
+ );
+
+ // Should still fail for package-like specifiers that are not declared.
+ assert!(import_map.resolve("underscore/", js_in_dir).is_err());
+ assert!(import_map.resolve("underscore/foo", js_in_dir).is_err());
+ }
+
+ #[test]
+ fn resolve_scopes_inheritance() {
+ // https://github.com/WICG/import-maps#scope-inheritance
+ let base_url = "https://example.com/app/main.ts";
+
+ let json_map = r#"{
+ "imports": {
+ "a": "/a-1.mjs",
+ "b": "/b-1.mjs",
+ "c": "/c-1.mjs"
+ },
+ "scopes": {
+ "/scope2/": {
+ "a": "/a-2.mjs"
+ },
+ "/scope2/scope3/": {
+ "b": "/b-3.mjs"
+ }
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+
+ let scope_1_url = "https://example.com/scope1/foo.mjs";
+ let scope_2_url = "https://example.com/scope2/foo.mjs";
+ let scope_3_url = "https://example.com/scope2/scope3/foo.mjs";
+
+ // Should fall back to "imports" when none match.
+ assert_resolve(
+ import_map.resolve("a", scope_1_url),
+ "https://example.com/a-1.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("b", scope_1_url),
+ "https://example.com/b-1.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("c", scope_1_url),
+ "https://example.com/c-1.mjs",
+ );
+
+ // Should use a direct scope override.
+ assert_resolve(
+ import_map.resolve("a", scope_2_url),
+ "https://example.com/a-2.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("b", scope_2_url),
+ "https://example.com/b-1.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("c", scope_2_url),
+ "https://example.com/c-1.mjs",
+ );
+
+ // Should use an indirect scope override.
+ assert_resolve(
+ import_map.resolve("a", scope_3_url),
+ "https://example.com/a-2.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("b", scope_3_url),
+ "https://example.com/b-3.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("c", scope_3_url),
+ "https://example.com/c-1.mjs",
+ );
+ }
+
+ #[test]
+ fn resolve_scopes_relative_url_keys() {
+ // https://github.com/WICG/import-maps#scope-inheritance
+ let base_url = "https://example.com/app/main.ts";
+
+ let json_map = r#"{
+ "imports": {
+ "a": "/a-1.mjs",
+ "b": "/b-1.mjs",
+ "c": "/c-1.mjs"
+ },
+ "scopes": {
+ "": {
+ "a": "/a-empty-string.mjs"
+ },
+ "./": {
+ "b": "/b-dot-slash.mjs"
+ },
+ "../": {
+ "c": "/c-dot-dot-slash.mjs"
+ }
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+ let in_same_dir_as_map = "https://example.com/app/foo.mjs";
+ let in_dir_above_map = "https://example.com/foo.mjs";
+
+ // Should resolve an empty string scope using the import map URL.
+ assert_resolve(
+ import_map.resolve("a", base_url),
+ "https://example.com/a-empty-string.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("a", in_same_dir_as_map),
+ "https://example.com/a-1.mjs",
+ );
+
+ // Should resolve a ./ scope using the import map URL's directory.
+ assert_resolve(
+ import_map.resolve("b", base_url),
+ "https://example.com/b-dot-slash.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("b", in_same_dir_as_map),
+ "https://example.com/b-dot-slash.mjs",
+ );
+
+ // Should resolve a ../ scope using the import map URL's directory.
+ assert_resolve(
+ import_map.resolve("c", base_url),
+ "https://example.com/c-dot-dot-slash.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("c", in_same_dir_as_map),
+ "https://example.com/c-dot-dot-slash.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("c", in_dir_above_map),
+ "https://example.com/c-dot-dot-slash.mjs",
+ );
+ }
+
+ #[test]
+ fn cant_resolve_to_built_in() {
+ let base_url = "https://example.com/app/main.ts";
+
+ let import_map = ImportMap::from_json(base_url, "{}").unwrap();
+
+ assert!(import_map.resolve("std:blank", base_url).is_err());
+ }
+
+ #[test]
+ fn resolve_builtins_remap() {
+ let base_url = "https://example.com/app/main.ts";
+
+ let json_map = r#"{
+ "imports": {
+ "std:blank": "./blank.mjs",
+ "std:none": "./none.mjs"
+ }
+ }"#;
+ let import_map = ImportMap::from_json(base_url, json_map).unwrap();
+
+ assert_resolve(
+ import_map.resolve("std:blank", base_url),
+ "https://example.com/app/blank.mjs",
+ );
+ assert_resolve(
+ import_map.resolve("std:none", base_url),
+ "https://example.com/app/none.mjs",
+ );
+ }
+}
diff --git a/cli/js.rs b/cli/js.rs
new file mode 100644
index 000000000..b76e49391
--- /dev/null
+++ b/cli/js.rs
@@ -0,0 +1,57 @@
+pub const TS_VERSION: &str = env!("TS_VERSION");
+
+pub static CLI_SNAPSHOT: &[u8] =
+ include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.bin"));
+pub static CLI_SNAPSHOT_MAP: &[u8] =
+ include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.js.map"));
+#[allow(dead_code)]
+pub static CLI_SNAPSHOT_DTS: &[u8] =
+ include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.d.ts"));
+
+pub static COMPILER_SNAPSHOT: &[u8] =
+ include_bytes!(concat!(env!("OUT_DIR"), "/COMPILER_SNAPSHOT.bin"));
+pub static COMPILER_SNAPSHOT_MAP: &[u8] =
+ include_bytes!(concat!(env!("OUT_DIR"), "/COMPILER_SNAPSHOT.js.map"));
+#[allow(dead_code)]
+pub static COMPILER_SNAPSHOT_DTS: &[u8] =
+ include_bytes!(concat!(env!("OUT_DIR"), "/COMPILER_SNAPSHOT.d.ts"));
+
+static DENO_RUNTIME: &str = include_str!("js/lib.deno_runtime.d.ts");
+
+/// Same as deno_typescript::get_asset but also has lib.deno_runtime.d.ts
+pub fn get_asset(name: &str) -> Option<&'static str> {
+ match name {
+ "lib.deno_runtime.d.ts" => Some(DENO_RUNTIME),
+ _ => deno_typescript::get_asset(name),
+ }
+}
+
+#[test]
+fn cli_snapshot() {
+ let mut isolate =
+ deno::Isolate::new(deno::StartupData::Snapshot(CLI_SNAPSHOT), false);
+ deno::js_check(isolate.execute(
+ "<anon>",
+ r#"
+ if (!window) {
+ throw Error("bad");
+ }
+ console.log("we have console.log!!!");
+ "#,
+ ));
+}
+
+#[test]
+fn compiler_snapshot() {
+ let mut isolate =
+ deno::Isolate::new(deno::StartupData::Snapshot(COMPILER_SNAPSHOT), false);
+ deno::js_check(isolate.execute(
+ "<anon>",
+ r#"
+ if (!compilerMain) {
+ throw Error("bad");
+ }
+ console.log(`ts version: ${ts.version}`);
+ "#,
+ ));
+}
diff --git a/cli/js/base64.ts b/cli/js/base64.ts
new file mode 100644
index 000000000..4d30e00f1
--- /dev/null
+++ b/cli/js/base64.ts
@@ -0,0 +1,150 @@
+// Forked from https://github.com/beatgammit/base64-js
+// Copyright (c) 2014 Jameson Little. MIT License.
+
+const lookup: string[] = [];
+const revLookup: number[] = [];
+
+const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+for (let i = 0, len = code.length; i < len; ++i) {
+ lookup[i] = code[i];
+ revLookup[code.charCodeAt(i)] = i;
+}
+
+// Support decoding URL-safe base64 strings, as Node.js does.
+// See: https://en.wikipedia.org/wiki/Base64#URL_applications
+revLookup["-".charCodeAt(0)] = 62;
+revLookup["_".charCodeAt(0)] = 63;
+
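+// Returns [validLen, placeHoldersLen]; e.g. getLens("Zm9vYg==") yields [6, 2]:
+// six significant characters followed by two "=" placeholders, which decode to
+// the 4 bytes of "foob".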
+function getLens(b64: string): [number, number] {
+ const len = b64.length;
+
+ if (len % 4 > 0) {
+ throw new Error("Invalid string. Length must be a multiple of 4");
+ }
+
+ // Trim off extra bytes after placeholder bytes are found
+ // See: https://github.com/beatgammit/base64-js/issues/42
+ let validLen = b64.indexOf("=");
+ if (validLen === -1) validLen = len;
+
+ const placeHoldersLen = validLen === len ? 0 : 4 - (validLen % 4);
+
+ return [validLen, placeHoldersLen];
+}
+
+// A base64 string encodes each 3 bytes of data as 4 characters (4/3 the
+// original length), plus up to two "=" padding characters.
+export function byteLength(b64: string): number {
+ const lens = getLens(b64);
+ const validLen = lens[0];
+ const placeHoldersLen = lens[1];
+ return ((validLen + placeHoldersLen) * 3) / 4 - placeHoldersLen;
+}
+
+function _byteLength(
+ b64: string,
+ validLen: number,
+ placeHoldersLen: number
+): number {
+ return ((validLen + placeHoldersLen) * 3) / 4 - placeHoldersLen;
+}
+
+export function toByteArray(b64: string): Uint8Array {
+ let tmp;
+ const lens = getLens(b64);
+ const validLen = lens[0];
+ const placeHoldersLen = lens[1];
+
+ const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
+
+ let curByte = 0;
+
+ // if there are placeholders, only get up to the last complete 4 chars
+ const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
+
+ let i;
+ for (i = 0; i < len; i += 4) {
+ tmp =
+ (revLookup[b64.charCodeAt(i)] << 18) |
+ (revLookup[b64.charCodeAt(i + 1)] << 12) |
+ (revLookup[b64.charCodeAt(i + 2)] << 6) |
+ revLookup[b64.charCodeAt(i + 3)];
+ arr[curByte++] = (tmp >> 16) & 0xff;
+ arr[curByte++] = (tmp >> 8) & 0xff;
+ arr[curByte++] = tmp & 0xff;
+ }
+
+ if (placeHoldersLen === 2) {
+ tmp =
+ (revLookup[b64.charCodeAt(i)] << 2) |
+ (revLookup[b64.charCodeAt(i + 1)] >> 4);
+ arr[curByte++] = tmp & 0xff;
+ }
+
+ if (placeHoldersLen === 1) {
+ tmp =
+ (revLookup[b64.charCodeAt(i)] << 10) |
+ (revLookup[b64.charCodeAt(i + 1)] << 4) |
+ (revLookup[b64.charCodeAt(i + 2)] >> 2);
+ arr[curByte++] = (tmp >> 8) & 0xff;
+ arr[curByte++] = tmp & 0xff;
+ }
+
+ return arr;
+}
+
+function tripletToBase64(num: number): string {
+ return (
+ lookup[(num >> 18) & 0x3f] +
+ lookup[(num >> 12) & 0x3f] +
+ lookup[(num >> 6) & 0x3f] +
+ lookup[num & 0x3f]
+ );
+}
+
+function encodeChunk(uint8: Uint8Array, start: number, end: number): string {
+ let tmp;
+ const output = [];
+ for (let i = start; i < end; i += 3) {
+ tmp =
+ ((uint8[i] << 16) & 0xff0000) +
+ ((uint8[i + 1] << 8) & 0xff00) +
+ (uint8[i + 2] & 0xff);
+ output.push(tripletToBase64(tmp));
+ }
+ return output.join("");
+}
+
+export function fromByteArray(uint8: Uint8Array): string {
+ let tmp;
+ const len = uint8.length;
+ const extraBytes = len % 3; // if we have 1 byte left, pad 2 bytes
+ const parts = [];
+ const maxChunkLength = 16383; // must be multiple of 3
+
+  // Go through the array three bytes at a time; trailing bytes are handled below.
+ for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
+ parts.push(
+ encodeChunk(
+ uint8,
+ i,
+ i + maxChunkLength > len2 ? len2 : i + maxChunkLength
+ )
+ );
+ }
+
+ // pad the end with zeros, but make sure to not forget the extra bytes
+ if (extraBytes === 1) {
+ tmp = uint8[len - 1];
+ parts.push(lookup[tmp >> 2] + lookup[(tmp << 4) & 0x3f] + "==");
+ } else if (extraBytes === 2) {
+ tmp = (uint8[len - 2] << 8) + uint8[len - 1];
+ parts.push(
+ lookup[tmp >> 10] +
+ lookup[(tmp >> 4) & 0x3f] +
+ lookup[(tmp << 2) & 0x3f] +
+ "="
+ );
+ }
+
+ return parts.join("");
+}
diff --git a/cli/js/blob.ts b/cli/js/blob.ts
new file mode 100644
index 000000000..50ab7f374
--- /dev/null
+++ b/cli/js/blob.ts
@@ -0,0 +1,178 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import { containsOnlyASCII, hasOwnProperty } from "./util.ts";
+import { TextEncoder } from "./text_encoding.ts";
+import { build } from "./build.ts";
+
+export const bytesSymbol = Symbol("bytes");
+
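+// Replaces CR, LF and CRLF sequences with the platform's native line ending,
+// e.g. "a\r\nb\rc" becomes "a\nb\nc" on POSIX and "a\r\nb\r\nc" on Windows.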
+function convertLineEndingsToNative(s: string): string {
+ const nativeLineEnd = build.os == "win" ? "\r\n" : "\n";
+
+ let position = 0;
+
+ let collectionResult = collectSequenceNotCRLF(s, position);
+
+ let token = collectionResult.collected;
+ position = collectionResult.newPosition;
+
+ let result = token;
+
+ while (position < s.length) {
+ const c = s.charAt(position);
+ if (c == "\r") {
+ result += nativeLineEnd;
+ position++;
+ if (position < s.length && s.charAt(position) == "\n") {
+ position++;
+ }
+ } else if (c == "\n") {
+ position++;
+ result += nativeLineEnd;
+ }
+
+ collectionResult = collectSequenceNotCRLF(s, position);
+
+ token = collectionResult.collected;
+ position = collectionResult.newPosition;
+
+ result += token;
+ }
+
+ return result;
+}
+
+function collectSequenceNotCRLF(
+ s: string,
+ position: number
+): { collected: string; newPosition: number } {
+ const start = position;
+ for (
+ let c = s.charAt(position);
+ position < s.length && !(c == "\r" || c == "\n");
+ c = s.charAt(++position)
+ );
+ return { collected: s.slice(start, position), newPosition: position };
+}
+
+function toUint8Arrays(
+ blobParts: domTypes.BlobPart[],
+ doNormalizeLineEndingsToNative: boolean
+): Uint8Array[] {
+ const ret: Uint8Array[] = [];
+ const enc = new TextEncoder();
+ for (const element of blobParts) {
+ if (typeof element === "string") {
+ let str = element;
+ if (doNormalizeLineEndingsToNative) {
+ str = convertLineEndingsToNative(element);
+ }
+ ret.push(enc.encode(str));
+ // eslint-disable-next-line @typescript-eslint/no-use-before-define
+ } else if (element instanceof DenoBlob) {
+ ret.push(element[bytesSymbol]);
+ } else if (element instanceof Uint8Array) {
+ ret.push(element);
+ } else if (element instanceof Uint16Array) {
+ const uint8 = new Uint8Array(element.buffer);
+ ret.push(uint8);
+ } else if (element instanceof Uint32Array) {
+ const uint8 = new Uint8Array(element.buffer);
+ ret.push(uint8);
+ } else if (ArrayBuffer.isView(element)) {
+ // Convert view to Uint8Array.
+ const uint8 = new Uint8Array(element.buffer);
+ ret.push(uint8);
+ } else if (element instanceof ArrayBuffer) {
+ // Create a new Uint8Array view for the given ArrayBuffer.
+ const uint8 = new Uint8Array(element);
+ ret.push(uint8);
+ } else {
+ ret.push(enc.encode(String(element)));
+ }
+ }
+ return ret;
+}
+
+function processBlobParts(
+ blobParts: domTypes.BlobPart[],
+ options: domTypes.BlobPropertyBag
+): Uint8Array {
+ const normalizeLineEndingsToNative = options.ending === "native";
+  // ArrayBuffer.transfer is not yet implemented in V8, so we precompute the
+  // total size of the array buffer and allocate it up front instead of
+  // growing it dynamically.
+ const uint8Arrays = toUint8Arrays(blobParts, normalizeLineEndingsToNative);
+ const byteLength = uint8Arrays
+ .map((u8): number => u8.byteLength)
+ .reduce((a, b): number => a + b, 0);
+ const ab = new ArrayBuffer(byteLength);
+ const bytes = new Uint8Array(ab);
+
+  let cursor = 0;
+  for (const u8 of uint8Arrays) {
+    bytes.set(u8, cursor);
+    cursor += u8.byteLength;
+ }
+
+ return bytes;
+}
+
+// A WeakMap holding the blob-to-byte-array mapping.
+// Using a WeakMap ensures the mapping does not prevent blobs from being
+// garbage collected.
+export const blobBytesWeakMap = new WeakMap<domTypes.Blob, Uint8Array>();
+
+export class DenoBlob implements domTypes.Blob {
+ private readonly [bytesSymbol]: Uint8Array;
+ readonly size: number = 0;
+ readonly type: string = "";
+
+ /** A blob object represents a file-like object of immutable, raw data. */
+ constructor(
+ blobParts?: domTypes.BlobPart[],
+ options?: domTypes.BlobPropertyBag
+ ) {
+ if (arguments.length === 0) {
+ this[bytesSymbol] = new Uint8Array();
+ return;
+ }
+
+ options = options || {};
+ // Set ending property's default value to "transparent".
+ if (!hasOwnProperty(options, "ending")) {
+ options.ending = "transparent";
+ }
+
+ if (options.type && !containsOnlyASCII(options.type)) {
+ const errMsg = "The 'type' property must consist of ASCII characters.";
+ throw new SyntaxError(errMsg);
+ }
+
+ const bytes = processBlobParts(blobParts!, options);
+ // Normalize options.type.
+ let type = options.type ? options.type : "";
+ if (type.length) {
+ for (let i = 0; i < type.length; ++i) {
+ const char = type[i];
+ if (char < "\u0020" || char > "\u007E") {
+ type = "";
+ break;
+ }
+ }
+ type = type.toLowerCase();
+ }
+ // Set Blob object's properties.
+ this[bytesSymbol] = bytes;
+ this.size = bytes.byteLength;
+ this.type = type;
+
+ // Register bytes for internal private use.
+ blobBytesWeakMap.set(this, bytes);
+ }
+
+ slice(start?: number, end?: number, contentType?: string): DenoBlob {
+ return new DenoBlob([this[bytesSymbol].slice(start, end)], {
+ type: contentType || this.type
+ });
+ }
+}
diff --git a/cli/js/blob_test.ts b/cli/js/blob_test.ts
new file mode 100644
index 000000000..afa1182a9
--- /dev/null
+++ b/cli/js/blob_test.ts
@@ -0,0 +1,62 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assert, assertEquals } from "./test_util.ts";
+
+test(function blobString(): void {
+ const b1 = new Blob(["Hello World"]);
+ const str = "Test";
+ const b2 = new Blob([b1, str]);
+ assertEquals(b2.size, b1.size + str.length);
+});
+
+test(function blobBuffer(): void {
+ const buffer = new ArrayBuffer(12);
+ const u8 = new Uint8Array(buffer);
+ const f1 = new Float32Array(buffer);
+ const b1 = new Blob([buffer, u8]);
+ assertEquals(b1.size, 2 * u8.length);
+ const b2 = new Blob([b1, f1]);
+ assertEquals(b2.size, 3 * u8.length);
+});
+
+test(function blobSlice(): void {
+ const blob = new Blob(["Deno", "Foo"]);
+ const b1 = blob.slice(0, 3, "Text/HTML");
+ assert(b1 instanceof Blob);
+ assertEquals(b1.size, 3);
+ assertEquals(b1.type, "text/html");
+ const b2 = blob.slice(-1, 3);
+ assertEquals(b2.size, 0);
+ const b3 = blob.slice(100, 3);
+ assertEquals(b3.size, 0);
+ const b4 = blob.slice(0, 10);
+ assertEquals(b4.size, blob.size);
+});
+
+test(function blobShouldNotThrowError(): void {
+ let hasThrown = false;
+
+ try {
+ const options1: object = {
+ ending: "utf8",
+ hasOwnProperty: "hasOwnProperty"
+ };
+ const options2: object = Object.create(null);
+ new Blob(["Hello World"], options1);
+ new Blob(["Hello World"], options2);
+ } catch {
+ hasThrown = true;
+ }
+
+ assertEquals(hasThrown, false);
+});
+
+test(function nativeEndLine(): void {
+ const options: object = {
+ ending: "native"
+ };
+ const blob = new Blob(["Hello\nWorld"], options);
+
+ assertEquals(blob.size, Deno.build.os === "win" ? 12 : 11);
+});
+
+// TODO(qti3e) Test the stored data in a Blob after implementing FileReader API.
diff --git a/cli/js/body.ts b/cli/js/body.ts
new file mode 100644
index 000000000..6567b1934
--- /dev/null
+++ b/cli/js/body.ts
@@ -0,0 +1,272 @@
+import * as formData from "./form_data.ts";
+import * as blob from "./blob.ts";
+import * as encoding from "./text_encoding.ts";
+import * as headers from "./headers.ts";
+import * as domTypes from "./dom_types.ts";
+
+const { Headers } = headers;
+
+// only namespace imports work for now, plucking out what we need
+const { FormData } = formData;
+const { TextEncoder, TextDecoder } = encoding;
+const Blob = blob.DenoBlob;
+const DenoBlob = blob.DenoBlob;
+
+export type BodySource =
+ | domTypes.Blob
+ | domTypes.BufferSource
+ | domTypes.FormData
+ | domTypes.URLSearchParams
+ | domTypes.ReadableStream
+ | string;
+
+function validateBodyType(owner: Body, bodySource: BodySource): boolean {
+ if (
+ bodySource instanceof Int8Array ||
+ bodySource instanceof Int16Array ||
+ bodySource instanceof Int32Array ||
+ bodySource instanceof Uint8Array ||
+ bodySource instanceof Uint16Array ||
+ bodySource instanceof Uint32Array ||
+ bodySource instanceof Uint8ClampedArray ||
+ bodySource instanceof Float32Array ||
+ bodySource instanceof Float64Array
+ ) {
+ return true;
+ } else if (bodySource instanceof ArrayBuffer) {
+ return true;
+ } else if (typeof bodySource === "string") {
+ return true;
+ } else if (bodySource instanceof FormData) {
+ return true;
+ } else if (!bodySource) {
+ return true; // null body is fine
+ }
+ throw new Error(
+ `Bad ${owner.constructor.name} body type: ${bodySource.constructor.name}`
+ );
+}
+
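+// For example, 'form-data; name="field_1"; filename="a.txt"' yields a Map of
+// { name => "field_1", filename => "a.txt" }; the part before the first ";"
+// is dropped.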
+function getHeaderValueParams(value: string): Map<string, string> {
+ const params = new Map();
+ // Forced to do so for some Map constructor param mismatch
+ value
+ .split(";")
+ .slice(1)
+ .map((s): string[] => s.trim().split("="))
+ .filter((arr): boolean => arr.length > 1)
+ .map(([k, v]): [string, string] => [k, v.replace(/^"([^"]*)"$/, "$1")])
+ .forEach(([k, v]): Map<string, string> => params.set(k, v));
+ return params;
+}
+
+function hasHeaderValueOf(s: string, value: string): boolean {
+ return new RegExp(`^${value}[\t\s]*;?`).test(s);
+}
+
+export const BodyUsedError =
+ "Failed to execute 'clone' on 'Body': body is already used";
+
+export class Body implements domTypes.Body {
+ protected _stream: domTypes.ReadableStream | null;
+
+ constructor(protected _bodySource: BodySource, readonly contentType: string) {
+ validateBodyType(this, _bodySource);
+ this._bodySource = _bodySource;
+ this.contentType = contentType;
+ this._stream = null;
+ }
+
+ get body(): domTypes.ReadableStream | null {
+ if (this._stream) {
+ return this._stream;
+ }
+ if (typeof this._bodySource === "string") {
+ throw Error("not implemented");
+ }
+ return this._stream;
+ }
+
+ get bodyUsed(): boolean {
+ if (this.body && this.body.locked) {
+ return true;
+ }
+ return false;
+ }
+
+ public async blob(): Promise<domTypes.Blob> {
+ return new Blob([await this.arrayBuffer()]);
+ }
+
+ // ref: https://fetch.spec.whatwg.org/#body-mixin
+ public async formData(): Promise<domTypes.FormData> {
+ const formData = new FormData();
+ const enc = new TextEncoder();
+ if (hasHeaderValueOf(this.contentType, "multipart/form-data")) {
+ const params = getHeaderValueParams(this.contentType);
+ if (!params.has("boundary")) {
+ // TypeError is required by spec
+ throw new TypeError("multipart/form-data must provide a boundary");
+ }
+ // ref: https://tools.ietf.org/html/rfc2046#section-5.1
+ const boundary = params.get("boundary")!;
+ const dashBoundary = `--${boundary}`;
+ const delimiter = `\r\n${dashBoundary}`;
+ const closeDelimiter = `${delimiter}--`;
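+      // e.g. for boundary "xyz": dashBoundary == "--xyz",
+      // delimiter == "\r\n--xyz", closeDelimiter == "\r\n--xyz--".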
+
+ const body = await this.text();
+ let bodyParts: string[];
+ const bodyEpilogueSplit = body.split(closeDelimiter);
+ if (bodyEpilogueSplit.length < 2) {
+ bodyParts = [];
+ } else {
+ // discard epilogue
+ const bodyEpilogueTrimmed = bodyEpilogueSplit[0];
+          // The first boundary is treated specially due to the optional prefixed \r\n.
+ const firstBoundaryIndex = bodyEpilogueTrimmed.indexOf(dashBoundary);
+ if (firstBoundaryIndex < 0) {
+ throw new TypeError("Invalid boundary");
+ }
+ const bodyPreambleTrimmed = bodyEpilogueTrimmed
+ .slice(firstBoundaryIndex + dashBoundary.length)
+ .replace(/^[\s\r\n\t]+/, ""); // remove transport-padding CRLF
+ // trimStart might not be available
+ // Be careful! body-part allows trailing \r\n!
+ // (as long as it is not part of `delimiter`)
+ bodyParts = bodyPreambleTrimmed
+ .split(delimiter)
+ .map((s): string => s.replace(/^[\s\r\n\t]+/, ""));
+ // TODO: LWSP definition is actually trickier,
+ // but should be fine in our case since without headers
+ // we should just discard the part
+ }
+ for (const bodyPart of bodyParts) {
+ const headers = new Headers();
+        const headerOctetSeparatorIndex = bodyPart.indexOf("\r\n\r\n");
+        if (headerOctetSeparatorIndex < 0) {
+          continue; // Skip unknown part
+        }
+        const headerText = bodyPart.slice(0, headerOctetSeparatorIndex);
+        const octets = bodyPart.slice(headerOctetSeparatorIndex + 4);
+
+ // TODO: use textproto.readMIMEHeader from deno_std
+ const rawHeaders = headerText.split("\r\n");
+ for (const rawHeader of rawHeaders) {
+ const sepIndex = rawHeader.indexOf(":");
+ if (sepIndex < 0) {
+ continue; // Skip this header
+ }
+ const key = rawHeader.slice(0, sepIndex);
+ const value = rawHeader.slice(sepIndex + 1);
+ headers.set(key, value);
+ }
+ if (!headers.has("content-disposition")) {
+ continue; // Skip unknown part
+ }
+ // Content-Transfer-Encoding Deprecated
+ const contentDisposition = headers.get("content-disposition")!;
+ const partContentType = headers.get("content-type") || "text/plain";
+ // TODO: custom charset encoding (needs TextEncoder support)
+ // const contentTypeCharset =
+ // getHeaderValueParams(partContentType).get("charset") || "";
+ if (!hasHeaderValueOf(contentDisposition, "form-data")) {
+ continue; // Skip, might not be form-data
+ }
+ const dispositionParams = getHeaderValueParams(contentDisposition);
+ if (!dispositionParams.has("name")) {
+ continue; // Skip, unknown name
+ }
+ const dispositionName = dispositionParams.get("name")!;
+ if (dispositionParams.has("filename")) {
+ const filename = dispositionParams.get("filename")!;
+ const blob = new DenoBlob([enc.encode(octets)], {
+ type: partContentType
+ });
+ // TODO: based on spec
+ // https://xhr.spec.whatwg.org/#dom-formdata-append
+ // https://xhr.spec.whatwg.org/#create-an-entry
+ // Currently it does not mention how I could pass content-type
+ // to the internally created file object...
+ formData.append(dispositionName, blob, filename);
+ } else {
+ formData.append(dispositionName, octets);
+ }
+ }
+ return formData;
+ } else if (
+ hasHeaderValueOf(this.contentType, "application/x-www-form-urlencoded")
+ ) {
+ // From https://github.com/github/fetch/blob/master/fetch.js
+ // Copyright (c) 2014-2016 GitHub, Inc. MIT License
+ const body = await this.text();
+ try {
+ body
+ .trim()
+ .split("&")
+ .forEach(
+ (bytes): void => {
+ if (bytes) {
+ const split = bytes.split("=");
+ const name = split.shift()!.replace(/\+/g, " ");
+ const value = split.join("=").replace(/\+/g, " ");
+ formData.append(
+ decodeURIComponent(name),
+ decodeURIComponent(value)
+ );
+ }
+ }
+ );
+ } catch (e) {
+ throw new TypeError("Invalid form urlencoded format");
+ }
+ return formData;
+ } else {
+ throw new TypeError("Invalid form data");
+ }
+ }
+
+ public async text(): Promise<string> {
+ if (typeof this._bodySource === "string") {
+ return this._bodySource;
+ }
+
+ const ab = await this.arrayBuffer();
+ const decoder = new TextDecoder("utf-8");
+ return decoder.decode(ab);
+ }
+
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ public async json(): Promise<any> {
+ const raw = await this.text();
+ return JSON.parse(raw);
+ }
+
+ public async arrayBuffer(): Promise<ArrayBuffer> {
+ if (
+ this._bodySource instanceof Int8Array ||
+ this._bodySource instanceof Int16Array ||
+ this._bodySource instanceof Int32Array ||
+ this._bodySource instanceof Uint8Array ||
+ this._bodySource instanceof Uint16Array ||
+ this._bodySource instanceof Uint32Array ||
+ this._bodySource instanceof Uint8ClampedArray ||
+ this._bodySource instanceof Float32Array ||
+ this._bodySource instanceof Float64Array
+ ) {
+ return this._bodySource.buffer as ArrayBuffer;
+ } else if (this._bodySource instanceof ArrayBuffer) {
+ return this._bodySource;
+ } else if (typeof this._bodySource === "string") {
+ const enc = new TextEncoder();
+ return enc.encode(this._bodySource).buffer as ArrayBuffer;
+ } else if (this._bodySource instanceof FormData) {
+ const enc = new TextEncoder();
+ return enc.encode(this._bodySource.toString()).buffer as ArrayBuffer;
+ } else if (!this._bodySource) {
+ return new ArrayBuffer(0);
+ }
+ throw new Error(
+ `Body type not yet implemented: ${this._bodySource.constructor.name}`
+ );
+ }
+}
diff --git a/cli/js/body_test.ts b/cli/js/body_test.ts
new file mode 100644
index 000000000..ec76e9072
--- /dev/null
+++ b/cli/js/body_test.ts
@@ -0,0 +1,68 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, testPerm, assertEquals, assert } from "./test_util.ts";
+
+// just a hack to get a body object
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function buildBody(body: any): Body {
+ const stub = new Request("", {
+ body: body
+ });
+ return stub as Body;
+}
+
+const intArrays = [
+ Int8Array,
+ Int16Array,
+ Int32Array,
+ Uint8Array,
+ Uint16Array,
+ Uint32Array,
+ Uint8ClampedArray,
+ Float32Array,
+ Float64Array
+];
+test(async function arrayBufferFromByteArrays(): Promise<void> {
+ const buffer = new TextEncoder().encode("ahoyhoy8").buffer;
+
+ for (const type of intArrays) {
+ const body = buildBody(new type(buffer));
+ const text = new TextDecoder("utf-8").decode(await body.arrayBuffer());
+ assertEquals(text, "ahoyhoy8");
+ }
+});
+
+// FormData
+testPerm({ net: true }, async function bodyMultipartFormData(): Promise<void> {
+ const response = await fetch(
+ "http://localhost:4545/tests/subdir/multipart_form_data.txt"
+ );
+ const text = await response.text();
+
+ const body = buildBody(text);
+
+ // @ts-ignore
+ body.contentType = "multipart/form-data;boundary=boundary";
+
+ const formData = await body.formData();
+ assert(formData.has("field_1"));
+ assertEquals(formData.get("field_1").toString(), "value_1 \r\n");
+ assert(formData.has("field_2"));
+});
+
+testPerm({ net: true }, async function bodyURLEncodedFormData(): Promise<void> {
+ const response = await fetch(
+ "http://localhost:4545/tests/subdir/form_urlencoded.txt"
+ );
+ const text = await response.text();
+
+ const body = buildBody(text);
+
+ // @ts-ignore
+ body.contentType = "application/x-www-form-urlencoded";
+
+ const formData = await body.formData();
+ assert(formData.has("field_1"));
+ assertEquals(formData.get("field_1").toString(), "Hi");
+ assert(formData.has("field_2"));
+ assertEquals(formData.get("field_2").toString(), "<Deno>");
+});
diff --git a/cli/js/buffer.ts b/cli/js/buffer.ts
new file mode 100644
index 000000000..dc73b7e60
--- /dev/null
+++ b/cli/js/buffer.ts
@@ -0,0 +1,294 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+// This code has been ported almost directly from Go's src/bytes/buffer.go
+// Copyright 2009 The Go Authors. All rights reserved. BSD license.
+// https://github.com/golang/go/blob/master/LICENSE
+
+import { Reader, Writer, EOF, SyncReader, SyncWriter } from "./io.ts";
+import { assert } from "./util.ts";
+import { TextDecoder } from "./text_encoding.ts";
+import { DenoError, ErrorKind } from "./errors.ts";
+
+// MIN_READ is the minimum ArrayBuffer size passed to a read call by
+// buffer.ReadFrom. As long as the Buffer has at least MIN_READ bytes beyond
+// what is required to hold the contents of r, readFrom() will not grow the
+// underlying buffer.
+const MIN_READ = 512;
+const MAX_SIZE = 2 ** 32 - 2;
+
+// `off` is the offset into `dst` at which to begin writing values from `src`.
+// Returns the number of bytes copied.
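+// e.g. copyBytes(new Uint8Array(4), Uint8Array.of(1, 2, 3), 2) copies only the
+// 2 bytes that fit and returns 2.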
+function copyBytes(dst: Uint8Array, src: Uint8Array, off = 0): number {
+ const r = dst.byteLength - off;
+ if (src.byteLength > r) {
+ src = src.subarray(0, r);
+ }
+ dst.set(src, off);
+ return src.byteLength;
+}
+
+/** A Buffer is a variable-sized buffer of bytes with read() and write()
+ * methods. Based on https://golang.org/pkg/bytes/#Buffer
+ */
+export class Buffer implements Reader, SyncReader, Writer, SyncWriter {
+ private buf: Uint8Array; // contents are the bytes buf[off : len(buf)]
+ private off = 0; // read at buf[off], write at buf[buf.byteLength]
+
+ constructor(ab?: ArrayBuffer) {
+ if (ab == null) {
+ this.buf = new Uint8Array(0);
+ return;
+ }
+
+ this.buf = new Uint8Array(ab);
+ }
+
+ /** bytes() returns a slice holding the unread portion of the buffer.
+ * The slice is valid for use only until the next buffer modification (that
+ * is, only until the next call to a method like read(), write(), reset(), or
+ * truncate()). The slice aliases the buffer content at least until the next
+ * buffer modification, so immediate changes to the slice will affect the
+ * result of future reads.
+ */
+ bytes(): Uint8Array {
+ return this.buf.subarray(this.off);
+ }
+
+ /** toString() returns the contents of the unread portion of the buffer
+ * as a string. Warning - if multibyte characters are present when data is
+ * flowing through the buffer, this method may result in incorrect strings
+ * due to a character being split.
+ */
+ toString(): string {
+ const decoder = new TextDecoder();
+ return decoder.decode(this.buf.subarray(this.off));
+ }
+
+ /** empty() returns whether the unread portion of the buffer is empty. */
+ empty(): boolean {
+ return this.buf.byteLength <= this.off;
+ }
+
+ /** length is a getter that returns the number of bytes of the unread
+ * portion of the buffer
+ */
+ get length(): number {
+ return this.buf.byteLength - this.off;
+ }
+
+ /** Returns the capacity of the buffer's underlying byte slice, that is,
+ * the total space allocated for the buffer's data.
+ */
+ get capacity(): number {
+ return this.buf.buffer.byteLength;
+ }
+
+ /** truncate() discards all but the first n unread bytes from the buffer but
+ * continues to use the same allocated storage. It throws if n is negative or
+ * greater than the length of the buffer.
+ */
+ truncate(n: number): void {
+ if (n === 0) {
+ this.reset();
+ return;
+ }
+ if (n < 0 || n > this.length) {
+ throw Error("bytes.Buffer: truncation out of range");
+ }
+ this._reslice(this.off + n);
+ }
+
+ /** reset() resets the buffer to be empty, but it retains the underlying
+ * storage for use by future writes. reset() is the same as truncate(0)
+ */
+ reset(): void {
+ this._reslice(0);
+ this.off = 0;
+ }
+
+ /** _tryGrowByReslice() is a version of grow for the fast-case
+ * where the internal buffer only needs to be resliced. It returns the index
+ * where bytes should be written and whether it succeeded.
+ * It returns -1 if a reslice was not needed.
+ */
+ private _tryGrowByReslice(n: number): number {
+ const l = this.buf.byteLength;
+ if (n <= this.capacity - l) {
+ this._reslice(l + n);
+ return l;
+ }
+ return -1;
+ }
+
+ private _reslice(len: number): void {
+ assert(len <= this.buf.buffer.byteLength);
+ this.buf = new Uint8Array(this.buf.buffer, 0, len);
+ }
+
+ /** readSync() reads the next len(p) bytes from the buffer or until the buffer
+   * is drained. The return value n is the number of bytes read. If the
+   * buffer has no data to return, EOF is returned.
+ */
+ readSync(p: Uint8Array): number | EOF {
+ if (this.empty()) {
+ // Buffer is empty, reset to recover space.
+ this.reset();
+ if (p.byteLength === 0) {
+ // this edge case is tested in 'bufferReadEmptyAtEOF' test
+ return 0;
+ }
+ return EOF;
+ }
+ const nread = copyBytes(p, this.buf.subarray(this.off));
+ this.off += nread;
+ return nread;
+ }
+
+ async read(p: Uint8Array): Promise<number | EOF> {
+ const rr = this.readSync(p);
+ return Promise.resolve(rr);
+ }
+
+ writeSync(p: Uint8Array): number {
+ const m = this._grow(p.byteLength);
+ return copyBytes(this.buf, p, m);
+ }
+
+ async write(p: Uint8Array): Promise<number> {
+ const n = this.writeSync(p);
+ return Promise.resolve(n);
+ }
+
+ /** _grow() grows the buffer to guarantee space for n more bytes.
+ * It returns the index where bytes should be written.
+ * If the buffer can't grow it will throw with ErrTooLarge.
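+   * For example, growing a full 8-byte buffer by 4 more bytes allocates a
+   * new 2*8+4 = 20-byte backing array before copying the unread bytes over.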
+ */
+ private _grow(n: number): number {
+ const m = this.length;
+ // If buffer is empty, reset to recover space.
+ if (m === 0 && this.off !== 0) {
+ this.reset();
+ }
+ // Fast: Try to grow by means of a reslice.
+ const i = this._tryGrowByReslice(n);
+ if (i >= 0) {
+ return i;
+ }
+ const c = this.capacity;
+ if (n <= Math.floor(c / 2) - m) {
+ // We can slide things down instead of allocating a new
+ // ArrayBuffer. We only need m+n <= c to slide, but
+ // we instead let capacity get twice as large so we
+ // don't spend all our time copying.
+ copyBytes(this.buf, this.buf.subarray(this.off));
+ } else if (c > MAX_SIZE - c - n) {
+ throw new DenoError(
+ ErrorKind.TooLarge,
+ "The buffer cannot be grown beyond the maximum size."
+ );
+ } else {
+ // Not enough space anywhere, we need to allocate.
+ const buf = new Uint8Array(2 * c + n);
+ copyBytes(buf, this.buf.subarray(this.off));
+ this.buf = buf;
+ }
+ // Restore this.off and len(this.buf).
+ this.off = 0;
+ this._reslice(m + n);
+ return m;
+ }
+
+ /** grow() grows the buffer's capacity, if necessary, to guarantee space for
+ * another n bytes. After grow(n), at least n bytes can be written to the
+ * buffer without another allocation. If n is negative, grow() will throw. If
+ * the buffer can't grow it will throw ErrTooLarge.
+ * Based on https://golang.org/pkg/bytes/#Buffer.Grow
+ */
+ grow(n: number): void {
+ if (n < 0) {
+ throw Error("Buffer.grow: negative count");
+ }
+ const m = this._grow(n);
+ this._reslice(m);
+ }
+
+ /** readFrom() reads data from r until EOF and appends it to the buffer,
+ * growing the buffer as needed. It returns the number of bytes read. If the
+ * buffer becomes too large, readFrom will throw ErrTooLarge.
+ * Based on https://golang.org/pkg/bytes/#Buffer.ReadFrom
+ */
+ async readFrom(r: Reader): Promise<number> {
+ let n = 0;
+ while (true) {
+ try {
+ const i = this._grow(MIN_READ);
+ this._reslice(i);
+ const fub = new Uint8Array(this.buf.buffer, i);
+ const nread = await r.read(fub);
+ if (nread === EOF) {
+ return n;
+ }
+ this._reslice(i + nread);
+ n += nread;
+ } catch (e) {
+ return n;
+ }
+ }
+ }
+
+ /** Sync version of `readFrom`
+ */
+ readFromSync(r: SyncReader): number {
+ let n = 0;
+ while (true) {
+ try {
+ const i = this._grow(MIN_READ);
+ this._reslice(i);
+ const fub = new Uint8Array(this.buf.buffer, i);
+ const nread = r.readSync(fub);
+ if (nread === EOF) {
+ return n;
+ }
+ this._reslice(i + nread);
+ n += nread;
+ } catch (e) {
+ return n;
+ }
+ }
+ }
+}
+
+/** Read `r` until EOF and return the content as `Uint8Array`.
+ */
+export async function readAll(r: Reader): Promise<Uint8Array> {
+ const buf = new Buffer();
+ await buf.readFrom(r);
+ return buf.bytes();
+}
+
+/** Synchronously read `r` until EOF and return the content as `Uint8Array`.
+ */
+export function readAllSync(r: SyncReader): Uint8Array {
+ const buf = new Buffer();
+ buf.readFromSync(r);
+ return buf.bytes();
+}
+
+/** Write all the content of `arr` to `w`.
+ */
+export async function writeAll(w: Writer, arr: Uint8Array): Promise<void> {
+ let nwritten = 0;
+ while (nwritten < arr.length) {
+ nwritten += await w.write(arr.subarray(nwritten));
+ }
+}
+
+/** Synchronously write all the content of `arr` to `w`.
+ */
+export function writeAllSync(w: SyncWriter, arr: Uint8Array): void {
+ let nwritten = 0;
+ while (nwritten < arr.length) {
+ nwritten += w.writeSync(arr.subarray(nwritten));
+ }
+}
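The `Buffer` class and the read/write helpers above compose naturally. A minimal sketch (assuming they are re-exported on the `Deno` namespace, as the test file below does with `Deno.Buffer`, `Deno.readAll` and `Deno.writeAll`):

    async function roundTrip(): Promise<string> {
      const buf = new Deno.Buffer();                                // empty buffer, grows on demand
      await Deno.writeAll(buf, new TextEncoder().encode("hello"));  // loops until every byte is written
      const out = await Deno.readAll(buf);                          // drains the buffer until EOF
      return new TextDecoder().decode(out);                         // "hello"
    }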
diff --git a/cli/js/buffer_test.ts b/cli/js/buffer_test.ts
new file mode 100644
index 000000000..a157b927e
--- /dev/null
+++ b/cli/js/buffer_test.ts
@@ -0,0 +1,277 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+// This code has been ported almost directly from Go's src/bytes/buffer_test.go
+// Copyright 2009 The Go Authors. All rights reserved. BSD license.
+// https://github.com/golang/go/blob/master/LICENSE
+import { assertEquals, test } from "./test_util.ts";
+
+const { Buffer, readAll, readAllSync, writeAll, writeAllSync } = Deno;
+type Buffer = Deno.Buffer;
+
+// N controls how many iterations of certain checks are performed.
+const N = 100;
+let testBytes: Uint8Array | null;
+let testString: string | null;
+
+function init(): void {
+ if (testBytes == null) {
+ testBytes = new Uint8Array(N);
+ for (let i = 0; i < N; i++) {
+ testBytes[i] = "a".charCodeAt(0) + (i % 26);
+ }
+ const decoder = new TextDecoder();
+ testString = decoder.decode(testBytes);
+ }
+}
+
+function check(buf: Deno.Buffer, s: string): void {
+ const bytes = buf.bytes();
+ assertEquals(buf.length, bytes.byteLength);
+ const decoder = new TextDecoder();
+ const bytesStr = decoder.decode(bytes);
+ assertEquals(bytesStr, s);
+ assertEquals(buf.length, buf.toString().length);
+ assertEquals(buf.length, s.length);
+}
+
+// Fill buf through n writes of byte slice fub.
+// The initial contents of buf correspond to the string s;
+// the result is the final contents of buf returned as a string.
+async function fillBytes(
+ buf: Buffer,
+ s: string,
+ n: number,
+ fub: Uint8Array
+): Promise<string> {
+ check(buf, s);
+ for (; n > 0; n--) {
+ const m = await buf.write(fub);
+ assertEquals(m, fub.byteLength);
+ const decoder = new TextDecoder();
+ s += decoder.decode(fub);
+ check(buf, s);
+ }
+ return s;
+}
+
+// Empty buf through repeated reads into fub.
+// The initial contents of buf correspond to the string s.
+async function empty(buf: Buffer, s: string, fub: Uint8Array): Promise<void> {
+ check(buf, s);
+ while (true) {
+ const r = await buf.read(fub);
+ if (r === Deno.EOF) {
+ break;
+ }
+ s = s.slice(r);
+ check(buf, s);
+ }
+ check(buf, "");
+}
+
+function repeat(c: string, bytes: number): Uint8Array {
+ assertEquals(c.length, 1);
+ const ui8 = new Uint8Array(bytes);
+ ui8.fill(c.charCodeAt(0));
+ return ui8;
+}
+
+test(function bufferNewBuffer(): void {
+ init();
+ const buf = new Buffer(testBytes.buffer as ArrayBuffer);
+ check(buf, testString);
+});
+
+test(async function bufferBasicOperations(): Promise<void> {
+ init();
+ const buf = new Buffer();
+ for (let i = 0; i < 5; i++) {
+ check(buf, "");
+
+ buf.reset();
+ check(buf, "");
+
+ buf.truncate(0);
+ check(buf, "");
+
+ let n = await buf.write(testBytes.subarray(0, 1));
+ assertEquals(n, 1);
+ check(buf, "a");
+
+ n = await buf.write(testBytes.subarray(1, 2));
+ assertEquals(n, 1);
+ check(buf, "ab");
+
+ n = await buf.write(testBytes.subarray(2, 26));
+ assertEquals(n, 24);
+ check(buf, testString.slice(0, 26));
+
+ buf.truncate(26);
+ check(buf, testString.slice(0, 26));
+
+ buf.truncate(20);
+ check(buf, testString.slice(0, 20));
+
+ await empty(buf, testString.slice(0, 20), new Uint8Array(5));
+ await empty(buf, "", new Uint8Array(100));
+
+ // TODO buf.writeByte()
+ // TODO buf.readByte()
+ }
+});
+
+test(async function bufferReadEmptyAtEOF(): Promise<void> {
+ // Check that EOF of 'buf' is not reached (even though it's empty) if
+ // results are written to a buffer that has 0 length (i.e. it can't store any data).
+ const buf = new Buffer();
+ const zeroLengthTmp = new Uint8Array(0);
+ const result = await buf.read(zeroLengthTmp);
+ assertEquals(result, 0);
+});
+
+test(async function bufferLargeByteWrites(): Promise<void> {
+ init();
+ const buf = new Buffer();
+ const limit = 9;
+ for (let i = 3; i < limit; i += 3) {
+ const s = await fillBytes(buf, "", 5, testBytes);
+ await empty(buf, s, new Uint8Array(Math.floor(testString.length / i)));
+ }
+ check(buf, "");
+});
+
+test(async function bufferTooLargeByteWrites(): Promise<void> {
+ init();
+ const tmp = new Uint8Array(72);
+ const growLen = Number.MAX_VALUE;
+ const xBytes = repeat("x", 0);
+ const buf = new Buffer(xBytes.buffer as ArrayBuffer);
+ await buf.read(tmp);
+
+ let err;
+ try {
+ buf.grow(growLen);
+ } catch (e) {
+ err = e;
+ }
+
+ assertEquals(err.kind, Deno.ErrorKind.TooLarge);
+ assertEquals(err.name, "TooLarge");
+});
+
+test(async function bufferLargeByteReads(): Promise<void> {
+ init();
+ const buf = new Buffer();
+ for (let i = 3; i < 30; i += 3) {
+ const n = Math.floor(testBytes.byteLength / i);
+ const s = await fillBytes(buf, "", 5, testBytes.subarray(0, n));
+ await empty(buf, s, new Uint8Array(testString.length));
+ }
+ check(buf, "");
+});
+
+test(function bufferCapWithPreallocatedSlice(): void {
+ const buf = new Buffer(new ArrayBuffer(10));
+ assertEquals(buf.capacity, 10);
+});
+
+test(async function bufferReadFrom(): Promise<void> {
+ init();
+ const buf = new Buffer();
+ for (let i = 3; i < 30; i += 3) {
+ const s = await fillBytes(
+ buf,
+ "",
+ 5,
+ testBytes.subarray(0, Math.floor(testBytes.byteLength / i))
+ );
+ const b = new Buffer();
+ await b.readFrom(buf);
+ const fub = new Uint8Array(testString.length);
+ await empty(b, s, fub);
+ }
+});
+
+test(async function bufferReadFromSync(): Promise<void> {
+ init();
+ const buf = new Buffer();
+ for (let i = 3; i < 30; i += 3) {
+ const s = await fillBytes(
+ buf,
+ "",
+ 5,
+ testBytes.subarray(0, Math.floor(testBytes.byteLength / i))
+ );
+ const b = new Buffer();
+ b.readFromSync(buf);
+ const fub = new Uint8Array(testString.length);
+ await empty(b, s, fub);
+ }
+});
+
+test(async function bufferTestGrow(): Promise<void> {
+ const tmp = new Uint8Array(72);
+ for (const startLen of [0, 100, 1000, 10000, 100000]) {
+ const xBytes = repeat("x", startLen);
+ for (const growLen of [0, 100, 1000, 10000, 100000]) {
+ const buf = new Buffer(xBytes.buffer as ArrayBuffer);
+ // If we read, this affects buf.off, which is good to test.
+ const result = await buf.read(tmp);
+ const nread = result === Deno.EOF ? 0 : result;
+ buf.grow(growLen);
+ const yBytes = repeat("y", growLen);
+ await buf.write(yBytes);
+ // Check that buffer has correct data.
+ assertEquals(
+ buf.bytes().subarray(0, startLen - nread),
+ xBytes.subarray(nread)
+ );
+ assertEquals(
+ buf.bytes().subarray(startLen - nread, startLen - nread + growLen),
+ yBytes
+ );
+ }
+ }
+});
+
+test(async function testReadAll(): Promise<void> {
+ init();
+ const reader = new Buffer(testBytes.buffer as ArrayBuffer);
+ const actualBytes = await readAll(reader);
+ assertEquals(testBytes.byteLength, actualBytes.byteLength);
+ for (let i = 0; i < testBytes.length; ++i) {
+ assertEquals(testBytes[i], actualBytes[i]);
+ }
+});
+
+test(function testReadAllSync(): void {
+ init();
+ const reader = new Buffer(testBytes.buffer as ArrayBuffer);
+ const actualBytes = readAllSync(reader);
+ assertEquals(testBytes.byteLength, actualBytes.byteLength);
+ for (let i = 0; i < testBytes.length; ++i) {
+ assertEquals(testBytes[i], actualBytes[i]);
+ }
+});
+
+test(async function testWriteAll(): Promise<void> {
+ init();
+ const writer = new Buffer();
+ await writeAll(writer, testBytes);
+ const actualBytes = writer.bytes();
+ assertEquals(testBytes.byteLength, actualBytes.byteLength);
+ for (let i = 0; i < testBytes.length; ++i) {
+ assertEquals(testBytes[i], actualBytes[i]);
+ }
+});
+
+test(function testWriteAllSync(): void {
+ init();
+ const writer = new Buffer();
+ writeAllSync(writer, testBytes);
+ const actualBytes = writer.bytes();
+ assertEquals(testBytes.byteLength, actualBytes.byteLength);
+ for (let i = 0; i < testBytes.length; ++i) {
+ assertEquals(testBytes[i], actualBytes[i]);
+ }
+});
diff --git a/cli/js/build.ts b/cli/js/build.ts
new file mode 100644
index 000000000..942f57458
--- /dev/null
+++ b/cli/js/build.ts
@@ -0,0 +1,27 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+export type OperatingSystem = "mac" | "win" | "linux";
+
+export type Arch = "x64" | "arm64";
+
+// Do not add unsupported platforms.
+/** Build related information */
+export interface BuildInfo {
+ /** The CPU architecture. */
+ arch: Arch;
+
+ /** The operating system. */
+ os: OperatingSystem;
+}
+
+export const build: BuildInfo = {
+ arch: "" as Arch,
+ os: "" as OperatingSystem
+};
+
+export function setBuildInfo(os: OperatingSystem, arch: Arch): void {
+ build.os = os;
+ build.arch = arch;
+
+ Object.freeze(build);
+}
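`build` starts out with empty placeholders and is populated and frozen once the runtime calls `setBuildInfo()` during startup, so downstream code can branch on it. A minimal consumption sketch (the helper name is illustrative; it assumes the frozen object is exposed as `Deno.build`, as build_test.ts below checks):

    function pathSeparator(): string {
      return Deno.build.os === "win" ? "\\" : "/";
    }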
diff --git a/cli/js/build_test.ts b/cli/js/build_test.ts
new file mode 100644
index 000000000..4423de338
--- /dev/null
+++ b/cli/js/build_test.ts
@@ -0,0 +1,10 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assert } from "./test_util.ts";
+
+test(function buildInfo(): void {
+ // Deno.build is injected by rollup at compile time. Here
+ // we check it has been properly transformed.
+ const { arch, os } = Deno.build;
+ assert(arch === "x64");
+ assert(os === "mac" || os === "win" || os === "linux");
+});
diff --git a/cli/js/chmod.ts b/cli/js/chmod.ts
new file mode 100644
index 000000000..7bf54cc5b
--- /dev/null
+++ b/cli/js/chmod.ts
@@ -0,0 +1,20 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+
+/** Changes the permission of a specific file/directory of specified path
+ * synchronously.
+ *
+ * Deno.chmodSync("/path/to/file", 0o666);
+ */
+export function chmodSync(path: string, mode: number): void {
+ sendSync(dispatch.OP_CHMOD, { path, mode });
+}
+
+/** Changes the permission of a specific file/directory of specified path.
+ *
+ * await Deno.chmod("/path/to/file", 0o666);
+ */
+export async function chmod(path: string, mode: number): Promise<void> {
+ await sendAsync(dispatch.OP_CHMOD, { path, mode });
+}
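A minimal usage sketch combining the call with `statSync` to confirm the new mode, mirroring the tests that follow (requires read/write permissions; on Windows the call is accepted but currently has no effect):

    async function makePrivate(path: string): Promise<void> {
      await Deno.chmod(path, 0o600);
      if (Deno.build.os !== "win") {
        const info = Deno.statSync(path);
        console.log((info.mode & 0o777).toString(8)); // "600"
      }
    }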
diff --git a/cli/js/chmod_test.ts b/cli/js/chmod_test.ts
new file mode 100644
index 000000000..420f4f313
--- /dev/null
+++ b/cli/js/chmod_test.ts
@@ -0,0 +1,142 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assertEquals } from "./test_util.ts";
+
+const isNotWindows = Deno.build.os !== "win";
+
+testPerm({ read: true, write: true }, function chmodSyncSuccess(): void {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const tempDir = Deno.makeTempDirSync();
+ const filename = tempDir + "/test.txt";
+ Deno.writeFileSync(filename, data, { perm: 0o666 });
+
+ // On Windows this has no effect, but it should not crash.
+ Deno.chmodSync(filename, 0o777);
+
+ // Check success when not on windows
+ if (isNotWindows) {
+ const fileInfo = Deno.statSync(filename);
+ assertEquals(fileInfo.mode & 0o777, 0o777);
+ }
+});
+
+// Check symlink when not on windows
+if (isNotWindows) {
+ testPerm(
+ { read: true, write: true },
+ function chmodSyncSymlinkSuccess(): void {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const tempDir = Deno.makeTempDirSync();
+
+ const filename = tempDir + "/test.txt";
+ Deno.writeFileSync(filename, data, { perm: 0o666 });
+ const symlinkName = tempDir + "/test_symlink.txt";
+ Deno.symlinkSync(filename, symlinkName);
+
+ let symlinkInfo = Deno.lstatSync(symlinkName);
+ const symlinkMode = symlinkInfo.mode & 0o777; // platform dependent
+
+ Deno.chmodSync(symlinkName, 0o777);
+
+ // Change actual file mode, not symlink
+ const fileInfo = Deno.statSync(filename);
+ assertEquals(fileInfo.mode & 0o777, 0o777);
+ symlinkInfo = Deno.lstatSync(symlinkName);
+ assertEquals(symlinkInfo.mode & 0o777, symlinkMode);
+ }
+ );
+}
+
+testPerm({ write: true }, function chmodSyncFailure(): void {
+ let err;
+ try {
+ const filename = "/badfile.txt";
+ Deno.chmodSync(filename, 0o777);
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: false }, function chmodSyncPerm(): void {
+ let err;
+ try {
+ Deno.chmodSync("/somefile.txt", 0o777);
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+testPerm({ read: true, write: true }, async function chmodSuccess(): Promise<
+ void
+> {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const tempDir = Deno.makeTempDirSync();
+ const filename = tempDir + "/test.txt";
+ Deno.writeFileSync(filename, data, { perm: 0o666 });
+
+ // On Windows this has no effect, but it should not crash.
+ await Deno.chmod(filename, 0o777);
+
+ // Check success when not on windows
+ if (isNotWindows) {
+ const fileInfo = Deno.statSync(filename);
+ assertEquals(fileInfo.mode & 0o777, 0o777);
+ }
+});
+
+// Check symlink when not on windows
+if (isNotWindows) {
+ testPerm(
+ { read: true, write: true },
+ async function chmodSymlinkSuccess(): Promise<void> {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const tempDir = Deno.makeTempDirSync();
+
+ const filename = tempDir + "/test.txt";
+ Deno.writeFileSync(filename, data, { perm: 0o666 });
+ const symlinkName = tempDir + "/test_symlink.txt";
+ Deno.symlinkSync(filename, symlinkName);
+
+ let symlinkInfo = Deno.lstatSync(symlinkName);
+ const symlinkMode = symlinkInfo.mode & 0o777; // platform dependent
+
+ await Deno.chmod(symlinkName, 0o777);
+
+ // Just change actual file mode, not symlink
+ const fileInfo = Deno.statSync(filename);
+ assertEquals(fileInfo.mode & 0o777, 0o777);
+ symlinkInfo = Deno.lstatSync(symlinkName);
+ assertEquals(symlinkInfo.mode & 0o777, symlinkMode);
+ }
+ );
+}
+
+testPerm({ write: true }, async function chmodFailure(): Promise<void> {
+ let err;
+ try {
+ const filename = "/badfile.txt";
+ await Deno.chmod(filename, 0o777);
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: false }, async function chmodPerm(): Promise<void> {
+ let err;
+ try {
+ await Deno.chmod("/somefile.txt", 0o777);
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
diff --git a/cli/js/chown.ts b/cli/js/chown.ts
new file mode 100644
index 000000000..a8bad1193
--- /dev/null
+++ b/cli/js/chown.ts
@@ -0,0 +1,27 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+
+/**
+ * Change owner of a regular file or directory synchronously. Unix only at the moment.
+ * @param path path to the file
+ * @param uid user id of the new owner
+ * @param gid group id of the new owner
+ */
+export function chownSync(path: string, uid: number, gid: number): void {
+ sendSync(dispatch.OP_CHOWN, { path, uid, gid });
+}
+
+/**
+ * Change owner of a regular file or directory asynchronously. Unix only at the moment.
+ * @param path path to the file
+ * @param uid user id of the new owner
+ * @param gid group id of the new owner
+ */
+export async function chown(
+ path: string,
+ uid: number,
+ gid: number
+): Promise<void> {
+ await sendAsync(dispatch.OP_CHOWN, { path, uid, gid });
+}
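A minimal sketch, following the pattern of chown_test.ts below: passing the process's own uid/gid keeps the call an effective no-op, so it succeeds without root (the ids here are placeholders supplied by the caller):

    async function claimFile(path: string, uid: number, gid: number): Promise<void> {
      await Deno.writeFile(path, new TextEncoder().encode("data"));
      await Deno.chown(path, uid, gid); // Unix only; throws NotFound if path does not exist
    }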
diff --git a/cli/js/chown_test.ts b/cli/js/chown_test.ts
new file mode 100644
index 000000000..84106d545
--- /dev/null
+++ b/cli/js/chown_test.ts
@@ -0,0 +1,145 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assertEquals } from "./test_util.ts";
+
+// chown on Windows is a no-op for now, so skip testing it on Windows
+if (Deno.build.os !== "win") {
+ async function getUidAndGid(): Promise<{ uid: number; gid: number }> {
+ // get the user ID and group ID of the current process
+ const uidProc = Deno.run({
+ stdout: "piped",
+ args: ["python", "-c", "import os; print(os.getuid())"]
+ });
+ const gidProc = Deno.run({
+ stdout: "piped",
+ args: ["python", "-c", "import os; print(os.getgid())"]
+ });
+
+ assertEquals((await uidProc.status()).code, 0);
+ assertEquals((await gidProc.status()).code, 0);
+ const uid = parseInt(
+ new TextDecoder("utf-8").decode(await uidProc.output())
+ );
+ const gid = parseInt(
+ new TextDecoder("utf-8").decode(await gidProc.output())
+ );
+
+ return { uid, gid };
+ }
+
+ testPerm({}, async function chownNoWritePermission(): Promise<void> {
+ const filePath = "chown_test_file.txt";
+ try {
+ await Deno.chown(filePath, 1000, 1000);
+ } catch (e) {
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ });
+
+ testPerm(
+ { run: true, write: true },
+ async function chownSyncFileNotExist(): Promise<void> {
+ const { uid, gid } = await getUidAndGid();
+ const filePath = Deno.makeTempDirSync() + "/chown_test_file.txt";
+
+ try {
+ Deno.chownSync(filePath, uid, gid);
+ } catch (e) {
+ assertEquals(e.kind, Deno.ErrorKind.NotFound);
+ assertEquals(e.name, "NotFound");
+ }
+ }
+ );
+
+ testPerm(
+ { run: true, write: true },
+ async function chownFileNotExist(): Promise<void> {
+ const { uid, gid } = await getUidAndGid();
+ const filePath = (await Deno.makeTempDir()) + "/chown_test_file.txt";
+
+ try {
+ await Deno.chown(filePath, uid, gid);
+ } catch (e) {
+ assertEquals(e.kind, Deno.ErrorKind.NotFound);
+ assertEquals(e.name, "NotFound");
+ }
+ }
+ );
+
+ testPerm({ write: true }, function chownSyncPermissionDenied(): void {
+ const enc = new TextEncoder();
+ const dirPath = Deno.makeTempDirSync();
+ const filePath = dirPath + "/chown_test_file.txt";
+ const fileData = enc.encode("Hello");
+ Deno.writeFileSync(filePath, fileData);
+
+ try {
+ // try changing the file's owner to root
+ Deno.chownSync(filePath, 0, 0);
+ } catch (e) {
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ Deno.removeSync(dirPath, { recursive: true });
+ });
+
+ testPerm({ write: true }, async function chownPermissionDenied(): Promise<
+ void
+ > {
+ const enc = new TextEncoder();
+ const dirPath = await Deno.makeTempDir();
+ const filePath = dirPath + "/chown_test_file.txt";
+ const fileData = enc.encode("Hello");
+ await Deno.writeFile(filePath, fileData);
+
+ try {
+ // try changing the file's owner to root
+ await Deno.chown(filePath, 0, 0);
+ } catch (e) {
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ await Deno.remove(dirPath, { recursive: true });
+ });
+
+ testPerm(
+ { run: true, write: true },
+ async function chownSyncSucceed(): Promise<void> {
+ // TODO: when a file's owner is actually being changed,
+ // chown only succeeds if run as a privileged user (root).
+ // The test script has no such privilege, so we need to find a better way to test this case.
+ const { uid, gid } = await getUidAndGid();
+
+ const enc = new TextEncoder();
+ const dirPath = Deno.makeTempDirSync();
+ const filePath = dirPath + "/chown_test_file.txt";
+ const fileData = enc.encode("Hello");
+ Deno.writeFileSync(filePath, fileData);
+
+ // The test script creates this file with the same uid and gid;
+ // here chown is a no-op, so it succeeds as a non-privileged user.
+ Deno.chownSync(filePath, uid, gid);
+
+ Deno.removeSync(dirPath, { recursive: true });
+ }
+ );
+
+ testPerm({ run: true, write: true }, async function chownSucceed(): Promise<
+ void
+ > {
+ // TODO: same as chownSyncSucceed
+ const { uid, gid } = await getUidAndGid();
+
+ const enc = new TextEncoder();
+ const dirPath = await Deno.makeTempDir();
+ const filePath = dirPath + "/chown_test_file.txt";
+ const fileData = enc.encode("Hello");
+ await Deno.writeFile(filePath, fileData);
+
+ // The test script creates this file with the same uid and gid;
+ // here chown is a no-op, so it succeeds as a non-privileged user.
+ await Deno.chown(filePath, uid, gid);
+
+ Deno.removeSync(dirPath, { recursive: true });
+ });
+}
diff --git a/cli/js/colors.ts b/cli/js/colors.ts
new file mode 100644
index 000000000..9937bdb57
--- /dev/null
+++ b/cli/js/colors.ts
@@ -0,0 +1,40 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+// TODO(kitsonk) Replace with `deno_std/colors/mod.ts` when we can load modules
+// which end in `.ts`.
+
+import { noColor } from "./deno.ts";
+
+interface Code {
+ open: string;
+ close: string;
+ regexp: RegExp;
+}
+
+const enabled = !noColor;
+
+function code(open: number, close: number): Code {
+ return {
+ open: `\x1b[${open}m`,
+ close: `\x1b[${close}m`,
+ regexp: new RegExp(`\\x1b\\[${close}m`, "g")
+ };
+}
+
+function run(str: string, code: Code): string {
+ return enabled
+ ? `${code.open}${str.replace(code.regexp, code.open)}${code.close}`
+ : str;
+}
+
+export function bold(str: string): string {
+ return run(str, code(1, 22));
+}
+
+export function yellow(str: string): string {
+ return run(str, code(33, 39));
+}
+
+export function cyan(str: string): string {
+ return run(str, code(36, 39));
+}
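Because `run()` re-opens the outer color after every nested close sequence (and is a pass-through when `noColor` is set), the helpers compose; compiler.ts uses exactly this to highlight ignored compiler options. A small illustrative sketch:

    import { bold, cyan, yellow } from "./colors.ts";

    console.log(yellow(`warning: ${bold("2")} options in ${cyan("tsconfig.json")} were ignored`));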
diff --git a/cli/js/compiler.ts b/cli/js/compiler.ts
new file mode 100644
index 000000000..57e5e3a47
--- /dev/null
+++ b/cli/js/compiler.ts
@@ -0,0 +1,667 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+// TODO(ry) Combine this implementation with //deno_typescript/compiler_main.js
+
+import "./globals.ts";
+import "./ts_global.d.ts";
+
+import { bold, cyan, yellow } from "./colors.ts";
+import { Console } from "./console.ts";
+import { core } from "./core.ts";
+import { Diagnostic, fromTypeScriptDiagnostic } from "./diagnostics.ts";
+import { cwd } from "./dir.ts";
+import * as dispatch from "./dispatch.ts";
+import { sendAsync, sendSync } from "./dispatch_json.ts";
+import * as os from "./os.ts";
+import { TextEncoder } from "./text_encoding.ts";
+import { getMappedModuleName, parseTypeDirectives } from "./type_directives.ts";
+import { assert, notImplemented } from "./util.ts";
+import * as util from "./util.ts";
+import { window } from "./window.ts";
+import { postMessage, workerClose, workerMain } from "./workers.ts";
+import { writeFileSync } from "./write_file.ts";
+
+// Warning! The values in this enum are duplicated in cli/msg.rs
+// Update carefully!
+enum MediaType {
+ JavaScript = 0,
+ JSX = 1,
+ TypeScript = 2,
+ TSX = 3,
+ Json = 4,
+ Unknown = 5
+}
+
+// Startup boilerplate. This is necessary because the compiler has its own
+// snapshot. (It would be great if we could remove these things or centralize
+// them somewhere else.)
+const console = new Console(core.print);
+window.console = console;
+window.workerMain = workerMain;
+function denoMain(): void {
+ os.start(true, "TS");
+}
+window["denoMain"] = denoMain;
+
+const ASSETS = "$asset$";
+const OUT_DIR = "$deno$";
+
+/** The format of the work message payload coming from the privileged side */
+interface CompilerReq {
+ rootNames: string[];
+ bundle?: string;
+ // TODO(ry) add compiler config to this interface.
+ // options: ts.CompilerOptions;
+ configPath?: string;
+ config?: string;
+}
+
+interface ConfigureResponse {
+ ignoredOptions?: string[];
+ diagnostics?: ts.Diagnostic[];
+}
+
+/** Options that either do nothing in Deno, or would cause undesired behavior
+ * if modified. */
+const ignoredCompilerOptions: readonly string[] = [
+ "allowSyntheticDefaultImports",
+ "baseUrl",
+ "build",
+ "composite",
+ "declaration",
+ "declarationDir",
+ "declarationMap",
+ "diagnostics",
+ "downlevelIteration",
+ "emitBOM",
+ "emitDeclarationOnly",
+ "esModuleInterop",
+ "extendedDiagnostics",
+ "forceConsistentCasingInFileNames",
+ "help",
+ "importHelpers",
+ "incremental",
+ "inlineSourceMap",
+ "inlineSources",
+ "init",
+ "isolatedModules",
+ "lib",
+ "listEmittedFiles",
+ "listFiles",
+ "mapRoot",
+ "maxNodeModuleJsDepth",
+ "module",
+ "moduleResolution",
+ "newLine",
+ "noEmit",
+ "noEmitHelpers",
+ "noEmitOnError",
+ "noLib",
+ "noResolve",
+ "out",
+ "outDir",
+ "outFile",
+ "paths",
+ "preserveSymlinks",
+ "preserveWatchOutput",
+ "pretty",
+ "rootDir",
+ "rootDirs",
+ "showConfig",
+ "skipDefaultLibCheck",
+ "skipLibCheck",
+ "sourceMap",
+ "sourceRoot",
+ "stripInternal",
+ "target",
+ "traceResolution",
+ "tsBuildInfoFile",
+ "types",
+ "typeRoots",
+ "version",
+ "watch"
+];
+
+/** The shape of the SourceFile that comes from the privileged side */
+interface SourceFileJson {
+ url: string;
+ filename: string;
+ mediaType: MediaType;
+ sourceCode: string;
+}
+
+/** A self-registering abstraction of source files. */
+class SourceFile {
+ extension!: ts.Extension;
+ filename!: string;
+
+ /** An array of tuples which represent the imports for the source file. The
+ * first element is the one that will be requested at compile time, the
+ * second is the one that should be actually resolved. This provides the
+ * feature of type directives for Deno. */
+ importedFiles?: Array<[string, string]>;
+
+ mediaType!: MediaType;
+ processed = false;
+ sourceCode!: string;
+ tsSourceFile?: ts.SourceFile;
+ url!: string;
+
+ constructor(json: SourceFileJson) {
+ if (SourceFile._moduleCache.has(json.url)) {
+ throw new TypeError("SourceFile already exists");
+ }
+ Object.assign(this, json);
+ this.extension = getExtension(this.url, this.mediaType);
+ SourceFile._moduleCache.set(this.url, this);
+ }
+
+ /** Cache the source file so it can be retrieved by `moduleSpecifier` and
+ * `containingFile`. */
+ cache(moduleSpecifier: string, containingFile: string): void {
+ let innerCache = SourceFile._specifierCache.get(containingFile);
+ if (!innerCache) {
+ innerCache = new Map();
+ SourceFile._specifierCache.set(containingFile, innerCache);
+ }
+ innerCache.set(moduleSpecifier, this);
+ }
+
+ /** Process the imports for the file and return them. */
+ imports(): Array<[string, string]> {
+ if (this.processed) {
+ throw new Error("SourceFile has already been processed.");
+ }
+ assert(this.sourceCode != null);
+ const preProcessedFileInfo = ts.preProcessFile(
+ this.sourceCode!,
+ true,
+ true
+ );
+ this.processed = true;
+ const files = (this.importedFiles = [] as Array<[string, string]>);
+
+ function process(references: ts.FileReference[]): void {
+ for (const { fileName } of references) {
+ files.push([fileName, fileName]);
+ }
+ }
+
+ const {
+ importedFiles,
+ referencedFiles,
+ libReferenceDirectives,
+ typeReferenceDirectives
+ } = preProcessedFileInfo;
+ const typeDirectives = parseTypeDirectives(this.sourceCode);
+ if (typeDirectives) {
+ for (const importedFile of importedFiles) {
+ files.push([
+ importedFile.fileName,
+ getMappedModuleName(importedFile, typeDirectives)
+ ]);
+ }
+ } else {
+ process(importedFiles);
+ }
+ process(referencedFiles);
+ process(libReferenceDirectives);
+ process(typeReferenceDirectives);
+ return files;
+ }
+
+ /** A cache of all the source files which have been loaded indexed by the
+ * url. */
+ private static _moduleCache: Map<string, SourceFile> = new Map();
+
+ /** A cache of source files based on module specifiers and containing files
+ * which is used by the TypeScript compiler to resolve the url */
+ private static _specifierCache: Map<
+ string,
+ Map<string, SourceFile>
+ > = new Map();
+
+ /** Retrieve a `SourceFile` based on a `moduleSpecifier` and `containingFile`
+ * or return `undefined` if not present. */
+ static getUrl(
+ moduleSpecifier: string,
+ containingFile: string
+ ): string | undefined {
+ const containingCache = this._specifierCache.get(containingFile);
+ if (containingCache) {
+ const sourceFile = containingCache.get(moduleSpecifier);
+ return sourceFile && sourceFile.url;
+ }
+ return undefined;
+ }
+
+ /** Retrieve a `SourceFile` based on a `url` */
+ static get(url: string): SourceFile | undefined {
+ return this._moduleCache.get(url);
+ }
+}
+
+interface EmitResult {
+ emitSkipped: boolean;
+ diagnostics?: Diagnostic;
+}
+
+/** Ops to Rust to resolve special static assets. */
+function fetchAsset(name: string): string {
+ return sendSync(dispatch.OP_FETCH_ASSET, { name });
+}
+
+/** Ops to Rust to resolve and fetch module metadata. */
+function fetchSourceFiles(
+ specifiers: string[],
+ referrer: string
+): Promise<SourceFileJson[]> {
+ util.log("compiler::fetchSourceFiles", { specifiers, referrer });
+ return sendAsync(dispatch.OP_FETCH_SOURCE_FILES, {
+ specifiers,
+ referrer
+ });
+}
+
+/** Recursively process the imports of modules, generating `SourceFile`s of any
+ * imported files.
+ *
+ * Specifiers are supplied in an array of tuples where the first is the
+ * specifier that will be requested in the code and the second is the specifier
+ * that should be actually resolved. */
+async function processImports(
+ specifiers: Array<[string, string]>,
+ referrer = ""
+): Promise<void> {
+ if (!specifiers.length) {
+ return;
+ }
+ const sources = specifiers.map(([, moduleSpecifier]) => moduleSpecifier);
+ const sourceFiles = await fetchSourceFiles(sources, referrer);
+ assert(sourceFiles.length === specifiers.length);
+ for (let i = 0; i < sourceFiles.length; i++) {
+ const sourceFileJson = sourceFiles[i];
+ const sourceFile =
+ SourceFile.get(sourceFileJson.url) || new SourceFile(sourceFileJson);
+ sourceFile.cache(specifiers[i][0], referrer);
+ if (!sourceFile.processed) {
+ await processImports(sourceFile.imports(), sourceFile.url);
+ }
+ }
+}
+
+/** Utility function to turn the number of bytes into a human readable
+ * unit */
+function humanFileSize(bytes: number): string {
+ const thresh = 1000;
+ if (Math.abs(bytes) < thresh) {
+ return bytes + " B";
+ }
+ const units = ["kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
+ let u = -1;
+ do {
+ bytes /= thresh;
+ ++u;
+ } while (Math.abs(bytes) >= thresh && u < units.length - 1);
+ return `${bytes.toFixed(1)} ${units[u]}`;
+}
+
+/** Ops to Rust for caching source map and compiled js */
+function cache(extension: string, moduleId: string, contents: string): void {
+ util.log("compiler::cache", { extension, moduleId });
+ sendSync(dispatch.OP_CACHE, { extension, moduleId, contents });
+}
+
+const encoder = new TextEncoder();
+
+/** Given a fileName and the data, emit the file to the file system. */
+function emitBundle(fileName: string, data: string): void {
+ // For internal purposes, when trying to emit to `$deno$` just no-op
+ if (fileName.startsWith("$deno$")) {
+ console.warn("skipping emitBundle", fileName);
+ return;
+ }
+ const encodedData = encoder.encode(data);
+ console.log(`Emitting bundle to "${fileName}"`);
+ writeFileSync(fileName, encodedData);
+ console.log(`${humanFileSize(encodedData.length)} emitted.`);
+}
+
+/** Returns the TypeScript Extension enum for a given media type. */
+function getExtension(fileName: string, mediaType: MediaType): ts.Extension {
+ switch (mediaType) {
+ case MediaType.JavaScript:
+ return ts.Extension.Js;
+ case MediaType.JSX:
+ return ts.Extension.Jsx;
+ case MediaType.TypeScript:
+ return fileName.endsWith(".d.ts") ? ts.Extension.Dts : ts.Extension.Ts;
+ case MediaType.TSX:
+ return ts.Extension.Tsx;
+ case MediaType.Json:
+ return ts.Extension.Json;
+ case MediaType.Unknown:
+ default:
+ throw TypeError("Cannot resolve extension.");
+ }
+}
+
+class Host implements ts.CompilerHost {
+ private readonly _options: ts.CompilerOptions = {
+ allowJs: true,
+ allowNonTsExtensions: true,
+ checkJs: false,
+ esModuleInterop: true,
+ module: ts.ModuleKind.ESNext,
+ outDir: OUT_DIR,
+ resolveJsonModule: true,
+ sourceMap: true,
+ stripComments: true,
+ target: ts.ScriptTarget.ESNext,
+ jsx: ts.JsxEmit.React
+ };
+
+ private _getAsset(filename: string): SourceFile {
+ const sourceFile = SourceFile.get(filename);
+ if (sourceFile) {
+ return sourceFile;
+ }
+ const url = filename.split("/").pop()!;
+ const assetName = url.includes(".") ? url : `${url}.d.ts`;
+ const sourceCode = fetchAsset(assetName);
+ return new SourceFile({
+ url,
+ filename,
+ mediaType: MediaType.TypeScript,
+ sourceCode
+ });
+ }
+
+ /* Deno specific APIs */
+
+ /** Provides the `ts.CompilerHost` interface for Deno.
+ *
+ * @param _bundle Set to a string value to configure the host to write out a
+ * bundle instead of caching individual files.
+ */
+ constructor(private _bundle?: string) {
+ if (this._bundle) {
+ // options we need to change when we are generating a bundle
+ const bundlerOptions: ts.CompilerOptions = {
+ module: ts.ModuleKind.AMD,
+ inlineSourceMap: true,
+ outDir: undefined,
+ outFile: `${OUT_DIR}/bundle.js`,
+ sourceMap: false
+ };
+ Object.assign(this._options, bundlerOptions);
+ }
+ }
+
+ /** Take a configuration string, parse it, and use it to merge with the
+ * compiler's configuration options. The method returns an array of compiler
+ * options which were ignored, or `undefined`. */
+ configure(path: string, configurationText: string): ConfigureResponse {
+ util.log("compiler::host.configure", path);
+ const { config, error } = ts.parseConfigFileTextToJson(
+ path,
+ configurationText
+ );
+ if (error) {
+ return { diagnostics: [error] };
+ }
+ const { options, errors } = ts.convertCompilerOptionsFromJson(
+ config.compilerOptions,
+ cwd()
+ );
+ const ignoredOptions: string[] = [];
+ for (const key of Object.keys(options)) {
+ if (
+ ignoredCompilerOptions.includes(key) &&
+ (!(key in this._options) || options[key] !== this._options[key])
+ ) {
+ ignoredOptions.push(key);
+ delete options[key];
+ }
+ }
+ Object.assign(this._options, options);
+ return {
+ ignoredOptions: ignoredOptions.length ? ignoredOptions : undefined,
+ diagnostics: errors.length ? errors : undefined
+ };
+ }
+
+ /* TypeScript CompilerHost APIs */
+
+ fileExists(_fileName: string): boolean {
+ return notImplemented();
+ }
+
+ getCanonicalFileName(fileName: string): string {
+ return fileName;
+ }
+
+ getCompilationSettings(): ts.CompilerOptions {
+ util.log("compiler::host.getCompilationSettings()");
+ return this._options;
+ }
+
+ getCurrentDirectory(): string {
+ return "";
+ }
+
+ getDefaultLibFileName(_options: ts.CompilerOptions): string {
+ return ASSETS + "/lib.deno_runtime.d.ts";
+ }
+
+ getNewLine(): string {
+ return "\n";
+ }
+
+ getSourceFile(
+ fileName: string,
+ languageVersion: ts.ScriptTarget,
+ onError?: (message: string) => void,
+ shouldCreateNewSourceFile?: boolean
+ ): ts.SourceFile | undefined {
+ util.log("compiler::host.getSourceFile", fileName);
+ try {
+ assert(!shouldCreateNewSourceFile);
+ const sourceFile = fileName.startsWith(ASSETS)
+ ? this._getAsset(fileName)
+ : SourceFile.get(fileName);
+ assert(sourceFile != null);
+ if (!sourceFile!.tsSourceFile) {
+ sourceFile!.tsSourceFile = ts.createSourceFile(
+ fileName,
+ sourceFile!.sourceCode,
+ languageVersion
+ );
+ }
+ return sourceFile!.tsSourceFile;
+ } catch (e) {
+ if (onError) {
+ onError(String(e));
+ } else {
+ throw e;
+ }
+ return undefined;
+ }
+ }
+
+ readFile(_fileName: string): string | undefined {
+ return notImplemented();
+ }
+
+ resolveModuleNames(
+ moduleNames: string[],
+ containingFile: string
+ ): Array<ts.ResolvedModuleFull | undefined> {
+ util.log("compiler::host.resolveModuleNames", {
+ moduleNames,
+ containingFile
+ });
+ return moduleNames.map(specifier => {
+ const url = SourceFile.getUrl(specifier, containingFile);
+ const sourceFile = specifier.startsWith(ASSETS)
+ ? this._getAsset(specifier)
+ : url
+ ? SourceFile.get(url)
+ : undefined;
+ if (!sourceFile) {
+ return undefined;
+ }
+ return {
+ resolvedFileName: sourceFile.url,
+ isExternalLibraryImport: specifier.startsWith(ASSETS),
+ extension: sourceFile.extension
+ };
+ });
+ }
+
+ useCaseSensitiveFileNames(): boolean {
+ return true;
+ }
+
+ writeFile(
+ fileName: string,
+ data: string,
+ _writeByteOrderMark: boolean,
+ onError?: (message: string) => void,
+ sourceFiles?: readonly ts.SourceFile[]
+ ): void {
+ util.log("compiler::host.writeFile", fileName);
+ try {
+ if (this._bundle) {
+ emitBundle(this._bundle, data);
+ } else {
+ assert(sourceFiles != null && sourceFiles.length == 1);
+ const url = sourceFiles![0].fileName;
+ const sourceFile = SourceFile.get(url);
+
+ if (sourceFile) {
+ // NOTE: If it's a `.json` file we don't want to write it to disk.
+ // JSON files are loaded and used by the TS compiler to check types, but we don't
+ // want to emit them to disk because the output file would be the same as the input file.
+ if (sourceFile.extension === ts.Extension.Json) {
+ return;
+ }
+
+ // NOTE: JavaScript files are only emitted to disk if the `checkJs` option is on
+ if (
+ sourceFile.extension === ts.Extension.Js &&
+ !this._options.checkJs
+ ) {
+ return;
+ }
+ }
+
+ if (fileName.endsWith(".map")) {
+ // Source Map
+ cache(".map", url, data);
+ } else if (fileName.endsWith(".js") || fileName.endsWith(".json")) {
+ // Compiled JavaScript
+ cache(".js", url, data);
+ } else {
+ assert(false, "Trying to cache unhandled file type " + fileName);
+ }
+ }
+ } catch (e) {
+ if (onError) {
+ onError(String(e));
+ } else {
+ throw e;
+ }
+ }
+ }
+}
+
+// provide the "main" function that will be called by the privileged side when
+// lazy instantiating the compiler web worker
+window.compilerMain = function compilerMain(): void {
+ // workerMain should have already been called since a compiler is a worker.
+ window.onmessage = async ({ data }: { data: CompilerReq }): Promise<void> => {
+ const { rootNames, configPath, config, bundle } = data;
+ util.log(">>> compile start", { rootNames, bundle });
+
+ // This will recursively analyse all the code for other imports, requesting
+ // those from the privileged side, populating the in memory cache which
+ // will be used by the host, before resolving.
+ await processImports(rootNames.map(rootName => [rootName, rootName]));
+
+ const host = new Host(bundle);
+ let emitSkipped = true;
+ let diagnostics: ts.Diagnostic[] | undefined;
+
+ // if there is a configuration supplied, we need to parse that
+ if (config && config.length && configPath) {
+ const configResult = host.configure(configPath, config);
+ const ignoredOptions = configResult.ignoredOptions;
+ diagnostics = configResult.diagnostics;
+ if (ignoredOptions) {
+ console.warn(
+ yellow(`Unsupported compiler options in "${configPath}"\n`) +
+ cyan(` The following options were ignored:\n`) +
+ ` ${ignoredOptions
+ .map((value): string => bold(value))
+ .join(", ")}`
+ );
+ }
+ }
+
+ // if there was a configuration and no diagnostics with it, we will continue
+ // to generate the program and possibly emit it.
+ if (!diagnostics || (diagnostics && diagnostics.length === 0)) {
+ const options = host.getCompilationSettings();
+ const program = ts.createProgram(rootNames, options, host);
+
+ diagnostics = ts.getPreEmitDiagnostics(program).filter(
+ ({ code }): boolean => {
+ // TS1308: 'await' expression is only allowed within an async
+ // function.
+ if (code === 1308) return false;
+ // TS2691: An import path cannot end with a '.ts' extension. Consider
+ // importing 'bad-module' instead.
+ if (code === 2691) return false;
+ // TS5009: Cannot find the common subdirectory path for the input files.
+ if (code === 5009) return false;
+ // TS5055: Cannot write file
+ // 'http://localhost:4545/tests/subdir/mt_application_x_javascript.j4.js'
+ // because it would overwrite input file.
+ if (code === 5055) return false;
+ // TypeScript is overly opinionated that only CommonJS modules kinds can
+ // support JSON imports. Allegedly this was fixed in
+ // Microsoft/TypeScript#26825 but that doesn't seem to be working here,
+ // so we will ignore complaints about this compiler setting.
+ if (code === 5070) return false;
+ return true;
+ }
+ );
+
+ // We will only proceed with the emit if there are no diagnostics.
+ if (diagnostics && diagnostics.length === 0) {
+ if (bundle) {
+ console.log(`Bundling "${bundle}"`);
+ }
+ const emitResult = program.emit();
+ emitSkipped = emitResult.emitSkipped;
+ // emitResult.diagnostics is `readonly` in TS3.5+ and can't be assigned
+ // without casting.
+ diagnostics = emitResult.diagnostics as ts.Diagnostic[];
+ }
+ }
+
+ const result: EmitResult = {
+ emitSkipped,
+ diagnostics: diagnostics.length
+ ? fromTypeScriptDiagnostic(diagnostics)
+ : undefined
+ };
+
+ postMessage(result);
+
+ util.log("<<< compile end", { rootNames, bundle });
+
+ // The compiler isolate exits after a single message.
+ workerClose();
+ };
+};
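As a quick check of the size formatting used when emitting bundles, a self-contained re-sketch of `humanFileSize()` with its expected outputs (note the units are decimal, 1000-based):

    function humanFileSize(bytes: number): string {
      const thresh = 1000;
      if (Math.abs(bytes) < thresh) return bytes + " B";
      const units = ["kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
      let u = -1;
      do {
        bytes /= thresh;
        ++u;
      } while (Math.abs(bytes) >= thresh && u < units.length - 1);
      return `${bytes.toFixed(1)} ${units[u]}`;
    }

    console.log(humanFileSize(999));     // "999 B"
    console.log(humanFileSize(1500));    // "1.5 kB"
    console.log(humanFileSize(1048576)); // "1.0 MB" (not 1 MiB; the threshold is 1000)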
diff --git a/cli/js/console.ts b/cli/js/console.ts
new file mode 100644
index 000000000..9f0ce4bd6
--- /dev/null
+++ b/cli/js/console.ts
@@ -0,0 +1,790 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { isTypedArray } from "./util.ts";
+import { TypedArray } from "./types.ts";
+import { TextEncoder } from "./text_encoding.ts";
+import { File, stdout } from "./files.ts";
+import { cliTable } from "./console_table.ts";
+
+type ConsoleContext = Set<unknown>;
+type ConsoleOptions = Partial<{
+ showHidden: boolean;
+ depth: number;
+ colors: boolean;
+ indentLevel: number;
+}>;
+
+// Default depth of logging nested objects
+const DEFAULT_MAX_DEPTH = 4;
+
+// Number of elements an object must have before it's displayed in abbreviated
+// form.
+const OBJ_ABBREVIATE_SIZE = 5;
+
+const STR_ABBREVIATE_SIZE = 100;
+
+// Char codes
+const CHAR_PERCENT = 37; /* % */
+const CHAR_LOWERCASE_S = 115; /* s */
+const CHAR_LOWERCASE_D = 100; /* d */
+const CHAR_LOWERCASE_I = 105; /* i */
+const CHAR_LOWERCASE_F = 102; /* f */
+const CHAR_LOWERCASE_O = 111; /* o */
+const CHAR_UPPERCASE_O = 79; /* O */
+const CHAR_LOWERCASE_C = 99; /* c */
+export class CSI {
+ static kClear = "\x1b[1;1H";
+ static kClearScreenDown = "\x1b[0J";
+}
+
+/* eslint-disable @typescript-eslint/no-use-before-define */
+
+function cursorTo(stream: File, _x: number, _y?: number): void {
+ const uint8 = new TextEncoder().encode(CSI.kClear);
+ stream.write(uint8);
+}
+
+function clearScreenDown(stream: File): void {
+ const uint8 = new TextEncoder().encode(CSI.kClearScreenDown);
+ stream.write(uint8);
+}
+
+function getClassInstanceName(instance: unknown): string {
+ if (typeof instance !== "object") {
+ return "";
+ }
+ if (!instance) {
+ return "";
+ }
+
+ const proto = Object.getPrototypeOf(instance);
+ if (proto && proto.constructor) {
+ return proto.constructor.name; // could be "Object" or "Array"
+ }
+
+ return "";
+}
+
+function createFunctionString(value: Function, _ctx: ConsoleContext): string {
+ // Might be Function/AsyncFunction/GeneratorFunction
+ const cstrName = Object.getPrototypeOf(value).constructor.name;
+ if (value.name && value.name !== "anonymous") {
+ // from MDN spec
+ return `[${cstrName}: ${value.name}]`;
+ }
+ return `[${cstrName}]`;
+}
+
+interface IterablePrintConfig<T> {
+ typeName: string;
+ displayName: string;
+ delims: [string, string];
+ entryHandler: (
+ entry: T,
+ ctx: ConsoleContext,
+ level: number,
+ maxLevel: number
+ ) => string;
+}
+
+function createIterableString<T>(
+ value: Iterable<T>,
+ ctx: ConsoleContext,
+ level: number,
+ maxLevel: number,
+ config: IterablePrintConfig<T>
+): string {
+ if (level >= maxLevel) {
+ return `[${config.typeName}]`;
+ }
+ ctx.add(value);
+
+ const entries: string[] = [];
+ // Guard against cases like Uint8Array.prototype, where iteration can throw.
+ try {
+ for (const el of value) {
+ entries.push(config.entryHandler(el, ctx, level + 1, maxLevel));
+ }
+ } catch (e) {}
+ ctx.delete(value);
+ const iPrefix = `${config.displayName ? config.displayName + " " : ""}`;
+ const iContent = entries.length === 0 ? "" : ` ${entries.join(", ")} `;
+ return `${iPrefix}${config.delims[0]}${iContent}${config.delims[1]}`;
+}
+
+function stringify(
+ value: unknown,
+ ctx: ConsoleContext,
+ level: number,
+ maxLevel: number
+): string {
+ switch (typeof value) {
+ case "string":
+ return value;
+ case "number":
+ // Special handling of -0
+ return Object.is(value, -0) ? "-0" : `${value}`;
+ case "boolean":
+ case "undefined":
+ case "symbol":
+ return String(value);
+ case "bigint":
+ return `${value}n`;
+ case "function":
+ return createFunctionString(value as Function, ctx);
+ case "object":
+ if (value === null) {
+ return "null";
+ }
+
+ if (ctx.has(value)) {
+ return "[Circular]";
+ }
+
+ return createObjectString(value, ctx, level, maxLevel);
+ default:
+ return "[Not Implemented]";
+ }
+}
+
+// Print strings with quotes when they are inside arrays or objects.
+function stringifyWithQuotes(
+ value: unknown,
+ ctx: ConsoleContext,
+ level: number,
+ maxLevel: number
+): string {
+ switch (typeof value) {
+ case "string":
+ const trunc =
+ value.length > STR_ABBREVIATE_SIZE
+ ? value.slice(0, STR_ABBREVIATE_SIZE) + "..."
+ : value;
+ return JSON.stringify(trunc);
+ default:
+ return stringify(value, ctx, level, maxLevel);
+ }
+}
+
+function createArrayString(
+ value: unknown[],
+ ctx: ConsoleContext,
+ level: number,
+ maxLevel: number
+): string {
+ const printConfig: IterablePrintConfig<unknown> = {
+ typeName: "Array",
+ displayName: "",
+ delims: ["[", "]"],
+ entryHandler: (el, ctx, level, maxLevel): string =>
+ stringifyWithQuotes(el, ctx, level + 1, maxLevel)
+ };
+ return createIterableString(value, ctx, level, maxLevel, printConfig);
+}
+
+function createTypedArrayString(
+ typedArrayName: string,
+ value: TypedArray,
+ ctx: ConsoleContext,
+ level: number,
+ maxLevel: number
+): string {
+ const printConfig: IterablePrintConfig<unknown> = {
+ typeName: typedArrayName,
+ displayName: typedArrayName,
+ delims: ["[", "]"],
+ entryHandler: (el, ctx, level, maxLevel): string =>
+ stringifyWithQuotes(el, ctx, level + 1, maxLevel)
+ };
+ return createIterableString(value, ctx, level, maxLevel, printConfig);
+}
+
+function createSetString(
+ value: Set<unknown>,
+ ctx: ConsoleContext,
+ level: number,
+ maxLevel: number
+): string {
+ const printConfig: IterablePrintConfig<unknown> = {
+ typeName: "Set",
+ displayName: "Set",
+ delims: ["{", "}"],
+ entryHandler: (el, ctx, level, maxLevel): string =>
+ stringifyWithQuotes(el, ctx, level + 1, maxLevel)
+ };
+ return createIterableString(value, ctx, level, maxLevel, printConfig);
+}
+
+function createMapString(
+ value: Map<unknown, unknown>,
+ ctx: ConsoleContext,
+ level: number,
+ maxLevel: number
+): string {
+ const printConfig: IterablePrintConfig<[unknown, unknown]> = {
+ typeName: "Map",
+ displayName: "Map",
+ delims: ["{", "}"],
+ entryHandler: (el, ctx, level, maxLevel): string => {
+ const [key, val] = el;
+ return `${stringifyWithQuotes(
+ key,
+ ctx,
+ level + 1,
+ maxLevel
+ )} => ${stringifyWithQuotes(val, ctx, level + 1, maxLevel)}`;
+ }
+ };
+ return createIterableString(value, ctx, level, maxLevel, printConfig);
+}
+
+function createWeakSetString(): string {
+ return "WeakSet { [items unknown] }"; // as seen in Node
+}
+
+function createWeakMapString(): string {
+ return "WeakMap { [items unknown] }"; // as seen in Node
+}
+
+function createDateString(value: Date): string {
+ // without quotes, ISO format
+ return value.toISOString();
+}
+
+function createRegExpString(value: RegExp): string {
+ return value.toString();
+}
+
+/* eslint-disable @typescript-eslint/ban-types */
+
+function createStringWrapperString(value: String): string {
+ return `[String: "${value.toString()}"]`;
+}
+
+function createBooleanWrapperString(value: Boolean): string {
+ return `[Boolean: ${value.toString()}]`;
+}
+
+function createNumberWrapperString(value: Number): string {
+ return `[Number: ${value.toString()}]`;
+}
+
+/* eslint-enable @typescript-eslint/ban-types */
+
+// TODO: Promise, requires v8 bindings to get info
+// TODO: Proxy
+
+function createRawObjectString(
+ value: { [key: string]: unknown },
+ ctx: ConsoleContext,
+ level: number,
+ maxLevel: number
+): string {
+ if (level >= maxLevel) {
+ return "[Object]";
+ }
+ ctx.add(value);
+
+ let baseString = "";
+
+ const className = getClassInstanceName(value);
+ let shouldShowClassName = false;
+ if (className && className !== "Object" && className !== "anonymous") {
+ shouldShowClassName = true;
+ }
+ const keys = Object.keys(value);
+ const entries: string[] = keys.map(
+ (key): string => {
+ if (keys.length > OBJ_ABBREVIATE_SIZE) {
+ return key;
+ } else {
+ return `${key}: ${stringifyWithQuotes(
+ value[key],
+ ctx,
+ level + 1,
+ maxLevel
+ )}`;
+ }
+ }
+ );
+
+ ctx.delete(value);
+
+ if (entries.length === 0) {
+ baseString = "{}";
+ } else {
+ baseString = `{ ${entries.join(", ")} }`;
+ }
+
+ if (shouldShowClassName) {
+ baseString = `${className} ${baseString}`;
+ }
+
+ return baseString;
+}
+
+function createObjectString(
+ value: {},
+ ...args: [ConsoleContext, number, number]
+): string {
+ if (customInspect in value && typeof value[customInspect] === "function") {
+ return String(value[customInspect]!());
+ } else if (value instanceof Error) {
+ return String(value.stack);
+ } else if (Array.isArray(value)) {
+ return createArrayString(value, ...args);
+ } else if (value instanceof Number) {
+ return createNumberWrapperString(value);
+ } else if (value instanceof Boolean) {
+ return createBooleanWrapperString(value);
+ } else if (value instanceof String) {
+ return createStringWrapperString(value);
+ } else if (value instanceof RegExp) {
+ return createRegExpString(value);
+ } else if (value instanceof Date) {
+ return createDateString(value);
+ } else if (value instanceof Set) {
+ return createSetString(value, ...args);
+ } else if (value instanceof Map) {
+ return createMapString(value, ...args);
+ } else if (value instanceof WeakSet) {
+ return createWeakSetString();
+ } else if (value instanceof WeakMap) {
+ return createWeakMapString();
+ } else if (isTypedArray(value)) {
+ return createTypedArrayString(
+ Object.getPrototypeOf(value).constructor.name,
+ value,
+ ...args
+ );
+ } else {
+ // Otherwise, default object formatting
+ return createRawObjectString(value, ...args);
+ }
+}
+
+/** TODO Do not expose this from "deno" namespace.
+ * @internal
+ */
+export function stringifyArgs(
+ args: unknown[],
+ options: ConsoleOptions = {}
+): string {
+ const first = args[0];
+ let a = 0;
+ let str = "";
+ let join = "";
+
+ if (typeof first === "string") {
+ let tempStr: string;
+ let lastPos = 0;
+
+ for (let i = 0; i < first.length - 1; i++) {
+ if (first.charCodeAt(i) === CHAR_PERCENT) {
+ const nextChar = first.charCodeAt(++i);
+ if (a + 1 !== args.length) {
+ switch (nextChar) {
+ case CHAR_LOWERCASE_S:
+ // format as a string
+ tempStr = String(args[++a]);
+ break;
+ case CHAR_LOWERCASE_D:
+ case CHAR_LOWERCASE_I:
+ // format as an integer
+ const tempInteger = args[++a];
+ if (typeof tempInteger === "bigint") {
+ tempStr = `${tempInteger}n`;
+ } else if (typeof tempInteger === "symbol") {
+ tempStr = "NaN";
+ } else {
+ tempStr = `${parseInt(String(tempInteger), 10)}`;
+ }
+ break;
+ case CHAR_LOWERCASE_F:
+ // format as a floating point value
+ const tempFloat = args[++a];
+ if (typeof tempFloat === "symbol") {
+ tempStr = "NaN";
+ } else {
+ tempStr = `${parseFloat(String(tempFloat))}`;
+ }
+ break;
+ case CHAR_LOWERCASE_O:
+ case CHAR_UPPERCASE_O:
+ // format as an object
+ tempStr = stringify(
+ args[++a],
+ new Set<unknown>(),
+ 0,
+ options.depth != undefined ? options.depth : DEFAULT_MAX_DEPTH
+ );
+ break;
+ case CHAR_PERCENT:
+ str += first.slice(lastPos, i);
+ lastPos = i + 1;
+ continue;
+ case CHAR_LOWERCASE_C:
+ // TODO: apply CSS style rules to the output string as specified
+ continue;
+ default:
+ // any other character is not a correct placeholder
+ continue;
+ }
+
+ if (lastPos !== i - 1) {
+ str += first.slice(lastPos, i - 1);
+ }
+
+ str += tempStr;
+ lastPos = i + 1;
+ } else if (nextChar === CHAR_PERCENT) {
+ str += first.slice(lastPos, i);
+ lastPos = i + 1;
+ }
+ }
+ }
+
+ if (lastPos !== 0) {
+ a++;
+ join = " ";
+ if (lastPos < first.length) {
+ str += first.slice(lastPos);
+ }
+ }
+ }
+
+ while (a < args.length) {
+ const value = args[a];
+ str += join;
+ if (typeof value === "string") {
+ str += value;
+ } else {
+ // use default maximum depth for null or undefined argument
+ str += stringify(
+ value,
+ new Set<unknown>(),
+ 0,
+ options.depth != undefined ? options.depth : DEFAULT_MAX_DEPTH
+ );
+ }
+ join = " ";
+ a++;
+ }
+
+ const { indentLevel } = options;
+ if (indentLevel != null && indentLevel > 0) {
+ const groupIndent = " ".repeat(indentLevel);
+ if (str.indexOf("\n") !== -1) {
+ str = str.replace(/\n/g, `\n${groupIndent}`);
+ }
+ str = groupIndent + str;
+ }
+
+ return str;
+}
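A few illustrative results, assuming `stringifyArgs()` behaves as implemented above (printf-style `%s`/`%d`/`%i`/`%f`/`%o`/`%O`/`%%` substitution, with any remaining arguments appended space-separated):

    stringifyArgs(["%s is %d years old", "deno", 2]); // "deno is 2 years old"
    stringifyArgs(["100%% sure"]);                    // "100% sure"
    stringifyArgs(["obj:", { a: 1 }]);                // "obj: { a: 1 }"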
+
+type PrintFunc = (x: string, isErr?: boolean) => void;
+
+const countMap = new Map<string, number>();
+const timerMap = new Map<string, number>();
+const isConsoleInstance = Symbol("isConsoleInstance");
+
+export class Console {
+ indentLevel: number;
+ [isConsoleInstance] = false;
+
+ /** @internal */
+ constructor(private printFunc: PrintFunc) {
+ this.indentLevel = 0;
+ this[isConsoleInstance] = true;
+
+ // ref https://console.spec.whatwg.org/#console-namespace
+ // For historical web-compatibility reasons, the namespace object for
+ // console must have as its [[Prototype]] an empty object, created as if
+ // by ObjectCreate(%ObjectPrototype%), instead of %ObjectPrototype%.
+ const console = Object.create({}) as Console;
+ Object.assign(console, this);
+ return console;
+ }
+
+ /** Writes the arguments to stdout */
+ log = (...args: unknown[]): void => {
+ this.printFunc(
+ stringifyArgs(args, {
+ indentLevel: this.indentLevel
+ }) + "\n",
+ false
+ );
+ };
+
+ /** Writes the arguments to stdout */
+ debug = this.log;
+ /** Writes the arguments to stdout */
+ info = this.log;
+
+ /** Writes the properties of the supplied `obj` to stdout */
+ dir = (obj: unknown, options: ConsoleOptions = {}): void => {
+ this.printFunc(stringifyArgs([obj], options) + "\n", false);
+ };
+
+ /** From MDN:
+ * Displays an interactive tree of the descendant elements of
+ * the specified XML/HTML element. If it is not possible to display
+ * as an element the JavaScript Object view is shown instead.
+ * The output is presented as a hierarchical listing of expandable
+ * nodes that let you see the contents of child nodes.
+ *
+ * Since we write to stdout, we can't display anything interactive,
+ * so we just fall back to `console.dir`.
+ */
+ dirxml = this.dir;
+
+ /** Writes the arguments to stderr */
+ warn = (...args: unknown[]): void => {
+ this.printFunc(
+ stringifyArgs(args, {
+ indentLevel: this.indentLevel
+ }) + "\n",
+ true
+ );
+ };
+
+ /** Writes the arguments to stderr */
+ error = this.warn;
+
+ /** Writes an error message to stderr if the assertion is `false`. If the
+ * assertion is `true`, nothing happens.
+ *
+ * ref: https://console.spec.whatwg.org/#assert
+ */
+ assert = (condition = false, ...args: unknown[]): void => {
+ if (condition) {
+ return;
+ }
+
+ if (args.length === 0) {
+ this.error("Assertion failed");
+ return;
+ }
+
+ const [first, ...rest] = args;
+
+ if (typeof first === "string") {
+ this.error(`Assertion failed: ${first}`, ...rest);
+ return;
+ }
+
+ this.error(`Assertion failed:`, ...args);
+ };
+
+ count = (label = "default"): void => {
+ label = String(label);
+
+ if (countMap.has(label)) {
+ const current = countMap.get(label) || 0;
+ countMap.set(label, current + 1);
+ } else {
+ countMap.set(label, 1);
+ }
+
+ this.info(`${label}: ${countMap.get(label)}`);
+ };
+
+ countReset = (label = "default"): void => {
+ label = String(label);
+
+ if (countMap.has(label)) {
+ countMap.set(label, 0);
+ } else {
+ this.warn(`Count for '${label}' does not exist`);
+ }
+ };
+
+ table = (data: unknown, properties?: string[]): void => {
+ if (properties !== undefined && !Array.isArray(properties)) {
+ throw new Error(
+ "The 'properties' argument must be of type Array. " +
+          `Received type ${typeof properties}`
+ );
+ }
+
+ if (data === null || typeof data !== "object") {
+ return this.log(data);
+ }
+
+ const objectValues: { [key: string]: string[] } = {};
+ const indexKeys: string[] = [];
+ const values: string[] = [];
+
+ const stringifyValue = (value: unknown): string =>
+ stringifyWithQuotes(value, new Set<unknown>(), 0, 1);
+ const toTable = (header: string[], body: string[][]): void =>
+ this.log(cliTable(header, body));
+ const createColumn = (value: unknown, shift?: number): string[] => [
+ ...(shift ? [...new Array(shift)].map((): string => "") : []),
+ stringifyValue(value)
+ ];
+
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ let resultData: any;
+ const isSet = data instanceof Set;
+ const isMap = data instanceof Map;
+ const valuesKey = "Values";
+ const indexKey = isSet || isMap ? "(iteration index)" : "(index)";
+
+ if (data instanceof Set) {
+ resultData = [...data];
+ } else if (data instanceof Map) {
+ let idx = 0;
+ resultData = {};
+
+ data.forEach(
+ (v: unknown, k: unknown): void => {
+ resultData[idx] = { Key: k, Values: v };
+ idx++;
+ }
+ );
+ } else {
+ resultData = data!;
+ }
+
+ Object.keys(resultData).forEach(
+ (k, idx): void => {
+ const value: unknown = resultData[k]!;
+
+ if (value !== null && typeof value === "object") {
+ Object.entries(value as { [key: string]: unknown }).forEach(
+ ([k, v]): void => {
+ if (properties && !properties.includes(k)) {
+ return;
+ }
+
+ if (objectValues[k]) {
+ objectValues[k].push(stringifyValue(v));
+ } else {
+ objectValues[k] = createColumn(v, idx);
+ }
+ }
+ );
+
+ values.push("");
+ } else {
+ values.push(stringifyValue(value));
+ }
+
+ indexKeys.push(k);
+ }
+ );
+
+ const headerKeys = Object.keys(objectValues);
+ const bodyValues = Object.values(objectValues);
+ const header = [
+ indexKey,
+ ...(properties || [
+ ...headerKeys,
+ !isMap && values.length > 0 && valuesKey
+ ])
+ ].filter(Boolean) as string[];
+ const body = [indexKeys, ...bodyValues, values];
+
+ toTable(header, body);
+ };
+
+ time = (label = "default"): void => {
+ label = String(label);
+
+ if (timerMap.has(label)) {
+ this.warn(`Timer '${label}' already exists`);
+ return;
+ }
+
+ timerMap.set(label, Date.now());
+ };
+
+ timeLog = (label = "default", ...args: unknown[]): void => {
+ label = String(label);
+
+ if (!timerMap.has(label)) {
+      this.warn(`Timer '${label}' does not exist`);
+ return;
+ }
+
+ const startTime = timerMap.get(label) as number;
+ const duration = Date.now() - startTime;
+
+ this.info(`${label}: ${duration}ms`, ...args);
+ };
+
+ timeEnd = (label = "default"): void => {
+ label = String(label);
+
+ if (!timerMap.has(label)) {
+      this.warn(`Timer '${label}' does not exist`);
+ return;
+ }
+
+ const startTime = timerMap.get(label) as number;
+ timerMap.delete(label);
+ const duration = Date.now() - startTime;
+
+ this.info(`${label}: ${duration}ms`);
+ };
+
+ group = (...label: unknown[]): void => {
+ if (label.length > 0) {
+ this.log(...label);
+ }
+ this.indentLevel += 2;
+ };
+
+ groupCollapsed = this.group;
+
+ groupEnd = (): void => {
+ if (this.indentLevel > 0) {
+ this.indentLevel -= 2;
+ }
+ };
+
+ clear = (): void => {
+ this.indentLevel = 0;
+ cursorTo(stdout, 0, 0);
+ clearScreenDown(stdout);
+ };
+
+ trace = (...args: unknown[]): void => {
+ const message = stringifyArgs(args, { indentLevel: 0 });
+ const err = {
+ name: "Trace",
+ message
+ };
+ // @ts-ignore
+ Error.captureStackTrace(err, this.trace);
+ this.error((err as Error).stack);
+ };
+
+ static [Symbol.hasInstance](instance: Console): boolean {
+ return instance[isConsoleInstance];
+ }
+}
+
+/** A symbol which can be used as a key for a custom method which will be called
+ * when `Deno.inspect()` is called, or when the object is logged to the console.
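+ *
+ *       // Illustrative usage (mirrors consoleTestWithCustomInspector in
+ *       // console_test.ts):
+ *       class A {
+ *         [Deno.customInspect](): string {
+ *           return "b";
+ *         }
+ *       }
+ *       console.log(new A()); // prints "b"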
+ */
+export const customInspect = Symbol.for("Deno.customInspect");
+
+/**
+ * `inspect()` converts input into string that has the same format
+ * as printed by `console.log(...)`;
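+ *
+ *       // Illustrative example (output format mirrors console_test.ts):
+ *       Deno.inspect({ a: 1 }); // returns `{ a: 1 }`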
+ */
+export function inspect(value: unknown, options?: ConsoleOptions): string {
+ const opts = options || {};
+ if (typeof value === "string") {
+ return value;
+ } else {
+ return stringify(
+ value,
+ new Set<unknown>(),
+ 0,
+ opts.depth != undefined ? opts.depth : DEFAULT_MAX_DEPTH
+ );
+ }
+}
diff --git a/cli/js/console_table.ts b/cli/js/console_table.ts
new file mode 100644
index 000000000..d74dc0127
--- /dev/null
+++ b/cli/js/console_table.ts
@@ -0,0 +1,94 @@
+// Copyright Joyent, Inc. and other Node contributors. MIT license.
+// Forked from Node's lib/internal/cli_table.js
+
+import { TextEncoder } from "./text_encoding.ts";
+import { hasOwnProperty } from "./util.ts";
+
+const encoder = new TextEncoder();
+
+const tableChars = {
+ middleMiddle: "─",
+ rowMiddle: "┼",
+ topRight: "┐",
+ topLeft: "┌",
+ leftMiddle: "├",
+ topMiddle: "┬",
+ bottomRight: "┘",
+ bottomLeft: "└",
+ bottomMiddle: "┴",
+ rightMiddle: "┤",
+ left: "│ ",
+ right: " │",
+ middle: " │ "
+};
+
+const colorRegExp = /\u001b\[\d\d?m/g;
+
+function removeColors(str: string): string {
+ return str.replace(colorRegExp, "");
+}
+
+function countBytes(str: string): number {
+ const normalized = removeColors(String(str)).normalize("NFC");
+
+ return encoder.encode(normalized).byteLength;
+}
+
+function renderRow(row: string[], columnWidths: number[]): string {
+ let out = tableChars.left;
+ for (let i = 0; i < row.length; i++) {
+ const cell = row[i];
+ const len = countBytes(cell);
+ const needed = (columnWidths[i] - len) / 2;
+    // `repeat` truncates a fractional count, so floor(needed) + ceil(needed)
+    // always adds up to the spaces we need while left justifying the output.
+ out += `${" ".repeat(needed)}${cell}${" ".repeat(Math.ceil(needed))}`;
+ if (i !== row.length - 1) {
+ out += tableChars.middle;
+ }
+ }
+ out += tableChars.right;
+ return out;
+}
+
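+// Renders `head` as the header row and `columns` (one array of cell strings
+// per column) as the body of a box-drawing table. For example, the output
+// asserted in console_test.ts for `console.table({ a: "test", b: 1 })` is:
+//
+//   ┌─────────┬────────┐
+//   │ (index) │ Values │
+//   ├─────────┼────────┤
+//   │    a    │ "test" │
+//   │    b    │   1    │
+//   └─────────┴────────┘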
+export function cliTable(head: string[], columns: string[][]): string {
+ const rows: string[][] = [];
+ const columnWidths = head.map((h: string): number => countBytes(h));
+ const longestColumn = columns.reduce(
+ (n: number, a: string[]): number => Math.max(n, a.length),
+ 0
+ );
+
+ for (let i = 0; i < head.length; i++) {
+ const column = columns[i];
+ for (let j = 0; j < longestColumn; j++) {
+ if (rows[j] === undefined) {
+ rows[j] = [];
+ }
+ const value = (rows[j][i] = hasOwnProperty(column, j) ? column[j] : "");
+ const width = columnWidths[i] || 0;
+ const counted = countBytes(value);
+ columnWidths[i] = Math.max(width, counted);
+ }
+ }
+
+ const divider = columnWidths.map(
+ (i: number): string => tableChars.middleMiddle.repeat(i + 2)
+ );
+
+ let result =
+ `${tableChars.topLeft}${divider.join(tableChars.topMiddle)}` +
+ `${tableChars.topRight}\n${renderRow(head, columnWidths)}\n` +
+ `${tableChars.leftMiddle}${divider.join(tableChars.rowMiddle)}` +
+ `${tableChars.rightMiddle}\n`;
+
+ for (const row of rows) {
+ result += `${renderRow(row, columnWidths)}\n`;
+ }
+
+ result +=
+ `${tableChars.bottomLeft}${divider.join(tableChars.bottomMiddle)}` +
+ tableChars.bottomRight;
+
+ return result;
+}
diff --git a/cli/js/console_test.ts b/cli/js/console_test.ts
new file mode 100644
index 000000000..903e65a82
--- /dev/null
+++ b/cli/js/console_test.ts
@@ -0,0 +1,698 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { assert, assertEquals, test } from "./test_util.ts";
+
+// Some of these APIs aren't exposed in the types and so we have to cast to any
+// in order to "trick" TypeScript.
+const {
+ Console,
+ customInspect,
+ stringifyArgs,
+ inspect,
+ write,
+ stdout
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+} = Deno as any;
+
+function stringify(...args: unknown[]): string {
+ return stringifyArgs(args).replace(/\n$/, "");
+}
+
+// test cases from web-platform-tests
+// via https://github.com/web-platform-tests/wpt/blob/master/console/console-is-a-namespace.any.js
+test(function consoleShouldBeANamespace(): void {
+ const prototype1 = Object.getPrototypeOf(console);
+ const prototype2 = Object.getPrototypeOf(prototype1);
+
+ assertEquals(Object.getOwnPropertyNames(prototype1).length, 0);
+ assertEquals(prototype2, Object.prototype);
+});
+
+test(function consoleHasRightInstance(): void {
+ assert(console instanceof Console);
+ assertEquals({} instanceof Console, false);
+});
+
+test(function consoleTestAssertShouldNotThrowError(): void {
+ console.assert(true);
+
+ let hasThrown = undefined;
+ try {
+ console.assert(false);
+ hasThrown = false;
+ } catch {
+ hasThrown = true;
+ }
+ assertEquals(hasThrown, false);
+});
+
+test(function consoleTestStringifyComplexObjects(): void {
+ assertEquals(stringify("foo"), "foo");
+ assertEquals(stringify(["foo", "bar"]), `[ "foo", "bar" ]`);
+ assertEquals(stringify({ foo: "bar" }), `{ foo: "bar" }`);
+});
+
+test(function consoleTestStringifyLongStrings(): void {
+ const veryLongString = "a".repeat(200);
+ // If we stringify an object containing the long string, it gets abbreviated.
+ let actual = stringify({ veryLongString });
+ assert(actual.includes("..."));
+ assert(actual.length < 200);
+ // However if we stringify the string itself, we get it exactly.
+ actual = stringify(veryLongString);
+ assertEquals(actual, veryLongString);
+});
+
+/* eslint-disable @typescript-eslint/explicit-function-return-type */
+test(function consoleTestStringifyCircular(): void {
+ class Base {
+ a = 1;
+ m1() {}
+ }
+
+ class Extended extends Base {
+ b = 2;
+ m2() {}
+ }
+
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ const nestedObj: any = {
+ num: 1,
+ bool: true,
+ str: "a",
+ method() {},
+ async asyncMethod() {},
+ *generatorMethod() {},
+ un: undefined,
+ nu: null,
+ arrowFunc: () => {},
+ extendedClass: new Extended(),
+ nFunc: new Function(),
+ extendedCstr: Extended
+ };
+
+ const circularObj = {
+ num: 2,
+ bool: false,
+ str: "b",
+ method() {},
+ un: undefined,
+ nu: null,
+ nested: nestedObj,
+ emptyObj: {},
+ arr: [1, "s", false, null, nestedObj],
+ baseClass: new Base()
+ };
+
+ nestedObj.o = circularObj;
+ const nestedObjExpected = `{ num, bool, str, method, asyncMethod, generatorMethod, un, nu, arrowFunc, extendedClass, nFunc, extendedCstr, o }`;
+
+ assertEquals(stringify(1), "1");
+ assertEquals(stringify(-0), "-0");
+ assertEquals(stringify(1n), "1n");
+ assertEquals(stringify("s"), "s");
+ assertEquals(stringify(false), "false");
+ assertEquals(stringify(new Number(1)), "[Number: 1]");
+ assertEquals(stringify(new Boolean(true)), "[Boolean: true]");
+ assertEquals(stringify(new String("deno")), `[String: "deno"]`);
+ assertEquals(stringify(/[0-9]*/), "/[0-9]*/");
+ assertEquals(
+ stringify(new Date("2018-12-10T02:26:59.002Z")),
+ "2018-12-10T02:26:59.002Z"
+ );
+ assertEquals(stringify(new Set([1, 2, 3])), "Set { 1, 2, 3 }");
+ assertEquals(
+ stringify(new Map([[1, "one"], [2, "two"]])),
+ `Map { 1 => "one", 2 => "two" }`
+ );
+ assertEquals(stringify(new WeakSet()), "WeakSet { [items unknown] }");
+ assertEquals(stringify(new WeakMap()), "WeakMap { [items unknown] }");
+ assertEquals(stringify(Symbol(1)), "Symbol(1)");
+ assertEquals(stringify(null), "null");
+ assertEquals(stringify(undefined), "undefined");
+ assertEquals(stringify(new Extended()), "Extended { a: 1, b: 2 }");
+ assertEquals(stringify(function f(): void {}), "[Function: f]");
+ assertEquals(
+ stringify(async function af(): Promise<void> {}),
+ "[AsyncFunction: af]"
+ );
+ assertEquals(stringify(function* gf() {}), "[GeneratorFunction: gf]");
+ assertEquals(
+ stringify(async function* agf() {}),
+ "[AsyncGeneratorFunction: agf]"
+ );
+ assertEquals(stringify(new Uint8Array([1, 2, 3])), "Uint8Array [ 1, 2, 3 ]");
+ assertEquals(stringify(Uint8Array.prototype), "TypedArray []");
+ assertEquals(
+ stringify({ a: { b: { c: { d: new Set([1]) } } } }),
+ "{ a: { b: { c: { d: [Set] } } } }"
+ );
+ assertEquals(stringify(nestedObj), nestedObjExpected);
+ assertEquals(stringify(JSON), "{}");
+ assertEquals(
+ stringify(console),
+ "{ printFunc, log, debug, info, dir, dirxml, warn, error, assert, count, countReset, table, time, timeLog, timeEnd, group, groupCollapsed, groupEnd, clear, trace, indentLevel }"
+ );
+ // test inspect is working the same
+ assertEquals(inspect(nestedObj), nestedObjExpected);
+});
+/* eslint-enable @typescript-eslint/explicit-function-return-type */
+
+test(function consoleTestStringifyWithDepth(): void {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ const nestedObj: any = { a: { b: { c: { d: { e: { f: 42 } } } } } };
+ assertEquals(
+ stringifyArgs([nestedObj], { depth: 3 }),
+ "{ a: { b: { c: [Object] } } }"
+ );
+ assertEquals(
+ stringifyArgs([nestedObj], { depth: 4 }),
+ "{ a: { b: { c: { d: [Object] } } } }"
+ );
+ assertEquals(stringifyArgs([nestedObj], { depth: 0 }), "[Object]");
+ assertEquals(
+ stringifyArgs([nestedObj], { depth: null }),
+ "{ a: { b: { c: { d: [Object] } } } }"
+ );
+ // test inspect is working the same way
+ assertEquals(
+ inspect(nestedObj, { depth: 4 }),
+ "{ a: { b: { c: { d: [Object] } } } }"
+ );
+});
+
+test(function consoleTestWithCustomInspector(): void {
+ class A {
+ [customInspect](): string {
+ return "b";
+ }
+ }
+
+ assertEquals(stringify(new A()), "b");
+});
+
+test(function consoleTestWithIntegerFormatSpecifier(): void {
+ assertEquals(stringify("%i"), "%i");
+ assertEquals(stringify("%i", 42.0), "42");
+ assertEquals(stringify("%i", 42), "42");
+ assertEquals(stringify("%i", "42"), "42");
+ assertEquals(stringify("%i", "42.0"), "42");
+ assertEquals(stringify("%i", 1.5), "1");
+ assertEquals(stringify("%i", -0.5), "0");
+ assertEquals(stringify("%i", ""), "NaN");
+ assertEquals(stringify("%i", Symbol()), "NaN");
+ assertEquals(stringify("%i %d", 42, 43), "42 43");
+ assertEquals(stringify("%d %i", 42), "42 %i");
+ assertEquals(stringify("%d", 12345678901234567890123), "1");
+ assertEquals(
+ stringify("%i", 12345678901234567890123n),
+ "12345678901234567890123n"
+ );
+});
+
+test(function consoleTestWithFloatFormatSpecifier(): void {
+ assertEquals(stringify("%f"), "%f");
+ assertEquals(stringify("%f", 42.0), "42");
+ assertEquals(stringify("%f", 42), "42");
+ assertEquals(stringify("%f", "42"), "42");
+ assertEquals(stringify("%f", "42.0"), "42");
+ assertEquals(stringify("%f", 1.5), "1.5");
+ assertEquals(stringify("%f", -0.5), "-0.5");
+ assertEquals(stringify("%f", Math.PI), "3.141592653589793");
+ assertEquals(stringify("%f", ""), "NaN");
+ assertEquals(stringify("%f", Symbol("foo")), "NaN");
+ assertEquals(stringify("%f", 5n), "5");
+ assertEquals(stringify("%f %f", 42, 43), "42 43");
+ assertEquals(stringify("%f %f", 42), "42 %f");
+});
+
+test(function consoleTestWithStringFormatSpecifier(): void {
+ assertEquals(stringify("%s"), "%s");
+ assertEquals(stringify("%s", undefined), "undefined");
+ assertEquals(stringify("%s", "foo"), "foo");
+ assertEquals(stringify("%s", 42), "42");
+ assertEquals(stringify("%s", "42"), "42");
+ assertEquals(stringify("%s %s", 42, 43), "42 43");
+ assertEquals(stringify("%s %s", 42), "42 %s");
+ assertEquals(stringify("%s", Symbol("foo")), "Symbol(foo)");
+});
+
+test(function consoleTestWithObjectFormatSpecifier(): void {
+ assertEquals(stringify("%o"), "%o");
+ assertEquals(stringify("%o", 42), "42");
+ assertEquals(stringify("%o", "foo"), "foo");
+ assertEquals(stringify("o: %o, a: %O", {}, []), "o: {}, a: []");
+ assertEquals(stringify("%o", { a: 42 }), "{ a: 42 }");
+ assertEquals(
+ stringify("%o", { a: { b: { c: { d: new Set([1]) } } } }),
+ "{ a: { b: { c: { d: [Set] } } } }"
+ );
+});
+
+test(function consoleTestWithVariousOrInvalidFormatSpecifier(): void {
+ assertEquals(stringify("%s:%s"), "%s:%s");
+ assertEquals(stringify("%i:%i"), "%i:%i");
+ assertEquals(stringify("%d:%d"), "%d:%d");
+ assertEquals(stringify("%%s%s", "foo"), "%sfoo");
+ assertEquals(stringify("%s:%s", undefined), "undefined:%s");
+ assertEquals(stringify("%s:%s", "foo", "bar"), "foo:bar");
+ assertEquals(stringify("%s:%s", "foo", "bar", "baz"), "foo:bar baz");
+ assertEquals(stringify("%%%s%%", "hi"), "%hi%");
+ assertEquals(stringify("%d:%d", 12), "12:%d");
+ assertEquals(stringify("%i:%i", 12), "12:%i");
+ assertEquals(stringify("%f:%f", 12), "12:%f");
+ assertEquals(stringify("o: %o, a: %o", {}), "o: {}, a: %o");
+ assertEquals(stringify("abc%", 1), "abc% 1");
+});
+
+test(function consoleTestCallToStringOnLabel(): void {
+ const methods = ["count", "countReset", "time", "timeLog", "timeEnd"];
+
+ for (const method of methods) {
+ let hasCalled = false;
+
+ console[method]({
+ toString(): void {
+ hasCalled = true;
+ }
+ });
+
+ assertEquals(hasCalled, true);
+ }
+});
+
+test(function consoleTestError(): void {
+ class MyError extends Error {
+ constructor(errStr: string) {
+ super(errStr);
+ this.name = "MyError";
+ }
+ }
+ try {
+ throw new MyError("This is an error");
+ } catch (e) {
+ assert(
+ stringify(e)
+ .split("\n")[0] // error has been caught
+ .includes("MyError: This is an error")
+ );
+ }
+});
+
+test(function consoleTestClear(): void {
+ const stdoutWrite = stdout.write;
+ const uint8 = new TextEncoder().encode("\x1b[1;1H" + "\x1b[0J");
+ let buffer = new Uint8Array(0);
+
+ stdout.write = async (u8: Uint8Array): Promise<number> => {
+ const tmp = new Uint8Array(buffer.length + u8.length);
+ tmp.set(buffer, 0);
+ tmp.set(u8, buffer.length);
+ buffer = tmp;
+
+ return await write(stdout.rid, u8);
+ };
+ console.clear();
+ stdout.write = stdoutWrite;
+ assertEquals(buffer, uint8);
+});
+
+// Test that console methods still work when detached from the console object
+test(function consoleDetachedLog(): void {
+ const log = console.log;
+ const dir = console.dir;
+ const dirxml = console.dirxml;
+ const debug = console.debug;
+ const info = console.info;
+ const warn = console.warn;
+ const error = console.error;
+ const consoleAssert = console.assert;
+ const consoleCount = console.count;
+ const consoleCountReset = console.countReset;
+ const consoleTable = console.table;
+ const consoleTime = console.time;
+ const consoleTimeLog = console.timeLog;
+ const consoleTimeEnd = console.timeEnd;
+ const consoleGroup = console.group;
+ const consoleGroupEnd = console.groupEnd;
+ const consoleClear = console.clear;
+ log("Hello world");
+ dir("Hello world");
+ dirxml("Hello world");
+ debug("Hello world");
+ info("Hello world");
+ warn("Hello world");
+ error("Hello world");
+ consoleAssert(true);
+ consoleCount("Hello world");
+ consoleCountReset("Hello world");
+ consoleTable({ test: "Hello world" });
+ consoleTime("Hello world");
+ consoleTimeLog("Hello world");
+ consoleTimeEnd("Hello world");
+ consoleGroup("Hello world");
+ consoleGroupEnd();
+ consoleClear();
+});
+
+class StringBuffer {
+ chunks: string[] = [];
+ add(x: string): void {
+ this.chunks.push(x);
+ }
+ toString(): string {
+ return this.chunks.join("");
+ }
+}
+
+type ConsoleExamineFunc = (
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ csl: any,
+ out: StringBuffer,
+ err?: StringBuffer,
+ both?: StringBuffer
+) => void;
+
+function mockConsole(f: ConsoleExamineFunc): void {
+ const out = new StringBuffer();
+ const err = new StringBuffer();
+ const both = new StringBuffer();
+ const csl = new Console(
+ (x: string, isErr: boolean, printsNewLine: boolean): void => {
+ const content = x + (printsNewLine ? "\n" : "");
+ const buf = isErr ? err : out;
+ buf.add(content);
+ both.add(content);
+ }
+ );
+ f(csl, out, err, both);
+}
+
+// console.group test
+test(function consoleGroup(): void {
+ mockConsole(
+ (console, out): void => {
+ console.group("1");
+ console.log("2");
+ console.group("3");
+ console.log("4");
+ console.groupEnd();
+ console.groupEnd();
+ console.log("5");
+ console.log("6");
+
+ assertEquals(
+ out.toString(),
+ `1
+ 2
+ 3
+ 4
+5
+6
+`
+ );
+ }
+ );
+});
+
+// console.group with console.warn test
+test(function consoleGroupWarn(): void {
+ mockConsole(
+ (console, _out, _err, both): void => {
+ console.warn("1");
+ console.group();
+ console.warn("2");
+ console.group();
+ console.warn("3");
+ console.groupEnd();
+ console.warn("4");
+ console.groupEnd();
+ console.warn("5");
+
+ console.warn("6");
+ console.warn("7");
+ assertEquals(
+ both.toString(),
+ `1
+ 2
+ 3
+ 4
+5
+6
+7
+`
+ );
+ }
+ );
+});
+
+// console.table test
+test(function consoleTable(): void {
+ mockConsole(
+ (console, out): void => {
+ console.table({ a: "test", b: 1 });
+ assertEquals(
+ out.toString(),
+ `┌─────────┬────────┐
+│ (index) │ Values │
+├─────────┼────────┤
+│ a │ "test" │
+│ b │ 1 │
+└─────────┴────────┘
+`
+ );
+ }
+ );
+ mockConsole(
+ (console, out): void => {
+ console.table({ a: { b: 10 }, b: { b: 20, c: 30 } }, ["c"]);
+ assertEquals(
+ out.toString(),
+ `┌─────────┬────┐
+│ (index) │ c │
+├─────────┼────┤
+│ a │ │
+│ b │ 30 │
+└─────────┴────┘
+`
+ );
+ }
+ );
+ mockConsole(
+ (console, out): void => {
+ console.table([1, 2, [3, [4]], [5, 6], [[7], [8]]]);
+ assertEquals(
+ out.toString(),
+ `┌─────────┬───────┬───────┬────────┐
+│ (index) │ 0 │ 1 │ Values │
+├─────────┼───────┼───────┼────────┤
+│ 0 │ │ │ 1 │
+│ 1 │ │ │ 2 │
+│ 2 │ 3 │ [ 4 ] │ │
+│ 3 │ 5 │ 6 │ │
+│ 4 │ [ 7 ] │ [ 8 ] │ │
+└─────────┴───────┴───────┴────────┘
+`
+ );
+ }
+ );
+ mockConsole(
+ (console, out): void => {
+ console.table(new Set([1, 2, 3, "test"]));
+ assertEquals(
+ out.toString(),
+ `┌───────────────────┬────────┐
+│ (iteration index) │ Values │
+├───────────────────┼────────┤
+│ 0 │ 1 │
+│ 1 │ 2 │
+│ 2 │ 3 │
+│ 3 │ "test" │
+└───────────────────┴────────┘
+`
+ );
+ }
+ );
+ mockConsole(
+ (console, out): void => {
+ console.table(new Map([[1, "one"], [2, "two"]]));
+ assertEquals(
+ out.toString(),
+ `┌───────────────────┬─────┬────────┐
+│ (iteration index) │ Key │ Values │
+├───────────────────┼─────┼────────┤
+│ 0 │ 1 │ "one" │
+│ 1 │ 2 │ "two" │
+└───────────────────┴─────┴────────┘
+`
+ );
+ }
+ );
+ mockConsole(
+ (console, out): void => {
+ console.table({
+ a: true,
+ b: { c: { d: 10 }, e: [1, 2, [5, 6]] },
+ f: "test",
+ g: new Set([1, 2, 3, "test"]),
+ h: new Map([[1, "one"]])
+ });
+ assertEquals(
+ out.toString(),
+ `┌─────────┬───────────┬───────────────────┬────────┐
+│ (index) │ c │ e │ Values │
+├─────────┼───────────┼───────────────────┼────────┤
+│ a │ │ │ true │
+│ b │ { d: 10 } │ [ 1, 2, [Array] ] │ │
+│ f │ │ │ "test" │
+│ g │ │ │ │
+│ h │ │ │ │
+└─────────┴───────────┴───────────────────┴────────┘
+`
+ );
+ }
+ );
+ mockConsole(
+ (console, out): void => {
+ console.table([
+ 1,
+ "test",
+ false,
+ { a: 10 },
+ ["test", { b: 20, c: "test" }]
+ ]);
+ assertEquals(
+ out.toString(),
+ `┌─────────┬────────┬──────────────────────┬────┬────────┐
+│ (index) │ 0 │ 1 │ a │ Values │
+├─────────┼────────┼──────────────────────┼────┼────────┤
+│ 0 │ │ │ │ 1 │
+│ 1 │ │ │ │ "test" │
+│ 2 │ │ │ │ false │
+│ 3 │ │ │ 10 │ │
+│ 4 │ "test" │ { b: 20, c: "test" } │ │ │
+└─────────┴────────┴──────────────────────┴────┴────────┘
+`
+ );
+ }
+ );
+ mockConsole(
+ (console, out): void => {
+ console.table([]);
+ assertEquals(
+ out.toString(),
+ `┌─────────┐
+│ (index) │
+├─────────┤
+└─────────┘
+`
+ );
+ }
+ );
+ mockConsole(
+ (console, out): void => {
+ console.table({});
+ assertEquals(
+ out.toString(),
+ `┌─────────┐
+│ (index) │
+├─────────┤
+└─────────┘
+`
+ );
+ }
+ );
+ mockConsole(
+ (console, out): void => {
+ console.table(new Set());
+ assertEquals(
+ out.toString(),
+ `┌───────────────────┐
+│ (iteration index) │
+├───────────────────┤
+└───────────────────┘
+`
+ );
+ }
+ );
+ mockConsole(
+ (console, out): void => {
+ console.table(new Map());
+ assertEquals(
+ out.toString(),
+ `┌───────────────────┐
+│ (iteration index) │
+├───────────────────┤
+└───────────────────┘
+`
+ );
+ }
+ );
+ mockConsole(
+ (console, out): void => {
+ console.table("test");
+ assertEquals(out.toString(), "test\n");
+ }
+ );
+});
+
+// console.log(Error) test
+test(function consoleLogShouldNotThrowError(): void {
+ let result = 0;
+ try {
+ console.log(new Error("foo"));
+ result = 1;
+ } catch (e) {
+ result = 2;
+ }
+ assertEquals(result, 1);
+
+  // errors written to the console should not include "Uncaught"
+ mockConsole(
+ (console, out): void => {
+ console.log(new Error("foo"));
+ assertEquals(out.toString().includes("Uncaught"), false);
+ }
+ );
+});
+
+// console.dir test
+test(function consoleDir(): void {
+ mockConsole(
+ (console, out): void => {
+ console.dir("DIR");
+ assertEquals(out.toString(), "DIR\n");
+ }
+ );
+ mockConsole(
+ (console, out): void => {
+ console.dir("DIR", { indentLevel: 2 });
+ assertEquals(out.toString(), " DIR\n");
+ }
+ );
+});
+
+// console.dir test
+test(function consoleDirXml(): void {
+ mockConsole(
+ (console, out): void => {
+ console.dirxml("DIRXML");
+ assertEquals(out.toString(), "DIRXML\n");
+ }
+ );
+ mockConsole(
+ (console, out): void => {
+ console.dirxml("DIRXML", { indentLevel: 2 });
+ assertEquals(out.toString(), " DIRXML\n");
+ }
+ );
+});
+
+// console.trace test
+test(function consoleTrace(): void {
+ mockConsole(
+ (console, _out, err): void => {
+ console.trace("%s", "custom message");
+ assert(err.toString().includes("Trace: custom message"));
+ }
+ );
+});
diff --git a/cli/js/copy_file.ts b/cli/js/copy_file.ts
new file mode 100644
index 000000000..94d2b63db
--- /dev/null
+++ b/cli/js/copy_file.ts
@@ -0,0 +1,30 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+
+/** Copies the contents of a file to another by name synchronously.
+ * Creates a new file if the target does not exist; if the target exists,
+ * its original content is overwritten.
+ *
+ * The permissions of the original file are also copied
+ * to the destination.
+ *
+ * Deno.copyFileSync("from.txt", "to.txt");
+ */
+export function copyFileSync(from: string, to: string): void {
+ sendSync(dispatch.OP_COPY_FILE, { from, to });
+}
+
+/** Copies the contents of a file to another by name.
+ *
+ * Creates a new file if the target does not exist; if the target exists,
+ * its original content is overwritten.
+ *
+ * The permissions of the original file are also copied
+ * to the destination.
+ *
+ * await Deno.copyFile("from.txt", "to.txt");
+ */
+export async function copyFile(from: string, to: string): Promise<void> {
+ await sendAsync(dispatch.OP_COPY_FILE, { from, to });
+}
diff --git a/cli/js/copy_file_test.ts b/cli/js/copy_file_test.ts
new file mode 100644
index 000000000..72ae43f3e
--- /dev/null
+++ b/cli/js/copy_file_test.ts
@@ -0,0 +1,163 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert, assertEquals } from "./test_util.ts";
+
+function readFileString(filename: string): string {
+ const dataRead = Deno.readFileSync(filename);
+ const dec = new TextDecoder("utf-8");
+ return dec.decode(dataRead);
+}
+
+function writeFileString(filename: string, s: string): void {
+ const enc = new TextEncoder();
+ const data = enc.encode(s);
+ Deno.writeFileSync(filename, data, { perm: 0o666 });
+}
+
+function assertSameContent(filename1: string, filename2: string): void {
+ const data1 = Deno.readFileSync(filename1);
+ const data2 = Deno.readFileSync(filename2);
+ assertEquals(data1, data2);
+}
+
+testPerm({ read: true, write: true }, function copyFileSyncSuccess(): void {
+ const tempDir = Deno.makeTempDirSync();
+ const fromFilename = tempDir + "/from.txt";
+ const toFilename = tempDir + "/to.txt";
+ writeFileString(fromFilename, "Hello world!");
+ Deno.copyFileSync(fromFilename, toFilename);
+ // No change to original file
+ assertEquals(readFileString(fromFilename), "Hello world!");
+ // Original == Dest
+ assertSameContent(fromFilename, toFilename);
+});
+
+testPerm({ write: true, read: true }, function copyFileSyncFailure(): void {
+ const tempDir = Deno.makeTempDirSync();
+ const fromFilename = tempDir + "/from.txt";
+ const toFilename = tempDir + "/to.txt";
+  // We skip the initial write here; from.txt does not exist
+ let err;
+ try {
+ Deno.copyFileSync(fromFilename, toFilename);
+ } catch (e) {
+ err = e;
+ }
+ assert(!!err);
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: true, read: false }, function copyFileSyncPerm1(): void {
+ let caughtError = false;
+ try {
+ Deno.copyFileSync("/from.txt", "/to.txt");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ write: false, read: true }, function copyFileSyncPerm2(): void {
+ let caughtError = false;
+ try {
+ Deno.copyFileSync("/from.txt", "/to.txt");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ read: true, write: true }, function copyFileSyncOverwrite(): void {
+ const tempDir = Deno.makeTempDirSync();
+ const fromFilename = tempDir + "/from.txt";
+ const toFilename = tempDir + "/to.txt";
+ writeFileString(fromFilename, "Hello world!");
+ // Make Dest exist and have different content
+ writeFileString(toFilename, "Goodbye!");
+ Deno.copyFileSync(fromFilename, toFilename);
+ // No change to original file
+ assertEquals(readFileString(fromFilename), "Hello world!");
+ // Original == Dest
+ assertSameContent(fromFilename, toFilename);
+});
+
+testPerm({ read: true, write: true }, async function copyFileSuccess(): Promise<
+ void
+> {
+ const tempDir = Deno.makeTempDirSync();
+ const fromFilename = tempDir + "/from.txt";
+ const toFilename = tempDir + "/to.txt";
+ writeFileString(fromFilename, "Hello world!");
+ await Deno.copyFile(fromFilename, toFilename);
+ // No change to original file
+ assertEquals(readFileString(fromFilename), "Hello world!");
+ // Original == Dest
+ assertSameContent(fromFilename, toFilename);
+});
+
+testPerm({ read: true, write: true }, async function copyFileFailure(): Promise<
+ void
+> {
+ const tempDir = Deno.makeTempDirSync();
+ const fromFilename = tempDir + "/from.txt";
+ const toFilename = tempDir + "/to.txt";
+  // We skip the initial write here; from.txt does not exist
+ let err;
+ try {
+ await Deno.copyFile(fromFilename, toFilename);
+ } catch (e) {
+ err = e;
+ }
+ assert(!!err);
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm(
+ { read: true, write: true },
+ async function copyFileOverwrite(): Promise<void> {
+ const tempDir = Deno.makeTempDirSync();
+ const fromFilename = tempDir + "/from.txt";
+ const toFilename = tempDir + "/to.txt";
+ writeFileString(fromFilename, "Hello world!");
+ // Make Dest exist and have different content
+ writeFileString(toFilename, "Goodbye!");
+ await Deno.copyFile(fromFilename, toFilename);
+ // No change to original file
+ assertEquals(readFileString(fromFilename), "Hello world!");
+ // Original == Dest
+ assertSameContent(fromFilename, toFilename);
+ }
+);
+
+testPerm({ read: false, write: true }, async function copyFilePerm1(): Promise<
+ void
+> {
+ let caughtError = false;
+ try {
+ await Deno.copyFile("/from.txt", "/to.txt");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ read: true, write: false }, async function copyFilePerm2(): Promise<
+ void
+> {
+ let caughtError = false;
+ try {
+ await Deno.copyFile("/from.txt", "/to.txt");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
diff --git a/cli/js/core.ts b/cli/js/core.ts
new file mode 100644
index 000000000..d394d822f
--- /dev/null
+++ b/cli/js/core.ts
@@ -0,0 +1,6 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { window } from "./window.ts";
+
+// This allows us to access core in the API even if
+// `window.Deno` is deleted.
+export const core = window.Deno.core as DenoCore;
diff --git a/cli/js/custom_event.ts b/cli/js/custom_event.ts
new file mode 100644
index 000000000..922abd4b1
--- /dev/null
+++ b/cli/js/custom_event.ts
@@ -0,0 +1,48 @@
+// Copyright 2018 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import * as event from "./event.ts";
+import { getPrivateValue, requiredArguments } from "./util.ts";
+
+// WeakMaps are recommended for private attributes (see MDN link below)
+// https://developer.mozilla.org/en-US/docs/Archive/Add-ons/Add-on_SDK/Guides/Contributor_s_Guide/Private_Properties#Using_WeakMaps
+export const customEventAttributes = new WeakMap();
+
+export class CustomEvent extends event.Event implements domTypes.CustomEvent {
+ constructor(
+ type: string,
+ customEventInitDict: domTypes.CustomEventInit = {}
+ ) {
+ requiredArguments("CustomEvent", arguments.length, 1);
+ super(type, customEventInitDict);
+ const { detail = null } = customEventInitDict;
+ customEventAttributes.set(this, { detail });
+ }
+
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ get detail(): any {
+ return getPrivateValue(this, customEventAttributes, "detail");
+ }
+
+ initCustomEvent(
+ type: string,
+ bubbles?: boolean,
+ cancelable?: boolean,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ detail?: any
+ ): void {
+ if (this.dispatched) {
+ return;
+ }
+
+ customEventAttributes.set(this, { detail });
+ }
+
+ get [Symbol.toStringTag](): string {
+ return "CustomEvent";
+ }
+}
+
+/** Make the `detail` accessor defined on the class above enumerable, so that
+ * it shows up when a `CustomEvent` instance is inspected or enumerated.
+ */
+Reflect.defineProperty(CustomEvent.prototype, "detail", { enumerable: true });
diff --git a/cli/js/custom_event_test.ts b/cli/js/custom_event_test.ts
new file mode 100644
index 000000000..4d2eb2c16
--- /dev/null
+++ b/cli/js/custom_event_test.ts
@@ -0,0 +1,27 @@
+// Copyright 2018 the Deno authors. All rights reserved. MIT license.
+import { test, assertEquals } from "./test_util.ts";
+
+test(function customEventInitializedWithDetail(): void {
+ const type = "touchstart";
+ const detail = { message: "hello" };
+ const customEventInit = {
+ bubbles: true,
+ cancelable: true,
+ detail
+ } as CustomEventInit;
+ const event = new CustomEvent(type, customEventInit);
+
+ assertEquals(event.bubbles, true);
+ assertEquals(event.cancelable, true);
+ assertEquals(event.currentTarget, null);
+ assertEquals(event.detail, detail);
+ assertEquals(event.isTrusted, false);
+ assertEquals(event.target, null);
+ assertEquals(event.type, type);
+});
+
+test(function toStringShouldBeWebCompatibility(): void {
+ const type = "touchstart";
+ const event = new CustomEvent(type, {});
+ assertEquals(event.toString(), "[object CustomEvent]");
+});
diff --git a/cli/js/deno.ts b/cli/js/deno.ts
new file mode 100644
index 000000000..511e4f0ec
--- /dev/null
+++ b/cli/js/deno.ts
@@ -0,0 +1,119 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+// Public deno module.
+export { env, exit, isTTY, execPath, homeDir, hostname } from "./os.ts";
+export { chdir, cwd } from "./dir.ts";
+export {
+ File,
+ open,
+ openSync,
+ stdin,
+ stdout,
+ stderr,
+ read,
+ readSync,
+ write,
+ writeSync,
+ seek,
+ seekSync,
+ close,
+ OpenMode
+} from "./files.ts";
+export {
+ EOF,
+ copy,
+ toAsyncIterator,
+ SeekMode,
+ Reader,
+ SyncReader,
+ Writer,
+ SyncWriter,
+ Closer,
+ Seeker,
+ SyncSeeker,
+ ReadCloser,
+ WriteCloser,
+ ReadSeeker,
+ WriteSeeker,
+ ReadWriteCloser,
+ ReadWriteSeeker
+} from "./io.ts";
+export {
+ Buffer,
+ readAll,
+ readAllSync,
+ writeAll,
+ writeAllSync
+} from "./buffer.ts";
+export { mkdirSync, mkdir } from "./mkdir.ts";
+export {
+ makeTempDirSync,
+ makeTempDir,
+ MakeTempDirOptions
+} from "./make_temp_dir.ts";
+export { chmodSync, chmod } from "./chmod.ts";
+export { chownSync, chown } from "./chown.ts";
+export { utimeSync, utime } from "./utime.ts";
+export { removeSync, remove, RemoveOption } from "./remove.ts";
+export { renameSync, rename } from "./rename.ts";
+export { readFileSync, readFile } from "./read_file.ts";
+export { readDirSync, readDir } from "./read_dir.ts";
+export { copyFileSync, copyFile } from "./copy_file.ts";
+export { readlinkSync, readlink } from "./read_link.ts";
+export { statSync, lstatSync, stat, lstat } from "./stat.ts";
+export { linkSync, link } from "./link.ts";
+export { symlinkSync, symlink } from "./symlink.ts";
+export { writeFileSync, writeFile, WriteFileOptions } from "./write_file.ts";
+export { applySourceMap } from "./error_stack.ts";
+export { ErrorKind, DenoError } from "./errors.ts";
+export {
+ permissions,
+ revokePermission,
+ Permission,
+ Permissions
+} from "./permissions.ts";
+export { truncateSync, truncate } from "./truncate.ts";
+export { FileInfo } from "./file_info.ts";
+export { connect, dial, listen, Listener, Conn } from "./net.ts";
+export { dialTLS } from "./tls.ts";
+export { metrics, Metrics } from "./metrics.ts";
+export { resources } from "./resources.ts";
+export {
+ kill,
+ run,
+ RunOptions,
+ Process,
+ ProcessStatus,
+ Signal
+} from "./process.ts";
+export { inspect, customInspect } from "./console.ts";
+export { build, OperatingSystem, Arch } from "./build.ts";
+export { version } from "./version.ts";
+export const args: string[] = [];
+
+// These are internal Deno APIs. We are marking them as internal so they do not
+// appear in the runtime type library.
+/** @internal */
+export { core } from "./core.ts";
+
+/** @internal */
+export { setPrepareStackTrace } from "./error_stack.ts";
+
+// TODO Don't expose Console nor stringifyArgs.
+/** @internal */
+export { Console, stringifyArgs } from "./console.ts";
+// TODO Don't expose DomIterableMixin.
+/** @internal */
+export { DomIterableMixin } from "./mixins/dom_iterable.ts";
+
+/** The current process id of the runtime. */
+export let pid: number;
+
+/** Reflects the NO_COLOR environment variable: https://no-color.org/ */
+export let noColor: boolean;
+
+// TODO(ry) This should not be exposed to Deno.
+export function _setGlobals(pid_: number, noColor_: boolean): void {
+ pid = pid_;
+ noColor = noColor_;
+}
diff --git a/cli/js/diagnostics.ts b/cli/js/diagnostics.ts
new file mode 100644
index 000000000..7cdb154b9
--- /dev/null
+++ b/cli/js/diagnostics.ts
@@ -0,0 +1,217 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+// Diagnostic provides an abstraction for advice/errors received from a
+// compiler, which is strongly influenced by the format of TypeScript
+// diagnostics.
+
+/** The log category for a diagnostic message */
+export enum DiagnosticCategory {
+ Log = 0,
+ Debug = 1,
+ Info = 2,
+ Error = 3,
+ Warning = 4,
+ Suggestion = 5
+}
+
+export interface DiagnosticMessageChain {
+ message: string;
+ category: DiagnosticCategory;
+ code: number;
+ next?: DiagnosticMessageChain[];
+}
+
+export interface DiagnosticItem {
+ /** A string message summarizing the diagnostic. */
+ message: string;
+
+ /** An ordered array of further diagnostics. */
+ messageChain?: DiagnosticMessageChain;
+
+ /** Information related to the diagnostic. This is present when there is a
+ * suggestion or other additional diagnostic information */
+ relatedInformation?: DiagnosticItem[];
+
+ /** The text of the source line related to the diagnostic */
+ sourceLine?: string;
+
+ /** The line number that is related to the diagnostic */
+ lineNumber?: number;
+
+ /** The name of the script resource related to the diagnostic */
+ scriptResourceName?: string;
+
+ /** The start position related to the diagnostic */
+ startPosition?: number;
+
+ /** The end position related to the diagnostic */
+ endPosition?: number;
+
+ /** The category of the diagnostic */
+ category: DiagnosticCategory;
+
+ /** A number identifier */
+ code: number;
+
+  /** The start column of the sourceLine related to the diagnostic */
+ startColumn?: number;
+
+ /** The end column of the sourceLine related to the diagnostic */
+ endColumn?: number;
+}
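+
+// For illustration only: a hypothetical DiagnosticItem for a type error could
+// look roughly like the following (line/column values are 0-based, as produced
+// by the TypeScript APIs used below):
+//
+//   {
+//     message: "Type '0' is not assignable to type 'string'.",
+//     category: DiagnosticCategory.Error,
+//     code: 2322,
+//     scriptResourceName: "file:///example.ts",
+//     lineNumber: 0,
+//     sourceLine: "const x: string = 0;",
+//     startColumn: 6,
+//     endColumn: 7,
+//     startPosition: 6,
+//     endPosition: 7
+//   }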
+
+export interface Diagnostic {
+ /** An array of diagnostic items. */
+ items: DiagnosticItem[];
+}
+
+interface SourceInformation {
+ sourceLine: string;
+ lineNumber: number;
+ scriptResourceName: string;
+ startColumn: number;
+ endColumn: number;
+}
+
+function fromDiagnosticCategory(
+ category: ts.DiagnosticCategory
+): DiagnosticCategory {
+ switch (category) {
+ case ts.DiagnosticCategory.Error:
+ return DiagnosticCategory.Error;
+ case ts.DiagnosticCategory.Message:
+ return DiagnosticCategory.Info;
+ case ts.DiagnosticCategory.Suggestion:
+ return DiagnosticCategory.Suggestion;
+ case ts.DiagnosticCategory.Warning:
+ return DiagnosticCategory.Warning;
+ default:
+ throw new Error(
+ `Unexpected DiagnosticCategory: "${category}"/"${
+ ts.DiagnosticCategory[category]
+ }"`
+ );
+ }
+}
+
+function getSourceInformation(
+ sourceFile: ts.SourceFile,
+ start: number,
+ length: number
+): SourceInformation {
+ const scriptResourceName = sourceFile.fileName;
+ const {
+ line: lineNumber,
+ character: startColumn
+ } = sourceFile.getLineAndCharacterOfPosition(start);
+ const endPosition = sourceFile.getLineAndCharacterOfPosition(start + length);
+ const endColumn =
+ lineNumber === endPosition.line ? endPosition.character : startColumn;
+ const lastLineInFile = sourceFile.getLineAndCharacterOfPosition(
+ sourceFile.text.length
+ ).line;
+ const lineStart = sourceFile.getPositionOfLineAndCharacter(lineNumber, 0);
+ const lineEnd =
+ lineNumber < lastLineInFile
+ ? sourceFile.getPositionOfLineAndCharacter(lineNumber + 1, 0)
+ : sourceFile.text.length;
+ const sourceLine = sourceFile.text
+ .slice(lineStart, lineEnd)
+ .replace(/\s+$/g, "")
+ .replace("\t", " ");
+ return {
+ sourceLine,
+ lineNumber,
+ scriptResourceName,
+ startColumn,
+ endColumn
+ };
+}
+
+/** Converts a TypeScript diagnostic message chain to a Deno one. */
+function fromDiagnosticMessageChain(
+ messageChain: ts.DiagnosticMessageChain[] | undefined
+): DiagnosticMessageChain[] | undefined {
+ if (!messageChain) {
+ return undefined;
+ }
+
+ return messageChain.map(({ messageText: message, code, category, next }) => {
+ return {
+ message,
+ code,
+ category: fromDiagnosticCategory(category),
+ next: fromDiagnosticMessageChain(next)
+ };
+ });
+}
+
+/** Parse out information from a TypeScript diagnostic structure. */
+function parseDiagnostic(
+ item: ts.Diagnostic | ts.DiagnosticRelatedInformation
+): DiagnosticItem {
+ const {
+ messageText,
+ category: sourceCategory,
+ code,
+ file,
+ start: startPosition,
+ length
+ } = item;
+ const sourceInfo =
+ file && startPosition && length
+ ? getSourceInformation(file, startPosition, length)
+ : undefined;
+ const endPosition =
+ startPosition && length ? startPosition + length : undefined;
+ const category = fromDiagnosticCategory(sourceCategory);
+
+ let message: string;
+ let messageChain: DiagnosticMessageChain | undefined;
+ if (typeof messageText === "string") {
+ message = messageText;
+ } else {
+ message = messageText.messageText;
+ messageChain = fromDiagnosticMessageChain([messageText])![0];
+ }
+
+ const base = {
+ message,
+ messageChain,
+ code,
+ category,
+ startPosition,
+ endPosition
+ };
+
+ return sourceInfo ? { ...base, ...sourceInfo } : base;
+}
+
+/** Convert a diagnostic related information array into a Deno diagnostic
+ * array. */
+function parseRelatedInformation(
+ relatedInformation: readonly ts.DiagnosticRelatedInformation[]
+): DiagnosticItem[] {
+ const result: DiagnosticItem[] = [];
+ for (const item of relatedInformation) {
+ result.push(parseDiagnostic(item));
+ }
+ return result;
+}
+
+/** Convert TypeScript diagnostics to Deno diagnostics. */
+export function fromTypeScriptDiagnostic(
+ diagnostics: readonly ts.Diagnostic[]
+): Diagnostic {
+ const items: DiagnosticItem[] = [];
+ for (const sourceDiagnostic of diagnostics) {
+ const item: DiagnosticItem = parseDiagnostic(sourceDiagnostic);
+ if (sourceDiagnostic.relatedInformation) {
+ item.relatedInformation = parseRelatedInformation(
+ sourceDiagnostic.relatedInformation
+ );
+ }
+ items.push(item);
+ }
+ return { items };
+}
diff --git a/cli/js/dir.ts b/cli/js/dir.ts
new file mode 100644
index 000000000..ef1111555
--- /dev/null
+++ b/cli/js/dir.ts
@@ -0,0 +1,22 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+
+/**
+ * `cwd()` returns a string representing the current working directory.
+ * If the current directory can be reached via multiple paths
+ * (due to symbolic links), `cwd()` may return
+ * any one of them.
+ * Throws a `NotFound` exception if the directory is not available.
+ */
+export function cwd(): string {
+ return sendSync(dispatch.OP_CWD);
+}
+
+/**
+ * `chdir()` changes the current working directory to the given path.
+ * Throws a `NotFound` exception if the directory is not available.
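+ *
+ *       // illustrative usage (hypothetical path):
+ *       Deno.chdir("/tmp/my-work-dir");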
+ */
+export function chdir(directory: string): void {
+ sendSync(dispatch.OP_CHDIR, { directory });
+}
diff --git a/cli/js/dir_test.ts b/cli/js/dir_test.ts
new file mode 100644
index 000000000..6c4e36d7a
--- /dev/null
+++ b/cli/js/dir_test.ts
@@ -0,0 +1,54 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, testPerm, assert, assertEquals } from "./test_util.ts";
+
+test(function dirCwdNotNull(): void {
+ assert(Deno.cwd() != null);
+});
+
+testPerm({ write: true }, function dirCwdChdirSuccess(): void {
+ const initialdir = Deno.cwd();
+ const path = Deno.makeTempDirSync();
+ Deno.chdir(path);
+ const current = Deno.cwd();
+ if (Deno.build.os === "mac") {
+ assertEquals(current, "/private" + path);
+ } else {
+ assertEquals(current, path);
+ }
+ Deno.chdir(initialdir);
+});
+
+testPerm({ write: true }, function dirCwdError(): void {
+  // excluding windows since removeSync throws a "resource busy" error there
+ if (["linux", "mac"].includes(Deno.build.os)) {
+ const initialdir = Deno.cwd();
+ const path = Deno.makeTempDirSync();
+ Deno.chdir(path);
+ Deno.removeSync(path);
+ try {
+ Deno.cwd();
+ throw Error("current directory removed, should throw error");
+ } catch (err) {
+ if (err instanceof Deno.DenoError) {
+        assertEquals(err.name, "NotFound");
+ } else {
+ throw Error("raised different exception");
+ }
+ }
+ Deno.chdir(initialdir);
+ }
+});
+
+testPerm({ write: true }, function dirChdirError(): void {
+ const path = Deno.makeTempDirSync() + "test";
+ try {
+ Deno.chdir(path);
+ throw Error("directory not available, should throw error");
+ } catch (err) {
+ if (err instanceof Deno.DenoError) {
+      assertEquals(err.name, "NotFound");
+ } else {
+ throw Error("raised different exception");
+ }
+ }
+});
diff --git a/cli/js/dispatch.ts b/cli/js/dispatch.ts
new file mode 100644
index 000000000..bff4d0f5b
--- /dev/null
+++ b/cli/js/dispatch.ts
@@ -0,0 +1,110 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as minimal from "./dispatch_minimal.ts";
+import * as json from "./dispatch_json.ts";
+
+// These consts are shared with Rust. Update with care.
+export let OP_READ: number;
+export let OP_WRITE: number;
+export let OP_EXIT: number;
+export let OP_IS_TTY: number;
+export let OP_ENV: number;
+export let OP_EXEC_PATH: number;
+export let OP_UTIME: number;
+export let OP_SET_ENV: number;
+export let OP_GET_ENV: number;
+export let OP_HOME_DIR: number;
+export let OP_START: number;
+export let OP_APPLY_SOURCE_MAP: number;
+export let OP_FORMAT_ERROR: number;
+export let OP_CACHE: number;
+export let OP_FETCH_SOURCE_FILES: number;
+export let OP_OPEN: number;
+export let OP_CLOSE: number;
+export let OP_SEEK: number;
+export let OP_FETCH: number;
+export let OP_METRICS: number;
+export let OP_REPL_START: number;
+export let OP_REPL_READLINE: number;
+export let OP_ACCEPT: number;
+export let OP_DIAL: number;
+export let OP_SHUTDOWN: number;
+export let OP_LISTEN: number;
+export let OP_RESOURCES: number;
+export let OP_GET_RANDOM_VALUES: number;
+export let OP_GLOBAL_TIMER_STOP: number;
+export let OP_GLOBAL_TIMER: number;
+export let OP_NOW: number;
+export let OP_PERMISSIONS: number;
+export let OP_REVOKE_PERMISSION: number;
+export let OP_CREATE_WORKER: number;
+export let OP_HOST_GET_WORKER_CLOSED: number;
+export let OP_HOST_POST_MESSAGE: number;
+export let OP_HOST_GET_MESSAGE: number;
+export let OP_WORKER_POST_MESSAGE: number;
+export let OP_WORKER_GET_MESSAGE: number;
+export let OP_RUN: number;
+export let OP_RUN_STATUS: number;
+export let OP_KILL: number;
+export let OP_CHDIR: number;
+export let OP_MKDIR: number;
+export let OP_CHMOD: number;
+export let OP_CHOWN: number;
+export let OP_REMOVE: number;
+export let OP_COPY_FILE: number;
+export let OP_STAT: number;
+export let OP_READ_DIR: number;
+export let OP_RENAME: number;
+export let OP_LINK: number;
+export let OP_SYMLINK: number;
+export let OP_READ_LINK: number;
+export let OP_TRUNCATE: number;
+export let OP_MAKE_TEMP_DIR: number;
+export let OP_CWD: number;
+export let OP_FETCH_ASSET: number;
+export let OP_DIAL_TLS: number;
+export let OP_HOSTNAME: number;
+
+export function asyncMsgFromRust(opId: number, ui8: Uint8Array): void {
+ switch (opId) {
+ case OP_WRITE:
+ case OP_READ:
+ minimal.asyncMsgFromRust(opId, ui8);
+ break;
+ case OP_EXIT:
+ case OP_IS_TTY:
+ case OP_ENV:
+ case OP_EXEC_PATH:
+ case OP_UTIME:
+ case OP_OPEN:
+ case OP_SEEK:
+ case OP_FETCH:
+ case OP_REPL_START:
+ case OP_REPL_READLINE:
+ case OP_ACCEPT:
+ case OP_DIAL:
+ case OP_GLOBAL_TIMER:
+ case OP_HOST_GET_WORKER_CLOSED:
+ case OP_HOST_GET_MESSAGE:
+ case OP_WORKER_GET_MESSAGE:
+ case OP_RUN_STATUS:
+ case OP_MKDIR:
+ case OP_CHMOD:
+ case OP_CHOWN:
+ case OP_REMOVE:
+ case OP_COPY_FILE:
+ case OP_STAT:
+ case OP_READ_DIR:
+ case OP_RENAME:
+ case OP_LINK:
+ case OP_SYMLINK:
+ case OP_READ_LINK:
+ case OP_TRUNCATE:
+ case OP_MAKE_TEMP_DIR:
+ case OP_DIAL_TLS:
+ case OP_FETCH_SOURCE_FILES:
+ json.asyncMsgFromRust(opId, ui8);
+ break;
+ default:
+ throw Error("bad async opId");
+ }
+}
diff --git a/cli/js/dispatch_json.ts b/cli/js/dispatch_json.ts
new file mode 100644
index 000000000..572ec855a
--- /dev/null
+++ b/cli/js/dispatch_json.ts
@@ -0,0 +1,86 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as util from "./util.ts";
+import { TextEncoder, TextDecoder } from "./text_encoding.ts";
+import { core } from "./core.ts";
+import { ErrorKind, DenoError } from "./errors.ts";
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+type Ok = any;
+
+interface JsonError {
+ kind: ErrorKind;
+ message: string;
+}
+
+interface JsonResponse {
+ ok?: Ok;
+ err?: JsonError;
+ promiseId?: number; // Only present in async messages.
+}
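+
+// Illustrative shapes only (not tied to a specific op): a successful sync
+// response decodes to `{ ok: ... }`, a failure to `{ err: { kind, message } }`,
+// and an async response additionally echoes the `promiseId` it answers,
+// e.g. `{ ok: ..., promiseId: 3 }`.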
+
+const promiseTable = new Map<number, util.Resolvable<JsonResponse>>();
+let _nextPromiseId = 1;
+
+function nextPromiseId(): number {
+ return _nextPromiseId++;
+}
+
+function decode(ui8: Uint8Array): JsonResponse {
+ const s = new TextDecoder().decode(ui8);
+ return JSON.parse(s) as JsonResponse;
+}
+
+function encode(args: object): Uint8Array {
+ const s = JSON.stringify(args);
+ return new TextEncoder().encode(s);
+}
+
+function unwrapResponse(res: JsonResponse): Ok {
+ if (res.err != null) {
+ throw new DenoError(res.err!.kind, res.err!.message);
+ }
+ util.assert(res.ok != null);
+ return res.ok!;
+}
+
+export function asyncMsgFromRust(opId: number, resUi8: Uint8Array): void {
+ const res = decode(resUi8);
+ util.assert(res.promiseId != null);
+
+ const promise = promiseTable.get(res.promiseId!);
+ util.assert(promise != null);
+ promiseTable.delete(res.promiseId!);
+ promise!.resolve(res);
+}
+
+export function sendSync(
+ opId: number,
+ args: object = {},
+ zeroCopy?: Uint8Array
+): Ok {
+ const argsUi8 = encode(args);
+ const resUi8 = core.dispatch(opId, argsUi8, zeroCopy);
+ util.assert(resUi8 != null);
+
+ const res = decode(resUi8!);
+ util.assert(res.promiseId == null);
+ return unwrapResponse(res);
+}
+
+export async function sendAsync(
+ opId: number,
+ args: object = {},
+ zeroCopy?: Uint8Array
+): Promise<Ok> {
+ const promiseId = nextPromiseId();
+ args = Object.assign(args, { promiseId });
+ const promise = util.createResolvable<Ok>();
+ promiseTable.set(promiseId, promise);
+
+ const argsUi8 = encode(args);
+ const resUi8 = core.dispatch(opId, argsUi8, zeroCopy);
+ util.assert(resUi8 == null);
+
+ const res = await promise;
+ return unwrapResponse(res);
+}
diff --git a/cli/js/dispatch_json_test.ts b/cli/js/dispatch_json_test.ts
new file mode 100644
index 000000000..11dadc620
--- /dev/null
+++ b/cli/js/dispatch_json_test.ts
@@ -0,0 +1,19 @@
+import { testPerm, assertMatch, unreachable } from "./test_util.ts";
+
+const openErrorStackPattern = new RegExp(
+ `^.*
+ at unwrapResponse \\(.*dispatch_json\\.ts:.*\\)
+ at Object.sendAsync \\(.*dispatch_json\\.ts:.*\\)
+ at async Object\\.open \\(.*files\\.ts:.*\\).*$`,
+ "ms"
+);
+
+testPerm({ read: true }, async function sendAsyncStackTrace(): Promise<void> {
+ await Deno.open("nonexistent.txt")
+ .then(unreachable)
+ .catch(
+ (error): void => {
+ assertMatch(error.stack, openErrorStackPattern);
+ }
+ );
+});
diff --git a/cli/js/dispatch_minimal.ts b/cli/js/dispatch_minimal.ts
new file mode 100644
index 000000000..98636f85b
--- /dev/null
+++ b/cli/js/dispatch_minimal.ts
@@ -0,0 +1,80 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as util from "./util.ts";
+import { core } from "./core.ts";
+
+const promiseTableMin = new Map<number, util.Resolvable<number>>();
+// Note it's important that promiseId starts at 1 instead of 0, because sync
+// messages are indicated with promiseId 0. If we ever add wrap around logic for
+// overflows, this should be taken into account.
+let _nextPromiseId = 1;
+
+function nextPromiseId(): number {
+ return _nextPromiseId++;
+}
+
+export interface RecordMinimal {
+ promiseId: number;
+ opId: number; // Maybe better called dispatchId
+ arg: number;
+ result: number;
+}
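+
+// Wire layout (see recordFromBufMinimal and sendSyncMinimal below): each
+// record is three 32-bit integers, [promiseId, arg, result], where a
+// promiseId of 0 marks a synchronous message.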
+
+export function recordFromBufMinimal(
+ opId: number,
+ buf32: Int32Array
+): RecordMinimal {
+ if (buf32.length != 3) {
+ throw Error("Bad message");
+ }
+ return {
+ promiseId: buf32[0],
+ opId,
+ arg: buf32[1],
+ result: buf32[2]
+ };
+}
+
+const scratch32 = new Int32Array(3);
+const scratchBytes = new Uint8Array(
+ scratch32.buffer,
+ scratch32.byteOffset,
+ scratch32.byteLength
+);
+util.assert(scratchBytes.byteLength === scratch32.length * 4);
+
+export function asyncMsgFromRust(opId: number, ui8: Uint8Array): void {
+ const buf32 = new Int32Array(ui8.buffer, ui8.byteOffset, ui8.byteLength / 4);
+ const record = recordFromBufMinimal(opId, buf32);
+ const { promiseId, result } = record;
+ const promise = promiseTableMin.get(promiseId);
+ promiseTableMin.delete(promiseId);
+ promise!.resolve(result);
+}
+
+export function sendAsyncMinimal(
+ opId: number,
+ arg: number,
+ zeroCopy: Uint8Array
+): Promise<number> {
+ const promiseId = nextPromiseId(); // AKA cmdId
+ scratch32[0] = promiseId;
+ scratch32[1] = arg;
+ scratch32[2] = 0; // result
+ const promise = util.createResolvable<number>();
+ promiseTableMin.set(promiseId, promise);
+ core.dispatch(opId, scratchBytes, zeroCopy);
+ return promise;
+}
+
+export function sendSyncMinimal(
+ opId: number,
+ arg: number,
+ zeroCopy: Uint8Array
+): number {
+ scratch32[0] = 0; // promiseId 0 indicates sync
+ scratch32[1] = arg;
+ const res = core.dispatch(opId, scratchBytes, zeroCopy)!;
+ const res32 = new Int32Array(res.buffer, res.byteOffset, 3);
+ const resRecord = recordFromBufMinimal(opId, res32);
+ return resRecord.result;
+}
diff --git a/cli/js/dom_file.ts b/cli/js/dom_file.ts
new file mode 100644
index 000000000..1f9bf93a5
--- /dev/null
+++ b/cli/js/dom_file.ts
@@ -0,0 +1,24 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import * as blob from "./blob.ts";
+
+export class DomFileImpl extends blob.DenoBlob implements domTypes.DomFile {
+ lastModified: number;
+ name: string;
+
+ constructor(
+ fileBits: domTypes.BlobPart[],
+ fileName: string,
+ options?: domTypes.FilePropertyBag
+ ) {
+ options = options || {};
+ super(fileBits, options);
+
+ // 4.1.2.1 Replace any "/" character (U+002F SOLIDUS)
+ // with a ":" (U+003A COLON)
+ this.name = String(fileName).replace(/\u002F/g, "\u003A");
+ // 4.1.3.3 If lastModified is not provided, set lastModified to the current
+ // date and time represented in number of milliseconds since the Unix Epoch.
+ this.lastModified = options.lastModified || Date.now();
+ }
+}
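
The constructor mirrors the File spec steps cited in the comments: solidus characters in the name are replaced with colons and `lastModified` falls back to the current time. A small sketch of the resulting object shape:

    import { DomFileImpl } from "./dom_file.ts";

    const file = new DomFileImpl(["hello world"], "notes/readme.txt", {
      type: "text/plain"
    });
    console.log(file.name); // "notes:readme.txt" ("/" replaced per step 4.1.2.1)
    console.log(file.lastModified); // defaults to Date.now() when not provided
    console.log(file.type); // "text/plain", inherited from DenoBlob
    console.log(file.size); // 11 (UTF-8 bytes of the string part)
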
diff --git a/cli/js/dom_types.ts b/cli/js/dom_types.ts
new file mode 100644
index 000000000..308505cf5
--- /dev/null
+++ b/cli/js/dom_types.ts
@@ -0,0 +1,625 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+/*! ****************************************************************************
+Copyright (c) Microsoft Corporation. All rights reserved.
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use
+this file except in compliance with the License. You may obtain a copy of the
+License at http://www.apache.org/licenses/LICENSE-2.0
+
+THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
+WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
+MERCHANTABLITY OR NON-INFRINGEMENT.
+
+See the Apache Version 2.0 License for specific language governing permissions
+and limitations under the License.
+*******************************************************************************/
+
+/* eslint-disable @typescript-eslint/no-explicit-any */
+
+export type BufferSource = ArrayBufferView | ArrayBuffer;
+
+export type HeadersInit =
+ | Headers
+ | Array<[string, string]>
+ | Record<string, string>;
+export type URLSearchParamsInit = string | string[][] | Record<string, string>;
+type BodyInit =
+ | Blob
+ | BufferSource
+ | FormData
+ | URLSearchParams
+ | ReadableStream
+ | string;
+export type RequestInfo = Request | string;
+type ReferrerPolicy =
+ | ""
+ | "no-referrer"
+ | "no-referrer-when-downgrade"
+ | "origin-only"
+ | "origin-when-cross-origin"
+ | "unsafe-url";
+export type BlobPart = BufferSource | Blob | string;
+export type FormDataEntryValue = DomFile | string;
+
+export interface DomIterable<K, V> {
+ keys(): IterableIterator<K>;
+ values(): IterableIterator<V>;
+ entries(): IterableIterator<[K, V]>;
+ [Symbol.iterator](): IterableIterator<[K, V]>;
+ forEach(
+ callback: (value: V, key: K, parent: this) => void,
+ thisArg?: any
+ ): void;
+}
+
+type EndingType = "transparent" | "native";
+
+export interface BlobPropertyBag {
+ type?: string;
+ ending?: EndingType;
+}
+
+interface AbortSignalEventMap {
+ abort: ProgressEvent;
+}
+
+// https://dom.spec.whatwg.org/#node
+export enum NodeType {
+ ELEMENT_NODE = 1,
+ TEXT_NODE = 3,
+ DOCUMENT_FRAGMENT_NODE = 11
+}
+
+export const eventTargetHost: unique symbol = Symbol();
+export const eventTargetListeners: unique symbol = Symbol();
+export const eventTargetMode: unique symbol = Symbol();
+export const eventTargetNodeType: unique symbol = Symbol();
+
+export interface EventTarget {
+ [eventTargetHost]: EventTarget | null;
+ [eventTargetListeners]: { [type in string]: EventListener[] };
+ [eventTargetMode]: string;
+ [eventTargetNodeType]: NodeType;
+ addEventListener(
+ type: string,
+ callback: (event: Event) => void | null,
+ options?: boolean | AddEventListenerOptions
+ ): void;
+ dispatchEvent(event: Event): boolean;
+ removeEventListener(
+ type: string,
+ callback?: (event: Event) => void | null,
+ options?: EventListenerOptions | boolean
+ ): void;
+}
+
+export interface ProgressEventInit extends EventInit {
+ lengthComputable?: boolean;
+ loaded?: number;
+ total?: number;
+}
+
+export interface URLSearchParams extends DomIterable<string, string> {
+ /**
+ * Appends a specified key/value pair as a new search parameter.
+ */
+ append(name: string, value: string): void;
+ /**
+ * Deletes the given search parameter, and its associated value,
+ * from the list of all search parameters.
+ */
+ delete(name: string): void;
+ /**
+ * Returns the first value associated to the given search parameter.
+ */
+ get(name: string): string | null;
+ /**
+ * Returns all the values associated with a given search parameter.
+ */
+ getAll(name: string): string[];
+ /**
+ * Returns a Boolean indicating if such a search parameter exists.
+ */
+ has(name: string): boolean;
+ /**
+ * Sets the value associated to a given search parameter to the given value.
+ * If there were several values, delete the others.
+ */
+ set(name: string, value: string): void;
+ /**
+ * Sort all key/value pairs contained in this object in place
+ * and return undefined. The sort order is according to Unicode
+ * code points of the keys.
+ */
+ sort(): void;
+ /**
+ * Returns a query string suitable for use in a URL.
+ */
+ toString(): string;
+ /**
+ * Iterates over each name-value pair in the query
+ * and invokes the given function.
+ */
+ forEach(
+ callbackfn: (value: string, key: string, parent: this) => void,
+ thisArg?: any
+ ): void;
+}
+
+export interface EventListener {
+ handleEvent(event: Event): void;
+ readonly callback: (event: Event) => void | null;
+ readonly options: boolean | AddEventListenerOptions;
+}
+
+export interface EventInit {
+ bubbles?: boolean;
+ cancelable?: boolean;
+ composed?: boolean;
+}
+
+export interface CustomEventInit extends EventInit {
+ detail?: any;
+}
+
+export enum EventPhase {
+ NONE = 0,
+ CAPTURING_PHASE = 1,
+ AT_TARGET = 2,
+ BUBBLING_PHASE = 3
+}
+
+export interface EventPath {
+ item: EventTarget;
+ itemInShadowTree: boolean;
+ relatedTarget: EventTarget | null;
+ rootOfClosedTree: boolean;
+ slotInClosedTree: boolean;
+ target: EventTarget | null;
+ touchTargetList: EventTarget[];
+}
+
+export interface Event {
+ readonly type: string;
+ target: EventTarget | null;
+ currentTarget: EventTarget | null;
+ composedPath(): EventPath[];
+
+ eventPhase: number;
+
+ stopPropagation(): void;
+ stopImmediatePropagation(): void;
+
+ readonly bubbles: boolean;
+ readonly cancelable: boolean;
+ preventDefault(): void;
+ readonly defaultPrevented: boolean;
+ readonly composed: boolean;
+
+ isTrusted: boolean;
+ readonly timeStamp: Date;
+
+ dispatched: boolean;
+ readonly initialized: boolean;
+ inPassiveListener: boolean;
+ cancelBubble: boolean;
+ cancelBubbleImmediately: boolean;
+ path: EventPath[];
+ relatedTarget: EventTarget | null;
+}
+
+export interface CustomEvent extends Event {
+ readonly detail: any;
+ initCustomEvent(
+ type: string,
+ bubbles?: boolean,
+ cancelable?: boolean,
+ detail?: any | null
+ ): void;
+}
+
+export interface DomFile extends Blob {
+ readonly lastModified: number;
+ readonly name: string;
+}
+
+export interface DomFileConstructor {
+ new (bits: BlobPart[], filename: string, options?: FilePropertyBag): DomFile;
+ prototype: DomFile;
+}
+
+export interface FilePropertyBag extends BlobPropertyBag {
+ lastModified?: number;
+}
+
+interface ProgressEvent extends Event {
+ readonly lengthComputable: boolean;
+ readonly loaded: number;
+ readonly total: number;
+}
+
+export interface EventListenerOptions {
+ capture: boolean;
+}
+
+export interface AddEventListenerOptions extends EventListenerOptions {
+ once: boolean;
+ passive: boolean;
+}
+
+interface AbortSignal extends EventTarget {
+ readonly aborted: boolean;
+ onabort: ((this: AbortSignal, ev: ProgressEvent) => any) | null;
+ addEventListener<K extends keyof AbortSignalEventMap>(
+ type: K,
+ listener: (this: AbortSignal, ev: AbortSignalEventMap[K]) => any,
+ options?: boolean | AddEventListenerOptions
+ ): void;
+ addEventListener(
+ type: string,
+ listener: EventListener,
+ options?: boolean | AddEventListenerOptions
+ ): void;
+ removeEventListener<K extends keyof AbortSignalEventMap>(
+ type: K,
+ listener: (this: AbortSignal, ev: AbortSignalEventMap[K]) => any,
+ options?: boolean | EventListenerOptions
+ ): void;
+ removeEventListener(
+ type: string,
+ listener: EventListener,
+ options?: boolean | EventListenerOptions
+ ): void;
+}
+
+export interface ReadableStream {
+ readonly locked: boolean;
+ cancel(): Promise<void>;
+ getReader(): ReadableStreamReader;
+ tee(): [ReadableStream, ReadableStream];
+}
+
+export interface ReadableStreamReader {
+ cancel(): Promise<void>;
+ read(): Promise<any>;
+ releaseLock(): void;
+}
+
+export interface FormData extends DomIterable<string, FormDataEntryValue> {
+ append(name: string, value: string | Blob, fileName?: string): void;
+ delete(name: string): void;
+ get(name: string): FormDataEntryValue | null;
+ getAll(name: string): FormDataEntryValue[];
+ has(name: string): boolean;
+ set(name: string, value: string | Blob, fileName?: string): void;
+}
+
+export interface FormDataConstructor {
+ new (): FormData;
+ prototype: FormData;
+}
+
+/** A blob object represents a file-like object of immutable, raw data. */
+export interface Blob {
+ /** The size, in bytes, of the data contained in the `Blob` object. */
+ readonly size: number;
+ /** A string indicating the media type of the data contained in the `Blob`.
+ * If the type is unknown, this string is empty.
+ */
+ readonly type: string;
+ /** Returns a new `Blob` object containing the data in the specified range of
+ * bytes of the source `Blob`.
+ */
+ slice(start?: number, end?: number, contentType?: string): Blob;
+}
+
+export interface Body {
+ /** A simple getter used to expose a `ReadableStream` of the body contents. */
+ readonly body: ReadableStream | null;
+ /** Stores a `Boolean` that declares whether the body has been used in a
+ * response yet.
+ */
+ readonly bodyUsed: boolean;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with an `ArrayBuffer`.
+ */
+ arrayBuffer(): Promise<ArrayBuffer>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `Blob`.
+ */
+ blob(): Promise<Blob>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `FormData` object.
+ */
+ formData(): Promise<FormData>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with the result of parsing the body text as JSON.
+ */
+ json(): Promise<any>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `USVString` (text).
+ */
+ text(): Promise<string>;
+}
+
+export interface Headers extends DomIterable<string, string> {
+ /** Appends a new value onto an existing header inside a `Headers` object, or
+ * adds the header if it does not already exist.
+ */
+ append(name: string, value: string): void;
+ /** Deletes a header from a `Headers` object. */
+ delete(name: string): void;
+ /** Returns an iterator allowing to go through all key/value pairs
+ * contained in this Headers object. Both the key and value of each pair
+ * are ByteString objects.
+ */
+ entries(): IterableIterator<[string, string]>;
+ /** Returns a `ByteString` sequence of all the values of a header within a
+ * `Headers` object with a given name.
+ */
+ get(name: string): string | null;
+ /** Returns a boolean stating whether a `Headers` object contains a certain
+ * header.
+ */
+ has(name: string): boolean;
+ /** Returns an iterator allowing to go through all keys contained in
+ * this Headers object. The keys are ByteString objects.
+ */
+ keys(): IterableIterator<string>;
+ /** Sets a new value for an existing header inside a Headers object, or adds
+ * the header if it does not already exist.
+ */
+ set(name: string, value: string): void;
+ /** Returns an iterator allowing to go through all values contained in
+ * this Headers object. The values are ByteString objects.
+ */
+ values(): IterableIterator<string>;
+ forEach(
+ callbackfn: (value: string, key: string, parent: this) => void,
+ thisArg?: any
+ ): void;
+ /** The Symbol.iterator well-known symbol specifies the default
+ * iterator for this Headers object
+ */
+ [Symbol.iterator](): IterableIterator<[string, string]>;
+}
+
+export interface HeadersConstructor {
+ new (init?: HeadersInit): Headers;
+ prototype: Headers;
+}
+
+type RequestCache =
+ | "default"
+ | "no-store"
+ | "reload"
+ | "no-cache"
+ | "force-cache"
+ | "only-if-cached";
+type RequestCredentials = "omit" | "same-origin" | "include";
+type RequestDestination =
+ | ""
+ | "audio"
+ | "audioworklet"
+ | "document"
+ | "embed"
+ | "font"
+ | "image"
+ | "manifest"
+ | "object"
+ | "paintworklet"
+ | "report"
+ | "script"
+ | "sharedworker"
+ | "style"
+ | "track"
+ | "video"
+ | "worker"
+ | "xslt";
+type RequestMode = "navigate" | "same-origin" | "no-cors" | "cors";
+type RequestRedirect = "follow" | "error" | "manual";
+type ResponseType =
+ | "basic"
+ | "cors"
+ | "default"
+ | "error"
+ | "opaque"
+ | "opaqueredirect";
+
+export interface RequestInit {
+ body?: BodyInit | null;
+ cache?: RequestCache;
+ credentials?: RequestCredentials;
+ headers?: HeadersInit;
+ integrity?: string;
+ keepalive?: boolean;
+ method?: string;
+ mode?: RequestMode;
+ redirect?: RequestRedirect;
+ referrer?: string;
+ referrerPolicy?: ReferrerPolicy;
+ signal?: AbortSignal | null;
+ window?: any;
+}
+
+export interface ResponseInit {
+ headers?: HeadersInit;
+ status?: number;
+ statusText?: string;
+}
+
+export interface RequestConstructor {
+ new (input: RequestInfo, init?: RequestInit): Request;
+ prototype: Request;
+}
+
+export interface Request extends Body {
+ /** Returns the cache mode associated with request, which is a string
+ * indicating how the request will interact with the browser's cache when
+ * fetching.
+ */
+ readonly cache?: RequestCache;
+ /** Returns the credentials mode associated with request, which is a string
+ * indicating whether credentials will be sent with the request always, never,
+ * or only when sent to a same-origin URL.
+ */
+ readonly credentials?: RequestCredentials;
+ /** Returns the kind of resource requested by request, (e.g., `document` or
+ * `script`).
+ */
+ readonly destination?: RequestDestination;
+ /** Returns a Headers object consisting of the headers associated with
+ * request.
+ *
+ * Note that headers added in the network layer by the user agent
+ * will not be accounted for in this object, (e.g., the `Host` header).
+ */
+ readonly headers: Headers;
+ /** Returns request's subresource integrity metadata, which is a cryptographic
+ * hash of the resource being fetched. Its value consists of multiple hashes
+ * separated by whitespace. [SRI]
+ */
+ readonly integrity?: string;
+ /** Returns a boolean indicating whether or not request is for a history
+ * navigation (a.k.a. back-forward navigation).
+ */
+ readonly isHistoryNavigation?: boolean;
+ /** Returns a boolean indicating whether or not request is for a reload
+ * navigation.
+ */
+ readonly isReloadNavigation?: boolean;
+ /** Returns a boolean indicating whether or not request can outlive the global
+ * in which it was created.
+ */
+ readonly keepalive?: boolean;
+ /** Returns request's HTTP method, which is `GET` by default. */
+ readonly method: string;
+ /** Returns the mode associated with request, which is a string indicating
+ * whether the request will use CORS, or will be restricted to same-origin
+ * URLs.
+ */
+ readonly mode?: RequestMode;
+ /** Returns the redirect mode associated with request, which is a string
+ * indicating how redirects for the request will be handled during fetching.
+ *
+ * A request will follow redirects by default.
+ */
+ readonly redirect?: RequestRedirect;
+ /** Returns the referrer of request. Its value can be a same-origin URL if
+ * explicitly set in init, the empty string to indicate no referrer, and
+ * `about:client` when defaulting to the global's default.
+ *
+ * This is used during fetching to determine the value of the `Referer`
+ * header of the request being made.
+ */
+ readonly referrer?: string;
+ /** Returns the referrer policy associated with request. This is used during
+ * fetching to compute the value of the request's referrer.
+ */
+ readonly referrerPolicy?: ReferrerPolicy;
+ /** Returns the signal associated with request, which is an AbortSignal object
+ * indicating whether or not request has been aborted, and its abort event
+ * handler.
+ */
+ readonly signal?: AbortSignal;
+ /** Returns the URL of request as a string. */
+ readonly url: string;
+ clone(): Request;
+}
+
+export interface Response extends Body {
+ /** Contains the `Headers` object associated with the response. */
+ readonly headers: Headers;
+ /** Contains a boolean stating whether the response was successful (status in
+ * the range 200-299) or not.
+ */
+ readonly ok: boolean;
+ /** Indicates whether or not the response is the result of a redirect; that
+ * is, its URL list has more than one entry.
+ */
+ readonly redirected: boolean;
+ /** Contains the status code of the response (e.g., `200` for a success). */
+ readonly status: number;
+ /** Contains the status message corresponding to the status code (e.g., `OK`
+ * for `200`).
+ */
+ readonly statusText: string;
+ readonly trailer: Promise<Headers>;
+ /** Contains the type of the response (e.g., `basic`, `cors`). */
+ readonly type: ResponseType;
+ /** Contains the URL of the response. */
+ readonly url: string;
+ /** Creates a clone of a `Response` object. */
+ clone(): Response;
+}
+
+export interface Location {
+ /**
+ * Returns a DOMStringList object listing the origins of the ancestor browsing
+ * contexts, from the parent browsing context to the top-level browsing
+ * context.
+ */
+ readonly ancestorOrigins: string[];
+ /**
+ * Returns the Location object's URL's fragment (includes leading "#" if
+ * non-empty).
+ * Can be set, to navigate to the same URL with a changed fragment (ignores
+ * leading "#").
+ */
+ hash: string;
+ /**
+ * Returns the Location object's URL's host and port (if different from the
+ * default port for the scheme). Can be set, to navigate to the same URL with
+ * a changed host and port.
+ */
+ host: string;
+ /**
+ * Returns the Location object's URL's host. Can be set, to navigate to the
+ * same URL with a changed host.
+ */
+ hostname: string;
+ /**
+ * Returns the Location object's URL. Can be set, to navigate to the given
+ * URL.
+ */
+ href: string;
+ /** Returns the Location object's URL's origin. */
+ readonly origin: string;
+ /**
+ * Returns the Location object's URL's path.
+ * Can be set, to navigate to the same URL with a changed path.
+ */
+ pathname: string;
+ /**
+ * Returns the Location object's URL's port.
+ * Can be set, to navigate to the same URL with a changed port.
+ */
+ port: string;
+ /**
+ * Returns the Location object's URL's scheme.
+ * Can be set, to navigate to the same URL with a changed scheme.
+ */
+ protocol: string;
+ /**
+ * Returns the Location object's URL's query (includes leading "?" if
+ * non-empty). Can be set, to navigate to the same URL with a changed query
+ * (ignores leading "?").
+ */
+ search: string;
+ /**
+ * Navigates to the given URL.
+ */
+ assign(url: string): void;
+ /**
+ * Reloads the current page.
+ */
+ reload(): void;
+ /** @deprecated */
+ reload(forcedReload: boolean): void;
+ /**
+ * Removes the current page from the session history and navigates to the
+ * given URL.
+ */
+ replace(url: string): void;
+}
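
Most of these declarations are consumed by the concrete implementations elsewhere in cli/js, but the shared `DomIterable<K, V>` shape is easy to satisfy directly. A minimal, Map-backed sketch (not part of this module):

    import * as domTypes from "./dom_types.ts";

    class MapIterable<K, V> implements domTypes.DomIterable<K, V> {
      constructor(private readonly map: Map<K, V>) {}
      keys(): IterableIterator<K> {
        return this.map.keys();
      }
      values(): IterableIterator<V> {
        return this.map.values();
      }
      entries(): IterableIterator<[K, V]> {
        return this.map.entries();
      }
      [Symbol.iterator](): IterableIterator<[K, V]> {
        return this.map.entries();
      }
      forEach(
        callback: (value: V, key: K, parent: this) => void,
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        thisArg?: any
      ): void {
        // Invoke the callback with (value, key, parent), matching the interface.
        for (const [key, value] of this.map) {
          callback.call(thisArg, value, key, this);
        }
      }
    }
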
diff --git a/cli/js/dom_util.ts b/cli/js/dom_util.ts
new file mode 100644
index 000000000..725a35aaf
--- /dev/null
+++ b/cli/js/dom_util.ts
@@ -0,0 +1,85 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+// Utility functions for DOM nodes
+import * as domTypes from "./dom_types.ts";
+
+export function isNode(nodeImpl: domTypes.EventTarget | null): boolean {
+ return Boolean(nodeImpl && "nodeType" in nodeImpl);
+}
+
+export function isShadowRoot(nodeImpl: domTypes.EventTarget | null): boolean {
+ return Boolean(
+ nodeImpl &&
+ nodeImpl[domTypes.eventTargetNodeType] ===
+ domTypes.NodeType.DOCUMENT_FRAGMENT_NODE &&
+ nodeImpl[domTypes.eventTargetHost] != null
+ );
+}
+
+export function isSlotable(nodeImpl: domTypes.EventTarget | null): boolean {
+ return Boolean(
+ nodeImpl &&
+ (nodeImpl[domTypes.eventTargetNodeType] ===
+ domTypes.NodeType.ELEMENT_NODE ||
+ nodeImpl[domTypes.eventTargetNodeType] === domTypes.NodeType.TEXT_NODE)
+ );
+}
+
+// https://dom.spec.whatwg.org/#node-trees
+// const domSymbolTree = Symbol("DOM Symbol Tree");
+
+// https://dom.spec.whatwg.org/#concept-shadow-including-inclusive-ancestor
+export function isShadowInclusiveAncestor(
+ ancestor: domTypes.EventTarget | null,
+ node: domTypes.EventTarget | null
+): boolean {
+ while (isNode(node)) {
+ if (node === ancestor) {
+ return true;
+ }
+
+ if (isShadowRoot(node)) {
+ node = node && node[domTypes.eventTargetHost];
+ } else {
+ node = null; // domSymbolTree.parent(node);
+ }
+ }
+
+ return false;
+}
+
+export function getRoot(
+ node: domTypes.EventTarget | null
+): domTypes.EventTarget | null {
+ const root = node;
+
+ // for (const ancestor of domSymbolTree.ancestorsIterator(node)) {
+ // root = ancestor;
+ // }
+
+ return root;
+}
+
+// https://dom.spec.whatwg.org/#retarget
+export function retarget(
+ a: domTypes.EventTarget | null,
+ b: domTypes.EventTarget
+): domTypes.EventTarget | null {
+ while (true) {
+ if (!isNode(a)) {
+ return a;
+ }
+
+ const aRoot = getRoot(a);
+
+ if (aRoot) {
+ if (
+ !isShadowRoot(aRoot) ||
+ (isNode(b) && isShadowInclusiveAncestor(aRoot, b))
+ ) {
+ return a;
+ }
+
+ a = aRoot[domTypes.eventTargetHost];
+ }
+ }
+}
diff --git a/cli/js/error_stack.ts b/cli/js/error_stack.ts
new file mode 100644
index 000000000..98b0b02d4
--- /dev/null
+++ b/cli/js/error_stack.ts
@@ -0,0 +1,273 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+// Some of the code here is adapted directly from V8 and licensed under a BSD
+// style license available here: https://github.com/v8/v8/blob/24886f2d1c565287d33d71e4109a53bf0b54b75c/LICENSE.v8
+import * as dispatch from "./dispatch.ts";
+import { sendSync } from "./dispatch_json.ts";
+import { assert } from "./util.ts";
+
+export interface Location {
+ /** The full url for the module, e.g. `file://some/file.ts` or
+ * `https://some/file.ts`. */
+ filename: string;
+
+ /** The line number in the file. It is assumed to be 1-indexed. */
+ line: number;
+
+ /** The column number in the file. It is assumed to be 1-indexed. */
+ column: number;
+}
+
+/** Given a current location in a module, lookup the source location and
+ * return it.
+ *
+ * When Deno transpiles code, it keeps source maps of the transpiled code. This
+ * function can be used to lookup the original location. This is automatically
+ * done when accessing the `.stack` of an error, or when an uncaught error is
+ * logged. This function can also be called directly to perform the lookup
+ * when building custom error handling.
+ *
+ * **Note:** `line` and `column` are 1 indexed, which matches display
+ * expectations, but is not typical of most index numbers in Deno.
+ *
+ * An example:
+ *
+ * const orig = Deno.applySourceMap({
+ * location: "file://my/module.ts",
+ * line: 5,
+ * column: 15
+ * });
+ * console.log(`${orig.filename}:${orig.line}:${orig.column}`);
+ *
+ */
+export function applySourceMap(location: Location): Location {
+ const { filename, line, column } = location;
+ // On this side, line/column are 1 based, but in the source maps, they are
+ // 0 based, so we have to convert back and forth
+ const res = sendSync(dispatch.OP_APPLY_SOURCE_MAP, {
+ filename,
+ line: line - 1,
+ column: column - 1
+ });
+ return {
+ filename: res.filename,
+ line: res.line + 1,
+ column: res.column + 1
+ };
+}
+
+/** Mutate the call site so that it returns the location, instead of its
+ * original location.
+ */
+function patchCallSite(callSite: CallSite, location: Location): CallSite {
+ return {
+ getThis(): unknown {
+ return callSite.getThis();
+ },
+ getTypeName(): string {
+ return callSite.getTypeName();
+ },
+ getFunction(): Function {
+ return callSite.getFunction();
+ },
+ getFunctionName(): string {
+ return callSite.getFunctionName();
+ },
+ getMethodName(): string {
+ return callSite.getMethodName();
+ },
+ getFileName(): string {
+ return location.filename;
+ },
+ getLineNumber(): number {
+ return location.line;
+ },
+ getColumnNumber(): number {
+ return location.column;
+ },
+ getEvalOrigin(): string | null {
+ return callSite.getEvalOrigin();
+ },
+ isToplevel(): boolean {
+ return callSite.isToplevel();
+ },
+ isEval(): boolean {
+ return callSite.isEval();
+ },
+ isNative(): boolean {
+ return callSite.isNative();
+ },
+ isConstructor(): boolean {
+ return callSite.isConstructor();
+ },
+ isAsync(): boolean {
+ return callSite.isAsync();
+ },
+ isPromiseAll(): boolean {
+ return callSite.isPromiseAll();
+ },
+ getPromiseIndex(): number | null {
+ return callSite.getPromiseIndex();
+ }
+ };
+}
+
+/** Return a string representation of a CallSite's method call name.
+ *
+ * This is adapted directly from V8.
+ */
+function getMethodCall(callSite: CallSite): string {
+ let result = "";
+
+ const typeName = callSite.getTypeName();
+ const methodName = callSite.getMethodName();
+ const functionName = callSite.getFunctionName();
+
+ if (functionName) {
+ if (typeName) {
+ const startsWithTypeName = functionName.startsWith(typeName);
+ if (!startsWithTypeName) {
+ result += `${typeName}.`;
+ }
+ }
+ result += functionName;
+
+ if (methodName) {
+ if (!functionName.endsWith(methodName)) {
+ result += ` [as ${methodName}]`;
+ }
+ }
+ } else {
+ if (typeName) {
+ result += `${typeName}.`;
+ }
+ if (methodName) {
+ result += methodName;
+ } else {
+ result += "<anonymous>";
+ }
+ }
+
+ return result;
+}
+
+/** Return a string representation of a CallSite's file location.
+ *
+ * This is adapted directly from V8.
+ */
+function getFileLocation(callSite: CallSite): string {
+ if (callSite.isNative()) {
+ return "native";
+ }
+
+ let result = "";
+
+ const fileName = callSite.getFileName();
+ if (!fileName && callSite.isEval()) {
+ const evalOrigin = callSite.getEvalOrigin();
+ assert(evalOrigin != null);
+ result += `${evalOrigin}, `;
+ }
+
+ if (fileName) {
+ result += fileName;
+ } else {
+ result += "<anonymous>";
+ }
+
+ const lineNumber = callSite.getLineNumber();
+ if (lineNumber != null) {
+ result += `:${lineNumber}`;
+
+ const columnNumber = callSite.getColumnNumber();
+ if (columnNumber != null) {
+ result += `:${columnNumber}`;
+ }
+ }
+
+ return result;
+}
+
+/** Convert a CallSite to a string.
+ *
+ * This is adapted directly from V8.
+ */
+function callSiteToString(callSite: CallSite): string {
+ let result = "";
+ const functionName = callSite.getFunctionName();
+
+ const isTopLevel = callSite.isToplevel();
+ const isAsync = callSite.isAsync();
+ const isPromiseAll = callSite.isPromiseAll();
+ const isConstructor = callSite.isConstructor();
+ const isMethodCall = !(isTopLevel || isConstructor);
+
+ if (isAsync) {
+ result += "async ";
+ }
+ if (isPromiseAll) {
+ result += `Promise.all (index ${callSite.getPromiseIndex()})`;
+ return result;
+ }
+ if (isMethodCall) {
+ result += getMethodCall(callSite);
+ } else if (isConstructor) {
+ result += "new ";
+ if (functionName) {
+ result += functionName;
+ } else {
+ result += "<anonymous>";
+ }
+ } else if (functionName) {
+ result += functionName;
+ } else {
+ result += getFileLocation(callSite);
+ return result;
+ }
+
+ result += ` (${getFileLocation(callSite)})`;
+ return result;
+}
+
+/** A replacement for the default stack trace preparer which calls an op into
+ * Rust to apply source maps to individual call sites.
+ */
+function prepareStackTrace(
+ error: Error,
+ structuredStackTrace: CallSite[]
+): string {
+ return (
+ `${error.name}: ${error.message}\n` +
+ structuredStackTrace
+ .map(
+ (callSite): CallSite => {
+ const filename = callSite.getFileName();
+ const line = callSite.getLineNumber();
+ const column = callSite.getColumnNumber();
+ if (filename && line != null && column != null) {
+ return patchCallSite(
+ callSite,
+ applySourceMap({
+ filename,
+ line,
+ column
+ })
+ );
+ }
+ return callSite;
+ }
+ )
+ .map((callSite): string => ` at ${callSiteToString(callSite)}`)
+ .join("\n")
+ );
+}
+
+/** Sets the `prepareStackTrace` method on the Error constructor which will
+ * call an op into Rust to remap source locations for caught errors when the
+ * `.stack` property is accessed.
+ *
+ * See: https://v8.dev/docs/stack-trace-api
+ */
+// @internal
+export function setPrepareStackTrace(ErrorConstructor: typeof Error): void {
+ ErrorConstructor.prepareStackTrace = prepareStackTrace;
+}
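
Once the preparer is installed, any access to `.stack` walks the structured call sites through `applySourceMap` before formatting them. A sketch of the wiring, assuming it is performed somewhere during runtime startup (this file only exports the hook):

    import { setPrepareStackTrace } from "./error_stack.ts";

    setPrepareStackTrace(Error);

    try {
      throw new Error("boom");
    } catch (err) {
      // Each frame that has a filename/line/column is remapped from the
      // generated output (e.g. CLI_SNAPSHOT.js) back to its .ts source.
      console.log(err.stack);
    }
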
diff --git a/cli/js/error_stack_test.ts b/cli/js/error_stack_test.ts
new file mode 100644
index 000000000..4c7edb2fd
--- /dev/null
+++ b/cli/js/error_stack_test.ts
@@ -0,0 +1,108 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assert } from "./test_util.ts";
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+const { setPrepareStackTrace } = Deno as any;
+
+interface CallSite {
+ getThis(): unknown;
+ getTypeName(): string;
+ getFunction(): Function;
+ getFunctionName(): string;
+ getMethodName(): string;
+ getFileName(): string;
+ getLineNumber(): number | null;
+ getColumnNumber(): number | null;
+ getEvalOrigin(): string | null;
+ isToplevel(): boolean;
+ isEval(): boolean;
+ isNative(): boolean;
+ isConstructor(): boolean;
+ isAsync(): boolean;
+ isPromiseAll(): boolean;
+ getPromiseIndex(): number | null;
+}
+
+function getMockCallSite(
+ filename: string,
+ line: number | null,
+ column: number | null
+): CallSite {
+ return {
+ getThis(): unknown {
+ return undefined;
+ },
+ getTypeName(): string {
+ return "";
+ },
+ getFunction(): Function {
+ return (): void => {};
+ },
+ getFunctionName(): string {
+ return "";
+ },
+ getMethodName(): string {
+ return "";
+ },
+ getFileName(): string {
+ return filename;
+ },
+ getLineNumber(): number | null {
+ return line;
+ },
+ getColumnNumber(): number | null {
+ return column;
+ },
+ getEvalOrigin(): null {
+ return null;
+ },
+ isToplevel(): false {
+ return false;
+ },
+ isEval(): false {
+ return false;
+ },
+ isNative(): false {
+ return false;
+ },
+ isConstructor(): false {
+ return false;
+ },
+ isAsync(): false {
+ return false;
+ },
+ isPromiseAll(): false {
+ return false;
+ },
+ getPromiseIndex(): null {
+ return null;
+ }
+ };
+}
+
+test(function prepareStackTrace(): void {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ const MockError = {} as any;
+ setPrepareStackTrace(MockError);
+ assert(typeof MockError.prepareStackTrace === "function");
+ const prepareStackTrace: (
+ error: Error,
+ structuredStackTrace: CallSite[]
+ ) => string = MockError.prepareStackTrace;
+ const result = prepareStackTrace(new Error("foo"), [
+ getMockCallSite("CLI_SNAPSHOT.js", 23, 0)
+ ]);
+ assert(result.startsWith("Error: foo\n"));
+ assert(result.includes(".ts:"), "should remap to something in 'js/'");
+});
+
+test(function applySourceMap(): void {
+ const result = Deno.applySourceMap({
+ filename: "CLI_SNAPSHOT.js",
+ line: 23,
+ column: 0
+ });
+ assert(result.filename.endsWith(".ts"));
+ assert(result.line != null);
+ assert(result.column != null);
+});
diff --git a/cli/js/errors.ts b/cli/js/errors.ts
new file mode 100644
index 000000000..02ddfa2f2
--- /dev/null
+++ b/cli/js/errors.ts
@@ -0,0 +1,79 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+/** A Deno specific error. The `kind` property is set to a specific error code
+ * which can be used in application logic.
+ *
+ * try {
+ * somethingThatMightThrow();
+ * } catch (e) {
+ * if (
+ * e instanceof Deno.DenoError &&
+ * e.kind === Deno.ErrorKind.Overflow
+ * ) {
+ * console.error("Overflow error!");
+ * }
+ * }
+ *
+ */
+export class DenoError<T extends ErrorKind> extends Error {
+ constructor(readonly kind: T, msg: string) {
+ super(msg);
+ this.name = ErrorKind[kind];
+ }
+}
+
+// Warning! The values in this enum are duplicated in cli/msg.rs
+// Update carefully!
+export enum ErrorKind {
+ NoError = 0,
+ NotFound = 1,
+ PermissionDenied = 2,
+ ConnectionRefused = 3,
+ ConnectionReset = 4,
+ ConnectionAborted = 5,
+ NotConnected = 6,
+ AddrInUse = 7,
+ AddrNotAvailable = 8,
+ BrokenPipe = 9,
+ AlreadyExists = 10,
+ WouldBlock = 11,
+ InvalidInput = 12,
+ InvalidData = 13,
+ TimedOut = 14,
+ Interrupted = 15,
+ WriteZero = 16,
+ Other = 17,
+ UnexpectedEof = 18,
+ BadResource = 19,
+ CommandFailed = 20,
+ EmptyHost = 21,
+ IdnaError = 22,
+ InvalidPort = 23,
+ InvalidIpv4Address = 24,
+ InvalidIpv6Address = 25,
+ InvalidDomainCharacter = 26,
+ RelativeUrlWithoutBase = 27,
+ RelativeUrlWithCannotBeABaseBase = 28,
+ SetHostOnCannotBeABaseUrl = 29,
+ Overflow = 30,
+ HttpUser = 31,
+ HttpClosed = 32,
+ HttpCanceled = 33,
+ HttpParse = 34,
+ HttpOther = 35,
+ TooLarge = 36,
+ InvalidUri = 37,
+ InvalidSeekMode = 38,
+ OpNotAvailable = 39,
+ WorkerInitFailed = 40,
+ UnixError = 41,
+ NoAsyncSupport = 42,
+ NoSyncSupport = 43,
+ ImportMapError = 44,
+ InvalidPath = 45,
+ ImportPrefixMissing = 46,
+ UnsupportedFetchScheme = 47,
+ TooManyRedirects = 48,
+ Diagnostic = 49,
+ JSError = 50
+}
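
Because `DenoError` sets its `name` from the `ErrorKind` enum, a kind can be matched either by enum comparison or by name. A short sketch of raising and handling one; the user-lookup function is illustrative only:

    import { DenoError, ErrorKind } from "./errors.ts";

    function findUser(id: string): never {
      throw new DenoError(ErrorKind.NotFound, `user ${id} not found`);
    }

    try {
      findUser("42");
    } catch (e) {
      if (e instanceof DenoError && e.kind === ErrorKind.NotFound) {
        console.error(e.name); // "NotFound", derived from ErrorKind[kind]
        console.error(e.message); // "user 42 not found"
      }
    }
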
diff --git a/cli/js/event.ts b/cli/js/event.ts
new file mode 100644
index 000000000..3efc1c517
--- /dev/null
+++ b/cli/js/event.ts
@@ -0,0 +1,348 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import { getPrivateValue, requiredArguments } from "./util.ts";
+
+// WeakMaps are recommended for private attributes (see MDN link below)
+// https://developer.mozilla.org/en-US/docs/Archive/Add-ons/Add-on_SDK/Guides/Contributor_s_Guide/Private_Properties#Using_WeakMaps
+export const eventAttributes = new WeakMap();
+
+function isTrusted(this: Event): boolean {
+ return getPrivateValue(this, eventAttributes, "isTrusted");
+}
+
+export class Event implements domTypes.Event {
+ // The default value is `false`.
+ // Use `defineProperty` to define on each instance, NOT on the prototype.
+ isTrusted!: boolean;
+ // Each event has the following associated flags
+ private _canceledFlag = false;
+ private _dispatchedFlag = false;
+ private _initializedFlag = false;
+ private _inPassiveListenerFlag = false;
+ private _stopImmediatePropagationFlag = false;
+ private _stopPropagationFlag = false;
+
+ // Property for objects on which listeners will be invoked
+ private _path: domTypes.EventPath[] = [];
+
+ constructor(type: string, eventInitDict: domTypes.EventInit = {}) {
+ requiredArguments("Event", arguments.length, 1);
+ type = String(type);
+ this._initializedFlag = true;
+ eventAttributes.set(this, {
+ type,
+ bubbles: eventInitDict.bubbles || false,
+ cancelable: eventInitDict.cancelable || false,
+ composed: eventInitDict.composed || false,
+ currentTarget: null,
+ eventPhase: domTypes.EventPhase.NONE,
+ isTrusted: false,
+ relatedTarget: null,
+ target: null,
+ timeStamp: Date.now()
+ });
+ Reflect.defineProperty(this, "isTrusted", {
+ enumerable: true,
+ get: isTrusted
+ });
+ }
+
+ get bubbles(): boolean {
+ return getPrivateValue(this, eventAttributes, "bubbles");
+ }
+
+ get cancelBubble(): boolean {
+ return this._stopPropagationFlag;
+ }
+
+ set cancelBubble(value: boolean) {
+ this._stopPropagationFlag = value;
+ }
+
+ get cancelBubbleImmediately(): boolean {
+ return this._stopImmediatePropagationFlag;
+ }
+
+ set cancelBubbleImmediately(value: boolean) {
+ this._stopImmediatePropagationFlag = value;
+ }
+
+ get cancelable(): boolean {
+ return getPrivateValue(this, eventAttributes, "cancelable");
+ }
+
+ get composed(): boolean {
+ return getPrivateValue(this, eventAttributes, "composed");
+ }
+
+ get currentTarget(): domTypes.EventTarget {
+ return getPrivateValue(this, eventAttributes, "currentTarget");
+ }
+
+ set currentTarget(value: domTypes.EventTarget) {
+ eventAttributes.set(this, {
+ type: this.type,
+ bubbles: this.bubbles,
+ cancelable: this.cancelable,
+ composed: this.composed,
+ currentTarget: value,
+ eventPhase: this.eventPhase,
+ isTrusted: this.isTrusted,
+ relatedTarget: this.relatedTarget,
+ target: this.target,
+ timeStamp: this.timeStamp
+ });
+ }
+
+ get defaultPrevented(): boolean {
+ return this._canceledFlag;
+ }
+
+ get dispatched(): boolean {
+ return this._dispatchedFlag;
+ }
+
+ set dispatched(value: boolean) {
+ this._dispatchedFlag = value;
+ }
+
+ get eventPhase(): number {
+ return getPrivateValue(this, eventAttributes, "eventPhase");
+ }
+
+ set eventPhase(value: number) {
+ eventAttributes.set(this, {
+ type: this.type,
+ bubbles: this.bubbles,
+ cancelable: this.cancelable,
+ composed: this.composed,
+ currentTarget: this.currentTarget,
+ eventPhase: value,
+ isTrusted: this.isTrusted,
+ relatedTarget: this.relatedTarget,
+ target: this.target,
+ timeStamp: this.timeStamp
+ });
+ }
+
+ get initialized(): boolean {
+ return this._initializedFlag;
+ }
+
+ set inPassiveListener(value: boolean) {
+ this._inPassiveListenerFlag = value;
+ }
+
+ get path(): domTypes.EventPath[] {
+ return this._path;
+ }
+
+ set path(value: domTypes.EventPath[]) {
+ this._path = value;
+ }
+
+ get relatedTarget(): domTypes.EventTarget {
+ return getPrivateValue(this, eventAttributes, "relatedTarget");
+ }
+
+ set relatedTarget(value: domTypes.EventTarget) {
+ eventAttributes.set(this, {
+ type: this.type,
+ bubbles: this.bubbles,
+ cancelable: this.cancelable,
+ composed: this.composed,
+ currentTarget: this.currentTarget,
+ eventPhase: this.eventPhase,
+ isTrusted: this.isTrusted,
+ relatedTarget: value,
+ target: this.target,
+ timeStamp: this.timeStamp
+ });
+ }
+
+ get target(): domTypes.EventTarget {
+ return getPrivateValue(this, eventAttributes, "target");
+ }
+
+ set target(value: domTypes.EventTarget) {
+ eventAttributes.set(this, {
+ type: this.type,
+ bubbles: this.bubbles,
+ cancelable: this.cancelable,
+ composed: this.composed,
+ currentTarget: this.currentTarget,
+ eventPhase: this.eventPhase,
+ isTrusted: this.isTrusted,
+ relatedTarget: this.relatedTarget,
+ target: value,
+ timeStamp: this.timeStamp
+ });
+ }
+
+ get timeStamp(): Date {
+ return getPrivateValue(this, eventAttributes, "timeStamp");
+ }
+
+ get type(): string {
+ return getPrivateValue(this, eventAttributes, "type");
+ }
+
+ /** Returns the event’s path (objects on which listeners will be
+ * invoked). This does not include nodes in shadow trees if the
+ * shadow root was created with its ShadowRoot.mode closed.
+ *
+ * event.composedPath();
+ */
+ composedPath(): domTypes.EventPath[] {
+ if (this._path.length === 0) {
+ return [];
+ }
+
+ const composedPath: domTypes.EventPath[] = [
+ {
+ item: this.currentTarget,
+ itemInShadowTree: false,
+ relatedTarget: null,
+ rootOfClosedTree: false,
+ slotInClosedTree: false,
+ target: null,
+ touchTargetList: []
+ }
+ ];
+
+ let currentTargetIndex = 0;
+ let currentTargetHiddenSubtreeLevel = 0;
+
+ for (let index = this._path.length - 1; index >= 0; index--) {
+ const { item, rootOfClosedTree, slotInClosedTree } = this._path[index];
+
+ if (rootOfClosedTree) {
+ currentTargetHiddenSubtreeLevel++;
+ }
+
+ if (item === this.currentTarget) {
+ currentTargetIndex = index;
+ break;
+ }
+
+ if (slotInClosedTree) {
+ currentTargetHiddenSubtreeLevel--;
+ }
+ }
+
+ let currentHiddenLevel = currentTargetHiddenSubtreeLevel;
+ let maxHiddenLevel = currentTargetHiddenSubtreeLevel;
+
+ for (let i = currentTargetIndex - 1; i >= 0; i--) {
+ const { item, rootOfClosedTree, slotInClosedTree } = this._path[i];
+
+ if (rootOfClosedTree) {
+ currentHiddenLevel++;
+ }
+
+ if (currentHiddenLevel <= maxHiddenLevel) {
+ composedPath.unshift({
+ item,
+ itemInShadowTree: false,
+ relatedTarget: null,
+ rootOfClosedTree: false,
+ slotInClosedTree: false,
+ target: null,
+ touchTargetList: []
+ });
+ }
+
+ if (slotInClosedTree) {
+ currentHiddenLevel--;
+
+ if (currentHiddenLevel < maxHiddenLevel) {
+ maxHiddenLevel = currentHiddenLevel;
+ }
+ }
+ }
+
+ currentHiddenLevel = currentTargetHiddenSubtreeLevel;
+ maxHiddenLevel = currentTargetHiddenSubtreeLevel;
+
+ for (
+ let index = currentTargetIndex + 1;
+ index < this._path.length;
+ index++
+ ) {
+ const { item, rootOfClosedTree, slotInClosedTree } = this._path[index];
+
+ if (slotInClosedTree) {
+ currentHiddenLevel++;
+ }
+
+ if (currentHiddenLevel <= maxHiddenLevel) {
+ composedPath.push({
+ item,
+ itemInShadowTree: false,
+ relatedTarget: null,
+ rootOfClosedTree: false,
+ slotInClosedTree: false,
+ target: null,
+ touchTargetList: []
+ });
+ }
+
+ if (rootOfClosedTree) {
+ currentHiddenLevel--;
+
+ if (currentHiddenLevel < maxHiddenLevel) {
+ maxHiddenLevel = currentHiddenLevel;
+ }
+ }
+ }
+
+ return composedPath;
+ }
+
+ /** Cancels the event (if it is cancelable).
+ * See https://dom.spec.whatwg.org/#set-the-canceled-flag
+ *
+ * event.preventDefault();
+ */
+ preventDefault(): void {
+ if (this.cancelable && !this._inPassiveListenerFlag) {
+ this._canceledFlag = true;
+ }
+ }
+
+ /** Stops the propagation of events further along in the DOM.
+ *
+ * event.stopPropagation();
+ */
+ stopPropagation(): void {
+ this._stopPropagationFlag = true;
+ }
+
+ /** For this particular event, no other listener will be called.
+ * Neither those attached on the same element, nor those attached
+ * on elements which will be traversed later (in capture phase,
+ * for instance).
+ *
+ * event.stopImmediatePropagation();
+ */
+ stopImmediatePropagation(): void {
+ this._stopPropagationFlag = true;
+ this._stopImmediatePropagationFlag = true;
+ }
+}
+
+/** Built-in objects providing `get` methods for our
+ * interceptable JavaScript operations.
+ */
+Reflect.defineProperty(Event.prototype, "bubbles", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "cancelable", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "composed", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "currentTarget", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "defaultPrevented", {
+ enumerable: true
+});
+Reflect.defineProperty(Event.prototype, "dispatched", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "eventPhase", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "target", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "timeStamp", { enumerable: true });
+Reflect.defineProperty(Event.prototype, "type", { enumerable: true });
diff --git a/cli/js/event_target.ts b/cli/js/event_target.ts
new file mode 100644
index 000000000..08c39544c
--- /dev/null
+++ b/cli/js/event_target.ts
@@ -0,0 +1,503 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import { DenoError, ErrorKind } from "./errors.ts";
+import { hasOwnProperty, requiredArguments } from "./util.ts";
+import {
+ getRoot,
+ isNode,
+ isShadowRoot,
+ isShadowInclusiveAncestor,
+ isSlotable,
+ retarget
+} from "./dom_util.ts";
+import { window } from "./window.ts";
+
+// https://dom.spec.whatwg.org/#get-the-parent
+// Note: Nodes, shadow roots, and documents override this algorithm so we set it to null.
+function getEventTargetParent(
+ _eventTarget: domTypes.EventTarget,
+ _event: domTypes.Event
+): null {
+ return null;
+}
+
+export const eventTargetAssignedSlot: unique symbol = Symbol();
+export const eventTargetHasActivationBehavior: unique symbol = Symbol();
+
+export class EventTarget implements domTypes.EventTarget {
+ public [domTypes.eventTargetHost]: domTypes.EventTarget | null = null;
+ public [domTypes.eventTargetListeners]: {
+ [type in string]: domTypes.EventListener[]
+ } = {};
+ public [domTypes.eventTargetMode] = "";
+ public [domTypes.eventTargetNodeType]: domTypes.NodeType =
+ domTypes.NodeType.DOCUMENT_FRAGMENT_NODE;
+ private [eventTargetAssignedSlot] = false;
+ private [eventTargetHasActivationBehavior] = false;
+
+ public addEventListener(
+ type: string,
+ callback: (event: domTypes.Event) => void | null,
+ options?: domTypes.AddEventListenerOptions | boolean
+ ): void {
+ const this_ = this || window;
+
+ requiredArguments("EventTarget.addEventListener", arguments.length, 2);
+ const normalizedOptions: domTypes.AddEventListenerOptions = eventTargetHelpers.normalizeAddEventHandlerOptions(
+ options
+ );
+
+ if (callback === null) {
+ return;
+ }
+
+ const listeners = this_[domTypes.eventTargetListeners];
+
+ if (!hasOwnProperty(listeners, type)) {
+ listeners[type] = [];
+ }
+
+ for (let i = 0; i < listeners[type].length; ++i) {
+ const listener = listeners[type][i];
+ if (
+ ((typeof listener.options === "boolean" &&
+ listener.options === normalizedOptions.capture) ||
+ (typeof listener.options === "object" &&
+ listener.options.capture === normalizedOptions.capture)) &&
+ listener.callback === callback
+ ) {
+ return;
+ }
+ }
+
+ // eslint-disable-next-line @typescript-eslint/no-this-alias
+ const eventTarget = this;
+ listeners[type].push({
+ callback,
+ options: normalizedOptions,
+ handleEvent(event: domTypes.Event): void {
+ this.callback.call(eventTarget, event);
+ }
+ } as domTypes.EventListener);
+ }
+
+ public removeEventListener(
+ type: string,
+ callback: (event: domTypes.Event) => void | null,
+ options?: domTypes.EventListenerOptions | boolean
+ ): void {
+ const this_ = this || window;
+
+ requiredArguments("EventTarget.removeEventListener", arguments.length, 2);
+ const listeners = this_[domTypes.eventTargetListeners];
+ if (hasOwnProperty(listeners, type) && callback !== null) {
+ listeners[type] = listeners[type].filter(
+ (listener): boolean => listener.callback !== callback
+ );
+ }
+
+ const normalizedOptions: domTypes.EventListenerOptions = eventTargetHelpers.normalizeEventHandlerOptions(
+ options
+ );
+
+ if (callback === null) {
+ // Optimization, not in the spec.
+ return;
+ }
+
+ if (!listeners[type]) {
+ return;
+ }
+
+ for (let i = 0; i < listeners[type].length; ++i) {
+ const listener = listeners[type][i];
+
+ if (
+ ((typeof listener.options === "boolean" &&
+ listener.options === normalizedOptions.capture) ||
+ (typeof listener.options === "object" &&
+ listener.options.capture === normalizedOptions.capture)) &&
+ listener.callback === callback
+ ) {
+ listeners[type].splice(i, 1);
+ break;
+ }
+ }
+ }
+
+ public dispatchEvent(event: domTypes.Event): boolean {
+ const this_ = this || window;
+
+ requiredArguments("EventTarget.dispatchEvent", arguments.length, 1);
+ const listeners = this_[domTypes.eventTargetListeners];
+ if (!hasOwnProperty(listeners, event.type)) {
+ return true;
+ }
+
+ if (event.dispatched || !event.initialized) {
+ throw new DenoError(
+ ErrorKind.InvalidData,
+ "Tried to dispatch an uninitialized event"
+ );
+ }
+
+ if (event.eventPhase !== domTypes.EventPhase.NONE) {
+ throw new DenoError(
+ ErrorKind.InvalidData,
+ "Tried to dispatch a dispatching event"
+ );
+ }
+
+ return eventTargetHelpers.dispatch(this_, event);
+ }
+
+ get [Symbol.toStringTag](): string {
+ return "EventTarget";
+ }
+}
+
+const eventTargetHelpers = {
+ // https://dom.spec.whatwg.org/#concept-event-dispatch
+ dispatch(
+ targetImpl: EventTarget,
+ eventImpl: domTypes.Event,
+ targetOverride?: domTypes.EventTarget
+ ): boolean {
+ let clearTargets = false;
+ let activationTarget = null;
+
+ eventImpl.dispatched = true;
+
+ targetOverride = targetOverride || targetImpl;
+ let relatedTarget = retarget(eventImpl.relatedTarget, targetImpl);
+
+ if (
+ targetImpl !== relatedTarget ||
+ targetImpl === eventImpl.relatedTarget
+ ) {
+ const touchTargets: domTypes.EventTarget[] = [];
+
+ eventTargetHelpers.appendToEventPath(
+ eventImpl,
+ targetImpl,
+ targetOverride,
+ relatedTarget,
+ touchTargets,
+ false
+ );
+
+ const isActivationEvent = eventImpl.type === "click";
+
+ if (isActivationEvent && targetImpl[eventTargetHasActivationBehavior]) {
+ activationTarget = targetImpl;
+ }
+
+ let slotInClosedTree = false;
+ let slotable =
+ isSlotable(targetImpl) && targetImpl[eventTargetAssignedSlot]
+ ? targetImpl
+ : null;
+ let parent = getEventTargetParent(targetImpl, eventImpl);
+
+ // Populate event path
+ // https://dom.spec.whatwg.org/#event-path
+ while (parent !== null) {
+ if (slotable !== null) {
+ slotable = null;
+
+ const parentRoot = getRoot(parent);
+ if (
+ isShadowRoot(parentRoot) &&
+ parentRoot &&
+ parentRoot[domTypes.eventTargetMode] === "closed"
+ ) {
+ slotInClosedTree = true;
+ }
+ }
+
+ relatedTarget = retarget(eventImpl.relatedTarget, parent);
+
+ if (
+ isNode(parent) &&
+ isShadowInclusiveAncestor(getRoot(targetImpl), parent)
+ ) {
+ eventTargetHelpers.appendToEventPath(
+ eventImpl,
+ parent,
+ null,
+ relatedTarget,
+ touchTargets,
+ slotInClosedTree
+ );
+ } else if (parent === relatedTarget) {
+ parent = null;
+ } else {
+ targetImpl = parent;
+
+ if (
+ isActivationEvent &&
+ activationTarget === null &&
+ targetImpl[eventTargetHasActivationBehavior]
+ ) {
+ activationTarget = targetImpl;
+ }
+
+ eventTargetHelpers.appendToEventPath(
+ eventImpl,
+ parent,
+ targetImpl,
+ relatedTarget,
+ touchTargets,
+ slotInClosedTree
+ );
+ }
+
+ if (parent !== null) {
+ parent = getEventTargetParent(parent, eventImpl);
+ }
+
+ slotInClosedTree = false;
+ }
+
+ let clearTargetsTupleIndex = -1;
+ for (
+ let i = eventImpl.path.length - 1;
+ i >= 0 && clearTargetsTupleIndex === -1;
+ i--
+ ) {
+ if (eventImpl.path[i].target !== null) {
+ clearTargetsTupleIndex = i;
+ }
+ }
+ const clearTargetsTuple = eventImpl.path[clearTargetsTupleIndex];
+
+ clearTargets =
+ (isNode(clearTargetsTuple.target) &&
+ isShadowRoot(getRoot(clearTargetsTuple.target))) ||
+ (isNode(clearTargetsTuple.relatedTarget) &&
+ isShadowRoot(getRoot(clearTargetsTuple.relatedTarget)));
+
+ eventImpl.eventPhase = domTypes.EventPhase.CAPTURING_PHASE;
+
+ for (let i = eventImpl.path.length - 1; i >= 0; --i) {
+ const tuple = eventImpl.path[i];
+
+ if (tuple.target === null) {
+ eventTargetHelpers.invokeEventListeners(targetImpl, tuple, eventImpl);
+ }
+ }
+
+ for (let i = 0; i < eventImpl.path.length; i++) {
+ const tuple = eventImpl.path[i];
+
+ if (tuple.target !== null) {
+ eventImpl.eventPhase = domTypes.EventPhase.AT_TARGET;
+ } else {
+ eventImpl.eventPhase = domTypes.EventPhase.BUBBLING_PHASE;
+ }
+
+ if (
+ (eventImpl.eventPhase === domTypes.EventPhase.BUBBLING_PHASE &&
+ eventImpl.bubbles) ||
+ eventImpl.eventPhase === domTypes.EventPhase.AT_TARGET
+ ) {
+ eventTargetHelpers.invokeEventListeners(targetImpl, tuple, eventImpl);
+ }
+ }
+ }
+
+ eventImpl.eventPhase = domTypes.EventPhase.NONE;
+
+ eventImpl.currentTarget = null;
+ eventImpl.path = [];
+ eventImpl.dispatched = false;
+ eventImpl.cancelBubble = false;
+ eventImpl.cancelBubbleImmediately = false;
+
+ if (clearTargets) {
+ eventImpl.target = null;
+ eventImpl.relatedTarget = null;
+ }
+
+ // TODO: invoke activation targets if HTML nodes will be implemented
+ // if (activationTarget !== null) {
+ // if (!eventImpl.defaultPrevented) {
+ // activationTarget._activationBehavior();
+ // }
+ // }
+
+ return !eventImpl.defaultPrevented;
+ },
+
+ // https://dom.spec.whatwg.org/#concept-event-listener-invoke
+ invokeEventListeners(
+ targetImpl: EventTarget,
+ tuple: domTypes.EventPath,
+ eventImpl: domTypes.Event
+ ): void {
+ const tupleIndex = eventImpl.path.indexOf(tuple);
+ for (let i = tupleIndex; i >= 0; i--) {
+ const t = eventImpl.path[i];
+ if (t.target) {
+ eventImpl.target = t.target;
+ break;
+ }
+ }
+
+ eventImpl.relatedTarget = tuple.relatedTarget;
+
+ if (eventImpl.cancelBubble) {
+ return;
+ }
+
+ eventImpl.currentTarget = tuple.item;
+
+ eventTargetHelpers.innerInvokeEventListeners(
+ targetImpl,
+ eventImpl,
+ tuple.item[domTypes.eventTargetListeners]
+ );
+ },
+
+ // https://dom.spec.whatwg.org/#concept-event-listener-inner-invoke
+ innerInvokeEventListeners(
+ targetImpl: EventTarget,
+ eventImpl: domTypes.Event,
+ targetListeners: { [type in string]: domTypes.EventListener[] }
+ ): boolean {
+ let found = false;
+
+ const { type } = eventImpl;
+
+ if (!targetListeners || !targetListeners[type]) {
+ return found;
+ }
+
+ // Copy event listeners before iterating since the list can be modified during the iteration.
+ const handlers = targetListeners[type].slice();
+
+ for (let i = 0; i < handlers.length; i++) {
+ const listener = handlers[i];
+
+ let capture, once, passive;
+ if (typeof listener.options === "boolean") {
+ capture = listener.options;
+ once = false;
+ passive = false;
+ } else {
+ capture = listener.options.capture;
+ once = listener.options.once;
+ passive = listener.options.passive;
+ }
+
+ // Check if the event listener has been removed since the listener list was cloned.
+ if (!targetListeners[type].includes(listener)) {
+ continue;
+ }
+
+ found = true;
+
+ if (
+ (eventImpl.eventPhase === domTypes.EventPhase.CAPTURING_PHASE &&
+ !capture) ||
+ (eventImpl.eventPhase === domTypes.EventPhase.BUBBLING_PHASE && capture)
+ ) {
+ continue;
+ }
+
+ if (once) {
+ targetListeners[type].splice(
+ targetListeners[type].indexOf(listener),
+ 1
+ );
+ }
+
+ if (passive) {
+ eventImpl.inPassiveListener = true;
+ }
+
+ try {
+ if (listener.callback) {
+ listener.handleEvent(eventImpl);
+ }
+ } catch (error) {
+ throw new DenoError(ErrorKind.Interrupted, error.message);
+ }
+
+ eventImpl.inPassiveListener = false;
+
+ if (eventImpl.cancelBubbleImmediately) {
+ return found;
+ }
+ }
+
+ return found;
+ },
+
+ normalizeAddEventHandlerOptions(
+ options: boolean | domTypes.AddEventListenerOptions | undefined
+ ): domTypes.AddEventListenerOptions {
+ if (typeof options === "boolean" || typeof options === "undefined") {
+ const returnValue: domTypes.AddEventListenerOptions = {
+ capture: Boolean(options),
+ once: false,
+ passive: false
+ };
+
+ return returnValue;
+ } else {
+ return options;
+ }
+ },
+
+ normalizeEventHandlerOptions(
+ options: boolean | domTypes.EventListenerOptions | undefined
+ ): domTypes.EventListenerOptions {
+ if (typeof options === "boolean" || typeof options === "undefined") {
+ const returnValue: domTypes.EventListenerOptions = {
+ capture: Boolean(options)
+ };
+
+ return returnValue;
+ } else {
+ return options;
+ }
+ },
+
+ // https://dom.spec.whatwg.org/#concept-event-path-append
+ appendToEventPath(
+ eventImpl: domTypes.Event,
+ target: domTypes.EventTarget,
+ targetOverride: domTypes.EventTarget | null,
+ relatedTarget: domTypes.EventTarget | null,
+ touchTargets: domTypes.EventTarget[],
+ slotInClosedTree: boolean
+ ): void {
+ const itemInShadowTree = isNode(target) && isShadowRoot(getRoot(target));
+ const rootOfClosedTree =
+ isShadowRoot(target) && target[domTypes.eventTargetMode] === "closed";
+
+ eventImpl.path.push({
+ item: target,
+ itemInShadowTree,
+ target: targetOverride,
+ relatedTarget,
+ touchTargetList: touchTargets,
+ rootOfClosedTree,
+ slotInClosedTree
+ });
+ }
+};
+
+/** Built-in objects providing `get` methods for our
+ * interceptable JavaScript operations.
+ */
+Reflect.defineProperty(EventTarget.prototype, "addEventListener", {
+ enumerable: true
+});
+Reflect.defineProperty(EventTarget.prototype, "removeEventListener", {
+ enumerable: true
+});
+Reflect.defineProperty(EventTarget.prototype, "dispatchEvent", {
+ enumerable: true
+});
diff --git a/cli/js/event_target_test.ts b/cli/js/event_target_test.ts
new file mode 100644
index 000000000..9d7e7974c
--- /dev/null
+++ b/cli/js/event_target_test.ts
@@ -0,0 +1,142 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assertEquals } from "./test_util.ts";
+
+test(function addEventListenerTest(): void {
+ const document = new EventTarget();
+
+ assertEquals(document.addEventListener("x", null, false), undefined);
+ assertEquals(document.addEventListener("x", null, true), undefined);
+ assertEquals(document.addEventListener("x", null), undefined);
+});
+
+test(function constructedEventTargetCanBeUsedAsExpected(): void {
+ const target = new EventTarget();
+ const event = new Event("foo", { bubbles: true, cancelable: false });
+ let callCount = 0;
+
+ const listener = (e): void => {
+ assertEquals(e, event);
+ ++callCount;
+ };
+
+ target.addEventListener("foo", listener);
+
+ target.dispatchEvent(event);
+ assertEquals(callCount, 1);
+
+ target.dispatchEvent(event);
+ assertEquals(callCount, 2);
+
+ target.removeEventListener("foo", listener);
+ target.dispatchEvent(event);
+ assertEquals(callCount, 2);
+});
+
+test(function anEventTargetCanBeSubclassed(): void {
+ class NicerEventTarget extends EventTarget {
+ on(type, callback?, options?): void {
+ this.addEventListener(type, callback, options);
+ }
+
+ off(type, callback?, options?): void {
+ this.removeEventListener(type, callback, options);
+ }
+ }
+
+ const target = new NicerEventTarget();
+ new Event("foo", { bubbles: true, cancelable: false });
+ let callCount = 0;
+
+ const listener = (): void => {
+ ++callCount;
+ };
+
+ target.on("foo", listener);
+ assertEquals(callCount, 0);
+
+ target.off("foo", listener);
+ assertEquals(callCount, 0);
+});
+
+test(function removingNullEventListenerShouldSucceed(): void {
+ const document = new EventTarget();
+ assertEquals(document.removeEventListener("x", null, false), undefined);
+ assertEquals(document.removeEventListener("x", null, true), undefined);
+ assertEquals(document.removeEventListener("x", null), undefined);
+});
+
+test(function constructedEventTargetUseObjectPrototype(): void {
+ const target = new EventTarget();
+ const event = new Event("toString", { bubbles: true, cancelable: false });
+ let callCount = 0;
+
+ const listener = (e): void => {
+ assertEquals(e, event);
+ ++callCount;
+ };
+
+ target.addEventListener("toString", listener);
+
+ target.dispatchEvent(event);
+ assertEquals(callCount, 1);
+
+ target.dispatchEvent(event);
+ assertEquals(callCount, 2);
+
+ target.removeEventListener("toString", listener);
+ target.dispatchEvent(event);
+ assertEquals(callCount, 2);
+});
+
+test(function toStringShouldBeWebCompatible(): void {
+ const target = new EventTarget();
+ assertEquals(target.toString(), "[object EventTarget]");
+});
+
+test(function dispatchEventShouldNotThrowError(): void {
+ let hasThrown = false;
+
+ try {
+ const target = new EventTarget();
+ const event = new Event("hasOwnProperty", {
+ bubbles: true,
+ cancelable: false
+ });
+ const listener = (): void => {};
+ target.addEventListener("hasOwnProperty", listener);
+ target.dispatchEvent(event);
+ } catch {
+ hasThrown = true;
+ }
+
+ assertEquals(hasThrown, false);
+});
+
+test(function eventTargetThisShouldDefaultToWindow(): void {
+ const {
+ addEventListener,
+ dispatchEvent,
+ removeEventListener
+ } = EventTarget.prototype;
+ let n = 1;
+ const event = new Event("hello");
+ const listener = (): void => {
+ n = 2;
+ };
+
+ addEventListener("hello", listener);
+ window.dispatchEvent(event);
+ assertEquals(n, 2);
+ n = 1;
+ removeEventListener("hello", listener);
+ window.dispatchEvent(event);
+ assertEquals(n, 1);
+
+ window.addEventListener("hello", listener);
+ dispatchEvent(event);
+ assertEquals(n, 2);
+ n = 1;
+ window.removeEventListener("hello", listener);
+ dispatchEvent(event);
+ assertEquals(n, 1);
+});
diff --git a/cli/js/event_test.ts b/cli/js/event_test.ts
new file mode 100644
index 000000000..72f4f5855
--- /dev/null
+++ b/cli/js/event_test.ts
@@ -0,0 +1,95 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assertEquals, assertNotEquals } from "./test_util.ts";
+
+test(function eventInitializedWithType(): void {
+ const type = "click";
+ const event = new Event(type);
+
+ assertEquals(event.isTrusted, false);
+ assertEquals(event.target, null);
+ assertEquals(event.currentTarget, null);
+ assertEquals(event.type, "click");
+ assertEquals(event.bubbles, false);
+ assertEquals(event.cancelable, false);
+});
+
+test(function eventInitializedWithTypeAndDict(): void {
+ const init = "submit";
+ const eventInit = { bubbles: true, cancelable: true } as EventInit;
+ const event = new Event(init, eventInit);
+
+ assertEquals(event.isTrusted, false);
+ assertEquals(event.target, null);
+ assertEquals(event.currentTarget, null);
+ assertEquals(event.type, "submit");
+ assertEquals(event.bubbles, true);
+ assertEquals(event.cancelable, true);
+});
+
+test(function eventComposedPathSuccess(): void {
+ const type = "click";
+ const event = new Event(type);
+ const composedPath = event.composedPath();
+
+ assertEquals(composedPath, []);
+});
+
+test(function eventStopPropagationSuccess(): void {
+ const type = "click";
+ const event = new Event(type);
+
+ assertEquals(event.cancelBubble, false);
+ event.stopPropagation();
+ assertEquals(event.cancelBubble, true);
+});
+
+test(function eventStopImmediatePropagationSuccess(): void {
+ const type = "click";
+ const event = new Event(type);
+
+ assertEquals(event.cancelBubble, false);
+ assertEquals(event.cancelBubbleImmediately, false);
+ event.stopImmediatePropagation();
+ assertEquals(event.cancelBubble, true);
+ assertEquals(event.cancelBubbleImmediately, true);
+});
+
+test(function eventPreventDefaultSuccess(): void {
+ const type = "click";
+ const event = new Event(type);
+
+ assertEquals(event.defaultPrevented, false);
+ event.preventDefault();
+ assertEquals(event.defaultPrevented, false);
+
+ const eventInit = { bubbles: true, cancelable: true } as EventInit;
+ const cancelableEvent = new Event(type, eventInit);
+ assertEquals(cancelableEvent.defaultPrevented, false);
+ cancelableEvent.preventDefault();
+ assertEquals(cancelableEvent.defaultPrevented, true);
+});
+
+test(function eventInitializedWithNonStringType(): void {
+ const type = undefined;
+ const event = new Event(type);
+
+ assertEquals(event.isTrusted, false);
+ assertEquals(event.target, null);
+ assertEquals(event.currentTarget, null);
+ assertEquals(event.type, "undefined");
+ assertEquals(event.bubbles, false);
+ assertEquals(event.cancelable, false);
+});
+
+// ref https://github.com/web-platform-tests/wpt/blob/master/dom/events/Event-isTrusted.any.js
+test(function eventIsTrusted(): void {
+ const desc1 = Object.getOwnPropertyDescriptor(new Event("x"), "isTrusted");
+ assertNotEquals(desc1, undefined);
+ assertEquals(typeof desc1.get, "function");
+
+ const desc2 = Object.getOwnPropertyDescriptor(new Event("x"), "isTrusted");
+ assertNotEquals(desc2, undefined);
+ assertEquals(typeof desc2.get, "function");
+
+ assertEquals(desc1.get, desc2.get);
+});
diff --git a/cli/js/fetch.ts b/cli/js/fetch.ts
new file mode 100644
index 000000000..0a5f793a8
--- /dev/null
+++ b/cli/js/fetch.ts
@@ -0,0 +1,478 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import {
+ assert,
+ createResolvable,
+ notImplemented,
+ isTypedArray
+} from "./util.ts";
+import * as domTypes from "./dom_types.ts";
+import { TextDecoder, TextEncoder } from "./text_encoding.ts";
+import { DenoBlob, bytesSymbol as blobBytesSymbol } from "./blob.ts";
+import { Headers } from "./headers.ts";
+import * as io from "./io.ts";
+import { read, close } from "./files.ts";
+import { Buffer } from "./buffer.ts";
+import { FormData } from "./form_data.ts";
+import { URLSearchParams } from "./url_search_params.ts";
+import * as dispatch from "./dispatch.ts";
+import { sendAsync } from "./dispatch_json.ts";
+
+function getHeaderValueParams(value: string): Map<string, string> {
+ const params = new Map();
+ // Build the map via set() calls; the mapped tuples don't line up with the
+ // Map constructor's expected parameter type.
+ value
+ .split(";")
+ .slice(1)
+ .map((s): string[] => s.trim().split("="))
+ .filter((arr): boolean => arr.length > 1)
+ .map(([k, v]): [string, string] => [k, v.replace(/^"([^"]*)"$/, "$1")])
+ .forEach(([k, v]): Map<string, string> => params.set(k, v));
+ return params;
+}
+
+function hasHeaderValueOf(s: string, value: string): boolean {
+ return new RegExp(`^${value}[\t\s]*;?`).test(s);
+}
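+// For example, given the parsing above:
+//   getHeaderValueParams('text/plain; charset="utf-8"') -> Map { "charset" => "utf-8" }
+//   hasHeaderValueOf("text/plain; charset=utf-8", "text/plain") -> true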
+
+class Body implements domTypes.Body, domTypes.ReadableStream, io.ReadCloser {
+ private _bodyUsed = false;
+ private _bodyPromise: null | Promise<ArrayBuffer> = null;
+ private _data: ArrayBuffer | null = null;
+ readonly locked: boolean = false; // TODO
+ readonly body: null | Body = this;
+
+ constructor(private rid: number, readonly contentType: string) {}
+
+ private async _bodyBuffer(): Promise<ArrayBuffer> {
+ assert(this._bodyPromise == null);
+ const buf = new Buffer();
+ try {
+ const nread = await buf.readFrom(this);
+ const ui8 = buf.bytes();
+ assert(ui8.byteLength === nread);
+ this._data = ui8.buffer.slice(
+ ui8.byteOffset,
+ ui8.byteOffset + nread
+ ) as ArrayBuffer;
+ assert(this._data.byteLength === nread);
+ } finally {
+ this.close();
+ }
+
+ return this._data;
+ }
+
+ async arrayBuffer(): Promise<ArrayBuffer> {
+ // If we've already buffered the response, just return it.
+ if (this._data != null) {
+ return this._data;
+ }
+
+ // If there is no _bodyPromise yet, start it.
+ if (this._bodyPromise == null) {
+ this._bodyPromise = this._bodyBuffer();
+ }
+
+ return this._bodyPromise;
+ }
+
+ async blob(): Promise<domTypes.Blob> {
+ const arrayBuffer = await this.arrayBuffer();
+ return new DenoBlob([arrayBuffer], {
+ type: this.contentType
+ });
+ }
+
+ // ref: https://fetch.spec.whatwg.org/#body-mixin
+ async formData(): Promise<domTypes.FormData> {
+ const formData = new FormData();
+ const enc = new TextEncoder();
+ if (hasHeaderValueOf(this.contentType, "multipart/form-data")) {
+ const params = getHeaderValueParams(this.contentType);
+ if (!params.has("boundary")) {
+ // TypeError is required by spec
+ throw new TypeError("multipart/form-data must provide a boundary");
+ }
+ // ref: https://tools.ietf.org/html/rfc2046#section-5.1
+ const boundary = params.get("boundary")!;
+ const dashBoundary = `--${boundary}`;
+ const delimiter = `\r\n${dashBoundary}`;
+ const closeDelimiter = `${delimiter}--`;
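+ // e.g. for boundary "abc": dashBoundary is "--abc", delimiter is
+ // "\r\n--abc", and closeDelimiter is "\r\n--abc--".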
+
+ const body = await this.text();
+ let bodyParts: string[];
+ const bodyEpilogueSplit = body.split(closeDelimiter);
+ if (bodyEpilogueSplit.length < 2) {
+ bodyParts = [];
+ } else {
+ // discard epilogue
+ const bodyEpilogueTrimmed = bodyEpilogueSplit[0];
+ // the first boundary is handled specially because its leading \r\n is optional
+ const firstBoundaryIndex = bodyEpilogueTrimmed.indexOf(dashBoundary);
+ if (firstBoundaryIndex < 0) {
+ throw new TypeError("Invalid boundary");
+ }
+ const bodyPreambleTrimmed = bodyEpilogueTrimmed
+ .slice(firstBoundaryIndex + dashBoundary.length)
+ .replace(/^[\s\r\n\t]+/, ""); // remove transport-padding CRLF
+ // (String.prototype.trimStart may not be available, hence the regexp.)
+ // Careful: a body-part may legitimately end with a trailing \r\n,
+ // as long as it is not part of `delimiter`.
+ bodyParts = bodyPreambleTrimmed
+ .split(delimiter)
+ .map((s): string => s.replace(/^[\s\r\n\t]+/, ""));
+ // TODO: LWSP definition is actually trickier,
+ // but should be fine in our case since without headers
+ // we should just discard the part
+ }
+ for (const bodyPart of bodyParts) {
+ const headers = new Headers();
+ const headerOctetSeparatorIndex = bodyPart.indexOf("\r\n\r\n");
+ if (headerOctetSeparatorIndex < 0) {
+ continue; // Skip unknown part
+ }
+ const headerText = bodyPart.slice(0, headerOctetSeparatorIndex);
+ const octets = bodyPart.slice(headerOctetSeparatorIndex + 4);
+
+ // TODO: use textproto.readMIMEHeader from deno_std
+ const rawHeaders = headerText.split("\r\n");
+ for (const rawHeader of rawHeaders) {
+ const sepIndex = rawHeader.indexOf(":");
+ if (sepIndex < 0) {
+ continue; // Skip this header
+ }
+ const key = rawHeader.slice(0, sepIndex);
+ const value = rawHeader.slice(sepIndex + 1);
+ headers.set(key, value);
+ }
+ if (!headers.has("content-disposition")) {
+ continue; // Skip unknown part
+ }
+ // Content-Transfer-Encoding Deprecated
+ const contentDisposition = headers.get("content-disposition")!;
+ const partContentType = headers.get("content-type") || "text/plain";
+ // TODO: custom charset encoding (needs TextEncoder support)
+ // const contentTypeCharset =
+ // getHeaderValueParams(partContentType).get("charset") || "";
+ if (!hasHeaderValueOf(contentDisposition, "form-data")) {
+ continue; // Skip, might not be form-data
+ }
+ const dispositionParams = getHeaderValueParams(contentDisposition);
+ if (!dispositionParams.has("name")) {
+ continue; // Skip, unknown name
+ }
+ const dispositionName = dispositionParams.get("name")!;
+ if (dispositionParams.has("filename")) {
+ const filename = dispositionParams.get("filename")!;
+ const blob = new DenoBlob([enc.encode(octets)], {
+ type: partContentType
+ });
+ // TODO: based on spec
+ // https://xhr.spec.whatwg.org/#dom-formdata-append
+ // https://xhr.spec.whatwg.org/#create-an-entry
+ // The spec does not currently describe how to pass the content-type
+ // through to the internally created file object.
+ formData.append(dispositionName, blob, filename);
+ } else {
+ formData.append(dispositionName, octets);
+ }
+ }
+ return formData;
+ } else if (
+ hasHeaderValueOf(this.contentType, "application/x-www-form-urlencoded")
+ ) {
+ // From https://github.com/github/fetch/blob/master/fetch.js
+ // Copyright (c) 2014-2016 GitHub, Inc. MIT License
+ const body = await this.text();
+ try {
+ body
+ .trim()
+ .split("&")
+ .forEach(
+ (bytes): void => {
+ if (bytes) {
+ const split = bytes.split("=");
+ const name = split.shift()!.replace(/\+/g, " ");
+ const value = split.join("=").replace(/\+/g, " ");
+ formData.append(
+ decodeURIComponent(name),
+ decodeURIComponent(value)
+ );
+ }
+ }
+ );
+ } catch (e) {
+ throw new TypeError("Invalid form urlencoded format");
+ }
+ return formData;
+ } else {
+ throw new TypeError("Invalid form data");
+ }
+ }
+
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ async json(): Promise<any> {
+ const text = await this.text();
+ return JSON.parse(text);
+ }
+
+ async text(): Promise<string> {
+ const ab = await this.arrayBuffer();
+ const decoder = new TextDecoder("utf-8");
+ return decoder.decode(ab);
+ }
+
+ read(p: Uint8Array): Promise<number | io.EOF> {
+ this._bodyUsed = true;
+ return read(this.rid, p);
+ }
+
+ close(): void {
+ close(this.rid);
+ }
+
+ async cancel(): Promise<void> {
+ return notImplemented();
+ }
+
+ getReader(): domTypes.ReadableStreamReader {
+ return notImplemented();
+ }
+
+ tee(): [domTypes.ReadableStream, domTypes.ReadableStream] {
+ return notImplemented();
+ }
+
+ [Symbol.asyncIterator](): AsyncIterableIterator<Uint8Array> {
+ return io.toAsyncIterator(this);
+ }
+
+ get bodyUsed(): boolean {
+ return this._bodyUsed;
+ }
+}
+
+export class Response implements domTypes.Response {
+ readonly type = "basic"; // TODO
+ readonly redirected: boolean;
+ headers: domTypes.Headers;
+ readonly trailer: Promise<domTypes.Headers>;
+ readonly body: Body;
+
+ constructor(
+ readonly url: string,
+ readonly status: number,
+ readonly statusText: string,
+ headersList: Array<[string, string]>,
+ rid: number,
+ redirected_: boolean,
+ body_: null | Body = null
+ ) {
+ this.trailer = createResolvable();
+ this.headers = new Headers(headersList);
+ const contentType = this.headers.get("content-type") || "";
+
+ if (body_ == null) {
+ this.body = new Body(rid, contentType);
+ } else {
+ this.body = body_;
+ }
+
+ this.redirected = redirected_;
+ }
+
+ async arrayBuffer(): Promise<ArrayBuffer> {
+ return this.body.arrayBuffer();
+ }
+
+ async blob(): Promise<domTypes.Blob> {
+ return this.body.blob();
+ }
+
+ async formData(): Promise<domTypes.FormData> {
+ return this.body.formData();
+ }
+
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ async json(): Promise<any> {
+ return this.body.json();
+ }
+
+ async text(): Promise<string> {
+ return this.body.text();
+ }
+
+ get ok(): boolean {
+ return 200 <= this.status && this.status < 300;
+ }
+
+ get bodyUsed(): boolean {
+ return this.body.bodyUsed;
+ }
+
+ clone(): domTypes.Response {
+ if (this.bodyUsed) {
+ throw new TypeError(
+ "Failed to execute 'clone' on 'Response': Response body is already used"
+ );
+ }
+
+ const iterators = this.headers.entries();
+ const headersList: Array<[string, string]> = [];
+ for (const header of iterators) {
+ headersList.push(header);
+ }
+
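+ // Note: the clone reuses this.body (hence the placeholder rid of -1), so
+ // both responses share the same underlying buffered data.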
+ return new Response(
+ this.url,
+ this.status,
+ this.statusText,
+ headersList,
+ -1,
+ this.redirected,
+ this.body
+ );
+ }
+}
+
+interface FetchResponse {
+ bodyRid: number;
+ status: number;
+ statusText: string;
+ headers: Array<[string, string]>;
+}
+
+async function sendFetchReq(
+ url: string,
+ method: string | null,
+ headers: domTypes.Headers | null,
+ body: ArrayBufferView | undefined
+): Promise<FetchResponse> {
+ let headerArray: Array<[string, string]> = [];
+ if (headers) {
+ headerArray = Array.from(headers.entries());
+ }
+
+ let zeroCopy = undefined;
+ if (body) {
+ zeroCopy = new Uint8Array(body.buffer, body.byteOffset, body.byteLength);
+ }
+
+ const args = {
+ method,
+ url,
+ headers: headerArray
+ };
+
+ return (await sendAsync(dispatch.OP_FETCH, args, zeroCopy)) as FetchResponse;
+}
+
+/** Fetch a resource from the network. */
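+// Illustrative usage (endpoint borrowed from fetch_test.ts):
+//
+//   const res = await fetch("http://localhost:4545/package.json");
+//   const pkg = await res.json();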
+export async function fetch(
+ input: domTypes.Request | string,
+ init?: domTypes.RequestInit
+): Promise<Response> {
+ let url: string;
+ let method: string | null = null;
+ let headers: domTypes.Headers | null = null;
+ let body: ArrayBufferView | undefined;
+ let redirected = false;
+ let remRedirectCount = 20; // TODO: find a better way to handle the redirect limit
+
+ if (typeof input === "string") {
+ url = input;
+ if (init != null) {
+ method = init.method || null;
+ if (init.headers) {
+ headers =
+ init.headers instanceof Headers
+ ? init.headers
+ : new Headers(init.headers);
+ } else {
+ headers = null;
+ }
+
+ // ref: https://fetch.spec.whatwg.org/#body-mixin
+ // Body should have been a mixin
+ // but we are treating it as a separate class
+ if (init.body) {
+ if (!headers) {
+ headers = new Headers();
+ }
+ let contentType = "";
+ if (typeof init.body === "string") {
+ body = new TextEncoder().encode(init.body);
+ contentType = "text/plain;charset=UTF-8";
+ } else if (isTypedArray(init.body)) {
+ body = init.body;
+ } else if (init.body instanceof URLSearchParams) {
+ body = new TextEncoder().encode(init.body.toString());
+ contentType = "application/x-www-form-urlencoded;charset=UTF-8";
+ } else if (init.body instanceof DenoBlob) {
+ body = init.body[blobBytesSymbol];
+ contentType = init.body.type;
+ } else {
+ // TODO: FormData, ReadableStream
+ notImplemented();
+ }
+ if (contentType && !headers.has("content-type")) {
+ headers.set("content-type", contentType);
+ }
+ }
+ }
+ } else {
+ url = input.url;
+ method = input.method;
+ headers = input.headers;
+
+ //@ts-ignore
+ if (input._bodySource) {
+ body = new DataView(await input.arrayBuffer());
+ }
+ }
+
+ while (remRedirectCount) {
+ const fetchResponse = await sendFetchReq(url, method, headers, body);
+
+ const response = new Response(
+ url,
+ fetchResponse.status,
+ fetchResponse.statusText,
+ fetchResponse.headers,
+ fetchResponse.bodyRid,
+ redirected
+ );
+ if ([301, 302, 303, 307, 308].includes(response.status)) {
+ // We're in a redirect status
+ switch ((init && init.redirect) || "follow") {
+ case "error":
+ throw notImplemented();
+ case "manual":
+ throw notImplemented();
+ case "follow":
+ default:
+ let redirectUrl = response.headers.get("Location");
+ if (redirectUrl == null) {
+ return response; // Unspecified
+ }
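+ // A relative Location (e.g. "/tests/") is resolved against the scheme
+ // and host of the current url, e.g.
+ // "http://localhost:4545/tests" + "/tests/" -> "http://localhost:4545/tests/".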
+ if (
+ !redirectUrl.startsWith("http://") &&
+ !redirectUrl.startsWith("https://")
+ ) {
+ redirectUrl =
+ url.split("//")[0] +
+ "//" +
+ url.split("//")[1].split("/")[0] +
+ redirectUrl; // TODO: handle relative redirection more gracefully
+ }
+ url = redirectUrl;
+ redirected = true;
+ remRedirectCount--;
+ }
+ } else {
+ return response;
+ }
+ }
+ // Return a network error due to too many redirections
+ throw notImplemented();
+}
diff --git a/cli/js/fetch_test.ts b/cli/js/fetch_test.ts
new file mode 100644
index 000000000..56c693681
--- /dev/null
+++ b/cli/js/fetch_test.ts
@@ -0,0 +1,357 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import {
+ test,
+ testPerm,
+ assert,
+ assertEquals,
+ assertStrContains,
+ assertThrows
+} from "./test_util.ts";
+
+testPerm({ net: true }, async function fetchConnectionError(): Promise<void> {
+ let err;
+ try {
+ await fetch("http://localhost:4000");
+ } catch (err_) {
+ err = err_;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.HttpOther);
+ assertEquals(err.name, "HttpOther");
+ assertStrContains(err.message, "error trying to connect");
+});
+
+testPerm({ net: true }, async function fetchJsonSuccess(): Promise<void> {
+ const response = await fetch("http://localhost:4545/package.json");
+ const json = await response.json();
+ assertEquals(json.name, "deno");
+});
+
+test(async function fetchPerm(): Promise<void> {
+ let err;
+ try {
+ await fetch("http://localhost:4545/package.json");
+ } catch (err_) {
+ err = err_;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+testPerm({ net: true }, async function fetchUrl(): Promise<void> {
+ const response = await fetch("http://localhost:4545/package.json");
+ assertEquals(response.url, "http://localhost:4545/package.json");
+});
+
+testPerm({ net: true }, async function fetchHeaders(): Promise<void> {
+ const response = await fetch("http://localhost:4545/package.json");
+ const headers = response.headers;
+ assertEquals(headers.get("Content-Type"), "application/json");
+ assert(headers.get("Server").startsWith("SimpleHTTP"));
+});
+
+testPerm({ net: true }, async function fetchBlob(): Promise<void> {
+ const response = await fetch("http://localhost:4545/package.json");
+ const headers = response.headers;
+ const blob = await response.blob();
+ assertEquals(blob.type, headers.get("Content-Type"));
+ assertEquals(blob.size, Number(headers.get("Content-Length")));
+});
+
+testPerm({ net: true }, async function fetchBodyUsed(): Promise<void> {
+ const response = await fetch("http://localhost:4545/package.json");
+ assertEquals(response.bodyUsed, false);
+ assertThrows(
+ (): void => {
+ // Assigning to a read-only property throws in strict mode.
+ response.bodyUsed = true;
+ }
+ );
+ await response.blob();
+ assertEquals(response.bodyUsed, true);
+});
+
+testPerm({ net: true }, async function fetchAsyncIterator(): Promise<void> {
+ const response = await fetch("http://localhost:4545/package.json");
+ const headers = response.headers;
+ let total = 0;
+ for await (const chunk of response.body) {
+ total += chunk.length;
+ }
+
+ assertEquals(total, Number(headers.get("Content-Length")));
+});
+
+testPerm({ net: true }, async function responseClone(): Promise<void> {
+ const response = await fetch("http://localhost:4545/package.json");
+ const response1 = response.clone();
+ assert(response !== response1);
+ assertEquals(response.status, response1.status);
+ assertEquals(response.statusText, response1.statusText);
+ const ab = await response.arrayBuffer();
+ const ab1 = await response1.arrayBuffer();
+ for (let i = 0; i < ab.byteLength; i++) {
+ assertEquals(ab[i], ab1[i]);
+ }
+});
+
+testPerm({ net: true }, async function fetchEmptyInvalid(): Promise<void> {
+ let err;
+ try {
+ await fetch("");
+ } catch (err_) {
+ err = err_;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.RelativeUrlWithoutBase);
+ assertEquals(err.name, "RelativeUrlWithoutBase");
+});
+
+testPerm({ net: true }, async function fetchMultipartFormDataSuccess(): Promise<
+ void
+> {
+ const response = await fetch(
+ "http://localhost:4545/tests/subdir/multipart_form_data.txt"
+ );
+ const formData = await response.formData();
+ assert(formData.has("field_1"));
+ assertEquals(formData.get("field_1").toString(), "value_1 \r\n");
+ assert(formData.has("field_2"));
+ /* TODO(ry) Re-enable this test once we bring back the global File type.
+ const file = formData.get("field_2") as File;
+ assertEquals(file.name, "file.js");
+ */
+ // Currently we cannot read from file...
+});
+
+testPerm(
+ { net: true },
+ async function fetchURLEncodedFormDataSuccess(): Promise<void> {
+ const response = await fetch(
+ "http://localhost:4545/tests/subdir/form_urlencoded.txt"
+ );
+ const formData = await response.formData();
+ assert(formData.has("field_1"));
+ assertEquals(formData.get("field_1").toString(), "Hi");
+ assert(formData.has("field_2"));
+ assertEquals(formData.get("field_2").toString(), "<Deno>");
+ }
+);
+
+testPerm({ net: true }, async function fetchWithRedirection(): Promise<void> {
+ const response = await fetch("http://localhost:4546/"); // will redirect to http://localhost:4545/
+ assertEquals(response.status, 200);
+ assertEquals(response.statusText, "OK");
+ assertEquals(response.url, "http://localhost:4545/");
+ const body = await response.text();
+ assert(body.includes("<title>Directory listing for /</title>"));
+});
+
+testPerm({ net: true }, async function fetchWithRelativeRedirection(): Promise<
+ void
+> {
+ const response = await fetch("http://localhost:4545/tests"); // will redirect to /tests/
+ assertEquals(response.status, 200);
+ assertEquals(response.statusText, "OK");
+ const body = await response.text();
+ assert(body.includes("<title>Directory listing for /tests/</title>"));
+});
+
+// The feature below is not implemented yet; the test should pass once it is.
+/*
+testPerm({ net: true }, async function fetchWithInfRedirection(): Promise<
+ void
+> {
+ const response = await fetch("http://localhost:4549/tests"); // will redirect to the same place
+ assertEquals(response.status, 0); // network error
+});
+*/
+
+testPerm({ net: true }, async function fetchInitStringBody(): Promise<void> {
+ const data = "Hello World";
+ const response = await fetch("http://localhost:4545/echo_server", {
+ method: "POST",
+ body: data
+ });
+ const text = await response.text();
+ assertEquals(text, data);
+ assert(response.headers.get("content-type").startsWith("text/plain"));
+});
+
+testPerm({ net: true }, async function fetchRequestInitStringBody(): Promise<
+ void
+> {
+ const data = "Hello World";
+ const req = new Request("http://localhost:4545/echo_server", {
+ method: "POST",
+ body: data
+ });
+ const response = await fetch(req);
+ const text = await response.text();
+ assertEquals(text, data);
+});
+
+testPerm({ net: true }, async function fetchInitTypedArrayBody(): Promise<
+ void
+> {
+ const data = "Hello World";
+ const response = await fetch("http://localhost:4545/echo_server", {
+ method: "POST",
+ body: new TextEncoder().encode(data)
+ });
+ const text = await response.text();
+ assertEquals(text, data);
+});
+
+testPerm({ net: true }, async function fetchInitURLSearchParamsBody(): Promise<
+ void
+> {
+ const data = "param1=value1&param2=value2";
+ const params = new URLSearchParams(data);
+ const response = await fetch("http://localhost:4545/echo_server", {
+ method: "POST",
+ body: params
+ });
+ const text = await response.text();
+ assertEquals(text, data);
+ assert(
+ response.headers
+ .get("content-type")
+ .startsWith("application/x-www-form-urlencoded")
+ );
+});
+
+testPerm({ net: true }, async function fetchInitBlobBody(): Promise<void> {
+ const data = "const a = 1";
+ const blob = new Blob([data], {
+ type: "text/javascript"
+ });
+ const response = await fetch("http://localhost:4545/echo_server", {
+ method: "POST",
+ body: blob
+ });
+ const text = await response.text();
+ assertEquals(text, data);
+ assert(response.headers.get("content-type").startsWith("text/javascript"));
+});
+
+testPerm({ net: true }, async function fetchUserAgent(): Promise<void> {
+ const data = "Hello World";
+ const response = await fetch("http://localhost:4545/echo_server", {
+ method: "POST",
+ body: new TextEncoder().encode(data)
+ });
+ assertEquals(response.headers.get("user-agent"), `Deno/${Deno.version.deno}`);
+ await response.text();
+});
+
+// TODO(ry) The following tests work but are flaky. There's a race condition
+// somewhere. Here is what one of these flaky failures looks like:
+//
+// test fetchPostBodyString_permW0N1E0R0
+// assertEquals failed. actual = expected = POST /blah HTTP/1.1
+// hello: World
+// foo: Bar
+// host: 127.0.0.1:4502
+// content-length: 11
+// hello world
+// Error: actual: expected: POST /blah HTTP/1.1
+// hello: World
+// foo: Bar
+// host: 127.0.0.1:4502
+// content-length: 11
+// hello world
+// at Object.assertEquals (file:///C:/deno/js/testing/util.ts:29:11)
+// at fetchPostBodyString (file
+
+/*
+function bufferServer(addr: string): Deno.Buffer {
+ const listener = Deno.listen(addr);
+ const buf = new Deno.Buffer();
+ listener.accept().then(async conn => {
+ const p1 = buf.readFrom(conn);
+ const p2 = conn.write(
+ new TextEncoder().encode(
+ "HTTP/1.0 404 Not Found\r\nContent-Length: 2\r\n\r\nNF"
+ )
+ );
+ // Wait for both an EOF on the read side of the socket and for the write to
+ // complete before closing it. Due to keep-alive, the EOF won't arrive until
+ // after the Connection-closing (HTTP/1.0) response, so readFrom() can't
+ // finish before the write. Conversely, since readFrom() is async, waiting
+ // for the write() to complete alone is no guarantee that we've read the
+ // incoming request.
+ await Promise.all([p1, p2]);
+ conn.close();
+ listener.close();
+ });
+ return buf;
+}
+
+testPerm({ net: true }, async function fetchRequest():Promise<void> {
+ const addr = "127.0.0.1:4501";
+ const buf = bufferServer(addr);
+ const response = await fetch(`http://${addr}/blah`, {
+ method: "POST",
+ headers: [["Hello", "World"], ["Foo", "Bar"]]
+ });
+ assertEquals(response.status, 404);
+ assertEquals(response.headers.get("Content-Length"), "2");
+
+ const actual = new TextDecoder().decode(buf.bytes());
+ const expected = [
+ "POST /blah HTTP/1.1\r\n",
+ "hello: World\r\n",
+ "foo: Bar\r\n",
+ `host: ${addr}\r\n\r\n`
+ ].join("");
+ assertEquals(actual, expected);
+});
+
+testPerm({ net: true }, async function fetchPostBodyString():Promise<void> {
+ const addr = "127.0.0.1:4502";
+ const buf = bufferServer(addr);
+ const body = "hello world";
+ const response = await fetch(`http://${addr}/blah`, {
+ method: "POST",
+ headers: [["Hello", "World"], ["Foo", "Bar"]],
+ body
+ });
+ assertEquals(response.status, 404);
+ assertEquals(response.headers.get("Content-Length"), "2");
+
+ const actual = new TextDecoder().decode(buf.bytes());
+ const expected = [
+ "POST /blah HTTP/1.1\r\n",
+ "hello: World\r\n",
+ "foo: Bar\r\n",
+ `host: ${addr}\r\n`,
+ `content-length: ${body.length}\r\n\r\n`,
+ body
+ ].join("");
+ assertEquals(actual, expected);
+});
+
+testPerm({ net: true }, async function fetchPostBodyTypedArray():Promise<void> {
+ const addr = "127.0.0.1:4503";
+ const buf = bufferServer(addr);
+ const bodyStr = "hello world";
+ const body = new TextEncoder().encode(bodyStr);
+ const response = await fetch(`http://${addr}/blah`, {
+ method: "POST",
+ headers: [["Hello", "World"], ["Foo", "Bar"]],
+ body
+ });
+ assertEquals(response.status, 404);
+ assertEquals(response.headers.get("Content-Length"), "2");
+
+ const actual = new TextDecoder().decode(buf.bytes());
+ const expected = [
+ "POST /blah HTTP/1.1\r\n",
+ "hello: World\r\n",
+ "foo: Bar\r\n",
+ `host: ${addr}\r\n`,
+ `content-length: ${body.byteLength}\r\n\r\n`,
+ bodyStr
+ ].join("");
+ assertEquals(actual, expected);
+});
+*/
diff --git a/cli/js/file_info.ts b/cli/js/file_info.ts
new file mode 100644
index 000000000..a98989e79
--- /dev/null
+++ b/cli/js/file_info.ts
@@ -0,0 +1,91 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { StatResponse } from "./stat.ts";
+
+/** A FileInfo describes a file and is returned by `stat`, `lstat`,
+ * `statSync`, `lstatSync`.
+ */
+export interface FileInfo {
+ /** The size of the file, in bytes. */
+ len: number;
+ /** The last modification time of the file. This corresponds to the `mtime`
+ * field from `stat` on Unix and `ftLastWriteTime` on Windows. This may not
+ * be available on all platforms.
+ */
+ modified: number | null;
+ /** The last access time of the file. This corresponds to the `atime`
+ * field from `stat` on Unix and `ftLastAccessTime` on Windows. This may not
+ * be available on all platforms.
+ */
+ accessed: number | null;
+ /** The creation time of the file. This corresponds to the `birthtime`
+ * field from `stat` on Unix and `ftCreationTime` on Windows. This may not
+ * be available on all platforms.
+ */
+ created: number | null;
+ /** The underlying raw st_mode bits that contain the standard Unix permissions
+ * for this file/directory. TODO: match mode behavior with Go on Windows.
+ */
+ mode: number | null;
+
+ /** The file or directory name. */
+ name: string | null;
+
+ /** Returns whether this is info for a regular file. This result is mutually
+ * exclusive to `FileInfo.isDirectory` and `FileInfo.isSymlink`.
+ */
+ isFile(): boolean;
+
+ /** Returns whether this is info for a regular directory. This result is
+ * mutually exclusive to `FileInfo.isFile` and `FileInfo.isSymlink`.
+ */
+ isDirectory(): boolean;
+
+ /** Returns whether this is info for a symlink. This result is
+ * mutually exclusive to `FileInfo.isFile` and `FileInfo.isDirectory`.
+ */
+ isSymlink(): boolean;
+}
+
+// @internal
+export class FileInfoImpl implements FileInfo {
+ private readonly _isFile: boolean;
+ private readonly _isSymlink: boolean;
+ len: number;
+ modified: number | null;
+ accessed: number | null;
+ created: number | null;
+ mode: number | null;
+ name: string | null;
+
+ /* @internal */
+ constructor(private _res: StatResponse) {
+ const modified = this._res.modified;
+ const accessed = this._res.accessed;
+ const created = this._res.created;
+ const hasMode = this._res.hasMode;
+ const mode = this._res.mode; // negative for invalid mode (Windows)
+ const name = this._res.name;
+
+ this._isFile = this._res.isFile;
+ this._isSymlink = this._res.isSymlink;
+ this.len = this._res.len;
+ this.modified = modified ? modified : null;
+ this.accessed = accessed ? accessed : null;
+ this.created = created ? created : null;
+ // null on Windows
+ this.mode = hasMode ? mode : null;
+ this.name = name ? name : null;
+ }
+
+ isFile(): boolean {
+ return this._isFile;
+ }
+
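+ // Only the isFile/isSymlink flags are carried over from the stat response,
+ // so a directory is inferred as being neither of the two.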
+ isDirectory(): boolean {
+ return !this._isFile && !this._isSymlink;
+ }
+
+ isSymlink(): boolean {
+ return this._isSymlink;
+ }
+}
diff --git a/cli/js/file_test.ts b/cli/js/file_test.ts
new file mode 100644
index 000000000..345dcd8fe
--- /dev/null
+++ b/cli/js/file_test.ts
@@ -0,0 +1,103 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assert, assertEquals } from "./test_util.ts";
+
+function testFirstArgument(arg1, expectedSize): void {
+ const file = new File(arg1, "name");
+ assert(file instanceof File);
+ assertEquals(file.name, "name");
+ assertEquals(file.size, expectedSize);
+ assertEquals(file.type, "");
+}
+
+test(function fileEmptyFileBits(): void {
+ testFirstArgument([], 0);
+});
+
+test(function fileStringFileBits(): void {
+ testFirstArgument(["bits"], 4);
+});
+
+test(function fileUnicodeStringFileBits(): void {
+ testFirstArgument(["𝓽𝓮𝔁𝓽"], 16);
+});
+
+test(function fileStringObjectFileBits(): void {
+ testFirstArgument([new String("string object")], 13);
+});
+
+test(function fileEmptyBlobFileBits(): void {
+ testFirstArgument([new Blob()], 0);
+});
+
+test(function fileBlobFileBits(): void {
+ testFirstArgument([new Blob(["bits"])], 4);
+});
+
+test(function fileEmptyFileFileBits(): void {
+ testFirstArgument([new File([], "world.txt")], 0);
+});
+
+test(function fileFileFileBits(): void {
+ testFirstArgument([new File(["bits"], "world.txt")], 4);
+});
+
+test(function fileArrayBufferFileBits(): void {
+ testFirstArgument([new ArrayBuffer(8)], 8);
+});
+
+test(function fileTypedArrayFileBits(): void {
+ testFirstArgument([new Uint8Array([0x50, 0x41, 0x53, 0x53])], 4);
+});
+
+test(function fileVariousFileBits(): void {
+ testFirstArgument(
+ [
+ "bits",
+ new Blob(["bits"]),
+ new Blob(),
+ new Uint8Array([0x50, 0x41]),
+ new Uint16Array([0x5353]),
+ new Uint32Array([0x53534150])
+ ],
+ 16
+ );
+});
+
+test(function fileNumberInFileBits(): void {
+ testFirstArgument([12], 2);
+});
+
+test(function fileArrayInFileBits(): void {
+ testFirstArgument([[1, 2, 3]], 5);
+});
+
+test(function fileObjectInFileBits(): void {
+ // "[object Object]"
+ testFirstArgument([{}], 15);
+});
+
+function testSecondArgument(arg2, expectedFileName): void {
+ const file = new File(["bits"], arg2);
+ assert(file instanceof File);
+ assertEquals(file.name, expectedFileName);
+}
+
+test(function fileUsingFileName(): void {
+ testSecondArgument("dummy", "dummy");
+});
+
+test(function fileUsingSpecialCharacterInFileName(): void {
+ testSecondArgument("dummy/foo", "dummy:foo");
+});
+
+test(function fileUsingNullFileName(): void {
+ testSecondArgument(null, "null");
+});
+
+test(function fileUsingNumberFileName(): void {
+ testSecondArgument(1, "1");
+});
+
+test(function fileUsingEmptyStringFileName(): void {
+ testSecondArgument("", "");
+});
diff --git a/cli/js/files.ts b/cli/js/files.ts
new file mode 100644
index 000000000..b83a147e1
--- /dev/null
+++ b/cli/js/files.ts
@@ -0,0 +1,235 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import {
+ EOF,
+ Reader,
+ Writer,
+ Seeker,
+ Closer,
+ SeekMode,
+ SyncReader,
+ SyncWriter,
+ SyncSeeker
+} from "./io.ts";
+import { sendAsyncMinimal, sendSyncMinimal } from "./dispatch_minimal.ts";
+import * as dispatch from "./dispatch.ts";
+import {
+ sendSync as sendSyncJson,
+ sendAsync as sendAsyncJson
+} from "./dispatch_json.ts";
+
+/** Open a file and return an instance of the `File` object
+ * synchronously.
+ *
+ * const file = Deno.openSync("/foo/bar.txt");
+ */
+export function openSync(filename: string, mode: OpenMode = "r"): File {
+ const rid = sendSyncJson(dispatch.OP_OPEN, { filename, mode });
+ return new File(rid);
+}
+
+/** Open a file and return an instance of the `File` object.
+ *
+ * (async () => {
+ * const file = await Deno.open("/foo/bar.txt");
+ * })();
+ */
+export async function open(
+ filename: string,
+ mode: OpenMode = "r"
+): Promise<File> {
+ const rid = await sendAsyncJson(dispatch.OP_OPEN, { filename, mode });
+ return new File(rid);
+}
+
+/** Read synchronously from a file ID into an array buffer.
+ *
+ * Returns `number | EOF` for the operation.
+ *
+ * const file = Deno.openSync("/foo/bar.txt");
+ * const buf = new Uint8Array(100);
+ * const nread = Deno.readSync(file.rid, buf);
+ * const text = new TextDecoder().decode(buf);
+ *
+ */
+export function readSync(rid: number, p: Uint8Array): number | EOF {
+ const nread = sendSyncMinimal(dispatch.OP_READ, rid, p);
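+ // A negative count from the minimal dispatch is treated as a read error;
+ // 0 is mapped to EOF.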
+ if (nread < 0) {
+ throw new Error("read error");
+ } else if (nread == 0) {
+ return EOF;
+ } else {
+ return nread;
+ }
+}
+
+/** Read from a file ID into an array buffer.
+ *
+ * Resolves with the `number | EOF` for the operation.
+ *
+ * (async () => {
+ * const file = await Deno.open("/foo/bar.txt");
+ * const buf = new Uint8Array(100);
+ * const nread = await Deno.read(file.rid, buf);
+ * const text = new TextDecoder().decode(buf);
+ * })();
+ */
+export async function read(rid: number, p: Uint8Array): Promise<number | EOF> {
+ const nread = await sendAsyncMinimal(dispatch.OP_READ, rid, p);
+ if (nread < 0) {
+ throw new Error("read error");
+ } else if (nread == 0) {
+ return EOF;
+ } else {
+ return nread;
+ }
+}
+
+/** Write synchronously to the file ID the contents of the array buffer.
+ *
+ * Returns the number of bytes written.
+ *
+ * const encoder = new TextEncoder();
+ * const data = encoder.encode("Hello world\n");
+ * const file = Deno.openSync("/foo/bar.txt");
+ * Deno.writeSync(file.rid, data);
+ */
+export function writeSync(rid: number, p: Uint8Array): number {
+ const result = sendSyncMinimal(dispatch.OP_WRITE, rid, p);
+ if (result < 0) {
+ throw new Error("write error");
+ } else {
+ return result;
+ }
+}
+
+/** Write to the file ID the contents of the array buffer.
+ *
+ * Resolves with the number of bytes written.
+ *
+ * (async () => {
+ * const encoder = new TextEncoder();
+ * const data = encoder.encode("Hello world\n");
+ * const file = await Deno.open("/foo/bar.txt");
+ * await Deno.write(file.rid, data);
+ * })();
+ *
+ */
+export async function write(rid: number, p: Uint8Array): Promise<number> {
+ const result = await sendAsyncMinimal(dispatch.OP_WRITE, rid, p);
+ if (result < 0) {
+ throw new Error("write error");
+ } else {
+ return result;
+ }
+}
+
+/** Seek a file ID synchronously to the given offset under mode given by `whence`.
+ *
+ * const file = Deno.openSync("/foo/bar.txt");
+ * Deno.seekSync(file.rid, 0, 0);
+ */
+export function seekSync(rid: number, offset: number, whence: SeekMode): void {
+ sendSyncJson(dispatch.OP_SEEK, { rid, offset, whence });
+}
+
+/** Seek a file ID to the given offset under mode given by `whence`.
+ *
+ * (async () => {
+ * const file = await Deno.open("/foo/bar.txt");
+ * await Deno.seek(file.rid, 0, 0);
+ * })();
+ */
+export async function seek(
+ rid: number,
+ offset: number,
+ whence: SeekMode
+): Promise<void> {
+ await sendAsyncJson(dispatch.OP_SEEK, { rid, offset, whence });
+}
+
+/** Close the file ID. */
+export function close(rid: number): void {
+ sendSyncJson(dispatch.OP_CLOSE, { rid });
+}
+
+/** The Deno abstraction for reading and writing files. */
+export class File
+ implements
+ Reader,
+ SyncReader,
+ Writer,
+ SyncWriter,
+ Seeker,
+ SyncSeeker,
+ Closer {
+ constructor(readonly rid: number) {}
+
+ write(p: Uint8Array): Promise<number> {
+ return write(this.rid, p);
+ }
+
+ writeSync(p: Uint8Array): number {
+ return writeSync(this.rid, p);
+ }
+
+ read(p: Uint8Array): Promise<number | EOF> {
+ return read(this.rid, p);
+ }
+
+ readSync(p: Uint8Array): number | EOF {
+ return readSync(this.rid, p);
+ }
+
+ seek(offset: number, whence: SeekMode): Promise<void> {
+ return seek(this.rid, offset, whence);
+ }
+
+ seekSync(offset: number, whence: SeekMode): void {
+ return seekSync(this.rid, offset, whence);
+ }
+
+ close(): void {
+ close(this.rid);
+ }
+}
+
+/** An instance of `File` for stdin. */
+export const stdin = new File(0);
+/** An instance of `File` for stdout. */
+export const stdout = new File(1);
+/** An instance of `File` for stderr. */
+export const stderr = new File(2);
+
+export type OpenMode =
+ /** Read-only. Default. Starts at beginning of file. */
+ | "r"
+ /** Read-write. Starts at beginning of file. */
+ | "r+"
+ /** Write-only. Opens and truncates existing file or creates new one for
+ * writing only.
+ */
+ | "w"
+ /** Read-write. Opens and truncates existing file or creates new one for
+ * writing and reading.
+ */
+ | "w+"
+ /** Write-only. Opens existing file or creates new one. Each write appends
+ * content to the end of file.
+ */
+ | "a"
+ /** Read-write. Behaves like "a" and allows to read from file. */
+ | "a+"
+ /** Write-only. Exclusive create - creates new file only if one doesn't exist
+ * already.
+ */
+ | "x"
+ /** Read-write. Behaves like `x` and additionally allows reading from the file. */
+ | "x+";
+
+/** A factory function for creating instances of `File` associated with the
+ * supplied file name.
+ * @internal
+ */
+export function create(filename: string): Promise<File> {
+ return open(filename, "w+");
+}
diff --git a/cli/js/files_test.ts b/cli/js/files_test.ts
new file mode 100644
index 000000000..004cb662b
--- /dev/null
+++ b/cli/js/files_test.ts
@@ -0,0 +1,329 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, testPerm, assert, assertEquals } from "./test_util.ts";
+
+test(function filesStdioFileDescriptors(): void {
+ assertEquals(Deno.stdin.rid, 0);
+ assertEquals(Deno.stdout.rid, 1);
+ assertEquals(Deno.stderr.rid, 2);
+});
+
+testPerm({ read: true }, async function filesCopyToStdout(): Promise<void> {
+ const filename = "package.json";
+ const file = await Deno.open(filename);
+ assert(file.rid > 2);
+ const bytesWritten = await Deno.copy(Deno.stdout, file);
+ const fileSize = Deno.statSync(filename).len;
+ assertEquals(bytesWritten, fileSize);
+ console.log("bytes written", bytesWritten);
+});
+
+testPerm({ read: true }, async function filesToAsyncIterator(): Promise<void> {
+ const filename = "tests/hello.txt";
+ const file = await Deno.open(filename);
+
+ let totalSize = 0;
+ for await (const buf of Deno.toAsyncIterator(file)) {
+ totalSize += buf.byteLength;
+ }
+
+ assertEquals(totalSize, 12);
+});
+
+test(async function readerToAsyncIterator(): Promise<void> {
+ // ref: https://github.com/denoland/deno/issues/2330
+ const encoder = new TextEncoder();
+
+ class TestReader implements Deno.Reader {
+ private offset = 0;
+ private buf = new Uint8Array(encoder.encode(this.s));
+
+ constructor(private readonly s: string) {}
+
+ async read(p: Uint8Array): Promise<number | Deno.EOF> {
+ const n = Math.min(p.byteLength, this.buf.byteLength - this.offset);
+ p.set(this.buf.slice(this.offset, this.offset + n));
+ this.offset += n;
+
+ if (n === 0) {
+ return Deno.EOF;
+ }
+
+ return n;
+ }
+ }
+
+ const reader = new TestReader("hello world!");
+
+ let totalSize = 0;
+ for await (const buf of Deno.toAsyncIterator(reader)) {
+ totalSize += buf.byteLength;
+ }
+
+ assertEquals(totalSize, 12);
+});
+
+testPerm({ write: false }, async function writePermFailure(): Promise<void> {
+ const filename = "tests/hello.txt";
+ const writeModes: Deno.OpenMode[] = ["w", "a", "x"];
+ for (const mode of writeModes) {
+ let err;
+ try {
+ await Deno.open(filename, mode);
+ } catch (e) {
+ err = e;
+ }
+ assert(!!err);
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+ }
+});
+
+testPerm({ read: false }, async function readPermFailure(): Promise<void> {
+ let caughtError = false;
+ try {
+ await Deno.open("package.json", "r");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ write: true }, async function writeNullBufferFailure(): Promise<
+ void
+> {
+ const tempDir = Deno.makeTempDirSync();
+ const filename = tempDir + "hello.txt";
+ const file = await Deno.open(filename, "w");
+
+ // writing null should throw an error
+ let err;
+ try {
+ await file.write(null);
+ } catch (e) {
+ err = e;
+ }
+ // TODO: Check error kind when dispatch_minimal pipes errors properly
+ assert(!!err);
+
+ file.close();
+ await Deno.remove(tempDir, { recursive: true });
+});
+
+testPerm(
+ { write: true, read: true },
+ async function readNullBufferFailure(): Promise<void> {
+ const tempDir = Deno.makeTempDirSync();
+ const filename = tempDir + "hello.txt";
+ const file = await Deno.open(filename, "w+");
+
+ // reading file into null buffer should throw an error
+ let err;
+ try {
+ await file.read(null);
+ } catch (e) {
+ err = e;
+ }
+ // TODO: Check error kind when dispatch_minimal pipes errors properly
+ assert(!!err);
+
+ file.close();
+ await Deno.remove(tempDir, { recursive: true });
+ }
+);
+
+testPerm(
+ { write: false, read: false },
+ async function readWritePermFailure(): Promise<void> {
+ const filename = "tests/hello.txt";
+ const writeModes: Deno.OpenMode[] = ["r+", "w+", "a+", "x+"];
+ for (const mode of writeModes) {
+ let err;
+ try {
+ await Deno.open(filename, mode);
+ } catch (e) {
+ err = e;
+ }
+ assert(!!err);
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+ }
+ }
+);
+
+testPerm({ read: true, write: true }, async function createFile(): Promise<
+ void
+> {
+ const tempDir = await Deno.makeTempDir();
+ const filename = tempDir + "/test.txt";
+ const f = await Deno.open(filename, "w");
+ let fileInfo = Deno.statSync(filename);
+ assert(fileInfo.isFile());
+ assert(fileInfo.len === 0);
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ await f.write(data);
+ fileInfo = Deno.statSync(filename);
+ assert(fileInfo.len === 5);
+ f.close();
+
+ // TODO: test different modes
+ await Deno.remove(tempDir, { recursive: true });
+});
+
+testPerm({ read: true, write: true }, async function openModeWrite(): Promise<
+ void
+> {
+ const tempDir = Deno.makeTempDirSync();
+ const encoder = new TextEncoder();
+ const filename = tempDir + "hello.txt";
+ const data = encoder.encode("Hello world!\n");
+
+ let file = await Deno.open(filename, "w");
+ // assert file was created
+ let fileInfo = Deno.statSync(filename);
+ assert(fileInfo.isFile());
+ assertEquals(fileInfo.len, 0);
+ // write some data
+ await file.write(data);
+ fileInfo = Deno.statSync(filename);
+ assertEquals(fileInfo.len, 13);
+ // assert we can't read from file
+ let thrown = false;
+ try {
+ const buf = new Uint8Array(20);
+ await file.read(buf);
+ } catch (e) {
+ thrown = true;
+ } finally {
+ assert(thrown, "'w' mode shouldn't allow to read file");
+ }
+ file.close();
+ // assert that existing file is truncated on open
+ file = await Deno.open(filename, "w");
+ file.close();
+ const fileSize = Deno.statSync(filename).len;
+ assertEquals(fileSize, 0);
+ await Deno.remove(tempDir, { recursive: true });
+});
+
+testPerm(
+ { read: true, write: true },
+ async function openModeWriteRead(): Promise<void> {
+ const tempDir = Deno.makeTempDirSync();
+ const encoder = new TextEncoder();
+ const filename = tempDir + "hello.txt";
+ const data = encoder.encode("Hello world!\n");
+
+ const file = await Deno.open(filename, "w+");
+ // assert file was created
+ let fileInfo = Deno.statSync(filename);
+ assert(fileInfo.isFile());
+ assertEquals(fileInfo.len, 0);
+ // write some data
+ await file.write(data);
+ fileInfo = Deno.statSync(filename);
+ assertEquals(fileInfo.len, 13);
+
+ const buf = new Uint8Array(20);
+ await file.seek(0, Deno.SeekMode.SEEK_START);
+ const result = await file.read(buf);
+ assertEquals(result, 13);
+ file.close();
+
+ await Deno.remove(tempDir, { recursive: true });
+ }
+);
+
+testPerm({ read: true }, async function seekStart(): Promise<void> {
+ const filename = "tests/hello.txt";
+ const file = await Deno.open(filename);
+ // Deliberately move 1 step forward
+ await file.read(new Uint8Array(1)); // "H"
+ // Skipping "Hello "
+ await file.seek(6, Deno.SeekMode.SEEK_START);
+ const buf = new Uint8Array(6);
+ await file.read(buf);
+ const decoded = new TextDecoder().decode(buf);
+ assertEquals(decoded, "world!");
+});
+
+testPerm({ read: true }, function seekSyncStart(): void {
+ const filename = "tests/hello.txt";
+ const file = Deno.openSync(filename);
+ // Deliberately move 1 step forward
+ file.readSync(new Uint8Array(1)); // "H"
+ // Skipping "Hello "
+ file.seekSync(6, Deno.SeekMode.SEEK_START);
+ const buf = new Uint8Array(6);
+ file.readSync(buf);
+ const decoded = new TextDecoder().decode(buf);
+ assertEquals(decoded, "world!");
+});
+
+testPerm({ read: true }, async function seekCurrent(): Promise<void> {
+ const filename = "tests/hello.txt";
+ const file = await Deno.open(filename);
+ // Deliberately move 1 step forward
+ await file.read(new Uint8Array(1)); // "H"
+ // Skipping "ello "
+ await file.seek(5, Deno.SeekMode.SEEK_CURRENT);
+ const buf = new Uint8Array(6);
+ await file.read(buf);
+ const decoded = new TextDecoder().decode(buf);
+ assertEquals(decoded, "world!");
+});
+
+testPerm({ read: true }, function seekSyncCurrent(): void {
+ const filename = "tests/hello.txt";
+ const file = Deno.openSync(filename);
+ // Deliberately move 1 step forward
+ file.readSync(new Uint8Array(1)); // "H"
+ // Skipping "ello "
+ file.seekSync(5, Deno.SeekMode.SEEK_CURRENT);
+ const buf = new Uint8Array(6);
+ file.readSync(buf);
+ const decoded = new TextDecoder().decode(buf);
+ assertEquals(decoded, "world!");
+});
+
+testPerm({ read: true }, async function seekEnd(): Promise<void> {
+ const filename = "tests/hello.txt";
+ const file = await Deno.open(filename);
+ await file.seek(-6, Deno.SeekMode.SEEK_END);
+ const buf = new Uint8Array(6);
+ await file.read(buf);
+ const decoded = new TextDecoder().decode(buf);
+ assertEquals(decoded, "world!");
+});
+
+testPerm({ read: true }, function seekSyncEnd(): void {
+ const filename = "tests/hello.txt";
+ const file = Deno.openSync(filename);
+ file.seekSync(-6, Deno.SeekMode.SEEK_END);
+ const buf = new Uint8Array(6);
+ file.readSync(buf);
+ const decoded = new TextDecoder().decode(buf);
+ assertEquals(decoded, "world!");
+});
+
+testPerm({ read: true }, async function seekMode(): Promise<void> {
+ const filename = "tests/hello.txt";
+ const file = await Deno.open(filename);
+ let err;
+ try {
+ await file.seek(1, -1);
+ } catch (e) {
+ err = e;
+ }
+ assert(!!err);
+ assertEquals(err.kind, Deno.ErrorKind.InvalidSeekMode);
+ assertEquals(err.name, "InvalidSeekMode");
+
+ // We should still be able to read the file
+ // since it is still open.
+ const buf = new Uint8Array(1);
+ await file.read(buf); // "H"
+ assertEquals(new TextDecoder().decode(buf), "H");
+});
diff --git a/cli/js/form_data.ts b/cli/js/form_data.ts
new file mode 100644
index 000000000..89efb3c00
--- /dev/null
+++ b/cli/js/form_data.ts
@@ -0,0 +1,149 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import * as blob from "./blob.ts";
+import * as domFile from "./dom_file.ts";
+import { DomIterableMixin } from "./mixins/dom_iterable.ts";
+import { requiredArguments } from "./util.ts";
+
+const dataSymbol = Symbol("data");
+
+class FormDataBase {
+ private [dataSymbol]: Array<[string, domTypes.FormDataEntryValue]> = [];
+
+ /** Appends a new value onto an existing key inside a `FormData`
+ * object, or adds the key if it does not already exist.
+ *
+ * formData.append('name', 'first');
+ * formData.append('name', 'second');
+ */
+ append(name: string, value: string): void;
+ append(name: string, value: blob.DenoBlob, filename?: string): void;
+ append(name: string, value: string | blob.DenoBlob, filename?: string): void {
+ requiredArguments("FormData.append", arguments.length, 2);
+ name = String(name);
+ if (value instanceof blob.DenoBlob) {
+ const dfile = new domFile.DomFileImpl([value], filename || name);
+ this[dataSymbol].push([name, dfile]);
+ } else {
+ this[dataSymbol].push([name, String(value)]);
+ }
+ }
+
+ /** Deletes a key/value pair from a `FormData` object.
+ *
+ * formData.delete('name');
+ */
+ delete(name: string): void {
+ requiredArguments("FormData.delete", arguments.length, 1);
+ name = String(name);
+ let i = 0;
+ while (i < this[dataSymbol].length) {
+ if (this[dataSymbol][i][0] === name) {
+ this[dataSymbol].splice(i, 1);
+ } else {
+ i++;
+ }
+ }
+ }
+
+ /** Returns an array of all the values associated with a given key
+ * from within a `FormData`.
+ *
+ * formData.getAll('name');
+ */
+ getAll(name: string): domTypes.FormDataEntryValue[] {
+ requiredArguments("FormData.getAll", arguments.length, 1);
+ name = String(name);
+ const values = [];
+ for (const entry of this[dataSymbol]) {
+ if (entry[0] === name) {
+ values.push(entry[1]);
+ }
+ }
+
+ return values;
+ }
+
+ /** Returns the first value associated with a given key from within a
+ * `FormData` object.
+ *
+ * formData.get('name');
+ */
+ get(name: string): domTypes.FormDataEntryValue | null {
+ requiredArguments("FormData.get", arguments.length, 1);
+ name = String(name);
+ for (const entry of this[dataSymbol]) {
+ if (entry[0] === name) {
+ return entry[1];
+ }
+ }
+
+ return null;
+ }
+
+ /** Returns a boolean stating whether a `FormData` object contains a
+ * certain key/value pair.
+ *
+ * formData.has('name');
+ */
+ has(name: string): boolean {
+ requiredArguments("FormData.has", arguments.length, 1);
+ name = String(name);
+ return this[dataSymbol].some((entry): boolean => entry[0] === name);
+ }
+
+ /** Sets a new value for an existing key inside a `FormData` object, or
+ * adds the key/value if it does not already exist.
+ * ref: https://xhr.spec.whatwg.org/#dom-formdata-set
+ *
+ * formData.set('name', 'value');
+ */
+ set(name: string, value: string): void;
+ set(name: string, value: blob.DenoBlob, filename?: string): void;
+ set(name: string, value: string | blob.DenoBlob, filename?: string): void {
+ requiredArguments("FormData.set", arguments.length, 2);
+ name = String(name);
+
+ // If there are any entries in the context object’s entry list whose name
+ // is name, replace the first such entry with entry and remove the others
+ let found = false;
+ let i = 0;
+ while (i < this[dataSymbol].length) {
+ if (this[dataSymbol][i][0] === name) {
+ if (!found) {
+ if (value instanceof blob.DenoBlob) {
+ const dfile = new domFile.DomFileImpl([value], filename || name);
+ this[dataSymbol][i][1] = dfile;
+ } else {
+ this[dataSymbol][i][1] = String(value);
+ }
+ found = true;
+ } else {
+ this[dataSymbol].splice(i, 1);
+ continue;
+ }
+ }
+ i++;
+ }
+
+ // Otherwise, append entry to the context object’s entry list.
+ if (!found) {
+ if (value instanceof blob.DenoBlob) {
+ const dfile = new domFile.DomFileImpl([value], filename || name);
+ this[dataSymbol].push([name, dfile]);
+ } else {
+ this[dataSymbol].push([name, String(value)]);
+ }
+ }
+ }
+
+ get [Symbol.toStringTag](): string {
+ return "FormData";
+ }
+}
+
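+// FormDataBase only stores the entry list behind `dataSymbol`; the
+// DomIterableMixin (mixins/dom_iterable.ts) is expected to layer the iterable
+// members (e.g. forEach, as exercised in form_data_test.ts) on top of it.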
+export class FormData extends DomIterableMixin<
+ string,
+ domTypes.FormDataEntryValue,
+ typeof FormDataBase
+>(FormDataBase, dataSymbol) {}
diff --git a/cli/js/form_data_test.ts b/cli/js/form_data_test.ts
new file mode 100644
index 000000000..fe8b6cf32
--- /dev/null
+++ b/cli/js/form_data_test.ts
@@ -0,0 +1,179 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assert, assertEquals } from "./test_util.ts";
+
+test(function formDataHasCorrectNameProp(): void {
+ assertEquals(FormData.name, "FormData");
+});
+
+test(function formDataParamsAppendSuccess(): void {
+ const formData = new FormData();
+ formData.append("a", "true");
+ assertEquals(formData.get("a"), "true");
+});
+
+test(function formDataParamsDeleteSuccess(): void {
+ const formData = new FormData();
+ formData.append("a", "true");
+ formData.append("b", "false");
+ assertEquals(formData.get("b"), "false");
+ formData.delete("b");
+ assertEquals(formData.get("a"), "true");
+ assertEquals(formData.get("b"), null);
+});
+
+test(function formDataParamsGetAllSuccess(): void {
+ const formData = new FormData();
+ formData.append("a", "true");
+ formData.append("b", "false");
+ formData.append("a", "null");
+ assertEquals(formData.getAll("a"), ["true", "null"]);
+ assertEquals(formData.getAll("b"), ["false"]);
+ assertEquals(formData.getAll("c"), []);
+});
+
+test(function formDataParamsGetSuccess(): void {
+ const formData = new FormData();
+ formData.append("a", "true");
+ formData.append("b", "false");
+ formData.append("a", "null");
+ formData.append("d", undefined);
+ formData.append("e", null);
+ assertEquals(formData.get("a"), "true");
+ assertEquals(formData.get("b"), "false");
+ assertEquals(formData.get("c"), null);
+ assertEquals(formData.get("d"), "undefined");
+ assertEquals(formData.get("e"), "null");
+});
+
+test(function formDataParamsHasSuccess(): void {
+ const formData = new FormData();
+ formData.append("a", "true");
+ formData.append("b", "false");
+ assert(formData.has("a"));
+ assert(formData.has("b"));
+ assert(!formData.has("c"));
+});
+
+test(function formDataParamsSetSuccess(): void {
+ const formData = new FormData();
+ formData.append("a", "true");
+ formData.append("b", "false");
+ formData.append("a", "null");
+ assertEquals(formData.getAll("a"), ["true", "null"]);
+ assertEquals(formData.getAll("b"), ["false"]);
+ formData.set("a", "false");
+ assertEquals(formData.getAll("a"), ["false"]);
+ formData.set("d", undefined);
+ assertEquals(formData.get("d"), "undefined");
+ formData.set("e", null);
+ assertEquals(formData.get("e"), "null");
+});
+
+test(function formDataSetEmptyBlobSuccess(): void {
+ const formData = new FormData();
+ formData.set("a", new Blob([]), "blank.txt");
+ formData.get("a");
+ /* TODO Fix this test.
+ assert(file instanceof File);
+ if (typeof file !== "string") {
+ assertEquals(file.name, "blank.txt");
+ }
+ */
+});
+
+test(function formDataParamsForEachSuccess(): void {
+ const init = [["a", "54"], ["b", "true"]];
+ const formData = new FormData();
+ for (const [name, value] of init) {
+ formData.append(name, value);
+ }
+ let callNum = 0;
+ formData.forEach(
+ (value, key, parent): void => {
+ assertEquals(formData, parent);
+ assertEquals(value, init[callNum][1]);
+ assertEquals(key, init[callNum][0]);
+ callNum++;
+ }
+ );
+ assertEquals(callNum, init.length);
+});
+
+test(function formDataParamsArgumentsCheck(): void {
+ const methodRequireOneParam = ["delete", "getAll", "get", "has", "forEach"];
+
+ const methodRequireTwoParams = ["append", "set"];
+
+ methodRequireOneParam.forEach(
+ (method): void => {
+ const formData = new FormData();
+ let hasThrown = 0;
+ let errMsg = "";
+ try {
+ formData[method]();
+ hasThrown = 1;
+ } catch (err) {
+ errMsg = err.message;
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+ assertEquals(hasThrown, 2);
+ assertEquals(
+ errMsg,
+ `FormData.${method} requires at least 1 argument, but only 0 present`
+ );
+ }
+ );
+
+ methodRequireTwoParams.forEach(
+ (method: string): void => {
+ const formData = new FormData();
+ let hasThrown = 0;
+ let errMsg = "";
+
+ try {
+ formData[method]();
+ hasThrown = 1;
+ } catch (err) {
+ errMsg = err.message;
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+ assertEquals(hasThrown, 2);
+ assertEquals(
+ errMsg,
+ `FormData.${method} requires at least 2 arguments, but only 0 present`
+ );
+
+ hasThrown = 0;
+ errMsg = "";
+ try {
+ formData[method]("foo");
+ hasThrown = 1;
+ } catch (err) {
+ errMsg = err.message;
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+ assertEquals(hasThrown, 2);
+ assertEquals(
+ errMsg,
+ `FormData.${method} requires at least 2 arguments, but only 1 present`
+ );
+ }
+ );
+});
+
+test(function toStringShouldBeWebCompatibility(): void {
+ const formData = new FormData();
+ assertEquals(formData.toString(), "[object FormData]");
+});
diff --git a/cli/js/format_error.ts b/cli/js/format_error.ts
new file mode 100644
index 000000000..801da0d0b
--- /dev/null
+++ b/cli/js/format_error.ts
@@ -0,0 +1,9 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as dispatch from "./dispatch.ts";
+import { sendSync } from "./dispatch_json.ts";
+
+// TODO(bartlomieju): move to `repl.ts`?
+export function formatError(errString: string): string {
+ const res = sendSync(dispatch.OP_FORMAT_ERROR, { error: errString });
+ return res.error;
+}
diff --git a/cli/js/get_random_values.ts b/cli/js/get_random_values.ts
new file mode 100644
index 000000000..e54f34785
--- /dev/null
+++ b/cli/js/get_random_values.ts
@@ -0,0 +1,31 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as dispatch from "./dispatch.ts";
+import { sendSync } from "./dispatch_json.ts";
+import { assert } from "./util.ts";
+
+/** Synchronously collects cryptographically secure random values. The
+ * underlying CSPRNG in use is Rust's `rand::rngs::ThreadRng`.
+ *
+ * const arr = new Uint8Array(32);
+ * crypto.getRandomValues(arr);
+ */
+export function getRandomValues<
+ T extends
+ | Int8Array
+ | Uint8Array
+ | Uint8ClampedArray
+ | Int16Array
+ | Uint16Array
+ | Int32Array
+ | Uint32Array
+>(typedArray: T): T {
+ assert(typedArray !== null, "Input must not be null");
+ assert(typedArray.length <= 65536, "Input must not be longer than 65536");
+ const ui8 = new Uint8Array(
+ typedArray.buffer,
+ typedArray.byteOffset,
+ typedArray.byteLength
+ );
+ sendSync(dispatch.OP_GET_RANDOM_VALUES, {}, ui8);
+ return typedArray;
+}
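
`getRandomValues()` above fills the caller's typed array in place, by wrapping its buffer in a `Uint8Array` view before dispatching the op, and then returns the same array; the assertions reject null input and arrays longer than 65536 elements. A minimal usage sketch, assuming the `crypto` global wired up in globals.ts later in this diff:

    const nonce = new Uint8Array(16);
    const out = crypto.getRandomValues(nonce); // fills `nonce` in place
    console.log(out === nonce);                // true: the same array is returned
    // crypto.getRandomValues(new Uint8Array(70000)); // would trip the length assertion
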
diff --git a/cli/js/get_random_values_test.ts b/cli/js/get_random_values_test.ts
new file mode 100644
index 000000000..68c13d597
--- /dev/null
+++ b/cli/js/get_random_values_test.ts
@@ -0,0 +1,51 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assertNotEquals, assertStrictEq } from "./test_util.ts";
+
+test(function getRandomValuesInt8Array(): void {
+ const arr = new Int8Array(32);
+ crypto.getRandomValues(arr);
+ assertNotEquals(arr, new Int8Array(32));
+});
+
+test(function getRandomValuesUint8Array(): void {
+ const arr = new Uint8Array(32);
+ crypto.getRandomValues(arr);
+ assertNotEquals(arr, new Uint8Array(32));
+});
+
+test(function getRandomValuesUint8ClampedArray(): void {
+ const arr = new Uint8ClampedArray(32);
+ crypto.getRandomValues(arr);
+ assertNotEquals(arr, new Uint8ClampedArray(32));
+});
+
+test(function getRandomValuesInt16Array(): void {
+ const arr = new Int16Array(4);
+ crypto.getRandomValues(arr);
+ assertNotEquals(arr, new Int16Array(4));
+});
+
+test(function getRandomValuesUint16Array(): void {
+ const arr = new Uint16Array(4);
+ crypto.getRandomValues(arr);
+ assertNotEquals(arr, new Uint16Array(4));
+});
+
+test(function getRandomValuesInt32Array(): void {
+ const arr = new Int32Array(8);
+ crypto.getRandomValues(arr);
+ assertNotEquals(arr, new Int32Array(8));
+});
+
+test(function getRandomValuesUint32Array(): void {
+ const arr = new Uint32Array(8);
+ crypto.getRandomValues(arr);
+ assertNotEquals(arr, new Uint32Array(8));
+});
+
+test(function getRandomValuesReturnValue(): void {
+ const arr = new Uint32Array(8);
+ const rtn = crypto.getRandomValues(arr);
+ assertNotEquals(arr, new Uint32Array(8));
+ assertStrictEq(rtn, arr);
+});
diff --git a/cli/js/globals.ts b/cli/js/globals.ts
new file mode 100644
index 000000000..b734b8da3
--- /dev/null
+++ b/cli/js/globals.ts
@@ -0,0 +1,207 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+// This is a "special" module, in that it defines the global runtime scope of
+// Deno, and therefore it defines a lot of the runtime environment that code
+// is evaluated in. We use this file to automatically build the runtime type
+// library.
+
+// Modules which will make up part of the global public API surface should be
+// imported as namespaces, so when the runtime type library is generated they
+// can be expressed as a namespace in the type library.
+import { window } from "./window.ts";
+import * as blob from "./blob.ts";
+import * as consoleTypes from "./console.ts";
+import * as csprng from "./get_random_values.ts";
+import * as customEvent from "./custom_event.ts";
+import * as Deno from "./deno.ts";
+import * as domTypes from "./dom_types.ts";
+import * as domFile from "./dom_file.ts";
+import * as event from "./event.ts";
+import * as eventTarget from "./event_target.ts";
+import * as formData from "./form_data.ts";
+import * as fetchTypes from "./fetch.ts";
+import * as headers from "./headers.ts";
+import * as textEncoding from "./text_encoding.ts";
+import * as timers from "./timers.ts";
+import * as url from "./url.ts";
+import * as urlSearchParams from "./url_search_params.ts";
+import * as workers from "./workers.ts";
+import * as performanceUtil from "./performance.ts";
+
+import * as request from "./request.ts";
+
+// These imports are not exposed and therefore are fine to just import the
+// symbols required.
+import { core } from "./core.ts";
+
+// During the build process, augmentations to the variable `window` in this
+// file are tracked and created as part of the default library that is built
+// into Deno; we only need to declare enough to compile Deno.
+declare global {
+ interface CallSite {
+ getThis(): unknown;
+ getTypeName(): string;
+ getFunction(): Function;
+ getFunctionName(): string;
+ getMethodName(): string;
+ getFileName(): string;
+ getLineNumber(): number | null;
+ getColumnNumber(): number | null;
+ getEvalOrigin(): string | null;
+ isToplevel(): boolean;
+ isEval(): boolean;
+ isNative(): boolean;
+ isConstructor(): boolean;
+ isAsync(): boolean;
+ isPromiseAll(): boolean;
+ getPromiseIndex(): number | null;
+ }
+
+ interface ErrorConstructor {
+ prepareStackTrace(error: Error, structuredStackTrace: CallSite[]): string;
+ }
+
+ interface Object {
+ [consoleTypes.customInspect]?(): string;
+ }
+}
+
+// A self reference to the global object.
+window.window = window;
+
+// This is the Deno namespace, it is handled differently from other window
+// properties when building the runtime type library, as the whole module
+// is flattened into a single namespace.
+window.Deno = Deno;
+
+// Globally available functions and object instances.
+window.atob = textEncoding.atob;
+window.btoa = textEncoding.btoa;
+window.fetch = fetchTypes.fetch;
+window.clearTimeout = timers.clearTimeout;
+window.clearInterval = timers.clearInterval;
+window.console = new consoleTypes.Console(core.print);
+window.setTimeout = timers.setTimeout;
+window.setInterval = timers.setInterval;
+window.location = (undefined as unknown) as domTypes.Location;
+window.onload = undefined as undefined | Function;
+window.onunload = undefined as undefined | Function;
+// The following Crypto interface implementation is not up to par with the
+// standard https://www.w3.org/TR/WebCryptoAPI/#crypto-interface as it does not
+// yet incorporate the SubtleCrypto interface as its "subtle" property.
+window.crypto = (csprng as unknown) as Crypto;
+// window.queueMicrotask added by hand to self-maintained lib.deno_runtime.d.ts
+
+// When creating the runtime type library, we use modifications to `window` to
+// determine what is in the global namespace. When we put a class in the
+// namespace, we also need its global instance type, otherwise users won't be
+// able to refer to instances.
+// We have to export the type aliases, so that TypeScript _knows_ they are
+// being used, which it cannot statically determine within this module.
+window.Blob = blob.DenoBlob;
+export type Blob = domTypes.Blob;
+
+export type Body = domTypes.Body;
+
+window.File = domFile.DomFileImpl as domTypes.DomFileConstructor;
+export type File = domTypes.DomFile;
+
+export type CustomEventInit = domTypes.CustomEventInit;
+window.CustomEvent = customEvent.CustomEvent;
+export type CustomEvent = domTypes.CustomEvent;
+export type EventInit = domTypes.EventInit;
+window.Event = event.Event;
+export type Event = domTypes.Event;
+export type EventListener = domTypes.EventListener;
+window.EventTarget = eventTarget.EventTarget;
+export type EventTarget = domTypes.EventTarget;
+window.URL = url.URL;
+export type URL = url.URL;
+window.URLSearchParams = urlSearchParams.URLSearchParams;
+export type URLSearchParams = domTypes.URLSearchParams;
+
+// Using the `as` keyword to use standards-compliant interfaces, as the Deno
+// implementations contain some implementation details we wouldn't want to
+// expose in the runtime type library.
+window.Headers = headers.Headers as domTypes.HeadersConstructor;
+export type Headers = domTypes.Headers;
+window.FormData = formData.FormData as domTypes.FormDataConstructor;
+export type FormData = domTypes.FormData;
+
+window.TextEncoder = textEncoding.TextEncoder;
+export type TextEncoder = textEncoding.TextEncoder;
+window.TextDecoder = textEncoding.TextDecoder;
+export type TextDecoder = textEncoding.TextDecoder;
+
+window.Request = request.Request as domTypes.RequestConstructor;
+export type Request = domTypes.Request;
+
+window.Response = fetchTypes.Response;
+export type Response = domTypes.Response;
+
+window.performance = new performanceUtil.Performance();
+
+// This variable functioning correctly depends on `declareAsLet`
+// in //tools/ts_library_builder/main.ts
+window.onmessage = workers.onmessage;
+
+window.workerMain = workers.workerMain;
+window.workerClose = workers.workerClose;
+window.postMessage = workers.postMessage;
+
+window.Worker = workers.WorkerImpl;
+export type Worker = workers.Worker;
+
+window[domTypes.eventTargetHost] = null;
+window[domTypes.eventTargetListeners] = {};
+window[domTypes.eventTargetMode] = "";
+window[domTypes.eventTargetNodeType] = 0;
+window[eventTarget.eventTargetAssignedSlot] = false;
+window[eventTarget.eventTargetHasActivationBehavior] = false;
+window.addEventListener = eventTarget.EventTarget.prototype.addEventListener;
+window.dispatchEvent = eventTarget.EventTarget.prototype.dispatchEvent;
+window.removeEventListener =
+ eventTarget.EventTarget.prototype.removeEventListener;
+
+// Registers the handler for window.onload function.
+window.addEventListener(
+ "load",
+ (e: domTypes.Event): void => {
+ const onload = window.onload;
+ if (typeof onload === "function") {
+ onload(e);
+ }
+ }
+);
+// Registers the handler for window.onunload function.
+window.addEventListener(
+ "unload",
+ (e: domTypes.Event): void => {
+ const onunload = window.onunload;
+ if (typeof onunload === "function") {
+ onunload(e);
+ }
+ }
+);
+
+// Below are interfaces that are available in TypeScript but
+// have different signatures.
+export interface ImportMeta {
+ url: string;
+ main: boolean;
+}
+
+export interface Crypto {
+ readonly subtle: null;
+ getRandomValues: <
+ T extends
+ | Int8Array
+ | Uint8Array
+ | Uint8ClampedArray
+ | Int16Array
+ | Uint16Array
+ | Int32Array
+ | Uint32Array
+ >(
+ typedArray: T
+ ) => T;
+}
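
Because the assignments above land directly on `window` (which is also the global object), the classes and functions wired up here are usable in user scripts without any imports. A small sketch of what that provides, assuming this module has run:

    const params = new URLSearchParams("a=1&b=2");
    const bytes = new TextEncoder().encode(params.toString());
    console.log(bytes.length);             // 7
    console.log(atob(btoa("hi")));         // "hi"
    window.onload = (): void => {
      console.log("loaded");               // invoked via the "load" listener registered above
    };
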
diff --git a/cli/js/globals_test.ts b/cli/js/globals_test.ts
new file mode 100644
index 000000000..d7c50c5b1
--- /dev/null
+++ b/cli/js/globals_test.ts
@@ -0,0 +1,104 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assert } from "./test_util.ts";
+
+test(function globalThisExists(): void {
+ assert(globalThis != null);
+});
+
+test(function windowExists(): void {
+ assert(window != null);
+});
+
+test(function windowWindowExists(): void {
+ assert(window.window === window);
+});
+
+test(function globalThisEqualsWindow(): void {
+ assert(globalThis === window);
+});
+
+test(function DenoNamespaceExists(): void {
+ assert(Deno != null);
+});
+
+test(function DenoNamespaceEqualsWindowDeno(): void {
+ assert(Deno === window.Deno);
+});
+
+test(function DenoNamespaceIsFrozen(): void {
+ assert(Object.isFrozen(Deno));
+});
+
+test(function webAssemblyExists(): void {
+ assert(typeof WebAssembly.compile === "function");
+});
+
+test(function DenoNamespaceImmutable(): void {
+ const denoCopy = window.Deno;
+ try {
+ // @ts-ignore
+ Deno = 1;
+ } catch {}
+ assert(denoCopy === Deno);
+ try {
+ // @ts-ignore
+ window.Deno = 1;
+ } catch {}
+ assert(denoCopy === Deno);
+ try {
+ delete window.Deno;
+ } catch {}
+ assert(denoCopy === Deno);
+
+ const { readFile } = Deno;
+ try {
+ // @ts-ignore
+ Deno.readFile = 1;
+ } catch {}
+ assert(readFile === Deno.readFile);
+ try {
+ delete window.Deno.readFile;
+ } catch {}
+ assert(readFile === Deno.readFile);
+
+ // @ts-ignore
+ const { print } = Deno.core;
+ try {
+ // @ts-ignore
+ Deno.core.print = 1;
+ } catch {}
+ // @ts-ignore
+ assert(print === Deno.core.print);
+ try {
+ // @ts-ignore
+ delete Deno.core.print;
+ } catch {}
+ // @ts-ignore
+ assert(print === Deno.core.print);
+});
+
+test(async function windowQueueMicrotask(): Promise<void> {
+ let resolve1: () => void | undefined;
+ let resolve2: () => void | undefined;
+ let microtaskDone = false;
+ const p1 = new Promise(
+ (res): void => {
+ resolve1 = (): void => {
+ microtaskDone = true;
+ res();
+ };
+ }
+ );
+ const p2 = new Promise(
+ (res): void => {
+ resolve2 = (): void => {
+ assert(microtaskDone);
+ res();
+ };
+ }
+ );
+ window.queueMicrotask(resolve1!);
+ setTimeout(resolve2!, 0);
+ await p1;
+ await p2;
+});
diff --git a/cli/js/headers.ts b/cli/js/headers.ts
new file mode 100644
index 000000000..dc0de54dd
--- /dev/null
+++ b/cli/js/headers.ts
@@ -0,0 +1,139 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as domTypes from "./dom_types.ts";
+import { DomIterableMixin } from "./mixins/dom_iterable.ts";
+import { requiredArguments } from "./util.ts";
+
+// From node-fetch
+// Copyright (c) 2016 David Frank. MIT License.
+const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
+const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function isHeaders(value: any): value is domTypes.Headers {
+ // eslint-disable-next-line @typescript-eslint/no-use-before-define
+ return value instanceof Headers;
+}
+
+const headerMap = Symbol("header map");
+
+// ref: https://fetch.spec.whatwg.org/#dom-headers
+class HeadersBase {
+ private [headerMap]: Map<string, string>;
+ // TODO: headerGuard? Investigate if it is needed
+ // node-fetch did not implement this but it is in the spec
+
+ private _normalizeParams(name: string, value?: string): string[] {
+ name = String(name).toLowerCase();
+ value = String(value).trim();
+ return [name, value];
+ }
+
+ // The following name/value validations are copied from
+ // https://github.com/bitinn/node-fetch/blob/master/src/headers.js
+ // Copyright (c) 2016 David Frank. MIT License.
+ private _validateName(name: string): void {
+ if (invalidTokenRegex.test(name) || name === "") {
+ throw new TypeError(`${name} is not a legal HTTP header name`);
+ }
+ }
+
+ private _validateValue(value: string): void {
+ if (invalidHeaderCharRegex.test(value)) {
+ throw new TypeError(`${value} is not a legal HTTP header value`);
+ }
+ }
+
+ constructor(init?: domTypes.HeadersInit) {
+ if (init === null) {
+ throw new TypeError(
+ "Failed to construct 'Headers'; The provided value was not valid"
+ );
+ } else if (isHeaders(init)) {
+ this[headerMap] = new Map(init);
+ } else {
+ this[headerMap] = new Map();
+ if (Array.isArray(init)) {
+ for (const tuple of init) {
+ // If header does not contain exactly two items,
+ // then throw a TypeError.
+ // ref: https://fetch.spec.whatwg.org/#concept-headers-fill
+ requiredArguments(
+ "Headers.constructor tuple array argument",
+ tuple.length,
+ 2
+ );
+
+ const [name, value] = this._normalizeParams(tuple[0], tuple[1]);
+ this._validateName(name);
+ this._validateValue(value);
+ const existingValue = this[headerMap].get(name);
+ this[headerMap].set(
+ name,
+ existingValue ? `${existingValue}, ${value}` : value
+ );
+ }
+ } else if (init) {
+ const names = Object.keys(init);
+ for (const rawName of names) {
+ const rawValue = init[rawName];
+ const [name, value] = this._normalizeParams(rawName, rawValue);
+ this._validateName(name);
+ this._validateValue(value);
+ this[headerMap].set(name, value);
+ }
+ }
+ }
+ }
+
+ // ref: https://fetch.spec.whatwg.org/#concept-headers-append
+ append(name: string, value: string): void {
+ requiredArguments("Headers.append", arguments.length, 2);
+ const [newname, newvalue] = this._normalizeParams(name, value);
+ this._validateName(newname);
+ this._validateValue(newvalue);
+ const v = this[headerMap].get(newname);
+ const str = v ? `${v}, ${newvalue}` : newvalue;
+ this[headerMap].set(newname, str);
+ }
+
+ delete(name: string): void {
+ requiredArguments("Headers.delete", arguments.length, 1);
+ const [newname] = this._normalizeParams(name);
+ this._validateName(newname);
+ this[headerMap].delete(newname);
+ }
+
+ get(name: string): string | null {
+ requiredArguments("Headers.get", arguments.length, 1);
+ const [newname] = this._normalizeParams(name);
+ this._validateName(newname);
+ const value = this[headerMap].get(newname);
+ return value || null;
+ }
+
+ has(name: string): boolean {
+ requiredArguments("Headers.has", arguments.length, 1);
+ const [newname] = this._normalizeParams(name);
+ this._validateName(newname);
+ return this[headerMap].has(newname);
+ }
+
+ set(name: string, value: string): void {
+ requiredArguments("Headers.set", arguments.length, 2);
+ const [newname, newvalue] = this._normalizeParams(name, value);
+ this._validateName(newname);
+ this._validateValue(newvalue);
+ this[headerMap].set(newname, newvalue);
+ }
+
+ get [Symbol.toStringTag](): string {
+ return "Headers";
+ }
+}
+
+// @internal
+export class Headers extends DomIterableMixin<
+ string,
+ string,
+ typeof HeadersBase
+>(HeadersBase, headerMap) {}
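
In the implementation above, `_normalizeParams()` lowercases header names and trims values, and `append()` folds repeated names into a single comma-separated value, while `set()` overwrites whatever is stored. A minimal sketch of that behavior using the exported class:

    const h = new Headers();
    h.append("Accept", "text/html");
    h.append("ACCEPT", "application/json");
    console.log(h.get("accept")); // "text/html, application/json"
    h.set("accept", "*/*");       // overwrites the combined value
    console.log(h.get("Accept")); // "*/*"
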
diff --git a/cli/js/headers_test.ts b/cli/js/headers_test.ts
new file mode 100644
index 000000000..f08283c51
--- /dev/null
+++ b/cli/js/headers_test.ts
@@ -0,0 +1,331 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assert, assertEquals } from "./test_util.ts";
+
+// Logic heavily copied from web-platform-tests, to make
+// sure we pass most of the basic header tests.
+// ref: https://github.com/web-platform-tests/wpt/blob/7c50c216081d6ea3c9afe553ee7b64534020a1b2/fetch/api/headers/headers-basic.html
+test(function newHeaderTest(): void {
+ new Headers();
+ new Headers(undefined);
+ new Headers({});
+ try {
+ new Headers(null);
+ } catch (e) {
+ assertEquals(
+ e.message,
+ "Failed to construct 'Headers'; The provided value was not valid"
+ );
+ }
+});
+
+const headerDict = {
+ name1: "value1",
+ name2: "value2",
+ name3: "value3",
+ name4: undefined,
+ "Content-Type": "value4"
+};
+const headerSeq = [];
+for (const name in headerDict) {
+ headerSeq.push([name, headerDict[name]]);
+}
+
+test(function newHeaderWithSequence(): void {
+ const headers = new Headers(headerSeq);
+ for (const name in headerDict) {
+ assertEquals(headers.get(name), String(headerDict[name]));
+ }
+ assertEquals(headers.get("length"), null);
+});
+
+test(function newHeaderWithRecord(): void {
+ const headers = new Headers(headerDict);
+ for (const name in headerDict) {
+ assertEquals(headers.get(name), String(headerDict[name]));
+ }
+});
+
+test(function newHeaderWithHeadersInstance(): void {
+ const headers = new Headers(headerDict);
+ const headers2 = new Headers(headers);
+ for (const name in headerDict) {
+ assertEquals(headers2.get(name), String(headerDict[name]));
+ }
+});
+
+test(function headerAppendSuccess(): void {
+ const headers = new Headers();
+ for (const name in headerDict) {
+ headers.append(name, headerDict[name]);
+ assertEquals(headers.get(name), String(headerDict[name]));
+ }
+});
+
+test(function headerSetSuccess(): void {
+ const headers = new Headers();
+ for (const name in headerDict) {
+ headers.set(name, headerDict[name]);
+ assertEquals(headers.get(name), String(headerDict[name]));
+ }
+});
+
+test(function headerHasSuccess(): void {
+ const headers = new Headers(headerDict);
+ for (const name in headerDict) {
+ assert(headers.has(name), "headers has name " + name);
+ assert(
+ !headers.has("nameNotInHeaders"),
+ "headers do not have header: nameNotInHeaders"
+ );
+ }
+});
+
+test(function headerDeleteSuccess(): void {
+ const headers = new Headers(headerDict);
+ for (const name in headerDict) {
+ assert(headers.has(name), "headers have a header: " + name);
+ headers.delete(name);
+ assert(!headers.has(name), "headers do not have anymore a header: " + name);
+ }
+});
+
+test(function headerGetSuccess(): void {
+ const headers = new Headers(headerDict);
+ for (const name in headerDict) {
+ assertEquals(headers.get(name), String(headerDict[name]));
+ assertEquals(headers.get("nameNotInHeaders"), null);
+ }
+});
+
+test(function headerEntriesSuccess(): void {
+ const headers = new Headers(headerDict);
+ const iterators = headers.entries();
+ for (const it of iterators) {
+ const key = it[0];
+ const value = it[1];
+ assert(headers.has(key));
+ assertEquals(value, headers.get(key));
+ }
+});
+
+test(function headerKeysSuccess(): void {
+ const headers = new Headers(headerDict);
+ const iterators = headers.keys();
+ for (const it of iterators) {
+ assert(headers.has(it));
+ }
+});
+
+test(function headerValuesSuccess(): void {
+ const headers = new Headers(headerDict);
+ const iterators = headers.values();
+ const entries = headers.entries();
+ const values = [];
+ for (const pair of entries) {
+ values.push(pair[1]);
+ }
+ for (const it of iterators) {
+ assert(values.includes(it));
+ }
+});
+
+const headerEntriesDict = {
+ name1: "value1",
+ Name2: "value2",
+ name: "value3",
+ "content-Type": "value4",
+ "Content-Typ": "value5",
+ "Content-Types": "value6"
+};
+
+test(function headerForEachSuccess(): void {
+ const headers = new Headers(headerEntriesDict);
+ const keys = Object.keys(headerEntriesDict);
+ keys.forEach(
+ (key): void => {
+ const value = headerEntriesDict[key];
+ const newkey = key.toLowerCase();
+ headerEntriesDict[newkey] = value;
+ }
+ );
+ let callNum = 0;
+ headers.forEach(
+ (value, key, container): void => {
+ assertEquals(headers, container);
+ assertEquals(value, headerEntriesDict[key]);
+ callNum++;
+ }
+ );
+ assertEquals(callNum, keys.length);
+});
+
+test(function headerSymbolIteratorSuccess(): void {
+ assert(Symbol.iterator in Headers.prototype);
+ const headers = new Headers(headerEntriesDict);
+ for (const header of headers) {
+ const key = header[0];
+ const value = header[1];
+ assert(headers.has(key));
+ assertEquals(value, headers.get(key));
+ }
+});
+
+test(function headerTypesAvailable(): void {
+ function newHeaders(): Headers {
+ return new Headers();
+ }
+ const headers = newHeaders();
+ assert(headers instanceof Headers);
+});
+
+// Modified from https://github.com/bitinn/node-fetch/blob/7d3293200a91ad52b5ca7962f9d6fd1c04983edb/test/test.js#L2001-L2014
+// Copyright (c) 2016 David Frank. MIT License.
+test(function headerIllegalReject(): void {
+ let errorCount = 0;
+ try {
+ new Headers({ "He y": "ok" });
+ } catch (e) {
+ errorCount++;
+ }
+ try {
+ new Headers({ "Hé-y": "ok" });
+ } catch (e) {
+ errorCount++;
+ }
+ try {
+ new Headers({ "He-y": "ăk" });
+ } catch (e) {
+ errorCount++;
+ }
+ const headers = new Headers();
+ try {
+ headers.append("Hé-y", "ok");
+ } catch (e) {
+ errorCount++;
+ }
+ try {
+ headers.delete("Hé-y");
+ } catch (e) {
+ errorCount++;
+ }
+ try {
+ headers.get("Hé-y");
+ } catch (e) {
+ errorCount++;
+ }
+ try {
+ headers.has("Hé-y");
+ } catch (e) {
+ errorCount++;
+ }
+ try {
+ headers.set("Hé-y", "ok");
+ } catch (e) {
+ errorCount++;
+ }
+ try {
+ headers.set("", "ok");
+ } catch (e) {
+ errorCount++;
+ }
+ assertEquals(errorCount, 9);
+  // 'o k' is a valid value here, though it would be an invalid name
+ new Headers({ "He-y": "o k" });
+});
+
+// If a pair does not contain exactly two items, then throw a TypeError.
+test(function headerParamsShouldThrowTypeError(): void {
+ let hasThrown = 0;
+
+ try {
+ new Headers(([["1"]] as unknown) as Array<[string, string]>);
+ hasThrown = 1;
+ } catch (err) {
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+
+ assertEquals(hasThrown, 2);
+});
+
+test(function headerParamsArgumentsCheck(): void {
+ const methodRequireOneParam = ["delete", "get", "has", "forEach"];
+
+ const methodRequireTwoParams = ["append", "set"];
+
+ methodRequireOneParam.forEach(
+ (method): void => {
+ const headers = new Headers();
+ let hasThrown = 0;
+ let errMsg = "";
+ try {
+ headers[method]();
+ hasThrown = 1;
+ } catch (err) {
+ errMsg = err.message;
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+ assertEquals(hasThrown, 2);
+ assertEquals(
+ errMsg,
+ `Headers.${method} requires at least 1 argument, but only 0 present`
+ );
+ }
+ );
+
+ methodRequireTwoParams.forEach(
+ (method): void => {
+ const headers = new Headers();
+ let hasThrown = 0;
+ let errMsg = "";
+
+ try {
+ headers[method]();
+ hasThrown = 1;
+ } catch (err) {
+ errMsg = err.message;
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+ assertEquals(hasThrown, 2);
+ assertEquals(
+ errMsg,
+ `Headers.${method} requires at least 2 arguments, but only 0 present`
+ );
+
+ hasThrown = 0;
+ errMsg = "";
+ try {
+ headers[method]("foo");
+ hasThrown = 1;
+ } catch (err) {
+ errMsg = err.message;
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+ assertEquals(hasThrown, 2);
+ assertEquals(
+ errMsg,
+ `Headers.${method} requires at least 2 arguments, but only 1 present`
+ );
+ }
+ );
+});
+
+test(function toStringShouldBeWebCompatibility(): void {
+ const headers = new Headers();
+ assertEquals(headers.toString(), "[object Headers]");
+});
diff --git a/cli/js/io.ts b/cli/js/io.ts
new file mode 100644
index 000000000..1a7bf8c4c
--- /dev/null
+++ b/cli/js/io.ts
@@ -0,0 +1,170 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+// Interfaces 100% copied from Go.
+// Documentation liberally lifted from them too.
+// Thank you! We love Go!
+
+// TODO(kt3k): EOF should be `unique symbol` type.
+// That might require some changes of ts_library_builder.
+// See #2591 for more details.
+export const EOF = null;
+export type EOF = null;
+
+// Seek whence values.
+// https://golang.org/pkg/io/#pkg-constants
+export enum SeekMode {
+ SEEK_START = 0,
+ SEEK_CURRENT = 1,
+ SEEK_END = 2
+}
+
+// Reader is the interface that wraps the basic read() method.
+// https://golang.org/pkg/io/#Reader
+export interface Reader {
+  /** Reads up to p.byteLength bytes into `p`. It resolves to the number
+   * of bytes read (`0` < `n` <= `p.byteLength`) and rejects if any error is encountered.
+ * Even if `read()` returns `n` < `p.byteLength`, it may use all of `p` as
+ * scratch space during the call. If some data is available but not
+ * `p.byteLength` bytes, `read()` conventionally returns what is available
+ * instead of waiting for more.
+ *
+   * When `read()` encounters an end-of-file condition, it returns the EOF symbol.
+ *
+ * When `read()` encounters an error, it rejects with an error.
+ *
+ * Callers should always process the `n` > `0` bytes returned before
+ * considering the EOF. Doing so correctly handles I/O errors that happen
+ * after reading some bytes and also both of the allowed EOF behaviors.
+ *
+ * Implementations must not retain `p`.
+ */
+ read(p: Uint8Array): Promise<number | EOF>;
+}
+
+export interface SyncReader {
+ readSync(p: Uint8Array): number | EOF;
+}
+
+// Writer is the interface that wraps the basic write() method.
+// https://golang.org/pkg/io/#Writer
+export interface Writer {
+ /** Writes `p.byteLength` bytes from `p` to the underlying data
+ * stream. It resolves to the number of bytes written from `p` (`0` <= `n` <=
+ * `p.byteLength`) and any error encountered that caused the write to stop
+ * early. `write()` must return a non-null error if it returns `n` <
+ * `p.byteLength`. write() must not modify the slice data, even temporarily.
+ *
+ * Implementations must not retain `p`.
+ */
+ write(p: Uint8Array): Promise<number>;
+}
+
+export interface SyncWriter {
+ writeSync(p: Uint8Array): number;
+}
+// https://golang.org/pkg/io/#Closer
+export interface Closer {
+ // The behavior of Close after the first call is undefined. Specific
+ // implementations may document their own behavior.
+ close(): void;
+}
+
+// https://golang.org/pkg/io/#Seeker
+export interface Seeker {
+ /** Seek sets the offset for the next `read()` or `write()` to offset,
+ * interpreted according to `whence`: `SeekStart` means relative to the start
+ * of the file, `SeekCurrent` means relative to the current offset, and
+ * `SeekEnd` means relative to the end. Seek returns the new offset relative
+ * to the start of the file and an error, if any.
+ *
+ * Seeking to an offset before the start of the file is an error. Seeking to
+ * any positive offset is legal, but the behavior of subsequent I/O operations
+ * on the underlying object is implementation-dependent.
+ */
+ seek(offset: number, whence: SeekMode): Promise<void>;
+}
+
+export interface SyncSeeker {
+ seekSync(offset: number, whence: SeekMode): void;
+}
+
+// https://golang.org/pkg/io/#ReadCloser
+export interface ReadCloser extends Reader, Closer {}
+
+// https://golang.org/pkg/io/#WriteCloser
+export interface WriteCloser extends Writer, Closer {}
+
+// https://golang.org/pkg/io/#ReadSeeker
+export interface ReadSeeker extends Reader, Seeker {}
+
+// https://golang.org/pkg/io/#WriteSeeker
+export interface WriteSeeker extends Writer, Seeker {}
+
+// https://golang.org/pkg/io/#ReadWriteCloser
+export interface ReadWriteCloser extends Reader, Writer, Closer {}
+
+// https://golang.org/pkg/io/#ReadWriteSeeker
+export interface ReadWriteSeeker extends Reader, Writer, Seeker {}
+
+/** Copies from `src` to `dst` until either `EOF` is reached on `src`
+ * or an error occurs. It returns the number of bytes copied and the first
+ * error encountered while copying, if any.
+ *
+ * Because `copy()` is defined to read from `src` until `EOF`, it does not
+ * treat an `EOF` from `read()` as an error to be reported.
+ */
+// https://golang.org/pkg/io/#Copy
+export async function copy(dst: Writer, src: Reader): Promise<number> {
+ let n = 0;
+ const b = new Uint8Array(32 * 1024);
+ let gotEOF = false;
+ while (gotEOF === false) {
+ const result = await src.read(b);
+ if (result === EOF) {
+ gotEOF = true;
+ } else {
+ n += await dst.write(b.subarray(0, result));
+ }
+ }
+ return n;
+}
+
+/** Turns `r` into async iterator.
+ *
+ * for await (const chunk of toAsyncIterator(reader)) {
+ * console.log(chunk)
+ * }
+ */
+export function toAsyncIterator(r: Reader): AsyncIterableIterator<Uint8Array> {
+ const b = new Uint8Array(1024);
+  // Keep track of whether end-of-file has been reached, then
+  // signal that the iterator is done during the subsequent next()
+ // call. This is required because `r` can return a `number | EOF`
+ // with data read and EOF reached. But if iterator returns
+ // `done` then `value` is discarded.
+ //
+ // See https://github.com/denoland/deno/issues/2330 for reference.
+ let sawEof = false;
+
+ return {
+ [Symbol.asyncIterator](): AsyncIterableIterator<Uint8Array> {
+ return this;
+ },
+
+ async next(): Promise<IteratorResult<Uint8Array>> {
+ if (sawEof) {
+ return { value: new Uint8Array(), done: true };
+ }
+
+ const result = await r.read(b);
+ if (result === EOF) {
+ sawEof = true;
+ return { value: new Uint8Array(), done: true };
+ }
+
+ return {
+ value: b.subarray(0, result),
+ done: false
+ };
+ }
+ };
+}
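
`copy()` above repeatedly `read()`s into a fixed 32 KiB scratch buffer and `write()`s only the filled subarray until `EOF`, and `toAsyncIterator()` yields such subarrays as chunks. A minimal sketch using `Deno.Buffer`, which implements both `Reader` and `Writer` (see lib.deno_runtime.d.ts below); the helper name `demo` is just for illustration:

    async function demo(): Promise<void> {
      const src = new Deno.Buffer(new TextEncoder().encode("hello world").buffer);
      const dst = new Deno.Buffer();
      const n = await Deno.copy(dst, src); // reads from src until EOF
      console.log(n, dst.toString());      // 11 "hello world"

      const r = new Deno.Buffer(new TextEncoder().encode("streamed").buffer);
      for await (const chunk of Deno.toAsyncIterator(r)) {
        console.log(chunk.length);         // each chunk is a subarray of the 1024-byte scratch buffer
      }
    }
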
diff --git a/cli/js/lib.deno_runtime.d.ts b/cli/js/lib.deno_runtime.d.ts
new file mode 100644
index 000000000..94b6b61cd
--- /dev/null
+++ b/cli/js/lib.deno_runtime.d.ts
@@ -0,0 +1,2800 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+/* eslint-disable @typescript-eslint/no-explicit-any */
+/* eslint-disable @typescript-eslint/no-empty-interface */
+
+/// <reference no-default-lib="true" />
+/// <reference lib="esnext" />
+
+declare namespace Deno {
+ // @url js/os.d.ts
+
+ /** The current process id of the runtime. */
+ export let pid: number;
+ /** Reflects the NO_COLOR environment variable: https://no-color.org/ */
+ export let noColor: boolean;
+ /** Check if running in terminal.
+ *
+ * console.log(Deno.isTTY().stdout);
+ */
+ export function isTTY(): {
+ stdin: boolean;
+ stdout: boolean;
+ stderr: boolean;
+ };
+ /** Get the hostname.
+ * Requires the `--allow-env` flag.
+ *
+ * console.log(Deno.hostname());
+ */
+ export function hostname(): string;
+ /** Exit the Deno process with optional exit code. */
+ export function exit(code?: number): never;
+ /** Returns a snapshot of the environment variables at invocation. Mutating a
+ * property in the object will set that variable in the environment for
+ * the process. The environment object will only accept `string`s
+ * as values.
+ *
+ * const myEnv = Deno.env();
+ * console.log(myEnv.SHELL);
+ * myEnv.TEST_VAR = "HELLO";
+ * const newEnv = Deno.env();
+ * console.log(myEnv.TEST_VAR == newEnv.TEST_VAR);
+ */
+ export function env(): {
+ [index: string]: string;
+ };
+ /** Returns the value of an environment variable at invocation.
+ * If the variable is not present, `undefined` will be returned.
+ *
+ * const myEnv = Deno.env();
+ * console.log(myEnv.SHELL);
+ * myEnv.TEST_VAR = "HELLO";
+ * const newEnv = Deno.env();
+ * console.log(myEnv.TEST_VAR == newEnv.TEST_VAR);
+ */
+ export function env(key: string): string | undefined;
+ /**
+ * Returns the current user's home directory.
+ * Requires the `--allow-env` flag.
+ */
+ export function homeDir(): string;
+ /**
+ * Returns the path to the current deno executable.
+ * Requires the `--allow-env` flag.
+ */
+ export function execPath(): string;
+
+ // @url js/dir.d.ts
+
+ /**
+ * `cwd()` Return a string representing the current working directory.
+ * If the current directory can be reached via multiple paths
+ * (due to symbolic links), `cwd()` may return
+ * any one of them.
+   * Throws a `NotFound` exception if the directory is not available.
+ */
+ export function cwd(): string;
+ /**
+ * `chdir()` Change the current working directory to path.
+   * Throws a `NotFound` exception if the directory is not available.
+ */
+ export function chdir(directory: string): void;
+
+ // @url js/io.d.ts
+
+ export const EOF: null;
+ export type EOF = null;
+ export enum SeekMode {
+ SEEK_START = 0,
+ SEEK_CURRENT = 1,
+ SEEK_END = 2
+ }
+ export interface Reader {
+    /** Reads up to p.byteLength bytes into `p`. It resolves to the number
+     * of bytes read (`0` < `n` <= `p.byteLength`) and rejects if any error is encountered.
+ * Even if `read()` returns `n` < `p.byteLength`, it may use all of `p` as
+ * scratch space during the call. If some data is available but not
+ * `p.byteLength` bytes, `read()` conventionally returns what is available
+ * instead of waiting for more.
+ *
+     * When `read()` encounters an end-of-file condition, it returns the EOF symbol.
+ *
+ * When `read()` encounters an error, it rejects with an error.
+ *
+ * Callers should always process the `n` > `0` bytes returned before
+ * considering the EOF. Doing so correctly handles I/O errors that happen
+ * after reading some bytes and also both of the allowed EOF behaviors.
+ *
+ * Implementations must not retain `p`.
+ */
+ read(p: Uint8Array): Promise<number | EOF>;
+ }
+ export interface SyncReader {
+ readSync(p: Uint8Array): number | EOF;
+ }
+ export interface Writer {
+ /** Writes `p.byteLength` bytes from `p` to the underlying data
+ * stream. It resolves to the number of bytes written from `p` (`0` <= `n` <=
+ * `p.byteLength`) and any error encountered that caused the write to stop
+ * early. `write()` must return a non-null error if it returns `n` <
+ * `p.byteLength`. write() must not modify the slice data, even temporarily.
+ *
+ * Implementations must not retain `p`.
+ */
+ write(p: Uint8Array): Promise<number>;
+ }
+ export interface SyncWriter {
+ writeSync(p: Uint8Array): number;
+ }
+ export interface Closer {
+ close(): void;
+ }
+ export interface Seeker {
+ /** Seek sets the offset for the next `read()` or `write()` to offset,
+ * interpreted according to `whence`: `SeekStart` means relative to the start
+ * of the file, `SeekCurrent` means relative to the current offset, and
+ * `SeekEnd` means relative to the end. Seek returns the new offset relative
+ * to the start of the file and an error, if any.
+ *
+ * Seeking to an offset before the start of the file is an error. Seeking to
+ * any positive offset is legal, but the behavior of subsequent I/O operations
+ * on the underlying object is implementation-dependent.
+ */
+ seek(offset: number, whence: SeekMode): Promise<void>;
+ }
+ export interface SyncSeeker {
+ seekSync(offset: number, whence: SeekMode): void;
+ }
+ export interface ReadCloser extends Reader, Closer {}
+ export interface WriteCloser extends Writer, Closer {}
+ export interface ReadSeeker extends Reader, Seeker {}
+ export interface WriteSeeker extends Writer, Seeker {}
+ export interface ReadWriteCloser extends Reader, Writer, Closer {}
+ export interface ReadWriteSeeker extends Reader, Writer, Seeker {}
+ /** Copies from `src` to `dst` until either `EOF` is reached on `src`
+ * or an error occurs. It returns the number of bytes copied and the first
+ * error encountered while copying, if any.
+ *
+ * Because `copy()` is defined to read from `src` until `EOF`, it does not
+ * treat an `EOF` from `read()` as an error to be reported.
+ */
+ export function copy(dst: Writer, src: Reader): Promise<number>;
+ /** Turns `r` into async iterator.
+ *
+ * for await (const chunk of toAsyncIterator(reader)) {
+ * console.log(chunk)
+ * }
+ */
+ export function toAsyncIterator(r: Reader): AsyncIterableIterator<Uint8Array>;
+
+ // @url js/files.d.ts
+
+ /** Open a file and return an instance of the `File` object
+ * synchronously.
+ *
+ * const file = Deno.openSync("/foo/bar.txt");
+ */
+ export function openSync(filename: string, mode?: OpenMode): File;
+ /** Open a file and return an instance of the `File` object.
+ *
+ * (async () => {
+ * const file = await Deno.open("/foo/bar.txt");
+ * })();
+ */
+ export function open(filename: string, mode?: OpenMode): Promise<File>;
+ /** Read synchronously from a file ID into an array buffer.
+ *
+ * Return `number | EOF` for the operation.
+ *
+ * const file = Deno.openSync("/foo/bar.txt");
+ * const buf = new Uint8Array(100);
+ * const nread = Deno.readSync(file.rid, buf);
+ * const text = new TextDecoder().decode(buf);
+ *
+ */
+ export function readSync(rid: number, p: Uint8Array): number | EOF;
+ /** Read from a file ID into an array buffer.
+ *
+ * Resolves with the `number | EOF` for the operation.
+ *
+ * (async () => {
+ * const file = await Deno.open("/foo/bar.txt");
+ * const buf = new Uint8Array(100);
+ * const nread = await Deno.read(file.rid, buf);
+ * const text = new TextDecoder().decode(buf);
+ * })();
+ */
+ export function read(rid: number, p: Uint8Array): Promise<number | EOF>;
+ /** Write synchronously to the file ID the contents of the array buffer.
+ *
+ * Resolves with the number of bytes written.
+ *
+ * const encoder = new TextEncoder();
+ * const data = encoder.encode("Hello world\n");
+ * const file = Deno.openSync("/foo/bar.txt");
+ * Deno.writeSync(file.rid, data);
+ */
+ export function writeSync(rid: number, p: Uint8Array): number;
+ /** Write to the file ID the contents of the array buffer.
+ *
+ * Resolves with the number of bytes written.
+ *
+ * (async () => {
+ * const encoder = new TextEncoder();
+ * const data = encoder.encode("Hello world\n");
+ * const file = await Deno.open("/foo/bar.txt");
+ * await Deno.write(file.rid, data);
+ * })();
+ *
+ */
+ export function write(rid: number, p: Uint8Array): Promise<number>;
+ /** Seek a file ID synchronously to the given offset under mode given by `whence`.
+ *
+ * const file = Deno.openSync("/foo/bar.txt");
+ * Deno.seekSync(file.rid, 0, 0);
+ */
+ export function seekSync(rid: number, offset: number, whence: SeekMode): void;
+ /** Seek a file ID to the given offset under mode given by `whence`.
+ *
+ * (async () => {
+ * const file = await Deno.open("/foo/bar.txt");
+ * await Deno.seek(file.rid, 0, 0);
+ * })();
+ */
+ export function seek(
+ rid: number,
+ offset: number,
+ whence: SeekMode
+ ): Promise<void>;
+ /** Close the file ID. */
+ export function close(rid: number): void;
+ /** The Deno abstraction for reading and writing files. */
+ export class File
+ implements
+ Reader,
+ SyncReader,
+ Writer,
+ SyncWriter,
+ Seeker,
+ SyncSeeker,
+ Closer {
+ readonly rid: number;
+ constructor(rid: number);
+ write(p: Uint8Array): Promise<number>;
+ writeSync(p: Uint8Array): number;
+ read(p: Uint8Array): Promise<number | EOF>;
+ readSync(p: Uint8Array): number | EOF;
+ seek(offset: number, whence: SeekMode): Promise<void>;
+ seekSync(offset: number, whence: SeekMode): void;
+ close(): void;
+ }
+ /** An instance of `File` for stdin. */
+ export const stdin: File;
+ /** An instance of `File` for stdout. */
+ export const stdout: File;
+ /** An instance of `File` for stderr. */
+ export const stderr: File;
+ export type OpenMode =
+ | "r"
+ /** Read-write. Start at beginning of file. */
+ | "r+"
+ /** Write-only. Opens and truncates existing file or creates new one for
+ * writing only.
+ */
+ | "w"
+ /** Read-write. Opens and truncates existing file or creates new one for
+ * writing and reading.
+ */
+ | "w+"
+ /** Write-only. Opens existing file or creates new one. Each write appends
+ * content to the end of file.
+ */
+ | "a"
+ /** Read-write. Behaves like "a" and allows to read from file. */
+ | "a+"
+ /** Write-only. Exclusive create - creates new file only if one doesn't exist
+ * already.
+ */
+ | "x"
+    /** Read-write. Behaves like `x` and allows reading from the file. */
+ | "x+";
+
+ // @url js/buffer.d.ts
+
+ /** A Buffer is a variable-sized buffer of bytes with read() and write()
+ * methods. Based on https://golang.org/pkg/bytes/#Buffer
+ */
+ export class Buffer implements Reader, SyncReader, Writer, SyncWriter {
+ private buf;
+ private off;
+ constructor(ab?: ArrayBuffer);
+ /** bytes() returns a slice holding the unread portion of the buffer.
+ * The slice is valid for use only until the next buffer modification (that
+ * is, only until the next call to a method like read(), write(), reset(), or
+ * truncate()). The slice aliases the buffer content at least until the next
+ * buffer modification, so immediate changes to the slice will affect the
+ * result of future reads.
+ */
+ bytes(): Uint8Array;
+ /** toString() returns the contents of the unread portion of the buffer
+ * as a string. Warning - if multibyte characters are present when data is
+ * flowing through the buffer, this method may result in incorrect strings
+ * due to a character being split.
+ */
+ toString(): string;
+ /** empty() returns whether the unread portion of the buffer is empty. */
+ empty(): boolean;
+ /** length is a getter that returns the number of bytes of the unread
+ * portion of the buffer
+ */
+ readonly length: number;
+ /** Returns the capacity of the buffer's underlying byte slice, that is,
+ * the total space allocated for the buffer's data.
+ */
+ readonly capacity: number;
+ /** truncate() discards all but the first n unread bytes from the buffer but
+ * continues to use the same allocated storage. It throws if n is negative or
+ * greater than the length of the buffer.
+ */
+ truncate(n: number): void;
+ /** reset() resets the buffer to be empty, but it retains the underlying
+ * storage for use by future writes. reset() is the same as truncate(0)
+ */
+ reset(): void;
+ /** _tryGrowByReslice() is a version of grow for the fast-case
+ * where the internal buffer only needs to be resliced. It returns the index
+ * where bytes should be written and whether it succeeded.
+ * It returns -1 if a reslice was not needed.
+ */
+ private _tryGrowByReslice;
+ private _reslice;
+    /** readSync() reads the next len(p) bytes from the buffer or until the buffer
+     * is drained. The return value n is the number of bytes read. If the
+     * buffer has no data to return, `EOF` is returned.
+ */
+ readSync(p: Uint8Array): number | EOF;
+ read(p: Uint8Array): Promise<number | EOF>;
+ writeSync(p: Uint8Array): number;
+ write(p: Uint8Array): Promise<number>;
+ /** _grow() grows the buffer to guarantee space for n more bytes.
+ * It returns the index where bytes should be written.
+ * If the buffer can't grow it will throw with ErrTooLarge.
+ */
+ private _grow;
+ /** grow() grows the buffer's capacity, if necessary, to guarantee space for
+ * another n bytes. After grow(n), at least n bytes can be written to the
+ * buffer without another allocation. If n is negative, grow() will panic. If
+ * the buffer can't grow it will throw ErrTooLarge.
+ * Based on https://golang.org/pkg/bytes/#Buffer.Grow
+ */
+ grow(n: number): void;
+ /** readFrom() reads data from r until EOF and appends it to the buffer,
+ * growing the buffer as needed. It returns the number of bytes read. If the
+ * buffer becomes too large, readFrom will panic with ErrTooLarge.
+ * Based on https://golang.org/pkg/bytes/#Buffer.ReadFrom
+ */
+ readFrom(r: Reader): Promise<number>;
+ /** Sync version of `readFrom`
+ */
+ readFromSync(r: SyncReader): number;
+ }
+ /** Read `r` until EOF and return the content as `Uint8Array`.
+ */
+ export function readAll(r: Reader): Promise<Uint8Array>;
+ /** Read synchronously `r` until EOF and return the content as `Uint8Array`.
+ */
+ export function readAllSync(r: SyncReader): Uint8Array;
+ /** Write all the content of `arr` to `w`.
+ */
+ export function writeAll(w: Writer, arr: Uint8Array): Promise<void>;
+ /** Write synchronously all the content of `arr` to `w`.
+ */
+ export function writeAllSync(w: SyncWriter, arr: Uint8Array): void;
+
+ // @url js/mkdir.d.ts
+
+ /** Creates a new directory with the specified path synchronously.
+ * If `recursive` is set to true, nested directories will be created (also known
+ * as "mkdir -p").
+ * `mode` sets permission bits (before umask) on UNIX and does nothing on
+ * Windows.
+ *
+ * Deno.mkdirSync("new_dir");
+ * Deno.mkdirSync("nested/directories", true);
+ */
+ export function mkdirSync(
+ path: string,
+ recursive?: boolean,
+ mode?: number
+ ): void;
+ /** Creates a new directory with the specified path.
+ * If `recursive` is set to true, nested directories will be created (also known
+ * as "mkdir -p").
+ * `mode` sets permission bits (before umask) on UNIX and does nothing on
+ * Windows.
+ *
+ * await Deno.mkdir("new_dir");
+ * await Deno.mkdir("nested/directories", true);
+ */
+ export function mkdir(
+ path: string,
+ recursive?: boolean,
+ mode?: number
+ ): Promise<void>;
+
+ // @url js/make_temp_dir.d.ts
+
+ export interface MakeTempDirOptions {
+ dir?: string;
+ prefix?: string;
+ suffix?: string;
+ }
+ /** makeTempDirSync is the synchronous version of `makeTempDir`.
+ *
+ * const tempDirName0 = Deno.makeTempDirSync();
+ * const tempDirName1 = Deno.makeTempDirSync({ prefix: 'my_temp' });
+ */
+ export function makeTempDirSync(options?: MakeTempDirOptions): string;
+ /** makeTempDir creates a new temporary directory in the directory `dir`, its
+ * name beginning with `prefix` and ending with `suffix`.
+ * It returns the full path to the newly created directory.
+ * If `dir` is unspecified, tempDir uses the default directory for temporary
+ * files. Multiple programs calling tempDir simultaneously will not choose the
+ * same directory. It is the caller's responsibility to remove the directory
+ * when no longer needed.
+ *
+ * const tempDirName0 = await Deno.makeTempDir();
+ * const tempDirName1 = await Deno.makeTempDir({ prefix: 'my_temp' });
+ */
+ export function makeTempDir(options?: MakeTempDirOptions): Promise<string>;
+
+ // @url js/chmod.d.ts
+
+  /** Changes the permission of a specific file/directory at the specified path
+   * synchronously.
+ *
+ * Deno.chmodSync("/path/to/file", 0o666);
+ */
+ export function chmodSync(path: string, mode: number): void;
+  /** Changes the permission of a specific file/directory at the specified path.
+ *
+ * await Deno.chmod("/path/to/file", 0o666);
+ */
+ export function chmod(path: string, mode: number): Promise<void>;
+
+ // @url js/chown.d.ts
+
+ /**
+ * Change owner of a regular file or directory synchronously. Unix only at the moment.
+ * @param path path to the file
+ * @param uid user id of the new owner
+ * @param gid group id of the new owner
+ */
+ export function chownSync(path: string, uid: number, gid: number): void;
+ /**
+ * Change owner of a regular file or directory asynchronously. Unix only at the moment.
+ * @param path path to the file
+ * @param uid user id of the new owner
+ * @param gid group id of the new owner
+ */
+ export function chown(path: string, uid: number, gid: number): Promise<void>;
+
+ // @url js/utime.d.ts
+
+ /** Synchronously changes the access and modification times of a file system
+ * object referenced by `filename`. Given times are either in seconds
+ * (Unix epoch time) or as `Date` objects.
+ *
+ * Deno.utimeSync("myfile.txt", 1556495550, new Date());
+ */
+ export function utimeSync(
+ filename: string,
+ atime: number | Date,
+ mtime: number | Date
+ ): void;
+ /** Changes the access and modification times of a file system object
+ * referenced by `filename`. Given times are either in seconds
+ * (Unix epoch time) or as `Date` objects.
+ *
+ * await Deno.utime("myfile.txt", 1556495550, new Date());
+ */
+ export function utime(
+ filename: string,
+ atime: number | Date,
+ mtime: number | Date
+ ): Promise<void>;
+
+ // @url js/remove.d.ts
+
+ export interface RemoveOption {
+ recursive?: boolean;
+ }
+  /** Removes the named file or directory synchronously. Throws an error if
+   * permission is denied, the path is not found, or the directory is not
+   * empty and `recursive` is set to false.
+ * `recursive` is set to false by default.
+ *
+ * Deno.removeSync("/path/to/dir/or/file", {recursive: false});
+ */
+ export function removeSync(path: string, options?: RemoveOption): void;
+  /** Removes the named file or directory. Throws an error if permission is
+   * denied, the path is not found, or the directory is not empty and
+   * `recursive` is set to false.
+ * `recursive` is set to false by default.
+ *
+ * await Deno.remove("/path/to/dir/or/file", {recursive: false});
+ */
+ export function remove(path: string, options?: RemoveOption): Promise<void>;
+
+ // @url js/rename.d.ts
+
+ /** Synchronously renames (moves) `oldpath` to `newpath`. If `newpath` already
+ * exists and is not a directory, `renameSync()` replaces it. OS-specific
+ * restrictions may apply when `oldpath` and `newpath` are in different
+ * directories.
+ *
+ * Deno.renameSync("old/path", "new/path");
+ */
+ export function renameSync(oldpath: string, newpath: string): void;
+ /** Renames (moves) `oldpath` to `newpath`. If `newpath` already exists and is
+ * not a directory, `rename()` replaces it. OS-specific restrictions may apply
+ * when `oldpath` and `newpath` are in different directories.
+ *
+ * await Deno.rename("old/path", "new/path");
+ */
+ export function rename(oldpath: string, newpath: string): Promise<void>;
+
+ // @url js/read_file.d.ts
+
+ /** Read the entire contents of a file synchronously.
+ *
+ * const decoder = new TextDecoder("utf-8");
+ * const data = Deno.readFileSync("hello.txt");
+ * console.log(decoder.decode(data));
+ */
+ export function readFileSync(filename: string): Uint8Array;
+ /** Read the entire contents of a file.
+ *
+ * const decoder = new TextDecoder("utf-8");
+ * const data = await Deno.readFile("hello.txt");
+ * console.log(decoder.decode(data));
+ */
+ export function readFile(filename: string): Promise<Uint8Array>;
+
+ // @url js/file_info.d.ts
+
+ /** A FileInfo describes a file and is returned by `stat`, `lstat`,
+ * `statSync`, `lstatSync`.
+ */
+ export interface FileInfo {
+ /** The size of the file, in bytes. */
+ len: number;
+ /** The last modification time of the file. This corresponds to the `mtime`
+ * field from `stat` on Unix and `ftLastWriteTime` on Windows. This may not
+ * be available on all platforms.
+ */
+ modified: number | null;
+ /** The last access time of the file. This corresponds to the `atime`
+ * field from `stat` on Unix and `ftLastAccessTime` on Windows. This may not
+ * be available on all platforms.
+ */
+ accessed: number | null;
+    /** The creation time of the file. This corresponds to the `birthtime`
+ * field from `stat` on Unix and `ftCreationTime` on Windows. This may not
+ * be available on all platforms.
+ */
+ created: number | null;
+ /** The underlying raw st_mode bits that contain the standard Unix permissions
+ * for this file/directory. TODO Match behavior with Go on windows for mode.
+ */
+ mode: number | null;
+ /** The file or directory name. */
+ name: string | null;
+ /** Returns whether this is info for a regular file. This result is mutually
+ * exclusive with `FileInfo.isDirectory` and `FileInfo.isSymlink`.
+ */
+ isFile(): boolean;
+ /** Returns whether this is info for a regular directory. This result is
+ * mutually exclusive with `FileInfo.isFile` and `FileInfo.isSymlink`.
+ */
+ isDirectory(): boolean;
+ /** Returns whether this is info for a symlink. This result is
+ * mutually exclusive with `FileInfo.isFile` and `FileInfo.isDirectory`.
+ */
+ isSymlink(): boolean;
+ }
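+
+ // A usage sketch for FileInfo (a hypothetical "hello.txt" and read permission
+ // are assumed; `statSync` is declared further below):
+ //
+ //   const info = Deno.statSync("hello.txt");
+ //   if (info.isFile()) {
+ //     console.log(`${info.name}: ${info.len} bytes`);
+ //   }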
+
+ // @url js/read_dir.d.ts
+
+ /** Synchronously reads the directory given by path and returns a list of
+ * file info.
+ *
+ * const files = Deno.readDirSync("/");
+ */
+ export function readDirSync(path: string): FileInfo[];
+ /** Reads the directory given by path and returns a list of file info.
+ *
+ * const files = await Deno.readDir("/");
+ */
+ export function readDir(path: string): Promise<FileInfo[]>;
+
+ // @url js/copy_file.d.ts
+
+ /** Synchronously copies the contents of a file to another file by name.
+ * Creates a new file if the target does not exist; if the target exists,
+ * its original content is overwritten.
+ *
+ * The permissions of the original file are also copied to the destination.
+ *
+ * Deno.copyFileSync("from.txt", "to.txt");
+ */
+ export function copyFileSync(from: string, to: string): void;
+ /** Copies the contents of a file to another file by name.
+ *
+ * Creates a new file if the target does not exist; if the target exists,
+ * its original content is overwritten.
+ *
+ * The permissions of the original file are also copied to the destination.
+ *
+ * await Deno.copyFile("from.txt", "to.txt");
+ */
+ export function copyFile(from: string, to: string): Promise<void>;
+
+ // @url js/read_link.d.ts
+
+ /** Returns the destination of the named symbolic link synchronously.
+ *
+ * const targetPath = Deno.readlinkSync("symlink/path");
+ */
+ export function readlinkSync(name: string): string;
+ /** Returns the destination of the named symbolic link.
+ *
+ * const targetPath = await Deno.readlink("symlink/path");
+ */
+ export function readlink(name: string): Promise<string>;
+
+ // @url js/stat.d.ts
+
+ interface StatResponse {
+ isFile: boolean;
+ isSymlink: boolean;
+ len: number;
+ modified: number;
+ accessed: number;
+ created: number;
+ mode: number;
+ hasMode: boolean;
+ name: string | null;
+ }
+ /** Queries the file system for information on the path provided. If the given
+ * path is a symlink, information about the symlink itself will be returned.
+ *
+ * const fileInfo = await Deno.lstat("hello.txt");
+ * assert(fileInfo.isFile());
+ */
+ export function lstat(filename: string): Promise<FileInfo>;
+ /** Synchronously queries the file system for information on the path provided.
+ * If the given path is a symlink, information about the symlink itself will be
+ * returned.
+ *
+ * const fileInfo = Deno.lstatSync("hello.txt");
+ * assert(fileInfo.isFile());
+ */
+ export function lstatSync(filename: string): FileInfo;
+ /** Queries the file system for information on the path provided. `stat` will
+ * always follow symlinks.
+ *
+ * const fileInfo = await Deno.stat("hello.txt");
+ * assert(fileInfo.isFile());
+ */
+ export function stat(filename: string): Promise<FileInfo>;
+ /** Synchronously queries the file system for information on the path provided.
+ * `statSync` will always follow symlinks.
+ *
+ * const fileInfo = Deno.statSync("hello.txt");
+ * assert(fileInfo.isFile());
+ */
+ export function statSync(filename: string): FileInfo;
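+
+ // A sketch contrasting `lstat` and `stat` (assumes "symlink.txt" is a
+ // symbolic link to a regular file; the name is illustrative):
+ //
+ //   const linkInfo = await Deno.lstat("symlink.txt");
+ //   linkInfo.isSymlink(); // true - the link itself is inspected
+ //   const targetInfo = await Deno.stat("symlink.txt");
+ //   targetInfo.isFile();  // true - the link is followed to its target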
+
+ // @url js/link.d.ts
+
+ /** Synchronously creates `newname` as a hard link to `oldname`.
+ *
+ * Deno.linkSync("old/name", "new/name");
+ */
+ export function linkSync(oldname: string, newname: string): void;
+ /** Creates `newname` as a hard link to `oldname`.
+ *
+ * await Deno.link("old/name", "new/name");
+ */
+ export function link(oldname: string, newname: string): Promise<void>;
+
+ // @url js/symlink.d.ts
+
+ /** Synchronously creates `newname` as a symbolic link to `oldname`. The type
+ * argument can be set to `dir` or `file` and is only available on Windows
+ * (ignored on other platforms).
+ *
+ * Deno.symlinkSync("old/name", "new/name");
+ */
+ export function symlinkSync(
+ oldname: string,
+ newname: string,
+ type?: string
+ ): void;
+ /** Creates `newname` as a symbolic link to `oldname`. The type argument can be
+ * set to `dir` or `file` and is only available on Windows (ignored on other
+ * platforms).
+ *
+ * await Deno.symlink("old/name", "new/name");
+ */
+ export function symlink(
+ oldname: string,
+ newname: string,
+ type?: string
+ ): Promise<void>;
+
+ // @url js/write_file.d.ts
+
+ /** Options for writing to a file.
+ * `perm` changes the file's permissions if set.
+ * `create` decides whether the file should be created if it does not exist
+ * (default: `true`).
+ * `append` decides whether data should be appended to the file (default:
+ * `false`).
+ */
+ export interface WriteFileOptions {
+ perm?: number;
+ create?: boolean;
+ append?: boolean;
+ }
+ /** Synchronously write a new file with the given filename and data.
+ *
+ * const encoder = new TextEncoder();
+ * const data = encoder.encode("Hello world\n");
+ * Deno.writeFileSync("hello.txt", data);
+ */
+ export function writeFileSync(
+ filename: string,
+ data: Uint8Array,
+ options?: WriteFileOptions
+ ): void;
+ /** Write a new file with the given filename and data.
+ *
+ * const encoder = new TextEncoder();
+ * const data = encoder.encode("Hello world\n");
+ * await Deno.writeFile("hello.txt", data);
+ */
+ export function writeFile(
+ filename: string,
+ data: Uint8Array,
+ options?: WriteFileOptions
+ ): Promise<void>;
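+
+ // A sketch of WriteFileOptions (assumes write permission; the filename is
+ // illustrative):
+ //
+ //   const line = new TextEncoder().encode("one more line\n");
+ //   await Deno.writeFile("log.txt", line, { append: true, create: true });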
+
+ // @url js/error_stack.d.ts
+
+ interface Location {
+ /** The full url for the module, e.g. `file://some/file.ts` or
+ * `https://some/file.ts`. */
+ filename: string;
+ /** The line number in the file. It is assumed to be 1-indexed. */
+ line: number;
+ /** The column number in the file. It is assumed to be 1-indexed. */
+ column: number;
+ }
+ /** Given a current location in a module, lookup the source location and
+ * return it.
+ *
+ * When Deno transpiles code, it keeps source maps of the transpiled code. This
+ * function can be used to look up the original location. This is done
+ * automatically when accessing the `.stack` property of an error, or when an
+ * uncaught error is logged. This function can also be used to perform the
+ * lookup when building better error handling.
+ *
+ * **Note:** `line` and `column` are 1-indexed, which matches display
+ * expectations, but is not typical of most index numbers in Deno.
+ *
+ * An example:
+ *
+ * const orig = Deno.applySourceMap({
+ * location: "file://my/module.ts",
+ * line: 5,
+ * column: 15
+ * });
+ * console.log(`${orig.filename}:${orig.line}:${orig.column}`);
+ *
+ */
+ export function applySourceMap(location: Location): Location;
+
+ // @url js/errors.d.ts
+
+ /** A Deno-specific error. The `kind` property is set to a specific error code
+ * which can be used in application logic.
+ *
+ * try {
+ * somethingThatMightThrow();
+ * } catch (e) {
+ * if (
+ * e instanceof Deno.DenoError &&
+ * e.kind === Deno.ErrorKind.Overflow
+ * ) {
+ * console.error("Overflow error!");
+ * }
+ * }
+ *
+ */
+ export class DenoError<T extends ErrorKind> extends Error {
+ readonly kind: T;
+ constructor(kind: T, msg: string);
+ }
+ export enum ErrorKind {
+ NoError = 0,
+ NotFound = 1,
+ PermissionDenied = 2,
+ ConnectionRefused = 3,
+ ConnectionReset = 4,
+ ConnectionAborted = 5,
+ NotConnected = 6,
+ AddrInUse = 7,
+ AddrNotAvailable = 8,
+ BrokenPipe = 9,
+ AlreadyExists = 10,
+ WouldBlock = 11,
+ InvalidInput = 12,
+ InvalidData = 13,
+ TimedOut = 14,
+ Interrupted = 15,
+ WriteZero = 16,
+ Other = 17,
+ UnexpectedEof = 18,
+ BadResource = 19,
+ CommandFailed = 20,
+ EmptyHost = 21,
+ IdnaError = 22,
+ InvalidPort = 23,
+ InvalidIpv4Address = 24,
+ InvalidIpv6Address = 25,
+ InvalidDomainCharacter = 26,
+ RelativeUrlWithoutBase = 27,
+ RelativeUrlWithCannotBeABaseBase = 28,
+ SetHostOnCannotBeABaseUrl = 29,
+ Overflow = 30,
+ HttpUser = 31,
+ HttpClosed = 32,
+ HttpCanceled = 33,
+ HttpParse = 34,
+ HttpOther = 35,
+ TooLarge = 36,
+ InvalidUri = 37,
+ InvalidSeekMode = 38,
+ OpNotAvailable = 39,
+ WorkerInitFailed = 40,
+ UnixError = 41,
+ NoAsyncSupport = 42,
+ NoSyncSupport = 43,
+ ImportMapError = 44,
+ InvalidPath = 45,
+ ImportPrefixMissing = 46,
+ UnsupportedFetchScheme = 47,
+ TooManyRedirects = 48,
+ Diagnostic = 49,
+ JSError = 50
+ }
+
+ // @url js/permissions.d.ts
+
+ /** Permissions as granted by the caller */
+ export interface Permissions {
+ read: boolean;
+ write: boolean;
+ net: boolean;
+ env: boolean;
+ run: boolean;
+ hrtime: boolean;
+ }
+ export type Permission = keyof Permissions;
+ /** Inspect granted permissions for the current program.
+ *
+ * if (Deno.permissions().read) {
+ * const file = await Deno.readFile("example.test");
+ * // ...
+ * }
+ */
+ export function permissions(): Permissions;
+ /** Revoke a permission. If the permission has already been revoked, nothing
+ * changes.
+ *
+ * if (Deno.permissions().read) {
+ * const file = await Deno.readFile("example.test");
+ * Deno.revokePermission('read');
+ * }
+ * Deno.readFile("example.test"); // -> error or permission prompt
+ */
+ export function revokePermission(permission: Permission): void;
+
+ // @url js/truncate.d.ts
+
+ /** Synchronously truncates or extends the specified file, updating its size
+ * to `len` bytes.
+ *
+ * Deno.truncateSync("hello.txt", 10);
+ */
+ export function truncateSync(name: string, len?: number): void;
+ /**
+ * Truncates or extends the specified file, updating its size to `len` bytes.
+ *
+ * await Deno.truncate("hello.txt", 10);
+ */
+ export function truncate(name: string, len?: number): Promise<void>;
+
+ // @url js/net.d.ts
+
+ type Transport = "tcp";
+ interface Addr {
+ transport: Transport;
+ address: string;
+ }
+
+ /** A Listener is a generic network listener for stream-oriented protocols. */
+ export interface Listener extends AsyncIterator<Conn> {
+ /** Waits for and resolves to the next connection to the `Listener`. */
+ accept(): Promise<Conn>;
+ /** Closes the listener. Any pending accept promises will be rejected
+ * with errors.
+ */
+ close(): void;
+ /** Returns the address of the `Listener`. */
+ addr(): Addr;
+ [Symbol.asyncIterator](): AsyncIterator<Conn>;
+ }
+ export interface Conn extends Reader, Writer, Closer {
+ /** The local address of the connection. */
+ localAddr: string;
+ /** The remote address of the connection. */
+ remoteAddr: string;
+ /** The resource ID of the connection. */
+ rid: number;
+ /** Shuts down (`shutdown(2)`) the reading side of the TCP connection. Most
+ * callers should just use `close()`.
+ */
+ closeRead(): void;
+ /** Shuts down (`shutdown(2)`) the writing side of the TCP connection. Most
+ * callers should just use `close()`.
+ */
+ closeWrite(): void;
+ }
+
+ export interface ListenOptions {
+ port: number;
+ hostname?: string;
+ transport?: Transport;
+ }
+
+ /** Listen announces on the local transport address.
+ *
+ * @param options
+ * @param options.port The port to listen on. (Required.)
+ * @param options.hostname A literal IP address or host name that can be
+ * resolved to an IP address. If not specified, defaults to 0.0.0.0
+ * @param options.transport Defaults to "tcp". Later we plan to add "tcp4",
+ * "tcp6", "udp", "udp4", "udp6", "ip", "ip4", "ip6", "unix", "unixgram" and
+ * "unixpacket".
+ *
+ * Examples:
+ *
+ * listen({ port: 80 })
+ * listen({ hostname: "192.0.2.1", port: 80 })
+ * listen({ hostname: "[2001:db8::1]", port: 80 });
+ * listen({ hostname: "golang.org", port: 80, transport: "tcp" })
+ */
+ export function listen(options: ListenOptions): Listener;
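+
+ // A minimal server sketch for Listener/Conn (assumes `--allow-net` and that
+ // port 8080 is free; the greeting is illustrative):
+ //
+ //   const listener = Deno.listen({ port: 8080 });
+ //   for await (const conn of listener) {
+ //     await conn.write(new TextEncoder().encode("hello\n"));
+ //     conn.close();
+ //   }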
+
+ export interface DialOptions {
+ port: number;
+ hostname?: string;
+ transport?: Transport;
+ }
+
+ /** Dial connects to the address on the named transport.
+ *
+ * @param options
+ * @param options.port The port to connect to. (Required.)
+ * @param options.hostname A literal IP address or host name that can be
+ * resolved to an IP address. If not specified, defaults to 127.0.0.1
+ * @param options.transport Defaults to "tcp". Later we plan to add "tcp4",
+ * "tcp6", "udp", "udp4", "udp6", "ip", "ip4", "ip6", "unix", "unixgram" and
+ * "unixpacket".
+ *
+ * Examples:
+ *
+ * dial({ port: 80 })
+ * dial({ hostname: "192.0.2.1", port: 80 })
+ * dial({ hostname: "[2001:db8::1]", port: 80 });
+ * dial({ hostname: "golang.org", port: 80, transport: "tcp" })
+ */
+ export function dial(options: DialOptions): Promise<Conn>;
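+
+ // A minimal client sketch for `dial` (assumes `--allow-net` and a TCP server
+ // listening on 127.0.0.1:8080):
+ //
+ //   const conn = await Deno.dial({ hostname: "127.0.0.1", port: 8080 });
+ //   await conn.write(new TextEncoder().encode("ping\n"));
+ //   const buf = new Uint8Array(1024);
+ //   const n = await conn.read(buf); // bytes read, or EOF when the peer closes
+ //   conn.close();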
+
+ export interface DialTLSOptions {
+ port: number;
+ hostname?: string;
+ }
+
+ /**
+ * dialTLS establishes a secure connection over TLS (transport layer security).
+ */
+ export function dialTLS(options: DialTLSOptions): Promise<Conn>;
+
+ // @url js/metrics.d.ts
+ export interface Metrics {
+ opsDispatched: number;
+ opsCompleted: number;
+ bytesSentControl: number;
+ bytesSentData: number;
+ bytesReceived: number;
+ }
+ /** Receive metrics from the privileged side of Deno.
+ *
+ * > console.table(Deno.metrics())
+ * ┌──────────────────┬────────┐
+ * │ (index) │ Values │
+ * ├──────────────────┼────────┤
+ * │ opsDispatched │ 9 │
+ * │ opsCompleted │ 9 │
+ * │ bytesSentControl │ 504 │
+ * │ bytesSentData │ 0 │
+ * │ bytesReceived │ 856 │
+ * └──────────────────┴────────┘
+ */
+ export function metrics(): Metrics;
+
+ // @url js/resources.d.ts
+
+ interface ResourceMap {
+ [rid: number]: string;
+ }
+ /** Returns a map of open _file-like_ resource ids along with their string
+ * representations.
+ */
+ export function resources(): ResourceMap;
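+
+ // A sketch of `resources()`; the exact contents depend on what is open, but a
+ // fresh process typically reports only the standard streams:
+ //
+ //   console.log(Deno.resources()); // e.g. { 0: "stdin", 1: "stdout", 2: "stderr" }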
+
+ // @url js/process.d.ts
+
+ /** How to handle subprocess stdio.
+ *
+ * "inherit" The default if unspecified. The child inherits from the
+ * corresponding parent descriptor.
+ *
+ * "piped" A new pipe should be arranged to connect the parent and child
+ * subprocesses.
+ *
+ * "null" This stream will be ignored. This is the equivalent of attaching the
+ * stream to /dev/null.
+ */
+ type ProcessStdio = "inherit" | "piped" | "null";
+ export interface RunOptions {
+ args: string[];
+ cwd?: string;
+ env?: {
+ [key: string]: string;
+ };
+ stdout?: ProcessStdio | number;
+ stderr?: ProcessStdio | number;
+ stdin?: ProcessStdio | number;
+ }
+ /** Send a signal to the process with the given PID. Unix only at this moment.
+ * If pid is negative, the signal will be sent to the process group identified
+ * by -pid.
+ * Requires the `--allow-run` flag.
+ */
+ export function kill(pid: number, signo: number): void;
+ export class Process {
+ readonly rid: number;
+ readonly pid: number;
+ readonly stdin?: WriteCloser;
+ readonly stdout?: ReadCloser;
+ readonly stderr?: ReadCloser;
+ status(): Promise<ProcessStatus>;
+ /** Buffer the stdout and return it as a Uint8Array after EOF.
+ * You must set stdout to "piped" when creating the process.
+ * This calls close() on stdout when it is done.
+ */
+ output(): Promise<Uint8Array>;
+ /** Buffer the stderr and return it as a Uint8Array after EOF.
+ * You must set stderr to "piped" when creating the process.
+ * This calls close() on stderr when it is done.
+ */
+ stderrOutput(): Promise<Uint8Array>;
+ close(): void;
+ kill(signo: number): void;
+ }
+ export interface ProcessStatus {
+ success: boolean;
+ code?: number;
+ signal?: number;
+ }
+ /**
+ * Spawns a new subprocess.
+ *
+ * The subprocess uses the same working directory as the parent process unless
+ * `opt.cwd` is specified.
+ *
+ * Environment variables for the subprocess can be specified using the
+ * `opt.env` mapping.
+ *
+ * By default the subprocess inherits the stdio of the parent process. To
+ * change that, `opt.stdout`, `opt.stderr` and `opt.stdin` can be specified
+ * independently - they can be set to either a `ProcessStdio` value or the
+ * `rid` of an open file.
+ */
+ export function run(opt: RunOptions): Process;
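+
+ // A sketch of `run` with piped stdout (assumes `--allow-run` and that an
+ // `echo` binary is on the PATH; the command is illustrative):
+ //
+ //   const p = Deno.run({ args: ["echo", "hello"], stdout: "piped" });
+ //   const output = await p.output(); // buffered stdout, closed at EOF
+ //   const status = await p.status(); // e.g. { success: true, code: 0 }
+ //   p.close();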
+ enum LinuxSignal {
+ SIGHUP = 1,
+ SIGINT = 2,
+ SIGQUIT = 3,
+ SIGILL = 4,
+ SIGTRAP = 5,
+ SIGABRT = 6,
+ SIGBUS = 7,
+ SIGFPE = 8,
+ SIGKILL = 9,
+ SIGUSR1 = 10,
+ SIGSEGV = 11,
+ SIGUSR2 = 12,
+ SIGPIPE = 13,
+ SIGALRM = 14,
+ SIGTERM = 15,
+ SIGSTKFLT = 16,
+ SIGCHLD = 17,
+ SIGCONT = 18,
+ SIGSTOP = 19,
+ SIGTSTP = 20,
+ SIGTTIN = 21,
+ SIGTTOU = 22,
+ SIGURG = 23,
+ SIGXCPU = 24,
+ SIGXFSZ = 25,
+ SIGVTALRM = 26,
+ SIGPROF = 27,
+ SIGWINCH = 28,
+ SIGIO = 29,
+ SIGPWR = 30,
+ SIGSYS = 31
+ }
+ enum MacOSSignal {
+ SIGHUP = 1,
+ SIGINT = 2,
+ SIGQUIT = 3,
+ SIGILL = 4,
+ SIGTRAP = 5,
+ SIGABRT = 6,
+ SIGEMT = 7,
+ SIGFPE = 8,
+ SIGKILL = 9,
+ SIGBUS = 10,
+ SIGSEGV = 11,
+ SIGSYS = 12,
+ SIGPIPE = 13,
+ SIGALRM = 14,
+ SIGTERM = 15,
+ SIGURG = 16,
+ SIGSTOP = 17,
+ SIGTSTP = 18,
+ SIGCONT = 19,
+ SIGCHLD = 20,
+ SIGTTIN = 21,
+ SIGTTOU = 22,
+ SIGIO = 23,
+ SIGXCPU = 24,
+ SIGXFSZ = 25,
+ SIGVTALRM = 26,
+ SIGPROF = 27,
+ SIGWINCH = 28,
+ SIGINFO = 29,
+ SIGUSR1 = 30,
+ SIGUSR2 = 31
+ }
+ /** Signal numbers. These are platform dependent.
+ */
+ export const Signal: typeof MacOSSignal | typeof LinuxSignal;
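+
+ // A sketch combining `kill` and `Signal` (Unix only, per the note on `kill`
+ // above; `p` stands for a previously spawned Process):
+ //
+ //   Deno.kill(p.pid, Deno.Signal.SIGTERM);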
+ export {};
+
+ // @url js/console.d.ts
+
+ type ConsoleOptions = Partial<{
+ showHidden: boolean;
+ depth: number;
+ colors: boolean;
+ indentLevel: number;
+ }>;
+ /** A symbol which can be used as a key for a custom method which will be called
+ * when `Deno.inspect()` is called, or when the object is logged to the console.
+ */
+ export const customInspect: unique symbol;
+ /**
+ * `inspect()` converts its input into a string with the same format
+ * as that printed by `console.log(...)`.
+ */
+ export function inspect(value: unknown, options?: ConsoleOptions): string;
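+
+ // A sketch of `customInspect` together with `inspect` (the class and its
+ // output string are illustrative):
+ //
+ //   class Point {
+ //     constructor(readonly x: number, readonly y: number) {}
+ //     [Deno.customInspect](): string {
+ //       return `Point(${this.x}, ${this.y})`;
+ //     }
+ //   }
+ //   console.log(Deno.inspect(new Point(1, 2))); // "Point(1, 2)"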
+
+ // @url js/build.d.ts
+
+ export type OperatingSystem = "mac" | "win" | "linux";
+ export type Arch = "x64" | "arm64";
+ /** Build related information */
+ interface BuildInfo {
+ /** The CPU architecture. */
+ arch: Arch;
+ /** The operating system. */
+ os: OperatingSystem;
+ }
+ export const build: BuildInfo;
+
+ // @url js/version.d.ts
+
+ interface Version {
+ deno: string;
+ v8: string;
+ typescript: string;
+ }
+ export const version: Version;
+ export {};
+
+ // @url js/deno.d.ts
+
+ export const args: string[];
+}
+
+// @url js/globals.ts
+
+declare interface Window {
+ window: Window & typeof globalThis;
+ atob: typeof textEncoding.atob;
+ btoa: typeof textEncoding.btoa;
+ fetch: typeof fetchTypes.fetch;
+ clearTimeout: typeof timers.clearTimeout;
+ clearInterval: typeof timers.clearInterval;
+ console: consoleTypes.Console;
+ setTimeout: typeof timers.setTimeout;
+ setInterval: typeof timers.setInterval;
+ location: domTypes.Location;
+ onload: Function | undefined;
+ onunload: Function | undefined;
+ crypto: Crypto;
+ Blob: typeof blob.DenoBlob;
+ File: domTypes.DomFileConstructor;
+ CustomEvent: typeof customEvent.CustomEvent;
+ Event: typeof event.Event;
+ EventTarget: typeof eventTarget.EventTarget;
+ URL: typeof url.URL;
+ URLSearchParams: typeof urlSearchParams.URLSearchParams;
+ Headers: domTypes.HeadersConstructor;
+ FormData: domTypes.FormDataConstructor;
+ TextEncoder: typeof textEncoding.TextEncoder;
+ TextDecoder: typeof textEncoding.TextDecoder;
+ Request: domTypes.RequestConstructor;
+ Response: typeof fetchTypes.Response;
+ performance: performanceUtil.Performance;
+ onmessage: (e: { data: any }) => void;
+ workerMain: typeof workers.workerMain;
+ workerClose: typeof workers.workerClose;
+ postMessage: typeof workers.postMessage;
+ Worker: typeof workers.WorkerImpl;
+ addEventListener: (
+ type: string,
+ callback: (event: domTypes.Event) => void | null,
+ options?: boolean | domTypes.AddEventListenerOptions | undefined
+ ) => void;
+ dispatchEvent: (event: domTypes.Event) => boolean;
+ removeEventListener: (
+ type: string,
+ callback: (event: domTypes.Event) => void | null,
+ options?: boolean | domTypes.EventListenerOptions | undefined
+ ) => void;
+ queueMicrotask: (task: () => void) => void;
+ Deno: typeof Deno;
+}
+
+declare const window: Window & typeof globalThis;
+declare const atob: typeof textEncoding.atob;
+declare const btoa: typeof textEncoding.btoa;
+declare const fetch: typeof fetchTypes.fetch;
+declare const clearTimeout: typeof timers.clearTimeout;
+declare const clearInterval: typeof timers.clearInterval;
+declare const console: consoleTypes.Console;
+declare const setTimeout: typeof timers.setTimeout;
+declare const setInterval: typeof timers.setInterval;
+declare const location: domTypes.Location;
+declare const onload: Function | undefined;
+declare const onunload: Function | undefined;
+declare const crypto: Crypto;
+declare const Blob: typeof blob.DenoBlob;
+declare const File: domTypes.DomFileConstructor;
+declare const CustomEventInit: typeof customEvent.CustomEventInit;
+declare const CustomEvent: typeof customEvent.CustomEvent;
+declare const EventInit: typeof event.EventInit;
+declare const Event: typeof event.Event;
+declare const EventListener: typeof eventTarget.EventListener;
+declare const EventTarget: typeof eventTarget.EventTarget;
+declare const URL: typeof url.URL;
+declare const URLSearchParams: typeof urlSearchParams.URLSearchParams;
+declare const Headers: domTypes.HeadersConstructor;
+declare const FormData: domTypes.FormDataConstructor;
+declare const TextEncoder: typeof textEncoding.TextEncoder;
+declare const TextDecoder: typeof textEncoding.TextDecoder;
+declare const Request: domTypes.RequestConstructor;
+declare const Response: typeof fetchTypes.Response;
+declare const performance: performanceUtil.Performance;
+declare let onmessage: (e: { data: any }) => void;
+declare const workerMain: typeof workers.workerMain;
+declare const workerClose: typeof workers.workerClose;
+declare const postMessage: typeof workers.postMessage;
+declare const Worker: typeof workers.WorkerImpl;
+declare const addEventListener: (
+ type: string,
+ callback: (event: domTypes.Event) => void | null,
+ options?: boolean | domTypes.AddEventListenerOptions | undefined
+) => void;
+declare const dispatchEvent: (event: domTypes.Event) => boolean;
+declare const removeEventListener: (
+ type: string,
+ callback: (event: domTypes.Event) => void | null,
+ options?: boolean | domTypes.EventListenerOptions | undefined
+) => void;
+
+declare type Blob = domTypes.Blob;
+declare type Body = domTypes.Body;
+declare type File = domTypes.DomFile;
+declare type CustomEventInit = domTypes.CustomEventInit;
+declare type CustomEvent = domTypes.CustomEvent;
+declare type EventInit = domTypes.EventInit;
+declare type Event = domTypes.Event;
+declare type EventListener = domTypes.EventListener;
+declare type EventTarget = domTypes.EventTarget;
+declare type URL = url.URL;
+declare type URLSearchParams = domTypes.URLSearchParams;
+declare type Headers = domTypes.Headers;
+declare type FormData = domTypes.FormData;
+declare type TextEncoder = textEncoding.TextEncoder;
+declare type TextDecoder = textEncoding.TextDecoder;
+declare type Request = domTypes.Request;
+declare type Response = domTypes.Response;
+declare type Worker = workers.Worker;
+
+declare interface ImportMeta {
+ url: string;
+ main: boolean;
+}
+
+declare interface Crypto {
+ readonly subtle: null;
+ getRandomValues: <
+ T extends
+ | Int8Array
+ | Uint8Array
+ | Uint8ClampedArray
+ | Int16Array
+ | Uint16Array
+ | Int32Array
+ | Uint32Array
+ >(
+ typedArray: T
+ ) => T;
+}
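+
+// A sketch of `crypto.getRandomValues` as declared above:
+//
+//   const buf = new Uint8Array(16);
+//   crypto.getRandomValues(buf); // fills `buf` with 16 random bytes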
+
+declare namespace domTypes {
+ // @url js/dom_types.d.ts
+
+ export type BufferSource = ArrayBufferView | ArrayBuffer;
+ export type HeadersInit =
+ | Headers
+ | Array<[string, string]>
+ | Record<string, string>;
+ export type URLSearchParamsInit =
+ | string
+ | string[][]
+ | Record<string, string>;
+ type BodyInit =
+ | Blob
+ | BufferSource
+ | FormData
+ | URLSearchParams
+ | ReadableStream
+ | string;
+ export type RequestInfo = Request | string;
+ type ReferrerPolicy =
+ | ""
+ | "no-referrer"
+ | "no-referrer-when-downgrade"
+ | "origin-only"
+ | "origin-when-cross-origin"
+ | "unsafe-url";
+ export type BlobPart = BufferSource | Blob | string;
+ export type FormDataEntryValue = DomFile | string;
+ export interface DomIterable<K, V> {
+ keys(): IterableIterator<K>;
+ values(): IterableIterator<V>;
+ entries(): IterableIterator<[K, V]>;
+ [Symbol.iterator](): IterableIterator<[K, V]>;
+ forEach(
+ callback: (value: V, key: K, parent: this) => void,
+ thisArg?: any
+ ): void;
+ }
+ type EndingType = "transparent" | "native";
+ export interface BlobPropertyBag {
+ type?: string;
+ ending?: EndingType;
+ }
+ interface AbortSignalEventMap {
+ abort: ProgressEvent;
+ }
+ export enum NodeType {
+ ELEMENT_NODE = 1,
+ TEXT_NODE = 3,
+ DOCUMENT_FRAGMENT_NODE = 11
+ }
+ export const eventTargetHost: unique symbol;
+ export const eventTargetListeners: unique symbol;
+ export const eventTargetMode: unique symbol;
+ export const eventTargetNodeType: unique symbol;
+ export interface EventTarget {
+ [eventTargetHost]: EventTarget | null;
+ [eventTargetListeners]: { [type in string]: EventListener[] };
+ [eventTargetMode]: string;
+ [eventTargetNodeType]: NodeType;
+ addEventListener(
+ type: string,
+ callback: (event: Event) => void | null,
+ options?: boolean | AddEventListenerOptions
+ ): void;
+ dispatchEvent(event: Event): boolean;
+ removeEventListener(
+ type: string,
+ callback?: (event: Event) => void | null,
+ options?: EventListenerOptions | boolean
+ ): void;
+ }
+ export interface ProgressEventInit extends EventInit {
+ lengthComputable?: boolean;
+ loaded?: number;
+ total?: number;
+ }
+ export interface URLSearchParams extends DomIterable<string, string> {
+ /**
+ * Appends a specified key/value pair as a new search parameter.
+ */
+ append(name: string, value: string): void;
+ /**
+ * Deletes the given search parameter, and its associated value,
+ * from the list of all search parameters.
+ */
+ delete(name: string): void;
+ /**
+ * Returns the first value associated with the given search parameter.
+ */
+ get(name: string): string | null;
+ /**
+ * Returns all the values associated with a given search parameter.
+ */
+ getAll(name: string): string[];
+ /**
+ * Returns a Boolean indicating if such a search parameter exists.
+ */
+ has(name: string): boolean;
+ /**
+ * Sets the value associated with a given search parameter to the given value.
+ * If there were several values, deletes the others.
+ */
+ set(name: string, value: string): void;
+ /**
+ * Sorts all key/value pairs contained in this object in place
+ * and returns undefined. The sort order is according to Unicode
+ * code points of the keys.
+ */
+ sort(): void;
+ /**
+ * Returns a query string suitable for use in a URL.
+ */
+ toString(): string;
+ /**
+ * Iterates over each name-value pair in the query
+ * and invokes the given function.
+ */
+ forEach(
+ callbackfn: (value: string, key: string, parent: this) => void,
+ thisArg?: any
+ ): void;
+ }
+ export interface EventListener {
+ handleEvent(event: Event): void;
+ readonly callback: (event: Event) => void | null;
+ readonly options: boolean | AddEventListenerOptions;
+ }
+ export interface EventInit {
+ bubbles?: boolean;
+ cancelable?: boolean;
+ composed?: boolean;
+ }
+ export interface CustomEventInit extends EventInit {
+ detail?: any;
+ }
+ export enum EventPhase {
+ NONE = 0,
+ CAPTURING_PHASE = 1,
+ AT_TARGET = 2,
+ BUBBLING_PHASE = 3
+ }
+ export interface EventPath {
+ item: EventTarget;
+ itemInShadowTree: boolean;
+ relatedTarget: EventTarget | null;
+ rootOfClosedTree: boolean;
+ slotInClosedTree: boolean;
+ target: EventTarget | null;
+ touchTargetList: EventTarget[];
+ }
+ export interface Event {
+ readonly type: string;
+ target: EventTarget | null;
+ currentTarget: EventTarget | null;
+ composedPath(): EventPath[];
+ eventPhase: number;
+ stopPropagation(): void;
+ stopImmediatePropagation(): void;
+ readonly bubbles: boolean;
+ readonly cancelable: boolean;
+ preventDefault(): void;
+ readonly defaultPrevented: boolean;
+ readonly composed: boolean;
+ isTrusted: boolean;
+ readonly timeStamp: Date;
+ dispatched: boolean;
+ readonly initialized: boolean;
+ inPassiveListener: boolean;
+ cancelBubble: boolean;
+ cancelBubbleImmediately: boolean;
+ path: EventPath[];
+ relatedTarget: EventTarget | null;
+ }
+ export interface CustomEvent extends Event {
+ readonly detail: any;
+ initCustomEvent(
+ type: string,
+ bubbles?: boolean,
+ cancelable?: boolean,
+ detail?: any | null
+ ): void;
+ }
+ export interface DomFile extends Blob {
+ readonly lastModified: number;
+ readonly name: string;
+ }
+ export interface DomFileConstructor {
+ new (
+ bits: BlobPart[],
+ filename: string,
+ options?: FilePropertyBag
+ ): DomFile;
+ prototype: DomFile;
+ }
+ export interface FilePropertyBag extends BlobPropertyBag {
+ lastModified?: number;
+ }
+ interface ProgressEvent extends Event {
+ readonly lengthComputable: boolean;
+ readonly loaded: number;
+ readonly total: number;
+ }
+ export interface EventListenerOptions {
+ capture: boolean;
+ }
+ export interface AddEventListenerOptions extends EventListenerOptions {
+ once: boolean;
+ passive: boolean;
+ }
+ interface AbortSignal extends EventTarget {
+ readonly aborted: boolean;
+ onabort: ((this: AbortSignal, ev: ProgressEvent) => any) | null;
+ addEventListener<K extends keyof AbortSignalEventMap>(
+ type: K,
+ listener: (this: AbortSignal, ev: AbortSignalEventMap[K]) => any,
+ options?: boolean | AddEventListenerOptions
+ ): void;
+ addEventListener(
+ type: string,
+ listener: EventListener,
+ options?: boolean | AddEventListenerOptions
+ ): void;
+ removeEventListener<K extends keyof AbortSignalEventMap>(
+ type: K,
+ listener: (this: AbortSignal, ev: AbortSignalEventMap[K]) => any,
+ options?: boolean | EventListenerOptions
+ ): void;
+ removeEventListener(
+ type: string,
+ listener: EventListener,
+ options?: boolean | EventListenerOptions
+ ): void;
+ }
+ export interface ReadableStream {
+ readonly locked: boolean;
+ cancel(): Promise<void>;
+ getReader(): ReadableStreamReader;
+ tee(): [ReadableStream, ReadableStream];
+ }
+ export interface ReadableStreamReader {
+ cancel(): Promise<void>;
+ read(): Promise<any>;
+ releaseLock(): void;
+ }
+ export interface FormData extends DomIterable<string, FormDataEntryValue> {
+ append(name: string, value: string | Blob, fileName?: string): void;
+ delete(name: string): void;
+ get(name: string): FormDataEntryValue | null;
+ getAll(name: string): FormDataEntryValue[];
+ has(name: string): boolean;
+ set(name: string, value: string | Blob, fileName?: string): void;
+ }
+ export interface FormDataConstructor {
+ new (): FormData;
+ prototype: FormData;
+ }
+ /** A blob object represents a file-like object of immutable, raw data. */
+ export interface Blob {
+ /** The size, in bytes, of the data contained in the `Blob` object. */
+ readonly size: number;
+ /** A string indicating the media type of the data contained in the `Blob`.
+ * If the type is unknown, this string is empty.
+ */
+ readonly type: string;
+ /** Returns a new `Blob` object containing the data in the specified range of
+ * bytes of the source `Blob`.
+ */
+ slice(start?: number, end?: number, contentType?: string): Blob;
+ }
+ export interface Body {
+ /** A simple getter used to expose a `ReadableStream` of the body contents. */
+ readonly body: ReadableStream | null;
+ /** Stores a `Boolean` that declares whether the body has been used in a
+ * response yet.
+ */
+ readonly bodyUsed: boolean;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with an `ArrayBuffer`.
+ */
+ arrayBuffer(): Promise<ArrayBuffer>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `Blob`.
+ */
+ blob(): Promise<Blob>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `FormData` object.
+ */
+ formData(): Promise<FormData>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with the result of parsing the body text as JSON.
+ */
+ json(): Promise<any>;
+ /** Takes a `Response` stream and reads it to completion. It returns a promise
+ * that resolves with a `USVString` (text).
+ */
+ text(): Promise<string>;
+ }
+ export interface Headers extends DomIterable<string, string> {
+ /** Appends a new value onto an existing header inside a `Headers` object, or
+ * adds the header if it does not already exist.
+ */
+ append(name: string, value: string): void;
+ /** Deletes a header from a `Headers` object. */
+ delete(name: string): void;
+ /** Returns an iterator allowing iteration over all key/value pairs
+ * contained in this Headers object. Both the key and value of each pair
+ * are ByteString objects.
+ */
+ entries(): IterableIterator<[string, string]>;
+ /** Returns a `ByteString` sequence of all the values of a header within a
+ * `Headers` object with a given name.
+ */
+ get(name: string): string | null;
+ /** Returns a boolean stating whether a `Headers` object contains a certain
+ * header.
+ */
+ has(name: string): boolean;
+ /** Returns an iterator allowing iteration over all keys contained in
+ * this Headers object. The keys are ByteString objects.
+ */
+ keys(): IterableIterator<string>;
+ /** Sets a new value for an existing header inside a Headers object, or adds
+ * the header if it does not already exist.
+ */
+ set(name: string, value: string): void;
+ /** Returns an iterator allowing iteration over all values contained in
+ * this Headers object. The values are ByteString objects.
+ */
+ values(): IterableIterator<string>;
+ forEach(
+ callbackfn: (value: string, key: string, parent: this) => void,
+ thisArg?: any
+ ): void;
+ /** The Symbol.iterator well-known symbol specifies the default
+ * iterator for this Headers object
+ */
+ [Symbol.iterator](): IterableIterator<[string, string]>;
+ }
+ export interface HeadersConstructor {
+ new (init?: HeadersInit): Headers;
+ prototype: Headers;
+ }
+ type RequestCache =
+ | "default"
+ | "no-store"
+ | "reload"
+ | "no-cache"
+ | "force-cache"
+ | "only-if-cached";
+ type RequestCredentials = "omit" | "same-origin" | "include";
+ type RequestDestination =
+ | ""
+ | "audio"
+ | "audioworklet"
+ | "document"
+ | "embed"
+ | "font"
+ | "image"
+ | "manifest"
+ | "object"
+ | "paintworklet"
+ | "report"
+ | "script"
+ | "sharedworker"
+ | "style"
+ | "track"
+ | "video"
+ | "worker"
+ | "xslt";
+ type RequestMode = "navigate" | "same-origin" | "no-cors" | "cors";
+ type RequestRedirect = "follow" | "error" | "manual";
+ type ResponseType =
+ | "basic"
+ | "cors"
+ | "default"
+ | "error"
+ | "opaque"
+ | "opaqueredirect";
+ export interface RequestInit {
+ body?: BodyInit | null;
+ cache?: RequestCache;
+ credentials?: RequestCredentials;
+ headers?: HeadersInit;
+ integrity?: string;
+ keepalive?: boolean;
+ method?: string;
+ mode?: RequestMode;
+ redirect?: RequestRedirect;
+ referrer?: string;
+ referrerPolicy?: ReferrerPolicy;
+ signal?: AbortSignal | null;
+ window?: any;
+ }
+ export interface ResponseInit {
+ headers?: HeadersInit;
+ status?: number;
+ statusText?: string;
+ }
+ export interface RequestConstructor {
+ new (input: RequestInfo, init?: RequestInit): Request;
+ prototype: Request;
+ }
+ export interface Request extends Body {
+ /** Returns the cache mode associated with request, which is a string
+ * indicating how the request will interact with the browser's cache when
+ * fetching.
+ */
+ readonly cache?: RequestCache;
+ /** Returns the credentials mode associated with request, which is a string
+ * indicating whether credentials will be sent with the request always, never,
+ * or only when sent to a same-origin URL.
+ */
+ readonly credentials?: RequestCredentials;
+ /** Returns the kind of resource requested by request (e.g., `document` or
+ * `script`).
+ */
+ readonly destination?: RequestDestination;
+ /** Returns a Headers object consisting of the headers associated with
+ * request.
+ *
+ * Note that headers added in the network layer by the user agent
+ * will not be accounted for in this object (e.g., the `Host` header).
+ */
+ readonly headers: Headers;
+ /** Returns request's subresource integrity metadata, which is a cryptographic
+ * hash of the resource being fetched. Its value consists of multiple hashes
+ * separated by whitespace. [SRI]
+ */
+ readonly integrity?: string;
+ /** Returns a boolean indicating whether or not request is for a history
+ * navigation (a.k.a. back-forward navigation).
+ */
+ readonly isHistoryNavigation?: boolean;
+ /** Returns a boolean indicating whether or not request is for a reload
+ * navigation.
+ */
+ readonly isReloadNavigation?: boolean;
+ /** Returns a boolean indicating whether or not request can outlive the global
+ * in which it was created.
+ */
+ readonly keepalive?: boolean;
+ /** Returns request's HTTP method, which is `GET` by default. */
+ readonly method: string;
+ /** Returns the mode associated with request, which is a string indicating
+ * whether the request will use CORS, or will be restricted to same-origin
+ * URLs.
+ */
+ readonly mode?: RequestMode;
+ /** Returns the redirect mode associated with request, which is a string
+ * indicating how redirects for the request will be handled during fetching.
+ *
+ * A request will follow redirects by default.
+ */
+ readonly redirect?: RequestRedirect;
+ /** Returns the referrer of request. Its value can be a same-origin URL if
+ * explicitly set in init, the empty string to indicate no referrer, and
+ * `about:client` when defaulting to the global's default.
+ *
+ * This is used during fetching to determine the value of the `Referer`
+ * header of the request being made.
+ */
+ readonly referrer?: string;
+ /** Returns the referrer policy associated with request. This is used during
+ * fetching to compute the value of the request's referrer.
+ */
+ readonly referrerPolicy?: ReferrerPolicy;
+ /** Returns the signal associated with request, which is an AbortSignal object
+ * indicating whether or not request has been aborted, and its abort event
+ * handler.
+ */
+ readonly signal?: AbortSignal;
+ /** Returns the URL of request as a string. */
+ readonly url: string;
+ clone(): Request;
+ }
+ export interface Response extends Body {
+ /** Contains the `Headers` object associated with the response. */
+ readonly headers: Headers;
+ /** Contains a boolean stating whether the response was successful (status in
+ * the range 200-299) or not.
+ */
+ readonly ok: boolean;
+ /** Indicates whether or not the response is the result of a redirect; that
+ * is, its URL list has more than one entry.
+ */
+ readonly redirected: boolean;
+ /** Contains the status code of the response (e.g., `200` for a success). */
+ readonly status: number;
+ /** Contains the status message corresponding to the status code (e.g., `OK`
+ * for `200`).
+ */
+ readonly statusText: string;
+ readonly trailer: Promise<Headers>;
+ /** Contains the type of the response (e.g., `basic`, `cors`). */
+ readonly type: ResponseType;
+ /** Contains the URL of the response. */
+ readonly url: string;
+ /** Creates a clone of a `Response` object. */
+ clone(): Response;
+ }
+ export interface Location {
+ /**
+ * Returns a DOMStringList object listing the origins of the ancestor browsing
+ * contexts, from the parent browsing context to the top-level browsing
+ * context.
+ */
+ readonly ancestorOrigins: string[];
+ /**
+ * Returns the Location object's URL's fragment (includes leading "#" if
+ * non-empty).
+ * Can be set, to navigate to the same URL with a changed fragment (ignores
+ * leading "#").
+ */
+ hash: string;
+ /**
+ * Returns the Location object's URL's host and port (if different from the
+ * default port for the scheme). Can be set, to navigate to the same URL with
+ * a changed host and port.
+ */
+ host: string;
+ /**
+ * Returns the Location object's URL's host. Can be set, to navigate to the
+ * same URL with a changed host.
+ */
+ hostname: string;
+ /**
+ * Returns the Location object's URL. Can be set, to navigate to the given
+ * URL.
+ */
+ href: string;
+ /** Returns the Location object's URL's origin. */
+ readonly origin: string;
+ /**
+ * Returns the Location object's URL's path.
+ * Can be set, to navigate to the same URL with a changed path.
+ */
+ pathname: string;
+ /**
+ * Returns the Location object's URL's port.
+ * Can be set, to navigate to the same URL with a changed port.
+ */
+ port: string;
+ /**
+ * Returns the Location object's URL's scheme.
+ * Can be set, to navigate to the same URL with a changed scheme.
+ */
+ protocol: string;
+ /**
+ * Returns the Location object's URL's query (includes leading "?" if
+ * non-empty). Can be set, to navigate to the same URL with a changed query
+ * (ignores leading "?").
+ */
+ search: string;
+ /**
+ * Navigates to the given URL.
+ */
+ assign(url: string): void;
+ /**
+ * Reloads the current page.
+ */
+ reload(): void;
+ /** @deprecated */
+ reload(forcedReload: boolean): void;
+ /**
+ * Removes the current page from the session history and navigates to the
+ * given URL.
+ */
+ replace(url: string): void;
+ }
+}
+
+declare namespace blob {
+ // @url js/blob.d.ts
+
+ export const bytesSymbol: unique symbol;
+ export const blobBytesWeakMap: WeakMap<domTypes.Blob, Uint8Array>;
+ export class DenoBlob implements domTypes.Blob {
+ private readonly [bytesSymbol];
+ readonly size: number;
+ readonly type: string;
+ /** A blob object represents a file-like object of immutable, raw data. */
+ constructor(
+ blobParts?: domTypes.BlobPart[],
+ options?: domTypes.BlobPropertyBag
+ );
+ slice(start?: number, end?: number, contentType?: string): DenoBlob;
+ }
+}
+
+declare namespace consoleTypes {
+ // @url js/console.d.ts
+
+ type ConsoleOptions = Partial<{
+ showHidden: boolean;
+ depth: number;
+ colors: boolean;
+ indentLevel: number;
+ }>;
+ export class CSI {
+ static kClear: string;
+ static kClearScreenDown: string;
+ }
+ const isConsoleInstance: unique symbol;
+ export class Console {
+ private printFunc;
+ indentLevel: number;
+ [isConsoleInstance]: boolean;
+ /** Writes the arguments to stdout */
+ log: (...args: unknown[]) => void;
+ /** Writes the arguments to stdout */
+ debug: (...args: unknown[]) => void;
+ /** Writes the arguments to stdout */
+ info: (...args: unknown[]) => void;
+ /** Writes the properties of the supplied `obj` to stdout */
+ dir: (
+ obj: unknown,
+ options?: Partial<{
+ showHidden: boolean;
+ depth: number;
+ colors: boolean;
+ indentLevel: number;
+ }>
+ ) => void;
+
+ /** From MDN:
+ * Displays an interactive tree of the descendant elements of
+ * the specified XML/HTML element. If it is not possible to display
+ * as an element, the JavaScript Object view is shown instead.
+ * The output is presented as a hierarchical listing of expandable
+ * nodes that let you see the contents of child nodes.
+ *
+ * Since we write to stdout, we can't display anything interactive,
+ * so we just fall back to `console.dir`.
+ */
+ dirxml: (
+ obj: unknown,
+ options?: Partial<{
+ showHidden: boolean;
+ depth: number;
+ colors: boolean;
+ indentLevel: number;
+ }>
+ ) => void;
+
+ /** Writes the arguments to stdout */
+ warn: (...args: unknown[]) => void;
+ /** Writes the arguments to stdout */
+ error: (...args: unknown[]) => void;
+ /** Writes an error message to stdout if the assertion is `false`. If the
+ * assertion is `true`, nothing happens.
+ *
+ * ref: https://console.spec.whatwg.org/#assert
+ */
+ assert: (condition?: boolean, ...args: unknown[]) => void;
+ count: (label?: string) => void;
+ countReset: (label?: string) => void;
+ table: (data: unknown, properties?: string[] | undefined) => void;
+ time: (label?: string) => void;
+ timeLog: (label?: string, ...args: unknown[]) => void;
+ timeEnd: (label?: string) => void;
+ group: (...label: unknown[]) => void;
+ groupCollapsed: (...label: unknown[]) => void;
+ groupEnd: () => void;
+ clear: () => void;
+ trace: (...args: unknown[]) => void;
+ static [Symbol.hasInstance](instance: Console): boolean;
+ }
+ /** A symbol which can be used as a key for a custom method which will be called
+ * when `Deno.inspect()` is called, or when the object is logged to the console.
+ */
+ export const customInspect: unique symbol;
+ /**
+ * `inspect()` converts its input into a string with the same format
+ * as that printed by `console.log(...)`.
+ */
+ export function inspect(value: unknown, options?: ConsoleOptions): string;
+}
+
+declare namespace event {
+ // @url js/event.d.ts
+
+ export const eventAttributes: WeakMap<object, any>;
+ export class EventInit implements domTypes.EventInit {
+ bubbles: boolean;
+ cancelable: boolean;
+ composed: boolean;
+ constructor({
+ bubbles,
+ cancelable,
+ composed
+ }?: {
+ bubbles?: boolean | undefined;
+ cancelable?: boolean | undefined;
+ composed?: boolean | undefined;
+ });
+ }
+ export class Event implements domTypes.Event {
+ isTrusted: boolean;
+ private _canceledFlag;
+ private _dispatchedFlag;
+ private _initializedFlag;
+ private _inPassiveListenerFlag;
+ private _stopImmediatePropagationFlag;
+ private _stopPropagationFlag;
+ private _path;
+ constructor(type: string, eventInitDict?: domTypes.EventInit);
+ readonly bubbles: boolean;
+ cancelBubble: boolean;
+ cancelBubbleImmediately: boolean;
+ readonly cancelable: boolean;
+ readonly composed: boolean;
+ currentTarget: domTypes.EventTarget;
+ readonly defaultPrevented: boolean;
+ dispatched: boolean;
+ eventPhase: number;
+ readonly initialized: boolean;
+ inPassiveListener: boolean;
+ path: domTypes.EventPath[];
+ relatedTarget: domTypes.EventTarget;
+ target: domTypes.EventTarget;
+ readonly timeStamp: Date;
+ readonly type: string;
+ /** Returns the event’s path (objects on which listeners will be
+ * invoked). This does not include nodes in shadow trees if the
+ * shadow root was created with its ShadowRoot.mode closed.
+ *
+ * event.composedPath();
+ */
+ composedPath(): domTypes.EventPath[];
+ /** Cancels the event (if it is cancelable).
+ * See https://dom.spec.whatwg.org/#set-the-canceled-flag
+ *
+ * event.preventDefault();
+ */
+ preventDefault(): void;
+ /** Stops the propagation of events further along in the DOM.
+ *
+ * event.stopPropagation();
+ */
+ stopPropagation(): void;
+ /** For this particular event, no other listener will be called.
+ * Neither those attached on the same element, nor those attached
+ * on elements which will be traversed later (in capture phase,
+ * for instance).
+ *
+ * event.stopImmediatePropagation();
+ */
+ stopImmediatePropagation(): void;
+ }
+}
+
+declare namespace customEvent {
+ // @url js/custom_event.d.ts
+
+ export const customEventAttributes: WeakMap<object, any>;
+ export class CustomEventInit extends event.EventInit
+ implements domTypes.CustomEventInit {
+ detail: any;
+ constructor({
+ bubbles,
+ cancelable,
+ composed,
+ detail
+ }: domTypes.CustomEventInit);
+ }
+ export class CustomEvent extends event.Event implements domTypes.CustomEvent {
+ constructor(type: string, customEventInitDict?: domTypes.CustomEventInit);
+ readonly detail: any;
+ initCustomEvent(
+ type: string,
+ bubbles?: boolean,
+ cancelable?: boolean,
+ detail?: any
+ ): void;
+ readonly [Symbol.toStringTag]: string;
+ }
+}
+
+declare namespace eventTarget {
+ // @url js/event_target.d.ts
+
+ export class EventListenerOptions implements domTypes.EventListenerOptions {
+ _capture: boolean;
+ constructor({ capture }?: { capture?: boolean | undefined });
+ readonly capture: boolean;
+ }
+ export class AddEventListenerOptions extends EventListenerOptions
+ implements domTypes.AddEventListenerOptions {
+ _passive: boolean;
+ _once: boolean;
+ constructor({
+ capture,
+ passive,
+ once
+ }?: {
+ capture?: boolean | undefined;
+ passive?: boolean | undefined;
+ once?: boolean | undefined;
+ });
+ readonly passive: boolean;
+ readonly once: boolean;
+ }
+ export class EventListener implements domTypes.EventListener {
+ allEvents: domTypes.Event[];
+ atEvents: domTypes.Event[];
+ bubbledEvents: domTypes.Event[];
+ capturedEvents: domTypes.Event[];
+ private _callback;
+ private _options;
+ constructor(
+ callback: (event: domTypes.Event) => void | null,
+ options: boolean | domTypes.AddEventListenerOptions
+ );
+ handleEvent(event: domTypes.Event): void;
+ readonly callback: (event: domTypes.Event) => void | null;
+ readonly options: domTypes.AddEventListenerOptions | boolean;
+ }
+ export const eventTargetAssignedSlot: unique symbol;
+ export const eventTargetHasActivationBehavior: unique symbol;
+ export class EventTarget implements domTypes.EventTarget {
+ [domTypes.eventTargetHost]: domTypes.EventTarget | null;
+ [domTypes.eventTargetListeners]: {
+ [type in string]: domTypes.EventListener[]
+ };
+ [domTypes.eventTargetMode]: string;
+ [domTypes.eventTargetNodeType]: domTypes.NodeType;
+ private [eventTargetAssignedSlot];
+ private [eventTargetHasActivationBehavior];
+ addEventListener(
+ type: string,
+ callback: (event: domTypes.Event) => void | null,
+ options?: domTypes.AddEventListenerOptions | boolean
+ ): void;
+ removeEventListener(
+ type: string,
+ callback: (event: domTypes.Event) => void | null,
+ options?: domTypes.EventListenerOptions | boolean
+ ): void;
+ dispatchEvent(event: domTypes.Event): boolean;
+ readonly [Symbol.toStringTag]: string;
+ }
+}
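+
+// A sketch of the EventTarget/CustomEvent globals declared above (the event
+// name "greet" is illustrative):
+//
+//   const target = new EventTarget();
+//   target.addEventListener("greet", (e) => {
+//     console.log((e as CustomEvent).detail);
+//   });
+//   target.dispatchEvent(new CustomEvent("greet", { detail: "hi" }));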
+
+declare namespace io {
+ // @url js/io.d.ts
+
+ export const EOF: null;
+ export type EOF = null;
+ export enum SeekMode {
+ SEEK_START = 0,
+ SEEK_CURRENT = 1,
+ SEEK_END = 2
+ }
+ export interface Reader {
+ /** Reads up to `p.byteLength` bytes into `p`. It resolves to the number
+ * of bytes read (`0` < `n` <= `p.byteLength`) and rejects if any error is
+ * encountered.
+ * Even if `read()` returns `n` < `p.byteLength`, it may use all of `p` as
+ * scratch space during the call. If some data is available but not
+ * `p.byteLength` bytes, `read()` conventionally returns what is available
+ * instead of waiting for more.
+ *
+ * When `read()` encounters the end-of-file condition, it returns the `EOF` symbol.
+ *
+ * When `read()` encounters an error, it rejects with an error.
+ *
+ * Callers should always process the `n` > `0` bytes returned before
+ * considering the EOF. Doing so correctly handles I/O errors that happen
+ * after reading some bytes and also both of the allowed EOF behaviors.
+ *
+ * Implementations must not retain `p`.
+ */
+ read(p: Uint8Array): Promise<number | EOF>;
+ }
+ export interface SyncReader {
+ readSync(p: Uint8Array): number | EOF;
+ }
+ export interface Writer {
+ /** Writes `p.byteLength` bytes from `p` to the underlying data
+ * stream. It resolves to the number of bytes written from `p` (`0` <= `n` <=
+ * `p.byteLength`) and rejects with any error encountered that caused the
+ * write to stop early. `write()` must reject with a non-null error if it
+ * would otherwise resolve to `n` < `p.byteLength`. `write()` must not modify
+ * the slice data, even temporarily.
+ *
+ * Implementations must not retain `p`.
+ */
+ write(p: Uint8Array): Promise<number>;
+ }
+ export interface SyncWriter {
+ writeSync(p: Uint8Array): number;
+ }
+ export interface Closer {
+ close(): void;
+ }
+ export interface Seeker {
+ /** Seek sets the offset for the next `read()` or `write()` to offset,
+ * interpreted according to `whence`: `SeekStart` means relative to the start
+ * of the file, `SeekCurrent` means relative to the current offset, and
+ * `SeekEnd` means relative to the end. Seek returns the new offset relative
+ * to the start of the file and an error, if any.
+ *
+ * Seeking to an offset before the start of the file is an error. Seeking to
+ * any positive offset is legal, but the behavior of subsequent I/O operations
+ * on the underlying object is implementation-dependent.
+ */
+ seek(offset: number, whence: SeekMode): Promise<void>;
+ }
+ export interface SyncSeeker {
+ seekSync(offset: number, whence: SeekMode): void;
+ }
+ export interface ReadCloser extends Reader, Closer {}
+ export interface WriteCloser extends Writer, Closer {}
+ export interface ReadSeeker extends Reader, Seeker {}
+ export interface WriteSeeker extends Writer, Seeker {}
+ export interface ReadWriteCloser extends Reader, Writer, Closer {}
+ export interface ReadWriteSeeker extends Reader, Writer, Seeker {}
+ /** Copies from `src` to `dst` until either `EOF` is reached on `src`
+ * or an error occurs. It resolves to the number of bytes copied and rejects
+ * with the first error encountered while copying, if any.
+ *
+ * Because `copy()` is defined to read from `src` until `EOF`, it does not
+ * treat an `EOF` from `read()` as an error to be reported.
+ */
+ export function copy(dst: Writer, src: Reader): Promise<number>;
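+
+ // A sketch of `copy`, assuming `Deno.copy`, `Deno.stdin` and `Deno.stdout`
+ // are re-exported by the `Deno` namespace earlier in this file:
+ //
+ //   const n = await Deno.copy(Deno.stdout, Deno.stdin); // echo stdin to stdout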
+ /** Turns `r` into an async iterator.
+ *
+ * for await (const chunk of toAsyncIterator(reader)) {
+ * console.log(chunk)
+ * }
+ */
+ export function toAsyncIterator(r: Reader): AsyncIterableIterator<Uint8Array>;
+}
+
+declare namespace fetchTypes {
+ // @url js/fetch.d.ts
+
+ class Body implements domTypes.Body, domTypes.ReadableStream, io.ReadCloser {
+ private rid;
+ readonly contentType: string;
+ bodyUsed: boolean;
+ private _bodyPromise;
+ private _data;
+ readonly locked: boolean;
+ readonly body: null | Body;
+ constructor(rid: number, contentType: string);
+ private _bodyBuffer;
+ arrayBuffer(): Promise<ArrayBuffer>;
+ blob(): Promise<domTypes.Blob>;
+ formData(): Promise<domTypes.FormData>;
+ json(): Promise<any>;
+ text(): Promise<string>;
+ read(p: Uint8Array): Promise<number | io.EOF>;
+ close(): void;
+ cancel(): Promise<void>;
+ getReader(): domTypes.ReadableStreamReader;
+ tee(): [domTypes.ReadableStream, domTypes.ReadableStream];
+ [Symbol.asyncIterator](): AsyncIterableIterator<Uint8Array>;
+ }
+ export class Response implements domTypes.Response {
+ readonly url: string;
+ readonly status: number;
+ statusText: string;
+ readonly type = "basic";
+ readonly redirected: boolean;
+ headers: domTypes.Headers;
+ readonly trailer: Promise<domTypes.Headers>;
+ bodyUsed: boolean;
+ readonly body: Body;
+ constructor(
+ url: string,
+ status: number,
+ headersList: Array<[string, string]>,
+ rid: number,
+ redirected_: boolean,
+ body_?: null | Body
+ );
+ arrayBuffer(): Promise<ArrayBuffer>;
+ blob(): Promise<domTypes.Blob>;
+ formData(): Promise<domTypes.FormData>;
+ json(): Promise<any>;
+ text(): Promise<string>;
+ readonly ok: boolean;
+ clone(): domTypes.Response;
+ }
+ /** Fetch a resource from the network. */
+ export function fetch(
+ input: domTypes.Request | string,
+ init?: domTypes.RequestInit
+ ): Promise<Response>;
+}
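+
+// A minimal sketch of the global `fetch` declared above (assumes `--allow-net`;
+// the URL is illustrative):
+//
+//   const res = await fetch("https://example.com/data.json");
+//   if (res.ok) {
+//     const data = await res.json();
+//   }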
+
+declare namespace textEncoding {
+ // @url js/text_encoding.d.ts
+
+ /** Decodes a base-64 encoded string. */
+ export function atob(s: string): string;
+ /** Creates a base-64 ASCII string from the input string. */
+ export function btoa(s: string): string;
+ export interface TextDecodeOptions {
+ stream?: false;
+ }
+ export interface TextDecoderOptions {
+ fatal?: boolean;
+ ignoreBOM?: boolean;
+ }
+ export class TextDecoder {
+ private _encoding;
+ /** Returns encoding's name, lowercased. */
+ readonly encoding: string;
+ /** Returns `true` if error mode is "fatal", and `false` otherwise. */
+ readonly fatal: boolean;
+ /** Returns `true` if ignore BOM flag is set, and `false` otherwise. */
+ readonly ignoreBOM = false;
+ constructor(label?: string, options?: TextDecoderOptions);
+ /** Returns the result of running encoding's decoder. */
+ decode(input?: domTypes.BufferSource, options?: TextDecodeOptions): string;
+ readonly [Symbol.toStringTag]: string;
+ }
+ interface TextEncoderEncodeIntoResult {
+ read: number;
+ written: number;
+ }
+ export class TextEncoder {
+ /** Returns "utf-8". */
+ readonly encoding = "utf-8";
+ /** Returns the result of running UTF-8's encoder. */
+ encode(input?: string): Uint8Array;
+ encodeInto(input: string, dest: Uint8Array): TextEncoderEncodeIntoResult;
+ readonly [Symbol.toStringTag]: string;
+ }
+}
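+
+// A round-trip sketch for the TextEncoder/TextDecoder globals declared above:
+//
+//   const bytes = new TextEncoder().encode("hi");        // Uint8Array [104, 105]
+//   const text = new TextDecoder("utf-8").decode(bytes); // "hi"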
+
+declare namespace timers {
+ // @url js/timers.d.ts
+
+ export type Args = unknown[];
+ /** Sets a timer which executes a function once after the timer expires. */
+ export function setTimeout(
+ cb: (...args: Args) => void,
+ delay?: number,
+ ...args: Args
+ ): number;
+ /** Repeatedly calls a function, with a fixed time delay between each call. */
+ export function setInterval(
+ cb: (...args: Args) => void,
+ delay?: number,
+ ...args: Args
+ ): number;
+ export function clearTimeout(id?: number): void;
+ export function clearInterval(id?: number): void;
+}
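+
+// A sketch of the timer globals declared above:
+//
+//   const id = setTimeout(() => console.log("later"), 1000);
+//   clearTimeout(id); // cancels the callback before it fires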
+
+declare namespace urlSearchParams {
+ // @url js/url_search_params.d.ts
+
+ export class URLSearchParams {
+ private params;
+ private url;
+ constructor(init?: string | string[][] | Record<string, string>);
+ private updateSteps;
+ /** Appends a specified key/value pair as a new search parameter.
+ *
+ * searchParams.append('name', 'first');
+ * searchParams.append('name', 'second');
+ */
+ append(name: string, value: string): void;
+ /** Deletes the given search parameter and its associated value
+ * from the list of all search parameters.
+ *
+ * searchParams.delete('name');
+ */
+ delete(name: string): void;
+ /** Returns all the values associated with a given search parameter
+ * as an array.
+ *
+ * searchParams.getAll('name');
+ */
+ getAll(name: string): string[];
+ /** Returns the first value associated with the given search parameter.
+ *
+ * searchParams.get('name');
+ */
+ get(name: string): string | null;
+ /** Returns a Boolean that indicates whether a parameter with the
+ * specified name exists.
+ *
+ * searchParams.has('name');
+ */
+ has(name: string): boolean;
+ /** Sets the value associated with a given search parameter to the
+ * given value. If there were several matching values, this method
+ * deletes the others. If the search parameter doesn't exist, this
+ * method creates it.
+ *
+ * searchParams.set('name', 'value');
+ */
+ set(name: string, value: string): void;
+ /** Sorts all key/value pairs contained in this object in place and
+ * returns undefined. The sort order is according to Unicode code
+ * points of the keys.
+ *
+ * searchParams.sort();
+ */
+ sort(): void;
+ /** Calls a function for each element contained in this object, in
+ * place, and returns undefined. Optionally accepts, as its second
+ * argument, an object to use as `this` when executing the callback.
+ *
+ * searchParams.forEach((value, key, parent) => {
+ * console.log(value, key, parent);
+ * });
+ *
+ */
+ forEach(
+ callbackfn: (value: string, key: string, parent: this) => void,
+ thisArg?: any
+ ): void;
+ /** Returns an iterator allowing iteration through all keys contained
+ * in this object.
+ *
+ * for (const key of searchParams.keys()) {
+ * console.log(key);
+ * }
+ */
+ keys(): IterableIterator<string>;
+ /** Returns an iterator allowing iteration through all values contained
+ * in this object.
+ *
+ * for (const value of searchParams.values()) {
+ * console.log(value);
+ * }
+ */
+ values(): IterableIterator<string>;
+ /** Returns an iterator allowing iteration through all key/value
+ * pairs contained in this object.
+ *
+ * for (const [key, value] of searchParams.entries()) {
+ * console.log(key, value);
+ * }
+ */
+ entries(): IterableIterator<[string, string]>;
+ /** Returns an iterator allowing iteration through all key/value
+ * pairs contained in this object.
+ *
+ * for (const [key, value] of searchParams[Symbol.iterator]()) {
+ * console.log(key, value);
+ * }
+ */
+ [Symbol.iterator](): IterableIterator<[string, string]>;
+ /** Returns a query string suitable for use in a URL.
+ *
+ * searchParams.toString();
+ */
+ toString(): string;
+ private _handleStringInitialization;
+ private _handleArrayInitialization;
+ }
+}
+
+declare namespace url {
+ // @url js/url.d.ts
+
+ export const blobURLMap: Map<string, domTypes.Blob>;
+ export class URL {
+ private _parts;
+ private _searchParams;
+ private _updateSearchParams;
+ hash: string;
+ host: string;
+ hostname: string;
+ href: string;
+ readonly origin: string;
+ password: string;
+ pathname: string;
+ port: string;
+ protocol: string;
+ search: string;
+ username: string;
+ readonly searchParams: urlSearchParams.URLSearchParams;
+ constructor(url: string, base?: string | URL);
+ toString(): string;
+ toJSON(): string;
+ static createObjectURL(b: domTypes.Blob): string;
+ static revokeObjectURL(url: string): void;
+ }
+}
+
+declare namespace workers {
+ // @url js/workers.d.ts
+
+ export function encodeMessage(data: any): Uint8Array;
+ export function decodeMessage(dataIntArray: Uint8Array): any;
+ export let onmessage: (e: { data: any }) => void;
+ export function postMessage(data: any): void;
+ export function getMessage(): Promise<any>;
+ export let isClosing: boolean;
+ export function workerClose(): void;
+ export function workerMain(): Promise<void>;
+ export interface Worker {
+ onerror?: () => void;
+ onmessage?: (e: { data: any }) => void;
+ onmessageerror?: () => void;
+ postMessage(data: any): void;
+ closed: Promise<void>;
+ }
+ export interface WorkerOptions {}
+ /** Extended Deno Worker initialization options.
+ * `noDenoNamespace` hides the global `window.Deno` namespace for the
+ * spawned worker and any nested workers it spawns (default: false).
+ */
+ export interface DenoWorkerOptions extends WorkerOptions {
+ noDenoNamespace?: boolean;
+ }
+ export class WorkerImpl implements Worker {
+ private readonly rid;
+ private isClosing;
+ private readonly isClosedPromise;
+ onerror?: () => void;
+ onmessage?: (data: any) => void;
+ onmessageerror?: () => void;
+ constructor(specifier: string, options?: DenoWorkerOptions);
+ readonly closed: Promise<void>;
+ postMessage(data: any): void;
+ private run;
+ }
+}
+
+declare namespace performanceUtil {
+ // @url js/performance.d.ts
+
+ export class Performance {
+ /** Returns the time elapsed since Deno's start, in milliseconds.
+ *
+ * Use the --allow-hrtime flag to get a precise value.
+ *
+ * const t = performance.now();
+ * console.log(`${t} ms since start!`);
+ */
+ now(): number;
+ }
+}
+
+// @url js/lib.web_assembly.d.ts
+
+// This follows the WebIDL at: https://webassembly.github.io/spec/js-api/
+// And the follow-on WebIDL at: https://webassembly.github.io/spec/web-api/
+
+/* eslint-disable @typescript-eslint/no-unused-vars, @typescript-eslint/no-explicit-any */
+
+declare namespace WebAssembly {
+ interface WebAssemblyInstantiatedSource {
+ module: Module;
+ instance: Instance;
+ }
+
+ /** Compiles a `WebAssembly.Module` from WebAssembly binary code. This
+ * function is useful if it is necessary to compile a module before it can
+ * be instantiated (otherwise, the `WebAssembly.instantiate()` function
+ * should be used). */
+ function compile(bufferSource: domTypes.BufferSource): Promise<Module>;
+
+ /** Compiles a `WebAssembly.Module` directly from a streamed underlying
+ * source. This function is useful if it is necessary to compile a module
+ * before it can be instantiated (otherwise, the
+ * `WebAssembly.instantiateStreaming()` function should be used). */
+ function compileStreaming(
+ source: Promise<domTypes.Response>
+ ): Promise<Module>;
+
+ /** Takes the WebAssembly binary code, in the form of a typed array or
+ * `ArrayBuffer`, and performs both compilation and instantiation in one step.
+ * The returned `Promise` resolves to both a compiled `WebAssembly.Module` and
+ * its first `WebAssembly.Instance`. */
+ function instantiate(
+ bufferSource: domTypes.BufferSource,
+ importObject?: object
+ ): Promise<WebAssemblyInstantiatedSource>;
+
+ /** Takes an already-compiled `WebAssembly.Module` and returns a `Promise`
+ * that resolves to an `Instance` of that `Module`. This overload is useful if
+ * the `Module` has already been compiled. */
+ function instantiate(
+ module: Module,
+ importObject?: object
+ ): Promise<Instance>;
+
+ /** Compiles and instantiates a WebAssembly module directly from a streamed
+ * underlying source. This is the most efficient, optimized way to load wasm
+ * code. */
+ function instantiateStreaming(
+ source: Promise<domTypes.Response>,
+ importObject?: object
+ ): Promise<WebAssemblyInstantiatedSource>;
+
+ /** Validates a given typed array of WebAssembly binary code, returning
+ * whether the bytes form a valid wasm module (`true`) or not (`false`). */
+ function validate(bufferSource: domTypes.BufferSource): boolean;
+
+ type ImportExportKind = "function" | "table" | "memory" | "global";
+
+ interface ModuleExportDescriptor {
+ name: string;
+ kind: ImportExportKind;
+ }
+ interface ModuleImportDescriptor {
+ module: string;
+ name: string;
+ kind: ImportExportKind;
+ }
+
+ class Module {
+ constructor(bufferSource: domTypes.BufferSource);
+
+ /** Given a `Module` and string, returns a copy of the contents of all
+ * custom sections in the module with the given string name. */
+ static customSections(
+ moduleObject: Module,
+ sectionName: string
+ ): ArrayBuffer;
+
+ /** Given a `Module`, returns an array containing descriptions of all the
+ * declared exports. */
+ static exports(moduleObject: Module): ModuleExportDescriptor[];
+
+ /** Given a `Module`, returns an array containing descriptions of all the
+ * declared imports. */
+ static imports(moduleObject: Module): ModuleImportDescriptor[];
+ }
+
+ class Instance<T extends object = { [key: string]: any }> {
+ constructor(module: Module, importObject?: object);
+
+ /** An object containing as its members all the functions exported from the
+ * WebAssembly module instance, to allow them to be accessed and used by
+ * JavaScript. */
+ readonly exports: T;
+ }
+
+ interface MemoryDescriptor {
+ initial: number;
+ maximum?: number;
+ }
+
+ class Memory {
+ constructor(descriptor: MemoryDescriptor);
+
+ /** An accessor property that returns the buffer contained in the memory. */
+ readonly buffer: ArrayBuffer;
+
+ /** Increases the size of the memory instance by a specified number of
+ * WebAssembly pages (each one is 64KB in size). */
+ grow(delta: number): number;
+ }
+
+ type TableKind = "anyfunc";
+
+ interface TableDescriptor {
+ element: TableKind;
+ initial: number;
+ maximum?: number;
+ }
+
+ class Table {
+ constructor(descriptor: TableDescriptor);
+
+ /** Returns the length of the table, i.e. the number of elements. */
+ readonly length: number;
+
+ /** Accessor function — gets the element stored at a given index. */
+ get(index: number): (...args: any[]) => any;
+
+ /** Increases the size of the Table instance by a specified number of
+ * elements. */
+ grow(delta: number): number;
+
+ /** Sets an element stored at a given index to a given value. */
+ set(index: number, value: (...args: any[]) => any): void;
+ }
+
+ interface GlobalDescriptor {
+ value: string;
+ mutable?: boolean;
+ }
+
+ /** Represents a global variable instance, accessible from both JavaScript and
+ * importable/exportable across one or more `WebAssembly.Module` instances.
+ * This allows dynamic linking of multiple modules. */
+ class Global {
+ constructor(descriptor: GlobalDescriptor, value?: any);
+
+ /** Old-style method that returns the value contained inside the global
+ * variable. */
+ valueOf(): any;
+
+ /** The value contained inside the global variable — this can be used to
+ * directly set and get the global's value. */
+ value: any;
+ }
+
+ /** Indicates an error during WebAssembly decoding or validation */
+ class CompileError extends Error {
+ constructor(message: string, fileName?: string, lineNumber?: string);
+ }
+
+ /** Indicates an error during module instantiation (besides traps from the
+ * start function). */
+ class LinkError extends Error {
+ constructor(message: string, fileName?: string, lineNumber?: string);
+ }
+
+ /** Is thrown whenever WebAssembly specifies a trap. */
+ class RuntimeError extends Error {
+ constructor(message: string, fileName?: string, lineNumber?: string);
+ }
+}
+
+/* eslint-enable @typescript-eslint/no-unused-vars, @typescript-eslint/no-explicit-any */
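A minimal usage sketch of the text-encoding and URLSearchParams declarations above, relying only on the signatures shown in this file (the values in the comments follow the standard Encoding and URL specs, not anything specific to this diff):

    const enc = new TextEncoder();
    const dec = new TextDecoder();
    const bytes = enc.encode("hola"); // Uint8Array [104, 111, 108, 97]
    console.log(dec.decode(bytes)); // "hola"

    // encodeInto() writes into a caller-provided buffer and reports progress.
    const buf = new Uint8Array(8);
    const { read, written } = enc.encodeInto("hola", buf);
    console.log(read, written); // 4 4

    const params = new URLSearchParams("a=1");
    params.append("a", "2");
    params.set("b", "3");
    console.log(params.getAll("a")); // ["1", "2"]
    console.log(params.toString()); // "a=1&a=2&b=3"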
diff --git a/cli/js/lib.web_assembly.d.ts b/cli/js/lib.web_assembly.d.ts
new file mode 100644
index 000000000..8c357840a
--- /dev/null
+++ b/cli/js/lib.web_assembly.d.ts
@@ -0,0 +1,173 @@
+// This follows the WebIDL at: https://webassembly.github.io/spec/js-api/
+// And the follow-on WebIDL at: https://webassembly.github.io/spec/web-api/
+
+/* eslint-disable @typescript-eslint/no-unused-vars, @typescript-eslint/no-explicit-any */
+
+declare namespace WebAssembly {
+ interface WebAssemblyInstantiatedSource {
+ module: Module;
+ instance: Instance;
+ }
+
+ /** Compiles a `WebAssembly.Module` from WebAssembly binary code. This
+ * function is useful if it is necessary to compile a module before it can
+ * be instantiated (otherwise, the `WebAssembly.instantiate()` function
+ * should be used). */
+ function compile(bufferSource: domTypes.BufferSource): Promise<Module>;
+
+ /** Compiles a `WebAssembly.Module` directly from a streamed underlying
+ * source. This function is useful if it is necessary to compile a module
+ * before it can be instantiated (otherwise, the
+ * `WebAssembly.instantiateStreaming()` function should be used). */
+ function compileStreaming(
+ source: Promise<domTypes.Response>
+ ): Promise<Module>;
+
+ /** Takes the WebAssembly binary code, in the form of a typed array or
+ * `ArrayBuffer`, and performs both compilation and instantiation in one step.
+ * The returned `Promise` resolves to both a compiled `WebAssembly.Module` and
+ * its first `WebAssembly.Instance`. */
+ function instantiate(
+ bufferSource: domTypes.BufferSource,
+ importObject?: object
+ ): Promise<WebAssemblyInstantiatedSource>;
+
+ /** Takes an already-compiled `WebAssembly.Module` and returns a `Promise`
+ * that resolves to an `Instance` of that `Module`. This overload is useful if
+ * the `Module` has already been compiled. */
+ function instantiate(
+ module: Module,
+ importObject?: object
+ ): Promise<Instance>;
+
+ /** Compiles and instantiates a WebAssembly module directly from a streamed
+ * underlying source. This is the most efficient, optimized way to load wasm
+ * code. */
+ function instantiateStreaming(
+ source: Promise<domTypes.Response>,
+ importObject?: object
+ ): Promise<WebAssemblyInstantiatedSource>;
+
+ /** Validates a given typed array of WebAssembly binary code, returning
+ * whether the bytes form a valid wasm module (`true`) or not (`false`). */
+ function validate(bufferSource: domTypes.BufferSource): boolean;
+
+ type ImportExportKind = "function" | "table" | "memory" | "global";
+
+ interface ModuleExportDescriptor {
+ name: string;
+ kind: ImportExportKind;
+ }
+ interface ModuleImportDescriptor {
+ module: string;
+ name: string;
+ kind: ImportExportKind;
+ }
+
+ class Module {
+ constructor(bufferSource: domTypes.BufferSource);
+
+ /** Given a `Module` and string, returns a copy of the contents of all
+ * custom sections in the module with the given string name. */
+ static customSections(
+ moduleObject: Module,
+ sectionName: string
+ ): ArrayBuffer;
+
+ /** Given a `Module`, returns an array containing descriptions of all the
+ * declared exports. */
+ static exports(moduleObject: Module): ModuleExportDescriptor[];
+
+ /** Given a `Module`, returns an array containing descriptions of all the
+ * declared imports. */
+ static imports(moduleObject: Module): ModuleImportDescriptor[];
+ }
+
+ class Instance<T extends object = { [key: string]: any }> {
+ constructor(module: Module, importObject?: object);
+
+ /** An object containing as its members all the functions exported from the
+ * WebAssembly module instance, to allow them to be accessed and used by
+ * JavaScript. */
+ readonly exports: T;
+ }
+
+ interface MemoryDescriptor {
+ initial: number;
+ maximum?: number;
+ }
+
+ class Memory {
+ constructor(descriptor: MemoryDescriptor);
+
+ /** An accessor property that returns the buffer contained in the memory. */
+ readonly buffer: ArrayBuffer;
+
+ /** Increases the size of the memory instance by a specified number of
+ * WebAssembly pages (each one is 64KB in size). */
+ grow(delta: number): number;
+ }
+
+ type TableKind = "anyfunc";
+
+ interface TableDescriptor {
+ element: TableKind;
+ initial: number;
+ maximum?: number;
+ }
+
+ class Table {
+ constructor(descriptor: TableDescriptor);
+
+ /** Returns the length of the table, i.e. the number of elements. */
+ readonly length: number;
+
+ /** Accessor function — gets the element stored at a given index. */
+ get(index: number): (...args: any[]) => any;
+
+ /** Increases the size of the Table instance by a specified number of
+ * elements. */
+ grow(delta: number): number;
+
+ /** Sets an element stored at a given index to a given value. */
+ set(index: number, value: (...args: any[]) => any): void;
+ }
+
+ interface GlobalDescriptor {
+ value: string;
+ mutable?: boolean;
+ }
+
+ /** Represents a global variable instance, accessible from both JavaScript and
+ * importable/exportable across one or more `WebAssembly.Module` instances.
+ * This allows dynamic linking of multiple modules. */
+ class Global {
+ constructor(descriptor: GlobalDescriptor, value?: any);
+
+ /** Old-style method that returns the value contained inside the global
+ * variable. */
+ valueOf(): any;
+
+ /** The value contained inside the global variable — this can be used to
+ * directly set and get the global's value. */
+ value: any;
+ }
+
+ /** Indicates an error during WebAssembly decoding or validation */
+ class CompileError extends Error {
+ constructor(message: string, fileName?: string, lineNumber?: string);
+ }
+
+ /** Indicates an error during module instantiation (besides traps from the
+ * start function). */
+ class LinkError extends Error {
+ constructor(message: string, fileName?: string, lineNumber?: string);
+ }
+
+ /** Is thrown whenever WebAssembly specifies a trap. */
+ class RuntimeError extends Error {
+ constructor(message: string, fileName?: string, lineNumber?: string);
+ }
+}
+
+/* eslint-enable @typescript-eslint/no-unused-vars, @typescript-eslint/no-explicit-any */
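A small sketch against these declarations; it assumes the canonical 8-byte preamble (the "\0asm" magic number plus version 1) forms the smallest valid, empty module, so every query below returns an empty result:

    // "\0asm" magic number followed by version 1: an empty module.
    const emptyModule = new Uint8Array([
      0x00, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00
    ]);
    console.log(WebAssembly.validate(emptyModule)); // true

    WebAssembly.instantiate(emptyModule).then(({ module, instance }) => {
      console.log(WebAssembly.Module.exports(module)); // []
      console.log(WebAssembly.Module.imports(module)); // []
      console.log(Object.keys(instance.exports)); // []
    });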
diff --git a/cli/js/link.ts b/cli/js/link.ts
new file mode 100644
index 000000000..a6f732926
--- /dev/null
+++ b/cli/js/link.ts
@@ -0,0 +1,19 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+
+/** Synchronously creates `newname` as a hard link to `oldname`.
+ *
+ * Deno.linkSync("old/name", "new/name");
+ */
+export function linkSync(oldname: string, newname: string): void {
+ sendSync(dispatch.OP_LINK, { oldname, newname });
+}
+
+/** Creates `newname` as a hard link to `oldname`.
+ *
+ * await Deno.link("old/name", "new/name");
+ */
+export async function link(oldname: string, newname: string): Promise<void> {
+ await sendAsync(dispatch.OP_LINK, { oldname, newname });
+}
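Because both names refer to the same underlying file after linking, a write through either path is visible through the other; the test file below verifies this in detail. A condensed sketch with hypothetical paths (run with --allow-read --allow-write):

    const dir = Deno.makeTempDirSync();
    const original = dir + "/original.txt";
    const hardlink = dir + "/hardlink.txt";
    Deno.writeFileSync(original, new TextEncoder().encode("hello"));
    Deno.linkSync(original, hardlink);
    // Writing through the link is observed when reading the original name.
    Deno.writeFileSync(hardlink, new TextEncoder().encode("updated"));
    console.log(new TextDecoder().decode(Deno.readFileSync(original))); // "updated"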
diff --git a/cli/js/link_test.ts b/cli/js/link_test.ts
new file mode 100644
index 000000000..9425e6eab
--- /dev/null
+++ b/cli/js/link_test.ts
@@ -0,0 +1,115 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert, assertEquals } from "./test_util.ts";
+
+testPerm({ read: true, write: true }, function linkSyncSuccess(): void {
+ const testDir = Deno.makeTempDirSync();
+ const oldData = "Hardlink";
+ const oldName = testDir + "/oldname";
+ const newName = testDir + "/newname";
+ Deno.writeFileSync(oldName, new TextEncoder().encode(oldData));
+ // Create the hard link.
+ Deno.linkSync(oldName, newName);
+ // We should expect reading the same content.
+ const newData = new TextDecoder().decode(Deno.readFileSync(newName));
+ assertEquals(oldData, newData);
+ // Writing to newname also affects oldname.
+ const newData2 = "Modified";
+ Deno.writeFileSync(newName, new TextEncoder().encode(newData2));
+ assertEquals(newData2, new TextDecoder().decode(Deno.readFileSync(oldName)));
+ // Writing to oldname also affects newname.
+ const newData3 = "ModifiedAgain";
+ Deno.writeFileSync(oldName, new TextEncoder().encode(newData3));
+ assertEquals(newData3, new TextDecoder().decode(Deno.readFileSync(newName)));
+ // Remove oldname. File still accessible through newname.
+ Deno.removeSync(oldName);
+ const newNameStat = Deno.statSync(newName);
+ assert(newNameStat.isFile());
+ assert(!newNameStat.isSymlink()); // Not a symlink.
+ assertEquals(newData3, new TextDecoder().decode(Deno.readFileSync(newName)));
+});
+
+testPerm({ read: true, write: true }, function linkSyncExists(): void {
+ const testDir = Deno.makeTempDirSync();
+ const oldName = testDir + "/oldname";
+ const newName = testDir + "/newname";
+ Deno.writeFileSync(oldName, new TextEncoder().encode("oldName"));
+ // newname is already created.
+ Deno.writeFileSync(newName, new TextEncoder().encode("newName"));
+
+ let err;
+ try {
+ Deno.linkSync(oldName, newName);
+ } catch (e) {
+ err = e;
+ }
+ assert(!!err);
+ assertEquals(err.kind, Deno.ErrorKind.AlreadyExists);
+ assertEquals(err.name, "AlreadyExists");
+});
+
+testPerm({ read: true, write: true }, function linkSyncNotFound(): void {
+ const testDir = Deno.makeTempDirSync();
+ const oldName = testDir + "/oldname";
+ const newName = testDir + "/newname";
+
+ let err;
+ try {
+ Deno.linkSync(oldName, newName);
+ } catch (e) {
+ err = e;
+ }
+ assert(!!err);
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ read: false, write: true }, function linkSyncReadPerm(): void {
+ let err;
+ try {
+ Deno.linkSync("oldbaddir", "newbaddir");
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+testPerm({ read: true, write: false }, function linkSyncWritePerm(): void {
+ let err;
+ try {
+ Deno.linkSync("oldbaddir", "newbaddir");
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+testPerm({ read: true, write: true }, async function linkSuccess(): Promise<
+ void
+> {
+ const testDir = Deno.makeTempDirSync();
+ const oldData = "Hardlink";
+ const oldName = testDir + "/oldname";
+ const newName = testDir + "/newname";
+ Deno.writeFileSync(oldName, new TextEncoder().encode(oldData));
+ // Create the hard link.
+ await Deno.link(oldName, newName);
+ // We should expect reading the same content.
+ const newData = new TextDecoder().decode(Deno.readFileSync(newName));
+ assertEquals(oldData, newData);
+ // Writing to newname also affects oldname.
+ const newData2 = "Modified";
+ Deno.writeFileSync(newName, new TextEncoder().encode(newData2));
+ assertEquals(newData2, new TextDecoder().decode(Deno.readFileSync(oldName)));
+ // Writing to oldname also affects newname.
+ const newData3 = "ModifiedAgain";
+ Deno.writeFileSync(oldName, new TextEncoder().encode(newData3));
+ assertEquals(newData3, new TextDecoder().decode(Deno.readFileSync(newName)));
+ // Remove oldname. File still accessible through newname.
+ Deno.removeSync(oldName);
+ const newNameStat = Deno.statSync(newName);
+ assert(newNameStat.isFile());
+ assert(!newNameStat.isSymlink()); // Not a symlink.
+ assertEquals(newData3, new TextDecoder().decode(Deno.readFileSync(newName)));
+});
diff --git a/cli/js/location.ts b/cli/js/location.ts
new file mode 100644
index 000000000..d495f99ca
--- /dev/null
+++ b/cli/js/location.ts
@@ -0,0 +1,52 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { URL } from "./url.ts";
+import { notImplemented } from "./util.ts";
+import { Location } from "./dom_types.ts";
+import { window } from "./window.ts";
+
+export class LocationImpl implements Location {
+ constructor(url: string) {
+ const u = new URL(url);
+ this.url = u;
+ this.hash = u.hash;
+ this.host = u.host;
+ this.href = u.href;
+ this.hostname = u.hostname;
+ this.origin = u.protocol + "//" + u.host;
+ this.pathname = u.pathname;
+ this.protocol = u.protocol;
+ this.port = u.port;
+ this.search = u.search;
+ }
+
+ private url: URL;
+
+ toString(): string {
+ return this.url.toString();
+ }
+
+ readonly ancestorOrigins: string[] = [];
+ hash: string;
+ host: string;
+ hostname: string;
+ href: string;
+ readonly origin: string;
+ pathname: string;
+ port: string;
+ protocol: string;
+ search: string;
+ assign(_url: string): void {
+ throw notImplemented();
+ }
+ reload(): void {
+ throw notImplemented();
+ }
+ replace(_url: string): void {
+ throw notImplemented();
+ }
+}
+
+export function setLocation(url: string): void {
+ window.location = new LocationImpl(url);
+ Object.freeze(window.location);
+}
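`setLocation` is invoked once at startup with the main module URL (see main.ts later in this diff), after which user code can only read the frozen `window.location`. A sketch of that read side, using a made-up module URL:

    // e.g. after `deno run https://example.com/app/main.ts?debug=1`
    const { href, protocol, host, pathname, search } = window.location;
    console.log(href);     // "https://example.com/app/main.ts?debug=1"
    console.log(protocol); // "https:"
    console.log(host);     // "example.com"
    console.log(pathname); // "/app/main.ts"
    console.log(search);   // "?debug=1"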
diff --git a/cli/js/location_test.ts b/cli/js/location_test.ts
new file mode 100644
index 000000000..c8daab16d
--- /dev/null
+++ b/cli/js/location_test.ts
@@ -0,0 +1,8 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assert } from "./test_util.ts";
+
+test(function locationBasic(): void {
+ // location example: file:///Users/rld/src/deno/js/unit_tests.ts
+ console.log("location", window.location.toString());
+ assert(window.location.toString().endsWith("unit_tests.ts"));
+});
diff --git a/cli/js/main.ts b/cli/js/main.ts
new file mode 100644
index 000000000..09e7ce453
--- /dev/null
+++ b/cli/js/main.ts
@@ -0,0 +1,41 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import "./globals.ts";
+
+import { assert, log } from "./util.ts";
+import * as os from "./os.ts";
+import { args } from "./deno.ts";
+import { setPrepareStackTrace } from "./error_stack.ts";
+import { replLoop } from "./repl.ts";
+import { setVersions } from "./version.ts";
+import { window } from "./window.ts";
+import { setLocation } from "./location.ts";
+import { setBuildInfo } from "./build.ts";
+import { setSignals } from "./process.ts";
+
+function denoMain(preserveDenoNamespace = true, name?: string): void {
+ const s = os.start(preserveDenoNamespace, name);
+
+ setBuildInfo(s.os, s.arch);
+ setSignals();
+ setVersions(s.denoVersion, s.v8Version, s.tsVersion);
+
+ setPrepareStackTrace(Error);
+
+ if (s.mainModule) {
+ assert(s.mainModule.length > 0);
+ setLocation(s.mainModule);
+ }
+
+ log("cwd", s.cwd);
+
+ for (let i = 1; i < s.argv.length; i++) {
+ args.push(s.argv[i]);
+ }
+ log("args", args);
+ Object.freeze(args);
+
+ if (!s.mainModule) {
+ replLoop();
+ }
+}
+window["denoMain"] = denoMain;
diff --git a/cli/js/make_temp_dir.ts b/cli/js/make_temp_dir.ts
new file mode 100644
index 000000000..14494b5da
--- /dev/null
+++ b/cli/js/make_temp_dir.ts
@@ -0,0 +1,35 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+
+export interface MakeTempDirOptions {
+ dir?: string;
+ prefix?: string;
+ suffix?: string;
+}
+
+/** makeTempDirSync is the synchronous version of `makeTempDir`.
+ *
+ * const tempDirName0 = Deno.makeTempDirSync();
+ * const tempDirName1 = Deno.makeTempDirSync({ prefix: 'my_temp' });
+ */
+export function makeTempDirSync(options: MakeTempDirOptions = {}): string {
+ return sendSync(dispatch.OP_MAKE_TEMP_DIR, options);
+}
+
+/** makeTempDir creates a new temporary directory in the directory `dir`, its
+ * name beginning with `prefix` and ending with `suffix`.
+ * It returns the full path to the newly created directory.
+ * If `dir` is unspecified, makeTempDir uses the default directory for temporary
+ * files. Multiple programs calling makeTempDir simultaneously will not choose the
+ * same directory. It is the caller's responsibility to remove the directory
+ * when no longer needed.
+ *
+ * const tempDirName0 = await Deno.makeTempDir();
+ * const tempDirName1 = await Deno.makeTempDir({ prefix: 'my_temp' });
+ */
+export async function makeTempDir(
+ options: MakeTempDirOptions = {}
+): Promise<string> {
+ return await sendAsync(dispatch.OP_MAKE_TEMP_DIR, options);
+}
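As the doc comment notes, removing the directory is the caller's responsibility; a minimal sketch pairing creation with cleanup (it assumes the `Deno.remove` API with a `recursive` option defined elsewhere in this tree; run with --allow-write):

    async function withTempDir(): Promise<void> {
      const workDir = await Deno.makeTempDir({ prefix: "build_" });
      try {
        await Deno.writeFile(workDir + "/artifact.bin", new Uint8Array([1, 2, 3]));
        // ... use the directory ...
      } finally {
        // Temporary directories are not cleaned up automatically.
        await Deno.remove(workDir, { recursive: true });
      }
    }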
diff --git a/cli/js/make_temp_dir_test.ts b/cli/js/make_temp_dir_test.ts
new file mode 100644
index 000000000..aa44b65c5
--- /dev/null
+++ b/cli/js/make_temp_dir_test.ts
@@ -0,0 +1,66 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, testPerm, assert, assertEquals } from "./test_util.ts";
+
+testPerm({ write: true }, function makeTempDirSyncSuccess(): void {
+ const dir1 = Deno.makeTempDirSync({ prefix: "hello", suffix: "world" });
+ const dir2 = Deno.makeTempDirSync({ prefix: "hello", suffix: "world" });
+ // Check that both dirs are different.
+ assert(dir1 !== dir2);
+ for (const dir of [dir1, dir2]) {
+ // Check that the prefix and suffix are applied.
+ const lastPart = dir.replace(/^.*[\\\/]/, "");
+ assert(lastPart.startsWith("hello"));
+ assert(lastPart.endsWith("world"));
+ }
+ // Check that the `dir` option works.
+ const dir3 = Deno.makeTempDirSync({ dir: dir1 });
+ assert(dir3.startsWith(dir1));
+ assert(/^[\\\/]/.test(dir3.slice(dir1.length)));
+ // Check that creating a temp dir inside a nonexistent directory fails.
+ let err;
+ try {
+ Deno.makeTempDirSync({ dir: "/baddir" });
+ } catch (err_) {
+ err = err_;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+test(function makeTempDirSyncPerm(): void {
+ // makeTempDirSync should require write permissions (for now).
+ let err;
+ try {
+ Deno.makeTempDirSync({ dir: "/baddir" });
+ } catch (err_) {
+ err = err_;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+testPerm({ write: true }, async function makeTempDirSuccess(): Promise<void> {
+ const dir1 = await Deno.makeTempDir({ prefix: "hello", suffix: "world" });
+ const dir2 = await Deno.makeTempDir({ prefix: "hello", suffix: "world" });
+ // Check that both dirs are different.
+ assert(dir1 !== dir2);
+ for (const dir of [dir1, dir2]) {
+ // Check that the prefix and suffix are applied.
+ const lastPart = dir.replace(/^.*[\\\/]/, "");
+ assert(lastPart.startsWith("hello"));
+ assert(lastPart.endsWith("world"));
+ }
+ // Check that the `dir` option works.
+ const dir3 = await Deno.makeTempDir({ dir: dir1 });
+ assert(dir3.startsWith(dir1));
+ assert(/^[\\\/]/.test(dir3.slice(dir1.length)));
+ // Check that creating a temp dir inside a nonexistent directory fails.
+ let err;
+ try {
+ await Deno.makeTempDir({ dir: "/baddir" });
+ } catch (err_) {
+ err = err_;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
diff --git a/cli/js/metrics.ts b/cli/js/metrics.ts
new file mode 100644
index 000000000..b32c29789
--- /dev/null
+++ b/cli/js/metrics.ts
@@ -0,0 +1,28 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as dispatch from "./dispatch.ts";
+import { sendSync } from "./dispatch_json.ts";
+
+export interface Metrics {
+ opsDispatched: number;
+ opsCompleted: number;
+ bytesSentControl: number;
+ bytesSentData: number;
+ bytesReceived: number;
+}
+
+/** Receive metrics from the privileged side of Deno.
+ *
+ * > console.table(Deno.metrics())
+ * ┌──────────────────┬────────┐
+ * │ (index) │ Values │
+ * ├──────────────────┼────────┤
+ * │ opsDispatched │ 9 │
+ * │ opsCompleted │ 9 │
+ * │ bytesSentControl │ 504 │
+ * │ bytesSentData │ 0 │
+ * │ bytesReceived │ 856 │
+ * └──────────────────┴────────┘
+ */
+export function metrics(): Metrics {
+ return sendSync(dispatch.OP_METRICS);
+}
diff --git a/cli/js/metrics_test.ts b/cli/js/metrics_test.ts
new file mode 100644
index 000000000..de41a0cb1
--- /dev/null
+++ b/cli/js/metrics_test.ts
@@ -0,0 +1,46 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, testPerm, assert } from "./test_util.ts";
+
+test(async function metrics(): Promise<void> {
+ const m1 = Deno.metrics();
+ assert(m1.opsDispatched > 0);
+ assert(m1.opsCompleted > 0);
+ assert(m1.bytesSentControl > 0);
+ assert(m1.bytesSentData >= 0);
+ assert(m1.bytesReceived > 0);
+
+ // Write to stdout to ensure a "data" message gets sent instead of just
+ // control messages.
+ const dataMsg = new Uint8Array([41, 42, 43]);
+ await Deno.stdout.write(dataMsg);
+
+ const m2 = Deno.metrics();
+ assert(m2.opsDispatched > m1.opsDispatched);
+ assert(m2.opsCompleted > m1.opsCompleted);
+ assert(m2.bytesSentControl > m1.bytesSentControl);
+ assert(m2.bytesSentData >= m1.bytesSentData + dataMsg.byteLength);
+ assert(m2.bytesReceived > m1.bytesReceived);
+});
+
+testPerm({ write: true }, function metricsUpdatedIfNoResponseSync(): void {
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+
+ const data = new Uint8Array([41, 42, 43]);
+ Deno.writeFileSync(filename, data, { perm: 0o666 });
+
+ const metrics = Deno.metrics();
+ assert(metrics.opsDispatched === metrics.opsCompleted);
+});
+
+testPerm(
+ { write: true },
+ async function metricsUpdatedIfNoResponseAsync(): Promise<void> {
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+
+ const data = new Uint8Array([41, 42, 43]);
+ await Deno.writeFile(filename, data, { perm: 0o666 });
+
+ const metrics = Deno.metrics();
+ assert(metrics.opsDispatched === metrics.opsCompleted);
+ }
+);
diff --git a/cli/js/mixins/dom_iterable.ts b/cli/js/mixins/dom_iterable.ts
new file mode 100644
index 000000000..bbd1905ce
--- /dev/null
+++ b/cli/js/mixins/dom_iterable.ts
@@ -0,0 +1,82 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+// eslint-disable-next-line @typescript-eslint/no-unused-vars
+import { DomIterable } from "../dom_types.ts";
+import { window } from "../window.ts";
+import { requiredArguments } from "../util.ts";
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+type Constructor<T = {}> = new (...args: any[]) => T;
+
+/** Mixes DOM iterable methods into a base class. Assumes the base class has
+ * a private iterable data store located at
+ * `[dataSymbol]`.
+ * TODO Don't expose DomIterableMixin from "deno" namespace.
+ */
+export function DomIterableMixin<K, V, TBase extends Constructor>(
+ Base: TBase,
+ dataSymbol: symbol
+): TBase & Constructor<DomIterable<K, V>> {
+ // we have to cast `this` as `any` because there is no way to describe the
+ // Base class in a way where the Symbol `dataSymbol` is defined. So the
+ // runtime code works, but we do lose a little bit of type safety.
+
+ // Additionally, we cannot use .keys() or .values(), since the internal
+ // slot differs in type: some classes hold a Map, whose Symbol.iterator
+ // yields [K, V], while others hold an Array, which yields its elements
+ // (here also [K, V], since they are arrays of tuples).
+
+ const DomIterable = class extends Base {
+ *entries(): IterableIterator<[K, V]> {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ for (const entry of (this as any)[dataSymbol]) {
+ yield entry;
+ }
+ }
+
+ *keys(): IterableIterator<K> {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ for (const [key] of (this as any)[dataSymbol]) {
+ yield key;
+ }
+ }
+
+ *values(): IterableIterator<V> {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ for (const [, value] of (this as any)[dataSymbol]) {
+ yield value;
+ }
+ }
+
+ forEach(
+ callbackfn: (value: V, key: K, parent: this) => void,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ thisArg?: any
+ ): void {
+ requiredArguments(
+ `${this.constructor.name}.forEach`,
+ arguments.length,
+ 1
+ );
+ callbackfn = callbackfn.bind(thisArg == null ? window : Object(thisArg));
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ for (const [key, value] of (this as any)[dataSymbol]) {
+ callbackfn(value, key, this);
+ }
+ }
+
+ *[Symbol.iterator](): IterableIterator<[K, V]> {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ for (const entry of (this as any)[dataSymbol]) {
+ yield entry;
+ }
+ }
+ };
+
+ // we want the mixin class to expose the Base class's name as its own name.
+ Object.defineProperty(DomIterable, "name", {
+ value: Base.name,
+ configurable: true
+ });
+
+ return DomIterable;
+}
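A condensed application of the mixin, mirroring the `setup()` helper in the test file that follows: the base class keeps its entries in a Map stored under a private symbol, and the mixin layers the DOM iteration protocol on top (the cast to `any` matches how the tests reach the unexported helper on the Deno namespace):

    const data = Symbol("data");

    class Pairs {
      private [data] = new Map<string, number>([["a", 1], ["b", 2]]);
    }

    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const IterablePairs = (Deno as any).DomIterableMixin(Pairs, data);

    const pairs = new IterablePairs();
    console.log([...pairs.keys()]);    // ["a", "b"]
    console.log([...pairs.entries()]); // [["a", 1], ["b", 2]]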
diff --git a/cli/js/mixins/dom_iterable_test.ts b/cli/js/mixins/dom_iterable_test.ts
new file mode 100644
index 000000000..4c84fa68e
--- /dev/null
+++ b/cli/js/mixins/dom_iterable_test.ts
@@ -0,0 +1,79 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assert, assertEquals } from "../test_util.ts";
+
+// eslint-disable-next-line @typescript-eslint/explicit-function-return-type
+function setup() {
+ const dataSymbol = Symbol("data symbol");
+ class Base {
+ private [dataSymbol] = new Map<string, number>();
+
+ constructor(
+ data: Array<[string, number]> | IterableIterator<[string, number]>
+ ) {
+ for (const [key, value] of data) {
+ this[dataSymbol].set(key, value);
+ }
+ }
+ }
+
+ return {
+ Base,
+ // This uses an internal API we don't want published as types, so we have
+ // to cast to any to "trick" TypeScript
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ DomIterable: (Deno as any).DomIterableMixin(Base, dataSymbol)
+ };
+}
+
+test(function testDomIterable(): void {
+ const { DomIterable, Base } = setup();
+
+ const fixture: Array<[string, number]> = [["foo", 1], ["bar", 2]];
+
+ const domIterable = new DomIterable(fixture);
+
+ assertEquals(Array.from(domIterable.entries()), fixture);
+ assertEquals(Array.from(domIterable.values()), [1, 2]);
+ assertEquals(Array.from(domIterable.keys()), ["foo", "bar"]);
+
+ let result: Array<[string, number]> = [];
+ for (const [key, value] of domIterable) {
+ assert(key != null);
+ assert(value != null);
+ result.push([key, value]);
+ }
+ assertEquals(fixture, result);
+
+ result = [];
+ const scope = {};
+ function callback(value, key, parent): void {
+ assertEquals(parent, domIterable);
+ assert(key != null);
+ assert(value != null);
+ assert(this === scope);
+ result.push([key, value]);
+ }
+ domIterable.forEach(callback, scope);
+ assertEquals(fixture, result);
+
+ assertEquals(DomIterable.name, Base.name);
+});
+
+test(function testDomIterableScope(): void {
+ const { DomIterable } = setup();
+
+ const domIterable = new DomIterable([["foo", 1]]);
+
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ function checkScope(thisArg: any, expected: any): void {
+ function callback(): void {
+ assertEquals(this, expected);
+ }
+ domIterable.forEach(callback, thisArg);
+ }
+
+ checkScope(0, Object(0));
+ checkScope("", Object(""));
+ checkScope(null, window);
+ checkScope(undefined, window);
+});
diff --git a/cli/js/mkdir.ts b/cli/js/mkdir.ts
new file mode 100644
index 000000000..bc09ba358
--- /dev/null
+++ b/cli/js/mkdir.ts
@@ -0,0 +1,33 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+
+/** Creates a new directory with the specified path synchronously.
+ * If `recursive` is set to true, nested directories will be created (also known
+ * as "mkdir -p").
+ * `mode` sets permission bits (before umask) on UNIX and does nothing on
+ * Windows.
+ *
+ * Deno.mkdirSync("new_dir");
+ * Deno.mkdirSync("nested/directories", true);
+ */
+export function mkdirSync(path: string, recursive = false, mode = 0o777): void {
+ sendSync(dispatch.OP_MKDIR, { path, recursive, mode });
+}
+
+/** Creates a new directory with the specified path.
+ * If `recursive` is set to true, nested directories will be created (also known
+ * as "mkdir -p").
+ * `mode` sets permission bits (before umask) on UNIX and does nothing on
+ * Windows.
+ *
+ * await Deno.mkdir("new_dir");
+ * await Deno.mkdir("nested/directories", true);
+ */
+export async function mkdir(
+ path: string,
+ recursive = false,
+ mode = 0o777
+): Promise<void> {
+ await sendAsync(dispatch.OP_MKDIR, { path, recursive, mode });
+}
diff --git a/cli/js/mkdir_test.ts b/cli/js/mkdir_test.ts
new file mode 100644
index 000000000..9e97265f0
--- /dev/null
+++ b/cli/js/mkdir_test.ts
@@ -0,0 +1,66 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert, assertEquals } from "./test_util.ts";
+
+testPerm({ read: true, write: true }, function mkdirSyncSuccess(): void {
+ const path = Deno.makeTempDirSync() + "/dir";
+ Deno.mkdirSync(path);
+ const pathInfo = Deno.statSync(path);
+ assert(pathInfo.isDirectory());
+});
+
+testPerm({ read: true, write: true }, function mkdirSyncMode(): void {
+ const path = Deno.makeTempDirSync() + "/dir";
+ Deno.mkdirSync(path, false, 0o755); // no perm for x
+ const pathInfo = Deno.statSync(path);
+ if (pathInfo.mode !== null) {
+ // Skip windows
+ assertEquals(pathInfo.mode & 0o777, 0o755);
+ }
+});
+
+testPerm({ write: false }, function mkdirSyncPerm(): void {
+ let err;
+ try {
+ Deno.mkdirSync("/baddir");
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+testPerm({ read: true, write: true }, async function mkdirSuccess(): Promise<
+ void
+> {
+ const path = Deno.makeTempDirSync() + "/dir";
+ await Deno.mkdir(path);
+ const pathInfo = Deno.statSync(path);
+ assert(pathInfo.isDirectory());
+});
+
+testPerm({ write: true }, function mkdirErrIfExists(): void {
+ let err;
+ try {
+ Deno.mkdirSync(".");
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.AlreadyExists);
+ assertEquals(err.name, "AlreadyExists");
+});
+
+testPerm({ read: true, write: true }, function mkdirSyncRecursive(): void {
+ const path = Deno.makeTempDirSync() + "/nested/directory";
+ Deno.mkdirSync(path, true);
+ const pathInfo = Deno.statSync(path);
+ assert(pathInfo.isDirectory());
+});
+
+testPerm({ read: true, write: true }, async function mkdirRecursive(): Promise<
+ void
+> {
+ const path = Deno.makeTempDirSync() + "/nested/directory";
+ await Deno.mkdir(path, true);
+ const pathInfo = Deno.statSync(path);
+ assert(pathInfo.isDirectory());
+});
diff --git a/cli/js/mock_builtin.js b/cli/js/mock_builtin.js
new file mode 100644
index 000000000..9c6730d69
--- /dev/null
+++ b/cli/js/mock_builtin.js
@@ -0,0 +1,2 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+export default undefined;
diff --git a/cli/js/net.ts b/cli/js/net.ts
new file mode 100644
index 000000000..a7ad2b73c
--- /dev/null
+++ b/cli/js/net.ts
@@ -0,0 +1,205 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { EOF, Reader, Writer, Closer } from "./io.ts";
+import { notImplemented } from "./util.ts";
+import { read, write, close } from "./files.ts";
+import * as dispatch from "./dispatch.ts";
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+
+export type Transport = "tcp";
+// TODO support other types:
+// export type Transport = "tcp" | "tcp4" | "tcp6" | "unix" | "unixpacket";
+
+// TODO(ry) Replace 'address' with 'hostname' and 'port', similar to DialOptions
+// and ListenOptions.
+export interface Addr {
+ transport: Transport;
+ address: string;
+}
+
+/** A Listener is a generic transport listener for stream-oriented protocols. */
+export interface Listener extends AsyncIterator<Conn> {
+ /** Waits for and resolves to the next connection to the `Listener`. */
+ accept(): Promise<Conn>;
+
+ /** Close closes the listener. Any pending accept promises will be rejected
+ * with errors.
+ */
+ close(): void;
+
+ /** Return the address of the `Listener`. */
+ addr(): Addr;
+
+ [Symbol.asyncIterator](): AsyncIterator<Conn>;
+}
+
+enum ShutdownMode {
+ // See http://man7.org/linux/man-pages/man2/shutdown.2.html
+ // Corresponding to SHUT_RD, SHUT_WR, SHUT_RDWR
+ Read = 0,
+ Write,
+ ReadWrite // unused
+}
+
+function shutdown(rid: number, how: ShutdownMode): void {
+ sendSync(dispatch.OP_SHUTDOWN, { rid, how });
+}
+
+export class ConnImpl implements Conn {
+ constructor(
+ readonly rid: number,
+ readonly remoteAddr: string,
+ readonly localAddr: string
+ ) {}
+
+ write(p: Uint8Array): Promise<number> {
+ return write(this.rid, p);
+ }
+
+ read(p: Uint8Array): Promise<number | EOF> {
+ return read(this.rid, p);
+ }
+
+ close(): void {
+ close(this.rid);
+ }
+
+ /** closeRead shuts down (shutdown(2)) the reading side of the TCP connection.
+ * Most callers should just use close().
+ */
+ closeRead(): void {
+ shutdown(this.rid, ShutdownMode.Read);
+ }
+
+ /** closeWrite shuts down (shutdown(2)) the writing side of the TCP
+ * connection. Most callers should just use close().
+ */
+ closeWrite(): void {
+ shutdown(this.rid, ShutdownMode.Write);
+ }
+}
+
+class ListenerImpl implements Listener {
+ constructor(
+ readonly rid: number,
+ private transport: Transport,
+ private localAddr: string
+ ) {}
+
+ async accept(): Promise<Conn> {
+ const res = await sendAsync(dispatch.OP_ACCEPT, { rid: this.rid });
+ return new ConnImpl(res.rid, res.remoteAddr, res.localAddr);
+ }
+
+ close(): void {
+ close(this.rid);
+ }
+
+ addr(): Addr {
+ return {
+ transport: this.transport,
+ address: this.localAddr
+ };
+ }
+
+ async next(): Promise<IteratorResult<Conn>> {
+ return {
+ done: false,
+ value: await this.accept()
+ };
+ }
+
+ [Symbol.asyncIterator](): AsyncIterator<Conn> {
+ return this;
+ }
+}
+
+export interface Conn extends Reader, Writer, Closer {
+ /** The local address of the connection. */
+ localAddr: string;
+ /** The remote address of the connection. */
+ remoteAddr: string;
+ /** The resource ID of the connection. */
+ rid: number;
+ /** Shuts down (`shutdown(2)`) the reading side of the TCP connection. Most
+ * callers should just use `close()`.
+ */
+ closeRead(): void;
+ /** Shuts down (`shutdown(2)`) the writing side of the TCP connection. Most
+ * callers should just use `close()`.
+ */
+ closeWrite(): void;
+}
+
+export interface ListenOptions {
+ port: number;
+ hostname?: string;
+ transport?: Transport;
+}
+
+/** Listen announces on the local transport address.
+ *
+ * @param options
+ * @param options.port The port to connect to. (Required.)
+ * @param options.hostname A literal IP address or host name that can be
+ * resolved to an IP address. If not specified, defaults to 0.0.0.0
+ * @param options.transport Defaults to "tcp". Later we plan to add "tcp4",
+ * "tcp6", "udp", "udp4", "udp6", "ip", "ip4", "ip6", "unix", "unixgram" and
+ * "unixpacket".
+ *
+ * Examples:
+ *
+ * listen({ port: 80 })
+ * listen({ hostname: "192.0.2.1", port: 80 })
+ * listen({ hostname: "[2001:db8::1]", port: 80 });
+ * listen({ hostname: "golang.org", port: 80, transport: "tcp" })
+ */
+export function listen(options: ListenOptions): Listener {
+ const hostname = options.hostname || "0.0.0.0";
+ const transport = options.transport || "tcp";
+ const res = sendSync(dispatch.OP_LISTEN, {
+ hostname,
+ port: options.port,
+ transport
+ });
+ return new ListenerImpl(res.rid, transport, res.localAddr);
+}
+
+export interface DialOptions {
+ port: number;
+ hostname?: string;
+ transport?: Transport;
+}
+
+/** Dial connects to the address on the named transport.
+ *
+ * @param options
+ * @param options.port The port to connect to. (Required.)
+ * @param options.hostname A literal IP address or host name that can be
+ * resolved to an IP address. If not specified, defaults to 127.0.0.1
+ * @param options.transport Defaults to "tcp". Later we plan to add "tcp4",
+ * "tcp6", "udp", "udp4", "udp6", "ip", "ip4", "ip6", "unix", "unixgram" and
+ * "unixpacket".
+ *
+ * Examples:
+ *
+ * dial({ port: 80 })
+ * dial({ hostname: "192.0.2.1", port: 80 })
+ * dial({ hostname: "[2001:db8::1]", port: 80 });
+ * dial({ hostname: "golang.org", port: 80, transport: "tcp" })
+ */
+export async function dial(options: DialOptions): Promise<Conn> {
+ const res = await sendAsync(dispatch.OP_DIAL, {
+ hostname: options.hostname || "127.0.0.1",
+ port: options.port,
+ transport: options.transport || "tcp"
+ });
+ return new ConnImpl(res.rid, res.remoteAddr!, res.localAddr!);
+}
+
+/** **RESERVED** */
+export async function connect(
+ _transport: Transport,
+ _address: string
+): Promise<Conn> {
+ return notImplemented();
+}
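The `Listener` interface above also implements the async-iterator protocol, though the test covering it is still disabled below ("TODO(ry) Re-enable this test."). A sketch of the intended accept-loop shape, based only on the declared interface and an arbitrary port (run with --allow-net):

    async function oneConnection(): Promise<void> {
      const listener = Deno.listen({ port: 4600 });

      // Iterating the listener awaits accept() for each incoming connection.
      const server = (async (): Promise<void> => {
        for await (const conn of listener) {
          await conn.write(new Uint8Array([1, 2, 3]));
          conn.close();
          break; // handle a single connection in this sketch
        }
      })();

      const conn = await Deno.dial({ hostname: "127.0.0.1", port: 4600 });
      const buf = new Uint8Array(1024);
      console.log(await conn.read(buf)); // 3 (bytes read)
      conn.close();
      listener.close();
      await server;
    }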
diff --git a/cli/js/net_test.ts b/cli/js/net_test.ts
new file mode 100644
index 000000000..33f4f7d07
--- /dev/null
+++ b/cli/js/net_test.ts
@@ -0,0 +1,229 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert, assertEquals } from "./test_util.ts";
+
+testPerm({ net: true }, function netListenClose(): void {
+ const listener = Deno.listen({ hostname: "127.0.0.1", port: 4500 });
+ const addr = listener.addr();
+ assertEquals(addr.transport, "tcp");
+ // TODO(ry) Replace 'address' with 'hostname' and 'port', similar to
+ // DialOptions and ListenOptions.
+ assertEquals(addr.address, "127.0.0.1:4500");
+ listener.close();
+});
+
+testPerm({ net: true }, async function netCloseWhileAccept(): Promise<void> {
+ const listener = Deno.listen({ port: 4501 });
+ const p = listener.accept();
+ listener.close();
+ let err;
+ try {
+ await p;
+ } catch (e) {
+ err = e;
+ }
+ assert(!!err);
+ assertEquals(err.kind, Deno.ErrorKind.Other);
+ assertEquals(err.message, "Listener has been closed");
+});
+
+testPerm({ net: true }, async function netConcurrentAccept(): Promise<void> {
+ const listener = Deno.listen({ port: 4502 });
+ let acceptErrCount = 0;
+ const checkErr = (e): void => {
+ assertEquals(e.kind, Deno.ErrorKind.Other);
+ if (e.message === "Listener has been closed") {
+ assertEquals(acceptErrCount, 1);
+ } else if (e.message === "Another accept task is ongoing") {
+ acceptErrCount++;
+ } else {
+ throw new Error("Unexpected error message");
+ }
+ };
+ const p = listener.accept().catch(checkErr);
+ const p1 = listener.accept().catch(checkErr);
+ await Promise.race([p, p1]);
+ listener.close();
+ await [p, p1];
+ assertEquals(acceptErrCount, 1);
+});
+
+testPerm({ net: true }, async function netDialListen(): Promise<void> {
+ const listener = Deno.listen({ port: 4500 });
+ listener.accept().then(
+ async (conn): Promise<void> => {
+ assert(conn.remoteAddr != null);
+ assertEquals(conn.localAddr, "127.0.0.1:4500");
+ await conn.write(new Uint8Array([1, 2, 3]));
+ conn.close();
+ }
+ );
+ const conn = await Deno.dial({ hostname: "127.0.0.1", port: 4500 });
+ assertEquals(conn.remoteAddr, "127.0.0.1:4500");
+ assert(conn.localAddr != null);
+ const buf = new Uint8Array(1024);
+ const readResult = await conn.read(buf);
+ assertEquals(3, readResult);
+ assertEquals(1, buf[0]);
+ assertEquals(2, buf[1]);
+ assertEquals(3, buf[2]);
+ assert(conn.rid > 0);
+
+ assert(readResult !== Deno.EOF);
+
+ const readResult2 = await conn.read(buf);
+ assertEquals(Deno.EOF, readResult2);
+
+ listener.close();
+ conn.close();
+});
+
+/* TODO(ry) Re-enable this test.
+testPerm({ net: true }, async function netListenAsyncIterator(): Promise<void> {
+ const listener = Deno.listen(":4500");
+ const runAsyncIterator = async (): Promise<void> => {
+ for await (let conn of listener) {
+ await conn.write(new Uint8Array([1, 2, 3]));
+ conn.close();
+ }
+ };
+ runAsyncIterator();
+ const conn = await Deno.dial("127.0.0.1:4500");
+ const buf = new Uint8Array(1024);
+ const readResult = await conn.read(buf);
+ assertEquals(3, readResult);
+ assertEquals(1, buf[0]);
+ assertEquals(2, buf[1]);
+ assertEquals(3, buf[2]);
+ assert(conn.rid > 0);
+
+ assert(readResult !== Deno.EOF);
+
+ const readResult2 = await conn.read(buf);
+ assertEquals(Deno.EOF, readResult2);
+
+ listener.close();
+ conn.close();
+});
+ */
+
+/* TODO Fix broken test.
+testPerm({ net: true }, async function netCloseReadSuccess() {
+ const addr = "127.0.0.1:4500";
+ const listener = Deno.listen(addr);
+ const closeDeferred = deferred();
+ const closeReadDeferred = deferred();
+ listener.accept().then(async conn => {
+ await closeReadDeferred.promise;
+ await conn.write(new Uint8Array([1, 2, 3]));
+ const buf = new Uint8Array(1024);
+ const readResult = await conn.read(buf);
+ assertEquals(3, readResult);
+ assertEquals(4, buf[0]);
+ assertEquals(5, buf[1]);
+ assertEquals(6, buf[2]);
+ conn.close();
+ closeDeferred.resolve();
+ });
+ const conn = await Deno.dial(addr);
+ conn.closeRead(); // closing read
+ closeReadDeferred.resolve();
+ const buf = new Uint8Array(1024);
+ const readResult = await conn.read(buf);
+ assertEquals(Deno.EOF, readResult); // with immediate EOF
+ // Ensure closeRead does not impact write
+ await conn.write(new Uint8Array([4, 5, 6]));
+ await closeDeferred.promise;
+ listener.close();
+ conn.close();
+});
+*/
+
+/* TODO Fix broken test.
+testPerm({ net: true }, async function netDoubleCloseRead() {
+ const addr = "127.0.0.1:4500";
+ const listener = Deno.listen(addr);
+ const closeDeferred = deferred();
+ listener.accept().then(async conn => {
+ await conn.write(new Uint8Array([1, 2, 3]));
+ await closeDeferred.promise;
+ conn.close();
+ });
+ const conn = await Deno.dial(addr);
+ conn.closeRead(); // closing read
+ let err;
+ try {
+ // Duplicated close should throw error
+ conn.closeRead();
+ } catch (e) {
+ err = e;
+ }
+ assert(!!err);
+ assertEquals(err.kind, Deno.ErrorKind.NotConnected);
+ assertEquals(err.name, "NotConnected");
+ closeDeferred.resolve();
+ listener.close();
+ conn.close();
+});
+*/
+
+/* TODO Fix broken test.
+testPerm({ net: true }, async function netCloseWriteSuccess() {
+ const addr = "127.0.0.1:4500";
+ const listener = Deno.listen(addr);
+ const closeDeferred = deferred();
+ listener.accept().then(async conn => {
+ await conn.write(new Uint8Array([1, 2, 3]));
+ await closeDeferred.promise;
+ conn.close();
+ });
+ const conn = await Deno.dial(addr);
+ conn.closeWrite(); // closing write
+ const buf = new Uint8Array(1024);
+ // Check read not impacted
+ const readResult = await conn.read(buf);
+ assertEquals(3, readResult);
+ assertEquals(1, buf[0]);
+ assertEquals(2, buf[1]);
+ assertEquals(3, buf[2]);
+ // Check write should be closed
+ let err;
+ try {
+ await conn.write(new Uint8Array([1, 2, 3]));
+ } catch (e) {
+ err = e;
+ }
+ assert(!!err);
+ assertEquals(err.kind, Deno.ErrorKind.BrokenPipe);
+ assertEquals(err.name, "BrokenPipe");
+ closeDeferred.resolve();
+ listener.close();
+ conn.close();
+});
+*/
+
+/* TODO Fix broken test.
+testPerm({ net: true }, async function netDoubleCloseWrite() {
+ const addr = "127.0.0.1:4500";
+ const listener = Deno.listen(addr);
+ const closeDeferred = deferred();
+ listener.accept().then(async conn => {
+ await closeDeferred.promise;
+ conn.close();
+ });
+ const conn = await Deno.dial(addr);
+ conn.closeWrite(); // closing write
+ let err;
+ try {
+ // Duplicated close should throw error
+ conn.closeWrite();
+ } catch (e) {
+ err = e;
+ }
+ assert(!!err);
+ assertEquals(err.kind, Deno.ErrorKind.NotConnected);
+ assertEquals(err.name, "NotConnected");
+ closeDeferred.resolve();
+ listener.close();
+ conn.close();
+});
+*/
diff --git a/cli/js/os.ts b/cli/js/os.ts
new file mode 100644
index 000000000..2fc06434a
--- /dev/null
+++ b/cli/js/os.ts
@@ -0,0 +1,151 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { core } from "./core.ts";
+import * as dispatch from "./dispatch.ts";
+import { sendSync } from "./dispatch_json.ts";
+import { assert } from "./util.ts";
+import * as util from "./util.ts";
+import { window } from "./window.ts";
+import { OperatingSystem, Arch } from "./build.ts";
+
+// builtin modules
+import { _setGlobals } from "./deno.ts";
+
+/** Check if running in terminal.
+ *
+ * console.log(Deno.isTTY().stdout);
+ */
+export function isTTY(): { stdin: boolean; stdout: boolean; stderr: boolean } {
+ return sendSync(dispatch.OP_IS_TTY);
+}
+
+/** Get the hostname.
+ * Requires the `--allow-env` flag.
+ *
+ * console.log(Deno.hostname());
+ */
+export function hostname(): string {
+ return sendSync(dispatch.OP_HOSTNAME);
+}
+
+/** Exit the Deno process with optional exit code. */
+export function exit(code = 0): never {
+ sendSync(dispatch.OP_EXIT, { code });
+ return util.unreachable();
+}
+
+function setEnv(key: string, value: string): void {
+ sendSync(dispatch.OP_SET_ENV, { key, value });
+}
+
+function getEnv(key: string): string | undefined {
+ return sendSync(dispatch.OP_GET_ENV, { key })[0];
+}
+
+/** Returns a snapshot of the environment variables at invocation. Mutating a
+ * property in the object will set that variable in the environment for
+ * the process. The environment object will only accept `string`s
+ * as values.
+ *
+ * console.log(Deno.env("SHELL"));
+ * const myEnv = Deno.env();
+ * console.log(myEnv.SHELL);
+ * myEnv.TEST_VAR = "HELLO";
+ * const newEnv = Deno.env();
+ * console.log(myEnv.TEST_VAR == newEnv.TEST_VAR);
+ */
+export function env(): { [index: string]: string };
+export function env(key: string): string | undefined;
+export function env(
+ key?: string
+): { [index: string]: string } | string | undefined {
+ if (key) {
+ return getEnv(key);
+ }
+ const env = sendSync(dispatch.OP_ENV);
+ return new Proxy(env, {
+ set(obj, prop: string, value: string): boolean {
+ setEnv(prop, value);
+ return Reflect.set(obj, prop, value);
+ }
+ });
+}
+
+interface Start {
+ cwd: string;
+ pid: number;
+ argv: string[];
+ mainModule: string; // Absolute URL.
+ debugFlag: boolean;
+ depsFlag: boolean;
+ typesFlag: boolean;
+ versionFlag: boolean;
+ denoVersion: string;
+ v8Version: string;
+ tsVersion: string;
+ noColor: boolean;
+ xevalDelim: string;
+ os: OperatingSystem;
+ arch: Arch;
+}
+
+// This function bootstraps an environment within Deno; it is shared by both
+// the runtime and the compiler environments.
+// @internal
+export function start(preserveDenoNamespace = true, source?: string): Start {
+ core.setAsyncHandler(dispatch.asyncMsgFromRust);
+ const ops = core.ops();
+ // TODO(bartlomieju): this is a prototype, we should come up with
+ // something a bit more sophisticated
+ for (const [name, opId] of Object.entries(ops)) {
+ const opName = `OP_${name.toUpperCase()}`;
+ // Assign op ids to actual variables
+ dispatch[opName] = opId;
+ }
+ // First we send an empty `Start` message to let the privileged side know we
+ // are ready. The response should be a `StartRes` message containing the CLI
+ // args and other info.
+ const s = sendSync(dispatch.OP_START);
+
+ util.setLogDebug(s.debugFlag, source);
+
+  // pid and noColor need to be set in the Deno module before it is frozen.
+ _setGlobals(s.pid, s.noColor);
+ delete window.Deno._setGlobals;
+ Object.freeze(window.Deno);
+
+ if (preserveDenoNamespace) {
+ util.immutableDefine(window, "Deno", window.Deno);
+ // Deno.core could ONLY be safely frozen here (not in globals.ts)
+ // since shared_queue.js will modify core properties.
+ Object.freeze(window.Deno.core);
+ // core.sharedQueue is an object so we should also freeze it.
+ Object.freeze(window.Deno.core.sharedQueue);
+ } else {
+ // Remove window.Deno
+ delete window.Deno;
+ assert(window.Deno === undefined);
+ }
+
+ return s;
+}
+
+/**
+ * Returns the current user's home directory.
+ * Requires the `--allow-env` flag.
+ */
+export function homeDir(): string {
+ const path = sendSync(dispatch.OP_HOME_DIR);
+ if (!path) {
+ throw new Error("Could not get home directory.");
+ }
+ return path;
+}
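+// Example (illustrative sketch, assumes --allow-env):
+//
+//       const home = Deno.homeDir();
+//       console.log(`${home}/.deno`);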
+
+/**
+ * Returns the path to the current deno executable.
+ * Requires the `--allow-env` flag.
+ */
+export function execPath(): string {
+ return sendSync(dispatch.OP_EXEC_PATH);
+}
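+// Example (illustrative sketch, assumes --allow-env):
+//
+//       console.log(`running under ${Deno.execPath()}`);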
diff --git a/cli/js/os_test.ts b/cli/js/os_test.ts
new file mode 100644
index 000000000..0d07df1b4
--- /dev/null
+++ b/cli/js/os_test.ts
@@ -0,0 +1,165 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import {
+ test,
+ testPerm,
+ assert,
+ assertEquals,
+ assertNotEquals
+} from "./test_util.ts";
+
+testPerm({ env: true }, function envSuccess(): void {
+ const env = Deno.env();
+ assert(env !== null);
+ // eslint-disable-next-line @typescript-eslint/camelcase
+ env.test_var = "Hello World";
+ const newEnv = Deno.env();
+ assertEquals(env.test_var, newEnv.test_var);
+ assertEquals(Deno.env("test_var"), env.test_var);
+});
+
+testPerm({ env: true }, function envNotFound(): void {
+ const r = Deno.env("env_var_does_not_exist!");
+ assertEquals(r, undefined);
+});
+
+test(function envPermissionDenied1(): void {
+ let err;
+ try {
+ Deno.env();
+ } catch (e) {
+ err = e;
+ }
+ assertNotEquals(err, undefined);
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+test(function envPermissionDenied2(): void {
+ let err;
+ try {
+ Deno.env("PATH");
+ } catch (e) {
+ err = e;
+ }
+ assertNotEquals(err, undefined);
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+if (Deno.build.os === "win") {
+ // This test verifies that on Windows, environment variables are
+  // case-insensitive. Case normalization needs to be done using the collation
+ // that Windows uses, rather than naively using String.toLowerCase().
+ testPerm({ env: true, run: true }, async function envCaseInsensitive() {
+ // Utility function that runs a Deno subprocess with the environment
+ // specified in `inputEnv`. The subprocess reads the environment variables
+ // which are in the keys of `expectedEnv` and writes them to stdout as JSON.
+ // It is then verified that these match with the values of `expectedEnv`.
+ const checkChildEnv = async (inputEnv, expectedEnv): Promise<void> => {
+ const src = `
+ console.log(
+ ${JSON.stringify(Object.keys(expectedEnv))}.map(k => Deno.env(k))
+ )`;
+ const proc = Deno.run({
+ args: [Deno.execPath(), "eval", src],
+ env: inputEnv,
+ stdout: "piped"
+ });
+ const status = await proc.status();
+ assertEquals(status.success, true);
+ const expectedValues = Object.values(expectedEnv);
+ const actualValues = JSON.parse(
+ new TextDecoder().decode(await proc.output())
+ );
+ assertEquals(actualValues, expectedValues);
+ };
+
+ assertEquals(Deno.env("path"), Deno.env("PATH"));
+ assertEquals(Deno.env("Path"), Deno.env("PATH"));
+
+  // Check that 'foo', 'Foo' and 'FOO' are case folded.
+ await checkChildEnv({ foo: "X" }, { foo: "X", Foo: "X", FOO: "X" });
+
+ // Check that 'µ' and 'Μ' are not case folded.
+ const lc1 = "µ";
+ const uc1 = lc1.toUpperCase();
+ assertNotEquals(lc1, uc1);
+ await checkChildEnv(
+ { [lc1]: "mu", [uc1]: "MU" },
+ { [lc1]: "mu", [uc1]: "MU" }
+ );
+
+ // Check that 'dž' and 'DŽ' are folded, but 'Dž' is preserved.
+ const c2 = "Dž";
+ const lc2 = c2.toLowerCase();
+ const uc2 = c2.toUpperCase();
+ assertNotEquals(c2, lc2);
+ assertNotEquals(c2, uc2);
+ await checkChildEnv(
+ { [c2]: "Dz", [lc2]: "dz" },
+ { [c2]: "Dz", [lc2]: "dz", [uc2]: "dz" }
+ );
+ await checkChildEnv(
+ { [c2]: "Dz", [uc2]: "DZ" },
+ { [c2]: "Dz", [uc2]: "DZ", [lc2]: "DZ" }
+ );
+ });
+}
+
+test(function osPid(): void {
+ console.log("pid", Deno.pid);
+ assert(Deno.pid > 0);
+});
+
+// See complete tests in tools/is_tty_test.py
+test(function osIsTTYSmoke(): void {
+ console.log(Deno.isTTY());
+});
+
+testPerm({ env: true }, function homeDir(): void {
+ assertNotEquals(Deno.homeDir(), "");
+});
+
+testPerm({ env: false }, function homeDirPerm(): void {
+ let caughtError = false;
+ try {
+ Deno.homeDir();
+ } catch (err) {
+ caughtError = true;
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ env: true }, function execPath(): void {
+ assertNotEquals(Deno.execPath(), "");
+});
+
+testPerm({ env: false }, function execPathPerm(): void {
+ let caughtError = false;
+ try {
+ Deno.execPath();
+ } catch (err) {
+ caughtError = true;
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ env: true }, function hostnameDir(): void {
+ assertNotEquals(Deno.hostname(), "");
+});
+
+testPerm({ env: false }, function hostnamePerm(): void {
+ let caughtError = false;
+ try {
+ Deno.hostname();
+ } catch (err) {
+ caughtError = true;
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
diff --git a/cli/js/performance.ts b/cli/js/performance.ts
new file mode 100644
index 000000000..6ea8e56e1
--- /dev/null
+++ b/cli/js/performance.ts
@@ -0,0 +1,22 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as dispatch from "./dispatch.ts";
+import { sendSync } from "./dispatch_json.ts";
+
+interface NowResponse {
+ seconds: number;
+ subsecNanos: number;
+}
+
+export class Performance {
+  /** Returns the current time from Deno's start in milliseconds.
+   *
+   * Use the flag --allow-hrtime to return a precise value.
+ *
+ * const t = performance.now();
+ * console.log(`${t} ms since start!`);
+ */
+ now(): number {
+ const res = sendSync(dispatch.OP_NOW) as NowResponse;
+ return res.seconds * 1e3 + res.subsecNanos / 1e6;
+ }
+}
diff --git a/cli/js/performance_test.ts b/cli/js/performance_test.ts
new file mode 100644
index 000000000..ac682364e
--- /dev/null
+++ b/cli/js/performance_test.ts
@@ -0,0 +1,10 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert } from "./test_util.ts";
+
+testPerm({ hrtime: false }, function now(): void {
+ const start = performance.now();
+ setTimeout((): void => {
+ const end = performance.now();
+ assert(end - start >= 10);
+ }, 10);
+});
diff --git a/cli/js/permissions.ts b/cli/js/permissions.ts
new file mode 100644
index 000000000..4f393501c
--- /dev/null
+++ b/cli/js/permissions.ts
@@ -0,0 +1,39 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as dispatch from "./dispatch.ts";
+import { sendSync } from "./dispatch_json.ts";
+
+/** Permissions as granted by the caller */
+export interface Permissions {
+ read: boolean;
+ write: boolean;
+ net: boolean;
+ env: boolean;
+ run: boolean;
+ hrtime: boolean;
+ // NOTE: Keep in sync with src/permissions.rs
+}
+
+export type Permission = keyof Permissions;
+
+/** Inspect granted permissions for the current program.
+ *
+ * if (Deno.permissions().read) {
+ * const file = await Deno.readFile("example.test");
+ * // ...
+ * }
+ */
+export function permissions(): Permissions {
+ return sendSync(dispatch.OP_PERMISSIONS) as Permissions;
+}
+
+/** Revoke a permission. If the permission is already revoked, nothing
+ * changes.
+ *
+ * if (Deno.permissions().read) {
+ * const file = await Deno.readFile("example.test");
+ * Deno.revokePermission('read');
+ * }
+ * Deno.readFile("example.test"); // -> error or permission prompt
+ */
+export function revokePermission(permission: Permission): void {
+ sendSync(dispatch.OP_REVOKE_PERMISSION, { permission });
+}
diff --git a/cli/js/permissions_test.ts b/cli/js/permissions_test.ts
new file mode 100644
index 000000000..6511c2dcb
--- /dev/null
+++ b/cli/js/permissions_test.ts
@@ -0,0 +1,28 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert, assertEquals } from "./test_util.ts";
+
+const knownPermissions: Deno.Permission[] = [
+ "run",
+ "read",
+ "write",
+ "net",
+ "env",
+ "hrtime"
+];
+
+for (const grant of knownPermissions) {
+ testPerm({ [grant]: true }, function envGranted(): void {
+ const perms = Deno.permissions();
+ assert(perms !== null);
+ for (const perm in perms) {
+ assertEquals(perms[perm], perm === grant);
+ }
+
+ Deno.revokePermission(grant);
+
+ const revoked = Deno.permissions();
+ for (const perm in revoked) {
+ assertEquals(revoked[perm], false);
+ }
+ });
+}
diff --git a/cli/js/process.ts b/cli/js/process.ts
new file mode 100644
index 000000000..0c77929f9
--- /dev/null
+++ b/cli/js/process.ts
@@ -0,0 +1,307 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+import { File, close } from "./files.ts";
+import { ReadCloser, WriteCloser } from "./io.ts";
+import { readAll } from "./buffer.ts";
+import { assert, unreachable } from "./util.ts";
+import { build } from "./build.ts";
+
+/** How to handle subprocess stdio.
+ *
+ * "inherit" The default if unspecified. The child inherits from the
+ * corresponding parent descriptor.
+ *
+ * "piped" A new pipe should be arranged to connect the parent and child
+ * subprocesses.
+ *
+ * "null" This stream will be ignored. This is the equivalent of attaching the
+ * stream to /dev/null.
+ */
+export type ProcessStdio = "inherit" | "piped" | "null";
+
+// TODO Maybe extend VSCode's 'CommandOptions'?
+// See https://code.visualstudio.com/docs/editor/tasks-appendix#_schema-for-tasksjson
+export interface RunOptions {
+ args: string[];
+ cwd?: string;
+ env?: { [key: string]: string };
+ stdout?: ProcessStdio | number;
+ stderr?: ProcessStdio | number;
+ stdin?: ProcessStdio | number;
+}
+
+interface RunStatusResponse {
+ gotSignal: boolean;
+ exitCode: number;
+ exitSignal: number;
+}
+
+async function runStatus(rid: number): Promise<ProcessStatus> {
+ const res = (await sendAsync(dispatch.OP_RUN_STATUS, {
+ rid
+ })) as RunStatusResponse;
+
+ if (res.gotSignal) {
+ const signal = res.exitSignal;
+ return { signal, success: false };
+ } else {
+ const code = res.exitCode;
+ return { code, success: code === 0 };
+ }
+}
+
+/** Send a signal to the process with the given PID. Unix only at this moment.
+ * If pid is negative, the signal will be sent to the process group identified
+ * by -pid.
+ * Requires the `--allow-run` flag.
+ */
+export function kill(pid: number, signo: number): void {
+ sendSync(dispatch.OP_KILL, { pid, signo });
+}
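+// Example (illustrative sketch, assumes --allow-run on a Unix-like system
+// with a `sleep` binary on PATH):
+//
+//       const p = Deno.run({ args: ["sleep", "10000"] });
+//       Deno.kill(p.pid, Deno.Signal.SIGTERM);
+//       await p.status();
+//       p.close();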
+
+export class Process {
+ readonly rid: number;
+ readonly pid: number;
+ readonly stdin?: WriteCloser;
+ readonly stdout?: ReadCloser;
+ readonly stderr?: ReadCloser;
+
+ // @internal
+ constructor(res: RunResponse) {
+ this.rid = res.rid;
+ this.pid = res.pid;
+
+ if (res.stdinRid && res.stdinRid > 0) {
+ this.stdin = new File(res.stdinRid);
+ }
+
+ if (res.stdoutRid && res.stdoutRid > 0) {
+ this.stdout = new File(res.stdoutRid);
+ }
+
+ if (res.stderrRid && res.stderrRid > 0) {
+ this.stderr = new File(res.stderrRid);
+ }
+ }
+
+ async status(): Promise<ProcessStatus> {
+ return await runStatus(this.rid);
+ }
+
+ /** Buffer the stdout and return it as Uint8Array after EOF.
+ * You must set stdout to "piped" when creating the process.
+   * This calls close() on stdout after it's done.
+ */
+ async output(): Promise<Uint8Array> {
+ if (!this.stdout) {
+ throw new Error("Process.output: stdout is undefined");
+ }
+ try {
+ return await readAll(this.stdout);
+ } finally {
+ this.stdout.close();
+ }
+ }
+
+ /** Buffer the stderr and return it as Uint8Array after EOF.
+ * You must set stderr to "piped" when creating the process.
+   * This calls close() on stderr after it's done.
+ */
+ async stderrOutput(): Promise<Uint8Array> {
+ if (!this.stderr) {
+ throw new Error("Process.stderrOutput: stderr is undefined");
+ }
+ try {
+ return await readAll(this.stderr);
+ } finally {
+ this.stderr.close();
+ }
+ }
+
+ close(): void {
+ close(this.rid);
+ }
+
+ kill(signo: number): void {
+ kill(this.pid, signo);
+ }
+}
+
+export interface ProcessStatus {
+ success: boolean;
+ code?: number;
+ signal?: number; // TODO: Make this a string, e.g. 'SIGTERM'.
+}
+
+// TODO: this function only validates the option; it can probably be renamed
+function stdioMap(s: string): string {
+ switch (s) {
+ case "inherit":
+ case "piped":
+ case "null":
+ return s;
+ default:
+ return unreachable();
+ }
+}
+
+function isRid(arg: unknown): arg is number {
+ return !isNaN(arg as number);
+}
+
+interface RunResponse {
+ rid: number;
+ pid: number;
+ stdinRid: number | null;
+ stdoutRid: number | null;
+ stderrRid: number | null;
+}
+/**
+ * Spawns a new subprocess.
+ *
+ * The subprocess uses the same working directory as the parent process unless
+ * `opt.cwd` is specified.
+ *
+ * Environment variables for the subprocess can be specified using the
+ * `opt.env` mapping.
+ *
+ * By default the subprocess inherits the stdio of the parent process. To
+ * change that, `opt.stdout`, `opt.stderr` and `opt.stdin` can be specified
+ * independently - each can be set to either a `ProcessStdio` value or the
+ * `rid` of an open file.
+ */
+export function run(opt: RunOptions): Process {
+ assert(opt.args.length > 0);
+ let env: Array<[string, string]> = [];
+ if (opt.env) {
+ env = Array.from(Object.entries(opt.env));
+ }
+
+ let stdin = stdioMap("inherit");
+ let stdout = stdioMap("inherit");
+ let stderr = stdioMap("inherit");
+ let stdinRid = 0;
+ let stdoutRid = 0;
+ let stderrRid = 0;
+
+ if (opt.stdin) {
+ if (isRid(opt.stdin)) {
+ stdinRid = opt.stdin;
+ } else {
+ stdin = stdioMap(opt.stdin);
+ }
+ }
+
+ if (opt.stdout) {
+ if (isRid(opt.stdout)) {
+ stdoutRid = opt.stdout;
+ } else {
+ stdout = stdioMap(opt.stdout);
+ }
+ }
+
+ if (opt.stderr) {
+ if (isRid(opt.stderr)) {
+ stderrRid = opt.stderr;
+ } else {
+ stderr = stdioMap(opt.stderr);
+ }
+ }
+
+ const req = {
+ args: opt.args.map(String),
+ cwd: opt.cwd,
+ env,
+ stdin,
+ stdout,
+ stderr,
+ stdinRid,
+ stdoutRid,
+ stderrRid
+ };
+
+ const res = sendSync(dispatch.OP_RUN, req) as RunResponse;
+ return new Process(res);
+}
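+// Example (illustrative sketch, assumes --allow-run and an `echo` binary on
+// PATH): spawn a subprocess with piped stdout and collect its output.
+// output() closes stdout when done; close() releases the child resource.
+//
+//       const p = Deno.run({ args: ["echo", "hello"], stdout: "piped" });
+//       const text = new TextDecoder().decode(await p.output());
+//       console.log(text); // "hello\n"
+//       p.close();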
+
+// From `kill -l`
+enum LinuxSignal {
+ SIGHUP = 1,
+ SIGINT = 2,
+ SIGQUIT = 3,
+ SIGILL = 4,
+ SIGTRAP = 5,
+ SIGABRT = 6,
+ SIGBUS = 7,
+ SIGFPE = 8,
+ SIGKILL = 9,
+ SIGUSR1 = 10,
+ SIGSEGV = 11,
+ SIGUSR2 = 12,
+ SIGPIPE = 13,
+ SIGALRM = 14,
+ SIGTERM = 15,
+ SIGSTKFLT = 16,
+ SIGCHLD = 17,
+ SIGCONT = 18,
+ SIGSTOP = 19,
+ SIGTSTP = 20,
+ SIGTTIN = 21,
+ SIGTTOU = 22,
+ SIGURG = 23,
+ SIGXCPU = 24,
+ SIGXFSZ = 25,
+ SIGVTALRM = 26,
+ SIGPROF = 27,
+ SIGWINCH = 28,
+ SIGIO = 29,
+ SIGPWR = 30,
+ SIGSYS = 31
+}
+
+// From `kill -l`
+enum MacOSSignal {
+ SIGHUP = 1,
+ SIGINT = 2,
+ SIGQUIT = 3,
+ SIGILL = 4,
+ SIGTRAP = 5,
+ SIGABRT = 6,
+ SIGEMT = 7,
+ SIGFPE = 8,
+ SIGKILL = 9,
+ SIGBUS = 10,
+ SIGSEGV = 11,
+ SIGSYS = 12,
+ SIGPIPE = 13,
+ SIGALRM = 14,
+ SIGTERM = 15,
+ SIGURG = 16,
+ SIGSTOP = 17,
+ SIGTSTP = 18,
+ SIGCONT = 19,
+ SIGCHLD = 20,
+ SIGTTIN = 21,
+ SIGTTOU = 22,
+ SIGIO = 23,
+ SIGXCPU = 24,
+ SIGXFSZ = 25,
+ SIGVTALRM = 26,
+ SIGPROF = 27,
+ SIGWINCH = 28,
+ SIGINFO = 29,
+ SIGUSR1 = 30,
+ SIGUSR2 = 31
+}
+
+/** Signal numbers. These are platform dependent.
+ */
+export const Signal = {};
+
+export function setSignals(): void {
+ if (build.os === "mac") {
+ Object.assign(Signal, MacOSSignal);
+ } else {
+ Object.assign(Signal, LinuxSignal);
+ }
+}
diff --git a/cli/js/process_test.ts b/cli/js/process_test.ts
new file mode 100644
index 000000000..42db06dee
--- /dev/null
+++ b/cli/js/process_test.ts
@@ -0,0 +1,377 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import {
+ test,
+ testPerm,
+ assert,
+ assertEquals,
+ assertStrContains
+} from "./test_util.ts";
+const {
+ kill,
+ run,
+ DenoError,
+ ErrorKind,
+ readFile,
+ open,
+ makeTempDir,
+ writeFile
+} = Deno;
+
+test(function runPermissions(): void {
+ let caughtError = false;
+ try {
+ Deno.run({ args: ["python", "-c", "print('hello world')"] });
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ run: true }, async function runSuccess(): Promise<void> {
+ const p = run({
+ args: ["python", "-c", "print('hello world')"]
+ });
+ const status = await p.status();
+ console.log("status", status);
+ assertEquals(status.success, true);
+ assertEquals(status.code, 0);
+ assertEquals(status.signal, undefined);
+ p.close();
+});
+
+testPerm({ run: true }, async function runCommandFailedWithCode(): Promise<
+ void
+> {
+ const p = run({
+ args: ["python", "-c", "import sys;sys.exit(41 + 1)"]
+ });
+ const status = await p.status();
+ assertEquals(status.success, false);
+ assertEquals(status.code, 42);
+ assertEquals(status.signal, undefined);
+ p.close();
+});
+
+testPerm({ run: true }, async function runCommandFailedWithSignal(): Promise<
+ void
+> {
+ if (Deno.build.os === "win") {
+ return; // No signals on windows.
+ }
+ const p = run({
+ args: ["python", "-c", "import os;os.kill(os.getpid(), 9)"]
+ });
+ const status = await p.status();
+ assertEquals(status.success, false);
+ assertEquals(status.code, undefined);
+ assertEquals(status.signal, 9);
+ p.close();
+});
+
+testPerm({ run: true }, function runNotFound(): void {
+ let error;
+ try {
+ run({ args: ["this file hopefully doesn't exist"] });
+ } catch (e) {
+ error = e;
+ }
+ assert(error !== undefined);
+ assert(error instanceof DenoError);
+ assertEquals(error.kind, ErrorKind.NotFound);
+});
+
+testPerm(
+ { write: true, run: true },
+ async function runWithCwdIsAsync(): Promise<void> {
+ const enc = new TextEncoder();
+ const cwd = await makeTempDir({ prefix: "deno_command_test" });
+
+ const exitCodeFile = "deno_was_here";
+ const pyProgramFile = "poll_exit.py";
+ const pyProgram = `
+from sys import exit
+from time import sleep
+
+while True:
+ try:
+ with open("${exitCodeFile}", "r") as f:
+ line = f.readline()
+ code = int(line)
+ exit(code)
+ except IOError:
+ # Retry if we got here before deno wrote the file.
+ sleep(0.01)
+ pass
+`;
+
+ Deno.writeFileSync(`${cwd}/${pyProgramFile}.py`, enc.encode(pyProgram));
+ const p = run({
+ cwd,
+ args: ["python", `${pyProgramFile}.py`]
+ });
+
+ // Write the expected exit code *after* starting python.
+ // This is how we verify that `run()` is actually asynchronous.
+ const code = 84;
+ Deno.writeFileSync(`${cwd}/${exitCodeFile}`, enc.encode(`${code}`));
+
+ const status = await p.status();
+ assertEquals(status.success, false);
+ assertEquals(status.code, code);
+ assertEquals(status.signal, undefined);
+ p.close();
+ }
+);
+
+testPerm({ run: true }, async function runStdinPiped(): Promise<void> {
+ const p = run({
+ args: ["python", "-c", "import sys; assert 'hello' == sys.stdin.read();"],
+ stdin: "piped"
+ });
+ assert(!p.stdout);
+ assert(!p.stderr);
+
+ const msg = new TextEncoder().encode("hello");
+ const n = await p.stdin.write(msg);
+ assertEquals(n, msg.byteLength);
+
+ p.stdin.close();
+
+ const status = await p.status();
+ assertEquals(status.success, true);
+ assertEquals(status.code, 0);
+ assertEquals(status.signal, undefined);
+ p.close();
+});
+
+testPerm({ run: true }, async function runStdoutPiped(): Promise<void> {
+ const p = run({
+ args: ["python", "-c", "import sys; sys.stdout.write('hello')"],
+ stdout: "piped"
+ });
+ assert(!p.stdin);
+ assert(!p.stderr);
+
+ const data = new Uint8Array(10);
+ let r = await p.stdout.read(data);
+ if (r === Deno.EOF) {
+ throw new Error("p.stdout.read(...) should not be EOF");
+ }
+ assertEquals(r, 5);
+ const s = new TextDecoder().decode(data.subarray(0, r));
+ assertEquals(s, "hello");
+ r = await p.stdout.read(data);
+ assertEquals(r, Deno.EOF);
+ p.stdout.close();
+
+ const status = await p.status();
+ assertEquals(status.success, true);
+ assertEquals(status.code, 0);
+ assertEquals(status.signal, undefined);
+ p.close();
+});
+
+testPerm({ run: true }, async function runStderrPiped(): Promise<void> {
+ const p = run({
+ args: ["python", "-c", "import sys; sys.stderr.write('hello')"],
+ stderr: "piped"
+ });
+ assert(!p.stdin);
+ assert(!p.stdout);
+
+ const data = new Uint8Array(10);
+ let r = await p.stderr.read(data);
+ if (r === Deno.EOF) {
+ throw new Error("p.stderr.read should not return EOF here");
+ }
+ assertEquals(r, 5);
+ const s = new TextDecoder().decode(data.subarray(0, r));
+ assertEquals(s, "hello");
+ r = await p.stderr.read(data);
+ assertEquals(r, Deno.EOF);
+ p.stderr.close();
+
+ const status = await p.status();
+ assertEquals(status.success, true);
+ assertEquals(status.code, 0);
+ assertEquals(status.signal, undefined);
+ p.close();
+});
+
+testPerm({ run: true }, async function runOutput(): Promise<void> {
+ const p = run({
+ args: ["python", "-c", "import sys; sys.stdout.write('hello')"],
+ stdout: "piped"
+ });
+ const output = await p.output();
+ const s = new TextDecoder().decode(output);
+ assertEquals(s, "hello");
+ p.close();
+});
+
+testPerm({ run: true }, async function runStderrOutput(): Promise<void> {
+ const p = run({
+ args: ["python", "-c", "import sys; sys.stderr.write('error')"],
+ stderr: "piped"
+ });
+ const error = await p.stderrOutput();
+ const s = new TextDecoder().decode(error);
+ assertEquals(s, "error");
+ p.close();
+});
+
+testPerm(
+ { run: true, write: true, read: true },
+ async function runRedirectStdoutStderr(): Promise<void> {
+ const tempDir = await makeTempDir();
+ const fileName = tempDir + "/redirected_stdio.txt";
+ const file = await open(fileName, "w");
+
+ const p = run({
+ args: [
+ "python",
+ "-c",
+ "import sys; sys.stderr.write('error\\n'); sys.stdout.write('output\\n');"
+ ],
+ stdout: file.rid,
+ stderr: file.rid
+ });
+
+ await p.status();
+ p.close();
+ file.close();
+
+ const fileContents = await readFile(fileName);
+ const decoder = new TextDecoder();
+ const text = decoder.decode(fileContents);
+
+ assertStrContains(text, "error");
+ assertStrContains(text, "output");
+ }
+);
+
+testPerm(
+ { run: true, write: true, read: true },
+ async function runRedirectStdin(): Promise<void> {
+ const tempDir = await makeTempDir();
+ const fileName = tempDir + "/redirected_stdio.txt";
+ const encoder = new TextEncoder();
+ await writeFile(fileName, encoder.encode("hello"));
+ const file = await open(fileName, "r");
+
+ const p = run({
+ args: ["python", "-c", "import sys; assert 'hello' == sys.stdin.read();"],
+ stdin: file.rid
+ });
+
+ const status = await p.status();
+ assertEquals(status.code, 0);
+ p.close();
+ file.close();
+ }
+);
+
+testPerm({ run: true }, async function runEnv(): Promise<void> {
+ const p = run({
+ args: [
+ "python",
+ "-c",
+ "import os, sys; sys.stdout.write(os.environ.get('FOO', '') + os.environ.get('BAR', ''))"
+ ],
+ env: {
+ FOO: "0123",
+ BAR: "4567"
+ },
+ stdout: "piped"
+ });
+ const output = await p.output();
+ const s = new TextDecoder().decode(output);
+ assertEquals(s, "01234567");
+ p.close();
+});
+
+testPerm({ run: true }, async function runClose(): Promise<void> {
+ const p = run({
+ args: [
+ "python",
+ "-c",
+ "from time import sleep; import sys; sleep(10000); sys.stderr.write('error')"
+ ],
+ stderr: "piped"
+ });
+ assert(!p.stdin);
+ assert(!p.stdout);
+
+ p.close();
+
+ const data = new Uint8Array(10);
+ const r = await p.stderr.read(data);
+ assertEquals(r, Deno.EOF);
+});
+
+test(function signalNumbers(): void {
+ if (Deno.build.os === "mac") {
+ assertEquals(Deno.Signal.SIGSTOP, 17);
+ } else if (Deno.build.os === "linux") {
+ assertEquals(Deno.Signal.SIGSTOP, 19);
+ }
+});
+
+// Ignore signal tests on windows for now...
+if (Deno.build.os !== "win") {
+ test(function killPermissions(): void {
+ let caughtError = false;
+ try {
+ // Unlike the other test cases, we don't have permission to spawn a
+ // subprocess we can safely kill. Instead we send SIGCONT to the current
+ // process - assuming that Deno does not have a special handler set for it
+ // and will just continue even if a signal is erroneously sent.
+ Deno.kill(Deno.pid, Deno.Signal.SIGCONT);
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+ });
+
+ testPerm({ run: true }, async function killSuccess(): Promise<void> {
+ const p = run({
+ args: ["python", "-c", "from time import sleep; sleep(10000)"]
+ });
+
+ assertEquals(Deno.Signal.SIGINT, 2);
+ kill(p.pid, Deno.Signal.SIGINT);
+ const status = await p.status();
+
+ assertEquals(status.success, false);
+ // TODO(ry) On Linux, status.code is sometimes undefined and sometimes 1.
+ // The following assert is causing this test to be flaky. Investigate and
+ // re-enable when it can be made deterministic.
+ // assertEquals(status.code, 1);
+ // assertEquals(status.signal, Deno.Signal.SIGINT);
+ });
+
+ testPerm({ run: true }, async function killFailed(): Promise<void> {
+ const p = run({
+ args: ["python", "-c", "from time import sleep; sleep(10000)"]
+ });
+ assert(!p.stdin);
+ assert(!p.stdout);
+
+ let err;
+ try {
+ kill(p.pid, 12345);
+ } catch (e) {
+ err = e;
+ }
+
+ assert(!!err);
+ assertEquals(err.kind, Deno.ErrorKind.InvalidInput);
+ assertEquals(err.name, "InvalidInput");
+
+ p.close();
+ });
+}
diff --git a/cli/js/read_dir.ts b/cli/js/read_dir.ts
new file mode 100644
index 000000000..2fa6a566b
--- /dev/null
+++ b/cli/js/read_dir.ts
@@ -0,0 +1,34 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+import { FileInfo, FileInfoImpl } from "./file_info.ts";
+import { StatResponse } from "./stat.ts";
+
+interface ReadDirResponse {
+ entries: StatResponse[];
+}
+
+function res(response: ReadDirResponse): FileInfo[] {
+ return response.entries.map(
+ (statRes: StatResponse): FileInfo => {
+ return new FileInfoImpl(statRes);
+ }
+ );
+}
+
+/** Reads the directory given by path and returns a list of file info
+ * synchronously.
+ *
+ * const files = Deno.readDirSync("/");
+ */
+export function readDirSync(path: string): FileInfo[] {
+ return res(sendSync(dispatch.OP_READ_DIR, { path }));
+}
+
+/** Reads the directory given by path and returns a list of file info.
+ *
+ * const files = await Deno.readDir("/");
+ */
+export async function readDir(path: string): Promise<FileInfo[]> {
+ return res(await sendAsync(dispatch.OP_READ_DIR, { path }));
+}
diff --git a/cli/js/read_dir_test.ts b/cli/js/read_dir_test.ts
new file mode 100644
index 000000000..3e11df9fe
--- /dev/null
+++ b/cli/js/read_dir_test.ts
@@ -0,0 +1,84 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert, assertEquals } from "./test_util.ts";
+
+type FileInfo = Deno.FileInfo;
+
+function assertSameContent(files: FileInfo[]): void {
+ let counter = 0;
+
+ for (const file of files) {
+ if (file.name === "subdir") {
+ assert(file.isDirectory());
+ counter++;
+ }
+
+ if (file.name === "002_hello.ts") {
+ assertEquals(file.mode!, Deno.statSync(`tests/${file.name}`).mode!);
+ counter++;
+ }
+ }
+
+ assertEquals(counter, 2);
+}
+
+testPerm({ read: true }, function readDirSyncSuccess(): void {
+ const files = Deno.readDirSync("tests/");
+ assertSameContent(files);
+});
+
+testPerm({ read: false }, function readDirSyncPerm(): void {
+ let caughtError = false;
+ try {
+ Deno.readDirSync("tests/");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ read: true }, function readDirSyncNotDir(): void {
+ let caughtError = false;
+ let src;
+
+ try {
+ src = Deno.readDirSync("package.json");
+ } catch (err) {
+ caughtError = true;
+ assertEquals(err.kind, Deno.ErrorKind.Other);
+ }
+ assert(caughtError);
+ assertEquals(src, undefined);
+});
+
+testPerm({ read: true }, function readDirSyncNotFound(): void {
+ let caughtError = false;
+ let src;
+
+ try {
+ src = Deno.readDirSync("bad_dir_name");
+ } catch (err) {
+ caughtError = true;
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ }
+ assert(caughtError);
+ assertEquals(src, undefined);
+});
+
+testPerm({ read: true }, async function readDirSuccess(): Promise<void> {
+ const files = await Deno.readDir("tests/");
+ assertSameContent(files);
+});
+
+testPerm({ read: false }, async function readDirPerm(): Promise<void> {
+ let caughtError = false;
+ try {
+ await Deno.readDir("tests/");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
diff --git a/cli/js/read_file.ts b/cli/js/read_file.ts
new file mode 100644
index 000000000..de6630cc0
--- /dev/null
+++ b/cli/js/read_file.ts
@@ -0,0 +1,29 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { open, openSync } from "./files.ts";
+import { readAll, readAllSync } from "./buffer.ts";
+
+/** Read the entire contents of a file synchronously.
+ *
+ * const decoder = new TextDecoder("utf-8");
+ * const data = Deno.readFileSync("hello.txt");
+ * console.log(decoder.decode(data));
+ */
+export function readFileSync(filename: string): Uint8Array {
+ const file = openSync(filename);
+ const contents = readAllSync(file);
+ file.close();
+ return contents;
+}
+
+/** Read the entire contents of a file.
+ *
+ * const decoder = new TextDecoder("utf-8");
+ * const data = await Deno.readFile("hello.txt");
+ * console.log(decoder.decode(data));
+ */
+export async function readFile(filename: string): Promise<Uint8Array> {
+ const file = await open(filename);
+ const contents = await readAll(file);
+ file.close();
+ return contents;
+}
diff --git a/cli/js/read_file_test.ts b/cli/js/read_file_test.ts
new file mode 100644
index 000000000..7d4f4789c
--- /dev/null
+++ b/cli/js/read_file_test.ts
@@ -0,0 +1,57 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert, assertEquals } from "./test_util.ts";
+
+testPerm({ read: true }, function readFileSyncSuccess(): void {
+ const data = Deno.readFileSync("package.json");
+ assert(data.byteLength > 0);
+ const decoder = new TextDecoder("utf-8");
+ const json = decoder.decode(data);
+ const pkg = JSON.parse(json);
+ assertEquals(pkg.name, "deno");
+});
+
+testPerm({ read: false }, function readFileSyncPerm(): void {
+ let caughtError = false;
+ try {
+ Deno.readFileSync("package.json");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ read: true }, function readFileSyncNotFound(): void {
+ let caughtError = false;
+ let data;
+ try {
+ data = Deno.readFileSync("bad_filename");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.NotFound);
+ }
+ assert(caughtError);
+ assert(data === undefined);
+});
+
+testPerm({ read: true }, async function readFileSuccess(): Promise<void> {
+ const data = await Deno.readFile("package.json");
+ assert(data.byteLength > 0);
+ const decoder = new TextDecoder("utf-8");
+ const json = decoder.decode(data);
+ const pkg = JSON.parse(json);
+ assertEquals(pkg.name, "deno");
+});
+
+testPerm({ read: false }, async function readFilePerm(): Promise<void> {
+ let caughtError = false;
+ try {
+ await Deno.readFile("package.json");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
diff --git a/cli/js/read_link.ts b/cli/js/read_link.ts
new file mode 100644
index 000000000..861fbff0b
--- /dev/null
+++ b/cli/js/read_link.ts
@@ -0,0 +1,19 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+
+/** Returns the destination of the named symbolic link synchronously.
+ *
+ * const targetPath = Deno.readlinkSync("symlink/path");
+ */
+export function readlinkSync(name: string): string {
+ return sendSync(dispatch.OP_READ_LINK, { name });
+}
+
+/** Returns the destination of the named symbolic link.
+ *
+ * const targetPath = await Deno.readlink("symlink/path");
+ */
+export async function readlink(name: string): Promise<string> {
+ return await sendAsync(dispatch.OP_READ_LINK, { name });
+}
diff --git a/cli/js/read_link_test.ts b/cli/js/read_link_test.ts
new file mode 100644
index 000000000..83a693e3b
--- /dev/null
+++ b/cli/js/read_link_test.ts
@@ -0,0 +1,69 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert, assertEquals } from "./test_util.ts";
+
+testPerm({ write: true, read: true }, function readlinkSyncSuccess(): void {
+ const testDir = Deno.makeTempDirSync();
+ const target = testDir + "/target";
+ const symlink = testDir + "/symln";
+ Deno.mkdirSync(target);
+ // TODO Add test for Windows once symlink is implemented for Windows.
+ // See https://github.com/denoland/deno/issues/815.
+ if (Deno.build.os !== "win") {
+ Deno.symlinkSync(target, symlink);
+ const targetPath = Deno.readlinkSync(symlink);
+ assertEquals(targetPath, target);
+ }
+});
+
+testPerm({ read: false }, async function readlinkSyncPerm(): Promise<void> {
+ let caughtError = false;
+ try {
+ Deno.readlinkSync("/symlink");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ read: true }, function readlinkSyncNotFound(): void {
+ let caughtError = false;
+ let data;
+ try {
+ data = Deno.readlinkSync("bad_filename");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.NotFound);
+ }
+ assert(caughtError);
+ assertEquals(data, undefined);
+});
+
+testPerm({ write: true, read: true }, async function readlinkSuccess(): Promise<
+ void
+> {
+ const testDir = Deno.makeTempDirSync();
+ const target = testDir + "/target";
+ const symlink = testDir + "/symln";
+ Deno.mkdirSync(target);
+ // TODO Add test for Windows once symlink is implemented for Windows.
+ // See https://github.com/denoland/deno/issues/815.
+ if (Deno.build.os !== "win") {
+ Deno.symlinkSync(target, symlink);
+ const targetPath = await Deno.readlink(symlink);
+ assertEquals(targetPath, target);
+ }
+});
+
+testPerm({ read: false }, async function readlinkPerm(): Promise<void> {
+ let caughtError = false;
+ try {
+ await Deno.readlink("/symlink");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
diff --git a/cli/js/remove.ts b/cli/js/remove.ts
new file mode 100644
index 000000000..36413a7c4
--- /dev/null
+++ b/cli/js/remove.ts
@@ -0,0 +1,32 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+
+export interface RemoveOption {
+ recursive?: boolean;
+}
+
+/** Removes the named file or directory synchronously. Throws an error if
+ * permission is denied, the path is not found, or the path is a non-empty
+ * directory and `recursive` is set to false.
+ * `recursive` is set to false by default.
+ *
+ * Deno.removeSync("/path/to/dir/or/file", {recursive: false});
+ */
+export function removeSync(path: string, options: RemoveOption = {}): void {
+ sendSync(dispatch.OP_REMOVE, { path, recursive: !!options.recursive });
+}
+
+/** Removes the named file or directory. Throws an error if permission is
+ * denied, the path is not found, or the path is a non-empty directory and
+ * `recursive` is set to false.
+ * `recursive` is set to false by default.
+ *
+ * await Deno.remove("/path/to/dir/or/file", {recursive: false});
+ */
+export async function remove(
+ path: string,
+ options: RemoveOption = {}
+): Promise<void> {
+ await sendAsync(dispatch.OP_REMOVE, { path, recursive: !!options.recursive });
+}
diff --git a/cli/js/remove_test.ts b/cli/js/remove_test.ts
new file mode 100644
index 000000000..f14386f7f
--- /dev/null
+++ b/cli/js/remove_test.ts
@@ -0,0 +1,335 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert, assertEquals } from "./test_util.ts";
+
+// SYNC
+
+testPerm({ write: true }, function removeSyncDirSuccess(): void {
+ // REMOVE EMPTY DIRECTORY
+ const path = Deno.makeTempDirSync() + "/dir/subdir";
+ Deno.mkdirSync(path);
+ const pathInfo = Deno.statSync(path);
+ assert(pathInfo.isDirectory()); // check exist first
+ Deno.removeSync(path); // remove
+ // We then check again after remove
+ let err;
+ try {
+ Deno.statSync(path);
+ } catch (e) {
+ err = e;
+ }
+ // Directory is gone
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: true }, function removeSyncFileSuccess(): void {
+ // REMOVE FILE
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+ Deno.writeFileSync(filename, data, { perm: 0o666 });
+ const fileInfo = Deno.statSync(filename);
+ assert(fileInfo.isFile()); // check exist first
+ Deno.removeSync(filename); // remove
+ // We then check again after remove
+ let err;
+ try {
+ Deno.statSync(filename);
+ } catch (e) {
+ err = e;
+ }
+ // File is gone
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: true }, function removeSyncFail(): void {
+ // NON-EMPTY DIRECTORY
+ const path = Deno.makeTempDirSync() + "/dir/subdir";
+ const subPath = path + "/subsubdir";
+ Deno.mkdirSync(path);
+ Deno.mkdirSync(subPath);
+ const pathInfo = Deno.statSync(path);
+ assert(pathInfo.isDirectory()); // check exist first
+ const subPathInfo = Deno.statSync(subPath);
+ assert(subPathInfo.isDirectory()); // check exist first
+ let err;
+ try {
+ // Should not be able to recursively remove
+ Deno.removeSync(path);
+ } catch (e) {
+ err = e;
+ }
+ // TODO(ry) Is Other really the error we should get here? What would Go do?
+ assertEquals(err.kind, Deno.ErrorKind.Other);
+ assertEquals(err.name, "Other");
+ // NON-EXISTENT DIRECTORY/FILE
+ try {
+ // Non-existent
+ Deno.removeSync("/baddir");
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: false }, function removeSyncPerm(): void {
+ let err;
+ try {
+ Deno.removeSync("/baddir");
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+testPerm({ write: true }, function removeAllSyncDirSuccess(): void {
+ // REMOVE EMPTY DIRECTORY
+ let path = Deno.makeTempDirSync() + "/dir/subdir";
+ Deno.mkdirSync(path);
+ let pathInfo = Deno.statSync(path);
+ assert(pathInfo.isDirectory()); // check exist first
+ Deno.removeSync(path, { recursive: true }); // remove
+ // We then check again after remove
+ let err;
+ try {
+ Deno.statSync(path);
+ } catch (e) {
+ err = e;
+ }
+ // Directory is gone
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+ // REMOVE NON-EMPTY DIRECTORY
+ path = Deno.makeTempDirSync() + "/dir/subdir";
+ const subPath = path + "/subsubdir";
+ Deno.mkdirSync(path);
+ Deno.mkdirSync(subPath);
+ pathInfo = Deno.statSync(path);
+ assert(pathInfo.isDirectory()); // check exist first
+ const subPathInfo = Deno.statSync(subPath);
+ assert(subPathInfo.isDirectory()); // check exist first
+ Deno.removeSync(path, { recursive: true }); // remove
+ // We then check parent directory again after remove
+ try {
+ Deno.statSync(path);
+ } catch (e) {
+ err = e;
+ }
+ // Directory is gone
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: true }, function removeAllSyncFileSuccess(): void {
+ // REMOVE FILE
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+ Deno.writeFileSync(filename, data, { perm: 0o666 });
+ const fileInfo = Deno.statSync(filename);
+ assert(fileInfo.isFile()); // check exist first
+ Deno.removeSync(filename, { recursive: true }); // remove
+ // We then check again after remove
+ let err;
+ try {
+ Deno.statSync(filename);
+ } catch (e) {
+ err = e;
+ }
+ // File is gone
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: true }, function removeAllSyncFail(): void {
+ // NON-EXISTENT DIRECTORY/FILE
+ let err;
+ try {
+ // Non-existent
+ Deno.removeSync("/baddir", { recursive: true });
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: false }, function removeAllSyncPerm(): void {
+ let err;
+ try {
+ Deno.removeSync("/baddir", { recursive: true });
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+// ASYNC
+
+testPerm({ write: true }, async function removeDirSuccess(): Promise<void> {
+ // REMOVE EMPTY DIRECTORY
+ const path = Deno.makeTempDirSync() + "/dir/subdir";
+ Deno.mkdirSync(path);
+ const pathInfo = Deno.statSync(path);
+ assert(pathInfo.isDirectory()); // check exist first
+ await Deno.remove(path); // remove
+ // We then check again after remove
+ let err;
+ try {
+ Deno.statSync(path);
+ } catch (e) {
+ err = e;
+ }
+ // Directory is gone
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: true }, async function removeFileSuccess(): Promise<void> {
+ // REMOVE FILE
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+ Deno.writeFileSync(filename, data, { perm: 0o666 });
+ const fileInfo = Deno.statSync(filename);
+ assert(fileInfo.isFile()); // check exist first
+ await Deno.remove(filename); // remove
+ // We then check again after remove
+ let err;
+ try {
+ Deno.statSync(filename);
+ } catch (e) {
+ err = e;
+ }
+ // File is gone
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: true }, async function removeFail(): Promise<void> {
+ // NON-EMPTY DIRECTORY
+ const path = Deno.makeTempDirSync() + "/dir/subdir";
+ const subPath = path + "/subsubdir";
+ Deno.mkdirSync(path);
+ Deno.mkdirSync(subPath);
+ const pathInfo = Deno.statSync(path);
+ assert(pathInfo.isDirectory()); // check exist first
+ const subPathInfo = Deno.statSync(subPath);
+ assert(subPathInfo.isDirectory()); // check exist first
+ let err;
+ try {
+ // Should not be able to recursively remove
+ await Deno.remove(path);
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.Other);
+ assertEquals(err.name, "Other");
+ // NON-EXISTENT DIRECTORY/FILE
+ try {
+ // Non-existent
+ await Deno.remove("/baddir");
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: false }, async function removePerm(): Promise<void> {
+ let err;
+ try {
+ await Deno.remove("/baddir");
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+testPerm({ write: true }, async function removeAllDirSuccess(): Promise<void> {
+ // REMOVE EMPTY DIRECTORY
+ let path = Deno.makeTempDirSync() + "/dir/subdir";
+ Deno.mkdirSync(path);
+ let pathInfo = Deno.statSync(path);
+ assert(pathInfo.isDirectory()); // check exist first
+ await Deno.remove(path, { recursive: true }); // remove
+ // We then check again after remove
+ let err;
+ try {
+ Deno.statSync(path);
+ } catch (e) {
+ err = e;
+ }
+ // Directory is gone
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+ // REMOVE NON-EMPTY DIRECTORY
+ path = Deno.makeTempDirSync() + "/dir/subdir";
+ const subPath = path + "/subsubdir";
+ Deno.mkdirSync(path);
+ Deno.mkdirSync(subPath);
+ pathInfo = Deno.statSync(path);
+ assert(pathInfo.isDirectory()); // check exist first
+ const subPathInfo = Deno.statSync(subPath);
+ assert(subPathInfo.isDirectory()); // check exist first
+ await Deno.remove(path, { recursive: true }); // remove
+ // We then check parent directory again after remove
+ try {
+ Deno.statSync(path);
+ } catch (e) {
+ err = e;
+ }
+ // Directory is gone
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: true }, async function removeAllFileSuccess(): Promise<void> {
+ // REMOVE FILE
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+ Deno.writeFileSync(filename, data, { perm: 0o666 });
+ const fileInfo = Deno.statSync(filename);
+ assert(fileInfo.isFile()); // check exist first
+ await Deno.remove(filename, { recursive: true }); // remove
+ // We then check again after remove
+ let err;
+ try {
+ Deno.statSync(filename);
+ } catch (e) {
+ err = e;
+ }
+ // File is gone
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: true }, async function removeAllFail(): Promise<void> {
+ // NON-EXISTENT DIRECTORY/FILE
+ let err;
+ try {
+ // Non-existent
+ await Deno.remove("/baddir", { recursive: true });
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+});
+
+testPerm({ write: false }, async function removeAllPerm(): Promise<void> {
+ let err;
+ try {
+ await Deno.remove("/baddir", { recursive: true });
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
diff --git a/cli/js/rename.ts b/cli/js/rename.ts
new file mode 100644
index 000000000..c906ce37b
--- /dev/null
+++ b/cli/js/rename.ts
@@ -0,0 +1,24 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+
+/** Synchronously renames (moves) `oldpath` to `newpath`. If `newpath` already
+ * exists and is not a directory, `renameSync()` replaces it. OS-specific
+ * restrictions may apply when `oldpath` and `newpath` are in different
+ * directories.
+ *
+ * Deno.renameSync("old/path", "new/path");
+ */
+export function renameSync(oldpath: string, newpath: string): void {
+ sendSync(dispatch.OP_RENAME, { oldpath, newpath });
+}
+
+/** Renames (moves) `oldpath` to `newpath`. If `newpath` already exists and is
+ * not a directory, `rename()` replaces it. OS-specific restrictions may apply
+ * when `oldpath` and `newpath` are in different directories.
+ *
+ * await Deno.rename("old/path", "new/path");
+ */
+export async function rename(oldpath: string, newpath: string): Promise<void> {
+ await sendAsync(dispatch.OP_RENAME, { oldpath, newpath });
+}
diff --git a/cli/js/rename_test.ts b/cli/js/rename_test.ts
new file mode 100644
index 000000000..43d02d419
--- /dev/null
+++ b/cli/js/rename_test.ts
@@ -0,0 +1,74 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert, assertEquals } from "./test_util.ts";
+
+testPerm({ read: true, write: true }, function renameSyncSuccess(): void {
+ const testDir = Deno.makeTempDirSync();
+ const oldpath = testDir + "/oldpath";
+ const newpath = testDir + "/newpath";
+ Deno.mkdirSync(oldpath);
+ Deno.renameSync(oldpath, newpath);
+ const newPathInfo = Deno.statSync(newpath);
+ assert(newPathInfo.isDirectory());
+
+ let caughtErr = false;
+ let oldPathInfo;
+
+ try {
+ oldPathInfo = Deno.statSync(oldpath);
+ } catch (e) {
+ caughtErr = true;
+ assertEquals(e.kind, Deno.ErrorKind.NotFound);
+ }
+ assert(caughtErr);
+ assertEquals(oldPathInfo, undefined);
+});
+
+testPerm({ read: false, write: true }, function renameSyncReadPerm(): void {
+ let err;
+ try {
+ const oldpath = "/oldbaddir";
+ const newpath = "/newbaddir";
+ Deno.renameSync(oldpath, newpath);
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+testPerm({ read: true, write: false }, function renameSyncWritePerm(): void {
+ let err;
+ try {
+ const oldpath = "/oldbaddir";
+ const newpath = "/newbaddir";
+ Deno.renameSync(oldpath, newpath);
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+testPerm({ read: true, write: true }, async function renameSuccess(): Promise<
+ void
+> {
+ const testDir = Deno.makeTempDirSync();
+ const oldpath = testDir + "/oldpath";
+ const newpath = testDir + "/newpath";
+ Deno.mkdirSync(oldpath);
+ await Deno.rename(oldpath, newpath);
+ const newPathInfo = Deno.statSync(newpath);
+ assert(newPathInfo.isDirectory());
+
+ let caughtErr = false;
+ let oldPathInfo;
+
+ try {
+ oldPathInfo = Deno.statSync(oldpath);
+ } catch (e) {
+ caughtErr = true;
+ assertEquals(e.kind, Deno.ErrorKind.NotFound);
+ }
+ assert(caughtErr);
+ assertEquals(oldPathInfo, undefined);
+});
diff --git a/cli/js/repl.ts b/cli/js/repl.ts
new file mode 100644
index 000000000..966e809e8
--- /dev/null
+++ b/cli/js/repl.ts
@@ -0,0 +1,197 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { close } from "./files.ts";
+import { exit } from "./os.ts";
+import { window } from "./window.ts";
+import { core } from "./core.ts";
+import { formatError } from "./format_error.ts";
+import { stringifyArgs } from "./console.ts";
+import * as dispatch from "./dispatch.ts";
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+
+const { console } = window;
+
+/**
+ * REPL logging.
+ * Used instead of console.log to avoid unwanted indentation.
+ */
+function replLog(...args: unknown[]): void {
+ core.print(stringifyArgs(args) + "\n");
+}
+
+/**
+ * REPL logging for errors.
+ * Used instead of console.error to avoid unwanted indentation.
+ */
+function replError(...args: unknown[]): void {
+ core.print(stringifyArgs(args) + "\n", true);
+}
+
+const helpMsg = [
+ "_ Get last evaluation result",
+ "_error Get last thrown error",
+ "exit Exit the REPL",
+ "help Print this help message"
+].join("\n");
+
+const replCommands = {
+ exit: {
+ get(): void {
+ exit(0);
+ }
+ },
+ help: {
+ get(): string {
+ return helpMsg;
+ }
+ }
+};
+
+function startRepl(historyFile: string): number {
+ return sendSync(dispatch.OP_REPL_START, { historyFile });
+}
+
+// @internal
+export async function readline(rid: number, prompt: string): Promise<string> {
+ return sendAsync(dispatch.OP_REPL_READLINE, { rid, prompt });
+}
+
+// Error messages that allow users to continue input
+// instead of throwing an error in the REPL.
+// ref: https://github.com/v8/v8/blob/master/src/message-template.h
+// TODO(kevinkassimo): this list might not be comprehensive
+const recoverableErrorMessages = [
+ "Unexpected end of input", // { or [ or (
+ "Missing initializer in const declaration", // const a
+ "Missing catch or finally after try", // try {}
+ "missing ) after argument list", // console.log(1
+ "Unterminated template literal" // `template
+  // TODO(kevinkassimo): need a parser to handle errors such as:
+ // "Missing } in template expression" // `${ or `${ a 123 }`
+];
+
+function isRecoverableError(e: Error): boolean {
+ return recoverableErrorMessages.includes(e.message);
+}
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+type Value = any;
+
+let lastEvalResult: Value = undefined;
+let lastThrownError: Value = undefined;
+
+// Evaluate code.
+// Returns true if code is consumed (no error/irrecoverable error).
+// Returns false if the error is recoverable.
+function evaluate(code: string): boolean {
+ const [result, errInfo] = core.evalContext(code);
+ if (!errInfo) {
+ lastEvalResult = result;
+ replLog(result);
+ } else if (errInfo.isCompileError && isRecoverableError(errInfo.thrown)) {
+ // Recoverable compiler error
+ return false; // don't consume code.
+ } else {
+ lastThrownError = errInfo.thrown;
+ if (errInfo.isNativeError) {
+ const formattedError = formatError(
+ core.errorToJSON(errInfo.thrown as Error)
+ );
+ replError(formattedError);
+ } else {
+ replError("Thrown:", errInfo.thrown);
+ }
+ }
+ return true;
+}
+
+// @internal
+export async function replLoop(): Promise<void> {
+ Object.defineProperties(window, replCommands);
+
+ const historyFile = "deno_history.txt";
+ const rid = startRepl(historyFile);
+
+ const quitRepl = (exitCode: number): void => {
+ // Special handling in case user calls deno.close(3).
+ try {
+ close(rid); // close signals Drop on REPL and saves history.
+ } catch {}
+ exit(exitCode);
+ };
+
+ // Configure window._ to give the last evaluation result.
+ Object.defineProperty(window, "_", {
+ configurable: true,
+ get: (): Value => lastEvalResult,
+ set: (value: Value): Value => {
+ Object.defineProperty(window, "_", {
+ value: value,
+ writable: true,
+ enumerable: true,
+ configurable: true
+ });
+ console.log("Last evaluation result is no longer saved to _.");
+ }
+ });
+
+ // Configure window._error to give the last thrown error.
+ Object.defineProperty(window, "_error", {
+ configurable: true,
+ get: (): Value => lastThrownError,
+ set: (value: Value): Value => {
+ Object.defineProperty(window, "_error", {
+ value: value,
+ writable: true,
+ enumerable: true,
+ configurable: true
+ });
+ console.log("Last thrown error is no longer saved to _error.");
+ }
+ });
+
+ while (true) {
+ let code = "";
+ // Top level read
+ try {
+ code = await readline(rid, "> ");
+ if (code.trim() === "") {
+ continue;
+ }
+ } catch (err) {
+ if (err.message === "EOF") {
+ quitRepl(0);
+ } else {
+ // If interrupted, don't print error.
+ if (err.message !== "Interrupted") {
+ // e.g. this happens when we have deno.close(3).
+ // We want to display the problem.
+ const formattedError = formatError(core.errorToJSON(err));
+ replError(formattedError);
+ }
+      // Quit REPL anyway.
+ quitRepl(1);
+ }
+ }
+ // Start continued read
+ while (!evaluate(code)) {
+ code += "\n";
+ try {
+ code += await readline(rid, " ");
+ } catch (err) {
+ // If interrupted on continued read,
+ // abort this read instead of quitting.
+ if (err.message === "Interrupted") {
+ break;
+ } else if (err.message === "EOF") {
+ quitRepl(0);
+ } else {
+ // e.g. this happens when we have deno.close(3).
+ // We want to display the problem.
+ const formattedError = formatError(core.errorToJSON(err));
+ replError(formattedError);
+ quitRepl(1);
+ }
+ }
+ }
+ }
+}
diff --git a/cli/js/request.ts b/cli/js/request.ts
new file mode 100644
index 000000000..0c77b8854
--- /dev/null
+++ b/cli/js/request.ts
@@ -0,0 +1,151 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as headers from "./headers.ts";
+import * as body from "./body.ts";
+import * as domTypes from "./dom_types.ts";
+
+const { Headers } = headers;
+
+function byteUpperCase(s: string): string {
+ return String(s).replace(/[a-z]/g, function byteUpperCaseReplace(c): string {
+ return c.toUpperCase();
+ });
+}
+
+function normalizeMethod(m: string): string {
+ const u = byteUpperCase(m);
+ if (
+ u === "DELETE" ||
+ u === "GET" ||
+ u === "HEAD" ||
+ u === "OPTIONS" ||
+ u === "POST" ||
+ u === "PUT"
+ ) {
+ return u;
+ }
+ return m;
+}
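+// For example:
+//   normalizeMethod("get")   // -> "GET"   (known method, byte-uppercased)
+//   normalizeMethod("patch") // -> "patch" (not in the list above, returned as-is)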
+
+/**
+ * An HTTP request
+ * @param {domTypes.RequestInfo} input
+ * @param {domTypes.RequestInit} [init]
+ */
+export class Request extends body.Body implements domTypes.Request {
+ public method: string;
+ public url: string;
+ public credentials?: "omit" | "same-origin" | "include";
+ public headers: domTypes.Headers;
+
+ constructor(input: domTypes.RequestInfo, init?: domTypes.RequestInit) {
+ if (arguments.length < 1) {
+ throw TypeError("Not enough arguments");
+ }
+
+ if (!init) {
+ init = {};
+ }
+
+ let b: body.BodySource;
+
+ // prefer body from init
+ if (init.body) {
+ b = init.body;
+ } else if (input instanceof Request && input._bodySource) {
+ if (input.bodyUsed) {
+ throw TypeError(body.BodyUsedError);
+ }
+ b = input._bodySource;
+ } else if (typeof input === "object" && "body" in input && input.body) {
+ if (input.bodyUsed) {
+ throw TypeError(body.BodyUsedError);
+ }
+ b = input.body;
+ } else {
+ b = "";
+ }
+
+ let headers: domTypes.Headers;
+
+ // prefer headers from init
+ if (init.headers) {
+ headers = new Headers(init.headers);
+ } else if (input instanceof Request) {
+ headers = input.headers;
+ } else {
+ headers = new Headers();
+ }
+
+ const contentType = headers.get("content-type") || "";
+ super(b, contentType);
+ this.headers = headers;
+
+ // readonly attribute ByteString method;
+ /**
+ * The HTTP request method
+ * @readonly
+ * @default GET
+ * @type {string}
+ */
+ this.method = "GET";
+
+ // readonly attribute USVString url;
+ /**
+ * The request URL
+ * @readonly
+ * @type {string}
+ */
+ this.url = "";
+
+ // readonly attribute RequestCredentials credentials;
+ this.credentials = "omit";
+
+ if (input instanceof Request) {
+ if (input.bodyUsed) {
+ throw TypeError(body.BodyUsedError);
+ }
+ this.method = input.method;
+ this.url = input.url;
+ this.headers = new Headers(input.headers);
+ this.credentials = input.credentials;
+ this._stream = input._stream;
+ } else if (typeof input === "string") {
+ this.url = input;
+ }
+
+ if (init && "method" in init) {
+ this.method = normalizeMethod(init.method as string);
+ }
+
+ if (
+ init &&
+ "credentials" in init &&
+ init.credentials &&
+ ["omit", "same-origin", "include"].indexOf(init.credentials) !== -1
+ ) {
+ this.credentials = init.credentials;
+ }
+ }
+
+ public clone(): domTypes.Request {
+ if (this.bodyUsed) {
+ throw TypeError(body.BodyUsedError);
+ }
+
+ const iterators = this.headers.entries();
+ const headersList: Array<[string, string]> = [];
+ for (const header of iterators) {
+ headersList.push(header);
+ }
+
+ const body2 = this._bodySource;
+
+ const cloned = new Request(this.url, {
+ body: body2,
+ method: this.method,
+ headers: new Headers(headersList),
+ credentials: this.credentials
+ });
+ return cloned;
+ }
+}
diff --git a/cli/js/request_test.ts b/cli/js/request_test.ts
new file mode 100644
index 000000000..e9e1f5164
--- /dev/null
+++ b/cli/js/request_test.ts
@@ -0,0 +1,17 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assertEquals } from "./test_util.ts";
+
+test(function fromInit(): void {
+ const req = new Request("https://example.com", {
+ body: "ahoyhoy",
+ method: "POST",
+ headers: {
+ "test-header": "value"
+ }
+ });
+
+ // @ts-ignore
+ assertEquals("ahoyhoy", req._bodySource);
+ assertEquals(req.url, "https://example.com");
+ assertEquals(req.headers.get("test-header"), "value");
+});
diff --git a/cli/js/resources.ts b/cli/js/resources.ts
new file mode 100644
index 000000000..27598ce09
--- /dev/null
+++ b/cli/js/resources.ts
@@ -0,0 +1,19 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as dispatch from "./dispatch.ts";
+import { sendSync } from "./dispatch_json.ts";
+
+export interface ResourceMap {
+ [rid: number]: string;
+}
+
+/** Returns a map of open _file-like_ resource ids along with their string
+ * representation.
+ */
+export function resources(): ResourceMap {
+ const res = sendSync(dispatch.OP_RESOURCES) as Array<[number, string]>;
+ const resources: ResourceMap = {};
+ for (const resourceTuple of res) {
+ resources[resourceTuple[0]] = resourceTuple[1];
+ }
+ return resources;
+}
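+// Illustration (see resources_test.ts below): in a fresh process the map looks
+// something like { 0: "stdin", 1: "stdout", 2: "stderr" }.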
diff --git a/cli/js/resources_test.ts b/cli/js/resources_test.ts
new file mode 100644
index 000000000..753ef3e17
--- /dev/null
+++ b/cli/js/resources_test.ts
@@ -0,0 +1,48 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, testPerm, assertEquals } from "./test_util.ts";
+
+test(function resourcesStdio(): void {
+ const res = Deno.resources();
+
+ assertEquals(res[0], "stdin");
+ assertEquals(res[1], "stdout");
+ assertEquals(res[2], "stderr");
+});
+
+testPerm({ net: true }, async function resourcesNet(): Promise<void> {
+ const listener = Deno.listen({ port: 4501 });
+ const dialerConn = await Deno.dial({ port: 4501 });
+ const listenerConn = await listener.accept();
+
+ const res = Deno.resources();
+ assertEquals(
+ Object.values(res).filter((r): boolean => r === "tcpListener").length,
+ 1
+ );
+ assertEquals(
+ Object.values(res).filter((r): boolean => r === "tcpStream").length,
+ 2
+ );
+
+ listenerConn.close();
+ dialerConn.close();
+ listener.close();
+});
+
+testPerm({ read: true }, async function resourcesFile(): Promise<void> {
+ const resourcesBefore = Deno.resources();
+ await Deno.open("tests/hello.txt");
+ const resourcesAfter = Deno.resources();
+
+ // check that exactly one new resource (file) was added
+ assertEquals(
+ Object.keys(resourcesAfter).length,
+ Object.keys(resourcesBefore).length + 1
+ );
+ const newRid = Object.keys(resourcesAfter).find(
+ (rid): boolean => {
+ return !resourcesBefore.hasOwnProperty(rid);
+ }
+ );
+ assertEquals(resourcesAfter[newRid], "fsFile");
+});
diff --git a/cli/js/stat.ts b/cli/js/stat.ts
new file mode 100644
index 000000000..1f53e6f7b
--- /dev/null
+++ b/cli/js/stat.ts
@@ -0,0 +1,73 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+import { FileInfo, FileInfoImpl } from "./file_info.ts";
+
+export interface StatResponse {
+ isFile: boolean;
+ isSymlink: boolean;
+ len: number;
+ modified: number;
+ accessed: number;
+ created: number;
+ mode: number;
+ hasMode: boolean; // false on windows
+ name: string | null;
+}
+
+/** Queries the file system for information on the path provided. If the given
+ * path is a symlink, information about the symlink will be returned.
+ *
+ * const fileInfo = await Deno.lstat("hello.txt");
+ * assert(fileInfo.isFile());
+ */
+export async function lstat(filename: string): Promise<FileInfo> {
+ const res = (await sendAsync(dispatch.OP_STAT, {
+ filename,
+ lstat: true
+ })) as StatResponse;
+ return new FileInfoImpl(res);
+}
+
+/** Queries the file system for information on the path provided synchronously.
+ * If the given path is a symlink, information about the symlink will be
+ * returned.
+ *
+ * const fileInfo = Deno.lstatSync("hello.txt");
+ * assert(fileInfo.isFile());
+ */
+export function lstatSync(filename: string): FileInfo {
+ const res = sendSync(dispatch.OP_STAT, {
+ filename,
+ lstat: true
+ }) as StatResponse;
+ return new FileInfoImpl(res);
+}
+
+/** Queries the file system for information on the path provided. `stat` will
+ * always follow symlinks.
+ *
+ * const fileInfo = await Deno.stat("hello.txt");
+ * assert(fileInfo.isFile());
+ */
+export async function stat(filename: string): Promise<FileInfo> {
+ const res = (await sendAsync(dispatch.OP_STAT, {
+ filename,
+ lstat: false
+ })) as StatResponse;
+ return new FileInfoImpl(res);
+}
+
+/** Queries the file system for information on the path provided synchronously.
+ * `statSync` will always follow symlinks.
+ *
+ * const fileInfo = Deno.statSync("hello.txt");
+ * assert(fileInfo.isFile());
+ */
+export function statSync(filename: string): FileInfo {
+ const res = sendSync(dispatch.OP_STAT, {
+ filename,
+ lstat: false
+ }) as StatResponse;
+ return new FileInfoImpl(res);
+}
diff --git a/cli/js/stat_test.ts b/cli/js/stat_test.ts
new file mode 100644
index 000000000..1542f1080
--- /dev/null
+++ b/cli/js/stat_test.ts
@@ -0,0 +1,172 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert, assertEquals } from "./test_util.ts";
+
+// TODO Add tests for modified, accessed, and created fields once there is a way
+// to create temp files.
+testPerm({ read: true }, async function statSyncSuccess(): Promise<void> {
+ const packageInfo = Deno.statSync("package.json");
+ assert(packageInfo.isFile());
+ assert(!packageInfo.isSymlink());
+
+ const modulesInfo = Deno.statSync("node_modules");
+ assert(modulesInfo.isDirectory());
+ assert(!modulesInfo.isSymlink());
+
+ const testsInfo = Deno.statSync("tests");
+ assert(testsInfo.isDirectory());
+ assert(!testsInfo.isSymlink());
+});
+
+testPerm({ read: false }, async function statSyncPerm(): Promise<void> {
+ let caughtError = false;
+ try {
+ Deno.statSync("package.json");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ read: true }, async function statSyncNotFound(): Promise<void> {
+ let caughtError = false;
+ let badInfo;
+
+ try {
+ badInfo = Deno.statSync("bad_file_name");
+ } catch (err) {
+ caughtError = true;
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+ }
+
+ assert(caughtError);
+ assertEquals(badInfo, undefined);
+});
+
+testPerm({ read: true }, async function lstatSyncSuccess(): Promise<void> {
+ const packageInfo = Deno.lstatSync("package.json");
+ assert(packageInfo.isFile());
+ assert(!packageInfo.isSymlink());
+
+ const modulesInfo = Deno.lstatSync("node_modules");
+ assert(!modulesInfo.isDirectory());
+ assert(modulesInfo.isSymlink());
+
+ const i = Deno.lstatSync("website");
+ assert(i.isDirectory());
+ assert(!i.isSymlink());
+});
+
+testPerm({ read: false }, async function lstatSyncPerm(): Promise<void> {
+ let caughtError = false;
+ try {
+ Deno.lstatSync("package.json");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ read: true }, async function lstatSyncNotFound(): Promise<void> {
+ let caughtError = false;
+ let badInfo;
+
+ try {
+ badInfo = Deno.lstatSync("bad_file_name");
+ } catch (err) {
+ caughtError = true;
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+ }
+
+ assert(caughtError);
+ assertEquals(badInfo, undefined);
+});
+
+testPerm({ read: true }, async function statSuccess(): Promise<void> {
+ const packageInfo = await Deno.stat("package.json");
+ assert(packageInfo.isFile());
+ assert(!packageInfo.isSymlink());
+
+ const modulesInfo = await Deno.stat("node_modules");
+ assert(modulesInfo.isDirectory());
+ assert(!modulesInfo.isSymlink());
+
+ const i = await Deno.stat("tests");
+ assert(i.isDirectory());
+ assert(!i.isSymlink());
+});
+
+testPerm({ read: false }, async function statPerm(): Promise<void> {
+ let caughtError = false;
+ try {
+ await Deno.stat("package.json");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ read: true }, async function statNotFound(): Promise<void> {
+ let caughtError = false;
+ let badInfo;
+
+ try {
+ badInfo = await Deno.stat("bad_file_name");
+ } catch (err) {
+ caughtError = true;
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+ }
+
+ assert(caughtError);
+ assertEquals(badInfo, undefined);
+});
+
+testPerm({ read: true }, async function lstatSuccess(): Promise<void> {
+ const packageInfo = await Deno.lstat("package.json");
+ assert(packageInfo.isFile());
+ assert(!packageInfo.isSymlink());
+
+ const modulesInfo = await Deno.lstat("node_modules");
+ assert(!modulesInfo.isDirectory());
+ assert(modulesInfo.isSymlink());
+
+ const i = await Deno.lstat("website");
+ assert(i.isDirectory());
+ assert(!i.isSymlink());
+});
+
+testPerm({ read: false }, async function lstatPerm(): Promise<void> {
+ let caughtError = false;
+ try {
+ await Deno.lstat("package.json");
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ read: true }, async function lstatNotFound(): Promise<void> {
+ let caughtError = false;
+ let badInfo;
+
+ try {
+ badInfo = await Deno.lstat("bad_file_name");
+ } catch (err) {
+ caughtError = true;
+ assertEquals(err.kind, Deno.ErrorKind.NotFound);
+ assertEquals(err.name, "NotFound");
+ }
+
+ assert(caughtError);
+ assertEquals(badInfo, undefined);
+});
diff --git a/cli/js/symlink.ts b/cli/js/symlink.ts
new file mode 100644
index 000000000..21ebb2f59
--- /dev/null
+++ b/cli/js/symlink.ts
@@ -0,0 +1,39 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+import * as util from "./util.ts";
+import { build } from "./build.ts";
+
+/** Synchronously creates `newname` as a symbolic link to `oldname`. The type
+ * argument can be set to `dir` or `file` and is only available on Windows
+ * (ignored on other platforms).
+ *
+ * Deno.symlinkSync("old/name", "new/name");
+ */
+export function symlinkSync(
+ oldname: string,
+ newname: string,
+ type?: string
+): void {
+ if (build.os === "win" && type) {
+ return util.notImplemented();
+ }
+ sendSync(dispatch.OP_SYMLINK, { oldname, newname });
+}
+
+/** Creates `newname` as a symbolic link to `oldname`. The type argument can be
+ * set to `dir` or `file` and is only available on Windows (ignored on other
+ * platforms).
+ *
+ * await Deno.symlink("old/name", "new/name");
+ */
+export async function symlink(
+ oldname: string,
+ newname: string,
+ type?: string
+): Promise<void> {
+ if (build.os === "win" && type) {
+ return util.notImplemented();
+ }
+ await sendAsync(dispatch.OP_SYMLINK, { oldname, newname });
+}
diff --git a/cli/js/symlink_test.ts b/cli/js/symlink_test.ts
new file mode 100644
index 000000000..bce1f6ae5
--- /dev/null
+++ b/cli/js/symlink_test.ts
@@ -0,0 +1,80 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, testPerm, assert, assertEquals } from "./test_util.ts";
+
+testPerm({ read: true, write: true }, function symlinkSyncSuccess(): void {
+ const testDir = Deno.makeTempDirSync();
+ const oldname = testDir + "/oldname";
+ const newname = testDir + "/newname";
+ Deno.mkdirSync(oldname);
+ let errOnWindows;
+ // Just for now, until we implement symlink for Windows.
+ try {
+ Deno.symlinkSync(oldname, newname);
+ } catch (e) {
+ errOnWindows = e;
+ }
+ if (errOnWindows) {
+ assertEquals(Deno.build.os, "win");
+ assertEquals(errOnWindows.kind, Deno.ErrorKind.Other);
+ assertEquals(errOnWindows.message, "Not implemented");
+ } else {
+ const newNameInfoLStat = Deno.lstatSync(newname);
+ const newNameInfoStat = Deno.statSync(newname);
+ assert(newNameInfoLStat.isSymlink());
+ assert(newNameInfoStat.isDirectory());
+ }
+});
+
+test(function symlinkSyncPerm(): void {
+ let err;
+ try {
+ Deno.symlinkSync("oldbaddir", "newbaddir");
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+// Just for now, until we implement symlink for Windows.
+// Symlink with a type argument should succeed on other platforms (the type is ignored).
+testPerm({ write: true }, function symlinkSyncNotImplemented(): void {
+ const testDir = Deno.makeTempDirSync();
+ const oldname = testDir + "/oldname";
+ const newname = testDir + "/newname";
+ let err;
+ try {
+ Deno.symlinkSync(oldname, newname, "dir");
+ } catch (e) {
+ err = e;
+ }
+ if (err) {
+ assertEquals(Deno.build.os, "win");
+ assertEquals(err.message, "Not implemented");
+ }
+});
+
+testPerm({ read: true, write: true }, async function symlinkSuccess(): Promise<
+ void
+> {
+ const testDir = Deno.makeTempDirSync();
+ const oldname = testDir + "/oldname";
+ const newname = testDir + "/newname";
+ Deno.mkdirSync(oldname);
+ let errOnWindows;
+ // Just for now, until we implement symlink for Windows.
+ try {
+ await Deno.symlink(oldname, newname);
+ } catch (e) {
+ errOnWindows = e;
+ }
+ if (errOnWindows) {
+ assertEquals(errOnWindows.kind, Deno.ErrorKind.Other);
+ assertEquals(errOnWindows.message, "Not implemented");
+ } else {
+ const newNameInfoLStat = Deno.lstatSync(newname);
+ const newNameInfoStat = Deno.statSync(newname);
+ assert(newNameInfoLStat.isSymlink());
+ assert(newNameInfoStat.isDirectory());
+ }
+});
diff --git a/cli/js/test_util.ts b/cli/js/test_util.ts
new file mode 100644
index 000000000..2f2916e11
--- /dev/null
+++ b/cli/js/test_util.ts
@@ -0,0 +1,262 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+//
+// We want to test many ops in deno which have different behavior depending on
+// the permissions set. These tests can specify which permissions they expect,
+// which appends a special string like "permW1N0" to the end of the test name.
+// Here we run several copies of deno with different permissions, filtering the
+// tests by the special string. permW1N0 means allow-write but not allow-net.
+// See tools/unit_tests.py for more details.
+
+import * as testing from "../../std/testing/mod.ts";
+import { assert, assertEquals } from "../../std/testing/asserts.ts";
+export {
+ assert,
+ assertThrows,
+ assertEquals,
+ assertMatch,
+ assertNotEquals,
+ assertStrictEq,
+ assertStrContains,
+ unreachable
+} from "../../std/testing/asserts.ts";
+
+interface TestPermissions {
+ read?: boolean;
+ write?: boolean;
+ net?: boolean;
+ env?: boolean;
+ run?: boolean;
+ hrtime?: boolean;
+}
+
+const processPerms = Deno.permissions();
+
+function permissionsMatch(
+ processPerms: Deno.Permissions,
+ requiredPerms: Deno.Permissions
+): boolean {
+ for (const permName in processPerms) {
+ if (processPerms[permName] !== requiredPerms[permName]) {
+ return false;
+ }
+ }
+
+ return true;
+}
+
+export const permissionCombinations: Map<string, Deno.Permissions> = new Map();
+
+function permToString(perms: Deno.Permissions): string {
+ const r = perms.read ? 1 : 0;
+ const w = perms.write ? 1 : 0;
+ const n = perms.net ? 1 : 0;
+ const e = perms.env ? 1 : 0;
+ const u = perms.run ? 1 : 0;
+ const h = perms.hrtime ? 1 : 0;
+ return `permR${r}W${w}N${n}E${e}U${u}H${h}`;
+}
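+// e.g. permToString({ read: true, write: true, net: false, env: false,
+// run: false, hrtime: false }) === "permR1W1N0E0U0H0".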
+
+function registerPermCombination(perms: Deno.Permissions): void {
+ const key = permToString(perms);
+ if (!permissionCombinations.has(key)) {
+ permissionCombinations.set(key, perms);
+ }
+}
+
+function normalizeTestPermissions(perms: TestPermissions): Deno.Permissions {
+ return {
+ read: !!perms.read,
+ write: !!perms.write,
+ net: !!perms.net,
+ run: !!perms.run,
+ env: !!perms.env,
+ hrtime: !!perms.hrtime
+ };
+}
+
+export function testPerm(
+ perms: TestPermissions,
+ fn: testing.TestFunction
+): void {
+ const normalizedPerms = normalizeTestPermissions(perms);
+
+ registerPermCombination(normalizedPerms);
+
+ if (!permissionsMatch(processPerms, normalizedPerms)) {
+ return;
+ }
+
+ testing.test(fn);
+}
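+// Usage sketch (the test name below is just a placeholder):
+//   testPerm({ net: true }, async function someNetTest(): Promise<void> { ... });
+// The test is only registered in the process whose permissions match exactly.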
+
+export function test(fn: testing.TestFunction): void {
+ testPerm(
+ {
+ read: false,
+ write: false,
+ net: false,
+ env: false,
+ run: false,
+ hrtime: false
+ },
+ fn
+ );
+}
+
+function extractNumber(re: RegExp, str: string): number | undefined {
+ const match = str.match(re);
+
+ if (match) {
+ return Number.parseInt(match[1]);
+ }
+}
+
+export function parseUnitTestOutput(
+ rawOutput: Uint8Array,
+ print: boolean
+): { actual?: number; expected?: number; resultOutput?: string } {
+ const decoder = new TextDecoder();
+ const output = decoder.decode(rawOutput);
+
+ let expected, actual, result;
+
+ for (const line of output.split("\n")) {
+ if (!expected) {
+ // expect "running 30 tests"
+ expected = extractNumber(/running (\d+) tests/, line);
+ } else if (line.indexOf("test result:") !== -1) {
+ result = line;
+ }
+
+ if (print) {
+ console.log(line);
+ }
+ }
+
+ // Check that the number of expected tests equals what was reported at the
+ // bottom.
+ if (result) {
+ // result should be a string like this:
+ // "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; ..."
+ actual = extractNumber(/(\d+) passed/, result);
+ }
+
+ return { actual, expected, resultOutput: result };
+}
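+// Illustration: for output containing "running 96 tests" and a line
+// "test result: ok. 96 passed; 0 failed; ...", this returns
+// { actual: 96, expected: 96, resultOutput: "test result: ok. 96 passed; ..." }.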
+
+test(function permissionsMatches(): void {
+ assert(
+ permissionsMatch(
+ {
+ read: true,
+ write: false,
+ net: false,
+ env: false,
+ run: false,
+ hrtime: false
+ },
+ normalizeTestPermissions({ read: true })
+ )
+ );
+
+ assert(
+ permissionsMatch(
+ {
+ read: false,
+ write: false,
+ net: false,
+ env: false,
+ run: false,
+ hrtime: false
+ },
+ normalizeTestPermissions({})
+ )
+ );
+
+ assertEquals(
+ permissionsMatch(
+ {
+ read: false,
+ write: true,
+ net: true,
+ env: true,
+ run: true,
+ hrtime: true
+ },
+ normalizeTestPermissions({ read: true })
+ ),
+ false
+ );
+
+ assertEquals(
+ permissionsMatch(
+ {
+ read: true,
+ write: false,
+ net: true,
+ env: false,
+ run: false,
+ hrtime: false
+ },
+ normalizeTestPermissions({ read: true })
+ ),
+ false
+ );
+
+ assert(
+ permissionsMatch(
+ {
+ read: true,
+ write: true,
+ net: true,
+ env: true,
+ run: true,
+ hrtime: true
+ },
+ {
+ read: true,
+ write: true,
+ net: true,
+ env: true,
+ run: true,
+ hrtime: true
+ }
+ )
+ );
+});
+
+testPerm({ read: true }, async function parsingUnitTestOutput(): Promise<void> {
+ const cwd = Deno.cwd();
+ const testDataPath = `${cwd}/tools/testdata/`;
+
+ let result;
+
+ // This is an example of a successful unit test output.
+ result = parseUnitTestOutput(
+ await Deno.readFile(`${testDataPath}/unit_test_output1.txt`),
+ false
+ );
+ assertEquals(result.actual, 96);
+ assertEquals(result.expected, 96);
+
+ // This is an example of a silently dying unit test.
+ result = parseUnitTestOutput(
+ await Deno.readFile(`${testDataPath}/unit_test_output2.txt`),
+ false
+ );
+ assertEquals(result.actual, undefined);
+ assertEquals(result.expected, 96);
+
+ // This is an example of compiling before successful unit tests.
+ result = parseUnitTestOutput(
+ await Deno.readFile(`${testDataPath}/unit_test_output3.txt`),
+ false
+ );
+ assertEquals(result.actual, 96);
+ assertEquals(result.expected, 96);
+
+ // Check what happens on empty output.
+ result = parseUnitTestOutput(new TextEncoder().encode("\n\n\n"), false);
+ assertEquals(result.actual, undefined);
+ assertEquals(result.expected, undefined);
+});
diff --git a/cli/js/text_encoding.ts b/cli/js/text_encoding.ts
new file mode 100644
index 000000000..8386ff8b0
--- /dev/null
+++ b/cli/js/text_encoding.ts
@@ -0,0 +1,554 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+// The following code is based off of text-encoding at:
+// https://github.com/inexorabletash/text-encoding
+//
+// Anyone is free to copy, modify, publish, use, compile, sell, or
+// distribute this software, either in source code form or as a compiled
+// binary, for any purpose, commercial or non-commercial, and by any
+// means.
+//
+// In jurisdictions that recognize copyright laws, the author or authors
+// of this software dedicate any and all copyright interest in the
+// software to the public domain. We make this dedication for the benefit
+// of the public at large and to the detriment of our heirs and
+// successors. We intend this dedication to be an overt act of
+// relinquishment in perpetuity of all present and future rights to this
+// software under copyright law.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+// IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+// OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+// OTHER DEALINGS IN THE SOFTWARE.
+
+import * as base64 from "./base64.ts";
+import * as domTypes from "./dom_types.ts";
+import { DenoError, ErrorKind } from "./errors.ts";
+
+const CONTINUE = null;
+const END_OF_STREAM = -1;
+const FINISHED = -1;
+
+function decoderError(fatal: boolean): number | never {
+ if (fatal) {
+ throw new TypeError("Decoder error.");
+ }
+ return 0xfffd; // default code point
+}
+
+function inRange(a: number, min: number, max: number): boolean {
+ return min <= a && a <= max;
+}
+
+function isASCIIByte(a: number): boolean {
+ return inRange(a, 0x00, 0x7f);
+}
+
+function stringToCodePoints(input: string): number[] {
+ const u: number[] = [];
+ for (const c of input) {
+ u.push(c.codePointAt(0)!);
+ }
+ return u;
+}
+
+class UTF8Decoder implements Decoder {
+ private _codePoint = 0;
+ private _bytesSeen = 0;
+ private _bytesNeeded = 0;
+ private _fatal: boolean;
+ private _ignoreBOM: boolean;
+ private _lowerBoundary = 0x80;
+ private _upperBoundary = 0xbf;
+
+ constructor(options: DecoderOptions) {
+ this._fatal = options.fatal || false;
+ this._ignoreBOM = options.ignoreBOM || false;
+ }
+
+ handler(stream: Stream, byte: number): number | null {
+ if (byte === END_OF_STREAM && this._bytesNeeded !== 0) {
+ this._bytesNeeded = 0;
+ return decoderError(this._fatal);
+ }
+
+ if (byte === END_OF_STREAM) {
+ return FINISHED;
+ }
+
+ if (this._ignoreBOM) {
+ if (
+ (this._bytesSeen === 0 && byte !== 0xef) ||
+ (this._bytesSeen === 1 && byte !== 0xbb)
+ ) {
+ this._ignoreBOM = false;
+ }
+
+ if (this._bytesSeen === 2) {
+ this._ignoreBOM = false;
+ if (byte === 0xbf) {
+          // Ignore BOM
+ this._codePoint = 0;
+ this._bytesNeeded = 0;
+ this._bytesSeen = 0;
+ return CONTINUE;
+ }
+ }
+ }
+
+ if (this._bytesNeeded === 0) {
+ if (isASCIIByte(byte)) {
+ // Single byte code point
+ return byte;
+ } else if (inRange(byte, 0xc2, 0xdf)) {
+ // Two byte code point
+ this._bytesNeeded = 1;
+ this._codePoint = byte & 0x1f;
+ } else if (inRange(byte, 0xe0, 0xef)) {
+ // Three byte code point
+ if (byte === 0xe0) {
+ this._lowerBoundary = 0xa0;
+ } else if (byte === 0xed) {
+ this._upperBoundary = 0x9f;
+ }
+ this._bytesNeeded = 2;
+ this._codePoint = byte & 0xf;
+ } else if (inRange(byte, 0xf0, 0xf4)) {
+ if (byte === 0xf0) {
+ this._lowerBoundary = 0x90;
+ } else if (byte === 0xf4) {
+ this._upperBoundary = 0x8f;
+ }
+ this._bytesNeeded = 3;
+ this._codePoint = byte & 0x7;
+ } else {
+ return decoderError(this._fatal);
+ }
+ return CONTINUE;
+ }
+
+ if (!inRange(byte, this._lowerBoundary, this._upperBoundary)) {
+ // Byte out of range, so encoding error
+ this._codePoint = 0;
+ this._bytesNeeded = 0;
+ this._bytesSeen = 0;
+ stream.prepend(byte);
+ return decoderError(this._fatal);
+ }
+
+ this._lowerBoundary = 0x80;
+ this._upperBoundary = 0xbf;
+
+ this._codePoint = (this._codePoint << 6) | (byte & 0x3f);
+
+ this._bytesSeen++;
+
+ if (this._bytesSeen !== this._bytesNeeded) {
+ return CONTINUE;
+ }
+
+ const codePoint = this._codePoint;
+
+ this._codePoint = 0;
+ this._bytesNeeded = 0;
+ this._bytesSeen = 0;
+
+ return codePoint;
+ }
+}
+
+class UTF8Encoder implements Encoder {
+ handler(codePoint: number): number | number[] {
+ if (codePoint === END_OF_STREAM) {
+ return FINISHED;
+ }
+
+ if (inRange(codePoint, 0x00, 0x7f)) {
+ return codePoint;
+ }
+
+ let count: number;
+ let offset: number;
+ if (inRange(codePoint, 0x0080, 0x07ff)) {
+ count = 1;
+ offset = 0xc0;
+ } else if (inRange(codePoint, 0x0800, 0xffff)) {
+ count = 2;
+ offset = 0xe0;
+ } else if (inRange(codePoint, 0x10000, 0x10ffff)) {
+ count = 3;
+ offset = 0xf0;
+ } else {
+ throw TypeError(`Code point out of range: \\x${codePoint.toString(16)}`);
+ }
+
+ const bytes = [(codePoint >> (6 * count)) + offset];
+
+ while (count > 0) {
+ const temp = codePoint >> (6 * (count - 1));
+ bytes.push(0x80 | (temp & 0x3f));
+ count--;
+ }
+
+ return bytes;
+ }
+}
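+// e.g. the handler turns U+20AC (€) into [0xe2, 0x82, 0xac], while an ASCII
+// code point such as 0x41 ("A") is returned as-is.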
+
+/** Decodes a string of data which has been encoded using base-64. */
+export function atob(s: string): string {
+ s = String(s);
+ s = s.replace(/[\t\n\f\r ]/g, "");
+
+ if (s.length % 4 === 0) {
+ s = s.replace(/==?$/, "");
+ }
+
+ const rem = s.length % 4;
+ if (rem === 1 || /[^+/0-9A-Za-z]/.test(s)) {
+ // TODO: throw `DOMException`
+ throw new DenoError(
+ ErrorKind.InvalidInput,
+ "The string to be decoded is not correctly encoded"
+ );
+ }
+
+  // base64-js requires the length to be an exact multiple of 4
+ if (rem > 0) {
+ s = s.padEnd(s.length + (4 - rem), "=");
+ }
+
+ const byteArray: Uint8Array = base64.toByteArray(s);
+ let result = "";
+ for (let i = 0; i < byteArray.length; i++) {
+ result += String.fromCharCode(byteArray[i]);
+ }
+ return result;
+}
+
+/** Creates a base-64 ASCII string from the input string. */
+export function btoa(s: string): string {
+ const byteArray = [];
+ for (let i = 0; i < s.length; i++) {
+ const charCode = s[i].charCodeAt(0);
+ if (charCode > 0xff) {
+ throw new DenoError(
+ ErrorKind.InvalidInput,
+ "The string to be encoded contains characters " +
+ "outside of the Latin1 range."
+ );
+ }
+ byteArray.push(charCode);
+ }
+ const result = base64.fromByteArray(Uint8Array.from(byteArray));
+ return result;
+}
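+// Round-trip example (mirrored in text_encoding_test.ts):
+//   btoa("hello world") === "aGVsbG8gd29ybGQ="
+//   atob("aGVsbG8gd29ybGQ=") === "hello world"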
+
+interface DecoderOptions {
+ fatal?: boolean;
+ ignoreBOM?: boolean;
+}
+
+interface Decoder {
+ handler(stream: Stream, byte: number): number | null;
+}
+
+interface Encoder {
+ handler(codePoint: number): number | number[];
+}
+
+class SingleByteDecoder implements Decoder {
+ private _index: number[];
+ private _fatal: boolean;
+
+ constructor(index: number[], options: DecoderOptions) {
+ if (options.ignoreBOM) {
+ throw new TypeError("Ignoring the BOM is available only with utf-8.");
+ }
+ this._fatal = options.fatal || false;
+ this._index = index;
+ }
+ handler(stream: Stream, byte: number): number {
+ if (byte === END_OF_STREAM) {
+ return FINISHED;
+ }
+ if (isASCIIByte(byte)) {
+ return byte;
+ }
+ const codePoint = this._index[byte - 0x80];
+
+ if (codePoint == null) {
+ return decoderError(this._fatal);
+ }
+
+ return codePoint;
+ }
+}
+
+// The encodingMap is a hash of labels that are indexed by the canonical
+// encoding.
+const encodingMap: { [key: string]: string[] } = {
+ "windows-1252": [
+ "ansi_x3.4-1968",
+ "ascii",
+ "cp1252",
+ "cp819",
+ "csisolatin1",
+ "ibm819",
+ "iso-8859-1",
+ "iso-ir-100",
+ "iso8859-1",
+ "iso88591",
+ "iso_8859-1",
+ "iso_8859-1:1987",
+ "l1",
+ "latin1",
+ "us-ascii",
+ "windows-1252",
+ "x-cp1252"
+ ],
+ "utf-8": ["unicode-1-1-utf-8", "utf-8", "utf8"]
+};
+// We convert these into a Map where every label resolves to its canonical
+// encoding type.
+const encodings = new Map<string, string>();
+for (const key of Object.keys(encodingMap)) {
+ const labels = encodingMap[key];
+ for (const label of labels) {
+ encodings.set(label, key);
+ }
+}
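+// e.g. encodings.get("latin1") and encodings.get("ascii") both resolve to
+// "windows-1252", while encodings.get("utf8") resolves to "utf-8".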
+
+// A map of functions that return new instances of a decoder indexed by the
+// encoding type.
+const decoders = new Map<string, (options: DecoderOptions) => Decoder>();
+decoders.set(
+ "utf-8",
+ (options: DecoderOptions): UTF8Decoder => {
+ return new UTF8Decoder(options);
+ }
+);
+
+// Single byte decoders are an array of code point lookups
+const encodingIndexes = new Map<string, number[]>();
+// prettier-ignore
+encodingIndexes.set("windows-1252", [8364,129,8218,402,8222,8230,8224,8225,710,8240,352,8249,338,141,381,143,144,8216,8217,8220,8221,8226,8211,8212,732,8482,353,8250,339,157,382,376,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255]);
+for (const [key, index] of encodingIndexes) {
+ decoders.set(
+ key,
+ (options: DecoderOptions): SingleByteDecoder => {
+ return new SingleByteDecoder(index, options);
+ }
+ );
+}
+
+function codePointsToString(codePoints: number[]): string {
+ let s = "";
+ for (const cp of codePoints) {
+ s += String.fromCodePoint(cp);
+ }
+ return s;
+}
+
+class Stream {
+ private _tokens: number[];
+ constructor(tokens: number[] | Uint8Array) {
+ this._tokens = [].slice.call(tokens);
+ this._tokens.reverse();
+ }
+
+ endOfStream(): boolean {
+ return !this._tokens.length;
+ }
+
+ read(): number {
+ return !this._tokens.length ? END_OF_STREAM : this._tokens.pop()!;
+ }
+
+ prepend(token: number | number[]): void {
+ if (Array.isArray(token)) {
+ while (token.length) {
+ this._tokens.push(token.pop()!);
+ }
+ } else {
+ this._tokens.push(token);
+ }
+ }
+
+ push(token: number | number[]): void {
+ if (Array.isArray(token)) {
+ while (token.length) {
+ this._tokens.unshift(token.shift()!);
+ }
+ } else {
+ this._tokens.unshift(token);
+ }
+ }
+}
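+// Note: tokens come out in their original order even though the backing array
+// is stored reversed; e.g. new Stream([1, 2, 3]).read() returns 1, and after
+// prepend(0) the next read() returns 0.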
+
+export interface TextDecodeOptions {
+ stream?: false;
+}
+
+export interface TextDecoderOptions {
+ fatal?: boolean;
+ ignoreBOM?: boolean;
+}
+
+type EitherArrayBuffer = SharedArrayBuffer | ArrayBuffer;
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function isEitherArrayBuffer(x: any): x is EitherArrayBuffer {
+ return x instanceof SharedArrayBuffer || x instanceof ArrayBuffer;
+}
+
+export class TextDecoder {
+ private _encoding: string;
+
+ /** Returns encoding's name, lowercased. */
+ get encoding(): string {
+ return this._encoding;
+ }
+ /** Returns `true` if error mode is "fatal", and `false` otherwise. */
+ readonly fatal: boolean = false;
+ /** Returns `true` if ignore BOM flag is set, and `false` otherwise. */
+ readonly ignoreBOM: boolean = false;
+
+ constructor(label = "utf-8", options: TextDecoderOptions = { fatal: false }) {
+ if (options.ignoreBOM) {
+ this.ignoreBOM = true;
+ }
+ if (options.fatal) {
+ this.fatal = true;
+ }
+ label = String(label)
+ .trim()
+ .toLowerCase();
+ const encoding = encodings.get(label);
+ if (!encoding) {
+ throw new RangeError(
+ `The encoding label provided ('${label}') is invalid.`
+ );
+ }
+ if (!decoders.has(encoding)) {
+ throw new TypeError(`Internal decoder ('${encoding}') not found.`);
+ }
+ this._encoding = encoding;
+ }
+
+ /** Returns the result of running encoding's decoder. */
+ decode(
+ input?: domTypes.BufferSource,
+ options: TextDecodeOptions = { stream: false }
+ ): string {
+ if (options.stream) {
+ throw new TypeError("Stream not supported.");
+ }
+
+ let bytes: Uint8Array;
+ if (input instanceof Uint8Array) {
+ bytes = input;
+ } else if (isEitherArrayBuffer(input)) {
+ bytes = new Uint8Array(input);
+ } else if (
+ typeof input === "object" &&
+ "buffer" in input &&
+ isEitherArrayBuffer(input.buffer)
+ ) {
+ bytes = new Uint8Array(input.buffer, input.byteOffset, input.byteLength);
+ } else {
+ bytes = new Uint8Array(0);
+ }
+
+ const decoder = decoders.get(this._encoding)!({
+ fatal: this.fatal,
+ ignoreBOM: this.ignoreBOM
+ });
+ const inputStream = new Stream(bytes);
+ const output: number[] = [];
+
+ while (true) {
+ const result = decoder.handler(inputStream, inputStream.read());
+ if (result === FINISHED) {
+ break;
+ }
+
+ if (result !== CONTINUE) {
+ output.push(result);
+ }
+ }
+
+ if (output.length > 0 && output[0] === 0xfeff) {
+ output.shift();
+ }
+
+ return codePointsToString(output);
+ }
+ get [Symbol.toStringTag](): string {
+ return "TextDecoder";
+ }
+}
+
+interface TextEncoderEncodeIntoResult {
+ read: number;
+ written: number;
+}
+
+export class TextEncoder {
+ /** Returns "utf-8". */
+ readonly encoding = "utf-8";
+ /** Returns the result of running UTF-8's encoder. */
+ encode(input = ""): Uint8Array {
+ const encoder = new UTF8Encoder();
+ const inputStream = new Stream(stringToCodePoints(input));
+ const output: number[] = [];
+
+ while (true) {
+ const result = encoder.handler(inputStream.read());
+ if (result === FINISHED) {
+ break;
+ }
+ if (Array.isArray(result)) {
+ output.push(...result);
+ } else {
+ output.push(result);
+ }
+ }
+
+ return new Uint8Array(output);
+ }
+ encodeInto(input: string, dest: Uint8Array): TextEncoderEncodeIntoResult {
+ const encoder = new UTF8Encoder();
+ const inputStream = new Stream(stringToCodePoints(input));
+
+ let written = 0;
+ let read = 0;
+ while (true) {
+ const result = encoder.handler(inputStream.read());
+ if (result === FINISHED) {
+ break;
+ }
+ read++;
+ if (Array.isArray(result)) {
+ dest.set(result, written);
+ written += result.length;
+ if (result.length > 3) {
+ // increment read a second time if greater than U+FFFF
+ read++;
+ }
+ } else {
+ dest[written] = result;
+ written++;
+ }
+ }
+
+ return {
+ read,
+ written
+ };
+ }
+ get [Symbol.toStringTag](): string {
+ return "TextEncoder";
+ }
+}
diff --git a/cli/js/text_encoding_test.ts b/cli/js/text_encoding_test.ts
new file mode 100644
index 000000000..aaa9e6b9d
--- /dev/null
+++ b/cli/js/text_encoding_test.ts
@@ -0,0 +1,193 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assert, assertEquals } from "./test_util.ts";
+
+test(function btoaSuccess(): void {
+ const text = "hello world";
+ const encoded = btoa(text);
+ assertEquals(encoded, "aGVsbG8gd29ybGQ=");
+});
+
+test(function atobSuccess(): void {
+ const encoded = "aGVsbG8gd29ybGQ=";
+ const decoded = atob(encoded);
+ assertEquals(decoded, "hello world");
+});
+
+test(function atobWithAsciiWhitespace(): void {
+ const encodedList = [
+ " aGVsbG8gd29ybGQ=",
+ " aGVsbG8gd29ybGQ=",
+ "aGVsbG8gd29ybGQ= ",
+ "aGVsbG8gd29ybGQ=\n",
+ "aGVsbG\t8gd29ybGQ=",
+ `aGVsbG\t8g
+ d29ybGQ=`
+ ];
+
+ for (const encoded of encodedList) {
+ const decoded = atob(encoded);
+ assertEquals(decoded, "hello world");
+ }
+});
+
+test(function atobThrows(): void {
+ let threw = false;
+ try {
+ atob("aGVsbG8gd29ybGQ==");
+ } catch (e) {
+ threw = true;
+ }
+ assert(threw);
+});
+
+test(function atobThrows2(): void {
+ let threw = false;
+ try {
+ atob("aGVsbG8gd29ybGQ===");
+ } catch (e) {
+ threw = true;
+ }
+ assert(threw);
+});
+
+test(function btoaFailed(): void {
+ const text = "你好";
+ let err;
+ try {
+ btoa(text);
+ } catch (e) {
+ err = e;
+ }
+ assert(!!err);
+ assertEquals(err.name, "InvalidInput");
+});
+
+test(function textDecoder2(): void {
+ // prettier-ignore
+ const fixture = new Uint8Array([
+ 0xf0, 0x9d, 0x93, 0xbd,
+ 0xf0, 0x9d, 0x93, 0xae,
+ 0xf0, 0x9d, 0x94, 0x81,
+ 0xf0, 0x9d, 0x93, 0xbd
+ ]);
+ const decoder = new TextDecoder();
+ assertEquals(decoder.decode(fixture), "𝓽𝓮𝔁𝓽");
+});
+
+test(function textDecoderIgnoreBOM(): void {
+ // prettier-ignore
+ const fixture = new Uint8Array([
+ 0xef, 0xbb, 0xbf,
+ 0xf0, 0x9d, 0x93, 0xbd,
+ 0xf0, 0x9d, 0x93, 0xae,
+ 0xf0, 0x9d, 0x94, 0x81,
+ 0xf0, 0x9d, 0x93, 0xbd
+ ]);
+ const decoder = new TextDecoder("utf-8", { ignoreBOM: true });
+ assertEquals(decoder.decode(fixture), "𝓽𝓮𝔁𝓽");
+});
+
+test(function textDecoderNotBOM(): void {
+ // prettier-ignore
+ const fixture = new Uint8Array([
+ 0xef, 0xbb, 0x89,
+ 0xf0, 0x9d, 0x93, 0xbd,
+ 0xf0, 0x9d, 0x93, 0xae,
+ 0xf0, 0x9d, 0x94, 0x81,
+ 0xf0, 0x9d, 0x93, 0xbd
+ ]);
+ const decoder = new TextDecoder("utf-8", { ignoreBOM: true });
+ assertEquals(decoder.decode(fixture), "ﻉ𝓽𝓮𝔁𝓽");
+});
+
+test(function textDecoderASCII(): void {
+ const fixture = new Uint8Array([0x89, 0x95, 0x9f, 0xbf]);
+ const decoder = new TextDecoder("ascii");
+ assertEquals(decoder.decode(fixture), "‰•Ÿ¿");
+});
+
+test(function textDecoderErrorEncoding(): void {
+ let didThrow = false;
+ try {
+ new TextDecoder("foo");
+ } catch (e) {
+ didThrow = true;
+ assertEquals(e.message, "The encoding label provided ('foo') is invalid.");
+ }
+ assert(didThrow);
+});
+
+test(function textEncoder(): void {
+ const fixture = "𝓽𝓮𝔁𝓽";
+ const encoder = new TextEncoder();
+ // prettier-ignore
+ assertEquals(Array.from(encoder.encode(fixture)), [
+ 0xf0, 0x9d, 0x93, 0xbd,
+ 0xf0, 0x9d, 0x93, 0xae,
+ 0xf0, 0x9d, 0x94, 0x81,
+ 0xf0, 0x9d, 0x93, 0xbd
+ ]);
+});
+
+test(function textEncodeInto(): void {
+ const fixture = "text";
+ const encoder = new TextEncoder();
+ const bytes = new Uint8Array(5);
+ const result = encoder.encodeInto(fixture, bytes);
+ assertEquals(result.read, 4);
+ assertEquals(result.written, 4);
+ // prettier-ignore
+ assertEquals(Array.from(bytes), [
+ 0x74, 0x65, 0x78, 0x74, 0x00,
+ ]);
+});
+
+test(function textEncodeInto2(): void {
+ const fixture = "𝓽𝓮𝔁𝓽";
+ const encoder = new TextEncoder();
+ const bytes = new Uint8Array(17);
+ const result = encoder.encodeInto(fixture, bytes);
+ assertEquals(result.read, 8);
+ assertEquals(result.written, 16);
+ // prettier-ignore
+ assertEquals(Array.from(bytes), [
+ 0xf0, 0x9d, 0x93, 0xbd,
+ 0xf0, 0x9d, 0x93, 0xae,
+ 0xf0, 0x9d, 0x94, 0x81,
+ 0xf0, 0x9d, 0x93, 0xbd, 0x00,
+ ]);
+});
+
+test(function textDecoderSharedUint8Array(): void {
+ const ab = new SharedArrayBuffer(6);
+ const dataView = new DataView(ab);
+ const charCodeA = "A".charCodeAt(0);
+ for (let i = 0; i < ab.byteLength; i++) {
+ dataView.setUint8(i, charCodeA + i);
+ }
+ const ui8 = new Uint8Array(ab);
+ const decoder = new TextDecoder();
+ const actual = decoder.decode(ui8);
+ assertEquals(actual, "ABCDEF");
+});
+
+test(function textDecoderSharedInt32Array(): void {
+ const ab = new SharedArrayBuffer(8);
+ const dataView = new DataView(ab);
+ const charCodeA = "A".charCodeAt(0);
+ for (let i = 0; i < ab.byteLength; i++) {
+ dataView.setUint8(i, charCodeA + i);
+ }
+ const i32 = new Int32Array(ab);
+ const decoder = new TextDecoder();
+ const actual = decoder.decode(i32);
+ assertEquals(actual, "ABCDEFGH");
+});
+
+test(function toStringShouldBeWebCompatibility(): void {
+ const encoder = new TextEncoder();
+ assertEquals(encoder.toString(), "[object TextEncoder]");
+
+ const decoder = new TextDecoder();
+ assertEquals(decoder.toString(), "[object TextDecoder]");
+});
diff --git a/cli/js/timers.ts b/cli/js/timers.ts
new file mode 100644
index 000000000..5bc4922e3
--- /dev/null
+++ b/cli/js/timers.ts
@@ -0,0 +1,280 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { assert } from "./util.ts";
+import { window } from "./window.ts";
+import * as dispatch from "./dispatch.ts";
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+
+const { console } = window;
+
+interface Timer {
+ id: number;
+ callback: () => void;
+ delay: number;
+ due: number;
+ repeat: boolean;
+ scheduled: boolean;
+}
+
+// We'll subtract EPOCH every time we retrieve the time with Date.now(). This
+// ensures that absolute time values stay below UINT32_MAX - 2, which is the
+// maximum object key that ECMAScript considers "numerical". After running for
+// about a month, this is no longer true, and Deno explodes.
+// TODO(piscisaureus): fix that ^.
+const EPOCH = Date.now();
+const APOCALYPSE = 2 ** 32 - 2;
+
+// Timeout values > TIMEOUT_MAX are set to 1.
+const TIMEOUT_MAX = 2 ** 31 - 1;
+
+let globalTimeoutDue: number | null = null;
+
+let nextTimerId = 1;
+const idMap = new Map<number, Timer>();
+const dueMap: { [due: number]: Timer[] } = Object.create(null);
+
+function getTime(): number {
+ // TODO: use a monotonic clock.
+ const now = Date.now() - EPOCH;
+ assert(now >= 0 && now < APOCALYPSE);
+ return now;
+}
+
+function clearGlobalTimeout(): void {
+ globalTimeoutDue = null;
+ sendSync(dispatch.OP_GLOBAL_TIMER_STOP);
+}
+
+async function setGlobalTimeout(due: number, now: number): Promise<void> {
+ // Since JS and Rust don't use the same clock, pass the time to rust as a
+ // relative time value. On the Rust side we'll turn that into an absolute
+ // value again.
+ const timeout = due - now;
+ assert(timeout >= 0);
+
+ // Send message to the backend.
+ globalTimeoutDue = due;
+ await sendAsync(dispatch.OP_GLOBAL_TIMER, { timeout });
+ // eslint-disable-next-line @typescript-eslint/no-use-before-define
+ fireTimers();
+}
+
+function setOrClearGlobalTimeout(due: number | null, now: number): void {
+ if (due == null) {
+ clearGlobalTimeout();
+ } else {
+ setGlobalTimeout(due, now);
+ }
+}
+
+function schedule(timer: Timer, now: number): void {
+ assert(!timer.scheduled);
+ assert(now <= timer.due);
+ // Find or create the list of timers that will fire at point-in-time `due`.
+ let list = dueMap[timer.due];
+ if (list === undefined) {
+ list = dueMap[timer.due] = [];
+ }
+ // Append the newly scheduled timer to the list and mark it as scheduled.
+ list.push(timer);
+ timer.scheduled = true;
+ // If the new timer is scheduled to fire before any timer that existed before,
+ // update the global timeout to reflect this.
+ if (globalTimeoutDue === null || globalTimeoutDue > timer.due) {
+ setOrClearGlobalTimeout(timer.due, now);
+ }
+}
+
+function unschedule(timer: Timer): void {
+ if (!timer.scheduled) {
+ return;
+ }
+ // Find the list of timers that will fire at point-in-time `due`.
+ const list = dueMap[timer.due];
+ if (list.length === 1) {
+    // This timer is the only one in the list. Remove the entire list.
+ assert(list[0] === timer);
+ delete dueMap[timer.due];
+ // If the unscheduled timer was 'next up', find when the next timer that
+ // still exists is due, and update the global alarm accordingly.
+ if (timer.due === globalTimeoutDue) {
+ let nextTimerDue: number | null = null;
+ for (const key in dueMap) {
+ nextTimerDue = Number(key);
+ break;
+ }
+ setOrClearGlobalTimeout(nextTimerDue, getTime());
+ }
+ } else {
+ // Multiple timers that are due at the same point in time.
+ // Remove this timer from the list.
+ const index = list.indexOf(timer);
+ assert(index > -1);
+ list.splice(index, 1);
+ }
+}
+
+function fire(timer: Timer): void {
+ // If the timer isn't found in the ID map, that means it has been cancelled
+ // between the timer firing and the promise callback (this function).
+ if (!idMap.has(timer.id)) {
+ return;
+ }
+ // Reschedule the timer if it is a repeating one, otherwise drop it.
+ if (!timer.repeat) {
+ // One-shot timer: remove the timer from this id-to-timer map.
+ idMap.delete(timer.id);
+ } else {
+ // Interval timer: compute when timer was supposed to fire next.
+ // However make sure to never schedule the next interval in the past.
+ const now = getTime();
+ timer.due = Math.max(now, timer.due + timer.delay);
+ schedule(timer, now);
+ }
+ // Call the user callback. Intermediate assignment is to avoid leaking `this`
+ // to it, while also keeping the stack trace neat when it shows up in there.
+ const callback = timer.callback;
+ callback();
+}
+
+function fireTimers(): void {
+ const now = getTime();
+ // Bail out if we're not expecting the global timer to fire.
+ if (globalTimeoutDue === null) {
+ return;
+ }
+ // After firing the timers that are due now, this will hold the due time of
+ // the first timer that hasn't fired yet.
+ let nextTimerDue: number | null = null;
+ // Walk over the keys of the 'due' map. Since dueMap is actually a regular
+ // object and its keys are numerical and smaller than UINT32_MAX - 2,
+ // keys are iterated in ascending order.
+ for (const key in dueMap) {
+ // Convert the object key (a string) to a number.
+ const due = Number(key);
+ // Break out of the loop if the next timer isn't due to fire yet.
+ if (Number(due) > now) {
+ nextTimerDue = due;
+ break;
+ }
+ // Get the list of timers that have this due time, then drop it.
+ const list = dueMap[key];
+ delete dueMap[key];
+ // Fire all the timers in the list.
+ for (const timer of list) {
+ // With the list dropped, the timer is no longer scheduled.
+ timer.scheduled = false;
+ // Place the callback on the microtask queue.
+ Promise.resolve(timer).then(fire);
+ }
+ }
+
+ // Update the global alarm to go off when the first-up timer that hasn't fired
+ // yet is due.
+ setOrClearGlobalTimeout(nextTimerDue, now);
+}
+
+export type Args = unknown[];
+
+function checkThis(thisArg: unknown): void {
+ if (thisArg !== null && thisArg !== undefined && thisArg !== window) {
+ throw new TypeError("Illegal invocation");
+ }
+}
+
+function checkBigInt(n: unknown): void {
+ if (typeof n === "bigint") {
+ throw new TypeError("Cannot convert a BigInt value to a number");
+ }
+}
+
+function setTimer(
+ cb: (...args: Args) => void,
+ delay: number,
+ args: Args,
+ repeat: boolean
+): number {
+ // Bind `args` to the callback and bind `this` to window(global).
+ const callback: () => void = cb.bind(window, ...args);
+ // In the browser, the delay value must be coercible to an integer between 0
+ // and INT32_MAX. Any other value will cause the timer to fire immediately.
+ // We emulate this behavior.
+ const now = getTime();
+ if (delay > TIMEOUT_MAX) {
+ console.warn(
+ `${delay} does not fit into` +
+ " a 32-bit signed integer." +
+ "\nTimeout duration was set to 1."
+ );
+ delay = 1;
+ }
+ delay = Math.max(0, delay | 0);
+
+ // Create a new, unscheduled timer object.
+ const timer = {
+ id: nextTimerId++,
+ callback,
+ args,
+ delay,
+ due: now + delay,
+ repeat,
+ scheduled: false
+ };
+ // Register the timer's existence in the id-to-timer map.
+ idMap.set(timer.id, timer);
+ // Schedule the timer in the due table.
+ schedule(timer, now);
+ return timer.id;
+}
+
+/** Sets a timer which executes a function once after the timer expires. */
+export function setTimeout(
+ cb: (...args: Args) => void,
+ delay = 0,
+ ...args: Args
+): number {
+ checkBigInt(delay);
+ // @ts-ignore
+ checkThis(this);
+ return setTimer(cb, delay, args, false);
+}
+
+/** Repeatedly calls a function, with a fixed time delay between each call. */
+export function setInterval(
+ cb: (...args: Args) => void,
+ delay = 0,
+ ...args: Args
+): number {
+ checkBigInt(delay);
+ // @ts-ignore
+ checkThis(this);
+ return setTimer(cb, delay, args, true);
+}
+
+/** Clears a previously set timer by id. AKA clearTimeout and clearInterval. */
+function clearTimer(id: number): void {
+ id = Number(id);
+ const timer = idMap.get(id);
+ if (timer === undefined) {
+ // Timer doesn't exist any more or never existed. This is not an error.
+ return;
+ }
+ // Unschedule the timer if it is currently scheduled, and forget about it.
+ unschedule(timer);
+ idMap.delete(timer.id);
+}
+
+export function clearTimeout(id = 0): void {
+ checkBigInt(id);
+ if (id === 0) {
+ return;
+ }
+ clearTimer(id);
+}
+
+export function clearInterval(id = 0): void {
+ checkBigInt(id);
+ if (id === 0) {
+ return;
+ }
+ clearTimer(id);
+}
diff --git a/cli/js/timers_test.ts b/cli/js/timers_test.ts
new file mode 100644
index 000000000..bc4fcffcf
--- /dev/null
+++ b/cli/js/timers_test.ts
@@ -0,0 +1,291 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assert, assertEquals, assertNotEquals } from "./test_util.ts";
+
+function deferred(): {
+ promise: Promise<{}>;
+ resolve: (value?: {} | PromiseLike<{}>) => void;
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ reject: (reason?: any) => void;
+} {
+ let resolve;
+ let reject;
+ const promise = new Promise(
+ (res, rej): void => {
+ resolve = res;
+ reject = rej;
+ }
+ );
+ return {
+ promise,
+ resolve,
+ reject
+ };
+}
+
+async function waitForMs(ms: number): Promise<number> {
+ return new Promise((resolve): number => setTimeout(resolve, ms));
+}
+
+test(async function timeoutSuccess(): Promise<void> {
+ const { promise, resolve } = deferred();
+ let count = 0;
+ setTimeout((): void => {
+ count++;
+ resolve();
+ }, 500);
+ await promise;
+ // count should increment
+ assertEquals(count, 1);
+});
+
+test(async function timeoutArgs(): Promise<void> {
+ const { promise, resolve } = deferred();
+ const arg = 1;
+ setTimeout(
+ (a, b, c): void => {
+ assertEquals(a, arg);
+ assertEquals(b, arg.toString());
+ assertEquals(c, [arg]);
+ resolve();
+ },
+ 10,
+ arg,
+ arg.toString(),
+ [arg]
+ );
+ await promise;
+});
+
+test(async function timeoutCancelSuccess(): Promise<void> {
+ let count = 0;
+ const id = setTimeout((): void => {
+ count++;
+ }, 1);
+ // Cancelled, count should not increment
+ clearTimeout(id);
+ await waitForMs(600);
+ assertEquals(count, 0);
+});
+
+test(async function timeoutCancelMultiple(): Promise<void> {
+ function uncalled(): never {
+ throw new Error("This function should not be called.");
+ }
+
+ // Set timers and cancel them in the same order.
+ const t1 = setTimeout(uncalled, 10);
+ const t2 = setTimeout(uncalled, 10);
+ const t3 = setTimeout(uncalled, 10);
+ clearTimeout(t1);
+ clearTimeout(t2);
+ clearTimeout(t3);
+
+ // Set timers and cancel them in reverse order.
+ const t4 = setTimeout(uncalled, 20);
+ const t5 = setTimeout(uncalled, 20);
+ const t6 = setTimeout(uncalled, 20);
+ clearTimeout(t6);
+ clearTimeout(t5);
+ clearTimeout(t4);
+
+ // Sleep until we're certain that the cancelled timers aren't gonna fire.
+ await waitForMs(50);
+});
+
+test(async function timeoutCancelInvalidSilentFail(): Promise<void> {
+ // Expect no panic
+ const { promise, resolve } = deferred();
+ let count = 0;
+ const id = setTimeout((): void => {
+ count++;
+ // Should have no effect
+ clearTimeout(id);
+ resolve();
+ }, 500);
+ await promise;
+ assertEquals(count, 1);
+
+ // Should silently fail (no panic)
+ clearTimeout(2147483647);
+});
+
+test(async function intervalSuccess(): Promise<void> {
+ const { promise, resolve } = deferred();
+ let count = 0;
+ const id = setInterval((): void => {
+ count++;
+ clearInterval(id);
+ resolve();
+ }, 100);
+ await promise;
+ // Clear interval
+ clearInterval(id);
+  // count should only have incremented once (the interval was cleared)
+ assertEquals(count, 1);
+});
+
+test(async function intervalCancelSuccess(): Promise<void> {
+ let count = 0;
+ const id = setInterval((): void => {
+ count++;
+ }, 1);
+ clearInterval(id);
+ await waitForMs(500);
+ assertEquals(count, 0);
+});
+
+test(async function intervalOrdering(): Promise<void> {
+ const timers = [];
+ let timeouts = 0;
+ function onTimeout(): void {
+ ++timeouts;
+ for (let i = 1; i < timers.length; i++) {
+ clearTimeout(timers[i]);
+ }
+ }
+ for (let i = 0; i < 10; i++) {
+ timers[i] = setTimeout(onTimeout, 1);
+ }
+ await waitForMs(500);
+ assertEquals(timeouts, 1);
+});
+
+test(async function intervalCancelInvalidSilentFail(): Promise<void> {
+ // Should silently fail (no panic)
+ clearInterval(2147483647);
+});
+
+test(async function fireCallbackImmediatelyWhenDelayOverMaxValue(): Promise<
+ void
+> {
+ let count = 0;
+ setTimeout((): void => {
+ count++;
+ }, 2 ** 31);
+ await waitForMs(1);
+ assertEquals(count, 1);
+});
+
+test(async function timeoutCallbackThis(): Promise<void> {
+ const { promise, resolve } = deferred();
+ const obj = {
+ foo(): void {
+ assertEquals(this, window);
+ resolve();
+ }
+ };
+ setTimeout(obj.foo, 1);
+ await promise;
+});
+
+test(async function timeoutBindThis(): Promise<void> {
+ function noop(): void {}
+
+ const thisCheckPassed = [null, undefined, window, globalThis];
+
+ const thisCheckFailed = [
+ 0,
+ "",
+ true,
+ false,
+ {},
+ [],
+ "foo",
+ (): void => {},
+ Object.prototype
+ ];
+
+ thisCheckPassed.forEach(
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ (thisArg: any): void => {
+ let hasThrown = 0;
+ try {
+ setTimeout.call(thisArg, noop, 1);
+ hasThrown = 1;
+ } catch (err) {
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+ assertEquals(hasThrown, 1);
+ }
+ );
+
+ thisCheckFailed.forEach(
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ (thisArg: any): void => {
+ let hasThrown = 0;
+ try {
+ setTimeout.call(thisArg, noop, 1);
+ hasThrown = 1;
+ } catch (err) {
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+ assertEquals(hasThrown, 2);
+ }
+ );
+});
+
+test(async function clearTimeoutShouldConvertToNumber(): Promise<void> {
+ let called = false;
+ const obj = {
+ valueOf(): number {
+ called = true;
+ return 1;
+ }
+ };
+ clearTimeout((obj as unknown) as number);
+ assert(called);
+});
+
+test(function setTimeoutShouldThrowWithBigint(): void {
+ let hasThrown = 0;
+ try {
+ setTimeout((): void => {}, (1n as unknown) as number);
+ hasThrown = 1;
+ } catch (err) {
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+ assertEquals(hasThrown, 2);
+});
+
+test(function clearTimeoutShouldThrowWithBigint(): void {
+ let hasThrown = 0;
+ try {
+ clearTimeout((1n as unknown) as number);
+ hasThrown = 1;
+ } catch (err) {
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+ assertEquals(hasThrown, 2);
+});
+
+test(function testFunctionName(): void {
+ assertEquals(clearTimeout.name, "clearTimeout");
+ assertEquals(clearInterval.name, "clearInterval");
+});
+
+test(function testFunctionParamsLength(): void {
+ assertEquals(setTimeout.length, 1);
+ assertEquals(setInterval.length, 1);
+ assertEquals(clearTimeout.length, 0);
+ assertEquals(clearInterval.length, 0);
+});
+
+test(function clearTimeoutAndClearIntervalNotBeEquals(): void {
+ assertNotEquals(clearTimeout, clearInterval);
+});
diff --git a/cli/js/tls.ts b/cli/js/tls.ts
new file mode 100644
index 000000000..ec24b458b
--- /dev/null
+++ b/cli/js/tls.ts
@@ -0,0 +1,21 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+import { Conn, ConnImpl } from "./net.ts";
+
+// TODO(ry) There are many configuration options to add...
+// https://docs.rs/rustls/0.16.0/rustls/struct.ClientConfig.html
+interface DialTLSOptions {
+ port: number;
+ hostname?: string;
+}
+const dialTLSDefaults = { hostname: "127.0.0.1", transport: "tcp" };
+
+/**
+ * dialTLS establishes a secure connection over TLS (transport layer security).
+ */
+export async function dialTLS(options: DialTLSOptions): Promise<Conn> {
+ // Merge into a fresh object so repeated calls don't mutate dialTLSDefaults.
+ options = Object.assign({}, dialTLSDefaults, options);
+ const res = await sendAsync(dispatch.OP_DIAL_TLS, options);
+ return new ConnImpl(res.rid, res.remoteAddr!, res.localAddr!);
+}
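
A minimal usage sketch for dialTLS, assuming the script is run with --allow-net and using an illustrative endpoint; the returned Conn is the same interface produced by net.ts:

    async function pingTLS(): Promise<void> {
      const conn = await Deno.dialTLS({ hostname: "example.com", port: 443 });
      // Send a bare HTTP/1.0 request over the encrypted connection.
      await conn.write(new TextEncoder().encode("GET / HTTP/1.0\r\n\r\n"));
      conn.close();
    }
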
diff --git a/cli/js/tls_test.ts b/cli/js/tls_test.ts
new file mode 100644
index 000000000..25900f876
--- /dev/null
+++ b/cli/js/tls_test.ts
@@ -0,0 +1,25 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, testPerm, assert, assertEquals } from "./test_util.ts";
+
+// TODO(ry) The tests in this file use github.com:443, but it would be better to
+// not rely on an internet connection and rather use a localhost TLS server.
+
+test(async function dialTLSNoPerm(): Promise<void> {
+ let err;
+ try {
+ await Deno.dialTLS({ hostname: "github.com", port: 443 });
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+testPerm({ net: true }, async function dialTLSBasic(): Promise<void> {
+ const conn = await Deno.dialTLS({ hostname: "github.com", port: 443 });
+ assert(conn.rid > 0);
+ const body = new TextEncoder().encode("GET / HTTP/1.0\r\n\r\n");
+ const writeResult = await conn.write(body);
+ assertEquals(body.length, writeResult);
+ conn.close();
+});
diff --git a/cli/js/truncate.ts b/cli/js/truncate.ts
new file mode 100644
index 000000000..5ce7b5158
--- /dev/null
+++ b/cli/js/truncate.ts
@@ -0,0 +1,34 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import * as dispatch from "./dispatch.ts";
+
+function coerceLen(len?: number): number {
+ if (!len) {
+ return 0;
+ }
+
+ if (len < 0) {
+ return 0;
+ }
+
+ return len;
+}
+
+/** Truncates or extends the specified file synchronously, updating its size
+ * to exactly `len` bytes.
+ *
+ * Deno.truncateSync("hello.txt", 10);
+ */
+export function truncateSync(name: string, len?: number): void {
+ sendSync(dispatch.OP_TRUNCATE, { name, len: coerceLen(len) });
+}
+
+/**
+ * Truncates or extends the specified file, updating its size to exactly
+ * `len` bytes.
+ *
+ * await Deno.truncate("hello.txt", 10);
+ */
+export async function truncate(name: string, len?: number): Promise<void> {
+ await sendAsync(dispatch.OP_TRUNCATE, { name, len: coerceLen(len) });
+}
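
A small sketch of how coerceLen normalizes the optional length; the file name is illustrative and write permission is assumed:

    // Omitted or negative lengths are coerced to 0, emptying the file.
    Deno.truncateSync("data.txt");      // len omitted  -> 0 bytes
    Deno.truncateSync("data.txt", -7);  // negative len -> 0 bytes
    // A positive length cuts or zero-pads the file to exactly that size.
    Deno.truncateSync("data.txt", 16);
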
diff --git a/cli/js/truncate_test.ts b/cli/js/truncate_test.ts
new file mode 100644
index 000000000..055db8652
--- /dev/null
+++ b/cli/js/truncate_test.ts
@@ -0,0 +1,74 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assertEquals } from "./test_util.ts";
+
+function readDataSync(name: string): string {
+ const data = Deno.readFileSync(name);
+ const decoder = new TextDecoder("utf-8");
+ const text = decoder.decode(data);
+ return text;
+}
+
+async function readData(name: string): Promise<string> {
+ const data = await Deno.readFile(name);
+ const decoder = new TextDecoder("utf-8");
+ const text = decoder.decode(data);
+ return text;
+}
+
+testPerm({ read: true, write: true }, function truncateSyncSuccess(): void {
+ const enc = new TextEncoder();
+ const d = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test_truncateSync.txt";
+ Deno.writeFileSync(filename, d);
+ Deno.truncateSync(filename, 20);
+ let data = readDataSync(filename);
+ assertEquals(data.length, 20);
+ Deno.truncateSync(filename, 5);
+ data = readDataSync(filename);
+ assertEquals(data.length, 5);
+ Deno.truncateSync(filename, -5);
+ data = readDataSync(filename);
+ assertEquals(data.length, 0);
+ Deno.removeSync(filename);
+});
+
+testPerm({ read: true, write: true }, async function truncateSuccess(): Promise<
+ void
+> {
+ const enc = new TextEncoder();
+ const d = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test_truncate.txt";
+ await Deno.writeFile(filename, d);
+ await Deno.truncate(filename, 20);
+ let data = await readData(filename);
+ assertEquals(data.length, 20);
+ await Deno.truncate(filename, 5);
+ data = await readData(filename);
+ assertEquals(data.length, 5);
+ await Deno.truncate(filename, -5);
+ data = await readData(filename);
+ assertEquals(data.length, 0);
+ await Deno.remove(filename);
+});
+
+testPerm({ write: false }, function truncateSyncPerm(): void {
+ let err;
+ try {
+ Deno.mkdirSync("/test_truncateSyncPermission.txt");
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
+
+testPerm({ write: false }, async function truncatePerm(): Promise<void> {
+ let err;
+ try {
+ await Deno.mkdir("/test_truncatePermission.txt");
+ } catch (e) {
+ err = e;
+ }
+ assertEquals(err.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(err.name, "PermissionDenied");
+});
diff --git a/cli/js/ts_global.d.ts b/cli/js/ts_global.d.ts
new file mode 100644
index 000000000..71a01e30e
--- /dev/null
+++ b/cli/js/ts_global.d.ts
@@ -0,0 +1,19 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+// This scopes the `ts` namespace globally, which is where it exists at runtime
+// when building Deno, but the `typescript/lib/typescript.d.ts` is defined as a
+// module.
+
+// Warning! This is a magical import. We don't want to have multiple copies of
+// typescript.d.ts around the repo, there's already one in
+// deno_typescript/typescript/lib/typescript.d.ts. Ideally we could simply point
+// to that in this import specifier, but "cargo package" is very strict and
+// requires all files to be present in a crate's subtree.
+// eslint-disable-next-line @typescript-eslint/no-unused-vars
+import * as ts_ from "$asset$/typescript.d.ts";
+
+declare global {
+ namespace ts {
+ export = ts_;
+ }
+}
diff --git a/cli/js/type_directives.ts b/cli/js/type_directives.ts
new file mode 100644
index 000000000..9b27887b5
--- /dev/null
+++ b/cli/js/type_directives.ts
@@ -0,0 +1,91 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+interface FileReference {
+ fileName: string;
+ pos: number;
+ end: number;
+}
+
+/** Remaps the module name based on any supplied type directives. */
+export function getMappedModuleName(
+ source: FileReference,
+ typeDirectives: Map<FileReference, string>
+): string {
+ const { fileName: sourceFileName, pos: sourcePos } = source;
+ for (const [{ fileName, pos }, value] of typeDirectives.entries()) {
+ if (sourceFileName === fileName && sourcePos === pos) {
+ return value;
+ }
+ }
+ return source.fileName;
+}
+
+/** Matches directives that look something like this and parses out the value
+ * of the directive:
+ *
+ * // @deno-types="./foo.d.ts"
+ *
+ * [See Diagram](http://bit.ly/31nZPCF)
+ */
+const typeDirectiveRegEx = /@deno-types\s*=\s*(["'])((?:(?=(\\?))\3.)*?)\1/gi;
+
+/** Matches `import`, `import from` or `export from` statements and parses out the value of the
+ * module specifier in the second capture group:
+ *
+ * import "./foo.js"
+ * import * as foo from "./foo.js"
+ * export { a, b, c } from "./bar.js"
+ *
+ * [See Diagram](http://bit.ly/2lOsp0K)
+ */
+const importExportRegEx = /(?:import|export)(?:\s+|\s+[\s\S]*?from\s+)?(["'])((?:(?=(\\?))\3.)*?)\1/;
+
+/** Parses out any Deno type directives that are part of the source code, or
+ * returns `undefined` if there are not any.
+ */
+export function parseTypeDirectives(
+ sourceCode: string | undefined
+): Map<FileReference, string> | undefined {
+ if (!sourceCode) {
+ return;
+ }
+
+ // collect all the directives in the file and their start and end positions
+ const directives: FileReference[] = [];
+ let maybeMatch: RegExpExecArray | null = null;
+ while ((maybeMatch = typeDirectiveRegEx.exec(sourceCode))) {
+ const [matchString, , fileName] = maybeMatch;
+ const { index: pos } = maybeMatch;
+ directives.push({
+ fileName,
+ pos,
+ end: pos + matchString.length
+ });
+ }
+ if (!directives.length) {
+ return;
+ }
+
+ // work from the last directive backwards for the next `import`/`export`
+ // statement
+ directives.reverse();
+ const results = new Map<FileReference, string>();
+ for (const { end, fileName, pos } of directives) {
+ const searchString = sourceCode.substring(end);
+ const maybeMatch = importExportRegEx.exec(searchString);
+ if (maybeMatch) {
+ const [matchString, , targetFileName] = maybeMatch;
+ const targetPos =
+ end + maybeMatch.index + matchString.indexOf(targetFileName) - 1;
+ const target: FileReference = {
+ fileName: targetFileName,
+ pos: targetPos,
+ end: targetPos + targetFileName.length
+ };
+ results.set(target, fileName);
+ }
+ sourceCode = sourceCode.substring(0, pos);
+ }
+
+ return results;
+}
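
A sketch of the mapping produced for a single directive; the module names are illustrative:

    const source = `
    // @deno-types="./foo.d.ts"
    import * as foo from "./foo.js";
    `;
    const directives = parseTypeDirectives(source)!;
    // Each entry maps the FileReference of the import specifier ("./foo.js")
    // to the directive's value ("./foo.d.ts"); getMappedModuleName later
    // substitutes that value when the compiler resolves the import.
    for (const [moduleRef, typesSpecifier] of directives.entries()) {
      console.log(moduleRef.fileName, "->", typesSpecifier); // ./foo.js -> ./foo.d.ts
    }
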
diff --git a/cli/js/types.ts b/cli/js/types.ts
new file mode 100644
index 000000000..88462d758
--- /dev/null
+++ b/cli/js/types.ts
@@ -0,0 +1,2 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+export type TypedArray = Uint8Array | Float32Array | Int32Array;
diff --git a/cli/js/unit_test_runner.ts b/cli/js/unit_test_runner.ts
new file mode 100755
index 000000000..d310f0a4e
--- /dev/null
+++ b/cli/js/unit_test_runner.ts
@@ -0,0 +1,107 @@
+#!/usr/bin/env -S deno run --reload --allow-run
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import "./unit_tests.ts";
+import { permissionCombinations, parseUnitTestOutput } from "./test_util.ts";
+
+interface TestResult {
+ perms: string;
+ output: string;
+ result: number;
+}
+
+function permsToCliFlags(perms: Deno.Permissions): string[] {
+ return Object.keys(perms)
+ .map(
+ (key): string => {
+ if (!perms[key]) return "";
+
+ const cliFlag = key.replace(
+ /\.?([A-Z])/g,
+ (x, y): string => `-${y.toLowerCase()}`
+ );
+ return `--allow-${cliFlag}`;
+ }
+ )
+ .filter((e): boolean => e.length > 0);
+}
+
+function fmtPerms(perms: Deno.Permissions): string {
+ let fmt = permsToCliFlags(perms).join(" ");
+
+ if (!fmt) {
+ fmt = "<no permissions>";
+ }
+
+ return fmt;
+}
+
+async function main(): Promise<void> {
+ console.log(
+ "Discovered permission combinations for tests:",
+ permissionCombinations.size
+ );
+
+ for (const perms of permissionCombinations.values()) {
+ console.log("\t" + fmtPerms(perms));
+ }
+
+ const testResults = new Set<TestResult>();
+
+ for (const perms of permissionCombinations.values()) {
+ const permsFmt = fmtPerms(perms);
+ console.log(`Running tests for: ${permsFmt}`);
+ const cliPerms = permsToCliFlags(perms);
+ // run each permission set's tests using the same deno executable
+ const args = [
+ Deno.execPath(),
+ "run",
+ "--no-prompt",
+ ...cliPerms,
+ "cli/js/unit_tests.ts"
+ ];
+
+ const p = Deno.run({
+ args,
+ stdout: "piped"
+ });
+
+ const { actual, expected, resultOutput } = parseUnitTestOutput(
+ await p.output(),
+ true
+ );
+
+ let result = 0;
+
+ if (!actual && !expected) {
+ console.error("Bad cli/js/unit_test.ts output");
+ result = 1;
+ } else if (expected !== actual) {
+ result = 1;
+ }
+
+ testResults.add({
+ perms: permsFmt,
+ output: resultOutput,
+ result
+ });
+ }
+
+ // if any test run returned a non-zero status, the whole test
+ // run should fail
+ let testsFailed = false;
+
+ for (const testResult of testResults) {
+ console.log(`Summary for ${testResult.perms}`);
+ console.log(testResult.output + "\n");
+ testsFailed = testsFailed || Boolean(testResult.result);
+ }
+
+ if (testsFailed) {
+ console.error("Unit tests failed");
+ Deno.exit(1);
+ }
+
+ console.log("Unit tests passed");
+}
+
+main();
diff --git a/cli/js/unit_tests.ts b/cli/js/unit_tests.ts
new file mode 100644
index 000000000..a3f150f4c
--- /dev/null
+++ b/cli/js/unit_tests.ts
@@ -0,0 +1,65 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+// This test is executed as part of tools/test.py
+// But it can also be run manually: ./target/debug/deno cli/js/unit_tests.ts
+
+import "./blob_test.ts";
+import "./body_test.ts";
+import "./buffer_test.ts";
+import "./build_test.ts";
+import "./chmod_test.ts";
+import "./chown_test.ts";
+import "./console_test.ts";
+import "./copy_file_test.ts";
+import "./custom_event_test.ts";
+import "./dir_test.ts";
+import "./dispatch_json_test.ts";
+import "./error_stack_test.ts";
+import "./event_test.ts";
+import "./event_target_test.ts";
+import "./fetch_test.ts";
+import "./file_test.ts";
+import "./files_test.ts";
+import "./form_data_test.ts";
+import "./get_random_values_test.ts";
+import "./globals_test.ts";
+import "./headers_test.ts";
+import "./link_test.ts";
+import "./location_test.ts";
+import "./make_temp_dir_test.ts";
+import "./metrics_test.ts";
+import "./mixins/dom_iterable_test.ts";
+import "./mkdir_test.ts";
+import "./net_test.ts";
+import "./os_test.ts";
+import "./process_test.ts";
+import "./read_dir_test.ts";
+import "./read_file_test.ts";
+import "./read_link_test.ts";
+import "./rename_test.ts";
+import "./request_test.ts";
+import "./resources_test.ts";
+import "./stat_test.ts";
+import "./symlink_test.ts";
+import "./text_encoding_test.ts";
+import "./timers_test.ts";
+import "./tls_test.ts";
+import "./truncate_test.ts";
+import "./url_test.ts";
+import "./url_search_params_test.ts";
+import "./utime_test.ts";
+import "./write_file_test.ts";
+import "./performance_test.ts";
+import "./permissions_test.ts";
+import "./version_test.ts";
+
+import "../../website/app_test.ts";
+
+import { runIfMain } from "../../std/testing/mod.ts";
+
+async function main(): Promise<void> {
+ // Run the entire test suite serially.
+ await runIfMain(import.meta);
+}
+
+main();
diff --git a/cli/js/url.ts b/cli/js/url.ts
new file mode 100644
index 000000000..f22198da4
--- /dev/null
+++ b/cli/js/url.ts
@@ -0,0 +1,376 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import * as urlSearchParams from "./url_search_params.ts";
+import * as domTypes from "./dom_types.ts";
+import { getRandomValues } from "./get_random_values.ts";
+import { window } from "./window.ts";
+
+interface URLParts {
+ protocol: string;
+ username: string;
+ password: string;
+ hostname: string;
+ port: string;
+ path: string;
+ query: string | null;
+ hash: string;
+}
+
+const patterns = {
+ protocol: "(?:([^:/?#]+):)",
+ authority: "(?://([^/?#]*))",
+ path: "([^?#]*)",
+ query: "(\\?[^#]*)",
+ hash: "(#.*)",
+
+ authentication: "(?:([^:]*)(?::([^@]*))?@)",
+ hostname: "([^:]+)",
+ port: "(?::(\\d+))"
+};
+
+const urlRegExp = new RegExp(
+ `^${patterns.protocol}?${patterns.authority}?${patterns.path}${
+ patterns.query
+ }?${patterns.hash}?`
+);
+
+const authorityRegExp = new RegExp(
+ `^${patterns.authentication}?${patterns.hostname}${patterns.port}?$`
+);
+
+const searchParamsMethods: Array<keyof urlSearchParams.URLSearchParams> = [
+ "append",
+ "delete",
+ "set"
+];
+
+function parse(url: string): URLParts | undefined {
+ const urlMatch = urlRegExp.exec(url);
+ if (urlMatch) {
+ const [, , authority] = urlMatch;
+ const authorityMatch = authority
+ ? authorityRegExp.exec(authority)
+ : [null, null, null, null, null];
+ if (authorityMatch) {
+ return {
+ protocol: urlMatch[1] || "",
+ username: authorityMatch[1] || "",
+ password: authorityMatch[2] || "",
+ hostname: authorityMatch[3] || "",
+ port: authorityMatch[4] || "",
+ path: urlMatch[3] || "",
+ query: urlMatch[4] || "",
+ hash: urlMatch[5] || ""
+ };
+ }
+ }
+ return undefined;
+}
+
+// Based on https://github.com/kelektiv/node-uuid
+// TODO(kevinkassimo): Use deno_std version once possible.
+function generateUUID(): string {
+ return "00000000-0000-4000-8000-000000000000".replace(
+ /[0]/g,
+ (): string =>
+ // random integer from 0 to 15 as a hex digit.
+ (getRandomValues(new Uint8Array(1))[0] % 16).toString(16)
+ );
+}
+
+// Keep it outside of the URL class to avoid exposing it to any access attempts.
+export const blobURLMap = new Map<string, domTypes.Blob>();
+
+function isAbsolutePath(path: string): boolean {
+ return path.startsWith("/");
+}
+
+// Resolves `.`s and `..`s where possible.
+// Preserves repeating and trailing `/`s by design.
+function normalizePath(path: string): string {
+ const isAbsolute = isAbsolutePath(path);
+ path = path.replace(/^\//, "");
+ const pathSegments = path.split("/");
+
+ const newPathSegments: string[] = [];
+ for (let i = 0; i < pathSegments.length; i++) {
+ const previous = newPathSegments[newPathSegments.length - 1];
+ if (
+ pathSegments[i] == ".." &&
+ previous != ".." &&
+ (previous != undefined || isAbsolute)
+ ) {
+ newPathSegments.pop();
+ } else if (pathSegments[i] != ".") {
+ newPathSegments.push(pathSegments[i]);
+ }
+ }
+
+ let newPath = newPathSegments.join("/");
+ if (!isAbsolute) {
+ if (newPathSegments.length == 0) {
+ newPath = ".";
+ }
+ } else {
+ newPath = `/${newPath}`;
+ }
+ return newPath;
+}
+
+// Standard URL basing logic, applied to paths.
+function resolvePathFromBase(path: string, basePath: string): string {
+ const normalizedPath = normalizePath(path);
+ if (isAbsolutePath(normalizedPath)) {
+ return normalizedPath;
+ }
+ const normalizedBasePath = normalizePath(basePath);
+ if (!isAbsolutePath(normalizedBasePath)) {
+ throw new TypeError("Base path must be absolute.");
+ }
+
+ // Special case.
+ if (path == "") {
+ return normalizedBasePath;
+ }
+
+ // Remove everything after the last `/` in `normalizedBasePath`.
+ const prefix = normalizedBasePath.replace(/[^\/]*$/, "");
+ // If `normalizedPath` ends with `.` or `..`, add a trailing slash.
+ const suffix = normalizedPath.replace(/(?<=(^|\/)(\.|\.\.))$/, "/");
+
+ return normalizePath(prefix + suffix);
+}
+
+export class URL {
+ private _parts: URLParts;
+ private _searchParams!: urlSearchParams.URLSearchParams;
+
+ private _updateSearchParams(): void {
+ const searchParams = new urlSearchParams.URLSearchParams(this.search);
+
+ for (const methodName of searchParamsMethods) {
+ /* eslint-disable @typescript-eslint/no-explicit-any */
+ const method: (...args: any[]) => any = searchParams[methodName];
+ searchParams[methodName] = (...args: unknown[]): any => {
+ method.apply(searchParams, args);
+ this.search = searchParams.toString();
+ };
+ /* eslint-enable */
+ }
+ this._searchParams = searchParams;
+
+ // cast to `any` to side-step the private-member access restriction
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ (this._searchParams as any).url = this;
+ }
+
+ get hash(): string {
+ return this._parts.hash;
+ }
+
+ set hash(value: string) {
+ value = unescape(String(value));
+ if (!value) {
+ this._parts.hash = "";
+ } else {
+ if (value.charAt(0) !== "#") {
+ value = `#${value}`;
+ }
+ // hashes can contain % and # unescaped
+ this._parts.hash = escape(value)
+ .replace(/%25/g, "%")
+ .replace(/%23/g, "#");
+ }
+ }
+
+ get host(): string {
+ return `${this.hostname}${this.port ? `:${this.port}` : ""}`;
+ }
+
+ set host(value: string) {
+ value = String(value);
+ const url = new URL(`http://${value}`);
+ this._parts.hostname = url.hostname;
+ this._parts.port = url.port;
+ }
+
+ get hostname(): string {
+ return this._parts.hostname;
+ }
+
+ set hostname(value: string) {
+ value = String(value);
+ this._parts.hostname = encodeURIComponent(value);
+ }
+
+ get href(): string {
+ const authentication =
+ this.username || this.password
+ ? `${this.username}${this.password ? ":" + this.password : ""}@`
+ : "";
+
+ return `${this.protocol}//${authentication}${this.host}${this.pathname}${
+ this.search
+ }${this.hash}`;
+ }
+
+ set href(value: string) {
+ value = String(value);
+ if (value !== this.href) {
+ const url = new URL(value);
+ this._parts = { ...url._parts };
+ this._updateSearchParams();
+ }
+ }
+
+ get origin(): string {
+ return `${this.protocol}//${this.host}`;
+ }
+
+ get password(): string {
+ return this._parts.password;
+ }
+
+ set password(value: string) {
+ value = String(value);
+ this._parts.password = encodeURIComponent(value);
+ }
+
+ get pathname(): string {
+ return this._parts.path ? this._parts.path : "/";
+ }
+
+ set pathname(value: string) {
+ value = unescape(String(value));
+ if (!value || value.charAt(0) !== "/") {
+ value = `/${value}`;
+ }
+ // paths can contain % unescaped
+ this._parts.path = escape(value).replace(/%25/g, "%");
+ }
+
+ get port(): string {
+ return this._parts.port;
+ }
+
+ set port(value: string) {
+ const port = parseInt(String(value), 10);
+ this._parts.port = isNaN(port)
+ ? ""
+ : Math.max(0, port % 2 ** 16).toString();
+ }
+
+ get protocol(): string {
+ return `${this._parts.protocol}:`;
+ }
+
+ set protocol(value: string) {
+ value = String(value);
+ if (value) {
+ if (value.charAt(value.length - 1) === ":") {
+ value = value.slice(0, -1);
+ }
+ this._parts.protocol = encodeURIComponent(value);
+ }
+ }
+
+ get search(): string {
+ if (this._parts.query === null || this._parts.query === "") {
+ return "";
+ }
+
+ return this._parts.query;
+ }
+
+ set search(value: string) {
+ value = String(value);
+ let query: string | null;
+
+ if (value === "") {
+ query = null;
+ } else if (value.charAt(0) !== "?") {
+ query = `?${value}`;
+ } else {
+ query = value;
+ }
+
+ this._parts.query = query;
+ this._updateSearchParams();
+ }
+
+ get username(): string {
+ return this._parts.username;
+ }
+
+ set username(value: string) {
+ value = String(value);
+ this._parts.username = encodeURIComponent(value);
+ }
+
+ get searchParams(): urlSearchParams.URLSearchParams {
+ return this._searchParams;
+ }
+
+ constructor(url: string, base?: string | URL) {
+ let baseParts: URLParts | undefined;
+ if (base) {
+ baseParts = typeof base === "string" ? parse(base) : base._parts;
+ if (!baseParts || baseParts.protocol == "") {
+ throw new TypeError("Invalid base URL.");
+ }
+ }
+
+ const urlParts = parse(url);
+ if (!urlParts) {
+ throw new TypeError("Invalid URL.");
+ }
+
+ if (urlParts.protocol) {
+ this._parts = urlParts;
+ } else if (baseParts) {
+ this._parts = {
+ protocol: baseParts.protocol,
+ username: baseParts.username,
+ password: baseParts.password,
+ hostname: baseParts.hostname,
+ port: baseParts.port,
+ path: resolvePathFromBase(urlParts.path, baseParts.path || "/"),
+ query: urlParts.query,
+ hash: urlParts.hash
+ };
+ } else {
+ throw new TypeError("URL requires a base URL.");
+ }
+ this._updateSearchParams();
+ }
+
+ toString(): string {
+ return this.href;
+ }
+
+ toJSON(): string {
+ return this.href;
+ }
+
+ // TODO(kevinkassimo): implement MediaSource version in the future.
+ static createObjectURL(b: domTypes.Blob): string {
+ const origin = window.location.origin || "http://deno-opaque-origin";
+ const key = `blob:${origin}/${generateUUID()}`;
+ blobURLMap.set(key, b);
+ return key;
+ }
+
+ static revokeObjectURL(url: string): void {
+ let urlObject;
+ try {
+ urlObject = new URL(url);
+ } catch {
+ throw new TypeError("Provided URL string is not valid");
+ }
+ if (urlObject.protocol !== "blob:") {
+ return;
+ }
+ // An origin match check seems irrelevant for now, unless we implement
+ // persistent storage per window.location.origin at some point.
+ blobURLMap.delete(url);
+ }
+}
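
A sketch of the blob URL lifecycle backed by blobURLMap; it assumes the Blob constructor from blob.ts is exposed as a global, which this hunk does not show:

    const blob = new Blob(["hello"]);
    const url = URL.createObjectURL(blob);  // "blob:<origin>/<uuid>"
    // ...hand `url` to fetch() or a worker...
    URL.revokeObjectURL(url);               // removes the entry from blobURLMap
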
diff --git a/cli/js/url_search_params.ts b/cli/js/url_search_params.ts
new file mode 100644
index 000000000..0835133d5
--- /dev/null
+++ b/cli/js/url_search_params.ts
@@ -0,0 +1,297 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { URL } from "./url.ts";
+import { requiredArguments, isIterable } from "./util.ts";
+
+export class URLSearchParams {
+ private params: Array<[string, string]> = [];
+ private url: URL | null = null;
+
+ constructor(init: string | string[][] | Record<string, string> = "") {
+ if (typeof init === "string") {
+ this._handleStringInitialization(init);
+ return;
+ }
+
+ if (Array.isArray(init) || isIterable(init)) {
+ this._handleArrayInitialization(init);
+ return;
+ }
+
+ if (Object(init) !== init) {
+ return;
+ }
+
+ if (init instanceof URLSearchParams) {
+ this.params = init.params;
+ return;
+ }
+
+ // Overload: record<USVString, USVString>
+ for (const key of Object.keys(init)) {
+ this.append(key, init[key]);
+ }
+ }
+
+ private updateSteps(): void {
+ if (this.url === null) {
+ return;
+ }
+
+ let query: string | null = this.toString();
+ if (query === "") {
+ query = null;
+ }
+
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ (this.url as any)._parts.query = query;
+ }
+
+ /** Appends a specified key/value pair as a new search parameter.
+ *
+ * searchParams.append('name', 'first');
+ * searchParams.append('name', 'second');
+ */
+ append(name: string, value: string): void {
+ requiredArguments("URLSearchParams.append", arguments.length, 2);
+ this.params.push([String(name), String(value)]);
+ this.updateSteps();
+ }
+
+ /** Deletes the given search parameter and its associated value,
+ * from the list of all search parameters.
+ *
+ * searchParams.delete('name');
+ */
+ delete(name: string): void {
+ requiredArguments("URLSearchParams.delete", arguments.length, 1);
+ name = String(name);
+ let i = 0;
+ while (i < this.params.length) {
+ if (this.params[i][0] === name) {
+ this.params.splice(i, 1);
+ } else {
+ i++;
+ }
+ }
+ this.updateSteps();
+ }
+
+ /** Returns all the values associated with a given search parameter
+ * as an array.
+ *
+ * searchParams.getAll('name');
+ */
+ getAll(name: string): string[] {
+ requiredArguments("URLSearchParams.getAll", arguments.length, 1);
+ name = String(name);
+ const values = [];
+ for (const entry of this.params) {
+ if (entry[0] === name) {
+ values.push(entry[1]);
+ }
+ }
+
+ return values;
+ }
+
+ /** Returns the first value associated to the given search parameter.
+ *
+ * searchParams.get('name');
+ */
+ get(name: string): string | null {
+ requiredArguments("URLSearchParams.get", arguments.length, 1);
+ name = String(name);
+ for (const entry of this.params) {
+ if (entry[0] === name) {
+ return entry[1];
+ }
+ }
+
+ return null;
+ }
+
+ /** Returns a Boolean that indicates whether a parameter with the
+ * specified name exists.
+ *
+ * searchParams.has('name');
+ */
+ has(name: string): boolean {
+ requiredArguments("URLSearchParams.has", arguments.length, 1);
+ name = String(name);
+ return this.params.some((entry): boolean => entry[0] === name);
+ }
+
+ /** Sets the value associated with a given search parameter to the
+ * given value. If there were several matching values, this method
+ * deletes the others. If the search parameter doesn't exist, this
+ * method creates it.
+ *
+ * searchParams.set('name', 'value');
+ */
+ set(name: string, value: string): void {
+ requiredArguments("URLSearchParams.set", arguments.length, 2);
+
+ // If there are any name-value pairs whose name is name, in list,
+ // set the value of the first such name-value pair to value
+ // and remove the others.
+ name = String(name);
+ value = String(value);
+ let found = false;
+ let i = 0;
+ while (i < this.params.length) {
+ if (this.params[i][0] === name) {
+ if (!found) {
+ this.params[i][1] = value;
+ found = true;
+ i++;
+ } else {
+ this.params.splice(i, 1);
+ }
+ } else {
+ i++;
+ }
+ }
+
+ // Otherwise, append a new name-value pair whose name is name
+ // and value is value, to list.
+ if (!found) {
+ this.append(name, value);
+ }
+
+ this.updateSteps();
+ }
+
+ /** Sorts all key/value pairs contained in this object in place.
+ * The sort order is according to Unicode code points of the keys.
+ *
+ * searchParams.sort();
+ */
+ sort(): void {
+ this.params = this.params.sort(
+ (a, b): number => (a[0] === b[0] ? 0 : a[0] > b[0] ? 1 : -1)
+ );
+ this.updateSteps();
+ }
+
+ /** Calls a function for each element contained in this object.
+ * Optionally accepts an object to use as `this` when executing the
+ * callback, passed as the second argument.
+ *
+ * searchParams.forEach((value, key, parent) => {
+ * console.log(value, key, parent);
+ * });
+ *
+ */
+ forEach(
+ callbackfn: (value: string, key: string, parent: this) => void,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ thisArg?: any
+ ): void {
+ requiredArguments("URLSearchParams.forEach", arguments.length, 1);
+
+ if (typeof thisArg !== "undefined") {
+ callbackfn = callbackfn.bind(thisArg);
+ }
+
+ for (const [key, value] of this.entries()) {
+ callbackfn(value, key, this);
+ }
+ }
+
+ /** Returns an iterator over all the keys contained in this
+ * object.
+ *
+ * for (const key of searchParams.keys()) {
+ * console.log(key);
+ * }
+ */
+ *keys(): IterableIterator<string> {
+ for (const entry of this.params) {
+ yield entry[0];
+ }
+ }
+
+ /** Returns an iterator over all the values contained in this
+ * object.
+ *
+ * for (const value of searchParams.values()) {
+ * console.log(value);
+ * }
+ */
+ *values(): IterableIterator<string> {
+ for (const entry of this.params) {
+ yield entry[1];
+ }
+ }
+
+ /** Returns an iterator over all the key/value pairs contained
+ * in this object.
+ *
+ * for (const [key, value] of searchParams.entries()) {
+ * console.log(key, value);
+ * }
+ */
+ *entries(): IterableIterator<[string, string]> {
+ yield* this.params;
+ }
+
+ /** Returns an iterator over all the key/value pairs contained
+ * in this object.
+ *
+ * for (const [key, value] of searchParams[Symbol.iterator]()) {
+ * console.log(key, value);
+ * }
+ */
+ *[Symbol.iterator](): IterableIterator<[string, string]> {
+ yield* this.params;
+ }
+
+ /** Returns a query string suitable for use in a URL.
+ *
+ * searchParams.toString();
+ */
+ toString(): string {
+ return this.params
+ .map(
+ (tuple): string =>
+ `${encodeURIComponent(tuple[0])}=${encodeURIComponent(tuple[1])}`
+ )
+ .join("&");
+ }
+
+ private _handleStringInitialization(init: string): void {
+ // Overload: USVString
+ // If init is a string and starts with U+003F (?),
+ // remove the first code point from init.
+ if (init.charCodeAt(0) === 0x003f) {
+ init = init.slice(1);
+ }
+
+ for (const pair of init.split("&")) {
+ // Empty params are ignored
+ if (pair.length === 0) {
+ continue;
+ }
+ const position = pair.indexOf("=");
+ const name = pair.slice(0, position === -1 ? pair.length : position);
+ const value = pair.slice(name.length + 1);
+ this.append(decodeURIComponent(name), decodeURIComponent(value));
+ }
+ }
+
+ private _handleArrayInitialization(
+ init: string[][] | Iterable<[string, string]>
+ ): void {
+ // Overload: sequence<sequence<USVString>>
+ for (const tuple of init) {
+ // If pair does not contain exactly two items, then throw a TypeError.
+ if (tuple.length !== 2) {
+ throw new TypeError(
+ "URLSearchParams.constructor tuple array argument must only contain pair elements"
+ );
+ }
+ this.append(tuple[0], tuple[1]);
+ }
+ }
+}
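
A sketch of how the wrapped append/delete/set methods (installed by URL's _updateSearchParams) write changes back into the owning URL:

    const u = new URL("https://example.com/?a=1");
    u.searchParams.append("b", "2");
    console.log(u.href);  // "https://example.com/?a=1&b=2"
    u.searchParams.delete("a");
    u.searchParams.delete("b");
    console.log(u.href);  // "https://example.com/" -- the "?" is dropped too
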
diff --git a/cli/js/url_search_params_test.ts b/cli/js/url_search_params_test.ts
new file mode 100644
index 000000000..08b0c5a1f
--- /dev/null
+++ b/cli/js/url_search_params_test.ts
@@ -0,0 +1,238 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assert, assertEquals } from "./test_util.ts";
+
+test(function urlSearchParamsInitString(): void {
+ const init = "c=4&a=2&b=3&%C3%A1=1";
+ const searchParams = new URLSearchParams(init);
+ assert(
+ init === searchParams.toString(),
+ "The init query string does not match"
+ );
+});
+
+test(function urlSearchParamsInitIterable(): void {
+ const init = [["a", "54"], ["b", "true"]];
+ const searchParams = new URLSearchParams(init);
+ assertEquals(searchParams.toString(), "a=54&b=true");
+});
+
+test(function urlSearchParamsInitRecord(): void {
+ const init = { a: "54", b: "true" };
+ const searchParams = new URLSearchParams(init);
+ assertEquals(searchParams.toString(), "a=54&b=true");
+});
+
+test(function urlSearchParamsInit(): void {
+ const params1 = new URLSearchParams("a=b");
+ assertEquals(params1.toString(), "a=b");
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ const params2 = new URLSearchParams(params1 as any);
+ assertEquals(params2.toString(), "a=b");
+});
+
+test(function urlSearchParamsAppendSuccess(): void {
+ const searchParams = new URLSearchParams();
+ searchParams.append("a", "true");
+ assertEquals(searchParams.toString(), "a=true");
+});
+
+test(function urlSearchParamsDeleteSuccess(): void {
+ const init = "a=54&b=true";
+ const searchParams = new URLSearchParams(init);
+ searchParams.delete("b");
+ assertEquals(searchParams.toString(), "a=54");
+});
+
+test(function urlSearchParamsGetAllSuccess(): void {
+ const init = "a=54&b=true&a=true";
+ const searchParams = new URLSearchParams(init);
+ assertEquals(searchParams.getAll("a"), ["54", "true"]);
+ assertEquals(searchParams.getAll("b"), ["true"]);
+ assertEquals(searchParams.getAll("c"), []);
+});
+
+test(function urlSearchParamsGetSuccess(): void {
+ const init = "a=54&b=true&a=true";
+ const searchParams = new URLSearchParams(init);
+ assertEquals(searchParams.get("a"), "54");
+ assertEquals(searchParams.get("b"), "true");
+ assertEquals(searchParams.get("c"), null);
+});
+
+test(function urlSearchParamsHasSuccess(): void {
+ const init = "a=54&b=true&a=true";
+ const searchParams = new URLSearchParams(init);
+ assert(searchParams.has("a"));
+ assert(searchParams.has("b"));
+ assert(!searchParams.has("c"));
+});
+
+test(function urlSearchParamsSetReplaceFirstAndRemoveOthers(): void {
+ const init = "a=54&b=true&a=true";
+ const searchParams = new URLSearchParams(init);
+ searchParams.set("a", "false");
+ assertEquals(searchParams.toString(), "a=false&b=true");
+});
+
+test(function urlSearchParamsSetAppendNew(): void {
+ const init = "a=54&b=true&a=true";
+ const searchParams = new URLSearchParams(init);
+ searchParams.set("c", "foo");
+ assertEquals(searchParams.toString(), "a=54&b=true&a=true&c=foo");
+});
+
+test(function urlSearchParamsSortSuccess(): void {
+ const init = "c=4&a=2&b=3&a=1";
+ const searchParams = new URLSearchParams(init);
+ searchParams.sort();
+ assertEquals(searchParams.toString(), "a=2&a=1&b=3&c=4");
+});
+
+test(function urlSearchParamsForEachSuccess(): void {
+ const init = [["a", "54"], ["b", "true"]];
+ const searchParams = new URLSearchParams(init);
+ let callNum = 0;
+ searchParams.forEach(
+ (value, key, parent): void => {
+ assertEquals(searchParams, parent);
+ assertEquals(value, init[callNum][1]);
+ assertEquals(key, init[callNum][0]);
+ callNum++;
+ }
+ );
+ assertEquals(callNum, init.length);
+});
+
+test(function urlSearchParamsMissingName(): void {
+ const init = "=4";
+ const searchParams = new URLSearchParams(init);
+ assertEquals(searchParams.get(""), "4");
+ assertEquals(searchParams.toString(), "=4");
+});
+
+test(function urlSearchParamsMissingValue(): void {
+ const init = "4=";
+ const searchParams = new URLSearchParams(init);
+ assertEquals(searchParams.get("4"), "");
+ assertEquals(searchParams.toString(), "4=");
+});
+
+test(function urlSearchParamsMissingEqualSign(): void {
+ const init = "4";
+ const searchParams = new URLSearchParams(init);
+ assertEquals(searchParams.get("4"), "");
+ assertEquals(searchParams.toString(), "4=");
+});
+
+test(function urlSearchParamsMissingPair(): void {
+ const init = "c=4&&a=54&";
+ const searchParams = new URLSearchParams(init);
+ assertEquals(searchParams.toString(), "c=4&a=54");
+});
+
+// If pair does not contain exactly two items, then throw a TypeError.
+// ref https://url.spec.whatwg.org/#interface-urlsearchparams
+test(function urlSearchParamsShouldThrowTypeError(): void {
+ let hasThrown = 0;
+
+ try {
+ new URLSearchParams([["1"]]);
+ hasThrown = 1;
+ } catch (err) {
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+
+ assertEquals(hasThrown, 2);
+
+ try {
+ new URLSearchParams([["1", "2", "3"]]);
+ hasThrown = 1;
+ } catch (err) {
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+
+ assertEquals(hasThrown, 2);
+});
+
+test(function urlSearchParamsAppendArgumentsCheck(): void {
+ const methodRequireOneParam = ["delete", "getAll", "get", "has", "forEach"];
+
+ const methodRequireTwoParams = ["append", "set"];
+
+ methodRequireOneParam.concat(methodRequireTwoParams).forEach(
+ (method: string): void => {
+ const searchParams = new URLSearchParams();
+ let hasThrown = 0;
+ try {
+ searchParams[method]();
+ hasThrown = 1;
+ } catch (err) {
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+ assertEquals(hasThrown, 2);
+ }
+ );
+
+ methodRequireTwoParams.forEach(
+ (method: string): void => {
+ const searchParams = new URLSearchParams();
+ let hasThrown = 0;
+ try {
+ searchParams[method]("foo");
+ hasThrown = 1;
+ } catch (err) {
+ if (err instanceof TypeError) {
+ hasThrown = 2;
+ } else {
+ hasThrown = 3;
+ }
+ }
+ assertEquals(hasThrown, 2);
+ }
+ );
+});
+
+// ref: https://github.com/web-platform-tests/wpt/blob/master/url/urlsearchparams-delete.any.js
+test(function urlSearchParamsDeletingAppendedMultiple(): void {
+ const params = new URLSearchParams();
+ params.append("first", (1 as unknown) as string);
+ assert(params.has("first"));
+ assertEquals(params.get("first"), "1");
+ params.delete("first");
+ assertEquals(params.has("first"), false);
+ params.append("first", (1 as unknown) as string);
+ params.append("first", (10 as unknown) as string);
+ params.delete("first");
+ assertEquals(params.has("first"), false);
+});
+
+// ref: https://github.com/web-platform-tests/wpt/blob/master/url/urlsearchparams-constructor.any.js#L176-L182
+test(function urlSearchParamsCustomSymbolIterator(): void {
+ const params = new URLSearchParams();
+ params[Symbol.iterator] = function*(): IterableIterator<[string, string]> {
+ yield ["a", "b"];
+ };
+ const params1 = new URLSearchParams((params as unknown) as string[][]);
+ assertEquals(params1.get("a"), "b");
+});
+
+test(function urlSearchParamsCustomSymbolIteratorWithNonStringParams(): void {
+ const params = {};
+ params[Symbol.iterator] = function*(): IterableIterator<[number, number]> {
+ yield [1, 2];
+ };
+ const params1 = new URLSearchParams((params as unknown) as string[][]);
+ assertEquals(params1.get("1"), "2");
+});
diff --git a/cli/js/url_test.ts b/cli/js/url_test.ts
new file mode 100644
index 000000000..07a8028ce
--- /dev/null
+++ b/cli/js/url_test.ts
@@ -0,0 +1,181 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { test, assert, assertEquals } from "./test_util.ts";
+
+test(function urlParsing(): void {
+ const url = new URL(
+ "https://foo:bar@baz.qat:8000/qux/quux?foo=bar&baz=12#qat"
+ );
+ assertEquals(url.hash, "#qat");
+ assertEquals(url.host, "baz.qat:8000");
+ assertEquals(url.hostname, "baz.qat");
+ assertEquals(
+ url.href,
+ "https://foo:bar@baz.qat:8000/qux/quux?foo=bar&baz=12#qat"
+ );
+ assertEquals(url.origin, "https://baz.qat:8000");
+ assertEquals(url.password, "bar");
+ assertEquals(url.pathname, "/qux/quux");
+ assertEquals(url.port, "8000");
+ assertEquals(url.protocol, "https:");
+ assertEquals(url.search, "?foo=bar&baz=12");
+ assertEquals(url.searchParams.getAll("foo"), ["bar"]);
+ assertEquals(url.searchParams.getAll("baz"), ["12"]);
+ assertEquals(url.username, "foo");
+ assertEquals(
+ String(url),
+ "https://foo:bar@baz.qat:8000/qux/quux?foo=bar&baz=12#qat"
+ );
+ assertEquals(
+ JSON.stringify({ key: url }),
+ `{"key":"https://foo:bar@baz.qat:8000/qux/quux?foo=bar&baz=12#qat"}`
+ );
+});
+
+test(function urlModifications(): void {
+ const url = new URL(
+ "https://foo:bar@baz.qat:8000/qux/quux?foo=bar&baz=12#qat"
+ );
+ url.hash = "";
+ assertEquals(
+ url.href,
+ "https://foo:bar@baz.qat:8000/qux/quux?foo=bar&baz=12"
+ );
+ url.host = "qat.baz:8080";
+ assertEquals(
+ url.href,
+ "https://foo:bar@qat.baz:8080/qux/quux?foo=bar&baz=12"
+ );
+ url.hostname = "foo.bar";
+ assertEquals(
+ url.href,
+ "https://foo:bar@foo.bar:8080/qux/quux?foo=bar&baz=12"
+ );
+ url.password = "qux";
+ assertEquals(
+ url.href,
+ "https://foo:qux@foo.bar:8080/qux/quux?foo=bar&baz=12"
+ );
+ url.pathname = "/foo/bar%qat";
+ assertEquals(
+ url.href,
+ "https://foo:qux@foo.bar:8080/foo/bar%qat?foo=bar&baz=12"
+ );
+ url.port = "";
+ assertEquals(url.href, "https://foo:qux@foo.bar/foo/bar%qat?foo=bar&baz=12");
+ url.protocol = "http:";
+ assertEquals(url.href, "http://foo:qux@foo.bar/foo/bar%qat?foo=bar&baz=12");
+ url.search = "?foo=bar&foo=baz";
+ assertEquals(url.href, "http://foo:qux@foo.bar/foo/bar%qat?foo=bar&foo=baz");
+ assertEquals(url.searchParams.getAll("foo"), ["bar", "baz"]);
+ url.username = "foo@bar";
+ assertEquals(
+ url.href,
+ "http://foo%40bar:qux@foo.bar/foo/bar%qat?foo=bar&foo=baz"
+ );
+ url.searchParams.set("bar", "qat");
+ assertEquals(
+ url.href,
+ "http://foo%40bar:qux@foo.bar/foo/bar%qat?foo=bar&foo=baz&bar=qat"
+ );
+ url.searchParams.delete("foo");
+ assertEquals(url.href, "http://foo%40bar:qux@foo.bar/foo/bar%qat?bar=qat");
+ url.searchParams.append("foo", "bar");
+ assertEquals(
+ url.href,
+ "http://foo%40bar:qux@foo.bar/foo/bar%qat?bar=qat&foo=bar"
+ );
+});
+
+test(function urlModifyHref(): void {
+ const url = new URL("http://example.com/");
+ url.href = "https://foo:bar@example.com:8080/baz/qat#qux";
+ assertEquals(url.protocol, "https:");
+ assertEquals(url.username, "foo");
+ assertEquals(url.password, "bar");
+ assertEquals(url.host, "example.com:8080");
+ assertEquals(url.hostname, "example.com");
+ assertEquals(url.pathname, "/baz/qat");
+ assertEquals(url.hash, "#qux");
+});
+
+test(function urlModifyPathname(): void {
+ const url = new URL("http://foo.bar/baz%qat/qux%quux");
+ assertEquals(url.pathname, "/baz%qat/qux%quux");
+ url.pathname = url.pathname;
+ assertEquals(url.pathname, "/baz%qat/qux%quux");
+ url.pathname = "baz#qat qux";
+ assertEquals(url.pathname, "/baz%23qat%20qux");
+ url.pathname = url.pathname;
+ assertEquals(url.pathname, "/baz%23qat%20qux");
+});
+
+test(function urlModifyHash(): void {
+ const url = new URL("http://foo.bar");
+ url.hash = "%foo bar/qat%qux#bar";
+ assertEquals(url.hash, "#%foo%20bar/qat%qux#bar");
+ url.hash = url.hash;
+ assertEquals(url.hash, "#%foo%20bar/qat%qux#bar");
+});
+
+test(function urlSearchParamsReuse(): void {
+ const url = new URL(
+ "https://foo:bar@baz.qat:8000/qux/quux?foo=bar&baz=12#qat"
+ );
+ const sp = url.searchParams;
+ url.host = "baz.qat";
+ assert(sp === url.searchParams, "Search params should be reused.");
+});
+
+test(function urlBaseURL(): void {
+ const base = new URL(
+ "https://foo:bar@baz.qat:8000/qux/quux?foo=bar&baz=12#qat"
+ );
+ const url = new URL("/foo/bar?baz=foo#qux", base);
+ assertEquals(url.href, "https://foo:bar@baz.qat:8000/foo/bar?baz=foo#qux");
+});
+
+test(function urlBaseString(): void {
+ const url = new URL(
+ "/foo/bar?baz=foo#qux",
+ "https://foo:bar@baz.qat:8000/qux/quux?foo=bar&baz=12#qat"
+ );
+ assertEquals(url.href, "https://foo:bar@baz.qat:8000/foo/bar?baz=foo#qux");
+});
+
+test(function urlRelativeWithBase(): void {
+ assertEquals(new URL("", "file:///a/a/a").href, "file:///a/a/a");
+ assertEquals(new URL(".", "file:///a/a/a").href, "file:///a/a/");
+ assertEquals(new URL("..", "file:///a/a/a").href, "file:///a/");
+ assertEquals(new URL("b", "file:///a/a/a").href, "file:///a/a/b");
+ assertEquals(new URL("b", "file:///a/a/a/").href, "file:///a/a/a/b");
+ assertEquals(new URL("b/", "file:///a/a/a").href, "file:///a/a/b/");
+ assertEquals(new URL("../b", "file:///a/a/a").href, "file:///a/b");
+});
+
+test(function emptyBasePath(): void {
+ assertEquals(new URL("", "http://example.com").href, "http://example.com/");
+});
+
+test(function deletingAllParamsRemovesQuestionMarkFromURL(): void {
+ const url = new URL("http://example.com/?param1&param2");
+ url.searchParams.delete("param1");
+ url.searchParams.delete("param2");
+ assertEquals(url.href, "http://example.com/");
+ assertEquals(url.search, "");
+});
+
+test(function removingNonExistentParamRemovesQuestionMarkFromURL(): void {
+ const url = new URL("http://example.com/?");
+ assertEquals(url.href, "http://example.com/?");
+ url.searchParams.delete("param1");
+ assertEquals(url.href, "http://example.com/");
+ assertEquals(url.search, "");
+});
+
+test(function sortingNonExistentParamRemovesQuestionMarkFromURL(): void {
+ const url = new URL("http://example.com/?");
+ assertEquals(url.href, "http://example.com/?");
+ url.searchParams.sort();
+ assertEquals(url.href, "http://example.com/");
+ assertEquals(url.search, "");
+});
diff --git a/cli/js/util.ts b/cli/js/util.ts
new file mode 100644
index 000000000..013dc7ee1
--- /dev/null
+++ b/cli/js/util.ts
@@ -0,0 +1,225 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { TypedArray } from "./types.ts";
+import { window } from "./window.ts";
+
+let logDebug = false;
+let logSource = "JS";
+
+// @internal
+export function setLogDebug(debug: boolean, source?: string): void {
+ logDebug = debug;
+ if (source) {
+ logSource = source;
+ }
+}
+
+/** Debug logging for deno.
+ * Enable with the `--log-debug` or `-D` command line flag.
+ * @internal
+ */
+export function log(...args: unknown[]): void {
+ if (logDebug) {
+ // if we destructure `console` off `window` too early, we don't bind to
+ // the right console, therefore we don't log anything out.
+ window.console.log(`DEBUG ${logSource} -`, ...args);
+ }
+}
+
+// @internal
+export function assert(cond: boolean, msg = "assert"): void {
+ if (!cond) {
+ throw Error(msg);
+ }
+}
+
+// @internal
+export function typedArrayToArrayBuffer(ta: TypedArray): ArrayBuffer {
+ const ab = ta.buffer.slice(ta.byteOffset, ta.byteOffset + ta.byteLength);
+ return ab as ArrayBuffer;
+}
+
+// @internal
+export function arrayToStr(ui8: Uint8Array): string {
+ return String.fromCharCode(...ui8);
+}
+
+/** A `Resolvable` is a Promise with the `reject` and `resolve` functions
+ * placed as methods on the promise object itself. It allows you to do:
+ *
+ * const p = createResolvable<number>();
+ * // ...
+ * p.resolve(42);
+ *
+ * It'd be prettier to make `Resolvable` a class that inherits from `Promise`,
+ * rather than an interface. This is possible in ES2016, but TypeScript
+ * produces broken code when targeting ES5.
+ *
+ * At the time of writing, the GitHub issue is closed in favour of a proposed
+ * solution that is awaiting feedback.
+ *
+ * @see https://github.com/Microsoft/TypeScript/issues/15202
+ * @see https://github.com/Microsoft/TypeScript/issues/15397
+ * @internal
+ */
+
+export interface ResolvableMethods<T> {
+ resolve: (value?: T | PromiseLike<T>) => void;
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ reject: (reason?: any) => void;
+}
+
+// @internal
+export type Resolvable<T> = Promise<T> & ResolvableMethods<T>;
+
+// @internal
+export function createResolvable<T>(): Resolvable<T> {
+ let methods: ResolvableMethods<T>;
+ const promise = new Promise<T>(
+ (resolve, reject): void => {
+ methods = { resolve, reject };
+ }
+ );
+ // TypeScript doesn't know that the Promise callback occurs synchronously
+ // therefore use of not null assertion (`!`)
+ return Object.assign(promise, methods!) as Resolvable<T>;
+}
+
+// @internal
+export function notImplemented(): never {
+ throw new Error("Not implemented");
+}
+
+// @internal
+export function unreachable(): never {
+ throw new Error("Code not reachable");
+}
+
+// @internal
+export function hexdump(u8: Uint8Array): string {
+ return Array.prototype.map
+ .call(
+ u8,
+ (x: number): string => {
+ return ("00" + x.toString(16)).slice(-2);
+ }
+ )
+ .join(" ");
+}
+
+// @internal
+export function containsOnlyASCII(str: string): boolean {
+ if (typeof str !== "string") {
+ return false;
+ }
+ return /^[\x00-\x7F]*$/.test(str);
+}
+
+const TypedArrayConstructor = Object.getPrototypeOf(Uint8Array);
+export function isTypedArray(x: unknown): x is TypedArray {
+ return x instanceof TypedArrayConstructor;
+}
+
+// Returns whether o is an object, not null, and not a function.
+// @internal
+export function isObject(o: unknown): o is object {
+ return o != null && typeof o === "object";
+}
+
+// Returns whether o is iterable.
+export function isIterable<T, P extends keyof T, K extends T[P]>(
+ o: T
+): o is T & Iterable<[P, K]> {
+ // checks for null and undefined
+ if (o == null) {
+ return false;
+ }
+ return (
+ typeof ((o as unknown) as Iterable<[P, K]>)[Symbol.iterator] === "function"
+ );
+}
+
+// @internal
+export function requiredArguments(
+ name: string,
+ length: number,
+ required: number
+): void {
+ if (length < required) {
+ const errMsg = `${name} requires at least ${required} argument${
+ required === 1 ? "" : "s"
+ }, but only ${length} present`;
+ throw new TypeError(errMsg);
+ }
+}
+
+// @internal
+export function immutableDefine(
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ o: any,
+ p: string | number | symbol,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ value: any
+): void {
+ Object.defineProperty(o, p, {
+ value,
+ configurable: false,
+ writable: false
+ });
+}
+
+// Returns values from a WeakMap to emulate private properties in JavaScript
+export function getPrivateValue<
+ K extends object,
+ V extends object,
+ W extends keyof V
+>(instance: K, weakMap: WeakMap<K, V>, key: W): V[W] {
+ if (weakMap.has(instance)) {
+ return weakMap.get(instance)![key];
+ }
+ throw new TypeError("Illegal invocation");
+}
+
+/**
+ * Determines whether an object has a property with the specified name.
+ * Avoid calling prototype builtin `hasOwnProperty` for two reasons:
+ *
+ * 1. `hasOwnProperty` is defined on the object as something else:
+ *
+ * const options = {
+ * ending: 'utf8',
+ * hasOwnProperty: 'foo'
+ * };
+ * options.hasOwnProperty('ending') // throws a TypeError
+ *
+ * 2. The object doesn't inherit from `Object.prototype`:
+ *
+ * const options = Object.create(null);
+ * options.ending = 'utf8';
+ * options.hasOwnProperty('ending'); // throws a TypeError
+ *
+ * @param obj An object.
+ * @param v A property name.
+ * @see https://eslint.org/docs/rules/no-prototype-builtins
+ * @internal
+ */
+export function hasOwnProperty<T>(obj: T, v: PropertyKey): boolean {
+ if (obj == null) {
+ return false;
+ }
+ return Object.prototype.hasOwnProperty.call(obj, v);
+}
+
+/**
+ * Split a number into two parts: lower 32 bit and higher 32 bit
+ * (as if the number is represented as uint64.)
+ *
+ * @param n Number to split.
+ * @internal
+ */
+export function splitNumberToParts(n: number): number[] {
+ // JS bitwise operators (OR, SHIFT) operate as if number is uint32.
+ const lower = n | 0;
+ // This is also faster than Math.floor(n / 0x100000000) in V8.
+ const higher = (n - lower) / 0x100000000;
+ return [lower, higher];
+}
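
Two small sketches of the helpers above: a resolvable promise used as a one-shot signal, and the uint64 split:

    const ready = createResolvable<string>();
    setTimeout((): void => ready.resolve("done"), 10);
    ready.then((msg): void => console.log("signalled:", msg));

    // Treats the number as a uint64: lower 32 bits first, then the upper 32 bits.
    splitNumberToParts(0x100000002); // [2, 1]
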
diff --git a/cli/js/utime.ts b/cli/js/utime.ts
new file mode 100644
index 000000000..7495378b1
--- /dev/null
+++ b/cli/js/utime.ts
@@ -0,0 +1,45 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { sendSync, sendAsync } from "./dispatch_json.ts";
+import { OP_UTIME } from "./dispatch.ts";
+
+function toSecondsFromEpoch(v: number | Date): number {
+ return v instanceof Date ? v.valueOf() / 1000 : v;
+}
+
+/** Synchronously changes the access and modification times of a file system
+ * object referenced by `filename`. Given times are either in seconds
+ * (Unix epoch time) or as `Date` objects.
+ *
+ * Deno.utimeSync("myfile.txt", 1556495550, new Date());
+ */
+export function utimeSync(
+ filename: string,
+ atime: number | Date,
+ mtime: number | Date
+): void {
+ sendSync(OP_UTIME, {
+ filename,
+ // TODO(ry) split atime, mtime into [seconds, nanoseconds] tuple
+ atime: toSecondsFromEpoch(atime),
+ mtime: toSecondsFromEpoch(mtime)
+ });
+}
+
+/** Changes the access and modification times of a file system object
+ * referenced by `filename`. Given times are either in seconds
+ * (Unix epoch time) or as `Date` objects.
+ *
+ * await Deno.utime("myfile.txt", 1556495550, new Date());
+ */
+export async function utime(
+ filename: string,
+ atime: number | Date,
+ mtime: number | Date
+): Promise<void> {
+ await sendAsync(OP_UTIME, {
+ filename,
+ // TODO(ry) split atime, mtime into [seconds, nanoseconds] tuple
+ atime: toSecondsFromEpoch(atime),
+ mtime: toSecondsFromEpoch(mtime)
+ });
+}
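
A usage sketch; the path is illustrative and write permission is assumed. Seconds since the epoch and Date objects are interchangeable:

    // Set both timestamps from epoch seconds...
    Deno.utimeSync("build/output.txt", 1556495550, 1556495550);
    // ...or from Date objects, which are converted via valueOf() / 1000.
    Deno.utimeSync("build/output.txt", new Date(), new Date());
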
diff --git a/cli/js/utime_test.ts b/cli/js/utime_test.ts
new file mode 100644
index 000000000..535ee1f40
--- /dev/null
+++ b/cli/js/utime_test.ts
@@ -0,0 +1,181 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert, assertEquals } from "./test_util.ts";
+
+// Allow 10 second difference.
+// Note this might not be enough for FAT (but we are not testing on such fs).
+function assertFuzzyTimestampEquals(t1: number, t2: number): void {
+ assert(Math.abs(t1 - t2) < 10);
+}
+
+testPerm({ read: true, write: true }, function utimeSyncFileSuccess(): void {
+ const testDir = Deno.makeTempDirSync();
+ const filename = testDir + "/file.txt";
+ Deno.writeFileSync(filename, new TextEncoder().encode("hello"), {
+ perm: 0o666
+ });
+
+ const atime = 1000;
+ const mtime = 50000;
+ Deno.utimeSync(filename, atime, mtime);
+
+ const fileInfo = Deno.statSync(filename);
+ assertFuzzyTimestampEquals(fileInfo.accessed, atime);
+ assertFuzzyTimestampEquals(fileInfo.modified, mtime);
+});
+
+testPerm(
+ { read: true, write: true },
+ function utimeSyncDirectorySuccess(): void {
+ const testDir = Deno.makeTempDirSync();
+
+ const atime = 1000;
+ const mtime = 50000;
+ Deno.utimeSync(testDir, atime, mtime);
+
+ const dirInfo = Deno.statSync(testDir);
+ assertFuzzyTimestampEquals(dirInfo.accessed, atime);
+ assertFuzzyTimestampEquals(dirInfo.modified, mtime);
+ }
+);
+
+testPerm({ read: true, write: true }, function utimeSyncDateSuccess(): void {
+ const testDir = Deno.makeTempDirSync();
+
+ const atime = 1000;
+ const mtime = 50000;
+ Deno.utimeSync(testDir, new Date(atime * 1000), new Date(mtime * 1000));
+
+ const dirInfo = Deno.statSync(testDir);
+ assertFuzzyTimestampEquals(dirInfo.accessed, atime);
+ assertFuzzyTimestampEquals(dirInfo.modified, mtime);
+});
+
+testPerm(
+ { read: true, write: true },
+ function utimeSyncLargeNumberSuccess(): void {
+ const testDir = Deno.makeTempDirSync();
+
+ // There are Rust-side caps (might be fs related),
+ // so just make them slightly larger than UINT32_MAX.
+ const atime = 0x100000001;
+ const mtime = 0x100000002;
+ Deno.utimeSync(testDir, atime, mtime);
+
+ const dirInfo = Deno.statSync(testDir);
+ assertFuzzyTimestampEquals(dirInfo.accessed, atime);
+ assertFuzzyTimestampEquals(dirInfo.modified, mtime);
+ }
+);
+
+testPerm({ read: true, write: true }, function utimeSyncNotFound(): void {
+ const atime = 1000;
+ const mtime = 50000;
+
+ let caughtError = false;
+ try {
+ Deno.utimeSync("/baddir", atime, mtime);
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.NotFound);
+ assertEquals(e.name, "NotFound");
+ }
+ assert(caughtError);
+});
+
+testPerm({ read: true, write: false }, function utimeSyncPerm(): void {
+ const atime = 1000;
+ const mtime = 50000;
+
+ let caughtError = false;
+ try {
+ Deno.utimeSync("/some_dir", atime, mtime);
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm(
+ { read: true, write: true },
+ async function utimeFileSuccess(): Promise<void> {
+ const testDir = Deno.makeTempDirSync();
+ const filename = testDir + "/file.txt";
+ Deno.writeFileSync(filename, new TextEncoder().encode("hello"), {
+ perm: 0o666
+ });
+
+ const atime = 1000;
+ const mtime = 50000;
+ await Deno.utime(filename, atime, mtime);
+
+ const fileInfo = Deno.statSync(filename);
+ assertFuzzyTimestampEquals(fileInfo.accessed, atime);
+ assertFuzzyTimestampEquals(fileInfo.modified, mtime);
+ }
+);
+
+testPerm(
+ { read: true, write: true },
+ async function utimeDirectorySuccess(): Promise<void> {
+ const testDir = Deno.makeTempDirSync();
+
+ const atime = 1000;
+ const mtime = 50000;
+ await Deno.utime(testDir, atime, mtime);
+
+ const dirInfo = Deno.statSync(testDir);
+ assertFuzzyTimestampEquals(dirInfo.accessed, atime);
+ assertFuzzyTimestampEquals(dirInfo.modified, mtime);
+ }
+);
+
+testPerm(
+ { read: true, write: true },
+ async function utimeDateSuccess(): Promise<void> {
+ const testDir = Deno.makeTempDirSync();
+
+ const atime = 1000;
+ const mtime = 50000;
+ await Deno.utime(testDir, new Date(atime * 1000), new Date(mtime * 1000));
+
+ const dirInfo = Deno.statSync(testDir);
+ assertFuzzyTimestampEquals(dirInfo.accessed, atime);
+ assertFuzzyTimestampEquals(dirInfo.modified, mtime);
+ }
+);
+
+testPerm({ read: true, write: true }, async function utimeNotFound(): Promise<
+ void
+> {
+ const atime = 1000;
+ const mtime = 50000;
+
+ let caughtError = false;
+ try {
+ await Deno.utime("/baddir", atime, mtime);
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.NotFound);
+ assertEquals(e.name, "NotFound");
+ }
+ assert(caughtError);
+});
+
+testPerm({ read: true, write: false }, async function utimePerm(): Promise<
+ void
+> {
+ const atime = 1000;
+ const mtime = 50000;
+
+ let caughtError = false;
+ try {
+ await Deno.utime("/some_dir", atime, mtime);
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
diff --git a/cli/js/version.ts b/cli/js/version.ts
new file mode 100644
index 000000000..08ac58122
--- /dev/null
+++ b/cli/js/version.ts
@@ -0,0 +1,28 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+interface Version {
+ deno: string;
+ v8: string;
+ typescript: string;
+}
+
+export const version: Version = {
+ deno: "",
+ v8: "",
+ typescript: ""
+};
+
+/**
+ * Sets the deno, v8, and typescript versions and freezes the version object.
+ * @internal
+ */
+export function setVersions(
+ denoVersion: string,
+ v8Version: string,
+ tsVersion: string
+): void {
+ version.deno = denoVersion;
+ version.v8 = v8Version;
+ version.typescript = tsVersion;
+
+ Object.freeze(version);
+}
diff --git a/cli/js/version_test.ts b/cli/js/version_test.ts
new file mode 100644
index 000000000..b32230812
--- /dev/null
+++ b/cli/js/version_test.ts
@@ -0,0 +1,8 @@
+import { test, assert } from "./test_util.ts";
+
+test(function version(): void {
+ const pattern = /^\d+\.\d+\.\d+/;
+ assert(pattern.test(Deno.version.deno));
+ assert(pattern.test(Deno.version.v8));
+ assert(pattern.test(Deno.version.typescript));
+});
diff --git a/cli/js/window.ts b/cli/js/window.ts
new file mode 100644
index 000000000..3d3d6601f
--- /dev/null
+++ b/cli/js/window.ts
@@ -0,0 +1,9 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+// (0, eval) is indirect eval.
+// See the links below for details:
+// - https://stackoverflow.com/a/14120023
+// - https://tc39.github.io/ecma262/#sec-performeval (spec)
+export const window = (0, eval)("this");
+// TODO: The above should be replaced with globalThis
+// when the globalThis proposal goes to stage 4
+// See https://github.com/tc39/proposal-global
diff --git a/cli/js/workers.ts b/cli/js/workers.ts
new file mode 100644
index 000000000..281fe619f
--- /dev/null
+++ b/cli/js/workers.ts
@@ -0,0 +1,193 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+/* eslint-disable @typescript-eslint/no-explicit-any */
+import * as dispatch from "./dispatch.ts";
+import { sendAsync, sendSync } from "./dispatch_json.ts";
+import { log } from "./util.ts";
+import { TextDecoder, TextEncoder } from "./text_encoding.ts";
+import { window } from "./window.ts";
+import { blobURLMap } from "./url.ts";
+import { blobBytesWeakMap } from "./blob.ts";
+
+const encoder = new TextEncoder();
+const decoder = new TextDecoder();
+
+export function encodeMessage(data: any): Uint8Array {
+ const dataJson = JSON.stringify(data);
+ return encoder.encode(dataJson);
+}
+
+export function decodeMessage(dataIntArray: Uint8Array): any {
+ const dataJson = decoder.decode(dataIntArray);
+ return JSON.parse(dataJson);
+}
+
+function createWorker(
+ specifier: string,
+ includeDenoNamespace: boolean,
+ hasSourceCode: boolean,
+ sourceCode: Uint8Array
+): number {
+ return sendSync(dispatch.OP_CREATE_WORKER, {
+ specifier,
+ includeDenoNamespace,
+ hasSourceCode,
+ sourceCode: new TextDecoder().decode(sourceCode)
+ });
+}
+
+async function hostGetWorkerClosed(rid: number): Promise<void> {
+ await sendAsync(dispatch.OP_HOST_GET_WORKER_CLOSED, { rid });
+}
+
+function hostPostMessage(rid: number, data: any): void {
+ const dataIntArray = encodeMessage(data);
+ sendSync(dispatch.OP_HOST_POST_MESSAGE, { rid }, dataIntArray);
+}
+
+async function hostGetMessage(rid: number): Promise<any> {
+ const res = await sendAsync(dispatch.OP_HOST_GET_MESSAGE, { rid });
+
+ if (res.data != null) {
+ return decodeMessage(new Uint8Array(res.data));
+ } else {
+ return null;
+ }
+}
+
+// Worker-side API (used inside the spawned worker itself).
+export const onmessage: (e: { data: any }) => void = (): void => {};
+
+export function postMessage(data: any): void {
+ const dataIntArray = encodeMessage(data);
+ sendSync(dispatch.OP_WORKER_POST_MESSAGE, {}, dataIntArray);
+}
+
+export async function getMessage(): Promise<any> {
+ log("getMessage");
+ const res = await sendAsync(dispatch.OP_WORKER_GET_MESSAGE);
+
+ if (res.data != null) {
+ return decodeMessage(new Uint8Array(res.data));
+ } else {
+ return null;
+ }
+}
+
+export let isClosing = false;
+
+export function workerClose(): void {
+ isClosing = true;
+}
+
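+// Worker-side message loop: keeps pulling messages until the host closes the
+// channel (null message), workerClose() is called, or no onmessage handler is
+// installed on the worker's global scope.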
+export async function workerMain(): Promise<void> {
+ log("workerMain");
+
+ while (!isClosing) {
+ const data = await getMessage();
+ if (data == null) {
+ log("workerMain got null message. quitting.");
+ break;
+ }
+
+ if (window["onmessage"]) {
+ const event = { data };
+ const result: void | Promise<void> = window.onmessage(event);
+ if (result && "then" in result) {
+ await result;
+ }
+ }
+
+ if (!window["onmessage"]) {
+ break;
+ }
+ }
+}
+
+export interface Worker {
+ onerror?: () => void;
+ onmessage?: (e: { data: any }) => void;
+ onmessageerror?: () => void;
+ postMessage(data: any): void;
+ closed: Promise<void>;
+}
+
+// TODO(kevinkassimo): Maybe implement reasonable web worker options?
+// eslint-disable-next-line @typescript-eslint/no-empty-interface
+export interface WorkerOptions {}
+
+/** Extended Deno Worker initialization options.
+ * `noDenoNamespace` hides the global `window.Deno` namespace for the
+ * spawned worker and any nested workers it spawns (default: false).
+ */
+export interface DenoWorkerOptions extends WorkerOptions {
+ noDenoNamespace?: boolean;
+}
+
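+// Illustrative host-side usage (a sketch based on the class below; the
+// specifier and message payload are hypothetical):
+//
+//   const w = new WorkerImpl("./worker.ts", { noDenoNamespace: true });
+//   w.onmessage = (e): void => console.log(e.data);
+//   w.postMessage({ hello: "world" });
+//   await w.closed;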
+export class WorkerImpl implements Worker {
+ private readonly rid: number;
+ private isClosing = false;
+ private readonly isClosedPromise: Promise<void>;
+ public onerror?: () => void;
+ public onmessage?: (e: { data: any }) => void;
+ public onmessageerror?: () => void;
+
+ constructor(specifier: string, options?: DenoWorkerOptions) {
+ let hasSourceCode = false;
+ let sourceCode = new Uint8Array();
+
+ let includeDenoNamespace = true;
+ if (options && options.noDenoNamespace) {
+ includeDenoNamespace = false;
+ }
+ // Handle blob URL.
+ if (specifier.startsWith("blob:")) {
+ hasSourceCode = true;
+ const b = blobURLMap.get(specifier);
+ if (!b) {
+ throw new Error("No Blob associated with the given URL is found");
+ }
+ const blobBytes = blobBytesWeakMap.get(b!);
+ if (!blobBytes) {
+ throw new Error("Invalid Blob");
+ }
+ sourceCode = blobBytes!;
+ }
+
+ this.rid = createWorker(
+ specifier,
+ includeDenoNamespace,
+ hasSourceCode,
+ sourceCode
+ );
+ this.run();
+ this.isClosedPromise = hostGetWorkerClosed(this.rid);
+ this.isClosedPromise.then(
+ (): void => {
+ this.isClosing = true;
+ }
+ );
+ }
+
+ get closed(): Promise<void> {
+ return this.isClosedPromise;
+ }
+
+ postMessage(data: any): void {
+ hostPostMessage(this.rid, data);
+ }
+
+ private async run(): Promise<void> {
+ while (!this.isClosing) {
+ const data = await hostGetMessage(this.rid);
+ if (data == null) {
+ log("worker got null message. quitting.");
+ break;
+ }
+ // TODO(afinch7) stop this from eating messages before onmessage has been assigned
+ if (this.onmessage) {
+ const event = { data };
+ this.onmessage(event);
+ }
+ }
+ }
+}
diff --git a/cli/js/write_file.ts b/cli/js/write_file.ts
new file mode 100644
index 000000000..d6307e002
--- /dev/null
+++ b/cli/js/write_file.ts
@@ -0,0 +1,76 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { stat, statSync } from "./stat.ts";
+import { open, openSync } from "./files.ts";
+import { chmod, chmodSync } from "./chmod.ts";
+import { writeAll, writeAllSync } from "./buffer.ts";
+
+/** Options for writing to a file.
+ * `perm` changes the file's permissions if set.
+ * `create` decides whether the file should be created if it does not exist (default: true).
+ * `append` decides whether the data should be appended to the file (default: false).
+ */
+export interface WriteFileOptions {
+ perm?: number;
+ create?: boolean;
+ append?: boolean;
+}
+
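+// Illustrative option combinations (a sketch; write_file_test.ts exercises the
+// tested behavior; the filenames here are hypothetical):
+//   writeFileSync("log.txt", data, { append: true });    // append to an existing file
+//   writeFileSync("conf.txt", data, { create: false });  // fail if the file is missing
+//   writeFileSync("run.sh", data, { perm: 0o755 });      // also chmod the file
+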
+/** Synchronously write a new file with the given filename and data.
+ *
+ * const encoder = new TextEncoder();
+ * const data = encoder.encode("Hello world\n");
+ * Deno.writeFileSync("hello.txt", data);
+ */
+export function writeFileSync(
+ filename: string,
+ data: Uint8Array,
+ options: WriteFileOptions = {}
+): void {
+ if (options.create !== undefined) {
+ const create = !!options.create;
+ if (!create) {
+ // verify that file exists
+ statSync(filename);
+ }
+ }
+
+ const openMode = options.append ? "a" : "w";
+ const file = openSync(filename, openMode);
+
+ if (options.perm !== undefined && options.perm !== null) {
+ chmodSync(filename, options.perm);
+ }
+
+ writeAllSync(file, data);
+ file.close();
+}
+
+/** Write a new file with the given filename and data.
+ *
+ * const encoder = new TextEncoder();
+ * const data = encoder.encode("Hello world\n");
+ * await Deno.writeFile("hello.txt", data);
+ */
+export async function writeFile(
+ filename: string,
+ data: Uint8Array,
+ options: WriteFileOptions = {}
+): Promise<void> {
+ if (options.create !== undefined) {
+ const create = !!options.create;
+ if (!create) {
+ // verify that file exists
+ await stat(filename);
+ }
+ }
+
+ const openMode = options.append ? "a" : "w";
+ const file = await open(filename, openMode);
+
+ if (options.perm !== undefined && options.perm !== null) {
+ await chmod(filename, options.perm);
+ }
+
+ await writeAll(file, data);
+ file.close();
+}
diff --git a/cli/js/write_file_test.ts b/cli/js/write_file_test.ts
new file mode 100644
index 000000000..e1bbb67b3
--- /dev/null
+++ b/cli/js/write_file_test.ts
@@ -0,0 +1,219 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { testPerm, assert, assertEquals } from "./test_util.ts";
+
+testPerm({ read: true, write: true }, function writeFileSyncSuccess(): void {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+ Deno.writeFileSync(filename, data);
+ const dataRead = Deno.readFileSync(filename);
+ const dec = new TextDecoder("utf-8");
+ const actual = dec.decode(dataRead);
+ assertEquals("Hello", actual);
+});
+
+testPerm({ write: true }, function writeFileSyncFail(): void {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = "/baddir/test.txt";
+ // The following should fail because /baddir doesn't exist (hopefully).
+ let caughtError = false;
+ try {
+ Deno.writeFileSync(filename, data);
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.NotFound);
+ assertEquals(e.name, "NotFound");
+ }
+ assert(caughtError);
+});
+
+testPerm({ write: false }, function writeFileSyncPerm(): void {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = "/baddir/test.txt";
+ // The following should fail due to no write permission
+ let caughtError = false;
+ try {
+ Deno.writeFileSync(filename, data);
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm({ read: true, write: true }, function writeFileSyncUpdatePerm(): void {
+ if (Deno.build.os !== "win") {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+ Deno.writeFileSync(filename, data, { perm: 0o755 });
+ assertEquals(Deno.statSync(filename).mode & 0o777, 0o755);
+ Deno.writeFileSync(filename, data, { perm: 0o666 });
+ assertEquals(Deno.statSync(filename).mode & 0o777, 0o666);
+ }
+});
+
+testPerm({ read: true, write: true }, function writeFileSyncCreate(): void {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+ let caughtError = false;
+ // If create is turned off, the file won't be created.
+ try {
+ Deno.writeFileSync(filename, data, { create: false });
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.NotFound);
+ assertEquals(e.name, "NotFound");
+ }
+ assert(caughtError);
+
+ // Turn on create, should have no error
+ Deno.writeFileSync(filename, data, { create: true });
+ Deno.writeFileSync(filename, data, { create: false });
+ const dataRead = Deno.readFileSync(filename);
+ const dec = new TextDecoder("utf-8");
+ const actual = dec.decode(dataRead);
+ assertEquals("Hello", actual);
+});
+
+testPerm({ read: true, write: true }, function writeFileSyncAppend(): void {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+ Deno.writeFileSync(filename, data);
+ Deno.writeFileSync(filename, data, { append: true });
+ let dataRead = Deno.readFileSync(filename);
+ const dec = new TextDecoder("utf-8");
+ let actual = dec.decode(dataRead);
+ assertEquals("HelloHello", actual);
+ // Now attempt overwrite
+ Deno.writeFileSync(filename, data, { append: false });
+ dataRead = Deno.readFileSync(filename);
+ actual = dec.decode(dataRead);
+ assertEquals("Hello", actual);
+ // Leaving append unset should also overwrite.
+ Deno.writeFileSync(filename, data);
+ dataRead = Deno.readFileSync(filename);
+ actual = dec.decode(dataRead);
+ assertEquals("Hello", actual);
+});
+
+testPerm(
+ { read: true, write: true },
+ async function writeFileSuccess(): Promise<void> {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+ await Deno.writeFile(filename, data);
+ const dataRead = Deno.readFileSync(filename);
+ const dec = new TextDecoder("utf-8");
+ const actual = dec.decode(dataRead);
+ assertEquals("Hello", actual);
+ }
+);
+
+testPerm(
+ { read: true, write: true },
+ async function writeFileNotFound(): Promise<void> {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = "/baddir/test.txt";
+ // The following should fail because /baddir doesn't exist (hopefully).
+ let caughtError = false;
+ try {
+ await Deno.writeFile(filename, data);
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.NotFound);
+ assertEquals(e.name, "NotFound");
+ }
+ assert(caughtError);
+ }
+);
+
+testPerm({ read: true, write: false }, async function writeFilePerm(): Promise<
+ void
+> {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = "/baddir/test.txt";
+ // The following should fail due to no write permission
+ let caughtError = false;
+ try {
+ await Deno.writeFile(filename, data);
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.PermissionDenied);
+ assertEquals(e.name, "PermissionDenied");
+ }
+ assert(caughtError);
+});
+
+testPerm(
+ { read: true, write: true },
+ async function writeFileUpdatePerm(): Promise<void> {
+ if (Deno.build.os !== "win") {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+ await Deno.writeFile(filename, data, { perm: 0o755 });
+ assertEquals(Deno.statSync(filename).mode & 0o777, 0o755);
+ await Deno.writeFile(filename, data, { perm: 0o666 });
+ assertEquals(Deno.statSync(filename).mode & 0o777, 0o666);
+ }
+ }
+);
+
+testPerm({ read: true, write: true }, async function writeFileCreate(): Promise<
+ void
+> {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+ let caughtError = false;
+ // If create is turned off, the file won't be created.
+ try {
+ await Deno.writeFile(filename, data, { create: false });
+ } catch (e) {
+ caughtError = true;
+ assertEquals(e.kind, Deno.ErrorKind.NotFound);
+ assertEquals(e.name, "NotFound");
+ }
+ assert(caughtError);
+
+ // Turn on create, should have no error
+ await Deno.writeFile(filename, data, { create: true });
+ await Deno.writeFile(filename, data, { create: false });
+ const dataRead = Deno.readFileSync(filename);
+ const dec = new TextDecoder("utf-8");
+ const actual = dec.decode(dataRead);
+ assertEquals("Hello", actual);
+});
+
+testPerm({ read: true, write: true }, async function writeFileAppend(): Promise<
+ void
+> {
+ const enc = new TextEncoder();
+ const data = enc.encode("Hello");
+ const filename = Deno.makeTempDirSync() + "/test.txt";
+ await Deno.writeFile(filename, data);
+ await Deno.writeFile(filename, data, { append: true });
+ let dataRead = Deno.readFileSync(filename);
+ const dec = new TextDecoder("utf-8");
+ let actual = dec.decode(dataRead);
+ assertEquals("HelloHello", actual);
+ // Now attempt overwrite
+ await Deno.writeFile(filename, data, { append: false });
+ dataRead = Deno.readFileSync(filename);
+ actual = dec.decode(dataRead);
+ assertEquals("Hello", actual);
+ // Leaving append unset should also overwrite.
+ await Deno.writeFile(filename, data);
+ dataRead = Deno.readFileSync(filename);
+ actual = dec.decode(dataRead);
+ assertEquals("Hello", actual);
+});
diff --git a/cli/lib.rs b/cli/lib.rs
new file mode 100644
index 000000000..8d0904ddb
--- /dev/null
+++ b/cli/lib.rs
@@ -0,0 +1,409 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+#[macro_use]
+extern crate lazy_static;
+#[macro_use]
+extern crate log;
+#[macro_use]
+extern crate futures;
+#[macro_use]
+extern crate serde_json;
+extern crate clap;
+extern crate deno;
+extern crate indexmap;
+#[cfg(unix)]
+extern crate nix;
+extern crate rand;
+extern crate serde;
+extern crate serde_derive;
+extern crate url;
+
+pub mod colors;
+pub mod compilers;
+pub mod deno_dir;
+pub mod deno_error;
+pub mod diagnostics;
+mod disk_cache;
+mod file_fetcher;
+pub mod flags;
+pub mod fmt_errors;
+mod fs;
+mod global_timer;
+mod http_body;
+mod http_util;
+mod import_map;
+mod js;
+pub mod msg;
+pub mod ops;
+pub mod permissions;
+mod progress;
+mod repl;
+pub mod resolve_addr;
+pub mod resources;
+mod shell;
+mod signal;
+pub mod source_maps;
+mod startup_data;
+pub mod state;
+pub mod test_util;
+mod tokio_read;
+mod tokio_util;
+mod tokio_write;
+pub mod version;
+pub mod worker;
+
+use crate::deno_error::js_check;
+use crate::deno_error::print_err_and_exit;
+use crate::progress::Progress;
+use crate::state::ThreadSafeState;
+use crate::worker::Worker;
+use deno::v8_set_flags;
+use deno::ErrBox;
+use deno::ModuleSpecifier;
+use flags::DenoFlags;
+use flags::DenoSubcommand;
+use futures::lazy;
+use futures::Future;
+use log::Level;
+use log::Metadata;
+use log::Record;
+use std::env;
+
+static LOGGER: Logger = Logger;
+
+struct Logger;
+
+impl log::Log for Logger {
+ fn enabled(&self, metadata: &Metadata) -> bool {
+ metadata.level() <= log::max_level()
+ }
+
+ fn log(&self, record: &Record) {
+ if self.enabled(record.metadata()) {
+ let mut target = record.target().to_string();
+
+ if let Some(line_no) = record.line() {
+ target.push_str(":");
+ target.push_str(&line_no.to_string());
+ }
+
+ println!("{} RS - {} - {}", record.level(), target, record.args());
+ }
+ }
+ fn flush(&self) {}
+}
+
+fn create_worker_and_state(
+ flags: DenoFlags,
+ argv: Vec<String>,
+) -> (Worker, ThreadSafeState) {
+ use crate::shell::Shell;
+ use std::sync::Arc;
+ use std::sync::Mutex;
+ let shell = Arc::new(Mutex::new(Shell::new()));
+ let progress = Progress::new();
+ progress.set_callback(move |_done, _completed, _total, status, msg| {
+ if !status.is_empty() {
+ let mut s = shell.lock().unwrap();
+ s.status(status, msg).expect("shell problem");
+ }
+ });
+ // TODO(kevinkassimo): maybe make include_deno_namespace also configurable?
+ let state = ThreadSafeState::new(flags, argv, progress, true)
+ .map_err(deno_error::print_err_and_exit)
+ .unwrap();
+ let worker = Worker::new(
+ "main".to_string(),
+ startup_data::deno_isolate_init(),
+ state.clone(),
+ );
+
+ (worker, state)
+}
+
+fn types_command() {
+ let content = crate::js::get_asset("lib.deno_runtime.d.ts").unwrap();
+ println!("{}", content);
+}
+
+fn print_cache_info(worker: Worker) {
+ let state = worker.state;
+
+ println!(
+ "{} {:?}",
+ colors::bold("DENO_DIR location:".to_string()),
+ state.dir.root
+ );
+ println!(
+ "{} {:?}",
+ colors::bold("Remote modules cache:".to_string()),
+ state.dir.deps_cache.location
+ );
+ println!(
+ "{} {:?}",
+ colors::bold("TypeScript compiler cache:".to_string()),
+ state.dir.gen_cache.location
+ );
+}
+
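+// Used by `deno info <module>`: fetches the module, prints its local path and
+// media type, and, when a compiled artifact exists (TypeScript, or JavaScript
+// with compile_js enabled), also prints the compiled output, source map and
+// dependency graph.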
+pub fn print_file_info(
+ worker: Worker,
+ module_specifier: &ModuleSpecifier,
+) -> impl Future<Item = Worker, Error = ()> {
+ let state_ = worker.state.clone();
+ let module_specifier_ = module_specifier.clone();
+
+ state_
+ .file_fetcher
+ .fetch_source_file_async(&module_specifier)
+ .map_err(|err| println!("{}", err))
+ .and_then(|out| {
+ println!(
+ "{} {}",
+ colors::bold("local:".to_string()),
+ out.filename.to_str().unwrap()
+ );
+
+ println!(
+ "{} {}",
+ colors::bold("type:".to_string()),
+ msg::enum_name_media_type(out.media_type)
+ );
+
+ state_
+ .clone()
+ .fetch_compiled_module(&module_specifier_)
+ .map_err(|e| {
+ debug!("compiler error exiting!");
+ eprintln!("\n{}", e.to_string());
+ std::process::exit(1);
+ })
+ .and_then(move |compiled| {
+ if out.media_type == msg::MediaType::TypeScript
+ || (out.media_type == msg::MediaType::JavaScript
+ && state_.ts_compiler.compile_js)
+ {
+ let compiled_source_file = state_
+ .ts_compiler
+ .get_compiled_source_file(&out.url)
+ .unwrap();
+
+ println!(
+ "{} {}",
+ colors::bold("compiled:".to_string()),
+ compiled_source_file.filename.to_str().unwrap(),
+ );
+ }
+
+ if let Ok(source_map) = state_
+ .clone()
+ .ts_compiler
+ .get_source_map_file(&module_specifier_)
+ {
+ println!(
+ "{} {}",
+ colors::bold("map:".to_string()),
+ source_map.filename.to_str().unwrap()
+ );
+ }
+
+ if let Some(deps) =
+ worker.state.modules.lock().unwrap().deps(&compiled.name)
+ {
+ println!("{}{}", colors::bold("deps:\n".to_string()), deps.name);
+ if let Some(ref depsdeps) = deps.deps {
+ for d in depsdeps {
+ println!("{}", d);
+ }
+ }
+ } else {
+ println!(
+ "{} cannot retrieve full dependency graph",
+ colors::bold("deps:".to_string()),
+ );
+ }
+ Ok(worker)
+ })
+ })
+}
+
+fn info_command(flags: DenoFlags, argv: Vec<String>) {
+ let (mut worker, state) = create_worker_and_state(flags, argv.clone());
+
+ // If it was just "deno info" print location of caches and exit
+ if argv.len() == 1 {
+ return print_cache_info(worker);
+ }
+
+ let main_module = state.main_module().unwrap();
+ let main_future = lazy(move || {
+ // Setup runtime.
+ js_check(worker.execute("denoMain()"));
+ debug!("main_module {}", main_module);
+
+ worker
+ .execute_mod_async(&main_module, true)
+ .map_err(print_err_and_exit)
+ .and_then(move |()| print_file_info(worker, &main_module))
+ .and_then(|worker| {
+ worker.then(|result| {
+ js_check(result);
+ Ok(())
+ })
+ })
+ });
+ tokio_util::run(main_future);
+}
+
+fn fetch_command(flags: DenoFlags, argv: Vec<String>) {
+ let (mut worker, state) = create_worker_and_state(flags, argv.clone());
+
+ let main_module = state.main_module().unwrap();
+ let main_future = lazy(move || {
+ // Setup runtime.
+ js_check(worker.execute("denoMain()"));
+ debug!("main_module {}", main_module);
+
+ worker.execute_mod_async(&main_module, true).then(|result| {
+ js_check(result);
+ Ok(())
+ })
+ });
+ tokio_util::run(main_future);
+}
+
+fn eval_command(flags: DenoFlags, argv: Vec<String>) {
+ let (mut worker, state) = create_worker_and_state(flags, argv);
+ // Wrap the provided script in an async function so that asynchronous
+ // methods work. This is required until top-level await is supported.
+ let js_source = format!(
+ "async function _topLevelWrapper(){{
+ {}
+ }}
+ _topLevelWrapper();
+ ",
+ &state.argv[1]
+ );
+
+ let main_future = lazy(move || {
+ js_check(worker.execute("denoMain()"));
+ // ATM imports in `deno eval` are not allowed
+ // TODO Support ES modules once Worker supports evaluating anonymous modules.
+ js_check(worker.execute(&js_source));
+ worker.then(|result| {
+ js_check(result);
+ Ok(())
+ })
+ });
+ tokio_util::run(main_future);
+}
+
+fn bundle_command(flags: DenoFlags, argv: Vec<String>) {
+ let (mut _worker, state) = create_worker_and_state(flags, argv);
+
+ let main_module = state.main_module().unwrap();
+ assert!(state.argv.len() >= 3);
+ let out_file = state.argv[2].clone();
+ debug!(">>>>> bundle_async START");
+ let bundle_future = state
+ .ts_compiler
+ .bundle_async(state.clone(), main_module.to_string(), out_file)
+ .map_err(|err| {
+ debug!("diagnostics returned, exiting!");
+ eprintln!("");
+ print_err_and_exit(err);
+ })
+ .and_then(move |_| {
+ debug!(">>>>> bundle_async END");
+ Ok(())
+ });
+ tokio_util::run(bundle_future);
+}
+
+fn run_repl(flags: DenoFlags, argv: Vec<String>) {
+ let (mut worker, _state) = create_worker_and_state(flags, argv);
+
+ // REPL situation.
+ let main_future = lazy(move || {
+ // Setup runtime.
+ js_check(worker.execute("denoMain()"));
+ worker
+ .then(|result| {
+ js_check(result);
+ Ok(())
+ })
+ .map_err(|(err, _worker): (ErrBox, Worker)| print_err_and_exit(err))
+ });
+ tokio_util::run(main_future);
+}
+
+fn run_script(flags: DenoFlags, argv: Vec<String>) {
+ let use_current_thread = flags.current_thread;
+ let (mut worker, state) = create_worker_and_state(flags, argv);
+
+ let main_module = state.main_module().unwrap();
+ // Normal situation of executing a module.
+ let main_future = lazy(move || {
+ // Setup runtime.
+ js_check(worker.execute("denoMain()"));
+ debug!("main_module {}", main_module);
+
+ let mut worker_ = worker.clone();
+
+ worker
+ .execute_mod_async(&main_module, false)
+ .and_then(move |()| {
+ js_check(worker.execute("window.dispatchEvent(new Event('load'))"));
+ worker.then(move |result| {
+ js_check(result);
+ js_check(
+ worker_.execute("window.dispatchEvent(new Event('unload'))"),
+ );
+ Ok(())
+ })
+ })
+ .map_err(print_err_and_exit)
+ });
+
+ if use_current_thread {
+ tokio_util::run_on_current_thread(main_future);
+ } else {
+ tokio_util::run(main_future);
+ }
+}
+
+fn version_command() {
+ println!("deno: {}", version::DENO);
+ println!("v8: {}", version::v8());
+ println!("typescript: {}", version::TYPESCRIPT);
+}
+
+pub fn main() {
+ #[cfg(windows)]
+ ansi_term::enable_ansi_support().ok(); // For Windows 10
+
+ log::set_logger(&LOGGER).unwrap();
+ let args: Vec<String> = env::args().collect();
+ let (flags, subcommand, argv) = flags::flags_from_vec(args);
+
+ if let Some(ref v8_flags) = flags.v8_flags {
+ v8_set_flags(v8_flags.clone());
+ }
+
+ let log_level = match flags.log_level {
+ Some(level) => level,
+ None => Level::Warn,
+ };
+ log::set_max_level(log_level.to_level_filter());
+
+ match subcommand {
+ DenoSubcommand::Bundle => bundle_command(flags, argv),
+ DenoSubcommand::Completions => {}
+ DenoSubcommand::Eval => eval_command(flags, argv),
+ DenoSubcommand::Fetch => fetch_command(flags, argv),
+ DenoSubcommand::Info => info_command(flags, argv),
+ DenoSubcommand::Repl => run_repl(flags, argv),
+ DenoSubcommand::Run => run_script(flags, argv),
+ DenoSubcommand::Types => types_command(),
+ DenoSubcommand::Version => version_command(),
+ }
+}
diff --git a/cli/main.rs b/cli/main.rs
new file mode 100644
index 000000000..b24c61a9b
--- /dev/null
+++ b/cli/main.rs
@@ -0,0 +1,5 @@
+extern crate deno_cli;
+
+fn main() {
+ deno_cli::main();
+}
diff --git a/cli/msg.rs b/cli/msg.rs
new file mode 100644
index 000000000..20ab9db13
--- /dev/null
+++ b/cli/msg.rs
@@ -0,0 +1,85 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+// Warning! The values in this enum are duplicated in js/errors.ts
+// Update carefully!
+#[allow(non_camel_case_types)]
+#[repr(i8)]
+#[derive(Clone, Copy, PartialEq, Debug)]
+pub enum ErrorKind {
+ NoError = 0,
+ NotFound = 1,
+ PermissionDenied = 2,
+ ConnectionRefused = 3,
+ ConnectionReset = 4,
+ ConnectionAborted = 5,
+ NotConnected = 6,
+ AddrInUse = 7,
+ AddrNotAvailable = 8,
+ BrokenPipe = 9,
+ AlreadyExists = 10,
+ WouldBlock = 11,
+ InvalidInput = 12,
+ InvalidData = 13,
+ TimedOut = 14,
+ Interrupted = 15,
+ WriteZero = 16,
+ Other = 17,
+ UnexpectedEof = 18,
+ BadResource = 19,
+ CommandFailed = 20,
+ EmptyHost = 21,
+ IdnaError = 22,
+ InvalidPort = 23,
+ InvalidIpv4Address = 24,
+ InvalidIpv6Address = 25,
+ InvalidDomainCharacter = 26,
+ RelativeUrlWithoutBase = 27,
+ RelativeUrlWithCannotBeABaseBase = 28,
+ SetHostOnCannotBeABaseUrl = 29,
+ Overflow = 30,
+ HttpUser = 31,
+ HttpClosed = 32,
+ HttpCanceled = 33,
+ HttpParse = 34,
+ HttpOther = 35,
+ TooLarge = 36,
+ InvalidUri = 37,
+ InvalidSeekMode = 38,
+ OpNotAvailable = 39,
+ WorkerInitFailed = 40,
+ UnixError = 41,
+ NoAsyncSupport = 42,
+ NoSyncSupport = 43,
+ ImportMapError = 44,
+ InvalidPath = 45,
+ ImportPrefixMissing = 46,
+ UnsupportedFetchScheme = 47,
+ TooManyRedirects = 48,
+ Diagnostic = 49,
+ JSError = 50,
+}
+
+// Warning! The values in this enum are duplicated in js/compiler.ts
+// Update carefully!
+#[allow(non_camel_case_types)]
+#[repr(i8)]
+#[derive(Clone, Copy, PartialEq, Debug)]
+pub enum MediaType {
+ JavaScript = 0,
+ JSX = 1,
+ TypeScript = 2,
+ TSX = 3,
+ Json = 4,
+ Unknown = 5,
+}
+
+pub fn enum_name_media_type(mt: MediaType) -> &'static str {
+ match mt {
+ MediaType::JavaScript => "JavaScript",
+ MediaType::JSX => "JSX",
+ MediaType::TypeScript => "TypeScript",
+ MediaType::TSX => "TSX",
+ MediaType::Json => "Json",
+ MediaType::Unknown => "Unknown",
+ }
+}
diff --git a/cli/ops/compiler.rs b/cli/ops/compiler.rs
new file mode 100644
index 000000000..4228842dd
--- /dev/null
+++ b/cli/ops/compiler.rs
@@ -0,0 +1,99 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{Deserialize, JsonOp, Value};
+use crate::futures::future::join_all;
+use crate::futures::Future;
+use crate::state::ThreadSafeState;
+use deno::*;
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct CacheArgs {
+ module_id: String,
+ contents: String,
+ extension: String,
+}
+
+pub fn op_cache(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: CacheArgs = serde_json::from_value(args)?;
+
+ let module_specifier = ModuleSpecifier::resolve_url(&args.module_id)
+ .expect("Should be valid module specifier");
+
+ state.ts_compiler.cache_compiler_output(
+ &module_specifier,
+ &args.extension,
+ &args.contents,
+ )?;
+
+ Ok(JsonOp::Sync(json!({})))
+}
+
+#[derive(Deserialize)]
+struct FetchSourceFilesArgs {
+ specifiers: Vec<String>,
+ referrer: String,
+}
+
+pub fn op_fetch_source_files(
+ state: &ThreadSafeState,
+ args: Value,
+ _data: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: FetchSourceFilesArgs = serde_json::from_value(args)?;
+
+ // TODO(ry) Maybe a security hole. Only the compiler worker should have access
+ // to this. Need a test to demonstrate the hole.
+ let is_dyn_import = false;
+
+ let mut futures = vec![];
+ for specifier in &args.specifiers {
+ let resolved_specifier =
+ state.resolve(specifier, &args.referrer, false, is_dyn_import)?;
+ let fut = state
+ .file_fetcher
+ .fetch_source_file_async(&resolved_specifier);
+ futures.push(fut);
+ }
+
+ let future = join_all(futures)
+ .map_err(ErrBox::from)
+ .and_then(move |files| {
+ let res = files
+ .into_iter()
+ .map(|file| {
+ json!({
+ "url": file.url.to_string(),
+ "filename": file.filename.to_str().unwrap(),
+ "mediaType": file.media_type as i32,
+ "sourceCode": String::from_utf8(file.source_code).unwrap(),
+ })
+ })
+ .collect();
+
+ futures::future::ok(res)
+ });
+
+ Ok(JsonOp::Async(Box::new(future)))
+}
+
+#[derive(Deserialize)]
+struct FetchAssetArgs {
+ name: String,
+}
+
+pub fn op_fetch_asset(
+ _state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: FetchAssetArgs = serde_json::from_value(args)?;
+ if let Some(source_code) = crate::js::get_asset(&args.name) {
+ Ok(JsonOp::Sync(json!(source_code)))
+ } else {
+ panic!("op_fetch_asset bad asset {}", args.name)
+ }
+}
diff --git a/cli/ops/dispatch_json.rs b/cli/ops/dispatch_json.rs
new file mode 100644
index 000000000..3a8faf2a8
--- /dev/null
+++ b/cli/ops/dispatch_json.rs
@@ -0,0 +1,111 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::tokio_util;
+use deno::*;
+use futures::Future;
+use futures::Poll;
+pub use serde_derive::Deserialize;
+use serde_json::json;
+pub use serde_json::Value;
+
+pub type AsyncJsonOp = Box<dyn Future<Item = Value, Error = ErrBox> + Send>;
+
+pub enum JsonOp {
+ Sync(Value),
+ Async(AsyncJsonOp),
+}
+
+fn json_err(err: ErrBox) -> Value {
+ use crate::deno_error::GetErrorKind;
+ json!({
+ "message": err.to_string(),
+ "kind": err.kind() as u32,
+ })
+}
+
+fn serialize_result(
+ promise_id: Option<u64>,
+ result: Result<Value, ErrBox>,
+) -> Buf {
+ let value = match result {
+ Ok(v) => json!({ "ok": v, "promiseId": promise_id }),
+ Err(err) => json!({ "err": json_err(err), "promiseId": promise_id }),
+ };
+ let mut vec = serde_json::to_vec(&value).unwrap();
+ debug!("JSON response pre-align, len={}", vec.len());
+ // Align to 32bit word, padding with the space character.
+ vec.resize((vec.len() + 3usize) & !3usize, b' ');
+ vec.into_boxed_slice()
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct AsyncArgs {
+ promise_id: Option<u64>,
+}
+
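+// Wraps a JSON dispatcher into a CoreOp handler. The presence of `promiseId`
+// in the control buffer decides sync vs. async; both results and errors are
+// serialized with serialize_result above.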
+pub fn json_op<D>(d: D) -> impl Fn(&[u8], Option<PinnedBuf>) -> CoreOp
+where
+ D: Fn(Value, Option<PinnedBuf>) -> Result<JsonOp, ErrBox>,
+{
+ move |control: &[u8], zero_copy: Option<PinnedBuf>| {
+ let async_args: AsyncArgs = serde_json::from_slice(control).unwrap();
+ let promise_id = async_args.promise_id;
+ let is_sync = promise_id.is_none();
+
+ let result = serde_json::from_slice(control)
+ .map_err(ErrBox::from)
+ .and_then(|args| d(args, zero_copy));
+
+ // Convert to CoreOp
+ match result {
+ Ok(JsonOp::Sync(sync_value)) => {
+ assert!(promise_id.is_none());
+ CoreOp::Sync(serialize_result(promise_id, Ok(sync_value)))
+ }
+ Ok(JsonOp::Async(fut)) => {
+ assert!(promise_id.is_some());
+ let fut2 = Box::new(fut.then(move |result| -> Result<Buf, ()> {
+ Ok(serialize_result(promise_id, result))
+ }));
+ CoreOp::Async(fut2)
+ }
+ Err(sync_err) => {
+ let buf = serialize_result(promise_id, Err(sync_err));
+ if is_sync {
+ CoreOp::Sync(buf)
+ } else {
+ CoreOp::Async(Box::new(futures::future::ok(buf)))
+ }
+ }
+ }
+ }
+}
+
+// This is just type conversion. Implement From trait?
+// See https://github.com/tokio-rs/tokio/blob/ffd73a64e7ec497622b7f939e38017afe7124dc4/tokio-fs/src/lib.rs#L76-L85
+fn convert_blocking_json<F>(f: F) -> Poll<Value, ErrBox>
+where
+ F: FnOnce() -> Result<Value, ErrBox>,
+{
+ use futures::Async::*;
+ match tokio_threadpool::blocking(f) {
+ Ok(Ready(Ok(v))) => Ok(Ready(v)),
+ Ok(Ready(Err(err))) => Err(err),
+ Ok(NotReady) => Ok(NotReady),
+ Err(err) => panic!("blocking error {}", err),
+ }
+}
+
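+// Runs `f` directly for sync ops; for async ops it is executed through
+// tokio_threadpool::blocking (via convert_blocking_json) on the default
+// executor, so blocking filesystem work does not stall the event loop.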
+pub fn blocking_json<F>(is_sync: bool, f: F) -> Result<JsonOp, ErrBox>
+where
+ F: 'static + Send + FnOnce() -> Result<Value, ErrBox>,
+{
+ if is_sync {
+ Ok(JsonOp::Sync(f()?))
+ } else {
+ Ok(JsonOp::Async(Box::new(futures::sync::oneshot::spawn(
+ tokio_util::poll_fn(move || convert_blocking_json(f)),
+ &tokio_executor::DefaultExecutor::current(),
+ ))))
+ }
+}
diff --git a/cli/ops/dispatch_minimal.rs b/cli/ops/dispatch_minimal.rs
new file mode 100644
index 000000000..618a040bf
--- /dev/null
+++ b/cli/ops/dispatch_minimal.rs
@@ -0,0 +1,110 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+// Do not add flatbuffer dependencies to this module.
+//! Connects to js/dispatch_minimal.ts sendAsyncMinimal. This acts as a faster
+//! alternative to flatbuffers, using a very simple list of int32s to lay out
+//! messages. The first i32 is used to determine whether a message is a
+//! flatbuffer message or a "minimal" message.
+use deno::Buf;
+use deno::CoreOp;
+use deno::ErrBox;
+use deno::Op;
+use deno::PinnedBuf;
+use futures::Future;
+
+pub type MinimalOp = dyn Future<Item = i32, Error = ErrBox> + Send;
+pub type Dispatcher = fn(i32, Option<PinnedBuf>) -> Box<MinimalOp>;
+
+#[derive(Copy, Clone, Debug, PartialEq)]
+// This corresponds to RecordMinimal on the TS side.
+pub struct Record {
+ pub promise_id: i32,
+ pub arg: i32,
+ pub result: i32,
+}
+
+impl Into<Buf> for Record {
+ fn into(self) -> Buf {
+ let vec = vec![self.promise_id, self.arg, self.result];
+ let buf32 = vec.into_boxed_slice();
+ let ptr = Box::into_raw(buf32) as *mut [u8; 3 * 4];
+ unsafe { Box::from_raw(ptr) }
+ }
+}
+
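+// A minimal record on the wire is exactly three i32s in native byte order:
+// [promise_id, arg, result] (12 bytes). Anything else yields None.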
+pub fn parse_min_record(bytes: &[u8]) -> Option<Record> {
+ if bytes.len() % std::mem::size_of::<i32>() != 0 {
+ return None;
+ }
+ let p = bytes.as_ptr();
+ #[allow(clippy::cast_ptr_alignment)]
+ let p32 = p as *const i32;
+ let s = unsafe { std::slice::from_raw_parts(p32, bytes.len() / 4) };
+
+ if s.len() != 3 {
+ return None;
+ }
+ let ptr = s.as_ptr();
+ let ints = unsafe { std::slice::from_raw_parts(ptr, 3) };
+ Some(Record {
+ promise_id: ints[0],
+ arg: ints[1],
+ result: ints[2],
+ })
+}
+
+#[test]
+fn test_parse_min_record() {
+ let buf = vec![1, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0];
+ assert_eq!(
+ parse_min_record(&buf),
+ Some(Record {
+ promise_id: 1,
+ arg: 3,
+ result: 4,
+ })
+ );
+
+ let buf = vec![];
+ assert_eq!(parse_min_record(&buf), None);
+
+ let buf = vec![5];
+ assert_eq!(parse_min_record(&buf), None);
+}
+
+pub fn minimal_op(
+ d: Dispatcher,
+) -> impl Fn(&[u8], Option<PinnedBuf>) -> CoreOp {
+ move |control: &[u8], zero_copy: Option<PinnedBuf>| {
+ let mut record = parse_min_record(control).unwrap();
+ let is_sync = record.promise_id == 0;
+ let rid = record.arg;
+ let min_op = d(rid, zero_copy);
+
+ // Convert to CoreOp
+ let fut = Box::new(min_op.then(move |result| -> Result<Buf, ()> {
+ match result {
+ Ok(r) => {
+ record.result = r;
+ }
+ Err(err) => {
+ // TODO(ry) The dispatch_minimal doesn't properly pipe errors back to
+ // the caller.
+ debug!("swallowed err {}", err);
+ record.result = -1;
+ }
+ }
+ Ok(record.into())
+ }));
+
+ if is_sync {
+ // Warning! Possible deadlocks can occur if we try to wait for a future
+ // while in a future. The safe but expensive alternative is to use
+ // tokio_util::block_on.
+ // This block is only exercised for readSync and writeSync, which I think
+ // works since they're simple polling futures.
+ Op::Sync(fut.wait().unwrap())
+ } else {
+ Op::Async(fut)
+ }
+ }
+}
diff --git a/cli/ops/errors.rs b/cli/ops/errors.rs
new file mode 100644
index 000000000..cd21a3880
--- /dev/null
+++ b/cli/ops/errors.rs
@@ -0,0 +1,56 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{Deserialize, JsonOp, Value};
+use crate::fmt_errors::JSError;
+use crate::source_maps::get_orig_position;
+use crate::source_maps::CachedMaps;
+use crate::state::ThreadSafeState;
+use deno::*;
+use std::collections::HashMap;
+
+#[derive(Deserialize)]
+struct FormatErrorArgs {
+ error: String,
+}
+
+pub fn op_format_error(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: FormatErrorArgs = serde_json::from_value(args)?;
+ let error = JSError::from_json(&args.error, &state.ts_compiler);
+
+ Ok(JsonOp::Sync(json!({
+ "error": error.to_string(),
+ })))
+}
+
+#[derive(Deserialize)]
+struct ApplySourceMap {
+ filename: String,
+ line: i32,
+ column: i32,
+}
+
+pub fn op_apply_source_map(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: ApplySourceMap = serde_json::from_value(args)?;
+
+ let mut mappings_map: CachedMaps = HashMap::new();
+ let (orig_filename, orig_line, orig_column) = get_orig_position(
+ args.filename,
+ args.line.into(),
+ args.column.into(),
+ &mut mappings_map,
+ &state.ts_compiler,
+ );
+
+ Ok(JsonOp::Sync(json!({
+ "filename": orig_filename.to_string(),
+ "line": orig_line as u32,
+ "column": orig_column as u32,
+ })))
+}
diff --git a/cli/ops/fetch.rs b/cli/ops/fetch.rs
new file mode 100644
index 000000000..f69065f1d
--- /dev/null
+++ b/cli/ops/fetch.rs
@@ -0,0 +1,73 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{Deserialize, JsonOp, Value};
+use crate::http_util::get_client;
+use crate::resources;
+use crate::state::ThreadSafeState;
+use deno::*;
+use http::header::HeaderName;
+use http::header::HeaderValue;
+use http::Method;
+use hyper;
+use hyper::rt::Future;
+use std;
+use std::convert::From;
+
+#[derive(Deserialize)]
+struct FetchArgs {
+ method: Option<String>,
+ url: String,
+ headers: Vec<(String, String)>,
+}
+
+pub fn op_fetch(
+ state: &ThreadSafeState,
+ args: Value,
+ data: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: FetchArgs = serde_json::from_value(args)?;
+ let url = args.url;
+
+ let client = get_client();
+
+ let method = match args.method {
+ Some(method_str) => Method::from_bytes(method_str.as_bytes())?,
+ None => Method::GET,
+ };
+
+ let url_ = url::Url::parse(&url).map_err(ErrBox::from)?;
+ state.check_net_url(&url_)?;
+
+ let mut request = client.request(method, url_);
+
+ if let Some(buf) = data {
+ request = request.body(Vec::from(&*buf));
+ }
+
+ for (key, value) in args.headers {
+ let name = HeaderName::from_bytes(key.as_bytes()).unwrap();
+ let v = HeaderValue::from_str(&value).unwrap();
+ request = request.header(name, v);
+ }
+ debug!("Before fetch {}", url);
+ let future = request.send().map_err(ErrBox::from).and_then(move |res| {
+ let status = res.status();
+ let mut res_headers = Vec::new();
+ for (key, val) in res.headers().iter() {
+ res_headers.push((key.to_string(), val.to_str().unwrap().to_owned()));
+ }
+
+ let body = res.into_body();
+ let body_resource = resources::add_reqwest_body(body);
+
+ let json_res = json!({
+ "bodyRid": body_resource.rid,
+ "status": status.as_u16(),
+ "statusText": status.canonical_reason().unwrap_or(""),
+ "headers": res_headers
+ });
+
+ futures::future::ok(json_res)
+ });
+
+ Ok(JsonOp::Async(Box::new(future)))
+}
diff --git a/cli/ops/files.rs b/cli/ops/files.rs
new file mode 100644
index 000000000..01abff3a9
--- /dev/null
+++ b/cli/ops/files.rs
@@ -0,0 +1,136 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{Deserialize, JsonOp, Value};
+use crate::fs as deno_fs;
+use crate::resources;
+use crate::state::ThreadSafeState;
+use deno::*;
+use futures::Future;
+use std;
+use std::convert::From;
+use tokio;
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct OpenArgs {
+ promise_id: Option<u64>,
+ filename: String,
+ mode: String,
+}
+
+pub fn op_open(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: OpenArgs = serde_json::from_value(args)?;
+ let (filename, filename_) = deno_fs::resolve_from_cwd(&args.filename)?;
+ let mode = args.mode.as_ref();
+
+ let mut open_options = tokio::fs::OpenOptions::new();
+
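+ // Translate the fopen-style mode string into OpenOptions; the second match
+ // below maps the same mode onto the read/write permission checks.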
+ match mode {
+ "r" => {
+ open_options.read(true);
+ }
+ "r+" => {
+ open_options.read(true).write(true);
+ }
+ "w" => {
+ open_options.create(true).write(true).truncate(true);
+ }
+ "w+" => {
+ open_options
+ .read(true)
+ .create(true)
+ .write(true)
+ .truncate(true);
+ }
+ "a" => {
+ open_options.create(true).append(true);
+ }
+ "a+" => {
+ open_options.read(true).create(true).append(true);
+ }
+ "x" => {
+ open_options.create_new(true).write(true);
+ }
+ "x+" => {
+ open_options.create_new(true).read(true).write(true);
+ }
+ &_ => {
+ panic!("Unknown file open mode.");
+ }
+ }
+
+ match mode {
+ "r" => {
+ state.check_read(&filename_)?;
+ }
+ "w" | "a" | "x" => {
+ state.check_write(&filename_)?;
+ }
+ &_ => {
+ state.check_read(&filename_)?;
+ state.check_write(&filename_)?;
+ }
+ }
+
+ let is_sync = args.promise_id.is_none();
+ let op = open_options.open(filename).map_err(ErrBox::from).and_then(
+ move |fs_file| {
+ let resource = resources::add_fs_file(fs_file);
+ futures::future::ok(json!(resource.rid))
+ },
+ );
+
+ if is_sync {
+ let buf = op.wait()?;
+ Ok(JsonOp::Sync(buf))
+ } else {
+ Ok(JsonOp::Async(Box::new(op)))
+ }
+}
+
+#[derive(Deserialize)]
+struct CloseArgs {
+ rid: i32,
+}
+
+pub fn op_close(
+ _state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: CloseArgs = serde_json::from_value(args)?;
+
+ let resource = resources::lookup(args.rid as u32)?;
+ resource.close();
+ Ok(JsonOp::Sync(json!({})))
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct SeekArgs {
+ promise_id: Option<u64>,
+ rid: i32,
+ offset: i32,
+ whence: i32,
+}
+
+pub fn op_seek(
+ _state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: SeekArgs = serde_json::from_value(args)?;
+
+ let resource = resources::lookup(args.rid as u32)?;
+ let op = resources::seek(resource, args.offset, args.whence as u32)
+ .and_then(move |_| futures::future::ok(json!({})));
+ if args.promise_id.is_none() {
+ let buf = op.wait()?;
+ Ok(JsonOp::Sync(buf))
+ } else {
+ Ok(JsonOp::Async(Box::new(op)))
+ }
+}
diff --git a/cli/ops/fs.rs b/cli/ops/fs.rs
new file mode 100644
index 000000000..c549bef32
--- /dev/null
+++ b/cli/ops/fs.rs
@@ -0,0 +1,525 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+// Some deserializer fields are only used on Unix; the Windows build fails without them.
+#![allow(dead_code)]
+use super::dispatch_json::{blocking_json, Deserialize, JsonOp, Value};
+use crate::deno_error::DenoError;
+use crate::deno_error::ErrorKind;
+use crate::fs as deno_fs;
+use crate::state::ThreadSafeState;
+use deno::*;
+use remove_dir_all::remove_dir_all;
+use std::convert::From;
+use std::fs;
+use std::path::PathBuf;
+use std::time::UNIX_EPOCH;
+
+#[cfg(unix)]
+use std::os::unix::fs::PermissionsExt;
+
+#[derive(Deserialize)]
+struct ChdirArgs {
+ directory: String,
+}
+
+pub fn op_chdir(
+ _state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: ChdirArgs = serde_json::from_value(args)?;
+ std::env::set_current_dir(&args.directory)?;
+ Ok(JsonOp::Sync(json!({})))
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct MkdirArgs {
+ promise_id: Option<u64>,
+ path: String,
+ recursive: bool,
+ mode: u32,
+}
+
+pub fn op_mkdir(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: MkdirArgs = serde_json::from_value(args)?;
+ let (path, path_) = deno_fs::resolve_from_cwd(args.path.as_ref())?;
+
+ state.check_write(&path_)?;
+
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ debug!("op_mkdir {}", path_);
+ deno_fs::mkdir(&path, args.mode, args.recursive)?;
+ Ok(json!({}))
+ })
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct ChmodArgs {
+ promise_id: Option<u64>,
+ path: String,
+ mode: u32,
+}
+
+pub fn op_chmod(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: ChmodArgs = serde_json::from_value(args)?;
+ let (path, path_) = deno_fs::resolve_from_cwd(args.path.as_ref())?;
+
+ state.check_write(&path_)?;
+
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ debug!("op_chmod {}", &path_);
+ // Still check that the file/dir exists on Windows.
+ let _metadata = fs::metadata(&path)?;
+ #[cfg(any(unix))]
+ {
+ let mut permissions = _metadata.permissions();
+ permissions.set_mode(args.mode);
+ fs::set_permissions(&path, permissions)?;
+ }
+ Ok(json!({}))
+ })
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct ChownArgs {
+ promise_id: Option<u64>,
+ path: String,
+ uid: u32,
+ gid: u32,
+}
+
+pub fn op_chown(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: ChownArgs = serde_json::from_value(args)?;
+
+ state.check_write(&args.path)?;
+
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ debug!("op_chown {}", &args.path);
+ match deno_fs::chown(args.path.as_ref(), args.uid, args.gid) {
+ Ok(_) => Ok(json!({})),
+ Err(e) => Err(e),
+ }
+ })
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct RemoveArgs {
+ promise_id: Option<u64>,
+ path: String,
+ recursive: bool,
+}
+
+pub fn op_remove(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: RemoveArgs = serde_json::from_value(args)?;
+ let (path, path_) = deno_fs::resolve_from_cwd(args.path.as_ref())?;
+ let recursive = args.recursive;
+
+ state.check_write(&path_)?;
+
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ debug!("op_remove {}", path.display());
+ let metadata = fs::metadata(&path)?;
+ if metadata.is_file() {
+ fs::remove_file(&path)?;
+ } else if recursive {
+ remove_dir_all(&path)?;
+ } else {
+ fs::remove_dir(&path)?;
+ }
+ Ok(json!({}))
+ })
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct CopyFileArgs {
+ promise_id: Option<u64>,
+ from: String,
+ to: String,
+}
+
+pub fn op_copy_file(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: CopyFileArgs = serde_json::from_value(args)?;
+
+ let (from, from_) = deno_fs::resolve_from_cwd(args.from.as_ref())?;
+ let (to, to_) = deno_fs::resolve_from_cwd(args.to.as_ref())?;
+
+ state.check_read(&from_)?;
+ state.check_write(&to_)?;
+
+ debug!("op_copy_file {} {}", from.display(), to.display());
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ // On *nix, Rust deems a non-existent path to be invalid input.
+ // See https://github.com/rust-lang/rust/issues/54800
+ // Once the issue is resolved, we should remove this workaround.
+ if cfg!(unix) && !from.is_file() {
+ return Err(
+ DenoError::new(ErrorKind::NotFound, "File not found".to_string())
+ .into(),
+ );
+ }
+
+ fs::copy(&from, &to)?;
+ Ok(json!({}))
+ })
+}
+
+macro_rules! to_seconds {
+ ($time:expr) => {{
+ // Unwrap is safe here: if the file timestamp is before the Unix epoch,
+ // something is very wrong.
+ $time
+ .and_then(|t| Ok(t.duration_since(UNIX_EPOCH).unwrap().as_secs()))
+ .unwrap_or(0)
+ }};
+}
+
+#[cfg(any(unix))]
+fn get_mode(perm: &fs::Permissions) -> u32 {
+ perm.mode()
+}
+
+#[cfg(not(any(unix)))]
+fn get_mode(_perm: &fs::Permissions) -> u32 {
+ 0
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct StatArgs {
+ promise_id: Option<u64>,
+ filename: String,
+ lstat: bool,
+}
+
+pub fn op_stat(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: StatArgs = serde_json::from_value(args)?;
+
+ let (filename, filename_) =
+ deno_fs::resolve_from_cwd(args.filename.as_ref())?;
+ let lstat = args.lstat;
+
+ state.check_read(&filename_)?;
+
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ debug!("op_stat {} {}", filename.display(), lstat);
+ let metadata = if lstat {
+ fs::symlink_metadata(&filename)?
+ } else {
+ fs::metadata(&filename)?
+ };
+
+ Ok(json!({
+ "isFile": metadata.is_file(),
+ "isSymlink": metadata.file_type().is_symlink(),
+ "len": metadata.len(),
+ "modified":to_seconds!(metadata.modified()),
+ "accessed":to_seconds!(metadata.accessed()),
+ "created":to_seconds!(metadata.created()),
+ "mode": get_mode(&metadata.permissions()),
+ "hasMode": cfg!(target_family = "unix"), // false on windows,
+ }))
+ })
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct ReadDirArgs {
+ promise_id: Option<u64>,
+ path: String,
+}
+
+pub fn op_read_dir(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: ReadDirArgs = serde_json::from_value(args)?;
+ let (path, path_) = deno_fs::resolve_from_cwd(args.path.as_ref())?;
+
+ state.check_read(&path_)?;
+
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ debug!("op_read_dir {}", path.display());
+
+ let entries: Vec<_> = fs::read_dir(path)?
+ .map(|entry| {
+ let entry = entry.unwrap();
+ let metadata = entry.metadata().unwrap();
+ let file_type = metadata.file_type();
+
+ json!({
+ "isFile": file_type.is_file(),
+ "isSymlink": file_type.is_symlink(),
+ "len": metadata.len(),
+ "modified": to_seconds!(metadata.modified()),
+ "accessed": to_seconds!(metadata.accessed()),
+ "created": to_seconds!(metadata.created()),
+ "mode": get_mode(&metadata.permissions()),
+ "name": entry.file_name().to_str().unwrap(),
+ "hasMode": cfg!(target_family = "unix"), // false on windows,
+ })
+ })
+ .collect();
+
+ Ok(json!({ "entries": entries }))
+ })
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct RenameArgs {
+ promise_id: Option<u64>,
+ oldpath: String,
+ newpath: String,
+}
+
+pub fn op_rename(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: RenameArgs = serde_json::from_value(args)?;
+
+ let (oldpath, oldpath_) = deno_fs::resolve_from_cwd(args.oldpath.as_ref())?;
+ let (newpath, newpath_) = deno_fs::resolve_from_cwd(args.newpath.as_ref())?;
+
+ state.check_read(&oldpath_)?;
+ state.check_write(&oldpath_)?;
+ state.check_write(&newpath_)?;
+
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ debug!("op_rename {} {}", oldpath.display(), newpath.display());
+ fs::rename(&oldpath, &newpath)?;
+ Ok(json!({}))
+ })
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct LinkArgs {
+ promise_id: Option<u64>,
+ oldname: String,
+ newname: String,
+}
+
+pub fn op_link(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: LinkArgs = serde_json::from_value(args)?;
+
+ let (oldname, oldname_) = deno_fs::resolve_from_cwd(args.oldname.as_ref())?;
+ let (newname, newname_) = deno_fs::resolve_from_cwd(args.newname.as_ref())?;
+
+ state.check_read(&oldname_)?;
+ state.check_write(&newname_)?;
+
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ debug!("op_link {} {}", oldname.display(), newname.display());
+ std::fs::hard_link(&oldname, &newname)?;
+ Ok(json!({}))
+ })
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct SymlinkArgs {
+ promise_id: Option<u64>,
+ oldname: String,
+ newname: String,
+}
+
+pub fn op_symlink(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: SymlinkArgs = serde_json::from_value(args)?;
+
+ let (oldname, _oldname_) = deno_fs::resolve_from_cwd(args.oldname.as_ref())?;
+ let (newname, newname_) = deno_fs::resolve_from_cwd(args.newname.as_ref())?;
+
+ state.check_write(&newname_)?;
+ // TODO Use type for Windows.
+ if cfg!(windows) {
+ return Err(
+ DenoError::new(ErrorKind::Other, "Not implemented".to_string()).into(),
+ );
+ }
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ debug!("op_symlink {} {}", oldname.display(), newname.display());
+ #[cfg(any(unix))]
+ std::os::unix::fs::symlink(&oldname, &newname)?;
+ Ok(json!({}))
+ })
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct ReadLinkArgs {
+ promise_id: Option<u64>,
+ name: String,
+}
+
+pub fn op_read_link(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: ReadLinkArgs = serde_json::from_value(args)?;
+
+ let (name, name_) = deno_fs::resolve_from_cwd(args.name.as_ref())?;
+
+ state.check_read(&name_)?;
+
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ debug!("op_read_link {}", name.display());
+ let path = fs::read_link(&name)?;
+ let path_str = path.to_str().unwrap();
+
+ Ok(json!(path_str))
+ })
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct TruncateArgs {
+ promise_id: Option<u64>,
+ name: String,
+ len: u64,
+}
+
+pub fn op_truncate(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: TruncateArgs = serde_json::from_value(args)?;
+
+ let (filename, filename_) = deno_fs::resolve_from_cwd(args.name.as_ref())?;
+ let len = args.len;
+
+ state.check_write(&filename_)?;
+
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ debug!("op_truncate {} {}", filename_, len);
+ let f = fs::OpenOptions::new().write(true).open(&filename)?;
+ f.set_len(len)?;
+ Ok(json!({}))
+ })
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct MakeTempDirArgs {
+ promise_id: Option<u64>,
+ dir: Option<String>,
+ prefix: Option<String>,
+ suffix: Option<String>,
+}
+
+pub fn op_make_temp_dir(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: MakeTempDirArgs = serde_json::from_value(args)?;
+
+ // FIXME: this checks the literal string "make_temp" rather than the actual target directory.
+ state.check_write("make_temp")?;
+
+ let dir = args.dir.map(PathBuf::from);
+ let prefix = args.prefix.map(String::from);
+ let suffix = args.suffix.map(String::from);
+
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ // TODO(piscisaureus): use byte vector for paths, not a string.
+ // See https://github.com/denoland/deno/issues/627.
+ // We can't assume that paths are always valid utf8 strings.
+ let path = deno_fs::make_temp_dir(
+ // Converting Option<String> to Option<&str>
+ dir.as_ref().map(|x| &**x),
+ prefix.as_ref().map(|x| &**x),
+ suffix.as_ref().map(|x| &**x),
+ )?;
+ let path_str = path.to_str().unwrap();
+
+ Ok(json!(path_str))
+ })
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct Utime {
+ promise_id: Option<u64>,
+ filename: String,
+ atime: u64,
+ mtime: u64,
+}
+
+pub fn op_utime(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: Utime = serde_json::from_value(args)?;
+ state.check_write(&args.filename)?;
+ let is_sync = args.promise_id.is_none();
+ blocking_json(is_sync, move || {
+ debug!("op_utimes {} {} {}", args.filename, args.atime, args.mtime);
+ utime::set_file_times(args.filename, args.atime, args.mtime)?;
+ Ok(json!({}))
+ })
+}
+
+pub fn op_cwd(
+ _state: &ThreadSafeState,
+ _args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let path = std::env::current_dir()?;
+ let path_str = path.into_os_string().into_string().unwrap();
+ Ok(JsonOp::Sync(json!(path_str)))
+}
diff --git a/cli/ops/io.rs b/cli/ops/io.rs
new file mode 100644
index 000000000..8b8520c35
--- /dev/null
+++ b/cli/ops/io.rs
@@ -0,0 +1,46 @@
+use super::dispatch_minimal::MinimalOp;
+use crate::deno_error;
+use crate::resources;
+use crate::tokio_read;
+use crate::tokio_write;
+use deno::ErrBox;
+use deno::PinnedBuf;
+use futures::Future;
+
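+/// Minimal op: reads from the resource identified by `rid` into the supplied
+/// zero-copy buffer and resolves with the number of bytes read.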
+pub fn op_read(rid: i32, zero_copy: Option<PinnedBuf>) -> Box<MinimalOp> {
+ debug!("read rid={}", rid);
+ let zero_copy = match zero_copy {
+ None => {
+ return Box::new(futures::future::err(deno_error::no_buffer_specified()))
+ }
+ Some(buf) => buf,
+ };
+
+ match resources::lookup(rid as u32) {
+ Err(e) => Box::new(futures::future::err(e)),
+ Ok(resource) => Box::new(
+ tokio_read::read(resource, zero_copy)
+ .map_err(ErrBox::from)
+ .and_then(move |(_resource, _buf, nread)| Ok(nread as i32)),
+ ),
+ }
+}
+
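+/// Minimal op: writes the zero-copy buffer to the resource identified by
+/// `rid` and resolves with the number of bytes written.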
+pub fn op_write(rid: i32, zero_copy: Option<PinnedBuf>) -> Box<MinimalOp> {
+ debug!("write rid={}", rid);
+ let zero_copy = match zero_copy {
+ None => {
+ return Box::new(futures::future::err(deno_error::no_buffer_specified()))
+ }
+ Some(buf) => buf,
+ };
+
+ match resources::lookup(rid as u32) {
+ Err(e) => Box::new(futures::future::err(e)),
+ Ok(resource) => Box::new(
+ tokio_write::write(resource, zero_copy)
+ .map_err(ErrBox::from)
+ .and_then(move |(_resource, _buf, nwritten)| Ok(nwritten as i32)),
+ ),
+ }
+}
diff --git a/cli/ops/metrics.rs b/cli/ops/metrics.rs
new file mode 100644
index 000000000..e1a23f6c8
--- /dev/null
+++ b/cli/ops/metrics.rs
@@ -0,0 +1,21 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{JsonOp, Value};
+use crate::state::ThreadSafeState;
+use deno::*;
+use std::sync::atomic::Ordering;
+
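+/// Returns a snapshot of the op and byte counters tracked on
+/// ThreadSafeState::metrics.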
+pub fn op_metrics(
+ state: &ThreadSafeState,
+ _args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let m = &state.metrics;
+
+ Ok(JsonOp::Sync(json!({
+ "opsDispatched": m.ops_dispatched.load(Ordering::SeqCst) as u64,
+ "opsCompleted": m.ops_completed.load(Ordering::SeqCst) as u64,
+ "bytesSentControl": m.bytes_sent_control.load(Ordering::SeqCst) as u64,
+ "bytesSentData": m.bytes_sent_data.load(Ordering::SeqCst) as u64,
+ "bytesReceived": m.bytes_received.load(Ordering::SeqCst) as u64
+ })))
+}
diff --git a/cli/ops/mod.rs b/cli/ops/mod.rs
new file mode 100644
index 000000000..0c07adcc9
--- /dev/null
+++ b/cli/ops/mod.rs
@@ -0,0 +1,25 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+mod dispatch_json;
+mod dispatch_minimal;
+
+pub use dispatch_json::json_op;
+pub use dispatch_json::JsonOp;
+pub use dispatch_minimal::minimal_op;
+
+pub mod compiler;
+pub mod errors;
+pub mod fetch;
+pub mod files;
+pub mod fs;
+pub mod io;
+pub mod metrics;
+pub mod net;
+pub mod os;
+pub mod performance;
+pub mod permissions;
+pub mod process;
+pub mod random;
+pub mod repl;
+pub mod resources;
+pub mod timers;
+pub mod workers;
diff --git a/cli/ops/net.rs b/cli/ops/net.rs
new file mode 100644
index 000000000..507eff504
--- /dev/null
+++ b/cli/ops/net.rs
@@ -0,0 +1,151 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{Deserialize, JsonOp, Value};
+use crate::resolve_addr::resolve_addr;
+use crate::resources;
+use crate::resources::Resource;
+use crate::state::ThreadSafeState;
+use crate::tokio_util;
+use deno::*;
+use futures::Future;
+use std;
+use std::convert::From;
+use std::net::Shutdown;
+use tokio;
+use tokio::net::TcpListener;
+use tokio::net::TcpStream;
+
+#[derive(Deserialize)]
+struct AcceptArgs {
+ rid: i32,
+}
+
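+/// Asynchronously accepts a connection on the TCP listener identified by
+/// `rid`, registers the new stream as a resource and returns its rid along
+/// with the local and remote addresses.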
+pub fn op_accept(
+ _state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: AcceptArgs = serde_json::from_value(args)?;
+ let server_rid = args.rid as u32;
+
+ let server_resource = resources::lookup(server_rid)?;
+ let op = tokio_util::accept(server_resource)
+ .and_then(move |(tcp_stream, _socket_addr)| {
+ let local_addr = tcp_stream.local_addr()?;
+ let remote_addr = tcp_stream.peer_addr()?;
+ let tcp_stream_resource = resources::add_tcp_stream(tcp_stream);
+ Ok((tcp_stream_resource, local_addr, remote_addr))
+ })
+ .map_err(ErrBox::from)
+ .and_then(move |(tcp_stream_resource, local_addr, remote_addr)| {
+ futures::future::ok(json!({
+ "rid": tcp_stream_resource.rid,
+ "localAddr": local_addr.to_string(),
+ "remoteAddr": remote_addr.to_string(),
+ }))
+ });
+
+ Ok(JsonOp::Async(Box::new(op)))
+}
+
+#[derive(Deserialize)]
+struct DialArgs {
+ transport: String,
+ hostname: String,
+ port: u16,
+}
+
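+/// Connects to `hostname:port` over TCP (currently the only supported
+/// transport), registers the stream as a resource and returns its rid along
+/// with the local and remote addresses.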
+pub fn op_dial(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: DialArgs = serde_json::from_value(args)?;
+ assert_eq!(args.transport, "tcp"); // TODO Support others.
+
+ // TODO(ry) Using format! is suboptimal here. Better would be if
+ // state.check_net and resolve_addr() took hostname and port directly.
+ let address = format!("{}:{}", args.hostname, args.port);
+
+ state.check_net(&address)?;
+
+ let op = resolve_addr(&address).and_then(move |addr| {
+ TcpStream::connect(&addr)
+ .map_err(ErrBox::from)
+ .and_then(move |tcp_stream| {
+ let local_addr = tcp_stream.local_addr()?;
+ let remote_addr = tcp_stream.peer_addr()?;
+ let tcp_stream_resource = resources::add_tcp_stream(tcp_stream);
+ Ok((tcp_stream_resource, local_addr, remote_addr))
+ })
+ .map_err(ErrBox::from)
+ .and_then(move |(tcp_stream_resource, local_addr, remote_addr)| {
+ futures::future::ok(json!({
+ "rid": tcp_stream_resource.rid,
+ "localAddr": local_addr.to_string(),
+ "remoteAddr": remote_addr.to_string(),
+ }))
+ })
+ });
+
+ Ok(JsonOp::Async(Box::new(op)))
+}
+
+#[derive(Deserialize)]
+struct ShutdownArgs {
+ rid: i32,
+ how: i32,
+}
+
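+/// Shuts down the read half (how == 0) or write half (how == 1) of the
+/// stream identified by `rid`.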
+pub fn op_shutdown(
+ _state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: ShutdownArgs = serde_json::from_value(args)?;
+
+ let rid = args.rid as u32;
+ let how = args.how;
+ let mut resource = resources::lookup(rid)?;
+
+ let shutdown_mode = match how {
+ 0 => Shutdown::Read,
+ 1 => Shutdown::Write,
+ _ => unimplemented!(),
+ };
+
+ // Use UFCS for disambiguation
+ Resource::shutdown(&mut resource, shutdown_mode)?;
+ Ok(JsonOp::Sync(json!({})))
+}
+
+#[derive(Deserialize)]
+struct ListenArgs {
+ transport: String,
+ hostname: String,
+ port: u16,
+}
+
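+/// Binds a TCP listener to `hostname:port`, registers it as a resource and
+/// returns its rid together with the resolved local address.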
+pub fn op_listen(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: ListenArgs = serde_json::from_value(args)?;
+ assert_eq!(args.transport, "tcp");
+
+ // TODO(ry) Using format! is suboptimal here. Better would be if
+ // state.check_net and resolve_addr() took hostname and port directly.
+ let address = format!("{}:{}", args.hostname, args.port);
+
+ state.check_net(&address)?;
+
+ let addr = resolve_addr(&address).wait()?;
+ let listener = TcpListener::bind(&addr)?;
+ let local_addr = listener.local_addr()?;
+ let resource = resources::add_tcp_listener(listener);
+
+ Ok(JsonOp::Sync(json!({
+ "rid": resource.rid,
+ "localAddr": local_addr.to_string()
+ })))
+}
diff --git a/cli/ops/os.rs b/cli/ops/os.rs
new file mode 100644
index 000000000..92f640afd
--- /dev/null
+++ b/cli/ops/os.rs
@@ -0,0 +1,157 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{Deserialize, JsonOp, Value};
+use crate::colors;
+use crate::fs as deno_fs;
+use crate::state::ThreadSafeState;
+use crate::version;
+use atty;
+use deno::*;
+use log;
+use std::collections::HashMap;
+use std::env;
+use sys_info;
+use url::Url;
+
+/// BUILD_OS and BUILD_ARCH match the values in Deno.build. See js/build.ts.
+#[cfg(target_os = "macos")]
+static BUILD_OS: &str = "mac";
+#[cfg(target_os = "linux")]
+static BUILD_OS: &str = "linux";
+#[cfg(target_os = "windows")]
+static BUILD_OS: &str = "win";
+#[cfg(target_arch = "x86_64")]
+static BUILD_ARCH: &str = "x64";
+
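+/// Returns the start-up info consumed by the JS side: cwd, pid, argv,
+/// main module, version strings, build info and relevant flags.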
+pub fn op_start(
+ state: &ThreadSafeState,
+ _args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ Ok(JsonOp::Sync(json!({
+ "cwd": deno_fs::normalize_path(&env::current_dir().unwrap()),
+ "pid": std::process::id(),
+ "argv": state.argv,
+ "mainModule": state.main_module().map(|x| x.as_str().to_string()),
+ "debugFlag": state
+ .flags
+ .log_level
+ .map_or(false, |l| l == log::Level::Debug),
+ "versionFlag": state.flags.version,
+ "v8Version": version::v8(),
+ "denoVersion": version::DENO,
+ "tsVersion": version::TYPESCRIPT,
+ "noColor": !colors::use_color(),
+ "os": BUILD_OS,
+ "arch": BUILD_ARCH,
+ })))
+}
+
+pub fn op_home_dir(
+ state: &ThreadSafeState,
+ _args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ state.check_env()?;
+ let path = dirs::home_dir()
+ .unwrap_or_default()
+ .into_os_string()
+ .into_string()
+ .unwrap_or_default();
+ Ok(JsonOp::Sync(json!(path)))
+}
+
+pub fn op_exec_path(
+ state: &ThreadSafeState,
+ _args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ state.check_env()?;
+ let current_exe = env::current_exe().unwrap();
+ // Now apply URL parser to current exe to get fully resolved path, otherwise
+ // we might get `./` and `../` bits in `exec_path`
+ let exe_url = Url::from_file_path(current_exe).unwrap();
+ let path = exe_url.to_file_path().unwrap();
+ Ok(JsonOp::Sync(json!(path)))
+}
+
+#[derive(Deserialize)]
+struct SetEnv {
+ key: String,
+ value: String,
+}
+
+pub fn op_set_env(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: SetEnv = serde_json::from_value(args)?;
+ state.check_env()?;
+ env::set_var(args.key, args.value);
+ Ok(JsonOp::Sync(json!({})))
+}
+
+pub fn op_env(
+ state: &ThreadSafeState,
+ _args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ state.check_env()?;
+ let v = env::vars().collect::<HashMap<String, String>>();
+ Ok(JsonOp::Sync(json!(v)))
+}
+
+#[derive(Deserialize)]
+struct GetEnv {
+ key: String,
+}
+
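+/// Returns the value of the requested environment variable wrapped in a
+/// one-element array, or an empty array if the variable is not set.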
+pub fn op_get_env(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: GetEnv = serde_json::from_value(args)?;
+ state.check_env()?;
+ let r = match env::var(args.key) {
+ Err(env::VarError::NotPresent) => json!([]),
+ v => json!([v?]),
+ };
+ Ok(JsonOp::Sync(r))
+}
+
+#[derive(Deserialize)]
+struct Exit {
+ code: i32,
+}
+
+pub fn op_exit(
+ _s: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: Exit = serde_json::from_value(args)?;
+ std::process::exit(args.code)
+}
+
+pub fn op_is_tty(
+ _s: &ThreadSafeState,
+ _args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ Ok(JsonOp::Sync(json!({
+ "stdin": atty::is(atty::Stream::Stdin),
+ "stdout": atty::is(atty::Stream::Stdout),
+ "stderr": atty::is(atty::Stream::Stderr),
+ })))
+}
+
+pub fn op_hostname(
+ state: &ThreadSafeState,
+ _args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ state.check_env()?;
+ let hostname = sys_info::hostname().unwrap_or_else(|_| "".to_owned());
+ Ok(JsonOp::Sync(json!(hostname)))
+}
diff --git a/cli/ops/performance.rs b/cli/ops/performance.rs
new file mode 100644
index 000000000..090fc3323
--- /dev/null
+++ b/cli/ops/performance.rs
@@ -0,0 +1,30 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{JsonOp, Value};
+use crate::state::ThreadSafeState;
+use deno::*;
+
+// Returns the elapsed seconds and subsecond nanoseconds
+// since the start time of the deno runtime.
+// If the hrtime permission is not granted, the
+// nanosecond component is rounded down to 2ms precision.
+pub fn op_now(
+ state: &ThreadSafeState,
+ _args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let seconds = state.start_time.elapsed().as_secs();
+ let mut subsec_nanos = state.start_time.elapsed().subsec_nanos();
+ let reduced_time_precision = 2_000_000; // 2ms in nanoseconds
+
+ // If the hrtime permission is not granted,
+ // round the nanosecond result down to 2-millisecond precision.
+ // See: https://developer.mozilla.org/en-US/docs/Web/API/DOMHighResTimeStamp#Reduced_time_precision
+ if !state.permissions.allows_hrtime() {
+ subsec_nanos -= subsec_nanos % reduced_time_precision
+ }
+
+ Ok(JsonOp::Sync(json!({
+ "seconds": seconds,
+ "subsecNanos": subsec_nanos,
+ })))
+}
diff --git a/cli/ops/permissions.rs b/cli/ops/permissions.rs
new file mode 100644
index 000000000..5d14f39be
--- /dev/null
+++ b/cli/ops/permissions.rs
@@ -0,0 +1,44 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{Deserialize, JsonOp, Value};
+use crate::state::ThreadSafeState;
+use deno::*;
+
+pub fn op_permissions(
+ state: &ThreadSafeState,
+ _args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ Ok(JsonOp::Sync(json!({
+ "run": state.permissions.allows_run(),
+ "read": state.permissions.allows_read(),
+ "write": state.permissions.allows_write(),
+ "net": state.permissions.allows_net(),
+ "env": state.permissions.allows_env(),
+ "hrtime": state.permissions.allows_hrtime(),
+ })))
+}
+
+#[derive(Deserialize)]
+struct RevokePermissionArgs {
+ permission: String,
+}
+
+pub fn op_revoke_permission(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: RevokePermissionArgs = serde_json::from_value(args)?;
+ let permission = args.permission.as_ref();
+ match permission {
+ "run" => state.permissions.revoke_run(),
+ "read" => state.permissions.revoke_read(),
+ "write" => state.permissions.revoke_write(),
+ "net" => state.permissions.revoke_net(),
+ "env" => state.permissions.revoke_env(),
+ "hrtime" => state.permissions.revoke_hrtime(),
+ _ => Ok(()),
+ }?;
+
+ Ok(JsonOp::Sync(json!({})))
+}
diff --git a/cli/ops/process.rs b/cli/ops/process.rs
new file mode 100644
index 000000000..8dff53c6e
--- /dev/null
+++ b/cli/ops/process.rs
@@ -0,0 +1,157 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{Deserialize, JsonOp, Value};
+use crate::resources;
+use crate::signal::kill;
+use crate::state::ThreadSafeState;
+use deno::*;
+use futures;
+use futures::Future;
+use std;
+use std::convert::From;
+use std::process::Command;
+use tokio_process::CommandExt;
+
+#[cfg(unix)]
+use std::os::unix::process::ExitStatusExt;
+
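+/// Maps the stdio mode string received from JS ("inherit" | "piped" |
+/// "null") to the corresponding std::process::Stdio value.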
+fn subprocess_stdio_map(s: &str) -> std::process::Stdio {
+ match s {
+ "inherit" => std::process::Stdio::inherit(),
+ "piped" => std::process::Stdio::piped(),
+ "null" => std::process::Stdio::null(),
+ _ => unreachable!(),
+ }
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct RunArgs {
+ args: Vec<String>,
+ cwd: Option<String>,
+ env: Vec<(String, String)>,
+ stdin: String,
+ stdout: String,
+ stderr: String,
+ stdin_rid: u32,
+ stdout_rid: u32,
+ stderr_rid: u32,
+}
+
+pub fn op_run(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let run_args: RunArgs = serde_json::from_value(args)?;
+
+ state.check_run()?;
+
+ let args = run_args.args;
+ let env = run_args.env;
+ let cwd = run_args.cwd;
+
+ let mut c = Command::new(args.get(0).unwrap());
+ (1..args.len()).for_each(|i| {
+ let arg = args.get(i).unwrap();
+ c.arg(arg);
+ });
+ cwd.map(|d| c.current_dir(d));
+ for (key, value) in &env {
+ c.env(key, value);
+ }
+
+ // TODO: make this work with other resources, e.g. sockets
+ let stdin_rid = run_args.stdin_rid;
+ if stdin_rid > 0 {
+ c.stdin(resources::get_file(stdin_rid)?);
+ } else {
+ c.stdin(subprocess_stdio_map(run_args.stdin.as_ref()));
+ }
+
+ let stdout_rid = run_args.stdout_rid;
+ if stdout_rid > 0 {
+ c.stdout(resources::get_file(stdout_rid)?);
+ } else {
+ c.stdout(subprocess_stdio_map(run_args.stdout.as_ref()));
+ }
+
+ let stderr_rid = run_args.stderr_rid;
+ if stderr_rid > 0 {
+ c.stderr(resources::get_file(stderr_rid)?);
+ } else {
+ c.stderr(subprocess_stdio_map(run_args.stderr.as_ref()));
+ }
+
+ // Spawn the command.
+ let child = c.spawn_async().map_err(ErrBox::from)?;
+
+ let pid = child.id();
+ let resources = resources::add_child(child);
+
+ Ok(JsonOp::Sync(json!({
+ "rid": resources.child_rid,
+ "pid": pid,
+ "stdinRid": resources.stdin_rid,
+ "stdoutRid": resources.stdout_rid,
+ "stderrRid": resources.stderr_rid,
+ })))
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct RunStatusArgs {
+ rid: i32,
+}
+
+pub fn op_run_status(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: RunStatusArgs = serde_json::from_value(args)?;
+ let rid = args.rid as u32;
+
+ state.check_run()?;
+
+ let future = resources::child_status(rid)?;
+
+ let future = future.and_then(move |run_status| {
+ let code = run_status.code();
+
+ #[cfg(unix)]
+ let signal = run_status.signal();
+ #[cfg(not(unix))]
+ let signal = None;
+
+ code
+ .or(signal)
+ .expect("Should have either an exit code or a signal.");
+ let got_signal = signal.is_some();
+
+ futures::future::ok(json!({
+ "gotSignal": got_signal,
+ "exitCode": code.unwrap_or(-1),
+ "exitSignal": signal.unwrap_or(-1),
+ }))
+ });
+
+ Ok(JsonOp::Async(Box::new(future)))
+}
+
+#[derive(Deserialize)]
+struct KillArgs {
+ pid: i32,
+ signo: i32,
+}
+
+pub fn op_kill(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ state.check_run()?;
+
+ let args: KillArgs = serde_json::from_value(args)?;
+ kill(args.pid, args.signo)?;
+ Ok(JsonOp::Sync(json!({})))
+}
diff --git a/cli/ops/random.rs b/cli/ops/random.rs
new file mode 100644
index 000000000..7470eab40
--- /dev/null
+++ b/cli/ops/random.rs
@@ -0,0 +1,24 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{JsonOp, Value};
+use crate::state::ThreadSafeState;
+use deno::*;
+use rand::thread_rng;
+use rand::Rng;
+
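+/// Fills the provided zero-copy buffer with random bytes. Uses the seeded
+/// RNG when one is configured on the state (presumably set up by a seed
+/// flag) and the thread-local RNG otherwise.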
+pub fn op_get_random_values(
+ state: &ThreadSafeState,
+ _args: Value,
+ zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ assert!(zero_copy.is_some());
+
+ if let Some(ref seeded_rng) = state.seeded_rng {
+ let mut rng = seeded_rng.lock().unwrap();
+ rng.fill(&mut zero_copy.unwrap()[..]);
+ } else {
+ let mut rng = thread_rng();
+ rng.fill(&mut zero_copy.unwrap()[..]);
+ }
+
+ Ok(JsonOp::Sync(json!({})))
+}
diff --git a/cli/ops/repl.rs b/cli/ops/repl.rs
new file mode 100644
index 000000000..7ab7509de
--- /dev/null
+++ b/cli/ops/repl.rs
@@ -0,0 +1,50 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{blocking_json, Deserialize, JsonOp, Value};
+use crate::repl;
+use crate::resources;
+use crate::state::ThreadSafeState;
+use deno::*;
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct ReplStartArgs {
+ history_file: String,
+}
+
+pub fn op_repl_start(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: ReplStartArgs = serde_json::from_value(args)?;
+
+ debug!("op_repl_start {}", args.history_file);
+ let history_path = repl::history_path(&state.dir, &args.history_file);
+ let repl = repl::Repl::new(history_path);
+ let resource = resources::add_repl(repl);
+
+ Ok(JsonOp::Sync(json!(resource.rid)))
+}
+
+#[derive(Deserialize)]
+struct ReplReadlineArgs {
+ rid: i32,
+ prompt: String,
+}
+
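+/// Reads a line from the REPL resource identified by `rid` using the given
+/// prompt. Always dispatched asynchronously so readline does not block the
+/// main event loop.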
+pub fn op_repl_readline(
+ _state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: ReplReadlineArgs = serde_json::from_value(args)?;
+ let rid = args.rid;
+ let prompt = args.prompt;
+ debug!("op_repl_readline {} {}", rid, prompt);
+
+ blocking_json(false, move || {
+ let repl = resources::get_repl(rid as u32)?;
+ let line = repl.lock().unwrap().readline(&prompt)?;
+ Ok(json!(line))
+ })
+}
diff --git a/cli/ops/resources.rs b/cli/ops/resources.rs
new file mode 100644
index 000000000..dafd01d08
--- /dev/null
+++ b/cli/ops/resources.rs
@@ -0,0 +1,14 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{JsonOp, Value};
+use crate::resources::table_entries;
+use crate::state::ThreadSafeState;
+use deno::*;
+
+pub fn op_resources(
+ _state: &ThreadSafeState,
+ _args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let serialized_resources = table_entries();
+ Ok(JsonOp::Sync(json!(serialized_resources)))
+}
diff --git a/cli/ops/timers.rs b/cli/ops/timers.rs
new file mode 100644
index 000000000..abcd5c1b3
--- /dev/null
+++ b/cli/ops/timers.rs
@@ -0,0 +1,42 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{Deserialize, JsonOp, Value};
+use crate::state::ThreadSafeState;
+use deno::*;
+use futures::Future;
+use std;
+use std::time::Duration;
+use std::time::Instant;
+
+pub fn op_global_timer_stop(
+ state: &ThreadSafeState,
+ _args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let state = state;
+ let mut t = state.global_timer.lock().unwrap();
+ t.cancel();
+ Ok(JsonOp::Sync(json!({})))
+}
+
+#[derive(Deserialize)]
+struct GlobalTimerArgs {
+ timeout: u64,
+}
+
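+/// Arms the single global timer with a deadline `timeout` milliseconds from
+/// now; the returned op resolves when the deadline elapses or the timer is
+/// cancelled.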
+pub fn op_global_timer(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: GlobalTimerArgs = serde_json::from_value(args)?;
+ let val = args.timeout;
+
+ let state = state;
+ let mut t = state.global_timer.lock().unwrap();
+ let deadline = Instant::now() + Duration::from_millis(val);
+ let f = t
+ .new_timeout(deadline)
+ .then(move |_| futures::future::ok(json!({})));
+
+ Ok(JsonOp::Async(Box::new(f)))
+}
diff --git a/cli/ops/tls.rs b/cli/ops/tls.rs
new file mode 100644
index 000000000..2b1d94f2b
--- /dev/null
+++ b/cli/ops/tls.rs
@@ -0,0 +1,76 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{Deserialize, JsonOp, Value};
+use crate::resolve_addr::resolve_addr;
+use crate::resources;
+use crate::state::ThreadSafeState;
+use deno::*;
+use futures::Future;
+use std;
+use std::convert::From;
+use std::sync::Arc;
+use tokio;
+use tokio::net::TcpStream;
+use tokio_rustls::{rustls::ClientConfig, TlsConnector};
+use webpki;
+use webpki::DNSNameRef;
+use webpki_roots;
+
+#[derive(Deserialize)]
+struct DialTLSArgs {
+ hostname: String,
+ port: u16,
+}
+
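+/// Connects to `hostname:port`, performs a TLS handshake verified against
+/// the bundled webpki root certificates, registers the resulting stream as a
+/// resource and returns its rid along with the local and remote addresses.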
+pub fn op_dial_tls(
+ state: &ThreadSafeState,
+ args: Value,
+ _zero_copy: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: DialTLSArgs = serde_json::from_value(args)?;
+
+ // TODO(ry) Using format! is suboptimal here. Better would be if
+ // state.check_net and resolve_addr() took hostname and port directly.
+ let address = format!("{}:{}", args.hostname, args.port);
+
+ state.check_net(&address)?;
+
+ let mut domain = args.hostname;
+ if domain.is_empty() {
+ domain.push_str("localhost");
+ }
+
+ let op = resolve_addr(&address).and_then(move |addr| {
+ TcpStream::connect(&addr)
+ .and_then(move |tcp_stream| {
+ let local_addr = tcp_stream.local_addr()?;
+ let remote_addr = tcp_stream.peer_addr()?;
+ let mut config = ClientConfig::new();
+ config
+ .root_store
+ .add_server_trust_anchors(&webpki_roots::TLS_SERVER_ROOTS);
+
+ let tls_connector = TlsConnector::from(Arc::new(config));
+ Ok((tls_connector, tcp_stream, local_addr, remote_addr))
+ })
+ .map_err(ErrBox::from)
+ .and_then(
+ move |(tls_connector, tcp_stream, local_addr, remote_addr)| {
+ let dnsname = DNSNameRef::try_from_ascii_str(&domain)
+ .expect("Invalid DNS lookup");
+ tls_connector
+ .connect(dnsname, tcp_stream)
+ .map_err(ErrBox::from)
+ .and_then(move |tls_stream| {
+ let tls_stream_resource = resources::add_tls_stream(tls_stream);
+ futures::future::ok(json!({
+ "rid": tls_stream_resource.rid,
+ "localAddr": local_addr.to_string(),
+ "remoteAddr": remote_addr.to_string(),
+ }))
+ })
+ },
+ )
+ });
+
+ Ok(JsonOp::Async(Box::new(op)))
+}
diff --git a/cli/ops/workers.rs b/cli/ops/workers.rs
new file mode 100644
index 000000000..6950f25d6
--- /dev/null
+++ b/cli/ops/workers.rs
@@ -0,0 +1,227 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use super::dispatch_json::{Deserialize, JsonOp, Value};
+use crate::deno_error::js_check;
+use crate::deno_error::DenoError;
+use crate::deno_error::ErrorKind;
+use crate::resources;
+use crate::startup_data;
+use crate::state::ThreadSafeState;
+use crate::worker::Worker;
+use deno::*;
+use futures;
+use futures::Async;
+use futures::Future;
+use futures::Sink;
+use futures::Stream;
+use std;
+use std::convert::From;
+
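+/// Future resolving with the next message received from the host on this
+/// worker's channel (None once the channel is closed).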
+struct GetMessageFuture {
+ pub state: ThreadSafeState,
+}
+
+impl Future for GetMessageFuture {
+ type Item = Option<Buf>;
+ type Error = ();
+
+ fn poll(&mut self) -> Result<Async<Self::Item>, Self::Error> {
+ let mut wc = self.state.worker_channels.lock().unwrap();
+ wc.1
+ .poll()
+ .map_err(|err| panic!("worker_channel recv err {:?}", err))
+ }
+}
+
+/// Get message from host as guest worker
+pub fn op_worker_get_message(
+ state: &ThreadSafeState,
+ _args: Value,
+ _data: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let op = GetMessageFuture {
+ state: state.clone(),
+ };
+
+ let op = op
+ .map_err(move |_| -> ErrBox { unimplemented!() })
+ .and_then(move |maybe_buf| {
+ debug!("op_worker_get_message");
+
+ futures::future::ok(json!({
+ "data": maybe_buf.map(|buf| buf.to_owned())
+ }))
+ });
+
+ Ok(JsonOp::Async(Box::new(op)))
+}
+
+/// Post message to host as guest worker
+pub fn op_worker_post_message(
+ state: &ThreadSafeState,
+ _args: Value,
+ data: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let d = Vec::from(data.unwrap().as_ref()).into_boxed_slice();
+
+ let tx = {
+ let wc = state.worker_channels.lock().unwrap();
+ wc.0.clone()
+ };
+ tx.send(d)
+ .wait()
+ .map_err(|e| DenoError::new(ErrorKind::Other, e.to_string()))?;
+
+ Ok(JsonOp::Sync(json!({})))
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct CreateWorkerArgs {
+ specifier: String,
+ include_deno_namespace: bool,
+ has_source_code: bool,
+ source_code: String,
+}
+
+/// Create worker as the host
+pub fn op_create_worker(
+ state: &ThreadSafeState,
+ args: Value,
+ _data: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: CreateWorkerArgs = serde_json::from_value(args)?;
+
+ let specifier = args.specifier.as_ref();
+ // Only include deno namespace if requested AND current worker
+ // has included namespace (to avoid escalation).
+ let include_deno_namespace =
+ args.include_deno_namespace && state.include_deno_namespace;
+ let has_source_code = args.has_source_code;
+ let source_code = args.source_code;
+
+ let parent_state = state.clone();
+
+ let mut module_specifier = ModuleSpecifier::resolve_url_or_path(specifier)?;
+
+ let mut child_argv = parent_state.argv.clone();
+
+ if !has_source_code {
+ if let Some(module) = state.main_module() {
+ module_specifier =
+ ModuleSpecifier::resolve_import(specifier, &module.to_string())?;
+ child_argv[1] = module_specifier.to_string();
+ }
+ }
+
+ let child_state = ThreadSafeState::new(
+ parent_state.flags.clone(),
+ child_argv,
+ parent_state.progress.clone(),
+ include_deno_namespace,
+ )?;
+ let rid = child_state.resource.rid;
+ let name = format!("USER-WORKER-{}", specifier);
+ let deno_main_call = format!("denoMain({})", include_deno_namespace);
+
+ let mut worker =
+ Worker::new(name, startup_data::deno_isolate_init(), child_state);
+ js_check(worker.execute(&deno_main_call));
+ js_check(worker.execute("workerMain()"));
+
+ let exec_cb = move |worker: Worker| {
+ let mut workers_tl = parent_state.workers.lock().unwrap();
+ workers_tl.insert(rid, worker.shared());
+ json!(rid)
+ };
+
+ // If source code was provided, execute it immediately.
+ if has_source_code {
+ js_check(worker.execute(&source_code));
+ return Ok(JsonOp::Sync(exec_cb(worker)));
+ }
+
+ let op = worker
+ .execute_mod_async(&module_specifier, false)
+ .and_then(move |()| Ok(exec_cb(worker)));
+
+ let result = op.wait()?;
+ Ok(JsonOp::Sync(result))
+}
+
+#[derive(Deserialize)]
+struct HostGetWorkerClosedArgs {
+ rid: i32,
+}
+
+/// Return when the worker closes
+pub fn op_host_get_worker_closed(
+ state: &ThreadSafeState,
+ args: Value,
+ _data: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: HostGetWorkerClosedArgs = serde_json::from_value(args)?;
+
+ let rid = args.rid as u32;
+ let state = state.clone();
+
+ let shared_worker_future = {
+ let workers_tl = state.workers.lock().unwrap();
+ let worker = workers_tl.get(&rid).unwrap();
+ worker.clone()
+ };
+
+ let op = Box::new(
+ shared_worker_future.then(move |_result| futures::future::ok(json!({}))),
+ );
+
+ Ok(JsonOp::Async(Box::new(op)))
+}
+
+#[derive(Deserialize)]
+struct HostGetMessageArgs {
+ rid: i32,
+}
+
+/// Get message from guest worker as host
+pub fn op_host_get_message(
+ _state: &ThreadSafeState,
+ args: Value,
+ _data: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: HostGetMessageArgs = serde_json::from_value(args)?;
+
+ let rid = args.rid as u32;
+ let op = resources::get_message_from_worker(rid)
+ .map_err(move |_| -> ErrBox { unimplemented!() })
+ .and_then(move |maybe_buf| {
+ futures::future::ok(json!({
+ "data": maybe_buf.map(|buf| buf.to_owned())
+ }))
+ });
+
+ Ok(JsonOp::Async(Box::new(op)))
+}
+
+#[derive(Deserialize)]
+struct HostPostMessageArgs {
+ rid: i32,
+}
+
+/// Post message to guest worker as host
+pub fn op_host_post_message(
+ _state: &ThreadSafeState,
+ args: Value,
+ data: Option<PinnedBuf>,
+) -> Result<JsonOp, ErrBox> {
+ let args: HostPostMessageArgs = serde_json::from_value(args)?;
+
+ let rid = args.rid as u32;
+
+ let d = Vec::from(data.unwrap().as_ref()).into_boxed_slice();
+
+ resources::post_message_to_worker(rid, d)
+ .wait()
+ .map_err(|e| DenoError::new(ErrorKind::Other, e.to_string()))?;
+
+ Ok(JsonOp::Sync(json!({})))
+}
diff --git a/cli/permissions.rs b/cli/permissions.rs
new file mode 100644
index 000000000..814c3ff94
--- /dev/null
+++ b/cli/permissions.rs
@@ -0,0 +1,637 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::deno_error::permission_denied;
+use crate::flags::DenoFlags;
+use ansi_term::Style;
+use atty;
+use deno::ErrBox;
+use log;
+use std::collections::HashSet;
+use std::fmt;
+use std::io;
+use std::path::PathBuf;
+use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
+use std::sync::Arc;
+
+const PERMISSION_EMOJI: &str = "⚠️";
+
+/// Tri-state value for storing permission state
+pub enum PermissionAccessorState {
+ Allow = 0,
+ Ask = 1,
+ Deny = 2,
+}
+
+impl From<usize> for PermissionAccessorState {
+ fn from(val: usize) -> Self {
+ match val {
+ 0 => PermissionAccessorState::Allow,
+ 1 => PermissionAccessorState::Ask,
+ 2 => PermissionAccessorState::Deny,
+ _ => unreachable!(),
+ }
+ }
+}
+
+impl From<bool> for PermissionAccessorState {
+ fn from(val: bool) -> Self {
+ if val {
+ PermissionAccessorState::Allow
+ } else {
+ PermissionAccessorState::Ask
+ }
+ }
+}
+
+impl fmt::Display for PermissionAccessorState {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ PermissionAccessorState::Allow => f.pad("Allow"),
+ PermissionAccessorState::Ask => f.pad("Ask"),
+ PermissionAccessorState::Deny => f.pad("Deny"),
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct PermissionAccessor {
+ state: Arc<AtomicUsize>,
+}
+
+impl PermissionAccessor {
+ pub fn new(state: PermissionAccessorState) -> Self {
+ Self {
+ state: Arc::new(AtomicUsize::new(state as usize)),
+ }
+ }
+
+ pub fn is_allow(&self) -> bool {
+ match self.get_state() {
+ PermissionAccessorState::Allow => true,
+ _ => false,
+ }
+ }
+
+ /// If the state is "Allow" walk it back to the default "Ask"
+ /// Don't do anything if state is "Deny"
+ pub fn revoke(&self) {
+ if self.is_allow() {
+ self.ask();
+ }
+ }
+
+ pub fn allow(&self) {
+ self.set_state(PermissionAccessorState::Allow)
+ }
+
+ pub fn ask(&self) {
+ self.set_state(PermissionAccessorState::Ask)
+ }
+
+ pub fn deny(&self) {
+ self.set_state(PermissionAccessorState::Deny)
+ }
+
+ /// Update this accessor's state based on a PromptResult value.
+ /// The state is only updated if the PromptResult is one of the
+ /// "Always" variants.
+ pub fn update_with_prompt_result(&self, prompt_result: &PromptResult) {
+ match prompt_result {
+ PromptResult::AllowAlways => self.allow(),
+ PromptResult::DenyAlways => self.deny(),
+ _ => {}
+ }
+ }
+
+ #[inline]
+ pub fn get_state(&self) -> PermissionAccessorState {
+ self.state.load(Ordering::SeqCst).into()
+ }
+ fn set_state(&self, state: PermissionAccessorState) {
+ self.state.store(state as usize, Ordering::SeqCst)
+ }
+}
+
+impl From<bool> for PermissionAccessor {
+ fn from(val: bool) -> Self {
+ Self::new(PermissionAccessorState::from(val))
+ }
+}
+
+impl Default for PermissionAccessor {
+ fn default() -> Self {
+ Self {
+ state: Arc::new(AtomicUsize::new(PermissionAccessorState::Ask as usize)),
+ }
+ }
+}
+
+#[derive(Debug, Default)]
+pub struct DenoPermissions {
+ // Keep in sync with src/permissions.ts
+ pub allow_read: PermissionAccessor,
+ pub read_whitelist: Arc<HashSet<String>>,
+ pub allow_write: PermissionAccessor,
+ pub write_whitelist: Arc<HashSet<String>>,
+ pub allow_net: PermissionAccessor,
+ pub net_whitelist: Arc<HashSet<String>>,
+ pub allow_env: PermissionAccessor,
+ pub allow_run: PermissionAccessor,
+ pub allow_hrtime: PermissionAccessor,
+ pub no_prompts: AtomicBool,
+}
+
+impl DenoPermissions {
+ pub fn from_flags(flags: &DenoFlags) -> Self {
+ Self {
+ allow_read: PermissionAccessor::from(flags.allow_read),
+ read_whitelist: Arc::new(flags.read_whitelist.iter().cloned().collect()),
+ allow_write: PermissionAccessor::from(flags.allow_write),
+ write_whitelist: Arc::new(
+ flags.write_whitelist.iter().cloned().collect(),
+ ),
+ allow_net: PermissionAccessor::from(flags.allow_net),
+ net_whitelist: Arc::new(flags.net_whitelist.iter().cloned().collect()),
+ allow_env: PermissionAccessor::from(flags.allow_env),
+ allow_run: PermissionAccessor::from(flags.allow_run),
+ allow_hrtime: PermissionAccessor::from(flags.allow_hrtime),
+ no_prompts: AtomicBool::new(flags.no_prompts),
+ }
+ }
+
+ pub fn check_run(&self) -> Result<(), ErrBox> {
+ let msg = "access to run a subprocess";
+
+ match self.allow_run.get_state() {
+ PermissionAccessorState::Allow => {
+ self.log_perm_access(msg);
+ Ok(())
+ }
+ PermissionAccessorState::Ask => match self.try_permissions_prompt(msg) {
+ Err(e) => Err(e),
+ Ok(v) => {
+ self.allow_run.update_with_prompt_result(&v);
+ v.check()?;
+ self.log_perm_access(msg);
+ Ok(())
+ }
+ },
+ PermissionAccessorState::Deny => Err(permission_denied()),
+ }
+ }
+
+ pub fn check_read(&self, filename: &str) -> Result<(), ErrBox> {
+ let msg = &format!("read access to \"{}\"", filename);
+ match self.allow_read.get_state() {
+ PermissionAccessorState::Allow => {
+ self.log_perm_access(msg);
+ Ok(())
+ }
+ state => {
+ if check_path_white_list(filename, &self.read_whitelist) {
+ self.log_perm_access(msg);
+ Ok(())
+ } else {
+ match state {
+ PermissionAccessorState::Ask => {
+ match self.try_permissions_prompt(msg) {
+ Err(e) => Err(e),
+ Ok(v) => {
+ self.allow_read.update_with_prompt_result(&v);
+ v.check()?;
+ self.log_perm_access(msg);
+ Ok(())
+ }
+ }
+ }
+ PermissionAccessorState::Deny => Err(permission_denied()),
+ _ => unreachable!(),
+ }
+ }
+ }
+ }
+ }
+
+ pub fn check_write(&self, filename: &str) -> Result<(), ErrBox> {
+ let msg = &format!("write access to \"{}\"", filename);
+ match self.allow_write.get_state() {
+ PermissionAccessorState::Allow => {
+ self.log_perm_access(msg);
+ Ok(())
+ }
+ state => {
+ if check_path_white_list(filename, &self.write_whitelist) {
+ self.log_perm_access(msg);
+ Ok(())
+ } else {
+ match state {
+ PermissionAccessorState::Ask => {
+ match self.try_permissions_prompt(msg) {
+ Err(e) => Err(e),
+ Ok(v) => {
+ self.allow_write.update_with_prompt_result(&v);
+ v.check()?;
+ self.log_perm_access(msg);
+ Ok(())
+ }
+ }
+ }
+ PermissionAccessorState::Deny => Err(permission_denied()),
+ _ => unreachable!(),
+ }
+ }
+ }
+ }
+ }
+
+ pub fn check_net(&self, host_and_port: &str) -> Result<(), ErrBox> {
+ let msg = &format!("network access to \"{}\"", host_and_port);
+ match self.allow_net.get_state() {
+ PermissionAccessorState::Allow => {
+ self.log_perm_access(msg);
+ Ok(())
+ }
+ state => {
+ let parts = host_and_port.split(':').collect::<Vec<&str>>();
+ if match parts.len() {
+ 2 => {
+ if self.net_whitelist.contains(parts[0]) {
+ true
+ } else {
+ self
+ .net_whitelist
+ .contains(&format!("{}:{}", parts[0], parts[1]))
+ }
+ }
+ 1 => self.net_whitelist.contains(parts[0]),
+ _ => panic!("Failed to parse origin string: {}", host_and_port),
+ } {
+ self.log_perm_access(msg);
+ Ok(())
+ } else {
+ self.check_net_inner(state, msg)
+ }
+ }
+ }
+ }
+
+ pub fn check_net_url(&self, url: &url::Url) -> Result<(), ErrBox> {
+ let msg = &format!("network access to \"{}\"", url);
+ match self.allow_net.get_state() {
+ PermissionAccessorState::Allow => {
+ self.log_perm_access(msg);
+ Ok(())
+ }
+ state => {
+ let host = url.host().unwrap();
+ let whitelist_result = {
+ if self.net_whitelist.contains(&format!("{}", host)) {
+ true
+ } else {
+ match url.port() {
+ Some(port) => {
+ self.net_whitelist.contains(&format!("{}:{}", host, port))
+ }
+ None => false,
+ }
+ }
+ };
+ if whitelist_result {
+ self.log_perm_access(msg);
+ Ok(())
+ } else {
+ self.check_net_inner(state, msg)
+ }
+ }
+ }
+ }
+
+ fn check_net_inner(
+ &self,
+ state: PermissionAccessorState,
+ prompt_str: &str,
+ ) -> Result<(), ErrBox> {
+ match state {
+ PermissionAccessorState::Ask => {
+ match self.try_permissions_prompt(prompt_str) {
+ Err(e) => Err(e),
+ Ok(v) => {
+ self.allow_net.update_with_prompt_result(&v);
+ v.check()?;
+ self.log_perm_access(prompt_str);
+ Ok(())
+ }
+ }
+ }
+ PermissionAccessorState::Deny => Err(permission_denied()),
+ _ => unreachable!(),
+ }
+ }
+
+ pub fn check_env(&self) -> Result<(), ErrBox> {
+ let msg = "access to environment variables";
+ match self.allow_env.get_state() {
+ PermissionAccessorState::Allow => {
+ self.log_perm_access(msg);
+ Ok(())
+ }
+ PermissionAccessorState::Ask => match self.try_permissions_prompt(msg) {
+ Err(e) => Err(e),
+ Ok(v) => {
+ self.allow_env.update_with_prompt_result(&v);
+ v.check()?;
+ self.log_perm_access(msg);
+ Ok(())
+ }
+ },
+ PermissionAccessorState::Deny => Err(permission_denied()),
+ }
+ }
+
+ /// Try to present the user with a permission prompt.
+ /// Errors with permission_denied if no_prompts is enabled
+ /// or if stdin/stderr is not a TTY.
+ fn try_permissions_prompt(
+ &self,
+ message: &str,
+ ) -> Result<PromptResult, ErrBox> {
+ if self.no_prompts.load(Ordering::SeqCst) {
+ return Err(permission_denied());
+ }
+ if !atty::is(atty::Stream::Stdin) || !atty::is(atty::Stream::Stderr) {
+ return Err(permission_denied());
+ };
+ permission_prompt(message)
+ }
+
+ fn log_perm_access(&self, message: &str) {
+ if log_enabled!(log::Level::Info) {
+ eprintln!(
+ "{}",
+ Style::new()
+ .bold()
+ .paint(format!("{}️ Granted {}", PERMISSION_EMOJI, message))
+ );
+ }
+ }
+
+ pub fn allows_run(&self) -> bool {
+ self.allow_run.is_allow()
+ }
+
+ pub fn allows_read(&self) -> bool {
+ self.allow_read.is_allow()
+ }
+
+ pub fn allows_write(&self) -> bool {
+ self.allow_write.is_allow()
+ }
+
+ pub fn allows_net(&self) -> bool {
+ self.allow_net.is_allow()
+ }
+
+ pub fn allows_env(&self) -> bool {
+ self.allow_env.is_allow()
+ }
+
+ pub fn allows_hrtime(&self) -> bool {
+ self.allow_hrtime.is_allow()
+ }
+
+ pub fn revoke_run(&self) -> Result<(), ErrBox> {
+ self.allow_run.revoke();
+ Ok(())
+ }
+
+ pub fn revoke_read(&self) -> Result<(), ErrBox> {
+ self.allow_read.revoke();
+ Ok(())
+ }
+
+ pub fn revoke_write(&self) -> Result<(), ErrBox> {
+ self.allow_write.revoke();
+ Ok(())
+ }
+
+ pub fn revoke_net(&self) -> Result<(), ErrBox> {
+ self.allow_net.revoke();
+ Ok(())
+ }
+
+ pub fn revoke_env(&self) -> Result<(), ErrBox> {
+ self.allow_env.revoke();
+ Ok(())
+ }
+ pub fn revoke_hrtime(&self) -> Result<(), ErrBox> {
+ self.allow_hrtime.revoke();
+ Ok(())
+ }
+}
+
+/// Quad-state value for representing user input on permission prompt
+#[derive(Debug, Clone)]
+pub enum PromptResult {
+ AllowAlways = 0,
+ AllowOnce = 1,
+ DenyOnce = 2,
+ DenyAlways = 3,
+}
+
+impl PromptResult {
+ /// If value is any form of deny this will error with permission_denied
+ pub fn check(&self) -> Result<(), ErrBox> {
+ match self {
+ PromptResult::DenyOnce => Err(permission_denied()),
+ PromptResult::DenyAlways => Err(permission_denied()),
+ _ => Ok(()),
+ }
+ }
+}
+
+impl fmt::Display for PromptResult {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ PromptResult::AllowAlways => f.pad("AllowAlways"),
+ PromptResult::AllowOnce => f.pad("AllowOnce"),
+ PromptResult::DenyOnce => f.pad("DenyOnce"),
+ PromptResult::DenyAlways => f.pad("DenyAlways"),
+ }
+ }
+}
+
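+/// Prompts the user on stderr and loops until one of the recognized
+/// responses (a/y/n/d) is read from stdin.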
+fn permission_prompt(message: &str) -> Result<PromptResult, ErrBox> {
+ let msg = format!("️{} Deno requests {}. Grant? [a/y/n/d (a = allow always, y = allow once, n = deny once, d = deny always)] ", PERMISSION_EMOJI, message);
+ // Print to stderr so the prompt is still shown when deno's stdout is redirected to a file.
+ eprint!("{}", Style::new().bold().paint(msg));
+ loop {
+ let mut input = String::new();
+ let stdin = io::stdin();
+ let _nread = stdin.read_line(&mut input)?;
+ let ch = input.chars().next().unwrap();
+ match ch.to_ascii_lowercase() {
+ 'a' => return Ok(PromptResult::AllowAlways),
+ 'y' => return Ok(PromptResult::AllowOnce),
+ 'n' => return Ok(PromptResult::DenyOnce),
+ 'd' => return Ok(PromptResult::DenyAlways),
+ _ => {
+ // If we don't get a recognized option try again.
+ let msg_again = format!("Unrecognized option '{}' [a/y/n/d (a = allow always, y = allow once, n = deny once, d = deny always)] ", ch);
+ eprint!("{}", Style::new().bold().paint(msg_again));
+ }
+ };
+ }
+}
+
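+/// Returns true if `filename` or any of its ancestor directories is present
+/// in the whitelist.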
+fn check_path_white_list(
+ filename: &str,
+ white_list: &Arc<HashSet<String>>,
+) -> bool {
+ let mut path_buf = PathBuf::from(filename);
+
+ loop {
+ if white_list.contains(path_buf.to_str().unwrap()) {
+ return true;
+ }
+ if !path_buf.pop() {
+ break;
+ }
+ }
+ false
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ // Creates vector of strings, Vec<String>
+ macro_rules! svec {
+ ($($x:expr),*) => (vec![$($x.to_string()),*]);
+ }
+
+ #[test]
+ fn check_paths() {
+ let whitelist = svec!["/a/specific/dir/name", "/a/specific", "/b/c"];
+
+ let perms = DenoPermissions::from_flags(&DenoFlags {
+ read_whitelist: whitelist.clone(),
+ write_whitelist: whitelist.clone(),
+ no_prompts: true,
+ ..Default::default()
+ });
+
+ // Inside of /a/specific and /a/specific/dir/name
+ assert!(perms.check_read("/a/specific/dir/name").is_ok());
+ assert!(perms.check_write("/a/specific/dir/name").is_ok());
+
+ // Inside of /a/specific but outside of /a/specific/dir/name
+ assert!(perms.check_read("/a/specific/dir").is_ok());
+ assert!(perms.check_write("/a/specific/dir").is_ok());
+
+ // Inside of /a/specific and /a/specific/dir/name
+ assert!(perms.check_read("/a/specific/dir/name/inner").is_ok());
+ assert!(perms.check_write("/a/specific/dir/name/inner").is_ok());
+
+ // Inside of /a/specific but outside of /a/specific/dir/name
+ assert!(perms.check_read("/a/specific/other/dir").is_ok());
+ assert!(perms.check_write("/a/specific/other/dir").is_ok());
+
+ // Exact match with /b/c
+ assert!(perms.check_read("/b/c").is_ok());
+ assert!(perms.check_write("/b/c").is_ok());
+
+ // Sub path within /b/c
+ assert!(perms.check_read("/b/c/sub/path").is_ok());
+ assert!(perms.check_write("/b/c/sub/path").is_ok());
+
+ // Inside of /b but outside of /b/c
+ assert!(perms.check_read("/b/e").is_err());
+ assert!(perms.check_write("/b/e").is_err());
+
+ // Inside of /a but outside of /a/specific
+ assert!(perms.check_read("/a/b").is_err());
+ assert!(perms.check_write("/a/b").is_err());
+ }
+
+ #[test]
+ fn test_check_net() {
+ let perms = DenoPermissions::from_flags(&DenoFlags {
+ net_whitelist: svec![
+ "localhost",
+ "deno.land",
+ "github.com:3000",
+ "127.0.0.1",
+ "172.16.0.2:8000"
+ ],
+ no_prompts: true,
+ ..Default::default()
+ });
+
+ let domain_tests = vec![
+ ("localhost:1234", true),
+ ("deno.land", true),
+ ("deno.land:3000", true),
+ ("deno.lands", false),
+ ("deno.lands:3000", false),
+ ("github.com:3000", true),
+ ("github.com", false),
+ ("github.com:2000", false),
+ ("github.net:3000", false),
+ ("127.0.0.1", true),
+ ("127.0.0.1:3000", true),
+ ("127.0.0.2", false),
+ ("127.0.0.2:3000", false),
+ ("172.16.0.2:8000", true),
+ ("172.16.0.2", false),
+ ("172.16.0.2:6000", false),
+ ("172.16.0.1:8000", false),
+ // Just some random hosts that should err
+ ("somedomain", false),
+ ("192.168.0.1", false),
+ ];
+
+ let url_tests = vec![
+ // Any protocol + port for localhost should be ok, since the whitelist entry doesn't specify a port
+ ("http://localhost", true),
+ ("https://localhost", true),
+ ("https://localhost:4443", true),
+ ("tcp://localhost:5000", true),
+ ("udp://localhost:6000", true),
+ // Correct domain + any port and protocol should be ok; incorrect domains shouldn't
+ ("https://deno.land/std/example/welcome.ts", true),
+ ("https://deno.land:3000/std/example/welcome.ts", true),
+ ("https://deno.lands/std/example/welcome.ts", false),
+ ("https://deno.lands:3000/std/example/welcome.ts", false),
+ // Correct domain + port should be ok; all other combinations should err
+ ("https://github.com:3000/denoland/deno", true),
+ ("https://github.com/denoland/deno", false),
+ ("https://github.com:2000/denoland/deno", false),
+ ("https://github.net:3000/denoland/deno", false),
+ // Correct ipv4 address + any port should be ok; others should err
+ ("tcp://127.0.0.1", true),
+ ("https://127.0.0.1", true),
+ ("tcp://127.0.0.1:3000", true),
+ ("https://127.0.0.1:3000", true),
+ ("tcp://127.0.0.2", false),
+ ("https://127.0.0.2", false),
+ ("tcp://127.0.0.2:3000", false),
+ ("https://127.0.0.2:3000", false),
+ // Correct address + port should be ok; all other combinations should err
+ ("tcp://172.16.0.2:8000", true),
+ ("https://172.16.0.2:8000", true),
+ ("tcp://172.16.0.2", false),
+ ("https://172.16.0.2", false),
+ ("tcp://172.16.0.2:6000", false),
+ ("https://172.16.0.2:6000", false),
+ ("tcp://172.16.0.1:8000", false),
+ ("https://172.16.0.1:8000", false),
+ ];
+
+ for (url_str, is_ok) in url_tests.iter() {
+ let u = url::Url::parse(url_str).unwrap();
+ assert_eq!(*is_ok, perms.check_net_url(&u).is_ok());
+ }
+
+ for (domain, is_ok) in domain_tests.iter() {
+ assert_eq!(*is_ok, perms.check_net(domain).is_ok());
+ }
+ }
+}
diff --git a/cli/progress.rs b/cli/progress.rs
new file mode 100644
index 000000000..c2fa7979f
--- /dev/null
+++ b/cli/progress.rs
@@ -0,0 +1,168 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use std::sync::Arc;
+use std::sync::Mutex;
+
+#[derive(Clone, Default)]
+pub struct Progress(Arc<Mutex<Inner>>);
+
+impl Progress {
+ pub fn new() -> Self {
+ Progress::default()
+ }
+
+ pub fn set_callback<F>(&self, f: F)
+ where
+ F: Fn(bool, usize, usize, &str, &str) + Send + Sync + 'static,
+ {
+ let mut s = self.0.lock().unwrap();
+ assert!(s.callback.is_none());
+ s.callback = Some(Arc::new(f));
+ }
+
+ /// Returns job counts: (complete, total)
+ pub fn progress(&self) -> (usize, usize) {
+ let s = self.0.lock().unwrap();
+ s.progress()
+ }
+
+ pub fn history(&self) -> Vec<String> {
+ let s = self.0.lock().unwrap();
+ s.job_names.clone()
+ }
+
+ pub fn add(&self, status: &str, name: &str) -> Job {
+ let mut s = self.0.lock().unwrap();
+ let id = s.job_names.len();
+ s.maybe_call_callback(
+ false,
+ s.complete,
+ s.job_names.len() + 1,
+ status,
+ name,
+ );
+ s.job_names.push(name.to_string());
+ Job {
+ id,
+ inner: self.0.clone(),
+ }
+ }
+
+ pub fn done(&self) {
+ let s = self.0.lock().unwrap();
+ s.maybe_call_callback(true, s.complete, s.job_names.len(), "", "");
+ }
+}
+
+type Callback = dyn Fn(bool, usize, usize, &str, &str) + Send + Sync;
+
+#[derive(Default)]
+struct Inner {
+ job_names: Vec<String>,
+ complete: usize,
+ callback: Option<Arc<Callback>>,
+}
+
+impl Inner {
+ pub fn maybe_call_callback(
+ &self,
+ done: bool,
+ complete: usize,
+ total: usize,
+ status: &str,
+ msg: &str,
+ ) {
+ if let Some(ref cb) = self.callback {
+ cb(done, complete, total, status, msg);
+ }
+ }
+
+ /// Returns job counts: (complete, total)
+ pub fn progress(&self) -> (usize, usize) {
+ let total = self.job_names.len();
+ (self.complete, total)
+ }
+}
+
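+/// Handle for a single in-flight job; dropping it marks the job complete and
+/// notifies the progress callback.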
+pub struct Job {
+ inner: Arc<Mutex<Inner>>,
+ id: usize,
+}
+
+impl Drop for Job {
+ fn drop(&mut self) {
+ let mut s = self.inner.lock().unwrap();
+ s.complete += 1;
+ let name = &s.job_names[self.id];
+ let (complete, total) = s.progress();
+ s.maybe_call_callback(false, complete, total, "", name);
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn progress() {
+ let p = Progress::new();
+ assert_eq!(p.progress(), (0, 0));
+ {
+ let _j1 = p.add("status", "hello");
+ assert_eq!(p.progress(), (0, 1));
+ }
+ assert_eq!(p.progress(), (1, 1));
+ {
+ let _j2 = p.add("status", "hello");
+ assert_eq!(p.progress(), (1, 2));
+ }
+ assert_eq!(p.progress(), (2, 2));
+ }
+
+ #[test]
+ fn history() {
+ let p = Progress::new();
+ let _a = p.add("status", "a");
+ let _b = p.add("status", "b");
+ assert_eq!(p.history(), vec!["a", "b"]);
+ }
+
+ #[test]
+ fn callback() {
+ let callback_history: Arc<Mutex<Vec<(usize, usize, String)>>> =
+ Arc::new(Mutex::new(Vec::new()));
+ {
+ let p = Progress::new();
+ let callback_history_ = callback_history.clone();
+
+ p.set_callback(move |_done, complete, total, _status, msg| {
+ // println!("callback: {}, {}, {}", complete, total, msg);
+ let mut h = callback_history_.lock().unwrap();
+ h.push((complete, total, String::from(msg)));
+ });
+ {
+ let _a = p.add("status", "a");
+ let _b = p.add("status", "b");
+ }
+ let _c = p.add("status", "c");
+ }
+
+ let h = callback_history.lock().unwrap();
+ assert_eq!(
+ h.to_vec(),
+ vec![
+ (0, 1, "a".to_string()),
+ (0, 2, "b".to_string()),
+ (1, 2, "b".to_string()),
+ (2, 2, "a".to_string()),
+ (2, 3, "c".to_string()),
+ (3, 3, "c".to_string()),
+ ]
+ );
+ }
+
+ #[test]
+ fn thread_safe() {
+ fn f<S: Send + Sync>(_: S) {}
+ f(Progress::new());
+ }
+}
diff --git a/cli/repl.rs b/cli/repl.rs
new file mode 100644
index 000000000..0cac6c4ea
--- /dev/null
+++ b/cli/repl.rs
@@ -0,0 +1,128 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::deno_dir::DenoDir;
+use deno::ErrBox;
+use rustyline;
+use std::path::PathBuf;
+
+#[cfg(not(windows))]
+use rustyline::Editor;
+
+// Work around the issue that on Windows, `struct Editor` does not implement the
+// `Send` trait, because it embeds a windows HANDLE which is a type alias for
+// *mut c_void. This value isn't actually a pointer and there's nothing that
+// can be mutated through it, so hack around it. TODO: a prettier solution.
+#[cfg(windows)]
+use std::ops::{Deref, DerefMut};
+
+#[cfg(windows)]
+struct Editor<T: rustyline::Helper> {
+ inner: rustyline::Editor<T>,
+}
+
+#[cfg(windows)]
+unsafe impl<T: rustyline::Helper> Send for Editor<T> {}
+
+#[cfg(windows)]
+impl<T: rustyline::Helper> Editor<T> {
+ pub fn new() -> Editor<T> {
+ Editor {
+ inner: rustyline::Editor::<T>::new(),
+ }
+ }
+}
+
+#[cfg(windows)]
+impl<T: rustyline::Helper> Deref for Editor<T> {
+ type Target = rustyline::Editor<T>;
+
+ fn deref(&self) -> &rustyline::Editor<T> {
+ &self.inner
+ }
+}
+
+#[cfg(windows)]
+impl<T: rustyline::Helper> DerefMut for Editor<T> {
+ fn deref_mut(&mut self) -> &mut rustyline::Editor<T> {
+ &mut self.inner
+ }
+}
+
+pub struct Repl {
+ editor: Editor<()>,
+ history_file: PathBuf,
+}
+
+impl Repl {
+ pub fn new(history_file: PathBuf) -> Self {
+ let mut repl = Self {
+ editor: Editor::<()>::new(),
+ history_file,
+ };
+
+ repl.load_history();
+ repl
+ }
+
+ fn load_history(&mut self) {
+ debug!("Loading REPL history: {:?}", self.history_file);
+ self
+ .editor
+ .load_history(&self.history_file.to_str().unwrap())
+ .map_err(|e| {
+ debug!("Unable to load history file: {:?} {}", self.history_file, e)
+ })
+ // ignore this error (e.g. it occurs on first load)
+ .unwrap_or(())
+ }
+
+ fn save_history(&mut self) -> Result<(), ErrBox> {
+ if !self.history_dir_exists() {
+ eprintln!(
+ "Unable to save REPL history: {:?} directory does not exist",
+ self.history_file
+ );
+ return Ok(());
+ }
+
+ self
+ .editor
+ .save_history(&self.history_file.to_str().unwrap())
+ .map(|_| debug!("Saved REPL history to: {:?}", self.history_file))
+ .map_err(|e| {
+ eprintln!("Unable to save REPL history: {:?} {}", self.history_file, e);
+ ErrBox::from(e)
+ })
+ }
+
+ fn history_dir_exists(&self) -> bool {
+ self
+ .history_file
+ .parent()
+ .map(|ref p| p.exists())
+ .unwrap_or(false)
+ }
+
+ pub fn readline(&mut self, prompt: &str) -> Result<String, ErrBox> {
+ self
+ .editor
+ .readline(&prompt)
+ .map(|line| {
+ self.editor.add_history_entry(line.clone());
+ line
+ })
+ .map_err(ErrBox::from)
+ // Forward error to TS side for processing
+ }
+}
+
+impl Drop for Repl {
+ fn drop(&mut self) {
+ self.save_history().unwrap();
+ }
+}
+
+pub fn history_path(dir: &DenoDir, history_file: &str) -> PathBuf {
+ let mut p: PathBuf = dir.root.clone();
+ p.push(history_file);
+ p
+}
diff --git a/cli/resolve_addr.rs b/cli/resolve_addr.rs
new file mode 100644
index 000000000..b783444d8
--- /dev/null
+++ b/cli/resolve_addr.rs
@@ -0,0 +1,132 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::deno_error;
+use deno::ErrBox;
+use futures::Async;
+use futures::Future;
+use futures::Poll;
+use std::net::SocketAddr;
+use std::net::ToSocketAddrs;
+
+/// Go-style network address parsing. Returns a future.
+/// Examples:
+/// "192.0.2.1:25"
+/// ":80"
+/// "[2001:db8::1]:80"
+/// "198.51.100.1:80"
+/// "deno.land:443"
+pub fn resolve_addr(address: &str) -> ResolveAddrFuture {
+ ResolveAddrFuture {
+ address: address.to_string(),
+ }
+}
+
+pub struct ResolveAddrFuture {
+ address: String,
+}
+
+impl Future for ResolveAddrFuture {
+ type Item = SocketAddr;
+ type Error = ErrBox;
+
+ fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
+ // The implementation of this is not actually async at the moment,
+ // however we intend to use async DNS resolution in the future and
+ // so we expose this as a future instead of Result.
+ match split(&self.address) {
+ None => Err(deno_error::invalid_address_syntax()),
+ Some(addr_port_pair) => {
+ // I absolutely despise the .to_socket_addrs() API.
+ let r = addr_port_pair.to_socket_addrs().map_err(ErrBox::from);
+
+ r.and_then(|mut iter| match iter.next() {
+ Some(a) => Ok(Async::Ready(a)),
+ None => panic!("There should be at least one result"),
+ })
+ }
+ }
+ }
+}
+
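+/// Splits an address of the form "host:port" into its parts; an empty host
+/// defaults to "0.0.0.0", and `None` is returned when the port is missing or
+/// not a valid u16. e.g. `split("deno.land:443") == Some(("deno.land", 443))`.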
+fn split(address: &str) -> Option<(&str, u16)> {
+ address.rfind(':').and_then(|i| {
+ let (a, p) = address.split_at(i);
+ // Default to 0.0.0.0 (any interface) if only the port is given. Example: ":80"
+ let addr = if !a.is_empty() { a } else { "0.0.0.0" };
+ // If this looks like an IPv6 address in brackets, e.g. "[2001:db8::1]",
+ // strip the brackets.
+ let addr = if addr.starts_with('[') && addr.ends_with(']') {
+ let l = addr.len() - 1;
+ addr.get(1..l).unwrap()
+ } else {
+ addr
+ };
+
+ let p = p.trim_start_matches(':');
+ match p.parse::<u16>() {
+ Err(_) => None,
+ Ok(port) => Some((addr, port)),
+ }
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use std::net::Ipv4Addr;
+ use std::net::Ipv6Addr;
+ use std::net::SocketAddrV4;
+ use std::net::SocketAddrV6;
+
+ #[test]
+ fn split1() {
+ assert_eq!(split("127.0.0.1:80"), Some(("127.0.0.1", 80)));
+ }
+
+ #[test]
+ fn split2() {
+ assert_eq!(split(":80"), Some(("0.0.0.0", 80)));
+ }
+
+ #[test]
+ fn split3() {
+ assert_eq!(split("no colon"), None);
+ }
+
+ #[test]
+ fn split4() {
+ assert_eq!(split("deno.land:443"), Some(("deno.land", 443)));
+ }
+
+ #[test]
+ fn split5() {
+ assert_eq!(split("[2001:db8::1]:8080"), Some(("2001:db8::1", 8080)));
+ }
+
+ #[test]
+ fn resolve_addr1() {
+ let expected =
+ SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(127, 0, 0, 1), 80));
+ let actual = resolve_addr("127.0.0.1:80").wait().unwrap();
+ assert_eq!(actual, expected);
+ }
+
+ #[test]
+ fn resolve_addr3() {
+ let expected =
+ SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(192, 0, 2, 1), 25));
+ let actual = resolve_addr("192.0.2.1:25").wait().unwrap();
+ assert_eq!(actual, expected);
+ }
+
+ #[test]
+ fn resolve_addr_ipv6() {
+ let expected = SocketAddr::V6(SocketAddrV6::new(
+ Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 1),
+ 8080,
+ 0,
+ 0,
+ ));
+ let actual = resolve_addr("[2001:db8::1]:8080").wait().unwrap();
+ assert_eq!(actual, expected);
+ }
+}
diff --git a/cli/resources.rs b/cli/resources.rs
new file mode 100644
index 000000000..fc4aa7eb5
--- /dev/null
+++ b/cli/resources.rs
@@ -0,0 +1,585 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+// Think of Resources as File Descriptors. They are integers that are allocated
+// by the privileged side of Deno to refer to various resources. The simplest
+// example are standard file system files and stdio - but there will be other
+// resources added in the future that might not correspond to operating system
+// level File Descriptors. To avoid confusion we call them "resources" not "file
+// descriptors". This module implements a global resource table. Ops (AKA
+// handlers) look up resources by their integer id here.
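+//
+// A typical flow (sketch): an op creates a resource and hands the rid to JS,
+// e.g. `let resource = resources::add_fs_file(file);` (where `resource.rid` is
+// sent to the JS side), and a later op turns the rid back into a handle with
+// `resources::lookup(rid)?` before reading, writing or calling `close()` on it.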
+
+use crate::deno_error;
+use crate::deno_error::bad_resource;
+use crate::http_body::HttpBody;
+use crate::repl::Repl;
+use crate::state::WorkerChannels;
+
+use deno::Buf;
+use deno::ErrBox;
+
+use futures;
+use futures::Future;
+use futures::Poll;
+use futures::Sink;
+use futures::Stream;
+use reqwest::r#async::Decoder as ReqwestDecoder;
+use std;
+use std::collections::BTreeMap;
+use std::io::{Error, Read, Seek, SeekFrom, Write};
+use std::net::{Shutdown, SocketAddr};
+use std::process::ExitStatus;
+use std::sync::atomic::AtomicUsize;
+use std::sync::atomic::Ordering;
+use std::sync::{Arc, Mutex};
+use tokio;
+use tokio::io::{AsyncRead, AsyncWrite};
+use tokio::net::TcpStream;
+use tokio::sync::mpsc;
+use tokio_process;
+use tokio_rustls::client::TlsStream;
+
+pub type ResourceId = u32; // Sometimes referred to as RID.
+
+// These store Deno's file descriptors. These are not necessarily the operating
+// system ones.
+type ResourceTable = BTreeMap<ResourceId, Repr>;
+
+#[cfg(not(windows))]
+use std::os::unix::io::FromRawFd;
+
+#[cfg(windows)]
+use std::os::windows::io::FromRawHandle;
+
+#[cfg(windows)]
+extern crate winapi;
+
+lazy_static! {
+ // Starts at 3 because stdio is [0-2].
+ static ref NEXT_RID: AtomicUsize = AtomicUsize::new(3);
+ static ref RESOURCE_TABLE: Mutex<ResourceTable> = Mutex::new({
+ let mut m = BTreeMap::new();
+ // TODO Load these lazily during lookup?
+ m.insert(0, Repr::Stdin(tokio::io::stdin()));
+
+ m.insert(1, Repr::Stdout({
+ #[cfg(not(windows))]
+ let stdout = unsafe { std::fs::File::from_raw_fd(1) };
+ #[cfg(windows)]
+ let stdout = unsafe {
+ std::fs::File::from_raw_handle(winapi::um::processenv::GetStdHandle(
+ winapi::um::winbase::STD_OUTPUT_HANDLE))
+ };
+ tokio::fs::File::from_std(stdout)
+ }));
+
+ m.insert(2, Repr::Stderr(tokio::io::stderr()));
+ m
+ });
+}
+
+// Internal representation of Resource.
+enum Repr {
+ Stdin(tokio::io::Stdin),
+ Stdout(tokio::fs::File),
+ Stderr(tokio::io::Stderr),
+ FsFile(tokio::fs::File),
+ // Since TcpListener might be closed while there is a pending accept task,
+ // we need to track the task so that when the listener is closed,
+ // this pending task could be notified and die.
+ // Currently TcpListener itself does not take care of this issue.
+ // See: https://github.com/tokio-rs/tokio/issues/846
+ TcpListener(tokio::net::TcpListener, Option<futures::task::Task>),
+ TcpStream(tokio::net::TcpStream),
+ TlsStream(Box<TlsStream<TcpStream>>),
+ HttpBody(HttpBody),
+ Repl(Arc<Mutex<Repl>>),
+ // Enum size is bounded by the largest variant.
+ // Use `Box` around large `Child` struct.
+ // https://rust-lang.github.io/rust-clippy/master/index.html#large_enum_variant
+ Child(Box<tokio_process::Child>),
+ ChildStdin(tokio_process::ChildStdin),
+ ChildStdout(tokio_process::ChildStdout),
+ ChildStderr(tokio_process::ChildStderr),
+ Worker(WorkerChannels),
+}
+
+/// If the given rid is open, this returns the type of resource, e.g. "worker".
+/// If the rid is closed or was never open, it returns None.
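+/// For example, `get_type(0)` returns `Some("stdin".to_string())` as long as
+/// rid 0 (stdin) has not been closed.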
+pub fn get_type(rid: ResourceId) -> Option<String> {
+ let table = RESOURCE_TABLE.lock().unwrap();
+ table.get(&rid).map(inspect_repr)
+}
+
+pub fn table_entries() -> Vec<(u32, String)> {
+ let table = RESOURCE_TABLE.lock().unwrap();
+
+ table
+ .iter()
+ .map(|(key, value)| (*key, inspect_repr(&value)))
+ .collect()
+}
+
+#[test]
+fn test_table_entries() {
+ let mut entries = table_entries();
+ entries.sort();
+ assert_eq!(entries[0], (0, String::from("stdin")));
+ assert_eq!(entries[1], (1, String::from("stdout")));
+ assert_eq!(entries[2], (2, String::from("stderr")));
+}
+
+fn inspect_repr(repr: &Repr) -> String {
+ let h_repr = match repr {
+ Repr::Stdin(_) => "stdin",
+ Repr::Stdout(_) => "stdout",
+ Repr::Stderr(_) => "stderr",
+ Repr::FsFile(_) => "fsFile",
+ Repr::TcpListener(_, _) => "tcpListener",
+ Repr::TcpStream(_) => "tcpStream",
+ Repr::TlsStream(_) => "tlsStream",
+ Repr::HttpBody(_) => "httpBody",
+ Repr::Repl(_) => "repl",
+ Repr::Child(_) => "child",
+ Repr::ChildStdin(_) => "childStdin",
+ Repr::ChildStdout(_) => "childStdout",
+ Repr::ChildStderr(_) => "childStderr",
+ Repr::Worker(_) => "worker",
+ };
+
+ String::from(h_repr)
+}
+
+// Abstract async file interface.
+// Ideally, on unix, when a Resource wraps an OS file descriptor, the rid will be
+// the same number as that descriptor.
+#[derive(Clone, Debug)]
+pub struct Resource {
+ pub rid: ResourceId,
+}
+
+impl Resource {
+ // TODO Should it return a Resource instead of net::TcpStream?
+ pub fn poll_accept(&mut self) -> Poll<(TcpStream, SocketAddr), Error> {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ let maybe_repr = table.get_mut(&self.rid);
+ match maybe_repr {
+ None => Err(std::io::Error::new(
+ std::io::ErrorKind::Other,
+ "Listener has been closed",
+ )),
+ Some(repr) => match repr {
+ Repr::TcpListener(ref mut s, _) => s.poll_accept(),
+ _ => panic!("Cannot accept"),
+ },
+ }
+ }
+
+ /// Track the current task (for TcpListener resource).
+ /// Returns an error if another task is already being tracked.
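+ /// A typical accept sequence (sketch) is: call `track_task()` before polling,
+ /// then `poll_accept()`, and finally `untrack_task()` once the accept has
+ /// resolved, so that `close()` can wake any pending accept in between.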
+ pub fn track_task(&mut self) -> Result<(), std::io::Error> {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ // Only track if is TcpListener.
+ if let Some(Repr::TcpListener(_, t)) = table.get_mut(&self.rid) {
+ // Currently, we only allow tracking a single accept task for a listener.
+ // This might be changed in the future with multiple workers.
+ // Caveat: TcpListener by itself also only tracks one accept task at a time.
+ // See https://github.com/tokio-rs/tokio/issues/846#issuecomment-454208883
+ if t.is_some() {
+ return Err(std::io::Error::new(
+ std::io::ErrorKind::Other,
+ "Another accept task is ongoing",
+ ));
+ }
+ t.replace(futures::task::current());
+ }
+ Ok(())
+ }
+
+ /// Stop tracking a task (for TcpListener resource).
+ /// Happens when the task is done and thus no further tracking is needed.
+ pub fn untrack_task(&mut self) {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ // Only untrack if is TcpListener.
+ if let Some(Repr::TcpListener(_, t)) = table.get_mut(&self.rid) {
+ if t.is_some() {
+ t.take();
+ }
+ }
+ }
+
+ // close(2) is done by dropping the value. Therefore we just need to remove
+ // the resource from the RESOURCE_TABLE.
+ pub fn close(&self) {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ let r = table.remove(&self.rid).unwrap();
+ // If TcpListener, we must kill all pending accepts!
+ if let Repr::TcpListener(_, Some(t)) = r {
+ // Call notify on the tracked task, so that they would error out.
+ t.notify();
+ }
+ }
+
+ pub fn shutdown(&mut self, how: Shutdown) -> Result<(), ErrBox> {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ let repr = table.get_mut(&self.rid).ok_or_else(bad_resource)?;
+
+ match repr {
+ Repr::TcpStream(ref mut f) => {
+ TcpStream::shutdown(f, how).map_err(ErrBox::from)
+ }
+ _ => Err(bad_resource()),
+ }
+ }
+}
+
+impl Read for Resource {
+ fn read(&mut self, _buf: &mut [u8]) -> std::io::Result<usize> {
+ unimplemented!();
+ }
+}
+
+/// `DenoAsyncRead` is the same as the `tokio_io::AsyncRead` trait
+/// but uses an `ErrBox` error instead of `std::io::Error`
+pub trait DenoAsyncRead {
+ fn poll_read(&mut self, buf: &mut [u8]) -> Poll<usize, ErrBox>;
+}
+
+impl DenoAsyncRead for Resource {
+ fn poll_read(&mut self, buf: &mut [u8]) -> Poll<usize, ErrBox> {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ let repr = table.get_mut(&self.rid).ok_or_else(bad_resource)?;
+
+ let r = match repr {
+ Repr::FsFile(ref mut f) => f.poll_read(buf),
+ Repr::Stdin(ref mut f) => f.poll_read(buf),
+ Repr::TcpStream(ref mut f) => f.poll_read(buf),
+ Repr::TlsStream(ref mut f) => f.poll_read(buf),
+ Repr::HttpBody(ref mut f) => f.poll_read(buf),
+ Repr::ChildStdout(ref mut f) => f.poll_read(buf),
+ Repr::ChildStderr(ref mut f) => f.poll_read(buf),
+ _ => {
+ return Err(bad_resource());
+ }
+ };
+
+ r.map_err(ErrBox::from)
+ }
+}
+
+impl Write for Resource {
+ fn write(&mut self, _buf: &[u8]) -> std::io::Result<usize> {
+ unimplemented!()
+ }
+
+ fn flush(&mut self) -> std::io::Result<()> {
+ unimplemented!()
+ }
+}
+
+/// `DenoAsyncWrite` is the same as the `tokio_io::AsyncWrite` trait
+/// but uses an `ErrBox` error instead of `std::io::Error`
+pub trait DenoAsyncWrite {
+ fn poll_write(&mut self, buf: &[u8]) -> Poll<usize, ErrBox>;
+
+ fn shutdown(&mut self) -> Poll<(), ErrBox>;
+}
+
+impl DenoAsyncWrite for Resource {
+ fn poll_write(&mut self, buf: &[u8]) -> Poll<usize, ErrBox> {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ let repr = table.get_mut(&self.rid).ok_or_else(bad_resource)?;
+
+ let r = match repr {
+ Repr::FsFile(ref mut f) => f.poll_write(buf),
+ Repr::Stdout(ref mut f) => f.poll_write(buf),
+ Repr::Stderr(ref mut f) => f.poll_write(buf),
+ Repr::TcpStream(ref mut f) => f.poll_write(buf),
+ Repr::TlsStream(ref mut f) => f.poll_write(buf),
+ Repr::ChildStdin(ref mut f) => f.poll_write(buf),
+ _ => {
+ return Err(bad_resource());
+ }
+ };
+
+ r.map_err(ErrBox::from)
+ }
+
+ fn shutdown(&mut self) -> futures::Poll<(), ErrBox> {
+ unimplemented!()
+ }
+}
+
+fn new_rid() -> ResourceId {
+ let next_rid = NEXT_RID.fetch_add(1, Ordering::SeqCst);
+ next_rid as ResourceId
+}
+
+pub fn add_fs_file(fs_file: tokio::fs::File) -> Resource {
+ let rid = new_rid();
+ let mut tg = RESOURCE_TABLE.lock().unwrap();
+ let r = tg.insert(rid, Repr::FsFile(fs_file));
+ assert!(r.is_none());
+ Resource { rid }
+}
+
+pub fn add_tcp_listener(listener: tokio::net::TcpListener) -> Resource {
+ let rid = new_rid();
+ let mut tg = RESOURCE_TABLE.lock().unwrap();
+ let r = tg.insert(rid, Repr::TcpListener(listener, None));
+ assert!(r.is_none());
+ Resource { rid }
+}
+
+pub fn add_tcp_stream(stream: tokio::net::TcpStream) -> Resource {
+ let rid = new_rid();
+ let mut tg = RESOURCE_TABLE.lock().unwrap();
+ let r = tg.insert(rid, Repr::TcpStream(stream));
+ assert!(r.is_none());
+ Resource { rid }
+}
+
+pub fn add_tls_stream(stream: TlsStream<TcpStream>) -> Resource {
+ let rid = new_rid();
+ let mut tg = RESOURCE_TABLE.lock().unwrap();
+ let r = tg.insert(rid, Repr::TlsStream(Box::new(stream)));
+ assert!(r.is_none());
+ Resource { rid }
+}
+
+pub fn add_reqwest_body(body: ReqwestDecoder) -> Resource {
+ let rid = new_rid();
+ let mut tg = RESOURCE_TABLE.lock().unwrap();
+ let body = HttpBody::from(body);
+ let r = tg.insert(rid, Repr::HttpBody(body));
+ assert!(r.is_none());
+ Resource { rid }
+}
+
+pub fn add_repl(repl: Repl) -> Resource {
+ let rid = new_rid();
+ let mut tg = RESOURCE_TABLE.lock().unwrap();
+ let r = tg.insert(rid, Repr::Repl(Arc::new(Mutex::new(repl))));
+ assert!(r.is_none());
+ Resource { rid }
+}
+
+pub fn add_worker(wc: WorkerChannels) -> Resource {
+ let rid = new_rid();
+ let mut tg = RESOURCE_TABLE.lock().unwrap();
+ let r = tg.insert(rid, Repr::Worker(wc));
+ assert!(r.is_none());
+ Resource { rid }
+}
+
+/// Post message to worker as a host or privileged overlord
+pub fn post_message_to_worker(
+ rid: ResourceId,
+ buf: Buf,
+) -> futures::sink::Send<mpsc::Sender<Buf>> {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ let maybe_repr = table.get_mut(&rid);
+ match maybe_repr {
+ Some(Repr::Worker(ref mut wc)) => {
+ // unwrap here is incorrect, but doing it anyway
+ wc.0.clone().send(buf)
+ }
+ // TODO: replace this panic with `bad_resource`
+ _ => panic!("bad resource"), // futures::future::err(bad_resource()).into(),
+ }
+}
+
+pub struct WorkerReceiver {
+ rid: ResourceId,
+}
+
+// Future that resolves to the next message received from the worker, if any.
+impl Future for WorkerReceiver {
+ type Item = Option<Buf>;
+ type Error = ErrBox;
+
+ fn poll(&mut self) -> Poll<Option<Buf>, ErrBox> {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ let maybe_repr = table.get_mut(&self.rid);
+ match maybe_repr {
+ Some(Repr::Worker(ref mut wc)) => wc.1.poll().map_err(ErrBox::from),
+ _ => Err(bad_resource()),
+ }
+ }
+}
+
+pub fn get_message_from_worker(rid: ResourceId) -> WorkerReceiver {
+ WorkerReceiver { rid }
+}
+
+pub struct WorkerReceiverStream {
+ rid: ResourceId,
+}
+
+// Stream of messages received from the worker.
+impl Stream for WorkerReceiverStream {
+ type Item = Buf;
+ type Error = ErrBox;
+
+ fn poll(&mut self) -> Poll<Option<Buf>, ErrBox> {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ let maybe_repr = table.get_mut(&self.rid);
+ match maybe_repr {
+ Some(Repr::Worker(ref mut wc)) => wc.1.poll().map_err(ErrBox::from),
+ _ => Err(bad_resource()),
+ }
+ }
+}
+
+pub fn get_message_stream_from_worker(rid: ResourceId) -> WorkerReceiverStream {
+ WorkerReceiverStream { rid }
+}
+
+pub struct ChildResources {
+ pub child_rid: ResourceId,
+ pub stdin_rid: Option<ResourceId>,
+ pub stdout_rid: Option<ResourceId>,
+ pub stderr_rid: Option<ResourceId>,
+}
+
+pub fn add_child(mut c: tokio_process::Child) -> ChildResources {
+ let child_rid = new_rid();
+ let mut tg = RESOURCE_TABLE.lock().unwrap();
+
+ let mut resources = ChildResources {
+ child_rid,
+ stdin_rid: None,
+ stdout_rid: None,
+ stderr_rid: None,
+ };
+
+ if c.stdin().is_some() {
+ let stdin = c.stdin().take().unwrap();
+ let rid = new_rid();
+ let r = tg.insert(rid, Repr::ChildStdin(stdin));
+ assert!(r.is_none());
+ resources.stdin_rid = Some(rid);
+ }
+ if c.stdout().is_some() {
+ let stdout = c.stdout().take().unwrap();
+ let rid = new_rid();
+ let r = tg.insert(rid, Repr::ChildStdout(stdout));
+ assert!(r.is_none());
+ resources.stdout_rid = Some(rid);
+ }
+ if c.stderr().is_some() {
+ let stderr = c.stderr().take().unwrap();
+ let rid = new_rid();
+ let r = tg.insert(rid, Repr::ChildStderr(stderr));
+ assert!(r.is_none());
+ resources.stderr_rid = Some(rid);
+ }
+
+ let r = tg.insert(child_rid, Repr::Child(Box::new(c)));
+ assert!(r.is_none());
+
+ resources
+}
+
+pub struct ChildStatus {
+ rid: ResourceId,
+}
+
+// Invert the dumbness that tokio_process causes by making Child itself a future.
+impl Future for ChildStatus {
+ type Item = ExitStatus;
+ type Error = ErrBox;
+
+ fn poll(&mut self) -> Poll<ExitStatus, ErrBox> {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ let maybe_repr = table.get_mut(&self.rid);
+ match maybe_repr {
+ Some(Repr::Child(ref mut child)) => child.poll().map_err(ErrBox::from),
+ _ => Err(bad_resource()),
+ }
+ }
+}
+
+pub fn child_status(rid: ResourceId) -> Result<ChildStatus, ErrBox> {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ let maybe_repr = table.get_mut(&rid);
+ match maybe_repr {
+ Some(Repr::Child(ref mut _child)) => Ok(ChildStatus { rid }),
+ _ => Err(bad_resource()),
+ }
+}
+
+pub fn get_repl(rid: ResourceId) -> Result<Arc<Mutex<Repl>>, ErrBox> {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ let maybe_repr = table.get_mut(&rid);
+ match maybe_repr {
+ Some(Repr::Repl(ref mut r)) => Ok(r.clone()),
+ _ => Err(bad_resource()),
+ }
+}
+
+// TODO: revamp this after the following lands:
+// https://github.com/tokio-rs/tokio/pull/785
+pub fn get_file(rid: ResourceId) -> Result<std::fs::File, ErrBox> {
+ let mut table = RESOURCE_TABLE.lock().unwrap();
+ // We take ownership of File here.
+ // It is put back below while still holding the lock.
+ let maybe_repr = table.remove(&rid);
+
+ match maybe_repr {
+ Some(Repr::FsFile(r)) => {
+ // Trait Clone not implemented on tokio::fs::File,
+ // so convert to std File first.
+ let std_file = r.into_std();
+ // Create a copy and immediately put back.
+ // We don't want to block other resource ops.
+ // try_clone() would yield a copy containing the same
+ // underlying fd, so operations on the copy would also
+ // affect the one in resource table, and we don't need
+ // to write back.
+ let maybe_std_file_copy = std_file.try_clone();
+ // Insert the entry back with the same rid.
+ table.insert(rid, Repr::FsFile(tokio::fs::File::from_std(std_file)));
+
+ maybe_std_file_copy.map_err(ErrBox::from)
+ }
+ _ => Err(bad_resource()),
+ }
+}
+
+pub fn lookup(rid: ResourceId) -> Result<Resource, ErrBox> {
+ debug!("resource lookup {}", rid);
+ let table = RESOURCE_TABLE.lock().unwrap();
+ table
+ .get(&rid)
+ .ok_or_else(bad_resource)
+ .map(|_| Resource { rid })
+}
+
+pub fn seek(
+ resource: Resource,
+ offset: i32,
+ whence: u32,
+) -> Box<dyn Future<Item = (), Error = ErrBox> + Send> {
+ // Translate seek mode to Rust repr.
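+ // (0 = SEEK_SET / Start, 1 = SEEK_CUR / Current, 2 = SEEK_END / End)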
+ let seek_from = match whence {
+ 0 => SeekFrom::Start(offset as u64),
+ 1 => SeekFrom::Current(i64::from(offset)),
+ 2 => SeekFrom::End(i64::from(offset)),
+ _ => {
+ return Box::new(futures::future::err(
+ deno_error::DenoError::new(
+ deno_error::ErrorKind::InvalidSeekMode,
+ format!("Invalid seek mode: {}", whence),
+ )
+ .into(),
+ ));
+ }
+ };
+
+ match get_file(resource.rid) {
+ Ok(mut file) => Box::new(futures::future::lazy(move || {
+ let result = file.seek(seek_from).map(|_| {}).map_err(ErrBox::from);
+ futures::future::result(result)
+ })),
+ Err(err) => Box::new(futures::future::err(err)),
+ }
+}
diff --git a/cli/shell.rs b/cli/shell.rs
new file mode 100644
index 000000000..aaf29b4e0
--- /dev/null
+++ b/cli/shell.rs
@@ -0,0 +1,491 @@
+// This file was forked from Cargo on 2019.05.29:
+// https://github.com/rust-lang/cargo/blob/edd874/src/cargo/core/shell.rs
+// Cargo is MIT licensed:
+// https://github.com/rust-lang/cargo/blob/edd874/LICENSE-MIT
+
+#![allow(dead_code)]
+#![allow(irrefutable_let_patterns)]
+
+use std::fmt;
+use std::io::prelude::*;
+
+use atty;
+use deno::ErrBox;
+use termcolor::Color::{Cyan, Green, Red, Yellow};
+use termcolor::{self, Color, ColorSpec, StandardStream, WriteColor};
+
+/// The requested verbosity of output.
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum Verbosity {
+ Verbose,
+ Normal,
+ Quiet,
+}
+
+/// An abstraction around a `Write`able object that remembers preferences for output verbosity and
+/// color.
+pub struct Shell {
+ /// The `Write`able object, either with or without color support (represented by
+ /// different enum variants)
+ err: ShellOut,
+ /// How verbose messages should be
+ verbosity: Verbosity,
+ /// Flag that indicates the current line needs to be cleared before
+ /// printing. Used when a progress bar is currently displayed.
+ needs_clear: bool,
+}
+
+impl fmt::Debug for Shell {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.err {
+ /*
+ ShellOut::Write(_) => f
+ .debug_struct("Shell")
+ .field("verbosity", &self.verbosity)
+ .finish(),
+ */
+ ShellOut::Stream { color_choice, .. } => f
+ .debug_struct("Shell")
+ .field("verbosity", &self.verbosity)
+ .field("color_choice", &color_choice)
+ .finish(),
+ }
+ }
+}
+
+/// A `Write`able object, either with or without color support
+enum ShellOut {
+ /// A plain write object without color support
+ // TODO(ry) Disabling this type of output because it makes Shell
+ // not thread safe and thus not includable in ThreadSafeState.
+ // But I think we will want this in the future.
+ //Write(Box<dyn Write>),
+ /// Color-enabled stdio, with information on whether color should be used
+ Stream {
+ stream: StandardStream,
+ tty: bool,
+ color_choice: ColorChoice,
+ },
+}
+
+/// Whether messages should use color output
+#[derive(Debug, PartialEq, Clone, Copy)]
+pub enum ColorChoice {
+ /// Force color output
+ Always,
+ /// Force disable color output
+ Never,
+ /// Intelligently guess whether to use color output
+ CargoAuto,
+}
+
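+// Typical usage (sketch; the status text and message are just examples):
+//   let mut shell = Shell::new();
+//   shell.status("Compile", "file:///dev/example.ts")?; // green status prefix
+//   shell.warn("something looks off")?;                 // yellow "warning:" prefix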
+impl Shell {
+ /// Creates a new shell (color choice and verbosity), defaulting to 'auto' color and verbose
+ /// output.
+ pub fn new() -> Shell {
+ Shell {
+ err: ShellOut::Stream {
+ stream: StandardStream::stderr(
+ ColorChoice::CargoAuto.to_termcolor_color_choice(),
+ ),
+ color_choice: ColorChoice::CargoAuto,
+ tty: atty::is(atty::Stream::Stderr),
+ },
+ verbosity: Verbosity::Verbose,
+ needs_clear: false,
+ }
+ }
+
+ /*
+ /// Creates a shell from a plain writable object, with no color, and max verbosity.
+ pub fn from_write(out: Box<dyn Write>) -> Shell {
+ Shell {
+ err: ShellOut::Write(out),
+ verbosity: Verbosity::Verbose,
+ needs_clear: false,
+ }
+ }
+ */
+
+ /// Prints a message, where the status will have `color` color, and can be justified. The
+ /// message follows without color.
+ fn print(
+ &mut self,
+ status: &dyn fmt::Display,
+ message: Option<&dyn fmt::Display>,
+ color: Color,
+ justified: bool,
+ ) -> Result<(), ErrBox> {
+ match self.verbosity {
+ Verbosity::Quiet => Ok(()),
+ _ => {
+ if self.needs_clear {
+ self.err_erase_line();
+ }
+ self.err.print(status, message, color, justified)
+ }
+ }
+ }
+
+ /// Sets whether the next print should clear the current line.
+ pub fn set_needs_clear(&mut self, needs_clear: bool) {
+ self.needs_clear = needs_clear;
+ }
+
+ /// Returns `true` if the `needs_clear` flag is unset.
+ pub fn is_cleared(&self) -> bool {
+ !self.needs_clear
+ }
+
+ /// Returns the width of the terminal in spaces, if any.
+ pub fn err_width(&self) -> Option<usize> {
+ match self.err {
+ ShellOut::Stream { tty: true, .. } => imp::stderr_width(),
+ _ => None,
+ }
+ }
+
+ /// Returns `true` if stderr is a tty.
+ pub fn is_err_tty(&self) -> bool {
+ match self.err {
+ ShellOut::Stream { tty, .. } => tty,
+ // _ => false,
+ }
+ }
+
+ /// Gets a reference to the underlying writer.
+ pub fn err(&mut self) -> &mut dyn Write {
+ if self.needs_clear {
+ self.err_erase_line();
+ }
+ self.err.as_write()
+ }
+
+ /// Erase from cursor to end of line.
+ pub fn err_erase_line(&mut self) {
+ if let ShellOut::Stream { tty: true, .. } = self.err {
+ imp::err_erase_line(self);
+ self.needs_clear = false;
+ }
+ }
+
+ /// Shortcut to right-align and color green a status message.
+ pub fn status<T, U>(&mut self, status: T, message: U) -> Result<(), ErrBox>
+ where
+ T: fmt::Display,
+ U: fmt::Display,
+ {
+ self.print(&status, Some(&message), Green, false)
+ }
+
+ pub fn status_header<T>(&mut self, status: T) -> Result<(), ErrBox>
+ where
+ T: fmt::Display,
+ {
+ self.print(&status, None, Cyan, true)
+ }
+
+ /// Shortcut to right-align a status message.
+ pub fn status_with_color<T, U>(
+ &mut self,
+ status: T,
+ message: U,
+ color: Color,
+ ) -> Result<(), ErrBox>
+ where
+ T: fmt::Display,
+ U: fmt::Display,
+ {
+ self.print(&status, Some(&message), color, true)
+ }
+
+ /// Runs the callback only if we are in verbose mode.
+ pub fn verbose<F>(&mut self, mut callback: F) -> Result<(), ErrBox>
+ where
+ F: FnMut(&mut Shell) -> Result<(), ErrBox>,
+ {
+ match self.verbosity {
+ Verbosity::Verbose => callback(self),
+ _ => Ok(()),
+ }
+ }
+
+ /// Runs the callback if we are not in verbose mode.
+ pub fn concise<F>(&mut self, mut callback: F) -> Result<(), ErrBox>
+ where
+ F: FnMut(&mut Shell) -> Result<(), ErrBox>,
+ {
+ match self.verbosity {
+ Verbosity::Verbose => Ok(()),
+ _ => callback(self),
+ }
+ }
+
+ /// Prints a red 'error' message.
+ pub fn error<T: fmt::Display>(&mut self, message: T) -> Result<(), ErrBox> {
+ self.print(&"error:", Some(&message), Red, false)
+ }
+
+ /// Prints an amber 'warning' message.
+ pub fn warn<T: fmt::Display>(&mut self, message: T) -> Result<(), ErrBox> {
+ match self.verbosity {
+ Verbosity::Quiet => Ok(()),
+ _ => self.print(&"warning:", Some(&message), Yellow, false),
+ }
+ }
+
+ /// Updates the verbosity of the shell.
+ pub fn set_verbosity(&mut self, verbosity: Verbosity) {
+ self.verbosity = verbosity;
+ }
+
+ /// Gets the verbosity of the shell.
+ pub fn verbosity(&self) -> Verbosity {
+ self.verbosity
+ }
+
+ /// Updates the color choice (always, never, or auto) from a string.
+ pub fn set_color_choice(
+ &mut self,
+ color: Option<&str>,
+ ) -> Result<(), ErrBox> {
+ if let ShellOut::Stream {
+ ref mut stream,
+ ref mut color_choice,
+ ..
+ } = self.err
+ {
+ let cfg = match color {
+ Some("always") => ColorChoice::Always,
+ Some("never") => ColorChoice::Never,
+
+ Some("auto") | None => ColorChoice::CargoAuto,
+
+ Some(arg) => panic!(
+ "argument for --color must be auto, always, or \
+ never, but found `{}`",
+ arg
+ ),
+ };
+ *color_choice = cfg;
+ *stream = StandardStream::stderr(cfg.to_termcolor_color_choice());
+ }
+ Ok(())
+ }
+
+ /// Gets the current color choice.
+ ///
+ /// If we are not using a color stream, this will always return `Never`, even if the color
+ /// choice has been set to something else.
+ pub fn color_choice(&self) -> ColorChoice {
+ match self.err {
+ ShellOut::Stream { color_choice, .. } => color_choice,
+ // ShellOut::Write(_) => ColorChoice::Never,
+ }
+ }
+
+ /// Whether the shell supports color.
+ pub fn supports_color(&self) -> bool {
+ match &self.err {
+ // ShellOut::Write(_) => false,
+ ShellOut::Stream { stream, .. } => stream.supports_color(),
+ }
+ }
+
+ /// Prints a message and translates ANSI escape codes into console colors.
+ pub fn print_ansi(&mut self, message: &[u8]) -> Result<(), ErrBox> {
+ if self.needs_clear {
+ self.err_erase_line();
+ }
+ #[cfg(windows)]
+ {
+ if let ShellOut::Stream { stream, .. } = &mut self.err {
+ ::fwdansi::write_ansi(stream, message)?;
+ return Ok(());
+ }
+ }
+ self.err().write_all(message)?;
+ Ok(())
+ }
+}
+
+impl Default for Shell {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl ShellOut {
+ /// Prints out a message with a status. The status comes first, and is bold plus the given
+ /// color. The status can be justified, in which case the max width that will right align is
+ /// 12 chars.
+ fn print(
+ &mut self,
+ status: &dyn fmt::Display,
+ message: Option<&dyn fmt::Display>,
+ color: Color,
+ justified: bool,
+ ) -> Result<(), ErrBox> {
+ match *self {
+ ShellOut::Stream { ref mut stream, .. } => {
+ stream.reset()?;
+ stream
+ .set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?;
+ if justified {
+ write!(stream, "{:>12}", status)?;
+ } else {
+ write!(stream, "{}", status)?;
+ }
+ stream.reset()?;
+ match message {
+ Some(message) => writeln!(stream, " {}", message)?,
+ None => write!(stream, " ")?,
+ }
+ } /*
+ ShellOut::Write(ref mut w) => {
+ if justified {
+ write!(w, "{:>12}", status)?;
+ } else {
+ write!(w, "{}", status)?;
+ }
+ match message {
+ Some(message) => writeln!(w, " {}", message)?,
+ None => write!(w, " ")?,
+ }
+ }
+ */
+ }
+ Ok(())
+ }
+
+ /// Gets this object as a `io::Write`.
+ fn as_write(&mut self) -> &mut dyn Write {
+ match *self {
+ ShellOut::Stream { ref mut stream, .. } => stream,
+ // ShellOut::Write(ref mut w) => w,
+ }
+ }
+}
+
+impl ColorChoice {
+ /// Converts our color choice to termcolor's version.
+ fn to_termcolor_color_choice(self) -> termcolor::ColorChoice {
+ match self {
+ ColorChoice::Always => termcolor::ColorChoice::Always,
+ ColorChoice::Never => termcolor::ColorChoice::Never,
+ ColorChoice::CargoAuto => {
+ if atty::is(atty::Stream::Stderr) {
+ termcolor::ColorChoice::Auto
+ } else {
+ termcolor::ColorChoice::Never
+ }
+ }
+ }
+ }
+}
+
+#[cfg(any(target_os = "linux", target_os = "macos"))]
+mod imp {
+ use std::mem;
+
+ use libc;
+
+ use super::Shell;
+
+ pub fn stderr_width() -> Option<usize> {
+ unsafe {
+ let mut winsize: libc::winsize = mem::zeroed();
+ if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) < 0 {
+ return None;
+ }
+ if winsize.ws_col > 0 {
+ Some(winsize.ws_col as usize)
+ } else {
+ None
+ }
+ }
+ }
+
+ pub fn err_erase_line(shell: &mut Shell) {
+ // This is the "EL - Erase in Line" sequence. It clears from the cursor
+ // to the end of line.
+ // https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_sequences
+ let _ = shell.err.as_write().write_all(b"\x1B[K");
+ }
+}
+
+#[cfg(all(unix, not(any(target_os = "linux", target_os = "macos"))))]
+mod imp {
+ pub(super) use super::default_err_erase_line as err_erase_line;
+
+ pub fn stderr_width() -> Option<usize> {
+ None
+ }
+}
+
+#[cfg(windows)]
+mod imp {
+ use std::{cmp, mem, ptr};
+ use winapi::um::fileapi::*;
+ use winapi::um::handleapi::*;
+ use winapi::um::processenv::*;
+ use winapi::um::winbase::*;
+ use winapi::um::wincon::*;
+ use winapi::um::winnt::*;
+
+ pub(super) use super::default_err_erase_line as err_erase_line;
+
+ pub fn stderr_width() -> Option<usize> {
+ unsafe {
+ let stderr = GetStdHandle(STD_ERROR_HANDLE);
+ let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
+ if GetConsoleScreenBufferInfo(stderr, &mut csbi) != 0 {
+ return Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize);
+ }
+
+ // On mintty/msys/cygwin based terminals, the above fails with
+ // INVALID_HANDLE_VALUE. Use an alternate method which works
+ // in that case as well.
+ let h = CreateFileA(
+ "CONOUT$\0".as_ptr() as *const CHAR,
+ GENERIC_READ | GENERIC_WRITE,
+ FILE_SHARE_READ | FILE_SHARE_WRITE,
+ ptr::null_mut(),
+ OPEN_EXISTING,
+ 0,
+ ptr::null_mut(),
+ );
+ if h == INVALID_HANDLE_VALUE {
+ return None;
+ }
+
+ let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
+ let rc = GetConsoleScreenBufferInfo(h, &mut csbi);
+ CloseHandle(h);
+ if rc != 0 {
+ let width = (csbi.srWindow.Right - csbi.srWindow.Left) as usize;
+ // Unfortunately cygwin/mintty does not set the size of the
+ // backing console to match the actual window size. This
+ // always reports a size of 80 or 120 (not sure what
+ // determines that). Use a conservative max of 60 which should
+ // work in most circumstances. ConEmu does some magic to
+ // resize the console correctly, but there's no reasonable way
+ // to detect which kind of terminal we are running in, or if
+ // GetConsoleScreenBufferInfo returns accurate information.
+ return Some(cmp::min(60, width));
+ }
+ None
+ }
+ }
+}
+
+#[cfg(any(
+ all(unix, not(any(target_os = "linux", target_os = "macos"))),
+ windows
+))]
+fn default_err_erase_line(shell: &mut Shell) {
+ if let Some(max_width) = imp::stderr_width() {
+ let blank = " ".repeat(max_width);
+ drop(write!(shell.err.as_write(), "{}\r", blank));
+ }
+}
diff --git a/cli/signal.rs b/cli/signal.rs
new file mode 100644
index 000000000..22aa90be3
--- /dev/null
+++ b/cli/signal.rs
@@ -0,0 +1,16 @@
+use deno::ErrBox;
+
+#[cfg(unix)]
+pub fn kill(pid: i32, signo: i32) -> Result<(), ErrBox> {
+ use nix::sys::signal::{kill as unix_kill, Signal};
+ use nix::unistd::Pid;
+ let sig = Signal::from_c_int(signo)?;
+ unix_kill(Pid::from_raw(pid), Option::Some(sig)).map_err(ErrBox::from)
+}
+
+#[cfg(not(unix))]
+pub fn kill(_pid: i32, _signal: i32) -> Result<(), ErrBox> {
+ // NOOP
+ // TODO: implement this for windows
+ Ok(())
+}
diff --git a/cli/source_maps.rs b/cli/source_maps.rs
new file mode 100644
index 000000000..95df48a31
--- /dev/null
+++ b/cli/source_maps.rs
@@ -0,0 +1,458 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+//! This module provides functions to remap a `deno::V8Exception` based on a source map.
+use deno::StackFrame;
+use deno::V8Exception;
+use serde_json;
+use source_map_mappings::parse_mappings;
+use source_map_mappings::Bias;
+use source_map_mappings::Mappings;
+use std::collections::HashMap;
+use std::str;
+
+pub trait SourceMapGetter {
+ /// Returns the raw source map file.
+ fn get_source_map(&self, script_name: &str) -> Option<Vec<u8>>;
+ fn get_source_line(&self, script_name: &str, line: usize) -> Option<String>;
+}
+
+/// Cached filename lookups. The value can be None if a previous lookup failed
+/// to find a SourceMap.
+pub type CachedMaps = HashMap<String, Option<SourceMap>>;
+
+pub struct SourceMap {
+ mappings: Mappings,
+ sources: Vec<String>,
+}
+
+impl SourceMap {
+ /// Take a JSON string and attempt to decode it, returning an optional
+ /// instance of `SourceMap`.
+ fn from_json(json_str: &str) -> Option<Self> {
+ // Ugly. Maybe use serde_derive.
+ match serde_json::from_str::<serde_json::Value>(json_str) {
+ Ok(serde_json::Value::Object(map)) => match map["mappings"].as_str() {
+ None => None,
+ Some(mappings_str) => {
+ match parse_mappings::<()>(mappings_str.as_bytes()) {
+ Err(_) => None,
+ Ok(mappings) => {
+ if !map["sources"].is_array() {
+ return None;
+ }
+ let sources_val = map["sources"].as_array().unwrap();
+ let mut sources = Vec::<String>::new();
+
+ for source_val in sources_val {
+ match source_val.as_str() {
+ None => return None,
+ Some(source) => {
+ sources.push(source.to_string());
+ }
+ }
+ }
+
+ Some(SourceMap { sources, mappings })
+ }
+ }
+ }
+ },
+ _ => None,
+ }
+ }
+}
+
+// The bundle does not get built for 'cargo check', so we don't embed the
+// bundle source map. The built-in source map is the source map for the main
+// JavaScript bundle, which is then used to create the snapshot. Runtime stack
+// traces can contain positions within the bundle which we will map to the
+// original Deno TypeScript code.
+#[cfg(feature = "check-only")]
+fn builtin_source_map(_: &str) -> Option<Vec<u8>> {
+ None
+}
+
+#[cfg(not(feature = "check-only"))]
+fn builtin_source_map(script_name: &str) -> Option<Vec<u8>> {
+ if script_name.ends_with("CLI_SNAPSHOT.js") {
+ Some(crate::js::CLI_SNAPSHOT_MAP.to_vec())
+ } else if script_name.ends_with("COMPILER_SNAPSHOT.js") {
+ Some(crate::js::COMPILER_SNAPSHOT_MAP.to_vec())
+ } else {
+ None
+ }
+}
+
+/// Apply a source map to a V8Exception, returning a V8Exception where the filenames,
+/// the lines and the columns point to their original source location, not their
+/// transpiled location if applicable.
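+///
+/// The `getter` can be any type implementing `SourceMapGetter`, e.g. the
+/// `MockSourceMapGetter` used in the tests at the bottom of this file.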
+pub fn apply_source_map<G: SourceMapGetter>(
+ v8_exception: &V8Exception,
+ getter: &G,
+) -> V8Exception {
+ let mut mappings_map: CachedMaps = HashMap::new();
+
+ let mut frames = Vec::<StackFrame>::new();
+ for frame in &v8_exception.frames {
+ let f = frame_apply_source_map(&frame, &mut mappings_map, getter);
+ frames.push(f);
+ }
+
+ let (script_resource_name, line_number, start_column) =
+ get_maybe_orig_position(
+ v8_exception.script_resource_name.clone(),
+ v8_exception.line_number,
+ v8_exception.start_column,
+ &mut mappings_map,
+ getter,
+ );
+ // Keep end_column the same distance from start_column as in the original
+ // exception, because the exact code point is sometimes not available in the
+ // source map.
+ let end_column = match v8_exception.end_column {
+ Some(ec) => {
+ if let Some(sc) = start_column {
+ Some(ec - (v8_exception.start_column.unwrap() - sc))
+ } else {
+ None
+ }
+ }
+ _ => None,
+ };
+ // The source line in the original file may differ from the generated one, so
+ // re-fetch it from the getter when we have a mapped line number.
+ let source_line = match line_number {
+ Some(ln)
+ if v8_exception.source_line.is_some()
+ && script_resource_name.is_some() =>
+ {
+ getter.get_source_line(
+ &v8_exception.script_resource_name.clone().unwrap(),
+ ln as usize,
+ )
+ }
+ _ => v8_exception.source_line.clone(),
+ };
+
+ V8Exception {
+ message: v8_exception.message.clone(),
+ frames,
+ error_level: v8_exception.error_level,
+ source_line,
+ script_resource_name,
+ line_number,
+ start_column,
+ end_column,
+ // These are difficult to map to their original position and they are not
+ // currently used in any output, so we don't remap them.
+ start_position: v8_exception.start_position,
+ end_position: v8_exception.end_position,
+ }
+}
+
+fn frame_apply_source_map<G: SourceMapGetter>(
+ frame: &StackFrame,
+ mappings_map: &mut CachedMaps,
+ getter: &G,
+) -> StackFrame {
+ let (script_name, line, column) = get_orig_position(
+ frame.script_name.to_string(),
+ frame.line,
+ frame.column,
+ mappings_map,
+ getter,
+ );
+
+ StackFrame {
+ script_name,
+ function_name: frame.function_name.clone(),
+ line,
+ column,
+ is_eval: frame.is_eval,
+ is_constructor: frame.is_constructor,
+ is_wasm: frame.is_wasm,
+ }
+}
+
+fn get_maybe_orig_position<G: SourceMapGetter>(
+ script_name: Option<String>,
+ line: Option<i64>,
+ column: Option<i64>,
+ mappings_map: &mut CachedMaps,
+ getter: &G,
+) -> (Option<String>, Option<i64>, Option<i64>) {
+ match (script_name, line, column) {
+ (Some(script_name_v), Some(line_v), Some(column_v)) => {
+ let (script_name, line, column) = get_orig_position(
+ script_name_v,
+ line_v - 1,
+ column_v,
+ mappings_map,
+ getter,
+ );
+ (Some(script_name), Some(line), Some(column))
+ }
+ _ => (None, None, None),
+ }
+}
+
+pub fn get_orig_position<G: SourceMapGetter>(
+ script_name: String,
+ line: i64,
+ column: i64,
+ mappings_map: &mut CachedMaps,
+ getter: &G,
+) -> (String, i64, i64) {
+ let maybe_sm = get_mappings(&script_name, mappings_map, getter);
+ let default_pos = (script_name, line, column);
+
+ match maybe_sm {
+ None => default_pos,
+ Some(sm) => match sm.mappings.original_location_for(
+ line as u32,
+ column as u32,
+ Bias::default(),
+ ) {
+ None => default_pos,
+ Some(mapping) => match &mapping.original {
+ None => default_pos,
+ Some(original) => {
+ let orig_source = sm.sources[original.source as usize].clone();
+ (
+ orig_source,
+ i64::from(original.original_line),
+ i64::from(original.original_column),
+ )
+ }
+ },
+ },
+ }
+}
+
+fn get_mappings<'a, G: SourceMapGetter>(
+ script_name: &str,
+ mappings_map: &'a mut CachedMaps,
+ getter: &G,
+) -> &'a Option<SourceMap> {
+ mappings_map
+ .entry(script_name.to_string())
+ .or_insert_with(|| parse_map_string(script_name, getter))
+}
+
+// TODO(kitsonk) parsed source maps should probably be cached in state in
+// the module meta data.
+fn parse_map_string<G: SourceMapGetter>(
+ script_name: &str,
+ getter: &G,
+) -> Option<SourceMap> {
+ builtin_source_map(script_name)
+ .or_else(|| getter.get_source_map(script_name))
+ .and_then(|raw_source_map| {
+ SourceMap::from_json(str::from_utf8(&raw_source_map).unwrap())
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ struct MockSourceMapGetter {}
+
+ impl SourceMapGetter for MockSourceMapGetter {
+ fn get_source_map(&self, script_name: &str) -> Option<Vec<u8>> {
+ let s = match script_name {
+ "foo_bar.ts" => r#"{"sources": ["foo_bar.ts"], "mappings":";;;IAIA,OAAO,CAAC,GAAG,CAAC,qBAAqB,EAAE,EAAE,CAAC,OAAO,CAAC,CAAC;IAC/C,OAAO,CAAC,GAAG,CAAC,eAAe,EAAE,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,WAAW,EAAE,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;IAE3C,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC"}"#,
+ "bar_baz.ts" => r#"{"sources": ["bar_baz.ts"], "mappings":";;;IAEA,CAAC,KAAK,IAAI,EAAE;QACV,MAAM,GAAG,GAAG,sDAAa,OAAO,2BAAC,CAAC;QAClC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IACnB,CAAC,CAAC,EAAE,CAAC;IAEQ,QAAA,GAAG,GAAG,KAAK,CAAC;IAEzB,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC"}"#,
+ _ => return None,
+ };
+ Some(s.as_bytes().to_owned())
+ }
+
+ fn get_source_line(
+ &self,
+ script_name: &str,
+ line: usize,
+ ) -> Option<String> {
+ let s = match script_name {
+ "foo_bar.ts" => vec![
+ "console.log('foo');",
+ "console.log('foo');",
+ "console.log('foo');",
+ "console.log('foo');",
+ "console.log('foo');",
+ ],
+ _ => return None,
+ };
+ if s.len() > line {
+ Some(s[line].to_string())
+ } else {
+ None
+ }
+ }
+ }
+
+ fn error1() -> V8Exception {
+ V8Exception {
+ message: "Error: foo bar".to_string(),
+ source_line: None,
+ script_resource_name: None,
+ line_number: None,
+ start_position: None,
+ end_position: None,
+ error_level: None,
+ start_column: None,
+ end_column: None,
+ frames: vec![
+ StackFrame {
+ line: 4,
+ column: 16,
+ script_name: "foo_bar.ts".to_string(),
+ function_name: "foo".to_string(),
+ is_eval: false,
+ is_constructor: false,
+ is_wasm: false,
+ },
+ StackFrame {
+ line: 5,
+ column: 20,
+ script_name: "bar_baz.ts".to_string(),
+ function_name: "qat".to_string(),
+ is_eval: false,
+ is_constructor: false,
+ is_wasm: false,
+ },
+ StackFrame {
+ line: 1,
+ column: 1,
+ script_name: "deno_main.js".to_string(),
+ function_name: "".to_string(),
+ is_eval: false,
+ is_constructor: false,
+ is_wasm: false,
+ },
+ ],
+ }
+ }
+
+ #[test]
+ fn v8_exception_apply_source_map_1() {
+ let e = error1();
+ let getter = MockSourceMapGetter {};
+ let actual = apply_source_map(&e, &getter);
+ let expected = V8Exception {
+ message: "Error: foo bar".to_string(),
+ source_line: None,
+ script_resource_name: None,
+ line_number: None,
+ start_position: None,
+ end_position: None,
+ error_level: None,
+ start_column: None,
+ end_column: None,
+ frames: vec![
+ StackFrame {
+ line: 5,
+ column: 12,
+ script_name: "foo_bar.ts".to_string(),
+ function_name: "foo".to_string(),
+ is_eval: false,
+ is_constructor: false,
+ is_wasm: false,
+ },
+ StackFrame {
+ line: 4,
+ column: 14,
+ script_name: "bar_baz.ts".to_string(),
+ function_name: "qat".to_string(),
+ is_eval: false,
+ is_constructor: false,
+ is_wasm: false,
+ },
+ StackFrame {
+ line: 1,
+ column: 1,
+ script_name: "deno_main.js".to_string(),
+ function_name: "".to_string(),
+ is_eval: false,
+ is_constructor: false,
+ is_wasm: false,
+ },
+ ],
+ };
+ assert_eq!(actual, expected);
+ }
+
+ #[test]
+ fn v8_exception_apply_source_map_2() {
+ let e = V8Exception {
+ message: "TypeError: baz".to_string(),
+ source_line: None,
+ script_resource_name: None,
+ line_number: None,
+ start_position: None,
+ end_position: None,
+ error_level: None,
+ start_column: None,
+ end_column: None,
+ frames: vec![StackFrame {
+ line: 11,
+ column: 12,
+ script_name: "CLI_SNAPSHOT.js".to_string(),
+ function_name: "setLogDebug".to_string(),
+ is_eval: false,
+ is_constructor: false,
+ is_wasm: false,
+ }],
+ };
+ let getter = MockSourceMapGetter {};
+ let actual = apply_source_map(&e, &getter);
+ assert_eq!(actual.message, "TypeError: baz");
+ // Because this is accessing the live bundle, this test might be more fragile
+ assert_eq!(actual.frames.len(), 1);
+ assert!(actual.frames[0].script_name.ends_with("/window.ts"));
+ }
+
+ #[test]
+ fn v8_exception_apply_source_map_line() {
+ let e = V8Exception {
+ message: "TypeError: baz".to_string(),
+ source_line: Some("foo".to_string()),
+ script_resource_name: Some("foo_bar.ts".to_string()),
+ line_number: Some(4),
+ start_position: None,
+ end_position: None,
+ error_level: None,
+ start_column: Some(16),
+ end_column: None,
+ frames: vec![],
+ };
+ let getter = MockSourceMapGetter {};
+ let actual = apply_source_map(&e, &getter);
+ assert_eq!(actual.source_line, Some("console.log('foo');".to_string()));
+ }
+
+ #[test]
+ fn source_map_from_json() {
+ let json = r#"{"version":3,"file":"error_001.js","sourceRoot":"","sources":["file:///Users/rld/src/deno/tests/error_001.ts"],"names":[],"mappings":"AAAA,SAAS,GAAG;IACV,MAAM,KAAK,CAAC,KAAK,CAAC,CAAC;AACrB,CAAC;AAED,SAAS,GAAG;IACV,GAAG,EAAE,CAAC;AACR,CAAC;AAED,GAAG,EAAE,CAAC"}"#;
+ let sm = SourceMap::from_json(json).unwrap();
+ assert_eq!(sm.sources.len(), 1);
+ assert_eq!(
+ sm.sources[0],
+ "file:///Users/rld/src/deno/tests/error_001.ts"
+ );
+ let mapping = sm
+ .mappings
+ .original_location_for(1, 10, Bias::default())
+ .unwrap();
+ assert_eq!(mapping.generated_line, 1);
+ assert_eq!(mapping.generated_column, 10);
+ assert_eq!(
+ mapping.original,
+ Some(source_map_mappings::OriginalLocation {
+ source: 0,
+ original_line: 1,
+ original_column: 8,
+ name: None
+ })
+ );
+ }
+}
diff --git a/cli/startup_data.rs b/cli/startup_data.rs
new file mode 100644
index 000000000..7dc2fdb60
--- /dev/null
+++ b/cli/startup_data.rs
@@ -0,0 +1,59 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+#[cfg(feature = "no-snapshot-init")]
+use deno::Script;
+
+use crate::js::CLI_SNAPSHOT;
+use crate::js::COMPILER_SNAPSHOT;
+use deno::StartupData;
+
+#[cfg(feature = "no-snapshot-init")]
+pub fn deno_isolate_init() -> StartupData<'static> {
+ debug!("Deno isolate init without snapshots.");
+ #[cfg(not(feature = "check-only"))]
+ let source =
+ include_str!(concat!(env!("GN_OUT_DIR"), "/gen/cli/bundle/main.js"));
+ #[cfg(feature = "check-only")]
+ let source = "";
+
+ StartupData::Script(Script {
+ filename: "gen/cli/bundle/main.js",
+ source,
+ })
+}
+
+#[cfg(not(feature = "no-snapshot-init"))]
+pub fn deno_isolate_init() -> StartupData<'static> {
+ debug!("Deno isolate init with snapshots.");
+ #[cfg(not(feature = "check-only"))]
+ let data = CLI_SNAPSHOT;
+ #[cfg(feature = "check-only")]
+ let data = b"";
+
+ StartupData::Snapshot(data)
+}
+
+#[cfg(feature = "no-snapshot-init")]
+pub fn compiler_isolate_init() -> StartupData<'static> {
+ debug!("Compiler isolate init without snapshots.");
+ #[cfg(not(feature = "check-only"))]
+ let source =
+ include_str!(concat!(env!("GN_OUT_DIR"), "/gen/cli/bundle/compiler.js"));
+ #[cfg(feature = "check-only")]
+ let source = "";
+
+ StartupData::Script(Script {
+ filename: "gen/cli/bundle/compiler.js",
+ source,
+ })
+}
+
+#[cfg(not(feature = "no-snapshot-init"))]
+pub fn compiler_isolate_init() -> StartupData<'static> {
+ debug!("Deno isolate init with snapshots.");
+ #[cfg(not(feature = "check-only"))]
+ let data = COMPILER_SNAPSHOT;
+ #[cfg(feature = "check-only")]
+ let data = b"";
+
+ StartupData::Snapshot(data)
+}
diff --git a/cli/state.rs b/cli/state.rs
new file mode 100644
index 000000000..6f2c6db8f
--- /dev/null
+++ b/cli/state.rs
@@ -0,0 +1,433 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::compilers::CompiledModule;
+use crate::compilers::JsCompiler;
+use crate::compilers::JsonCompiler;
+use crate::compilers::TsCompiler;
+use crate::deno_dir;
+use crate::deno_error::permission_denied;
+use crate::file_fetcher::SourceFileFetcher;
+use crate::flags;
+use crate::global_timer::GlobalTimer;
+use crate::import_map::ImportMap;
+use crate::msg;
+use crate::ops::JsonOp;
+use crate::permissions::DenoPermissions;
+use crate::progress::Progress;
+use crate::resources;
+use crate::resources::ResourceId;
+use crate::worker::Worker;
+use deno::Buf;
+use deno::CoreOp;
+use deno::ErrBox;
+use deno::Loader;
+use deno::ModuleSpecifier;
+use deno::Op;
+use deno::PinnedBuf;
+use futures::future::Shared;
+use futures::Future;
+use rand::rngs::StdRng;
+use rand::SeedableRng;
+use serde_json::Value;
+use std;
+use std::collections::HashMap;
+use std::env;
+use std::ops::Deref;
+use std::str;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Arc;
+use std::sync::Mutex;
+use std::time::Instant;
+use tokio::sync::mpsc as async_mpsc;
+
+pub type WorkerSender = async_mpsc::Sender<Buf>;
+pub type WorkerReceiver = async_mpsc::Receiver<Buf>;
+pub type WorkerChannels = (WorkerSender, WorkerReceiver);
+pub type UserWorkerTable = HashMap<ResourceId, Shared<Worker>>;
+
+#[derive(Default)]
+pub struct Metrics {
+ pub ops_dispatched: AtomicUsize,
+ pub ops_completed: AtomicUsize,
+ pub bytes_sent_control: AtomicUsize,
+ pub bytes_sent_data: AtomicUsize,
+ pub bytes_received: AtomicUsize,
+ pub resolve_count: AtomicUsize,
+ pub compiler_starts: AtomicUsize,
+}
+
+/// Isolate cannot be passed between threads but ThreadSafeState can.
+/// ThreadSafeState satisfies Send and Sync. So any state that needs to be
+/// accessed outside the main V8 thread should be inside ThreadSafeState.
+pub struct ThreadSafeState(Arc<State>);
+
+#[cfg_attr(feature = "cargo-clippy", allow(stutter))]
+pub struct State {
+ pub modules: Arc<Mutex<deno::Modules>>,
+ pub main_module: Option<ModuleSpecifier>,
+ pub dir: deno_dir::DenoDir,
+ pub argv: Vec<String>,
+ pub permissions: DenoPermissions,
+ pub flags: flags::DenoFlags,
+ /// When flags contains a `.import_map_path` option, the content of the
+ /// import map file will be resolved and set.
+ pub import_map: Option<ImportMap>,
+ pub metrics: Metrics,
+ pub worker_channels: Mutex<WorkerChannels>,
+ pub global_timer: Mutex<GlobalTimer>,
+ pub workers: Mutex<UserWorkerTable>,
+ pub start_time: Instant,
+ /// A reference to this worker's resource.
+ pub resource: resources::Resource,
+ /// Reference to global progress bar.
+ pub progress: Progress,
+ pub seeded_rng: Option<Mutex<StdRng>>,
+
+ pub file_fetcher: SourceFileFetcher,
+ pub js_compiler: JsCompiler,
+ pub json_compiler: JsonCompiler,
+ pub ts_compiler: TsCompiler,
+
+ pub include_deno_namespace: bool,
+}
+
+impl Clone for ThreadSafeState {
+ fn clone(&self) -> Self {
+ ThreadSafeState(self.0.clone())
+ }
+}
+
+impl Deref for ThreadSafeState {
+ type Target = Arc<State>;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl ThreadSafeState {
+ // TODO: better name welcome
+ /// Wrap core `OpDispatcher` to collect metrics.
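+ /// For example (illustrative sketch): `let wrapped = state.cli_op(raw_dispatcher);`
+ /// where `raw_dispatcher` is any `Fn(&[u8], Option<PinnedBuf>) -> CoreOp` and
+ /// `wrapped` has the same signature but also records dispatch/completion metrics.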
+ pub fn cli_op<D>(
+ &self,
+ dispatcher: D,
+ ) -> impl Fn(&[u8], Option<PinnedBuf>) -> CoreOp
+ where
+ D: Fn(&[u8], Option<PinnedBuf>) -> CoreOp,
+ {
+ let state = self.clone();
+
+ move |control: &[u8], zero_copy: Option<PinnedBuf>| -> CoreOp {
+ let bytes_sent_control = control.len();
+ let bytes_sent_zero_copy =
+ zero_copy.as_ref().map(|b| b.len()).unwrap_or(0);
+
+ let op = dispatcher(control, zero_copy);
+ state.metrics_op_dispatched(bytes_sent_control, bytes_sent_zero_copy);
+
+ match op {
+ Op::Sync(buf) => {
+ state.metrics_op_completed(buf.len());
+ Op::Sync(buf)
+ }
+ Op::Async(fut) => {
+ let state = state.clone();
+ let result_fut = Box::new(fut.map(move |buf: Buf| {
+ state.clone().metrics_op_completed(buf.len());
+ buf
+ }));
+ Op::Async(result_fut)
+ }
+ }
+ }
+ }
+
+ /// This is a special function that provides the `state` argument to the dispatcher.
+ ///
+ /// NOTE: This only works with the JSON dispatcher.
+ /// This is a band-aid for the transition to the `Isolate.register_op` API, as most
+ /// of our ops require the `state` argument.
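+ /// For example (illustrative sketch, the op name is hypothetical):
+ /// `let op = state.stateful_op(op_read_dir);` where `op_read_dir` is a
+ /// `Fn(&ThreadSafeState, Value, Option<PinnedBuf>) -> Result<JsonOp, ErrBox>`.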
+ pub fn stateful_op<D>(
+ &self,
+ dispatcher: D,
+ ) -> impl Fn(Value, Option<PinnedBuf>) -> Result<JsonOp, ErrBox>
+ where
+ D: Fn(&ThreadSafeState, Value, Option<PinnedBuf>) -> Result<JsonOp, ErrBox>,
+ {
+ let state = self.clone();
+
+ move |args: Value, zero_copy: Option<PinnedBuf>| -> Result<JsonOp, ErrBox> {
+ dispatcher(&state, args, zero_copy)
+ }
+ }
+}
+
+impl Loader for ThreadSafeState {
+ fn resolve(
+ &self,
+ specifier: &str,
+ referrer: &str,
+ is_main: bool,
+ is_dyn_import: bool,
+ ) -> Result<ModuleSpecifier, ErrBox> {
+ if !is_main {
+ if let Some(import_map) = &self.import_map {
+ let result = import_map.resolve(specifier, referrer)?;
+ if let Some(r) = result {
+ return Ok(r);
+ }
+ }
+ }
+ let module_specifier =
+ ModuleSpecifier::resolve_import(specifier, referrer)?;
+
+ if is_dyn_import {
+ self.check_dyn_import(&module_specifier)?;
+ }
+
+ Ok(module_specifier)
+ }
+
+ /// Given an absolute url, load its source code.
+ fn load(
+ &self,
+ module_specifier: &ModuleSpecifier,
+ ) -> Box<deno::SourceCodeInfoFuture> {
+ self.metrics.resolve_count.fetch_add(1, Ordering::SeqCst);
+ let module_url_specified = module_specifier.to_string();
+ Box::new(self.fetch_compiled_module(module_specifier).map(
+ |compiled_module| deno::SourceCodeInfo {
+ // Real module name, might be different from initial specifier
+ // due to redirections.
+ code: compiled_module.code,
+ module_url_specified,
+ module_url_found: compiled_module.name,
+ },
+ ))
+ }
+}
+
+impl ThreadSafeState {
+ pub fn new(
+ flags: flags::DenoFlags,
+ argv_rest: Vec<String>,
+ progress: Progress,
+ include_deno_namespace: bool,
+ ) -> Result<Self, ErrBox> {
+ let custom_root = env::var("DENO_DIR").map(String::into).ok();
+
+ let (worker_in_tx, worker_in_rx) = async_mpsc::channel::<Buf>(1);
+ let (worker_out_tx, worker_out_rx) = async_mpsc::channel::<Buf>(1);
+ let internal_channels = (worker_out_tx, worker_in_rx);
+ let external_channels = (worker_in_tx, worker_out_rx);
+ let resource = resources::add_worker(external_channels);
+
+ let dir = deno_dir::DenoDir::new(custom_root)?;
+
+ let file_fetcher = SourceFileFetcher::new(
+ dir.deps_cache.clone(),
+ progress.clone(),
+ !flags.reload,
+ flags.no_fetch,
+ )?;
+
+ let ts_compiler = TsCompiler::new(
+ file_fetcher.clone(),
+ dir.gen_cache.clone(),
+ !flags.reload,
+ flags.config_path.clone(),
+ )?;
+
+ let main_module: Option<ModuleSpecifier> = if argv_rest.len() <= 1 {
+ None
+ } else {
+ let root_specifier = argv_rest[1].clone();
+ Some(ModuleSpecifier::resolve_url_or_path(&root_specifier)?)
+ };
+
+ let import_map: Option<ImportMap> = match &flags.import_map_path {
+ None => None,
+ Some(file_path) => Some(ImportMap::load(file_path)?),
+ };
+
+ let mut seeded_rng = None;
+ if let Some(seed) = flags.seed {
+ seeded_rng = Some(Mutex::new(StdRng::seed_from_u64(seed)));
+ };
+
+ let modules = Arc::new(Mutex::new(deno::Modules::new()));
+
+ let state = State {
+ main_module,
+ modules,
+ dir,
+ argv: argv_rest,
+ permissions: DenoPermissions::from_flags(&flags),
+ flags,
+ import_map,
+ metrics: Metrics::default(),
+ worker_channels: Mutex::new(internal_channels),
+ global_timer: Mutex::new(GlobalTimer::new()),
+ workers: Mutex::new(UserWorkerTable::new()),
+ start_time: Instant::now(),
+ resource,
+ progress,
+ seeded_rng,
+ file_fetcher,
+ ts_compiler,
+ js_compiler: JsCompiler {},
+ json_compiler: JsonCompiler {},
+ include_deno_namespace,
+ };
+
+ Ok(ThreadSafeState(Arc::new(state)))
+ }
+
+ pub fn fetch_compiled_module(
+ self: &Self,
+ module_specifier: &ModuleSpecifier,
+ ) -> impl Future<Item = CompiledModule, Error = ErrBox> {
+ let state_ = self.clone();
+
+ self
+ .file_fetcher
+ .fetch_source_file_async(&module_specifier)
+ .and_then(move |out| match out.media_type {
+ msg::MediaType::Unknown => {
+ state_.js_compiler.compile_async(state_.clone(), &out)
+ }
+ msg::MediaType::Json => {
+ state_.json_compiler.compile_async(state_.clone(), &out)
+ }
+ msg::MediaType::TypeScript
+ | msg::MediaType::TSX
+ | msg::MediaType::JSX => {
+ state_.ts_compiler.compile_async(state_.clone(), &out)
+ }
+ msg::MediaType::JavaScript => {
+ if state_.ts_compiler.compile_js {
+ state_.ts_compiler.compile_async(state_.clone(), &out)
+ } else {
+ state_.js_compiler.compile_async(state_.clone(), &out)
+ }
+ }
+ })
+ }
+
+ /// Read main module from argv
+ pub fn main_module(&self) -> Option<ModuleSpecifier> {
+ match &self.main_module {
+ Some(module_specifier) => Some(module_specifier.clone()),
+ None => None,
+ }
+ }
+
+ #[inline]
+ pub fn check_read(&self, filename: &str) -> Result<(), ErrBox> {
+ self.permissions.check_read(filename)
+ }
+
+ #[inline]
+ pub fn check_write(&self, filename: &str) -> Result<(), ErrBox> {
+ self.permissions.check_write(filename)
+ }
+
+ #[inline]
+ pub fn check_env(&self) -> Result<(), ErrBox> {
+ self.permissions.check_env()
+ }
+
+ #[inline]
+ pub fn check_net(&self, host_and_port: &str) -> Result<(), ErrBox> {
+ self.permissions.check_net(host_and_port)
+ }
+
+ #[inline]
+ pub fn check_net_url(&self, url: &url::Url) -> Result<(), ErrBox> {
+ self.permissions.check_net_url(url)
+ }
+
+ #[inline]
+ pub fn check_run(&self) -> Result<(), ErrBox> {
+ self.permissions.check_run()
+ }
+
+ pub fn check_dyn_import(
+ self: &Self,
+ module_specifier: &ModuleSpecifier,
+ ) -> Result<(), ErrBox> {
+ let u = module_specifier.as_url();
+ match u.scheme() {
+ "http" | "https" => {
+ self.check_net_url(u)?;
+ Ok(())
+ }
+ "file" => {
+ let filename = u
+ .to_file_path()
+ .unwrap()
+ .into_os_string()
+ .into_string()
+ .unwrap();
+ self.check_read(&filename)?;
+ Ok(())
+ }
+ _ => Err(permission_denied()),
+ }
+ }
+
+ #[cfg(test)]
+ pub fn mock(argv: Vec<String>) -> ThreadSafeState {
+ ThreadSafeState::new(
+ flags::DenoFlags::default(),
+ argv,
+ Progress::new(),
+ true,
+ )
+ .unwrap()
+ }
+
+ pub fn metrics_op_dispatched(
+ &self,
+ bytes_sent_control: usize,
+ bytes_sent_data: usize,
+ ) {
+ self.metrics.ops_dispatched.fetch_add(1, Ordering::SeqCst);
+ self
+ .metrics
+ .bytes_sent_control
+ .fetch_add(bytes_sent_control, Ordering::SeqCst);
+ self
+ .metrics
+ .bytes_sent_data
+ .fetch_add(bytes_sent_data, Ordering::SeqCst);
+ }
+
+ pub fn metrics_op_completed(&self, bytes_received: usize) {
+ self.metrics.ops_completed.fetch_add(1, Ordering::SeqCst);
+ self
+ .metrics
+ .bytes_received
+ .fetch_add(bytes_received, Ordering::SeqCst);
+ }
+}
+
+#[test]
+fn thread_safe() {
+ fn f<S: Send + Sync>(_: S) {}
+ f(ThreadSafeState::mock(vec![
+ String::from("./deno"),
+ String::from("hello.js"),
+ ]));
+}
+
+#[test]
+fn import_map_given_for_repl() {
+ let _result = ThreadSafeState::new(
+ flags::DenoFlags {
+ import_map_path: Some("import_map.json".to_string()),
+ ..flags::DenoFlags::default()
+ },
+ vec![String::from("./deno")],
+ Progress::new(),
+ true,
+ );
+}
diff --git a/cli/test_util.rs b/cli/test_util.rs
new file mode 100644
index 000000000..e10c0af7c
--- /dev/null
+++ b/cli/test_util.rs
@@ -0,0 +1,77 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+
+// TODO(ry) Make this file test-only. Somehow it's very difficult to export
+// methods to tests/integration_tests.rs and tests/tty_tests.rs if this
+// is enabled...
+// #![cfg(test)]
+
+use std::path::PathBuf;
+use std::process::Child;
+use std::process::Command;
+use std::sync::Mutex;
+use std::sync::MutexGuard;
+
+lazy_static! {
+ static ref GUARD: Mutex<()> = Mutex::new(());
+}
+
+pub fn root_path() -> PathBuf {
+ PathBuf::from(concat!(env!("CARGO_MANIFEST_DIR"), "/.."))
+}
+
+pub fn target_dir() -> PathBuf {
+ let current_exe = std::env::current_exe().unwrap();
+ let target_dir = current_exe.parent().unwrap().parent().unwrap();
+ println!("target_dir {}", target_dir.display());
+ target_dir.into()
+}
+
+pub fn deno_exe_path() -> PathBuf {
+ // Something like /Users/rld/src/deno/target/debug/deno
+ let mut p = target_dir().join("deno");
+ if cfg!(windows) {
+ p.set_extension("exe");
+ }
+ p
+}
+
+pub struct HttpServerGuard<'a> {
+ #[allow(dead_code)]
+ g: MutexGuard<'a, ()>,
+ child: Child,
+}
+
+impl<'a> Drop for HttpServerGuard<'a> {
+ fn drop(&mut self) {
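+ // Kill the spawned http_server.py if it is still running; panic if it has
+ // already exited or cannot be queried.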
+ match self.child.try_wait() {
+ Ok(None) => {
+ self.child.kill().expect("failed to kill http_server.py");
+ }
+ Ok(Some(status)) => {
+ panic!("http_server.py exited unexpectedly {}", status)
+ }
+ Err(e) => panic!("http_server.py err {}", e),
+ }
+ }
+}
+
+/// Starts tools/http_server.py. When the returned guard is dropped, the
+/// server will be killed.
+pub fn http_server<'a>() -> HttpServerGuard<'a> {
+ // TODO(ry) Allow tests to use the http server in parallel.
+ let g = GUARD.lock().unwrap();
+
+ println!("tools/http_server.py starting...");
+ let child = Command::new("python")
+ .current_dir(root_path())
+ .arg("tools/http_server.py")
+ .spawn()
+ .expect("failed to execute child");
+
+ // Wait 2 seconds for the server to come up. TODO(ry) this is racy.
+ std::thread::sleep(std::time::Duration::from_secs(2));
+
+ println!("tools/http_server.py ready");
+
+ HttpServerGuard { child, g }
+}
diff --git a/cli/tests/001_hello.js b/cli/tests/001_hello.js
new file mode 100644
index 000000000..accefceba
--- /dev/null
+++ b/cli/tests/001_hello.js
@@ -0,0 +1 @@
+console.log("Hello World");
diff --git a/cli/tests/001_hello.js.out b/cli/tests/001_hello.js.out
new file mode 100644
index 000000000..557db03de
--- /dev/null
+++ b/cli/tests/001_hello.js.out
@@ -0,0 +1 @@
+Hello World
diff --git a/cli/tests/002_hello.ts b/cli/tests/002_hello.ts
new file mode 100644
index 000000000..accefceba
--- /dev/null
+++ b/cli/tests/002_hello.ts
@@ -0,0 +1 @@
+console.log("Hello World");
diff --git a/cli/tests/002_hello.ts.out b/cli/tests/002_hello.ts.out
new file mode 100644
index 000000000..557db03de
--- /dev/null
+++ b/cli/tests/002_hello.ts.out
@@ -0,0 +1 @@
+Hello World
diff --git a/cli/tests/003_relative_import.ts b/cli/tests/003_relative_import.ts
new file mode 100644
index 000000000..01d5d7faa
--- /dev/null
+++ b/cli/tests/003_relative_import.ts
@@ -0,0 +1,3 @@
+import { printHello } from "./subdir/print_hello.ts";
+
+printHello();
diff --git a/cli/tests/003_relative_import.ts.out b/cli/tests/003_relative_import.ts.out
new file mode 100644
index 000000000..e965047ad
--- /dev/null
+++ b/cli/tests/003_relative_import.ts.out
@@ -0,0 +1 @@
+Hello
diff --git a/cli/tests/004_set_timeout.ts b/cli/tests/004_set_timeout.ts
new file mode 100644
index 000000000..58f899ee3
--- /dev/null
+++ b/cli/tests/004_set_timeout.ts
@@ -0,0 +1,11 @@
+setTimeout((): void => {
+ console.log("World");
+}, 10);
+
+console.log("Hello");
+
+const id = setTimeout((): void => {
+ console.log("Not printed");
+}, 10000);
+
+clearTimeout(id);
diff --git a/cli/tests/004_set_timeout.ts.out b/cli/tests/004_set_timeout.ts.out
new file mode 100644
index 000000000..f9264f7fb
--- /dev/null
+++ b/cli/tests/004_set_timeout.ts.out
@@ -0,0 +1,2 @@
+Hello
+World
diff --git a/cli/tests/005_more_imports.ts b/cli/tests/005_more_imports.ts
new file mode 100644
index 000000000..52dd1df7b
--- /dev/null
+++ b/cli/tests/005_more_imports.ts
@@ -0,0 +1,11 @@
+import { returnsHi, returnsFoo2, printHello3 } from "./subdir/mod1.ts";
+
+printHello3();
+
+if (returnsHi() !== "Hi") {
+ throw Error("Unexpected");
+}
+
+if (returnsFoo2() !== "Foo") {
+ throw Error("Unexpected");
+}
diff --git a/cli/tests/005_more_imports.ts.out b/cli/tests/005_more_imports.ts.out
new file mode 100644
index 000000000..e965047ad
--- /dev/null
+++ b/cli/tests/005_more_imports.ts.out
@@ -0,0 +1 @@
+Hello
diff --git a/cli/tests/006_url_imports.ts b/cli/tests/006_url_imports.ts
new file mode 100644
index 000000000..109cb603e
--- /dev/null
+++ b/cli/tests/006_url_imports.ts
@@ -0,0 +1,3 @@
+import { printHello } from "http://localhost:4545/cli/tests/subdir/mod2.ts";
+printHello();
+console.log("success");
diff --git a/cli/tests/006_url_imports.ts.out b/cli/tests/006_url_imports.ts.out
new file mode 100644
index 000000000..989ce33e9
--- /dev/null
+++ b/cli/tests/006_url_imports.ts.out
@@ -0,0 +1,2 @@
+Hello
+success
diff --git a/cli/tests/012_async.ts b/cli/tests/012_async.ts
new file mode 100644
index 000000000..1f1822c04
--- /dev/null
+++ b/cli/tests/012_async.ts
@@ -0,0 +1,13 @@
+// Check that we can use the async keyword.
+async function main(): Promise<void> {
+ await new Promise(
+ (resolve): void => {
+ console.log("2");
+ setTimeout(resolve, 100);
+ }
+ );
+ console.log("3");
+}
+
+console.log("1");
+main();
diff --git a/cli/tests/012_async.ts.out b/cli/tests/012_async.ts.out
new file mode 100644
index 000000000..01e79c32a
--- /dev/null
+++ b/cli/tests/012_async.ts.out
@@ -0,0 +1,3 @@
+1
+2
+3
diff --git a/cli/tests/013_dynamic_import.ts b/cli/tests/013_dynamic_import.ts
new file mode 100644
index 000000000..6bbce3132
--- /dev/null
+++ b/cli/tests/013_dynamic_import.ts
@@ -0,0 +1,15 @@
+(async (): Promise<void> => {
+ const { returnsHi, returnsFoo2, printHello3 } = await import(
+ "./subdir/mod1.ts"
+ );
+
+ printHello3();
+
+ if (returnsHi() !== "Hi") {
+ throw Error("Unexpected");
+ }
+
+ if (returnsFoo2() !== "Foo") {
+ throw Error("Unexpected");
+ }
+})();
diff --git a/cli/tests/013_dynamic_import.ts.out b/cli/tests/013_dynamic_import.ts.out
new file mode 100644
index 000000000..e965047ad
--- /dev/null
+++ b/cli/tests/013_dynamic_import.ts.out
@@ -0,0 +1 @@
+Hello
diff --git a/cli/tests/014_duplicate_import.ts b/cli/tests/014_duplicate_import.ts
new file mode 100644
index 000000000..97864fea7
--- /dev/null
+++ b/cli/tests/014_duplicate_import.ts
@@ -0,0 +1,9 @@
+// With all the imports of the same module, the module should only be
+// instantiated once.
+import "./subdir/auto_print_hello.ts";
+
+import "./subdir/auto_print_hello.ts";
+
+(async (): Promise<void> => {
+ await import("./subdir/auto_print_hello.ts");
+})();
diff --git a/cli/tests/014_duplicate_import.ts.out b/cli/tests/014_duplicate_import.ts.out
new file mode 100644
index 000000000..4effa19f4
--- /dev/null
+++ b/cli/tests/014_duplicate_import.ts.out
@@ -0,0 +1 @@
+hello!
diff --git a/cli/tests/015_duplicate_parallel_import.js b/cli/tests/015_duplicate_parallel_import.js
new file mode 100644
index 000000000..37033cfa2
--- /dev/null
+++ b/cli/tests/015_duplicate_parallel_import.js
@@ -0,0 +1,20 @@
+// When importing the same module in parallel, the module should only be
+// instantiated once.
+
+const promises = new Array(100)
+ .fill(null)
+ .map(() => import("./subdir/mod1.ts"));
+
+Promise.all(promises).then(imports => {
+ const mod = imports.reduce((first, cur) => {
+ if (typeof first !== "object") {
+ throw new Error("Expected an object.");
+ }
+ if (first !== cur) {
+ throw new Error("More than one instance of the same module.");
+ }
+ return first;
+ });
+
+ mod.printHello3();
+});
diff --git a/cli/tests/015_duplicate_parallel_import.js.out b/cli/tests/015_duplicate_parallel_import.js.out
new file mode 100644
index 000000000..e965047ad
--- /dev/null
+++ b/cli/tests/015_duplicate_parallel_import.js.out
@@ -0,0 +1 @@
+Hello
diff --git a/cli/tests/016_double_await.ts b/cli/tests/016_double_await.ts
new file mode 100644
index 000000000..9b4801567
--- /dev/null
+++ b/cli/tests/016_double_await.ts
@@ -0,0 +1,8 @@
+// This tests whether Deno would die at the 2nd await.
+// See https://github.com/denoland/deno/issues/919
+(async (): Promise<void> => {
+ const currDirInfo = await Deno.stat(".");
+ const parentDirInfo = await Deno.stat("..");
+ console.log(currDirInfo.isDirectory());
+ console.log(parentDirInfo.isFile());
+})();
diff --git a/cli/tests/016_double_await.ts.out b/cli/tests/016_double_await.ts.out
new file mode 100644
index 000000000..da29283aa
--- /dev/null
+++ b/cli/tests/016_double_await.ts.out
@@ -0,0 +1,2 @@
+true
+false
diff --git a/cli/tests/017_import_redirect.ts b/cli/tests/017_import_redirect.ts
new file mode 100644
index 000000000..1265dd4ed
--- /dev/null
+++ b/cli/tests/017_import_redirect.ts
@@ -0,0 +1,4 @@
+// An http -> https redirect happens here:
+import { printHello } from "http://gist.githubusercontent.com/ry/f12b2aa3409e6b52645bc346a9e22929/raw/79318f239f51d764384a8bded8d7c6a833610dde/print_hello.ts";
+
+printHello();
diff --git a/cli/tests/017_import_redirect.ts.out b/cli/tests/017_import_redirect.ts.out
new file mode 100644
index 000000000..e965047ad
--- /dev/null
+++ b/cli/tests/017_import_redirect.ts.out
@@ -0,0 +1 @@
+Hello
diff --git a/cli/tests/018_async_catch.ts b/cli/tests/018_async_catch.ts
new file mode 100644
index 000000000..0d034d798
--- /dev/null
+++ b/cli/tests/018_async_catch.ts
@@ -0,0 +1,14 @@
+async function fn(): Promise<never> {
+ throw new Error("message");
+}
+async function call(): Promise<void> {
+ try {
+ console.log("before await fn()");
+ await fn();
+ console.log("after await fn()");
+ } catch (error) {
+ console.log("catch");
+ }
+ console.log("after try-catch");
+}
+call().catch((): void => console.log("outer catch"));
diff --git a/cli/tests/018_async_catch.ts.out b/cli/tests/018_async_catch.ts.out
new file mode 100644
index 000000000..4fc219973
--- /dev/null
+++ b/cli/tests/018_async_catch.ts.out
@@ -0,0 +1,3 @@
+before await fn()
+catch
+after try-catch
diff --git a/cli/tests/019_media_types.ts b/cli/tests/019_media_types.ts
new file mode 100644
index 000000000..cc99be83b
--- /dev/null
+++ b/cli/tests/019_media_types.ts
@@ -0,0 +1,24 @@
+// When run against the test HTTP server, it will serve different media types
+// based on the URL containing `.t#.` strings, which exercises the different
+// mapping of media types end to end.
+
+import { loaded as loadedTs1 } from "http://localhost:4545/cli/tests/subdir/mt_text_typescript.t1.ts";
+import { loaded as loadedTs2 } from "http://localhost:4545/cli/tests/subdir/mt_video_vdn.t2.ts";
+import { loaded as loadedTs3 } from "http://localhost:4545/cli/tests/subdir/mt_video_mp2t.t3.ts";
+import { loaded as loadedTs4 } from "http://localhost:4545/cli/tests/subdir/mt_application_x_typescript.t4.ts";
+import { loaded as loadedJs1 } from "http://localhost:4545/cli/tests/subdir/mt_text_javascript.j1.js";
+import { loaded as loadedJs2 } from "http://localhost:4545/cli/tests/subdir/mt_application_ecmascript.j2.js";
+import { loaded as loadedJs3 } from "http://localhost:4545/cli/tests/subdir/mt_text_ecmascript.j3.js";
+import { loaded as loadedJs4 } from "http://localhost:4545/cli/tests/subdir/mt_application_x_javascript.j4.js";
+
+console.log(
+ "success",
+ loadedTs1,
+ loadedTs2,
+ loadedTs3,
+ loadedTs4,
+ loadedJs1,
+ loadedJs2,
+ loadedJs3,
+ loadedJs4
+);
diff --git a/cli/tests/019_media_types.ts.out b/cli/tests/019_media_types.ts.out
new file mode 100644
index 000000000..7b5fdd44f
--- /dev/null
+++ b/cli/tests/019_media_types.ts.out
@@ -0,0 +1 @@
+success true true true true true true true true
diff --git a/cli/tests/020_json_modules.ts b/cli/tests/020_json_modules.ts
new file mode 100644
index 000000000..fdc85c440
--- /dev/null
+++ b/cli/tests/020_json_modules.ts
@@ -0,0 +1,2 @@
+import config from "./subdir/config.json";
+console.log(JSON.stringify(config));
diff --git a/cli/tests/020_json_modules.ts.out b/cli/tests/020_json_modules.ts.out
new file mode 100644
index 000000000..5d1623e6b
--- /dev/null
+++ b/cli/tests/020_json_modules.ts.out
@@ -0,0 +1 @@
+{"foo":{"bar":true,"baz":["qat",1]}}
diff --git a/cli/tests/021_mjs_modules.ts b/cli/tests/021_mjs_modules.ts
new file mode 100644
index 000000000..6052b9081
--- /dev/null
+++ b/cli/tests/021_mjs_modules.ts
@@ -0,0 +1,2 @@
+import { isMod5 } from "./subdir/mod5.mjs";
+console.log(isMod5);
diff --git a/cli/tests/021_mjs_modules.ts.out b/cli/tests/021_mjs_modules.ts.out
new file mode 100644
index 000000000..27ba77dda
--- /dev/null
+++ b/cli/tests/021_mjs_modules.ts.out
@@ -0,0 +1 @@
+true
diff --git a/cli/tests/022_info_flag_script.out b/cli/tests/022_info_flag_script.out
new file mode 100644
index 000000000..48eef7365
--- /dev/null
+++ b/cli/tests/022_info_flag_script.out
@@ -0,0 +1,14 @@
+local: [WILDCARD]019_media_types.ts
+type: TypeScript
+compiled: [WILDCARD].js
+map: [WILDCARD].js.map
+deps:
+http://127.0.0.1:4545/cli/tests/019_media_types.ts
+ ├── http://localhost:4545/cli/tests/subdir/mt_text_typescript.t1.ts
+ ├── http://localhost:4545/cli/tests/subdir/mt_video_vdn.t2.ts
+ ├── http://localhost:4545/cli/tests/subdir/mt_video_mp2t.t3.ts
+ ├── http://localhost:4545/cli/tests/subdir/mt_application_x_typescript.t4.ts
+ ├── http://localhost:4545/cli/tests/subdir/mt_text_javascript.j1.js
+ ├── http://localhost:4545/cli/tests/subdir/mt_application_ecmascript.j2.js
+ ├── http://localhost:4545/cli/tests/subdir/mt_text_ecmascript.j3.js
+ └── http://localhost:4545/cli/tests/subdir/mt_application_x_javascript.j4.js
diff --git a/cli/tests/023_no_ext_with_headers b/cli/tests/023_no_ext_with_headers
new file mode 100644
index 000000000..87951d835
--- /dev/null
+++ b/cli/tests/023_no_ext_with_headers
@@ -0,0 +1 @@
+console.log("HELLO");
diff --git a/cli/tests/023_no_ext_with_headers.headers.json b/cli/tests/023_no_ext_with_headers.headers.json
new file mode 100644
index 000000000..5b6f09aeb
--- /dev/null
+++ b/cli/tests/023_no_ext_with_headers.headers.json
@@ -0,0 +1 @@
+{ "mime_type": "application/javascript" }
diff --git a/cli/tests/023_no_ext_with_headers.out b/cli/tests/023_no_ext_with_headers.out
new file mode 100644
index 000000000..e427984d4
--- /dev/null
+++ b/cli/tests/023_no_ext_with_headers.out
@@ -0,0 +1 @@
+HELLO
diff --git a/cli/tests/024_import_no_ext_with_headers.ts b/cli/tests/024_import_no_ext_with_headers.ts
new file mode 100644
index 000000000..c8621d0e6
--- /dev/null
+++ b/cli/tests/024_import_no_ext_with_headers.ts
@@ -0,0 +1 @@
+import "./023_no_ext_with_headers";
diff --git a/cli/tests/024_import_no_ext_with_headers.ts.out b/cli/tests/024_import_no_ext_with_headers.ts.out
new file mode 100644
index 000000000..e427984d4
--- /dev/null
+++ b/cli/tests/024_import_no_ext_with_headers.ts.out
@@ -0,0 +1 @@
+HELLO
diff --git a/cli/tests/025_hrtime.ts b/cli/tests/025_hrtime.ts
new file mode 100644
index 000000000..417ca6982
--- /dev/null
+++ b/cli/tests/025_hrtime.ts
@@ -0,0 +1,3 @@
+console.log(performance.now() % 2 !== 0);
+Deno.revokePermission("hrtime");
+console.log(performance.now() % 2 === 0);
diff --git a/cli/tests/025_hrtime.ts.out b/cli/tests/025_hrtime.ts.out
new file mode 100644
index 000000000..bb101b641
--- /dev/null
+++ b/cli/tests/025_hrtime.ts.out
@@ -0,0 +1,2 @@
+true
+true
diff --git a/cli/tests/025_reload_js_type_error.js b/cli/tests/025_reload_js_type_error.js
new file mode 100644
index 000000000..8d6e4b415
--- /dev/null
+++ b/cli/tests/025_reload_js_type_error.js
@@ -0,0 +1,5 @@
+// There was a bug where if this was executed with --reload it would throw a
+// type error.
+window.test = null;
+test = console;
+test.log("hello");
diff --git a/cli/tests/025_reload_js_type_error.js.out b/cli/tests/025_reload_js_type_error.js.out
new file mode 100644
index 000000000..ce0136250
--- /dev/null
+++ b/cli/tests/025_reload_js_type_error.js.out
@@ -0,0 +1 @@
+hello
diff --git a/cli/tests/026_redirect_javascript.js b/cli/tests/026_redirect_javascript.js
new file mode 100644
index 000000000..226a6b622
--- /dev/null
+++ b/cli/tests/026_redirect_javascript.js
@@ -0,0 +1,2 @@
+import { value } from "http://localhost:4547/redirects/redirect3.js";
+console.log(value);
diff --git a/cli/tests/026_redirect_javascript.js.out b/cli/tests/026_redirect_javascript.js.out
new file mode 100644
index 000000000..290864299
--- /dev/null
+++ b/cli/tests/026_redirect_javascript.js.out
@@ -0,0 +1 @@
+3 imports 1
diff --git a/cli/tests/026_workers.ts b/cli/tests/026_workers.ts
new file mode 100644
index 000000000..f45fc4b77
--- /dev/null
+++ b/cli/tests/026_workers.ts
@@ -0,0 +1,14 @@
+const jsWorker = new Worker("./subdir/test_worker.js");
+const tsWorker = new Worker("./subdir/test_worker.ts");
+
+tsWorker.onmessage = (e): void => {
+ console.log("Received ts: " + e.data);
+};
+
+jsWorker.onmessage = (e): void => {
+ console.log("Received js: " + e.data);
+
+ tsWorker.postMessage("Hello World");
+};
+
+jsWorker.postMessage("Hello World");
diff --git a/cli/tests/026_workers.ts.out b/cli/tests/026_workers.ts.out
new file mode 100644
index 000000000..7538cc867
--- /dev/null
+++ b/cli/tests/026_workers.ts.out
@@ -0,0 +1,4 @@
+Hello World
+Received js: Hello World
+Hello World
+Received ts: Hello World
diff --git a/cli/tests/027_redirect_typescript.ts b/cli/tests/027_redirect_typescript.ts
new file mode 100644
index 000000000..584341975
--- /dev/null
+++ b/cli/tests/027_redirect_typescript.ts
@@ -0,0 +1,2 @@
+import { value } from "http://localhost:4547/redirects/redirect4.ts";
+console.log(value);
diff --git a/cli/tests/027_redirect_typescript.ts.out b/cli/tests/027_redirect_typescript.ts.out
new file mode 100644
index 000000000..480d4e8ca
--- /dev/null
+++ b/cli/tests/027_redirect_typescript.ts.out
@@ -0,0 +1 @@
+4 imports 1
diff --git a/cli/tests/028_args.ts b/cli/tests/028_args.ts
new file mode 100644
index 000000000..51c5cb14b
--- /dev/null
+++ b/cli/tests/028_args.ts
@@ -0,0 +1,5 @@
+Deno.args.forEach(
+ (arg): void => {
+ console.log(arg);
+ }
+);
diff --git a/cli/tests/028_args.ts.out b/cli/tests/028_args.ts.out
new file mode 100644
index 000000000..fa36f6e4c
--- /dev/null
+++ b/cli/tests/028_args.ts.out
@@ -0,0 +1,7 @@
+028_args.ts
+--arg1
+val1
+--arg2=val2
+--
+arg3
+arg4
diff --git a/cli/tests/029_eval.out b/cli/tests/029_eval.out
new file mode 100644
index 000000000..ce0136250
--- /dev/null
+++ b/cli/tests/029_eval.out
@@ -0,0 +1 @@
+hello
diff --git a/cli/tests/030_xeval.out b/cli/tests/030_xeval.out
new file mode 100644
index 000000000..b1e67221a
--- /dev/null
+++ b/cli/tests/030_xeval.out
@@ -0,0 +1,3 @@
+A
+B
+C
diff --git a/cli/tests/031_xeval_replvar.out b/cli/tests/031_xeval_replvar.out
new file mode 100644
index 000000000..b1e67221a
--- /dev/null
+++ b/cli/tests/031_xeval_replvar.out
@@ -0,0 +1,3 @@
+A
+B
+C
diff --git a/cli/tests/032_xeval_delim.out b/cli/tests/032_xeval_delim.out
new file mode 100644
index 000000000..b1e67221a
--- /dev/null
+++ b/cli/tests/032_xeval_delim.out
@@ -0,0 +1,3 @@
+A
+B
+C
diff --git a/cli/tests/033_import_map.out b/cli/tests/033_import_map.out
new file mode 100644
index 000000000..e9b9160e9
--- /dev/null
+++ b/cli/tests/033_import_map.out
@@ -0,0 +1,7 @@
+Hello from remapped moment!
+Hello from remapped moment dir!
+Hello from remapped lodash!
+Hello from remapped lodash dir!
+Hello from remapped Vue!
+Hello from scoped moment!
+Hello from scoped!
diff --git a/cli/tests/034_onload.out b/cli/tests/034_onload.out
new file mode 100644
index 000000000..c9556e991
--- /dev/null
+++ b/cli/tests/034_onload.out
@@ -0,0 +1,11 @@
+log from nest_imported script
+log from imported script
+log from main
+got load event in onload function
+got load event in event handler (nest_imported)
+got load event in event handler (imported)
+got load event in event handler (main)
+got unload event in onunload function
+got unload event in event handler (nest_imported)
+got unload event in event handler (imported)
+got unload event in event handler (main)
diff --git a/cli/tests/034_onload/imported.ts b/cli/tests/034_onload/imported.ts
new file mode 100644
index 000000000..f9a7009b8
--- /dev/null
+++ b/cli/tests/034_onload/imported.ts
@@ -0,0 +1,11 @@
+import { assert } from "../../../std/testing/asserts.ts";
+import "./nest_imported.ts";
+
+const handler = (e: Event): void => {
+ assert(!e.cancelable);
+ console.log(`got ${e.type} event in event handler (imported)`);
+};
+
+window.addEventListener("load", handler);
+window.addEventListener("unload", handler);
+console.log("log from imported script");
diff --git a/cli/tests/034_onload/main.ts b/cli/tests/034_onload/main.ts
new file mode 100644
index 000000000..db6ca669a
--- /dev/null
+++ b/cli/tests/034_onload/main.ts
@@ -0,0 +1,23 @@
+import { assert } from "../../../std/testing/asserts.ts";
+import "./imported.ts";
+
+const eventHandler = (e: Event): void => {
+ assert(!e.cancelable);
+ console.log(`got ${e.type} event in event handler (main)`);
+};
+
+window.addEventListener("load", eventHandler);
+
+window.addEventListener("unload", eventHandler);
+
+window.onload = (e: Event): void => {
+ assert(!e.cancelable);
+ console.log(`got ${e.type} event in onload function`);
+};
+
+window.onunload = (e: Event): void => {
+ assert(!e.cancelable);
+ console.log(`got ${e.type} event in onunload function`);
+};
+
+console.log("log from main");
diff --git a/cli/tests/034_onload/nest_imported.ts b/cli/tests/034_onload/nest_imported.ts
new file mode 100644
index 000000000..6b4a40749
--- /dev/null
+++ b/cli/tests/034_onload/nest_imported.ts
@@ -0,0 +1,10 @@
+import { assert } from "../../../std/testing/asserts.ts";
+
+const handler = (e: Event): void => {
+ assert(!e.cancelable);
+ console.log(`got ${e.type} event in event handler (nest_imported)`);
+};
+
+window.addEventListener("load", handler);
+window.addEventListener("unload", handler);
+console.log("log from nest_imported script");
diff --git a/cli/tests/035_no_fetch_flag.out b/cli/tests/035_no_fetch_flag.out
new file mode 100644
index 000000000..26f020aa5
--- /dev/null
+++ b/cli/tests/035_no_fetch_flag.out
@@ -0,0 +1 @@
+Cannot resolve module "http://127.0.0.1:4545/cli/tests/019_media_types.ts"
diff --git a/cli/tests/036_import_map_fetch.out b/cli/tests/036_import_map_fetch.out
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/cli/tests/036_import_map_fetch.out
diff --git a/cli/tests/038_checkjs.js b/cli/tests/038_checkjs.js
new file mode 100644
index 000000000..628d3e376
--- /dev/null
+++ b/cli/tests/038_checkjs.js
@@ -0,0 +1,6 @@
+// console.log intentionally misspelled to trigger a type error
+consol.log("hello world!");
+
+// the following error should be ignored and not output to the console
+// eslint-disable-next-line
+const foo = new Foo();
diff --git a/cli/tests/038_checkjs.js.out b/cli/tests/038_checkjs.js.out
new file mode 100644
index 000000000..deaf77211
--- /dev/null
+++ b/cli/tests/038_checkjs.js.out
@@ -0,0 +1,15 @@
+[WILDCARD]
+error TS2552: Cannot find name 'consol'. Did you mean 'console'?
+
+[WILDCARD]tests/038_checkjs.js:2:1
+
+2 consol.log("hello world!");
+[WILDCARD]
+error TS2552: Cannot find name 'Foo'. Did you mean 'foo'?
+
+[WILDCARD]tests/038_checkjs.js:6:17
+
+6 const foo = new Foo();
+[WILDCARD]
+Found 2 errors.
+[WILDCARD] \ No newline at end of file
diff --git a/cli/tests/038_checkjs.tsconfig.json b/cli/tests/038_checkjs.tsconfig.json
new file mode 100644
index 000000000..08ac60b6c
--- /dev/null
+++ b/cli/tests/038_checkjs.tsconfig.json
@@ -0,0 +1,5 @@
+{
+ "compilerOptions": {
+ "checkJs": true
+ }
+}
diff --git a/cli/tests/039_worker_deno_ns.ts b/cli/tests/039_worker_deno_ns.ts
new file mode 100644
index 000000000..80ada4343
--- /dev/null
+++ b/cli/tests/039_worker_deno_ns.ts
@@ -0,0 +1,25 @@
+const w1 = new Worker("./039_worker_deno_ns/has_ns.ts");
+const w2 = new Worker("./039_worker_deno_ns/no_ns.ts", {
+ noDenoNamespace: true
+});
+let w1MsgCount = 0;
+let w2MsgCount = 0;
+w1.onmessage = (msg): void => {
+ console.log(msg.data);
+ w1MsgCount++;
+ if (w1MsgCount === 1) {
+ w1.postMessage("CONTINUE");
+ } else {
+ w2.postMessage("START");
+ }
+};
+w2.onmessage = (msg): void => {
+ console.log(msg.data);
+ w2MsgCount++;
+ if (w2MsgCount === 1) {
+ w2.postMessage("CONTINUE");
+ } else {
+ Deno.exit(0);
+ }
+};
+w1.postMessage("START");
diff --git a/cli/tests/039_worker_deno_ns.ts.out b/cli/tests/039_worker_deno_ns.ts.out
new file mode 100644
index 000000000..9b2f90099
--- /dev/null
+++ b/cli/tests/039_worker_deno_ns.ts.out
@@ -0,0 +1,4 @@
+has_ns.ts: is window.Deno available: true
+[SPAWNED BY has_ns.ts] maybe_ns.ts: is window.Deno available: true
+no_ns.ts: is window.Deno available: false
+[SPAWNED BY no_ns.ts] maybe_ns.ts: is window.Deno available: false
diff --git a/cli/tests/039_worker_deno_ns/has_ns.ts b/cli/tests/039_worker_deno_ns/has_ns.ts
new file mode 100644
index 000000000..8d2507122
--- /dev/null
+++ b/cli/tests/039_worker_deno_ns/has_ns.ts
@@ -0,0 +1,10 @@
+onmessage = (msg): void => {
+ if (msg.data === "START") {
+ postMessage("has_ns.ts: is window.Deno available: " + !!window.Deno);
+ } else {
+ const worker = new Worker("./maybe_ns.ts");
+ worker.onmessage = (msg): void => {
+ postMessage("[SPAWNED BY has_ns.ts] " + msg.data);
+ };
+ }
+};
diff --git a/cli/tests/039_worker_deno_ns/maybe_ns.ts b/cli/tests/039_worker_deno_ns/maybe_ns.ts
new file mode 100644
index 000000000..0bcbd1f97
--- /dev/null
+++ b/cli/tests/039_worker_deno_ns/maybe_ns.ts
@@ -0,0 +1 @@
+postMessage("maybe_ns.ts: is window.Deno available: " + !!window.Deno);
diff --git a/cli/tests/039_worker_deno_ns/no_ns.ts b/cli/tests/039_worker_deno_ns/no_ns.ts
new file mode 100644
index 000000000..0489a00a3
--- /dev/null
+++ b/cli/tests/039_worker_deno_ns/no_ns.ts
@@ -0,0 +1,10 @@
+onmessage = (msg): void => {
+ if (msg.data === "START") {
+ postMessage("no_ns.ts: is window.Deno available: " + !!window.Deno);
+ } else {
+ const worker = new Worker("./maybe_ns.ts");
+ worker.onmessage = (msg): void => {
+ postMessage("[SPAWNED BY no_ns.ts] " + msg.data);
+ };
+ }
+};
diff --git a/cli/tests/040_worker_blob.ts b/cli/tests/040_worker_blob.ts
new file mode 100644
index 000000000..1ba4528cf
--- /dev/null
+++ b/cli/tests/040_worker_blob.ts
@@ -0,0 +1,6 @@
+const b = new Blob(["console.log('code from Blob'); postMessage('DONE')"]);
+const blobURL = URL.createObjectURL(b);
+const worker = new Worker(blobURL);
+worker.onmessage = (): void => {
+ Deno.exit(0);
+};
diff --git a/cli/tests/040_worker_blob.ts.out b/cli/tests/040_worker_blob.ts.out
new file mode 100644
index 000000000..f49b8f3d6
--- /dev/null
+++ b/cli/tests/040_worker_blob.ts.out
@@ -0,0 +1 @@
+code from Blob
diff --git a/cli/tests/041_dyn_import_eval.out b/cli/tests/041_dyn_import_eval.out
new file mode 100644
index 000000000..1dfef2e98
--- /dev/null
+++ b/cli/tests/041_dyn_import_eval.out
@@ -0,0 +1 @@
+{ isMod4: true }
diff --git a/cli/tests/041_info_flag.out b/cli/tests/041_info_flag.out
new file mode 100644
index 000000000..c384fa892
--- /dev/null
+++ b/cli/tests/041_info_flag.out
@@ -0,0 +1,3 @@
+DENO_DIR location: "[WILDCARD]"
+Remote modules cache: "[WILDCARD]deps"
+TypeScript compiler cache: "[WILDCARD]gen"
diff --git a/cli/tests/042_dyn_import_evalcontext.ts b/cli/tests/042_dyn_import_evalcontext.ts
new file mode 100644
index 000000000..124a406d2
--- /dev/null
+++ b/cli/tests/042_dyn_import_evalcontext.ts
@@ -0,0 +1,4 @@
+// @ts-ignore
+Deno.core.evalContext(
+ "(async () => console.log(await import('./subdir/mod4.js')))()"
+);
diff --git a/cli/tests/042_dyn_import_evalcontext.ts.out b/cli/tests/042_dyn_import_evalcontext.ts.out
new file mode 100644
index 000000000..1dfef2e98
--- /dev/null
+++ b/cli/tests/042_dyn_import_evalcontext.ts.out
@@ -0,0 +1 @@
+{ isMod4: true }
diff --git a/cli/tests/044_bad_resource.ts b/cli/tests/044_bad_resource.ts
new file mode 100644
index 000000000..39ca3d120
--- /dev/null
+++ b/cli/tests/044_bad_resource.ts
@@ -0,0 +1,7 @@
+async function main(): Promise<void> {
+ const file = await Deno.open("044_bad_resource.ts", "r");
+ file.close();
+ await file.seek(10, 0);
+}
+
+main();
diff --git a/cli/tests/044_bad_resource.ts.out b/cli/tests/044_bad_resource.ts.out
new file mode 100644
index 000000000..155e4396f
--- /dev/null
+++ b/cli/tests/044_bad_resource.ts.out
@@ -0,0 +1,6 @@
+[WILDCARD]
+error: Uncaught BadResource: bad resource id
+[WILDCARD]dispatch_json.ts:[WILDCARD]
+ at DenoError ([WILDCARD]errors.ts:[WILDCARD])
+ at unwrapResponse ([WILDCARD]dispatch_json.ts:[WILDCARD])
+ at sendAsync ([WILDCARD]dispatch_json.ts:[WILDCARD])
diff --git a/cli/tests/045_proxy_client.ts b/cli/tests/045_proxy_client.ts
new file mode 100644
index 000000000..4fb3db83b
--- /dev/null
+++ b/cli/tests/045_proxy_client.ts
@@ -0,0 +1,7 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+async function main(): Promise<void> {
+ const res = await fetch("http://deno.land/welcome.ts");
+ console.log(`Response http: ${await res.text()}`);
+}
+
+main();
diff --git a/cli/tests/045_proxy_test.ts b/cli/tests/045_proxy_test.ts
new file mode 100644
index 000000000..f1226f4c4
--- /dev/null
+++ b/cli/tests/045_proxy_test.ts
@@ -0,0 +1,72 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+import { serve, ServerRequest } from "../../std/http/server.ts";
+import { assertEquals } from "../../std/testing/asserts.ts";
+
+const addr = Deno.args[1] || "127.0.0.1:4555";
+
+async function proxyServer(): Promise<void> {
+ const server = serve(addr);
+
+ console.log(`Proxy server listening on http://${addr}/`);
+ for await (const req of server) {
+ proxyRequest(req);
+ }
+}
+
+async function proxyRequest(req: ServerRequest): Promise<void> {
+ console.log(`Proxy request to: ${req.url}`);
+ const resp = await fetch(req.url, {
+ method: req.method,
+ headers: req.headers
+ });
+ req.respond(resp);
+}
+
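+// Spawns a child deno process that runs 045_proxy_client.ts with HTTP_PROXY
+// pointing at the local proxy server and asserts that it exits successfully.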
+async function testFetch(): Promise<void> {
+ const c = Deno.run({
+ args: [
+ Deno.execPath(),
+ "--no-prompt",
+ "--reload",
+ "--allow-net",
+ "045_proxy_client.ts"
+ ],
+ stdout: "piped",
+ env: {
+ HTTP_PROXY: `http://${addr}`
+ }
+ });
+
+ const status = await c.status();
+ assertEquals(status.code, 0);
+ c.close();
+}
+
+async function testModuleDownload(): Promise<void> {
+ const http = Deno.run({
+ args: [
+ Deno.execPath(),
+ "--no-prompt",
+ "--reload",
+ "fetch",
+ "http://deno.land/welcome.ts"
+ ],
+ stdout: "piped",
+ env: {
+ HTTP_PROXY: `http://${addr}`
+ }
+ });
+
+ const httpStatus = await http.status();
+ assertEquals(httpStatus.code, 0);
+ http.close();
+}
+
+async function main(): Promise<void> {
+ proxyServer();
+ await testFetch();
+ await testModuleDownload();
+ Deno.exit(0);
+}
+
+main();
diff --git a/cli/tests/045_proxy_test.ts.out b/cli/tests/045_proxy_test.ts.out
new file mode 100644
index 000000000..7b898bcf1
--- /dev/null
+++ b/cli/tests/045_proxy_test.ts.out
@@ -0,0 +1,3 @@
+Proxy server listening on [WILDCARD]
+Proxy request to: http://deno.land/welcome.ts
+Proxy request to: http://deno.land/welcome.ts
diff --git a/cli/tests/046_jsx_test.tsx b/cli/tests/046_jsx_test.tsx
new file mode 100644
index 000000000..4e9380eb8
--- /dev/null
+++ b/cli/tests/046_jsx_test.tsx
@@ -0,0 +1,9 @@
+const React = {
+ createElement(factory: any, props: any, ...children: any[]) {
+ return {factory, props, children}
+ }
+}
+const View = () => (
+ <div class="deno">land</div>
+)
+console.log(<View />)
diff --git a/cli/tests/046_jsx_test.tsx.out b/cli/tests/046_jsx_test.tsx.out
new file mode 100644
index 000000000..85cfe824b
--- /dev/null
+++ b/cli/tests/046_jsx_test.tsx.out
@@ -0,0 +1 @@
+{ factory: [Function: View], props: null, children: [] }
diff --git a/cli/tests/047_jsx_test.jsx b/cli/tests/047_jsx_test.jsx
new file mode 100644
index 000000000..553c4c5a5
--- /dev/null
+++ b/cli/tests/047_jsx_test.jsx
@@ -0,0 +1,9 @@
+const React = {
+ createElement(factory, props, ...children) {
+ return {factory, props, children}
+ }
+}
+const View = () => (
+ <div class="deno">land</div>
+)
+console.log(<View />)
diff --git a/cli/tests/047_jsx_test.jsx.out b/cli/tests/047_jsx_test.jsx.out
new file mode 100644
index 000000000..85cfe824b
--- /dev/null
+++ b/cli/tests/047_jsx_test.jsx.out
@@ -0,0 +1 @@
+{ factory: [Function: View], props: null, children: [] }
diff --git a/cli/tests/README.md b/cli/tests/README.md
new file mode 100644
index 000000000..fe9071926
--- /dev/null
+++ b/cli/tests/README.md
@@ -0,0 +1,7 @@
+# Integration Tests
+
+This directory contains integration tests. See integration_tests.rs for the index.
+
+TODO(ry) Currently //tests is a symlink to //cli/tests, to simplify the
+transition. The symlink should be removed once the many references have been
+updated to the new path.
diff --git a/cli/tests/async_error.ts b/cli/tests/async_error.ts
new file mode 100644
index 000000000..81c983a50
--- /dev/null
+++ b/cli/tests/async_error.ts
@@ -0,0 +1,8 @@
+console.log("hello");
+const foo = async (): Promise<never> => {
+ console.log("before error");
+ throw Error("error");
+};
+
+foo();
+console.log("world");
diff --git a/cli/tests/async_error.ts.out b/cli/tests/async_error.ts.out
new file mode 100644
index 000000000..d07ba8cfe
--- /dev/null
+++ b/cli/tests/async_error.ts.out
@@ -0,0 +1,11 @@
+[WILDCARD]hello
+before error
+world
+error: Uncaught Error: error
+[WILDCARD]tests/async_error.ts:4:9
+
+4 throw Error("error");
+ ^
+
+ at foo ([WILDCARD]tests/async_error.ts:4:9)
+ at [WILDCARD]tests/async_error.ts:7:1
diff --git a/cli/tests/badly_formatted.js b/cli/tests/badly_formatted.js
new file mode 100644
index 000000000..17e3e6be0
--- /dev/null
+++ b/cli/tests/badly_formatted.js
@@ -0,0 +1,4 @@
+
+console.log(
+ "Hello World"
+)
diff --git a/cli/tests/badly_formatted_fixed.js b/cli/tests/badly_formatted_fixed.js
new file mode 100644
index 000000000..accefceba
--- /dev/null
+++ b/cli/tests/badly_formatted_fixed.js
@@ -0,0 +1 @@
+console.log("Hello World");
diff --git a/cli/tests/cat.ts b/cli/tests/cat.ts
new file mode 100644
index 000000000..756238be6
--- /dev/null
+++ b/cli/tests/cat.ts
@@ -0,0 +1,11 @@
+const { stdout, open, copy, args } = Deno;
+
+async function main(): Promise<void> {
+ for (let i = 1; i < args.length; i++) {
+ const filename = args[i];
+ const file = await open(filename);
+ await copy(stdout, file);
+ }
+}
+
+main();
diff --git a/cli/tests/circular1.js b/cli/tests/circular1.js
new file mode 100644
index 000000000..8b2cc4960
--- /dev/null
+++ b/cli/tests/circular1.js
@@ -0,0 +1,2 @@
+import "./circular2.js";
+console.log("circular1");
diff --git a/cli/tests/circular1.js.out b/cli/tests/circular1.js.out
new file mode 100644
index 000000000..21f7fd585
--- /dev/null
+++ b/cli/tests/circular1.js.out
@@ -0,0 +1,2 @@
+circular2
+circular1
diff --git a/cli/tests/circular2.js b/cli/tests/circular2.js
new file mode 100644
index 000000000..62127e04d
--- /dev/null
+++ b/cli/tests/circular2.js
@@ -0,0 +1,2 @@
+import "./circular1.js";
+console.log("circular2");
diff --git a/cli/tests/config.ts b/cli/tests/config.ts
new file mode 100644
index 000000000..e08061e77
--- /dev/null
+++ b/cli/tests/config.ts
@@ -0,0 +1,5 @@
+const map = new Map<string, { foo: string }>();
+
+if (map.get("bar").foo) {
+ console.log("here");
+}
diff --git a/cli/tests/config.ts.out b/cli/tests/config.ts.out
new file mode 100644
index 000000000..db5a8340e
--- /dev/null
+++ b/cli/tests/config.ts.out
@@ -0,0 +1,10 @@
+[WILDCARD]Unsupported compiler options in "[WILDCARD]config.tsconfig.json"
+ The following options were ignored:
+ module, target
+[WILDCARD]error TS2532: Object is possibly 'undefined'.
+
+[WILDCARD]tests/config.ts:3:5
+
+3 if (map.get("bar").foo) {
+ ~~~~~~~~~~~~~~
+
diff --git a/cli/tests/config.tsconfig.json b/cli/tests/config.tsconfig.json
new file mode 100644
index 000000000..074d7ac0b
--- /dev/null
+++ b/cli/tests/config.tsconfig.json
@@ -0,0 +1,7 @@
+{
+ "compilerOptions": {
+ "module": "amd",
+ "strict": true,
+ "target": "es5"
+ }
+}
diff --git a/cli/tests/echo_server.ts b/cli/tests/echo_server.ts
new file mode 100644
index 000000000..5c6b5954b
--- /dev/null
+++ b/cli/tests/echo_server.ts
@@ -0,0 +1,12 @@
+const { args, listen, copy } = Deno;
+const addr = args[1] || "0.0.0.0:4544";
+const [hostname, port] = addr.split(":");
+const listener = listen({ hostname, port: Number(port) });
+console.log("listening on", addr);
+listener.accept().then(
+ async (conn): Promise<void> => {
+ await copy(conn, conn);
+ conn.close();
+ listener.close();
+ }
+);
diff --git a/cli/tests/error_001.ts b/cli/tests/error_001.ts
new file mode 100644
index 000000000..f06f80cb4
--- /dev/null
+++ b/cli/tests/error_001.ts
@@ -0,0 +1,9 @@
+function foo(): never {
+ throw Error("bad");
+}
+
+function bar(): void {
+ foo();
+}
+
+bar();
diff --git a/cli/tests/error_001.ts.out b/cli/tests/error_001.ts.out
new file mode 100644
index 000000000..3c7e2828e
--- /dev/null
+++ b/cli/tests/error_001.ts.out
@@ -0,0 +1,9 @@
+[WILDCARD]error: Uncaught Error: bad
+[WILDCARD]tests/error_001.ts:2:9
+
+2 throw Error("bad");
+ ^
+
+ at foo ([WILDCARD]tests/error_001.ts:2:9)
+ at bar ([WILDCARD]tests/error_001.ts:6:3)
+ at [WILDCARD]tests/error_001.ts:9:1
diff --git a/cli/tests/error_002.ts b/cli/tests/error_002.ts
new file mode 100644
index 000000000..eb66764b7
--- /dev/null
+++ b/cli/tests/error_002.ts
@@ -0,0 +1,7 @@
+import { throwsError } from "./subdir/mod1.ts";
+
+function foo(): void {
+ throwsError();
+}
+
+foo();
diff --git a/cli/tests/error_002.ts.out b/cli/tests/error_002.ts.out
new file mode 100644
index 000000000..292544a33
--- /dev/null
+++ b/cli/tests/error_002.ts.out
@@ -0,0 +1,9 @@
+[WILDCARD]error: Uncaught Error: exception from mod1
+[WILDCARD]tests/subdir/mod1.ts:16:9
+
+16 throw Error("exception from mod1");
+ ^
+
+ at throwsError ([WILDCARD]tests/subdir/mod1.ts:16:9)
+ at foo ([WILDCARD]tests/error_002.ts:4:3)
+ at [WILDCARD]tests/error_002.ts:7:1
diff --git a/cli/tests/error_003_typescript.ts b/cli/tests/error_003_typescript.ts
new file mode 100644
index 000000000..4ce86bb83
--- /dev/null
+++ b/cli/tests/error_003_typescript.ts
@@ -0,0 +1,20 @@
+/* eslint-disable */
+let x = {
+ a: {
+ b: {
+ c() {
+ return { d: "hello" };
+ }
+ }
+ }
+};
+let y = {
+ a: {
+ b: {
+ c() {
+ return { d: 1234 };
+ }
+ }
+ }
+};
+x = y;
diff --git a/cli/tests/error_003_typescript.ts.out b/cli/tests/error_003_typescript.ts.out
new file mode 100644
index 000000000..0b1d94db4
--- /dev/null
+++ b/cli/tests/error_003_typescript.ts.out
@@ -0,0 +1,16 @@
+[WILDCARD]error TS2322: Type '{ a: { b: { c(): { d: number; }; }; }; }' is not assignable to type '{ a: { b: { c(): { d: string; }; }; }; }'.
+ Types of property 'a' are incompatible.
+ Type '{ b: { c(): { d: number; }; }; }' is not assignable to type '{ b: { c(): { d: string; }; }; }'.
+ Types of property 'b' are incompatible.
+ Type '{ c(): { d: number; }; }' is not assignable to type '{ c(): { d: string; }; }'.
+ Types of property 'c' are incompatible.
+ Type '() => { d: number; }' is not assignable to type '() => { d: string; }'.
+ Type '{ d: number; }' is not assignable to type '{ d: string; }'.
+ Types of property 'd' are incompatible.
+ Type 'number' is not assignable to type 'string'.
+
+[WILDCARD]/tests/error_003_typescript.ts:20:1
+
+20 x = y;
+ ^
+
diff --git a/cli/tests/error_004_missing_module.ts b/cli/tests/error_004_missing_module.ts
new file mode 100644
index 000000000..24ae52cf7
--- /dev/null
+++ b/cli/tests/error_004_missing_module.ts
@@ -0,0 +1,2 @@
+// eslint-disable-next-line
+import * as badModule from "./bad-module.ts";
diff --git a/cli/tests/error_004_missing_module.ts.out b/cli/tests/error_004_missing_module.ts.out
new file mode 100644
index 000000000..7a5f50938
--- /dev/null
+++ b/cli/tests/error_004_missing_module.ts.out
@@ -0,0 +1,5 @@
+[WILDCARD]error: Uncaught NotFound: Cannot resolve module "[WILDCARD]/bad-module.ts"
+[WILDCARD]dispatch_json.ts:[WILDCARD]
+ at DenoError ([WILDCARD]errors.ts:[WILDCARD])
+ at unwrapResponse ([WILDCARD]dispatch_json.ts:[WILDCARD])
+ at sendAsync[WILDCARD] ([WILDCARD]dispatch_json.ts:[WILDCARD])
diff --git a/cli/tests/error_005_missing_dynamic_import.ts b/cli/tests/error_005_missing_dynamic_import.ts
new file mode 100644
index 000000000..4c09feb5f
--- /dev/null
+++ b/cli/tests/error_005_missing_dynamic_import.ts
@@ -0,0 +1,4 @@
+(async (): Promise<void> => {
+ // eslint-disable-next-line
+ const badModule = await import("./bad-module.ts");
+})();
diff --git a/cli/tests/error_005_missing_dynamic_import.ts.out b/cli/tests/error_005_missing_dynamic_import.ts.out
new file mode 100644
index 000000000..7a5f50938
--- /dev/null
+++ b/cli/tests/error_005_missing_dynamic_import.ts.out
@@ -0,0 +1,5 @@
+[WILDCARD]error: Uncaught NotFound: Cannot resolve module "[WILDCARD]/bad-module.ts"
+[WILDCARD]dispatch_json.ts:[WILDCARD]
+ at DenoError ([WILDCARD]errors.ts:[WILDCARD])
+ at unwrapResponse ([WILDCARD]dispatch_json.ts:[WILDCARD])
+ at sendAsync[WILDCARD] ([WILDCARD]dispatch_json.ts:[WILDCARD])
diff --git a/cli/tests/error_006_import_ext_failure.ts b/cli/tests/error_006_import_ext_failure.ts
new file mode 100644
index 000000000..3c32303a3
--- /dev/null
+++ b/cli/tests/error_006_import_ext_failure.ts
@@ -0,0 +1 @@
+import "./non-existent";
diff --git a/cli/tests/error_006_import_ext_failure.ts.out b/cli/tests/error_006_import_ext_failure.ts.out
new file mode 100644
index 000000000..d88477df8
--- /dev/null
+++ b/cli/tests/error_006_import_ext_failure.ts.out
@@ -0,0 +1,5 @@
+[WILDCARD]error: Uncaught NotFound: Cannot resolve module "[WILDCARD]/non-existent"
+[WILDCARD]dispatch_json.ts:[WILDCARD]
+ at DenoError ([WILDCARD]errors.ts:[WILDCARD])
+ at unwrapResponse ([WILDCARD]dispatch_json.ts:[WILDCARD])
+ at sendAsync[WILDCARD] ([WILDCARD]dispatch_json.ts:[WILDCARD])
diff --git a/cli/tests/error_007_any.ts b/cli/tests/error_007_any.ts
new file mode 100644
index 000000000..778886fcb
--- /dev/null
+++ b/cli/tests/error_007_any.ts
@@ -0,0 +1 @@
+throw {};
diff --git a/cli/tests/error_007_any.ts.out b/cli/tests/error_007_any.ts.out
new file mode 100644
index 000000000..45dbffd04
--- /dev/null
+++ b/cli/tests/error_007_any.ts.out
@@ -0,0 +1 @@
+[WILDCARD]error: Uncaught #<Object>
diff --git a/cli/tests/error_008_checkjs.js b/cli/tests/error_008_checkjs.js
new file mode 100644
index 000000000..628d3e376
--- /dev/null
+++ b/cli/tests/error_008_checkjs.js
@@ -0,0 +1,6 @@
+// console.log intentionally misspelled to trigger a type error
+consol.log("hello world!");
+
+// the following error should be ignored and not output to the console
+// eslint-disable-next-line
+const foo = new Foo();
diff --git a/cli/tests/error_008_checkjs.js.out b/cli/tests/error_008_checkjs.js.out
new file mode 100644
index 000000000..5c50e8513
--- /dev/null
+++ b/cli/tests/error_008_checkjs.js.out
@@ -0,0 +1,7 @@
+[WILDCARD]error: Uncaught ReferenceError: consol is not defined
+[WILDCARD]tests/error_008_checkjs.js:2:1
+
+2 consol.log("hello world!");
+ ^
+
+ at [WILDCARD]tests/error_008_checkjs.js:2:1
diff --git a/cli/tests/error_009_missing_js_module.disabled b/cli/tests/error_009_missing_js_module.disabled
new file mode 100644
index 000000000..b16bb232b
--- /dev/null
+++ b/cli/tests/error_009_missing_js_module.disabled
@@ -0,0 +1,4 @@
+args: tests/error_009_missing_js_module.js
+check_stderr: true
+exit_code: 1
+output: tests/error_009_missing_js_module.js.out \ No newline at end of file
diff --git a/cli/tests/error_009_missing_js_module.js b/cli/tests/error_009_missing_js_module.js
new file mode 100644
index 000000000..e6ca88934
--- /dev/null
+++ b/cli/tests/error_009_missing_js_module.js
@@ -0,0 +1 @@
+import "./bad-module.js";
diff --git a/cli/tests/error_009_missing_js_module.js.out b/cli/tests/error_009_missing_js_module.js.out
new file mode 100644
index 000000000..edb08da1c
--- /dev/null
+++ b/cli/tests/error_009_missing_js_module.js.out
@@ -0,0 +1 @@
+Cannot resolve module "./bad-module.js" from "[WILDCARD]error_009_missing_js_module.js"
diff --git a/cli/tests/error_010_nonexistent_arg.disabled b/cli/tests/error_010_nonexistent_arg.disabled
new file mode 100644
index 000000000..9d183107c
--- /dev/null
+++ b/cli/tests/error_010_nonexistent_arg.disabled
@@ -0,0 +1,4 @@
+args: not-a-valid-filename.ts
+output: tests/error_010_nonexistent_arg.out
+exit_code: 1
+check_stderr: true
diff --git a/cli/tests/error_010_nonexistent_arg.out b/cli/tests/error_010_nonexistent_arg.out
new file mode 100644
index 000000000..ef4f7b041
--- /dev/null
+++ b/cli/tests/error_010_nonexistent_arg.out
@@ -0,0 +1 @@
+[WILDCARD]Cannot resolve module "file:[WILDCARD]not-a-valid-filename.ts" from "."
diff --git a/cli/tests/error_011_bad_module_specifier.ts b/cli/tests/error_011_bad_module_specifier.ts
new file mode 100644
index 000000000..e74d6b821
--- /dev/null
+++ b/cli/tests/error_011_bad_module_specifier.ts
@@ -0,0 +1,2 @@
+// eslint-disable-next-line
+import * as badModule from "bad-module.ts";
diff --git a/cli/tests/error_011_bad_module_specifier.ts.out b/cli/tests/error_011_bad_module_specifier.ts.out
new file mode 100644
index 000000000..0a90cd32c
--- /dev/null
+++ b/cli/tests/error_011_bad_module_specifier.ts.out
@@ -0,0 +1,5 @@
+[WILDCARD]error: Uncaught ImportPrefixMissing: relative import path "bad-module.ts" not prefixed with / or ./ or ../
+[WILDCARD]dispatch_json.ts:[WILDCARD]
+ at DenoError ([WILDCARD]errors.ts:[WILDCARD])
+ at unwrapResponse ([WILDCARD]dispatch_json.ts:[WILDCARD])
+ at sendAsync[WILDCARD] ([WILDCARD]dispatch_json.ts:[WILDCARD])
diff --git a/cli/tests/error_012_bad_dynamic_import_specifier.ts b/cli/tests/error_012_bad_dynamic_import_specifier.ts
new file mode 100644
index 000000000..0420a80bf
--- /dev/null
+++ b/cli/tests/error_012_bad_dynamic_import_specifier.ts
@@ -0,0 +1,4 @@
+(async (): Promise<void> => {
+ // eslint-disable-next-line
+ const badModule = await import("bad-module.ts");
+})();
diff --git a/cli/tests/error_012_bad_dynamic_import_specifier.ts.out b/cli/tests/error_012_bad_dynamic_import_specifier.ts.out
new file mode 100644
index 000000000..0a90cd32c
--- /dev/null
+++ b/cli/tests/error_012_bad_dynamic_import_specifier.ts.out
@@ -0,0 +1,5 @@
+[WILDCARD]error: Uncaught ImportPrefixMissing: relative import path "bad-module.ts" not prefixed with / or ./ or ../
+[WILDCARD]dispatch_json.ts:[WILDCARD]
+ at DenoError ([WILDCARD]errors.ts:[WILDCARD])
+ at unwrapResponse ([WILDCARD]dispatch_json.ts:[WILDCARD])
+ at sendAsync[WILDCARD] ([WILDCARD]dispatch_json.ts:[WILDCARD])
diff --git a/cli/tests/error_013_missing_script.out b/cli/tests/error_013_missing_script.out
new file mode 100644
index 000000000..9836c361f
--- /dev/null
+++ b/cli/tests/error_013_missing_script.out
@@ -0,0 +1 @@
+Cannot resolve module "[WILDCARD]missing_file_name"
diff --git a/cli/tests/error_014_catch_dynamic_import_error.js b/cli/tests/error_014_catch_dynamic_import_error.js
new file mode 100644
index 000000000..ad3735fc3
--- /dev/null
+++ b/cli/tests/error_014_catch_dynamic_import_error.js
@@ -0,0 +1,31 @@
+(async () => {
+ try {
+ await import("does not exist");
+ } catch (err) {
+ console.log("Caught direct dynamic import error.");
+ console.log(err);
+ }
+
+ try {
+ await import("./subdir/indirect_import_error.js");
+ } catch (err) {
+ console.log("Caught indirect direct dynamic import error.");
+ console.log(err);
+ }
+
+ try {
+ await import("./subdir/throws.js");
+ } catch (err) {
+ console.log("Caught error thrown by dynamically imported module.");
+ console.log(err);
+ }
+
+ try {
+ await import("./subdir/indirect_throws.js");
+ } catch (err) {
+ console.log(
+ "Caught error thrown indirectly by dynamically imported module."
+ );
+ console.log(err);
+ }
+})();
diff --git a/cli/tests/error_014_catch_dynamic_import_error.js.out b/cli/tests/error_014_catch_dynamic_import_error.js.out
new file mode 100644
index 000000000..c18b680a1
--- /dev/null
+++ b/cli/tests/error_014_catch_dynamic_import_error.js.out
@@ -0,0 +1,12 @@
+Caught direct dynamic import error.
+TypeError: relative import path "does not exist" not prefixed with / or ./ or ../
+
+Caught indirect dynamic import error.
+TypeError: relative import path "does not exist either" not prefixed with / or ./ or ../
+
+Caught error thrown by dynamically imported module.
+Error: An error
+ at file:///[WILDCARD]tests/subdir/throws.js:5:7
+Caught error thrown indirectly by dynamically imported module.
+Error: An error
+ at file:///[WILDCARD]tests/subdir/throws.js:5:7
diff --git a/cli/tests/error_015_dynamic_import_permissions.js b/cli/tests/error_015_dynamic_import_permissions.js
new file mode 100644
index 000000000..3460ca787
--- /dev/null
+++ b/cli/tests/error_015_dynamic_import_permissions.js
@@ -0,0 +1,3 @@
+(async () => {
+ await import("http://localhost:4545/tests/subdir/mod4.js");
+})();
diff --git a/cli/tests/error_015_dynamic_import_permissions.out b/cli/tests/error_015_dynamic_import_permissions.out
new file mode 100644
index 000000000..90ccd0d1a
--- /dev/null
+++ b/cli/tests/error_015_dynamic_import_permissions.out
@@ -0,0 +1 @@
+error: Uncaught TypeError: permission denied
diff --git a/cli/tests/error_016_dynamic_import_permissions2.js b/cli/tests/error_016_dynamic_import_permissions2.js
new file mode 100644
index 000000000..71c70815c
--- /dev/null
+++ b/cli/tests/error_016_dynamic_import_permissions2.js
@@ -0,0 +1,5 @@
+// If this is executed with --allow-net but not --allow-read the following
+// import should cause a permission denied error.
+(async () => {
+ await import("http://localhost:4545/tests/subdir/evil_remote_import.js");
+})();
diff --git a/cli/tests/error_016_dynamic_import_permissions2.out b/cli/tests/error_016_dynamic_import_permissions2.out
new file mode 100644
index 000000000..f52186481
--- /dev/null
+++ b/cli/tests/error_016_dynamic_import_permissions2.out
@@ -0,0 +1,2 @@
+[WILDCARD]
+error: Uncaught TypeError: permission denied
diff --git a/cli/tests/error_stack.ts b/cli/tests/error_stack.ts
new file mode 100644
index 000000000..f2125d662
--- /dev/null
+++ b/cli/tests/error_stack.ts
@@ -0,0 +1,10 @@
+function foo(): never {
+ throw new Error("foo");
+}
+
+try {
+ foo();
+} catch (e) {
+ console.log(e);
+ throw e;
+}
diff --git a/cli/tests/error_stack.ts.out b/cli/tests/error_stack.ts.out
new file mode 100644
index 000000000..2bb629e2d
--- /dev/null
+++ b/cli/tests/error_stack.ts.out
@@ -0,0 +1,6 @@
+[WILDCARD]Error: foo
+ at foo ([WILDCARD]tests/error_stack.ts:2:9)
+ at [WILDCARD]tests/error_stack.ts:6:3
+error: Uncaught Error: foo
+ at foo ([WILDCARD]tests/error_stack.ts:2:9)
+ at [WILDCARD]tests/error_stack.ts:6:3
diff --git a/cli/tests/error_syntax.js b/cli/tests/error_syntax.js
new file mode 100644
index 000000000..0c0c09855
--- /dev/null
+++ b/cli/tests/error_syntax.js
@@ -0,0 +1,3 @@
+
+// prettier-ignore
+(the following is a syntax error ^^ ! )
diff --git a/cli/tests/error_syntax.js.out b/cli/tests/error_syntax.js.out
new file mode 100644
index 000000000..6253f3dd5
--- /dev/null
+++ b/cli/tests/error_syntax.js.out
@@ -0,0 +1,6 @@
+error: Uncaught SyntaxError: Unexpected identifier
+[WILDCARD]tests/error_syntax.js:3:6
+
+3 (the following is a syntax error ^^ ! )
+ ~~~~~~~~~
+
diff --git a/cli/tests/error_type_definitions.ts b/cli/tests/error_type_definitions.ts
new file mode 100644
index 000000000..ceb11787e
--- /dev/null
+++ b/cli/tests/error_type_definitions.ts
@@ -0,0 +1,5 @@
+// @deno-types="./type_definitions/bar.d.ts"
+import { Bar } from "./type_definitions/bar.js";
+
+const bar = new Bar();
+console.log(bar);
diff --git a/cli/tests/error_type_definitions.ts.out b/cli/tests/error_type_definitions.ts.out
new file mode 100644
index 000000000..d0b599862
--- /dev/null
+++ b/cli/tests/error_type_definitions.ts.out
@@ -0,0 +1,5 @@
+[WILDCARD]error: Uncaught ImportPrefixMissing: relative import path "baz" not prefixed with / or ./ or ../
+[WILDCARD]dispatch_json.ts:[WILDCARD]
+ at DenoError ([WILDCARD]errors.ts:[WILDCARD])
+ at unwrapResponse ([WILDCARD]dispatch_json.ts:[WILDCARD])
+ at sendAsync[WILDCARD] ([WILDCARD]dispatch_json.ts:[WILDCARD])
diff --git a/cli/tests/error_worker_dynamic.ts b/cli/tests/error_worker_dynamic.ts
new file mode 100644
index 000000000..16fadf573
--- /dev/null
+++ b/cli/tests/error_worker_dynamic.ts
@@ -0,0 +1,3 @@
+const b = new Blob(['throw new Error("hello");']);
+const blobURL = URL.createObjectURL(b);
+new Worker(blobURL);
diff --git a/cli/tests/error_worker_dynamic.ts.out b/cli/tests/error_worker_dynamic.ts.out
new file mode 100644
index 000000000..4bea7b656
--- /dev/null
+++ b/cli/tests/error_worker_dynamic.ts.out
@@ -0,0 +1,3 @@
+[WILDCARD]error: Uncaught Error: hello
+[WILDCARD]__anonymous__:1:7
+ at [WILDCARD]__anonymous__:1:7
diff --git a/cli/tests/esm_imports_a.js b/cli/tests/esm_imports_a.js
new file mode 100644
index 000000000..673cd9aa3
--- /dev/null
+++ b/cli/tests/esm_imports_a.js
@@ -0,0 +1,3 @@
+import { retb } from "./esm_imports_b.js";
+
+if (retb() != "b") throw Error();
diff --git a/cli/tests/esm_imports_b.js b/cli/tests/esm_imports_b.js
new file mode 100644
index 000000000..321dfc05a
--- /dev/null
+++ b/cli/tests/esm_imports_b.js
@@ -0,0 +1,3 @@
+export function retb() {
+ return "b";
+}
diff --git a/cli/tests/exec_path.ts b/cli/tests/exec_path.ts
new file mode 100644
index 000000000..b70b23237
--- /dev/null
+++ b/cli/tests/exec_path.ts
@@ -0,0 +1 @@
+console.log(Deno.execPath());
diff --git a/cli/tests/exit_error42.ts b/cli/tests/exit_error42.ts
new file mode 100644
index 000000000..e4db41f3a
--- /dev/null
+++ b/cli/tests/exit_error42.ts
@@ -0,0 +1,3 @@
+console.log("before");
+Deno.exit(42);
+console.log("after");
diff --git a/cli/tests/exit_error42.ts.out b/cli/tests/exit_error42.ts.out
new file mode 100644
index 000000000..90be1f305
--- /dev/null
+++ b/cli/tests/exit_error42.ts.out
@@ -0,0 +1 @@
+before
diff --git a/cli/tests/fetch_deps.ts b/cli/tests/fetch_deps.ts
new file mode 100644
index 000000000..e6ef8854e
--- /dev/null
+++ b/cli/tests/fetch_deps.ts
@@ -0,0 +1,14 @@
+// Run ./tools/http_server.py alongside this script in order for this test to run.
+import { assert } from "../std/testing/asserts.ts";
+
+// TODO Top level await https://github.com/denoland/deno/issues/471
+async function main(): Promise<void> {
+ const response = await fetch("http://localhost:4545/package.json");
+ const json = await response.json();
+ const deps = Object.keys(json.devDependencies);
+ console.log("Deno JS Deps");
+ console.log(deps.map((d): string => `* ${d}`).join("\n"));
+ assert(deps.includes("typescript"));
+}
+
+main();
diff --git a/cli/tests/hello.txt b/cli/tests/hello.txt
new file mode 100644
index 000000000..6769dd60b
--- /dev/null
+++ b/cli/tests/hello.txt
@@ -0,0 +1 @@
+Hello world! \ No newline at end of file
diff --git a/cli/tests/https_import.ts b/cli/tests/https_import.ts
new file mode 100644
index 000000000..faaf2175f
--- /dev/null
+++ b/cli/tests/https_import.ts
@@ -0,0 +1,5 @@
+// TODO Use https://localhost:4555/ but we need more infrastructure to
+// support verifying self-signed certificates.
+import { printHello } from "https://gist.githubusercontent.com/ry/f12b2aa3409e6b52645bc346a9e22929/raw/79318f239f51d764384a8bded8d7c6a833610dde/print_hello.ts";
+
+printHello();
diff --git a/cli/tests/https_import.ts.out b/cli/tests/https_import.ts.out
new file mode 100644
index 000000000..e965047ad
--- /dev/null
+++ b/cli/tests/https_import.ts.out
@@ -0,0 +1 @@
+Hello
diff --git a/cli/tests/if_main.ts b/cli/tests/if_main.ts
new file mode 100644
index 000000000..b47066b2d
--- /dev/null
+++ b/cli/tests/if_main.ts
@@ -0,0 +1,7 @@
+if (window.location.toString() == import.meta.url) {
+ console.log("main");
+} else {
+ console.log("import.meta.url", import.meta.url);
+ console.log("window.location", window.location.toString());
+ throw Error("not main");
+}
diff --git a/cli/tests/if_main.ts.out b/cli/tests/if_main.ts.out
new file mode 100644
index 000000000..ba2906d06
--- /dev/null
+++ b/cli/tests/if_main.ts.out
@@ -0,0 +1 @@
+main
diff --git a/cli/tests/import_meta.ts b/cli/tests/import_meta.ts
new file mode 100644
index 000000000..d111059ea
--- /dev/null
+++ b/cli/tests/import_meta.ts
@@ -0,0 +1,3 @@
+console.log("import_meta", import.meta.url, import.meta.main);
+
+import "./import_meta2.ts";
diff --git a/cli/tests/import_meta.ts.out b/cli/tests/import_meta.ts.out
new file mode 100644
index 000000000..f38aa98ea
--- /dev/null
+++ b/cli/tests/import_meta.ts.out
@@ -0,0 +1,2 @@
+import_meta2 [WILDCARD]import_meta2.ts false
+import_meta [WILDCARD]import_meta.ts true
diff --git a/cli/tests/import_meta2.ts b/cli/tests/import_meta2.ts
new file mode 100644
index 000000000..7f59a5a46
--- /dev/null
+++ b/cli/tests/import_meta2.ts
@@ -0,0 +1 @@
+console.log("import_meta2", import.meta.url, import.meta.main);
diff --git a/cli/tests/importmaps/import_map.json b/cli/tests/importmaps/import_map.json
new file mode 100644
index 000000000..601874aab
--- /dev/null
+++ b/cli/tests/importmaps/import_map.json
@@ -0,0 +1,14 @@
+{
+ "imports": {
+ "moment": "./moment/moment.ts",
+ "moment/": "./moment/",
+ "lodash": "./lodash/lodash.ts",
+ "lodash/": "./lodash/",
+ "https://www.unpkg.com/vue/dist/vue.runtime.esm.js": "./vue.ts"
+ },
+ "scopes": {
+ "scope/": {
+ "moment": "./scoped_moment.ts"
+ }
+ }
+}
diff --git a/cli/tests/importmaps/lodash/lodash.ts b/cli/tests/importmaps/lodash/lodash.ts
new file mode 100644
index 000000000..2ec04ed3c
--- /dev/null
+++ b/cli/tests/importmaps/lodash/lodash.ts
@@ -0,0 +1 @@
+console.log("Hello from remapped lodash!");
diff --git a/cli/tests/importmaps/lodash/other_file.ts b/cli/tests/importmaps/lodash/other_file.ts
new file mode 100644
index 000000000..714adae3f
--- /dev/null
+++ b/cli/tests/importmaps/lodash/other_file.ts
@@ -0,0 +1 @@
+console.log("Hello from remapped lodash dir!");
diff --git a/cli/tests/importmaps/moment/moment.ts b/cli/tests/importmaps/moment/moment.ts
new file mode 100644
index 000000000..2b54a431e
--- /dev/null
+++ b/cli/tests/importmaps/moment/moment.ts
@@ -0,0 +1 @@
+console.log("Hello from remapped moment!");
diff --git a/cli/tests/importmaps/moment/other_file.ts b/cli/tests/importmaps/moment/other_file.ts
new file mode 100644
index 000000000..24f3a0226
--- /dev/null
+++ b/cli/tests/importmaps/moment/other_file.ts
@@ -0,0 +1 @@
+console.log("Hello from remapped moment dir!");
diff --git a/cli/tests/importmaps/scope/scoped.ts b/cli/tests/importmaps/scope/scoped.ts
new file mode 100644
index 000000000..9a0b5d8e3
--- /dev/null
+++ b/cli/tests/importmaps/scope/scoped.ts
@@ -0,0 +1,2 @@
+import "moment";
+console.log("Hello from scoped!");
diff --git a/cli/tests/importmaps/scoped_moment.ts b/cli/tests/importmaps/scoped_moment.ts
new file mode 100644
index 000000000..9f67f88d4
--- /dev/null
+++ b/cli/tests/importmaps/scoped_moment.ts
@@ -0,0 +1 @@
+console.log("Hello from scoped moment!");
diff --git a/cli/tests/importmaps/test.ts b/cli/tests/importmaps/test.ts
new file mode 100644
index 000000000..9b09e9953
--- /dev/null
+++ b/cli/tests/importmaps/test.ts
@@ -0,0 +1,6 @@
+import "moment";
+import "moment/other_file.ts";
+import "lodash";
+import "lodash/other_file.ts";
+import "https://www.unpkg.com/vue/dist/vue.runtime.esm.js";
+import "./scope/scoped.ts";
diff --git a/cli/tests/importmaps/vue.ts b/cli/tests/importmaps/vue.ts
new file mode 100644
index 000000000..76dbe1917
--- /dev/null
+++ b/cli/tests/importmaps/vue.ts
@@ -0,0 +1 @@
+console.log("Hello from remapped Vue!");
diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs
new file mode 100644
index 000000000..4271036d1
--- /dev/null
+++ b/cli/tests/integration_tests.rs
@@ -0,0 +1,578 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+#[macro_use]
+extern crate lazy_static;
+extern crate tempfile;
+mod util;
+use util::*;
+
+#[test]
+fn benchmark_test() {
+ run_python_script("tools/benchmark_test.py")
+}
+
+#[test]
+fn deno_dir_test() {
+ let g = http_server();
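+  // `g` is the guard returned by http_server(); keeping it alive (and dropping
+  // it at the end) ensures the test HTTP server is available while the test runs.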
+ run_python_script("tools/deno_dir_test.py");
+ drop(g);
+}
+
+// TODO(#2933): Rewrite this test in rust.
+#[test]
+fn fetch_test() {
+ let g = http_server();
+ run_python_script("tools/fetch_test.py");
+ drop(g);
+}
+
+// TODO(#2933): Rewrite this test in rust.
+#[test]
+fn fmt_test() {
+ let g = http_server();
+ run_python_script("tools/fmt_test.py");
+ drop(g);
+}
+
+#[test]
+fn js_unit_tests() {
+ let g = http_server();
+ let mut deno = deno_cmd()
+ .current_dir(root_path())
+ .arg("run")
+ .arg("--reload")
+ .arg("--allow-run")
+ .arg("--allow-env")
+ .arg("cli/js/unit_test_runner.ts")
+ .spawn()
+ .expect("failed to spawn script");
+ let status = deno.wait().expect("failed to wait for the child process");
+ assert_eq!(Some(0), status.code());
+ assert!(status.success());
+ drop(g);
+}
+
+// TODO(#2933): Rewrite this test in rust.
+#[test]
+fn repl_test() {
+ run_python_script("tools/repl_test.py")
+}
+
+#[test]
+fn setup_test() {
+ run_python_script("tools/setup_test.py")
+}
+
+#[test]
+fn target_test() {
+ run_python_script("tools/target_test.py")
+}
+
+#[test]
+fn util_test() {
+ run_python_script("tools/util_test.py")
+}
+
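+// Generates a #[test] function that builds a CheckOutputIntegrationTest from the
+// given fields (the remaining fields come from Default) and runs it.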
+macro_rules! itest(
+ ($name:ident {$( $key:ident: $value:expr,)*}) => {
+ #[test]
+ fn $name() {
+ (CheckOutputIntegrationTest {
+ $(
+ $key: $value,
+ )*
+ .. Default::default()
+ }).run()
+ }
+ }
+);
+
+itest!(_001_hello {
+ args: "run --reload 001_hello.js",
+ output: "001_hello.js.out",
+});
+
+itest!(_002_hello {
+ args: "run --reload 002_hello.ts",
+ output: "002_hello.ts.out",
+});
+
+itest!(_003_relative_import {
+ args: "run --reload 003_relative_import.ts",
+ output: "003_relative_import.ts.out",
+});
+
+itest!(_004_set_timeout {
+ args: "run --reload 004_set_timeout.ts",
+ output: "004_set_timeout.ts.out",
+});
+
+itest!(_005_more_imports {
+ args: "run --reload 005_more_imports.ts",
+ output: "005_more_imports.ts.out",
+});
+
+itest!(_006_url_imports {
+ args: "run --reload 006_url_imports.ts",
+ output: "006_url_imports.ts.out",
+ http_server: true,
+});
+
+itest!(_012_async {
+ args: "run --reload 012_async.ts",
+ output: "012_async.ts.out",
+});
+
+itest!(_013_dynamic_import {
+ args: "013_dynamic_import.ts --reload --allow-read",
+ output: "013_dynamic_import.ts.out",
+});
+
+itest!(_014_duplicate_import {
+ args: "014_duplicate_import.ts --reload --allow-read",
+ output: "014_duplicate_import.ts.out",
+});
+
+itest!(_015_duplicate_parallel_import {
+ args: "015_duplicate_parallel_import.js --reload --allow-read",
+ output: "015_duplicate_parallel_import.js.out",
+});
+
+itest!(_016_double_await {
+ args: "run --allow-read --reload 016_double_await.ts",
+ output: "016_double_await.ts.out",
+});
+
+itest!(_017_import_redirect {
+ args: "run --reload 017_import_redirect.ts",
+ output: "017_import_redirect.ts.out",
+});
+
+itest!(_018_async_catch {
+ args: "run --reload 018_async_catch.ts",
+ output: "018_async_catch.ts.out",
+});
+
+itest!(_019_media_types {
+ args: "run --reload 019_media_types.ts",
+ output: "019_media_types.ts.out",
+ http_server: true,
+});
+
+itest!(_020_json_modules {
+ args: "run --reload 020_json_modules.ts",
+ output: "020_json_modules.ts.out",
+});
+
+itest!(_021_mjs_modules {
+ args: "run --reload 021_mjs_modules.ts",
+ output: "021_mjs_modules.ts.out",
+});
+
+itest!(_022_info_flag_script {
+ args: "info http://127.0.0.1:4545/cli/tests/019_media_types.ts",
+ output: "022_info_flag_script.out",
+ http_server: true,
+});
+
+itest!(_023_no_ext_with_headers {
+ args: "run --reload 023_no_ext_with_headers",
+ output: "023_no_ext_with_headers.out",
+});
+
+// FIXME(bartlomieju): this test should use remote file
+// itest!(_024_import_no_ext_with_headers {
+// args: "run --reload 024_import_no_ext_with_headers.ts",
+// output: "024_import_no_ext_with_headers.ts.out",
+// });
+
+itest!(_025_hrtime {
+ args: "run --allow-hrtime --reload 025_hrtime.ts",
+ output: "025_hrtime.ts.out",
+});
+
+itest!(_025_reload_js_type_error {
+ args: "run --reload 025_reload_js_type_error.js",
+ output: "025_reload_js_type_error.js.out",
+});
+
+itest!(_026_redirect_javascript {
+ args: "run --reload 026_redirect_javascript.js",
+ output: "026_redirect_javascript.js.out",
+ http_server: true,
+});
+
+itest!(_026_workers {
+ args: "run --reload 026_workers.ts",
+ output: "026_workers.ts.out",
+});
+
+itest!(_027_redirect_typescript {
+ args: "run --reload 027_redirect_typescript.ts",
+ output: "027_redirect_typescript.ts.out",
+ http_server: true,
+});
+
+itest!(_028_args {
+ args: "run --reload 028_args.ts --arg1 val1 --arg2=val2 -- arg3 arg4",
+ output: "028_args.ts.out",
+});
+
+itest!(_029_eval {
+ args: "eval console.log(\"hello\")",
+ output: "029_eval.out",
+});
+
+itest!(_030_xeval {
+ args: "xeval console.log($.toUpperCase())",
+ input: Some("a\nb\n\nc"),
+ output: "030_xeval.out",
+});
+
+itest!(_031_xeval_replvar {
+ args: "xeval -I val console.log(val.toUpperCase());",
+ input: Some("a\nb\n\nc"),
+ output: "031_xeval_replvar.out",
+});
+
+itest!(_032_xeval_delim {
+ args: "xeval -d DELIM console.log($.toUpperCase());",
+ input: Some("aDELIMbDELIMDELIMc"),
+ output: "032_xeval_delim.out",
+});
+
+itest!(_033_import_map {
+ args:
+ "run --reload --importmap=importmaps/import_map.json importmaps/test.ts",
+ output: "033_import_map.out",
+});
+
+itest!(_034_onload {
+ args: "run --reload 034_onload/main.ts",
+ output: "034_onload.out",
+});
+
+itest!(_035_no_fetch_flag {
+ args:
+ "--reload --no-fetch http://127.0.0.1:4545/cli/tests/019_media_types.ts",
+ output: "035_no_fetch_flag.out",
+ exit_code: 1,
+ check_stderr: true,
+ http_server: true,
+});
+
+itest!(_036_import_map_fetch {
+ args:
+ "fetch --reload --importmap=importmaps/import_map.json importmaps/test.ts",
+ output: "036_import_map_fetch.out",
+});
+
+itest!(_037_current_thread {
+ args: "run --current-thread --reload 034_onload/main.ts",
+ output: "034_onload.out",
+});
+
+itest!(_038_checkjs {
+  // Check that the JS file is run through the TS compiler.
+ args: "run --reload --config 038_checkjs.tsconfig.json 038_checkjs.js",
+ check_stderr: true,
+ exit_code: 1,
+ output: "038_checkjs.js.out",
+});
+
+itest!(_039_worker_deno_ns {
+ args: "run --reload 039_worker_deno_ns.ts",
+ output: "039_worker_deno_ns.ts.out",
+});
+
+itest!(_040_worker_blob {
+ args: "run --reload 040_worker_blob.ts",
+ output: "040_worker_blob.ts.out",
+});
+
+itest!(_041_dyn_import_eval {
+ args: "eval import('./subdir/mod4.js').then(console.log)",
+ output: "041_dyn_import_eval.out",
+});
+
+itest!(_041_info_flag {
+ args: "info",
+ output: "041_info_flag.out",
+});
+
+itest!(_042_dyn_import_evalcontext {
+ args: "run --allow-read --reload 042_dyn_import_evalcontext.ts",
+ output: "042_dyn_import_evalcontext.ts.out",
+});
+
+itest!(_044_bad_resource {
+ args: "run --reload --allow-read 044_bad_resource.ts",
+ output: "044_bad_resource.ts.out",
+ check_stderr: true,
+ exit_code: 1,
+});
+
+itest!(_045_proxy {
+ args: "run --allow-net --allow-env --allow-run --reload 045_proxy_test.ts",
+ output: "045_proxy_test.ts.out",
+});
+
+itest!(_046_tsx {
+ args: "run --reload 046_jsx_test.tsx",
+ output: "046_jsx_test.tsx.out",
+});
+
+itest!(_047_jsx {
+ args: "run --reload 047_jsx_test.jsx",
+ output: "047_jsx_test.jsx.out",
+});
+
+itest!(async_error {
+ exit_code: 1,
+ args: "run --reload async_error.ts",
+ check_stderr: true,
+ output: "async_error.ts.out",
+});
+
+itest!(circular1 {
+ args: "run --reload circular1.js",
+ output: "circular1.js.out",
+});
+
+itest!(config {
+ args: "run --reload --config config.tsconfig.json config.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "config.ts.out",
+});
+
+itest!(error_001 {
+ args: "run --reload error_001.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_001.ts.out",
+});
+
+itest!(error_002 {
+ args: "run --reload error_002.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_002.ts.out",
+});
+
+itest!(error_003_typescript {
+ args: "run --reload error_003_typescript.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_003_typescript.ts.out",
+});
+
+// Supposing that we've already attempted to run error_003_typescript.ts,
+// we want to make sure that JS wasn't emitted. Running again without the
+// --reload flag should result in the same output.
+// https://github.com/denoland/deno/issues/2436
+itest!(error_003_typescript2 {
+ args: "run error_003_typescript.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_003_typescript.ts.out",
+});
+
+itest!(error_004_missing_module {
+ args: "run --reload error_004_missing_module.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_004_missing_module.ts.out",
+});
+
+itest!(error_005_missing_dynamic_import {
+ args: "run --reload error_005_missing_dynamic_import.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_005_missing_dynamic_import.ts.out",
+});
+
+itest!(error_006_import_ext_failure {
+ args: "run --reload error_006_import_ext_failure.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_006_import_ext_failure.ts.out",
+});
+
+itest!(error_007_any {
+ args: "run --reload error_007_any.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_007_any.ts.out",
+});
+
+itest!(error_008_checkjs {
+ args: "run --reload error_008_checkjs.js",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_008_checkjs.js.out",
+});
+
+itest!(error_011_bad_module_specifier {
+ args: "run --reload error_011_bad_module_specifier.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_011_bad_module_specifier.ts.out",
+});
+
+itest!(error_012_bad_dynamic_import_specifier {
+ args: "run --reload error_012_bad_dynamic_import_specifier.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_012_bad_dynamic_import_specifier.ts.out",
+});
+
+itest!(error_013_missing_script {
+ args: "run --reload missing_file_name",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_013_missing_script.out",
+});
+
+itest!(error_014_catch_dynamic_import_error {
+ args: "error_014_catch_dynamic_import_error.js --reload --allow-read",
+ output: "error_014_catch_dynamic_import_error.js.out",
+ exit_code: 1,
+});
+
+itest!(error_015_dynamic_import_permissions {
+ args: "--reload --no-prompt error_015_dynamic_import_permissions.js",
+ output: "error_015_dynamic_import_permissions.out",
+ check_stderr: true,
+ exit_code: 1,
+ http_server: true,
+});
+
+// We have an allow-net flag but not allow-read; it should still result in an error.
+itest!(error_016_dynamic_import_permissions2 {
+ args:
+ "--no-prompt --reload --allow-net error_016_dynamic_import_permissions2.js",
+ output: "error_016_dynamic_import_permissions2.out",
+ check_stderr: true,
+ exit_code: 1,
+ http_server: true,
+});
+
+itest!(error_stack {
+ args: "run --reload error_stack.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_stack.ts.out",
+});
+
+itest!(error_syntax {
+ args: "run --reload error_syntax.js",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_syntax.js.out",
+});
+
+itest!(error_type_definitions {
+ args: "run --reload error_type_definitions.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_type_definitions.ts.out",
+});
+
+itest!(error_worker_dynamic {
+ args: "run --reload error_worker_dynamic.ts",
+ check_stderr: true,
+ exit_code: 1,
+ output: "error_worker_dynamic.ts.out",
+});
+
+itest!(exit_error42 {
+ exit_code: 42,
+ args: "run --reload exit_error42.ts",
+ output: "exit_error42.ts.out",
+});
+
+itest!(https_import {
+ args: "run --reload https_import.ts",
+ output: "https_import.ts.out",
+});
+
+itest!(if_main {
+ args: "run --reload if_main.ts",
+ output: "if_main.ts.out",
+});
+
+itest!(import_meta {
+ args: "run --reload import_meta.ts",
+ output: "import_meta.ts.out",
+});
+
+itest!(seed_random {
+ args: "run --seed=100 seed_random.js",
+ output: "seed_random.js.out",
+});
+
+itest!(type_definitions {
+ args: "run --reload type_definitions.ts",
+ output: "type_definitions.ts.out",
+});
+
+itest!(types {
+ args: "types",
+ output: "types.out",
+});
+
+itest!(unbuffered_stderr {
+ args: "run --reload unbuffered_stderr.ts",
+ check_stderr: true,
+ output: "unbuffered_stderr.ts.out",
+});
+
+itest!(unbuffered_stdout {
+ args: "run --reload unbuffered_stdout.ts",
+ output: "unbuffered_stdout.ts.out",
+});
+
+itest!(v8_flags {
+ args: "run --v8-flags=--expose-gc v8_flags.js",
+ output: "v8_flags.js.out",
+});
+
+itest!(v8_help {
+ args: "--v8-options",
+ output: "v8_help.out",
+});
+
+itest!(version {
+ args: "version",
+ output: "version.out",
+});
+
+itest!(version_long_flag {
+ args: "--version",
+ output: "version.out",
+});
+
+itest!(version_short_flag {
+ args: "-v",
+ output: "version.out",
+});
+
+itest!(wasm {
+ args: "run wasm.ts",
+ output: "wasm.ts.out",
+});
+
+itest!(wasm_async {
+ args: "wasm_async.js",
+ output: "wasm_async.out",
+});
+
+itest!(top_level_await {
+ args: "--allow-read top_level_await.js",
+ output: "top_level_await.out",
+});
+
+itest!(top_level_await_ts {
+ args: "--allow-read top_level_await.ts",
+ output: "top_level_await.out",
+});
diff --git a/cli/tests/is_tty.ts b/cli/tests/is_tty.ts
new file mode 100644
index 000000000..2e3fdb49f
--- /dev/null
+++ b/cli/tests/is_tty.ts
@@ -0,0 +1 @@
+console.log(Deno.isTTY().stdin);
diff --git a/cli/tests/no_color.js b/cli/tests/no_color.js
new file mode 100644
index 000000000..cea11a52f
--- /dev/null
+++ b/cli/tests/no_color.js
@@ -0,0 +1 @@
+console.log("noColor", Deno.noColor);
diff --git a/cli/tests/seed_random.js b/cli/tests/seed_random.js
new file mode 100644
index 000000000..7f6e336df
--- /dev/null
+++ b/cli/tests/seed_random.js
@@ -0,0 +1,11 @@
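+// Run via `deno run --seed=100 seed_random.js` (see the seed_random itest); the
+// seed makes Math.random() and crypto.getRandomValues() deterministic.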
+for (let i = 0; i < 10; ++i) {
+ console.log(Math.random());
+}
+
+const arr = new Uint8Array(32);
+
+crypto.getRandomValues(arr);
+console.log(arr);
+
+crypto.getRandomValues(arr);
+console.log(arr);
diff --git a/cli/tests/seed_random.js.out b/cli/tests/seed_random.js.out
new file mode 100644
index 000000000..c65e40f97
--- /dev/null
+++ b/cli/tests/seed_random.js.out
@@ -0,0 +1,12 @@
+0.858562739044346
+0.8973397944553141
+0.15335012655691727
+0.36867387434349963
+0.3591039342838782
+0.7044499748617652
+0.7461423057751548
+0.3824611207183364
+0.5950178237266042
+0.22440633214343908
+Uint8Array [ 116, 125, 169, 69, 106, 231, 99, 39, 148, 188, 211, 41, 46, 211, 236, 141, 55, 10, 214, 63, 118, 230, 218, 249, 125, 161, 137, 110, 214, 36, 159, 154 ]
+Uint8Array [ 248, 21, 21, 9, 41, 0, 71, 124, 244, 209, 252, 151, 7, 10, 168, 250, 84, 170, 243, 140, 53, 47, 99, 212, 18, 146, 68, 48, 66, 222, 67, 112 ]
diff --git a/cli/tests/subdir/auto_print_hello.ts b/cli/tests/subdir/auto_print_hello.ts
new file mode 100644
index 000000000..5efa72e03
--- /dev/null
+++ b/cli/tests/subdir/auto_print_hello.ts
@@ -0,0 +1,2 @@
+console.log("hello!");
+export default {};
diff --git a/cli/tests/subdir/bench_worker.ts b/cli/tests/subdir/bench_worker.ts
new file mode 100644
index 000000000..094cefb80
--- /dev/null
+++ b/cli/tests/subdir/bench_worker.ts
@@ -0,0 +1,20 @@
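+// Worker used by the benchmark scripts: it replies to { cmdId, action, data }
+// messages, where `action` selects a canned response (0), an echo (1), a ping
+// reply (2), or closing the worker (3).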
+onmessage = function(e): void {
+ const { cmdId, action, data } = e.data;
+ switch (action) {
+ case 0: // Static response
+ postMessage({
+ cmdId,
+ data: "HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World\n"
+ });
+ break;
+ case 1: // Respond with request data
+ postMessage({ cmdId, data });
+ break;
+ case 2: // Ping
+ postMessage({ cmdId });
+ break;
+ case 3: // Close
+ workerClose();
+ break;
+ }
+};
diff --git a/cli/tests/subdir/config.json b/cli/tests/subdir/config.json
new file mode 100644
index 000000000..01c3b5e79
--- /dev/null
+++ b/cli/tests/subdir/config.json
@@ -0,0 +1,6 @@
+{
+ "foo": {
+ "bar": true,
+ "baz": ["qat", 1]
+ }
+}
diff --git a/cli/tests/subdir/evil_remote_import.js b/cli/tests/subdir/evil_remote_import.js
new file mode 100644
index 000000000..4ff7d1b97
--- /dev/null
+++ b/cli/tests/subdir/evil_remote_import.js
@@ -0,0 +1,4 @@
+// We expect to get a permission denied error if we dynamically
+// import this module without --allow-read.
+export * from "file:///c:/etc/passwd";
+console.log("Hello from evil_remote_import.js");
diff --git a/cli/tests/subdir/form_urlencoded.txt b/cli/tests/subdir/form_urlencoded.txt
new file mode 100644
index 000000000..70e087c20
--- /dev/null
+++ b/cli/tests/subdir/form_urlencoded.txt
@@ -0,0 +1 @@
+field_1=Hi&field_2=%3CDeno%3E \ No newline at end of file
diff --git a/cli/tests/subdir/indirect_import_error.js b/cli/tests/subdir/indirect_import_error.js
new file mode 100644
index 000000000..84011d291
--- /dev/null
+++ b/cli/tests/subdir/indirect_import_error.js
@@ -0,0 +1 @@
+export * from "does not exist either";
diff --git a/cli/tests/subdir/indirect_throws.js b/cli/tests/subdir/indirect_throws.js
new file mode 100644
index 000000000..e1810a66c
--- /dev/null
+++ b/cli/tests/subdir/indirect_throws.js
@@ -0,0 +1 @@
+export * from "./throws.js";
diff --git a/cli/tests/subdir/mismatch_ext.ts b/cli/tests/subdir/mismatch_ext.ts
new file mode 100644
index 000000000..e67d2a017
--- /dev/null
+++ b/cli/tests/subdir/mismatch_ext.ts
@@ -0,0 +1 @@
+export const loaded = true;
diff --git a/cli/tests/subdir/mod1.ts b/cli/tests/subdir/mod1.ts
new file mode 100644
index 000000000..393535588
--- /dev/null
+++ b/cli/tests/subdir/mod1.ts
@@ -0,0 +1,17 @@
+import { returnsFoo, printHello2 } from "./subdir2/mod2.ts";
+
+export function returnsHi(): string {
+ return "Hi";
+}
+
+export function returnsFoo2(): string {
+ return returnsFoo();
+}
+
+export function printHello3(): void {
+ printHello2();
+}
+
+export function throwsError(): void {
+ throw Error("exception from mod1");
+}
diff --git a/cli/tests/subdir/mod2.ts b/cli/tests/subdir/mod2.ts
new file mode 100644
index 000000000..ce1adc0e8
--- /dev/null
+++ b/cli/tests/subdir/mod2.ts
@@ -0,0 +1 @@
+export { printHello } from "./print_hello.ts";
diff --git a/cli/tests/subdir/mod3.js b/cli/tests/subdir/mod3.js
new file mode 100644
index 000000000..ce534f570
--- /dev/null
+++ b/cli/tests/subdir/mod3.js
@@ -0,0 +1 @@
+export const isTSFile = false;
diff --git a/cli/tests/subdir/mod4.js b/cli/tests/subdir/mod4.js
new file mode 100644
index 000000000..71332dbc4
--- /dev/null
+++ b/cli/tests/subdir/mod4.js
@@ -0,0 +1 @@
+export const isMod4 = true;
diff --git a/cli/tests/subdir/mod5.mjs b/cli/tests/subdir/mod5.mjs
new file mode 100644
index 000000000..f21d8862b
--- /dev/null
+++ b/cli/tests/subdir/mod5.mjs
@@ -0,0 +1 @@
+export const isMod5 = true;
diff --git a/cli/tests/subdir/mt_application_ecmascript.j2.js b/cli/tests/subdir/mt_application_ecmascript.j2.js
new file mode 100644
index 000000000..e67d2a017
--- /dev/null
+++ b/cli/tests/subdir/mt_application_ecmascript.j2.js
@@ -0,0 +1 @@
+export const loaded = true;
diff --git a/cli/tests/subdir/mt_application_x_javascript.j4.js b/cli/tests/subdir/mt_application_x_javascript.j4.js
new file mode 100644
index 000000000..e67d2a017
--- /dev/null
+++ b/cli/tests/subdir/mt_application_x_javascript.j4.js
@@ -0,0 +1 @@
+export const loaded = true;
diff --git a/cli/tests/subdir/mt_application_x_typescript.t4.ts b/cli/tests/subdir/mt_application_x_typescript.t4.ts
new file mode 100644
index 000000000..e67d2a017
--- /dev/null
+++ b/cli/tests/subdir/mt_application_x_typescript.t4.ts
@@ -0,0 +1 @@
+export const loaded = true;
diff --git a/cli/tests/subdir/mt_javascript.js b/cli/tests/subdir/mt_javascript.js
new file mode 100644
index 000000000..e67d2a017
--- /dev/null
+++ b/cli/tests/subdir/mt_javascript.js
@@ -0,0 +1 @@
+export const loaded = true;
diff --git a/cli/tests/subdir/mt_text_ecmascript.j3.js b/cli/tests/subdir/mt_text_ecmascript.j3.js
new file mode 100644
index 000000000..e67d2a017
--- /dev/null
+++ b/cli/tests/subdir/mt_text_ecmascript.j3.js
@@ -0,0 +1 @@
+export const loaded = true;
diff --git a/cli/tests/subdir/mt_text_javascript.j1.js b/cli/tests/subdir/mt_text_javascript.j1.js
new file mode 100644
index 000000000..e67d2a017
--- /dev/null
+++ b/cli/tests/subdir/mt_text_javascript.j1.js
@@ -0,0 +1 @@
+export const loaded = true;
diff --git a/cli/tests/subdir/mt_text_typescript.t1.ts b/cli/tests/subdir/mt_text_typescript.t1.ts
new file mode 100644
index 000000000..e67d2a017
--- /dev/null
+++ b/cli/tests/subdir/mt_text_typescript.t1.ts
@@ -0,0 +1 @@
+export const loaded = true;
diff --git a/cli/tests/subdir/mt_video_mp2t.t3.ts b/cli/tests/subdir/mt_video_mp2t.t3.ts
new file mode 100644
index 000000000..e67d2a017
--- /dev/null
+++ b/cli/tests/subdir/mt_video_mp2t.t3.ts
@@ -0,0 +1 @@
+export const loaded = true;
diff --git a/cli/tests/subdir/mt_video_vdn.t2.ts b/cli/tests/subdir/mt_video_vdn.t2.ts
new file mode 100644
index 000000000..e67d2a017
--- /dev/null
+++ b/cli/tests/subdir/mt_video_vdn.t2.ts
@@ -0,0 +1 @@
+export const loaded = true;
diff --git a/cli/tests/subdir/no_ext b/cli/tests/subdir/no_ext
new file mode 100644
index 000000000..e67d2a017
--- /dev/null
+++ b/cli/tests/subdir/no_ext
@@ -0,0 +1 @@
+export const loaded = true;
diff --git a/cli/tests/subdir/print_hello.ts b/cli/tests/subdir/print_hello.ts
new file mode 100644
index 000000000..7ecce5040
--- /dev/null
+++ b/cli/tests/subdir/print_hello.ts
@@ -0,0 +1,3 @@
+export function printHello(): void {
+ console.log("Hello");
+}
diff --git a/cli/tests/subdir/redirects/redirect1.js b/cli/tests/subdir/redirects/redirect1.js
new file mode 100644
index 000000000..d674be88c
--- /dev/null
+++ b/cli/tests/subdir/redirects/redirect1.js
@@ -0,0 +1 @@
+export const redirect = 1;
diff --git a/cli/tests/subdir/redirects/redirect1.ts b/cli/tests/subdir/redirects/redirect1.ts
new file mode 100644
index 000000000..d674be88c
--- /dev/null
+++ b/cli/tests/subdir/redirects/redirect1.ts
@@ -0,0 +1 @@
+export const redirect = 1;
diff --git a/cli/tests/subdir/redirects/redirect2.js b/cli/tests/subdir/redirects/redirect2.js
new file mode 100644
index 000000000..e4244f638
--- /dev/null
+++ b/cli/tests/subdir/redirects/redirect2.js
@@ -0,0 +1 @@
+import "./redirect1.js";
diff --git a/cli/tests/subdir/redirects/redirect3.js b/cli/tests/subdir/redirects/redirect3.js
new file mode 100644
index 000000000..e24f2af32
--- /dev/null
+++ b/cli/tests/subdir/redirects/redirect3.js
@@ -0,0 +1,2 @@
+import { redirect } from "./redirect1.js";
+export const value = `3 imports ${redirect}`;
diff --git a/cli/tests/subdir/redirects/redirect4.ts b/cli/tests/subdir/redirects/redirect4.ts
new file mode 100644
index 000000000..45c65c5eb
--- /dev/null
+++ b/cli/tests/subdir/redirects/redirect4.ts
@@ -0,0 +1,2 @@
+import { redirect } from "./redirect1.ts";
+export const value = `4 imports ${redirect}`;
diff --git a/cli/tests/subdir/subdir2/mod2.ts b/cli/tests/subdir/subdir2/mod2.ts
new file mode 100644
index 000000000..c88d4708c
--- /dev/null
+++ b/cli/tests/subdir/subdir2/mod2.ts
@@ -0,0 +1,9 @@
+import { printHello } from "../print_hello.ts";
+
+export function returnsFoo(): string {
+ return "Foo";
+}
+
+export function printHello2(): void {
+ printHello();
+}
diff --git a/cli/tests/subdir/test_worker.js b/cli/tests/subdir/test_worker.js
new file mode 100644
index 000000000..53d38ba96
--- /dev/null
+++ b/cli/tests/subdir/test_worker.js
@@ -0,0 +1,7 @@
+onmessage = function(e) {
+ console.log(e.data);
+
+ postMessage(e.data);
+
+ workerClose();
+};
diff --git a/cli/tests/subdir/test_worker.ts b/cli/tests/subdir/test_worker.ts
new file mode 100644
index 000000000..c8109d131
--- /dev/null
+++ b/cli/tests/subdir/test_worker.ts
@@ -0,0 +1,7 @@
+onmessage = function(e): void {
+ console.log(e.data);
+
+ postMessage(e.data);
+
+ workerClose();
+};
diff --git a/cli/tests/subdir/throws.js b/cli/tests/subdir/throws.js
new file mode 100644
index 000000000..b77e7104f
--- /dev/null
+++ b/cli/tests/subdir/throws.js
@@ -0,0 +1,5 @@
+export function boo() {
+ console.log("Boo!");
+}
+
+throw new Error("An error");
diff --git a/cli/tests/subdir/unknown_ext.deno b/cli/tests/subdir/unknown_ext.deno
new file mode 100644
index 000000000..e67d2a017
--- /dev/null
+++ b/cli/tests/subdir/unknown_ext.deno
@@ -0,0 +1 @@
+export const loaded = true;
diff --git a/cli/tests/top_level_await.js b/cli/tests/top_level_await.js
new file mode 100644
index 000000000..af6fbd662
--- /dev/null
+++ b/cli/tests/top_level_await.js
@@ -0,0 +1,3 @@
+const buf = await Deno.readFile("hello.txt");
+const n = await Deno.stdout.write(buf);
+console.log(`\n\nwrite ${n}`);
diff --git a/cli/tests/top_level_await.out b/cli/tests/top_level_await.out
new file mode 100644
index 000000000..4b65d15fe
--- /dev/null
+++ b/cli/tests/top_level_await.out
@@ -0,0 +1,3 @@
+Hello world!
+
+write 12
diff --git a/cli/tests/top_level_await.ts b/cli/tests/top_level_await.ts
new file mode 100644
index 000000000..65de253ea
--- /dev/null
+++ b/cli/tests/top_level_await.ts
@@ -0,0 +1,3 @@
+const buf: Uint8Array = await Deno.readFile("hello.txt");
+const n: number = await Deno.stdout.write(buf);
+console.log(`\n\nwrite ${n}`);
diff --git a/cli/tests/tty_tests.rs b/cli/tests/tty_tests.rs
new file mode 100644
index 000000000..413d39caf
--- /dev/null
+++ b/cli/tests/tty_tests.rs
@@ -0,0 +1,18 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+#[macro_use]
+extern crate lazy_static;
+extern crate tempfile;
+mod util;
+use util::*;
+
+// TODO(#2933): Rewrite these tests in rust.
+// TODO(ry) These tests can't run in parallel.
+#[test]
+fn tty_tests() {
+ let g = http_server();
+ run_python_script("tools/complex_permissions_test.py");
+ run_python_script("tools/permission_prompt_test.py");
+ // TODO(ry) is_tty_test is not passing on travis when run with "cargo test"
+ // run_python_script("tools/is_tty_test.py");
+ drop(g);
+}
diff --git a/cli/tests/type_definitions.ts b/cli/tests/type_definitions.ts
new file mode 100644
index 000000000..ecf3ae0b2
--- /dev/null
+++ b/cli/tests/type_definitions.ts
@@ -0,0 +1,10 @@
+// @deno-types="./type_definitions/foo.d.ts"
+import { foo } from "./type_definitions/foo.js";
+// @deno-types="./type_definitions/fizz.d.ts"
+import "./type_definitions/fizz.js";
+
+import * as qat from "./type_definitions/qat.ts";
+
+console.log(foo);
+console.log(fizz);
+console.log(qat.qat);
diff --git a/cli/tests/type_definitions.ts.out b/cli/tests/type_definitions.ts.out
new file mode 100644
index 000000000..b4fa88c50
--- /dev/null
+++ b/cli/tests/type_definitions.ts.out
@@ -0,0 +1,3 @@
+[WILDCARD]foo
+fizz
+qat
diff --git a/cli/tests/type_definitions/bar.d.ts b/cli/tests/type_definitions/bar.d.ts
new file mode 100644
index 000000000..d43335dbb
--- /dev/null
+++ b/cli/tests/type_definitions/bar.d.ts
@@ -0,0 +1,7 @@
+/// <reference types="baz" />
+
+declare namespace bar {
+ export class Bar {
+ baz: string;
+ }
+}
diff --git a/cli/tests/type_definitions/fizz.d.ts b/cli/tests/type_definitions/fizz.d.ts
new file mode 100644
index 000000000..34eb41b96
--- /dev/null
+++ b/cli/tests/type_definitions/fizz.d.ts
@@ -0,0 +1,2 @@
+/** A global value. */
+declare const fizz: string;
diff --git a/cli/tests/type_definitions/fizz.js b/cli/tests/type_definitions/fizz.js
new file mode 100644
index 000000000..852162c94
--- /dev/null
+++ b/cli/tests/type_definitions/fizz.js
@@ -0,0 +1 @@
+globalThis.fizz = "fizz";
diff --git a/cli/tests/type_definitions/foo.d.ts b/cli/tests/type_definitions/foo.d.ts
new file mode 100644
index 000000000..ce39201e1
--- /dev/null
+++ b/cli/tests/type_definitions/foo.d.ts
@@ -0,0 +1,2 @@
+/** An exported value. */
+export const foo: string;
diff --git a/cli/tests/type_definitions/foo.js b/cli/tests/type_definitions/foo.js
new file mode 100644
index 000000000..61d366eb2
--- /dev/null
+++ b/cli/tests/type_definitions/foo.js
@@ -0,0 +1 @@
+export const foo = "foo";
diff --git a/cli/tests/type_definitions/qat.ts b/cli/tests/type_definitions/qat.ts
new file mode 100644
index 000000000..6196c9d38
--- /dev/null
+++ b/cli/tests/type_definitions/qat.ts
@@ -0,0 +1 @@
+export const qat = "qat";
diff --git a/cli/tests/types.out b/cli/tests/types.out
new file mode 100644
index 000000000..6c17b0f90
--- /dev/null
+++ b/cli/tests/types.out
@@ -0,0 +1,14 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+[WILDCARD]
+
+declare namespace Deno {
+[WILDCARD]
+}
+[WILDCARD]
+declare interface Window {
+[WILDCARD]
+ Deno: typeof Deno;
+}
+
+declare const window: Window & typeof globalThis;
+[WILDCARD]
diff --git a/cli/tests/unbuffered_stderr.ts b/cli/tests/unbuffered_stderr.ts
new file mode 100644
index 000000000..f4bceb1fc
--- /dev/null
+++ b/cli/tests/unbuffered_stderr.ts
@@ -0,0 +1,3 @@
+const { stderr } = Deno;
+
+stderr.write(new TextEncoder().encode("x"));
diff --git a/cli/tests/unbuffered_stderr.ts.out b/cli/tests/unbuffered_stderr.ts.out
new file mode 100644
index 000000000..500019738
--- /dev/null
+++ b/cli/tests/unbuffered_stderr.ts.out
@@ -0,0 +1,2 @@
+[WILDCARD]
+x \ No newline at end of file
diff --git a/cli/tests/unbuffered_stdout.ts b/cli/tests/unbuffered_stdout.ts
new file mode 100644
index 000000000..fdb1a0e23
--- /dev/null
+++ b/cli/tests/unbuffered_stdout.ts
@@ -0,0 +1,3 @@
+const { stdout } = Deno;
+
+stdout.write(new TextEncoder().encode("a"));
diff --git a/cli/tests/unbuffered_stdout.ts.out b/cli/tests/unbuffered_stdout.ts.out
new file mode 100644
index 000000000..2e65efe2a
--- /dev/null
+++ b/cli/tests/unbuffered_stdout.ts.out
@@ -0,0 +1 @@
+a \ No newline at end of file
diff --git a/cli/tests/util/mod.rs b/cli/tests/util/mod.rs
new file mode 100644
index 000000000..a91e5367b
--- /dev/null
+++ b/cli/tests/util/mod.rs
@@ -0,0 +1,218 @@
+//! Test utilities shared between integration_tests.rs and tty_tests.rs
+use deno_cli::colors::strip_ansi_codes;
+pub use deno_cli::test_util::*;
+use os_pipe::pipe;
+use std::io::Read;
+use std::io::Write;
+use std::process::Command;
+use std::process::Stdio;
+use tempfile::TempDir;
+
+lazy_static! {
+ static ref DENO_DIR: TempDir = { TempDir::new().expect("tempdir fail") };
+}
+
+#[allow(dead_code)]
+pub fn deno_cmd() -> Command {
+ let mut c = Command::new(deno_exe_path());
+ c.env("DENO_DIR", DENO_DIR.path());
+ c
+}
+
+pub fn run_python_script(script: &str) {
+ let output = Command::new("python")
+ .env("DENO_DIR", DENO_DIR.path())
+ .current_dir(root_path())
+ .arg(script)
+ .arg(format!("--executable={}", deno_exe_path().display()))
+ .env("DENO_BUILD_PATH", target_dir())
+ .output()
+ .expect("failed to spawn script");
+ if !output.status.success() {
+ let stdout = String::from_utf8(output.stdout).unwrap();
+ let stderr = String::from_utf8(output.stderr).unwrap();
+ panic!(
+ "{} executed with failing error code\n{}{}",
+ script, stdout, stderr
+ );
+ }
+}
+
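+/// Declarative description of a single CLI integration test: the arguments to
+/// pass to deno, the expected-output file (which may contain [WILDCARD]
+/// patterns), optional stdin input, the expected exit code, whether stderr is
+/// captured, and whether the test HTTP server must be running.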
+#[derive(Debug, Default)]
+pub struct CheckOutputIntegrationTest {
+ pub args: &'static str,
+ pub output: &'static str,
+ pub input: Option<&'static str>,
+ pub exit_code: i32,
+ pub check_stderr: bool,
+ pub http_server: bool,
+}
+
+impl CheckOutputIntegrationTest {
+ #[allow(dead_code)]
+ pub fn run(&self) {
+ let args = self.args.split_whitespace();
+ let root = root_path();
+ let deno_exe = deno_exe_path();
+ println!("root path {}", root.display());
+ println!("deno_exe path {}", deno_exe.display());
+
+ let http_server_guard = if self.http_server {
+ Some(http_server())
+ } else {
+ None
+ };
+
+ let (mut reader, writer) = pipe().unwrap();
+ let tests_dir = root.join("cli").join("tests");
+ let mut command = deno_cmd();
+ command.args(args);
+ command.current_dir(&tests_dir);
+ command.stdin(Stdio::piped());
+ command.stderr(Stdio::null());
+
+ if self.check_stderr {
+ let writer_clone = writer.try_clone().unwrap();
+ command.stderr(writer_clone);
+ }
+
+ command.stdout(writer);
+
+ let mut process = command.spawn().expect("failed to execute process");
+
+ if let Some(input) = self.input {
+ let mut p_stdin = process.stdin.take().unwrap();
+ write!(p_stdin, "{}", input).unwrap();
+ }
+
+ // Very important when using pipes: This parent process is still
+ // holding its copies of the write ends, and we have to close them
+ // before we read, otherwise the read end will never report EOF. The
+ // Command object owns the writers now, and dropping it closes them.
+ drop(command);
+
+ let mut actual = String::new();
+ reader.read_to_string(&mut actual).unwrap();
+
+ let status = process.wait().expect("failed to finish process");
+ let exit_code = status.code().unwrap();
+
+ drop(http_server_guard);
+
+ actual = strip_ansi_codes(&actual).to_string();
+
+ if self.exit_code != exit_code {
+ println!("OUTPUT\n{}\nOUTPUT", actual);
+ panic!(
+ "bad exit code, expected: {:?}, actual: {:?}",
+ self.exit_code, exit_code
+ );
+ }
+
+ let output_path = tests_dir.join(self.output);
+ println!("output path {}", output_path.display());
+ let expected =
+ std::fs::read_to_string(output_path).expect("cannot read output");
+
+ if !wildcard_match(&expected, &actual) {
+ println!("OUTPUT\n{}\nOUTPUT", actual);
+ println!("EXPECTED\n{}\nEXPECTED", expected);
+ panic!("pattern match failed");
+ }
+ }
+}
+
+fn wildcard_match(pattern: &str, s: &str) -> bool {
+ pattern_match(pattern, s, "[WILDCARD]")
+}
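+// e.g. wildcard_match("foo[WILDCARD]baz", "foobarbaz") == true (see the
+// fixtures in test_wildcard_match below).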
+
+fn pattern_match(pattern: &str, s: &str, wildcard: &str) -> bool {
+ // Normalize line endings
+ let s = s.replace("\r\n", "\n");
+ let pattern = pattern.replace("\r\n", "\n");
+
+ if pattern == wildcard {
+ return true;
+ }
+
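+  // Split the pattern on the wildcard and require the resulting parts to occur
+  // in order: the first part must anchor the start of `s`, while each later
+  // part may be preceded by arbitrary text.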
+ let parts = pattern.split(wildcard).collect::<Vec<&str>>();
+ if parts.len() == 1 {
+ return pattern == s;
+ }
+
+ if !s.starts_with(parts[0]) {
+ return false;
+ }
+
+ let mut t = s.split_at(parts[0].len());
+
+ for (i, part) in parts.iter().enumerate() {
+ if i == 0 {
+ continue;
+ }
+ dbg!(part, i);
+ if i == parts.len() - 1 && (*part == "" || *part == "\n") {
+ dbg!("exit 1 true", i);
+ return true;
+ }
+ if let Some(found) = t.1.find(*part) {
+ dbg!("found ", found);
+ t = t.1.split_at(found + part.len());
+ } else {
+ dbg!("exit false ", i);
+ return false;
+ }
+ }
+
+ dbg!("end ", t.1.len());
+ t.1.is_empty()
+}
+
+#[test]
+fn test_wildcard_match() {
+ let fixtures = vec![
+ ("foobarbaz", "foobarbaz", true),
+ ("[WILDCARD]", "foobarbaz", true),
+ ("foobar", "foobarbaz", false),
+ ("foo[WILDCARD]baz", "foobarbaz", true),
+ ("foo[WILDCARD]baz", "foobazbar", false),
+ ("foo[WILDCARD]baz[WILDCARD]qux", "foobarbazqatqux", true),
+ ("foo[WILDCARD]", "foobar", true),
+ ("foo[WILDCARD]baz[WILDCARD]", "foobarbazqat", true),
+ // check with different line endings
+ ("foo[WILDCARD]\nbaz[WILDCARD]\n", "foobar\nbazqat\n", true),
+ (
+ "foo[WILDCARD]\nbaz[WILDCARD]\n",
+ "foobar\r\nbazqat\r\n",
+ true,
+ ),
+ (
+ "foo[WILDCARD]\r\nbaz[WILDCARD]\n",
+ "foobar\nbazqat\r\n",
+ true,
+ ),
+ (
+ "foo[WILDCARD]\r\nbaz[WILDCARD]\r\n",
+ "foobar\nbazqat\n",
+ true,
+ ),
+ (
+ "foo[WILDCARD]\r\nbaz[WILDCARD]\r\n",
+ "foobar\r\nbazqat\r\n",
+ true,
+ ),
+ ];
+
+  // Iterate through the fixture list, testing each one.
+ for (pattern, string, expected) in fixtures {
+ let actual = wildcard_match(pattern, string);
+ dbg!(pattern, string, expected);
+ assert_eq!(actual, expected);
+ }
+}
+
+#[test]
+fn test_pattern_match() {
+ assert!(pattern_match("foo[BAR]baz", "foobarbaz", "[BAR]"));
+ assert!(!pattern_match("foo[BAR]baz", "foobazbar", "[BAR]"));
+}
diff --git a/cli/tests/v8_flags.js b/cli/tests/v8_flags.js
new file mode 100644
index 000000000..f7999c4af
--- /dev/null
+++ b/cli/tests/v8_flags.js
@@ -0,0 +1 @@
+console.log(typeof gc);
diff --git a/cli/tests/v8_flags.js.out b/cli/tests/v8_flags.js.out
new file mode 100644
index 000000000..e2dbde096
--- /dev/null
+++ b/cli/tests/v8_flags.js.out
@@ -0,0 +1 @@
+function
diff --git a/cli/tests/v8_help.out b/cli/tests/v8_help.out
new file mode 100644
index 000000000..3d7aac28d
--- /dev/null
+++ b/cli/tests/v8_help.out
@@ -0,0 +1,3 @@
+[WILDCARD]
+Synopsis:
+[WILDCARD]d8[WILDCARD] \ No newline at end of file
diff --git a/cli/tests/version.out b/cli/tests/version.out
new file mode 100644
index 000000000..de13d769f
--- /dev/null
+++ b/cli/tests/version.out
@@ -0,0 +1,3 @@
+deno:[WILDCARD]
+v8:[WILDCARD]
+typescript:[WILDCARD] \ No newline at end of file
diff --git a/cli/tests/wasm.ts b/cli/tests/wasm.ts
new file mode 100644
index 000000000..26ad7ba28
--- /dev/null
+++ b/cli/tests/wasm.ts
@@ -0,0 +1,15 @@
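+// A minimal WebAssembly module, inlined as raw bytes, that exports a `main`
+// function returning 42 (see wasm.ts.out).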
+// prettier-ignore
+const wasmCode = new Uint8Array([
+ 0, 97, 115, 109, 1, 0, 0, 0, 1, 133, 128, 128, 128, 0, 1, 96, 0, 1, 127,
+ 3, 130, 128, 128, 128, 0, 1, 0, 4, 132, 128, 128, 128, 0, 1, 112, 0, 0,
+ 5, 131, 128, 128, 128, 0, 1, 0, 1, 6, 129, 128, 128, 128, 0, 0, 7, 145,
+ 128, 128, 128, 0, 2, 6, 109, 101, 109, 111, 114, 121, 2, 0, 4, 109, 97,
+ 105, 110, 0, 0, 10, 138, 128, 128, 128, 0, 1, 132, 128, 128, 128, 0, 0,
+ 65, 42, 11
+ ]);
+
+const wasmModule = new WebAssembly.Module(wasmCode);
+
+const wasmInstance = new WebAssembly.Instance(wasmModule);
+
+console.log(wasmInstance.exports.main().toString());
diff --git a/cli/tests/wasm.ts.out b/cli/tests/wasm.ts.out
new file mode 100644
index 000000000..d81cc0710
--- /dev/null
+++ b/cli/tests/wasm.ts.out
@@ -0,0 +1 @@
+42
diff --git a/cli/tests/wasm_async.js b/cli/tests/wasm_async.js
new file mode 100644
index 000000000..98a178aad
--- /dev/null
+++ b/cli/tests/wasm_async.js
@@ -0,0 +1,27 @@
+// The following blob can be created by taking the following s-expr and passing
+// it through wat2wasm.
+// (module
+// (func $add (param $a i32) (param $b i32) (result i32)
+// local.get $a
+// local.get $b
+// i32.add)
+// (export "add" (func $add))
+// )
+// prettier-ignore
+const bytes = new Uint8Array([
+ 0x00, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00, 0x01, 0x07, 0x01, 0x60,
+ 0x02, 0x7f, 0x7f, 0x01, 0x7f, 0x03, 0x02, 0x01, 0x00, 0x07, 0x07, 0x01,
+ 0x03, 0x61, 0x64, 0x64, 0x00, 0x00, 0x0a, 0x09, 0x01, 0x07, 0x00, 0x20,
+ 0x00, 0x20, 0x01, 0x6a, 0x0b
+]);
+
+async function main() {
+ const wasm = await WebAssembly.instantiate(bytes);
+ const result = wasm.instance.exports.add(1, 3);
+ console.log("1 + 3 =", result);
+ if (result != 4) {
+ throw Error("bad");
+ }
+}
+
+main();
diff --git a/cli/tests/wasm_async.out b/cli/tests/wasm_async.out
new file mode 100644
index 000000000..5cdf17de7
--- /dev/null
+++ b/cli/tests/wasm_async.out
@@ -0,0 +1 @@
+1 + 3 = 4
diff --git a/cli/tests/workers_round_robin_bench.ts b/cli/tests/workers_round_robin_bench.ts
new file mode 100644
index 000000000..7c34e75e5
--- /dev/null
+++ b/cli/tests/workers_round_robin_bench.ts
@@ -0,0 +1,79 @@
+// This benchmark measures the time it takes to send a message to a group of
+// workers one at a time and to wait for a response from all of them. It is just
+// a general throughput and consistency benchmark.
+const data = "HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World\n";
+const workerCount = 4;
+const cmdsPerWorker = 400;
+
+export interface ResolvableMethods<T> {
+ resolve: (value?: T | PromiseLike<T>) => void;
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ reject: (reason?: any) => void;
+}
+
+export type Resolvable<T> = Promise<T> & ResolvableMethods<T>;
+
+export function createResolvable<T>(): Resolvable<T> {
+ let methods: ResolvableMethods<T>;
+ const promise = new Promise<T>(
+ (resolve, reject): void => {
+ methods = { resolve, reject };
+ }
+ );
+  // TypeScript doesn't know that the Promise callback runs synchronously,
+  // hence the non-null assertion (`!`).
+ return Object.assign(promise, methods!) as Resolvable<T>;
+}
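+// Illustrative use: const done = createResolvable<string>(); done.resolve("ok");
+// await done;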
+
+function handleAsyncMsgFromWorker(
+ promiseTable: Map<number, Resolvable<string>>,
+ msg: { cmdId: number; data: string }
+): void {
+ const promise = promiseTable.get(msg.cmdId);
+  // Map.get() returns undefined (not null) when the key is missing.
+  if (promise === undefined) {
+ throw new Error(`Failed to find promise: cmdId: ${msg.cmdId}, msg: ${msg}`);
+ }
+ promise.resolve(data);
+}
+
+async function main(): Promise<void> {
+ const workers: Array<[Map<number, Resolvable<string>>, Worker]> = [];
+ for (let i = 1; i <= workerCount; ++i) {
+ const worker = new Worker("./subdir/bench_worker.ts");
+ const promise = new Promise(
+ (resolve): void => {
+ worker.onmessage = (e): void => {
+ if (e.data.cmdId === 0) resolve();
+ };
+ }
+ );
+ worker.postMessage({ cmdId: 0, action: 2 });
+ await promise;
+ workers.push([new Map(), worker]);
+ }
+ // assign callback function
+ for (const [promiseTable, worker] of workers) {
+ worker.onmessage = (e): void => {
+ handleAsyncMsgFromWorker(promiseTable, e.data);
+ };
+ }
+ for (const cmdId of Array(cmdsPerWorker).keys()) {
+ const promises: Array<Promise<string>> = [];
+ for (const [promiseTable, worker] of workers) {
+ const promise = createResolvable<string>();
+ promiseTable.set(cmdId, promise);
+ worker.postMessage({ cmdId: cmdId, action: 1, data });
+ promises.push(promise);
+ }
+ for (const promise of promises) {
+ await promise;
+ }
+ }
+ for (const [, worker] of workers) {
+ worker.postMessage({ action: 3 });
+ await worker.closed; // Required to avoid a cmdId not in table error.
+ }
+ console.log("Finished!");
+}
+
+main();
diff --git a/cli/tests/workers_startup_bench.ts b/cli/tests/workers_startup_bench.ts
new file mode 100644
index 000000000..fbea4dc40
--- /dev/null
+++ b/cli/tests/workers_startup_bench.ts
@@ -0,0 +1,27 @@
+// This benchmark measures the time it takes to start and stop a number of workers.
+const workerCount = 50;
+
+async function bench(): Promise<void> {
+ const workers: Worker[] = [];
+ for (let i = 1; i <= workerCount; ++i) {
+ const worker = new Worker("./subdir/bench_worker.ts");
+ const promise = new Promise(
+ (resolve): void => {
+ worker.onmessage = (e): void => {
+ if (e.data.cmdId === 0) resolve();
+ };
+ }
+ );
+ worker.postMessage({ cmdId: 0, action: 2 });
+ await promise;
+ workers.push(worker);
+ }
+ console.log("Done creating workers closing workers!");
+ for (const worker of workers) {
+ worker.postMessage({ action: 3 });
+ await worker.closed; // Required to avoid a cmdId not in table error.
+ }
+ console.log("Finished!");
+}
+
+bench();
diff --git a/cli/tokio_read.rs b/cli/tokio_read.rs
new file mode 100644
index 000000000..25c4df191
--- /dev/null
+++ b/cli/tokio_read.rs
@@ -0,0 +1,64 @@
+// Copyright (c) 2019 Tokio Contributors. All rights reserved. MIT license.
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+// Forked from: https://github.com/tokio-rs/tokio/blob/9b3f8564af4bb1aee07fab3c401eb412ca5eeac5/tokio-io/src/io/read.rs
+use crate::resources::DenoAsyncRead;
+use deno::ErrBox;
+use futures::{Future, Poll};
+use std::mem;
+
+/// This is almost the same implementation as in tokio; the only difference is
+/// that the error type is `ErrBox` instead of `std::io::Error`.
+
+#[derive(Debug)]
+enum State<R, T> {
+ Pending { rd: R, buf: T },
+ Empty,
+}
+
+/// Tries to read some bytes directly into the given `buf` in an asynchronous
+/// manner, returning a future type.
+///
+/// The returned future will resolve to both the I/O stream and the buffer
+/// as well as the number of bytes read once the read operation is completed.
+pub fn read<R, T>(rd: R, buf: T) -> Read<R, T>
+where
+ R: DenoAsyncRead,
+ T: AsMut<[u8]>,
+{
+ Read {
+ state: State::Pending { rd, buf },
+ }
+}
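+// Illustrative use: `read(resource, vec![0u8; 1024])` resolves to
+// `(resource, buf, nread)` once some bytes are available.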
+
+/// A future which can be used to easily read an available number of bytes to
+/// fill a buffer.
+///
+/// Created by the [`read`] function.
+#[derive(Debug)]
+pub struct Read<R, T> {
+ state: State<R, T>,
+}
+
+impl<R, T> Future for Read<R, T>
+where
+ R: DenoAsyncRead,
+ T: AsMut<[u8]>,
+{
+ type Item = (R, T, usize);
+ type Error = ErrBox;
+
+ fn poll(&mut self) -> Poll<(R, T, usize), ErrBox> {
+ let nread = match self.state {
+ State::Pending {
+ ref mut rd,
+ ref mut buf,
+ } => try_ready!(rd.poll_read(&mut buf.as_mut()[..])),
+ State::Empty => panic!("poll a Read after it's done"),
+ };
+
+ match mem::replace(&mut self.state, State::Empty) {
+ State::Pending { rd, buf } => Ok((rd, buf, nread).into()),
+ State::Empty => panic!("invalid internal state"),
+ }
+ }
+}
diff --git a/cli/tokio_util.rs b/cli/tokio_util.rs
new file mode 100644
index 000000000..678bb8e66
--- /dev/null
+++ b/cli/tokio_util.rs
@@ -0,0 +1,168 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::resources::Resource;
+use deno::ErrBox;
+use futures;
+use futures::Future;
+use futures::Poll;
+use std::io;
+use std::mem;
+use std::net::SocketAddr;
+use tokio;
+use tokio::net::TcpStream;
+use tokio::runtime;
+
+pub fn create_threadpool_runtime(
+) -> Result<tokio::runtime::Runtime, tokio::io::Error> {
+ runtime::Builder::new()
+ .panic_handler(|err| std::panic::resume_unwind(err))
+ .build()
+}
+
+pub fn run<F>(future: F)
+where
+ F: Future<Item = (), Error = ()> + Send + 'static,
+{
+ // tokio::runtime::current_thread::run(future)
+ let rt = create_threadpool_runtime().expect("Unable to create Tokio runtime");
+ rt.block_on_all(future).unwrap();
+}
+
+pub fn run_on_current_thread<F>(future: F)
+where
+ F: Future<Item = (), Error = ()> + Send + 'static,
+{
+ tokio::runtime::current_thread::run(future);
+}
+
+/// THIS IS A HACK AND SHOULD BE AVOIDED.
+///
+/// This spawns a new thread and creates a single-threaded tokio runtime on that thread,
+/// to execute the given future.
+///
+/// This is useful when we want to block the main runtime to
+/// resolve a future without worrying that we'll use up all the threads in the
+/// main runtime.
+pub fn block_on<F, R>(future: F) -> Result<R, ErrBox>
+where
+ F: Send + 'static + Future<Item = R, Error = ErrBox>,
+ R: Send + 'static,
+{
+ use std::sync::mpsc::channel;
+ use std::thread;
+ let (sender, receiver) = channel();
+ // Create a new runtime to evaluate the future asynchronously.
+ thread::spawn(move || {
+ let r = tokio::runtime::current_thread::block_on_all(future);
+ sender
+ .send(r)
+ .expect("Unable to send blocking future result")
+ });
+ receiver
+ .recv()
+ .expect("Unable to receive blocking future result")
+}
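+// Illustrative use: `let answer = block_on(futures::future::ok::<u32, ErrBox>(42))?;`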
+
+// Set the default executor so we can use tokio::spawn(). It's difficult to
+// pass around mut references to the runtime, so using with_default is
+// preferable. Ideally Tokio would provide this function.
+#[cfg(test)]
+pub fn init<F>(f: F)
+where
+ F: FnOnce(),
+{
+ let rt = create_threadpool_runtime().expect("Unable to create Tokio runtime");
+ let mut executor = rt.executor();
+ let mut enter = tokio_executor::enter().expect("Multiple executors at once");
+ tokio_executor::with_default(&mut executor, &mut enter, move |_enter| f());
+}
+
+#[derive(Debug)]
+enum AcceptState {
+ Pending(Resource),
+ Empty,
+}
+
+/// Simply accepts a connection.
+pub fn accept(r: Resource) -> Accept {
+ Accept {
+ state: AcceptState::Pending(r),
+ }
+}
+
+/// A future which resolves to an accepted TCP connection (the stream and the
+/// peer address).
+///
+/// Created by the [`accept`] function.
+#[derive(Debug)]
+pub struct Accept {
+ state: AcceptState,
+}
+impl Future for Accept {
+ type Item = (TcpStream, SocketAddr);
+ type Error = io::Error;
+
+ fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
+ let (stream, addr) = match self.state {
+ // Similar to try_ready!, but also track/untrack accept task
+ // in TcpListener resource.
+ // In this way, when the listener is closed, the task can be
+ // notified to error out (instead of stuck forever).
+ AcceptState::Pending(ref mut r) => match r.poll_accept() {
+ Ok(futures::prelude::Async::Ready(t)) => {
+ r.untrack_task();
+ t
+ }
+ Ok(futures::prelude::Async::NotReady) => {
+ // Would error out if another accept task is being tracked.
+ r.track_task()?;
+ return Ok(futures::prelude::Async::NotReady);
+ }
+ Err(e) => {
+ r.untrack_task();
+ return Err(e);
+ }
+ },
+ AcceptState::Empty => panic!("poll Accept after it's done"),
+ };
+
+ match mem::replace(&mut self.state, AcceptState::Empty) {
+ AcceptState::Pending(_) => Ok((stream, addr).into()),
+ AcceptState::Empty => panic!("invalid internal state"),
+ }
+ }
+}
+
+/// `futures::future::poll_fn` only supports `F: FnMut() -> Poll<T, E>`.
+/// However, we require that `F: FnOnce() -> Poll<T, E>`.
+/// Therefore, we created our own version of `poll_fn`.
+pub fn poll_fn<T, E, F>(f: F) -> PollFn<F>
+where
+ F: FnOnce() -> Poll<T, E>,
+{
+ PollFn { inner: Some(f) }
+}
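+// Illustrative use (the closure is called at most once):
+// `poll_fn(move || -> Poll<(), ErrBox> { Ok(futures::Async::Ready(())) })`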
+
+pub struct PollFn<F> {
+ inner: Option<F>,
+}
+
+impl<T, E, F> Future for PollFn<F>
+where
+ F: FnOnce() -> Poll<T, E>,
+{
+ type Item = T;
+ type Error = E;
+
+ fn poll(&mut self) -> Poll<T, E> {
+ let f = self.inner.take().expect("Inner fn has been taken.");
+ f()
+ }
+}
+
+pub fn panic_on_error<I, E, F>(f: F) -> impl Future<Item = I, Error = ()>
+where
+ F: Future<Item = I, Error = E>,
+ E: std::fmt::Debug,
+{
+ f.map_err(|err| panic!("Future got unexpected error: {:?}", err))
+}
diff --git a/cli/tokio_write.rs b/cli/tokio_write.rs
new file mode 100644
index 000000000..31b4cda30
--- /dev/null
+++ b/cli/tokio_write.rs
@@ -0,0 +1,62 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::resources::DenoAsyncWrite;
+use deno::ErrBox;
+use futures::{Future, Poll};
+use std::mem;
+
+/// A future used to write some data to a stream.
+///
+/// This is created by the [`write`] top-level method.
+///
+/// [`write`]: fn.write.html
+#[derive(Debug)]
+pub struct Write<A, T> {
+ state: State<A, T>,
+}
+
+#[derive(Debug)]
+enum State<A, T> {
+ Pending { a: A, buf: T },
+ Empty,
+}
+
+/// Creates a future that will write some of the buffer `buf` to
+/// the stream `a` provided.
+///
+/// Any error which happens during writing will cause both the stream and the
+/// buffer to get destroyed.
+pub fn write<A, T>(a: A, buf: T) -> Write<A, T>
+where
+ A: DenoAsyncWrite,
+ T: AsRef<[u8]>,
+{
+ Write {
+ state: State::Pending { a, buf },
+ }
+}
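+// Illustrative use: `write(resource, buf)` resolves to `(resource, buf, nwritten)`
+// once some bytes have been written.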
+
+/// This is almost the same implementation as in tokio; the difference is
+/// that the error type is `ErrBox` instead of `std::io::Error`.
+impl<A, T> Future for Write<A, T>
+where
+ A: DenoAsyncWrite,
+ T: AsRef<[u8]>,
+{
+ type Item = (A, T, usize);
+ type Error = ErrBox;
+
+ fn poll(&mut self) -> Poll<(A, T, usize), ErrBox> {
+ let nwritten = match self.state {
+ State::Pending {
+ ref mut a,
+ ref mut buf,
+ } => try_ready!(a.poll_write(buf.as_ref())),
+ State::Empty => panic!("poll a Read after it's done"),
+ };
+
+ match mem::replace(&mut self.state, State::Empty) {
+ State::Pending { a, buf } => Ok((a, buf, nwritten).into()),
+ State::Empty => panic!("invalid internal state"),
+ }
+ }
+}
diff --git a/cli/version.rs b/cli/version.rs
new file mode 100644
index 000000000..a62d904f8
--- /dev/null
+++ b/cli/version.rs
@@ -0,0 +1,7 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+pub const DENO: &str = env!("CARGO_PKG_VERSION");
+pub const TYPESCRIPT: &str = crate::js::TS_VERSION;
+
+pub fn v8() -> &'static str {
+ deno::v8_version()
+}
diff --git a/cli/worker.rs b/cli/worker.rs
new file mode 100644
index 000000000..41de7d1ed
--- /dev/null
+++ b/cli/worker.rs
@@ -0,0 +1,613 @@
+// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
+use crate::fmt_errors::JSError;
+use crate::ops::json_op;
+use crate::ops::minimal_op;
+use crate::ops::*;
+use crate::state::ThreadSafeState;
+use deno;
+use deno::ErrBox;
+use deno::ModuleSpecifier;
+use deno::RecursiveLoad;
+use deno::StartupData;
+use futures::Async;
+use futures::Future;
+use std::env;
+use std::sync::Arc;
+use std::sync::Mutex;
+use url::Url;
+
+/// Wraps deno::Isolate to provide source maps, ops for the CLI, and
+/// high-level module loading
+#[derive(Clone)]
+pub struct Worker {
+ isolate: Arc<Mutex<deno::Isolate>>,
+ pub state: ThreadSafeState,
+}
+
+impl Worker {
+ pub fn new(
+ _name: String,
+ startup_data: StartupData,
+ state: ThreadSafeState,
+ ) -> Worker {
+ let isolate = Arc::new(Mutex::new(deno::Isolate::new(startup_data, false)));
+ {
+ let mut i = isolate.lock().unwrap();
+ let state_ = state.clone();
+
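+      // Register all CLI ops on the isolate. minimal_op/json_op adapt each op to
+      // the corresponding dispatch format, stateful_op captures the
+      // ThreadSafeState for the handler, and cli_op wraps the result for the CLI.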
+ i.register_op("read", state_.cli_op(minimal_op(io::op_read)));
+ i.register_op("write", state_.cli_op(minimal_op(io::op_write)));
+
+ i.register_op(
+ "exit",
+ state_.cli_op(json_op(state_.stateful_op(os::op_exit))),
+ );
+ i.register_op(
+ "is_tty",
+ state_.cli_op(json_op(state_.stateful_op(os::op_is_tty))),
+ );
+ i.register_op(
+ "env",
+ state_.cli_op(json_op(state_.stateful_op(os::op_env))),
+ );
+ i.register_op(
+ "exec_path",
+ state_.cli_op(json_op(state_.stateful_op(os::op_exec_path))),
+ );
+ i.register_op(
+ "utime",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_utime))),
+ );
+ i.register_op(
+ "set_env",
+ state_.cli_op(json_op(state_.stateful_op(os::op_set_env))),
+ );
+ i.register_op(
+ "get_env",
+ state_.cli_op(json_op(state_.stateful_op(os::op_get_env))),
+ );
+ i.register_op(
+ "home_dir",
+ state_.cli_op(json_op(state_.stateful_op(os::op_home_dir))),
+ );
+ i.register_op(
+ "start",
+ state_.cli_op(json_op(state_.stateful_op(os::op_start))),
+ );
+ i.register_op(
+ "apply_source_map",
+ state_.cli_op(json_op(state_.stateful_op(errors::op_apply_source_map))),
+ );
+ i.register_op(
+ "format_error",
+ state_.cli_op(json_op(state_.stateful_op(errors::op_format_error))),
+ );
+ i.register_op(
+ "cache",
+ state_.cli_op(json_op(state_.stateful_op(compiler::op_cache))),
+ );
+ i.register_op(
+ "fetch_source_files",
+ state_
+ .cli_op(json_op(state_.stateful_op(compiler::op_fetch_source_files))),
+ );
+ i.register_op(
+ "open",
+ state_.cli_op(json_op(state_.stateful_op(files::op_open))),
+ );
+ i.register_op(
+ "close",
+ state_.cli_op(json_op(state_.stateful_op(files::op_close))),
+ );
+ i.register_op(
+ "seek",
+ state_.cli_op(json_op(state_.stateful_op(files::op_seek))),
+ );
+ i.register_op(
+ "fetch",
+ state_.cli_op(json_op(state_.stateful_op(fetch::op_fetch))),
+ );
+ i.register_op(
+ "metrics",
+ state_.cli_op(json_op(state_.stateful_op(metrics::op_metrics))),
+ );
+ i.register_op(
+ "repl_start",
+ state_.cli_op(json_op(state_.stateful_op(repl::op_repl_start))),
+ );
+ i.register_op(
+ "repl_readline",
+ state_.cli_op(json_op(state_.stateful_op(repl::op_repl_readline))),
+ );
+ i.register_op(
+ "accept",
+ state_.cli_op(json_op(state_.stateful_op(net::op_accept))),
+ );
+ i.register_op(
+ "dial",
+ state_.cli_op(json_op(state_.stateful_op(net::op_dial))),
+ );
+ i.register_op(
+ "dial_tls",
+ state_.cli_op(json_op(state_.stateful_op(net::op_dial))),
+ );
+ i.register_op(
+ "shutdown",
+ state_.cli_op(json_op(state_.stateful_op(net::op_shutdown))),
+ );
+ i.register_op(
+ "listen",
+ state_.cli_op(json_op(state_.stateful_op(net::op_listen))),
+ );
+ i.register_op(
+ "resources",
+ state_.cli_op(json_op(state_.stateful_op(resources::op_resources))),
+ );
+ i.register_op(
+ "get_random_values",
+ state_
+ .cli_op(json_op(state_.stateful_op(random::op_get_random_values))),
+ );
+ i.register_op(
+ "global_timer_stop",
+ state_
+ .cli_op(json_op(state_.stateful_op(timers::op_global_timer_stop))),
+ );
+ i.register_op(
+ "global_timer",
+ state_.cli_op(json_op(state_.stateful_op(timers::op_global_timer))),
+ );
+ i.register_op(
+ "now",
+ state_.cli_op(json_op(state_.stateful_op(performance::op_now))),
+ );
+ i.register_op(
+ "permissions",
+ state_.cli_op(json_op(state_.stateful_op(permissions::op_permissions))),
+ );
+ i.register_op(
+ "revoke_permission",
+ state_.cli_op(json_op(
+ state_.stateful_op(permissions::op_revoke_permission),
+ )),
+ );
+ i.register_op(
+ "create_worker",
+ state_.cli_op(json_op(state_.stateful_op(workers::op_create_worker))),
+ );
+ i.register_op(
+ "host_get_worker_closed",
+ state_.cli_op(json_op(
+ state_.stateful_op(workers::op_host_get_worker_closed),
+ )),
+ );
+ i.register_op(
+ "host_post_message",
+ state_
+ .cli_op(json_op(state_.stateful_op(workers::op_host_post_message))),
+ );
+ i.register_op(
+ "host_get_message",
+ state_
+ .cli_op(json_op(state_.stateful_op(workers::op_host_get_message))),
+ );
+ // TODO: make sure these two ops are only accessible to the appropriate Worker
+ i.register_op(
+ "worker_post_message",
+ state_
+ .cli_op(json_op(state_.stateful_op(workers::op_worker_post_message))),
+ );
+ i.register_op(
+ "worker_get_message",
+ state_
+ .cli_op(json_op(state_.stateful_op(workers::op_worker_get_message))),
+ );
+ i.register_op(
+ "run",
+ state_.cli_op(json_op(state_.stateful_op(process::op_run))),
+ );
+ i.register_op(
+ "run_status",
+ state_.cli_op(json_op(state_.stateful_op(process::op_run_status))),
+ );
+ i.register_op(
+ "kill",
+ state_.cli_op(json_op(state_.stateful_op(process::op_kill))),
+ );
+ i.register_op(
+ "chdir",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_chdir))),
+ );
+ i.register_op(
+ "mkdir",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_mkdir))),
+ );
+ i.register_op(
+ "chmod",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_chmod))),
+ );
+ i.register_op(
+ "chown",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_chown))),
+ );
+ i.register_op(
+ "remove",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_remove))),
+ );
+ i.register_op(
+ "copy_file",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_copy_file))),
+ );
+ i.register_op(
+ "stat",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_stat))),
+ );
+ i.register_op(
+ "read_dir",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_read_dir))),
+ );
+ i.register_op(
+ "rename",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_rename))),
+ );
+ i.register_op(
+ "link",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_link))),
+ );
+ i.register_op(
+ "symlink",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_symlink))),
+ );
+ i.register_op(
+ "read_link",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_read_link))),
+ );
+ i.register_op(
+ "truncate",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_truncate))),
+ );
+ i.register_op(
+ "make_temp_dir",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_make_temp_dir))),
+ );
+ i.register_op(
+ "cwd",
+ state_.cli_op(json_op(state_.stateful_op(fs::op_cwd))),
+ );
+ i.register_op(
+ "fetch_asset",
+ state_.cli_op(json_op(state_.stateful_op(compiler::op_fetch_asset))),
+ );
+ i.register_op(
+ "hostname",
+ state_.cli_op(json_op(state_.stateful_op(os::op_hostname))),
+ );
+
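+ // Dynamic `import()` requests coming out of the isolate are serviced by
+ // starting a RecursiveLoad rooted at the requesting module.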
+ let state_ = state.clone();
+ i.set_dyn_import(move |id, specifier, referrer| {
+ let load_stream = RecursiveLoad::dynamic_import(
+ id,
+ specifier,
+ referrer,
+ state_.clone(),
+ state_.modules.clone(),
+ );
+ Box::new(load_stream)
+ });
+
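+ // Raw V8 exceptions are converted into JSError values, with source maps
+ // applied via the TypeScript compiler state so stack traces point at the
+ // original sources.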
+ let state_ = state.clone();
+ i.set_js_error_create(move |v8_exception| {
+ JSError::from_v8_exception(v8_exception, &state_.ts_compiler)
+ })
+ }
+ Self { isolate, state }
+ }
+
+ /// Same as execute2() but the filename defaults to "$CWD/__anonymous__".
+ pub fn execute(&mut self, js_source: &str) -> Result<(), ErrBox> {
+ let path = env::current_dir().unwrap().join("__anonymous__");
+ let url = Url::from_file_path(path).unwrap();
+ self.execute2(url.as_str(), js_source)
+ }
+
+ /// Executes the provided JavaScript source code. The js_filename argument is
+ /// provided only for debugging purposes.
+ pub fn execute2(
+ &mut self,
+ js_filename: &str,
+ js_source: &str,
+ ) -> Result<(), ErrBox> {
+ let mut isolate = self.isolate.lock().unwrap();
+ isolate.execute(js_filename, js_source)
+ }
+
+ /// Loads and instantiates the provided JavaScript module, evaluating it
+ /// unless `is_prefetch` is true.
+ pub fn execute_mod_async(
+ &mut self,
+ module_specifier: &ModuleSpecifier,
+ is_prefetch: bool,
+ ) -> impl Future<Item = (), Error = ErrBox> {
+ let worker = self.clone();
+ let loader = self.state.clone();
+ let isolate = self.isolate.clone();
+ let modules = self.state.modules.clone();
+ let recursive_load =
+ RecursiveLoad::main(&module_specifier.to_string(), loader, modules)
+ .get_future(isolate);
+ recursive_load.and_then(move |id| -> Result<(), ErrBox> {
+ worker.state.progress.done();
+ if is_prefetch {
+ Ok(())
+ } else {
+ let mut isolate = worker.isolate.lock().unwrap();
+ isolate.mod_evaluate(id)
+ }
+ })
+ }
+}
+
+impl Future for Worker {
+ type Item = ();
+ type Error = ErrBox;
+
+ fn poll(&mut self) -> Result<Async<()>, ErrBox> {
+ let mut isolate = self.isolate.lock().unwrap();
+ isolate.poll()
+ }
+}
+
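Because Worker is itself a Future, the future returned by execute_mod_async resolves once the module has been loaded and evaluated; any async ops the module started are only driven by continuing to poll the worker. Condensed from the tests that follow (assuming `state` and `module_specifier` are prepared as in those tests):

  // Sketch (futures 0.1): run a module, then poll the worker's event loop.
  tokio_util::run(lazy(move || {
    let mut worker =
      Worker::new("main".to_string(), startup_data::deno_isolate_init(), state);
    worker.execute("denoMain()").unwrap();
    worker
      .execute_mod_async(&module_specifier, false)
      .then(move |result| {
        if let Err(err) = result {
          eprintln!("execute_mod err {:?}", err);
        }
        // Keep polling the worker until all of its pending ops resolve.
        tokio_util::panic_on_error(worker)
      })
  }));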
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::flags;
+ use crate::progress::Progress;
+ use crate::resources;
+ use crate::startup_data;
+ use crate::state::ThreadSafeState;
+ use crate::tokio_util;
+ use futures::future::lazy;
+ use std::sync::atomic::Ordering;
+
+ #[test]
+ fn execute_mod_esm_imports_a() {
+ let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+ .parent()
+ .unwrap()
+ .join("tests/esm_imports_a.js")
+ .to_owned();
+ let module_specifier =
+ ModuleSpecifier::resolve_url_or_path(&p.to_string_lossy()).unwrap();
+ let argv = vec![String::from("./deno"), module_specifier.to_string()];
+ let state = ThreadSafeState::new(
+ flags::DenoFlags::default(),
+ argv,
+ Progress::new(),
+ true,
+ )
+ .unwrap();
+ let state_ = state.clone();
+ tokio_util::run(lazy(move || {
+ let mut worker =
+ Worker::new("TEST".to_string(), StartupData::None, state);
+ worker
+ .execute_mod_async(&module_specifier, false)
+ .then(|result| {
+ if let Err(err) = result {
+ eprintln!("execute_mod err {:?}", err);
+ }
+ tokio_util::panic_on_error(worker)
+ })
+ }));
+
+ let metrics = &state_.metrics;
+ assert_eq!(metrics.resolve_count.load(Ordering::SeqCst), 2);
+ // Check that we didn't start the compiler.
+ assert_eq!(metrics.compiler_starts.load(Ordering::SeqCst), 0);
+ }
+
+ #[test]
+ fn execute_mod_circular() {
+ let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+ .parent()
+ .unwrap()
+ .join("tests/circular1.ts")
+ .to_owned();
+ let module_specifier =
+ ModuleSpecifier::resolve_url_or_path(&p.to_string_lossy()).unwrap();
+ let argv = vec![String::from("deno"), module_specifier.to_string()];
+ let state = ThreadSafeState::new(
+ flags::DenoFlags::default(),
+ argv,
+ Progress::new(),
+ true,
+ )
+ .unwrap();
+ let state_ = state.clone();
+ tokio_util::run(lazy(move || {
+ let mut worker =
+ Worker::new("TEST".to_string(), StartupData::None, state);
+ worker
+ .execute_mod_async(&module_specifier, false)
+ .then(|result| {
+ if let Err(err) = result {
+ eprintln!("execute_mod err {:?}", err);
+ }
+ tokio_util::panic_on_error(worker)
+ })
+ }));
+
+ let metrics = &state_.metrics;
+ // TODO assert_eq!(metrics.resolve_count.load(Ordering::SeqCst), 2);
+ // Check that we didn't start the compiler.
+ assert_eq!(metrics.compiler_starts.load(Ordering::SeqCst), 0);
+ }
+
+ #[test]
+ fn execute_006_url_imports() {
+ let http_server_guard = crate::test_util::http_server();
+
+ let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+ .parent()
+ .unwrap()
+ .join("cli/tests/006_url_imports.ts")
+ .to_owned();
+ let module_specifier =
+ ModuleSpecifier::resolve_url_or_path(&p.to_string_lossy()).unwrap();
+ let argv = vec![String::from("deno"), module_specifier.to_string()];
+ let mut flags = flags::DenoFlags::default();
+ flags.reload = true;
+ let state =
+ ThreadSafeState::new(flags, argv, Progress::new(), true).unwrap();
+ let state_ = state.clone();
+ tokio_util::run(lazy(move || {
+ let mut worker = Worker::new(
+ "TEST".to_string(),
+ startup_data::deno_isolate_init(),
+ state,
+ );
+ worker.execute("denoMain()").unwrap();
+ worker
+ .execute_mod_async(&module_specifier, false)
+ .then(|result| {
+ if let Err(err) = result {
+ eprintln!("execute_mod err {:?}", err);
+ }
+ tokio_util::panic_on_error(worker)
+ })
+ }));
+
+ let metrics = &state_.metrics;
+ assert_eq!(metrics.resolve_count.load(Ordering::SeqCst), 3);
+ // Check that we've only invoked the compiler once.
+ assert_eq!(metrics.compiler_starts.load(Ordering::SeqCst), 1);
+ drop(http_server_guard);
+ }
+
+ fn create_test_worker() -> Worker {
+ let state = ThreadSafeState::mock(vec![
+ String::from("./deno"),
+ String::from("hello.js"),
+ ]);
+ let mut worker =
+ Worker::new("TEST".to_string(), startup_data::deno_isolate_init(), state);
+ worker.execute("denoMain()").unwrap();
+ worker.execute("workerMain()").unwrap();
+ worker
+ }
+
+ #[test]
+ fn test_worker_messages() {
+ tokio_util::init(|| {
+ let mut worker = create_test_worker();
+ let source = r#"
+ onmessage = function(e) {
+ console.log("msg from main script", e.data);
+ if (e.data == "exit") {
+ delete window.onmessage;
+ return;
+ } else {
+ console.assert(e.data === "hi");
+ }
+ postMessage([1, 2, 3]);
+ console.log("after postMessage");
+ }
+ "#;
+ worker.execute(source).unwrap();
+
+ let resource = worker.state.resource.clone();
+ let resource_ = resource.clone();
+
+ tokio::spawn(lazy(move || {
+ worker.then(move |r| -> Result<(), ()> {
+ resource_.close();
+ r.unwrap();
+ Ok(())
+ })
+ }));
+
+ let msg = json!("hi").to_string().into_boxed_str().into_boxed_bytes();
+
+ let r = resources::post_message_to_worker(resource.rid, msg).wait();
+ assert!(r.is_ok());
+
+ let maybe_msg = resources::get_message_from_worker(resource.rid)
+ .wait()
+ .unwrap();
+ assert!(maybe_msg.is_some());
+ // Check that the received message is [1, 2, 3] serialized as JSON.
+ assert_eq!(*maybe_msg.unwrap(), *b"[1,2,3]");
+
+ let msg = json!("exit")
+ .to_string()
+ .into_boxed_str()
+ .into_boxed_bytes();
+ let r = resources::post_message_to_worker(resource.rid, msg).wait();
+ assert!(r.is_ok());
+ })
+ }
+
+ #[test]
+ fn removed_from_resource_table_on_close() {
+ tokio_util::init(|| {
+ let mut worker = create_test_worker();
+ worker
+ .execute("onmessage = () => { delete window.onmessage; }")
+ .unwrap();
+
+ let resource = worker.state.resource.clone();
+ let rid = resource.rid;
+
+ let worker_future = worker
+ .then(move |r| -> Result<(), ()> {
+ resource.close();
+ println!("workers.rs after resource close");
+ r.unwrap();
+ Ok(())
+ })
+ .shared();
+
+ let worker_future_ = worker_future.clone();
+ tokio::spawn(lazy(move || worker_future_.then(|_| Ok(()))));
+
+ assert_eq!(resources::get_type(rid), Some("worker".to_string()));
+
+ let msg = json!("hi").to_string().into_boxed_str().into_boxed_bytes();
+ let r = resources::post_message_to_worker(rid, msg).wait();
+ assert!(r.is_ok());
+ debug!("rid {:?}", rid);
+
+ worker_future.wait().unwrap();
+ assert_eq!(resources::get_type(rid), None);
+ })
+ }
+
+ #[test]
+ fn execute_mod_resolve_error() {
+ tokio_util::init(|| {
+ // "foo" is not a valid module specifier so this should return an error.
+ let mut worker = create_test_worker();
+ let module_specifier =
+ ModuleSpecifier::resolve_url_or_path("does-not-exist").unwrap();
+ let result = worker.execute_mod_async(&module_specifier, false).wait();
+ assert!(result.is_err());
+ })
+ }
+
+ #[test]
+ fn execute_mod_002_hello() {
+ tokio_util::init(|| {
+ // This assumes cwd is project root (an assumption made throughout the
+ // tests).
+ let mut worker = create_test_worker();
+ let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+ .parent()
+ .unwrap()
+ .join("tests/002_hello.ts")
+ .to_owned();
+ let module_specifier =
+ ModuleSpecifier::resolve_url_or_path(&p.to_string_lossy()).unwrap();
+ let result = worker.execute_mod_async(&module_specifier, false).wait();
+ assert!(result.is_ok());
+ })
+ }
+}