use crate::elm;
use crate::reporting::{CompilerError, InterpreterError, Problem, SetupError, TypeError};
use crate::PortableHash;
use deno_core::futures::StreamExt;
use elm_project_utils::ChecksumConstraint;
use os_pipe::dup_stderr;
use serde::{Deserialize, Serialize};
use sqlx::sqlite::SqlitePool;
use sqlx::Row;
use std::cell::RefCell;
use std::fs::{self, canonicalize};
use std::hash::Hasher;
use std::io::{self, Read, Write};
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::sync::Arc;
use std::time::Instant;
use tokio;
use tracing::{info_span, Instrument};

mod fixtures;

#[derive(Serialize)]
#[serde(tag = "$")]
pub(crate) enum ElmResult<T, E> {
    Ok { a: T },
    Err { a: E },
}

impl<T, E> ElmResult<T, E> {
    fn ok(a: T) -> Self {
        Self::Ok { a }
    }

    fn err(a: E) -> Self {
        Self::Err { a }
    }
}

pub(crate) fn exec(
    file: PathBuf,
    debug: bool,
    function: String,
    input_source: Option<PathBuf>,
    output: Option<PathBuf>,
    verbosity: u64,
    sqlite_path: Option<PathBuf>,
) -> Result<(), Problem> {
    // Our first elm make call is where we build the user's program. There is a pretty good
    // chance this won't work.
    elm::make(&file, debug, verbosity)?;

    // Step 2: find the elm.json and elm-stuff directory.
    let elm_project_dir =
        elm::find_project_root("elm.json", "./").map_err(CompilerError::MissingElmJson)?;
    let elm_cache_dir = elm_project_dir.join("elm-stuff").join("0.19.1");
    if !elm_cache_dir.is_dir() {
        return Err(CompilerError::MissingElmStuff(elm_cache_dir).into());
    }

    let data = std::fs::read(&file)
        .map_err(|io_err| CompilerError::ReadInputFailed(io_err, file.clone()))?;

    let mut hasher = PortableHash::new();
    hasher.write_all(&data).unwrap();
    // Also include the function name in the checksum so users can run multiple functions
    // from the same file but still get caching if the file does not change.
    hasher.write_all(function.as_bytes()).unwrap();
    let source_checksum = hasher.finish();

    // Step 2.5: get the module name out of the file.
    let entrypoint = elmi::Global(
        elmi::ModuleNameCanonical {
            package: elmi::PackageName::new("author", "project"),
            module: elm::parse_module_name(&data)?,
        },
        elmi::Name(function.clone()),
    );

    // Step 3: find all the filepaths in the elm-stuff/0.19.1/* folder.
    let interfaces = elm::load_interfaces(&elm_cache_dir)?;

    // Step 5: check for the desired function.
    let span = info_span!("resolved target function");
    let timing_guard = span.enter();
    let (input_type, output_type) = match interfaces.get(&entrypoint.0) {
        Some(interface) => match interface.values.get(&entrypoint.1) {
            Some(annotation) => {
                let elmi::CannonicalAnnotation(_free_vars, tipe) = annotation;
                resolve_function_type(tipe)?
            }
            None => return Err(CompilerError::BadImport(entrypoint).into()),
        },
        None => return Err(CompilerError::MissingModuleTypeInformation(entrypoint.0).into()),
    };
    drop(timing_guard);

    // Step 6: create our private project.
    let generator_dir = setup_generator_project(verbosity, elm_project_dir.clone())?;

    // Step 7: create an Elm fixture file to run our function.
    let span = info_span!("create Elm fixture files");
    let timing_guard = span.enter();
    let (gen_module_name, source) = fixtures::scripting::generate(
        source_checksum,
        entrypoint.0.module.clone(),
        entrypoint.1.clone(),
        input_type,
        output_type,
    );
    let mut source_filename = generator_dir.join("src").join(&gen_module_name);
    source_filename.set_extension("elm");

    let span = info_span!("file writes");
    let file_write_timing_guard = span.enter();
    fs::File::create(&source_filename)
        .map_err(|io_err| CompilerError::WriteOutputFailed(io_err, source_filename.clone()))?
        .write_all(source.as_bytes())
        .map_err(|io_err| CompilerError::WriteOutputFailed(io_err, source_filename.clone()))?;
    drop(file_write_timing_guard);
    drop(timing_guard);

    // Step 8: compile the fixture.
    let mut intermediate_file = generator_dir.join("obj").join(&gen_module_name);
    intermediate_file.set_extension("js");

    let mut command = Command::new("elm");
    command
        .arg("make")
        .arg("--output")
        .arg(&intermediate_file)
        .current_dir(&generator_dir)
        .stdin(Stdio::null())
        .stderr(Stdio::piped());
    if verbosity < 1 {
        command.stdout(Stdio::piped());
    } else {
        let pipe = dup_stderr()
            .map_err(|io_err| CompilerError::ReadInputFailed(io_err, "stdout".into()))?;
        command.stdout(pipe);
    }
    if debug {
        command.arg("--debug");
    }
    command.arg(&source_filename);

    let span = info_span!("elm make fixture file");
    let timing_guard = span.enter();
    match command.output() {
        Ok(output) => {
            if !output.status.success() {
                return Err(CompilerError::FailedBuildingFixture.into());
            }
        }
        Err(_) => {
            return Err(CompilerError::FailedBuildingFixture.into());
        }
    }
    drop(timing_guard);

    // Step 9: fixup the compiled script to run in Deno.
    let data = fs::read_to_string(&intermediate_file)
        .map_err(|io_err| CompilerError::ReadInputFailed(io_err, intermediate_file.clone()))?;

    // TODO figure out how to replace multiple substrings in a single pass. One neat trick
    // might be to allocate enough space in our starting buffer to write the new code by
    // having a really large replace block. For example if we are replacing a string
    // `"REPLACE_ME" + "abc" * 2000` we would have over 6k bytes to write out the new code.
    // We will have to do some extra bookkeeping to make sure the buffer space is big enough
    // for the replacement code.
    let span = info_span!("munge fixture javascript");
    let timing_guard = span.enter();
    let final_script = (|| {
        let mut final_script = data
            .replace("'REPLACE_ME_WITH_JSON_STRINGIFY'", "JSON.stringify(x)")
            .replace(
                "$elm$json$Json$Decode$fail('REPLACE_ME_WITH_BYTES_DECODER');",
                r#"
_Json_decodePrim(function(value) {
    return (typeof value === 'object' && value instanceof DataView)
        ?
          $elm$core$Result$Ok(value)
        : _Json_expecting('a DataView', value);
});"#,
            )
            .replace(";}(this));", ";}(globalThis));");

        if sqlite_path.is_some() {
            final_script = final_script
                .replace(
                    "var $author$project$Astrid$Query$execute = function (query) {\n\treturn $author$project$Astrid$Query$dummyExecute;\n};",
                    include_str!("fixtures/sql-client-integration.js"),
                )
                .replace(
                    "var $author$project$Astrid$Query$fetch = F3(\n\tfunction (sql, parameters, decoder) {\n\t\treturn $author$project$Astrid$Query$Dummy;\n\t});",
                    "var $author$project$Astrid$Query$fetch = _Query_fetchAll;",
                )
                .replace(
                    "var $author$project$Astrid$Query$fetchOne = F3(\n\tfunction (sql, parameters, decoder) {\n\t\treturn $author$project$Astrid$Query$Dummy;\n\t});",
                    "var $author$project$Astrid$Query$fetchOne = _Query_fetchOne;",
                )
                .replace(
                    "var $author$project$Astrid$Query$map3 = F4(\n\tfunction (f, a, b, c) {\n\t\treturn $author$project$Astrid$Query$Dummy;\n\t});",
                    r#"var $author$project$Astrid$Query$map3 = _Query_map3;"#,
                )
                .replace(
                    "var $author$project$Astrid$Query$map2 = F3(\n\tfunction (f, a, b) {\n\t\treturn $author$project$Astrid$Query$Dummy;\n\t});",
                    r#"var $author$project$Astrid$Query$map2 = _Query_map2;"#,
                )
                .replace(
                    "var $author$project$Astrid$Query$map = F2(\n\tfunction (f, a) {\n\t\treturn $author$project$Astrid$Query$Dummy;\n\t});",
                    r#"var $author$project$Astrid$Query$map = _Query_map1;"#,
                )
                .replace(
                    "var $author$project$Astrid$Query$andThen = F2(\n\tfunction (f, q) {\n\t\treturn $author$project$Astrid$Query$Dummy;\n\t});",
                    r#"var $author$project$Astrid$Query$andThen = _Query_andThen;"#,
                );
            // final_script.replace("var $author$project$Astrid$Query$run = ", "JSON.stringify(x)");
            final_script.push_str("\n\n");
            //final_script.push_str(r#"
            //    Deno.core.print(JSON.stringify(
            //        Deno.core.opSync(
            //            'op_starmelon_batch_queries',
            //            [ [true, "select json_object('id', id, 'foo', foo) from foobar", []]
            //            , [false, "select json_object('id', id, 'foo', foo) from foobar", []]
            //            ]
            //        )
            //    ))
            //"#);
        }
        final_script.push_str("\n\n");

        // I think that when I set this script to be the main module, I am skipping the
        // deno/runtime/js/99_main.js script that sets up a bunch of global variables. If I
        // manually add the timer related code below then setTimeout works again.
        // NB. there are 706 lines of setup code that add a bunch of apis to the global window
        // scope. Figure out if I need to include all of them. For example, starmelon does not
        // need to perform http calls right now, but I eventually want to.
        final_script.push_str("const { setTimeout } = globalThis.__bootstrap.timers;\n");
        final_script.push_str(
            "Deno.core.setMacrotaskCallback(globalThis.__bootstrap.timers.handleTimerMacrotask);\n",
        );
        final_script.push_str("globalThis.setTimeout = setTimeout;\n");

        final_script.push_str(&format!("var worker = Elm.{}.init();\n", &gen_module_name));

        // Add a shortcut for invoking the function so I don't have to traverse so many object
        // lookups using the rust v8 API.
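        // The generated fixture exposes two ports: `onInput`, which hands the input value to
        // the Elm worker, and `onOutput`, which reports either an `Ok` payload or a problem
        // back to us. The shims appended below wrap `onInput` in a `globalThis.runOnInput`
        // helper keyed off the resolved input type, and forward `onOutput` results to the
        // matching starmelon op based on the resolved output type.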
        match input_type {
            None => {
                final_script.push_str(
                    "globalThis.runOnInput = function() { worker.ports.onInput.send(null) };\n",
                );
            }
            Some(InputType::Value) => {
                final_script
                    .push_str("globalThis.runOnInput = function(data) { worker.ports.onInput.send(JSON.parse(data)) };\n");
            }
            Some(InputType::String) => {
                final_script.push_str(
                    "globalThis.runOnInput = function(data) { worker.ports.onInput.send(data) };",
                );
            }
            Some(InputType::Bytes) => {
                final_script.push_str(
                    r#"
globalThis.runOnInput = function(data) {
    const dv = new DataView(data.buffer)
    worker.ports.onInput.send(dv)
};"#,
                );
            }
        }

        match output_type {
            OutputType::Value => {
                final_script.push_str(
                    r#"
worker.ports.onOutput.subscribe(function(output){
    if (output.ctor === "Ok") {
        const json = JSON.stringify(output.a);
        Deno.core.opSync('op_starmelon_string_output', json);
    } else {
        Deno.core.opSync('op_starmelon_problem', output.a);
    }
});
"#,
                );
            }
            OutputType::Bytes => {
                final_script.push_str(
                    r#"
// Elm will send a DataView
worker.ports.onOutput.subscribe(function(output){
    if (output.ctor === "Ok") {
        const ui8 = new Uint8Array(output.a.buffer);
        Deno.core.opSync('op_starmelon_bytes_output', ui8);
    } else {
        Deno.core.opSync('op_starmelon_problem', output.a);
    }
});
"#,
                );
            }
            OutputType::String | OutputType::Html => {
                final_script.push_str(
                    r#"
worker.ports.onOutput.subscribe(function(output){
    if (output.ctor === "Ok") {
        Deno.core.opSync('op_starmelon_string_output', output.a);
    } else {
        Deno.core.opSync('op_starmelon_problem', output.a);
    }
});
"#,
                );
            }
        }

        final_script
    })();
    drop(timing_guard);

    let mut final_file = generator_dir.join("bin").join(&gen_module_name);
    final_file.set_extension("js");

    let span = info_span!("file writes");
    let timing_guard = span.enter();
    std::fs::write(&final_file, final_script)
        .map_err(|io_err| CompilerError::WriteOutputFailed(io_err, final_file.clone()))?;
    drop(timing_guard);

    // Create a tokio runtime before registering ops so we can block on futures inside sync ops.
    let span = info_span!("create tokio runtime");
    let timing_guard = span.enter();
    let sys = tokio::runtime::Builder::new_current_thread()
        // The default number of additional threads for running blocking FnOnce is 512.
        .max_blocking_threads(1)
        .enable_all()
        .build()
        .unwrap();
    drop(timing_guard);

    // Step 10: create a v8 isolate.
    // We need to register a different callback depending on the output type (string or bytes).
    let span = info_span!("create v8 isolate");
    let timing_guard = span.enter();
    let (mut worker, main_module) = runtime::setup_worker(&final_file.to_string_lossy())
        .map_err(|err| InterpreterError::EventLoop(err))?;
    drop(timing_guard);

    let span = info_span!("register private api");
    let timing_guard = span.enter();
    let mailbox: Arc<RefCell<Option<Result<Vec<u8>, String>>>> = Arc::new(RefCell::new(None));

    let mailbox_clone = Arc::clone(&mailbox);
    worker.js_runtime.register_op(
        "op_starmelon_bytes_output",
        deno_core::op_sync(move |_state, msg: deno_core::ZeroCopyBuf, _: ()| {
            let slice: &[u8] = &msg;
            eprintln!("got message from v8 runtime {:?}", slice.to_owned());
            if let Ok(mut mailbox) = mailbox_clone.try_borrow_mut() {
                mailbox.replace(Ok(slice.to_owned()));
            }
            Ok(())
        }),
    );

    let mailbox_clone = Arc::clone(&mailbox);
    worker.js_runtime.register_op(
        "op_starmelon_string_output",
        deno_core::op_sync(move |_state, msg: String, _: ()| {
            if let Ok(mut mailbox) = mailbox_clone.try_borrow_mut() {
                mailbox.replace(Ok(msg.into_bytes()));
            }
            Ok(())
        }),
    );

    let mailbox_clone = Arc::clone(&mailbox);
    worker.js_runtime.register_op(
        "op_starmelon_problem",
        deno_core::op_sync(move |_state, msg: String, _: ()| {
            eprintln!("got problem from v8 runtime {:?}", &msg);
            if let Ok(mut mailbox) = mailbox_clone.try_borrow_mut() {
                mailbox.replace(Err(msg));
            }
            Ok(())
        }),
    );

    // Step 10.B: setup the sqlite database feature.
    let sql_background_thread_handle = if let Some(database_url) = sqlite_path {
        // I want to construct the connection in the initial thread so I can tell if the
        // connection failed.
        let db_pool = sys
            .block_on(async { SqlitePool::connect(&database_url.to_string_lossy()).await })
            .unwrap();

        let (worker_mailbox, rx) = std::sync::mpsc::channel::<(
            oneshot::Sender<ElmResult<Vec<Vec<String>>, String>>,
            Vec<(bool, String, Vec<String>)>,
        )>();

        let sql_worker_thread = std::thread::spawn(move || {
            let worker = tokio::runtime::Builder::new_current_thread()
                .enable_all()
                .build()
                .unwrap();
            loop {
                if let Ok((response, queries)) = rx.recv() {
                    // I am not sure if I should only work on one database task at a time, or
                    // submit as many tasks as possible. Just spawning the future onto this
                    // executor does not seem to work, even though the docs say the thread pool
                    // will poll the future until it completes.
                    let db_pool_clone = db_pool.clone();
                    let span = info_span!("inside sql queries futures");
                    let f = async move {
                        let _start = Instant::now();
                        let db_pool = db_pool_clone;
                        let mut result: Vec<Vec<String>> = vec![];
                        for (fetch_all, sql, _args) in queries {
                            let mut acc = Vec::new();
                            if fetch_all {
                                let mut stream = sqlx::query(&sql).fetch(&db_pool);
                                loop {
                                    match stream.next().await {
                                        None => break,
                                        Some(Ok(row)) => {
                                            match row.try_get::<String, _>(0) {
                                                Ok(s) => acc.push(s),
                                                // TODO set an error flag before returning this one
                                                Err(_) => break,
                                            };
                                        }
                                        Some(Err(err)) => {
                                            eprintln!("got fetch_all sql error {:?}", err);
                                            break;
                                        }
                                    }
                                }
                                result.push(acc);
                            } else {
                                let s = sqlx::query(&sql)
                                    .fetch_one(&db_pool)
                                    .await
                                    .and_then(|row| row.try_get::<String, _>(0))
                                    .unwrap();
                                result.push(vec![s]);
                            }
                        }
                        response.send(ElmResult::ok(result))
                    };
                    // I found it interesting that the runtime of the future from the viewpoint
                    // of tracing was around 230us for a trivial select 2 rows query, but the
                    // walltime I measured was around 700us. So polling the future or waiting
                    // for file IO is more expensive than I thought.
                    worker.block_on(f.instrument(span)).unwrap();
                } else {
                    break;
                }
            }
        });

        let worker_mailbox_clone = worker_mailbox.clone();
        worker.js_runtime.register_op(
            "op_starmelon_batch_queries",
            deno_core::op_sync(
                move |_state, queries: Vec<(bool, String, Vec<String>)>, _: ()| {
                    let worker_mailbox = worker_mailbox_clone.clone();
                    let (sender, receiver) =
                        oneshot::channel::<ElmResult<Vec<Vec<String>>, String>>();
                    let span = info_span!("run sql");
                    let timing_guard = span.enter();
                    worker_mailbox.send((sender, queries)).unwrap();
                    let elm_result = receiver.recv().unwrap();
                    drop(timing_guard);
                    Ok(elm_result)
                },
            ),
        );

        Some((worker_mailbox, sql_worker_thread))
    } else {
        None
    };

    worker.js_runtime.sync_ops_cache();
    drop(timing_guard);

    // Step 11: marshal the input into the v8 isolate. If we are reading from an input file
    // load that, if we are reading from stdin read that.
    let input = match input_type {
        None => None,
        Some(input_type) => {
            let buffer = match input_source {
                None => {
                    // Read from stdin when input_source was not present.
                    let mut stdin = io::stdin();
                    let mut buffer = Vec::new();
                    stdin.read_to_end(&mut buffer).unwrap();
                    buffer
                }
                Some(path) => {
                    let buffer = std::fs::read(&path)
                        .map_err(|io_err| CompilerError::ReadInputFailed(io_err, path.clone()))?;
                    buffer
                }
            };
            match input_type {
                InputType::String => {
                    let s = String::from_utf8(buffer).map_err(|_| CompilerError::BadInput)?;
                    Some(ValidatedInput::String(s))
                }
                InputType::Bytes => Some(ValidatedInput::Bytes(buffer)),
                InputType::Value => {
                    let _: serde_json::Value =
                        serde_json::from_slice(&buffer).map_err(|_| CompilerError::BadInput)?;
                    let s = String::from_utf8(buffer).map_err(|_| CompilerError::BadInput)?;
                    Some(ValidatedInput::Value(s))
                }
            }
        }
    };

    let span = info_span!("eval javascript");
    let timing_guard = span.enter();
    sys.block_on(async move { runtime::xyz(worker, main_module, input).await })?;
    drop(timing_guard);

    // Step 13: receive the callback.
    // If I understood which combination of run_event_loop calls is required to execute the
    // javascript to completion, then we should have something in our mailbox by now. Another
    // way to do this would be with an Arc. This will panic if the mailbox is currently
    // borrowed.
    match mailbox.replace(None) {
        Some(Ok(buffer)) => match output {
            None => {
                io::stdout()
                    .write_all(&buffer)
                    .map_err(|io_err| CompilerError::WriteOutputFailed(io_err, "stdout".into()))?;
            }
            Some(filename) => {
                let mut f = fs::File::create(&filename)
                    .map_err(|io_err| CompilerError::WriteOutputFailed(io_err, filename.clone()))?;
                f.write_all(&buffer)
                    .map_err(|io_err| CompilerError::WriteOutputFailed(io_err, filename))?;
            }
        },
        Some(Err(problem)) => {
            println!("had a problem {}", problem);
        }
        None => println!("nothing in the mailbox"),
    }

    if let Some((tx, thread_handle)) = sql_background_thread_handle {
        drop(tx);
        thread_handle.join().unwrap();
    }

    Ok(())
}

#[derive(Debug, Copy, Clone)]
pub enum InputType {
    Value,
    String,
    Bytes,
}

pub enum ValidatedInput {
    String(String),
    Value(String),
    Bytes(Vec<u8>),
}

#[derive(Debug, Copy, Clone)]
pub enum OutputType {
    Html,
    String,
    Bytes,
    Value,
}

fn resolve_function_type(tipe: &elmi::Type) -> Result<(Option<InputType>, OutputType), TypeError> {
    match tipe {
        elmi::Type::TLambda(a, b) => {
            // We want to check the output types first because this is where we will figure out
            // if there is more than one argument.
            let output_type = resolve_output_type(&**b)?;
            let input_type = resolve_input_type(&**a)?;
            Ok((Some(input_type), output_type))
        }
        elmi::Type::TVar(_) => Err(TypeError::CantEvalGeneric),
        elmi::Type::TType(module_name, name, args) if args.is_empty() => {
            // If our function returns a primitive type.
            if module_name == "elm/core/String" && name == "String" {
                return Ok((None, OutputType::String));
            }
            if module_name == "elm/bytes/Bytes" && name == "Bytes" {
                return Ok((None, OutputType::Bytes));
            }
            Err(TypeError::CantEvalType(tipe.clone()))
        }
        elmi::Type::TType(module_name, name, _args) => {
            if module_name == "elm/virtual-dom/VirtualDom" && name == "Node" {
                return Ok((None, OutputType::Html));
            }
            Err(TypeError::CantEvalCustomType)
        }
        elmi::Type::TRecord(_, _) => Err(TypeError::CantEvalRecord),
        elmi::Type::TUnit => Err(TypeError::CantEvalUnit),
        elmi::Type::TTuple(_, _, _) => Err(TypeError::CantEvalTuple),
        elmi::Type::TAlias(_, _, _, ref alias) => {
            match &**alias {
                elmi::AliasType::Filled(tipe) => {
                    // I think the recursion is limited to a single step.
                    // I have not tested what the CannonicalAnnotation would look like for a
                    // doubly indirect alias, for example for `view` below:
                    //
                    // ```elm
                    // type alias Foo = Int
                    // type alias Bar = String
                    //
                    // type alias Zap = Foo -> Bar
                    //
                    // view : Zap
                    // ```
                    resolve_function_type(tipe)
                }
                elmi::AliasType::Holey(_) => Err(TypeError::CantEvalHoleyAlias),
            }
        }
    }
}

fn resolve_input_type(tipe: &elmi::Type) -> Result<InputType, TypeError> {
    match tipe {
        elmi::Type::TLambda(_, _) => Err(TypeError::EvalRequiresSingleArgument(tipe.clone())),
        elmi::Type::TType(module_name, name, args) if args.is_empty() => {
            if module_name == "elm/core/String" && name == "String" {
                Ok(InputType::String)
            } else if module_name == "elm/bytes/Bytes" && name == "Bytes" {
                Ok(InputType::Bytes)
            } else if module_name == "elm/json/Json.Encode" && name == "Value" {
                Ok(InputType::Value)
            } else {
                Err(TypeError::InputTypeNotSupported(tipe.clone()))
            }
        }
        elmi::Type::TAlias(_, _, _, ref alias) => match &**alias {
            elmi::AliasType::Filled(tipe) => resolve_input_type(tipe),
            elmi::AliasType::Holey(_) => Err(TypeError::CantEvalHoleyAlias),
        },
        _ => Err(TypeError::InputTypeNotSupported(tipe.clone())),
    }
}

fn resolve_output_type(tipe: &elmi::Type) -> Result<OutputType, TypeError> {
    match tipe {
        elmi::Type::TType(module_name, name, _args) => {
            if module_name == "elm/core/String" && name == "String" {
                Ok(OutputType::String)
            } else if module_name == "elm/bytes/Bytes" && name == "Bytes" {
                Ok(OutputType::Bytes)
            } else if module_name == "elm/json/Json.Encode" && name == "Value" {
                Ok(OutputType::Value)
            } else if module_name == "elm/virtual-dom/VirtualDom" && name == "Node" {
                Ok(OutputType::Html)
            } else {
                Err(TypeError::OutputTypeNotSupported(tipe.clone()))
            }
        }
        elmi::Type::TAlias(_, _, _, ref alias) => match &**alias {
            elmi::AliasType::Filled(tipe) => resolve_output_type(tipe),
            elmi::AliasType::Holey(_) => Err(TypeError::CantEvalHoleyAlias),
        },
        _ => Err(TypeError::OutputTypeNotSupported(tipe.clone())),
    }
}

fn setup_generator_project(verbosity: u64, elm_project_dir: PathBuf) -> Result<PathBuf, Problem> {
    // I added a couple of random bytes to the directory name to reduce the risk of collisions
    // with other programs that also use elm-stuff for their scratch space.
    let our_temp_dir = elm_project_dir.join("elm-stuff").join("starmelon-5d9ecc");
    if !our_temp_dir.exists() {
        fs::create_dir(&our_temp_dir)
            .map_err(|io_err| CompilerError::WriteOutputFailed(io_err, our_temp_dir.clone()))?;
    }
    let source_dir = our_temp_dir.join("src");
    let bin_dir = our_temp_dir.join("bin");

    // Rather than attempt to modify the elm.json to use the extra libraries, I will invoke
    // elm install. Because the elm install commands take 500ms for a no-op, I decided to use
    // a checksum file to detect when the source elm.json changes.
    let elm_json_path = elm_project_dir.join("elm.json");
    let mut constraint =
        ChecksumConstraint::new(&elm_json_path, our_temp_dir.join("elm-json-checksum"))?;
    if !constraint.dirty()? {
        return Ok(our_temp_dir);
    }
    let mut data = fs::read(&elm_json_path)
        .map_err(|io_err| CompilerError::ReadInputFailed(io_err, elm_json_path.clone()))?;
    let mut elm_json = serde_json::from_slice::<ElmApplication>(&mut data)
        .map_err(|err| CompilerError::FailedParseElmJson(err))?;

    let mut new_src_directories = Vec::with_capacity(elm_json.source_directories.len() + 1);
    for dirname in elm_json.source_directories.iter() {
        let dir_path = elm_project_dir.join(dirname);
        let dir = canonicalize(&dir_path)
            .map_err(|io_err| CompilerError::ReadInputFailed(io_err, dir_path.clone()))?;
        new_src_directories.push(dir.to_string_lossy().to_string());
    }

    if !source_dir.exists() {
        std::fs::create_dir(&source_dir)
            .map_err(|io_err| CompilerError::WriteOutputFailed(io_err, source_dir.clone()))?;
    }
    if !bin_dir.exists() {
        std::fs::create_dir(&bin_dir)
            .map_err(|io_err| CompilerError::WriteOutputFailed(io_err, bin_dir.clone()))?;
    }

    new_src_directories.push(
        canonicalize(our_temp_dir.join("src"))
            .map_err(|io_err| CompilerError::ReadInputFailed(io_err, our_temp_dir.join("src")))?
            .to_string_lossy()
            .to_string(),
    );

    eprintln!("before json {:?}", elm_json);
    elm_json.source_directories = new_src_directories;
    eprintln!("modified json {:?}", elm_json);

    let generator_elm_json_path = our_temp_dir.join("elm.json");
    let mut generator_elm_json_file =
        std::fs::File::create(&generator_elm_json_path).map_err(|io_err| {
            CompilerError::WriteOutputFailed(io_err, generator_elm_json_path.clone())
        })?;
    serde_json::to_writer(&mut generator_elm_json_file, &elm_json).map_err(|io_err| {
        CompilerError::WriteElmJsonFailed(io_err, generator_elm_json_path.clone())
    })?;

    elm_install(verbosity, &our_temp_dir, "ThinkAlexandria/elm-html-in-elm")?;
    elm_install(verbosity, &our_temp_dir, "elm/json")?;
    elm_install(verbosity, &our_temp_dir, "elm/bytes")?;

    constraint.update()?;

    Ok(our_temp_dir)
}

#[derive(Debug, Serialize, Deserialize)]
struct ElmApplication {
    #[serde(rename = "source-directories")]
    source_directories: Vec<String>,
    #[serde(flatten)]
    other_fields: serde_json::Value,
}

fn elm_install<P: AsRef<Path>, S: AsRef<str>>(
    verbosity: u64,
    our_temp_dir: P,
    package: S,
) -> Result<(), Problem> {
    let span = info_span!("elm_install");
    let _guard = span.enter();
    let mut command = Command::new("elm");
    command
        .arg("install")
        .arg(package.as_ref())
        .stdin(Stdio::piped())
        .current_dir(&our_temp_dir);
    if verbosity < 1 {
        command.stderr(Stdio::null());
        command.stdout(Stdio::null());
    } else {
        let pipe = dup_stderr()
            .map_err(|io_err| CompilerError::ReadInputFailed(io_err, "stdout".into()))?;
        command.stdout(pipe);
        command.stderr(Stdio::piped());
    }

    let mut child = command.spawn().unwrap();
    // Take stdin so dropping it actually closes the pipe after we approve the install prompt.
    let mut child_stdin = child.stdin.take().unwrap();
    child_stdin.write_all(b"y\n").unwrap();
    drop(child_stdin);
    match child.wait() {
        Ok(exit_status) => {
            if !exit_status.success() {
                return Err(SetupError::InstallDependencyFailed.into());
            }
        }
        Err(_) => {
            return Err(SetupError::InstallDependencyFailed.into());
        }
    }

    Ok(())
}

mod runtime {
    use crate::exec::ValidatedInput;
    use crate::reporting::InterpreterError;
    use deno_core::error::{type_error, AnyError};
    use deno_core::futures::FutureExt;
    use deno_core::{resolve_url, FsModuleLoader, ModuleLoader, ModuleSpecifier};
    use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
    use deno_runtime::permissions::Permissions;
    use deno_runtime::worker::MainWorker;
    use deno_runtime::worker::WorkerOptions;
    use deno_runtime::BootstrapOptions;
    use deno_web::BlobStore;
    use rusty_v8 as v8;
    use std::convert::TryFrom;
    use std::pin::Pin;
    use std::rc::Rc;
    use std::sync::Arc;
    use tracing::{info_span, Instrument};

    fn get_error_class_name(e: &AnyError) -> &'static str {
        deno_runtime::errors::get_error_class_name(e).unwrap_or("Error")
    }

    pub fn setup_worker(path_str: &str) -> Result<(MainWorker, ModuleSpecifier), AnyError> {
        //let module_loader = Rc::new(EmbeddedModuleLoader("void".to_owned()));
        let module_loader = Rc::new(FsModuleLoader);
        let create_web_worker_cb = Arc::new(|_| {
            todo!("Web workers are not supported in the example");
        });

        let options = WorkerOptions {
            bootstrap: BootstrapOptions {
                args: vec![],
                apply_source_maps: false,
                cpu_count: 1,
                debug_flag: false,
                enable_testing_features: false,
                location: None,
                no_color: false,
                runtime_version: "0.29.0".to_string(),
                ts_version: "2.0.0".to_string(),
                unstable: false,
            },
            extensions: vec![],
            unsafely_ignore_certificate_errors: None,
            root_cert_store: None,
            user_agent: "hello_runtime".to_string(),
            seed: None,
            module_loader,
            create_web_worker_cb,
            js_error_create_fn: None,
            maybe_inspector_server: None,
            should_break_on_first_statement: false,
            get_error_class_fn: Some(&get_error_class_name),
            origin_storage_dir: None,
            blob_store: BlobStore::default(),
            broadcast_channel: InMemoryBroadcastChannel::default(),
            shared_array_buffer_store: None,
            compiled_wasm_module_store: None,
        };

        let main_module = deno_core::resolve_path(path_str)?;
        // Note: resolve_url is just calling url::Url::parse and mapping the error type.
        // let main_module = resolve_url(SPECIFIER)?;
        let permissions = Permissions::allow_all();

        let worker = MainWorker::from_options(main_module.clone(), permissions, options);
        //worker.bootstrap(&options);
        Ok((worker, main_module))
    }

    pub async fn xyz(
        mut worker: MainWorker,
        main_module: ModuleSpecifier,
        input: Option<ValidatedInput>,
    ) -> Result<(), InterpreterError> {
        let wait_for_inspector = false;

        // Step 10: load the module into our v8 isolate.
        worker
            .execute_main_module(&main_module)
            .instrument(info_span!("execute main module"))
            .await?;
        worker
            .run_event_loop(wait_for_inspector)
            .instrument(info_span!("run v8 event loop"))
            .await?;

        {
            let scope = &mut worker.js_runtime.handle_scope();
            let ctx = scope.get_current_context();
            let global = ctx.global(scope);
            let entrypoint = {
                let x = v8::String::new(scope, "runOnInput")
                    .ok_or(InterpreterError::AllocationFailed)?;
                v8::Local::new(scope, x).into()
            };
            let v8_value = global
                .get(scope, entrypoint)
                .ok_or(InterpreterError::ReferenceError)?;

            // Step 12: invoke the function.
            let function = v8::Local::<v8::Function>::try_from(v8_value)?;
            let this = v8::undefined(scope).into();

            let span = info_span!("dispatch v8 call");
            let timing_guard = span.enter();
            match input {
                None => {
                    function.call(scope, this, &[]);
                }
                Some(ValidatedInput::String(s)) => {
                    let arg1 = {
                        let x = v8::String::new(scope, &s)
                            .ok_or(InterpreterError::AllocationFailed)?;
                        v8::Local::new(scope, x).into()
                    };
                    function.call(scope, this, &[arg1]);
                }
                Some(ValidatedInput::Value(v)) => {
                    let arg1 = {
                        let x = v8::String::new(scope, &v)
                            .ok_or(InterpreterError::AllocationFailed)?;
                        v8::Local::new(scope, x).into()
                    };
                    function.call(scope, this, &[arg1]);
                }
                Some(ValidatedInput::Bytes(data)) => {
                    let length = data.len();
                    let y = data.into_boxed_slice();
                    let k = v8::ArrayBuffer::new_backing_store_from_boxed_slice(y).make_shared();
                    let x = v8::ArrayBuffer::with_backing_store(scope, &k);
                    let arg1 = v8::Local::new(scope, x).into();
                    let c = v8::Uint8Array::new(scope, arg1, 0, length).unwrap();
                    let arg2 = v8::Local::new(scope, c).into();
                    function.call(scope, this, &[arg2]);
                }
            }
            drop(timing_guard)
        }

        worker
            .run_event_loop(wait_for_inspector)
            .instrument(info_span!("run v8 event loop"))
            .await?;
        Ok(())
    }

    const SPECIFIER: &str = "file://$deno$/bundle.js";

    struct EmbeddedModuleLoader(String);

    impl ModuleLoader for EmbeddedModuleLoader {
        fn resolve(
            &self,
            specifier: &str,
            _referrer: &str,
            _is_main: bool,
        ) -> Result<ModuleSpecifier, AnyError> {
            if let Ok(module_specifier) = resolve_url(specifier) {
                //if get_source_from_data_url(&module_specifier).is_ok()
                //    || specifier == SPECIFIER
                //{
                return Ok(module_specifier);
                //}
            }
            Err(type_error(
                "Self-contained binaries don't support module loading",
            ))
        }

        fn load(
            &self,
            module_specifier: &ModuleSpecifier,
            _maybe_referrer: Option<ModuleSpecifier>,
            _is_dyn_import: bool,
        ) -> Pin<Box<deno_core::ModuleSourceFuture>> {
            let module_specifier = module_specifier.clone();
            //let is_data_uri = get_source_from_data_url(&module_specifier).ok();
            //let code = if let Some((ref source, _)) = is_data_uri {
            //    source.to_string()
            //} else {
            let code = self.0.to_string();
            //};
            async move {
                //if is_data_uri.is_none() && module_specifier.to_string() != SPECIFIER {
                //    return Err(type_error(
                //        "Self-contained binaries don't support module loading",
                //    ));
                //}
                Ok(deno_core::ModuleSource {
                    code,
                    module_url_specified: module_specifier.to_string(),
                    module_url_found: module_specifier.to_string(),
                })
            }
            .boxed_local()
        }
    }
}
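
// A minimal sketch of how the `ElmResult` wire format could be pinned down with a unit test.
// It only assumes `serde_json` (already a dependency of this module) and the
// `#[serde(tag = "$")]` attribute on `ElmResult` above; the expected JSON strings reflect
// serde's internally tagged encoding and are not taken from the original sources.
#[cfg(test)]
mod tests {
    use super::ElmResult;

    #[test]
    fn elm_result_serializes_with_dollar_tag() {
        // The Ok variant should carry the "$" tag first, then the payload under "a".
        let ok: ElmResult<u32, String> = ElmResult::ok(1);
        assert_eq!(serde_json::to_string(&ok).unwrap(), r#"{"$":"Ok","a":1}"#);

        // The Err variant uses the same shape with the error payload under "a".
        let err: ElmResult<u32, String> = ElmResult::err("boom".to_string());
        assert_eq!(serde_json::to_string(&err).unwrap(), r#"{"$":"Err","a":"boom"}"#);
    }
}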