Add Resolver for Playground #196

Merged · 20 commits · Apr 2, 2023
5 changes: 1 addition & 4 deletions cli/src/error.rs
@@ -19,7 +19,7 @@ pub enum CliError {
#[error("Core error: {0}")]
Core(#[from] CoreError),

#[error("Cannot infer output format, please specifiy --format")]
#[error("Cannot infer output format, please specify --format")]
UnknownOutputFormat,

#[error("Reqwest error '{0}'")]
@@ -37,9 +37,6 @@ pub enum CliError {
#[error("Could not get registry source")]
Registry,

#[error("Specifier '{0}' not supported")]
Specifier(String),

#[error("Could not find local path to '{0}'")]
Local(String),

112 changes: 81 additions & 31 deletions cli/src/main.rs
@@ -1,3 +1,16 @@
use std::{
collections::HashMap,
env,
fs::{self, File},
io::{stdout, Write},
path::{Path, PathBuf},
sync::{
atomic::{AtomicUsize, Ordering},
Arc, Mutex,
},
time::Duration,
};

use clap::Parser;
use crossterm::{
cursor,
@@ -12,30 +25,22 @@ use notify_debouncer_mini::{
};
use once_cell::sync::OnceCell;
use portpicker::Port;
use std::{
collections::HashMap,
env,
fs::{self, File},
io::{stdout, Write},
path::{Path, PathBuf},
sync::{
atomic::{AtomicUsize, Ordering},
Arc, Mutex,
},
time::Duration,
};
use tokio::sync::{
mpsc::{self, channel, Receiver},
RwLock,
};
use tokio::sync::{mpsc, RwLock};
use tokio::task::spawn_blocking;
use tokio_stream::wrappers::UnboundedReceiverStream;
use warp::{
ws::{Message, WebSocket},
Filter, Rejection, Reply,
};

use crate::file_access::CliAccessManager;
use error::CliError;
use modmark_core::{context::CompilationState, eval, Context, CoreError, OutputFormat};
use parser::{parse, Ast};

use crate::file_access::CliAccessManager;
use crate::package::PackageManager;

mod error;
@@ -142,28 +147,47 @@ static DEFAULT_REGISTRY: &str =
"https://raw.githubusercontent.com/modmark-org/package-registry/main/package-registry.json";

static CTX: OnceCell<Mutex<Context<PackageManager, CliAccessManager>>> = OnceCell::new();
static RESOLVE_COMPLETE_RX: OnceCell<Mutex<Receiver<()>>> = OnceCell::new();
static PREVIEW_PORT: OnceCell<Option<Port>> = OnceCell::new();
static ABSOLUTE_OUTPUT_PATH: OnceCell<PathBuf> = OnceCell::new();
static CONNECTION_ID_COUNTER: AtomicUsize = AtomicUsize::new(1);
static MAX_COMPILATION_TRIES: usize = 3;

type PreviewConnections = Arc<RwLock<HashMap<usize, mpsc::UnboundedSender<Message>>>>;
type PreviewDoc = Arc<Mutex<String>>;

type CompilationResult = Result<(String, CompilationState, Ast), CoreError>;
type CompilationResult = Result<(String, CompilationState, Ast), Vec<CoreError>>;

/// Compile a file and return the transpiled content, compilation state and ast.
fn compile_file(input_file: &Path, output_format: &OutputFormat) -> CompilationResult {
let source = fs::read_to_string(input_file)?;
let ast = parse(&source)?;
let (output, state) = eval(
&source,
&mut CTX.get().unwrap().lock().unwrap(),
output_format,
)?;
async fn compile_file(input_file: &Path, output_format: &OutputFormat) -> CompilationResult {
let source = fs::read_to_string(input_file).map_err(|e| vec![e.into()])?;
let ast = parse(&source).map_err(|e| vec![e.into()])?;

for i in 1..=MAX_COMPILATION_TRIES {
if let Some((output, state)) = eval(
&source,
&mut CTX.get().unwrap().lock().unwrap(),
output_format,
)? {
return Ok((output, state, ast));
}

Ok((output, state, ast))
}
if i != MAX_COMPILATION_TRIES {
spawn_blocking(|| {
RESOLVE_COMPLETE_RX
.get()
.unwrap()
.lock()
.unwrap()
.blocking_recv();
})
.await
.unwrap();
}
}

Err(vec![])
}
fn print_compiling_message() -> Result<(), CliError> {
let mut stdout = stdout();
stdout.execute(terminal::Clear(terminal::ClearType::All))?;
@@ -178,12 +202,27 @@ fn print_result(result: &CompilationResult, args: &Args) -> Result<(), CliError>

let (_, state, ast) = match result {
Ok(result) => result,
Err(error) => {
Err(errors) => {
stdout.execute(terminal::Clear(terminal::ClearType::All))?;
stdout.execute(cursor::MoveTo(0, 0))?;
stdout.execute(style::PrintStyledContent(
format!("Compilation error:\n{error}\n\n").red(),
))?;
let num_errors = errors.len();
if num_errors == 0 {
stdout.execute(style::PrintStyledContent(
"No result retrieved from compiler\n".red(),
))?;
} else if num_errors == 1 {
let error = errors.first().unwrap();
stdout.execute(style::PrintStyledContent(
format!("1 compilation error:\n{error}\n").red(),
))?;
} else {
stdout.execute(style::PrintStyledContent(
format!("{} compilation errors:\n", num_errors).red(),
))?;
for error in errors {
stdout.execute(style::PrintStyledContent(format!("{error:?}\n").red()))?;
}
}
return Ok(());
}
};
@@ -279,8 +318,18 @@ async fn run_cli(args: Args) -> Result<(), CliError> {
.unwrap_or(DEFAULT_REGISTRY)
.to_string();

let (tx, rx) = channel::<()>(1);

RESOLVE_COMPLETE_RX.set(Mutex::new(rx)).unwrap();
CTX.set(Mutex::new(
Context::new(PackageManager { registry }, CliAccessManager::new(&args)).unwrap(),
Context::new(
PackageManager {
registry,
complete_tx: tx,
},
CliAccessManager::new(&args),
)
.unwrap(),
))
.unwrap();

@@ -326,7 +375,7 @@ async fn run_cli(args: Args) -> Result<(), CliError> {
// just compile the file once, assuming that they actually provided a output file
if args.output.is_some() {
print_compiling_message()?;
let compilation_result = compile_file(&args.input, &args.get_output_format()?);
let compilation_result = compile_file(&args.input, &args.get_output_format()?).await;
save_result(&compilation_result, &args)?;
print_result(&compilation_result, &args)?;
} else {
@@ -433,7 +482,8 @@ async fn watch_files<P: AsRef<Path>>(
&args
.get_output_format()
.unwrap_or_else(|_| OutputFormat::new("html")),
);
)
.await;

// Write to the output file (if there was one)
save_result(&compilation_result, args)?;
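Note on the change above: the new `compile_file` no longer fails immediately when a package is missing. It retries up to `MAX_COMPILATION_TRIES` times, and between attempts it parks on `RESOLVE_COMPLETE_RX`, the channel that the resolver signals once downloads finish. The following is a minimal, self-contained sketch of that retry/notify handshake, assuming tokio and once_cell as in the PR; `try_compile`, `MAX_TRIES` and the simulated resolver task are illustrative stand-ins, not modmark APIs.

```rust
use std::sync::Mutex;

use once_cell::sync::OnceCell;
use tokio::sync::mpsc::{channel, Receiver};
use tokio::task::spawn_blocking;

static RESOLVE_COMPLETE_RX: OnceCell<Mutex<Receiver<()>>> = OnceCell::new();
const MAX_TRIES: usize = 3;

// Pretend compilation: succeeds only on the second attempt, standing in for
// "all packages are now resolved".
fn try_compile(attempt: usize) -> Option<String> {
    (attempt > 1).then(|| "output".to_string())
}

async fn compile_with_retries() -> Result<String, Vec<String>> {
    for attempt in 1..=MAX_TRIES {
        if let Some(output) = try_compile(attempt) {
            return Ok(output);
        }
        if attempt != MAX_TRIES {
            // `blocking_recv` would stall the async runtime, so park on the
            // blocking thread pool until the resolver announces completion.
            spawn_blocking(|| {
                RESOLVE_COMPLETE_RX
                    .get()
                    .unwrap()
                    .lock()
                    .unwrap()
                    .blocking_recv();
            })
            .await
            .unwrap();
        }
    }
    Err(vec![])
}

#[tokio::main]
async fn main() {
    let (tx, rx) = channel::<()>(1);
    RESOLVE_COMPLETE_RX.set(Mutex::new(rx)).unwrap();

    // Simulated resolver: signals once its (instant) work is done.
    tokio::spawn(async move { tx.send(()).await.unwrap() });

    println!("{:?}", compile_with_retries().await);
}
```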
69 changes: 37 additions & 32 deletions cli/src/package.rs
@@ -1,52 +1,57 @@
use crate::error::CliError;
use directories::ProjectDirs;
use futures::{executor::block_on, future::join_all};
use modmark_core::Resolve;
use serde_json;
use std::{
env::current_dir,
fs::{self, create_dir_all, File},
io::copy,
path::PathBuf,
};

use directories::ProjectDirs;
use futures::future::join_all;
use tokio::sync::mpsc::Sender;

use modmark_core::package_store::{PackageID, PackageSource, Resolve, ResolveTask};

use crate::error::CliError;

#[derive(Clone)]
pub struct PackageManager {
pub(crate) registry: String,
pub(crate) complete_tx: Sender<()>,
}

impl Resolve for PackageManager {
type Error = CliError;
fn resolve(&self, path: &str) -> Result<Vec<u8>, Self::Error> {
// Hmm, it does not seem like the best idea to block the thread like this
// but it should work good enough for our needs
// https://github.com/tokio-rs/tokio/issues/2289
tokio::task::block_in_place(move || block_on(async { self.resolve_package(path).await }))
}

fn resolve_all(&self, paths: &[&str]) -> Vec<Result<Vec<u8>, Self::Error>> {
tokio::task::block_in_place(move || block_on(async { self.resolve_packages(paths).await }))
fn resolve_all(&self, paths: Vec<ResolveTask>) {
// I don't know how to get rid of all these clones...
let self_clone = self.clone();
tokio::spawn(async move {
join_all(
paths
.into_iter()
.map(|task| {
let another_self = self_clone.clone();
tokio::spawn(async move { another_self.resolve(task).await })
})
.collect::<Vec<_>>(),
)
.await;
self_clone.complete_tx.send(()).await.unwrap();
});
}
}

impl PackageManager {
async fn resolve_packages(&self, paths: &[&str]) -> Vec<Result<Vec<u8>, CliError>> {
let futures = paths.iter().map(|&path| self.resolve_package(path));
join_all(futures).await
}

async fn resolve_package(&self, path: &str) -> Result<Vec<u8>, CliError> {
let splitter = path.split_once(':');

let Some((specifier, package_path)) = splitter else {
return self.fetch_local(path);
async fn resolve(&self, task: ResolveTask) {
let PackageID {
name,
source: target,
} = &task.package_id;
let result = match target {
PackageSource::Local => self.fetch_local(name),
PackageSource::Registry => self.fetch_registry(name).await,
PackageSource::Url => self.fetch_url(name).await,
PackageSource::Standard => Err(CliError::Registry),
};

match specifier {
"http" => self.fetch_url(path).await,
"https" => self.fetch_url(path).await,
"pkgs" => self.fetch_registry(package_path).await,
other => Err(CliError::Specifier(other.to_string())),
}
task.complete(result);
}

async fn fetch_url(&self, package_path: &str) -> Result<Vec<u8>, CliError> {
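Note on the package.rs change: `resolve_all` becomes fire-and-forget. Each requested package is fetched on its own tokio task, every task reports back through its `ResolveTask`, and one final send on `complete_tx` nudges the compile loop to try again. Below is a rough sketch of that shape; `Task`, `Resolver::fetch` and the oneshot-based completion handle are hypothetical stand-ins rather than the `modmark_core` types used in the PR.

```rust
use futures::future::join_all;
use tokio::sync::{mpsc, oneshot};

// Stand-in for a resolve request: the requester keeps the other end of
// `reply` and waits there for the fetched package bytes.
struct Task {
    name: String,
    reply: oneshot::Sender<Result<Vec<u8>, String>>,
}

#[derive(Clone)]
struct Resolver {
    complete_tx: mpsc::Sender<()>,
}

impl Resolver {
    // Stand-in for fetch_local / fetch_registry / fetch_url.
    async fn fetch(&self, name: &str) -> Result<Vec<u8>, String> {
        Ok(name.as_bytes().to_vec())
    }

    fn resolve_all(&self, tasks: Vec<Task>) {
        let this = self.clone();
        tokio::spawn(async move {
            let handles = tasks
                .into_iter()
                .map(|task| {
                    let this = this.clone();
                    tokio::spawn(async move {
                        let result = this.fetch(&task.name).await;
                        let _ = task.reply.send(result);
                    })
                })
                .collect::<Vec<_>>();
            join_all(handles).await;
            // One signal per batch: tells the compile loop to try again.
            this.complete_tx.send(()).await.unwrap();
        });
    }
}

#[tokio::main]
async fn main() {
    let (complete_tx, mut complete_rx) = mpsc::channel(1);
    let resolver = Resolver { complete_tx };

    let (reply_tx, reply_rx) = oneshot::channel();
    resolver.resolve_all(vec![Task {
        name: "table".into(),
        reply: reply_tx,
    }]);

    // The requester gets its bytes, and the compile loop gets its wake-up.
    println!("fetched {} bytes", reply_rx.await.unwrap().unwrap().len());
    complete_rx.recv().await.unwrap();
}
```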