path: root/cli/doc/parser.rs
author     Bartek IwaƄczuk <biwanczuk@gmail.com>   2020-04-07 19:47:06 +0200
committer  GitHub <noreply@github.com>             2020-04-07 19:47:06 +0200
commit     86fd0c66a645a3dd262e57e330bb7fbe4663e468 (patch)
tree       cedd0b8e4b759de8f675467ebf7771a1eea0f525 /cli/doc/parser.rs
parent     51f5276e8cbce89f396458e754d8f31c11fbf1ec (diff)
feat(doc): handle basic reexports (#4625)
Diffstat (limited to 'cli/doc/parser.rs')
-rw-r--r--  cli/doc/parser.rs  287
1 file changed, 255 insertions, 32 deletions
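
For orientation, here is a minimal usage sketch of the loader-based API this patch introduces. It is not part of the commit: the MemoryLoader helper, the print_doc_names function, and the crate::doc::parser module path are illustrative assumptions, while DocFileLoader, DocParser::new, parse_with_reexports, and DocNode come from the diff below.

// Hypothetical sketch, not part of this commit.
use std::collections::HashMap;
use std::pin::Pin;

use futures::Future;

use crate::doc::parser::{DocFileLoader, DocParser}; // assumed module path
use crate::op_error::OpError;
use deno_core::ErrBox;

struct MemoryLoader {
  // Maps resolved specifiers (e.g. "file:///mod.ts") to source text.
  sources: HashMap<String, String>,
}

impl DocFileLoader for MemoryLoader {
  // resolve() keeps the trait's default ModuleSpecifier::resolve_import logic.
  fn load_source_code(
    &self,
    specifier: &str,
  ) -> Pin<Box<dyn Future<Output = Result<String, OpError>>>> {
    // Look the source up synchronously and return it as an already-ready future.
    let source = self
      .sources
      .get(specifier)
      .expect("source not registered")
      .clone();
    Box::pin(futures::future::ready(Ok(source)))
  }
}

async fn print_doc_names(loader: MemoryLoader) -> Result<(), ErrBox> {
  let parser = DocParser::new(Box::new(loader));
  // parse() returns only a module's own exports; parse_with_reexports()
  // additionally flattens `export ... from "..."` entries from child modules.
  for doc_node in parser.parse_with_reexports("file:///mod.ts").await? {
    println!("{}", doc_node.name);
  }
  Ok(())
}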
diff --git a/cli/doc/parser.rs b/cli/doc/parser.rs
index 7a8fee56f..7c67f8d09 100644
--- a/cli/doc/parser.rs
+++ b/cli/doc/parser.rs
@@ -1,4 +1,5 @@
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+use crate::op_error::OpError;
use crate::swc_common;
use crate::swc_common::comments::CommentKind;
use crate::swc_common::comments::Comments;
@@ -22,34 +23,87 @@ use crate::swc_ecma_parser::Session;
use crate::swc_ecma_parser::SourceFileInput;
use crate::swc_ecma_parser::Syntax;
use crate::swc_ecma_parser::TsConfig;
+
+use deno_core::ErrBox;
+use deno_core::ModuleSpecifier;
+use futures::Future;
use regex::Regex;
+use std::collections::HashMap;
+use std::error::Error;
+use std::fmt;
+use std::pin::Pin;
use std::sync::Arc;
use std::sync::RwLock;
+use super::namespace::NamespaceDef;
+use super::node;
+use super::node::ModuleDoc;
use super::DocNode;
use super::DocNodeKind;
use super::Location;
-pub type SwcDiagnostics = Vec<Diagnostic>;
+#[derive(Clone, Debug)]
+pub struct SwcDiagnosticBuffer {
+ pub diagnostics: Vec<Diagnostic>,
+}
+
+impl Error for SwcDiagnosticBuffer {}
+
+impl fmt::Display for SwcDiagnosticBuffer {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let msg = self
+ .diagnostics
+ .iter()
+ .map(|d| d.message())
+ .collect::<Vec<String>>()
+ .join(",");
+
+ f.pad(&msg)
+ }
+}
+
+#[derive(Clone)]
+pub struct SwcErrorBuffer(Arc<RwLock<SwcDiagnosticBuffer>>);
-#[derive(Clone, Default)]
-pub struct BufferedError(Arc<RwLock<SwcDiagnostics>>);
+impl SwcErrorBuffer {
+ pub fn default() -> Self {
+ Self(Arc::new(RwLock::new(SwcDiagnosticBuffer {
+ diagnostics: vec![],
+ })))
+ }
+}
-impl Emitter for BufferedError {
+impl Emitter for SwcErrorBuffer {
fn emit(&mut self, db: &DiagnosticBuilder) {
- self.0.write().unwrap().push((**db).clone());
+ self.0.write().unwrap().diagnostics.push((**db).clone());
}
}
-impl From<BufferedError> for Vec<Diagnostic> {
- fn from(buf: BufferedError) -> Self {
+impl From<SwcErrorBuffer> for SwcDiagnosticBuffer {
+ fn from(buf: SwcErrorBuffer) -> Self {
let s = buf.0.read().unwrap();
s.clone()
}
}
+pub trait DocFileLoader {
+ fn resolve(
+ &self,
+ specifier: &str,
+ referrer: &str,
+ ) -> Result<ModuleSpecifier, OpError> {
+ ModuleSpecifier::resolve_import(specifier, referrer).map_err(OpError::from)
+ }
+
+ fn load_source_code(
+ &self,
+ specifier: &str,
+ ) -> Pin<Box<dyn Future<Output = Result<String, OpError>>>>;
+}
+
pub struct DocParser {
- pub buffered_error: BufferedError,
+ pub loader: Box<dyn DocFileLoader>,
+ pub buffered_error: SwcErrorBuffer,
pub source_map: Arc<SourceMap>,
pub handler: Handler,
pub comments: Comments,
@@ -57,8 +111,8 @@ pub struct DocParser {
}
impl DocParser {
- pub fn default() -> Self {
- let buffered_error = BufferedError::default();
+ pub fn new(loader: Box<dyn DocFileLoader>) -> Self {
+ let buffered_error = SwcErrorBuffer::default();
let handler = Handler::with_emitter_and_flags(
Box::new(buffered_error.clone()),
@@ -70,6 +124,7 @@ impl DocParser {
);
DocParser {
+ loader,
buffered_error,
source_map: Arc::new(SourceMap::default()),
handler,
@@ -78,15 +133,16 @@ impl DocParser {
}
}
- pub fn parse(
+ fn parse_module(
&self,
- file_name: String,
- source_code: String,
- ) -> Result<Vec<DocNode>, SwcDiagnostics> {
+ file_name: &str,
+ source_code: &str,
+ ) -> Result<ModuleDoc, SwcDiagnosticBuffer> {
swc_common::GLOBALS.set(&self.globals, || {
- let swc_source_file = self
- .source_map
- .new_source_file(FileName::Custom(file_name), source_code);
+ let swc_source_file = self.source_map.new_source_file(
+ FileName::Custom(file_name.to_string()),
+ source_code.to_string(),
+ );
let buffered_err = self.buffered_error.clone();
let session = Session {
@@ -112,15 +168,130 @@ impl DocParser {
.parse_module()
.map_err(move |mut err: DiagnosticBuilder| {
err.cancel();
- SwcDiagnostics::from(buffered_err)
+ SwcDiagnosticBuffer::from(buffered_err)
})?;
- let doc_entries = self.get_doc_nodes_for_module_body(module.body);
- Ok(doc_entries)
+ let doc_entries = self.get_doc_nodes_for_module_body(module.body.clone());
+ let reexports = self.get_reexports_for_module_body(module.body);
+ let module_doc = ModuleDoc {
+ exports: doc_entries,
+ reexports,
+ };
+ Ok(module_doc)
})
}
- pub fn get_doc_nodes_for_module_decl(
+ pub async fn parse(&self, file_name: &str) -> Result<Vec<DocNode>, ErrBox> {
+ let source_code = self.loader.load_source_code(file_name).await?;
+
+ let module_doc = self.parse_module(file_name, &source_code)?;
+ Ok(module_doc.exports)
+ }
+
+ async fn flatten_reexports(
+ &self,
+ reexports: &[node::Reexport],
+ referrer: &str,
+ ) -> Result<Vec<DocNode>, ErrBox> {
+ let mut by_src: HashMap<String, Vec<node::Reexport>> = HashMap::new();
+
+ let mut processed_reexports: Vec<DocNode> = vec![];
+
+ for reexport in reexports {
+ if by_src.get(&reexport.src).is_none() {
+ by_src.insert(reexport.src.to_string(), vec![]);
+ }
+
+ let bucket = by_src.get_mut(&reexport.src).unwrap();
+ bucket.push(reexport.clone());
+ }
+
+ for specifier in by_src.keys() {
+ let resolved_specifier = self.loader.resolve(specifier, referrer)?;
+ let doc_nodes = self.parse(&resolved_specifier.to_string()).await?;
+ let reexports_for_specifier = by_src.get(specifier).unwrap();
+
+ for reexport in reexports_for_specifier {
+ match &reexport.kind {
+ node::ReexportKind::All => {
+ processed_reexports.extend(doc_nodes.clone())
+ }
+ node::ReexportKind::Namespace(ns_name) => {
+ let ns_def = NamespaceDef {
+ elements: doc_nodes.clone(),
+ };
+ let ns_doc_node = DocNode {
+ kind: DocNodeKind::Namespace,
+ name: ns_name.to_string(),
+ location: Location {
+ filename: specifier.to_string(),
+ line: 1,
+ col: 0,
+ },
+ js_doc: None,
+ namespace_def: Some(ns_def),
+ enum_def: None,
+ type_alias_def: None,
+ interface_def: None,
+ variable_def: None,
+ function_def: None,
+ class_def: None,
+ };
+ processed_reexports.push(ns_doc_node);
+ }
+ node::ReexportKind::Named(ident, maybe_alias) => {
+ // Try to find reexport.
+ // NOTE: the reexport might actually be reexport from another
+ // module; for now we're skipping nested reexports.
+ let maybe_doc_node =
+ doc_nodes.iter().find(|node| &node.name == ident);
+
+ if let Some(doc_node) = maybe_doc_node {
+ let doc_node = doc_node.clone();
+ let doc_node = if let Some(alias) = maybe_alias {
+ DocNode {
+ name: alias.to_string(),
+ ..doc_node
+ }
+ } else {
+ doc_node
+ };
+
+ processed_reexports.push(doc_node);
+ }
+ }
+ node::ReexportKind::Default => {
+ // TODO: handle default export from child module
+ }
+ }
+ }
+ }
+
+ Ok(processed_reexports)
+ }
+
+ pub async fn parse_with_reexports(
+ &self,
+ file_name: &str,
+ ) -> Result<Vec<DocNode>, ErrBox> {
+ let source_code = self.loader.load_source_code(file_name).await?;
+
+ let module_doc = self.parse_module(file_name, &source_code)?;
+
+ let flattened_docs = if !module_doc.reexports.is_empty() {
+ let mut flattenned_reexports = self
+ .flatten_reexports(&module_doc.reexports, file_name)
+ .await?;
+ flattenned_reexports.extend(module_doc.exports);
+ flattenned_reexports
+ } else {
+ module_doc.exports
+ };
+
+ Ok(flattened_docs)
+ }
+
+ pub fn get_doc_nodes_for_module_exports(
&self,
module_decl: &ModuleDecl,
) -> Vec<DocNode> {
@@ -131,16 +302,6 @@ impl DocParser {
export_decl,
)]
}
- ModuleDecl::ExportNamed(_named_export) => {
- vec![]
- // TODO(bartlomieju):
- // super::module::get_doc_nodes_for_named_export(self, named_export)
- }
- ModuleDecl::ExportDefaultDecl(_) => vec![],
- ModuleDecl::ExportDefaultExpr(_) => vec![],
- ModuleDecl::ExportAll(_) => vec![],
- ModuleDecl::TsExportAssignment(_) => vec![],
- ModuleDecl::TsNamespaceExport(_) => vec![],
_ => vec![],
}
}
@@ -316,6 +477,67 @@ impl DocParser {
}
}
+ pub fn get_reexports_for_module_body(
+ &self,
+ module_body: Vec<swc_ecma_ast::ModuleItem>,
+ ) -> Vec<node::Reexport> {
+ use swc_ecma_ast::ExportSpecifier::*;
+
+ let mut reexports: Vec<node::Reexport> = vec![];
+
+ for node in module_body.iter() {
+ if let swc_ecma_ast::ModuleItem::ModuleDecl(module_decl) = node {
+ let r = match module_decl {
+ ModuleDecl::ExportNamed(named_export) => {
+ if let Some(src) = &named_export.src {
+ let src_str = src.value.to_string();
+ named_export
+ .specifiers
+ .iter()
+ .map(|export_specifier| match export_specifier {
+ Namespace(ns_export) => node::Reexport {
+ kind: node::ReexportKind::Namespace(
+ ns_export.name.sym.to_string(),
+ ),
+ src: src_str.to_string(),
+ },
+ Default(_) => node::Reexport {
+ kind: node::ReexportKind::Default,
+ src: src_str.to_string(),
+ },
+ Named(named_export) => {
+ let ident = named_export.orig.sym.to_string();
+ let maybe_alias =
+ named_export.exported.as_ref().map(|e| e.sym.to_string());
+ let kind = node::ReexportKind::Named(ident, maybe_alias);
+ node::Reexport {
+ kind,
+ src: src_str.to_string(),
+ }
+ }
+ })
+ .collect::<Vec<node::Reexport>>()
+ } else {
+ vec![]
+ }
+ }
+ ModuleDecl::ExportAll(export_all) => {
+ let reexport = node::Reexport {
+ kind: node::ReexportKind::All,
+ src: export_all.src.value.to_string(),
+ };
+ vec![reexport]
+ }
+ _ => vec![],
+ };
+
+ reexports.extend(r);
+ }
+ }
+
+ reexports
+ }
+
pub fn get_doc_nodes_for_module_body(
&self,
module_body: Vec<swc_ecma_ast::ModuleItem>,
@@ -324,7 +546,8 @@ impl DocParser {
for node in module_body.iter() {
match node {
swc_ecma_ast::ModuleItem::ModuleDecl(module_decl) => {
- doc_entries.extend(self.get_doc_nodes_for_module_decl(module_decl));
+ doc_entries
+ .extend(self.get_doc_nodes_for_module_exports(module_decl));
}
swc_ecma_ast::ModuleItem::Stmt(stmt) => {
if let Some(doc_node) = self.get_doc_node_for_stmt(stmt) {