Diffstat (limited to 'mlua_derive/src')
-rw-r--r--  mlua_derive/src/chunk.rs  104
-rw-r--r--  mlua_derive/src/lib.rs     87
-rw-r--r--  mlua_derive/src/token.rs  220
3 files changed, 409 insertions, 2 deletions
diff --git a/mlua_derive/src/chunk.rs b/mlua_derive/src/chunk.rs
new file mode 100644
index 0000000..9ff61a9
--- /dev/null
+++ b/mlua_derive/src/chunk.rs
@@ -0,0 +1,104 @@
+use proc_macro::{TokenStream, TokenTree};
+
+use crate::token::{Pos, Token, Tokens};
+
+#[derive(Debug, Clone)]
+pub(crate) struct Capture {
+    key: Token,
+    rust: TokenTree,
+}
+
+impl Capture {
+    fn new(key: Token, rust: TokenTree) -> Self {
+        Self { key, rust }
+    }
+
+    /// Token string inside `chunk!`
+    pub(crate) fn key(&self) -> &Token {
+        &self.key
+    }
+
+    /// As a Rust variable, e.g. `x`
+    pub(crate) fn as_rust(&self) -> &TokenTree {
+        &self.rust
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct Captures(Vec<Capture>);
+
+impl Captures {
+    pub(crate) fn new() -> Self {
+        Self(Vec::new())
+    }
+
+    pub(crate) fn add(&mut self, token: &Token) -> Capture {
+        let tt = token.tree();
+        let key = token.clone();
+
+        match self.0.iter().find(|arg| arg.key() == &key) {
+            Some(arg) => arg.clone(),
+            None => {
+                let arg = Capture::new(key, tt.clone());
+                self.0.push(arg.clone());
+                arg
+            }
+        }
+    }
+
+    pub(crate) fn captures(&self) -> &[Capture] {
+        &self.0
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct Chunk {
+    source: String,
+    caps: Captures,
+}
+
+impl Chunk {
+    pub(crate) fn new(tokens: TokenStream) -> Self {
+        let tokens = Tokens::retokenize(tokens);
+
+        let mut source = String::new();
+        let mut caps = Captures::new();
+
+        let mut pos: Option<Pos> = None;
+        for t in tokens {
+            if t.is_cap() {
+                caps.add(&t);
+            }
+
+            let (line, col) = (t.start().line, t.start().column);
+            let (prev_line, prev_col) = pos
+                .take()
+                .map(|lc| (lc.line, lc.column))
+                .unwrap_or_else(|| (line, col));
+
+            if line > prev_line {
+                source.push('\n');
+            } else if line == prev_line {
+                for _ in 0..col.saturating_sub(prev_col) {
+                    source.push(' ');
+                }
+            }
+            source.push_str(&t.to_string());
+
+            pos = Some(t.end());
+        }
+
+        Self {
+            source: source.trim_end().to_string(),
+            caps,
+        }
+    }
+
+    pub(crate) fn source(&self) -> &str {
+        &self.source
+    }
+
+    pub(crate) fn captures(&self) -> &[Capture] {
+        self.caps.captures()
+    }
+}
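
The loop in `Chunk::new` is the heart of this file: tokenization discards whitespace, so the original spacing is re-derived from span positions. A minimal standalone sketch of the same gap-filling logic (the `Tok` type and its fields are hypothetical stand-ins for `Token`/`Pos`, not part of this crate):

    // Hypothetical stand-in for `Token`/`Pos`, for illustration only.
    struct Tok {
        text: &'static str,
        start: (usize, usize), // (line, column)
        end: (usize, usize),
    }

    fn rebuild(tokens: &[Tok]) -> String {
        let mut source = String::new();
        let mut pos: Option<(usize, usize)> = None;
        for t in tokens {
            let (line, col) = t.start;
            let (prev_line, prev_col) = pos.take().unwrap_or((line, col));
            if line > prev_line {
                // Token starts on a later line: emit a line break.
                source.push('\n');
            } else if line == prev_line {
                // Same line: pad with spaces up to the original column.
                for _ in 0..col.saturating_sub(prev_col) {
                    source.push(' ');
                }
            }
            source.push_str(t.text);
            pos = Some(t.end);
        }
        source.trim_end().to_string()
    }

    fn main() {
        let toks = [
            Tok { text: "local", start: (1, 0), end: (1, 5) },
            Tok { text: "x", start: (1, 6), end: (1, 7) },
            Tok { text: "=", start: (1, 8), end: (1, 9) },
            Tok { text: "1", start: (1, 10), end: (1, 11) },
        ];
        assert_eq!(rebuild(&toks), "local x = 1");
    }
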
diff --git a/mlua_derive/src/lib.rs b/mlua_derive/src/lib.rs
index fa3d487..b735b6f 100644
--- a/mlua_derive/src/lib.rs
+++ b/mlua_derive/src/lib.rs
@@ -1,10 +1,14 @@
-extern crate proc_macro;
-
 use proc_macro::TokenStream;
 use proc_macro2::{Ident, Span};
 use quote::quote_spanned;
 use syn::{parse_macro_input, spanned::Spanned, AttributeArgs, Error, ItemFn};
 
+#[cfg(feature = "macros")]
+use {
+    crate::chunk::Chunk, proc_macro::TokenTree, proc_macro2::TokenStream as TokenStream2,
+    proc_macro_error::proc_macro_error, quote::quote,
+};
+
 #[proc_macro_attribute]
 pub fn lua_module(attr: TokenStream, item: TokenStream) -> TokenStream {
     let args = parse_macro_input!(attr as AttributeArgs);
@@ -35,3 +39,82 @@ pub fn lua_module(attr: TokenStream, item: TokenStream) -> TokenStream {
 
     wrapped.into()
 }
+
+#[cfg(feature = "macros")]
+fn to_ident(tt: &TokenTree) -> TokenStream2 {
+    let s: TokenStream = tt.clone().into();
+    s.into()
+}
+
+#[cfg(feature = "macros")]
+#[proc_macro]
+#[proc_macro_error]
+pub fn chunk(input: TokenStream) -> TokenStream {
+    let chunk = Chunk::new(input);
+
+    let source = chunk.source();
+
+    let caps_len = chunk.captures().len();
+    let caps = chunk.captures().iter().map(|cap| {
+        let cap_name = cap.as_rust().to_string();
+        let cap = to_ident(cap.as_rust());
+        quote! { env.raw_set(#cap_name, #cap)?; }
+    });
+
+    let wrapped_code = quote! {{
+        use ::mlua::{AsChunk, ChunkMode, Lua, Result, Value};
+        use ::std::marker::PhantomData;
+        use ::std::sync::Mutex;
+
+        fn annotate<'a, F: FnOnce(&'a Lua) -> Result<Value<'a>>>(f: F) -> F { f }
+
+        struct InnerChunk<'a, F: FnOnce(&'a Lua) -> Result<Value<'a>>>(Mutex<Option<F>>, PhantomData<&'a ()>);
+
+        impl<'lua, F> AsChunk<'lua> for InnerChunk<'lua, F>
+        where
+            F: FnOnce(&'lua Lua) -> Result<Value<'lua>>,
+        {
+            fn source(&self) -> &[u8] {
+                (#source).as_bytes()
+            }
+
+            fn env(&self, lua: &'lua Lua) -> Option<Result<Value<'lua>>> {
+                if #caps_len > 0 {
+                    if let Ok(mut make_env) = self.0.lock() {
+                        if let Some(make_env) = make_env.take() {
+                            return Some(make_env(lua));
+                        }
+                    }
+                }
+                None
+            }
+
+            fn mode(&self) -> Option<ChunkMode> {
+                Some(ChunkMode::Text)
+            }
+        }
+
+        let make_env = annotate(move |lua: &Lua| -> Result<Value> {
+            let globals = lua.globals();
+            let env = lua.create_table()?;
+            let meta = lua.create_table()?;
+            meta.raw_set("__index", globals.clone())?;
+            meta.raw_set("__newindex", globals)?;
+
+            // Add captured variables
+            #(#caps)*
+
+            env.set_metatable(Some(meta));
+            Ok(Value::Table(env))
+        });
+
+        &InnerChunk(Mutex::new(Some(make_env)), PhantomData)
+    }};
+
+    wrapped_code.into()
+}
+
+#[cfg(feature = "macros")]
+mod chunk;
+#[cfg(feature = "macros")]
+mod token;
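
Downstream, the generated macro is consumed through the `mlua` crate, which re-exports it when built with the `macros` feature. A minimal usage sketch under that assumption:

    use mlua::{chunk, Lua, Result};

    fn main() -> Result<()> {
        let lua = Lua::new();
        let greeting = "hello";
        // `$greeting` is captured from the Rust scope and exposed to the
        // chunk through the generated environment table.
        lua.load(chunk! {
            print($greeting .. ", world")
        })
        .exec()
    }
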
diff --git a/mlua_derive/src/token.rs b/mlua_derive/src/token.rs
new file mode 100644
index 0000000..24182d4
--- /dev/null
+++ b/mlua_derive/src/token.rs
@@ -0,0 +1,220 @@
+use std::{
+    cmp::{Eq, PartialEq},
+    fmt::{self, Display, Formatter},
+    iter::IntoIterator,
+    vec::IntoIter,
+};
+
+use itertools::Itertools;
+use once_cell::sync::Lazy;
+use proc_macro::{Delimiter, Span, TokenStream, TokenTree};
+use proc_macro2::Span as Span2;
+use regex::Regex;
+
+#[derive(Clone, Copy, Debug)]
+pub(crate) struct Pos {
+    pub(crate) line: usize,
+    pub(crate) column: usize,
+}
+
+impl Pos {
+    fn new(line: usize, column: usize) -> Self {
+        Self { line, column }
+    }
+
+    fn left(&self) -> Self {
+        Self {
+            line: self.line,
+            column: self.column.saturating_sub(1),
+        }
+    }
+
+    fn right(&self) -> Self {
+        Self {
+            line: self.line,
+            column: self.column.saturating_add(1),
+        }
+    }
+}
+
+fn span_pos(span: &Span) -> (Pos, Pos) {
+    let span2: Span2 = (*span).into();
+    let start = span2.start();
+    let end = span2.end();
+
+    // On stable, line/column information is not provided and both
+    // default to 0 (lines are 1-indexed, so 0 means "unavailable").
+    if start.line == 0 || end.line == 0 {
+        return fallback_span_pos(span);
+    }
+
+    (
+        Pos::new(start.line, start.column),
+        Pos::new(end.line, end.column),
+    )
+}
+
+fn parse_pos(span: &Span) -> Option<(usize, usize)> {
+    // Workaround to retrieve location information on stable Rust:
+    // parse the byte offsets out of the span's `Debug` output.
+    static RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"bytes\(([0-9]+)\.\.([0-9]+)\)").unwrap());
+
+    let caps = RE.captures(&format!("{:?}", span))?;
+    let start = caps.get(1)?.as_str().parse().ok()?;
+    let end = caps.get(2)?.as_str().parse().ok()?;
+    Some((start, end))
+}
+
+fn fallback_span_pos(span: &Span) -> (Pos, Pos) {
+    let (start, end) = match parse_pos(span) {
+        Some(v) => v,
+        None => proc_macro_error::abort_call_site!(
+            "Cannot retrieve span information; please use nightly"
+        ),
+    };
+    (Pos::new(1, start), Pos::new(1, end))
+}
+
+/// Attribute of a token.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+enum TokenAttr {
+    /// No attribute
+    None,
+    /// Starts with `$`
+    Cap,
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct Token {
+    source: String,
+    tree: TokenTree,
+    start: Pos,
+    end: Pos,
+    attr: TokenAttr,
+}
+
+impl PartialEq for Token {
+    fn eq(&self, other: &Self) -> bool {
+        self.source == other.source && self.attr == other.attr
+    }
+}
+
+impl Eq for Token {}
+
+impl Token {
+    fn new(tree: TokenTree) -> Self {
+        let (start, end) = span_pos(&tree.span());
+        Self {
+            source: tree.to_string(),
+            start,
+            end,
+            tree,
+            attr: TokenAttr::None,
+        }
+    }
+
+    fn new_delim(source: String, tree: TokenTree, open: bool) -> Self {
+        let (start, end) = span_pos(&tree.span());
+        let (start, end) = if open {
+            (start, start.right())
+        } else {
+            (end.left(), end)
+        };
+
+        Self {
+            source,
+            tree,
+            start,
+            end,
+            attr: TokenAttr::None,
+        }
+    }
+
+    pub(crate) fn tree(&self) -> &TokenTree {
+        &self.tree
+    }
+
+    pub(crate) fn is_cap(&self) -> bool {
+        self.attr == TokenAttr::Cap
+    }
+
+    pub(crate) fn start(&self) -> Pos {
+        self.start
+    }
+
+    pub(crate) fn end(&self) -> Pos {
+        self.end
+    }
+
+    fn is(&self, s: &str) -> bool {
+        self.source == s
+    }
+
+    fn attr(mut self, attr: TokenAttr) -> Self {
+        self.attr = attr;
+        self
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct Tokens(pub(crate) Vec<Token>);
+
+impl Tokens {
+    pub(crate) fn retokenize(tt: TokenStream) -> Tokens {
+        Tokens(
+            tt.into_iter()
+                .flat_map(Tokens::from)
+                .batching(|iter| {
+                    // Find capture tokens
+                    let t = iter.next()?;
+                    if t.is("$") {
+                        // `$` + `ident` => `$ident`
+                        let t = iter.next().expect("`$` must be followed by an identifier");
+                        Some(t.attr(TokenAttr::Cap))
+                    } else {
+                        Some(t)
+                    }
+                })
+                .collect(),
+        )
+    }
+}
+
+impl IntoIterator for Tokens {
+    type Item = Token;
+    type IntoIter = IntoIter<Token>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.into_iter()
+    }
+}
+
+impl From<TokenTree> for Tokens {
+    fn from(tt: TokenTree) -> Self {
+        let tts = match tt.clone() {
+            TokenTree::Group(g) => {
+                let (b, e) = match g.delimiter() {
+                    Delimiter::Parenthesis => ("(", ")"),
+                    Delimiter::Brace => ("{", "}"),
+                    Delimiter::Bracket => ("[", "]"),
+                    Delimiter::None => ("", ""),
+                };
+                let (b, e) = (b.into(), e.into());
+
+                vec![Token::new_delim(b, tt.clone(), true)]
+                    .into_iter()
+                    .chain(g.stream().into_iter().flat_map(Tokens::from))
+                    .chain(vec![Token::new_delim(e, tt.clone(), false)])
+                    .collect()
+            }
+            _ => vec![Token::new(tt)],
+        };
+        Tokens(tts)
+    }
+}
+
+impl Display for Token {
+    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+        write!(f, "{}", self.source)
+    }
+}
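
The `batching` adapter above is what fuses a `$` punct with the following identifier into a single capture token. The same pattern, sketched standalone over plain strings (illustrative only, not part of the crate):

    use itertools::Itertools;

    fn main() {
        let tokens = ["local", "x", "=", "$", "y", "+", "1"];
        let merged: Vec<String> = tokens
            .iter()
            .batching(|iter| {
                let t = iter.next()?;
                if *t == "$" {
                    // `$` + `ident` => `$ident`, as in `Tokens::retokenize`.
                    iter.next().map(|id| format!("${}", id))
                } else {
                    Some(t.to_string())
                }
            })
            .collect();
        assert_eq!(merged, ["local", "x", "=", "$y", "+", "1"]);
    }
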