Add chunk! macro support

Alex Orlenko 2021-05-05 21:55:49 +01:00
parent 5199b02346
commit 3e03f4201c
10 changed files with 584 additions and 33 deletions


@@ -14,7 +14,7 @@ jobs:
- name: Generate code coverage
run: |
cargo tarpaulin --verbose --features lua53,vendored,async,send,serialize --out xml --exclude-files benches --exclude-files tests --exclude-files build --exclude-files src/ffi
cargo tarpaulin --verbose --features lua53,vendored,async,send,serialize,macros --out xml --exclude-files benches --exclude-files tests --exclude-files build --exclude-files src/ffi
- name: Upload to codecov.io
uses: codecov/codecov-action@v1


@@ -26,8 +26,8 @@ jobs:
override: true
- name: Build ${{ matrix.lua }} vendored
run: |
cargo build --release --features "${{ matrix.lua }} vendored"
cargo build --release --features "${{ matrix.lua }} vendored async send serialize"
cargo build --release --features "${{ matrix.lua }},vendored"
cargo build --release --features "${{ matrix.lua }},vendored,async,send,serialize,macros"
shell: bash
- name: Build ${{ matrix.lua }} pkg-config
if: ${{ matrix.os == 'ubuntu-18.04' && matrix.lua != 'lua54' }}
@@ -51,7 +51,7 @@ jobs:
target: aarch64-apple-darwin
override: true
- name: Cross-compile
run: cargo build --target aarch64-apple-darwin --features "${{ matrix.lua }} async send serialize vendored"
run: cargo build --target aarch64-apple-darwin --features "${{ matrix.lua }},vendored,async,send,serialize,macros"
build_aarch64_cross_ubuntu:
name: Cross-compile to aarch64-unknown-linux-gnu
@@ -73,7 +73,7 @@ jobs:
sudo apt-get install -y --no-install-recommends gcc-aarch64-linux-gnu libc6-dev-arm64-cross
shell: bash
- name: Cross-compile
run: cargo build --target aarch64-unknown-linux-gnu --features "${{ matrix.lua }} async send serialize vendored"
run: cargo build --target aarch64-unknown-linux-gnu --features "${{ matrix.lua }},vendored,async,send,serialize,macros"
shell: bash
build_armv7_cross_ubuntu:
@@ -96,7 +96,7 @@ jobs:
sudo apt-get install -y --no-install-recommends gcc-arm-linux-gnueabihf libc-dev-armhf-cross
shell: bash
- name: Cross-compile
run: cargo build --target armv7-unknown-linux-gnueabihf --features "${{ matrix.lua }} async send serialize vendored"
run: cargo build --target armv7-unknown-linux-gnueabihf --features "${{ matrix.lua }},vendored,async,send,serialize,macros"
shell: bash
test:
@@ -124,14 +124,14 @@ jobs:
override: true
- name: Run ${{ matrix.lua }} tests
run: |
cargo test --release --features "${{ matrix.lua }} vendored"
cargo test --release --features "${{ matrix.lua }} vendored async send serialize"
cargo test --release --features "${{ matrix.lua }},vendored"
cargo test --release --features "${{ matrix.lua }},vendored,async,send,serialize,macros"
shell: bash
- name: Run compile tests (macos lua53)
if: ${{ matrix.os == 'macos-latest' && matrix.lua == 'lua53' }}
run: |
TRYBUILD=overwrite cargo test --release --features "${{ matrix.lua }} vendored" -- --ignored
TRYBUILD=overwrite cargo test --release --features "${{ matrix.lua }} vendored async send serialize" -- --ignored
TRYBUILD=overwrite cargo test --release --features "${{ matrix.lua }},vendored" -- --ignored
TRYBUILD=overwrite cargo test --release --features "${{ matrix.lua }},vendored,async,send,serialize,macros" -- --ignored
shell: bash
test_modules:
@@ -157,8 +157,8 @@ jobs:
override: true
- name: Run ${{ matrix.lua }} module tests
run: |
(cd examples/module && cargo build --release --features "${{ matrix.lua }} vendored")
(cd tests/module && cargo test --release --features "${{ matrix.lua }} vendored")
(cd examples/module && cargo build --release --features "${{ matrix.lua }},vendored")
(cd tests/module && cargo test --release --features "${{ matrix.lua }},vendored")
shell: bash
test_modules_windows:
@@ -210,4 +210,4 @@ jobs:
- uses: actions-rs/clippy-check@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
args: --features "${{ matrix.lua }},vendored,async,send,serialize
args: --features "${{ matrix.lua }},vendored,async,send,serialize,macros"

Cargo.toml

@@ -17,7 +17,7 @@ with async/await features and support of writing native lua modules in Rust.
"""
[package.metadata.docs.rs]
features = ["lua53", "async", "send", "serialize"]
features = ["lua53", "async", "send", "serialize", "macros"]
rustdoc-args = ["--cfg", "docsrs"]
[workspace]
@@ -38,6 +38,7 @@ module = ["mlua_derive"]
async = ["futures-core", "futures-task", "futures-util"]
send = []
serialize = ["serde", "erased-serde"]
macros = ["mlua_derive/macros"]
[dependencies]
mlua_derive = { version = "0.5", optional = true, path = "mlua_derive" }

mlua_derive/Cargo.toml

@@ -11,7 +11,14 @@ license = "MIT"
[lib]
proc-macro = true
[features]
macros = ["proc-macro-error", "itertools", "regex", "once_cell"]
[dependencies]
proc-macro2 = "1.0"
quote = "1.0"
proc-macro2 = { version = "1.0", features = ["span-locations"] }
proc-macro-error = { version = "1.0", optional = true }
syn = { version = "1.0", features = ["full"] }
itertools = { version = "0.10", optional = true }
regex = { version = "1.4", optional = true }
once_cell = { version = "1.5", optional = true }

mlua_derive/src/chunk.rs Normal file (104 lines)

@@ -0,0 +1,104 @@
use proc_macro::{TokenStream, TokenTree};
use crate::token::{Pos, Token, Tokens};
#[derive(Debug, Clone)]
pub(crate) struct Capture {
key: Token,
rust: TokenTree,
}
impl Capture {
fn new(key: Token, rust: TokenTree) -> Self {
Self { key, rust }
}
/// Token string inside `chunk!`
pub(crate) fn key(&self) -> &Token {
&self.key
}
/// As rust variable, e.g. `x`
pub(crate) fn as_rust(&self) -> &TokenTree {
&self.rust
}
}
#[derive(Debug)]
pub(crate) struct Captures(Vec<Capture>);
impl Captures {
pub(crate) fn new() -> Self {
Self(Vec::new())
}
pub(crate) fn add(&mut self, token: &Token) -> Capture {
let tt = token.tree();
let key = token.clone();
match self.0.iter().find(|arg| arg.key() == &key) {
Some(arg) => arg.clone(),
None => {
let arg = Capture::new(key, tt.clone());
self.0.push(arg.clone());
arg
}
}
}
pub(crate) fn captures(&self) -> &[Capture] {
&self.0
}
}
#[derive(Debug)]
pub(crate) struct Chunk {
source: String,
caps: Captures,
}
impl Chunk {
pub(crate) fn new(tokens: TokenStream) -> Self {
let tokens = Tokens::retokenize(tokens);
let mut source = String::new();
let mut caps = Captures::new();
let mut pos: Option<Pos> = None;
for t in tokens {
if t.is_cap() {
caps.add(&t);
}
let (line, col) = (t.start().line, t.start().column);
let (prev_line, prev_col) = pos
.take()
.map(|lc| (lc.line, lc.column))
.unwrap_or_else(|| (line, col));
if line > prev_line {
source.push_str("\n");
} else if line == prev_line {
for _ in 0..col.saturating_sub(prev_col) {
source.push_str(" ");
}
}
source.push_str(&t.to_string());
pos = Some(t.end());
}
Self {
source: source.trim_end().to_string(),
caps,
}
}
pub(crate) fn source(&self) -> &str {
&self.source
}
pub(crate) fn captures(&self) -> &[Capture] {
self.caps.captures()
}
}
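A standalone sketch of the source-reconstruction idea used by `Chunk::new` above, with simplified `Pos`/`Tok` stand-ins for the real token types (this is illustration, not code from the commit): a newline is emitted whenever the line number advances, and spaces are padded to cover any column gap between consecutive tokens on the same line.

```rust
// Simplified stand-ins for the real `Pos`/`Token` types; illustration only.
#[derive(Clone, Copy)]
struct Pos {
    line: usize,
    column: usize,
}

struct Tok {
    text: &'static str,
    start: Pos,
    end: Pos,
}

// Rebuild a source string from positioned tokens, mirroring `Chunk::new`:
// newline when the line advances, space padding for column gaps on the same line.
fn rebuild(tokens: &[Tok]) -> String {
    let mut source = String::new();
    let mut prev: Option<Pos> = None;
    for t in tokens {
        let prev_pos = prev.take().unwrap_or(t.start);
        if t.start.line > prev_pos.line {
            source.push('\n');
        } else if t.start.line == prev_pos.line {
            for _ in 0..t.start.column.saturating_sub(prev_pos.column) {
                source.push(' ');
            }
        }
        source.push_str(t.text);
        prev = Some(t.end);
    }
    source.trim_end().to_string()
}

fn main() {
    // `local x = 1` with one-column gaps between the tokens.
    let toks = [
        Tok { text: "local", start: Pos { line: 1, column: 0 }, end: Pos { line: 1, column: 5 } },
        Tok { text: "x", start: Pos { line: 1, column: 6 }, end: Pos { line: 1, column: 7 } },
        Tok { text: "=", start: Pos { line: 1, column: 8 }, end: Pos { line: 1, column: 9 } },
        Tok { text: "1", start: Pos { line: 1, column: 10 }, end: Pos { line: 1, column: 11 } },
    ];
    assert_eq!(rebuild(&toks), "local x = 1");
}
```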

mlua_derive/src/lib.rs

@@ -1,10 +1,14 @@
extern crate proc_macro;
use proc_macro::TokenStream;
use proc_macro2::{Ident, Span};
use quote::quote_spanned;
use syn::{parse_macro_input, spanned::Spanned, AttributeArgs, Error, ItemFn};
#[cfg(feature = "macros")]
use {
crate::chunk::Chunk, proc_macro::TokenTree, proc_macro2::TokenStream as TokenStream2,
proc_macro_error::proc_macro_error, quote::quote,
};
#[proc_macro_attribute]
pub fn lua_module(attr: TokenStream, item: TokenStream) -> TokenStream {
let args = parse_macro_input!(attr as AttributeArgs);
@@ -35,3 +39,82 @@ pub fn lua_module(attr: TokenStream, item: TokenStream) -> TokenStream {
wrapped.into()
}
#[cfg(feature = "macros")]
fn to_ident(tt: &TokenTree) -> TokenStream2 {
let s: TokenStream = tt.clone().into();
s.into()
}
#[cfg(feature = "macros")]
#[proc_macro]
#[proc_macro_error]
pub fn chunk(input: TokenStream) -> TokenStream {
let chunk = Chunk::new(input);
let source = chunk.source();
let caps_len = chunk.captures().len();
let caps = chunk.captures().iter().map(|cap| {
let cap_name = cap.as_rust().to_string();
let cap = to_ident(cap.as_rust());
quote! { env.raw_set(#cap_name, #cap)?; }
});
let wrapped_code = quote! {{
use ::mlua::{AsChunk, ChunkMode, Lua, Result, Value};
use ::std::marker::PhantomData;
use ::std::sync::Mutex;
fn annotate<'a, F: FnOnce(&'a Lua) -> Result<Value<'a>>>(f: F) -> F { f }
struct InnerChunk<'a, F: FnOnce(&'a Lua) -> Result<Value<'a>>>(Mutex<Option<F>>, PhantomData<&'a ()>);
impl<'lua, F> AsChunk<'lua> for InnerChunk<'lua, F>
where
F: FnOnce(&'lua Lua) -> Result<Value<'lua>>,
{
fn source(&self) -> &[u8] {
(#source).as_bytes()
}
fn env(&self, lua: &'lua Lua) -> Option<Result<Value<'lua>>> {
if #caps_len > 0 {
if let Ok(mut make_env) = self.0.lock() {
if let Some(make_env) = make_env.take() {
return Some(make_env(lua));
}
}
}
None
}
fn mode(&self) -> Option<ChunkMode> {
Some(ChunkMode::Text)
}
}
let make_env = annotate(move |lua: &Lua| -> Result<Value> {
let globals = lua.globals();
let env = lua.create_table()?;
let meta = lua.create_table()?;
meta.raw_set("__index", globals.clone())?;
meta.raw_set("__newindex", globals)?;
// Add captured variables
#(#caps)*
env.set_metatable(Some(meta));
Ok(Value::Table(env))
});
&InnerChunk(Mutex::new(Some(make_env)), PhantomData)
}};
wrapped_code.into()
}
#[cfg(feature = "macros")]
mod chunk;
#[cfg(feature = "macros")]
mod token;
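For reference, the environment that the generated `make_env` closure builds (a fresh table whose metatable forwards reads and writes to the globals, plus the captured values) can be written by hand with mlua's public API. A minimal sketch, not part of the commit and assuming a Lua-version feature such as `lua53` is enabled:

```rust
use mlua::{Lua, Result};

fn main() -> Result<()> {
    let lua = Lua::new();

    // A fresh environment table: reads and writes fall through to the globals
    // via the metatable, while captured values live directly in the table.
    let globals = lua.globals();
    let env = lua.create_table()?;
    let meta = lua.create_table()?;
    meta.raw_set("__index", globals.clone())?;
    meta.raw_set("__newindex", globals)?;
    env.raw_set("name", "Rustacean")?; // stands in for a `$name` capture
    env.set_metatable(Some(meta));

    lua.load(r#"print("hello, " .. name)"#)
        .set_environment(env)?
        .exec()
}
```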

mlua_derive/src/token.rs Normal file (234 lines)

@@ -0,0 +1,234 @@
use std::{
cmp::{Eq, PartialEq},
fmt::{self, Display, Formatter},
iter::IntoIterator,
vec::IntoIter,
};
use itertools::Itertools;
use once_cell::sync::Lazy;
use proc_macro::{Delimiter, Span, TokenStream, TokenTree};
use proc_macro2::Span as Span2;
use regex::Regex;
#[derive(Clone, Copy, Debug)]
pub(crate) struct Pos {
pub(crate) line: usize,
pub(crate) column: usize,
}
impl Pos {
fn new(line: usize, column: usize) -> Self {
Self { line, column }
}
fn left(&self) -> Self {
Self {
line: self.line,
column: self.column.saturating_sub(1),
}
}
fn right(&self) -> Self {
Self {
line: self.line,
column: self.column.saturating_add(1),
}
}
}
fn span_pos(span: &Span) -> (Pos, Pos) {
let span2: Span2 = span.clone().into();
let start = span2.start();
let end = span2.end();
// In stable, line/column information is not provided
// and set to 0 (line is 1-indexed)
if start.line == 0 || end.line == 0 {
return fallback_span_pos(span);
}
(
Pos::new(start.line, start.column),
Pos::new(end.line, end.column),
)
}
fn parse_pos(span: &Span) -> Option<(usize, usize)> {
// Workaround to somehow retrieve location information in span in stable rust :(
static RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"bytes\(([0-9]+)\.\.([0-9]+)\)").unwrap());
match RE.captures(&format!("{:?}", span)) {
Some(caps) => match (caps.get(1), caps.get(2)) {
(Some(start), Some(end)) => Some((
match start.as_str().parse() {
Ok(v) => v,
_ => return None,
},
match end.as_str().parse() {
Ok(v) => v,
_ => return None,
},
)),
_ => None,
},
None => None,
}
}
fn fallback_span_pos(span: &Span) -> (Pos, Pos) {
let (start, end) = match parse_pos(span) {
Some(v) => v,
None => proc_macro_error::abort_call_site!(
"Cannot retrieve span information; please use nightly"
),
};
(Pos::new(1, start), Pos::new(1, end))
}
/// Attribute of token.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum TokenAttr {
/// No attribute
None,
/// Starts with `$`
Cap,
}
#[derive(Clone, Debug)]
pub(crate) struct Token {
source: String,
tree: TokenTree,
start: Pos,
end: Pos,
attr: TokenAttr,
}
impl PartialEq for Token {
fn eq(&self, other: &Self) -> bool {
self.source == other.source && self.attr == other.attr
}
}
impl Eq for Token {}
impl Token {
fn new(tree: TokenTree) -> Self {
let (start, end) = span_pos(&tree.span());
Self {
source: tree.to_string(),
start,
end,
tree,
attr: TokenAttr::None,
}
}
fn new_delim(source: String, tree: TokenTree, open: bool) -> Self {
let (start, end) = span_pos(&tree.span());
let (start, end) = if open {
(start, start.right())
} else {
(end.left(), end)
};
Self {
source,
tree,
start,
end,
attr: TokenAttr::None,
}
}
pub(crate) fn tree(&self) -> &TokenTree {
&self.tree
}
pub(crate) fn is_cap(&self) -> bool {
self.attr == TokenAttr::Cap
}
pub(crate) fn start(&self) -> Pos {
self.start
}
pub(crate) fn end(&self) -> Pos {
self.end
}
fn is(&self, s: &str) -> bool {
self.source == s
}
fn attr(mut self, attr: TokenAttr) -> Self {
self.attr = attr;
self
}
}
#[derive(Debug)]
pub(crate) struct Tokens(pub(crate) Vec<Token>);
impl Tokens {
pub(crate) fn retokenize(tt: TokenStream) -> Tokens {
Tokens(
tt.into_iter()
.map(|tt| Tokens::from(tt))
.flatten()
.peekable()
.batching(|iter| {
// Find variable tokens
let t = iter.next()?;
if t.is("$") {
// `$` + `ident` => `$ident`
let t = iter.next().expect("$ must trail an identifier");
Some(t.attr(TokenAttr::Cap))
} else {
Some(t)
}
})
.collect(),
)
}
}
impl IntoIterator for Tokens {
type Item = Token;
type IntoIter = IntoIter<Token>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
impl From<TokenTree> for Tokens {
fn from(tt: TokenTree) -> Self {
let tts = match tt.clone() {
TokenTree::Group(g) => {
let (b, e) = match g.delimiter() {
Delimiter::Parenthesis => ("(", ")"),
Delimiter::Brace => ("{", "}"),
Delimiter::Bracket => ("[", "]"),
Delimiter::None => ("", ""),
};
let (b, e) = (b.into(), e.into());
vec![Token::new_delim(b, tt.clone(), true)]
.into_iter()
.chain(g.stream().into_iter().map(|tt| Tokens::from(tt)).flatten())
.chain(vec![Token::new_delim(e, tt.clone(), false)])
.collect()
}
_ => vec![Token::new(tt)],
};
Tokens(tts)
}
}
impl Display for Token {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", self.source)
}
}
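The `parse_pos` fallback above leans on the `Debug` output of `Span`, which on stable Rust typically looks something like `#0 bytes(10..15)`. A self-contained sketch of that extraction (plain strings instead of real spans, so it runs outside a proc macro; not code from the commit):

```rust
use regex::Regex;

// Pull the byte range out of a span's Debug representation, e.g. "#0 bytes(10..15)".
fn parse_pos(debug_repr: &str) -> Option<(usize, usize)> {
    let re = Regex::new(r"bytes\(([0-9]+)\.\.([0-9]+)\)").ok()?;
    let caps = re.captures(debug_repr)?;
    let start = caps.get(1)?.as_str().parse().ok()?;
    let end = caps.get(2)?.as_str().parse().ok()?;
    Some((start, end))
}

fn main() {
    assert_eq!(parse_pos("#0 bytes(10..15)"), Some((10, 15)));
    assert_eq!(parse_pos("no location info"), None);
}
```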

src/lib.rs

@@ -98,12 +98,13 @@ mod userdata;
mod util;
mod value;
#[doc(hidden)]
pub use crate::ffi::lua_State;
pub use crate::error::{Error, ExternalError, ExternalResult, Result};
pub use crate::function::Function;
pub use crate::hook::{Debug, DebugNames, DebugSource, DebugStack, HookTriggers};
pub use crate::lua::{Chunk, ChunkMode, GCMode, Lua, LuaOptions};
pub use crate::lua::{AsChunk, Chunk, ChunkMode, GCMode, Lua, LuaOptions};
pub use crate::multi::Variadic;
pub use crate::scope::Scope;
pub use crate::stdlib::StdLib;
@@ -126,11 +127,60 @@ pub mod prelude;
#[cfg_attr(docsrs, doc(cfg(feature = "serialize")))]
pub mod serde;
// Re-export #[mlua_derive::lua_module].
#[cfg(feature = "mlua_derive")]
#[cfg(any(feature = "mlua_derive"))]
#[allow(unused_imports)]
#[macro_use]
extern crate mlua_derive;
#[cfg(feature = "mlua_derive")]
#[doc(hidden)]
pub use mlua_derive::*;
/// Create a type that implements [`AsChunk`] and can capture Rust variables.
///
/// This macro allows writing Lua code directly in Rust code.
///
/// Rust variables can be referenced from Lua using the `$` prefix, as shown in the example below.
/// User-defined Rust types need to implement the [`UserData`] or [`ToLua`] trait.
///
/// Captured variables are moved into the chunk.
///
/// ```
/// use mlua::{Lua, Result, chunk};
///
/// fn main() -> Result<()> {
/// let lua = Lua::new();
/// let name = "Rustacean";
/// lua.load(chunk! {
/// print("hello, " .. $name)
/// }).exec()
/// }
/// ```
///
/// ## Syntax issues
///
/// Since the Lua code is tokenized by the Rust tokenizer, some restrictions apply.
/// The main thing to remember is:
///
/// - Use double quoted strings (`""`) instead of single quoted strings (`''`).
///
/// (Single quoted strings only work if they contain a single character, since in Rust,
/// `'a'` is a character literal).
///
/// Other minor limitations:
///
/// - Certain escape codes in string literals are not supported.
/// (Specifically: `\a`, `\b`, `\f`, `\v`, `\123` (octal escape codes), `\u`, and `\U`).
///
/// These are accepted: `\\`, `\n`, `\t`, `\r`, `\xAB` (hex escape codes), and `\0`.
///
/// - The `//` (floor division) operator is unusable, as it starts a comment.
///
/// Everything else should work.
///
/// [`AsChunk`]: trait.AsChunk.html
/// [`UserData`]: trait.UserData.html
/// [`ToLua`]: trait.ToLua.html
#[cfg(any(feature = "macros"))]
#[cfg_attr(docsrs, doc(cfg(feature = "macros")))]
pub use mlua_derive::chunk;
#[cfg(any(feature = "module"))]
#[cfg_attr(docsrs, doc(cfg(feature = "module")))]
pub use mlua_derive::lua_module;
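To illustrate the note above that captured user types go through [`UserData`]/[`ToLua`], here is a hedged sketch of capturing a userdata value in `chunk!` (the `Counter` type is hypothetical, not from the commit; requires the new `macros` feature plus a Lua-version feature):

```rust
use mlua::{chunk, Lua, Result, UserData, UserDataMethods};

// Hypothetical user type exposed to Lua through `UserData`.
struct Counter(i64);

impl UserData for Counter {
    fn add_methods<'lua, M: UserDataMethods<'lua, Self>>(methods: &mut M) {
        methods.add_method("value", |_, this, ()| Ok(this.0));
    }
}

fn main() -> Result<()> {
    let lua = Lua::new();
    let counter = Counter(41);
    // `$counter` is moved into the chunk and converted to a userdata value via `ToLua`.
    lua.load(chunk! {
        assert($counter:value() == 41)
    })
    .exec()
}
```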

src/lua.rs

@@ -769,14 +769,14 @@ impl Lua {
/// [`Chunk::exec`]: struct.Chunk.html#method.exec
pub fn load<'lua, 'a, S>(&'lua self, source: &'a S) -> Chunk<'lua, 'a>
where
S: AsRef<[u8]> + ?Sized,
S: AsChunk<'lua> + ?Sized,
{
Chunk {
lua: self,
source: source.as_ref(),
name: None,
env: None,
mode: None,
source: source.source(),
name: source.name(),
env: source.env(self),
mode: source.mode(),
}
}
@@ -2010,7 +2010,7 @@ pub struct Chunk<'lua, 'a> {
lua: &'lua Lua,
source: &'a [u8],
name: Option<CString>,
env: Option<Value<'lua>>,
env: Option<Result<Value<'lua>>>,
mode: Option<ChunkMode>,
}
@@ -2021,6 +2021,32 @@ pub enum ChunkMode {
Binary,
}
/// Trait for types [loadable by Lua] and convertible to a [`Chunk`]
///
/// [loadable by Lua]: https://www.lua.org/manual/5.3/manual.html#3.3.2
/// [`Chunk`]: struct.Chunk.html
pub trait AsChunk<'lua> {
/// Returns chunk data (can be text or binary)
fn source(&self) -> &[u8];
/// Returns optional chunk name
fn name(&self) -> Option<CString> {
None
}
/// Returns optional chunk [environment]
///
/// [environment]: https://www.lua.org/manual/5.3/manual.html#2.2
fn env(&self, _lua: &'lua Lua) -> Option<Result<Value<'lua>>> {
None
}
/// Returns optional chunk mode (text or binary)
fn mode(&self) -> Option<ChunkMode> {
None
}
}
impl<'lua, 'a> Chunk<'lua, 'a> {
/// Sets the name of this chunk, which results in more informative error traces.
pub fn set_name<S: AsRef<[u8]> + ?Sized>(mut self, name: &S) -> Result<Chunk<'lua, 'a>> {
@@ -2046,7 +2072,8 @@ impl<'lua, 'a> Chunk<'lua, 'a> {
/// necessary to populate the environment in order for scripts using custom environments to be
/// useful.
pub fn set_environment<V: ToLua<'lua>>(mut self, env: V) -> Result<Chunk<'lua, 'a>> {
self.env = Some(env.to_lua(self.lua)?);
// Prefer to propagate conversion errors here, storing the successful value wrapped in `Ok`
self.env = Some(Ok(env.to_lua(self.lua)?));
Ok(self)
}
@@ -2100,7 +2127,7 @@ impl<'lua, 'a> Chunk<'lua, 'a> {
} else if let Ok(function) = self.lua.load_chunk(
&self.expression_source(),
self.name.as_ref(),
self.env.clone(),
self.env()?,
self.mode,
) {
function.call(())
@@ -2128,7 +2155,10 @@ impl<'lua, 'a> Chunk<'lua, 'a> {
} else if let Ok(function) = self.lua.load_chunk(
&self.expression_source(),
self.name.as_ref(),
self.env.clone(),
match self.env() {
Ok(env) => env,
Err(e) => return Box::pin(future::err(e)),
},
self.mode,
) {
function.call_async(())
@@ -2170,7 +2200,14 @@ impl<'lua, 'a> Chunk<'lua, 'a> {
/// This simply compiles the chunk without actually executing it.
pub fn into_function(self) -> Result<Function<'lua>> {
self.lua
.load_chunk(self.source, self.name.as_ref(), self.env, self.mode)
.load_chunk(self.source, self.name.as_ref(), self.env()?, self.mode)
}
fn env(&self) -> Result<Option<Value<'lua>>> {
match self.env {
None => Ok(None),
Some(ref env) => env.clone().map(Some),
}
}
fn expression_source(&self) -> Vec<u8> {
@@ -2181,6 +2218,12 @@ impl<'lua, 'a> Chunk<'lua, 'a> {
}
}
impl<'lua, T: AsRef<[u8]> + ?Sized> AsChunk<'lua> for T {
fn source(&self) -> &[u8] {
self.as_ref()
}
}
unsafe fn load_from_std_lib(state: *mut ffi::lua_State, libs: StdLib) -> Result<()> {
#[cfg(feature = "luajit")]
// Stop collector during library initialization
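Since `Lua::load` now accepts any `AsChunk` implementor, a custom source type can carry its own chunk name or mode. A minimal sketch against the trait as defined above (the `NamedScript` type is hypothetical, not part of the commit):

```rust
use std::ffi::CString;

use mlua::{AsChunk, ChunkMode, Lua, Result};

// Hypothetical source type that supplies its own chunk name and mode.
struct NamedScript {
    name: String,
    body: String,
}

impl<'lua> AsChunk<'lua> for NamedScript {
    fn source(&self) -> &[u8] {
        self.body.as_bytes()
    }

    fn name(&self) -> Option<CString> {
        CString::new(self.name.clone()).ok()
    }

    fn mode(&self) -> Option<ChunkMode> {
        Some(ChunkMode::Text)
    }
}

fn main() -> Result<()> {
    let lua = Lua::new();
    let script = NamedScript {
        name: "greeting".to_string(),
        body: r#"print("hello from a named chunk")"#.to_string(),
    };
    // The chunk name shows up in error messages and tracebacks.
    lua.load(&script).exec()
}
```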

tests/macros.rs Normal file (29 lines)

@@ -0,0 +1,29 @@
#![cfg(feature = "macros")]
use mlua::{chunk, Lua, Result};
#[test]
fn test_chunk_macro() -> Result<()> {
let lua = Lua::new();
let name = "Rustacean";
let table = vec![1];
let data = lua.create_table()?;
data.raw_set("num", 1)?;
lua.globals().set("g", 123)?;
lua.load(chunk! {
assert($name == "Rustacean")
assert($table[1] == 1)
assert($data.num == 1)
assert(g == 123)
s = 321
})
.exec()?;
assert_eq!(lua.globals().get::<_, i32>("s")?, 321);
Ok(())
}