#![feature(trace_macros)]
extern crate proc_macro;
extern crate proc_macro2;
#[macro_use]
extern crate quote;
extern crate syn;

use proc_macro::TokenStream;
use syn::{Ident, Attribute, DeriveInput};

/// Finds the first attribute whose path's leading segment matches `name`, if any.
fn find_attr_by_name<'a>(name: &str, attrs: &'a Vec<Attribute>) -> Option<&'a Attribute> {
    attrs.iter().find(|attr| {
        let first = attr.path.segments.first();
        let seg: Option<&&syn::PathSegment> = first.as_ref().map(|x| x.value());
        seg.map(|seg| seg.ident.to_string() == name).unwrap_or(false)
    })
}

/// Extracts the string value of a name-value attribute such as `#[LanguageName = "..."]`.
fn extract_attribute_arg_by_name(name: &str, attrs: &Vec<Attribute>) -> Option<String> {
    use syn::{Meta, Lit, MetaNameValue};
    find_attr_by_name(name, attrs).and_then(|attr| {
        match attr.interpret_meta() {
            Some(Meta::NameValue(MetaNameValue { lit: Lit::Str(litstr), .. })) => Some(litstr.value()),
            _ => None,
        }
    })
}

/// Parses a list attribute such as `#[PipelineSteps(a, b(x, y), c)]` into pairs of
/// (step identifier, optional list of debug-option identifiers).
fn extract_attribute_list(name: &str, attrs: &Vec<Attribute>) -> Option<Vec<(Ident, Option<Vec<Ident>>)>> {
    use syn::{Meta, MetaList, NestedMeta};
    find_attr_by_name(name, attrs).and_then(|attr| {
        match attr.interpret_meta() {
            Some(Meta::List(MetaList { nested, .. })) => {
                Some(nested.iter().map(|nested_meta| match nested_meta {
                    &NestedMeta::Meta(Meta::Word(ref ident)) => (ident.clone(), None),
                    &NestedMeta::Meta(Meta::List(MetaList { ref ident, nested: ref nested2, .. })) => {
                        let own_args = nested2.iter().map(|nested_meta2| match nested_meta2 {
                            &NestedMeta::Meta(Meta::Word(ref ident)) => ident.clone(),
                            _ => panic!("Bad format for doubly-nested attribute list")
                        }).collect();
                        (ident.clone(), Some(own_args))
                    },
                    _ => panic!("Bad format for nested list")
                }).collect())
            },
            _ => panic!("{} must be a comma-delimited list surrounded by parens", name)
        }
    })
}

/// Reads the identifier on the right-hand side of an attribute of the form
/// `#[AttrName = some_identifier]`.
fn get_attribute_identifier(attr_name: &str, attrs: &Vec<Attribute>) -> Option<proc_macro2::Ident> {
    find_attr_by_name(attr_name, attrs).and_then(|attr| {
        let tts = attr.tts.clone().into_iter().collect::<Vec<proc_macro2::TokenTree>>();
        if tts.len() == 2 {
            // The attribute's token stream is `= <ident>`; inspect the token after the equals sign.
            let ref after_equals: proc_macro2::TokenTree = tts[1];
            match after_equals {
                proc_macro2::TokenTree::Ident(ident) => Some(ident.clone()),
                _ => None
            }
        } else {
            None
        }
    })
}

#[proc_macro_derive(ProgrammingLanguageInterface,
                    attributes(LanguageName, SourceFileExtension, PipelineSteps, DocMethod,
                               HandleCustomInterpreterDirectives))]
pub fn derive_programming_language_interface(input: TokenStream) -> TokenStream {
    let ast: DeriveInput = syn::parse(input).unwrap();
    let name = &ast.ident;
    let attrs = &ast.attrs;

    let language_name: String = extract_attribute_arg_by_name("LanguageName", attrs)
        .expect("LanguageName is required");
    let file_ext = extract_attribute_arg_by_name("SourceFileExtension", attrs)
        .expect("SourceFileExtension is required");
    let passes = extract_attribute_list("PipelineSteps", attrs)
        .expect("PipelineSteps are required");
    let pass_idents = passes.iter().map(|x| x.0.clone());

    // If a DocMethod attribute was given, generate a get_doc() that forwards to that method.
    let get_doc_impl = match get_attribute_identifier("DocMethod", attrs) {
        None => quote! { },
        Some(method_name) => quote! {
            fn get_doc(&self, commands: &Vec<&str>) -> Option<String> {
                self.#method_name(commands)
            }
        }
    };

    // Likewise for HandleCustomInterpreterDirectives.
    let handle_custom_interpreter_directives_impl = match get_attribute_identifier("HandleCustomInterpreterDirectives", attrs) {
        None => quote! { },
        Some(method_name) => quote! {
            fn handle_custom_interpreter_directives(&mut self, commands: &Vec<&str>) -> Option<String> {
                //println!("If #method_name is &self not &mut self, this runs forever");
                self.#method_name(commands)
            }
        }
    };

    // Build a PassDescriptor expression for each pipeline step and its debug options.
    let pass_descriptors = passes.iter().map(|pass| {
        let name = pass.0.to_string();
        let opts: Vec<String> = match &pass.1 {
            None => vec![],
            Some(opts) => opts.iter().map(|o| o.to_string()).collect(),
        };
        quote! {
            PassDescriptor {
                name: #name.to_string(),
                debug_options: vec![#(format!(#opts)),*]
            }
        }
    });

    let tokens = quote! {
        use schala_repl::PassDescriptor;
        impl ProgrammingLanguageInterface for #name {
            fn get_language_name(&self) -> String {
                #language_name.to_string()
            }
            fn get_source_file_suffix(&self) -> String {
                #file_ext.to_string()
            }
            fn execute_pipeline(&mut self, input: &str, options: &EvalOptions) -> FinishedComputation {
                let mut chain = pass_chain![self, options; #(#pass_idents),* ];
                chain(input)
            }
            fn get_passes(&self) -> Vec<PassDescriptor> {
                vec![ #(#pass_descriptors),* ]
            }
            #get_doc_impl
            #handle_custom_interpreter_directives_impl
        }
    };

    let output: TokenStream = tokens.into();
    output
}
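// Illustrative usage only: a sketch of a language struct deriving this trait.
// The type name, method names, and pipeline step names below are hypothetical;
// the call site is assumed to have schala_repl's ProgrammingLanguageInterface,
// EvalOptions, FinishedComputation, and the pass_chain! macro in scope, and to
// define methods matching the named pipeline steps.
//
// #[derive(ProgrammingLanguageInterface)]
// #[LanguageName = "ExampleLang"]
// #[SourceFileExtension = "ex"]
// #[PipelineSteps(tokenizing, parsing(compact, trace), evaluation)]
// #[DocMethod = get_doc]
// #[HandleCustomInterpreterDirectives = handle_directives]
// struct ExampleLang {
//     // interpreter state goes here
// }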