ap: refactor to use custom vocab and iri types

IndexVocabulary and IriIndex have been replaced
with NyaVocabulary and NyaIri, and the JSON-LD
namespaces are now defined through a proc macro.
This commit is contained in:
anna 2023-08-10 06:41:16 +02:00
parent 891c6a9a39
commit 3d71b04338
Signed by: fef
GPG key ID: 2585C2DC6D79B485
14 changed files with 1611 additions and 833 deletions

78
Cargo.lock generated
View file

@ -296,6 +296,12 @@ dependencies = [
"num-traits",
]
[[package]]
name = "atomic"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba"
[[package]]
name = "autocfg"
version = "1.1.0"
@ -1082,6 +1088,19 @@ dependencies = [
"want",
]
[[package]]
name = "hyper-rustls"
version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c"
dependencies = [
"http",
"hyper",
"rustls 0.20.7",
"tokio",
"tokio-rustls",
]
[[package]]
name = "hyper-tls"
version = "0.5.0"
@ -1598,9 +1617,9 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "mime"
version = "0.3.16"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "minimal-lexical"
@ -1738,9 +1757,11 @@ dependencies = [
"iref",
"json-ld",
"jsonwebtoken",
"langtag",
"locspan",
"log",
"mime",
"nyanoblog-macros",
"pretty_env_logger",
"rand",
"rdf-types",
@ -1755,6 +1776,13 @@ dependencies = [
"uuid",
]
[[package]]
name = "nyanoblog-macros"
version = "0.1.0"
dependencies = [
"proc-macro-crate",
]
[[package]]
name = "once_cell"
version = "1.16.0"
@ -1957,6 +1985,16 @@ dependencies = [
"log",
]
[[package]]
name = "proc-macro-crate"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919"
dependencies = [
"once_cell",
"toml_edit",
]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
@ -2094,6 +2132,7 @@ dependencies = [
"http",
"http-body",
"hyper",
"hyper-rustls",
"hyper-tls",
"ipnet",
"js-sys",
@ -2104,16 +2143,19 @@ dependencies = [
"percent-encoding",
"pin-project-lite",
"rustls 0.20.7",
"rustls-pemfile",
"serde",
"serde_json",
"serde_urlencoded",
"tokio",
"tokio-native-tls",
"tokio-rustls",
"tower-service",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"webpki-roots 0.22.6",
"winreg",
]
@ -2880,6 +2922,23 @@ dependencies = [
"tracing",
]
[[package]]
name = "toml_datetime"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b"
[[package]]
name = "toml_edit"
version = "0.19.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a"
dependencies = [
"indexmap 2.0.0",
"toml_datetime",
"winnow",
]
[[package]]
name = "tower-service"
version = "0.3.2"
@ -2989,11 +3048,13 @@ dependencies = [
[[package]]
name = "uuid"
version = "1.2.2"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "422ee0de9031b5b948b97a8fc04e3aa35230001a722ddd27943e0be31564ce4c"
checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d"
dependencies = [
"atomic",
"getrandom",
"serde",
]
[[package]]
@ -3342,6 +3403,15 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
[[package]]
name = "winnow"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acaaa1190073b2b101e15083c38ee8ec891b5e05cbee516521e94ec008f61e64"
dependencies = [
"memchr",
]
[[package]]
name = "winreg"
version = "0.10.1"

View file

@ -1,29 +1,40 @@
[package]
name = "nyanoblog"
version = "0.1.0"
edition = "2021"
version.workspace = true
[workspace]
members = [
"macros"
]
[workspace.package]
version = "0.1.0"
[dependencies]
nyanoblog-macros = { path = "./macros" }
actix-rt = "2.7"
actix-web = { version = "4", features = ["rustls"] }
argon2 = "0.5.1"
async-trait = "0.1.59"
base64 = "0.21"
bytes = "1.3"
chrono = { version = "0.4", features = [ "alloc", "clock", "serde" ] }
chrono = { version = "0.4", features = [ "alloc", "clock", "serde", "std" ] }
dotenvy = "0.15.6"
futures = "0.3"
hashbrown = "0.13.1"
iref = "2.2"
json-ld = { version = "0.15.0" }
jsonwebtoken = { version = "8", default-features = false }
langtag = "0.3.4"
locspan = "0.7"
log = "0.4"
mime = "0.3"
mime = "0.3.17"
pretty_env_logger = "0.5.0"
rand = "0.8"
rdf-types = "0.15.4"
reqwest = { version = "0.11", features = [ "rustls" ] }
reqwest = { version = "0.11", features = [ "rustls-tls" ] }
rsa = { version = "0.9.2", features = [ "sha2" ] }
serde = { version = "1.0", features = [ "derive" ] }
serde_json = "1.0"
@ -31,4 +42,4 @@ serde_test = "1.0"
sqlx = { version = "0.7.1", features = [ "chrono", "postgres", "runtime-tokio", "tls-rustls", "uuid" ] }
static-iref = "2"
tokio = "1.23"
uuid = { version = "1.2", features = [ "v4" ] }
uuid = { version = "1.4", features = [ "v4", "v7", "serde" ] }

10
macros/Cargo.toml Normal file
View file

@ -0,0 +1,10 @@
[package]
name = "nyanoblog-macros"
edition = "2021"
version.workspace = true
[lib]
proc-macro = true
[dependencies]
proc-macro-crate = "1.3.1"

View file

@ -0,0 +1,523 @@
use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use std::iter::Peekable;
use std::sync::atomic::{AtomicIsize, Ordering};
use crate::util::*;
// "forgive me, rustc, for i have sinned"
//
// good luck to whoever is trying to read not to mention understand this lmao
/// This is a global counter for unique indices because every invocation
/// of this macro should yield an enum with unique numeric representations
static INDEX: AtomicIsize = AtomicIsize::new(1);
/// Entry point of the `define_json_ns!` macro.
///
/// Parses up to three sections — `meta { ... }` (required), `class { ... }`,
/// and `prop { ... }` (each at most once, in any order) — and expands into
/// `pub mod <mod_name> { ... }` containing the generated enums.
pub fn invoke(input: TokenStream) -> Result<TokenStream> {
    let mut stream = input.into_iter().peekable();

    // parse the thing
    let (mut meta, mut classes, mut props) = (None, None, None);
    while let Some(tt) = stream.next() {
        // NOTE(review): non-identifier tokens at the top level are silently
        // skipped here — confirm that is intended
        if let TokenTree::Ident(ident) = &tt {
            let name = ident.to_string();
            match name.as_str() {
                "meta" => {
                    let body = eat_group(&mut stream, Delimiter::Brace)?;
                    // Option::replace returns the old value; Some means duplicate
                    if meta.replace(eat_meta(body.stream())?).is_some() {
                        return err("Duplicate definition of metadata", ident.span());
                    }
                }
                "class" => {
                    let body = eat_group(&mut stream, Delimiter::Brace)?;
                    let body = eat_enum_members(body.stream())?;
                    // keep the section keyword's span for later diagnostics
                    if classes.replace((body, ident.span())).is_some() {
                        return err("Duplicate class definition", ident.span());
                    }
                }
                "prop" => {
                    let body = eat_group(&mut stream, Delimiter::Brace)?;
                    let body = eat_enum_members(body.stream())?;
                    if props.replace((body, ident.span())).is_some() {
                        return err("Duplicate props definition", ident.span());
                    }
                }
                name => return err(format!("Unknown section \"{name}\""), ident.span()),
            }
            eat_maybe_comma(&mut stream);
        }
    }

    // now spit out some new tokens
    let meta = meta.ok_or_else(|| error("Missing meta block", None))?;
    let mut ts = TokenStream::new();
    if let Some((classes, span)) = classes {
        ts.extend(compile_enum(
            &meta,
            make_ident_tt("__Class", None),
            &classes,
            span,
        ));
    }
    if let Some((props, span)) = props {
        ts.extend(compile_enum(
            &meta,
            make_ident_tt("__Prop", None),
            &props,
            span,
        ));
    }
    let mut module = rust_code!(pub mod );
    module.extend([meta.mod_name, make_group_tt(Delimiter::Brace, ts)]);
    //println!("{module}");
    Ok(module)
}
/// Metadata parsed from the `meta { ... }` section of the macro input.
struct NsMeta {
    // name of the module the macro expands into
    mod_name: TokenTree,
    // IRI prefix shared by all members of this namespace
    iri_base: String,
    // optional secondary prefix accepted when resolving IRIs back to members
    alias_base: Option<String>,
}
/// A single member of a `class`/`prop` section (currently just its name).
#[derive(Clone)]
struct EnumMember {
    name: Ident,
}
//
// parser
//
// meta { key1 = val1, key2 = val2, ... }
/// Parses the body of a `meta { key1 = val1, key2 = val2, ... }` section.
///
/// Recognized keys:
/// - `mod_name` (ident, required): name of the generated module
/// - `iri_base` (string, required): IRI prefix for all members
/// - `alias_base` (string, optional): alternative prefix accepted by the
///   generated `from_iri`
///
/// Duplicate keys, unknown keys, and missing separating commas are errors.
fn eat_meta(stream: TokenStream) -> Result<NsMeta> {
    let mut stream = stream.into_iter().peekable();
    let mut mod_name = None;
    let mut iri_base = None;
    let mut alias_base = None;
    // starts out true so the first entry needs no leading comma
    let mut had_preceding_comma = true;
    while let Some(ident) = eat_ident_or_end(&mut stream)? {
        if !had_preceding_comma {
            return err("Expected a comma", ident.span());
        }
        let name = ident.to_string();
        match name.as_str() {
            "mod_name" => {
                // Option::replace returns the old value; Some means duplicate
                if mod_name.replace(eat_assign_ident(&mut stream)?).is_some() {
                    return err("Duplicate definition of key \"mod_name\"", ident.span());
                }
            }
            "iri_base" => {
                if iri_base.replace(eat_assign_string(&mut stream)?).is_some() {
                    return err("Duplicate definition of key \"iri_base\"", ident.span());
                }
            }
            "alias_base" => {
                if alias_base
                    .replace(eat_assign_string(&mut stream)?)
                    .is_some()
                {
                    return err("Duplicate definition of key \"alias_base\"", ident.span());
                }
            }
            name => return err(format!("Unknown key \"{name}\""), ident.span()),
        }
        // a trailing comma after the last entry is allowed
        had_preceding_comma = eat_maybe_comma(&mut stream);
    }
    Ok(NsMeta {
        mod_name: mod_name.ok_or_else(|| error("Missing key \"mod_name\"", None))?,
        iri_base: iri_base.ok_or_else(|| error("Missing key \"iri_base\"", None))?,
        alias_base,
    })
}
// qualifier { Member1, Member2, ... }
/// Parses a comma-separated list of bare-identifier enum members.
fn eat_enum_members(stream: TokenStream) -> Result<Vec<EnumMember>> {
    let mut stream = stream.into_iter().peekable();
    let mut members = Vec::new();
    // starts out true so the first member needs no leading comma
    let mut had_preceding_comma = true;
    while let Some(member) = eat_enum_member(&mut stream)? {
        if !had_preceding_comma {
            return err_unexpected(",", &member.name, member.name.span());
        }
        members.push(member);
        // a trailing comma after the last member is allowed
        had_preceding_comma = eat_maybe_comma(&mut stream);
    }
    Ok(members)
}
/// Reads one enum member (a bare identifier) from the stream.
///
/// Returns `Ok(None)` at end of stream and an error for any token that is
/// not an identifier.
fn eat_enum_member(stream: &mut impl TtIter) -> Result<Option<EnumMember>> {
    let Some(tt) = stream.next() else {
        return Ok(None);
    };
    match tt {
        TokenTree::Ident(name) => Ok(Some(EnumMember { name })),
        other => err_unexpected("ident", &other, other.span()),
    }
}
/// Consumes `= <ident>` and returns the identifier wrapped as a [`TokenTree`].
fn eat_assign_ident(stream: &mut Peekable<impl TtIter>) -> Result<TokenTree> {
    let _ = eat_eq(stream)?;
    eat_ident(stream).map(TokenTree::Ident)
}
/// Consumes `= "<string>"` and returns the contents of the string literal.
///
/// NOTE(review): the quotes are stripped byte-wise from the literal's source
/// text, so escape sequences are passed through verbatim and raw string
/// literals (`r"..."`) are rejected — confirm that is intentional.
fn eat_assign_string(stream: &mut impl TtIter) -> Result<String> {
    let _ = eat_eq(stream)?;
    match stream.next() {
        Some(TokenTree::Literal(literal)) => {
            let s = literal.to_string();
            let sb = s.as_bytes();
            // only accept plain double-quoted literals
            if sb.len() >= 2 && sb[0] == b'"' && sb[sb.len() - 1] == b'"' {
                // the interior of a valid string literal is valid UTF-8
                Ok(String::from_utf8(Vec::from(&sb[1..(sb.len() - 1)])).unwrap())
            } else {
                err_unexpected("string", &literal, literal.span())
            }
        }
        Some(tt) => err_unexpected("string", &tt, tt.span()),
        None => err_end(),
    }
}
/// Consumes the next token and returns it as a [`Group`].
///
/// When `delimiter` is `Some`, the group must additionally use that exact
/// delimiter. Errors when the next token is not a group, uses the wrong
/// delimiter, or the stream has ended.
fn eat_group(stream: &mut impl TtIter, delimiter: impl Into<Option<Delimiter>>) -> Result<Group> {
    let delimiter = delimiter.into();
    // Opening character for error messages. The previous version always
    // claimed `{` was expected, even for bracket/paren groups.
    let expected = match delimiter {
        Some(Delimiter::Brace) | None => "{",
        Some(Delimiter::Parenthesis) => "(",
        Some(Delimiter::Bracket) => "[",
        Some(Delimiter::None) => "delimited group",
    };
    match stream.next() {
        Some(TokenTree::Group(grp)) => {
            if let Some(delim) = delimiter {
                if grp.delimiter() == delim {
                    Ok(grp)
                } else {
                    err(format!("Expected a `{expected}`"), grp.span())
                }
            } else {
                Ok(grp)
            }
        }
        Some(tt) => err_unexpected(expected, &tt, tt.span()),
        None => err_end(),
    }
}
fn eat_ident(stream: &mut impl TtIter) -> Result<Ident> {
eat_ident_or_end(stream).and_then(|o| o.ok_or_else(error_end))
}
/// Reads one identifier from the stream.
///
/// Returns `Ok(None)` when the stream is exhausted and an error for any
/// non-identifier token.
fn eat_ident_or_end(stream: &mut impl TtIter) -> Result<Option<Ident>> {
    let Some(tt) = stream.next() else {
        return Ok(None);
    };
    if let TokenTree::Ident(ident) = tt {
        Ok(Some(ident))
    } else {
        err_unexpected("ident", &tt, tt.span())
    }
}
/// Consumes a single `=` token, erroring on anything else or end of stream.
fn eat_eq(stream: &mut impl TtIter) -> Result<TokenTree> {
    match stream.next() {
        None => err_end(),
        Some(TokenTree::Punct(p)) if p.as_char() == '=' => Ok(TokenTree::Punct(p)),
        Some(other) => err_unexpected("=", &other, other.span()),
    }
}
/// Consumes a comma if one is the next token; reports whether it did.
fn eat_maybe_comma(stream: &mut Peekable<impl TtIter>) -> bool {
    let is_comma = |tt: &TokenTree| matches!(tt, TokenTree::Punct(p) if p.as_char() == ',');
    stream.next_if(is_comma).is_some()
}
//
// compiler
//
/// Compiles one `class`/`prop` section into an enum plus its impl blocks.
///
/// Each member gets a globally unique discriminant drawn from [`INDEX`], so
/// indices never collide across macro invocations.
fn compile_enum(
    meta: &NsMeta,
    name: TokenTree,
    members: &[EnumMember],
    kw_span: Span,
) -> TokenStream {
    // enum Name { Member1, Member2, ... }
    let mut ts = rust_code!(
        #[derive(Copy, Clone, Eq, PartialEq)]
        #[allow(non_camel_case_types)]
        pub
    );
    ts.extend([
        // put the `enum` keyword on the section keyword's span
        make_ident_tt("enum", kw_span),
        name.clone(),
        make_group_tt(
            Delimiter::Brace,
            members.iter().flat_map(|m| {
                [
                    TokenTree::Ident(m.name.clone()),
                    TokenTree::Punct(Punct::new('=', Spacing::Alone)),
                    // claim the next globally unique discriminant
                    TokenTree::Literal(Literal::isize_unsuffixed(
                        INDEX.fetch_add(1, Ordering::Relaxed),
                    )),
                    TokenTree::Punct(Punct::new(',', Spacing::Alone)),
                ]
            }),
        ),
    ]);
    // re-export members so they are usable without the enum prefix
    ts.extend(rust_code!(pub use ));
    ts.extend([name.clone()]);
    ts.extend(rust_code!(::*;));
    // impl Enum { ... }
    ts.extend(compile_internal_impl(meta, &name, members));
    // impl HasContext for Enum { ... }
    ts.extend(compile_trait_impl(meta, &name, members));
    ts
}
/// Generates the inherent `impl` block for a namespace enum.
///
/// Emits two private items consumed by the generated `HasContext` impl:
/// - `const __MEMBERS: [(Self, Iri<'static>); N]` — every member paired
///   with its full IRI
/// - `fn __from_str(s: &str) -> Option<Self>` — lookup by bare member name
fn compile_internal_impl(meta: &NsMeta, name: &TokenTree, members: &[EnumMember]) -> TokenStream {
    let mut ts = rust_code!(impl);
    ts.extend([name.clone()]);
    let impl_body = {
        let mut impl_body = rust_code!(const __MEMBERS: );
        impl_body.extend([
            // [(Self, Iri<'static>); $n]
            make_group_tt(
                Delimiter::Bracket,
                [
                    make_group_tt(
                        Delimiter::Parenthesis,
                        rust_code!(Self, ::iref::Iri<'static>),
                    ),
                    make_punct_alone(';'),
                    TokenTree::Literal(Literal::usize_unsuffixed(members.len())),
                ],
            ),
            make_punct_alone('='),
        ]);
        impl_body.extend([
            // [ (Enum::Name1, iri!("iri1")), (Enum::Name2, iri!("iri2")), ... ]
            make_group_tt(
                Delimiter::Bracket,
                members.iter().flat_map(|memb| {
                    [
                        make_group_tt(Delimiter::Parenthesis, {
                            let mut ts = TokenStream::from_iter([
                                name.clone(),
                                make_punct_joint(':'),
                                make_punct_alone(':'),
                                TokenTree::Ident(memb.name.clone()),
                                make_punct_alone(','),
                            ]);
                            ts.extend(compile_iri_macro_call(&meta.iri_base, Some(memb)));
                            ts
                        }),
                        make_punct_alone(','),
                    ]
                    .into_iter()
                }),
            ),
            TokenTree::Punct(Punct::new(';', Spacing::Alone)),
        ]);
        impl_body.extend(rust_code!(fn __from_str(s: &str) -> Option<Self>));
        let fn_body = {
            let mut fn_body = rust_code!(match s);
            let match_body = {
                // "MemberName" => Some(Enum::MemberName), for every member
                let mut match_body = TokenStream::new();
                for memb in members {
                    let memb_name = memb.name.to_string();
                    match_body.extend([
                        TokenTree::Literal(Literal::string(&memb_name)),
                        make_punct_joint('='),
                        make_punct_alone('>'),
                        make_ident_tt("Some", None),
                        make_group_tt(
                            Delimiter::Parenthesis,
                            [
                                name.clone(),
                                make_punct_joint(':'),
                                make_punct_alone(':'),
                                TokenTree::Ident(memb.name.clone()),
                            ],
                        ),
                        make_punct_alone(','),
                    ]);
                }
                match_body.extend(rust_code!(_ => None,));
                make_group_ts(Delimiter::Brace, match_body)
            };
            fn_body.extend(match_body);
            make_group_ts(Delimiter::Brace, fn_body)
        };
        impl_body.extend(fn_body);
        make_group_ts(Delimiter::Brace, impl_body)
    };
    ts.extend(impl_body);
    ts
}
/// Generates the `impl ap::vocab::HasContext for <name>` block.
fn compile_trait_impl(meta: &NsMeta, name: &TokenTree, members: &[EnumMember]) -> TokenStream {
    // impl HasContext for Enum
    let mut ts = rust_code!(impl);
    // resolve the trait path from the nyanoblog crate root, wherever
    // the macro is invoked from
    ts.extend(absolute_path(None, rust_code!(ap::vocab::HasContext)));
    ts.extend(rust_code!(for));
    ts.extend([name.clone()]);
    let impl_body = {
        // const ... = ...; const ... = ...; ...
        let mut impl_body = compile_impl_const_members(meta, members);
        // fn from_index(...) -> ... { ... }
        impl_body.extend(compile_impl_from_index(name, members));
        impl_body.extend(rust_code!(
            fn as_index(&self) -> usize {
                *self as usize
            }
        ));
        // fn from_iri(...) -> ... { ... }
        impl_body.extend(compile_impl_from_iri());
        make_group_ts(Delimiter::Brace, impl_body)
    };
    ts.extend(impl_body);
    ts
}
/// Generates the associated constants of the `HasContext` impl:
/// `COUNT`, `OFFSET`, `IRI_BASE`, `ALIAS_BASE`, and `MEMBERS`.
fn compile_impl_const_members(meta: &NsMeta, members: &[EnumMember]) -> TokenStream {
    let mut ts = TokenStream::new();
    ts.extend(rust_code!(const COUNT: usize = ));
    ts.extend([
        TokenTree::Literal(Literal::usize_suffixed(members.len())),
        TokenTree::Punct(Punct::new(';', Spacing::Alone)),
    ]);
    ts.extend(rust_code!(const OFFSET: usize = ));
    ts.extend([
        // INDEX was already advanced past this enum's members (see
        // compile_enum), so the first discriminant is the current counter
        // minus the member count
        TokenTree::Literal(Literal::usize_suffixed(
            INDEX.load(Ordering::Relaxed) as usize - members.len(),
        )),
        TokenTree::Punct(Punct::new(';', Spacing::Alone)),
    ]);
    ts.extend(rust_code!(const IRI_BASE: ::iref::Iri<'static> = ));
    ts.extend(compile_iri_macro_call(&meta.iri_base, None));
    ts.extend([make_punct_alone(';')]);
    ts.extend(rust_code!(const ALIAS_BASE: ::core::option::Option<::iref::Iri<'static>> =));
    match meta.alias_base.as_ref() {
        Some(s) => {
            ts.extend(rust_code!(Some));
            ts.extend(make_group_ts(
                Delimiter::Parenthesis,
                compile_iri_macro_call(s, None),
            ));
        }
        None => ts.extend(rust_code!(None)),
    }
    ts.extend(rust_code!(;));
    // this is so we get a ref with static lifetime; see compile_internal_impl()
    ts.extend(rust_code!(
        const MEMBERS: &'static [(Self, ::iref::Iri<'static>)] = &Self::__MEMBERS;
    ));
    ts
}
/// Generates the `from_index` method of the `HasContext` impl.
///
/// The emitted code matches the runtime index against each member's
/// discriminant: `x if x == Enum::Member as usize => Some(Enum::Member)`.
fn compile_impl_from_index(name: &TokenTree, members: &[EnumMember]) -> TokenStream {
    let mut ts = rust_code!(fn from_index(index: usize) -> Option<Self>);
    let fn_body = {
        let mut fn_body = rust_code!(match index);
        let match_body = {
            let mut match_body = TokenStream::new();
            for member in members {
                match_body.extend(rust_code!(x if x == ));
                match_body.extend([
                    name.clone(),
                    make_punct_joint(':'),
                    make_punct_alone(':'),
                    TokenTree::Ident(member.name.clone()),
                ]);
                match_body.extend(rust_code!(as usize => Some));
                match_body.extend(make_group_ts(
                    Delimiter::Parenthesis,
                    [
                        name.clone(),
                        make_punct_joint(':'),
                        make_punct_alone(':'),
                        TokenTree::Ident(member.name.clone()),
                    ],
                ));
                match_body.extend([make_punct_alone(',')]);
            }
            match_body.extend(rust_code!(_ => None,));
            make_group_ts(Delimiter::Brace, match_body)
        };
        fn_body.extend(match_body);
        make_group_ts(Delimiter::Brace, fn_body)
    };
    ts.extend(fn_body);
    ts
}
/// Generates the `from_iri` method of the `HasContext` impl.
///
/// The emitted code resolves an IRI back to an enum member by stripping the
/// namespace base (or the alias base, if one was configured) and looking up
/// the remainder via the generated `__from_str`.
fn compile_impl_from_iri() -> TokenStream {
    // `strip_prefix` both tests for the prefix and yields the remainder as a
    // guaranteed-valid &str, so the previous unsafe
    // `from_utf8_unchecked(&bytes[base.len()..])` dance is unnecessary.
    rust_code!(
        fn from_iri(iri: &::iref::Iri) -> Option<Self> {
            let iri = iri.as_str();
            if let Some(suffix) = iri.strip_prefix(Self::IRI_BASE.as_str()) {
                Self::__from_str(suffix)
            } else if let Some(alias_base) = Self::ALIAS_BASE.as_ref() {
                Self::__from_str(iri.strip_prefix(alias_base.as_str())?)
            } else {
                None
            }
        }
    )
}
/// Emits a `::static_iref::iri!("…")` invocation for the namespace base,
/// optionally with a member name appended (for per-member IRI constants).
fn compile_iri_macro_call(iri_base: &str, member: Option<&EnumMember>) -> TokenStream {
    let mut iri = String::from(iri_base);
    if let Some(member) = member {
        iri.push_str(&member.name.to_string());
    }
    let mut ts = rust_code!(::static_iref::iri!);
    let arg = [TokenTree::Literal(Literal::string(&iri))];
    ts.extend(make_group_ts(Delimiter::Parenthesis, arg));
    ts
}

14
macros/src/lib.rs Normal file
View file

@ -0,0 +1,14 @@
use proc_macro::TokenStream;
mod define_json_ns;
mod util;
// XXX proc_macro_diagnostics isn't stable yet, so we just panic on errors for now
/// Defines a JSON-LD namespace module; see [`define_json_ns::invoke`] for the
/// accepted grammar.
#[proc_macro]
pub fn define_json_ns(input: TokenStream) -> TokenStream {
    define_json_ns::invoke(input).unwrap_or_else(|e| panic!("{e}"))
}

103
macros/src/util.rs Normal file
View file

@ -0,0 +1,103 @@
use proc_macro::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use proc_macro_crate::{crate_name, FoundCrate};
use std::fmt;
/// Converts literal Rust tokens into a [`TokenStream`] by stringifying them
/// and re-parsing; panics (at macro-expansion time) if the tokens do not
/// form valid Rust.
macro_rules! rust_code {
    ($($tt:tt)*) => {
        <::proc_macro::TokenStream as ::std::str::FromStr>::from_str(stringify!($($tt)*))
            .expect("Expected valid rust code")
    }
}
pub(crate) use rust_code;
/// A parse/compile error carrying the span it originated from.
#[derive(Debug)]
pub struct Error {
    msg: String,
    span: Span,
}
impl fmt::Display for Error {
    /// Formats the message together with its source span.
    // `write!` rather than `writeln!`: Display output conventionally carries
    // no trailing newline (callers like `panic!("{e}")` add their own).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{} (at {:?})", &self.msg, self.span)
    }
}

impl std::error::Error for Error {}

/// Result alias used by every fallible parser/compiler helper in this crate.
pub type Result<T> = std::result::Result<T, Error>;
/// Builds an [`Error`]; the span defaults to the macro call site when `None`.
pub fn error(msg: impl Into<String>, span: impl Into<Option<Span>>) -> Error {
    Error {
        msg: msg.into(),
        span: span.into().unwrap_or_else(Span::call_site),
    }
}

/// Shorthand for `Err(error(msg, span))`.
pub fn err<T>(msg: impl Into<String>, span: impl Into<Option<Span>>) -> Result<T> {
    Err(error(msg, span))
}
/// Builds an "Expected X, got Y" error result.
pub fn err_unexpected<T, D, S>(expected: &str, actual: D, span: S) -> Result<T>
where
    D: fmt::Display,
    S: Into<Option<Span>>,
{
    err(
        format!("Expected `{expected}`, got `{actual}` instead"),
        span,
    )
}
/// Builds the error reported when a token stream ends prematurely.
pub fn error_end() -> Error {
    error("Unexpected end of stream", None)
}

/// Shorthand for `Err(error_end())`.
pub fn err_end<T>() -> Result<T> {
    Err(error_end())
}
/// Blanket alias for any iterator over [`TokenTree`]s (i.e. a token stream).
pub trait TtIter: Iterator<Item = TokenTree> {}
impl<T: Iterator<Item = TokenTree>> TtIter for T {}
/// Prefixes `path` with the crate root of `nyanoblog` so generated code
/// resolves regardless of where the macro is invoked.
///
/// Uses `proc_macro_crate` to map the crate name: inside `nyanoblog` itself
/// this yields `crate::<path>`, elsewhere `<resolved name>::<path>`.
/// Panics at expansion time if the crate is not found in Cargo.toml.
pub fn absolute_path(span: impl Into<Option<Span>>, path: TokenStream) -> TokenStream {
    let span = span.into().unwrap_or_else(Span::call_site);
    let root = match crate_name("nyanoblog") {
        Ok(found) => match found {
            FoundCrate::Itself => "crate".to_owned(),
            FoundCrate::Name(name) => name,
        },
        Err(e) => panic!("cannot find crate nyanoblog: {e}"),
    };
    let mut absolute = make_ident_ts(&root, span);
    absolute.extend(rust_code!(::));
    absolute.extend(path);
    absolute
}
/// Builds an identifier token; the span defaults to the call site when `None`.
pub fn make_ident_tt(name: &str, span: impl Into<Option<Span>>) -> TokenTree {
    let span = span.into().unwrap_or_else(Span::call_site);
    TokenTree::Ident(Ident::new(name, span))
}
/// Like [`make_ident_tt`], but wraps the identifier in a one-token stream.
pub fn make_ident_ts(name: &str, span: impl Into<Option<Span>>) -> TokenStream {
    TokenStream::from_iter([make_ident_tt(name, span)])
}
/// Wraps `inner` in a delimited group token.
pub fn make_group_tt(delim: Delimiter, inner: impl IntoIterator<Item = TokenTree>) -> TokenTree {
    let body: TokenStream = inner.into_iter().collect();
    TokenTree::Group(Group::new(delim, body))
}
/// Like [`make_group_tt`], but returns the group as a one-token stream.
pub fn make_group_ts(delim: Delimiter, inner: impl IntoIterator<Item = TokenTree>) -> TokenStream {
    [make_group_tt(delim, inner)].into_iter().collect()
}
/// Builds a punctuation token with [`Spacing::Alone`].
pub fn make_punct_alone(c: char) -> TokenTree {
    let punct = Punct::new(c, Spacing::Alone);
    TokenTree::Punct(punct)
}

/// Builds a punctuation token with [`Spacing::Joint`]
/// (i.e. glued to the following punctuation character).
pub fn make_punct_joint(c: char) -> TokenTree {
    let punct = Punct::new(c, Spacing::Joint);
    TokenTree::Punct(punct)
}

View file

@ -17,7 +17,7 @@ use hashbrown::HashSet;
use iref::Iri;
use json_ld::{syntax::Value, Loader, Profile, RemoteDocument};
use locspan::{Meta, Span};
use rdf_types::{vocabulary::IriIndex, IriVocabulary, IriVocabularyMut};
use rdf_types::{IriVocabulary, IriVocabularyMut};
use reqwest::{
header::{ACCEPT, CONTENT_TYPE, LINK, LOCATION},
StatusCode,
@ -25,6 +25,7 @@ use reqwest::{
use std::hash::Hash;
use std::ops::ControlFlow;
use crate::ap::vocab::NyaIri;
use crate::core::*;
use crate::headers;
use crate::state::AppState;
@ -34,7 +35,7 @@ use crate::util::http::{
Response,
};
pub struct CachedLoader<I = IriIndex, M = Span, T = Value<M>> {
pub struct CachedLoader<I = NyaIri, M = Span, T = Value<M>> {
state: AppState,
parser: Box<DynParser<I, M, T>>,
}

View file

@ -5,20 +5,18 @@ use json_ld::{
use locspan::{Meta, Span};
use mime::Mime;
use rdf_types::vocabulary::BlankIdIndex;
use rdf_types::{vocabulary::IriIndex, IndexVocabulary, Vocabulary};
use crate::ap::vocab::{NyaIri, NyaVocabulary};
use crate::ap::{
loader::CachedLoader,
trans::{ApDocument, ParseApub},
vocab::Ids,
};
use crate::core::*;
use crate::state::AppState;
/// Main API for handling ActivityPub ingress, called by [`crate::job::inbox::InboxWorker`].
pub async fn process_document(state: &AppState, raw: &str, mime: &Mime) -> Result<()> {
let mut vocab: IndexVocabulary = IndexVocabulary::new();
let indices = Ids::populate(&mut vocab);
let mut vocab = NyaVocabulary::new();
let json = preprocess(raw)?;
let rd = RemoteDocument::new(None, Some(mime.clone()), json);
@ -26,14 +24,13 @@ pub async fn process_document(state: &AppState, raw: &str, mime: &Mime) -> Resul
let mut loader = CachedLoader::new_with(state.clone(), move |_vocab, _iri, bytes| {
let content = std::str::from_utf8(bytes.as_ref())
.map_err(|e| Error::MalformedApub(format!("Invalid encoding: {e}")))?;
preprocess(&content)
preprocess(content)
});
let rd = rd.expand_with(&mut vocab, &mut loader).await.unwrap();
let vocab = vocab;
// this loop will usually only run once (one object per request)
for object in rd.into_value() {
if let Err(e) = process_object(object, &vocab, &indices).await {
if let Err(e) = process_object(object).await {
error!("Error in remote document: {e}");
}
}
@ -46,12 +43,8 @@ fn preprocess(raw: &str) -> Result<Meta<Value<Span>, Span>> {
.map_err(|e| Error::MalformedApub(format!("{e}")))
}
async fn process_object(
obj: IndexedObject<IriIndex, BlankIdIndex, Span>,
vocab: &impl Vocabulary<Iri = IriIndex>,
ids: &Ids,
) -> Result<()> {
let document = ApDocument::parse_apub(&obj, vocab, ids);
async fn process_object(obj: IndexedObject<NyaIri, BlankIdIndex, Span>) -> Result<()> {
let document = ApDocument::parse_apub(&obj);
if let Some(doc) = document {
debug!("\nParsed document:\n{doc:?}");
}

View file

@ -1,12 +1,10 @@
use chrono::NaiveDateTime;
use rdf_types::vocabulary::IriIndex;
use rdf_types::Vocabulary;
use std::fmt;
use crate::ap::trans::{
matches_type, AbstractObject, ApDocument, DebugApub, ParseApub, PropHelper, RawObject,
};
use crate::ap::vocab::Ids;
use crate::ap::vocab::apub;
pub struct AbstractActivity {
_super: AbstractObject,
@ -18,23 +16,22 @@ pub struct AbstractActivity {
}
ap_extends!(AbstractActivity, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for AbstractActivity {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
matches_type(obj, &ids.apub.object.activity)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl ParseApub for AbstractActivity {
fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, apub::Activity)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
let prop_ids = &ids.apub.property;
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self {
AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s,
actor: ph.parse_prop_vec(&prop_ids.actor),
object: ph.parse_prop_vec(&prop_ids.object),
target: ph.parse_prop_box(&prop_ids.target),
origin: ph.parse_prop_box(&prop_ids.origin),
instrument: ph.parse_prop_vec(&prop_ids.instrument),
actor: ph.parse_prop_vec(apub::actor),
object: ph.parse_prop_vec(apub::object),
target: ph.parse_prop_box(apub::target),
origin: ph.parse_prop_box(apub::origin),
instrument: ph.parse_prop_vec(apub::instrument),
})
}
}
@ -85,37 +82,37 @@ pub enum Activity {
View(View),
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Activity {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
Accept::parse_apub(obj, vocab, ids)
impl ParseApub for Activity {
fn parse_apub(obj: &RawObject) -> Option<Self> {
Accept::parse_apub(obj)
.map(Self::Accept)
.or_else(|| Add::parse_apub(obj, vocab, ids).map(Self::Add))
.or_else(|| Announce::parse_apub(obj, vocab, ids).map(Self::Announce))
.or_else(|| Arrive::parse_apub(obj, vocab, ids).map(Self::Arrive))
.or_else(|| Block::parse_apub(obj, vocab, ids).map(Self::Block))
.or_else(|| Create::parse_apub(obj, vocab, ids).map(Self::Create))
.or_else(|| Delete::parse_apub(obj, vocab, ids).map(Self::Delete))
.or_else(|| Dislike::parse_apub(obj, vocab, ids).map(Self::Dislike))
.or_else(|| Flag::parse_apub(obj, vocab, ids).map(Self::Flag))
.or_else(|| Follow::parse_apub(obj, vocab, ids).map(Self::Follow))
.or_else(|| Ignore::parse_apub(obj, vocab, ids).map(Self::Ignore))
.or_else(|| Invite::parse_apub(obj, vocab, ids).map(Self::Invite))
.or_else(|| Join::parse_apub(obj, vocab, ids).map(Self::Join))
.or_else(|| Leave::parse_apub(obj, vocab, ids).map(Self::Leave))
.or_else(|| Like::parse_apub(obj, vocab, ids).map(Self::Like))
.or_else(|| Listen::parse_apub(obj, vocab, ids).map(Self::Listen))
.or_else(|| Move::parse_apub(obj, vocab, ids).map(Self::Move))
.or_else(|| Offer::parse_apub(obj, vocab, ids).map(Self::Offer))
.or_else(|| Question::parse_apub(obj, vocab, ids).map(Self::Question))
.or_else(|| Reject::parse_apub(obj, vocab, ids).map(Self::Reject))
.or_else(|| Read::parse_apub(obj, vocab, ids).map(Self::Read))
.or_else(|| Remove::parse_apub(obj, vocab, ids).map(Self::Remove))
.or_else(|| TentativeReject::parse_apub(obj, vocab, ids).map(Self::TentativeReject))
.or_else(|| TentativeAccept::parse_apub(obj, vocab, ids).map(Self::TentativeAccept))
.or_else(|| Travel::parse_apub(obj, vocab, ids).map(Self::Travel))
.or_else(|| Undo::parse_apub(obj, vocab, ids).map(Self::Undo))
.or_else(|| Update::parse_apub(obj, vocab, ids).map(Self::Update))
.or_else(|| View::parse_apub(obj, vocab, ids).map(Self::View))
.or_else(|| Add::parse_apub(obj).map(Self::Add))
.or_else(|| Announce::parse_apub(obj).map(Self::Announce))
.or_else(|| Arrive::parse_apub(obj).map(Self::Arrive))
.or_else(|| Block::parse_apub(obj).map(Self::Block))
.or_else(|| Create::parse_apub(obj).map(Self::Create))
.or_else(|| Delete::parse_apub(obj).map(Self::Delete))
.or_else(|| Dislike::parse_apub(obj).map(Self::Dislike))
.or_else(|| Flag::parse_apub(obj).map(Self::Flag))
.or_else(|| Follow::parse_apub(obj).map(Self::Follow))
.or_else(|| Ignore::parse_apub(obj).map(Self::Ignore))
.or_else(|| Invite::parse_apub(obj).map(Self::Invite))
.or_else(|| Join::parse_apub(obj).map(Self::Join))
.or_else(|| Leave::parse_apub(obj).map(Self::Leave))
.or_else(|| Like::parse_apub(obj).map(Self::Like))
.or_else(|| Listen::parse_apub(obj).map(Self::Listen))
.or_else(|| Move::parse_apub(obj).map(Self::Move))
.or_else(|| Offer::parse_apub(obj).map(Self::Offer))
.or_else(|| Question::parse_apub(obj).map(Self::Question))
.or_else(|| Reject::parse_apub(obj).map(Self::Reject))
.or_else(|| Read::parse_apub(obj).map(Self::Read))
.or_else(|| Remove::parse_apub(obj).map(Self::Remove))
.or_else(|| TentativeReject::parse_apub(obj).map(Self::TentativeReject))
.or_else(|| TentativeAccept::parse_apub(obj).map(Self::TentativeAccept))
.or_else(|| Travel::parse_apub(obj).map(Self::Travel))
.or_else(|| Undo::parse_apub(obj).map(Self::Undo))
.or_else(|| Update::parse_apub(obj).map(Self::Update))
.or_else(|| View::parse_apub(obj).map(Self::View))
}
}
@ -234,21 +231,20 @@ pub struct Question {
}
ap_extends!(Question, AbstractActivity);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Question {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
matches_type(obj, &ids.apub.object.question)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl ParseApub for Question {
fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, apub::Question)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
let prop_ids = &ids.apub.property;
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
AbstractActivity::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self {
AbstractActivity::_parse_apub_unchecked(obj).map(|s| Self {
_super: s,
one_of: ph.parse_prop_vec(&prop_ids.one_of),
any_of: ph.parse_prop_vec(&prop_ids.any_of),
closed: ph.parse_prop(&prop_ids.closed),
one_of: ph.parse_prop_vec(apub::oneOf),
any_of: ph.parse_prop_vec(apub::anyOf),
closed: ph.parse_prop(apub::closed),
})
}
}
@ -262,42 +258,30 @@ impl DebugApub for Question {
}
}
ap_empty_child_impl!(Accept, AbstractActivity, apub, activity, accept);
ap_empty_child_impl!(Add, AbstractActivity, apub, activity, add);
ap_empty_child_impl!(Announce, AbstractActivity, apub, activity, announce);
ap_empty_child_impl!(Arrive, AbstractActivity, apub, activity, arrive);
ap_empty_child_impl!(Block, Ignore, apub, activity, block);
ap_empty_child_impl!(Create, AbstractActivity, apub, activity, create);
ap_empty_child_impl!(Delete, AbstractActivity, apub, activity, delete);
ap_empty_child_impl!(Dislike, AbstractActivity, apub, activity, dislike);
ap_empty_child_impl!(Flag, AbstractActivity, apub, activity, flag);
ap_empty_child_impl!(Follow, AbstractActivity, apub, activity, follow);
ap_empty_child_impl!(Ignore, AbstractActivity, apub, activity, ignore);
ap_empty_child_impl!(Invite, AbstractActivity, apub, activity, invite);
ap_empty_child_impl!(Join, AbstractActivity, apub, activity, join);
ap_empty_child_impl!(Leave, AbstractActivity, apub, activity, leave);
ap_empty_child_impl!(Like, AbstractActivity, apub, activity, like);
ap_empty_child_impl!(Listen, AbstractActivity, apub, activity, listen);
ap_empty_child_impl!(Move, AbstractActivity, apub, activity, mov);
ap_empty_child_impl!(Offer, AbstractActivity, apub, activity, offer);
ap_empty_child_impl!(Reject, AbstractActivity, apub, activity, reject);
ap_empty_child_impl!(Read, AbstractActivity, apub, activity, read);
ap_empty_child_impl!(Remove, AbstractActivity, apub, activity, remove);
ap_empty_child_impl!(
TentativeReject,
AbstractActivity,
apub,
activity,
tentative_reject
);
ap_empty_child_impl!(
TentativeAccept,
AbstractActivity,
apub,
activity,
tentative_accept
);
ap_empty_child_impl!(Travel, AbstractActivity, apub, activity, travel);
ap_empty_child_impl!(Undo, AbstractActivity, apub, activity, undo);
ap_empty_child_impl!(Update, AbstractActivity, apub, activity, update);
ap_empty_child_impl!(View, AbstractActivity, apub, activity, view);
ap_empty_child_impl!(Accept, AbstractActivity, apub::Accept);
ap_empty_child_impl!(Add, AbstractActivity, apub::Add);
ap_empty_child_impl!(Announce, AbstractActivity, apub::Announce);
ap_empty_child_impl!(Arrive, AbstractActivity, apub::Arrive);
ap_empty_child_impl!(Block, Ignore, apub::Block);
ap_empty_child_impl!(Create, AbstractActivity, apub::Create);
ap_empty_child_impl!(Delete, AbstractActivity, apub::Delete);
ap_empty_child_impl!(Dislike, AbstractActivity, apub::Dislike);
ap_empty_child_impl!(Flag, AbstractActivity, apub::Flag);
ap_empty_child_impl!(Follow, AbstractActivity, apub::Follow);
ap_empty_child_impl!(Ignore, AbstractActivity, apub::Ignore);
ap_empty_child_impl!(Invite, AbstractActivity, apub::Invite);
ap_empty_child_impl!(Join, AbstractActivity, apub::Join);
ap_empty_child_impl!(Leave, AbstractActivity, apub::Leave);
ap_empty_child_impl!(Like, AbstractActivity, apub::Like);
ap_empty_child_impl!(Listen, AbstractActivity, apub::Listen);
ap_empty_child_impl!(Move, AbstractActivity, apub::Move);
ap_empty_child_impl!(Offer, AbstractActivity, apub::Offer);
ap_empty_child_impl!(Reject, AbstractActivity, apub::Reject);
ap_empty_child_impl!(Read, AbstractActivity, apub::Read);
ap_empty_child_impl!(Remove, AbstractActivity, apub::Remove);
ap_empty_child_impl!(TentativeReject, AbstractActivity, apub::TentativeReject);
ap_empty_child_impl!(TentativeAccept, AbstractActivity, apub::TentativeAccept);
ap_empty_child_impl!(Travel, AbstractActivity, apub::Travel);
ap_empty_child_impl!(Undo, AbstractActivity, apub::Undo);
ap_empty_child_impl!(Update, AbstractActivity, apub::Update);
ap_empty_child_impl!(View, AbstractActivity, apub::View);

View file

@ -1,18 +1,16 @@
use iref::IriBuf;
use rdf_types::vocabulary::IriIndex;
use rdf_types::Vocabulary;
use std::fmt;
use crate::ap::trans::{AbstractObject, ApDocument, DebugApub, ParseApub, PropHelper, RawObject};
use crate::ap::vocab::Ids;
use crate::ap::vocab::{apub, toot, NyaIri};
pub struct AbstractActor {
_super: AbstractObject,
inbox: Option<IriBuf>,
outbox: Option<IriBuf>,
following: Option<IriBuf>,
followers: Option<IriBuf>,
liked: Option<IriBuf>,
inbox: Option<NyaIri>,
outbox: Option<NyaIri>,
following: Option<NyaIri>,
followers: Option<NyaIri>,
liked: Option<NyaIri>,
preferred_username: Option<String>,
// Mastodon extensions
featured: Option<Box<ApDocument>>,
@ -22,23 +20,23 @@ pub struct AbstractActor {
}
ap_extends!(AbstractActor, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for AbstractActor {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
impl ParseApub for AbstractActor {
fn parse_apub(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
let result = unsafe { AbstractObject::_parse_apub_unchecked(obj, vocab, ids) };
let result = unsafe { AbstractObject::_parse_apub_unchecked(obj) };
result.map(|s| Self {
_super: s,
inbox: ph.parse_prop(&ids.apub.property.inbox),
outbox: ph.parse_prop(&ids.apub.property.outbox),
following: ph.parse_prop(&ids.apub.property.following),
followers: ph.parse_prop(&ids.apub.property.followers),
liked: ph.parse_prop(&ids.apub.property.liked),
preferred_username: ph.parse_prop(&ids.apub.property.preferred_username),
featured: ph.parse_prop_box(&ids.toot.props.featured),
featured_tags: ph.parse_prop_box(&ids.toot.props.featured_tags),
discoverable: ph.parse_prop(&ids.toot.props.discoverable),
suspended: ph.parse_prop(&ids.toot.props.suspended),
inbox: ph.parse_prop(apub::inbox),
outbox: ph.parse_prop(apub::outbox),
following: ph.parse_prop(apub::following),
followers: ph.parse_prop(apub::followers),
liked: ph.parse_prop(apub::liked),
preferred_username: ph.parse_prop(apub::preferredUsername),
featured: ph.parse_prop_box(toot::featured),
featured_tags: ph.parse_prop_box(toot::featuredTags),
discoverable: ph.parse_prop(toot::discoverable),
suspended: ph.parse_prop(toot::suspended),
})
}
}
@ -71,14 +69,14 @@ pub enum Actor {
Service(Service),
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Actor {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
Application::parse_apub(obj, vocab, ids)
impl ParseApub for Actor {
fn parse_apub(obj: &RawObject) -> Option<Self> {
Application::parse_apub(obj)
.map(Self::Application)
.or_else(|| Group::parse_apub(obj, vocab, ids).map(Self::Group))
.or_else(|| Organization::parse_apub(obj, vocab, ids).map(Self::Organization))
.or_else(|| Person::parse_apub(obj, vocab, ids).map(Self::Person))
.or_else(|| Service::parse_apub(obj, vocab, ids).map(Self::Service))
.or_else(|| Group::parse_apub(obj).map(Self::Group))
.or_else(|| Organization::parse_apub(obj).map(Self::Organization))
.or_else(|| Person::parse_apub(obj).map(Self::Person))
.or_else(|| Service::parse_apub(obj).map(Self::Service))
}
}
@ -117,8 +115,8 @@ impl DebugApub for Actor {
}
}
ap_empty_child_impl!(Application, AbstractActor, apub, object, application);
ap_empty_child_impl!(Group, AbstractActor, apub, object, group);
ap_empty_child_impl!(Organization, AbstractActor, apub, object, organization);
ap_empty_child_impl!(Person, AbstractActor, apub, object, person);
ap_empty_child_impl!(Service, AbstractActor, apub, object, service);
ap_empty_child_impl!(Application, AbstractActor, apub::Application);