ap: refactor to use custom vocab and iri types

IndexVocabulary and IriIndex have been replaced
with NyaVocabulary and NyaIri, and the JSON-LD
namespaces are now defined through a proc macro.
anna 2023-08-10 06:41:16 +02:00
parent 891c6a9a39
commit 3d71b04338
Signed by: fef
GPG key ID: 2585C2DC6D79B485
14 changed files with 1611 additions and 833 deletions
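
As a quick orientation for the change described above: an invocation of the new define_json_ns! proc macro (added in macros/src/define_json_ns.rs below) might look roughly like the following sketch. This is not part of the diff; the module name, IRIs, and member lists are illustrative placeholders inferred from the macro's parser.

    // Hypothetical invocation; mod_name, the IRIs, and the members below are
    // placeholders, not the crate's actual vocabulary definitions.
    nyanoblog_macros::define_json_ns! {
        meta {
            mod_name = apub,
            iri_base = "https://www.w3.org/ns/activitystreams#",
            alias_base = "https://example.test/ns#",
        },
        class {
            Activity,
            Person,
        },
        prop {
            actor,
            inbox,
            preferredUsername,
        },
    }

The generated module exposes its members as plain enum variants, so call sites can pass e.g. apub::Activity to matches_type() and apub::actor to PropHelper::parse_prop() instead of going through the old Ids struct and an IndexVocabulary.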

78
Cargo.lock generated

@ -296,6 +296,12 @@ dependencies = [
"num-traits", "num-traits",
] ]
[[package]]
name = "atomic"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba"
[[package]] [[package]]
name = "autocfg" name = "autocfg"
version = "1.1.0" version = "1.1.0"
@ -1082,6 +1088,19 @@ dependencies = [
"want", "want",
] ]
[[package]]
name = "hyper-rustls"
version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c"
dependencies = [
"http",
"hyper",
"rustls 0.20.7",
"tokio",
"tokio-rustls",
]
[[package]] [[package]]
name = "hyper-tls" name = "hyper-tls"
version = "0.5.0" version = "0.5.0"
@ -1598,9 +1617,9 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]] [[package]]
name = "mime" name = "mime"
version = "0.3.16" version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]] [[package]]
name = "minimal-lexical" name = "minimal-lexical"
@ -1738,9 +1757,11 @@ dependencies = [
"iref", "iref",
"json-ld", "json-ld",
"jsonwebtoken", "jsonwebtoken",
"langtag",
"locspan", "locspan",
"log", "log",
"mime", "mime",
"nyanoblog-macros",
"pretty_env_logger", "pretty_env_logger",
"rand", "rand",
"rdf-types", "rdf-types",
@ -1755,6 +1776,13 @@ dependencies = [
"uuid", "uuid",
] ]
[[package]]
name = "nyanoblog-macros"
version = "0.1.0"
dependencies = [
"proc-macro-crate",
]
[[package]] [[package]]
name = "once_cell" name = "once_cell"
version = "1.16.0" version = "1.16.0"
@ -1957,6 +1985,16 @@ dependencies = [
"log", "log",
] ]
[[package]]
name = "proc-macro-crate"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919"
dependencies = [
"once_cell",
"toml_edit",
]
[[package]] [[package]]
name = "proc-macro-error" name = "proc-macro-error"
version = "1.0.4" version = "1.0.4"
@ -2094,6 +2132,7 @@ dependencies = [
"http", "http",
"http-body", "http-body",
"hyper", "hyper",
"hyper-rustls",
"hyper-tls", "hyper-tls",
"ipnet", "ipnet",
"js-sys", "js-sys",
@ -2104,16 +2143,19 @@ dependencies = [
"percent-encoding", "percent-encoding",
"pin-project-lite", "pin-project-lite",
"rustls 0.20.7", "rustls 0.20.7",
"rustls-pemfile",
"serde", "serde",
"serde_json", "serde_json",
"serde_urlencoded", "serde_urlencoded",
"tokio", "tokio",
"tokio-native-tls", "tokio-native-tls",
"tokio-rustls",
"tower-service", "tower-service",
"url", "url",
"wasm-bindgen", "wasm-bindgen",
"wasm-bindgen-futures", "wasm-bindgen-futures",
"web-sys", "web-sys",
"webpki-roots 0.22.6",
"winreg", "winreg",
] ]
@ -2880,6 +2922,23 @@ dependencies = [
"tracing", "tracing",
] ]
[[package]]
name = "toml_datetime"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b"
[[package]]
name = "toml_edit"
version = "0.19.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a"
dependencies = [
"indexmap 2.0.0",
"toml_datetime",
"winnow",
]
[[package]] [[package]]
name = "tower-service" name = "tower-service"
version = "0.3.2" version = "0.3.2"
@ -2989,11 +3048,13 @@ dependencies = [
[[package]] [[package]]
name = "uuid" name = "uuid"
version = "1.2.2" version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "422ee0de9031b5b948b97a8fc04e3aa35230001a722ddd27943e0be31564ce4c" checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d"
dependencies = [ dependencies = [
"atomic",
"getrandom", "getrandom",
"serde",
] ]
[[package]] [[package]]
@ -3342,6 +3403,15 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
[[package]]
name = "winnow"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acaaa1190073b2b101e15083c38ee8ec891b5e05cbee516521e94ec008f61e64"
dependencies = [
"memchr",
]
[[package]] [[package]]
name = "winreg" name = "winreg"
version = "0.10.1" version = "0.10.1"

Cargo.toml

@ -1,29 +1,40 @@
 [package]
 name = "nyanoblog"
-version = "0.1.0"
 edition = "2021"
+version.workspace = true
+
+[workspace]
+members = [
+"macros"
+]
+
+[workspace.package]
+version = "0.1.0"

 [dependencies]
+nyanoblog-macros = { path = "./macros" }
 actix-rt = "2.7"
 actix-web = { version = "4", features = ["rustls"] }
 argon2 = "0.5.1"
 async-trait = "0.1.59"
 base64 = "0.21"
 bytes = "1.3"
-chrono = { version = "0.4", features = [ "alloc", "clock", "serde" ] }
+chrono = { version = "0.4", features = [ "alloc", "clock", "serde", "std" ] }
 dotenvy = "0.15.6"
 futures = "0.3"
 hashbrown = "0.13.1"
 iref = "2.2"
 json-ld = { version = "0.15.0" }
 jsonwebtoken = { version = "8", default-features = false }
+langtag = "0.3.4"
 locspan = "0.7"
 log = "0.4"
-mime = "0.3"
+mime = "0.3.17"
 pretty_env_logger = "0.5.0"
 rand = "0.8"
 rdf-types = "0.15.4"
-reqwest = { version = "0.11", features = [ "rustls" ] }
+reqwest = { version = "0.11", features = [ "rustls-tls" ] }
 rsa = { version = "0.9.2", features = [ "sha2" ] }
 serde = { version = "1.0", features = [ "derive" ] }
 serde_json = "1.0"
@ -31,4 +42,4 @@ serde_test = "1.0"
 sqlx = { version = "0.7.1", features = [ "chrono", "postgres", "runtime-tokio", "tls-rustls", "uuid" ] }
 static-iref = "2"
 tokio = "1.23"
-uuid = { version = "1.2", features = [ "v4" ] }
+uuid = { version = "1.4", features = [ "v4", "v7", "serde" ] }

10
macros/Cargo.toml Normal file

@ -0,0 +1,10 @@
[package]
name = "nyanoblog-macros"
edition = "2021"
version.workspace = true

[lib]
proc-macro = true

[dependencies]
proc-macro-crate = "1.3.1"

523
macros/src/define_json_ns.rs Normal file

@ -0,0 +1,523 @@
use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use std::iter::Peekable;
use std::sync::atomic::{AtomicIsize, Ordering};
use crate::util::*;
// "forgive me, rustc, for i have sinned"
//
// good luck to whoever is trying to read not to mention understand this lmao
/// This is a global counter for unique indices because every invocation
/// of this macro should yield an enum with unique numeric representations
static INDEX: AtomicIsize = AtomicIsize::new(1);
pub fn invoke(input: TokenStream) -> Result<TokenStream> {
let mut stream = input.into_iter().peekable();
// parse the thing
let (mut meta, mut classes, mut props) = (None, None, None);
while let Some(tt) = stream.next() {
if let TokenTree::Ident(ident) = &tt {
let name = ident.to_string();
match name.as_str() {
"meta" => {
let body = eat_group(&mut stream, Delimiter::Brace)?;
if meta.replace(eat_meta(body.stream())?).is_some() {
return err("Duplicate definition of metadata", ident.span());
}
}
"class" => {
let body = eat_group(&mut stream, Delimiter::Brace)?;
let body = eat_enum_members(body.stream())?;
if classes.replace((body, ident.span())).is_some() {
return err("Duplicate class definition", ident.span());
}
}
"prop" => {
let body = eat_group(&mut stream, Delimiter::Brace)?;
let body = eat_enum_members(body.stream())?;
if props.replace((body, ident.span())).is_some() {
return err("Duplicate props definition", ident.span());
}
}
name => return err(format!("Unknown section \"{name}\""), ident.span()),
}
eat_maybe_comma(&mut stream);
}
}
// now spit out some new tokens
let meta = meta.ok_or_else(|| error("Missing meta block", None))?;
let mut ts = TokenStream::new();
if let Some((classes, span)) = classes {
ts.extend(compile_enum(
&meta,
make_ident_tt("__Class", None),
&classes,
span,
));
}
if let Some((props, span)) = props {
ts.extend(compile_enum(
&meta,
make_ident_tt("__Prop", None),
&props,
span,
));
}
let mut module = rust_code!(pub mod );
module.extend([meta.mod_name, make_group_tt(Delimiter::Brace, ts)]);
//println!("{module}");
Ok(module)
}
struct NsMeta {
mod_name: TokenTree,
iri_base: String,
alias_base: Option<String>,
}
#[derive(Clone)]
struct EnumMember {
name: Ident,
}
//
// parser
//
// meta { key1 = val1, key2 = val2, ... }
fn eat_meta(stream: TokenStream) -> Result<NsMeta> {
let mut stream = stream.into_iter().peekable();
let mut mod_name = None;
let mut iri_base = None;
let mut alias_base = None;
let mut had_preceding_comma = true;
while let Some(ident) = eat_ident_or_end(&mut stream)? {
if !had_preceding_comma {
return err("Expected a comma", ident.span());
}
let name = ident.to_string();
match name.as_str() {
"mod_name" => {
if mod_name.replace(eat_assign_ident(&mut stream)?).is_some() {
return err("Duplicate definition of key \"mod_name\"", ident.span());
}
}
"iri_base" => {
if iri_base.replace(eat_assign_string(&mut stream)?).is_some() {
return err("Duplicate definition of key \"iri_base\"", ident.span());
}
}
"alias_base" => {
if alias_base
.replace(eat_assign_string(&mut stream)?)
.is_some()
{
return err("Duplicate definition of key \"alias_base\"", ident.span());
}
}
name => return err(format!("Unknown key \"{name}\""), ident.span()),
}
had_preceding_comma = eat_maybe_comma(&mut stream);
}
Ok(NsMeta {
mod_name: mod_name.ok_or_else(|| error("Missing key \"mod_name\"", None))?,
iri_base: iri_base.ok_or_else(|| error("Missing key \"iri_base\"", None))?,
alias_base,
})
}
// qualifier { Member1, Member2, ... }
fn eat_enum_members(stream: TokenStream) -> Result<Vec<EnumMember>> {
let mut stream = stream.into_iter().peekable();
let mut members = Vec::new();
let mut had_preceding_comma = true;
while let Some(member) = eat_enum_member(&mut stream)? {
if !had_preceding_comma {
return err_unexpected(",", &member.name, member.name.span());
}
members.push(member);
had_preceding_comma = eat_maybe_comma(&mut stream);
}
Ok(members)
}
fn eat_enum_member(stream: &mut impl TtIter) -> Result<Option<EnumMember>> {
match stream.next() {
Some(tt) => {
if let TokenTree::Ident(name) = tt {
Ok(Some(EnumMember { name }))
} else {
err_unexpected("ident", &tt, tt.span())
}
}
None => Ok(None),
}
}
fn eat_assign_ident(stream: &mut Peekable<impl TtIter>) -> Result<TokenTree> {
eat_eq(stream)?;
Ok(TokenTree::Ident(eat_ident(stream)?))
}
fn eat_assign_string(stream: &mut impl TtIter) -> Result<String> {
let _ = eat_eq(stream)?;
match stream.next() {
Some(TokenTree::Literal(literal)) => {
let s = literal.to_string();
let sb = s.as_bytes();
if sb.len() >= 2 && sb[0] == b'"' && sb[sb.len() - 1] == b'"' {
Ok(String::from_utf8(Vec::from(&sb[1..(sb.len() - 1)])).unwrap())
} else {
err_unexpected("string", &literal, literal.span())
}
}
Some(tt) => err_unexpected("string", &tt, tt.span()),
None => err_end(),
}
}
fn eat_group(stream: &mut impl TtIter, delimiter: impl Into<Option<Delimiter>>) -> Result<Group> {
match stream.next() {
Some(TokenTree::Group(grp)) => {
if let Some(delim) = delimiter.into() {
if grp.delimiter() == delim {
Ok(grp)
} else {
err("Expected a `{`", grp.span())
}
} else {
Ok(grp)
}
}
Some(tt) => err_unexpected("{", &tt, tt.span()),
None => err_end(),
}
}
fn eat_ident(stream: &mut impl TtIter) -> Result<Ident> {
eat_ident_or_end(stream).and_then(|o| o.ok_or_else(error_end))
}
fn eat_ident_or_end(stream: &mut impl TtIter) -> Result<Option<Ident>> {
match stream.next() {
Some(TokenTree::Ident(ident)) => Ok(Some(ident)),
Some(tt) => err_unexpected("ident", &tt, tt.span()),
None => Ok(None),
}
}
fn eat_eq(stream: &mut impl TtIter) -> Result<TokenTree> {
match stream.next() {
Some(TokenTree::Punct(punct)) if punct.as_char() == '=' => Ok(TokenTree::Punct(punct)),
Some(tt) => err_unexpected("=", &tt, tt.span()),
None => err_end(),
}
}
fn eat_maybe_comma(stream: &mut Peekable<impl TtIter>) -> bool {
stream
.next_if(|tt| {
if let TokenTree::Punct(punct) = tt {
punct.as_char() == ','
} else {
false
}
})
.is_some()
}
//
// compiler
//
fn compile_enum(
meta: &NsMeta,
name: TokenTree,
members: &[EnumMember],
kw_span: Span,
) -> TokenStream {
// enum Name { Member1, Member2, ... }
let mut ts = rust_code!(
#[derive(Copy, Clone, Eq, PartialEq)]
#[allow(non_camel_case_types)]
pub
);
ts.extend([
make_ident_tt("enum", kw_span),
name.clone(),
make_group_tt(
Delimiter::Brace,
members.iter().flat_map(|m| {
[
TokenTree::Ident(m.name.clone()),
TokenTree::Punct(Punct::new('=', Spacing::Alone)),
TokenTree::Literal(Literal::isize_unsuffixed(
INDEX.fetch_add(1, Ordering::Relaxed),
)),
TokenTree::Punct(Punct::new(',', Spacing::Alone)),
]
}),
),
]);
ts.extend(rust_code!(pub use ));
ts.extend([name.clone()]);
ts.extend(rust_code!(::*;));
// impl Enum { ... }
ts.extend(compile_internal_impl(meta, &name, members));
// impl HasContext for Enum { ... }
ts.extend(compile_trait_impl(meta, &name, members));
ts
}
fn compile_internal_impl(meta: &NsMeta, name: &TokenTree, members: &[EnumMember]) -> TokenStream {
let mut ts = rust_code!(impl);
ts.extend([name.clone()]);
let impl_body = {
let mut impl_body = rust_code!(const __MEMBERS: );
impl_body.extend([
// [(Self, Iri<'static>); $n]
make_group_tt(
Delimiter::Bracket,
[
make_group_tt(
Delimiter::Parenthesis,
rust_code!(Self, ::iref::Iri<'static>),
),
make_punct_alone(';'),
TokenTree::Literal(Literal::usize_unsuffixed(members.len())),
],
),
make_punct_alone('='),
]);
impl_body.extend([
// [ (Enum::Name1, iri!("iri1")), (Enum::Name2, iri!("iri2")), ... ]
make_group_tt(
Delimiter::Bracket,
members.iter().flat_map(|memb| {
[
make_group_tt(Delimiter::Parenthesis, {
let mut ts = TokenStream::from_iter([
name.clone(),
make_punct_joint(':'),
make_punct_alone(':'),
TokenTree::Ident(memb.name.clone()),
make_punct_alone(','),
]);
ts.extend(compile_iri_macro_call(&meta.iri_base, Some(memb)));
ts
}),
make_punct_alone(','),
]
.into_iter()
}),
),
TokenTree::Punct(Punct::new(';', Spacing::Alone)),
]);
impl_body.extend(rust_code!(fn __from_str(s: &str) -> Option<Self>));
let fn_body = {
let mut fn_body = rust_code!(match s);
let match_body = {
let mut match_body = TokenStream::new();
for memb in members {
let memb_name = memb.name.to_string();
match_body.extend([
TokenTree::Literal(Literal::string(&memb_name)),
make_punct_joint('='),
make_punct_alone('>'),
make_ident_tt("Some", None),
make_group_tt(
Delimiter::Parenthesis,
[
name.clone(),
make_punct_joint(':'),
make_punct_alone(':'),
TokenTree::Ident(memb.name.clone()),
],
),
make_punct_alone(','),
]);
}
match_body.extend(rust_code!(_ => None,));
make_group_ts(Delimiter::Brace, match_body)
};
fn_body.extend(match_body);
make_group_ts(Delimiter::Brace, fn_body)
};
impl_body.extend(fn_body);
make_group_ts(Delimiter::Brace, impl_body)
};
ts.extend(impl_body);
ts
}
fn compile_trait_impl(meta: &NsMeta, name: &TokenTree, members: &[EnumMember]) -> TokenStream {
// impl HasContext for Enum
let mut ts = rust_code!(impl);
ts.extend(absolute_path(None, rust_code!(ap::vocab::HasContext)));
ts.extend(rust_code!(for));
ts.extend([name.clone()]);
let impl_body = {
// const ... = ...; const ... = ...; ...
let mut impl_body = compile_impl_const_members(meta, members);
// fn from_index(...) -> ... { ... }
impl_body.extend(compile_impl_from_index(name, members));
impl_body.extend(rust_code!(
fn as_index(&self) -> usize {
*self as usize
}
));
// fn from_iri(...) -> ... { ... }
impl_body.extend(compile_impl_from_iri());
make_group_ts(Delimiter::Brace, impl_body)
};
ts.extend(impl_body);
ts
}
fn compile_impl_const_members(meta: &NsMeta, members: &[EnumMember]) -> TokenStream {
let mut ts = TokenStream::new();
ts.extend(rust_code!(const COUNT: usize = ));
ts.extend([
TokenTree::Literal(Literal::usize_suffixed(members.len())),
TokenTree::Punct(Punct::new(';', Spacing::Alone)),
]);
ts.extend(rust_code!(const OFFSET: usize = ));
ts.extend([
TokenTree::Literal(Literal::usize_suffixed(
INDEX.load(Ordering::Relaxed) as usize - members.len(),
)),
TokenTree::Punct(Punct::new(';', Spacing::Alone)),
]);
ts.extend(rust_code!(const IRI_BASE: ::iref::Iri<'static> = ));
ts.extend(compile_iri_macro_call(&meta.iri_base, None));
ts.extend([make_punct_alone(';')]);
ts.extend(rust_code!(const ALIAS_BASE: ::core::option::Option<::iref::Iri<'static>> =));
match meta.alias_base.as_ref() {
Some(s) => {
ts.extend(rust_code!(Some));
ts.extend(make_group_ts(
Delimiter::Parenthesis,
compile_iri_macro_call(s, None),
));
}
None => ts.extend(rust_code!(None)),
}
ts.extend(rust_code!(;));
// this is so we get a ref with static lifetime; see compile_internal_impl()
ts.extend(rust_code!(
const MEMBERS: &'static [(Self, ::iref::Iri<'static>)] = &Self::__MEMBERS;
));
ts
}
fn compile_impl_from_index(name: &TokenTree, members: &[EnumMember]) -> TokenStream {
let mut ts = rust_code!(fn from_index(index: usize) -> Option<Self>);
let fn_body = {
let mut fn_body = rust_code!(match index);
let match_body = {
let mut match_body = TokenStream::new();
for member in members {
match_body.extend(rust_code!(x if x == ));
match_body.extend([
name.clone(),
make_punct_joint(':'),
make_punct_alone(':'),
TokenTree::Ident(member.name.clone()),
]);
match_body.extend(rust_code!(as usize => Some));
match_body.extend(make_group_ts(
Delimiter::Parenthesis,
[
name.clone(),
make_punct_joint(':'),
make_punct_alone(':'),
TokenTree::Ident(member.name.clone()),
],
));
match_body.extend([make_punct_alone(',')]);
}
match_body.extend(rust_code!(_ => None,));
make_group_ts(Delimiter::Brace, match_body)
};
fn_body.extend(match_body);
make_group_ts(Delimiter::Brace, fn_body)
};
ts.extend(fn_body);
ts
}
fn compile_impl_from_iri() -> TokenStream {
rust_code!(
fn from_iri(iri: &::iref::Iri) -> Option<Self> {
let iri = iri.as_str();
let iri_base = Self::IRI_BASE.as_str();
if iri.starts_with(iri_base) {
Self::__from_str(unsafe {
::std::str::from_utf8_unchecked(&iri.as_bytes()[iri_base.len()..])
})
} else if let Some(alias_base) = Self::ALIAS_BASE.as_ref() {
let alias_base = alias_base.as_str();
if iri.starts_with(alias_base) {
Self::__from_str(unsafe {
::std::str::from_utf8_unchecked(&iri.as_bytes()[alias_base.len()..])
})
} else {
None
}
} else {
None
}
}
)
}
fn compile_iri_macro_call(iri_base: &str, member: Option<&EnumMember>) -> TokenStream {
let mut ts = rust_code!(::static_iref::iri!);
let iri = match member {
Some(member) => format!("{iri_base}{}", member.name),
None => String::from(iri_base),
};
ts.extend(make_group_ts(
Delimiter::Parenthesis,
[TokenTree::Literal(Literal::string(&iri))],
));
ts
}
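
To make compile_impl_from_iri() above concrete: the generated from_iri() first tries to strip IRI_BASE from the incoming IRI, then falls back to ALIAS_BASE, and finally resolves the remaining suffix by member name via __from_str(). A standalone sketch of that strategy follows (illustration only, not part of this file; the example IRI is a placeholder):

    // Equivalent of the generated prefix check, written with strip_prefix()
    // instead of starts_with() plus an unchecked byte slice.
    fn suffix_for<'a>(iri: &'a str, iri_base: &str, alias_base: Option<&str>) -> Option<&'a str> {
        if let Some(rest) = iri.strip_prefix(iri_base) {
            Some(rest)
        } else {
            alias_base.and_then(|base| iri.strip_prefix(base))
        }
    }

    fn main() {
        let base = "https://www.w3.org/ns/activitystreams#";
        let suffix = suffix_for("https://www.w3.org/ns/activitystreams#Person", base, None);
        assert_eq!(suffix, Some("Person"));
        // __from_str("Person") would then map the suffix to the enum variant.
    }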

14
macros/src/lib.rs Normal file

@ -0,0 +1,14 @@
use proc_macro::TokenStream;
mod define_json_ns;
mod util;
// XXX proc_macro_diagnostics isn't stable yet, so we just panic on errors for now
#[proc_macro]
pub fn define_json_ns(input: TokenStream) -> TokenStream {
match define_json_ns::invoke(input) {
Ok(output) => output,
Err(e) => panic!("{e}"),
}
}

103
macros/src/util.rs Normal file

@ -0,0 +1,103 @@
use proc_macro::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use proc_macro_crate::{crate_name, FoundCrate};
use std::fmt;
macro_rules! rust_code {
($($tt:tt)*) => {
<::proc_macro::TokenStream as ::std::str::FromStr>::from_str(stringify!($($tt)*))
.expect("Expected valid rust code")
}
}
pub(crate) use rust_code;
#[derive(Debug)]
pub struct Error {
msg: String,
span: Span,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "{} (at {:?})", &self.msg, self.span)
}
}
impl std::error::Error for Error {}
pub type Result<T> = std::result::Result<T, Error>;
pub fn error(msg: impl Into<String>, span: impl Into<Option<Span>>) -> Error {
Error {
msg: msg.into(),
span: span.into().unwrap_or_else(Span::call_site),
}
}
pub fn err<T>(msg: impl Into<String>, span: impl Into<Option<Span>>) -> Result<T> {
Err(error(msg, span))
}
pub fn err_unexpected<T, D, S>(expected: &str, actual: D, span: S) -> Result<T>
where
D: fmt::Display,
S: Into<Option<Span>>,
{
err(
format!("Expected `{expected}`, got `{actual}` instead"),
span,
)
}
pub fn error_end() -> Error {
error("Unexpected end of stream", None)
}
pub fn err_end<T>() -> Result<T> {
Err(error_end())
}
pub trait TtIter: Iterator<Item = TokenTree> {}
impl<T: Iterator<Item = TokenTree>> TtIter for T {}
pub fn absolute_path(span: impl Into<Option<Span>>, path: TokenStream) -> TokenStream {
let span = span.into().unwrap_or_else(Span::call_site);
let root = match crate_name("nyanoblog") {
Ok(found) => match found {
FoundCrate::Itself => "crate".to_owned(),
FoundCrate::Name(name) => name,
},
Err(e) => panic!("cannot find crate nyanoblog: {e}"),
};
let mut absolute = make_ident_ts(&root, span);
absolute.extend(rust_code!(::));
absolute.extend(path);
absolute
}
pub fn make_ident_tt(name: &str, span: impl Into<Option<Span>>) -> TokenTree {
TokenTree::Ident(Ident::new(
name,
span.into().unwrap_or_else(Span::call_site),
))
}
pub fn make_ident_ts(name: &str, span: impl Into<Option<Span>>) -> TokenStream {
[make_ident_tt(name, span)].into_iter().collect()
}
pub fn make_group_tt(delim: Delimiter, inner: impl IntoIterator<Item = TokenTree>) -> TokenTree {
TokenTree::Group(Group::new(delim, inner.into_iter().collect()))
}
pub fn make_group_ts(delim: Delimiter, inner: impl IntoIterator<Item = TokenTree>) -> TokenStream {
TokenStream::from_iter([make_group_tt(delim, inner)])
}
pub fn make_punct_alone(c: char) -> TokenTree {
TokenTree::Punct(Punct::new(c, Spacing::Alone))
}
pub fn make_punct_joint(c: char) -> TokenTree {
TokenTree::Punct(Punct::new(c, Spacing::Joint))
}


@ -17,7 +17,7 @@ use hashbrown::HashSet;
use iref::Iri; use iref::Iri;
use json_ld::{syntax::Value, Loader, Profile, RemoteDocument}; use json_ld::{syntax::Value, Loader, Profile, RemoteDocument};
use locspan::{Meta, Span}; use locspan::{Meta, Span};
use rdf_types::{vocabulary::IriIndex, IriVocabulary, IriVocabularyMut}; use rdf_types::{IriVocabulary, IriVocabularyMut};
use reqwest::{ use reqwest::{
header::{ACCEPT, CONTENT_TYPE, LINK, LOCATION}, header::{ACCEPT, CONTENT_TYPE, LINK, LOCATION},
StatusCode, StatusCode,
@ -25,6 +25,7 @@ use reqwest::{
use std::hash::Hash; use std::hash::Hash;
use std::ops::ControlFlow; use std::ops::ControlFlow;
use crate::ap::vocab::NyaIri;
use crate::core::*; use crate::core::*;
use crate::headers; use crate::headers;
use crate::state::AppState; use crate::state::AppState;
@ -34,7 +35,7 @@ use crate::util::http::{
Response, Response,
}; };
pub struct CachedLoader<I = IriIndex, M = Span, T = Value<M>> { pub struct CachedLoader<I = NyaIri, M = Span, T = Value<M>> {
state: AppState, state: AppState,
parser: Box<DynParser<I, M, T>>, parser: Box<DynParser<I, M, T>>,
} }


@ -5,20 +5,18 @@ use json_ld::{
use locspan::{Meta, Span}; use locspan::{Meta, Span};
use mime::Mime; use mime::Mime;
use rdf_types::vocabulary::BlankIdIndex; use rdf_types::vocabulary::BlankIdIndex;
use rdf_types::{vocabulary::IriIndex, IndexVocabulary, Vocabulary};
use crate::ap::vocab::{NyaIri, NyaVocabulary};
use crate::ap::{ use crate::ap::{
loader::CachedLoader, loader::CachedLoader,
trans::{ApDocument, ParseApub}, trans::{ApDocument, ParseApub},
vocab::Ids,
}; };
use crate::core::*; use crate::core::*;
use crate::state::AppState; use crate::state::AppState;
/// Main API for handling ActivityPub ingress, called by [`crate::job::inbox::InboxWorker`]. /// Main API for handling ActivityPub ingress, called by [`crate::job::inbox::InboxWorker`].
pub async fn process_document(state: &AppState, raw: &str, mime: &Mime) -> Result<()> { pub async fn process_document(state: &AppState, raw: &str, mime: &Mime) -> Result<()> {
let mut vocab: IndexVocabulary = IndexVocabulary::new(); let mut vocab = NyaVocabulary::new();
let indices = Ids::populate(&mut vocab);
let json = preprocess(raw)?; let json = preprocess(raw)?;
let rd = RemoteDocument::new(None, Some(mime.clone()), json); let rd = RemoteDocument::new(None, Some(mime.clone()), json);
@ -26,14 +24,13 @@ pub async fn process_document(state: &AppState, raw: &str, mime: &Mime) -> Resul
let mut loader = CachedLoader::new_with(state.clone(), move |_vocab, _iri, bytes| { let mut loader = CachedLoader::new_with(state.clone(), move |_vocab, _iri, bytes| {
let content = std::str::from_utf8(bytes.as_ref()) let content = std::str::from_utf8(bytes.as_ref())
.map_err(|e| Error::MalformedApub(format!("Invalid encoding: {e}")))?; .map_err(|e| Error::MalformedApub(format!("Invalid encoding: {e}")))?;
preprocess(&content) preprocess(content)
}); });
let rd = rd.expand_with(&mut vocab, &mut loader).await.unwrap(); let rd = rd.expand_with(&mut vocab, &mut loader).await.unwrap();
let vocab = vocab;
// this loop will usually only run once (one object per request) // this loop will usually only run once (one object per request)
for object in rd.into_value() { for object in rd.into_value() {
if let Err(e) = process_object(object, &vocab, &indices).await { if let Err(e) = process_object(object).await {
error!("Error in remote document: {e}"); error!("Error in remote document: {e}");
} }
} }
@ -46,12 +43,8 @@ fn preprocess(raw: &str) -> Result<Meta<Value<Span>, Span>> {
.map_err(|e| Error::MalformedApub(format!("{e}"))) .map_err(|e| Error::MalformedApub(format!("{e}")))
} }
async fn process_object( async fn process_object(obj: IndexedObject<NyaIri, BlankIdIndex, Span>) -> Result<()> {
obj: IndexedObject<IriIndex, BlankIdIndex, Span>, let document = ApDocument::parse_apub(&obj);
vocab: &impl Vocabulary<Iri = IriIndex>,
ids: &Ids,
) -> Result<()> {
let document = ApDocument::parse_apub(&obj, vocab, ids);
if let Some(doc) = document { if let Some(doc) = document {
debug!("\nParsed document:\n{doc:?}"); debug!("\nParsed document:\n{doc:?}");
} }


@ -1,12 +1,10 @@
use chrono::NaiveDateTime; use chrono::NaiveDateTime;
use rdf_types::vocabulary::IriIndex;
use rdf_types::Vocabulary;
use std::fmt; use std::fmt;
use crate::ap::trans::{ use crate::ap::trans::{
matches_type, AbstractObject, ApDocument, DebugApub, ParseApub, PropHelper, RawObject, matches_type, AbstractObject, ApDocument, DebugApub, ParseApub, PropHelper, RawObject,
}; };
use crate::ap::vocab::Ids; use crate::ap::vocab::apub;
pub struct AbstractActivity { pub struct AbstractActivity {
_super: AbstractObject, _super: AbstractObject,
@ -18,23 +16,22 @@ pub struct AbstractActivity {
} }
ap_extends!(AbstractActivity, AbstractObject); ap_extends!(AbstractActivity, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for AbstractActivity { impl ParseApub for AbstractActivity {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, &ids.apub.object.activity)?; matches_type(obj, apub::Activity)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) } unsafe { Self::_parse_apub_unchecked(obj) }
} }
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?; let ph = PropHelper::new(obj)?;
let prop_ids = &ids.apub.property;
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self { AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s, _super: s,
actor: ph.parse_prop_vec(&prop_ids.actor), actor: ph.parse_prop_vec(apub::actor),
object: ph.parse_prop_vec(&prop_ids.object), object: ph.parse_prop_vec(apub::object),
target: ph.parse_prop_box(&prop_ids.target), target: ph.parse_prop_box(apub::target),
origin: ph.parse_prop_box(&prop_ids.origin), origin: ph.parse_prop_box(apub::origin),
instrument: ph.parse_prop_vec(&prop_ids.instrument), instrument: ph.parse_prop_vec(apub::instrument),
}) })
} }
} }
@ -85,37 +82,37 @@ pub enum Activity {
View(View), View(View),
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Activity { impl ParseApub for Activity {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
Accept::parse_apub(obj, vocab, ids) Accept::parse_apub(obj)
.map(Self::Accept) .map(Self::Accept)
.or_else(|| Add::parse_apub(obj, vocab, ids).map(Self::Add)) .or_else(|| Add::parse_apub(obj).map(Self::Add))
.or_else(|| Announce::parse_apub(obj, vocab, ids).map(Self::Announce)) .or_else(|| Announce::parse_apub(obj).map(Self::Announce))
.or_else(|| Arrive::parse_apub(obj, vocab, ids).map(Self::Arrive)) .or_else(|| Arrive::parse_apub(obj).map(Self::Arrive))
.or_else(|| Block::parse_apub(obj, vocab, ids).map(Self::Block)) .or_else(|| Block::parse_apub(obj).map(Self::Block))
.or_else(|| Create::parse_apub(obj, vocab, ids).map(Self::Create)) .or_else(|| Create::parse_apub(obj).map(Self::Create))
.or_else(|| Delete::parse_apub(obj, vocab, ids).map(Self::Delete)) .or_else(|| Delete::parse_apub(obj).map(Self::Delete))
.or_else(|| Dislike::parse_apub(obj, vocab, ids).map(Self::Dislike)) .or_else(|| Dislike::parse_apub(obj).map(Self::Dislike))
.or_else(|| Flag::parse_apub(obj, vocab, ids).map(Self::Flag)) .or_else(|| Flag::parse_apub(obj).map(Self::Flag))
.or_else(|| Follow::parse_apub(obj, vocab, ids).map(Self::Follow)) .or_else(|| Follow::parse_apub(obj).map(Self::Follow))
.or_else(|| Ignore::parse_apub(obj, vocab, ids).map(Self::Ignore)) .or_else(|| Ignore::parse_apub(obj).map(Self::Ignore))
.or_else(|| Invite::parse_apub(obj, vocab, ids).map(Self::Invite)) .or_else(|| Invite::parse_apub(obj).map(Self::Invite))
.or_else(|| Join::parse_apub(obj, vocab, ids).map(Self::Join)) .or_else(|| Join::parse_apub(obj).map(Self::Join))
.or_else(|| Leave::parse_apub(obj, vocab, ids).map(Self::Leave)) .or_else(|| Leave::parse_apub(obj).map(Self::Leave))
.or_else(|| Like::parse_apub(obj, vocab, ids).map(Self::Like)) .or_else(|| Like::parse_apub(obj).map(Self::Like))
.or_else(|| Listen::parse_apub(obj, vocab, ids).map(Self::Listen)) .or_else(|| Listen::parse_apub(obj).map(Self::Listen))
.or_else(|| Move::parse_apub(obj, vocab, ids).map(Self::Move)) .or_else(|| Move::parse_apub(obj).map(Self::Move))
.or_else(|| Offer::parse_apub(obj, vocab, ids).map(Self::Offer)) .or_else(|| Offer::parse_apub(obj).map(Self::Offer))
.or_else(|| Question::parse_apub(obj, vocab, ids).map(Self::Question)) .or_else(|| Question::parse_apub(obj).map(Self::Question))
.or_else(|| Reject::parse_apub(obj, vocab, ids).map(Self::Reject)) .or_else(|| Reject::parse_apub(obj).map(Self::Reject))
.or_else(|| Read::parse_apub(obj, vocab, ids).map(Self::Read)) .or_else(|| Read::parse_apub(obj).map(Self::Read))
.or_else(|| Remove::parse_apub(obj, vocab, ids).map(Self::Remove)) .or_else(|| Remove::parse_apub(obj).map(Self::Remove))
.or_else(|| TentativeReject::parse_apub(obj, vocab, ids).map(Self::TentativeReject)) .or_else(|| TentativeReject::parse_apub(obj).map(Self::TentativeReject))
.or_else(|| TentativeAccept::parse_apub(obj, vocab, ids).map(Self::TentativeAccept)) .or_else(|| TentativeAccept::parse_apub(obj).map(Self::TentativeAccept))
.or_else(|| Travel::parse_apub(obj, vocab, ids).map(Self::Travel)) .or_else(|| Travel::parse_apub(obj).map(Self::Travel))
.or_else(|| Undo::parse_apub(obj, vocab, ids).map(Self::Undo)) .or_else(|| Undo::parse_apub(obj).map(Self::Undo))
.or_else(|| Update::parse_apub(obj, vocab, ids).map(Self::Update)) .or_else(|| Update::parse_apub(obj).map(Self::Update))
.or_else(|| View::parse_apub(obj, vocab, ids).map(Self::View)) .or_else(|| View::parse_apub(obj).map(Self::View))
} }
} }
@ -234,21 +231,20 @@ pub struct Question {
} }
ap_extends!(Question, AbstractActivity); ap_extends!(Question, AbstractActivity);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Question { impl ParseApub for Question {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, &ids.apub.object.question)?; matches_type(obj, apub::Question)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) } unsafe { Self::_parse_apub_unchecked(obj) }
} }
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?; let ph = PropHelper::new(obj)?;
let prop_ids = &ids.apub.property;
AbstractActivity::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self { AbstractActivity::_parse_apub_unchecked(obj).map(|s| Self {
_super: s, _super: s,
one_of: ph.parse_prop_vec(&prop_ids.one_of), one_of: ph.parse_prop_vec(apub::oneOf),
any_of: ph.parse_prop_vec(&prop_ids.any_of), any_of: ph.parse_prop_vec(apub::anyOf),
closed: ph.parse_prop(&prop_ids.closed), closed: ph.parse_prop(apub::closed),
}) })
} }
} }
@ -262,42 +258,30 @@ impl DebugApub for Question {
} }
} }
ap_empty_child_impl!(Accept, AbstractActivity, apub, activity, accept); ap_empty_child_impl!(Accept, AbstractActivity, apub::Accept);
ap_empty_child_impl!(Add, AbstractActivity, apub, activity, add); ap_empty_child_impl!(Add, AbstractActivity, apub::Add);
ap_empty_child_impl!(Announce, AbstractActivity, apub, activity, announce); ap_empty_child_impl!(Announce, AbstractActivity, apub::Announce);
ap_empty_child_impl!(Arrive, AbstractActivity, apub, activity, arrive); ap_empty_child_impl!(Arrive, AbstractActivity, apub::Arrive);
ap_empty_child_impl!(Block, Ignore, apub, activity, block); ap_empty_child_impl!(Block, Ignore, apub::Block);
ap_empty_child_impl!(Create, AbstractActivity, apub, activity, create); ap_empty_child_impl!(Create, AbstractActivity, apub::Create);
ap_empty_child_impl!(Delete, AbstractActivity, apub, activity, delete); ap_empty_child_impl!(Delete, AbstractActivity, apub::Delete);
ap_empty_child_impl!(Dislike, AbstractActivity, apub, activity, dislike); ap_empty_child_impl!(Dislike, AbstractActivity, apub::Dislike);
ap_empty_child_impl!(Flag, AbstractActivity, apub, activity, flag); ap_empty_child_impl!(Flag, AbstractActivity, apub::Flag);
ap_empty_child_impl!(Follow, AbstractActivity, apub, activity, follow); ap_empty_child_impl!(Follow, AbstractActivity, apub::Follow);
ap_empty_child_impl!(Ignore, AbstractActivity, apub, activity, ignore); ap_empty_child_impl!(Ignore, AbstractActivity, apub::Ignore);
ap_empty_child_impl!(Invite, AbstractActivity, apub, activity, invite); ap_empty_child_impl!(Invite, AbstractActivity, apub::Invite);
ap_empty_child_impl!(Join, AbstractActivity, apub, activity, join); ap_empty_child_impl!(Join, AbstractActivity, apub::Join);
ap_empty_child_impl!(Leave, AbstractActivity, apub, activity, leave); ap_empty_child_impl!(Leave, AbstractActivity, apub::Leave);
ap_empty_child_impl!(Like, AbstractActivity, apub, activity, like); ap_empty_child_impl!(Like, AbstractActivity, apub::Like);
ap_empty_child_impl!(Listen, AbstractActivity, apub, activity, listen); ap_empty_child_impl!(Listen, AbstractActivity, apub::Listen);
ap_empty_child_impl!(Move, AbstractActivity, apub, activity, mov); ap_empty_child_impl!(Move, AbstractActivity, apub::Move);
ap_empty_child_impl!(Offer, AbstractActivity, apub, activity, offer); ap_empty_child_impl!(Offer, AbstractActivity, apub::Offer);
ap_empty_child_impl!(Reject, AbstractActivity, apub, activity, reject); ap_empty_child_impl!(Reject, AbstractActivity, apub::Reject);
ap_empty_child_impl!(Read, AbstractActivity, apub, activity, read); ap_empty_child_impl!(Read, AbstractActivity, apub::Read);
ap_empty_child_impl!(Remove, AbstractActivity, apub, activity, remove); ap_empty_child_impl!(Remove, AbstractActivity, apub::Remove);
ap_empty_child_impl!( ap_empty_child_impl!(TentativeReject, AbstractActivity, apub::TentativeReject);
TentativeReject, ap_empty_child_impl!(TentativeAccept, AbstractActivity, apub::TentativeAccept);
AbstractActivity, ap_empty_child_impl!(Travel, AbstractActivity, apub::Travel);
apub, ap_empty_child_impl!(Undo, AbstractActivity, apub::Undo);
activity, ap_empty_child_impl!(Update, AbstractActivity, apub::Update);
tentative_reject ap_empty_child_impl!(View, AbstractActivity, apub::View);
);
ap_empty_child_impl!(
TentativeAccept,
AbstractActivity,
apub,
activity,
tentative_accept
);
ap_empty_child_impl!(Travel, AbstractActivity, apub, activity, travel);
ap_empty_child_impl!(Undo, AbstractActivity, apub, activity, undo);
ap_empty_child_impl!(Update, AbstractActivity, apub, activity, update);
ap_empty_child_impl!(View, AbstractActivity, apub, activity, view);


@ -1,18 +1,16 @@
use iref::IriBuf; use iref::IriBuf;
use rdf_types::vocabulary::IriIndex;
use rdf_types::Vocabulary;
use std::fmt; use std::fmt;
use crate::ap::trans::{AbstractObject, ApDocument, DebugApub, ParseApub, PropHelper, RawObject}; use crate::ap::trans::{AbstractObject, ApDocument, DebugApub, ParseApub, PropHelper, RawObject};
use crate::ap::vocab::Ids; use crate::ap::vocab::{apub, toot, NyaIri};
pub struct AbstractActor { pub struct AbstractActor {
_super: AbstractObject, _super: AbstractObject,
inbox: Option<IriBuf>, inbox: Option<NyaIri>,
outbox: Option<IriBuf>, outbox: Option<NyaIri>,
following: Option<IriBuf>, following: Option<NyaIri>,
followers: Option<IriBuf>, followers: Option<NyaIri>,
liked: Option<IriBuf>, liked: Option<NyaIri>,
preferred_username: Option<String>, preferred_username: Option<String>,
// Mastodon extensions // Mastodon extensions
featured: Option<Box<ApDocument>>, featured: Option<Box<ApDocument>>,
@ -22,23 +20,23 @@ pub struct AbstractActor {
} }
ap_extends!(AbstractActor, AbstractObject); ap_extends!(AbstractActor, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for AbstractActor { impl ParseApub for AbstractActor {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?; let ph = PropHelper::new(obj)?;
let result = unsafe { AbstractObject::_parse_apub_unchecked(obj, vocab, ids) }; let result = unsafe { AbstractObject::_parse_apub_unchecked(obj) };
result.map(|s| Self { result.map(|s| Self {
_super: s, _super: s,
inbox: ph.parse_prop(&ids.apub.property.inbox), inbox: ph.parse_prop(apub::inbox),
outbox: ph.parse_prop(&ids.apub.property.outbox), outbox: ph.parse_prop(apub::outbox),
following: ph.parse_prop(&ids.apub.property.following), following: ph.parse_prop(apub::following),
followers: ph.parse_prop(&ids.apub.property.followers), followers: ph.parse_prop(apub::followers),
liked: ph.parse_prop(&ids.apub.property.liked), liked: ph.parse_prop(apub::liked),
preferred_username: ph.parse_prop(&ids.apub.property.preferred_username), preferred_username: ph.parse_prop(apub::preferredUsername),
featured: ph.parse_prop_box(&ids.toot.props.featured), featured: ph.parse_prop_box(toot::featured),
featured_tags: ph.parse_prop_box(&ids.toot.props.featured_tags), featured_tags: ph.parse_prop_box(toot::featuredTags),
discoverable: ph.parse_prop(&ids.toot.props.discoverable), discoverable: ph.parse_prop(toot::discoverable),
suspended: ph.parse_prop(&ids.toot.props.suspended), suspended: ph.parse_prop(toot::suspended),
}) })
} }
} }
@ -71,14 +69,14 @@ pub enum Actor {
Service(Service), Service(Service),
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Actor { impl ParseApub for Actor {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
Application::parse_apub(obj, vocab, ids) Application::parse_apub(obj)
.map(Self::Application) .map(Self::Application)
.or_else(|| Group::parse_apub(obj, vocab, ids).map(Self::Group)) .or_else(|| Group::parse_apub(obj).map(Self::Group))
.or_else(|| Organization::parse_apub(obj, vocab, ids).map(Self::Organization)) .or_else(|| Organization::parse_apub(obj).map(Self::Organization))
.or_else(|| Person::parse_apub(obj, vocab, ids).map(Self::Person)) .or_else(|| Person::parse_apub(obj).map(Self::Person))
.or_else(|| Service::parse_apub(obj, vocab, ids).map(Self::Service)) .or_else(|| Service::parse_apub(obj).map(Self::Service))
} }
} }
@ -117,8 +115,8 @@ impl DebugApub for Actor {
} }
} }
ap_empty_child_impl!(Application, AbstractActor, apub, object, application); ap_empty_child_impl!(Application, AbstractActor, apub::Application);
ap_empty_child_impl!(Group, AbstractActor, apub, object, group); ap_empty_child_impl!(Group, AbstractActor, apub::Group);
ap_empty_child_impl!(Organization, AbstractActor, apub, object, organization); ap_empty_child_impl!(Organization, AbstractActor, apub::Organization);
ap_empty_child_impl!(Person, AbstractActor, apub, object, person); ap_empty_child_impl!(Person, AbstractActor, apub::Person);
ap_empty_child_impl!(Service, AbstractActor, apub, object, service); ap_empty_child_impl!(Service, AbstractActor, apub::Service);


@ -1,22 +1,19 @@
use iref::IriBuf;
use mime::Mime; use mime::Mime;
use rdf_types::vocabulary::IriIndex;
use rdf_types::Vocabulary;
use std::fmt; use std::fmt;
use crate::ap::trans::{matches_type, ApDocument, DebugApub, ParseApub, PropHelper, RawObject}; use crate::ap::trans::{matches_type, ApDocument, DebugApub, ParseApub, PropHelper, RawObject};
use crate::ap::vocab::Ids; use crate::ap::vocab::{apub, NyaIri};
pub enum Link { pub enum Link {
Link(BaseLink), Link(BaseLink),
Mention(Mention), Mention(Mention),
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Link { impl ParseApub for Link {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
BaseLink::parse_apub(obj, vocab, ids) BaseLink::parse_apub(obj)
.map(Self::Link) .map(Self::Link)
.or_else(|| Mention::parse_apub(obj, vocab, ids).map(Self::Mention)) .or_else(|| Mention::parse_apub(obj).map(Self::Mention))
} }
} }
@ -44,7 +41,7 @@ impl DebugApub for Link {
} }
pub struct BaseLink { pub struct BaseLink {
pub id: Option<IriBuf>, pub id: Option<NyaIri>,
pub href: Option<String>, pub href: Option<String>,
pub rel: Option<String>, pub rel: Option<String>,
pub media_type: Option<Mime>, pub media_type: Option<Mime>,
@ -55,30 +52,25 @@ pub struct BaseLink {
pub preview: Option<Box<ApDocument>>, pub preview: Option<Box<ApDocument>>,
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for BaseLink { impl ParseApub for BaseLink {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, &ids.apub.link.link)?; matches_type(obj, apub::Link)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) } unsafe { Self::_parse_apub_unchecked(obj) }
} }
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?; let ph = PropHelper::new(obj)?;
let prop_ids = &ids.apub.property;
Some(Self { Some(Self {
id: obj id: obj.id().and_then(|id| id.as_iri().cloned()),
.id() href: ph.parse_prop(apub::href),
.and_then(|id| id.as_iri()) rel: ph.parse_prop(apub::rel),
.and_then(|idx| vocab.iri(idx)) media_type: ph.parse_prop(apub::mediaType),
.map(|iri| iri.to_owned()), name: ph.parse_prop(apub::name),
href: ph.parse_prop(&prop_ids.href), hreflang: ph.parse_prop(apub::hreflang),
rel: ph.parse_prop(&prop_ids.rel), height: ph.parse_prop(apub::height),
media_type: ph.parse_prop(&prop_ids.media_type), width: ph.parse_prop(apub::width),
name: ph.parse_prop(&prop_ids.name), preview: ph.parse_prop_box(apub::preview),
hreflang: ph.parse_prop(&prop_ids.hreflang),
height: ph.parse_prop(&prop_ids.height),
width: ph.parse_prop(&prop_ids.width),
preview: ph.parse_prop_box(&prop_ids.preview),
}) })
} }
} }
@ -100,4 +92,4 @@ impl DebugApub for BaseLink {
} }
} }
ap_empty_child_impl!(Mention, BaseLink, apub, link, mention); ap_empty_child_impl!(Mention, BaseLink, apub::Mention);


@ -8,29 +8,28 @@ use json_ld::object::node::Properties;
use json_ld::IndexedObject; use json_ld::IndexedObject;
use locspan::Span; use locspan::Span;
use mime::Mime; use mime::Mime;
use rdf_types::vocabulary::{BlankIdIndex, IriIndex}; use rdf_types::vocabulary::BlankIdIndex;
use rdf_types::Vocabulary;
use std::collections::HashMap; use std::collections::HashMap;
use std::fmt; use std::fmt;
use std::str::FromStr; use std::str::FromStr;
use crate::ap::vocab::{Id, Ids}; use crate::ap::vocab::{apub, HasContext, NyaIri};
use crate::core::*; use crate::core::*;
use crate::util::xsd; use crate::util::xsd;
pub type RawProps = Properties<IriIndex, BlankIdIndex, Span>; pub type RawProps = Properties<NyaIri, BlankIdIndex, Span>;
pub type RawObject = IndexedObject<IriIndex, BlankIdIndex, Span>; pub type RawObject = IndexedObject<NyaIri, BlankIdIndex, Span>;
pub trait ParseApub<V: Vocabulary<Iri = IriIndex>>: Sized { pub trait ParseApub: Sized {
/// Attempt to translate a raw JSON-LD object (as in, from the `json-ld` crate) /// Attempt to translate a raw JSON-LD object (as in, from the `json-ld` crate)
/// to this type. Returns the parsed object on success, and the input on failure. /// to this type. Returns the parsed object on success, and the input on failure.
/// Unsupported properties SHOULD be logged but otherwise ignored. /// Unsupported properties SHOULD be logged but otherwise ignored.
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self>; fn parse_apub(obj: &RawObject) -> Option<Self>;
/// Only for internal use from subclasses, **DO NOT TOUCH**. /// Only for internal use from subclasses, **DO NOT TOUCH**.
/// Can cause an infinite recursion loop that ends in a segfault. /// Can cause an infinite recursion loop that ends in a segfault.
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
Self::parse_apub(obj, vocab, ids) Self::parse_apub(obj)
} }
} }
@ -89,28 +88,17 @@ macro_rules! ap_empty_child {
} }
macro_rules! ap_empty_child_impl { macro_rules! ap_empty_child_impl {
($child:ident, $parent:ty, $id1:ident, $id2:ident, $id3:ident) => { ($child:ident, $parent:ty, $id:expr) => {
ap_empty_child!($child, $parent); ap_empty_child!($child, $parent);
impl<V> $crate::ap::trans::ParseApub<V> for $child impl $crate::ap::trans::ParseApub for $child {
where fn parse_apub(obj: &$crate::ap::trans::RawObject) -> Option<Self> {
V: ::rdf_types::Vocabulary<Iri = ::rdf_types::vocabulary::IriIndex>, $crate::ap::trans::matches_type(obj, $id)?;
{ unsafe { Self::_parse_apub_unchecked(obj) }
fn parse_apub(
obj: &$crate::ap::trans::RawObject,
vocab: &V,
ids: &$crate::ap::vocab::Ids,
) -> Option<Self> {
$crate::ap::trans::matches_type(obj, &ids.$id1.$id2.$id3)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
} }
unsafe fn _parse_apub_unchecked( unsafe fn _parse_apub_unchecked(obj: &$crate::ap::trans::RawObject) -> Option<Self> {
obj: &$crate::ap::trans::RawObject, <$parent>::_parse_apub_unchecked(obj).map(|p| Self { _super: p })
vocab: &V,
ids: &$crate::ap::vocab::Ids,
) -> Option<Self> {
<$parent>::_parse_apub_unchecked(obj, vocab, ids).map(|p| Self { _super: p })
} }
} }
}; };
@ -153,14 +141,14 @@ pub enum ApDocument {
Remote(String), Remote(String),
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for ApDocument { impl ParseApub for ApDocument {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
if let Some(s) = obj.as_value().and_then(|v| v.as_str()) { if let Some(s) = obj.as_value().and_then(|v| v.as_str()) {
Some(Self::Remote(String::from(s))) Some(Self::Remote(String::from(s)))
} else { } else {
Object::parse_apub(obj, vocab, ids) Object::parse_apub(obj)
.map(Self::Object) .map(Self::Object)
.or_else(|| Link::parse_apub(obj, vocab, ids).map(Self::Link)) .or_else(|| Link::parse_apub(obj).map(Self::Link))
} }
} }
} }
@ -205,11 +193,11 @@ pub enum ImageOrLink {
Image(Image), Image(Image),
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for ImageOrLink { impl ParseApub for ImageOrLink {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
Image::parse_apub(obj, vocab, ids) Image::parse_apub(obj)
.map(Self::Image) .map(Self::Image)
.or_else(|| Link::parse_apub(obj, vocab, ids).map(Self::Link)) .or_else(|| Link::parse_apub(obj).map(Self::Link))
} }
} }
@ -236,13 +224,13 @@ pub enum Collection {
OrderedPage(OrderedCollectionPage), OrderedPage(OrderedCollectionPage),
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Collection { impl ParseApub for Collection {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
BaseCollection::parse_apub(obj, vocab, ids) BaseCollection::parse_apub(obj)
.map(Self::Base) .map(Self::Base)
.or_else(|| OrderedCollection::parse_apub(obj, vocab, ids).map(Self::Ordered)) .or_else(|| OrderedCollection::parse_apub(obj).map(Self::Ordered))
.or_else(|| CollectionPage::parse_apub(obj, vocab, ids).map(Self::Page)) .or_else(|| CollectionPage::parse_apub(obj).map(Self::Page))
.or_else(|| OrderedCollectionPage::parse_apub(obj, vocab, ids).map(Self::OrderedPage)) .or_else(|| OrderedCollectionPage::parse_apub(obj).map(Self::OrderedPage))
} }
} }
@ -288,23 +276,22 @@ pub struct BaseCollection {
} }
ap_extends!(BaseCollection, AbstractObject); ap_extends!(BaseCollection, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for BaseCollection { impl ParseApub for BaseCollection {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, &ids.apub.object.collection)?; matches_type(obj, apub::Collection)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) } unsafe { Self::_parse_apub_unchecked(obj) }
} }
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?; let ph = PropHelper::new(obj)?;
let prop_ids = &ids.apub.property;
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self { AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s, _super: s,
total_items: ph.parse_prop(&prop_ids.total_items), total_items: ph.parse_prop(apub::totalItems),
current: ph.parse_prop_box(&prop_ids.current), current: ph.parse_prop_box(apub::current),
first: ph.parse_prop_box(&prop_ids.first), first: ph.parse_prop_box(apub::first),
last: ph.parse_prop_box(&prop_ids.last), last: ph.parse_prop_box(apub::last),
items: ph.parse_prop_vec(&prop_ids.items), items: ph.parse_prop_vec(apub::items),
}) })
} }
} }
@ -324,13 +311,7 @@ impl DebugApub for BaseCollection {
} }
} }
ap_empty_child_impl!( ap_empty_child_impl!(OrderedCollection, BaseCollection, apub::OrderedCollection);
OrderedCollection,
BaseCollection,
apub,
object,
ordered_collection
);
pub struct CollectionPage { pub struct CollectionPage {
_super: BaseCollection, _super: BaseCollection,
@ -340,21 +321,20 @@ pub struct CollectionPage {
} }
ap_extends!(CollectionPage, BaseCollection); ap_extends!(CollectionPage, BaseCollection);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for CollectionPage { impl ParseApub for CollectionPage {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, &ids.apub.object.collection_page)?; matches_type(obj, apub::CollectionPage)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) } unsafe { Self::_parse_apub_unchecked(obj) }
} }
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?; let ph = PropHelper::new(obj)?;
let prop_ids = &ids.apub.property;
BaseCollection::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self { BaseCollection::_parse_apub_unchecked(obj).map(|s| Self {
_super: s, _super: s,
part_of: ph.parse_prop_box(&prop_ids.part_of), part_of: ph.parse_prop_box(apub::partOf),
next: ph.parse_prop_box(&prop_ids.next), next: ph.parse_prop_box(apub::next),
prev: ph.parse_prop_box(&prop_ids.prev), prev: ph.parse_prop_box(apub::prev),
}) })
} }
} }
@ -374,19 +354,18 @@ pub struct OrderedCollectionPage {
} }
ap_extends!(OrderedCollectionPage, CollectionPage); ap_extends!(OrderedCollectionPage, CollectionPage);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for OrderedCollectionPage { impl ParseApub for OrderedCollectionPage {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, &ids.apub.object.ordered_collection_page)?; matches_type(obj, apub::OrderedCollectionPage)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) } unsafe { Self::_parse_apub_unchecked(obj) }
} }
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?; let ph = PropHelper::new(obj)?;
let prop_ids = &ids.apub.property;
CollectionPage::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self { CollectionPage::_parse_apub_unchecked(obj).map(|s| Self {
_super: s, _super: s,
start_index: ph.parse_prop(&prop_ids.start_index), start_index: ph.parse_prop(apub::startIndex),
}) })
} }
} }
@ -403,11 +382,11 @@ pub enum CollectionOrLink {
Link(BaseLink), Link(BaseLink),
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for CollectionOrLink { impl ParseApub for CollectionOrLink {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
Collection::parse_apub(obj, vocab, ids) Collection::parse_apub(obj)
.map(Self::Collection) .map(Self::Collection)
.or_else(|| BaseLink::parse_apub(obj, vocab, ids).map(Self::Link)) .or_else(|| BaseLink::parse_apub(obj).map(Self::Link))
} }
} }
@ -442,11 +421,11 @@ pub enum CollectionPageOrLink {
Link(BaseLink), Link(BaseLink),
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for CollectionPageOrLink { impl ParseApub for CollectionPageOrLink {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
CollectionPage::parse_apub(obj, vocab, ids) CollectionPage::parse_apub(obj)
.map(Self::CollectionPage) .map(Self::CollectionPage)
.or_else(|| BaseLink::parse_apub(obj, vocab, ids).map(Self::Link)) .or_else(|| BaseLink::parse_apub(obj).map(Self::Link))
} }
} }
@ -476,20 +455,26 @@ impl DebugApub for CollectionPageOrLink {
} }
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for IriBuf { impl ParseApub for IriBuf {
fn parse_apub(obj: &RawObject, vocab: &V, _ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
vocab.iri(obj.as_iri()?).map(|iri| iri.to_owned()) obj.as_iri().map(|iri| iri.to_owned())
} }
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for xsd::Duration { impl ParseApub for NyaIri {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
obj.as_iri().cloned()
}
}
impl ParseApub for xsd::Duration {
fn parse_apub(obj: &RawObject) -> Option<Self> {
xsd::Duration::from_str(obj.as_value()?.as_str()?).ok() xsd::Duration::from_str(obj.as_value()?.as_str()?).ok()
} }
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for NaiveDateTime { impl ParseApub for NaiveDateTime {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
// TODO: close enough for now, but this only supports UTC // TODO: close enough for now, but this only supports UTC
let dt = DateTime::<Utc>::from_str(obj.as_value()?.as_str()?).ok()?; let dt = DateTime::<Utc>::from_str(obj.as_value()?.as_str()?).ok()?;
@@ -497,41 +482,40 @@ impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for NaiveDateTime {
} }
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Mime { impl ParseApub for Mime {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
Mime::from_str(obj.as_value()?.as_str()?).ok() Mime::from_str(obj.as_value()?.as_str()?).ok()
} }
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for String { impl ParseApub for String {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
Some(obj.as_value()?.as_str()?.to_owned()) Some(obj.as_value()?.as_str()?.to_owned())
} }
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for u32 { impl ParseApub for u32 {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
obj.as_value()?.as_number()?.as_u32() obj.as_value()?.as_number()?.as_u32()
} }
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for f32 { impl ParseApub for f32 {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
Some(obj.as_value()?.as_number()?.as_f32_lossy()) Some(obj.as_value()?.as_number()?.as_f32_lossy())
} }
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for bool { impl ParseApub for bool {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
obj.as_bool() obj.as_bool()
} }
} }
fn matches_type(obj: &RawObject, iri_id: &Id) -> Option<()> { fn matches_type(obj: &RawObject, typ: impl HasContext) -> Option<()> {
let iri = iri_id.as_iri().expect("IDs should only refer to IRIs");
let type_matches = obj let type_matches = obj
.types() .types()
.any(|t| t.as_iri().is_some_and(|index| index == iri)); .any(|t| t.as_iri().is_some_and(|iri| iri == &typ.as_nya_iri()));
if type_matches { if type_matches {
Some(()) Some(())
} else { } else {
@@ -539,48 +523,43 @@ fn matches_type(obj: &RawObject, iri_id: &Id) -> Option<()> {
} }
} }
struct PropHelper<'a, V: Vocabulary<Iri = IriIndex>> { struct PropHelper<'a> {
props: &'a RawProps, props: &'a RawProps,
vocab: &'a V,
ids: &'a Ids,
} }
impl<'a, V: Vocabulary<Iri = IriIndex>> PropHelper<'a, V> { impl<'a> PropHelper<'a> {
fn new(obj: &'a RawObject, vocab: &'a V, ids: &'a Ids) -> Option<Self> { fn new(obj: &'a RawObject) -> Option<Self> {
let props = obj.as_node()?.properties(); let props = obj.as_node()?.properties();
Some(Self { props, vocab, ids }) Some(Self { props })
} }
fn parse_prop<T: ParseApub<V>>(&self, prop_id: &Id) -> Option<T> { fn parse_prop<T: ParseApub>(&self, prop: impl HasContext) -> Option<T> {
T::parse_apub(self.props.get_any(prop_id)?, self.vocab, self.ids).or_else(|| { T::parse_apub(self.props.get_any(&prop.as_nya_iri())?).or_else(|| {
let iri = prop_id let iri = prop.as_iri();
.as_iri()
.and_then(|index| self.vocab.iri(index))
.expect("predefined IRIs must always exist");
warn!("Ignoring unknown value for property {iri}"); warn!("Ignoring unknown value for property {iri}");
None None
}) })
} }
fn parse_prop_box<T: ParseApub<V>>(&self, prop_id: &Id) -> Option<Box<T>> { fn parse_prop_box<T: ParseApub>(&self, prop: impl HasContext) -> Option<Box<T>> {
self.parse_prop(prop_id).map(Box::new) self.parse_prop(prop).map(Box::new)
} }
fn parse_prop_vec<T: ParseApub<V>>(&self, prop_id: &Id) -> Vec<T> { fn parse_prop_vec<T: ParseApub>(&self, prop: impl HasContext) -> Vec<T> {
self.props self.props
.get(prop_id) .get(&prop.as_nya_iri())
.filter_map(|prop| T::parse_apub(prop, self.vocab, self.ids)) .filter_map(|prop| T::parse_apub(prop))
.collect() .collect()
} }
} }
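// [Editorial sketch — not part of this commit] With the vocab/ids plumbing gone, a typical
// parser body now reads roughly as follows; the property and field names are taken from
// the impls elsewhere in this diff, the surrounding struct is purely illustrative:
//
//     matches_type(obj, apub::Note)?;
//     let ph = PropHelper::new(obj)?;
//     let summary: Option<String> = ph.parse_prop(apub::summary);
//     let attachment: Vec<ApDocument> = ph.parse_prop_vec(apub::attachment);
//     let tag: Option<Box<ApDocument>> = ph.parse_prop_box(apub::tag);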
impl DebugApub for IriBuf { impl DebugApub for NyaIri {
fn debug_apub(&self, f: &mut fmt::Formatter, depth: usize) -> fmt::Result { fn debug_apub(&self, f: &mut fmt::Formatter, depth: usize) -> fmt::Result {
self.debug_apub_members(f, depth) self.debug_apub_members(f, depth)
} }
fn debug_apub_members(&self, f: &mut fmt::Formatter, _depth: usize) -> fmt::Result { fn debug_apub_members(&self, f: &mut fmt::Formatter, _depth: usize) -> fmt::Result {
writeln!(f, "{}", self.as_str()) writeln!(f, "{self}")
} }
} }
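// [Editorial sketch — not part of this commit] The ParseApub trait definition itself is
// outside this hunk; judging from the impls above it presumably now looks roughly like
// this, the default body for _parse_apub_unchecked being an assumption:
//
//     pub trait ParseApub: Sized {
//         /// Parse `obj`, verifying the object's JSON-LD type first.
//         fn parse_apub(obj: &RawObject) -> Option<Self>;
//
//         /// Parse without the type check; the caller must have verified the type.
//         unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
//             Self::parse_apub(obj)
//         }
//     }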
View file
@@ -1,15 +1,12 @@
use chrono::NaiveDateTime; use chrono::NaiveDateTime;
use iref::IriBuf;
use mime::Mime; use mime::Mime;
use rdf_types::vocabulary::IriIndex;
use rdf_types::Vocabulary;
use std::fmt; use std::fmt;
use crate::ap::trans::{ use crate::ap::trans::{
activity, actor, matches_type, ApDocument, BaseCollection, Collection, DebugApub, ImageOrLink, activity, actor, matches_type, ApDocument, BaseCollection, Collection, DebugApub, ImageOrLink,
ParseApub, PropHelper, RawObject, ParseApub, PropHelper, RawObject,
}; };
use crate::ap::vocab::Ids; use crate::ap::vocab::{apub, toot, NyaIri};
use crate::util::xsd; use crate::util::xsd;
// The ActivityStreams vocabulary actually defines Image, Audio, Video, and Page // The ActivityStreams vocabulary actually defines Image, Audio, Video, and Page
@@ -34,29 +31,29 @@ pub enum Object {
Video(Video), Video(Video),
Collection(Collection), Collection(Collection),
// Mastodon extensions // ActivityPub draft, used by Mastodon
Emoji(Emoji), Emoji(Emoji),
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Object { impl ParseApub for Object {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
activity::Activity::parse_apub(obj, vocab, ids) activity::Activity::parse_apub(obj)
.map(Self::Activity) .map(Self::Activity)
.or_else(|| actor::Actor::parse_apub(obj, vocab, ids).map(Self::Actor)) .or_else(|| actor::Actor::parse_apub(obj).map(Self::Actor))
.or_else(|| Article::parse_apub(obj, vocab, ids).map(Self::Article)) .or_else(|| Article::parse_apub(obj).map(Self::Article))
.or_else(|| Audio::parse_apub(obj, vocab, ids).map(Self::Audio)) .or_else(|| Audio::parse_apub(obj).map(Self::Audio))
.or_else(|| Document::parse_apub(obj, vocab, ids).map(Self::Document)) .or_else(|| Document::parse_apub(obj).map(Self::Document))
.or_else(|| Event::parse_apub(obj, vocab, ids).map(Self::Event)) .or_else(|| Event::parse_apub(obj).map(Self::Event))
.or_else(|| Image::parse_apub(obj, vocab, ids).map(Self::Image)) .or_else(|| Image::parse_apub(obj).map(Self::Image))
.or_else(|| Note::parse_apub(obj, vocab, ids).map(Self::Note)) .or_else(|| Note::parse_apub(obj).map(Self::Note))
.or_else(|| Page::parse_apub(obj, vocab, ids).map(Self::Page)) .or_else(|| Page::parse_apub(obj).map(Self::Page))
.or_else(|| Place::parse_apub(obj, vocab, ids).map(Self::Place)) .or_else(|| Place::parse_apub(obj).map(Self::Place))
.or_else(|| Profile::parse_apub(obj, vocab, ids).map(Self::Profile)) .or_else(|| Profile::parse_apub(obj).map(Self::Profile))
.or_else(|| Relationship::parse_apub(obj, vocab, ids).map(Self::Relationship)) .or_else(|| Relationship::parse_apub(obj).map(Self::Relationship))
.or_else(|| Tombstone::parse_apub(obj, vocab, ids).map(Self::Tombstone)) .or_else(|| Tombstone::parse_apub(obj).map(Self::Tombstone))
.or_else(|| Video::parse_apub(obj, vocab, ids).map(Self::Video)) .or_else(|| Video::parse_apub(obj).map(Self::Video))
.or_else(|| Collection::parse_apub(obj, vocab, ids).map(Self::Collection)) .or_else(|| Collection::parse_apub(obj).map(Self::Collection))
.or_else(|| Emoji::parse_apub(obj, vocab, ids).map(Self::Emoji)) .or_else(|| Emoji::parse_apub(obj).map(Self::Emoji))
} }
} }
@@ -129,7 +126,7 @@ impl DebugApub for Object {
} }
pub struct AbstractObject { pub struct AbstractObject {
pub id: Option<IriBuf>, pub id: Option<NyaIri>,
pub attachment: Vec<ApDocument>, pub attachment: Vec<ApDocument>,
pub attributed_to: Vec<ApDocument>, pub attributed_to: Vec<ApDocument>,
pub audience: Vec<ApDocument>, pub audience: Vec<ApDocument>,
@@ -149,7 +146,7 @@ pub struct AbstractObject {
pub summary: Option<String>, // TODO: this could be a langString pub summary: Option<String>, // TODO: this could be a langString
pub tag: Option<Box<ApDocument>>, pub tag: Option<Box<ApDocument>>,
pub updated: Option<NaiveDateTime>, pub updated: Option<NaiveDateTime>,
pub url: Option<IriBuf>, pub url: Option<NyaIri>,
pub to: Option<Box<ApDocument>>, pub to: Option<Box<ApDocument>>,
pub bto: Option<Box<ApDocument>>, pub bto: Option<Box<ApDocument>>,
pub cc: Option<Box<ApDocument>>, pub cc: Option<Box<ApDocument>>,
@@ -158,43 +155,38 @@ pub struct AbstractObject {
pub duration: Option<xsd::Duration>, pub duration: Option<xsd::Duration>,
} }
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for AbstractObject { impl ParseApub for AbstractObject {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?; let ph = PropHelper::new(obj)?;
let prop_ids = &ids.apub.property;
Some(Self { Some(Self {
id: obj id: obj.id().and_then(|id| id.as_iri().cloned()),
.id() attachment: ph.parse_prop_vec(apub::attachment),
.and_then(|id| id.as_iri()) attributed_to: ph.parse_prop_vec(apub::attributedTo),
.and_then(|idx| vocab.iri(idx)) audience: ph.parse_prop_vec(apub::audience),
.map(|iri| iri.to_owned()), content: ph.parse_prop(apub::content),
attachment: ph.parse_prop_vec(&prop_ids.attachment), context: ph.parse_prop_box(apub::context),
attributed_to: ph.parse_prop_vec(&prop_ids.attributed_to), name: ph.parse_prop(apub::name),
audience: ph.parse_prop_vec(&prop_ids.audience), end_time: ph.parse_prop(apub::endTime),
content: ph.parse_prop(&prop_ids.content), generator: ph.parse_prop_box(apub::generator),
context: ph.parse_prop_box(&prop_ids.context), icon: ph.parse_prop_box(apub::icon),
name: ph.parse_prop(&prop_ids.name), image: ph.parse_prop_box(apub::image),
end_time: ph.parse_prop(&prop_ids.end_time), in_reply_to: ph.parse_prop_vec(apub::inReplyTo),
generator: ph.parse_prop_box(&prop_ids.generator), location: ph.parse_prop_box(apub::location),
icon: ph.parse_prop_box(&prop_ids.icon), preview: ph.parse_prop_box(apub::preview),
image: ph.parse_prop_box(&prop_ids.image), published: ph.parse_prop(apub::published),
in_reply_to: ph.parse_prop_vec(&prop_ids.in_reply_to), replies: ph.parse_prop_box(apub::replies),
location: ph.parse_prop_box(&prop_ids.location), start_time: ph.parse_prop(apub::startTime),
preview: ph.parse_prop_box(&prop_ids.preview), summary: ph.parse_prop(apub::summary),
published: ph.parse_prop(&prop_ids.published), tag: ph.parse_prop_box(apub::tag),
replies: ph.parse_prop_box(&prop_ids.replies), updated: ph.parse_prop(apub::updated),
start_time: ph.parse_prop(&prop_ids.start_time), url: ph.parse_prop(apub::url),
summary: ph.parse_prop(&prop_ids.summary), to: ph.parse_prop_box(apub::to),
tag: ph.parse_prop_box(&prop_ids.tag), bto: ph.parse_prop_box(apub::bto),
updated: ph.parse_prop(&prop_ids.updated), cc: ph.parse_prop_box(apub::cc),
url: ph.parse_prop(&prop_ids.url), bcc: ph.parse_prop_box(apub::bcc),
to: ph.parse_prop_box(&prop_ids.to), media_type: ph.parse_prop(apub::mediaType),
bto: ph.parse_prop_box(&prop_ids.bto), duration: ph.parse_prop(apub::duration),
cc: ph.parse_prop_box(&prop_ids.cc),
bcc: ph.parse_prop_box(&prop_ids.bcc),
media_type: ph.parse_prop(&prop_ids.media_type),
duration: ph.parse_prop(&prop_ids.duration),
}) })
} }
} }
@@ -239,21 +231,20 @@ pub struct Relationship {
} }
ap_extends!(Relationship, AbstractObject); ap_extends!(Relationship, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Relationship { impl ParseApub for Relationship {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, &ids.apub.object.relationship)?; matches_type(obj, apub::Relationship)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) } unsafe { Self::_parse_apub_unchecked(obj) }
} }
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?; let ph = PropHelper::new(obj)?;
let prop_ids = &ids.apub.property;
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self { AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s, _super: s,
subject: ph.parse_prop_box(&prop_ids.subject), subject: ph.parse_prop_box(apub::subject),
object: ph.parse_prop_box(&prop_ids.object), object: ph.parse_prop_box(apub::object),
relationship: ph.parse_prop_box(&prop_ids.relationship), relationship: ph.parse_prop_box(apub::relationship),
}) })
} }
} }
@@ -278,24 +269,23 @@ pub struct Place {
} }
ap_extends!(Place, AbstractObject); ap_extends!(Place, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Place { impl ParseApub for Place {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, &ids.apub.object.place)?; matches_type(obj, apub::Place)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) } unsafe { Self::_parse_apub_unchecked(obj) }
} }
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?; let ph = PropHelper::new(obj)?;
let prop_ids = &ids.apub.property;
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self { AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s, _super: s,
accuracy: ph.parse_prop(&prop_ids.accuracy), accuracy: ph.parse_prop(apub::accuracy),
altitude: ph.parse_prop(&prop_ids.altitude), altitude: ph.parse_prop(apub::altitude),
latitude: ph.parse_prop(&prop_ids.latitude), latitude: ph.parse_prop(apub::latitude),
longitude: ph.parse_prop(&prop_ids.longitude), longitude: ph.parse_prop(apub::longitude),
radius: ph.parse_prop(&prop_ids.radius), radius: ph.parse_prop(apub::radius),
units: ph.parse_prop(&prop_ids.units), units: ph.parse_prop(apub::units),
}) })
} }
} }
@@ -318,19 +308,18 @@ pub struct Profile {
} }
ap_extends!(Profile, AbstractObject); ap_extends!(Profile, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Profile { impl ParseApub for Profile {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, &ids.apub.object.profile)?; matches_type(obj, apub::Profile)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) } unsafe { Self::_parse_apub_unchecked(obj) }
} }
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?; let ph = PropHelper::new(obj)?;
let prop_ids = &ids.apub.property;
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self { AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s, _super: s,
describes: ph.parse_prop_box(&prop_ids.describes), describes: ph.parse_prop_box(apub::describes),
}) })
} }
} }
@@ -349,20 +338,19 @@ pub struct Tombstone {
} }
ap_extends!(Tombstone, AbstractObject); ap_extends!(Tombstone, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Tombstone { impl ParseApub for Tombstone {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, &ids.apub.object.tombstone)?; matches_type(obj, apub::Tombstone)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) } unsafe { Self::_parse_apub_unchecked(obj) }
} }
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?; let ph = PropHelper::new(obj)?;
let prop_ids = &ids.apub.property;
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self { AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s, _super: s,
former_type: ph.parse_prop(&prop_ids.former_type), former_type: ph.parse_prop(apub::formerType),
deleted: ph.parse_prop(&prop_ids.deleted), deleted: ph.parse_prop(apub::deleted),
}) })
} }
} }
@@ -383,20 +371,20 @@ pub struct Image {
} }
ap_extends!(Image, AbstractObject); ap_extends!(Image, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Image { impl ParseApub for Image {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, &ids.apub.object.image)?; matches_type(obj, apub::Image)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) } unsafe { Self::_parse_apub_unchecked(obj) }
} }
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> { unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?; let ph = PropHelper::new(obj)?;
let focal_point: Vec<f32> = ph.parse_prop_vec(&ids.toot.props.focal_point); let focal_point: Vec<f32> = ph.parse_prop_vec(toot::focalPoint);
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self { AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s, _super: s,
focal_point: (focal_point.len() >= 2).then(|| [focal_point[0], focal_point[1]]), focal_point: (focal_point.len() >= 2).then(|| [focal_point[0], focal_point[1]]),
blurhash: ph.parse_prop(&ids.toot.props.blurhash), blurhash: ph.parse_prop(toot::blurhash),
}) })
} }
} }
@@ -418,11 +406,11 @@ impl DebugApub for Image {
} }
} }
ap_empty_child_impl!(Article, AbstractObject, apub, object, article); ap_empty_child_impl!(Article, AbstractObject, apub::Article);
ap_empty_child_impl!(Document, AbstractObject, apub, object, document); ap_empty_child_impl!(Document, AbstractObject, apub::Document);
ap_empty_child_impl!(Audio, AbstractObject, apub, object, audio); ap_empty_child_impl!(Audio, AbstractObject, apub::Audio);
ap_empty_child_impl!(Video, AbstractObject, apub, object, video); ap_empty_child_impl!(Video, AbstractObject, apub::Video);
ap_empty_child_impl!(Note, AbstractObject, apub, object, note); ap_empty_child_impl!(Note, AbstractObject, apub::Note);
ap_empty_child_impl!(Page, AbstractObject, apub, object, page); ap_empty_child_impl!(Page, AbstractObject, apub::Page);
ap_empty_child_impl!(Event, AbstractObject, apub, object, event); ap_empty_child_impl!(Event, AbstractObject, apub::Event);
ap_empty_child_impl!(Emoji, AbstractObject, toot, class, emoji); ap_empty_child_impl!(Emoji, AbstractObject, toot::Emoji);
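// [Editorial sketch — not part of this commit] Judging from the hand-written impls above,
// ap_empty_child_impl! presumably expands to the same boilerplate, e.g. for Article:
//
//     impl ParseApub for Article {
//         fn parse_apub(obj: &RawObject) -> Option<Self> {
//             matches_type(obj, apub::Article)?;
//             unsafe { Self::_parse_apub_unchecked(obj) }
//         }
//         unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
//             AbstractObject::_parse_apub_unchecked(obj).map(|s| Self { _super: s })
//         }
//     }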
View file
@@ -1,467 +1,579 @@
//! An annoyingly huge collection of all known IRIs, from all supported namespaces. //! ActivityStreams vocabulary interface including all well-known IRIs.
//! //!
//! This might be replaced with an entirely custom implementation of [`Vocabulary`] //! This module contains two things: The JSON-LD class and property definitions,
//! in the future because reinitializing the entire AP vocabulary from scratch for //! and the interface we provide to the `json_ld` crate to do its job.
//! every single post to the inbox is probably a bit inefficient. I hate my life. //! The latter consists of [`NyaVocabulary`] and [`NyaIri`], the two central
//! interfaces to the processor. The vocabulary is supposed to store all IRIs
//! encountered during document expansion so the actual IRI type used by the
//! processor can be a simple index that is cheap to compare.
//! We only really care about the IRIs we know and discard any other information
//! though. Therefore, our own IRI type stores either a reference to a static
//! array of well-known IRIs or, if we encounter one we don't know, an Arc.
//!
//! This makes working with IRIs way more convenient because we don't have to
//! pass references to the vocabulary everywhere. Furthermore, since URLs are
//! a subset of IRIs, all URL properties of ActivityPub documents are parsed as
//! IRIs. These URLs have to be stored *somewhere* anyway, so by using Arcs we
//! don't have to copy them over to the output struct.
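// [Editorial sketch — not part of this commit; uses the items defined further down in this
// file] The practical effect of the design described above: a well-known IRI resolves to
// the pointer-comparable Known variant, anything else is interned once as an Arc.
//
//     let mut vocab = NyaVocabulary::new();
//     let note = vocab.insert(Iri::new("https://www.w3.org/ns/activitystreams#Note").unwrap());
//     assert!(note == apub::Note.as_nya_iri());      // NyaIri::Known, compared by pointer
//     let user = vocab.insert(Iri::new("https://example.com/users/anna").unwrap());
//     assert!(matches!(user, NyaIri::Custom(_)));    // unknown IRI, stored as an Arc<IriBuf>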
use iref::{Iri, IriBuf};
use langtag::{AsLanguageTag, LanguageTag, LanguageTagBuf};
use rdf_types::vocabulary::{BlankIdIndex, LanguageTagIndex, LiteralIndex};
use rdf_types::{ use rdf_types::{
vocabulary::{BlankIdIndex, IriIndex}, literal, BlankId, BlankIdBuf, BlankIdVocabulary, BlankIdVocabularyMut, IriVocabulary,
Subject, VocabularyMut, IriVocabularyMut, LanguageTagVocabulary, LanguageTagVocabularyMut, Literal, LiteralVocabulary,
LiteralVocabularyMut,
}; };
use static_iref::iri; use std::fmt;
use std::hash::{Hash, Hasher};
use std::sync::Arc;
pub struct Ids { use nyanoblog_macros::define_json_ns;
/// IRI identifiers for the base ActivityStreams and ActivityPub
/// namespace (`https://www.w3.org/ns/activitystreams#`). /// Our vocabulary implementation.
pub apub: ApubIds, /// We assert that 99% of the IRIs encountered in the wild are well-known anyway,
/// IRI identifiers for Mastodon's extension namespace /// which means that using hash tables or similar data structures would likely
/// (`http://joinmastodon.org/ns#`). /// have a negative performance impact in the average case. This is why we just
pub toot: TootIds, /// store the few remaining ones in a vec.
///
// TODO: check if this is actually true
// TODO: what about blank ids, literals, and language tags? what even are those???
pub struct NyaVocabulary {
iri: Vec<Arc<IriBuf>>,
blank_id: Vec<BlankIdBuf>,
literal: Vec<Literal<literal::Type<NyaIri, LanguageTagIndex>, String>>,
language_tag: Vec<LanguageTagBuf>,
} }
pub type Id = Subject<IriIndex, BlankIdIndex>; impl NyaVocabulary {
pub fn new() -> Self {
/// Transform
/// `key => iri!("...")`
/// into
/// `key: Id::Iri(vocab.insert(iri!("..."))`
/// so the lines don't exceed five trillion characters.
macro_rules! populate_ids {
($vocab:ident, $($name:ident => $iri:expr),* $(,)?) => {
Self { Self {
$($name: Id::Iri($vocab.insert($iri)),)* iri: Vec::new(),
blank_id: Vec::new(),
literal: Vec::new(),
language_tag: Vec::new(),
} }
} }
} }
impl Ids { // XXX this probably takes up 16 bytes but could be compressed to 8 with some pointer magic
pub fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self { #[derive(Clone)]
Self { pub enum NyaIri {
apub: ApubIds::populate(vocab), /// Well-known IRI, references an entry from [`HasContext::MEMBERS`].
toot: TootIds::populate(vocab), ///
/// **ATTENTION:** Never, ever, EVER clone the `Iri` and throw it in a new
/// instance of `NyaIri`! We optimize comparisons between two `NyaIRI::Known`
/// by just comparing the raw pointers, because we know they will always
/// reference the same underlying static array in memory.
/// This is deliberately a raw pointer, cast from a `&'static Iri<'static>`,
/// in the hope that requiring an `unsafe` block prevents accidental derefs.
Known(*const Iri<'static>),
/// A new IRI that is not hardcoded in the app
Custom(Arc<IriBuf>),
}
// SAFETY: the raw pointer is always cast from &'static
unsafe impl Send for NyaIri {}
unsafe impl Sync for NyaIri {}
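// [Editorial sketch — not part of this commit] What the ATTENTION note above warns about:
// a second copy of a well-known IRI (same text, different address) wrapped in Known breaks
// the pointer-equality shortcut used by the PartialEq impl further down.
//
//     let copy: &'static Iri<'static> =
//         Box::leak(Box::new(Iri::new("https://www.w3.org/ns/activitystreams#Note").unwrap()));
//     let bad = NyaIri::Known(copy);                 // don't do this
//     assert!(bad != apub::Note.as_nya_iri());       // same IRI text, but now compares unequal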
impl NyaIri {
pub fn to_owned(&self) -> IriBuf {
match self {
NyaIri::Known(iri) => unsafe { **iri }.to_owned(),
NyaIri::Custom(arc) => arc.as_ref().to_owned(),
} }
} }
} }
pub struct ApubIds { impl fmt::Display for NyaIri {
pub object: ApubObjectIds, fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
pub activity: ApubActivityIds, let iri = match self {
pub link: ApubLinkIds, NyaIri::Known(ptr) => unsafe { **ptr },
pub property: ApubPropertyIds, NyaIri::Custom(arc) => arc.as_iri(),
};
write!(f, "{iri}")
}
} }
impl ApubIds { impl hashbrown::Equivalent<json_ld::Id<NyaIri, BlankIdIndex>> for NyaIri {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self { fn equivalent(&self, key: &json_ld::Id<NyaIri, BlankIdIndex>) -> bool {
Self { match key {
object: ApubObjectIds::populate(vocab), json_ld::Id::Valid(id) => match id {
activity: ApubActivityIds::populate(vocab), rdf_types::Id::Blank(_) => false,
link: ApubLinkIds::populate(vocab), rdf_types::Id::Iri(iri) => iri == self,
property: ApubPropertyIds::populate(vocab), },
json_ld::Id::Invalid(_) => false,
} }
} }
} }
pub struct ApubObjectIds { impl IriVocabulary for NyaVocabulary {
pub activity: Id, type Iri = NyaIri;
pub application: Id,
pub article: Id, fn iri<'i>(&'i self, id: &'i Self::Iri) -> Option<Iri<'i>> {
pub audio: Id, match id {
pub collection: Id, NyaIri::Known(iri) => Some(unsafe { **iri }),
pub collection_page: Id, NyaIri::Custom(arc) => Some(arc.as_iri()),
pub relationship: Id, }
pub document: Id, }
pub event: Id,
pub group: Id, fn get(&self, iri: Iri) -> Option<Self::Iri> {
pub image: Id, resolve_known_iri(iri).or_else(|| {
pub intransitive_activity: Id, self.iri
pub note: Id, .iter()
pub object: Id, .find_map(|buf| (buf.as_iri() == iri).then_some(NyaIri::Custom(buf.clone())))
pub ordered_collection: Id, })
pub ordered_collection_page: Id, }
pub organization: Id,
pub page: Id,
pub person: Id,
pub place: Id,
pub profile: Id,
pub question: Id,
pub service: Id,
pub tombstone: Id,
pub video: Id,
} }
impl ApubObjectIds { impl IriVocabularyMut for NyaVocabulary {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self { fn insert(&mut self, iri: Iri) -> Self::Iri {
populate_ids! { match self.get(iri) {
vocab, Some(iri) => iri,
None => {
let arc = Arc::new(iri.to_owned());
self.iri.push(arc.clone());
NyaIri::Custom(arc)
}
}
}
activity => iri!("https://www.w3.org/ns/activitystreams#Activity"), fn insert_owned(&mut self, iri: IriBuf) -> Self::Iri {
application => iri!("https://www.w3.org/ns/activitystreams#Application"), match self.get(iri.as_iri()) {
article => iri!("https://www.w3.org/ns/activitystreams#Article"), Some(iri) => iri,
audio => iri!("https://www.w3.org/ns/activitystreams#Audio"), None => {
collection => iri!("https://www.w3.org/ns/activitystreams#Collection"), let arc = Arc::new(iri);
collection_page => iri!("https://www.w3.org/ns/activitystreams#CollectionPage"), self.iri.push(arc.clone());
relationship => iri!("https://www.w3.org/ns/activitystreams#Relationship"), NyaIri::Custom(arc)
document => iri!("https://www.w3.org/ns/activitystreams#Document"), }
event => iri!("https://www.w3.org/ns/activitystreams#Event"),
group => iri!("https://www.w3.org/ns/activitystreams#Group"),
image => iri!("https://www.w3.org/ns/activitystreams#Image"),
intransitive_activity => iri!("https://www.w3.org/ns/activitystreams#IntransitiveActivity"),
note => iri!("https://www.w3.org/ns/activitystreams#Note"),
object => iri!("https://www.w3.org/ns/activitystreams#Object"),
ordered_collection => iri!("https://www.w3.org/ns/activitystreams#OrderedCollection"),
ordered_collection_page => iri!("https://www.w3.org/ns/activitystreams#OrderedCollectionPage"),
organization => iri!("https://www.w3.org/ns/activitystreams#Organization"),
page => iri!("https://www.w3.org/ns/activitystreams#Page"),
person => iri!("https://www.w3.org/ns/activitystreams#Person"),
place => iri!("https://www.w3.org/ns/activitystreams#Place"),
profile => iri!("https://www.w3.org/ns/activitystreams#Profile"),
question => iri!("https://www.w3.org/ns/activitystreams#Question"),
service => iri!("https://www.w3.org/ns/activitystreams#Service"),
tombstone => iri!("https://www.w3.org/ns/activitystreams#Tombstone"),
video => iri!("https://www.w3.org/ns/activitystreams#Video"),
} }
} }
} }
pub struct ApubActivityIds { impl BlankIdVocabulary for NyaVocabulary {
pub accept: Id, type BlankId = BlankIdIndex;
pub add: Id,
pub announce: Id, fn blank_id<'b>(&'b self, id: &'b Self::BlankId) -> Option<&'b BlankId> {
pub arrive: Id, self.blank_id
pub block: Id, .get(usize::from(*id))
pub create: Id, .map(|id| id.as_blank_id_ref())
pub delete: Id, }
pub dislike: Id,
pub follow: Id, fn get_blank_id(&self, id: &BlankId) -> Option<Self::BlankId> {
pub flag: Id, self.blank_id
pub ignore: Id, .iter()
pub invite: Id, .enumerate()
pub join: Id, .find_map(|(index, buf)| (buf == id).then(|| BlankIdIndex::from(index)))
pub leave: Id, }
pub like: Id,
pub listen: Id,
pub mov: Id, // move
pub offer: Id,
pub read: Id,
pub remove: Id,
pub reject: Id,
pub tentative_accept: Id,
pub tentative_reject: Id,
pub travel: Id,
pub undo: Id,
pub update: Id,
pub view: Id,
} }
impl ApubActivityIds { impl BlankIdVocabularyMut for NyaVocabulary {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self { fn insert_blank_id(&mut self, id: &BlankId) -> Self::BlankId {
populate_ids! { match self.get_blank_id(id) {
vocab, Some(id) => id,
None => {
let index = self.blank_id.len();
self.blank_id.push((*id).to_owned());
BlankIdIndex::from(index)
}
}
}
accept => iri!("https://www.w3.org/ns/activitystreams#Accept"), fn insert_owned_blank_id(&mut self, id: BlankIdBuf) -> Self::BlankId {
add => iri!("https://www.w3.org/ns/activitystreams#Add"), match self.get_blank_id(id.as_blank_id_ref()) {
announce => iri!("https://www.w3.org/ns/activitystreams#Announce"), Some(id) => id,
arrive => iri!("https://www.w3.org/ns/activitystreams#Arrive"), None => {
block => iri!("https://www.w3.org/ns/activitystreams#Block"), let index = self.blank_id.len();
create => iri!("https://www.w3.org/ns/activitystreams#Create"), self.blank_id.push(id);
delete => iri!("https://www.w3.org/ns/activitystreams#Delete"), BlankIdIndex::from(index)
dislike => iri!("https://www.w3.org/ns/activitystreams#Dislike"), }
follow => iri!("https://www.w3.org/ns/activitystreams#Follow"),
flag => iri!("https://www.w3.org/ns/activitystreams#Flag"),
ignore => iri!("https://www.w3.org/ns/activitystreams#Ignore"),
invite => iri!("https://www.w3.org/ns/activitystreams#Invite"),
join => iri!("https://www.w3.org/ns/activitystreams#Join"),
leave => iri!("https://www.w3.org/ns/activitystreams#Leave"),
like => iri!("https://www.w3.org/ns/activitystreams#Like"),
listen => iri!("https://www.w3.org/ns/activitystreams#Listen"),
mov => iri!("https://www.w3.org/ns/activitystreams#Move"),
offer => iri!("https://www.w3.org/ns/activitystreams#Offer"),
read => iri!("https://www.w3.org/ns/activitystreams#Read"),
remove => iri!("https://www.w3.org/ns/activitystreams#Remove"),
reject => iri!("https://www.w3.org/ns/activitystreams#Reject"),
tentative_accept => iri!("https://www.w3.org/ns/activitystreams#TentativeAccept"),
tentative_reject => iri!("https://www.w3.org/ns/activitystreams#TentativeReject"),
travel => iri!("https://www.w3.org/ns/activitystreams#Travel"),
undo => iri!("https://www.w3.org/ns/activitystreams#Undo"),
update => iri!("https://www.w3.org/ns/activitystreams#Update"),
view => iri!("https://www.w3.org/ns/activitystreams#View"),
} }
} }
} }
pub struct ApubLinkIds { impl LiteralVocabulary for NyaVocabulary {
pub link: Id, type Literal = LiteralIndex;
pub mention: Id, type Type = literal::Type<NyaIri, LanguageTagIndex>;
pub is_following: Id, type Value = String;
pub is_followed_by: Id,
pub is_contact: Id, fn literal<'l>(
pub is_member: Id, &'l self,
// Mastodon extension id: &'l Self::Literal,
pub hashtag: Id, ) -> Option<&'l Literal<Self::Type, Self::Value>> {
self.literal.get(usize::from(*id))
}
fn get_literal(&self, id: &Literal<Self::Type, Self::Value>) -> Option<Self::Literal> {
self.literal
.iter()
.enumerate()
.find_map(|(index, buf)| (buf == id).then(|| LiteralIndex::from(index)))
}
} }
impl ApubLinkIds { impl LiteralVocabularyMut for NyaVocabulary {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self { fn insert_literal(&mut self, value: &Literal<Self::Type, Self::Value>) -> Self::Literal {
populate_ids! { match self.get_literal(value) {
vocab, Some(id) => id,
None => {
let index = self.literal.len();
self.literal.push(value.clone());
LiteralIndex::from(index)
}
}
}
link => iri!("https://www.w3.org/ns/activitystreams#Link"), fn insert_owned_literal(&mut self, id: Literal<Self::Type, Self::Value>) -> Self::Literal {
mention => iri!("https://www.w3.org/ns/activitystreams#Mention"), match self.get_literal(&id) {
is_following => iri!("https://www.w3.org/ns/activitystreams#IsFollowing"), Some(index) => index,
is_followed_by => iri!("https://www.w3.org/ns/activitystreams#IsFollowedBy"), None => {
is_contact => iri!("https://www.w3.org/ns/activitystreams#IsContact"), let index = self.literal.len();
is_member => iri!("https://www.w3.org/ns/activitystreams#IsMember"), self.literal.push(id);
hashtag => iri!("https://www.w3.org/ns/activitystreams#Hashtag"), LiteralIndex::from(index)
}
} }
} }
} }
pub struct ApubPropertyIds { impl LanguageTagVocabulary for NyaVocabulary {
pub subject: Id, type LanguageTag = LanguageTagIndex;
pub relationship: Id,
pub actor: Id,
pub attributed_to: Id,
pub attachment: Id,
pub attachments: Id,
pub author: Id,
pub bcc: Id,
pub bto: Id,
pub cc: Id,
pub context: Id,
pub current: Id,
pub first: Id,
pub generator: Id,
pub icon: Id,
pub image: Id,
pub in_reply_to: Id,
pub items: Id,
pub instrument: Id,
pub ordered_items: Id,
pub last: Id,
pub location: Id,
pub next: Id,
pub object: Id,
pub one_of: Id,
pub any_of: Id,
pub closed: Id,
pub origin: Id,
pub accuracy: Id,
pub prev: Id,
pub preview: Id,
pub provider: Id,
pub replies: Id,
pub result: Id,
pub audience: Id,
pub part_of: Id,
pub tag: Id,
pub tags: Id,
pub target: Id,
pub to: Id,
pub url: Id,
pub altitude: Id,
pub content: Id,
pub name: Id,
pub downstream_duplicates: Id,
pub duration: Id,
pub end_time: Id,
pub height: Id,
pub href: Id,
pub hreflang: Id,
pub latitude: Id,
pub longitude: Id,
pub media_type: Id,
pub published: Id,
pub radius: Id,
pub rating: Id,
pub rel: Id,
pub start_index: Id,
pub start_time: Id,
pub summary: Id,
pub total_items: Id,
pub units: Id,
pub updated: Id,
pub upstream_duplicates: Id,
pub verb: Id,
pub width: Id,
pub describes: Id,
pub former_type: Id,
pub deleted: Id,
// ActivityPub extensions fn language_tag<'l>(&'l self, id: &'l Self::LanguageTag) -> Option<LanguageTag<'l>> {
pub endpoints: Id, self.language_tag
pub following: Id, .get(usize::from(*id))
pub followers: Id, .map(|tag| tag.as_language_tag())
pub inbox: Id, }
pub liked: Id,
pub shares: Id,
pub likes: Id,
pub oauth_authorization_endpoint: Id,
pub oauth_token_endpoint: Id,
pub outbox: Id,
pub preferred_username: Id,
pub provide_client_key: Id,
pub proxy_url: Id,
pub shared_inbox: Id,
pub sign_client_key: Id,
pub source: Id,
pub streams: Id,
pub upload_media: Id,
// DID Core extensions fn get_language_tag(&self, id: LanguageTag) -> Option<Self::LanguageTag> {
pub also_known_as: Id, self.language_tag
.iter()
.enumerate()
.find_map(|(index, buf)| (buf == &id).then(|| LanguageTagIndex::from(index)))
}
} }
impl ApubPropertyIds { impl LanguageTagVocabularyMut for NyaVocabulary {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self { fn insert_language_tag(&mut self, value: LanguageTag) -> Self::LanguageTag {
populate_ids! { match self.get_language_tag(value) {
vocab, Some(id) => id,
None => {
let index = self.language_tag.len();
self.language_tag.push(value.cloned());
LanguageTagIndex::from(index)
}
}
}
subject => iri!("https://www.w3.org/ns/activitystreams#subject"), fn insert_owned_language_tag(&mut self, value: LanguageTagBuf) -> Self::LanguageTag {
relationship => iri!("https://www.w3.org/ns/activitystreams#relationship"), match self.get_language_tag(value.as_ref()) {
actor => iri!("https://www.w3.org/ns/activitystreams#actor"), Some(id) => id,
attributed_to => iri!("https://www.w3.org/ns/activitystreams#attributedTo"), None => {
attachment => iri!("https://www.w3.org/ns/activitystreams#attachment"), let index = self.language_tag.len();
attachments => iri!("https://www.w3.org/ns/activitystreams#attachments"), self.language_tag.push(value);
author => iri!("https://www.w3.org/ns/activitystreams#author"), LanguageTagIndex::from(index)
bcc => iri!("https://www.w3.org/ns/activitystreams#bcc"), }
bto => iri!("https://www.w3.org/ns/activitystreams#bto"),
cc => iri!("https://www.w3.org/ns/activitystreams#cc"),
context => iri!("https://www.w3.org/ns/activitystreams#context"),
current => iri!("https://www.w3.org/ns/activitystreams#current"),
first => iri!("https://www.w3.org/ns/activitystreams#first"),
generator => iri!("https://www.w3.org/ns/activitystreams#generator"),
icon => iri!("https://www.w3.org/ns/activitystreams#icon"),
image => iri!("https://www.w3.org/ns/activitystreams#image"),
in_reply_to => iri!("https://www.w3.org/ns/activitystreams#inReplyTo"),
items => iri!("https://www.w3.org/ns/activitystreams#items"),
instrument => iri!("https://www.w3.org/ns/activitystreams#instrument"),
ordered_items => iri!("https://www.w3.org/ns/activitystreams#orderedItems"),
last => iri!("https://www.w3.org/ns/activitystreams#last"),
location => iri!("https://www.w3.org/ns/activitystreams#location"),
next => iri!("https://www.w3.org/ns/activitystreams#next"),
object => iri!("https://www.w3.org/ns/activitystreams#object"),
one_of => iri!("https://www.w3.org/ns/activitystreams#oneOf"),
any_of => iri!("https://www.w3.org/ns/activitystreams#anyOf"),
closed => iri!("https://www.w3.org/ns/activitystreams#closed"),
origin => iri!("https://www.w3.org/ns/activitystreams#origin"),
accuracy => iri!("https://www.w3.org/ns/activitystreams#accuracy"),
prev => iri!("https://www.w3.org/ns/activitystreams#prev"),
preview => iri!("https://www.w3.org/ns/activitystreams#preview"),
provider => iri!("https://www.w3.org/ns/activitystreams#provider"),
replies => iri!("https://www.w3.org/ns/activitystreams#replies"),
result => iri!("https://www.w3.org/ns/activitystreams#result"),
audience => iri!("https://www.w3.org/ns/activitystreams#audience"),
part_of => iri!("https://www.w3.org/ns/activitystreams#partOf"),
tag => iri!("https://www.w3.org/ns/activitystreams#tag"),
tags => iri!("https://www.w3.org/ns/activitystreams#tags"),
target => iri!("https://www.w3.org/ns/activitystreams#target"),
to => iri!("https://www.w3.org/ns/activitystreams#to"),
url => iri!("https://www.w3.org/ns/activitystreams#url"),
altitude => iri!("https://www.w3.org/ns/activitystreams#altitude"),
content => iri!("https://www.w3.org/ns/activitystreams#content"),
name => iri!("https://www.w3.org/ns/activitystreams#name"),
downstream_duplicates => iri!("https://www.w3.org/ns/activitystreams#downstreamDuplicates"),
duration => iri!("https://www.w3.org/ns/activitystreams#duration"),
end_time => iri!("https://www.w3.org/ns/activitystreams#endTime"),
height => iri!("https://www.w3.org/ns/activitystreams#height"),
href => iri!("https://www.w3.org/ns/activitystreams#href"),
hreflang => iri!("https://www.w3.org/ns/activitystreams#hreflang"),
latitude => iri!("https://www.w3.org/ns/activitystreams#latitude"),
longitude => iri!("https://www.w3.org/ns/activitystreams#longitude"),
media_type => iri!("https://www.w3.org/ns/activitystreams#mediaType"),
published => iri!("https://www.w3.org/ns/activitystreams#published"),
radius => iri!("https://www.w3.org/ns/activitystreams#radius"),
rating => iri!("https://www.w3.org/ns/activitystreams#rating"),
rel => iri!("https://www.w3.org/ns/activitystreams#rel"),
start_index => iri!("https://www.w3.org/ns/activitystreams#startIndex"),
start_time => iri!("https://www.w3.org/ns/activitystreams#startTime"),
summary => iri!("https://www.w3.org/ns/activitystreams#summary"),
total_items => iri!("https://www.w3.org/ns/activitystreams#totalItems"),
units => iri!("https://www.w3.org/ns/activitystreams#units"),
updated => iri!("https://www.w3.org/ns/activitystreams#updated"),
upstream_duplicates => iri!("https://www.w3.org/ns/activitystreams#upstreamDuplicates"),
verb => iri!("https://www.w3.org/ns/activitystreams#verb"),
width => iri!("https://www.w3.org/ns/activitystreams#width"),
describes => iri!("https://www.w3.org/ns/activitystreams#describes"),
former_type => iri!("https://www.w3.org/ns/activitystreams#formerType"),
deleted => iri!("https://www.w3.org/ns/activitystreams#deleted"),
// ActivityPub extensions
endpoints => iri!("https://www.w3.org/ns/activitystreams#endpoints"),
following => iri!("https://www.w3.org/ns/activitystreams#following"),
followers => iri!("https://www.w3.org/ns/activitystreams#followers"),
inbox => iri!("http://www.w3.org/ns/ldp#inbox"),
liked => iri!("https://www.w3.org/ns/activitystreams#liked"),
shares => iri!("https://www.w3.org/ns/activitystreams#shares"),
likes => iri!("https://www.w3.org/ns/activitystreams#likes"),
oauth_authorization_endpoint => iri!("https://www.w3.org/ns/activitystreams#oauthAuthorizationEndpoint"),
oauth_token_endpoint => iri!("https://www.w3.org/ns/activitystreams#oauthTokenEndpoint"),
outbox => iri!("https://www.w3.org/ns/activitystreams#outbox"),
preferred_username => iri!("https://www.w3.org/ns/activitystreams#preferredUsername"),
provide_client_key => iri!("https://www.w3.org/ns/activitystreams#provideClientKey"),
proxy_url => iri!("https://www.w3.org/ns/activitystreams#proxyUrl"),
shared_inbox => iri!("https://www.w3.org/ns/activitystreams#sharedInbox"),
sign_client_key => iri!("https://www.w3.org/ns/activitystreams#signClientKey"),
source => iri!("https://www.w3.org/ns/activitystreams#source"),
streams => iri!("https://www.w3.org/ns/activitystreams#streams"),
upload_media => iri!("https://www.w3.org/ns/activitystreams#uploadMedia"),
also_known_as => iri!("https://www.w3.org/ns/activitystreams#alsoKnownAs"),
} }
} }
} }
pub struct TootIds { impl PartialEq for NyaIri {
pub class: TootClassIds, fn eq(&self, other: &Self) -> bool {
pub props: TootPropIds, match self {
NyaIri::Known(ptr) => match other {
// All well-known IRIs are only defined once, so it's safe to ignore
// the underlying data and just compare raw pointers.
// The dereference is necessary because matching on &self binds ptr as a reference to the raw pointer.
NyaIri::Known(other_ptr) => std::ptr::eq(*ptr, *other_ptr),
// by the same logic, we will never see an Arc holding a well-known IRI
NyaIri::Custom(_) => false,
},
NyaIri::Custom(idx) => match other {
NyaIri::Known(_) => false,
NyaIri::Custom(other_idx) => idx == other_idx,
},
}
}
} }
impl Eq for NyaIri {}
impl TootIds { impl Hash for NyaIri {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self { fn hash<H: Hasher>(&self, state: &mut H) {
Self { match self {
class: TootClassIds::populate(vocab), // XXX would it be ok to just hash the pointer here?
props: TootPropIds::populate(vocab), NyaIri::Known(iri) => unsafe { (**iri).hash(state) },
NyaIri::Custom(idx) => idx.hash(state),
} }
} }
} }
pub struct TootClassIds { /// [`define_json_ns!`] implements this trait for all enums it emits.
pub emoji: Id, pub trait HasContext: Sized + Clone + 'static {
pub identity_proof: Id, const OFFSET: usize;
} const COUNT: usize;
const IRI_BASE: Iri<'static>;
const ALIAS_BASE: Option<Iri<'static>>;
/// This is the slice that all instances of `NyaIri::Known` reference.
/// The [`define_json_ns!`] macro generates the underlying array as a
/// private member of the enums implementing this trait.
const MEMBERS: &'static [(Self, Iri<'static>)];
impl TootClassIds { fn from_index(index: usize) -> Option<Self>;
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self { fn as_index(&self) -> usize;
populate_ids! { fn from_iri(iri: &Iri) -> Option<Self>;
vocab, fn as_iri(&self) -> &'static Iri<'static> {
&Self::MEMBERS[self.as_index() - Self::OFFSET].1
emoji => iri!("http://joinmastodon.org/ns#Emoji"), }
identity_proof => iri!("http://joinmastodon.org/ns#IdentityProof"), fn as_nya_iri(&self) -> NyaIri {
} NyaIri::Known(self.as_iri())
} }
} }
pub struct TootPropIds { // XXX this could be more efficient by storing all well-known IRIs in one array
pub blurhash: Id, #[allow(clippy::manual_map)]
pub focal_point: Id, fn resolve_known_iri(iri: Iri) -> Option<NyaIri> {
pub featured: Id, let prototype = if let Some(iri) = apub::__Class::from_iri(&iri) {
pub featured_tags: Id, Some(iri.as_iri())
pub discoverable: Id, } else if let Some(iri) = apub::__Prop::from_iri(&iri) {
pub suspended: Id, Some(iri.as_iri())
pub voters_count: Id, } else if let Some(iri) = sec::__Prop::from_iri(&iri) {
Some(iri.as_iri())
} else if let Some(iri) = toot::__Class::from_iri(&iri) {
Some(iri.as_iri())
} else if let Some(iri) = toot::__Prop::from_iri(&iri) {
Some(iri.as_iri())
} else {
None
};
prototype.map(|p| NyaIri::Known(p as *const Iri<'static>))
} }
impl TootPropIds { // Very basically, this macro generates a new module containing two enums,
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self { // __Class and __Prop, and makes the module export all of their members.
populate_ids! { // This allows us to refer to them as, for instance, apub::Activity rather than
vocab, // apub::__Class::Activity.
// The enums are also HasContext, which makes them useful in the first place.
blurhash => iri!("http://joinmastodon.org/ns#blurhash"), define_json_ns! {
focal_point => iri!("http://joinmastodon.org/ns#focalPoint"), //! ActivityStreams base namespace with ActivityPub extensions
featured => iri!("http://joinmastodon.org/ns#featured"),
featured_tags => iri!("http://joinmastodon.org/ns#featuredTags"), meta {
discoverable => iri!("http://joinmastodon.org/ns#discoverable"), mod_name = apub,
suspended => iri!("http://joinmastodon.org/ns#suspended"), iri_base = "https://www.w3.org/ns/activitystreams#",
voters_count => iri!("http://joinmastodon.org/ns#votersCount"), // ActivityStreams mandates conforming implementations also recognize IRIs
} // with http instead of https as the protocol. We are aliasing the non-https
// IRI to the https one, such that HasContext::from_iri() matches both.
alias_base = "http://www.w3.org/ns/activitystreams#",
}
class {
// Object types
Activity,
Application,
Article,
Audio,
Collection,
CollectionPage,
Document,
Event,
Group,
Image,
IntransitiveActivity,
Note,
Object,
OrderedCollection,
OrderedCollectionPage,
Organization,
Page,
Person,
Place,
Profile,
Question,
Relationship,
Service,
Tombstone,
Video,
// Activity types
Accept,
Add,
Announce,
Arrive,
Block,
Create,
Delete,
Dislike,
Follow,
Flag,
Ignore,
Invite,
Join,
Leave,
Like,
Listen,
Move,
Offer,
Read,
Remove,
Reject,
TentativeAccept,
TentativeReject,
Travel,
Undo,
Update,
View,
// Link types
Hashtag, // proposed extension; Mastodon uses this
Link,
IsFollowing,
IsFollowedBy,
IsContact,
IsMember,
Mention,
}
prop {
subject,
relationship,
actor,
attributedTo,
attachment,
attachments,
author,
bcc,
bto,
cc,
context,
current,
first,
generator,
icon,
image,
inReplyTo,
items,
instrument,
orderedItems,
last,
location,
next,
object,
oneOf,
anyOf,
closed,
origin,
accuracy,
prev,
preview,
provider,
replies,
result,
audience,
partOf,
tag,
tags,
target,
to,
url,
altitude,
content,
name,
downstreamDuplicates,
duration,
endTime,
height,
href,
hreflang,
latitude,
longitude,
mediaType,
published,
radius,
rating,
rel,
startIndex,
startTime,
summary,
totalItems,
units,
updated,
upstreamDuplicates,
verb,
width,
describes,
formerType,
deleted,
// ActivityPub extensions
endpoints,
following,
followers,
inbox,
liked,
shares,
likes,
oauthAuthorizationEndpoint,
oauthTokenEndpoint,
outbox,
preferredUsername,
provideClientKey,
proxyUrl,
sharedInbox,
signClientKey,
source,
streams,
uploadMedia,
// DID core extensions
alsoKnownAs,
}
}
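// [Editorial sketch — not part of this commit] The proc macro output is not shown in this
// diff; based on the comment above the invocation, the apub module presumably expands to
// something along these lines (the real output lives in nyanoblog-macros and may differ):
//
//     pub mod apub {
//         #[derive(Clone)]
//         pub enum __Class { Activity, Application, /* ... */ Mention }
//         #[derive(Clone)]
//         #[allow(non_camel_case_types)]
//         pub enum __Prop { subject, relationship, /* ... */ alsoKnownAs }
//         pub use self::{__Class::*, __Prop::*};
//         // plus an `impl HasContext` for __Class and __Prop, each carrying a static
//         // MEMBERS table that pairs every variant with its IRI under iri_base/alias_base.
//     }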
define_json_ns! {
//! W3ID security extensions
meta {
mod_name = sec,
iri_base = "https://www.w3id.org/security#",
}
prop {
verificationMethod,
controller,
proof,
domain,
challenge,
previousProof,
proofPurpose,
proofValue,
expires,
publicKey,
publicKeyPem,
owner,
signature,
signatureValue,
}
}
define_json_ns! {
//! Mastodon extensions
meta {
mod_name = toot,
iri_base = "http://joinmastodon.org/ns#",
}
class {
Emoji,
}
prop {
blurhash,
focalPoint,
featured,
featuredTags,
discoverable,
suspended,
votersCount,
} }
} }