ap: refactor to use custom vocab and iri types

IndexVocabulary and IriIndex have been replaced
with NyaVocabulary and NyaIri, and the JSON-LD
namespaces are defined through a proc macro now.
This commit is contained in:
anna 2023-08-10 06:41:16 +02:00
parent 891c6a9a39
commit 3d71b04338
Signed by: fef
GPG key ID: 2585C2DC6D79B485
14 changed files with 1611 additions and 833 deletions

78
Cargo.lock generated
View file

@ -296,6 +296,12 @@ dependencies = [
"num-traits",
]
[[package]]
name = "atomic"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba"
[[package]]
name = "autocfg"
version = "1.1.0"
@ -1082,6 +1088,19 @@ dependencies = [
"want",
]
[[package]]
name = "hyper-rustls"
version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c"
dependencies = [
"http",
"hyper",
"rustls 0.20.7",
"tokio",
"tokio-rustls",
]
[[package]]
name = "hyper-tls"
version = "0.5.0"
@ -1598,9 +1617,9 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "mime"
version = "0.3.16"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "minimal-lexical"
@ -1738,9 +1757,11 @@ dependencies = [
"iref",
"json-ld",
"jsonwebtoken",
"langtag",
"locspan",
"log",
"mime",
"nyanoblog-macros",
"pretty_env_logger",
"rand",
"rdf-types",
@ -1755,6 +1776,13 @@ dependencies = [
"uuid",
]
[[package]]
name = "nyanoblog-macros"
version = "0.1.0"
dependencies = [
"proc-macro-crate",
]
[[package]]
name = "once_cell"
version = "1.16.0"
@ -1957,6 +1985,16 @@ dependencies = [
"log",
]
[[package]]
name = "proc-macro-crate"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919"
dependencies = [
"once_cell",
"toml_edit",
]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
@ -2094,6 +2132,7 @@ dependencies = [
"http",
"http-body",
"hyper",
"hyper-rustls",
"hyper-tls",
"ipnet",
"js-sys",
@ -2104,16 +2143,19 @@ dependencies = [
"percent-encoding",
"pin-project-lite",
"rustls 0.20.7",
"rustls-pemfile",
"serde",
"serde_json",
"serde_urlencoded",
"tokio",
"tokio-native-tls",
"tokio-rustls",
"tower-service",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"webpki-roots 0.22.6",
"winreg",
]
@ -2880,6 +2922,23 @@ dependencies = [
"tracing",
]
[[package]]
name = "toml_datetime"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b"
[[package]]
name = "toml_edit"
version = "0.19.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a"
dependencies = [
"indexmap 2.0.0",
"toml_datetime",
"winnow",
]
[[package]]
name = "tower-service"
version = "0.3.2"
@ -2989,11 +3048,13 @@ dependencies = [
[[package]]
name = "uuid"
version = "1.2.2"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "422ee0de9031b5b948b97a8fc04e3aa35230001a722ddd27943e0be31564ce4c"
checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d"
dependencies = [
"atomic",
"getrandom",
"serde",
]
[[package]]
@ -3342,6 +3403,15 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
[[package]]
name = "winnow"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acaaa1190073b2b101e15083c38ee8ec891b5e05cbee516521e94ec008f61e64"
dependencies = [
"memchr",
]
[[package]]
name = "winreg"
version = "0.10.1"

View file

@ -1,29 +1,40 @@
[package]
name = "nyanoblog"
version = "0.1.0"
edition = "2021"
version.workspace = true
[workspace]
members = [
"macros"
]
[workspace.package]
version = "0.1.0"
[dependencies]
nyanoblog-macros = { path = "./macros" }
actix-rt = "2.7"
actix-web = { version = "4", features = ["rustls"] }
argon2 = "0.5.1"
async-trait = "0.1.59"
base64 = "0.21"
bytes = "1.3"
chrono = { version = "0.4", features = [ "alloc", "clock", "serde" ] }
chrono = { version = "0.4", features = [ "alloc", "clock", "serde", "std" ] }
dotenvy = "0.15.6"
futures = "0.3"
hashbrown = "0.13.1"
iref = "2.2"
json-ld = { version = "0.15.0" }
jsonwebtoken = { version = "8", default-features = false }
langtag = "0.3.4"
locspan = "0.7"
log = "0.4"
mime = "0.3"
mime = "0.3.17"
pretty_env_logger = "0.5.0"
rand = "0.8"
rdf-types = "0.15.4"
reqwest = { version = "0.11", features = [ "rustls" ] }
reqwest = { version = "0.11", features = [ "rustls-tls" ] }
rsa = { version = "0.9.2", features = [ "sha2" ] }
serde = { version = "1.0", features = [ "derive" ] }
serde_json = "1.0"
@ -31,4 +42,4 @@ serde_test = "1.0"
sqlx = { version = "0.7.1", features = [ "chrono", "postgres", "runtime-tokio", "tls-rustls", "uuid" ] }
static-iref = "2"
tokio = "1.23"
uuid = { version = "1.2", features = [ "v4" ] }
uuid = { version = "1.4", features = [ "v4", "v7", "serde" ] }

10
macros/Cargo.toml Normal file
View file

@ -0,0 +1,10 @@
[package]
name = "nyanoblog-macros"
edition = "2021"
version.workspace = true
[lib]
proc-macro = true
[dependencies]
proc-macro-crate = "1.3.1"

View file

@ -0,0 +1,523 @@
use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use std::iter::Peekable;
use std::sync::atomic::{AtomicIsize, Ordering};
use crate::util::*;
// "forgive me, rustc, for i have sinned"
//
// good luck to whoever is trying to read — let alone understand — this, lmao
/// This is a global counter for unique indices because every invocation
/// of this macro should yield an enum with unique numeric representations.
// NOTE(review): starts at 1, so index 0 is never handed out — presumably
// reserved as a sentinel; confirm against consumers of `OFFSET`.
static INDEX: AtomicIsize = AtomicIsize::new(1);
pub fn invoke(input: TokenStream) -> Result<TokenStream> {
let mut stream = input.into_iter().peekable();
// parse the thing
let (mut meta, mut classes, mut props) = (None, None, None);
while let Some(tt) = stream.next() {
if let TokenTree::Ident(ident) = &tt {
let name = ident.to_string();
match name.as_str() {
"meta" => {
let body = eat_group(&mut stream, Delimiter::Brace)?;
if meta.replace(eat_meta(body.stream())?).is_some() {
return err("Duplicate definition of metadata", ident.span());
}
}
"class" => {
let body = eat_group(&mut stream, Delimiter::Brace)?;
let body = eat_enum_members(body.stream())?;
if classes.replace((body, ident.span())).is_some() {
return err("Duplicate class definition", ident.span());
}
}
"prop" => {
let body = eat_group(&mut stream, Delimiter::Brace)?;
let body = eat_enum_members(body.stream())?;
if props.replace((body, ident.span())).is_some() {
return err("Duplicate props definition", ident.span());
}
}
name => return err(format!("Unknown section \"{name}\""), ident.span()),
}
eat_maybe_comma(&mut stream);
}
}
// now spit out some new tokens
let meta = meta.ok_or_else(|| error("Missing meta block", None))?;
let mut ts = TokenStream::new();
if let Some((classes, span)) = classes {
ts.extend(compile_enum(
&meta,
make_ident_tt("__Class", None),
&classes,
span,
));
}
if let Some((props, span)) = props {
ts.extend(compile_enum(
&meta,
make_ident_tt("__Prop", None),
&props,
span,
));
}
let mut module = rust_code!(pub mod );
module.extend([meta.mod_name, make_group_tt(Delimiter::Brace, ts)]);
//println!("{module}");
Ok(module)
}
/// Metadata parsed from the `meta { ... }` section.
struct NsMeta {
    /// Identifier of the module the macro generates.
    mod_name: TokenTree,
    /// IRI prefix each member name is appended to.
    iri_base: String,
    /// Optional second accepted IRI prefix (e.g. a legacy alias).
    alias_base: Option<String>,
}
/// A single member of a `class`/`prop` section.
#[derive(Clone)]
struct EnumMember {
    /// Member identifier; also used verbatim as the IRI suffix.
    name: Ident,
}
//
// parser
//
// meta { key1 = val1, key2 = val2, ... }
/// Parses the body of the `meta` section into an [`NsMeta`].
///
/// Accepted keys: `mod_name` (identifier), `iri_base` (string) and the
/// optional `alias_base` (string). Duplicate keys, unknown keys, and
/// missing commas between entries are reported as errors; `mod_name`
/// and `iri_base` are mandatory.
fn eat_meta(stream: TokenStream) -> Result<NsMeta> {
    let mut stream = stream.into_iter().peekable();
    let mut mod_name = None;
    let mut iri_base = None;
    let mut alias_base = None;
    // starts out true so the first key does not require a leading comma
    let mut had_preceding_comma = true;
    while let Some(ident) = eat_ident_or_end(&mut stream)? {
        if !had_preceding_comma {
            return err("Expected a comma", ident.span());
        }
        let name = ident.to_string();
        match name.as_str() {
            "mod_name" => {
                if mod_name.replace(eat_assign_ident(&mut stream)?).is_some() {
                    return err("Duplicate definition of key \"mod_name\"", ident.span());
                }
            }
            "iri_base" => {
                if iri_base.replace(eat_assign_string(&mut stream)?).is_some() {
                    return err("Duplicate definition of key \"iri_base\"", ident.span());
                }
            }
            "alias_base" => {
                if alias_base
                    .replace(eat_assign_string(&mut stream)?)
                    .is_some()
                {
                    return err("Duplicate definition of key \"alias_base\"", ident.span());
                }
            }
            name => return err(format!("Unknown key \"{name}\""), ident.span()),
        }
        had_preceding_comma = eat_maybe_comma(&mut stream);
    }
    Ok(NsMeta {
        mod_name: mod_name.ok_or_else(|| error("Missing key \"mod_name\"", None))?,
        iri_base: iri_base.ok_or_else(|| error("Missing key \"iri_base\"", None))?,
        alias_base,
    })
}
// qualifier { Member1, Member2, ... }
/// Parses a comma-separated list of member identifiers until the stream
/// is exhausted. A missing comma between two members is an error.
fn eat_enum_members(stream: TokenStream) -> Result<Vec<EnumMember>> {
    let mut stream = stream.into_iter().peekable();
    let mut members = Vec::new();
    // starts out true so the first member does not require a leading comma
    let mut had_preceding_comma = true;
    while let Some(member) = eat_enum_member(&mut stream)? {
        if !had_preceding_comma {
            return err_unexpected(",", &member.name, member.name.span());
        }
        members.push(member);
        had_preceding_comma = eat_maybe_comma(&mut stream);
    }
    Ok(members)
}
/// Reads the next member identifier.
///
/// Returns `Ok(None)` at the end of the stream; any non-identifier
/// token yields an error.
fn eat_enum_member(stream: &mut impl TtIter) -> Result<Option<EnumMember>> {
    match stream.next() {
        None => Ok(None),
        Some(TokenTree::Ident(name)) => Ok(Some(EnumMember { name })),
        Some(other) => err_unexpected("ident", &other, other.span()),
    }
}
/// Parses `= <ident>` after a key and returns the identifier as a token.
fn eat_assign_ident(stream: &mut Peekable<impl TtIter>) -> Result<TokenTree> {
    eat_eq(stream)?;
    let ident = eat_ident(stream)?;
    Ok(TokenTree::Ident(ident))
}
/// Parses `= "<string>"` after a key and returns the string's contents.
///
/// The literal is matched textually: the surrounding double quotes are
/// stripped off its source representation.
// NOTE(review): escape sequences are NOT decoded (a `\n` in the source
// stays as backslash + n) and raw strings (`r"..."`) are rejected by the
// quote check. Fine for plain IRI bases — confirm this is intentional.
fn eat_assign_string(stream: &mut impl TtIter) -> Result<String> {
    let _ = eat_eq(stream)?;
    match stream.next() {
        Some(TokenTree::Literal(literal)) => {
            let s = literal.to_string();
            let sb = s.as_bytes();
            // must be at least `""` and start/end with a double quote
            if sb.len() >= 2 && sb[0] == b'"' && sb[sb.len() - 1] == b'"' {
                Ok(String::from_utf8(Vec::from(&sb[1..(sb.len() - 1)])).unwrap())
            } else {
                err_unexpected("string", &literal, literal.span())
            }
        }
        Some(tt) => err_unexpected("string", &tt, tt.span()),
        None => err_end(),
    }
}
/// Reads the next token and requires it to be a group, optionally with a
/// specific delimiter (pass `None` to accept any delimiter).
///
/// # Errors
/// Fails if the stream is exhausted, the next token is not a group, or
/// the group's delimiter does not match the requested one.
fn eat_group(stream: &mut impl TtIter, delimiter: impl Into<Option<Delimiter>>) -> Result<Group> {
    let delimiter = delimiter.into();
    // BUGFIX: error messages used to claim `{` was expected even when the
    // caller asked for a different delimiter; name the actual expectation.
    let expected = match delimiter {
        Some(Delimiter::Parenthesis) => "(",
        Some(Delimiter::Brace) => "{",
        Some(Delimiter::Bracket) => "[",
        Some(Delimiter::None) => "invisible group",
        None => "group",
    };
    match stream.next() {
        Some(TokenTree::Group(grp)) => match delimiter {
            Some(delim) if grp.delimiter() != delim => {
                err(format!("Expected a `{expected}`"), grp.span())
            }
            _ => Ok(grp),
        },
        Some(tt) => err_unexpected(expected, &tt, tt.span()),
        None => err_end(),
    }
}
/// Like [`eat_ident_or_end`], but the end of the stream is an error too.
fn eat_ident(stream: &mut impl TtIter) -> Result<Ident> {
    eat_ident_or_end(stream).and_then(|o| o.ok_or_else(error_end))
}
/// Reads the next identifier.
///
/// Returns `Ok(None)` at the end of the stream; any other token kind
/// yields an error.
fn eat_ident_or_end(stream: &mut impl TtIter) -> Result<Option<Ident>> {
    match stream.next() {
        None => Ok(None),
        Some(TokenTree::Ident(ident)) => Ok(Some(ident)),
        Some(other) => err_unexpected("ident", &other, other.span()),
    }
}
/// Consumes a single `=` token and returns it.
fn eat_eq(stream: &mut impl TtIter) -> Result<TokenTree> {
    match stream.next() {
        None => err_end(),
        Some(TokenTree::Punct(p)) if p.as_char() == '=' => Ok(TokenTree::Punct(p)),
        Some(other) => err_unexpected("=", &other, other.span()),
    }
}
/// Consumes a `,` if it is the next token; returns whether one was eaten.
fn eat_maybe_comma(stream: &mut Peekable<impl TtIter>) -> bool {
    let is_comma = |tt: &TokenTree| matches!(tt, TokenTree::Punct(p) if p.as_char() == ',');
    stream.next_if(is_comma).is_some()
}
//
// compiler
//
/// Emits everything for one `class`/`prop` section: the enum itself,
/// a glob re-export of its variants, its inherent impl, and its
/// `HasContext` impl.
///
/// Each variant gets its discriminant from the global [`INDEX`] counter,
/// so numeric representations are unique across ALL macro invocations.
fn compile_enum(
    meta: &NsMeta,
    name: TokenTree,
    members: &[EnumMember],
    kw_span: Span,
) -> TokenStream {
    // enum Name { Member1, Member2, ... }
    let mut ts = rust_code!(
        #[derive(Copy, Clone, Eq, PartialEq)]
        #[allow(non_camel_case_types)]
        pub
    );
    ts.extend([
        make_ident_tt("enum", kw_span),
        name.clone(),
        make_group_tt(
            Delimiter::Brace,
            members.iter().flat_map(|m| {
                [
                    TokenTree::Ident(m.name.clone()),
                    TokenTree::Punct(Punct::new('=', Spacing::Alone)),
                    // allocate the next globally unique discriminant
                    TokenTree::Literal(Literal::isize_unsuffixed(
                        INDEX.fetch_add(1, Ordering::Relaxed),
                    )),
                    TokenTree::Punct(Punct::new(',', Spacing::Alone)),
                ]
            }),
        ),
    ]);
    // pub use Name::*; -- lets callers write bare member names
    ts.extend(rust_code!(pub use ));
    ts.extend([name.clone()]);
    ts.extend(rust_code!(::*;));
    // impl Enum { ... }
    ts.extend(compile_internal_impl(meta, &name, members));
    // impl HasContext for Enum { ... }
    ts.extend(compile_trait_impl(meta, &name, members));
    ts
}
/// Emits the inherent `impl` block for the generated enum, containing:
///
/// - `__MEMBERS`: a `[(Self, Iri<'static>); N]` table pairing each
///   variant with its full IRI
/// - `__from_str`: reverse lookup from a bare member name to its variant
fn compile_internal_impl(meta: &NsMeta, name: &TokenTree, members: &[EnumMember]) -> TokenStream {
    let mut ts = rust_code!(impl);
    ts.extend([name.clone()]);
    let impl_body = {
        let mut impl_body = rust_code!(const __MEMBERS: );
        impl_body.extend([
            // [(Self, Iri<'static>); $n]
            make_group_tt(
                Delimiter::Bracket,
                [
                    make_group_tt(
                        Delimiter::Parenthesis,
                        rust_code!(Self, ::iref::Iri<'static>),
                    ),
                    make_punct_alone(';'),
                    TokenTree::Literal(Literal::usize_unsuffixed(members.len())),
                ],
            ),
            make_punct_alone('='),
        ]);
        impl_body.extend([
            // [ (Enum::Name1, iri!("iri1")), (Enum::Name2, iri!("iri2")), ... ]
            make_group_tt(
                Delimiter::Bracket,
                members.iter().flat_map(|memb| {
                    [
                        make_group_tt(Delimiter::Parenthesis, {
                            let mut ts = TokenStream::from_iter([
                                name.clone(),
                                make_punct_joint(':'),
                                make_punct_alone(':'),
                                TokenTree::Ident(memb.name.clone()),
                                make_punct_alone(','),
                            ]);
                            ts.extend(compile_iri_macro_call(&meta.iri_base, Some(memb)));
                            ts
                        }),
                        make_punct_alone(','),
                    ]
                    .into_iter()
                }),
            ),
            TokenTree::Punct(Punct::new(';', Spacing::Alone)),
        ]);
        // fn __from_str(s: &str) -> Option<Self> { match s { "Name1" => ..., _ => None } }
        impl_body.extend(rust_code!(fn __from_str(s: &str) -> Option<Self>));
        let fn_body = {
            let mut fn_body = rust_code!(match s);
            let match_body = {
                let mut match_body = TokenStream::new();
                for memb in members {
                    let memb_name = memb.name.to_string();
                    match_body.extend([
                        TokenTree::Literal(Literal::string(&memb_name)),
                        make_punct_joint('='),
                        make_punct_alone('>'),
                        make_ident_tt("Some", None),
                        make_group_tt(
                            Delimiter::Parenthesis,
                            [
                                name.clone(),
                                make_punct_joint(':'),
                                make_punct_alone(':'),
                                TokenTree::Ident(memb.name.clone()),
                            ],
                        ),
                        make_punct_alone(','),
                    ]);
                }
                match_body.extend(rust_code!(_ => None,));
                make_group_ts(Delimiter::Brace, match_body)
            };
            fn_body.extend(match_body);
            make_group_ts(Delimiter::Brace, fn_body)
        };
        impl_body.extend(fn_body);
        make_group_ts(Delimiter::Brace, impl_body)
    };
    ts.extend(impl_body);
    ts
}
/// Emits the `impl HasContext for Enum` block: the associated consts,
/// `from_index`/`as_index` and `from_iri`.
///
/// The trait path is resolved through [`absolute_path`] so the generated
/// code works both inside nyanoblog and from a renamed dependency.
fn compile_trait_impl(meta: &NsMeta, name: &TokenTree, members: &[EnumMember]) -> TokenStream {
    // impl HasContext for Enum
    let mut ts = rust_code!(impl);
    ts.extend(absolute_path(None, rust_code!(ap::vocab::HasContext)));
    ts.extend(rust_code!(for));
    ts.extend([name.clone()]);
    let impl_body = {
        // const ... = ...; const ... = ...; ...
        let mut impl_body = compile_impl_const_members(meta, members);
        // fn from_index(...) -> ... { ... }
        impl_body.extend(compile_impl_from_index(name, members));
        // as_index is trivial: the discriminant IS the index
        impl_body.extend(rust_code!(
            fn as_index(&self) -> usize {
                *self as usize
            }
        ));
        // fn from_iri(...) -> ... { ... }
        impl_body.extend(compile_impl_from_iri());
        make_group_ts(Delimiter::Brace, impl_body)
    };
    ts.extend(impl_body);
    ts
}
/// Emits the associated consts of the `HasContext` impl:
/// `COUNT`, `OFFSET`, `IRI_BASE`, `ALIAS_BASE` and `MEMBERS`.
// NOTE(review): `OFFSET` is derived from the global [`INDEX`] counter and
// assumes this runs right after the discriminants for `members` were
// allocated in `compile_enum` — the call order must not change.
fn compile_impl_const_members(meta: &NsMeta, members: &[EnumMember]) -> TokenStream {
    let mut ts = TokenStream::new();
    ts.extend(rust_code!(const COUNT: usize = ));
    ts.extend([
        TokenTree::Literal(Literal::usize_suffixed(members.len())),
        TokenTree::Punct(Punct::new(';', Spacing::Alone)),
    ]);
    ts.extend(rust_code!(const OFFSET: usize = ));
    ts.extend([
        // first discriminant of this enum = current counter minus its size
        TokenTree::Literal(Literal::usize_suffixed(
            INDEX.load(Ordering::Relaxed) as usize - members.len(),
        )),
        TokenTree::Punct(Punct::new(';', Spacing::Alone)),
    ]);
    ts.extend(rust_code!(const IRI_BASE: ::iref::Iri<'static> = ));
    ts.extend(compile_iri_macro_call(&meta.iri_base, None));
    ts.extend([make_punct_alone(';')]);
    ts.extend(rust_code!(const ALIAS_BASE: ::core::option::Option<::iref::Iri<'static>> =));
    match meta.alias_base.as_ref() {
        Some(s) => {
            ts.extend(rust_code!(Some));
            ts.extend(make_group_ts(
                Delimiter::Parenthesis,
                compile_iri_macro_call(s, None),
            ));
        }
        None => ts.extend(rust_code!(None)),
    }
    ts.extend(rust_code!(;));
    // this is so we get a ref with static lifetime; see compile_internal_impl()
    ts.extend(rust_code!(
        const MEMBERS: &'static [(Self, ::iref::Iri<'static>)] = &Self::__MEMBERS;
    ));
    ts
}
/// Emits `fn from_index` for the `HasContext` impl.
///
/// The generated function matches the incoming index against each
/// variant's discriminant (`x if x == Enum::Member as usize`) and falls
/// through to `None` for anything out of range.
fn compile_impl_from_index(name: &TokenTree, members: &[EnumMember]) -> TokenStream {
    let mut ts = rust_code!(fn from_index(index: usize) -> Option<Self>);
    let fn_body = {
        let mut fn_body = rust_code!(match index);
        let match_body = {
            let mut match_body = TokenStream::new();
            for member in members {
                match_body.extend(rust_code!(x if x == ));
                match_body.extend([
                    name.clone(),
                    make_punct_joint(':'),
                    make_punct_alone(':'),
                    TokenTree::Ident(member.name.clone()),
                ]);
                match_body.extend(rust_code!(as usize => Some));
                match_body.extend(make_group_ts(
                    Delimiter::Parenthesis,
                    [
                        name.clone(),
                        make_punct_joint(':'),
                        make_punct_alone(':'),
                        TokenTree::Ident(member.name.clone()),
                    ],
                ));
                match_body.extend([make_punct_alone(',')]);
            }
            match_body.extend(rust_code!(_ => None,));
            make_group_ts(Delimiter::Brace, match_body)
        };
        fn_body.extend(match_body);
        make_group_ts(Delimiter::Brace, fn_body)
    };
    ts.extend(fn_body);
    ts
}
/// Emits `fn from_iri` for the `HasContext` impl.
///
/// The generated function strips `IRI_BASE` (or, failing that,
/// `ALIAS_BASE`) off the front of the IRI and resolves the remaining
/// suffix through `__from_str`.
fn compile_impl_from_iri() -> TokenStream {
    rust_code!(
        fn from_iri(iri: &::iref::Iri) -> Option<Self> {
            let iri = iri.as_str();
            let iri_base = Self::IRI_BASE.as_str();
            // IMPROVED: the starts_with check guarantees that slicing at the
            // prefix length lands on a char boundary, so plain (safe) str
            // slicing replaces the previous from_utf8_unchecked + unsafe.
            if iri.starts_with(iri_base) {
                Self::__from_str(&iri[iri_base.len()..])
            } else if let Some(alias_base) = Self::ALIAS_BASE.as_ref() {
                let alias_base = alias_base.as_str();
                if iri.starts_with(alias_base) {
                    Self::__from_str(&iri[alias_base.len()..])
                } else {
                    None
                }
            } else {
                None
            }
        }
    )
}
/// Emits a `::static_iref::iri!("...")` invocation for the namespace
/// base, with the member name appended when one is given.
fn compile_iri_macro_call(iri_base: &str, member: Option<&EnumMember>) -> TokenStream {
    let iri = match member {
        None => iri_base.to_owned(),
        Some(m) => format!("{iri_base}{}", m.name),
    };
    let mut call = rust_code!(::static_iref::iri!);
    let arg = [TokenTree::Literal(Literal::string(&iri))];
    call.extend(make_group_ts(Delimiter::Parenthesis, arg));
    call
}

14
macros/src/lib.rs Normal file
View file

@ -0,0 +1,14 @@
use proc_macro::TokenStream;
mod define_json_ns;
mod util;
// XXX proc_macro_diagnostics isn't stable yet, so we just panic on errors for now
/// Defines a JSON-LD namespace module; see `define_json_ns.rs` for the
/// accepted grammar. Parse errors abort compilation via `panic!`.
#[proc_macro]
pub fn define_json_ns(input: TokenStream) -> TokenStream {
    define_json_ns::invoke(input).unwrap_or_else(|e| panic!("{e}"))
}

103
macros/src/util.rs Normal file
View file

@ -0,0 +1,103 @@
use proc_macro::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use proc_macro_crate::{crate_name, FoundCrate};
use std::fmt;
/// Turns literal Rust tokens into a [`TokenStream`].
///
/// Works by stringifying its arguments and re-parsing them via
/// `TokenStream::from_str`, so all resulting spans point at the call
/// site. Panics at proc-macro run time if the tokens do not re-parse.
macro_rules! rust_code {
    ($($tt:tt)*) => {
        <::proc_macro::TokenStream as ::std::str::FromStr>::from_str(stringify!($($tt)*))
            .expect("Expected valid rust code")
    }
}
pub(crate) use rust_code;
/// Error raised while parsing or compiling macro input.
#[derive(Debug)]
pub struct Error {
    /// Human-readable description of what went wrong.
    msg: String,
    /// Source location the error refers to (call site if unknown).
    span: Span,
}
impl fmt::Display for Error {
    /// Formats the message together with its span for diagnostics.
    ///
    /// IMPROVED: uses `write!` rather than `writeln!` — `Display`
    /// implementations conventionally do not emit a trailing newline;
    /// the consumer (e.g. the `panic!` in `lib.rs`) terminates the line.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{} (at {:?})", &self.msg, self.span)
    }
}
// Marker impl so `Error` composes with `Box<dyn std::error::Error>` etc.
impl std::error::Error for Error {}
/// Crate-local result type defaulting to [`Error`].
pub type Result<T> = std::result::Result<T, Error>;
pub fn error(msg: impl Into<String>, span: impl Into<Option<Span>>) -> Error {
Error {
msg: msg.into(),
span: span.into().unwrap_or_else(Span::call_site),
}
}
pub fn err<T>(msg: impl Into<String>, span: impl Into<Option<Span>>) -> Result<T> {
Err(error(msg, span))
}
/// Error result for "expected X, found Y" situations.
pub fn err_unexpected<T, D, S>(expected: &str, actual: D, span: S) -> Result<T>
where
    D: fmt::Display,
    S: Into<Option<Span>>,
{
    let msg = format!("Expected `{expected}`, got `{actual}` instead");
    err(msg, span)
}
/// Error signalling that the token stream ended prematurely.
pub fn error_end() -> Error {
    error("Unexpected end of stream", None)
}
/// Shorthand for `Err(error_end())`.
pub fn err_end<T>() -> Result<T> {
    Err(error_end())
}
/// Alias trait for any iterator over [`TokenTree`]s, with a blanket impl
/// so it never has to be implemented by hand.
pub trait TtIter: Iterator<Item = TokenTree> {}
impl<T: Iterator<Item = TokenTree>> TtIter for T {}
/// Prefixes `path` with the correct root for the `nyanoblog` crate.
///
/// Resolves to `crate::<path>` when expanding inside nyanoblog itself,
/// or to the renamed crate otherwise. Panics if the crate cannot be
/// found in the dependency graph.
pub fn absolute_path(span: impl Into<Option<Span>>, path: TokenStream) -> TokenStream {
    let span = span.into().unwrap_or_else(Span::call_site);
    let root = match crate_name("nyanoblog") {
        Ok(FoundCrate::Itself) => "crate".to_owned(),
        Ok(FoundCrate::Name(name)) => name,
        Err(e) => panic!("cannot find crate nyanoblog: {e}"),
    };
    let mut ts = make_ident_ts(&root, span);
    ts.extend(rust_code!(::));
    ts.extend(path);
    ts
}
/// Wraps an identifier in a [`TokenTree`], defaulting to the call-site span.
pub fn make_ident_tt(name: &str, span: impl Into<Option<Span>>) -> TokenTree {
    let span = span.into().unwrap_or_else(Span::call_site);
    TokenTree::Ident(Ident::new(name, span))
}
/// Like [`make_ident_tt`], but yields a single-token [`TokenStream`].
pub fn make_ident_ts(name: &str, span: impl Into<Option<Span>>) -> TokenStream {
    TokenStream::from_iter([make_ident_tt(name, span)])
}
/// Wraps `inner` in a delimited group token.
pub fn make_group_tt(delim: Delimiter, inner: impl IntoIterator<Item = TokenTree>) -> TokenTree {
    let stream: TokenStream = inner.into_iter().collect();
    TokenTree::Group(Group::new(delim, stream))
}
/// Like [`make_group_tt`], but yields the group as a one-token stream.
pub fn make_group_ts(delim: Delimiter, inner: impl IntoIterator<Item = TokenTree>) -> TokenStream {
    [make_group_tt(delim, inner)].into_iter().collect()
}
/// Single punctuation token that is NOT joined to the following token.
pub fn make_punct_alone(c: char) -> TokenTree {
    TokenTree::Punct(Punct::new(c, Spacing::Alone))
}
/// Single punctuation token joined to the following token (e.g. the
/// first `:` of a `::` path separator).
pub fn make_punct_joint(c: char) -> TokenTree {
    TokenTree::Punct(Punct::new(c, Spacing::Joint))
}

View file

@ -17,7 +17,7 @@ use hashbrown::HashSet;
use iref::Iri;
use json_ld::{syntax::Value, Loader, Profile, RemoteDocument};
use locspan::{Meta, Span};
use rdf_types::{vocabulary::IriIndex, IriVocabulary, IriVocabularyMut};
use rdf_types::{IriVocabulary, IriVocabularyMut};
use reqwest::{
header::{ACCEPT, CONTENT_TYPE, LINK, LOCATION},
StatusCode,
@ -25,6 +25,7 @@ use reqwest::{
use std::hash::Hash;
use std::ops::ControlFlow;
use crate::ap::vocab::NyaIri;
use crate::core::*;
use crate::headers;
use crate::state::AppState;
@ -34,7 +35,7 @@ use crate::util::http::{
Response,
};
pub struct CachedLoader<I = IriIndex, M = Span, T = Value<M>> {
pub struct CachedLoader<I = NyaIri, M = Span, T = Value<M>> {
state: AppState,
parser: Box<DynParser<I, M, T>>,
}

View file

@ -5,20 +5,18 @@ use json_ld::{
use locspan::{Meta, Span};
use mime::Mime;
use rdf_types::vocabulary::BlankIdIndex;
use rdf_types::{vocabulary::IriIndex, IndexVocabulary, Vocabulary};
use crate::ap::vocab::{NyaIri, NyaVocabulary};
use crate::ap::{
loader::CachedLoader,
trans::{ApDocument, ParseApub},
vocab::Ids,
};
use crate::core::*;
use crate::state::AppState;
/// Main API for handling ActivityPub ingress, called by [`crate::job::inbox::InboxWorker`].
pub async fn process_document(state: &AppState, raw: &str, mime: &Mime) -> Result<()> {
let mut vocab: IndexVocabulary = IndexVocabulary::new();
let indices = Ids::populate(&mut vocab);
let mut vocab = NyaVocabulary::new();
let json = preprocess(raw)?;
let rd = RemoteDocument::new(None, Some(mime.clone()), json);
@ -26,14 +24,13 @@ pub async fn process_document(state: &AppState, raw: &str, mime: &Mime) -> Resul
let mut loader = CachedLoader::new_with(state.clone(), move |_vocab, _iri, bytes| {
let content = std::str::from_utf8(bytes.as_ref())
.map_err(|e| Error::MalformedApub(format!("Invalid encoding: {e}")))?;
preprocess(&content)
preprocess(content)
});
let rd = rd.expand_with(&mut vocab, &mut loader).await.unwrap();
let vocab = vocab;
// this loop will usually only run once (one object per request)
for object in rd.into_value() {
if let Err(e) = process_object(object, &vocab, &indices).await {
if let Err(e) = process_object(object).await {
error!("Error in remote document: {e}");
}
}
@ -46,12 +43,8 @@ fn preprocess(raw: &str) -> Result<Meta<Value<Span>, Span>> {
.map_err(|e| Error::MalformedApub(format!("{e}")))
}
async fn process_object(
obj: IndexedObject<IriIndex, BlankIdIndex, Span>,
vocab: &impl Vocabulary<Iri = IriIndex>,
ids: &Ids,
) -> Result<()> {
let document = ApDocument::parse_apub(&obj, vocab, ids);
async fn process_object(obj: IndexedObject<NyaIri, BlankIdIndex, Span>) -> Result<()> {
let document = ApDocument::parse_apub(&obj);
if let Some(doc) = document {
debug!("\nParsed document:\n{doc:?}");
}

View file

@ -1,12 +1,10 @@
use chrono::NaiveDateTime;
use rdf_types::vocabulary::IriIndex;
use rdf_types::Vocabulary;
use std::fmt;
use crate::ap::trans::{
matches_type, AbstractObject, ApDocument, DebugApub, ParseApub, PropHelper, RawObject,
};
use crate::ap::vocab::Ids;
use crate::ap::vocab::apub;
pub struct AbstractActivity {
_super: AbstractObject,
@ -18,23 +16,22 @@ pub struct AbstractActivity {
}
ap_extends!(AbstractActivity, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for AbstractActivity {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
matches_type(obj, &ids.apub.object.activity)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl ParseApub for AbstractActivity {
fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, apub::Activity)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
let prop_ids = &ids.apub.property;
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self {
AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s,
actor: ph.parse_prop_vec(&prop_ids.actor),
object: ph.parse_prop_vec(&prop_ids.object),
target: ph.parse_prop_box(&prop_ids.target),
origin: ph.parse_prop_box(&prop_ids.origin),
instrument: ph.parse_prop_vec(&prop_ids.instrument),
actor: ph.parse_prop_vec(apub::actor),
object: ph.parse_prop_vec(apub::object),
target: ph.parse_prop_box(apub::target),
origin: ph.parse_prop_box(apub::origin),
instrument: ph.parse_prop_vec(apub::instrument),
})
}
}
@ -85,37 +82,37 @@ pub enum Activity {
View(View),
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Activity {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
Accept::parse_apub(obj, vocab, ids)
impl ParseApub for Activity {
fn parse_apub(obj: &RawObject) -> Option<Self> {
Accept::parse_apub(obj)
.map(Self::Accept)
.or_else(|| Add::parse_apub(obj, vocab, ids).map(Self::Add))
.or_else(|| Announce::parse_apub(obj, vocab, ids).map(Self::Announce))
.or_else(|| Arrive::parse_apub(obj, vocab, ids).map(Self::Arrive))
.or_else(|| Block::parse_apub(obj, vocab, ids).map(Self::Block))
.or_else(|| Create::parse_apub(obj, vocab, ids).map(Self::Create))
.or_else(|| Delete::parse_apub(obj, vocab, ids).map(Self::Delete))
.or_else(|| Dislike::parse_apub(obj, vocab, ids).map(Self::Dislike))
.or_else(|| Flag::parse_apub(obj, vocab, ids).map(Self::Flag))
.or_else(|| Follow::parse_apub(obj, vocab, ids).map(Self::Follow))
.or_else(|| Ignore::parse_apub(obj, vocab, ids).map(Self::Ignore))
.or_else(|| Invite::parse_apub(obj, vocab, ids).map(Self::Invite))
.or_else(|| Join::parse_apub(obj, vocab, ids).map(Self::Join))
.or_else(|| Leave::parse_apub(obj, vocab, ids).map(Self::Leave))
.or_else(|| Like::parse_apub(obj, vocab, ids).map(Self::Like))
.or_else(|| Listen::parse_apub(obj, vocab, ids).map(Self::Listen))
.or_else(|| Move::parse_apub(obj, vocab, ids).map(Self::Move))
.or_else(|| Offer::parse_apub(obj, vocab, ids).map(Self::Offer))
.or_else(|| Question::parse_apub(obj, vocab, ids).map(Self::Question))
.or_else(|| Reject::parse_apub(obj, vocab, ids).map(Self::Reject))
.or_else(|| Read::parse_apub(obj, vocab, ids).map(Self::Read))
.or_else(|| Remove::parse_apub(obj, vocab, ids).map(Self::Remove))
.or_else(|| TentativeReject::parse_apub(obj, vocab, ids).map(Self::TentativeReject))
.or_else(|| TentativeAccept::parse_apub(obj, vocab, ids).map(Self::TentativeAccept))
.or_else(|| Travel::parse_apub(obj, vocab, ids).map(Self::Travel))
.or_else(|| Undo::parse_apub(obj, vocab, ids).map(Self::Undo))
.or_else(|| Update::parse_apub(obj, vocab, ids).map(Self::Update))
.or_else(|| View::parse_apub(obj, vocab, ids).map(Self::View))
.or_else(|| Add::parse_apub(obj).map(Self::Add))
.or_else(|| Announce::parse_apub(obj).map(Self::Announce))
.or_else(|| Arrive::parse_apub(obj).map(Self::Arrive))
.or_else(|| Block::parse_apub(obj).map(Self::Block))
.or_else(|| Create::parse_apub(obj).map(Self::Create))
.or_else(|| Delete::parse_apub(obj).map(Self::Delete))
.or_else(|| Dislike::parse_apub(obj).map(Self::Dislike))
.or_else(|| Flag::parse_apub(obj).map(Self::Flag))
.or_else(|| Follow::parse_apub(obj).map(Self::Follow))
.or_else(|| Ignore::parse_apub(obj).map(Self::Ignore))
.or_else(|| Invite::parse_apub(obj).map(Self::Invite))
.or_else(|| Join::parse_apub(obj).map(Self::Join))
.or_else(|| Leave::parse_apub(obj).map(Self::Leave))
.or_else(|| Like::parse_apub(obj).map(Self::Like))
.or_else(|| Listen::parse_apub(obj).map(Self::Listen))
.or_else(|| Move::parse_apub(obj).map(Self::Move))
.or_else(|| Offer::parse_apub(obj).map(Self::Offer))
.or_else(|| Question::parse_apub(obj).map(Self::Question))
.or_else(|| Reject::parse_apub(obj).map(Self::Reject))
.or_else(|| Read::parse_apub(obj).map(Self::Read))
.or_else(|| Remove::parse_apub(obj).map(Self::Remove))
.or_else(|| TentativeReject::parse_apub(obj).map(Self::TentativeReject))
.or_else(|| TentativeAccept::parse_apub(obj).map(Self::TentativeAccept))
.or_else(|| Travel::parse_apub(obj).map(Self::Travel))
.or_else(|| Undo::parse_apub(obj).map(Self::Undo))
.or_else(|| Update::parse_apub(obj).map(Self::Update))
.or_else(|| View::parse_apub(obj).map(Self::View))
}
}
@ -234,21 +231,20 @@ pub struct Question {
}
ap_extends!(Question, AbstractActivity);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Question {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
matches_type(obj, &ids.apub.object.question)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl ParseApub for Question {
fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, apub::Question)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
let prop_ids = &ids.apub.property;
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
AbstractActivity::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self {
AbstractActivity::_parse_apub_unchecked(obj).map(|s| Self {
_super: s,
one_of: ph.parse_prop_vec(&prop_ids.one_of),
any_of: ph.parse_prop_vec(&prop_ids.any_of),
closed: ph.parse_prop(&prop_ids.closed),
one_of: ph.parse_prop_vec(apub::oneOf),
any_of: ph.parse_prop_vec(apub::anyOf),
closed: ph.parse_prop(apub::closed),
})
}
}
@ -262,42 +258,30 @@ impl DebugApub for Question {
}
}
ap_empty_child_impl!(Accept, AbstractActivity, apub, activity, accept);
ap_empty_child_impl!(Add, AbstractActivity, apub, activity, add);
ap_empty_child_impl!(Announce, AbstractActivity, apub, activity, announce);
ap_empty_child_impl!(Arrive, AbstractActivity, apub, activity, arrive);
ap_empty_child_impl!(Block, Ignore, apub, activity, block);
ap_empty_child_impl!(Create, AbstractActivity, apub, activity, create);
ap_empty_child_impl!(Delete, AbstractActivity, apub, activity, delete);
ap_empty_child_impl!(Dislike, AbstractActivity, apub, activity, dislike);
ap_empty_child_impl!(Flag, AbstractActivity, apub, activity, flag);
ap_empty_child_impl!(Follow, AbstractActivity, apub, activity, follow);
ap_empty_child_impl!(Ignore, AbstractActivity, apub, activity, ignore);
ap_empty_child_impl!(Invite, AbstractActivity, apub, activity, invite);
ap_empty_child_impl!(Join, AbstractActivity, apub, activity, join);
ap_empty_child_impl!(Leave, AbstractActivity, apub, activity, leave);
ap_empty_child_impl!(Like, AbstractActivity, apub, activity, like);
ap_empty_child_impl!(Listen, AbstractActivity, apub, activity, listen);
ap_empty_child_impl!(Move, AbstractActivity, apub, activity, mov);
ap_empty_child_impl!(Offer, AbstractActivity, apub, activity, offer);
ap_empty_child_impl!(Reject, AbstractActivity, apub, activity, reject);
ap_empty_child_impl!(Read, AbstractActivity, apub, activity, read);
ap_empty_child_impl!(Remove, AbstractActivity, apub, activity, remove);
ap_empty_child_impl!(
TentativeReject,
AbstractActivity,
apub,
activity,
tentative_reject
);
ap_empty_child_impl!(
TentativeAccept,
AbstractActivity,
apub,
activity,
tentative_accept
);
ap_empty_child_impl!(Travel, AbstractActivity, apub, activity, travel);
ap_empty_child_impl!(Undo, AbstractActivity, apub, activity, undo);
ap_empty_child_impl!(Update, AbstractActivity, apub, activity, update);
ap_empty_child_impl!(View, AbstractActivity, apub, activity, view);
ap_empty_child_impl!(Accept, AbstractActivity, apub::Accept);
ap_empty_child_impl!(Add, AbstractActivity, apub::Add);
ap_empty_child_impl!(Announce, AbstractActivity, apub::Announce);
ap_empty_child_impl!(Arrive, AbstractActivity, apub::Arrive);
ap_empty_child_impl!(Block, Ignore, apub::Block);
ap_empty_child_impl!(Create, AbstractActivity, apub::Create);
ap_empty_child_impl!(Delete, AbstractActivity, apub::Delete);
ap_empty_child_impl!(Dislike, AbstractActivity, apub::Dislike);
ap_empty_child_impl!(Flag, AbstractActivity, apub::Flag);
ap_empty_child_impl!(Follow, AbstractActivity, apub::Follow);
ap_empty_child_impl!(Ignore, AbstractActivity, apub::Ignore);
ap_empty_child_impl!(Invite, AbstractActivity, apub::Invite);
ap_empty_child_impl!(Join, AbstractActivity, apub::Join);
ap_empty_child_impl!(Leave, AbstractActivity, apub::Leave);
ap_empty_child_impl!(Like, AbstractActivity, apub::Like);
ap_empty_child_impl!(Listen, AbstractActivity, apub::Listen);
ap_empty_child_impl!(Move, AbstractActivity, apub::Move);
ap_empty_child_impl!(Offer, AbstractActivity, apub::Offer);
ap_empty_child_impl!(Reject, AbstractActivity, apub::Reject);
ap_empty_child_impl!(Read, AbstractActivity, apub::Read);
ap_empty_child_impl!(Remove, AbstractActivity, apub::Remove);
ap_empty_child_impl!(TentativeReject, AbstractActivity, apub::TentativeReject);
ap_empty_child_impl!(TentativeAccept, AbstractActivity, apub::TentativeAccept);
ap_empty_child_impl!(Travel, AbstractActivity, apub::Travel);
ap_empty_child_impl!(Undo, AbstractActivity, apub::Undo);
ap_empty_child_impl!(Update, AbstractActivity, apub::Update);
ap_empty_child_impl!(View, AbstractActivity, apub::View);

View file

@ -1,18 +1,16 @@
use iref::IriBuf;
use rdf_types::vocabulary::IriIndex;
use rdf_types::Vocabulary;
use std::fmt;
use crate::ap::trans::{AbstractObject, ApDocument, DebugApub, ParseApub, PropHelper, RawObject};
use crate::ap::vocab::Ids;
use crate::ap::vocab::{apub, toot, NyaIri};
pub struct AbstractActor {
_super: AbstractObject,
inbox: Option<IriBuf>,
outbox: Option<IriBuf>,
following: Option<IriBuf>,
followers: Option<IriBuf>,
liked: Option<IriBuf>,
inbox: Option<NyaIri>,
outbox: Option<NyaIri>,
following: Option<NyaIri>,
followers: Option<NyaIri>,
liked: Option<NyaIri>,
preferred_username: Option<String>,
// Mastodon extensions
featured: Option<Box<ApDocument>>,
@ -22,23 +20,23 @@ pub struct AbstractActor {
}
ap_extends!(AbstractActor, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for AbstractActor {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
impl ParseApub for AbstractActor {
fn parse_apub(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
let result = unsafe { AbstractObject::_parse_apub_unchecked(obj, vocab, ids) };
let result = unsafe { AbstractObject::_parse_apub_unchecked(obj) };
result.map(|s| Self {
_super: s,
inbox: ph.parse_prop(&ids.apub.property.inbox),
outbox: ph.parse_prop(&ids.apub.property.outbox),
following: ph.parse_prop(&ids.apub.property.following),
followers: ph.parse_prop(&ids.apub.property.followers),
liked: ph.parse_prop(&ids.apub.property.liked),
preferred_username: ph.parse_prop(&ids.apub.property.preferred_username),
featured: ph.parse_prop_box(&ids.toot.props.featured),
featured_tags: ph.parse_prop_box(&ids.toot.props.featured_tags),
discoverable: ph.parse_prop(&ids.toot.props.discoverable),
suspended: ph.parse_prop(&ids.toot.props.suspended),
inbox: ph.parse_prop(apub::inbox),
outbox: ph.parse_prop(apub::outbox),
following: ph.parse_prop(apub::following),
followers: ph.parse_prop(apub::followers),
liked: ph.parse_prop(apub::liked),
preferred_username: ph.parse_prop(apub::preferredUsername),
featured: ph.parse_prop_box(toot::featured),
featured_tags: ph.parse_prop_box(toot::featuredTags),
discoverable: ph.parse_prop(toot::discoverable),
suspended: ph.parse_prop(toot::suspended),
})
}
}
@ -71,14 +69,14 @@ pub enum Actor {
Service(Service),
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Actor {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
Application::parse_apub(obj, vocab, ids)
impl ParseApub for Actor {
fn parse_apub(obj: &RawObject) -> Option<Self> {
Application::parse_apub(obj)
.map(Self::Application)
.or_else(|| Group::parse_apub(obj, vocab, ids).map(Self::Group))
.or_else(|| Organization::parse_apub(obj, vocab, ids).map(Self::Organization))
.or_else(|| Person::parse_apub(obj, vocab, ids).map(Self::Person))
.or_else(|| Service::parse_apub(obj, vocab, ids).map(Self::Service))
.or_else(|| Group::parse_apub(obj).map(Self::Group))
.or_else(|| Organization::parse_apub(obj).map(Self::Organization))
.or_else(|| Person::parse_apub(obj).map(Self::Person))
.or_else(|| Service::parse_apub(obj).map(Self::Service))
}
}
@ -117,8 +115,8 @@ impl DebugApub for Actor {
}
}
ap_empty_child_impl!(Application, AbstractActor, apub, object, application);
ap_empty_child_impl!(Group, AbstractActor, apub, object, group);
ap_empty_child_impl!(Organization, AbstractActor, apub, object, organization);
ap_empty_child_impl!(Person, AbstractActor, apub, object, person);
ap_empty_child_impl!(Service, AbstractActor, apub, object, service);
ap_empty_child_impl!(Application, AbstractActor, apub::Application);
ap_empty_child_impl!(Group, AbstractActor, apub::Group);
ap_empty_child_impl!(Organization, AbstractActor, apub::Organization);
ap_empty_child_impl!(Person, AbstractActor, apub::Person);
ap_empty_child_impl!(Service, AbstractActor, apub::Service);

View file

@ -1,22 +1,19 @@
use iref::IriBuf;
use mime::Mime;
use rdf_types::vocabulary::IriIndex;
use rdf_types::Vocabulary;
use std::fmt;
use crate::ap::trans::{matches_type, ApDocument, DebugApub, ParseApub, PropHelper, RawObject};
use crate::ap::vocab::Ids;
use crate::ap::vocab::{apub, NyaIri};
pub enum Link {
Link(BaseLink),
Mention(Mention),
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Link {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
BaseLink::parse_apub(obj, vocab, ids)
impl ParseApub for Link {
fn parse_apub(obj: &RawObject) -> Option<Self> {
BaseLink::parse_apub(obj)
.map(Self::Link)
.or_else(|| Mention::parse_apub(obj, vocab, ids).map(Self::Mention))
.or_else(|| Mention::parse_apub(obj).map(Self::Mention))
}
}
@ -44,7 +41,7 @@ impl DebugApub for Link {
}
pub struct BaseLink {
pub id: Option<IriBuf>,
pub id: Option<NyaIri>,
pub href: Option<String>,
pub rel: Option<String>,
pub media_type: Option<Mime>,
@ -55,30 +52,25 @@ pub struct BaseLink {
pub preview: Option<Box<ApDocument>>,
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for BaseLink {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
matches_type(obj, &ids.apub.link.link)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl ParseApub for BaseLink {
fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, apub::Link)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
let prop_ids = &ids.apub.property;
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
Some(Self {
id: obj
.id()
.and_then(|id| id.as_iri())
.and_then(|idx| vocab.iri(idx))
.map(|iri| iri.to_owned()),
href: ph.parse_prop(&prop_ids.href),
rel: ph.parse_prop(&prop_ids.rel),
media_type: ph.parse_prop(&prop_ids.media_type),
name: ph.parse_prop(&prop_ids.name),
hreflang: ph.parse_prop(&prop_ids.hreflang),
height: ph.parse_prop(&prop_ids.height),
width: ph.parse_prop(&prop_ids.width),
preview: ph.parse_prop_box(&prop_ids.preview),
id: obj.id().and_then(|id| id.as_iri().cloned()),
href: ph.parse_prop(apub::href),
rel: ph.parse_prop(apub::rel),
media_type: ph.parse_prop(apub::mediaType),
name: ph.parse_prop(apub::name),
hreflang: ph.parse_prop(apub::hreflang),
height: ph.parse_prop(apub::height),
width: ph.parse_prop(apub::width),
preview: ph.parse_prop_box(apub::preview),
})
}
}
@ -100,4 +92,4 @@ impl DebugApub for BaseLink {
}
}
ap_empty_child_impl!(Mention, BaseLink, apub, link, mention);
ap_empty_child_impl!(Mention, BaseLink, apub::Mention);

View file

@ -8,29 +8,28 @@ use json_ld::object::node::Properties;
use json_ld::IndexedObject;
use locspan::Span;
use mime::Mime;
use rdf_types::vocabulary::{BlankIdIndex, IriIndex};
use rdf_types::Vocabulary;
use rdf_types::vocabulary::BlankIdIndex;
use std::collections::HashMap;
use std::fmt;
use std::str::FromStr;
use crate::ap::vocab::{Id, Ids};
use crate::ap::vocab::{apub, HasContext, NyaIri};
use crate::core::*;
use crate::util::xsd;
pub type RawProps = Properties<IriIndex, BlankIdIndex, Span>;
pub type RawObject = IndexedObject<IriIndex, BlankIdIndex, Span>;
pub type RawProps = Properties<NyaIri, BlankIdIndex, Span>;
pub type RawObject = IndexedObject<NyaIri, BlankIdIndex, Span>;
pub trait ParseApub<V: Vocabulary<Iri = IriIndex>>: Sized {
pub trait ParseApub: Sized {
/// Attempt to translate a raw JSON-LD object (as in, from the `json-ld` crate)
/// to this type. Returns the parsed object on success, and the input on failure.
/// Unsupported properties SHOULD be logged but otherwise ignored.
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self>;
fn parse_apub(obj: &RawObject) -> Option<Self>;
/// Only for internal use from subclasses, **DO NOT TOUCH**.
/// Can cause an infinite recursion loop that ends in a segfault.
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
Self::parse_apub(obj, vocab, ids)
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
Self::parse_apub(obj)
}
}
@ -89,28 +88,17 @@ macro_rules! ap_empty_child {
}
macro_rules! ap_empty_child_impl {
($child:ident, $parent:ty, $id1:ident, $id2:ident, $id3:ident) => {
($child:ident, $parent:ty, $id:expr) => {
ap_empty_child!($child, $parent);
impl<V> $crate::ap::trans::ParseApub<V> for $child
where
V: ::rdf_types::Vocabulary<Iri = ::rdf_types::vocabulary::IriIndex>,
{
fn parse_apub(
obj: &$crate::ap::trans::RawObject,
vocab: &V,
ids: &$crate::ap::vocab::Ids,
) -> Option<Self> {
$crate::ap::trans::matches_type(obj, &ids.$id1.$id2.$id3)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl $crate::ap::trans::ParseApub for $child {
fn parse_apub(obj: &$crate::ap::trans::RawObject) -> Option<Self> {
$crate::ap::trans::matches_type(obj, $id)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(
obj: &$crate::ap::trans::RawObject,
vocab: &V,
ids: &$crate::ap::vocab::Ids,
) -> Option<Self> {
<$parent>::_parse_apub_unchecked(obj, vocab, ids).map(|p| Self { _super: p })
unsafe fn _parse_apub_unchecked(obj: &$crate::ap::trans::RawObject) -> Option<Self> {
<$parent>::_parse_apub_unchecked(obj).map(|p| Self { _super: p })
}
}
};
@ -153,14 +141,14 @@ pub enum ApDocument {
Remote(String),
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for ApDocument {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
impl ParseApub for ApDocument {
fn parse_apub(obj: &RawObject) -> Option<Self> {
if let Some(s) = obj.as_value().and_then(|v| v.as_str()) {
Some(Self::Remote(String::from(s)))
} else {
Object::parse_apub(obj, vocab, ids)
Object::parse_apub(obj)
.map(Self::Object)
.or_else(|| Link::parse_apub(obj, vocab, ids).map(Self::Link))
.or_else(|| Link::parse_apub(obj).map(Self::Link))
}
}
}
@ -205,11 +193,11 @@ pub enum ImageOrLink {
Image(Image),
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for ImageOrLink {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
Image::parse_apub(obj, vocab, ids)
impl ParseApub for ImageOrLink {
fn parse_apub(obj: &RawObject) -> Option<Self> {
Image::parse_apub(obj)
.map(Self::Image)
.or_else(|| Link::parse_apub(obj, vocab, ids).map(Self::Link))
.or_else(|| Link::parse_apub(obj).map(Self::Link))
}
}
@ -236,13 +224,13 @@ pub enum Collection {
OrderedPage(OrderedCollectionPage),
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Collection {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
BaseCollection::parse_apub(obj, vocab, ids)
impl ParseApub for Collection {
fn parse_apub(obj: &RawObject) -> Option<Self> {
BaseCollection::parse_apub(obj)
.map(Self::Base)
.or_else(|| OrderedCollection::parse_apub(obj, vocab, ids).map(Self::Ordered))
.or_else(|| CollectionPage::parse_apub(obj, vocab, ids).map(Self::Page))
.or_else(|| OrderedCollectionPage::parse_apub(obj, vocab, ids).map(Self::OrderedPage))
.or_else(|| OrderedCollection::parse_apub(obj).map(Self::Ordered))
.or_else(|| CollectionPage::parse_apub(obj).map(Self::Page))
.or_else(|| OrderedCollectionPage::parse_apub(obj).map(Self::OrderedPage))
}
}
@ -288,23 +276,22 @@ pub struct BaseCollection {
}
ap_extends!(BaseCollection, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for BaseCollection {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
matches_type(obj, &ids.apub.object.collection)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl ParseApub for BaseCollection {
fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, apub::Collection)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
let prop_ids = &ids.apub.property;
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self {
AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s,
total_items: ph.parse_prop(&prop_ids.total_items),
current: ph.parse_prop_box(&prop_ids.current),
first: ph.parse_prop_box(&prop_ids.first),
last: ph.parse_prop_box(&prop_ids.last),
items: ph.parse_prop_vec(&prop_ids.items),
total_items: ph.parse_prop(apub::totalItems),
current: ph.parse_prop_box(apub::current),
first: ph.parse_prop_box(apub::first),
last: ph.parse_prop_box(apub::last),
items: ph.parse_prop_vec(apub::items),
})
}
}
@ -324,13 +311,7 @@ impl DebugApub for BaseCollection {
}
}
ap_empty_child_impl!(
OrderedCollection,
BaseCollection,
apub,
object,
ordered_collection
);
ap_empty_child_impl!(OrderedCollection, BaseCollection, apub::OrderedCollection);
pub struct CollectionPage {
_super: BaseCollection,
@ -340,21 +321,20 @@ pub struct CollectionPage {
}
ap_extends!(CollectionPage, BaseCollection);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for CollectionPage {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
matches_type(obj, &ids.apub.object.collection_page)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl ParseApub for CollectionPage {
fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, apub::CollectionPage)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
let prop_ids = &ids.apub.property;
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
BaseCollection::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self {
BaseCollection::_parse_apub_unchecked(obj).map(|s| Self {
_super: s,
part_of: ph.parse_prop_box(&prop_ids.part_of),
next: ph.parse_prop_box(&prop_ids.next),
prev: ph.parse_prop_box(&prop_ids.prev),
part_of: ph.parse_prop_box(apub::partOf),
next: ph.parse_prop_box(apub::next),
prev: ph.parse_prop_box(apub::prev),
})
}
}
@ -374,19 +354,18 @@ pub struct OrderedCollectionPage {
}
ap_extends!(OrderedCollectionPage, CollectionPage);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for OrderedCollectionPage {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
matches_type(obj, &ids.apub.object.ordered_collection_page)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl ParseApub for OrderedCollectionPage {
fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, apub::OrderedCollectionPage)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
let prop_ids = &ids.apub.property;
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
CollectionPage::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self {
CollectionPage::_parse_apub_unchecked(obj).map(|s| Self {
_super: s,
start_index: ph.parse_prop(&prop_ids.start_index),
start_index: ph.parse_prop(apub::startIndex),
})
}
}
@ -403,11 +382,11 @@ pub enum CollectionOrLink {
Link(BaseLink),
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for CollectionOrLink {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
Collection::parse_apub(obj, vocab, ids)
impl ParseApub for CollectionOrLink {
fn parse_apub(obj: &RawObject) -> Option<Self> {
Collection::parse_apub(obj)
.map(Self::Collection)
.or_else(|| BaseLink::parse_apub(obj, vocab, ids).map(Self::Link))
.or_else(|| BaseLink::parse_apub(obj).map(Self::Link))
}
}
@ -442,11 +421,11 @@ pub enum CollectionPageOrLink {
Link(BaseLink),
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for CollectionPageOrLink {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
CollectionPage::parse_apub(obj, vocab, ids)
impl ParseApub for CollectionPageOrLink {
fn parse_apub(obj: &RawObject) -> Option<Self> {
CollectionPage::parse_apub(obj)
.map(Self::CollectionPage)
.or_else(|| BaseLink::parse_apub(obj, vocab, ids).map(Self::Link))
.or_else(|| BaseLink::parse_apub(obj).map(Self::Link))
}
}
@ -476,20 +455,26 @@ impl DebugApub for CollectionPageOrLink {
}
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for IriBuf {
fn parse_apub(obj: &RawObject, vocab: &V, _ids: &Ids) -> Option<Self> {
vocab.iri(obj.as_iri()?).map(|iri| iri.to_owned())
impl ParseApub for IriBuf {
fn parse_apub(obj: &RawObject) -> Option<Self> {
obj.as_iri().map(|iri| iri.to_owned())
}
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for xsd::Duration {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> {
impl ParseApub for NyaIri {
fn parse_apub(obj: &RawObject) -> Option<Self> {
obj.as_iri().cloned()
}
}
impl ParseApub for xsd::Duration {
fn parse_apub(obj: &RawObject) -> Option<Self> {
xsd::Duration::from_str(obj.as_value()?.as_str()?).ok()
}
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for NaiveDateTime {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> {
impl ParseApub for NaiveDateTime {
fn parse_apub(obj: &RawObject) -> Option<Self> {
use chrono::{DateTime, Utc};
// TODO: close enough for now, but this only supports UTC
let dt = DateTime::<Utc>::from_str(obj.as_value()?.as_str()?).ok()?;
@ -497,41 +482,40 @@ impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for NaiveDateTime {
}
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Mime {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> {
impl ParseApub for Mime {
fn parse_apub(obj: &RawObject) -> Option<Self> {
Mime::from_str(obj.as_value()?.as_str()?).ok()
}
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for String {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> {
impl ParseApub for String {
fn parse_apub(obj: &RawObject) -> Option<Self> {
Some(obj.as_value()?.as_str()?.to_owned())
}
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for u32 {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> {
impl ParseApub for u32 {
fn parse_apub(obj: &RawObject) -> Option<Self> {
obj.as_value()?.as_number()?.as_u32()
}
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for f32 {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> {
impl ParseApub for f32 {
fn parse_apub(obj: &RawObject) -> Option<Self> {
Some(obj.as_value()?.as_number()?.as_f32_lossy())
}
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for bool {
fn parse_apub(obj: &RawObject, _vocab: &V, _ids: &Ids) -> Option<Self> {
impl ParseApub for bool {
fn parse_apub(obj: &RawObject) -> Option<Self> {
obj.as_bool()
}
}
fn matches_type(obj: &RawObject, iri_id: &Id) -> Option<()> {
let iri = iri_id.as_iri().expect("IDs should only refer to IRIs");
fn matches_type(obj: &RawObject, typ: impl HasContext) -> Option<()> {
let type_matches = obj
.types()
.any(|t| t.as_iri().is_some_and(|index| index == iri));
.any(|t| t.as_iri().is_some_and(|iri| iri == &typ.as_nya_iri()));
if type_matches {
Some(())
} else {
@ -539,48 +523,43 @@ fn matches_type(obj: &RawObject, iri_id: &Id) -> Option<()> {
}
}
struct PropHelper<'a, V: Vocabulary<Iri = IriIndex>> {
struct PropHelper<'a> {
props: &'a RawProps,
vocab: &'a V,
ids: &'a Ids,
}
impl<'a, V: Vocabulary<Iri = IriIndex>> PropHelper<'a, V> {
fn new(obj: &'a RawObject, vocab: &'a V, ids: &'a Ids) -> Option<Self> {
impl<'a> PropHelper<'a> {
fn new(obj: &'a RawObject) -> Option<Self> {
let props = obj.as_node()?.properties();
Some(Self { props, vocab, ids })
Some(Self { props })
}
fn parse_prop<T: ParseApub<V>>(&self, prop_id: &Id) -> Option<T> {
T::parse_apub(self.props.get_any(prop_id)?, self.vocab, self.ids).or_else(|| {
let iri = prop_id
.as_iri()
.and_then(|index| self.vocab.iri(index))
.expect("predefined IRIs must always exist");
fn parse_prop<T: ParseApub>(&self, prop: impl HasContext) -> Option<T> {
T::parse_apub(self.props.get_any(&prop.as_nya_iri())?).or_else(|| {
let iri = prop.as_iri();
warn!("Ignoring unknown value for property {iri}");
None
})
}
fn parse_prop_box<T: ParseApub<V>>(&self, prop_id: &Id) -> Option<Box<T>> {
self.parse_prop(prop_id).map(Box::new)
fn parse_prop_box<T: ParseApub>(&self, prop: impl HasContext) -> Option<Box<T>> {
self.parse_prop(prop).map(Box::new)
}
fn parse_prop_vec<T: ParseApub<V>>(&self, prop_id: &Id) -> Vec<T> {
fn parse_prop_vec<T: ParseApub>(&self, prop: impl HasContext) -> Vec<T> {
self.props
.get(prop_id)
.filter_map(|prop| T::parse_apub(prop, self.vocab, self.ids))
.get(&prop.as_nya_iri())
.filter_map(|prop| T::parse_apub(prop))
.collect()
}
}
impl DebugApub for IriBuf {
impl DebugApub for NyaIri {
fn debug_apub(&self, f: &mut fmt::Formatter, depth: usize) -> fmt::Result {
self.debug_apub_members(f, depth)
}
fn debug_apub_members(&self, f: &mut fmt::Formatter, _depth: usize) -> fmt::Result {
writeln!(f, "{}", self.as_str())
writeln!(f, "{self}")
}
}

View file

@ -1,15 +1,12 @@
use chrono::NaiveDateTime;
use iref::IriBuf;
use mime::Mime;
use rdf_types::vocabulary::IriIndex;
use rdf_types::Vocabulary;
use std::fmt;
use crate::ap::trans::{
activity, actor, matches_type, ApDocument, BaseCollection, Collection, DebugApub, ImageOrLink,
ParseApub, PropHelper, RawObject,
};
use crate::ap::vocab::Ids;
use crate::ap::vocab::{apub, toot, NyaIri};
use crate::util::xsd;
// The ActivityStreams vocabulary actually defines Image, Audio, Video, and Page
@ -34,29 +31,29 @@ pub enum Object {
Video(Video),
Collection(Collection),
// Mastodon extensions
// ActivityPub draft, used by Mastodon
Emoji(Emoji),
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Object {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
activity::Activity::parse_apub(obj, vocab, ids)
impl ParseApub for Object {
fn parse_apub(obj: &RawObject) -> Option<Self> {
activity::Activity::parse_apub(obj)
.map(Self::Activity)
.or_else(|| actor::Actor::parse_apub(obj, vocab, ids).map(Self::Actor))
.or_else(|| Article::parse_apub(obj, vocab, ids).map(Self::Article))
.or_else(|| Audio::parse_apub(obj, vocab, ids).map(Self::Audio))
.or_else(|| Document::parse_apub(obj, vocab, ids).map(Self::Document))
.or_else(|| Event::parse_apub(obj, vocab, ids).map(Self::Event))
.or_else(|| Image::parse_apub(obj, vocab, ids).map(Self::Image))
.or_else(|| Note::parse_apub(obj, vocab, ids).map(Self::Note))
.or_else(|| Page::parse_apub(obj, vocab, ids).map(Self::Page))
.or_else(|| Place::parse_apub(obj, vocab, ids).map(Self::Place))
.or_else(|| Profile::parse_apub(obj, vocab, ids).map(Self::Profile))
.or_else(|| Relationship::parse_apub(obj, vocab, ids).map(Self::Relationship))
.or_else(|| Tombstone::parse_apub(obj, vocab, ids).map(Self::Tombstone))
.or_else(|| Video::parse_apub(obj, vocab, ids).map(Self::Video))
.or_else(|| Collection::parse_apub(obj, vocab, ids).map(Self::Collection))
.or_else(|| Emoji::parse_apub(obj, vocab, ids).map(Self::Emoji))
.or_else(|| actor::Actor::parse_apub(obj).map(Self::Actor))
.or_else(|| Article::parse_apub(obj).map(Self::Article))
.or_else(|| Audio::parse_apub(obj).map(Self::Audio))
.or_else(|| Document::parse_apub(obj).map(Self::Document))
.or_else(|| Event::parse_apub(obj).map(Self::Event))
.or_else(|| Image::parse_apub(obj).map(Self::Image))
.or_else(|| Note::parse_apub(obj).map(Self::Note))
.or_else(|| Page::parse_apub(obj).map(Self::Page))
.or_else(|| Place::parse_apub(obj).map(Self::Place))
.or_else(|| Profile::parse_apub(obj).map(Self::Profile))
.or_else(|| Relationship::parse_apub(obj).map(Self::Relationship))
.or_else(|| Tombstone::parse_apub(obj).map(Self::Tombstone))
.or_else(|| Video::parse_apub(obj).map(Self::Video))
.or_else(|| Collection::parse_apub(obj).map(Self::Collection))
.or_else(|| Emoji::parse_apub(obj).map(Self::Emoji))
}
}
@ -129,7 +126,7 @@ impl DebugApub for Object {
}
pub struct AbstractObject {
pub id: Option<IriBuf>,
pub id: Option<NyaIri>,
pub attachment: Vec<ApDocument>,
pub attributed_to: Vec<ApDocument>,
pub audience: Vec<ApDocument>,
@ -149,7 +146,7 @@ pub struct AbstractObject {
pub summary: Option<String>, // TODO: this could be a langString
pub tag: Option<Box<ApDocument>>,
pub updated: Option<NaiveDateTime>,
pub url: Option<IriBuf>,
pub url: Option<NyaIri>,
pub to: Option<Box<ApDocument>>,
pub bto: Option<Box<ApDocument>>,
pub cc: Option<Box<ApDocument>>,
@ -158,43 +155,38 @@ pub struct AbstractObject {
pub duration: Option<xsd::Duration>,
}
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for AbstractObject {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
let prop_ids = &ids.apub.property;
impl ParseApub for AbstractObject {
fn parse_apub(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
Some(Self {
id: obj
.id()
.and_then(|id| id.as_iri())
.and_then(|idx| vocab.iri(idx))
.map(|iri| iri.to_owned()),
attachment: ph.parse_prop_vec(&prop_ids.attachment),
attributed_to: ph.parse_prop_vec(&prop_ids.attributed_to),
audience: ph.parse_prop_vec(&prop_ids.audience),
content: ph.parse_prop(&prop_ids.content),
context: ph.parse_prop_box(&prop_ids.context),
name: ph.parse_prop(&prop_ids.name),
end_time: ph.parse_prop(&prop_ids.end_time),
generator: ph.parse_prop_box(&prop_ids.generator),
icon: ph.parse_prop_box(&prop_ids.icon),
image: ph.parse_prop_box(&prop_ids.image),
in_reply_to: ph.parse_prop_vec(&prop_ids.in_reply_to),
location: ph.parse_prop_box(&prop_ids.location),
preview: ph.parse_prop_box(&prop_ids.preview),
published: ph.parse_prop(&prop_ids.published),
replies: ph.parse_prop_box(&prop_ids.replies),
start_time: ph.parse_prop(&prop_ids.start_time),
summary: ph.parse_prop(&prop_ids.summary),
tag: ph.parse_prop_box(&prop_ids.tag),
updated: ph.parse_prop(&prop_ids.updated),
url: ph.parse_prop(&prop_ids.url),
to: ph.parse_prop_box(&prop_ids.to),
bto: ph.parse_prop_box(&prop_ids.bto),
cc: ph.parse_prop_box(&prop_ids.cc),
bcc: ph.parse_prop_box(&prop_ids.bcc),
media_type: ph.parse_prop(&prop_ids.media_type),
duration: ph.parse_prop(&prop_ids.duration),
id: obj.id().and_then(|id| id.as_iri().cloned()),
attachment: ph.parse_prop_vec(apub::attachment),
attributed_to: ph.parse_prop_vec(apub::attributedTo),
audience: ph.parse_prop_vec(apub::audience),
content: ph.parse_prop(apub::content),
context: ph.parse_prop_box(apub::context),
name: ph.parse_prop(apub::name),
end_time: ph.parse_prop(apub::endTime),
generator: ph.parse_prop_box(apub::generator),
icon: ph.parse_prop_box(apub::icon),
image: ph.parse_prop_box(apub::image),
in_reply_to: ph.parse_prop_vec(apub::inReplyTo),
location: ph.parse_prop_box(apub::location),
preview: ph.parse_prop_box(apub::preview),
published: ph.parse_prop(apub::published),
replies: ph.parse_prop_box(apub::replies),
start_time: ph.parse_prop(apub::startTime),
summary: ph.parse_prop(apub::summary),
tag: ph.parse_prop_box(apub::tag),
updated: ph.parse_prop(apub::updated),
url: ph.parse_prop(apub::url),
to: ph.parse_prop_box(apub::to),
bto: ph.parse_prop_box(apub::bto),
cc: ph.parse_prop_box(apub::cc),
bcc: ph.parse_prop_box(apub::bcc),
media_type: ph.parse_prop(apub::mediaType),
duration: ph.parse_prop(apub::duration),
})
}
}
@ -239,21 +231,20 @@ pub struct Relationship {
}
ap_extends!(Relationship, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Relationship {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
matches_type(obj, &ids.apub.object.relationship)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl ParseApub for Relationship {
fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, apub::Relationship)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
let prop_ids = &ids.apub.property;
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self {
AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s,
subject: ph.parse_prop_box(&prop_ids.subject),
object: ph.parse_prop_box(&prop_ids.object),
relationship: ph.parse_prop_box(&prop_ids.relationship),
subject: ph.parse_prop_box(apub::subject),
object: ph.parse_prop_box(apub::object),
relationship: ph.parse_prop_box(apub::relationship),
})
}
}
@ -278,24 +269,23 @@ pub struct Place {
}
ap_extends!(Place, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Place {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
matches_type(obj, &ids.apub.object.place)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl ParseApub for Place {
fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, apub::Place)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
let prop_ids = &ids.apub.property;
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self {
AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s,
accuracy: ph.parse_prop(&prop_ids.accuracy),
altitude: ph.parse_prop(&prop_ids.altitude),
latitude: ph.parse_prop(&prop_ids.latitude),
longitude: ph.parse_prop(&prop_ids.longitude),
radius: ph.parse_prop(&prop_ids.radius),
units: ph.parse_prop(&prop_ids.units),
accuracy: ph.parse_prop(apub::accuracy),
altitude: ph.parse_prop(apub::altitude),
latitude: ph.parse_prop(apub::latitude),
longitude: ph.parse_prop(apub::longitude),
radius: ph.parse_prop(apub::radius),
units: ph.parse_prop(apub::units),
})
}
}
@ -318,19 +308,18 @@ pub struct Profile {
}
ap_extends!(Profile, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Profile {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
matches_type(obj, &ids.apub.object.profile)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl ParseApub for Profile {
fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, apub::Profile)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
let prop_ids = &ids.apub.property;
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self {
AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s,
describes: ph.parse_prop_box(&prop_ids.describes),
describes: ph.parse_prop_box(apub::describes),
})
}
}
@ -349,20 +338,19 @@ pub struct Tombstone {
}
ap_extends!(Tombstone, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Tombstone {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
matches_type(obj, &ids.apub.object.tombstone)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl ParseApub for Tombstone {
fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, apub::Tombstone)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
let prop_ids = &ids.apub.property;
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self {
AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s,
former_type: ph.parse_prop(&prop_ids.former_type),
deleted: ph.parse_prop(&prop_ids.deleted),
former_type: ph.parse_prop(apub::formerType),
deleted: ph.parse_prop(apub::deleted),
})
}
}
@ -383,20 +371,20 @@ pub struct Image {
}
ap_extends!(Image, AbstractObject);
impl<V: Vocabulary<Iri = IriIndex>> ParseApub<V> for Image {
fn parse_apub(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
matches_type(obj, &ids.apub.object.image)?;
unsafe { Self::_parse_apub_unchecked(obj, vocab, ids) }
impl ParseApub for Image {
fn parse_apub(obj: &RawObject) -> Option<Self> {
matches_type(obj, apub::Image)?;
unsafe { Self::_parse_apub_unchecked(obj) }
}
unsafe fn _parse_apub_unchecked(obj: &RawObject, vocab: &V, ids: &Ids) -> Option<Self> {
let ph = PropHelper::new(obj, vocab, ids)?;
unsafe fn _parse_apub_unchecked(obj: &RawObject) -> Option<Self> {
let ph = PropHelper::new(obj)?;
let focal_point: Vec<f32> = ph.parse_prop_vec(&ids.toot.props.focal_point);
AbstractObject::_parse_apub_unchecked(obj, vocab, ids).map(|s| Self {
let focal_point: Vec<f32> = ph.parse_prop_vec(toot::focalPoint);
AbstractObject::_parse_apub_unchecked(obj).map(|s| Self {
_super: s,
focal_point: (focal_point.len() >= 2).then(|| [focal_point[0], focal_point[1]]),
blurhash: ph.parse_prop(&ids.toot.props.blurhash),
blurhash: ph.parse_prop(toot::blurhash),
})
}
}
@ -418,11 +406,11 @@ impl DebugApub for Image {
}
}
ap_empty_child_impl!(Article, AbstractObject, apub, object, article);
ap_empty_child_impl!(Document, AbstractObject, apub, object, document);
ap_empty_child_impl!(Audio, AbstractObject, apub, object, audio);
ap_empty_child_impl!(Video, AbstractObject, apub, object, video);
ap_empty_child_impl!(Note, AbstractObject, apub, object, note);
ap_empty_child_impl!(Page, AbstractObject, apub, object, page);
ap_empty_child_impl!(Event, AbstractObject, apub, object, event);
ap_empty_child_impl!(Emoji, AbstractObject, toot, class, emoji);
// Object subtypes that carry no fields of their own. Each invocation
// presumably expands to the same matches_type + AbstractObject delegation
// pattern as the hand-written ParseApub impls above — see the macro
// definition to confirm.
ap_empty_child_impl!(Article, AbstractObject, apub::Article);
ap_empty_child_impl!(Document, AbstractObject, apub::Document);
ap_empty_child_impl!(Audio, AbstractObject, apub::Audio);
ap_empty_child_impl!(Video, AbstractObject, apub::Video);
ap_empty_child_impl!(Note, AbstractObject, apub::Note);
ap_empty_child_impl!(Page, AbstractObject, apub::Page);
ap_empty_child_impl!(Event, AbstractObject, apub::Event);
ap_empty_child_impl!(Emoji, AbstractObject, toot::Emoji);

View file

@ -1,467 +1,579 @@
//! An annoyingly huge collection of all known IRIs, from all supported namespaces.
//! ActivityStreams vocabulary interface including all well-known IRIs.
//!
//! This might be replaced with an entirely custom implementation of [`Vocabulary`]
//! in the future because reinitializing the entire AP vocabulary from scratch for
//! every single post to the inbox is probably a bit inefficient. I hate my life.
//! This module contains two things: The JSON-LD class and property definitions,
//! and the interface we provide to the `json_ld` crate to do its job.
//! The latter consists of [`NyaVocabulary`] and [`NyaIri`], the two central
//! interfaces to the processor. The vocabulary is supposed to store all IRIs
//! encountered during document expansion so the actual IRI type used by the
//! processor can be a simple index that is trivial to do comparisons with.
//! We only really care about the IRIs we know and discard any other information
//! though. Therefore, our own IRI type stores either a reference to a static
//! array of well-known IRIs or, if we encounter one we don't know, an Arc.
//!
//! This makes working with IRIs way more convenient because we don't have to
//! pass references to the vocabulary everywhere. Furthermore, since URLs are
//! a subset of IRIs, all URL properties of ActivityPub documents are parsed as
//! IRIs. These URLs have to be stored *somewhere* anyway, so by using Arcs we
//! don't have to copy them over to the output struct.
use iref::{Iri, IriBuf};
use langtag::{AsLanguageTag, LanguageTag, LanguageTagBuf};
use rdf_types::vocabulary::{BlankIdIndex, LanguageTagIndex, LiteralIndex};
use rdf_types::{
vocabulary::{BlankIdIndex, IriIndex},
Subject, VocabularyMut,
literal, BlankId, BlankIdBuf, BlankIdVocabulary, BlankIdVocabularyMut, IriVocabulary,
IriVocabularyMut, LanguageTagVocabulary, LanguageTagVocabularyMut, Literal, LiteralVocabulary,
LiteralVocabularyMut,
};
use static_iref::iri;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::sync::Arc;
pub struct Ids {
/// IRI identifiers for the base ActivityStreams and ActivityPub
/// namespace (`https://www.w3.org/ns/activitystreams#`).
pub apub: ApubIds,
/// IRI identifiers for Mastodon's extension namespace
/// (`http://joinmastodon.org/ns#`).
pub toot: TootIds,
use nyanoblog_macros::define_json_ns;
/// Our vocabulary implementation.
///
/// We assert that 99% of the IRIs encountered in the wild are well-known
/// anyway, which means that using hash tables or similar data structures
/// would likely have a negative performance impact in the average case.
/// This is why we just store the few remaining ones in plain vectors and
/// search them linearly.
// TODO: check if this is actually true
// TODO: what about blank ids, literals, and language tags? what even are those???
pub struct NyaVocabulary {
    // Custom (not well-known) IRIs; the Arc is shared with every
    // NyaIri::Custom handed out for the same IRI.
    iri: Vec<Arc<IriBuf>>,
    // Blank node identifiers; the BlankId associated type is an index here.
    blank_id: Vec<BlankIdBuf>,
    // Interned literals; the Literal associated type is an index here.
    literal: Vec<Literal<literal::Type<NyaIri, LanguageTagIndex>, String>>,
    // Interned language tags, addressed by LanguageTagIndex.
    language_tag: Vec<LanguageTagBuf>,
}
pub type Id = Subject<IriIndex, BlankIdIndex>;
/// Transform
/// `key => iri!("...")`
/// into
/// `key: Id::Iri(vocab.insert(iri!("..."))`
/// so the lines don't exceed five trillion characters.
macro_rules! populate_ids {
($vocab:ident, $($name:ident => $iri:expr),* $(,)?) => {
impl NyaVocabulary {
pub fn new() -> Self {
Self {
$($name: Id::Iri($vocab.insert($iri)),)*
iri: Vec::new(),
blank_id: Vec::new(),
literal: Vec::new(),
language_tag: Vec::new(),
}
}
}
impl Ids {
pub fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self {
Self {
apub: ApubIds::populate(vocab),
toot: TootIds::populate(vocab),
// XXX this probably takes up 16 bytes but could be compressed to 8 with some pointer magic
#[derive(Clone)]
pub enum NyaIri {
    /// Well-known IRI, references an entry from [`HasContext::MEMBERS`].
    ///
    /// **ATTENTION:** Never, ever, EVER clone the `Iri` and throw it in a new
    /// instance of `NyaIri`!  We optimize comparisons between two
    /// `NyaIri::Known` by just comparing the raw pointers, because we know
    /// they will always reference the same underlying static array in memory.
    /// This is deliberately a raw pointer, cast from a `&'static Iri<'static>`,
    /// in the hope that requiring an `unsafe` block prevents accidental derefs.
    Known(*const Iri<'static>),
    /// A new IRI that is not hardcoded in the app.
    Custom(Arc<IriBuf>),
}
// SAFETY: the raw pointer in `Known` is always cast from a
// `&'static Iri<'static>`, which is immutable and valid for the whole
// program lifetime on every thread; the `Custom` variant is an `Arc`,
// which is already Send + Sync for its contents.
unsafe impl Send for NyaIri {}
unsafe impl Sync for NyaIri {}
impl NyaIri {
    /// Returns an owned copy of the underlying IRI, regardless of variant.
    pub fn to_owned(&self) -> IriBuf {
        match self {
            // SAFETY: `Known` pointers are cast from &'static Iri<'static>,
            // so the dereference is always valid.
            NyaIri::Known(iri) => unsafe { **iri }.to_owned(),
            NyaIri::Custom(arc) => arc.as_ref().to_owned(),
        }
    }
}
pub struct ApubIds {
pub object: ApubObjectIds,
pub activity: ApubActivityIds,
pub link: ApubLinkIds,
pub property: ApubPropertyIds,
impl fmt::Display for NyaIri {
    /// Renders the underlying IRI, no matter which variant holds it.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            // SAFETY: `Known` pointers are cast from &'static Iri<'static>.
            NyaIri::Known(ptr) => write!(f, "{}", unsafe { **ptr }),
            NyaIri::Custom(arc) => write!(f, "{}", arc.as_iri()),
        }
    }
}
impl ApubIds {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self {
Self {
object: ApubObjectIds::populate(vocab),
activity: ApubActivityIds::populate(vocab),
link: ApubLinkIds::populate(vocab),
property: ApubPropertyIds::populate(vocab),
impl hashbrown::Equivalent<json_ld::Id<NyaIri, BlankIdIndex>> for NyaIri {
    /// An IRI can only be equivalent to a *valid* IRI id; blank node ids
    /// and invalid ids never match.
    fn equivalent(&self, key: &json_ld::Id<NyaIri, BlankIdIndex>) -> bool {
        match key {
            json_ld::Id::Valid(rdf_types::Id::Iri(iri)) => iri == self,
            _ => false,
        }
    }
}
pub struct ApubObjectIds {
pub activity: Id,
pub application: Id,
pub article: Id,
pub audio: Id,
pub collection: Id,
pub collection_page: Id,
pub relationship: Id,
pub document: Id,
pub event: Id,
pub group: Id,
pub image: Id,
pub intransitive_activity: Id,
pub note: Id,
pub object: Id,
pub ordered_collection: Id,
pub ordered_collection_page: Id,
pub organization: Id,
pub page: Id,
pub person: Id,
pub place: Id,
pub profile: Id,
pub question: Id,
pub service: Id,
pub tombstone: Id,
pub video: Id,
impl IriVocabulary for NyaVocabulary {
    type Iri = NyaIri;

    fn iri<'i>(&'i self, id: &'i Self::Iri) -> Option<Iri<'i>> {
        // A NyaIri always carries its own data, so resolution cannot fail.
        let resolved = match id {
            // SAFETY: `Known` pointers are cast from &'static Iri<'static>.
            NyaIri::Known(iri) => unsafe { **iri },
            NyaIri::Custom(arc) => arc.as_iri(),
        };
        Some(resolved)
    }

    fn get(&self, iri: Iri) -> Option<Self::Iri> {
        // Try the static well-known tables first, then fall back to a
        // linear scan over the custom IRIs recorded so far.
        if let Some(known) = resolve_known_iri(iri) {
            return Some(known);
        }
        let entry = self.iri.iter().find(|buf| buf.as_iri() == iri)?;
        Some(NyaIri::Custom(entry.clone()))
    }
}
impl ApubObjectIds {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self {
populate_ids! {
vocab,
impl IriVocabularyMut for NyaVocabulary {
fn insert(&mut self, iri: Iri) -> Self::Iri {
match self.get(iri) {
Some(iri) => iri,
None => {
let arc = Arc::new(iri.to_owned());
self.iri.push(arc.clone());
NyaIri::Custom(arc)
}
}
}
activity => iri!("https://www.w3.org/ns/activitystreams#Activity"),
application => iri!("https://www.w3.org/ns/activitystreams#Application"),
article => iri!("https://www.w3.org/ns/activitystreams#Article"),
audio => iri!("https://www.w3.org/ns/activitystreams#Audio"),
collection => iri!("https://www.w3.org/ns/activitystreams#Collection"),
collection_page => iri!("https://www.w3.org/ns/activitystreams#CollectionPage"),
relationship => iri!("https://www.w3.org/ns/activitystreams#Relationship"),
document => iri!("https://www.w3.org/ns/activitystreams#Document"),
event => iri!("https://www.w3.org/ns/activitystreams#Event"),
group => iri!("https://www.w3.org/ns/activitystreams#Group"),
image => iri!("https://www.w3.org/ns/activitystreams#Image"),
intransitive_activity => iri!("https://www.w3.org/ns/activitystreams#IntransitiveActivity"),
note => iri!("https://www.w3.org/ns/activitystreams#Note"),
object => iri!("https://www.w3.org/ns/activitystreams#Object"),
ordered_collection => iri!("https://www.w3.org/ns/activitystreams#OrderedCollection"),
ordered_collection_page => iri!("https://www.w3.org/ns/activitystreams#OrderedCollectionPage"),
organization => iri!("https://www.w3.org/ns/activitystreams#Organization"),
page => iri!("https://www.w3.org/ns/activitystreams#Page"),
person => iri!("https://www.w3.org/ns/activitystreams#Person"),
place => iri!("https://www.w3.org/ns/activitystreams#Place"),
profile => iri!("https://www.w3.org/ns/activitystreams#Profile"),
question => iri!("https://www.w3.org/ns/activitystreams#Question"),
service => iri!("https://www.w3.org/ns/activitystreams#Service"),
tombstone => iri!("https://www.w3.org/ns/activitystreams#Tombstone"),
video => iri!("https://www.w3.org/ns/activitystreams#Video"),
fn insert_owned(&mut self, iri: IriBuf) -> Self::Iri {
match self.get(iri.as_iri()) {
Some(iri) => iri,
None => {
let arc = Arc::new(iri);
self.iri.push(arc.clone());
NyaIri::Custom(arc)
}
}
}
}
pub struct ApubActivityIds {
pub accept: Id,
pub add: Id,
pub announce: Id,
pub arrive: Id,
pub block: Id,
pub create: Id,
pub delete: Id,
pub dislike: Id,
pub follow: Id,
pub flag: Id,
pub ignore: Id,
pub invite: Id,
pub join: Id,
pub leave: Id,
pub like: Id,
pub listen: Id,
pub mov: Id, // move
pub offer: Id,
pub read: Id,
pub remove: Id,
pub reject: Id,
pub tentative_accept: Id,
pub tentative_reject: Id,
pub travel: Id,
pub undo: Id,
pub update: Id,
pub view: Id,
impl BlankIdVocabulary for NyaVocabulary {
    type BlankId = BlankIdIndex;

    // Resolve an index back to the blank node identifier it was assigned.
    fn blank_id<'b>(&'b self, id: &'b Self::BlankId) -> Option<&'b BlankId> {
        let buf = self.blank_id.get(usize::from(*id))?;
        Some(buf.as_blank_id_ref())
    }

    // Reverse lookup by linear scan, mirroring the other vocabularies.
    fn get_blank_id(&self, id: &BlankId) -> Option<Self::BlankId> {
        let index = self.blank_id.iter().position(|buf| buf == id)?;
        Some(BlankIdIndex::from(index))
    }
}
impl ApubActivityIds {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self {
populate_ids! {
vocab,
impl BlankIdVocabularyMut for NyaVocabulary {
fn insert_blank_id(&mut self, id: &BlankId) -> Self::BlankId {
match self.get_blank_id(id) {
Some(id) => id,
None => {
let index = self.blank_id.len();
self.blank_id.push((*id).to_owned());
BlankIdIndex::from(index)
}
}
}
accept => iri!("https://www.w3.org/ns/activitystreams#Accept"),
add => iri!("https://www.w3.org/ns/activitystreams#Add"),
announce => iri!("https://www.w3.org/ns/activitystreams#Announce"),
arrive => iri!("https://www.w3.org/ns/activitystreams#Arrive"),
block => iri!("https://www.w3.org/ns/activitystreams#Block"),
create => iri!("https://www.w3.org/ns/activitystreams#Create"),
delete => iri!("https://www.w3.org/ns/activitystreams#Delete"),
dislike => iri!("https://www.w3.org/ns/activitystreams#Dislike"),
follow => iri!("https://www.w3.org/ns/activitystreams#Follow"),
flag => iri!("https://www.w3.org/ns/activitystreams#Flag"),
ignore => iri!("https://www.w3.org/ns/activitystreams#Ignore"),
invite => iri!("https://www.w3.org/ns/activitystreams#Invite"),
join => iri!("https://www.w3.org/ns/activitystreams#Join"),
leave => iri!("https://www.w3.org/ns/activitystreams#Leave"),
like => iri!("https://www.w3.org/ns/activitystreams#Like"),
listen => iri!("https://www.w3.org/ns/activitystreams#Listen"),
mov => iri!("https://www.w3.org/ns/activitystreams#Move"),
offer => iri!("https://www.w3.org/ns/activitystreams#Offer"),
read => iri!("https://www.w3.org/ns/activitystreams#Read"),
remove => iri!("https://www.w3.org/ns/activitystreams#Remove"),
reject => iri!("https://www.w3.org/ns/activitystreams#Reject"),
tentative_accept => iri!("https://www.w3.org/ns/activitystreams#TentativeAccept"),
tentative_reject => iri!("https://www.w3.org/ns/activitystreams#TentativeReject"),
travel => iri!("https://www.w3.org/ns/activitystreams#Travel"),
undo => iri!("https://www.w3.org/ns/activitystreams#Undo"),
update => iri!("https://www.w3.org/ns/activitystreams#Update"),
view => iri!("https://www.w3.org/ns/activitystreams#View"),
fn insert_owned_blank_id(&mut self, id: BlankIdBuf) -> Self::BlankId {
match self.get_blank_id(id.as_blank_id_ref()) {
Some(id) => id,
None => {
let index = self.blank_id.len();
self.blank_id.push(id);
BlankIdIndex::from(index)
}
}
}
}
pub struct ApubLinkIds {
pub link: Id,
pub mention: Id,
pub is_following: Id,
pub is_followed_by: Id,
pub is_contact: Id,
pub is_member: Id,
// Mastodon extension
pub hashtag: Id,
impl LiteralVocabulary for NyaVocabulary {
    type Literal = LiteralIndex;
    type Type = literal::Type<NyaIri, LanguageTagIndex>;
    type Value = String;

    // Resolve a literal index back to the interned literal.
    fn literal<'l>(
        &'l self,
        id: &'l Self::Literal,
    ) -> Option<&'l Literal<Self::Type, Self::Value>> {
        self.literal.get(usize::from(*id))
    }

    // Reverse lookup by linear scan, mirroring the other vocabularies.
    fn get_literal(&self, id: &Literal<Self::Type, Self::Value>) -> Option<Self::Literal> {
        let index = self.literal.iter().position(|entry| entry == id)?;
        Some(LiteralIndex::from(index))
    }
}
impl ApubLinkIds {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self {
populate_ids! {
vocab,
impl LiteralVocabularyMut for NyaVocabulary {
fn insert_literal(&mut self, value: &Literal<Self::Type, Self::Value>) -> Self::Literal {
match self.get_literal(value) {
Some(id) => id,
None => {
let index = self.literal.len();
self.literal.push(value.clone());
LiteralIndex::from(index)
}
}
}
link => iri!("https://www.w3.org/ns/activitystreams#Link"),
mention => iri!("https://www.w3.org/ns/activitystreams#Mention"),
is_following => iri!("https://www.w3.org/ns/activitystreams#IsFollowing"),
is_followed_by => iri!("https://www.w3.org/ns/activitystreams#IsFollowedBy"),
is_contact => iri!("https://www.w3.org/ns/activitystreams#IsContact"),
is_member => iri!("https://www.w3.org/ns/activitystreams#IsMember"),
hashtag => iri!("https://www.w3.org/ns/activitystreams#Hashtag"),
fn insert_owned_literal(&mut self, id: Literal<Self::Type, Self::Value>) -> Self::Literal {
match self.get_literal(&id) {
Some(index) => index,
None => {
let index = self.literal.len();
self.literal.push(id);
LiteralIndex::from(index)
}
}
}
}
pub struct ApubPropertyIds {
pub subject: Id,
pub relationship: Id,
pub actor: Id,
pub attributed_to: Id,
pub attachment: Id,
pub attachments: Id,
pub author: Id,
pub bcc: Id,
pub bto: Id,
pub cc: Id,
pub context: Id,
pub current: Id,
pub first: Id,
pub generator: Id,
pub icon: Id,
pub image: Id,
pub in_reply_to: Id,
pub items: Id,
pub instrument: Id,
pub ordered_items: Id,
pub last: Id,
pub location: Id,
pub next: Id,
pub object: Id,
pub one_of: Id,
pub any_of: Id,
pub closed: Id,
pub origin: Id,
pub accuracy: Id,
pub prev: Id,
pub preview: Id,
pub provider: Id,
pub replies: Id,
pub result: Id,
pub audience: Id,
pub part_of: Id,
pub tag: Id,
pub tags: Id,
pub target: Id,
pub to: Id,
pub url: Id,
pub altitude: Id,
pub content: Id,
pub name: Id,
pub downstream_duplicates: Id,
pub duration: Id,
pub end_time: Id,
pub height: Id,
pub href: Id,
pub hreflang: Id,
pub latitude: Id,
pub longitude: Id,
pub media_type: Id,
pub published: Id,
pub radius: Id,
pub rating: Id,
pub rel: Id,
pub start_index: Id,
pub start_time: Id,
pub summary: Id,
pub total_items: Id,
pub units: Id,
pub updated: Id,
pub upstream_duplicates: Id,
pub verb: Id,
pub width: Id,
pub describes: Id,
pub former_type: Id,
pub deleted: Id,
impl LanguageTagVocabulary for NyaVocabulary {
type LanguageTag = LanguageTagIndex;
// ActivityPub extensions
pub endpoints: Id,
pub following: Id,
pub followers: Id,
pub inbox: Id,
pub liked: Id,
pub shares: Id,
pub likes: Id,
pub oauth_authorization_endpoint: Id,
pub oauth_token_endpoint: Id,
pub outbox: Id,
pub preferred_username: Id,
pub provide_client_key: Id,
pub proxy_url: Id,
pub shared_inbox: Id,
pub sign_client_key: Id,
pub source: Id,
pub streams: Id,
pub upload_media: Id,
fn language_tag<'l>(&'l self, id: &'l Self::LanguageTag) -> Option<LanguageTag<'l>> {
self.language_tag
.get(usize::from(*id))
.map(|tag| tag.as_language_tag())
}
// DID Core extensions
pub also_known_as: Id,
fn get_language_tag(&self, id: LanguageTag) -> Option<Self::LanguageTag> {
self.language_tag
.iter()
.enumerate()
.find_map(|(index, buf)| (buf == &id).then(|| LanguageTagIndex::from(index)))
}
}
impl ApubPropertyIds {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self {
populate_ids! {
vocab,
impl LanguageTagVocabularyMut for NyaVocabulary {
fn insert_language_tag(&mut self, value: LanguageTag) -> Self::LanguageTag {
match self.get_language_tag(value) {
Some(id) => id,
None => {
let index = self.language_tag.len();
self.language_tag.push(value.cloned());
LanguageTagIndex::from(index)
}
}
}
subject => iri!("https://www.w3.org/ns/activitystreams#subject"),
relationship => iri!("https://www.w3.org/ns/activitystreams#relationship"),
actor => iri!("https://www.w3.org/ns/activitystreams#actor"),
attributed_to => iri!("https://www.w3.org/ns/activitystreams#attributedTo"),
attachment => iri!("https://www.w3.org/ns/activitystreams#attachment"),
attachments => iri!("https://www.w3.org/ns/activitystreams#attachments"),
author => iri!("https://www.w3.org/ns/activitystreams#author"),
bcc => iri!("https://www.w3.org/ns/activitystreams#bcc"),
bto => iri!("https://www.w3.org/ns/activitystreams#bto"),
cc => iri!("https://www.w3.org/ns/activitystreams#cc"),
context => iri!("https://www.w3.org/ns/activitystreams#context"),
current => iri!("https://www.w3.org/ns/activitystreams#current"),
first => iri!("https://www.w3.org/ns/activitystreams#first"),
generator => iri!("https://www.w3.org/ns/activitystreams#generator"),
icon => iri!("https://www.w3.org/ns/activitystreams#icon"),
image => iri!("https://www.w3.org/ns/activitystreams#image"),
in_reply_to => iri!("https://www.w3.org/ns/activitystreams#inReplyTo"),
items => iri!("https://www.w3.org/ns/activitystreams#items"),
instrument => iri!("https://www.w3.org/ns/activitystreams#instrument"),
ordered_items => iri!("https://www.w3.org/ns/activitystreams#orderedItems"),
last => iri!("https://www.w3.org/ns/activitystreams#last"),
location => iri!("https://www.w3.org/ns/activitystreams#location"),
next => iri!("https://www.w3.org/ns/activitystreams#next"),
object => iri!("https://www.w3.org/ns/activitystreams#object"),
one_of => iri!("https://www.w3.org/ns/activitystreams#oneOf"),
any_of => iri!("https://www.w3.org/ns/activitystreams#anyOf"),
closed => iri!("https://www.w3.org/ns/activitystreams#closed"),
origin => iri!("https://www.w3.org/ns/activitystreams#origin"),
accuracy => iri!("https://www.w3.org/ns/activitystreams#accuracy"),
prev => iri!("https://www.w3.org/ns/activitystreams#prev"),
preview => iri!("https://www.w3.org/ns/activitystreams#preview"),
provider => iri!("https://www.w3.org/ns/activitystreams#provider"),
replies => iri!("https://www.w3.org/ns/activitystreams#replies"),
result => iri!("https://www.w3.org/ns/activitystreams#result"),
audience => iri!("https://www.w3.org/ns/activitystreams#audience"),
part_of => iri!("https://www.w3.org/ns/activitystreams#partOf"),
tag => iri!("https://www.w3.org/ns/activitystreams#tag"),
tags => iri!("https://www.w3.org/ns/activitystreams#tags"),
target => iri!("https://www.w3.org/ns/activitystreams#taget"),
to => iri!("https://www.w3.org/ns/activitystreams#to"),
url => iri!("https://www.w3.org/ns/activitystreams#url"),
altitude => iri!("https://www.w3.org/ns/activitystreams#altitude"),
content => iri!("https://www.w3.org/ns/activitystreams#content"),
name => iri!("https://www.w3.org/ns/activitystreams#name"),
downstream_duplicates => iri!("https://www.w3.org/ns/activitystreams#downstreamDuplicates"),
duration => iri!("https://www.w3.org/ns/activitystreams#duration"),
end_time => iri!("https://www.w3.org/ns/activitystreams#endTime"),
height => iri!("https://www.w3.org/ns/activitystreams#height"),
href => iri!("https://www.w3.org/ns/activitystreams#href"),
hreflang => iri!("https://www.w3.org/ns/activitystreams#hreflang"),
latitude => iri!("https://www.w3.org/ns/activitystreams#latitude"),
longitude => iri!("https://www.w3.org/ns/activitystreams#longitude"),
media_type => iri!("https://www.w3.org/ns/activitystreams#mediaType"),
published => iri!("https://www.w3.org/ns/activitystreams#published"),
radius => iri!("https://www.w3.org/ns/activitystreams#radius"),
rating => iri!("https://www.w3.org/ns/activitystreams#ratine"),
rel => iri!("https://www.w3.org/ns/activitystreams#rel"),
start_index => iri!("https://www.w3.org/ns/activitystreams#startIndex"),
start_time => iri!("https://www.w3.org/ns/activitystreams#startTime"),
summary => iri!("https://www.w3.org/ns/activitystreams#summary"),
total_items => iri!("https://www.w3.org/ns/activitystreams#totalItems"),
units => iri!("https://www.w3.org/ns/activitystreams#units"),
updated => iri!("https://www.w3.org/ns/activitystreams#updated"),
upstream_duplicates => iri!("https://www.w3.org/ns/activitystreams#upstreamDuplicates"),
verb => iri!("https://www.w3.org/ns/activitystreams#verb"),
width => iri!("https://www.w3.org/ns/activitystreams#width"),
describes => iri!("https://www.w3.org/ns/activitystreams#describes"),
former_type => iri!("https://www.w3.org/ns/activitystreams#formerType"),
deleted => iri!("https://www.w3.org/ns/activitystreams#deleted"),
// ActivityPub extensions
endpoints => iri!("https://www.w3.org/ns/activitystreams#endpoints"),
following => iri!("https://www.w3.org/ns/activitystreams#following"),
followers => iri!("https://www.w3.org/ns/activitystreams#followers"),
inbox => iri!("http://www.w3.org/ns/ldp#inbox"),
liked => iri!("https://www.w3.org/ns/activitystreams#liked"),
shares => iri!("https://www.w3.org/ns/activitystreams#shares"),
likes => iri!("https://www.w3.org/ns/activitystreams#likes"),
oauth_authorization_endpoint => iri!("https://www.w3.org/ns/activitystreams#oauthAuthorizationEndpoint"),
oauth_token_endpoint => iri!("https://www.w3.org/ns/activitystreams#oauthTokenEndpoint"),
outbox => iri!("https://www.w3.org/ns/activitystreams#outbox"),
preferred_username => iri!("https://www.w3.org/ns/activitystreams#preferredUsername"),
provide_client_key => iri!("https://www.w3.org/ns/activitystreams#provideClientKey"),
proxy_url => iri!("https://www.w3.org/ns/activitystreams#proxyUrl"),
shared_inbox => iri!("https://www.w3.org/ns/activitystreams#sharedInbox"),
sign_client_key => iri!("https://www.w3.org/ns/activitystreams#signClientKey"),
source => iri!("https://www.w3.org/ns/activitystreams#source"),
streams => iri!("https://www.w3.org/ns/activitystreams#streams"),
upload_media => iri!("https://www.w3.org/ns/activitystreams#uploadMedia"),
also_known_as => iri!("https://www.w3.org/ns/activitystreams#alsoKnownAs"),
fn insert_owned_language_tag(&mut self, value: LanguageTagBuf) -> Self::LanguageTag {
match self.get_language_tag(value.as_ref()) {
Some(id) => id,
None => {
let index = self.language_tag.len();
self.language_tag.push(value);
LanguageTagIndex::from(index)
}
}
}
}
pub struct TootIds {
pub class: TootClassIds,
pub props: TootPropIds,
impl PartialEq for NyaIri {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            // Every well-known IRI is defined exactly once in static memory,
            // so pointer identity is equivalent to value equality here.
            (NyaIri::Known(a), NyaIri::Known(b)) => std::ptr::eq(*a, *b),
            // Custom IRIs compare by their contents.
            (NyaIri::Custom(a), NyaIri::Custom(b)) => a == b,
            // A well-known IRI is never stored behind an Arc, so mixed
            // variants can never be equal.
            _ => false,
        }
    }
}

impl Eq for NyaIri {}
impl TootIds {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self {
Self {
class: TootClassIds::populate(vocab),
props: TootPropIds::populate(vocab),
impl Hash for NyaIri {
    /// Hashes the IRI *contents* for both variants, so the hash always
    /// agrees with the textual IRI.  `Eq` consistency holds: values that
    /// compare equal (same pointer, or equal Arc contents) hash equally.
    fn hash<H: Hasher>(&self, state: &mut H) {
        match self {
            // XXX would it be ok to just hash the pointer here?
            // NOTE(review): pointer hashing would stay Eq-consistent, but it
            // would diverge from the content hash of a `Custom` IRI with the
            // same text — confirm no lookup relies on that before changing.
            // SAFETY: `Known` pointers are cast from &'static Iri<'static>.
            NyaIri::Known(iri) => unsafe { (**iri).hash(state) },
            NyaIri::Custom(idx) => idx.hash(state),
        }
    }
}
pub struct TootClassIds {
pub emoji: Id,
pub identity_proof: Id,
}
/// [`define_json_ns!`] implements this trait for all enums it emits.
pub trait HasContext: Sized + Clone + 'static {
const OFFSET: usize;
const COUNT: usize;
const IRI_BASE: Iri<'static>;
const ALIAS_BASE: Option<Iri<'static>>;
/// This is the slice that all instances of `NyaIri::Known` reference.
/// The [`define_json_ns!`] macro generates the underlying array as a
/// private member of the enums implementing this trait.
const MEMBERS: &'static [(Self, Iri<'static>)];
impl TootClassIds {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self {
populate_ids! {
vocab,
emoji => iri!("http://joinmastodon.org/ns#Emoji"),
identity_proof => iri!("http://joinmastodon.org/ns#IdentityProof"),
}
fn from_index(index: usize) -> Option<Self>;
fn as_index(&self) -> usize;
fn from_iri(iri: &Iri) -> Option<Self>;
fn as_iri(&self) -> &'static Iri<'static> {
&Self::MEMBERS[self.as_index() - Self::OFFSET].1
}
fn as_nya_iri(&self) -> NyaIri {
NyaIri::Known(self.as_iri())
}
}
pub struct TootPropIds {
pub blurhash: Id,
pub focal_point: Id,
pub featured: Id,
pub featured_tags: Id,
pub discoverable: Id,
pub suspended: Id,
pub voters_count: Id,
// XXX this could be more efficient by storing all well-known IRIs in one array
/// Looks `iri` up in every generated namespace table and, on a hit, returns
/// the corresponding `NyaIri::Known` pointing at the static prototype.
fn resolve_known_iri(iri: Iri) -> Option<NyaIri> {
    // Probe each namespace in turn; `?` bails out with None when no
    // namespace recognizes the IRI.
    let prototype = apub::__Class::from_iri(&iri)
        .map(|known| known.as_iri())
        .or_else(|| apub::__Prop::from_iri(&iri).map(|known| known.as_iri()))
        .or_else(|| sec::__Prop::from_iri(&iri).map(|known| known.as_iri()))
        .or_else(|| toot::__Class::from_iri(&iri).map(|known| known.as_iri()))
        .or_else(|| toot::__Prop::from_iri(&iri).map(|known| known.as_iri()))?;
    Some(NyaIri::Known(prototype as *const Iri<'static>))
}
impl TootPropIds {
fn populate(vocab: &mut impl VocabularyMut<Iri = IriIndex>) -> Self {
populate_ids! {
vocab,
// Very basically, this macro generates a new module containing two enums,
// __Class and __Prop, and makes the module export all of their members.
// This allows us to refer to them as, for instance, apub::Activity rather than
// apub::__Class::Activity.
// The enums are also HasContext, which makes them useful in the first place.
blurhash => iri!("http://joinmastodon.org/ns#blurhash"),
focal_point => iri!("http://joinmastodon.org/ns#focalPoint"),
featured => iri!("http://joinmastodon.org/ns#featured"),
featured_tags => iri!("http://joinmastodon.org/ns#featuredTags"),
discoverable => iri!("http://joinmastodon.org/ns#discoverable"),
suspended => iri!("http://joinmastodon.org/ns#suspended"),
voters_count => iri!("http://joinmastodon.org/ns#votersCount"),
}
define_json_ns! {
    //! ActivityStreams base namespace with ActivityPub extensions
    meta {
        mod_name = apub,
        iri_base = "https://www.w3.org/ns/activitystreams#",
        // ActivityStreams mandates conforming implementations also recognize IRIs
        // with http instead of https as the protocol. We are aliasing the non-https
        // IRI to the https one, such that HasContext::from_iri() matches both.
        alias_base = "http://www.w3.org/ns/activitystreams#",
    }
    class {
        // Object types
        Activity,
        Application,
        Article,
        Audio,
        Collection,
        CollectionPage,
        Document,
        Event,
        Group,
        Image,
        IntransitiveActivity,
        Note,
        Object,
        OrderedCollection,
        OrderedCollectionPage,
        Organization,
        Page,
        Person,
        Place,
        Profile,
        Question,
        Relationship,
        Service,
        Tombstone,
        Video,
        // Activity types
        Accept,
        Add,
        Announce,
        Arrive,
        Block,
        Create,
        Delete,
        Dislike,
        Follow,
        Flag,
        Ignore,
        Invite,
        Join,
        Leave,
        Like,
        Listen,
        Move,
        Offer,
        Read,
        Remove,
        Reject,
        TentativeAccept,
        TentativeReject,
        Travel,
        Undo,
        Update,
        View,
        // Link types
        Hashtag, // proposed extension; Mastodon uses this
        Link,
        IsFollowing,
        IsFollowedBy,
        IsContact,
        IsMember,
        Mention,
    }
    prop {
        subject,
        relationship,
        actor,
        attributedTo,
        attachment,
        attachments,
        author,
        bcc,
        bto,
        cc,
        context,
        current,
        first,
        generator,
        icon,
        image,
        inReplyTo,
        items,
        instrument,
        orderedItems,
        last,
        location,
        next,
        object,
        oneOf,
        anyOf,
        closed,
        origin,
        accuracy,
        prev,
        preview,
        provider,
        replies,
        result,
        audience,
        partOf,
        tag,
        tags,
        target,
        to,
        url,
        altitude,
        content,
        name,
        downstreamDuplicates,
        duration,
        endTime,
        height,
        href,
        hreflang,
        latitude,
        longitude,
        mediaType,
        published,
        radius,
        rating,
        rel,
        startIndex,
        startTime,
        summary,
        totalItems,
        units,
        updated,
        upstreamDuplicates,
        verb,
        width,
        describes,
        formerType,
        deleted,
        // ActivityPub extensions
        endpoints,
        following,
        followers,
        // NOTE(review): the previous vocabulary mapped `inbox` to
        // http://www.w3.org/ns/ldp#inbox (ActivityPub defines inbox as
        // ldp:inbox); listing it here resolves it in the activitystreams
        // namespace instead — confirm this is intentional.
        inbox,
        liked,
        shares,
        likes,
        oauthAuthorizationEndpoint,
        oauthTokenEndpoint,
        outbox,
        preferredUsername,
        provideClientKey,
        proxyUrl,
        sharedInbox,
        signClientKey,
        source,
        streams,
        uploadMedia,
        // DID core extensions
        alsoKnownAs,
    }
}
// Generates the `sec` module for the W3ID security vocabulary terms
// (HTTP signature / Linked Data proof keys and values).
define_json_ns! {
    //! W3ID security extensions
    meta {
        mod_name = sec,
        // The security vocabulary is served from the bare w3id.org host;
        // the previous "https://www.w3id.org/security#" base never matched
        // the IRIs real-world documents actually use (e.g. Mastodon's
        // sec:publicKeyPem expands to https://w3id.org/security#publicKeyPem),
        // so HasContext::from_iri() silently failed for all of these terms.
        iri_base = "https://w3id.org/security#",
        // Keep recognizing the old www-prefixed form for backward
        // compatibility with anything serialized while it was in use.
        alias_base = "https://www.w3id.org/security#",
    }
    prop {
        verificationMethod,
        controller,
        proof,
        domain,
        challenge,
        previousProof,
        proofPurpose,
        proofValue,
        expires,
        publicKey,
        publicKeyPem,
        owner,
        signature,
        signatureValue,
    }
}
// Generates the `toot` module for Mastodon's vocabulary extensions under
// http://joinmastodon.org/ns# — the same IRIs that `TootPropIds::populate`
// registers above.  Mastodon publishes this namespace over plain http, so
// no https base/alias here.
define_json_ns! {
    //! Mastodon extensions
    meta {
        mod_name = toot,
        iri_base = "http://joinmastodon.org/ns#",
    }
    class {
        // custom emoji attached to statuses and profiles
        Emoji,
    }
    prop {
        blurhash,
        focalPoint,
        featured,
        featuredTags,
        discoverable,
        suspended,
        votersCount,
    }
}