Compare commits
No commits in common. "a6406ebd5af78c4940263f6e1ed9cb3d1b3815e8" and "0af4532a88a2e4405765ff176d7d56075cd1e16b" have entirely different histories.
a6406ebd5a...0af4532a88
21 changed files with 152 additions and 2104 deletions
Cargo.lock (generated): 1207 changed lines
File diff suppressed because it is too large.

@@ -8,7 +8,7 @@ resolver = "2"
 unsafe_code = "forbid"
 
 [workspace.package]
-version = "0.1.0"
+version = "0.0.0"
 edition = "2021"
 description = "PLAIn teXT tools for your data"
 license = "EUPL-1.2"

@@ -14,12 +14,10 @@ jiff = "0.1.28"
 kdl.workspace = true
 miette = { version = "7.4.0", features = ["fancy", "syntect-highlighter"] }
 owo-colors = "4.1.0"
-paperless-rs = "0.1.5"
 tokio = { version = "1.43.0", features = ["full"] }
 tokio-stream = { version = "0.1.17", features = ["full"] }
 tracing = "0.1.41"
 tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
-trustfall = "0.8.1"
 
 [lints]
 workspace = true

@@ -1,188 +0,0 @@
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::sync::Arc;
use std::sync::OnceLock;

use paperless_rs::PaperlessClient;
use tracing::debug;
use trustfall::provider::resolve_coercion_using_schema;
use trustfall::provider::resolve_property_with;
use trustfall::provider::AsVertex;
use trustfall::provider::ContextIterator;
use trustfall::provider::ContextOutcomeIterator;
use trustfall::provider::EdgeParameters;
use trustfall::provider::ResolveEdgeInfo;
use trustfall::provider::ResolveInfo;
use trustfall::provider::Typename;
use trustfall::provider::VertexIterator;
use trustfall::FieldValue;
use trustfall::Schema;

use super::vertex::Vertex;
use crate::parsing::DefinitionKind;
use crate::parsing::Record;

static SCHEMA: OnceLock<Schema> = OnceLock::new();

#[non_exhaustive]
pub struct Adapter {
    schema: Arc<Schema>,
    records: Vec<Record>,
    definitions: Arc<BTreeMap<String, BTreeMap<String, DefinitionKind>>>,
    paperless_client: Option<PaperlessClient>,
    runtime_handle: tokio::runtime::Handle,
}

impl std::fmt::Debug for Adapter {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Adapter").finish_non_exhaustive()
    }
}

impl Adapter {
    pub fn new(
        schema: Schema,
        records: Vec<Record>,
        definitions: BTreeMap<String, BTreeMap<String, DefinitionKind>>,
        paperless_client: Option<PaperlessClient>,
        runtime: tokio::runtime::Handle,
    ) -> Self {
        Self {
            schema: Arc::new(schema),
            records,
            definitions: Arc::new(definitions),
            paperless_client,
            runtime_handle: runtime,
        }
    }

    pub const SCHEMA_TEXT: &'static str = include_str!("./schema.graphql");

    pub fn schema() -> &'static Schema {
        SCHEMA.get_or_init(|| Schema::parse(Self::SCHEMA_TEXT).expect("not a valid schema"))
    }
}

impl<'a> trustfall::provider::Adapter<'a> for Adapter {
    type Vertex = Vertex;

    fn resolve_starting_vertices(
        &self,
        edge_name: &Arc<str>,
        _parameters: &EdgeParameters,
        resolve_info: &ResolveInfo,
    ) -> VertexIterator<'a, Self::Vertex> {
        match edge_name.as_ref() {
            "Records" => super::entrypoints::records(resolve_info, &self.records),
            _ => {
                unreachable!(
                    "attempted to resolve starting vertices for unexpected edge name: {edge_name}"
                )
            }
        }
    }

    fn resolve_property<V: AsVertex<Self::Vertex> + 'a>(
        &self,
        contexts: ContextIterator<'a, V>,
        type_name: &Arc<str>,
        property_name: &Arc<str>,
        resolve_info: &ResolveInfo,
    ) -> ContextOutcomeIterator<'a, V, FieldValue> {
        if property_name.as_ref() == "__typename" {
            return resolve_property_with(contexts, |vertex| vertex.typename().into());
        }

        debug!(?type_name, ?property_name, "Resolving property");

        match type_name.as_ref() {
            "PaperlessDocument" => super::properties::resolve_paperless_document_property(
                contexts,
                property_name.as_ref(),
                resolve_info,
            ),
            "Path" | "File" | "Directory" => super::properties::resolve_fs_property(
                contexts,
                type_name.as_ref(),
                property_name.as_ref(),
                resolve_info,
            ),
            "Record" => {
                super::properties::resolve_record_property(contexts, property_name, resolve_info)
            }
            kind if kind.starts_with("p_") => {
                super::properties::resolve_record_property(contexts, property_name, resolve_info)
            }
            _ => {
                unreachable!(
                    "attempted to read property '{property_name}' on unexpected type: {type_name}"
                )
            }
        }
    }

    fn resolve_neighbors<V: AsVertex<Self::Vertex> + 'a>(
        &self,
        contexts: ContextIterator<'a, V>,
        type_name: &Arc<str>,
        edge_name: &Arc<str>,
        parameters: &EdgeParameters,
        resolve_info: &ResolveEdgeInfo,
    ) -> ContextOutcomeIterator<'a, V, VertexIterator<'a, Self::Vertex>> {
        match type_name.as_ref() {
            "Directory" => super::edges::resolve_directory_edge(
                contexts,
                edge_name.as_ref(),
                parameters,
                resolve_info,
            ),
            kind if kind.starts_with("p_") => super::edges::resolve_record_edge(
                contexts,
                edge_name,
                parameters,
                resolve_info,
                &self.definitions,
            ),
            _ => {
                unreachable!(
                    "attempted to resolve edge '{edge_name}' on unexpected type: {type_name}"
                )
            }
        }
    }

    fn resolve_coercion<V: AsVertex<Self::Vertex> + 'a>(
        &self,
        contexts: ContextIterator<'a, V>,
        type_name: &Arc<str>,
        coerce_to_type: &Arc<str>,
        _resolve_info: &ResolveInfo,
    ) -> ContextOutcomeIterator<'a, V, bool> {
        let schema = self.schema.clone();
        let coerce_to_type = coerce_to_type.clone();
        debug!(?coerce_to_type, ?type_name, "Trying to coerce");

        Box::new(contexts.map(move |ctx| {
            let subtypes: BTreeSet<_> = schema
                .subtypes(coerce_to_type.as_ref())
                .unwrap_or_else(|| panic!("type {coerce_to_type} is not part of this schema"))
                .collect();

            match ctx.active_vertex::<Vertex>() {
                None => (ctx, false),
                Some(vertex) => {
                    let typename = vertex.typename();
                    debug!(?coerce_to_type, ?vertex, "Trying to coerce");
                    if let Some(rec) = vertex.as_record() {
                        let is_rec = coerce_to_type.starts_with("p_");
                        let is_kind = rec.kind == coerce_to_type.as_ref()[2..];
                        (ctx, is_rec && is_kind)
                    } else {
                        let can_coerce = subtypes.contains(typename);
                        (ctx, can_coerce)
                    }
                }
            }
        }))
    }
}

@@ -1,96 +0,0 @@
use std::collections::BTreeMap;
use std::sync::Arc;

use camino::Utf8PathBuf;
use trustfall::provider::resolve_neighbors_with;
use trustfall::provider::AsVertex;
use trustfall::provider::ContextIterator;
use trustfall::provider::ContextOutcomeIterator;
use trustfall::provider::EdgeParameters;
use trustfall::provider::ResolveEdgeInfo;
use trustfall::provider::VertexIterator;

use super::Vertex;
use crate::parsing::DefinitionKind;
use crate::parsing::Record;

pub(super) fn resolve_directory_edge<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    edge_name: &str,
    _parameters: &EdgeParameters,
    resolve_info: &ResolveEdgeInfo,
) -> ContextOutcomeIterator<'a, V, VertexIterator<'a, Vertex>> {
    match edge_name {
        "Children" => directory::children(contexts, resolve_info),
        _ => unreachable!("attempted to resolve unexpected edge '{edge_name}' on type 'Directory'"),
    }
}

mod directory {
    use camino::Utf8Path;
    use trustfall::provider::resolve_neighbors_with;
    use trustfall::provider::AsVertex;
    use trustfall::provider::ContextIterator;
    use trustfall::provider::ContextOutcomeIterator;
    use trustfall::provider::ResolveEdgeInfo;
    use trustfall::provider::VertexIterator;

    use crate::adapter::Vertex;

    pub(super) fn children<'a, V: AsVertex<Vertex> + 'a>(
        contexts: ContextIterator<'a, V>,
        _resolve_info: &ResolveEdgeInfo,
    ) -> ContextOutcomeIterator<'a, V, VertexIterator<'a, Vertex>> {
        resolve_neighbors_with(contexts, move |vertex| {
            let vertex = vertex
                .as_directory()
                .expect("conversion failed, vertex was not a Directory");

            fn read_children(path: &Utf8Path) -> Option<impl Iterator<Item = Vertex>> {
                Some(
                    path.read_dir_utf8()
                        .ok()?
                        .flat_map(|item| Some(Vertex::Path(item.ok()?.path().to_path_buf()))),
                )
            }

            read_children(vertex)
                .map(|i| {
                    let it: Box<dyn Iterator<Item = Vertex>> = Box::new(i);
                    it
                })
                .unwrap_or_else(|| Box::new(std::iter::empty()))
        })
    }
}

pub(super) fn resolve_record_edge<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    edge_name: &Arc<str>,
    _parameters: &EdgeParameters,
    _resolve_info: &ResolveEdgeInfo,
    definitions: &Arc<BTreeMap<String, BTreeMap<String, DefinitionKind>>>,
) -> ContextOutcomeIterator<'a, V, VertexIterator<'a, Vertex>> {
    let edge_name = edge_name.clone();
    let definitions = definitions.clone();
    resolve_neighbors_with(contexts, move |v| {
        let rec = v.as_record().expect("Expected a record");
        let def = &definitions[&rec.kind][edge_name.as_ref()];

        match def {
            DefinitionKind::Path => Box::new(std::iter::once(path_from_rec(rec, &edge_name))),
            _ => unreachable!("Only `Path` can appear as edge for now"),
        }
    })
}

fn path_from_rec(rec: &Record, edge_name: &str) -> Vertex {
    let pathb = Utf8PathBuf::from(rec.fields[edge_name].as_string().unwrap());
    if pathb.is_file() {
        Vertex::File(pathb)
    } else if pathb.is_dir() {
        Vertex::Directory(pathb)
    } else {
        Vertex::Path(pathb)
    }
}

@@ -1,16 +0,0 @@
use trustfall::provider::ResolveInfo;
use trustfall::provider::VertexIterator;

use super::vertex::Vertex;
use crate::parsing::Record;

pub(super) fn records<'a>(
    _resolve_info: &ResolveInfo,
    records: &'_ [Record],
) -> VertexIterator<'a, Vertex> {
    #[expect(
        clippy::unnecessary_to_owned,
        reason = "We have to go through a vec to satisfy the lifetimes"
    )]
    Box::new(records.to_vec().into_iter().map(Vertex::Record))
}

@@ -1,55 +0,0 @@
mod adapter_impl;
mod edges;
mod entrypoints;
mod properties;
mod vertex;

#[cfg(test)]
mod tests;

pub use adapter_impl::Adapter;
use tracing::trace;
use trustfall::Schema;
pub use vertex::Vertex;

pub struct CustomVertex {
    pub name: String,
    pub definition: String,
}

impl crate::parsing::Definition {
    fn to_custom_vertices(&self) -> Vec<CustomVertex> {
        let name = format!("p_{}", self.name);

        let fields = self
            .fields
            .iter()
            .map(|(fname, ftype)| {
                let kind = ftype.trustfall_kind(&format!("{name}{fname}"));
                format!("{fname}: {kind}")
            })
            .chain([String::from("_at: String!"), String::from("_kind: String!")])
            .collect::<Vec<_>>();

        let definition = format!("type {name} implements Record {{ {} }}", fields.join(","));

        [CustomVertex { name, definition }].into_iter().collect()
    }
}

pub(crate) fn to_schema(
    definitions: &std::collections::BTreeMap<String, Vec<crate::parsing::Definition>>,
) -> trustfall::Schema {
    let base_text = Adapter::SCHEMA_TEXT;

    let generated = definitions
        .values()
        .flat_map(|defs| defs.last().unwrap().to_custom_vertices())
        .map(|v| v.definition)
        .collect::<Vec<_>>()
        .join("\n");

    let input = format!("{base_text}{generated}");
    trace!(%input, "Using schema");
    Schema::parse(input).unwrap()
}

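Note (a hand-derived illustration, not text from either commit): given the `changelog` definition that appears later in this comparison, `to_custom_vertices` above would append roughly the following generated type to the base schema, with the declared fields mapped through `trustfall_kind` and `_at`/`_kind` chained on, joined without spaces:

    type p_changelog implements Record { title: String!,type: String!,version: String!,_at: String!,_kind: String! }
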
@@ -1,180 +0,0 @@
use std::sync::Arc;

use kdl::KdlValue;
use trustfall::provider::field_property;
use trustfall::provider::resolve_property_with;
use trustfall::provider::AsVertex;
use trustfall::provider::ContextIterator;
use trustfall::provider::ContextOutcomeIterator;
use trustfall::provider::ResolveInfo;
use trustfall::FieldValue;

use super::vertex::Vertex;

pub(super) fn resolve_fs_property<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    type_name: &str,
    property_name: &str,
    resolve_info: &ResolveInfo,
) -> ContextOutcomeIterator<'a, V, FieldValue> {
    match (type_name, property_name) {
        (_, "exists" | "basename" | "path") => {
            resolve_path_property(contexts, property_name, resolve_info)
        }
        ("Directory", _) => resolve_directory_property(contexts, property_name, resolve_info),
        ("File", _) => resolve_file_property(contexts, property_name, resolve_info),
        _ => {
            unreachable!(
                "attempted to read unexpected property '{property_name}' on type '{type_name}'"
            )
        }
    }
}

pub(super) fn resolve_path_property<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    property_name: &str,
    _resolve_info: &ResolveInfo,
) -> ContextOutcomeIterator<'a, V, FieldValue> {
    match property_name {
        "exists" => resolve_property_with(contexts, move |v: &Vertex| match v {
            Vertex::Path(p) | Vertex::File(p) | Vertex::Directory(p) => p.exists().into(),
            _ => {
                panic!("Vertex was not a filesystem type")
            }
        }),
        "basename" => resolve_property_with(contexts, move |v: &Vertex| match v {
            Vertex::Path(p) | Vertex::File(p) | Vertex::Directory(p) => p.file_name().into(),
            _ => {
                panic!("Vertex was not a filesystem type")
            }
        }),
        "path" => resolve_property_with(contexts, move |v: &Vertex| match v {
            Vertex::Path(p) | Vertex::File(p) | Vertex::Directory(p) => p.to_string().into(),
            _ => {
                panic!("Vertex was not a filesystem type")
            }
        }),
        _ => {
            unreachable!("attempted to read unexpected property '{property_name}' on type 'Path'")
        }
    }
}

pub(super) fn resolve_directory_property<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    property_name: &str,
    _resolve_info: &ResolveInfo,
) -> ContextOutcomeIterator<'a, V, FieldValue> {
    match property_name {
        "exists" => resolve_property_with(contexts, move |v: &Vertex| {
            let directory = v.as_directory().expect("vertex was not a Directory");

            directory.exists().into()
        }),
        "basename" => resolve_property_with(contexts, move |v: &Vertex| {
            let directory = v.as_directory().expect("vertex was not a Directory");

            directory.file_name().into()
        }),
        "path" => resolve_property_with(contexts, move |v: &Vertex| {
            let directory = v.as_directory().expect("vertex was not a Directory");

            directory.to_string().into()
        }),
        _ => {
            unreachable!("attempted to read unexpected property '{property_name}' on type 'File'")
        }
    }
}

pub(super) fn resolve_file_property<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    property_name: &str,
    _resolve_info: &ResolveInfo,
) -> ContextOutcomeIterator<'a, V, FieldValue> {
    match property_name {
        "exists" => resolve_property_with(contexts, move |v: &Vertex| {
            let file = v.as_file().expect("vertex was not a File");

            file.exists().into()
        }),
        "basename" => resolve_property_with(contexts, move |v: &Vertex| {
            let file = v.as_file().expect("vertex was not a File");

            file.file_name().into()
        }),
        "path" => resolve_property_with(contexts, move |v: &Vertex| {
            let file = v.as_file().expect("vertex was not a File");

            file.to_string().into()
        }),
        "extension" => resolve_property_with(contexts, move |v: &Vertex| {
            let file = v.as_file().expect("vertex was not a File");

            file.extension().into()
        }),
        _ => {
            unreachable!("attempted to read unexpected property '{property_name}' on type 'File'")
        }
    }
}

pub(super) fn resolve_paperless_document_property<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    property_name: &str,
    _resolve_info: &ResolveInfo,
) -> ContextOutcomeIterator<'a, V, FieldValue> {
    match property_name {
        "added" => resolve_property_with(contexts, field_property!(as_paperless_document, added)),
        "archive_serial_number" => resolve_property_with(
            contexts,
            field_property!(as_paperless_document, archive_serial_number),
        ),
        "content" => {
            resolve_property_with(contexts, field_property!(as_paperless_document, content))
        }
        "created" => {
            resolve_property_with(contexts, field_property!(as_paperless_document, created))
        }
        "id" => resolve_property_with(contexts, field_property!(as_paperless_document, id)),
        "title" => resolve_property_with(contexts, field_property!(as_paperless_document, title)),
        _ => {
            unreachable!(
                "attempted to read unexpected property '{property_name}' on type 'PaperlessDocument'"
            )
        }
    }
}

pub(super) fn resolve_record_property<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    property_name: &Arc<str>,
    _resolve_info: &ResolveInfo,
) -> ContextOutcomeIterator<'a, V, FieldValue> {
    let property_name = property_name.clone();
    match property_name.as_ref() {
        "_at" => resolve_property_with(
            contexts,
            field_property!(as_record, at, { at.to_string().into() }),
        ),
        "_kind" => resolve_property_with(contexts, field_property!(as_record, kind)),
        _ => resolve_property_with(contexts, move |v: &Vertex| {
            let rec = v
                .as_record()
                .expect("Called record property without it being a record");

            kdl_to_trustfall_value(rec.fields[property_name.as_ref()].clone())
        }),
    }
}

fn kdl_to_trustfall_value(val: KdlValue) -> FieldValue {
    match val {
        KdlValue::Bool(b) => FieldValue::Boolean(b),
        KdlValue::Float(f) => FieldValue::Float64(f),
        KdlValue::Null => FieldValue::Null,
        KdlValue::Integer(i) => FieldValue::Int64(i.try_into().unwrap()),
        KdlValue::String(s) => FieldValue::String(s.into()),
    }
}

@@ -1,88 +0,0 @@
schema {
    query: RootSchemaQuery
}

directive @filter(
    """
    Name of the filter operation to perform.
    """
    op: String!
    """
    List of string operands for the operator.
    """
    value: [String!]
) repeatable on FIELD | INLINE_FRAGMENT
directive @tag(
    """
    Name to apply to the given property field.
    """
    name: String
) on FIELD
directive @output(
    """
    What to designate the output field generated from this property field.
    """
    name: String
) on FIELD
directive @optional on FIELD
directive @recurse(
    """
    Recurse up to this many times on this edge. A depth of 1 produces the current
    vertex and its immediate neighbors along the given edge.
    """
    depth: Int!
) on FIELD
directive @fold on FIELD
directive @transform(
    """
    Name of the transformation operation to perform.
    """
    op: String!
) on FIELD


"""
All the possible data types to begin querying
"""
type RootSchemaQuery {
    """
    All records in your plaixt instance
    """
    Records: [Record!]!
}

interface Record {
    _kind: String!
    _at: String!
}

interface Path {
    path: String!
    exists: Boolean!
    basename: String!
}

interface File implements Path {
    path: String!
    exists: Boolean!
    basename: String!

    extension: String!
}

type Directory implements Path {
    path: String!
    exists: Boolean!
    basename: String!

    Children: [Path!]!
}

type PaperlessDocument {
    id: Int!
    title: String!
    content: String!
    archive_serial_number: Int
    created: String!
    added: String!
}

@@ -1,16 +0,0 @@
use trustfall::provider::check_adapter_invariants;

use super::Adapter;

#[tokio::test]
async fn adapter_satisfies_trustfall_invariants() {
    let schema = Adapter::schema();
    let adapter = Adapter::new(
        schema.clone(),
        vec![],
        [].into(),
        None,
        tokio::runtime::Handle::current(),
    );
    check_adapter_invariants(schema, adapter);
}

@@ -1,15 +0,0 @@
use camino::Utf8PathBuf;
use paperless_rs::endpoint::documents::Document as PaperlessDocument;

use crate::parsing::Record;

#[non_exhaustive]
#[derive(Debug, Clone, trustfall::provider::TrustfallEnumVertex)]
pub enum Vertex {
    Path(Utf8PathBuf),
    File(Utf8PathBuf),
    Directory(Utf8PathBuf),

    PaperlessDocument(PaperlessDocument),
    Record(Record),
}

@@ -1,39 +0,0 @@
use camino::Utf8Path;
use camino::Utf8PathBuf;
use kdl::KdlDocument;
use miette::Context;
use miette::LabeledSpan;

#[derive(Debug)]
pub struct Config {
    pub(crate) root_folder: Utf8PathBuf,
}

pub(crate) async fn parse_config(path: &Utf8Path) -> miette::Result<Config> {
    let data = tokio::fs::read_to_string(path)
        .await
        .map_err(|e| miette::miette!(e))
        .wrap_err_with(|| miette::miette!("Could not read configuration at \"{path}\""))?;

    let doc: KdlDocument = data
        .parse()
        .map_err(|e| miette::Error::from(e).with_source_code(data.clone()))?;

    Ok(Config {
        root_folder: doc
            .get("root_folder")
            .ok_or_else(|| miette::miette!("\"root_folder\" configuration value not found"))
            .and_then(|val| {
                val.get(0)
                    .and_then(|v| v.as_string().map(Into::into))
                    .ok_or_else(|| {
                        miette::diagnostic!(
                            labels = vec![LabeledSpan::new_primary_with_span(None, val.span())],
                            "root_folder is expected to be a path"
                        )
                        .into()
                    })
                    .map_err(|e: miette::Report| e.with_source_code(data))
            })?,
    })
}

@@ -1,24 +1,16 @@
 #![allow(dead_code)]
 
-use std::collections::BTreeMap;
-use std::io::Read;
-use std::sync::Arc;
-
+use camino::Utf8Path;
 use camino::Utf8PathBuf;
 use clap::Parser;
 use clap::Subcommand;
 use clap::ValueHint;
 use human_panic::Metadata;
-use miette::IntoDiagnostic;
-use parsing::Definition;
-use parsing::Record;
+use kdl::KdlDocument;
+use miette::LabeledSpan;
+use miette::WrapErr;
 use tracing::info;
-use tracing_subscriber::EnvFilter;
-use trustfall::execute_query;
-use trustfall::FieldValue;
 
-mod adapter;
-mod config;
 mod parsing;
 
 #[derive(Debug, Parser)]

@@ -41,7 +33,11 @@ struct Args {
 #[derive(Debug, Subcommand)]
 enum ArgMode {
     Dump,
-    Query,
+}
+
+#[derive(Debug)]
+pub struct Config {
+    root_folder: Utf8PathBuf,
 }
 
 #[tokio::main]

@@ -51,107 +47,54 @@ async fn main() -> miette::Result<()> {
             .authors(env!("CARGO_PKG_AUTHORS"))
     );
 
-    tracing_subscriber::fmt()
-        .with_env_filter(EnvFilter::from_default_env())
-        .pretty()
-        .init();
+    tracing_subscriber::fmt().pretty().init();
 
     let args = Args::parse();
 
-    let config = config::parse_config(&args.config).await?;
+    let config = parse_config(&args.config).await?;
     let root_folder = args.root_folder.as_ref().unwrap_or(&config.root_folder);
 
-    let definitions = parsing::load_definitions(&root_folder.join("definitions")).await?;
-    let records = parsing::load_records(root_folder, &definitions).await?;
-    let (schema, adapter) = get_schema_and_adapter(&definitions, records.clone());
+    let load_records = async {
+        let definitions = parsing::load_definitions(&root_folder.join("definitions")).await?;
+        parsing::load_records(root_folder, &definitions).await
+    };
 
     match args.mode {
-        ArgMode::Query => {
-            let mut query = String::new();
-            std::io::stdin()
-                .read_to_string(&mut query)
-                .into_diagnostic()?;
-
-            let result = execute_query(
-                &schema,
-                Arc::new(adapter),
-                &query,
-                BTreeMap::<Arc<str>, FieldValue>::from([]),
-            )
-            .unwrap()
-            .collect::<Vec<_>>();
-
-            info!("Got records: {result:#?}");
-        }
         ArgMode::Dump => {
-            print_records(&records);
+            let records = load_records.await?;
+
+            info!("Got records: {records:#?}");
         }
     }
 
     Ok(())
 }
 
-fn get_schema_and_adapter(
-    definitions: &BTreeMap<String, Vec<Definition>>,
-    records: Vec<Record>,
-) -> (trustfall::Schema, adapter::Adapter) {
-    let schema = adapter::to_schema(definitions);
-    let definitions = definitions
-        .iter()
-        .map(|(name, def)| (name.clone(), def.last().cloned().unwrap().fields))
-        .collect();
-    let adapter = adapter::Adapter::new(
-        schema.clone(),
-        records,
-        definitions,
-        None,
-        tokio::runtime::Handle::current(),
-    );
-    (schema, adapter)
-}
-
-fn print_records(records: &[Record]) {
-    for record in records {
-        println!("{kind} @ {at} {{", kind = record.kind, at = record.at);
-        for field in &record.fields {
-            println!("\t{name} = {value}", name = field.0, value = field.1);
-        }
-        println!("}}")
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use camino::Utf8PathBuf;
-    use tracing_subscriber::EnvFilter;
-    use trustfall::provider::check_adapter_invariants;
-
-    use crate::get_schema_and_adapter;
-    use crate::parsing;
-
-    #[tokio::test]
-    async fn ensure_adapter_satisfies_invariants() {
-        tracing_subscriber::fmt()
-            .with_env_filter(EnvFilter::from_default_env())
-            .pretty()
-            .with_test_writer()
-            .init();
-
-        let root_folder = Utf8PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../examples");
-
-        println!("{root_folder}");
-        let definitions = parsing::load_definitions(&root_folder.join("definitions"))
-            .await
-            .unwrap();
-
-        let records = parsing::load_records(&root_folder, &definitions)
-            .await
-            .unwrap();
-
-        let (schema, adapter) = get_schema_and_adapter(&definitions, records.clone());
-
-        check_adapter_invariants(&schema, adapter);
-    }
+async fn parse_config(path: &Utf8Path) -> miette::Result<Config> {
+    let data = tokio::fs::read_to_string(path)
+        .await
+        .map_err(|e| miette::miette!(e))
+        .wrap_err_with(|| miette::miette!("Could not read configuration at \"{path}\""))?;
+
+    let doc: KdlDocument = data
+        .parse()
+        .map_err(|e| miette::Error::from(e).with_source_code(data.clone()))?;
+
+    Ok(Config {
+        root_folder: doc
+            .get("root_folder")
+            .ok_or_else(|| miette::miette!("\"root_folder\" configuration value not found"))
+            .and_then(|val| {
+                val.get(0)
+                    .and_then(|v| v.as_string().map(Into::into))
+                    .ok_or_else(|| {
+                        miette::diagnostic!(
+                            labels = vec![LabeledSpan::new_primary_with_span(None, val.span())],
+                            "root_folder is expected to be a path"
+                        )
+                        .into()
+                    })
+                    .map_err(|e: miette::Report| e.with_source_code(data))
+            })?,
+    })
 }

@@ -1,4 +1,5 @@
 use std::collections::BTreeMap;
+use std::collections::HashMap;
 
 use camino::Utf8Path;
 use camino::Utf8PathBuf;

@@ -14,7 +15,7 @@ use miette::NamedSource;
 use owo_colors::OwoColorize;
 use tokio_stream::wrappers::ReadDirStream;
 
-#[derive(Debug, Clone)]
+#[derive(Debug)]
 pub struct Record {
     pub(crate) kind: String,
     pub(crate) at: Timestamp,

@@ -81,7 +82,8 @@ pub(crate) fn parse_record(
         .map(|field| {
             let Some(get) = field.get(0) else {
                 return Err(miette::diagnostic!(
-                    labels = vec![LabeledSpan::new_primary_with_span(None, at_entry.span())],
+                    labels =
+                        vec![LabeledSpan::new_primary_with_span(None, at_entry.span())],
                     "This datetime should be a string formatted as RFC3339."
                 ))?;
             };

@@ -141,8 +143,9 @@ pub(crate) async fn load_records(
         })
         .flat_map(|val| futures::stream::iter(val.transpose()))
         .and_then(|(name, bytes)| async move {
-            parse_record(&bytes, definitions)
-                .map_err(|e| e.with_source_code(NamedSource::new(name, bytes).with_language("kdl")))
+            parse_record(&bytes, definitions).map_err(|e| {
+                e.with_source_code(NamedSource::new(name, bytes).with_language("kdl"))
+            })
         })
         .map(|val| val.map(|recs| futures::stream::iter(recs).map(Ok::<_, miette::Report>)))
         .try_flatten()

@@ -152,32 +155,19 @@ pub(crate) async fn load_records(
     Ok(defs)
 }
 
-#[derive(Debug, Clone)]
+#[derive(Debug)]
 pub enum DefinitionKind {
     String,
-    Path,
     OneOf(Vec<String>),
 }
 
 impl DefinitionKind {
-    pub(crate) fn trustfall_kind(&self, _namespace: &str) -> String {
-        match self {
-            DefinitionKind::String => String::from("String!"),
-            DefinitionKind::Path => String::from("Path!"),
-            DefinitionKind::OneOf(_vecs) => String::from("String!"),
-        }
-    }
-
     pub(crate) fn validate(&self, val: &KdlValue) -> Result<(), String> {
         match self {
             DefinitionKind::String => val
                 .is_string()
                 .then_some(())
                 .ok_or("Expected a string here".to_string()),
-            DefinitionKind::Path => val
-                .is_string()
-                .then_some(())
-                .ok_or("Expected a path encoded as a string here".to_string()),
             DefinitionKind::OneOf(options) => val
                 .as_string()
                 .is_some_and(|val| options.iter().any(|o| o == val))

@@ -185,23 +175,6 @@ impl DefinitionKind {
                 .ok_or_else(|| format!("Expected one of: {}", options.join(", "))),
         }
     }
-
-    pub(crate) fn extra_trustfall_kinds(
-        &self,
-        namespace: &str,
-    ) -> Vec<crate::adapter::CustomVertex> {
-        match self {
-            DefinitionKind::OneOf(defs) => {
-                let name = format!("{namespace}Def");
-                let vec = vec![crate::adapter::CustomVertex {
-                    definition: format!("enum {name} {{ {} }}", defs.join(",")),
-                    name,
-                }];
-                vec
-            }
-            _ => vec![],
-        }
-    }
 }
 
 impl TryFrom<&str> for DefinitionKind {

@@ -209,23 +182,18 @@ impl TryFrom<&str> for DefinitionKind {
     fn try_from(value: &str) -> Result<Self, Self::Error> {
         match value.to_ascii_lowercase().as_str() {
             "string" => Ok(DefinitionKind::String),
-            "path" => Ok(DefinitionKind::Path),
             other => miette::bail!("Did not recognize valid field kind: \"{other}\""),
         }
     }
 }
 
-#[derive(Debug, Clone)]
+#[derive(Debug)]
 pub struct Definition {
-    pub(crate) name: String,
     pub(crate) since: Timestamp,
-    pub(crate) fields: BTreeMap<String, DefinitionKind>,
+    pub(crate) fields: HashMap<String, DefinitionKind>,
 }
 
-pub(crate) fn parse_definition(
-    bytes: &str,
-    definition_name: String,
-) -> miette::Result<Vec<Definition>> {
+pub(crate) fn parse_definition(bytes: &str) -> miette::Result<Vec<Definition>> {
     let doc: KdlDocument = bytes.parse()?;
 
     let mut defs = vec![];

@@ -320,27 +288,11 @@ pub(crate) fn parse_definition(
                     }
                 };
 
-                match field.name().value() {
-                    "at" | "kind" => return Err(miette::diagnostic!(
-                        labels = vec![LabeledSpan::new_primary_with_span(
-                            Some(String::from("this name")),
-                            field.name().span()
-                        )],
-                        help = "Both `at` and `kind` are reserved field names.",
-                        "Reserved field name."
-                    ))?,
-                    _ => {}
-                }
-
                 Ok((field.name().to_string(), kind))
             })
             .collect::<miette::Result<_>>()?;
 
-        defs.push(Definition {
-            since,
-            fields,
-            name: definition_name.clone(),
-        });
+        defs.push(Definition { since, fields });
         }
         unknown => {
             return Err(miette::diagnostic!(

@@ -380,10 +332,9 @@ pub(crate) async fn load_definitions(
         })
         .flat_map(|val| futures::stream::iter(val.transpose()))
         .and_then(|(name, bytes)| async move {
-            let definition_name = name.file_stem().unwrap().to_string();
             Ok((
-                definition_name.clone(),
-                parse_definition(&bytes, definition_name).map_err(|e| {
+                name.file_stem().unwrap().to_string(),
+                parse_definition(&bytes).map_err(|e| {
                     e.with_source_code(NamedSource::new(name, bytes).with_language("kdl"))
                 })?,
             ))

@@ -393,3 +344,4 @@ pub(crate) async fn load_definitions(
 
     Ok(defs)
 }
+

@@ -1,31 +1,11 @@
 changelog "2025-01-29" {
     title "Added parsing of plaixt definitions"
     version "0.1.0"
-    type "Feature"
+    kind "Feature"
 }
 
 changelog "2025-01-30 09:10:59+01:00" {
     title "Added parsing of plaixt records"
     version "0.1.0"
-    type "Feature"
-}
-
-changelog "2025-02-01" {
-    title "Added CLI options"
-    version "0.1.0"
-    type "Feature"
-}
-
-changelog "2025-02-07" {
-    title "Added trustfall as a query frontend"
-    version "0.1.0"
-    type "Feature"
-}
-
-file_test "2025-02-08" {
-    path "Cargo.toml"
-}
-
-file_test "2025-02-08" {
-    path "/etc"
+    kind "Feature"
 }

@@ -4,6 +4,6 @@ define since="2025-01-29 20:27:30+01:00" {
     fields {
         title is=string
         version is=string
-        type { oneOf "Bugfix" "Feature" "Chore" }
+        kind { oneOf "Bugfix" "Feature" "Chore" }
     }
 }

@@ -1,7 +0,0 @@
// This is the default changelog entry for the plaixt project

define since="2025-02-08 00:00:00+01:00" {
    fields {
        path is="Path"
    }
}

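For context, a hypothetical sketch rather than a file from either commit: the base side of this comparison accepts three field kinds in a definition (`string`, `Path`, and `oneOf`), so a definition combining the syntax of the two files above could read as follows. The date, the `location` field, and the option names are made up for illustration.

    define since="2025-03-01 00:00:00+01:00" {
        fields {
            title is=string
            location is="Path"
            kind { oneOf "Bugfix" "Feature" "Chore" }
        }
    }
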
flake.lock (generated): 6 changed lines

@@ -63,11 +63,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1738981474,
-      "narHash": "sha256-YIELTXxfATG0g1wXjyaOWA4qrlubds3MG4FvMPCxSGg=",
+      "lastModified": 1738117527,
+      "narHash": "sha256-GFviGfaezjGLFUlxdv3zyC7rSZvTXqwcG/YsF6MDkOw=",
       "owner": "oxalica",
       "repo": "rust-overlay",
-      "rev": "5c571e5ff246d8fc5f76ba6e38dc8edb6e4002fe",
+      "rev": "6a3dc6ce4132bd57359214d986db376f2333c14d",
       "type": "github"
     },
     "original": {

flake.nix: 59 changed lines

@@ -16,17 +16,8 @@
     };
   };
 
-  outputs =
-    {
-      self,
-      nixpkgs,
-      crane,
-      flake-utils,
-      rust-overlay,
-      ...
-    }:
-    flake-utils.lib.eachDefaultSystem (
-      system:
+  outputs = { self, nixpkgs, crane, flake-utils, rust-overlay, ... }:
+    flake-utils.lib.eachDefaultSystem (system:
       let
         pkgs = import nixpkgs {
           inherit system;

@@ -34,51 +25,29 @@
         };
 
         rustTarget = pkgs.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;
-        unstableRustTarget = pkgs.rust-bin.selectLatestNightlyWith (
-          toolchain:
-          toolchain.default.override {
-            extensions = [
-              "rust-src"
-              "miri"
-              "rustfmt"
-            ];
-          }
-        );
+        unstableRustTarget = pkgs.rust-bin.selectLatestNightlyWith (toolchain: toolchain.default.override {
+          extensions = [ "rust-src" "miri" "rustfmt" ];
+        });
         craneLib = (crane.mkLib pkgs).overrideToolchain rustTarget;
         unstableCraneLib = (crane.mkLib pkgs).overrideToolchain unstableRustTarget;
 
         tomlInfo = craneLib.crateNameFromCargoToml { cargoToml = ./Cargo.toml; };
-        inherit (tomlInfo) version;
+        inherit (tomlInfo) pname version;
 
         src = ./.;
 
         rustfmt' = pkgs.writeShellScriptBin "rustfmt" ''
           exec "${unstableRustTarget}/bin/rustfmt" "$@"
         '';
 
-        common = {
-          src = ./.;
-          buildInputs = [
-            pkgs.openssl
-            pkgs.pkg-config
-          ];
+        cargoArtifacts = craneLib.buildDepsOnly {
+          inherit src;
+          cargoExtraArgs = "--all-features --all";
         };
 
-        cargoArtifacts = craneLib.buildDepsOnly (
-          common
-          // {
-            cargoExtraArgs = "--all-features --all";
-          }
-        );
-
-        plaixt = craneLib.buildPackage (
-          common
-          // {
-            inherit cargoArtifacts version;
-            cargoExtraArgs = "--all-features --all";
-          }
-        );
+        plaixt = craneLib.buildPackage {
+          inherit cargoArtifacts src version;
+          cargoExtraArgs = "--all-features --all";
+        };
 
       in
       rec {

@@ -109,8 +78,6 @@
         devShells.plaixt = pkgs.mkShell {
           buildInputs = [ ];
 
-          inputsFrom = [ plaixt ];
-
           nativeBuildInputs = [
             rustfmt'
             rustTarget

query: 27 changed lines

@@ -1,27 +0,0 @@
{
    Records {
        ... on p_file_test {
            _at @output
            _kind @output
            path {
                ... on Directory {
                    Children @recurse(depth: 10) {
                        path @output
                    }
                }
            }
        }
    }

    # fs__Path(path: "./crates/plaixt/") {
    #     ... on fs__Folder {
    #         directory: path @output
    #         children @fold {
    #             ... on fs__File {
    #                 file: path @output
    #                 size @output
    #             }
    #         }
    #     }
    # }
}

@@ -1,2 +1,2 @@
 [toolchain]
-channel = "1.84.1"
+channel = "1.84.0"