Compare commits

...

10 commits

Author SHA1 Message Date
a6406ebd5a Make filesystem types handle their path impl
Signed-off-by: Marcel Müller <neikos@neikos.email>
2025-02-09 20:16:07 +01:00
11c3a8de94 Adapt to new trustfall model
Signed-off-by: Marcel Müller <neikos@neikos.email>
2025-02-09 19:14:54 +01:00
e600807376 Fix some things
Signed-off-by: Marcel Müller <neikos@neikos.email>
2025-02-08 15:45:21 +01:00
6b1e799d6b Remove a source of unwrap
Signed-off-by: Marcel Müller <neikos@neikos.email>
2025-02-08 12:01:30 +01:00
73391b5b7b Add a first attempt at multiplexing adapters
Signed-off-by: Marcel Müller <neikos@neikos.email>
2025-02-08 11:58:14 +01:00
3112e78bb4 Move trustfall related parts to its own module
Signed-off-by: Marcel Müller <neikos@neikos.email>
2025-02-08 10:08:58 +01:00
8a453a44f9 Make it actually work with trustfall and dynamic data
Signed-off-by: Marcel Müller <neikos@neikos.email>
2025-02-08 09:40:52 +01:00
6501b42328 Start using trustfall to query the repo
Signed-off-by: Marcel Müller <neikos@neikos.email>
2025-02-07 22:03:29 +01:00
68fbc3ce25 Move config to own module
Signed-off-by: Marcel Müller <neikos@neikos.email>
2025-02-01 11:47:40 +01:00
082e162320 Add changelog
Signed-off-by: Marcel Müller <neikos@neikos.email>
2025-02-01 11:46:05 +01:00
21 changed files with 2106 additions and 154 deletions

1211
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -8,7 +8,7 @@ resolver = "2"
unsafe_code = "forbid"
[workspace.package]
version = "0.0.0"
version = "0.1.0"
edition = "2021"
description = "PLAIn teXT tools for your data"
license = "EUPL-1.2"

View file

@ -14,10 +14,12 @@ jiff = "0.1.28"
kdl.workspace = true
miette = { version = "7.4.0", features = ["fancy", "syntect-highlighter"] }
owo-colors = "4.1.0"
paperless-rs = "0.1.5"
tokio = { version = "1.43.0", features = ["full"] }
tokio-stream = { version = "0.1.17", features = ["full"] }
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
trustfall = "0.8.1"
[lints]
workspace = true

View file

@ -0,0 +1,188 @@
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::sync::Arc;
use std::sync::OnceLock;
use paperless_rs::PaperlessClient;
use tracing::debug;
use trustfall::provider::resolve_coercion_using_schema;
use trustfall::provider::resolve_property_with;
use trustfall::provider::AsVertex;
use trustfall::provider::ContextIterator;
use trustfall::provider::ContextOutcomeIterator;
use trustfall::provider::EdgeParameters;
use trustfall::provider::ResolveEdgeInfo;
use trustfall::provider::ResolveInfo;
use trustfall::provider::Typename;
use trustfall::provider::VertexIterator;
use trustfall::FieldValue;
use trustfall::Schema;
use super::vertex::Vertex;
use crate::parsing::DefinitionKind;
use crate::parsing::Record;
/// Lazily-initialized cache for the parsed static schema; see [`Adapter::schema`].
static SCHEMA: OnceLock<Schema> = OnceLock::new();

/// Trustfall adapter that answers queries over the loaded plaixt data.
#[non_exhaustive]
pub struct Adapter {
    /// The full (base + generated) schema this adapter resolves against.
    schema: Arc<Schema>,
    /// All records loaded from the plaixt root folder.
    records: Vec<Record>,
    /// Field definitions per record kind, keyed `kind -> field name -> definition kind`.
    definitions: Arc<BTreeMap<String, BTreeMap<String, DefinitionKind>>>,
    // NOTE(review): not read anywhere in this file yet — presumably for the
    // upcoming paperless integration; confirm before removing.
    paperless_client: Option<PaperlessClient>,
    // NOTE(review): not read anywhere in this file yet — presumably to run
    // async work from synchronous resolvers; confirm.
    runtime_handle: tokio::runtime::Handle,
}
impl std::fmt::Debug for Adapter {
    /// Manual `Debug`: renders as `Adapter { .. }` without dumping the
    /// (potentially large) records and schema.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Adapter").finish_non_exhaustive()
    }
}
impl Adapter {
    /// Creates an adapter over an already-built schema plus the loaded
    /// records and per-kind field definitions.
    pub fn new(
        schema: Schema,
        records: Vec<Record>,
        definitions: BTreeMap<String, BTreeMap<String, DefinitionKind>>,
        paperless_client: Option<PaperlessClient>,
        runtime: tokio::runtime::Handle,
    ) -> Self {
        Self {
            schema: Arc::new(schema),
            records,
            definitions: Arc::new(definitions),
            paperless_client,
            runtime_handle: runtime,
        }
    }

    /// The static base schema text, compiled into the binary.
    pub const SCHEMA_TEXT: &'static str = include_str!("./schema.graphql");

    /// Returns the parsed static base schema, parsing it on first use.
    ///
    /// # Panics
    /// Panics if [`Self::SCHEMA_TEXT`] is not a valid trustfall schema.
    pub fn schema() -> &'static Schema {
        SCHEMA.get_or_init(|| Schema::parse(Self::SCHEMA_TEXT).expect("not a valid schema"))
    }
}
impl<'a> trustfall::provider::Adapter<'a> for Adapter {
    type Vertex = Vertex;

    /// Resolves a query's root edge. `Records` is the only entrypoint
    /// declared in the schema.
    fn resolve_starting_vertices(
        &self,
        edge_name: &Arc<str>,
        _parameters: &EdgeParameters,
        resolve_info: &ResolveInfo,
    ) -> VertexIterator<'a, Self::Vertex> {
        match edge_name.as_ref() {
            "Records" => super::entrypoints::records(resolve_info, &self.records),
            _ => {
                unreachable!(
                    "attempted to resolve starting vertices for unexpected edge name: {edge_name}"
                )
            }
        }
    }

    /// Resolves a property on a vertex, dispatching on the statically known
    /// type name. Generated record types are named `p_<kind>` and share the
    /// generic record property resolver with the `Record` interface.
    fn resolve_property<V: AsVertex<Self::Vertex> + 'a>(
        &self,
        contexts: ContextIterator<'a, V>,
        type_name: &Arc<str>,
        property_name: &Arc<str>,
        resolve_info: &ResolveInfo,
    ) -> ContextOutcomeIterator<'a, V, FieldValue> {
        if property_name.as_ref() == "__typename" {
            return resolve_property_with(contexts, |vertex| vertex.typename().into());
        }

        debug!(?type_name, ?property_name, "Resolving property");
        match type_name.as_ref() {
            "PaperlessDocument" => super::properties::resolve_paperless_document_property(
                contexts,
                property_name.as_ref(),
                resolve_info,
            ),
            "Path" | "File" | "Directory" => super::properties::resolve_fs_property(
                contexts,
                type_name.as_ref(),
                property_name.as_ref(),
                resolve_info,
            ),
            // Both the `Record` interface and every generated `p_<kind>` type
            // are backed by a `Record` vertex, so they share one resolver.
            name if name == "Record" || name.starts_with("p_") => {
                super::properties::resolve_record_property(contexts, property_name, resolve_info)
            }
            _ => {
                unreachable!(
                    "attempted to read property '{property_name}' on unexpected type: {type_name}"
                )
            }
        }
    }

    /// Resolves an edge on a vertex. `Directory` vertices expose their
    /// children; generated `p_<kind>` types expose `Path`-kinded fields
    /// as edges.
    fn resolve_neighbors<V: AsVertex<Self::Vertex> + 'a>(
        &self,
        contexts: ContextIterator<'a, V>,
        type_name: &Arc<str>,
        edge_name: &Arc<str>,
        parameters: &EdgeParameters,
        resolve_info: &ResolveEdgeInfo,
    ) -> ContextOutcomeIterator<'a, V, VertexIterator<'a, Self::Vertex>> {
        match type_name.as_ref() {
            "Directory" => super::edges::resolve_directory_edge(
                contexts,
                edge_name.as_ref(),
                parameters,
                resolve_info,
            ),
            kind if kind.starts_with("p_") => super::edges::resolve_record_edge(
                contexts,
                edge_name,
                parameters,
                resolve_info,
                &self.definitions,
            ),
            _ => {
                unreachable!(
                    "attempted to resolve edge '{edge_name}' on unexpected type: {type_name}"
                )
            }
        }
    }

    /// Decides for every active vertex whether it can be coerced to
    /// `coerce_to_type`.
    ///
    /// Record vertices coerce to the generated `p_<kind>` type whose suffix
    /// matches their kind; all other vertices coerce according to the
    /// schema's subtyping relation.
    ///
    /// # Panics
    /// Panics when `coerce_to_type` is not part of the schema.
    fn resolve_coercion<V: AsVertex<Self::Vertex> + 'a>(
        &self,
        contexts: ContextIterator<'a, V>,
        type_name: &Arc<str>,
        coerce_to_type: &Arc<str>,
        _resolve_info: &ResolveInfo,
    ) -> ContextOutcomeIterator<'a, V, bool> {
        let coerce_to_type = coerce_to_type.clone();
        debug!(?coerce_to_type, ?type_name, "Trying to coerce");

        // Hoisted out of the per-context closure: the subtype set depends
        // only on the target type, not on the individual vertices, and was
        // previously recomputed for every context.
        let subtypes: BTreeSet<String> = self
            .schema
            .subtypes(coerce_to_type.as_ref())
            .unwrap_or_else(|| panic!("type {coerce_to_type} is not part of this schema"))
            .map(str::to_owned)
            .collect();

        Box::new(contexts.map(move |ctx| match ctx.active_vertex::<Vertex>() {
            None => (ctx, false),
            Some(vertex) => {
                debug!(?coerce_to_type, ?vertex, "Trying to coerce");
                if let Some(rec) = vertex.as_record() {
                    // `strip_prefix` both checks the `p_` prefix and extracts
                    // the kind; the old `coerce_to_type.as_ref()[2..]` slice
                    // was evaluated unconditionally and would panic for
                    // target names shorter than two bytes.
                    let coerces = coerce_to_type
                        .strip_prefix("p_")
                        .is_some_and(|kind| rec.kind == kind);
                    (ctx, coerces)
                } else {
                    (ctx, subtypes.contains(vertex.typename()))
                }
            }
        }))
    }
}

View file

@ -0,0 +1,96 @@
use std::collections::BTreeMap;
use std::sync::Arc;
use camino::Utf8PathBuf;
use trustfall::provider::resolve_neighbors_with;
use trustfall::provider::AsVertex;
use trustfall::provider::ContextIterator;
use trustfall::provider::ContextOutcomeIterator;
use trustfall::provider::EdgeParameters;
use trustfall::provider::ResolveEdgeInfo;
use trustfall::provider::VertexIterator;
use super::Vertex;
use crate::parsing::DefinitionKind;
use crate::parsing::Record;
/// Resolves an edge on a `Directory` vertex.
///
/// The schema declares exactly one edge on directories: `Children`.
pub(super) fn resolve_directory_edge<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    edge_name: &str,
    _parameters: &EdgeParameters,
    resolve_info: &ResolveEdgeInfo,
) -> ContextOutcomeIterator<'a, V, VertexIterator<'a, Vertex>> {
    if edge_name == "Children" {
        directory::children(contexts, resolve_info)
    } else {
        unreachable!("attempted to resolve unexpected edge '{edge_name}' on type 'Directory'")
    }
}
mod directory {
    use camino::Utf8Path;
    use trustfall::provider::resolve_neighbors_with;
    use trustfall::provider::AsVertex;
    use trustfall::provider::ContextIterator;
    use trustfall::provider::ContextOutcomeIterator;
    use trustfall::provider::ResolveEdgeInfo;
    use trustfall::provider::VertexIterator;

    use crate::adapter::Vertex;

    /// Resolves the `Children` edge of a `Directory` vertex by listing the
    /// directory's entries as `Vertex::Path` neighbors.
    ///
    /// Unreadable directories and unreadable individual entries are silently
    /// skipped, yielding an empty (or shortened) neighbor list.
    pub(super) fn children<'a, V: AsVertex<Vertex> + 'a>(
        contexts: ContextIterator<'a, V>,
        _resolve_info: &ResolveEdgeInfo,
    ) -> ContextOutcomeIterator<'a, V, VertexIterator<'a, Vertex>> {
        resolve_neighbors_with(contexts, move |vertex| {
            let path: &Utf8Path = vertex
                .as_directory()
                .expect("conversion failed, vertex was not a Directory");

            let neighbors: Box<dyn Iterator<Item = Vertex>> = match path.read_dir_utf8() {
                Ok(entries) => Box::new(
                    entries
                        .filter_map(|entry| Some(Vertex::Path(entry.ok()?.path().to_path_buf()))),
                ),
                Err(_) => Box::new(std::iter::empty()),
            };
            neighbors
        })
    }
}
/// Resolves an edge on a generated `p_<kind>` record type.
///
/// The per-kind field definitions decide how the edge is materialized;
/// currently only `Path`-kinded fields appear as edges.
pub(super) fn resolve_record_edge<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    edge_name: &Arc<str>,
    _parameters: &EdgeParameters,
    _resolve_info: &ResolveEdgeInfo,
    definitions: &Arc<BTreeMap<String, BTreeMap<String, DefinitionKind>>>,
) -> ContextOutcomeIterator<'a, V, VertexIterator<'a, Vertex>> {
    // Clone the Arcs so the closure can own them for the 'a lifetime.
    let edge_name = edge_name.clone();
    let definitions = definitions.clone();
    resolve_neighbors_with(contexts, move |v| {
        let rec = v.as_record().expect("Expected a record");
        // NOTE(review): this indexing panics if the record kind or the edge
        // name is absent from the definitions — presumably guaranteed by the
        // schema being generated from the same definitions; confirm.
        let def = &definitions[&rec.kind][edge_name.as_ref()];
        match def {
            DefinitionKind::Path => Box::new(std::iter::once(path_from_rec(rec, &edge_name))),
            _ => unreachable!("Only `Path` can appear as edge for now"),
        }
    })
}
/// Turns a record's `Path`-kinded field into the most specific filesystem
/// vertex: `File` or `Directory` when the path currently exists as such,
/// plain `Path` otherwise (e.g. for dangling or not-yet-created paths).
///
/// # Panics
/// Panics when the field is missing or does not hold a string value; the
/// parser's `DefinitionKind::Path` validation is expected to rule that out.
fn path_from_rec(rec: &Record, edge_name: &str) -> Vertex {
    let raw = rec.fields[edge_name]
        .as_string()
        .expect("`Path`-kinded field must contain a string value");
    let pathb = Utf8PathBuf::from(raw);
    if pathb.is_file() {
        Vertex::File(pathb)
    } else if pathb.is_dir() {
        Vertex::Directory(pathb)
    } else {
        Vertex::Path(pathb)
    }
}

View file

@ -0,0 +1,16 @@
use trustfall::provider::ResolveInfo;
use trustfall::provider::VertexIterator;
use super::vertex::Vertex;
use crate::parsing::Record;
/// Entrypoint resolver for the root `Records` edge: yields every loaded
/// record wrapped as a `Vertex::Record`.
pub(super) fn records<'a>(
    _resolve_info: &ResolveInfo,
    records: &'_ [Record],
) -> VertexIterator<'a, Vertex> {
    // Clone the records up front so the returned iterator owns its data and
    // can outlive the borrowed slice, as the 'a lifetime requires.
    let owned: Vec<Vertex> = records.iter().cloned().map(Vertex::Record).collect();
    Box::new(owned.into_iter())
}

View file

@ -0,0 +1,55 @@
mod adapter_impl;
mod edges;
mod entrypoints;
mod properties;
mod vertex;
#[cfg(test)]
mod tests;
pub use adapter_impl::Adapter;
use tracing::trace;
use trustfall::Schema;
pub use vertex::Vertex;
/// A dynamically generated piece of schema: a type name together with its
/// GraphQL definition text.
pub struct CustomVertex {
    /// Name of the generated type (e.g. `p_changelog`).
    pub name: String,
    /// Complete GraphQL type definition for `name`.
    pub definition: String,
}
impl crate::parsing::Definition {
    /// Renders this record definition as a generated GraphQL type named
    /// `p_<name>` that implements the `Record` interface, appending the
    /// implicit `_at`/`_kind` metadata fields.
    fn to_custom_vertices(&self) -> Vec<CustomVertex> {
        let name = format!("p_{}", self.name);

        let mut fields: Vec<String> = Vec::with_capacity(self.fields.len() + 2);
        for (fname, ftype) in &self.fields {
            let kind = ftype.trustfall_kind(&format!("{name}{fname}"));
            fields.push(format!("{fname}: {kind}"));
        }
        fields.push(String::from("_at: String!"));
        fields.push(String::from("_kind: String!"));

        let definition = format!("type {name} implements Record {{ {} }}", fields.join(","));
        vec![CustomVertex { name, definition }]
    }
}
/// Builds the full trustfall schema: the static base schema plus one
/// generated type per known record kind, using the latest definition of
/// each kind.
///
/// # Panics
/// Panics when the concatenated schema text is not valid GraphQL.
pub(crate) fn to_schema(
    definitions: &std::collections::BTreeMap<String, Vec<crate::parsing::Definition>>,
) -> trustfall::Schema {
    let base_text = Adapter::SCHEMA_TEXT;
    let generated = definitions
        .values()
        // `filter_map` skips kinds that have no definitions instead of
        // panicking, which the previous `last().unwrap()` did.
        .filter_map(|defs| defs.last())
        .flat_map(|def| def.to_custom_vertices())
        .map(|v| v.definition)
        .collect::<Vec<_>>()
        .join("\n");

    let input = format!("{base_text}{generated}");
    trace!(%input, "Using schema");
    Schema::parse(input).expect("generated schema must be valid GraphQL")
}

View file

@ -0,0 +1,180 @@
use std::sync::Arc;
use kdl::KdlValue;
use trustfall::provider::field_property;
use trustfall::provider::resolve_property_with;
use trustfall::provider::AsVertex;
use trustfall::provider::ContextIterator;
use trustfall::provider::ContextOutcomeIterator;
use trustfall::provider::ResolveInfo;
use trustfall::FieldValue;
use super::vertex::Vertex;
/// Dispatches property resolution for the filesystem vertex types.
///
/// The properties shared by the `Path` interface (`exists`, `basename`,
/// `path`) are handled uniformly regardless of the concrete type; anything
/// else falls through to the type-specific resolver (e.g. `File.extension`).
pub(super) fn resolve_fs_property<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    type_name: &str,
    property_name: &str,
    resolve_info: &ResolveInfo,
) -> ContextOutcomeIterator<'a, V, FieldValue> {
    match (type_name, property_name) {
        (_, "exists" | "basename" | "path") => {
            resolve_path_property(contexts, property_name, resolve_info)
        }
        ("Directory", _) => resolve_directory_property(contexts, property_name, resolve_info),
        ("File", _) => resolve_file_property(contexts, property_name, resolve_info),
        _ => {
            unreachable!(
                "attempted to read unexpected property '{property_name}' on type '{type_name}'"
            )
        }
    }
}
/// Resolves the properties shared by all filesystem vertices (`Path`,
/// `File` and `Directory`): `exists`, `basename` and `path`.
pub(super) fn resolve_path_property<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    property_name: &str,
    _resolve_info: &ResolveInfo,
) -> ContextOutcomeIterator<'a, V, FieldValue> {
    match property_name {
        "exists" => resolve_property_with(contexts, |v: &Vertex| {
            if let Vertex::Path(p) | Vertex::File(p) | Vertex::Directory(p) = v {
                p.exists().into()
            } else {
                panic!("Vertex was not a filesystem type")
            }
        }),
        "basename" => resolve_property_with(contexts, |v: &Vertex| {
            if let Vertex::Path(p) | Vertex::File(p) | Vertex::Directory(p) = v {
                p.file_name().into()
            } else {
                panic!("Vertex was not a filesystem type")
            }
        }),
        "path" => resolve_property_with(contexts, |v: &Vertex| {
            if let Vertex::Path(p) | Vertex::File(p) | Vertex::Directory(p) = v {
                p.to_string().into()
            } else {
                panic!("Vertex was not a filesystem type")
            }
        }),
        _ => {
            unreachable!("attempted to read unexpected property '{property_name}' on type 'Path'")
        }
    }
}
/// Resolves `Directory`-typed properties.
///
/// # Panics
/// Panics when the active vertex is not a `Directory`.
pub(super) fn resolve_directory_property<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    property_name: &str,
    _resolve_info: &ResolveInfo,
) -> ContextOutcomeIterator<'a, V, FieldValue> {
    match property_name {
        "exists" => resolve_property_with(contexts, move |v: &Vertex| {
            let directory = v.as_directory().expect("vertex was not a Directory");
            directory.exists().into()
        }),
        "basename" => resolve_property_with(contexts, move |v: &Vertex| {
            let directory = v.as_directory().expect("vertex was not a Directory");
            directory.file_name().into()
        }),
        "path" => resolve_property_with(contexts, move |v: &Vertex| {
            let directory = v.as_directory().expect("vertex was not a Directory");
            directory.to_string().into()
        }),
        _ => {
            // Fixed: this message previously reported the type as 'File',
            // which was a copy-paste error from `resolve_file_property`.
            unreachable!(
                "attempted to read unexpected property '{property_name}' on type 'Directory'"
            )
        }
    }
}
/// Resolves `File`-typed properties, including the shared `Path` interface
/// properties plus the file-only `extension`.
pub(super) fn resolve_file_property<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    property_name: &str,
    _resolve_info: &ResolveInfo,
) -> ContextOutcomeIterator<'a, V, FieldValue> {
    match property_name {
        "exists" => resolve_property_with(contexts, |v: &Vertex| {
            v.as_file().expect("vertex was not a File").exists().into()
        }),
        "basename" => resolve_property_with(contexts, |v: &Vertex| {
            v.as_file().expect("vertex was not a File").file_name().into()
        }),
        "path" => resolve_property_with(contexts, |v: &Vertex| {
            v.as_file().expect("vertex was not a File").to_string().into()
        }),
        "extension" => resolve_property_with(contexts, |v: &Vertex| {
            v.as_file().expect("vertex was not a File").extension().into()
        }),
        _ => {
            unreachable!("attempted to read unexpected property '{property_name}' on type 'File'")
        }
    }
}
/// Resolves `PaperlessDocument` properties by projecting the matching field
/// out of the underlying paperless document via `field_property!`.
pub(super) fn resolve_paperless_document_property<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    property_name: &str,
    _resolve_info: &ResolveInfo,
) -> ContextOutcomeIterator<'a, V, FieldValue> {
    match property_name {
        "added" => resolve_property_with(contexts, field_property!(as_paperless_document, added)),
        "archive_serial_number" => resolve_property_with(
            contexts,
            field_property!(as_paperless_document, archive_serial_number),
        ),
        "content" => {
            resolve_property_with(contexts, field_property!(as_paperless_document, content))
        }
        "created" => {
            resolve_property_with(contexts, field_property!(as_paperless_document, created))
        }
        "id" => resolve_property_with(contexts, field_property!(as_paperless_document, id)),
        "title" => resolve_property_with(contexts, field_property!(as_paperless_document, title)),
        _ => {
            unreachable!(
                "attempted to read unexpected property '{property_name}' on type 'PaperlessDocument'"
            )
        }
    }
}
/// Resolves properties on `Record`-backed vertices.
///
/// `_at` and `_kind` are the implicit record metadata; any other property
/// name is looked up in the record's parsed KDL fields.
///
/// # Panics
/// Panics (inside the returned resolver) when a vertex is not a record or
/// when the record lacks the requested field.
pub(super) fn resolve_record_property<'a, V: AsVertex<Vertex> + 'a>(
    contexts: ContextIterator<'a, V>,
    property_name: &Arc<str>,
    _resolve_info: &ResolveInfo,
) -> ContextOutcomeIterator<'a, V, FieldValue> {
    let property_name = property_name.clone();
    match property_name.as_ref() {
        "_at" => resolve_property_with(
            contexts,
            field_property!(as_record, at, { at.to_string().into() }),
        ),
        "_kind" => resolve_property_with(contexts, field_property!(as_record, kind)),
        _ => resolve_property_with(contexts, move |v: &Vertex| {
            let rec = v
                .as_record()
                .expect("Called record property without it being a record");
            // `get` with an explicit message replaces the previous bare
            // indexing, whose panic did not name the offending field.
            let value = rec.fields.get(property_name.as_ref()).unwrap_or_else(|| {
                panic!(
                    "record of kind '{}' has no field named '{property_name}'",
                    rec.kind
                )
            });
            kdl_to_trustfall_value(value.clone())
        }),
    }
}
/// Converts a parsed KDL value into the equivalent trustfall field value.
///
/// # Panics
/// Panics when a KDL integer does not fit into trustfall's 64-bit integer
/// representation.
fn kdl_to_trustfall_value(val: KdlValue) -> FieldValue {
    match val {
        KdlValue::Bool(b) => FieldValue::Boolean(b),
        KdlValue::Float(f) => FieldValue::Float64(f),
        KdlValue::Null => FieldValue::Null,
        // `expect` instead of the previous bare `unwrap`: name the actual
        // failure (out-of-range integer) when the conversion overflows.
        KdlValue::Integer(i) => FieldValue::Int64(
            i.try_into()
                .expect("KDL integer value does not fit into a 64-bit integer"),
        ),
        KdlValue::String(s) => FieldValue::String(s.into()),
    }
}

View file

@ -0,0 +1,88 @@
schema {
query: RootSchemaQuery
}
directive @filter(
"""
Name of the filter operation to perform.
"""
op: String!
"""
List of string operands for the operator.
"""
value: [String!]
) repeatable on FIELD | INLINE_FRAGMENT
directive @tag(
"""
Name to apply to the given property field.
"""
name: String
) on FIELD
directive @output(
"""
What to designate the output field generated from this property field.
"""
name: String
) on FIELD
directive @optional on FIELD
directive @recurse(
"""
Recurse up to this many times on this edge. A depth of 1 produces the current
vertex and its immediate neighbors along the given edge.
"""
depth: Int!
) on FIELD
directive @fold on FIELD
directive @transform(
"""
Name of the transformation operation to perform.
"""
op: String!
) on FIELD
"""
All the possible data types to begin querying
"""
type RootSchemaQuery {
"""
All records in your plaixt instance
"""
Records: [Record!]!
}
"""
Common interface implemented by every generated `p_<kind>` record type.
"""
interface Record {
    _kind: String!
    _at: String!
}

"""
A filesystem path; `exists` reflects the state at query evaluation time.
"""
interface Path {
    path: String!
    exists: Boolean!
    basename: String!
}

# NOTE(review): `File` is declared as an interface but no concrete type
# implements it — the adapter appears to coerce vertices to it directly;
# confirm this is intentional.
"""
A path pointing at a regular file.
"""
interface File implements Path {
    path: String!
    exists: Boolean!
    basename: String!
    extension: String!
}

"""
A path pointing at a directory, exposing its entries as `Children`.
"""
type Directory implements Path {
    path: String!
    exists: Boolean!
    basename: String!
    Children: [Path!]!
}

"""
A document stored in a paperless instance.
"""
type PaperlessDocument {
    id: Int!
    title: String!
    content: String!
    archive_serial_number: Int
    created: String!
    added: String!
}

View file

@ -0,0 +1,16 @@
use trustfall::provider::check_adapter_invariants;
use super::Adapter;
/// Runs trustfall's built-in conformance checks against an adapter built
/// with no records and no definitions; this validates the static schema and
/// the adapter's resolver wiring.
#[tokio::test]
async fn adapter_satisfies_trustfall_invariants() {
    let schema = Adapter::schema();
    let adapter = Adapter::new(
        schema.clone(),
        vec![],
        [].into(),
        None,
        // Requires the tokio test runtime, hence `#[tokio::test]`.
        tokio::runtime::Handle::current(),
    );
    check_adapter_invariants(schema, adapter);
}

View file

@ -0,0 +1,15 @@
use camino::Utf8PathBuf;
use paperless_rs::endpoint::documents::Document as PaperlessDocument;
use crate::parsing::Record;
/// The vertex type driving trustfall query evaluation.
#[non_exhaustive]
#[derive(Debug, Clone, trustfall::provider::TrustfallEnumVertex)]
pub enum Vertex {
    /// A filesystem path that was not a file or directory when constructed.
    Path(Utf8PathBuf),
    /// A path that was a regular file when the vertex was constructed.
    File(Utf8PathBuf),
    /// A path that was a directory when the vertex was constructed.
    Directory(Utf8PathBuf),
    /// A document fetched from a paperless instance.
    PaperlessDocument(PaperlessDocument),
    /// A parsed plaixt record.
    Record(Record),
}

View file

@ -0,0 +1,39 @@
use camino::Utf8Path;
use camino::Utf8PathBuf;
use kdl::KdlDocument;
use miette::Context;
use miette::LabeledSpan;
/// Application configuration parsed from the KDL config file.
#[derive(Debug)]
pub struct Config {
    /// Root folder holding the plaixt records and `definitions` directory.
    pub(crate) root_folder: Utf8PathBuf,
}
/// Reads and parses the KDL configuration file at `path`.
///
/// # Errors
/// Returns a diagnostic (annotated with the file's source text so miette can
/// render labeled spans) when the file cannot be read, is not valid KDL, or
/// lacks a `root_folder` entry whose first argument is a string.
pub(crate) async fn parse_config(path: &Utf8Path) -> miette::Result<Config> {
    let data = tokio::fs::read_to_string(path)
        .await
        .map_err(|e| miette::miette!(e))
        .wrap_err_with(|| miette::miette!("Could not read configuration at \"{path}\""))?;
    // `data` is cloned here because it is still needed below for the
    // `root_folder` diagnostic's source code.
    let doc: KdlDocument = data
        .parse()
        .map_err(|e| miette::Error::from(e).with_source_code(data.clone()))?;
    Ok(Config {
        root_folder: doc
            .get("root_folder")
            .ok_or_else(|| miette::miette!("\"root_folder\" configuration value not found"))
            .and_then(|val| {
                // First argument of the `root_folder` node must be a string;
                // anything else produces a labeled diagnostic at its span.
                val.get(0)
                    .and_then(|v| v.as_string().map(Into::into))
                    .ok_or_else(|| {
                        miette::diagnostic!(
                            labels = vec![LabeledSpan::new_primary_with_span(None, val.span())],
                            "root_folder is expected to be a path"
                        )
                        .into()
                    })
                    .map_err(|e: miette::Report| e.with_source_code(data))
            })?,
    })
}

View file

@ -1,16 +1,24 @@
#![allow(dead_code)]
use camino::Utf8Path;
use std::collections::BTreeMap;
use std::io::Read;
use std::sync::Arc;
use camino::Utf8PathBuf;
use clap::Parser;
use clap::Subcommand;
use clap::ValueHint;
use human_panic::Metadata;
use kdl::KdlDocument;
use miette::LabeledSpan;
use miette::WrapErr;
use miette::IntoDiagnostic;
use parsing::Definition;
use parsing::Record;
use tracing::info;
use tracing_subscriber::EnvFilter;
use trustfall::execute_query;
use trustfall::FieldValue;
mod adapter;
mod config;
mod parsing;
#[derive(Debug, Parser)]
@ -33,11 +41,7 @@ struct Args {
#[derive(Debug, Subcommand)]
enum ArgMode {
Dump,
}
#[derive(Debug)]
pub struct Config {
root_folder: Utf8PathBuf,
Query,
}
#[tokio::main]
@ -47,54 +51,107 @@ async fn main() -> miette::Result<()> {
.authors(env!("CARGO_PKG_AUTHORS"))
);
tracing_subscriber::fmt().pretty().init();
tracing_subscriber::fmt()
.with_env_filter(EnvFilter::from_default_env())
.pretty()
.init();
let args = Args::parse();
let config = parse_config(&args.config).await?;
let config = config::parse_config(&args.config).await?;
let root_folder = args.root_folder.as_ref().unwrap_or(&config.root_folder);
let load_records = async {
let definitions = parsing::load_definitions(&root_folder.join("definitions")).await?;
parsing::load_records(root_folder, &definitions).await
};
let definitions = parsing::load_definitions(&root_folder.join("definitions")).await?;
let records = parsing::load_records(root_folder, &definitions).await?;
let (schema, adapter) = get_schema_and_adapter(&definitions, records.clone());
match args.mode {
ArgMode::Dump => {
let records = load_records.await?;
ArgMode::Query => {
let mut query = String::new();
std::io::stdin()
.read_to_string(&mut query)
.into_diagnostic()?;
info!("Got records: {records:#?}");
let result = execute_query(
&schema,
Arc::new(adapter),
&query,
BTreeMap::<Arc<str>, FieldValue>::from([]),
)
.unwrap()
.collect::<Vec<_>>();
info!("Got records: {result:#?}");
}
ArgMode::Dump => {
print_records(&records);
}
}
Ok(())
}
async fn parse_config(path: &Utf8Path) -> miette::Result<Config> {
let data = tokio::fs::read_to_string(path)
.await
.map_err(|e| miette::miette!(e))
.wrap_err_with(|| miette::miette!("Could not read configuration at \"{path}\""))?;
let doc: KdlDocument = data
.parse()
.map_err(|e| miette::Error::from(e).with_source_code(data.clone()))?;
Ok(Config {
root_folder: doc
.get("root_folder")
.ok_or_else(|| miette::miette!("\"root_folder\" configuration value not found"))
.and_then(|val| {
val.get(0)
.and_then(|v| v.as_string().map(Into::into))
.ok_or_else(|| {
miette::diagnostic!(
labels = vec![LabeledSpan::new_primary_with_span(None, val.span())],
"root_folder is expected to be a path"
)
.into()
})
.map_err(|e: miette::Report| e.with_source_code(data))
})?,
})
fn get_schema_and_adapter(
definitions: &BTreeMap<String, Vec<Definition>>,
records: Vec<Record>,
) -> (trustfall::Schema, adapter::Adapter) {
let schema = adapter::to_schema(definitions);
let definitions = definitions
.iter()
.map(|(name, def)| (name.clone(), def.last().cloned().unwrap().fields))
.collect();
let adapter = adapter::Adapter::new(
schema.clone(),
records,
definitions,
None,
tokio::runtime::Handle::current(),
);
(schema, adapter)
}
fn print_records(records: &[Record]) {
for record in records {
println!("{kind} @ {at} {{", kind = record.kind, at = record.at);
for field in &record.fields {
println!("\t{name} = {value}", name = field.0, value = field.1);
}
println!("}}")
}
}
#[cfg(test)]
mod tests {
use camino::Utf8PathBuf;
use tracing_subscriber::EnvFilter;
use trustfall::provider::check_adapter_invariants;
use crate::get_schema_and_adapter;
use crate::parsing;
#[tokio::test]
async fn ensure_adapter_satisfies_invariants() {
tracing_subscriber::fmt()
.with_env_filter(EnvFilter::from_default_env())
.pretty()
.with_test_writer()
.init();
let root_folder = Utf8PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../examples");
println!("{root_folder}");
let definitions = parsing::load_definitions(&root_folder.join("definitions"))
.await
.unwrap();
let records = parsing::load_records(&root_folder, &definitions)
.await
.unwrap();
let (schema, adapter) = get_schema_and_adapter(&definitions, records.clone());
check_adapter_invariants(&schema, adapter);
}
}

View file

@ -1,5 +1,4 @@
use std::collections::BTreeMap;
use std::collections::HashMap;
use camino::Utf8Path;
use camino::Utf8PathBuf;
@ -15,7 +14,7 @@ use miette::NamedSource;
use owo_colors::OwoColorize;
use tokio_stream::wrappers::ReadDirStream;
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct Record {
pub(crate) kind: String,
pub(crate) at: Timestamp,
@ -82,8 +81,7 @@ pub(crate) fn parse_record(
.map(|field| {
let Some(get) = field.get(0) else {
return Err(miette::diagnostic!(
labels =
vec![LabeledSpan::new_primary_with_span(None, at_entry.span())],
labels = vec![LabeledSpan::new_primary_with_span(None, at_entry.span())],
"This datetime should be a string formatted as RFC3339."
))?;
};
@ -143,9 +141,8 @@ pub(crate) async fn load_records(
})
.flat_map(|val| futures::stream::iter(val.transpose()))
.and_then(|(name, bytes)| async move {
parse_record(&bytes, definitions).map_err(|e| {
e.with_source_code(NamedSource::new(name, bytes).with_language("kdl"))
})
parse_record(&bytes, definitions)
.map_err(|e| e.with_source_code(NamedSource::new(name, bytes).with_language("kdl")))
})
.map(|val| val.map(|recs| futures::stream::iter(recs).map(Ok::<_, miette::Report>)))
.try_flatten()
@ -155,19 +152,32 @@ pub(crate) async fn load_records(
Ok(defs)
}
#[derive(Debug)]
#[derive(Debug, Clone)]
pub enum DefinitionKind {
String,
Path,
OneOf(Vec<String>),
}
impl DefinitionKind {
pub(crate) fn trustfall_kind(&self, _namespace: &str) -> String {
match self {
DefinitionKind::String => String::from("String!"),
DefinitionKind::Path => String::from("Path!"),
DefinitionKind::OneOf(_vecs) => String::from("String!"),
}
}
pub(crate) fn validate(&self, val: &KdlValue) -> Result<(), String> {
match self {
DefinitionKind::String => val
.is_string()
.then_some(())
.ok_or("Expected a string here".to_string()),
DefinitionKind::Path => val
.is_string()
.then_some(())
.ok_or("Expected a path encoded as a string here".to_string()),
DefinitionKind::OneOf(options) => val
.as_string()
.is_some_and(|val| options.iter().any(|o| o == val))
@ -175,6 +185,23 @@ impl DefinitionKind {
.ok_or_else(|| format!("Expected one of: {}", options.join(", "))),
}
}
pub(crate) fn extra_trustfall_kinds(
&self,
namespace: &str,
) -> Vec<crate::adapter::CustomVertex> {
match self {
DefinitionKind::OneOf(defs) => {
let name = format!("{namespace}Def");
let vec = vec![crate::adapter::CustomVertex {
definition: format!("enum {name} {{ {} }}", defs.join(",")),
name,
}];
vec
}
_ => vec![],
}
}
}
impl TryFrom<&str> for DefinitionKind {
@ -182,18 +209,23 @@ impl TryFrom<&str> for DefinitionKind {
fn try_from(value: &str) -> Result<Self, Self::Error> {
match value.to_ascii_lowercase().as_str() {
"string" => Ok(DefinitionKind::String),
"path" => Ok(DefinitionKind::Path),
other => miette::bail!("Did not recognize valid field kind: \"{other}\""),
}
}
}
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct Definition {
pub(crate) name: String,
pub(crate) since: Timestamp,
pub(crate) fields: HashMap<String, DefinitionKind>,
pub(crate) fields: BTreeMap<String, DefinitionKind>,
}
pub(crate) fn parse_definition(bytes: &str) -> miette::Result<Vec<Definition>> {
pub(crate) fn parse_definition(
bytes: &str,
definition_name: String,
) -> miette::Result<Vec<Definition>> {
let doc: KdlDocument = bytes.parse()?;
let mut defs = vec![];
@ -288,11 +320,27 @@ pub(crate) fn parse_definition(bytes: &str) -> miette::Result<Vec<Definition>> {
}
};
match field.name().value() {
"at" | "kind" => return Err(miette::diagnostic!(
labels = vec![LabeledSpan::new_primary_with_span(
Some(String::from("this name")),
field.name().span()
)],
help = "Both `at` and `kind` are reserved field names.",
"Reserved field name."
))?,
_ => {}
}
Ok((field.name().to_string(), kind))
})
.collect::<miette::Result<_>>()?;
defs.push(Definition { since, fields });
defs.push(Definition {
since,
fields,
name: definition_name.clone(),
});
}
unknown => {
return Err(miette::diagnostic!(
@ -332,9 +380,10 @@ pub(crate) async fn load_definitions(
})
.flat_map(|val| futures::stream::iter(val.transpose()))
.and_then(|(name, bytes)| async move {
let definition_name = name.file_stem().unwrap().to_string();
Ok((
name.file_stem().unwrap().to_string(),
parse_definition(&bytes).map_err(|e| {
definition_name.clone(),
parse_definition(&bytes, definition_name).map_err(|e| {
e.with_source_code(NamedSource::new(name, bytes).with_language("kdl"))
})?,
))
@ -344,4 +393,3 @@ pub(crate) async fn load_definitions(
Ok(defs)
}

View file

@ -1,11 +1,31 @@
changelog "2025-01-29" {
title "Added parsing of plaixt definitions"
version "0.1.0"
kind "Feature"
type "Feature"
}
changelog "2025-01-30 09:10:59+01:00" {
title "Added parsing of plaixt records"
version "0.1.0"
kind "Feature"
type "Feature"
}
changelog "2025-02-01" {
title "Added CLI options"
version "0.1.0"
type "Feature"
}
changelog "2025-02-07" {
title "Added trustfall as a query frontend"
version "0.1.0"
type "Feature"
}
file_test "2025-02-08" {
path "Cargo.toml"
}
file_test "2025-02-08" {
path "/etc"
}

View file

@ -4,6 +4,6 @@ define since="2025-01-29 20:27:30+01:00" {
fields {
title is=string
version is=string
kind { oneOf "Bugfix" "Feature" "Chore" }
type { oneOf "Bugfix" "Feature" "Chore" }
}
}

View file

@ -0,0 +1,7 @@
// This is the default changelog entry for the plaixt project
define since="2025-02-08 00:00:00+01:00" {
fields {
path is="Path"
}
}

6
flake.lock generated
View file

@ -63,11 +63,11 @@
]
},
"locked": {
"lastModified": 1738117527,
"narHash": "sha256-GFviGfaezjGLFUlxdv3zyC7rSZvTXqwcG/YsF6MDkOw=",
"lastModified": 1738981474,
"narHash": "sha256-YIELTXxfATG0g1wXjyaOWA4qrlubds3MG4FvMPCxSGg=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "6a3dc6ce4132bd57359214d986db376f2333c14d",
"rev": "5c571e5ff246d8fc5f76ba6e38dc8edb6e4002fe",
"type": "github"
},
"original": {

View file

@ -16,8 +16,17 @@
};
};
outputs = { self, nixpkgs, crane, flake-utils, rust-overlay, ... }:
flake-utils.lib.eachDefaultSystem (system:
outputs =
{
self,
nixpkgs,
crane,
flake-utils,
rust-overlay,
...
}:
flake-utils.lib.eachDefaultSystem (
system:
let
pkgs = import nixpkgs {
inherit system;
@ -25,29 +34,51 @@
};
rustTarget = pkgs.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;
unstableRustTarget = pkgs.rust-bin.selectLatestNightlyWith (toolchain: toolchain.default.override {
extensions = [ "rust-src" "miri" "rustfmt" ];
});
unstableRustTarget = pkgs.rust-bin.selectLatestNightlyWith (
toolchain:
toolchain.default.override {
extensions = [
"rust-src"
"miri"
"rustfmt"
];
}
);
craneLib = (crane.mkLib pkgs).overrideToolchain rustTarget;
unstableCraneLib = (crane.mkLib pkgs).overrideToolchain unstableRustTarget;
tomlInfo = craneLib.crateNameFromCargoToml { cargoToml = ./Cargo.toml; };
inherit (tomlInfo) pname version;
inherit (tomlInfo) version;
src = ./.;
rustfmt' = pkgs.writeShellScriptBin "rustfmt" ''
exec "${unstableRustTarget}/bin/rustfmt" "$@"
'';
cargoArtifacts = craneLib.buildDepsOnly {
inherit src;
cargoExtraArgs = "--all-features --all";
common = {
src = ./.;
buildInputs = [
pkgs.openssl
pkgs.pkg-config
];
};
plaixt = craneLib.buildPackage {
inherit cargoArtifacts src version;
cargoExtraArgs = "--all-features --all";
};
cargoArtifacts = craneLib.buildDepsOnly (
common
// {
cargoExtraArgs = "--all-features --all";
}
);
plaixt = craneLib.buildPackage (
common
// {
inherit cargoArtifacts version;
cargoExtraArgs = "--all-features --all";
}
);
in
rec {
@ -78,6 +109,8 @@
devShells.plaixt = pkgs.mkShell {
buildInputs = [ ];
inputsFrom = [ plaixt ];
nativeBuildInputs = [
rustfmt'
rustTarget

27
query Normal file
View file

@ -0,0 +1,27 @@
{
Records {
... on p_file_test {
_at @output
_kind @output
path {
... on Directory {
Children @recurse(depth: 10) {
path @output
}
}
}
}
}
# fs__Path(path: "./crates/plaixt/") {
# ... on fs__Folder {
# directory: path @output
# children @fold {
# ... on fs__File {
# file: path @output
# size @output
# }
# }
# }
# }
}

View file

@ -1,2 +1,2 @@
[toolchain]
channel = "1.84.0"
channel = "1.84.1"