From 75a589f2359dac7f0dd0439f78e137f2e2dc4a9b Mon Sep 17 00:00:00 2001 From: Aleksandr Date: Sun, 22 Jun 2025 05:46:26 +0300 Subject: [PATCH] Initial commit --- .envrc | 1 + .gitignore | 3 + Cargo.toml | 42 +++ README.md | 11 + flake.lock | 99 ++++++ flake.nix | 41 +++ macros/Cargo.toml | 18 + macros/src/data.rs | 603 ++++++++++++++++++++++++++++++++++ macros/src/endpoint.rs | 68 ++++ macros/src/int.rs | 544 ++++++++++++++++++++++++++++++ macros/src/lib.rs | 69 ++++ macros/src/str.rs | 472 ++++++++++++++++++++++++++ macros/src/utils.rs | 239 ++++++++++++++ src/array.rs | 146 ++++++++ src/collections.rs | 6 + src/encoding.rs | 1 + src/encoding/dict.rs | 28 ++ src/error.rs | 69 ++++ src/fut.rs | 22 ++ src/generic.rs | 22 ++ src/handling/and_then.rs | 13 + src/handling/apply.rs | 29 ++ src/handling/mod.rs | 37 +++ src/handling/provide_state.rs | 1 + src/handling/then.rs | 29 ++ src/hash.rs | 1 + src/lib.rs | 59 ++++ src/rand.rs | 4 + src/slab.rs | 1 + src/str.rs | 212 ++++++++++++ src/str/ascii.rs | 109 ++++++ src/sync.rs | 1 + src/time/clock.rs | 62 ++++ src/time/date.rs | 526 +++++++++++++++++++++++++++++ src/time/mod.rs | 49 +++ src/time/ser.rs | 3 + src/time/str.rs | 278 ++++++++++++++++ src/time/tests.rs | 306 +++++++++++++++++ src/time/time.rs | 234 +++++++++++++ src/time/timestamp.rs | 321 ++++++++++++++++++ src/time/tz.rs | 23 ++ src/time/utils.rs | 25 ++ src/trace_id.rs | 13 + 43 files changed, 4840 insertions(+) create mode 100644 .envrc create mode 100644 .gitignore create mode 100644 Cargo.toml create mode 100644 README.md create mode 100644 flake.lock create mode 100644 flake.nix create mode 100644 macros/Cargo.toml create mode 100644 macros/src/data.rs create mode 100644 macros/src/endpoint.rs create mode 100644 macros/src/int.rs create mode 100644 macros/src/lib.rs create mode 100644 macros/src/str.rs create mode 100644 macros/src/utils.rs create mode 100644 src/array.rs create mode 100644 src/collections.rs create mode 100644 src/encoding.rs create mode 100644 src/encoding/dict.rs create mode 100644 src/error.rs create mode 100644 src/fut.rs create mode 100644 src/generic.rs create mode 100644 src/handling/and_then.rs create mode 100644 src/handling/apply.rs create mode 100644 src/handling/mod.rs create mode 100644 src/handling/provide_state.rs create mode 100644 src/handling/then.rs create mode 100644 src/hash.rs create mode 100644 src/lib.rs create mode 100644 src/rand.rs create mode 100644 src/slab.rs create mode 100644 src/str.rs create mode 100644 src/str/ascii.rs create mode 100644 src/sync.rs create mode 100644 src/time/clock.rs create mode 100644 src/time/date.rs create mode 100644 src/time/mod.rs create mode 100644 src/time/ser.rs create mode 100644 src/time/str.rs create mode 100644 src/time/tests.rs create mode 100644 src/time/time.rs create mode 100644 src/time/timestamp.rs create mode 100644 src/time/tz.rs create mode 100644 src/time/utils.rs create mode 100644 src/trace_id.rs diff --git a/.envrc b/.envrc new file mode 100644 index 0000000..a5dbbcb --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use flake . diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a5cc47d --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +Cargo.lock +/target +/.direnv diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..3ca20de --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,42 @@ +[package] +name = "eva" +version = "0.1.0" +edition = "2024" + +[workspace] +members = ["macros"] + +[features] +default = [] +# Very long running. 
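+# (Illustrative: enable with `cargo test --features get_time_test`.)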
+get_time_test = [] + +[dependencies.schemars] +version = "=1.0.0-alpha.17" +default-features = false +features = [ + "std", + "derive", +] + +[dependencies] +eva-macros.path = "./macros" + +serde = { version = "1.0", features = ["derive"] } + +auto_impl = { git = "https://github.com/nerodono/auto_impl.git", rev = "86021942264ceabe8542a5aadb2d922554c05a1b" } +trait-set = "0.3.0" + +compact_str = { version = "0.8.0", features = ["serde"] } +const_format = { version = "0.2.34", features = ["rust_1_83"] } +paste = "1.0.15" +rand = "0.9.1" +rand_xoshiro = { version = "0.7.0", features = ["serde"] } +hashbrown = { version = "0.15.2", features = ["serde"] } +ahash = "0.8.11" +perfect-derive = "0.1.5" + +seq-macro = "0.3.6" +bytesize = { version = "2.0.1", features = ["serde"] } +bytes = { version = "1.10.1", features = ["serde"] } +url = { version = "2.5.4", features = ["serde"] } diff --git a/README.md b/README.md new file mode 100644 index 0000000..81e1d33 --- /dev/null +++ b/README.md @@ -0,0 +1,11 @@ +# Eva + +A backbone standard library for the VienDesu! projects. Carefully crafted functionality. + +We emphasize: +- Correctness - the code inside is carefully crafted to ensure that it would be used in a correct way +- Performance - no perf trade-offs where possible +- Precise types - `eva` contains and allows you to define types more precisely, this ensures correctness statically and reveals more use-cases to the user and provides more information to the compiler + +This library may re-export other crates selectively or entirely, but I strive to do that as rarely as possible. + diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..a55844a --- /dev/null +++ b/flake.lock @@ -0,0 +1,99 @@ +{ + "nodes": { + "fenix": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ], + "rust-analyzer-src": "rust-analyzer-src" + }, + "locked": { + "lastModified": 1750487788, + "narHash": "sha256-79O83W9osY3wyvxZHqL0gw85tcACSX0TU5en3+dky/0=", + "owner": "nix-community", + "repo": "fenix", + "rev": "933bc78d45abaf764dbfe0fd117be981631f3e9a", + "type": "github" + }, + "original": { + "owner": "nix-community", + "repo": "fenix", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1750365781, + "narHash": "sha256-XE/lFNhz5lsriMm/yjXkvSZz5DfvKJLUjsS6pP8EC50=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "08f22084e6085d19bcfb4be30d1ca76ecb96fe54", + "type": "github" + }, + "original": { + "id": "nixpkgs", + "ref": "nixos-unstable", + "type": "indirect" + } + }, + "root": { + "inputs": { + "fenix": "fenix", + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs" + } + }, + "rust-analyzer-src": { + "flake": false, + "locked": { + "lastModified": 1750405264, + "narHash": "sha256-EMFKnO+J3dZOa9J+uiKZgHYgzALv9dqxY7NHV0DbO/U=", + "owner": "rust-lang", + "repo": "rust-analyzer", + "rev": "b0552d779f7137c76f109666ce0ad28395c0e582", + "type": "github" + }, + "original": { + "owner": "rust-lang", + "ref": "nightly", + "repo": "rust-analyzer", + "type": "github" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": 
"nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..a83c568 --- /dev/null +++ b/flake.nix @@ -0,0 +1,41 @@ +{ + inputs = { + fenix = { + url = "github:nix-community/fenix"; + inputs.nixpkgs.follows = "nixpkgs"; + }; + flake-utils.url = "github:numtide/flake-utils"; + nixpkgs.url = "nixpkgs/nixos-unstable"; + }; + + outputs = { flake-utils, fenix, nixpkgs, ... }: + flake-utils.lib.eachDefaultSystem(system: + let + overlays = [ fenix.overlays.default ]; + pkgs = import nixpkgs { inherit system overlays; }; + rust = with pkgs.fenix; combine [ + ((fromToolchainName { name = "1.87"; sha256 = "sha256-KUm16pHj+cRedf8vxs/Hd2YWxpOrWZ7UOrwhILdSJBU="; }).withComponents [ + "cargo" + "rustc" + "rust-src" + "rust-analyzer" + "clippy" + "llvm-tools-preview" + ]) + default.rustfmt + ]; + llvm = pkgs.llvmPackages_20; + in + { + devShells.default = pkgs.mkShell { + LIBCLANG_PATH = "${pkgs.libclang.lib}/lib"; + packages = [ rust ] ++ (with pkgs; [ + libclang.lib + ]); + buildInputs = with pkgs; [ + stdenv.cc.cc.lib + ]; + }; + } + ); +} diff --git a/macros/Cargo.toml b/macros/Cargo.toml new file mode 100644 index 0000000..a0e8e09 --- /dev/null +++ b/macros/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "eva-macros" +version = "0.1.0" +edition = "2024" + +[lib] +proc-macro = true + +[dependencies] +syn = "2.0.104" +quote = "1.0.40" +proc-macro-error = "1.0.4" +proc-macro2 = "1.0.95" + +darling = "0.20.10" +heck = "0.5.0" + +range-set-blaze = "0.3.0" diff --git a/macros/src/data.rs b/macros/src/data.rs new file mode 100644 index 0000000..275fc1e --- /dev/null +++ b/macros/src/data.rs @@ -0,0 +1,603 @@ +use proc_macro::TokenStream; +use syn::{Token, parse, punctuated::Punctuated, spanned::Spanned}; + +use proc_macro_error::abort; + +use heck::{ToKebabCase, ToLowerCamelCase, ToPascalCase, ToSnakeCase}; +use quote::quote; + +use crate::utils::{self, join, maybe, mk_derive}; + +fn wildcard_arm(var: &syn::Variant) -> proc_macro2::TokenStream { + use syn::Fields as F; + let ident = &var.ident; + match var.fields { + F::Unit => quote! { #ident }, + F::Named(..) => quote! { #ident { .. } }, + F::Unnamed(..) => quote! { #ident(..) }, + } +} + +fn let_destructure_pat(fields: &syn::Fields) -> proc_macro2::TokenStream { + let mut f_num = 0_usize; + utils::construct_pat(fields, |field| { + let wildcard_pat = if let Some(ident) = &field.ident { + ident.clone() + } else { + let prev = quote::format_ident!("_{f_num}"); + f_num += 1; + prev + }; + + utils::ConstructPat::Wildcard(wildcard_pat) + }) +} + +pub fn transform(args: TokenStream, body: TokenStream) -> TokenStream { + let orig = body.clone(); + let item = syn::parse_macro_input!(body as Item); + let args = syn::parse_macro_input!(args as Args); + + let snake_ident = item.ident.to_string().to_snake_case(); + let mut snake_ident = quote::format_ident!("{snake_ident}"); + snake_ident.set_span(item.ident.span()); + let snake_ident = snake_ident; + let eva = args.eva.unwrap_or({ + let eva = quote::format_ident!("eva"); + syn::parse_quote! 
{ + ::#eva + } + }); + + let orig: proc_macro2::TokenStream = orig.into(); + let derives = &args.derives; + let deps = join(&eva, "_priv"); + let serde = join(&deps, "serde"); + + let std: syn::Path = syn::parse_quote!(::std); + let fmt = join(&std, "fmt"); + let cmp = join(&std, "cmp"); + let clone = join(&std, "clone"); + let marker = join(&std, "marker"); + + let ser = maybe(derives.serialize, || mk_derive(&join(&serde, "Serialize"))); + let de = maybe(derives.deserialize, || { + mk_derive(&join(&serde, "Deserialize")) + }); + + let schemars = join(&deps, "schemars"); + let json_schema = maybe(derives.json_schema, || { + let derive = mk_derive(&join(&schemars, "JsonSchema")); + let eva = quote! { #schemars }.to_string(); + quote! { + #derive + #[schemars(crate = #eva)] + } + }); + + let serde_attr = maybe(derives.serialize || derives.deserialize, || { + let eva = quote! { #serde }.to_string(); + quote! { #[serde(crate = #eva)] } + }); + + let partial_eq = maybe(derives.partial_eq, || mk_derive(&join(&cmp, "PartialEq"))); + let eq = maybe(derives.eq, || mk_derive(&join(&cmp, "Eq"))); + let partial_ord = maybe(derives.partial_ord, || mk_derive(&join(&cmp, "PartialOrd"))); + let ord = maybe(derives.ord, || mk_derive(&join(&cmp, "Ord"))); + + let clone = maybe(derives.clone, || mk_derive(&join(&clone, "Clone"))); + let copy = maybe(derives.copy, || mk_derive(&join(&marker, "Copy"))); + + let as_static_str = maybe( + args.as_static_str && matches!(item.kind, ItemKind::Enum { .. }), + || { + let ItemKind::Enum { variants } = &item.kind else { + unreachable!() + }; + let (ig, tyg, where_clause) = item.generics.split_for_impl(); + let case = join(&join(&eva, "generic"), "Case"); + let arms = variants.iter().fold(quote! {}, |acc, var| { + let pat = wildcard_arm(&var); + let pat = quote! { Self::#pat }; + + let var_name = var.ident.to_string(); + let snake = var_name.to_snake_case(); + let kebab = var_name.to_kebab_case(); + let pascal = var_name.to_pascal_case(); + let camel = var_name.to_lower_camel_case(); + + let case_match = quote! { + if let Some(cvt) = convert_case { + match cvt { + #case::Snake => #snake, + #case::Kebab => #kebab, + #case::Pascal => #pascal, + #case::Camel => #camel, + } + } else { + #var_name + } + }; + + quote! { #acc #pat => #case_match, } + }); + let ident = &item.ident; + + quote! { + impl #ig #ident #tyg #where_clause { + /// Convert enum discriminant into the string. + pub const fn as_static_str(&self, convert_case: Option<#case>) -> &'static str { + match *self { + #arms + } + } + } + } + }, + ); + + let display_impl = if let Some(display) = args.display { + match &display { + DisplayImpl::Fmt(DisplayFmt { + fmt: fmt_string, + args, + }) => { + let let_assign = if let ItemKind::Struct { fields } = &item.kind { + let pat = let_destructure_pat(fields); + quote! { let Self #pat = self; } + } else { + quote! {} + }; + let (ig, tyg, where_clause) = item.generics.split_for_impl(); + let ident = &item.ident; + + quote! 
{ + const _: () = { + use #std::io::Write as _; + + #[allow(non_shorthand_field_pattern)] + impl #ig #fmt::Display for #ident #tyg #where_clause { + #[inline] + fn fmt(&self, f: &mut #fmt::Formatter<'_>) -> #fmt::Result { + #let_assign + #std::write!(f, #fmt_string, #(#args),*) + } + } + }; + } + } + spec @ (DisplayImpl::Doc | DisplayImpl::Name) => { + let ItemKind::Enum { variants } = &item.kind else { + abort!( + item.ident, + "to use this display setting the type must be enum" + ); + }; + + let display = join(&fmt, "Display"); + let match_arms = variants.iter().fold(quote! {}, |acc, var| { + let display_str = match spec { + DisplayImpl::Name => var.ident.to_string().to_snake_case(), + DisplayImpl::Doc => combine_docs(&var.attrs).unwrap_or_else(|| { + abort!(var, "this variant has no docs"); + }), + _ => unreachable!(), + }; + let pat = wildcard_arm(&var); + let pat = quote! { Self::#pat }; + quote! { #acc #pat => #display_str, } + }); + + let (ig, tyg, where_clause) = item.generics.split_for_impl(); + let ty_name = &item.ident; + + let result = join(&fmt, "Result"); + let formatter = join(&fmt, "Formatter"); + + let doc_match = quote! { + match *self { + #match_arms + } + }; + + quote! { + const _: () = { + #[automatically_derived] + #[allow(unreachable_code)] + impl #ig #display for #ty_name #tyg #where_clause { + #[inline] + fn fmt(&self, f: &mut #formatter<'_>) -> #result { + let doc_str: &'static str = #doc_match; + f.write_str(doc_str) + } + } + }; + } + } + } + } else { + if let ItemKind::Enum { variants } = &item.kind { + let mut arms = quote! {}; + for var in variants.iter() { + let Some(args) = var + .attrs + .iter() + .find(|attr| attr.path().is_ident("display")) + else { + continue; + }; + let syn::Meta::List(args) = &args.meta else { + abort!(args, "must be meta list"); + }; + let args: DisplayFmt = match syn::parse(args.tokens.clone().into()) { + Ok(r) => r, + Err(e) => return TokenStream::from(e.to_compile_error()), + }; + + let fmt_string = args.fmt; + let fmt_args = args.args; + let destructure = let_destructure_pat(&var.fields); + let ident = &var.ident; + arms = quote! { + #arms + Self::#ident #destructure => { + #std::write!(f, #fmt_string, #(#fmt_args),*) + }, + }; + } + let arms = arms; + + if arms.is_empty() { + quote! {} + } else { + let (ig, tyg, where_clause) = item.generics.split_for_impl(); + let ident = &item.ident; + quote! { + const _: () = { + use #std::io::Write as _; + + #[automatically_derived] + #[allow(non_shorthand_field_patterns)] + impl #ig #fmt::Display for #ident #tyg #where_clause { + #[inline] + fn fmt(&self, f: &mut #fmt::Formatter<'_>) -> #fmt::Result { + match self { + #arms + } + } + } + }; + } + } + } else { + quote! {} + } + }; + let from_impls = { + let mut impls = Vec::::new(); + match &item.kind { + ItemKind::Enum { variants } => { + for var in variants { + let mut generics = item.generics.clone(); + let mut from_assigned = None; + let create = utils::construct(&var.fields, |field| { + let ty = &field.ty; + if let Some(attr) = field.attrs.iter().find(|a| a.path().is_ident("from")) { + if from_assigned.is_some() { + abort!(attr, "must be only one #[from]"); + } + if !matches!(attr.meta, syn::Meta::Path(..)) { + abort!(attr, "must be simple path"); + } + + from_assigned = Some(ty.clone()); + quote! { from_value } + } else { + utils::add_bound( + &mut generics.where_clause, + syn::parse_quote! { + #ty: ::core::default::Default + }, + ); + quote! 
{ <#ty as ::core::default::Default>::default() } + } + }); + + if let Some(ty) = from_assigned { + let var_ident = &var.ident; + let (ig, tyg, where_clause) = generics.split_for_impl(); + let ident = &item.ident; + impls.push(quote! { + impl #ig ::core::convert::From<#ty> for #ident #tyg #where_clause { + fn from(from_value: #ty) -> Self { + Self::#var_ident #create + } + } + }); + } + } + } + + ItemKind::Struct { fields } => {} + } + + quote! { #(#impls)* } + }; + let error = maybe(args.error, || { + let (ig, tyg, where_clause) = item.generics.split_for_impl(); + let ident = &item.ident; + + // TODO: make not that stupid. + quote! { + impl #ig #std::error::Error for #ident #tyg #where_clause {} + } + }); + let debug = maybe(derives.debug, || mk_derive(&join(&fmt, "Debug"))); + let rename = maybe(derives.serialize || derives.deserialize, || { + quote! { + #[serde(rename_all = "snake_case")] + } + }); + + quote! { + #debug + #clone + #copy + #partial_eq + #eq + #partial_ord + #ord + #ser + #de + #serde_attr + #json_schema + #rename + #[derive(#eva::_priv::RastGawno)] + #orig + + #error + + #from_impls + + #as_static_str + + #display_impl + } + .into() +} + +fn combine_docs(attrs: &[syn::Attribute]) -> Option { + let docs = utils::collect_docs(attrs)?; + let res = docs.trim().trim_end_matches('.'); + let mut it = res.chars(); + // Make first letter lowercase. + Some( + it.next() + .into_iter() + .flat_map(|c| c.to_lowercase()) + .chain(it) + .collect(), + ) +} + +struct Derives { + serialize: bool, + deserialize: bool, + json_schema: bool, + clone: bool, + copy: bool, + partial_eq: bool, + eq: bool, + debug: bool, + partial_ord: bool, + ord: bool, +} + +#[derive(Debug)] +enum DisplayImpl { + Fmt(DisplayFmt), + + /// Implement display on enums by parsing doc-comments. Strips + /// trailing dot if present. + Doc, + /// Implement display on enums by variant names. 
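+    /// E.g. a variant named `SomeVariant` is displayed as `some_variant`.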
+ Name, +} + +#[derive(Debug)] +pub struct DisplayFmt { + pub fmt: syn::LitStr, + pub args: Vec, +} + +impl parse::Parse for DisplayFmt { + fn parse(ps: parse::ParseStream) -> syn::Result { + let fmt: syn::LitStr = ps.parse()?; + let mut args = Vec::new(); + + loop { + if ps.peek(Token![,]) { + let _: Token![,] = ps.parse()?; + if ps.is_empty() { + break; + } + } else { + break; + } + + args.push(ps.parse()?); + } + + Ok(Self { fmt, args }) + } +} + +struct Args { + eva: Option, + meta_mod: Option, + error: bool, + derives: Derives, + as_static_str: bool, + display: Option, +} + +impl parse::Parse for Args { + fn parse(input: parse::ParseStream) -> syn::Result { + let mut meta_mod: Option = None; + let mut error = false; + let mut eva: Option = None; + + let mut as_static_str = true; + + let mut display: Option = None; + let mut derives = Derives { + serialize: true, + deserialize: true, + json_schema: true, + clone: true, + debug: true, + partial_eq: true, + eq: false, + copy: false, + partial_ord: false, + ord: false, + }; + + utils::comma_separated(input, |ps| { + if ps.peek(Token![crate]) { + let _: Token![crate] = ps.parse().unwrap(); + let _: Token![=] = ps.parse()?; + eva = Some(ps.parse()?); + return Ok(()); + } + + let param = utils::ident(ps)?; + + if param == "error" { + error = true; + } else if param == "display" { + let content; + syn::parenthesized!(content in ps); + if let Ok(fmt) = content.parse::() { + display = Some(DisplayImpl::Fmt(fmt)); + return Ok(()); + } + + let param = utils::ident(&content)?; + let imp = if param == "doc" { + DisplayImpl::Doc + } else if param == "name" { + DisplayImpl::Name + } else { + return Err(syn::Error::new(param.span(), "doc expected")); + }; + + display = Some(imp); + } else if param == "meta_mod" { + let _: Token![=] = ps.parse()?; + meta_mod = Some(ps.parse()?); + } else if param == "not" { + let content; + syn::parenthesized!(content in ps); + utils::comma_separated(&content, |ps| { + let ident = utils::ident(ps)?; + + if ident == "serde" { + derives.serialize = false; + derives.deserialize = false; + } else if ident == "Serialize" { + derives.serialize = false; + } else if ident == "Deserialize" { + derives.deserialize = false; + } else if ident == "Debug" { + derives.debug = false; + } else if ident == "Clone" { + derives.clone = false; + } else if ident == "schemars" || ident == "JsonSchema" { + derives.json_schema = false; + } else if ident == "as_static_str" { + as_static_str = false; + } else { + return Err(syn::Error::new(ident.span(), "got unknown `not` attribute")); + } + + Ok(()) + })?; + } else if param == "copy" { + derives.clone = true; + derives.copy = true; + } else if param == "eq" { + derives.eq = true; + derives.partial_eq = true; + } else if param == "ord" { + derives.eq = true; + derives.partial_eq = true; + derives.ord = true; + derives.partial_ord = true; + } else { + return Err(syn::Error::new(param.span(), "got unknown attribute")); + } + + Ok(()) + })?; + + Ok(Self { + meta_mod, + error, + eva, + derives, + display, + as_static_str, + }) + } +} + +#[derive(Debug)] +enum ItemKind { + Enum { + variants: Punctuated, + }, + Struct { + fields: syn::Fields, + }, +} + +#[derive(Debug)] +struct Item { + attrs: Vec, + vis: syn::Visibility, + ident: syn::Ident, + generics: syn::Generics, + + kind: ItemKind, +} + +impl parse::Parse for Item { + fn parse(input: parse::ParseStream) -> syn::Result { + let item: syn::Item = input.parse()?; + let span = item.span(); + + match item { + syn::Item::Enum(e) => Ok(Self { + attrs: 
e.attrs, + vis: e.vis, + ident: e.ident, + generics: e.generics, + kind: ItemKind::Enum { + variants: e.variants, + }, + }), + syn::Item::Struct(s) => Ok(Self { + attrs: s.attrs, + vis: s.vis, + ident: s.ident, + generics: s.generics, + kind: ItemKind::Struct { fields: s.fields }, + }), + _ => Err(syn::Error::new( + span, + "only structs and enums are supported", + )), + } + } +} diff --git a/macros/src/endpoint.rs b/macros/src/endpoint.rs new file mode 100644 index 0000000..ae366f6 --- /dev/null +++ b/macros/src/endpoint.rs @@ -0,0 +1,68 @@ +use proc_macro::TokenStream; + +use proc_macro_error::abort; +use quote::quote; + +pub fn transform(args: TokenStream, body: TokenStream) -> TokenStream { + _ = args; + + let f = syn::parse_macro_input!(body as syn::ItemFn); + let syn::ItemFn { + attrs, + vis, + sig, + block, + } = f; + + if sig.asyncness.is_none() { + abort!(sig, "must be async"); + } + + let generics = &sig.generics; + let ident = &sig.ident; + + if sig.inputs.len() != 2 { + abort!(sig.inputs, "must contain only state and args"); + } + + let fn_state = sig.inputs.get(0).unwrap(); + let fn_args = sig.inputs.get(1).unwrap(); + + let syn::FnArg::Typed(fn_state) = fn_state else { + abort!(fn_state, "must not be self"); + }; + let syn::FnArg::Typed(fn_args) = fn_args else { + abort!(fn_args, "must not be self"); + }; + + let fn_args_pat = &*fn_args.pat; + let fn_state_pat = &*fn_state.pat; + + let fn_args_ty = &*fn_args.ty; + let fn_state_ty = &*fn_state.ty; + + let output = match sig.output { + syn::ReturnType::Default => syn::parse_quote!(()), + syn::ReturnType::Type(_, t) => t.as_ref().clone(), + }; + + let (ig, tyg, where_clause) = generics.split_for_impl(); + + quote! { + #vis struct #ident; + + impl #ig ::eva::handling::Endpoint<#fn_args_ty, #fn_state_ty> for #ident #where_clause { + type Output = #output; + + #(#attrs)* + async fn call( + &self, + #fn_state_pat: #fn_state_ty, + #fn_args_pat: #fn_args_ty, + ) -> Self::Output { + #block + } + } + } + .into() +} diff --git a/macros/src/int.rs b/macros/src/int.rs new file mode 100644 index 0000000..578ad8d --- /dev/null +++ b/macros/src/int.rs @@ -0,0 +1,544 @@ +use proc_macro::{Span, TokenStream}; + +use range_set_blaze::{RangeSetBlaze, UIntPlusOne}; + +use crate::utils::{self, join, maybe}; + +use quote::{ToTokens, quote}; +use syn::{Token, parse, spanned::Spanned}; + +struct IsInclusive(bool); + +impl parse::Parse for IsInclusive { + fn parse(input: parse::ParseStream) -> syn::Result { + if input.peek(Token![=]) { + input.parse::().expect("checked above"); + Ok(Self(true)) + } else { + Ok(Self(false)) + } + } +} + +struct SupportedLit(i128); + +impl parse::Parse for SupportedLit { + fn parse(input: parse::ParseStream) -> syn::Result { + let lit: syn::Lit = input.parse()?; + match lit { + syn::Lit::Int(int) => { + let res = int.base10_parse::()?; + Ok(Self(res)) + } + syn::Lit::Char(chr) => Ok(Self(chr.value() as i128)), + syn::Lit::Byte(b) => Ok(Self(b.value() as i128)), + _ => Err(syn::Error::new( + Span::call_site().into(), + "this literal type is not supported", + )), + } + } +} + +struct SingleRange { + start: i128, + end: i128, +} + +impl parse::Parse for SingleRange { + fn parse(input: parse::ParseStream) -> syn::Result { + let SupportedLit(start) = input.parse()?; + // Allow specifying single values. + if !input.peek(Token![..]) { + return Ok(Self { start, end: start }); + } + + let _: Token![..] 
= input.parse()?; + let IsInclusive(inclusive) = input.parse()?; + let SupportedLit(mut end) = input.parse()?; + + if !inclusive { + end -= 1; + } + + if start > end { + return Err(syn::Error::new( + Span::call_site().into(), + "start must be less than end", + )); + } + + Ok(Self { start, end }) + } +} + +#[derive(Clone, Copy)] +pub enum Repr { + I8, + U8, + I16, + U16, + I32, + U32, + I64, + U64, + I128, + U128, +} + +impl ToTokens for Repr { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + tokens.extend(match self { + Self::U8 => quote! { u8 }, + Self::I8 => quote! { i8 }, + Self::U16 => quote! { u16 }, + Self::I16 => quote! { i16 }, + Self::U32 => quote! { u32 }, + Self::I32 => quote! { i32 }, + Self::U64 => quote! { u64 }, + Self::I64 => quote! { i64 }, + Self::U128 => quote! { u128 }, + Self::I128 => quote! { i128 }, + }); + } +} + +impl parse::Parse for Repr { + fn parse(input: parse::ParseStream) -> syn::Result { + let tp: syn::TypePath = input.parse()?; + let p = tp.path; + let span = p.span(); + + Ok(if p.is_ident("u8") { + Self::U8 + } else if p.is_ident("u16") { + Self::U16 + } else if p.is_ident("u32") { + Self::U32 + } else if p.is_ident("u64") { + Self::U64 + } else if p.is_ident("u128") { + Self::U128 + } else if p.is_ident("i8") { + Self::I8 + } else if p.is_ident("i16") { + Self::I16 + } else if p.is_ident("i32") { + Self::I32 + } else if p.is_ident("i64") { + Self::I64 + } else if p.is_ident("i128") { + Self::I128 + } else { + return Err(syn::Error::new(span, "only u*/i* types are supported")); + }) + } +} + +struct Args { + repr: Repr, + range: RangeSetBlaze, + crate_: syn::Path, + + ser: bool, + de: bool, + schema: bool, +} + +impl parse::Parse for Args { + fn parse(input: parse::ParseStream) -> syn::Result { + let repr: Repr = input.parse()?; + let _: Token![,] = input.parse()?; + + let mut ser = true; + let mut de = true; + let mut schema = true; + + let range = { + let mut set = RangeSetBlaze::new(); + + loop { + let range: SingleRange = input.parse()?; + set.ranges_insert(range.start..=range.end); + if input.peek(Token![ | ]) { + let _: Token![ | ] = input.parse()?; + } else { + break; + } + } + + set + }; + + let mut crate_: syn::Path = syn::parse_quote!(eva); + if input.peek(Token![,]) { + let _: Token![,] = input.parse()?; + + utils::comma_separated(input, |ps| { + if ps.peek(Token![crate]) { + let _: Token![crate] = ps.parse().unwrap(); + utils::assign(ps)?; + crate_ = ps.parse()?; + return Ok(()); + } + + let ident: syn::Ident = ps.parse()?; + if ident == "not" { + let content; + syn::parenthesized!(content in ps); + + utils::comma_separated(&content, |ps| { + let ident: syn::Ident = ps.parse()?; + + if ident == "serde" { + ser = false; + de = false; + } else if ident == "schemars" || ident == "JsonSchema" { + schema = false; + } else if ident == "Serialize" { + ser = false; + } else if ident == "Deserialize" { + de = false; + } + + Ok(()) + })?; + } + Ok(()) + })?; + } + + Ok(Self { + repr, + range, + crate_, + + ser, + de, + schema, + }) + } +} + +fn int_lit(v: i128) -> syn::LitInt { + let v = v.to_string(); + syn::LitInt::new(&v, Span::call_site().into()) +} + +fn variant_name(of: i128) -> syn::Ident { + quote::format_ident!( + "{}{}", + if of >= 0 { "POS" } else { "NEG" }, + of.abs().to_string() + ) +} + +fn rs_len(rs: &RangeSetBlaze) -> usize { + let UIntPlusOne::UInt(m) = rs.len() else { + panic!("Shit happens") + }; + + m.try_into().unwrap() +} + +fn mk_enum(item: &syn::ItemEnum, args: &Args) -> TokenStream { + let name = &item.ident; + 
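+    // For reference, a sketch of an invocation accepted by `Args` above
+    // (the type name is illustrative): ranges may be `|`-separated and a
+    // trailing `crate = ...` / `not(...)` list is optional, e.g.
+    //
+    //     #[int(u8, 1..=8 | 70..=100, not(serde))]
+    //     pub enum Age {}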
let attrs = &item.attrs; + let vis = item.vis.clone(); + let docs = utils::collect_docs(attrs); + + let cap: usize = rs_len(&args.range); + let mut variants: Vec = Vec::with_capacity(cap); + let repr = args.repr; + + for value in args.range.iter() { + let ident = variant_name(value); + let value = value.to_string(); + let lit = syn::LitInt::new(&value, Span::call_site().into()); + let var: syn::Variant = syn::parse_quote! { + #ident = #lit + }; + + variants.push(var); + } + + let range_checks = args.range.ranges().map(|range| { + let start = int_lit(*range.start()); + let end = int_lit(*range.end()); + quote! { + if (repr >= #start) && (repr <= #end) { + return Some(unsafe { Self::new_unchecked(repr) }) + } + } + }); + + let eva = &args.crate_; + let priv_ = join(&args.crate_, "_priv"); + let serde = join(&priv_, "serde"); + let schemars = join(&priv_, "schemars"); + + let json_schema = maybe(args.schema, || { + let schema_id = quote! { + ::std::borrow::Cow::Borrowed(concat!(module_path!(), "::", stringify!(#name))) + }; + let schema_name = name.to_string(); + let items: Vec<_> = args + .range + .ranges() + .map(|range| { + let minimum = int_lit(*range.start()); + let maximum = int_lit(*range.end()); + quote! { + { + "type": "integer", + "minimum": #minimum, + "maximum": #maximum + } + } + }) + .collect(); + let desc = if let Some(desc) = docs.as_deref() { + quote! { + "description": #desc, + } + } else { + quote! {} + }; + let schema = quote! { + #schemars::json_schema!({ + #desc + "anyOf": [#(#items),*] + }) + }; + + quote! { + impl #schemars::JsonSchema for #name { + fn schema_id() -> ::std::borrow::Cow<'static, str> { + #schema_id + } + + fn schema_name() -> ::std::borrow::Cow<'static, str> { + ::std::borrow::Cow::Borrowed(#schema_name) + } + + fn json_schema(_: &mut #schemars::SchemaGenerator) -> #schemars::Schema { + #schema + } + } + } + }); + + let serialize = maybe(args.ser, || { + quote! { + const _: () = { + use ::core::result::Result; + + impl #serde::Serialize for #name { + fn serialize(&self, serializer: S) -> Result + where + S: #serde::Serializer, + { + <#repr as #serde::Serialize>::serialize(&self.into_inner(), serializer) + } + } + }; + } + }); + let deserialize = maybe(args.de, || { + quote! { + const _: () = { + use ::core::result::Result; + + impl<'de> #serde::Deserialize<'de> for #name { + fn deserialize(deserializer: D) -> Result + where + D: #serde::Deserializer<'de>, + { + Self::new(<#repr as #serde::Deserialize<'de>>::deserialize(deserializer)?) + .ok_or_else(|| #serde::de::Error::custom(#eva::generic::OutOfRange)) + } + } + }; + } + }); + + let ranges = { + let ranges_no = args.range.ranges_len(); + let repr = &args.repr; + let ranges = args.range.ranges().map(|r| { + let from = int_lit(*r.start()); + let to = int_lit(*r.end()); + + quote! { #from..=#to } + }); + let values_no = rs_len(&args.range); + let values = args.range.iter().map(|x| { + let lit = int_lit(x); + quote! { #lit } + }); + + let variants = args.range.iter().map(|x| { + let variant = variant_name(x); + quote! { Self::#variant } + }); + + quote! { + impl #name { + #vis const RANGES: [::std::ops::RangeInclusive<#repr>; #ranges_no] = [#(#ranges),*]; + #vis const VALUES: [#repr; #values_no] = [#(#values),*]; + #vis const VARIANTS: [Self; #values_no] = [#(#variants),*]; + } + } + }; + + let nth_impl = { + let mut skip = 0; + let arms = args.range.ranges().fold(quote! 
{}, |acc, range| { + let skip_cur = int_lit(skip); + skip += *range.end() - *range.start() + 1; + let start = int_lit(*range.start()); + let end = int_lit(*range.end()); + + quote! { + #acc + #start ..= #end => (value - #start + #skip_cur) as usize, + } + }); + quote! { + impl #name { + /// Get index in the [`Self::VARIANTS`] array. + pub const fn nth(self) -> usize { + let value = self.into_inner(); + match value { + #arms + + _ => unsafe { ::core::hint::unreachable_unchecked() } + } + } + } + } + }; + let rand_impl = { + if args.range.is_empty() { + quote! {} + } else { + quote! { + impl #eva::rand::distr::Distribution<#name> for #eva::rand::distr::StandardUniform { + fn sample(&self, rng: &mut R) -> #name { + let idx: usize = ::random_range(rng, 0..#name::VARIANTS.len()); + #name::VARIANTS[idx] + } + } + } + } + }; + + quote! { + #[repr(#repr)] + #[derive(PartialEq, Eq)] + #(#attrs)* + #vis enum #name { + #(#variants),* + } + + #nth_impl + #rand_impl + #ranges + + #serialize + #deserialize + + #json_schema + + const _: () = { + use ::core::{mem, fmt, slice, cmp, result::Result, option::Option}; + + impl cmp::PartialEq<#repr> for #name { + fn eq(&self, other: &#repr) -> bool { + *other == *self as #repr + } + } + + impl cmp::PartialOrd for #name { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } + } + + impl cmp::Ord for #name { + fn cmp(&self, other: &Self) -> cmp::Ordering { + self.into_inner().cmp(&other.into_inner()) + } + } + + impl #name { + /// # Safety + /// + /// The `repr` must be in range. + #vis const unsafe fn new_unchecked(repr: #repr) -> Self { + unsafe { mem::transmute::<#repr, Self>(repr) } + } + + /// Create integer from underlying representation. + #vis const fn new(repr: #repr) -> Option { + #(#range_checks)* + None + } + + /// Convert slice into slice of underlying integer + /// representation. + #vis const fn as_repr_slice(slice: &[Self]) -> &[#repr] { + // SAFETY: #repr type domain is wider than ours. + unsafe { slice::from_raw_parts(slice.as_ptr().cast(), slice.len()) } + } + + /// Convert mutable slice into mutable slice of underlying integer + /// representation. + #vis const fn as_repr_slice_mut(slice: &mut [Self]) -> &mut [#repr] { + // SAFETY: #repr type domain is wider than ours. + unsafe { slice::from_raw_parts_mut(slice.as_mut_ptr().cast(), slice.len()) } + } + + /// Convert integer into its representation. 
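+            /// This is the counterpart of [`Self::new`]: a plain `as` cast
+            /// to the underlying integer type.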
+ #vis const fn into_inner(self) -> #repr { + self as #repr + } + } + + impl Copy for #name {} + impl Clone for #name { + fn clone(&self) -> Self { + *self + } + } + + impl From<#name> for #repr { + fn from(val: #name) -> Self { + val as #repr + } + } + + impl fmt::Debug for #name { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let underlying: #repr = (*self).into(); + <#repr as fmt::Debug>::fmt(&underlying, f) + } + } + + impl fmt::Display for #name { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let underlying: #repr = (*self).into(); + <#repr as fmt::Display>::fmt(&underlying, f) + } + } + }; + } + .into() +} + +pub fn transform(args: TokenStream, body: TokenStream) -> TokenStream { + let body = syn::parse_macro_input!(body as syn::ItemEnum); + let args: Args = syn::parse_macro_input!(args as Args); + + mk_enum(&body, &args) +} diff --git a/macros/src/lib.rs b/macros/src/lib.rs new file mode 100644 index 0000000..14aa9d9 --- /dev/null +++ b/macros/src/lib.rs @@ -0,0 +1,69 @@ +#![expect(warnings)] + +use proc_macro::TokenStream; +use proc_macro_error::proc_macro_error; + +/// A "property" (trait). Includes set of utilities for working +/// with traits. +#[proc_macro_error] +#[proc_macro_attribute] +pub fn prop(args: TokenStream, body: TokenStream) -> TokenStream { + todo!() +} + +#[proc_macro_error] +#[proc_macro_attribute] +pub fn endpoint(args: TokenStream, body: TokenStream) -> TokenStream { + self::endpoint::transform(args, body) +} + +#[proc_macro_derive(RastGawno, attributes(display, from, bounds))] +pub fn rast_gawno(body: TokenStream) -> TokenStream { + quote::quote! {}.into() +} + +#[proc_macro_error] +#[proc_macro_attribute] +pub fn data(args: TokenStream, body: TokenStream) -> TokenStream { + self::data::transform(args, body) +} + +/// An integer type in a specified range. +/// +/// ```rust +/// #[int(u8, 0..=100)] +/// pub enum Rating {} +/// +/// #[int(u8, 1..=8 | 70..=100)] +/// pub enum Age {} +/// ``` +/// +/// Extension around [`sum`] macro. +#[proc_macro_error] +#[proc_macro_attribute] +pub fn int(args: TokenStream, body: TokenStream) -> TokenStream { + self::int::transform(args, body) +} + +/// A string type. +/// +/// ```rust +/// #[str(fixed)] +/// struct Fixed([ascii::Printable; 4]); +/// +/// #[str(newtype)] +/// struct Newtype(Fixed); +/// ``` +#[proc_macro_error] +#[proc_macro_attribute] +pub fn str(args: TokenStream, body: TokenStream) -> TokenStream { + self::str::transform(args, body) +} + +mod utils; + +mod int; +mod str; + +mod data; +mod endpoint; diff --git a/macros/src/str.rs b/macros/src/str.rs new file mode 100644 index 0000000..83313c6 --- /dev/null +++ b/macros/src/str.rs @@ -0,0 +1,472 @@ +use syn::{Token, parse}; + +use crate::utils; + +use quote::quote; + +use proc_macro::TokenStream; +use proc_macro_error::{OptionExt as _, abort}; + +pub fn transform(args: TokenStream, body: TokenStream) -> TokenStream { + let mut args = syn::parse_macro_input!(args as Args); + let mut input = syn::parse_macro_input!(body as syn::DeriveInput); + + let eva = &args.crate_; + let ident = input.ident.clone(); + let mut impls = quote! {}; + + let docs = utils::collect_docs(&input.attrs); + + let priv_ = utils::join(&eva, "_priv"); + let serde = utils::join(&priv_, "serde"); + let schemars = utils::join(&priv_, "schemars"); + + let (ig, tyg, where_clause) = input.generics.split_for_impl(); + let mut json_schema = quote! 
{}; + + match args.kind { + Kind::Fixed { error_ty } => { + input.attrs.push(syn::parse_quote!(#[repr(C)])); + + let mut generics = input.generics.clone(); + let syn::Data::Struct(s) = &input.data else { + todo!() + }; + let fields = &s.fields; + + args.copy = true; + args.clone = true; + + let mut pats = quote! {}; + + for field in fields { + let ty = &field.ty; + pats = quote! { + #pats + <#ty as #eva::str::HasPattern>::pat_into(buf); + }; + let predicate = syn::PredicateType { + lifetimes: None, + bounded_ty: ty.clone(), + colon_token: Token![:](proc_macro2::Span::mixed_site()), + bounds: syn::parse_quote!(#eva::str::FixedUtf8), + }; + let predicate = syn::WherePredicate::Type(predicate); + if let Some(ref mut clause) = generics.where_clause { + clause.predicates.push(predicate); + } else { + generics.where_clause = Some(syn::parse_quote! { + where #predicate + }); + } + } + + let (ig, tyg, where_clause) = generics.split_for_impl(); + let schema_id = quote! { + concat!(module_path!(), "::", stringify!(#ident)) + }; + let desc = if let Some(desc) = docs.as_deref() { + quote! { "description": #desc, } + } else { + quote! {} + }; + + let check = { + let tys = fields.iter().map(|f| &f.ty); + let expected_size = quote! { + 0_usize #(+ ::std::mem::size_of::<#tys>())* + }; + quote! { + let expected_size = #expected_size; + if s.len() != expected_size { + return Err(<#error_ty as #eva::str::FixedParseError>::length(expected_size)); + } + } + }; + let parse = utils::construct(&fields, |field| { + let ty = &field.ty; + quote! {{ + let ty_size = ::std::mem::size_of::<#ty>(); + let chunk = unsafe { s.get_unchecked(..ty_size) }; + let res = chunk.parse()?; + s = unsafe { s.get_unchecked(ty_size..) }; + res + }} + }); + impls = quote! { + #impls + + const _: () = { + use ::core::{primitive::str, result::Result, str::FromStr}; + use ::std::{borrow::Cow, string::String}; + + impl #ig #schemars::JsonSchema for #ident #tyg #where_clause { + #[inline] + fn schema_id() -> Cow<'static, str> { + Cow::Borrowed(#schema_id) + } + + #[inline] + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed(stringify!(#ident)) + } + + #[inline] + fn json_schema(_: &mut #schemars::SchemaGenerator) -> #schemars::Schema { + let len = std::mem::size_of::(); + let mut pat = String::from("^"); + ::pat_into(&mut pat); + pat.push('$'); + + #schemars::json_schema!({ + "type": "string", + #desc + "minLength": len, + "maxLength": len, + "pattern": pat, + }) + } + } + + impl #ig #eva::str::HasPattern for #ident #tyg #where_clause { + #[inline] + fn pat_into(buf: &mut String) { + #pats + } + } + + unsafe impl #ig #eva::str::FixedUtf8 for #ident #tyg #where_clause {} + impl #ig #ident #tyg #where_clause { + /// Get str representation. + pub const fn as_str(&self) -> &str { + #eva::str::reinterpret(self) + } + } + + impl #ig FromStr for #ident #tyg #where_clause { + type Err = #error_ty; + + #[inline] + fn from_str(mut s: &str) -> Result { + #check + Ok(Self #parse) + } + } + }; + }; + } + + Kind::Custom => {} + + Kind::Newtype => { + let syn::Data::Struct(s) = &input.data else { + abort!(input, "this must be struct"); + }; + let field = s + .fields + .iter() + .next() + .expect_or_abort("struct must have at least one field"); + let ty = &field.ty; + let access = if let Some(name) = &field.ident { + quote! { #name } + } else { + quote! { 0 } + }; + let schemars_path = quote! {#schemars}.to_string(); + json_schema = quote! { + #[derive(#schemars::JsonSchema)] + #[schemars(crate = #schemars_path)] + }; + + if args.pat { + impls = quote! 
{ + #impls + + const _: () = { + use ::std::string::String; + + impl #ig #eva::str::HasPattern for #ident #tyg #where_clause { + #[inline(always)] + fn pat_into(buf: &mut String) { + <#ty as #eva::str::HasPattern>::pat_into(buf) + } + } + }; + }; + } + let mut first = true; + let parse = utils::construct(&s.fields, |field| { + if first { + first = false; + let ty = &field.ty; + quote! { + <#ty as FromStr>::from_str(s)? + } + } else { + quote! { ::core::default::Default::default() } + } + }); + + impls = quote! { + #impls + + const _: () = { + use ::core::{str::FromStr, result::Result}; + + impl #ig FromStr for #ident #tyg #where_clause { + type Err = <#ty as FromStr>::Err; + + #[inline] + fn from_str(s: &str) -> Result { + Ok(Self #parse) + } + } + + impl #ig #ident #tyg #where_clause { + #[inline] + pub fn as_str(&self) -> &str { + self.#access.as_str() + } + } + }; + }; + } + } + + let ser = utils::maybe(args.ser, || { + quote! { + const _: () = { + use ::core::result::Result; + + impl #ig #serde::Serialize for #ident #tyg #where_clause { + #[inline] + fn serialize(&self, serializer: S) -> Result + where + S: #serde::Serializer, + { + ::serialize(self.as_str(), serializer) + } + } + }; + } + }); + let de = utils::maybe(args.de, || { + let mut generics = input.generics.clone(); + let lt: syn::Lifetime = syn::parse_quote!('__de); + generics.params.insert( + 0, + syn::GenericParam::Lifetime(syn::LifetimeParam::new(lt.clone())), + ); + + let (ig, ..) = generics.split_for_impl(); + let (_, tyg, where_clause) = input.generics.split_for_impl(); + + quote! { + const _: () = { + use ::core::{result::Result, str::FromStr}; + + impl #ig #serde::Deserialize<#lt> for #ident #tyg #where_clause { + #[inline] + fn deserialize(deserializer: D) -> Result + where + D: #serde::Deserializer<#lt>, + { + let s = <#eva::str::CompactString as #serde::Deserialize<#lt>>::deserialize(deserializer)?; + ::from_str(&s) + .map_err(#serde::de::Error::custom) + } + } + }; + } + }); + + impls = quote! { + #impls + + const _: () = { + use ::core::{ + ops::Deref, + borrow::Borrow, + convert::AsRef, + cmp::{PartialEq, Eq}, + primitive::str, + hash::{Hasher, Hash}, + fmt, + }; + + impl #ig fmt::Debug for #ident #tyg #where_clause { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + ::fmt(self.as_str(), f) + } + } + + impl #ig fmt::Display for #ident #tyg #where_clause { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.as_str()) + } + } + + impl #ig Hash for #ident #tyg #where_clause { + fn hash(&self, state: &mut H) { + self.as_str().hash(state); + } + } + + impl #ig PartialEq for #ident #tyg #where_clause { + fn eq(&self, other: &str) -> bool { + self.as_str() == other + } + } + + impl #ig Borrow for #ident #tyg #where_clause { + fn borrow(&self) -> &str { + self.as_str() + } + } + + impl #ig AsRef for #ident #tyg #where_clause { + fn as_ref(&self) -> &str { + self.as_str() + } + } + + impl #ig Deref for #ident #tyg #where_clause { + type Target = str; + + fn deref(&self) -> &Self::Target { + self.as_str() + } + } + }; + }; + + let clone = utils::maybe(args.clone, || { + utils::mk_derive(&syn::parse_quote!(::core::clone::Clone)) + }); + let copy = utils::maybe(args.copy, || { + utils::mk_derive(&syn::parse_quote!(::core::marker::Copy)) + }); + + quote! 
{ + #[derive(PartialEq, Eq, PartialOrd, Ord)] + #clone + #copy + #json_schema + #input + #impls + + #ser + #de + } + .into() +} + +struct Args { + kind: Kind, + ser: bool, + de: bool, + clone: bool, + pat: bool, + copy: bool, + schema: bool, + crate_: syn::Path, +} + +impl parse::Parse for Args { + fn parse(input: parse::ParseStream) -> syn::Result { + let ser = true; + let de = true; + let schema = true; + let clone = true; + let mut copy = false; + let mut pat = true; + + let kind = utils::ident(input)?; + let kind = if kind == "newtype" { + Kind::Newtype + } else if kind == "fixed" { + let content; + syn::parenthesized!(content in input); + let mut error_ty = None::; + utils::comma_separated(&content, |ps| { + let ident = utils::ident(ps)?; + if ident == "error" { + utils::assign(ps)?; + error_ty = Some(ps.parse()?); + } else { + return Err(syn::Error::new(ident.span(), "unexpected parameter")); + } + + Ok(()) + })?; + Kind::Fixed { + error_ty: error_ty.expect_or_abort("there must be error type"), + } + } else if kind == "custom" { + Kind::Custom + } else { + return Err(syn::Error::new( + kind.span(), + "expected `newtype`, `fixed` or `custom`", + )); + }; + let mut crate_: syn::Path = syn::parse_quote!(::eva); + + if utils::token_is(input, Token![,]) { + utils::comma_separated(input, |ps| { + if utils::token_is(ps, Token![crate]) { + utils::assign(ps)?; + crate_ = ps.parse()?; + return Ok(()); + } + + let ident = utils::ident(ps)?; + if ident == "copy" { + copy = true; + } else if ident == "not" { + let content; + syn::parenthesized!(content in ps); + utils::comma_separated(&content, |ps| { + let key = utils::ident(ps)?; + + if key == "pat" { + pat = false; + } else { + return Err(syn::Error::new(key.span(), "invalid key")); + } + + Ok(()) + })?; + } else { + return Err(syn::Error::new(ident.span(), "unexpected parameter")); + } + + Ok(()) + })?; + } + Ok(Self { + kind, + pat, + crate_, + clone: clone || copy, + copy, + ser, + de, + schema, + }) + } +} + +#[derive(Debug, Clone)] +enum Kind { + /// Newtype over some other string. + Newtype, + + /// Fixed size string combined from other fixed + /// size strings. + Fixed { error_ty: syn::Type }, + + /// Custom string, must provide `as_str` and `FromStr`. 
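+    /// (For `custom` the macro emits only the shared impls, e.g. `Display`,
+    /// `Hash`, `Borrow<str>` and the serde impls, built on `as_str`/`FromStr`.)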
+ Custom, +} diff --git a/macros/src/utils.rs b/macros/src/utils.rs new file mode 100644 index 0000000..16f5e39 --- /dev/null +++ b/macros/src/utils.rs @@ -0,0 +1,239 @@ +use syn::{Token, parse, punctuated::Punctuated, spanned::Spanned as _}; + +use std::{fmt, ops::Deref}; + +use proc_macro_error::abort; +use quote::quote; + +type PunctFields = Punctuated; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum RefOrOwned<'a, T> { + Ref(&'a T), + Owned(T), +} + +impl<'a, T> Deref for RefOrOwned<'a, T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + match self { + Self::Ref(r) => *r, + Self::Owned(r) => r, + } + } +} + +pub fn once(fst: F, mut snd: S) -> impl FnMut(I) -> O +where + F: FnOnce(I) -> O, + S: FnMut(I) -> O, +{ + let mut fst = Some(fst); + move |x| { + if let Some(fst) = fst.take() { + fst(x) + } else { + snd(x) + } + } +} + +pub fn add_bound(to: &mut Option, pred: syn::WherePredicate) { + if let Some(clause) = to { + clause.predicates.push(pred); + } else { + *to = Some(syn::WhereClause { + where_token: syn::Token![where](pred.span()), + predicates: { + let mut preds = syn::punctuated::Punctuated::new(); + preds.push(pred); + + preds + }, + }); + } +} + +pub fn construct_wrap( + fields: &syn::Fields, +) -> fn(proc_macro2::TokenStream) -> proc_macro2::TokenStream { + match fields { + syn::Fields::Named(..) => |x| quote! { { #x } }, + syn::Fields::Unnamed(..) => |x| quote! { (#x) }, + syn::Fields::Unit => |_| quote! {}, + } +} + +fn empty_punct() -> Punctuated { + Punctuated::new() +} + +pub fn fields_of<'a>(fields: &'a syn::Fields) -> RefOrOwned<'a, PunctFields> { + match fields { + syn::Fields::Named(fields) => RefOrOwned::Ref(&fields.named), + syn::Fields::Unnamed(fields) => RefOrOwned::Ref(&fields.unnamed), + syn::Fields::Unit => RefOrOwned::Owned(PunctFields::new()), + } +} + +pub enum ConstructPat { + Wildcard(syn::Ident), + Other(proc_macro2::TokenStream), +} + +impl ConstructPat { + fn to_ts(&self) -> proc_macro2::TokenStream { + match self { + ConstructPat::Wildcard(i) => quote! { #i }, + ConstructPat::Other(othr) => othr.clone(), + } + } +} + +pub fn construct_pat(fields: &syn::Fields, mut mk_pat: F) -> proc_macro2::TokenStream +where + F: FnMut(&syn::Field) -> ConstructPat, +{ + let wrap = construct_wrap(fields); + let fields = fields_of(fields); + let inside = fields.iter().fold(quote! {}, |acc, field| { + let pat = mk_pat(field); + if let Some(ident) = &field.ident { + 'res: { + if let ConstructPat::Wildcard(ref wc) = pat { + if wc == ident { + break 'res quote! { #acc #wc, }; + } + } + + let ts = pat.to_ts(); + quote! { #acc #ident : #ts, } + } + } else { + let value = pat.to_ts(); + quote! { #acc #value, } + } + }); + + wrap(inside) +} + +pub fn construct(fields: &syn::Fields, mut mk_value: F) -> proc_macro2::TokenStream +where + F: FnMut(&syn::Field) -> proc_macro2::TokenStream, +{ + let wrap = construct_wrap(fields); + let fields = fields_of(fields); + let inside = fields.iter().fold(quote! {}, |acc, field| { + let value = mk_value(field); + if let Some(ident) = &field.ident { + quote! { #acc #ident : #value, } + } else { + quote! 
{ #acc #value, } + } + }); + + wrap(inside) +} + +pub fn token_is(ps: parse::ParseStream, tok: F) -> bool +where + F: parse::Peek + Copy + FnOnce(I) -> T, + T: syn::token::Token + parse::Parse, +{ + if ps.peek(tok) { + let _: T = ps.parse().unwrap(); + true + } else { + false + } +} + +pub fn collect_docs(attrs: &[syn::Attribute]) -> Option { + let mut docs = String::new(); + let mut had = false; + + for attr in attrs { + if !attr.path().is_ident("doc") { + continue; + } + + let syn::Meta::NameValue(nv) = &attr.meta else { + abort!(attr, r##"must be in form #[doc = "string literal"]"##); + }; + + let syn::Expr::Lit(syn::ExprLit { + attrs: _, + lit: syn::Lit::Str(litstr), + }) = &nv.value + else { + abort!(nv, "must be string literal"); + }; + had = true; + docs.push_str(&litstr.value()); + } + + had.then_some(docs.trim().to_owned()) +} + +pub fn maybe(b: bool, f: F) -> proc_macro2::TokenStream +where + F: FnOnce() -> proc_macro2::TokenStream, +{ + if b { + f() + } else { + quote! {} + } +} + +pub fn mk_derive(path: &syn::Path) -> proc_macro2::TokenStream { + quote! { #[derive(#path)] } +} + +pub fn try_next(ts: parse::ParseStream) -> bool { + if ts.peek(Token![,]) { + let _: Token![,] = ts.parse().unwrap(); + true + } else { + false + } +} + +pub fn assign(ts: parse::ParseStream) -> syn::Result<()> { + let _: Token![=] = ts.parse()?; + Ok(()) +} + +pub fn ident(ts: parse::ParseStream) -> syn::Result { + ts.parse() +} + +pub fn err(tokens: T, msg: U) -> syn::Error { + syn::Error::new_spanned(tokens, msg) +} + +pub fn join(p: &syn::Path, next: &str) -> syn::Path { + let ident = quote::format_ident!("{next}"); + syn::parse_quote!(#p :: #ident) +} + +pub fn comma_separated( + ts: parse::ParseStream, + mut parse: impl FnMut(parse::ParseStream) -> syn::Result<()>, +) -> syn::Result<()> { + loop { + if ts.is_empty() { + break; + } + + parse(ts)?; + + if !try_next(ts) { + break; + } + } + + Ok(()) +} diff --git a/src/array.rs b/src/array.rs new file mode 100644 index 0000000..c5cebfe --- /dev/null +++ b/src/array.rs @@ -0,0 +1,146 @@ +//! # Array utilities and types. 
+ +use std::{ + borrow::{Borrow, Cow}, + fmt, + ops::Deref, + sync::Arc, +}; + +use eva_macros::data; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize, de}; + +#[data(copy, error, display("too big, expected array no bigger than {LIMIT}"), crate = crate)] +pub struct TooBig; + +#[crate::perfect_derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +pub struct ImmutableHeap(Arc<[T]>); + +impl Borrow<[T]> for ImmutableHeap { + fn borrow(&self) -> &[T] { + self.0.as_ref() + } +} + +impl AsRef<[T]> for ImmutableHeap { + fn as_ref(&self) -> &[T] { + self.0.as_ref() + } +} + +impl Deref for ImmutableHeap { + type Target = [T]; + + fn deref(&self) -> &Self::Target { + self.0.as_ref() + } +} + +impl<'a, T, const MAX: usize> IntoIterator for &'a ImmutableHeap { + type IntoIter = <&'a [T] as IntoIterator>::IntoIter; + type Item = &'a T; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +impl TryFrom> for ImmutableHeap { + type Error = TooBig; + + fn try_from(value: Vec) -> Result { + if value.len() > MAX { + return Err(TooBig::); + } + + Ok(Self(value.into())) + } +} + +impl TryFrom<[T; N]> for ImmutableHeap { + type Error = TooBig; + + fn try_from(value: [T; N]) -> Result { + if N > MAX { + return Err(TooBig::); + } + + Ok(Self(Arc::new(value))) + } +} + +impl<'de, T: Deserialize<'de>, const MAX: usize> Deserialize<'de> for ImmutableHeap { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + struct Visitor { + v: Vec, + } + + impl<'d, T: Deserialize<'d>, const MAX: usize> de::Visitor<'d> for Visitor { + type Value = Vec; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "array no bigger than {MAX}") + } + + fn visit_seq(self, mut seq: A) -> Result + where + A: de::SeqAccess<'d>, + { + let mut dst = self.v; + let mut fuel = MAX; + + loop { + if fuel <= 0 { + return Err(de::Error::custom(TooBig::)); + } + + let Some(element) = seq.next_element()? 
else { + break; + }; + + dst.push(element); + fuel -= 1; + } + + Ok(dst) + } + } + + let v = deserializer.deserialize_seq(Visitor:: { + v: Vec::with_capacity(MAX), + })?; + + Ok(Self(v.into())) + } +} + +impl Serialize for ImmutableHeap { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + self.0.serialize(serializer) + } +} + +impl JsonSchema for ImmutableHeap { + fn schema_id() -> Cow<'static, str> { + as JsonSchema>::schema_id() + } + + fn schema_name() -> Cow<'static, str> { + as JsonSchema>::schema_name() + } + + fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "array", + "items": T::json_schema(generator), + "maxItems": MAX, + }) + } +} diff --git a/src/collections.rs b/src/collections.rs new file mode 100644 index 0000000..f140c5f --- /dev/null +++ b/src/collections.rs @@ -0,0 +1,6 @@ +pub use hashbrown::{Equivalent, HashTable, hash_map, hash_table}; + +use std::hash::BuildHasherDefault; + +pub type HashMap = hashbrown::HashMap>; +pub type HashSet = hashbrown::HashSet>; diff --git a/src/encoding.rs b/src/encoding.rs new file mode 100644 index 0000000..f67cf6d --- /dev/null +++ b/src/encoding.rs @@ -0,0 +1 @@ +pub mod dict; diff --git a/src/encoding/dict.rs b/src/encoding/dict.rs new file mode 100644 index 0000000..f6ddf69 --- /dev/null +++ b/src/encoding/dict.rs @@ -0,0 +1,28 @@ +use crate::data; + +#[data(copy, ord, not(serde), crate = crate)] +pub struct Dict<'a, T>(pub &'a [T]); + +impl<'a, T> Dict<'a, T> { + pub const fn get_encoded_size(&self, of: u64) -> usize { + if of == 0 { + 1 + } else { + of.ilog(self.0.len() as u64) as usize + } + } + + pub fn encode(&self, mut value: u64, mut put: impl FnMut(&T)) { + let base = self.0.len() as u64; + loop { + let rem = value % base; + value /= base; + let idx = rem as usize; + put(&self.0[idx]); + + if value == 0 { + break; + } + } + } +} diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 0000000..7fee5b6 --- /dev/null +++ b/src/error.rs @@ -0,0 +1,69 @@ +//! # Error utilities. + +use std::fmt; + +mod seal { + pub trait Seal {} +} + +#[doc(inline)] +pub use crate::_combined as combined; + +#[macro_export] +#[doc(hidden)] +macro_rules! _combined { + ($($(#[$outer_meta:meta])* $vis:vis enum $name:ident { + $( $VarName:ident($ty:ty) ),* $(,)? + })*) => {$( + $(#[$outer_meta])* + $vis enum $name {$( + $VarName($ty) + ),*} + + impl $name { + pub fn transmogrify(self) -> T + where + T: $crate::Anything $(+ std::convert::From<$ty>)* + { + match self {$( + Self::$VarName(v) => v.into() + ),*} + } + } + + $( + impl std::convert::From<$ty> for $name { + fn from(v: $ty) -> Self { + Self::$VarName(v) + } + } + )* + )*}; +} + +/// Indicate that error is highly unlikely. +#[track_caller] +pub const fn shit_happens() -> ! { + panic!("shit happens") +} + +pub trait ShitHappens: seal::Seal { + /// Same as [`shit_happens`], but for unwrapping errors. + fn shit_happens(self) -> T; +} + +impl seal::Seal for Option {} +impl ShitHappens for Option { + #[track_caller] + fn shit_happens(self) -> T { + self.unwrap_or_else(|| shit_happens()) + } +} + +impl seal::Seal for Result {} +impl ShitHappens for Result { + #[track_caller] + fn shit_happens(self) -> O { + self.expect("shit happens") + } +} diff --git a/src/fut.rs b/src/fut.rs new file mode 100644 index 0000000..8bbc24d --- /dev/null +++ b/src/fut.rs @@ -0,0 +1,22 @@ +use std::{ + marker::PhantomData, + pin::Pin, + task::{Context, Poll}, +}; + +/// Future which is never ready. 
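+///
+/// Polling it always yields [`Poll::Pending`]; the `T` parameter only fixes
+/// the future's `Output` type.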
+#[crate::perfect_derive(Debug, Clone, Default, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub struct Never(PhantomData); + +impl Future for Never { + type Output = T; + + fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll { + Poll::Pending + } +} + +crate::trait_set! { + /// Future + Send. + pub trait Fut = Future + Send; +} diff --git a/src/generic.rs b/src/generic.rs new file mode 100644 index 0000000..2dc0f74 --- /dev/null +++ b/src/generic.rs @@ -0,0 +1,22 @@ +use crate::data; + +pub trait Anything {} +impl Anything for T {} + +// TODO: include all possible ranges. +#[data( + copy, + error, + display("integer out of range"), + crate = crate +)] +pub struct OutOfRange; + +/// Text case. +#[data(copy, crate = crate, display(name))] +pub enum Case { + Snake, + Pascal, + Kebab, + Camel, +} diff --git a/src/handling/and_then.rs b/src/handling/and_then.rs new file mode 100644 index 0000000..bdcf668 --- /dev/null +++ b/src/handling/and_then.rs @@ -0,0 +1,13 @@ +use crate::data; + +#[data(copy, ord, crate = crate)] +pub struct AndThen { + pub lhs: L, + pub rhs: R, +} + +impl AndThen { + pub const fn new(lhs: L, rhs: R) -> Self { + Self { lhs, rhs } + } +} diff --git a/src/handling/apply.rs b/src/handling/apply.rs new file mode 100644 index 0000000..6e874a9 --- /dev/null +++ b/src/handling/apply.rs @@ -0,0 +1,29 @@ +use crate::{ + data, + fut::Fut, + handling::{Endpoint, Handler}, +}; + +#[data(copy, ord, crate = crate)] +pub struct Apply { + pub lhs: L, + pub rhs: R, +} + +impl Apply { + pub const fn new(lhs: L, rhs: R) -> Self { + Self { lhs, rhs } + } +} + +impl Endpoint for Apply +where + L: for<'a> Handler, + R: Send + Sync, +{ + type Output = Output; + + fn call(&self, state: S, in_: I) -> impl Fut { + self.lhs.call(state, in_, &self.rhs) + } +} diff --git a/src/handling/mod.rs b/src/handling/mod.rs new file mode 100644 index 0000000..f16ee56 --- /dev/null +++ b/src/handling/mod.rs @@ -0,0 +1,37 @@ +use crate::{auto_impl, fut::Fut}; + +pub use self::{and_then::AndThen, apply::Apply, then::Then}; + +mod and_then; +mod apply; +mod then; + +pub trait HandlerExt: Sized { + fn then(self, rhs: R) -> Then { + Then::new(self, rhs) + } +} + +impl HandlerExt for T {} + +pub trait EndpointExt: Sized { + fn apply(self, rhs: R) -> Apply { + Apply::new(self, rhs) + } +} + +impl EndpointExt for T {} + +#[auto_impl(&, &mut, Arc, Box)] +pub trait Endpoint: Send + Sync { + type Output; + + fn call(&self, state: S, in_: I) -> impl Fut; +} + +#[auto_impl(&, &mut, Arc, Box)] +pub trait Handler: Send + Sync { + type Output; + + fn call(&self, state: S, in_: I, next: N) -> impl Fut; +} diff --git a/src/handling/provide_state.rs b/src/handling/provide_state.rs new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/src/handling/provide_state.rs @@ -0,0 +1 @@ + diff --git a/src/handling/then.rs b/src/handling/then.rs new file mode 100644 index 0000000..a660003 --- /dev/null +++ b/src/handling/then.rs @@ -0,0 +1,29 @@ +use crate::{ + data, + fut::Fut, + handling::{Apply, Handler}, +}; + +#[data(copy, ord, crate = crate)] +pub struct Then { + pub lhs: L, + pub rhs: R, +} + +impl Handler for Then +where + R: Send + Sync, + L: for<'a> Handler, Output = Output>, +{ + type Output = Output; + + fn call(&self, state: S, in_: I, next: N) -> impl Fut { + self.lhs.call(state, in_, Apply::new(&self.rhs, next)) + } +} + +impl Then { + pub const fn new(lhs: L, rhs: R) -> Self { + Self { lhs, rhs } + } +} diff --git a/src/hash.rs b/src/hash.rs new file mode 100644 index 0000000..4708cac --- /dev/null +++ 
b/src/hash.rs @@ -0,0 +1 @@ +pub type Hasher = ahash::AHasher; diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..8ce8ce8 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,59 @@ +#[macro_export] +macro_rules! zst_error { + ($($tt:tt)*) => {{ + #[$crate::data( + copy, + crate = $crate, + not(serde, schemars), + display($($tt)*), + )] + struct E; + + E + }}; +} + +/// The trait that is implemented for everything. +pub trait Anything {} +impl Anything for T {} + +pub use bytes; +pub use bytesize; +pub use url; + +pub use eva_macros::{data, endpoint, int, str}; +pub use seq_macro::seq; + +pub use auto_impl::auto_impl; +pub use perfect_derive::perfect_derive; +pub use trait_set::trait_set; + +pub mod array; +pub mod error; + +pub mod fut; +pub mod time; + +pub mod sync; +pub mod trace_id; + +pub mod generic; +pub mod slab; +pub mod str; + +pub mod encoding; +pub mod rand; + +pub mod collections; +pub mod handling; +pub mod hash; + +pub use paste::paste; + +#[doc(hidden)] +pub mod _priv { + pub use schemars; + pub use serde; + + pub use eva_macros::RastGawno; +} diff --git a/src/rand.rs b/src/rand.rs new file mode 100644 index 0000000..038fc06 --- /dev/null +++ b/src/rand.rs @@ -0,0 +1,4 @@ +///! # Random utilities, refer to [`::rand`] crate docs. +pub use ::rand::*; + +pub use rand_xoshiro as xoshiro; diff --git a/src/slab.rs b/src/slab.rs new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/src/slab.rs @@ -0,0 +1 @@ + diff --git a/src/str.rs b/src/str.rs new file mode 100644 index 0000000..a313c80 --- /dev/null +++ b/src/str.rs @@ -0,0 +1,212 @@ +use std::{fmt, mem::MaybeUninit, slice, str::FromStr}; + +pub use compact_str::{ + CompactString, CompactStringExt, ToCompactString, ToCompactStringError, format_compact, +}; + +use crate::data; + +#[macro_export] +macro_rules! single_ascii_char { + ($ty:ty) => { + const _: () = { + use ::core::{result::Result, str::FromStr}; + use ::std::string::String; + + use $crate::str::{HasPattern, ParseError}; + + impl HasPattern for $ty { + #[inline] + fn pat_into(buf: &mut String) { + $crate::push_ascii_pat!(Self, buf); + } + } + + impl FromStr for $ty { + type Err = ParseError; + + #[inline] + fn from_str(s: &str) -> Result { + let [c]: [u8; 1] = s.as_bytes().try_into().map_err(|_| ParseError::Length)?; + Self::new(c).ok_or(ParseError::Char) + } + } + }; + }; +} + +/// Simple parse error. +#[data(copy, error, display(doc), crate = crate)] +pub enum ParseError { + /// Unexpected char. + Char, + /// Invalid length of string. + Length, +} + +pub trait FixedParseError { + fn length(expected: usize) -> Self; +} + +impl FixedParseError for ParseError { + #[inline] + fn length(expected: usize) -> Self { + _ = expected; + Self::Length + } +} + +pub trait HasPattern { + fn pat_into(buf: &mut String); + #[inline] + fn regex_pat() -> String { + let mut s = String::new(); + Self::pat_into(&mut s); + s + } + #[inline] + fn regex_pat_fullmatch() -> String { + let mut s = String::with_capacity(8); + s.push('^'); + Self::pat_into(&mut s); + s.push('$'); + s + } +} + +impl HasPattern for String { + #[inline] + fn pat_into(buf: &mut String) { + buf.push_str(".*"); + } +} + +impl HasPattern for CompactString { + #[inline] + fn pat_into(buf: &mut String) { + buf.push_str(".*"); + } +} + +/// # Safety +/// +/// Implementation of this trait implies that reinterpreting the reference to the type as &[u8] and then as valid utf8 sequence +/// is sound and defined. 
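+///
+/// Within this crate it is implemented for [`Either`], [`Seq`] and the
+/// single-ASCII-character enums in [`ascii`], all of which have a fixed size
+/// and a byte layout that is always valid UTF-8.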
+pub unsafe trait FixedUtf8 +where + Self: Sized + Copy + FromStr + HasPattern, +{ +} + +/// Reinterpret fixed size string as a standard library string slice. +pub const fn reinterpret<'a, T: FixedUtf8>(val: &'a T) -> &'a str { + let ts = size_of::(); + let slice: &'a [u8] = unsafe { slice::from_raw_parts(val as *const T as *const u8, ts) }; + unsafe { std::str::from_utf8_unchecked(slice) } +} + +#[derive(Clone, Copy)] +union EitherUnion { + lhs: L, + rhs: R, +} + +#[derive(Clone, Copy)] +pub struct Either(EitherUnion); + +impl FromStr for Either { + type Err = R::Err; + + fn from_str(s: &str) -> Result { + if let Ok(res) = L::from_str(s) { + Ok(Self::left(res)) + } else { + R::from_str(s).map(Self::right) + } + } +} + +unsafe impl FixedUtf8 for Either {} + +impl HasPattern for Either { + fn pat_into(buf: &mut String) { + buf.push('('); + L::pat_into(buf); + buf.push('|'); + R::pat_into(buf); + buf.push(')'); + } +} + +impl Either { + const fn new(un: EitherUnion) -> Self { + const { + if size_of::() != size_of::() { + panic!("Could not make string `Either` of differently sized strings"); + } + } + + Self(un) + } + pub const fn left(lhs: L) -> Self { + Self::new(EitherUnion { lhs }) + } + + pub const fn right(rhs: R) -> Self { + Self::new(EitherUnion { rhs }) + } + + pub const fn as_str(&self) -> &str { + reinterpret(self) + } +} + +/// Sequence of fixed size string. +#[crate::str(custom, copy, crate = crate)] +pub struct Seq(pub [T; N]); + +impl Seq { + pub const fn as_str(&self) -> &str { + reinterpret(self) + } +} + +impl HasPattern for Seq { + fn pat_into(buf: &mut String) { + T::pat_into(buf); + buf.push_str("{"); + buf.push_str(&N.to_string()); + buf.push_str("}"); + } +} + +unsafe impl FixedUtf8 for Seq {} + +impl FromStr for Seq +where + T: FixedUtf8, +{ + type Err = T::Err; + + fn from_str(mut s: &str) -> Result { + let expected_size = size_of::() * N; + if s.len() != expected_size { + return Err(T::Err::length(expected_size)); + } + + let mut arr: [MaybeUninit; N] = unsafe { MaybeUninit::uninit().assume_init() }; + + for idx in 0..N { + let len = size_of::(); + let chunk = unsafe { s.get_unchecked(..len) }; + let res = T::from_str(chunk)?; + unsafe { arr.get_unchecked_mut(idx) }.write(res); + + s = unsafe { s.get_unchecked(len..) }; + } + + Ok(Self(unsafe { std::ptr::read((&raw const arr).cast()) })) + } +} + +pub mod ascii; diff --git a/src/str/ascii.rs b/src/str/ascii.rs new file mode 100644 index 0000000..502a78d --- /dev/null +++ b/src/str/ascii.rs @@ -0,0 +1,109 @@ +use crate::int; + +pub fn push_ascii_pat(u: u8, to: &mut String) { + let push = match u { + b'\r' => r"\r", + b'\n' => r"\n", + b'-' => r"\-", + b'^' => r"\^", + b'$' => r"\$", + b'[' => r"\[", + b']' => r"\]", + b'\\' => r"\", + b'.' => r"\.", + b'*' => r"\*", + b'+' => r"\+", + b'?' => r"\?", + b'{' => r"\{", + b'}' => r"\}", + b'|' => r"\|", + b'(' => r"\(", + b')' => r"\)", + _ => { + to.push(u as char); + return; + } + }; + to.push_str(push); +} + +#[macro_export] +macro_rules! push_ascii_pat { + ($e:ident, $to:expr) => {{ + let buf = $to; + let requires_brackets = match $e::RANGES.as_slice() { + [r] => r.len() > 1, + [] => false, + _ => true, + }; + if requires_brackets { + buf.push('['); + } + for range in $e::RANGES.iter().cloned() { + let start = *range.start(); + let end = *range.end(); + $crate::str::ascii::push_ascii_pat(start, buf); + if start != end { + buf.push('-'); + $crate::str::ascii::push_ascii_pat(end, buf); + } + } + if requires_brackets { + buf.push(']'); + } + }}; +} + +macro_rules! 
valid {
+    ($e:ident) => {
+        $crate::single_ascii_char!($e);
+
+        unsafe impl $crate::str::FixedUtf8 for $e {}
+
+        #[allow(dead_code)]
+        impl $e {
+            pub const fn as_str(&self) -> &str {
+                $crate::str::reinterpret(self)
+            }
+        }
+    };
+}
+
+pub(crate) use valid;
+
+#[int(u8, b'.', crate = crate)]
+pub enum Dot {}
+valid!(Dot);
+
+#[int(u8, b':', crate = crate)]
+pub enum Colon {}
+valid!(Colon);
+
+#[int(u8, b' ', crate = crate)]
+pub enum Space {}
+valid!(Space);
+
+#[int(u8, b'0'..=b'9', crate = crate)]
+pub enum Digit {}
+valid!(Digit);
+
+impl Digit {
+    pub const fn parse(self) -> u8 {
+        self as u8 - b'0'
+    }
+}
+
+/// A valid ASCII character.
+#[int(u8, 0..=127, crate = crate)]
+pub enum Char {}
+valid!(Char);
+
+/// Printable ASCII character.
+#[int(u8, 32..=126, crate = crate)]
+pub enum Printable {}
+valid!(Printable);
+
+/// ASCII control character.
+#[int(u8, 0..=31, crate = crate)]
+pub enum Control {}
+valid!(Control);
diff --git a/src/sync.rs b/src/sync.rs
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/src/sync.rs
@@ -0,0 +1 @@
+
diff --git a/src/time/clock.rs b/src/time/clock.rs
new file mode 100644
index 0000000..0fbc7a4
--- /dev/null
+++ b/src/time/clock.rs
@@ -0,0 +1,62 @@
+use std::{
+    num::NonZeroU64,
+    sync::{Arc, atomic},
+    time::{Duration, SystemTime},
+};
+
+use crate::time::Timestamp;
+
+#[crate::auto_impl(&, &mut)]
+pub trait Clock {
+    fn get(&self) -> Timestamp;
+}
+
+#[derive(Debug, Clone)]
+pub struct Mock(Arc<atomic::AtomicU64>);
+
+impl Default for Mock {
+    fn default() -> Self {
+        Self(Arc::new(atomic::AtomicU64::new(
+            Timestamp::TEST_ORIGIN.as_nanos().get(),
+        )))
+    }
+}
+
+impl Mock {
+    pub fn advance(&mut self, dur: Duration) {
+        self.0
+            .fetch_add(dur.as_nanos() as u64, atomic::Ordering::Release);
+    }
+
+    pub fn set(&mut self, ts: Timestamp) {
+        self.0.store(ts.as_nanos().get(), atomic::Ordering::Release);
+    }
+
+    pub fn back(&self, dur: Duration) {
+        _ = dur;
+        todo!()
+    }
+}
+
+impl Clock for Mock {
+    fn get(&self) -> Timestamp {
+        let time = self.0.load(atomic::Ordering::Acquire);
+        Timestamp::from_nanos(time.try_into().expect("invalid time set"))
+    }
+}
+
+/// Real time clock.
+#[derive(Debug, Clone, Copy, Default)]
+pub struct RealTime(());
+
+impl Clock for RealTime {
+    fn get(&self) -> Timestamp {
+        // TODO: get time with a fixed time zone.
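+        // NOTE: both `unwrap`s below assume the system clock is strictly past
+        // the Unix epoch: `duration_since` fails for earlier instants and
+        // `NonZeroU64::new` rejects a zero-nanosecond duration.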
+ let dur = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap(); + let nanos = NonZeroU64::new(dur.as_nanos() as u64).unwrap(); + + Timestamp::from_nanos(nanos) + } +} diff --git a/src/time/date.rs b/src/time/date.rs new file mode 100644 index 0000000..0248fca --- /dev/null +++ b/src/time/date.rs @@ -0,0 +1,526 @@ +use std::{borrow::Cow, mem, str::FromStr}; + +use schemars::JsonSchema; +use seq_macro::seq; +use serde::{Deserialize, Serialize, de}; + +use crate::{ + data, int, + str::{HasPattern, ParseError}, +}; + +use super::str; + +#[data(ord, copy, display(name), crate = crate)] +pub enum Leapness { + Leap = 1, + Ordinary = 0, +} + +impl Leapness { + pub const fn is_leap(self) -> bool { + matches!(self, Self::Leap) + } + + pub const fn is_ordinary(self) -> bool { + matches!(self, Self::Ordinary) + } +} + +#[data(copy, ord, not(serde, schemars), crate = crate)] +pub struct LooseDate { + day: Day, + month: Month, + year: Year, +} + +impl JsonSchema for LooseDate { + fn schema_id() -> Cow<'static, str> { + Cow::Borrowed(concat!(module_path!(), "::LooseDate")) + } + + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("LooseDate") + } + + fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "pattern": str::DateStr::regex_pat_fullmatch(), + "description": "Day, month and year" + }) + } +} + +impl<'de> Deserialize<'de> for LooseDate { + fn deserialize(deserializer: D) -> Result + where + D: de::Deserializer<'de>, + { + let s = <&'de str as Deserialize<'de>>::deserialize(deserializer)?; + let date: str::DateStr = s.parse().map_err(de::Error::custom)?; + + date.parse().ok_or(de::Error::custom(zst_error!( + "invalid day, month and year combination" + ))) + } +} + +impl Serialize for LooseDate { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + self.to_str().serialize(serializer) + } +} + +impl LooseDate { + pub const fn from_dmy(day: Day, month: Month, year: Year) -> Option { + let last_day = month.last_day(year.leapness()); + if day as u8 > last_day as u8 { + return None; + } + + Some(Self { day, month, year }) + } + + /// Get day of month. + pub const fn day_of_month(self) -> Day { + self.day + } + + /// Get current month. + pub const fn month(self) -> Month { + self.month + } + + /// Get year. + pub const fn year(self) -> Year { + self.year + } + + /// Convert date to more compact representation. + pub const fn compact(self) -> Date { + let year = self.year(); + let leapness = year.leapness(); + + let first_day_of_year = year.first_day().days(); + let year_offset = self.month().days_from_year_start(leapness); + let month_offset = self.day_of_month() as u16 - 1; + + Date::from_days(first_day_of_year + year_offset + month_offset) + } + + /// Convert date to string. 
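+    ///
+    /// The result follows the `dd.mm.YYYY` layout of [`str::DateStr`].
+    /// A sketch (doc-test ignored; `Day::new` and `DateStr::as_str` are
+    /// assumed to be generated by the `int`/`str` macros):
+    ///
+    /// ```rust,ignore
+    /// let date = LooseDate::from_dmy(
+    ///     Day::new(22).unwrap(),
+    ///     Month::Jun,
+    ///     Year::from_abs(2025).unwrap(),
+    /// )
+    /// .unwrap();
+    /// assert_eq!(date.to_str().as_str(), "22.06.2025");
+    /// ```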
+    pub const fn to_str(self) -> str::DateStr {
+        str::DateStr::new(
+            self.day_of_month().to_str(),
+            self.month().to_str(),
+            self.year().to_str(),
+        )
+    }
+}
+
+#[data(copy, ord, not(serde, schemars), crate = crate)]
+#[derive(Hash)]
+pub struct Date(u16);
+
+impl JsonSchema for Date {
+    fn schema_id() -> Cow<'static, str> {
+        Cow::Borrowed(concat!(module_path!(), "::Date"))
+    }
+
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("Date")
+    }
+
+    fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "anyOf": [
+                {
+                    "type": "integer",
+                    "minimum": 0,
+                    "maximum": 65535,
+                    "description": "number of days since 1970, only in binary formats"
+                },
+                LooseDate::json_schema(generator)
+            ]
+        })
+    }
+}
+
+impl FromStr for Date {
+    type Err = ParseError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        str::DateStr::from_str(s)?
+            .parse()
+            .map(|d| d.compact())
+            .ok_or(ParseError::Char)
+    }
+}
+
+impl<'de> Deserialize<'de> for Date {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        // Mirror `Serialize`: human-readable formats carry the string form,
+        // binary formats carry the raw number of days.
+        if deserializer.is_human_readable() {
+            let loose = LooseDate::deserialize(deserializer)?;
+            Ok(loose.compact())
+        } else {
+            Ok(Date::from_days(u16::deserialize(deserializer)?))
+        }
+    }
+}
+
+impl Serialize for Date {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        if serializer.is_human_readable() {
+            self.to_str().serialize(serializer)
+        } else {
+            self.days().serialize(serializer)
+        }
+    }
+}
+
+impl Date {
+    pub const MIN: Self = Self(0);
+    pub const MAX: Self = Self(u16::MAX);
+
+    pub const fn from_dmy(day: Day, month: Month, year: Year) -> Option<Self> {
+        let Some(loosen) = LooseDate::from_dmy(day, month, year) else {
+            return None;
+        };
+
+        Some(loosen.compact())
+    }
+
+    pub const fn to_secs(self) -> u64 {
+        let hours = (self.0 as u64) * 24;
+        let mins = hours * 60;
+
+        mins * 60
+    }
+
+    /// Create date from number of days.
+    pub const fn from_days(days: u16) -> Self {
+        Self(days)
+    }
+
+    /// Get number of days.
+    pub const fn days(self) -> u16 {
+        self.0
+    }
+
+    /// Loosen up the layout to the less compact representation.
+    pub const fn loose(self) -> LooseDate {
+        let year = self.year();
+        let total_days = self.days();
+        let leapness = year.leapness();
+        let days_since_year = total_days - year.first_day().days();
+
+        let month = unsafe {
+            Month::from_days_since_year_start(days_since_year, leapness).unwrap_unchecked()
+        };
+        let day_of_month = days_since_year - month.days_from_year_start(leapness);
+
+        LooseDate {
+            // SAFETY: safe, since condition for exiting loop is falling
+            // into days range.
+            day: unsafe { Day::new_unchecked(day_of_month as u8 + 1) },
+            month,
+            year,
+        }
+    }
+
+    /// Get current year.
+    pub const fn year(self) -> Year {
+        let days = self.days();
+        if days == 0 {
+            return Year::MIN;
+        }
+        let naive_year = days / 365;
+        let leap_days = super::utils::leap_days_after((naive_year + Year::ORIGIN) - 1)
+            - Year::LEAP_DAYS_BEFORE1970;
+        // naive_year still counts leap days from the previous years, which
+        // skews the estimate; subtract them before dividing again.
+        let fixed = days - leap_days;
+
+        debug_assert!((fixed / 365) < 179);
+        unsafe { Year::new_unchecked((fixed / 365) as u8) }
+    }
+
+    /// Get number of days since the start of the current year, e.g. on
+    /// January 1st the result is 0.
+    pub const fn days_since_year(self) -> u16 {
+        self.0 - self.year().first_day().days()
+    }
+
+    /// Get current month.
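+    ///
+    /// Equivalent to `self.loose().month()`.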
+ pub const fn month(self) -> Month { + self.loose().month() + } + + /// Get day of month. + pub const fn day_of_month(self) -> Day { + self.loose().day_of_month() + } + + /// Convert date to string. + pub const fn to_str(self) -> str::DateStr { + self.loose().to_str() + } +} + +#[int(u8, 0..179, crate = crate)] +pub enum Year {} + +impl Year { + pub const MIN: Self = Self::new(0).unwrap(); + pub const MAX: Self = Self::new(178).unwrap(); + + const LEAP_DAYS_BEFORE1970: u16 = super::utils::leap_days_after(1970 - 1); + pub const ORIGIN: u16 = 1970; + + pub const fn from_abs(abs: u16) -> Option { + if matches!(abs, 0..Self::ORIGIN) { + return None; + } + + let shifted = abs - Self::ORIGIN; + if shifted > Self::MAX as u16 { + None + } else { + Some(unsafe { Year::new_unchecked(shifted as u8) }) + } + } + + pub const fn leapness(self) -> Leapness { + if super::utils::is_leap_year(self.abs()) { + Leapness::Leap + } else { + Leapness::Ordinary + } + } + + /// Get date of the first day in a year. + pub const fn first_day(self) -> Date { + let leap_days = super::utils::leap_days_after(self.abs() - 1) - Self::LEAP_DAYS_BEFORE1970; + let naive_days = (self as u16) * 365; + + Date::from_days(naive_days + leap_days) + } + + /// Convert relative year to absolute year. + pub const fn abs(self) -> u16 { + self as u16 + Self::ORIGIN + } + + /// Convert year to fixed size string. + pub const fn to_str(self) -> str::YearStr { + let v = self.abs(); + let buf = [ + (v / 1000) as u8 + b'0', + (v / 100 % 10) as u8 + b'0', + (v / 10 % 10) as u8 + b'0', + (v % 10) as u8 + b'0', + ]; + + unsafe { mem::transmute::<[u8; 4], str::YearStr>(buf) } + } +} + +#[int(u8, 1..=32, crate = crate)] +pub enum Day {} + +impl Day { + pub const fn first() -> Self { + Self::VARIANTS[0] + } + + /// Get index inside [`Day::VARIANTS`]. + pub const fn index(self) -> usize { + self as usize - 1 + } + + /// Convert day to fixed size string. 
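+    ///
+    /// The result is always two digits, zero-padded: the 5th day renders as `"05"`.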
+ pub const fn to_str(self) -> str::DayStr { + let d = self as u8; + unsafe { mem::transmute::<[u8; 2], str::DayStr>([d / 10 + b'0', d % 10 + b'0']) } + } +} + +#[data(copy, ord, crate = crate, display(name))] +pub enum Month { + Jan = 0, + Feb = 1, + Mar = 2, + Apr = 3, + May = 4, + Jun = 5, + Jul = 6, + Aug = 7, + Sep = 8, + Oct = 9, + Nov = 10, + Dec = 11, +} + +impl Month { + pub const MIN: Self = Self::Jan; + pub const MAX: Self = Self::Dec; + pub const VARIANTS: [Self; 12] = { + use Month::*; + + [Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec] + }; + + pub const fn prev(self) -> Self { + let Some(prev) = self.prev_checked() else { + return Self::MIN; + }; + + prev + } + + pub const fn prev_checked(self) -> Option { + let Some(prev) = (self as u8).checked_sub(1) else { + return None; + }; + + Some(unsafe { Month::from_repr_unchecked(prev) }) + } + + pub const fn next(self) -> Self { + let Some(next) = self.next_checked() else { + return Self::MAX; + }; + + next + } + + pub const fn next_checked(self) -> Option { + let repr = self as u8 + 1; + Self::from_repr(repr) + } + + const fn from_days_since_year_start_impl(mut days: u16, leapness: Leapness) -> Self { + let mut month = Month::Jan; + loop { + let last_day = month.last_day(leapness); + if days < last_day as u16 { + break; + } + + days -= last_day as u16; + month = month.next(); + } + + month + } + + pub const fn from_days_since_year_start(days: u16, leapness: Leapness) -> Option { + use Leapness::*; + const ORDINARY: [Month; 365] = seq!(N in 0..365 { + [#(Month::from_days_since_year_start_impl(N, Ordinary),)*] + }); + const LEAP: [Month; 366] = seq!(N in 0..366 { + [#(Month::from_days_since_year_start_impl(N, Leap),)*] + }); + + match leapness { + Leap => { + if days >= 366 { + None + } else { + Some(LEAP[days as usize]) + } + } + Ordinary => { + if days >= 365 { + None + } else { + Some(ORDINARY[days as usize]) + } + } + } + } + + pub const fn days_from_year_start(self, leapness: Leapness) -> u16 { + const fn l(m: Month) -> u16 { + m.calc_days_from_year_start(Leapness::Leap) + } + + const fn o(m: Month) -> u16 { + m.calc_days_from_year_start(Leapness::Ordinary) + } + + const LEAP: [u16; 12] = seq!(N in 0..12 { + [#(l(Month::VARIANTS[N]),)*] + }); + const ORDINARY: [u16; 12] = seq!(N in 0..12 { + [#(o(Month::VARIANTS[N]),)*] + }); + + match leapness { + Leapness::Leap => LEAP[self as usize], + Leapness::Ordinary => ORDINARY[self as usize], + } + } + + const fn calc_days_from_year_start(self, leapness: Leapness) -> u16 { + let mut cur = Self::Jan; + let mut days = 0_u16; + + // Sigh, recursion would be better. + while cur as u8 != self as u8 { + let last_day = cur.last_day(leapness); + days += last_day as u16; + cur = cur.next(); + } + + days + } + + pub const fn last_day_naive(self) -> Day { + self.last_day(Leapness::Ordinary) + } + + /// Get last day of month with respect to the leapness of year. 
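+    ///
+    /// For example, February ends on the 29th in a leap year and on the 28th
+    /// otherwise. A sketch (doc-test ignored; compared as `u8` to avoid
+    /// assuming derived trait impls on [`Day`]):
+    ///
+    /// ```rust,ignore
+    /// assert_eq!(Month::Feb.last_day(Leapness::Leap) as u8, 29);
+    /// assert_eq!(Month::Feb.last_day(Leapness::Ordinary) as u8, 28);
+    /// ```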
+    pub const fn last_day(self, leapness: Leapness) -> Day {
+        use Day::*;
+        const LAST_DAY: [Day; 12] = [
+            POS31, POS28, POS31, POS30, POS31, POS30, POS31, POS31, POS30, POS31, POS30, POS31,
+        ];
+        const LAST_LEAP_DAY: [Day; 12] = {
+            let mut src = LAST_DAY;
+            src[1] = POS29;
+            src
+        };
+        const TBL: [[Day; 12]; 2] = [LAST_DAY, LAST_LEAP_DAY];
+
+        TBL[leapness.is_leap() as usize][self as usize]
+    }
+
+    pub const unsafe fn from_repr_unchecked(repr: u8) -> Self {
+        unsafe { mem::transmute::<u8, Self>(repr) }
+    }
+
+    pub const fn from_repr(repr: u8) -> Option<Self> {
+        match repr {
+            0..12 => Some(unsafe { Self::from_repr_unchecked(repr) }),
+            _ => None,
+        }
+    }
+
+    /// Convert month to fixed size string.
+    pub const fn to_str(self) -> str::MonthStr {
+        let v = self as u8 + 1;
+        unsafe { mem::transmute::<[u8; 2], str::MonthStr>([v / 10 + b'0', v % 10 + b'0']) }
+    }
+}
diff --git a/src/time/mod.rs b/src/time/mod.rs
new file mode 100644
index 0000000..65d6408
--- /dev/null
+++ b/src/time/mod.rs
@@ -0,0 +1,49 @@
+//! # Date and time
+//!
+//! Contains heavily and precisely typed utilities for working
+//! with time.
+//!
+//! # Glossary
+//!
+//! ### [`Clock`]
+//!
+//! Trait responsible for getting the current time.
+//!
+//! - [`Mock`] - mock [`Clock`], can be used for testing purposes
+//! - [`RealTime`] - realtime clock
+//!
+//! ### Date and time types
+//!
+//! Main types:
+//! - [`Date`] - day-precise date, contains [`Day`], [`Month`] and [`Year`]
+//! - [`Time`] - time during the day, contains [`Hours`] and [`Mins`]
+//! - [`SecsTime`] - same as [`Time`], but also contains seconds
+//! - [`PreciseTime`] - same as [`Time`], but also contains seconds and nanoseconds
+//! - [`Timestamp`] - maximum precision timestamp, with nanosecond precision
+//!
+//! Every timestamp here is in the UTC time zone.
+//!
+//! ### String representation
+//!
+//! Every type presented here has a string representation; see the [`self::str`] module.
+
+pub use self::{
+    clock::{Clock, Mock, RealTime},
+    date::{Date, Day, Leapness, LooseDate, Month, Year},
+    time::{Hours, Mins, PreciseTime, Secs, SecsTime, SubsecNanos, Time},
+    timestamp::Timestamp,
+};
+
+pub mod ser;
+pub mod str;
+pub mod tz;
+
+mod clock;
+mod date;
+mod time;
+mod timestamp;
+
+mod utils;
+
+#[cfg(test)]
+mod tests;
diff --git a/src/time/ser.rs b/src/time/ser.rs
new file mode 100644
index 0000000..1d5d7db
--- /dev/null
+++ b/src/time/ser.rs
@@ -0,0 +1,3 @@
+pub mod tz {
+    pub mod msk {}
+}
diff --git a/src/time/str.rs b/src/time/str.rs
new file mode 100644
index 0000000..61028e1
--- /dev/null
+++ b/src/time/str.rs
@@ -0,0 +1,278 @@
+//! # Strings that represent time
+
+use crate::{
+    int, str,
+    str::{ParseError, Seq, ascii},
+};
+
+use super::{
+    Day, Hours, LooseDate, Mins, Month, PreciseTime, Secs, SecsTime, Time, Timestamp, Year,
+    time::SubsecNanos, timestamp::LooseTimestamp,
+};
+
+// == TimestampStr ==
+
+/// Precise string timestamp representation.
+///
+/// format: `dd.mm.YYYY HH:MM:SS.NNNNNNNNN`.
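+///
+/// A parsing sketch (doc-test ignored; the `FromStr` impl is assumed to come
+/// from the `str(fixed, ...)` macro):
+///
+/// ```rust,ignore
+/// let raw: TimestampStr = "22.06.2025 05:46:26.000000000".parse().unwrap();
+/// let ts = raw.parse_compact().unwrap();
+/// ```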
+#[str(fixed(error = ParseError), crate = crate)] +pub struct TimestampStr(DateStr, ascii::Space, PreciseTimeStr); + +impl TimestampStr { + pub const fn new(date: DateStr, time: PreciseTimeStr) -> Self { + Self(date, ascii::Space::POS32, time) + } + + pub const fn parse_compact(self) -> Option { + let Some(loose) = self.parse() else { + return None; + }; + Some(loose.compact_loose().compact()) + } + + pub const fn parse(self) -> Option> { + let Some(date) = self.0.parse() else { + return None; + }; + let Some(time) = self.2.parse() else { + return None; + }; + + Some(LooseTimestamp::<_>::new(date, time)) + } +} + +// == PreciseTimeStr == + +/// [`TimeStr`] with seconds an nanoseconds. +/// +/// format: `HH:MM:SS.NNNNNNNNN`, where N is nanosecond digit. +#[str(fixed(error = ParseError), crate = crate)] +pub struct PreciseTimeStr(SecsTimeStr, ascii::Dot, SubsecNanosStr); + +impl PreciseTimeStr { + pub const fn new(secs_time: SecsTimeStr, subsec_nanos: SubsecNanosStr) -> Self { + Self(secs_time, ascii::Dot::POS46, subsec_nanos) + } + + pub const fn parse(self) -> Option { + let Some(time) = self.0.parse() else { + return None; + }; + + let nanos = self.2.parse(); + + Some(PreciseTime::new(time, nanos)) + } +} + +// == SecsTimeStr == + +/// [`TimeStr`] with seconds part. +/// +/// format: `HH:MM:SS`. +#[str(fixed(error = ParseError), crate = crate)] +pub struct SecsTimeStr(TimeStr, ascii::Colon, SecsStr); + +impl SecsTimeStr { + pub const fn new(time: TimeStr, secs: SecsStr) -> Self { + Self(time, ascii::Colon::POS58, secs) + } + + pub const fn parse(self) -> Option { + let Some(time) = self.0.parse() else { + return None; + }; + let secs = self.2.parse(); + + Some(SecsTime::new(time, secs)) + } +} + +// == DateStr == + +#[str(fixed(error = ParseError), crate = crate)] +pub struct DateStr(DayStr, ascii::Dot, MonthStr, ascii::Dot, YearStr); + +impl DateStr { + pub const fn new(day: DayStr, month: MonthStr, year: YearStr) -> Self { + Self(day, ascii::Dot::POS46, month, ascii::Dot::POS46, year) + } + + pub const fn parse(self) -> Option { + let Some(day) = self.0.parse() else { + return None; + }; + let Some(month) = self.2.parse() else { + return None; + }; + let Some(year) = self.4.parse() else { + return None; + }; + + LooseDate::from_dmy(day, month, year) + } +} + +// == TimeStr == + +/// Partly valid time string. Contains hours and minutes. +/// +/// Format: `HH:MM`. +#[str(fixed(error = ParseError), crate = crate)] +pub struct TimeStr(HoursStr, ascii::Colon, MinsStr); + +impl TimeStr { + pub const fn new(hours: HoursStr, minutes: MinsStr) -> Self { + Self(hours, ascii::Colon::POS58, minutes) + } + + pub const fn parse(self) -> Option