initial commit

Kiana Sheibani, 2024-09-14 00:34:31 -04:00
commit 3d4d271ccd
Signed by: toki (GPG key ID: 6CB106C25E86A9F7)
15 changed files with 5905 additions and 0 deletions

.envrc (new file, 3 lines)

@@ -0,0 +1,3 @@
#!/usr/bin/env bash
use flake

.gitignore (new file, 2 lines)

@@ -0,0 +1,2 @@
.direnv/
target/

Cargo.lock (generated, new file, 5025 lines)

File diff suppressed because it is too large.

Cargo.toml (new file, 13 lines)

@@ -0,0 +1,13 @@
[package]
name = "resheet"
version = "0.0.0"
edition = "2021"
build = "build.rs"

[dependencies]
slint = "1.7"
malachite = "0.4"

[build-dependencies]
slint-build = "1.7"

build.rs (new file, 6 lines)

@@ -0,0 +1,6 @@
use slint_build::*;

fn main() {
    let config = CompilerConfiguration::new().with_style("cosmic-dark".into());
    compile_with_config("ui/main.slint", config).unwrap();
}

flake.lock (new file, 268 lines)

@@ -0,0 +1,268 @@
{
"nodes": {
"crane": {
"flake": false,
"locked": {
"lastModified": 1699217310,
"narHash": "sha256-xpW3VFUG7yE6UE6Wl0dhqencuENSkV7qpnpe9I8VbPw=",
"owner": "ipetkov",
"repo": "crane",
"rev": "d535642bbe6f377077f7c23f0febb78b1463f449",
"type": "github"
},
"original": {
"owner": "ipetkov",
"ref": "v0.15.0",
"repo": "crane",
"type": "github"
}
},
"dream2nix": {
"inputs": {
"nixpkgs": [
"nci",
"nixpkgs"
],
"purescript-overlay": "purescript-overlay",
"pyproject-nix": "pyproject-nix"
},
"locked": {
"lastModified": 1722526955,
"narHash": "sha256-fFS8aDnfK9Qfm2FLnQ8pqWk8FzvFEv5LvTuZTZLREnc=",
"owner": "nix-community",
"repo": "dream2nix",
"rev": "3fd4c14d3683baac8d1f94286ae14fe160888b51",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "dream2nix",
"type": "github"
}
},
"flake-parts": {
"inputs": {
"nixpkgs-lib": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1725024810,
"narHash": "sha256-ODYRm8zHfLTH3soTFWE452ydPYz2iTvr9T8ftDMUQ3E=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "af510d4a62d071ea13925ce41c95e3dec816c01d",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"mk-naked-shell": {
"flake": false,
"locked": {
"lastModified": 1681286841,
"narHash": "sha256-3XlJrwlR0nBiREnuogoa5i1b4+w/XPe0z8bbrJASw0g=",
"owner": "yusdacra",
"repo": "mk-naked-shell",
"rev": "7612f828dd6f22b7fb332cc69440e839d7ffe6bd",
"type": "github"
},
"original": {
"owner": "yusdacra",
"repo": "mk-naked-shell",
"type": "github"
}
},
"nci": {
"inputs": {
"crane": "crane",
"dream2nix": "dream2nix",
"mk-naked-shell": "mk-naked-shell",
"nixpkgs": [
"nixpkgs"
],
"parts": "parts",
"rust-overlay": "rust-overlay",
"treefmt": "treefmt"
},
"locked": {
"lastModified": 1725171295,
"narHash": "sha256-DnggDZZLPLIRrKjiQw+/c0B5AYnvIZZ1yteDM3nE6RI=",
"owner": "yusdacra",
"repo": "nix-cargo-integration",
"rev": "6c42669c74c185d7e4809cf076105497ed49ddd1",
"type": "github"
},
"original": {
"owner": "yusdacra",
"repo": "nix-cargo-integration",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1725161990,
"narHash": "sha256-/ZeccCYM71zfJ17gEQ0U/XzJr4kN0VXUiUeJ2mKxLJU=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "4a9443e2a4e06cbaff89056b5cdf6777c1fe5755",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"parts": {
"inputs": {
"nixpkgs-lib": [
"nci",
"nixpkgs"
]
},
"locked": {
"lastModified": 1725024810,
"narHash": "sha256-ODYRm8zHfLTH3soTFWE452ydPYz2iTvr9T8ftDMUQ3E=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "af510d4a62d071ea13925ce41c95e3dec816c01d",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"purescript-overlay": {
"inputs": {
"nixpkgs": [
"nci",
"dream2nix",
"nixpkgs"
],
"slimlock": "slimlock"
},
"locked": {
"lastModified": 1696022621,
"narHash": "sha256-eMjFmsj2G1E0Q5XiibUNgFjTiSz0GxIeSSzzVdoN730=",
"owner": "thomashoneyman",
"repo": "purescript-overlay",
"rev": "047c7933abd6da8aa239904422e22d190ce55ead",
"type": "github"
},
"original": {
"owner": "thomashoneyman",
"repo": "purescript-overlay",
"type": "github"
}
},
"pyproject-nix": {
"flake": false,
"locked": {
"lastModified": 1702448246,
"narHash": "sha256-hFg5s/hoJFv7tDpiGvEvXP0UfFvFEDgTdyHIjDVHu1I=",
"owner": "davhau",
"repo": "pyproject.nix",
"rev": "5a06a2697b228c04dd2f35659b4b659ca74f7aeb",
"type": "github"
},
"original": {
"owner": "davhau",
"ref": "dream2nix",
"repo": "pyproject.nix",
"type": "github"
}
},
"root": {
"inputs": {
"flake-parts": "flake-parts",
"nci": "nci",
"nixpkgs": "nixpkgs",
"systems": "systems"
}
},
"rust-overlay": {
"flake": false,
"locked": {
"lastModified": 1725157860,
"narHash": "sha256-DhqyM7XJYKj+WAEaYwMtXaYX66tA+lOd31sd5QkxLDM=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "1fd72e343c6890f695243a37b367a1e3b90a49ee",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"slimlock": {
"inputs": {
"nixpkgs": [
"nci",
"dream2nix",
"purescript-overlay",
"nixpkgs"
]
},
"locked": {
"lastModified": 1688610262,
"narHash": "sha256-Wg0ViDotFWGWqKIQzyYCgayeH8s4U1OZcTiWTQYdAp4=",
"owner": "thomashoneyman",
"repo": "slimlock",
"rev": "b5c6cdcaf636ebbebd0a1f32520929394493f1a6",
"type": "github"
},
"original": {
"owner": "thomashoneyman",
"repo": "slimlock",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"treefmt": {
"inputs": {
"nixpkgs": [
"nci",
"nixpkgs"
]
},
"locked": {
"lastModified": 1724833132,
"narHash": "sha256-F4djBvyNRAXGusJiNYInqR6zIMI3rvlp6WiKwsRISos=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "3ffd842a5f50f435d3e603312eefa4790db46af5",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "treefmt-nix",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

flake.nix (new file, 20 lines)

@@ -0,0 +1,20 @@
{
  description = "Resheet - spreadsheets reimagined";

  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
    systems.url = "github:nix-systems/default";

    flake-parts.url = "github:hercules-ci/flake-parts";
    flake-parts.inputs.nixpkgs-lib.follows = "nixpkgs";

    nci.url = "github:yusdacra/nix-cargo-integration";
    nci.inputs.nixpkgs.follows = "nixpkgs";
  };

  outputs = inputs@{ flake-parts, systems, nci, ... }:
    flake-parts.lib.mkFlake { inherit inputs; } {
      systems = import systems;
      imports = [ nci.flakeModule ./module.nix ];
    };
}

module.nix (new file, 25 lines)

@@ -0,0 +1,25 @@
{
  perSystem = { pkgs, config, self', ... }:
    let
      cfg = config.nci.outputs.resheet;
    in {
      nci.toolchainConfig = ./rust-toolchain.toml;
      nci.projects.resheet.path = ./.;
      nci.crates.resheet.runtimeLibs = with pkgs; [
        libGL
        wayland
        libxkbcommon
        fontconfig
      ];

      # Exports
      checks.build = self'.packages.resheet;
      packages.default = self'.packages.resheet;
      packages.resheet = cfg.packages.release;

      devShells.default = cfg.devShell.overrideAttrs (prev: {
        buildInputs = prev.buildInputs ++
          [ pkgs.rust-analyzer pkgs.slint-lsp ];
      });
    };
}

rust-toolchain.toml (new file, 2 lines)

@@ -0,0 +1,2 @@
[toolchain]
channel = "nightly-2024-09-01"

src/interpreter/mod.rs (new file, 2 lines)

@@ -0,0 +1,2 @@
pub mod parser;
pub mod util;

src/interpreter/parser.rs (new file, 377 lines)

@@ -0,0 +1,377 @@
use super::util::*;
use malachite::num::conversion::traits::FromSciString;
use malachite::{Integer, Rational};
use std::collections::HashMap;
use std::fmt::Debug;
use std::sync::LazyLock;
static OPERATOR_CHARS: &'static [char] = &[
'!', '#', '$', '%', '&', '*', '+', '.', '/', '<', '=', '>', '?', '@', '\\', '^', '|', '-', '~',
':',
];
static RESERVED_OPER: LazyLock<HashMap<&'static str, TokenType>> = LazyLock::new(|| {
HashMap::from([
("=", TokenType::Equal),
(":", TokenType::Colon),
("->", TokenType::Arrow),
])
});
static RESERVED_IDENT: LazyLock<HashMap<&'static str, TokenType>> =
LazyLock::new(|| HashMap::from([("case", TokenType::Case)]));
static ESCAPE_SEQS: LazyLock<HashMap<char, char>> =
LazyLock::new(|| HashMap::from([('"', '"'), ('\\', '\\'), ('n', '\n')]));
// # Recognizer
#[derive(Copy, Clone, Debug)]
pub struct Recognizer<Iter> {
iter: Iter,
origin: Origin,
}
impl<Iter> Recognizer<Iter> {
fn rollback<T>(&mut self, func: impl FnOnce(&mut Self) -> T) -> T
where
Iter: Clone,
{
let old_iter = self.iter.clone();
let res = func(self);
self.iter = old_iter;
res
}
fn opt<T>(
&mut self,
func: impl FnOnce(&mut Self) -> Result<T, ParseExcept>,
) -> Result<Option<T>, ParseExcept>
where
Iter: Clone,
{
let old_iter = self.iter.clone();
match func(self) {
Ok(res) => Ok(Some(res)),
Err(exc) => {
if exc.fatal {
Err(exc)
} else {
self.iter = old_iter;
Ok(None)
}
}
}
}
fn alt<T, const N: usize>(
&mut self,
funcs: [fn(&mut Self) -> Result<T, ParseExcept>; N],
) -> Result<T, ParseExcept>
where
Iter: Clone,
{
let old_iter = self.iter.clone();
let mut except = ParseExcept::default();
for func in funcs {
match func(self) {
Ok(res) => return Ok(res),
Err(exc) => {
self.iter = old_iter.clone();
except = except.merge(exc);
if except.fatal {
return Err(except);
}
}
}
}
Err(except)
}
fn many<C, T>(
&mut self,
mut func: impl FnMut(&mut Self) -> Result<T, ParseExcept>,
) -> Result<C, ParseExcept>
where
Iter: Clone,
C: FromIterator<T>,
{
std::iter::from_fn(|| self.opt(&mut func).transpose()).try_collect()
}
fn many1<C, T>(
&mut self,
mut func: impl FnMut(&mut Self) -> Result<T, ParseExcept>,
) -> Result<C, ParseExcept>
where
Iter: Clone,
C: FromIterator<T>,
{
let mut iter = std::iter::from_fn(|| self.opt(&mut func).transpose()).peekable();
if iter.peek().is_some() {
iter.try_collect()
} else {
Err(ParseExcept::default())
}
}
fn repeat<C, T>(
mut self,
mut func: impl FnMut(&mut Self) -> Result<T, ParseExcept>,
) -> Result<C, ParseExcept>
where
Iter: Iterator + Clone,
C: FromIterator<T>,
{
std::iter::from_fn(|| {
if self.rollback(|slf| slf.iter.next()).is_none() {
None
} else {
let res = func(&mut self);
Some(res)
}
})
.try_collect()
}
fn pred(&mut self, pred: impl FnOnce(&Iter::Item) -> bool) -> Result<Iter::Item, ParseExcept>
where
Iter: Iterator,
{
if let Some(res) = self.iter.next() {
if pred(&res) {
Ok(res)
} else {
Err(ParseExcept::default())
}
} else {
Err(ParseExcept::default())
}
}
}
// # Lexer
#[derive(Clone, Debug)]
pub enum TokenType {
// Delimiters
LeftParen,
RightParen,
LeftBrack,
RightBrack,
LeftBrace,
RightBrace,
// Keywords
Case,
// Separators
Comma,
Colon,
Semicolon,
Equal,
Arrow,
// Literals.
Ident(String),
Oper(String),
Str(String),
Int(Integer),
Rat(Rational),
}
#[derive(Clone, Debug)]
pub struct Token {
pub ty: TokenType,
pub fc: FileContext,
}
pub type Lexer<'a> = Recognizer<CharPos<'a>>;
impl<'a> Lexer<'a> {
pub fn new(str: &'a str, origin: Origin) -> Self {
Recognizer {
iter: CharPos::new(str),
origin,
}
}
fn matches(&mut self, expected: char) -> Result<(), ParseExcept> {
self.pred(|&actual| expected == actual).map(|_| ())
}
fn context_over<E>(
&mut self,
func: impl FnOnce(&mut Self, FilePos) -> Result<TokenType, E>,
) -> Result<Token, E> {
let start = self.iter.current_pos();
let ty = func(self, start)?;
Ok(Token {
ty,
fc: FileContext {
origin: self.origin,
start,
end: self.iter.current_pos(),
},
})
}
fn char_token(&mut self, expected: char, ty: TokenType) -> Result<Token, ParseExcept> {
self.context_over(|slf, _| {
slf.matches(expected)?;
Ok(ty)
})
}
fn operator(&mut self) -> Result<Token, ParseExcept> {
self.context_over(|slf, _| {
let oper: String = slf.many1(|slf| slf.pred(|c| OPERATOR_CHARS.contains(c)))?;
Ok(RESERVED_OPER
.get(&oper[..])
.cloned()
.unwrap_or(TokenType::Oper(oper)))
})
}
pub fn identifier(&mut self) -> Result<Token, ParseExcept> {
self.context_over(|slf, _| {
let mut trailing = false;
let ident: String = slf.many1(|slf| {
let res = slf.pred(|c| c.is_alphabetic() || (trailing && c.is_ascii_digit()))?;
trailing = true;
Ok(res)
})?;
Ok(RESERVED_IDENT
.get(&ident[..])
.cloned()
.unwrap_or(TokenType::Ident(ident)))
})
}
fn string(&mut self) -> Result<Token, ParseExcept> {
self.context_over(|slf, start| {
slf.matches('"')?;
let contents = slf.many(|slf| {
let c = slf.iter.next().ok_or(ParseExcept::default())?;
if c == '\\' {
slf.iter
.next()
.and_then(|c| ESCAPE_SEQS.get(&c).copied())
.ok_or(ParseExcept::default())
} else if c != '"' {
Ok(c)
} else {
Err(ParseExcept::default())
}
})?;
slf.iter.next().ok_or(ParseExcept {
fatal: true,
errors: vec![ParseError {
ty: ParseErrorType::UnclosedString,
fc: FileContext {
origin: slf.origin,
start,
end: slf.iter.current_pos(),
},
}],
})?;
Ok(TokenType::Str(contents))
})
}
pub fn number(&mut self) -> Result<Token, ParseExcept> {
self.context_over(|slf, _| {
let literal = slf.many1::<String, _>(|slf| slf.pred(|&c| c.is_ascii_digit()))?
+ slf
.opt(|slf| {
slf.matches('.')?;
Ok(String::from(".")
+ &slf.many1::<String, _>(|slf| slf.pred(|&c| c.is_ascii_digit()))?)
})?
.as_deref()
.unwrap_or("")
+ slf
.opt(|slf| {
slf.matches('e')?;
Ok(String::from("e")
+ &slf
.opt(|slf| slf.pred(|&c| c == '-' || c == '+'))?
.into_iter()
.collect::<String>()
+ &slf.many1::<String, _>(|slf| slf.pred(|&c| c.is_ascii_digit()))?)
})?
.as_deref()
.unwrap_or("");
let rational = Rational::from_sci_string(&literal).expect(&format!(
"Number literal {:?} could not be parsed - This should not happen!",
literal
));
Ok(if let Ok(integer) = rational.clone().try_into() {
TokenType::Int(integer)
} else {
TokenType::Rat(rational)
})
})
}
pub fn lex(self) -> Result<Vec<Token>, ParseExcept> {
Ok(self
.repeat::<Vec<_>, _>(|slf| {
slf.alt([
|slf| slf.char_token('(', TokenType::LeftParen).map(Some),
|slf| slf.char_token(')', TokenType::RightParen).map(Some),
|slf| slf.char_token('[', TokenType::LeftBrack).map(Some),
|slf| slf.char_token(']', TokenType::RightBrack).map(Some),
|slf| slf.char_token('{', TokenType::LeftBrace).map(Some),
|slf| slf.char_token('}', TokenType::RightBrace).map(Some),
|slf| slf.char_token(',', TokenType::Comma).map(Some),
|slf| slf.char_token(';', TokenType::Semicolon).map(Some),
|slf| slf.string().map(Some),
|slf| slf.number().map(Some),
|slf| slf.identifier().map(Some),
|slf| slf.operator().map(Some),
|slf| match slf.many1(|slf| slf.pred(|c| c.is_whitespace()).map(|_| ())) {
Ok(()) => Ok(None),
Err(_) => Err(ParseExcept::default()),
},
])
.map_err(|exc| {
if exc.errors.is_empty() {
let pos = slf.iter.current_pos();
ParseExcept {
fatal: false,
errors: vec![if let Some(c) = slf.iter.next() {
ParseError {
ty: ParseErrorType::UnexpectedChar(c),
fc: FileContext {
origin: slf.origin,
start: pos,
end: pos.inc_col(),
},
}
} else {
ParseError {
ty: ParseErrorType::UnexpectedEOF,
fc: FileContext {
origin: slf.origin,
start: pos,
end: pos,
},
}
}],
}
} else {
exc
}
})
})?
.into_iter()
.flatten()
.collect())
}
}

src/interpreter/util.rs (new file, 137 lines)

@@ -0,0 +1,137 @@
use std::iter::FusedIterator;
use std::str::Chars;
#[derive(Copy, Clone, Debug)]
pub enum Origin {
InternalDefs,
Formula,
}
#[derive(Copy, Clone, Debug)]
pub struct FilePos {
pub line: usize,
pub col: usize,
}
#[derive(Copy, Clone, Debug)]
pub struct FileContext {
pub origin: Origin,
pub start: FilePos,
pub end: FilePos,
}
impl Default for FilePos {
fn default() -> Self {
FilePos { line: 0, col: 0 }
}
}
impl FilePos {
pub fn inc_col(self) -> Self {
FilePos {
line: self.line,
col: self.col + 1,
}
}
pub fn reset_col(self) -> Self {
FilePos {
line: self.line,
col: 0,
}
}
pub fn inc_line(self) -> Self {
FilePos {
line: self.line + 1,
col: self.col,
}
}
pub fn next_line(self) -> Self {
self.inc_line().reset_col()
}
}
#[derive(Clone, Debug)]
pub struct CharPos<'a> {
iter: Chars<'a>,
pos: FilePos,
}
impl<'a> CharPos<'a> {
pub fn new(str: &'a str) -> Self {
CharPos {
iter: str.chars(),
pos: FilePos::default(),
}
}
pub fn current_pos(&self) -> FilePos {
self.pos
}
pub fn done(&self) -> bool {
self.iter.as_str().is_empty()
}
}
impl<'a> Iterator for CharPos<'a> {
type Item = char;
fn next(&mut self) -> Option<Self::Item> {
let c = self.iter.next()?;
self.pos = if c == '\n' {
self.pos.next_line()
} else {
self.pos.inc_col()
};
Some(c)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a> FusedIterator for CharPos<'a> {}
// # Parse Errors
#[derive(Copy, Clone, Debug)]
pub enum ParseErrorType {
UnexpectedChar(char),
UnexpectedEOF,
UnclosedString,
}
#[derive(Copy, Clone, Debug)]
pub struct ParseError {
pub ty: ParseErrorType,
pub fc: FileContext,
}
#[derive(Clone, Debug)]
pub struct ParseExcept {
pub fatal: bool,
pub errors: Vec<ParseError>,
}
impl Default for ParseExcept {
fn default() -> Self {
ParseExcept {
fatal: false,
errors: Vec::new(),
}
}
}
impl ParseExcept {
pub fn merge(mut self, other: Self) -> Self {
self.errors.extend(other.errors);
ParseExcept {
fatal: self.fatal || other.fatal,
errors: self.errors,
}
}
}
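
As a quick illustration of the position tracking above, a test along these lines (hypothetical, not part of this commit; it assumes it lives inside this crate so that crate::interpreter::util::CharPos resolves) would pass: CharPos advances the column for ordinary characters and, after a newline, moves to the next line and resets the column.

// Hypothetical test sketch, not code from the commit.
#[cfg(test)]
mod charpos_demo {
    use crate::interpreter::util::CharPos;

    #[test]
    fn tracks_line_and_column() {
        let mut cp = CharPos::new("ab\nc");
        assert_eq!((cp.current_pos().line, cp.current_pos().col), (0, 0));

        cp.next(); // 'a'  -> column 1
        cp.next(); // 'b'  -> column 2
        cp.next(); // '\n' -> next_line(): line 1, column 0
        assert_eq!((cp.current_pos().line, cp.current_pos().col), (1, 0));

        cp.next(); // 'c'  -> line 1, column 1
        assert!(cp.done()); // the underlying &str has been fully consumed
    }
}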

src/main.rs (new file, 16 lines)

@@ -0,0 +1,16 @@
#![allow(dead_code)]
#![feature(iterator_try_collect)]

mod interpreter;
mod ui;

use interpreter::parser::Lexer;
use interpreter::util::Origin;

fn main() {
    let code = "3000e-5 :: map(f, t)";

    for tok in Lexer::new(code, Origin::InternalDefs).lex().unwrap() {
        println!("{:?}", tok);
    }
}

src/ui.rs (new file, 3 lines)

@@ -0,0 +1,3 @@
pub mod slint {
    slint::include_modules!();
}

ui/main.slint (new file, 6 lines)

@@ -0,0 +1,6 @@
export component HelloWorld inherits Window {
    Text {
        text: "Hello World";
        color: green;
    }
}
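
For reference, showing this HelloWorld window from Rust would typically look roughly like the sketch below. It relies on Slint's standard generated-component API (new() and run()) for components pulled in by include_modules!; the helper name show_window is made up, and none of this is part of the commit.

// Hypothetical sketch: open the window compiled in by build.rs.
use crate::ui::slint::HelloWorld;

fn show_window() -> Result<(), slint::PlatformError> {
    let window = HelloWorld::new()?; // construct the generated component
    window.run()                     // run the event loop until the window closes
}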