Compare commits

...

31 Commits

Author SHA1 Message Date
8b1ffb202e
backend: fix and fix 2024-09-20 23:16:41 +05:00
dcdfcec05f
frontend: update openapi client generation
frontend: base repository and uploader views
frontend: new components
2024-09-20 23:15:35 +05:00
1f2e1ec6e4
docs: icons 2024-09-20 23:14:14 +05:00
714a9d0879
new: documentation
fix: config env parsing
2024-09-06 11:13:25 +05:00
8d03a3e3b0
openapi client generation 2024-08-30 23:31:27 +05:00
adc4a59932
tune openapi 2024-08-30 23:28:58 +05:00
1b1142a0b0
repair tests 2024-08-30 12:38:43 +05:00
3637ea99a8
move some modules to core module 2024-08-29 23:01:39 +05:00
b3be3d25ee
stabilize directory workflow 2024-08-22 12:24:36 +05:00
680b0172f0
repository and directory tests 2024-08-15 01:02:12 +05:00
58e7175d45
tests and fixtures 2024-08-14 00:56:30 +05:00
aefedfe187
complete base file and directory models 2024-08-05 18:29:03 +05:00
9986429bdf
impove base mixin, filesystem model 2024-08-05 15:15:20 +05:00
383d7c57ab
directory, file, repository, tests 2024-08-05 01:28:20 +05:00
69a1aa2471
update dependencies
and more
2024-08-03 01:01:01 +05:00
727f1b51ee
fix optional package materia-frontend 2024-07-31 18:04:34 +05:00
6ad7c29a48
rename materia_server -> materia, serve spa via backend + assets 2024-07-31 16:37:04 +05:00
d60ff09dad
rebase project and workspaces 2024-07-31 14:59:22 +05:00
850bb89346
materia-server: add tests 2024-07-25 13:33:05 +05:00
577f6f3ddf
materia-web-client: repository view 2024-07-10 01:21:21 +05:00
b89e8f3393
materia-server: fix directory no parent 2024-07-10 01:20:51 +05:00
ec41110e0b
fix: api client, auth workflow 2024-07-08 16:26:35 +05:00
aef6c2b541
materia-server: base CRD api, resources routes 2024-07-05 18:34:14 +05:00
4312d5b5d1
materia-server: file remove api 2024-07-02 00:47:24 +05:00
1877554bb2
materia-server: file api 2024-06-25 22:30:05 +05:00
f7bac07837
materia-server: fix migrations, split app and cli 2024-06-24 18:52:04 +05:00
317085fc04
materia-server: repository api, directory api, collapsed modules 2024-06-22 01:45:13 +05:00
d8b19da646
materia-web-client: unify style with catppuccin
materia-server: fixing auth
2024-06-20 00:11:35 +05:00
997f37d5ee
new: reconstruct project
new: pdm package manager (python)
new: workspace for three subprojects
new: dream2nix module for packaging
new: postgresql and redis images
more: and more
2024-06-17 19:52:24 +05:00
e67fcc2216
new: filesystem hierarchy, file upload 2024-05-17 01:16:30 +05:00
aa12f90f51
new: api: user, middlewares, states; db: basic setup, migrations 2024-05-16 11:05:11 +05:00
147 changed files with 15082 additions and 2138 deletions

16
.gitignore vendored
View File

@ -1,4 +1,18 @@
/result*
/repl-result*
temp/
dist/
/.venv
__pycache__/
/temp
*.egg-info
.pdm.toml
.pdm-python
.pdm-build
.pytest_cache
.coverage
/site
src/materia/docs

View File

@ -16,6 +16,37 @@ alembic upgrade head
# Rollback the migration
alembic downgrade head
```
## Setup tests
```sh
nix build .#postgresql-devel
podman load < result
podman run -p 54320:5432 --name database -dt postgresql:latest
nix build .#redis-devel
podman load < result
podman run -p 63790:63790 --name cache -dt redis:latest
nix develop
pdm install --dev
eval $(pdm venv activate)
pytest
```
## Side notes
```
/var
/lib
/materia <-- data directory
/repository <-- repository directory
/rick <-- user name
/default <--| default repository name
... | possible features: external cloud drives?
/first <-- first level directories counts as root because no parent
/nested
/hello.txt
```
# License
**materia** is licensed under the [MIT License](LICENSE).

View File

@ -2,7 +2,7 @@
[alembic]
# path to migration scripts
script_location = src/db/migrations
script_location = ./src/materia/models/migrations
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
@ -60,7 +60,7 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne
# are written from script.py.mako
# output_encoding = utf-8
#sqlalchemy.url = driver://user:pass@localhost/dbname
sqlalchemy.url = postgresql+asyncpg://materia:materia@127.0.0.1:54320/materia
[post_write_hooks]
@ -91,7 +91,7 @@ keys = console
keys = generic
[logger_root]
level = WARN
level = INFO
handlers = console
qualname =

52
docs/api.md Normal file
View File

@ -0,0 +1,52 @@
---
hide:
- navigation
- toc
---
<style>
.md-typeset h1,
.md-content__button {
display: none;
}
.md-main__inner {
max-width: 100%; /* or 100%, if you want to stretch to full-width */
margin-top: 0;
}
.md-content__inner {
margin: 0;
padding-top: 0;
}
.md-content__inner > p {
margin: 0;
}
.md-content__inner::before {
display: none;
}
.md-footer__inner {
display: none;
}
.md-footer__inner:not([hidden]) {
display: none;
}
</style>
<rapi-doc
spec-url="/api/openapi.json"
theme = "dark"
show-header = "false"
show-info = "false"
allow-authentication = "true"
allow-server-selection = "true"
allow-api-list-style-selection = "true"
theme = "dark"
render-style = "focused"
bg-color="#1e2129"
primary-color="#a47bea"
regular-font="Roboto"
mono-font="Roboto Mono"
show-method-in-nav-bar="as-colored-text">
<img slot="logo" style="display: none"/>
</rapi-doc>
<script
type="module"
src="https://unpkg.com/rapidoc/dist/rapidoc-min.js"
></script>

BIN
docs/img/favicon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 129 KiB

310
docs/img/logo-black.svg Normal file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 78 KiB

BIN
docs/img/logo-full.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 81 KiB

310
docs/img/logo-white.svg Normal file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 78 KiB

BIN
docs/img/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 130 KiB

528
docs/img/logo.svg Normal file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 122 KiB

12
docs/index.md Normal file
View File

@ -0,0 +1,12 @@
# Materia
<style>
.md-content .md-typeset h1 { display: none; }
</style>
<p align="center">
<a href="https://materia.elnafo.ru"><img src="img/logo-full.png" alt="Materia"></a>
</p>
<p align="center">
<em>Materia is easy and fast cloud storage</em>
</p>

1
docs/reference/app.md Normal file
View File

@ -0,0 +1 @@
::: materia.app

1
docs/reference/core.md Normal file
View File

@ -0,0 +1 @@
::: materia.core

5
docs/reference/index.md Normal file
View File

@ -0,0 +1,5 @@
# Reference
Here's the reference or code API, the classes, functions, parameters, attributes, and
all the Materia parts.

1
docs/reference/models.md Normal file
View File

@ -0,0 +1 @@
::: materia.models

View File

@ -0,0 +1 @@
::: materia.routers

View File

@ -0,0 +1 @@
::: materia.security

1
docs/reference/tasks.md Normal file
View File

@ -0,0 +1 @@
::: materia.tasks

View File

@ -1,51 +1,243 @@
{
"nodes": {
"flake-utils": {
"ags": {
"inputs": {
"nixpkgs": [
"bonfire",
"nixpkgs"
],
"systems": "systems"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"lastModified": 1721306136,
"narHash": "sha256-VKPsIGf3/a+RONBipx4lEE4LXG2sdMNkWQu22LNQItg=",
"owner": "Aylur",
"repo": "ags",
"rev": "344ea72cd3b8d4911f362fec34bce7d8fb37028c",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"owner": "Aylur",
"repo": "ags",
"type": "github"
}
},
"nix-github-actions": {
"blobs": {
"flake": false,
"locked": {
"lastModified": 1604995301,
"narHash": "sha256-wcLzgLec6SGJA8fx1OEN1yV/Py5b+U5iyYpksUY/yLw=",
"owner": "simple-nixos-mailserver",
"repo": "blobs",
"rev": "2cccdf1ca48316f2cfd1c9a0017e8de5a7156265",
"type": "gitlab"
},
"original": {
"owner": "simple-nixos-mailserver",
"repo": "blobs",
"type": "gitlab"
}
},
"bonfire": {
"inputs": {
"ags": "ags",
"catppuccin": "catppuccin",
"crane": "crane",
"fenix": "fenix",
"home-manager": "home-manager",
"nixos-mailserver": "nixos-mailserver",
"nixpkgs": "nixpkgs",
"nixvim": "nixvim",
"obs-image-reaction": "obs-image-reaction",
"oscuro": "oscuro",
"sops-nix": "sops-nix"
},
"locked": {
"lastModified": 1721891744,
"narHash": "sha256-1ZYNhS1WWcd6Md5kPlX7iuUKKGs8CSxo4QMmw/grnRA=",
"owner": "L-Nafaryus",
"repo": "bonfire",
"rev": "79340a0b933ff2b96070f1aadaf6dd70f867e75f",
"type": "github"
},
"original": {
"owner": "L-Nafaryus",
"repo": "bonfire",
"type": "github"
}
},
"catppuccin": {
"locked": {
"lastModified": 1720472194,
"narHash": "sha256-CYscFEts6tyvosc1T29nxhzIYJAj/1CCEkV3ZMzSN/c=",
"owner": "catppuccin",
"repo": "nix",
"rev": "d75d5803852fb0833767dc969a4581ac13204e22",
"type": "github"
},
"original": {
"owner": "catppuccin",
"repo": "nix",
"type": "github"
}
},
"crane": {
"inputs": {
"nixpkgs": [
"poetry2nix",
"bonfire",
"nixpkgs"
]
},
"locked": {
"lastModified": 1703863825,
"narHash": "sha256-rXwqjtwiGKJheXB43ybM8NwWB8rO2dSRrEqes0S7F5Y=",
"lastModified": 1721322122,
"narHash": "sha256-a0G1NvyXGzdwgu6e1HQpmK5R5yLsfxeBe07nNDyYd+g=",
"owner": "ipetkov",
"repo": "crane",
"rev": "8a68b987c476a33e90f203f0927614a75c3f47ea",
"type": "github"
},
"original": {
"owner": "ipetkov",
"repo": "crane",
"type": "github"
}
},
"dream2nix": {
"inputs": {
"nixpkgs": [
"nixpkgs"
],
"purescript-overlay": "purescript-overlay",
"pyproject-nix": "pyproject-nix"
},
"locked": {
"lastModified": 1722526955,
"narHash": "sha256-fFS8aDnfK9Qfm2FLnQ8pqWk8FzvFEv5LvTuZTZLREnc=",
"owner": "nix-community",
"repo": "nix-github-actions",
"rev": "5163432afc817cf8bd1f031418d1869e4c9d5547",
"repo": "dream2nix",
"rev": "3fd4c14d3683baac8d1f94286ae14fe160888b51",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "nix-github-actions",
"repo": "dream2nix",
"type": "github"
}
},
"fenix": {
"inputs": {
"nixpkgs": [
"bonfire",
"nixpkgs"
],
"rust-analyzer-src": [
"bonfire"
]
},
"locked": {
"lastModified": 1721629802,
"narHash": "sha256-GKlvM9M0mkKJrL6N1eMG4DrROO25Ds1apFw3/b8594w=",
"owner": "nix-community",
"repo": "fenix",
"rev": "1270fb024c6987dd825a20cd27319384a8d8569e",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "fenix",
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1696426674,
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-parts": {
"inputs": {
"nixpkgs-lib": [
"bonfire",
"nixvim",
"nixpkgs"
]
},
"locked": {
"lastModified": 1719994518,
"narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"home-manager": {
"inputs": {
"nixpkgs": [
"bonfire",
"nixpkgs"
]
},
"locked": {
"lastModified": 1721534365,
"narHash": "sha256-XpZOkaSJKdOsz1wU6JfO59Rx2fqtcarQ0y6ndIOKNpI=",
"owner": "nix-community",
"repo": "home-manager",
"rev": "635563f245309ef5320f80c7ebcb89b2398d2949",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "home-manager",
"type": "github"
}
},
"nixos-mailserver": {
"inputs": {
"blobs": "blobs",
"flake-compat": "flake-compat",
"nixpkgs": [
"bonfire",
"nixpkgs"
],
"nixpkgs-24_05": "nixpkgs-24_05"
},
"locked": {
"lastModified": 1721121314,
"narHash": "sha256-zwc7YXga/1ppaZMWFreZykXtFwBgXodxUZiUx969r+g=",
"owner": "simple-nixos-mailserver",
"repo": "nixos-mailserver",
"rev": "059b50b2e729729ea00c6831124d3837c494f3d5",
"type": "gitlab"
},
"original": {
"owner": "simple-nixos-mailserver",
"repo": "nixos-mailserver",
"type": "gitlab"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1714906307,
"narHash": "sha256-UlRZtrCnhPFSJlDQE7M0eyhgvuuHBTe1eJ9N9AQlJQ0=",
"lastModified": 1721379653,
"narHash": "sha256-8MUgifkJ7lkZs3u99UDZMB4kbOxvMEXQZ31FO3SopZ0=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "25865a40d14b3f9cf19f19b924e2ab4069b09588",
"rev": "1d9c2c9b3e71b9ee663d11c5d298727dace8d374",
"type": "github"
},
"original": {
@ -55,83 +247,255 @@
"type": "github"
}
},
"poetry2nix": {
"nixpkgs-24_05": {
"locked": {
"lastModified": 1717144377,
"narHash": "sha256-F/TKWETwB5RaR8owkPPi+SPJh83AQsm6KrQAlJ8v/uA=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "805a384895c696f802a9bf5bf4720f37385df547",
"type": "github"
},
"original": {
"id": "nixpkgs",
"ref": "nixos-24.05",
"type": "indirect"
}
},
"nixpkgs-stable": {
"locked": {
"lastModified": 1721524707,
"narHash": "sha256-5NctRsoE54N86nWd0psae70YSLfrOek3Kv1e8KoXe/0=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "556533a23879fc7e5f98dd2e0b31a6911a213171",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "release-24.05",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1719223410,
"narHash": "sha256-jtIo8xR0Zp4SalIwmD+OdCwHF4l7OU6PD63UUK4ckt4=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "efb39c6052f3ce51587cf19733f5f4e5d515aa13",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_3": {
"locked": {
"lastModified": 1722421184,
"narHash": "sha256-/DJBI6trCeVnasdjUo9pbnodCLZcFqnVZiLUfqLH4jA=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "9f918d616c5321ad374ae6cb5ea89c9e04bf3e58",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixvim": {
"inputs": {
"flake-utils": "flake-utils",
"nix-github-actions": "nix-github-actions",
"devshell": [
"bonfire"
],
"flake-compat": [
"bonfire"
],
"flake-parts": "flake-parts",
"git-hooks": [
"bonfire"
],
"home-manager": [
"bonfire"
],
"nix-darwin": [
"bonfire"
],
"nixpkgs": [
"bonfire",
"nixpkgs"
],
"systems": "systems_2",
"treefmt-nix": "treefmt-nix"
"treefmt-nix": [
"bonfire"
]
},
"locked": {
"lastModified": 1715017507,
"narHash": "sha256-RN2Vsba56PfX02DunWcZYkMLsipp928h+LVAWMYmbZg=",
"lastModified": 1721772245,
"narHash": "sha256-//9p3Qm8gLbPUTsSGN2EMYkDwE5Sqq9B9P2X/z2+npw=",
"owner": "nix-community",
"repo": "poetry2nix",
"rev": "e6b36523407ae6a7a4dfe29770c30b3a3563b43a",
"repo": "nixvim",
"rev": "ab67ee7e8b33e788fc53d26dc6f423f9358e3e66",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "poetry2nix",
"repo": "nixvim",
"type": "github"
}
},
"obs-image-reaction": {
"inputs": {
"nixpkgs": "nixpkgs_2"
},
"locked": {
"lastModified": 1719314544,
"narHash": "sha256-GZa3+2OELKp/9b2+EwwzaIMNvR9niCy/YZ5OERhG9Hg=",
"owner": "L-Nafaryus",
"repo": "obs-image-reaction",
"rev": "0dcb3c27de5782dfdf95cb047ccceb3e65360e6b",
"type": "github"
},
"original": {
"owner": "L-Nafaryus",
"repo": "obs-image-reaction",
"type": "github"
}
},
"oscuro": {
"inputs": {
"bonfire": [
"bonfire"
],
"nixpkgs": [
"bonfire",
"oscuro",
"bonfire",
"nixpkgs"
]
},
"locked": {
"lastModified": 1714759244,
"narHash": "sha256-ZDH7WTsILPEIZuo3/C4QwOXTv7r1xoUxKOQSDFpdNEE=",
"owner": "L-Nafaryus",
"repo": "oscuro",
"rev": "68da7759c61b6d34f54087e3e845d8cc70702310",
"type": "github"
},
"original": {
"owner": "L-Nafaryus",
"repo": "oscuro",
"type": "github"
}
},
"purescript-overlay": {
"inputs": {
"nixpkgs": [
"dream2nix",
"nixpkgs"
],
"slimlock": "slimlock"
},
"locked": {
"lastModified": 1696022621,
"narHash": "sha256-eMjFmsj2G1E0Q5XiibUNgFjTiSz0GxIeSSzzVdoN730=",
"owner": "thomashoneyman",
"repo": "purescript-overlay",
"rev": "047c7933abd6da8aa239904422e22d190ce55ead",
"type": "github"
},
"original": {
"owner": "thomashoneyman",
"repo": "purescript-overlay",
"type": "github"
}
},
"pyproject-nix": {
"flake": false,
"locked": {
"lastModified": 1702448246,
"narHash": "sha256-hFg5s/hoJFv7tDpiGvEvXP0UfFvFEDgTdyHIjDVHu1I=",
"owner": "davhau",
"repo": "pyproject.nix",
"rev": "5a06a2697b228c04dd2f35659b4b659ca74f7aeb",
"type": "github"
},
"original": {
"owner": "davhau",
"ref": "dream2nix",
"repo": "pyproject.nix",
"type": "github"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs",
"poetry2nix": "poetry2nix"
"bonfire": "bonfire",
"dream2nix": "dream2nix",
"nixpkgs": "nixpkgs_3"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_2": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"id": "systems",
"type": "indirect"
}
},
"treefmt-nix": {
"slimlock": {
"inputs": {
"nixpkgs": [
"poetry2nix",
"dream2nix",
"purescript-overlay",
"nixpkgs"
]
},
"locked": {
"lastModified": 1714058656,
"narHash": "sha256-Qv4RBm4LKuO4fNOfx9wl40W2rBbv5u5m+whxRYUMiaA=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "c6aaf729f34a36c445618580a9f95a48f5e4e03f",
"lastModified": 1688610262,
"narHash": "sha256-Wg0ViDotFWGWqKIQzyYCgayeH8s4U1OZcTiWTQYdAp4=",
"owner": "thomashoneyman",
"repo": "slimlock",
"rev": "b5c6cdcaf636ebbebd0a1f32520929394493f1a6",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "treefmt-nix",
"owner": "thomashoneyman",
"repo": "slimlock",
"type": "github"
}
},
"sops-nix": {
"inputs": {
"nixpkgs": [
"bonfire",
"nixpkgs"
],
"nixpkgs-stable": "nixpkgs-stable"
},
"locked": {
"lastModified": 1721531171,
"narHash": "sha256-AsvPw7T0tBLb53xZGcUC3YPqlIpdxoSx56u8vPCr6gU=",
"owner": "Mic92",
"repo": "sops-nix",
"rev": "909e8cfb60d83321d85c8d17209d733658a21c95",
"type": "github"
},
"original": {
"owner": "Mic92",
"repo": "sops-nix",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1689347949,
"narHash": "sha256-12tWmuL2zgBgZkdoB6qXZsgJEH9LR3oUgpaQq2RbI80=",
"owner": "nix-systems",
"repo": "default-linux",
"rev": "31732fcf5e8fea42e59c2488ad31a0e651500f68",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default-linux",
"type": "github"
}
}

281
flake.nix
View File

@ -1,81 +1,218 @@
{
description = "Materia is a file server";
description = "Materia";
nixConfig = {
extra-substituters = [ "https://bonfire.cachix.org" ];
extra-trusted-public-keys = [ "bonfire.cachix.org-1:mzAGBy/Crdf8NhKail5ciK7ZrGRbPJJobW6TwFb7WYM=" ];
inputs = {
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
dream2nix = {
url = "github:nix-community/dream2nix";
inputs.nixpkgs.follows = "nixpkgs";
};
bonfire.url = "github:L-Nafaryus/bonfire";
};
inputs = {
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
poetry2nix = {
url = "github:nix-community/poetry2nix";
inputs.nixpkgs.follows = "nixpkgs";
outputs = {
self,
nixpkgs,
dream2nix,
bonfire,
...
}: let
system = "x86_64-linux";
pkgs = import nixpkgs {inherit system;};
bonLib = bonfire.lib;
dreamBuildPackage = {
module,
meta ? {},
extraModules ? [],
extraArgs ? {},
}:
(
nixpkgs.lib.evalModules {
modules = [module] ++ extraModules;
specialArgs =
{
inherit dream2nix;
packageSets.nixpkgs = pkgs;
}
// extraArgs;
}
)
.config
.public
// {inherit meta;};
in {
packages.x86_64-linux = {
materia-frontend-nodejs = dreamBuildPackage {
module = {
lib,
config,
dream2nix,
...
}: {
name = "materia-frontend";
version = "0.0.5";
imports = [
dream2nix.modules.dream2nix.WIP-nodejs-builder-v3
];
mkDerivation = {
src = ./workspaces/frontend;
};
deps = {nixpkgs, ...}: {
inherit
(nixpkgs)
fetchFromGitHub
stdenv
;
};
WIP-nodejs-builder-v3 = {
packageLockFile = "${config.mkDerivation.src}/package-lock.json";
};
};
meta = with nixpkgs.lib; {
description = "Materia frontend (nodejs)";
license = licenses.mit;
maintainers = with bonLib.maintainers; [L-Nafaryus];
broken = false;
};
};
materia-frontend = dreamBuildPackage {
extraArgs = {
inherit (self.packages.x86_64-linux) materia-frontend-nodejs;
};
module = {
config,
lib,
dream2nix,
materia-frontend-nodejs,
...
}: {
imports = [dream2nix.modules.dream2nix.WIP-python-pdm];
pdm.lockfile = ./workspaces/frontend/pdm.lock;
pdm.pyproject = ./workspaces/frontend/pyproject.toml;
deps = _: {
python = pkgs.python312;
};
mkDerivation = {
src = ./workspaces/frontend;
buildInputs = [
pkgs.python312.pkgs.pdm-backend
];
configurePhase = ''
cp -rv ${materia-frontend-nodejs}/dist ./src/materia_frontend/
'';
};
};
meta = with nixpkgs.lib; {
description = "Materia frontend";
license = licenses.mit;
maintainers = with bonLib.maintainers; [L-Nafaryus];
broken = false;
};
};
materia = dreamBuildPackage {
extraArgs = {
inherit (self.packages.x86_64-linux) materia-frontend;
};
module = {
config,
lib,
dream2nix,
materia-frontend,
...
}: {
imports = [dream2nix.modules.dream2nix.WIP-python-pdm];
pdm.lockfile = ./pdm.lock;
pdm.pyproject = ./pyproject.toml;
deps = _: {
python = pkgs.python312;
};
mkDerivation = {
src = ./.;
buildInputs = [
pkgs.python312.pkgs.pdm-backend
];
nativeBuildInputs = [
pkgs.python312.pkgs.wrapPython
];
propagatedBuildInputs = [
materia-frontend
];
};
};
meta = with nixpkgs.lib; {
description = "Materia";
license = licenses.mit;
maintainers = with bonLib.maintainers; [L-Nafaryus];
broken = false;
mainProgram = "materia";
};
};
postgresql-devel = bonfire.packages.x86_64-linux.postgresql;
redis-devel = bonfire.packages.x86_64-linux.redis;
materia-devel = let
user = "materia";
dataDir = "/var/lib/materia";
entryPoint = pkgs.writeTextDir "entrypoint.sh" ''
materia start
'';
in
pkgs.dockerTools.buildImage {
name = "materia";
tag = "latest";
copyToRoot = pkgs.buildEnv {
name = "image-root";
pathsToLink = ["/bin" "/etc" "/"];
paths = with pkgs; [
bash
self.packages.x86_64-linux.materia
entryPoint
];
};
runAsRoot = with pkgs; ''
#!${runtimeShell}
${dockerTools.shadowSetup}
groupadd -r ${user}
useradd -r -g ${user} --home-dir=${dataDir} ${user}
mkdir -p ${dataDir}
chown -R ${user}:${user} ${dataDir}
'';
config = {
Entrypoint = ["bash" "/entrypoint.sh"];
StopSignal = "SIGINT";
User = "${user}:${user}";
WorkingDir = dataDir;
ExposedPorts = {
"54601/tcp" = {};
};
Env = [
"MATERIA_APPLICATION__WORKING_DIRECTORY=${dataDir}"
];
};
};
};
outputs = { self, nixpkgs, poetry2nix, ... }:
let
#perSystem = systems: builtins.mapAttrs (name: value: nixpkgs.lib.genAttrs systems (system: value) );
forAllSystems = nixpkgs.lib.genAttrs [ "x86_64-linux" ];
nixpkgsFor = forAllSystems (system: import nixpkgs { inherit system; });
in
{
packages = forAllSystems (system: let
pkgs = nixpkgsFor.${system};
#inherit (poetry2nix.lib.mkPoetry2Nix { inherit pkgs; }) mkPoetryApplication;
in {
#materia = mkPoetryApplication {
# projectDir = ./.;
#};
#default = self.packages.${system}.materia;
});
apps = forAllSystems (system: {
materia = let
pkgs = nixpkgsFor.${system};
app = (poetry2nix.lib.mkPoetry2Nix { inherit pkgs; }).mkPoetryApplication { projectDir = self; };
in {
type = "app";
program = "${app}/bin/materia";
};
#default = materia;
});
devShells = forAllSystems (system:
let
pkgs = nixpkgsFor.${system};
db_name = "materia";
db_user = "materia";
db_password = "test";
db_path = "temp/materia";
in {
default = pkgs.mkShell {
buildInputs = with pkgs; [
nil
nodejs
ripgrep
postgresql
poetry
];
LD_LIBRARY_PATH = nixpkgs.lib.makeLibraryPath [ pkgs.stdenv.cc.cc ];
shellHook = ''
trap "pg_ctl -D ${db_path} stop" EXIT
[ ! -d $(pwd)/${db_path} ] && initdb -D $(pwd)/${db_path} -U ${db_user}
pg_ctl -D $(pwd)/${db_path} -l $(pwd)/${db_path}/db.log -o "--unix_socket_directories=$(pwd)/${db_path}" start
[ ! "$(psql -h $(pwd)/${db_path} -U ${db_user} -l | rg '^ ${db_name}')" ] && createdb -h $(pwd)/${db_path} -U ${db_user} ${db_name}
'';
};
});
devShells.x86_64-linux.default = pkgs.mkShell {
buildInputs = with pkgs; [postgresql redis pdm nodejs python312];
# greenlet requires libstdc++
LD_LIBRARY_PATH = nixpkgs.lib.makeLibraryPath [pkgs.stdenv.cc.cc];
};
};
}

89
mkdocs.yml Normal file
View File

@ -0,0 +1,89 @@
site_name: Materia Documentation
site_description: Materia cloud storage
#site_url:
repo_name: L-Nafaryus/materia
repo_url: https://vcs.elnafo.ru/L-Nafaryus/materia
copyright: Copyright &copy; 2024 L-Nafaryus
theme:
name: material
features:
- content.code.annotate
- content.code.copy
# - content.code.select
- content.footnote.tooltips
- content.tabs.link
- content.tooltips
- navigation.footer
- navigation.indexes
- navigation.instant
- navigation.instant.prefetch
# - navigation.instant.preview
- navigation.instant.progress
- navigation.path
- navigation.tabs
- navigation.tabs.sticky
- navigation.top
- navigation.tracking
- search.highlight
- search.share
- search.suggest
- toc.follow
logo: img/favicon.png
favicon: img/favicon.png
palette:
- media: "(prefers-color-scheme: light)"
scheme: slate
primary: deep purple
accent: deep purple
toggle:
icon: material/weather-sunny
name: Switch to light mode
- media: "(prefers-color-scheme: dark)"
scheme: default
primary: deep purple
accent: deep purple
toggle:
icon: material/weather-night
name: Switch to dark mode
plugins:
- search:
- mkdocstrings:
handlers:
python:
paths: [src] # search packages in the src folder
options:
extensions:
- griffe_typingdoc
#preload_modules:
#- sqlalchemy
#docstring_style: sphinx
show_submodules: true
show_source: true
show_if_no_docstring: true
show_symbol_type_heading: true
show_symbol_type_toc: true
show_root_heading: true
unwrap_annotated: true
merge_init_into_class: true
docstring_section_style: spacy
signature_crossrefs: true
inherited_members: true
members_order: source
separate_signature: true
filters:
- '!^_'
nav:
- Materia: index.md
- Reference:
- reference/index.md
- reference/app.md
- reference/core.md
- reference/models.md
- reference/routers.md
- reference/security.md
- reference/tasks.md
- API: api.md

2004
pdm.lock Normal file

File diff suppressed because it is too large Load Diff

1654
poetry.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,3 +0,0 @@
[virtualenvs]
in-project = true
create = true

View File

@ -1,38 +1,54 @@
[tool.poetry]
name = "materia-backend"
version = "0.1.0"
[project]
name = "materia"
version = "0.1.1"
description = "Materia is a file server"
authors = [
"L-Nafaryus <l.nafaryus@gmail.com>"
{name = "L-Nafaryus", email = "l.nafaryus@gmail.com"},
]
maintainers = [
"L-Nafaryus <l.nafaryus@gmail.com>"
dependencies = [
"fastapi<1.0.0,>=0.111.0",
"uvicorn[standard]<1.0.0,>=0.29.0",
"psycopg2-binary<3.0.0,>=2.9.9",
"toml<1.0.0,>=0.10.2",
"sqlalchemy[asyncio]<3.0.0,>=2.0.30",
"asyncpg<1.0.0,>=0.29.0",
"eventlet<1.0.0,>=0.36.1",
"bcrypt==4.1.2",
"pyjwt<3.0.0,>=2.8.0",
"requests<3.0.0,>=2.31.0",
"pillow<11.0.0,>=10.3.0",
"sqids<1.0.0,>=0.4.1",
"alembic<2.0.0,>=1.13.1",
"authlib<2.0.0,>=1.3.0",
"redis[hiredis]<6.0.0,>=5.0.4",
"aiosmtplib<4.0.0,>=3.0.1",
"emails<1.0,>=0.6",
"pydantic-settings<3.0.0,>=2.2.1",
"email-validator<3.0.0,>=2.1.1",
"pydanclick<1.0.0,>=0.2.0",
"loguru<1.0.0,>=0.7.2",
"alembic-postgresql-enum<2.0.0,>=1.2.0",
"gunicorn>=22.0.0",
"uvicorn-worker>=0.2.0",
"httpx>=0.27.0",
"cryptography>=43.0.0",
"python-multipart>=0.0.9",
"jinja2>=3.1.4",
"aiofiles>=24.1.0",
"aioshutil>=1.5",
"Celery>=5.4.0",
"streaming-form-data>=1.16.0",
]
license = "MIT"
requires-python = ">=3.12,<3.13"
readme = "README.md"
packages = [
{ include = "src" }
]
license = {text = "MIT"}
[tool.poetry.scripts]
materia = "src.main:main"
[build-system]
requires = ["pdm-backend"]
build-backend = "pdm.backend"
[tool.poetry.dependencies]
python = ">=3.10,<3.12"
fastapi = "^0.111.0"
uvicorn = {version = "^0.29.0", extras = ["standard"]}
psycopg2-binary = "^2.9.9"
toml = "^0.10.2"
sqlalchemy = {version = "^2.0.30", extras = ["asyncio"]}
asyncpg = "^0.29.0"
eventlet = "^0.36.1"
[tool.poetry.group.dev.dependencies]
black = "^23.3.0"
pytest = "^7.3.2"
pyflakes = "^3.0.1"
pyright = "^1.1.314"
alembic = "^1.13.1"
[project.scripts]
materia = "materia.app.cli:cli"
[tool.pyright]
reportGeneralTypeIssues = false
@ -41,6 +57,32 @@ reportGeneralTypeIssues = false
pythonpath = ["."]
testpaths = ["tests"]
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.pdm]
distribution = true
[tool.pdm.dev-dependencies]
dev = [
"-e file:///${PROJECT_ROOT}/workspaces/frontend",
"black<24.0.0,>=23.3.0",
"pytest<8.0.0,>=7.3.2",
"pyflakes<4.0.0,>=3.0.1",
"pyright<2.0.0,>=1.1.314",
"pytest-asyncio>=0.23.7",
"asgi-lifespan>=2.1.0",
"pytest-cov>=5.0.0",
"mkdocs-material>=9.5.34",
"mkdocstrings-python>=1.10.9",
"griffe-typingdoc>=0.2.6",
]
[tool.pdm.build]
includes = ["src/materia"]
[tool.pdm.scripts]
start.cmd = "python ./src/materia/main.py {args:start --app-mode development --log-level debug}"
setup.cmd = "psql -U postgres -h 127.0.0.1 -p 54320 -d postgres -c 'create role materia login;' -c 'create database materia owner materia;'"
teardown.cmd = "psql -U postgres -h 127.0.0.1 -p 54320 -d postgres -c 'drop database materia;' -c 'drop role materia;'"
rev.cmd = "alembic revision {args:--autogenerate}"
upgrade.cmd = "alembic upgrade {args:head}"
downgrade.cmd = "alembic downgrade {args:base}"
remove-revs.shell = "rm -v ./src/materia/models/migrations/versions/*.py"

View File

@ -1,84 +0,0 @@
from os import environ
from pathlib import Path
from typing import Self
from pydantic import BaseModel
import toml
class Database(BaseModel):
host: str
port: int
user: str
password: str
name: str
class Server(BaseModel):
address: str
port: int
class Jwt(BaseModel):
secret: str
expires_in: str
maxage: int
class Config(BaseModel):
database: Database
server: Server
jwt: Jwt
@staticmethod
def default() -> Self:
return Config(**{
"database": Database(**{
"host": "localhost",
"port": 5432,
"user": "materia",
"password": "test",
"name": "materia"
}),
"server": Server(**{
"address": "127.0.0.1",
"port": 54601
}),
"jwt": Jwt(**{
"secret": "change_this_secret",
"expires_in": "60m",
"maxage": 3600
})
})
def database_url(self) -> str:
return "postgresql+asyncpg://{}:{}@{}:{}/{}".format(
self.database.user,
self.database.password,
self.database.host,
self.database.port,
self.database.name
)
@staticmethod
def open(path: Path) -> Self | None:
try:
data: dict = toml.load(path)
except:
return None
else:
return Config(**data)
def write(self, path: Path):
with open(path, "w") as file:
toml.dump(self.model_dump(), file)
@staticmethod
def data_dir() -> Path:
cwd = Path.cwd()
if environ.get("MATERIA_DEBUG"):
return cwd / "temp"
else:
return cwd
# initialize config
config = Config.open(Config.data_dir().joinpath("config.toml"))
if not config:
config = Config.default()

View File

@ -1,87 +0,0 @@
from contextlib import asynccontextmanager
from pathlib import Path
from typing import AsyncIterator
from alembic.config import Config
from alembic.operations import Operations
from alembic.runtime.migration import MigrationContext
from alembic.script.base import ScriptDirectory
from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, AsyncSession, create_async_engine, async_sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from asyncpg import Connection
class DatabaseManager:
    """Owns the async engine/sessionmaker pair and applies Alembic migrations."""

    def __init__(self):
        self.engine: AsyncEngine | None = None
        self.sessionmaker: async_sessionmaker[AsyncSession] | None = None
        self.database_url: str | None = None

    @staticmethod
    def from_url(database_url: str):
        """Build a manager with an engine and session factory bound to `database_url`."""
        instance = DatabaseManager()
        instance.database_url = database_url
        instance.engine = create_async_engine(database_url, pool_size = 100)
        instance.sessionmaker = async_sessionmaker(bind = instance.engine, autocommit = False, autoflush = False)
        return instance

    async def dispose(self):
        """Close the engine and reset all state; raises if never initialized."""
        if self.engine is None:
            raise Exception("DatabaseManager engine is not initialized")
        await self.engine.dispose()
        self.database_url = None
        self.engine = None
        self.sessionmaker = None

    @asynccontextmanager
    async def connection(self) -> AsyncIterator[AsyncConnection]:
        """Yield a transactional connection; roll back and re-raise on error."""
        if self.engine is None:
            raise Exception("DatabaseManager engine is not initialized")
        async with self.engine.begin() as connection:
            try:
                yield connection
            except Exception:
                await connection.rollback()
                # Bare `raise` preserves the original traceback; `raise e`
                # appended a redundant re-raise frame.
                raise

    @asynccontextmanager
    async def session(self) -> AsyncIterator[AsyncSession]:
        """Yield an AsyncSession; roll back on error and always close it."""
        if self.sessionmaker is None:
            raise Exception("DatabaseManager session is not initialized")
        session = self.sessionmaker()
        try:
            yield session
        except Exception:
            await session.rollback()
            raise
        finally:
            await session.close()

    def run_migrations(self, connection: Connection):
        """Run the Alembic upgrade-to-head against `connection` using alembic.ini."""
        if self.engine is None:
            raise Exception("DatabaseManager engine is not initialized")
        config = Config(Path("alembic.ini"))
        config.set_main_option("sqlalchemy.url", self.database_url)  # type: ignore
        context = MigrationContext.configure(
            connection = connection,  # type: ignore
            opts = {
                "target_metadata": Base.metadata,
                "fn": lambda rev, _: ScriptDirectory.from_config(config)._upgrade_revs("head", rev)
            }
        )
        with context.begin_transaction():
            with Operations.context(context):
                context.run_migrations()
# Declarative base shared by all ORM models (see the src.db.user import below).
Base = declarative_base()
from src.db.user import User

View File

@ -1,42 +0,0 @@
"""Create a baseline migrations
Revision ID: 269db1cef2c9
Revises:
Create Date: 2024-05-08 18:48:41.969272
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# Baseline (root) migration: no down_revision, branches, or dependencies.
revision: str = '269db1cef2c9'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Apply the baseline schema: create the `user` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('user',
        sa.Column('id', sa.Uuid(), nullable=False),
        sa.Column('login_name', sa.String(), nullable=False),
        sa.Column('hashed_password', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('email', sa.String(), nullable=False),
        sa.Column('is_admin', sa.Boolean(), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('must_change_password', sa.Boolean(), nullable=False),
        sa.Column('avatar', sa.String(), nullable=False),
        # The only nullable column — presumably a Unix timestamp; see the
        # User ORM model.
        sa.Column('created_unix', sa.BigInteger(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert the baseline schema: drop the `user` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('user')
    # ### end Alembic commands ###

View File

@ -1,21 +0,0 @@
from uuid import UUID, uuid4
from typing import Optional
from sqlalchemy import Column, BigInteger
from sqlalchemy.orm import mapped_column, Mapped
from src.db import Base
class User(Base):
    """ORM model backing the `user` table (see the baseline migration)."""

    __tablename__ = "user"

    id: Mapped[UUID] = mapped_column(primary_key = True, default = uuid4)
    login_name: Mapped[str]
    hashed_password: Mapped[str]
    name: Mapped[str]
    email: Mapped[str]
    is_admin: Mapped[bool]
    is_active: Mapped[bool]
    must_change_password: Mapped[bool]
    avatar: Mapped[str]
    # Consistency fix: use the SQLAlchemy 2.0 `mapped_column` style like the
    # other fields instead of the legacy `Column(BigInteger)`; Optional makes
    # the column nullable, matching the migration (nullable=True).
    created_unix: Mapped[Optional[int]] = mapped_column(BigInteger)

View File

@ -1,49 +0,0 @@
from contextlib import asynccontextmanager
from os import environ
import uvicorn
from fastapi import FastAPI
from src.config import config
from src.db import DatabaseManager
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Initialize the database pool and apply migrations on startup;
    dispose of the pool on shutdown."""
    db = DatabaseManager.from_url(config.database_url())  # type: ignore
    app.state.config = config
    app.state.pool = db
    async with db.connection() as conn:
        await conn.run_sync(db.run_migrations)  # type: ignore
    yield
    if db.engine is not None:
        await db.dispose()
# ASGI application; interactive docs at /api/docs, lifecycle handled by
# the `lifespan` context manager above.
app = FastAPI(
    title = "materia",
    version = "0.1.0",
    docs_url = "/api/docs",
    lifespan = lifespan
)
def main():
    """Run the uvicorn server using host/port from the loaded config;
    auto-reload is enabled when MATERIA_DEBUG is set."""
    debug = bool(environ.get("MATERIA_DEBUG"))
    uvicorn.run(
        "src.main:app",
        host = config.server.address,
        port = config.server.port,
        reload = debug,
    )
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop the dev server; exit quietly.
        pass

3
src/materia/__main__.py Normal file
View File

@ -0,0 +1,3 @@
# Package entry point: `python -m materia` delegates to the click CLI.
from materia.app import cli

cli()

View File

@ -0,0 +1,2 @@
from materia.app.app import Context, Application
from materia.app.cli import cli

171
src/materia/app/app.py Normal file
View File

@ -0,0 +1,171 @@
from contextlib import _AsyncGeneratorContextManager, asynccontextmanager
import os
import sys
from typing import AsyncIterator, TypedDict, Self, Optional
from pathlib import Path
import uvicorn
from fastapi import FastAPI
from fastapi.routing import APIRoute
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from materia.core import (
Config,
Logger,
LoggerInstance,
Database,
Cache,
Cron,
)
from materia import routers
from materia.core.misc import optional, optional_string
class Context(TypedDict):
    """Shared application state yielded by the FastAPI lifespan
    (see Application.prepare_server)."""

    config: Config
    logger: LoggerInstance
    database: Database
    cache: Cache
class ApplicationError(Exception):
    """Application-level assembly/startup failure."""
    pass
class Application:
    """Wires together config, logging, database, cache, cron and the FastAPI
    backend, and runs the uvicorn server."""

    def __init__(
        self,
        config: Config,
    ):
        self.config: Config = config
        self.logger: Optional[LoggerInstance] = None
        self.database: Optional[Database] = None
        self.cache: Optional[Cache] = None
        self.cron: Optional[Cron] = None
        self.backend: Optional[FastAPI] = None

        # The logger is needed by every other prepare_* step, so it is the
        # only component initialized synchronously in the constructor.
        self.prepare_logger()

    @staticmethod
    async def new(config: Config):
        """Async factory: build an Application and initialize all subsystems.

        Exits the process with status 1 when any subsystem fails.
        """
        app = Application(config)

        # if user := config.application.user:
        #     os.setuid(pwd.getpwnam(user).pw_uid)
        # if group := config.application.group:
        #     os.setgid(pwd.getpwnam(user).pw_gid)

        app.logger.debug("Initializing application...")
        await app.prepare_working_directory()

        try:
            await app.prepare_database()
            await app.prepare_cache()
            await app.prepare_cron()
            app.prepare_server()
        except Exception as e:
            # Bug fix: `" ".join(e.args)` raised TypeError whenever an
            # exception argument was not a string; str(e) is always safe.
            # Also exit non-zero so supervisors see the failure.
            app.logger.error(str(e))
            sys.exit(1)

        try:
            import materia_frontend
        except ModuleNotFoundError:
            app.logger.warning(
                "`materia_frontend` is not installed. No user interface will be served."
            )

        return app

    def prepare_logger(self):
        """Create the logger from the [log] section of the config."""
        self.logger = Logger.new(**self.config.log.model_dump())

    async def prepare_working_directory(self):
        """chdir into the configured working directory; exit(1) on failure."""
        try:
            path = self.config.application.working_directory.resolve()
            self.logger.debug(f"Changing working directory to {path}")
            os.chdir(path)
        except FileNotFoundError as e:
            self.logger.error("Failed to change working directory: {}", e)
            sys.exit(1)

    async def prepare_database(self):
        """Connect to the database configured in [database]."""
        url = self.config.database.url()
        self.logger.info("Connecting to database {}", url)
        self.database = await Database.new(url)  # type: ignore

    async def prepare_cache(self):
        """Connect to the cache server configured in [cache]."""
        url = self.config.cache.url()
        self.logger.info("Connecting to cache server {}", url)
        self.cache = await Cache.new(url)  # type: ignore

    async def prepare_cron(self):
        """Create the cron scheduler; the cache server doubles as both the
        result backend and the broker."""
        url = self.config.cache.url()
        self.logger.info("Preparing cron")
        self.cron = Cron.new(
            self.config.cron.workers_count, backend_url=url, broker_url=url
        )

    def prepare_server(self):
        """Build the FastAPI app: lifespan, CORS middleware and routers."""

        @asynccontextmanager
        async def lifespan(app: FastAPI) -> AsyncIterator[Context]:
            # The yielded mapping becomes the lifespan state shared with
            # request handlers.
            yield Context(
                config=self.config,
                logger=self.logger,
                database=self.database,
                cache=self.cache,
            )
            # Shutdown: release the database engine if it is still alive.
            if self.database.engine is not None:
                await self.database.dispose()

        self.backend = FastAPI(
            title="materia",
            version="0.1.0",
            docs_url=None,
            redoc_url=None,
            swagger_ui_init_oauth=None,
            swagger_ui_oauth2_redirect_url=None,
            openapi_url="/api/openapi.json",
            lifespan=lifespan,
        )
        self.backend.add_middleware(
            CORSMiddleware,
            allow_origins=["http://localhost", "http://localhost:5173"],
            allow_credentials=True,
            allow_methods=["*"],
            allow_headers=["*"],
        )
        self.backend.include_router(routers.docs.router)
        self.backend.include_router(routers.api.router)
        self.backend.include_router(routers.resources.router)
        self.backend.include_router(routers.root.router)

        # Derive stable OpenAPI operation ids of the form
        # "<first_tag>_<route_name>" for client generation.
        for route in self.backend.routes:
            if isinstance(route, APIRoute):
                route.operation_id = (
                    optional_string(optional(route.tags.__getitem__, 0), "{}_")
                    + route.name
                )

    async def start(self):
        """Start cron workers, run migrations, then serve HTTP until exit."""
        self.logger.info(f"Spinning up cron workers [{self.config.cron.workers_count}]")
        self.cron.run_workers()

        try:
            self.logger.info("Running database migrations")
            await self.database.run_migrations()

            uvicorn_config = uvicorn.Config(
                self.backend,
                port=self.config.server.port,
                host=str(self.config.server.address),
                log_config=Logger.uvicorn_config(self.config.log.level),
            )
            server = uvicorn.Server(uvicorn_config)
            await server.serve()
        except (KeyboardInterrupt, SystemExit):
            self.logger.info("Exiting...")
            sys.exit()
        except Exception as e:
            # Same str(e) fix as in `new`; exit non-zero on failure.
            self.logger.error(str(e))
            sys.exit(1)

12
src/materia/app/asgi.py Normal file
View File

@ -0,0 +1,12 @@
from os import environ
from pathlib import Path
from uvicorn.workers import UvicornWorker
from materia.config import Config
from materia._logging import uvicorn_log_config
class MateriaWorker(UvicornWorker):
    """Uvicorn worker for gunicorn, pinned to uvloop and the materia log
    config.

    Requires the MATERIA_CONFIG environment variable to point at the
    configuration file; evaluated once at import time.
    NOTE(review): this module imports `materia.config` / `materia._logging`,
    while the rest of the tree uses `materia.core.*` — verify these modules
    still exist.
    """
    CONFIG_KWARGS = {
        "loop": "uvloop",
        "log_config": uvicorn_log_config(Config.open(Path(environ["MATERIA_CONFIG"]).resolve()))
    }

152
src/materia/app/cli.py Normal file
View File

@ -0,0 +1,152 @@
from pathlib import Path
import sys
import click
from materia.core.config import Config
from materia.core.logging import Logger
from materia.app import Application
import asyncio
import json
@click.group()
def cli():
    # Root command group; subcommands: start, config, export.
    pass
@cli.command()
@click.option("--config", type=Path)
@click.option("--debug", "-d", is_flag=True, default=False, help="Enable debug output.")
def start(config: Path, debug: bool):
    # Start the application server with an explicit config file, a config
    # found in the data directory, or built-in defaults.
    config_path = config
    logger = Logger.new()

    # check the configuration file or use default
    if config_path is not None:
        config_path = config_path.resolve()
        try:
            logger.debug("Reading configuration file at {}", config_path)
            if not config_path.exists():
                logger.error("Configuration file was not found at {}.", config_path)
                sys.exit(1)
            else:
                config = Config.open(config_path.resolve())
        except Exception as e:
            logger.error("Failed to read configuration file: {}", e)
            sys.exit(1)
    else:
        # trying to find configuration file in the current working directory
        config_path = Config.data_dir().joinpath("config.toml")
        if config_path.exists():
            logger.info("Found configuration file in the current working directory.")
            try:
                config = Config.open(config_path)
            except Exception as e:
                logger.error("Failed to read configuration file: {}", e)
                # Bug fix: previously execution fell through with `config`
                # unbound (causing a NameError below), and a try/else branch
                # discarded a successfully loaded config in favor of defaults.
                sys.exit(1)
        else:
            logger.info("Using the default configuration.")
            config = Config()

    if debug:
        config.log.level = "debug"

    async def main():
        app = await Application.new(config)
        await app.start()

    asyncio.run(main())
@cli.group()
def config():
    # Configuration file management subcommands: create, check.
    pass
@config.command("create", help="Create a new configuration file.")
@click.option(
    "--path",
    "-p",
    type=Path,
    default=Path.cwd().joinpath("config.toml"),
    help="Path to the file.",
)
@click.option(
    "--force", "-f", is_flag=True, default=False, help="Overwrite a file if exists."
)
def config_create(path: Path, force: bool):
    # Write a default configuration file, refusing to clobber an existing
    # one unless --force is given.
    target = path.resolve()
    logger = Logger.new()

    if target.exists() and not force:
        logger.warning("File already exists at the given path. Exit.")
        sys.exit(1)

    if not target.parent.exists():
        logger.info("Creating directory at {}", target)
        target.parent.mkdir(parents=True)

    logger.info("Writing configuration file at {}", target)
    Config().write(target)
    logger.info("All done.")
@config.command("check", help="Check the configuration file.")
@click.option(
    "--path",
    "-p",
    type=Path,
    default=Path.cwd().joinpath("config.toml"),
    help="Path to the file.",
)
def config_check(path: Path):
    # Validate that the configuration file exists and parses cleanly.
    logger = Logger.new()
    resolved = path.resolve()

    if not resolved.exists():
        logger.error("Configuration file was not found at the given path. Exit.")
        sys.exit(1)

    try:
        Config.open(resolved)
    except Exception as e:
        logger.error("{}", e)
    else:
        logger.info("OK.")
@cli.group()
def export():
    # Export subcommands: openapi.
    pass
@export.command("openapi", help="Export an OpenAPI specification.")
@click.option(
    "--path",
    "-p",
    type=Path,
    default=Path.cwd().joinpath("openapi.json"),
    help="Path to the file.",
)
def export_openapi(path: Path):
    # Build the FastAPI app without starting it and dump its OpenAPI schema.
    path = path.resolve()
    logger = Logger.new()

    config = Config()
    app = Application(config)
    app.prepare_server()

    logger.info("Writing file at {}", path)
    try:
        with open(path, "w") as io:
            json.dump(app.backend.openapi(), io, sort_keys=False)
    except Exception as e:
        # Bug fix: previously fell through to "All done." and exit code 0
        # even when the export failed.
        logger.error("{}", e)
        sys.exit(1)

    logger.info("All done.")
# Allow running this module directly as well as via the package entry point.
if __name__ == "__main__":
    cli()

29
src/materia/app/wsgi.py Normal file
View File

@ -0,0 +1,29 @@
from gunicorn.app.wsgiapp import WSGIApplication
import multiprocessing
class MateriaProcessManager(WSGIApplication):
    """Programmatic gunicorn launcher for the given app URI and options."""

    def __init__(self, app: str, options: dict | None = None):
        self.app_uri = app
        self.options = options or {}
        super().__init__()

    def load_config(self):
        # Push every recognized, non-None option into gunicorn's settings.
        for name, value in self.options.items():
            if name in self.cfg.settings and value is not None:
                self.cfg.set(name.lower(), value)
def run():
    """Launch the application under gunicorn with MateriaWorker workers."""
    # Classic gunicorn sizing heuristic: 2 * cores + 1.
    worker_count = (multiprocessing.cpu_count() * 2) + 1
    options = {
        "bind": "0.0.0.0:8000",
        "workers": worker_count,
        "worker_class": "materia.app.wsgi.MateriaWorker",
        "raw_env": ["FOO=1"],
        "user": None,
        "group": None
    }
    manager = MateriaProcessManager("materia.app.app:run", options)
    manager.run()

View File

@ -0,0 +1,13 @@
from materia.core.logging import Logger, LoggerInstance, LogLevel, LogMode
from materia.core.database import (
DatabaseError,
DatabaseMigrationError,
Database,
SessionMaker,
SessionContext,
ConnectionContext,
)
from materia.core.filesystem import FileSystem, FileSystemError, TemporaryFileTarget
from materia.core.config import Config
from materia.core.cache import Cache, CacheError
from materia.core.cron import Cron, CronError

56
src/materia/core/cache.py Normal file
View File

@ -0,0 +1,56 @@
from contextlib import asynccontextmanager
from typing import Any, AsyncGenerator, Self
from pydantic import RedisDsn
from redis import asyncio as aioredis
from redis.asyncio.client import Pipeline
from materia.core.logging import Logger
class CacheError(Exception):
    """Raised for cache (redis) connection or command failures."""
    pass
class Cache:
    """Async Redis connection-pool wrapper."""

    def __init__(self, url: RedisDsn, pool: aioredis.ConnectionPool):
        self.url: RedisDsn = url
        self.pool: aioredis.ConnectionPool = pool

    @staticmethod
    async def new(
        url: RedisDsn,
        encoding: str = "utf-8",
        decode_responses: bool = True,
        test_connection: bool = True,
    ) -> Self:
        """Create a pool for `url` and optionally verify connectivity.

        Raises CacheError when the test connection fails.
        """
        pool = aioredis.ConnectionPool.from_url(
            str(url), encoding=encoding, decode_responses=decode_responses
        )

        if test_connection:
            try:
                if logger := Logger.instance():
                    logger.debug("Testing cache connection")
                connection = pool.make_connection()
                await connection.connect()
            except ConnectionError as e:
                # Chain the original exception for easier debugging
                # (consistent with the Database error handling).
                raise CacheError(f"{e}") from e
            else:
                await connection.disconnect()

        return Cache(url=url, pool=pool)

    @asynccontextmanager
    async def client(self) -> AsyncGenerator[aioredis.Redis, Any]:
        """Yield a Redis client bound to the shared pool."""
        try:
            yield aioredis.Redis(connection_pool=self.pool)
        except Exception as e:
            raise CacheError(f"{e}") from e

    @asynccontextmanager
    async def pipeline(self, transaction: bool = True) -> AsyncGenerator[Pipeline, Any]:
        """Yield a command pipeline from a pooled client."""
        # NOTE(review): awaiting Redis(...) triggers client initialization
        # here, while client() above does not await — confirm the asymmetry
        # is intended.
        client = await aioredis.Redis(connection_pool=self.pool)
        try:
            yield client.pipeline(transaction=transaction)
        except Exception as e:
            raise CacheError(f"{e}") from e

193
src/materia/core/config.py Normal file
View File

@ -0,0 +1,193 @@
from os import environ
from pathlib import Path
from typing import Literal, Optional, Self, Union
from pydantic import (
BaseModel,
Field,
NameEmail,
)
from pydantic_settings import BaseSettings
from pydantic.networks import IPvAnyAddress
import toml
class Application(BaseModel):
    """[application] section: process identity and runtime mode."""

    # Unix user/group the service is expected to run as.
    user: str = "materia"
    group: str = "materia"
    mode: Literal["production", "development"] = "production"
    # Directory the process chdirs into on startup; defaults to the CWD.
    working_directory: Optional[Path] = Path.cwd()
class Log(BaseModel):
    """[log] section: sink selection, level and loguru format strings."""

    # Where to emit logs: stdout/stderr, a file, or both.
    mode: Literal["console", "file", "all"] = "console"
    level: Literal["info", "warning", "error", "critical", "debug", "trace"] = "info"
    console_format: str = (
        "<level>{level: <8}</level> <green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> - {message}"
    )
    file_format: str = (
        "<level>{level: <8}</level>: <green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> - {message}"
    )
    # File sink settings; `file` must be set when mode includes "file".
    file: Optional[Path] = None
    file_rotation: str = "3 days"
    file_retention: str = "1 week"
class Server(BaseModel):
    """[server] section: scheme/address/port the HTTP server binds to."""

    scheme: Literal["http", "https"] = "http"
    address: IPvAnyAddress = Field(default="127.0.0.1")
    port: int = 54601
    domain: str = "localhost"

    def url(self) -> str:
        """Base URL, e.g. `http://127.0.0.1:54601`."""
        return f"{self.scheme}://{self.address}:{self.port}"
class Database(BaseModel):
    """[database] section: PostgreSQL connection parameters."""

    backend: Literal["postgresql"] = "postgresql"
    scheme: Literal["postgresql+asyncpg"] = "postgresql+asyncpg"
    address: IPvAnyAddress = Field(default="127.0.0.1")
    port: int = 5432
    name: Optional[str] = "materia"
    user: str = "materia"
    password: Optional[Union[str, Path]] = None
    # ssl: bool = False

    def url(self) -> str:
        """Build the SQLAlchemy connection URL.

        Raises NotImplementedError for unsupported backends.
        """
        if self.backend in ["postgresql"]:
            # Bug fix: the conditional previously spanned the whole
            # expression (`a + b if name else ""`), so a falsy `name`
            # returned "" instead of the base URL without a database path.
            base = "{}://{}:{}@{}:{}".format(
                self.scheme, self.user, self.password, self.address, self.port
            )
            return base + (f"/{self.name}" if self.name else "")
        else:
            raise NotImplementedError()
class Cache(BaseModel):
    """[cache] section: Redis connection parameters."""

    backend: Literal["redis"] = "redis"  # add: memory
    # gc_interval: Optional[int] = 60 # for: memory
    scheme: Literal["redis", "rediss"] = "redis"
    address: Optional[IPvAnyAddress] = Field(default="127.0.0.1")
    port: Optional[int] = 6379
    user: Optional[str] = None
    password: Optional[Union[str, Path]] = None
    database: Optional[int] = 0  # for: redis

    def url(self) -> str:
        """Build the redis connection URL; credentials are included only
        when both user and password are set.

        Raises NotImplementedError for unsupported backends.
        """
        if self.backend in ["redis"]:
            if self.user and self.password:
                return "{}://{}:{}@{}:{}/{}".format(
                    self.scheme,
                    self.user,
                    self.password,
                    self.address,
                    self.port,
                    self.database,
                )
            else:
                return "{}://{}:{}/{}".format(
                    self.scheme, self.address, self.port, self.database
                )
        else:
            # Bug fix: `NotImplemented` is a constant, not an exception
            # type; calling it raised TypeError instead of the intended
            # NotImplementedError.
            raise NotImplementedError()
class Security(BaseModel):
    """[security] section: password policy and auth cookie names."""

    # May be an inline secret or a path to a file holding it.
    secret_key: Optional[Union[str, Path]] = None
    password_min_length: int = 8
    password_hash_algo: Literal["bcrypt"] = "bcrypt"
    cookie_http_only: bool = True
    # Cookie names for the access/refresh tokens.
    cookie_access_token_name: str = "materia_at"
    cookie_refresh_token_name: str = "materia_rt"
class OAuth2(BaseModel):
    """[oauth2] section: JWT signing and token lifetimes."""

    enabled: bool = True
    jwt_signing_algo: Literal["HS256"] = "HS256"
    # check if signing algo need a key or generate it | HS256, HS384, HS512, RS256, RS384, RS512, ES256, ES384, ES512, EdDSA
    jwt_signing_key: Optional[Union[str, Path]] = None
    jwt_secret: Optional[Union[str, Path]] = (
        None  # only for HS256, HS384, HS512 | generate
    )
    # Lifetimes in seconds; refresh default is 730 * 60 — TODO confirm the
    # intended unit/duration.
    access_token_lifetime: int = 3600
    refresh_token_lifetime: int = 730 * 60
    refresh_token_validation: bool = False

    # @model_validator(mode = "after")
    # def check(self) -> Self:
    #     if self.jwt_signing_algo in ["HS256", "HS384", "HS512"]:
    #         assert self.jwt_secret is not None, "JWT secret must be set for HS256, HS384, HS512 algorithms"
    #     else:
    #         assert self.jwt_signing_key is not None, "JWT signing key must be set"
    #
    #     return self
class Mailer(BaseModel):
    """[mailer] section: optional SMTP delivery settings (disabled by default)."""

    enabled: bool = False
    scheme: Optional[Literal["smtp", "smtps", "smtp+starttls"]] = None
    address: Optional[IPvAnyAddress] = None
    port: Optional[int] = None
    helo: bool = True
    # TLS material for smtps / smtp+starttls.
    cert_file: Optional[Path] = None
    key_file: Optional[Path] = None
    # Sender identity; trailing underscore avoids the `from` keyword.
    from_: Optional[NameEmail] = None
    user: Optional[str] = None
    password: Optional[str] = None
    plain_text: bool = False
class Cron(BaseModel):
    """[cron] section: number of background worker threads to spawn."""

    workers_count: int = 1
class Repository(BaseModel):
    """[repository] section: per-repository storage quota."""

    # 5 << 30 bytes = 5 GiB.
    capacity: int = 5 << 30
class Config(BaseSettings, env_prefix="materia_", env_nested_delimiter="__"):
    """Top-level application settings, loadable from a TOML file and/or
    MATERIA_-prefixed environment variables."""

    application: Application = Application()
    log: Log = Log()
    server: Server = Server()
    database: Database = Database()
    cache: Cache = Cache()
    security: Security = Security()
    oauth2: OAuth2 = OAuth2()
    mailer: Mailer = Mailer()
    cron: Cron = Cron()
    repository: Repository = Repository()

    @staticmethod
    def open(path: Path) -> Self:
        """Parse `path` as TOML and validate it into a Config.

        Propagates any parse/validation error to the caller. (The previous
        `except Exception as e: raise e` was a no-op, and the `| None` in
        the annotation was never actually returned.)
        """
        data: dict = toml.load(path)
        return Config(**data)

    def write(self, path: Path):
        """Dump the current settings to `path` as TOML."""
        dump = self.model_dump()
        # TODO: make normal filter or check model_dump abilities
        # Paths are not TOML-serializable; stringify them one nesting
        # level deep (all current sections are exactly one level).
        for key_first in dump.keys():
            for key_second in dump[key_first].keys():
                if isinstance(dump[key_first][key_second], Path):
                    dump[key_first][key_second] = str(dump[key_first][key_second])
        with open(path, "w") as file:
            toml.dump(dump, file)

    @staticmethod
    def data_dir() -> Path:
        """./temp under the CWD when MATERIA_DEBUG is set, else the CWD."""
        cwd = Path.cwd()
        if environ.get("MATERIA_DEBUG"):
            return cwd / "temp"
        else:
            return cwd

72
src/materia/core/cron.py Normal file
View File

@ -0,0 +1,72 @@
from typing import Optional, Self
from celery import Celery
from pydantic import RedisDsn
from threading import Thread
from materia.core.logging import Logger
class CronError(Exception):
    """Raised when the cron scheduler cannot reach its broker."""
    pass
class Cron:
    """Celery-backed background job runner with in-process worker threads."""

    # Process-wide singleton, set by the constructor.
    __instance__: Optional[Self] = None

    def __init__(
        self,
        workers_count: int,
        backend: Celery,
    ):
        self.workers_count = workers_count
        self.backend = backend
        self.workers = []
        self.worker_threads = []

        Cron.__instance__ = self

    @staticmethod
    def new(
        workers_count: int = 1,
        backend_url: Optional[RedisDsn] = None,
        broker_url: Optional[RedisDsn] = None,
        test_connection: bool = True,
        **kwargs,
    ):
        """Build a Cron with `workers_count` Celery workers; extra kwargs go
        to the Celery constructor.

        Raises CronError when the broker cannot be reached.
        NOTE(review): backend/broker are passed as RedisDsn objects —
        confirm Celery accepts non-str URL objects here.
        """
        cron = Cron(
            workers_count,
            # TODO: change log level
            # TODO: exclude pickle
            # TODO: disable startup banner
            Celery(
                "cron",
                backend=backend_url,
                broker=broker_url,
                broker_connection_retry_on_startup=True,
                task_serializer="pickle",
                accept_content=["pickle", "json"],
                **kwargs,
            ),
        )

        for _ in range(workers_count):
            cron.workers.append(cron.backend.Worker())

        if test_connection:
            try:
                if logger := Logger.instance():
                    logger.debug("Testing cron broker connection")
                cron.backend.broker_connection().ensure_connection(max_retries=3)
            except Exception as e:
                raise CronError(f"Failed to connect cron broker: {broker_url}") from e

        return cron

    @staticmethod
    def instance() -> Optional[Self]:
        """Return the most recently constructed Cron, if any."""
        return Cron.__instance__

    def run_workers(self):
        """Start each worker in a daemon thread (non-blocking)."""
        for worker in self.workers:
            thread = Thread(target=worker.start, daemon=True)
            self.worker_threads.append(thread)
            thread.start()

View File

@ -0,0 +1,173 @@
from contextlib import asynccontextmanager
from typing import AsyncIterator, Self, TypeAlias
from pathlib import Path
from pydantic import PostgresDsn, ValidationError
from sqlalchemy.ext.asyncio import (
AsyncConnection,
AsyncEngine,
AsyncSession,
async_sessionmaker,
create_async_engine,
)
from sqlalchemy.pool import NullPool
from asyncpg import Connection
from alembic.config import Config as AlembicConfig
from alembic.operations import Operations
from alembic.runtime.migration import MigrationContext
from alembic.script.base import ScriptDirectory
import alembic_postgresql_enum
from fastapi import HTTPException
from materia.core.logging import Logger
class DatabaseError(Exception):
    """General database-layer failure."""
    pass


class DatabaseMigrationError(Exception):
    """Raised when applying or rolling back Alembic migrations fails."""
    pass


# Convenience aliases for the async context-manager / factory types below.
SessionContext: TypeAlias = AsyncIterator[AsyncSession]
SessionMaker: TypeAlias = async_sessionmaker[AsyncSession]
ConnectionContext: TypeAlias = AsyncIterator[AsyncConnection]
class Database:
    """Async SQLAlchemy engine/session factory plus Alembic migration glue."""

    def __init__(
        self,
        url: PostgresDsn,
        engine: AsyncEngine,
        sessionmaker: SessionMaker,
    ):
        self.url: PostgresDsn = url
        self.engine: AsyncEngine = engine
        self.sessionmaker: SessionMaker = sessionmaker

    @staticmethod
    async def new(
        url: PostgresDsn,
        pool_size: int = 100,
        poolclass=None,
        autocommit: bool = False,
        autoflush: bool = False,
        expire_on_commit: bool = False,
        test_connection: bool = True,
    ) -> Self:
        """Create an engine and sessionmaker for `url`, optionally verifying
        connectivity.

        Raises DatabaseError when the test connection fails.
        """
        engine_options = {"pool_size": pool_size}
        if poolclass == NullPool:
            # NullPool does not accept pool_size.
            engine_options = {"poolclass": NullPool}
        engine = create_async_engine(str(url), **engine_options)
        sessionmaker = async_sessionmaker(
            bind=engine,
            autocommit=autocommit,
            autoflush=autoflush,
            expire_on_commit=expire_on_commit,
        )
        database = Database(url=url, engine=engine, sessionmaker=sessionmaker)

        if test_connection:
            try:
                if logger := Logger.instance():
                    logger.debug("Testing database connection")
                async with database.connection() as connection:
                    await connection.rollback()
            except Exception as e:
                raise DatabaseError(
                    f"Failed to connect to database '{url}': {e}"
                ) from e

        return database

    async def dispose(self):
        """Close the engine and its connection pool."""
        await self.engine.dispose()

    @asynccontextmanager
    async def connection(self) -> ConnectionContext:
        """Yield a raw connection; roll back and wrap errors in DatabaseError."""
        async with self.engine.connect() as connection:
            try:
                yield connection
            except Exception as e:
                await connection.rollback()
                raise DatabaseError(*e.args) from e

    @asynccontextmanager
    async def session(self) -> SessionContext:
        """Yield an ORM session; roll back on error and always close."""
        session = self.sessionmaker()
        try:
            yield session
        except (HTTPException, ValidationError) as e:
            # API-level exceptions pass through untouched so the web layer
            # can translate them into responses.
            await session.rollback()
            raise e from None
        except Exception as e:
            await session.rollback()
            raise e  # DatabaseError(*e.args) from e
        finally:
            await session.close()

    def _alembic_config(self) -> AlembicConfig:
        """Alembic config pointing at materia/models/migrations."""
        aconfig = AlembicConfig()
        aconfig.set_main_option("sqlalchemy.url", str(self.url))
        aconfig.set_main_option(
            "script_location",
            str(Path(__file__).parent.parent.joinpath("models", "migrations")),
        )
        return aconfig

    def _run_migration_fn(self, connection: Connection, fn):
        """Shared driver for upgrade/downgrade: run revision function `fn`
        inside a migration transaction (deduplicates the two paths below)."""
        from materia.models.base import Base

        context = MigrationContext.configure(
            connection=connection,  # type: ignore
            opts={
                "target_metadata": Base.metadata,
                "fn": fn,
            },
        )

        try:
            with context.begin_transaction():
                with Operations.context(context):
                    context.run_migrations()
        except Exception as e:
            # Chain the cause so migration failures keep their traceback.
            raise DatabaseMigrationError(f"{e}") from e

    def run_sync_migrations(self, connection: Connection):
        """Upgrade the schema to `head` (synchronous; see run_migrations)."""
        aconfig = self._alembic_config()
        self._run_migration_fn(
            connection,
            lambda rev, _: ScriptDirectory.from_config(aconfig)._upgrade_revs(
                "head", rev
            ),
        )

    async def run_migrations(self):
        """Apply all pending migrations."""
        async with self.connection() as connection:
            await connection.run_sync(self.run_sync_migrations)  # type: ignore

    def rollback_sync_migrations(self, connection: Connection):
        """Downgrade the schema to `base` (synchronous; see rollback_migrations)."""
        aconfig = self._alembic_config()
        self._run_migration_fn(
            connection,
            lambda rev, _: ScriptDirectory.from_config(aconfig)._downgrade_revs(
                "base", rev
            ),
        )

    async def rollback_migrations(self):
        """Revert every applied migration."""
        async with self.connection() as connection:
            await connection.run_sync(self.rollback_sync_migrations)  # type: ignore

View File

@ -0,0 +1,235 @@
from typing import Optional, Self, Iterator, TypeVar
from pathlib import Path
import aiofiles
from aiofiles import os as async_os
from aiofiles import ospath as async_path
import aioshutil
import re
from tempfile import NamedTemporaryFile
from streaming_form_data.targets import BaseTarget
from uuid import uuid4
from materia.core.misc import optional
# Matches absolute POSIX-style paths: "/", "/a/b/", "/a/b/file".
valid_path = re.compile(r"^/(.*/)*([^/]*)$")
class FileSystemError(Exception):
    """Raised for filesystem-layer failures (wraps OSError and friends)."""
    pass
class FileSystem:
    """Async helper around a single path: existence/type checks, move,
    copy, rename, remove, with optional isolation to a sandbox directory."""

    def __init__(self, path: Path, isolated_directory: Optional[Path] = None):
        if path == Path() or path is None:
            raise FileSystemError("The given path is empty")

        self.path = path

        if isolated_directory and not isolated_directory.is_absolute():
            raise FileSystemError("The isolated directory must be absolute")
        self.isolated_directory = isolated_directory

        # self.working_directory = working_directory
        # self.relative_path = path.relative_to(working_directory)

    async def exists(self) -> bool:
        return await async_path.exists(self.path)

    async def size(self) -> int:
        return await async_path.getsize(self.path)

    async def is_file(self) -> bool:
        return await async_path.isfile(self.path)

    async def is_directory(self) -> bool:
        return await async_path.isdir(self.path)

    def name(self) -> str:
        return self.path.name

    async def check_isolation(self, path: Path):
        """Raise FileSystemError when `path` escapes or equals the sandbox."""
        if not self.isolated_directory:
            return
        if not (await async_path.exists(self.isolated_directory)):
            raise FileSystemError("Missed isolated directory")
        if not optional(path.relative_to, self.isolated_directory):
            raise FileSystemError(
                "Attempting to work with a path that is outside the isolated directory"
            )
        if self.path == self.isolated_directory:
            raise FileSystemError("Attempting to modify the isolated directory")

    async def remove(self, shallow: bool = False):
        """Delete the file or directory tree; `shallow` skips the actual I/O."""
        await self.check_isolation(self.path)
        try:
            if await self.exists() and await self.is_file() and not shallow:
                await aiofiles.os.remove(self.path)
            if await self.exists() and await self.is_directory() and not shallow:
                await aioshutil.rmtree(str(self.path))
        except OSError as e:
            raise FileSystemError(*e.args) from e

    async def generate_name(self, target_directory: Path, name: str) -> str:
        """Generate name based on target directory contents and self type."""
        count = 1
        new_path = target_directory.joinpath(name)

        while await async_path.exists(new_path):
            if await self.is_file():
                extension = None
                if with_counter := re.match(r"^(.+)\.(\d+)\.(\w+)$", new_path.name):
                    new_name, _, extension = with_counter.groups()
                elif with_extension := re.match(r"^(.+)\.(\w+)$", new_path.name):
                    new_name, extension = with_extension.groups()
                else:
                    # Bug fix: extensionless file names previously left
                    # new_name/extension unbound, raising NameError here.
                    new_name = new_path.name
                if extension:
                    new_path = target_directory.joinpath(
                        "{}.{}.{}".format(new_name, count, extension)
                    )
                else:
                    new_path = target_directory.joinpath(
                        "{}.{}".format(new_name, count)
                    )
            if await self.is_directory():
                if with_counter := re.match(r"^(.+)\.(\d+)$", new_path.name):
                    new_name, _ = with_counter.groups()
                else:
                    new_name = new_path.name
                new_path = target_directory.joinpath("{}.{}".format(new_name, count))
            count += 1

        return new_path.name

    async def _generate_new_path(
        self,
        target_directory: Path,
        new_name: Optional[str] = None,
        force: bool = False,
        shallow: bool = False,
    ) -> Path:
        """Resolve the destination path, uniquifying on conflict when
        force/shallow, otherwise raising."""
        new_name = new_name or self.path.name

        if await async_path.exists(target_directory.joinpath(new_name)):
            if force or shallow:
                new_name = await self.generate_name(target_directory, new_name)
            else:
                raise FileSystemError("Target destination already exists")

        return target_directory.joinpath(new_name)

    async def move(
        self,
        target_directory: Path,
        new_name: Optional[str] = None,
        force: bool = False,
        shallow: bool = False,
    ) -> Self:
        """Move into `target_directory`; returns a FileSystem for the target."""
        await self.check_isolation(self.path)
        new_path = await self._generate_new_path(
            target_directory, new_name, force=force, shallow=shallow
        )
        target = FileSystem(new_path, self.isolated_directory)

        try:
            if await self.exists() and not shallow:
                await aioshutil.move(self.path, new_path)
        except Exception as e:
            raise FileSystemError(*e.args) from e

        return target

    async def rename(
        self, new_name: str, force: bool = False, shallow: bool = False
    ) -> Self:
        """Rename in place (a move within the parent directory)."""
        return await self.move(
            self.path.parent, new_name=new_name, force=force, shallow=shallow
        )

    async def copy(
        self,
        target_directory: Path,
        new_name: Optional[str] = None,
        force: bool = False,
        shallow: bool = False,
    ) -> Self:
        """Copy into `target_directory`; returns a FileSystem for the target."""
        await self.check_isolation(self.path)
        new_path = await self._generate_new_path(
            target_directory, new_name, force=force, shallow=shallow
        )
        target = FileSystem(new_path, self.isolated_directory)

        try:
            if await self.is_file() and not shallow:
                await aioshutil.copy(self.path, new_path)
            if await self.is_directory() and not shallow:
                await aioshutil.copytree(self.path, new_path)
        except Exception as e:
            raise FileSystemError(*e.args) from e

        return target

    async def make_directory(self, force: bool = False):
        """Create the directory (with parents); `force` tolerates existing."""
        try:
            if await self.exists() and not force:
                raise FileSystemError("Already exists")
            await async_os.makedirs(self.path, exist_ok=force)
        except Exception as e:
            # Chain the cause (was previously dropped).
            raise FileSystemError(*e.args) from e

    async def write_file(self, data: bytes, force: bool = False):
        """Write `data` to the path; `force` allows overwriting."""
        try:
            if await self.exists() and not force:
                raise FileSystemError("Already exists")
            async with aiofiles.open(self.path, mode="wb") as file:
                await file.write(data)
        except Exception as e:
            raise FileSystemError(*e.args) from e

    @staticmethod
    def check_path(path: Path) -> bool:
        """True when `path` looks like an absolute POSIX path (see valid_path)."""
        return bool(valid_path.match(str(path)))

    @staticmethod
    def normalize(path: Path) -> Path:
        """Resolve path and make it relative."""
        if not path.is_absolute():
            path = Path("/").joinpath(path)
        return Path(*path.resolve().parts[1:])
class TemporaryFileTarget(BaseTarget):
    """streaming_form_data target that spools an upload into
    <working_directory>/cache/<uuid4>."""

    def __init__(
        self, working_directory: Path, allow_overwrite: bool = True, *args, **kwargs
    ):
        if working_directory == Path():
            raise FileSystemError("The given working directory is empty")

        super().__init__(*args, **kwargs)

        # "xb" fails if the (uuid-named) spool file somehow already exists.
        self._mode = "wb" if allow_overwrite else "xb"
        self._fd = None
        self._path = working_directory.joinpath("cache", str(uuid4()))

    def on_start(self):
        if not self._path.parent.exists():
            # NOTE(review): parents=True may be needed if the working
            # directory itself can be missing — confirm.
            self._path.parent.mkdir(exist_ok=True)

        # Bug fix: the mode was hard-coded to "wb" here, silently ignoring
        # the allow_overwrite flag stored in self._mode.
        self._fd = open(str(self._path), mode=self._mode)

    def on_data_received(self, chunk: bytes):
        if self._fd:
            self._fd.write(chunk)

    def on_finish(self):
        if self._fd:
            self._fd.close()

    def path(self) -> Optional[Path]:
        """Location of the spool file (created in on_start)."""
        return self._path

    def remove(self):
        """Delete the spool file if it was created."""
        if self._fd:
            if (path := Path(self._fd.name)).exists():
                path.unlink()

128
src/materia/core/logging.py Normal file
View File

@ -0,0 +1,128 @@
import sys
from typing import Sequence, Literal, Optional, TypeAlias
from pathlib import Path
from loguru import logger
from loguru._logger import Logger as LoggerInstance
import logging
import inspect
class InterceptHandler(logging.Handler):
    """Standard-library logging handler that forwards records to loguru.

    This is the interception pattern from the loguru documentation: map the
    stdlib level to a loguru level, then walk the stack past the logging
    module so loguru attributes the message to the original caller.
    """

    def emit(self, record: logging.LogRecord) -> None:
        level: str | int
        try:
            # Prefer the named level when loguru knows it.
            level = logger.level(record.levelname).name
        except ValueError:
            level = record.levelno

        # Find the first frame outside the logging module.
        frame, depth = inspect.currentframe(), 2
        while frame and (depth == 0 or frame.f_code.co_filename == logging.__file__):
            frame = frame.f_back
            depth += 1

        logger.opt(depth=depth, exception=record.exc_info).log(
            level, record.getMessage()
        )
# Accepted severity names for Logger.new (lower-cased loguru level names).
LogLevel: TypeAlias = Literal["info", "warning", "error", "critical", "debug", "trace"]
# Output destinations: console only, file only, or both.
LogMode: TypeAlias = Literal["console", "file", "all"]
class Logger:
    """Static factory and holder for the application-wide loguru logger.

    The class is a namespace only — it is never instantiated.
    """

    # Last logger configured by Logger.new (None until new() is called).
    __instance__: Optional[LoggerInstance] = None

    def __init__(self):
        # Instantiation is a programming error; use the static methods.
        raise NotImplementedError()

    @staticmethod
    def new(
        mode: LogMode = "console",
        level: LogLevel = "info",
        console_format: str = (
            "<level>{level: <8}</level> <green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> - {message}"
        ),
        file_format: str = (
            "<level>{level: <8}</level>: <green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> - {message}"
        ),
        file: Optional[Path] = None,
        file_rotation: str = "3 days",
        file_retention: str = "1 week",
        # NOTE: an immutable tuple default avoids the shared-mutable-default
        # pitfall; callers may still pass any Sequence[str].
        interceptions: Sequence[str] = (
            "uvicorn",
            "uvicorn.access",
            "uvicorn.error",
            "uvicorn.asgi",
            "fastapi",
        ),
    ) -> LoggerInstance:
        """Configure the global loguru logger and return it.

        Args:
            mode: where records go — "console", "file" or "all".
            level: minimum severity to emit.
            console_format/file_format: loguru format strings.
            file: log file path; required when mode is "file" or "all".
            file_rotation/file_retention: loguru rotation/retention policies.
            interceptions: stdlib logger names rerouted through InterceptHandler.

        Raises:
            ValueError: when a file mode is requested without a file path.
        """
        logger.remove()

        if mode in ["console", "all"]:
            # Split by severity: below ERROR goes to stdout, ERROR and above
            # to stderr. BUG FIX: compare level numbers instead of an explicit
            # name list, which silently dropped loguru's SUCCESS level.
            error_no = logger.level("ERROR").no
            logger.add(
                sys.stdout,
                enqueue=True,
                backtrace=True,
                level=level.upper(),
                format=console_format,
                filter=lambda record: record["level"].no < error_no,
            )
            logger.add(
                sys.stderr,
                enqueue=True,
                backtrace=True,
                level=level.upper(),
                format=console_format,
                filter=lambda record: record["level"].no >= error_no,
            )

        if mode in ["file", "all"]:
            # BUG FIX: a missing path previously produced a log file literally
            # named "None" via str(None).
            if file is None:
                raise ValueError("A log file path is required for file logging mode")
            logger.add(
                str(file),
                rotation=file_rotation,
                retention=file_retention,
                enqueue=True,
                backtrace=True,
                level=level.upper(),
                format=file_format,
            )

        # Reroute stdlib logging (and the listed third-party loggers)
        # through loguru.
        logging.basicConfig(
            handlers=[InterceptHandler()], level=logging.NOTSET, force=True
        )

        for external_logger in interceptions:
            logging.getLogger(external_logger).handlers = [InterceptHandler()]

        Logger.__instance__ = logger

        return logger  # type: ignore

    @staticmethod
    def instance() -> Optional[LoggerInstance]:
        """Return the last configured logger, or None before Logger.new ran."""
        return Logger.__instance__

    @staticmethod
    def uvicorn_config(level: LogLevel) -> dict:
        """Build a uvicorn logging config that routes its loggers to loguru."""
        return {
            "version": 1,
            "disable_existing_loggers": False,
            "handlers": {
                "default": {"class": "materia.core.logging.InterceptHandler"},
                "access": {"class": "materia.core.logging.InterceptHandler"},
            },
            "loggers": {
                "uvicorn": {
                    "handlers": ["default"],
                    "level": level.upper(),
                    "propagate": False,
                },
                "uvicorn.error": {"level": level.upper()},
                "uvicorn.access": {
                    "handlers": ["access"],
                    "level": level.upper(),
                    "propagate": False,
                },
            },
        }

28
src/materia/core/misc.py Normal file
View File

@ -0,0 +1,28 @@
from typing import Optional, Self, Iterator, TypeVar, Callable, Any, ParamSpec
from functools import partial
T = TypeVar("T")
P = ParamSpec("P")
def optional(func: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> Optional[T]:
try:
res = func(*args, **kwargs)
except TypeError as e:
raise e
except Exception:
return None
return res
def optional_next(it: Iterator[T]) -> Optional[T]:
    """Return the next item of the iterator, or None when it is exhausted."""
    return optional(next, it)
def optional_string(value: Any, format_string: Optional[str] = None) -> str:
    """Convert `value` to a string, returning "" for None or unconvertible values.

    Args:
        value: object to convert.
        format_string: optional str.format template applied to the converted
            value; when omitted, the plain conversion is returned.
    """
    if value is None:
        return ""
    res = optional(str, value)
    if res is None:
        return ""
    if format_string is None:
        # BUG FIX: previously raised AttributeError when called with the
        # documented default format_string=None.
        return res
    return format_string.format(res)

View File

@ -0,0 +1,31 @@
from materia.models.auth import (
LoginType,
LoginSource,
# OAuth2Application,
# OAuth2Grant,
# OAuth2AuthorizationCode,
)
from materia.models.user import User, UserCredentials, UserInfo
from materia.models.repository import (
Repository,
RepositoryInfo,
RepositoryContent,
RepositoryError,
)
from materia.models.directory import (
Directory,
DirectoryLink,
DirectoryInfo,
DirectoryContent,
DirectoryPath,
DirectoryRename,
DirectoryCopyMove,
)
from materia.models.file import (
File,
FileLink,
FileInfo,
FilePath,
FileRename,
FileCopyMove,
)

View File

@ -0,0 +1,3 @@
from materia.models.auth.source import LoginType, LoginSource
# from materia.models.auth.oauth2 import OAuth2Application, OAuth2Grant, OAuth2AuthorizationCode

View File

@ -0,0 +1,162 @@
from time import time
from typing import List, Optional, Self, Union
from uuid import UUID, uuid4
import bcrypt
import httpx
from sqlalchemy import BigInteger, ForeignKey, JSON, and_, select
from sqlalchemy.orm import mapped_column, Mapped, relationship
from pydantic import BaseModel, HttpUrl
from materia.models.base import Base
from materia.core import Database, Cache
from materia import security
class OAuth2Application(Base):
    """OAuth2 client application registered by a user.

    Stores the bcrypt-hashed client secret and the allowed redirect URIs.
    """

    __tablename__ = "oauth2_application"

    id: Mapped[int] = mapped_column(BigInteger, primary_key=True)
    user_id: Mapped[UUID] = mapped_column(ForeignKey("user.id", ondelete="CASCADE"))
    name: Mapped[str]
    client_id: Mapped[UUID] = mapped_column(default=uuid4)
    # bcrypt hash of the client secret, stored as text.
    hashed_client_secret: Mapped[str]
    # JSON array of redirect URI strings.
    redirect_uris: Mapped[List[str]] = mapped_column(JSON)
    confidential_client: Mapped[bool] = mapped_column(default=True)
    created: Mapped[int] = mapped_column(BigInteger, default=time)
    updated: Mapped[int] = mapped_column(BigInteger, default=time)

    # user: Mapped["user.User"] = relationship(back_populates = "oauth2_applications")
    grants: Mapped[List["OAuth2Grant"]] = relationship(back_populates="application")

    def contains_redirect_uri(self, uri: HttpUrl) -> bool:
        """Return True when `uri` is registered for this application.

        Public (non-confidential) clients may use plain-http loopback
        redirects (RFC 8252 §7.3); confidential clients must use https.
        """
        # BUG FIX: redirect_uris holds strings while `uri` is a parsed
        # HttpUrl object; compare the string form so membership can match.
        if not self.confidential_client:
            if uri.scheme == "http" and uri.host in ["127.0.0.1", "[::1]"]:
                return str(uri) in self.redirect_uris
        else:
            # NOTE(review): uri.port may be None when no explicit port is
            # given — confirm the URL parser fills the default 443 here.
            if uri.scheme == "https" and uri.port == 443:
                return str(uri) in self.redirect_uris
        return False

    async def generate_client_secret(self, db: Database) -> str:
        """Generate, hash and persist a new client secret.

        Returns:
            The plain-text secret (shown to the user exactly once).
        """
        client_secret = security.generate_key()
        hashed_secret = bcrypt.hashpw(client_secret, bcrypt.gensalt())
        # BUG FIX: str(bytes) stored the repr "b'...'", which
        # validate_client_secret could never verify; decode the bytes.
        self.hashed_client_secret = hashed_secret.decode()

        async with db.session() as session:
            session.add(self)
            await session.commit()

        # BUG FIX: likewise return the decoded secret, not its bytes repr.
        return client_secret.decode()

    def validate_client_secret(self, secret: bytes) -> bool:
        """Check a plain secret against the stored bcrypt hash."""
        return bcrypt.checkpw(secret, self.hashed_client_secret.encode())

    @staticmethod
    async def update(db: Database, app: "OAuth2Application"):
        """Persist changes made to the given application."""
        async with db.session() as session:
            session.add(app)
            await session.commit()

    @staticmethod
    async def delete(db: Database, id: int, user_id: int):
        """Delete the application with the given id owned by user_id.

        Raises:
            Exception: when no matching application exists.
        """
        async with db.session() as session:
            if not (
                application := (
                    await session.scalars(
                        select(OAuth2Application).where(
                            and_(
                                OAuth2Application.id == id,
                                OAuth2Application.user_id == user_id,
                            )
                        )
                    )
                ).first()
            ):
                raise Exception("OAuth2Application not found")

            # await session.refresh(application, attribute_names = [ "grants" ])
            await session.delete(application)
            # NOTE(review): no commit/flush here — confirm the session context
            # manager commits on exit, otherwise the delete is lost.

    @staticmethod
    async def by_client_id(client_id: str, db: Database) -> Union[Self, None]:
        """Find an application by its public client_id."""
        async with db.session() as session:
            return await session.scalar(
                select(OAuth2Application).where(
                    OAuth2Application.client_id == client_id
                )
            )

    async def grant_by_user_id(
        self, user_id: UUID, db: Database
    ) -> Union["OAuth2Grant", None]:
        """Return this application's grant for the given user, if any."""
        async with db.session() as session:
            return (
                await session.scalars(
                    select(OAuth2Grant).where(
                        and_(
                            OAuth2Grant.application_id == self.id,
                            OAuth2Grant.user_id == user_id,
                        )
                    )
                )
            ).first()
# Short-lived authorization code bound to a grant (RFC 6749 §4.1), kept in
# the cache rather than the database.
class OAuth2AuthorizationCode(BaseModel):
    grant: "OAuth2Grant"
    code: str
    redirect_uri: HttpUrl
    created: int
    lifetime: int

    def generate_redirect_uri(self, state: Optional[str] = None) -> httpx.URL:
        # Build the callback URL carrying the code (and state, when given).
        params: dict = {}
        if state:
            params["state"] = state
        params["code"] = self.code

        url = httpx.URL(str(self.redirect_uri))
        for key, value in params.items():
            url = url.copy_add_param(key, value)
        return url
class OAuth2Grant(Base):
    """A user's authorization of an OAuth2 application for a set of scopes."""

    __tablename__ = "oauth2_grant"

    id: Mapped[int] = mapped_column(BigInteger, primary_key=True)
    user_id: Mapped[UUID] = mapped_column(ForeignKey("user.id", ondelete="CASCADE"))
    application_id: Mapped[int] = mapped_column(
        ForeignKey("oauth2_application.id", ondelete="CASCADE")
    )
    # Space-separated scope string, e.g. "read write".
    scope: Mapped[str]
    created: Mapped[int] = mapped_column(default=time)
    updated: Mapped[int] = mapped_column(default=time)

    application: Mapped[OAuth2Application] = relationship(back_populates="grants")

    async def generate_authorization_code(
        self, redirect_uri: HttpUrl, cache: Cache
    ) -> OAuth2AuthorizationCode:
        """Create an authorization code and store it in the cache with a TTL."""
        code = OAuth2AuthorizationCode(
            grant=self,
            redirect_uri=redirect_uri,
            code=security.generate_key().decode(),
            created=int(time()),
            lifetime=3000,
        )
        async with cache.client() as client:
            # NOTE(review): if cache.client() yields an async client this set
            # call is missing an `await` — confirm against the Cache impl.
            # NOTE(review): the key is derived from the creation timestamp, so
            # two codes issued within the same second would collide; consider
            # keying by the code value or grant id instead.
            client.set(
                "oauth2_authorization_code_{}".format(code.created),
                code.code,
                ex=code.lifetime,
            )
        return code

    def scope_contains(self, scope: str) -> bool:
        """Return True when the single scope is part of this grant's scope set."""
        return scope in self.scope.split(" ")

View File

@ -0,0 +1,31 @@
import enum
from time import time
from sqlalchemy import BigInteger
from sqlalchemy.orm import Mapped, mapped_column
from materia.models.base import Base
class LoginType(enum.Enum):
    """How a user authenticates: local password, OAuth2 provider, or SMTP."""

    Plain = enum.auto()
    OAuth2 = enum.auto()
    Smtp = enum.auto()
class LoginSource(Base):
    """A configured authentication source with its login type."""

    __tablename__ = "login_source"

    id: Mapped[int] = mapped_column(BigInteger, primary_key=True)
    type: Mapped[LoginType]
    # Unix timestamps. NOTE(review): `time` returns a float that is coerced
    # into the integer column — consider lambda: int(time()) for clarity.
    created: Mapped[int] = mapped_column(default=time)
    updated: Mapped[int] = mapped_column(default=time)

    def is_plain(self) -> bool:
        # Local password authentication.
        return self.type == LoginType.Plain

    def is_oauth2(self) -> bool:
        # External OAuth2 provider.
        return self.type == LoginType.OAuth2

    def is_smtp(self) -> bool:
        # SMTP-based authentication.
        return self.type == LoginType.Smtp

View File

@ -0,0 +1,27 @@
from typing import Optional, Self
from sqlalchemy.orm import DeclarativeBase
class Base(DeclarativeBase):
    """Declarative base class shared by all ORM models."""

    def to_dict(self) -> dict:
        """Map every mapped column name to its current value."""
        column_names = self.__table__.columns.keys()
        return {name: getattr(self, name) for name in column_names}

    def clone(self) -> Optional[Self]:
        """Clone model.

        Included: columns and values, foreign keys
        Ignored: primary keys, relationships
        """
        primary = set(self.__table__.primary_key.columns.keys())
        values = {}
        for name in self.__table__.columns.keys():
            # skip primary keys so the copy can be inserted as a new row
            if name in primary:
                continue
            values[name] = getattr(self, name)
        return self.__class__(**values)

View File

@ -0,0 +1,310 @@
from time import time
from typing import List, Optional, Self
from pathlib import Path
from sqlalchemy import BigInteger, ForeignKey, inspect
from sqlalchemy.orm import mapped_column, Mapped, relationship
import sqlalchemy as sa
from pydantic import BaseModel, ConfigDict
from materia.models.base import Base
from materia.core import SessionContext, Config, FileSystem
class DirectoryError(Exception):
    """Raised for directory domain errors (lookup, creation, removal)."""

    pass
class Directory(Base):
    """Directory entry of a user repository, mirrored on the filesystem.

    Rows form a tree via parent_id; the corresponding directory on disk
    lives under the owning repository's root path.
    """

    __tablename__ = "directory"

    id: Mapped[int] = mapped_column(BigInteger, primary_key=True)
    repository_id: Mapped[int] = mapped_column(
        ForeignKey("repository.id", ondelete="CASCADE")
    )
    # NULL parent_id marks a directory at the repository root.
    parent_id: Mapped[int] = mapped_column(
        ForeignKey("directory.id", ondelete="CASCADE"), nullable=True
    )
    # Unix timestamps; `time` returns a float coerced into the integer column.
    created: Mapped[int] = mapped_column(BigInteger, nullable=False, default=time)
    updated: Mapped[int] = mapped_column(BigInteger, nullable=False, default=time)
    name: Mapped[str]
    is_public: Mapped[bool] = mapped_column(default=False)

    repository: Mapped["Repository"] = relationship(back_populates="directories")
    directories: Mapped[List["Directory"]] = relationship(back_populates="parent")
    parent: Mapped["Directory"] = relationship(
        back_populates="directories", remote_side=[id]
    )
    files: Mapped[List["File"]] = relationship(back_populates="parent")
    link: Mapped["DirectoryLink"] = relationship(back_populates="directory")

    async def new(self, session: SessionContext, config: Config) -> Optional[Self]:
        """Persist this directory and create it on the filesystem."""
        session.add(self)
        await session.flush()
        await session.refresh(self, attribute_names=["repository"])
        repository_path = await self.repository.real_path(session, config)
        directory_path = await self.real_path(session, config)
        new_directory = FileSystem(directory_path, repository_path)
        await new_directory.make_directory()
        return self

    async def remove(self, session: SessionContext, config: Config):
        """Recursively remove children, delete the on-disk directory and the row."""
        session.add(self)
        await session.refresh(
            self, attribute_names=["repository", "directories", "files"]
        )
        if self.directories:
            for directory in self.directories:
                await directory.remove(session, config)
        if self.files:
            for file in self.files:
                await file.remove(session, config)
        repository_path = await self.repository.real_path(session, config)
        directory_path = await self.real_path(session, config)
        current_directory = FileSystem(directory_path, repository_path)
        await current_directory.remove()
        await session.delete(self)
        await session.flush()

    async def relative_path(self, session: SessionContext) -> Optional[Path]:
        """Get path of the directory relative repository root."""
        if inspect(self).was_deleted:
            return None
        parts = []
        current_directory = self
        # Walk up parent_id links, collecting names from leaf to root.
        while True:
            # ISSUE: accessing `parent` attribute raises greenlet_spawn has not been called; can't call await_only() here
            # parts.append(current_directory.name)
            # session.add(current_directory)
            # await session.refresh(current_directory, attribute_names=["parent"])
            # if current_directory.parent is None:
            #     break
            # current_directory = current_directory.parent
            parts.append(current_directory.name)
            if current_directory.parent_id is None:
                break
            # NOTE(review): .first() may return None if the parent row is
            # missing, which would raise AttributeError on the next loop turn.
            current_directory = (
                await session.scalars(
                    sa.select(Directory).where(
                        Directory.id == current_directory.parent_id,
                    )
                )
            ).first()
        return Path().joinpath(*reversed(parts))

    async def real_path(
        self, session: SessionContext, config: Config
    ) -> Optional[Path]:
        """Get absolute path of the directory"""
        if inspect(self).was_deleted:
            return None
        repository_path = await self.repository.real_path(session, config)
        relative_path = await self.relative_path(session)
        return repository_path.joinpath(relative_path)

    def is_root(self) -> bool:
        """True when the directory sits directly under the repository root."""
        return self.parent_id is None

    @staticmethod
    async def by_path(
        repository: "Repository", path: Path, session: SessionContext, config: Config
    ) -> Optional[Self]:
        """Find a directory by its repository-relative path, or None.

        Raises:
            DirectoryError: when `path` is empty.
        """
        if path == Path():
            raise DirectoryError("Cannot find directory by empty path")
        current_directory: Optional[Directory] = None
        for part in path.parts:
            # from root directory to target directory
            current_directory = (
                await session.scalars(
                    sa.select(Directory).where(
                        sa.and_(
                            Directory.repository_id == repository.id,
                            Directory.name == part,
                            (
                                Directory.parent_id == current_directory.id
                                if current_directory
                                else Directory.parent_id.is_(None)
                            ),
                        )
                    )
                )
            ).first()
            if not current_directory:
                return None
        return current_directory

    async def copy(
        self,
        target: Optional["Directory"],
        session: SessionContext,
        config: Config,
        force: bool = False,
        shallow: bool = False,
    ) -> Self:
        """Copy this directory (and, recursively, its children) into `target`.

        A None target means the repository root. NOTE(review): returns the
        source `self`, not the created clone — confirm callers expect this.
        """
        session.add(self)
        await session.refresh(self, attribute_names=["repository"])
        repository_path = await self.repository.real_path(session, config)
        directory_path = await self.real_path(session, config)
        target_path = (
            await target.real_path(session, config) if target else repository_path
        )
        current_directory = FileSystem(directory_path, repository_path)
        new_directory = await current_directory.copy(
            target_path, force=force, shallow=shallow
        )
        cloned = self.clone()
        cloned.name = new_directory.name()
        cloned.parent_id = target.id if target else None
        session.add(cloned)
        await session.flush()
        await session.refresh(self, attribute_names=["files", "directories"])
        # Children are cloned with shallow=True since the filesystem copy
        # above already duplicated the tree on disk.
        for directory in self.directories:
            await directory.copy(cloned, session, config, shallow=True)
        for file in self.files:
            await file.copy(cloned, session, config, shallow=True)
        return self

    async def move(
        self,
        target: Optional["Directory"],
        session: SessionContext,
        config: Config,
        force: bool = False,
        shallow: bool = False,
    ) -> Self:
        """Move this directory into `target` (repository root when None)."""
        session.add(self)
        await session.refresh(self, attribute_names=["repository"])
        repository_path = await self.repository.real_path(session, config)
        directory_path = await self.real_path(session, config)
        target_path = (
            await target.real_path(session, config) if target else repository_path
        )
        current_directory = FileSystem(directory_path, repository_path)
        moved_directory = await current_directory.move(
            target_path, force=force, shallow=shallow
        )
        self.name = moved_directory.name()
        self.parent_id = target.id if target else None
        self.updated = time()
        await session.flush()
        return self

    async def rename(
        self,
        name: str,
        session: SessionContext,
        config: Config,
        force: bool = False,
        shallow: bool = False,
    ) -> Self:
        """Rename this directory on disk and in the database.

        NOTE(review): unlike move() and File.rename(), `updated` is not
        refreshed here — confirm whether that is intentional.
        """
        session.add(self)
        await session.refresh(self, attribute_names=["repository"])
        repository_path = await self.repository.real_path(session, config)
        directory_path = await self.real_path(session, config)
        current_directory = FileSystem(directory_path, repository_path)
        renamed_directory = await current_directory.rename(
            name, force=force, shallow=shallow
        )
        self.name = renamed_directory.name()
        await session.flush()
        return self

    async def info(self, session: SessionContext) -> "DirectoryInfo":
        """Build a DirectoryInfo with the relative path and direct-file usage.

        NOTE(review): `used` sums only the directory's immediate files, not
        nested subdirectories — confirm against API expectations.
        """
        session.add(self)
        await session.refresh(self, attribute_names=["files"])
        info = DirectoryInfo.model_validate(self)
        relative_path = await self.relative_path(session)
        info.path = Path("/").joinpath(relative_path) if relative_path else None
        info.used = sum([file.size for file in self.files])
        return info
class DirectoryLink(Base):
    """Public share link pointing at a directory."""

    __tablename__ = "directory_link"

    id: Mapped[int] = mapped_column(BigInteger, primary_key=True)
    directory_id: Mapped[int] = mapped_column(
        ForeignKey("directory.id", ondelete="CASCADE")
    )
    created: Mapped[int] = mapped_column(BigInteger, default=time)
    url: Mapped[str]

    directory: Mapped["Directory"] = relationship(back_populates="link")
# API representation of a Directory row (built via model_validate in
# Directory.info); `path` and `used` are filled in afterwards.
class DirectoryInfo(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    id: int
    repository_id: int
    parent_id: Optional[int]
    created: int
    updated: int
    name: str
    is_public: bool

    # Repository-relative path rendered with a leading "/".
    path: Optional[Path] = None
    # Total size in bytes of the directory's immediate files.
    used: Optional[int] = None
# Listing of a directory: its immediate files and subdirectories.
class DirectoryContent(BaseModel):
    model_config = ConfigDict(arbitrary_types_allowed=True)

    files: list["FileInfo"]
    directories: list["DirectoryInfo"]
# Request body carrying a repository-relative directory path.
class DirectoryPath(BaseModel):
    path: Path
# Request body for renaming a directory at `path` to `name`.
class DirectoryRename(BaseModel):
    path: Path
    name: str
    # Overwrite an existing entry with the new name when True.
    force: Optional[bool] = False
# Request body for copying or moving a directory from `path` into `target`.
class DirectoryCopyMove(BaseModel):
    path: Path
    target: Path
    # Overwrite an existing entry at the target when True.
    force: Optional[bool] = False
from materia.models.repository import Repository
from materia.models.file import File, FileInfo

277
src/materia/models/file.py Normal file
View File

@ -0,0 +1,277 @@
from time import time
from typing import Optional, Self, Union
from pathlib import Path
from sqlalchemy import BigInteger, ForeignKey, inspect
from sqlalchemy.orm import mapped_column, Mapped, relationship
import sqlalchemy as sa
from pydantic import BaseModel, ConfigDict
from materia.models.base import Base
from materia.core import SessionContext, Config, FileSystem
class FileError(Exception):
    """Raised for file domain errors (lookup, creation, removal)."""

    pass
class File(Base):
    """File entry of a user repository, mirrored on the filesystem."""

    __tablename__ = "file"

    id: Mapped[int] = mapped_column(BigInteger, primary_key=True)
    repository_id: Mapped[int] = mapped_column(
        ForeignKey("repository.id", ondelete="CASCADE")
    )
    # NULL parent_id places the file at the repository root.
    parent_id: Mapped[int] = mapped_column(
        ForeignKey("directory.id", ondelete="CASCADE"), nullable=True
    )
    created: Mapped[int] = mapped_column(BigInteger, nullable=False, default=time)
    updated: Mapped[int] = mapped_column(BigInteger, nullable=False, default=time)
    name: Mapped[str]
    is_public: Mapped[bool] = mapped_column(default=False)
    # Size in bytes, filled in new() after the content is written.
    size: Mapped[int] = mapped_column(BigInteger, nullable=True)

    repository: Mapped["Repository"] = relationship(back_populates="files")
    parent: Mapped["Directory"] = relationship(back_populates="files")
    link: Mapped["FileLink"] = relationship(back_populates="file")

    async def new(
        self, data: Union[bytes, Path], session: SessionContext, config: Config
    ) -> Optional[Self]:
        """Persist this file and write its content to the filesystem.

        `data` is either the raw bytes, or the path of an already-spooled
        temporary file (relative to the working directory) to move in place.

        Raises:
            FileError: when `data` is neither bytes nor Path.
        """
        session.add(self)
        await session.flush()
        await session.refresh(self, attribute_names=["repository"])
        file_path = await self.real_path(session, config)
        repository_path = await self.repository.real_path(session, config)
        new_file = FileSystem(file_path, repository_path)
        if isinstance(data, bytes):
            await new_file.write_file(data)
        elif isinstance(data, Path):
            from_file = FileSystem(data, config.application.working_directory)
            await from_file.move(file_path.parent, new_name=file_path.name)
        else:
            raise FileError(f"Unknown data type passed: {type(data)}")
        self.size = await new_file.size()
        await session.flush()
        return self

    async def remove(self, session: SessionContext, config: Config):
        """Delete the file from disk and from the database.

        NOTE(review): unlike Directory.remove, `self.repository` is accessed
        without an explicit refresh — confirm async lazy loading works here.
        """
        session.add(self)
        file_path = await self.real_path(session, config)
        new_file = FileSystem(
            file_path, await self.repository.real_path(session, config)
        )
        await new_file.remove()
        await session.delete(self)
        await session.flush()

    async def relative_path(self, session: SessionContext) -> Optional[Path]:
        """Path of the file relative to the repository root, or None if deleted."""
        if inspect(self).was_deleted:
            return None
        file_path = Path()
        async with session.begin_nested():
            session.add(self)
            await session.refresh(self, attribute_names=["parent"])
            if self.parent:
                file_path = await self.parent.relative_path(session)
        return file_path.joinpath(self.name)

    async def real_path(
        self, session: SessionContext, config: Config
    ) -> Optional[Path]:
        """Absolute filesystem path of the file, or None if deleted."""
        if inspect(self).was_deleted:
            return None
        file_path = Path()
        async with session.begin_nested():
            session.add(self)
            await session.refresh(self, attribute_names=["repository", "parent"])
            if self.parent:
                file_path = await self.parent.real_path(session, config)
            else:
                file_path = await self.repository.real_path(session, config)
        return file_path.joinpath(self.name)

    @staticmethod
    async def by_path(
        repository: "Repository", path: Path, session: SessionContext, config: Config
    ) -> Optional[Self]:
        """Find a file in the repository by its repository-relative path.

        Raises:
            FileError: when `path` is empty.
        """
        if path == Path():
            raise FileError("Cannot find file by empty path")
        # Resolve the containing directory first (None means repository root).
        parent_directory = (
            None
            if path.parent == Path()
            else await Directory.by_path(repository, path.parent, session, config)
        )
        current_file = (
            await session.scalars(
                sa.select(File).where(
                    sa.and_(
                        File.repository_id == repository.id,
                        File.name == path.name,
                        (
                            File.parent_id == parent_directory.id
                            if parent_directory
                            else File.parent_id.is_(None)
                        ),
                    )
                )
            )
        ).first()
        return current_file

    async def copy(
        self,
        directory: Optional["Directory"],
        session: SessionContext,
        config: Config,
        force: bool = False,
        shallow: bool = False,
    ) -> Self:
        """Copy this file into `directory` (repository root when None).

        NOTE(review): returns the source `self`, not the created clone —
        confirm callers expect this.
        """
        session.add(self)
        await session.refresh(self, attribute_names=["repository"])
        repository_path = await self.repository.real_path(session, config)
        file_path = await self.real_path(session, config)
        directory_path = (
            await directory.real_path(session, config) if directory else repository_path
        )
        current_file = FileSystem(file_path, repository_path)
        new_file = await current_file.copy(directory_path, force=force, shallow=shallow)
        cloned = self.clone()
        cloned.name = new_file.name()
        cloned.parent_id = directory.id if directory else None
        session.add(cloned)
        await session.flush()
        return self

    async def move(
        self,
        directory: Optional["Directory"],
        session: SessionContext,
        config: Config,
        force: bool = False,
        shallow: bool = False,
    ) -> Self:
        """Move this file into `directory` (repository root when None)."""
        session.add(self)
        await session.refresh(self, attribute_names=["repository"])
        repository_path = await self.repository.real_path(session, config)
        file_path = await self.real_path(session, config)
        directory_path = (
            await directory.real_path(session, config) if directory else repository_path
        )
        current_file = FileSystem(file_path, repository_path)
        moved_file = await current_file.move(
            directory_path, force=force, shallow=shallow
        )
        self.name = moved_file.name()
        self.parent_id = directory.id if directory else None
        self.updated = time()
        await session.flush()
        return self

    async def rename(
        self,
        name: str,
        session: SessionContext,
        config: Config,
        force: bool = False,
        shallow: bool = False,
    ) -> Self:
        """Rename this file on disk and in the database."""
        session.add(self)
        await session.refresh(self, attribute_names=["repository"])
        repository_path = await self.repository.real_path(session, config)
        file_path = await self.real_path(session, config)
        current_file = FileSystem(file_path, repository_path)
        renamed_file = await current_file.rename(name, force=force, shallow=shallow)
        self.name = renamed_file.name()
        self.updated = time()
        await session.flush()
        return self

    async def info(self, session: SessionContext) -> Optional["FileInfo"]:
        """Build a FileInfo with the repository-relative path filled in."""
        info = FileInfo.model_validate(self)
        relative_path = await self.relative_path(session)
        info.path = Path("/").joinpath(relative_path) if relative_path else None
        return info
def convert_bytes(size: int) -> str:
    """Render a byte count as a human-readable string (e.g. 2048 -> "2.0kB").

    Bytes are printed as integers; larger units with one decimal place.
    """
    for unit in ["bytes", "kB", "MB", "GB", "TB"]:
        if size < 1024:
            return f"{size}{unit}" if unit == "bytes" else f"{size:.1f}{unit}"
        # BUG FIX: use true division — the previous integer shift (>>= 10)
        # truncated the value, so the :.1f format always printed x.0.
        size /= 1024
    # BUG FIX: sizes of 1024 TB and above previously fell off the loop
    # and returned None.
    return f"{size:.1f}PB"
class FileLink(Base):
    """Public share link pointing at a file."""

    __tablename__ = "file_link"

    id: Mapped[int] = mapped_column(BigInteger, primary_key=True)
    file_id: Mapped[int] = mapped_column(ForeignKey("file.id", ondelete="CASCADE"))
    created: Mapped[int] = mapped_column(BigInteger, default=time)
    url: Mapped[str]

    file: Mapped["File"] = relationship(back_populates="link")
# API representation of a File row (built via model_validate in File.info);
# `path` is filled in afterwards.
class FileInfo(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    id: int
    repository_id: int
    parent_id: Optional[int]
    created: int
    updated: int
    name: str
    is_public: bool
    # Size in bytes.
    size: int

    # Repository-relative path rendered with a leading "/".
    path: Optional[Path] = None
# Request body carrying a repository-relative file path.
class FilePath(BaseModel):
    path: Path
# Request body for renaming a file at `path` to `name`.
class FileRename(BaseModel):
    path: Path
    name: str
    # Overwrite an existing entry with the new name when True.
    force: Optional[bool] = False
# Request body for copying or moving a file from `path` into `target`.
class FileCopyMove(BaseModel):
    path: Path
    target: Path
    # Overwrite an existing entry at the target when True.
    force: Optional[bool] = False
from materia.models.repository import Repository
from materia.models.directory import Directory

View File

@ -1,30 +1,48 @@
import asyncio
from logging.config import fileConfig
from pathlib import Path
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic.config import Config
from alembic.runtime.migration import MigrationContext
from alembic import context
import alembic_postgresql_enum
from src.config import config as materia_config
from src.db import Base
from materia.core import Config
from materia.models.base import Base
import materia.models.user
import materia.models.auth
import materia.models.repository
import materia.models.directory
import materia.models.file
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = Config(Path("alembic.ini"))
config.set_main_option("sqlalchemy.url", materia_config.database_url())
config = context.config
# config.set_main_option("sqlalchemy.url", Config().database.url())
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
fileConfig(config.config_file_name, disable_existing_loggers=False)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
@ -38,13 +56,12 @@ def run_migrations_offline() -> None:
"""
url = config.get_main_option("sqlalchemy.url")
context = MigrationContext.configure(
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
opts = {
"target_metadata": target_metadata,
"literal_binds": True,
}
version_table_schema="public",
)
with context.begin_transaction():
@ -52,12 +69,7 @@ def run_migrations_offline() -> None:
def do_run_migrations(connection: Connection) -> None:
context = MigrationContext.configure(
connection = connection,
opts = {
"target_metadata": target_metadata,
}
)
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
@ -87,7 +99,8 @@ def run_migrations_online() -> None:
asyncio.run(run_async_migrations())
#if context.is_offline_mode():
#run_migrations_offline()
#else:
#run_migrations_online()
if context.is_offline_mode():
run_migrations_offline()
else:
print("online")
run_migrations_online()

View File

@ -0,0 +1,139 @@
"""empty message
Revision ID: bf2ef6c7ab70
Revises:
Create Date: 2024-08-02 18:37:01.697075
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = 'bf2ef6c7ab70'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Initial schema: login sources, users, OAuth2 app/grant, repository,
    directory/file trees and their share-link tables.

    NOTE(review): 'file.size' is nullable=False here but the File model
    declares it nullable=True, and the 'file.path' column below does not
    exist on the model — a follow-up migration may be needed.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    sa.Enum('Plain', 'OAuth2', 'Smtp', name='logintype').create(op.get_bind())
    op.create_table('login_source',
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('type', postgresql.ENUM('Plain', 'OAuth2', 'Smtp', name='logintype', create_type=False), nullable=False),
    sa.Column('created', sa.Integer(), nullable=False),
    sa.Column('updated', sa.Integer(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('user',
    sa.Column('id', sa.Uuid(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('lower_name', sa.String(), nullable=False),
    sa.Column('full_name', sa.String(), nullable=True),
    sa.Column('email', sa.String(), nullable=False),
    sa.Column('is_email_private', sa.Boolean(), nullable=False),
    sa.Column('hashed_password', sa.String(), nullable=False),
    sa.Column('must_change_password', sa.Boolean(), nullable=False),
    sa.Column('login_type', postgresql.ENUM('Plain', 'OAuth2', 'Smtp', name='logintype', create_type=False), nullable=False),
    sa.Column('created', sa.BigInteger(), nullable=False),
    sa.Column('updated', sa.BigInteger(), nullable=False),
    sa.Column('last_login', sa.BigInteger(), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=False),
    sa.Column('is_admin', sa.Boolean(), nullable=False),
    sa.Column('avatar', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('lower_name'),
    sa.UniqueConstraint('name')
    )
    op.create_table('oauth2_application',
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('user_id', sa.Uuid(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('client_id', sa.Uuid(), nullable=False),
    sa.Column('hashed_client_secret', sa.String(), nullable=False),
    sa.Column('redirect_uris', sa.JSON(), nullable=False),
    sa.Column('confidential_client', sa.Boolean(), nullable=False),
    sa.Column('created', sa.BigInteger(), nullable=False),
    sa.Column('updated', sa.BigInteger(), nullable=False),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('repository',
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('user_id', sa.Uuid(), nullable=False),
    sa.Column('capacity', sa.BigInteger(), nullable=False),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('directory',
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('repository_id', sa.BigInteger(), nullable=False),
    sa.Column('parent_id', sa.BigInteger(), nullable=True),
    sa.Column('created', sa.BigInteger(), nullable=False),
    sa.Column('updated', sa.BigInteger(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('is_public', sa.Boolean(), nullable=False),
    sa.ForeignKeyConstraint(['parent_id'], ['directory.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('oauth2_grant',
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('user_id', sa.Uuid(), nullable=False),
    sa.Column('application_id', sa.BigInteger(), nullable=False),
    sa.Column('scope', sa.String(), nullable=False),
    sa.Column('created', sa.Integer(), nullable=False),
    sa.Column('updated', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['application_id'], ['oauth2_application.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('directory_link',
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('directory_id', sa.BigInteger(), nullable=False),
    sa.Column('created', sa.BigInteger(), nullable=False),
    sa.Column('url', sa.String(), nullable=False),
    sa.ForeignKeyConstraint(['directory_id'], ['directory.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('file',
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('repository_id', sa.BigInteger(), nullable=False),
    sa.Column('parent_id', sa.BigInteger(), nullable=True),
    sa.Column('created', sa.BigInteger(), nullable=False),
    sa.Column('updated', sa.BigInteger(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('path', sa.String(), nullable=True),
    sa.Column('is_public', sa.Boolean(), nullable=False),
    sa.Column('size', sa.BigInteger(), nullable=False),
    sa.ForeignKeyConstraint(['parent_id'], ['directory.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('file_link',
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('file_id', sa.BigInteger(), nullable=False),
    sa.Column('created', sa.BigInteger(), nullable=False),
    sa.Column('url', sa.String(), nullable=False),
    sa.ForeignKeyConstraint(['file_id'], ['file.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert the migration: drop all tables and the login-type enum."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop children before parents so foreign keys never dangle;
    # the order below mirrors the original statement sequence exactly.
    for table_name in (
        "file_link",
        "file",
        "directory_link",
        "oauth2_grant",
        "directory",
        "repository",
        "oauth2_application",
        "user",
        "login_source",
    ):
        op.drop_table(table_name)
    # drop_table does not remove the enum type; drop it explicitly.
    sa.Enum("Plain", "OAuth2", "Smtp", name="logintype").drop(op.get_bind())
    # ### end Alembic commands ###

View File

@ -0,0 +1,131 @@
from typing import List, Self, Optional
from uuid import UUID
from pathlib import Path
import shutil
from sqlalchemy import BigInteger, ForeignKey
from sqlalchemy.orm import mapped_column, Mapped, relationship
import sqlalchemy as sa
from pydantic import BaseModel, ConfigDict
from materia.models.base import Base
from materia.core import SessionContext, Config
class RepositoryError(Exception):
    """Raised when a repository database or filesystem operation fails."""
class Repository(Base):
    """A user's storage root; owns directories and files, bounded by capacity."""

    __tablename__ = "repository"

    id: Mapped[int] = mapped_column(BigInteger, primary_key=True)
    user_id: Mapped[UUID] = mapped_column(ForeignKey("user.id", ondelete="CASCADE"))
    # Maximum storage size in bytes.
    capacity: Mapped[int] = mapped_column(BigInteger, nullable=False)

    user: Mapped["User"] = relationship(back_populates="repository")
    directories: Mapped[List["Directory"]] = relationship(back_populates="repository")
    files: Mapped[List["File"]] = relationship(back_populates="repository")

    async def new(self, session: SessionContext, config: Config) -> Optional[Self]:
        """Persist the repository and create its directory on disk.

        Raises:
            RepositoryError: if the directory cannot be created.
        """
        session.add(self)
        await session.flush()

        repository_path = await self.real_path(session, config)
        relative_path = repository_path.relative_to(
            config.application.working_directory
        )

        try:
            repository_path.mkdir(parents=True, exist_ok=True)
        except OSError as e:
            # Chain the OSError so the original cause is preserved.
            raise RepositoryError(
                f"Failed to create repository at /{relative_path}:",
                *e.args,
            ) from e

        await session.flush()
        return self

    async def real_path(self, session: SessionContext, config: Config) -> Path:
        """Get the absolute filesystem path of the repository."""
        session.add(self)
        # The owner's lower_name determines the on-disk location.
        await session.refresh(self, attribute_names=["user"])

        return config.application.working_directory.joinpath(
            "repository", self.user.lower_name
        )

    async def remove(self, session: SessionContext, config: Config):
        """Delete contained entries, the on-disk tree, then the database row.

        Raises:
            RepositoryError: if the directory tree cannot be removed.
        """
        session.add(self)
        await session.refresh(self, attribute_names=["directories", "files"])

        for directory in self.directories:
            # Only roots are removed directly; nested directories go with
            # their parent. Directory.remove takes (session, config) — the
            # original call omitted config and would raise TypeError.
            if directory.is_root():
                await directory.remove(session, config)

        for file in self.files:
            # File.remove also takes (session, config); see the file router.
            await file.remove(session, config)

        repository_path = await self.real_path(session, config)

        try:
            shutil.rmtree(str(repository_path))
        except OSError as e:
            raise RepositoryError(
                f"Failed to remove repository at /{repository_path.relative_to(config.application.working_directory)}:",
                *e.args,
            ) from e

        await session.delete(self)
        await session.flush()

    async def update(self, session: SessionContext):
        """Write the current column values back with an explicit UPDATE."""
        await session.execute(
            sa.update(Repository).values(self.to_dict()).where(Repository.id == self.id)
        )
        await session.flush()

    @staticmethod
    async def from_user(user: "User", session: SessionContext) -> Optional[Self]:
        """Return the user's repository, or None if none exists yet."""
        session.add(user)
        await session.refresh(user, attribute_names=["repository"])
        return user.repository

    async def used_capacity(self, session: SessionContext) -> int:
        """Total size in bytes of all files in the repository."""
        session.add(self)
        await session.refresh(self, attribute_names=["files"])
        # Generator avoids materializing an intermediate list.
        return sum(file.size for file in self.files)

    async def remaining_capacity(self, session: SessionContext) -> int:
        """Free space in bytes (capacity minus used)."""
        return self.capacity - await self.used_capacity(session)

    async def info(self, session: SessionContext) -> "RepositoryInfo":
        """Build a RepositoryInfo DTO including current usage."""
        info = RepositoryInfo.model_validate(self)
        info.used = await self.used_capacity(session)
        return info
class RepositoryInfo(BaseModel):
    """Public view of a repository: capacity and current usage in bytes."""

    model_config = ConfigDict(from_attributes=True)

    id: int
    capacity: int
    # Filled in separately by Repository.info(); None until computed.
    used: Optional[int] = None


class RepositoryContent(BaseModel):
    """Top-level listing of a repository: its files and directories."""

    model_config = ConfigDict(arbitrary_types_allowed=True)

    files: list["FileInfo"]
    directories: list["DirectoryInfo"]
from materia.models.user import User
from materia.models.directory import Directory, DirectoryInfo
from materia.models.file import File, FileInfo

225
src/materia/models/user.py Normal file
View File

@ -0,0 +1,225 @@
from uuid import UUID, uuid4
from typing import Optional, Self, BinaryIO
import time
import re
from pydantic import BaseModel, EmailStr, ConfigDict
from sqlalchemy import BigInteger
from sqlalchemy.orm import mapped_column, Mapped, relationship
import sqlalchemy as sa
from PIL import Image
from sqids.sqids import Sqids
from aiofiles import os as async_os
from materia import security
from materia.models.base import Base
from materia.models.auth.source import LoginType
from materia.core import SessionContext, Config, FileSystem
# A valid username starts with a letter or digit and may continue with
# word characters, dots or dashes.
valid_username = re.compile(r"^[\da-zA-Z][-.\w]*$")
# Rejects runs of two or more punctuation characters and a trailing one.
invalid_username = re.compile(r"[-._]{2,}|[-._]$")
class UserError(Exception):
    """Raised for invalid user data or failed user operations."""
class User(Base):
    """Account model: credentials, profile data and avatar handling."""

    __tablename__ = "user"

    id: Mapped[UUID] = mapped_column(primary_key=True, default=uuid4)
    name: Mapped[str] = mapped_column(unique=True)
    # Case-insensitive handle; kept in sync with `name` by edit_name().
    lower_name: Mapped[str] = mapped_column(unique=True)
    full_name: Mapped[Optional[str]]
    email: Mapped[str]
    is_email_private: Mapped[bool] = mapped_column(default=True)
    hashed_password: Mapped[str]
    must_change_password: Mapped[bool] = mapped_column(default=False)
    login_type: Mapped["LoginType"]
    # Unix timestamps (seconds).
    created: Mapped[int] = mapped_column(BigInteger, default=time.time)
    updated: Mapped[int] = mapped_column(BigInteger, default=time.time)
    last_login: Mapped[int] = mapped_column(BigInteger, nullable=True)
    is_active: Mapped[bool] = mapped_column(default=False)
    is_admin: Mapped[bool] = mapped_column(default=False)
    # Identifier of the avatar image stored under <working_directory>/avatars.
    avatar: Mapped[Optional[str]]

    repository: Mapped["Repository"] = relationship(back_populates="user")

    async def new(self, session: SessionContext, config: Config) -> Optional[Self]:
        """Persist the user. Validation is expected to happen at the caller."""
        session.add(self)
        await session.flush()
        return self

    async def remove(self, session: SessionContext, config: Optional[Config] = None):
        """Delete the user together with their repository, if any.

        NOTE(review): the original called `self.repository.remove()` with no
        arguments, which raises TypeError (Repository.remove requires session
        and config). `config` is optional here for backward compatibility but
        must be supplied whenever the user owns a repository.
        """
        session.add(self)
        await session.refresh(self, attribute_names=["repository"])

        if self.repository:
            await self.repository.remove(session, config)

        await session.delete(self)
        await session.flush()

    def update_last_login(self):
        """Record the current time as the last successful login."""
        self.last_login = int(time.time())

    def is_local(self) -> bool:
        """True when the account authenticates with a local password."""
        return self.login_type == LoginType.Plain

    def is_oauth2(self) -> bool:
        """True when the account authenticates via OAuth2."""
        return self.login_type == LoginType.OAuth2

    @staticmethod
    def check_username(name: str) -> bool:
        """Validate a username against the module-level regex rules."""
        return bool(valid_username.match(name) and not invalid_username.match(name))

    @staticmethod
    def check_password(password: str, config: Config) -> bool:
        """True when the password meets the configured minimum length."""
        return len(password) >= config.security.password_min_length

    @staticmethod
    async def count(session: SessionContext) -> Optional[int]:
        """Return the total number of registered users."""
        return await session.scalar(sa.select(sa.func.count(User.id)))

    @staticmethod
    async def by_name(
        name: str, session: SessionContext, with_lower: bool = False
    ) -> Optional[Self]:
        """Look a user up by name; `with_lower` matches case-insensitively."""
        if with_lower:
            query = User.lower_name == name.lower()
        else:
            query = User.name == name
        return (await session.scalars(sa.select(User).where(query))).first()

    @staticmethod
    async def by_email(email: str, session: SessionContext) -> Optional[Self]:
        """Look a user up by exact email address."""
        return (
            await session.scalars(sa.select(User).where(User.email == email))
        ).first()

    @staticmethod
    async def by_id(id: UUID, session: SessionContext) -> Optional[Self]:
        """Look a user up by primary key."""
        return (await session.scalars(sa.select(User).where(User.id == id))).first()

    async def edit_name(self, name: str, session: SessionContext) -> Self:
        """Rename the user, keeping lower_name in sync.

        Raises:
            UserError: if the new name fails validation.
        """
        if not User.check_username(name):
            raise UserError(f"Invalid username: {name}")

        self.name = name
        self.lower_name = name.lower()
        session.add(self)
        await session.flush()
        return self

    async def edit_password(
        self, password: str, session: SessionContext, config: Config
    ) -> Self:
        """Replace the stored password hash.

        Raises:
            UserError: if the password fails validation.
        """
        if not User.check_password(password, config):
            raise UserError("Invalid password")

        self.hashed_password = security.hash_password(
            password, algo=config.security.password_hash_algo
        )
        session.add(self)
        await session.flush()
        return self

    async def edit_email(self):
        # TODO: not implemented yet.
        pass

    def info(self) -> "UserInfo":
        """Build a UserInfo DTO from this model."""
        return UserInfo.model_validate(self)

    async def edit_avatar(
        self, avatar: BinaryIO | None, session: SessionContext, config: Config
    ):
        """Set, replace or (when `avatar` is None) delete the user's avatar.

        Raises:
            UserError: if the image cannot be read or saved.
        """
        avatar_dir = config.application.working_directory.joinpath("avatars")

        if avatar is None:
            # Deletion request: remove the stored file, clear the column.
            if self.avatar is None:
                return
            avatar_file = FileSystem(
                avatar_dir.joinpath(self.avatar), config.application.working_directory
            )
            if await avatar_file.exists():
                await avatar_file.remove()
            session.add(self)
            self.avatar = None
            await session.flush()
            return

        try:
            image = Image.open(avatar)
        except Exception as e:
            raise UserError("Failed to read avatar data") from e

        # Existing avatar ids act as a blocklist so a fresh id is unique.
        avatar_hashes: list[str] = (
            await session.scalars(sa.select(User.avatar).where(User.avatar.isnot(None)))
        ).all()
        avatar_id = Sqids(min_length=10, blocklist=avatar_hashes).encode(
            [int(time.time())]
        )

        try:
            if not avatar_dir.exists():
                await async_os.mkdir(avatar_dir)
            image.save(avatar_dir.joinpath(avatar_id), format=image.format)
        except Exception as e:
            raise UserError(f"Failed to save avatar: {e}") from e

        # Replace: delete the previous image only after the new one saved.
        if old_avatar := self.avatar:
            avatar_file = FileSystem(
                avatar_dir.joinpath(old_avatar), config.application.working_directory
            )
            if await avatar_file.exists():
                await avatar_file.remove()

        session.add(self)
        self.avatar = avatar_id
        await session.flush()
class UserCredentials(BaseModel):
    """Signup/signin request payload."""

    name: str
    password: str
    # NOTE(review): Optional without a default is required-but-nullable in
    # pydantic v2 — clients must send "email": null explicitly; confirm
    # whether `= None` was intended.
    email: Optional[EmailStr]


class UserInfo(BaseModel):
    """Public profile representation returned by the user API."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    name: str
    lower_name: str
    full_name: Optional[str]
    email: Optional[str]
    is_email_private: bool
    must_change_password: bool
    login_type: "LoginType"
    created: int
    updated: int
    last_login: Optional[int]
    is_active: bool
    is_admin: bool
    avatar: Optional[str]
from materia.models.repository import Repository

View File

@ -0,0 +1 @@
from materia.routers import middleware, api, resources, root, docs

View File

@ -0,0 +1,17 @@
from fastapi import APIRouter, HTTPException
from materia.routers.api.auth import auth, oauth
from materia.routers.api import docs, user, repository, directory, file

# Aggregated API router; every sub-router is served under /api.
router = APIRouter(prefix="/api")
router.include_router(docs.router)
router.include_router(auth.router)
router.include_router(oauth.router)
router.include_router(user.router)
router.include_router(repository.router)
router.include_router(directory.router)
router.include_router(file.router)


# Catch-all 404 for unknown API paths. The router already carries the
# /api prefix, so the path must not repeat it: the original
# "/api/{catchall:path}" was only reachable at /api/api/... Specific
# routes win because they are registered before this one.
@router.get("/{catchall:path}", status_code=404, include_in_schema=False)
def not_found():
    raise HTTPException(status_code=404)

View File

View File

@ -0,0 +1,100 @@
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Response, status
from materia import security
from materia.routers.middleware import Context
from materia.models import LoginType, User, UserCredentials
router = APIRouter(tags=["auth"])
@router.post("/auth/signup")
async def signup(body: UserCredentials, ctx: Context = Depends()):
    """Register a new account; the very first registered user becomes admin."""
    if not User.check_username(body.name):
        raise HTTPException(
            status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Invalid username"
        )
    if not User.check_password(body.password, ctx.config):
        raise HTTPException(
            status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Password is too short (minimum length {ctx.config.security.password_min_length})",
        )

    async with ctx.database.session() as session:
        # Uniqueness checks: name (case-insensitive) and email.
        if await User.by_name(body.name, session, with_lower=True):
            raise HTTPException(status.HTTP_409_CONFLICT, detail="User already exists")
        if await User.by_email(body.email, session):  # type: ignore
            raise HTTPException(status.HTTP_409_CONFLICT, detail="Email already used")

        registered_users: Optional[int] = await User.count(session)

        new_user = User(
            name=body.name,
            lower_name=body.name.lower(),
            full_name=body.name,
            email=body.email,
            hashed_password=security.hash_password(
                body.password, algo=ctx.config.security.password_hash_algo
            ),
            login_type=LoginType.Plain,
            # first registered user is admin
            is_admin=registered_users == 0,
        )
        await new_user.new(session, ctx.config)

        await session.commit()
@router.post("/auth/signin")
async def signin(body: UserCredentials, response: Response, ctx: Context = Depends()):
    """Authenticate by name or email and set access/refresh token cookies."""
    async with ctx.database.session() as session:
        # Try name first, then fall back to email.
        if (current_user := await User.by_name(body.name, session)) is None:
            if (current_user := await User.by_email(str(body.email), session)) is None:
                # The original detail said "Invalid email" even when the
                # client signed in by name; report the actual condition.
                raise HTTPException(
                    status.HTTP_401_UNAUTHORIZED, detail="Invalid name or email"
                )

        if not security.validate_password(
            body.password,
            current_user.hashed_password,
            algo=ctx.config.security.password_hash_algo,
        ):
            raise HTTPException(status.HTTP_401_UNAUTHORIZED, detail="Invalid password")

        issuer = "{}://{}".format(ctx.config.server.scheme, ctx.config.server.domain)
        # HMAC algorithms sign with the shared secret, others with the key.
        secret = (
            ctx.config.oauth2.jwt_secret
            if ctx.config.oauth2.jwt_signing_algo in ["HS256", "HS384", "HS512"]
            else ctx.config.oauth2.jwt_signing_key
        )

        access_token = security.generate_token(
            str(current_user.id),
            str(secret),
            ctx.config.oauth2.access_token_lifetime,
            issuer,
        )
        # NOTE(review): the refresh token carries an empty subject, so it
        # cannot be tied back to the user on refresh — confirm intended.
        refresh_token = security.generate_token(
            "", str(secret), ctx.config.oauth2.refresh_token_lifetime, issuer
        )

        response.set_cookie(
            ctx.config.security.cookie_access_token_name,
            value=access_token,
            max_age=ctx.config.oauth2.access_token_lifetime,
            secure=True,
            httponly=ctx.config.security.cookie_http_only,
            samesite="lax",
        )
        response.set_cookie(
            ctx.config.security.cookie_refresh_token_name,
            value=refresh_token,
            max_age=ctx.config.oauth2.refresh_token_lifetime,
            secure=True,
            httponly=ctx.config.security.cookie_http_only,
            samesite="lax",
        )
@router.get("/auth/signout")
async def signout(response: Response, ctx: Context = Depends()):
    """Clear both authentication cookies."""
    for cookie_name in (
        ctx.config.security.cookie_access_token_name,
        ctx.config.security.cookie_refresh_token_name,
    ):
        response.delete_cookie(cookie_name)

View File

@ -0,0 +1,82 @@
from typing import Annotated, Optional, Union
from fastapi import APIRouter, Depends, Form, HTTPException
from fastapi.security import OAuth2PasswordRequestFormStrict, SecurityScopes
from fastapi.security.oauth2 import OAuth2PasswordRequestForm
from pydantic import BaseModel, HttpUrl
from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR
from materia.models import User
from materia.routers.middleware import Context
router = APIRouter(tags = ["oauth2"])
class OAuth2AuthorizationCodeRequestForm:
    """Form payload for the OAuth2 authorization endpoint (code flow)."""

    def __init__(
        self,
        redirect_uri: Annotated[HttpUrl, Form()],
        client_id: Annotated[str, Form()],
        scope: Annotated[Union[str, None], Form()] = None,
        state: Annotated[Union[str, None], Form()] = None,
        response_type: Annotated[str, Form()] = "code",
        # The original pattern was "password", which rejected the field's
        # own default value; this form only carries the code grant.
        grant_type: Annotated[
            str, Form(pattern="authorization_code")
        ] = "authorization_code",
    ) -> None:
        self.redirect_uri = redirect_uri
        self.client_id = client_id
        self.scope = scope
        self.state = state
        self.response_type = response_type
        self.grant_type = grant_type
class AuthorizationCodeResponse(BaseModel):
    """Response carrying the authorization code issued to the client."""

    code: str
@router.post("/oauth2/authorize")
async def authorize(
    form: Annotated[OAuth2AuthorizationCodeRequestForm, Depends()],
    ctx: Context = Depends(),
):
    """OAuth2 authorization endpoint (work in progress).

    Only the authorization-code grant is sketched out; no response or
    redirect is produced yet.
    """
    # grant_type: authorization_code, password_credentials, client_credentials, authorization_code (pkce)
    ctx.logger.debug(form)

    if form.grant_type == "authorization_code":
        # TODO: form validation
        # NOTE(review): OAuth2Application is not imported in this module,
        # so this branch raises NameError at runtime — add the import.
        if not (app := await OAuth2Application.by_client_id(form.client_id, ctx.database)):
            raise HTTPException(status_code=HTTP_500_INTERNAL_SERVER_ERROR, detail="Client ID not registered")
        if not (owner := await User.by_id(app.user_id, ctx.database)):
            raise HTTPException(status_code=HTTP_500_INTERNAL_SERVER_ERROR, detail="User not found")
        if not app.contains_redirect_uri(form.redirect_uri):
            raise HTTPException(status_code=HTTP_500_INTERNAL_SERVER_ERROR, detail="Unregistered redirect URI")
        if not form.response_type == "code":
            raise HTTPException(status_code=HTTP_500_INTERNAL_SERVER_ERROR, detail="Unsupported response type")

        # TODO: code challenge (S256, plain, ...)
        # None: if not app.confidential_client: raise ...

        grant = await app.grant_by_user_id(owner.id, ctx.database)
        if app.confidential_client and grant is not None:
            # NOTE(review): `code` is generated but never returned.
            code = await grant.generate_authorization_code(form.redirect_uri, ctx.cache)
            # TODO: include state to redirect_uri
            # return redirect

        # redirect to grant page
    else:
        raise HTTPException(status_code=HTTP_500_INTERNAL_SERVER_ERROR, detail="Unsupported grant type")

    pass
class AccessTokenResponse(BaseModel):
    """Token endpoint response: access/refresh token pair with metadata."""

    access_token: str
    token_type: str
    # Lifetime of the access token in seconds.
    expires_in: int
    refresh_token: str
    scope: Optional[str]


@router.post("/oauth2/access_token")
async def token(ctx: Context = Depends()):
    # TODO: token exchange is not implemented yet.
    pass

View File

@ -0,0 +1,198 @@
from pathlib import Path
from fastapi import APIRouter, Depends, HTTPException, status
from materia.models import (
User,
Directory,
DirectoryInfo,
DirectoryContent,
DirectoryPath,
DirectoryRename,
DirectoryCopyMove,
Repository,
)
from materia.core import SessionContext, Config, FileSystem
from materia.routers import middleware
router = APIRouter(tags=["directory"])
async def validate_current_directory(
    path: Path, repository: Repository, session: SessionContext, config: Config
) -> Directory:
    """Resolve `path` to an existing directory of `repository` or raise."""
    if not FileSystem.check_path(path):
        raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR, "Invalid path")

    normalized = FileSystem.normalize(path)
    found = await Directory.by_path(repository, normalized, session, config)
    if found is None:
        raise HTTPException(status.HTTP_404_NOT_FOUND, "Directory not found")

    return found
async def validate_target_directory(
    path: Path, repository: Repository, session: SessionContext, config: Config
) -> Directory:
    """Resolve a move/copy target path; None stands for the repository root."""
    if not FileSystem.check_path(path):
        raise HTTPException(
            status.HTTP_500_INTERNAL_SERVER_ERROR, "Invalid target path"
        )

    normalized = FileSystem.normalize(path)
    if normalized == Path():
        # An empty normalized path addresses the repository root.
        return None

    found = await Directory.by_path(repository, normalized, session, config)
    if found is None:
        raise HTTPException(status.HTTP_404_NOT_FOUND, "Target directory not found")

    return found
@router.post("/directory")
async def create(
    path: DirectoryPath,
    repository: Repository = Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """Create a directory, materializing every missing path component."""
    if not FileSystem.check_path(path.path):
        raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR, "Invalid path")

    async with ctx.database.session() as session:
        parent = None
        walked = Path()

        # Walk the normalized path piece by piece, creating whatever
        # component does not exist yet under the current parent.
        for part in FileSystem.normalize(path.path).parts:
            found = await Directory.by_path(
                repository, walked.joinpath(part), session, ctx.config
            )
            if found is None:
                found = await Directory(
                    repository_id=repository.id,
                    parent_id=parent.id if parent else None,
                    name=part,
                ).new(session, ctx.config)
            parent = found
            walked /= part

        await session.commit()
@router.get("/directory")
async def info(
    path: Path,
    repository: Repository = Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """Return metadata for the directory at `path`."""
    async with ctx.database.session() as db:
        directory = await validate_current_directory(path, repository, db, ctx.config)
        return await directory.info(db)
@router.delete("/directory")
async def remove(
    path: Path,
    repository: Repository = Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """Delete the directory at `path`."""
    async with ctx.database.session() as db:
        directory = await validate_current_directory(path, repository, db, ctx.config)
        await directory.remove(db, ctx.config)
        await db.commit()
@router.patch("/directory/rename")
async def rename(
    data: DirectoryRename,
    repository: Repository = Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """Rename the directory at `data.path` to `data.name`."""
    async with ctx.database.session() as db:
        directory = await validate_current_directory(
            data.path, repository, db, ctx.config
        )
        await directory.rename(data.name, db, ctx.config, force=data.force)
        await db.commit()
@router.patch("/directory/move")
async def move(
    data: DirectoryCopyMove,
    repository: Repository = Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """Move the directory at `data.path` into `data.target`."""
    async with ctx.database.session() as db:
        directory = await validate_current_directory(
            data.path, repository, db, ctx.config
        )
        destination = await validate_target_directory(
            data.target, repository, db, ctx.config
        )
        await directory.move(destination, db, ctx.config, force=data.force)
        await db.commit()
@router.post("/directory/copy")
async def copy(
    data: DirectoryCopyMove,
    repository: Repository = Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """Copy the directory at `data.path` into `data.target`."""
    async with ctx.database.session() as db:
        directory = await validate_current_directory(
            data.path, repository, db, ctx.config
        )
        destination = await validate_target_directory(
            data.target, repository, db, ctx.config
        )
        await directory.copy(destination, db, ctx.config, force=data.force)
        await db.commit()
@router.get("/directory/content", response_model=DirectoryContent)
async def content(
    path: Path,
    repository: Repository = Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """List the files and subdirectories directly inside `path`."""
    async with ctx.database.session() as session:
        directory = await validate_current_directory(
            path, repository, session, ctx.config
        )
        session.add(directory)
        # Load both relationships in a single refresh instead of two
        # separate round-trips.
        await session.refresh(directory, attribute_names=["directories", "files"])

        return DirectoryContent(
            files=[await _file.info(session) for _file in directory.files],
            directories=[
                await _directory.info(session) for _directory in directory.directories
            ],
        )

View File

@ -0,0 +1,40 @@
from fastapi import APIRouter, Request
from fastapi.responses import HTMLResponse
router = APIRouter()
@router.get("/docs", response_class=HTMLResponse, include_in_schema=False)
async def rapidoc(request: Request):
    """Render the RapiDoc UI for the application's OpenAPI schema.

    Fixes over the original markup: the duplicated `theme = "dark"`
    attribute is dropped, and the Google Fonts link uses https so the
    page does not trigger mixed-content blocking on https deployments.
    """
    return f"""
        <!doctype html>
        <html>
            <head>
                <meta charset="utf-8">
                <link href='https://fonts.googleapis.com/css?family=Roboto' rel='stylesheet' type='text/css'>
                <script
                    type="module"
                    src="https://unpkg.com/rapidoc/dist/rapidoc-min.js"
                ></script>
            </head>
            <body>
                <rapi-doc
                    spec-url="{request.app.openapi_url}"
                    theme = "dark"
                    show-header = "false"
                    show-info = "true"
                    allow-authentication = "true"
                    allow-server-selection = "true"
                    allow-api-list-style-selection = "true"
                    render-style = "focused"
                    bg-color="#1e2129"
                    primary-color="#a47bea"
                    regular-font="Roboto"
                    mono-font="Roboto Mono"
                    show-method-in-nav-bar="as-colored-text">
                    <img slot="logo" style="display: none"/>
                </rapi-doc>
            </body>
        </html>
        """

View File

@ -0,0 +1,223 @@
from typing import Annotated, Optional
from pathlib import Path
from fastapi import (
Request,
APIRouter,
Depends,
HTTPException,
status,
UploadFile,
File as _File,
Form,
)
from fastapi.responses import JSONResponse
from materia.models import (
User,
File,
FileInfo,
Directory,
Repository,
FileRename,
FileCopyMove,
)
from materia.core import (
SessionContext,
Config,
FileSystem,
TemporaryFileTarget,
Database,
)
from materia.routers import middleware
from materia.routers.api.directory import validate_target_directory
from streaming_form_data import StreamingFormDataParser
from streaming_form_data.targets import ValueTarget
from starlette.requests import ClientDisconnect
from aiofiles import ospath as async_path
from materia.tasks import remove_cache_file
router = APIRouter(tags=["file"])
async def validate_current_file(
    path: Path, repository: Repository, session: SessionContext, config: Config
) -> File:
    """Resolve `path` to an existing file of `repository` or raise.

    (The original return annotation said Directory; the function returns
    a File.)
    """
    if not FileSystem.check_path(path):
        raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR, "Invalid path")

    if not (
        file := await File.by_path(
            repository,
            FileSystem.normalize(path),
            session,
            config,
        )
    ):
        raise HTTPException(status.HTTP_404_NOT_FOUND, "File not found")

    return file
class FileSizeValidator:
    """Streaming-upload guard: accumulates chunk sizes and rejects the
    request once the total exceeds the given capacity."""

    def __init__(self, capacity: int):
        # Total number of bytes observed so far.
        self.body = 0
        # Maximum allowed total, in bytes.
        self.capacity = capacity

    def __call__(self, chunk: bytes):
        """Account for one chunk; raise 413 when the limit is exceeded."""
        self.body += len(chunk)
        if self.body > self.capacity:
            raise HTTPException(status.HTTP_413_REQUEST_ENTITY_TOO_LARGE)
@router.post("/file", openapi_extra={
    "requestBody": {
        "content": {
            "multipart/form-data": {
                "schema": {
                    "required": ["file", "path"],
                    "type": "object",
                    "properties": {
                        "file": {"type": "string", "format": "binary"},
                        "path": {"type": "string", "format": "path", "example": "/"}
                    }
                }
            }
        },
        "required": True
    }
})
async def create(
    request: Request,
    repository: Repository = Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """Stream a multipart upload into a temporary file, then register it
    in the target directory of the repository."""
    async with ctx.database.session() as session:
        # Remaining free space bounds the upload size.
        capacity = await repository.remaining_capacity(session)

    try:
        # Spool the upload to a temporary file, enforcing the size limit
        # chunk by chunk via the validator.
        file = TemporaryFileTarget(
            ctx.config.application.working_directory,
            validator=FileSizeValidator(capacity),
        )
        path = ValueTarget()

        ctx.logger.debug(f"Shedule remove cache file: {file.path().name}")
        # Safety net: a background task deletes the temporary file even if
        # this request dies. NOTE(review): the whole Config object is
        # passed as a task argument — confirm it serializes with the
        # configured Celery serializer.
        remove_cache_file.apply_async(args=(file.path(), ctx.config), countdown=10)

        parser = StreamingFormDataParser(headers=request.headers)
        parser.register("file", file)
        parser.register("path", path)

        async for chunk in request.stream():
            # NOTE(review): data_received is synchronous; large chunks
            # block the event loop while being parsed.
            parser.data_received(chunk)
    except ClientDisconnect:
        file.remove()
        raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR, "Client disconnect")
    except HTTPException as e:
        # Re-raise (e.g. 413 from the validator) after cleaning up.
        file.remove()
        raise e
    except Exception as e:
        file.remove()
        raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR, " ".join(e.args))

    # The "path" form field arrives as raw bytes.
    path = Path(path.value.decode())

    if not file.multipart_filename:
        file.remove()
        raise HTTPException(
            status.HTTP_417_EXPECTATION_FAILED, "Cannot upload file without name"
        )

    if not FileSystem.check_path(path):
        file.remove()
        raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR, "Invalid path")

    async with ctx.database.session() as session:
        target_directory = await validate_target_directory(
            path, repository, session, ctx.config
        )

        try:
            await File(
                repository_id=repository.id,
                parent_id=target_directory.id if target_directory else None,
                name=file.multipart_filename,
                size=await async_path.getsize(file.path()),
            ).new(file.path(), session, ctx.config)
        except Exception:
            raise HTTPException(
                status.HTTP_500_INTERNAL_SERVER_ERROR, "Failed to create file"
            )
        else:
            await session.commit()
@router.get("/file", response_model=FileInfo)
async def info(
    path: Path,
    repository: Repository = Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """Return metadata for the file at `path`."""
    async with ctx.database.session() as db:
        target = await validate_current_file(path, repository, db, ctx.config)
        return target.info()
@router.delete("/file")
async def remove(
    path: Path,
    repository: Repository = Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """Delete the file at `path`."""
    async with ctx.database.session() as db:
        target = await validate_current_file(path, repository, db, ctx.config)
        await target.remove(db, ctx.config)
        await db.commit()
@router.patch("/file/rename")
async def rename(
    data: FileRename,
    repository: Repository = Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """Rename the file at `data.path` to `data.name`."""
    async with ctx.database.session() as db:
        target = await validate_current_file(data.path, repository, db, ctx.config)
        await target.rename(data.name, db, ctx.config, force=data.force)
        await db.commit()
@router.patch("/file/move")
async def move(
    data: FileCopyMove,
    repository: Repository = Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """Move the file at `data.path` into `data.target`."""
    async with ctx.database.session() as db:
        target = await validate_current_file(data.path, repository, db, ctx.config)
        destination = await validate_target_directory(
            data.target, repository, db, ctx.config
        )
        await target.move(destination, db, ctx.config, force=data.force)
        await db.commit()
@router.post("/file/copy")
async def copy(
    data: FileCopyMove,
    repository: Repository = Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """Copy the file at `data.path` into `data.target`."""
    async with ctx.database.session() as db:
        target = await validate_current_file(data.path, repository, db, ctx.config)
        destination = await validate_target_directory(
            data.target, repository, db, ctx.config
        )
        await target.copy(destination, db, ctx.config, force=data.force)
        await db.commit()

View File

@ -0,0 +1,73 @@
from fastapi import APIRouter, Depends, HTTPException, status
from materia.models import (
User,
Repository,
RepositoryInfo,
RepositoryContent,
FileInfo,
DirectoryInfo,
)
from materia.routers import middleware
router = APIRouter(tags=["repository"])
@router.post("/repository")
async def create(
    user: User = Depends(middleware.user), ctx: middleware.Context = Depends()
):
    """Create the authenticated user's repository (one per user).

    The existence check and the insert now share one session/transaction,
    narrowing the race window the original two-session version had.
    """
    async with ctx.database.session() as session:
        if await Repository.from_user(user, session):
            raise HTTPException(status.HTTP_409_CONFLICT, "Repository already exists")
        try:
            await Repository(
                user_id=user.id, capacity=ctx.config.repository.capacity
            ).new(session, ctx.config)
            await session.commit()
        except Exception as e:
            # Stringify args defensively (they may not all be str) and
            # chain the cause for debugging.
            raise HTTPException(
                status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=" ".join(map(str, e.args)),
            ) from e
@router.get("/repository", response_model=RepositoryInfo)
async def info(
    repository=Depends(middleware.repository), ctx: middleware.Context = Depends()
):
    """Return the repository's capacity and current usage."""
    async with ctx.database.session() as db:
        return await repository.info(db)
@router.delete("/repository")
async def remove(
    repository=Depends(middleware.repository),
    ctx: middleware.Context = Depends(),
):
    """Delete the authenticated user's repository and all its content."""
    try:
        async with ctx.database.session() as db:
            await repository.remove(db, ctx.config)
            await db.commit()
    except Exception as e:
        raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR, f"{e}")
@router.get("/repository/content", response_model=RepositoryContent)
async def content(
    repository=Depends(middleware.repository), ctx: middleware.Context = Depends()
):
    """List the repository's top-level files and directories."""
    async with ctx.database.session() as session:
        session.add(repository)
        # Load both relationships in a single refresh instead of two
        # separate round-trips.
        await session.refresh(repository, attribute_names=["directories", "files"])

        return RepositoryContent(
            files=[await _file.info(session) for _file in repository.files],
            directories=[
                await _directory.info(session) for _directory in repository.directories
            ],
        )

View File

@ -0,0 +1,16 @@
from celery.result import AsyncResult
from fastapi import APIRouter
from fastapi.responses import JSONResponse

router = APIRouter(tags=["tasks"])


# The original path was "/tasks/${task_id}" — a JavaScript template-literal
# leftover that FastAPI treats as a literal "$" segment, so the route never
# matched. FastAPI path parameters use plain braces.
@router.get("/tasks/{task_id}")
async def status_task(task_id):
    """Report the status and result of a background Celery task."""
    task_result = AsyncResult(task_id)
    result = {
        "task_id": task_id,
        "task_status": task_result.status,
        "task_result": task_result.result,
    }
    return JSONResponse(result)

View File

@ -0,0 +1,67 @@
import uuid
import io
from fastapi import APIRouter, Depends, HTTPException, status, UploadFile
from materia.models import User, UserInfo
from materia.routers import middleware
router = APIRouter(tags=["user"])
@router.get("/user", response_model=UserInfo)
async def info(
    claims=Depends(middleware.jwt_cookie), ctx: middleware.Context = Depends()
):
    """Return the profile of the user identified by the JWT cookie."""
    async with ctx.database.session() as db:
        current_user = await User.by_id(uuid.UUID(claims.sub), db)
        if current_user is None:
            raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Missing user")
        return current_user.info()
@router.delete("/user")
async def remove(
    user: User = Depends(middleware.user), ctx: middleware.Context = Depends()
):
    """Delete the authenticated user's account."""
    try:
        async with ctx.database.session() as db:
            await user.remove(db)
            await db.commit()
    except Exception as e:
        raise HTTPException(
            status.HTTP_500_INTERNAL_SERVER_ERROR, f"Failed to remove user: {e}"
        ) from e
@router.put("/user/avatar")
async def avatar(
    file: UploadFile,
    user: User = Depends(middleware.user),
    ctx: middleware.Context = Depends(),
):
    """Upload and set a new avatar image for the authenticated user."""
    async with ctx.database.session() as session:
        try:
            await user.edit_avatar(io.BytesIO(await file.read()), session, ctx.config)
            await session.commit()
        except Exception as e:
            # Chain the cause (consistent with the user removal handler).
            raise HTTPException(
                status.HTTP_500_INTERNAL_SERVER_ERROR,
                f"{e}",
            ) from e
@router.delete("/user/avatar")
async def remove_avatar(
    user: User = Depends(middleware.user),
    ctx: middleware.Context = Depends(),
):
    """Delete the authenticated user's avatar, if one is set."""
    async with ctx.database.session() as session:
        try:
            await user.edit_avatar(None, session, ctx.config)
            await session.commit()
        except Exception as e:
            # Chain the cause (consistent with the user removal handler).
            raise HTTPException(
                status.HTTP_500_INTERNAL_SERVER_ERROR,
                f"{e}",
            ) from e

View File

@ -0,0 +1,33 @@
from fastapi import APIRouter, Request, Response, status, HTTPException, Depends
from fastapi.responses import HTMLResponse, FileResponse
from fastapi.templating import Jinja2Templates
from fastapi.staticfiles import StaticFiles
import mimetypes
from pathlib import Path
from materia.core.misc import optional
from materia.routers import middleware
from materia import docs as materia_docs
router = APIRouter()
# templates = Jinja2Templates(directory=Path(materia_docs.__path__[0]))
# p = Path(__file__).parent.joinpath("..", "docs").resolve()
# router.mount("/docs", StaticFiles(directory="doces", html=True), name="docs")
@router.get("/docs/{catchall:path}", include_in_schema=False)
async def docs(request: Request, ctx: middleware.Context = Depends()):
    """Serve static documentation files from the bundled docs package.

    Resolves the requested path and rejects anything that escapes the docs
    directory (path traversal). Directories are served via their index.html.
    """
    docs_directory = Path(materia_docs.__path__[0]).resolve()
    target = docs_directory.joinpath(request.path_params["catchall"]).resolve()
    # Path traversal guard: the resolved target must stay inside docs_directory.
    # Path.is_relative_to (3.9+) replaces the optional(target.relative_to, ...)
    # helper with the equivalent stdlib check.
    if not target.is_relative_to(docs_directory):
        raise HTTPException(status.HTTP_403_FORBIDDEN)
    if target.is_dir() and (index := target.joinpath("index.html")).is_file():
        return FileResponse(index)
    if not target.is_file():
        raise HTTPException(status.HTTP_404_NOT_FOUND)
    return FileResponse(target)

View File

@ -0,0 +1,112 @@
from typing import Optional
import uuid
from datetime import datetime
from pathlib import Path
from fastapi import HTTPException, Request, Response, status, Depends
from fastapi.security.base import SecurityBase
import jwt
from sqlalchemy import select
from pydantic import BaseModel
from enum import StrEnum
from http import HTTPMethod as HttpMethod
from fastapi.security import (
HTTPBearer,
OAuth2PasswordBearer,
OAuth2PasswordRequestForm,
APIKeyQuery,
APIKeyCookie,
APIKeyHeader,
)
from materia import security
from materia.models import User, Repository
class Context:
    """Per-request bundle of the application services stored on request state."""

    def __init__(self, request: Request):
        # All shared services are attached to request.state by the app setup.
        state = request.state
        self.config = state.config
        self.database = state.database
        self.cache = state.cache
        self.logger = state.logger
async def jwt_cookie(request: Request, response: Response, ctx: Context = Depends()):
    """Authenticate a request from its access-token cookie.

    If the access token is expired but a valid refresh token cookie is
    present, a new access token is minted and set on the response. Returns
    the validated access-token claims or raises 401.
    """
    if not (
        access_token := request.cookies.get(
            ctx.config.security.cookie_access_token_name
        )
    ):
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Missing token")
    refresh_token = request.cookies.get(ctx.config.security.cookie_refresh_token_name)
    # HMAC algorithms sign with the shared secret; otherwise use the signing key.
    if ctx.config.oauth2.jwt_signing_algo in ["HS256", "HS384", "HS512"]:
        secret = ctx.config.oauth2.jwt_secret
    else:
        secret = ctx.config.oauth2.jwt_signing_key
    issuer = "{}://{}".format(ctx.config.server.scheme, ctx.config.server.domain)
    try:
        refresh_claims = (
            security.validate_token(refresh_token, secret) if refresh_token else None
        )
        if refresh_claims:
            if refresh_claims.exp < datetime.now().timestamp():
                refresh_claims = None
    except jwt.PyJWTError:
        # An invalid refresh token simply disables silent re-issue.
        refresh_claims = None
    try:
        access_claims = security.validate_token(access_token, secret)
        if access_claims.exp < datetime.now().timestamp():
            if refresh_claims:
                new_access_token = security.generate_token(
                    access_claims.sub,
                    str(secret),
                    ctx.config.oauth2.access_token_lifetime,
                    issuer,
                )
                access_claims = security.validate_token(new_access_token, secret)
                response.set_cookie(
                    ctx.config.security.cookie_access_token_name,
                    value=new_access_token,
                    max_age=ctx.config.oauth2.access_token_lifetime,
                    secure=True,
                    httponly=ctx.config.security.cookie_http_only,
                    samesite="lax",
                )
            else:
                access_claims = None
    except jwt.PyJWTError as e:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, f"Invalid token: {e}") from e
    # BUG FIX: previously an expired access token without a usable refresh token
    # left access_claims = None and crashed below with AttributeError on
    # `access_claims.sub`, turning an auth failure into a 500. Reject explicitly.
    if access_claims is None:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Expired token")
    async with ctx.database.session() as session:
        if not await User.by_id(uuid.UUID(access_claims.sub), session):
            raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Invalid user")
    return access_claims
async def user(claims=Depends(jwt_cookie), ctx: Context = Depends()) -> User:
    """Resolve the authenticated JWT claims to a User row or raise 401."""
    async with ctx.database.session() as session:
        current_user = await User.by_id(uuid.UUID(claims.sub), session)
        if current_user is None:
            raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Missing user")
        return current_user
async def repository(user: User = Depends(user), ctx: Context = Depends()):
    """Load the authenticated user's repository relationship or raise 404."""
    async with ctx.database.session() as session:
        session.add(user)
        # Explicitly load the lazy `repository` relationship on this session.
        await session.refresh(user, attribute_names=["repository"])
        repository = user.repository
        if not repository:
            raise HTTPException(status.HTTP_404_NOT_FOUND, "Repository not found")
        return repository
async def repository_path(user: User = Depends(user), ctx: Context = Depends()) -> Path:
    """Filesystem root of the user's repository inside the data directory."""
    base = ctx.config.data_dir()
    return base.joinpath("repository", user.lower_name)

View File

@ -0,0 +1,60 @@
from fastapi import APIRouter, Depends, HTTPException, status, Response
from PIL import Image
import io
from pathlib import Path
import mimetypes
from materia.routers import middleware
from materia.core import Config
router = APIRouter(tags=["resources"], prefix="/resources")
@router.get("/avatars/{avatar_id}")
async def avatar(
    avatar_id: str, format: str = "png", ctx: middleware.Context = Depends()
):
    """Serve a stored avatar image, re-encoded to the requested format."""
    avatar_path = Config.data_dir() / "avatars" / avatar_id
    format = format.upper()
    if not avatar_path.exists():
        raise HTTPException(
            status.HTTP_404_NOT_FOUND, "Failed to find the given avatar"
        )
    try:
        # Context manager ensures the image file handle is closed.
        with Image.open(avatar_path) as img:
            buffer = io.BytesIO()
            if format == "JPEG":
                # BUG FIX: Image.convert returns a NEW image; the result was
                # previously discarded, so RGBA sources failed to save as JPEG.
                img = img.convert("RGB")
            img.save(buffer, format=format)
    except OSError:
        raise HTTPException(
            status.HTTP_422_UNPROCESSABLE_ENTITY, "Failed to process image file"
        )
    return Response(content=buffer.getvalue(), media_type=Image.MIME[format])
try:
    import materia_frontend
except ModuleNotFoundError:
    # The frontend package is optional; without it no asset route is registered.
    pass
else:

    # NOTE(review): the route path was garbled in the source capture; restored to
    # "{filename}" to match the handler's `filename` path parameter — confirm.
    @router.get("/assets/{filename}")
    async def assets(filename: str):
        """Serve a bundled frontend asset from the installed dist directory."""
        path = Path(materia_frontend.__path__[0]).joinpath(
            "dist", "resources", "assets", filename
        )
        if not path.exists():
            return Response(status_code=status.HTTP_404_NOT_FOUND)
        content = path.read_bytes()
        mime = mimetypes.guess_type(path)[0]
        return Response(content, media_type=mime)

View File

@ -0,0 +1,19 @@
from pathlib import Path
from fastapi import APIRouter, Request, HTTPException
from fastapi.responses import HTMLResponse
from fastapi.templating import Jinja2Templates
router = APIRouter(tags=["root"])
try:
    import materia_frontend
except ModuleNotFoundError:
    # The frontend package is optional; the SPA route is only registered
    # when it is installed.
    pass
else:
    templates = Jinja2Templates(directory=Path(materia_frontend.__path__[0]) / "dist")

    @router.get("/{spa:path}", response_class=HTMLResponse, include_in_schema=False)
    async def root(request: Request):
        """Serve the SPA shell for every path; the client router handles the rest."""
        return templates.TemplateResponse(request, "base.html", {"view": "app"})

View File

@ -0,0 +1,3 @@
from materia.security.secret_key import generate_key, encrypt_payload
from materia.security.token import TokenClaims, generate_token, validate_token
from materia.security.password import hash_password, validate_password

View File

@ -0,0 +1,19 @@
from typing import Literal
import bcrypt
def hash_password(password: str, algo: Literal["bcrypt"] = "bcrypt") -> str:
    """Hash a plaintext password; only the bcrypt algorithm is supported."""
    if algo != "bcrypt":
        raise NotImplementedError(algo)
    salt = bcrypt.gensalt()
    return bcrypt.hashpw(password.encode(), salt).decode()
def validate_password(
    password: str, hash: str, algo: Literal["bcrypt"] = "bcrypt"
) -> bool:
    """Check a plaintext password against a stored hash (bcrypt only)."""
    if algo != "bcrypt":
        raise NotImplementedError(algo)
    return bcrypt.checkpw(password.encode(), hash.encode())

View File

@ -0,0 +1,17 @@
import base64
from cryptography.fernet import Fernet
def generate_key() -> bytes:
    """Generate a fresh Fernet key (URL-safe base64-encoded 32 bytes)."""
    return Fernet.generate_key()
def encrypt_payload(payload: bytes, key: bytes, valid_base64: bool = True) -> bytes:
    """Encrypt payload with Fernet; optionally strip base64 '=' padding.

    When valid_base64 is True the token is re-encoded URL-safe (the '-_'
    alphabet) with trailing padding removed.
    """
    cipher = Fernet(key)
    token = cipher.encrypt(payload)
    if valid_base64:
        # urlsafe_b64encode is b64encode with the b"-_" altchars; '=' only
        # appears as trailing padding, so rstrip removes exactly what the
        # original replace("=", "") removed.
        token = base64.urlsafe_b64encode(token).rstrip(b"=")
    return token

View File

@ -0,0 +1,29 @@
from typing import Optional
import datetime
from pydantic import BaseModel
import jwt
class TokenClaims(BaseModel):
    """Registered JWT claims carried by access and refresh tokens."""

    # Subject: the user id as a string.
    sub: str
    # Expiration time, Unix timestamp (seconds).
    exp: int
    # Issued-at time, Unix timestamp (seconds).
    iat: int
    # Optional issuer URL.
    iss: Optional[str] = None
def generate_token(
    sub: str, secret: str, duration: int, iss: Optional[str] = None
) -> str:
    """Create a signed JWT for `sub`, valid for `duration` seconds from now."""
    issued_at = datetime.datetime.now()
    expires_at = issued_at + datetime.timedelta(seconds=duration)
    claims = TokenClaims(
        sub=sub,
        exp=int(expires_at.timestamp()),
        iat=int(issued_at.timestamp()),
        iss=iss,
    )
    return jwt.encode(claims.model_dump(), secret)
def validate_token(token: str, secret: str) -> TokenClaims:
    """Decode and verify an HS256-signed JWT, returning its parsed claims."""
    decoded = jwt.decode(token, secret, algorithms=["HS256"])
    return TokenClaims(**decoded)

View File

@ -0,0 +1 @@
from materia.tasks.file import remove_cache_file

17
src/materia/tasks/file.py Normal file
View File

@ -0,0 +1,17 @@
from materia.core import Cron, CronError, SessionContext, Config, Database
from celery import shared_task
from fastapi import UploadFile
from materia.models import File
import asyncio
from pathlib import Path
from materia.core import FileSystem, Config
@shared_task(name="remove_cache_file")
def remove_cache_file(path: Path, config: Config):
    """Celery task: delete a file under the application's cache directory."""
    cache_dir = config.application.working_directory.joinpath("cache")
    target = FileSystem(path, cache_dir)
    # asyncio.run can drive the coroutine directly; no wrapper needed.
    asyncio.run(target.remove())

0
tests/__init__.py Normal file
View File

166
tests/conftest.py Normal file
View File

@ -0,0 +1,166 @@
import pytest_asyncio
from materia.models import (
User,
LoginType,
)
from materia.models.base import Base
from materia import security
from materia.app import Application
from materia.core import Config, Database, Cache, Cron
import sqlalchemy as sa
from sqlalchemy.pool import NullPool
from httpx import AsyncClient, ASGITransport, Cookies
from asgi_lifespan import LifespanManager
from pathlib import Path
@pytest_asyncio.fixture(scope="session")
async def config() -> Config:
    """Session-wide Config pointed at the non-default test database/cache ports."""
    conf = Config()
    conf.database.port = 54320
    conf.cache.port = 63790
    return conf
@pytest_asyncio.fixture(scope="session")
async def database(config: Config) -> Database:
    """Provision a throwaway `pytest` role and database, then tear both down.

    Connects as the `postgres` superuser to run CREATE/DROP, then yields a
    Database bound to the fresh `pytest` database.
    """
    config_postgres = config
    config_postgres.database.user = "postgres"
    config_postgres.database.name = "postgres"
    database_postgres = await Database.new(
        config_postgres.database.url(), poolclass=NullPool
    )
    async with database_postgres.connection() as connection:
        # CREATE DATABASE cannot run inside a transaction block.
        await connection.execution_options(isolation_level="AUTOCOMMIT")
        await connection.execute(sa.text("create role pytest login"))
        await connection.execute(sa.text("create database pytest owner pytest"))
        await connection.commit()
    await database_postgres.dispose()
    config.database.user = "pytest"
    config.database.name = "pytest"
    database_pytest = await Database.new(config.database.url(), poolclass=NullPool)
    yield database_pytest
    await database_pytest.dispose()
    async with database_postgres.connection() as connection:
        await connection.execution_options(isolation_level="AUTOCOMMIT")
        # FIX: removed a stray trailing comma that turned this statement into
        # an accidental tuple expression.
        await connection.execute(sa.text("drop database pytest"))
        await connection.execute(sa.text("drop role pytest"))
        await connection.commit()
    await database_postgres.dispose()
@pytest_asyncio.fixture(scope="session")
async def cache(config: Config) -> Cache:
    """Session-wide Cache connected as the `pytest` user."""
    config_pytest = config
    config_pytest.cache.user = "pytest"
    cache_pytest = await Cache.new(config_pytest.cache.url())
    yield cache_pytest
@pytest_asyncio.fixture(scope="session")
async def cron(config: Config) -> Cron:
    """Session-wide Cron (Celery wrapper) using the cache as backend and broker.

    FIX: the return annotation previously said `Cache`, but the fixture
    yields a `Cron` instance.
    """
    cron_pytest = Cron.new(
        config.cron.workers_count,
        backend_url=config.cache.url(),
        broker_url=config.cache.url(),
    )
    yield cron_pytest
@pytest_asyncio.fixture(scope="function", autouse=True)
async def setup_database(database: Database):
    """Create all ORM tables before each test and drop them afterwards."""
    async with database.connection() as connection:
        await connection.run_sync(Base.metadata.create_all)
        await connection.commit()
    yield
    async with database.connection() as connection:
        await connection.run_sync(Base.metadata.drop_all)
        await connection.commit()
@pytest_asyncio.fixture()
async def session(database: Database, request):
    """Per-test ORM session; any uncommitted work is rolled back on teardown."""
    session = database.sessionmaker()
    yield session
    await session.rollback()
    await session.close()
@pytest_asyncio.fixture(scope="function")
async def data(config: Config):
    """Per-test container of prebuilt model instances (one admin user)."""

    class TestData:
        user = User(
            name="PyTest",
            lower_name="pytest",
            email="pytest@example.com",
            hashed_password=security.hash_password(
                "iampytest", algo=config.security.password_hash_algo
            ),
            login_type=LoginType.Plain,
            is_admin=True,
        )

    return TestData()
@pytest_asyncio.fixture(scope="function")
async def api_config(config: Config, tmpdir) -> Config:
    """Config variant for API tests: temp working directory and fixed JWT secret."""
    config.application.working_directory = Path(tmpdir)
    config.oauth2.jwt_secret = "pytest_secret_key"
    yield config
@pytest_asyncio.fixture(scope="function")
async def api_client(
    api_config: Config, database: Database, cache: Cache, cron: Cron
) -> AsyncClient:
    """HTTPX client bound in-process (ASGITransport) to a fully wired app."""
    app = Application(api_config)
    # Inject the session-scoped services instead of letting the app create its own.
    app.database = database
    app.cache = cache
    app.cron = cron
    app.prepare_server()
    async with LifespanManager(app.backend) as manager:
        async with AsyncClient(
            transport=ASGITransport(app=manager.app), base_url=api_config.server.url()
        ) as client:
            yield client
@pytest_asyncio.fixture(scope="function")
async def auth_client(api_client: AsyncClient, api_config: Config) -> AsyncClient:
    """Sign up and sign in a test user; return a client carrying auth cookies."""
    data = {"name": "PyTest", "password": "iampytest", "email": "pytest@example.com"}
    await api_client.post(
        "/api/auth/signup",
        json=data,
    )
    auth = await api_client.post(
        "/api/auth/signin",
        json=data,
    )
    cookies = Cookies()
    # FIX: cookie names were hard-coded as "materia_at"/"materia_rt" while the
    # values were looked up via config — use the configured names for both so
    # the fixture cannot drift out of sync with the server configuration.
    cookies.set(
        api_config.security.cookie_access_token_name,
        auth.cookies[api_config.security.cookie_access_token_name],
    )
    cookies.set(
        api_config.security.cookie_refresh_token_name,
        auth.cookies[api_config.security.cookie_refresh_token_name],
    )
    api_client.cookies = cookies
    yield api_client

191
tests/test_api.py Normal file
View File

@ -0,0 +1,191 @@
import pytest
from materia.core import Config
from httpx import AsyncClient, Cookies
from io import BytesIO
# TODO: replace downloadable images for tests
@pytest.mark.asyncio
async def test_auth(api_client: AsyncClient, api_config: Config):
    """Signup, signin (auth cookies issued), and signout all succeed."""
    data = {"name": "PyTest", "password": "iampytest", "email": "pytest@example.com"}
    response = await api_client.post(
        "/api/auth/signup",
        json=data,
    )
    assert response.status_code == 200
    response = await api_client.post(
        "/api/auth/signin",
        json=data,
    )
    assert response.status_code == 200
    # Signin must set both token cookies using the configured names.
    assert response.cookies.get(api_config.security.cookie_access_token_name)
    assert response.cookies.get(api_config.security.cookie_refresh_token_name)
    # TODO: conflict usernames and emails
    response = await api_client.get("/api/auth/signout")
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_user(auth_client: AsyncClient, api_config: Config):
    """User info, avatar upload/removal, and account deletion round-trip."""
    info = await auth_client.get("/api/user")
    assert info.status_code == 200, info.text
    # NOTE(review): fetches a real image over the network — flaky offline;
    # see the module-level TODO about replacing downloadable images.
    async with AsyncClient() as client:
        pytest_logo_res = await client.get(
            "https://docs.pytest.org/en/stable/_static/pytest1.png"
        )
        assert isinstance(pytest_logo_res.content, bytes)
        pytest_logo = BytesIO(pytest_logo_res.content)
    avatar = await auth_client.put(
        "/api/user/avatar",
        files={"file": ("pytest.png", pytest_logo)},
    )
    assert avatar.status_code == 200, avatar.text
    # The avatar id reported by the API must exist on disk.
    info = await auth_client.get("/api/user")
    avatar_info = info.json()["avatar"]
    assert avatar_info is not None
    assert api_config.application.working_directory.joinpath(
        "avatars", avatar_info
    ).exists()
    # Deleting the avatar clears the field and removes the file.
    avatar = await auth_client.delete("/api/user/avatar")
    assert avatar.status_code == 200, avatar.text
    info = await auth_client.get("/api/user")
    assert info.json()["avatar"] is None
    assert not api_config.application.working_directory.joinpath(
        "avatars", avatar_info
    ).exists()
    # Deleting the account invalidates further authenticated requests.
    delete = await auth_client.delete("/api/user")
    assert delete.status_code == 200, delete.text
    info = await auth_client.get("/api/user")
    assert info.status_code == 401, info.text
@pytest.mark.asyncio
async def test_repository(auth_client: AsyncClient, api_config: Config):
    """Repository lifecycle: missing → created (once) → visible → deleted."""
    info = await auth_client.get("/api/repository")
    assert info.status_code == 404, info.text
    create = await auth_client.post("/api/repository")
    assert create.status_code == 200, create.text
    # Creating a second repository for the same user conflicts.
    create = await auth_client.post("/api/repository")
    assert create.status_code == 409, create.text
    # The backing directory is created under the working directory.
    assert api_config.application.working_directory.joinpath(
        "repository", "PyTest".lower()
    ).exists()
    info = await auth_client.get("/api/repository")
    assert info.status_code == 200, info.text
    delete = await auth_client.delete("/api/repository")
    assert delete.status_code == 200, delete.text
    info = await auth_client.get("/api/repository")
    assert info.status_code == 404, info.text
# TODO: content
@pytest.mark.asyncio
async def test_directory(auth_client: AsyncClient, api_config: Config):
    """Directory API: create, info, rename, delete, move, and copy."""
    first_dir_path = api_config.application.working_directory.joinpath(
        "repository", "PyTest".lower(), "first_dir"
    )
    second_dir_path = api_config.application.working_directory.joinpath(
        "repository", "PyTest".lower(), "second_dir"
    )
    second_dir_path_two = api_config.application.working_directory.joinpath(
        "repository", "PyTest".lower(), "second_dir.1"
    )
    third_dir_path_one = api_config.application.working_directory.joinpath(
        "repository", "PyTest".lower(), "third_dir"
    )
    third_dir_path_two = api_config.application.working_directory.joinpath(
        "repository", "PyTest".lower(), "second_dir", "third_dir"
    )
    create = await auth_client.post("/api/repository")
    assert create.status_code == 200, create.text
    # Paths must be absolute (leading slash); a relative path is rejected.
    # NOTE(review): 500 for a bad path looks like it should be a 4xx — confirm.
    create = await auth_client.post("/api/directory", json={"path": "first_dir"})
    assert create.status_code == 500, create.text
    create = await auth_client.post("/api/directory", json={"path": "/first_dir"})
    assert create.status_code == 200, create.text
    assert first_dir_path.exists()
    info = await auth_client.get("/api/directory", params=[("path", "/first_dir")])
    assert info.status_code == 200, info.text
    assert info.json()["used"] == 0
    assert info.json()["path"] == "/first_dir"
    create = await auth_client.patch(
        "/api/directory/rename",
        json={"path": "/first_dir", "name": "first_dir_renamed"},
    )
    assert create.status_code == 200, create.text
    delete = await auth_client.delete(
        "/api/directory", params=[("path", "/first_dir_renamed")]
    )
    assert delete.status_code == 200, delete.text
    assert not first_dir_path.exists()
    # Moving /third_dir into /second_dir relocates it on disk.
    create = await auth_client.post("/api/directory", json={"path": "/second_dir"})
    assert create.status_code == 200, create.text
    create = await auth_client.post("/api/directory", json={"path": "/third_dir"})
    assert create.status_code == 200, create.text
    move = await auth_client.patch(
        "/api/directory/move", json={"path": "/third_dir", "target": "/second_dir"}
    )
    assert move.status_code == 200, move.text
    assert not third_dir_path_one.exists()
    assert third_dir_path_two.exists()
    info = await auth_client.get(
        "/api/directory", params=[("path", "/second_dir/third_dir")]
    )
    assert info.status_code == 200, info.text
    assert info.json()["path"] == "/second_dir/third_dir"
    # Forced copy onto an existing name produces a ".1"-suffixed sibling.
    copy = await auth_client.post(
        "/api/directory/copy",
        json={"path": "/second_dir", "target": "/", "force": True},
    )
    assert copy.status_code == 200, copy.text
    assert second_dir_path.exists()
    assert second_dir_path_two.exists()
@pytest.mark.asyncio
async def test_file(auth_client: AsyncClient, api_config: Config):
    """File upload into the repository root succeeds."""
    create = await auth_client.post("/api/repository")
    assert create.status_code == 200, create.text
    # NOTE(review): fetches a real image over the network — flaky offline;
    # see the module-level TODO about replacing downloadable images.
    async with AsyncClient() as client:
        pytest_logo_res = await client.get(
            "https://docs.pytest.org/en/stable/_static/pytest1.png"
        )
        assert isinstance(pytest_logo_res.content, bytes)
        pytest_logo = BytesIO(pytest_logo_res.content)
    create = await auth_client.post(
        "/api/file", files={"file": ("pytest.png", pytest_logo)}, data={"path": "/"}
    )
    assert create.status_code == 200, create.text

283
tests/test_models.py Normal file
View File

@ -0,0 +1,283 @@
import pytest_asyncio
import pytest
from pathlib import Path
from materia.models import (
User,
Repository,
Directory,
RepositoryError,
File,
)
from materia.core import Config, SessionContext
from materia import security
import sqlalchemy as sa
from sqlalchemy.orm.session import make_transient
from sqlalchemy import inspect
import aiofiles
import aiofiles.os
@pytest.mark.asyncio
async def test_user(data, session: SessionContext, config: Config):
    """User model: direct persistence plus the lookup/edit/remove methods."""
    # simple: the fixture user persists and keeps a valid bcrypt hash
    session.add(data.user)
    await session.flush()
    assert data.user.id is not None
    assert security.validate_password("iampytest", data.user.hashed_password)
    await session.rollback()
    # methods: create via the model API and exercise each query helper
    await data.user.new(session, config)
    assert data.user.id is not None
    assert await data.user.count(session) == 1
    assert await User.by_name("PyTest", session) == data.user
    assert await User.by_email("pytest@example.com", session) == data.user
    await data.user.edit_name("AsyncPyTest", session)
    # with_lower matches against the lower-cased name column
    assert await User.by_name("asyncpytest", session, with_lower=True) == data.user
    assert await User.by_email("pytest@example.com", session) == data.user
    assert await User.by_id(data.user.id, session) == data.user
    await data.user.edit_password("iamnotpytest", session, config)
    assert security.validate_password("iamnotpytest", data.user.hashed_password)
    await data.user.remove(session)
@pytest.mark.asyncio
async def test_repository(data, tmpdir, session: SessionContext, config: Config):
    """Repository model: creation on disk, lookup, clone, and removal errors."""
    config.application.working_directory = Path(tmpdir)
    session.add(data.user)
    await session.flush()
    repository = await Repository(
        user_id=data.user.id, capacity=config.repository.capacity
    ).new(session, config)
    assert repository
    assert repository.id is not None
    # new() must create the backing directory on disk
    assert (await repository.real_path(session, config)).exists()
    assert await Repository.from_user(data.user, session) == repository
    await session.refresh(repository, attribute_names=["user"])
    # clone() yields a detached copy without id or user
    cloned_repository = repository.clone()
    assert cloned_repository.id is None and cloned_repository.user is None
    session.add(cloned_repository)
    await session.flush()
    assert cloned_repository.id is not None
    await repository.remove(session, config)
    # Re-adding the removed row and removing again must fail: the directory
    # is already gone from disk.
    make_transient(repository)
    session.add(repository)
    await session.flush()
    with pytest.raises(RepositoryError):
        await repository.remove(session, config)
    assert not (await repository.real_path(session, config)).exists()
@pytest.mark.asyncio
async def test_directory(data, tmpdir, session: SessionContext, config: Config):
    """Directory model: creation, nesting, relationships, path lookup,
    removal, and rename with forced suffixing."""
    config.application.working_directory = Path(tmpdir)
    # setup
    session.add(data.user)
    await session.flush()
    repository = await Repository(
        user_id=data.user.id, capacity=config.repository.capacity
    ).new(session, config)
    directory = await Directory(
        repository_id=repository.id, parent_id=None, name="test1"
    ).new(session, config)
    # simple
    assert directory.id is not None
    assert (
        await session.scalars(
            sa.select(Directory).where(
                sa.and_(
                    Directory.repository_id == repository.id,
                    Directory.name == "test1",
                )
            )
        )
    ).first() == directory
    assert (await directory.real_path(session, config)).exists()
    # nested simple
    nested_directory = await Directory(
        repository_id=repository.id,
        parent_id=directory.id,
        name="test_nested",
    ).new(session, config)
    assert nested_directory.id is not None
    assert (
        await session.scalars(
            sa.select(Directory).where(
                sa.and_(
                    Directory.repository_id == repository.id,
                    Directory.name == "test_nested",
                )
            )
        )
    ).first() == nested_directory
    assert nested_directory.parent_id == directory.id
    assert (await nested_directory.real_path(session, config)).exists()
    # relationship
    await session.refresh(directory, attribute_names=["directories", "files"])
    assert isinstance(directory.files, list) and len(directory.files) == 0
    assert isinstance(directory.directories, list) and len(directory.directories) == 1
    await session.refresh(nested_directory, attribute_names=["directories", "files"])
    # BUG FIX: these two assertions previously read `assert (x, list) and ...`
    # — a non-empty tuple is always truthy, so the type check never ran.
    assert isinstance(nested_directory.files, list) and len(nested_directory.files) == 0
    assert isinstance(nested_directory.directories, list) and len(
        nested_directory.directories
    ) == 0
    # path lookup
    assert (
        await Directory.by_path(
            repository, Path("test1", "test_nested"), session, config
        )
        == nested_directory
    )
    # remove nested
    nested_path = await nested_directory.real_path(session, config)
    assert nested_path.exists()
    await nested_directory.remove(session, config)
    assert inspect(nested_directory).was_deleted
    assert await nested_directory.real_path(session, config) is None
    assert not nested_path.exists()
    await session.refresh(directory)  # update attributes that was deleted
    assert (await directory.real_path(session, config)).exists()
    # rename: forced rename onto an existing name appends a ".1" suffix
    assert (
        await directory.rename("test1", session, config, force=True)
    ).name == "test1.1"
    await Directory(repository_id=repository.id, parent_id=None, name="test2").new(
        session, config
    )
    assert (
        await directory.rename("test2", session, config, force=True)
    ).name == "test2.1"
    assert (await repository.real_path(session, config)).joinpath("test2.1").exists()
    assert not (await repository.real_path(session, config)).joinpath("test1").exists()
    directory_path = await directory.real_path(session, config)
    assert directory_path.exists()
    await directory.remove(session, config)
    assert await directory.real_path(session, config) is None
    assert not directory_path.exists()
@pytest.mark.asyncio
async def test_file(data, tmpdir, session: SessionContext, config: Config):
    """File model: creation, relationships, path lookup, rename, move, remove."""
    config.application.working_directory = Path(tmpdir)
    # setup
    session.add(data.user)
    await session.flush()
    repository = await Repository(
        user_id=data.user.id, capacity=config.repository.capacity
    ).new(session, config)
    directory = await Directory(
        repository_id=repository.id, parent_id=None, name="test1"
    ).new(session, config)
    directory2 = await Directory(
        repository_id=repository.id, parent_id=None, name="test2"
    ).new(session, config)
    # FIX: this local was named `data`, shadowing the `data` fixture parameter
    # used above — renamed to `payload` for clarity.
    payload = b"Hello there, it's a test"
    file = await File(
        repository_id=repository.id,
        parent_id=directory.id,
        name="test_file.txt",
    ).new(payload, session, config)
    # simple
    assert file.id is not None
    assert (
        await session.scalars(
            sa.select(File).where(
                sa.and_(
                    File.repository_id == repository.id,
                    File.parent_id == directory.id,
                    File.name == "test_file.txt",
                )
            )
        )
    ).first() == file
    # relationship
    await session.refresh(file, attribute_names=["parent", "repository"])
    assert file.parent == directory
    assert file.repository == repository
    # path lookup
    assert (
        await File.by_path(repository, Path("test1", "test_file.txt"), session, config)
        == file
    )
    # on-disk content matches what was written
    file_path = await file.real_path(session, config)
    assert file_path.exists()
    assert (await aiofiles.os.stat(file_path)).st_size == file.size
    async with aiofiles.open(file_path, mode="rb") as io:
        content = await io.read()
        assert payload == content
    # rename: forced rename onto an existing name inserts a ".1" before the suffix
    assert (
        await file.rename("test_file_rename.txt", session, config, force=True)
    ).name == "test_file_rename.txt"
    await File(
        repository_id=repository.id, parent_id=directory.id, name="test_file_2.txt"
    ).new(b"", session, config)
    assert (
        await file.rename("test_file_2.txt", session, config, force=True)
    ).name == "test_file_2.1.txt"
    assert (
        (await repository.real_path(session, config))
        .joinpath("test1", "test_file_2.1.txt")
        .exists()
    )
    assert (
        not (await repository.real_path(session, config))
        .joinpath("test1", "test_file_rename.txt")
        .exists()
    )
    # move
    await file.move(directory2, session, config)
    await session.refresh(file, attribute_names=["parent"])
    assert file.parent == directory2
    assert (
        not (await repository.real_path(session, config))
        .joinpath("test1", "test_file_2.1.txt")
        .exists()
    )
    assert (
        (await repository.real_path(session, config))
        .joinpath("test2", "test_file_2.1.txt")
        .exists()
    )
    # remove
    await file.remove(session, config)
    assert not await File.by_path(
        repository, Path("test1", "test_file.txt"), session, config
    )
    assert not file_path.exists()

15
workspaces/frontend/.gitignore vendored Normal file
View File

@ -0,0 +1,15 @@
dist/
/.venv
__pycache__/
.pdm.toml
.pdm-python
.pdm-build/
node_modules/
*.tsbuildinfo
*.mjs
*.log
openapi.json
src/client

View File

@ -0,0 +1,16 @@
# materia-frontend
## Building (npm)
```sh
npm install
npm run build
```
## Building / installing (pdm)
```sh
pdm build
pdm install --prod --no-editable
```

View File

@ -0,0 +1,13 @@
<!DOCTYPE html>
<!-- Dev entry point: mounts the Vue SPA into #app via the Vite module entry. -->
<html lang="en" class="h-full">
  <head>
    <meta charset="UTF-8">
    <link rel="icon" href="/resources/assets/logo.svg">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Materia Dev</title>
  </head>
  <body class="h-full text-zinc-200 font-sans ">
    <div id="app" class="flex flex-col h-full"></div>
    <script type="module" src="src/main.ts"></script>
  </body>
</html>

4096
workspaces/frontend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,40 @@
{
"name": "materia-frontend",
"version": "0.0.5",
"private": true,
"type": "module",
"scripts": {
"dev": "vite",
"build-check": "run-p type-check \"build-only {@}\" --",
"preview": "vite preview",
"build": "vite build",
"type-check": "vue-tsc --build --force",
"generate-client": "openapi --input ./openapi.json --output ./src/client_old/ --client axios --name Client",
"openapi-ts": "openapi-ts --input ./openapi.json --output ./src/client/ --client @hey-api/client-axios"
},
"dependencies": {
"@catppuccin/tailwindcss": "^0.1.6",
"@hey-api/client-axios": "^0.2.3",
"autoprefixer": "^10.4.18",
"axios": "^1.6.8",
"filesize": "^10.1.6",
"pinia": "^2.1.7",
"postcss": "^8.4.35",
"tailwindcss": "^3.4.1",
"vue": "^3.3.11",
"vue-router": "^4.3.0"
},
"devDependencies": {
"@hey-api/openapi-ts": "^0.53.0",
"@tsconfig/node18": "^18.2.2",
"@types/node": "^18.19.3",
"@vitejs/plugin-vue": "^4.5.2",
"@vitejs/plugin-vue-jsx": "^3.1.0",
"@vue/tsconfig": "^0.5.0",
"npm-run-all2": "^6.1.1",
"openapi-typescript-codegen": "^0.29.0",
"typescript": "~5.3.0",
"vite": "^5.0.10",
"vue-tsc": "^2.0.29"
}
}

View File

@ -0,0 +1,177 @@
# This file is @generated by PDM.
# It is not intended for manual editing.
[metadata]
groups = ["default", "dev"]
strategy = ["cross_platform", "inherit_metadata"]
lock_version = "4.4.1"
content_hash = "sha256:16bedb3de70622af531e01dee2c2773d108a005caf9fa9d2fbe9042267602ef6"
[[package]]
name = "black"
version = "23.12.1"
requires_python = ">=3.8"
summary = "The uncompromising code formatter."
groups = ["dev"]
dependencies = [
"click>=8.0.0",
"mypy-extensions>=0.4.3",
"packaging>=22.0",
"pathspec>=0.9.0",
"platformdirs>=2",
]
files = [
{file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"},
{file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"},
{file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"},
{file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"},
{file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"},
{file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"},
]
[[package]]
name = "click"
version = "8.1.7"
requires_python = ">=3.7"
summary = "Composable command line interface toolkit"
groups = ["dev"]
dependencies = [
"colorama; platform_system == \"Windows\"",
]
files = [
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
]
[[package]]
name = "colorama"
version = "0.4.6"
requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
summary = "Cross-platform colored terminal text."
groups = ["default", "dev"]
marker = "sys_platform == \"win32\" or platform_system == \"Windows\""
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "iniconfig"
version = "2.0.0"
requires_python = ">=3.7"
summary = "brain-dead simple config-ini parsing"
groups = ["dev"]
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "loguru"
version = "0.7.2"
requires_python = ">=3.5"
summary = "Python logging made (stupidly) simple"
groups = ["default"]
dependencies = [
"colorama>=0.3.4; sys_platform == \"win32\"",
"win32-setctime>=1.0.0; sys_platform == \"win32\"",
]
files = [
{file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"},
{file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"},
]
[[package]]
name = "mypy-extensions"
version = "1.0.0"
requires_python = ">=3.5"
summary = "Type system extensions for programs checked with the mypy type checker."
groups = ["dev"]
files = [
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
[[package]]
name = "packaging"
version = "24.1"
requires_python = ">=3.8"
summary = "Core utilities for Python packages"
groups = ["dev"]
files = [
{file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
{file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
]
[[package]]
name = "pathspec"
version = "0.12.1"
requires_python = ">=3.8"
summary = "Utility library for gitignore style pattern matching of file paths."
groups = ["dev"]
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
]
[[package]]
name = "platformdirs"
version = "4.2.2"
requires_python = ">=3.8"
summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
groups = ["dev"]
files = [
{file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"},
{file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"},
]
[[package]]
name = "pluggy"
version = "1.5.0"
requires_python = ">=3.8"
summary = "plugin and hook calling mechanisms for python"
groups = ["dev"]
files = [
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
]
[[package]]
name = "pyflakes"
version = "3.2.0"
requires_python = ">=3.8"
summary = "passive checker of Python programs"
groups = ["dev"]
files = [
{file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"},
{file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"},
]
[[package]]
name = "pytest"
version = "7.4.4"
requires_python = ">=3.7"
summary = "pytest: simple powerful testing with Python"
groups = ["dev"]
dependencies = [
"colorama; sys_platform == \"win32\"",
"iniconfig",
"packaging",
"pluggy<2.0,>=0.12",
]
files = [
{file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"},
{file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"},
]
[[package]]
name = "win32-setctime"
version = "1.1.0"
requires_python = ">=3.5"
summary = "A small Python utility to set file creation time on Windows"
groups = ["default"]
marker = "sys_platform == \"win32\""
files = [
{file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
{file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
]

View File

@ -0,0 +1,6 @@
// PostCSS pipeline configuration.
// tailwindcss generates the utility classes referenced in templates;
// autoprefixer adds vendor prefixes for the target browsers.
export default {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}

View File

@ -0,0 +1,45 @@
[project]
name = "materia-frontend"
version = "0.1.1"
description = "Materia frontend"
authors = [
{name = "L-Nafaryus", email = "l.nafaryus@gmail.com"},
]
dependencies = [
"loguru<1.0.0,>=0.7.2",
]
requires-python = ">=3.12,<3.13"
readme = "README.md"
license = {text = "MIT"}
[tool.pdm]
distribution = true
[tool.pdm.dev-dependencies]
dev = [
"black<24.0.0,>=23.3.0",
"pytest<8.0.0,>=7.3.2",
"pyflakes<4.0.0,>=3.0.1",
]
[tool.pdm.build]
includes = [ "src/materia_frontend" ]
[tool.pdm.scripts]
npm-install.cmd = "npm install --prefix ./"
npm-run-build.cmd = "npm run build --prefix ./"
move-dist.shell = "rm -vrf src/materia_frontend/dist && mv -v dist src/materia_frontend/ && cp -v templates/* src/materia_frontend/dist"
pre_build.composite = [ "npm-install", "npm-run-build", "move-dist" ]
materia-frontend.call = "materia_frontend.main:client"
[build-system]
requires = ["pdm-backend"]
build-backend = "pdm.backend"
[tool.pyright]
reportGeneralTypeIssues = false
[tool.pytest.ini_options]
pythonpath = ["."]
testpaths = ["tests"]

View File

@ -0,0 +1,7 @@
<script setup lang="ts">
// Root application shell: all rendering is delegated to the router.
// RouterLink was imported but never used in this template, so only
// RouterView is imported.
import { RouterView } from 'vue-router';
</script>

<template>
  <RouterView />
</template>

View File

@ -0,0 +1,69 @@
@tailwind base;
@tailwind components;
@tailwind utilities;

/* Base layer: Catppuccin-themed element and component defaults.
   The `label` rule was previously declared twice with the same body;
   the duplicate has been removed. */
@layer base {
  body {
    @apply bg-ctp-crust;
    font-family: Inter,sans-serif;
    font-weight: 400;
  }

  a {
    @apply text-ctp-lavender;
  }

  .input {
    @apply w-full pl-3 pr-3 pt-2 pb-2 rounded border bg-ctp-mantle border-ctp-overlay0 hover:border-ctp-overlay1 focus:border-ctp-lavender text-ctp-text outline-none;
  }

  .input-file {
    @apply block w-full border rounded cursor-pointer bg-ctp-base border-ctp-surface0 text-ctp-subtext0 focus:outline-none;
    @apply file:bg-ctp-mantle file:border-ctp-surface0 file:mr-5 file:py-2 file:px-3 file:h-full file:border-y-0 file:border-l-0 file:border-r file:text-ctp-blue hover:file:cursor-pointer hover:file:bg-ctp-base;
  }

  label {
    @apply text-ctp-text;
  }

  .button {
    @apply pt-1 pb-1 pl-3 pr-3 sm:pt-2 sm:pb-2 sm:pl-5 sm:pr-5 rounded bg-ctp-mantle border border-ctp-surface0 hover:bg-ctp-base text-ctp-blue cursor-pointer;
  }

  .link-button {
    @apply button text-ctp-lavender cursor-pointer border-none;
  }

  .hline {
    @apply border-t border-ctp-surface0 ml-0 mr-0;
  }

  h1 {
    @apply text-ctp-text pt-5 pb-5 border-b border-ctp-overlay0 mb-5;
  }

  .icon {
    @apply inline-block select-none text-center overflow-visible w-6 h-6 stroke-ctp-text;
  }
}

/* Utilities layer: decorative perspective grid background. */
@layer utilities {
  .bg-grid {
    background:
      linear-gradient(180deg, rgba(0, 0, 0, 0) 0px, rgba(187, 65, 143, 1) 10%,
        rgba(187, 65, 143, 1) 2px, rgba(0, 0, 0, 0) 0px),
      linear-gradient(90deg, rgba(0, 0, 0, 0) 0px, rgba(187, 65, 143, 1) 10%,
        rgba(187, 65, 143, 1) 2px, rgba(0, 0, 0, 0) 0px);
    background-size: 2em 4em, 6em 2em;
    transform: perspective(500px) rotateX(60deg) scale(0.5);
    transform-origin: 50% 0%;
    z-index: -1;
    @apply absolute w-[250%] -left-[75%] h-[200%];
  }
}

View File

@ -0,0 +1,33 @@
<script setup lang="ts">
// Positioned context menu: renders a clickable list of actions at the
// given (x, y) page coordinates and reports the chosen action id.
//
// NOTE: defineProps/defineEmits are <script setup> compiler macros and
// must not be imported from 'vue' (importing them triggers a warning in
// Vue 3.3+); the previously unused `ref` import was removed as well.
const { actions, x, y } = defineProps(['actions', 'x', 'y']);

const emit = defineEmits(['action-clicked']);

// Bubble the selected action id up to the parent component.
const emitAction = (action) => {
  emit('action-clicked', action);
};
</script>

<template>
  <div class="absolute z-50 context-menu bg-ctp-mantle border rounded border-ctp-overlay0"
    :style="{ top: y + 'px', left: x + 'px' }">
    <div v-for="action in actions" :key="action.action" @click="emitAction(action.action)"
      class="hover:bg-ctp-base text-ctp-blue">
      {{ action.label }}
    </div>
  </div>
</template>

<style scoped>
.context-menu {
  position: absolute;
  box-shadow: 2px 2px 4px rgba(0, 0, 0, 0.2);
  min-width: 150px;
}

.context-menu div {
  padding: 10px;
  cursor: pointer;
}
</style>

View File

@ -0,0 +1,38 @@
<script setup lang="ts">
// Wraps its default slot with a right-click (contextmenu) handler and
// shows a floating action menu at the cursor position.
// Relies on a globally registered `v-click-outside` directive to close
// the menu — TODO confirm the directive is installed by the app.
import { ref } from "vue";

const { data, actions } = defineProps(["data", "actions"]);

const isShow = ref(false);
const posX = ref(0);
const posY = ref(0);
const anchor = ref(null);

const emit = defineEmits(["onEvent"]);

// Open the menu at the cursor; suppress the browser's native menu.
// (A leftover console.log debug statement was removed here.)
const showMenu = (event) => {
  event.preventDefault();
  posX.value = event.pageX;
  posY.value = event.pageY;
  isShow.value = true;
  emit("onEvent", event);
};

const closeMenu = () => {
  isShow.value = false;
};
</script>

<template>
  <div ref="anchor" @contextmenu="showMenu($event)" style="display: contents" v-click-outside="closeMenu">
    <slot></slot>
  </div>

  <div v-if="isShow" class="absolute z-50 min-w-40 bg-ctp-mantle border rounded border-ctp-surface0"
    :style="{ top: posY + 'px', left: posX + 'px' }">
    <div v-for="action in actions" v-show="action.show()" :key="action.event" @click="action.event(data)"
      class="hover:bg-ctp-base text-ctp-blue select-none pl-4 pr-4 pt-2 pb-2">
      {{ action.label }}
    </div>
  </div>
</template>

View File

@ -0,0 +1,84 @@
<template>
  <div>
    <div v-if="header">
      <h2>
        {{ header.title }}
      </h2>
      <p>
        {{ header.description }}
      </p>
    </div>

    <table>
      <thead>
        <tr>
          <th v-if="rowSelector">
            <div>
              <input id="contact-selectAll" type="checkbox" value="" @change="selectAll">
            </div>
          </th>
          <th v-for="(item, idx) in fields" :key="idx">
            {{ item.label }}
          </th>
        </tr>
      </thead>
      <tbody>
        <tr v-for="(item, index) in data" :key="index">
          <td v-if="rowSelector">
            <div>
              <input :id="`contact-${index}`" v-model="item.selected" type="checkbox">
            </div>
          </td>
          <td v-for="(field, idx) in fields" :key="idx" @click="rowSelected(item)">
            <span v-if="!hasNamedSlot(field.key)" :item="item">
              {{ item[field.key] }}
            </span>
            <slot v-else :name="field.key" :item="item" />
          </td>
        </tr>
      </tbody>
    </table>
  </div>

  <div v-if="hasNamedSlot('footer')">
    <slot name="footer" />
  </div>
</template>

<script>
// Generic data table.
// - `fields` describes the columns ({ key, label }); a named slot matching
//   a field key overrides the default cell rendering.
// - `rowSelector` adds a checkbox column; `selectAll` toggles every row.
// - Emits `rowSelected` with the row object when a cell is clicked.
export default {
  props: {
    // Row objects; `selected` is written onto each row when rowSelector
    // is enabled. NOTE(review): this mutates a prop — works, but callers
    // must pass a mutable array.
    data: {
      type: Array,
      required: true,
      default: () => []
    },
    // Optional { title, description } rendered above the table.
    header: {
      type: Object,
      required: false,
      default: () => null
    },
    // Column descriptors: { key, label }.
    fields: {
      type: Array,
      required: true,
      default: () => []
    },
    // Whether to render the leading checkbox column.
    rowSelector: {
      type: Boolean,
      required: false,
      default: false
    }
  },
  emits: ['rowSelected'],
  methods: {
    rowSelected(item) {
      this.$emit('rowSelected', item)
    },
    // Toggle the `selected` flag on every row to match the header checkbox.
    selectAll(e) {
      const checked = e.target.checked
      this.data.forEach((item) => { item.selected = checked })
      this.$forceUpdate()
    },
    // BUGFIX: Vue 3 removed `$scopedSlots` (all slots live on `$slots`),
    // so the old check always returned false and named cell/footer slots
    // were never rendered.
    hasNamedSlot(slotName) {
      return Object.prototype.hasOwnProperty.call(this.$slots, slotName)
    }
  }
}
</script>

View File

@ -0,0 +1,15 @@
<script setup lang="ts">
// Table row that can be dragged: serializes `data` as JSON into the
// drag payload under the "value" key so a drop target can recover it.
//
// NOTE: defineProps is a <script setup> compiler macro and must not be
// imported from 'vue' (the import triggers a warning in Vue 3.3+).
const { data } = defineProps(["data"]);

const onDragBeginEvent = (event) => {
  event.dataTransfer.setData("value", JSON.stringify(data));
};
</script>

<template>
  <tr draggable="true" @dragstart="onDragBeginEvent">
    <slot />
  </tr>
</template>

View File

@ -0,0 +1,18 @@
<script setup lang="ts">
// Table row acting as a drop target: forwards dragover/dragleave/drop
// events to optional handler props.
//
// NOTE: defineProps is a <script setup> compiler macro and must not be
// imported from 'vue'. All three handlers are now guarded so that a
// missing (undefined) prop no longer throws when the event fires —
// previously only onDragOver was guarded.
const { onDragOver, onDragLeave, onDrop } = defineProps(["onDragOver", "onDragLeave", "onDrop"]);

const onDragOverEvent = (event) => {
  // preventDefault marks this element as a valid drop target.
  event.preventDefault();
  if (onDragOver) {
    onDragOver(event);
  }
};

const onDragLeaveEvent = (event) => {
  if (onDragLeave) {
    onDragLeave(event);
  }
};

const onDropEvent = (event) => {
  if (onDrop) {
    onDrop(event);
  }
};
</script>

<template>
  <tr @dragover="onDragOverEvent" @dragleave="onDragLeaveEvent" @drop="onDropEvent">
    <slot />
  </tr>
</template>

View File

@ -0,0 +1,9 @@
<script setup lang="ts">
// Fixed-position overlay anchored at the given page coordinates.
// BUGFIX: the template referenced `pos_x` / `pos_y` but they were never
// declared, so both were undefined at runtime; declare them as props.
const { pos_x, pos_y } = defineProps(["pos_x", "pos_y"]);
</script>

<template>
  <div class="fixed z-50 cursor-pointer" :style="{ top: pos_y + 'px', left: pos_x + 'px' }">
  </div>
</template>

View File

@ -0,0 +1,22 @@
<script setup lang="ts">
// Click-to-toggle dropdown: clicking the button slot toggles the content
// slot; clicking outside closes it (via the global `v-click-outside`
// directive — TODO confirm it is registered by the app).
import { ref } from "vue";

// BUGFIX: `bool` is not a TypeScript type (vue-tsc fails on it);
// the correct primitive type is `boolean`.
const active = ref<boolean>(false);

// Toggle visibility (click on the button re-closes an open menu).
function activate() {
  active.value = !active.value;
}

function deactivate() {
  active.value = false;
}
</script>

<template>
  <div @click="activate" v-click-outside="deactivate">
    <slot name="button"></slot>
    <div v-if="active">
      <slot name="content"></slot>
    </div>
  </div>
</template>

Some files were not shown because too many files have changed in this diff Show More