materia-server: fix directory no parent
parent ec41110e0b
commit b89e8f3393

flake.nix (449)
flake.nix
@@ -1,227 +1,258 @@
(The whole file is reformatted in alejandra style: argument sets split one per line, no padding inside brackets; no inputs, packages, apps, or devShell entries are added or removed. The resulting flake.nix:)

{
  description = "Materia";

  inputs = {
    nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";

    dream2nix = {
      url = "github:nix-community/dream2nix";
      inputs.nixpkgs.follows = "nixpkgs";
    };

    bonfire.url = "github:L-Nafaryus/bonfire";
  };

  outputs = {
    self,
    nixpkgs,
    dream2nix,
    bonfire,
    ...
  }: let
    system = "x86_64-linux";
    pkgs = import nixpkgs {inherit system;};
    bonpkgs = bonfire.packages.${system};
    bonlib = bonfire.lib;

    dreamBuildPackage = {
      module,
      meta ? {},
      extraModules ? [],
      extraArgs ? {},
    }:
      (
        nixpkgs.lib.evalModules {
          modules = [module] ++ extraModules;
          specialArgs =
            {
              inherit dream2nix;
              packageSets.nixpkgs = pkgs;
            }
            // extraArgs;
        }
      )
      .config
      .public
      // {inherit meta;};
  in {
    packages.x86_64-linux = {
      materia-frontend = dreamBuildPackage {
        module = {
          lib,
          config,
          dream2nix,
          ...
        }: {
          name = "materia-frontend";
          version = "0.0.1";

          imports = [
            dream2nix.modules.dream2nix.WIP-nodejs-builder-v3
          ];

          mkDerivation = {
            src = ./materia-web-client/src/materia-frontend;
          };

          deps = {nixpkgs, ...}: {
            inherit
              (nixpkgs)
              fetchFromGitHub
              stdenv
              ;
          };

          WIP-nodejs-builder-v3 = {
            packageLockFile = "${config.mkDerivation.src}/package-lock.json";
          };
        };
        meta = with nixpkgs.lib; {
          description = "Materia frontend";
          license = licenses.mit;
          maintainers = with bonlib.maintainers; [L-Nafaryus];
          broken = false;
        };
      };

      materia-web-client = dreamBuildPackage {
        extraArgs = {
          inherit (self.packages.x86_64-linux) materia-frontend;
        };
        module = {
          config,
          lib,
          dream2nix,
          materia-frontend,
          ...
        }: {
          imports = [dream2nix.modules.dream2nix.WIP-python-pdm];

          pdm.lockfile = ./materia-web-client/pdm.lock;
          pdm.pyproject = ./materia-web-client/pyproject.toml;

          deps = _: {
            python = pkgs.python3;
          };

          mkDerivation = {
            src = ./materia-web-client;
            buildInputs = [
              pkgs.python3.pkgs.pdm-backend
            ];
            configurePhase = ''
              cp -rv ${materia-frontend}/dist ./src/materia-frontend/
            '';
          };
        };
        meta = with nixpkgs.lib; {
          description = "Materia web client";
          license = licenses.mit;
          maintainers = with bonlib.maintainers; [L-Nafaryus];
          broken = false;
        };
      };

      materia-server = dreamBuildPackage {
        module = {
          config,
          lib,
          dream2nix,
          materia-frontend,
          ...
        }: {
          imports = [dream2nix.modules.dream2nix.WIP-python-pdm];

          pdm.lockfile = ./materia-server/pdm.lock;
          pdm.pyproject = ./materia-server/pyproject.toml;

          deps = _: {
            python = pkgs.python3;
          };

          mkDerivation = {
            src = ./materia-server;
            buildInputs = [
              pkgs.python3.pkgs.pdm-backend
            ];
            nativeBuildInputs = [
              pkgs.python3.pkgs.wrapPython
            ];
          };
        };
        meta = with nixpkgs.lib; {
          description = "Materia";
          license = licenses.mit;
          maintainers = with bonlib.maintainers; [L-Nafaryus];
          broken = false;
          mainProgram = "materia-server";
        };
      };

      postgresql = let
        user = "postgres";
        database = "postgres";
        dataDir = "/var/lib/postgresql";
        entryPoint = pkgs.writeTextDir "entrypoint.sh" ''
          initdb -U ${user}
          postgres -k ${dataDir}
        '';
      in
        pkgs.dockerTools.buildImage {
          name = "postgresql";
          tag = "devel";

          copyToRoot = pkgs.buildEnv {
            name = "image-root";
            pathsToLink = ["/bin" "/etc" "/"];
            paths = with pkgs; [
              bash
              postgresql
              entryPoint
            ];
          };
          runAsRoot = with pkgs; ''
            #!${runtimeShell}
            ${dockerTools.shadowSetup}
            groupadd -r ${user}
            useradd -r -g ${user} --home-dir=${dataDir} ${user}
            mkdir -p ${dataDir}
            chown -R ${user}:${user} ${dataDir}
          '';

          config = {
            Entrypoint = ["bash" "/entrypoint.sh"];
            StopSignal = "SIGINT";
            User = "${user}:${user}";
            Env = ["PGDATA=${dataDir}"];
            WorkingDir = dataDir;
            ExposedPorts = {
              "5432/tcp" = {};
            };
          };
        };

      redis = let
        user = "redis";
        dataDir = "/var/lib/redis";
        entryPoint = pkgs.writeTextDir "entrypoint.sh" ''
          redis-server \
            --daemonize no \
            --dir "${dataDir}"
        '';
      in
        pkgs.dockerTools.buildImage {
          name = "redis";
          tag = "devel";

          copyToRoot = pkgs.buildEnv {
            name = "image-root";
            pathsToLink = ["/bin" "/etc" "/"];
            paths = with pkgs; [
              bash
              redis
              entryPoint
            ];
          };
          runAsRoot = with pkgs; ''
            #!${runtimeShell}
            ${dockerTools.shadowSetup}
            groupadd -r ${user}
            useradd -r -g ${user} --home-dir=${dataDir} ${user}
            mkdir -p ${dataDir}
            chown -R ${user}:${user} ${dataDir}
          '';

          config = {
            Entrypoint = ["bash" "/entrypoint.sh"];
            StopSignal = "SIGINT";
            User = "${user}:${user}";
            WorkingDir = dataDir;
            ExposedPorts = {
              "6379/tcp" = {};
            };
          };
        };
    };

    apps.x86_64-linux = {
      materia-server = {
        type = "app";
        program = "${self.packages.x86_64-linux.materia-server}/bin/materia-server";
      };
    };

    devShells.x86_64-linux.default = pkgs.mkShell {
      buildInputs = with pkgs; [postgresql redis pdm nodejs];
      # greenlet requires libstdc++
      LD_LIBRARY_PATH = nixpkgs.lib.makeLibraryPath [pkgs.stdenv.cc.cc];
    };
  };
}
@@ -1,11 +1,26 @@
-from materia_server.models.auth import LoginType, LoginSource, OAuth2Application, OAuth2Grant, OAuth2AuthorizationCode
-from materia_server.models.database import Database, DatabaseError, DatabaseMigrationError, Cache, CacheError
+from materia_server.models.auth import (
+    LoginType,
+    LoginSource,
+    OAuth2Application,
+    OAuth2Grant,
+    OAuth2AuthorizationCode,
+)
+from materia_server.models.database import (
+    Database,
+    DatabaseError,
+    DatabaseMigrationError,
+    Cache,
+    CacheError,
+)

 from materia_server.models.user import User, UserCredentials, UserInfo

-from materia_server.models.repository import Repository, RepositoryInfo
+from materia_server.models.repository import (
+    Repository,
+    RepositoryInfo,
+    RepositoryContent,
+)

 from materia_server.models.directory import Directory, DirectoryLink, DirectoryInfo
@@ -59,7 +59,7 @@ class Directory(Base):

     async def remove(self, db: database.Database):
         async with db.session() as session:
-            await session.execute(sa.delete(Directory).where(Directory.id == self.id))
+            await session.delete(self)
             await session.commit()

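Note: the remove() change above swaps a Core-level DELETE statement for an ORM-level delete. A minimal sketch of the difference, assuming standard SQLAlchemy 2.x async ORM behaviour (the models here are illustrative, not from this repository):

import sqlalchemy as sa
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship


class Base(DeclarativeBase):
    pass


class Parent(Base):
    __tablename__ = "parent"
    id: Mapped[int] = mapped_column(primary_key=True)
    children: Mapped[list["Child"]] = relationship(cascade="all, delete-orphan")


class Child(Base):
    __tablename__ = "child"
    id: Mapped[int] = mapped_column(primary_key=True)
    parent_id: Mapped[int] = mapped_column(sa.ForeignKey("parent.id"))


async def remove_parent(session: AsyncSession, parent: Parent) -> None:
    # ORM delete: the unit of work honours relationship cascades, so the
    # children above are deleted together with the parent.
    await session.delete(parent)
    await session.commit()
    # The old style emitted a bare DELETE and bypassed ORM cascades; only
    # database-level ON DELETE rules would have applied:
    # await session.execute(sa.delete(Parent).where(Parent.id == parent.id))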
@@ -56,7 +56,7 @@ class File(Base):

     async def remove(self, db: database.Database):
         async with db.session() as session:
-            await session.execute(sa.delete(File).where(File.id == self.id))
+            await session.delete(self)
             await session.commit()

@@ -1,12 +1,12 @@
 from time import time
-from typing import List, Self
+from typing import List, Self, Optional
 from uuid import UUID, uuid4

 from sqlalchemy import BigInteger, ForeignKey
 from sqlalchemy.orm import mapped_column, Mapped, relationship
 from sqlalchemy.orm.attributes import InstrumentedAttribute
 import sqlalchemy as sa
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict

 from materia_server.models.base import Base
 from materia_server.models import database
@@ -55,15 +55,24 @@ class Repository(Base):

     async def remove(self, db: database.Database):
         async with db.session() as session:
-            await session.execute(sa.delete(Repository).where(Repository.id == self.id))
+            await session.delete(self)
             await session.commit()


 class RepositoryInfo(BaseModel):
+    model_config = ConfigDict(from_attributes=True)
+
+    id: int
     capacity: int
-    used: int
+    used: Optional[int] = None
+
+
+class RepositoryContent(BaseModel):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+    files: list["FileInfo"]
+    directories: list["DirectoryInfo"]


 from materia_server.models.user import User
-from materia_server.models.directory import Directory
-from materia_server.models.file import File
+from materia_server.models.directory import Directory, DirectoryInfo
+from materia_server.models.file import File, FileInfo
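Note: ConfigDict(from_attributes=True) on RepositoryInfo is what allows the repository router below to build the schema straight from the ORM object with RepositoryInfo.model_validate(repository). A small self-contained sketch, assuming Pydantic v2 (the Row class is an illustrative stand-in for an ORM instance):

from typing import Optional

from pydantic import BaseModel, ConfigDict


class Row:
    # stands in for an ORM-mapped Repository instance
    def __init__(self, id: int, capacity: int):
        self.id = id
        self.capacity = capacity


class RepositoryInfo(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    id: int
    capacity: int
    used: Optional[int] = None


# from_attributes lets model_validate read plain attributes instead of dict keys
info = RepositoryInfo.model_validate(Row(id=1, capacity=1024))
info.used = 512  # filled in afterwards, as the router does with a sum over file sizes
print(info.model_dump())  # {'id': 1, 'capacity': 1024, 'used': 512}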
@@ -82,7 +82,7 @@ class User(Base):

     async def remove(self, db: database.Database):
         async with db.session() as session:
-            await session.execute(sa.delete(User).where(User.id == self.id))
+            await session.delete(self)
             await session.commit()

@@ -28,36 +28,39 @@ async def create(
         session.add(user)
         await session.refresh(user, attribute_names=["repository"])

     if not user.repository:
         raise HTTPException(status.HTTP_404_NOT_FOUND, "Repository not found")

     current_directory = None
     current_path = Path()
     directory = None

     for part in directory_path.parts:
         if not await Directory.by_path(
             user.repository.id, current_path, part, ctx.database
         ):
             directory = Directory(
                 repository_id=user.repository.id,
                 parent_id=current_directory.id if current_directory else None,
                 name=part,
                 path=None if current_path == Path() else str(current_path),
             )
-            session.add(directory)
+
+            try:
+                (repository_path / current_path / part).mkdir(exist_ok=True)
+            except OSError:
+                raise HTTPException(
+                    status.HTTP_500_INTERNAL_SERVER_ERROR,
+                    f"Failed to create a directory {current_path / part}",
+                )
+
+            async with ctx.database.session() as session:
+                session.add(directory)
+                await session.commit()
+                await session.refresh(directory)

         current_directory = directory
         current_path /= part

-    try:
-        (repository_path / directory_path).mkdir(parents=True, exist_ok=True)
-    except OSError:
-        raise HTTPException(
-            status.HTTP_500_INTERNAL_SERVER_ERROR, "Failed to created a directory"
-        )
-
-    await session.commit()
-

 @router.get("/directory")
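Note: this hunk is the fix the commit title refers to. Previously every Directory row in the chain was only added to the session and committed once at the end, so the parent directory's autogenerated id was not reliably available when it was used as the next component's parent_id, and the whole path was mkdir'ed in one go. Now each path component is committed and refreshed inside the loop before the next one is created. A generic sketch of the pattern, assuming a SQLAlchemy async session (the Node model is a hypothetical stand-in, not project code):

import sqlalchemy as sa
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Node(Base):  # hypothetical stand-in for the Directory model
    __tablename__ = "node"
    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str]
    parent_id: Mapped[int | None] = mapped_column(sa.ForeignKey("node.id"), default=None)


async def make_chain(session: AsyncSession, parts: list[str]) -> int | None:
    parent_id = None
    for name in parts:
        node = Node(name=name, parent_id=parent_id)
        session.add(node)
        await session.commit()
        await session.refresh(node)  # load the autogenerated primary key
        parent_id = node.id          # the next component now has a real parent id
    return parent_id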
@@ -1,7 +1,15 @@
 import shutil
+from pathlib import Path
 from fastapi import APIRouter, Depends, HTTPException, status

-from materia_server.models import User, Repository, RepositoryInfo
+from materia_server.models import (
+    User,
+    Repository,
+    RepositoryInfo,
+    RepositoryContent,
+    FileInfo,
+    DirectoryInfo,
+)
 from materia_server.routers import middleware
 from materia_server.config import Config
@@ -32,35 +40,24 @@ async def create(


 @router.get("/repository", response_model=RepositoryInfo)
 async def info(
-    user: User = Depends(middleware.user), ctx: middleware.Context = Depends()
+    repository=Depends(middleware.repository), ctx: middleware.Context = Depends()
 ):
-    async with ctx.database.session() as session:
-        session.add(user)
-        await session.refresh(user, attribute_names=["repository"])
-
-    if not (repository := user.repository):
-        raise HTTPException(status.HTTP_404_NOT_FOUND, "Repository not found")
-
     async with ctx.database.session() as session:
         session.add(repository)
         await session.refresh(repository, attribute_names=["files"])

-    return RepositoryInfo(
-        capacity=repository.capacity,
-        used=sum([file.size for file in repository.files]),
-    )
+    info = RepositoryInfo.model_validate(repository)
+    info.used = sum([file.size for file in repository.files])
+
+    return info


 @router.delete("/repository")
 async def remove(
-    user: User = Depends(middleware.user), ctx: middleware.Context = Depends()
+    repository=Depends(middleware.repository),
+    repository_path=Depends(middleware.repository_path),
+    ctx: middleware.Context = Depends(),
 ):
-    repository_path = Config.data_dir() / "repository" / user.lower_name
-
-    async with ctx.database.session() as session:
-        session.add(user)
-        await session.refresh(user, attribute_names=["repository"])
-
     try:
         if repository_path.exists():
             shutil.rmtree(str(repository_path))
@@ -69,4 +66,33 @@ async def remove(
             status.HTTP_500_INTERNAL_SERVER_ERROR, "Failed to remove repository"
         )

-    await user.repository.remove(ctx.database)
+    await repository.remove(ctx.database)
+
+
+@router.get("/repository/content", response_model=RepositoryContent)
+async def content(
+    repository=Depends(middleware.repository), ctx: middleware.Context = Depends()
+):
+    async with ctx.database.session() as session:
+        session.add(repository)
+        await session.refresh(repository, attribute_names=["directories"])
+        await session.refresh(repository, attribute_names=["files"])
+
+    content = RepositoryContent(
+        files=list(
+            map(
+                lambda file: FileInfo.model_validate(file),
+                filter(lambda file: file.path is None, repository.files),
+            )
+        ),
+        directories=list(
+            map(
+                lambda directory: DirectoryInfo.model_validate(directory),
+                filter(
+                    lambda directory: directory.path is None, repository.directories
+                ),
+            )
+        ),
+    )
+
+    return content
@@ -1,67 +1,81 @@
 from typing import Optional, Sequence
 import uuid
 from datetime import datetime
+from pathlib import Path
 from fastapi import HTTPException, Request, Response, status, Depends, Cookie
 from fastapi.security.base import SecurityBase
 import jwt
 from sqlalchemy import select
 from pydantic import BaseModel
 from enum import StrEnum
 from http import HTTPMethod as HttpMethod
-from fastapi.security import HTTPBearer, OAuth2PasswordBearer, OAuth2PasswordRequestForm, APIKeyQuery, APIKeyCookie, APIKeyHeader
+from fastapi.security import (
+    HTTPBearer,
+    OAuth2PasswordBearer,
+    OAuth2PasswordRequestForm,
+    APIKeyQuery,
+    APIKeyCookie,
+    APIKeyHeader,
+)

 from materia_server import security
-from materia_server.models import User
+from materia_server.models import User, Repository


 class Context:
     def __init__(self, request: Request):
         self.config = request.state.config
         self.database = request.state.database
         self.cache = request.state.cache
         self.logger = request.state.logger


 async def jwt_cookie(request: Request, response: Response, ctx: Context = Depends()):
-    if not (access_token := request.cookies.get(ctx.config.security.cookie_access_token_name)):
+    if not (
+        access_token := request.cookies.get(
+            ctx.config.security.cookie_access_token_name
+        )
+    ):
         raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Missing token")
     refresh_token = request.cookies.get(ctx.config.security.cookie_refresh_token_name)

     if ctx.config.oauth2.jwt_signing_algo in ["HS256", "HS384", "HS512"]:
         secret = ctx.config.oauth2.jwt_secret
     else:
         secret = ctx.config.oauth2.jwt_signing_key

     issuer = "{}://{}".format(ctx.config.server.scheme, ctx.config.server.domain)

     try:
-        refresh_claims = security.validate_token(refresh_token, secret) if refresh_token else None
+        refresh_claims = (
+            security.validate_token(refresh_token, secret) if refresh_token else None
+        )

         if refresh_claims:
             if refresh_claims.exp < datetime.now().timestamp():
                 refresh_claims = None
     except jwt.PyJWTError:
         refresh_claims = None

     try:
         access_claims = security.validate_token(access_token, secret)

         if access_claims.exp < datetime.now().timestamp():
             if refresh_claims:
                 new_access_token = security.generate_token(
                     access_claims.sub,
                     str(secret),
                     ctx.config.oauth2.access_token_lifetime,
-                    issuer
+                    issuer,
                 )
                 access_claims = security.validate_token(new_access_token, secret)
                 response.set_cookie(
                     ctx.config.security.cookie_access_token_name,
-                    value = new_access_token,
-                    max_age = ctx.config.oauth2.access_token_lifetime,
-                    secure = True,
-                    httponly = ctx.config.security.cookie_http_only,
-                    samesite = "lax"
+                    value=new_access_token,
+                    max_age=ctx.config.oauth2.access_token_lifetime,
+                    secure=True,
+                    httponly=ctx.config.security.cookie_http_only,
+                    samesite="lax",
                 )
             else:
                 access_claims = None
@@ -74,8 +88,23 @@ async def jwt_cookie(request: Request, response: Response, ctx: Context = Depend
     return access_claims


-async def user(claims = Depends(jwt_cookie), ctx: Context = Depends()):
+async def user(claims=Depends(jwt_cookie), ctx: Context = Depends()) -> User:
     if not (current_user := await User.by_id(uuid.UUID(claims.sub), ctx.database)):
         raise HTTPException(status.HTTP_401_UNAUTHORIZED, "Missing user")

     return current_user
+
+
+async def repository(user: User = Depends(user), ctx: Context = Depends()):
+    async with ctx.database.session() as session:
+        session.add(user)
+        await session.refresh(user, attribute_names=["repository"])
+
+    if not (repository := user.repository):
+        raise HTTPException(status.HTTP_404_NOT_FOUND, "Repository not found")
+
+    return repository
+
+
+async def repository_path(user: User = Depends(user), ctx: Context = Depends()) -> Path:
+    return ctx.config.data_dir() / "repository" / user.lower_name
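Note: the new repository and repository_path dependencies centralise the lookup that the repository router previously inlined (the session refresh of user.repository and the data_dir-based path). A hedged usage sketch; the route path and response shape are illustrative, not part of this diff:

from fastapi import APIRouter, Depends

from materia_server.routers import middleware

router = APIRouter()


@router.get("/repository/example")  # illustrative path
async def example(
    repository=Depends(middleware.repository),
    repository_path=Depends(middleware.repository_path),
):
    # `repository` is the current user's Repository row (404 if missing);
    # `repository_path` is its on-disk location under <data_dir>/repository/<lower_name>.
    return {"capacity": repository.capacity, "path": str(repository_path)}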