diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml deleted file mode 100644 index a6e33ce..0000000 --- a/.github/workflows/codeql.yml +++ /dev/null @@ -1,76 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. -# -# ******** NOTE ******** -# We have attempted to detect the languages in your repository. Please check -# the `language` matrix defined below to confirm you have the correct set of -# supported CodeQL languages. -# -name: "CodeQL" - -on: - push: - branches: [ "main" ] - pull_request: - # The branches below must be a subset of the branches above - branches: [ "main" ] - schedule: - - cron: '31 8 * * 0' - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - matrix: - language: [ 'python' ] - # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] - # Use only 'java' to analyze code written in Java, Kotlin or both - # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both - # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v2 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - - # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs - # queries: security-extended,security-and-quality - - - # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v2 - - # ℹī¸ Command-line programs to run using the OS shell. - # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - - # If the Autobuild fails above, remove it and uncomment the following three lines. - # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 
- - # - run: | - # echo "Run, Build Application using script" - # ./location_of_script_within_repo/buildscript.sh - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 - with: - category: "/language:${{matrix.language}}" diff --git a/.github/workflows/docker.yml b/.github/workflows/release.yml similarity index 97% rename from .github/workflows/docker.yml rename to .github/workflows/release.yml index 5cff3c8..c1c24a9 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/release.yml @@ -1,4 +1,4 @@ -name: Publish docker image +name: Publish a new version on: push: @@ -10,6 +10,12 @@ jobs: steps: - uses: actions/checkout@v3 + - uses: docker/setup-buildx-action@v2 + - name: Login to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Get the version id: get_version @@ -18,14 +24,6 @@ jobs: file: 'backend/pyproject.toml' field: 'tool.poetry.version' - - uses: docker/setup-buildx-action@v2 - - - name: Login to Docker Hub - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Build docker image uses: docker/build-push-action@v4 with: diff --git a/Makefile b/Makefile index 85eb7c1..237222c 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,7 @@ IMAGE_NAME=nebulabroadcast/nebula-server:latest +VERSION=$(shell cd backend && poetry run python -c 'import nebula' --version) -test: +check: check_version cd frontend && yarn format cd backend && \ @@ -9,6 +10,10 @@ test: poetry run flake8 . && \ poetry run mypy . +check_version: + echo $(VERSION) + sed -i "s/^version = \".*\"/version = \"$(VERSION)\"/" backend/pyproject.toml + build: docker build -t $(IMAGE_NAME) . diff --git a/backend/api/auth.py b/backend/api/auth.py index 28a2be2..0bf3193 100644 --- a/backend/api/auth.py +++ b/backend/api/auth.py @@ -1,8 +1,8 @@ -from fastapi import Header +from fastapi import Depends, Header, Response from pydantic import Field import nebula -from nebula.exceptions import UnauthorizedException +from server.dependencies import current_user from server.models import RequestModel, ResponseModel from server.request import APIRequest from server.session import Session @@ -37,6 +37,11 @@ class LoginResponseModel(ResponseModel): ) +class PasswordRequestModel(RequestModel): + login: str | None = Field(None, title="Login", example="admin") + password: str = Field(..., title="Password", example="Password.123") + + # # Request # @@ -62,12 +67,49 @@ class LogoutRequest(APIRequest): async def handle(self, authorization: str | None = Header(None)): if not authorization: - raise UnauthorizedException("No authorization header provided") + raise nebula.UnauthorizedException("No authorization header provided") access_token = parse_access_token(authorization) if not access_token: - raise UnauthorizedException("Invalid authorization header provided") + raise nebula.UnauthorizedException("Invalid authorization header provided") await Session.delete(access_token) - raise UnauthorizedException("Logged out") + raise nebula.UnauthorizedException("Logged out") + + +class SetPassword(APIRequest): + """Set a new password for the current (or a given) user. + + In order to set a password for another user, the current user must be an admin. 
+ """ + + name: str = "password" + title: str = "Set password" + + async def handle( + self, + request: PasswordRequestModel, + user: nebula.User = Depends(current_user), + ): + if request.login: + if not user.is_admin: + raise nebula.UnauthorizedException( + "Only admin can change other user's password" + ) + query = "SELECT meta FROM users WHERE login = $1" + async for row in nebula.db.iterate(query, request.login): + target_user = nebula.User.from_row(row) + break + else: + raise nebula.NotFoundException(f"User {request.login} not found") + else: + target_user = user + + if len(request.password) < 8: + raise nebula.BadRequestException("Password is too short") + + target_user.set_password(request.password) + await target_user.save() + + return Response(status_code=204) diff --git a/backend/api/browse.py b/backend/api/browse.py index 0eca961..b5610ca 100644 --- a/backend/api/browse.py +++ b/backend/api/browse.py @@ -1,6 +1,5 @@ from typing import Any, Literal -from fastapi import Depends from nxtools import slugify from pydantic import Field @@ -9,7 +8,7 @@ from nebula.enum import MetaClass from nebula.exceptions import NebulaException from nebula.metadata.normalize import normalize_meta -from server.dependencies import current_user +from server.dependencies import CurrentUser from server.models import RequestModel, ResponseModel from server.request import APIRequest @@ -19,6 +18,8 @@ REQUIRED_COLUMNS = [ "id", "id_folder", + "title", + "subtitle", "status", "content_type", "media_type", @@ -109,17 +110,19 @@ def sanitize_value(value: Any) -> Any: def build_conditions(conditions: list[ConditionModel]) -> list[str]: cond_list: list[str] = [] for condition in conditions: - assert condition.key in nebula.settings.metatypes + assert ( + condition.key in nebula.settings.metatypes + ), f"Invalid meta key {condition.key}" condition.value = normalize_meta(condition.key, condition.value) if condition.operator in ["IN", "NOT IN"]: - assert type(condition.value) is list + assert type(condition.value) is list, "Value must be a list" values = sql_list([sanitize_value(v) for v in condition.value], t="str") cond_list.append(f"meta->>'{condition.key}' {condition.operator} {values}") elif condition.operator in ["IS NULL", "IS NOT NULL"]: cond_list.append(f"meta->>'{condition.key}' {condition.operator}") else: value = sanitize_value(condition.value) - assert value + assert value, "Value must not be empty" # TODO casting to numbers for <, >, <=, >= cond_list.append(f"meta->>'{condition.key}' {condition.operator} '{value}'") return cond_list @@ -188,7 +191,7 @@ def build_query( # Process views if request.view is not None and not request.ignore_view_conditions: - assert type(request.view) is int + assert type(request.view) is int, "View must be an integer" if (view := nebula.settings.get_view(request.view)) is not None: if view.folders: cond_list.append(f"id_folder IN {sql_list(view.folders)}") @@ -207,14 +210,16 @@ def build_query( # Process full text if request.query: - for elm in slugify(request.query, make_set=True): + for elm in slugify(request.query, make_set=True, min_length=3): # no need to sanitize this. 
slugified strings are safe cond_list.append(f"id IN (SELECT id FROM ft WHERE value LIKE '{elm}%')") # Access control if user.is_limited: - cond_list.append(f"meta->>'created_by' = '{user.id}'") + c1 = f"meta->>'created_by' = '{user.id}'" + c2 = f"meta->'assignees' @> '[{user.id}]'::JSONB" + cond_list.append(f"({c1} OR {c2})") # Build conditions @@ -253,12 +258,12 @@ class Request(APIRequest): async def handle( self, request: BrowseRequestModel, - user: nebula.User = Depends(current_user), + user: CurrentUser, ) -> BrowseResponseModel: columns: list[str] = ["title", "duration"] if request.view is not None and not request.columns: - assert type(request.view) is int + assert type(request.view) is int, "View must be an integer" if (view := nebula.settings.get_view(request.view)) is not None: if view.columns is not None: columns = view.columns diff --git a/backend/api/delete.py b/backend/api/delete.py index 283e075..18f1443 100644 --- a/backend/api/delete.py +++ b/backend/api/delete.py @@ -1,11 +1,11 @@ -from fastapi import Depends, Response +from fastapi import Response from pydantic import Field import nebula from nebula.enum import ObjectType from nebula.helpers.scheduling import bin_refresh from nebula.objects.utils import get_object_class_by_name -from server.dependencies import current_user, request_initiator +from server.dependencies import CurrentUser, RequestInitiator from server.models import RequestModel from server.request import APIRequest @@ -30,8 +30,8 @@ class Request(APIRequest): async def handle( self, request: DeleteRequestModel, - user: nebula.User = Depends(current_user), - initiator: str | None = Depends(request_initiator), + user: CurrentUser, + initiator: RequestInitiator, ) -> Response: """Delete given objects.""" @@ -58,10 +58,10 @@ async def handle( ) case ObjectType.ASSET | ObjectType.EVENT: - # TODO: ACL HERE + # TODO: ACL HERE? # In general, normal users don't need to # delete assets or events directly - if not user["is_admin"]: + if not user.is_admin: raise nebula.ForbiddenException( "You are not allowed to delete this object" ) diff --git a/backend/api/get.py b/backend/api/get.py index 7228713..f78940c 100644 --- a/backend/api/get.py +++ b/backend/api/get.py @@ -1,11 +1,10 @@ from typing import Any -from fastapi import Depends from pydantic import Field import nebula from nebula.enum import ObjectType -from server.dependencies import current_user +from server.dependencies import CurrentUser from server.models import RequestModel, ResponseModel from server.request import APIRequest @@ -38,6 +37,28 @@ class GetResponseModel(ResponseModel): ) +def can_access_object(user: nebula.User, meta: dict[str, Any]) -> bool: + if user.is_admin: + return True + elif user.id in meta.get("assignees", []): + return True + elif user.is_limited: + if meta.get("created_by") != user.id: + return False + return True + if id_folder := meta.get("id_folder"): + # Users can view assets in folders they have access to + return user.can("asset_view", id_folder) + + if login := meta.get("login"): + # Users can view their own data + return login == user.name + + # Normal users don't need to access items, bins or events + # using get requests. 
+ return False + + class Request(APIRequest): """Get a list of objects""" @@ -48,7 +69,7 @@ class Request(APIRequest): async def handle( self, request: GetRequestModel, - user: nebula.User = Depends(current_user), + user: CurrentUser, ) -> GetResponseModel: object_type_name = request.object_type.value @@ -56,12 +77,10 @@ async def handle( data = [] async for row in nebula.db.iterate(query, request.ids): - if user.is_limited: - # Limited users can only see their own objects - if row["meta"].get("created_by") != user.id: - raise nebula.ForbiddenException( - "You are not allowed to access this object" - ) + if not can_access_object(user, row["meta"]): + raise nebula.ForbiddenException( + "You are not allowed to access this object" + ) data.append(row["meta"]) return GetResponseModel(data=data) diff --git a/backend/api/init/settings.py b/backend/api/init/settings.py index e8f05a1..9822432 100644 --- a/backend/api/init/settings.py +++ b/backend/api/init/settings.py @@ -1,6 +1,8 @@ from pydantic import Field import nebula +from nebula.enum import ContentType +from nebula.filetypes import FileTypes from nebula.settings.common import LanguageCode from nebula.settings.models import ( BasePlayoutChannelSettings, @@ -105,6 +107,7 @@ class ClientSettingsModel(SettingsModel): metatypes: dict[str, ClientMetaTypeModel] = Field(default_factory=dict) cs: dict[str, ClientCSModel] = Field(default_factory=dict) playout_channels: list[BasePlayoutChannelSettings] = Field(default_factory=list) + filetypes: dict[str, ContentType] = Field(default_factory=dict) server_url: str | None = Field(None, title="Server URL") @@ -181,6 +184,12 @@ async def get_client_settings(lang: LanguageCode): filter=v.filter, ) + # + # FileTypes + # + + filetypes: dict[str, ContentType] = FileTypes.data + # # Construct the client settings # @@ -196,4 +205,5 @@ async def get_client_settings(lang: LanguageCode): cs=client_cs, metatypes=client_metatypes, users=users, + filetypes=filetypes, ) diff --git a/backend/api/jobs/actions.py b/backend/api/jobs/actions.py index 7e252fc..2f79dc4 100644 --- a/backend/api/jobs/actions.py +++ b/backend/api/jobs/actions.py @@ -1,4 +1,5 @@ from fastapi import Depends +from nxtools import xml from pydantic import Field import nebula @@ -8,12 +9,17 @@ class ActionsRequestModel(RequestModel): - ids: list[int] + ids: list[int] = Field( + ..., + title="Asset IDs", + description="List of asset IDs for which to get available actions", + example=[1, 2, 3], + ) class ActionItemModel(ResponseModel): - id: int = Field(..., title="Action ID") - name: str = Field(..., title="Action name") + id: int = Field(..., title="Action ID", example=1) + name: str = Field(..., title="Action name", example="proxy") class ActionsResponseModel(ResponseModel): @@ -46,14 +52,26 @@ async def handle( """ async for row in nebula.db.iterate(query): - # TODO: implement allow-if and ACL - result.append( - ActionItemModel( - id=row["id"], - name=row["title"], - ) - ) + if not user.can("job_control", row["id"]): + continue + + action_settings = xml(row["settings"]) + + if allow_if_elm := action_settings.findall("allow_if"): + allow_if_cond = allow_if_elm[0].text + + for id_asset in request.ids: + asset = await nebula.Asset.load(id_asset) + assert asset + if not eval(allow_if_cond): + break + else: + result.append( + ActionItemModel( + id=row["id"], + name=row["title"], + ) + ) nebula.log.info(f"Actions for assets {request.ids} are {result}") - return ActionsResponseModel(actions=result) diff --git a/backend/api/jobs/jobs.py 
b/backend/api/jobs/jobs.py index 56cb702..2fd4ff0 100644 --- a/backend/api/jobs/jobs.py +++ b/backend/api/jobs/jobs.py @@ -1,13 +1,13 @@ import time from typing import Literal -from fastapi import Depends, Response +from fastapi import Response from nxtools import slugify from pydantic import Field import nebula -from nebula.objects.user import User -from server.dependencies import current_user +from nebula.enum import JobState +from server.dependencies import CurrentUser from server.models import RequestModel, ResponseModel from server.request import APIRequest @@ -55,27 +55,42 @@ class JobsRequestModel(ResponseModel): class JobsItemModel(RequestModel): - id: int - status: int - progress: int - id_action: int - id_service: int | None - id_asset: int - id_user: int | None - message: str - ctime: int | None - stime: int | None - etime: int | None - asset_name: str | None - action_name: str | None - service_name: str | None + id: int = Field(..., title="Job ID") + status: JobState = Field(..., title="Job status") + progress: int = Field(..., title="Progress", example=24) + id_action: int = Field(..., title="Action ID", example=1) + id_service: int | None = Field(None, title="Service ID", example=3) + id_asset: int = Field(..., title="Asset ID") + id_user: int | None = Field( + None, + title="User ID", + description="ID of the user who started the job", + ) + message: str = Field(None, title="Status description", example="Encoding 24%") + ctime: int | None = Field(None, title="Created at", example=f"{int(time.time())}") + stime: int | None = Field(None, title="Started at", example=f"{int(time.time())}") + etime: int | None = Field(None, title="Finished at", example=f"{int(time.time())}") + asset_name: str | None = Field( + None, + title="Asset name", + description="Asset full title (title + subtitle)", + example="Star Trek IV: The voyage home", + ) + idec: str | None = Field( + None, + title="Primary identifier", + example="A123456", + ) + action_name: str | None = Field(None, example="proxy") + service_name: str | None = Field(None, example="conv01") + service_type: str | None = Field(None, example="conv") class JobsResponseModel(ResponseModel): jobs: list[JobsItemModel] = Field(default_factory=list) -async def can_user_control_job(user: User, id_job: int) -> bool: +async def can_user_control_job(user: nebula.User, id_job: int) -> bool: if user.is_admin: return True if user.can("job_control", True): @@ -90,7 +105,9 @@ async def can_user_control_job(user: User, id_job: int) -> bool: query = """ SELECT a.id FROM assets a, jobs j WHERE j.id = $1 AND j.id_asset = a.id - AND a.meta->>'created_by'::INTEGER = $2 + AND ( + a.meta->>'created_by'::INTEGER = $2 + OR a.meta->'assignees' @> '[$2]'::JSONB """ res = await nebula.db.fetch(query, id_job, user.id) return bool(res) @@ -154,7 +171,7 @@ class JobsRequest(APIRequest): async def handle( self, request: JobsRequestModel, - user: User = Depends(current_user), + user: CurrentUser, ) -> JobsResponseModel: if request.abort: @@ -171,9 +188,14 @@ async def handle( conds.append(f"a.id IN (SELECT id FROM ft WHERE value LIKE '{elm}%')") if user.is_limited: - conds.append(f"a.meta->>'created_by' = {user.id}") + conds.append( + f""" + (a.meta->>'created_by' = '{user.id}' + OR a.meta->'assignees' @> '[{user.id}]'::JSONB) + """ + ) - elif request.view == "active": + if request.view == "active": # Pending, in_progress, restart conds.append(f"(j.status IN (0, 1, 5) OR j.end_time > {time.time() - 30})") elif request.view == "finished": @@ -205,10 +227,13 @@ async 
def handle( j.creation_time AS ctime, j.start_time AS stime, j.end_time as etime, - a.meta->>'title' AS asset_name, + a.meta->>'title' AS asset_title, + a.meta->>'subtitle' AS asset_subtitle, + a.meta->>'id/main' AS idec, s.title AS service_name, u.login AS user_name, - ac.title AS action_name + ac.title AS action_name, + ac.service_type as service_type FROM jobs as j LEFT JOIN assets as a ON a.id = j.id_asset LEFT JOIN services as s ON s.id = j.id_service @@ -216,6 +241,7 @@ async def handle( LEFT JOIN actions as ac ON ac.id = j.id_action {('WHERE ' + (' AND '.join(conds))) if conds else ''} ORDER BY + j.progress DESC NULLS LAST, j.end_time DESC, j.start_time DESC, j.creation_time DESC @@ -224,6 +250,10 @@ async def handle( jobs = [] async for row in nebula.db.iterate(query): - jobs.append(JobsItemModel(**row)) + asset_name = row["asset_title"] + if subtitle := row["asset_subtitle"]: + separator = nebula.settings.system.subtitle_separator + asset_name = f"{asset_name}{separator}{subtitle}" + jobs.append(JobsItemModel(asset_name=asset_name, **row)) return JobsResponseModel(jobs=jobs) diff --git a/backend/api/jobs/send.py b/backend/api/jobs/send.py index bf83ccf..5e94af5 100644 --- a/backend/api/jobs/send.py +++ b/backend/api/jobs/send.py @@ -1,11 +1,10 @@ import time from typing import Any -from fastapi import Depends from pydantic import Field import nebula -from server.dependencies import current_user +from server.dependencies import CurrentUser from server.models import RequestModel, ResponseModel from server.request import APIRequest @@ -163,7 +162,7 @@ class SendRequest(APIRequest): async def handle( self, request: SendRequestModel, - user: nebula.User = Depends(current_user), + user: CurrentUser, ) -> SendResponseModel: if not user.can("job_control", request.id_action): diff --git a/backend/api/order/order.py b/backend/api/order/order.py index cccd632..4a6981f 100644 --- a/backend/api/order/order.py +++ b/backend/api/order/order.py @@ -33,6 +33,9 @@ async def set_rundown_order( connection=conn, username=user.name, ) + + assert isinstance(item, nebula.Item) # mypy + # Empty event may not have id_bin set, # but we know, where we are putting it. 
item["id_bin"] = id_bin diff --git a/backend/api/playout/__init__.py b/backend/api/playout/__init__.py index 4f9e89b..38efc95 100644 --- a/backend/api/playout/__init__.py +++ b/backend/api/playout/__init__.py @@ -25,6 +25,9 @@ async def handle( if not channel: raise nebula.NotFoundException("Channel not found") + if channel.engine == "dummy": + return PlayoutResponseModel(plugins=[]) + controller_url = f"http://{channel.controller_host}:{channel.controller_port}" async with httpx.AsyncClient() as client: diff --git a/backend/api/rundown/__init__.py b/backend/api/rundown/__init__.py index 48437b6..db22340 100644 --- a/backend/api/rundown/__init__.py +++ b/backend/api/rundown/__init__.py @@ -21,10 +21,7 @@ async def handle( user: nebula.User = Depends(current_user), ) -> RundownResponseModel: - # TODO: Handle ACL here - # if not ( - # user.has_right("rundown_view", id_channel) - # or user.has_right("rundown_edit", id_channel) - # ): + if not user.can("rundown_view", request.id_channel): + raise nebula.ForbiddenException("You are not allowed to view this rundown") return await get_rundown(request) diff --git a/backend/api/rundown/models.py b/backend/api/rundown/models.py index cbdbfd0..6e97dd7 100644 --- a/backend/api/rundown/models.py +++ b/backend/api/rundown/models.py @@ -32,7 +32,9 @@ class RundownRow(ResponseModel): mark_in: float | None = Field(None) mark_out: float | None = Field(None) run_mode: RunMode | None = Field(None) + loop: bool | None = Field(None) item_role: ItemMode | None = Field(None) + is_empty: bool = Field(True) class RundownResponseModel(ResponseModel): diff --git a/backend/api/rundown/rundown.py b/backend/api/rundown/rundown.py index 0133f2c..8257d20 100644 --- a/backend/api/rundown/rundown.py +++ b/backend/api/rundown/rundown.py @@ -147,6 +147,14 @@ async def get_rundown(request: RundownRequestModel) -> RundownResponseModel: # Append item to the result + meta = {} + if asset: + for key in channel.rundown_columns: + if (key in asset.meta) and ( + key not in ["title", "subtitle", "id_asset", "duration", "status"] + ): + meta[key] = asset.meta[key] + row = RundownRow( id=id_item, row_number=len(rows), @@ -154,6 +162,7 @@ async def get_rundown(request: RundownRequestModel) -> RundownResponseModel: scheduled_time=ts_scheduled, broadcast_time=ts_broadcast, run_mode=imeta.get("run_mode"), + loop=imeta.get("loop"), item_role=imeta.get("item_role"), title=item["title"], subtitle=item["subtitle"], @@ -165,6 +174,8 @@ async def get_rundown(request: RundownRequestModel) -> RundownResponseModel: asset_mtime=ameta.get("mtime", 0), mark_in=mark_in, mark_out=mark_out, + is_empty=False, + meta=meta, ) rows.append(row) @@ -177,5 +188,6 @@ async def get_rundown(request: RundownRequestModel) -> RundownResponseModel: ts_scheduled += duration ts_broadcast += duration last_event.duration += duration + last_event.is_empty = False return RundownResponseModel(rows=rows) diff --git a/backend/api/scheduler/__init__.py b/backend/api/scheduler/__init__.py index ce0aafe..88d4af0 100644 --- a/backend/api/scheduler/__init__.py +++ b/backend/api/scheduler/__init__.py @@ -1,6 +1,7 @@ from fastapi import Depends import nebula +from nebula.helpers.scheduling import bin_refresh from server.dependencies import current_user, request_initiator from server.request import APIRequest @@ -22,9 +23,18 @@ async def handle( initiator: str = Depends(request_initiator), ) -> SchedulerResponseModel: - has_rights = True # TODO + if not user.can("scheduler_view", request.id_channel): + raise 
nebula.ForbiddenException("You are not allowed to view this channel") - result = await scheduler(request, has_rights) + editable = user.can("scheduler_edit", request.id_channel) + result = await scheduler(request, editable) + + if result.affected_bins: + await bin_refresh( + result.affected_bins, + initiator=initiator, + user=user, + ) if result.affected_events: await nebula.msg( diff --git a/backend/api/scheduler/models.py b/backend/api/scheduler/models.py index c4f7441..30f6ee2 100644 --- a/backend/api/scheduler/models.py +++ b/backend/api/scheduler/models.py @@ -26,6 +26,8 @@ class EventData(RequestModel): example=123, ) + items: list[dict[str, Serializable]] | None = Field(default_factory=list) + meta: dict[str, Serializable] | None = Field( default=None, title="Event metadata", @@ -81,6 +83,14 @@ class SchedulerResponseModel(ResponseModel): default_factory=list, title="Affected events", description="List of event IDs that were affected by this request", + example=[134, 135, 136], + ) + + affected_bins: list[int] = Field( + default_factory=list, + title="Affected bins", + description="List of bin IDs that were affected by this request", + example=[134, 135, 136], ) events: list[dict] = Field( diff --git a/backend/api/scheduler/scheduler.py b/backend/api/scheduler/scheduler.py index 3e432c4..99312cf 100644 --- a/backend/api/scheduler/scheduler.py +++ b/backend/api/scheduler/scheduler.py @@ -26,6 +26,7 @@ async def create_new_event( new_event["start"] = event_data.start asset_meta = {} + position = 0 if event_data.id_asset: asset = await nebula.Asset.load(event_data.id_asset, connection=conn) @@ -35,13 +36,27 @@ async def create_new_event( new_item = nebula.Item(connection=conn) new_item["id_asset"] = event_data.id_asset new_item["id_bin"] = new_bin.id - new_item["position"] = 0 + new_item["position"] = position new_item["mark_in"] = asset["mark_in"] new_item["mark_out"] = asset["mark_out"] await new_item.save() new_bin["duration"] = asset.duration asset_meta = asset.meta + position += 1 + + if event_data.items: + for item_data in event_data.items: + if item_data.get("id"): + assert type(item_data["id"]) == int, "Invalid item ID" + item = await nebula.Item.load(item_data["id"], connection=conn) + else: + item = nebula.Item(connection=conn) + item.update(item_data) + item["id_bin"] = new_bin.id + item["position"] = position + await item.save() + position += 1 for field in channel.fields: if (value := asset_meta.get(field.name)) is not None: @@ -74,7 +89,8 @@ async def scheduler( start_time = parse_rundown_date(request.date, channel) end_time = start_time + (request.days * 86400) - changed_event_ids = [] + affected_events: list[int] = [] + affected_bins: list[int] = [] # # Delete events @@ -82,7 +98,7 @@ async def scheduler( if request.delete and editable: deleted_event_ids = await delete_events(request.delete) - changed_event_ids.extend(deleted_event_ids) + affected_events.extend(deleted_event_ids) # # Create / update events # @@ -100,20 +116,52 @@ async def scheduler( assert ( event_at_position.id is not None ), "Event at position returned event without ID. This should not happen." - changed_event_ids.append(event_at_position.id) + affected_events.append(event_at_position.id) - if event_data.asset_id: + if event_data.id_asset: # Replace event with another asset. # This should be supported, but is not yet. - if event_data.asset_id == event_at_position["id_asset"]: + if event_data.id_asset == event_at_position["id_asset"]: # Replace event with itself. This is a no-op. 
continue - raise nebula.NotImplementedException("Replacing events not supported") - asset = await nebula.Asset.load(event_data.asset_id) - + asset = await nebula.Asset.load(event_data.id_asset) assert asset + + # load the existing bin + ex_bin = await nebula.Bin.load(event_at_position["id_magic"]) + await ex_bin.get_items() + + for item in ex_bin.items: + if item["id_asset"] == event_at_position["id_asset"]: + # replace the asset in the bin + item["id_asset"] = event_data.id_asset + item["mark_in"] = asset["mark_in"] + item["mark_out"] = asset["mark_out"] + await item.save() + break + else: + # no primary asset found, so append it + new_item = nebula.Item() + new_item["id_asset"] = event_data.id_asset + new_item["id_bin"] = ex_bin.id + new_item["position"] = len(ex_bin.items) + new_item["mark_in"] = asset["mark_in"] + new_item["mark_out"] = asset["mark_out"] + await new_item.save() + ex_bin.items.append(new_item) + affected_bins.append(ex_bin.id) + + # update the event + event_at_position["id_asset"] = event_data.id_asset + for field in channel.fields: + if field.name in asset.meta: + event_at_position[field.name] = asset.meta[field.name] + affected_events.append(event_at_position.id) + await ex_bin.save() + await event_at_position.save() + # TODO: Implement replacing events else: @@ -128,7 +176,7 @@ async def scheduler( for field in channel.fields: if event_data.meta and (field.name in event_data.meta): event[field.name] = event_data.meta[field.name] - changed_event_ids.append(event_data.id) + affected_events.append(event_data.id) await event.save(notify=False) else: @@ -143,5 +191,6 @@ async def scheduler( events = [] return SchedulerResponseModel( events=[e.meta for e in events], - affected_events=changed_event_ids, + affected_events=affected_events, + affected_bins=affected_bins, ) diff --git a/backend/api/services.py b/backend/api/services.py index e7bd6bf..70e95da 100644 --- a/backend/api/services.py +++ b/backend/api/services.py @@ -1,11 +1,10 @@ import time -from fastapi import Depends from pydantic import Field import nebula from nebula.enum import ServiceState -from server.dependencies import current_user +from server.dependencies import CurrentUser from server.models import RequestModel, ResponseModel from server.request import APIRequest @@ -55,7 +54,7 @@ class Request(APIRequest): async def handle( self, request: ServiceRequestModel, - user: nebula.User = Depends(current_user), + user: CurrentUser, ) -> ServicesResponseModel: """List and control installed services.""" diff --git a/backend/api/set.py b/backend/api/set.py index 0f89248..961bda0 100644 --- a/backend/api/set.py +++ b/backend/api/set.py @@ -1,14 +1,15 @@ import os from typing import Any -from fastapi import Depends from pydantic import Field import nebula from nebula.common import import_module from nebula.enum import ObjectType +from nebula.helpers.scheduling import bin_refresh from nebula.objects.utils import get_object_class_by_name -from server.dependencies import current_user +from nebula.settings import load_settings +from server.dependencies import CurrentUser from server.models import RequestModel, ResponseModel from server.request import APIRequest @@ -26,7 +27,7 @@ def for_object(cls, object_type: ObjectType) -> Any: @classmethod def load_validators(cls) -> None: - nebula.log.info("Loading validators") + nebula.log.trace("Loading validators") cls.validators = {} if nebula.config.plugin_dir is None: @@ -39,7 +40,6 @@ def load_validators(cls) -> None: object_type.value.lower() + ".py", ) if not 
os.path.exists(validator_path): - nebula.log.warn(f"Validator not found in {validator_path}") continue validator_name = f"{object_type.value.lower()}_validator" @@ -49,7 +49,7 @@ def load_validators(cls) -> None: nebula.log.error(f"Validator {validator_name} has no validate method") continue - nebula.log.info(f"Loaded validator {validator_name}") + nebula.log.debug(f"Loaded validator {validator_name}") cls.validators[object_type.name] = validator.validate @@ -124,9 +124,11 @@ class SetRequest(APIRequest): async def handle( self, request: SetRequestModel, - user: nebula.User = Depends(current_user), + user: CurrentUser, ) -> SetResponseModel: """Create or update an object.""" + + reload_settings = False pool = await nebula.db.pool() async with pool.acquire() as conn: async with conn.transaction(): @@ -140,20 +142,36 @@ async def handle( object = await object_class.load(request.id, connection=conn) object["updated_by"] = user.id + if (password := request.data.pop("password", None)) is not None: + assert isinstance(password, str) + assert isinstance(object, nebula.User) + object.set_password(password) + if validator := Validator.for_object(request.object_type): - await validator( - object, - request.data, - connection=conn, - user=user, - ) + try: + await validator( + object, + request.data, + connection=conn, + user=user, + ) + except nebula.RequestSettingsReload: + reload_settings = True else: object.update(request.data) + await object.save() - return SetResponseModel( - id=object.id, - object_type=request.object_type, - ) + + if isinstance(object, nebula.Item) and object["id_bin"]: + await bin_refresh([object["id_bin"]]) + + if reload_settings: + await load_settings() + + return SetResponseModel( + id=object.id, + object_type=request.object_type, + ) class OperationsRequest(APIRequest): @@ -164,12 +182,14 @@ class OperationsRequest(APIRequest): async def handle( self, request: OperationsRequestModel, - user: nebula.User = Depends(current_user), + user: CurrentUser, ) -> OperationsResponseModel: """Create or update multiple objects in one requests.""" pool = await nebula.db.pool() result = [] + reload_settings = False + affected_bins: list[int] = [] for operation in request.operations: success = True op_id = operation.id @@ -190,16 +210,36 @@ async def handle( ) object["updated_by"] = user.id + if ( + password := operation.data.pop("password", None) + ) is not None: + assert isinstance( + password, str + ), "Password must be a string" + assert isinstance( + object, nebula.User + ), "Object must be a user in order to set a password" + object.set_password(password) + if validator := Validator.for_object(operation.object_type): - await validator( - object, - operation.data, - connection=conn, - user=user, - ) + try: + await validator( + object, + operation.data, + connection=conn, + user=user, + ) + except nebula.RequestSettingsReload: + reload_settings = True else: object.update(operation.data) await object.save() + if ( + isinstance(object, nebula.Item) + and object["id_bin"] + and object["id_bin"] not in affected_bins + ): + affected_bins.append(object["id_bin"]) op_id = object.id except Exception: nebula.log.traceback(user=user.name) @@ -213,5 +253,11 @@ async def handle( ) ) + if affected_bins: + await bin_refresh(affected_bins) + + if reload_settings: + await load_settings() + overall_success = all([x.success for x in result]) return OperationsResponseModel(operations=result, success=overall_success) diff --git a/backend/api/solve.py b/backend/api/solve.py index d23049e..8609d19 100644 
--- a/backend/api/solve.py
+++ b/backend/api/solve.py
@@ -1,11 +1,11 @@
 import os

-from fastapi import Depends, Response
+from fastapi import Response
 from pydantic import Field

 import nebula
 from nebula.common import classes_from_module, import_module
-from server.dependencies import current_user
+from server.dependencies import CurrentUser
 from server.models import RequestModel
 from server.request import APIRequest
@@ -61,7 +61,7 @@ class Request(APIRequest):
     async def handle(
         self,
         request: SolveRequestModel,
-        user: nebula.User = Depends(current_user),
+        user: CurrentUser,
     ) -> Response:

         solver = get_solver(request.solver)
diff --git a/backend/api/upload.py b/backend/api/upload.py
new file mode 100644
index 0000000..d009622
--- /dev/null
+++ b/backend/api/upload.py
@@ -0,0 +1,84 @@
+import os
+import time
+
+import aiofiles
+from fastapi import Request, Response
+
+import nebula
+from nebula.enum import MediaType, ObjectStatus
+from nebula.filetypes import FileTypes
+from server.dependencies import AssetInPath, CurrentUser
+from server.request import APIRequest
+
+
+class UploadRequest(APIRequest):
+    """Upload a media file for a given asset"""
+
+    name: str = "upload"
+    path: str = "/upload/{id_asset}"
+    title: str = "Upload asset"
+    response_class = Response
+
+    async def handle(
+        self,
+        request: Request,
+        asset: AssetInPath,
+        user: CurrentUser,
+    ):
+        """Upload a media file for a given asset.
+
+        This endpoint is used by the web frontend to upload media files.
+        """
+
+        assert asset["media_type"] == MediaType.FILE, "Only file assets can be uploaded"
+        extension = request.headers.get("X-nebula-extension")
+        assert extension, "Missing X-nebula-extension header"
+
+        assert FileTypes.get(extension) == asset["content_type"], "Invalid content type"
+
+        if nebula.settings.system.upload_storage and nebula.settings.system.upload_dir:
+            direct = False
+            storage = nebula.storages[nebula.settings.system.upload_storage]
+            upload_dir = nebula.settings.system.upload_dir
+            base_name = nebula.settings.system.upload_base_name.format(**asset.meta)
+            upload_full_dir = os.path.join(storage.local_path, upload_dir)
+            if not os.path.isdir(upload_full_dir):
+                try:
+                    os.makedirs(upload_full_dir)
+                except Exception:
+                    raise nebula.NebulaException("Unable to create upload directory")
+            target_path = os.path.join(upload_full_dir, f"{base_name}.{extension}")
+        else:
+            direct = True
+            storage = nebula.storages[asset["id_storage"]]
+            assert asset.local_path, f"{asset} does not have path set"
+            bname = os.path.splitext(asset.local_path)[0]
+            target_path = f"{bname}.{extension}"
+
+        nebula.log.debug(f"Uploading media file for {asset}", user=user.name)
+
+        temp_dir = os.path.join(storage.local_path, ".nx", "creating")
+        if not os.path.isdir(temp_dir):
+            os.makedirs(temp_dir)
+
+        temp_path = os.path.join(temp_dir, f"upload-{asset.id}-{time.time()}")
+
+        i = 0
+        async with aiofiles.open(temp_path, "wb") as f:
+            async for chunk in request.stream():
+                i += len(chunk)
+                await f.write(chunk)
+        nebula.log.debug(f"Uploaded {i} bytes", user=user.name)
+
+        os.rename(temp_path, target_path)
+        if direct:
+            if extension != os.path.splitext(asset["path"])[1][1:]:
+                nebula.log.warning(
+                    f"Uploaded media file extension {extension} does not match "
+                    f"asset extension {os.path.splitext(asset['path'])[1][1:]}"
+                )
+            asset["path"] = os.path.splitext(asset["path"])[0] + "." + extension
+            # TODO: remove old file?
+ asset["status"] = ObjectStatus.CREATING + await asset.save() + nebula.log.info(f"Uploaded media file for {asset}", user=user.name) diff --git a/backend/cli/__main__.py b/backend/cli/__main__.py index 7516be5..8aad255 100644 --- a/backend/cli/__main__.py +++ b/backend/cli/__main__.py @@ -24,9 +24,11 @@ def get_plugin(name: str): try: plugin_module = import_module(module_name, module_path) except ModuleNotFoundError: - nebula.log.error(f"Module {name} not found") + nebula.log.error(f"Unable to import module {module_path}") + continue except ImportError: nebula.log.traceback(f"Error importing module {name}") + continue for plugin_class in classes_from_module( nebula.plugins.CLIPlugin, plugin_module diff --git a/backend/mypy.ini b/backend/mypy.ini index 32ad10c..e34a1a3 100644 --- a/backend/mypy.ini +++ b/backend/mypy.ini @@ -51,3 +51,8 @@ ignore_missing_imports = true ignore_errors = true follow_imports = skip ignore_missing_imports = true + +[mypy-mistune.*] +ignore_errors = true +follow_imports = skip +ignore_missing_imports = true diff --git a/backend/nebula/__init__.py b/backend/nebula/__init__.py index ea595bd..6adc2d6 100644 --- a/backend/nebula/__init__.py +++ b/backend/nebula/__init__.py @@ -1,3 +1,5 @@ +__version__ = "6.0.0" + __all__ = [ "config", "settings", @@ -19,6 +21,7 @@ "ForbiddenException", "NebulaException", "NotFoundException", + "RequestSettingsReload", "UnauthorizedException", "LoginFailedException", "NotImplementedException", @@ -28,6 +31,12 @@ "CLIPlugin", ] +import sys + +if "--version" in sys.argv: + print(__version__) + sys.exit(0) + import asyncio from .config import config @@ -40,6 +49,7 @@ NebulaException, NotFoundException, NotImplementedException, + RequestSettingsReload, UnauthorizedException, ValidationException, ) diff --git a/backend/nebula/config.py b/backend/nebula/config.py index 911a523..57d8041 100644 --- a/backend/nebula/config.py +++ b/backend/nebula/config.py @@ -11,7 +11,10 @@ class NebulaConfig(BaseModel): description="", ) - motd: str = Field("Nebula 6 ALPHA") + motd: str = Field( + "Nebula 6", + description="Message of the day", + ) postgres: PostgresDsn = Field( "postgres://nebula:nebula@postgres/nebula", diff --git a/backend/nebula/exceptions.py b/backend/nebula/exceptions.py index 18107e5..e7eac5e 100644 --- a/backend/nebula/exceptions.py +++ b/backend/nebula/exceptions.py @@ -1,11 +1,16 @@ from nebula.log import log as logger +class RequestSettingsReload(Exception): + pass + + class NebulaException(Exception): """Base class for all Nebula exceptions.""" detail: str = "Error" status: int = 500 + log: bool = True def __init__( self, @@ -19,7 +24,7 @@ def __init__( if detail is not None: self.detail = detail - if log is True: + if log is True or self.log: logger.error(f"EXCEPTION: {self.status} {self.detail}") elif type(log) is str: logger.error(f"EXCEPTION: {self.status} {log}") @@ -30,38 +35,46 @@ def __init__( class BadRequestException(NebulaException): detail = "Bad request" status = 400 + log = True class NotFoundException(NebulaException): detail = "Not found" status = 404 + log = False class UnauthorizedException(NebulaException): detail = "Unauthorized" status = 401 + log = False class ForbiddenException(NebulaException): detail = "Forbidden" status = 403 + log = False class LoginFailedException(NebulaException): detail = "Login failed" status = 401 + log = True class NotImplementedException(NebulaException): detail = "Not implemented" status = 501 + log = True class ConflictException(NebulaException): detail = "Conflict" status = 409 
+    log = True


 class ValidationException(NebulaException):
     detail = "Validation failed"
     status = 422
+    log = True
diff --git a/backend/nebula/filetypes.py b/backend/nebula/filetypes.py
new file mode 100644
index 0000000..8be4c0c
--- /dev/null
+++ b/backend/nebula/filetypes.py
@@ -0,0 +1,80 @@
+from nebula.enum import ContentType
+
+
+class FileTypes:
+    data = {
+        "dv": ContentType.VIDEO,
+        "avi": ContentType.VIDEO,
+        "mov": ContentType.VIDEO,
+        "mpg": ContentType.VIDEO,
+        "mpeg": ContentType.VIDEO,
+        "mp4": ContentType.VIDEO,
+        "flv": ContentType.VIDEO,
+        "m4v": ContentType.VIDEO,
+        "m2t": ContentType.VIDEO,
+        "m2v": ContentType.VIDEO,
+        "m2p": ContentType.VIDEO,
+        "m2ts": ContentType.VIDEO,
+        "mts": ContentType.VIDEO,
+        "mkv": ContentType.VIDEO,
+        "3gp": ContentType.VIDEO,
+        "vob": ContentType.VIDEO,
+        "wmv": ContentType.VIDEO,
+        "video": ContentType.VIDEO,
+        "mxf": ContentType.VIDEO,
+        "ogv": ContentType.VIDEO,
+        "divx": ContentType.VIDEO,
+        "m3u8": ContentType.VIDEO,
+        "mpd": ContentType.VIDEO,
+        "webm": ContentType.VIDEO,
+        "wav": ContentType.AUDIO,
+        "aiff": ContentType.AUDIO,
+        "aif": ContentType.AUDIO,
+        "ogg": ContentType.AUDIO,
+        "mp3": ContentType.AUDIO,
+        "mp2": ContentType.AUDIO,
+        "m2a": ContentType.AUDIO,
+        "aac": ContentType.AUDIO,
+        "flac": ContentType.AUDIO,
+        "jpg": ContentType.IMAGE,
+        "jpeg": ContentType.IMAGE,
+        "png": ContentType.IMAGE,
+        "tga": ContentType.IMAGE,
+        "targa": ContentType.IMAGE,
+        "tif": ContentType.IMAGE,
+        "tiff": ContentType.IMAGE,
+        "hdr": ContentType.IMAGE,
+        "exr": ContentType.IMAGE,
+        "bmp": ContentType.IMAGE,
+        "gif": ContentType.IMAGE,
+        "psd": ContentType.IMAGE,
+        "xcf": ContentType.IMAGE,
+    }
+
+    @classmethod
+    def get(cls, ext: str) -> ContentType | None:
+        return cls.data.get(ext)
+
+    @classmethod
+    def by_ext(cls, ext: str) -> ContentType:
+        return cls.data[ext.lower()]
+
+    @classmethod
+    def is_video(cls, ext: str) -> bool:
+        return cls.data.get(ext.lower(), -1) == ContentType.VIDEO
+
+    @classmethod
+    def is_audio(cls, ext: str) -> bool:
+        return cls.data.get(ext.lower(), -1) == ContentType.AUDIO
+
+    @classmethod
+    def is_image(cls, ext: str) -> bool:
+        return cls.data.get(ext.lower(), -1) == ContentType.IMAGE
+
+    @classmethod
+    def exts_by_type(cls, content_type: ContentType) -> list[str]:
+        return [ext for ext, ct in cls.data.items() if ct == content_type]
+
+    @classmethod
+    def exts(cls):
+        return list(cls.data.keys())
diff --git a/backend/nebula/helpers/email.py b/backend/nebula/helpers/email.py
new file mode 100644
index 0000000..858a2b7
--- /dev/null
+++ b/backend/nebula/helpers/email.py
@@ -0,0 +1,86 @@
+import smtplib
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+
+import nebula
+
+try:
+    import mistune  # noqa
+
+    has_mistune = True
+except ModuleNotFoundError:
+    has_mistune = False
+
+
+def html2email(html) -> MIMEMultipart:
+    msg = MIMEMultipart("alternative")
+    text = "no plaintext version available"
+    part1 = MIMEText(text, "plain")
+    part2 = MIMEText(html, "html")
+
+    msg.attach(part1)
+    msg.attach(part2)
+
+    return msg
+
+
+def markdown2email(text) -> MIMEMultipart | MIMEText:
+    if has_mistune:
+        msg = MIMEMultipart("alternative")
+        html = mistune.html(text)
+        part1 = MIMEText(text, "plain")
+        part2 = MIMEText(html, "html")
+        msg.attach(part1)
+        msg.attach(part2)
+        return msg
+    else:
+        return MIMEText(text, "plain")
+
+
+def send_mail(
+    to: str | list[str],
+    subject: str,
+    body: str | MIMEText | MIMEMultipart,
+    **kwargs,
+):
+    addresses: list[str] = []
+    if isinstance(to, str):
+
addresses.append(to) + else: + addresses.extend(to) + + reply_address = kwargs.get("from", nebula.settings.system.mail_from) + + msg: MIMEText | MIMEMultipart + if isinstance(body, str): + msg = MIMEText(body) + else: + msg = body + + msg["Subject"] = subject + msg["From"] = reply_address + msg["To"] = ",".join(to) + + try: + assert nebula.settings.system.smtp_host is not None, "SMTP host not set" + assert nebula.settings.system.smtp_port is not None, "SMTP port not set" + except AssertionError as e: + nebula.log.error(f"Unable to send email: {e}") + return + + if nebula.settings.system.smtp_port == 25: + s = smtplib.SMTP(nebula.settings.system.smtp_host, port=25) + else: + s = smtplib.SMTP_SSL( + nebula.settings.system.smtp_host, port=nebula.settings.system.smtp_port + ) + + user = nebula.settings.system.smtp_user + password = nebula.settings.system.smtp_pass + + if user: + assert password is not None, "SMTP user set but no password" + + if user and password: + s.login(user, password) + s.sendmail(reply_address, addresses, msg.as_string()) diff --git a/backend/nebula/objects/asset.py b/backend/nebula/objects/asset.py index 6647d9a..6158657 100644 --- a/backend/nebula/objects/asset.py +++ b/backend/nebula/objects/asset.py @@ -4,6 +4,7 @@ from nebula.enum import ContentType, MediaType, ObjectStatus from nebula.objects.base import BaseObject +from nebula.settings import settings from nebula.storages import storages @@ -58,6 +59,21 @@ def slug(self) -> str | None: return slugify(f"{self['title']} {self['subtitle']}") + @property + def title(self) -> str: + """Return display title. + + Display title is a title with optional subtitle. + """ + + separator = settings.system.subtitle_separator + if not (title := self.get("title")): + title = f"Asset {self.id}" if self.id else "New asset" + + if subtitle := self.get("subtitle"): + return f"{title}{separator}{subtitle}" + return title + @property def duration(self) -> float: """Return duration of the asset in seconds. 
diff --git a/backend/nebula/objects/user.py b/backend/nebula/objects/user.py index 19074d8..6d39f93 100644 --- a/backend/nebula/objects/user.py +++ b/backend/nebula/objects/user.py @@ -13,12 +13,13 @@ NotImplementedException, ) from nebula.objects.base import BaseObject +from nebula.settings import settings def hash_password(password: str): if config.password_hashing == "legacy": return hashlib.sha256(password.encode("ascii")).hexdigest() - raise NotImplementedException + raise NotImplementedException("Hashing method not available") class UserRights(BaseModel): @@ -50,7 +51,7 @@ class User(BaseObject): @property def language(self): """Return the preferred language of the user.""" - return self["language"] or "en" + return self["language"] or settings.system.language @property def name(self): @@ -66,7 +67,7 @@ async def by_login(cls, login: str) -> "User": """Return the user with the given login.""" row = await db.fetch("SELECT meta FROM users WHERE login = $1", login) if not row: - raise NotFoundException + raise NotFoundException(f"User {login} not found") return cls.from_row(row[0]) @classmethod @@ -86,7 +87,10 @@ async def login(cls, username: str, password: str) -> "User": except asyncpg.exceptions.UndefinedTableError: raise NebulaException("Nebula is not installed") if not res: - raise LoginFailedException + raise LoginFailedException( + "Invalid login/password combination", + log=f"Invalid logging attempted with name '{username}'", + ) return cls(meta=res[0]["meta"]) def set_password(self, password: str): diff --git a/backend/nebula/plugins/__init__.py b/backend/nebula/plugins/__init__.py index aee2b21..0a777e4 100644 --- a/backend/nebula/plugins/__init__.py +++ b/backend/nebula/plugins/__init__.py @@ -1,7 +1,10 @@ __all__ = ["SolverPlugin", "CLIPlugin"] +from .common import modules_root from .solver import SolverPlugin +assert modules_root + class CLIPlugin: name: str = "cli_plugin" diff --git a/backend/nebula/plugins/common.py b/backend/nebula/plugins/common.py new file mode 100644 index 0000000..966ef21 --- /dev/null +++ b/backend/nebula/plugins/common.py @@ -0,0 +1,14 @@ +import os +import sys + +from nebula.config import config + +modules_root = os.path.join(config.plugin_dir, "common") +if os.path.isdir(modules_root): + for pydirname in os.listdir(modules_root): + pydir = os.path.join(modules_root, pydirname) + if not os.path.isdir(pydir): + continue + if pydir in sys.path: + continue + sys.path.append(pydir) diff --git a/backend/nebula/plugins/solver.py b/backend/nebula/plugins/solver.py index ad75b5f..79dfb49 100644 --- a/backend/nebula/plugins/solver.py +++ b/backend/nebula/plugins/solver.py @@ -3,6 +3,10 @@ import nebula from nebula.helpers.scheduling import bin_refresh +from .common import modules_root + +assert modules_root + class SolverPlugin: name: str = "solver" @@ -47,8 +51,8 @@ async def __call__(self, id_item: int): self.new_items = [] self.new_events = [] - self._next_event = await self.get_next_event() - self._needed_duration = await self.get_needed_duration() + self._next_event = await self.get_next_event(force=True) + self._needed_duration = await self.get_needed_duration(force=True) self._solve_next = None return await self.main() @@ -57,9 +61,9 @@ async def __call__(self, id_item: int): # Property loaders # - async def get_next_event(self) -> nebula.Event: + async def get_next_event(self, force: bool = False) -> nebula.Event: """Load event following the current one.""" - if self._next_event is None: + if (self._next_event is None) or force: res = await 
nebula.db.fetch(
                 """
                 SELECT meta FROM events
@@ -80,9 +84,9 @@ async def get_next_event(self) -> nebula.Event:
             )
         return self._next_event

-    async def get_needed_duration(self):
+    async def get_needed_duration(self, force: bool = False):
         """Load the duration needed to fill the current event."""
-        if not self._needed_duration:
+        if (self._needed_duration is None) or force:
             dur = self.next_event["start"] - self.event["start"]
             items = await self.bin.get_items()
             for item in items:
@@ -166,8 +170,9 @@ async def block_split(self, tc: float) -> None:

         await new_event.save(notify=False)

-        self._needed_duration = None
-        self._next_event = None
+        self._next_event = new_event
+        self._needed_duration = await self.get_needed_duration(force=True)
+        self._needed_duration -= self.current_duration
         self._solve_next = new_placeholder

         if new_bin.id and (new_bin.id not in self.affected_bins):
@@ -207,9 +212,12 @@ async def main(self):
         if self.bin.id not in self.affected_bins:
             self.affected_bins.append(self.bin.id)

+        # save event in case solver updated its metadata
+        await self.event.save()
+
         # another placeholder was created, so we need to solve it
         if self._solve_next:
-            self(self._solve_next)
+            await self(self._solve_next.id)
             return

         # recalculate bin durations and notify clients about changes
diff --git a/backend/nebula/settings/__init__.py b/backend/nebula/settings/__init__.py
index a79d6e5..9a6adba 100644
--- a/backend/nebula/settings/__init__.py
+++ b/backend/nebula/settings/__init__.py
@@ -2,6 +2,7 @@

 from nebula.config import config
 from nebula.db import db
+from nebula.log import log
 from nebula.settings.metatypes import MetaType
 from nebula.settings.models import (
     CSItemModel,
@@ -91,6 +92,7 @@ async def load_settings():
     Either in nebula.server on_init handler or by nebula.run
     """

+    log.trace("Loading settings")
     new_settings = await get_server_settings()
     for key in new_settings.dict().keys():
         if key in settings.dict().keys():
diff --git a/backend/nebula/settings/models.py b/backend/nebula/settings/models.py
index b1ffc23..ddcbce5 100644
--- a/backend/nebula/settings/models.py
+++ b/backend/nebula/settings/models.py
@@ -3,7 +3,7 @@
 from pydantic import Field

 from nebula.enum import ContentType, MediaType, ServiceState
-from nebula.settings.common import SettingsModel
+from nebula.settings.common import LanguageCode, SettingsModel
 from nebula.settings.metatypes import MetaType

 CSItemRole = Literal["hidden", "header", "label", "option"]
@@ -59,6 +59,12 @@ class BaseSystemSettings(SettingsModel):
         description="A name used as the site (instance) identification",
     )

+    language: LanguageCode = Field(
+        "en",
+        title="Default language",
+        example="en",
+    )
+
     ui_asset_create: bool = Field(
         True,
         title="Create assets in UI",
@@ -79,6 +85,12 @@ class BaseSystemSettings(SettingsModel):
         "(when set to false, assets can only be uploaded via API and watch folders)",
     )

+    subtitle_separator: str = Field(
+        ": ",
+        title="Subtitle separator",
+        description="String used to separate title and subtitle in displayed title",
+    )
+

 class SystemSettings(BaseSystemSettings):
     """System settings.
@@ -93,13 +105,15 @@ class SystemSettings(BaseSystemSettings): worker_plugin_path: str = Field(".nx/plugins") upload_storage: int | None = Field(None) upload_dir: str | None = Field(None) + upload_base_name: str = Field("{id}") smtp_host: str | None = Field(None, title="SMTP host", example="smtp.example.com") smtp_port: int | None = Field(None, title="SMTP port", example=465) smtp_user: str | None = Field(None, title="SMTP user", example="smtpuser") - smtp_password: str | None = Field(None, title="SMTP password", example="smtppass.1") + smtp_pass: str | None = Field(None, title="SMTP password", example="smtppass.1") + mail_from: str | None = Field( - None, + "Nebula ", title="Mail from", description="Email address used as the sender", example="Nebula ", @@ -171,6 +185,7 @@ class StorageSettings(BaseStorageSettings): class FolderField(SettingsModel): name: str = Field(..., title="Field name") + section: str | None = Field(None, title="Section") mode: str | None = None format: str | None = None order: str | None = None diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 32d69c0..65ac38b 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nebula" -version = "6.0.0-beta.2" +version = "6.0.0" description = "Open source broadcast automation system" authors = ["Nebula Broadcast "] @@ -8,7 +8,7 @@ authors = ["Nebula Broadcast "] python = "^3.10" aiofiles = "^22.1.0" asyncpg = "^0.27.0" -fastapi = "^0.92" +fastapi = "^0.95" httpx = "^0.23.3" mistune = "^2.0.4" nxtools = "^1.6" @@ -17,7 +17,8 @@ pydantic = "^1.10.4" python-dotenv = "^0.19.2" redis = "^4.5.1" rich = "^12.0.1" -uvicorn = {extras = ["standard"], version = "^0.17.6"} +uvicorn = {extras = ["standard"], version = "0.20.0"} +shortuuid = "^1.0.11" [tool.poetry.dev-dependencies] pytest = "^7.0" diff --git a/backend/schema/meta-aliases-cs.json b/backend/schema/meta-aliases-cs.json index fede682..e1e4a8d 100644 --- a/backend/schema/meta-aliases-cs.json +++ b/backend/schema/meta-aliases-cs.json @@ -1,5 +1,6 @@ [ ["commercial/content" , "Reklamí obsah" , "Rekl. 
obsah" , ""], + ["commercial/pp" , "Product placement" , "PP" , "Obsahuje product placement"], ["solver" , "Solver" , null , ""], ["year" , "Rok" , null , "Rok vydÃĄní díla"], ["id_user" , "ID uÅživatele" , null , ""], @@ -74,6 +75,8 @@ ["graphic_usage" , "UÅžití grafiky" , "UÅžití grafiky" , ""], ["audio/silence" , "Ticho" , null , ""], ["place" , "Místo" , null , ""], + ["place/type" , "Typ místa" , null , ""], + ["place/specific" , "Upřesnění místa" , null , ""], ["video/is_interlaced" , "ProklÃĄdÃĄní" , null , ""], ["id_item" , "Item ID" , null , ""], ["rights/spatial" , "RegionÃĄlní omezení" , null , ""], @@ -90,6 +93,7 @@ ["logo" , "Logo" , null , ""], ["status" , "Stav" , null , ""], ["rights/ott" , "PrÃĄva pro OTT" , null , "Určuje, zda je pořad moÅžnÊ ÅĄÃ­Å™it pomocí OTT platforem (web, sociÃĄlní sítě, Smart TV...)"], + ["rights/broadcast" , "PrÃĄva pro vysílÃĄní" , null , "Určuje, zda je pořad moÅžnÊ ÅĄÃ­Å™it pomocí lineÃĄrního vysílÃĄní"], ["id_event" , "Event ID" , null , ""], ["id_bin" , "Bin ID" , null , ""], ["role/cast" , "Obsazení" , null , "ÄŒÃĄrkami oddělenÃŊ seznam herců"], @@ -97,6 +101,7 @@ ["id_folder" , "SloÅžka" , null , ""], ["is_admin" , "Admin" , null , ""], ["run_mode" , "Run mode" , null , "ReÅžim odbavení příspěvků"], + ["summary" , "Perex" , null , ""], ["description" , "Popis" , null , ""], ["file/format" , "FormÃĄt souboru" , null , ""], ["id" , "ID" , "#" , ""], diff --git a/backend/schema/meta-aliases-en.json b/backend/schema/meta-aliases-en.json index 1afaffb..6fd389a 100644 --- a/backend/schema/meta-aliases-en.json +++ b/backend/schema/meta-aliases-en.json @@ -1,5 +1,6 @@ [ ["commercial/content" , "Commercial content" , "Content" , ""], + ["commercial/pp" , "Product placement" , "PP" , "Contains product placement"], ["solver" , "Solver" , null , ""], ["year" , "Year" , null , ""], ["id_user" , "User ID" , null , ""], @@ -74,6 +75,8 @@ ["graphic_usage" , "Graphic usage" , "Usage" , ""], ["audio/silence" , "Silence" , null , ""], ["place" , "Place" , null , ""], + ["place/type" , "Place type" , null , ""], + ["place/specific" , "Place specification" , null , ""], ["video/is_interlaced" , "Is interlaced" , "Interlaced" , ""], ["id_item" , "Item ID" , null , ""], ["rights/spatial" , "Spatial rights" , null , ""], @@ -89,7 +92,8 @@ ["video/width" , "Width" , null , ""], ["logo" , "Logo" , null , ""], ["status" , "Status" , null , ""], - ["rights/ott" , "OTT Rights" , "OTT" , ""], + ["rights/ott" , "OTT Rights" , null , ""], + ["rights/broadcast" , "Broadcast rights" , null , ""], ["id_event" , "Event ID" , null , ""], ["id_bin" , "Bin ID" , null , ""], ["role/cast" , "Cast" , null , "Coma delimited list of actors"], @@ -97,6 +101,7 @@ ["id_folder" , "Folder" , null , ""], ["is_admin" , "Admin" , null , ""], ["run_mode" , "Run mode" , null , ""], + ["summary" , "Summary" , null , ""], ["description" , "Description" , null , ""], ["file/format" , "File format" , null , ""], ["id" , "ID" , "#" , ""], diff --git a/backend/server/__init__.py b/backend/server/__init__.py index 0c569d2..ef58179 100644 --- a/backend/server/__init__.py +++ b/backend/server/__init__.py @@ -1,16 +1,14 @@ import os -import aiofiles from fastapi import Depends, FastAPI, Header, Request -from fastapi.responses import JSONResponse, RedirectResponse, Response +from fastapi.responses import FileResponse, JSONResponse, Response from fastapi.staticfiles import StaticFiles from fastapi.websockets import WebSocket, WebSocketDisconnect import nebula -from nebula.enum import MediaType from 
 from nebula.exceptions import NebulaException
 from nebula.settings import load_settings
-from server.dependencies import asset_in_path, current_user, current_user_query
+from server.dependencies import current_user_query
 from server.endpoints import install_endpoints
 from server.storage_monitor import storage_monitor
 from server.video import range_requests_response
@@ -50,7 +48,14 @@ async def custom_404_handler(request: Request, _):
             "method": request.method,
         },
     )
-    return RedirectResponse("/")
+
+    index_path = os.path.join(nebula.config.frontend_dir, "index.html")
+    if os.path.exists(index_path):
+        return FileResponse(
+            index_path,
+            status_code=200,
+            media_type="text/html",
+        )


 @app.exception_handler(NebulaException)
@@ -58,8 +63,9 @@ async def openpype_exception_handler(
     request: Request,
     exc: NebulaException,
 ) -> JSONResponse:
-    endpoint = request.url.path.split("/")[-1]
-    nebula.log.error(f"{endpoint}: {exc}")  # TODO: user?
+    # endpoint = request.url.path.split("/")[-1]
+    # We do not need to log this (it is up to the NebulaException class)
+    # nebula.log.error(f"{endpoint}: {exc}")  # TODO: user?
     return JSONResponse(
         status_code=exc.status,
         content={
@@ -74,6 +80,7 @@ async def openpype_exception_handler(

 @app.exception_handler(AssertionError)
 async def assertion_error_handler(request: Request, exc: AssertionError):
+    nebula.log.error(f"AssertionError: {exc}")
     return JSONResponse(
         status_code=500,
         content={
@@ -143,42 +150,6 @@ async def proxy(
     return range_requests_response(request, video_path, "video/mp4")


-@app.post("/upload/{id_asset}", response_class=Response)
-async def upload_media_file(
-    request: Request,
-    asset: nebula.Asset = Depends(asset_in_path),
-    user: nebula.User = Depends(current_user),
-):
-    """Upload a media file for a given asset.
-
-    This endpoint is used by the web frontend to upload media files.
-    """
-
-    assert asset["media_type"] == MediaType.FILE, "Only file assets can be uploaded"
-    assert nebula.settings.system.upload_storage, "Upload storage not configured"
-    assert nebula.settings.system.upload_dir, "Upload path not configured"
-
-    storage = nebula.storages[nebula.settings.system.upload_storage]
-    upload_dir = nebula.settings.system.upload_dir
-
-    extension = request.headers.get("X-nebula-extension")
-    assert extension, "Missing X-nebula-extension header"
-    assert extension in ["mp4", "mov", "mxf"], "Invalid extension"
-
-    nebula.log.debug(f"Uploading media file for {asset}", user=user.name)
-    target_path = os.path.join(
-        storage.local_path, upload_dir, f"{asset.id}.{extension}"
-    )
-
-    i = 0
-    async with aiofiles.open(target_path, "wb") as f:
-        async for chunk in request.stream():
-            i += len(chunk)
-            await f.write(chunk)
-
-    nebula.log.info(f"Uploaded media file for {asset}", user=user.name)
-
-
 #
 # Messaging
 #
@@ -198,12 +169,15 @@ async def ws_endpoint(websocket: WebSocket) -> None:
                 message.get("token"),
                 topics=message.get("subscribe", []),
             )
+            # if client.user_name:
+            #     nebula.log.trace(f"{client.user_name} connected")
     except WebSocketDisconnect:
-        if client.user_name:
-            nebula.log.trace(f"{client.user_name} disconnected")
-        else:
-            nebula.log.trace("Anonymous client disconnected")
-        del messaging.clients[client.id]
+        # if client.user_name:
+        #     nebula.log.trace(f"{client.user_name} disconnected")
+        try:
+            del messaging.clients[client.id]
+        except KeyError:
+            pass


 #
@@ -229,7 +203,7 @@ def install_frontend_plugins(app: FastAPI):


 # TODO: this is a development hack.
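Earlier in this file, the custom 404 handler now falls back to the frontend's index.html instead of redirecting to "/", so client-side routes survive a full page reload (and falls through with no response when index.html is missing). A rough sketch of the expected behaviour against a running server; the URL and port are assumptions, not taken from this change:

    import httpx

    # An unknown, non-API path should return the SPA entry point rather than a redirect.
    response = httpx.get("http://localhost:4455/some/frontend/route")
    assert response.status_code == 200
    assert "text/html" in response.headers["content-type"]
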
-HLS_DIR = "/storage/nebula_01/hls/" +HLS_DIR = "/mnt/nebula_01/hls/" if os.path.exists(HLS_DIR): app.mount("/hls", StaticFiles(directory=HLS_DIR)) diff --git a/backend/server/dependencies.py b/backend/server/dependencies.py index 168b623..ddcb632 100644 --- a/backend/server/dependencies.py +++ b/backend/server/dependencies.py @@ -1,3 +1,5 @@ +from typing import Annotated + from fastapi import Depends, Header, Path, Query import nebula @@ -13,11 +15,17 @@ async def access_token(authorization: str = Header(None)) -> str | None: return access_token +AccessToken = Annotated[str | None, Depends(access_token)] + + async def request_initiator(x_client_id: str | None = Header(None)) -> str | None: """Return the client ID of the request initiator.""" return x_client_id +RequestInitiator = Annotated[str, Depends(request_initiator)] + + async def current_user_query(token: str = Query(None)) -> nebula.User: if token is None: raise nebula.UnauthorizedException("No access token provided") @@ -27,6 +35,9 @@ async def current_user_query(token: str = Query(None)) -> nebula.User: return nebula.User(meta=session.user) +CurrentUserInQuery = Annotated[nebula.User, Depends(current_user_query)] + + async def current_user( access_token: str | None = Depends(access_token), ) -> nebula.User: @@ -39,6 +50,9 @@ async def current_user( return nebula.User(meta=session.user) +CurrentUser = Annotated[nebula.User, Depends(current_user)] + + async def current_user_optional( access_token: str | None = Depends(access_token), ) -> nebula.User | None: @@ -51,8 +65,14 @@ async def current_user_optional( return nebula.User(meta=session.user) +CurrentUserOptional = Annotated[nebula.User | None, Depends(current_user_optional)] + + async def asset_in_path( id_asset: int = Path(..., ge=0), ) -> nebula.Asset: """Return the asset with the given ID.""" return await nebula.Asset.load(id_asset) + + +AssetInPath = Annotated[nebula.Asset, Depends(asset_in_path)] diff --git a/backend/server/endpoints.py b/backend/server/endpoints.py index 918b694..7f5c6ef 100644 --- a/backend/server/endpoints.py +++ b/backend/server/endpoints.py @@ -46,7 +46,7 @@ def find_api_endpoints() -> Generator[APIRequest, None, None]: try: module = import_module(module_name, module_path) except ModuleNotFoundError: - nebula.log.error(f"Module {module_name} not found") + nebula.log.error(f"Module {module_name} not found in {module_path}") except ImportError: nebula.log.traceback(f"Failed to import module {module_name}") @@ -73,7 +73,7 @@ def install_endpoints(app: fastapi.FastAPI): continue endpoint_names.add(endpoint.name) - route = f"/api/{endpoint.name}" + route = endpoint.path or f"/api/{endpoint.name}" nebula.log.debug("Adding endpoint", route) additional_params = {} diff --git a/backend/server/request.py b/backend/server/request.py index bf22798..43fc221 100644 --- a/backend/server/request.py +++ b/backend/server/request.py @@ -5,6 +5,7 @@ class APIRequest: name: str + path: str | None = None title: str | None = None methods: list[str] = ["POST"] response_class: Any = None diff --git a/backend/server/storage_monitor.py b/backend/server/storage_monitor.py index 9e88916..de37131 100644 --- a/backend/server/storage_monitor.py +++ b/backend/server/storage_monitor.py @@ -24,13 +24,14 @@ async def exec_mount(cmd: str) -> bool: async def handle_samba_storage(storage: Storage): - - if time.time() < storage.last_mount_attempt + (storage.mount_attempts * 2): + if time.time() - storage.last_mount_attempt < min(storage.mount_attempts * 5, 120): return if not 

     if not os.path.exists(storage.local_path):
         try:
             os.mkdir(storage.local_path)
+        except FileExistsError:
+            pass
         except Exception:
             nebula.log.traceback(f"Unable to create mountpoint for {storage}")
             storage.last_mount_attempt = time.time()
@@ -65,13 +66,17 @@ async def handle_samba_storage(storage: Storage):
         nebula.log.success(f"{storage} mounted successfully")
         storage.mount_attempts = 0
     else:
-        nebula.log.trace(cmd)
-        nebula.log.error(f"Unable to mount {storage}")
+        if storage.mount_attempts < 5:
+            nebula.log.trace(cmd)
+            nebula.log.error(f"Unable to mount {storage}")
         storage.last_mount_attempt = time.time()
         storage.mount_attempts += 1


 class StorageMonitor(BackgroundTask):
+    def initialize(self):
+        self.status = {}
+
     async def run(self):
         while True:
             await self.main()
@@ -82,7 +87,18 @@ async def main(self):
             id_storage = row["id"]
             storage_settings = row["settings"]

-            storage = Storage(StorageSettings(id=id_storage, **storage_settings))
+            storage = Storage(
+                StorageSettings(
+                    id=id_storage,
+                    **storage_settings,
+                )
+            )
+            storage.last_mount_attempt = self.status.get(id_storage, {}).get(
+                "last_mount_attempt", 0
+            )
+            storage.mount_attempts = self.status.get(id_storage, {}).get(
+                "mount_attempts", 0
+            )

             if storage.is_mounted:
                 continue
@@ -91,12 +107,17 @@ async def main(self):
             if not os.path.isdir(storage.path):
                 try:
                     os.makedirs(storage.path)
-                except Exception:
+                except FileExistsError:
                     pass
                 continue

             if storage.protocol == "samba":
                 await handle_samba_storage(storage)
+            self.status[id_storage] = {
+                "last_mount_attempt": storage.last_mount_attempt,
+                "mount_attempts": storage.mount_attempts,
+            }
+

 storage_monitor = StorageMonitor()
diff --git a/backend/server/websocket.py b/backend/server/websocket.py
index 16f4997..2dafdf1 100644
--- a/backend/server/websocket.py
+++ b/backend/server/websocket.py
@@ -56,6 +56,8 @@ async def send(self, message: dict[str, Any], auth_only: bool = True):
             await self.sock.send_text(json_dumps(message))
         except WebSocketDisconnect:
             self.disconnected = True
+        except Exception as e:
+            nebula.log.trace("WS: Error sending message", e)

     async def receive(self):
         data = await self.sock.receive_text()
@@ -132,7 +134,8 @@ async def run(self) -> None:
                 "data": data[4],
             }

-            for client_id, client in self.clients.items():
+            clients = list(self.clients.values())
+            for client in clients:
                 for topic in client.topics:
                     if topic == "*" or message["topic"].startswith(topic):
                         await client.send(message)
diff --git a/backend/setup/defaults/folders.py b/backend/setup/defaults/folders.py
index d9fb3e0..acd6952 100644
--- a/backend/setup/defaults/folders.py
+++ b/backend/setup/defaults/folders.py
@@ -14,18 +14,18 @@
 ]

 serie_description: FieldList = [
-    FolderField(name="serie"),
+    FolderField(name="serie", section="Series"),
     FolderField(name="serie/season"),
     FolderField(name="serie/episode"),
 ]

 roles_description: FieldList = [
-    FolderField(name="role/director"),
+    FolderField(name="role/director", section="Roles"),
     FolderField(name="role/cast"),
 ]

 content_description: FieldList = [
-    FolderField(name="genre", filter=movie_genre_pattern),
+    FolderField(name="genre", filter=movie_genre_pattern, section="Content"),
     FolderField(name="editorial_format", filter=r"^2(\.\d+){0,2}$"),
     FolderField(name="atmosphere"),
     FolderField(name="intention", filter=r"^1\.(1|2|3|4|5|6|7|8)$"),
@@ -35,11 +35,12 @@
 ]

 production_description: FieldList = [
-    FolderField(name="date/valid"),
+    FolderField(name="date/valid", section="Production"),
     FolderField(name="editorial_control"),
     FolderField(name="rights"),
FolderField(name="rights/type"), FolderField(name="rights/description"), + FolderField(name="rights/ott"), FolderField(name="notes"), FolderField(name="qc/report"), ] diff --git a/backend/setup/defaults/meta_types.py b/backend/setup/defaults/meta_types.py index b2a5389..c75b329 100644 --- a/backend/setup/defaults/meta_types.py +++ b/backend/setup/defaults/meta_types.py @@ -189,12 +189,18 @@ "fulltext": 8, "type": T.STRING, }, - "description": { + "summary": { "ns": "m", "fulltext": 8, "type": T.TEXT, "syntax": "md", }, + "description": { + "ns": "m", + "fulltext": 7, + "type": T.TEXT, + "syntax": "md", + }, "color": { "ns": "m", "type": T.COLOR, @@ -269,6 +275,10 @@ "order": "alias", }, "place": { + "ns": "m", + "type": T.STRING, + }, + "place/type": { "ns": "m", "type": T.LIST, "cs": "urn:tva:metadata-cs:PlaceTypeCS", @@ -345,6 +355,10 @@ "fulltext": True, "type": T.TEXT, }, + "rights/broadcast": { + "ns": "m", + "type": T.BOOLEAN, + }, "rights/ott": { "ns": "m", "type": T.BOOLEAN, @@ -386,6 +400,10 @@ "type": T.SELECT, "cs": "urn:site:clients", }, + "commercial/pp": { + "ns": "m", + "type": T.BOOLEAN, + }, "runs/daily": { "ns": "m", "type": T.INTEGER, diff --git a/backend/setup/defaults/views.py b/backend/setup/defaults/views.py index 9280f20..61e227f 100644 --- a/backend/setup/defaults/views.py +++ b/backend/setup/defaults/views.py @@ -78,7 +78,7 @@ name="Series", separator=True, position=30, - folders=[11], + folders=[13], columns=[ "title", "genre", diff --git a/backend/setup/metatypes.py b/backend/setup/metatypes.py index 426749c..69f2d20 100644 --- a/backend/setup/metatypes.py +++ b/backend/setup/metatypes.py @@ -16,7 +16,6 @@ async def setup_metatypes(meta_types, db): aliases[lang][key] = [alias, header, description] for key, data in meta_types.items(): - meta_type = {} meta_type["ns"] = data["ns"] meta_type["editable"] = True @@ -28,7 +27,14 @@ async def setup_metatypes(meta_types, db): meta_type[opt] = data[opt] for lang in languages: - meta_type["aliases"][lang] = aliases[lang][key] + try: + meta_type["aliases"][lang] = aliases[lang][key] + except KeyError: + meta_type["aliases"][lang] = [ + data.get("title", key.capitalize()), + None, + "", + ] await db.execute( """ diff --git a/backend/setup/settings.py b/backend/setup/settings.py index dccd5a7..84aad74 100644 --- a/backend/setup/settings.py +++ b/backend/setup/settings.py @@ -31,6 +31,7 @@ "meta_types": META_TYPES, "storages": [], "settings": {}, + "cs": [], } @@ -47,8 +48,8 @@ def load_overrides(): for key in TEMPLATE: if not hasattr(mod, key.upper()): continue - log.info(f"Using settings overrides: {spath}") override = getattr(mod, key.upper()) + log.info(f"Found overrides for {key}") if type(override) == dict and type(TEMPLATE[key]) == dict: TEMPLATE[key].update(override) @@ -144,13 +145,23 @@ async def setup_settings(db): # Setup classifications + used_urns = set() + for meta_type, mset in TEMPLATE["meta_types"].items(): + if mset.get("cs"): + used_urns.add(mset["cs"]) + classifications = [] async with httpx.AsyncClient() as client: response = await client.get("https://cs.nbla.xyz/dump") classifications = response.json() + classifications.extend(TEMPLATE["cs"]) + for scheme in classifications: name = scheme["cs"] + if name not in used_urns: + log.trace(f"Skipping unused classification scheme: {name}") + continue await db.execute("DELETE FROM cs WHERE cs = $1", name) for value in scheme["data"]: settings = scheme["data"][value] diff --git a/frontend/package.json b/frontend/package.json index 3637e50..3fb07f7 100644 --- 
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -19,6 +19,7 @@
     "luxon": "^3.1.0",
     "material-symbols": "^0.4.0",
     "react": "^18.2.0",
+    "react-datepicker": "^4.11.0",
     "react-dom": "^18.2.0",
     "react-markdown": "^8.0.4",
     "react-redux": "^8.0.2",
diff --git a/frontend/src/actions.js b/frontend/src/actions.js
index c61dd7f..90df127 100644
--- a/frontend/src/actions.js
+++ b/frontend/src/actions.js
@@ -4,7 +4,7 @@ import { createSlice } from '@reduxjs/toolkit'
 const initialState = {
   browserRefresh: 0,
   currentView: JSON.parse(localStorage.getItem('currentView') || 'null'),
-  searchQuery: '',
+  searchQuery: JSON.parse(localStorage.getItem('searchQuery') || '""'),
   selectedAssets: [],
   focusedAsset: null,
   pageTitle: '',
@@ -38,6 +38,7 @@ const contextSlice = createSlice({

     setSearchQuery: (state, action) => {
       state.searchQuery = action.payload
+      localStorage.setItem('searchQuery', JSON.stringify(action.payload))
       return state
     },
diff --git a/frontend/src/components/BaseInput.jsx b/frontend/src/components/BaseInput.jsx
new file mode 100644
index 0000000..6131feb
--- /dev/null
+++ b/frontend/src/components/BaseInput.jsx
@@ -0,0 +1,60 @@
+import styled from 'styled-components'
+import defaultTheme from './theme'
+
+const BaseInput = styled.input`
+  border: 0;
+  border-radius: ${(props) => props.theme.inputBorderRadius};
+  background: ${(props) => props.theme.inputBackground};
+  color: ${(props) => props.theme.colors.text};
+  font-size: ${(props) => props.theme.fontSize};
+  min-height: ${(props) => props.theme.inputHeight};
+  max-height: ${(props) => props.theme.inputHeight};
+  font-size: ${(props) => props.theme.fontSize};
+  padding-left: ${(props) => props.theme.inputPadding};
+  padding-right: ${(props) => props.theme.inputPadding};
+  padding-top: 0;
+  padding-bottom: 0;
+  min-width: 200px;
+
+  &:-webkit-autofill,
+  &:-webkit-autofill:focus {
+    transition: background-color 600000s 0s, color 600000s 0s;
+  }
+
+  &:focus {
+    outline: 1px solid ${(props) => props.theme.colors.cyan};
+  }
+
+  &:hover {
+    color: ${(props) => props.theme.colors.text};
+  }
+
+  &:invalid,
+  &.error {
+    outline: 1px solid ${(props) => props.theme.colors.red} !important;
+  }
+
+  &:read-only {
+    font-style: italic;
+  }
+
+  &.timecode {
+    min-width: 96px;
+    max-width: 96px;
+    padding-right: 14px !important;
+    text-align: right;
+    font-family: monospace;
+  }
+
+  &.textarea {
+    padding: ${(props) => props.theme.inputPadding};
+    min-height: 60px;
+    max-height: 400px !important;
+    resize: vertical;
+  }
+`
+BaseInput.defaultProps = {
+  theme: defaultTheme,
+}
+
+export default BaseInput
diff --git a/frontend/src/components/button.jsx b/frontend/src/components/Button.jsx
similarity index 67%
rename from frontend/src/components/button.jsx
rename to frontend/src/components/Button.jsx
index 0ae4b11..28baebc 100644
--- a/frontend/src/components/button.jsx
+++ b/frontend/src/components/Button.jsx
@@ -1,5 +1,6 @@
 import styled from 'styled-components'
 import defaultTheme from './theme'
+import { forwardRef } from 'react'

 const BaseButton = styled.button`
   border: 0;
@@ -42,25 +43,35 @@ const BaseButton = styled.button`
   &:disabled {
     cursor: not-allowed;
     background: ${(props) => props.theme.colors.surface03};
-    color: ${(props) => props.theme.colors.surface06};
+    color: ${(props) => props.theme.colors.surface08};
   }
 `
 BaseButton.defaultProps = {
   theme: defaultTheme,
 }

-const Button = ({ icon, iconStyle, label, iconOnRight, ...props }) => {
-  return (
-
-      {iconOnRight && label}
-      {icon && (
-
-          {icon}
-
-      )}
-      {!iconOnRight && label}
-
-  )
-}
+const Button = forwardRef(
+  (
+    { icon, iconStyle, label, iconOnRight, active, className, ...props },
+    ref
+  ) => {
+    const classes = className ? [className] : []
+    if (active) {
+      classes.push('active')
+    }
+
+    return (
+
+        {iconOnRight && label}
+        {icon && (
+
+            {icon}
+
+        )}
+        {!iconOnRight && label}
+
+    )
+  }
+)

-export { Button }
+export default Button
diff --git a/frontend/src/components/dialog.jsx b/frontend/src/components/Dialog.jsx
similarity index 100%
rename from frontend/src/components/dialog.jsx
rename to frontend/src/components/Dialog.jsx
diff --git a/frontend/src/components/dropdown.jsx b/frontend/src/components/Dropdown.jsx
similarity index 98%
rename from frontend/src/components/dropdown.jsx
rename to frontend/src/components/Dropdown.jsx
index b629019..3aef3e7 100644
--- a/frontend/src/components/dropdown.jsx
+++ b/frontend/src/components/Dropdown.jsx
@@ -1,4 +1,4 @@
-import { Button } from './button'
+import Button from './Button'
 import styled from 'styled-components'

 const DropdownContainer = styled.div`
diff --git a/frontend/src/components/InputDatetime.jsx b/frontend/src/components/InputDatetime.jsx
new file mode 100644
index 0000000..c4aa2c8
--- /dev/null
+++ b/frontend/src/components/InputDatetime.jsx
@@ -0,0 +1,166 @@
+import { useState, useEffect, useRef } from 'react'
+import { DateTime } from 'luxon'
+import styled from 'styled-components'
+
+import { DialogButtons } from './layout'
+import DatePicker from 'react-datepicker'
+import Dialog from './Dialog'
+
+import BaseInput from './BaseInput'
+import Button from './Button'
+
+//import "react-datepicker/dist/react-datepicker.css"
+
+import './datepicker.sass'
+
+const timeRegex = /^(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})$/
+const dateRegex = /^(\d{4})-(\d{2})-(\d{2})$/
+const allowedDateCharsRegex = /^[\d-\:\ ]*$/
+
+const DateTimeWrapper = styled.div`
+  display: flex;
+  flex-direction: row;
+  gap: 4px;
+  min-width: 200px;
+`
+
+const DatePickerWrapper = styled.div`
+  display: flex;
+  flex-direction: row;
+  align-items: center;
+  justify-content: center;
+`
+
+const CalendarDialog = ({ value, onChange, onClose }) => {
+  // get current timestamp
+  const defaultDate = DateTime.local().toSeconds()
+
+  const [date, setDate] = useState(DateTime.fromSeconds(value || defaultDate))
+
+  return (
+
+        {
+            setDate(DateTime.fromJSDate(date))
+          }}
+          inline
+        />
+
+
+
+  )
+}
+
+const InputDatetime = ({ value, onChange, placeholder, className = '' }) => {
+  const [time, setTime] = useState()
+  const [isFocused, setIsFocused] = useState(false)
+  const [showCalendar, setShowCalendar] = useState(false)
+  const inputRef = useRef(null)
+
+  useEffect(() => {
+    if (!value) {
+      setTime('')
+      return
+    }
+
+    setTime(DateTime.fromSeconds(value).toFormat('yyyy-MM-dd HH:mm:ss'))
+  }, [value])
+
+  const handleChange = (event) => {
+    let newValue = event.target.value
+    if (!allowedDateCharsRegex.test(newValue)) return
+
+    // if the original value ended with a dash and the new value removes this dash,
+    // so it is one character shorter than the original value, we need to remove the dash
+    // as well as the last character of the new value
+
+    if (time && time.length - 1 === newValue.length && time.endsWith('-')) {
+      newValue = newValue.slice(0, -1)
+    } else if (
+      [4, 7].includes(newValue.length) &&
+      newValue.charAt(newValue.length - 1) !== '-'
+    )
+      newValue = newValue + '-'
+    setTime(newValue)
+  }
+
+  const isValidTime = (timeString) => {
+    if (!timeString) return true
+
+    if (timeRegex.test(timeString))
+      if (!isNaN(DateTime.fromFormat(timeString, 'yyyy-MM-dd HH:mm:ss')))
+        return true
+    return false
+  }
+
+  const onSubmit = () => {
+    let value = 0
+
+    if (dateRegex.test(time)) {
+      setTime(time + ' 00:00:00')
+      return
+    }
+
+    if (time && isValidTime(time)) {
+      value = DateTime.fromFormat(time, 'yyyy-MM-dd HH:mm:ss').toSeconds()
+    }
+    onChange(value)
+    inputRef.current.blur()
+    setIsFocused(false)
+  }
+
+  const onKeyDown = (e) => {
+    if (e.key === 'Enter') {
+      onSubmit()
+    }
+  }
+
+  return (
+
+      {showCalendar && (
+         setShowCalendar(false)}
+        />
+      )}
+        {
+          e.target.select(), setIsFocused(true)
+        }}
+        onKeyDown={onKeyDown}
+      />
+