From 3bcd2a06acc475b0c57db98a6cc74e2c2233f74f Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 5 May 2024 18:42:51 -0400 Subject: [PATCH 01/38] Add new file section to deepwell config. --- deepwell/config.example.toml | 16 ++++++++++++++++ install/files/dev/deepwell.toml | 4 ++++ install/files/local/deepwell.toml | 4 ++++ install/files/prod/deepwell.toml | 4 ++++ 4 files changed, 28 insertions(+) diff --git a/deepwell/config.example.toml b/deepwell/config.example.toml index 69ee918a70..73b4911d1b 100644 --- a/deepwell/config.example.toml +++ b/deepwell/config.example.toml @@ -355,6 +355,22 @@ minimum-name-bytes = 3 # Set to 0 to disable. refill-name-change-days = 90 + +[file] + +# The length of paths used for S3 presigned URLs. +# +# The value doesn't particularly matter so long as it is sufficiently long +# to avoid collisions. +# +# Just to be safe, the generation mechanism is the same as for session tokens. +presigned-path-length = 32 + +# How long a presigned URL lasts before expiry. +# +# The value should only be a few minutes, and no longer than 12 hours. +presigned-expiration-minutes = 5 + [message] # The maximum size of a message's subject line, in bytes. diff --git a/install/files/dev/deepwell.toml b/install/files/dev/deepwell.toml index 2a0ccb8fb8..4024040eb8 100644 --- a/install/files/dev/deepwell.toml +++ b/install/files/dev/deepwell.toml @@ -67,6 +67,10 @@ maximum-name-changes = 3 minimum-name-bytes = 3 refill-name-change-days = 90 +[file] +presigned-path-length = 32 +presigned-expiration-minutes = 10 + [message] maximum-subject-bytes = 128 maximum-body-bytes = 200000 diff --git a/install/files/local/deepwell.toml b/install/files/local/deepwell.toml index cd49015359..b06ad2bb23 100644 --- a/install/files/local/deepwell.toml +++ b/install/files/local/deepwell.toml @@ -67,6 +67,10 @@ maximum-name-changes = 3 minimum-name-bytes = 3 refill-name-change-days = 90 +[file] +presigned-path-length = 32 +presigned-expiration-minutes = 10 + [message] maximum-subject-bytes = 128 maximum-body-bytes = 200000 diff --git a/install/files/prod/deepwell.toml b/install/files/prod/deepwell.toml index 386e863321..583182474f 100644 --- a/install/files/prod/deepwell.toml +++ b/install/files/prod/deepwell.toml @@ -67,6 +67,10 @@ maximum-name-changes = 3 minimum-name-bytes = 3 refill-name-change-days = 90 +[file] +presigned-path-length = 32 +presigned-expiration-minutes = 5 + [message] maximum-subject-bytes = 128 maximum-body-bytes = 200000 From 4c48597285f2fa153629cde7e0b83acb48ccd321 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 5 May 2024 21:19:57 -0400 Subject: [PATCH 02/38] Add file section to configuration. 
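As a rough sanity check on "sufficiently long to avoid collisions": the generator introduced later in this series draws from the 62-character alphanumeric set, so the default presigned-path-length of 32 gives 62^32 possible paths (about 2.3 x 10^57, roughly 190 bits of randomness), which makes an accidental collision between two pending uploads a non-issue at any realistic upload volume.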
--- deepwell/src/config/file.rs | 16 ++++++++++++++++ deepwell/src/config/object.rs | 6 ++++++ 2 files changed, 22 insertions(+) diff --git a/deepwell/src/config/file.rs b/deepwell/src/config/file.rs index e73f40a2c6..b4424fcc81 100644 --- a/deepwell/src/config/file.rs +++ b/deepwell/src/config/file.rs @@ -53,6 +53,7 @@ pub struct ConfigFile { ftml: Ftml, special_pages: SpecialPages, user: User, + file: FileSection, message: Message, } @@ -181,6 +182,14 @@ struct User { minimum_name_bytes: usize, } +// NOTE: Name conflict with std::fs::File +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "kebab-case")] +struct FileSection { + presigned_path_length: usize, + presigned_expiration_minutes: u64, +} + #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "kebab-case")] struct Message { @@ -303,6 +312,11 @@ impl ConfigFile { refill_name_change_days, minimum_name_bytes, }, + file: + FileSection { + presigned_path_length, + presigned_expiration_minutes, + }, message: Message { maximum_subject_bytes: maximum_message_subject_bytes, @@ -424,6 +438,8 @@ impl ConfigFile { )) }, minimum_name_bytes, + presigned_path_length, + presigned_duration: StdDuration::from_secs(presigned_expiration_minutes * 60), maximum_message_subject_bytes, maximum_message_body_bytes, maximum_message_recipients, diff --git a/deepwell/src/config/object.rs b/deepwell/src/config/object.rs index e97e4e38eb..7091686515 100644 --- a/deepwell/src/config/object.rs +++ b/deepwell/src/config/object.rs @@ -200,6 +200,12 @@ pub struct Config { /// Minimum length of bytes in a username. pub minimum_name_bytes: usize, + /// Length of randomly-generated portion of S3 presigned URLs. + pub presigned_path_length: usize, + + /// How long S3 presigned URLs will last before expiry. + pub presigned_duration: StdDuration, + /// Maximum size of the subject line allowed in a direct message. pub maximum_message_subject_bytes: usize, From c3b4585c7970ffd375f2c0438ab7b5917eb3dc81 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 5 May 2024 21:52:14 -0400 Subject: [PATCH 03/38] Change field to seconds, not Duration. --- deepwell/src/config/file.rs | 4 ++-- deepwell/src/config/object.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/deepwell/src/config/file.rs b/deepwell/src/config/file.rs index b4424fcc81..b04142336a 100644 --- a/deepwell/src/config/file.rs +++ b/deepwell/src/config/file.rs @@ -187,7 +187,7 @@ struct User { #[serde(rename_all = "kebab-case")] struct FileSection { presigned_path_length: usize, - presigned_expiration_minutes: u64, + presigned_expiration_minutes: u32, } #[derive(Serialize, Deserialize, Debug, Clone)] @@ -439,7 +439,7 @@ impl ConfigFile { }, minimum_name_bytes, presigned_path_length, - presigned_duration: StdDuration::from_secs(presigned_expiration_minutes * 60), + presigned_expiry_secs: presigned_expiration_minutes * 60, maximum_message_subject_bytes, maximum_message_body_bytes, maximum_message_recipients, diff --git a/deepwell/src/config/object.rs b/deepwell/src/config/object.rs index 7091686515..d042c623b2 100644 --- a/deepwell/src/config/object.rs +++ b/deepwell/src/config/object.rs @@ -204,7 +204,7 @@ pub struct Config { pub presigned_path_length: usize, /// How long S3 presigned URLs will last before expiry. - pub presigned_duration: StdDuration, + pub presigned_expiry_secs: u32, /// Maximum size of the subject line allowed in a direct message. 
pub maximum_message_subject_bytes: usize, From 2013d08c01af70bd7064c8e4844a79ccc78af902 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 5 May 2024 21:59:29 -0400 Subject: [PATCH 04/38] Begin BlobService::upload_url(). --- deepwell/src/services/blob/service.rs | 36 +++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/deepwell/src/services/blob/service.rs b/deepwell/src/services/blob/service.rs index 2f478589c7..bd4a0354f5 100644 --- a/deepwell/src/services/blob/service.rs +++ b/deepwell/src/services/blob/service.rs @@ -22,6 +22,9 @@ #![allow(dead_code)] use super::prelude::*; +use crate::utils::assert_is_csprng; +use rand::distributions::{Alphanumeric, DistString}; +use rand::thread_rng; use s3::request_trait::ResponseData; use s3::serde_types::HeadObjectResult; use std::str; @@ -48,10 +51,43 @@ pub const EMPTY_BLOB_MIME: &str = "inode/x-empty; charset=binary"; /// Timestamp is 2019/01/18 at midnight, the date of the first Wikijump commit. pub const EMPTY_BLOB_TIMESTAMP: i64 = 1547769600; +/// The subdirectory in the S3 bucket where all pending uploads are kept. +pub const PRESIGN_DIRECTORY: &str = "uploads"; + #[derive(Debug)] pub struct BlobService; impl BlobService { + /// Creates an S3 presign URL to allow an end user to upload a blob. + /// + /// # Returns + /// The generated presign URL that can be uploaded to. + pub async fn upload_url(ctx: &ServiceContext<'_>) -> Result { + info!("Creating presign upload URL for blob"); + + let config = ctx.config(); + let path = { + let mut path = format!("{PRESIGN_DIRECTORY}/"); + + { + let mut rng = thread_rng(); + assert_is_csprng(&rng); + Alphanumeric.append_string( + &mut rng, + &mut path, + config.presigned_path_length, + ); + } + + path + }; + + let bucket = ctx.s3_bucket(); + let url = bucket.presign_put(&path, config.presigned_expiry_secs, None)?; + + todo!() + } + /// Creates a blob with this data, if it does not already exist. pub async fn create>( ctx: &ServiceContext<'_>, From 43c5c7515271187c480927034b208bdb9a63d12f Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 5 May 2024 22:47:16 -0400 Subject: [PATCH 05/38] Add blob_upload table. --- deepwell/migrations/20220906103252_deepwell.sql | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/deepwell/migrations/20220906103252_deepwell.sql b/deepwell/migrations/20220906103252_deepwell.sql index d05b602bd9..2026ff0e89 100644 --- a/deepwell/migrations/20220906103252_deepwell.sql +++ b/deepwell/migrations/20220906103252_deepwell.sql @@ -415,6 +415,15 @@ CREATE TABLE page_vote ( -- Files -- +-- Pending uploads to S3 +-- +-- Stores the presign URL along with the path in the bucket it corresponds to. +-- After processing, it is moved to be a real blob (if new) or deleted (if duplicate). +CREATE TABLE blob_upload ( + s3_path TEXT NOT NULL PRIMARY KEY, + presign_url TEXT NOT NULL UNIQUE, +); + -- Enum types for file_revision CREATE TYPE file_revision_type AS ENUM ( 'create', From 1e59199154e86bab83ccdca5d76f743c811f2f16 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 5 May 2024 22:51:05 -0400 Subject: [PATCH 06/38] Add timestamp for partial upload pruning. 
--- deepwell/migrations/20220906103252_deepwell.sql | 1 + 1 file changed, 1 insertion(+) diff --git a/deepwell/migrations/20220906103252_deepwell.sql b/deepwell/migrations/20220906103252_deepwell.sql index 2026ff0e89..977a67b32f 100644 --- a/deepwell/migrations/20220906103252_deepwell.sql +++ b/deepwell/migrations/20220906103252_deepwell.sql @@ -422,6 +422,7 @@ CREATE TABLE page_vote ( CREATE TABLE blob_upload ( s3_path TEXT NOT NULL PRIMARY KEY, presign_url TEXT NOT NULL UNIQUE, + created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now() -- TODO add job to prune dead upload jobs ); -- Enum types for file_revision From 9642ba48b619d5eb6de2e8cae047cfa7324e999d Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 5 May 2024 23:30:18 -0400 Subject: [PATCH 07/38] Start BlobService changes for presign URL system. --- deepwell/src/services/blob/service.rs | 84 ++++++++++++++++++--------- deepwell/src/services/blob/structs.rs | 8 ++- deepwell/src/services/error.rs | 4 ++ 3 files changed, 67 insertions(+), 29 deletions(-) diff --git a/deepwell/src/services/blob/service.rs b/deepwell/src/services/blob/service.rs index bd4a0354f5..cb8e05a088 100644 --- a/deepwell/src/services/blob/service.rs +++ b/deepwell/src/services/blob/service.rs @@ -62,11 +62,9 @@ impl BlobService { /// /// # Returns /// The generated presign URL that can be uploaded to. - pub async fn upload_url(ctx: &ServiceContext<'_>) -> Result { - info!("Creating presign upload URL for blob"); - + pub async fn create_upload(ctx: &ServiceContext<'_>) -> Result { let config = ctx.config(); - let path = { + let s3_path = { let mut path = format!("{PRESIGN_DIRECTORY}/"); { @@ -81,69 +79,100 @@ impl BlobService { path }; + info!("Creating presign upload URL for blob at path {s3_path}"); let bucket = ctx.s3_bucket(); - let url = bucket.presign_put(&path, config.presigned_expiry_secs, None)?; + let presign_url = bucket.presign_put(&s3_path, config.presigned_expiry_secs, None)?; - todo!() + Ok(CreateUploadOutput { + s3_path, + presign_url, + }) } - /// Creates a blob with this data, if it does not already exist. - pub async fn create>( - ctx: &ServiceContext<'_>, - data: B, - ) -> Result { - let data = data.as_ref(); - info!("Creating blob (length {})", data.len()); + pub async fn finish_upload(ctx: &ServiceContext<'_>, upload_path: &str) -> Result { + info!("Finishing upload for blob for path {upload_path}"); + let bucket = ctx.s3_bucket(); + + debug!("Download uploaded blob from S3 uploads to get metadata"); + let response = bucket.get_object(upload_path).await?; + let data: Vec = match response.status_code() { + 200 => response.into(), + _ => { + error!("Cannot find blob at presign path {upload_path}"); + return Err(Error::FileNotUploaded); + } + }; // Special handling for empty blobs if data.is_empty() { debug!("File being created is empty, special case"); - return Ok(CreateBlobOutput { + return Ok(FinalizeUploadOutput { hash: EMPTY_BLOB_HASH, mime: str!(EMPTY_BLOB_MIME), size: 0, }); } - // Upload blob - let bucket = ctx.s3_bucket(); - let hash = sha512_hash(data); - let hex_hash = blob_hash_to_hex(&hash); + debug!("Updating blob metadata in database and S3"); // Convert size to correct integer type let size: i64 = data.len().try_into().expect("Buffer size exceeds i64"); - match Self::head(ctx, &hex_hash).await? { + let hash = sha512_hash(&data); + let hex_hash = blob_hash_to_hex(&hash); + + // If the blob exists, then just delete the uploaded one. + // + // If it doesn't, then we need to move it. 
However, within + // S3 we cannot "move" objects, we have to upload and delete the original. + + let result = match Self::head(ctx, &hex_hash).await? { // Blob exists, copy metadata and return that Some(result) => { debug!("Blob with hash {hex_hash} already exists"); - // Content-Type header should be passed in + // Content-Type header should be returned let mime = result.content_type.ok_or(Error::S3Response)?; - Ok(CreateBlobOutput { hash, mime, size }) + Ok(FinalizeUploadOutput { + hash, + mime, + size, + created: false, + }) } - // Blob doesn't exist, insert it + // Blob doesn't exist, move the uploaded file None => { debug!("Blob with hash {hex_hash} to be created"); // Determine MIME type for the new file let mime = ctx.mime().get_mime_type(data.to_vec()).await?; - // Put into S3 + // Upload S3 object to final destination let response = bucket - .put_object_with_content_type(&hex_hash, data, &mime) + .put_object_with_content_type(&hex_hash, &data, &mime) .await?; // We assume all unexpected statuses are errors, even if 1XX or 2XX match response.status_code() { - 200 => Ok(CreateBlobOutput { hash, mime, size }), - _ => s3_error(&response, "creating S3 blob"), + 200 => Ok(FinalizeUploadOutput { + hash, + mime, + size, + created: true, + }), + _ => s3_error(&response, "creating final S3 blob"), } } - } + }; + + // Delete uploaded version, in either case + bucket.delete_object(upload_path).await?; + + // Return result based on blob status + result } pub async fn get_optional( @@ -160,7 +189,6 @@ impl BlobService { let bucket = ctx.s3_bucket(); let hex_hash = blob_hash_to_hex(hash); let response = bucket.get_object(&hex_hash).await?; - match response.status_code() { 200 => Ok(Some(response.into())), 404 => Ok(None), diff --git a/deepwell/src/services/blob/structs.rs b/deepwell/src/services/blob/structs.rs index aa018d9405..c4e58841de 100644 --- a/deepwell/src/services/blob/structs.rs +++ b/deepwell/src/services/blob/structs.rs @@ -22,7 +22,13 @@ use super::prelude::*; use time::OffsetDateTime; #[derive(Debug)] -pub struct CreateBlobOutput { +pub struct CreateUploadOutput { + pub s3_path: String, + pub presign_url: String, +} + +#[derive(Debug)] +pub struct FinalizeUploadOutput { pub hash: BlobHash, pub mime: String, pub size: i64, diff --git a/deepwell/src/services/error.rs b/deepwell/src/services/error.rs index 01f6188bdc..33c74d2759 100644 --- a/deepwell/src/services/error.rs +++ b/deepwell/src/services/error.rs @@ -218,6 +218,9 @@ pub enum Error { #[error("File revision does not exist")] FileRevisionNotFound, + #[error("File not uploaded")] + FileNotUploaded, // occurs when presign URL is not uploaded to + #[error("Vote does not exist")] VoteNotFound, @@ -315,6 +318,7 @@ impl Error { Error::MessageDraftNotFound => 2015, Error::BlobNotFound => 2016, Error::TextNotFound => 2017, + Error::FileNotUploaded => 2018, // 2100 -- Existing data Error::UserExists => 2100, From 9484a865867d94c31ace8dd9f117f49707b26368 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 11 May 2024 23:19:40 -0400 Subject: [PATCH 08/38] Add file_pending migration and pending model. 
--- .../migrations/20220906103252_deepwell.sql | 9 ++++- deepwell/src/models/file_pending.rs | 35 +++++++++++++++++++ deepwell/src/models/mod.rs | 1 + 3 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 deepwell/src/models/file_pending.rs diff --git a/deepwell/migrations/20220906103252_deepwell.sql b/deepwell/migrations/20220906103252_deepwell.sql index 977a67b32f..71365be413 100644 --- a/deepwell/migrations/20220906103252_deepwell.sql +++ b/deepwell/migrations/20220906103252_deepwell.sql @@ -440,6 +440,12 @@ CREATE TYPE file_revision_change AS ENUM ( 'licensing' ); +CREATE TABLE file_pending ( + pending_file_id BIGSERIAL PRIMARY KEY, + s3_path TEXT NOT NULL CHECK length(s3_path) > 1, + presign_url TEXT NOT NULL CHECK length(presign_url) > 1 +); + CREATE TABLE file ( file_id BIGSERIAL PRIMARY KEY, created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), @@ -449,6 +455,7 @@ CREATE TABLE file ( name TEXT NOT NULL, page_id BIGINT NOT NULL REFERENCES page(page_id), site_id BIGINT NOT NULL REFERENCES site(site_id), + pending_file_id BIGINT REFERENCES file_pending(pending_file_id), UNIQUE (page_id, name, deleted_at) ); @@ -524,7 +531,7 @@ CREATE TYPE message_recipient_type AS ENUM ( -- A "record" is the underlying message data, with its contents, attachments, -- and associated metadata such as sender and recipient(s). CREATE TABLE message_record ( - external_id TEXT PRIMARY KEY, + external_id TEXT PRIMARY KEY, -- ID comes from message_draft created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), drafted_at TIMESTAMP WITH TIME ZONE NOT NULL, retracted_at TIMESTAMP WITH TIME ZONE, diff --git a/deepwell/src/models/file_pending.rs b/deepwell/src/models/file_pending.rs new file mode 100644 index 0000000000..3cc805f3a4 --- /dev/null +++ b/deepwell/src/models/file_pending.rs @@ -0,0 +1,35 @@ +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] +#[sea_orm(table_name = "file_pending")] +pub struct Model { + #[sea_orm(primary_key)] + pub pending_file_id: i64, + + #[sea_orm(column_type = "Text")] + pub s3_path: String, + + #[sea_orm(column_type = "Text")] + pub presign_url: String, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::file::Entity", + from = "Column::FileId", + to = "super::file::Column::FileId", + on_update = "NoAction", + on_delete = "NoAction" + )] + File, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::File.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/deepwell/src/models/mod.rs b/deepwell/src/models/mod.rs index 238e1d33ff..af50caf834 100644 --- a/deepwell/src/models/mod.rs +++ b/deepwell/src/models/mod.rs @@ -4,6 +4,7 @@ pub mod prelude; pub mod alias; pub mod file; +pub mod file_pending; pub mod file_revision; pub mod filter; pub mod message; From 44bce20f9eb2dd1e3ccde2e61802ed2a8b20539f Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 12 May 2024 02:17:54 -0400 Subject: [PATCH 09/38] Start upload code. 
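For orientation, the upload flow these patches are building has three steps: the client asks DEEPWELL to start an upload and receives a presigned S3 PUT URL (plus the ID of a pending-upload row), the client sends the raw bytes directly to S3 using that URL, and the client then asks DEEPWELL to finish the upload so the blob can be hashed, deduplicated, and attached to the file. A minimal client-side sketch follows; the response struct and function names here are hypothetical (only the presigned PUT itself is dictated by S3), and reqwest stands in for whatever HTTP client a caller would actually use.

use reqwest::Client;

// Hypothetical shape of what the "start upload" call hands back (illustration only).
struct StartUploadResponse {
    pending_blob_id: i64,
    presign_url: String,
}

// Sketch of the client's half of the presigned-upload handshake.
async fn upload_blob_bytes(
    http: &Client,
    start: StartUploadResponse,
    data: Vec<u8>,
) -> Result<i64, reqwest::Error> {
    // Send the raw bytes straight to S3 via the presigned PUT URL.
    // No separate credentials are needed; the signature is embedded in the URL.
    http.put(start.presign_url.as_str())
        .body(data)
        .send()
        .await?
        .error_for_status()?;

    // The caller would now pass this ID back to the "finish upload" endpoint so
    // the backend can hash the object, deduplicate it, and finalize the file.
    Ok(start.pending_blob_id)
}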
--- deepwell/src/services/blob/mod.rs | 8 ++- deepwell/src/services/blob/service.rs | 50 +++++++++++--- deepwell/src/services/blob/structs.rs | 8 +-- deepwell/src/services/file/service.rs | 98 +++++++++++++++++++++------ deepwell/src/services/file/structs.rs | 15 +++- 5 files changed, 139 insertions(+), 40 deletions(-) diff --git a/deepwell/src/services/blob/mod.rs b/deepwell/src/services/blob/mod.rs index 1411669dc5..31986dee0c 100644 --- a/deepwell/src/services/blob/mod.rs +++ b/deepwell/src/services/blob/mod.rs @@ -27,6 +27,9 @@ #[allow(unused_imports)] mod prelude { pub use super::super::prelude::*; + pub use super::service::{ + EMPTY_BLOB_HASH, EMPTY_BLOB_MIME, EMPTY_BLOB_TIMESTAMP, PRESIGN_DIRECTORY, + }; pub use super::structs::*; pub use crate::hash::{blob_hash_to_hex, sha512_hash, BlobHash}; } @@ -36,5 +39,8 @@ mod service; mod structs; pub use self::mime::MimeAnalyzer; -pub use self::service::BlobService; +pub use self::service::{ + BlobService, EMPTY_BLOB_HASH, EMPTY_BLOB_MIME, EMPTY_BLOB_TIMESTAMP, + PRESIGN_DIRECTORY, +}; pub use self::structs::*; diff --git a/deepwell/src/services/blob/service.rs b/deepwell/src/services/blob/service.rs index cb8e05a088..a776f63428 100644 --- a/deepwell/src/services/blob/service.rs +++ b/deepwell/src/services/blob/service.rs @@ -22,6 +22,9 @@ #![allow(dead_code)] use super::prelude::*; +use crate::models::file_pending::{ + self, Entity as FilePending, Model as FilePendingModel, +}; use crate::utils::assert_is_csprng; use rand::distributions::{Alphanumeric, DistString}; use rand::thread_rng; @@ -60,10 +63,16 @@ pub struct BlobService; impl BlobService { /// Creates an S3 presign URL to allow an end user to upload a blob. /// + /// Also adds an entry for the pending file upload (`file_pending`), + /// so it can be used by the main `file` table. + /// /// # Returns /// The generated presign URL that can be uploaded to. 
- pub async fn create_upload(ctx: &ServiceContext<'_>) -> Result { + pub async fn create_upload(ctx: &ServiceContext<'_>) -> Result { let config = ctx.config(); + let txn = ctx.transaction(); + + // Generate random S3 path let s3_path = { let mut path = format!("{PRESIGN_DIRECTORY}/"); @@ -81,21 +90,42 @@ impl BlobService { }; info!("Creating presign upload URL for blob at path {s3_path}"); + // Create presign URL let bucket = ctx.s3_bucket(); - let presign_url = bucket.presign_put(&s3_path, config.presigned_expiry_secs, None)?; - - Ok(CreateUploadOutput { - s3_path, - presign_url, - }) + let presign_url = + bucket.presign_put(&s3_path, config.presigned_expiry_secs, None)?; + + // Add pending file entry + let model = file_pending::ActiveModel { + s3_path: Set(s3_path), + presign_url: Set(presign_url), + ..Default::default() + }; + let output = model.insert(txn)?; + Ok(output) } - pub async fn finish_upload(ctx: &ServiceContext<'_>, upload_path: &str) -> Result { - info!("Finishing upload for blob for path {upload_path}"); + pub async fn finish_upload( + ctx: &ServiceContext<'_>, + pending_file_id: i64, + ) -> Result { + info!("Finishing upload for blob for pending file ID {pending_file_id}"); let bucket = ctx.s3_bucket(); + let txn = ctx.transaction(); + + debug!("Getting pending file info"); + let row = FilePending::find() + .filter(file_pending::Column::PendingFileId.eq(pending_file_id)) + .one(txn) + .await?; + + let pending = match row { + Some(pending) => pending, + None => return Err(Error::GeneralNotFound), + }; debug!("Download uploaded blob from S3 uploads to get metadata"); - let response = bucket.get_object(upload_path).await?; + let response = bucket.get_object(&pending.s3_path).await?; let data: Vec = match response.status_code() { 200 => response.into(), _ => { diff --git a/deepwell/src/services/blob/structs.rs b/deepwell/src/services/blob/structs.rs index c4e58841de..653f5eef51 100644 --- a/deepwell/src/services/blob/structs.rs +++ b/deepwell/src/services/blob/structs.rs @@ -22,13 +22,7 @@ use super::prelude::*; use time::OffsetDateTime; #[derive(Debug)] -pub struct CreateUploadOutput { - pub s3_path: String, - pub presign_url: String, -} - -#[derive(Debug)] -pub struct FinalizeUploadOutput { +pub struct FinalizeBlobUploadOutput { pub hash: BlobHash, pub mime: String, pub size: i64, diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index 12eefe1838..290854c1c2 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -20,7 +20,7 @@ use super::prelude::*; use crate::models::file::{self, Entity as File, Model as FileModel}; -use crate::services::blob::CreateBlobOutput; +use crate::services::blob::{FinalizeBlobUploadOutput, EMPTY_BLOB_HASH, EMPTY_BLOB_MIME}; use crate::services::file_revision::{ CreateFileRevision, CreateFileRevisionBody, CreateFirstFileRevision, CreateResurrectionFileRevision, CreateTombstoneFileRevision, FileBlob, @@ -32,11 +32,11 @@ use crate::services::{BlobService, FileRevisionService, FilterService}; pub struct FileService; impl FileService { - /// Uploads a file and tracks it as a separate file entity. + /// Starts a file upload and tracks it as a distinct file entity. /// /// In the background, this stores the blob via content addressing, /// meaning that duplicates are not uploaded twice. 
- pub async fn upload( + pub async fn start_upload( ctx: &ServiceContext<'_>, UploadFile { site_id, @@ -44,18 +44,11 @@ impl FileService { name, revision_comments, user_id, - data, licensing, bypass_filter, }: UploadFile, ) -> Result { - let txn = ctx.transaction(); - - info!( - "Creating file with name '{}', content length {}", - name, - data.len(), - ); + info!("Creating file with name '{}'", name); // Ensure row consistency Self::check_conflicts(ctx, page_id, &name, "create").await?; @@ -65,20 +58,20 @@ impl FileService { Self::run_filter(ctx, site_id, Some(&name)).await?; } - // Upload to S3, get derived metadata - let CreateBlobOutput { hash, mime, size } = - BlobService::create(ctx, &data).await?; + // Add pending file + let pending = BlobService::create_upload(ctx).await?; // Add new file let model = file::ActiveModel { name: Set(name.clone()), site_id: Set(site_id), page_id: Set(page_id), + pending_file_id: Set(Some(pending.pending_file_id)), ..Default::default() }; let file = model.insert(txn).await?; - // Add new file revision + // Add new file revision (with dummy data) let revision_output = FileRevisionService::create_first( ctx, CreateFirstFileRevision { @@ -87,9 +80,9 @@ impl FileService { file_id: file.file_id, user_id, name, - s3_hash: hash, - size_hint: size, - mime_hint: mime, + s3_hash: EMPTY_BLOB_HASH, + mime_hint: EMPTY_BLOB_MIME, + size_hint: 0, licensing, comments: revision_comments, }, @@ -99,6 +92,69 @@ impl FileService { Ok(revision_output) } + pub async fn finish_upload( + ctx: &ServiceContext<'_>, + FinishUploadFile { + site_id, + page_id, + file_id, + pending_file_id, + }: FinishUploadFile, + ) -> Result { + info!( + "Finishing file upload with site ID {} page ID {} file ID {} pending ID {}", + site_id, page_id, file_id, pending_file_id, + ); + + // Ensure file exists + let txn = ctx.transaction(); + let row = File::find() + .filter( + Condition::all() + .add(file::Column::SiteId.eq(site_id)) + .add(file::Column::PageId.eq(page_id)) + .add(file::Column::FileId.eq(file_id)) + .add(file::Column::DeletedAt.is_null()) + .add(file::Column::PendingFileId.eq(Some(pending_file_id))), + ) + .one(txn) + .await?; + + if row.is_none() { + error!("No pending file found"); + return Err(Error::FileNotFound); + } + + // Get first file revision + let file_revision = FileRevision::find() + .filter( + Condition::all() + .add(file_revision::Column::FileId.eq(file_id)) + .add(file_revision::Column::RevisionNumber.eq(0)) + .add( + file_revision::Column::RevisionType.eq(FileRevisionType::Create), + ), + ) + .one(txn) + .await?; + + // Update file revision to add the uploaded data + let FinalizeUploadOutput { + hash, + mime, + size, + created, + } = BlobService::finish_upload(ctx, pending_file_id).await?; + + let mut model = file_revision.into_active_model(); + model.s3_hash = Set(hash); + model.mime_hint = Set(mime); + model.size_hint = Set(size); + model.update(txn).await?; + + Ok(FinishUploadFileOutput { created }) + } + /// Edits a file, including the ability to upload a new version. pub async fn edit( ctx: &ServiceContext<'_>, @@ -400,7 +456,8 @@ impl FileService { .add(condition) .add(file::Column::SiteId.eq(site_id)) .add(file::Column::PageId.eq(page_id)) - .add(file::Column::DeletedAt.is_null()), + .add(file::Column::DeletedAt.is_null()) + .add(file::Column::PendingFileId.is_null()), ) .one(txn) .await? 
@@ -435,7 +492,8 @@ impl FileService { Condition::all() .add(file::Column::PageId.eq(page_id)) .add(file::Column::Name.eq(name)) - .add(file::Column::DeletedAt.is_null()), + .add(file::Column::DeletedAt.is_null()) + .add(file::Column::PendingFileId.is_null()), ) .into_tuple() .one(txn) diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index cdb44ea368..b15cf608b1 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -33,7 +33,6 @@ pub struct UploadFile { pub name: String, pub revision_comments: String, pub user_id: i64, - pub data: Bytes<'static>, pub licensing: JsonValue, // TODO #[serde(default)] @@ -42,6 +41,19 @@ pub struct UploadFile { pub type UploadFileOutput = CreateFirstFileRevisionOutput; +#[derive(Deserialize, Debug, Clone)] +pub struct FinishUploadFile { + pub site_id: i64, + pub page_id: i64, + pub file_id: i64, + pub pending_file_id: i64, +} + +#[derive(Serialize, Debug, Copy, Clone)] +pub struct FinishUploadFileOutput { + pub created: bool, +} + #[derive(Deserialize, Debug, Clone)] pub struct GetFile<'a> { pub site_id: i64, @@ -105,7 +117,6 @@ pub struct EditFile { #[serde(default)] pub struct EditFileBody { pub name: ProvidedValue, - pub data: ProvidedValue>, pub licensing: ProvidedValue, } From 892d7515501f69abcf2624b2ddc35232c9d173ab Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 12 May 2024 02:28:19 -0400 Subject: [PATCH 10/38] Rename file_pending -> blob_pending. --- .../migrations/20220906103252_deepwell.sql | 27 ++++++-------- deepwell/src/models/blob_pending.rs | 17 +++++++++ deepwell/src/models/file.rs | 1 + deepwell/src/models/file_pending.rs | 35 ------------------- deepwell/src/models/mod.rs | 2 +- deepwell/src/services/blob/service.rs | 14 ++++---- 6 files changed, 37 insertions(+), 59 deletions(-) create mode 100644 deepwell/src/models/blob_pending.rs delete mode 100644 deepwell/src/models/file_pending.rs diff --git a/deepwell/migrations/20220906103252_deepwell.sql b/deepwell/migrations/20220906103252_deepwell.sql index 71365be413..2525c8bda3 100644 --- a/deepwell/migrations/20220906103252_deepwell.sql +++ b/deepwell/migrations/20220906103252_deepwell.sql @@ -412,19 +412,20 @@ CREATE TABLE page_vote ( ); -- --- Files +-- Blobs -- --- Pending uploads to S3 --- --- Stores the presign URL along with the path in the bucket it corresponds to. --- After processing, it is moved to be a real blob (if new) or deleted (if duplicate). 
-CREATE TABLE blob_upload ( - s3_path TEXT NOT NULL PRIMARY KEY, - presign_url TEXT NOT NULL UNIQUE, - created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now() -- TODO add job to prune dead upload jobs +-- Manages blobs that are being uploaded by the user +CREATE TABLE blob_pending ( + pending_file_id BIGSERIAL PRIMARY KEY, + s3_path TEXT NOT NULL CHECK length(s3_path) > 1, + presign_url TEXT NOT NULL CHECK length(presign_url) > 1 ); +-- +-- Files +-- + -- Enum types for file_revision CREATE TYPE file_revision_type AS ENUM ( 'create', @@ -440,12 +441,6 @@ CREATE TYPE file_revision_change AS ENUM ( 'licensing' ); -CREATE TABLE file_pending ( - pending_file_id BIGSERIAL PRIMARY KEY, - s3_path TEXT NOT NULL CHECK length(s3_path) > 1, - presign_url TEXT NOT NULL CHECK length(presign_url) > 1 -); - CREATE TABLE file ( file_id BIGSERIAL PRIMARY KEY, created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), @@ -455,7 +450,7 @@ CREATE TABLE file ( name TEXT NOT NULL, page_id BIGINT NOT NULL REFERENCES page(page_id), site_id BIGINT NOT NULL REFERENCES site(site_id), - pending_file_id BIGINT REFERENCES file_pending(pending_file_id), + pending_blob_id BIGINT REFERENCES file_pending(pending_file_id), UNIQUE (page_id, name, deleted_at) ); diff --git a/deepwell/src/models/blob_pending.rs b/deepwell/src/models/blob_pending.rs new file mode 100644 index 0000000000..70ea08d4cd --- /dev/null +++ b/deepwell/src/models/blob_pending.rs @@ -0,0 +1,17 @@ +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] +#[sea_orm(table_name = "blob_pending")] +pub struct Model { + #[sea_orm(primary_key)] + pub pending_blob_id: i64, + + #[sea_orm(column_type = "Text")] + pub s3_path: String, + + #[sea_orm(column_type = "Text")] + pub presign_url: String, +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/deepwell/src/models/file.rs b/deepwell/src/models/file.rs index ce05ac6f77..76e961ce75 100644 --- a/deepwell/src/models/file.rs +++ b/deepwell/src/models/file.rs @@ -16,6 +16,7 @@ pub struct Model { pub name: String, pub page_id: i64, pub site_id: i64, + pub pending_blob_id: i64, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/deepwell/src/models/file_pending.rs b/deepwell/src/models/file_pending.rs deleted file mode 100644 index 3cc805f3a4..0000000000 --- a/deepwell/src/models/file_pending.rs +++ /dev/null @@ -1,35 +0,0 @@ -use sea_orm::entity::prelude::*; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] -#[sea_orm(table_name = "file_pending")] -pub struct Model { - #[sea_orm(primary_key)] - pub pending_file_id: i64, - - #[sea_orm(column_type = "Text")] - pub s3_path: String, - - #[sea_orm(column_type = "Text")] - pub presign_url: String, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm( - belongs_to = "super::file::Entity", - from = "Column::FileId", - to = "super::file::Column::FileId", - on_update = "NoAction", - on_delete = "NoAction" - )] - File, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::File.def() - } -} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/deepwell/src/models/mod.rs b/deepwell/src/models/mod.rs index af50caf834..3ba7524a08 100644 --- a/deepwell/src/models/mod.rs +++ b/deepwell/src/models/mod.rs @@ -3,8 +3,8 @@ pub mod prelude; pub mod alias; +pub mod blob_pending; pub mod file; -pub mod file_pending; pub mod 
file_revision; pub mod filter; pub mod message; diff --git a/deepwell/src/services/blob/service.rs b/deepwell/src/services/blob/service.rs index a776f63428..736971298e 100644 --- a/deepwell/src/services/blob/service.rs +++ b/deepwell/src/services/blob/service.rs @@ -22,8 +22,8 @@ #![allow(dead_code)] use super::prelude::*; -use crate::models::file_pending::{ - self, Entity as FilePending, Model as FilePendingModel, +use crate::models::blob_pending::{ + self, Entity as BlobPending, Model as BlobPendingModel, }; use crate::utils::assert_is_csprng; use rand::distributions::{Alphanumeric, DistString}; @@ -68,7 +68,7 @@ impl BlobService { /// /// # Returns /// The generated presign URL that can be uploaded to. - pub async fn create_upload(ctx: &ServiceContext<'_>) -> Result { + pub async fn create_upload(ctx: &ServiceContext<'_>) -> Result { let config = ctx.config(); let txn = ctx.transaction(); @@ -109,13 +109,13 @@ impl BlobService { ctx: &ServiceContext<'_>, pending_file_id: i64, ) -> Result { - info!("Finishing upload for blob for pending file ID {pending_file_id}"); + info!("Finishing upload for blob for pending blob ID {pending_blob_id}"); let bucket = ctx.s3_bucket(); let txn = ctx.transaction(); - debug!("Getting pending file info"); - let row = FilePending::find() - .filter(file_pending::Column::PendingFileId.eq(pending_file_id)) + debug!("Getting pending blob info"); + let row = BlobPending::find() + .filter(file_pending::Column::PendingBlobId.eq(pending_file_id)) .one(txn) .await?; From 6c2bae4aa6dfb42beb93c674c3f52a4f7e8dd7cd Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 12 May 2024 03:58:28 -0400 Subject: [PATCH 11/38] Fix compilation. --- deepwell/src/endpoints/file.rs | 4 ++ deepwell/src/models/blob_pending.rs | 3 ++ deepwell/src/models/file.rs | 2 +- deepwell/src/services/blob/service.rs | 34 +++++++------- deepwell/src/services/file/service.rs | 61 +++++++++++++++++-------- deepwell/src/services/file/structs.rs | 2 +- deepwell/src/services/import/service.rs | 4 ++ deepwell/src/services/user/service.rs | 6 ++- 8 files changed, 77 insertions(+), 39 deletions(-) diff --git a/deepwell/src/endpoints/file.rs b/deepwell/src/endpoints/file.rs index 773f19865e..9fcb7a1014 100644 --- a/deepwell/src/endpoints/file.rs +++ b/deepwell/src/endpoints/file.rs @@ -83,6 +83,8 @@ pub async fn file_upload( ctx: &ServiceContext<'_>, params: Params<'static>, ) -> Result { + // FIXME file upload endpoint + /* let input: UploadFile = params.parse()?; info!( @@ -94,6 +96,8 @@ pub async fn file_upload( ); FileService::upload(ctx, input).await + */ + todo!() } pub async fn file_edit( diff --git a/deepwell/src/models/blob_pending.rs b/deepwell/src/models/blob_pending.rs index 70ea08d4cd..8d06fd4e65 100644 --- a/deepwell/src/models/blob_pending.rs +++ b/deepwell/src/models/blob_pending.rs @@ -14,4 +14,7 @@ pub struct Model { pub presign_url: String, } +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + impl ActiveModelBehavior for ActiveModel {} diff --git a/deepwell/src/models/file.rs b/deepwell/src/models/file.rs index 76e961ce75..b4d97b2aa0 100644 --- a/deepwell/src/models/file.rs +++ b/deepwell/src/models/file.rs @@ -16,7 +16,7 @@ pub struct Model { pub name: String, pub page_id: i64, pub site_id: i64, - pub pending_blob_id: i64, + pub pending_blob_id: Option, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/deepwell/src/services/blob/service.rs b/deepwell/src/services/blob/service.rs index 736971298e..c92244bf27 100644 --- 
a/deepwell/src/services/blob/service.rs +++ b/deepwell/src/services/blob/service.rs @@ -38,7 +38,7 @@ use time::OffsetDateTime; /// /// Even though it is not the SHA-512 hash, for simplicity we treat the hash /// value with all zeroes to be the blob address for the empty blob. -/// This empty file is not actually stored in S3 but instead is a "virtual file", +/// This empty blob is not actually stored in S3 but instead is a "virtual blob", /// considered to have always been present in `BlobService`. pub const EMPTY_BLOB_HASH: BlobHash = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -63,8 +63,8 @@ pub struct BlobService; impl BlobService { /// Creates an S3 presign URL to allow an end user to upload a blob. /// - /// Also adds an entry for the pending file upload (`file_pending`), - /// so it can be used by the main `file` table. + /// Also adds an entry for the pending blob upload (`blob_pending`), + /// so it can be used by the main `blob` table. /// /// # Returns /// The generated presign URL that can be uploaded to. @@ -95,19 +95,19 @@ impl BlobService { let presign_url = bucket.presign_put(&s3_path, config.presigned_expiry_secs, None)?; - // Add pending file entry - let model = file_pending::ActiveModel { + // Add pending blob entry + let model = blob_pending::ActiveModel { s3_path: Set(s3_path), presign_url: Set(presign_url), ..Default::default() }; - let output = model.insert(txn)?; + let output = model.insert(txn).await?; Ok(output) } pub async fn finish_upload( ctx: &ServiceContext<'_>, - pending_file_id: i64, + pending_blob_id: i64, ) -> Result { info!("Finishing upload for blob for pending blob ID {pending_blob_id}"); let bucket = ctx.s3_bucket(); @@ -115,21 +115,21 @@ impl BlobService { debug!("Getting pending blob info"); let row = BlobPending::find() - .filter(file_pending::Column::PendingBlobId.eq(pending_file_id)) + .filter(blob_pending::Column::PendingBlobId.eq(pending_blob_id)) .one(txn) .await?; - let pending = match row { + let BlobPendingModel { s3_path, .. 
} = match row { Some(pending) => pending, None => return Err(Error::GeneralNotFound), }; debug!("Download uploaded blob from S3 uploads to get metadata"); - let response = bucket.get_object(&pending.s3_path).await?; + let response = bucket.get_object(&s3_path).await?; let data: Vec = match response.status_code() { 200 => response.into(), _ => { - error!("Cannot find blob at presign path {upload_path}"); + error!("Cannot find blob at presign path {s3_path}"); return Err(Error::FileNotUploaded); } }; @@ -137,7 +137,7 @@ impl BlobService { // Special handling for empty blobs if data.is_empty() { debug!("File being created is empty, special case"); - return Ok(FinalizeUploadOutput { + return Ok(FinalizeBlobUploadOutput { hash: EMPTY_BLOB_HASH, mime: str!(EMPTY_BLOB_MIME), size: 0, @@ -165,7 +165,7 @@ impl BlobService { // Content-Type header should be returned let mime = result.content_type.ok_or(Error::S3Response)?; - Ok(FinalizeUploadOutput { + Ok(FinalizeBlobUploadOutput { hash, mime, size, @@ -173,11 +173,11 @@ impl BlobService { }) } - // Blob doesn't exist, move the uploaded file + // Blob doesn't exist, move it from uploaded None => { debug!("Blob with hash {hex_hash} to be created"); - // Determine MIME type for the new file + // Determine MIME type for the new blob let mime = ctx.mime().get_mime_type(data.to_vec()).await?; // Upload S3 object to final destination @@ -187,7 +187,7 @@ impl BlobService { // We assume all unexpected statuses are errors, even if 1XX or 2XX match response.status_code() { - 200 => Ok(FinalizeUploadOutput { + 200 => Ok(FinalizeBlobUploadOutput { hash, mime, size, @@ -199,7 +199,7 @@ impl BlobService { }; // Delete uploaded version, in either case - bucket.delete_object(upload_path).await?; + bucket.delete_object(&s3_path).await?; // Return result based on blob status result diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index 290854c1c2..da4c8be92b 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -20,6 +20,10 @@ use super::prelude::*; use crate::models::file::{self, Entity as File, Model as FileModel}; +use crate::models::file_revision::{ + self, Entity as FileRevision, Model as FileRevisionModel, +}; +use crate::models::sea_orm_active_enums::FileRevisionType; use crate::services::blob::{FinalizeBlobUploadOutput, EMPTY_BLOB_HASH, EMPTY_BLOB_MIME}; use crate::services::file_revision::{ CreateFileRevision, CreateFileRevisionBody, CreateFirstFileRevision, @@ -66,9 +70,11 @@ impl FileService { name: Set(name.clone()), site_id: Set(site_id), page_id: Set(page_id), - pending_file_id: Set(Some(pending.pending_file_id)), + pending_blob_id: Set(Some(pending.pending_blob_id)), ..Default::default() }; + + let txn = ctx.transaction(); let file = model.insert(txn).await?; // Add new file revision (with dummy data) @@ -81,7 +87,7 @@ impl FileService { user_id, name, s3_hash: EMPTY_BLOB_HASH, - mime_hint: EMPTY_BLOB_MIME, + mime_hint: str!(EMPTY_BLOB_MIME), size_hint: 0, licensing, comments: revision_comments, @@ -98,12 +104,12 @@ impl FileService { site_id, page_id, file_id, - pending_file_id, + pending_blob_id, }: FinishUploadFile, ) -> Result { info!( "Finishing file upload with site ID {} page ID {} file ID {} pending ID {}", - site_id, page_id, file_id, pending_file_id, + site_id, page_id, file_id, pending_blob_id, ); // Ensure file exists @@ -115,7 +121,7 @@ impl FileService { .add(file::Column::PageId.eq(page_id)) .add(file::Column::FileId.eq(file_id)) 
.add(file::Column::DeletedAt.is_null()) - .add(file::Column::PendingFileId.eq(Some(pending_file_id))), + .add(file::Column::PendingBlobId.eq(Some(pending_blob_id))), ) .one(txn) .await?; @@ -138,16 +144,31 @@ impl FileService { .one(txn) .await?; + let file_revision = match file_revision { + Some(file_revision) => file_revision, + None => return Err(Error::FileNotFound), + }; + + // Delete the pending blob row + let mut model = file::ActiveModel { + file_id: Set(file_id), + pending_blob_id: Set(None), + ..Default::default() + }; + model.update(txn).await?; + + File::delete_by_id(pending_blob_id).exec(txn).await?; + // Update file revision to add the uploaded data - let FinalizeUploadOutput { + let FinalizeBlobUploadOutput { hash, mime, size, created, - } = BlobService::finish_upload(ctx, pending_file_id).await?; + } = BlobService::finish_upload(ctx, pending_blob_id).await?; let mut model = file_revision.into_active_model(); - model.s3_hash = Set(hash); + model.s3_hash = Set(hash.to_vec()); model.mime_hint = Set(mime); model.size_hint = Set(size); model.update(txn).await?; @@ -174,11 +195,7 @@ impl FileService { let last_revision = FileRevisionService::get_latest(ctx, site_id, page_id, file_id).await?; - let EditFileBody { - name, - data, - licensing, - } = body; + let EditFileBody { name, licensing } = body; // Verify name change // @@ -193,11 +210,17 @@ impl FileService { } // Upload to S3, get derived metadata + // FIXME upload new file revision + /* let blob = match data { ProvidedValue::Unset => ProvidedValue::Unset, ProvidedValue::Set(bytes) => { - let CreateBlobOutput { hash, mime, size } = - BlobService::create(ctx, &bytes).await?; + let FinalizeBlobUploadOutput { + hash, + mime, + size, + created: _, + } = BlobService::finalize_upload(ctx, &bytes).await?; ProvidedValue::Set(FileBlob { s3_hash: hash, @@ -206,8 +229,8 @@ impl FileService { }) } }; - - // Make database changes + */ + let blob = ProvidedValue::Unset; // Update file metadata let model = file::ActiveModel { @@ -457,7 +480,7 @@ impl FileService { .add(file::Column::SiteId.eq(site_id)) .add(file::Column::PageId.eq(page_id)) .add(file::Column::DeletedAt.is_null()) - .add(file::Column::PendingFileId.is_null()), + .add(file::Column::PendingBlobId.is_null()), ) .one(txn) .await? 
@@ -493,7 +516,7 @@ impl FileService { .add(file::Column::PageId.eq(page_id)) .add(file::Column::Name.eq(name)) .add(file::Column::DeletedAt.is_null()) - .add(file::Column::PendingFileId.is_null()), + .add(file::Column::PendingBlobId.is_null()), ) .into_tuple() .one(txn) diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index b15cf608b1..bb15ff97f9 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -46,7 +46,7 @@ pub struct FinishUploadFile { pub site_id: i64, pub page_id: i64, pub file_id: i64, - pub pending_file_id: i64, + pub pending_blob_id: i64, } #[derive(Serialize, Debug, Copy, Clone)] diff --git a/deepwell/src/services/import/service.rs b/deepwell/src/services/import/service.rs index a0827f7ac4..f3ec50cb27 100644 --- a/deepwell/src/services/import/service.rs +++ b/deepwell/src/services/import/service.rs @@ -68,8 +68,12 @@ impl ImportService { let avatar_s3_hash = match avatar { None => None, Some(bytes) => { + // FIXME import - uploading avatars + /* let output = BlobService::create(ctx, &bytes).await?; Some(output.hash.to_vec()) + */ + todo!() } }; diff --git a/deepwell/src/services/user/service.rs b/deepwell/src/services/user/service.rs index fb5bff0b2a..a88069f938 100644 --- a/deepwell/src/services/user/service.rs +++ b/deepwell/src/services/user/service.rs @@ -22,7 +22,7 @@ use super::prelude::*; use crate::models::sea_orm_active_enums::{AliasType, UserType}; use crate::models::user::{self, Entity as User, Model as UserModel}; use crate::services::alias::CreateAlias; -use crate::services::blob::{BlobService, CreateBlobOutput}; +use crate::services::blob::{BlobService, FinalizeBlobUploadOutput}; use crate::services::email::{EmailClassification, EmailService}; use crate::services::filter::{FilterClass, FilterType}; use crate::services::{AliasService, FilterService, PasswordService}; @@ -425,10 +425,14 @@ impl UserService { let s3_hash = match avatar { None => None, Some(blob) => { + // FIXME blob upload + /* let CreateBlobOutput { hash, .. } = BlobService::create(ctx, &blob).await?; Some(hash.to_vec()) + */ + todo!() } }; From d3c3f98f4333438a3f850d18bf2e6e74bf5ce023 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Wed, 12 Jun 2024 07:21:42 -0400 Subject: [PATCH 12/38] [WIP] Start division between new and edit file uploads. --- deepwell/src/services/blob/service.rs | 3 + deepwell/src/services/file/service.rs | 123 ++++++++++++++++++-------- deepwell/src/services/file/structs.rs | 29 +++++- 3 files changed, 115 insertions(+), 40 deletions(-) diff --git a/deepwell/src/services/blob/service.rs b/deepwell/src/services/blob/service.rs index c92244bf27..91c71bd767 100644 --- a/deepwell/src/services/blob/service.rs +++ b/deepwell/src/services/blob/service.rs @@ -134,6 +134,9 @@ impl BlobService { } }; + debug!("Deleting pending blob"); + BlobPending::delete_by_id(pending_blob_id).exec(txn).await?; + // Special handling for empty blobs if data.is_empty() { debug!("File being created is empty, special case"); diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index da4c8be92b..a025ec65a9 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -36,13 +36,15 @@ use crate::services::{BlobService, FileRevisionService, FilterService}; pub struct FileService; impl FileService { + /// Creates a new file. + /// /// Starts a file upload and tracks it as a distinct file entity. 
/// /// In the background, this stores the blob via content addressing, /// meaning that duplicates are not uploaded twice. - pub async fn start_upload( + pub async fn start_new_upload( ctx: &ServiceContext<'_>, - UploadFile { + UploadNewFile { site_id, page_id, name, @@ -50,9 +52,10 @@ impl FileService { user_id, licensing, bypass_filter, - }: UploadFile, + }: UploadNewFile, ) -> Result { info!("Creating file with name '{}'", name); + let txn = ctx.transaction(); // Ensure row consistency Self::check_conflicts(ctx, page_id, &name, "create").await?; @@ -74,10 +77,9 @@ impl FileService { ..Default::default() }; - let txn = ctx.transaction(); let file = model.insert(txn).await?; - // Add new file revision (with dummy data) + // Add file revision (with dummy file data) let revision_output = FileRevisionService::create_first( ctx, CreateFirstFileRevision { @@ -98,17 +100,17 @@ impl FileService { Ok(revision_output) } - pub async fn finish_upload( + pub async fn finish_new_upload( ctx: &ServiceContext<'_>, - FinishUploadFile { + FinishUploadNewFile { site_id, page_id, file_id, pending_blob_id, - }: FinishUploadFile, + }: FinishUploadNewFile, ) -> Result { info!( - "Finishing file upload with site ID {} page ID {} file ID {} pending ID {}", + "Finishing new file upload with site ID {} page ID {} file ID {} pending ID {}", site_id, page_id, file_id, pending_blob_id, ); @@ -157,8 +159,6 @@ impl FileService { }; model.update(txn).await?; - File::delete_by_id(pending_blob_id).exec(txn).await?; - // Update file revision to add the uploaded data let FinalizeBlobUploadOutput { hash, @@ -176,7 +176,82 @@ impl FileService { Ok(FinishUploadFileOutput { created }) } - /// Edits a file, including the ability to upload a new version. + /// Edits a file, uploading a new file version. + pub async fn start_edit_upload( + ctx: &ServiceContext<'_>, + UploadFileEdit { + site_id, + page_id, + file_id, + user_id, + revision_comments, + }: UploadFileEdit, + ) -> Result<_UploadFileEditOutput> { + info!("Uploading new version to file ID {file_id}"); + + let txn = ctx.transaction(); + let last_revision = + FileRevisionService::get_latest(ctx, site_id, page_id, file_id).await?; + + // Add pending file + let pending = BlobService::create_upload(ctx).await?; + + // Add file revision (with dummy file data) + let revision_output = FileRevisionService::create( + ctx, + CreateFileRevision { + site_id, + page_id, + file_id, + user_id, + comments: revision_comments, + body: CreateFileRevisionBody { + blob: FileBlob { + s3_hash: EMPTY_BLOB_HASH, + mime_hint: str!(EMPTY_BLOB_MIME), + size_hint: 0, + }, + ..Default::default() + }, + }, + last_revision, + ) + .await?; + + Ok(revision_output) + } + + pub async fn finish_edit_upload( + ctx: &ServiceContext<'_>, + FinishUploadFileEdit { + site_id, + page_id, + file_id, + pending_blob_id, + }: FinishUploadFileEdit, + ) -> Result<_> { + info!( + "Finishing file edit upload with site ID {} page ID {} file ID {} pending ID {}", + site_id, page_id, file_id, pending_blob_id, + ); + + // Get latest file revision + // TODO + + // Update file metadata + let model = file::ActiveModel { + file_id: Set(file_id), + updated_at: Set(Some(now())), + ..Default::default() + }; + model.update(txn).await?; + + todo!() + } + + /// Edits a file, creating a new revision. + /// + /// Cannot be used to upload a new file version. 
pub async fn edit( ctx: &ServiceContext<'_>, EditFile { @@ -209,29 +284,6 @@ impl FileService { } } - // Upload to S3, get derived metadata - // FIXME upload new file revision - /* - let blob = match data { - ProvidedValue::Unset => ProvidedValue::Unset, - ProvidedValue::Set(bytes) => { - let FinalizeBlobUploadOutput { - hash, - mime, - size, - created: _, - } = BlobService::finalize_upload(ctx, &bytes).await?; - - ProvidedValue::Set(FileBlob { - s3_hash: hash, - size_hint: size, - mime_hint: mime, - }) - } - }; - */ - let blob = ProvidedValue::Unset; - // Update file metadata let model = file::ActiveModel { file_id: Set(file_id), @@ -251,7 +303,6 @@ impl FileService { comments: revision_comments, body: CreateFileRevisionBody { name, - blob, licensing, ..Default::default() }, diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index bb15ff97f9..3e05110d0b 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -27,7 +27,7 @@ use serde_json::Value as JsonValue; use time::OffsetDateTime; #[derive(Deserialize, Debug, Clone)] -pub struct UploadFile { +pub struct UploadNewFile { pub site_id: i64, pub page_id: i64, pub name: String, @@ -39,10 +39,11 @@ pub struct UploadFile { pub bypass_filter: bool, } -pub type UploadFileOutput = CreateFirstFileRevisionOutput; +// TODO +pub type UploadNewFileOutput = CreateFirstFileRevisionOutput; #[derive(Deserialize, Debug, Clone)] -pub struct FinishUploadFile { +pub struct FinishUploadNewFile { pub site_id: i64, pub page_id: i64, pub file_id: i64, @@ -50,10 +51,30 @@ pub struct FinishUploadFile { } #[derive(Serialize, Debug, Copy, Clone)] -pub struct FinishUploadFileOutput { +pub struct FinishUploadNewFileOutput { pub created: bool, } +#[derive(Deserialize, Debug, Clone)] +pub struct UploadFileEdit { + pub site_id: i64, + pub page_id: i64, + pub file_id: i64, + pub user_id: i64, + pub revision_comments: String, +} + +pub type UploadFileEditOutput = CreateFileRevisionOutput; + +#[derive(Deserialize, Debug, Clone)] +pub struct FinishUploadFileEdit { +} + +#[derive(Serialize, Debug, Clone)] +pub struct FinishUploadFileEditOutput { + // TODO +} + #[derive(Deserialize, Debug, Clone)] pub struct GetFile<'a> { pub site_id: i64, From 46dfe305254ed4db979ceb103bf152f3a5273296 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 8 Sep 2024 19:18:27 -0400 Subject: [PATCH 13/38] Add created_at column to blob_pending. 
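This created_at column is what the earlier TODO ("add job to prune dead upload jobs") would key off: pending rows whose presign URL expired without the upload ever being finished can be swept by age. Below is a rough sketch of such a sweep; the prune_stale_uploads() helper name and the one-hour cutoff are chosen purely for illustration, and a real job would also need to delete any half-uploaded objects left under the uploads/ prefix in S3.

use sea_orm::{ColumnTrait, DatabaseConnection, EntityTrait, QueryFilter};
use time::{Duration, OffsetDateTime};

use crate::models::blob_pending;

// Hypothetical cleanup pass: drop blob_pending rows older than the cutoff.
async fn prune_stale_uploads(db: &DatabaseConnection) -> Result<u64, sea_orm::DbErr> {
    let cutoff = OffsetDateTime::now_utc() - Duration::hours(1);

    let result = blob_pending::Entity::delete_many()
        .filter(blob_pending::Column::CreatedAt.lt(cutoff))
        .exec(db)
        .await?;

    Ok(result.rows_affected)
}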
--- deepwell/migrations/20220906103252_deepwell.sql | 1 + deepwell/src/models/blob_pending.rs | 1 + 2 files changed, 2 insertions(+) diff --git a/deepwell/migrations/20220906103252_deepwell.sql b/deepwell/migrations/20220906103252_deepwell.sql index 2525c8bda3..423eef789e 100644 --- a/deepwell/migrations/20220906103252_deepwell.sql +++ b/deepwell/migrations/20220906103252_deepwell.sql @@ -418,6 +418,7 @@ CREATE TABLE page_vote ( -- Manages blobs that are being uploaded by the user CREATE TABLE blob_pending ( pending_file_id BIGSERIAL PRIMARY KEY, + created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), s3_path TEXT NOT NULL CHECK length(s3_path) > 1, presign_url TEXT NOT NULL CHECK length(presign_url) > 1 ); diff --git a/deepwell/src/models/blob_pending.rs b/deepwell/src/models/blob_pending.rs index 8d06fd4e65..a03282ac97 100644 --- a/deepwell/src/models/blob_pending.rs +++ b/deepwell/src/models/blob_pending.rs @@ -6,6 +6,7 @@ use serde::{Deserialize, Serialize}; pub struct Model { #[sea_orm(primary_key)] pub pending_blob_id: i64, + pub created_at: TimeDateTimeWithTimeZone, #[sea_orm(column_type = "Text")] pub s3_path: String, From 7219b39b8c7a7dcbfe958c4360eee67d5ef92f0b Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 8 Sep 2024 19:21:51 -0400 Subject: [PATCH 14/38] Use find_by_id() instead of find(). --- deepwell/src/services/blob/service.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/deepwell/src/services/blob/service.rs b/deepwell/src/services/blob/service.rs index 91c71bd767..b647a71ef5 100644 --- a/deepwell/src/services/blob/service.rs +++ b/deepwell/src/services/blob/service.rs @@ -114,8 +114,7 @@ impl BlobService { let txn = ctx.transaction(); debug!("Getting pending blob info"); - let row = BlobPending::find() - .filter(blob_pending::Column::PendingBlobId.eq(pending_blob_id)) + let row = BlobPending::find_by_id(pending_blob_id) .one(txn) .await?; From 638495bb1de8fd5a9ac2423eae540e3495534963 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 8 Sep 2024 21:15:31 -0400 Subject: [PATCH 15/38] Run rustfmt. --- deepwell/src/services/blob/service.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/deepwell/src/services/blob/service.rs b/deepwell/src/services/blob/service.rs index b647a71ef5..7b13752d8f 100644 --- a/deepwell/src/services/blob/service.rs +++ b/deepwell/src/services/blob/service.rs @@ -114,10 +114,7 @@ impl BlobService { let txn = ctx.transaction(); debug!("Getting pending blob info"); - let row = BlobPending::find_by_id(pending_blob_id) - .one(txn) - .await?; - + let row = BlobPending::find_by_id(pending_blob_id).one(txn).await?; let BlobPendingModel { s3_path, .. } = match row { Some(pending) => pending, None => return Err(Error::GeneralNotFound), From 23550136658ef16664a1fa00d53050cb0bc07018 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 8 Sep 2024 23:11:48 -0400 Subject: [PATCH 16/38] Add FileRevisionService::create_pending(). 
--- deepwell/src/services/file/service.rs | 12 ++----- deepwell/src/services/file/structs.rs | 3 +- .../src/services/file_revision/service.rs | 36 ++++++++++++++++--- .../src/services/file_revision/structs.rs | 11 ++++++ 4 files changed, 47 insertions(+), 15 deletions(-) diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index a025ec65a9..c5f05e4b30 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -79,25 +79,19 @@ impl FileService { let file = model.insert(txn).await?; - // Add file revision (with dummy file data) - let revision_output = FileRevisionService::create_first( + FileRevisionService::create_pending( ctx, - CreateFirstFileRevision { + CreatePendingFileRevision { site_id, page_id, file_id: file.file_id, user_id, name, - s3_hash: EMPTY_BLOB_HASH, - mime_hint: str!(EMPTY_BLOB_MIME), - size_hint: 0, licensing, comments: revision_comments, }, ) - .await?; - - Ok(revision_output) + .await } pub async fn finish_new_upload( diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index 3e05110d0b..5450c5bab2 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -67,8 +67,7 @@ pub struct UploadFileEdit { pub type UploadFileEditOutput = CreateFileRevisionOutput; #[derive(Deserialize, Debug, Clone)] -pub struct FinishUploadFileEdit { -} +pub struct FinishUploadFileEdit {} #[derive(Serialize, Debug, Clone)] pub struct FinishUploadFileEditOutput { diff --git a/deepwell/src/services/file_revision/service.rs b/deepwell/src/services/file_revision/service.rs index 3af73a3d6d..d0431e388d 100644 --- a/deepwell/src/services/file_revision/service.rs +++ b/deepwell/src/services/file_revision/service.rs @@ -169,12 +169,40 @@ impl FileRevisionService { })) } - /// Creates the first revision for a newly-uploaded file. + /// Creates a dummy first revision for a file pending upload. + pub async fn create_pending( + ctx: &ServiceContext<'_>, + CreatePendingFileRevision { + site_id, + page_id, + file_id, + user_id, + name, + licensing, + comments, + }: CreatePendingFileRevision, + ) -> Result { + FileRevisionService::create_first( + ctx, + CreateFirstFileRevision { + site_id, + page_id, + file_id, + user_id, + name, + s3_hash: EMPTY_BLOB_HASH, + mime_hint: str!(EMPTY_BLOB_MIME), + size_hint: 0, + licensing, + comments, + }, + ) + .await + } + + /// Creates the first revision for an already-uploaded file. /// /// See `RevisionService::create_first()`. - /// - /// # Panics - /// If the given previous revision is for a different file or page, this method will panic. 
pub async fn create_first( ctx: &ServiceContext<'_>, CreateFirstFileRevision { diff --git a/deepwell/src/services/file_revision/structs.rs b/deepwell/src/services/file_revision/structs.rs index 3044bb52a8..e8d791d45b 100644 --- a/deepwell/src/services/file_revision/structs.rs +++ b/deepwell/src/services/file_revision/structs.rs @@ -68,6 +68,17 @@ pub struct CreateFirstFileRevision { pub comments: String, } +#[derive(Debug, Clone)] +pub struct CreatePendingFileRevision { + pub site_id: i64, + pub page_id: i64, + pub file_id: i64, + pub user_id: i64, + pub name: String, + pub licensing: serde_json::Value, + pub comments: String, +} + #[derive(Serialize, Debug, Clone, Default)] pub struct CreateFirstFileRevisionOutput { pub file_id: i64, From 904d61f59053b231286ae04054bda078171e6462 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 8 Sep 2024 23:17:30 -0400 Subject: [PATCH 17/38] Add FileRevisionService::get_first(). --- deepwell/src/services/file/service.rs | 17 +------------ .../src/services/file_revision/service.rs | 25 +++++++++++++++++++ 2 files changed, 26 insertions(+), 16 deletions(-) diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index c5f05e4b30..a57914daf1 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -128,22 +128,7 @@ impl FileService { } // Get first file revision - let file_revision = FileRevision::find() - .filter( - Condition::all() - .add(file_revision::Column::FileId.eq(file_id)) - .add(file_revision::Column::RevisionNumber.eq(0)) - .add( - file_revision::Column::RevisionType.eq(FileRevisionType::Create), - ), - ) - .one(txn) - .await?; - - let file_revision = match file_revision { - Some(file_revision) => file_revision, - None => return Err(Error::FileNotFound), - }; + let file_revision = FileRevisionService::get_first(ctx, site_id, page_id, file_id).await?; // Delete the pending blob row let mut model = file::ActiveModel { diff --git a/deepwell/src/services/file_revision/service.rs b/deepwell/src/services/file_revision/service.rs index d0431e388d..cf5f1b34b0 100644 --- a/deepwell/src/services/file_revision/service.rs +++ b/deepwell/src/services/file_revision/service.rs @@ -443,6 +443,31 @@ impl FileRevisionService { Ok(revision) } + /// Get the first revision for this file. + pub async fn get_first( + ctx: &ServiceContext<'_>, + site_id: i64, + page_id: i64, + file_id: i64, + ) -> Result { + let model = FileRevision::find() + .filter( + Condition::all() + .add(file_revision::Column::SiteId.eq(site_id)) + .add(file_revision::Column::PageId.eq(page_id)) + .add(file_revision::Column::FileId.eq(file_id)) + .add(file_revision::Column::RevisionNumber.eq(0)) + .add( + file_revision::Column::RevisionType.eq(FileRevisionType::Create), + ), + ) + .one(txn) + .await? + .ok_or(Error::FileRevisionNotFound)?; + + Ok(model) + } + /// Get the latest revision for this file. /// /// See `RevisionService::get_latest()`. From 066a8feb41ca6f8abe7697b586c9d22bb0574069 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 8 Sep 2024 23:28:24 -0400 Subject: [PATCH 18/38] Update comment. 
--- deepwell/src/services/file/service.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index a57914daf1..dfec6ef99c 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -130,7 +130,7 @@ impl FileService { // Get first file revision let file_revision = FileRevisionService::get_first(ctx, site_id, page_id, file_id).await?; - // Delete the pending blob row + // Remove pending_blob connection let mut model = file::ActiveModel { file_id: Set(file_id), pending_blob_id: Set(None), From 19d44cb51570b9d5860614a7829ee3b2be096364 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 8 Sep 2024 23:44:47 -0400 Subject: [PATCH 19/38] Rename structs. --- deepwell/src/services/file/service.rs | 10 +++++----- deepwell/src/services/file/structs.rs | 8 ++++---- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index dfec6ef99c..9dccb662aa 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -44,7 +44,7 @@ impl FileService { /// meaning that duplicates are not uploaded twice. pub async fn start_new_upload( ctx: &ServiceContext<'_>, - UploadNewFile { + StartFileUpload { site_id, page_id, name, @@ -52,8 +52,8 @@ impl FileService { user_id, licensing, bypass_filter, - }: UploadNewFile, - ) -> Result { + }: StartFileUpload, + ) -> Result { info!("Creating file with name '{}'", name); let txn = ctx.transaction(); @@ -96,12 +96,12 @@ impl FileService { pub async fn finish_new_upload( ctx: &ServiceContext<'_>, - FinishUploadNewFile { + FinishUploadFile { site_id, page_id, file_id, pending_blob_id, - }: FinishUploadNewFile, + }: FinishUploadFile, ) -> Result { info!( "Finishing new file upload with site ID {} page ID {} file ID {} pending ID {}", diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index 5450c5bab2..9b52d9ac86 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -27,7 +27,7 @@ use serde_json::Value as JsonValue; use time::OffsetDateTime; #[derive(Deserialize, Debug, Clone)] -pub struct UploadNewFile { +pub struct StartFileUpload { pub site_id: i64, pub page_id: i64, pub name: String, @@ -40,10 +40,10 @@ pub struct UploadNewFile { } // TODO -pub type UploadNewFileOutput = CreateFirstFileRevisionOutput; +pub type StartFileUploadOutput = CreateFirstFileRevisionOutput; #[derive(Deserialize, Debug, Clone)] -pub struct FinishUploadNewFile { +pub struct FinishUploadFile { pub site_id: i64, pub page_id: i64, pub file_id: i64, @@ -51,7 +51,7 @@ pub struct FinishUploadNewFile { } #[derive(Serialize, Debug, Copy, Clone)] -pub struct FinishUploadNewFileOutput { +pub struct FinishUploadFileOutput { pub created: bool, } From 2f3c28c0ff2c972d29419348d89efef4a74e43bf Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 8 Sep 2024 23:47:02 -0400 Subject: [PATCH 20/38] Run rustfmt. 
--- deepwell/src/services/file/service.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index 9dccb662aa..b40511f1a2 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -128,7 +128,8 @@ impl FileService { } // Get first file revision - let file_revision = FileRevisionService::get_first(ctx, site_id, page_id, file_id).await?; + let file_revision = + FileRevisionService::get_first(ctx, site_id, page_id, file_id).await?; // Remove pending_blob connection let mut model = file::ActiveModel { From cfac455b3a50854fdd744693664e01056aa41124 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 8 Sep 2024 23:56:50 -0400 Subject: [PATCH 21/38] Add proper StartFileUploadOutput struct. --- deepwell/src/services/file/service.rs | 13 +++++++++---- deepwell/src/services/file/structs.rs | 8 ++++++-- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index b40511f1a2..6e8380075a 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -78,8 +78,7 @@ impl FileService { }; let file = model.insert(txn).await?; - - FileRevisionService::create_pending( + let file_revision = FileRevisionService::create_pending( ctx, CreatePendingFileRevision { site_id, @@ -91,7 +90,13 @@ impl FileService { comments: revision_comments, }, ) - .await + .await?; + + Ok(StartFileUploadOutput { + pending_blob_id: pending.pending_blob_id, + presign_url: pending.presign_url, + file_revision_id: file_revision.file_revision_id, + }) } pub async fn finish_new_upload( @@ -108,7 +113,7 @@ impl FileService { site_id, page_id, file_id, pending_blob_id, ); - // Ensure file exists + // Ensure a pending file exists let txn = ctx.transaction(); let row = File::find() .filter( diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index 9b52d9ac86..4cf87c520b 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -39,8 +39,12 @@ pub struct StartFileUpload { pub bypass_filter: bool, } -// TODO -pub type StartFileUploadOutput = CreateFirstFileRevisionOutput; +#[derive(Serialize, Debug, Clone)] +pub struct StartFileUploadOutput { + pub pending_blob_id: i64, + pub presign_url: String, + pub file_revision_id: i64, +} #[derive(Deserialize, Debug, Clone)] pub struct FinishUploadFile { From 1d683e3178c5d564b995746c701d27d0ba88920d Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Mon, 9 Sep 2024 00:14:59 -0400 Subject: [PATCH 22/38] Reword column clear again. 
--- deepwell/src/services/file/service.rs | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index 6e8380075a..67a401a552 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -136,13 +136,15 @@ impl FileService { let file_revision = FileRevisionService::get_first(ctx, site_id, page_id, file_id).await?; - // Remove pending_blob connection - let mut model = file::ActiveModel { - file_id: Set(file_id), - pending_blob_id: Set(None), - ..Default::default() - }; - model.update(txn).await?; + // Clear pending_blob column + { + let mut model = file::ActiveModel { + file_id: Set(file_id), + pending_blob_id: Set(None), + ..Default::default() + }; + model.update(txn).await?; + } // Update file revision to add the uploaded data let FinalizeBlobUploadOutput { From 34379a38ea7baa77d3740a8e4f43cd6ffb7b39b3 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Mon, 9 Sep 2024 22:41:13 -0400 Subject: [PATCH 23/38] Update comments. --- deepwell/src/services/file/service.rs | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index 67a401a552..de4b740140 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -147,6 +147,7 @@ impl FileService { } // Update file revision to add the uploaded data + // This deletes the pending blob row let FinalizeBlobUploadOutput { hash, mime, @@ -154,11 +155,14 @@ impl FileService { created, } = BlobService::finish_upload(ctx, pending_blob_id).await?; - let mut model = file_revision.into_active_model(); - model.s3_hash = Set(hash.to_vec()); - model.mime_hint = Set(mime); - model.size_hint = Set(size); - model.update(txn).await?; + // Update first file revision with uploaded data + { + let mut model = file_revision.into_active_model(); + model.s3_hash = Set(hash.to_vec()); + model.mime_hint = Set(mime); + model.size_hint = Set(size); + model.update(txn).await?; + } Ok(FinishUploadFileOutput { created }) } From 20308c48bb27e4e4f38e1611fcd3fc86118d105b Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Mon, 9 Sep 2024 22:41:54 -0400 Subject: [PATCH 24/38] Remove dead_code suppression. --- deepwell/src/services/blob/structs.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/deepwell/src/services/blob/structs.rs b/deepwell/src/services/blob/structs.rs index 653f5eef51..4a800818aa 100644 --- a/deepwell/src/services/blob/structs.rs +++ b/deepwell/src/services/blob/structs.rs @@ -29,7 +29,6 @@ pub struct FinalizeBlobUploadOutput { } #[derive(Debug)] -#[allow(dead_code)] // TEMP pub struct BlobMetadata { pub mime: String, pub size: i64, From c4a3dacc53ce7cdd501f1b16971a1271ae586f0d Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Mon, 9 Sep 2024 23:36:03 -0400 Subject: [PATCH 25/38] Add TODOs for incomplete file pruning jobs. 
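
The eventual job mostly needs to delete blob_pending rows older than some cutoff
(and, separately, deal with file rows still pointing at a pruned pending blob).
A sketch of the core query, assuming it runs inside the usual service context
with `txn` and the config in scope; the one-hour slack and the exact policy are
placeholders:

    use time::{Duration, OffsetDateTime};

    // Once the presigned URL has long expired, the upload can no longer
    // complete, so dropping the row after some slack should be safe.
    let cutoff = OffsetDateTime::now_utc()
        - Duration::seconds(i64::from(config.presigned_expiry_secs) + 3600);

    BlobPending::delete_many()
        .filter(blob_pending::Column::CreatedAt.lt(cutoff))
        .exec(txn)
        .await?;

What to do with the corresponding file rows (clear pending_blob_id, or delete
files that never got a real first revision) is still open.
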
--- deepwell/src/services/job/structs.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/deepwell/src/services/job/structs.rs b/deepwell/src/services/job/structs.rs index 8f483e98ac..5a863f8630 100644 --- a/deepwell/src/services/job/structs.rs +++ b/deepwell/src/services/job/structs.rs @@ -28,6 +28,8 @@ pub enum Job { }, PruneSessions, PruneText, + // TODO add job for pruning incomplete uploads (pending_blob table) + // TODO also add a job (file table) NameChangeRefill, LiftExpiredPunishments, } From 6cf5fd3034f669820225debceeb519589c8c1d6b Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Mon, 9 Sep 2024 23:44:03 -0400 Subject: [PATCH 26/38] Add FileRevisionService::finish_upload(). And complete FileService::finish_new_upload(). --- deepwell/src/services/blob/service.rs | 7 ++-- deepwell/src/services/file/service.rs | 35 +++++++------------ .../src/services/file_revision/service.rs | 34 ++++++++++++++++++ .../src/services/file_revision/structs.rs | 8 +++++ 4 files changed, 59 insertions(+), 25 deletions(-) diff --git a/deepwell/src/services/blob/service.rs b/deepwell/src/services/blob/service.rs index 7b13752d8f..c7d5799d23 100644 --- a/deepwell/src/services/blob/service.rs +++ b/deepwell/src/services/blob/service.rs @@ -109,7 +109,7 @@ impl BlobService { ctx: &ServiceContext<'_>, pending_blob_id: i64, ) -> Result { - info!("Finishing upload for blob for pending blob ID {pending_blob_id}"); + info!("Finishing upload for blob for pending ID {pending_blob_id}"); let bucket = ctx.s3_bucket(); let txn = ctx.transaction(); @@ -161,6 +161,9 @@ impl BlobService { Some(result) => { debug!("Blob with hash {hex_hash} already exists"); + // TODO: Should we ever update the mime type? + // In case of changing file formats, etc. + // Content-Type header should be returned let mime = result.content_type.ok_or(Error::S3Response)?; @@ -172,7 +175,7 @@ impl BlobService { }) } - // Blob doesn't exist, move it from uploaded + // Blob doesn't exist, "move" it None => { debug!("Blob with hash {hex_hash} to be created"); diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index de4b740140..84b709a77b 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -132,10 +132,6 @@ impl FileService { return Err(Error::FileNotFound); } - // Get first file revision - let file_revision = - FileRevisionService::get_first(ctx, site_id, page_id, file_id).await?; - // Clear pending_blob column { let mut model = file::ActiveModel { @@ -146,25 +142,18 @@ impl FileService { model.update(txn).await?; } - // Update file revision to add the uploaded data - // This deletes the pending blob row - let FinalizeBlobUploadOutput { - hash, - mime, - size, - created, - } = BlobService::finish_upload(ctx, pending_blob_id).await?; - - // Update first file revision with uploaded data - { - let mut model = file_revision.into_active_model(); - model.s3_hash = Set(hash.to_vec()); - model.mime_hint = Set(mime); - model.size_hint = Set(size); - model.update(txn).await?; - } - - Ok(FinishUploadFileOutput { created }) + // Finally, update the first file revision with the uploaded data. + // This gets the data from BlobService and then deletes the row. + FileRevisionService::finish_upload( + ctx, + FinishUploadFile { + site_id, + page_id, + file_id, + pending_blob_id, + }, + ) + .await } /// Edits a file, uploading a new file version. 
diff --git a/deepwell/src/services/file_revision/service.rs b/deepwell/src/services/file_revision/service.rs index cf5f1b34b0..9fb70b182f 100644 --- a/deepwell/src/services/file_revision/service.rs +++ b/deepwell/src/services/file_revision/service.rs @@ -443,6 +443,40 @@ impl FileRevisionService { Ok(revision) } + /// For a pending file, fill in the uploaded data fields. + pub async fn finish_upload( + ctx: &ServiceContext<'_>, + FinishUpload { + site_id, + page_id, + file_id, + pending_blob_id, + }: FinishUpload, + ) -> Result { + let txn = ctx.transaction(); + + // Move upload to final location, get its metadata + let FinalizeBlobUploadOutput { + hash, + mime, + size, + created, + } = BlobService::finish_upload(ctx, pending_blob_id).await?; + + // Get first file revision + let file_revision = + FileRevisionService::get_first(ctx, site_id, page_id, file_id).await?; + + // Update it with uploaded data + let mut model = file_revision.into_active_model(); + model.s3_hash = Set(hash.to_vec()); + model.mime_hint = Set(mime); + model.size_hint = Set(size); + + let file_revision = model.update(txn).await?; + Ok(file_revision) + } + /// Get the first revision for this file. pub async fn get_first( ctx: &ServiceContext<'_>, diff --git a/deepwell/src/services/file_revision/structs.rs b/deepwell/src/services/file_revision/structs.rs index e8d791d45b..d5d2f1869c 100644 --- a/deepwell/src/services/file_revision/structs.rs +++ b/deepwell/src/services/file_revision/structs.rs @@ -123,6 +123,14 @@ pub struct UpdateFileRevision { pub hidden: Vec, } +#[derive(Serialize, Debug, Clone)] +pub struct FinishUpload { + pub site_id: i64, + pub page_id: i64, + pub file_id: i64, + pub pending_blob_id: i64, +} + #[derive(Deserialize, Debug, Clone)] pub struct GetFileRevisionRange { pub page_id: i64, From a22bd48b0c02ab30934b6b4e4cd3d7026d1c34f0 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Tue, 10 Sep 2024 00:17:24 -0400 Subject: [PATCH 27/38] Improve output of finish_new_upload(). 
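
Instead of a bare `created` flag, the finish step now reports the final hash,
MIME and size hints, and whether the blob was newly stored or deduplicated
against an existing one. A small caller-side sketch (the surrounding variables
are illustrative):

    let output = FileService::finish_new_upload(ctx, input).await?;
    if output.created {
        info!("Stored new blob: {} bytes, {}", output.size_hint, output.mime_hint);
    } else {
        info!("Deduplicated against existing blob {:?}", output.s3_hash);
    }
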
--- deepwell/src/endpoints/file.rs | 2 +- deepwell/src/services/blob/structs.rs | 1 + deepwell/src/services/file/service.rs | 7 ++++--- deepwell/src/services/file/structs.rs | 7 ++----- deepwell/src/services/file_revision/service.rs | 17 +++++++++++++---- deepwell/src/services/file_revision/structs.rs | 12 +++++++++++- 6 files changed, 32 insertions(+), 14 deletions(-) diff --git a/deepwell/src/endpoints/file.rs b/deepwell/src/endpoints/file.rs index 9fcb7a1014..18021119f7 100644 --- a/deepwell/src/endpoints/file.rs +++ b/deepwell/src/endpoints/file.rs @@ -25,7 +25,7 @@ use crate::services::blob::BlobService; use crate::services::file::{ DeleteFile, DeleteFileOutput, EditFile, EditFileOutput, GetBlobOutput, GetFileDetails, GetFileOutput, MoveFile, MoveFileOutput, RestoreFile, - RestoreFileOutput, UploadFile, UploadFileOutput, + RestoreFileOutput, }; use crate::services::Result; use crate::web::{Bytes, FileDetails}; diff --git a/deepwell/src/services/blob/structs.rs b/deepwell/src/services/blob/structs.rs index 4a800818aa..d931ac264a 100644 --- a/deepwell/src/services/blob/structs.rs +++ b/deepwell/src/services/blob/structs.rs @@ -26,6 +26,7 @@ pub struct FinalizeBlobUploadOutput { pub hash: BlobHash, pub mime: String, pub size: i64, + pub created: bool, } #[derive(Debug)] diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index 84b709a77b..9799d6d89f 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -27,7 +27,8 @@ use crate::models::sea_orm_active_enums::FileRevisionType; use crate::services::blob::{FinalizeBlobUploadOutput, EMPTY_BLOB_HASH, EMPTY_BLOB_MIME}; use crate::services::file_revision::{ CreateFileRevision, CreateFileRevisionBody, CreateFirstFileRevision, - CreateResurrectionFileRevision, CreateTombstoneFileRevision, FileBlob, + CreatePendingFileRevision, CreateResurrectionFileRevision, + CreateTombstoneFileRevision, FileBlob, }; use crate::services::filter::{FilterClass, FilterType}; use crate::services::{BlobService, FileRevisionService, FilterService}; @@ -146,7 +147,7 @@ impl FileService { // This gets the data from BlobService and then deletes the row. 
FileRevisionService::finish_upload( ctx, - FinishUploadFile { + FinishUpload { site_id, page_id, file_id, @@ -166,7 +167,7 @@ impl FileService { user_id, revision_comments, }: UploadFileEdit, - ) -> Result<_UploadFileEditOutput> { + ) -> Result { info!("Uploading new version to file ID {file_id}"); let txn = ctx.transaction(); diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index 4cf87c520b..185ed80f24 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -20,7 +20,7 @@ use crate::models::sea_orm_active_enums::FileRevisionType; use crate::services::file_revision::{ - CreateFileRevisionOutput, CreateFirstFileRevisionOutput, + CreateFileRevisionOutput, CreateFirstFileRevisionOutput, FinishUploadOutput, }; use crate::web::{Bytes, FileDetails, ProvidedValue, Reference}; use serde_json::Value as JsonValue; @@ -54,10 +54,7 @@ pub struct FinishUploadFile { pub pending_blob_id: i64, } -#[derive(Serialize, Debug, Copy, Clone)] -pub struct FinishUploadFileOutput { - pub created: bool, -} +pub type FinishUploadFileOutput = FinishUploadOutput; #[derive(Deserialize, Debug, Clone)] pub struct UploadFileEdit { diff --git a/deepwell/src/services/file_revision/service.rs b/deepwell/src/services/file_revision/service.rs index 9fb70b182f..184b3ff728 100644 --- a/deepwell/src/services/file_revision/service.rs +++ b/deepwell/src/services/file_revision/service.rs @@ -22,7 +22,8 @@ use super::prelude::*; use crate::models::file_revision::{ self, Entity as FileRevision, Model as FileRevisionModel, }; -use crate::services::{OutdateService, PageService}; +use crate::services::blob::{FinalizeBlobUploadOutput, EMPTY_BLOB_HASH, EMPTY_BLOB_MIME}; +use crate::services::{BlobService, OutdateService, PageService}; use crate::web::FetchDirection; use once_cell::sync::Lazy; use std::num::NonZeroI32; @@ -452,7 +453,7 @@ impl FileRevisionService { file_id, pending_blob_id, }: FinishUpload, - ) -> Result { + ) -> Result { let txn = ctx.transaction(); // Move upload to final location, get its metadata @@ -472,9 +473,16 @@ impl FileRevisionService { model.s3_hash = Set(hash.to_vec()); model.mime_hint = Set(mime); model.size_hint = Set(size); - let file_revision = model.update(txn).await?; - Ok(file_revision) + + Ok(FinishUploadOutput { + file_id, + file_revision_id: file_revision.revision_id, + s3_hash: Bytes::from(file_revision.s3_hash), + mime_hint: file_revision.mime_hint, + size_hint: file_revision.size_hint, + created, + }) } /// Get the first revision for this file. 
@@ -484,6 +492,7 @@ impl FileRevisionService { page_id: i64, file_id: i64, ) -> Result { + let txn = ctx.transaction(); let model = FileRevision::find() .filter( Condition::all() diff --git a/deepwell/src/services/file_revision/structs.rs b/deepwell/src/services/file_revision/structs.rs index d5d2f1869c..1be072ec51 100644 --- a/deepwell/src/services/file_revision/structs.rs +++ b/deepwell/src/services/file_revision/structs.rs @@ -123,7 +123,7 @@ pub struct UpdateFileRevision { pub hidden: Vec, } -#[derive(Serialize, Debug, Clone)] +#[derive(Deserialize, Debug, Clone)] pub struct FinishUpload { pub site_id: i64, pub page_id: i64, @@ -131,6 +131,16 @@ pub struct FinishUpload { pub pending_blob_id: i64, } +#[derive(Serialize, Debug, Copy, Clone)] +pub struct FinishUploadOutput { + pub file_id: i64, + pub file_revision_id: i64, + pub s3_hash: Bytes, + pub mime_hint: String, + pub size_hint: i64, + pub created: bool, +} + #[derive(Deserialize, Debug, Clone)] pub struct GetFileRevisionRange { pub page_id: i64, From 65a0b6ce66face232fc9e24785863b783bcc5635 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Tue, 10 Sep 2024 00:18:14 -0400 Subject: [PATCH 28/38] Delete dummy structs. --- deepwell/src/services/file/structs.rs | 8 -------- 1 file changed, 8 deletions(-) diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index 185ed80f24..25564859e5 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -67,14 +67,6 @@ pub struct UploadFileEdit { pub type UploadFileEditOutput = CreateFileRevisionOutput; -#[derive(Deserialize, Debug, Clone)] -pub struct FinishUploadFileEdit {} - -#[derive(Serialize, Debug, Clone)] -pub struct FinishUploadFileEditOutput { - // TODO -} - #[derive(Deserialize, Debug, Clone)] pub struct GetFile<'a> { pub site_id: i64, From 0a9cbc70f8c4ed6d6928624605e7fadc676ba361 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Tue, 10 Sep 2024 00:19:23 -0400 Subject: [PATCH 29/38] Rename types. --- deepwell/src/services/file/service.rs | 2 +- deepwell/src/services/file/structs.rs | 4 ++-- deepwell/src/services/file_revision/service.rs | 8 ++++---- deepwell/src/services/file_revision/structs.rs | 4 ++-- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index 9799d6d89f..c36a5d18cf 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -147,7 +147,7 @@ impl FileService { // This gets the data from BlobService and then deletes the row. 
FileRevisionService::finish_upload( ctx, - FinishUpload { + FinishUploadFile { site_id, page_id, file_id, diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index 25564859e5..a808ccb3a9 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -20,7 +20,7 @@ use crate::models::sea_orm_active_enums::FileRevisionType; use crate::services::file_revision::{ - CreateFileRevisionOutput, CreateFirstFileRevisionOutput, FinishUploadOutput, + CreateFileRevisionOutput, CreateFirstFileRevisionOutput, FinishFileRevisionUploadOutput, }; use crate::web::{Bytes, FileDetails, ProvidedValue, Reference}; use serde_json::Value as JsonValue; @@ -54,7 +54,7 @@ pub struct FinishUploadFile { pub pending_blob_id: i64, } -pub type FinishUploadFileOutput = FinishUploadOutput; +pub type FinishUploadFileOutput = FinishFileRevisionUploadOutput; #[derive(Deserialize, Debug, Clone)] pub struct UploadFileEdit { diff --git a/deepwell/src/services/file_revision/service.rs b/deepwell/src/services/file_revision/service.rs index 184b3ff728..7f2fc8211f 100644 --- a/deepwell/src/services/file_revision/service.rs +++ b/deepwell/src/services/file_revision/service.rs @@ -447,13 +447,13 @@ impl FileRevisionService { /// For a pending file, fill in the uploaded data fields. pub async fn finish_upload( ctx: &ServiceContext<'_>, - FinishUpload { + FinishFileRevisionUpload { site_id, page_id, file_id, pending_blob_id, - }: FinishUpload, - ) -> Result { + }: FinishFileRevisionUpload, + ) -> Result { let txn = ctx.transaction(); // Move upload to final location, get its metadata @@ -475,7 +475,7 @@ impl FileRevisionService { model.size_hint = Set(size); let file_revision = model.update(txn).await?; - Ok(FinishUploadOutput { + Ok(FinishFileRevisionUploadOutput { file_id, file_revision_id: file_revision.revision_id, s3_hash: Bytes::from(file_revision.s3_hash), diff --git a/deepwell/src/services/file_revision/structs.rs b/deepwell/src/services/file_revision/structs.rs index 1be072ec51..4cfd3662db 100644 --- a/deepwell/src/services/file_revision/structs.rs +++ b/deepwell/src/services/file_revision/structs.rs @@ -124,7 +124,7 @@ pub struct UpdateFileRevision { } #[derive(Deserialize, Debug, Clone)] -pub struct FinishUpload { +pub struct FinishFileRevisionUpload { pub site_id: i64, pub page_id: i64, pub file_id: i64, @@ -132,7 +132,7 @@ pub struct FinishUpload { } #[derive(Serialize, Debug, Copy, Clone)] -pub struct FinishUploadOutput { +pub struct FinishFileRevisionUploadOutput { pub file_id: i64, pub file_revision_id: i64, pub s3_hash: Bytes, From 7f1c68489d5c5b309239374edaae636e7bc03600 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Tue, 10 Sep 2024 23:35:37 -0400 Subject: [PATCH 30/38] Merge pending jobs. 
--- deepwell/src/services/job/structs.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/deepwell/src/services/job/structs.rs b/deepwell/src/services/job/structs.rs index 5a863f8630..63502ea7f4 100644 --- a/deepwell/src/services/job/structs.rs +++ b/deepwell/src/services/job/structs.rs @@ -28,8 +28,7 @@ pub enum Job { }, PruneSessions, PruneText, - // TODO add job for pruning incomplete uploads (pending_blob table) - // TODO also add a job (file table) + // TODO add job for pruning incomplete uploads (pending_blob table and corresponding column in file table) NameChangeRefill, LiftExpiredPunishments, } From 064276a39e0b538c9161d713a367442dcf2fe87c Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Tue, 10 Sep 2024 23:39:30 -0400 Subject: [PATCH 31/38] Stub out edits for now. First need to figure out initial uploads, then will unify them in some nice, generic way. --- deepwell/src/services/file/service.rs | 55 +++------------------------ 1 file changed, 5 insertions(+), 50 deletions(-) diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index c36a5d18cf..d126b0fc5e 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -157,7 +157,8 @@ impl FileService { .await } - /// Edits a file, uploading a new file version. + /// Edits a file by uploading a new file version. + /// TODO needs to be implemented pub async fn start_edit_upload( ctx: &ServiceContext<'_>, UploadFileEdit { @@ -168,40 +169,10 @@ impl FileService { revision_comments, }: UploadFileEdit, ) -> Result { - info!("Uploading new version to file ID {file_id}"); - - let txn = ctx.transaction(); - let last_revision = - FileRevisionService::get_latest(ctx, site_id, page_id, file_id).await?; - - // Add pending file - let pending = BlobService::create_upload(ctx).await?; - - // Add file revision (with dummy file data) - let revision_output = FileRevisionService::create( - ctx, - CreateFileRevision { - site_id, - page_id, - file_id, - user_id, - comments: revision_comments, - body: CreateFileRevisionBody { - blob: FileBlob { - s3_hash: EMPTY_BLOB_HASH, - mime_hint: str!(EMPTY_BLOB_MIME), - size_hint: 0, - }, - ..Default::default() - }, - }, - last_revision, - ) - .await?; - - Ok(revision_output) + todo!() } + // TODO pub async fn finish_edit_upload( ctx: &ServiceContext<'_>, FinishUploadFileEdit { @@ -210,23 +181,7 @@ impl FileService { file_id, pending_blob_id, }: FinishUploadFileEdit, - ) -> Result<_> { - info!( - "Finishing file edit upload with site ID {} page ID {} file ID {} pending ID {}", - site_id, page_id, file_id, pending_blob_id, - ); - - // Get latest file revision - // TODO - - // Update file metadata - let model = file::ActiveModel { - file_id: Set(file_id), - updated_at: Set(Some(now())), - ..Default::default() - }; - model.update(txn).await?; - + ) -> Result<()> { todo!() } From 2a99341019d38664893e625be3d6c6102ab29a76 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Wed, 18 Sep 2024 03:14:42 -0400 Subject: [PATCH 32/38] Fix build errors. 
--- deepwell/src/endpoints/file.rs | 4 ++-- deepwell/src/services/blob/service.rs | 1 + deepwell/src/services/file/service.rs | 4 ++-- deepwell/src/services/file/structs.rs | 5 ++++- deepwell/src/services/file_revision/service.rs | 2 +- deepwell/src/services/file_revision/structs.rs | 6 +++--- 6 files changed, 13 insertions(+), 9 deletions(-) diff --git a/deepwell/src/endpoints/file.rs b/deepwell/src/endpoints/file.rs index 18021119f7..57102b43f0 100644 --- a/deepwell/src/endpoints/file.rs +++ b/deepwell/src/endpoints/file.rs @@ -25,7 +25,7 @@ use crate::services::blob::BlobService; use crate::services::file::{ DeleteFile, DeleteFileOutput, EditFile, EditFileOutput, GetBlobOutput, GetFileDetails, GetFileOutput, MoveFile, MoveFileOutput, RestoreFile, - RestoreFileOutput, + RestoreFileOutput, StartFileUploadOutput, }; use crate::services::Result; use crate::web::{Bytes, FileDetails}; @@ -82,7 +82,7 @@ pub async fn file_get( pub async fn file_upload( ctx: &ServiceContext<'_>, params: Params<'static>, -) -> Result { +) -> Result { // FIXME file upload endpoint /* let input: UploadFile = params.parse()?; diff --git a/deepwell/src/services/blob/service.rs b/deepwell/src/services/blob/service.rs index c7d5799d23..42c670a5c1 100644 --- a/deepwell/src/services/blob/service.rs +++ b/deepwell/src/services/blob/service.rs @@ -140,6 +140,7 @@ impl BlobService { hash: EMPTY_BLOB_HASH, mime: str!(EMPTY_BLOB_MIME), size: 0, + created: false, }); } diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index d126b0fc5e..4555613470 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -28,7 +28,7 @@ use crate::services::blob::{FinalizeBlobUploadOutput, EMPTY_BLOB_HASH, EMPTY_BLO use crate::services::file_revision::{ CreateFileRevision, CreateFileRevisionBody, CreateFirstFileRevision, CreatePendingFileRevision, CreateResurrectionFileRevision, - CreateTombstoneFileRevision, FileBlob, + CreateTombstoneFileRevision, FileBlob, FinishFileRevisionUpload, }; use crate::services::filter::{FilterClass, FilterType}; use crate::services::{BlobService, FileRevisionService, FilterService}; @@ -147,7 +147,7 @@ impl FileService { // This gets the data from BlobService and then deletes the row. 
FileRevisionService::finish_upload( ctx, - FinishUploadFile { + FinishFileRevisionUpload { site_id, page_id, file_id, diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index a808ccb3a9..4ecf6d7253 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -20,7 +20,8 @@ use crate::models::sea_orm_active_enums::FileRevisionType; use crate::services::file_revision::{ - CreateFileRevisionOutput, CreateFirstFileRevisionOutput, FinishFileRevisionUploadOutput, + CreateFileRevisionOutput, CreateFirstFileRevisionOutput, + FinishFileRevisionUploadOutput, }; use crate::web::{Bytes, FileDetails, ProvidedValue, Reference}; use serde_json::Value as JsonValue; @@ -67,6 +68,8 @@ pub struct UploadFileEdit { pub type UploadFileEditOutput = CreateFileRevisionOutput; +pub type FinishUploadFileEdit = FinishUploadFile; + #[derive(Deserialize, Debug, Clone)] pub struct GetFile<'a> { pub site_id: i64, diff --git a/deepwell/src/services/file_revision/service.rs b/deepwell/src/services/file_revision/service.rs index 7f2fc8211f..46c5a5fbda 100644 --- a/deepwell/src/services/file_revision/service.rs +++ b/deepwell/src/services/file_revision/service.rs @@ -24,7 +24,7 @@ use crate::models::file_revision::{ }; use crate::services::blob::{FinalizeBlobUploadOutput, EMPTY_BLOB_HASH, EMPTY_BLOB_MIME}; use crate::services::{BlobService, OutdateService, PageService}; -use crate::web::FetchDirection; +use crate::web::{Bytes, FetchDirection}; use once_cell::sync::Lazy; use std::num::NonZeroI32; diff --git a/deepwell/src/services/file_revision/structs.rs b/deepwell/src/services/file_revision/structs.rs index 4cfd3662db..737f29a6ab 100644 --- a/deepwell/src/services/file_revision/structs.rs +++ b/deepwell/src/services/file_revision/structs.rs @@ -21,7 +21,7 @@ use super::prelude::*; use crate::hash::BlobHash; use crate::services::page_revision::PageRevisionCountOutput; -use crate::web::FetchDirection; +use crate::web::{Bytes, FetchDirection}; #[derive(Debug, Clone)] pub struct CreateFileRevision { @@ -131,11 +131,11 @@ pub struct FinishFileRevisionUpload { pub pending_blob_id: i64, } -#[derive(Serialize, Debug, Copy, Clone)] +#[derive(Serialize, Debug, Clone)] pub struct FinishFileRevisionUploadOutput { pub file_id: i64, pub file_revision_id: i64, - pub s3_hash: Bytes, + pub s3_hash: Bytes<'static>, pub mime_hint: String, pub size_hint: i64, pub created: bool, From df1c682446d0103b94a02a19e8ec943a6fbe8d1f Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Wed, 18 Sep 2024 22:14:23 -0400 Subject: [PATCH 33/38] Fix CHECK constraints. 
--- deepwell/migrations/20220906103252_deepwell.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deepwell/migrations/20220906103252_deepwell.sql b/deepwell/migrations/20220906103252_deepwell.sql index 423eef789e..ff229d4d4e 100644 --- a/deepwell/migrations/20220906103252_deepwell.sql +++ b/deepwell/migrations/20220906103252_deepwell.sql @@ -419,8 +419,8 @@ CREATE TABLE page_vote ( CREATE TABLE blob_pending ( pending_file_id BIGSERIAL PRIMARY KEY, created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), - s3_path TEXT NOT NULL CHECK length(s3_path) > 1, - presign_url TEXT NOT NULL CHECK length(presign_url) > 1 + s3_path TEXT NOT NULL CHECK (length(s3_path) > 1), + presign_url TEXT NOT NULL CHECK (length(presign_url) > 1) ); -- From d6876e7e6b42bcd6cce694f912d7e41e6c8fbad6 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Fri, 20 Sep 2024 04:09:43 -0400 Subject: [PATCH 34/38] Implement file_upload_* API methods. --- deepwell/src/api.rs | 3 ++- deepwell/src/endpoints/file.rs | 37 +++++++++++++++++---------- deepwell/src/services/file/service.rs | 2 +- 3 files changed, 27 insertions(+), 15 deletions(-) diff --git a/deepwell/src/api.rs b/deepwell/src/api.rs index 25f6a21c4c..9fbb4453fa 100644 --- a/deepwell/src/api.rs +++ b/deepwell/src/api.rs @@ -257,7 +257,8 @@ async fn build_module(app_state: ServerState) -> anyhow::Result, params: Params<'static>, ) -> Result { - // FIXME file upload endpoint - /* - let input: UploadFile = params.parse()?; + let input: StartFileUpload = params.parse()?; info!( - "Uploading file '{}' ({} bytes) to page ID {} in site ID {}", - input.name, - input.data.len(), + "Starting file upload '{}' to page ID {} in site ID {}", + input.name, input.page_id, input.site_id, + ); + + FileService::start_new_upload(ctx, input).await +} + +pub async fn file_upload_finish( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result { + let input: FinishUploadFile = params.parse()?; + + info!( + "Finishing file upload (pending blob ID {} for file ID {} in page ID {} in site ID {}", + input.pending_blob_id, + input.file_id, input.page_id, input.site_id, ); - FileService::upload(ctx, input).await - */ - todo!() + FileService::finish_new_upload(ctx, input).await } pub async fn file_edit( diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index 4555613470..af03fa9050 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -135,7 +135,7 @@ impl FileService { // Clear pending_blob column { - let mut model = file::ActiveModel { + let model = file::ActiveModel { file_id: Set(file_id), pending_blob_id: Set(None), ..Default::default() From fd3c4973f8fe21d5caadd050336bb15cc24e1f6d Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Fri, 20 Sep 2024 04:25:47 -0400 Subject: [PATCH 35/38] Rename file creation structs. 
--- deepwell/src/endpoints/file.rs | 16 +++++++------- deepwell/src/services/file/service.rs | 31 +++++++-------------------- deepwell/src/services/file/structs.rs | 10 ++++----- 3 files changed, 20 insertions(+), 37 deletions(-) diff --git a/deepwell/src/endpoints/file.rs b/deepwell/src/endpoints/file.rs index f7c68c8922..c520b870f5 100644 --- a/deepwell/src/endpoints/file.rs +++ b/deepwell/src/endpoints/file.rs @@ -23,10 +23,10 @@ use crate::models::file::Model as FileModel; use crate::models::file_revision::Model as FileRevisionModel; use crate::services::blob::BlobService; use crate::services::file::{ - DeleteFile, DeleteFileOutput, EditFile, EditFileOutput, FinishUploadFile, - FinishUploadFileOutput, GetBlobOutput, GetFileDetails, GetFileOutput, MoveFile, - MoveFileOutput, RestoreFile, RestoreFileOutput, StartFileUpload, - StartFileUploadOutput, + DeleteFile, DeleteFileOutput, EditFile, EditFileOutput, FinishFileCreation, + FinishFileCreationOutput, GetBlobOutput, GetFileDetails, GetFileOutput, MoveFile, + MoveFileOutput, RestoreFile, RestoreFileOutput, StartFileCreation, + StartFileCreationOutput, }; use crate::services::Result; use crate::web::{Bytes, FileDetails}; @@ -83,8 +83,8 @@ pub async fn file_get( pub async fn file_upload_start( ctx: &ServiceContext<'_>, params: Params<'static>, -) -> Result { - let input: StartFileUpload = params.parse()?; +) -> Result { + let input: StartFileCreation = params.parse()?; info!( "Starting file upload '{}' to page ID {} in site ID {}", @@ -97,8 +97,8 @@ pub async fn file_upload_start( pub async fn file_upload_finish( ctx: &ServiceContext<'_>, params: Params<'static>, -) -> Result { - let input: FinishUploadFile = params.parse()?; +) -> Result { + let input: FinishFileCreation = params.parse()?; info!( "Finishing file upload (pending blob ID {} for file ID {} in page ID {} in site ID {}", diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index af03fa9050..436c2db99c 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -45,7 +45,7 @@ impl FileService { /// meaning that duplicates are not uploaded twice. 
pub async fn start_new_upload( ctx: &ServiceContext<'_>, - StartFileUpload { + StartFileCreation { site_id, page_id, name, @@ -53,8 +53,8 @@ impl FileService { user_id, licensing, bypass_filter, - }: StartFileUpload, - ) -> Result { + }: StartFileCreation, + ) -> Result { info!("Creating file with name '{}'", name); let txn = ctx.transaction(); @@ -93,7 +93,7 @@ impl FileService { ) .await?; - Ok(StartFileUploadOutput { + Ok(StartFileCreationOutput { pending_blob_id: pending.pending_blob_id, presign_url: pending.presign_url, file_revision_id: file_revision.file_revision_id, @@ -102,13 +102,13 @@ impl FileService { pub async fn finish_new_upload( ctx: &ServiceContext<'_>, - FinishUploadFile { + FinishFileCreation { site_id, page_id, file_id, pending_blob_id, - }: FinishUploadFile, - ) -> Result { + }: FinishFileCreation, + ) -> Result { info!( "Finishing new file upload with site ID {} page ID {} file ID {} pending ID {}", site_id, page_id, file_id, pending_blob_id, @@ -161,27 +161,12 @@ impl FileService { /// TODO needs to be implemented pub async fn start_edit_upload( ctx: &ServiceContext<'_>, - UploadFileEdit { - site_id, - page_id, - file_id, - user_id, - revision_comments, - }: UploadFileEdit, ) -> Result { todo!() } // TODO - pub async fn finish_edit_upload( - ctx: &ServiceContext<'_>, - FinishUploadFileEdit { - site_id, - page_id, - file_id, - pending_blob_id, - }: FinishUploadFileEdit, - ) -> Result<()> { + pub async fn finish_edit_upload(ctx: &ServiceContext<'_>) -> Result<()> { todo!() } diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index 4ecf6d7253..7e0e5b4091 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -28,7 +28,7 @@ use serde_json::Value as JsonValue; use time::OffsetDateTime; #[derive(Deserialize, Debug, Clone)] -pub struct StartFileUpload { +pub struct StartFileCreation { pub site_id: i64, pub page_id: i64, pub name: String, @@ -41,21 +41,21 @@ pub struct StartFileUpload { } #[derive(Serialize, Debug, Clone)] -pub struct StartFileUploadOutput { +pub struct StartFileCreationOutput { pub pending_blob_id: i64, pub presign_url: String, pub file_revision_id: i64, } #[derive(Deserialize, Debug, Clone)] -pub struct FinishUploadFile { +pub struct FinishFileCreation { pub site_id: i64, pub page_id: i64, pub file_id: i64, pub pending_blob_id: i64, } -pub type FinishUploadFileOutput = FinishFileRevisionUploadOutput; +pub type FinishFileCreationOutput = FinishFileRevisionUploadOutput; #[derive(Deserialize, Debug, Clone)] pub struct UploadFileEdit { @@ -68,8 +68,6 @@ pub struct UploadFileEdit { pub type UploadFileEditOutput = CreateFileRevisionOutput; -pub type FinishUploadFileEdit = FinishUploadFile; - #[derive(Deserialize, Debug, Clone)] pub struct GetFile<'a> { pub site_id: i64, From 70fbfd19c21d77dfa8c145a64311e7ab7c382dc7 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Fri, 20 Sep 2024 04:33:41 -0400 Subject: [PATCH 36/38] Rename upload API methods. 
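
The renames make the two-step flow explicit: file_create_start reserves the
file and hands back a presigned S3 URL, the client PUTs the bytes straight to
S3, and file_create_finish moves the blob into place and fills in the first
revision's hash/MIME/size hints. A rough client-side sketch; the JSON-RPC
envelope, URL handling, and literal IDs are assumptions, and the omitted
StartFileCreation fields still need to be supplied:

    use serde_json::{json, Value};

    async fn upload_new_file(rpc_url: &str, bytes: Vec<u8>) -> anyhow::Result<()> {
        let http = reqwest::Client::new();

        // 1. file_create_start: reserve the file + pending blob, get a presigned URL.
        let start: Value = http
            .post(rpc_url)
            .json(&json!({
                "jsonrpc": "2.0", "id": 1,
                "method": "file_create_start",
                "params": {
                    "site_id": 1, "page_id": 1, "user_id": 1,
                    "name": "example.png",
                    // ...remaining StartFileCreation fields
                },
            }))
            .send().await?
            .json().await?;

        let presign_url = start["result"]["presign_url"]
            .as_str()
            .ok_or_else(|| anyhow::anyhow!("missing presign_url"))?;
        let pending_blob_id = start["result"]["pending_blob_id"].clone();

        // 2. Upload the raw bytes directly to S3 via the presigned URL.
        http.put(presign_url).body(bytes).send().await?.error_for_status()?;

        // 3. file_create_finish: finalize the blob and the first file revision.
        //    file_id is assumed to be known to the caller; StartFileCreationOutput
        //    currently returns only pending_blob_id, presign_url, and
        //    file_revision_id, so this part may still change.
        http.post(rpc_url)
            .json(&json!({
                "jsonrpc": "2.0", "id": 2,
                "method": "file_create_finish",
                "params": {
                    "site_id": 1, "page_id": 1, "file_id": 1,
                    "pending_blob_id": pending_blob_id,
                },
            }))
            .send().await?
            .error_for_status()?;

        Ok(())
    }

The point of the split is that file data never passes through the API process;
DEEPWELL only records the pending path and, at finish time, the blob's final
hash and metadata.
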
--- deepwell/src/api.rs | 6 ++++-- deepwell/src/endpoints/file.rs | 20 ++++++++++++++++++-- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/deepwell/src/api.rs b/deepwell/src/api.rs index 9fbb4453fa..c1cdde21f3 100644 --- a/deepwell/src/api.rs +++ b/deepwell/src/api.rs @@ -257,8 +257,10 @@ async fn build_module(app_state: ServerState) -> anyhow::Result, params: Params<'static>, ) -> Result { @@ -94,7 +94,7 @@ pub async fn file_upload_start( FileService::start_new_upload(ctx, input).await } -pub async fn file_upload_finish( +pub async fn file_create_finish( ctx: &ServiceContext<'_>, params: Params<'static>, ) -> Result { @@ -111,6 +111,22 @@ pub async fn file_upload_finish( FileService::finish_new_upload(ctx, input).await } +// TODO +pub async fn file_edit_start( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<()> { + todo!() +} + +// TODO +pub async fn file_edit_finish( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<()> { + todo!() +} + pub async fn file_edit( ctx: &ServiceContext<'_>, params: Params<'static>, From e1e3b75efa413117fb542d4e6b81058474c21046 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Fri, 20 Sep 2024 04:38:36 -0400 Subject: [PATCH 37/38] Remove unused struct. --- deepwell/src/services/file/service.rs | 2 +- deepwell/src/services/file/structs.rs | 11 ----------- 2 files changed, 1 insertion(+), 12 deletions(-) diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index 436c2db99c..f33afad558 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -161,7 +161,7 @@ impl FileService { /// TODO needs to be implemented pub async fn start_edit_upload( ctx: &ServiceContext<'_>, - ) -> Result { + ) -> Result<()> { todo!() } diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index 7e0e5b4091..d499c802d6 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -57,17 +57,6 @@ pub struct FinishFileCreation { pub type FinishFileCreationOutput = FinishFileRevisionUploadOutput; -#[derive(Deserialize, Debug, Clone)] -pub struct UploadFileEdit { - pub site_id: i64, - pub page_id: i64, - pub file_id: i64, - pub user_id: i64, - pub revision_comments: String, -} - -pub type UploadFileEditOutput = CreateFileRevisionOutput; - #[derive(Deserialize, Debug, Clone)] pub struct GetFile<'a> { pub site_id: i64, From 08161de181cf51e15e024b8c0aa5e63ec73e7001 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Fri, 20 Sep 2024 04:52:21 -0400 Subject: [PATCH 38/38] Address warnings. 
--- deepwell/src/endpoints/file.rs | 30 +++++++++++++++---------- deepwell/src/services/blob/structs.rs | 8 +++++++ deepwell/src/services/file/service.rs | 6 ++--- deepwell/src/services/file/structs.rs | 7 ------ deepwell/src/services/import/service.rs | 1 + deepwell/src/services/user/service.rs | 1 + 6 files changed, 30 insertions(+), 23 deletions(-) diff --git a/deepwell/src/endpoints/file.rs b/deepwell/src/endpoints/file.rs index db2d948a7b..c909bdea9d 100644 --- a/deepwell/src/endpoints/file.rs +++ b/deepwell/src/endpoints/file.rs @@ -21,12 +21,11 @@ use super::prelude::*; use crate::models::file::Model as FileModel; use crate::models::file_revision::Model as FileRevisionModel; -use crate::services::blob::BlobService; +use crate::services::blob::{BlobMetadata, BlobService, GetBlobOutput}; use crate::services::file::{ DeleteFile, DeleteFileOutput, EditFile, EditFileOutput, FinishFileCreation, - FinishFileCreationOutput, GetBlobOutput, GetFileDetails, GetFileOutput, MoveFile, - MoveFileOutput, RestoreFile, RestoreFileOutput, StartFileCreation, - StartFileCreationOutput, + FinishFileCreationOutput, GetFileDetails, GetFileOutput, MoveFile, MoveFileOutput, + RestoreFile, RestoreFileOutput, StartFileCreation, StartFileCreationOutput, }; use crate::services::Result; use crate::web::{Bytes, FileDetails}; @@ -41,14 +40,19 @@ pub async fn blob_get( info!("Getting blob for S3 hash"); let hash: Bytes = params.parse()?; let data = BlobService::get(ctx, hash.as_ref()).await?; - let metadata = BlobService::get_metadata(ctx, hash.as_ref()).await?; - let output = GetBlobOutput { + let BlobMetadata { + mime, + size, + created_at, + } = BlobService::get_metadata(ctx, hash.as_ref()).await?; + + Ok(GetBlobOutput { data, - mime: metadata.mime, - size: metadata.size, - }; - Ok(output) + mime, + size, + created_at, + }) } pub async fn file_get( @@ -114,16 +118,18 @@ pub async fn file_create_finish( // TODO pub async fn file_edit_start( ctx: &ServiceContext<'_>, - params: Params<'static>, + _params: Params<'static>, ) -> Result<()> { + let _ = FileService::start_edit_upload(ctx).await?; todo!() } // TODO pub async fn file_edit_finish( ctx: &ServiceContext<'_>, - params: Params<'static>, + _params: Params<'static>, ) -> Result<()> { + let _ = FileService::finish_edit_upload(ctx).await?; todo!() } diff --git a/deepwell/src/services/blob/structs.rs b/deepwell/src/services/blob/structs.rs index d931ac264a..9abc342e79 100644 --- a/deepwell/src/services/blob/structs.rs +++ b/deepwell/src/services/blob/structs.rs @@ -35,3 +35,11 @@ pub struct BlobMetadata { pub size: i64, pub created_at: OffsetDateTime, } + +#[derive(Serialize, Debug, Clone)] +pub struct GetBlobOutput { + pub data: Vec, + pub mime: String, + pub size: i64, + pub created_at: OffsetDateTime, +} diff --git a/deepwell/src/services/file/service.rs b/deepwell/src/services/file/service.rs index f33afad558..4b94e88fe6 100644 --- a/deepwell/src/services/file/service.rs +++ b/deepwell/src/services/file/service.rs @@ -159,14 +159,12 @@ impl FileService { /// Edits a file by uploading a new file version. 
/// TODO needs to be implemented - pub async fn start_edit_upload( - ctx: &ServiceContext<'_>, - ) -> Result<()> { + pub async fn start_edit_upload(_ctx: &ServiceContext<'_>) -> Result { todo!() } // TODO - pub async fn finish_edit_upload(ctx: &ServiceContext<'_>) -> Result<()> { + pub async fn finish_edit_upload(_ctx: &ServiceContext<'_>) -> Result { todo!() } diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index d499c802d6..b510dbef2f 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -94,13 +94,6 @@ pub struct GetFileOutput { pub hidden_fields: Vec, } -#[derive(Serialize, Debug, Clone)] -pub struct GetBlobOutput { - pub data: Vec, - pub mime: String, - pub size: i64, -} - #[derive(Deserialize, Debug, Clone)] pub struct EditFile { pub site_id: i64, diff --git a/deepwell/src/services/import/service.rs b/deepwell/src/services/import/service.rs index f3ec50cb27..22835d0c71 100644 --- a/deepwell/src/services/import/service.rs +++ b/deepwell/src/services/import/service.rs @@ -73,6 +73,7 @@ impl ImportService { let output = BlobService::create(ctx, &bytes).await?; Some(output.hash.to_vec()) */ + let _ = bytes; todo!() } }; diff --git a/deepwell/src/services/user/service.rs b/deepwell/src/services/user/service.rs index a88069f938..18a80135c5 100644 --- a/deepwell/src/services/user/service.rs +++ b/deepwell/src/services/user/service.rs @@ -432,6 +432,7 @@ impl UserService { Some(hash.to_vec()) */ + let _ = blob; todo!() } };