Skip to content

Commit

Permalink
Feat/s3 integration (#1125)
Browse files Browse the repository at this point in the history
* Added s3 hosting initial capabilities without streaming capabilities.

* Added s3 hosting initial capabilities without streaming capabilities.

* move to download_location instead of status

* Also save podcast file type

* Reformatted files

* Fixed block on

* Added s3 backend

* Fixed local mode

* Added docs for S3
  • Loading branch information
SamTV12345 authored Jan 12, 2025
1 parent 85c0105 commit 5f0ea21
Show file tree
Hide file tree
Showing 55 changed files with 1,402 additions and 376 deletions.
316 changes: 300 additions & 16 deletions Cargo.lock

Large diffs are not rendered by default.

4 changes: 3 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ libsqlite3-sys = {version = "0.30.1", features = ["bundled"], optional = true}
r2d2_postgres = {version = "0.18.2", optional = true}
diesel_migrations = "2.2.0"
actix-files = "0.6.6"
actix-web = {version="4.9.0", features=["rustls"]}
actix-web = {version="4.9.0", features=["rustls", "http2"]}
jsonwebtoken = {version="9.3.0"}
log = "0.4.22"
futures-util = "0.3.31"
Expand Down Expand Up @@ -65,6 +65,8 @@ mp4ameta = "0.11.0"
file-format = "0.26.0"
maud = { version = "*", features = ["actix-web"] }
url = "2.5.4"
rust-s3 = { version = "0.35.1", features = ["blocking", "fail-on-err"] }
async-trait = "0.1.85"

[target.'cfg(not(windows))'.dependencies]
openssl = "0.10.68"
Expand Down
16 changes: 16 additions & 0 deletions docs/src/S3.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# S3 configuration

So you want to use an S3-compatible storage backend, e.g. to host files centrally or to save on storage provisioning costs in the cloud? PodFetch now also supports S3 configuration.
This is also valuable if you want to use a self-hosted MinIO instance and don't want to map and mount volumes around.
It is currently necessary that your S3 bucket is configured for public read access, so that listeners can stream the files directly from it.


| Environment variable | Description | Default |
|----------------------|---------------------------------------|-----------------------|
| `S3_URL` | The URL of the S3 service. | http://localhost:9000 |
| `S3_REGION` | The region of the S3 service. | eu-west-1 |
| `S3_ACCESS_KEY` | The access key of the S3 service. | / |
| `S3_SECRET_KEY` | The secret key of the S3 service. | / |
| `S3_PROFILE` | The profile of the S3 service. | / |
| `S3_SECURITY_TOKEN` | The security token of the S3 service. | / |
| `S3_SESSION_TOKEN` | The session token of the S3 service. | / |
1 change: 1 addition & 0 deletions docs/src/SUMMARY.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# Summary

- [Introduction](./Introduction.md)
- [S3](./S3.md)
- [Installation](./Installation.md)
- [Authorization](./AUTH.md)
- [UI Walkthrough](./UIWalkthrough.md)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
-- This file should undo anything in `up.sql`
-- Restore the per-episode local file URL columns removed by `up.sql`.
ALTER TABLE podcast_episodes ADD COLUMN local_image_url TEXT;
ALTER TABLE podcast_episodes ADD COLUMN local_url TEXT;

-- Re-create the `status` column dropped by `up.sql` and rebuild its value from
-- `download_location` ('D' marked a downloaded episode before this migration).
-- Without this, the down migration does not fully undo `up.sql`.
ALTER TABLE podcast_episodes ADD COLUMN status TEXT;
UPDATE podcast_episodes SET status = 'D' WHERE download_location IS NOT NULL;

ALTER TABLE podcasts DROP COLUMN download_location;
ALTER TABLE podcast_episodes DROP COLUMN download_location;
12 changes: 12 additions & 0 deletions migrations/postgres/2025-01-10-163832_download_location/up.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
-- Your SQL goes here
-- Drop the per-episode local file URL columns; local paths are now derived
-- from the download location at request time instead of being stored.
ALTER TABLE podcast_episodes DROP COLUMN local_image_url;
ALTER TABLE podcast_episodes DROP COLUMN local_url;


-- Track where each podcast/episode's files are hosted (e.g. 'Local' or an S3 backend).
ALTER TABLE podcasts ADD COLUMN download_location TEXT;
ALTER TABLE podcast_episodes ADD COLUMN download_location TEXT;

-- Backfill: everything that existed before this migration was stored locally;
-- 'D' was the old status value marking a downloaded episode.
UPDATE podcasts SET download_location = 'Local';
UPDATE podcast_episodes SET download_location = 'Local' WHERE status = 'D';

-- `download_location` (NULL = not downloaded) replaces the old `status` flag.
ALTER TABLE podcast_episodes DROP COLUMN status;
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
-- This file should undo anything in `up.sql`
-- Restore the per-episode local file URL columns removed by `up.sql`.
ALTER TABLE podcast_episodes ADD COLUMN local_image_url TEXT;
ALTER TABLE podcast_episodes ADD COLUMN local_url TEXT;

-- Re-create the `status` column dropped by `up.sql` and rebuild its value from
-- `download_location` ('D' marked a downloaded episode before this migration).
-- Without this, the down migration does not fully undo `up.sql`.
ALTER TABLE podcast_episodes ADD COLUMN status TEXT;
UPDATE podcast_episodes SET status = 'D' WHERE download_location IS NOT NULL;

ALTER TABLE podcasts DROP COLUMN download_location;
ALTER TABLE podcast_episodes DROP COLUMN download_location;
12 changes: 12 additions & 0 deletions migrations/sqlite/2025-01-10-163832_download_location/up.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
-- Your SQL goes here
-- NOTE(review): DROP COLUMN requires SQLite 3.35+ — confirm the bundled
-- libsqlite3 version satisfies this.
-- Drop the per-episode local file URL columns; local paths are now derived
-- from the download location at request time instead of being stored.
ALTER TABLE podcast_episodes DROP COLUMN local_image_url;
ALTER TABLE podcast_episodes DROP COLUMN local_url;


-- Track where each podcast/episode's files are hosted (e.g. 'Local' or an S3 backend).
ALTER TABLE podcasts ADD COLUMN download_location TEXT;
ALTER TABLE podcast_episodes ADD COLUMN download_location TEXT;

-- Backfill: everything that existed before this migration was stored locally;
-- 'D' was the old status value marking a downloaded episode.
UPDATE podcasts SET download_location = 'Local';
UPDATE podcast_episodes SET download_location = 'Local' WHERE status = 'D';

-- `download_location` (NULL = not downloaded) replaces the old `status` flag.
ALTER TABLE podcast_episodes DROP COLUMN status;
3 changes: 2 additions & 1 deletion src/adapters/api/controllers/routes.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use crate::adapters::api::controllers::device_controller::{get_devices_of_user, post_device};
use crate::constants::inner_constants::ENVIRONMENT_SERVICE;
use crate::controllers::api_doc::ApiDoc;
use crate::controllers::file_hosting::get_podcast_serving;
use crate::controllers::manifest_controller::get_manifest;
use crate::controllers::podcast_controller::proxy_podcast;
use crate::controllers::websocket_controller::{
Expand All @@ -9,7 +10,7 @@ use crate::controllers::websocket_controller::{
use crate::gpodder::auth::authentication::login;
use crate::gpodder::parametrization::get_client_parametrization;
use crate::gpodder::subscription::subscriptions::{get_subscriptions, upload_subscription_changes};
use crate::{get_api_config, get_podcast_serving, get_ui_config};
use crate::{get_api_config, get_ui_config};
use actix_web::body::{BoxBody, EitherBody};
use actix_web::dev::{ServiceFactory, ServiceRequest, ServiceResponse};
use actix_web::web::redirect;
Expand Down
102 changes: 83 additions & 19 deletions src/adapters/api/models/podcast_episode_dto.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
use crate::adapters::file::file_handler::FileHandlerType;
use crate::adapters::file::s3_file_handler::S3_BUCKET_CONFIG;
use crate::constants::inner_constants::ENVIRONMENT_SERVICE;
use crate::models::favorite_podcast_episode::FavoritePodcastEpisode;
use crate::models::podcast_episode::PodcastEpisode;
Expand All @@ -20,7 +22,7 @@ pub struct PodcastEpisodeDto {
pub(crate) local_url: String,
pub(crate) local_image_url: String,
pub(crate) description: String,
pub(crate) status: String,
pub(crate) status: bool,
pub(crate) download_time: Option<NaiveDateTime>,
pub(crate) guid: String,
pub(crate) deleted: bool,
Expand All @@ -33,21 +35,26 @@ impl From<(PodcastEpisode, Option<User>, Option<FavoritePodcastEpisode>)> for Po
PodcastEpisodeDto {
id: value.0.id,
podcast_id: value.0.podcast_id,
episode_id: value.0.episode_id,
name: value.0.name,
episode_id: value.0.episode_id.to_string(),
name: value.0.name.to_string(),
url: value.0.url.clone(),
date_of_recording: value.0.date_of_recording,
date_of_recording: value.0.date_of_recording.to_string(),
image_url: value.0.image_url.clone(),
total_time: value.0.total_time,
local_url: map_file_url(&value.0.file_episode_path, &value.0.url, &value.1),
local_image_url: map_file_url(&value.0.file_image_path, &value.0.image_url, &value.1),
description: value.0.description,
status: value.0.status,
local_url: map_url(&value.0, &value.0.file_episode_path, &value.0.url, &value.1),
local_image_url: map_url(
&value.0,
&value.0.file_image_path,
&value.0.image_url,
&value.1,
),
description: value.0.description.to_string(),
download_time: value.0.download_time,
guid: value.0.guid,
guid: value.0.guid.to_string(),
deleted: value.0.deleted,
episode_numbering_processed: value.0.episode_numbering_processed,
favored: value.2.map(|f| f.favorite),
status: value.0.is_downloaded(),
}
}
}
Expand All @@ -69,24 +76,26 @@ impl
PodcastEpisodeDto {
id: value.0.id,
podcast_id: value.0.podcast_id,
episode_id: value.0.episode_id,
name: value.0.name,
episode_id: value.0.episode_id.to_string(),
name: value.0.name.to_string(),
url: value.0.url.clone(),
date_of_recording: value.0.date_of_recording,
date_of_recording: value.0.date_of_recording.to_string(),
image_url: value.0.image_url.clone(),
total_time: value.0.total_time,
local_url: map_file_url_with_api_key(
&value.0,
&value.0.file_episode_path,
&value.0.url,
&value.1,
),
local_image_url: map_file_url_with_api_key(
&value.0,
&value.0.file_image_path,
&value.0.image_url,
&value.1,
),
description: value.0.description,
status: value.0.status,
description: value.0.description.to_string(),
status: value.0.is_downloaded(),
download_time: value.0.download_time,
guid: value.0.guid,
deleted: value.0.deleted,
Expand All @@ -96,7 +105,27 @@ impl
}
}

pub fn map_file_url_with_api_key(
fn map_file_url_with_api_key(
podcast_episode: &PodcastEpisode,
local_url: &Option<String>,
remote_url: &str,
api_key: &Option<String>,
) -> String {
match &podcast_episode.download_location {
Some(location) => {
let handle = FileHandlerType::from(location.as_str());
match handle {
FileHandlerType::Local => {
map_local_file_url_with_api_key(local_url, remote_url, api_key)
}
FileHandlerType::S3 => map_s3_url(local_url, remote_url),
}
}
None => remote_url.to_string(),
}
}

pub fn map_local_file_url_with_api_key(
url: &Option<String>,
remote_url: &str,
api_key: &Option<String>,
Expand All @@ -108,12 +137,14 @@ pub fn map_file_url_with_api_key(
.map(|c| urlencoding::encode(c.as_os_str().to_str().unwrap()))
.collect::<Vec<Cow<str>>>()
.join("/");
url_encoded = ENVIRONMENT_SERVICE.server_url.to_owned() + &url_encoded;
let urlencoded = url_encoded.clone();
url_encoded = ENVIRONMENT_SERVICE.server_url.to_owned();
url_encoded.push_str(&urlencoded);

match ENVIRONMENT_SERVICE.any_auth_enabled {
true => match &api_key {
None => url_encoded,
Some(api_key) => url_encoded + "?apiKey=" + api_key,
Some(api_key) => format!("{}{}{}", url_encoded, "?apiKey=", api_key),
},
false => url_encoded,
}
Expand All @@ -122,6 +153,24 @@ pub fn map_file_url_with_api_key(
}
}

/// Resolves the URL for an episode asset for a (possibly anonymous) user,
/// dispatching on where the episode's files are hosted.
///
/// * `Local` storage delegates to [`map_file_url`].
/// * `S3` storage delegates to [`map_s3_url`].
/// * An episode with no recorded download location falls back to the
///   original remote URL.
fn map_url(
    episode: &PodcastEpisode,
    local_url: &Option<String>,
    remote_url: &str,
    user: &Option<User>,
) -> String {
    let Some(location) = &episode.download_location else {
        // Nothing was downloaded yet — serve the upstream URL as-is.
        return remote_url.to_string();
    };
    match FileHandlerType::from(location.as_str()) {
        FileHandlerType::Local => map_file_url(local_url, remote_url, user),
        FileHandlerType::S3 => map_s3_url(local_url, remote_url),
    }
}

pub fn map_file_url(url: &Option<String>, remote_url: &str, user: &Option<User>) -> String {
match url {
Some(url) => {
Expand All @@ -130,14 +179,14 @@ pub fn map_file_url(url: &Option<String>, remote_url: &str, user: &Option<User>)
.map(|c| urlencoding::encode(c.as_os_str().to_str().unwrap()))
.collect::<Vec<Cow<str>>>()
.join("/");
url_encoded = ENVIRONMENT_SERVICE.server_url.to_owned() + &url_encoded;
url_encoded = format!("{}{}", ENVIRONMENT_SERVICE.server_url, url_encoded);

match ENVIRONMENT_SERVICE.any_auth_enabled {
true => match &user {
None => url_encoded,
Some(user) => match &user.api_key {
None => url_encoded,
Some(key) => url_encoded + "?apiKey=" + key,
Some(key) => format!("{}{}{}", url_encoded, "?apiKey=", key),
},
},
false => url_encoded,
Expand All @@ -146,3 +195,18 @@ pub fn map_file_url(url: &Option<String>, remote_url: &str, user: &Option<User>)
None => remote_url.to_string(),
}
}

/// Builds a public S3 URL for a stored file by prefixing the configured
/// bucket endpoint to the percent-encoded object path.
///
/// Each path component is percent-encoded individually so separators survive
/// encoding. When `url` is `None` the original `remote_url` is returned.
pub fn map_s3_url(url: &Option<String>, remote_url: &str) -> String {
    let Some(path) = url else {
        return remote_url.to_string();
    };
    // Encode every path segment separately, then re-join with '/'.
    let encoded = PathBuf::from(path)
        .components()
        .map(|c| urlencoding::encode(c.as_os_str().to_str().unwrap()))
        .collect::<Vec<Cow<str>>>()
        .join("/");
    format!("{}/{}", S3_BUCKET_CONFIG.endpoint, encoded)
}
Loading

0 comments on commit 5f0ea21

Please sign in to comment.