summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/de.rs45
-rw-r--r--src/lib.rs319
-rw-r--r--src/main.rs167
-rw-r--r--src/render.rs116
-rw-r--r--src/s3.rs152
5 files changed, 799 insertions, 0 deletions
diff --git a/src/de.rs b/src/de.rs
new file mode 100644
index 0000000..47ad83a
--- /dev/null
+++ b/src/de.rs
@@ -0,0 +1,45 @@
+/// Convert an h:m:s string to a Duration
+pub mod hms_duration {
+ use std::num::ParseIntError;
+ use std::time::Duration;
+
+ use serde::de::Error;
+ use serde::Deserialize;
+ use serde::Deserializer;
+ use serde::Serializer;
+
+ pub fn serialize<S>(duration: &Duration, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let secs = duration.as_secs();
+ let hours = secs / 3600;
+ let minutes = (secs % 3600) / 60;
+ let seconds = secs % 60;
+ let s = if hours > 0 {
+ format!("{hours:0>2}:{minutes:0>2}:{seconds:0>2}")
+ } else {
+ format!("{minutes:0>2}:{seconds:0>2}")
+ };
+ serializer.serialize_str(&s)
+ }
+
+ pub fn deserialize<'de, D>(deserializer: D) -> Result<Duration, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ let s = String::deserialize(deserializer)?;
+ from_string(s).map_err(Error::custom)
+ }
+
+ pub fn from_string(s: String) -> Result<Duration, ParseIntError> {
+ let mut total_seconds = 0;
+ let mut mult = 1;
+ for x in s.rsplit(":") {
+ let t: u64 = x.parse()?;
+ total_seconds += t * mult;
+ mult *= 60;
+ }
+ Ok(Duration::from_secs(total_seconds))
+ }
+}
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..98c2984
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,319 @@
+use std::collections::HashSet;
+use std::{borrow::Cow, collections::BTreeMap, error::Error, time::Duration};
+
+use chrono::NaiveDate;
+use futures::{future::ready, stream::iter, StreamExt, TryStreamExt};
+use lofty::file::{AudioFile, TaggedFileExt};
+use lofty::tag::Accessor;
+use log::info;
+use regex::Regex;
+use render::Renderer;
+use serde::{Deserialize, Serialize};
+use str_slug::slug;
+
+pub mod de;
+mod render;
+pub mod s3;
+
/// The Index tracks the state from the last successful execution.
#[derive(Default, Serialize, Deserialize)]
pub struct Index {
    // Metadata for every tracked media file, keyed by bucket key (filename).
    #[serde(default)]
    entries: BTreeMap<String, Entry>,
    // Installed templates, keyed by template name.
    #[serde(default)]
    templates: BTreeMap<String, Template>,
    // For each template name, the set of output files rendered from it;
    // used by render() to detect and delete orphaned output files.
    #[serde(default)]
    rendered: BTreeMap<String, HashSet<String>>,
}
+
+impl Index {
+ pub fn contains_key(&self, key: &str) -> bool {
+ self.entries.contains_key(key)
+ }
+
+ pub fn add(&mut self, entry: Entry) {
+ self.entries.insert(entry.filename.to_owned(), entry);
+ }
+
+ pub fn remove(&mut self, filename: &str) {
+ self.entries.remove(filename);
+ }
+}
+
/// Entry records the metadata about a file in the collection.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Entry {
    // The object key in the bucket; also the index key.
    pub filename: String,
    // Publication date; populated from the S3 object's last-modified time.
    pub date: Option<NaiveDate>,
    // The S3 ETag of the object, when known.
    pub etag: Option<String>,
    // Track title/album/artist read from the file's audio tags; None when
    // the file is untagged.
    pub title: Option<String>,
    pub album: Option<String>,
    pub artist: Option<String>,
    // Object size in bytes.
    pub size: i64,
    // Track length; persisted as an "h:m:s" string (see de::hms_duration).
    #[serde(with = "de::hms_duration")]
    pub duration: Duration,
    // True if the entry should be excluded from rendered output.
    pub hidden: bool,
}
+
+impl Entry {
+ pub fn read_from(
+ key: String,
+ etag: Option<String>,
+ size: i64,
+ date: Option<NaiveDate>,
+ mut file: std::fs::File,
+ ) -> Result<Entry, Box<dyn Error>> {
+ let tagged = lofty::read_from(&mut file)?;
+ let tag = tagged.primary_tag();
+
+ Ok(Entry {
+ filename: key.to_string(),
+ etag,
+ date,
+ title: tag.and_then(|t| t.title()).map(Cow::into_owned),
+ artist: tag.and_then(|t| t.artist()).map(Cow::into_owned),
+ album: tag.and_then(|t| t.album()).map(Cow::into_owned),
+ size,
+ duration: tagged.properties().duration(),
+ hidden: false,
+ })
+ }
+}
+
/// Templates are used to render content for the feed.
///
/// `partial` templates are never rendered, but may be included in other templates.
/// `index` templates are rendered once for the entire collection.
/// Non-index templates are rendered once per entry.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Template {
    // Template name; doubles as the output filename for index templates and
    // as the directory/extension source for per-entry templates
    // (see make_filename).
    pub name: String,
    /// False if the template should be rendered for each entry.
    /// True if the template renders the list of all entries.
    #[serde(default)]
    pub index: bool,
    // Partials are only included from other templates, never rendered directly.
    #[serde(default)]
    pub partial: bool,
    // Content-Type for the uploaded output; "text/html" is used when None.
    pub content_type: Option<String>,
    #[serde(default)]
    /// Only render this template for files that match this regex.
    pub filter: Option<String>,
    // The template source text (Tera syntax — see render.rs).
    pub template: String,
}
+
+impl Template {
+ fn make_filename(&self, entry: &Entry) -> String {
+ if self.index {
+ self.name.to_string()
+ } else {
+ let (basename, extension) = self
+ .name
+ .rsplit_once(".")
+ .unwrap_or_else(|| (&self.name, ".html"));
+ let title = if let Some(title) = &entry.title {
+ slug(title)
+ } else {
+ slug(
+ entry
+ .filename
+ .rsplit_once(".")
+ .unwrap_or_else(|| (&entry.filename, ""))
+ .0,
+ )
+ };
+ if let Some(ref date) = entry.date {
+ format!("{basename}/{date}-{title}.{extension}")
+ } else {
+ format!("{basename}/{title}.{extension}")
+ }
+ }
+ }
+}
+
/// MP32RSS is the application state: the S3 bucket access plus the
/// in-memory copy of the persisted index.
pub struct MP32RSS {
    // S3 client wrapper scoped to the feed bucket.
    access: s3::Access,
    // In-memory copy of the persisted index state.
    index: Index,
    // ETag of the index object as last fetched/written; used for optimistic
    // concurrency when saving (see sync / s3::Access::put_index).
    index_etag: Option<String>,
}
+
impl MP32RSS {
    /// Fetch the persisted index from the bucket and construct the app state.
    ///
    /// A missing index object yields a default (empty) index, so this also
    /// works for a fresh bucket.
    pub async fn open(access: s3::Access) -> Result<Self, Box<dyn Error>> {
        info!("Opening index file");
        let (index, index_etag) = access.fetch_index().await?;
        Ok(Self {
            access,
            index,
            index_etag,
        })
    }

    /// Mutable access to a single entry, for metadata edits.
    pub fn get_mut_entry(&mut self, filename: &str) -> Option<&mut Entry> {
        self.index.entries.get_mut(filename)
    }

    /// Persist the index back to the bucket.
    ///
    /// The stored ETag is sent with the put so a concurrent writer is
    /// detected instead of silently overwritten; the new ETag is remembered
    /// for the next save.
    pub async fn sync(&mut self) -> Result<(), Box<dyn Error>> {
        info!("Saving index file");
        let new_etag = self
            .access
            .put_index(&self.index, self.index_etag.as_deref())
            .await?;
        self.index_etag = new_etag;
        Ok(())
    }

    /// Scan the bucket for new mp3 files, index them, and rerender.
    pub async fn refresh(&mut self) -> Result<(), Box<dyn Error>> {
        info!("Syncing against files in bucket");

        // 2. List files in the bucket
        let objects = self.access.list_mp3s().await?;

        // 3. Find files missing in the index
        // 4. For each missing file, download and add to local index
        let new_entries: Vec<_> = iter(objects)
            // Skip keys that are already indexed.
            .filter(|k| ready(!self.index.contains_key(k)))
            .then(|k| {
                info!("Found new file: {k}...");
                self.access.fetch_entry(k)
            })
            .try_collect()
            .await?;

        let mut new_filenames = HashSet::new();
        for entry in new_entries {
            new_filenames.insert(entry.filename.to_string());
            self.index.add(entry);
        }

        // 5. Generate files for each template and upload
        // 6. Upload each file
        // Check the ETAG against the ETAG in the index object?
        // Only the newly discovered entries need per-entry rerendering.
        self.render(|e| new_filenames.contains(&e.filename)).await?;

        // 7. Upload the index file
        // If upload fails, abort (or retry from the beginning)
        // NOTE(review): render() already ends with sync(); this second sync
        // is redundant but harmless — confirm before removing.
        self.sync().await
    }

    /// Remove an entry and its bucket object, then rerender.
    ///
    /// The entry is gone from the index before rendering, so its generated
    /// files are pruned by render()'s orphan cleanup.
    pub async fn delete(&mut self, filename: &str) -> Result<(), Box<dyn Error>> {
        self.index.remove(filename);
        self.access.remove_file(filename).await?;
        self.render(|e| e.filename == filename).await?;
        self.sync().await
    }

    /// Iterate over the installed templates.
    pub fn templates(&self) -> impl Iterator<Item = &Template> {
        self.index.templates.values()
    }

    /// Add a new template and rerender.
    pub async fn add_template(
        &mut self,
        name: String,
        template: String,
        index: bool,
        partial: bool,
        content_type: Option<String>,
        filter: Option<String>,
    ) -> Result<(), Box<dyn Error>> {
        self.index.templates.insert(
            name.to_owned(),
            Template {
                name,
                template,
                index,
                partial,
                content_type,
                filter,
            },
        );
        // Everything must be rerendered: any template may include the new one.
        self.render(|_| true).await?;
        self.sync().await
    }

    /// Remove a template and rerender; render() deletes its orphaned output.
    pub async fn delete_template(&mut self, name: &str) -> Result<(), Box<dyn Error>> {
        self.index.templates.remove(name);
        self.render(|_| true).await?;
        self.sync().await
    }

    /// Render all templates.
    ///
    /// Filter limits per-entry rerendering to entries which return true;
    /// index templates are always rerendered. Orphaned output files (no
    /// longer produced by any template) are deleted from the bucket, and
    /// the index's `rendered` bookkeeping is updated and synced.
    pub async fn render<F>(&mut self, filter: F) -> Result<(), Box<dyn Error>>
    where
        F: Fn(&Entry) -> bool,
    {
        let empty_set = HashSet::new();
        let mut renderer = Renderer::new(self.index.templates.values())?;

        for template in self.index.templates.values() {
            // Output files produced by this template on the previous run.
            let existing_files = self
                .index
                .rendered
                .get(&template.name)
                .unwrap_or(&empty_set);
            let mut new_files = HashSet::new();

            // Partials are include-only and produce no output of their own.
            if !template.partial {
                info!("Rendering template {}", template.name);
                let content_type = template.content_type.as_deref().unwrap_or("text/html");
                // An absent filter matches every filename.
                let file_regex = Regex::new(template.filter.as_deref().unwrap_or(".*"))?;
                let entries: Vec<_> = self
                    .index
                    .entries
                    .values()
                    .filter(|e| !e.hidden && file_regex.is_match(&e.filename))
                    .cloned()
                    .collect();
                renderer.set_entries(entries.clone());
                if template.index {
                    let filename = template.name.to_string();
                    let data = renderer.render_index(&template.name, &filename)?;
                    self.access.put_file(&filename, content_type, data).await?;
                    new_files.insert(filename);
                } else {
                    // Generate a file for each entry.
                    for entry in &entries {
                        let filename = template.make_filename(entry);
                        if filter(entry) {
                            let data = renderer.render_entry(&template.name, &filename, entry)?;
                            self.access.put_file(&filename, content_type, data).await?;
                            new_files.insert(filename);
                        } else if existing_files.contains(&filename) {
                            // Unchanged entry: keep the prior output on record
                            // so it is not treated as an orphan below.
                            new_files.insert(filename);
                        }
                    }
                }
            }

            // Remove any orphaned files
            for filename in existing_files {
                if !new_files.contains(filename) {
                    info!("Removing rendered file {}", filename);
                    self.access.remove_file(filename).await?;
                }
            }

            // Update the list of rendered files.
            self.index
                .rendered
                .insert(template.name.to_string(), new_files);
        }

        // Remove renderings of any deleted templates
        for (template, files) in &self.index.rendered {
            if !self.index.templates.contains_key(template) {
                info!("Removing all rendered files for template {}", template);
                for filename in files {
                    self.access.remove_file(filename).await?;
                }
            }
        }
        self.index
            .rendered
            .retain(|k, _| self.index.templates.contains_key(k));

        self.sync().await
    }
}
diff --git a/src/main.rs b/src/main.rs
new file mode 100644
index 0000000..4a89183
--- /dev/null
+++ b/src/main.rs
@@ -0,0 +1,167 @@
+use std::{error::Error, path::PathBuf};
+
+use clap::{arg, command, Parser, Subcommand};
+
+use env_logger::Env;
+use mp32rss::{de::hms_duration, s3::Access, MP32RSS};
+use tokio::{fs::File, io::AsyncReadExt};
+
// Command-line interface: global options plus a subcommand.
// (Plain `//` comments here on purpose — `///` would become clap help text.)
#[derive(Parser, Debug)]
#[command(version, about)]
struct Args {
    // The S3 bucket holding the media files and the generated feed output.
    #[arg(short, long)]
    bucket: String,

    #[command(subcommand)]
    command: Command,
}
+
// Subcommands of the CLI. The `///` doc comments double as clap help text.
#[derive(Subcommand, Debug)]
enum Command {
    /// Scan the bucket for new files and regenerate the feed.
    Refresh,

    /// Rerender the feed.
    Render,

    /// Edit the metadata for an entry.
    Edit(EditArgs),

    /// Delete an entry and remove the file from the bucket.
    Delete {
        /// The key to the file in the bucket.
        filename: String,
    },

    /// List the templates.
    Templates,

    /// Add or replace a template and regenerate the feed.
    AddTemplate(AddTemplateArgs),

    /// Remove a template and the files generated from it.
    RemoveTemplate {
        /// The template name.
        name: String,
    },
}
+
// Arguments for `edit`. Every optional field left as None means
// "leave that attribute of the entry unchanged" (see update_entry).
#[derive(clap::Args, Debug)]
struct EditArgs {
    /// The key to the file in the bucket.
    filename: String,
    /// The entry title.
    #[arg(long)]
    title: Option<String>,
    /// The entry album.
    #[arg(long)]
    album: Option<String>,
    /// The entry artist.
    #[arg(long)]
    artist: Option<String>,
    /// The file size in bytes.
    #[arg(long)]
    size: Option<i64>,
    /// The track length in h:m:s.
    #[arg(long)]
    duration: Option<String>,
    /// True if the entry should not be published.
    #[arg(long)]
    hidden: Option<bool>,
}
+
// Arguments for `add-template`.
// NOTE(review): partial/content_type/filter have no `///` docs, so clap
// shows no help for them; consider promoting these `//` notes to `///`.
#[derive(clap::Args, Debug)]
struct AddTemplateArgs {
    /// True if the template should be rendered once for all entries.
    #[arg(long)]
    index: bool,
    // Mark the template as include-only (never rendered directly).
    #[arg(long)]
    partial: bool,
    // Content-Type for the uploaded output (text/html is used downstream when unset).
    #[arg(long)]
    content_type: Option<String>,
    // Regex restricting which entries this template renders.
    #[arg(long)]
    filter: Option<String>,
    /// The template filename.
    filename: PathBuf,
    /// The rendered filename.
    name: String,
}
+
// Entry point: build the S3-backed feed state and dispatch the subcommand.
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    // Default to info-level logging unless RUST_LOG overrides it.
    env_logger::Builder::from_env(Env::default().default_filter_or("info")).init();

    let args = Args::parse();
    // Credentials and region come from the standard AWS environment/config chain.
    let config = aws_config::load_from_env().await;
    let client = aws_sdk_s3::Client::new(&config);

    let access = Access::new(client, args.bucket);
    // Opening the feed fetches the persisted index object from the bucket.
    let mut feed = MP32RSS::open(access).await?;

    match args.command {
        Command::Refresh => feed.refresh().await,
        Command::Render => feed.render(|_| true).await,
        Command::Edit(edit_args) => update_entry(&mut feed, edit_args).await,
        Command::Delete { filename } => feed.delete(&filename).await,
        Command::Templates => list_templates(&feed),
        Command::AddTemplate(add_template_args) => add_template(&mut feed, add_template_args).await,
        Command::RemoveTemplate { name } => feed.delete_template(&name).await,
    }
}
+
+async fn update_entry(feed: &mut MP32RSS, args: EditArgs) -> Result<(), Box<dyn Error>> {
+ if let Some(entry) = feed.get_mut_entry(&args.filename) {
+ if let Some(v) = args.title {
+ entry.title = Some(v);
+ }
+ if let Some(v) = args.album {
+ entry.album = Some(v);
+ }
+ if let Some(v) = args.artist {
+ entry.artist = Some(v);
+ }
+ if let Some(v) = args.size {
+ entry.size = v;
+ }
+ if let Some(v) = args.duration {
+ entry.duration = hms_duration::from_string(v)?;
+ }
+ if let Some(v) = args.hidden {
+ entry.hidden = v;
+ }
+ }
+ feed.render(|e| e.filename == args.filename).await
+}
+
+fn list_templates(feed: &MP32RSS) -> Result<(), Box<dyn Error>> {
+ println!(
+ "{:<20} {:<5} {:<7} content-type",
+ "name", "index", "partial"
+ );
+ println!("{:-<20} {:-<5} {:-<7} {:-<12}", "", "", "", "");
+ for template in feed.templates() {
+ println!(
+ "{:<20} {:<5} {:<7} {}",
+ template.name,
+ if template.index { "index" } else { "" },
+ if template.partial { "partial" } else { "" },
+ template.content_type.as_deref().unwrap_or("text/html")
+ );
+ }
+ Ok(())
+}
+
+async fn add_template(feed: &mut MP32RSS, args: AddTemplateArgs) -> Result<(), Box<dyn Error>> {
+ let mut file = File::open(&args.filename).await?;
+ let mut data = String::new();
+ file.read_to_string(&mut data).await?;
+ feed.add_template(
+ args.name,
+ data,
+ args.index,
+ args.partial,
+ args.content_type,
+ args.filter,
+ )
+ .await
+}
diff --git a/src/render.rs b/src/render.rs
new file mode 100644
index 0000000..befc780
--- /dev/null
+++ b/src/render.rs
@@ -0,0 +1,116 @@
+use std::{
+ collections::{BTreeMap, HashMap},
+ error::Error,
+};
+
+use serde::{Deserialize, Serialize};
+use tera::{Context, Error as TeraError, Tera, Value};
+
+use crate::{Entry, Template};
+
/// Page attributes available to the template.
///
/// Exposed to every template as the `page` variable (see make_context).
#[derive(Serialize, Deserialize, Debug)]
struct Page<'a> {
    /// The path portion of the URL to this page.
    path: &'a str,
}
+
/// Renderer combines entries with a template to produce output content.
pub struct Renderer {
    // Compiled template set; all templates are registered together so that
    // templates can include each other (e.g. partials).
    tera: Tera,
    // Entries exposed to templates as `entries` (set via set_entries).
    entries: Vec<Entry>,
    // Entries grouped by album name, newest group first; exposed as `albums`.
    albums: Vec<(String, Vec<Entry>)>,
}
+
impl Renderer {
    /// Compile all templates into a Tera instance and register the
    /// `make_filename` filter.
    ///
    /// The filter takes an entry value plus a `template="name"` attribute
    /// and returns the output path that template would produce for the
    /// entry — useful for linking an index page to per-entry pages.
    pub fn new<'a, T>(it: T) -> Result<Self, Box<dyn Error>>
    where
        T: Iterator<Item = &'a Template>,
    {
        let templates: BTreeMap<_, _> = it.map(|t| (t.name.to_string(), t.clone())).collect();

        let mut tera = Tera::default();
        // Register every template (including partials) in one call so
        // cross-template references resolve.
        tera.add_raw_templates(
            templates
                .values()
                .map(|t| (t.name.to_string(), t.template.to_string()))
                .collect::<Vec<_>>(),
        )?;

        // The closure takes ownership of `templates` so the filter can look
        // up the target template at render time.
        tera.register_filter(
            "make_filename",
            move |value: &Value, attrs: &HashMap<String, Value>| {
                let name = attrs
                    .get("template")
                    .and_then(|v| v.as_str())
                    .ok_or(TeraError::msg("Missing attribute template"))?;
                let template = templates
                    .get(name)
                    .ok_or(TeraError::msg("Template doesn't exist"))?;
                // The filter's input value must deserialize into an Entry.
                let entry = serde_json::from_value(value.clone())
                    .map_err(|_| TeraError::msg("Input to filter is not an entry"))?;
                Ok(Value::from(template.make_filename(&entry)))
            },
        );

        Ok(Self {
            tera,
            entries: Vec::new(),
            albums: Vec::new(),
        })
    }

    /// Render a template that represents a collection of entries.
    pub fn render_index(&self, template: &str, path: &str) -> Result<String, Box<dyn Error>> {
        let page = Page { path };
        let context = self.make_context(&page);
        Ok(self.tera.render(template, &context)?)
    }

    /// Render a template for a single entry.
    ///
    /// The entry is exposed to the template as `entry`, in addition to the
    /// collection-wide context.
    pub fn render_entry(
        &self,
        template: &str,
        path: &str,
        entry: &Entry,
    ) -> Result<String, Box<dyn Error>> {
        let page = Page { path };
        let mut context = self.make_context(&page);
        context.insert("entry", entry);
        Ok(self.tera.render(template, &context)?)
    }

    /// Prepare the Context object for the template.
    ///
    /// Exposes `page`, `entries`, and `albums` to the template.
    fn make_context(&self, page: &Page) -> Context {
        let mut context = Context::new();
        context.insert("page", &page);
        context.insert("entries", &self.entries);
        context.insert("albums", &self.albums);
        context
    }

    /// Set the entries used for subsequent renders and precompute the
    /// album grouping.
    ///
    /// Entries are grouped by album name, falling back to title and then
    /// filename for untagged files.
    pub fn set_entries(&mut self, entries: Vec<Entry>) {
        let mut albums: Vec<(_, _)> = entries
            .iter()
            .fold(BTreeMap::new(), |mut a: BTreeMap<_, Vec<_>>, b| {
                let k = b
                    .album
                    .clone()
                    .or_else(|| b.title.clone())
                    .unwrap_or_else(|| b.filename.clone());
                a.entry(k).or_default().push(b.to_owned());
                a
            })
            .into_iter()
            .collect();

        // Order groups by their newest entry date, descending; groups whose
        // entries all lack dates sort last (None < Some).
        albums.sort_by(|a, b| {
            let da = a.1.iter().map(|v| &v.date).max();
            let db = b.1.iter().map(|v| &v.date).max();
            db.cmp(&da)
        });

        self.albums = albums;
        self.entries = entries;
    }
}
diff --git a/src/s3.rs b/src/s3.rs
new file mode 100644
index 0000000..f72b579
--- /dev/null
+++ b/src/s3.rs
@@ -0,0 +1,152 @@
+use std::{
+ error::Error,
+ io::{Seek, Write},
+};
+
+use aws_sdk_s3::{
+ error::SdkError, operation::get_object::GetObjectError, primitives::ByteStream, Client,
+};
+use aws_smithy_types_convert::date_time::DateTimeExt;
+use bytes::buf::Buf;
+use futures::TryFutureExt;
+use tempfile::tempfile;
+
+use crate::{Entry, Index};
+
/// Access wraps an S3 client together with the bucket that holds both the
/// media files and the generated feed output.
pub struct Access {
    client: Client,
    bucket: String,
}
+
impl Access {
    /// Create an accessor for `bucket` using the given S3 client.
    pub fn new(client: Client, bucket: String) -> Self {
        Self { client, bucket }
    }

    /// Download and parse the index object (`.mp32rss/index.json`).
    ///
    /// Returns the parsed index together with its ETag. A missing index
    /// object is not an error: a default (empty) index and a `None` ETag
    /// are returned so a fresh bucket can be initialized.
    pub async fn fetch_index(&self) -> Result<(Index, Option<String>), Box<dyn Error>> {
        let result = self
            .client
            .get_object()
            .bucket(&self.bucket)
            .key(".mp32rss/index.json")
            .send()
            .await;

        match result {
            Ok(obj) => {
                let data = obj.body.collect().await?;
                let index: Index = serde_json::from_reader(data.reader())?;
                Ok((index, obj.e_tag))
            }

            // NoSuchKey means the feed has never been synced: start empty.
            Err(SdkError::ServiceError(ref e)) => {
                if let GetObjectError::NoSuchKey(_) = e.err() {
                    Ok((Index::default(), None))
                } else {
                    // Re-surface the original error; the unwrap is safe
                    // because this arm only matches when `result` is an Err.
                    Err(Box::from(result.err().unwrap()))
                }
            }

            _ => Err(Box::from(result.err().unwrap())),
        }
    }

    /// Upload the index object, guarding against concurrent writers.
    ///
    /// With `index_etag` = Some, the put is conditional on the object still
    /// having that ETag (`If-Match`); with None, it only succeeds when no
    /// index exists yet (`If-None-Match: *`). Returns the new ETag.
    pub async fn put_index(
        &self,
        index: &Index,
        index_etag: Option<&str>,
    ) -> Result<Option<String>, Box<dyn Error>> {
        let data = serde_json::to_string_pretty(index)?;

        let mut builder = self
            .client
            .put_object()
            .bucket(&self.bucket)
            .key(".mp32rss/index.json")
            .body(ByteStream::from(data.into_bytes()));

        builder = if let Some(etag) = index_etag {
            builder.if_match(etag)
        } else {
            builder.if_none_match('*')
        };

        let resp = builder.send().await?;
        Ok(resp.e_tag)
    }

    /// Upload rendered content to `key` with the given Content-Type.
    /// Returns the new object's ETag.
    pub async fn put_file(
        &self,
        key: &str,
        content_type: &str,
        data: String,
    ) -> Result<Option<String>, Box<dyn Error>> {
        let resp = self
            .client
            .put_object()
            .bucket(&self.bucket)
            .key(key)
            .content_type(content_type)
            .body(ByteStream::from(data.into_bytes()))
            .send()
            .await?;
        Ok(resp.e_tag)
    }

    /// List the keys of all `.mp3` objects in the bucket (all pages).
    pub async fn list_mp3s(&self) -> Result<Vec<String>, Box<dyn Error>> {
        Ok(self
            .client
            .list_objects_v2()
            .bucket(&self.bucket)
            .into_paginator()
            .send()
            .try_collect()
            .await?
            .iter()
            .flat_map(|resp| resp.contents())
            // NOTE(review): listed objects should always carry a key, but
            // this expect panics rather than erroring if one were missing.
            .map(|obj| obj.key().expect("Missing key from s3 object").to_string())
            .filter(|key| key.ends_with(".mp3"))
            .collect())
    }

    /// Download the object at `key` into a temp file and build an [`Entry`]
    /// from its audio tags and S3 metadata.
    pub async fn fetch_entry(&self, key: String) -> Result<Entry, Box<dyn Error>> {
        // Spool to a temp file: tag parsing reads from a seekable file.
        let mut file = tempfile()?;

        let mut obj = self
            .client
            .get_object()
            .bucket(&self.bucket)
            .key(&key)
            .send()
            .await?;

        // Stream the body to disk, counting bytes for the entry size.
        let mut size = 0;
        while let Some(bytes) = obj.body.try_next().await? {
            size += bytes.len() as i64;
            file.write_all(&bytes)?;
        }

        // Rewind so Entry::read_from parses from the start of the data.
        file.rewind()?;

        Entry::read_from(
            key,
            obj.e_tag,
            size,
            // The object's last-modified time doubles as publication date.
            obj.last_modified
                .and_then(|dt| dt.to_chrono_utc().ok())
                .map(|dt| dt.date_naive()),
            file,
        )
    }

    /// Delete the object at `key`, discarding the response body.
    pub async fn remove_file(&self, key: &str) -> Result<(), Box<dyn Error>> {
        self.client
            .delete_object()
            .bucket(&self.bucket)
            .key(key)
            .send()
            .map_ok(|_| ())
            .map_err(Box::from)
            .await
    }
}